Example #1
class SiteStorage:
    def __init__(self, site, allow_create=True):
        self.site = site
        self.directory = "%s/%s" % (config.data_dir, self.site.address
                                    )  # Site data diretory
        self.allowed_dir = os.path.abspath(
            self.directory.decode(sys.getfilesystemencoding())
        )  # Only serve/modify file within this dir
        self.log = site.log
        self.db = None  # Db class
        self.db_checked = False  # Checked db tables since startup
        self.event_db_busy = None  # Gevent AsyncResult if db is working on rebuild
        self.has_db = self.isFile("dbschema.json")  # The site has schema

        if not os.path.isdir(self.directory):
            if allow_create:
                os.mkdir(self.directory)  # Create directory if not found
            else:
                raise Exception("Directory not exists: %s" % self.directory)

    # Load db from dbschema.json
    def openDb(self, check=True):
        try:
            schema = self.loadJson("dbschema.json")
            db_path = self.getPath(schema["db_file"])
        except Exception as err:
            raise Exception("dbschema.json is not a valid JSON: %s" % err)

        if check:
            if not os.path.isfile(db_path) or os.path.getsize(db_path) == 0:  # Does not exist or is empty
                self.rebuildDb()

        if not self.db:
            self.db = Db(schema, db_path)

        if check and not self.db_checked:
            changed_tables = self.db.checkTables()
            if changed_tables:
                self.rebuildDb(delete_db=False)  # TODO: only update the changed table data
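For context, the dbschema.json that openDb() loads describes the database name, file and table layout. A minimal sketch of such a schema, using the same field names as the schema dicts in Examples #3 and #18 below (the concrete values here are placeholders, not taken from any real site):

{
    "db_name": "ExampleDb",
    "db_file": "data/example.db",
    "map": {
        "data.json": {"to_table": {"test": "test"}}
    },
    "tables": {
        "test": {
            "cols": [["test_id", "INTEGER"], ["title", "TEXT"]],
            "indexes": ["CREATE UNIQUE INDEX test_id ON test(test_id)"],
            "schema_changed": 1426195822
        }
    }
}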
Example #2
    def __init__(self, twitchusername, bet):

        # Create the connection to the database and retrieve the viewer's current points
        con = Db()
        points = con.get_points(twitchusername)
        if points < bet:
            # This will be a twitch message when using an API
            logging.error("You do not have enough points to make this bet")
        else:
            logging.info("Running bet")
            number_rolled = random.randint(1, 100)
            if number_rolled <= 50:
                con.remove_points(twitchusername, bet)
                logging.info(twitchusername + " lost.")
            elif 51 <= number_rolled <= 95:
                con.add_points(twitchusername, bet)
                logging.info(twitchusername + " won.")
            else:
                big_win = bet * 1.5
                con.add_points(twitchusername, big_win)
                logging.info(twitchusername + " won the big one.")
Example #3
    def testDb(self):
        print "Importing db..."
        from Db import Db
        for db_path in [
                os.path.abspath("data/test/zeronet.db"), "data/test/zeronet.db"
        ]:
            print "Creating db using %s..." % db_path,
            schema = {
                "db_name": "TestDb",
                "db_file": "data/test/zeronet.db",
                "map": {
                    "data.json": {
                        "to_table": {
                            "test": "test"
                        }
                    }
                },
                "tables": {
                    "test": {
                        "cols": [
                            ["test_id", "INTEGER"],
                            ["title", "TEXT"],
                        ],
                        "indexes":
                        ["CREATE UNIQUE INDEX test_id ON test(test_id)"],
                        "schema_changed": 1426195822
                    }
                }
            }

            if os.path.isfile("data/test/zeronet.db"):
                os.unlink("data/test/zeronet.db")
            db = Db(schema, "data/test/zeronet.db")
            db.checkTables()
            db.close()

            # Cleanup
            os.unlink("data/test/zeronet.db")
            os.rmdir("data/test/")
            print "ok"
Example #4
    def init_db(self):
        """Initialize database"""
        db = Db(self.args['db'])
        if self.args['target'] == 'asm':
            if not db.get('my_asm') or self.args['-u']:
                db.add_record('my_asm', input('Relative path to your asm:'))
            if not db.get('true_asm') or self.args['-u']:
                db.add_record('true_asm',
                              input('Relative path to original asm:'))
        elif self.args['target'] == 'corewar':
            if not db.get('my_cw') or self.args['-u']:
                db.add_record('my_cw', input("Relative path to your corewar:"))
            if not db.get('true_cw') or self.args['-u']:
                db.add_record('true_cw',
                              input("Relative path to original corewar:"))
            if not db.get('true_asm') or self.args['-u']:
                db.add_record('true_asm',
                              input('Relative path to original asm:'))

        if not db.get('src_dir') or self.args['-u']:
            db.add_record(
                'src_dir',
                input('Relative path to the directory with *.s files:'))
        return db
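A rough usage sketch for init_db(), assuming docopt-style arguments as implied above; the owning class name and the argument values are hypothetical, while the keys ('db', 'target', '-u') and the get()/add_record() calls come from the snippet itself:

checker = Checker()  # hypothetical class that defines init_db()
checker.args = {'db': 'progress.db', 'target': 'asm', '-u': False}
db = checker.init_db()   # prompts for any paths not yet stored
print(db.get('my_asm'))  # value recorded via add_record()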
Example #5
 def delete(self):
     request_id = list(self.__dict__.values())
     Db().delete('giveaway', request_id[0])
Example #6
 def openDb(self):
     schema = self.getDbSchema()
     db_path = self.getPath(schema["db_file"])
     return Db(schema, db_path)
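A hypothetical call site for the method above (the storage object and the query are assumptions made for illustration); the execute()/fetchone() usage mirrors Example #18 below:

db = storage.openDb()  # storage: hypothetical object exposing openDb()
row = db.execute("SELECT COUNT(*) AS num FROM test").fetchone()
print(row["num"])
db.close()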
Example #7
    def actionBenchmark(self):
        import sys
        import gc
        from contextlib import contextmanager

        output = self.sendHeader()

        if "Multiuser" in PluginManager.plugin_manager.plugin_names and not config.multiuser_local:
            yield "This function is disabled on this proxy"
            return

        @contextmanager
        def benchmark(name, standard):
            self.log.debug("Benchmark: %s" % name)
            s = time.time()
            output(b"- %s" % name.encode())
            try:
                yield 1
            except Exception as err:
                self.log.exception(err)
                output(b"<br><b>! Error: %s</b><br>" % Debug.formatException(err).encode())
            taken = time.time() - s
            if taken > 0:
                multipler = standard / taken
            else:
                multipler = 99
            if multipler < 0.3:
                speed = "Sloooow"
            elif multipler < 0.5:
                speed = "Ehh"
            elif multipler < 0.8:
                speed = "Goodish"
            elif multipler < 1.2:
                speed = "OK"
            elif multipler < 1.7:
                speed = "Fine"
            elif multipler < 2.5:
                speed = "Fast"
            elif multipler < 3.5:
                speed = "WOW"
            else:
                speed = "Insane!!"
            output(b"%.3fs [x%.2f: %s]<br>" % (taken, multipler, speed.encode()))
            time.sleep(0.01)

        yield """
        <style>
         * { font-family: monospace }
         table * { text-align: right; padding: 0px 10px }
        </style>
        """

        yield "Benchmarking ZeroNet %s (rev%s) Python %s on: %s...<br>" % (config.version, config.rev, sys.version, sys.platform)

        t = time.time()

        # CryptBitcoin
        yield "<br>CryptBitcoin:<br>"
        from Crypt import CryptBitcoin

        # seed = CryptBitcoin.newSeed()
        # yield "- Seed: %s<br>" % seed
        seed = "e180efa477c63b0f2757eac7b1cce781877177fe0966be62754ffd4c8592ce38"

        with benchmark("hdPrivatekey x 10", 0.7):
            for i in range(10):
                privatekey = CryptBitcoin.hdPrivatekey(seed, i * 10)
                yield "."
            valid = "5JsunC55XGVqFQj5kPGK4MWgTL26jKbnPhjnmchSNPo75XXCwtk"
            assert privatekey == valid, "%s != %s" % (privatekey, valid)

        data = "Hello" * 1024  # 5k
        with benchmark("sign x 10", 0.35):
            for i in range(10):
                yield "."
                sign = CryptBitcoin.sign(data, privatekey)
            valid = "G1GXaDauZ8vX/N9Jn+MRiGm9h+I94zUhDnNYFaqMGuOiBHB+kp4cRPZOL7l1yqK5BHa6J+W97bMjvTXtxzljp6w="
            assert sign == valid, "%s != %s" % (sign, valid)

        address = CryptBitcoin.privatekeyToAddress(privatekey)
        for lib_verify in ["btctools", "openssl", "libsecp256k1"]:
            try:
                CryptBitcoin.loadLib(lib_verify)
                loaded = True
                if lib_verify == "openssl":
                    yield "+ Loaded lib: %s<br>" % html.escape(str(CryptBitcoin.bitcoin.core.key._ssl))
                elif lib_verify == "libsecp256k1":
                    import coincurve
                    yield "+ Loaded lib: %s<br>" % type(coincurve._libsecp256k1.lib).__name__
            except Exception as err:
                yield "- Error loading %s: %s<br>" % (lib_verify, err)
                loaded = False
            if not loaded:
                continue
            with benchmark("%s verify x 100" % lib_verify, 0.37):
                for i in range(100):
                    if i % 10 == 0:
                        yield "."
                    ok = CryptBitcoin.verify(data, address, sign, lib_verify=lib_verify)
                assert ok, "does not verify from %s" % address

        # CryptHash
        yield "<br>CryptHash:<br>"
        from Crypt import CryptHash
        import io

        data = io.BytesIO(b"Hello" * 1024 * 1024)  # 5m
        with benchmark("sha256 5M x 10", 0.6):
            for i in range(10):
                data.seek(0)
                hash = CryptHash.sha256sum(data)
                yield "."
            valid = "8cd629d9d6aff6590da8b80782a5046d2673d5917b99d5603c3dcb4005c45ffa"
            assert hash == valid, "%s != %s" % (hash, valid)

        data = io.BytesIO(b"Hello" * 1024 * 1024)  # 5m
        with benchmark("sha512 5M x 10", 0.6):
            for i in range(10):
                data.seek(0)
                hash = CryptHash.sha512sum(data)
                yield "."
            valid = "9ca7e855d430964d5b55b114e95c6bbb114a6d478f6485df93044d87b108904d"
            assert hash == valid, "%s != %s" % (hash, valid)

        with benchmark("os.urandom(256) x 1000", 0.0065):
            for i in range(10):
                for y in range(100):
                    data = os.urandom(256)
                yield "."

        # Msgpack
        from util import Msgpack
        yield "<br>Msgpack: (version: %s)<br>" % ".".join(map(str, Msgpack.msgpack.version))
        binary = b'fqv\xf0\x1a"e\x10,\xbe\x9cT\x9e(\xa5]u\x072C\x8c\x15\xa2\xa8\x93Sw)\x19\x02\xdd\t\xfb\xf67\x88\xd9\xee\x86\xa1\xe4\xb6,\xc6\x14\xbb\xd7$z\x1d\xb2\xda\x85\xf5\xa0\x97^\x01*\xaf\xd3\xb0!\xb7\x9d\xea\x89\xbbh8\xa1"\xa7]e(@\xa2\xa5g\xb7[\xae\x8eE\xc2\x9fL\xb6s\x19\x19\r\xc8\x04S\xd0N\xe4]?/\x01\xea\xf6\xec\xd1\xb3\xc2\x91\x86\xd7\xf4K\xdf\xc2lV\xf4\xe8\x80\xfc\x8ep\xbb\x82\xb3\x86\x98F\x1c\xecS\xc8\x15\xcf\xdc\xf1\xed\xfc\xd8\x18r\xf9\x80\x0f\xfa\x8cO\x97(\x0b]\xf1\xdd\r\xe7\xbf\xed\x06\xbd\x1b?\xc5\xa0\xd7a\x82\xf3\xa8\xe6@\xf3\ri\xa1\xb10\xf6\xd4W\xbc\x86\x1a\xbb\xfd\x94!bS\xdb\xaeM\x92\x00#\x0b\xf7\xad\xe9\xc2\x8e\x86\xbfi![%\xd31]\xc6\xfc2\xc9\xda\xc6v\x82P\xcc\xa9\xea\xb9\xff\xf6\xc8\x17iD\xcf\xf3\xeeI\x04\xe9\xa1\x19\xbb\x01\x92\xf5nn4K\xf8\xbb\xc6\x17e>\xa7 \xbbv'
        data = OrderedDict(
            sorted({"int": 1024 * 1024 * 1024, "float": 12345.67890, "text": "hello" * 1024, "binary": binary}.items())
        )
        data_packed_valid = b'\x84\xa6binary\xc5\x01\x00fqv\xf0\x1a"e\x10,\xbe\x9cT\x9e(\xa5]u\x072C\x8c\x15\xa2\xa8\x93Sw)\x19\x02\xdd\t\xfb\xf67\x88\xd9\xee\x86\xa1\xe4\xb6,\xc6\x14\xbb\xd7$z\x1d\xb2\xda\x85\xf5\xa0\x97^\x01*\xaf\xd3\xb0!\xb7\x9d\xea\x89\xbbh8\xa1"\xa7]e(@\xa2\xa5g\xb7[\xae\x8eE\xc2\x9fL\xb6s\x19\x19\r\xc8\x04S\xd0N\xe4]?/\x01\xea\xf6\xec\xd1\xb3\xc2\x91\x86\xd7\xf4K\xdf\xc2lV\xf4\xe8\x80\xfc\x8ep\xbb\x82\xb3\x86\x98F\x1c\xecS\xc8\x15\xcf\xdc\xf1\xed\xfc\xd8\x18r\xf9\x80\x0f\xfa\x8cO\x97(\x0b]\xf1\xdd\r\xe7\xbf\xed\x06\xbd\x1b?\xc5\xa0\xd7a\x82\xf3\xa8\xe6@\xf3\ri\xa1\xb10\xf6\xd4W\xbc\x86\x1a\xbb\xfd\x94!bS\xdb\xaeM\x92\x00#\x0b\xf7\xad\xe9\xc2\x8e\x86\xbfi![%\xd31]\xc6\xfc2\xc9\xda\xc6v\x82P\xcc\xa9\xea\xb9\xff\xf6\xc8\x17iD\xcf\xf3\xeeI\x04\xe9\xa1\x19\xbb\x01\x92\xf5nn4K\xf8\xbb\xc6\x17e>\xa7 \xbbv\xa5float\xcb@\xc8\x1c\xd6\xe61\xf8\xa1\xa3int\xce@\x00\x00\x00\xa4text\xda\x14\x00hellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohello
hellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohello'
        with benchmark("pack 5K x 10 000", 0.78):
            for i in range(10):
                for y in range(1000):
                    data_packed = Msgpack.pack(data)
                yield "."
            assert data_packed == data_packed_valid, "%s<br>!=<br>%s" % (repr(data_packed), repr(data_packed_valid))

        with benchmark("unpack 5K x 10 000", 1.2):
            for i in range(10):
                for y in range(1000):
                    data_unpacked = Msgpack.unpack(data_packed, decode=False)
                yield "."
            assert data == data_unpacked, "%s != %s" % (data_unpacked, data)

        for fallback in [True, False]:
            with benchmark("streaming unpack 5K x 10 000 (fallback: %s)" % fallback, 1.4):
                for i in range(10):
                    unpacker = Msgpack.getUnpacker(decode=False, fallback=fallback)
                    for y in range(1000):
                        unpacker.feed(data_packed)
                        for data_unpacked in unpacker:
                            pass
                    yield "."
                assert data == data_unpacked, "%s != %s" % (data_unpacked, data)

        # Db
        import sqlite3
        yield "<br>Db: (version: %s, API: %s)<br>" % (sqlite3.sqlite_version, sqlite3.version)

        schema = {
            "db_name": "TestDb",
            "db_file": "%s/benchmark.db" % config.data_dir,
            "maps": {
                ".*": {
                    "to_table": {
                        "test": "test"
                    }
                }
            },
            "tables": {
                "test": {
                    "cols": [
                        ["test_id", "INTEGER"],
                        ["title", "TEXT"],
                        ["json_id", "INTEGER REFERENCES json (json_id)"]
                    ],
                    "indexes": ["CREATE UNIQUE INDEX test_key ON test(test_id, json_id)"],
                    "schema_changed": 1426195822
                }
            }
        }

        if os.path.isfile("%s/benchmark.db" % config.data_dir):
            os.unlink("%s/benchmark.db" % config.data_dir)

        with benchmark("Open x 10", 0.13):
            for i in range(10):
                db = Db.Db(schema, "%s/benchmark.db" % config.data_dir)
                db.checkTables()
                db.close()
                yield "."

        db = Db.Db(schema, "%s/benchmark.db" % config.data_dir)
        db.checkTables()
        import json

        with benchmark("Insert x 10 x 1000", 1.0):
            for u in range(10):  # 10 users
                data = {"test": []}
                for i in range(1000):  # 1000 lines of data
                    data["test"].append({"test_id": i, "title": "Testdata for %s message %s" % (u, i)})
                json.dump(data, open("%s/test_%s.json" % (config.data_dir, u), "w"))
                db.updateJson("%s/test_%s.json" % (config.data_dir, u))
                os.unlink("%s/test_%s.json" % (config.data_dir, u))
                yield "."

        with benchmark("Buffered insert x 100 x 100", 1.3):
            cur = db.getCursor()
            cur.logging = False
            for u in range(100, 200):  # 100 users
                data = {"test": []}
                for i in range(100):  # 100 lines of data
                    data["test"].append({"test_id": i, "title": "Testdata for %s message %s" % (u, i)})
                json.dump(data, open("%s/test_%s.json" % (config.data_dir, u), "w"))
                db.updateJson("%s/test_%s.json" % (config.data_dir, u), cur=cur)
                os.unlink("%s/test_%s.json" % (config.data_dir, u))
                if u % 10 == 0:
                    yield "."

        yield " + Total rows in db: %s<br>" % db.execute("SELECT COUNT(*) AS num FROM test").fetchone()[0]

        with benchmark("Indexed query x 1000", 0.25):
            found = 0
            cur = db.getCursor()
            cur.logging = False
            for i in range(1000):  # 1000x by test_id
                res = cur.execute("SELECT * FROM test WHERE test_id = %s" % i)
                for row in res:
                    found += 1
                if i % 100 == 0:
                    yield "."

            assert found == 20000, "Found: %s != 20000" % found

        with benchmark("Not indexed query x 100", 0.6):
            found = 0
            cur = db.getCursor()
            cur.logging = False
            for i in range(100):  # 100x by json_id
                res = cur.execute("SELECT * FROM test WHERE json_id = %s" % i)
                for row in res:
                    found += 1
                if i % 10 == 0:
                    yield "."

            assert found == 18900, "Found: %s != 18900" % found

        with benchmark("Like query x 100", 1.8):
            found = 0
            cur = db.getCursor()
            cur.logging = False
            for i in range(100):  # 100x by title LIKE
                res = cur.execute("SELECT * FROM test WHERE title LIKE '%%message %s%%'" % i)
                for row in res:
                    found += 1
                if i % 10 == 0:
                    yield "."

            assert found == 38900, "Found: %s != 11000" % found

        db.close()
        if os.path.isfile("%s/benchmark.db" % config.data_dir):
            os.unlink("%s/benchmark.db" % config.data_dir)

        gc.collect()  # Explicit garbage collection

        # Zip
        yield "<br>Compression:<br>"
        import zipfile
        test_data = b"Test" * 1024
        file_name = b"\xc3\x81rv\xc3\xadzt\xc5\xb0r\xc5\x91t\xc3\xbck\xc3\xb6r\xc3\xb3g\xc3\xa9p\xe4\xb8\xad\xe5\x8d\x8e%s.txt".decode("utf8")

        with benchmark("Zip pack x 10", 0.12):
            for i in range(10):
                with zipfile.ZipFile('%s/test.zip' % config.data_dir, 'w') as archive:
                    for y in range(100):
                        zip_info = zipfile.ZipInfo(file_name % y, (1980,1,1,0,0,0))
                        zip_info.compress_type = zipfile.ZIP_DEFLATED
                        zip_info.create_system = 3
                        zip_info.flag_bits = 0
                        zip_info.external_attr = 25165824
                        archive.writestr(zip_info, test_data)
                yield "."

            hash = CryptHash.sha512sum(open("%s/test.zip" % config.data_dir, "rb"))
            valid = "f630fece29fff1cc8dbf454e47a87fea2746a4dbbd2ceec098afebab45301562"
            assert hash == valid, "Invalid hash: %s != %s<br>" % (hash, valid)

        with benchmark("Zip unpack x 10", 0.2):
            for i in range(10):
                with zipfile.ZipFile('%s/test.zip' % config.data_dir) as archive:
                    for y in range(100):
                        data = archive.open(file_name % y).read()
                        assert archive.open(file_name % y).read() == test_data, "Invalid data: %s..." % data[0:30]
                yield "."

        if os.path.isfile("%s/test.zip" % config.data_dir):
            os.unlink("%s/test.zip" % config.data_dir)

        # gz, bz2, xz
        import tarfile
        import gzip

        # Monkey patch gzip header writing to use a fixed mtime, keeping the archive hash independent of datetime
        def nodate_write_gzip_header(self):
            self._write_mtime = 0
            original_write_gzip_header(self)

        original_write_gzip_header = gzip.GzipFile._write_gzip_header
        gzip.GzipFile._write_gzip_header = nodate_write_gzip_header

        test_data_io = io.BytesIO(b"Test" * 1024)
        archive_formats = {
            "gz": {"hash": "4704ebd8c987ed6f833059f1de9c475d443b0539b8d4c4cb8b49b26f7bbf2d19", "time_pack": 0.3, "time_unpack": 0.2},
            "bz2": {"hash": "90cba0b4d9abaa37b830bf37e4adba93bfd183e095b489ebee62aaa94339f3b5", "time_pack": 2.0, "time_unpack": 0.5},
            "xz": {"hash": "37abc16d552cfd4a495cb2acbf8b1d5877631d084f6571f4d6544bc548c69bae", "time_pack": 1.4, "time_unpack": 0.2}
        }
        for ext, format_data in archive_formats.items():
            archive_path = '%s/test.tar.%s' % (config.data_dir, ext)
            with benchmark("Tar.%s pack x 10" % ext, format_data["time_pack"]):
                for i in range(10):
                    with tarfile.open(archive_path, 'w:%s' % ext) as archive:
                        for y in range(100):
                            test_data_io.seek(0)
                            tar_info = tarfile.TarInfo(file_name % y)
                            tar_info.size = 4 * 1024
                            archive.addfile(tar_info, test_data_io)
                    yield "."

                hash = CryptHash.sha512sum(open("%s/test.tar.%s" % (config.data_dir, ext), "rb"))
                valid = format_data["hash"]
                assert hash == valid, "Invalid hash: %s != %s<br>" % (hash, valid)

            archive_size = os.path.getsize(archive_path) / 1024
            with benchmark("Tar.%s unpack (%.2fkB) x 10" % (ext, archive_size), format_data["time_unpack"]):
                for i in range(10):
                    with tarfile.open(archive_path, 'r:%s' % ext) as archive:
                        for y in range(100):
                            assert archive.extractfile(file_name % y).read() == test_data
                    yield "."

            if os.path.isfile(archive_path):
                os.unlink(archive_path)

        yield "<br>Done. Total: %.2fs" % (time.time() - t)
Example #8
import os.path
import sys
from Db import Db
import requests
from bs4 import BeautifulSoup
import re
import time
import string
import subprocess
import json
import math
reload(sys)
sys.setdefaultencoding('utf-8')

db = Db()
req = requests.Session()


class HongHaiZi:
    count = 0
    site_id = 5
    items = []
    listurl = ''
    listname = ''
    goodstotal = ''
    groupid = 0
    groupname = ''
    source_urls = [
        'http://redbaby.suning.com/naifen.html',
        'http://redbaby.suning.com/zhiniaoku.html',
Example #9
 def delete(self):
     request_id = list(self.__dict__.values())
     Db().delete('quote', request_id[0])
Example #10
 def load(self):
     data = Db().load_by_id('quote', self.id, self.channel)
     current_status = self.filter_values()
     for key, value in enumerate(current_status):
         current_status[value] = data[key]
     return self
Example #11
import sqlite3
from Db import Db

if __name__ == '__main__':
    db = Db('rating.db')
    db.execue_script('rating.sql')

    comm1 = "SELECT Movie.title FROM Movie WHERE director = 'Steven Spielberg'"
    a = db.query(comm1)
    print(a)

    print(
        "2. Find all years that have a movie that received a rating of 4 or 5 "
        "and sort them in increasing order.")
    comm1 = "SELECT DISTINCT Movie.year FROM Movie, Rating " \
            "WHERE Movie.mID = Rating.mID " \
            "AND Rating.stars >= 4 " \
            "ORDER BY Movie.year ASC"
    a = db.query(comm1)
    print(a)

    print("3. Find the titles of all movies that have no ratings.")
    comm1 = "SELECT Movie.title FROM Movie " \
            "WHERE NOT EXISTS "\
            "(SELECT * FROM Rating "\
            "WHERE Movie.mID = Rating.mID)"
    a = db.query(comm1)
    print(a)

    print("4. Some reviewers didn't provide a date with their rating. "
          "Find the names of all reviewers who have "
Example #12
import sqlite3
from Db import Db

if __name__ == '__main__':
    db = Db('test.db')
    db.execue_script('xjadralci.sql')
    comm = "SELECT ime, rating FROM Jadralec j WHERE rating%2=0"
    k = db.query(comm)
    print(k)

    comm = "SELECT c1.ime, c1.cid FROM Coln c1, Coln c2 WHERE c1.ime = c2.ime AND c1.cid != c2.cid"
    k = db.query(comm)
    print(k)

    comm = "SELECT c.ime, c.dolzina, j.starost FROM Coln c " \
           "JOIN Rezervacija r USING(cid) " \
           "JOIN Jadralec j USING(jid) " \
           "WHERE c.dolzina > 35 " \
           "AND j.starost > 35"
    k = db.query(comm)
    print(k)

    comm = "SELECT c.ime, c.dolzina, j.starost FROM Coln c, Rezervacija r, Jadralec j " \
           "WHERE c.cid = r.cid " \
           "AND r.jid = j.jid " \
           "AND c.dolzina > 35 "\
           "AND j.starost > 35"
    k = db.query(comm)
    print(k)

    comm = "SELECT DISTINCT j.ime, r.dan FROM Jadralec j LEFT JOIN Rezervacija r"
Example #13
                "test": {
                    "cols": [["test_id", "INTEGER"], ["title", "TEXT"],
                             ["json_id", "INTEGER REFERENCES json (json_id)"]],
                    "indexes":
                    ["CREATE UNIQUE INDEX test_key ON test(test_id, json_id)"],
                    "schema_changed":
                    1426195822
                }
            }
        }

        if os.path.isfile("data/benchmark.db"): os.unlink("data/benchmark.db")

        with benchmark("Open x 10", 0.13):
            for i in range(10):
                db = Db(schema, "data/benchmark.db")
                db.checkTables()
                db.close()
                yield "."

        db = Db(schema, "data/benchmark.db")
        db.checkTables()
        import json

        with benchmark("Insert x 10 x 1000", 1.0):
            for u in range(10):  # 10 users
                data = {"test": []}
                for i in range(1000):  # 1000 lines of data
                    data["test"].append({
                        "test_id": i,
Example #14
cfg = {}
with open("config.properties") as f:
    for line in f:
        (key, val) = line.split('=')
        cfg[key] = val.replace("\n", "")

print ("List of Config Values \n")

for keys,values in cfg.items():
    print(keys, values)

print ("Processing the input file .....")

u1.replace_word(cfg['ip_file'], cfg['op_file'], "^&^", ",")

db = Db(username=cfg['db_user'], password=cfg['db_passwd'], database=cfg['db_name'], driver=cfg['db_engine'])

ret = []
ret = db.select(cfg['tbl_name'], columns='*')
print("Currently Number of records in table :", len(ret))

if (len(ret) > 0):
    print ("Truncating the table : ", cfg['tbl_name'])
    db.truncate(cfg['tbl_name'])

sql = 'INSERT INTO %s(%s) VALUES (%s)' % (self.enclose_sys(table), ','.join(cols), ','.join(['%s'] * len(vals)))

query = "LOAD DATA INFILE '%s' INTO TABLE %s FIELDS TERMINATED BY ',' ENCLOSED BY '\"' LINES TERMINATED BY '\r\n' IGNORE 1 LINES;" % (cfg['op_file'], cfg['tbl_name'])

with open(cfg['op_file'], 'r') as f:
    reader = csv.reader(f)
Example #15
 def get_db(self):
     return Db(self.logger)
Example #16
from Db import Db
from Program import Program
from postActionConfig import POST_ACTIONS
import constants
import getpass

program = Program(Db(), POST_ACTIONS)


def main():
    program.start()
    while (True):
        startAction = input(constants.START_ACTION_PROMPT).lower()
        if (startAction == "login" or startAction == "1"):
            # user name, password
            # login action
            program.login()
        elif (startAction == "register" or startAction == "2"):
            # Register action
            program.register()
        elif (startAction == "exit" or startAction == "3"):
            # exit action
            break
        else:
            # invalid action
            print("Invalid input, please try again.")

        while (program.currentUser != None):
            action = input(constants.ACTION_OPTIONS).lower()

            if (action == "post" or action == "1"):
Example #17
 def openDb(self, close_idle=False):
     schema = self.getDbSchema()
     db_path = self.getPath(schema["db_file"])
     return Db(schema, db_path, close_idle=close_idle)
Example #18
    def testCheckTables(self):
        db_path = "%s/zeronet.db" % config.data_dir
        schema = {
            "db_name": "TestDb",
            "db_file": "%s/zeronet.db" % config.data_dir,
            "map": {
                "data.json": {
                    "to_table": {
                        "test": "test"
                    }
                }
            },
            "tables": {
                "test": {
                    "cols": [
                        ["test_id", "INTEGER"],
                        ["title", "TEXT"],
                    ],
                    "indexes": ["CREATE UNIQUE INDEX test_id ON test(test_id)"],
                    "schema_changed": 1426195822
                }
            }
        }

        if os.path.isfile(db_path):
            os.unlink(db_path)
        db = Db(schema, db_path)
        db.checkTables()
        db.close()

        # Verify tables
        assert os.path.isfile(db_path)
        db = Db(schema, db_path)

        tables = [row["name"] for row in db.execute("SELECT name FROM sqlite_master WHERE type='table'")]
        assert "keyvalue" in tables  # To store simple key -> value
        assert "json" in tables  # Json file path registry
        assert "test" in tables  # The table defined in dbschema.json

        # Verify test table
        cols = [col["name"] for col in db.execute("PRAGMA table_info(test)")]
        assert "test_id" in cols
        assert "title" in cols

        # Add new table
        assert "newtest" not in tables
        db.schema["tables"]["newtest"] = {
            "cols": [
                ["newtest_id", "INTEGER"],
                ["newtitle", "TEXT"],
            ],
            "indexes": ["CREATE UNIQUE INDEX newtest_id ON newtest(newtest_id)"],
            "schema_changed": 1426195822
        }
        db.checkTables()
        tables = [row["name"] for row in db.execute("SELECT name FROM sqlite_master WHERE type='table'")]
        assert "test" in tables
        assert "newtest" in tables

        db.close()

        # Cleanup
        os.unlink(db_path)
Example #19
 def save(self):
     filtered = str(self.filter_values().values()).replace(
         "None", "'None'")[13:-2]
     Db().save('quote', filtered)
     return self
Example #20
import sqlite3
from Db import Db

if __name__ == '__main__':
    db = Db('yworld.db')
    db.execue_script('yworld.sql')
    db.execue_script('wdrop.sql')

    tribes = ["Romans", "Teutons", "Gauls", "Natures", "Natars"]

    tables = dict()
    tables['Tribe'] = "CREATE TABLE Tribe(tid integer default 0 NOT NULL)"

    tables['Alliance'] = "CREATE TABLE Alliance(aid integer default 0 NOT NULL, "\
                        "alliance varchar(100) default '')"

    tables['Player'] = "CREATE TABLE Player(pid integer default 0 NOT NULL, "\
                        "player varchar(100) default '', "\
                        "tid integer default 0 NOT NULL, "\
                        "aid integer default 0 NOT NULL, " \
                        "PRIMARY KEY(pid), "\
                        "FOREIGN KEY(tid) REFERENCES Tribe(tid), "\
                        "FOREIGN KEY(aid) REFERENCES Alliance(aid))"

    tables['Village'] = "CREATE TABLE Village(id integer default 0 NOT NULL, "\
                        "x integer default 0 NOT NULL, "\
                        "y integer default 0 NOT NULL, "\
                        "vid integer default 0 NOT NULL, "\
                        "village varchar(100) default '' NOT NULL, "\
                        "population integer default 0 NOT NULL, "\
                        "pid integer default 0 NOT NULL, "\
Example #21
class UiRequestPlugin(object):
    def formatTableRow(self, row, class_name=""):
        back = []
        for format, val in row:
            if val is None:
                formatted = "n/a"
            elif format == "since":
                if val:
                    formatted = "%.0f" % (time.time() - val)
                else:
                    formatted = "n/a"
            else:
                formatted = format % val
            back.append("<td>%s</td>" % formatted)
        return "<tr class='%s'>%s</tr>" % (class_name, "".join(back))

    def getObjSize(self, obj, hpy=None):
        if hpy:
            return float(hpy.iso(obj).domisize) / 1024
        else:
            return 0

    # /Stats entry point
    def actionStats(self):
        import gc
        import sys
        from Ui import UiRequest
        from Db import Db
        from Crypt import CryptConnection

        hpy = None
        if self.get.get("size") == "1":  # Calc obj size
            try:
                import guppy
                hpy = guppy.hpy()
            except:
                pass
        self.sendHeader()

        if "Multiuser" in PluginManager.plugin_manager.plugin_names and not config.multiuser_local:
            yield "This function is disabled on this proxy"
            raise StopIteration

        s = time.time()
        main = sys.modules["main"]

        # Style
        yield """
        <style>
         * { font-family: monospace }
         table td, table th { text-align: right; padding: 0px 10px }
         .connections td { white-space: nowrap }
         .serving-False { opacity: 0.3 }
        </style>
        """

        # Memory
        try:
            yield "rev%s | " % config.rev
            yield "%s | " % config.ip_external
            yield "Opened: %s | " % main.file_server.port_opened
            yield "Crypt: %s | " % CryptConnection.manager.crypt_supported
            yield "In: %.2fMB, Out: %.2fMB  | " % (
                float(main.file_server.bytes_recv) / 1024 / 1024,
                float(main.file_server.bytes_sent) / 1024 / 1024)
            yield "Peerid: %s  | " % main.file_server.peer_id
            import psutil
            process = psutil.Process(os.getpid())
            mem = process.get_memory_info()[0] / float(2**20)
            yield "Mem: %.2fMB | " % mem
            yield "Threads: %s | " % len(process.threads())
            yield "CPU: usr %.2fs sys %.2fs | " % process.cpu_times()
            yield "Files: %s | " % len(process.open_files())
            yield "Sockets: %s | " % len(process.connections())
            yield "Calc size <a href='?size=1'>on</a> <a href='?size=0'>off</a>"
        except Exception:
            pass
        yield "<br>"

        # Connections
        yield "<b>Connections</b> (%s, total made: %s):<br>" % (len(
            main.file_server.connections), main.file_server.last_connection_id)
        yield "<table class='connections'><tr> <th>id</th> <th>type</th> <th>ip</th> <th>open</th> <th>crypt</th> <th>ping</th>"
        yield "<th>buff</th> <th>bad</th> <th>idle</th> <th>open</th> <th>delay</th> <th>cpu</th> <th>out</th> <th>in</th> <th>last sent</th>"
        yield "<th>wait</th> <th>version</th> <th>sites</th> </tr>"
        for connection in main.file_server.connections:
            if "cipher" in dir(connection.sock):
                cipher = connection.sock.cipher()[0]
            else:
                cipher = connection.crypt
            yield self.formatTableRow([
                ("%3d", connection.id), ("%s", connection.type),
                ("%s:%s", (connection.ip, connection.port)),
                ("%s", connection.handshake.get("port_opened")),
                ("<span title='%s'>%s</span>", (connection.crypt, cipher)),
                ("%6.3f", connection.last_ping_delay),
                ("%s", connection.incomplete_buff_recv),
                ("%s", connection.bad_actions),
                ("since",
                 max(connection.last_send_time, connection.last_recv_time)),
                ("since", connection.start_time),
                ("%.3f",
                 connection.last_sent_time - connection.last_send_time),
                ("%.3f", connection.cpu_time),
                ("%.0fkB", connection.bytes_sent / 1024),
                ("%.0fkB", connection.bytes_recv / 1024),
                ("%s", connection.last_cmd),
                ("%s", connection.waiting_requests.keys()),
                ("%s r%s", (connection.handshake.get("version"),
                            connection.handshake.get("rev", "?"))),
                ("%s", connection.sites)
            ])
        yield "</table>"

        # Tor hidden services
        yield "<br><br><b>Tor hidden services (status: %s):</b><br>" % main.file_server.tor_manager.status
        for site_address, onion in main.file_server.tor_manager.site_onions.items():
            yield "- %-34s: %s<br>" % (site_address, onion)

        # Db
        yield "<br><br><b>Db</b>:<br>"
        for db in sys.modules["Db.Db"].opened_dbs:
            yield "- %.3fs: %s<br>" % (time.time() - db.last_query_time,
                                       db.db_path.encode("utf8"))

        # Sites
        yield "<br><br><b>Sites</b>:"
        yield "<table>"
        yield "<tr><th>address</th> <th>connected</th> <th title='connected/good/total'>peers</th> <th>content.json</th> <th>out</th> <th>in</th>  </tr>"
        for site in sorted(self.server.sites.values(),
                           lambda a, b: cmp(a.address, b.address)):
            yield self.formatTableRow([
                ("""<a href='#' onclick='document.getElementById("peers_%s").style.display="initial"; return false'>%s</a>""",
                 (site.address, site.address)),
                ("%s", [
                    peer.connection.id for peer in site.peers.values()
                    if peer.connection and peer.connection.connected
                ]),
                ("%s/%s/%s", (len([
                    peer for peer in site.peers.values()
                    if peer.connection and peer.connection.connected
                ]), len(site.getConnectablePeers(100)), len(site.peers))),
                ("%s (loaded: %s)",
                 (len(site.content_manager.contents),
                  len([
                      key for key, val in dict(
                          site.content_manager.contents).iteritems() if val
                  ]))),
                ("%.0fkB", site.settings.get("bytes_sent", 0) / 1024),
                ("%.0fkB", site.settings.get("bytes_recv", 0) / 1024),
            ], "serving-%s" % site.settings["serving"])
            yield "<tr><td id='peers_%s' style='display: none; white-space: pre' colspan=6>" % site.address
            for key, peer in site.peers.items():
                if peer.time_found:
                    time_found = int(time.time() - peer.time_found) / 60
                else:
                    time_found = "--"
                if peer.connection:
                    connection_id = peer.connection.id
                else:
                    connection_id = None
                if site.content_manager.hashfield:
                    yield "Optional files: %4s " % len(peer.hashfield)
                time_added = (time.time() - peer.time_added) / (60 * 60 * 24)
                yield "(#%4s, err: %s, found: %3s min, add: %.1f day) %30s -<br>" % (
                    connection_id, peer.connection_error, time_found,
                    time_added, key)
            yield "<br></td></tr>"
        yield "</table>"

        # No more if not in debug mode
        if not config.debug:
            raise StopIteration

        # Object types

        obj_count = {}
        for obj in gc.get_objects():
            obj_type = str(type(obj))
            if obj_type not in obj_count:
                obj_count[obj_type] = [0, 0]
            obj_count[obj_type][0] += 1  # Count
            obj_count[obj_type][1] += float(sys.getsizeof(obj)) / 1024  # Size

        yield "<br><br><b>Objects in memory (types: %s, total: %s, %.2fkb):</b><br>" % (
            len(obj_count), sum([stat[0] for stat in obj_count.values()]),
            sum([stat[1] for stat in obj_count.values()]))

        for obj, stat in sorted(obj_count.items(),
                                key=lambda x: x[1][0],
                                reverse=True):  # Sorted by count
            yield " - %.1fkb = %s x <a href=\"/Listobj?type=%s\">%s</a><br>" % (
                stat[1], stat[0], obj, cgi.escape(obj))

        # Classes

        class_count = {}
        for obj in gc.get_objects():
            obj_type = str(type(obj))
            if obj_type != "<type 'instance'>":
                continue
            class_name = obj.__class__.__name__
            if class_name not in class_count:
                class_count[class_name] = [0, 0]
            class_count[class_name][0] += 1  # Count
            class_count[class_name][1] += float(
                sys.getsizeof(obj)) / 1024  # Size

        yield "<br><br><b>Classes in memory (types: %s, total: %s, %.2fkb):</b><br>" % (
            len(class_count), sum([stat[0] for stat in class_count.values()]),
            sum([stat[1] for stat in class_count.values()]))

        for obj, stat in sorted(class_count.items(),
                                key=lambda x: x[1][0],
                                reverse=True):  # Sorted by count
            yield " - %.1fkb = %s x <a href=\"/Dumpobj?class=%s\">%s</a><br>" % (
                stat[1], stat[0], obj, cgi.escape(obj))

        from greenlet import greenlet
        objs = [obj for obj in gc.get_objects() if isinstance(obj, greenlet)]
        yield "<br>Greenlets (%s):<br>" % len(objs)
        for obj in objs:
            yield " - %.1fkb: %s<br>" % (self.getObjSize(
                obj, hpy), cgi.escape(repr(obj).encode("utf8")))

        from Worker import Worker
        objs = [obj for obj in gc.get_objects() if isinstance(obj, Worker)]
        yield "<br>Workers (%s):<br>" % len(objs)
        for obj in objs:
            yield " - %.1fkb: %s<br>" % (self.getObjSize(
                obj, hpy), cgi.escape(repr(obj)))

        from Connection import Connection
        objs = [obj for obj in gc.get_objects() if isinstance(obj, Connection)]
        yield "<br>Connections (%s):<br>" % len(objs)
        for obj in objs:
            yield " - %.1fkb: %s<br>" % (self.getObjSize(
                obj, hpy), cgi.escape(repr(obj)))

        from socket import socket
        objs = [obj for obj in gc.get_objects() if isinstance(obj, socket)]
        yield "<br>Sockets (%s):<br>" % len(objs)
        for obj in objs:
            yield " - %.1fkb: %s<br>" % (self.getObjSize(
                obj, hpy), cgi.escape(repr(obj)))

        from msgpack import Unpacker
        objs = [obj for obj in gc.get_objects() if isinstance(obj, Unpacker)]
        yield "<br>Msgpack unpacker (%s):<br>" % len(objs)
        for obj in objs:
            yield " - %.1fkb: %s<br>" % (self.getObjSize(
                obj, hpy), cgi.escape(repr(obj)))

        from Site import Site
        objs = [obj for obj in gc.get_objects() if isinstance(obj, Site)]
        yield "<br>Sites (%s):<br>" % len(objs)
        for obj in objs:
            yield " - %.1fkb: %s<br>" % (self.getObjSize(
                obj, hpy), cgi.escape(repr(obj)))

        objs = [
            obj for obj in gc.get_objects()
            if isinstance(obj, self.server.log.__class__)
        ]
        yield "<br>Loggers (%s):<br>" % len(objs)
        for obj in objs:
            yield " - %.1fkb: %s<br>" % (self.getObjSize(
                obj, hpy), cgi.escape(repr(obj.name)))

        objs = [obj for obj in gc.get_objects() if isinstance(obj, UiRequest)]
        yield "<br>UiRequests (%s):<br>" % len(objs)
        for obj in objs:
            yield " - %.1fkb: %s<br>" % (self.getObjSize(
                obj, hpy), cgi.escape(repr(obj)))

        from Peer import Peer
        objs = [obj for obj in gc.get_objects() if isinstance(obj, Peer)]
        yield "<br>Peers (%s):<br>" % len(objs)
        for obj in objs:
            yield " - %.1fkb: %s<br>" % (self.getObjSize(
                obj, hpy), cgi.escape(repr(obj)))

        objs = [(key, val) for key, val in sys.modules.iteritems()
                if val is not None]
        objs.sort()
        yield "<br>Modules (%s):<br>" % len(objs)
        for module_name, module in objs:
            yield " - %.3fkb: %s %s<br>" % (self.getObjSize(
                module, hpy), module_name, cgi.escape(repr(module)))

        gc.collect()  # Explicit garbage collection
        yield "Done in %.1f" % (time.time() - s)

    def actionDumpobj(self):

        import gc
        import sys

        self.sendHeader()

        if "Multiuser" in PluginManager.plugin_manager.plugin_names and not config.multiuser_local:
            yield "This function is disabled on this proxy"
            raise StopIteration

        # No more if not in debug mode
        if not config.debug:
            yield "Not in debug mode"
            raise StopIteration

        class_filter = self.get.get("class")

        yield """
        <style>
         * { font-family: monospace; white-space: pre }
         table * { text-align: right; padding: 0px 10px }
        </style>
        """

        objs = gc.get_objects()
        for obj in objs:
            obj_type = str(type(obj))
            if obj_type != "<type 'instance'>" or obj.__class__.__name__ != class_filter:
                continue
            yield "%.1fkb %s... " % (float(sys.getsizeof(obj)) / 1024,
                                     cgi.escape(str(obj)))
            for attr in dir(obj):
                yield "- %s: %s<br>" % (attr,
                                        cgi.escape(str(getattr(obj, attr))))
            yield "<br>"

        gc.collect()  # Explicit garbage collection

    def actionListobj(self):

        import gc
        import sys

        self.sendHeader()

        if "Multiuser" in PluginManager.plugin_manager.plugin_names and not config.multiuser_local:
            yield "This function is disabled on this proxy"
            raise StopIteration

        # No more if not in debug mode
        if not config.debug:
            yield "Not in debug mode"
            raise StopIteration

        type_filter = self.get.get("type")

        yield """
        <style>
         * { font-family: monospace; white-space: pre }
         table * { text-align: right; padding: 0px 10px }
        </style>
        """

        yield "Listing all %s objects in memory...<br>" % cgi.escape(
            type_filter)

        ref_count = {}
        objs = gc.get_objects()
        for obj in objs:
            obj_type = str(type(obj))
            if obj_type != type_filter:
                continue
            refs = [
                ref for ref in gc.get_referrers(obj) if
                hasattr(ref, "__class__") and ref.__class__.__name__ not in [
                    "list", "dict", "function", "type", "frame", "WeakSet",
                    "tuple"
                ]
            ]
            if not refs:
                continue
            try:
                yield "%.1fkb <span title=\"%s\">%s</span>... " % (
                    float(sys.getsizeof(obj)) / 1024, cgi.escape(
                        str(obj)), cgi.escape(str(obj)[0:100].ljust(100)))
            except:
                continue
            for ref in refs:
                yield " ["
                if "object at" in str(ref) or len(str(ref)) > 100:
                    yield str(ref.__class__.__name__)
                else:
                    yield str(ref.__class__.__name__) + ":" + cgi.escape(
                        str(ref))
                yield "] "
                ref_type = ref.__class__.__name__
                if ref_type not in ref_count:
                    ref_count[ref_type] = [0, 0]
                ref_count[ref_type][0] += 1  # Count
                ref_count[ref_type][1] += float(
                    sys.getsizeof(obj)) / 1024  # Size
            yield "<br>"

        yield "<br>Object referrer (total: %s, %.2fkb):<br>" % (
            len(ref_count), sum([stat[1] for stat in ref_count.values()]))

        for obj, stat in sorted(ref_count.items(),
                                key=lambda x: x[1][0],
                                reverse=True)[0:30]:  # Sorted by count
            yield " - %.1fkb = %s x %s<br>" % (stat[1], stat[0],
                                               cgi.escape(str(obj)))

        gc.collect()  # Explicit garbage collection

    def actionBenchmark(self):
        import sys
        import gc
        from contextlib import contextmanager

        output = self.sendHeader()

        if "Multiuser" in PluginManager.plugin_manager.plugin_names and not config.multiuser_local:
            yield "This function is disabled on this proxy"
            raise StopIteration

        @contextmanager
        def benchmark(name, standard):
            s = time.time()
            output("- %s" % name)
            try:
                yield 1
            except Exception as err:
                output("<br><b>! Error: %s</b><br>" % err)
            taken = time.time() - s
            if taken > 0:
                multipler = standard / taken
            else:
                multipler = 99
            if multipler < 0.3:
                speed = "Sloooow"
            elif multipler < 0.5:
                speed = "Ehh"
            elif multipler < 0.8:
                speed = "Goodish"
            elif multipler < 1.2:
                speed = "OK"
            elif multipler < 1.7:
                speed = "Fine"
            elif multipler < 2.5:
                speed = "Fast"
            elif multipler < 3.5:
                speed = "WOW"
            else:
                speed = "Insane!!"
            output("%.3fs [x%.2f: %s]<br>" % (taken, multipler, speed))
            time.sleep(0.01)

        yield """
        <style>
         * { font-family: monospace }
         table * { text-align: right; padding: 0px 10px }
        </style>
        """

        yield "Benchmarking ZeroNet %s (rev%s) Python %s on: %s...<br>" % (
            config.version, config.rev, sys.version, sys.platform)

        t = time.time()

        # CryptBitcoin
        yield "<br>CryptBitcoin:<br>"
        from Crypt import CryptBitcoin

        # seed = CryptBitcoin.newSeed()
        # yield "- Seed: %s<br>" % seed
        seed = "e180efa477c63b0f2757eac7b1cce781877177fe0966be62754ffd4c8592ce38"

        with benchmark("hdPrivatekey x 10", 0.7):
            for i in range(10):
                privatekey = CryptBitcoin.hdPrivatekey(seed, i * 10)
                yield "."
            valid = "5JsunC55XGVqFQj5kPGK4MWgTL26jKbnPhjnmchSNPo75XXCwtk"
            assert privatekey == valid, "%s != %s" % (privatekey, valid)

        data = "Hello" * 1024  # 5k
        with benchmark("sign x 10", 0.35):
            for i in range(10):
                yield "."
                sign = CryptBitcoin.sign(data, privatekey)
            valid = "G1GXaDauZ8vX/N9Jn+MRiGm9h+I94zUhDnNYFaqMGuOiBHB+kp4cRPZOL7l1yqK5BHa6J+W97bMjvTXtxzljp6w="
            assert sign == valid, "%s != %s" % (sign, valid)

        address = CryptBitcoin.privatekeyToAddress(privatekey)
        if CryptBitcoin.opensslVerify:  # OpenSSL available
            with benchmark("openssl verify x 100", 0.37):
                for i in range(100):
                    if i % 10 == 0:
                        yield "."
                    ok = CryptBitcoin.verify(data, address, sign)
                assert ok, "does not verify from %s" % address
        else:
            yield " - openssl verify x 100...not avalible :(<br>"

        openssl_verify_bk = CryptBitcoin.opensslVerify  # Emulate openssl not found in any way
        CryptBitcoin.opensslVerify = None
        with benchmark("pure-python verify x 10", 1.6):
            for i in range(10):
                yield "."
                ok = CryptBitcoin.verify(data, address, sign)
            assert ok, "does not verify from %s" % address
        CryptBitcoin.opensslVerify = openssl_verify_bk

        # CryptHash
        yield "<br>CryptHash:<br>"
        from Crypt import CryptHash
        from cStringIO import StringIO

        data = StringIO("Hello" * 1024 * 1024)  # 5 MB
        with benchmark("sha256 5M x 10", 0.6):
            for i in range(10):
                data.seek(0)
                hash = CryptHash.sha256sum(data)
                yield "."
            valid = "8cd629d9d6aff6590da8b80782a5046d2673d5917b99d5603c3dcb4005c45ffa"
            assert hash == valid, "%s != %s" % (hash, valid)

        data = StringIO("Hello" * 1024 * 1024)  # 5 MB
        with benchmark("sha512 5M x 10", 0.6):
            for i in range(10):
                data.seek(0)
                hash = CryptHash.sha512sum(data)
                yield "."
            valid = "9ca7e855d430964d5b55b114e95c6bbb114a6d478f6485df93044d87b108904d"
            assert hash == valid, "%s != %s" % (hash, valid)

        with benchmark("os.urandom(256) x 1000", 0.0065):
            for i in range(10):
                for y in range(100):
                    data = os.urandom(256)
                yield "."

        # Msgpack
        import msgpack
        yield "<br>Msgpack: (version: %s)<br>" % ".".join(
            map(str, msgpack.version))
        binary = 'fqv\xf0\x1a"e\x10,\xbe\x9cT\x9e(\xa5]u\x072C\x8c\x15\xa2\xa8\x93Sw)\x19\x02\xdd\t\xfb\xf67\x88\xd9\xee\x86\xa1\xe4\xb6,\xc6\x14\xbb\xd7$z\x1d\xb2\xda\x85\xf5\xa0\x97^\x01*\xaf\xd3\xb0!\xb7\x9d\xea\x89\xbbh8\xa1"\xa7]e(@\xa2\xa5g\xb7[\xae\x8eE\xc2\x9fL\xb6s\x19\x19\r\xc8\x04S\xd0N\xe4]?/\x01\xea\xf6\xec\xd1\xb3\xc2\x91\x86\xd7\xf4K\xdf\xc2lV\xf4\xe8\x80\xfc\x8ep\xbb\x82\xb3\x86\x98F\x1c\xecS\xc8\x15\xcf\xdc\xf1\xed\xfc\xd8\x18r\xf9\x80\x0f\xfa\x8cO\x97(\x0b]\xf1\xdd\r\xe7\xbf\xed\x06\xbd\x1b?\xc5\xa0\xd7a\x82\xf3\xa8\xe6@\xf3\ri\xa1\xb10\xf6\xd4W\xbc\x86\x1a\xbb\xfd\x94!bS\xdb\xaeM\x92\x00#\x0b\xf7\xad\xe9\xc2\x8e\x86\xbfi![%\xd31]\xc6\xfc2\xc9\xda\xc6v\x82P\xcc\xa9\xea\xb9\xff\xf6\xc8\x17iD\xcf\xf3\xeeI\x04\xe9\xa1\x19\xbb\x01\x92\xf5nn4K\xf8\xbb\xc6\x17e>\xa7 \xbbv'
        data = {
            "int": 1024 * 1024 * 1024,
            "float": 12345.67890,
            "text": "hello" * 1024,
            "binary": binary
        }
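        # Roughly 5 KB once packed: 5120 bytes of text, 256 bytes of binary,
        # plus an int and a float.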
        with benchmark("pack 5K x 10 000", 0.78):
            for i in range(10):
                for y in range(1000):
                    data_packed = msgpack.packb(data)
                yield "."
            valid = """\x84\xa3int\xce@\x00\x00\x00\xa4text\xda\x14\x00hellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohell
ohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohellohello\xa5float\xcb@\xc8\x1c\xd6\xe61\xf8\xa1\xa6binary\xda\x01\x00fqv\xf0\x1a"e\x10,\xbe\x9cT\x9e(\xa5]u\x072C\x8c\x15\xa2\xa8\x93Sw)\x19\x02\xdd\t\xfb\xf67\x88\xd9\xee\x86\xa1\xe4\xb6,\xc6\x14\xbb\xd7$z\x1d\xb2\xda\x85\xf5\xa0\x97^\x01*\xaf\xd3\xb0!\xb7\x9d\xea\x89\xbbh8\xa1"\xa7]e(@\xa2\xa5g\xb7[\xae\x8eE\xc2\x9fL\xb6s\x19\x19\r\xc8\x04S\xd0N\xe4]?/\x01\xea\xf6\xec\xd1\xb3\xc2\x91\x86\xd7\xf4K\xdf\xc2lV\xf4\xe8\x80\xfc\x8ep\xbb\x82\xb3\x86\x98F\x1c\xecS\xc8\x15\xcf\xdc\xf1\xed\xfc\xd8\x18r\xf9\x80\x0f\xfa\x8cO\x97(\x0b]\xf1\xdd\r\xe7\xbf\xed\x06\xbd\x1b?\xc5\xa0\xd7a\x82\xf3\xa8\xe6@\xf3\ri\xa1\xb10\xf6\xd4W\xbc\x86\x1a\xbb\xfd\x94!bS\xdb\xaeM\x92\x00#\x0b\xf7\xad\xe9\xc2\x8e\x86\xbfi![%\xd31]\xc6\xfc2\xc9\xda\xc6v\x82P\xcc\xa9\xea\xb9\xff\xf6\xc8\x17iD\xcf\xf3\xeeI\x04\xe9\xa1\x19\xbb\x01\x92\xf5nn4K\xf8\xbb\xc6\x17e>\xa7 \xbbv"""
            assert data_packed == valid, "%s<br>!=<br>%s" % (repr(data_packed),
                                                             repr(valid))

        with benchmark("unpack 5K x 10 000", 1.2):
            for i in range(10):
                for y in range(1000):
                    data_unpacked = msgpack.unpackb(data_packed)
                yield "."
            assert data == data_unpacked, "%s != %s" % (data_unpacked, data)

        with benchmark("streaming unpack 5K x 10 000", 1.4):
            for i in range(10):
                unpacker = msgpack.Unpacker()
                for y in range(1000):
                    unpacker.feed(data_packed)
                    for data_unpacked in unpacker:
                        pass
                yield "."
            assert data == data_unpacked, "%s != %s" % (data_unpacked, data)

        # Db
        from Db import Db
        import sqlite3
        yield "<br>Db: (version: %s, API: %s)<br>" % (sqlite3.sqlite_version,
                                                      sqlite3.version)

        schema = {
            "db_name": "TestDb",
            "db_file": "%s/benchmark.db" % config.data_dir,
            "maps": {
                ".*": {
                    "to_table": {
                        "test": "test"
                    }
                }
            },
            "tables": {
                "test": {
                    "cols": [["test_id", "INTEGER"], ["title", "TEXT"],
                             ["json_id", "INTEGER REFERENCES json (json_id)"]],
                    "indexes":
                    ["CREATE UNIQUE INDEX test_key ON test(test_id, json_id)"],
                    "schema_changed":
                    1426195822
                }
            }
        }
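        # The schema maps every json file (".*") into a single "test" table with a
        # unique (test_id, json_id) index, so the insert and query benchmarks below
        # all run against that one table.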

        if os.path.isfile("%s/benchmark.db" % config.data_dir):
            os.unlink("%s/benchmark.db" % config.data_dir)

        with benchmark("Open x 10", 0.13):
            for i in range(10):
                db = Db(schema, "%s/benchmark.db" % config.data_dir)
                db.checkTables()
                db.close()
                yield "."

        db = Db(schema, "%s/benchmark.db" % config.data_dir)
        db.checkTables()
        import json

        with benchmark("Insert x 10 x 1000", 1.0):
            for u in range(10):  # 10 users
                data = {"test": []}
                for i in range(1000):  # 1000 rows of data per user
                    data["test"].append({
                        "test_id": i,
                        "title": "Testdata for %s message %s" % (u, i)
                    })
                json.dump(data,
                          open("%s/test_%s.json" % (config.data_dir, u), "w"))
                db.updateJson("%s/test_%s.json" % (config.data_dir, u))
                os.unlink("%s/test_%s.json" % (config.data_dir, u))
                yield "."

        with benchmark("Buffered insert x 100 x 100", 1.3):
            cur = db.getCursor()
            cur.execute("BEGIN")
            cur.logging = False
            for u in range(100, 200):  # 100 users
                data = {"test": []}
                for i in range(100):  # 100 rows of data per user
                    data["test"].append({
                        "test_id": i,
                        "title": "Testdata for %s message %s" % (u, i)
                    })
                json.dump(data,
                          open("%s/test_%s.json" % (config.data_dir, u), "w"))
                db.updateJson("%s/test_%s.json" % (config.data_dir, u),
                              cur=cur)
                os.unlink("%s/test_%s.json" % (config.data_dir, u))
                if u % 10 == 0:
                    yield "."
            cur.execute("COMMIT")

        yield " - Total rows in db: %s<br>" % db.execute(
            "SELECT COUNT(*) AS num FROM test").fetchone()[0]

        with benchmark("Indexed query x 1000", 0.25):
            found = 0
            cur = db.getCursor()
            cur.logging = False
            for i in range(1000):  # 1000x by test_id
                res = cur.execute("SELECT * FROM test WHERE test_id = %s" % i)
                for row in res:
                    found += 1
                if i % 100 == 0:
                    yield "."

            assert found == 20000, "Found: %s != 20000" % found

        with benchmark("Not indexed query x 100", 0.6):
            found = 0
            cur = db.getCursor()
            cur.logging = False
            for i in range(100):  # 100x by json_id (not indexed)
                res = cur.execute("SELECT * FROM test WHERE json_id = %s" % i)
                for row in res:
                    found += 1
                if i % 10 == 0:
                    yield "."

            assert found == 18900, "Found: %s != 18900" % found

        with benchmark("Like query x 100", 1.8):
            found = 0
            cur = db.getCursor()
            cur.logging = False
            for i in range(100):  # 100x LIKE match on title
                res = cur.execute(
                    "SELECT * FROM test WHERE title LIKE '%%message %s%%'" % i)
                for row in res:
                    found += 1
                if i % 10 == 0:
                    yield "."

            assert found == 38900, "Found: %s != 38900" % found

        db.close()
        if os.path.isfile("%s/benchmark.db" % config.data_dir):
            os.unlink("%s/benchmark.db" % config.data_dir)

        gc.collect()  # Explicit garbage collection

        yield "<br>Done. Total: %.2fs" % (time.time() - t)
Exemple #22
0
def gorev_ekle():
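    # Adds a task: reads the description from the submitted form field
    # 'gorev_aciklama' and inserts it into the "gorev" table with TamamlandiMi=True.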
    db = Db()
    sql = """ insert into "gorev" ("GorevAdi", "TamamlandiMi")  values (%s, %s) """
    gorev_adi = request.form.get('gorev_aciklama')
    db.execute(sql, (gorev_adi, True))
    return redirect("/")
Exemple #23
0
        open(filePath, 'w').write(model.to_json())

        # hist_list.append(history.history);
        # print history.history
        np.savetxt('data/result/lstm_result/lstm/lstm_advanced3_%d_1_5.txt' %
                   testid,
                   np.array(history.history['val_acc']),
                   delimiter=',')

    # print hist_list
    # hist_list = np.hist_list(hist_list);
    # np.savetxt('data/result/lstm_oneimage_fivelength.txt', hist_list, delimiter=',')


if __name__ == "__main__":
    setting = edict()
    setting.gpu = 1
    setting.db = cfg.db.gtea

    db = Db(setting.db, cfg.dstDir)
    db.genDb()

    # print db.xid2ocid.max()
    KTF.set_session(get_session())

    modelTrain(db)

    # print X_train.shape
    # print X_test.shape
    # mergeLayerTest(db, X_train, [], X_test, [], Y_train, Y_test)
Exemple #24
0
from Db import Db
from Excel import Excel

d = Db()
print d.getFirm_by_id('516cf679a09eee0ce4979f10')

Exemple #25
0
import os
import csv
from Properties import Properties
from Db import Db

function_map = {
	"TEXT": str,
	"INTEGER": int,
	"BOOLEAN": int,
	"DATE": str
}
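# Presumably used to coerce raw CSV values into the Python type named by each
# spec column; the spec-driven load itself is truncated in this excerpt.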

properties = Properties.getProperties()
db = Db(properties)

spec_files = os.listdir("specs")
data_files = os.listdir("data")

# map files into a dictionary
file_map = {}
for data_file in data_files:
	file_info = data_file.split("_")
	if not file_info[0] in file_map:
		file_map[file_info[0]] = []
	file_map[file_info[0]].append(data_file)
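# file_map now groups data files by the prefix before the first underscore,
# which presumably matches the corresponding spec filename.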

for spec_filename in spec_files:
	with open("specs/" + spec_filename, "rt") as spec_file:
		filename_info = spec_filename.split(".")
		file = csv.reader(spec_file, delimiter=",")
		next(file) # Skip first row
Exemple #26
0
def giveAnswer(bot, update):
    print('[giveAnswer]:')
    query = update.callback_query
    print('[giveAnswer]' + query.message.text)
    # query = update.callback_query
    message_id_Of_user_text = query.data.split(';')[0]
    if query.data.find(';') != -1:
        fma = For_more_answers().Decompress(query.data)
        t = Db().ExecuteSingle(
            DB_NAME, "SELECT Text FROM " + T_TELEGRAM_MESSAGES +
            " WHERE message_id=" + fma.message_id_from_usersText)

        results = search(t[0], T_Question_Answer,
                         QUESTION)  # TODO: switch to the proper database
        results = sorted(results, key=lambda k: k['matchedCount'])
        fma_answer_ids = [f.split(',')[0] for f in fma.messages]
        fma_messages_ids = [
            f.split(',')[1] for f in fma.messages if f.split(',')[1] != ''
        ]  # not used yet
        sort = []
        for x in results:
            already_exists = False
            for fmm_id in fma_answer_ids:
                if int(fmm_id) == x['question'][0]:
                    already_exists = True
                    break
            if not already_exists:
                sort.append(x)

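        # results are sorted by matchedCount ascending, so the reversed slice below
        # keeps the three highest-scoring, not-yet-shown answers, best match first.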
        sort = sort[:-4:-1]

        for i, item in enumerate(sort):
            keyboard = [[
                InlineKeyboardButton("Показать ответ:",
                                     callback_data=item['question'][0])
            ]]
            reply = InlineKeyboardMarkup(keyboard)
            t = item['question'][1]
            try:
                bot.edit_message_text(
                    text=str(t),
                    chat_id=query.message.chat_id,
                    message_id=int(fma_messages_ids[i]),  #TODO think
                    reply_markup=reply)
            except BaseException:
                print('BaseException')
            fma.messages.append(str(item['question'][0]) + ',')

        gg = fma.Compress_for_recieve()
        if gg == query.data:
            if len(sort) == 0:
                print("nothing left to suggest")
            keyboard = [[
                InlineKeyboardButton("Больше нет :(", callback_data=gg)
            ]]
        else:
            keyboard = [[
                InlineKeyboardButton("Показать еще!", callback_data=gg)
            ]]  # TODO: link to the message
        reply = InlineKeyboardMarkup(keyboard)
        try:
            bot.edit_message_text(text="____У нас есть еще:)_____",
                                  chat_id=query.message.chat_id,
                                  message_id=query.message.message_id,
                                  reply_markup=reply)
        except BaseException:
            print("Дошли до предела callback_data")
            bot.edit_message_text(text="____ЭТО КОНЕЦ :(_____",
                                  chat_id=query.message.chat_id,
                                  message_id=query.message.message_id)
    else:
        #print(update.message.chat.username+' [giveAnswer]'+'\r\n'+query.message.text+'\r\n')
        t = '<b>' + query.message.text + '</b> \r\n' + Db().GetByColumnName(
            'db_001.db', 'T_Question_Answer', 'id', query.data)[0][2]
        bot.edit_message_text(text=t,
                              chat_id=query.message.chat_id,
                              message_id=query.message.message_id,
                              parse_mode=ParseMode.HTML)
Exemple #27
0
    def initUi(self):
        """Ui is created here.

        DO NOT USE 'x' AS A VARIABLE HERE AGAIN, IT WILL BREAK THE CODE
        """
        self.holder = Holder.Holder(parent=self)

        self.orderTotal = OrderTotal.OrderTotal(0, self)

        dBa = Db()

        categories = dBa.getCategories()
        itemsLayout = QStackedLayout()
        tabs = {}
        x = 0  # this is the only x that may be used in initUi (see docstring above)

        payStyle = """
            QLabel {
                color: black;
                font-weight: bold;
                font-size: 25pt;
                font-family: Asap;
            };
            """

        llevaStyle = """
            QLabel {
                color: Black;
                font-weight: bold;
                font-size: 15pt;
                font-family: Asap;
            };
            """

        tinyStyle = """
            QRadioButton {
                color: Black;
                font-weight: bold;
                font-family: Asap;
                font-size: 15pt;
            }
            QRadioButton::indicator::unchecked{
                border: 1px solid darkgray;
                border-radius: 10px;
                background-color: yellow;
                width: 20px;
                height: 20px;
                margin-left: 5px;
            }
            QRadioButton::indicator::checked{
                border: 1px solid darkgray;
                border-radius: 10px;
                background-color: black;
                width: 20px;
                height: 20px;
                margin-left: 5px;
            };
            """

        self.payBtn = Buttons.StrokeBtn2(100,
                                         60,
                                         15,
                                         qRgb(226, 224, 33),
                                         "PAGAR",
                                         payStyle,
                                         self,
                                         sWidth=10,
                                         hExpand=True)
        self.payBtn.clicked.connect(self.pay)

        self.llevaBtn = Buttons.StrokeBtn2(100,
                                           60,
                                           15,
                                           qRgb(33, 46, 226),
                                           "L?",
                                           llevaStyle,
                                           self,
                                           sWidth=10)
        self.llevaBtn.clicked.connect(self.toggleLleva)

        self.npBtn = Buttons.StrokeBtn2(100,
                                        60,
                                        15,
                                        qRgb(33, 46, 226),
                                        "P?",
                                        llevaStyle,
                                        self,
                                        sWidth=10)
        self.npBtn.clicked.connect(self.toggleNp)

        sexAgeLayout = QHBoxLayout()

        sexAgeLayout.addStretch()
        sexM = QRadioButton("M")
        sexH = QRadioButton("H")
        self.sexo = QButtonGroup(self)
        self.sexBtns = [sexM, sexH]
        z = 0
        for btn in self.sexBtns:
            btn.setStyleSheet(tinyStyle)
            self.sexo.addButton(btn, z)  # use the local counter as the button id, not the reserved x
            sexAgeLayout.addWidget(btn)
            z += 1

        sexAgeLayout.addSpacing(20)

        age1 = QRadioButton("1")
        age2 = QRadioButton("2")
        age3 = QRadioButton("3")
        age4 = QRadioButton("4")
        self.edad = QButtonGroup(self)
        self.ageBtns = [age1, age2, age3, age4]
        z = 1
        for btn in self.ageBtns:
            btn.setStyleSheet(tinyStyle)
            self.edad.addButton(btn, z)  # use the local counter as the button id, not the reserved x
            sexAgeLayout.addWidget(btn)
            z += 1
        sexAgeLayout.addStretch()

        for category in categories:
            products = dBa.getProducts(category[1])
            setattr(self, "menu" + category[1],
                    Menu.Menu(products, category[2], self, hold=self.holder))
            itemsLayout.addWidget(getattr(self, "menu" + category[1]))
            tabs[category[0]] = (category[1], x, itemsLayout)
            x += 1
        tabsWidget = Menu.Tabs(tabs, parent=self)
        tabsLayout = QHBoxLayout()
        tabsLayout.addWidget(tabsWidget)

        self.inputField = TextInput.TextInput(parent=self)
        self.nameField = TextInput.TextInputSmall(parent=self)
        self.nameField.setFixedHeight(55)

        nameLayout = QVBoxLayout()
        nameLayout.setSpacing(0)
        nameLayout.addWidget(self.nameField)
        nameLayout.addLayout(sexAgeLayout)

        orderTopLayout = QHBoxLayout()
        orderTopLayout.addLayout(nameLayout)
        orderTopLayout.addWidget(self.orderTotal)

        layoutC11 = QHBoxLayout()
        layoutC11.addWidget(self.npBtn)
        layoutC11.addWidget(self.llevaBtn)
        layoutC11.addWidget(self.payBtn)

        layoutC1 = QVBoxLayout()
        layoutC1.addLayout(orderTopLayout)
        layoutC1.addWidget(self.holder)
        layoutC1.addLayout(layoutC11)

        layoutH1C1 = QHBoxLayout()
        layoutH1C1.addLayout(self.imgBtns())
        layoutH1C1.addLayout(layoutC1)

        layoutC2 = QVBoxLayout()
        layoutC2.addLayout(tabsLayout)
        layoutC2.addLayout(itemsLayout)
        layoutC2.addWidget(self.inputField)

        layout = QHBoxLayout()
        layout.addLayout(layoutH1C1)
        layout.addLayout(layoutC2)

        self.setLayout(layout)
Exemple #28
0
 def __init__(self):
     self.base_url = u"http://www.39eh.com"
     self.db = Db()
Exemple #29
0
 def process_scroll(process_no, lists, page_rows):
     dbconfig = {
         'host': '127.0.0.1',
         'port': 3306,
         'user': '******',
         'password': '******',
         'db': 'tracking',
         'charset': 'utf8'
     }
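     # Credentials are masked in the source; Db is assumed to wrap a MySQL
     # connection built from this host/port/user/password/charset config.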
     db_class = Db(dbconfig)
     try:
         for line in lists[process_no * page_rows:(process_no + 1) *
                           page_rows]:
             try:
                 if '_source' not in line:
                     continue
                 data = line['_source']['message']
                 data_list = data.split(' ')
                 if not data_list:
                     continue
                 if not isinstance(data_list, list):
                     continue
                 user_agent = ''
                 for index in range(12, len(data_list)):
                     user_agent = user_agent + (' ' if user_agent else
                                                '') + data_list[index]
                 try:
                     visit_time = str(
                         datetime_now.datetime.strptime(
                             data_list[4][1:], "%d/%b/%Y:%H:%M:%S"))
                 except Exception as e:
                     print(e)
                     visit_time = ''
                 quote_pos = user_agent.find('"', 2)
                 # find() returns -1 when there is no closing quote, so check explicitly
                 sub_pos = quote_pos if quote_pos != -1 else len(user_agent)
                 referer = data_list[11] if data_list[11] else ''
                 insert_data = {
                     'ip': data_list[0],
                     'website_url': urllib.parse.unquote_plus(data_list[2] + data_list[7]),
                     'user_agent': escape(user_agent[1:sub_pos]),
                     'visit_time': visit_time,
                     'referer': referer[1:-1]
                 }
                 insert_data['created_time'] = datetime_now.datetime.now(
                 ).strftime("%Y-%m-%d %H:%M:%S")
                 insert_data['md5_hash'] = hashlib.md5(
                     (insert_data['ip'] + insert_data['user_agent'][0:255]
                      ).encode('utf8')).hexdigest()  # hash only the first 255 chars of the UA
                 table_name = 'statistics_data'
                 print(insert_data)
                 id = db_class.insert(table_name, insert_data)
                 print(id)
             except Exception as e:
                 print(e)
         return str(os.getpid()) + ' done '
     except Exception as e:
         print(e)
Exemple #30
0
 def fetch_active_alerts(self):
     try:
         while True:
             db = Db(logger)
             alerts = db.get_alerts_to_send()
             data_offset = 0
             print "fetched alert to sent %r" % alerts
             if alerts is not None:
                 for alert in alerts:
                     print "flag alert as sent %r" % alert
                     #flag alert as sent to avoid issues
                     if (db.flag_alert_sent(alert)):
                         print "flagged alert as sent proceed fetch subs %r"\
                          % alert
                         if alert.msisdn is not None and alert.msisdn != '':
                             print "send quicksms %r" % alert.msisdn
                             message = {
                                 "short_code": alert.shortcode,
                                 "msisdn": alert.msisdn,
                                 "message": alert.message,
                                 "network": 1,
                                 "alert_type": alert.alert_type_id,
                                 "alert_id": alert.id,
                                 "correlator": str(alert.id) + "_" + str(alert.msisdn),
                                 "linkId": None
                             }
                             self.publish_rabbit(message)
                         else:
                             #fetch subs 4 this alert
                             sub_count = db.count_alert_subs(
                                 alert.service_id)
                             for data_offset in range(sub_count):
                                 subs =\
                                  db.fetch_alert_subscribers(alert.service_id,
                                       data_offset)
                                 print "fetched subs for senting alert\
  %r offset %r" % (subs, data_offset)
                                 if subs is not None:
                                     for sub in subs:
                                         print "looping pushing to queue %r"\
                                          % sub
                                         sub_details =\
                                          db.get_sub_msisdn(sub.profile_id)
                                         message = {
                                             "short_code": alert.shortcode,
                                             "msisdn": sub_details.msisdn,
                                             "message": alert.message,
                                             "network": sub_details.network_id,
                                             "alert_type": alert.alert_type_id,
                                             "alert_id": alert.id,
                                             "correlator": str(alert.id) + "_" + str(sub_details.msisdn),
                                             "linkId": None
                                         }
                                         self.publish_rabbit(message)
                                     data_offset += 10000
             else:
                 time.sleep(30)
     except Exception, e:
         print("error processing alerts to publish :: %r" % e)