Example #1
    def do_one_task(task):
        import peewee
        from peewee import MySQLDatabase

        db = MySQLDatabase(
            task["stream"]["database"], **{
                'host': connection["host"],
                'password': connection["passwd"],
                'port': connection["port"],
                'user': connection["user"]
            })

        pk = task["stream"].get("pk")
        if not pk:
            pk = {"field": "id", "type": "int"}

        class MyModel(peewee.Model):
            _pk = {
                "char": peewee.CharField(primary_key=True),
                "int": peewee.IntegerField(primary_key=True)
            }[pk["type"]]

            class Meta:
                database = db

        setattr(MyModel, pk["field"], MyModel._pk)

        # Replace the `?` in the SQL with the time the SQL was last run
        md5 = hashlib.md5((config.__name__ + task["stream"]["database"] +
                           task["stream"]["sql"]).encode("utf-8")).hexdigest()
        start_time = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
        last_start_time = r.get(md5)
        if not last_start_time:
            if task["stream"].get("init_time"):
                last_start_time = task["stream"].get("init_time")
            else:
                last_start_time = start_time

        query = MyModel.raw(task["stream"]["sql"].replace("?", "%s"),
                            (last_start_time, )).dicts().iterator()
        for row in query:
            for job in task["jobs"]:
                event = {"action": "insert", "values": row}

                watched = job.get("watched") or job.get("filters")
                if watched:
                    func_name, kwargs = list(watched.items())[0]
                    func = getattr(custom_rowfilters, func_name,
                                   None) or getattr(row_filters, func_name)
                    is_ok = func(event, **kwargs)
                    if not is_ok:
                        continue

                rows = do_pipeline(job["pipeline"], event["values"])
                to_dest.make_docs(job["dest"], rows)
                if len(to_dest.docs) >= to_dest.bulk_size:
                    to_dest.upload_docs()
        db.close()
        to_dest.upload_docs()
        r.set(md5, start_time)
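
For orientation, a hypothetical shape for one entry of config.TASKS, inferred only from how the loop above reads it; every concrete key, value, and function name below is an illustrative assumption, not taken from the original project.

# Sketch of one TASKS entry (all values are placeholders)
TASKS = [{
    "stream": {
        "database": "shop",                                  # task["stream"]["database"]
        "sql": "SELECT * FROM orders WHERE updated_at > ?",  # `?` replaced with the last run time
        "pk": {"field": "id", "type": "int"},                # optional; this is the default
        "init_time": "2020-01-01 00:00:00",                  # optional initial watermark
    },
    "jobs": [{
        "watched": {"is_paid": {"field": "status"}},         # {filter_func_name: kwargs}
        "pipeline": [],                                      # steps consumed by do_pipeline()
        "dest": {"index": "orders"},                         # passed to to_dest.make_docs()
    }],
}]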
Example #2
def handle_init_stream(config):
    connection = config.CONNECTION
    to_dest = ToDest(config)
    for task in config.TASKS:
        import peewee
        from peewee import MySQLDatabase

        db = MySQLDatabase(
            task["stream"]["database"], **{
                'host': connection["host"],
                'password': connection["passwd"],
                'port': connection["port"],
                'user': connection["user"]
            })

        pk = task["stream"].get("pk")
        if not pk:
            pk = {"field": "id", "type": "int"}

        class MyModel(peewee.Model):
            _pk = {
                "char": peewee.CharField(primary_key=True),
                "int": peewee.IntegerField(primary_key=True)
            }[pk["type"]]

            class Meta:
                database = db

        setattr(MyModel, pk["field"], MyModel._pk)

        query = MyModel.raw(task["stream"]["sql"]).dicts().iterator()
        for row in query:
            for job in task["jobs"]:
                event = {"action": "insert", "values": row}
                if event["action"] not in job["actions"]:
                    continue

                watched = job.get("watched")
                if watched:
                    func_name, kwargs = list(watched.items())[0]
                    func = getattr(custom_rowfilters, func_name,
                                   None) or getattr(row_filters, func_name)
                    is_ok = func(event, **kwargs)
                    if not is_ok:
                        continue

                rows = do_pipeline(job["pipeline"], event["values"])
                to_dest.make_docs(job["dest"], rows)
                if len(to_dest.docs) >= to_dest.bulk_size:
                    to_dest.upload_docs()
        db.close()
        to_dest.upload_docs()
Example #3
    def _load_tracked_guilds(self):
        """Charger les guilds à tracker et les channels à ignorer"""
        config = configparser.ConfigParser(allow_no_value=True)
        p = pathlib.Path(__file__).parent.parent
        config.read(p / "config.ini")
        print("Chargement des guilds trackées...")
        for guild_id_str in config["Tracked"]:
            guild_id = int(guild_id_str)

            new_db = MySQLDatabase(
                f"{guild_id}_db",
                host=db_host,
                user=db_user,
                password=db_password,
                charset="utf8mb4",
                autoconnect=False,
            )

            try:
                new_db.connect()
                self.tracked_guilds[guild_id] = new_db
            except OperationalError:
                print(f"Base de données indisponible pour {guild_id}")
                continue
            finally:
                new_db.close()

            # Create the tables
            with new_db:
                with new_db.bind_ctx([Message]):
                    new_db.create_tables([Message])

            print(f"Guild {guild_id} trackée")

        total_tracked = len(self.tracked_guilds)
        print(total_tracked, "guild(s) tracked")

        # Ignore channels
        print("Loading ignored channels...")
        for section in config.sections():
            try:
                section_int = int(section)
            except ValueError:
                continue
            if section_int in self.tracked_guilds:
                for channel_id_str in config[section]:
                    channel_id = int(channel_id_str)
                    self.ignored_channels.append(channel_id)
                    print(f"{channel_id} ignoré")
Example #4
import tornado.web
from peewee import MySQLDatabase


class BaseHandler(tornado.web.RequestHandler):
    '''
    Parent class for all Handlers.
    Defines how the database connection is opened and closed.
    '''
    def prepare(self):
        self.my_database = MySQLDatabase(host='127.0.0.1',
                                         user='******',
                                         passwd='123456',
                                         database='task_manage')
        self.my_database.connect()
        return super(BaseHandler, self).prepare()

    def on_finish(self):
        if not self.my_database.is_closed():
            self.my_database.close()
        return super(BaseHandler, self).on_finish()
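
A minimal usage sketch building on the handler above; the route, the task table, and its columns are placeholder assumptions, not part of the original example.

import tornado.ioloop


class TaskListHandler(BaseHandler):
    def get(self):
        # Raw query on the per-request connection opened in BaseHandler.prepare();
        # table and column names are placeholders.
        cursor = self.my_database.execute_sql("SELECT id, title FROM task LIMIT 20")
        tasks = [{"id": row[0], "title": row[1]} for row in cursor.fetchall()]
        self.write({"tasks": tasks})


if __name__ == "__main__":
    app = tornado.web.Application([(r"/tasks", TaskListHandler)])
    app.listen(8888)
    tornado.ioloop.IOLoop.current().start()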
Example #5
class BackupDatabase(object):
    __slots__ = ('database', )

    def __init__(self, srv, info):
        db_host = srv.cfg.db_host
        db_port = srv.cfg.db_port
        db_user = srv.cfg.db_user
        db_passwd = srv.cfg.db_pass

        self.database = MySQLDatabase(
            info, **{
                'host': db_host,
                'password': db_passwd,
                'port': db_port,
                'user': db_user
            })

    def close(self):
        self.database.close()
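
A hypothetical way to use the class above; the srv object normally comes from the surrounding server code, so a stand-in with placeholder credentials is built here with SimpleNamespace.

from types import SimpleNamespace

srv = SimpleNamespace(cfg=SimpleNamespace(db_host="127.0.0.1", db_port=3306,
                                          db_user="root", db_pass="secret"))

backup = BackupDatabase(srv, "backup_db")   # "backup_db" is a placeholder schema name
backup.database.connect()
# ... run queries against backup.database here ...
backup.close()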
Example #6
def do_sql(row, sql, connection, database):
    from peewee import MySQLDatabase, Model

    db = MySQLDatabase(
        database, **{
            'host': connection["host"],
            'password': connection["passwd"],
            'port': connection["port"],
            'user': connection["user"]
        })

    class MyModel(Model):
        class Meta:
            database = db

    import re
    keys = re.findall(r"\{(\w+)\}", sql)
    params = [row[key] for key in keys]

    sql = re.sub(r"\{\w+\}", "%s", sql)
    result = list(MyModel.raw(sql, *params).dicts())
    db.close()
    return result
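
For reference, a hypothetical call to do_sql(); the connection values, database, table, and columns are placeholders chosen only to show how the {key} placeholders are looked up in row and bound as query parameters.

connection = {"host": "127.0.0.1", "port": 3306, "user": "root", "passwd": "secret"}
row = {"user_id": 42, "status": "paid"}

# "{user_id}" and "{status}" are replaced by %s and filled from row, in order.
orders = do_sql(row,
                "SELECT * FROM orders WHERE user_id = {user_id} AND status = {status}",
                connection,
                "shop")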
Example #7
def handle_init_stream(config):
    connection = config.CONNECTION
    for task in config.TASKS:
        from peewee import MySQLDatabase, Model

        db = MySQLDatabase(
            task["stream"]["database"], **{
                'host': connection["host"],
                'password': connection["passwd"],
                'port': connection["port"],
                'user': connection["user"]
            })

        class MyModel(Model):
            class Meta:
                database = db

        query = MyModel.raw(task["stream"]["sql"]).dicts().iterator()
        for row in query:
            for job in task["jobs"]:
                event = {
                    "action": "insert",
                    "values": row
                }
                if event["action"] in job["actions"]:
                    rows = do_pipeline(job["pipeline"], event["values"])
                    to_dest(job["dest"], rows)
        db.close()
Example #8
def main():
    database = MySQLDatabase(**settings['db'])
    database.create_tables([
        Note,
    ])
    database.close()
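
main() relies on a settings dict and a Note model defined elsewhere in the module; a sketch of what those definitions could look like (names, fields, and credentials are assumptions, and the real module presumably binds Note to the database built from the same settings).

from peewee import CharField, Model, MySQLDatabase, TextField

# Placeholder connection settings; the real project reads these from its config.
settings = {"db": {"database": "notes", "host": "127.0.0.1",
                   "user": "root", "password": "secret"}}

database = MySQLDatabase(**settings["db"])


class Note(Model):
    title = CharField()
    body = TextField(default="")

    class Meta:
        database = database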
Example #9
class BaseModel(Model):
  class Meta:
    database = db

class Concept(BaseModel):
  name = CharField()
  text = TextField(default="# Explanation / Analogy / Like I'm Five")
  last_rekall = DateTimeField(default=datetime.datetime.now)
  rekalls = IntegerField(default=0)

  def next_rekall(self):
    # the review interval grows exponentially with the number of recalls
    return self.last_rekall + datetime.timedelta(seconds=(INTERVAL_SECONDS ** self.rekalls))

Concept.create_table(fail_silently=True)

## START
if args.daemon:
  db.close()
  print("Starting Rekall Daemon...")

  while (True):
    db.connect()
    concept = None
    for c in Concept.select():
      if concept is None:
        if c.next_rekall() < datetime.datetime.now():
          concept = c
        continue
      concept = c if concept.next_rekall() > c.next_rekall() else concept
    if concept is not None:
      os.system('notify-send -t 10000 -u critical "REKALL\n' + concept.name + '"')
    db.close()
    sleep(INTERVAL_SECONDS)
Example #10
if __name__ == "__main__":

    parser = ArgumentParser(formatter_class=RawDescriptionHelpFormatter)

    parser.add_argument("-p", "--dbpass", required=True)
    parser.add_argument("-i", "--pdb_dir", default="/data/databases/pdb/")
    parser.add_argument("-db", "--dbname", default="pdbdb")
    parser.add_argument("-u", "--dbuser", default="root")

    args = parser.parse_args()
    from peewee import MySQLDatabase

    mysql_db = MySQLDatabase(args.dbname,
                             user=args.dbuser,
                             password=args.dbpass)
    mysql_db.close()
    sqldb.initialize(mysql_db)

    pdb_utils = PDBs(pdb_dir=args.pdb_dir)
    df = pdb_utils.entries_df()
    pdbs = list(pdb_utils)
    with tqdm(pdbs) as pbar:
        for (code, pdb_path) in pbar:
            mysql_db.connect(reuse_if_open=True)
            pbar.set_description(code)
            try:
                entry = df[df.IDCODE == code.upper()].iloc[0]
            except IndexError:
                continue

            pdb_model = PDB(code=code, experiment=str(entry.EXPERIMENT))
Example #11
class Database(object):

    """db封装,自动查找数据库
    """

    def __init__(self, **connect_kwargs):
        self.connect_kwargs = connect_kwargs
        self.load_database()
        self.Model = self.get_model_class()

    def load_database(self):
        self.db = self.connect_kwargs.pop("db")
        self.database = MySQLDatabase(self.db, **self.connect_kwargs)
        self.database.field_overrides.update({"enum": "enum"})  # add the enum field type

    def get_model_class(self):
        """获取基类model
        """

        class BaseModel(_Model):

            """BaseModel的封装
            """

            class Meta(object):
                """元类
                """

                database = self.database

            @classmethod
            def one(cls, *query, **kwargs):
                """获取单条数据
                Retruns:
                    返回单条数据不存在则返回None
                """
                try:
                    return cls.get(*query, **kwargs)
                except DoesNotExist:
                    return None

            def delete_instance(self, *args, **kwargs):
                """如果deleted字段存在自动使用逻辑删除
                """
                if "deleted" in self._meta.fields:
                    setattr(self, "deleted", "1")
                    super(BaseModel, self).save()
                else:
                    super(BaseModel, self).delete_instance(*args, **kwargs)

            def __hash__(self):
                """提供hash支持
                """
                return hash(self.id)

        return BaseModel

    def connect(self):
        """主从建立连接,如果连接关闭重试
        """
        i = 0
        while i < 4:
            try:
                if self.database.is_closed():
                    self.database.get_conn().ping(True)
                break
            except OperationalError:
                self.close()
                i = i + 1

    def close(self):
        """关闭连接
        """
        try:
            self.database.close()
        except:
            pass
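
A minimal usage sketch for the wrapper above, assuming peewee 2.x (which is where the patched field_overrides attribute and get_conn() exist); the connection values and the User model are placeholders.

from peewee import CharField

db = Database(db="my_app", host="127.0.0.1", port=3306,
              user="root", password="secret")


class User(db.Model):
    name = CharField()
    deleted = CharField(default="0")


db.connect()
db.database.create_tables([User], safe=True)
user = User.one(User.name == "alice")   # returns None instead of raising DoesNotExist
if user is not None:
    user.delete_instance()              # soft delete: only sets deleted = "1"
db.close()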
Example #12
from os import environ

from peewee import CharField, IntegerField, Model, MySQLDatabase
from playhouse.db_url import connect

MYSQL_DATABASE = environ.get("MYSQL_DATABASE", None)
MYSQL_HOSTNAME = environ.get("MYSQL_HOSTNAME", None)
MYSQL_PORT = environ.get("MYSQL_PORT", 3306)
MYSQL_PASSWORD = environ.get("MYSQL_PASSWORD", None)

if MYSQL_DATABASE is not None:
    db = MySQLDatabase(MYSQL_DATABASE,
                       user=MYSQL_DATABASE,
                       password=MYSQL_PASSWORD,
                       host=MYSQL_HOSTNAME,
                       port=int(MYSQL_PORT))
else:
    db = connect("sqlite:///sigbro_acl.db")

db.close()


class BaseModel(Model):
    class Meta:
        database = db


class acl_scanner(BaseModel):
    """Table for store scanner metadata."""
    network = CharField(max_length=10, null=False, index=True,
                        unique=True)  # metric name
    block = IntegerField(null=True, default=1)


class acl_accounts(BaseModel):