Example 1
def getRigistRequest():
    # Register the username and password in the database
    name = request.form.get('name')
    pwd = request.form.get('pwd')
    dt = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")

    # Connect to the database (the database TESTDB was created beforehand)
    db = Database()
    conn = db.Connection()
    # Get a cursor via the cursor() method
    cursor = conn.cursor()
    # SQL INSERT statement; %s placeholders with bound parameters
    # avoid the SQL injection risk of string interpolation
    sql = "INSERT INTO user(name, pwd, time) VALUES (%s, %s, %s)"
    print("SQL:", sql, "params:", (name, pwd, dt))
    try:
        # Execute the SQL statement with bound parameters
        cursor.execute(sql, (name, pwd, dt))
        # Commit the transaction
        conn.commit()
        # Redirect to the login page after successful registration
        return render_template('login.html')
    except Exception:
        # Print the error information
        traceback.print_exc()
        # Roll back if an error occurs
        conn.rollback()
        return 'Registration failed'
    finally:
        # Close the database connection
        db.Close()
Example 2
def neo4j(config, setup_indexes):
    """
    Configure the Neo4j instance.
    """
    db = Database.from_config(config)
    # Make sure the database is empty
    with db.session() as session:
        session.run("MATCH (n) DETACH DELETE n")
    yield db

    constraints.check_integrity(db)
    db.driver.close()
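
This reads as a pytest fixture (the @pytest.fixture decorator was presumably stripped when the snippet was extracted): it wipes the graph before yielding the database, then verifies integrity constraints after the test. A minimal sketch of a test consuming it, assuming the session() API used above; the Cypher query and assertion are illustrative:

def test_graph_starts_empty(neo4j):
    # The fixture emptied the graph before yielding, so no nodes remain
    with neo4j.session() as session:
        count = session.run("MATCH (n) RETURN count(n) AS c").single()["c"]
    assert count == 0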
Example 3
def getLoginRequest():
    # Check whether the username exists and the password matches
    # Connect to the database (the database TESTDB was created beforehand)
    db = Database()
    conn = db.Connection()
    # Get a cursor via the cursor() method
    cursor = conn.cursor()
    # SQL SELECT statement
    name = request.form.get('name')
    pwd = request.form.get('pwd')

    # %s placeholders with bound parameters avoid SQL injection
    sql = "select * from user where name=%s and pwd=%s;"
    print("SQL:", sql, "params:", (name, pwd))
    try:
        # Execute the SQL statement with bound parameters
        cursor.execute(sql, (name, pwd))
        results = cursor.fetchall()
        print(results)
        if len(results) == 1:
            data = {}
            data["code"] = 'Login successful'
            for row in results:
                data["id"] = row[0]
                data["pwd"] = row[1]
                data["name"] = row[2]
                data["time"] = row[3]
            return jsonify(data)
        else:
            return 'Incorrect username or password'
    except Exception:
        # Roll back if an error occurs
        traceback.print_exc()
        conn.rollback()
        return 'Login failed'
    finally:
        # Close the database connection
        db.Close()
Example 4
def with_databases(settings, jumpbox: Optional[str] = "non-prod"):
    with SSHTunnel(
            remote_host=settings.postgres_host,
            remote_port=settings.postgres_port,
            local_port=7777,
            jumpbox=jumpbox,
    ) as postgres_tunnel, SSHTunnel(
            remote_host=settings.neo4j_host,
            remote_port=settings.neo4j_port,
            local_port=8888,
            jumpbox=jumpbox,
    ) as neo4j_tunnel:

        engine, factory = migrate_db.get_postgres(
            f"postgresql://{settings.postgres_user}:{settings.postgres_password}@{postgres_tunnel.host}:{postgres_tunnel.port}/{settings.postgres_db}"
        )

        yield Database(
            uri=f"bolt://{neo4j_tunnel.host}:{neo4j_tunnel.port}",
            user=settings.neo4j_user,
            password=settings.neo4j_password,
            max_connection_lifetime=300,
        ), engine, factory
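
As written this is a generator, so in its original module it is presumably wrapped with @contextlib.contextmanager; under that assumption, usage would look roughly like this (the names come from the snippet above):

from contextlib import contextmanager

# Assumption: the original module applies this decorator (or an equivalent)
with_databases = contextmanager(with_databases)

with with_databases(settings, jumpbox="non-prod") as (neo4j_db, engine, factory):
    # Both SSH tunnels stay open for the duration of this block
    session = factory()  # Postgres session from the session factory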
Example 5
 def update_user(olddata, newdata):
     return Database.update(UserModel.__collection, new_data=newdata, old_data=olddata)
Example 6
 def remove_user(data: dict):
     return Database.delete(UserModel.__collection, data=data)
Example 7
 def all_user():
     return Database.get(collection=UserModel.__collection, data={})
Example 8
 def get_user(data):
     return Database.get(UserModel.__collection, data)
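
Taken together, Examples 5-8 (and Example 20 below) suggest a thin static CRUD facade keyed by a collection name. A hypothetical round trip under that reading; the "users" collection name stands in for the name-mangled UserModel.__collection, and the signatures are inferred from the snippets, not confirmed:

Database.save("users", data={"name": "alice", "age": 30})
print(Database.get("users", {"name": "alice"}))
Database.update("users", new_data={"age": 31}, old_data={"name": "alice"})
Database.delete("users", data={"name": "alice"})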
Example 9
def delete_orphaned_datasets_impl(
    bf_database: PennsieveDatabase,
    db: Database,
    organization_id: int,
    dry_run: bool = True,
):
    def completely_delete(partitioned_db):
        cumulative_counts = DatasetDeletionCounts.empty()
        sequential_failures = 0

        while True:
            try:
                summary = partitioned_db.delete_dataset(batch_size=1000,
                                                        duration=2000)
                if summary.done:
                    return summary.update_counts(cumulative_counts)
                else:
                    cumulative_counts = cumulative_counts.update(
                        summary.counts)
                sequential_failures = 0
                time.sleep(0.5)
            except Exception as e:
                sequential_failures += 1
                log.warn("FAIL({sequential_failures}): {str(e)}")
                log.warn("WAITING 2s")
                time.sleep(2.0)
                if sequential_failures >= 5:
                    raise e

    model_service_dataset_ids = db.get_dataset_ids(
        OrganizationId(organization_id))
    api_dataset_ids = bf_database.get_dataset_ids(organization_id)
    orphaned_dataset_ids = set(model_service_dataset_ids) - set(
        api_dataset_ids)

    if dry_run:
        log.info(f"""{"*" * 20} DRY RUN {"*" * 20}""")
        log.info(f"Found {len(orphaned_dataset_ids)} orphaned dataset(s)")
        for dataset_id in orphaned_dataset_ids:
            ds = bf_database.get_dataset(organization_id, dataset_id)
            assert ds is None or ds.state == "DELETING"
            log.info(
                f"Deleting: organization={organization_id} / dataset={dataset_id} ({db.count_child_nodes(organization_id, dataset_id)}) => {ds}"
            )
    else:
        log.info(f"Found {len(orphaned_dataset_ids)} orphaned dataset(s)")
        for dataset_id in orphaned_dataset_ids:
            ds = bf_database.get_dataset(organization_id, dataset_id)
            assert ds is None or ds.state == "DELETING"
            partitioned_db = PartitionedDatabase(
                db,
                OrganizationId(organization_id),
                DatasetId(dataset_id),
                UserNodeId("dataset-delete-migration"),
            )
            log.info(
                f"Deleting: organization={organization_id} / dataset={dataset_id} ({db.count_child_nodes(organization_id, dataset_id)}) => {ds}"
            )
            summary = completely_delete(partitioned_db)
            log.info(str(summary))

    log.info("Done")
Example 10
class User:
    def __init__(self,
                 username: str,
                 age: Optional[int] = None,
                 gender: Optional[int] = None,
                 password: Optional[str] = None):
        self.DB: Optional[Database] = None
        self.username = username
        self.age = age
        self.gender = gender
        self.pwhash = None
        self.is_admin = False
        if password is not None:
            self.pwhash = generate_password_hash(password)
        self.id = username  # get this from the database

    def delete_self(self):
        self._ensure_db_exists()

        self.DB.execute(DELETE_USER, (self.username, ))
        print("Deleted user", self.username)
        # TODO delete user files

    def verify_password(self, password: str) -> bool:
        print("Verifying password", self.pwhash, password)
        return check_password_hash(self.pwhash, password)

    def save_to_db(self) -> tuple:
        self._ensure_db_exists()

        # admin needs no gender and age
        valid = self.is_admin or (self.gender is not None
                                  and self.age is not None)
        if not valid:
            return False, 'No gender or age specified'

        if self.is_admin:
            if self.age is None:
                self.age = 100

        ret = True, None
        try:
            self.DB.execute(
                INSERT_USER,
                (self.username, self.age, self.gender, self.pwhash))
            # print(self.DB.get_users())
            # self.DB.commit()
        except sqlite3.IntegrityError:
            ret = False, 'User already exists with same fields, try login'
        except Exception as e:
            ret = False, str(e)

        # TODO remove this
        # no user can be an admin when registering

        # if user is admin when they are registering
        if self.is_admin:
            err = self.make_user_admin()
            ret = err is None, err

        self.DB.commit()
        return ret

    def populate(self):
        """Populates the user based on the username"""
        self._ensure_db_exists()
        users = self.DB.execute(GET_USER, (self.username, ))
        print(users)
        if len(users) == 0:
            raise Exception(f"No user found with username {self.username}")
        self.username = users[0]["username"]
        self.age = users[0]["age"]
        self.gender = users[0]["gender"]
        self.pwhash = users[0]["password"]
        self.id = self.username
        print(self.pwhash)
        return self

    def pickle_instance(self):
        return (self.username, self.age, self.gender)

    def attach_DB(self, DB: Database) -> "User":
        """Attaches the given DB to this user"""
        self.DB = DB
        return self

    def _ensure_db_exists(self) -> "User":
        """This ensures DB is initialized"""
        if self.DB is None:
            self.DB = Database("data/data.db")
        print(self.DB)
        return self

    def _fetch_id(self) -> int:
        """Last inserted ROW id (row number)"""
        self._ensure_db_exists()
        return self.DB.execute(GET_LAST_ID)[0]

    def dispose(self):
        """Disposes the sqlite3 instance"""
        self._ensure_db_exists()
        self.DB.cursor.close()
        self.DB.close()

    def make_user_admin(self) -> Optional[str]:
        """
        Makes a given user admin
        TODO add roles to db and also this call
        """
        user = self
        user.is_admin = True
        print(f"Making {user} an admin")
        try:
            user.DB.execute(INSERT_ADMIN_USER, (user.username, ))
        except Exception as e:
            return str(e)
        user.DB.commit()
        return None

    def __repr__(self):
        return f"User {self.username}"
Example 11
    )
    parser.add_argument("--organization-id", type=int, default=1)
    parser.add_argument("--organization-node-id",
                        type=str,
                        default=str(uuid.uuid4()))
    parser.add_argument("--dataset-id", type=int, default=1)
    parser.add_argument("--dataset-node-id",
                        type=str,
                        default=str(uuid.uuid4()))
    parser.add_argument("--user-id", type=int, default=1)
    parser.add_argument("--user-node-id", type=str, default=str(uuid.uuid4()))
    parser.add_argument("--records", "-n", dest="n", type=int, default=1000)

    args = parser.parse_args()

    raw_db = Database.from_config(Config())

    with raw_db.transaction() as tx:
        raw_db.initialize_organization_and_dataset(
            tx,
            organization_id=OrganizationId(args.organization_id),
            dataset_id=DatasetId(args.dataset_id),
            organization_node_id=OrganizationNodeId(args.organization_node_id),
            dataset_node_id=DatasetNodeId(args.dataset_node_id),
        )

    db = PartitionedDatabase(
        raw_db,
        OrganizationId(args.organization_id),
        DatasetId(args.dataset_id),
        UserNodeId(args.user_node_id),
    )
Example 12
import calendar
import datetime
import os
from pathlib import Path

from flask import jsonify

from server.db import Database, UserFileSystem
from server.main import main

DB = Database("data/data.db")

STATS_OBJECT = {
    'type': 'line',
    'data': {
        'labels': [],
        'datasets': [{
            'data': [],
            'lineTension': 0,
            'backgroundColor': 'transparent',
            'borderColor': '#007bff',
            'borderWidth': 4,
            'pointBackgroundColor': '#007bff'
        }]
    },
    'options': {
        'scales': {
            'yAxes': [{
                'ticks': {
                    'beginAtZero': False
                }
Example 13
import datetime

from flask import send_from_directory, render_template, request, redirect, url_for

from server.spotify import SpotifyApi
from server.db import Database

import param
import dbPrmAndReq

spotify = SpotifyApi(param.spotifyApiClientId, param.spotifyApiSecret,
                     param.spotifyApiScopeLs, param.localServerName,
                     param.localServerPort, param.localServerPage)
spotifyData = {}

db = Database(dbPrmAndReq.dbFilePath, dbPrmAndReq.dbSchema,
              dbPrmAndReq.dbIndexesAfterMassInsert)


def favicon():
    return send_from_directory("./",
                               'favicon.ico',
                               mimetype='image/vnd.microsoft.icon')


def init():
    print("sending init page")
    spotifyUserConnectionUrl = spotify.userConnectionUrl()
    return redirect(spotifyUserConnectionUrl)


def index():
Example 14
def migrate_dataset(
    organization_id: int,
    dataset_ids: Optional[List[int]] = None,
    remove_existing: bool = False,
    environment: str = "dev",
    jumpbox: Optional[str] = "non-prod",
    smoke_test: bool = True,
    remap_ids: bool = False,
):  # TODO does this need node IDs?

    if dataset_ids is None and remove_existing:
        raise Exception(
            f"Cannot remove existing data from Neo4j while migrating the entire organization {organization_id}"
        )
    elif dataset_ids is None and remap_ids:
        raise Exception(f"Can only remap IDs for a single dataset")
    elif dataset_ids is None:
        entire_organization = True
    else:
        entire_organization = False

    settings = SSMParameters(environment)

    with SSHTunnel(
            remote_host=settings.postgres_host,
            remote_port=settings.postgres_port,
            local_port=7777,
            jumpbox=jumpbox,
    ) as postgres_tunnel, SSHTunnel(
            remote_host=settings.neo4j_host,
            remote_port=settings.neo4j_port,
            local_port=8888,
            jumpbox=jumpbox,
    ) as neo4j_tunnel:

        engine, factory = migrate_db.get_postgres(
            f"postgresql://{settings.postgres_user}:{settings.postgres_password}@{postgres_tunnel.host}:{postgres_tunnel.port}/{settings.postgres_db}"
        )

        neo4j = Database(
            uri=f"bolt://{neo4j_tunnel.host}:{neo4j_tunnel.port}",
            user=settings.neo4j_user,
            password=settings.neo4j_password,
            max_connection_lifetime=300,
        )

        bf_database = PennsieveDatabase(engine, factory, organization_id)

        # 1) Get the target datasets for the migration
        if dataset_ids is None:
            dataset_ids = bf_database.get_dataset_ids(organization_id)

        for dataset_id in dataset_ids:
            print(f"Migrating dataset {dataset_id}")

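            # 2) Create a partitioned view of this dataset in Neo4j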
            partitioned_db = PartitionedDatabase(
                neo4j,
                organization_id=organization_id,
                dataset_id=dataset_id,
                user_id=0)

            # 3) Lock dataset in Pennsieve DB
            bf_database.lock_dataset(organization_id, dataset_id)
            print(f"Got dataset {dataset_id}")

            try:
                # 4) Export data to S3 from Neptune
                export_from_neptune(
                    settings,
                    postgres_tunnel=postgres_tunnel,
                    organization_id=organization_id,
                    dataset_id=dataset_id,
                    jumpbox=jumpbox,
                    smoke_test=smoke_test,
                )

                # 5) Import into Neo4j from S3
                import_to_neo4j.load(
                    dataset=f"{organization_id}/{dataset_id}",
                    bucket=settings.export_bucket,
                    db=partitioned_db,
                    cutover=True,
                    remove_existing=remove_existing,
                    smoke_test=smoke_test,
                    remap_ids=remap_ids,
                )

            finally:
                # Whatever happens, unlock the dataset
                bf_database.unlock_dataset(organization_id, dataset_id)

        # 6) Sanity check that all datasets in the organization have been
        # migrated, then mark the organization as migrated.
        if entire_organization:
            print("Validating migration....")

            for dataset_id in bf_database.get_dataset_ids(organization_id):
                partitioned_db = PartitionedDatabase(
                    neo4j,
                    organization_id=organization_id,
                    dataset_id=dataset_id,
                    user_id=0,
                )

            neo4j.toggle_service_for_organization(
                organization_id=organization_id)

        print("Done.")
Example 15
def get_health() -> int:
    db = Database.from_server()
    m = db.get_one()
    if m == 1:
        return 200
    return 500
Example 16
 def _ensure_db_exists(self) -> "User":
     """This ensures DB is initialized"""
     if self.DB is None:
         self.DB = Database("data/data.db")
     print(self.DB)
     return self
Example 17
def setup_indexes(config):
    db = Database.from_config(config)
    index.setup()
    db.driver.close()
Example 18
    )

    args = parser.parse_args()

    if args.environment not in ["dev", "prod", "prd"]:
        raise Exception(f"Invalid environment {args.environment}")

    ssm_parameters = SSMParameters(args.environment)

    logging.getLogger().info(
        f"Validating structure of {args.environment} Neo4j database {ssm_parameters.neo4j_host}. Connecting via jumpbox: {args.jumpbox}"
    )

    with SSHTunnel(
            remote_host=ssm_parameters.neo4j_host,
            remote_port=ssm_parameters.neo4j_port,
            local_port=8888,
            jumpbox=args.jumpbox,
    ) as neo4j_tunnel:

        db = Database(
            uri=f"bolt://{neo4j_tunnel.host}:{neo4j_tunnel.port}",
            user=ssm_parameters.neo4j_user,
            password=ssm_parameters.neo4j_password,
            max_connection_lifetime=300,
        )

        constraints.check_integrity(db)

    log.info("Done. All OK.")
Example 19
import argparse

import waitress  # type: ignore

from server.app import create_app  # type: ignore
from server.config import Config
from server.db import Database, index
from server.logging import configure_logging

parser = argparse.ArgumentParser(description="Pennsieve Model Service v2")

parser.add_argument(
    "-H", "--host", type=str, required=False, default="0.0.0.0", help="Server host"
)
parser.add_argument(
    "-P", "--port", type=int, required=False, default=8080, help="Server port"
)
parser.add_argument(
    "-T", "--threads", type=int, required=False, default=4, help="Number of threads"
)

if __name__ == "__main__":
    args = parser.parse_args()

    log = configure_logging()

    config = Config()
    db = Database.from_config(config)

    app = create_app(db=db, config=config)
    waitress.serve(app, host=args.host, port=args.port, threads=args.threads)
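
Assuming this is the service entry point (the module name is not shown here), it would be launched as, e.g., python main.py -H 0.0.0.0 -P 8080 -T 4, using the flags defined by the parser above.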
Example 20
 def save(self):
     return Database.save(UserModel.__collection, data=self.json())
Example 21
def split_corpora(
    self,
    file_path: str = "",
    split_path: str = "",
    langcode: str = "",
    filename: str = "",
):
    # TODO new database file separate from the main one
    # Also save sentences to db
    DB = Database("data/sentences.db")
    DB.execute(CREATE_SENTENCES_TABLE)
    DB.commit()

    with open(file_path, 'r') as txt:
        print("Starting adding sentences to db")
        linecount = 0
        added_at = str(datetime.now())
        for line in txt.readlines():
            DB.execute(INSERT_SENTENCE,
                       (line.strip(), added_at, langcode, filename))
            linecount += 1
            if linecount % 900 == 0:
                # commit every 900 lines
                print("Added", linecount, "sentences")
                DB.commit()
        # commit any remaining lines at the end
        DB.commit()
        print("Done Adding", filename, "to DB")