Example #1
	def __init__(self, name, open_existing=None, host='localhost',
		port=5432, user='******', password='', connection_limit=-1):

		if name is None:
			raise interface.EngineError("Database name must be specified")

		if not isinstance(connection_limit, int):
			raise interface.EngineError("Connection limit must be an integer")

		conn = postgresql.open(user=user,
			password=password, host=host, port=port)

		query = conn.prepare("SELECT datname FROM pg_catalog.pg_database")
		db_list = [x[0] for x in query()]
		db_exists = name in db_list

		if open_existing == 1:
			# do not create the DB if it does not exist
			if not db_exists:
				raise interface.EngineError(name + " was not found")
		elif open_existing == 0:
			# recreate the DB even if it already exists
			if db_exists:
				conn.execute("DROP DATABASE " + self.getSafeName(name))
			conn.execute("CREATE DATABASE " + self.getSafeName(name) +
				" CONNECTION LIMIT " + str(connection_limit))

		conn.close()

		self._conn = postgresql.open(user=user,
			password=password, host=host, port=port, database=name)

		self._transaction = None
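
A hypothetical usage sketch for the constructor above (the class name Engine and the keyword values are assumptions; only the signature comes from the example):

# assuming the __init__ above belongs to a class named Engine
engine = Engine('mydb', open_existing=0, user='postgres', password='secret',
                connection_limit=10)  # drops and recreates 'mydb'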
Example #2
def main():
    if len(sys.argv) != 6:
        print("Erreur dans les arguments")
        sys.exit(1)
    else:
        # retrieve the command-line arguments
        host = sys.argv[1]
        database_name = sys.argv[2]
        table_name = sys.argv[3]
        user = sys.argv[4]
        pwd = sys.argv[5]
        IRI = 'pq://'+user+':'+pwd+'@'+host+'/'
        start_time = time.time()
        db = postgresql.open(IRI + database_name)
        # Create the 'velov' table
        utils_Json_Postgres.create_table(db, sql_table, table_name, database_name)
        if db.closed:
            db = postgresql.open(IRI + database_name)
        # load and transform the JSON to prepare it for insertion into the database
        data, d = utils_Json_Postgres.data_create(lienF, True, set_int, set_float, set_date, "%Y-%m-%d %H:%M:%S")
        # Declare and prepare the insert query
        sql_insert = utils_Json_Postgres.cons_insert(table_name, data)
        statement = None
        try:
            statement = db.prepare(sql_insert)
        except exceptions.DuplicateTableError:
            print("Une exception est soulevee!!! Erreur sur la requete d'insertion")
        # insertion
        utils_Json_Postgres.insertion(data, d, statement)
        tmp = time.time() - start_time
        tmp = str(datetime.timedelta(seconds=tmp))
        print(" Tps execution --- %s  ---" % tmp)
Example #3
def main():
    start_time = time.time()
    i = 0
    # gather all the JSON files from the folder directory
    f_test = glob.glob(folder+'*.json')
    nb_f = len(f_test)
    db = postgresql.open(IRI + database_name)
    # Create the 'velov' table
    utils_Json_Postgres.create_table(db, sql_table, table_name, database_name)
    # If the table already exists, the connection is closed automatically
    # build the list of values to insert, in the order declared in VALUES
    while i < nb_f:
        print("******* fichier %s" % i)
        # if the connection is closed -> reopen it
        if db.closed:
            db = postgresql.open(IRI + database_name)
        # load and transform the JSON to prepare it for insertion into the database
        data, d = utils_Json_Postgres.data_create(f_test[i], False, set_int, set_float, set_date)
        # Declare and prepare the insert query
        sql_insert = utils_Json_Postgres.cons_insert(table_name, data)
        statement = None
        try:
            statement = db.prepare(sql_insert)
        except exceptions.DuplicateTableError:
            print("Une exception est soulevée!!! Erreur sur la requête d'insertion")
        # insert the rows
        utils_Json_Postgres.insertion(data, d, statement)
        i += 1
    tmp = time.time() - start_time
    tmp = str(datetime.timedelta(seconds=tmp))
    print(" Tps execution --- %s  ---" % tmp)
Example #4
    def __init__(self):
        pq = 'pq://{0}:{1}@{2}:{3}/image_spider'
        pq = pq.format(os.environ['DOTCLOUD_POSTGRES_SQL_LOGIN'],
                       os.environ['DOTCLOUD_POSTGRES_SQL_PASSWORD'],
                       os.environ['DOTCLOUD_POSTGRES_SQL_HOST'],
                       os.environ['DOTCLOUD_POSTGRES_SQL_PORT'])

        redis_cred = {'host': os.environ['DOTCLOUD_REDIS_REDIS_HOST'],
                      'port': int(os.environ['DOTCLOUD_REDIS_REDIS_PORT']),
                      'password': os.environ['DOTCLOUD_REDIS_REDIS_PASSWORD'],
                      'db': 0}

        self.pg = postgresql.open(pq)
        self.redis = redis.StrictRedis(**redis_cred)
        pubsub = redis.StrictRedis(**redis_cred)
        self.pubsub = pubsub.pubsub()

        setattr(self, 'add_webpages',
                self.pg.proc('add_webpages(text,text[],integer)'))

        setattr(self, 'get_webpage_info',
                self.pg.proc('get_webpage_info(text)'))

        setattr(self, 'complete_crawl',
                self.pg.proc('complete_crawl(text)'))
Example #5
def postgres_dump(alignment, pq_locator):
    """Dumps an alignment of SAM reads into a Postgres database"""
    with postgresql.open(pq_locator) as database:
        database.execute("DROP TABLE IF EXISTS reads;")
        database.execute("CREATE TABLE reads ( "
                   "id          int, "
                   "qname       varchar(80), "
                   "flag        int, "
                   "rname       varchar(80), "
                   "pos         int, "
                   "mapq        int, "
                   "cigar       varchar(80), "
                   "rnext       varchar(80), "
                   "pnext       int, "
                   "tlen        int, "
                   "seq         varchar(200), "
                   "qual        varchar(200), "
                   "tags        text"
                   ");")

        id_number = 1
        for read in alignment:
            command = sql_insert_command(read, "reads", id_number)
            database.execute(command)
            id_number += 1

        database.execute("DROP TABLE IF EXISTS head;"
                   "CREATE TABLE head (head  text);")

        head_command = "INSERT INTO head (head) VALUES ('"
        head_command += alignment.head()
        head_command += "');"
        database.execute(head_command)
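
The per-row INSERT strings above come from sql_insert_command. A minimal alternative sketch, assuming the reads table created above and hypothetical read attributes (qname, flag), is to reuse one parameterized prepared statement for the whole loop:

        # sketch only: one prepared, parameterized INSERT reused for every read
        insert_read = database.prepare(
            "INSERT INTO reads (id, qname, flag) VALUES ($1, $2, $3)")
        for id_number, read in enumerate(alignment, start=1):
            insert_read(id_number, read.qname, read.flag)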
Example #6
 def __init__(self,database,user,password,host,port):
     self.database = database
     self.user = user
     self.password = password
     self.host = host
     self.port = port
     self.db = postgresql.open(database=self.database, user=self.user, password=self.password, host=self.host, port=self.port)
Example #7
	def __init__(self, user, password, host, database_name, use_database):
		self.disabled = not use_database

		if (not user or not password or not host or not database_name or self.disabled):
			self.connection = None
		else:
			self.connection = postgresql.open('pq://{0}:{1}@{2}/{3}'.format(user, password, host, database_name))
Example #8
def connectdb(dbstring = open("/home/fido/PyFTN/database.cfg").read().strip()):
  # pq://user:password@hostname/databasename
  db = postgresql.open(dbstring)
  init_domains(db)
  init_commuter(db)
  db.FECHOIMPORTLOCK = None
  return db
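
Note that the default dbstring above is read from database.cfg once, when the def line is evaluated at import time. A minimal sketch that defers the file read to call time, with otherwise identical behavior:

def connectdb(dbstring=None):
  # read the connection string at call time instead of import time
  if dbstring is None:
    dbstring = open("/home/fido/PyFTN/database.cfg").read().strip()
  db = postgresql.open(dbstring)
  init_domains(db)
  init_commuter(db)
  db.FECHOIMPORTLOCK = None
  return db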
Example #9
def main():
    args = parser.parse_args()

    global conn
    conn = postgresql.open(
        host=args.host,
        password=args.password,
        database=args.database,
        user=args.user
    )

    timings = {
            i: test(args, i, [ 1024, 4096, 16384, 65536 ]) #16, 64, 256, 1024 ])
            for i in args.indexes
        }

    csvfile = open(args.output, 'w', newline='')
    writer = csv.writer(csvfile)

    writer.writerow(['index_name', 'size', 'duration', 'passes', 'time_per_pass', 'avg_item_count'])

    for t, td in timings.items():
        for s, d in td.items():
            writer.writerow([t, s, d['duration'], d['passes'], d['time_per_pass'], d['avg_item_count']])
    csvfile.close()
    print('Created file {}'.format(args.output))
Example #10
    def __init__(self):
        handlers = [
            (r"/", HomeHandler),
            (r"/archive", ArchiveHandler),
            (r"/feed", FeedHandler),
            (r"/entry/([^/]+)", EntryHandler),
            (r"/compose", ComposeHandler),
            (r"/auth/login", AuthLoginHandler),
            (r"/auth/logout", AuthLogoutHandler),
        ]
        settings = dict(
            blog_title="Psyclone Blog",
            template_path=os.path.join(os.path.dirname(__file__), "templates"),
            static_path=os.path.join(os.path.dirname(__file__), "static"),
            ui_modules={"Entry": EntryModule},
            xsrf_cookies=True,
            cookie_secret=b"11oETzKXQAGaYdkL5gEmGeJJFuYh7EQnp2XdTP1o/Vo=",
            login_url="/auth/login",
            debug=True
        )
        psyclone.web.Application.__init__(self, handlers, **settings)

        # Have one global connection to the blog DB across all handlers
        self.db = postgresql.open(
            host=options.db_host, database=options.db_database,
            user=options.db_user, password=options.db_password)
Example #11
 def tearDownClass(cls):
     if isinstance(cls.api_process, Popen):
         os.kill(cls.api_process.pid, signal.SIGINT)
         try:
             cls.api_process.wait(1.5)
         except TimeoutExpired:
             cls.api_process.terminate()
     app_config_dir = os.path.expanduser(os.path.join(
         '~',
         '.pvacseq'
     ))
     app_data_dir = os.path.expanduser(os.path.join(
         '~',
         'pVAC-Seq'
     ))
     shutil.rmtree(app_config_dir, ignore_errors=True)
     shutil.rmtree(app_data_dir, ignore_errors=True)
     if cls.restoreConfig:
         shutil.copytree(app_config_dir+'.bak', app_config_dir)
         shutil.rmtree(app_config_dir+'.bak', ignore_errors=True)
     if cls.restoreData:
         shutil.copytree(app_data_dir+'.bak', app_data_dir)
         shutil.rmtree(app_data_dir+'.bak', ignore_errors=True)
     db = psql.open("localhost/pvacseq")
     for row in db.prepare("SELECT table_name FROM information_schema.tables WHERE table_name LIKE 'data\__%\__%'")():
         name = row[0]
         if re.match(r'data_(dropbox|\d+)_\d+', name):
             print("DROP TABLE", name)
             db.execute("DROP TABLE %s"%name)
Example #12
def connect(args, stat):
    global conn

    if args.database_type not in ('osm2pgsql', 'osmosis', 'overpass'):
        print('* Database type "{}" not supported right now'.format(args.database_type))
        exit(1)

    if stat['config'].get('offline', False) in (False, 'no', 'false'):
        conn = postgresql.open(
            host=args.host,
            password=args.password,
            database=args.database,
            user=args.user
        )
    else:
        conn = NullDB()

    conn.database_type = args.database_type

    if 'db.search_path' in stat['config']:
        conn.prepare('set search_path to {}'.format(postgresql.string.escape_ident(stat['config']['db.search_path'])))()

    if args.database_type == 'osm2pgsql':
        conn.database = pgmapcss.db.osm2pgsql.db(conn, stat)
    elif args.database_type == 'osmosis':
        conn.database = pgmapcss.db.osmosis.db(conn, stat)
    elif args.database_type == 'overpass':
        conn.database = pgmapcss.db.overpass.db(conn, stat)
    else:
        raise Exception('unknown database type {}'.format(args.database_type))

    db_version_check()

    return conn
Example #13
    def __init__(self, dsn):
        self.uri = dsn
        self.used = False
        self.lastused = 0.0

        self.session = postgresql.open(self.uri)
        self.prepare()
Example #14
def main():
    start_time = time.time()
    nb_files_tot = 0
    # Open a connection to the DB
    db = postgresql.open(IRI + database_name)
    # Create the 'velov' table
    utils_Json_Postgres.create_table(db, sql_table, table_name, database_name)
    credentials = get_credentials()
    http = credentials.authorize(httplib2.Http())
    service = discovery.build('drive', 'v3', http=http, credentials=credentials)
    s_f = service.files()
    results = s_f.list(pageSize=1000, fields="nextPageToken, files(id, name)").execute()
    # id of the next page, so that no file is missed
    n_p_t = results.get('nextPageToken')
    nb_files_tot = nb_files_tot + dl_insert(results, s_f, outpath, format_f, IRI, database_name,table_name)
    while n_p_t is not None and nb_files_tot < 1282:
        print("************Nouvelle page du Drive**************")
        results = s_f.list(pageSize=1000, pageToken = n_p_t, fields="nextPageToken, files(id, name)").execute()
        # id of the next page, so that no file is missed
        n_p_t = results.get('nextPageToken')
        nb_files_tot = nb_files_tot + dl_insert(results, s_f, outpath, format_f, IRI, database_name,table_name)
    print("nb tot: %s" % nb_files_tot)
    tmp = time.time() - start_time
    tmp = str(datetime.timedelta(seconds=tmp))
    print(" Fin du script en --- %s  ---" % tmp)
Example #15
    def head(self):
        """The commented data normally found in the head of a SAM file.

        """
        with postgresql.open(self.data_file) as db:
            head_tuple = next(iter(db.prepare("SELECT * FROM head;")))
            return head_tuple[0]
Example #16
	def __init__(self):
		username = self._getProperty('config.ini', 'db', 'username')
		password = self._getProperty('config.ini', 'db', 'password')
		host = self._getProperty('config.ini', 'db', 'host')
		port = self._getProperty('config.ini', 'db', 'port')
		database = self._getProperty('config.ini', 'db', 'database')

		self._db = postgresql.open(user = username, password = password, host = host, port = port, database = database)
Example #17
 def __init__(self):
     sf = NS()
     sf.db_name = config_get_section_attribute('MOODLE', 'db_name')
     sf.db_username = config_get_section_attribute('MOODLE', 'db_username')
     sf.db_password = config_get_section_attribute('MOODLE', 'db_password')
     sf.db_host = config_get_section_attribute('MOODLE', 'db_host')
     self.db = postgresql.open(sf('pq://{db_username}:{db_password}@{db_host}/{db_name}'))
     self.sql = self.db.prepare
Example #18
	def __init__(self,ID,code,target,SIP):
		self.PID = ID
		self.code = code
		self.target = target
		self.SIP = SIP
		self.db = postgresql.open("pq://*****:*****@" + self.SIP + "/ceranubis")
		self.update = self.db.prepare("Update arguments SET Result = $1 WHERE ArgID = $2 AND ProjID = $3;")
		self.arg = self.db.prepare("SELECT * FROM arguments WHERE result IS NULL")
Example #19
def conectadb():
    user = "******"
    passwd = "eU9Za2ofNGfGUGe8"
    server = "bbdd.cmima.csic.es"
    datab = "dof"
    db = postgresql.open("pq://"+user+":"+passwd+"@"+server+"/"+datab)
    db.connect()
    return db
Example #20
File: db.py Project: m00nsome/share
 def __init__(self):
     connectstring=string.Template('pq://$user:$password@$host:$port/$database').\
                         safe_substitute(access)
     try:
         print("Connecting to:", connectstring)
         self._db = postgresql.open(connectstring)
     except:
         raise
Example #21
 def reconnect(self):
     self.bus.log('Reconnect..')
     self.db = None
     conn = cherrypy.config['db_str']
     try:
       self.db = postgresql.open(conn)
     except Exception as e:
         self.bus.log('Connection error: %s' % e)
Example #22
def get_connection(kernel):
    conf = kernel.configs['/']

    username = conf.get('database', 'username')
    password = conf.get('database', 'password')
    database = conf.get('database', 'database')

    db = postgresql.open('pq://%s:%s@localhost/%s' % (username, password, database))
    return db
Example #23
def getTracks3(transpid, time):
	"""Selection tracks if track_transp_id = transpid"""
	odCon = postgresql.open(user = '******', host = 'localhost', port = 5432, password='******', database='odessapoint')
	queryString = "SELECT lat, lon, time, alt, speed, direction FROM tracks2 WHERE (transpid=$1 and time>$2) ORDER BY time;"
	pr = odCon.prepare(queryString)
	ret = []
	for x in pr(transpid,time):
		ret.append([Point(x[0], x[1], x[2], x[3]), x[4], x[5]])
	return ret
Example #24
	def results(self):
		try:
			db = postgresql.open("pq://*****:*****@" + self.SIP + "/ceranubis")
			self.connected = True
		except:
			self.connected = False
			return False

		getRes = db.prepare("SELECT arg,result from arguments where result is not null;")	
		return getRes()
Example #25
def dl_insert(param_results, param_s_f, param_outpath, param_format_f, param_IRI, param_database_name, param_table_name):
    start_time = time.time()
    db = postgresql.open(param_IRI + param_database_name)
    nb_files = 0
    # retrieve the files
    items = param_results.get('files', [])
    if not items:
        print('No files found.')
    else:
        print('Files:')
        for item in items:
            print('{0} ({1})'.format(item['name'], item['id']))
            file_id = item['id']
            file_name = item['name']
            # filter on files from 26/01
            if file_name[0:10] == '26/01/2016':
                nb_files += 1
                print("*******Chargement et traitement du fichier******", file_name)
                file_name = file_name.replace("/", "_").replace(" ", "_").replace(":", "_")
                # download via get_media
                request = param_s_f.get_media(fileId=file_id)
                outfilename = param_outpath+file_name+"."+param_format_f
                outfile = io.FileIO(outfilename, mode='w+')
                downloader = googleapiclient.http.MediaIoBaseDownload(outfile, request)
                done = False
                while done is False:
                    status, done = downloader.next_chunk()
                # check that the connection to the DB is still open
                if db.closed:
                    print("!!Connexion réouverte!!")
                    db = postgresql.open(param_IRI + param_database_name)
                data, d = utils_Json_Postgres.data_create(outfilename, False, set_int, set_float, set_date)
                # Declare and prepare the insert query
                sql_insert = utils_Json_Postgres.cons_insert(param_table_name, data)
                statement = None
                try:
                    statement = db.prepare(sql_insert)
                except exceptions.DuplicateTableError:
                    print("Une exception est soulevée!!! Erreur sur la requête d'insertion")
                utils_Json_Postgres.insertion(data, d, statement)
    print("Tps execution pour le chargement et l'insertion des %s fichiers" % nb_files)
    print(" --- %s seconds ---" % (time.time() - start_time))
    return nb_files
Example #26
	def __init__(self, conn_str, salt=None):
		"""
		Loads up the given password file and initializes the lookup table
		"""		
		# Connect to database
		self.__db = postgresql.open(conn_str)
		
		# Use a different salt than default?
		if salt is None:
			self.__salt = 'cHRKE9U8983hrkboerc'
		else:
			self.__salt = salt
Example #27
def connect(args):
    global conn
    conn = postgresql.open(
        host=args.host,
        password=args.password,
        database=args.database,
        user=args.user
    )

    db_version_check()

    return conn
Example #28
 def open(self, configuration):
     """Open the connexion."""
     host = configuration["host"]
     port = configuration["port"]
     dbuser = configuration["dbuser"]
     dbpass = configuration["dbpass"]
     dbname = configuration["dbname"]
     self.connection = postgresql.open(
             "pq://{user}:{password}@{host}:{port}/{database}".format(
             user=dbuser, password=dbpass, host=host, port=port,
             database=dbname))
     SQLDriver.open(self, configuration)
Example #29
 def test_postgresql(self):
     try:
         import postgresql
     except ImportError:
         return
     conn = postgresql.open('pq://*****:*****@localhost')
     c = conn.cursor()
     c.execute(self.stmt)
     c.fetchone()[0]
     c.close()
     conn.close()
     stats, result = get_local_storage(local_timing).get_thread_stats()
     self.assertEqual(len(result), 2)
Example #30
	def status(self):
		try:
			db = postgresql.open("pq://*****:*****@" + self.SIP + "/ceranubis")
			self.connected = True
		except:
			self.connected = False
			return False
		
		getNumDone = db.prepare("SELECT COUNT(result) AS done FROM arguments WHERE result IS NOT NULL  AND arguments.projid = $1;")
		getNumArg = db.prepare("SELECT COUNT(arg) AS overall FROM arguments WHERE arguments.projid = $1;")
		done = getNumDone(self.PID)
		arg = getNumArg(self.PID)
		return((done[0][0],arg[0][0]))
Example #31
#!/usr/bin/env python
import pika
import time
import postgresql

connection = pika.BlockingConnection(
    pika.ConnectionParameters(host='localhost'))
channel = connection.channel()

mdatabase = postgresql.open('pq://*****:*****@database:5432/docker')
mdatabase.execute("DROP TABLE IF EXISTS messages;")
mdatabase.execute(
    "CREATE TABLE messages (id SERIAL PRIMARY KEY, msg CHAR(256));")
insert_to_db = mdatabase.prepare("INSERT INTO messages (msg) VALUES ($1)")

print("Insert was successful.")

channel.queue_declare(queue='task_queue', durable=True)


def callback(ch, method, properties, body):
    time.sleep(10)
    insert_to_db(str(body))
    ch.basic_ack(delivery_tag=method.delivery_tag)


channel.basic_qos(prefetch_count=1)
channel.basic_consume(callback, queue='task_queue')

channel.start_consuming()
Example #32
def pypostgresql_connect(args):
    conn = postgresql.open(user=args.pguser,
                           host=args.pghost,
                           port=args.pgport)
    return conn
Example #33
class ForumDAO:
    db_settings = settings.DatabaseSettings()
    db = postgresql.open(db_settings.get_command())

    def __init__(self):
        pass

    def __del__(self):
        # self.db.close()
        pass

    def create_forum(self, forum):
        try:
            result = self.db.query(
                "INSERT INTO forums(slug, title, nickname) VALUES ('{}', '{}', "
                "(SELECT nickname FROM users WHERE nickname = '{}')) RETURNING *"
                .format(forum["slug"], forum["title"], forum["user"]))
            return self.forum_from_table(result[0]), falcon.HTTP_201
        except postgresql.exceptions.UniqueError:
            forum_det = self.get_forum(forum["slug"])
            return forum_det[0], falcon.HTTP_409
        except postgresql.exceptions.NotNullError:
            return {"message": "Can't find user"}, falcon.HTTP_404

    def create_thread(self, slug, thread):
        try:
            if thread.get("slug"):
                thread["slug"] = "'{}'".format(thread["slug"])
            else:
                thread["slug"] = "NULL"
            if not thread.get("created"):
                thread["created"] = datetime.datetime.now(
                    tzlocal()).isoformat()
            result = self.db.query(
                "INSERT INTO threads (nickname, created, forum, message, title, slug) VALUES "
                "((SELECT nickname FROM users WHERE nickname = '{}'),'{}',(SELECT slug FROM forums WHERE slug = '{}'),"
                "'{}', '{}', {}) RETURNING *".format(thread["author"],
                                                     thread["created"], slug,
                                                     thread["message"],
                                                     thread["title"],
                                                     thread["slug"]))
            return self.thread_from_table(result[0]), falcon.HTTP_201
        except postgresql.exceptions.UniqueError:
            thread = self.db.query(
                "SELECT * FROM threads WHERE slug = {}".format(thread["slug"]))
            return self.thread_from_table(thread[0]), falcon.HTTP_409
        except postgresql.exceptions.NotNullError:
            return {"message": "Can't find"}, falcon.HTTP_404

    def get_forum(self, slug):
        info = self.db.query(
            "SELECT * FROM forums WHERE slug = '{}'".format(slug))
        if len(info) == 0:
            return {}, falcon.HTTP_404
        return self.forum_from_table(info[0]), falcon.HTTP_200

    def get_forum_details(self, slug):
        forum_det = self.forum_info(slug)
        if len(forum_det.keys()) == 0:
            return {
                "message": "Can't find forum {}".format(slug)
            }, falcon.HTTP_404
        return forum_det, falcon.HTTP_200

    def get_forum_threads(self, slug, limit, since, desc):
        #print(slug, limit, since, desc)
        query = "SELECT threads.slug, nickname,title, votes, created, message,id, forum, test.slug as test FROM threads "\
                "RIGHT JOIN (SELECT slug FROM forums WHERE slug='{}') AS test ON forum = '{}'".format(slug, slug)

        if since is not None:
            if desc == "true":
                query = query + " AND created <= '{}'".format(since)
            else:
                query = query + " AND created >= '{}'".format(since)
        query = query + " ORDER BY created"
        if desc == "true":
            query = query + " DESC"
        if limit is not None:
            query = query + " LIMIT {}".format(limit)
        info = self.db.query(query)
        #print(query)
        result = []
        if len(info) == 0 or info[0]["test"] is None:
            return {"message": "Can't find forum"}, falcon.HTTP_404
        if info[0]["id"] is None:
            return result, falcon.HTTP_200

        for i in info:
            result.append(self.thread_from_table(i))
        return result, falcon.HTTP_200

    def get_forum_users(self, slug, limit, since, desc):
        if limit is not None:
            limit = "LIMIT {}".format(limit)
        else:
            limit = ""
        if since and desc == "true":
            since = " AND nickname < '{}' COLLATE ucs_basic".format(since)
        elif since:
            since = " AND nickname > '{}' COLLATE ucs_basic".format(since)
        else:
            since = ""
        if desc == "true":
            desc = " DESC"
        else:
            desc = ""

        query ="SELECT nickname, email, about, fullname, test.slug "\
                          " FROM users RIGHT JOIN (SELECT slug FROM forums WHERE slug='{}') AS test "\
                          "ON nickname IN (SELECT nickname FROM forums_users "\
                          "WHERE slug = '{}') {} ORDER BY nickname COLLATE ucs_basic {} {}".format(slug, slug, since, desc, limit)
        t = self.db.query(query)
        if len(t) == 0 or t[0]["slug"] is None:
            return {"message": "Can't find forum"}, falcon.HTTP_404
        result = []
        if t[0]["nickname"] is None:
            return result, falcon.HTTP_200
        for i in t:
            result.append(userDAO.UserDAO.user_from_table(i))
        return result, falcon.HTTP_200

    def forum_info(self, slug):
        info = self.db.query("SELECT * FROM forums WHERE slug ='{}';".format(
            slug, slug, slug))
        if len(info) == 0:
            return {}
        return {
            "posts": info[0]["posts"],
            "slug": info[0]["slug"],
            "threads": info[0]["threads"],
            "title": info[0]["title"],
            "user": info[0]["nickname"]
        }

    # FIX IT
    @staticmethod
    def forum_from_table(t):
        return {
            "posts": 0,
            "slug": t["slug"],
            "threads": 0,
            "title": t["title"],
            "user": t["nickname"]
        }

    @staticmethod
    def thread_from_table(t):
        result = {
            "author": t["nickname"],
            "created": t["created"].isoformat(),
            "forum": t["forum"],
            "id": t["id"],
            "message": t["message"],
            "title": t["title"],
            "votes": t["votes"]
        }
        if t["slug"]:
            result["slug"] = t["slug"]
        return result
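
The queries in this class interpolate values into SQL with str.format. As a hedged sketch (not the class's actual code), the same slug lookup can pass the value as a parameter through py-postgresql's prepared statements:

    # sketch only: parameterized lookup instead of string formatting
    get_forum_stmt = self.db.prepare("SELECT * FROM forums WHERE slug = $1")
    info = get_forum_stmt(slug)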
Example #34
from flask_restful import Resource
from flask import request
import secrets, postgresql, os, json
from datetime import datetime

from config import DATABASE_PATH, UPLOAD, LINK

database = postgresql.open(DATABASE_PATH)


class CreateDiscussion(Resource):
    def put(self):
        token = request.headers.get('token', False)

        if not token or len(token) < 5:
            return {'status': False, 'message': 'Fack token, allday'}, 401

        query = database.prepare(
            "SELECT id, email, fullname FROM users WHERE token = $1")
        user_result = query(token)

        user = user_result[0]

        discussion_title = request.form.get('title', False)
        discussion_cover = request.files.get('cover', False)

        aspect_id = request.form.get('aspect_id', False)
        aspect_title = request.form.get('aspect_title', False)
        aspect_image = request.files.get('aspect_image', False)

        argument_position = request.form.get('argument_position', False)
Example #35
import postgresql
db = postgresql.open('pq://*****:*****@localhost:5432/learn_heroes_view_development')

# set updated_at to use RETURNING
create_matchup = db.prepare("""

    INSERT INTO matchups ("ally_heroes", "enemy_heroes", "created_at", "updated_at")
           VALUES ($1, $2, now(), now())
           ON CONFLICT (ally_heroes, enemy_heroes) DO UPDATE SET updated_at=now()
           RETURNING ID

""")

lookup_matchup = db.prepare("SELECT id FROM matchups WHERE ally_heroes = $1::int[] AND enemy_heroes = $2::int[]")

def find_or_create_matchup(team1, team2):
    return create_matchup.first(team1, team2)

def generate_hero_stats_upsert_statement(matchup):
    ally, enemy = matchup

    ally_heroes = ','.join(str(al) for al in ally)
    enemy_heroes = ','.join(str(en) for en in enemy)

    matchup_upsert = f"""

        INSERT INTO matchups ("ally_heroes", "enemy_heroes", "created_at", "updated_at")
               VALUES ( array[{ally_heroes}]::integer[], array[{enemy_heroes}]::integer[], now(), now() )
               ON CONFLICT (ally_heroes, enemy_heroes) DO UPDATE SET updated_at=now()
               RETURNING ID
Example #36
def db_conn():
    return postgresql.open('pq://*****:*****@localhost:5432/mydb')
Example #37
def error_log(module_name, error_message):
    db = postgresql.open(db_query.connection_string())
    print('Error:' + error_message)
    insert = db.prepare("insert into error_log (module_name, error_message) values($1,$2)")
    insert(module_name, str(error_message))
    play_alarm_sound()
Example #38
# import the postgresql library into the code for working with a PostgreSQL database
import postgresql

# create a connection to the database, specifying the host, port, login and password
db = postgresql.open("pq://*****:*****@localhost:5432/postgres")

print('Before')
# prepare a SELECT on the student table
students = db.prepare("SELECT * FROM student")

# fetch the students from the database and print them to the console
for row in students:
    print(row)

# prepare a DELETE on the student table with a condition on the first name
sql_delete = db.prepare("DELETE FROM student WHERE first_name = $1")

# inside a transaction, delete the students named Ivan, then raise an error with our own message;
# we catch the error and do nothing.
# This code demonstrates transactions: the changes are not committed unless all of the code in the transaction completes without errors
try:
    with db.xact():
        sql_delete("Иван")
        raise Exception(
            'Because we do raise in transaction, changes will not applied!')
except Exception:
    pass

# print the students again. The list is the same as the earlier output, which shows the changes were not applied: the transaction rolled back
print('After')
for row in students:
    print(row)
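
For contrast, a minimal sketch of the success path with the same db.xact() API: when the block exits without an exception, the change is committed.

# no exception is raised inside the block, so the DELETE commits on exit
with db.xact():
    sql_delete("Иван")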
Example #39
	def __init__(self, user, password, host, database_name):
		if (not user or not password or not host or not database_name):
			self.connection = None
		else:
			self.connection = postgresql.open('pq://{0}:{1}@{2}/{3}'.format(user, password, host, database_name))
Example #40
def get_connection():
    db = postgresql.open('pq://*****:*****@localhost:5432/mydb')
    return db
Example #41
    db_name = arguments.db_name     # database name
    url = arguments.url             # URL for connecting to the database

    parser = None
    if dbd_file:
        parser = DbdToRam(dbd_file)
    elif xml_file:
        parser = XdbToRam(xml_file)
    else:
        print("Источник данных не указан")
        quit()

    ram = parser.parse()

    ddl_generator = RamToPgDdl(ram)
    ddls = ddl_generator.generate(False)
    ddl_generator.write_to_file(ddl_path)
    print("DDL: " + ddl_path)


    conn = postgresql.open(url)
    conn.execute('DROP DATABASE IF EXISTS {};'.format(db_name))
    conn.execute('CREATE DATABASE {};'.format(db_name))
    conn = postgresql.open("{}/{}".format(url,db_name))

    conn.execute(ddls)
    conn.close()

    print("Finished")

Example #42
def negative(sc, cb=0):
    stack_result = []
    stack_errors = []
    exp_values = []
    wait = WebDriverWait(driver, 15)
    for x in range(len(sc)):
        #get_browser_logs('performance')
        for y in range(len(sc[x])):

            if sc[x][y] == 'oib':
                wait.until(EC.element_to_be_clickable(
                    (By.ID, 's2id_user'))).click()
                wait.until(
                    EC.element_to_be_clickable(
                        (By.CLASS_NAME,
                         'select2-input'))).send_keys(sc[x][y + 1])
                wait.until(
                    EC.element_to_be_clickable(
                        (By.CLASS_NAME, 'select2-result-label'))).click()
                wait.until(EC.element_to_be_clickable(
                    (By.ID, 'password'))).send_keys('123456')
                wait.until(EC.element_to_be_clickable((By.ID, 'id1'))).click()

            if sc[x][y] == 'page':
                driver.get(sc[x][y + 1])

            if sc[x][y] == 'new_pack':
                try:
                    exp_values.append(cr_pack_init(sc[x][y + 1], sc[x][y + 2]))
                except:
                    exit_browser(driver)
            if sc[x][y] == 'button':
                if sc[x][y + 1] == 'add-doc':
                    ms.waiting('element_to_be_clickable', 'CLASS_NAME',
                               sc[x][y + 2], 2, 0).click()
                    sleep(1)
                else:
                    try:
                        ms.waiting('element_to_be_clickable', 'XPATH',
                                   sc[x][y + 2], 2, 0).click()
                        sleep(1)
                    except:
                        exit_browser(driver)

            if sc[x][y] == 'dropdown':
                exp_values.append(
                    dropdown_feeler(sc[x][y + 1], sc[x][y + 2], sc[x][y + 3]))

            if sc[x][y] == 'text':
                exp_values.append(sc[x][y + 2])
                ms.waiting('presence_of_element_located', 'XPATH',
                           sc[x][y + 1], 2, 0).send_keys(sc[x][y + 2])

            if sc[x][y] == 'datapicker':
                try:
                    temp = f"{sc[x][y+1]}"
                    dd, mm, yyyy = '', '', ''
                    r = sc[x][y + 2].split(' ')
                    dd = r[0]
                    mm = r[1]
                    yyyy = r[2]
                    driver.find_element_by_xpath(temp).send_keys(dd)
                    driver.find_element_by_xpath(temp).send_keys(mm)
                    driver.find_element_by_xpath(temp).send_keys(yyyy)
                except:
                    print('Ошибка при заполнении датапикера ' + sc[x][y])
                    exit_browser(driver)

            if sc[x][y] == 'check':
                if sc[x][y + 1] == 'prime-doc-imported':
                    try:
                        # Check whether the document was added to the page...
                        print('Проверяем добавился ли документ на страницу...')
                        #print(len(driver.find_elements_by_class_name('item-main')))
                        if len(driver.find_elements_by_class_name(
                                'item-main')) == 2:
                            print('Найден главный документ!')
                            result = []
                            # Get the package's registration number
                            uch_num = get_uch_num_pack_doc()

                            # Connect to the DB and check the document id value once a second
                            with ps.open(db_conn) as db:
                                id_value = ''
                                while (id_value == ''):
                                    sleep(1)
                                    id_value = db.query(
                                        "select le.id from lde_event le join document d on d.id=le.document_id join document_package dp on d.document_package_id=dp.id where dp.document_package_number='%s'"
                                        % uch_num)[0][0]
                                    if id_value != 'Null':
                                        print(
                                            f'В БД получена id документа: {id_value}'
                                        )
                                    else:
                                        print('id документа не получена')

                        else:
                            #print('Main document not found! Or more than one main document was found.')
                            errors = []
                            count = len(
                                driver.find_elements_by_class_name(
                                    'item-main'))
                            if len(
                                    driver.find_elements_by_class_name(
                                        'item-main')) > 2:
                                errors.append(
                                    f'Найдено больше одного документа. Документов на странице: {count-1}'
                                )  # Subtract 1 because the element in the additional materials also matches the selector.
                            return errors
                    except:
                        print('Произошла ошибка при проверке получения id')

            # if sc[x][y] == 'requisites':
            #     wait = WebDriverWait(driver, 10)
            #     add_emps=[]
            #     add_emps = driver.find_elements_by_class_name('add-button')
            #
            #     # click the "Add employee" buttons (sc[x][y+1]) times for each block
            #     for _ in range(sc[x][y+1]):
            #         add_emps[0].click()
            #         add_emps[1].click()
            #         add_emps[2].click()
            #
            #     drops = driver.find_elements_by_id('__result')
            #     org = []
            #     job_list=[]
            #     empls = []
            #
            #     org = [drops[n] for n in range(0, (len(drops)-1), 3)]
            #     job_positions = [drops[n] for n in range(1, len(drops), 3)]
            #     emps = [drops[n] for n in range(2, len(drops), 3)]
            #     #print(len(drops))
            #
            #
            #     driver.find_element_by_xpath('//*[@id="requisites"]/app-participant-requisites-form/div[2]/div/app-autocomplete/div/div/div').click()
            #     sleep(1)
            #     driver.find_element_by_xpath('//*[@id="requisites"]/app-participant-requisites-form/div[2]/div/app-autocomplete/div/div/div/div[3]/div/div[1]').click()
            #     # Call the function sequentially so that after each selection of elements
            #     job_list = requisite(dr=org, nth=7)
            #     empls = requisite(dr=job_positions, ls=job_list)
            #     requisite(dr=emps, ls=empls)

            if sc[x][y] == 'prime-doc' and sc[x][y + 1] == 'import':
                # follow the "Package contents" link
                sleep(1)
                driver.execute_script(
                    "document.getElementById('menu-item_composition').click()")

                sleep(3)
                driver.find_elements_by_class_name('add-button-big')[0].click()
                driver.find_element_by_xpath(
                    '//*[@id="file-input"]'
                ).send_keys(
                    os.path.abspath(
                        'Лицензионное соглашение об использовании iTunes.docx')
                )
                sleep(1)
                # add a click on the Add button

            if sc[x][y] == 'open-any-first-package':
                wait = WebDriverWait(driver, 8)
                wait.until(
                    EC.element_to_be_clickable(
                        (By.CLASS_NAME, 'item__left-side'))).click()
                sleep(1)

            if sc[x][y] == 'pop-up' and sc[x][y + 1] == 'error':
                exp = []
                exp = [sc[x][o] for o in range(y + 3, (len(sc[x]) - 1))]
                errors = driver.find_elements_by_class_name(sc[x][y + 2])
                print(exp, errors)
                for i in range(len(errors)):
                    #print(errors[i].text)
                    if errors[i].text == exp[i]:
                        print('Поле ' + errors[i].text + ': ок' + '\n Шаг ' +
                              str(sc[x][0]))
                        stack_result.append(1)
                    else:
                        stack_errors.append('Не пройден шаг ' + str(sc[x][0]))
                        print('Поле ' + errors[i].text + ': не ок' +
                              '\n Шаг ' + str(sc[x][0]))
                ms.waiting('element_to_be_clickable', 'CLASS_NAME', sc[x][-1],
                           2, 0).click()

            if sc[x][y] == 'alert':
                #sleep(0.5)
                if alert_passer():
                    pass
                else:
                    print('Тест провалился при проверке текста алерта')
            if sc[x][y] == 'pop-up-success':
                pop_up_text(sc[x][y + 1], sc[x][y + 2])

            if sc[x][y] == 'check_db':
                db_value = []
                exp_values.append(
                    ms.waiting('presence_of_element_located', 'XPATH',
                               "//*[@id='uch-num']", 2, 0).text)
                print(exp_values[len(exp_values) - 1])
                if 'main' in sc[x]:
                    with ps.open(db_conn) as db:
                        db_value.append(
                            db.query(
                                "SELECT count(*) FROM document_package WHERE document_package_number = '%s'"
                                % exp_values[len(exp_values) - 1])[0][0]
                        )  # Count the packages with the same registration number
                        db_value.append(
                            db.query(
                                "SELECT package_type FROM document_package WHERE document_package_number = '%s'"
                                % exp_values[len(exp_values) -
                                             1])[0][0])  # Package type
                        db_value.append(
                            db.query(
                                "SELECT project_type_id FROM document_package where document_package_number = '%s'"
                                % exp_values[len(exp_values) -
                                             1])[0][0])  # Project type
                        db_value.append(
                            db.query(
                                "SELECT status FROM document_package where document_package_number = '%s'"
                                % exp_values[len(exp_values) -
                                             1])[0][0])  # Status
                        db_value.append(
                            db.query(
                                "SELECT name FROM document_package where document_package_number = '%s'"
                                % exp_values[len(exp_values) -
                                             1])[0][0])  # Name

                if 'review-date' in sc[x]:
                    with ps.open(db_conn) as db:
                        db_value.append(
                            db.query(
                                "SELECT (rd.planned_review_date)::date FROM review_date rd LEFT join document_package dp on rd.document_package_id = dp.id WHERE dp.document_package_number ='%s'"
                                % exp_values[len(exp_values) - 1])[0]
                            [0])  # Planned review date
                        db_value.append(
                            db.query(
                                "SELECT rd.approval_form_id FROM review_date rd LEFT join document_package dp on rd.document_package_id = dp.id WHERE dp.document_package_number ='%s'"
                                % exp_values[len(exp_values) -
                                             1])[0][0])  # Approval format
                        db_value.append(
                            db.query(
                                "SELECT (rd.reason_date_review) FROM review_date rd LEFT join document_package dp on rd.document_package_id = dp.id WHERE dp.document_package_number ='%s'"
                                % exp_values[len(exp_values) - 1])[0]
                            [0])  # Justification of the review date
                        if db_value[5] != 'Null':
                            stack_result.append(1)
                        else:
                            stack_errors.append(
                                'Планируемая дата рассмотрения - не ОК: ' +
                                str(db_value[5]))

                        if db_value[6] == exp_values[4]:
                            stack_result.append(1)
                        else:
                            stack_errors.append(
                                'Формат утверждения - не ОК: ' +
                                str(db_value[6]))

                        if db_value[7] == exp_values[5]:
                            stack_result.append(1)
                        else:
                            stack_errors.append('Обоснование - не ОК: ' +
                                                str(db_value[7]))

                if 'subpoena' in sc[x]:
                    with ps.open(db_conn) as db:
                        db_value.append(
                            db.query(
                                "SELECT (sd.subpoena_date)::date FROM subpoena_date sd LEFT join review_date rd on sd.review_date_id=rd.id LEFT join document_package dp on rd.document_package_id = dp.id WHERE dp.document_package_number ='%s'"
                                % exp_values[len(exp_values) - 1])[0][0])
                        db_value.append(
                            db.query(
                                "SELECT sd.review_type_id FROM subpoena_date sd LEFT join review_date rd on sd.review_date_id=rd.id LEFT join document_package dp on rd.document_package_id = dp.id WHERE dp.document_package_number ='%s'"
                                % exp_values[len(exp_values) - 1])[0][0])
                        if db_value[8] != 'Null':
                            stack_result.append(1)
                        else:
                            stack_errors.append('Дата повестки - не ОК: ' +
                                                str(db_value[8]))
                        if db_value[9] == exp_values[6]:
                            stack_result.append(1)
                        else:
                            stack_errors.append('Тип заседания - не ОК: ' +
                                                str(db_value[9]))

                #print('Expected values: ' + str(exp_values))
                #print(db_value)
                if db_value[0] == 1:
                    stack_result.append(1)
                    print('Найден 1 пакет документа')
                else:
                    stack_errors.append('Найдено кол-во пакетов ' +
                                        str(db_value[0]))
                if str(db_value[1]) == exp_values[0]:

                    stack_result.append(1)
                else:
                    #print(db_value[1], exp_values[0])
                    stack_errors.append(
                        'Сохраненное значение типа пакета в БД не ОК: ' +
                        str(db_value[1]))

                if db_value[2] == exp_values[1]:
                    stack_result.append(1)
                else:
                    stack_errors.append('Тип проекта сохранился в БД не ОК ' +
                                        str(db_value[2]))
                #print(db_value[3], exp_values[2])
                if db_value[3] == exp_values[2]:
                    stack_result.append(1)
                else:
                    stack_errors.append('Статус - не ОК ' + str(db_value[3]))

                if db_value[4] == exp_values[3]:
                    stack_result.append(1)
                else:
                    stack_errors.append('Наименование - не ОК: ' +
                                        str(db_value[4]))
            if sc[x][y] == 'quit':
                exit_browser(driver)

    if stack_errors == []:
        print('Все тесты пройдены')
    else:
        print(stack_errors)
Example #43
 def open_connection(self):
     self.parse_ini()
     self.conn = pg.open(self.db_uri)
Example #44
            print("Переборщили с попытками. Подождите " + str(sleep_duration) +
                  " секунд")
            time.sleep(sleep_duration)
    return email


def make_username(email):
    return email.split("@")[0].lower()


email = get_email_from_user()
name_list = str(make_username(email))

l = []
try:
    db = postgresql.open("pq://*****:*****@127.0.0.Lesson1:5432/Lessons")

    def selekt_user(name_list):
        with db.xact() as xact:
            usernames = db.query("SELECT username,last_seen FROM users")
            for username in usernames:
                if name_list in username:
                    t = username
                    last_seen = (t[-1])
                    print(last_seen)
                    delta = dt(year=2018, month=11, day=12) - last_seen
                    print(delta)
                    if delta.days > 180:
                        print("Вам надо подтвердить логин")
                        get_email_from_user()
                    else:
Example #45
import fasteners
import time
import postgresql
import psycopg2
import psycopg2.extras

from pprint import pprint

from distutils.version import StrictVersion
from mastodon import Mastodon

pattern_version = r"^[0-9]+(\.[0-9]+){2}$"
lockfile = 'update.lock'
resultfile = 'result.txt'
scrapefile = 'scrape.txt'

db = postgresql.open("pq://postgres@localhost/instances")

mastodon = Mastodon(client_id='pytooter_clientcred.secret',
                    access_token='pytooter_usercred.secret',
                    api_base_url='https://don.tacostea.net')


def get_watching_uri():
    get_list = db.prepare("SELECT uri FROM target WHERE crawl = True")
    with db.xact():
        for row in get_list():
            print(row["uri"])


def get_version(uri):
    get_list = db.prepare(
Example #46
# -*- coding: utf-8 -*-
import postgresql, xml.sax, re, delijnosmlib, OSM_Data_Model

sixdigitsRE = re.compile(r'\d\d\d\d\d\d')
TECstopRE = re.compile(r'[BCNHXL].{4,7}')
db = postgresql.open('pq://*****:*****@localhost:5432/DL')

nodeexists = db.prepare(
    """SELECT stopidentifier FROM stops WHERE OSM_node_ID = $1
                           UNION
                           SELECT stopidentifier FROM stops_tec WHERE OSM_node_ID = $1;"""
)
# refexists = db.prepare("""SELECT OSM_node_ID, description
# FROM stops
# WHERE stopidentifier::TEXT = $1
# UNION
# SELECT OSM_node_ID, description_normalised
# FROM stops_tec
# WHERE stopidentifier = $1;""")
refdlexists = db.prepare("""SELECT OSM_node_ID, description
                             FROM stops
                             WHERE stopidentifier = $1;""")
reftecexists = db.prepare("""SELECT OSM_node_ID, description_normalised
                              FROM stops_tec
                              WHERE stopidentifier = $1;""")
purgeOSMdata = db.prepare("""UPDATE stops SET OSM_node_ID = NULL,
                                              OSM_name = NULL,
                                              OSM_city = NULL,
                                              OSM_street = NULL,
                                              OSM_operator = NULL,
                                              OSM_route_ref = NULL,
Example #47
def filterfile(parentID, fileID, count, page, filters, sort, direction):
    """Gets the file ID belonging to the parent.\
    For result files, the parentID is the process ID that spawned them.\
    For visualize files, the parentID is -1"""
    data = current_app.config['storage']['loader']()

    # first, generate the key
    tablekey = "data_%s_%s" % (
        (parentID if parentID >= 0 else 'visualize'), fileID)

    # check if the table exists:
    db = psql.open("localhost/pvacseq")
    fileID = str(fileID)
    with db.xact():
        query = db.prepare(
            "SELECT 1 FROM information_schema.tables WHERE table_name = $1")
        response = query(tablekey)
    if not len(response):  # table does not exist
        table_errors = create_table(parentID, fileID, data, tablekey, db)
        if table_errors is not None:
            return table_errors
    #with db.synchronizer:
    #    test_query = db.prepare("SELECT 1 FROM information_schema.tables WHERE table_name = $1")
    #    test_response = query(tablekey)
    with db.xact():
        typequery = db.prepare(
            "SELECT column_name, data_type FROM information_schema.columns WHERE table_name = $1"
        )
        column_defs = typequery(tablekey)
        column_maps = {}
        for (col, typ) in column_defs:
            if 'int' in typ:
                column_maps[col] = int
            elif typ == 'numeric' or typ == 'decimal':
                column_maps[col] = float
            else:
                column_maps[col] = str
    formatted_filters = []
    for i in range(len(filters)):
        f = filters[i].strip()
        if not len(f):
            continue
        result = queryfilters.match(f)
        if not result:
            return ({
                "code": 400,
                "message": "Encountered an invalid filter (%s)" % f,
                "fields": "filters"
            }, 400)
        colname = column_filter(result.group(1))
        if colname not in column_maps:
            return ({
                "code": 400,
                "message": "Unknown column name %s" % result.group(1),
                "fields": "filters"
            }, 400)
        op = result.group(2)
        typ = column_maps[colname]
        val = None
        try:
            val = column_maps[colname](result.group(3))
        except ValueError:
            return ({
                "code":
                400,
                "message":
                "Value %s cannot be formatted to match the type of column %s (%s)"
                % (result.group(3), result.group(1), typ)
            }, 400)
        if typ == str and (op in {'==', '!='}):
            formatted_filters.append(
                json.dumps(colname) + (' not ' if '!' in op else ' ') +
                "LIKE '%s'" % (json.dumps(val)[1:-1]))
        else:  # type is numerical
            op = op.replace('==', '=')
            formatted_filters.append(
                '%s %s %s' % (json.dumps(colname), op, json.dumps(val)))
    raw_query = "SELECT %s FROM %s" % (','.join([k[0] for k in column_defs
                                                 ]), tablekey)
    if len(formatted_filters):
        raw_query += " WHERE " + " AND ".join(formatted_filters)
    if sort:
        if column_filter(sort) not in column_maps:
            return ({
                'code': 400,
                'message': 'Invalid column name %s' % sort,
                'fields': 'sort'
            }, 400)
        raw_query += " ORDER BY %s" % (column_filter(sort))
        if direction:
            raw_query += " " + direction
    if count:
        raw_query += " LIMIT %d" % count
    if page:
        raw_query += " OFFSET %d" % (page * count)
    print("Query:", raw_query)
    import decimal  # local import, used only to normalize Decimal values below
    with db.xact('SERIALIZABLE', 'READ ONLY DEFERRABLE'):
        query = db.prepare(raw_query)

        def decimalizer(value):
            return float(value) if isinstance(value, decimal.Decimal) else value

        result = [{
            colname: decimalizer(value)
            for (colname, value) in zip((k[0] for k in column_defs), row)
        } for row in query.rows()]
    db.close()
    return result
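
# A minimal, self-contained sketch of the filter syntax parsed above. The real
# `queryfilters` regex is defined elsewhere in this module, so the pattern below
# is an assumption for illustration only:
import re

queryfilters_demo = re.compile(r'^(.+?)(==|!=|<=|>=|<|>)(.+)$')

match = queryfilters_demo.match('gene_expression>=0.5')
if match:
    print(match.group(1))  # 'gene_expression' (the column name)
    print(match.group(2))  # '>='              (the comparison operator)
    print(match.group(3))  # '0.5'             (the value to coerce)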
Exemplo n.º 48
0
def initialize(current_app, args):
    """Setup anything that needs to be configured before the app start"""
    #This section is run once, when the API spins up
    print("Initializing app configuration")
    #First, read all the json config files to load app configuration
    config = {'storage': {}}
    config_dir = os.path.join(
        os.path.dirname(__file__),
        '..',
        'config'
    )
    user_config_dir = os.path.expanduser("~/.pvacseq")
    if not os.path.isdir(user_config_dir):
        os.makedirs(user_config_dir)
    #For every config file predefined in the config directory,
    #first read and load the file, then
    #check the user config directory for an override
    for configfile in iglob(os.path.join(config_dir, '*.json')):
        key = os.path.splitext(os.path.basename(configfile))[0]
        with open(configfile) as reader:
            config[key] = json.load(reader)
        try:
            with open(os.path.join(user_config_dir, os.path.basename(configfile))) as reader:
                if key == 'schema':
                    config[key].update({
                        column_filter(k): v for (k, v) in json.load(reader).items()
                    })
                else:
                    config[key].update(json.load(reader))
        except FileNotFoundError:
            pass
    for key in config['files']:
        config['files'][key] = os.path.abspath(os.path.expanduser(config['files'][key]))
    current_app.config.update(config) #save to the app configuration object

    #Now load the data object from the files specified in the configuration
    synchronizer = threading.RLock()
    data = loaddata(current_app.config['files'], synchronizer)
    if 'processid' not in data:
        data.addKey('processid', 0, current_app.config['files']['processes'])
    if 'visualize' not in data:
        data.addKey('visualize', {}, current_app.config['files']['visualize'])
    if 'input' not in data:
        data.addKey('input', {}, current_app.config['files']['input'])
    current_app.config['storage']['data'] = data
    import weakref
    #Store a weak reference; calling loader() dereferences it to the live data object
    current_app.config['storage']['loader'] = weakref.ref(current_app.config['storage']['data'])
    loader = current_app.config['storage']['loader']
    #Check the last reboot (because pid's won't remain valid after a reboot)
    reboot = subprocess.check_output(['last', 'reboot']).decode().split("\n")[0]
    current_app.config['reboot'] = reboot
    if 'reboot' in data and data['reboot'] != reboot:
        print("A reboot has occurred since the server was first started")
        print(
            "pid's of old pVAC-Seq runs with id's",
            data['processid'],
            "and lower may be inaccurate"
        )
    current_app.config['storage']['children']={}
    current_app.config['storage']['manifest']={}

    visapp_path = os.path.relpath(
        os.path.join(
            os.path.dirname(os.path.dirname(os.path.abspath(__file__))),
            'visualizations.py'
        )
    )
    #Check if the bokeh port is already in use.  Attempt to reconnect?
    current_app.config['storage']['bokeh']=subprocess.Popen(
        'bokeh serve %s --allow-websocket-origin=localhost:8080'%(
            quote(visapp_path)
        ),
        shell=True,
        stdout=subprocess.DEVNULL
    )
    print(
        "Visualization server started on PID",
        current_app.config['storage']['bokeh'].pid
    )

    @atexit.register
    def cleanup_bokeh():
        print("Cleaning up visualization server")
        import signal
        current_app.config['storage']['bokeh'].send_signal(signal.SIGINT)
        try:
            current_app.config['storage']['bokeh'].wait(1)
        except subprocess.TimeoutExpired:
            current_app.config['storage']['bokeh'].terminate()

    #Establish a connection to the local postgres database
    try:
        tmp = psql.open("localhost/postgres")
    except psqlException as e:
        raise SystemExit(
            "Unable to connect to your Postgres server. "
            "The pVAC-Seq API requires a running local Postgres server"
        ) from e
    if not len(tmp.prepare("SELECT 1 FROM pg_database WHERE datname = $1")('pvacseq')):
        tmp.execute("CREATE DATABASE pvacseq")
    tmp.close()
    db = psql.open("localhost/pvacseq")
    db.synchronizer = threading.RLock()  #lock used to serialize access to this shared connection
    current_app.config['storage']['db'] = db

    @atexit.register
    def cleanup_database():
        print("Cleaning up database connections")
        if 'db-clean' in current_app.config:
            with db.synchronizer:
                for table in current_app.config['db-clean']:
                    try:
                        current_app.config['storage']['db'].execute("DROP TABLE %s"%table)
                    except UndefinedTableError:
                        pass
        current_app.config['storage']['db'].close()

    #setup directory structure:
    for subdir in ('input', '.processes', 'archive', 'visualize', 'export', '.tmp'):
        os.makedirs(
            os.path.join(current_app.config['files']['data-dir'], subdir),
            exist_ok=True
        )

    def make_config():
        import yaml
        base = os.path.join(current_app.config['files']['data-dir'], 'visualize')
        runs = [d for d in os.listdir(base) if os.path.isdir(os.path.join(base, d))]

        def write_config(MHC_dict, config_path):
            #Normalize the run parameters, then write them out if they changed
            del MHC_dict['tmp_dir']
            MHC_dict['output'] = MHC_dict['output_dir']
            del MHC_dict['output_dir']
            if 'MHC_Class' in os.path.basename(MHC_dict['output']):
                MHC_dict['output'] = MHC_dict['output'][:MHC_dict['output'].rfind('/')]
            if os.path.exists(config_path):
                with open(config_path) as config_file:
                    old_dict = json.load(config_file)
                if old_dict and MHC_dict != old_dict:
                    with open(config_path, 'w') as config_file:
                        json.dump(MHC_dict, config_file, indent='\t')
            else:
                with open(config_path, 'w') as config_file:
                    json.dump(MHC_dict, config_file, indent='\t')

        for run in runs:
            config_path = os.path.join(base, run, 'config.json')
            MHCI = os.path.join(base, run, 'MHC_Class_I', 'log', 'inputs.yml')
            MHCII = os.path.join(base, run, 'MHC_Class_II', 'log', 'inputs.yml')
            if os.path.exists(MHCI):
                with open(MHCI, 'r') as MHCI_input:
                    MHC_dict = yaml.safe_load(MHCI_input)
                    if MHC_dict:
                        if os.path.exists(MHCII):
                            with open(MHCII, 'r') as MHCII_input:
                                temp_dict = yaml.safe_load(MHCII_input)
                                if temp_dict:
                                    MHC_dict.update({k: v for k, v in temp_dict.items() if k not in MHC_dict})
                                    MHC_dict['alleles'].extend(temp_dict['alleles'])
                                    MHC_dict['prediction_algorithms'].extend(temp_dict['prediction_algorithms'])
                        write_config(MHC_dict, config_path)
            elif os.path.exists(MHCII):
                with open(MHCII, 'r') as MHCII_input:
                    MHC_dict = yaml.safe_load(MHCII_input)
                    if MHC_dict:
                        write_config(MHC_dict, config_path)

    #check whether any previous runs' results are already present and, if so, create their config files
    if os.listdir(os.path.join(current_app.config['files']['data-dir'], 'visualize')):
        make_config()

    #Setup the watchers to observe the files
    current_app.config['storage']['watchers'] = []

    inputdir = os.path.join(current_app.config['files']['data-dir'],'input')
    manifest_data = current_app.config['storage']['manifest']
    input_watcher = Observe(inputdir)
    input_watcher.subscribe(lambda x:print("Input Event:", x))

    manifest_data['input'] = []
    hier_inp = manifest_data['input']

    current = {
        os.path.join(path, filename)
        for (path, _, files) in os.walk(inputdir)
        for filename in files
    }
    for (key, filename) in data['input'].items():
        if isinstance(filename, str):
            ext = '.'.join(os.path.basename(filename).split('.')[1:])
            print("Updating input entry",key,"to new format")
            fullname = os.path.join(inputdir, filename)
            viz = is_visualizable(ext)
            size = check_size(fullname, 1) if viz else None
            data['input'][key] = {
                'fullname':fullname,
                'display_name':os.path.relpath(
                    filename,
                    inputdir
                ),
                'description':descriptions(ext),
                'is_visualizable': viz and size,
                'visualization_type': 'File contains no data' if viz and not size else visualization_type(ext),
            }
    recorded = {item['fullname'] for item in data['input'].values()}
    targets = {k for k in data['input'] if data['input'][k]['fullname'] in recorded-current}
    for file_id in targets:
        del data['input'][file_id]
    file_id = 0
    for filename in current-recorded:
        while str(file_id) in data['input']:
            file_id += 1
        ext = '.'.join(os.path.basename(filename).split('.')[1:])
        print("Assigning file:", file_id,"-->",filename)
        fullname = os.path.abspath(os.path.join(inputdir, filename))
        viz = is_visualizable(ext)
        size = check_size(fullname, 1) if viz else None
        data['input'][str(file_id)] = {
            'fullname':fullname,
            'display_name':os.path.relpath(
                filename,
                inputdir
            ),
            'description':descriptions(ext),
            'is_visualizable': viz and size,
            'visualization_type': 'File contains no data' if viz and not size else visualization_type(ext),
        }
    for filename in current:
        file_path = os.path.abspath(os.path.join(inputdir, filename))
        ext = '.'.join(os.path.basename(filename).split('.')[1:])
        file_id = str([k for k,v in data['input'].items() if v['fullname'] == file_path][0])
        viz = is_visualizable(ext)
        size = check_size(data['input'][file_id]['fullname'], 1) if viz else None
        nav_to_dir(file_path, inputdir, hier_inp).append({
            'display_name':filename[filename.rfind('/')+1:],
            'type':'file',
            'fileID':file_id,
            'description':descriptions(ext),
            'is_visualizable': viz and size,
            'visualization_type': 'File contains no data' if viz and not size else visualization_type(ext),
        })

    def _create(event):
        data = loader()
        filename = os.path.relpath(
            event.src_path,
            inputdir
        )
        file_id = 0
        while str(file_id) in data['input']:
            file_id += 1
        ext = '.'.join(os.path.basename(filename).split('.')[1:])
        print("Creating file:", file_id, "-->",filename)
        viz = is_visualizable(ext)
        size = check_size(event.src_path) if viz else None
        data['input'][str(file_id)] = {
            'fullname':os.path.abspath(os.path.join(
                inputdir,
                filename
            )),
            'display_name':filename,
            'description':descriptions(ext),
            'is_visualizable': viz and size,
            'visualization_type': 'File contains no data' if viz and not size else visualization_type(ext),
        }
        nav_to_dir(event.src_path, inputdir, hier_inp).append({
            'display_name':filename[filename.rfind('/')+1:],
            'type':'file',
            'fileID':str(file_id),
            'description':descriptions(ext),
            'is_visualizable': viz and size,
            'visualization_type': 'File contains no data' if viz and not size else visualization_type(ext),
        })
        data.save()
    input_watcher.subscribe(
        _create,
        watchdog.events.FileCreatedEvent
    )

    def _delete(event):
        data = loader()
        filename = os.path.relpath(
            event.src_path,
            inputdir
        )
        current = nav_to_dir(event.src_path, inputdir, hier_inp)
        for entity in current:
            if entity['display_name'] == filename[filename.rfind('/')+1:]:
                current.remove(entity)
        clean_tree(hier_inp)
        for key in list(data['input']):
            if data['input'][key]['display_name'] == filename:
                del data['input'][key]
                print("Deleting file:",key,'-->', filename)
                data.save()
                return
    input_watcher.subscribe(
        _delete,
        watchdog.events.FileDeletedEvent
    )

    def _move(event):
        data = loader()
        filesrc = os.path.relpath(
            event.src_path,
            inputdir
        )
        filedest = os.path.relpath(
            event.dest_path,
            inputdir
        ) 
        ext = '.'.join(os.path.basename(filedest).split('.')[1:])
        viz = is_visualizable(ext)
        size = check_size(event.dest_path) if viz else None
        #This accounts for how Watchdog records duplicate symlinks (i.e. symlinks to the same
        #file) as FileMovedEvents originating from the previously added duplicate; those symlinks
        #are never properly recorded, so the source file of such an event may be missing too.
        current = {
            filename
            for (_, _, files) in os.walk(inputdir)
            for filename in files
        }
        if filesrc in [data['input'][k]['display_name'] for k in data['input']] and os.path.basename(filesrc) not in current:
            file_id = [k for k in data['input'] if data['input'][k]['display_name'] == filesrc][0]
            current_src = nav_to_dir(event.src_path, inputdir, hier_inp)
            current_src.remove([
                entity for entity in current_src if entity['type'] == 'file' and entity['fileID'] == str(file_id)
            ][0])
        else:
            file_id = 0
            while str(file_id) in data['input']:
                file_id += 1
                
        nav_to_dir(event.dest_path, inputdir, hier_inp).append({
            'display_name':filedest[filedest.rfind('/')+1:],
            'type':'file',
            'fileID':str(file_id),
            'description':descriptions(ext),
            'is_visualizable': viz and size,
            'visualization_type': 'File contains no data' if viz and not size else visualization_type(ext),
        })
        clean_tree(hier_inp)

        data['input'][str(file_id)] = {
            'fullname':os.path.abspath(os.path.join(
                inputdir,
                filedest
            )),
            'display_name':filedest,
            'description':descriptions(ext),
            'is_visualizable': viz and size,
            'visualization_type': 'File contains no data' if viz and not size else visualization_type(ext),
        }
        print("Moving file:", key,'(',filesrc,'-->',filedest,')')
        data.save()

    input_watcher.subscribe(
        _move,
        watchdog.events.FileMovedEvent
    )
    current_app.config['storage']['watchers'].append(input_watcher)

    vsz = os.path.join(current_app.config['files']['data-dir'],'visualize')
    visualize_watcher = Observe(vsz)
    visualize_watcher.subscribe(lambda x:print("visualize Event:", x))

    manifest_data['visualize'] = []
    hier_vz = manifest_data['visualize']
    #Now we set up event handlers for the visualize
    #This ensures that file ids are held consistent
    current = {
        os.path.join(path, filename)
        for (path, _, files) in os.walk(vsz)
        for filename in files
    }
    for (key, filename) in data['visualize'].items():
        if isinstance(filename, str):
            ext = '.'.join(os.path.basename(filename).split('.')[1:])
            print("Updating visualize entry",key,"to new format")
            fullname = os.path.join(vsz, filename)
            viz = is_visualizable(ext)
            size = check_size(fullname, 1) if viz else None
            data['visualize'][key] = {
                'fullname':fullname,
                'display_name':os.path.relpath(
                    filename,
                    vsz
                ),
                'description':descriptions(ext),
                'is_visualizable': viz and size,
                'visualization_type': 'File contains no data' if viz and not size else visualization_type(ext),
            }
    recorded = {item['fullname'] for item in data['visualize'].values()}
    targets = {k for k in data['visualize'] if data['visualize'][k]['fullname'] in recorded-current}
    for file_id in targets:
        del data['visualize'][file_id]
    file_id = 0
    for filename in current-recorded:
        while str(file_id) in data['visualize']:
            file_id += 1
        ext = '.'.join(os.path.basename(filename).split('.')[1:])
        print("Assigning file:", file_id,"-->",filename)
        fullname = os.path.abspath(os.path.join(vsz, filename))
        viz = is_visualizable(ext)
        size = check_size(fullname, 1) if viz else None
        data['visualize'][str(file_id)] = {
            'fullname':fullname,
            'display_name':os.path.relpath(
                filename,
                vsz
            ),
            'description':descriptions(ext),
            'is_visualizable': viz and size,
            'visualization_type': 'File contains no data' if viz and not size else visualization_type(ext),
        }
    for filename in current:
        file_path = os.path.abspath(os.path.join(vsz, filename))
        ext = '.'.join(os.path.basename(filename).split('.')[1:])
        file_id = str([k for k,v in data['visualize'].items() if v['fullname'] == file_path][0])
        viz = is_visualizable(ext)
        size = check_size(data['visualize'][file_id]['fullname'], 1) if viz else None
        nav_to_dir(file_path, vsz, hier_vz).append({
            'display_name':filename[filename.rfind('/')+1:],
            'type':'file',
            'fileID':file_id,
            'description':descriptions(ext),
            'is_visualizable': viz and size,
            'visualization_type': 'File contains no data' if viz and not size else visualization_type(ext),
        })

    data_path = current_app.config['files']
    def _create(event):
        data = loader()
        make_config()
        filename = os.path.relpath(
            event.src_path,
            vsz
        )
        file_id = 0
        while str(file_id) in data['visualize']:
            file_id += 1
        ext = '.'.join(os.path.basename(filename).split('.')[1:])
        print("Creating file:", file_id, "-->",filename)
        viz = is_visualizable(ext)
        size = check_size(event.src_path) if viz else None
        data['visualize'][str(file_id)] = {
            'fullname':os.path.abspath(os.path.join(
                vsz,
                filename
            )),
            'display_name':filename,
            'description':descriptions(ext),
            'is_visualizable': viz and size,
            'visualization_type': 'File contains no data' if viz and not size else visualization_type(ext),
        }
        nav_to_dir(event.src_path, vsz, hier_vz).append({
            'display_name':filename[filename.rfind('/')+1:],
            'type':'file',
            'fileID':str(file_id),
            'description':descriptions(ext),
            'is_visualizable': viz and size,
            'visualization_type': 'File contains no data' if viz and not size else visualization_type(ext),
        })
        data.save()
    visualize_watcher.subscribe(
        _create,
        watchdog.events.FileCreatedEvent
    )

    def _delete(event):
        data = loader()
        filename = os.path.relpath(
            event.src_path,
            vsz
        )
        current = nav_to_dir(event.src_path, vsz, hier_vz)
        for entity in current:
            if entity['display_name'] == filename[filename.rfind('/')+1:]:
                current.remove(entity)
        clean_tree(hier_vz)
        for key in list(data['visualize']):
            if data['visualize'][key]['display_name'] == filename:
                del data['visualize'][key]
                print("Deleting file:",key,'-->', filename)
                with db.synchronizer:
                    query = db.prepare("SELECT 1 FROM information_schema.tables WHERE table_name = $1")
                    if len(query('data_visualize_'+str(key))):
                        db.execute("DROP TABLE data_visualize_"+str(key))
                data.save()
                return
    visualize_watcher.subscribe(
        _delete,
        watchdog.events.FileDeletedEvent
    )

    def _move(event):
        data = loader()
        filesrc = os.path.relpath(
            event.src_path,
            vsz
        )
        filedest = os.path.relpath(
            event.dest_path,
            vsz
        )
        file_id = [k for k in data['visualize'] if data['visualize'][k]['display_name'] == filesrc][0]
        ext = '.'.join(os.path.basename(filedest).split('.')[1:])
        viz = is_visualizable(ext)
        size = check_size(event.dest_path) if viz else None
        current_src = nav_to_dir(event.src_path, vsz, hier_vz)
        nav_to_dir(event.dest_path, vsz, hier_vz).append({
            'display_name':filedest[filedest.rfind('/')+1:],
            'type':'file',
            'fileID':str(file_id),
            'description':descriptions(ext),
            'is_visualizable': viz and size,
            'visualization_type': 'File contains no data' if viz and not size else visualization_type(ext),
        })
        current_src.remove([
            entity for entity in current_src if entity['type'] == 'file' and entity['fileID'] == str(file_id)
        ][0])
        clean_tree(hier_vz)
        for key in data['visualize']:
            if key == file_id:
                data['visualize'][key] = {
                    'fullname':os.path.abspath(os.path.join(
                        vsz,
                        filedest
                    )),
                    'display_name':filedest,
                    'description':descriptions(ext),
                    'is_visualizable': viz and size,
                    'visualization_type': 'File contains no data' if viz and not size else visualization_type(ext),
                }
                print("Moving file:", key,'(',filesrc,'-->',filedest,')')
                data.save()
                return
    visualize_watcher.subscribe(
        _move,
        watchdog.events.FileMovedEvent
    )
    current_app.config['storage']['watchers'].append(visualize_watcher)

    manifest_data['results'] = []
    hier_res = manifest_data['results']

    resultdir = os.path.join(current_app.config['files']['data-dir'], '.processes')
    results_watcher = Observe(resultdir)
    results_watcher.subscribe(lambda x:print("Results Event:", x))
    for processID in range(data['processid']+1):
        processkey = 'process-%d'%processID
        if processkey in data:
            print("Checking files for process", processID)
            if 'files' in data[processkey]:
                if isinstance(data[processkey]['files'], list):
                    print("Updating file manifest of process", processID, "to new format")
                    #The old format stored a plain list; rebuild it as a dict keyed by file id
                    #(assigning to the dict inside the loop would keep only the last file)
                    old_files = data[processkey]['files']
                    data[processkey]['files'] = {}
                    for (file_id, filename) in enumerate(old_files):
                        ext = '.'.join(os.path.basename(filename).split('.')[1:])
                        viz = is_visualizable(ext)
                        size = check_size(filename, 1) if viz else None
                        data[processkey]['files'][file_id] = {
                            'fullname': filename,
                            'display_name': os.path.relpath(
                                filename,
                                data[processkey]['output']
                            ),
                            'description': descriptions(ext),
                            'is_visualizable': viz and size,
                            'visualization_type': 'File contains no data' if viz and not size else visualization_type(ext),
                        }
            else:
                data[processkey]['files'] = {}
            current = {
                os.path.join(path, filename)
                for (path, _, files) in os.walk(data[processkey]['output'])
                for filename in files
            }
            recorded = {entry['fullname']:k for k,entry in data[processkey]['files'].items()}
            for fullname in recorded.keys() - current:
                print("Deleting file", fullname, "from manifest")
                del data[processkey]['files'][recorded[fullname]]
            for filename in current-recorded.keys():
                file_id = len(data[processkey]['files'])
                while str(file_id) in data[processkey]['files']:
                    file_id += 1
                file_id = str(file_id)
                ext = '.'.join(os.path.basename(filename).split('.')[1:])
                print("Assigning file:",file_id,"-->",filename)
                viz = is_visualizable(ext)
                size = check_size(filename, 1) if viz else None
                data[processkey]['files'][file_id] = {
                    'fullname':filename,
                    'display_name':os.path.relpath(
                        filename,
                        data[processkey]['output']
                    ),
                    'description':descriptions(ext),
                    'is_visualizable': viz and size,
                    'visualization_type': 'File contains no data' if viz and not size else visualization_type(ext),
                }
            for filename in current:
                file_path = os.path.abspath(os.path.join(data[processkey]['output'], filename))
                ext = '.'.join(os.path.basename(filename).split('.')[1:])
                file_id = str([k for k,v in data[processkey]['files'].items() if v['fullname'] == file_path][0])
                viz = is_visualizable(ext)
                size = check_size(data[processkey]['files'][file_id]['fullname'], 1) if viz else None
                nav_to_dir(file_path, resultdir, hier_res).append({
                    'display_name':filename[filename.rfind('/')+1:],
                    'type':'file',
                    'fileID':file_id,
                    'description':descriptions(ext),
                    'is_visualizable': viz and size,
                    'visualization_type': 'File contains no data' if viz and not size else visualization_type(ext),
                })

    def _create(event):
        data = loader()
        parentpaths = {
            (data['process-%d'%i]['output'], i)
            for i in range(data['processid']+1)
            if 'process-%d'%i in data
        }
        filepath = event.src_path
        for (parentpath, parentID) in parentpaths:
            if os.path.commonpath([filepath, parentpath])==parentpath:
                print("New output from process",parentID)
                processkey = 'process-%d'%parentID
                file_id = len(data[processkey]['files'])
                while str(file_id) in data[processkey]['files']:
                    file_id+=1
                file_id = str(file_id)
                display_name = os.path.relpath(
                    filepath,
                    data[processkey]['output']
                )
                ext = '.'.join(os.path.basename(filepath).split('.')[1:])
                print("Assigning id",file_id,'-->',display_name)
                viz = is_visualizable(ext)
                size = check_size(filepath) if viz else None
                data[processkey]['files'][file_id] = {
                    'fullname':filepath,
                    'display_name':display_name,
                    'description':descriptions(ext),
                    'is_visualizable': viz and size,
                    'visualization_type': 'File contains no data' if viz and not size else visualization_type(ext),
                }
                nav_to_dir(filepath, resultdir, hier_res).append({
                    'display_name':filepath[filepath.rfind('/')+1:],
                    'type':'file',
                    'fileID':file_id,
                    'description':descriptions(ext),
                    'is_visualizable': viz and size,
                    'visualization_type': 'File contains no data' if viz and not size else visualization_type(ext),
                })
                data.save()
                return
    results_watcher.subscribe(
        _create,
        watchdog.events.FileCreatedEvent
    )

    def _delete(event):
        data = loader()
        parentpaths = {
            (data['process-%d'%i]['output'], i)
            for i in range(data['processid']+1)
            if 'process-%d'%i in data
        }
        filepath = event.src_path
        current = nav_to_dir(filepath, resultdir, hier_res)
        for entity in current:
            if entity['display_name'] == filepath[filepath.rfind('/')+1:]:
                current.remove(entity)
            clean_tree(hier_res)
        for (parentpath, parentID) in parentpaths:
            if os.path.commonpath([filepath, parentpath])==parentpath:
                print("Deleted output from process",parentID)
                processkey = 'process-%d'%parentID
                for (file_id, filedata) in list(data[processkey]['files'].items()):
                    if filedata['fullname'] == filepath:
                        del data[processkey]['files'][file_id]
                        print("Deleted file:", file_id,'-->',filepath)
                        with db.synchronizer:
                            query = db.prepare("SELECT 1 FROM information_schema.tables WHERE table_name = $1")
                            if len(query('data_%d_%s'%(parentID, file_id))):
                                db.execute("DROP TABLE data_%d_%s"%(parentID, file_id))
                data.save()
                return
    results_watcher.subscribe(
        _delete,
        watchdog.events.FileDeletedEvent
    )

    def _move(event):
        data = loader()
        filesrc = event.src_path
        filedest = event.dest_path
        parentpaths = {
            (data['process-%d'%i]['output'], i)
            for i in range(data['processid']+1)
            if 'process-%d'%i in data
        }
        srckey = ''
        destkey = ''
        for (parentpath, parentID) in parentpaths:
            if os.path.commonpath([filesrc, parentpath])==parentpath:
                srckey = 'process-%d'%parentID
            elif os.path.commonpath([filedest, parentpath]) == parentpath:
                destkey = 'process-%d'%parentID

        ext = '.'.join(os.path.basename(filedest).split('.')[1:])
        viz = is_visualizable(ext)
        size = check_size(filedest) if viz else None
        if srckey == destkey:
            for (file_id, filedata) in data[srckey]['files'].items():
                if filedata['fullname'] == filesrc:
                    nav_to_dir(filedest, resultdir, hier_res).append({
                        'display_name':filedest[filedest.rfind('/')+1:],
                        'type':'file',
                        'fileID':file_id,
                        'description':descriptions(ext),
                        'is_visualizable': viz and size,
                        'visualization_type': 'File contains no data' if viz and not size else visualization_type(ext),
                    })
                    current_src = nav_to_dir(filesrc, resultdir, hier_res)
                    current_src.remove([
                        entity for entity in current_src if entity['type'] == 'file' and entity['fileID'] == str(file_id)
                    ][0])
                    data[srckey]['files'][file_id] = {
                        'fullname':filedest,
                        'display_name':os.path.relpath(
                            filedest,
                            data[srckey]['output']
                        ),
                        'description':descriptions(ext),
                        'is_visualizable': viz and size,
                        'visualization_type': 'File contains no data' if viz and not size else visualization_type(ext),
                    }
        else:
            _delete(event)
            #Synthesize a minimal stand-in event so _create sees the destination path
            evt = lambda: None
            evt.src_path = event.dest_path
            _create(evt)
        clean_tree(hier_res)
    results_watcher.subscribe(
        _move,
        watchdog.events.FileMovedEvent
    )
    current_app.config['storage']['watchers'].append(results_watcher)


    @atexit.register
    def cleanup_watchers():
        print("Cleaning up observers")
        for watcher in current_app.config['storage']['watchers']:
            watcher.stop()
            watcher.join()

    current_app.config['storage']['synchronizer'] = synchronizer
    data.save()

    print("Initialization complete.  Booting API")
Exemplo n.º 49
0
import vk
import data
import time
# from flask import Flask, render_template
import postgresql

# app = Flask(__name__)
db = postgresql.open(user=data.p_login, host=data.p_host, database=data.p_dbname, password=data.p_pass)
session = vk.AuthSession(data.appid, data.login, data.password)
vk_api = vk.API(session, v='5.84', lang='ru', timeout=10)
group_id = "-126351542"  # streampub id


def data_update():
    values = "INSERT INTO posts VALUES ($1, to_timestamp($2), $3, $4, $5," \
             " $6, $7, $8, $9, $10, $11, $12)"
    set_table = db.prepare(values)
    search_id = 13783
    offset = 0
    while True:
        cur_wall = vk_api.wall.get(owner_id=group_id, count=100, offset=offset)
        cur_wall = cur_wall['items']
        for post in cur_wall:
            post_id = post['id']
            date = post['date']
            marked_as_ads = post['marked_as_ads']
            have_repost = 'copy_history' in post  # posts carrying copy_history are reposts; hasattr() never matches dict keys
            post_type = post['post_type']
            text = post['text']  # parameters are bound by the prepared statement, so manual quote-escaping is unnecessary
            signer_id = post.get('signer_id', 0)  # dict lookup with a default, not hasattr()
            comments = post['comments']['count']
Exemplo n.º 50
0
def connect():
    return postgresql.open(current_app.config['PSQL_DATABASE_URI'])
Exemplo n.º 51
0
# coding: utf-8

# In[1]:

import postgresql
from collections import Counter, defaultdict
import csv

# In[2]:

db = postgresql.open("pq://localhost/chembl_20")

# In[3]:

ps = db.prepare("SELECT * FROM drug_mechanism")

# In[4]:

columns = ps.column_names
interactions = list()
for row in ps:
    row_dict = dict(zip(columns, row))
    interactions.append(row_dict)

# In[5]:

# There are 2,266 drug <-> gene interactions
len(interactions)

# In[6]:
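
# The Counter imported above makes it easy to tally the interactions; a sketch,
# assuming drug_mechanism's action_type column describes each mechanism:
action_counts = Counter(row['action_type'] for row in interactions)
print(action_counts.most_common(5))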
Exemplo n.º 52
0
import postgresql

db = postgresql.open("pq://medic_admin:@localhost/medic_prod")
db.execute("CREATE TABLE emp (emp_name text PRIMARY KEY, emp_salary numeric)")

make_emp = db.prepare("INSERT INTO emp VALUES ($1, $2)")
raise_emp = db.prepare(
    "UPDATE emp SET emp_salary = emp_salary + $2 WHERE emp_name = $1")
get_emp_with_salary_lt = db.prepare(
    "SELECT emp_name FROM emp WHERE emp_salary < $1")

with db.xact():
    make_emp("John Doe", 150)
    make_emp("Jane Doe", 150)
    make_emp("Andrew Doe", 55)
    make_emp("Susan Doe", 60)

with db.xact():
    for row in get_emp_with_salary_lt(125):
        print(row["emp_name"])
        raise_emp(row["emp_name"], 10)
Exemplo n.º 53
0
import postgresql
db = postgresql.open(user = '******', database = 'tccDB', port = 5432, password = '******')
# OR
# db = postgresql.open("pq://*****:*****@host/name_of_database")

db.execute("CREATE TABLE tb_user (ds_user varchar(20) PRIMARY KEY, ds_passwd text)")

make_tb_user = db.prepare("INSERT INTO tb_user VALUES ($1, $2)")
raise_tb_user = db.prepare("UPDATE tb_user SET ds_passwd = 'new_password' WHERE ds_user = $1")
select_tb_user = db.prepare("SELECT ds_user FROM tb_user")

with db.xact():
	make_tb_user("avelino", "thiago")
	make_tb_user("thiago", "avelino")
	make_tb_user("python", "postgresql")

with db.xact():
	for row in select_tb_user():
		print(row["ds_user"])
		raise_tb_user(row["ds_user"])
Exemplo n.º 54
0
import postgresql

db = postgresql.open('pq://*****:*****@localhost:5432/chat')
Exemplo n.º 55
0
import postgresql

DB_NAME = 'quazi'
DB_USER = '******'
DB_PASS = '******'
DB_HOST = 'localhost'
DB_PORT = '5432'

db = postgresql.open('pq://' + DB_USER + ':' + DB_PASS +'@' + DB_HOST + ':' + DB_PORT + '/' + DB_NAME)
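# OR, equivalently, with keyword arguments instead of a concatenated IRI
# (both styles of postgresql.open appear elsewhere in these examples):
# db = postgresql.open(user=DB_USER, password=DB_PASS, host=DB_HOST,
#                      port=int(DB_PORT), database=DB_NAME)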
Exemplo n.º 56
0
'ср': 'wed',
'чт': 'thu',
'пт': 'fri',
'сб': 'sat',
'вс': 'sun'
}

if __name__=="__main__":
  import sys

  if len(sys.argv)<2:
    print("nothing to do")
    exit()

  dbconn = json.load(open("database.json"))
  db = postgresql.open(**dbconn)

  if sys.argv[1]=="passwords":
    """ populate and show passwords for operators """
    for ext, name in db.prepare("select op_ext, op_name from operators")():
      if not ext: continue
      pw = db.prepare("select ext_pw from extensions where ext_n=$1")(ext)
      if len(pw)==0:
        print("generate password for", ext)
        npw = gen_pw(12)
        db.prepare("insert into extensions (ext_n, ext_pw) values ($1, $2)")(ext, npw)
      else:
        npw = pw[0][0]
      print("%s;%s;%s"%(name, ext, npw))

  elif sys.argv[1]=="rmpw":
Exemplo n.º 57
0
def db_connection():
    return postgresql.open("pq://*****:*****@localhost:5432/birds_db")
Exemplo n.º 58
0
#!/opt/anaconda/bin/python3.6
import json
from sys import path
path.append('/home/bf7750/python/pkgs')

import postgresql as pg

#%%  Connection
db = pg.open('pq://*****:*****@localhost:5432/crdb')

#%%  Header
print("Content-Type: application/json")
print()
print()
#%% Getting data
print(
    json.dumps([
        r[0] for r in db.query("""
    select schema_name 
    from information_schema.schemata 
    where 
	left(schema_name,3) <> 'pg_' and 
    schema_name <> 'information_schema'
""")
    ]))
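
# If db.query is unavailable in an older py-postgresql release, a prepared
# statement yields the same list (a sketch under that assumption):
get_schemata = db.prepare(
    "select schema_name from information_schema.schemata "
    "where left(schema_name, 3) <> 'pg_' "
    "and schema_name <> 'information_schema'"
)
print(json.dumps([r[0] for r in get_schemata()]))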
Exemplo n.º 59
0
import base64
import postgresql

admin_reset = base64.b64decode("{{admin_reset.sql}}").decode('utf-8')

db = postgresql.open("pq://{{USERNAME}}:{{PASSWORD}}@localhost/{{DBNAME}}")
db.execute(admin_reset)
db.close()
Exemplo n.º 60
0
import random
import sys
import getopt
import math
import pickle
import time
from PIL import Image, ImageDraw

import postgresql  # required for the postgresql.open call below

# db informations
db_name = "madlib"
db_user = "******"
db_server = "localhost"
db_port = 5432
db_table_name = "k_means_test"
db_field_name = "coord"
db = postgresql.open("pq://" + db_user + "@" + db_server + ":" + str(db_port) +
                     "/" + db_name)

# dataset informations
ds_max_groups = 10
ds_max_x = 300
ds_max_y = 300
group_max_elts = 1000
group_max_width = 100
group_max_height = 100

default_output_file = "clustered_data.png"
data_file = "clusters.dat"

colors = [
    (255, 0, 0),  # red
    (0, 255, 0),  # green