Example #1
def createDb():
    if not __isExistsDb():
        db = Connection(dbFilePath)
        statement = """CREATE TABLE WeatherInfo (
            prec_no INTEGER,
            block_no INTEGER,
            date TEXT,
            hour INTEGER,
            pressure_onland REAL DEFAULT 0.0,
            pressure_onsea REAL DEFAULT 0.0,
            amount_rain REAL DEFAULT 0.0,
            temperature REAL DEFAULT 0.0,
            dew_point_temp REAL DEFAULT 0.0,
            vapor_pressure REAL DEFAULT 0.0,
            moisture INTEGER DEFAULT 0,
            wind_speed REAL DEFAULT 0.0,
            wind_direction TEXT DEFAULT '',
            sunlight REAL DEFAULT 0.0,
            total_solar_radiation REAL DEFAULT 0.0,
            amount_snowfall REAL DEFAULT 0.0,
            amount_snowcover REAL DEFAULT 0.0,
            weather TEXT DEFAULT '',
            amount_cloud TEXT DEFAULT '',
            visibility REAL DEFAULT 0.0,
            PRIMARY KEY (prec_no, block_no, date, hour)
        )"""
        db.cursor().execute(statement)
        db.commit()
        db.close()
    else:
        __mylogger.error('Database already exists; nothing to do.')
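The existence check can also be folded into the DDL with CREATE TABLE IF NOT EXISTS, which makes the call safe to repeat. A minimal sketch under that assumption (helper name hypothetical, column list abbreviated, dbFilePath reused from above):

def createDbIfMissing():
    # Hypothetical variant of createDb: IF NOT EXISTS makes re-running a
    # no-op, so no separate __isExistsDb() guard is needed.
    db = Connection(dbFilePath)
    try:
        db.execute("""CREATE TABLE IF NOT EXISTS WeatherInfo (
            prec_no INTEGER,
            block_no INTEGER,
            date TEXT,
            hour INTEGER,
            -- remaining measurement columns as in createDb above
            PRIMARY KEY (prec_no, block_no, date, hour)
        )""")
        db.commit()
    finally:
        db.close()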
Example #2
 def fetch_unprocessed(self):
     """Return one unprocessed row as a tuple (id, a, b, c, d, e, f, g, h)
     and set its status to 'processing'.
     """
     connect = Connection(self.dbname)
     cursor = connect.cursor()
     cursor.execute("""
     SELECT
         *
     FROM
         data
     WHERE
         status = 'unprocessed'
     LIMIT 1
     """)
     unprocessed_row = cursor.fetchone()
     if unprocessed_row:
         cursor.execute(
             """
         UPDATE
             data
         SET
             status = 'processing'
         WHERE
             id = :id
         """,
             {'id': unprocessed_row[0]},
         )
     connect.commit()
     connect.close()
     if unprocessed_row:
         return unprocessed_row[:9]
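Between the SELECT and the UPDATE in fetch_unprocessed, a second worker could claim the same row. A minimal sketch of an atomic claim, assuming the same data table (function name hypothetical):

def fetch_unprocessed_atomic(dbname):
    # isolation_level=None puts sqlite3 in autocommit mode so the transaction
    # can be managed manually; BEGIN IMMEDIATE takes the write lock before
    # the read, so no other worker can claim the same row in between.
    connect = Connection(dbname, isolation_level=None)
    try:
        connect.execute("BEGIN IMMEDIATE")
        row = connect.execute(
            "SELECT * FROM data WHERE status = 'unprocessed' LIMIT 1"
        ).fetchone()
        if row:
            connect.execute(
                "UPDATE data SET status = 'processing' WHERE id = ?",
                (row[0],),
            )
        connect.execute("COMMIT")
        return row[:9] if row else None
    finally:
        connect.close()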
Example #3
def __main__():
    """
    Reads an org-mode file containing org-drill flashcards, creates Flashcard
    object instances and prints them to screen.
    :return:
    """

    readfile_name = "C:/Users/juras/PycharmProjects/elkoi_py/worte_excerpt.org"
    # writefile_name = "C:/Users/juras/PycharmProjects/elkoi_py/parsed-worte_excerpt.org"
    database_name = "C:/Users/juras/elkoi/db/test.db"

    if len(sys.argv) == 2 or len(sys.argv) > 3:
        sys.exit(
            "This script expects names of 2 files -- one for reading"
            " org-drill flashcards and one for the flashcards database."
            # TODO writing file will have different purpose
            " Expected format: 'script-name read-file-name database-name'."
            " To use default files, pass 0 arguments.  To use default value"
            " for only one file, write '-' on place of that file's name.")
    if len(sys.argv) == 3:
        if sys.argv[1] != '-':
            readfile_name = sys.argv[1]
        if sys.argv[2] != '-':
            database_name = sys.argv[2]
    # with FileWrapper(open(readfile_name, 'r', encoding="utf-8")) as filewrp:
    #     db_connection = Connection(database_name)
    #     read_and_save_flashcards(filewrp, db_connection)

    filewrp = FileWrapper(open(readfile_name, 'r', encoding="utf-8"))
    db_connection = Connection(database_name)
    try:
        read_and_save_flashcards(filewrp, db_connection)
    finally:
        db_connection.close()  # close the connection even if parsing fails
Example #4
    def _update_dispersy(self):
        """
        Cleans up all SearchCommunity and MetadataCommunity data in the dispersy database.
        """
        db_path = os.path.join(self.state_dir, u"sqlite", u"dispersy.db")
        if not os.path.isfile(db_path):
            return

        communities_to_delete = (u"SearchCommunity", u"MetadataCommunity")

        connection = Connection(db_path)
        cursor = connection.cursor()

        data_updated = False
        for community in communities_to_delete:
            try:
                result = list(cursor.execute(u"SELECT id FROM community WHERE classification == ?", (community,)))

                for community_id, in result:
                    self._logger.info(u"deleting all data for community %s...", community_id)
                    cursor.execute(u"DELETE FROM community WHERE id == ?", (community_id,))
                    cursor.execute(u"DELETE FROM meta_message WHERE community == ?", (community_id,))
                    cursor.execute(u"DELETE FROM sync WHERE community == ?", (community_id,))
                    data_updated = True
            except StopIteration:
                continue

        if data_updated:
            connection.commit()
        cursor.close()
        connection.close()
Example #5
def selectByDateRange(prec_no, block_no, fromDate, toDate):
    __mylogger.info('selectByDateRange start!')

    db = None
    try:
        # open the database
        db = Connection(dbFilePath)

        # fetch the data
        statement = "SELECT * FROM WeatherInfo WHERE"
        statement = statement + " date BETWEEN '{fromDate}' and '{toDate}' ".format(fromDate=fromDate, toDate=toDate)
        result = np.array([])
        for item in db.cursor().execute(statement):
            if len(result) > 0:
                result = np.vstack([result, item])
            else:
                result = np.array(item)
        return result

    except Exception as ex:
        __mylogger.error('error has occurred.')
        __mylogger.error(ex)
        if db is not None:
            db.rollback()
    finally:
        if db is not None:
            db.close()
        __mylogger.info('selectByDateRange ended')
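Building the WHERE clause with str.format leaves the query open to SQL injection and breaks on values containing quotes. A minimal parameterized sketch of the same lookup, reusing dbFilePath and np from the example above (the helper name is hypothetical):

def selectByDateRangeParam(prec_no, block_no, fromDate, toDate):
    # Hypothetical variant: sqlite3 binds the values, so no quoting issues.
    # Unlike the example above, this also filters on prec_no and block_no.
    db = Connection(dbFilePath)
    try:
        cursor = db.execute(
            "SELECT * FROM WeatherInfo"
            " WHERE prec_no = ? AND block_no = ? AND date BETWEEN ? AND ?",
            (prec_no, block_no, fromDate, toDate),
        )
        return np.array(cursor.fetchall())
    finally:
        db.close()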
Example #6
 def __init__(self, dbpath, block_limit=None):
     self.dbpath = dbpath
     self.connection = Connection(dbpath)
     self.cursor = self.connection.cursor()
     self.cursor.executescript(schema)
     assert self.cursor is not None, "Database.close() has been called or Database.open() has not been called"
     assert self.connection is not None, "Database.close() has been called or Database.open() has not been called"
     self.time_limit = self._set_block_limit(block_limit)
Example #7
def _init(db_dir: Path) -> Connection:
    db = (db_dir / _SCHEMA).with_suffix(".sqlite3")
    db.parent.mkdir(parents=True, exist_ok=True)
    conn = Connection(db, isolation_level=None)
    init_db(conn)
    conn.executescript(sql("create", "pragma"))
    conn.executescript(sql("create", "tables"))
    return conn
Example #8
def importCsv(csvName):
    __mylogger.info('importing start!')

    db = None
    try:
        # read the CSV file
        df = pd.read_csv(csvName, encoding='utf-8', parse_dates=True, header=0)

        # Change the INTEGER columns' dtype to object.
        # For some reason int64 values end up stored in sqlite as BLOBs, and
        # casting to int8 or int32 garbles the data. Casting to object seems
        # to let the values be auto-detected as int on insert.
        df['県番号'] = df['県番号'].astype('object')
        df['地区番号'] = df['地区番号'].astype('object')
        df['時刻'] = df['時刻'].astype('object')
        df['湿度(%)'] = df['湿度(%)'].astype('object')

        '''
        # impute missing values
        def __conv00(val):
            if val == '--' or val == None:
                return 0.0
            else:
                return val
        df['降水量'] = df['降水量'].apply(__conv00)
        df['日照時間(h)'] = df['日照時間(h)'].fillna(0.0)
        df['全天日射量(MJ/m2)'] = df['全天日射量(MJ/m2)'].fillna(0.0)
        df['降雪(cm)'] = df['降雪(cm)'].fillna(0.0)
        df['降雪(cm)'] = df['降雪(cm)'].apply(__conv00)
        df['積雪(cm)'] = df['積雪(cm)'].fillna(0.0)
        df['積雪(cm)'] = df['積雪(cm)'].apply(__conv00)
        df['天気'] = df['天気'].fillna('')
        df['雨雲'] = df['雨雲'].fillna('')
        df['視程(km)'] = df['視程(km)'].fillna(0.0)
        '''

        # open the database
        db = Connection(dbFilePath)

        # bulk-insert the rows
        for idx in range(0, len(df)):
            __mylogger.debug("\n" + str(df.iloc[idx]))
            statement = "INSERT INTO WeatherInfo VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) "
            try:
                db.cursor().execute(statement, df.iloc[idx])
            except DatabaseError as insertEx:
                __mylogger.error('error has occurred.')
                __mylogger.error(insertEx)

        db.commit()

    except Exception as ex:
        __mylogger.error('error has occurred.')
        __mylogger.error(ex)
        if db is not None:
            db.rollback()
    finally:
        if db is not None:
            db.close()
        __mylogger.info('importing ended')
Example #9
 def __init__(self, uri, graphs=()):
     self.uri = uri
     self.db = Connection(
         database=uri,
         check_same_thread=False,
         isolation_level=None,
     )
     self.lock = Lock()
     self.setup_sql(graphs)
Example #10
    def open_database(self):
        # Check if db file exists, otherwise make a new one
        if path.exists(self.db_file):
            try:
                self.connection = Connection(self.db_file)
                print("[SQL] Connected to db " + self.db_file)
                return True
            except Exception as e:
                return e
        # Make a new db
        else:
            try:
                self.connection = Connection(self.db_file)
                print("[SQL] Created db " + self.db_file)
                self.make_table()
                return True

            except Exception as e:
                return e
Example #11
def _init(db_dir: Path, cwd: PurePath) -> Connection:
    ncwd = normcase(cwd)
    name = f"{md5(encode(ncwd)).hexdigest()}-{_SCHEMA}"
    db = (db_dir / name).with_suffix(".sqlite3")
    db.parent.mkdir(parents=True, exist_ok=True)
    conn = Connection(str(db), isolation_level=None)
    init_db(conn)
    conn.executescript(sql("create", "pragma"))
    conn.executescript(sql("create", "tables"))
    return conn
Example #12
def init_from_db(db_file, country_or_ip):
    db = Connection(db_file)
    conf = get_local_config()
    loop = asyncio.get_event_loop()
    root = os.path.expanduser(conf['client']['server_dir'])
    if not os.path.exists(root):
        os.mkdir(root)
    if os.path.exists(db_file):
        servers = []
        try:
            if re.match(r'^\d{1,3}', country_or_ip):
                servers = db.execute(
                    "select host,passwd,port,user from  Host where host like '%{ip}%'"
                    .format(ip=country_or_ip)).fetchall()
                [L({'ip': i[0], 'country': ip2geo(i[0])}) for i in servers]
            elif country_or_ip == '.':
                servers = db.execute(
                    "select host,passwd,port,user from  Host").fetchall()
                [L({'ip': i[0], 'country': ip2geo(i[0])}) for i in servers]

            else:
                servers = []
                for i in db.execute(
                        "select host,passwd,port,user from  Host").fetchall():
                    if country_or_ip in ip2geo(i[0]).lower():
                        L({'ip': i[0], 'country': ip2geo(i[0])})
                        servers.append(i)

        except Exception as e:
            logging.error(e)
            servers = []
        fs = []

        if input("sure to init ?:[y/other]").lower().strip() != 'y':
            return [{"msg": "exit "}]

        for server in servers:
            host, pwd, port, user = server
            config = {
                'server': host,
                'server_port': 53000,
                'password': os.urandom(6).hex(),
                'method': 'aes-256-cfb'
            }

            CLIENT_CONFIG = os.path.join(root, host)
            with open(CLIENT_CONFIG, 'w') as fp:
                json.dump(config, fp)
            L("--- to install ----")
            fs.append(wait_err(host, pwd, port, user, conf=CLIENT_CONFIG))
        return loop.run_until_complete(asyncio.gather(*fs))
    else:
        return []
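The lookups above splice country_or_ip into the SQL with str.format. A parameterized sketch of the same host queries, assuming the Host table from the example (helper name hypothetical):

def fetch_hosts(db, ip_fragment=None):
    # Hypothetical helper: bind the LIKE pattern instead of formatting it in.
    if ip_fragment:
        return db.execute(
            "SELECT host, passwd, port, user FROM Host WHERE host LIKE ?",
            ('%{}%'.format(ip_fragment),),
        ).fetchall()
    return db.execute("SELECT host, passwd, port, user FROM Host").fetchall()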
Example #13
def add_to_db(articles: list):
    connect = Connection(DB_FILENAME)
    cursor = connect.cursor()
    for article in articles:
        # print(article)
        values_string = '"' + '", "'.join(
            article) + '"'  # build the values string for the query
        query = 'INSERT INTO articles ' \
                f'VALUES ({values_string})'
        cursor.execute(query)
    connect.commit()
    connect.close()
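Joining double-quoted strings breaks as soon as an article field contains a quote, and it is open to SQL injection. A sketch of the same insert with executemany and bound placeholders, assuming every article has the same number of fields (helper name hypothetical):

def add_to_db_param(articles: list):
    # Hypothetical variant: one '?' placeholder per column; sqlite3 binds the
    # values, so quotes inside an article no longer break the statement.
    if not articles:
        return
    connect = Connection(DB_FILENAME)
    try:
        placeholders = ', '.join('?' * len(articles[0]))
        connect.executemany(
            'INSERT INTO articles VALUES ({})'.format(placeholders), articles)
        connect.commit()
    finally:
        connect.close()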
Example #14
def __isExistsDb():
    if not os.path.exists(dbFilePath):
        return False

    db = None
    try:
        db = Connection(dbFilePath)
        return True
    except Exception:
        return False
    finally:
        if db is not None:
            db.close()
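Note that Connection(path) succeeds even when the file exists but is not a SQLite database; nothing is read until a query runs. A stricter sketch that forces a read of the file header (helper name hypothetical, dbFilePath reused from above):

def __isValidDb():
    # PRAGMA schema_version forces SQLite to read the file header, so a
    # non-database file raises DatabaseError here.
    if not os.path.exists(dbFilePath):
        return False
    db = None
    try:
        db = Connection(dbFilePath)
        db.execute('PRAGMA schema_version')
        return True
    except Exception:
        return False
    finally:
        if db is not None:
            db.close()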
Example #15
def get_cursor(path, get_db=True):
    """
    `return` a `Cursor` object for the sqlite3 database specified by "path",
    which must be a string. Also `return` a `Connection` object by default,
    unless `get_db == False`.
    """

    db = Connection(path)
    curs = db.cursor()
    if get_db:
        return curs, db
    else:
        return curs
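Since the default returns both objects, callers unpack a (cursor, connection) pair; a short usage sketch with a hypothetical file name:

curs, db = get_cursor("example.db")
curs.execute("CREATE TABLE IF NOT EXISTS t (x INTEGER)")
db.commit()
db.close()

With get_db=False only the cursor comes back, so the caller can no longer commit or close the connection, which makes the default form the safer choice.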
Example #16
def calc_longitudinal_qc(infiles):
    qcmap = {}
    qcpsms = []
    psms = parse_psms(infiles['psmtable'], is_instrument_qc=True)
    header = next(psms)
    perrorix = header.index('PrecursorError(ppm)')
    qvalix = header.index('QValue')
    msgfix = header.index('MSGFScore')
    rtix = header.index('Retention time(min)')
    misclix = header.index('missed_cleavage')
    for line in psms:
        # FIXME filtering in galaxy? will be incorrect num of peptides
        if float(line[qvalix]) > 0.01:
            continue
        qcpsms.append(line)
        if int(line[misclix]) < 4:
            mckey = 'miscleav{}'.format(line[misclix])
            try:
                qcmap[mckey] += 1
            except KeyError:
                qcmap[mckey] = 1
    qcmap['perror'] = calc_boxplot([psm[perrorix] for psm in qcpsms])
    qcmap['msgfscore'] = calc_boxplot([psm[msgfix] for psm in qcpsms])
    qcmap['rt'] = calc_boxplot([psm[rtix] for psm in qcpsms])
    con = Connection(infiles['sqltable'])
    qcmap.update({
        'psms': len(qcpsms),
        'scans': con.execute('SELECT COUNT(*) FROM mzml').fetchone()[0],
    })
    peps = []
    with open(infiles['peptable']) as fp:
        header, lines = table_reader(fp)
        areaix = header.index('MS1 area (highest of all PSMs)')
        protix = header.index('Protein(s)')
        count = 0
        unicount = 0
        for line in lines:
            count += 1
            if ';' not in line[protix]:
                unicount += 1
            try:
                peps.append(line)
            except ValueError:
                pass
    qcmap['peparea'] = calc_boxplot([x[areaix] for x in peps])
    qcmap.update({'peptides': count, 'unique_peptides': unicount})
    with open(infiles['prottable']) as fp:
        qcmap['proteins'] = sum(1 for _ in fp) - 1
    return qcmap
Example #17
	def connect(self):
		table 		= """CREATE TABLE IF NOT EXISTS Users(
		id INTEGER PRIMARY KEY AUTOINCREMENT,
		email TEXT UNIQUE,
		password TEXT)"""

		self.conn 	= Connection("users.db")
		self.cur 	= self.conn.cursor()
		try:
			self.cur.execute(table)
			print("Table Created Successfully")
		except Exception:
			self.critical(None, self.tr("Table Creation Error"),self.tr("Table Could not be created"),
				self.Cancel)
Example #18
def mutator(mac):

    #category of some common variations, default is "variations on first byte" seen later
    category = {
        0: 'Registered OUI',
        1: 'Second nibble - 2',
        2: 'Second nibble - 6',
        3: 'Second nibble - 10'
    }

    #macvendors database taken from oui.txt from IEEE
    OUI_CON = Connection('./databases/macvendors.db')
    OUI_CUR = OUI_CON.cursor()

    #strip down the input mac or oui to just 6 hex digits, in text, uppercased
    oui = mac.replace(':', '').replace('-', '').replace('.', '').upper()[0:6]

    #pull first and second nibbles
    n1 = int(oui[0], 16) * 16
    n2 = int(oui[1], 16)

    #define all possible byte variations for later matching
    bytes = [n1 + n2, (n1 + n2) - 2, (n1 + n2) - 6, (n1 + n2) - 10,\
      n2 - 2, (0x10 + n2) - 2, (0x20 + n2) - 2, (0x30 + n2) - 2,\
      (0x40 + n2) - 2, (0x50 + n2) - 2, (0x60 + n2) - 2,
      (0x70 + n2) - 2, (0x80 + n2) - 2, (0x90 + n2) - 2,\
      (0xa0 + n2) - 2, (0xb0 + n2) - 2, (0xc0 + n2) - 2,\
      (0xd0 + n2) - 2, (0xe0 + n2) - 2, (0xf0 + n2) - 2]
    print(bytes)

    #uppercase, convert to hex strings, add back the rest of the oui, and pad leading zeroes
    bytes = [(hex(byte)[2:].upper() + oui[2:]).zfill(6) for byte in bytes]
    print(bytes)

    #look for all of them in the database using enumerate
    #using enumerate will keep a running tab of the current index to be
    #able to use the category dictionary above without if statements
    for index, byte in enumerate(bytes):
        OUI_CUR.execute(
            'SELECT vendor FROM macvendors WHERE mac="{}";'.format(byte))
        hit = OUI_CUR.fetchone()
        if hit is not None:
            result, oui_match = hit[0].rstrip(), byte
            OUI_CON.close()
            return (oui, oui_match, result,
                    category.get(index, 'Variations on first byte')
                    )  #default category

    return (oui, 'None', 'No Vendor Found', 'Unknown - Possibly random')
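The twenty per-variant lookups can be collapsed into a single query with an IN list of bound placeholders, assuming the same macvendors table (helper name hypothetical):

def lookup_variants(cursor, candidates):
    # One '?' per candidate OUI; the values are bound rather than formatted
    # into the SQL text.
    placeholders = ', '.join('?' * len(candidates))
    cursor.execute(
        'SELECT mac, vendor FROM macvendors WHERE mac IN ({})'.format(placeholders),
        candidates)
    return dict(cursor.fetchall())

The caller can then walk the bytes list in order and stop at the first key present in the returned dict, preserving the category mapping.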
Example #19
 def insert_parsed_data(self, parsed_data, primary_key):
     """Insert parsed data into the row with the given primary key
     """
     connect = Connection(self.dbname)
     cursor = connect.cursor()
     cursor.execute(
         """
     UPDATE
         data
     SET
         top_tube_min = :top_tube_min,
         top_tube_max = :top_tube_max,
         seat_tube_cc_min = :seat_tube_cc_min,
         seat_tube_cc_max = :seat_tube_cc_max,
         seat_tube_ct_min = :seat_tube_ct_min,
         seat_tube_ct_max = :seat_tube_ct_max,
         stem_min = :stem_min,
         stem_max = :stem_max,
         bb_saddle_min = :bb_saddle_min,
         bb_saddle_max = :bb_saddle_max,
         saddle_handlebar_min = :saddle_handlebar_min,
         saddle_handlebar_max = :saddle_handlebar_max,
         saddle_setback_min = :saddle_setback_min,
         saddle_setback_max = :saddle_setback_max,
         seatpost_setback = :seatpost_setback,
         status = :status
     WHERE
         id = :id
     """, {
             'top_tube_min': parsed_data['Top Tube'][0],
             'top_tube_max': parsed_data['Top Tube'][1],
             'seat_tube_cc_min': parsed_data['Seat Tube Range CC'][0],
             'seat_tube_cc_max': parsed_data['Seat Tube Range CC'][1],
             'seat_tube_ct_min': parsed_data['Seat Tube Range CT'][0],
             'seat_tube_ct_max': parsed_data['Seat Tube Range CT'][1],
             'stem_min': parsed_data['Stem Length'][0],
             'stem_max': parsed_data['Stem Length'][1],
             'bb_saddle_min': parsed_data['BB Saddle Position'][0],
             'bb_saddle_max': parsed_data['BB Saddle Position'][1],
             'saddle_handlebar_min': parsed_data['Saddle Handlebar'][0],
             'saddle_handlebar_max': parsed_data['Saddle Handlebar'][1],
             'saddle_setback_min': parsed_data['Saddle Setback'][0],
             'saddle_setback_max': parsed_data['Saddle Setback'][1],
             'seatpost_setback': 1 if parsed_data['Saddle Setback'] else 0,
             'status': 'processed',
             'id': primary_key,
         })
     connect.commit()
     connect.close()
Example #20
 def heal(self):
     """Change status from 'processing' back to 'unprocessed' for all rows,
     in case of a failed attempt
     """
     connect = Connection(self.dbname)
     cursor = connect.cursor()
     cursor.execute("""
     UPDATE
         data
     SET
         status = 'unprocessed'
     WHERE
         status = 'processing'
     """)
     connect.commit()
     connect.close()
Example #21
 def get_unprocessed_count(self):
     """Return number of unprocessed rows
     """
     connect = Connection(self.dbname)
     cursor = connect.cursor()
     cursor.execute("""
     SELECT
         count(*)
     FROM
         "data"
     WHERE
         status = "unprocessed"
     """)
     unprocessed = cursor.fetchone()
     connect.commit()
     connect.close()
     return unprocessed[0]
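Rather than embedding the status literal in the SQL, it can be bound as a parameter, which also sidesteps SQLite's quirk of treating double-quoted tokens as identifiers first and string literals only as a fallback. A sketch assuming the same data table (helper name hypothetical):

def count_by_status(dbname, status='unprocessed'):
    # Bind the status value instead of quoting it inline.
    connect = Connection(dbname)
    try:
        (count,) = connect.execute(
            'SELECT count(*) FROM data WHERE status = ?', (status,)
        ).fetchone()
        return count
    finally:
        connect.close()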
Example #22
 def insert_input_data(self, inseam, trunk, forearm, arm, thigh, leg, notch,
                       height):
     """Insert data into a table
     """
     connect = Connection(self.dbname)
     cursor = connect.cursor()
     cursor.execute(
         """
     INSERT INTO data
         (
             inseam,
             trunk,
             forearm,
             arm,
             thigh,
             lower_leg,
             sternal_notch,
             height,
             status
         )
     VALUES
         (
             :inseam,
             :trunk,
             :forearm,
             :arm,
             :thigh,
             :leg,
             :notch,
             :height,
             :status
         )
     """, {
             'inseam': inseam,
             'trunk': trunk,
             'forearm': forearm,
             'arm': arm,
             'thigh': thigh,
             'leg': leg,
             'notch': notch,
             'height': height,
             'status': 'unprocessed',
         })
     connect.commit()
     connect.close()
Example #23
class RedardCommand(Command):
    redard = re.compile(r"\bredard\b", re.I)
    database = Connection("merc.db", isolation_level=None)

    def handles(self, cmd):
        return bool(cmd)

    def handle_cmd(self, cmd, remainder, msg):
        if not self.allowed(msg):
            return False
        if not self.redard.search(msg.msg):
            return False
        cur = self.database.cursor()
        cur.execute("SELECT msg FROM red ORDER BY RANDOM() LIMIT 1")
        quote = cur.fetchone()
        if not quote:
            return False
        self.post(">Red: {}", quote[0])
        return True
Example #24
def prepareSqlite(out, featureClass, fileType, includeGeometry):
    [shp, shpType] = getShp(featureClass)
    if shpType == "point":
        gType = 1
    elif shpType == "multipoint":
        gType = 4
    elif shpType == "polyline":
        gType = 5
    elif shpType == "polygon":
        gType = 6
    fields = listFields(featureClass)
    fieldNames = []
    fieldNames.append("OGC_FID INTEGER PRIMARY KEY")
    if includeGeometry:
        fieldNames.append("GEOMETRY blob")
    for field in fields:
        if (fields[field] != u'OID') and field.lower() != shp.lower():
            fieldNames.append(parseFieldType(field, fields[field]))

    conn = Connection(out)
    c = conn.cursor()
    name = splitext(split(out)[1])[0]
    c.execute("""CREATE TABLE geometry_columns (
        f_table_name VARCHAR,
        f_geometry_column VARCHAR,
        geometry_type INTEGER,
        coord_dimension INTEGER,
        srid INTEGER,
        geometry_format VARCHAR )""")
    c.execute(
        """insert into geometry_columns (f_table_name, f_geometry_column, geometry_type, coord_dimension, srid, geometry_format) values (?,?,?,?,?,?)""",
        (name, "GEOMETRY", gType, 2, 4326, "WKB"))
    c.execute("""CREATE TABLE spatial_ref_sys (
        srid INTEGER UNIQUE,
        auth_name TEXT,
        auth_srid TEXT,
        srtext TEXT)""")
    c.execute(
        "insert into spatial_ref_sys (srid, auth_name, auth_srid, srtext) values (?,?,?,?)",
        (4326, u'EPSG', 4326,
         u'GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.0174532925199433,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]]'
         ))
    c.execute("create table {0}({1})".format(name, ", ".join(fieldNames)))
    return [name, c, conn]
Example #25
 def create(self):
     """Creates a db with all required fields
     """
     if exists(self.dbname):
         raise ValueError('Database {} already exists'.format(self.dbname))
     connect = Connection(self.dbname)
     cursor = connect.cursor()
     cursor.execute("""
     CREATE TABLE data (
         id integer primary key asc,
         inseam integer,
         trunk integer,
         forearm integer,
         arm integer,
         thigh integer,
         lower_leg integer,
         sternal_notch integer,
         height integer,
         top_tube_min real,
         top_tube_max real,
         seat_tube_cc_min real,
         seat_tube_cc_max real,
         seat_tube_ct_min real,
         seat_tube_ct_max real,
         stem_min real,
         stem_max real,
         bb_saddle_min real,
         bb_saddle_max real,
         saddle_handlebar_min real,
         saddle_handlebar_max real,
         saddle_setback_min real,
         saddle_setback_max real,
         seatpost_setback integer,
         status text
     )
     """)
     connect.commit()
     connect.close()
Example #26
    def _purge_old_search_metadata_communities(self):
        """
        Cleans up all SearchCommunity, MetadataCommunity and TunnelCommunity
        data in the dispersy database.
        """
        db_path = os.path.join(self.session.get_state_dir(), u"sqlite",
                               u"dispersy.db")
        if not os.path.isfile(db_path):
            return

        communities_to_delete = (u"SearchCommunity", u"MetadataCommunity",
                                 u"TunnelCommunity")

        connection = Connection(db_path)
        cursor = connection.cursor()

        for community in communities_to_delete:
            try:
                result = list(
                    cursor.execute(
                        u"SELECT id FROM community WHERE classification == ?;",
                        (community, )))

                for community_id, in result:
                    cursor.execute(u"DELETE FROM community WHERE id == ?;",
                                   (community_id, ))
                    cursor.execute(
                        u"DELETE FROM meta_message WHERE community == ?;",
                        (community_id, ))
                    cursor.execute(u"DELETE FROM sync WHERE community == ?;",
                                   (community_id, ))
            except StopIteration:
                continue

        cursor.close()
        connection.commit()
        connection.close()
Example #27
 def __init__(self, conn=Connection()):
     self.conn = conn
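Two caveats with this signature: sqlite3's Connection requires a database argument, so the bare Connection() default raises a TypeError, and a default argument is evaluated once at definition time, so every instance would share one connection. A common sketch (class name and in-memory path are assumptions):

class Store:
    def __init__(self, conn=None):
        # Create the default connection per instance instead of at def time.
        self.conn = conn if conn is not None else Connection(":memory:")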
Example #28
            'lower': q1 - 1.5 * iqr
        }
    else:
        return False


def parse_wc_output(wc_out):
    return int(wc_out[:wc_out.index(' ')])


lookup = sys.argv[1]
numpsms = parse_wc_output(sys.argv[2]) - 1
numpeps = parse_wc_output(sys.argv[3]) - 1
numuni = int(sys.argv[4]) - 1
numprot = parse_wc_output(sys.argv[5]) - 1
with Connection(lookup) as con:
    nrscans = con.execute('SELECT COUNT(*) FROM mzml').fetchone()[0]

qcout = {
    'nrpsms': numpsms,
    'nrscans': nrscans,
    'nrpeptides': numpeps,
    'nr_unique_peptides': numuni,
    'nrproteins': numprot,
    'missed_cleavages': {},
}

with open('tpsms') as fp:
    header = next(fp).strip('\n').split('\t')
    perrorix = header.index('PrecursorError(ppm)')
    calc_ms1data = True
Example #29
    def __init__(self):

        #Create the database:
        self.db = Connection("../data/mvm_titanium_tank_tour_progress.sq3",
                             check_same_thread=False)

        #Create the tour progress table and the medal recipients table.
        #The first table stores all the individual wave credits.
        #The second table stores the recipients of the participant medal.
        self.db.execute(
            "CREATE TABLE IF NOT EXISTS WaveCredits (Steam64 Text, TimeStamp Int, MissionIndex Int, WaveNumber Int)"
        )  #Steam64 needs to be text since Sourcepawn can't hold 64-bit numbers as ints - only as strings
        self.db.execute(
            "CREATE TABLE IF NOT EXISTS MedalOwners (Steam64 Text, TimeStamp Int)"
        )

        #Commit the query:
        self.db.commit()

        #Open 2 text files *in APPEND mode* and hold onto their handles forever.
        #As a backup, write the wave credits and medal recipients data as a flatfile, should the database become corrupted.
        self.f = open("../data/_Wave Credits.txt", mode="a", encoding="UTF-8")
        self.g = open("../data/_Medal Recepients.txt",
                      mode="a",
                      encoding="UTF-8")

        #From the config CSV files, load important tour and medal information:
        (self.promoid, self.steam_api_key, self.tt_api_key,
         self.completed_tour_tuple) = self.load_tour_information()

        #For optimal performance (and also as extra security), cache the tour data into a big dictionary.
        #This allows us to check if a player has completed the tour or not, without having to query the database every time.
        #
        #Test suite: Ran this with 5 million players simulation, the RAM usage didn't make a dent on the
        #server, so we should be good even though this is not scalable to infinity. (Can easily revert to
        #a scalable but slower approach if it ends up being an issue.)
        self.progress_dictionary = dict(
        )  #To win a medal, your steam ID must be fully packed with all the required completion flags.

        #In this set, store the steam IDs of players who have received the medal.
        #This acts as a sanity check to prevent the medal distributor from giving people multiple medals.
        self.medal_recepients = set()

        #Blank tuple to use when initializing a player's tour progress in the progress dictionary:
        #Each index in the tuple represents a mission's progress, and each number is a bitflag of the completed waves.
        self.blank_tour_tuple = (0, 0, 0, 0, 0, 0)

        #Since the HTTP server is threaded (1 thread per request), we cannot directly insert
        #the wave credits into the database and the dictionary in here, or else that's asking
        #for a huge headache of race conditions and bugs. For a server that determines if
        #someone gets an in-game item drop, that's something we want to completely avoid.
        #
        #The HTTP server will shove POST requests in this list, and then the worker thread that
        #runs on this class will grind the queue down if there's any pending request data.
        #
        #This solves the thread safety issue while also allowing the HTTP server to still
        #accept POST requests without any speed or throttling limitations.
        self.post_requests_queue = list()

        #Init the medal recipients set with steam IDs of people who received the medal:
        for x in self.db.execute("SELECT Steam64 FROM MedalOwners"):
            self.medal_recepients.add(int(x[0]))

        #Init the progress dictionary with the database's contents:
        for x in self.db.execute(
                "SELECT Steam64, TimeStamp, MissionIndex, WaveNumber FROM WaveCredits"
        ):
            self.insert_client_wave_credit(*x)
Example #30
    def __init__(self):

        #Grab the steam API key. We want the generic key, not the one used for medals.
        (_, self.steam_api_key, _) = pytt.get_steam_api_data()

        #Grab our cryptography keys to decrypt the server data:
        (self.aes_key, self.aes_iv, self.salt) = pytt.get_cryptography_keys()

        #Grab the tour data, and put the maps and the total number of waves in it:
        self.tour_maps_list = list()
        for (x, y, z) in pytt.get_tt_tour_data():
            self.tour_maps_list.append((x, y))

        #A null tour tuple. This represents a blank tour with no progress made on it.
        #While we build the null tuple, also compute the total number of wave credits for this tour.
        self.null_tuple = list()
        self.total_credits = 0
        for (x, y) in self.tour_maps_list:
            self.total_credits += y
            self.null_tuple.append((None, ) * y)

        #...actually make it into a tuple:
        self.null_tuple = tuple(self.null_tuple)

        #Cache the maximum number of waves any single mission has:
        self.max_waves = max(self.tour_maps_list, key=lambda j: j[1])[1]

        #Now init the tour database:

        #Create the database file:
        self.db_path = "../data/mvm_titanium_tank_tour_progress.sq3"
        self.db = Connection(self.db_path, check_same_thread=False)

        #Create the tour progress table. This contains ALL players' tour data.
        self.db.execute(
            "CREATE TABLE IF NOT EXISTS WaveCredits (Steam64 Text, TimeStamp Int, MissionIndex Int, WaveNumber Int)"
        )

        #Commit the query:
        self.db.commit()

        #Hold the database file's modification time stamp here.
        #We will check if the file was updated, and if so, refresh our tour data dictionary cache:
        self.mod_time = 0

        #Store the rowid of the most-recent loaded database entry.
        #This allows us to not have to load previously-cached data from the dictionary.
        self.row_id = 0

        #The big tour dictionary:
        self.tour_progress_dict = dict()

        #The dictionary that holds the tour server information:
        self.server_info_dict = dict()

        #IP address rate limit dictionary.
        #This gets dumped out once a minute, but is otherwise used to prevent a single IP address
        #from making too many GET/POST requests to this web server.
        self.ip_requests_count = dict()

        #The csv data containing the global tour statistics:
        self.global_data_csv = (bytes(), bytes())

        #For one of the global stats graphs, we want to keep track of the total number
        #of wave credits given each day, including duplicates.
        self.wave_credits_earned_per_day = dict()

        #Preload all the HTML webpages from the html folder into a dictionary.
        #
        #This will allow us to serve webpages to the client quickly without hammering the file system,
        #which allows us to skip some I/O overhead.
        #
        #The javascript, css, and images are served from GitHub so that we can easily update them
        #with a GitHub commit without having to restart this web server.
        self.html_pages = {
            "main": self.load_html_page("main.html"),
            "global": self.load_html_page("global.html"),
            "individual": self.load_html_page("individual.html"),
            "servers": self.load_html_page("servers.html"),
            None: self.load_html_page("404.html"),
            False: self.load_html_page("badprofile.html"),
        }