Code example #1
    def run(self):
        # Log live migration duration
        self.start = time.time()

        # Get connections to the source and target node
        lv_connection_from = util.connect(self.node_from)
        lv_connection_to = util.connect(self.node_to)

        # Success and error status
        success, error = False, None
        try:
            # Lookup domain on source node
            # This call might fail in the following scenario:
            # 1. Controller decides to migrate a VM and triggers migration thread
            # 2. VM gets deallocated by Rain
            # 3. Migration thread starts and tries to lookup the domain
            self.lv_domain = lv_connection_from.lookupByName(self.domain)

            # Trigger migration
            self.lv_domain = self.lv_domain.migrate(
                lv_connection_to,
                VIR_MIGRATE_LIVE | VIR_MIGRATE_UNDEFINE_SOURCE | VIR_MIGRATE_PERSIST_DEST,
                self.domain,
                None,
                0,
            )

            # Sleep a bit
            time.sleep(5)

            # Check if domain is actually there
            test_lv_domain = lv_connection_to.lookupByName(self.domain)

            # Migration is successful if domain was found on target node
            success = test_lv_domain is not None

        except libvirt.libvirtError as e:
            print("Migration failed")
            error = e
        except Exception as e:
            print("Migration failed")
            error = e
        except:
            print("Migration failed")
            error = sys.exc_info()[0]
        finally:
            # Log migration duration
            self.end = time.time()

            # Callback with success and error status
            self.migration_callback(
                self.domain, self.node_from, self.node_to, self.start, self.end, self.info, success, error
            )

            # Set exited flag
            self.exited = True
Code example #2
File: test_hashes.py Project: syegres8/scylla
def test_hset_return_changes(redis_host, redis_port):
    r = connect(redis_host, redis_port)
    key = random_string(10)
    field = random_string(10)
    val = random_string(10)

    assert r.hset(key, field, val) == 1
    assert r.hset(key, field, val) == 0
Code example #3
File: test_strings.py Project: tzach/scylla
def test_del_existent_key(redis_host, redis_port):
    r = connect(redis_host, redis_port)
    key = random_string(10)
    val = random_string(10)

    r.set(key, val)
    assert r.get(key) == val
    assert r.delete(key) == 1
Code example #4
File: test_strings.py Project: tzach/scylla
def test_set_ex(redis_host, redis_port):
    r = connect(redis_host, redis_port)
    key = random_string(10)
    val = random_string(10)

    assert r.execute_command('SET', key, val, 'EX', 100)
    time.sleep(1)
    assert r.ttl(key) == 99
Code example #5
File: test_strings.py Project: tzach/scylla
def test_setex_ttl(redis_host, redis_port):
    r = connect(redis_host, redis_port)
    key = random_string(10)
    val = random_string(10)

    assert r.setex(key, 100, val) == True
    time.sleep(1)
    assert r.ttl(key) == 99
Code example #6
def test_setex_ttl():
    r = connect()
    key = random_string(10)
    val = random_string(10)

    assert r.setex(key, 100, val) == True
    time.sleep(1)
    assert r.ttl(key) == 99
Code example #7
def test_exists_existent_key():
    r = connect()
    key = random_string(10)
    val = random_string(10)

    r.set(key, val)
    assert r.get(key) == val
    assert r.exists(key) == 1
Code example #8
def test_exists_lots_of_keys(redis_host, redis_port):
    r = connect(redis_host, redis_port)
    keys = []
    for i in range(0, 30):
        k = random_string(11)
        v = random_string(10)
        r.set(k, v)
        keys.append(k)
    assert r.exists(*keys) == len(keys)
Code example #9
def test_set_get_delete(redis_host, redis_port):
    r = connect(redis_host, redis_port)
    key = random_string(10)
    val = random_string(10)

    assert r.set(key, val) == True
    assert r.get(key) == val
    assert r.delete(key) == 1
    assert r.get(key) == None
Code example #10
File: test_strings.py Project: xifenmin/scylla
def test_set_get_delete():
    r = connect()
    key = random_string(10)
    val = random_string(10)

    assert r.set(key, val) == True
    assert r.get(key) == val
    assert r.delete(key) == 1
    assert r.get(key) == None
Code example #11
def consoomer(i, txn_queue, price_queue):
    web3 = connect()
    while True:
        if not txn_queue.empty():
            block_num, txnhash = txn_queue.get()
            txn = web3.eth.getTransaction(txnhash)
            t = (block_num, txnhash, txn['gasPrice'])
            price_queue.put(t)
            # update progress bar
            PBAR.update(1)
Code example #12
File: driver.py Project: KeithLatteri/awips2
    def connect(self):
        try:
            # XXX: should make this non blocking
            self._socket = connect(self.connection.host, self.connection.port)
            self._timeout = None
        except socket.error as e:
            if self.connection.reconnect:
                self._error(e, True)
                return
            else:
                raise
Code example #13
File: subclub.py Project: LKarel/subdl
    def find(self, query, lang=None):
        if lang != "et":
            # Subclub has only Estonian subtitles
            return []

        params = {
            "otsing": query.name,
            "tp": "nimi"
        }

        if query.imdb():
            params["otsing"] = query.imdb()
            params["tp"] = "kood"
        elif query.pointer:
            params["otsing"] += " %sx%s" % (query.pointer.season, query.pointer.episode)

        params = urllib.parse.urlencode(params)
        soup = util.connect("subclub.eu", "/jutud.php?" + params)

        ret = []

        for link in soup.find_all("a"):
            url = link.get("href")
            if "down.php" in url:
                subid = self._get_subid(url)

                if not subid:
                    continue

                soup_new = util.connect("subclub.eu", "/subtitles_archivecontent.php?id=" + subid)

                for anchor in soup_new.find_all("a"):
                    dl = "http://subclub.eu%s" % anchor.get("href")[2:]
                    filename = anchor.text.strip()
                    score = 0.4 + 0.6 * SequenceMatcher(None, query.filename, filename).ratio()

                    result = SubtitleResult(dl, score)
                    result.target_name = filename
                    ret.append(result)

        return ret
Code example #14
File: client6.py Project: zdpau/socket-MPI
def pingpong(host, port, size):
    s = util.connect(host, port)
    n_trials = 10 if size < 100000 else 1
    util.sendInt(s, size)  # sending the number of bytes
    util.sendInt(s, n_trials)
    before = time.time()
    data = b'x' * size
    for _ in range(n_trials):
        s.sendall(data)
        util.recvall(s, size)
    after = time.time()
    return (size * n_trials) / (after - before)
Code example #15
def test_strlen_wrongtype(redis_host, redis_port):
    r = connect(redis_host, redis_port)
    key1 = random_string(10)
    val1 = random_string(10)
    val2 = random_string(10)

    assert r.rpush(key1, val1)
    assert r.rpush(key1, val2)
    try:
        r.strlen(key1)
    except redis.exceptions.ResponseError as ex:
        assert str(ex) == 'WRONGTYPE Operation against a key holding the wrong kind of value'
Code example #16
def test_select_invalid_db(redis_host, redis_port):
    r = connect(redis_host, redis_port)
    logger.debug('Assume that user will not set redis_database_count to be bigger than 100')
    invalid_db_idx = 100

    logger.debug('Try to switch to invalid database {}'.format(invalid_db_idx))
    try:
        query = 'SELECT {}'.format(invalid_db_idx)
        r.execute_command(query)
        raise Exception('Expect that `{}` does not work'.format(query))
    except redis.exceptions.ResponseError as ex:
        assert str(ex) == 'DB index is out of range'
Code example #17
def main():
    os.chdir(directory)

    nodes_list = []
    with open(list_nodes, 'r') as nodes_file:
        for line in nodes_file:
            if "#" not in line:
                nodes_list.append(line.split(':')[0])

    nodes = []
    print "Nodes:"
    print "[0] All"
    for i, node in enumerate(nodes_list):
        print "[%d] %s" % (i + 1, node)

    servers = raw_input("\nSelect node(s) separated by a comma: ")
    servers = servers.strip().split(",")
    print ''

    if '0' in servers:
        nodes = nodes_list
    else:
        for index in servers:
            nodes.append(nodes_list[int(index) - 1])

    with open(log_server, 'w') as log:
        log.write(datetime.now().strftime(datetime_format) + '\n\n')

        for node in nodes:
            print "retrieving node log: " + node
            with open(log_node % node, 'w') as node_log:
                connection = util.connect(node)

                if connection is not None:
                    with util.get_scp(connection) as scp:
                        scp.get(log_server, 'tmp_' + log_server)

                    with open('tmp_' + log_server, 'r') as node_log_file:
                        for line in node_log_file:
                            node_log.write(line)
                            log.write(line)

                    log.write('\n\n')
                    connection.close()
                else:
                    node_log.write("failed to connect")
                    log.write("failed to connect to node: " + node + '\n\n')
                    print "failed to connect to node: " + node

    os.remove('tmp_' + log_server)
    # os.startfile(log_server)

    print "\ndone"
Code example #18
def producer(block_nums, txn_queue, price_queue, outfile):
    web3 = connect()

    for i, block_num in enumerate(block_nums):

        block = web3.eth.getBlock(block_num)
        date = datetime.utcfromtimestamp(block['timestamp'])

        for txnhash in block['transactions']:
            # add all txns to txn queue
            txn_queue.put((block_num, txnhash))

        # write results periodically to file
        # TODO: parameterize this
        if i // 50 and i % 50 == 0:
            part = i // 50
            logging.info(f"Writing part {part} to file.")
            write_to_file(price_queue,
                          f"{outfile}_{part}.csv",
                          part=part,
                          ntxns=5000)
    '''
    producer thread waits for consumer threads.
    Maybe not the most elegant solution, but it does decouple
    main thread from consumer threads so the main only needs
    to keep track of (i.e. "join") the one producer thread.
    '''

    while not txn_queue.empty():
        # wait for price queue to populate
        # TODO: might still fail with lots of lag time on API calls
        # TODO: approximate sleep time based on query params
        time.sleep(30)

    logging.info("Transaction queue is empty.")
    '''
    NOTE: Just because the txn queue is empty,
    doesn't mean the operation is over. The last
    "popped" transaction might still be in
    processing by one of the consumer threads.

    To ensure this isn't the case, we make sure
    the price queue hasn't changed since we last
    checked.
    '''
    qsize = None
    while qsize != price_queue.qsize():
        # wait until the price queue has stopped growing since the last check
        qsize = price_queue.qsize()
        # TODO: might still fail with lots of lag time on API calls
        time.sleep(10)

    write_to_file(price_queue, f"{outfile}_{i // 50 + 1}.csv", part=part)
    PBAR.close()
    return
Code example #19
def retrieveVideos():
    youtube = credentialVerification()
    request = youtube.videos().list(
        part="snippet,contentDetails,statistics",
        id="yDKWmNpw7gE, 3uk6rKXbG1M, p-aVhSEO8Ro, t_CbWtSSHMw, z7Tadx4XGjA"
    )
    response = request.execute()

    # connect to database
    cursor, connection = connect()

    # collect data on search results
    for video in response['items']:
        VIEWS = video['statistics']['viewCount']
        LIKES = video['statistics']['likeCount']
        DISLIKES = video['statistics']['dislikeCount']
        FAV = video['statistics']['favoriteCount']
        TITLE = video['snippet']['title']
        DESCRP = video['snippet']['description']
        CHAN_ID = video['snippet']['channelId']
        CHAN_TITLE = video['snippet']['channelTitle']
        videoID = video['id']
        URL = "https://youtube.com/watch?v=" + videoID
        searchQ = 'control'

        print(VIEWS, LIKES, DISLIKES, FAV, TITLE, DESCRP, CHAN_ID, CHAN_TITLE, videoID, URL)

        cursor.execute(
            """INSERT INTO video (title, descr, views, likes, dislikes, v_id, channel_id, channel_name, "searchQ", url)
            VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s);""",
            (TITLE, DESCRP, VIEWS, LIKES, DISLIKES,
             videoID, CHAN_ID, CHAN_TITLE,
             searchQ, URL))
        connection.commit()
        print("Inserted into Video Table")

        closeConnection(connection, cursor)
        cursor, connection = connect()
        makeRequestCommentThread(videoID, youtube, cursor, connection)
Code example #20
def test_delete_hash(redis_host, redis_port):
    r = connect(redis_host, redis_port)
    key = random_string(10)
    field = random_string(10)
    val = random_string(10)
    field2 = random_string(10)
    val2 = random_string(10)

    assert r.hset(key, field, val) == 1
    assert r.hset(key, field2, val2) == 1
    assert r.hgetall(key) == {field: val, field2: val2}
    assert r.delete(key) == 1
    assert r.hgetall(key) == {}
Code example #21
def entry(host, dry_run):
    try:
        local = Context()
        c = util.connect(host, sudo=True)

        PASS.unlock()  # TODO: only open if needed

        context = build_context(local)

        if dry_run:
            print("DRY RUN")

            @dataclass
            class success:
                ok: bool = True
                exited: int = 0
                stdout: str = ""

            def just_print(*args, **kwargs):
                args = " ".join(args)
                print(f"{args}")
                return success()

            c.run = just_print
            c.sudo = just_print
            c.put = just_print

        # TODO: validate context with jsonschema

        start_time = datetime.now()

        pre_deploy(c, local, context)
        deploy(c, context)
        post_deploy(c, context)

        # util.print_json(context)

        end_time = datetime.now()

        elapsed = end_time - start_time
        total_seconds = int(elapsed.total_seconds())
        hours, remainder = divmod(total_seconds, 60 * 60)
        minutes, seconds = divmod(remainder, 60)

        log.success(
            f"deployment complete, took {hours:02d}:{minutes:02d}:{seconds:02d}"
        )
    except KeyboardInterrupt:
        pass
    except Exception as err:
        log.error(err)
Code example #22
    def do_work(self, user):
        client = util.connect(url)
        login_response = client.service.login(user, 'password')
        #If connecting to the cookie-based server, the response is just "OK"
        token = self.client.factory.create('RequestHeader')
        if login_response != "OK":
            token.session_id = login_response.session_id
        token.app_name = "Unit Test"

        client.set_options(cache=None, soapheaders=token)
        n = util.create_network_with_data(client, new_proj=True)
        util.get_network(client, n.id)

        client.service.logout(user)
Code example #23
def test_hset_multiple_key_field(redis_host, redis_port):
    # This test requires the library to support multiple mappings in one
    # command, or we cannot test this feature. This was added to redis-py
    # in version 3.5.0, on April 29, 2020.
    if Version(redis.__version__) < Version('3.5.0'):
        pytest.skip('redis-py library too old to run this test')
    r = connect(redis_host, redis_port)
    key = random_string(10)
    field = random_string(10)
    val = random_string(10)
    field2 = random_string(10)
    val2 = random_string(10)

    assert r.hset(key, None, None, {field: val, field2: val2}) == 2
Code example #24
def test_strlen(redis_host, redis_port):
    r = connect(redis_host, redis_port)
    key1 = random_string(10)
    val1 = random_string(10)
    key2 = random_string(10)
    val2 = random_string(1000)
    key3 = random_string(10)

    assert r.set(key1, val1) == True
    assert r.set(key2, val2) == True
    r.delete(key3)
    assert r.strlen(key1) == 10
    assert r.strlen(key2) == 1000
    assert r.strlen(key3) == 0
Code example #25
    def connect_zx(self, z):
        def lambda_negexp(x_in):
            return tf.exp(-x_in)

        def lambda_negsum(x_in):
            return tf.reduce_sum(-x_in[0], axis=1) + tf.reduce_sum(-x_in[1],
                                                                   axis=1)

        z1 = z[0]
        z2 = z[1]
        self.input_z1 = z1
        self.input_z2 = z2

        y2 = z2
        self.Szy_layer = connect(z2, self.S2)
        self.Tzy_layer = connect(z2, self.T2)
        z1_minus_Tz2 = tf.keras.layers.Subtract()([z1, self.Tzy_layer])
        y1 = tf.keras.layers.Multiply()([
            z1_minus_Tz2,
            tf.keras.layers.Lambda(lambda_negexp)(self.Szy_layer)
        ])

        self.output_x1 = y1
        self.Syx_layer = connect(y1, self.S1)
        self.Tyx_layer = connect(y1, self.T1)
        y2_minus_Ty1 = tf.keras.layers.Subtract()([y2, self.Tyx_layer])
        self.output_x2 = tf.keras.layers.Multiply()([
            y2_minus_Ty1,
            tf.keras.layers.Lambda(lambda_negexp)(self.Syx_layer)
        ])

        # log det(dx/dz)
        self.log_det_zx = tf.keras.layers.Lambda(lambda_negsum)(
            [self.Szy_layer, self.Syx_layer])

        return [self.output_x1, self.output_x2
                ] + z[2:]  # append other layers if there are any
Code example #26
def test_select(redis_host, redis_port):
    r = connect(redis_host, redis_port)
    key = random_string(10)
    val = random_string(4096)
    r.set(key, val)
    assert r.get(key) == val

    logger.debug('Switch to database 1')
    assert r.execute_command('SELECT 1') == 'OK'
    assert r.get(key) == None

    logger.debug('Switch back to default database 0')
    assert r.execute_command('SELECT 0') == 'OK'
    assert r.get(key) == val
    r.delete(key)
    assert r.get(key) == None
Code example #27
def corrupt_files():
    # connect to ftp
    util.connect(PRIMARY_PATH)
    global sorted_files_list
    #sort wav files
    wav_files_list_with_timestamp = util.sort_on_ftp_time(
        PRIMARY_PATH, character)
    sorted_files_list = [
        element[1] for element in wav_files_list_with_timestamp
    ]
    # print sorted_files_list

    connect()
    if 'CorruptFiles' in ftp.nlst():
        pass
    else:
        ftp.mkd('CorruptFiles')
    #reading wav header
    wav_file_list = []
    for wav_file in sorted_files_list:
        try:
            wav_header, extra_header = util.get_wavheader_extraheader(wav_file)
            blockalign = wav_header["BlockAlign"]
            samplerate = wav_header["SampleRate"]

            # check for wav file size
            if util.check_wav_file_size(wav_file, blockalign, samplerate):
                pass
            else:
                wav_file_list.append(wav_file)
                ftp.rename(PRIMARY_PATH + wav_file,
                           PRIMARY_PATH + 'CorruptFiles/' + wav_file)
        except socket.error:
            util.connect(PRIMARY_PATH)
        except ftplib.error_temp:
            util.connect(PRIMARY_PATH)
        except struct.error:
            util.connect(PRIMARY_PATH)

    connect()
    ftp.cwd(PRIMARY_PATH + 'CorruptFiles')
    corrupt_count = ftp.nlst()
    print("Corrupt files:", corrupt_count)
    print("No. of corrupt files moved:", len(corrupt_count))
Code example #28
 def serverConn(self):
     try:
         cert = base64.b64encode(cc.get_certificate())
         puk = base64.b64encode(cc.cert_puk(cc.get_certificate()))
         nounce = util.generate_nonce()
         sign = base64.b64encode(cc.sign(nounce))
     except:
         cert = ""
         puk = ""
         nounce = ""
         sign = ""
     out = util.connect(1, base64.b64encode(self.name), self.id,
                        self.ciphers, {
                            "cert": cert,
                            "puk": puk,
                            "nounce": str(nounce),
                            "sign": sign
                        })
     self.ss.send(json.dumps(out) + TERMINATOR)
Code example #29
def test_exists_multiple_existent_key(redis_host, redis_port):
    r = connect(redis_host, redis_port)
    key1 = random_string(10)
    val1 = random_string(10)
    key2 = random_string(10)
    val2 = random_string(10)
    key3 = random_string(10)
    val3 = random_string(10)
    key4 = random_string(10)

    r.set(key1, val1)
    r.set(key2, val2)
    r.set(key3, val3)
    r.delete(key4)
    assert r.get(key1) == val1
    assert r.get(key2) == val2
    assert r.get(key3) == val3
    assert r.get(key4) == None
    assert r.exists(key1, key2, key3, key4) == 3
Code example #30
File: query.py Project: LKarel/subdl
    def imdb(self):
        if not self._imdb:
            query = urllib.parse.quote(str(self))
            soup = util.connect("www.imdb.com", "/find?q=%s" % query)

            for a in soup.select("tr.findResult td.result_text a"):
                href = a.get("href")

                if not href.startswith("/title/"):
                    continue

                match = re.match(r"^/title/(?P<id>\w+)/", href)

                if not match:
                    continue

                self._imdb = match.group("id")
                break

        return self._imdb
Code example #31
File: server.py Project: UMWRG/HydraPlatform
    def setUp(self):
        logging.getLogger('suds').setLevel(logging.ERROR)
        logging.getLogger('suds.client').setLevel(logging.CRITICAL)
        logging.getLogger('suds.metrics').setLevel(logging.CRITICAL)
        # Clear SUDS cache:
        #shutil.rmtree(os.path.join(tmp(), 'suds'), True)
        global CLIENT
        if CLIENT is None:
            CLIENT = util.connect(self.url)

        self.client = CLIENT

        self.login('root', '')

        self.create_user("UserA")
        self.create_user("UserB")
        self.create_user("UserC")
        self.project_id = self.create_project().id

        self.fmt = config.get('DEFAULT', 'datetime_format', "%Y-%m-%dT%H:%M:%S.%f000Z")
Code example #32
def test_hdel_several_keys(redis_host, redis_port):
    r = connect(redis_host, redis_port)
    key = random_string(10)
    field1 = random_string(10)
    val1 = random_string(10)
    field2 = random_string(10)
    val2 = random_string(10)
    field3 = random_string(10)
    val3 = random_string(10)

    # Set three key/field pairs
    assert r.hset(key, field1, val1) == 1
    assert r.hset(key, field2, val2) == 1
    assert r.hset(key, field3, val3) == 1

    # Delete 2 of them
    assert r.hdel(key, field1, field3) == 2

    # Check the remaining item
    assert r.hgetall(key) == {field2: val2}
Code example #33
File: statg.py Project: olivierch/openBarter
def statg(options):
    prims.execSql('statg.sql')
    dbcon = util.connect()
    cursor = dbcon.cursor()
    cols = None
    try:
        cols = stat(cursor, options)

    except KeyboardInterrupt:
        print('interrupted by user')

    finally:
        try:
            cursor.close()
            # print "cursor closed"
        except Exception as e:
            print("Exception while trying to close the cursor")
        try:
            dbcon.close()
            # print "DB close"
        except Exception as e:
            print("Exception while trying to close the connection")
Code example #34
def collect_inputs(search_q):
    try:
        cursor, connection = connect()

        cursor.execute(
            """SELECT video.likes, video.dislikes, views, title,descr,
                        clean_descr, channel_name, video.duration, AVG(comments.prob_pos)*100,
                        AVG(comments.prob_neg)*100,
                        AVG(comments.prob_neutral)*100, v_id, url
                        FROM comments right join video
                        ON comments.video_id = video.v_id
                        WHERE "searchQ"=%s
                        GROUP BY v_id, video.likes, video.dislikes, views, title
                        ORDER BY views desc;""", (search_q, ))
        results = cursor.fetchall()

        return results

    except Exception as e:
        print("Exception in collectInputs:", e)
    finally:
        closeConnection(connection, cursor)
Code example #35
File: script.py Project: Web5design/meetup-graph
pancakes = add(name='pancakes')
waffles = add(name='waffles')
liver = add(name='liver')
homebrew = add(name='homebrew')
documentation = add(name='documentation')
cold = add(name='the cold')
snakes = add(name='snakes')
money = add(name='money')
yankees = add(name='yankees')
nothing = add(name='nothing')
cigs = add(name='cigs')
pennies = add(name='pennies')
tennis = add(name='tennis')

#add what people like
connect(zac, beer, 'likes')
connect(afsheen, cigs, 'likes')
connect(afsheen, zac, 'dislikes')
connect(afsheen, pennies, 'dislikes')
connect(zac, afsheen, 'dislikes')
connect(vlad, beer, 'likes')
connect(vlad, cleaning, 'likes')
connect(cleaning, brian, 'likes')
connect(zac, sgt, 'likes')
connect(brian, starcraft, 'likes')
connect(brian, starcraft2, 'dislikes')
connect(hogan, dc, 'likes')
connect(paul, sgt, 'works')
connect(paul, sgt, 'likes')
connect(paul, cleaning, 'dislikes')
connect(patrick, sgt, 'works')
Code example #36
File: server.py Project: UMWRG/HydraPlatform
def connect():
    return util.connect()
Code example #37
File: compare.py Project: chenyuntc/crawler
        r = urlopen(url)
        rr = eval(r.read())
        if 'errorCode' in rr:
            print('--------------------')
            print('error')
            print(ii, url, rr)
            print('--------------------')
            continue
        mlog_data = rr['series']
        moji_data = [qq['aqi'] for qq in ii['info']]
        n = len(moji_data) if len(moji_data) < len(mlog_data) else len(mlog_data)
        mse = sqrt(float(sum(map(lambda x: (x[0] - x[1]) ** 2, zip(moji_data, mlog_data)))) / n)
        mae = float(sum(map(lambda x: abs(x[0] - x[1]), zip(moji_data, mlog_data)))) / n
        results[ii['id']] = (mae, mse)
    c = db.compare_results
    c.insert_one({
        'key': 'moji_compare_results',
        'data': results,
        # info: stores the comparison results against Moji; data is a dict mapping id -> (mae, mse)
        'info': u'储存与墨迹对比的结果 data是个字典 id->(mae,mse)'
    })
    return remind_info


if __name__ == '__main__':
    host = '54.223.178.198'  # host = '172.31.11.244'
    db = connect(host)
    all_cities = get_cityid(db)
    remind_info = get_compare(all_cities, db)
Code example #38
File: subscene.py Project: LKarel/subdl
    def find(self, query, lang=None):
        lang = self._convert_lang(lang)

        search = str(query)

        if query.filename and not query.pointer:
            search = query.filename

        params = urllib.parse.urlencode({"q": search})

        if not query.pointer and query.filename == query.name:
            soup = util.connect("subscene.com", "/subtitles/title?" + params)
            sub_links_page = self._find_movie_by_name(query, soup)
        else:
            sub_links_page = "/subtitles/release?" + params

        if not sub_links_page:
            return []

        soup = util.connect("subscene.com", sub_links_page)

        sub_links = []
        for sub in soup.find_all("a"):
            if lang not in sub.get("href"):
                continue

            spans = sub.find_all("span")
            if len(spans) <= 1 or not spans[0].contents:
                continue

            link_name = spans[1].contents[0].strip()
            link_query = Query.parse(link_name)

            if str(link_query.pointer) != str(query.pointer):
                continue

            if SequenceMatcher(None, query.name, link_query.name).ratio() < 0.8:
                continue

            sub_links.append({
                "filename": link_name + ".srt",
                "url": sub.get("href"),
                "score": SequenceMatcher(None, query.filename, link_name.lower()).ratio()
            })

        sub_links = sorted(sub_links, key=lambda v: v["score"], reverse=True)
        ret = []
        i = 0

        for item in sub_links:
            soup = util.connect("subscene.com", item["url"])
            dl_button = soup.find(id="downloadButton")
            dl_link = dl_button.get("href")

            rating = 0
            rating_title = soup.find("span", class_="rating-bar")

            if rating_title:
                rating = self._extract_rating(rating_title["title"])

            score = (rating / 10) * 0.15 + 0.6 * item["score"]
            result = SubtitleResult("http://subscene.com" + dl_link, score)
            result.target_name = item["filename"]
            result.zipped = True

            ret.append(result)
            i += 1
            if i == 10:
                break

        return ret