Code Example #1
File: example_annoy.py Project: zhouxiupeng/pymilvus
from milvus import Milvus, IndexType, MetricType

from utils import gen_collection, insert_data

if __name__ == '__main__':
    collection_name = "milvus_demo_annoy"

    client = Milvus()
    # use default host:127.0.0.1, port:19530
    client.connect()

    # create collection: dimension is 128, metric type is L2
    gen_collection(client, collection_name, 128, MetricType.L2)

    # insert 100,000 vectors into the collection
    _, _, vectors = insert_data(client, collection_name, 128, 100000)

    # flush data to disk for persistent storage
    client.flush([collection_name])

    # specify index param
    index_param = {
        "n_trees": 50,
    }

    # create `ANNOY` index
    print("Start create index ......")
    status = client.create_index(collection_name, IndexType.ANNOY, index_param)
    if status.OK():
        print("Create index ANNOY successfully\n")
    else:
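
The `gen_collection` and `insert_data` helpers above come from a local `utils` module that is not shown. A minimal sketch of what they might look like, assuming the 0.x pymilvus client API used in the example (`create_collection` taking a parameter dict, `insert` returning a status and the new ids); the `index_file_size` value is an illustrative assumption:

import random


def gen_collection(client, collection_name, dimension, metric_type):
    # create a collection with the requested dimension and metric (sketch only)
    param = {
        'collection_name': collection_name,
        'dimension': dimension,
        'index_file_size': 1024,   # assumed segment size
        'metric_type': metric_type,
    }
    return client.create_collection(param)


def insert_data(client, collection_name, dimension, num):
    # build `num` random vectors and insert them in a single call
    vectors = [[random.random() for _ in range(dimension)] for _ in range(num)]
    status, ids = client.insert(collection_name=collection_name, records=vectors)
    return status, ids, vectors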
Code Example #2
from pymongo import MongoClient
from kafka import KafkaConsumer
from utils import insert_data

# mongo =====
## Connect
cliente = MongoClient('mongodb://*****:*****@localhost:27017/')

## Select the database
banco = cliente.stagioptr

## collection
album = banco.emergency

#kafka======
consumer = KafkaConsumer('emergency')

print('Starting Emergency consumer!')

for message in consumer:
    insert_data(message, album, topic='emergency')
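
None of these consumer scripts show the `insert_data` helper they import from `utils`. A minimal sketch of what it might do, assuming the Kafka messages carry UTF-8 encoded JSON; the optional `fields` filter matches the sensor consumer further below, and the added `topic`/`inserted_at` keys are illustrative assumptions:

import json
from datetime import datetime


def insert_data(message, collection, fields=None, topic=None):
    # decode the Kafka message payload (assumed to be UTF-8 encoded JSON)
    doc = json.loads(message.value.decode('utf-8'))

    # keep only the requested fields when a filter list is given
    if fields is not None:
        doc = {key: doc[key] for key in fields if key in doc}

    # tag the document with its topic and an ingestion timestamp, then store it
    doc['topic'] = topic
    doc['inserted_at'] = datetime.utcnow()
    collection.insert_one(doc)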
Code Example #3
import argparse

import mysql.connector
from iexfinance.stocks import get_historical_intraday  # assumed source of get_historical_intraday

import credentials
import db_credentials
import utils

if __name__ == "__main__":

    parser = argparse.ArgumentParser(description='modelisation running')
    parser.add_argument('symbol', type=str, help='ticker symbol to fetch')
    args = parser.parse_args()
    trade_symbol = args.symbol

    # argparse always yields a string, so a simple truthiness check is enough
    if trade_symbol:
        try:
            df = get_historical_intraday(trade_symbol.upper(),
                                         output_format='pandas',
                                         token=credentials.token)
            df = df.fillna(method='ffill')
        except Exception as exc:
            # stop here; otherwise `df` would be undefined in the code below
            raise SystemExit("Failed to fetch intraday data: {}".format(exc))

        MYDB = mysql.connector.connect(
            host=db_credentials.host,
            user=db_credentials.user,
            passwd=db_credentials.passwd,
            database=db_credentials.database
        )

        # tag every row with the symbol and keep only the columns expected by raw_stock
        name = [str(trade_symbol) for i in range(len(df.date.values.tolist()))]
        df['name'] = name
        df = df[['date', 'label', 'name', 'high', 'low', 'open', 'close', 'volume', 'numberOfTrades']]
        VAL = [tuple(x) for x in df.values]
        SQL = ("INSERT INTO raw_stock (date, hour, name, high, low, open, close, volume, numberOfTrades) "
               "VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s)")
        utils.insert_data(MYDB, SQL, VAL)
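
Here `utils.insert_data` receives the open connection, the parameterized INSERT statement, and the list of row tuples. A minimal sketch of such a helper, assuming it simply batches the rows with `executemany` and commits:

def insert_data(connection, sql, values):
    # run the parameterized INSERT once per row tuple, then commit
    cursor = connection.cursor()
    try:
        cursor.executemany(sql, values)
        connection.commit()
    finally:
        cursor.close()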

Code Example #4
File: consumerSensor.py Project: stagioptr/ipbl2019
from utils import insert_data
from pymongo import MongoClient
from kafka import KafkaConsumer
from datetime import datetime

# mongo =====
# Connect
cliente = MongoClient('mongodb://*****:*****@localhost:27017/')

# Select the database
banco = cliente.stagioptr

# collection
album = banco.sensor

# kafka======
consumer = KafkaConsumer('sensor')

print('Starting Sensor consumer!')

for message in consumer:
    insert_data(message,
                album,
                ["temperatura", "giroscopio", "umidade", "acelerometro"],
                topic='sensor')
Code Example #5
from utils import insert_data
from pymongo import MongoClient
from kafka import KafkaConsumer

# mongo =====
## Connect
cliente = MongoClient('mongodb://*****:*****@localhost:27017/')

## Select the database
banco = cliente.stagioptr

## collection
album = banco.sleep

#kafka======
consumer = KafkaConsumer('sleep')

print('Starting Sleep consumer!')

for message in consumer:
    insert_data(message, album, topic='sleep')
Code Example #6
File: get_data.py Project: zoukuan1/china-map
import codecs
import csv

import numpy as np

import utils

# `names`, `sex`, `province`, `province_indexs`, `parties`, `two_flag`,
# `havefriend` and `playgame` are lists built earlier in get_data.py
four_flag = np.random.randint(0, 4, size=500)

# Assemble each student's record, using the random indices above to pick
# values from the corresponding lists, and save each record to the database
studentInfos = []
for i in range(0, 500):
    studentInfo = [
        names[i], sex[i],
        np.random.randint(18, 23),
        np.random.uniform(1.50, 2.00),
        np.random.uniform(70, 200), province[province_indexs[i]],
        parties[four_flag[i]],
        np.random.randint(100,
                          2000), havefriend[two_flag[i]], playgame[two_flag[i]]
    ]
    studentInfos.append(studentInfo)
    utils.insert_data(studentInfo)

# Write all student records out to a .csv file
file_csv = codecs.open("studentInfo.csv", 'w+', 'utf-8')
writer = csv.writer(file_csv,
                    delimiter=' ',
                    quotechar=' ',
                    quoting=csv.QUOTE_MINIMAL)
writer.writerow(
    ["姓名", "性别", "年龄", "身高", "体重", "省份", "党派", "消费", "有无男/女朋友", "是否打游戏"])
for studentInfo in studentInfos:
    writer.writerow(studentInfo)

# Running this .py file generates the data set and saves it
if __name__ == '__main__':
    print("Finished building the data set and saving it to the MySQL database")
Code Example #7
File: consumerFeeding.py Project: stagioptr/ipbl2019
from utils import insert_data
from pymongo import MongoClient
from kafka import KafkaConsumer

# mongo =====
## Connect
cliente = MongoClient('mongodb://*****:*****@localhost:27017/')

## Select the database
banco = cliente.stagioptr

## collection
album = banco.feeding

#kafka======
consumer = KafkaConsumer('feeding')

print('Starting Feeding consumer!')

for message in consumer:
    insert_data(message, album, topic='feeding')
Code Example #8
from utils import insert_data
from pymongo import MongoClient
from kafka import KafkaConsumer

# mongo =====
## Connect
cliente = MongoClient('mongodb://*****:*****@localhost:27017/')

## Select the database
banco = cliente.stagioptr

## collection
album = banco.emotional

#kafka======
consumer = KafkaConsumer('emotional')

print('Starting Emotional consumer!')

for message in consumer:
    insert_data(message, album, topic='emotional')