Code example #1
File: train.py Project: Morgan-Griffiths/PokerAI
def dual_learning_update(actor, critic, target_actor, target_critic, params,
                         rank):
    mongo = MongoDB()
    actor.train()
    query = {'training_round': params['training_round'], 'rank': rank}
    projection = {
        'obs': 1,
        'state': 1,
        'betsize_mask': 1,
        'action_mask': 1,
        'action': 1,
        'reward': 1,
        '_id': 0
    }
    data = mongo.get_data(query, projection)
    for i in range(params['learning_rounds']):
        policy_losses = []
        losses = []
        for poker_round in data:
            update_actor_critic(poker_round, critic, target_critic, actor,
                                target_actor, params)
        soft_update(critic, target_critic, params['device'])
        soft_update(actor, target_actor, params['device'])
    mongo.close()
    del data
    return actor, critic, params
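
The PokerAI snippets in this listing call `MongoDB().get_data(query, projection)` and `mongo.close()` without showing the wrapper itself. A minimal pymongo-backed sketch of what such a wrapper might look like (the database and collection names here are assumptions, not taken from the project) is:

# Hypothetical sketch of the MongoDB wrapper assumed by the PokerAI examples.
# 'poker' and 'game_data' are placeholder names.
from pymongo import MongoClient

class MongoDB:
    def __init__(self, host='localhost', port=27017):
        self.client = MongoClient(host, port)
        self.db = self.client['poker']          # assumed database name
        self.collection = self.db['game_data']  # assumed collection name

    def get_data(self, query, projection):
        # Return a cursor over documents matching `query`, limited to the
        # fields listed in `projection`.
        return self.collection.find(query, projection)

    def close(self):
        self.client.close()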
Code example #2
File: index_db.py Project: l1905/qau_search
class SearchIndex(object):
    """docstring for SearchIndex"""
    def __init__(self):
        self.schema = schema
        self.collect = MongoDB()

    def create_index(self):
        if not os.path.exists("index"):
            os.mkdir("index")
            index = create_in("index", schema)
            writer = index.writer()
            for post in self.collect.find():
                writer.update_document(title=post['title'],
                                        content=post['content'],
                                        nid=unicode(post["_id"]),
                                        url=post['url'])
            writer.commit()

    def get_index_info(self, query):
        index = open_dir("index")
        with index.searcher() as searcher:
            parser = QueryParser("content", index.schema)
            my_query = parser.parse(query)
            result = []
            resp = searcher.search(my_query, limit=None)
            if resp:
                for one in resp:
                    post = self.collect.find_one(ObjectId(one["nid"]))
                    result.append({"title":one['title'], "url":post['url'], "content":one.highlights("content")})
                    # return one['title']
                    # print one["title"]
                    # print post['url']
                    # print one.highlights("content")
                    # print "###############"
            return result, len(result)
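
A typical way to use `SearchIndex` (assuming the module-level `schema` and the Whoosh imports from the project) would be:

# Hypothetical usage sketch; the search term 'mongodb' is just an example.
si = SearchIndex()
si.create_index()                              # builds the Whoosh index on first run
results, count = si.get_index_info('mongodb')  # query against the 'content' field
for hit in results:
    print(hit['title'] + ' -> ' + hit['url'])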
Code example #3
File: train.py Project: Morgan-Griffiths/PokerAI
def batch_learning_update(actor, critic, target_actor, target_critic, params):
    mongo = MongoDB()
    actor.train()
    query = {'training_round': params['training_round']}
    projection = {
        'obs': 1,
        'state': 1,
        'betsize_mask': 1,
        'action_mask': 1,
        'action': 1,
        'reward': 1,
        '_id': 0
    }
    db_data = mongo.get_data(query, projection)
    trainloader = return_trajectoryloader(db_data)
    for _ in range(params['learning_rounds']):
        losses = []
        for i, data in enumerate(trainloader):
            critic_loss = update_actor_critic_batch(data, actor, critic,
                                                    target_actor,
                                                    target_critic, params)
            losses.append(critic_loss)
        # print(f'Learning Round {i}, critic loss {sum(losses)}, policy loss {sum(policy_losses)}')
    mongo.close()
    return actor, critic, params
Code example #4
File: visualize.py Project: hobbit19/PokerAI
def plot_critic_values(training_round=0):
    query = {
        # 'position':args.position,
        # 'training_round':args.run
    }
    projection = {'values': 1, 'reward': 1, 'action': 1, '_id': 0}
    mongo = MongoDB()
    # for position in [pdt.PositionStrs.SB,pdt.PositionStrs.BB]:
    # query['position'] = position
    data = mongo.get_data(query, projection)
    rewards = []
    actions = []
    values = []
    for point in data:
        rewards.append(point['reward'])
        values.append(point['values'])
        actions.append(point['action'])
    M = len(values)
    # plot value loss over time
    interval = M // 4
    values = np.vstack(values)
    rewards = np.vstack(rewards)
    actions = np.array(actions)
    mask = np.zeros((actions.size, pdt.Action.RAISE), dtype=bool)
    mask[np.arange(actions.size), actions] = 1
    critic_loss = values[mask].reshape(M, 1) - rewards
    critic_loss_rolling_mean = []
    for i in range(len(critic_loss) - interval):
        critic_loss_rolling_mean.append(np.mean(critic_loss[i:interval + i]))
    plot_data(f'Critic loss ', [critic_loss_rolling_mean], ['Values'])
Code example #5
    def process_item(self, item, spider):
        mongo_ins = MongoDB()
        if isinstance(item, TopicBriefItem):
            if mongo_ins.check_topic_brief_exisit(item.get('target')) is False:
                mongo_ins.topic_brief.insert(dict(item))

        elif isinstance(item, TopicInfoItem):
            if mongo_ins.check_topic_info_exisit(item.get('target')) is False:
                mongo_ins.topic_info.insert(dict(item))
Code example #6
 def __init__(self):
     self.mysql = MySQL()
     self.mongo = MongoDB()
     self.query = Query()
     cfg = getConfig()
     self.mysql.init_cfg(cfg)
     self.mongo.init_cfg(cfg)
     self._con = self.mysql.get_connection()
     self._client = self.mongo.get_client()
     self.query.init_db(self._con, self._client)
     self._db = self._client[cfg.MONGODB_COLLECTION]
Code example #7
File: visualize.py Project: hobbit19/PokerAI
def plot_betsize_probabilities(training_round=0):
    query = {'training_round': training_round}
    projection = {'betsizes': 1, 'hand': 1, '_id': 0}
    params = {'interval': 100}
    mongo = MongoDB()
    gametype = "Omaha"
    # SB
    for position in [pdt.PositionStrs.SB, pdt.PositionStrs.BB]:
        query['position'] = position
        data = mongo.get_data(query, projection)
        betsize, unique_hands, unique_betsize = mongo.betsizeByHand(
            data, params)
        hand_labels = [
            f'Hand {pdt.Globals.KUHN_CARD_DICT[hand]}' for hand in unique_hands
        ]
        action_labels = [size for size in unique_betsize]
        plot_frequencies(
            f'{gametype}_betsize_probabilities_for_{query["position"]}',
            betsize, hand_labels, action_labels)
Code example #8
 def get(self):
     db = MongoDB()
     pics = []
     # Sort by CreateTime in descending order so the newest photos come first; pymongo's
     # sort() syntax is a bit unusual, see http://stackoverflow.com/questions/10242149/using-sort-with-pymongo
     for i in db.collection.find({'MsgType': 'image'}).sort([('CreateTime', -1)]):
         print i
         pic_path = self.get_rela_pic_path(i.get('LocalPicUrl'))
         pic_txt = i.get('ResponseMsg')
         if not pic_path or not pic_txt:
             continue
         pics.append((pic_path, pic_txt))
     self.render('img.html', pics=pics)
Code example #9
def export_quake_log(file, pattern):
    db = MongoDB(database_name='quake', collection_name='games')

    game_count = 0
    game_name = 'game_{}'.format(game_count)
    game = {'game': game_name, 'kills': []}

    with open(file, "r") as f:
        started_game = False
        for row in f:
            if INIT_GAME in row and started_game:
                db.save(game)
                game_count += 1
                game_name = 'game_{}'.format(game_count)
                game = {'game': game_name, 'kills': []}
            if INIT_GAME in row:
                started_game = True
            if 'Kill:' in row and started_game:
                regex_groups = re.search(pattern, row).groups()
                kill_row = {
                    'player_killer':
                    regex_groups[GroupPattern.PLAYER_KILLER.value],
                    'player_dead':
                    regex_groups[GroupPattern.PLAYER_DEAD.value],
                    'type_gun': regex_groups[GroupPattern.TYPE_GUN.value]
                }
                game['kills'].append(kill_row)
        db.save(game)
        print('Successfully exported Quake 3 logs to MongoDB')
Code example #10
def train_shared_model(agent_params,env_params,training_params,id,actor,critic):
    actor.train()
    critic.train()
    pid = os.getpid()
    print(f"Intiantiating process PID {pid}")
    env = Poker(env_params)
    nS = env.state_space
    nO = env.observation_space
    nA = env.action_space
    nB = env.betsize_space
    nC = nA - 2 + nB
    print_every = (training_params['epochs']+1) // 5
    seed = 154
    agent = ParallelAgent(nS,nO,nA,nB,seed,agent_params,actor,critic)
    training_data = copy.deepcopy(training_params['training_data'])
    for e in range(1,training_params['epochs']+1):
        last_state,state,obs,done,mask,betsize_mask = env.reset()
        while not done:
            if env.game == pdt.GameTypes.HISTORICALKUHN:
                actor_outputs = agent(state,mask,betsize_mask) if env.rules.betsize == True else agent(state,mask)
            else:
                actor_outputs = agent(last_state,mask,betsize_mask) if env.rules.betsize == True else agent(last_state,mask)
            last_state,state,obs,done,mask,betsize_mask = env.step(actor_outputs)
        ml_inputs = env.ml_inputs()
        agent.learn(ml_inputs)
        ml_inputs = detach_ml(ml_inputs)
        for position in ml_inputs.keys():
            training_data[position].append(ml_inputs[position])
        if id == 0 and e % print_every == 0:
            print(f'PID {pid}, Epoch {e}')
    mongo = MongoDB()
    mongo.clean_db()
    mongo.store_data(training_data,env.db_mapping,training_params['training_round'],env.game,id,training_params['epochs'])
Code example #11
def make_app():
    load_dotenv()

    settings = {
        "template_path": os.path.join(os.path.dirname(__file__), "templates")
    }
    global db

    db = MongoDB(os.getenv("MONGO_CONN_STRING"))

    return tornado.web.Application([
        (r"/", MainHandler),
    ], **settings)
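
For context, an application built with `make_app()` would usually be started along these lines (the port number is an assumption):

# Hypothetical entry point; port 8888 is a placeholder.
if __name__ == "__main__":
    app = make_app()
    app.listen(8888)
    tornado.ioloop.IOLoop.current().start()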
Code example #12
File: visualize.py Project: hobbit19/PokerAI
def plot_action_frequencies(actiontype, handtype, training_round=0):
    print(actiontype, handtype)
    query = {'training_round': training_round}
    projection = {'action': 1, 'hand_strength': 1, 'hand': 1, '_id': 0}
    data_params = {'interval': 100}
    mongo = MongoDB()
    # gametype = mongo.get_gametype(training_round)
    gametype = "Omaha"
    for position in [pdt.PositionStrs.SB, pdt.PositionStrs.BB]:
        query['position'] = position
        data = mongo.get_data(query, projection)
        if handtype == pdt.VisualHandTypes.HAND:
            actions, hands, unique_actions = mongo.actionByHand(
                data, data_params)
        else:
            actions, hands, unique_actions = mongo.actionByHandStrength(
                data, data_params)
        hand_labels = HAND_LABELS_DICT[actiontype](hands)
        action_labels = [pdt.ACTION_DICT[act] for act in unique_actions]
        plot_frequencies(
            f'{gametype}_action_{handtype}_for_{query["position"]}', actions,
            hand_labels, action_labels)
Code example #13
File: base_model.py Project: rebootshen/backtrader
 def __init__(self, tn=None, location=None, dbname=None):
     name = self.__class__.__name__
     self.tablename = tn.strip() if tn is not None and len(tn) else name.lower()
     if location is None and dbname is None:
         if mongodb is not None:
             self.mc = mongodb[self.tablename]
         else:
             raise Exception('Unable to find available dbname')
     elif location is None and dbname is not None:
         self.mc = connection[dbname][self.tablename]
     else:
         self.location = location
         self.dbname = dbname
         self.mc = MongoDB.db_connection(self.location,
                                         self.dbname)[self.tablename]
Code example #14
File: tasks.py Project: esehara/mongocutter
class TaskTracker(object):

    def __init__(self, filename):
        self._dynamic_import(filename)

    def _is_exist(self, filename):
        if not os.path.exists(filename):
            raise IOError(
                "IOError: File '%s' does not exists." % filename)

    def _dynamic_import(self, filename):
        self._is_exist(filename)
        package_name = _package_parse(filename)
        self.export_module = __import__(package_name, fromlist=['*'])

    def _do_function(self, target_prefix):
        return [
            getattr(self.export_module, attr)
            for attr in dir(self.export_module)
            if re.match(target_prefix, attr)]

    def yaml_read(self):
        yamlpath = self.export_module.settings
        self._is_exist(yamlpath)
        raw_yaml_file = open(yamlpath)
        self.settings = yaml.load(raw_yaml_file)

    def connect_database(self):
        self.db = MongoDB(self.settings.get("server"))

    def disconnect_database(self):
        self.db.disconnect()

    def create_setup_data(self):
        self.db.set_models(self.settings["scheme"])
        self.db.setup_run(self.settings["setup"])

    def destory_setup_data(self):
        self.db.destroy_run()

    def previous_run(self):
        for f in self._do_function("setup_"):
            f()
Code example #15
 def __set_fast_data(img_file_path, lbl):
     payload = list()
     db_handle = MongoDB()
     feature_vector = FeatureExtractor().get_features(img_file_path)
     feature_map = dict()
     key_p = os.path.splitext(os.path.basename(img_file_path))
     key = key_p[0] + '_' + key_p[1][1:] + '_' + str(
         int(time.time() * 1000.0))
     key = key.replace('.', '_')
     feature_map['file'] = key
     feature_map['label'] = lbl
     feature_map['feature'] = feature_vector
     payload.append(feature_map)
     try:
         db_handle.to_db(payload=payload,
                         key=None,
                         db=MONGO_HOPS_DB,
                         collection=MONGO_XRAY_COLLECTION)
         payload.clear()
         db_handle.close()
     except Exception as e:
         db_handle.close()
         print(img_file_path)
         print("Ignoring Exception : " + str(e))
Code example #16
def task_get_matches(record):
    """
    For a given userId, set of coordinates, and datetime
    find all users that were nearby within 1 minute of
    user identified by userId
    """
    db = MongoDB().connect()
    # Hack b/c of Celery's serialization converting
    # datetimes to strings
    record['time'] = parser.parse(record['time'])

    locs = db.user_locations.find({
        'loc': {
            '$geoNear': {
                '$geometry': record['loc'],
                '$maxDistance': 150,
            },
        },
        'userId': {
            '$ne': record['userId']
        },
        'time': {
            '$gte': record['time'] - timedelta(seconds=30),
            '$lte': record['time'] + timedelta(seconds=30)
        },
    })

    for loc in locs:
        # For each match, create two records.
        criteria = {'from': record['userId'], 'to': loc['userId']}
        new_values = criteria.copy()
        new_values.update({'time': record['time']})
        # Update existing or create new record
        db.matches.update(criteria, new_values, upsert=True)
        # Symmetrical - both users get a match made `to` them `from` the other user
        reversed_criteria = {'from': loc['userId'], 'to': record['userId']}
        new_values.update(reversed_criteria)
        db.matches.update(reversed_criteria, new_values, upsert=True)
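
A query like the one above only works against a collection that has a geospatial index on the queried field. A sketch of the one-time index setup (the `db` object and collection name come from the snippet; everything else is an assumption) is:

# Hypothetical index setup for the geospatial query above.
import pymongo

db.user_locations.create_index([('loc', pymongo.GEOSPHERE)])  # 2dsphere index on 'loc'
db.user_locations.create_index([('userId', pymongo.ASCENDING),
                                ('time', pymongo.ASCENDING)])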
Code example #17
from cleantext import clean
from scrapy import (Spider, Request)
from uuid import uuid4

root_url = 'https://www.metacritic.com'
genres = [
    'adventure', 'fighting', 'first-person', 'flight', 'party', 'platformer',
    'puzzle', 'racing', 'real-time', 'role-playing', 'simulation', 'sports',
    'strategy', 'third-person', 'turn-based', 'wargame', 'wrestling', 'action'
]
genre_url = '/browse/games/genre/date/'
query_str = '/all?view=condensed'
critic_reviews = 'critic'
user_reviews = 'user'
batch_size = 100
database = MongoDB()
database.open()
meta_db = database.get_database('metacritic')


def now():
    from datetime import timezone, datetime
    return int(datetime.now(tz=timezone.utc).timestamp() * 1000)


def is_none(d):
    for k, v in d.items():
        if v is None:
            return True
    return False
Code example #18
File: wxpyInterface.py Project: BrishenLee/iRobot
import hashlib
import traceback
import tornado.web
import tornado.ioloop
from tornado import ioloop, gen
from lxml import etree
from utils import *
from db import MongoDB
from db import RedisDB
from logger_helper import logger
from wxpy import *

logger = logging.getLogger('wxpyInterface')

TURING_BOT = TuringBot()
MONGO_OBJ = MongoDB()
REDIS_OBJ = RedisDB()
global bot


@gen.coroutine
def cur_wxpy_mode():
    wxpy_mode = REDIS_OBJ.client.get('WXPY_MODE')
    logger.info('Current mode: %s' % wxpy_mode)
    bot.self.send_msg('Current mode: %s' % wxpy_mode)


@gen.coroutine
def register_task_reminder():
    if get_wxpy_mode() != 'GAI':
        logger.info('Not GAI mode, pass the reminder')
Code example #19
File: spider.py Project: smallDou/spider_exercises
 def save_to_mongo(self, dic):
     mon = MongoDB()
     if not mon.find({'内容': self.weibo_content}):
         mon.insert(dic)
Code example #20
File: article_model.py Project: CharlesNie/DECC
from db import MongoDB
import scrapy

db = MongoDB()


# BBC article model
class BBCArticleItem(scrapy.Item):
    _id = scrapy.Field()  # preset id field for mongo db
    title = scrapy.Field()
    date = scrapy.Field()
    time = scrapy.Field()
    section = scrapy.Field()
    content = scrapy.Field()
    url = scrapy.Field()

    def save(self):
        # write data to database
        db.insert_article(self)

    def update(self, updates={}):
        db.update_article(self, updates)

    @classmethod
    def fetch_by_title(cls, title):
        results = db.find_article_by_title(title)
        return cls.parse_result(results)

    @classmethod
    def fetch_by_section(cls, section):
        results = db.find_articles_by_section(section)
Code example #21
from db import MongoDB
from flask import Flask, request, render_template
from model import Node

import json
import threading
import time
import requests

app = Flask(__name__)
mongodb = MongoDB()

thread = threading.Thread(target=mongodb.find_vacant)
thread.start()


@app.route('/')
def index():
    list = mongodb.get_all()
    return render_template('index.html', rooms=list.get('nodes'))


@app.route('/ping', methods=['POST'])
def ping():
    if request.method == 'POST':
        node_id = request.headers.get('node-id')
        node_floor = request.headers.get('node-floor')
        print 'Incoming ping from node ' + node_id
        incoming_node = Node(node_id, node_floor)
        success = mongodb.add_or_update(incoming_node)
Code example #22
File: index_db.py Project: l1905/qau_search
 def __init__(self):
     self.schema = schema
     self.collect = MongoDB()
Code example #23
from flask import escape
from config import Config, ProductionConfig, DevelopmentConfig
from db import MySQL, MongoDB, Query

mysql = MySQL()
mongoDB = MongoDB()
query = Query()


def html_escape(inp):
    return str(escape(inp))


def toHourandMin(timeDel):
    secs = timeDel.total_seconds()
    hours = int(secs / 3600)
    minutes = int(secs / 60) % 60
    return "%02d:%02d" % (hours, minutes)


def getConfig():
    cfg = ProductionConfig
    if Config.ENABLE_DEV:
        cfg = DevelopmentConfig
    return cfg
Code example #24
File: historical.py Project: nwfella/cryptosub
        if row["done"] == 1:
            log("%s already processed, skipping!" % row["Subreddit"])
            continue
        subreddit = row["Subreddit"]
        log("Gathering post ID's for %s" % row["Subreddit"])
        pushshift = Pushshift(sys.argv[1], sys.argv[2], subreddit)
        post_ids = pushshift._post_ids
        reddit = Reddit(db, subreddit, post_ids)
        log("Gathering datasets")
        reddit.gather()

        # Get a List of files in working directory
        all_datasets = listdir(am._historical_dir)
        dataset_names = [
            "comments_{}.csv".format(subreddit),
            "posts_{}.csv".format(subreddit)
        ]
        if any(dataset in all_datasets for dataset in dataset_names):
            log("Processing: " + subreddit)
            loaded = am.load_datasets(subreddit)
            if loaded:
                am.comments_and_posts()
                am.wordfreq()
                am.bigramfreq()
                am.currency_mentions()
                log("*" * 30)

            db.mark_subreddit_done(subreddit)

    db.reset_subreddit_done()
Code example #25
File: app.py Project: bf6/fumble
import socket
import threading
import time
from http.server import HTTPServer

from handler import FumbleHandler
from db import MongoDB


MongoDB().recreate()


addr = ('0.0.0.0', 8000)
sock = socket.socket()
# One socket, many threads
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.bind(addr)
sock.listen(5)


class Thread(threading.Thread):
    def __init__(self, i):
        super().__init__()
        self.i = i
        # ctrl-c will kill process
        self.daemon = True
        self.start()

    def run(self):
        server = HTTPServer(addr, FumbleHandler, False)
        server.socket = sock
Code example #26
File: menu.py Project: sky2464/DS_Project1
 def __init__(self):
     self.neo_db = NeoDB()
     self.mongo_db = MongoDB()
     self.query_history = []
Code example #27
	post = {"author": random_data(8),
			"text": ' '.join(random_data(10) for _ in range(10)),
			"tags": ["mongodb", "python", "pymongo"],
			"date": datetime.datetime.utcnow()}
	return post


def status(obj, msg=''):
	print()
	print('-'*60, msg.upper(), '-'*60)
	if obj:
		pprint.pprint(obj)
	
	
# req - create connection only
m0 = MongoDB()
status(m0.get_current_status(), 'only connection')
# req - create database with validation
m0.create_db('try000')
status(m0.get_current_status(), 'only database')
# 2
m = MongoDB(database_name='try001')
status(m.get_current_status(), 'only database - part 2')

# req - drop db by name
mm = MongoDB(database_name='DELETE_ME')
mm.create_collection(collection_name='Temporary')
mm.insert(get_dummy_data())
status(mm.get_database_names(), msg='create and delete db')
print('collection created - ', mm.get_collection_names())
mm.drop_db(database_name='DELETE_ME')
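
The snippet above relies on a `random_data` helper that is not shown. A minimal hypothetical stand-in that produces random lowercase strings could be:

# Hypothetical stand-in for the random_data helper used above.
import random
import string

def random_data(length):
    return ''.join(random.choice(string.ascii_lowercase) for _ in range(length))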
Code example #28
File: tasks.py Project: esehara/mongocutter
 def connect_database(self):
     self.db = MongoDB(self.settings.get("server"))
Code example #29
File: kuhn_main.py Project: hobbit19/PokerAI
 # manager = mp.Manager()
 # return_dict = manager.dict()
 # online training
 seed = 123
 device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
 actor = env_networks['actor'](seed, nS, nA, nB, agent_params)
 critic = env_networks['critic'][args.critic](seed, nS, nA, nB,
                                              agent_params)
 del env
 actor.share_memory()  #.to(device)
 critic.share_memory()  #.to(device)
 processes = []
 num_processes = mp.cpu_count()
 if args.clean:
     print('Cleaning db')
     mongo = MongoDB()
     mongo.clean_db()
     del mongo
 for i in range(num_processes):  # No. of processes
     p = mp.Process(target=train_shared_model,
                    args=(agent_params, env_params, training_params, i,
                          actor, critic))
     p.start()
     processes.append(p)
 for p in processes:
     p.join()
 basepath = os.path.abspath(sys.argv[0])
 torch.save(
     actor.state_dict(),
     os.path.join(basepath, 'checkpoints/Historical_kuhn' + '_actor'))
 torch.save(
Code example #30
File: dbSchedule.py Project: BrishenLee/iRobot
 def __init__(self):
     self.db = MongoDB()
     self.duration = 2000
Code example #31
class dbHandler():
    def __init__(self):
        self.mysql = MySQL()
        self.mongo = MongoDB()
        self.query = Query()
        cfg = getConfig()
        self.mysql.init_cfg(cfg)
        self.mongo.init_cfg(cfg)
        self._con = self.mysql.get_connection()
        self._client = self.mongo.get_client()
        self.query.init_db(self._con, self._client)
        self._db = self._client[cfg.MONGODB_COLLECTION]

    def getQuery(self):
        return self.query

    def getallMAC(self, table):  #/
        allmac = self._db[table].find().distinct("message.MAC")
        return allmac

    def getallSensor(self, table, mac):  #/
        result = self._db[table].find(
            {"$and": [{
                "message.MAC": mac
            }, {
                "message.s": {
                    "$exists": True
                }
            }]})
        alltopic = result.distinct("topic")
        allnum = result.distinct("message.s")
        res = []
        for s in allnum:
            for topic in alltopic:
                res.append({"s": s, "topic": topic})
        return res

    def getallTopic(self, table, mac):  #/
        result = self._db[table].find_one(
            {"$or": [{
                "message.MAC": mac
            }, {
                "message.s": mac
            }]})
        return ['topic', result["topic"], 'MAC', result["message"]["MAC"]]

    def getLastVAL(self, sid, mac, _type="ct"):
        createdAt = 0
        res = None
        query = {"message.MAC": mac}
        if (_type.split("_")[0] == "dm"):
            query["message.s"] = sid
        rawCT = self._db[_type].find(query).sort("created_at", -1).limit(1)
        for CT in rawCT:
            if (CT["created_at"] > createdAt):
                res = CT
                createdAt = CT["created_at"]
        if (_type.split("_")[0] == "dm"):
            return (res["message"]["AL1"] + res["message"]["AL2"] +
                    res["message"]["AL3"]) / 3
        return res["message"]["a"]

    def getLastPIR(self, mac, _type="pir"):
        createdAt = 0
        res = {}
        for t in self.getTable({"sensor_type": _type}):
            rawPIR = self._db[t].find({
                "message.MAC": mac
            }).sort("created_at", -1).limit(1)
            for PIR in rawPIR:
                if (PIR["created_at"] > createdAt):
                    res = PIR
                    createdAt = PIR["created_at"]
        return res

    def getTable(self, devFind={}):  #/
        res = []
        for _type in self._db["iot_type"].find(devFind):
            name = _type["sensor_type"]
            if (_type["device_type"] != ''):
                name = name + "_" + _type["device_type"]
            res.append(name)
        return res
Code example #32
class Logics(object):
    db = MongoDB()

    @classmethod
    def wrap_dict(cls, data):
        if isinstance(data, (list, tuple)):
            lst = []
            for i in data:
                lst.append(cls.wrap_dict(i))
            return lst
        elif isinstance(data, dict):
            return AttrDict(data)
        else:
            return data

    @classmethod
    def wrap_member(cls, m, status=False, history=False, infos=False):
        """ 装饰成员, 将成员换成AttrDict
        Arguments:
            `m`         -   成员
            `status`    -   是否装载状态
            `history`   -   是否装载消息历史
            `infos`     -   是否装载信息
        """
        if isinstance(m, (list, tuple)):
            lst = []
            for i in m:
                lst.append(
                    cls.wrap_member(i, status=status, history=history,
                                    infos=infos))
            return lst
        elif isinstance(m, dict):
            m = AttrDict(m)
            if infos:
                m["infos"] = cls.wrap_dict(
                    list(cls.db[const.INFO].find({"mid": m._id})))

            if history:
                m["history"] = cls.wrap_dict(
                    list(cls.db[const.HISTORY].find({"from_member.$id": m._id})))

            if status:
                m["status"] = cls.wrap_dict(
                    list(cls.db[const.STATUS].find({"mid": m._id})))

            return m
        else:
            return m

    @classmethod
    def get_with_nick(cls, nick, **kwargs):
        """ 根据昵称获取成员
        Arguments:
            `nick`  -   成员昵称
        """

        m = cls.db[const.MEMBER].find_one({"nick": nick})
        return cls.wrap_member(m, **kwargs)

    @classmethod
    def get_one(cls, jid, **kwargs):
        """ 获取一个成员
        Arguments:
            `jid`   -   成员jid
        """
        email = get_email(jid)
        return cls.wrap_member(cls.db[const.MEMBER].find_one({"email": email}),
                               **kwargs)

    @classmethod
    def add(cls, jid, nick=None, show=None):
        """ 添加一个成员
        Arguments:
            `jid`   -   成员jid
            `nick`  -   昵称
            `show`  -   stanze.show
        """
        if cls.get_one(jid):
            return
        if not nick:
            nick = get_email(jid).split("@")[0]
        doc = {"email": get_email(jid), "nick": nick, "isonline": True,
               "join_date": now()}
        mid = cls.db[const.MEMBER].insert(doc)
        cls.db[const.STATUS].insert({"mid": mid, "statustext": show,
                                     "resource": jid.resource,
                                     "status": const.ONLINE})

        return cls.get_one(jid)

    @classmethod
    def drop(cls, jid):
        """ 删除一个成员
        Arguments:
            `jid`   -   成员jid
        """
        m = cls.get_one(jid)
        try:
            cls.db[const.MEMBER].remove({"email": get_email(jid)})
            cls.db[const.STATUS].remove({"mid": m._id})
            cls.db[const.INFO].remove({"mid": m._id})
        except:
            traceback.print_exc()

        return

    @classmethod
    def get_members(cls, remove=None, **kwargs):
        """ 获取所有成员
        Arguments:
            `remove`    -   排除成员
        """
        remove_email = get_email(remove)
        if remove:
            ms = cls.db[const.MEMBER].find({"email": {"$ne": remove_email}})
            return cls.wrap_member(list(ms), **kwargs)
        ms = cls.db[const.MEMBER].find()
        return cls.wrap_member(list(ms), **kwargs)

    @classmethod
    def modify_nick(cls, jid, nick):
        """ 修改成员昵称
        Arguments:
            `jid`   -   jid
            `nick`  -   新昵称
        Return:
            False   // 昵称已存在
            True    // 更改昵称成功
        """
        m = cls.get_one(jid)
        if not m:
            return False
        if m:
            exists = cls.get_with_nick(nick)
            if exists:
                return False
            cls.db[const.MEMBER].update({"_id": m._id},
                                        {"$set": {"nick": nick, "last_change": now()},
                                         "$push": {"used_nick": nick}})
            cls.set_info(jid, const.INFO_CHANGE_NICK_TIMES,
                         int(cls.get_info(jid,
                                          const.INFO_CHANGE_NICK_TIMES,
                                          0).value) + 1)
            return True

    @classmethod
    def get_one_status(cls, jid):
        m = cls.get_one(jid)
        if not m:
            return False, False
        return cls.db[const.STATUS].find_one({"resource": jid.resource,
                                              "mid": m._id}), m

    @classmethod
    def set_online(cls, jid, show=None):
        """ 设置成员在线
        Arguments:
            `jid`   -   成员jid
            `show`  -   stanza.show
        """
        status, m = cls.get_one_status(jid)
        if not m:
            return False

        if status:
            cls.db[const.STATUS].update({"_id": status.get("_id")},
                                        {"$set": {"statustext": show}})
        else:
            cls.db[const.STATUS].insert({"status": const.ONLINE,
                                         "statustext": show,
                                         "resource": jid.resource,
                                         "mid": m._id})

        return True

    @classmethod
    def set_offline(cls, jid):
        status, m = cls.get_one_status(jid)
        if not m or not status:
            return False
        cls.db[const.STATUS].remove({"_id": status.get("_id")})

    @classmethod
    def _get_info(cls, jid=None, key=None, default=None, is_global=False):
        """ 获取成员选项
        Arguments:
            `jid`   -   jid
            `key`   -   选项键
            `default` -   默认值
        """
        cond = {"key": key, "is_global": is_global}
        m = None
        if jid:
            m = cls.get_one(jid)
            if not m:
                return AttrDict(dict(key=key, value=default, is_global=is_global)), False, None
            cond.update(mid=m._id)
        info = cls.db[const.INFO].find_one(cond)

        from_db = True
        if not info:
            info = dict(key=key, value=default, is_global=is_global)
            from_db = False

        return AttrDict(info), from_db, m

    @classmethod
    def get_info(cls, jid, key, default=None):
        return cls._get_info(jid, key, default)[0]

    @classmethod
    def set_info(cls, jid, key, value):
        """ 设置成员选项
        Arguments:
            `jid`   -   jid
            `key`   -   选项键
            `value` -   选项值
        """
        info, f, m = cls._get_info(jid, key)
        if f:
            cls.db[const.INFO].update({"_id": info._id},
                                      {"$set": {"value": value}})
        else:
            cls.db[const.INFO].insert({"key": key, "value": value,
                                       "is_global": False,
                                       "pubdate": now(),
                                       "mid": m._id})
        return info

    @classmethod
    def get_today_rp(cls, jid):
        """ 获取今日rp """
        rp = None
        rp_date = Logics.get_info(jid, const.INFO_RP_DATE).value

        if rp_date:
            try:
                rp_date = datetime.fromtimestamp(float(rp_date))
            except:
                rp_date = time.time() - 86400
                rp_date = datetime.fromtimestamp(float(rp_date))
            now = datetime.now()

            if now.year == rp_date.year and now.month == rp_date.month and \
                    now.day == rp_date.day:
                rp = Logics.get_info(jid, const.INFO_RP).value

        return rp

    @classmethod
    def set_today_rp(cls, jid, rp):
        cls.set_info(jid, const.INFO_RP, rp)
        cls.set_info(jid, const.INFO_RP_DATE, time.time())
        cls.db[const.MEMBER].update({"email": get_email(jid)},
                                    {"$push": {"rps": {"value": rp, "date": now()}}})

    @classmethod
    def get_global_info(cls, key, default=None):
        """ 获取全局选项
        Arguments:
            `key`   -   选项键
            `default` -   默认值
        """
        return cls._get_info(key=key, default=default, is_global=True)[0]

    @classmethod
    def set_global_info(cls, key, value):
        """ 设置全局选项
        Arguments:
            `key`   -   选项键
            `value` -   选项值
        """
        info, f, _ = cls._get_info(key=key,  is_global=True)
        if f:
            cls.db[const.INFO].update({"_id": info._id},
                                      {"$set": {"value": value}})
        else:
            cls.db[const.INFO].insert({"key": key, "value": value,
                                       "pubdate": now(), "is_global": True})
        return info

    @classmethod
    def add_history(cls, jid, to_jid, content):
        m = cls.get_one(jid)
        cls.db[const.MEMBER].update(
            {"_id": m._id}, {"$set": {"last_say": now()}})
        cls.db[const.HISTORY].insert(
            {"from_member": cls.db.ref(const.MEMBER, m._id),
             "to_member": to_jid, "content": content,
             "pubdate": now()})

    @classmethod
    def get_history(cls, jid=None,  starttime=None):
        """ 获取历史信息
        Arguments:
            `jid`   -   发送人
            `to`    -   接收人
            `starttime` -   开始时间
        """
        condition = {"to_member": "all"}
        if jid:
            m = cls.get_one(jid)
            condition.update({"from_member.$id": m._id})

        if starttime:
            condition.update(pubdate={"$gte": starttime})

        return cls.db.deref(list(cls.db[const.HISTORY].find(condition)
                                 .sort("pubdate", cls.db.asc)))

    @classmethod
    def is_online(cls, jid):
        m = cls.get_one(jid, status=True)
        return bool([status.status for status in m.status if status.status])

    @classmethod
    def empty_status(cls):
        cls.db[const.STATUS].remove()

    @classmethod
    def get_all_rps(cls, starttime=None, endtime=None):
        condition = {"key": const.INFO_RP}
        if starttime or endtime:
            condition["pubdate"] = {}

            if starttime:
                condition["pubdate"].update({"$gt": starttime})

            if endtime:
                condition["pubdate"].update({"$lte": endtime})

        return list(cls.db[const.INFO].find(condition))

    @classmethod
    def get_today_rps(cls):
        today = now()
        starttime = datetime(today.year, today.month, today.day)
        endtime = datetime(today.year, today.month, today.day, 23, 59, 59)
        return sorted(cls.get_all_rps(starttime, endtime), key=lambda x: x["value"])

    @classmethod
    def add_honor(cls, jid, value, typ, item, desc):
        m = cls.get_with_nick(jid)

        doc = {"getdate": now(), "date": now(), "type": typ, "desc": desc,
               "mid": m._id, "item": item, "value": value}
        cls.db[const.HONOR].insert(doc)

    @classmethod
    def get_honor(cls, m):
        honors = cls.db[const.HONOR].find({"mid": m._id})
        return list(honors)

    @classmethod
    def get_honor_str(cls, m):
        honors = cls.get_honor(m)
        body = u""
        for h in honors:
            date = h.get("date").strftime("%Y/%m/%d")
            body += u"{0} {1}为{2}达到{3}, 获得成就"\
                    .format(date, h.get("item"), h.get("value"),
                            h.get("desc"))

        return body
Code example #33
File: weixinInterface.py Project: BrishenLee/iRobot
 def __init__(self):
     self.db_obj = MongoDB()
     self.msg_dic = {}
Code example #34
import time
from datetime import datetime

import schedule

from analyzer import BangumiAnalyzer
from conf import conf
from crawler import BangumiCrawler
from db import MongoDB
from utils import logger

if __name__ == '__main__':
    logger.info('Hello! This is Bangumi-Provider :)')

    client = MongoDB(conf)

    crawler = BangumiCrawler(client, conf)
    analyzer = BangumiAnalyzer(client, conf)

    def jobs():
        crawler.crawl()
        analyzer.analyze()

    if conf.SCHEDULE_ENABLE:
        logger.info(
            'Running with Schedule Enabled, Tasks Schedule Every Day. Now: %s, Next Schedule: %s.'
            % (datetime.now(), conf.SCHEDULE_CRON_AT))
        schedule.every().day.at(conf.SCHEDULE_CRON_AT).do(jobs)
        while True:
            schedule.run_pending()
            time.sleep(1)
Code example #35
File: menu.py Project: sky2464/DS_Project1
class Menu():

    def __init__(self):
        self.neo_db = NeoDB()
        self.mongo_db = MongoDB()
        self.query_history = []


    def main_menu(self):
        print("*******SELECT AN OPTION*************")
        print("************************************")
        print("* 1 - Create DBs (Mongo and Neo4j)**")
        print("* 2 - Perform Query for Answer 1  **")
        print("* 3 - Perform Query for Answer 2  **")
        print("* 4 - Query MongoDB               **")
        print("* 5 - Query Neo4j                 **")
        print("* 6 - Query History               **")
        print("* 7 - Import CSV file to DBs      **")
        print("* 8 - Quit the APP                **")
        print("************************************")
        print("NOTE: Please run Option 1 before any others")


    def parse_choice(self, choice):
        if(choice=="1"): #todo: show all the user
            self.neo_db.setup()
            self.mongo_db.setupv2()
            input("Press Enter to continue...")
        if(choice=="2"):
            self.get_user_list() 
        if(choice=="3"):
            print(self.mongo_db.return_users()) #todo: return each user once
            user = input("\nSelect User you would like to find \ntrusted colleagues-of-colleagues \nwith one or more interests: ")
            print(self.mongo_db.answer_for_question2(user))
            input("Press Enter to continue...")
            pass
        if(choice=="4"):
            query = input("Write Query for MongoDB: ")
            self.query_history.append(query)
            pass
        if(choice=="5"):
            query = input("Write Query for Neo4j: ")
            self.query_history.append(query)
            pass
        if(choice=="6"):
            print(self.query_history)
            pass
        if(choice=="7"):
            print("Import CSV file ... \n") 
            print("For example: /home/Navid/Documents/BigData/project1_sample/user.csv\n")
            filepath = input ("enter file path:")
            self.mongo_db.import_content(filepath)

            pass
        if(choice=="8"):
            quit()

    def get_user_list(self):
        print( self.neo_db.get_all_users() )
        user = input("SELECT User (Type Name): ")
        company = input("SELECT Company Name of Same User: "******"Press ENTER to Continue...")


    def run(self):
        print("Welcome To Collaboration NET DB")
        while(True):
            self.main_menu()
            choice = input("Choice: ")
            self.parse_choice(choice)