Exemple #1
0
 def __init__(self):
     '''
     Initialize shared Pocket48 client state: the display color palette,
     the token database handle, the Fernet cipher, and default HTTP headers
     mimicking the official PocketFans iOS client.
     '''
     # Pastel #RRGGBB colors used for display.
     # Fixed: '##CCFFFF' had a doubled '#' and was not a valid hex color.
     self.color = ['#FEEEED', '#CCFFFF', '#CCFFCC', '#E0FFFF', '#F0FFF0', '#F0F8FF', '#FFF0F5', '#FFFAFA',
                   '#F0FFFF', '#FFF5EE', '#E6E6FA', '#FFC0CB']
     # Token cache collection in the 'Pocket48' database.
     self.dbtoken = MongoDB('Pocket48', 'dbtoken')
     # NOTE(review): a symmetric Fernet key is hard-coded here, so anything
     # encrypted with it is only obfuscated; consider loading it from config.
     self.key = b'1O8zgva3PlT_Evikm61A97wsWZ0JlTGSNEiRc0S7rCY='
     self.Fernet = Fernet(self.key)
     self.source_url = 'https://source.48.cn'
     self.headers = {
         'Host': 'pocketapi.48.cn',
         'accept': '*/*',
         'Accept-Language': 'zh-Hans-CN;q=1',
         'User-Agent': 'PocketFans201807/6.0.0 (iPhone; iOS 12.2; Scale/2.00)',
         'Accept-Encoding': 'gzip, deflate',
         'appInfo': '{"vendor":"apple","deviceId":"0", \
                             "appVersion":"6.0.0","appBuild":"190409", \
                             "osVersion":"12.2.0","osType":"ios", \
                             "deviceName":"iphone","os":"ios"}',
         'Content-Type': 'application/json;charset=utf-8',
         'Connection': 'keep-alive',
     }
Exemple #2
0
 def __init__(self, mobile, password, membername):
     '''
     Store the login credentials and target member, set up the token DB and
     default request headers, then resolve the member's room.

     :param mobile: phone number, int or string
     :param password: account password, string
     :param membername: member's full name, string
     '''
     self.mobile = mobile
     self.password = password
     self.membername = membername
     # Token cache collection in the 'Pocket48' database.
     self.dbtoken = MongoDB('Pocket48', 'dbtoken')
     # Headers mimicking the official PocketFans iOS client.
     self.headers = {
         'Host': 'pocketapi.48.cn',
         'accept': '*/*',
         'Accept-Language': 'zh-Hans-CN;q=1',
         'User-Agent':
         'PocketFans201807/6.0.0 (iPhone; iOS 12.2; Scale/2.00)',
         'Accept-Encoding': 'gzip, deflate',
         'appInfo': '{"vendor":"apple","deviceId":"0", \
                             "appVersion":"6.0.0","appBuild":"190409", \
                             "osVersion":"12.2.0","osType":"ios", \
                             "deviceName":"iphone","os":"ios"}',
         'Content-Type': 'application/json;charset=utf-8',
         'Connection': 'keep-alive',
     }
     # searchroom is presumably a property that performs the room lookup for
     # membername -- TODO confirm against the class body (not visible here).
     dic_data = self.searchroom
     self.ownerId, self.roomId = dic_data['ownerId'], dic_data['roomId']
Exemple #3
0
def webhook():
    """Handle a Habitica webhook: update the habit/daily in Mongo and index
    the raw event payload into Elasticsearch. Always replies 200."""
    payload = request.json  # type: dict

    logging.info("Received update")

    # "down" scores are ignored; everything else is processed.
    if payload['direction'] != "down":
        task = payload['task']
        client = MongoDB(configuration, task['userId'])

        if task['type'] == "daily":
            logging.info("Received a daily task and we are now processing.")
            client.update_habit(task, False)
        elif task['type'] == "habit":
            logging.info("Received a habit and we are now processing.")
            client.update_habit(task, True)
        else:
            logging.error("Received an unrecognized task type.")

        es = Elasticsearch([{'host': configuration['elasticsearch_server'],
                             'port': configuration['elasticsearch_port']}])

        logging.info("Pushing event to Elasticsearch")
        res = es.index(index="test-index", doc_type='_doc', body=payload)
        logging.debug("Elasticsearch returned " + str(res['result']))

    return '', 200
Exemple #4
0
    def __init__(self):
        """Wire up the logger, Mongo, and Gmail clients, then initialize the
        trading bookkeeping structures."""
        self.logger = Logger()
        self.mongo = MongoDB(self.logger)
        self.gmail = Gmail(self.mongo, self.logger)

        # Give the logger handles to Gmail and Mongo so it can report through them.
        self.logger.gmail = self.gmail
        self.logger.mongo = self.mongo

        # Per-account trader registry and connection bookkeeping.
        self.traders = {}
        self.accounts = []
        self.not_connected = []

        self.sim_trader = SimTrader(self.mongo)
Exemple #5
0
 def __init__(self, mobile, password, membername):
     """Log in via the base class, initialize the CQ client and scheduler,
     open the chat collection, and build the message dispatch table."""
     super().__init__(mobile, password, membername)
     CQclient.__init__(self)
     SCHEDULE.__init__(self)
     self.dbchat = MongoDB('Poket48', str(self.roomId))
     # Dispatch table: message type name -> handler method.
     self.msgType = {
         'TEXT': self.text,
         'VIDEO': self.video,
         'IMAGE': self.image,
         'AUDIO': self.audio,
     }
def searching_mongo(mongo_id):
    """Look up and return the document with the given _id (as reported by
    Elasticsearch) from Mongo."""
    client = MongoDB(host=os.getenv("MONGO_URI"))
    return client.search_mongo(mongo_id)
Exemple #7
0
def mongoOperation(i, data):
    """Insert *data* into the MongoDB collection for marketplace *i*.

    :param i: marketplace key -- one of 'amazon', 'flipkart', 'paytm';
              any other value is silently ignored (matching prior behavior).
    :param data: document to insert.
    """
    # Display label used in the error message for each supported marketplace.
    # (Replaces three duplicated if/try branches that differed only by name.)
    labels = {'amazon': 'Amazon', 'flipkart': 'Flipkart', 'paytm': 'Paytm'}
    if i in labels:
        try:
            MongoDB(i).insert(data)
        except Exception as e:
            # Same message format as the original branches.
            print('Exception in MongoDB ' + labels[i] + '\n' + str(e))
Exemple #8
0
def read_update(pref):
    """Render the output page for *pref*; on POST, first persist the
    submitted form value.

    :param pref: preference key to read (and optionally update).
    """
    db = MongoDB()
    print(request.method)
    if request.method == 'POST':
        # Store the submitted value before re-reading the record.
        value = request.form['value']
        db.update_db(pref, value)
    # Both branches previously duplicated this read-and-render tail.
    user_info = db.get_info(pref)
    return render_template('output.html', pref=pref, user=user_info)
Exemple #9
0
 def __init__(self):
     '''
     Initialize the scraper: target URLs, browser-like request headers, and
     the three Javdb Mongo collections it writes to.
     '''
     # Session cookie; empty until set elsewhere -- TODO confirm who fills it.
     self.cookie = ''
     self.url = 'http://www.p26y.com/cn/'
     self.torrent_url = 'https://www.bturl.at/search/'
     # Scraped video records keyed per run.
     self.video = {}
     # Headers imitating a desktop Chrome browser.
     self.headers = {'Cookie': self.cookie,
                     'Host': 'www.p26y.com',
                     'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,'
                               '*/*;q=0.8',
                     'User-Agent': 'Mozilla/5.0 (Windows NT 6.2; Win64; x64) AppleWebKit/537.36 (KHTML, '
                                   'like Gecko) Chrome/68.0.3440.75 Safari/537.36',
                     'If-None-Match': 'W/"3a18de34b39581014d560cc3522a80b7"',
                     'If-Modified-Since': 'Tue, 22 Jan 2019 00:00:00 GMT',
                     'Connection': 'keep-alive',
                     'Cache-Control': 'max-age=0',
                     'Accept-Language': 'zh-CN,zh;q=0.9',
                     'Upgrade-Insecure-Requests': '1',
                     'Accept-Encoding': 'gzip, deflate'
                     }
     self.allgirls = MongoDB('Javdb', 'girlsname')  # name -> page-code mapping for every girl
     self.rankdb = MongoDB('Javdb', 'rankdb')  # most-popular works ranking
     self.girlsindexdb = MongoDB('Javdb', 'girlsindexdb')  # all works of a single girl
Exemple #10
0
 def testReadAndDecodeJSON(self):
     """readAndDecodeJSON should return the decoded sample JSON document."""
     db = MongoDB()
     expected = {
         "userId": 1,
         "id": 1,
         "title": "sunt aut facere repellat provident occaecati excepturi optio reprehenderit",
         "body": "quia et suscipit\nsuscipit recusandae consequuntur expedita et cum\nreprehenderit molestiae ut ut quas totam\nnostrum rerum est autem sunt rem eveniet architecto",
     }
     self.assertEqual(PayloadRetriever().readAndDecodeJSON(db), expected)
Exemple #11
0
def index():
    """Render the scoreboard: aggregate every configured player's points,
    hobby time, and work time from Mongo."""
    logging.info("Received request for index page.")

    players = {}

    for key, value in configuration['user_ids'].items():
        client = MongoDB(configuration, key)

        points = 0
        time = 0              # minutes spent on hobbies
        work_time = 0         # minutes spent on 'work' habits
        total_activities = 0

        for habit in client.get_habits():
            points = points + habit['points']
            total_activities = total_activities + habit['total']

            # 'work' time is tracked separately from hobby time.
            if habit['name'].lower() == 'work':
                work_time = work_time + habit['time']
                continue

            time = time + habit['time']

        for daily in client.get_dailies():
            points = points + daily['points']
            time = time + daily['time']
            total_activities = total_activities + daily['total']

        logging.debug("Calculated time to be: " + str(time))

        # Split minute totals into (days, remainder) then (hours, minutes);
        # 1440 minutes per day. (An unused total_time variable was removed.)
        days = divmod(time, 1440)
        hours = divmod(days[1], 60)
        minutes = hours[1]

        work_days = divmod(work_time, 1440)
        work_hours = divmod(work_days[1], 60)

        players[key] = {"Username": value['username'], "Total Points": points, "Hobby Days": days[0], "Hobby Hours": hours[0], "Hobby Minutes": minutes, "Work Days": work_days[0], "Work Hours": work_hours[0], "Total Activities": total_activities}

    # Fixed: pprint() prints to stdout and returns None, so the previous
    # logging.debug(pprint(players)) always logged the string "None".
    logging.debug(players)

    return render_template('index.html', players=players)
Exemple #12
0
 def __init__(self, pro_id):
     '''
     Set up monitoring for one Modian crowdfunding project: its mobile-site
     URL, a dedicated Mongo collection, and the message templates used to
     announce each kind of update.

     :param pro_id: crowdfunding project id, int
     '''
     super().__init__()
     SCHEDULE.__init__(self)
     self.pro_id = pro_id
     self.dbmodian = MongoDB('modian', str(self.pro_id))
     self.headers = {
         'User-Agent':
         'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/60.0.3112.90 Safari/537.36'
     }
     # Fixed: an earlier assignment of the zhongchou.modian.com desktop URL
     # was dead code -- it was immediately overwritten by this mobile URL.
     self.url = 'https://m.modian.com/project/' + str(self.pro_id)
     # Maps an update type to the message template used to announce it.
     self.dataType = dict(orders=ORDERS_MESSAGE,
                          rank=RANK_MESSAGE,
                          detail=DETAIL_MESSAGE)
Exemple #13
0
def scraper_loop():
    """Scrape altcoin pages forever, persisting each batch to Mongo and
    regenerating the static page, sleeping SCRAPE_FREQ minutes per cycle."""
    db = MongoDB(host=os.environ['DB_PORT_27017_TCP_ADDR'], port=27017)
    # db = MongoDB(host='localhost', port=27017)  # local flag

    s = Scraper()
    print("Starting scraper...")
    while True:
        try:
            s.pull(n_pages=NPAGE)  # number of altcoin pages to scrape
            db.update(s)
            update_static_page()
        except KeyboardInterrupt:
            print("Exiting....")
            sys.exit(1)
        except Exception as exc:
            # Fixed: the caught exception was bound but unused, and
            # sys.exc_info()[0] printed only the exception class.
            print("Error with the scraping:", exc)
            traceback.print_exc()
        else:
            print("{}: Successfully finished scraping".format(s.timestamp))
        time.sleep(SCRAPE_FREQ * 60)  # SCRAPE_FREQ is in minutes
Exemple #14
0
    def tls_connection(self):
        """Accept TLS connections on port 8080 and return the first non-empty
        payload received; logs each connection to Mongo.

        Returns the received bytes, or None if an exception occurs.
        """
        try:
            print("App 2 connecting on port 8080 using SSL (TLS)")
            s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)

            # Notify app 1 over a plain socket that this side is up.
            s2 = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            s2.connect(('localhost', 9999))
            s2.send(b'App 2 got connection from app 1')
            s2.close()

            # NOTE(review): ssl.wrap_socket is deprecated (removed in Python
            # 3.12); migrate to ssl.SSLContext.wrap_socket when upgrading.
            ssl_socket = ssl.wrap_socket(s,
                                         server_side=True,
                                         certfile="team1tls.crt",
                                         keyfile="team1tls.key")

            # Listen on port 8080.
            ssl_socket.bind(('localhost', 8080))
            ssl_socket.listen(5)
            mongoDB = MongoDB()
            while True:
                print("Accept connections from outside")
                (clientSocket, address) = ssl_socket.accept()
                dataReceived = clientSocket.recv(1024)
                print(dataReceived)
                mongoDB.mongoInstance("Test", "Got Connection")
                # Fixed: recv() never returns None -- it returns b'' when the
                # peer closes, so the old "is None" branch was unreachable.
                # Keep waiting on an empty read; return the first real payload.
                if not dataReceived:
                    print("data is none")
                else:
                    return dataReceived
        except Exception as e:
            print(e)
Exemple #15
0
def main():
    """Run the end-to-end payload flow -- fetch the JSON payload, send it to
    App2, save it to disk -- then wait for the acknowledgement and report the
    elapsed time. (Dead commented-out setup code was removed.)"""
    startTime = time.time()
    mongoDB = MongoDB()

    print("Retrieving JSON payload from source.")
    payload = PayloadRetriever().readAndDecodeJSON(mongoDB)

    print("Sending payload to App2.")
    SendPayload().sendPayload(payload, mongoDB)

    print("Saving payload to text file.")
    PayloadSaver().savePayload(payload, mongoDB)

    # Block until the acknowledgement message arrives.
    RabbitReciever.getMessage(startTime)

    endTime = time.time()
    print(str(endTime - startTime) + " seconds")
Exemple #16
0
                                 '$lte': 5
                             }}))
        self.failUnlessEqual(range(11), _query_to_list({}))

    def test_sort(self):
        """find() should honor the sort, sort-by-field-name, and sort_list
        (including descending) orderings."""
        # sort by property
        # Fixed: the local was named 'sorted', shadowing the builtin.
        sorted_ns = [nm.n for nm in NumberModel.find(sort=NumberModel.n)]
        self.failUnlessEqual(range(11), sorted_ns)
        # sort by a field name
        subset = NumberModel.n.range(4, 8)
        res = NumberModel.find(subset, sort='quad')
        self.failUnlessEqual([0, 1, 1, 4], [nm.quad for nm in res])
        # sort by a list of fields
        res = NumberModel.find(subset, sort_list=['quad', NumberModel.n])
        self.failUnlessEqual([5, 4, 6, 7], [nm.n for nm in res])
        res = NumberModel.find(subset,
                               sort_list=['quad', ('n', maroondb.DESCENDING)])
        self.failUnlessEqual([5, 6, 4, 7], [nm.n for nm in res])


if __name__ == '__main__':
    # First CLI argument selects the database backend for the model tests.
    db = sys.argv[1]
    if db == 'mongo':
        Model.database = MongoDB(None, 'test_maroon', port=2727)
        Model.database.NumberModel.remove()
    elif db == 'mock':
        Model.database = MockDB(None)
    else:
        # Fixed: an unknown backend previously fell through silently and ran
        # the tests against whatever database was configured before.
        sys.exit("unknown database backend: %r (expected 'mongo' or 'mock')" % db)
    _number_set_up()
    # Drop our argument so unittest.main() doesn't try to parse it.
    del sys.argv[1]
    unittest.main()
def load_to_mongodb(tweets):
    """Bulk-insert the given tweets into MongoDB."""
    MongoDB().bulk_load(tweets)
 def testSendPayload(self):
    """sendPayload should report success for the decoded sample payload."""
    db = MongoDB()
    payload = PayloadRetriever().readAndDecodeJSON(db)
    self.assertTrue(SendPayload().sendPayload(payload, db))
Exemple #19
0
 def setUp(self):
     """Create a temp dir and point the model layer at a TeeDB that logs to a
     file while mirroring writes to a test Mongo instance."""
     self.dir = tempfile.mkdtemp()
     self.path = self.dir + "/log"
     maroon.Model.database = TeeDB(self.path,
                                   MongoDB(None, 'test_maroon', port=2727))
     # Start each test with a clean PersonModel collection.
     maroon.Model.database.PersonModel.remove()
 def testSavePayload(self):
     """savePayload should report success for the decoded sample payload."""
     db = MongoDB()
     self.assertTrue(PayloadSaver().savePayload(
         PayloadRetriever().readAndDecodeJSON(db),db))
Exemple #21
0
def tt(urls, dbs):
    """Worker loop: keep calling options() on the shared URL queue and DB
    until it reports there is no more work (returns a falsy value)."""
    while options(urls, dbs):
        pass


if __name__ == '__main__':
    # Spider scheduler entry point: five worker threads drain a shared
    # distributed URL queue and write results to Mongo.
    from threading import Thread
    spider_name = 'car'
    star_url = 'http://xian.taoche.com/all/?from=2103093&reffer=https://www.baidu.com/s?wd=%E6%B7%98%E8%BD%A6%E7%BD%91&rsv_spt=1&rsv_iqid=0xe7e34e6800000427&issp=1&f=8&rsv_bp=0&rsv_idx=2&ie=utf-8&tn=baiduhome_pg&rsv_enter=1&rsv_sug3=1&rsv_sug1=1&rsv_sug7=100&rsv_sug2=0&inputT=2355&rsv_sug4=2356'
    urls = DisUrls(spider=spider_name, url=star_url)
    dbs = MongoDB(db=spider_name)
    # Fixed: only the last-created thread was joined, so the main thread could
    # exit while the other four workers were still running. Track and join all.
    threads = []
    for x in range(5):
        t = Thread(target=tt, args=(urls, dbs))
        t.start()
        threads.append(t)
    for t in threads:
        t.join()
    #线程调度方式二
'''    while True:
        #pool.apply_async(options,args=(urls,dbs))
        t=Thread(target=options,args=(urls,dbs))
        t.start()
        #设置爬虫结束条件
        for x in range(3):
            if len(urls)!=0:
                break
            time.sleep(3)
Exemple #22
0
def process(body):
    """Evaluate one adversarial submission: render and save the adversarial
    and composite images, classify the composite with the standard model and
    every mutation model, persist one result document per mutation to Mongo,
    and return the documents.

    :param body: job dict with keys 'adversial_str' (base64, possibly with a
        data-URI prefix), 'exam_id', 'image' ({'id', 'path'}),
        'mutation_models' (list of {'id', 'path'}), 'standard_model_path',
        and 'user_id'.
    :returns: list of result dicts (one per mutation model).
    """
    # ------ configuration and clients ------
    # TODO maybe need try catch to avoid key-not-exist errors
    config = init()
    settings = {'ip': config.ip, 'port': config.port, 'db_name': config.db}
    client = MongoDB.MongoDB(settings)
    image_base_path = config.image_path
    model_base_path = config.model_path
    generate_image_base_path = config.generate_image_path
    adversial_str = body['adversial_str']
    exam_id = body['exam_id']
    image_id = body['image']['id']
    image_path = body['image']['path']
    models = body['mutation_models']
    standard_model_path = body["standard_model_path"]
    user_id = body['user_id']

    # ---- decode the perturbation, load the original, derive image arrays ----
    if ',' in adversial_str:
        # Strip a data-URI prefix, keeping only the raw base64 payload.
        adversial_str = adversial_str.split(',')[1]
    adversial = base64.b64decode(adversial_str)
    image_data = BytesIO(adversial)
    adversial_image = Image.open(image_data).convert('L').resize((28, 28))
    original_path = os.path.join(OSPath(image_base_path), OSPath(image_path))
    original_image = Image.open(original_path).convert('L')
    # getMutaionImage yields (original, adversarial-noise, composite) arrays.
    original_data, adversial_data, compose_data = getMutaionImage(
        original_image, adversial_image)
    time_str = str(int(time.time()))
    # Output locations for the generated PNGs.
    adversial_path = '_'.join(
        (str(user_id), str(exam_id), 'adversial', time_str)) + '.png'
    compose_path = '_'.join(
        (str(user_id), str(exam_id), 'compose', time_str)) + '.png'
    adversial_image = Image.fromarray(
        (adversial_data.reshape(28, 28) * 255).astype(np.uint8))
    # Fixed: the composite image was rendered from adversial_data, so the
    # saved 'compose' PNG was a duplicate of the adversarial image (compare
    # the corrected queue-worker variant of this pipeline).
    compose_image = Image.fromarray(
        (compose_data.reshape(28, 28) * 255).astype(np.uint8))
    print(
        os.path.join(OSPath(generate_image_base_path), OSPath(adversial_path)))
    print(os.path.join(OSPath(generate_image_base_path), OSPath(compose_path)))
    adversial_image.save(
        os.path.join(OSPath(generate_image_base_path), OSPath(adversial_path)))
    compose_image.save(
        os.path.join(OSPath(generate_image_base_path), OSPath(compose_path)))

    # ---- run the models and build the result documents ----
    results = []

    # Clear Keras session state before loading models to avoid memory leaks.
    K.clear_session()
    standard_model = load_model(
        os.path.join(OSPath(model_base_path), OSPath(standard_model_path)))
    # Split models exposing the three activation layers.
    standard_layer1, standard_layer2, standard_layer3 = getActivationLayers(
        standard_model)
    # Warm-up call so the model graph is built before the real predictions.
    temp = standard_model.predict(np.zeros((1, 784)))
    standard_result = np.argmax(standard_model.predict(
        compose_data.reshape((-1, 784)))[0],
                                axis=0)
    standard_layer1_output = standard_layer1.predict(
        compose_data.reshape((-1, 784)))[0]
    standard_layer2_output = standard_layer2.predict(
        compose_data.reshape((-1, 784)))[0]
    standard_layer3_output = standard_layer3.predict(
        compose_data.reshape((-1, 784)))[0]
    standard_activation_data = [
        standard_layer1_output.tolist(),
        standard_layer2_output.tolist(),
        standard_layer3_output.tolist()
    ]

    for mutation in models:
        # Each mutation entry carries 'id' and 'path'.
        mutation_model = load_model(
            os.path.join(OSPath(model_base_path), OSPath(mutation['path'])))
        mutation_layer1, mutation_layer2, mutation_layer3 = getActivationLayers(
            mutation_model)
        mutation_result = np.argmax(mutation_model.predict(
            compose_data.reshape((-1, 784)))[0],
                                    axis=0)
        mutation_layer1_output = mutation_layer1.predict(
            compose_data.reshape((-1, 784)))[0]
        mutation_layer2_output = mutation_layer2.predict(
            compose_data.reshape((-1, 784)))[0]
        mutation_layer3_output = mutation_layer3.predict(
            compose_data.reshape((-1, 784)))[0]
        mutation_activation_data = [
            mutation_layer1_output.tolist(),
            mutation_layer2_output.tolist(),
            mutation_layer3_output.tolist()
        ]
        # A mutant is 'killed' when it disagrees with the standard model;
        # only then is a score computed.
        isKilled = False
        score = 0.0
        if mutation_result != standard_result:
            isKilled = True
            score = cal_score(original_data, compose_data)
        result = {
            'exam_id': exam_id,
            'user_id': user_id,
            'image_id': image_id,
            'model_id': mutation['id'],
            'isKilled': isKilled,
            'adversial_path': adversial_path,
            'compose_path': compose_path,
            'standard_activation_data': standard_activation_data,
            'mutation_activation_data': mutation_activation_data,
            'score': score,
            'submit_time': time.strftime('%Y-%m-%d %H:%M:%S',
                                         time.localtime(time.time()))
        }
        results.append(result)

    # Persist all per-mutation documents in one bulk insert.
    client.change_collection('submit_data')
    client.insert_many(results)
    client.conn.close()

    return results
Exemple #23
0
from flask import *
from flask_cors import *

from mongo.MongoDB import *

# Credential placeholders (both username and password are redacted here).
USERNAME = PASSWORD = "******"
DB = "sandboxDB"
# Collection name -> id strategy; AUTO_INCREMENT comes from mongo.MongoDB.
COLLECTIONS = {"sandbox": AUTO_INCREMENT}
# NOTE(review): credentials are embedded in the connection string -- load them
# from environment variables before deploying.
URL = f"mongodb+srv://{USERNAME}:{PASSWORD}@cluster0.u6lhh.mongodb.net/{DB}?retryWrites=true&w=majority"

mongo = MongoDB(url=URL, database=DB, collections=COLLECTIONS)
sandbox = mongo.collection["sandbox"]

app = Flask(__name__)


@app.route('/', methods=['GET'])
def index():
    """Return the sandbox API landing page listing the available endpoints."""
    lines = [
        "<h1>API Sandbox</h1>",
        "<p> To play around with the APIs, follow the guide below</p>",
        "<ul>",
        "<li> /add, POST, (request body required)",
        "<li> /showAll, GET",
        "<li> /update/<id>, PUT",
        "<li> /delete/<id>, DELETE",
        "</ul>",
    ]
    return "".join(lines)


@app.route('/add', methods=['POST'])
def add_to_db():
    obj = request.get_json()
Exemple #24
0
# *-* coding: utf-8 *-*

import requests
import datetime
from hashlib import md5
from mongo import MongoDB

try:
    print("Starting importing process.....")
    mongo_db = MongoDB(database_name='marvel',
                       collection_name='character_info')
    data_in_db = mongo_db.get_all()
    if not data_in_db:
        START_URL = "https://gateway.marvel.com/v1/public/characters"
        PRIVATE_KEY = "0f56ea9eaad631cee9aed42478145bac470364fb"
        PUBLIC_KEY = "f455a0f7b332cf2db81353408eabae56"
        timestamp = str(datetime.datetime.now().timestamp())
        hash_ = timestamp + PRIVATE_KEY + PUBLIC_KEY
        params = {
            "ts": timestamp,
            "apikey": PUBLIC_KEY,
            "hash": md5(hash_.encode()).hexdigest()
        }
        print("Making request to  Marvel.....")
        request = requests.get(START_URL, params)

        if request.status_code == 200:
            response = request.json()
            print("Inserting results.....")
            for value in response["data"]["results"]:
                mongo_db.insert(value)
Exemple #25
0
from mongo.MongoDB import *

# Redacted credential for the Atlas cluster.
PASSWORD = "******"
DATABASE = "dealership"
COLLECTIONS = ["car", "dealer"]
# NOTE(review): the password is embedded in the URI -- prefer environment
# variables over committing connection strings.
URL = f"mongodb+srv://alan:{PASSWORD}@cluster0.u6lhh.mongodb.net/{DATABASE}?retryWrites=true&w=majority"

mongo = MongoDB(database=DATABASE, docs=COLLECTIONS, url=URL)
car = mongo.collection["car"]
dealer = mongo.collection["dealer"]

# Sample seed data inserted by main().
cars = \
    [
        {"make": "honda", "model": "civic", "year": 2001},
        {"make": "toyota", "model": "camry", "year": 2005},
    ]

dealers = \
    [
        {"name": "varshika", "age": 23, "loves": ["sleeping", "watching tv"]},
        {"name": "smruti", "age": 20, "loves": ["bubble tea", "beer", "sleeping"]},
        {"name": "angela", "age": 21, "loves": ["python", "books"]},
        {"name": "omar", "age": 25, "loves": ["sql", "shell scripts", "research", "flask"]}
    ]


def main():
    """Seed the dealership collections with the sample dealers and cars."""
    for record in dealers:
        dealer.add(record)
    for record in cars:
        car.add(record)
Exemple #26
0
from time import sleep
from reddit import RedditClient
from mongo import MongoDB
from datetime import datetime

mongodb = MongoDB()
rc = RedditClient()
# Timestamp key used to tag this run's comment/score snapshots.
# NOTE(review): the name 'time' shadows the stdlib time module if it is
# imported in this file -- consider renaming.
time = datetime.now().strftime("%Y-%m-%d-%H:%M:%S")

# Maximum number of posts to fetch per listing.
LIMIT = 1000

def setSubreddit(subredditName):
    """Point the shared Reddit client at the given subreddit."""
    rc.getSubreddit(subredditName)

def getMemes(type):
    memesArr = []
    memes = rc.getPosts(type, LIMIT)
    for meme in memes:
        try:
            m = {
                "type": type,
                "author": meme.author.name,
                "title": meme.title,
                "id": meme.id,
                "comments": [
                    {time: meme.num_comments}
                ],
                "permalink": meme.permalink,
                "score": [
                    {time: meme.score}
                ],
Exemple #27
0
class Scraper(threading.Thread):

    logger.info("Connecting with mongo database with parameters .....")
    logger.info("host %s" % (mongo_host))
    logger.info("port %s" % (mongo_port))
    logger.info("database %s" % (mongo_database))

    mongo = MongoDB(mongo_host, mongo_port, mongo_database, mongo_collection)
    tweet = mongo.db.tweets
    urls = {}
    count = 1

    def __init__(self, _url, _json):
        threading.Thread.__init__(self)
        self.url = _url
        self.json = _json
        try:
            resp = urllib.urlopen(self.url)
        except:
            return
        if resp.getcode() is 200:
            self.url = resp.url

    def run(self):

        if self.urls.has_key(self.url):
            print "URL %s has already been visited " % (self.url)
            logger.info("URL %s has already been visited " % (self.url))
            return

        self.urls[self.url] = True
        header = {'User-Agent': 'Mozilla/5.0'}
        req = urllib2.Request(self.url, headers=header)
        try:
            logger.info("Making HTTP request for url %s " % (self.url))
            page = urllib2.urlopen(req)
        except:
            logger.info("HTTP request for url %s is crashed" % (self.url))
            return

        logger.info("HTTP Request for url %s successfull" % (self.url))
        soup = BeautifulSoup(page)

        # kill all script and style elements
        for script in soup(["script", "style", "a"]):
            script.extract()  # rip it out

        # get text
        text = soup.get_text()

        # break into lines and remove leading and trailing space on each
        lines = (line.strip() for line in text.splitlines())
        # break multi-headlines into a line each
        chunks = (phrase.strip() for line in lines
                  for phrase in line.split("  "))
        # drop blank lines
        text = '\n'.join(chunk for chunk in chunks if chunk)
        fin = open(base_dir + str(Scraper.count), "w")
        print Scraper.count
        self.json['doc_id'] = Scraper.count
        Scraper.count += 1
        self.tweet.insert(self.json)
        print "Tweet inserted in mongo"
        logger.info("Tweet inserted in mongo")
        fin.write(text.encode('utf-8'))
        fin.close()


#url = "http://newjobs.pk/government-jobs/medical/jobs-in-shalamar-hospital-lahore"
#thraed = Scraper(1,url)
#thraed.start()
def process():
    """Worker loop: poll the Redis image queue, classify each queued composite
    image with the pre-loaded standard model, persist one result document per
    job to Mongo, and publish the results back to Redis keyed by the job id.

    Runs forever; sleeps config.SERVER_SLEEP seconds between queue polls.
    (Commented-out dead code for the per-mutation-model path was removed.)
    """
    print("* Initialize parameters...")
    mongodb_settings = {
        'ip': config.ip,
        'port': config.port,
        'db_name': config.db
    }
    image_base_path = config.image_path
    model_base_path = config.model_path
    generate_image_base_path = config.generate_image_path
    print("* End initialize")
    print("* Initialize mongodb connection...")
    client = MongoDB.MongoDB(settings=mongodb_settings)
    print("* Connect success")
    print("* Loading model")
    # The standard model is loaded once up front and reused for every job.
    standard_model = load_model(os.path.join(OSPath(model_base_path), OSPath('standard_model.hdf5')))
    print("* Finish loading")

    while True:
        # Grab up to BATCH_SIZE pending jobs from the head of the Redis list.
        queue = redis_db.lrange(config.IMAGE_QUEUE, 0, config.BATCH_SIZE - 1)
        imageIDs = []

        for q in queue:
            # ------- unpack the job payload -------------------
            q = json.loads(q.decode('utf-8'))

            imageId = q['id']
            compose_image_str = q['compose_image_str']
            exam_id = q['exam_id']
            image_id = q['image']['id']
            image_path = q['image']['path']
            image_tag = q['image']['tag']
            standard_model_path = q['standard_model_path']
            user_id = q['user_id']
            case_id = q['case_id']
            print(q)

            imageIDs.append(imageId)

            # ---- decode the composite, load the original, derive image arrays ----
            if ',' in compose_image_str:
                # Strip a data-URI prefix, keeping only the raw base64 payload.
                compose_image_str = compose_image_str.split(',')[1]
            compose = base64.b64decode(compose_image_str)
            image_data = BytesIO(compose)
            compose_image = Image.open(image_data).convert('L').resize((28, 28))
            original_path = os.path.join(OSPath(image_base_path), OSPath(image_path))
            original_image = Image.open(original_path).convert('L')
            # getMutaionImage yields (original, adversarial-noise, composite) arrays.
            original_data, adversial_data, compose_data = getMutaionImage(original_image, compose_image)
            time_str = str(int(time.time()))
            # Output locations for the generated PNGs (noise + composite).
            adversial_path = '_'.join((str(user_id), str(exam_id), 'adversial', time_str)) + '.png'
            compose_path = '_'.join((str(user_id), str(exam_id), 'compose', time_str)) + '.png'
            adversial_image = Image.fromarray((adversial_data.reshape(28, 28) * 255).astype(np.uint8))
            compose_image = Image.fromarray((compose_data.reshape(28, 28) * 255).astype(np.uint8))
            print('* Save images')
            print(os.path.join(OSPath(generate_image_base_path), OSPath(adversial_path)))
            print(os.path.join(OSPath(generate_image_base_path), OSPath(compose_path)))
            adversial_image.save(os.path.join(OSPath(generate_image_base_path), OSPath(adversial_path)))
            compose_image.save(os.path.join(OSPath(generate_image_base_path), OSPath(compose_path)))
            print("* Finish saving")

            # ---------- score the composite and store the results ----------
            results = []

            # Split models exposing the standard model's three activation layers.
            standard_layer1, standard_layer2, standard_layer3 = getActivationLayers(standard_model)
            standard_result = np.argmax(standard_model.predict(compose_data.reshape((-1, 784)))[0], axis=0)
            standard_layer1_output = standard_layer1.predict(compose_data.reshape((-1, 784)))[0]
            standard_layer2_output = standard_layer2.predict(compose_data.reshape((-1, 784)))[0]
            standard_layer3_output = standard_layer3.predict(compose_data.reshape((-1, 784)))[0]
            standard_activation_data = [standard_layer1_output.tolist(), standard_layer2_output.tolist(),
                                        standard_layer3_output.tolist()]

            # The attack is considered a kill when the prediction no longer
            # matches the image's original tag; only then compute a score.
            isKilled = False
            score = 0.0
            if int(image_tag) != int(standard_result):
                isKilled = True
                # TODO: cal_score still needs dedicated tests.
                score = cal_score(original_data, compose_data)
            result = {
                'exam_id': exam_id,
                'user_id': user_id,
                'image_id': image_id,
                'case_id': case_id,
                'isKilled': isKilled,
                'adversial_path': adversial_path,
                'original_predict': int(image_tag),
                'compose_path': compose_path,
                'standard_predict': int(standard_result),
                'standard_activation_data': standard_activation_data,
                'score': score,
                'submit_time': time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time()))
            }
            results.append(result)

            results_cpy = copy.deepcopy(results)

            # Bulk-insert the submit documents.
            client.change_collection('submit_data')
            # NOTE(review): client.conn is closed after the first job, yet the
            # loop keeps reusing `client` on later iterations -- confirm the
            # driver reconnects, otherwise this breaks on the second job.
            client.insert_many(results)
            client.conn.close()

            # Publish the results back to Redis under the job id.
            redis_db.set(imageId, json.dumps(results_cpy))

        if len(imageIDs) > 0:
            # Trim the processed jobs off the front of the queue.
            redis_db.ltrim(config.IMAGE_QUEUE, len(imageIDs), -1)

        time.sleep(config.SERVER_SLEEP)
Exemple #29
0
'''
author: Utpal Das
'''
import time
import stripe
from flask import Flask, request, jsonify
from flask_cors import CORS, cross_origin
from mongo import MongoDB

app = Flask(__name__)
# Module-level scratch dict; the routes below assign to a local of the same
# name, so this global is never actually updated by them.
data = dict()

mongo =  MongoDB()

# NOTE(review): placeholder secret -- load the real Stripe key from the
# environment or configuration instead of committing it.
stripe.api_key = '<stripe secret key>'

@app.route('/',methods = ['POST'])
@cross_origin(origin='localhost',headers=['Content- Type','Authorization'])
def home():
    """Create or update a transaction from the raw POST body and return the
    stored record."""
    if request.method == 'POST':
        return mongo.insertorupdate_transaction(request.data)

@app.route('/<userid>/<date>',methods = ['GET'])
@cross_origin(origin='localhost',headers=['Content- Type','Authorization'])
def get_current_time(userid,date):
    """Return the stored transaction data for *userid* on *date*."""
    return mongo.read_transaction(userid, date)

@app.route('/create-checkout-session/<item>/<amount>', methods=['POST'])
@cross_origin(origin='localhost',headers=['Content- Type','Authorization'])
Exemple #30
0
 def __init__(self):
     """Open the Javdb 'rankdb' collection used for the popularity ranking."""
     self.rankdb = MongoDB('Javdb', 'rankdb')