Example #1
    def run(self):
        self.log("Running daemon ...")
        self.store.start()

        while True:
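            # poll the database for active query URLs and reconcile the worker threads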
            query = "SELECT id, url, is_archived FROM query_urls WHERE is_active = 1"
            conn = database.get_connection()
            cursor = database.get_cursor(conn)
            cursor.execute(query)
            active_ids = []

            # this loop spawns new threads after reading from the database
            while True:
                row = cursor.fetchone()
                if not row:
                    cursor.close()
                    break
                self.log("Found url %s at %s" % (row['id'], row['url']))
                active_ids.append(row['id'])
                if row['id'] not in self.tweet_threads:
                    self.log("Spawning thread for row %s \n" % (row['id']))
                    self.log("Spawing thread for query_url_id %s " % (row['id'],))
                    self.tweet_threads[row['id']] = TweetThread(row['id'], row['url'], self.store)
                    self.tweet_threads[row['id']].start()
            conn.close()

            # stop threads whose query URLs are no longer active
            for thread_id in list(self.tweet_threads.keys()):
                if thread_id not in active_ids:
                    self.log("Stopped thread %s" % (thread_id,))
                    self.tweet_threads[thread_id].stop()
                    del self.tweet_threads[thread_id]
            if len(self.tweet_threads) == 0:
                break
            time.sleep(600)
Example #2
def add_task():
    if not request.json:
        abort(400)

    #get the database connection
    cnx = get_connection()

    #get application
    application = load_application(request.json['applicationNumber'])
    if application is None:
        return make_response(jsonify( { 'error': 'Application not found' } ), 404)

    #Add Data
    #TODO: Check parameters
    try:               
        cursor = cnx.cursor()
        command = ("INSERT INTO `HumanTaskService`.`Tasks` (`ApplicationId`, `Params`, `Copies`) VALUES (%s, %s, %s)")
        #TODO Get the number of copies from application, if not avaliable in object
        data_command = (application['id'],request.json['params'],request.json['copies'])
        cursor.execute(command,data_command)
        cnx.commit()
    except mysql.connector.Error as err:
        abort(400) #invalid parameters
    
    #return the new object
    task_id = cursor.lastrowid

    task = {
        'id': task_id,
        'applicationNumber': request.json['applicationNumber'],
        'params': request.json['params'],
        'copies': request.json['copies']
    }

    return jsonify({ 'task': task }), 201
	def save_tractor(self,datos):
		msg = "TCPServer - Guardando datos de tractor..."
		print datos
		print msg
		logging.info(msg)
		logging.info(str(datos))
		
		conn, cursor = get_connection()
		print 'Actualizando tabla tractor...'
		print datos.get('codTractor')
		query = "UPDATE tractor SET actual = 0 WHERE codTractor = '"+datos.get('codTractor')+"';"
		
		cursor.execute(query)
		values = []
		cols = []
		for l in datos:
			cols.append(l)
			values.append(datos[l])
		print 'Insertando nuevo registro tractor en la base de datos..'
		query = "INSERT INTO tractor ("+",".join(cols)+",actual) VALUES ('"+"','".join(values)+"',1);"
		
		cursor.execute(query)
		conn.close()
		
		return True
	def save_satelite(self,datos):
		msg = "TCPServer - Guardando datos de satelite..."
		print msg
		
		logging.info(msg)
		logging.info(str(datos))
		nombre_imagen = 'img/'+datos.get('codSatelite','')+'_'+datos.get('fechaHora','')+'.jpg'
		
		print 'Procesando imagen...'
		# The image is saved to a directory and its path is stored in the database
		img = open(nombre_imagen,'wb')
		img.write(base64.b64decode(datos.get('imagen','')))
		img.close()
		
		values = []
		cols = []
		for l in datos:
			if l !='imagen' and datos[l]!='':
				cols.append(l)
				values.append(datos[l])
		query = "INSERT INTO satelite (imagen,"+",".join(cols)+") VALUES ('"+nombre_imagen+"','"+"','".join(values)+"' );"
		conn, cursor = get_connection()
		cursor.execute(query)
		conn.close()
		return True
def add_application():
    if not request.json:
        abort(400)

    #get the database connection
    cnx = get_connection()

    #Add Data
    #TODO: Check parameters
    try:
        #create the new UUID
        guid = uuid.uuid1()
        cursor = cnx.cursor()
        command = ("INSERT INTO `HumanTaskService`.`Applications` (`OwnerUserId`, `Name`, `Description`, `TaskCopies`, `TaskScheduler`,`MagicNumber` ) VALUES (%s, %s, %s, %s, %s, %s)")
        data_command = (request.json['ownerUserId'],request.json['name'],request.json['description'],request.json['taskCopies'],request.json['taskScheduler'],str(guid))
        cursor.execute(command,data_command)
        cnx.commit()
    except mysql.connector.Error as err:
        abort(400) #invalid parameters
    
    #return the new object
    app_id = cursor.lastrowid

    application = {
        'id': app_id,
        'ownerUserId': request.json['ownerUserId'],
        'name': request.json['name'],
        'description': request.json['description'],
        'taskCopies': request.json['taskCopies'],
        'taskScheduler': request.json['taskScheduler'],
        'magicNumber': str(guid)
    }

    return jsonify({ 'application': application }), 201
def get_results(taskId):
    #get the database connection
    cnx = get_connection()

    #Get Data
    cursor = cnx.cursor()
    query = ("SELECT * from `HumanTaskService`.`TaskResults` where TaskId="+str(taskId))
    cursor.execute(query)
    rows = cursor.fetchall()
    cursor.close()
    cnx.close()

    #Build result
    tasks = []
    if (len(rows) > 0):
        #get application
        application = load_application_id(rows[0][4])
        if application is None:
            return make_response(jsonify( { 'error': 'Application not found' } ), 404)

        #build result
        for task in rows:
            taskResult = {
                "id":task[0],
                "userId": task[1],
                "result": task[2],
                "finishDate": task[3],
                "applicationNumber": application['magicNumber'],
                "taskId":taskId
            }
            tasks.append(taskResult)

    return jsonify(TaskResults=tasks),200
def resolve_task():
    if not request.json:
        abort(400)

    #get the database connection
    cnx = get_connection()

    #get application
    application = load_application(request.json['applicationNumber'])
    if application is None:
        return make_response(jsonify( { 'error': 'Application not found' } ), 404)

    #Add Data
    #TODO: Check parameters
    try:               
        cursor = cnx.cursor()
        command = ("INSERT INTO `HumanTaskService`.`TaskResults` (`UserId`, `Result`, `FinishDate`, `ApplicationId`, `TaskId`) VALUES (%s, %s, %s, %s, %s)")
        #TODO Get the number of copies from application, if not avaliable in object
        data_command = (request.json['userId'],request.json['result'],datetime.datetime.now(),application['id'],request.json['taskId'])
        cursor.execute(command,data_command)
        cnx.commit()
    except mysql.connector.Error as err:
        abort(400) #invalid parameters
    
    #return the new object
    task_id = cursor.lastrowid

    #TODO: Check datetime format
    return jsonify(id= task_id,
        applicationNumber= request.json['applicationNumber'],
        userId= request.json['userId'],
        taskId= request.json['taskId'],
        result= request.json['result'],
        finishDate= datetime.datetime.now()), 201
Example #8
def get_tasks(appNumber):
    #get the database connection
    cnx = get_connection()

    #get application
    application = load_application(appNumber)
    if application is None:
        return make_response(jsonify( { 'error': 'Application not found' } ), 404)

    #Get Data
    cursor = cnx.cursor()
    query = ("SELECT * from `HumanTaskService`.`Tasks` where ApplicationId="+str(application['id']))
    cursor.execute(query)
    rows = cursor.fetchall()
    cursor.close()
    cnx.close()

    #Build result
    tasks = []
    if (len(rows) > 0):
        for task in rows:
            taskResult = {
                "id":task[0],
                "applicationNumber": appNumber,
                "params":task[2],
                "copies":task[3]
            }
            tasks.append(taskResult)

    return jsonify(Tasks=tasks),200
Example #9
def check_connection():
    #get the database connection
    cnx = get_connection()
    #Get Data
    cursor = cnx.cursor()
    query = ("SELECT count(*) FROM users")
    cursor.execute(query)
    rows = cursor.fetchall()

    cursor.close()
    cnx.close()
    return "database ok!"
Example #10
def archive(item, id = None):
    print "Starting to archive ..."
    conn = database.get_connection()
    cursor = database.get_cursor(conn)
    cursor.execute("SELECT url FROM query_urls WHERE id = %s", (item,))
    result = cursor.fetchone()
    cursor.close()
    url = result['url']
    if id is None:
        cursor = database.get_cursor(conn)
        cursor.execute("SELECT min(twitter_id) AS twitter_id FROM tweets WHERE query_url_id = %s", (item, ))
        result = cursor.fetchone()
        if result is not None:
            id = result['twitter_id']
    conn.close()
    archiver = Archiver(url, item, id)
    archiver.run()
Example #11
def reset_verify(uid, date_hash):
    connection = get_connection()
    cursor = get_cursor()
    with connection:
        with cursor:
            cursor.execute("SELECT last_login FROM user_account WHERE id = %s",
                           str(uid))
            row = cursor.fetchone()
            if safe_str_cmp(
                    hashlib.md5(str(row['last_login']).encode()).hexdigest(),
                    date_hash):
                session.clear()
                session['reset_user_id'] = uid
                session['reset_ok'] = True
                return redirect(url_for("auth.reset_password"))
            else:
                flash("Invalid reset link. Please try again.", 'error')
                return redirect(url_for("auth.request_reset"))
Example #12
def edit_campaign(id):
    connection = get_connection()
    cursor = get_cursor()
    cursor.execute(
        """SELECT name, description, image, amount_requested FROM campaign WHERE id=%s;""",
        (id, ))
    curr_campaign = cursor.fetchone()
    curr_campaign['amount_requested'] = Decimal(
        curr_campaign['amount_requested'].replace(",", "").replace("$", ""))
    curr_campaign = dict(
        zip(list(curr_campaign._index.keys()), list(curr_campaign)))
    current_app.logger.info(curr_campaign)
    form = CampaignEditForm(**curr_campaign)
    owner_id = None

    try:
        cursor.execute(
            """SELECT user_account_id FROM campaign_relation WHERE campaign_id=%s AND user_role='owner'""",
            (id, ))
        owner_id = cursor.fetchone()[0]

    except Exception as e:
        current_app.logger.error(e)

    if owner_id != session['user_id']:
        flash("You cannot edit a campaign you don't own!", 'error')
        return redirect(url_for("campaign.view_campaign", id=id))

    if form.validate_on_submit():
        try:
            with connection:
                with cursor:
                    cursor.execute(
                        """UPDATE campaign SET (name, description, image, amount_requested) =  
                                      (%s, %s, %s, %s) WHERE id=%s;""",
                        (form.name.data, form.description.data,
                         form.image.data, form.amount_requested.data,
                         form.campaign_id.data))
            flash("Succesully updated!", 'success')
            return redirect(url_for("campaign.view_campaign", id=id))
        except Exception as e:
            current_app.logger.error(e)

    return render_template("campaign/edit.html", form=form, id=id)
Example #13
async def process_callback_xcall(callback_query: types.CallbackQuery):
    user_id = callback_query.from_user.id

    try:
        callback_data = CallbackData.deserialize(callback_query.data)
    except (KeyError, ValueError):
        logging.warning(
            f"Callback data deserialize error: data=[{callback_query.data}]",
            exc_info=True)
        await bot.answer_callback_query(callback_query_id=callback_query.id,
                                        text="Что-то пошло не так!")
        return await callback_query.message.delete()

    if callback_data.user_id != user_id:
        logging.warning(f"Wrong user:"******" user_id=[{user_id}],"
                        f" callback_data={callback_data}")
        return await bot.answer_callback_query(
            callback_query_id=callback_query.id,
            text="Это чужой диалог!",
            show_alert=True)

    if callback_data.type == CallbackType.CANCEL:
        await callback_query.message.delete()
        return await bot.answer_callback_query(
            callback_query_id=callback_query.id,
            text="Операция отменена!",
        )

    with db.get_connection() as conn:
        members = db.select_members(conn, group_id=callback_data.group_id)

    if len(members) == 0:
        return await bot.answer_callback_query(
            callback_query_id=callback_query.id,
            text="Эта группа пуста! Выберите другую.",
            show_alert=True)

    await bot.answer_callback_query(callback_query.id)

    mentions = convert_members_to_mentions(members)
    await callback_query.message.edit_text(" ".join(mentions),
                                           parse_mode=ParseMode.MARKDOWN)
def get_applications():
    #get the database connection
    cnx = get_connection()

    #Get Data
    cursor = cnx.cursor()
    query = ("SELECT * from `HumanTaskService`.`Applications` ORDER BY Name")
    cursor.execute(query)
    rows = cursor.fetchall()
    cursor.close()
    cnx.close()

    #Build result
    applications = []
    if (len(rows) > 0):
        for application in rows:
            applications.append(application)

    return jsonify(Applications=applications),200
Example #16
def get_newtask(user,appNumber):
    #get the database connection
    cnx = get_connection()

    #
    #TODO: Get User
    #


    #get application
    application = load_application(appNumber)
    if application is None:
        return make_response(jsonify( { 'error': 'Application not found' } ), 404)

    #Get Data
    cursor = cnx.cursor()
    #
    #TODO Check the scheduler type
    #

    #Sequencial schedule
    query = ("SELECT * FROM HumanTaskService.Tasks WHERE HumanTaskService.Tasks.ApplicationId = " + str(application['id']) + 
             " AND HumanTaskService.Tasks.id NOT IN (select TaskId from HumanTaskService.TaskResults where HumanTaskService.TaskResults.ApplicationId = " + str(application['id']) + 
             " AND HumanTaskService.TaskResults.UserId = " + str(user) + ") LIMIT 1")
    cursor.execute(query)
    rows = cursor.fetchall()
    cursor.close()
    cnx.close()

    #Build result
    if (len(rows) > 0):
        return jsonify(
            id=rows[0][0],
            applicationNumber=appNumber,
            params=rows[0][2],
            copies=rows[0][3]
        ), 200

    #
    #TODO: No more tasks..and now?
    #
    return None
Example #17
def import_file_to_database(file, database, table, truncate, ignore_fields):
    if truncate:
        truncate_table(database, table)

    stream = io.StringIO(file.stream.read().decode("UTF8"), newline=None)
    csv_input = csv.DictReader(stream)

    file_upload_id = uuid.uuid1()

    file_fieldnames = csv_input.fieldnames
    file_fieldnames_count = len(csv_input.fieldnames)
    table_columns = get_table_columns(database, table)

    if ignore_fields:
        file_fieldnames = list(set(file_fieldnames).intersection(set(table_columns)))
        file_fieldnames_count = len(file_fieldnames)
    else:
        invalid_columns = list(set(file_fieldnames).difference(set(table_columns)))
        if invalid_columns:
            raise ValueError('Invalid columns: {}  Table has columns: {}'.format(invalid_columns, table_columns))

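    # build the column list plus one extra placeholder for the generated FileUploadID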
    query_columns = ', '.join(map(str, file_fieldnames)) + ', FileUploadID'

    insert_query = 'INSERT INTO {table} ({columns}) VALUES '.format(table=table, columns=query_columns)
    value_subs = '(' + ', '.join(map(str, ['%s'] * (file_fieldnames_count + 1))) + ')'
    query_values = [[row[file_fieldnames[i]] for i in range(0, file_fieldnames_count)] for row in csv_input]
    for values in query_values:
        values.append(str(file_upload_id))
    query_values = [tuple(values) for values in query_values]

    conn = get_connection(database)
    cursor = conn.cursor()

    try:
        cursor.executemany(insert_query + value_subs, query_values)
    except Exception as e:
        raise ValueError((e, insert_query + value_subs, query_values))

    conn.commit()

    cursor.close()
    conn.close()
Example #18
async def handler_remove_group(message: types.Message):
    await check_access(message, Grant.WRITE_ACCESS)
    match = constraints.REGEX_CMD_GROUP.search(message.text)
    if not match:
        return await message.reply(md.text(
            md_style.bold("Пример вызова:"),
            md_style.code("/remove_group group"),
            " ",
            md_style.bold("Ограничения:"),
            md.text("group:", constraints.MESSAGE_FOR_GROUP),
            sep='\n'),
                                   parse_mode=ParseMode.MARKDOWN)
    group_name = match.group("group")

    with db.get_connection() as conn:
        db.select_chat_for_update(conn, chat_id=message.chat.id)
        group = db.select_group_by_alias_name(conn,
                                              chat_id=message.chat.id,
                                              alias_name=group_name)
        if not group:
            return await message.reply(md.text('Группа',
                                               md_style.code(group_name),
                                               'не найдена!'),
                                       parse_mode=ParseMode.MARKDOWN)
        logging.info(f"group: {group}")
        members = db.select_members(conn, group.group_id)
        if len(members) != 0:
            logging.info(f"members: {members}")
            return await message.reply(
                'Группу нельзя удалить, в ней есть пользователи!')

        group_aliases = db.select_group_aliases_by_group_id(
            conn, group_id=group.group_id)

        for a in group_aliases:
            db.delete_group_alias(conn, alias_id=a.alias_id)

        db.delete_group(conn, group_id=group.group_id)

    await message.reply(md.text("Группа", md_style.bold(group_name),
                                "удалена!"),
                        parse_mode=ParseMode.MARKDOWN)
Example #19
async def handler_list_members(message: types.Message):
    await check_access(message, grant=Grant.READ_ACCESS)
    match = constraints.REGEX_CMD_GROUP.search(message.text)
    if not match:
        return await message.reply(md.text(md_style.bold("Пример вызова:"),
                                           md_style.code("/members group"),
                                           " ",
                                           md_style.bold("Ограничения:"),
                                           md.text(
                                               "group:",
                                               constraints.MESSAGE_FOR_GROUP),
                                           sep='\n'),
                                   parse_mode=ParseMode.MARKDOWN)
    group_name = match.group("group")

    with db.get_connection() as conn:
        group = db.select_group_by_alias_name(conn,
                                              chat_id=message.chat.id,
                                              alias_name=group_name)
        if not group:
            return await message.reply(md.text('Группа',
                                               md_style.code(group_name),
                                               'не найдена!'),
                                       parse_mode=ParseMode.MARKDOWN)
        members = db.select_members(conn, group_id=group.group_id)

    members = sorted(convert_members_to_names(members))
    logging.info(f"members: {members}")

    if len(members) == 0:
        text = md.text(
            "В группе",
            md_style.code(group_name),
            "нет ни одного пользователя!",
        )
    else:
        text = md.text(md.text(md_style.bold("Участники группы"),
                               md_style.code(group_name)),
                       md_style.code("\n".join([f"- {x}" for x in members])),
                       sep='\n')

    await message.reply(text, parse_mode=ParseMode.MARKDOWN)
Example #20
async def handler_xcall(message: types.Message):
    await check_access(message, grant=Grant.READ_ACCESS)

    with db.get_connection() as conn:
        aliases: List[GroupAlias] = db.select_group_aliases_by_chat_id(
            conn, chat_id=message.chat.id)
        if len(aliases) == 0:
            return await message.reply("Нет ни одной группы.",
                                       parse_mode=ParseMode.MARKDOWN)

        aliases_lookup: Dict[int, List[GroupAlias]] = {}
        for a in aliases:
            aliases_lookup.setdefault(a.group_id, []).append(a)

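    # build an inline keyboard: a cancel button first, then one button per group,
    # labelled with the group's primary alias and any synonyms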
    inline_keyboard = InlineKeyboardMarkup()

    inline_keyboard.add(
        InlineKeyboardButton(text="✖ Отмена ✖",
                             callback_data=CallbackData(
                                 type=CallbackType.CANCEL,
                                 user_id=message.from_user.id).serialize()))

    groups_for_print = []
    for group_id in sorted({x.group_id for x in aliases}):
        group_aliases = sorted(aliases_lookup.get(group_id, []),
                               key=lambda x: x.alias_id)
        group_aliases = [x.alias_name for x in group_aliases]
        head, *tail = group_aliases
        tail = f" (синонимы: {', '.join(tail)})" if len(tail) > 0 else ""
        groups_for_print.append(f"{head}{tail}")

        inline_keyboard.add(
            InlineKeyboardButton(text=f"{head}{tail}",
                                 callback_data=CallbackData(
                                     type=CallbackType.SELECT_GROUP,
                                     user_id=message.from_user.id,
                                     group_id=group_id).serialize()))

    await message.reply(md_style.bold("Выберите группу"),
                        parse_mode=ParseMode.MARKDOWN,
                        reply_markup=inline_keyboard)
Example #21
def acsess_main_page():
    language = request.args.get("l")
    q_i = request.args.get("i")

    if language is None:
        language = "en"

    textdef = td.get_textdef(language)

    if q_i is not None:
        with database.get_connection() as conn:
            with conn.cursor(cursor_factory=DictCursor) as cur:
                cur.execute("SELECT * FROM images WHERE file_id=%s", (q_i, ))
                record = cur.fetchone()
        if record is None:
            return redirect("/image_missing?l=" + language)
        else:
            result = record["result"]
            result_texts = []
            for r in result:
                result_texts.append(textdef[r["name"]] + ": " +
                                    str(r["rate"]) + "%")
            detail = " / ".join(result_texts)
            answer = textdef[result[0]["name"]]
            img_path = STORAGE_URL + record["file_id"]
            url = urllib.parse.quote(APP_URL + "/k?l=" + language + "&i=" +
                                     record["file_id"])
            html = render_template('result.html',
                                   language=language,
                                   filepath=img_path,
                                   detail=detail,
                                   answer=answer,
                                   url=url,
                                   textdef=textdef,
                                   textdef_text=json.dumps(textdef))
    else:
        html = render_template('index.html',
                               language=language,
                               textdef=textdef,
                               textdef_text=json.dumps(textdef))
    return html
Example #22
    async def handle_message(self, bot, message):
        if len(message.content) == 0:
            return

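        # hash the lower-cased message so repeated content can be detected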
        h = hashlib.sha256(message.content.lower().encode('utf-8')).hexdigest()
        should_delete = False
        self.mutex.acquire()
        try:
            if h in self.cache:
                should_delete = True
            else:
                conn = database.get_connection()
                c = conn.cursor()
                c.execute('INSERT INTO message (hash) VALUES (?)', (h,))
                conn.commit()
                conn.close()
                self.cache.add(h)
        finally:
            self.mutex.release()
        if should_delete:
            await message.delete(delay=0.5)
Example #23
def main():
    try:
        db.create_pool()
        with db.get_connection() as conn:
            db.create_schema(conn)

        if settings.WEBHOOK_ENABLED:
            start_webhook(
                dispatcher=dp,
                webhook_path=settings.WEBHOOK_PATH,
                skip_updates=True,
                on_startup=bot_startup,
                host=settings.WEBAPP_HOST,
                port=settings.WEBAPP_PORT,
            )
        else:
            executor.start_polling(dp,
                                   on_startup=bot_startup,
                                   on_shutdown=bot_shutdown)
    finally:
        db.close_pool()
Example #24
def reset_password():
    form = ResetForm()
    connection = get_connection()
    cursor = get_cursor()
    if session['reset_ok'] and session[
            'reset_user_id'] is not None and form.validate_on_submit():
        with connection:
            with cursor:
                try:
                    cursor.execute(
                        "UPDATE user_account SET password = %s WHERE id = %s",
                        (generate_password_hash(
                            form.password.data), session['reset_user_id']))
                    flash("Reset successful!", 'success')
                    return redirect(url_for("auth.login"))
                except Exception as e:
                    error = "Something went wrong. Please try again later"
                    current_app.logger.error(e)
                    flash(error, 'error')

    return render_template("auth/password_reset.html", form=form)
Example #25
def delete_task(id):  
    #get the database connection
    cnx = get_connection()

    #Remove Data
    try:
        cursor = cnx.cursor()
        command = ("DELETE from `HumanTaskService`.`Tasks` WHERE id='" + str(id)+"'")
        cursor.execute(command)
        cnx.commit()
    except mysql.connector.Error as err:
        abort(400) #invalid parameters
    
    if (cursor.rowcount == 0):
        abort(404)  #not found

    #
    #TODO: remove all tasksruns
    #

    return jsonify({ 'result': True }), 200
	def save_clima(self,datos):
		msg = "UDPServer - Guardando datos de clima..."
		print datos
		print msg
		logging.info(msg)
		logging.info(str(datos))
		conn, cursor = get_connection()
		
		values = []
		cols = []
		for l in datos:
			cols.append(l)
			values.append(datos[l])
		print 'Insertando nuevo registro clima en la base de datos..'
		query = "INSERT INTO clima ("+",".join(cols)+") VALUES ('"+"','".join(values)+"');"
		
		cursor.execute(query)
		
		conn.close()
		
		return True
def delete_application(guid):  
    #get the database connection
    cnx = get_connection()

    #Remove Data
    try:
        cursor = cnx.cursor()
        command = ("DELETE FROM `HumanTaskService`.`Applications` WHERE MagicNumber='" + str(guid)+"'")
        cursor.execute(command)
        cnx.commit()
    except mysql.connector.Error as err:
        abort(400) #invalid parameters
    
    if (cursor.rowcount == 0):
        abort(404)  #not found

    #
    #TODO: remove all tasks, etc
    #

    return jsonify({ 'result': True }), 200
Example #28
def add_to_maps_database(origin, destination):
    db = database.get_connection()
    cur = db.cursor()
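    # three cases: only an origin (start a new row), only a destination
    # (complete the most recent row), or both at once (insert a full row)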
    if destination == "":
        cur.execute(
            "INSERT INTO directions_table(origin_location) VALUES (%s)",
            (origin, ))
        db.commit()
    elif origin == "":
        cur.execute("SELECT id FROM directions_table ORDER BY id DESC")
        res = cur.fetchone()
        last_id = res[0]
        cur.execute(
            'UPDATE directions_table SET destination_location=%s WHERE id=%s',
            (destination, last_id))
        db.commit()
    else:
        cur.execute(
            "INSERT INTO directions_table(origin_location,destination_location) VALUES (%s,%s)",
            (origin, destination))
        db.commit()
Example #29
async def check_access(message: types.Message, grant: Grant):
    chat_id = message.chat.id
    user_id = message.from_user.id

    chat_member: ChatMember = await message.chat.get_member(user_id=user_id)

    logging.info(f"Request from chat member:"
                 f" chat_id=[{chat_id}],"
                 f" chat_type=[{message.chat.type}],"
                 f" user_id=[{message.from_user.id}],"
                 f" chat_member_status=[{chat_member.status}],"
                 f" grant=[{grant}]")

    is_private = ChatType.is_private(message)
    is_creator_or_admin = chat_member.is_chat_creator(
    ) or chat_member.is_chat_admin()

    if is_private:
        logging.info("No restrictions in private chat")
    elif is_creator_or_admin:
        logging.info("No restrictions for creator or admin")
    else:
        if grant == Grant.READ_ACCESS:
            logging.info("No restrictions for read access")
        elif grant == Grant.WRITE_ACCESS:
            with db.get_connection() as conn:
                chat = db.select_chat(conn, chat_id=chat_id)
            if not chat:
                raise AuthorizationError(
                    "Chat not found => anarchy is disabled by default")
            elif not chat.is_anarchy_enabled:
                raise AuthorizationError("Chat found, anarchy is disabled")
            else:
                logging.info("Anarchy enabled for chat")
        elif grant == Grant.CHANGE_CHAT_SETTINGS:
            raise AuthorizationError(
                "Action allowed only for creator or admin")
        else:
            raise IllegalStateError(f"Unknown grant [{grant}]")
Example #30
async def analyze_all_users(limit=100, from_newest=False, prioritize_new=True):
    if limit > 100:
        raise UserWarning(batch_one_hundred)

    async with database.get_connection() as connection:
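        # fetch every known transcriber; the RIGHT OUTER JOIN keeps users with
        # no gamma change in the last 24 hours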
        transcribers = await connection.fetch("""
            SELECT
                COALESCE(transcriber, name) AS name
                FROM (
                    SELECT transcriber
                    FROM gammas
                    WHERE time > NOW() - interval '24 hours'
                    GROUP BY transcriber
                    ORDER BY SUM(new_gamma - old_gamma) DESC
                ) top_gammas
                RIGHT OUTER JOIN transcribers ON name = transcriber
                ORDER BY transcriber;
            """)

    for transcriber in transcribers:
        await analyze_user(transcriber["name"], limit, from_newest,
                           prioritize_new)
Example #31
def create_quiz_record(quiz_data, email):

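    # the nested SELECT resolves the user's id from their email address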
    query = """INSERT INTO profile 
            (category, difficulty, score, userid) VALUES 
            (%s, %s, %s, 
                (select id from user where email=%s)
            )"""

    con = None
    try:
        category = quiz_data['category']
        difficulty = quiz_data['difficulty']
        score = quiz_data['score']

        con = get_connection()
        with con.cursor() as cursor:
            cursor.execute(query, (category, difficulty, score, email))
            con.commit()

    except Exception:
        traceback.print_exc()
    finally:
        if con is not None:
            con.close()
def create_transaction_history(account, amount):
    """
    Creates transaction history of requested account
    :param account: account requested
    :param amount: amount of transaction requested
    :return:
    """
    conn = database.get_connection()
    c = conn.cursor()
    c.execute(
        """INSERT INTO transaction_history VALUES(
        :account_id,
        DATE('now', 'localtime'),
        TIME('now', 'localtime'),
        :amount,
        :balance
    )""", {
            'account_id': account.account_id,
            'amount': amount,
            'balance': account.balance
        })
    conn.commit()
Example #33
def register():
    form = RegistrationForm()
    if form.validate_on_submit():
        connection = get_connection()
        cursor = get_cursor()

        error = None

        # careful now
        with connection:
            with cursor:
                try:
                    cursor.execute(
                        "INSERT INTO user_account(email, password) VALUES (%s, %s);",
                        (form.email.data,
                         generate_password_hash(form.password.data)))

                    cursor.execute(
                        """INSERT INTO 
                        user_profile(first_name, last_name, address1, address2, postal_code, phone_number, 
                        profile_image, description, credit_card, user_account_id) 
                        VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s,(SELECT id FROM user_account WHERE email=%s));""",
                        (form.first_name.data, form.last_name.data,
                         form.address1.data, form.address2.data,
                         form.postal_code.data, form.phone_number.data,
                         form.profile_image.data, form.description.data,
                         form.credit_card.data, form.email.data))
                    return redirect(url_for('auth.login'))
                except IntegrityError as e:
                    current_app.logger.error(e)
                    error = "Email already exists!"
                except Exception as e:
                    current_app.logger.error(e)
                    error = "An error has occurred. Please try again later."
    else:
        flash(form.errors, 'error')

    return render_template('auth/register.html', form=form)
Example #34
    def query(self):

        # g.debug("Downloading corpus...")

        query = f"""
                    SELECT {self._id_column}, {self._text_column}
                    FROM {self._table_name}
                    WHERE {self._where_clause}
                """

        conn = get_connection()
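        # the named cursor is server-side (psycopg2), so rows stream in batches of itersize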
        with conn.cursor(name="corpus_getter") as cursor:
            cursor.itersize = g.DOC_BUFFER_SIZE
            doc_ids = []

            cursor.execute(query)
            for doc_id, doc in cursor:
                doc_ids.append(doc_id)
                if self._strip_html:
                    doc = text_processing.strip_html(doc)
                yield doc

        self._doc_ids = np.array(doc_ids)
def get_state_all():
    connect = None
    cursor = None
    try:
        connect = get_connection()

        cursor = connect.cursor()
        sql = """
                SELECT MAC_ADDRESS, RED, YELLOW, GREEN, LED1_ON_OFF, LED2_ON_OFF, NHIET_DO FROM PLC_STATE_CONTROL
              """
        cursor.execute(sql)
        columns = [column[0] for column in cursor.description]
        # print(columns)
        results = []

        for row in cursor.fetchall():
            results.append(dict(zip(columns, row)))

        return jsonify(results)

    except Exception as e:
        print(e)
    finally:
        if cursor is not None:
            cursor.close()
        if connect is not None:
            connect.close()
Example #36
def create_user(data):

    con = None
    try:
        firstName = data['firstName']
        lastName = data['lastName']
        gender = data['gender']
        email = data['email']
        password = data['password']

        con = get_connection()
        with con.cursor() as cursor:
            query = """ INSERT INTO user 
                        (firstname, lastname, gender, email, password1) 
                        VALUES (%s, %s, %s, %s, %s);
                    """
            cursor.execute(query,
                           (firstName, lastName, gender, email, password))
            con.commit()

    except Exception:
        traceback.print_exc()
    finally:
        if con is not None:
            con.close()
Example #37
async def analyze_all_transcriptions():
    async with database.get_connection() as connection:
        transcriptions = await connection.fetch("""
            SELECT comment_id
            FROM transcriptions
            WHERE EXTRACT(epoch FROM NOW()) - EXTRACT(epoch FROM found) < (24 * 60 * 60)
                OR last_checked IS NULL OR good_bot IS NULL OR bad_bot IS NULL
                OR good_human IS NULL OR bad_human IS NULL
            ORDER BY last_checked ASC;
            """)

    if transcriptions is None:
        logging.info("There are no transcriptions to analyze.")
        return

    transcriptions = [
        reddit.comment(row["comment_id"]) for row in transcriptions
    ]
    for transcription in transcriptions:
        logging.info(
            f"Analyzing /u/{transcription.author}'s transcription: {transcription.id}"
        )
        await analyze_transcription(transcription)
Example #38
async def main():
    await database.create_pool()

    async with database.get_connection() as connection:
        rows = await connection.fetch("""
            SELECT
                comment_id
            FROM transcriptions
            WHERE permalink IS NULL;
            """)

        if rows is None:
            return

        comment_ids = [row["comment_id"] for row in rows]

        for comment_id in comment_ids:
            comment = reddit.comment(comment_id)

            try:
                permalink = comment.permalink
            except ValueError:
                logging.info(
                    f"Comment {comment_id} skipped because it has no permalink"
                )
                continue

            await connection.execute(
                """
                UPDATE transcriptions
                    SET permalink = $1
                WHERE comment_id = $2
                """,
                permalink,
                comment_id,
            )
Example #39
async def login(ctx, *_sname):
    map_result = map(str, _sname)
    sname = ' '.join(map_result)
    summoner = riotapi.getSummoner(sname)
    userName = ctx.message.author.name

    with db.get_connection() as conn:
        with conn.cursor() as cur:
            cur.execute(
                'SELECT summoner_name FROM LissWard WHERE discord_id = %s;',
                (str(ctx.message.author.id), ))
            sn = "null"
            for summoner_name in cur:
                sn = summoner_name[0]
            if sn != summoner["name"]:
                cur.execute('DELETE FROM LissWard WHERE discord_id = %s;',
                            (str(ctx.message.author.id), ))
                cur.execute(
                    'INSERT INTO LissWard (discord_id, summoner_name) VALUES (%s, %s)',
                    (ctx.message.author.id, summoner['name']))
                loginText = "登録しました."
            else:
                loginText = "ログイン済みです."
        conn.commit()
    embed = discord.Embed(color=0x30DADD)
    embed.set_author(name=userName,
                     icon_url=ctx.message.author.avatar_url_as(size=32))
    embed.set_thumbnail(
        url=
        "http://ddragon.leagueoflegends.com/cdn/{0}.1/img/profileicon/{1}.png".
        format(PATCH, summoner['profileIconId']))
    embed.add_field(
        name="サモナーネーム:" + summoner['name'] + " " + loginText,
        value="サモナーレベル:" + str(summoner['summonerLevel']),
    )
    await ctx.send(embed=embed)
def get_application(guid):
    #get the database connection
    cnx = get_connection()

    #Get Data
    cursor = cnx.cursor()
    query = ("SELECT * from `HumanTaskService`.`Applications` WHERE MagicNumber='" + str(guid) + "'")
    cursor.execute(query)
    rows = cursor.fetchall()

    cursor.close()
    cnx.close()

    #Build result
    if (len(rows) > 0):
        application = rows[0]
        return jsonify(ownerUserId=application[1],
                name=application[2],
                description=application[3],
                taskCopies=application[4],
                taskScheduler=application[5],
                magicNumber=application[6]),200

    abort(404) #not found
Example #41
def naics_descriptions(doc_ids):

    from database import get_connection
    query = f"""
                        SELECT opportunity_id, naics_description
                        FROM import.govwin_opportunity
                        WHERE opportunity_id IN ({", ".join([str(x) for x in doc_ids])})
                    """

    conn = get_connection()
    with conn.cursor(name="doc_getter") as cursor:
        cursor.itersize = g.DOC_BUFFER_SIZE
        cursor.execute(query)

        ids = []
        docs = []

        for id, doc in cursor:
            ids.append(id)
            docs.append(doc)

    # return the docs in the same order they were requested
    result = np.array([docs[ids.index(doc_id)] for doc_id in doc_ids])
    return result
Example #42
async def update_gamma_count(user: str):
    async with database.get_connection() as connection:
        reference_comment, old_gamma = await connection.fetchrow(
            """
            SELECT
                reference_comment,
                official_gamma_count
            FROM transcribers
            WHERE name = $1;
            """,
            user,
        )

        if reference_comment is None:
            logging.warn(f"Reference comment could not be found for /u/{user}")
            return

        reference_comment = reddit.comment(reference_comment)

        no_flair = False
        no_flair_message = (
            f"No flair on /u/{user}'s reference comment: {reference_comment}")

        try:
            flair = reference_comment.author_flair_text
        except Exception:
            no_flair = True
        else:
            no_flair = flair == "" or flair is None

        if no_flair is True:
            if reference_comment is not None:
                await connection.execute(
                    """
                    UPDATE transcribers
                        SET reference_comment = NULL
                    WHERE name = $1;
                    """,
                    user,
                )

            logging.warn(no_flair_message)
            return

        try:
            official_gamma = int(flair.split(" ")[0])
        except ValueError:
            # the flair does not start with a numeric gamma count, so there is nothing to compare
            logging.warning(f"Could not parse a gamma count from flair {flair!r} for /u/{user}")
            return

        if old_gamma != official_gamma:
            if old_gamma is not None:
                logging.info(
                    f"/u/{user} got from {old_gamma}Γ to {official_gamma}Γ")
            else:
                logging.info(
                    f"First gamma check for /u/{user} they have {official_gamma}Γ."
                )

            await connection.execute(
                """
                INSERT INTO new_gammas (transcriber, gamma, time)
                    VALUES ($1, $2, NOW());
                """,
                user,
                official_gamma,
            )

            await connection.execute(
                """
                UPDATE transcribers
                    SET official_gamma_count = $1
                WHERE name = $2;
                """,
                official_gamma,
                user,
            )

            await announce_gamma(user, old_gamma, official_gamma)
        elif old_gamma < official_gamma:
            logging.info(f"Old gamma is less than new gamma for /u/{user}")
        else:
            logging.info(
                f"/u/{user} has {official_gamma}Γ. It did not change.")
Example #43
    parser = argparse.ArgumentParser(prog="Daniels workouts")
    parser.add_argument('-t',
                        dest='time',
                        type=int,
                        help='Length (of time) of workout in minutes',
                        required=True)
    parser.add_argument('-w',
                        dest='workout',
                        type=str,
                        help='Workout config JSON',
                        nargs='?',
                        default='config.json')
    parser.add_argument('-d',
                        dest='debug',
                        action='store_true',
                        help='Run in debug mode.')
    args = parser.parse_args()
    ses = get_connection(args.debug)
    config = Config(args.workout, args.time)
    gw = WorkoutGenerator(ses)
    wo = gw.generate_workout(config)
    global player
    player = ClementinePlayer()
    start = time.time()
    run_workout(wo, args.debug)
    end = time.time()

    ses.add(wo)
    ses.commit()
    logging.info('Workout took a total of: %.0f seconds', end - start)
Example #44
        logging.info("Exception occurred for" + str(image_url))

    return img


if __name__ == "__main__":
    logging.info("Started processing")
    os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID"  # see issue #152
    os.environ["CUDA_VISIBLE_DEVICES"] = "3"
    PROCESS = True

    database = Database()
    try:
        start_time = time.time()
        templates = pd.read_sql(util.GET_TEMPLATES,
                                con=database.get_connection())
        # Create Model
        classifier = Classifier()
        classifier.load_model()

        current_time = time.time()
        print("Loaded Model", current_time - start_time)
        start_time = current_time

        hf = h5py.File('x_train_1.h5', 'r')
        x_train_1 = hf.get('x_train_1')
        x_train_1 = np.array(x_train_1, dtype=np.float32) / 255

        hf = h5py.File('y_train_1.h5', 'r')
        y_train_1 = hf.get('y_train_1')
        y_train_1 = np.array(y_train_1)
Example #45
async def analyze_user(user,
                       limit=100,
                       from_newest=False,
                       prioritize_new=True):
    if limit > 100:
        raise UserWarning(batch_one_hundred)

    if user in (None, ""):
        logging.info("A user to be analyzed must be passed.")
        return

    if user.casefold() in ignored_users:
        logging.info(f"/u/{user} ignored")
        return

    redditor = reddit.redditor(user)

    new_user = False
    async with database.get_connection() as connection:
        redditor_id = None
        try:
            redditor_id = getattr(redditor, "id")
            first_comment = next(redditor.comments.new(limit=1))
        except (prawcore.exceptions.PrawcoreException, AttributeError,
                StopIteration):
            if redditor_id is None:
                logging.info(f"/u/{user} is not a valid redditor.")
                await connection.execute(
                    """
                    UPDATE transcribers
                        SET valid = FALSE
                    WHERE name = $1;
                    """,
                    user,
                )
            else:
                logging.info(
                    f"/u/{user} has no comments, cannot fetch their stats.")
            return

        logging.info(f"Getting stats for /u/{user}")

        transcriber = await connection.fetchrow(
            """SELECT
                    start_comment,
                    end_comment,
                    reference_comment,
                    forwards,
                    valid
                FROM transcribers
                WHERE name = $1;
            """,
            user,
        )

        new_user = transcriber is None

        if new_user is True:
            logging.info(f"New user: /u/{user}")
            await connection.execute(
                """
                INSERT INTO transcribers (name)
                VALUES ($1);
                """,
                user,
            )
            start_comment = end_comment = reference_comment = forwards = valid = None
        else:
            start_comment, end_comment, reference_comment, forwards, valid = (
                transcriber.values())

        # If the user has gotten through all of these checks, they're valid.
        if valid is False or valid is None:
            await connection.execute(
                """
                UPDATE transcribers
                    SET valid = TRUE
                WHERE name = $1;
                """,
                user,
            )

        if reference_comment is not None:
            await update_gamma_count(user)

        if first_comment == start_comment:
            if forwards is True:
                logging.info(f"/u/{user} has no unchecked comments")
                return
        elif prioritize_new is True and forwards is True:
            forwards = True

        if start_comment is None or end_comment is None:
            await connection.execute(
                """
                UPDATE transcribers
                    SET start_comment = $1,
                    end_comment = $1,
                    forwards = FALSE
                WHERE name = $2
                """,
                first_comment.id,
                user,
            )
            start_comment = end_comment = first_comment.id

            # The range is [start_comment, end_comment]; exclusive.
            # So the first comment has to be checked.
            if is_transcription(first_comment):
                return_value = await add_transcription(user,
                                                       first_comment,
                                                       connection=connection)
            elif is_reference_comment(first_comment):
                reference_comment = first_comment.id
                logging.info(
                    f"Setting reference comment to {reference_comment}")
                await connection.execute(
                    """
                    UPDATE transcribers
                        SET reference_comment = $1
                    WHERE name = $2;
                    """,
                    reference_comment,
                    user,
                )
                await update_gamma_count(user)

        params = {}
        if forwards is True:
            if start_comment is not None:
                params = {"before": f"t1_{start_comment}"}
            else:
                params = {"after": f"t1_{first_comment}"}
                forwards = False
        else:
            if end_comment is not None:
                params = {"after": f"t1_{end_comment}"}
            else:
                params = {"after": f"t1_{first_comment}"}

        # Passing limit in the signature is overridden by the params argument
        params.update({"limit": limit, "type": "comments"})

        comment = start_comment if forwards is True else end_comment

        up_to = f"up to {limit} " if limit is not None else ""
        direction = "forwards" if forwards is True else "backwards"
        comment_with_id = (f" starting at comment with id: {comment}"
                           if comment is not None else "")

        logging.info(
            f"Fetching {up_to}comments for /u/{user} reading {direction}"
            f"{comment_with_id}.")
        try:
            comments = list(redditor.comments.new(params=params))
        except prawcore.exceptions.PrawcoreException:
            logging.warn(
                f"Exception {traceback.format_exc()}\n Setting /u/{user} to invalid"
            )

            await connection.execute(
                """
                UPDATE transcribers
                    SET valid = FALSE
                WHERE name = $1;
                """,
                user,
            )
            return

        reddit_comments = [reddit.comment(comment.id) for comment in comments]
        comment_count = len(comments)

        end_reached = f"Reached the end of /u/{user}'s comments."
        newest_reached = f"Reached /u/{user}'s newest comment."
        none_to_read = "No comments to read."
        if comment_count == 0:
            if forwards is True:
                logging.info(newest_reached)
                logging.info(none_to_read)
            else:
                logging.info(end_reached)
                logging.info(none_to_read)
                await connection.execute(
                    """
                    UPDATE transcribers
                        SET forwards = TRUE
                    WHERE name = $1;
                    """,
                    user,
                )
            return

        logging.info(f"Reading {comment_count} comments for /u/{user}.")

        transcriptions = 0
        new_transcriptions = 0
        for comment in reddit_comments:
            if is_transcription(comment):
                return_value = await add_transcription(user,
                                                       comment,
                                                       connection=connection)

                transcriptions += 1
                if return_value != "INSERT 0 0":
                    new_transcriptions += 1
            elif (reference_comment is None and comment.subreddit == tor
                  and comment.author_flair_text is not None):
                reference_comment = comment.id
                logging.info(
                    f"Setting reference comment to {reference_comment}")
                await connection.execute(
                    """
                    UPDATE transcribers
                        SET reference_comment = $1
                        WHERE name = $2;
                    """,
                    reference_comment,
                    user,
                )
                await update_gamma_count(user)

        await connection.execute(
            """
            UPDATE transcribers
                SET counted_comments = counted_comments + $1
            WHERE name = $2;
            """,
            comment_count,
            user,
        )

        s = "s" if transcriptions != 1 else ""
        new_s = "s" if new_transcriptions != 1 else ""
        logging.info(f"Found {transcriptions} total transcription{s}. "
                     f"Added {new_transcriptions} new transcription{new_s}.")

        first_checked_comment = reddit_comments[0].id
        last_checked_comment = reddit_comments[-1].id

        if comment_count < limit:
            if forwards is True:
                logging.info(newest_reached)
            else:
                logging.info(end_reached)
                await connection.execute(
                    """
                    UPDATE transcribers
                        SET forwards = TRUE
                    WHERE name = $1;
                    """,
                    user,
                )
            return

        logging.info(f"Reached comment with id {last_checked_comment} "
                     f"from {first_checked_comment}")

        if forwards is True:
            await connection.execute(
                """
                UPDATE transcribers
                    SET start_comment = $1
                WHERE name = $2;
                """,
                last_checked_comment,
                user,
            )
        else:
            await connection.execute(
                """
                UPDATE transcribers
                    SET end_comment = $1
                WHERE name = $2;
                """,
                last_checked_comment,
                user,
            )

    logging.info(f"Done checking /u/{user}")
Example #46
    def backtest(self,
                 testlist='all',
                 start='20110101',
                 end=None,
                 savepath=PATH + '/data/backtest_records.csv'):
        """
        backtest(self, testlist='all', start='20110101', end=None):

        回测系统,统计成功率和平均收益率和持有天数
        # TODO: 对学习系统还应该有准确率和召回率

        Input:
            testlist: ('all' or list of code): 要进行回测的股票列表, 'all'为全部股票

            start: (string of date): 回测数据的起始时间(含start)

            end: (string of date): 回测数据的结束时间(含end)

            savepath: (string): 生成的记录报告的保存路径,为None则不保存
        Return:
            None
        """
        if testlist is 'all':
            testlist = []
            for pool in ['0', '3', '6']:
                testlist.extend(get_stock_list(stock_pool=pool))
        con0 = get_connection(stock_pool='0')
        con3 = get_connection(stock_pool='3')
        con6 = get_connection(stock_pool='6')
        con = {'0': con0, '3': con3, '6': con6}
        records, records_tmp = pd.DataFrame(), pd.DataFrame()
        cnt = 0
        bar = ProgressBar(total=len(testlist))
        for code in testlist:
            bar.log(code)
            sql = 'select distinct * from ' + code + ' where date>=' + start\
                + ((' and date<='+end) if end is not None else '') + ';'
            df = pd.read_sql(sql, con[code[2]])
            self.df = _sort(df)
            self.df['date'] = self.df['date'].apply(lambda x: Timestamp(x))
            self.buy()
            self.sell()
            if self.record is not None and len(self.record) > 0:
                self.record = self.record.apply(
                    lambda record: self.integrate(record), axis=1)
            if self.record is not None and len(self.record) > 0:
                records_tmp = self.record.append(records_tmp)
                cnt += 1
            if cnt >= 100:
                records = records_tmp.append(records)
                records_tmp = pd.DataFrame()
                cnt = 0
            bar.move()
        records = records_tmp.append(records)
        if len(records) > 0:
            self.avggainrate = round(records['gainrate'].mean(),
                                     4) - self.gainbias
            self.successrate = round(
                len(records[records['gainrate'] > self.gainbias]) /
                len(records), 4)
            self.keepdays = round(records['keepdays'].mean(), 2)
            if savepath is not None:
                records.to_csv(savepath)
                print('records is saved at ' + savepath)
        else:
            print('No records')
Example #47
def setup_request():
    g.db = database.get_connection()
Example #48
from match_runner import MatchRunner
import riot_api
import database as db

connection = db.get_connection()
collection = db.get_connection_to_collection(
    connection, riot_api.Available_tiers.master.name)
number_of_matches_per_player = 200

riot_api.init_logger()
mr = MatchRunner(collection, riot_api.Available_tiers.master.name,
                 number_of_matches_per_player)
mr.init_logger()
mr.run()
Example #49
def read_tweets(url, url_id, id, sleep_interval=None):
    print "Reading tweets ..."
    #store = database.TweetStore(interval=10)
    #store.start()
    if id is None:
        id = 1
    no_result_counter = 0
    while id:
        if id == 1:
            id = None
        query_url = url
        if id is not None:
            query_url += "&max_id=%s" %(id,)
#       print query_url
        try:
            result = get_json(query_url)
        except IOError:
            print "Socket timeout or other IOError. Pausing for 5 seconds ...."
            time.sleep(5)
            continue
        except AttributeError:
            time.sleep(5)
            continue
        #print query_url
        if result is None:
            time.sleep(5)
            #   break
        elif "error" in result:
            print "Cute error message from Twitter: %s" % (result["error"])
            # TODO: sleep
            id = None
            break
        elif "results" not in result:
            print "Nothing from twitter...."
            if no_result_counter < 5:
                no_result_counter += 1
                time.sleep(5)
            else:
                print "Believe tweets are exhausted. Exiting ...."
                break
        elif "results" in result and len(result["results"]) == 0:
            print "No tweets found."
            break
        else:
            no_result_counter = 0
            id = result['results'][-1]['id'] - 1
            #store.add_tweets({
            #   "url_id": sys.argv[1],
            #   "results": result['results'],
            #})
            #print result
            #sys.exit()
            for tweet in result["results"]:
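                # build the column list and placeholders dynamically, adding optional fields only when present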
                fields = """query_url_id, from_user_id, profile_image_url, tweeted_at,
                        from_user, twitter_id, text, source"""
                value_string = u"""%s, %s, %s, %s, %s, %s,%s, %s"""
                print tweet['created_at']
                values = (
                    url_id, tweet['from_user_id'],
                    tweet['profile_image_url'],
                    time.strftime("%Y-%m-%d %H:%M:%S", time.strptime(tweet['created_at'],
                        "%a, %d %b %Y %H:%M:%S +0000")),
                    tweet['from_user'], tweet['id'],
                    tweet['text'],
                    tweet['source']
                )
                if "location" in tweet:
                    fields += ", location"
                    value_string += ", %s"
                    values += (tweet['location'], )
                if "iso_language_code" in tweet:
                    fields += ", iso_language_code "
                    value_string += ", %s"
                    values += (tweet['iso_language_code'], )
                if "geo" in tweet and tweet["geo"]:
                    fields += ", reported_lat, reported_lon, reported_geometry"
                    value_string += u", %s, %s, %s"
                    values += (
                        tweet['geo']['coordinates'][1], tweet['geo']['coordinates'][0],
                        "GeomFromText('POINT(%s,%s)')" % (tweet['geo']['coordinates'][1],
                            tweet['geo']['coordinates'][0]),
                    )
                if "to_user_id" in tweet and tweet["to_user_id"]:
                    fields += ", to_user_id"
                    value_string += ", %s"
                    values += (tweet['to_user_id'], )
                if "retweet_id" in tweet and tweet["retweet_id"]:
                    fields += ", retweet_id"
                    value_string += ", %s "
                    values += (tweet["retweet_id"], )
                query = "INSERT INTO " + ' tweets ' + "(" + fields + ", created_at, updated_at) VALUES (" + value_string + ", NOW(), NOW())"
                conn = database.get_connection()
                cursor = conn.cursor(MySQLdb.cursors.DictCursor)
                cursor.execute(query, values)
                conn.commit()
            print result['results'][-1]['created_at'] + " " + result['results'][-1]['text']                     
        if sleep_interval is not None:
            time.sleep(sleep_interval)