Example No. 1
    def __init__(self, config_file, kafka_config_file, verbose):
        self.logger = helper.setup_logging(self.__class__.__name__, verbose)

        self.db_connection = DBConnection(verbose)
        self.kafka_consumer = None

        self.kafka_params = helper.config(kafka_config_file)
        self.db_params = helper.config(config_file, "db")
        self.params = helper.config(config_file)
Example No. 2
    def CreateBlock():
        data = request.get_json()
        connection = DBConnection.NewConnection()

        if connection is None:
            response = jsonify({
                'status': 0,
                'message': 'Unable to connect to the database'
            })
            return response

        sql = """\
        EXEC [dbo].[CreateExersiceBlock]    @name=?,
                                            @planID=?,
                                            @token=?
        """

        params = [data['name'], data['planID'], data['token']]
        cursor = connection.cursor()
        cursor.execute(sql, params)
        result = cursor.fetchall()
        cursor.commit()
        cursor.close()
        connection.close()
        response = jsonify({'status': 1, 'message': 'Ok'})
        return response
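
The handler above, like most of the route examples that follow, repeats the same open/execute/commit/close sequence by hand. As a minimal sketch only, assuming nothing beyond the pyodbc-style calls already shown (DBConnection.NewConnection(), connection.cursor(), cursor.execute(), cursor.fetchall(), cursor.commit()), that boilerplate could be factored into a helper; the name run_procedure is hypothetical and not part of the original code.

# Hypothetical helper (not in the original examples): wraps the
# connect/execute/commit/close pattern the route handlers repeat.
def run_procedure(sql, params):
    connection = DBConnection.NewConnection()
    if connection is None:
        return None
    try:
        cursor = connection.cursor()
        cursor.execute(sql, params)
        result = cursor.fetchall()
        cursor.commit()
        cursor.close()
        return result
    finally:
        connection.close()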
Example No. 3
    def UpdatePlan(id):
        data = request.get_json()
        connection = DBConnection.NewConnection()
        sql = """\
        EXEC [dbo].[UpdatePlan]    @postId=?,
                                   @duration=?,
                                   @name=?,
                                   @token=?
        """

        params = [
            data['postId'], data['duration'], data['name'], data['token']
        ]
        cursor = connection.cursor()
        cursor.execute(sql, params)
        result = cursor.fetchall()
        cursor.commit()
        status = result[0][0]
        cursor.close()
        connection.close()

        if status == 0:
            response = jsonify({'status': 0, 'message': 'Access denied'})
            return response

        response = jsonify({
            'status': 1,
            'message': 'Plan was successfully updated'
        })
        return response
Example No. 4
    def GetPlan():

        connection = DBConnection.NewConnection()
        sql = """\
         EXEC [dbo].[GetPlans]       @token=?
         """

        params = [request.headers['Authorization']]
        cursor = connection.cursor()
        cursor.execute(sql, params)
        result = cursor.fetchall()
        cursor.commit()
        cursor.close()
        connection.close()

        data = []

        for row in result:
            currentRow = {
                'type': row[3],
                'description': row[2],
                'name': row[1],
                'id': row[0]
            }
            data.append(currentRow)

        response = jsonify(data)
        return response
Example No. 5
    def UpdateUser():
        data = request.get_json()
        connection = DBConnection.NewConnection()
        if connection is None:
            response = jsonify({
                'status': 0,
                'message': 'Unable to connect to the database'
            })
            return response
        sql = """\
        EXEC [dbo].[UpdateUser]  @firstname=?,
                                 @lastname=?,
                                 @email=?,
                                 @sex=?,
                                 @height=?,
                                 @weight=?,
                                 @age=?,
                                 @token=?
        """

        params = [
            data['firstname'], data['lastname'], data['email'], data['sex'],
            data['height'], data['weight'], data['age'],
            request.headers['Authorization']
        ]
        cursor = connection.cursor()
        cursor.execute(sql, params)
        result = cursor.fetchall()
        cursor.commit()
        cursor.close()
        connection.close()
        return jsonify({'status': 0})
Example No. 6
    def UserInfo():
        connection = DBConnection.NewConnection()
        if connection is None:
            response = jsonify({
                'status': 0,
                'message': 'Unable to connect to the database'
            })
            return response
        sql = """\
        EXEC [dbo].[GetUserInfo]  @token=?
        """

        params = [request.headers['Authorization']]
        cursor = connection.cursor()
        cursor.execute(sql, params)
        result = cursor.fetchall()
        cursor.commit()
        cursor.close()
        connection.close()
        firstname = ""
        lastname = ""
        age = ""
        email = ""
        height = ""
        weight = ""
        image = ""
        role = None
        sex = ""
        status = 1
        message = ""

        if result:
            firstname = result[0][0]
            lastname = result[0][1]
            age = result[0][2]
            email = result[0][3]
            image = result[0][4]
            role = result[0][7]
            height = result[0][6]
            weight = result[0][5]
            sex = result[0][8]
            status = 1

        if role is None:
            status = 0
            message = "User not found"

        response = jsonify({
            'status': status,
            'message': message,
            'firstname': firstname,
            'lastname': lastname,
            'age': age,
            'email': email,
            'role': role,
            'weight': weight,
            'height': height,
            'image': image,
            'sex': sex
        })
        return response
Example No. 7
    def Auth():
        data = request.get_json()
        email = ""
        if 'email' in data:
            email = data['email']

        password = ""
        if 'password' in data:
            password = data['password']

        connection = DBConnection.NewConnection()
        if connection is None:
            response = jsonify({
                'status': 0,
                'message': 'Unable to connect to the database'
            })
            return response
        sql = """\
        EXEC [dbo].[UserAuthorization]  @Email=?,
                                        @Password=?,
                                        @Token=?
        """
        token = generate_token()
        params = (email, password, token)
        cursor = connection.cursor()
        cursor.execute(sql, params)
        result = cursor.fetchall()
        cursor.commit()
        cursor.close()
        connection.close()

        outToken = result[0][0]

        response = jsonify({'status': 1, 'token': token})
        return response
Example No. 8
def main(argv):

    # Parse arguments
    parser = argparse.ArgumentParser(description='Minecraft Discord Bot')
    parser.add_argument('-c', '--config', nargs='?', type=str, default='mc-bot.cfg', help='config path')
    args = parser.parse_args(argv[1:])

    # Load config
    cnf_manager = Configuration(args.config)

    # Init database
    url = os.getenv('DATABASE_ACCESS_URL')
    connection = DBConnection(url)
    services = ServiceProvider(connection)

    # Init bot
    discord_bot = Overlord(cnf_manager, services)

    # Init extensions
    extras_ext = UtilityExtension(bot=discord_bot)
    conf_ext = ConfigExtension(bot=discord_bot)
    wl_ext = WhitelistExtension(bot=discord_bot)
    # ranking_ext = RankingExtension(bot=discord_bot)

    # Attach extensions
    discord_bot.extend(extras_ext)
    discord_bot.extend(conf_ext)
    discord_bot.extend(wl_ext)
    # discord_bot.extend(ranking_ext)

    # Start bot
    discord_bot.run()

    return 0
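
The main(argv) function returns an exit status but the example stops before the script entry point; a conventional guard, assumed here rather than taken from the original source, would be:

# Assumed entry point for the main(argv) shown above.
if __name__ == '__main__':
    import sys
    sys.exit(main(sys.argv))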
Example No. 9
def api_registration():
    print(request.get_json())

    data = request.get_json()

    email = data['email']
    is_email_valid = validate_email(email)
    if not is_email_valid:
        return jsonify({'status': 0, 'message': 'Email is not valid!'})

    password = data['password']

    if (len(password) < 8):
        return jsonify({'status': 0, 'message': 'Password is too short'})

    connection = DBConnection.NewConnection()
    if connection is None:
        response = jsonify({
            'status': 0,
            'message': 'Unable to connect to the database'
        })
        return response
    sql = """\
    EXEC [dbo].[uspCreateNewUser]  @email=?,
                                   @password=?,
                                   @firstname=?,
                                   @lastname=?,
                                   @role=?,
                                   @age=?,
                                   @height=?,
                                   @sex=?,
                                   @weight=?
    """
    userType = 0 if data['role'] == 'User' else 1
    params = (data['email'], data['password'], data['firstname'],
              data['lastname'], userType, data['age'], data['height'],
              data['sex'], data['weight'])
    cursor = connection.cursor()
    cursor.execute(sql, params)
    result = cursor.fetchall()
    cursor.commit()
    status = result[0][0]
    cursor.close()
    del cursor

    connection.close()

    if status == 0:
        response = jsonify({
            'status': 0,
            'message': 'This email has already been registered!'
        })
        return response

    response = jsonify({
        'status': 1,
        'message': 'You have been successfully registered!'
    })
    return response
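
For reference, api_registration() reads its fields from request.get_json(), so a client must POST a JSON body with exactly those keys. The sketch below is illustrative only: the endpoint URL and the use of the requests library are assumptions; only the field names come from the handler above.

# Hypothetical client call; the URL and `requests` usage are assumptions,
# the JSON keys are those read by api_registration().
import requests

payload = {
    'email': 'user@example.com', 'password': 'atleast8chars',
    'firstname': 'Jane', 'lastname': 'Doe', 'role': 'User',
    'age': 30, 'height': 170, 'sex': 'F', 'weight': 60,
}
resp = requests.post('http://localhost:5000/api/registration', json=payload)
print(resp.json())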
Example No. 10
def main_route():

    from db import DBFeedItem, DBLogItem, DBConnection, LOG_LEVEL

    conn = DBConnection()

    feeditems = []
    addonitems = []

    if addons is None:
        print('No addon loaded...')
        return

    for addon in addons:
        if hasattr(addon, 'get_feeds'):
            feeds = getattr(addon, 'get_feeds')()
        else:
            conn.insert_element(
                DBLogItem('The addon ' + str(addon) + ' is not supported. Please implement a "get_feeds()" function or contact the developer.',
                            datetime.datetime.now(),
                            LOG_LEVEL['warn']
                )
            )
            feeds = None

        if feeds is None or len(feeds) == 0:
            continue

        addonitems.append(feeds[0].type)

        for feed in feeds:
            if isinstance(feed, Feeditem) or isinstance(feed, DBFeedItem):
                feeditems.append(feed)
            else:
                conn.insert_element(
                    DBLogItem('The Element (' + str(feed) + ') could not be placed on the stream.',
                                datetime.datetime.now(),
                                LOG_LEVEL['warn']
                    )
                )

    feeditems.sort(key = lambda element: element.time, reverse = True)

    return render_template('dashboard.html', feeditems = feeditems, addonitems = addonitems, extended = False)
Example No. 11
    def __init__(self, file_name="local_config"):
        self.resultset = []
        settings = __import__("%s" % file_name)
        self.is_threading = settings.IMPLEMENT_THREADED_SEARCH
        db_config = settings.DATABASE
        self.index_classes = settings.INDEX_CLASSES
        self.is_indexing = settings.INDEXING
        self.intervals = settings.INTERVALS
        self.bucket_intervals = settings.BUCKET_INTERVAL
        self.word_split_pattern = settings.WORD_SPLIT_PATTERN
        self.conn = DBConnection(db_config['HOST'], db_config['USER'], db_config['PASSWORD'], db_config['NAME'],
                                 db_config['PORT'], settings.FILE_PATH)
Example No. 12
    def __init__(self, file_name="local_config"):
        self.resultset = []
        settings = __import__("%s" % file_name)
        self.is_threading = settings.IMPLEMENT_THREADED_SEARCH
        db_config = settings.DATABASE
        self.index_classes = settings.INDEX_CLASSES
        self.is_indexing = settings.INDEXING
        self.intervals = settings.INTERVALS
        self.file_path = settings.FILE_PATH
        self.engine = settings.ENGINE
        self.operator = settings.DEFAULT_OPERATOR
        self.text_based_search = None
        if self.is_indexing and hasattr(settings, "TEXT_BASED_SEARCH"):
            self.text_based_search = settings.TEXT_BASED_SEARCH
        if hasattr(settings, "BUCKET_INTERVAL"):
            self.bucket_intervals = settings.BUCKET_INTERVAL
        self.thread_timeout = 0.5
        if hasattr(settings, "THREAD_TIMEOUT"):
            self.thread_timeout = settings.THREAD_TIMEOUT
        self.ids_only = False
        if hasattr(settings, "IDS_ONLY") and settings.IDS_ONLY:
            self.ids_only = True
        self.conn = DBConnection(db_config['HOST'], db_config['USER'], db_config['PASSWORD'], db_config['NAME'])
        self.cmd_obj = Commands(self.engine, self.operator, self.is_indexing)
Example No. 13
class IndexData(object):

    def __init__(self, file_name="local_config"):
        self.resultset = []
        settings = __import__("%s" % file_name)
        self.is_threading = settings.IMPLEMENT_THREADED_SEARCH
        db_config = settings.DATABASE
        self.index_classes = settings.INDEX_CLASSES
        self.is_indexing = settings.INDEXING
        self.intervals = settings.INTERVALS
        self.bucket_intervals = settings.BUCKET_INTERVAL
        self.word_split_pattern = settings.WORD_SPLIT_PATTERN
        self.conn = DBConnection(db_config['HOST'], db_config['USER'], db_config['PASSWORD'], db_config['NAME'],
                                 db_config['PORT'], settings.FILE_PATH)

    def split_sentence(self, raw_sentence):
        word_list = resplit(self.word_split_pattern, raw_sentence)
        return word_list

    def create_hash(self, sentence):
        word_list = self.split_sentence(sentence)
        weight_list = Commands.assign_weight(word_list)
        return weight_list

    def false_index(self, data_count, table_name, field_list):
        dump_file_counts = int(data_count/self.intervals) + 1
        start, offset = 0, self.intervals
        if not self.is_threading:
            offset = data_count
            dump_file_counts = 1
        for file_no in range(dump_file_counts):
            self.conn.create_outfile(table_name, field_list, start, offset, file_no)
            start += self.intervals

    def true_index(self, data_count, table_name, field_list):
        interval_count = int(data_count/self.intervals) if data_count > self.intervals else 1
        start, offset = 0, self.intervals
        file_dict = {}
        pool = Pool(processes=2)
        args_list = []
        for i in range(interval_count):
            args_list.append([start, offset, table_name, field_list, file_dict],)
            start += offset
        pool.map(index_data, args_list)
        pool.close()
        pool.join()

    def index(self):
        for instance in self.index_classes:
            data_count = self.conn.get_table_counts(instance.table_name)
            if data_count:
                getattr(self, ("%s_index" % self.is_indexing).lower())(data_count, instance.table_name,
                                                                       instance.field_list)
            else:
                print("No Data to Index. Exiting....")
        self.conn.close()

    @classmethod
    def run(cls, field_list, table_name, bucket={}):
        self = IndexData("local_config")
        result_set = self.conn.get_all_records(field_list, table_name)
        for pos, data in result_set:
            word_list = self.create_hash(data)
            for word, weight in word_list:
                bucket_no = Commands.assign_bucket(weight, self.bucket_intervals)
                try:
                    bucket[bucket_no][word].append(pos)
                except KeyError:
                    bucket[bucket_no][word] = [pos, ]
        return bucket
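
IndexData (and the SimpleSearch example that follows) loads its configuration with __import__("local_config"). A minimal sketch of such a module is shown below; the setting names are the ones the constructors read, but every value is a hypothetical placeholder.

# local_config.py -- hypothetical settings module for the examples above.
# Setting names come from the attribute accesses in the code; all values
# here are placeholders.

IMPLEMENT_THREADED_SEARCH = True
INDEXING = False                   # lowercased into false_index()/true_index() by IndexData.index()
INTERVALS = 10000                  # rows per dump file / per search chunk
BUCKET_INTERVAL = 5
WORD_SPLIT_PATTERN = r"[ ,\-_'\"=.:;]"
FILE_PATH = "/tmp/"
ENGINE = "awk"                     # assumption: forwarded to Commands()
DEFAULT_OPERATOR = "AND"           # assumption: forwarded to Commands()

DATABASE = {
    'HOST': 'localhost',
    'USER': 'dbuser',
    'PASSWORD': 'secret',
    'NAME': 'search_db',
    'PORT': 3306,
}

# Index classes only need .table_name and .field_list, which is all the
# examples above access on them.
class ArticleIndex(object):
    table_name = "articles"
    field_list = ["id", "title", "body"]

INDEX_CLASSES = [ArticleIndex]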
Example No. 14
class SimpleSearch(object):

    def __init__(self, file_name="local_config"):
        self.resultset = []
        settings = __import__("%s" % file_name)
        self.is_threading = settings.IMPLEMENT_THREADED_SEARCH
        db_config = settings.DATABASE
        self.index_classes = settings.INDEX_CLASSES
        self.is_indexing = settings.INDEXING
        self.intervals = settings.INTERVALS
        self.file_path = settings.FILE_PATH
        self.engine = settings.ENGINE
        self.operator = settings.DEFAULT_OPERATOR
        self.text_based_search = None
        if self.is_indexing and hasattr(settings, "TEXT_BASED_SEARCH"):
            self.text_based_search = settings.TEXT_BASED_SEARCH
        if hasattr(settings, "BUCKET_INTERVAL"):
            self.bucket_intervals = settings.BUCKET_INTERVAL
        self.thread_timeout = 0.5
        if hasattr(settings, "THREAD_TIMEOUT"):
            self.thread_timeout = settings.THREAD_TIMEOUT
        self.ids_only = False
        if hasattr(settings, "IDS_ONLY") and settings.IDS_ONLY:
            self.ids_only = True
        self.conn = DBConnection(db_config['HOST'], db_config['USER'], db_config['PASSWORD'], db_config['NAME'])
        self.cmd_obj = Commands(self.engine, self.operator, self.is_indexing)

    def search(self, search_text):
        search_text_list = re.split(r"[ ,\-_'\"=.:;]", search_text)
        result_list = []
        for instance in self.index_classes:
            if not self.is_indexing:
                data_count = int(self.conn.get_table_counts(instance.table_name)) + 1
                interval_count = int(data_count/self.intervals) if data_count > self.intervals else 1
                id_list = self.simple_search(instance, interval_count, search_text_list)
            else:
                weight_list = Commands.assign_weight(search_text_list)
                minimum_weight = min([weight for word, weight in weight_list])/self.bucket_intervals
                id_list = self.index_search(instance.table_name, search_text_list, minimum_weight, self.is_threading, self.thread_timeout)
            if id_list and not self.ids_only:
                where = "where id in (%s)" % ", ".join(id_list[:100000])
                result_list.extend(self.conn.get_all_records(instance.field_list, instance.table_name, where))
        return result_list or id_list

    def simple_search(self, instance, data_count, search_text_list):
        threads = []
        if not self.is_threading:
            data_count = 1
        for i in range(data_count):
            queue.put([i, instance, search_text_list])
            t = Thread(target=self.run)
            t.daemon = True
            t.start()
            threads.append(t)
        for t in threads:
            t.join(self.thread_timeout)
        return self.resultset

    def run(self):
        while not queue.empty():
            i, instance, search_text_list = queue.get()
            file_name = self.file_path + "%s%s.txt" % (instance.table_name, i)
            awk_cmd = self.cmd_obj.create_cmd(search_text_list, file_name)
            output = exec_cmd(awk_cmd)
            result = Commands.process_data(instance, output)
            self.resultset.extend(result)
            queue.task_done()


    def configure_buckets(self, word_list, bucket_list):
        temp_list = bucket_list
        if self.text_based_search:
            temp_list = []
            weight_list = self.cmd_obj.assign_weight(word_list)
            for word, weight in weight_list:
                temp_list.append(self.cmd_obj.assign_bucket(weight, self.bucket_intervals))
        return temp_list

    def file_search(self):
        global CMD_OUTPUT
        while not queue.empty():
            file_prefix, table_name, bucket_no, search_text_list, file_prefix = queue.get()
            file_name = "%sindex_%s_%s.txt" % (file_prefix, table_name, bucket_no)
            cmd_list = self.cmd_obj.create_cmd(search_text_list, file_name)
            result = exec_cmd(cmd_list)
            CMD_OUTPUT.append(result)
            queue.task_done()

    def index_search(self, table_name, search_text_list, minimum_weight, is_threading=False, thread_timeout=0.2,
                     file_prefix="/tmp/"):
        threads = []
        global CMD_OUTPUT
        search_text_id_dict = {}
        bucket_keys = self.configure_buckets(search_text_list, BUCKET_KEYS)
        for search_text in search_text_list:
            for bucket_no in bucket_keys:
                if bucket_no >= minimum_weight:
                    queue.put([file_prefix, table_name, bucket_no, [search_text, ], file_prefix, ])
                    t = Thread(target=self.file_search)
                    t.daemon = True
                    t.start()
                    threads.append(t)
                    if not is_threading:
                        t.join(thread_timeout)
            for t in threads:
                t.join(thread_timeout)
            id_list = Commands.process_index_data(CMD_OUTPUT)
            search_text_id_dict[search_text] = id_list
            CMD_OUTPUT = []
        lists = list(search_text_id_dict.values())
        try:
            intersected = set(lists[0]).intersection(*lists)
        except IndexError:
            intersected = set()
        return list(intersected)
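
Usage of SimpleSearch is not shown in the snippet; driven against the hypothetical local_config sketch above, it would look roughly like this (assuming the dump/index files under FILE_PATH already exist):

# Hypothetical usage of SimpleSearch; relies on the local_config sketch above.
searcher = SimpleSearch("local_config")
matches = searcher.search("example query")
print(len(matches), "rows matched")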
Example No. 15
import json
import soundcloud
from requests.exceptions import ConnectionError
from config import soundcloud as config
from db import DBConnection, DBFeedItem
from feeditem import Feeditem

TYPE = 'soundcloud'

client = soundcloud.Client(
    client_id = config['client_id'],
    client_secret = config['client_secret'],
    username = config['username'],
    password = config['password']
)

conn = DBConnection()

accepted_types = [m for m in config['accepted_types'] if config['accepted_types'][m]]

source_type_mapping = {
    'track': u'A new song was uploaded by {0}',
    'track-repost': u'A song was reposted by {0}',
    'comment': u'A new comment was given',
    'favoriting': u'Someone liked a song'
}

def get_feeds():
    _load_feeds()

    result = []