Example #1
""" This module defines uthe parcel routes """

from flask import Blueprint, jsonify, request
from application import app
from db import DatabaseConnection
from application.api.models.user import User
from application.api.models.parcels import Parcel
from flask_jwt_extended import (jwt_required, get_jwt_identity)

parcel = Blueprint('Parcel', __name__, url_prefix='/api/v2/')

user_object = User()
parcel_object = Parcel()
conn_object = DatabaseConnection()


@parcel.route('/')
def index():
    """ This is the index route, returns jsonified welcome message """
    return jsonify({'message': 'Welcome to the SendIT application'}), 200


@parcel.route('/parcels', methods=['POST'])
@jwt_required
def make_order():
    """ This function enables a user to make a parcel delivery order """
    current_user = get_jwt_identity()
    user = user_object.get_user_by_id(current_user)
    if user['admin']:
        return jsonify({'message': 'This is a normal user route'}), 401
    data = request.get_json()
Example #2
class BogBot(irc.bot.SingleServerIRCBot):
    def __init__(self, channel, nickname, realname, server, port=6667):
        irc.bot.SingleServerIRCBot.__init__(self, [(server, port)], nickname,
                                            realname)
        self.channel = channel
        self.dbcon = DatabaseConnection()

    def on_disconnect(self, c, e):
        raise SystemExit()

    def on_welcome(self, c, e):
        c.join(self.channel)

    def on_nicknameinuse(self, c, e):
        c.nick(c.get_nickname() + "_")

    def on_privmsg(self, c, e):
        self.do_command(e, e.arguments[0])

    def on_pubmsg(self, srvcon, event):
        try:
            if event.arguments[0].strip().startswith("!"):
                self.do_command(event, event.arguments[0][1:])
            else:
                self.process_text(event)
        except Exception as e:
            exc_type, exc_obj, exc_traceback = sys.exc_info()
            tb = traceback.format_list(
                traceback.extract_tb(exc_traceback)[-1:])[-1]
            tb = ''.join(tb.splitlines())
            msg = "%s: %s  %s" % (exc_type, e.message, tb)
            if len(msg) > 400:
                msg = "%s %s %s" % (msg[:340], "...", msg[-50:])
            self.connection.privmsg("jabr", msg)

    def process_text(self, event):
        message = event.arguments[0]
        if "http" in message:
            # Add nick/user/host
            hostmask_id = self.add_or_update_hostmask(event.source)

            start = message.find("http")
            end = message.find(" ", start)
            if end == -1:
                url = message[start:]
            else:
                url = message[start:end]

            if "spotify.com" in url:
                spl = SpotifyLookup()
                spotify_meta = spl.lookup(url)
                if spotify_meta is not None:
                    self.connection.notice(event.target, spotify_meta)
                return
            elif "twitter.com" and "status" in url:
                twit_lookup = TwitterLookup()
                twit_meta = twit_lookup.compose_meta(url)
                if twit_meta is not None:
                    self.connection.notice(event.target, twit_meta)
                return

            redirect, idn, title = self._get_url_meta(url)

            # Log url
            if redirect is not None:
                self.add_url(redirect, title, hostmask_id, event.target)
            else:
                self.add_url(url, title, hostmask_id, event.target)

            # Output meta
            if title is not None:
                url_meta = self._compose_url_meta_string(
                    url, redirect, idn, title)
                self.connection.notice(event.target, url_meta)

    def _get_url_meta(self, url):
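        """ Return (redirect, idn, title) for the URL, or (None, None, None)
        if the header check aborts or no title could be extracted. """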
        abort, redirect, idn = self._check_headers(url)
        if abort:
            return None, None, None

        doc = self._get_url_content(url)
        title = self._get_html_title(doc)
        if title is not None and title != "":
            return redirect, idn, title
        return None, None, None

    def _compose_url_meta_string(self, url, redirect, idn, title):
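        """ Compose the notice text: the page title, prefixed with
        '<redirect> )> ' when a non-IDN redirect occurred. """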
        meta = ""
        if redirect is not None and idn is False:
            meta = "%s )> " % redirect

        if title is not None and title != "":
            meta = "%s%s" % (meta, title)
            return meta

    def _get_html_title(self, doc):
        """
        Parse the string representation ('document') of the web page.
        """
        parsed_doc = lxml.html.fromstring(doc)
        title = parsed_doc.find(".//title")
        if title is not None:
            title_stripped = ''.join(title.text.splitlines())
            return title_stripped.strip()

    def _check_headers(self, url):
        """
        Check size of URL content is within limit. Also check if URL and
        response URL are different, and if the response URL indicates
        that the original URL is a Internationalized Domain Name (IDN).
        """

        response = requests.head(url)
        if response.headers is not None:
            if "content-type" in response.headers:
                if "text/html" not in response.headers['content-type']:
                    self.connection.privmsg(
                        "jabr", "No 'text/html' in headers for %s" % url)
                    return True, None, None
            if "content-length" in response.headers:
                # 5.000.000 bytes ~= 5MB
                if int(response.headers['content-length']) > 5000000:
                    self.connection.privmsg(
                        "jabr", "Content length too long for %s" % url)
                    return True, None, None
        else:
            self.connection.privmsg("jabr", "No response headers for %s" % url)
            return True, None, None

        if url != response.url:
            if response.url.split('://')[1].startswith('xn--'):
                return False, response.url, True
            return False, response.url, False
        return False, None, False

    def _get_url_content(self, url):
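        """ Fetch the URL and return its body encoded with the response encoding. """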
        response = requests.get(url)
        if response.text and response.encoding is not None:
            return response.text.encode(response.encoding)

    def add_or_update_hostmask(self, hostmask_str):
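        """ Make sure the nick/user/host from the hostmask is registered and
        return the corresponding hostmask id. """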
        nick, user, host = self.parse_hostmask(hostmask_str)
        hostmask_id, nick_present = self.is_nick_in_hostmask(nick, user, host)

        if hostmask_id is not None:
            if nick_present:
                #self.connection.privmsg("jabr", "Nickname, username and hostmask already registered.")
                return hostmask_id
            else:
                self.connection.privmsg(
                    "jabr",
                    "Username and hostmask already registered, but not nick. Adding %s"
                    % nick)
                return self.dbcon.add_nick(nick, user, host)
        else:
            self.connection.privmsg(
                "jabr",
                "Username and hostmask not registered. Adding %s %s %s" %
                (nick, user, host))
            return self.dbcon.add_hostmask(nick, user, host)

    def add_consumption(self, hostmask_id, consumable_str, source=None):
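        """ Record a consumption of `consumable_str` for the given hostmask,
        registering the consumable first if it is unknown. """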
        with self.dbcon.scoped_db_session() as session:
            consumable_qr = session.query(Consumable).\
                            filter(Consumable.name==consumable_str).all() # One?
            if len(consumable_qr) == 0:
                self.connection.privmsg(
                    "jabr", "Consumable not registered. Registering.")
                consumable = Consumable(consumable_str)
                session.add(consumable)
            elif len(consumable_qr) == 1:
                self.connection.privmsg(
                    "jabr", "The consumable was found in database.")
                consumable = consumable_qr[0]
            else:
                self.connection.privmsg(
                    "jabr", "ERROR: Several consumables with same name!")
                return

            consumption = Consumption(source, consumable)
            hostmask = session.query(Hostmask).get(hostmask_id)
            hostmask.consumption.append(consumption)

    def add_url(self, url, title, hostmask_id, channel=None):
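        """ Store the URL with its title, channel and hostmask id, unless it is
        already present in the database. """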
        with self.dbcon.scoped_db_session() as session:
            url_qr = session.query(URL).filter(URL.url == url).all()  # One?
            if len(url_qr) == 0:
                url_obj = URL()
                #self.connection.privmsg("jabr", type(url_obj.hostmask_id))

                url_obj.url = url
                url_obj.title = title
                url_obj.channel = channel
                url_obj.hostmask_id = hostmask_id
                session.add(url_obj)
                msg = "URL %s added by %d" % (url, hostmask_id)
                self.connection.privmsg("jabr", msg)
            elif len(url_qr) == 1:
                msg = "URL %s already exist" % url
                self.connection.privmsg("jabr", msg)
            else:
                msg = "ERROR: %d instances of URL (%s) in DB" % (len(url_qr),
                                                                 url)
                self.connection.privmsg("jabr", msg)

    def do_command(self, event, cmd):
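        """ Handle a bot command: consumption commands, 'halt' and 'stats'. """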
        self.connection.privmsg(
            "jabr", "%s requested command %s" % (event.source.nick, cmd))

        hostmask_id = self.add_or_update_hostmask(event.source)
        self.connection.privmsg("jabr", "Hostmask ID: %s" % hostmask_id)

        if cmd == "kaffe":
            self.add_consumption(hostmask_id, cmd, event.target)
            self.connection.privmsg(event.target, "Coffee added!")
        elif cmd == "brus":
            self.add_consumption(hostmask_id, cmd, event.target)
            self.connection.privmsg(event.target, "Brus added!")
        elif cmd == "halt" and event.source == "[email protected]":
            self.die()
        elif cmd == "stats":
            for chname, chobj in self.channels.items():
                c.notice(nick, "--- Channel statistics ---")
                c.notice(nick, "Channel: " + chname)
                users = chobj.users()
                users.sort()
                c.notice(nick, "Users: " + ", ".join(users))
                opers = chobj.opers()
                opers.sort()
                c.notice(nick, "Opers: " + ", ".join(opers))
                voiced = chobj.voiced()
                voiced.sort()
                c.notice(nick, "Voiced: " + ", ".join(voiced))

    def parse_hostmask(self, hostmask):
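        """ Split a 'nick!user@host' hostmask into its three parts. """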
        nick = hostmask.split('!', 1)[0]
        user_host = hostmask.split('!', 1)[1].split('@', 1)
        user = user_host[0]
        host = user_host[1]
        return nick, user, host

    def is_nick_in_hostmask(self, nick, user, host):
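        """ Return (hostmask id, whether the nick is registered for it),
        or (None, False) if the user/host pair is unknown. """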
        with self.dbcon.scoped_ro_db_session() as session:
            try:
                hostmask = session.query(Hostmask).\
                            filter(Hostmask.username==user).\
                            filter(Hostmask.hostname==host).one()
            except MultipleResultsFound as e:
                self.connection.privmsg(
                    "jabr",
                    "Multiple hostmasks found for username and hostname. Should not be possible: %s"
                    % e)
                return None, False
            except NoResultFound:
                return None, False

            if nick in (nickname.nickname for nickname in hostmask.nickname):
                return hostmask.id, True
            else:
                return hostmask.id, False
Example #3
def ProcessSource(db_prod, geocoder, entities, config, test_mode):
    """ Process one source table (read from db_source) using the config and
    performing normalization using the given geocoder and entities lookup.

    The produced data are written into db_prod connection. The function writes
    new entities and addresses in to the Entities and Address tables. It also
    creates and populates supplementary tables as specified by a config.
    """

    # Connect to the most recent schema from the current source
    db_source = DatabaseConnection(path_config='db_config_update_source.yaml')
    source_schema_name = db_source.get_latest_schema('source_' + config["source_schema"])
    print "Processing source_schema_name", source_schema_name
    db_source.execute('SET search_path="' + source_schema_name + '";')

    columns_for_table = {}
    with db_prod.dict_cursor() as cur:
        # Create supplementaty tables using the provided command.
        # Also store the columns of the table for later use.
        for table in config["tables"]:
            table_config = config["tables"][table]
            columns_for_table[table] = table_config["columns"]
            cur.execute(table_config["create_command"])

    def AddValuesToTable(columns, values, eid, supplier_eid=None):
        if eid is not None:
            columns += ["eid"]
            values += [eid]
        if supplier_eid is not None:
            columns += ["supplier_eid"]
            values += [supplier_eid]
 
        if all(v is None for v in values):
            # Ignore this entry, all meaningful values are None
            return

        # TODO: find out how to build SQL statement properly
        column_names = ",".join(columns)
        values_params = ",".join(["%s"] * (len(columns)))
        command = (
                "INSERT INTO %s (" + column_names + ") " +
                "VALUES (" + values_params + ") " +
                "ON CONFLICT DO NOTHING"
        )
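        # Note: composing the statement with psycopg2.sql.SQL and sql.Identifier
        # would be a safer alternative; here the table name is passed via AsIs
        # and the values as query parameters.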
        with db_prod.dict_cursor() as cur:
            cur.execute(command,
                        [AsIs(table)] + values)



    def AddToTable(row, table, eid, years, supplier_eid=None):
        """ Add values for the given row into the supplementary table 'table'.

        It reads the corresponding values from the row and adds them into the
        table with the corresponding eid.
        """
        columns = list(columns_for_table[table])
        if years:
            for year in years:
                values = []
                columns_per_year = columns[:]
                for column in columns:
                    col_name = column + "_" + str(year)
                    if col_name in row:
                        values.append(row[col_name])
                    else:
                        values.append(None)
                columns_per_year.append("year")
                values.append(year)
                AddValuesToTable(columns_per_year, values, eid)    
        else:
            values = [row[column] for column in columns]
            AddValuesToTable(columns, values, eid, supplier_eid)

    with db_source.dict_cursor() as cur:
        # Read data using the given command.
        print "Executing SQL command ..."
        suffix_for_testing = ""
        if test_mode:
            suffix_for_testing = " LIMIT 1000"
        cur.execute(config["command"] + suffix_for_testing)
        print "Done."
        missed = 0
        found = 0
        empty = 0

        missed_eid = 0
        found_eid = 0

        missed_addresses = set([])
        for row in cur:
            # Read entries one by one and try to geocode them. If the address
            # lookup succeeds, try to normalize the entities. If it succeeds,
            # insert into Entities and supplementary tables.
            address = ""
            if "address" in row:
                address = row["address"]
                if address is None: continue
            name = ""
            if "name" in row:
                name = row["name"]
                if name is None: continue
            # Sometimes FirstName and Surname are joined. Let's try the simplest
            # splitting on capital letters.
            if len(name.split()) == 1:
                name = ' '.join(re.findall('[A-Z][^A-Z]*', name))
            addressId = geocoder.GetAddressId(address.encode("utf8"))
            if addressId is None:
                if test_mode and missed < 10:
                    print "MISSING ADDRESS", address.encode("utf8")
                if address == "":
                    empty += 1
                else:
                    missed_addresses.add(address)
                    missed += 1
                    continue
            found += 1

            eid = None
            if config.get("no_entity_id"):
                eid = None
            else:
                eid = entities.GetEntity(row["ico"], name, addressId)
            # print name, "-> eid:", eid
            if found%20000==0:
                print "Progress:", found
                sys.stdout.flush()

            if config.get("save_org_id"):
                entities.AddOrg2Eid(row["org_id"], eid)
            if config.get("use_org_id_as_eid_relation"):
                eid2 = entities.GetEidForOrgId(row["eid_relation"])
                if eid2 is None:
                    continue
                row["eid_relation"] = eid2
            if config.get("extract_description_from_body"):
                row["body"] = ExtractDescriptionFromBody(row["body"])
            supplier_eid = None
            if config.get("supplier_eid"):
                supplier_address_id = None
                if "supplier_address" in row and not row["supplier_address"] is None:
                    supplier_address = row["supplier_address"]
                    if supplier_address:
                        supplier_address_id = geocoder.GetAddressId(supplier_address.encode("utf8"))
                        if supplier_address_id is None:
                            missed_addresses.add(supplier_address)
                            missed += 1
                            continue
                    else:
                        empty += 1
                supplier_name = ""
                if "supplier_name" in row and not row["supplier_name"] is None:
                    supplier_name = row["supplier_name"]
                supplier_eid = entities.GetEntity(row["supplier_ico"], supplier_name, supplier_address_id)    
            if table_config.get("strip_html"):
                for strip_html_column in table_config["strip_html"]:
                    if row.get(strip_html_column):
                        row[strip_html_column] = StripHtml(row[strip_html_column])
            if eid is None: missed_eid += 1
            found_eid += 1
            AddToTable(row, table, eid, table_config.get("years"), supplier_eid)

    print "FOUND", found
    print "MISSED", missed
    print "EMPTY", empty
    print "MISSED UNIQUE", len(missed_addresses)
    print "FOUND EID", found_eid
    print "MISSED EID", missed_eid
    db_source.close()
Example #5
def update_CSV_source(source, timestamp, dry_run, verbose):
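    """ Load the CSV file at source['path'] into a new source_<name>_<timestamp>
    schema, keeping the original column names in a column_names table. """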
    # Load the CSV file
    with open(source['path'], 'r') as f:
        delimiter = str(source['delimiter']) # requires string, not unicode
        reader = csv.reader(f, delimiter=delimiter)

        # Extract column names from header line and then the actual data
        header = next(reader)
        column_names = [column_name.decode('utf-8') for column_name in header]
        data = [tuple(row) for row in reader]
    if verbose:
        print('Loaded CSV file with %d columns and %d data rows' % (len(column_names), len(data)))

    # Create postgres schema
    db = DatabaseConnection(path_config='db_config_update_source.yaml')
    schema = 'source_' + source['name'] + '_' + timestamp
    q = 'CREATE SCHEMA %s; SET search_path="%s";' % (schema, schema)
    db.execute(q)

    # Compute normalised column names, saving original names in a separate table
    column_names_normalised = map(normalise_CSV_column_name, column_names)
    q = 'CREATE TABLE column_names (name_original text, name_normalised text);'
    db.execute(q)
    q = """INSERT INTO column_names VALUES %s;"""
    q_data = [(original, normalised) for original, normalised in zip(column_names, column_names_normalised)]
    db.execute_values(q, q_data)

    # Create table containing the actual data from the CSV file
    table = source['table_name']
    table_columns = ', '.join(['%s text' % (name) for name in column_names_normalised])
    q = 'CREATE TABLE %s (%s);' % (table, table_columns)
    db.execute(q)

    # Populate the table with data
    q = 'INSERT INTO ' + table + ' VALUES %s;'
    db.execute_values(q, data)
    if verbose:
        print('Inserted %d rows into %s.%s%s' % (len(data), schema, table, ' (dry run)' if dry_run else ''))

    # Grant privileges to user data for data/SourceDataInfo to work properly
    db.grant_usage_and_select_on_schema(schema, 'data')

    # Commit and close database connection
    if not dry_run:
        db.commit()
    db.close()
Example #6
from flask import Flask, jsonify, request, send_from_directory
from flask_cors import CORS
from psycopg2 import sql
import psycopg2
import json
from orm.report import Report
from orm.airport import Airport
from sqlalchemy import func
from db import DatabaseConnection
from orm.queries import handleFilters

app = Flask(__name__)
CORS(app, send_wildcard=True)

db_connection = DatabaseConnection()


@app.route('/reports')
def reports():
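    """ Return reports matching the request filters as JSON, limited to the
    'limit' query parameter (default 1000 rows). """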
    query = Report.query(db_connection.get_read_session())
    query = handleFilters(query, request.args)
    limit = request.args.get("limit", 1000)
    query = query.limit(limit)
    reports = tuple(Report.row_to_dict(report) for report in query)
    return jsonify(reports), 200


@app.route('/shapes')
def shapes():
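    """ Query the distinct Report.shape values, applying the request filters. """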
    query = db_connection.get_read_session().query(Report.shape).distinct()
    query = handleFilters(query, request.args)
Example #7
def save_in_db(connection, bookmarks: List[Bookmark]) -> None:
    '''
        Save bookmarks, tags and bookmark tag mappings in database
    '''

    tag_records_set: Set[str] = set()
    bookmark_records:  List[Tuple[str, str, str, str, str]] = []
    bookmark_tag_mapping_records: List[Tuple[str, str]] = []

    len_bookmarks = len(bookmarks)
    if len_bookmarks > 0:
        logger.info(f"Saving {len_bookmarks} bookmarks in database")
    else:
        logger.info("No new bookmarks to save in database")
        return

    # convert models into tuple form to be used in sql execute statement
    for bookmark in bookmarks:
        # 200 is length of title column in database
        bookmark_title = (
            bookmark.title[:197] + '...') if len(bookmark.title) > 200 else bookmark.title
        bookmark_records.append((bookmark._id, bookmark.created_at, bookmark.updated_at,
                                 bookmark.link, bookmark_title))
        bookmark_tag_mapping_records.extend(
            [(mapping.bookmark_id, mapping.tag_id) for mapping in bookmark.mappings])
        # use a set to make a list of non-duplicate tags
        tag_records_set.update([tag._id for tag in bookmark.tags])

    tag_records = [(tag,) for tag in tag_records_set]

    with DatabaseConnection.Cursor(connection) as cursor:
        try:
            sql_bookmark_upsert_query = """
                            INSERT INTO bookmark (_id, created_at, updated_at, link, title)
                            VALUES (%s,%s,%s,%s,%s)
                            ON CONFLICT (_id) DO UPDATE SET updated_at = EXCLUDED.updated_at, link = EXCLUDED.link, title = EXCLUDED.title
                            """
            cursor.executemany(sql_bookmark_upsert_query, bookmark_records)
            connection.commit()
        except (Exception, psycopg2.Error) as error:
            logger.error("Failed inserting records in bookmark table: {}".format(
                error.__str__().strip()))

        try:
            sql_tag_insert_query = """
                            INSERT INTO tag (_id)
                            VALUES (%s)
                            ON CONFLICT (_id) DO NOTHING
                            """
            cursor.executemany(sql_tag_insert_query, tag_records)
            connection.commit()
        except (Exception, psycopg2.Error) as error:
            logger.error("Failed inserting record in tag table: {}".format(
                error.__str__().strip()))

        try:
            sql_tag_mapping_record_insert_query = """
                            INSERT INTO bookmark_tag_mapping (bookmark_id, tag_id)
                            VALUES (%s,%s)
                            ON CONFLICT (bookmark_id, tag_id) DO NOTHING
                            """
            cursor.executemany(
                sql_tag_mapping_record_insert_query, bookmark_tag_mapping_records)
            connection.commit()
        except (Exception, psycopg2.Error) as error:
            logger.error(
                "Failed inserting records in bookmark tag mapping table: {}".format(error.__str__().strip()))
Example #9
def generate_public_data_dumps(limit=None, verbose=False):
    """ Generates the public data dump files from the latest production data """

    # Connect to the latest production data schema
    db = DatabaseConnection(path_config='db_config_update_source.yaml')
    schema = db.get_latest_schema('prod_')
    db.execute('SET search_path="' + schema + '";')
    timestamp = schema[schema.rfind('_') + 1:]
    if verbose:
        print('[OK] Dumping from schema "%s"...' % (schema))
    if limit is not None:
        print('[WARNING] Dumping with row limit %d!' % (limit))

    # Read YAML configuration file
    config = yaml_load('public_dumps.yaml')
    dir_save = config['save_directory']
    dumps = config['dumps']

    # Process all dumps
    for dump_name in dumps:
        # Construct dump query
        q = dumps[dump_name]['query']
        q = q.rstrip().rstrip(';')  # possibly remove ; ending
        if limit is not None:
            q += ' LIMIT %d' % (limit)

        # Dump to CSV without timestamp
        path_output = '%s%s.csv' % (dir_save, dump_name)
        db.dump_to_CSV(q, path_output)
        if verbose:
            print('[OK] Created dump "%s" in %s' % (dump_name, path_output))

        # Dump to CSV with timestamp
        path_output = '%s%s_%s.csv' % (dir_save, dump_name, timestamp)
        db.dump_to_CSV(q, path_output)
        if verbose:
            print('[OK] Created dump "%s" in %s' % (dump_name, path_output))

    # Close database connection
    db.close()
Example #10
def delete_product(id):
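    """ Delete the product with the given id and return the result message as JSON. """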
    db = DatabaseConnection()
    message = db.delete_product(id)

    return jsonify({'message': message})
Example #11
def get_products():
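    """ Return all products as JSON. """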
    db = DatabaseConnection()
    products = db.get_products()
    return jsonify({'products': products}), 200
Example #12
class TestViews(unittest.TestCase):
    @classmethod
    def setUpClass(self):
        self.client = app.test_client()
        self.conn_object = DatabaseConnection()
        self.present_location = {'present_location': 'Bukumiro'}

        self.admin_user = {
            'username': '******',
            'password': '******',
            'email': '*****@*****.**'
        }

        self.user = {
            "username": "******",
            "password": "******",
            "email": "*****@*****.**"
        }

        self.user2 = {
            "username": "******",
            "password": "******",
            "email": "*****@*****.**"
        }
        self.client.post('/api/v2/signup',
                         data=json.dumps(self.user2),
                         content_type="application/json")

    @classmethod
    def tearDownClass(self):
        self.conn_object.drop_tables('users')

    def test_initial_page(self):
        response = self.client.get('/api/v2/')
        self.assertEqual(response.status_code, 200)
        data = json.loads(response.data.decode())
        self.assertEqual(data.get('message'),
                         'Welcome to the SendIT application')

    def test_user_registration_user(self):

        user_one = self.client.post('/api/v2/auth/signup',
                                    data=json.dumps({
                                        'username': '******',
                                        'password': '******',
                                        'email': '*****@*****.**'
                                    }),
                                    content_type='application/json')
        response = json.loads(user_one.data.decode())
        self.assertEqual(response['message'], 'User registered successfully')
        self.assertEqual(user_one.status_code, 201)

    def test_user_registration_already_existing(self):
        user_one = self.client.post('/api/v2/auth/signup',
                                    data=json.dumps({
                                        "username":
                                        "******",
                                        "password":
                                        "******",
                                        "email":
                                        "*****@*****.**"
                                    }),
                                    content_type='application/json')
        response = json.loads(user_one.data.decode())
        self.assertEqual(response['message'], 'Username already exists')
        self.assertEqual(user_one.status_code, 400)

    def test_login(self):
        self.client.post('/api/v2/auth/signup',
                         data=json.dumps(self.user),
                         content_type='application/json')

        response = self.client.post('/api/v2/auth/login',
                                    data=json.dumps(
                                        dict(username="******",
                                             password='******')),
                                    content_type='application/json')
        self.assertEqual(response.status_code, 200)
        response_data = json.loads(response.data.decode())
        self.assertTrue(response_data.get('token'))

        response_2 = self.client.post('/api/v2/auth/login',
                                      data=json.dumps(
                                          dict(username="******",
                                               password='******')),
                                      content_type='application/json')

        response_2_data = json.loads(response_2.data.decode())
        self.assertEqual(response_2_data['message'],
                         'Verification of credentials failed !')

        wrong_data = self.client.post('/api/v2/auth/login',
                                      data=json.dumps(
                                          dict(username="******",
                                               password="******")),
                                      content_type='application/json')
        response3 = json.loads(wrong_data.data.decode())
        self.assertEqual(response3['message'], 'password does not match !')
        self.assertEqual(wrong_data.status_code, 401)

    def test_login_without_data(self):
        response = self.client.post('/api/v2/auth/login',
                                    data=json.dumps(
                                        dict(username="", password='')),
                                    content_type='application/json')
        response_data = json.loads(response.data.decode())
        self.assertEqual(response_data['message'], 'No data has been sent')
        self.assertEqual(response.status_code, 400)

    def test_promote_user(self):
        response = self.client.put('/api/v2/promote/Gafabusa2')
        self.assertEqual(response.status_code, 200)
        response_data = json.loads(response.data.decode())
        self.assertEqual(response_data['message'],
                         'Gafabusa2 promoted to admin')

    def test_promote_wrong_user(self):
        response = self.client.put('/api/v2/promote/78')
        self.assertEqual(response.status_code, 400)
        response_data = json.loads(response.data.decode())
        self.assertEqual(response_data['message'], 'user promotion failed')
Example #13
class TestViews(unittest.TestCase):
    @classmethod
    def setUpClass(self):
        self.client = app.test_client()  
        self.conn_object = DatabaseConnection()
        self.present_location = {'present_location':'Bukumiro'}
        self.client.post('/api/v2/parcels',data=json.dumps({'parcel_description':'this parcel contains a phone','parcel_weight':50,'parcel_source':'Ntinda','parcel_destination':'Mbarara','receiver_name':'Ritah','receiver_telephone':'077890340','current_location':'Ntinda','status':'pending'}), content_type="application/json")
        self.admin_user = {'username':'******','password':'******','email':'*****@*****.**'}
        self.user ={"username":"******","password":"******","email":"*****@*****.**"}
        self.user2 ={"username":"******","password":"******","email":"*****@*****.**"}
        
        self.new_destination = {'destination':'Kamwokya'}
        self.new_status = {'status':'delivered'}
        # testing with user below
        self.client.post('/api/v2/auth/signup',data=json.dumps(self.user2),content_type='application/json')
        login_response = self.client.post('/api/v2/auth/login',data=json.dumps(dict( username="******",password='******')),content_type='application/json')
        login_data = json.loads(login_response.data.decode())
        self.token = login_data.get('token')
        #admin token below
        self.client.post('/api/v2/auth/signup',data=json.dumps(self.admin_user),content_type='application/json')
        self.client.put('/api/v2/promote/timo')
        admin_login_response = self.client.post('/api/v2/auth/login',data=json.dumps(dict( username="******",password='******')),content_type='application/json')
        admin_login_data = json.loads(admin_login_response.data.decode()) 
        self.admin_token = admin_login_data.get('token')
        
        
    @classmethod
    def tearDownClass(self):
        self.conn_object.drop_tables('parcel_orders')   

    

    def test_create_parcel_delivery_order(self):
        #test for right details
        response = self.client.post('/api/v2/parcels',data=json.dumps({'parcel_description':'this parcel contains a bag','parcel_weight':30,'parcel_source':'Ntinda','parcel_destination':'Lubaga','receiver_name':'Godfrey','receiver_telephone':'077890340','current_location':'Ntinda','status':'pending'}), content_type="application/json", headers={'Authorization': 'Bearer ' + self.token})
        self.assertEqual(response.status_code,201)
        response_data = json.loads(response.data.decode())
        self.assertEqual(response_data['message'], 'order placed successfully')

    def test_create_parcel_delivery_order_as_admin(self):
        response = self.client.post('/api/v2/parcels',data=json.dumps({'parcel_description':'this parcel contains a bag','parcel_weight':30,'parcel_source':'Ntinda','parcel_destination':'Lubaga','receiver_name':'Godfrey','receiver_telephone':'077890340','current_location':'Ntinda','status':'pending'}), content_type="application/json", headers={'Authorization': 'Bearer ' + self.admin_token})
        response_data = json.loads(response.data.decode())
        self.assertEqual(response_data['message'],'This is a normal user route')
        self.assertEqual(response.status_code,401)
        

    def test_user_getting_orders(self):
        response = self.client.get('/api/v2/parcels',headers={'Authorization': 'Bearer ' + self.token})
        self.assertEqual(response.status_code,200)

    def user_get_my_order_as_admin(self):
        response = self.client.get('/api/v2/parcels',headers={'Authorization': 'Bearer ' + self.admin_token})
        response_data = json.loads(response.data.decode())
        self.assertEqual(response_data['message'],'This is a normal user route')
        self.assertEqual(response.status_code,401)
        
        
    def test_get_all_user_orders(self):
        #test for right details        
        response = self.client.get('/api/v2/admin/parcels',headers={'Authorization': 'Bearer ' + self.admin_token})
        self.assertEqual(response.status_code,200)
    def test_get_all_user_order_as_user(self):
        response = self.client.get('/api/v2/admin/parcels',headers={'Authorization': 'Bearer ' + self.token})
        response_data = json.loads(response.data.decode())
        self.assertEqual(response_data['message'],'This is an admin route, you are not authorized to access it')
        self.assertEqual(response.status_code,401)

    
    #this is a user function
    def test_change_destination(self):
        #test for right details
        response = self.client.put('/api/v2/parcels/1/destination', data=json.dumps(self.new_destination), content_type="application/json", headers={'Authorization': 'Bearer ' + self.token})
        self.assertEqual(response.status_code,200)
        response_data = json.loads(response.data.decode())
        self.assertEqual(response_data['message'],'destination of parcel delivery order changed')
   

    def test_change_destination_as_admin(self):
        response = self.client.put('/api/v2/parcels/1/destination', data=json.dumps(self.new_destination), content_type="application/json", headers={'Authorization': 'Bearer ' + self.admin_token})
        response_data = json.loads(response.data.decode())
        self.assertEqual(response_data['message'],'This is a normal user route')
        self.assertEqual(response.status_code,401)
    #admin
    def test_change_status(self):
        response = self.client.put('/api/v2/parcels/1/status', data=json.dumps(self.new_status), content_type="application/json",headers={'Authorization': 'Bearer ' + self.admin_token})
        self.assertEqual(response.status_code,200)
        response_data = json.loads(response.data.decode())
        self.assertEqual(response_data['message'],'status of parcel delivery order changed')

    def test_change_status_as_normal_user(self):
        response = self.client.put('/api/v2/parcels/1/status', data=json.dumps(self.new_status), content_type="application/json",headers={'Authorization': 'Bearer ' + self.token})
        self.assertEqual(response.status_code,401)
        response_data = json.loads(response.data.decode())
        self.assertEqual(response_data['message'],'This is an admin route, you are not authorized to access it')     

    def test_change_present_location(self):
        response = self.client.put('/api/v2/parcels/1/presentLocation', data=json.dumps(self.present_location), content_type="application/json",headers={'Authorization': 'Bearer ' + self.admin_token})
        self.assertEqual(response.status_code,200)
        response_data = json.loads(response.data.decode())
        self.assertEqual(response_data['message'],'present location of parcel delivery order changed')
    def test_change_present_location_as_user(self):
        response = self.client.put('/api/v2/parcels/1/presentLocation', data=json.dumps(self.present_location), content_type="application/json",headers={'Authorization': 'Bearer ' + self.token})
        self.assertEqual(response.status_code,401)
        response_data = json.loads(response.data.decode())
        self.assertEqual(response_data['message'],'This is an admin route, you are not authorized to access it') 
Example #14
def generate(sql, temp, output, db, projectconfig, compilepdf):
    """
    Generates LaTeX file
    """
    templateFilePath = defaultTemplatePath
    dbConfig = None
    projConfig = None

    # TODO: Can't be used right now unless it follows the same rules as the example template... Fix???
    if not temp:
        if not path.exists(defaultTemplatePath):
            click.secho("[-] Default template doesn't exist")
            return
    elif not path.exists(temp):
        click.secho("[-] Template file doesn't exist")
        return

    if not path.exists(sql):
        click.secho("[-] SQL file doesn't exist")
        return

    output = output.replace("\\", "/")

    #TODO Database config and project config handling is copy-paste, refactor...

    # Handling database config
    if db:
        if not path.exists('db_config.json'):
            click.secho("[-] There is no db_config.json in current directory")
            return
        dbConfig = config.readConfig("db_config.json",
                                     config.validateDatabaseConfig)
    else:
        dbConfig = config.createDatabaseConfig("db_config.json")

    # Handling project config
    if projectconfig:
        if not path.exists('proj_config.json'):
            click.secho(
                "[-] There is no proj_config.json in current directory")
            return
        projConfig = config.readConfig("proj_config.json",
                                       config.validateProjectConfig)
    else:
        projConfig = config.createProjectConfig("proj_config.json")

    try:
        mysqldb = DatabaseConnection(dbConfig)
        click.secho("[+] Connected to database")
    except mysql.connector.Error as err:
        if err.errno == errorcode.ER_ACCESS_DENIED_ERROR:
            click.secho("[-] Bad credentials")
        elif err.errno == errorcode.ER_BAD_DB_ERROR:
            click.secho("[-] Database doesn't exist")
        else:
            print("\tError:", err)
        return

    try:
        latexWriter = LatexCreator(mysqldb, sql, templateFilePath, output,
                                   projConfig)
        codeWrap = "\\begin{sqlCode}\n%s\n\\end{sqlCode}\n"
        latexWriter.write(codeWrap, "...template...")
        print("[+] Successfully written data to %s" % (output))
        if compilepdf:
            # OSError will be raised if called command in Popen is not available
            try:
                pdfOutput = "/".join(output.split("/")[:-1])
                if pdfOutput.strip() == "": pdfOutput = "."
                compileProcess = subprocess.Popen([
                    "pdflatex", "-interaction=nonstopmode",
                    "-output-directory", pdfOutput, output
                ],
                                                  shell=False,
                                                  stdout=subprocess.DEVNULL)
                compileProcess.wait()
                # Code 0 means success
                if compileProcess.returncode == 0:
                    click.secho("[+] Successfully compiled to PDF")
                else:
                    click.secho("[-] Failed to compile to PDF")
            except OSError as err:
                click.secho("[-] Command 'pdflatex' not found")
                print("\tError:", err)
    except Exception as e:
        print("\tError:", e)
    try:
        mysqldb.kill()
    except Exception:
        pass
Example #16
from bottle import Bottle, run, response, request, template
from json import dumps as jdumps
from db import DatabaseConnection
from auth import Auth, UserType, UserAlreadyExistsException
import atexit

app = Bottle()

DBConn=DatabaseConnection('db')

def cleanup():
    conn=DBConn.get_conn()
    if conn:
        conn.close()

atexit.register(cleanup)

@app.route('/hello-world')
def hello():
    response.content_type='application/json'
    ret_val=jdumps({ 'message': "Hello World" })
    return ret_val

@app.route('/hello/<name>')
def greet(name='Stranger'):
    response.content_type='application/json'
    ret_message='Hellllllow {name}'.format(name=name)
    return jdumps({ 'visitor':name, 'message': ret_message })
    # return template("Helloww {{name}}, how are you?", name=name)

@app.get('/login')
Example #18
def update_JSON_source(source, timestamp, dry_run, verbose):
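    """ Load the JSON file at source['path'] into a new source_<name>_<timestamp>
    schema with one text column per key appearing in the data. """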
    # Load the JSON file
    data = json_load(source['path'])

    # Obtain column names appearing anywhere in the JSON
    columns = sorted(list(set(chain.from_iterable([datum.keys() for datum in data]))))
    if verbose:
        print('Loaded JSON files with %d columns and %d data rows' % (len(columns), len(data)))

    # Reorganise data into a list of tuples
    data = [tuple(datum[column] if column in datum else "" for column in columns) for datum in data]

    # Create postgres schema
    db = DatabaseConnection(path_config='db_config_update_source.yaml')
    schema = 'source_' + source['name'] + '_' + timestamp
    q = 'CREATE SCHEMA "%s"; SET search_path="%s";' % (schema, schema)
    db.execute(q)

    # Create table containing the actual data from the CSV file
    table = source['table_name']
    table_columns = ', '.join(['%s text' % (name) for name in columns])
    q = 'CREATE TABLE %s (%s);' % (table, table_columns)
    db.execute(q)

    # Populate the table with data
    q = 'INSERT INTO ' + table + ' VALUES %s;'
    db.execute_values(q, data)
    if verbose:
        print('Inserted %d rows into %s.%s%s' % (len(data), schema, table, ' (dry run)' if dry_run else ''))

    # Grant privileges to user data for data/SourceDataInfo to work properly
    db.grant_usage_and_select_on_schema(schema, 'data')

    # Commit and close database connection
    if not dry_run:
        db.commit()
    db.close()
Example #19
from flask import Flask, jsonify, request
import json
from api.models import Users
from db import DatabaseConnection
from flask_jwt_extended import create_access_token, JWTManager, jwt_required, get_jwt_identity
from werkzeug.security import generate_password_hash, check_password_hash

app = Flask(__name__)
jwt = JWTManager(app)
app.config['JWT_SECRET_KEY'] = 'KenG0W@Da4!'

db = DatabaseConnection()

@app.route('/api/v1/signup', methods=['POST'])
def signup():
    data = request.get_json()

    username = data.get('username')
    email = data.get('email')
    password = data.get('password')

    user = Users(username, email, password)
    error = user.validate_input()
    exists = user.check_user_exist()

    if error is not None:
        return jsonify({'Error': error}), 400
    if not exists:
        password_hash = generate_password_hash(password, method='sha256')
        db.register_user(username, email, password_hash)
        token = create_access_token(username)
Example #20
def update_SQL_source(source, timestamp, dry_run, verbose):
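    """ Load an SQL dump (optionally downloaded and gunzipped first) into
    postgres and rename the resulting schema(s) to source_<name>[_<schema>]_<timestamp>. """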
    # Check that the (temporary) schema names created by this data source
    # do not conflict with existing schemas in the database
    db = DatabaseConnection(path_config='db_config_update_source.yaml')
    q = """SELECT schema_name FROM information_schema.schemata WHERE schema_name IN %s LIMIT 1;"""
    q_data = (tuple(source['schemas']),)
    res = db.query(q, q_data, return_dicts=False)
    db.close()
    if len(res) >= 1:
        raise Exception('Schema "%s" that source "%s" reads into already exists' % (res[0][0], source['name']))
    if verbose:
        print('[OK] No conflicting schema names found')

    # Download online resource if a URL is specified, storing it at the
    # location specified in source['path']
    if ('url' in source):
        urllib.urlretrieve(source['url'], source['path'])
        if verbose:
            print('[OK] Downloaded from %s to %s' % (source['url'], source['path']))

    if dry_run:
        print('[WARNING] --dry_run option not implemented for entire pipeline of updating an SQL source')
        db.close()
        return

    # Load into postgres, unzipping along the way
    if source['path'].endswith('.sql.gz'):
        p1 = subprocess.Popen(['gunzip', '-c', source['path']], stdout=subprocess.PIPE)
        subprocess.check_output(['psql', '-d', 'vd', '-q'], stdin=p1.stdout)
    # Load into postgres directly
    else:
        # The options -q -o /dev/null just suppress output
        subprocess.call(['psql', '-d', 'vd', '-f', source['path'], '-q', '-o', '/dev/null'])

    # Rename loaded schema(s) to the desired schema name(s)
    # If there is a single schema, rename it to source_NAME_TIMESTAMP
    # If there are multiple schemas, rename them to source_NAME_SCHEMA_TIMESTAMP
    db = DatabaseConnection(path_config='db_config_update_source.yaml')
    if len(source['schemas']) == 1:
        schema_old = source['schemas'][0]
        schema_new = 'source_' + source['name'] + '_' + timestamp
        db.rename_schema(schema_old, schema_new, verbose)
        # Grant privileges to user data for data/SourceDataInfo to work properly
        db.grant_usage_and_select_on_schema(schema_new, 'data')
    else:
        for schema_old in source['schemas']:
            schema_new = 'source_' + source['name'] + '_' + schema_old + '_' + timestamp
            db.rename_schema(schema_old, schema_new, verbose)
            # Grant privileges to user data for data/SourceDataInfo to work properly
            db.grant_usage_and_select_on_schema(schema_new, 'data')

    # Commit and close database connection
    db.commit()
    db.close()
Example #21
import datetime

from flask import *
from config import *
from forms import CompanyForm

from db import DatabaseConnection

app = Flask(__name__)
app.config.from_object(DevelopmentConfig())

cnx = DatabaseConnection()
cursor = cnx.cursor


@app.route('/')
def home():
    if not session.get('logged_in'):
        return render_template('login.html')
    else:
        return redirect('/dashboard')


@app.route('/login', methods=['POST'])
def do_admin_login():
    if request.form['password'] == 'triveni@123' and request.form[
            'username'] == 'srujal':
        session['logged_in'] = True
        return redirect('/dashboard')
    else:
        return render_template('login.html',
                               message="Incorrect username or password")
def main(args_dict):
    test_mode = not args_dict['disable_test_mode']
    if test_mode:
        print "======================="
        print "=======TEST MODE======="
        print "======================="

    timestamp = datetime.now().strftime('%Y%m%d%H%M%S')
    # Write output into prod_schema_name
    prod_schema_name = "prod_" + timestamp
    print "prod_schema_name", prod_schema_name

    # Create database connections:
    # Read / write address cache from this one
    db_address_cache = DatabaseConnection(
        path_config='db_config_update_source.yaml', search_path='address_cache')
    # Write prod tables into this one
    db_prod = DatabaseConnection(path_config='db_config_update_source.yaml')
    CreateAndSetProdSchema(db_prod, prod_schema_name)

    # Initialize geocoder
    geocoder = geocoder_lib.Geocoder(db_address_cache, db_prod, test_mode)
    # Initialize entity lookup
    entities_lookup = entities.Entities(db_prod)
    # The file prod_tables.yaml specifies the SQL selects used to read the
    # source data and describes the additional tables to be created.
    with open('prod_tables.yaml', 'r') as stream:
        config = yaml.load(stream)
    # This is where all the population happens!
    # Go through all the specified data sources and process them, adding data
    # as needed. Sources are processed in sorted order of their keys.
    for key in sorted(config.keys()):
        config_per_source = config[key]
        print "Working on source:", key
        ProcessSource(db_prod, geocoder, entities_lookup, config_per_source, test_mode)

    # Grant apps read-only access to the newly created schema and tables within
    db_prod.grant_usage_and_select_on_schema(prod_schema_name, 'data')
    db_prod.grant_usage_and_select_on_schema(prod_schema_name, 'verejne')
    db_prod.grant_usage_and_select_on_schema(prod_schema_name, 'kataster')
    db_prod.grant_usage_and_select_on_schema(prod_schema_name, 'prepojenia')
    db_prod.grant_usage_and_select_on_schema(prod_schema_name, 'obstaravania')

    # Commit database changes and close database connections
    db_address_cache.commit()
    db_address_cache.close()
    if test_mode:
        db_prod.conn.rollback()
    else:
        db_prod.commit()
        db_prod.close()

    print "STATS"
    geocoder.PrintStats()