Example #1
def get_datastore(self, dsid=None):
    if dsid is not None:
        req = self.get_request(
            'v1/datastores/{0}'.format(dsid))
        return Datastore(**req.json())
    req = self.get_request('v1/datastores')
    return [Datastore(**res) for res in req.json()]
Example #2
def setupdb(filepath, config):
  print("Creating database...")
  datastore = Datastore(filepath)
  cursor = datastore.connect()
  
  cleanup(cursor)
  datastore.commit()
  
  create_tables(cursor)
  datastore.commit()
    
  ansi = Datastore('/tmp/ansi.db')
  c = ansi.connect()
  cleanup(c)
  load_ansi(c, 'national.txt')
  ansi.commit()
  ansi.close()
  
  load_data(cursor, config)
  datastore.commit()
  
  update_data(cursor)
  datastore.commit()
  
  datastore.close()
Example #3
def claim(modem):
    try:
        datastore = Datastore()
        modem.extractInfo()
        isp = ISP.acquire_isp(
            operator_code=modem.details["modem.3gpp.operator-code"])
        # print("[+] Deduced ISP:", isp)
        router = False
        if "isp" in CONFIGS["ROUTER"] and CONFIGS["ROUTER"]["isp"] == isp:
            router = True
        # print("[+] ROUTER SET TO: ", router)
        new_message = datastore.acquire_message(
            modem_index=modem.index,
            modem_imei=modem.details["modem.3gpp.imei"],
            isp=isp,
            router=router)
    except Exception as error:
        raise Exception(error)
    else:
        if new_message is not None:
            sms = SMS(messageID=new_message["id"])
            sms.create_sms(phonenumber=new_message["phonenumber"],
                           text=new_message["text"])

            return sms
        else:
            return None
Example #4
def init(num):
    """ Initialization function. Must be explicitly called: globes.init(n) """

    global server_num       # this server's number (i.e. this instance)
    global total_servers    # the total number of servers in the system (usually 4)
    global command_sock     # this server's UDP socket for sending/receiving commands
    global reply_sock       # this server's UDP socket for receiving reply values/successes
    global db               # this server's Datastore object
    global delays           # delays[i] returns the avg delay from this server to server i
    global addresses        # addresses[i] returns the 'localhost:1500#' of server i
    global num_replicas     # number of replicas for each key
    global command_counter  # counter for the number of commands sent
    global port_offset      # offset between a server's command port and reply port

    server_num = num

    command_sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    reply_sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)

    db = Datastore()

    # read in the config.json file to initialize delays and addresses
    config_json = open('config.json')
    data = json.load(config_json)
    delays = data['avg_delays'][server_num] # delays is a list of avg delay times
    addresses = data['addresses']

    total_servers = len(addresses)

    num_replicas = 3
    command_counter = 0
    port_offset = 100
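A minimal sketch of the config.json shape init() appears to expect, based on the comments above ('avg_delays' indexed by server number, 'addresses' holding 'localhost:1500#' strings); the values are illustrative, not taken from the original project:

# Hypothetical config.json contents consumed by init() above.
# delays is read as data['avg_delays'][server_num]; addresses[i] is server i's host:port.
import json

example_config = {
    "avg_delays": [
        [0.0, 0.2, 0.5, 0.3],   # avg delay from server 0 to servers 0..3
        [0.2, 0.0, 0.4, 0.6],
        [0.5, 0.4, 0.0, 0.2],
        [0.3, 0.6, 0.2, 0.0],
    ],
    "addresses": ["localhost:15000", "localhost:15001",
                  "localhost:15002", "localhost:15003"],
}

with open('config.json', 'w') as f:
    json.dump(example_config, f, indent=2)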
Example #5
def send_sms(modem, sms):
    datastore = Datastore()
    send_status = None
    try:
        messageLogID = datastore.new_log(messageID=sms.messageID)
    except Exception as error:
        raise error
    else:
        try:
            send_status = modem.send_sms(modem.set_sms(sms))
            if send_status is None:
                logging.warning("[-] Send status failed with sys error")

            elif not send_status["state"]:
                logging.warning("[-] Failed to send...")
                datastore.release_message(messageID=sms.messageID,
                                          status="failed")
                modem.remove_sms(sms)
            else:
                logging.info("[+] Message sent!")
        except Exception as error:
            print("[+] Exception as:", error)
            raise Exception(error)
        else:
            datastore.update_log(messageLogID=messageLogID,
                                 status=send_status["status"],
                                 message=send_status["message"])
            print(">>send status:", send_status)
            return send_status["state"]
Example #6
def new_messages():
    if request.method == 'POST':
        request_body = request.json
        if 'text' not in request_body:
            return jsonify({"status": 400, "message": "missing text"})

        if 'phonenumber' not in request_body:
            return jsonify({"status": 400, "message": "missing phonenumber"})

        text = request_body["text"]
        phonenumber = isp.rm_country_code(request_body["phonenumber"])
        dec_isp = isp.deduce_isp(phonenumber)

        # TODO: authenticate min length
        # TODO: put logger in here to log everything
        print(
            f"[+] New sending message...\n\t-text: {text}\n\t-phonenumber: {phonenumber},\n\t-isp: {dec_isp}"
        )

        return_json = {"status": ""}
        try:
            # TODO: Determine ISP before sending messages
            # datastore = Datastore(configs_filepath="libs/configs/config.ini")
            datastore = Datastore()
            messageID = datastore.new_message(text=text,
                                              phonenumber=phonenumber,
                                              isp=dec_isp,
                                              _type="sending")
            return_json["status"] = 200
            return_json["messageID"] = messageID
        except Exception as err:
            print(traceback.format_exc())

    elif request.method == 'GET':
        print("[?] Fetching messages....")
        return_json = {"status": "", "tstate": ""}
        try:
            # datastore = Datastore(configs_filepath="libs/configs/config.ini")
            datastore = Datastore()
            messages = datastore.get_all_received_messages()
            return_json["status"] = 200
            return_json["messages"] = messages
            return_json["size"] = len(messages)
        except Exception as err:
            print(traceback.format_exc())

    return jsonify(return_json)
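A client for the POST branch above only needs 'text' and 'phonenumber' in the JSON body. The route path is not shown in the snippet, so the URL and values below are placeholder assumptions:

# Minimal client sketch for the POST branch of new_messages().
# The endpoint URL and phone number are hypothetical; the route decorator is not shown above.
import requests

resp = requests.post(
    "http://localhost:5000/messages",          # placeholder URL
    json={"text": "hello", "phonenumber": "+15550100"},
)
print(resp.json())  # expected shape on success: {"status": 200, "messageID": ...}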
Example #7
def build():
    ds = Datastore(f'{homeDir}lootnika_tasks_journal.db')
    try:
        newRst = check_rst(ds)
    except Exception as e:
        log.fatal(f"Error: fail check docs rst: {traceback.format_exc()}")

    sphinxecutor(newRst)
    snapshot(ds, newRst)
    ds.close()
Example #8
def cu_make_time_serieses(time_resolution, data_store, param_store):
    """ Generate cumulative Dash bar charts for all root accounts """
    preventupdate_if_empty(data_store)
    params: Params = Params.from_json(param_store)
    if not time_resolution:
        time_resolution = params.init_time_res
    data_store: Datastore = Datastore.from_json(data_store, params.cu_roots)
    trans: pd.DataFrame = data_store.trans
    account_tree: ATree = data_store.account_tree
    if len(params.cu_roots) > 0:
        account_list = params.cu_roots
    else:
        account_list = [account_tree.root]
    unit: str = params.unit
    data_title = params.ds_data_title
    result: list = []
    # make one chart for each item in the Cumulative account filter

    if not isinstance(account_list, list):
        app.logger.warning(
            f"Account list should be a list but isn't: {account_list}")
        raise PreventUpdate
    for account in account_list:
        fig: go.Figure = go.Figure(layout=layouts['base'])
        fig.update_layout(
            title={"text": f"{data_title} {account}: Cumulative {unit}"},
            xaxis={
                "showgrid": True,
                "nticks": 20
            },
            yaxis={"showgrid": True},
            legend={
                "xanchor": "left",
                "x": 0,
                "yanchor": "bottom",
                "y": 0,
                "bgcolor": "rgba(0, 0, 0, 0)",
            },
            barmode="relative",
        )
        subaccounts: iter = account_tree.get_children_ids(account)
        for j, subaccount in enumerate(subaccounts):
            sub_desc = account_tree.get_descendent_ids(subaccount)
            sub_desc.append(subaccount)
            tba = trans[trans["account"].isin(sub_desc)]
            if len(tba) > 0:
                fig.add_trace(
                    make_cum_area(tba, subaccount, j, time_resolution))
        output = dcc.Graph(id=f"{account}{j}", figure=fig)
        if len(result) > 0:
            result.append(output)
        else:
            result = [output]
    return [result]
Example #9
def get_logs():
    return_json = {"status": ""}
    try:
        # TODO: Determine ISP before sending messages
        # datastore = Datastore(configs_filepath="libs/configs/config.ini")
        datastore = Datastore()
        logs = datastore.get_logs()
        return_json["status"] = 200
        return_json["logs"] = logs
        return_json["size"] = len(logs)
    except Exception as err:
        print(traceback.format_exc())

    return jsonify(return_json)
Example #10
def start():
    global DATASTORE, URL, KEY

    default_tmp = "/tmp/itzod"
    if sys.platform.startswith('win'):
        default_tmp = "C:\\temp\\itzod"

    parser = argparse.ArgumentParser()
    parser.add_argument("-H",
                        "--host",
                        help="Web server Host address to bind to",
                        default="0.0.0.0",
                        action="store",
                        required=False)
    parser.add_argument("-p",
                        "--port",
                        help="Web server Port to bind to",
                        default=8000,
                        action="store",
                        required=False)
    parser.add_argument("-k",
                        "--key",
                        help="Itzod User APIKey for accessing json urls",
                        action="store",
                        required=True)
    parser.add_argument("-u",
                        "--url",
                        help="Base itzod URL for accessing api",
                        default="https://pool.itzod.ru/apiex.php",
                        action="store",
                        required=False)
    parser.add_argument("-d",
                        "--datadir",
                        help="Data directory to store state",
                        default=default_tmp,
                        action="store",
                        required=False)
    args = parser.parse_args()

    logging.basicConfig()
    DATASTORE = Datastore(args.datadir)
    URL = args.url
    KEY = args.key

    t = Timer(60, poll, ())
    t.start()

    run(host=args.host, port=args.port, reloader=True)
Example #11
def update_status_on_ds_tab_content(data_store: str, param_store: str):
    """When the loaded files change, and the data source tab is open,
    then presumably the files changed because of user input to the
    tab controls. So, show feedback.  If the loaded files change
    through the URL mechanism, and the data source tab isn't open,
    then this callback is ignored."""
    preventupdate_if_empty(data_store)
    data_store: Datastore = Datastore.from_json(data_store)
    params = Params.from_json(param_store)
    trans: pd.DataFrame = data_store.trans
    preventupdate_if_empty(trans)
    trans_filename = params.ds_data_title
    c1: pd.DataFrame = trans.iloc[0]
    r1: list = [
        c1.account,
        c1.amount,
        c1.date,
        c1.get("desc"),
        c1.get("full account name"),
        c1.get("parent account"),
    ]

    # As a quick hack to get line breaks in Dash for pre-formatted text, generate status info as lists,
    # then render the lists into Divs

    earliest_trans: datetime64 = trans["date"].min()
    latest_trans: datetime64 = trans["date"].max()

    trans_summary: list = [
        f"{trans_filename}: {len(trans)} records loaded, between {pretty_date(earliest_trans)} and {pretty_date(latest_trans)}"
    ]  # NOQA

    atree = data_store.account_tree
    atree_summary: str = None
    atree_display: str = None
    if atree and len(atree) > 0:
        atree_summary: str = (
            f"{len(atree)} accounts loaded, {atree.depth()} levels deep")
        atree_display: str = atree.show_to_string()

    eras = data_store.eras
    eras_summary: str = None
    if len(eras) > 0:
        eras_summary: str = f"{len(eras)} reporting eras"

    return r1 + [trans_summary, atree_summary, atree_display, eras_summary]
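The "render lists into Divs" hack mentioned in the comment is not shown in the snippet; on the layout side it might look like the sketch below (an assumption, using a recent Dash where html is importable from the dash package):

# Hypothetical rendering of the list-valued summaries returned above,
# one Div per line, so the pre-formatted text gets line breaks in Dash.
from dash import html

def lines_to_divs(lines):
    """Wrap each non-empty entry in its own Div; Dash renders each Div on a new line."""
    return html.Div([html.Div(line) for line in lines if line])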
Example #12
    def __init__(self):
        self.datastore = getattr(g, '_datastore', None)
        if not self.datastore:
            info("Creating connection to datastore")
            self.minio_host = "127.0.0.1:9000"
            self.minio_key = "test_key"
            self.minio_secret = "test_secret"
            self.minio_bucket = "seismic"
            self.datastore = g._datastore = Datastore(self.minio_host,
                                                      self.minio_key,
                                                      self.minio_secret,
                                                      self.minio_bucket)

        self.metadata = getattr(g, '_metadata', None)
        if not self.metadata:
            info("Creating connection to Metadata DB")
            self.crate_endpoints = ["localhost:4200"]
            self.metadata = g._metadata = Metadata(self.crate_endpoints)
            self.metadata.create_tables()
Example #13
def get_locality(county_id, state_fips):
  ds = Datastore('/tmp/ansi.db')
  c = ds.connect()
  
  c.execute(
    """SELECT state_id, county_id, county_name
      FROM
        Ansi
      WHERE
        state_id=? AND
        county_id=?""",
    (
      state_fips,
      county_id,
    )
  )
  
  row = dict(c.fetchone())
  id = "".join([str(row['state_id']),str(row['county_id']).rjust(3,'0')])
  name = row['county_name']
  ds.close()
  
  return (id,name,)
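The id built above is the standard 5-digit FIPS code: the 2-digit state FIPS followed by the county id zero-padded to 3 digits. A worked example using public FIPS assignments (not data from the Ansi table here):

# State FIPS "06" (California) + county id 37, zero-padded -> "06037" (Los Angeles County).
state_fips = "06"
county_id = 37
fips = "".join([str(state_fips), str(county_id).rjust(3, "0")])
assert fips == "06037"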
Example #14
def list_users():
    filters = []
    ds = Datastore()
    res = []
    territory = request.args.get('territory', default='', type=str)
    los = request.args.get('line_of_service', default='', type=str)
    last_name = request.args.get('last_name', default='', type=str)
    ids = request.args.getlist('id', type=None)
    if territory:
        filters.append(('territory', '=', territory))
    if los:
        filters.append(('line_of_service', '=', los))
    if last_name:
        filters.append(('last_name', '=', last_name))
    if ids:
        for every in ids[0].split(','):
            for user in ds.list_users('test', [('id', '=', every)]):
                res.append(dict(user))
        return Response(json.dumps(res), mimetype='application/json')

    users = ds.list_users('test', filters)
    for user in users:
        res.append(dict(user))
    return Response(json.dumps(res), mimetype='application/json')
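The filters above come straight from the query string; requests like the sketch below (URL assumed, since the route decorator is not shown) would build [('territory', '=', ...), ...] or fall through to the id lookup branch:

# Hypothetical calls against list_users(); the route path is a placeholder.
import requests

# Filter by territory and last name -> filters = [('territory', '=', 'west'), ('last_name', '=', 'Doe')]
requests.get("http://localhost:5000/users",
             params={"territory": "west", "last_name": "Doe"})

# A comma-separated id list bypasses the other filters and looks users up one by one.
requests.get("http://localhost:5000/users", params={"id": "1,2,3"})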
Example #15
#!/usr/bin/env python
import json
import time
import datetime
import os
import logging
from config import Config
from datastore import Datastore

config = Config()

datastore = Datastore(db=2)


def write_event(event_type, message, app_name='global'):
    try:
        # Pipeline adding to app event set and then global
        epoch_timestamp = time.time()
        message = "Event: {}, Message: {}".format(event_type, message)
        datastore.writeEvent(app_name, message)
    except Exception as e:
        logging.error("Unable to write log event")


def get_events(app_name='all'):
    events_data = datastore.getEvent(app_name)

    events_formatted = list()
    for event in events_data:
        print(event)
        message = event[0]
Example #16
def setUp(self):
    self.datastore = Datastore()
Example #17
from lmodems import Modems
from router import Router
from datastore import Datastore

format = "[%(asctime)s]>> %(message)s"
logging.basicConfig(format=format, level=logging.DEBUG, datefmt="%H:%M:%S")

CONFIGS = configparser.ConfigParser(interpolation=None)
PATH_CONFIG_FILE = os.path.join(os.path.dirname(__file__), 'configs',
                                'config.ini')
if os.path.exists(PATH_CONFIG_FILE):
    CONFIGS.read(PATH_CONFIG_FILE)
else:
    raise Exception(f"config file not found: {PATH_CONFIG_FILE}")

datastore = Datastore()


def route(mode, sms, modem=None):
    if mode == "online":
        logging.warning("ROUTING ONLINE MODE...")
        # router_url = DEKU_CONFIGS['router_url']
        router_url = CONFIGS["ROUTER"]["default"]
        router = Router(router_url)
        router_response = router.publish(sms)
        if router_response:
            logging.info("successfully routed!")
        else:
            logging.warning("routing failed!")
    elif mode == "offline":
        logging.warning("ROUTING OFFLINE MODE...")
Example #18
def ex_apply_selection(dummy, selectedData, figure, data_store, param_store):
    """Take the selected account from the main explorer chart
    and show it in a series of drill-down charts
    """
    preventupdate_if_empty(data_store)
    data_store: Datastore = Datastore.from_json(data_store)
    tree: ATree = data_store.account_tree
    params: Params = Params.from_json(param_store)
    unit: str = params.unit

    if not selectedData or len(selectedData) == 0:
        account = tree.root
    else:
        for i, indexed_fig in enumerate(figure):
            try:
                account = figure[i]["data"][0]["customdata"][
                    selectedData[i]["points"][0]["pointNumber"]]
                if account and len(account) > 0:
                    break
            except TypeError:
                # happens when clicking on the second or later chart, because
                # the corresponding selectedData will be empty
                pass
    if not account:
        raise PreventUpdate
    lineage = tree.get_lineage_ids(account) + [account]
    charts: list = []
    trans: pd.DataFrame = data_store.trans
    tree: ATree = data_store.account_tree
    tree = tree.append_sums_from_trans(trans)
    tree = tree.roll_up_subtotals()
    palette = cb.Set3
    selection_color = None
    color_data = pd.DataFrame(columns=["account", "color"])

    # iterate through the lineage and make a new stacked bar chart for each level.
    for i, node in enumerate(lineage):
        palette_mod = 12 - i  # compensate for shrinking palette
        drill_data = pd.DataFrame(
            columns=["account", "child_id", "child_tag", "color", "amount"])
        children = tree.children(node)
        level_selection = []
        if len(children) > 0:
            try:
                level_selection = [
                    x.identifier for x in children
                    if x.identifier == lineage[i + 1]
                ]
            except IndexError:
                pass
            for j, point in enumerate(children):
                point_id = point.identifier
                color = palette[j % palette_mod]
                color_data = color_data.append(dict(account=point_id,
                                                    color=color),
                                               ignore_index=True)
                if len(level_selection) > 0:  # If there is a selection …
                    if point_id == level_selection[0]:
                        selection_color = color
                    else:
                        color = "rgba(100, 100, 100, .5)"
                drill_data = drill_data.append(
                    dict(
                        account=node,
                        child_id=point.identifier,
                        child_tag=point.tag,
                        color=color,
                        amount=point.data["total"],
                    ),
                    ignore_index=True,
                )
        else:
            continue
        try:
            drill_data = drill_data.sort_values("amount")
            node_bar: go.Bar = go.Bar(
                y=drill_data["account"],
                x=drill_data["amount"],
                marker_color=drill_data["color"],
                textposition="inside",
                text=drill_data["child_tag"],
                texttemplate="%{text}<br>" + unit + "%{value:,.0f}",
                hovertemplate="%{text}<br>" + unit +
                "%{value:,.0f}<extra></extra>",
                customdata=drill_data["child_id"],
                orientation="h",
            )
            fig: go.Figure = go.Figure(data=node_bar)
            fig.update_layout(layouts["drill"])
            fig.update_traces(traces["drill"])
            if selection_color and len(selection_color) > 0:
                # Don't reuse selected colors in later bars.
                palette = list(set(cb.Set3) - set([selection_color]))
                if i > 0:
                    fig.update_layout(title_text=node, title_x=0, title_y=0.98)
            charts = charts + [
                dcc.Graph(figure=fig, id={
                    "type": "ex_chart",
                    "index": i
                })
            ]
        except Exception as E:
            charts = charts + [html.Div(f"Error making {node}: {E}")]

    if len(lineage) > 1:
        selected_accounts = tree.get_descendent_ids(lineage[-1]) + [lineage[i]]
        sel_trans = trans[trans["account"].isin(selected_accounts)]
        color_data = color_data.set_index("account")
        sel_trans["color"] = sel_trans.account.map(color_data.color)
        sel_trans["color"] = sel_trans["color"].fillna("darkslategray")
    else:
        sel_trans = trans
        sel_trans["color"] = "darkslategray"
    wrapper = textwrap.TextWrapper(width=40)

    def brfill(text, TW):
        return "<br>".join(TW.wrap(text))

    sel_trans["wrap"] = sel_trans["description"].apply(brfill, TW=wrapper)
    sel_trans["pretty_value"] = sel_trans["amount"].apply("{:,.0f}".format)

    sel_trans["customdata"] = (sel_trans["account"] + "<br>" +
                               sel_trans["date"].astype(str) + "<br>" +
                               sel_trans["pretty_value"] + "<br>" +
                               sel_trans["wrap"])
    dot_fig = px.scatter(
        sel_trans,
        x="date",
        y="amount",
        color="color",
        color_discrete_map="identity",
    )
    dot_fig.update_layout(layouts["dot_fig"])
    dot_fig.update_traces(traces["dot_fig"])
    dot_fig.update_traces(customdata=sel_trans["customdata"],
                          hovertemplate="%{customdata}<extra></extra>")

    charts = charts + [dcc.Graph(figure=dot_fig, id="ex_dot_chart")]
    return [charts]
Example #19
from flask import Flask, render_template, Response, redirect, session
from datastore import Datastore
from datetime import datetime
import wakeword as wa
import squat as sq
import cv2
import time

# Initialize the Flask server
app = Flask(__name__)
# Initialize the datastore
datastore = Datastore('data.json')
# Initialize wake-word detection
wakeword_detector = wa.Detector('./ai_models/wakeword-detection.h5')
# Initialize person detection
squat_detector = sq.Detector('./ai_models/person_detection.tflite')
squat_detector.start()
# Create the counter
counter = sq.Counter()


# Runs when the root path (/) is accessed
@app.route('/')
def index():
    # Fetch the squat records for the last two weeks
    items = datastore.get_items(days=14)
    # Return index.html to the client
    return render_template('index.html', items=items)


# Runs when /wakeword is accessed
Example #20
# See the License for the specific language governing permissions and
# limitations under the License.
#
import webapp2
import json
import pika
from datastore import Datastore
from gcloud import datastore

PROJECT = 'cloudcv-1302'
ENTITY_KIND = 'job'
QUEUE = 'queue'
AMQP_URL = ''
REQUEST_RECEIVED = 1

ds = Datastore()

def publish_msg(message):
    conn = pika.BlockingConnection(pika.URLParameters(AMQP_URL))
    channel = conn.channel()
    channel.queue_declare(queue=QUEUE, durable=True)
    channel.basic_publish(exchange='', routing_key=QUEUE, body=message, properties=pika.BasicProperties(delivery_mode=2))
    conn.close()

def format_entity(entity):
    data = json.loads(entity['data'])
    data['id'] = entity.key.id
    return json.dumps(data)

class GetHandler(webapp2.RequestHandler):
    def get(self, id):
Example #21
from task_worker import TaskWorker
from datastore import Datastore
from cloud_storage import CloudStorage
from cloudwatch import CloudWatch
import threading
import pika

ENTITY_KIND = 'job'
QUEUE = 'queue'
AMQP_URL = ''

DATASTORE = Datastore()
STORAGE = CloudStorage()
CLOUDWATCH = CloudWatch()


def publish_queue_length(channel):
    try:
        count = channel.queue_declare(queue=QUEUE,
                                      durable=True).method.message_count
        CLOUDWATCH.publish_queue_length(count)
    finally:
        threading.Timer(5, publish_queue_length, [channel]).start()


def callback(ch, method, properties, body):
    try:
        entity = DATASTORE.get(ENTITY_KIND, long(body))
        worker = TaskWorker(DATASTORE, STORAGE, entity)
        worker.start()
    finally:
Example #22
import csv
import pandas as pd
from flask import Flask, render_template, request, redirect, url_for,\
                  make_response, jsonify
import predictor
from werkzeug.contrib.cache import SimpleCache
from config_handler import ConfigHandler
from pathlib import Path
from datastore import Datastore, PickledDatastore

config = ConfigHandler()

if config.has('redis') and config.get('redis'):
    cache = Datastore()
    config = ConfigHandler(PickledDatastore(cache))
else:
    cache = SimpleCache()
    config = ConfigHandler(cache)

app = Flask(__name__)

data = {}
if config.has('input', 'ieee'):
    data['ieee'] = pd.read_csv(config.get('input', 'ieee'))

if config.has('input', 'acm'):
    acm_data = pd.read_csv(config.get('input', 'acm'))
    acm_data['url'] = acm_data['id'].apply(
        "https://dl.acm.org/citation.cfm?id={}&preflayout=flat#abstract".format
    )
    acm_data['title'] = acm_data['title'].fillna(acm_data['booktitle'])
Example #23
    except ModuleNotFoundError as e:
        log.fatal(f"Can't initialize picker {pickerType}: {e}")
        raise SystemExit(1)
    except AttributeError as e:
        log.fatal(f'Wrong picker initializing: {e}')
        raise SystemExit(1)
    except Exception as e:
        log.fatal(f'Fail initialize picker: {e}')
        raise SystemExit(1)


if __name__ != "__main__":
    log.debug("Starting main thread")

    selfControl = SelfControl()
    ds = Datastore(f'{homeDir}lootnika_tasks_journal.db')

    sphinxbuilder.check_rst(ds)

    from scheduler import Scheduler, first_start_calc
    startTime, taskCycles, repeatMin = first_start_calc(cfg['schedule'])

    # Scheduler and Picker must be able to see each other
    scheduler = Scheduler(cfg['schedule']['tasks'], taskCycles, repeatMin,
                          startTime)
    Picker = load_picker()
    Thread(name='Scheduler', target=scheduler.run, args=(Picker, )).start()

    import restserv
    Thread(
        name='RestServer',
Example #24
"""

import yaml

from flask import Flask, jsonify, make_response, request, url_for, abort
import flask_httpauth
from flask_httpauth import HTTPBasicAuth
from datastore import Datastore

API_SERVER_VERSION = "0.0.1"
LISTEN_PORT = 5000

api_app = Flask(__name__)  # pylint: disable=invalid-name
http_auth = HTTPBasicAuth()  # pylint: disable=invalid-name
app_users = {}  # pylint: disable=invalid-name
db = Datastore()  # pylint: disable=invalid-name

#############################################################################
# Error handlers
#############################################################################


@api_app.errorhandler(404)
def not_found(error):
    """Error handler for 404 (pastie not found) errors."""
    return make_response(jsonify({
        'error': 'Not Found',
        'details': str(error)
    }), 404)

Example #25
from flask import Flask, render_template, request, jsonify
from datastore import Datastore
from validation import Validator
from features import FareCalculator, NearestBus

# Initialize datastore
datastore = Datastore("busdata.db")
datastore.init_all()

# Initialize app features
nearestbus = NearestBus(datastore)
farecalculator = FareCalculator(datastore)

# Initialize validation
validator = Validator()

app = Flask(__name__)


@app.route("/", methods=["GET", "POST"])
def main():
    return render_template("index.html")


@app.route("/nearestbusstop", methods=["GET", "POST"])
def nearestbusstop():
    """Find the nearest bus stop from a given coordinate
    Coordinate can be retrieved automatically or
    manually entered

    NOTE: Rendering an external HTML file is a workaround