def build_database(idl_files,
                   database_dir,
                   feature_defines=None,
                   logging_level=logging.WARNING,
                   examine_idls=False):
    """Reconstruct the FremontCut IDL database from W3C, WebKit and Dart
    IDL files.

    Args:
        idl_files: iterable of WebKit IDL file paths to import.
        database_dir: directory backing the database; its existing IDL
            content is deleted before the rebuild.
        feature_defines: optional list of feature-define names handed to
            the IDL preprocessor; defaults to module-level FEATURE_DEFINES.
        logging_level: level applied to this module's logger and passed to
            the builder options.
        examine_idls: when True, run the builder's diagnostic pass over
            the imported IDL.

    Returns:
        The populated database.Database instance (not saved here).
    """
    current_dir = os.path.dirname(__file__)
    # Logging layout comes from logging.conf next to this script.
    logging.config.fileConfig(os.path.join(current_dir, "logging.conf"))

    _logger.setLevel(logging_level)

    db = database.Database(database_dir)

    # Delete all existing IDLs in the DB.
    db.Delete()

    builder = databasebuilder.DatabaseBuilder(db)

    # TODO(vsm): Move this to a README.
    # This is the Dart SVN revision.
    webkit_revision = '1060'

    # TODO(vsm): Reconcile what is exposed here and inside WebKit code
    # generation.  We need to recheck this periodically for now.
    webkit_defines = ['LANGUAGE_DART', 'LANGUAGE_JAVASCRIPT']

    if feature_defines is None:
        feature_defines = FEATURE_DEFINES

    webkit_options = databasebuilder.DatabaseBuilderOptions(
        # TODO(vsm): What else should we define as on when processing IDL?
        idl_defines=webkit_defines + feature_defines,
        source='WebKit',
        source_attributes={'revision': webkit_revision},
        logging_level=logging_level)

    # Import WebKit IDLs.
    builder.import_idl_files(idl_files, webkit_options, False)

    # Import Dart idl:
    dart_options = databasebuilder.DatabaseBuilderOptions(
        source='Dart',
        rename_operation_arguments_on_merge=True,
        logging_level=logging_level)

    utilities.KNOWN_COMPONENTS = frozenset(['core', 'modules', 'dart'])

    builder.import_idl_files(
        [os.path.join(current_dir, '..', 'idl', 'dart', 'dart.idl')],
        dart_options, True)

    start_time = time.time()

    # Merging:
    builder.merge_imported_interfaces()

    builder.fetch_constructor_data(webkit_options)
    builder.fix_displacements('WebKit')

    # Cleanup:
    builder.normalize_annotations(['WebKit', 'Dart'])

    # Map any IDL defined dictionaries to Dictionary.
    builder.map_dictionaries()

    # Examine all IDL and produce a diagnoses of areas (e.g., list dictionaries
    # declared and usage, etc.)
    if examine_idls:
        builder.examine_database()

    # Cross-check conditionals actually seen in the IDL against the known
    # FEATURE_DEFINES/FEATURE_DISABLED lists and warn when they drift.
    conditionals_met = set('ENABLE_' + conditional
                           for conditional in builder.conditionals_met)
    known_conditionals = set(FEATURE_DEFINES + FEATURE_DISABLED)

    unused_conditionals = known_conditionals - conditionals_met
    if unused_conditionals:
        _logger.warning('There are some unused conditionals %s' %
                        sorted(unused_conditionals))
        _logger.warning('Please update fremontcutbuilder.py')

    unknown_conditionals = conditionals_met - known_conditionals
    if unknown_conditionals:
        _logger.warning('There are some unknown conditionals %s' %
                        sorted(unknown_conditionals))
        _logger.warning('Please update fremontcutbuilder.py')

    # NOTE(review): Python 2 print statement — this snippet predates py3.
    print 'Merging interfaces %s seconds' % round(time.time() - start_time, 2)

    return db
Esempio n. 2
0
from PIL import Image
import sys
import time
from io import BytesIO

from imageparser import *
from chequeparser import *

import database
import datetime

# Smoke-test driver: parse a sample cheque image, store the purchases,
# then print a two-week report — timing each stage.
db = database.Database('cheque - Copy.db')

t0 = time.perf_counter()
products = parse_cheque("C:\\Program Files\\Tesseract-OCR\\Tesseract.exe",
                        'testcheque.png')
t1 = time.perf_counter()
print("Parsing, time = " + str(t1 - t0) + " sec")

t0 = time.perf_counter()
db.insert_purchases("Petr", products)
t1 = time.perf_counter()
print("Adding, time = " + str(t1 - t0) + " sec")

t0 = time.perf_counter()
report = db.report_str('Petr', datetime.date(2020, 11, 30),
                       datetime.date(2020, 12, 14))
print(report)
t1 = time.perf_counter()
print("Reporting, time = " + str(t1 - t0) + " sec")
Esempio n. 3
0
from crw import \
    DATABASE_HOST, DATABASE_PORT, DATABASE_NAME, DATABASE_USER, DATABASE_PASS
import database

if __name__ == '__main__':
    # One-shot setup: create the users table in the configured database,
    # then release the connection.
    user_db = database.Database(DATABASE_HOST, DATABASE_PORT, DATABASE_NAME,
                                DATABASE_USER, DATABASE_PASS)
    user_db.init_database()
    user_db.close_database_connection()
Esempio n. 4
0
from flask_lib import FlaskLib
import database
import time
import os
import urllib.request
import math
import csv
import io


backend = FlaskLib()

# Pick database credentials by environment: the 'mohit' environment runs
# against a password-less local postgres, everything else uses the full
# (scrubbed here) credentials.
_env = os.environ.get('FRESHMART_SONU_ENV')
if _env == 'mohit':
    db = database.Database(dbname='postgres', user="******", password="")
else:
    db = database.Database(dbname='postgres', user="******", password="******")


@backend.api('/api/multi')
def multi(d):
    # Multiply the two operands supplied in the request payload.
    x, y = d['x'], d['y']
    return x * y

########
def Is_login(session):
  """Return True when the session holds a login_key containing an id.

  The original if/else returning literal True/False is collapsed into the
  boolean expression itself.
  """
  return "login_key" in session and "id" in session["login_key"]


def login_id(session):
Esempio n. 5
0
    if id == stud1:
        count1 = count1 + 1
    elif id == stud2:
        count2 = count2 + 1
    elif id == stud3:
        count3 = count3 + 1
    else:
        count4 = count4 + 1

    print("Enter 0 to exit")
    id = input()

print("--------Attendence is updated--------")
print("-------Details are-------")
database.Database("335", "sarbeshwar singh", "22", "2016", "cse", count1)
print("**************************")
database.Database("389", "sumit sharma", "22", "2016", "cse", count2)
print("**************************")
database.Database("324", "sahil dhiman", "11", "2016", "cse", count3)
print("**************************")
database.Database("344", "savar kaul", "22", "2016", "cse", count4)


#database linking
def create_table():
    """Create the attendence table if it is missing.

    Relies on the module-level cursor `c` being connected elsewhere.
    """
    sql = (" CREATE TABLE IF NOT EXISTS attendence(Time time,id int ,name "
           "varchar(20),section int,batch int,course char(30),attendence int)")
    c.execute(sql)

Esempio n. 6
0
    def __init__(self, app):
        # Build the dialog UI, keep references to the owning application
        # and a database handle, then subscribe to the dialog's
        # 'response' signal.
        self.ui = plan_compute_dlg_ui.PlanComputeDlgUI()
        self.app = app
        self.db = database.Database()

        self.ui.dialog.connect('response', self.on_response)
Esempio n. 7
0
    def prepare(self):
        # Open the SWAPI database; a ServerError is answered with its
        # JSON form rather than being propagated.
        try:
            self.database = database.Database(settings.SWAPI_DATABASE)
        except error.ServerError as exc:
            self.finish(exc.to_json())
Esempio n. 8
0
File: app.py Progetto: lipi/zabla
def admin():
    """Render all counters through the admin table component."""
    rows = database.Database().counters.all()
    return AdminTable(rows).__html__()
Esempio n. 9
0
import reddit, database
import fetcher.collective, fetcher.eternal, fetcher.mtg, fetcher.ygo, fetcher.hs
import discord, requests
from PIL import Image
from discord.ext import commands
import re, os, io

# global variables
bot = commands.Bot(command_prefix="!")
collective_sub = reddit.CollectiveSub()

# Database connections — deliberately best-effort: the bot still runs
# without persistence.  The original bare `except:` also swallowed
# KeyboardInterrupt/SystemExit; narrowed to Exception.
try:
    db = database.Database(os.environ.get("DATABASE_URL"))
    new_command_table = database.TableWrapper(db, "new_command", "name",
                                              "content")
    memes_table = database.TableWrapper(db, "memes", "name", "content")
    admins_table = database.TableWrapper(db, "admins", "user_id", "privileges")
except Exception:
    print("db off")

# This is the fetcher dict. when a search modifier is specified,
# the bot looks here for the right fetcher to use.
# if you are extending this bot, add your fetcher through here.
# a fetcher class must have a __getitem__ method that returns a string
# on success and KeyError on failure.
# if you want to override the default search, override the value of the key "none".

card_fetchers = {
    "none": fetcher.collective.CollectiveFetcher(),
    "tk": fetcher.collective.CollectiveTokenFetcher(),
Esempio n. 10
0
	inbox.flush()
	db.flush()
	
	
# Connect to WiFi unless a connection already exists (the original
# if/pass/else is inverted into a single guarded call).
if not wifi.is_connected():
    wifi.connect()

# Init GFX and buttons.
ugfx.init()
buttons.init()

# Set up the two key/value stores: general settings and the inbox.
db = database.Database()
inbox = database.Database(filename='inbox.json')

# Server address.
server = 'badge.emf.camp'

# CPU ID.  NOTE(review): `id` shadows the builtin — kept for
# compatibility with code further down the file.
id = str(ubinascii.hexlify(pyb.unique_id()), 'ascii')

# MSISDN — presumably populated by myNumber(); verify against its body.
mynumber = None
myNumber()

# Seed the message sequence counter on first run.  `is None` replaces
# the original `== None` (PEP 8: compare to singletons with `is`).
if db.get('msgseq') is None:
    db.set('msgseq', 0)
Esempio n. 11
0
File: app.py Progetto: lipi/zabla
def zabla():
    """Render all counters through the public-facing table component."""
    rows = database.Database().counters.all()
    return CounterTable(rows).__html__()
Esempio n. 12
0
def build_database(idl_files, database_dir, feature_defines = None):
  """Reconstruct the FremontCut IDL database from W3C, WebKit and Dart
  IDL files.

  Args:
    idl_files: WebKit IDL file paths, imported one file at a time.
    database_dir: directory backing the database; existing IDL content
      is deleted before the rebuild.
    feature_defines: optional preprocessor defines; defaults to
      DEFAULT_FEATURE_DEFINES.

  The rebuilt database is saved to disk before this function returns.
  """
  current_dir = os.path.dirname(__file__)
  # Logging layout comes from logging.conf next to this script.
  logging.config.fileConfig(os.path.join(current_dir, "logging.conf"))

  db = database.Database(database_dir)

  # Delete all existing IDLs in the DB.
  db.Delete()

  builder = databasebuilder.DatabaseBuilder(db)

  # TODO(vsm): Move this to a README.
  # This is the Dart SVN revision.
  webkit_revision = '1060'

  # TODO(vsm): Reconcile what is exposed here and inside WebKit code
  # generation.  We need to recheck this periodically for now.
  webkit_defines = [ 'LANGUAGE_DART', 'LANGUAGE_JAVASCRIPT' ]
  if feature_defines is None:
      feature_defines = DEFAULT_FEATURE_DEFINES

  webkit_options = databasebuilder.DatabaseBuilderOptions(
      idl_syntax=idlparser.WEBKIT_SYNTAX,
      # TODO(vsm): What else should we define as on when processing IDL?
      idl_defines=webkit_defines + feature_defines,
      source='WebKit',
      source_attributes={'revision': webkit_revision},
      # WebKit-internal interface names mapped to their standard DOM names.
      type_rename_map={
        'BarInfo': 'BarProp',
        'DedicatedWorkerContext': 'DedicatedWorkerGlobalScope',
        'DOMApplicationCache': 'ApplicationCache',
        'DOMCoreException': 'DOMException',
        'DOMFormData': 'FormData',
        'DOMSelection': 'Selection',
        'DOMWindow': 'Window',
        'SharedWorkerContext': 'SharedWorkerGlobalScope',
        'WorkerContext': 'WorkerGlobalScope',
      })

  # (interface, operation, argument) triples whose argument stays optional
  # when interfaces are merged.
  optional_argument_whitelist = [
      ('CSSStyleDeclaration', 'setProperty', 'priority'),
      ]

  # Import WebKit IDLs.
  for file_name in idl_files:
    builder.import_idl_file(file_name, webkit_options)

  # Import Dart idl:
  dart_options = databasebuilder.DatabaseBuilderOptions(
    idl_syntax=idlparser.FREMONTCUT_SYNTAX,
    source='Dart',
    rename_operation_arguments_on_merge=True)

  builder.import_idl_file(
      os.path.join(current_dir, '..', 'idl', 'dart', 'dart.idl'),
      dart_options)

  # Merging:
  builder.merge_imported_interfaces(optional_argument_whitelist)

  builder.fetch_constructor_data(webkit_options)
  builder.fix_displacements('WebKit')

  # Cleanup:
  builder.normalize_annotations(['WebKit', 'Dart'])

  db.Save()
Esempio n. 13
0
import sys, database

for arg in range(1, len(sys.argv)):

    # Read in file name from cmd line
    input_file = sys.argv[arg]
    with open(input_file, "r") as read_file:

        transaction_count = 0  # Used to know whether inside of a transaction
        records = []  # Transaction Blocks
        prev = [
        ]  # Keeps track of prev variable to maintain ROLLBACK consistency
        my_db = database.Database()  # Database

        # The following will Parse and Execute Commands
        for line in read_file:

            # Separate Command Arguments into a list
            # and assign to simpler variables for Readability

            arguments = line.split()
            length = len(arguments)
            COMMAND = arguments[0]
            if length == 2:
                name = arguments[1]
            if length == 3:
                name = arguments[1]
                value = arguments[2]

            # The Interface for DataBase Commands
Esempio n. 14
0
from mako.template import Template
from mako.lookup import TemplateLookup
import os, os.path
import sys
current_dir = os.path.dirname(os.path.abspath(__file__))

lookup = TemplateLookup(directories=['html'])

datafilename = 'data.pkl'
pour_serial_obj = pour_serial()

# Load the persisted database, or start a fresh one on first run.
# Pickle data is binary: open with 'rb' (the original text-mode 'r'
# breaks under Python 3) and let `with` close the handle.
try:
  with open(datafilename, 'rb') as data_file:
    database = pickle.load(data_file)
except IOError:
  database = db.Database()

def save_data():
  """Persist the in-memory database to `datafilename` via pickle.

  Pickle requires binary file modes ('wb'/'rb'); the original opened in
  text mode and never closed the handles.
  """
  global database
  with open(datafilename, 'wb') as data_file:
    pickle.dump(database, data_file)
  # Read-back sanity check; the result is intentionally discarded,
  # matching the original behaviour.
  with open(datafilename, 'rb') as data_file:
    dbase = pickle.load(data_file)

class Server(object):
  """Root CherryPy handler."""

  @cherrypy.expose()
  def index(self):
    """Serve the site header template."""
    return lookup.get_template('header.html').render()


class Pour(object):
Esempio n. 15
0
def make_app():
    """Build the Tornado application: routes plus auth/static/template
    settings drawn from the loaded config."""
    oauth = {
        'key': config.settings["oauth2"]["clientId"],
        'secret': config.settings["oauth2"]["clientSecret"],
    }
    settings = {
        'debug': True,
        'cookie_secret': config.settings["authCookie"]["secret"],
        'google_oauth': oauth,
        'login_url': '/auth/login',
        'static_path': os.path.join(PATH, 'static'),
        'template_path': os.path.join(PATH, 'templates'),
        'ui_modules': uimodules,
    }
    routes = [
        (r"/", MainHandler),
        (r"/auth/login", authentication.GoogleOAuth2LoginHandler),
        (r"/auth/logout", authentication.LogoutHandler),
        (r"/secure", ExampleAuthRequiredHandler),
        (r"/perms/(.*)", PermHandler),
    ]
    return tornado.web.Application(routes, **settings)


if __name__ == "__main__":
    # Load configuration, open the schema-backed database, then serve
    # the Tornado app forever on port 8888.
    config.init()
    db = database.Database('makermon.db', database.db_schema)
    app = make_app()
    app.listen(8888)
    print("Makermon server started...")
    tornado.ioloop.IOLoop.current().start()
Esempio n. 16
0
import os
import bcrypt
from flask import (Flask, render_template, request, url_for, session, redirect,
                   flash, jsonify)

import database

app = Flask(__name__)
# Fresh random secret each start: sessions do not survive a restart.
app.config['SECRET_KEY'] = os.urandom(32)

db = database.Database('store.db')


@app.route('/')
def index():
    """Serve the landing page."""
    return render_template('index.html')


@app.route('/checkuname', methods=['POST'])
def checkuname():
    """AJAX check: report whether the requested username is available."""
    uname = request.json['uname']
    # Available iff no existing user record — `not` collapses the
    # original if/else that assigned literal True/False.
    return jsonify({'availability': not db.get_user(uname)})


@app.route('/signin', methods=['POST'])
def signin():
    uname = request.form.get('uname')
    passwd = request.form.get('passwd')
    entry = db.get_user(uname)
Esempio n. 17
0
 def userJoined(self, user, channel):
     """IRC callback: log the join and greet first-time users."""
     # `user` is "nick!user@host"; take the nick before the '!'.
     nick, _, host = user.partition("!")
     self.cl.log_chan(nick, channel, "joined-channel")
     db = database.Database(config.sqlite_path)
     # NOTE(review): store_user_login presumably returns truthy for a
     # first-seen nick — confirm; those users get the welcome queued.
     if db.store_user_login(nick):
         db.enqueue_msg(nick, WELCOME_MSG)
Esempio n. 18
0
import tkinter as tk
from api_functions import CoinsClass
from api_functions import TradesClass
from api_functions import GraphsData
import tkinter.font as tk_fonts
import database
import extra_functions
import ctypes
import matplotlib.pyplot as plt

# The page where our wallets will be created.
# Shared API clients and the app-wide database handle, constructed once
# at import time and reused by every frame.
coin_class = CoinsClass()
trades_class = TradesClass()
plot_data = GraphsData()
mydatabase = database.Database()

class Wallets(tk.Frame):

    def __init__(self, parent, controller):

        b_g = "#e0ebeb"
        f_g = "gray31"
        families = ["Courier", "Comic Sans MS", "Arial Black", "Verdana", "Yu Gothic UI"]
        d_font = tk_fonts.Font(family=families[4], size=10, weight="bold")

        tk.Frame.__init__(self, parent, bg=b_g)
        self.controller = controller

        self.options_frame = tk.Frame(self, bg="white")
        self.options_frame.grid(column=0, columnspan=2, row=0, pady=(10, 5))
Esempio n. 19
0
def cleanup():
    """Release this site's downloaded-but-unprocessed GBNCC jobs back to
    the unprocessed pool."""
    query = ("UPDATE GBNCC SET ProcessingStatus='u',ProcessingSite=NULL "
             "WHERE ProcessingStatus='d' AND ProcessingSite='%s'"
             % config.machine)
    obs_db = database.Database("observations")
    obs_db.execute(query)
    obs_db.close()
Esempio n. 20
0
def worldCrawling():
    """Scrape worldometers' per-country COVID table into the database,
    and the page's "last updated" stamp into the time database.

    Improvements over the original: each row's cells are fetched with a
    single find_all("td") instead of six, the continent/world filter is a
    set-membership test instead of a chained-or, and empty-cell
    defaulting uses `or`.
    """
    doc = requests.get("https://www.worldometers.info/coronavirus/")
    soup = BeautifulSoup(doc.text, "html.parser")
    data = soup.select("#main_table_countries_today > tbody > tr")

    # Aggregate rows (continents and world totals) are excluded.
    aggregates = {"World", "Total:", "Europe", "North America", "Asia",
                  "South America", "Africa", "Oceania"}

    worldData = []

    for datum in data:
        cells = datum.find_all("td")  # one DOM walk per row
        country = cells[0].text.strip()
        totalCases = cells[1].text.strip()
        newCases = cells[2].text.strip()
        totalDeaths = cells[3].text.strip()
        newDeaths = cells[4].text.strip()
        totalRecovered = cells[5].text.strip()

        if not country or country in aggregates:
            continue

        # Empty cells default to zero; the "new" columns keep the '+'
        # delta prefix the site uses.
        totalCases = totalCases or '0'
        totalDeaths = totalDeaths or '0'
        totalRecovered = totalRecovered or '0'
        newCases = newCases or "+0"
        newDeaths = newDeaths or "+0"

        worldData.append([country, totalCases + '\n' + newCases,
                          totalDeaths + '\n' + newDeaths, totalRecovered])

    db = database.Database()
    db.connectDb()
    db.inIt()

    # Row layout: [country, cases\nnew, deaths\nnew, recovered].
    for row in worldData:
        db.insertDb(row[0], row[1], row[2], row[3])

    db.closeDb()

    timeDb = timeDatabase.TimeDatabase()
    timeDb.connectDb()
    timeDb.inIt()

    # "Last updated" banner (real-time info), e.g. "... April 29, 2020, 10:35 GMT".
    dayInfo = soup.select("div.content-inner")
    dayData = dayInfo[0].find_all("div")[1].text.strip().split(' ')

    year = dayData[4][:4]
    month = monthTrans(dayData[2])
    day = dayData[3][:2]
    time = dayData[5]

    timeDb.insertDb(year, month, day, time)
    timeDb.closeDb()
Esempio n. 21
0
from flask import Flask, render_template
from flask_socketio import SocketIO, emit
from flask_cors import CORS, cross_origin
import database as db
import hashlib

app = Flask(__name__)
app.config['SECRET_KEY'] = 'secret!'
app.config['CORS_HEADERS'] = 'Content-Type'
# Development setting: accept cross-origin requests from anywhere.
cors = CORS(app, resources={r"/*": {"origins": "*"}})
socketio = SocketIO(app)

# NOTE(review): this rebinds `db` from the imported module to an
# instance — the module name is shadowed from here on.
db = db.Database()
db.setup()


# Kept as a hook: removing this after_request handler broke POST
# requests; CORS headers could be re-enabled here if the flask_cors
# defaults ever prove insufficient.
@app.after_request
def add_header(response):
    return response


@app.route('/')
def index():
    """Serve the main page, passing a demo token to the template."""
    return render_template('index.html', flask_token="Hello world")


@socketio.on("FromFrontend")
def handcle_message(data):
Esempio n. 22
0
import os
from sys import argv
import database
import utilities
import micr
import imageExtractor
import handWritingRecognition
import signatureVerification

# Current working directory, re-rooted at the cheque-verification project.
currentWorkingDir = os.path.dirname(os.getcwd())
currentWorkingDir = os.path.join(currentWorkingDir, "cheque-verification")

# Connecting to Database
db = database.Database(currentWorkingDir)

# Receiver account number from argv[2] — no validation, assumes numeric.
receiverAccountNumber = int(argv[2])

# Path to the cheque image named by argv[1].
imageDir = currentWorkingDir + "/cheque_images"
imagePath = os.path.join(imageDir, argv[1])

# MICR Object
micrCode = micr.MICR(imagePath)

# Extracted Micr from Cheque
micrString = micrCode.extractMICR()

# Micr Id (middle part of Micr) — split into space-separated fields.
micrId = micrString.split(" ")
Esempio n. 23
0
    def emptyline(self):
        # Override cmd.Cmd: an empty input line does nothing instead of
        # repeating the previous command.
        pass

    def precmd(self, line):
        # Stamp the start time so postcmd can report execution time.
        self.time_before = time.time()
        return line

    def postcmd(self, stop, line):
        # Log the wall-clock duration of the command that just ran,
        # measured from the precmd stamp.
        logger.info('Execution time was {}'.format(
            round(time.time() - self.time_before, 2)))
        return stop

if __name__ == '__main__':
    colorama.init()
    config = ConfigParser()
    # Load the default media directory from ./yampconfig, creating the
    # file from user input on first run.
    if os.path.exists('yampconfig'):
        config.read('yampconfig')
        path = config['DefaultDirectory']['path']
    else:
        path = get_path_from_user()
        config['DefaultDirectory'] = {'path': path}
        with open('yampconfig', 'w') as file:
            config.write(file)
    verify_dir(path)
    db = database.Database(path)
    logger.info('Started')
    # Treat ';' like whitespace when tab-completing.
    readline.set_completer_delims(' \t\n;')
    YampShell(completekey='tab').cmdloop(
        'This is yet another media player.\nUse help or help <command> to get help. '
    )
Esempio n. 24
0
    np.random.seed(100)

    # read config file
    config = ec.ExperimentConfig(args.config)
    output_dir = args.output_dir
    #chunk = db.Chunk(config)

    # make output directory
    """
    try:
        os.makedirs(dest)
    except os.error:
        pass
    """

    # loop through objects, labelling each
    database = db.Database(config)
    keys = []
    logging.info('Reading datset %s' % (database.datasets[0].name))

    obj = database.datasets[0][0]  #['xbox_0120']
    logging.info('Labelling object {}'.format(obj.key))
    label_pfc(obj, database.datasets[0], output_dir, config)
    """
    obj = chunk['dove_beauty_bar']
#    for obj in chunk:
    logging.info('Labelling object {}'.format(obj.key))
    label_pfc(obj, chunk, config)
    """
 def test_get_user_model(self):
     """User.create_from_database should build a User from DB row id 1."""
     # NOTE(review): no assertion — this only verifies the call does not
     # raise; consider asserting on the returned user's fields.
     db = database.Database(config.DB_HOST, config.DB_NAME, config.DB_USER,
                            config.DB_PASSWORD)
     user = models.User.create_from_database(db, 1)
Esempio n. 26
0
def save_player(player2save):
    """Write the player's record, party and bag back to pokemon.db."""
    store = database.Database("pokemon.db")
    # Same three writes, same order as before.
    for write in (store.update_player, store.update_party, store.update_bag):
        write(player2save)
Esempio n. 27
0
        for i in range(22):
            print(f.stockfish.stdout.readline())


if __name__ == '__main__':
    moves = """1. d4 Nf6 2. d5 c6 3. c4 cxd5 4. Qc2 e6 5. c5 Na6 6. Nf3 Bxc5 7. e3 Qa5+ 8. Nfd2 O-O 9. Bd3 g5 10. O-O b5 
    11. Nb3 Qa4 12. Nc3 Qg4 13. Nxb5 Nb4 14. Qxc5 Nxd3 15.
Qc2 Nxc1 16. Rfxc1 Ba6 17. Nc7 Bb7 18. Nxa8 Bxa8 19. Nc5 Rc8 20. b4 Ne4 21. f3
Qf5 22. fxe4 dxe4 23. Qd2 a5 24. Qxd7 Rc6 25. Qd8+ Kg7 26. Qxa8 Rxc5 27. bxc5 g4
28. c6 Qe5 29. c7 Qxa1 30. Rxa1 1-0"""
    moves2 = """1. d4 Nf6"""
    settings = {"database": {"db_dir": "database"}, "user": "******"}
    b = board.Board()
    f = Integration()
    f.set_depth(10)
    db = database.Database(settings)
    for game in db.fetch_all():
        move = game[13].split("200.")
        #print(move)
        #if "1. Nf3 Nc6 2. c4 Nf6 3. d4 d6 4. Bd2 Bg4 5. g3 Bxf3 6. exf3 g6 7. f4 Bg7 8. Bg2 Qc8 9. O-O Nxd4 10. Qa4+ Nd7" in move[0]:
        if "1. Nf3 Nc6 2. c4 Nf6 3. d4 d6 4. Bd2 Bg4 5. g3 Bxf3 6. exf3 g6" in move[
                0]:

            b.to_fen(move_string="1. Nf3 Nc6 2. c4 Nf6 3. d4")
            print(b)
            print(b.fen)
            f = copy.deepcopy(b)
            b.to_fen(move_string="d6 4. Bd2")
            print(b)
            print(b.fen)
            print(f)
Esempio n. 28
0
import random
import database

print "Connecting to common DB"
db = database.Database('common')
print "Connection established"
print "conn:", db.conn
print "cursor:", db.cursor

# Sanity query: count header rows whose source name starts with 'G'.
query = "SELECT COUNT(*) " \
        "FROM headers " \
        "WHERE source_name LIKE 'G%'"
print "\nPerforming SELECT query:", query
db.execute(query)
print db.cursor.fetchone()[0]

# Insert a row of random test data.
# NOTE(review): values are spliced with %-formatting, not parameterized —
# tolerable in this throwaway test, unsafe for untrusted input.
randint = random.randint(0, 1000)
randfloat = randint / 1000.0
teststring = "This is a test"
query = "INSERT INTO Test_table (" \
             "test_int, " \
             "test_float, " \
             "test_string) " \
        "VALUES (%d, %f, '%s') " % \
            (randint, randfloat, teststring)
print "\nInserting to test table:", query
db.execute(query)

# Fetch back the row just inserted, keyed on @@IDENTITY.
query = "SELECT test_id, test_int, test_float, test_string " \
        "FROM Test_table " \
        "WHERE test_id=@@IDENTITY"
Esempio n. 29
0
def generateLink(sqlData):
    """Map a user_group_join row to a {'user_id', 'group_id'} dict."""
    user_id, group_id = sqlData[0], sqlData[1]
    return {'user_id': user_id, 'group_id': group_id}


# if we've been asked to display a group, get that group.
data = cgi.FieldStorage()
if 'group' in data:
    selectedGroup = int(data['group'].value)
else:
    selectedGroup = 0

db = database.Database()
# Index groups by id for constant-time lookup below.
groups = db.LoadFromDatabase("groups", generateGroup)
group_index = dict()
for group in groups:
    group_index[group['id']] = group

# Index participants by their id as well.
people = participant.LoadAll(db)
people_index = dict()
for person in people:
    people_index[person._id] = person

# Raw (user_id, group_id) join rows; resolved against the indexes later.
user_group_link = db.LoadFromDatabase("user_group_join", generateLink)

if selectedGroup is not 0:
    listOfPeople = []
    for join in user_group_link:
Esempio n. 30
0
def main():
    t0 = time()
    # Read commandline options
    conf_parser = argparse.ArgumentParser(
        description='Collection of utilities' + ' to manage TLEs')
    conf_parser.add_argument(
        "-c",
        "--conf_file",
        help="Specify configuration file. [Default configuration.ini]",
        dest='conf_file',
        nargs='?',
        const=1,
        default='configuration.ini',
        type=str,
        metavar="FILE")
    conf_parser.add_argument("-f",
                             "--tle",
                             help="Specify TLE file. [Default bulk.tle]",
                             dest='tle_file',
                             nargs='?',
                             type=str,
                             metavar="FILE")
    conf_parser.add_argument("--tlepath",
                             help="Specify TLE path. [Default ./tle]",
                             dest='tle_path',
                             nargs='?',
                             type=str,
                             metavar="PATH")
    conf_parser.add_argument("--update",
                             help="update TLEs from online sources",
                             action="store_true")
    conf_parser.add_argument("-dbname",
                             "--database",
                             help="database to USE",
                             dest='dbname',
                             default='opensatcat_dev',
                             nargs='?',
                             const=1,
                             type=str,
                             metavar="NAME")
    conf_parser.add_argument("-H",
                             "--hostname",
                             help="database hostname",
                             dest='dbhostname',
                             default='db.consensys.space',
                             nargs='?',
                             const=1,
                             type=str,
                             metavar="HOSTNAME")
    conf_parser.add_argument("-u",
                             "--user",
                             help="database user name",
                             dest='dbusername',
                             nargs='?',
                             type=str,
                             metavar="USER")
    conf_parser.add_argument("-p",
                             "--password",
                             help="database user password",
                             dest='dbpassword',
                             nargs='?',
                             type=str,
                             metavar="PASSWD")
    conf_parser.add_argument("-t",
                             "--dbtype",
                             help="database type [INFILE, sqlserver, sqlite] \
                                   default: INFILE",
                             dest='dbtype',
                             nargs='?',
                             choices=['INFILE', 'sqlserver', 'sqlite'],
                             default='INFILE',
                             type=str,
                             metavar="TYPE")
    conf_parser.add_argument("-i",
                             "--import",
                             help="Import TLEs to database",
                             dest='importTLE',
                             action="store_true")
    conf_parser.add_argument("-q",
                             "--quiet",
                             help="Suppress console output",
                             dest='quiet',
                             action="store_true")
    conf_parser.add_argument(
        "-V",
        "--verbose",
        help=
        "increase verbosity: 0 = only warnings, 1 = info, 2 = debug. No number means info. Default is no verbosity.",
        const=1,
        default=0,
        type=int,
        nargs="?")

    # Command to upload McCants files from DIR
    # with defaults provided from login.txt:
    # python db_import_tles.py --import --tlepath mccants_archive/elsets2019/new -V

    # Process commandline options and parse configuration
    cfg = configparser.ConfigParser(inline_comment_prefixes=('#', ';'))
    args = conf_parser.parse_args()
    log = logging.getLogger()

    # make it print to the console.
    console = logging.StreamHandler()
    log.addHandler(console)

    conf_file = args.conf_file
    tle_file = args.tle_file
    tle_path = args.tle_path
    update = args.update
    dbname = args.dbname
    dbhostname = args.dbhostname
    dbusername = args.dbusername
    dbpassword = args.dbpassword
    dbtype = args.dbtype
    importTLE = args.importTLE
    verbose = args.verbose
    quiet = args.quiet

    if (quiet == False):
        if verbose == 0:
            log.setLevel(logging.WARN)
        elif verbose == 1:
            log.setLevel(logging.INFO)
        elif verbose == 2:
            log.setLevel(logging.DEBUG)
        log.debug("Log level set to {}".format(log.level))

    if verbose:
        for arg in vars(args):
            log.debug("%s : %s", arg, getattr(args, arg))

    cfg.read([args.conf_file])
    log.info("Reading config from: {}".format(args.conf_file))

    if not (tle_path):
        try:
            tle_path = cfg.get('Common', 'tle_path')
        except KeyError:
            tle_path = "./"

    if not (tle_file):
        tle_file = os.path.join(tle_path, "bulk.tle")
    else:
        tle_file = os.path.join(tle_path, tle_file)

    if update:
        update_from_online(tle_path)

    if (importTLE):
        # Temporary database credentials hack
        try:
            with open('../../login.txt', 'r') as f:
                lines = f.readlines()
                dbname = lines[0].strip()
                dbtype = lines[1].strip()
                dbhostname = lines[2].strip()
                dbusername = lines[3].strip()
                dbpassword = lines[4].strip()
        except:
            log.error("DB Login credentials not available.")

        if (dbtype == "sqlserver"):
            if dbusername == None:
                try:
                    dbusername = input("Username: "******"ERROR: password must be specified {}".format(error))
            if dbpassword == None:
                try:
                    dbpassword = getpass()
                except Exception as error:
                    log.warning(
                        "ERROR: password must be specified {}".format(error))

        # Set up database connection or files
        db = database.Database(dbname, dbtype, dbhostname, dbusername,
                               dbpassword)
        # TODO: Probably need an error check to ensure this was set up correctly
        if (dbtype != "INFILE"):
            try:
                db.createTLETables()
            except:
                log.warning(
                    "Tables already exist or there is a big problem buddy.")

    # Main processing loop
    t1 = time()
    log.debug(t1 - t0)

    # Initialize variables we want fresh for the processing loop
    existing_files = {
    }  # A rolling list of users that have already had addresses assigned
    TLETotalCount = 0
    runningFileCount = 0
    runningUserCount = 0

    # Traverse the directory
    # FIXME: Make this deal gracefully with a non-existent directory
    totalFileCount = sum(
        [len(fileList) for dirName, subdirList, fileList in os.walk(tle_path)])
    totalDirCount = sum(
        os.path.isdir(os.path.join(tle_path, i)) for i in os.listdir(tle_path))

    if (quiet == False):
        print("Processing {} files in {}...".format(totalFileCount, tle_path))

    for dirName, subdirList, fileList in os.walk(tle_path):
        subdirList.sort()
        # Go through individual files from the subdirectories
        dirfileTotal = len(fileList)
        dirfileCount = 0
        for fname in sorted(fileList):
            time_start = time()
            dirfileCount += 1
            if ("DS_Store" in fname):
                continue
            _file = os.path.join(dirName, fname)
            log.info("\nReading TLEs from {}".format(_file))
            TLEs = tle_util.TLEFile(_file)

            # In theory, we could md5 the file directly *before* reading the TLE, but it really doesn't save much.
            # And we'd still want to check in the class, so we'd be doubling-up the MD5 checks.
            tle_file_fingerprint_array = db.selectTLEFile(
                TLEs.tle_file_fingerprint)

            if (tle_file_fingerprint_array):
                log.warning(
                    "Skipping {} TLEs in file: {} - fingerprint {} already in database."
                    .format(len(TLEs.Satellites), fname,
                            TLEs.tle_file_fingerprint))
                continue  # Already have the file
            else:
                print("Processing file {}...".format(fname))
                for sat_num in TLEs.Satellites:
                    Sat = TLEs.Satellites[sat_num]

                    tle_fingerprint_array = db.selectTLEFingerprint(
                        Sat.tle_fingerprint)
                    if (tle_fingerprint_array):
                        log.warning(
                            "Skipping TLE in file: {} for sat {} - fingerprint {} already in database."
                            .format(fname, sat_num, Sat.tle_fingerprint))
                        continue  # Already have the TLE
                    else:
                        db.addTLE(Sat)

                TLETotalCount += len(TLEs.Satellites)

                # Make note of file if it contained valid TLEs
                if (len(TLEs.Satellites) > 0):
                    result = db.addTLEFile(TLEs)

                # Commit the writes after we're done with the file.
                # We might also want to do this every maximum number of elements (i.e. 1000)
                db.commit_TLE_db_writes()

            # Check to see if the file is in the DB or local file
            # But defer the database interaction until we have a need to post an observation
            # tle_file_fingerprint_array = db.selectTLEFILE(TLEs.tle_file_fingerprint)

            # if id_array:
            #     sender_id = id_array[0]
            # else:
            #     # Note that this is creating users who may have not submitted an observation
            #     acct = Account.create('password123')
            #     sender_id = db.addObserver(acct.address, sender, 0, first_line)
            #     log.debug("Creating account {} for Sender (ID)".format(acct.address,sender,sender_id))
            #     runningUserCount += 1
            # '''try:
            #     sender_id = existing_users[sender]
            # # If the user is new and does not have
            # except:
            #     acct = Account.create('password123')
            #     existing_users[sender] = acct.address'''
            # log.debug(" Sender (ID): {} ({})".format(sender,sender_id))

            # obsid = db.addParsedIOD(IOD_line, sender_id)
            # log.debug(" {} {} ({}) {}".format(obsid,sender,sender_id, IOD_line.line))

            # for sat_num in TLEs.Satellites:
            #     Sat = TLEs.Satellites[sat_num]

            # log.info("Imported {} TLE records.".format(len(TLEs.Satellites)))
            # log.info("Fingerprint of {} is {}".format(tle_file,TLEs.tle_file_fingerprint))

    t2 = time()
    if (not quiet):
        print(
            "Imported {} TLEs from {} files in {} directories in {:3f} seconds."
            .format(TLETotalCount, totalFileCount, totalDirCount, t2 - t1))