def parse_station(station, start_date, end_date):
    with DBManager() as db:
        stations_timetables = db.plan_of_station(station,
                                                 date1=start_date,
                                                 date2=end_date)
        parsed = parse_timetable(stations_timetables, db)

    if parsed:
        parsed = pd.DataFrame(parsed, columns=empty_rtd.keys())
        parsed = parsed.set_index('hash_id')
        # Remove duplicates. Duplicates may happen if a stop is shifted to the next hour due to delays.
        # It then reappears in the planned timetable of the next hour.
        parsed = parsed.loc[~parsed.index.duplicated(keep='last')]
        parsed['station'] = station
        parsed[['ar_dc', 'ar_hi', 'dp_dc',
                'dp_hi']] = parsed[['ar_dc', 'ar_hi', 'dp_dc', 'dp_hi']] == '1'
        parsed = add_distance(parsed)
        current_array_cols = [
            col for col in RtdArrays.__table__.columns.keys()
            if col in parsed.columns
        ]
        # There are many columns that contain arrays. These take up a lot of space and aren't
        # used after parsing, so we currently don't store them in the database
        # rtd_arrays_df = parsed.loc[:, current_array_cols]
        # rtd.upsert_arrays(rtd_arrays_df)
        rtd_df = parsed.drop(current_array_cols, axis=1)
        with DBManager() as db:
            db.upsert_rtd(rtd_df)

    return True
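
parse_station() relies on DBManager being usable as a context manager that exposes plan_of_station() and upsert_rtd(). A minimal sketch of that interface, assuming a connection is opened in __enter__ and released in __exit__; the bodies are placeholders, not the project's actual implementation:

class DBManager:
    def __enter__(self):
        # open the database connection / session here
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        # commit or roll back, then close the connection here
        return False  # do not suppress exceptions

    def plan_of_station(self, station, date1, date2):
        # return the planned timetable entries for `station` between date1 and date2
        raise NotImplementedError

    def upsert_rtd(self, rtd_df):
        # insert or update the parsed rows of `rtd_df` in the rtd table
        raise NotImplementedError
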
Example #2
    def __init__(self,
                 symbol=None,
                 start_date=None,
                 end_date=None,
                 amount=None):
        self.symbol = symbol
        self.start_date = start_date
        self.end_date = end_date
        self.investment_amount = amount

        # check whether data already existed on the start date
        db = DBManager(self.symbol)
        if db.get_closest_day(start_date).close == -1:
            raise DataNotInDataset

        self.set_name()

        # self.check_data_exists()

        self.ask_input()

        self.trans_db = TransactionsDBManager(self.symbol)

        self.simulate_strategy()
        self.print_roi()
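
The constructor above treats a close value of -1 as "no data for that day" and turns it into a DataNotInDataset exception. A minimal sketch of those two pieces, named as in the snippet but with illustrative bodies only:

class DataNotInDataset(Exception):
    """Raised when the dataset has no price data at the requested start date."""

# get_closest_day() is assumed to return a row object whose `close` attribute
# is -1 when no data exists, which the constructor converts into the exception above.
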
Example #3
    def setUp(self):
        # The test database file may not exist yet; ignore errors from the cleanup.
        try:
            os.remove(self.FILENAME)
        except OSError:
            pass
        self._table = Table(self.FILENAME)
        self.db_manage = DBManager(self._table)
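
The fixture above only creates state; a matching tearDown is not shown in the example. A minimal sketch under the same assumptions (self.FILENAME and self.db_manage as set in setUp), purely illustrative:

    def tearDown(self):
        # close the manager if it exposes a close() method, then drop the test file
        close = getattr(self.db_manage, 'close', None)
        if close is not None:
            close()
        try:
            os.remove(self.FILENAME)
        except OSError:
            pass
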
Example #4
    def __init__(self, args):
        """
        Class initialization
        """
        super(FleetCommanderDbusService, self).__init__()

        self.home_dir = os.path.expanduser('~')

        if 'state_dir' in args:
            self.state_dir = args['state_dir']
        else:
            # Set state dir to $HOME/.local/share/fleetcommander
            self.state_dir = os.path.join(self.home_dir,
                                          '.local/share/fleetcommander')

        if not os.path.exists(self.state_dir):
            os.makedirs(self.state_dir)

        self.database_path = os.path.join(self.state_dir, 'fleetcommander.db')

        self.args = args

        self.log_level = args['log_level'].lower()
        loglevel = getattr(logging, args['log_level'].upper())
        logging.basicConfig(level=loglevel, format=args['log_format'])

        self.default_profile_priority = args['default_profile_priority']

        # Load FreeIPA connector
        self.ipa = fcfreeipa.FreeIPAConnector()

        self.GOA_PROVIDERS_FILE = os.path.join(args['data_dir'],
                                               'fc-goa-providers.ini')

        # Initialize database
        self.db = DBManager(self.database_path)

        # Initialize change mergers
        self.changemergers = {
            'org.gnome.gsettings': mergers.GSettingsChangeMerger(),
            'org.libreoffice.registry': mergers.LibreOfficeChangeMerger(),
            'org.chromium.Policies': mergers.ChromiumChangeMerger(),
            'com.google.chrome.Policies': mergers.ChromiumChangeMerger(),
            'org.mozilla.firefox': mergers.FirefoxChangeMerger(),
            'org.freedesktop.NetworkManager':
            mergers.NetworkManagerChangeMerger(),
        }

        # Initialize SSH controller
        self.ssh = sshcontroller.SSHController()
        self.known_hosts_file = os.path.join(self.home_dir, '.ssh/known_hosts')

        # Timeout values
        self.tmp_session_destroy_timeout = float(
            args['tmp_session_destroy_timeout'])
        self.auto_quit_timeout = float(args['auto_quit_timeout'])
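
The constructor reads its entire configuration from a single args mapping. A sketch of such a mapping, limited to the keys the snippet actually accesses; the values are illustrative, not the project's defaults:

args = {
    'state_dir': '/var/tmp/fleetcommander-state',  # optional; defaults to ~/.local/share/fleetcommander
    'log_level': 'debug',
    'log_format': '%(levelname)s: %(message)s',
    'default_profile_priority': '50',
    'data_dir': '/usr/share/fleetcommander',       # directory containing fc-goa-providers.ini
    'tmp_session_destroy_timeout': '60',
    'auto_quit_timeout': '60',
}
service = FleetCommanderDbusService(args)
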
Example #5
    def __init__(self, database_path, profiles_dir):
        """
        Class initialization
        """
        self.PROFILES_DIR = profiles_dir
        self.INDEX_FILE = os.path.join(profiles_dir, 'index.json')
        self.APPLIES_FILE = os.path.join(profiles_dir, 'applies.json')

        # Setup database
        self.db = DBManager(database_path)
        self.profiles = self.db.profiles
Example #6
    def __init__(self, args):
        """
        Class initialization
        """
        super(FleetCommanderDbusService, self).__init__()

        if 'profiles_dir' not in args:
            args['profiles_dir'] = os.path.join(args['state_dir'], 'profiles')
            if not os.path.exists(args['profiles_dir']):
                os.mkdir(args['profiles_dir'])

        self.args = args
        self.state_dir = args['state_dir']

        self.log_level = args['log_level'].lower()
        loglevel = getattr(logging, args['log_level'].upper())
        logging.basicConfig(level=loglevel, format=args['log_format'])

        self.profiles = profiles.ProfileManager(args['database_path'],
                                                args['profiles_dir'])

        # Load missing-profiles data from previous versions for backward compatibility
        self.profiles.load_missing_profiles_data()

        self.profiles_dir = args['profiles_dir']

        self.GOA_PROVIDERS_FILE = os.path.join(args['data_dir'],
                                               'fc-goa-providers.ini')

        # Initialize database
        self.db = DBManager(args['database_path'])

        # Initialize collectors
        self.collectors_by_name = {
            'org.gnome.gsettings':
            collectors.GSettingsCollector(self.db),
            'org.libreoffice.registry':
            collectors.LibreOfficeCollector(self.db),
            'org.freedesktop.NetworkManager':
            collectors.NetworkManagerCollector(self.db),
        }

        # Initialize SSH controller
        self.ssh = sshcontroller.SSHController()
        self.known_hosts_file = '/root/.ssh/known_hosts'

        self.webservice_host = args['webservice_host']
        self.webservice_port = int(args['webservice_port'])
        self.client_data_url = args['client_data_url']

        self.tmp_session_destroy_timeout = float(
            args['tmp_session_destroy_timeout'])
Example #7
def get_graph_data(symbol, table, values):
    # Accumulate the portfolio value per day into `values` (date -> total).
    # start_date and end_date are module-level globals in the original script.
    print(symbol, table)
    trans_db = TransactionsDBManager(symbol, table)
    db = DBManager(symbol)
    running_date = start_date
    while running_date < end_date:
        if running_date not in values:
            values[running_date] = 0

        transaction = trans_db.get_latest_transaction(running_date)
        values[running_date] += transaction.depotkonto + \
            transaction.count * decimal.Decimal(db.get_closest_raw_day(running_date).close)

        running_date += datetime.timedelta(days=1)
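
get_graph_data() walks one day at a time from the module-level start_date to end_date and, per day, adds the latest cash balance (depotkonto) plus the position value (count * closing price) to the mapping passed in. A hedged usage sketch; the symbol and table name are only illustrative:

values_avg200 = {}
get_graph_data('SAP.DE', 'avg200', values_avg200)
# values_avg200 now maps each date in [start_date, end_date) to the accumulated value
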
Example #8
def get_csv_from_feedback(csv_file_location):
    """
    get_csv_from_feedback: reads all emotion feedbacks, clears the database
    tables, and dumps the result into a csv file.

    Args:
        csv_file_location (string): location of the csv file to generate

    Returns: void
    """

    db_manager = DBManager(c.feedback_db)
    feedbacks = db_manager.get_all_emotion_feedbacks()
    db_manager.clear_tables()

    # newline='' keeps csv.writer from inserting blank lines between rows on Windows
    with open(csv_file_location, 'w', newline='') as output:
        writer = csv.writer(output)
        writer.writerow(["headline"] + c.sentiment_lookup)

        for (headline, emotions) in feedbacks:
            writer.writerow([headline] + emotions)
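
The writer produces one header row ("headline" plus the labels from c.sentiment_lookup) and then one row per stored feedback. Assuming the six-emotion lookup used later in these examples, the generated file would look roughly like this (sample values only):

# headline,anger,disgust,fear,joy,sadness,surprise
# "Some headline text",0,0,0,1,0,0
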
Example #9
from flask import *
import os
import util
from database import DBManager

app = Flask(__name__)
database = DBManager()
host = '0.0.0.0'
port = int(os.environ['PORT']) if 'PORT' in os.environ else 5000
base_url = os.environ['APP_URL'].rstrip('/') if 'APP_URL' in os.environ else "%s:%d" % (host, port)
if 'OPTIMEET_DEBUG' in os.environ:
    debug = os.environ['OPTIMEET_DEBUG'] in [ "True", "true", "T", "t", "1" ]
else:
    debug = True

@app.route('/')
def index():
    return render_template('halloween.html')

@app.route('/about')
def about():
    return render_template('about.html')

@app.errorhandler(404)
def page_not_found(error):
    return render_template('page_not_found.html'), 404

@app.route('/create-event')
def create_event():
    if request.args:
        eventname = request.args.get('eventName')
Example #10
end_date = datetime.datetime.strptime(lines.pop(0), "%Y-%m-%d").date()
symbols = lines

invest_per_symbol = invest_amount / len(symbols)

tables_avg200 = []
tables_avg200_3 = []

values_BnH = {}
values_avg200 = {}
values_avg200_3 = {}

real_symbols = []

for symbol in symbols:
    db = DBManager(symbol)
    try:
        if not data_handler.scrape_and_save_raw_prices_to_db(db):
            data_handler.analyze_data_and_save_to_db(db)
        db.close()
    except scrape.SymbolNotFound:
        print("Symbol not Found - Skipping")
        continue

    try:
        buynhold = BuyAndHold(symbol=symbol,
                              start_date=start_date,
                              end_date=end_date,
                              amount=invest_per_symbol)
        avg200 = AVG200Strat(percents=1.0,
                             symbol=symbol,
Example #11
from flask import Flask, render_template
import requests
import dill
from HeadlineSentiment import SentimentAnalyzer
import json
import os
from database import DBManager
import constants as c

app = Flask(__name__)
db_manager = DBManager()

sentiment_lookup = ['anger', 'disgust', 'fear', 'joy', 'sadness', 'surprise']
headline_lookup = [
    'Angry', 'Disgusting', 'Fearful', 'Happy', 'Sad', 'Surprising'
]
with open("google_news.key") as key_file:
    key = key_file.read().strip()
api_endpt = "https://newsapi.org/v1/articles?apiKey={}&source=google-news".format(key)


def get_news():
    response_dict = {}
    r = requests.get(api_endpt, data={"source": "google-news", "apiKey": key})
    r = json.loads(r.text)
    sentiment_tally = [0, 0, 0, 0, 0, 0]
    h_data = []
    for article in r['articles']:
        headline = article['title']
        data = {'headline': headline}
        ranked_list = sorted(analyzer.predict_all(headline),