Example no. 1
def events():
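    """Render the 'events' template with events matching the optional keyword query parameter."""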
    keyword = request.query.keyword
    return template(
        'events',
        events=get_events(keyword),
        keyword=keyword
    )
Example no. 2
def lstx(no_group, no_annotations, minimal):
    """List all transactions that have been derived from events and annotated."""
    events = get_events(loaders.all)
    transactions = get_transactions(events,
                                    XDG_CONFIG_HOME + "/mistbat/tx_match.yaml")
    if not no_annotations:
        transactions = annotate_transactions(
            transactions, XDG_CONFIG_HOME + "/mistbat/tx_annotations.yaml")
    transactions = fmv_transactions(transactions,
                                    XDG_DATA_HOME + "/mistbat/tx_fmv.yaml")
    transactions = imply_fees(transactions)

    if no_group:
        transactions = [
            tx for tx in transactions if getattr(tx, "groups", None) is None
        ]

    # Print transactions
    for tx in transactions:
        if minimal:
            print(tx)
        else:
            print(tx.description())

    print("--------------------")
    print("{} total transactions".format(len(transactions)))
    print_usd_exposure()
Example no. 3
def fees():
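    """Print fees incurred in USD, grouped by transaction type, both as-is and with negative values ignored."""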
    events = get_events(loaders.all)
    transactions = get_transactions(events,
                                    XDG_CONFIG_HOME + "/mistbat/tx_match.yaml")
    transactions = fmv_transactions(transactions,
                                    XDG_DATA_HOME + "/mistbat/tx_fmv.yaml")
    transactions = imply_fees(transactions)

    print("\nFees Incurred")
    print("-------------")
    fees = {}
    for tx in transactions:
        fees[tx.__class__.__name__] = fees.get(tx.__class__.__name__,
                                               0) + tx.fee_usd
    for k, v in fees.items():
        print(f"{k}: USD {v:0.2f}")
    print("TOTAL: USD {:0.2f}\n".format(sum(fees.values())))

    print("\nFees Incurred (negative values ignored)")
    print("-----------------------------------------")
    fees = {}
    for tx in transactions:
        fees[tx.__class__.__name__] = fees.get(tx.__class__.__name__, 0) + max(
            tx.fee_usd, 0)
    for k, v in fees.items():
        print(f"{k}: USD {v:0.2f}")
    print("TOTAL: USD {:0.2f}\n".format(sum(fees.values())))
Example no. 4
def event_type():
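    """Render the index page with events of the type selected in the form; requires a logged-in session."""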
    if session:
        type = int(request.form["type"])
        list = events.get_events(type)
        user_id=session["user_id"]
        return render_template("index.html", events=list, user_id=user_id, selected=type)
    else:
        return render_template("error.html", message="Et ole kirjautunut sisään")
Example no. 5
def index():
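    """Render the index page with the default event listing for the logged-in user, or an empty list otherwise."""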
    if session:
        list = events.get_events(0)
        user_id=session["user_id"]
    else:
        list = []
        user_id = 0
    return render_template("index.html", events=list, user_id=user_id, selected=0)
Example no. 6
def lsev(remote_update):
    """List all events parsed from observations."""
    events = get_events(loaders.all, remote_update=remote_update)
    for ev in events:
        print(ev)

    print("--------------------")
    print("{} total events".format(len(events)))
    print_usd_exposure()
Example no. 7
 def index_legacy_live_events(self, args):
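     """Index legacy ESPN live/upcoming/replay events, grouping ESPN3 and SEC Plus live events into their own folders."""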
     espn_url = args.get(ESPN_URL)[0]
     chosen_sport = args.get(SPORT, None)
     if chosen_sport is not None:
         chosen_sport = chosen_sport[0]
     chosen_network = args.get(NETWORK_ID, None)
     if chosen_network is not None:
         chosen_network = chosen_network[0]
     live = 'action=live' in espn_url
     upcoming = 'action=upcoming' in espn_url
     replay = 'action=replay' in espn_url
     if live:
         data = events.get_events(espn_url)
     else:
         data = util.get_url_as_xml_soup_cache(espn_url).findall(".//event")
     num_espn3 = 0
     num_secplus = 0
     num_events = 0
     for event in data:
         sport = event.find('sportDisplayValue').text.encode('utf-8')
         if chosen_sport != sport and chosen_sport is not None:
             continue
         networkid = event.find('networkId').text
         if chosen_network != networkid and chosen_network is not None:
             continue
         if networkid == ESPN3_ID and chosen_network is None and live:
             num_espn3 = num_espn3 + 1
         elif networkid == SECPLUS_ID and chosen_network is None and live:
             num_secplus = num_secplus + 1
         else:
             num_events = num_events + 1
             self.index_event(event, live, upcoming, replay, chosen_sport)
     # Don't show ESPN3 folder if there are no premium events
     if num_events == 0:
         for event in data:
             sport = event.find('sportDisplayValue').text.encode('utf-8')
             if chosen_sport != sport and chosen_sport is not None:
                 continue
             self.index_event(event, live, upcoming, replay, chosen_sport)
     # Dir for ESPN3/SECPlus
     elif chosen_network is None:
         if num_espn3 > 0:
             translation_number = 30191 if num_espn3 == 1 else 30190
             if selfAddon.getSetting('NoColors') == 'true':
                 name = translation(translation_number) % num_espn3
             else:
                 name = '[COLOR=FFCC0000]' + (translation(translation_number) % num_espn3) + '[/COLOR]'
             addDir(name, dict(ESPN_URL=espn_url, MODE=self.make_mode(LIVE_EVENTS_MODE), NETWORK_ID=ESPN3_ID),
                    defaultlive)
         if num_secplus > 0:
             translation_number = 30201 if num_secplus == 1 else 30200
             if selfAddon.getSetting('NoColors') == 'true':
                 name = translation(translation_number) % num_secplus
             else:
                 name = '[COLOR=FFCC0000]' + (translation(translation_number) % num_secplus) + '[/COLOR]'
             addDir(name, dict(ESPN_URL=espn_url, MODE=self.make_mode(LIVE_EVENTS_MODE), NETWORK_ID=SECPLUS_ID),
                    defaultlive)
Example no. 8
def currentbasis(harvest):
    """See available basis by coin"""
    events = get_events(loaders.all)
    transactions = get_transactions(events,
                                    XDG_CONFIG_HOME + "/mistbat/tx_match.yaml")
    transactions = annotate_transactions(
        transactions, XDG_CONFIG_HOME + "/mistbat/tx_annotations.yaml")
    transactions = fmv_transactions(transactions,
                                    XDG_DATA_HOME + "/mistbat/tx_fmv.yaml")
    transactions = imply_fees(transactions)

    form_8949 = Form8949(transactions)
    print("\nAVAILABLE BASIS REPORT")
    print(
        "Note: Coin totals will slighly deviate from 'holdings' since SENDRECV fees do not impact basis.\n"
    )
    table_headings = [
        "Coin",
        "Date Acquired",
        "Amount",
        "Basis per Coin",
        "Total Basis",
    ]
    if harvest:
        table_headings.append("Cum. G/L at Spot Price")
        spot_prices = get_coin_spot_prices(
            set(form_8949.current_available_basis().keys()))
    table = PrettyTable(table_headings)

    for coin, available_basis in form_8949.current_available_basis().items():
        coin_usd_total = 0.00
        coin_amount_total = 0.00
        cumulative_gain_or_loss = 0.00
        for basis in available_basis:
            time = basis[0].strftime("%Y-%m-%d %H:%M:%S")
            amount = round(basis[1], 8)
            fmv = round(basis[2], 2)
            total = round(amount * fmv, 2)
            row = [coin, time, amount, fmv, total]
            if harvest:
                cumulative_gain_or_loss += (spot_prices[coin] *
                                            amount) - (fmv * amount)
                row.append(round(cumulative_gain_or_loss, 2))
            table.add_row(row)
            coin_usd_total += total
            coin_amount_total += amount
        row = [
            "", "TOTAL",
            round(coin_amount_total, 8), "",
            round(coin_usd_total, 2)
        ]
        if harvest:
            row.append("")
        table.add_row(row)
        table.add_row([" "] * len(table.field_names))
    print(table)
Example no. 9
 def post(self):
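     """Return events matching the keyword and coordinates supplied in the JSON request body."""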
     request_info = request.json
     search_topic = request_info['keyword']
     search_lng = request_info['lng']
     search_lat = request_info['lat']
     events = get_events(search_topic, search_lat, search_lng)
     if events:
         return events
     else:
         abort(404, events)
     return jsonify(error="error while processing data")
Example no. 10
    def get(self):
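        """Return fuel consumption and distance totals for the requested track (API version 2+)."""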
        params = self.prepare_params()
        if not params:
            self.write(xxdumps({}, ensure_ascii=False))
            self.finish()
            return

        if self.version < 2:
            json_response = xxdumps({
                'error': 'Not implemented (current version API < 2)'
            })
            self.write(json_response, ensure_ascii=False)
            self.finish()
            return

        track_data = yield self.get_data(with_sensors=True,
                                            debug=self.application.debug)

        track = track_data.get('track', [])
        cars_sensors = track_data.get('cars_sensors')
        sensors_data = track_data.get('sensors_data')

        events = yield get_events(
            self.application.db, track, self.from_dt_utc,
            self.to_dt_utc, self.gps_code, strict=True
        )
        new_consumptions = {}
        if events:
            new_consumptions = count_fuel_consumptions(
                track, events['sensors']
            )

        # compute the total consumption
        new_consumptions_sum = None
        if new_consumptions:
            new_consumptions_sum = sum_consumption(new_consumptions)

        # in meters
        distance = int(track_data.get('distance') or 0)
        distance_agg2 = int(track_data.get('distance_agg2') or 0)

        data = {
            'consumption': new_consumptions_sum,
            'distance': distance,
            'distance_agg2': distance_agg2,
            'track': track,
        }
        if self.without_track:
            tmp = data.pop('track', None)
        self.write(xxdumps(data, ensure_ascii=False))
        self.finish()
Example no. 11
def print_usd_exposure():
    """Calculate total amount of USD invested and not redeemed and total fees spent."""
    fiat_events = get_events(loaders.all, "FiatExchange")
    invested = round(sum(ev.sell_amount for ev in fiat_events if ev.investing),
                     2)
    redeemed = round(sum(ev.buy_amount for ev in fiat_events if ev.redeeming),
                     2)
    net_invested = round(invested - redeemed, 2)

    fees = round(sum(ev.fee_amount for ev in fiat_events), 2)
    print("USD Exposure: {} + {} fees (FIAT ONLY) = {:.2f}".format(
        net_invested, fees, net_invested + fees))
    print("Aggregate Fee %: {:.2f}%".format(fees * 100 /
                                            (invested - redeemed)))
Example no. 12
def tax(aggregated, year):
    """Generate the information needed for IRS Form 8949"""
    events = get_events(loaders.all)
    transactions = get_transactions(events,
                                    XDG_CONFIG_HOME + "/mistbat/tx_match.yaml")
    transactions = annotate_transactions(
        transactions, XDG_CONFIG_HOME + "/mistbat/tx_annotations.yaml")
    transactions = fmv_transactions(transactions,
                                    XDG_DATA_HOME + "/mistbat/tx_fmv.yaml")
    transactions = imply_fees(transactions)

    form_8949 = Form8949(transactions)

    print("SHORT-TERM CAPITAL GAINS")
    table = PrettyTable([
        "(a) Description",
        "(b) Date acquired",
        "(c) Date sold",
        "(d) Proceeds",
        "(e) Basis",
        "(h) Gain",
    ])
    total_gain = 0.00
    for line in form_8949.generate_form(term="short",
                                        aggregated=aggregated,
                                        year=year):
        table.add_row(line)
        if str(line[-1]).strip():
            total_gain += line[-1]
    print(table)
    print(f"TOTAL SHORT-TERM CAPITAL GAIN: USD {total_gain:0.2f}")

    print("\nLONG-TERM CAPITAL GAINS")
    table = PrettyTable([
        "(a) Description",
        "(b) Date acquired",
        "(c) Date sold",
        "(d) Proceeds",
        "(e) Basis",
        "(h) Gain",
    ])
    total_gain = 0.00
    for line in form_8949.generate_form(term="long",
                                        aggregated=aggregated,
                                        year=year):
        table.add_row(line)
        if str(line[-1]).strip():
            total_gain += line[-1]
    print(table)
    print(f"TOTAL LONG-TERM CAPITAL GAIN: USD {total_gain:0.2f}")
Example no. 13
def get_my_favorite_venues(request):
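    """Render the events near the user's location that match their favorite venues."""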
    # TODO check no session
    if MOCK_LOCATION == True:
        lat = '52.5029'
        lng = '13.447424'
    else:
        lat=request.session['lat']
        lng=request.session['lng']

    events = get_events(lat=lat, lng=lng)
    if not MOCK_FAVORITES:
        favorites = get_user_favorites(request.user)

        with open("favorites.json", "w") as f:
            json.dump(favorites, f)
    else:
        with open("favorites.json") as f:
            favorites = json.load(f)
    matching_events = match_user_events(favorites, events)
    return render_to_response('yourevents.html', {'events': matching_events})
Example no. 14
def eventos(update, context, q=5):
    """Get the events on meetup website and send on chat"""

    user_username = update.message.from_user['username']
    user_id = update.message.from_user['id']
    if q == 1:
        logger.info(f'Usuário: {user_id} {user_username} - /evento')
    else:
        logger.info(f'Usuário: {user_id} {user_username} - /eventos')

    events = get_events()
    qtd = len(events)

    if qtd == 0:
        update.message.reply_text('Não há nenhum evento registrado no Meetup!')
    else:
        update.message.reply_text('Próximos eventos:\n')
        for i in range(qtd):
            event = events[i]

            msg = ((
                f"[{event['title']}]({event['url']})",
                event['date'].strftime('Dia %d de %B de %Y, às %H:%M'),
                '\n',
            ))

            update.message.reply_text(
                '\n'.join(msg),
                disable_web_page_preview=False,
                parse_mode='Markdown',
            )
            """
            This snippet would be used to send the location.
            However, I couldn't get the location from the events listing page;
            one idea is to fetch the individual event page.

            update.message.reply_venue(
                latitude=lat, longitude=lon, title=location, address=address
            )
            """
        update.message.reply_text('#evento #Meetup')
Example no. 15
def updatefmv(verbose):
    """Update the tx_fmv.yaml file for any missing figures"""
    # Load storage file and events and transactions
    try:
        fmv_raw = yaml.safe_load(open(XDG_DATA_HOME + "/mistbat/tx_fmv.yaml"))
    except FileNotFoundError:
        fmv_raw = {}
    fmv_data = {}
    for id in fmv_raw:
        fmvs = fmv_raw[id].split(" -- ")
        comment = None
        if len(fmvs) == 2:  # If there is a comment
            comment = fmvs[1]
        fmvs = fmvs[0].split()
        fmvs = {fmv.split("@")[0]: fmv.split("@")[1] for fmv in fmvs}
        fmvs["comment"] = comment
        fmv_data[id] = fmvs

    events = get_events(loaders.all)
    transactions = get_transactions(events,
                                    XDG_CONFIG_HOME + "/mistbat/tx_match.yaml")

    # Identify missing transactions
    missing = [
        tx for tx in transactions if tx.missing_fmv and tx.id not in fmv_data
    ]

    # Error-check that stored transactions have necessary FMV info
    stored = [tx for tx in transactions if tx.id in fmv_data]
    for tx in stored:
        stored_coins = set(fmv_data[tx.id].keys())
        stored_coins.remove("comment")
        if set(tx.affected_coins) != stored_coins:
            raise RuntimeError(
                f"Transaction {tx.id} does not have correct fmv info")

    # Confirm that the tx_fmv file doesn't have any unknown tx ids
    diff = set(id for id in fmv_data) - set(tx.id for tx in transactions)
    diff = ", ".join(diff)
    if len(diff) != 0:
        raise RuntimeError(
            f"Unrecognized transaction ids in tx_fmv.yaml: {diff}. Tip: Dont inlude fiat transaction fmvs."
        )

    # Fill remaining missing transactions with public closing price
    print(f"{len(missing)} missing transactions") if verbose else None
    for tx in missing:
        print(f"{tx.id}")
        fmv_data[tx.id] = {"comment": "from crytpocompare daily close api"}
        for coin in tx.affected_coins:
            coin_fmv = get_historical_close(coin, int(tx.time.timestamp()))
            fmv_data[tx.id][coin] = coin_fmv
            print(f"{coin}@{coin_fmv}\n") if verbose else None
            time.sleep(0.1)

    # Convert fmv_data back into fmv_raw and dump to disk
    fmv_raw = {}
    for id, coins in fmv_data.items():
        comment = coins.pop("comment")
        fmv_raw[id] = " ".join(f"{coin}@{price}"
                               for coin, price in coins.items())
        if comment:
            fmv_raw[id] += " -- " + comment

    yaml.dump(
        fmv_raw,
        open(XDG_DATA_HOME + "/mistbat/tx_fmv.yaml", "w"),
        default_flow_style=False,
    )
Example no. 16
def generate_patient_db(
    demographics_path,
    meddra_extractions_dir,
    drug_exposure_dir,
    concept_dir,
    output_dir,
    debug,
    use_dask,
):
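    """Build a PatientDB from demographics, MedDRA extractions, and OMOP drug exposure/concept tables, then dump it to output_dir."""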

    # Create patient DB to store data
    patients = PatientDB(name="all")

    # Get demographics dataframe
    demographics = get_df(demographics_path, use_dask=use_dask, debug=debug)

    ### NLP TABLES ###
    # Get meddra extractions dataframe
    meddra_extractions_pattern = "*_*"
    meddra_extractions_pattern_re = ".*_.*"
    meddra_extractions = get_table(
        meddra_extractions_dir,
        prefix="all_POS_batch",
        pattern=meddra_extractions_pattern,
        pattern_re=meddra_extractions_pattern_re,
        extension=".parquet",
        use_dask=use_dask,
        debug=debug,
    )

    meddra_extractions_columns = sorted(meddra_extractions.columns.tolist())
    print(f"meddra extractions column names:\n\t{meddra_extractions_columns}",
          flush=True)

    ### OMOP TABLES ###
    # OMOP DRUG_EXPOSURE table
    drug_exposure_pattern = "0000000000*"
    drug_exposure_pattern_re = "0000000000.*"
    drug_exposure = omop_drug_exposure(
        drug_exposure_dir,
        prefix="drug_exposure",
        pattern=drug_exposure_pattern,
        pattern_re=drug_exposure_pattern_re,
        extension=".csv",
        use_dask=use_dask,
        debug=debug,
    )
    drug_exposure_columns = sorted(drug_exposure.columns.tolist())
    print(f"drug exposure column names:\n\t{drug_exposure_columns}",
          flush=True)

    # OMOP CONCEPT table
    concept = omop_concept(concept_dir, use_dask=use_dask, debug=debug)
    concept_columns = sorted(concept.columns.tolist())
    print(f"concept column names:\n\t{concept_columns}", flush=True)
    # import pdb;pdb.set_trace()

    patient_ids = get_all_patient_ids(demographics,
                                      meddra_extractions,
                                      drug_exposure,
                                      use_dask=use_dask)

    get_events(patients,
               concept,
               meddra_extractions,
               drug_exposure,
               use_dask=False)
    if not patients.data["events"]:
        print("Empty events dict! Exiting...", flush=True)
        sys.exit(0)
    print(f"Found {patients.num_events()} events", flush=True)

    print("Filter out patient IDs that don't have any events", flush=True)
    patient_ids = patients.select_non_empty_patients(patient_ids)

    print("Generate patients from IDs", flush=True)
    patients.generate_patients_from_ids(patient_ids)
    # import pdb
    # pdb.set_trace()

    # print('Get all patient visit dates...')
    # patient_visit_dates = \
    # get_all_patient_visit_dates(patients, meddra_extractions)
    # unique_dates = get_dates(meddra_extractions, args.use_dask)
    # unique_date_strs = [date_obj_to_str(d) for d in unique_dates]
    # patient_visit_dates = \
    #    create_patient_visit_dates(patient_ids, unique_date_strs)

    # print('Creating patient visits...')
    # create_patient_visits(patients, patient_visit_dates)

    # print('Attach visits to patients')
    # patients.attach_visits_to_patients(patient_ids)
    # import pdb
    # pdb.set_trace()

    # FIXME
    print("Attach events to visits...", flush=True)
    patients.attach_events_to_visits()
    # import pdb
    # pdb.set_trace()

    print("Attach demographic information to patients", flush=True)
    patients.add_demographic_info(demographics, use_dask)
    # import pdb
    # pdb.set_trace()

    print("Dump patients to a file", flush=True)
    patients.dump(output_dir, "patients", "jsonl", unique=True)
Example no. 17
 def index_legacy_live_events(self, args):
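     """Index legacy ESPN live/upcoming/replay events, grouping ESPN3, SEC Plus, and ACC Extra live events into their own folders."""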
     espn_url = args.get(ESPN_URL)[0]
     chosen_sport = args.get(SPORT, None)
     if chosen_sport is not None:
         chosen_sport = chosen_sport[0]
     chosen_network = args.get(NETWORK_ID, None)
     if chosen_network is not None:
         chosen_network = chosen_network[0]
     live = 'action=live' in espn_url
     upcoming = 'action=upcoming' in espn_url
     replay = 'action=replay' in espn_url
     if live:
         data = events.get_events(espn_url)
     else:
         data = util.get_url_as_xml_cache(
             espn_url, encoding='ISO-8859-1').findall(".//event")
     num_espn3 = 0
     num_secplus = 0
     num_accextra = 0
     num_events = 0
     for event in data:
         sport = event.find('sportDisplayValue').text.encode('utf-8')
         if chosen_sport != sport and chosen_sport is not None:
             continue
         networkid = event.find('networkId').text
         if chosen_network != networkid and chosen_network is not None:
             continue
         if networkid == ESPN3_ID and chosen_network is None and live:
             num_espn3 += 1
         elif networkid == SECPLUS_ID and chosen_network is None and live:
             num_secplus += 1
         elif networkid == ACC_EXTRA_ID and chosen_network is None and live:
             num_accextra += 1
         else:
             num_events += 1
             self.index_event(event, live, upcoming, replay, chosen_sport)
     # Don't show ESPN3 folder if there are no premium events
     if num_events == 0:
         for event in data:
             sport = event.find('sportDisplayValue').text.encode('utf-8')
             if chosen_sport != sport and chosen_sport is not None:
                 continue
             self.index_event(event, live, upcoming, replay, chosen_sport)
     # Dir for ESPN3/SECPlus/ACC Extra
     elif chosen_network is None:
         if num_espn3 > 0 and selfAddon.getSetting('ShowEspn3') == 'true':
             translation_number = 30191 if num_espn3 == 1 else 30190
             if selfAddon.getSetting('NoColors') == 'true':
                 name = translation(translation_number) % num_espn3
             else:
                 name = '[COLOR=FFCC0000]' + (
                     translation(translation_number) %
                     num_espn3) + '[/COLOR]'
             addDir(
                 name,
                 dict(ESPN_URL=espn_url,
                      MODE=self.make_mode(LIVE_EVENTS_MODE),
                      NETWORK_ID=ESPN3_ID), defaultlive)
         if num_secplus > 0 and selfAddon.getSetting(
                 'ShowSecPlus') == 'true':
             translation_number = 30201 if num_secplus == 1 else 30200
             if selfAddon.getSetting('NoColors') == 'true':
                 name = translation(translation_number) % num_secplus
             else:
                 name = '[COLOR=FF004C8D]' + (
                     translation(translation_number) %
                     num_secplus) + '[/COLOR]'
             addDir(
                 name,
                 dict(ESPN_URL=espn_url,
                      MODE=self.make_mode(LIVE_EVENTS_MODE),
                      NETWORK_ID=SECPLUS_ID), defaultlive)
         if num_accextra > 0 and selfAddon.getSetting(
                 'ShowAccExtra') == 'true':
             translation_number = 30203 if num_accextra == 1 else 30202
             if selfAddon.getSetting('NoColors') == 'true':
                 name = translation(translation_number) % num_accextra
             else:
                 name = '[COLOR=FF013ca6]' + (
                     translation(translation_number) %
                     num_accextra) + '[/COLOR]'
             addDir(
                 name,
                 dict(ESPN_URL=espn_url,
                      MODE=self.make_mode(LIVE_EVENTS_MODE),
                      NETWORK_ID=ACC_EXTRA_ID), defaultlive)
Example no. 18
def download_root_files(relmon, cmsweb, callback_url):
    """
    Download all files needed for comparison and fill relmon dictionary
    """
    for category in relmon.get('categories', []):
        if category['status'] != 'initial':
            continue

        category_name = category['name']
        reference_list = category.get('reference', [])
        target_list = category.get('target', [])
        for item in reference_list + target_list:
            name = item['name']
            if name.lower().startswith('/relval') and name.lower().endswith(
                    '/dqmio'):
                logging.info('Name %s is dataset name', name)
                # Dataset name
                dqmio_dataset = name
            else:
                logging.info('Name %s is workflow name', name)
                # Workflow name
                workflow = cmsweb.get_workflow(item['name'])
                if not workflow:
                    item['status'] = 'no_workflow'
                    notify(relmon, callback_url)
                    logging.warning('Could not find workflow %s in ReqMgr2',
                                    item['name'])
                    continue

                dqmio_dataset = get_dqmio_dataset(workflow)
                if not dqmio_dataset:
                    item['status'] = 'no_dqmio'
                    notify(relmon, callback_url)
                    logging.warning(
                        'Could not find DQMIO dataset in %s. Datasets: %s',
                        item['name'],
                        ', '.join(workflow.get('OutputDatasets', [])))
                    continue

            file_urls = get_root_file_path_for_dataset(cmsweb, dqmio_dataset,
                                                       category_name)
            if not file_urls:
                item['status'] = 'no_root'
                notify(relmon, callback_url)
                logging.warning(
                    'Could not get root file path for %s dataset of %s workflow',
                    dqmio_dataset, item['name'])
                continue

            item['versioned'] = len(file_urls) > 1
            file_url = file_urls[-1]

            logging.info('File URL for %s is %s', item['name'], file_url)
            item['file_url'] = file_url
            item['file_size'] = 0
            item['status'] = 'downloading'
            item['file_name'] = item['file_url'].split('/')[-1]
            item['events'] = 0
            notify(relmon, callback_url)
            try:
                item['file_name'] = cmsweb.get_big_file(item['file_url'])
                item['status'] = 'downloaded'
                item['file_size'] = os.path.getsize(item['file_name'])
                item['events'] = get_events(item['file_name'])
                logging.info('Downloaded %s. Size %.2f MB. Events %s',
                             item['file_name'],
                             item.get('file_size', 0) / 1024.0 / 1024.0,
                             item['events'])
            except Exception as ex:
                logging.error(ex)
                logging.error('Error getting %s for %s', item['file_url'],
                              item['name'])
                item['status'] = 'failed'

            notify(relmon, callback_url)
Example no. 19
def cal_events():
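    """Return the calendar events as JSON."""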
    return flask.jsonify(events.get_events())
Example no. 20
def plan():
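    """Show the weekly planning view and save new event entries submitted from it."""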
    users.require_role(2)
    user_id = session["user_id"]
    event_list = events.get_events(user_id)
    today1 = datetime.date.today()

    if request.method == "GET":
        group_info = group.get_info()
        week, all_event_entries = entries.get_week(user_id, 2)
        all_own_entries = subfunctions.change_list_to_dict(
            5, all_event_entries)
        friends_plans = subfunctions.add_weekday(
            entries.friends_planning(user_id))
        today = datetime.date.today().strftime("%d.%m.")
        days = {0: "SU", 1: "MA", 2: "TI", 3: "KE", 4: "TO", 5: "PE", 6: "LA"}
        days_i = subfunctions.change_days_dow_to_i_dict(days, today1)
        return render_template("plan.html",
                               friends_plans=friends_plans,
                               all_own_entries=all_own_entries,
                               days_i=days_i,
                               group_info=group_info,
                               events=event_list,
                               days=days,
                               week=week,
                               all_event_entries=all_event_entries,
                               today=today)

    if request.method == "POST":
        users.check_csrf()
        event_indices = request.form.getlist("event_index")
        start_times = request.form.getlist("time_start")
        finish_times = request.form.getlist("time_finish")
        days_list = request.form.getlist("day")
        if len(event_indices) == len(start_times) and len(start_times) == len(
                finish_times):
            day_i = int(float(days_list[0]))
            dow = int(
                datetime.datetime.strftime(
                    today1 + datetime.timedelta(days=day_i + 7), "%w"))
            times_of_own_entries_for_day = entries.get_times_of_own_entries_for_day(
                user_id, dow, day_i + 7)
            new_entry_times_list = subfunctions.get_new_entry_times(
                start_times, finish_times)
            if not new_entry_times_list:
                return render_template(
                    "error.html",
                    message=
                    "Tapahtumien lisäys ei onnistunut, tarkista valitsemasi ajat, aikojen valitsemisessa oli puutteita"
                )
            if times_of_own_entries_for_day or len(new_entry_times_list) > 1:
                if subfunctions.check_times_many(times_of_own_entries_for_day,
                                                 new_entry_times_list) != "ok":
                    return render_template(
                        "error.html",
                        message=subfunctions.check_times_many(
                            times_of_own_entries_for_day,
                            new_entry_times_list))
            elif new_entry_times_list[0][0] >= new_entry_times_list[0][1]:
                return render_template(
                    "error.html",
                    message=
                    "Antamasi ajat olivat samat tai alkuaika oli suurempi kuin loppuaika, tarkista ajat ja tallenna uudelleen"
                )
            for entry in new_entry_times_list:
                date = today1 + datetime.timedelta(
                    days=7 + int(float(days_list[entry[2]])))
                index = entry[2]
                if entries.add_entry(date, user_id, event_list[int(
                        event_indices[index])][0], start_times[index],
                                     finish_times[index]) < 0:
                    return render_template(
                        "error.html",
                        message=
                        "Tapahtumien lisäys ei onnistunut, tallenna uudelleen")
            return redirect("/plan")
        return render_template(
            "error.html",
            message=
            "Tapahtumien lisäys ei onnistunut, tarkista valitsemasi ajat ja tallenna uudelleen"
        )
Example no. 21
def get_events():
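    """Return all events in a JSON success response."""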
    response = events.get_events()
    return make_response(jsonify({'success': True, 'events': response}), 200)
Example no. 22
def entry(day):
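    """Show the entry form for the given day offset and save a new participation entry on POST."""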
    users.require_role(2)
    day_i = int(float(day))
    user_id = session["user_id"]
    today = datetime.datetime.today()
    date = today + datetime.timedelta(days=day_i)

    if request.method == "GET":
        participants = entries.get_participants(session["user_id"], date)
        event_list = events.get_events(user_id)
        days = {0: "SU", 1: "MA", 2: "TI", 3: "KE", 4: "TO", 5: "PE", 6: "LA"}
        days_i = subfunctions.change_days_dow_to_i_dict(days, today)
        return render_template("entry.html",
                               participants=participants,
                               events=event_list,
                               days_i=days_i,
                               date=date,
                               day=day_i)

    if request.method == "POST":
        users.check_csrf()
        if request.form["time1"] and request.form[
                "time2"] and request.form["time1"] < request.form["time2"]:
            dow = int(datetime.datetime.strftime(date, "%w"))
            times_of_own_entries_for_day = entries.get_times_of_own_entries_for_day(
                user_id, dow, day_i)
            start_time = datetime.datetime.strptime(request.form["time1"],
                                                    "%H:%M").time()
            finish_time = datetime.datetime.strptime(request.form["time2"],
                                                     "%H:%M").time()
            if times_of_own_entries_for_day:
                if subfunctions.check_times_one(
                        times_of_own_entries_for_day,
                    (start_time, finish_time)) != "ok":
                    return render_template(
                        "error.html",
                        message=subfunctions.check_times_one(
                            times_of_own_entries_for_day,
                            (start_time, finish_time)))
        else:
            return render_template(
                "error.html",
                message=
                "Osallistumisesi lisäys ei onnistunut, alku tai loppuaika oli virheellinen tai puuttui. Tarkista ajat ja tallenna uudelleen"
            )
        extras = 0 if not request.form["extra_participants"] else request.form[
            "extra_participants"]

        entry_id = entries.add_entry_with_extras(date, user_id,
                                                 request.form["event_id"],
                                                 start_time, finish_time,
                                                 extras)
        if entry_id == -1:
            return render_template(
                "error.html",
                message=
                "Osallistumisesi lisäys ei onnistunut, tallenna uudelleen")

        if request.form["comment"]:
            content = request.form["comment"].strip()
            if len(content) > 0:
                if not messages.add_entry_comment(
                        user_id, entry_id, request.form["event_id"],
                        " [" + request.form["event_id"] + "] " + content):
                    return render_template(
                        "error.html",
                        message=
                        "Viestisi lähetys ei onnistunut, tarkista tiedot ja lähetä uudelleen"
                    )
        return redirect("/calendar")
Example no. 23
    def get_artsits(self):
        '''
        Generate the list of artist objects from the Last.fm top-artists chart.
        :return: None; JSON files are written as output
        '''
        try:
            for i in ['1']:
                try:
                    # getting artists list from last.fm
                    request = Request(
                        "http://ws.audioscrobbler.com/2.0/?method=chart.gettopartists&limit={0}&api_key={1}&page={2}&format=json"
                        .format('940', LAST_FM_API, i))
                    response_body = urlopen(request).read()
                    res = json.loads(response_body)
                    raw_arstits = res['artists']
                    artists = raw_arstits['artist']
                    result = None
                    for artist in artists:
                        try:
                            name = artist['name']
                            if name in self.all_artists:
                                continue
                            self.all_artists.append(name)
                            # reformatting artist name for future queries
                            art = name.replace(' ', '%20').encode('utf-8')
                            req = Request(
                                "http://ws.audioscrobbler.com/2.0/?method=artist.getinfo&artist={0}&api_key={1}&format=json"
                                .format(art, LAST_FM_API))
                            response_body = json.loads(
                                urlopen(req).read())['artist']

                            # getting artist info
                            desc = ''
                            play_count = 0
                            image = None
                            info = response_body
                            try:
                                desc = info['bio']['content']
                            except:
                                print 'no description for {0}'.format(name)
                            try:
                                play_count = info['stats']['playcount']
                            except:
                                print 'no play_count for {0}'.format(name)
                            try:
                                image = info['image'][1]['#text']
                            except:
                                print 'no image for {0}'.format(name)

                            # getting artist events
                            events = []
                            try:
                                events = get_events(artist['name'])
                            except Exception as e:
                                print "cannot get events for artists: {0}".format(
                                    artist['name'])
                                print e

                            # getting artist tracks
                            try:
                                tracks = self.get_tracks(art)
                                if tracks is None:
                                    continue
                            except Exception as e:
                                print "cannot get tracks for artists: {0}".format(
                                    artist['name'])
                                print e
                                continue  # continue to next artists if there's no tracks

                            # build the artist object and add it to the collected data
                            new_artist = {
                                'name': name,
                                'description': desc,
                                'img': image,
                                'play_count': play_count,
                                'tracks': tracks,
                                'events': events
                            }
                            self.artists.append(new_artist)
                        except Exception as e:
                            print "cannot generate artist {0}".format(
                                artist['name'])
                            print e
                            continue  # continue to next artists if exception raised

                        # print data to file every 10 artists
                        self.art_count += 1
                        if self.art_count % 10 == 0:
                            result = {"artists": self.artists}
                            filename = "artists{0}-{1}.json".format(
                                i, self.art_count)
                            with open(filename, 'w') as feedsjson:
                                json.dump(result, feedsjson)
                            print "for file {0}".format(filename)
                            self.print_stats()

                # printing last file if exception raised before next 10 artists
                except Exception as e:
                    filename = "artists{0}-{1}-end.json".format(
                        i, self.art_count)
                    if result is None:
                        result = {"artists": self.artists}
                    with open(filename, 'w') as feedsjson:
                        json.dump(result, feedsjson)
                    print "page {0} didn't finished totally".format(i)
                    print self.print_stats()
                    print e
                    continue
        # continue to next artists page query
        except Exception as e:
            print "cannot get top artists"
            print self.print_stats()
            print e
Example no. 24
def main():
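    """Render the index page with calendar events and a randomly chosen background image."""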
    image = "bg/" + str(randint(0, 59) + 1) + ".jpg"
    calendar = get_events()
    return render_template('index.html', events=calendar, image=image)
Example no. 25
def holdings(aggregated):
    """List all coins held with USD values. Also list holdings by exchange."""
    totals = {}
    events = get_events(loaders.all)

    # Get raw accounting-style entries for each event e.g., (coinbase, LTC, +1.00)
    all_entries = [[], [], []]  # location, coin, amount (will be zipped)
    for ev in events:
        entries = ev.entries()
        # try/except needed to deal with single vs multiple entries per event
        try:
            locations, coins, amounts = zip(*entries)
            all_entries[0].extend(locations)
            all_entries[1].extend(coins)
            all_entries[2].extend(amounts)
        except TypeError:
            location, coin, amount = entries
            all_entries[0].append(location)
            all_entries[1].append(coin)
            all_entries[2].append(amount)

    # Process the accounting-style entries into a nested dict of
    # location -> coin -> amount
    for location, coin, amount in zip(*all_entries):
        totals.setdefault(location, {}).setdefault(coin, 0)
        totals[location][coin] += amount

    # Get set of coin symbols to prepare to poll coinmarketcap API
    my_coins = set(all_entries[1])
    my_coins.remove("USD")

    # Poll coinmarketcap API for spot prices of all coins and store them in a dict
    coin_spotprices = get_coin_spot_prices(my_coins)

    total_usd = 0
    location_usd = {}
    total_bycoin = {}
    for location in totals:
        if "USD" in totals[location]:
            del totals[location]["USD"]

        location_usd[location] = 0
        for coin, amount in totals[location].items():
            if round(amount, 9) != 0:
                total_bycoin[coin] = total_bycoin.get(coin, 0) + amount
                location_usd[location] += amount * coin_spotprices[coin]
        # print('Total (in USD) at {}: ${:.2f}\n'.format(location, location_usd))
        total_usd += location_usd[location]

    # If the --aggregated option is passed
    if aggregated:
        # Sort total_bycoin by USD value
        coins_sorted_usd = []
        for coin, amount in total_bycoin.items():
            usd_value = amount * coin_spotprices[coin]
            coins_sorted_usd.append((coin, amount, usd_value))
        coins_sorted_usd.sort(key=lambda x: x[2], reverse=True)

        # Print out the total coin values sorted by value
        for coin in coins_sorted_usd:
            print("{} {:.8f} (USD {:.2f} @ USD {:.2f} per {})".format(
                coin[0], coin[1], coin[2], coin_spotprices[coin[0]], coin[0]))
    # If the --aggregated option is not passed
    else:
        # Sort locations by USD value
        locations = list(totals.keys())
        locations.sort(key=lambda x: location_usd[x], reverse=True)
        for location in locations:
            print("\n{} (USD {:.2f})".format(location, location_usd[location]))

            # Sort coins within a location by USD value
            coins_sorted_usd = []
            for coin, amount in totals[location].items():
                usd_value = amount * coin_spotprices[coin]
                coins_sorted_usd.append((coin, amount, usd_value))
            coins_sorted_usd.sort(key=lambda x: x[2], reverse=True)

            # Print out the total coin values sorted by value
            for coin in coins_sorted_usd:
                if round(coin[1], 9) != 0:
                    print("    {} {:.8f} (USD {:.2f} @ USD {:.2f} per {})".
                          format(coin[0], coin[1], coin[2],
                                 coin_spotprices[coin[0]], coin[0]))

    print("-----------------")
    print("Total Portfolio Value: USD {:.2f}".format(total_usd))
Example no. 26
import logging
import os

import discord
from discord.ext import commands

import utils.utils
import commands as cmd
import events

DISCORD_BOT_TOKEN = os.environ.get('TOKEN')

# creating logger
logger = utils.utils.get_logger('discord bot', logging.INFO)

# creating bot
intents = discord.Intents.default()
intents.members = True
bot = commands.Bot(command_prefix='!', intents=intents)
cmd.get_commands(bot)
events.get_events(bot, logger)

# Main function

if __name__ == '__main__':
    try:
        logger.info('Preparing to run bot...')
        bot.run(DISCORD_BOT_TOKEN)
    except Exception as e:
        import sys

        logger.error(sys.exc_info()[0])
        logger.error("---- -----")
        import traceback

        logger.error(traceback.format_exc())
Example no. 27
    def get(self, gps_code=None):
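        """Return track data for the GPS code; API version 3+ responses include parkings, sensor events, and fuel consumption."""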
        params = self.prepare_params(gps_code=gps_code)
        if not params:
            self.write(xxdumps({}, ensure_ascii=False))
            self.finish()
            return

        # TODO: temporary workaround for load balancing
        is_balance = self.get_argument('is_balance', '0')
        if is_balance == '1' and c.IS_BALANCED:
            url = 'http://{host}:{port}{path}'.format(
                host=c.BALANCE_HOST,
                port=random.choice(c.BALANCE_PORTS),
                path=self.request.uri
            )
            url = url.replace('is_balance=1', 'is_balance=0')

            response = yield self.application.http_client.fetch(url)
            self.write(response.body)
            self.set_status(response.code)
            self.finish()
            return

        track_data = yield self.get_data(with_sensors=self.sensors)
        track = track_data.get('track', [])
        cars_sensors = track_data.get('cars_sensors')

        self.application.logger.debug(
            'TRACK gps_code: {} from_dt: {} ({}), to_dt: {} ({}) '
            'len(track): {}'.
            format(self.gps_code, self.from_dt_repr, self.from_dt,
                    self.to_dt_repr, self.to_dt, len(track)))

        if self.version < 3:
            data = track
        else:
            data = track_data
            events = yield get_events(
                self.application.db, track, self.from_dt_utc,
                self.to_dt_utc, self.gps_code, strict=True
            )
            if events:
                data['consumptions'] = count_fuel_consumptions(
                    track, events['sensors']
                )

            data.update({
                'parkings': events.get('parkings', []),
                'sensors': cars_sensors,
                'events': events.get('sensors', {}),
                'equipment': events.get('equipment', {}),
                'equipment_distance': events.get('equipment_distance', {}),
                'equipment_time': events.get('equipment_time', {}),
                'time_of_parking': sum(
                    [p['sec'] for p in events.get('parkings', [])]),
            })

        if self.without_track:
            tmp = data.pop('track', None)
        self.write(xxdumps(data, ensure_ascii=False))
        self.finish()
Example no. 28
    def get(self):
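        """Return sensor events and fuel consumption for a track, optionally rendered as a PNG graph."""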
        params = self.prepare_params()
        if not params:
            self.write(xxdumps({}, ensure_ascii=False))
            self.finish()
            return

        if self.version < 2:
            json_response = xxdumps({
                'error': 'Not implemented (current version API < 2)'
            })
            self.write(json_response, ensure_ascii=False)
            self.finish()
            return

        track_data = yield self.get_data(with_sensors=True,
                                            debug=self.application.debug)
        track = track_data.get('track', [])
        cars_sensors = track_data.get('cars_sensors')
        sensors_data = track_data.get('sensors_data')

        events = yield get_events(
            self.application.db, track, self.from_dt_utc,
            self.to_dt_utc, self.gps_code, strict=True
        )
        new_consumptions = {}
        if events:
            new_consumptions = count_fuel_consumptions(
                track, events['sensors']
            )

        data = {}
        data.update({
            'sensors': cars_sensors,
            'events': events.get('sensors', {}),
            'eq': events.get('eq', {}),
            'consumptions': new_consumptions,
        })

        if self.format in ['png']:
            import matplotlib.pyplot as plt

            colors = []
            for style in ['-', '-.', '--.']:
                for color in ['r', 'g', 'b']:
                    colors.append(color + style)

            ts = datetime.datetime.now().strftime('%Y-%m-%d')
            name = random.randint(1000, 9999)
            name = '/tmp/graph-{}-{}.png'.format(ts, name)
            g = None

            yy = sensors_data.get('yy')
            for sid in yy:
                plt.plot(yy[sid]['x'], yy[sid]['y'], 'gray', label='{} Original'.format(sid), linewidth=2)
                color = colors.pop() if colors else 'b'
                plt.plot(yy[sid]['x'], yy[sid]['filtered'], color, label='{} Filtered'.format(sid), linewidth=2)

                color = colors.pop() if colors else 'b'
                plt.plot(yy[sid]['x'], yy[sid]['lstsq'], color[0] + '-', label='{} LSTSQ'.format(sid), linewidth=1)

                # overlay sensor events on the plot
                for sensor_id, sensor_events in events.get('sensors', {}).items():
                    for e in sensor_events:
                        ey = []
                        for i, x in enumerate(yy[sid]['ts']):
                            y = yy[sid]['filtered'][i]
                            if e['start_point']['timestamp'] <= x <= e['end_point']['timestamp']:
                                ey.append(y)
                            else:
                                ey.append(None)
                        color = colors.pop() if colors else 'b'
                        plt.plot(yy[sid]['x'], ey, '*', label='{} {}'.format(sid, e['type']), linewidth=4, markersize=7)

            plt.legend()
            plt.savefig(name)
            if self.application.debug:
                plt.show()
            plt.close()

            with open(name, 'rb') as f:
                g = f.read()
            os.unlink(name)

            self.set_header("Content-Type", "image/png")
            self.write(g)
            self.finish()
            return

        self.write(xxdumps(data, ensure_ascii=False))
        self.finish()
Example no. 29
from events import get_events
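# Fetch 'java' events near the given coordinates and print them.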

lat = 53.350140
lng = -6.266155
events = get_events('java', lat, lng)

print(events)