    for k, v in dev.state_information.items():
        click.echo("%s: %s" % (k, v))

    click.echo(click.style("== Generic information ==", bold=True))
    click.echo("Time: %s" % dev.time)
    click.echo("Hardware: %s" % dev.hw_info["hw_ver"])
    click.echo("Software: %s" % dev.hw_info["sw_ver"])
    click.echo("MAC (rssi): %s (%s)" % (dev.mac, dev.rssi))
    click.echo("Location: %s" % dev.location)

    ctx.invoke(emeter)


@cli.command()
@pass_dev
@click.option('--year', type=Datetime(format='%Y'),
              default=None, required=False)
@click.option('--month', type=Datetime(format='%Y-%m'),
              default=None, required=False)
@click.option('--erase', is_flag=True)
def emeter(dev, year, month, erase):
    """Query emeter for historical consumption."""
    click.echo(click.style("== Emeter ==", bold=True))
    if not dev.has_emeter:
        click.echo("Device has no emeter")
        return

    if erase:
def add(incidents):

    @incidents.command()
    def list_providers():
        """ List providers """
        from bos_incidents import factory
        t = PrettyTable(["Provider", "Incident Count"], hrules=ALLBORDERS)
        t.align = 'l'
        storage = factory.get_incident_storage()
        providers = storage.get_distinct("provider_info.name")
        for provider in providers:
            t.add_row([
                provider,
                str(
                    storage.get_incidents_count(
                        {"provider_info.name": provider}))
            ])
        click.echo(t)

    @incidents.command()
    def purge():
        """ Purge the entire store """
        from bos_incidents import factory
        factory.get_incident_storage(purge=True)

    @incidents.command()
    @click.argument("begin", required=False, type=Datetime(format='%Y/%m/%d'))
    @click.argument("end", required=False, type=Datetime(format='%Y/%m/%d'))
    @click.option("--filter", default=None)
    def list(begin, end, filter):
        """ List incidents from the bos-incidents store """
        from bos_incidents import factory
        t = PrettyTable(["identifier", "Incidents"], hrules=ALLBORDERS)
        t.align = 'l'
        storage = factory.get_incident_storage()
        for event in storage.get_events(resolve=True):
            # pprint(event)
            if not ("id" in event and event["id"]):
                continue
            id = event["id"]
            id["start_time"] = parser.parse(
                id["start_time"]).replace(tzinfo=None)
            # Limit time
            if begin and end and (
                    id["start_time"] < begin or id["start_time"] > end):
                continue
            incidents = format_incidents(event)
            t.add_row([
                "\n".join([
                    id["sport"],
                    id["event_group_name"],
                    id["start_time"].strftime("%Y/%m/%d"),
                    "home: {}".format(id["home"]),
                    "away: {}".format(id["away"]),
                ]),
                str(incidents)
            ])
        click.echo(t)

    @incidents.command()
    @click.argument("unique_string", required=False, default=None)
    @click.argument("provider", required=False, default=None)
    @click.option("--filter", default=None)
    def show(unique_string, provider, filter):
        """ Show the content of specific incidents """
        from bos_incidents import factory
        storage = factory.get_incident_storage()
        if provider is not None:
            incidents = [
                storage.get_incident_by_unique_string_and_provider(
                    unique_string, provider)
            ]
        else:
            if filter is not None:
                incidents = storage.get_incidents(
                    dict(unique_string={"$regex": ".*" + filter + ".*i"}))
            elif unique_string is not None:
                incidents = storage.get_incidents(
                    dict(unique_string=unique_string))
            else:
                incidents = storage.get_incidents()
        for incident in incidents:
            print(" > " + incident["unique_string"] + "-" +
                  incident["provider_info"]["name"])

    @incidents.command()
    @click.argument("status_name")
    def status(status_name):
        """ Show events that have the status 'status_name' """
        import builtins
        from bos_incidents import factory
        t = PrettyTable(["identifier", "Incidents", "Status"],
                        hrules=ALLBORDERS)
        t.align = 'l'
        storage = factory.get_incident_storage()
        for call in INCIDENT_CALLS:
            events = storage.get_events_by_call_status(
                call=call, status_name=status_name)
            for event in events:
                full_event = storage.get_event_by_id(event["id_string"])
                t.add_row([
                    full_event["id_string"],
                    format_event_incidents(full_event),
                    format_event_incident_statuses(full_event)
                ])
        click.echo(t)

    @incidents.command()
    @click.argument("unique_string", required=False, default=None)
    @click.argument("provider", required=False, default=None)
    @click.option("--filter", default=None)
    @click.option("--test", default=False)
    def rm(unique_string, provider, filter, test):
        """ Remove an incident from the store """
        from bos_incidents import factory
        storage = factory.get_incident_storage()
        if unique_string and provider:
            incidents = [
                storage.get_incident_by_unique_string_and_provider(
                    unique_string, provider)
            ]
        elif filter:
            incidents = storage.get_incidents(
                dict(unique_string={"$regex": ".*" + filter + ".*i"}))
        if test:
            print("To be deleted: ")
        for incident in incidents:
            if test:
                print(" > " + incident["unique_string"] + "-" +
                      incident["provider_info"]["name"])
            else:
                storage.delete_incident(incident)

    @incidents.command()
    @click.argument("unique_string")
    @click.argument("provider")
    @click.option("--url", default="http://localhost:8010/trigger")
    def resend(url, unique_string, provider):
        """ Resend one or more incidents from the store """
        from bos_incidents import factory
        storage = factory.get_incident_storage()
        incident = storage.get_incident_by_unique_string_and_provider(
            unique_string, provider)
        pprint(incident)
        incident.update(dict(skip_storage=True))
        resend_incidents(url, incident)

    @incidents.command()
    @click.argument("call", required=False, default="*")
    @click.argument("status_name", required=False)
    @click.argument("begin", required=False, type=Datetime(format='%Y/%m/%d'))
    @click.argument("end", required=False, type=Datetime(format='%Y/%m/%d'))
    @click.option("--url", default="http://localhost:8010/trigger")
    def resendall(url, call, status_name, begin, end):
        """ Resend everything in the store that matches a call and status_name """
        from bos_incidents import factory
        storage = factory.get_incident_storage()
        for event in storage.get_events(resolve=False):
            for incident_call, content in event.items():
                if not content or "incidents" not in content:
                    continue
                if call and call != "*" and incident_call != call:
                    continue
                if status_name and content["status"]["name"] != status_name:
                    continue
                for _incident in content["incidents"]:
                    incident = storage.resolve_to_incident(_incident)
                    id = incident["id"]
                    start_time = parser.parse(
                        id["start_time"]).replace(tzinfo=None)
                    # Limit time
                    if begin and end and (
                            start_time < begin or start_time > end):
                        continue
                    pprint(incident)
                    incident.update(dict(skip_storage=True))
                    resend_incidents(url, incident)
    Return absolute paths from input doc and path
    """
    for band in doc['image']['bands'].values():
        band['path'] = str(path / band['path'])
    return doc


@click.command(help=__doc__)
@click.option('--output', help="Write datasets into this directory",
              type=click.Path(exists=False, writable=True, dir_okay=True))
@click.argument('datasets',
                type=click.Path(exists=True, readable=True, writable=False),
                nargs=-1)
@click.option('--date', type=Datetime(format='%d/%m/%Y'),
              default=datetime.now(),
              help="Enter file creation start date for data preparation")
@click.option('--checksum/--no-checksum',
              help="Checksum the input dataset to confirm match",
              default=False)
def main(output, datasets, checksum, date):
    logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s',
                        level=logging.INFO)
    for dataset in datasets:
        (mode, ino, dev, nlink, uid, gid, size, atime, mtime,
         ctime) = os.stat(dataset)
        create_date = datetime.utcfromtimestamp(ctime)
        if create_date <= date:
            logging.info("Dataset creation time ", create_date,
@cli.command()
@click.argument('offset', type=float, required=False)
@pass_dev
def offset(dev, offset):
    """ Sets the temperature offset [-3.5, 3.5] """
    if dev.temperature_offset is not None:
        click.echo("Current temp offset: %s" % dev.temperature_offset)
    if offset is not None:
        click.echo("Setting the offset to %s" % offset)
        dev.temperature_offset = offset


@cli.command()
@click.argument('away_end', type=Datetime(format='%Y-%m-%d %H:%M'),
                default=None, required=False)
@click.argument('temperature', type=float, default=None, required=False)
@pass_dev
def away(dev, away_end, temperature):
    """ Enables or disables the away mode. """
    if away_end:
        click.echo("Setting away until %s, temperature: %s" %
                   (away_end, temperature))
    else:
        click.echo("Disabling away mode")
    dev.set_away(away_end, temperature)


@cli.command()
    print_tx,
    print_table,
    format_table,
)
from .main import main, config


@main.command()
@click.pass_context
@onlineChain
@click.argument('market', nargs=1)
@click.option('--limit', type=int, help="Limit number of elements",
              default=10)
# fixme add start and stop time
@click.option('--start', help="Start datetime '%Y-%m-%d %H:%M:%S'",
              type=Datetime(format='%Y-%m-%d %H:%M:%S'))
@click.option('--stop', type=Datetime(format='%Y-%m-%d %H:%M:%S'),
              help="Stop datetime '%Y-%m-%d %H:%M:%S'",
              default=datetime.utcnow())
def trades(ctx, market, limit, start, stop):
    """ List trades in a market """
    market = Market(market, bitshares_instance=ctx.bitshares)
    t = [["time", "quote", "base", "price"]]
    for trade in market.trades(limit, start=start, stop=stop):
        t.append([
            str(trade["time"]),
            str(trade["quote"]),
            str(trade["base"]),
            "{:f} {}/{}".format(trade["price"],
""" Archives the input file to the output destination """ archive_path = os.path.join(output, "archive") if not os.path.exists(archive_path): os.makedirs(archive_path) os.rename(yaml_path, (os.path.join(archive_path, os.path.basename(yaml_path)))) @click.command(help=__doc__) @click.option('--output', help="Write datasets into this directory", type=click.Path(exists=False, writable=True, dir_okay=True)) @click.argument('datasets', type=click.Path(exists=True, readable=True, writable=False), nargs=-1) @click.option('--date', type=Datetime(format='%d/%m/%Y'), default=datetime.now(), help="Enter file creation start date for data preparation") @click.option('--checksum/--no-checksum', help="Checksum the input dataset to confirm match", default=False) def main(output, datasets, checksum, date): logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO) for dataset in datasets: (mode, ino, dev, nlink, uid, gid, size, atime, mtime, ctime) = os.stat(dataset) create_date = datetime.utcfromtimestamp(ctime) if create_date <= date: logging.info( "Dataset creation time " + str(create_date) + " is older than start date " + str(date) + "...SKIPPING") else: path = Path(dataset).absolute() if path.is_dir(): for file in os.listdir(path):
"""Hardware Report Generator. This dashboard can be found at: https://hardware.metrics.mozilla.com/ """ import click import json import glob import summarize_json from click_datetime import Datetime from datetime import datetime, timedelta from pyspark.sql import SparkSession datetime_type = Datetime(format='%Y%m%d') @click.command() @click.option('--start_date', type=datetime_type, required=True, help='Start date (e.g. yyyymmdd)') @click.option('--end_date', type=datetime_type, default=None, help='End date (e.g. yyyymmdd)') @click.option('--bucket', required=True, help='Output bucket for JSON data') def main(start_date, end_date, bucket):
from stock_scraper.service import (update_indexes, update_index,
                                   fetch_update_security, DATE_FORMAT)

DEFAULT_DATE = (datetime.utcnow().date() - timedelta(days=1)).isoformat()
INDEX = 'index'
SHARE = 'share'
ROOT = os.path.dirname(__file__)

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)


@click.argument('code', required=False)
@click.option('--start-date', type=Datetime(format=DATE_FORMAT),
              default=DEFAULT_DATE,
              help='''Start Date of Period''')
@click.option('--end-date', type=Datetime(format=DATE_FORMAT),
              default=DEFAULT_DATE,
              help='''End Date of Period''')
@click.option('--index', 'asset', flag_value=INDEX,
              help='''Indicates that the provided code is for an index''')
@click.option('--share', 'asset', flag_value=SHARE,
              help='''Indicates that the provided code is for a share''',
              default=True)
# enable -h as a help flag
CONTEXT_SETTINGS = dict(help_option_names=['-h', '--help'])


@click.group(context_settings=CONTEXT_SETTINGS, cls=DefaultGroup,
             default='launch', default_if_no_args=True)
def main():
    """Dream note GUI"""
    pass


@click.command(context_settings=CONTEXT_SETTINGS)
@click.option('--date', type=Datetime(format='%Y-%m-%d'),
              default=datetime.now(),
              help='date of the dream (YYYY-MM-DD), defaults to today')
@click.option('--type', type=str, default='normal',
              help='dream type (normal, lucid, ...), defaults to normal')
@click.option('--tags', '-t', type=str, default=None, multiple=True,
              help='tags of the dream. Can be specified multiple times')
@click.argument('title')
def add(**kwargs):
    """ Add a new dream """
click.echo(click.style("Device state: %s" % plug.state, fg="green" if plug.is_on else "red")) click.echo("LED state: %s" % plug.led) click.echo("Time: %s" % plug.time) click.echo("On since: %s" % plug.on_since) click.echo("Hardware: %s" % plug.hw_info["hw_ver"]) click.echo("Software: %s" % plug.hw_info["sw_ver"]) click.echo("MAC (rssi): %s (%s)" % (plug.mac, plug.rssi)) click.echo("Location: %s" % plug.location) ctx.invoke(emeter) @cli.command() @pass_dev @click.option('--year', type=Datetime(format='%Y'), default=None, required=False) @click.option('--month', type=Datetime(format='%Y-%m'), default=None, required=False) @click.option('--erase', is_flag=True) def emeter(plug, year, month, erase): """Query emeter for historical consumption.""" click.echo(click.style("== Emeter ==", bold=True)) if not plug.has_emeter: click.echo("Device has no emeter") return if erase: click.echo("Erasing emeter statistics..") plug.erase_emeter_stats() return
    find_options.extend(['-mtime', '+' + str(max_in_days)])
    return find_options


@click.group()
def cli():
    pass


@cli.command('process-level2')
@click.option('--level1-root', default=DEFAULT_S2_L1C, type=str,
              help="Folder containing Sentinel-2 level-1 datasets.")
@click.option('--s2-aoi', default=DEFAULT_S2_AOI, type=str,
              help="List of MGRS tiles of interest.")
@click.option('--start-date', type=Datetime(format='%Y-%m-%d'),
              help="Start of date range to process.")
@click.option('--end-date', type=Datetime(format='%Y-%m-%d'),
              help="End of date range to process.")
@click.option('--pkgdir', default=DEFAULT_PKGDIR,
              type=click.Path(file_okay=False, writable=True),
              help="The base output packaged directory.")
@click.option("--workdir", default=DEFAULT_WORKDIR,
              type=click.Path(file_okay=False, writable=True),
              help="The base output working directory.")
@click.option("--logdir", default=DEFAULT_LOGDIR,
              type=click.Path(file_okay=False, writable=True),
              help="The base logging and scripts output directory.")
@click.option("--env", type=click.Path(exists=True, readable=True),
              help="Environment script to source.")
@click.option("--workers", type=click.IntRange(1, 32), default=28,
              help="The number of workers to request per node.")
@click.option("--memory", default=256,
              help="The memory in GB to request per node.")
            pprintOperation(b),
        ]
        if csv:
            t.writerow(row)
        else:
            t.add_row(row)
    if not csv:
        click.echo(t)


@main.command()
@click.pass_context
@onlineChain
@click.argument('market', nargs=1)
@click.option('--limit', type=int, default=10)
# fixme add start and stop time
@click.option('--start', type=Datetime(format='%Y-%m-%d %H:%M:%S'))
@click.option('--stop', type=Datetime(format='%Y-%m-%d %H:%M:%S'),
              default=datetime.utcnow())
def trades(ctx, market, limit, start, stop):
    market = Market(market, bitshares_instance=ctx.bitshares)
    t = PrettyTable(["time", "quote", "base", "price"])
    t.align = 'r'
    for trade in market.trades(limit, start=start, stop=stop):
        t.add_row([
            str(trade["time"]),
            str(trade["quote"]),
            str(trade["base"]),
            "{:f} {}/{}".format(trade["price"],
                                trade["base"]["asset"]["symbol"],
                                trade["quote"]["asset"]["symbol"]),
        # Write the week start/end in the filename.
        suffix = ("-" + chunk_start.strftime("%Y%d%m") + "-" +
                  chunk_end.strftime("%Y%d%m"))
        file_name = get_file_name(suffix)
        date_to_json[file_name] = processed_aggregates

        # Move on to the next chunk: just add one day to the end of the last
        # chunk.
        chunk_start = chunk_end + dt.timedelta(days=1)

    return date_to_json


datetime_type = Datetime(format="%Y%m%d")


@click.command()
@click.option("--start_date",
              type=datetime_type,
              required=True,
              help="Start date (e.g. yyyymmdd)")
@click.option("--end_date",
              type=datetime_type,
              default=None,
              help="End date (e.g. yyyymmdd)")
@click.option("--bucket",
              required=True,
              help="Output bucket for JSON data")
@click.option(
    "--spark-provider",
    type=click.Choice(["emr", "dataproc"]),
# -*- coding: utf-8 -*-

"""Console script for tempsummarization."""
import sys
from datetime import datetime

import click
from click_datetime import Datetime

from contamehistorias.engine import TemporalSummarizationEngine
from contamehistorias.datasources.mediacloud import MediaCloudSearchAPI


@click.command()
@click.option('--query', help="Perform news retrieval with the given query")
@click.option('--language', default="en",
              help="Expected language in headlines")
@click.option('--start_date', type=Datetime(format='%d/%m/%Y'),
              default=datetime(year=2010, month=1, day=1),
              help="Perform news retrieval since this date")
@click.option('--end_date', type=Datetime(format='%d/%m/%Y'),
              default=datetime.now(),
              help="Perform news retrieval until this date")
@click.option('--api_key', help="MediaCloud API key")
@click.option('--verbose', is_flag=True)
def main(query, language, start_date, end_date, api_key, verbose):
    """Console script for tempsummarization."""
    click.echo("Conta-me Historias Temporal Summarization. "
               "Media Cloud API example (more info at https://mediacloud.org/)")

    print("--query", query)
    print("--language", language)
    print("--start_date", start_date)
    print("--end_date", end_date)
    print()

    params = {
        'language': language,
        'start_date': start_date,