"acq_reputation_building": _x("S"), "acq_demand_creation": _x("T"), "acq_leasing_enablement": _x("U"), "acq_market_intelligence": _x("V"), "monthly_average_rent": _x("AA"), "lowest_monthly_rent": _x("AB"), "ret_reputation_building": _x("W"), "ret_demand_creation": _x("X"), "ret_leasing_enablement": _x("Y"), "ret_market_intelligence": _x("Z"), "usvs": _x("P"), "inquiries": _x("Q"), "tours": _x("R"), }, ) @click.command() @click.argument("file_name", type=click.Path(exists=True)) def command(file_name): project = Project.objects.get(public_id="pro_tdglra7vyt7wu311") workbook = openpyxl.load_workbook(file_name, data_only=True) import pdb pdb.set_trace() sheet = workbook["OUTPUT Periods"] print("Updating rows...") for row in range(3, 67 + 1): import_venice_beach_row(project, sheet, row) print("...done updating rows")
with open(output, 'w') as out: import_from_file(data_path, resume=True) #subprocess.run(['python', 'manage.py', 'import_data', '--file', data_path], stdout=out) return state def import_states_file(states_file): with open(states_file) as f: states = f.readlines() for state in states: import_state(state) @click.command() @click.option('--file', type=click.Path(exists=True)) @click.option('--states', type=click.Path(exists=True)) @click.option('--parallelism', '-p', type=click.INT, default=1) @click.option('--resume/--no-result', default=False) @click.option('--rerun/--no-rerun', default=False) def import_data(file, resume, states, parallelism, rerun: bool): if file: import_from_file(file, resume, rerun) if states: import_states_file(states) def sha256_digest(f): return subprocess.run(["sha256sum", f], capture_output=True).stdout.split()[0].strip()
# (fragment: tail of a command whose try/def lines are before this chunk)
    except Massimport.DoesNotExist as e:
        # Unknown session id: report in red and bail out.
        click.secho(
            'Massimport session with id: {} does not exist.'.format(id),
            bold=True, fg='red')
        return
    # Pending files only (status=0).
    qs = massimport.files.filter(status=0)
    click.secho('Files total: {} - limit: {}'.format(qs.count(), limit), bold=True)
    # NOTE(review): the queryset is rebuilt here instead of slicing `qs` --
    # presumably equivalent; verify before consolidating.
    for item in massimport.files.filter(status=0)[0:limit]:
        item.enqueue()


# NOTE: `type=unicode` means this file targets Python 2.
@cli.command()
@click.option('--path', '-p', type=click.Path(), required=True)
@click.option('--username', '-u', type=unicode, required=True)
@click.option('--collection', '-c', type=unicode)
@click.option('--limit', '-l', type=click.IntRange(1, 50000, clamp=True), default=100)
def start(path, limit, username, collection):
    """Start an import session"""
    # Echo the run parameters; the function continues past this chunk.
    click.secho(
        '--------------------------------------------------------------------',
        bold=True)
    click.echo('Username:\t {}'.format(username))
    click.echo('Collection:\t {}'.format(collection))
    click.echo('Limit:\t\t {}'.format(limit))
from django.contrib.auth.models import User
import djclick as click

from miqa.core.models import Project
from miqa.core.rest.project import import_data


@click.command()
@click.option('--csv', type=click.Path(exists=True))
@click.option('--username', type=click.STRING, help='username for project creator')
def command(csv, username):
    """Create a dev project pointed at *csv* and kick off its import.

    The project creator is the user named by --username; when no
    username is supplied, the first user in the database is used.
    """
    user = User.objects.get(username=username) if username else User.objects.first()
    project = Project.objects.create(
        name='miqa-dev',
        import_path=csv,
        export_path='.',
        creator=user,
    )
    import_data(user.id, project.id)
import json

import djclick as click
from django.db import transaction

from panels.models import GenePanelSnapshot
from panels.models import GenePanelEntrySnapshot
from panels.models import Region
from panels.models import STR
from panels.models import Gene


@click.command()
@click.argument('json_file', type=click.Path(exists=True))
def command(json_file):
    """
    Update Ensembl IDs in Genes

    This will:

    1. Update Gene data
    2. Increment any panel where this gene is referenced
    3. Update all entities (gene ensembl info) that use this gene

    Runs as a transaction, won't update in case of any failure.

    :param json_file: JSON File in the following format:

        {
            'EXOC3L2': {
                'GRch37': {
                    '82': {
                        'ensembl_id': 'ENSG00000130201',
from mutualfunds.importers.cas import import_cas


@click.command()
@click.option(
    "-p",
    "password",
    metavar="PASSWORD",
    prompt="Enter PDF password",
    hide_input=True,
    confirmation_prompt=False,
    help="CAS password",
)
@click.argument("input_file", type=click.Path(exists=True, dir_okay=False), metavar="CAS_PDF_FILE")
def cas_importer(password, input_file):
    """Parse a CAS PDF and echo its summary before importing.

    The function continues past this chunk (the try block is cut off).
    """
    logger = logging.getLogger(__name__)
    logger.info("Reading CAS PDF")
    # casparser handles decrypting and parsing the statement PDF.
    pdf_data = casparser.read_cas_pdf(input_file, password)
    period = pdf_data["statement_period"]
    email = pdf_data["investor_info"]["email"]
    click.echo("CAS file type " + click.style(pdf_data["file_type"], fg="green", bold=True))
    click.echo(
        "CAS statement period: "
        + click.style(period["from"], fg="green", bold=True)
        + " to "
        + click.style(period["to"], fg="green", bold=True))
    click.echo("Email : " + click.style(email, fg="green", bold=True))
    try:
# (fragment: tail of a GPX-building helper; its def line and the bindings of
#  `circuit`, `edge_map`, `gpx_segment` and `gpx` are before this chunk)
    t = datetime.now()
    for i, segment in enumerate(circuit):
        start, end, _, meta = segment
        nodes = edge_map[meta['id']]
        # Walk the edge in the direction the circuit traverses it.
        if int(end) == nodes[0].id:
            nodes = reversed(nodes)
        for node in nodes:
            # Synthetic timestamps one second apart keep the track points
            # ordered for GPX consumers.
            t += timedelta(seconds=1)
            gpx_segment.points.append(
                gpxpy.gpx.GPXTrackPoint(latitude=node.lat, longitude=node.lon, time=t)
            )
    return gpx


@click.command()
@click.argument('osm-data', type=click.Path(exists=True))
def postman(osm_data):
    """Ingest OSM data in preparation for a postman-tour computation."""
    Settings = IngestSettings(
        max_distance=Distance(km=50),
        max_segments=300,
        max_concurrent=40,
        quality_settings=DefaultQualitySettings,
        location_filter=None,
    )
    loader = OSMIngestor(Settings)
    # extra_links stitches two nodes that OSM does not connect directly.
    loader.load_osm(osm_data, extra_links=[(885729040, 827103027)])
    # s = datetime.now()
    # data = write_gpickle(loader.global_graph, 'test.pickle')
    #nx_yaml.write_yaml(loader.global_graph, 'test.yaml')
    # e = datetime.now()
    # print(e-s)
def _read_checkpoint_file(
        checkpoint_file: Optional[str]) -> Tuple[Set[str], Set[str]]:
    """Parse a checkpoint file into (collection_ids, user_ids) to skip.

    Recognized lines look like ``CHECKPOINT <type> <oid>`` where <type>
    is 'collection' or 'user'; every other line is ignored.  A falsy
    *checkpoint_file* yields two empty sets.
    """
    colls, users = set(), set()
    if checkpoint_file:
        with open(checkpoint_file) as fd:
            # Iterate the file lazily instead of materializing every line
            # with readlines() -- same order, no full-file buffer.
            for line in fd:
                if line.startswith('CHECKPOINT'):
                    _, type_, oid = line.split()
                    if type_ == 'collection':
                        colls.add(oid)
                    elif type_ == 'user':
                        users.add(oid)
    return colls, users


@click.command()
@click.argument('mongo_uri', type=click.STRING)
@click.argument('user_id', type=click.INT)
@click.option('--checkpoint-file', type=click.Path(dir_okay=False, exists=True))
def command(mongo_uri: str, user_id: int, checkpoint_file: Optional[str]) -> None:
    """Sync users and root folders from the Girder Mongo DB at *mongo_uri*.

    Collections/users already recorded in the checkpoint file are skipped.
    """
    skip_colls, skip_users = _read_checkpoint_file(checkpoint_file)
    default_user: User = User.objects.get(id=user_id)
    # The fallback owner must be a real account, never the anonymous user.
    assert not default_user.is_anonymous
    db = MongoClient(mongo_uri).girder
    user_map = _sync_users(db, default_user)
    _sync_root_folders(db, user_map, skip_colls, skip_users)
# (fragment: tail of _process_file; earlier lines, before this chunk, bind
#  `filepath`, `filename`, `json_content`, `file_type` and `gamefolder`)
    game_file = None
    # NOTE(review): as visible here game_file is always None, making this
    # branch dead -- presumably a lookup between these two statements was
    # lost from this chunk; confirm against the full file.
    if game_file is not None:
        click.echo(f"Skipping {filepath}...")
        return
    click.secho(f"Importing {filepath}...", fg="green")
    GameFile.objects.create(
        folder=gamefolder,
        filename=filename,
        content=json_content,
        file_type=file_type,
        game_version=str(game_version),
    )


@click.command()
@click.argument("game_version", type=str, required=True)
@click.option("-f", "--file_path", type=click.Path(exists=True))
def command(game_version, file_path):
    """Import Boundless game files: one file, or a full directory walk."""
    game_version = semver.VersionInfo.parse(game_version)
    if file_path is not None:
        # Single-file mode: import just the named file.
        _process_file(os.path.dirname(file_path), os.path.basename(file_path), game_version)
        return
    # Directory mode: walk the configured Boundless install location.
    for root, _, files in os.walk(settings.BOUNDLESS_LOCATION):
        for filename in files:
            _process_file(root, filename, game_version)
from atracker.models import EventType

from ...label_statistics import yearly_summary_for_label_as_xls, summary_for_label_as_xls
from ...suisa_statistics import monthly_for_channel_as_xls


@click.group()
def cli():
    """Statistics CLI"""
    pass


# NOTE: the --id option shadows the `id` builtin inside the function; it is
# part of the CLI interface, so the parameter name is kept as-is.
@cli.command()
@click.argument('scope', nargs=1)
@click.option('-i', '--id', required=True)
@click.option('-y', '--year', type=int, required=False)
@click.option('-p', '--path', type=click.Path(), required=False)
def label_statistics(scope, id, year, path):
    """Generate label statistics; the function continues past this chunk."""
    print('generate label statistics: scope: {} - id: {} - year: {} - output: {}'.format(
        scope, id, year, path)
    )
    # `scope` names an EventType by title; its pk filters the stats queries.
    event_type_id = EventType.objects.get(title=scope).pk
    label = Label.objects.get(pk=id)
    if year:
        # Yearly summary path (the call is completed past this chunk).
        yearly_summary_for_label_as_xls(
            year=year,
            label=label,
            event_type_id=event_type_id,