Example #1
def fetch_amendements(chambre: Optional[str], num: Optional[int],
                      progress: bool, settings: Dict[str, Any]) -> None:
    an_ids = repository.list_opendata_dossiers()
    if progress:
        bar = ProgressBar(total=len(an_ids))
    random.shuffle(an_ids)
    for an_id in an_ids:
        dossier_ref = repository.get_opendata_dossier_ref(an_id)
        fetch_amendements_for_dossier(dossier_ref, chambre, num, settings)
        if progress:
            bar.update(step=len(dossier_ref.lectures))
Example #2
async def fetch_statistics(datasets: List[Dataset]) -> List[Dataset]:
    print(f"Fetching statistics")
    nb_updated_datasets = 0
    bar = ProgressBar(total=len(datasets))
    for dataset in bar.iter(datasets):
        results = await fetch_stats_for(dataset.page)
        if results["2020"]:
            dataset.nb_hits = results["2020"][0]["nb_hits"]
            nb_updated_datasets += 1
    print(f"{nb_updated_datasets} datasets updated from Matomo")
    return datasets
Example #3
async def fetch_stats():
    conn = await get_conn()
    rows = await conn.fetch('SELECT id, url FROM datasets;')
    bar = ProgressBar(total=len(rows))
    for row in bar.iter(rows):
        stats = await fetch_stats_for(row['url'])
        if stats.get('2020'):
            await conn.execute(
                'UPDATE datasets SET nb_hits = $1 WHERE id = $2',
                stats['2020'][0]['nb_hits'],
                row['id']
            )
Example #4
def migrate_users(app, limit, users_filepath):
    click.echo('Starting users migration.')
    ds = app.user_datastore
    old_users = load_json_file(users_filepath)
    if limit:
        old_users = old_users[:limit]
    bar = ProgressBar(total=len(old_users),
                      template=PROGRESSIST_TEMPLATE,
                      done_char='👤')
    editor_role = Role.objects.get(name='editor')
    with app.app_context():
        for old_user in bar.iter(old_users):
            create_user(ds, old_user, editor_role)
    click.echo('Users migrated.')
Example #5
def migrate_articles(app, limit, article_filepath):
    click.echo('Starting article migration.')
    old_articles = load_json_file(article_filepath)
    if limit:
        old_articles = old_articles[:limit]
    bar = ProgressBar(total=len(old_articles),
                      template=PROGRESSIST_TEMPLATE,
                      done_char='📃')
    with app.app_context():
        for old_article in bar.iter(old_articles):
            create_article(old_article)
        bar.done = 0  # Reset the bar before the second pass.
        for old_article in bar.iter(old_articles):
            create_article(old_article)  # Second pass over the same articles.
    click.echo('Articles migrated.')
Example #6
async def as_xlsx(max_rows=None, debug=False):
    """Export des données au format souhaité par la DGT.

    :max_rows:          Max number of rows to process.
    :debug:             Turn on debug to be able to read the generated Workbook
    """
    print("Reading from DB")
    records = await db.declaration.completed()
    print("Flattening JSON")
    if max_rows:
        records = records[:max_rows]
    wb = Workbook(write_only=not debug)
    ws = wb.create_sheet()
    ws.title = "BDD REPONDANTS"
    wb.active = ws
    ws_ues = wb.create_sheet()
    ws_ues.title = "BDD UES détail entreprises"
    ws_ues.append([
        "Annee_indicateurs",
        "Region",
        "Departement",
        "Adresse",
        "CP",
        "Commune",
        "Tranche_effectif",
        "Nom_UES",
        "Siren_entreprise_declarante",
        "Nom_entreprise_declarante",
        "Nom_entreprise",
        "Siren",
    ])
    headers, columns = await get_headers_columns()
    ws.append(headers)
    bar = ProgressBar(prefix="Computing", total=len(records))
    for record in bar.iter(records):
        data = record.data
        if not data:
            continue
        ues_data(ws_ues, data)
        data = prepare_record(data)
        data["modified_at"] = record["modified_at"]
        ws.append([clean_cell(fmt(data.get(c))) for c, fmt in columns])
    return wb
Example #7
async def fetch_datasets_from_urls(dataset_urls: List[str]) -> List[Dataset]:
    print("Fetching datasets from URLs.")
    dataset_slugs = [
        extract_slug(dataset_url) for dataset_url in dataset_urls
        if dataset_url.startswith("https://www.data.gouv.fr/fr/datasets/")
    ]
    datasets = []
    bar = ProgressBar(total=len(dataset_slugs))
    for i, dataset_slug in enumerate(bar.iter(dataset_slugs)):
        data = await fetch_json_data(
            f"/api/1/datasets/{dataset_slug}/",
            headers={
                "X-Fields": ("id,title,metrics,description,acronym,page,"
                             "owner{first_name,last_name,avatar_thumbnail},"
                             "organization{name,slug,logo_thumbnail}")
            },
        )
        if data and "id" in data:
            dataset = await convert_to_dataset(data, i)
            datasets.append(dataset)
    return datasets
Example #8
File: usine.py Project: cbnva/usine
def put(local, remote, force=False):
    user = client.context.get('user')
    if client.cd:
        remote = Path(client.cd) / remote
    if not hasattr(local, 'read'):
        local = Path(local)
        if local.is_dir():
            with unsudo():  # Force reset to SSH user.
                mkdir(remote)
                if user:
                    chown(user, remote)
            for path in local.rglob('*'):
                relative_path = path.relative_to(local)
                put(path, remote / relative_path)
            return
        if not force and exists(remote):
            lstat = os.stat(str(local))
            rstat = client.sftp.stat(str(remote))
            if (lstat.st_size == rstat.st_size
                    and lstat.st_mtime <= rstat.st_mtime):
                print(f'{local} => {remote}: SKIPPING (reason: up to date)')
                return
    elif isinstance(local, StringIO):
        local = BytesIO(local.read().encode())
    if hasattr(local, 'read'):
        func = client.sftp.putfo
        bar = ProgressBar(prefix=f'Sending to {remote}',
                          animation='{spinner}',
                          template='{prefix} {animation} {done:B}')
    else:
        bar = ProgressBar(prefix=f'{local} => {remote}')
        func = client.sftp.put
    if client.dry_run:
        print(bar.prefix)
        return
    tmp = str(Path('/tmp') / md5(str(remote).encode()).hexdigest())
    try:
        func(local,
             tmp,
             callback=lambda done, total: bar.update(done=done, total=total),
             confirm=True)
    except OSError as err:
        print(red(f'Error while processing {remote}'))
        print(red(err))
        sys.exit(1)
    if hasattr(local, 'read'):
        bar.finish()
    if user:
        with unsudo(), sudo():  # Reset to the SSH user, then sudo, before chown.
            chown(user, tmp)
    mv(tmp, remote)
    if user:
        chown(user, remote)
Example #9
def get(remote, local):
    if client.cd:
        remote = Path(client.cd) / remote
    if hasattr(local, 'read'):
        func = client.sftp.getfo
        bar = ProgressBar(prefix=f'Reading from {remote}',
                          animation='{spinner}',
                          template='{prefix} {animation} {done:B}')
    else:
        bar = ProgressBar(prefix=f'{remote} => {local}',
                          template='{prefix} {animation} {percent} '
                                   '({done:B}/{total:B}) ETA: {eta}')
        func = client.sftp.get
    func(str(remote), local,
         callback=lambda done, total: bar.update(done=done, total=total))
    if hasattr(local, 'read'):
        local.seek(0)
        bar.finish()
Example #10
logging.basicConfig(format="[%(asctime)s -%(levelname)8s]: %(message)s",
                    datefmt="%H:%M:%S",
                    level=LOGGING_LEVEL)
logger = logging.getLogger('bbb-player')

# try to import pySmartDL else use plain old urllib
try:
    from pySmartDL import SmartDL
    smartDlEnabled = True
except ImportError:
    logger.warning("pySmartDL not imported, using urllib instead")
    smartDlEnabled = False
    try:
        from progressist import ProgressBar
        bar = ProgressBar(
            throttle=timedelta(seconds=1),
            template="Download |{animation}|{tta}| {done:B}/{total:B} at {speed:B}/s")
    except ImportError:
        logger.warning("progressist not imported. Progress bar will not be shown. "
                       "Try running: pip3 install progressist")
        bar = None


def ffmpegCombine(suffix, fileName=DEFAULT_COMBINED_VIDEO_NAME):
    try:
        import ffmpeg
    except ImportError:
        logger.critical(
            "ffmpeg-python not imported. Try running:\npip3 install ffmpeg-python"
        )
Example #11
def downloadScript(inputURL, meetingNameWanted):
    # get meeting id from url https://regex101.com/r/UjqGeo/3
    matchesURL = re.search(r"/?(\d+\.\d+)/.*?([0-9a-f]{40}-\d{13})/?",
                           inputURL, re.IGNORECASE)
    if matchesURL and len(matchesURL.groups()) == 2:
        bbbVersion = matchesURL.group(1)
        meetingId = matchesURL.group(2)
        logger.info(f"Detected bbb version:\t{bbbVersion}")
        logger.info(f"Detected meeting id:\t{meetingId}")
    else:
        logger.error("Meeting ID could not be found in the url.")
        exit(1)

    baseURL = "{}://{}/presentation/{}/".format(
        urlparse(inputURL).scheme,
        urlparse(inputURL).netloc, meetingId)
    logger.debug("Base url: {}".format(baseURL))

    if meetingNameWanted:
        folderPath = os.path.join(SCRIPT_DIR, DOWNLOADED_MEETINGS_FOLDER,
                                  meetingNameWanted)
    else:
        folderPath = os.path.join(SCRIPT_DIR, DOWNLOADED_MEETINGS_FOLDER,
                                  meetingId)
    logger.debug("Folder path: {}".format(folderPath))

    if os.path.isfile(os.path.join(folderPath, DOWNLOADED_FULLY_FILENAME)):
        logger.info("Meeting is already downloaded.")
    else:
        logger.info(
            "Folder already created but not everything was downloaded. Retrying."
        )
        # todo: maybe delete contents of the folder

        foldersToCreate = [
            os.path.join(folderPath, x)
            for x in ["", "video", "deskshare", "presentation"]
        ]
        # logger.info(foldersToCreate)
        for i in foldersToCreate:
            createFolder(i)

        try:
            from progressist import ProgressBar
            bar = ProgressBar(
                throttle=timedelta(seconds=1),
                template="Download |{animation}|{tta}| {done:B}/{total:B} at {speed:B}/s")
        except ImportError:
            logger.warning("progressist not imported. Progress bar will not be shown. "
                           "Try running: pip3 install progressist")
            bar = None

        downloadFiles(baseURL, folderPath)
        downloadSlides(baseURL, folderPath)

        # Copy the 2.3 player
        copy_tree(os.path.join(SCRIPT_DIR, "player23"), folderPath)

        with open(os.path.join(folderPath, DOWNLOADED_FULLY_FILENAME),
                  'w') as fp:
            # write a downloaded_fully file to mark a successful download
            # todo: check if files were really dl-ed (make a json of files to download and
            #          check them one by one on success)
            pass
Example #12
# progressist - Minimalist and pythonic progress bar

# PyPI: https://pypi.org/project/progressist/
# Github: https://github.com/pyrates/progressist

# pip install progressist

# Usage
from progressist import ProgressBar

bar = ProgressBar(total=mytotalstuff)
for item in mystuff:
    # do_stuff
    bar.update()
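Most of the examples above also rely on bar.iter(), which wraps an iterable and calls update() for you, and on constructor options such as total, prefix and done_char seen in Examples #4 to #9. A minimal sketch combining them, with a placeholder workload:

from progressist import ProgressBar

items = range(100)  # placeholder workload
bar = ProgressBar(total=len(items), prefix="Processing", done_char='#')
for item in bar.iter(items):
    pass  # do_stuff; iter() advances the bar, no explicit update() needed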
Example #13
import urllib.request

from progressist import ProgressBar


def urlretrieve(url, dest):
    print("Downloading", url)
    bar = ProgressBar(template="Download |{animation}| {done:B}/{total:B}")
    urllib.request.urlretrieve(url, dest, reporthook=bar.on_urlretrieve)
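on_urlretrieve is the reporthook progressist provides for urllib.request.urlretrieve, as shown above. A hypothetical call, with placeholder URL and destination:

urlretrieve("https://example.org/archive.zip", "archive.zip")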