Example #1
def create_merged_state_spreadsheet(state):
    state_data = merge_state_data(state)
    reports = rows.import_from_dicts(state_data["reports"])
    cases = rows.import_from_dicts(state_data["cases"])

    # Export both tables into the same in-memory buffer, one named sheet each.
    data = io.BytesIO()
    rows.export_to_xlsx(reports, data, sheet_name=google_data.BOLETIM_SPREADSHEET)
    data.seek(0)
    rows.export_to_xlsx(cases, data, sheet_name=google_data.CASOS_SPREADSHEET)
    data.seek(0)
    return data
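A minimal usage sketch for this helper (hypothetical caller; "SP" stands in for any state code accepted by merge_state_data, and the output filename is illustrative):

# Sketch: persist the in-memory workbook returned above to disk.
data = create_merged_state_spreadsheet("SP")  # "SP" is an assumed state code
with open("merged_SP.xlsx", "wb") as output:
    output.write(data.read())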
Example #2
    def test_issue_168(self):
        temp = tempfile.NamedTemporaryFile(delete=False)
        filename = "{}.{}".format(temp.name, self.file_extension)
        self.files_to_delete.append(filename)

        table = rows.Table(fields=OrderedDict([("jsoncolumn", rows.fields.JSONField)]))
        table.append({"jsoncolumn": '{"python": 42}'})
        rows.export_to_xlsx(table, filename)

        table2 = rows.import_from_xlsx(filename)
        self.assert_table_equal(table, table2)
Example #3
    def test_export_to_xlsx_fobj(self):
        temp = tempfile.NamedTemporaryFile()
        filename = temp.name + '.xlsx'
        temp.close()
        fobj = open(filename, 'wb')
        self.files_to_delete.append(filename)

        rows.export_to_xlsx(utils.table, fobj)
        fobj.close()

        table = rows.import_from_xlsx(filename)
        self.assert_table_equal(table, utils.table)
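Note that rows.export_to_xlsx accepts either a filename or a binary-mode file object as its target; this test exercises the file-object path, while the next example covers the filename and in-memory paths.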
Example #4
    def test_export_to_xlsx_filename(self):
        temp = tempfile.NamedTemporaryFile()
        filename = temp.name + '.xlsx'
        temp.close()
        self.files_to_delete.append(filename)
        rows.export_to_xlsx(utils.table, filename)

        table = rows.import_from_xlsx(filename)
        self.assert_table_equal(table, utils.table)

        # With None as the target, export_to_xlsx returns the XLSX content as bytes.
        export_in_memory = rows.export_to_xlsx(utils.table, None)
        result_fobj = BytesIO()
        result_fobj.write(export_in_memory)
        result_fobj.seek(0)
        result_table = rows.import_from_xlsx(result_fobj)
        self.assert_table_equal(result_table, utils.table)
Example #5
    def test_export_to_xlsx_uses_prepare_to_export(self, mocked_prepare_to_export):
        temp = tempfile.NamedTemporaryFile()
        filename = temp.name + ".xlsx"
        temp.file.close()
        self.files_to_delete.append(filename)

        kwargs = {"test": 123, "parameter": 3.14}
        mocked_prepare_to_export.return_value = iter([utils.table.fields.keys()])

        rows.export_to_xlsx(utils.table, temp.name, **kwargs)
        self.assertTrue(mocked_prepare_to_export.called)
        self.assertEqual(mocked_prepare_to_export.call_count, 1)

        call = mocked_prepare_to_export.call_args
        self.assertEqual(call[0], (utils.table,))
        self.assertEqual(call[1], kwargs)
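Not shown in this snippet: mocked_prepare_to_export is presumably injected by a mock.patch decorator applied to the test method (patching prepare_to_export where the xlsx plugin looks it up), which is what lets the test inspect call_args afterwards.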
Example #6
        for item in itens_dados:
            match = pattern.match(str(item))
            if match:
                row_ver[fields.slug(match.group(1))] = parse_data(match.group(3))

        row_ver["frentes"] = URL_FRENTES.format(id=id_ver)
        row_ver["votacoes"] = URL_VOTACOES.format(id=id_ver)
        row_ver["mandatos"] = URL_MANDATOS.format(id=id_ver)

    def pegar_dados_do_brasilio(cpf_ver, row_ver):
        # Look up the CPF in the "documentos-brasil" dataset; leave empty on failure.
        try:
            data = api.dataset_table_data('documentos-brasil', 'documents', document_type='CPF', document=cpf_ver)
            result = list(data)
            row_ver['docs_brasilio'] = result[0]['sources']
        except Exception:
            row_ver['docs_brasilio'] = ''

        # Search the "socios-brasil" dataset by the councillor's civil name.
        data = api.dataset_table_data('socios-brasil', 'socios', search='"{}"'.format(row_ver['nome_civil'].strip()))
        result = list(data)
        row_ver['socios_brasilio'] = ' /// '.join(
            '{} {} ({})'.format(r['cnpj'], r['razao_social'], r['qualificacao_socio'])
            for r in result
        )

    # Each entry of IDS_CPFS is a tab-separated "id<TAB>cpf" pair.
    for id_ver, cpf_ver in map(lambda i: i.strip().split('\t'), IDS_CPFS):
        row_ver = {}
        pegar_dados_da_camara_municipal(id_ver, row_ver)
        pegar_dados_do_brasilio(cpf_ver, row_ver)
        vereadores.append(row_ver)


    # export_to_xlsx expects a rows.Table, so convert the plain dicts first
    # (import_from_dicts is assumed to be imported from rows, like export_to_xlsx).
    export_to_xlsx(import_from_dicts(vereadores), "vereadores.xlsx")
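The conversion matters: rows' export functions accept only Table (or FlexibleTable) instances and raise a ValueError in prepare_to_export for anything else, so a plain list of dicts has to go through import_from_dicts first.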
Example #7
for day in days_brasil:
    if day['city'] == 'Joinville' and day['is_last']:
        day['date'] = rows_dados[0].data_iso
        day['last_available_confirmed'] = rows_dados[0].confirmados
        day['last_available_confirmed_per_100k_inhabitants'] = rows_dados[0].conf_por_100k
        day['new_confirmed'] = rows_dados[0].confirmados - rows_dados[1].confirmados
        day['last_available_deaths'] = rows_dados[0].obitos
        day['new_deaths'] = rows_dados[0].obitos - rows_dados[1].obitos
        day['last_available_death_rate'] = rows_dados[0].taxa_conf_obito
        break

rows_brasil = rows.import_from_dicts(days_brasil)


#%% Exporting the Brazilian cities data to .CSV and .XLSX
rows.export_to_csv(rows_brasil, "covid_brasil.csv")

rows.export_to_xlsx(rows_brasil, "covid_brasil.xlsx")


#%% Exporting the Joinville data to .CSV and .XLSX
rows.export_to_csv(rows_dados, "covid_joinville.csv")

rows.export_to_xlsx(rows_dados, "covid_joinville.xlsx")


#%% Exporting the ICU and ward bed occupancy data to .CSV and .XLSX
rows.export_to_csv(rows_leitos, "leitos_uti_enfermaria.csv")

rows.export_to_xlsx(rows_leitos, "leitos_uti_enfermaria.xlsx")
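The three export pairs repeat the same pattern; a compact alternative, as a sketch assuming the same three tables are in scope (and Python 3.6+ for f-strings):

# Sketch: export each table to both formats in one loop.
for table, basename in [
    (rows_brasil, "covid_brasil"),
    (rows_dados, "covid_joinville"),
    (rows_leitos, "leitos_uti_enfermaria"),
]:
    rows.export_to_csv(table, f"{basename}.csv")
    rows.export_to_xlsx(table, f"{basename}.xlsx")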
Example #8
def teladduser(file, time_sleep):
    """
    Log in to a Telegram account and add users, read from a spreadsheet,
    to a supergroup in which the logged-in account is an admin.
    """

    # Verify that the Excel spreadsheet filename was given.
    if not file:
        print('You need to pass the Excel spreadsheet filename!\n')
        click.Context(teladduser).exit(code=1)

    # Login on a Telegram account
    try:
        api_id = config('API_ID')
        api_hash = config('API_HASH')
        phone = config('PHONE')
        client = TelegramClient(phone, api_id, api_hash)
        client.connect()
        if not client.is_user_authorized():
            client.send_code_request(phone)
            login_code = click.prompt(
                'Enter the login code that was sent to your Telegram app',
                type=int)
            client.sign_in(phone, login_code)
    except UndefinedValueError:
        print(
            'The environment variables API_ID, API_HASH or PHONE were not defined. '
            'Please create a .env file defining them!\n')
        click.Context(teladduser).exit(code=1)

    # Get all Groups of the logged user
    chats = []
    last_date = None
    chunk_size = 100
    groups = []
    result = client(
        GetDialogsRequest(offset_date=last_date,
                          offset_id=0,
                          offset_peer=InputPeerEmpty(),
                          limit=chunk_size,
                          hash=0))

    # Get only the super group of the logged user
    chats.extend(result.chats)
    for chat in chats:
        try:
            # Only channel-type chats carry the megagroup flag.
            if chat.megagroup:
                groups.append(chat)
        except AttributeError:
            continue

    # Select a group to add users
    for i, g in enumerate(groups):
        print(f"{i + 1} - {g.title}")
    g_index = click.prompt("\nEnter the number of the group to add users to",
                           type=int)
    try:
        target_group = groups[int(g_index) - 1]
    except IndexError:
        print(
            '\nThe number selected is not a valid group number! Please try again!\n'
        )
        click.Context(teladduser).exit(code=1)

    target_group_entity = InputPeerChannel(target_group.id,
                                           target_group.access_hash)

    print(f'\nReading the file {file}, this will take a while ...\n')
    users_to_add = rows.import_from_xlsx(file)

    # Create a new Rows Table to save processed data
    fields = OrderedDict([('username_normal', rows.fields.TextField),
                          ('nome', rows.fields.TextField),
                          ('grupocanal', rows.fields.TextField),
                          ('conta_de_envio', rows.fields.IntegerField),
                          ('log', rows.fields.TextField)])
    users_added = rows.Table(fields=fields)

    n = 0  # number of users successfully added in this run
    for i, user in enumerate(users_to_add):
        if user.log:
            # Rows that already carry a log entry were processed in a previous run.
            users_added.append({
                'username_normal': user.username_normal,
                'nome': user.nome,
                'grupocanal': user.grupocanal,
                'conta_de_envio': user.conta_de_envio,
                'log': user.log,
            })
        elif i >= 45:
            try:
                print(f'Adding user: {i} - {user.nome}')
                user_to_add = client.get_input_entity(user.username_normal)
                client(
                    InviteToChannelRequest(target_group_entity, [user_to_add]))
                log = f"User added on: {datetime.strftime(datetime.today(), '%Y-%m-%d at %H:%M:%S')}"
                users_added.append({
                    'username_normal': user.username_normal,
                    'nome': user.nome,
                    'grupocanal': target_group.title,
                    'conta_de_envio': user.conta_de_envio,
                    'log': log,
                })
                n += 1
                # Pause after every 20 successful adds to stay under Telegram's flood limits.
                if n % 20 == 0:
                    print(
                        f'\nWaiting {time_sleep / 60} minutes to avoid Flood Error.\n'
                    )
                    time.sleep(time_sleep)
                else:
                    time.sleep(time_sleep / 15)
            except PeerFloodError:
                print(
                    "\nGetting Flood Error from telegram. Script is stopping now. Please try again after some time.\n"
                )
                try:
                    rows.export_to_xlsx(users_added,
                                        "usersAddedBeforeFloodError.xlsx")
                except Exception:
                    print('\nCould not write to the file provided!\n')
                click.Context(teladduser).exit(code=1)
            except UserPrivacyRestrictedError:
                print(
                    "\nThe user's privacy settings do not allow you to do this. Skipping.\n"
                )
            except ValueError as err:
                print(f'\n{err} - Skipping.\n')
            except UserChannelsTooMuchError:
                print(
                    f'\nThe user {user.username_normal} you tried to add is already in too many channels/supergroups\n'
                )
            except FloodWaitError as err:
                print('\nHave to sleep', err.seconds, 'seconds\n')
                time.sleep(err.seconds)
            except KeyboardInterrupt:
                print('\nExecution was interrupted by user.\n')
                click.Context(teladduser).exit(code=1)
            except Exception:
                traceback.print_exc()
                print("\nUnexpected Error\n")
                continue
        else:
            users_added.append({
                'username_normal': user.username_normal,
                'nome': user.nome,
                'grupocanal': user.grupocanal,
                'conta_de_envio': user.conta_de_envio,
                'log': user.log,
            })
    try:
        rows.export_to_xlsx(users_added, file)
    except Exception:
        traceback.print_exc()
        print('\nCould not write to the file provided!\n')
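Not shown in this snippet: teladduser is presumably a click command, decorated with something like @click.command() plus options supplying file and time_sleep, and config() here is python-decouple's, reading API_ID, API_HASH and PHONE from a .env file.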