Example #1
0
def staff_nametag_export(modeladmin, request, queryset):
    """Exports certain staff data in ods format, containing the necessary information for the name tag production application.
    The produced ods file is the input for the name tag Java aplication.
    """
    EMPTY = '~'
    # prename, name, tutor initial, tutor label, orga label + 4 padding cells.
    # Using a constant (instead of measuring the first row, as before) also
    # works for an empty queryset, where the old code passed cols=None.
    ROW_LENGTH = 9
    table = []

    for person in queryset.filter(Q(is_tutor=True) | Q(is_orga=True)):
        tutor = EMPTY
        orga = EMPTY
        if person.is_tutor:
            # Distinguish master tutors from regular tutors.
            if "master" in str(person.tutor_for).lower():
                tutor = 'MASTER'
            else:
                tutor = 'TUTOR'
        if person.is_orga:
            orga = 'ORGA'
        row = [person.prename, person.name, tutor[0], tutor, orga,] + [EMPTY] * 4
        table.append(row)

    out_stream = io.BytesIO()
    with odswriter.writer(out_stream) as out:
        # need to specify number of columns for jOpenDocument compatibility
        sheet = out.new_sheet("Staff", cols=ROW_LENGTH)
        sheet.writerows(table)

    response = HttpResponse(out_stream.getvalue(), content_type="application/vnd.oasis.opendocument.spreadsheet")
    # name the file according to the expectations of the Java name tag application
    response['Content-Disposition'] = 'attachment; filename="tutoren.ods"'
    return response
Example #2
0
def liste_tuteurs_seance():
    """Show the tutor registration list for a session, or download it as ods.

    The session id is read from the ``session`` query parameter.  With
    ``?download=true`` the list is returned as an OpenDocument spreadsheet
    attachment; otherwise the HTML template is rendered.
    """
    session = model.Session.get(int(request.args['session']))
    rows = controller.get_tutor_registration_list_rows(session)

    if request.args.get('download', 'false') == 'true':
        def pred(row):
            # Flatten one registration row into a tuple of spreadsheet cells.
            return (
                    row.tutor.name,
                    row.tutor.email,
                    ', '.join(x.name for x in row.subjects1),
                    ', '.join(x.name for x in row.subjects2),
                    row.comment)
        # NOTE(review): this local `io` shadows the stdlib module name.
        # odswriter emits zip (bytes) data, so StringIO here presumably
        # implies Python 2 — confirm; on Python 3 this would need BytesIO.
        io = StringIO()
        with odswriter.writer(io) as odsfile:
            # Header row (French).  pred() yields only 5 values, so the
            # 6th column ('Confirmation') stays empty on data rows.
            odsfile.writerow(['Nom', 'Email', u'Matière principale',
                u'Autres matières', 'Commentaire', 'Confirmation'])
            for row in rows:
                odsfile.writerow(pred(row))
        # Rewind so the response streams the buffer from the beginning.
        io.seek(0)
        response = Response(io,
                mimetype='application/vnd.oasis.opendocument.spreadsheet')
        response.headers["Content-Disposition"] = "attachment; filename=Tuteurs_seance.ods"
        return response
    else:
        return render_template('gestion_soutien/liste_tuteurs_seance.html',
                session=session,
                rows=rows)
Example #3
0
def staff_overview_export(modeladmin, request, queryset):
    """Exports an overview of the staff containing contact data and field of duty.

    One sheet per role (orgas, tutors, helpers); a person appears on every
    sheet whose role flag is set.
    """
    Sheetdata = namedtuple('Sheetdata', 'title extra_header rows')

    tutors, orgas, helpers = [], [], []
    for person in queryset.order_by('name', 'prename'):
        contact = [person.prename, person.name, person.email, person.phone]
        if person.is_tutor:
            tutors.append(contact + [str(person.tutor_for)])
        if person.is_orga:
            orgas.append(contact + [' / '.join(str(job) for job in person.orga_jobs.all())])
        if person.is_helper:
            helpers.append(contact + [' / '.join(str(job) for job in person.helper_jobs.all())])

    common_header = [_('Vorname'), _('Nachname'), _('E-Mail'), _('Handy'),]

    out_stream = io.BytesIO()
    with odswriter.writer(out_stream) as out:
        sheet_specs = (
            Sheetdata(_('Orgas'), _('Verantwortlich für ...'), orgas),
            Sheetdata(_('Tutoren'), _('Betreut ...'), tutors),
            Sheetdata(_('Helfer'), _('Hilft bei ...'), helpers),
        )
        for spec in sheet_specs:
            sheet = out.new_sheet(spec.title)
            sheet.writerow(common_header + [spec.extra_header])
            sheet.writerows(spec.rows)

    response = HttpResponse(out_stream.getvalue(), content_type="application/vnd.oasis.opendocument.spreadsheet")
    response['Content-Disposition'] = 'attachment; filename="Personal.ods"'
    return response
Example #4
0
 def saveAttributes(self, active):
     """Export attribute tables to an ods file chosen via a save dialog.

     active: if truthy, export only the tables of the currently active
     tab; otherwise export the tables of every tab in the widget.
     Returns True on success, False on IOError, and None (implicitly)
     when the user cancels the file dialog.
     """
     file = QFileDialog.getSaveFileName(self, self.tr('Save in...'),'', self.tr('OpenDocument Spreadsheet (*.ods)'))
     if file:
         try:
             with ods.writer(open(file,"wb")) as odsfile:
                 tabs = None
                 if active:
                     tabs = self.tabWidget.currentWidget().findChildren(QtGui.QTableWidget)
                 else:
                     tabs = self.tabWidget.findChildren(QtGui.QTableWidget)
                 for table in reversed(tabs):
                     sheet = odsfile.new_sheet(table.title[:20]+'...') # For each table widget, a new sheet is created; sheet names are capped at 20 chars
                     sheet.writerow([table.title]) # As the sheet name's length is limited, the full name of the layer is written in the first row
                     nb_row = table.rowCount()
                     nb_col = table.columnCount()
                     
                     # Fetching and writing of the table's header
                     header = []
                     for i in range(0,nb_col):
                         header.append(table.horizontalHeaderItem(i).text())
                     sheet.writerow(header)
                     
                     # Fetching and writing of the table's items;
                     # hidden rows are read but not written out.
                     for i in range(0,nb_row):
                         row = []
                         for j in range(0,nb_col):
                             row.append(table.item(i,j).text())
                         if not table.isRowHidden(i):
                             sheet.writerow(row)
                 return True
         except IOError:
             QMessageBox.critical(self, self.tr('Error'), self.tr('The file can\'t be written.')+'\n'+self.tr('Maybe you don\'t have the rights or are trying to overwrite an opened file.'))
             return False
Example #5
0
def cashiers(cashier_ids=None):
    """Generate one ods cash report per cashier (Python 2 code).

    cashier_ids: comma-separated account ids; prints USAGE and returns
    when missing.  Writes one file "report_<name>.ods" per cashier with
    one sheet per configured event date: a header row, one row per
    payment transition, then three totals rows (card / cash / overall).
    """
    if not cashier_ids: print USAGE; return

    init_command()

    accounts = AccountService()
    cash = CashPaymentService()

    # Event dates come from configuration, parsed with the shared format.
    dates = [ datetime.strptime(d,DATE_FORMAT) for d in config.EVENT_DATES ]
    cashiers = [ accounts.get_one(c,check_ownership=False) for c in cashier_ids.split(",") ]

    for cashier in cashiers:
        filename = "report_{}.ods".format(parameterize(cashier.name))
        with ods.writer(open(filename,"wb")) as odsfile:
            for date in dates:
                # One sheet per day, named "dia <day>" (Portuguese).
                sheet = odsfile.new_sheet(date.strftime("dia %d"))
                sheet.writerow(["hora","modo","valor","id","nome","produto"])
                report = cash.for_cashier(cashier, date)

                for payment in report:
                    person = Person(payment.purchase)
                    for transition in payment.transitions:
                        row =  [
                            transition.created.time(), transition.mode, Decimal(payment.amount),
                            person.id, person.name, person.product.description
                        ]
                        sheet.writerow(row)

                # Sum the amounts of payments matching a predicate, for
                # the totals rows below.
                summer = lambda f: sum([ x.amount for x in filter(f, report) ])

                sheet.writerow([None, u"total cartão",   summer(lambda p: p.mode == 'card') ])
                sheet.writerow([None, u"total dinheiro", summer(lambda p: p.mode == 'cash') ])
                sheet.writerow([None, u"total geral",    summer(lambda p: True            ) ])
Example #6
0
def tutorgroup_export(modeladmin, request, queryset):
    """Exports group names with associated tutors in ods format.
    The produced ods file serves as an input for the name tag Java aplication.
    """
    table = []
    # default=0 keeps an empty selection from raising ValueError
    # (the original crashed on max() of an empty sequence).
    max_number_of_tutors = max(
        (group.tutors.count() for group in queryset), default=0)
    head_row = ['Gruppenname', 'Gruppenbild']
    for i in range(1, max_number_of_tutors+1):
        head_row.extend(['Tutor ' + str(i), "Nummer Tutor " + str(i)])
    table.append(head_row)
    for group in queryset.order_by('name'):
        # The icon cell is derived from the lower-cased group name.
        row = [group.name, 'icon_' + group.name.lower()]
        for tutor in group.tutors.all():
            row.extend([str(tutor), tutor.phone])
        table.append(row)

    out_stream = io.BytesIO()
    with odswriter.writer(out_stream) as out:
        # need to specify number of columns for jOpenDocument compatibility
        sheet = out.new_sheet("Gruppen", cols=2*max_number_of_tutors+2)
        sheet.writerows(table)

    response = HttpResponse(out_stream.getvalue(), content_type="application/vnd.oasis.opendocument.spreadsheet")
    # name the file according to the expectations of the Java name tag application
    response['Content-Disposition'] = 'attachment; filename="gruppen.ods"'
    return response
 def test_col_padding(self):
     """A sheet created with a fixed column count accepts shorter rows."""
     buffer = io.BytesIO()
     rows = [
         ["One"],
         ["Two", "Four", "Sixteen"],
         ["Three", "Nine", "Twenty seven"],
     ]
     with ods.writer(buffer) as workbook:
         workbook.new_sheet("My Sheet", cols=3).writerows(rows)
 def open(self):
     """Open the base writer and attach an odswriter backend to it."""
     BOMWriterBase.open(self)
     # odswriter is an optional dependency; fail with install instructions.
     try:
         import odswriter
     except ImportError:
         raise ImportError("Please install odswriter: `pip install odswriter`")
     self.writer = odswriter.writer(self.f)
     self.unicode_support = True
 def open(self):
     """Open the base writer and attach an odswriter backend to it."""
     BOMWriterBase.open(self)
     # odswriter is an optional dependency; fail with install instructions.
     try:
         import odswriter
     except ImportError:
         raise ImportError(
             "Please install odswriter: `pip install odswriter`")
     self.writer = odswriter.writer(self.f)
     self.unicode_support = True
 def test_multi_sheet(self):
     """Writing two independent sheets into one workbook should not raise."""
     buffer = io.BytesIO()
     bear_names = ["American Black Bear", "Asiatic Black Bear", "Brown Bear", "Giant Panda", "Qinling Panda",
                   "Sloth Bear", "Sun Bear", "Polar Bear", "Spectacled Bear"]
     sloth_names = ["Pygmy Three-Toed Sloth", "Maned Sloth", "Pale-Throated Sloth", "Brown-Throated Sloth",
                    "Linneaeus's Two-Twoed Sloth", "Hoffman's Two-Toed Sloth"]
     with ods.writer(buffer) as workbook:
         workbook.new_sheet("Bears").writerow(bear_names)
         workbook.new_sheet("Sloths").writerow(sloth_names)
 def test_single_sheet(self):
     """Smoke-test every supported cell type on the default (single) sheet."""
     buffer = io.BytesIO()
     with ods.writer(buffer) as odsfile:
         odsfile.writerow(["String", "ABCDEF123456", "123456"])
         # Numeric cells: int, float and Decimal are all supported.
         odsfile.writerow(["Float", 1, 123, 123.123, decimal.Decimal("10.321")])
         odsfile.writerow(["Date/DateTime", datetime.datetime.now(), datetime.date(1989, 11, 9)])
         odsfile.writerow(["Time", datetime.time(13, 37), datetime.time(16, 17, 18)])
         odsfile.writerow(["Bool", True, False, True])
         odsfile.writerow(["Formula", 1, 2, 3, ods.Formula("IF(A1=2,B1,C1)")])
     # The archive must contain at least some bytes once closed.
     self.assertGreater(len(buffer.getvalue()), 0)
Example #12
0
def listToOds(fname, headers, _list, sheet=""):
    """Write rows to an ods file.

    fname: output file name.
    headers: header row; skipped when the empty list is passed.
    _list: iterable of rows to write.
    sheet: optional sheet name; when "" the workbook's default sheet is used.
    """
    # Context managers close both the writer and the file handle even if
    # writing raises (the original leaked the open file on error).
    with open(fname, "wb") as out_file:
        with odswriter.writer(out_file) as ods:
            # Write into a named sheet when one is requested.
            active = ods.new_sheet(sheet) if sheet != "" else ods
            if headers != []:
                active.writerow(headers)
            for item in _list:
                active.writerow(item)
Example #13
0
def launder_through_gnumeric(rows):
    """
        Saves rows into an ods, uses ssconvert (based on gnumeric) to convert to a CSV and loads
        the rows from that CSV.
    """
    with TempDir() as d:
        # Make an ODS; explicit `with open(...)` closes the file handle
        # (the original left both file objects to the garbage collector).
        temp_ods = os.path.join(d.path, "test.ods")
        temp_csv = os.path.join(d.path, "test.csv")
        with open(temp_ods, "wb") as ods_stream:
            with ods.writer(ods_stream) as odsfile:
                odsfile.writerows(rows)

        # Convert it to a CSV
        p = subprocess.Popen(["ssconvert", temp_ods, temp_csv])
        p.wait()

        # Read the CSV
        with open(temp_csv) as csv_stream:
            return list(csv.reader(csv_stream))
Example #14
0
def write_data(models, outfile='peewee_models.ods', overwrite=False):
    '''Write model data from process_file() to ODS output.

    models: mapping of model name -> list of field dicts (each with at
    least 'name' and 'type'; optionally 'options_string'/'options_dict').
    outfile: destination path; overwrite: allow clobbering an existing file.
    Raises ValueError when the file exists and overwrite is False.
    '''
    outfile = os.path.abspath(outfile)
    if (not overwrite) and os.path.exists(outfile):
        # Python 3-compatible raise (the original used the Python 2-only
        # `raise ValueError, '...'` statement form).
        raise ValueError('File already exists!')
    optindex = get_option_table_index(models)
    with ods.writer(open(outfile, 'wb')) as odsfile:
        for model, fieldlist in models.items():
            modelsheet = odsfile.new_sheet(model)
            modelsheet.writerow(['name', 'type'] + list(optindex))
            for field in fieldlist:
                line = [field['name'], field['type']] + [''] * len(optindex)
                # `in` replaces the Python 2-only dict.has_key().
                if 'options_string' in field:
                    if 'options_dict' not in field:
                        field['options_dict'] = options_to_dict(field['options_string'])
                    # Place each option value in its column (offset by the
                    # two fixed 'name'/'type' columns).
                    for option, value in field['options_dict'].items():
                        line[optindex.index(option)+2] = value
                modelsheet.writerow(line)
    print('Models written to %s' % outfile)
Example #15
0
def write_data(models, outfile='peewee_models.ods', overwrite=False):
    '''Write model data from process_file() to ODS output.

    models: mapping of model name -> list of field dicts (each with at
    least 'name' and 'type'; optionally 'options_string'/'options_dict').
    outfile: destination path; overwrite: allow clobbering an existing file.
    Raises ValueError when the file exists and overwrite is False.
    '''
    outfile = os.path.abspath(outfile)
    if (not overwrite) and os.path.exists(outfile):
        # Python 3-compatible raise (the original used the Python 2-only
        # `raise ValueError, '...'` statement form).
        raise ValueError('File already exists!')
    optindex = get_option_table_index(models)
    with ods.writer(open(outfile, 'wb')) as odsfile:
        for model, fieldlist in models.items():
            modelsheet = odsfile.new_sheet(model)
            modelsheet.writerow(['name', 'type'] + list(optindex))
            for field in fieldlist:
                line = [field['name'], field['type']] + [''] * len(optindex)
                # `in` replaces the Python 2-only dict.has_key().
                if 'options_string' in field:
                    if 'options_dict' not in field:
                        field['options_dict'] = options_to_dict(
                            field['options_string'])
                    # Place each option value in its column (offset by the
                    # two fixed 'name'/'type' columns).
                    for option, value in field['options_dict'].items():
                        line[optindex.index(option) + 2] = value
                modelsheet.writerow(line)
    print('Models written to %s' % outfile)
def launder_through_lo(rows):
    """
        Saves rows into an ods, uses LibreOffice to convert to a CSV and loads
        the rows from that CSV.
    """
    with TempDir() as d:
        # Make an ODS; explicit `with open(...)` closes the file handle
        # (the original left both file objects to the garbage collector).
        temp_ods = os.path.join(d.path, "test.ods")
        with open(temp_ods, "wb") as ods_stream:
            with ods.writer(ods_stream) as odsfile:
                odsfile.writerows(rows)

        # Convert it to a CSV
        p = subprocess.Popen(["libreoffice", "--headless", "--convert-to",
                              "csv", "test.ods"],
                             cwd=d.path)
        p.wait()

        # Read the CSV
        temp_csv = os.path.join(d.path, "test.csv")
        with open(temp_csv) as csv_stream:
            return list(csv.reader(csv_stream))
Example #17
0
#!/usr/bin/env python

'''
# PLACEHOLDER CODE
# copy pasted straight from the example at https://github.com/mmulqueen/odswriter

TODO:
  * Parse datapackage.json and read the JSON table schema
  * Lint and store rows
'''

import datetime
import decimal
import odswriter as ods

# Exercise every cell type odswriter supports.
with ods.writer(open("test.ods","wb")) as odsfile:
    odsfile.writerow(["String", "ABCDEF123456", "123456"])
    # The Python 2 long literal (2L) was dropped so this also runs on
    # Python 3, where `2L` is a syntax error; the written value is the same.
    odsfile.writerow(["Float", 1, 123, 123.123, 2, decimal.Decimal("10.321")])
    odsfile.writerow(["Date/DateTime", datetime.datetime.now(), datetime.date(1989,11,9)])
    odsfile.writerow(["Time",datetime.time(13,37),datetime.time(16,17,18)])
    odsfile.writerow(["Bool",True,False,True])
    odsfile.writerow(["Formula",1,2,3,ods.Formula("IF(A1=2,B1,C1)")])

Example #18
0
        return str(number)
    else:
        return '\n'.join([str(number) + letter for letter in mapping[str(number)]])

# Ask the user to pick the input file via a file-open dialog.
orig_path_filename = askopenfilename()
print(orig_path_filename)

# Base name of the chosen file, without directory or extension.
orig_filename = orig_path_filename.split("/")[-1].split('.')[0]
print(orig_filename)

# Directory of the chosen file, with a trailing slash.
dest_path = "/".join(orig_path_filename.split("/")[:-1]) + "/"
print(dest_path)

# Output goes next to the input as "<name>_letras.ods".
# dest_path_filename = dest_path + orig_filename + "_letras.csv"
dest_path_filename = dest_path + orig_filename + "_letras.ods"
print(dest_path_filename)

with ods.writer(open(dest_path_filename, 'wb')) as fout:
# with open(dest_path_filename, 'w') as fout:
    with open(orig_path_filename) as fin:
        for line in fin:
            codigo = line.strip()
            if len(codigo) > 6:
                # Codes longer than 6 characters are passed through unchanged.
                print('Warning: bypassing ' + codigo)
                # print(codigo, file=fout)
                fout.writerow([codigo.strip()])
            else:
                # Expand the code into its letter variants (see addletras)
                # and write one row per variant.
                # print(addletras(codigo), file=fout)
                for cod_with_letra in addletras(codigo).split():
                    fout.writerow([cod_with_letra.strip()])
Example #19
0
def content(scenario_props, scenario_id, base_name, site_id, supply_id, user):
    now = Datetime.now(pytz.utc)
    report_context = {}
    future_funcs = {}
    report_context['future_funcs'] = future_funcs

    sess = None
    try:
        sess = Session()
        if scenario_props is None:
            scenario_contract = Contract.get_supplier_by_id(sess, scenario_id)
            scenario_props = scenario_contract.make_properties()
            base_name.append(scenario_contract.name)

        for contract in sess.query(Contract).join(MarketRole).filter(
                MarketRole.code == 'Z'):
            try:
                props = scenario_props[contract.name]
            except KeyError:
                continue

            try:
                rate_start = props['start_date']
            except KeyError:
                raise BadRequest(
                    "In " + scenario_contract.name + " for the rate " +
                    contract.name + " the start_date is missing.")

            if rate_start is not None:
                rate_start = rate_start.replace(tzinfo=pytz.utc)

            lib = importlib.import_module('chellow.' + contract.name)

            if hasattr(lib, 'create_future_func'):
                future_funcs[contract.id] = {
                    'start_date': rate_start,
                    'func': lib.create_future_func(
                        props['multiplier'], props['constant'])}

        start_date = scenario_props['scenario_start']
        if start_date is None:
            start_date = Datetime(
                now.year, now.month, 1, tzinfo=pytz.utc)
        else:
            start_date = start_date.replace(tzinfo=pytz.utc)

        base_name.append(
            hh_format(start_date).replace(' ', '_').replace(':', '').
            replace('-', ''))
        months = scenario_props['scenario_duration']
        base_name.append('for')
        base_name.append(str(months))
        base_name.append('months')
        finish_date = start_date + relativedelta(months=months)

        if 'kwh_start' in scenario_props:
            kwh_start = scenario_props['kwh_start']
        else:
            kwh_start = None

        if kwh_start is None:
            kwh_start = chellow.computer.forecast_date()
        else:
            kwh_start = kwh_start.replace(tzinfo=pytz.utc)

        sites = sess.query(Site).join(SiteEra).join(Era).filter(
            Era.start_date <= finish_date,
            or_(
                Era.finish_date == null(),
                Era.finish_date >= start_date)).distinct().order_by(Site.code)
        if site_id is not None:
            site = Site.get_by_id(sess, site_id)
            sites = sites.filter(Site.id == site.id)
            base_name.append('site')
            base_name.append(site.code)
        if supply_id is not None:
            supply = Supply.get_by_id(sess, supply_id)
            base_name.append('supply')
            base_name.append(str(supply.id))
            sites = sites.filter(Era.supply == supply)

        running_name, finished_name = chellow.dloads.make_names(
            '_'.join(base_name) + '.ods', user)

        rf = open(running_name, "wb")
        f = odswriter.writer(rf, '1.1')
        group_tab = f.new_sheet("Site Level")
        sup_tab = f.new_sheet("Supply Level")
        changes = defaultdict(list, {})

        try:
            kw_changes = scenario_props['kw_changes']
        except KeyError:
            kw_changes = ''

        for row in csv.reader(io.StringIO(kw_changes)):
            if len(''.join(row).strip()) == 0:
                continue
            if len(row) != 4:
                raise BadRequest(
                    "Can't interpret the row " + str(row) + " it should be of "
                    "the form SITE_CODE, USED / GENERATED, DATE, MULTIPLIER")
            site_code, typ, date_str, kw_str = row
            date = Datetime.strptime(date_str.strip(), "%Y-%m-%d").replace(
                tzinfo=pytz.utc)
            changes[site_code.strip()].append(
                {
                    'type': typ.strip(), 'date': date,
                    'multiplier': float(kw_str)})

        sup_header_titles = [
            'imp-mpan-core', 'exp-mpan-core', 'metering-type', 'source',
            'generator-type', 'supply-name', 'msn', 'pc', 'site-id',
            'site-name', 'associated-site-ids', 'month']
        site_header_titles = [
            'site-id', 'site-name', 'associated-site-ids', 'month',
            'metering-type', 'sources', 'generator-types']
        summary_titles = [
            'import-net-kwh', 'export-net-kwh', 'import-gen-kwh',
            'export-gen-kwh', 'import-3rd-party-kwh', 'export-3rd-party-kwh',
            'displaced-kwh', 'used-kwh', 'used-3rd-party-kwh',
            'import-net-gbp', 'export-net-gbp', 'import-gen-gbp',
            'export-gen-gbp', 'import-3rd-party-gbp', 'export-3rd-party-gbp',
            'displaced-gbp', 'used-gbp', 'used-3rd-party-gbp',
            'billed-import-net-kwh', 'billed-import-net-gbp']

        title_dict = {}
        for cont_type, con_attr in (
                ('mop', Era.mop_contract), ('dc', Era.hhdc_contract),
                ('imp-supplier', Era.imp_supplier_contract),
                ('exp-supplier', Era.exp_supplier_contract)):
            titles = []
            title_dict[cont_type] = titles
            conts = sess.query(Contract).join(con_attr) \
                .join(Era.supply).join(Source).filter(
                    Era.start_date <= start_date,
                    or_(
                        Era.finish_date == null(),
                        Era.finish_date >= start_date),
                    Source.code.in_(('net', '3rd-party'))
                ).distinct().order_by(Contract.id)
            if supply_id is not None:
                conts = conts.filter(Era.supply_id == supply_id)
            for cont in conts:
                title_func = chellow.computer.contract_func(
                    report_context, cont, 'virtual_bill_titles', None)
                if title_func is None:
                    raise Exception(
                        "For the contract " + cont.name +
                        " there doesn't seem to be a "
                        "'virtual_bill_titles' function.")
                for title in title_func():
                    if title not in titles:
                        titles.append(title)

        sup_tab.writerow(
            sup_header_titles + summary_titles + [None] +
            ['mop-' + t for t in title_dict['mop']] +
            [None] + ['dc-' + t for t in title_dict['dc']] + [None] +
            ['imp-supplier-' + t for t in title_dict['imp-supplier']] +
            [None] + ['exp-supplier-' + t for t in title_dict['exp-supplier']])
        group_tab.writerow(site_header_titles + summary_titles)

        sites = sites.all()
        month_start = start_date
        while month_start < finish_date:
            month_finish = month_start + relativedelta(months=1) - HH
            for site in sites:
                site_changes = changes[site.code]
                site_associates = set()
                site_category = None
                site_sources = set()
                site_gen_types = set()
                site_month_data = defaultdict(int)
                for group in site.groups(
                        sess, month_start, month_finish, False):
                    site_associates.update(
                        set(
                            s.code for s in group.sites
                            if s.code != site.code))
                    for cand_supply in group.supplies:
                        site_sources.add(cand_supply.source.code)
                        if cand_supply.generator_type is not None:
                            site_gen_types.add(cand_supply.generator_type.code)
                        for cand_era in sess.query(Era).filter(
                                Era.supply == cand_supply,
                                Era.start_date <= group.finish_date, or_(
                                    Era.finish_date == null(),
                                    Era.finish_date >= group.start_date)). \
                                options(
                                    joinedload(Era.channels),
                                    joinedload(Era.pc),
                                    joinedload(Era.mtc).joinedload(
                                        Mtc.meter_type)):
                            if site_category != 'hh':
                                if cand_era.pc.code == '00':
                                    site_category = 'hh'
                                elif site_category != 'amr':
                                    if len(cand_era.channels) > 0:
                                        site_category = 'amr'
                                    elif site_category != 'nhh':
                                        if cand_era.mtc.meter_type.code \
                                                not in ['UM', 'PH']:
                                            site_category = 'nhh'
                                        else:
                                            site_category = 'unmetered'

                for group in site.groups(
                        sess, month_start, month_finish, True):
                    calcs = []
                    deltas = defaultdict(int)
                    group_associates = set(
                        s.code for s in group.sites if s.code != site.code)
                    for supply in group.supplies:
                        if supply_id is not None and supply.id != supply_id:
                            continue
                        for era in sess.query(Era).join(Supply) \
                                .join(Source).filter(
                                    Era.supply == supply,
                                    Era.start_date <= group.finish_date, or_(
                                        Era.finish_date == null(),
                                        Era.finish_date >= group.start_date)) \
                                .options(
                                    joinedload(Era.ssc),
                                    joinedload(Era.hhdc_contract),
                                    joinedload(Era.mop_contract),
                                    joinedload(Era.imp_supplier_contract),
                                    joinedload(Era.exp_supplier_contract),
                                    joinedload(Era.channels),
                                    joinedload(Era.imp_llfc).joinedload(
                                        Llfc.voltage_level),
                                    joinedload(Era.exp_llfc).joinedload(
                                        Llfc.voltage_level),
                                    joinedload(Era.cop),
                                    joinedload(Era.supply).joinedload(
                                        Supply.dno_contract),
                                    joinedload(Era.mtc).joinedload(
                                        Mtc.meter_type)):

                            if era.start_date > group.start_date:
                                ss_start = era.start_date
                            else:
                                ss_start = group.start_date

                            if hh_before(era.finish_date, group.finish_date):
                                ss_finish = era.finish_date
                            else:
                                ss_finish = group.finish_date

                            if era.imp_mpan_core is None:
                                imp_ss = None
                            else:
                                imp_ss = SupplySource(
                                    sess, ss_start, ss_finish, kwh_start, era,
                                    True, None, report_context)

                            if era.exp_mpan_core is None:
                                exp_ss = None
                                measurement_type = imp_ss.measurement_type
                            else:
                                exp_ss = SupplySource(
                                    sess, ss_start, ss_finish, kwh_start, era,
                                    False, None, report_context)
                                measurement_type = exp_ss.measurement_type

                            order = meter_order[measurement_type]
                            calcs.append(
                                (
                                    order, era.imp_mpan_core,
                                    era.exp_mpan_core, imp_ss, exp_ss))

                            if imp_ss is not None and len(era.channels) == 0:
                                for hh in imp_ss.hh_data:
                                    deltas[hh['start-date']] += hh['msp-kwh']

                    imp_net_delts = defaultdict(int)
                    exp_net_delts = defaultdict(int)
                    imp_gen_delts = defaultdict(int)

                    displaced_era = chellow.computer.displaced_era(
                        sess, group, group.start_date, group.finish_date)
                    site_ds = chellow.computer.SiteSource(
                        sess, site, group.start_date, group.finish_date,
                        kwh_start, None, report_context, displaced_era)

                    for hh in site_ds.hh_data:
                        try:
                            delta = deltas[hh['start-date']]
                            hh['import-net-kwh'] += delta
                            hh['used-kwh'] += delta
                        except KeyError:
                            pass

                    for hh in site_ds.hh_data:
                        for change in site_changes:
                            if change['type'] == 'used' and \
                                    change['date'] <= hh['start-date']:
                                used = change['multiplier'] * hh['used-kwh']
                                exp_net = max(
                                    0, hh['import-gen-kwh'] -
                                    hh['export-gen-kwh'] -
                                    used)
                                exp_net_delt = exp_net - hh['export-net-kwh']
                                exp_net_delts[hh['start-date']] += exp_net_delt
                                displaced = hh['import-gen-kwh'] - \
                                    hh['export-gen-kwh'] - exp_net
                                imp_net = used - displaced
                                imp_delt = imp_net - hh['import-net-kwh']
                                imp_net_delts[hh['start-date']] += imp_delt

                                hh['import-net-kwh'] = imp_net
                                hh['used-kwh'] = used
                                hh['export-net-kwh'] = exp_net
                                hh['msp-kwh'] = displaced
                            elif change['type'] == 'generated' and \
                                    change['date'] <= hh['start-date']:
                                imp_gen = change['multiplier'] * \
                                    hh['import-gen-kwh']
                                imp_gen_delt = imp_gen - hh['import-gen-kwh']
                                exp_net = max(
                                    0, imp_gen - hh['export-gen-kwh'] -
                                    hh['used-kwh'])
                                exp_net_delt = exp_net - hh['export-net-kwh']
                                exp_net_delts[hh['start-date']] += exp_net_delt

                                displaced = imp_gen - hh['export-gen-kwh'] - \
                                    exp_net

                                imp_net = hh['used-kwh'] - displaced
                                imp_net_delt = imp_net - hh['import-net-kwh']
                                imp_net_delts[hh['start-date']] += imp_net_delt

                                imp_gen_delts[hh['start-date']] += imp_gen_delt

                                hh['import-net-kwh'] = imp_net
                                hh['export-net-kwh'] = exp_net
                                hh['import-gen-kwh'] = imp_gen
                                hh['msp-kwh'] = displaced

                    if displaced_era is not None and supply_id is None:
                        month_data = {}
                        for sname in (
                                'import-net', 'export-net', 'import-gen',
                                'export-gen', 'import-3rd-party',
                                'export-3rd-party', 'msp', 'used',
                                'used-3rd-party', 'billed-import-net'):
                            for xname in ('kwh', 'gbp'):
                                month_data[sname + '-' + xname] = 0

                        month_data['used-kwh'] = \
                            month_data['displaced-kwh'] = \
                            sum(hh['msp-kwh'] for hh in site_ds.hh_data)

                        disp_supplier_contract = \
                            displaced_era.imp_supplier_contract
                        disp_vb_function = chellow.computer.contract_func(
                            report_context, disp_supplier_contract,
                            'displaced_virtual_bill', None)
                        if disp_vb_function is None:
                            raise BadRequest(
                                "The supplier contract " +
                                disp_supplier_contract.name +
                                " doesn't have the displaced_virtual_bill() "
                                "function.")
                        disp_vb_function(site_ds)
                        disp_supplier_bill = site_ds.supplier_bill

                        try:
                            gbp = disp_supplier_bill['net-gbp']
                        except KeyError:
                            disp_supplier_bill['problem'] += \
                                'For the supply ' + \
                                site_ds.mpan_core + \
                                ' the virtual bill ' + \
                                str(disp_supplier_bill) + \
                                ' from the contract ' + \
                                disp_supplier_contract.name + \
                                ' does not contain the net-gbp key.'

                        month_data['used-gbp'] = \
                            month_data['displaced-gbp'] = \
                            site_ds.supplier_bill['net-gbp']

                        out = [
                            None, None, displaced_era.make_meter_category(),
                            'displaced', None, None, None, None, site.code,
                            site.name,
                            ','.join(sorted(list(group_associates))),
                            month_finish] + \
                            [month_data[t] for t in summary_titles]

                        sup_tab.writerow(out)
                        for k, v in month_data.items():
                            site_month_data[k] += v
                    for i, (
                            order, imp_mpan_core, exp_mpan_core, imp_ss,
                            exp_ss) in enumerate(sorted(calcs, key=str)):
                        if imp_ss is None:
                            era = exp_ss.era
                        else:
                            era = imp_ss.era
                        supply = era.supply
                        source = supply.source
                        source_code = source.code
                        site_sources.add(source_code)
                        month_data = {}
                        for name in (
                                'import-net', 'export-net', 'import-gen',
                                'export-gen', 'import-3rd-party',
                                'export-3rd-party', 'displaced', 'used',
                                'used-3rd-party', 'billed-import-net'):
                            for sname in ('kwh', 'gbp'):
                                month_data[name + '-' + sname] = 0

                        if source_code == 'net':
                            delts = imp_net_delts
                        elif source_code == 'gen':
                            delts = imp_gen_delts
                        else:
                            delts = []

                        if len(delts) > 0 and imp_ss is not None:
                            for hh in imp_ss.hh_data:
                                diff = hh['msp-kwh'] + delts[hh['start-date']]
                                if diff < 0:
                                    hh['msp-kwh'] = 0
                                    hh['msp-kw'] = 0
                                    delts[hh['start-date']] -= hh['msp-kwh']
                                else:
                                    hh['msp-kwh'] += delts[hh['start-date']]
                                    hh['msp-kw'] += hh['msp-kwh'] / 2
                                    del delts[hh['start-date']]

                            left_kwh = sum(delts.values())
                            if left_kwh > 0:
                                first_hh = imp_ss.hh_data[0]
                                first_hh['msp-kwh'] += left_kwh
                                first_hh['msp-kw'] += left_kwh / 2

                        imp_supplier_contract = era.imp_supplier_contract
                        if imp_supplier_contract is not None:
                            import_vb_function = contract_func(
                                report_context, imp_supplier_contract,
                                'virtual_bill', None)
                            if import_vb_function is None:
                                raise BadRequest(
                                    "The supplier contract " +
                                    imp_supplier_contract.name +
                                    " doesn't have the virtual_bill() "
                                    "function.")
                            import_vb_function(imp_ss)
                            imp_supplier_bill = imp_ss.supplier_bill

                            try:
                                gbp = imp_supplier_bill['net-gbp']
                            except KeyError:
                                imp_supplier_bill['problem'] += \
                                    'For the supply ' + \
                                    imp_ss.mpan_core + \
                                    ' the virtual bill ' + \
                                    str(imp_supplier_bill) + \
                                    ' from the contract ' + \
                                    imp_supplier_contract.name + \
                                    ' does not contain the net-gbp key.'
                            if source_code in ('net', 'gen-net'):
                                month_data['import-net-gbp'] += gbp
                                month_data['used-gbp'] += gbp
                            elif source_code == '3rd-party':
                                month_data['import-3rd-party-gbp'] += gbp
                                month_data['used-gbp'] += gbp
                            elif source_code == '3rd-party-reverse':
                                month_data['export-3rd-party-gbp'] += gbp
                                month_data['used-gbp'] -= gbp

                            kwh = sum(
                                hh['msp-kwh'] for hh in imp_ss.hh_data)

                            if source_code in ('net', 'gen-net'):
                                month_data['import-net-kwh'] += kwh
                                month_data['used-kwh'] += kwh
                            elif source_code == '3rd-party':
                                month_data['import-3rd-party-kwh'] += kwh
                                month_data['used-kwh'] += kwh
                            elif source_code == '3rd-party-reverse':
                                month_data['export-3rd-party-kwh'] += kwh
                                month_data['used-kwh'] -= kwh
                            elif source_code in ('gen', 'gen-net'):
                                month_data['import-gen-kwh'] += kwh

                        exp_supplier_contract = era.exp_supplier_contract
                        if exp_supplier_contract is None:
                            kwh = sess.query(
                                func.coalesce(
                                    func.sum(
                                        cast(HhDatum.value, Float)), 0)). \
                                join(Channel).filter(
                                    Channel.era == era,
                                    Channel.channel_type == 'ACTIVE',
                                    Channel.imp_related == false()).scalar()
                            if source_code == 'gen':
                                month_data['export-net-kwh'] += kwh
                        else:
                            export_vb_function = contract_func(
                                report_context, exp_supplier_contract,
                                'virtual_bill', None)
                            export_vb_function(exp_ss)

                            exp_supplier_bill = exp_ss.supplier_bill
                            try:
                                gbp = exp_supplier_bill['net-gbp']
                            except KeyError:
                                exp_supplier_bill['problem'] += \
                                    'For the supply ' + \
                                    imp_ss.mpan_core + \
                                    ' the virtual bill ' + \
                                    str(imp_supplier_bill) + \
                                    ' from the contract ' + \
                                    imp_supplier_contract.name + \
                                    ' does not contain the net-gbp key.'

                            kwh = sum(hh['msp-kwh'] for hh in exp_ss.hh_data)

                            if source_code in ('net', 'gen-net'):
                                month_data['export-net-kwh'] += kwh
                                month_data['export-net-gbp'] += gbp
                            elif source_code in \
                                    ('3rd-party', '3rd-party-reverse'):
                                month_data['export-3rd-party-kwh'] += kwh
                                month_data['export-3rd-party-gbp'] += gbp
                                month_data['used-kwh'] -= kwh
                                month_data['used-gbp'] -= gbp
                            elif source_code == 'gen':
                                month_data['export-gen-kwh'] += kwh

                        sss = exp_ss if imp_ss is None else imp_ss
                        dc_contract = era.hhdc_contract
                        sss.contract_func(
                            dc_contract, 'virtual_bill')(sss)
                        dc_bill = sss.dc_bill
                        gbp = dc_bill['net-gbp']

                        mop_contract = era.mop_contract
                        mop_bill_function = sss.contract_func(
                            mop_contract, 'virtual_bill')
                        mop_bill_function(sss)
                        mop_bill = sss.mop_bill
                        gbp += mop_bill['net-gbp']

                        if source_code in ('3rd-party', '3rd-party-reverse'):
                            month_data['import-3rd-party-gbp'] += gbp
                        else:
                            month_data['import-net-gbp'] += gbp
                        month_data['used-gbp'] += gbp

                        if source_code in ('gen', 'gen-net'):
                            generator_type = supply.generator_type.code
                            site_gen_types.add(generator_type)
                        else:
                            generator_type = None

                        sup_category = era.make_meter_category()
                        if CATEGORY_ORDER[site_category] < \
                                CATEGORY_ORDER[sup_category]:
                            site_category = sup_category

                        for bill in sess.query(Bill).filter(
                                Bill.supply == supply,
                                Bill.start_date <= sss.finish_date,
                                Bill.finish_date >= sss.start_date):
                            bill_start = bill.start_date
                            bill_finish = bill.finish_date
                            bill_duration = (
                                bill_finish - bill_start).total_seconds() + \
                                (30 * 60)
                            overlap_duration = (
                                min(bill_finish, sss.finish_date) -
                                max(bill_start, sss.start_date)
                                ).total_seconds() + (30 * 60)
                            overlap_proportion = \
                                float(overlap_duration) / bill_duration
                            month_data['billed-import-net-kwh'] += \
                                overlap_proportion * float(bill.kwh)
                            month_data['billed-import-net-gbp'] += \
                                overlap_proportion * float(bill.net)

                        out = [
                            era.imp_mpan_core, era.exp_mpan_core,
                            sup_category, source_code,
                            generator_type, supply.name, era.msn, era.pc.code,
                            site.code, site.name,
                            ','.join(sorted(list(site_associates))),
                            month_finish] + [
                            month_data[t] for t in summary_titles] + [None] + [
                            (mop_bill[t] if t in mop_bill else None)
                            for t in title_dict['mop']] + [None] + \
                            [(dc_bill[t] if t in dc_bill else None)
                                for t in title_dict['dc']]
                        if imp_supplier_contract is None:
                            out += [None] * \
                                (len(title_dict['imp-supplier']) + 1)
                        else:
                            out += [None] + [
                                (
                                    imp_supplier_bill[t]
                                    if t in imp_supplier_bill else None)
                                for t in title_dict['imp-supplier']]
                        if exp_supplier_contract is not None:
                            out += [None] + [
                                (
                                    exp_supplier_bill[t]
                                    if t in exp_supplier_bill else None)
                                for t in title_dict['exp-supplier']]

                        for k, v in month_data.items():
                            site_month_data[k] += v
                        sup_tab.writerow(out)

                group_tab.writerow(
                    [
                        site.code, site.name,
                        ''.join(sorted(list(site_associates))),
                        month_finish, site_category,
                        ', '.join(sorted(list(site_sources))),
                        ', '.join(sorted(list(site_gen_types)))] +
                    [site_month_data[k] for k in summary_titles])
                sess.rollback()

            month_start += relativedelta(months=1)
    except BadRequest as e:
        msg = e.description + traceback.format_exc()
        sys.stderr.write(msg + '\n')
        group_tab.writerow(["Problem " + msg])
    except:
        msg = traceback.format_exc()
        sys.stderr.write(msg + '\n')
        group_tab.writerow(["Problem " + msg])
    finally:
        if sess is not None:
            sess.close()
        try:
            f.close()
            rf.close()
            os.rename(running_name, finished_name)
        except:
            msg = traceback.format_exc()
            r_name, f_name = chellow.dloads.make_names('error.txt', user)
            ef = open(r_name, "w")
            ef.write(msg + '\n')
            ef.close()
# Example #20
import datetime
import decimal

import odswriter as ods

# --- Single-sheet mode ---
# Open the target file in its own `with` so the OS-level handle is closed
# even if the ODS writer raises part-way through; the writer's context
# manager finalises the ODS archive on exit.
with open("test.ods", "wb") as single_file, \
        ods.writer(single_file, first_row_bold=True) as odsfile:
    odsfile.writerow(["String", "ABCDEF123456", "123456"])
    odsfile.writerow([
        "Multiline",
        "This is one line\nThis is two line\nThis is three line\nThis is a really stupendously long line that is just way too long, isn't it?  Maybe it is we'll never know!"
    ])
    # Mixed numeric types: ints, floats and Decimal all serialise as number
    # cells.  (A stale Python-2 note about a `2L` long literal was removed —
    # the row below contains no such literal.)
    odsfile.writerow(["Float", 1, 123, 123.123, decimal.Decimal("10.321")])
    odsfile.writerow(
        ["Date/DateTime",
         datetime.datetime.now(),
         datetime.date(1989, 11, 9)])
    odsfile.writerow(
        ["Time", datetime.time(13, 37),
         datetime.time(16, 17, 18)])
    odsfile.writerow(["Bool", True, False, True])
    odsfile.writerow(["Formula", 1, 2, 3, ods.Formula("IF(A1=2,B1,C1)")])

# --- Multiple-sheet mode ---
with open("test-multi.ods", "wb") as multi_file, \
        ods.writer(multi_file) as odsfile:
    bears = odsfile.new_sheet("Bears", first_row_bold=True)
    bears.writerow([
        "American Black Bear", "Asiatic Black Bear", "Brown Bear",
        "Giant Panda", "Qinling Panda", "Sloth Bear", "Sun Bear", "Polar Bear",
        "Spectacled Bear"
    ])
# Example #21
 def test_BytesIO(self):
     """Writing the fixture rows to an in-memory buffer yields non-empty ODS output."""
     buffer = io.BytesIO()
     with ods.writer(buffer) as sheet:
         sheet.writerows(self.rows)
     self.assertGreater(len(buffer.getvalue()), 0)
# Example #22
 def test_FileIO(self):
     """The writer accepts a real binary file object (not just BytesIO)."""
     tmp = tempfile.TemporaryFile(mode="wb")
     with ods.writer(tmp) as sheet:
         sheet.writerows(self.rows)
# Example #23
messagebox.showinfo("Ignore list", "Note: Ignoring\n" + ignorelist_message)

# Ask the user for the input file and echo each derived path for visibility.
orig_path_filename = askopenfilename()
print(orig_path_filename)

# Base name of the chosen file: last path segment, up to its first dot.
orig_filename = orig_path_filename.split("/")[-1].partition('.')[0]
print(orig_filename)

# Directory of the chosen file, with a trailing slash.
dest_path = orig_path_filename.rpartition("/")[0] + "/"
print(dest_path)

# Output goes next to the input, as "<name>_letras.ods".
dest_path_filename = dest_path + orig_filename + "_letras.ods"
print(dest_path_filename)

with ods.writer(open(dest_path_filename, 'wb')) as fout:
    with open(orig_path_filename) as fin:
        for line in fin:
            codigo = line.strip()
            # Skip any code whose 6-character prefix is on the ignore list.
            if codigo[:6] in ignorelist:
                continue
            if len(codigo) > 6:
                # Over-long codes are written through unchanged, with a warning.
                print('Warning: bypassing ' + codigo)
                fout.writerow([codigo.strip()])
            else:
                # Expand the code into its per-letter variants, one row each.
                for cod_with_letra in addletras(codigo).split():
                    fout.writerow([cod_with_letra.strip()])