def parse(self, taggers=()):
    """Open the csv file and dump it in a tablib.Dataset object.

    Reads ``self.csv_path``, uses the first row as headers, cleans each
    subsequent row, tags it, and appends it to ``self.data`` while
    handling duplicate indexes (drop or merge depending on configuration).

    :param taggers: iterable of ``GoogleContactRow`` method names; each is
        called on every row and the returned tags are collected.
        Default is an empty tuple — the original ``taggers=list()`` was a
        mutable default argument (shared across calls); a tuple is safe
        and backward compatible since the value is only iterated.
    """
    self.logger.info("Will parse input %(csv_path)s csv file" % {"csv_path": self.csv_path})
    data = Dataset()
    with open(self.csv_path, "rb") as csv_file:
        google_contact = UnicodeReader(csv_file)
        for row_num, row in enumerate(google_contact):
            # First row carries the column headers.
            if row_num == 0:
                data.headers = row
                continue
            gRow = GoogleContactRow(headers=data.headers, row=row)
            gRow.standard_cleanup()
            gRow.format_names()
            # Collect tags from every configured tagger, de-duplicated.
            tags = []
            for tagger in taggers:
                tags += getattr(gRow, tagger)()
            tags = list(set(tags))
            # Get the row index
            index = format_index(gRow[data.headers.index("Name")])
            # Empty index: drop this row
            if not index:
                self.logger.info("Ignored row without index (%(row_num)d)" % {"row_num": row_num})
                continue
            # Duplicate?
            if self.is_duplicate(index):
                self.logger.info(
                    "Found duplicate row for %(name)s (num: %(row_num)d)" % {"name": index, "row_num": row_num}
                )
                # Drop this row
                if self.drop:
                    self.logger.debug("Dropped duplicate row %(row_num)d" % {"row_num": row_num})
                    continue
                # Merge this row into the previously seen one
                if self.merge:
                    row_dst = self.hash.index(index)
                    data[row_dst] = merge_lists(gRow, data[row_dst])
                    self.logger.debug(
                        "Merged duplicate row %(row_src)d with %(row_dst)d" % {"row_src": row_num, "row_dst": row_dst}
                    )
                    continue
            self.hash += (index,)
            data.append(gRow, tags=tags)
            self.logger.debug("row %d tags %s", row_num, tags)
    self.data = data
    self.logger.debug("File columns are:\n%s", "\n".join(self.data.headers))
def transpose(self):
    """Transpose a :class:`Dataset`, turning rows into columns and vice
    versa, returning a new ``Dataset`` instance. The first row of the
    original instance becomes the new header row."""
    # Nothing to rotate on an empty dataset.
    if not self:
        return
    # The first header is the "hinge": it stays in the header row while
    # everything else pivots around it.
    hinge = self.headers[0]
    result = Dataset()
    result.headers = [hinge] + self[hinge]
    for column in self.headers:
        # The hinge column is already part of the new headers.
        if column == hinge:
            continue
        # The old column name becomes the first cell of its new row.
        result.append(row=Row([column] + self[column]))
    return result
def test_export_csv(self):
    """CSV export of HACT history: verify status, headers and the exported row."""
    HactHistoryFactory(
        partner=self.partner, year=2017, partner_values=self.hact_data
    )
    response = self.forced_auth_req(
        "get", self.url, user=self.unicef_user, data={"format": "csv"}
    )
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    dataset = Dataset().load(response.content.decode('utf-8'), "csv")
    self.assertEqual(dataset.height, 1)
    self.assertEqual(dataset._get_headers(), [
        "Implementing Partner",
        "Partner Type",
        "Shared",
        "Shared IP",
        "TOTAL for current CP cycle",
        "PLANNED for current year",
        "Current Year (1 Oct - 30 Sep)",
        "Micro Assessment",
        "Risk Rating",
        "Expiring Threshold",
        "Approach Threshold",
        "Programmatic Visits Planned",
        "Programmatic Visits M.R",
        "Programmatic Visits Done",
        "Spot Checks M.R",
        "Spot Checks Done",
        "Audits M.R",
        "Audits Done",
        "Flag for Follow up",
    ])
    self.assertEqual(dataset[0], (
        "Partner Name",
        PartnerType.UN_AGENCY,
        "with UNFPA",
        PartnerOrganization.AGENCY_CHOICES.UN,
        "200.00",
        "300.00",
        "150.00",
        "Yes",
        "High",
        "False",
        "False",
        "10",  # programmatic visits
        "8",
        "5",
        "3",  # spot checks
        "2",
        "4",  # audits
        "2",
        "No",
    ))
def test_csv_export(self):
    """CSV export responds OK with one data row of 33 columns."""
    expected_width = 33
    response = self.forced_auth_req(
        'get', self.url, user=self.unicef_staff, data={"format": "csv"}
    )
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    exported = Dataset().load(response.content.decode('utf-8'), 'csv')
    self.assertEqual(exported.height, 1)
    self.assertEqual(len(exported._get_headers()), expected_width)
    self.assertEqual(len(exported[0]), expected_width)
def test_csv_flat_export_api(self):
    """Flat CSV export of applied indicators: one row, 31 columns."""
    expected_width = 31
    response = self.forced_auth_req(
        'get',
        reverse('reports:applied-indicator'),
        user=self.unicef_staff,
        data={"format": "csv_flat"},
    )
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    exported = Dataset().load(response.content.decode('utf-8'), 'csv')
    self.assertEqual(exported.height, 1)
    self.assertEqual(len(exported._get_headers()), expected_width)
    self.assertEqual(len(exported[0]), expected_width)
def test_csv_flat_export_api(self):
    """Flat CSV export of partner staff members: two rows, 11 columns."""
    expected_width = 11
    response = self.forced_auth_req(
        'get',
        reverse('partners_api:partner-staff-members-list', args=[self.partner.pk]),
        user=self.unicef_staff,
        data={"format": "csv_flat"},
    )
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    exported = Dataset().load(response.content.decode('utf-8'), 'csv')
    self.assertEqual(exported.height, 2)
    self.assertEqual(len(exported._get_headers()), expected_width)
    self.assertEqual(len(exported[0]), expected_width)
def test_csv_flat_export_api_hidden(self):
    """Flat CSV export including hidden partners: one row, 48 columns."""
    expected_width = 48
    response = self.forced_auth_req(
        'get',
        reverse('partners_api:partner-list'),
        user=self.unicef_staff,
        data={"format": "csv_flat", "hidden": True},
    )
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    exported = Dataset().load(response.content.decode('utf-8'), 'csv')
    self.assertEqual(exported.height, 1)
    self.assertEqual(len(exported._get_headers()), expected_width)
    self.assertEqual(len(exported[0]), expected_width)
def test_csv_export_api(self):
    """CSV export of intervention amendments: one row, 10 columns."""
    expected_width = 10
    response = self.forced_auth_req(
        'get',
        reverse('partners_api:intervention-amendments'),
        user=self.unicef_staff,
        data={"format": "csv"},
    )
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    exported = Dataset().load(response.content.decode('utf-8'), 'csv')
    self.assertEqual(exported.height, 1)
    self.assertEqual(len(exported._get_headers()), expected_width)
    self.assertEqual(len(exported[0]), expected_width)
def test_csv_flat_export_api(self):
    """Flat CSV export of funds reservation headers: one row, 20 columns."""
    expected_width = 20
    response = self.forced_auth_req(
        'get',
        reverse('funds:funds-reservation-header'),
        user=self.unicef_staff,
        data={"format": "csv_flat"},
    )
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    exported = Dataset().load(response.content.decode('utf-8'), 'csv')
    self.assertEqual(exported.height, 1)
    self.assertEqual(len(exported._get_headers()), expected_width)
    self.assertEqual(len(exported[0]), expected_width)
def test_csv_export_api(self):
    """CSV export of partner assessments: one row, 18 columns."""
    expected_width = 18
    response = self.forced_auth_req(
        'get',
        reverse('partners_api:partner-assessment'),
        user=self.unicef_staff,
        data={"format": "csv"},
    )
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    exported = Dataset().load(response.content.decode('utf-8'), 'csv')
    self.assertEqual(exported.height, 1)
    self.assertEqual(len(exported._get_headers()), expected_width)
    self.assertEqual(len(exported[0]), expected_width)
def test_agreement_export_api(self):
    """CSV export of agreements: verify headers and the first agreement's row."""
    response = self.forced_auth_req(
        'get',
        '/api/v2/agreements/',
        user=self.unicef_staff,
        data={"format": "csv"},
    )
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    dataset = Dataset().load(response.content.decode('utf-8'), 'csv')
    self.assertEqual(dataset.height, 2)
    self.assertEqual(dataset._get_headers(), [
        'Reference Number',
        'Status',
        'Partner Name',
        'Partner Number',
        'Agreement Type',
        'Start Date',
        'End Date',
        'Signed By Partner',
        'Signed By Partner Date',
        'Signed By UNICEF',
        'Signed By UNICEF Date',
        'Partner Authorized Officer',
        'Amendments',
        'URL',
    ])
    # we're interested in the first agreement, so it will be last in the exported list
    exported_agreement = dataset[-1]
    self.assertEqual(exported_agreement, (
        self.agreement.agreement_number,
        str(self.agreement.status),
        str(self.agreement.partner.name),
        str(self.agreement.partner.vendor_number),
        self.agreement.agreement_type,
        '{}'.format(self.agreement.start),
        '{}'.format(self.agreement.end),
        u'',  # signed-by-partner name is empty in this fixture
        '{}'.format(self.agreement.signed_by_partner_date),
        self.unicef_staff.get_full_name(),
        '{}'.format(self.agreement.signed_by_unicef_date),
        ', '.join([
            sm.get_full_name()
            for sm in self.agreement.authorized_officers.all()
        ]),
        u'',  # no amendments in this fixture
        u'https://testserver/pmp/agreements/{}/details/'.format(
            self.agreement.id),
    ))
def _prepare_table(string):
    """Parse a whitespace-delimited text dump into a tablib ``Dataset``.

    The first physical line is skipped, as are blank and '*****' separator
    lines. Line index 1 of the remainder supplies the five headers (the 5th
    and 6th tokens are re-glued into one header); other lines become data
    rows with cells coerced via ``numberfy``.
    """
    table = Dataset()
    for idx, raw in enumerate(string.split('\n')[1:]):
        if '*****' in raw or not raw:
            continue
        cells = raw.split()
        if idx == 1:
            # Header line: the fifth column name was split by whitespace.
            cells[4] = cells[4] + ' ' + cells[5]
            table.headers = cells[:5]
        else:
            table.append([numberfy(cell) for cell in cells])
    return table
def _prepare_cluters(string):
    """Parse a cluster text dump into a tablib ``Dataset``.

    Multi-word tokens are protected before splitting ('PULSE HEIGHT' ->
    'PULSE-HEIGHT', ' +/-' -> '_+/-'); the underscore placeholder is turned
    back into a space when cells are appended. The first three lines are
    skipped; line index 0 of the remainder supplies the headers.
    """
    normalized = string.replace('PULSE HEIGHT', 'PULSE-HEIGHT').replace(' +/-', '_+/-')
    table = Dataset()
    for index, raw in enumerate(normalized.split('\n')[3:]):
        if '*****' in raw or not raw:
            continue
        tokens = raw.split()
        if index == 0:
            table.headers = tokens
        else:
            table.append(
                [numberfy(tok.replace('_', ' ')) for tok in tokens])
    return table
def test_agreement_export_api(self):
    """CSV export of agreements (with Special Conditions PCA column): headers and row."""
    response = self.forced_auth_req(
        'get',
        '/api/v2/agreements/',
        user=self.unicef_staff,
        data={"format": "csv"},
    )
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    dataset = Dataset().load(response.content.decode('utf-8'), 'csv')
    self.assertEqual(dataset.height, 2)
    self.assertEqual(dataset._get_headers(), [
        'Reference Number',
        'Status',
        'Partner Name',
        'Partner Number',
        'Agreement Type',
        'Start Date',
        'End Date',
        'Signed By Partner',
        'Signed By Partner Date',
        'Signed By UNICEF',
        'Signed By UNICEF Date',
        'Partner Authorized Officer',
        'Amendments',
        'URL',
        'Special Conditions PCA',
    ])
    # we're interested in the first agreement, so it will be last in the exported list
    exported_agreement = dataset[-1]
    self.assertEqual(exported_agreement, (
        self.agreement.agreement_number,
        str(self.agreement.status),
        str(self.agreement.partner.name),
        str(self.agreement.partner.vendor_number),
        self.agreement.agreement_type,
        '{}'.format(self.agreement.start),
        '{}'.format(self.agreement.end),
        '',  # signed-by-partner name is empty in this fixture
        '{}'.format(self.agreement.signed_by_partner_date),
        self.unicef_staff.get_full_name(),
        '{}'.format(self.agreement.signed_by_unicef_date),
        ', '.join([sm.get_full_name() for sm in self.agreement.authorized_officers.all()]),
        '',  # no amendments in this fixture
        'https://testserver/pmp/agreements/{}/details/'.format(self.agreement.id),
        'No',
    )
    )
def test_csv_flat_export_api(self):
    """Flat CSV export of partner assessments: 15 columns including Country."""
    expected_width = 15
    response = self.forced_auth_req(
        'get',
        reverse('partners_api:partner-assessment'),
        user=self.unicef_staff,
        data={"format": "csv_flat"},
    )
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    exported = Dataset().load(response.content.decode('utf-8'), 'csv')
    self.assertEqual(exported.height, 1)
    header_row = exported._get_headers()
    self.assertEqual(len(header_row), expected_width)
    self.assertIn("Country", header_row)
    self.assertEqual(len(exported[0]), expected_width)
def _prepare_table(string):
    """Parse a '*'-delimited text table into a tablib ``Dataset``.

    Blank and '*****' separator lines are dropped; each remaining line is
    stripped of its border asterisks and split on '*'. The first parsed
    line supplies the headers; the rest become data rows with cells
    coerced via ``numberfy``.
    """
    rows = []
    for raw in string.split('\n'):
        if '*****' in raw or not raw:
            continue
        pieces = raw.lstrip().strip('*').split('*')
        rows.append([piece.strip() for piece in pieces])
    table = Dataset()
    table.headers = rows[0]
    for body_row in rows[1:]:
        table.append([numberfy(cell) for cell in body_row])
    return table
def test_partners_export_api(self):
    """CSV export of partners: verify headers and every field of our partner's row."""
    response = self.forced_auth_req(
        'get',
        '/api/v2/partners/',
        user=self.unicef_staff,
        data={"format": "csv"},
    )
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    dataset = Dataset().load(response.content.decode('utf-8'), 'csv')
    self.assertEqual(dataset.height, 2)
    self.assertEqual(dataset._get_headers(), [
        'Vendor Number',
        'Organizations Full Name',
        'Short Name',
        'Alternate Name',
        'Partner Type',
        'Shared Partner',
        'Address',
        'Phone Number',
        'Email Address',
        'Risk Rating',
        'Date Last Assessed Against Core Values',
        'Actual Cash Transfer for CP (USD)',
        'Actual Cash Transfer for Current Year (USD)',
        'Marked for Deletion',
        'Blocked',
        'Assessment Type',
        'Date Assessed',
        'Assessment Type (Date Assessed)',
        'Staff Members',
        'URL'
    ])
    deleted_flag = "Yes" if self.partner.deleted_flag else "No"
    blocked = "Yes" if self.partner.blocked else "No"
    # Pick out our partner's row by vendor number.
    test_option = [
        e for e in dataset if e[0] == self.partner.vendor_number
    ][0]
    self.assertEqual(
        test_option,
        (self.partner.vendor_number,
         six.text_type(self.partner.name),
         self.partner.short_name,
         self.partner.alternate_name,
         "{}".format(self.partner.partner_type),
         u', '.join([x for x in self.partner.shared_with]),
         self.partner.address,
         self.partner.phone_number,
         self.partner.email,
         self.partner.rating,
         u'{}'.format(self.partner.core_values_assessment_date),
         u'{:.2f}'.format(self.partner.total_ct_cp),
         u'{:.2f}'.format(self.partner.total_ct_cy),
         deleted_flag,
         blocked,
         self.partner.type_of_assessment,
         u'{}'.format(self.partner.last_assessment_date),
         u'',  # "Assessment Type (Date Assessed)" is empty in this fixture
         ', '.join([
             "{} ({})".format(sm.get_full_name(), sm.email)
             for sm in self.partner.staff_members.filter(active=True).all()
         ]),
         u'https://testserver/pmp/partners/{}/details/'.format(
             self.partner.id)))
def _prepare_clusters_str(self, data):
    """Parse a clusters text dump into a tablib ``Dataset``.

    Multi-word tokens are protected before splitting ('PULSE HEIGHT' ->
    'PULSE-HEIGHT', ' +/-' -> '_+/-'). The first three lines, separators,
    GEANT lines and anything that is neither the header line nor an error
    ('+/-') row are skipped; placeholder underscores are restored to
    spaces when cells are appended.
    """
    table = Dataset()
    normalized = data.replace('PULSE HEIGHT', 'PULSE-HEIGHT').replace(' +/-', '_+/-')
    for line_no, text in enumerate(normalized.split('\n')[3:]):
        if '*****' in text or not text or 'GEANT' in text:
            continue
        if 'NO. PULSE-HEIGHT' not in text and '+/-' not in text:
            continue
        tokens = text.split()
        if line_no == 0:
            table.headers = tokens
        elif '+/-' in text:
            table.append(
                [numberfy(tok.replace('_', ' ')) for tok in tokens])
    return table
def test_csv_flat_export_api_hact_value_string(self):
    """Flat CSV export still works when hact_values is a plain JSON string."""
    expected_width = 49
    # Clone the fixture partner (pk=None + save creates a new record)
    # and give it a string-valued hact_values payload.
    partner = self.partner
    partner.pk = None
    partner.vendor_number = "Vendor New Num"
    partner.hact_values = json.dumps('{"key": "random string"}')
    partner.save()
    response = self.forced_auth_req(
        'get',
        reverse('partners_api:partner-list'),
        user=self.unicef_staff,
        data={"format": "csv_flat"},
    )
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    exported = Dataset().load(response.content.decode('utf-8'), 'csv')
    self.assertEqual(exported.height, 2)
    self.assertEqual(len(exported._get_headers()), expected_width)
    self.assertEqual(len(exported[0]), expected_width)
def _prepare_table(string):
    """Parse a whitespace-delimited text dump into a tablib ``Dataset``.

    The first physical line is skipped; '====' separators and blank lines
    are ignored. Line index 1 of the remainder supplies the headers. Rows
    whose cell count does not match the header count are not appended.
    """
    dataset = Dataset()
    # Last line that parsed cleanly; printed for context when a row mismatches.
    good = None
    for i, line in enumerate(string.split('\n')[1:]):
        if '====' in line or not line:
            continue
        row = line.split()
        if i == 1:
            dataset.headers = row
        else:
            if len(row) == len(dataset.headers):
                good = line
                dataset.append([numberfy(num) for num in row])
            else:
                # NOTE(review): leftover debugging output — mismatched rows
                # are dropped silently apart from these prints; consider
                # logging or raising instead.
                print(good)
                print(row)
    return dataset
def test_csv_export_api(self):
    """CSV export of the agreement list: headers and the single exported row."""
    response = self.forced_auth_req(
        'get',
        reverse('partners_api:agreement-list'),
        user=self.unicef_staff,
        data={"format": "csv"},
    )
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    dataset = Dataset().load(response.content.decode('utf-8'), 'csv')
    self.assertEqual(dataset.height, 1)
    self.assertEqual(dataset._get_headers(), [
        'Reference Number',
        'Status',
        'Partner Name',
        'Agreement Type',
        'Start Date',
        'End Date',
        'Signed By Partner',
        'Signed By Partner Date',
        'Signed By UNICEF',
        'Signed By UNICEF Date',
        'Partner Authorized Officer',
        'Amendments',
        'URL'
    ])
    exported_agreement = dataset[0]
    self.assertEqual(exported_agreement, (
        self.agreement.agreement_number,
        six.text_type(self.agreement.status),
        six.text_type(self.agreement.partner.name),
        self.agreement.agreement_type,
        '{}'.format(self.agreement.start),
        '{}'.format(self.agreement.end),
        u'',  # signed-by-partner name is empty in this fixture
        '{}'.format(self.agreement.signed_by_partner_date),
        u'',  # signed-by-UNICEF name is empty in this fixture
        '{}'.format(self.agreement.signed_by_unicef_date),
        ', '.join([sm.get_full_name() for sm in self.agreement.authorized_officers.all()]),
        u'',  # no amendments in this fixture
        u'https://testserver/pmp/agreements/{}/details/'.format(self.agreement.id)
    ))
def test_csv_flat_export_api(self):
    """Flat CSV export of donors: the five expected columns, order-independent."""
    response = self.forced_auth_req(
        'get',
        reverse('funds:funds-donor'),
        user=self.unicef_staff,
        data={"format": "csv_flat"},
    )
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    exported = Dataset().load(response.content.decode('utf-8'), 'csv')
    self.assertEqual(exported.height, 1)
    # six wrapper keeps this working on both py2 and py3 test runners.
    six.assertCountEqual(self, exported._get_headers(), [
        "Grant",
        "ID",
        "Name",
        "created",
        "modified",
    ])
    self.assertEqual(len(exported[0]), 5)
def test_agreement_export_api(self):
    """Legacy CSV export of a partner's agreements: status, headers and the row."""
    response = self.forced_auth_req(
        'get',
        '/api/partners/{}/agreements/export/'.format(self.partner.id),
        user=self.unicef_staff)
    # assertEquals is a deprecated alias (removed in Python 3.12); use assertEqual.
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    dataset = Dataset().load(response.content, 'csv')
    self.assertEqual(dataset.height, 1)
    self.assertEqual(dataset._get_headers(), [
        'reference_number',
        'partner__vendor_number',
        'partner__name',
        'partner__short_name',
        'start_date',
        'end_date',
        'signed_by_partner',
        'signed_by_partner_date',
        'signed_by_unicef',
        'signed_by_unicef_date',
        'authorized_officers',
    ])
    # Only the reference number and partner name are populated in this fixture.
    self.assertEqual(
        dataset[0],
        (self.agreement.reference_number, '', self.partner.name,
         '', '', '', '', '', '', '', ''))
def test_csv_flat_export_api(self):
    """Flat CSV export of donors: the five expected columns, order-independent."""
    response = self.forced_auth_req(
        'get',
        reverse('funds:funds-donor'),
        user=self.unicef_staff,
        data={"format": "csv_flat"},
    )
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    exported = Dataset().load(response.content.decode('utf-8'), 'csv')
    self.assertEqual(exported.height, 1)
    self.assertCountEqual(exported._get_headers(), [
        "Grant",
        "ID",
        "Name",
        "created",
        "modified",
    ])
    self.assertEqual(len(exported[0]), 5)
def test_government_export_api(self):
    """Legacy CSV export of government interventions: status, headers and the row."""
    response = self.forced_auth_req(
        'get',
        '/api/partners/{}/government_interventions/export/'.format(
            self.partner.id),
        user=self.unicef_staff)
    # assertEquals is a deprecated alias (removed in Python 3.12); use assertEqual.
    self.assertEqual(response.status_code, status.HTTP_200_OK, response.content)
    dataset = Dataset().load(response.content, 'csv')
    self.assertEqual(dataset.height, 1)
    self.assertEqual(dataset._get_headers(), [
        'number',
        'partner__name',
        'result_structure__name',
        'sectors',
        'cash_transfer',
        'year',
    ])
    self.assertEqual(
        dataset[0],
        ('RefNumber',
         self.partner.name,
         self.government_intervention.result_structure.name,
         '',
         '0',
         datetime.now().strftime('%Y')))
def test_partner_export_api(self):
    """Legacy CSV export of partners: status, headers and the first row."""
    response = self.forced_auth_req('get',
                                    '/api/partners/export/',
                                    user=self.unicef_staff)
    # assertEquals is a deprecated alias (removed in Python 3.12); use assertEqual.
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    dataset = Dataset().load(response.content, 'csv')
    self.assertEqual(dataset.height, 2)
    self.assertEqual(dataset._get_headers(), [
        'vendor_number',
        'vision_synced',
        'deleted_flag',
        'name',
        'short_name',
        'alternate_id',
        'alternate_name',
        'partner_type',
        'cso_type',
        'shared_partner',
        'address',
        'email',
        'phone_number',
        'risk_rating',
        'type_of_assessment',
        'last_assessment_date',
        'total_ct_cp',
        'total_ct_cy',
        'agreement_count',
        'intervention_count',
        'active_staff_members',
    ])
    self.assertEqual(
        dataset[0],
        ('', '0', '0', self.partner.name, '', '', '', '', '', 'No',
         '', '', '', '', '', '', '', '', '1', '1', 'Mace Windu'))
def render_csv_report(self):
    """Build the report as CSV and return it as a downloadable HTTP response.

    Fixes two defects in the original:
    * ``HttpResponse(dataset.csv, 'text/csv', None, 'text/csv; charset=utf-8')``
      passed the charset string positionally into the ``reason`` phrase slot;
      the content type now goes in ``content_type`` where it belongs.
    * the filename was ``.encode('utf-8')``-ed before ``%s`` formatting, which
      on Python 3 renders as ``b'...'`` in the Content-Disposition header.
    """
    dataset = Dataset(*self._build_report_data(), headers=self._build_report_headers())
    response = HttpResponse(
        dataset.csv,
        content_type='text/csv; charset=utf-8',
    )
    filename = self.get_filename()
    response['Content-Disposition'] = 'attachment; filename=%s' % filename
    return response
def stack_cols(self, other):
    """Stack two :class:`Dataset` instances together by joining at the
    column level, and return a new combined ``Dataset`` instance. If
    either ``Dataset`` has headers set, then the other must as well.

    :raises HeadersNeeded: if only one of the two datasets has headers.
    :raises InvalidDimensions: if the two datasets differ in height.
    """
    # Silently refuse to stack against anything that is not a Dataset.
    if not isinstance(other, Dataset):
        return
    if self.headers or other.headers:
        # Headers must be present on both sides or on neither.
        if not self.headers or not other.headers:
            raise HeadersNeeded
    if self.height != other.height:
        raise InvalidDimensions
    try:
        new_headers = self.headers + other.headers
    except TypeError:
        # Neither side has headers (both None): the result has none either.
        new_headers = None
    _dset = Dataset()
    for column in self.headers:
        _dset.append_col(col=self[column])
    for column in other.headers:
        _dset.append_col(col=other[column])
    _dset.headers = new_headers
    return _dset
def projectsImport(self, fname):
    """Import projects from the file *fname* via the ProjectResource.

    Logs (and prints) the result when the import reports errors; otherwise
    logs the elapsed time.

    :param fname: path of the file to load into a tablib Dataset.
    """
    start = datetime.datetime.now()
    # The original leaked the file handle (open(...).read()); a context
    # manager guarantees it is closed.
    with open(fname) as import_file:
        imported_data = Dataset().load(import_file.read())
    project_resource = ProjectResource()
    result = project_resource.import_data(imported_data, dry_run=False)
    if result.has_errors():
        logger.info('IMPORT COMPLETED WITH ERRORS: ')
        logger.info(result)
        print(result)
        return
    else:
        end = datetime.datetime.now()
        delta = end - start
        logger.info('IMPORT SUCCESSFULLY COMPLETED IN TIME: ' + str(delta))
def as_tablib_dataset(report, parent_context):
    """Render *report* and flatten every element's table into one tablib Dataset.

    Each element contributes a separator row carrying its title, followed by
    all of its table's value rows.

    :param report: the report to render (passed through to ``_report``).
    :param parent_context: rendering context for ``_report``.
    :returns: a ``Dataset`` with one separator + rows per report element.
    """
    render_context = _report(report, parent_context)
    dataset = Dataset()
    for element in render_context["elements"].values():
        table = element["table"]
        dataset.append_separator(element["title"])
        # The enumerate() index in the original was never used.
        for row in table.as_values():
            dataset.append(row)
    return dataset
def test_export_csv_empty_shared_with(self):
    """If the partner's shared_with value is empty, the CSV export must still
    succeed and produce the expected row."""
    partner = PartnerFactory(
        name="Partner Name",
        partner_type=PartnerType.UN_AGENCY,
        shared_with=None,
        rating="High",
        total_ct_cp=200.0,
        total_ct_cy=150.0,
    )
    HactHistoryFactory(partner=partner, year=2017, partner_values=self.hact_data)
    response = self.forced_auth_req(
        "get", self.url, user=self.unicef_user, data={"format": "csv"}
    )
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    dataset = Dataset().load(response.content.decode('utf-8'), "csv")
    self.assertEqual(dataset.height, 1)
    self.assertEqual(
        dataset[0],
        (
            "Partner Name",
            PartnerType.UN_AGENCY,
            "with UNFPA",
            PartnerOrganization.AGENCY_CHOICES.UN,
            "200.00",
            "300.00",
            "150.00",
            "Yes",
            "High",
            "False",
            "False",
            "10",  # programmatic visits
            "8",
            "5",
            "3",  # spot checks
            "2",
            "4",  # audits
            "2",
            "No",
        ))
def sort(self, col, reverse=False, include_tags=True):
    """Sort a :class:`Dataset` by a specific column, given string (for
    header) or integer (for column index). The order can be reversed by
    setting ``reverse`` to ``True``. Returns a new :class:`Dataset`
    instance where columns have been sorted.

    When ``include_tags`` is true, each row's tags are carried through the
    sort by temporarily pushing them as an extra column.

    :raises HeadersNeeded: if *col* is a name but the dataset has no headers.
    :raises InvalidDimensions: if the tags list does not match the height.
    """
    # NOTE: the ``unicode`` builtin means this code targets Python 2.
    if isinstance(col, str) or isinstance(col, unicode):
        if not self.headers:
            raise HeadersNeeded
    else:
        if self.headers:
            # Translate a positional index into its header name.
            col = self.headers[col]
    old_headers = self.headers[:]
    if include_tags:
        if len(self._tags_list) != self.height:
            raise InvalidDimensions
        else:
            # Work on a copy with the tags appended as a trailing column.
            _dset = copy(self)
            _dset.rpush_col(self._tags_list, header='tags')
            # NOTE(review): when headers exist, ``_dset.dict`` rows are
            # keyed by header name, so an integer itemgetter here looks
            # suspicious — confirm against the Row/Dataset internals.
            tag_getter = itemgetter(self.width +1)
            _dset._tags_list = []
            _sorted = sorted(_dset.dict, key=itemgetter(col), reverse=reverse)
            _dataset = Dataset(headers=self.headers)
            for item in _sorted:
                if self.headers:
                    # Tags were serialized space-separated; split them back out.
                    tags = tag_getter(item).split(' ')
                    changed_row = Row([item[key] for key in old_headers], tags=tags)
                else:
                    changed_row = item
                _dataset.append(row=changed_row._row, tags=changed_row.tags, include_tags=True)
    else:
        _dset = copy(self)
        _sorted = sorted(_dset.dict, key=itemgetter(col), reverse=reverse)
        _dataset = Dataset(headers=self.headers)
        for item in _sorted:
            if self.headers:
                row = [item[key] for key in self.headers]
            else:
                row = item
            _dataset.append(row=row)
    return _dataset
def before_import(self, dataset, dry_run, **kwargs):
    """Rewrite the incoming dataset's CSV text before import.

    Applies the module-level ``replace`` substitution pair to the CSV
    representation and reloads it into a fresh ``Dataset``.

    The original wrapped the already in-memory string in ``io.StringIO``
    only to ``read()`` it straight back — the string can be loaded directly.
    """
    patched_csv = dataset.csv.replace(replace[0], replace[1])
    return Dataset().load(patched_csv, format='csv')
def test_csv_export_api(self):
    """CSV export of the partner list: headers plus every field of our partner's row."""
    response = self.forced_auth_req(
        'get',
        reverse('partners_api:partner-list'),
        user=self.unicef_staff,
        data={"format": "csv"},
    )
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    dataset = Dataset().load(response.content.decode('utf-8'), 'csv')
    self.assertEqual(dataset.height, 1)
    self.assertEqual(dataset._get_headers(), [
        'Vendor Number',
        'Organizations Full Name',
        'Short Name',
        'Alternate Name',
        'Partner Type',
        'Shared Partner',
        'Address',
        'Phone Number',
        'Email Address',
        'Risk Rating',
        'Date Last Assessed Against Core Values',
        'Actual Cash Transfer for CP (USD)',
        'Actual Cash Transfer for Current Year (USD)',
        'Marked for Deletion',
        'Blocked',
        'Assessment Type',
        'Date Assessed',
        'Assessment Type (Date Assessed)',
        'Staff Members',
        'URL',
        'Planned Programmatic Visits'
    ])
    deleted_flag = "Yes" if self.partner.deleted_flag else "No"
    blocked = "Yes" if self.partner.blocked else "No"
    # Pick out our partner's row by vendor number.
    test_option = [
        e for e in dataset if e[0] == self.partner.vendor_number
    ][0]
    # the order of staff members in the results is hard to determine
    # so just ensuring that all relevant staff members are in the results
    for sm in self.partner.staff_members.filter(active=True).all():
        member = "{} ({})".format(sm.get_full_name(), sm.email)
        self.assertIn(member, test_option[18])
    self.assertEqual(test_option, (
        self.partner.vendor_number,
        str(self.partner.name),
        self.partner.short_name,
        self.partner.alternate_name,
        "{}".format(self.partner.partner_type),
        ', '.join([x for x in self.partner.shared_with]),
        self.partner.address,
        self.partner.phone_number,
        self.partner.email,
        self.partner.rating,
        '{}'.format(self.partner.core_values_assessment_date),
        '{:.2f}'.format(self.partner.total_ct_cp),
        '{:.2f}'.format(self.partner.total_ct_ytd),
        deleted_flag,
        blocked,
        self.partner.type_of_assessment,
        '{}'.format(self.partner.last_assessment_date),
        '',  # "Assessment Type (Date Assessed)" is empty in this fixture
        test_option[18],  # staff members: order-independent, asserted above
        'https://testserver/pmp/partners/{}/details/'.format(
            self.partner.id),
        '{} (Q1:{} Q2:{}, Q3:{}, Q4:{})'.format(
            self.planned_visit.year,
            self.planned_visit.programmatic_q1,
            self.planned_visit.programmatic_q2,
            self.planned_visit.programmatic_q3,
            self.planned_visit.programmatic_q4,
        ),
    ))
from gimel_parser import parse

# Gimel session credentials (left blank; filled in before running).
USER_NAME = ''
PASSWORD = ''
# File the Gimel session writes its transcript to.
SESSION_FILE = 'calibration.txt'
# Spreadsheet the calibration stats are exported to ("EXEL" typo kept:
# renaming the module constant could break importers).
EXEL_OUTPUT = 'calibration_stats.xlsx'
# Injection sweep parameters: starting energy, step, shots per energy.
minimum_energy = 1
step_size = 0.2
number_of_injections = 250

if __name__ == '__main__':
    # Drive a Gimel session that fires electrons at ascending energies,
    # logging everything to SESSION_FILE.
    with GimelSession(user=USER_NAME, password=PASSWORD, output_file=SESSION_FILE) as g:
        g.start_gimmel()
        g.send_particles_ascending_energies('electron', minimum_energy, step_size, number_of_injections)
    # NOTE(review): the original indentation was lost; the transcript is
    # assumed to be read back after the session context closes (so the
    # output file is complete) — confirm against the original script.
    with open(SESSION_FILE) as f:
        text = f.read()
    events = parse(text)
    # One spreadsheet row per event: energy, track curvature (+ its error)
    # and the first calorimeter pulse height.
    dataset = Dataset()
    dataset.headers = ('P', 'Kappa', 'd Kappa', 'Calorimeter Pulse Hight')
    for event in events:
        row = []
        row.append(event.energy)
        row.append(event.tracks.tracks[0].parameters.akappa)
        row.append(event.tracks.tracks[0].error_matrix['akappa']['akappa'])
        row.append(event.calorimeter.clusters['PULSE-HEIGHT'][0])
        dataset.append(row)
    with open(EXEL_OUTPUT, 'wb') as f:
        f.write(dataset.export('xlsx'))
def test_intervention_export_api(self):
    """CSV export of interventions: verify headers and each field of the row."""
    response = self.forced_auth_req(
        'get',
        '/api/v2/interventions/',
        user=self.unicef_staff,
        data={"format": "csv"},
    )
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    dataset = Dataset().load(response.content.decode('utf-8'), 'csv')
    self.assertEqual(dataset.height, 1)
    self.assertEqual(dataset._get_headers(), [
        "Partner",
        "Vendor Number",
        "Status",
        "Partner Type",
        "Agreement",
        "Country Programme",
        "Document Type",
        "Reference Number",
        "Document Title",
        "Start Date",
        "End Date",
        "UNICEF Office",
        "Sections",
        "Locations",
        "Contingency PD",
        "Cluster",
        "UNICEF Focal Points",
        "CSO Authorized Officials",
        "Budget Currency",
        "Total CSO Contribution",
        "UNICEF Cash",
        "UNICEF Supply",
        "Total PD/SSFA Budget",
        "FR Number(s)",
        "FR Currency",
        "FR Posting Date",
        "FR Amount",
        "FR Actual CT",
        "Outstanding DCT",
        "Document Submission Date by CSO",
        "Submission Date to PRC",
        "Review Date by PRC",
        "Signed by Partner",
        "Signed by Partner Date",
        "Signed by UNICEF",
        "Signed by UNICEF Date",
        "Days from Submission to Signed",
        "Days from Review to Signed",
        "Total no. of amendments",
        "Last amendment date",
        "Attachment type",
        "# of attachments",
        "CP Outputs",
        "URL",
    ])
    self.assertEqual(dataset[0], (
        str(self.intervention.agreement.partner.name),
        str(self.intervention.agreement.partner.vendor_number),
        self.intervention.status,
        self.intervention.agreement.partner.partner_type,
        self.intervention.agreement.agreement_number,
        str(self.intervention.country_programme.name),
        self.intervention.document_type,
        self.intervention.number,
        str(self.intervention.title),
        '{}'.format(self.intervention.start),
        '{}'.format(self.intervention.end),
        u'',  # office
        u'',  # sections
        u'',  # locations
        str("Yes" if self.intervention.contingency_pd else "No"),
        u'',  # cluster
        u'',  # focal points
        u'',  # CSO authorized officials
        str(self.ib.currency),
        u'{:.2f}'.format(self.intervention.total_partner_contribution),
        u'{:.2f}'.format(self.intervention.total_unicef_cash),
        u'{:.2f}'.format(self.intervention.total_in_kind_amount),
        u'{:.2f}'.format(self.intervention.total_budget),
        u', '.join([fr.fr_numbers for fr in self.intervention.frs.all()]),
        u'',  # FR currency
        u'',  # FR posting date
        u'',  # FR amount
        u'',  # FR actual CT
        u'',  # outstanding DCT
        '{}'.format(self.intervention.submission_date),
        '{}'.format(self.intervention.submission_date_prc),
        '{}'.format(self.intervention.review_date_prc),
        u'{}'.format(self.intervention.
                     partner_authorized_officer_signatory.get_full_name()),
        '{}'.format(self.intervention.signed_by_partner_date),
        self.unicef_staff.get_full_name(),
        '{}'.format(self.intervention.signed_by_unicef_date),
        '{}'.format(self.intervention.days_from_submission_to_signed),
        '{}'.format(self.intervention.days_from_review_to_signed),
        str(self.intervention.amendments.count()),
        u'',  # last amendment date
        str(', '.join([
            '{}'.format(att.type.name)
            for att in self.intervention.attachments.all()
        ])),
        str(self.intervention.attachments.count()),
        u'',  # CP outputs
        u'https://testserver/pmp/interventions/{}/details/'.format(
            self.intervention.id),
    ))
times = int(times) number_of_decays = 0 event_id = 0 progress = 0 if __name__ == '__main__': with GimelSession(user=USER_NAME, password=PASSWORD, output_file=SESSION_FILE) as g: g.start_gimmel() # Insert the name of the particle here: g.send_particle_in_bulk('k-short', momentum, times) with open(SESSION_FILE) as f: text = f.read() events = parse(text) raw = Dataset() raw.headers = ('Event ID','P Parent Particle', 'Kappa1', 'd Kappa1', 'tandip1', 'd tandip1', 'Kappa2', 'd Kappa2', 'tandip2', 'd tandip2', 'Vertex x', 'd Vertex x', 'Vertex y', 'd Vertex y', 'Vertex z', 'd Vertex z', 'Phi', 'd Phi') for event in events: sp.call('cls',shell=True) event_id += 1 progress = int(100*event_id/times) print('Processing..', str(progress)+'% completed.') if len(event.raw.strip()) == 0: event_id -= 1
number_of_decays = 0 event_id = 0 progress = 0 if __name__ == '__main__': with GimelSession(user=USER_NAME, password=PASSWORD, output_file=SESSION_FILE) as g: g.start_gimmel() # Insert the name of the particle here: g.send_particle_in_bulk('pi-0', momentum, times) with open(SESSION_FILE) as f: text = f.read() events = parse(text) raw = Dataset() raw.headers = ('Event ID', 'P Parent Particle', 'Cluster1 y', 'Cluster1 z', 'Pulse Height 1', 'Cluster2 y', 'Cluster2 z', 'Pulse Height 2') for event in events: sp.call('cls', shell=True) event_id += 1 progress = int(100 * event_id / times) print('Processing..', str(progress) + '% completed.') if len(event.raw.strip()) == 0: event_id -= 1 # elif len(event.calorimeter.clusters.clusters) == 1 and len(event.tracks.tracks) == 0:
PASSWORD = data["password"] IDNum = data["id"] with GimelSession(user=USER_NAME, password=PASSWORD, output_file=SESSION_FILE) as g: g.start_gimmel() g.send_command(IDNum) g.send_particles_ascending_energies(particle, minimum_energy, step_size, number_of_injections, per_energy) with open(SESSION_FILE) as f: text = f.read() if particle is 'photon': events = parse(text) dataset = Dataset() dataset.headers = ('P', 'pulseheight', 'x', 'dx', 'y', 'dy', 'z', 'dz', 'ywidth', 'zwidth') for event in events: row = [] if len(event.clusters.clusters.clusters) is 1: row.append(event.energy) row.append(event.clusters.clusters.clusters[0].pulse_height) row.append(event.clusters.clusters.clusters[0].x.value) row.append(event.clusters.clusters.clusters[0].x.error) row.append(event.clusters.clusters.clusters[0].y.value) row.append(event.clusters.clusters.clusters[0].y.error) row.append(event.clusters.clusters.clusters[0].z.value) row.append(event.clusters.clusters.clusters[0].z.error) row.append(event.clusters.clusters.clusters[0].ywidth) row.append(event.clusters.clusters.clusters[0].zwidth)
def test_csv_export_api(self):
    """CSV export of the partner list: headers plus every field of our partner's row."""
    response = self.forced_auth_req(
        'get',
        reverse('partners_api:partner-list'),
        user=self.unicef_staff,
        data={"format": "csv"},
    )
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    dataset = Dataset().load(response.content.decode('utf-8'), 'csv')
    self.assertEqual(dataset.height, 1)
    self.assertEqual(dataset._get_headers(), [
        'Vendor Number',
        'Organizations Full Name',
        'Short Name',
        'Alternate Name',
        'Partner Type',
        'Shared Partner',
        'Address',
        'Phone Number',
        'Email Address',
        'Risk Rating',
        'Date Last Assessed Against Core Values',
        'Actual Cash Transfer for CP (USD)',
        'Actual Cash Transfer for Current Year (USD)',
        'Marked for Deletion',
        'Blocked',
        'Assessment Type',
        'Date Assessed',
        'Assessment Type (Date Assessed)',
        'Staff Members',
        'URL',
        'Planned Programmatic Visits'
    ])
    deleted_flag = "Yes" if self.partner.deleted_flag else "No"
    blocked = "Yes" if self.partner.blocked else "No"
    # Pick out our partner's row by vendor number.
    test_option = [e for e in dataset if e[0] == self.partner.vendor_number][0]
    # the order of staff members in the results is hard to determine
    # so just ensuring that all relevant staff members are in the results
    for sm in self.partner.staff_members.filter(active=True).all():
        member = "{} ({})".format(sm.get_full_name(), sm.email)
        self.assertIn(member, test_option[18])
    self.assertEqual(test_option, (
        self.partner.vendor_number,
        str(self.partner.name),
        self.partner.short_name,
        self.partner.alternate_name,
        "{}".format(self.partner.partner_type),
        ', '.join([x for x in self.partner.shared_with]),
        self.partner.address,
        self.partner.phone_number,
        self.partner.email,
        self.partner.rating,
        '{}'.format(self.partner.core_values_assessment_date),
        '{:.2f}'.format(self.partner.total_ct_cp),
        '{:.2f}'.format(self.partner.total_ct_ytd),
        deleted_flag,
        blocked,
        self.partner.type_of_assessment,
        '{}'.format(self.partner.last_assessment_date),
        '',  # "Assessment Type (Date Assessed)" is empty in this fixture
        test_option[18],  # staff members: order-independent, asserted above
        'https://testserver/pmp/partners/{}/details/'.format(self.partner.id),
        '{} (Q1:{} Q2:{}, Q3:{}, Q4:{})'.format(
            self.planned_visit.year,
            self.planned_visit.programmatic_q1,
            self.planned_visit.programmatic_q2,
            self.planned_visit.programmatic_q3,
            self.planned_visit.programmatic_q4,
        ),
    ))
def test_intervention_export_api(self):
    """CSV export of interventions (with CSO Type and planned-visits columns):
    verify headers and every field of the exported row."""
    response = self.forced_auth_req(
        'get',
        '/api/v2/interventions/',
        user=self.unicef_staff,
        data={"format": "csv"},
    )
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    dataset = Dataset().load(response.content.decode('utf-8'), 'csv')
    self.assertEqual(dataset.height, 1)
    self.assertEqual(dataset._get_headers(), [
        "Partner",
        "Vendor Number",
        "Status",
        "Partner Type",
        "CSO Type",
        "Agreement",
        "Country Programme",
        "Document Type",
        "Reference Number",
        "Document Title",
        "Start Date",
        "End Date",
        "UNICEF Office",
        "Sections",
        "Locations",
        "Contingency PD",
        "Cluster",
        "UNICEF Focal Points",
        "CSO Authorized Officials",
        "Budget Currency",
        "Total CSO Contribution",
        "UNICEF Cash",
        "UNICEF Supply",
        "Total PD/SSFA Budget",
        "FR Number(s)",
        "FR Currency",
        "FR Posting Date",
        "FR Amount",
        "FR Actual CT",
        "Outstanding DCT",
        "Planned Programmatic Visits",
        "Document Submission Date by CSO",
        "Submission Date to PRC",
        "Review Date by PRC",
        "Signed by Partner",
        "Signed by Partner Date",
        "Signed by UNICEF",
        "Signed by UNICEF Date",
        "Days from Submission to Signed",
        "Days from Review to Signed",
        "Total no. of amendments",
        "Last amendment date",
        "Attachment type",
        "# of attachments",
        "CP Outputs",
        "URL",
    ])
    self.assertEqual(dataset[0], (
        str(self.intervention.agreement.partner.name),
        str(self.intervention.agreement.partner.vendor_number),
        self.intervention.status,
        self.intervention.agreement.partner.partner_type,
        '',  # CSO type
        self.intervention.agreement.agreement_number,
        str(self.intervention.country_programme.name),
        self.intervention.document_type,
        self.intervention.number,
        str(self.intervention.title),
        '{}'.format(self.intervention.start),
        '{}'.format(self.intervention.end),
        '',  # office
        '',  # sections
        '',  # locations
        str("Yes" if self.intervention.contingency_pd else "No"),
        '',  # cluster
        '',  # focal points
        '',  # CSO authorized officials
        str(self.ib.currency),
        '{:.2f}'.format(self.intervention.total_partner_contribution),
        '{:.2f}'.format(self.intervention.total_unicef_cash),
        '{:.2f}'.format(self.intervention.total_in_kind_amount),
        '{:.2f}'.format(self.intervention.total_budget),
        ', '.join([fr.fr_numbers for fr in self.intervention.frs.all()]),
        '',  # FR currency
        '',  # FR posting date
        '',  # FR amount
        '',  # FR actual CT
        '',  # outstanding DCT
        '{} (Q1:{} Q2:{}, Q3:{}, Q4:{})'.format(
            self.planned_visit.year,
            self.planned_visit.programmatic_q1,
            self.planned_visit.programmatic_q2,
            self.planned_visit.programmatic_q3,
            self.planned_visit.programmatic_q4),
        '{}'.format(self.intervention.submission_date),
        '{}'.format(self.intervention.submission_date_prc),
        '{}'.format(self.intervention.review_date_prc),
        '{}'.format(self.intervention.partner_authorized_officer_signatory.get_full_name()),
        '{}'.format(self.intervention.signed_by_partner_date),
        self.unicef_staff.get_full_name(),
        '{}'.format(self.intervention.signed_by_unicef_date),
        '{}'.format(self.intervention.days_from_submission_to_signed),
        '{}'.format(self.intervention.days_from_review_to_signed),
        str(self.intervention.amendments.count()),
        '',  # last amendment date
        str(', '.join(['{}'.format(att.type.name) for att in self.intervention.attachments.all()])),
        str(self.intervention.attachments.count()),
        '',  # CP outputs
        'https://testserver/pmp/interventions/{}/details/'.format(self.intervention.id),
    ))