def index(self):
    """Export the vacancy change history as a zipped CSV download.

    Restricted to CIC super users; anyone else is bounced via
    _security_failure().
    """
    req = self.request
    if not req.user.cic.SuperUser:
        self._security_failure()

    # Pull every vacancy-history row for this member.
    with req.connmgr.get_connection('admin') as conn:
        cursor = conn.execute('EXEC sp_CIC_Vacancy_l_History ?',
                              req.dboptions.MemberID)
        history = cursor.fetchall()
        cursor.close()

    # Localized column headings, ordered to match the attribute list below.
    headings = [
        _(label, req) for label in (
            'Record #', 'Record Name', 'Service Title',
            'Service Title At Change', 'Vacancy Unit Type ID',
            'Vacancy Unit Type GUID', 'Modified Date', 'Modified By',
            'Vacancy Change', 'Total Vacancy',
        )
    ]
    pick_fields = attrgetter(
        'NUM', 'OrgName', 'ServiceTitleNow', 'ServiceTitle', 'BT_VUT_ID',
        'BT_VUT_GUID', 'MODIFIED_DATE', 'MODIFIED_BY', 'VacancyChange',
        'VacancyFinal')

    def as_text_row(record):
        # None renders as an empty cell; everything else is stringified.
        return tuple(u'' if value is None else unicode(value)
                     for value in pick_fields(record))

    # Assemble the CSV inside a zip archive written to a temp file.
    outfile = tempfile.TemporaryFile()
    with BufferedZipFile(outfile, 'w', zipfile.ZIP_DEFLATED) as zip:
        rows = itertools.chain([headings], itertools.imap(as_text_row, history))
        write_csv_to_zip(zip, rows, 'vacancy_history.csv')

    size = outfile.tell()
    outfile.seek(0)

    # Hand the archive back as a timestamped attachment.
    res = req.response
    res.content_type = 'application/zip'
    res.charset = None
    res.app_iter = FileIterator(outfile)
    res.content_length = size
    stamp = datetime.today().isoformat('-').replace(':', '-').split('.')[0]
    res.headers['Content-Disposition'] = 'attachment;filename=vacancy-history-%s.zip' % (stamp)
    return res
def main(previous_name, dest_file):
    """Build a "delete list" archive: records that appeared in the previous
    export but are missing from the current one.

    :param previous_name: previous export, either a ``.zip`` archive or a
        plain CSV file.
    :param dest_file: current export (same formats); the delete list is
        written next to it as ``<dest_file minus extension>_delete.zip``.
    :returns: ``(percent_deleted, delete_count, previous_record_count)``.
    """
    def _open_csv(name):
        # Exports may arrive zipped or as a bare CSV file.
        return open_zipfile(name) if name.endswith(".zip") else open(name)

    # Collect the (id, parent, TYPE) triples from the previous export,
    # limited to the record types we track.
    csv_file = _open_csv(previous_name)
    try:
        reader = UTF8Reader(csv_file)
        next(reader)  # skip header
        previous = {
            (row[0], row[1], row[2].upper())
            for row in reader
            if row[2].upper() in ("AGENCY", "SITE", "PROGRAM")
        }
    finally:
        csv_file.close()
    previous_count = len(previous)

    # Anything still present in the current export is not a deletion.
    csv_file = _open_csv(dest_file)
    try:
        reader = UTF8Reader(csv_file)
        header = next(reader)
        previous.difference_update(map(tuple, reader))
    finally:
        csv_file.close()

    to_delete = sorted(previous)
    del previous
    to_delete_count = len(to_delete)
    # Guard: an empty previous export would otherwise raise ZeroDivisionError.
    to_delete_percent = (100 * to_delete_count / previous_count
                         if previous_count else 0)

    dest_file = dest_file[:-4] + "_delete.zip"
    with open(dest_file, "wb") as fd:
        with bufferedzip.BufferedZipFile(fd, "w", zipfile.ZIP_DEFLATED) as zip:
            write_csv_to_zip(
                zip,
                itertools.chain([header], to_delete),
                os.path.basename(dest_file)[:-4] + ".csv",
            )
    return to_delete_percent, to_delete_count, previous_count
def __call__(self):
    """Stream the iCarol source record-NUM list as a zipped CSV.

    Requires an authenticated user holding the "airsexport" external API
    permission; otherwise a 401 error response is returned.
    """
    request = self.request
    user = request.user
    if not user:
        return make_401_error("Access Denied")
    if "airsexport" not in user.cic.ExternalAPIs:
        return make_401_error("Insufficient Permissions")
    log.debug("icarol source list")
    sql = """EXEC sp_GBL_AIRS_Export_ICarolSource"""
    file = tempfile.TemporaryFile()
    with request.connmgr.get_connection("admin") as conn:
        cursor = conn.execute(sql)

        def row_group_iterable():
            # Header row first, then results in 2000-row batches so the
            # full result set never sits in memory at once.
            yield [["Record NUM"]]
            while True:
                rows = cursor.fetchmany(2000)
                if not rows:
                    break
                # NOTE(review): `y or ""` blanks out None, but also any
                # other falsy value (0, "") — confirm that is intended.
                yield map(lambda x: tuple(y or "" for y in x), rows)

        with bufferedzip.BufferedZipFile(file, "w", zipfile.ZIP_DEFLATED) as zip:
            write_csv_to_zip(
                zip,
                itertools.chain.from_iterable(row_group_iterable()),
                "record_ids.csv",
            )
    # Fix: dropped a redundant early `response = request.response` /
    # content_type assignment that was immediately repeated below.
    length = file.tell()
    file.seek(0)
    res = request.response
    res.content_type = "application/zip"
    res.charset = None
    res.app_iter = FileIterator(file)
    res.content_length = length
    res.headers[
        "Content-Disposition"] = "attachment;filename=record_ids.zip"
    return res
def main(previous_name, dest_file):
    """Build a "delete list" archive: records that appeared in the previous
    export but are missing from the current one.

    :param previous_name: previous export, either a ``.zip`` archive or a
        plain CSV file.
    :param dest_file: current export (same formats); the delete list is
        written next to it as ``<dest_file minus extension>_delete.zip``.
    :returns: ``(percent_deleted, delete_count, previous_record_count)``.
    """
    def _open_csv(name):
        # Exports may arrive zipped or as a bare CSV file.
        return open_zipfile(name) if name.endswith('.zip') else open(name)

    # Collect the (id, parent, TYPE) triples from the previous export,
    # limited to the record types we track.
    csv_file = _open_csv(previous_name)
    try:
        reader = UTF8Reader(csv_file)
        reader.next()  # skip header
        previous = set(
            (row[0], row[1], row[2].upper())
            for row in reader
            if row[2].upper() in ('AGENCY', 'SITE', 'PROGRAM'))
    finally:
        csv_file.close()
    previous_count = len(previous)

    # Anything still present in the current export is not a deletion.
    csv_file = _open_csv(dest_file)
    try:
        reader = UTF8Reader(csv_file)
        header = reader.next()
        previous.difference_update(itertools.imap(tuple, reader))
    finally:
        csv_file.close()

    to_delete = sorted(previous)
    del previous
    to_delete_count = len(to_delete)
    # Guard: an empty previous export would otherwise raise ZeroDivisionError.
    to_delete_percent = 100 * to_delete_count / previous_count if previous_count else 0

    dest_file = dest_file[:-4] + '_delete.zip'
    with open(dest_file, 'wb') as fd:
        with bufferedzip.BufferedZipFile(fd, 'w', zipfile.ZIP_DEFLATED) as zip:
            write_csv_to_zip(zip, itertools.chain([header], to_delete),
                             os.path.basename(dest_file)[:-4] + '.csv')
    return to_delete_percent, to_delete_count, previous_count
def __call__(self):
    """Stream the AIRS full record list as a zipped CSV.

    Requires an authenticated user holding the 'airsexport' external API
    permission.  Export options are validated against
    AIRSExportOptionsSchema; a validation failure returns an internal
    server error with a short message.
    """
    request = self.request
    user = request.user
    if not user:
        return make_401_error(u'Access Denied')
    if 'airsexport' not in user.cic.ExternalAPIs:
        return make_401_error(u'Insufficient Permissions')
    model_state = modelstate.ModelState(request)
    model_state.schema = AIRSExportOptionsSchema()
    model_state.form.method = None
    log.debug('full list')
    # I don't think that version is relevant, just ignore it
    del model_state.schema.fields['version']
    if not model_state.validate():
        if model_state.is_error('DST'):
            msg = u"Invalid Distribution"
        elif model_state.is_error('Field'):
            msg = u'Invalid Field'
        else:
            msg = u"An unknown error occurred."
        return make_internal_server_error(msg)
    sql = '''EXEC sp_GBL_AIRS_Export_FullList ?, @@LANGID, ?, ?, ?, ?'''
    values = [
        request.viewdata.cic.ViewType,
        model_state.value('DST'),
        model_state.value('PubCodeSync'),
        model_state.value('IncludeDeleted'),
        model_state.value('IncludeSiteAgency')
    ]
    log.debug('full list: %s', values)
    file = tempfile.TemporaryFile()
    with request.connmgr.get_connection('admin') as conn:
        cursor = conn.execute(
            sql,
            values
        )

        def row_group_iterable():
            # Header row first, then results in 2000-row batches so the
            # full result set never sits in memory at once.
            yield [[u'Record NUM', u'Parent NUM', u'Record Type']]
            while True:
                rows = cursor.fetchmany(2000)
                if not rows:
                    break
                # NOTE(review): `y or u''` blanks out None, but also any
                # other falsy value (0, u'') — confirm that is intended.
                yield itertools.imap(lambda x: tuple(y or u'' for y in x), rows)

        with bufferedzip.BufferedZipFile(file, 'w', zipfile.ZIP_DEFLATED) as zip:
            write_csv_to_zip(
                zip,
                itertools.chain.from_iterable(row_group_iterable()),
                'records%s.csv' % (model_state.value('FileSuffix') or ''))
    # Fix: dropped a redundant early `response = request.response` /
    # content_type assignment that was immediately repeated below.
    length = file.tell()
    file.seek(0)
    res = request.response
    res.content_type = 'application/zip'
    res.charset = None
    res.app_iter = FileIterator(file)
    res.content_length = length
    res.headers['Content-Disposition'] = 'attachment;filename=records%s.zip' % (model_state.value('FileSuffix') or '')
    return res
def calculate_deletion_list(lang, url, args, **kwargs):
    """Download the current full record list and compute which records
    disappeared since the previous export.

    Writes ``<dest>_full_list.zip`` and ``<dest>_delete.zip`` and records
    both in the export-file environment variables.

    :returns: ``(percent_deleted, delete_count, record_counts)`` where
        ``record_counts`` is a Counter over the first column of the
        current list.
    """
    suffix = lang.file_suffix + '_full_list'
    previous_filenames = previous_files(args.dest, args.filename_prefix, suffix)
    kwargs.pop('stream', None)  # we always stream the download ourselves
    r = requests.get(url + '/list', stream=True, **kwargs)
    r.raise_for_status()
    dest_file = args.dest_file[:-4] + '_full_list.zip'
    update_environ('ALLEXPORTFILES', dest_file)
    with open(dest_file, 'wb') as fd:
        for chunk in r.iter_content(chunk_size=8192):
            fd.write(chunk)
    to_delete_count = 0
    to_delete_percent = 0
    if previous_filenames and args.type != 'full':
        # Sort prior exports by the timestamp segment embedded in the
        # filename and diff against the most recent one.
        now = datetime.datetime.now().strftime(_time_format)
        end = -len(suffix) - 4
        start = -len(now) + end
        previous_filenames.sort(key=lambda x: x[start:end])
        csv_file = open_zipfile(previous_filenames[-1])
        reader = UTF8Reader(csv_file)
        reader.next()  # skip header
        previous = set(itertools.imap(tuple, reader))
        previous_count = len(previous)
        csv_file.close()
        csv_file = open_zipfile(dest_file)
        reader = UTF8Reader(csv_file)
        header = reader.next()
        current_records = map(tuple, reader)
        record_counts = Counter(x[0] for x in current_records)
        previous.difference_update(current_records)
        csv_file.close()
        to_delete = list(previous)
        del previous
        to_delete.sort()
        to_delete_count = len(to_delete)
        # Guard: an empty previous export would otherwise raise
        # ZeroDivisionError here.
        to_delete_percent = (100 * to_delete_count / previous_count
                             if previous_count else 0)
        dest_file = args.dest_file[:-4] + '_delete.zip'
        update_environ('ALLEXPORTFILES', dest_file)
        update_environ('SYNCEXPORTFILES', dest_file)
        with open(dest_file, 'wb') as fd:
            with bufferedzip.BufferedZipFile(fd, 'w', zipfile.ZIP_DEFLATED) as zip:
                write_csv_to_zip(zip, itertools.chain([header], to_delete),
                                 os.path.basename(dest_file)[:-4] + '.csv')
    else:
        # No previous export (or a forced full run): emit a header-only
        # delete list but still tally the current record counts.
        csv_file = open_zipfile(dest_file)
        reader = UTF8Reader(csv_file)
        header = reader.next()
        record_counts = Counter(x[0] for x in reader)
        csv_file.close()
        dest_file = args.dest_file[:-4] + '_delete.zip'
        update_environ('ALLEXPORTFILES', dest_file)
        update_environ('SYNCEXPORTFILES', dest_file)
        with open(dest_file, 'wb') as fd:
            with bufferedzip.BufferedZipFile(fd, 'w', zipfile.ZIP_DEFLATED) as zip:
                write_csv_to_zip(zip, [header],
                                 os.path.basename(dest_file)[:-4] + '.csv')
    return to_delete_percent, to_delete_count, record_counts
def __call__(self):
    """Stream the AIRS full record list as a zipped CSV.

    Requires an authenticated user holding the "airsexport" external API
    permission.  Export options are validated against
    AIRSExportOptionsSchema; a validation failure returns an internal
    server error with a short message.
    """
    request = self.request
    user = request.user
    if not user:
        return make_401_error("Access Denied")
    if "airsexport" not in user.cic.ExternalAPIs:
        return make_401_error("Insufficient Permissions")
    model_state = modelstate.ModelState(request)
    model_state.schema = AIRSExportOptionsSchema()
    model_state.form.method = None
    log.debug("full list")
    # I don't think that version is relevant, just ignore it
    del model_state.schema.fields["version"]
    if not model_state.validate():
        if model_state.is_error("DST"):
            msg = "Invalid Distribution"
        elif model_state.is_error("Field"):
            msg = "Invalid Field"
        else:
            msg = "An unknown error occurred."
        return make_internal_server_error(msg)
    sql = """EXEC sp_GBL_AIRS_Export_FullList ?, @@LANGID, ?, ?, ?, ?"""
    values = [
        request.viewdata.cic.ViewType,
        model_state.value("DST"),
        model_state.value("PubCodeSync"),
        model_state.value("IncludeDeleted"),
        model_state.value("IncludeSiteAgency"),
    ]
    log.debug("full list: %s", values)
    file = tempfile.TemporaryFile()
    with request.connmgr.get_connection("admin") as conn:
        cursor = conn.execute(sql, values)

        def row_group_iterable():
            # Header row first, then results in 2000-row batches so the
            # full result set never sits in memory at once.
            yield [["Record NUM", "Parent NUM", "Record Type"]]
            while True:
                rows = cursor.fetchmany(2000)
                if not rows:
                    break
                # NOTE(review): `y or ""` blanks out None, but also any
                # other falsy value (0, "") — confirm that is intended.
                yield map(lambda x: tuple(y or "" for y in x), rows)

        with bufferedzip.BufferedZipFile(file, "w", zipfile.ZIP_DEFLATED) as zip:
            write_csv_to_zip(
                zip,
                itertools.chain.from_iterable(row_group_iterable()),
                "records%s.csv" % (model_state.value("FileSuffix") or ""),
            )
    # Fix: dropped a redundant early `response = request.response` /
    # content_type assignment that was immediately repeated below.
    length = file.tell()
    file.seek(0)
    res = request.response
    res.content_type = "application/zip"
    res.charset = None
    res.app_iter = FileIterator(file)
    res.content_length = length
    res.headers[
        "Content-Disposition"] = "attachment;filename=records%s.zip" % (
            model_state.value("FileSuffix") or "")
    return res
def calculate_deletion_list(lang, url, args, **kwargs):
    """Download the current full record list and compute which records
    disappeared since the previous export.

    Writes ``<dest>_full_list.zip`` and ``<dest>_delete.zip`` and records
    both in the export-file environment variables.

    :returns: ``(percent_deleted, delete_count, record_counts)`` where
        ``record_counts`` is a Counter over the first column of the
        current list.
    """
    suffix = lang.file_suffix + "_full_list"
    previous_filenames = previous_files(args.dest, args.filename_prefix, suffix)
    kwargs.pop("stream", None)  # we always stream the download ourselves
    r = requests.get(url + "/list", stream=True, **kwargs)
    r.raise_for_status()
    dest_file = args.dest_file[:-4] + "_full_list.zip"
    update_environ("ALLEXPORTFILES", dest_file)
    with open(dest_file, "wb") as fd:
        for chunk in r.iter_content(chunk_size=8192):
            fd.write(chunk)
    to_delete_count = 0
    to_delete_percent = 0
    if previous_filenames and args.type != "full":
        # Sort prior exports by the timestamp segment embedded in the
        # filename and diff against the most recent one.
        now = datetime.datetime.now().strftime(_time_format)
        end = -len(suffix) - 4
        start = -len(now) + end
        previous_filenames.sort(key=lambda x: x[start:end])
        csv_file = open_zipfile(previous_filenames[-1])
        reader = open_csv_reader(csv_file)
        next(reader)  # skip header
        previous = set(remove_exclusions(reader, url, args, **kwargs))
        previous_count = len(previous)
        csv_file.close()
        csv_file = open_zipfile(dest_file)
        reader = open_csv_reader(csv_file)
        header = next(reader)
        current_records = list(map(tuple, reader))
        record_counts = Counter(x[0] for x in current_records)
        previous.difference_update(current_records)
        csv_file.close()
        to_delete = list(previous)
        del previous
        to_delete.sort()
        to_delete_count = len(to_delete)
        # Guard: an empty previous export would otherwise raise
        # ZeroDivisionError here.
        to_delete_percent = (100 * to_delete_count / previous_count
                             if previous_count else 0)
        dest_file = args.dest_file[:-4] + "_delete.zip"
        update_environ("ALLEXPORTFILES", dest_file)
        update_environ("SYNCEXPORTFILES", dest_file)
        with open(dest_file, "wb") as fd:
            with bufferedzip.BufferedZipFile(fd, "w", zipfile.ZIP_DEFLATED) as zip:
                write_csv_to_zip(
                    zip,
                    itertools.chain([header], to_delete),
                    os.path.basename(dest_file)[:-4] + ".csv",
                )
    else:
        # No previous export (or a forced full run): emit a header-only
        # delete list but still tally the current record counts.
        csv_file = open_zipfile(dest_file)
        reader = open_csv_reader(csv_file)
        header = next(reader)
        record_counts = Counter(x[0] for x in reader)
        csv_file.close()
        dest_file = args.dest_file[:-4] + "_delete.zip"
        update_environ("ALLEXPORTFILES", dest_file)
        update_environ("SYNCEXPORTFILES", dest_file)
        with open(dest_file, "wb") as fd:
            with bufferedzip.BufferedZipFile(fd, "w", zipfile.ZIP_DEFLATED) as zip:
                write_csv_to_zip(zip, [header],
                                 os.path.basename(dest_file)[:-4] + ".csv")
    return to_delete_percent, to_delete_count, record_counts
def index(self):
    """Export the vacancy change history as a zipped CSV download.

    Restricted to CIC super users; anyone else is bounced via
    _security_failure().
    """
    req = self.request
    if not req.user.cic.SuperUser:
        self._security_failure()

    # Pull every vacancy-history row for this member.
    with req.connmgr.get_connection("admin") as conn:
        cursor = conn.execute("EXEC sp_CIC_Vacancy_l_History ?",
                              req.dboptions.MemberID)
        history = cursor.fetchall()
        cursor.close()

    # Localized column headings, ordered to match the attribute list below.
    headings = [
        _(label, req) for label in (
            "Record #", "Record Name", "Service Title",
            "Service Title At Change", "Vacancy Unit Type ID",
            "Vacancy Unit Type GUID", "Modified Date", "Modified By",
            "Vacancy Change", "Total Vacancy",
        )
    ]
    pick_fields = attrgetter(
        "NUM", "OrgName", "ServiceTitleNow", "ServiceTitle", "BT_VUT_ID",
        "BT_VUT_GUID", "MODIFIED_DATE", "MODIFIED_BY", "VacancyChange",
        "VacancyFinal")

    def as_text_row(record):
        # None renders as an empty cell; everything else is stringified.
        return tuple("" if value is None else str(value)
                     for value in pick_fields(record))

    # Assemble the CSV inside a zip archive written to a temp file.
    outfile = tempfile.TemporaryFile()
    with BufferedZipFile(outfile, "w", zipfile.ZIP_DEFLATED) as zip:
        rows = itertools.chain([headings], map(as_text_row, history))
        write_csv_to_zip(zip, rows, "vacancy_history.csv")

    size = outfile.tell()
    outfile.seek(0)

    # Hand the archive back as a timestamped attachment.
    res = req.response
    res.content_type = "application/zip"
    res.charset = None
    res.app_iter = FileIterator(outfile)
    res.content_length = size
    stamp = datetime.today().isoformat("-").replace(":", "-").split(".")[0]
    res.headers[
        "Content-Disposition"] = "attachment;filename=vacancy-history-%s.zip" % (stamp)
    return res
def list(self):
    """Render the community management list page; when the ``csv``
    request parameter is set, return a zipped CSV export of all
    communities instead.  Super-user only.
    """
    request = self.request
    user = request.user
    if not user.SuperUser:
        self._security_failure()
    CM_ID = self._get_cmid()
    # Fetch the printable community list for this community ID.
    with request.connmgr.get_connection('admin') as conn:
        cursor = conn.execute('EXEC sp_GBL_Community_l_Print ?', CM_ID)
        communities = cursor.fetchall()
        cursor.close()
    # Unpack the XML-encoded columns: Names becomes a per-culture dict,
    # AltNames / AltAreaSearch become '; '-joined strings.
    for community in communities:
        community.Names = self._culture_dict_from_xml(community.Names, 'Name')
        community.AltNames = u'; '.join(self._list_from_xml(community.AltNames, 'AltNames'))
        community.AltAreaSearch = u'; '.join(self._list_from_xml(community.AltAreaSearch, 'Name'))
    if request.params.get('csv'):
        active_cultures = syslanguage.active_cultures()
        culture_map = syslanguage.culture_map()
        # One "Name (<language>)" column per active culture.
        # NOTE(review): these headings call _() without the request
        # argument, unlike _('Manage Communities', request) below —
        # confirm both call forms are supported.
        headings = [_('ID')] + [_('Name (%s)') % culture_map[culture].LanguageName for culture in active_cultures]
        headings += [
            _('GUID'),
            _('Parent ID'),
            _('Parent'),
            _("Parent's Parent"),
            _('Province'),
            _('Is Alt-Area'),
            _('Is Parent'),
            _('Located In'),
            _('Areas Served'),
            _('Bus Routes'),
            _('Wards'),
            _('Views'),
        ]
        # Row attribute names matching the headings above (after the
        # per-culture name columns).
        fields = [
            'CM_GUID',
            'ParentCommunity',
            'ParentCommunityName',
            'ParentCommunity2',
            'ProvinceName',
            'AlternativeArea',
            'ParentUsage',
            'LocatedInUsage',
            'AreasServedUsage',
            'BusRouteUsage',
            'WardUsage',
            'ViewUsage',
        ]
        # Volunteer-related columns only when the database uses VOL.
        if request.dboptions.UseVOL:
            headings += [
                _('Opportunities'),
                _('Community Groups'),
            ]
            fields += [
                'VolOppUsage',
                'CommunityGroupUsage',
            ]
        headings += [
            _('Alternate Names'),
            _('Alt Area Search'),
        ]
        fields += [
            'AltNames',
            'AltAreaSearch',
        ]
        form_cultures = [culture_map[culture].FormCulture for culture in active_cultures]
        base_field_getter = attrgetter(*fields)
        name_field_getter = lambda x: tuple(x.Names.get(y, {}).get('Name') for y in form_cultures)

        # NOTE(review): x[0:0] is always an empty slice, so each data row
        # carries one column fewer than `headings` (nothing lands under
        # the _('ID') heading) — confirm whether x[0:1] was intended.
        def row_getter(x):
            return tuple(u'' if y is None else unicode(y) for y in x[0:0] + name_field_getter(x) + base_field_getter(x))
        file = tempfile.TemporaryFile()
        with BufferedZipFile(file, 'w', zipfile.ZIP_DEFLATED) as zip:
            write_csv_to_zip(zip, itertools.chain([headings], itertools.imap(row_getter, communities)), 'communities.csv')
        length = file.tell()
        file.seek(0)
        # Stream the zip back as a timestamped attachment.
        res = request.response
        res.content_type = 'application/zip'
        res.charset = None
        res.app_iter = FileIterator(file)
        res.content_length = length
        res.headers['Content-Disposition'] = 'attachment;filename=communities-%s.zip' % (datetime.today().isoformat('-').replace(':', '-').split('.')[0])
        return res
    title = _('Manage Communities', request)
    return self._create_response_namespace(title, title, dict(communities=communities, CM_ID=CM_ID), no_index=True)
def list(self):
    """Render the community management list page; when the ``csv``
    request parameter is set, return a zipped CSV export of all
    communities instead.  Super-user only.
    """
    request = self.request
    user = request.user
    if not user.SuperUser:
        self._security_failure()
    CM_ID = self._get_cmid()
    # Fetch the printable community list for this community ID.
    with request.connmgr.get_connection("admin") as conn:
        cursor = conn.execute("EXEC sp_GBL_Community_l_Print ?", CM_ID)
        communities = cursor.fetchall()
        cursor.close()
    # Unpack the XML-encoded columns: Names becomes a per-culture dict,
    # AltNames / AltAreaSearch become "; "-joined strings.
    for community in communities:
        community.Names = self._culture_dict_from_xml(
            community.Names, "Name")
        community.AltNames = "; ".join(
            self._list_from_xml(community.AltNames, "AltNames"))
        community.AltAreaSearch = "; ".join(
            self._list_from_xml(community.AltAreaSearch, "Name"))
    if request.params.get("csv"):
        active_cultures = syslanguage.active_cultures()
        culture_map = syslanguage.culture_map()
        # One "Name (<language>)" column per active culture.
        # NOTE(review): these headings call _() without the request
        # argument, unlike _("Manage Communities", request) below —
        # confirm both call forms are supported.
        headings = [_("ID")] + [
            _("Name (%s)") % culture_map[culture].LanguageName
            for culture in active_cultures
        ]
        headings += [
            _("GUID"),
            _("Parent ID"),
            _("Parent"),
            _("Parent's Parent"),
            _("Province"),
            _("Is Alt-Area"),
            _("Is Parent"),
            _("Located In"),
            _("Areas Served"),
            _("Bus Routes"),
            _("Wards"),
            _("Views"),
        ]
        # Row attribute names matching the headings above (after the
        # per-culture name columns).
        fields = [
            "CM_GUID",
            "ParentCommunity",
            "ParentCommunityName",
            "ParentCommunity2",
            "ProvinceName",
            "AlternativeArea",
            "ParentUsage",
            "LocatedInUsage",
            "AreasServedUsage",
            "BusRouteUsage",
            "WardUsage",
            "ViewUsage",
        ]
        # Volunteer-related columns only when the database uses VOL.
        if request.dboptions.UseVOL:
            headings += [
                _("Opportunities"),
                _("Community Groups"),
            ]
            fields += [
                "VolOppUsage",
                "CommunityGroupUsage",
            ]
        headings += [
            _("Alternate Names"),
            _("Alt Area Search"),
        ]
        fields += [
            "AltNames",
            "AltAreaSearch",
        ]
        form_cultures = [
            culture_map[culture].FormCulture for culture in active_cultures
        ]
        base_field_getter = attrgetter(*fields)
        name_field_getter = lambda x: tuple(
            x.Names.get(y, {}).get("Name") for y in form_cultures)

        # NOTE(review): x[0:0] is always an empty slice, so each data row
        # carries one column fewer than `headings` (nothing lands under
        # the _("ID") heading) — confirm whether x[0:1] was intended.
        def row_getter(x):
            return tuple("" if y is None else str(y) for y in x[0:0] +
                         name_field_getter(x) + base_field_getter(x))
        file = tempfile.TemporaryFile()
        with BufferedZipFile(file, "w", zipfile.ZIP_DEFLATED) as zip:
            write_csv_to_zip(
                zip,
                itertools.chain([headings], map(row_getter, communities)),
                "communities.csv",
            )
        length = file.tell()
        file.seek(0)
        # Stream the zip back as a timestamped attachment.
        res = request.response
        res.content_type = "application/zip"
        res.charset = None
        res.app_iter = FileIterator(file)
        res.content_length = length
        res.headers[
            "Content-Disposition"] = "attachment;filename=communities-%s.zip" % (
                datetime.today().isoformat("-").replace(":", "-").split(".")[0])
        return res
    title = _("Manage Communities", request)
    return self._create_response_namespace(title, title,
                                           dict(communities=communities,
                                                CM_ID=CM_ID),
                                           no_index=True)