def clean_multipart(context, data_dict):
    """Remove multipart uploads older than the configured lifetime.

    :param context:
    :param data_dict:
    :returns: dict with:
        removed - amount of removed uploads.
        total - total amount of expired uploads.
        errors - list of errors raised during deletion. Appears when
        `total` and `removed` are different.
    :rtype: dict
    """
    h.check_access('cloudstorage_clean_multipart', data_dict)
    storage = ResourceCloudStorage({})

    # Anything initiated before this moment has outlived its lifetime.
    cutoff = datetime.datetime.utcnow() - _get_max_multipart_lifetime()
    expired = model.Session.query(MultipartUpload).filter(
        MultipartUpload.initiated < cutoff)

    report = {'removed': 0, 'total': expired.count(), 'errors': []}
    for stale_upload in expired:
        try:
            _delete_multipart(stale_upload, storage)
        except toolkit.ValidationError as err:
            report['errors'].append(err.error_summary)
        else:
            report['removed'] += 1
    return report
def upload_multipart(context, data_dict):
    """Upload one part of a multipart upload and record its ETag.

    :param context:
    :param data_dict: dict with required keys `uploadId`, `partNumber`
        and `upload` (the file-like part content)
    :returns: dict with `partNumber` and `ETag` of the stored part
    :rtype: dict
    """
    h.check_access('cloudstorage_upload_multipart', data_dict)
    upload_id, part_number, part_content = toolkit.get_or_bust(
        data_dict, ['uploadId', 'partNumber', 'upload'])

    storage = ResourceCloudStorage({})
    upload = model.Session.query(MultipartUpload).get(upload_id)

    payload = bytearray(_get_underlying_file(part_content).read())
    response = storage.driver.connection.request(
        _get_object_url(storage, upload.name),
        params={
            'uploadId': upload_id,
            'partNumber': part_number
        },
        method='PUT',
        data=payload,
        headers={'Content-Length': len(payload)})
    if response.status != 200:
        raise toolkit.ValidationError('Upload failed: part %s' % part_number)

    _save_part_info(part_number, response.headers['etag'], upload)
    return {'partNumber': part_number, 'ETag': response.headers['etag']}
def initiate_multipart(context, data_dict):
    """Initiate new Multipart Upload.

    :param context:
    :param data_dict: dict with required keys:
        id: resource's id
        name: filename
        size: filesize

    :returns: MultipartUpload info
    :rtype: dict

    """
    h.check_access('cloudstorage_initiate_multipart', data_dict)
    id, name, size = toolkit.get_or_bust(data_dict, ['id', 'name', 'size'])
    user_id = None
    if context['auth_user_obj']:
        user_id = context['auth_user_obj'].id

    # Flag the resource so other code can tell an upload is running.
    res_dict = toolkit.get_action('resource_show')(context.copy(), {
        'id': data_dict.get('id')
    })
    res_dict['upload_in_progress'] = True
    toolkit.get_action('resource_patch')(context.copy(), res_dict)

    uploader = ResourceCloudStorage({'multipart_name': name})
    res_name = uploader.path_from_filename(id, name)

    upload_object = MultipartUpload.by_name(res_name)
    # Restart rather than resume: an existing upload of the same object
    # is discarded first.
    if upload_object is not None:
        _delete_multipart(upload_object, uploader)
        upload_object = None

    if upload_object is None:
        # Abort any stale uploads for this resource before starting over.
        for old_upload in model.Session.query(MultipartUpload).filter_by(
                resource_id=id):
            _delete_multipart(old_upload, uploader)

        _rindex = res_name.rfind('/')
        # ~(-1) == 0, so this branch runs only when res_name contains '/'.
        if ~_rindex:
            try:
                # Best-effort removal of previously stored objects that
                # share this resource's path prefix.
                name_prefix = res_name[:_rindex]
                for cloud_object in uploader.container.iterate_objects():
                    if cloud_object.name.startswith(name_prefix):
                        log.info('Removing cloud object: %s' % cloud_object)
                        cloud_object.delete()
            except Exception as e:
                log.exception('[delete from cloud] %s' % e)

        upload_object = MultipartUpload(
            uploader.driver._initiate_multipart(container=uploader.container,
                                                object_name=res_name),
            id, res_name, size, name, user_id)
        upload_object.save()
    return upload_object.as_dict()
def finish_multipart(context, data_dict):
    """Called after all parts had been uploaded.

    Triggers call to `_commit_multipart` which will convert separate
    uploaded parts into single file

    :param context:
    :param data_dict: dict with required key `uploadId`
        - id of Multipart Upload that should be finished

    :returns: None
    :rtype: NoneType

    """
    h.check_access('cloudstorage_finish_multipart', data_dict)
    upload_id = toolkit.get_or_bust(data_dict, 'uploadId')
    save_action = data_dict.get('save_action', False)
    upload = model.Session.query(MultipartUpload).get(upload_id)
    # (part_number, etag) pairs, ordered by part number, for the commit.
    chunks = [
        (part.n, part.etag)
        for part in model.Session.query(MultipartPart).filter_by(
            upload_id=upload_id).order_by(MultipartPart.n)
    ]
    uploader = ResourceCloudStorage({})
    try:
        # Remove a previously committed object of the same name, if any.
        obj = uploader.container.get_object(upload.name)
        obj.delete()
    except Exception:
        # Best-effort: the object usually does not exist yet.
        pass
    uploader.driver._commit_multipart(
        _get_object_url(uploader, upload.name), upload_id, chunks)
    upload.delete()
    upload.commit()

    if save_action and save_action == "go-metadata":
        try:
            # Finishing the upload from the "go-metadata" flow activates
            # the draft package the resource belongs to.
            res_dict = toolkit.get_action('resource_show')(
                context.copy(), {'id': data_dict.get('id')})
            pkg_dict = toolkit.get_action('package_show')(
                context.copy(), {'id': res_dict['package_id']})
            if pkg_dict['state'] == 'draft':
                toolkit.get_action('package_patch')(
                    dict(context.copy(), allow_state_change=True),
                    dict(id=pkg_dict['id'], state='active')
                )
        except Exception as e:
            log.error(e)
    # NOTE(review): despite the docstring a dict is returned; the
    # misspelled key 'commited' is kept for caller compatibility.
    return {'commited': True}
def filter(self, stream):
    """Genshi stream filter that injects SPARQL menu entries.

    Adds an admin-config link on admin pages, a global endpoint link in
    the main menu when one is enabled, and a per-dataset dropdown on
    package pages. Python 2 / Pylons code.
    """
    routes = request.environ.get('pylons.routes_dict')
    # Admin pages: link to the SPARQL endpoint configuration screen.
    if routes.get('controller') in ('admin', 'ckanext.sparql.controllers.controller:SparqlAdminController'):
        isactive = 'active' if routes.get('controller') == 'ckanext.sparql.controllers.controller:SparqlAdminController' else ''
        stream = stream | Transformer('//ul[@class="nav nav-pills"]').append(HTML(
            '''<li class="''' + isactive + '''">
                <a class href="/ckan-admin/sparql-config">
                    SPARQL Endpoint Configuration
                </a>
            </li>'''
        ))
    # A globally enabled endpoint is advertised in the main menu.
    if model.Session.query(SparqlEndpoint).filter_by(isglobal=True, isenabled=True).first():
        stream = stream | Transformer('//div[@id="mainmenu"]').append(HTML(
            '''<a class="" href="/sparql">SPARQL Endpoint</a>'''
        ))
    try:
        packageid = c.pkg.id
    except:
        # Not on a package page: no package in template context.
        packageid = None
    if packageid:
        if routes.get('controller') in ('package', 'related', 'ckanext.sparql.controllers.controller:SparqlPackageController'):
            sparqlendpoint = model.Session.query(SparqlEndpoint).filter(SparqlEndpoint.packages.any(Package.name == routes.get('id'))).first()
            htmlstr = ''
            isactive = 'active' if routes.get('controller') == 'ckanext.sparql.controllers.controller:SparqlPackageController' else ''
            # Dropdown appears for users who can query the endpoint or
            # edit the package.
            if (sparqlendpoint and sparqlendpoint.isenabled) or check_access('package_update', {'id':packageid}):
                htmlstr += '''<li class="dropdown ''' + isactive + '''">
                    <a class="dropdown-toggle" data-toggle="dropdown" href="#"><img src="/icons/rdf_flyer.24" height="16px" width="16px" alt="None" class="inline-icon ">SPARQL Endpoint<b class="caret"></b></a>
                    <div class="dropdown-appears">
                    <ul class="dropdown-menu">'''
                if sparqlendpoint and sparqlendpoint.isenabled:
                    htmlstr += '''<li>
                        <a href="/dataset/%s/sparql"><img src="/images/icons/package.png" height="16px" width="16px" alt="None" class="inline-icon "> Query SPARQL Endpoint</a>
                    </li>''' % routes.get('id')
                if check_access('package_update', {'id':packageid}):
                    htmlstr += '''<li>
                        <a href="/dataset/%(id)s/edit/sparql"><img src="/images/icons/package_edit.png" height="16px" width="16px" alt="None" class="inline-icon "> Configure SPARQL Endpoint</a>
                    </li>''' % {'id': routes.get('id')}
                htmlstr += '''</ul>
                    </div>
                </li>'''
            stream = stream | Transformer('//ul[@class="nav nav-pills"]').append(HTML(htmlstr))
    return stream
def upload_rdf(self):
    """Validate an RDF upload request and queue the upload task.

    Requires `package_id`, `data` and a supported `format`; the actual
    upload is done asynchronously via a celery task.
    """
    request = self._get_request_data()

    if 'package_id' not in request:
        abort(400, 'Please provide a suitable package_id parameter')
    elif not check_access('package_update', {'id': request['package_id']}):
        return self._finish_not_authz()

    if 'data' not in request:
        abort(400, 'Please provide a suitable data parameter')
    if 'format' not in request or request['format'] not in SUPPORTED_RDF_SYNTAXES:
        abort(400, 'Please provide a suitable format parameter')

    endpoint = model.Session.query(SparqlEndpoint).filter(
        SparqlEndpoint.packages.any(
            Package.name == request['package_id'])).first()
    if not endpoint:
        abort(404, 'No endpoint defined for provided package')

    # Everything the worker needs to talk to the endpoint.
    pkg_data = {
        'id': request['package_id'],
        'sparulurl': endpoint.sparulurl,
        'storetype': endpoint.storetype,
        'graph': endpoint.graph,
        'username': endpoint.username,
        'passwd': endpoint.passwd,
        'isauthrequired': endpoint.isauthrequired,
    }
    celery.send_task('upload_rdf',
                     args=[pkg_data, request['data'], request['format']],
                     task_id=str(uuid.uuid4()))
    return self._finish_ok('Uploading... Check progress in package web.')
def abort_multipart(context, data_dict):
    """Abort every unfinished multipart upload of a resource.

    :param context:
    :param data_dict: dict with required `id` of the resource
    :returns: ids of the aborted uploads
    :rtype: list
    """
    h.check_access('cloudstorage_abort_multipart', data_dict)
    id = toolkit.get_or_bust(data_dict, ['id'])
    storage = ResourceCloudStorage({})

    aborted = []
    for stale in MultipartUpload.resource_uploads(id):
        _delete_multipart(stale, storage)
        aborted.append(stale.id)

    model.Session.commit()
    return aborted
def user_is_admin(user, org=None):
    """Return whether *user* is an admin of *org*.

    When no org is given, returns whether the user administers any
    organization at all.
    """
    import ckan.lib.helpers as helpers
    if not org:
        # Admin of at least one organization?
        return len(user.get_groups('organization', capacity='admin')) > 0
    return helpers.check_access('organization_update', {'id': org.id})
def check(*args, **kwargs):
    """Authorization guard: only journal admins or sysadmins proceed.

    NOTE(review): relies on `func` from the enclosing (decorator) scope;
    presumably wraps a controller action taking (controller, id).
    """
    id = kwargs['id']
    controller = args[0]
    pkg = tk.get_action('package_show')(None, {'id': id})
    if not check_journal_role(pkg, 'admin') and not h.check_access('sysadmin'):
        tk.abort(403, 'Unauthorized')
    return func(controller, id)
def view_file(pkg_id, resource_id):
    """Render the vtk.js viewer template matching the resource's format.

    Returns a "not authorized" page when the user may not see the
    resource; formats other than stl/vtp/vti/zip render nothing.
    """
    context = {
        u'model': model,
        u'session': model.Session,
        u'user': g.user,
        u'for_view': True,
        u'auth_user_obj': g.userobj
    }

    # check access to the resource
    if not helpers.check_access(u'resource_show', {'id': resource_id}):
        return base.render("not_authorized.html")

    resource = toolkit.get_action('resource_show')(context, {
        'id': resource_id,
        'include_tracking': False
    })

    # use the correct javascript file to render the data for the
    # resource's format
    viewer_templates = {
        "stl": "vtkjs_view_stl.html",
        "vtp": "vtkjs_view_vtp.html",
        "vti": "vtkjs_view_vti.html",
        "zip": "vtkjs_view_obj-zip.html",
    }
    fmt = resource["format"].lower()
    if fmt in viewer_templates:
        return base.render(viewer_templates[fmt],
                           extra_vars={'url': resource["url"]})
def run(self):
    """Run the migration script selected via the request parameters.

    Only sysadmins may run migrations. Each known request parameter
    maps to a script class whose ``run()`` produces the results shown
    on the result page; with no known parameter the index is rendered.
    """
    if not h.check_access('sysadmin'):
        abort(401, _('Unauthorized to access migration scripts'))

    # (request parameter, script class) pairs, checked in this order —
    # replaces seven copy-pasted if/elif branches.
    scripts = [
        ('insert_initial_odm_data',
         s1_insert_initial_odm_data.S1_insert_intial_odm_data),
        ('import_taxonomy_tag_dictionaries',
         s2_import_taxonomy_tag_dictionaries.S2_import_taxonomy_tag_dictionaries),
        ('import_taxonomy_term_translations',
         s3_import_taxonomy_term_translations.S3_import_taxonomy_term_translations),
        ('import_odc_laws', s4_import_odc_laws.S4_import_odc_laws),
        ('delete_all_laws', s5_delete_all_laws.S5_delete_all_laws),
        ('migrate_to_multilingual',
         s6_migrate_to_multilingual.S6_migrate_to_multilingual),
        ('reset_multilingual_flag',
         s7_reset_multilingual_flag.S7_reset_multilingual_flag),
    ]
    for param, script_cls in scripts:
        if param in request.params:
            c.script_results = script_cls().run()
            return p.toolkit.render('ckanext/migration/result.html')
    return p.toolkit.render('ckanext/migration/index.html')
def check(*args, **kwargs):
    """Authorization guard: only journal admins or sysadmins proceed.

    NOTE(review): relies on `func` from the enclosing (decorator) scope;
    presumably wraps a controller action taking (controller, id).
    """
    id = kwargs['id']
    controller = args[0]
    pkg = tk.get_action('package_show')(None, {'id': id})
    if not check_journal_role(pkg, 'admin') and not h.check_access('sysadmin'):
        tk.abort(403, 'Unauthorized')
    return func(controller, id)
def upload_multipart(context, data_dict):
    """Upload one part of a multipart upload and store its ETag.

    :param context:
    :param data_dict: dict with required keys `uploadId`, `partNumber`
        and `upload` (the part's file object)
    :returns: dict with `partNumber` and `ETag` of the stored part
    :rtype: dict
    """
    h.check_access('cloudstorage_upload_multipart', data_dict)
    upload_id, part_number, part_content = toolkit.get_or_bust(
        data_dict, ['uploadId', 'partNumber', 'upload'])

    storage = ResourceCloudStorage({})
    upload = model.Session.query(MultipartUpload).get(upload_id)

    # The part number and upload id travel in the query string.
    part_url = _get_object_url(
        storage, upload.name) + '?partNumber={0}&uploadId={1}'.format(
            part_number, upload_id)
    resp = storage.driver.connection.request(
        part_url, method='PUT', data=bytearray(part_content.file.read()))
    if resp.status != 200:
        raise toolkit.ValidationError('Upload failed: part %s' % part_number)

    _save_part_info(part_number, resp.headers['etag'], upload)
    return {'partNumber': part_number, 'ETag': resp.headers['etag']}
def create_new_version_of_subset(self, subset_id, orig_id):
    """Queue creation of a new version of a subset package.

    The new name is derived from the subset's current name (up to its
    "-v" marker) plus the original package's next version number; the
    heavy work runs in a background job.
    """
    context = {
        'model': model,
        'session': model.Session,
        'user': c.user,
        'ignore_capacity_check': True
    }
    h.check_access('package_update', {'id': subset_id})
    subset = tk.get_action('package_show')(context, {'id': subset_id})
    orig_pkg = tk.get_action('package_show')(context, {'id': orig_id})
    # Keep everything through "-v", then append the zero-padded version.
    new_ver_name = subset['name'][:subset['name'].rfind("-v") + 2] + str(
        helpers.get_version_number(orig_pkg)).zfill(2)

    # add include_private for newer CKAN versions
    # ATTENTION deleted but not purged datasets cannot be found!
    search_results = tk.get_action('package_search')(
        context, {
            'rows': 10000,
            'fq': "name:%s" % (new_ver_name),
            'include_versions': True
        })
    if search_results['count'] > 0:
        h.flash_error(
            'The new version could not be created as another package already has the name "%s". Please create a new subset from the original package.'
            % (new_ver_name))
    else:
        # Fall back to ckanext-rq on CKAN versions without tk.enqueue_job.
        try:
            enqueue_job = tk.enqueue_job
        except AttributeError:
            from ckanext.rq.jobs import enqueue as enqueue_job

        enqueue_job(create_new_version_of_subset_job,
                    [c.user, subset, orig_pkg])
        h.flash_notice(
            'Your version is being created. This might take a while, you will receive an E-Mail when your version is available.'
        )
    redirect(
        h.url_for(controller='package', action='read', id=subset['name']))
def check_multipart(context, data_dict):
    """Check whether unfinished multipart upload already exists.

    :param context:
    :param data_dict: dict with required `id`

    :returns: None or dict with `upload` - existing multipart upload info
    :rtype: NoneType or dict

    """
    h.check_access('cloudstorage_check_multipart', data_dict)
    id = toolkit.get_or_bust(data_dict, 'id')
    try:
        # NOTE(review): .one() raises MultipleResultsFound (uncaught here)
        # if more than one unfinished upload exists for this resource.
        upload = model.Session.query(MultipartUpload).filter_by(
            resource_id=id).one()
    except NoResultFound:
        return
    upload_dict = upload.as_dict()
    # Number of parts uploaded so far.
    upload_dict['parts'] = model.Session.query(MultipartPart).filter(
        MultipartPart.upload == upload).count()
    return {'upload': upload_dict}
def build_nav_main(*args):
    ''' build a set of menu items.

    args: tuples of (menu type, title) eg ('login', _('Login')),
    optionally with a third auth-function element that must pass
    h.check_access for the item to appear.
    outputs <li><a href="...">title</a></li>
    '''
    rendered = []
    for entry in args:
        menu_item, title = entry[:2]
        # Entries with a third element are shown only when authorized.
        if len(entry) == 3 and not h.check_access(entry[2]):
            continue
        rendered.append(
            h._make_menu_item(menu_item, title, class_='list-group-item'))
    return ''.join(rendered)
def build_nav_main(*args):
    ''' build a set of menu items.

    args: tuples of (menu type, title) eg ('login', _('Login')); a
    third element, when present, names an auth check the current user
    must pass for the item to be rendered.
    outputs <li><a href="...">title</a></li>
    '''
    def _visible(entry):
        # Items with an auth function are hidden from unauthorized users.
        return len(entry) != 3 or h.check_access(entry[2])

    return ''.join(
        h._make_menu_item(entry[0], entry[1], class_='list-group-item')
        for entry in args if _visible(entry))
def get_editable_packages():
    """Return full dicts of every dataset the current user may update.

    Todo: There has to be a more succinct way to get a list of packages,
    as dicts, that the current user has permission to edit.
    """
    editable = []
    # All package names, regardless of authorization.
    all_datasets = toolkit.get_action('package_list')({}, {})
    for dataset in all_datasets or []:
        # Keep only datasets the user may update.
        if helpers.check_access('package_update', {'id': dataset}):
            editable.append(
                toolkit.get_action('package_show')({}, {'id': dataset}))
    return editable
def package_search(context, data_dict):
    """Authorization-aware wrapper around CKAN's package_search.

    For authorized users the Solr filter query is extended so private
    datasets of their organizations (all of them, for sysadmins) are
    included; unauthorized callers get an empty result set.
    """
    log.debug("context: {}".format(context))
    log.debug("data_dict: {}".format(data_dict))

    user = context.get('auth_user_obj', None)
    if user is None:
        user_name = context.get('user', '')
        user = ckan_model.User.get(user_name)

    if h.check_access('list_packages', data_dict=data_dict):
        if user is not None:
            context['ignore_capacity_check'] = True
            # FIX: 'fq' may be absent from the incoming dict; the `+=`
            # concatenations below would raise KeyError without this.
            data_dict.setdefault('fq', '')
            if data_dict.get('q', '') != '':
                # Move fielded terms (foo:bar) from q into fq.
                q_split = data_dict['q'].split(' ')
                query = " ".join([x for x in q_split if ':' not in x])
                rest = " ".join([x for x in q_split if ':' in x])
                data_dict['fq'] += rest
                data_dict['q'] = query
            if user.sysadmin:
                # Sysadmins see everything.
                data_dict['fq'] += ' +capacity:("private" OR "public")'
            else:
                # Include private datasets of orgs the user belongs to.
                memberships = AuthMember.by_user_id(user.id)
                org_names = [
                    ckan_model.Group.get(x.group_id).name
                    for x in memberships if x.role.org_member == True
                ]
                org_filters = [
                    'OR filter(capacity:"private" AND organization:{})'.format(
                        x) for x in org_names
                ]
                filters = " ".join(org_filters)
                data_dict['fq'] += '(capacity:"public" {})'.format(filters)
            log.debug("User fq: {}".format(data_dict['fq']))
        results = ckan_package_search(context=context, data_dict=data_dict)
    else:
        # Unauthorized: empty, well-formed result set.
        results = {}
        results['count'] = 0
        results['results'] = []
        results['facets'] = {}
        results['search_facets'] = {}
        results['sort'] = ''
    return results
def build_main(*args):
    ''' build a set of menu items.

    args: tuples of (menu type, title) eg ('login', _('Login')); an
    optional third element names an auth check that gates the item.
    outputs <div class="main-menu__item"><a href="...">title</a></div>
    '''
    wrap_start = '<div class="main-menu__item">'
    wrap_end = '</div>'
    rendered = []
    for entry in args:
        menu_item, title = entry[:2]
        # Skip items the current user is not authorized to see.
        if len(entry) == 3 and not check_access(entry[2]):
            continue
        rendered.append(
            _make_menu_item(menu_item, title, wrap_start, wrap_end,
                            wrap_start, wrap_end))
    return ''.join(rendered)
def read(self,id):
    """Show a harvest source with a paginated list of its packages.

    Python 2 / Pylons controller action.
    """
    try:
        context = {'model':model, 'user':c.user,
                   # detailed status only for users who can create jobs
                   'detailed': h.check_access('harvest_job_create',
                                              {'source_id':id})}
        c.source = p.toolkit.get_action('harvest_source_show')(context,
                                                               {'id':id})
        c.page = Page(
            collection=c.source['status']['packages'],
            page=request.params.get('page', 1),
            items_per_page=20,
            url=pager_url
        )
        return render('source/read.html')
    except p.toolkit.ObjectNotFound:
        abort(404,_('Harvest source not found'))
    except p.toolkit.NotAuthorized,e:  # Python 2 except syntax
        abort(401,self.not_auth_message)
def check_access_account_requests(context, data_dict=None):
    """Auth function for viewing account requests.

    :param context: auth context containing the acting user's name
    :param data_dict: unused
    :return: auth result dict; 'success' is True if user is sysadmin or
        admin in a top level org
    """
    user = context.get('user')
    orgs = model.Group.get_top_level_groups(type='organization')
    # any() replaces the manual flag-and-break loop; short-circuits on
    # the first org the user administers.
    user_is_admin_in_top_org = any(
        authz.has_user_permission_for_group_or_org(org.id, user, 'admin')
        for org in orgs or [])
    return {
        'success': bool(user_is_admin_in_top_org
                        or h.check_access('sysadmin'))
    }
def check_access_account_requests(context, data_dict=None):
    """Auth function for viewing account requests.

    :param context: auth context containing the acting user's name
    :param data_dict: unused
    :return: auth result dict; 'success' is True if user is sysadmin or
        admin in a top level org
    """
    user = context.get('user')
    orgs = model.Group.get_top_level_groups(type='organization')
    # any() with a generator replaces the flag variable + break loop.
    is_top_org_admin = any(
        authz.has_user_permission_for_group_or_org(org.id, user, 'admin')
        for org in orgs or [])
    return {
        'success': bool(is_top_org_admin or h.check_access('sysadmin'))
    }
def run(self):
    """Dispatch the requested migration script; sysadmins only."""
    if not h.check_access('sysadmin'):
        abort(401, _('Unauthorized to access migration scripts'))

    if 'import_taxonomy_tag_dictionaries' in request.params:
        script = s2_import_taxonomy_tag_dictionaries.S2_import_taxonomy_tag_dictionaries()
        c.script_results = script.run()
        return p.toolkit.render('ckanext/migration/result.html')

    if 'import_taxonomy_term_translations' in request.params:
        # Translation runs directly (no script object) with a long timeout.
        c.script_results = s3_import_taxonomy_term_translations.translate(
            timeout=7200)
        return p.toolkit.render(
            'ckanext/migration/taxonomy_translation_result.html')

    return p.toolkit.render('ckanext/migration/index.html')
def read(self, id):
    """Show a harvest source with a paginated list of its packages.

    Python 2 / Pylons controller action.
    """
    try:
        context = {
            'model': model,
            'user': c.user,
            # detailed status only for users who may create harvest jobs
            'detailed': h.check_access('harvest_job_create',
                                       {'source_id': id})
        }
        c.source = p.toolkit.get_action('harvest_source_show')(context, {
            'id': id
        })
        c.page = Page(collection=c.source['status']['packages'],
                      page=request.params.get('page', 1),
                      items_per_page=20,
                      url=pager_url)
        return render('source/read.html')
    except p.toolkit.ObjectNotFound:
        abort(404, _('Harvest source not found'))
    except p.toolkit.NotAuthorized, e:  # Python 2 except syntax
        abort(401, self.not_auth_message)
def upload_rdf(self):
    """Validate an RDF upload request and hand it to a celery worker."""
    payload = self._get_request_data()

    if 'package_id' not in payload:
        abort(400, 'Please provide a suitable package_id parameter')
    elif not check_access('package_update', {'id': payload['package_id']}):
        return self._finish_not_authz()

    if 'data' not in payload:
        abort(400, 'Please provide a suitable data parameter')
    if 'format' not in payload or payload['format'] not in SUPPORTED_RDF_SYNTAXES:
        abort(400, 'Please provide a suitable format parameter')

    endpoint = model.Session.query(SparqlEndpoint).filter(
        SparqlEndpoint.packages.any(
            Package.name == payload['package_id'])).first()
    if not endpoint:
        abort(404, 'No endpoint defined for provided package')

    # Connection details the background task needs for the endpoint.
    pkg_data = {
        'id': payload['package_id'],
        'sparulurl': endpoint.sparulurl,
        'storetype': endpoint.storetype,
        'graph': endpoint.graph,
        'username': endpoint.username,
        'passwd': endpoint.passwd,
        'isauthrequired': endpoint.isauthrequired,
    }
    celery.send_task('upload_rdf',
                     args=[pkg_data, payload['data'], payload['format']],
                     task_id=str(uuid.uuid4()))
    return self._finish_ok('Uploading... Check progress in package web.')
def edit(self, id):
    """Edit an organization's data.world credentials (GET shows the
    form with per-state package stats; POST validates and saves).

    Python 2 / Pylons controller action.
    """

    def validate(data):
        # Field-level checks first; the API key is verified against the
        # live data.world API only when those pass.
        error_dict = {}
        has_owner = data.get('owner')
        has_key = data.get('key')
        if tk.asbool(data.get('integration', 'False')):
            if not has_owner:
                error_dict['owner'] = ['Required']
            if not has_key:
                error_dict['key'] = ['Required']
        if tk.asbool(data.get('show_links', 'False')):
            if not has_owner or not has_key:
                error_dict['show_links'] = [
                    'This option available only '
                    'if credentials are provided'
                ]
        if not error_dict:
            api = API(has_owner, has_key)
            check = api.check_credentials()
            if not check:
                error_dict['key'] = ['Incorrect key']
        if error_dict:
            raise logic.ValidationError(error_dict)

    context = {
        'model': model,
        'session': model.Session,
        'user': c.user or c.author,
        'auth_user_obj': c.userobj
    }
    data_dict = {'id': id}
    stats = {}
    extra = {'errors': {}, 'error_summary': None, 'stats': stats}
    try:
        if not h.check_access('organization_update', data_dict):
            raise logic.NotAuthorized
        c.group_dict = logic.get_action('organization_show')(context,
                                                             data_dict)
        c.group = context['group']
        c.credentials = c.group.datadotworld_credentials
        if c.credentials is None:
            # First visit: create an empty credentials row for the org.
            c.credentials = Credentials(organization=c.group)
            model.Session.add(c.credentials)
    except logic.NotFound:
        base.abort(404, _('Organization not found'))
    except logic.NotAuthorized:
        base.abort(401,
                   _('User %r not authorized to edit %s') % (c.user, id))

    if request.method == 'POST':
        data = dict(request.POST)
        c.credentials.update(data)
        try:
            validate(data)
        except logic.ValidationError as e:
            extra['errors'] = e.error_dict
            extra['error_summary'] = e.error_summary
        else:
            # Mark the org's package extras as pending re-sync.
            query = model.Session.query(Extras).join(model.Package).join(
                model.Group,
                model.Package.owner_org == model.Group.id).filter(
                    model.Group.id == c.group.id)
            for item in query:
                item.state = 'pending'
            model.Session.commit()
            h.flash_success('Saved')
            if tk.asbool(c.credentials.integration):
                syncronize_org(c.group.id)
            return base.redirect_to('organization_dataworld', id=id)

    # Per-state package counts shown on the form.
    query = model.Session.query(
        func.count(model.Package.id).label('total'),
        Extras.state).join(model.Group,
                           model.Package.owner_org == model.Group.id).join(
                               Extras).group_by(Extras.state).filter(
                                   model.Package.owner_org == c.group.id)
    for amount, state in query:
        stats[state] = amount
    return base.render('organization/edit_credentials.html',
                       extra_vars=extra)
def index(self):
    """Render the statistics page.

    Heavy stats are cached in an in-memory cache; visibility is
    controlled by config (public, or sysadmin-only). Python 2 code
    (uses the `Queue` module).
    """
    c = p.toolkit.c
    our_cache = cache.get_cache('stats', type='memory')
    public_display, sysadmin_display, cache_timeout = get_cache_config()
    get_stats_display = public_display or (
        sysadmin_display and h.check_access('sysadmin'))
    if get_stats_display:
        stats = stats_lib.Stats()
        stats.init(our_cache, cache_timeout)
        rev_stats = stats_lib.RevisionStats()
        rev_stats.init(our_cache, cache_timeout)
        c.top_rated_packages = stats.top_rated_packages()
        c.most_edited_packages = stats.most_edited_packages()
        c.largest_groups = stats.largest_groups()
        c.top_package_owners = stats.top_package_owners()
        c.summary_stats = stats.summary_stats()
        c.activity_counts = stats.activity_counts()
        c.by_org = stats.by_org()
        c.res_by_org = stats.res_by_org()
        c.top_active_orgs = stats.top_active_orgs()
        c.user_access_list = stats.user_access_list()
        c.recent_datasets = stats.recent_datasets()
        c.new_packages_by_week = rev_stats.get_by_week('new_packages')
        c.num_packages_by_week = rev_stats.get_num_packages_by_week()
        c.package_revisions_by_week = rev_stats.get_by_week(
            'package_revisions')

        # Used in the legacy CKAN templates.
        c.packages_by_week = []

        # Used in new CKAN templates gives more control to the templates
        # for formatting.
        c.raw_packages_by_week = []
        for week_date, num_packages, cumulative_num_packages in c.num_packages_by_week:
            c.packages_by_week.append(
                '[new Date(%s), %s]' % (week_date.replace('-', ','),
                                        cumulative_num_packages))
            c.raw_packages_by_week.append({
                'date': h.date_str_to_datetime(week_date),
                'total_packages': cumulative_num_packages})

        c.all_package_revisions = []
        c.raw_all_package_revisions = []
        # Remember revision weeks so the new-dataset series below can be
        # aligned against them.
        week_queue = Queue.Queue()
        for week_date, revs, num_revisions, cumulative_num_revisions in c.package_revisions_by_week:
            c.all_package_revisions.append(
                '[new Date(%s), %s]' % (week_date.replace('-', ','),
                                        num_revisions))
            c.raw_all_package_revisions.append({
                'date': h.date_str_to_datetime(week_date),
                'total_revisions': num_revisions})
            week_queue.put(week_date)

        c.new_datasets = []
        c.raw_new_datasets = []
        # Pad weeks with no new packages with zero entries so both
        # series cover the same week range.
        for week_date, pkgs, num_packages, cumulative_num_packages in c.new_packages_by_week:
            revision_week_date = week_queue.get()
            while revision_week_date != week_date:
                c.new_datasets.append(
                    '[new Date(%s), %s]' % (
                        revision_week_date.replace('-', ','), 0))
                c.raw_new_datasets.append({
                    'date': h.date_str_to_datetime(revision_week_date),
                    'new_packages': 0})
                revision_week_date = week_queue.get()
            c.new_datasets.append(
                '[new Date(%s), %s]' % (week_date.replace('-', ','),
                                        num_packages))
            c.raw_new_datasets.append({
                'date': h.date_str_to_datetime(week_date),
                'new_packages': num_packages})
        # Trailing revision weeks with no new packages.
        while not week_queue.empty():
            revision_week_date = week_queue.get()
            c.new_datasets.append(
                '[new Date(%s), %s]' % (
                    revision_week_date.replace('-', ','), 0))
            c.raw_new_datasets.append({
                'date': h.date_str_to_datetime(revision_week_date),
                'new_packages': 0})

        return p.toolkit.render('stats/index.html')
    else:
        abort(403, _('Not authorized to see this page'))
def __init__(self):
    """Build the "Publish Data" menu; the service entry is conditional."""
    super(PublishMainMenu, self).__init__(_("Publish Data"))
    entries = [PublishDataMenu(), PublishToolsMenu()]
    # Service publishing shows up only for authorized users when the
    # service database is available.
    if helpers.check_access('can_create_service') and service_database_enabled():
        entries.append(PublishServiceMenu())
    self.children = entries
def initiate_multipart(context, data_dict):
    """Initiate new Multipart Upload.

    :param context:
    :param data_dict: dict with required keys:
        id: resource's id
        name: filename
        size: filesize

    :returns: MultipartUpload info
    :rtype: dict
    """
    h.check_access('cloudstorage_initiate_multipart', data_dict)
    id, name, size = toolkit.get_or_bust(data_dict, ['id', 'name', 'size'])
    user_id = None
    if context['auth_user_obj']:
        user_id = context['auth_user_obj'].id

    uploader = ResourceCloudStorage({'multipart_name': name})
    res_name = uploader.path_from_filename(id, name)

    upload_object = MultipartUpload.by_name(res_name)
    # Restart rather than resume: discard any previous upload of this object.
    if upload_object is not None:
        _delete_multipart(upload_object, uploader)
        upload_object = None

    if upload_object is None:
        # Abort stale uploads for the resource and remove their objects.
        for old_upload in model.Session.query(MultipartUpload).filter_by(
                resource_id=id):
            _delete_multipart(old_upload, uploader)

        _rindex = res_name.rfind('/')
        if ~_rindex:  # rfind() != -1, i.e. res_name contains a '/'
            try:
                name_prefix = res_name[:_rindex]
                for cloud_object in uploader.container.iterate_objects():
                    if cloud_object.name.startswith(name_prefix):
                        log.info('Removing cloud object: %s' % cloud_object)
                        cloud_object.delete()
            except Exception as e:
                log.exception('[delete from cloud] %s' % e)

        resp = uploader.driver.connection.request(
            _get_object_url(uploader, res_name) + '?uploads', method='POST')
        if not resp.success():
            raise toolkit.ValidationError(resp.error)

        try:
            upload_id = resp.object.find(
                '{%s}UploadId' % resp.object.nsmap[None]).text
        except AttributeError:
            # No default namespace: scan tags by suffix. FIX: the original
            # `filter(...)[0]` breaks on Python 3, where filter() returns
            # an iterator that cannot be indexed.
            upload_id_list = [
                e for e in resp.object.getchildren()
                if e.tag.endswith('UploadId')
            ]
            upload_id = upload_id_list[0].text
        upload_object = MultipartUpload(upload_id, id, res_name, size, name,
                                        user_id)
        upload_object.save()
    return upload_object.as_dict()
def filter(self, stream):
    """Genshi stream filter that injects SPARQL menu entries.

    Adds an admin-config link on admin pages, a global endpoint link in
    the main menu when one is enabled, and a per-dataset dropdown on
    package pages. Python 2 / Pylons code.
    """
    routes = request.environ.get('pylons.routes_dict')
    # Admin pages: link to the SPARQL endpoint configuration screen.
    if routes.get('controller') in (
            'admin',
            'ckanext.sparql.controllers.controller:SparqlAdminController'):
        isactive = 'active' if routes.get(
            'controller'
        ) == 'ckanext.sparql.controllers.controller:SparqlAdminController' else ''
        stream = stream | Transformer(
            '//ul[@class="nav nav-pills"]').append(
                HTML('''<li class="''' + isactive + '''">
                <a class href="/ckan-admin/sparql-config">
                    SPARQL Endpoint Configuration
                </a>
            </li>'''))
    # A globally enabled endpoint is advertised in the main menu.
    if model.Session.query(SparqlEndpoint).filter_by(
            isglobal=True, isenabled=True).first():
        stream = stream | Transformer('//div[@id="mainmenu"]').append(
            HTML('''<a class="" href="/sparql">SPARQL Endpoint</a>'''))
    try:
        packageid = c.pkg.id
    except:
        # Not on a package page: no package in template context.
        packageid = None
    if packageid:
        if routes.get(
                'controller'
        ) in ('package', 'related',
              'ckanext.sparql.controllers.controller:SparqlPackageController'
              ):
            sparqlendpoint = model.Session.query(SparqlEndpoint).filter(
                SparqlEndpoint.packages.any(
                    Package.name == routes.get('id'))).first()
            htmlstr = ''
            isactive = 'active' if routes.get(
                'controller'
            ) == 'ckanext.sparql.controllers.controller:SparqlPackageController' else ''
            # Dropdown appears for users who can query the endpoint or
            # edit the package.
            if (sparqlendpoint
                    and sparqlendpoint.isenabled) or check_access(
                        'package_update', {'id': packageid}):
                htmlstr += '''<li class="dropdown ''' + isactive + '''">
                    <a class="dropdown-toggle" data-toggle="dropdown" href="#"><img src="/icons/rdf_flyer.24" height="16px" width="16px" alt="None" class="inline-icon ">SPARQL Endpoint<b class="caret"></b></a>
                    <div class="dropdown-appears">
                    <ul class="dropdown-menu">'''
                if sparqlendpoint and sparqlendpoint.isenabled:
                    htmlstr += '''<li>
                        <a href="/dataset/%s/sparql"><img src="/images/icons/package.png" height="16px" width="16px" alt="None" class="inline-icon "> Query SPARQL Endpoint</a>
                    </li>''' % routes.get('id')
                if check_access('package_update', {'id': packageid}):
                    htmlstr += '''<li>
                        <a href="/dataset/%(id)s/edit/sparql"><img src="/images/icons/package_edit.png" height="16px" width="16px" alt="None" class="inline-icon "> Configure SPARQL Endpoint</a>
                    </li>''' % {
                        'id': routes.get('id')
                    }
                htmlstr += '''</ul>
                    </div>
                </li>'''
            stream = stream | Transformer(
                '//ul[@class="nav nav-pills"]').append(HTML(htmlstr))
    return stream
def __init__(self):
    """Assemble the "Publish Data" submenu entries."""
    super(PublishMainMenu, self).__init__(_("Publish Data"))
    self.children = [PublishDataMenu(), PublishToolsMenu()]
    # The service entry requires both authorization and an available
    # service database.
    allowed = helpers.check_access('can_create_service')
    if allowed and service_database_enabled():
        self.children.append(PublishServiceMenu())
def _check_access(self, id):
    """Abort with 401 unless the user is a journal admin or sysadmin."""
    ctx = self._context()
    pkg = tk.get_action('package_show')(ctx, {'id': id})
    authorized = check_journal_role(pkg, 'admin') or h.check_access('sysadmin')
    if not authorized:
        tk.abort(401, 'Unauthorized to manage DOIs')