def get_project_stats(_id, short_name):  # pragma: no cover
    """Get stats for project."""
    import pybossa.cache.projects as cached_projects
    import pybossa.cache.project_stats as stats
    from flask import current_app

    cached_projects.get_project(short_name)
    stats.update_stats(_id, current_app.config.get('GEO'))

def warm_project(_id, short_name, featured=False):
    """Warm the cache for a project; compute stats for busy or featured ones."""
    if _id not in projects_cached:
        cached_projects.get_project(short_name)
        cached_projects.n_tasks(_id)
        n_task_runs = cached_projects.n_task_runs(_id)
        cached_projects.overall_progress(_id)
        cached_projects.last_activity(_id)
        cached_projects.n_completed_tasks(_id)
        cached_projects.n_volunteers(_id)
        if n_task_runs >= 1000 or featured:
            # print ("Getting stats for %s as it has %s task runs" %
            #        (short_name, n_task_runs))
            stats.get_stats(_id, app.config.get('GEO'))
        projects_cached.append(_id)

def get_project_stats(_id, short_name):  # pragma: no cover
    """Get stats for project."""
    import pybossa.cache.projects as cached_projects
    import pybossa.cache.project_stats as stats
    from flask import current_app

    cached_projects.get_project(short_name)
    cached_projects.n_tasks(_id)
    cached_projects.n_task_runs(_id)
    cached_projects.overall_progress(_id)
    cached_projects.last_activity(_id)
    cached_projects.n_completed_tasks(_id)
    cached_projects.n_volunteers(_id)
    stats.get_stats(_id, current_app.config.get('GEO'))

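# --- Illustrative sketch (not part of the original module) ------------------
# The cache-warming helpers above are typically run as background jobs rather
# than called inline. A minimal sketch of how they might be enqueued with
# python-rq is shown below; the Redis connection details, the queue name
# 'low', and the sample project id/short name are assumptions made for
# illustration only.
def _example_enqueue_warm_jobs():  # pragma: no cover
    """Sketch: enqueue warm_project / get_project_stats as RQ background jobs."""
    from redis import StrictRedis
    from rq import Queue

    redis_conn = StrictRedis(host='localhost', port=6379)  # assumed connection
    low_queue = Queue('low', connection=redis_conn)        # assumed queue name

    project_id, short_name = 1, 'example_project'          # hypothetical project
    low_queue.enqueue(warm_project, project_id, short_name, featured=False)
    low_queue.enqueue(get_project_stats, project_id, short_name)
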
def export_tasks(current_user_email_addr, short_name,
                 ty, expanded, filetype, **filters):
    """Export tasks/taskruns from a project."""
    from pybossa.core import task_csv_exporter, task_json_exporter
    from pybossa.cache import projects as cached_projects

    project = cached_projects.get_project(short_name)

    try:
        # Export data and upload .zip file locally
        if filetype == 'json':
            path = task_json_exporter.make_zip(project, ty, expanded, **filters)
        elif filetype == 'csv':
            path = task_csv_exporter.make_zip(project, ty, expanded, **filters)
        else:
            path = None

        # Construct message
        if path is not None:
            # Success email
            subject = 'Data exported for your project: {0}'.format(project.name)
            msg = 'Your exported data is attached.'
        else:
            # Failure email
            subject = 'Data export failed for your project: {0}'.format(project.name)
            msg = 'There was an issue with your export. ' + \
                  'Please try again or report this issue ' + \
                  'to a {0} administrator.'
            msg = msg.format(current_app.config.get('BRAND'))

        body = 'Hello,\n\n' + msg + '\n\nThe {0} team.'
        body = body.format(current_app.config.get('BRAND'))
        mail_dict = dict(recipients=[current_user_email_addr],
                         subject=subject,
                         body=body)
        message = Message(**mail_dict)

        # Attach export file to message
        if path is not None:
            with current_app.open_resource(path) as fp:
                message.attach(path.split('/')[-1], "application/zip", fp.read())

        mail.send(message)
        job_response = '{0} {1} file was successfully exported for: {2}'
        return job_response.format(
            ty.capitalize(), filetype.upper(), project.name)
    except:
        current_app.logger.exception(
            'Export email failed - Project: {0}'.format(project.name))
        subject = 'Email delivery failed for your project: {0}'.format(project.name)
        msg = ('There was an error when attempting to deliver '
               'your data export via email.')
        body = 'Hello,\n\n' + msg + '\n\nThe {0} team.'
        body = body.format(current_app.config.get('BRAND'))
        mail_dict = dict(recipients=[current_user_email_addr],
                         subject=subject,
                         body=body)
        message = Message(**mail_dict)
        mail.send(message)
        raise

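# --- Illustrative sketch (not part of the original module) ------------------
# export_tasks builds the export archive, emails it to the requesting user and
# returns a human-readable status string. A minimal sketch of a direct call is
# shown below; the recipient address, project short name, export type and
# filter values are assumptions made for illustration only.
def _example_export_tasks_call():  # pragma: no cover
    """Sketch: export a project's task runs as a JSON zip and email it."""
    result = export_tasks(
        current_user_email_addr='owner@example.com',  # hypothetical recipient
        short_name='example_project',                 # hypothetical project
        ty='task_run',                                # assumed export type
        expanded=False,                               # no expanded task data
        filetype='json')                              # 'json' or 'csv'
    return result
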
def flush_task_runs(project_short_name, confirmed):
    project = cached_projects.get_project(project_short_name)
    if current_user.admin or project.owner_id == current_user.id:
        if confirmed == "confirmed":
            # Delete every task run associated with the project.
            associated_task_runs = TaskRun.query.filter_by(project_id=project.id).all()
            for task_run in associated_task_runs:
                db.session.delete(task_run)
            db.session.commit()

            # Iterate over all tasks associated with the project, and mark them
            # as 'ongoing'. Some tasks might be marked as 'completed' if enough
            # task_runs were done.
            associated_tasks = Task.query.filter_by(project_id=project.id).all()
            for task in associated_tasks:
                if task.state != u"ongoing":
                    task.state = u"ongoing"
            db.session.commit()

            # Reset project data in the cache
            cached_projects.clean_project(project.id)
            # Note: The cache will hold the old data about the users who
            # contributed to the tasks associated with this project until the
            # User Cache Timeout. Querying the list of contributors to this
            # project and then individually updating their caches would be a
            # very expensive query, hence we avoid that for the time being.
            flash('All Task Runs associated with this project have been successfully deleted.', 'success')
            return redirect(url_for('project.task_settings',
                                    short_name=project_short_name))
        elif confirmed == "unconfirmed":
            # Obtain data required by the project profile renderer.
            (project, owner, n_tasks, n_task_runs,
             overall_progress, last_activity,
             n_results) = projects_view.project_by_shortname(project_short_name)
            return render_template('geotagx/projects/delete_task_run_confirmation.html',
                                   project=project,
                                   owner=owner,
                                   n_tasks=n_tasks,
                                   n_task_runs=n_task_runs,
                                   overall_progress=overall_progress,
                                   last_activity=last_activity,
                                   n_results=n_results,
                                   n_completed_tasks=cached_projects.n_completed_tasks(project.id),
                                   n_volunteers=cached_projects.n_volunteers(project.id))
        else:
            abort(404)
    else:
        abort(404)

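# --- Illustrative sketch (not part of the original module) ------------------
# flush_task_runs reads the confirmation step from its second argument, so it
# is presumably exposed on the owning blueprint with both the project short
# name and the confirmation token in the URL. The blueprint wiring and URL
# rule below are assumptions made for illustration only.
def _example_register_flush_view(blueprint):  # pragma: no cover
    """Sketch: register the confirmation/deletion URL for flush_task_runs."""
    blueprint.add_url_rule(
        '/<project_short_name>/flush-task-runs/<confirmed>',  # assumed rule
        view_func=flush_task_runs,
        methods=['GET', 'POST'])
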
def import_repo(short_name):
    """Import a project from a GitHub repo."""
    project = project_repo.get_by_shortname(short_name)
    if not project:  # pragma: no cover
        abort(404)
    ensure_authorized_to('update', project)
    github_url = request.args.get('github_url')

    try:
        gh_repo = GitHubRepo(github_url)
    except GitHubURLError as e:
        flash(str(e), 'error')
        return redirect(url_for('.sync', short_name=project.short_name))

    gh_repo.load_contents()
    try:
        gh_repo.validate()
    except InvalidPybossaProjectError as e:
        flash(str(e), 'error')
        return redirect(url_for('.sync', short_name=project.short_name))

    form = GitHubProjectForm(request.form)
    project_json = gh_repo.get_project_json()
    _populate_form(form, gh_repo.contents, project_json)
    categories = project_repo.get_all_categories()

    if request.method == 'POST' and form.validate():
        info = json.loads(form.additional_properties.data)
        original_short_name = project_json['short_name']
        if form.tutorial.data:
            resp = github.get(form.tutorial.data)
            info['tutorial'] = resp.content.replace(original_short_name,
                                                    project.short_name)
        if form.task_presenter.data:
            resp = github.get(form.task_presenter.data)
            info['task_presenter'] = resp.content.replace(original_short_name,
                                                          project.short_name)
        if form.results.data:
            resp = github.get(form.results.data)
            info['results'] = resp.content.replace(original_short_name,
                                                   project.short_name)
        long_description = None
        if form.long_description.data:
            resp = github.get(form.long_description.data)
            long_description = resp.content

        old_project = Project(**project.dictize())
        project.description = form.description.data
        project.long_description = long_description
        project.category_id = form.category_id.data
        project.webhook = form.webhook.data
        project.info = info

        if form.thumbnail.data:
            data = github.get(form.thumbnail.data).content
            prefix = time.time()
            filename = "project_%s_thumbnail_%i.png" % (project.id, prefix)
            container = "user_%s" % current_user.id
            _download(filename, container, data)
            if project.info.get("thumbnail"):
                uploader.delete_file(project.info["thumbnail"], container)
            project.info['container'] = container
            project.info['thumbnail'] = filename

        try:
            project_repo.update(project)
        except sqlalchemy.exc.DataError as e:  # pragma: no cover
            flash('''DataError: {0} <br><br>Please check the files being imported from GitHub'''.format(e.orig), 'danger')
            return redirect(url_for('.sync', short_name=project.short_name))

        auditlogger.add_log_entry(old_project, project, current_user)
        cached_cat.reset()
        cached_projects.get_project(project.short_name)
        flash(gettext('Project updated!'), 'success')
        return redirect(url_for('project.tasks', short_name=project.short_name))
    elif request.method == 'POST':  # pragma: no cover
        flash(gettext('Please correct the errors'), 'error')
    else:
        form.process()
        form.description.data = project_json.get('description', '')
        form.webhook.data = project_json.get('webhook', '')
        reserved_keys = ['name', 'short_name', 'description',
                         'webhook', 'category_id']
        for k in reserved_keys:
            project_json.pop(k, None)
        form.additional_properties.data = json.dumps(project_json)
    return render_template('projects/github/import.html',
                           form=form,
                           github_url=github_url,
                           project=project)

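# --- Illustrative sketch (not part of the original module) ------------------
# import_repo drives the GitHubRepo helper through a fixed sequence: construct
# it with a repository URL, load the file listing, validate that the repository
# looks like a PyBossa project, then read its project description. The sketch
# below isolates that sequence outside the view; the default repository URL is
# an assumption made for illustration only.
def _example_load_github_project(github_url='https://github.com/org/repo'):  # pragma: no cover
    """Sketch: fetch and validate a PyBossa project description from GitHub."""
    try:
        gh_repo = GitHubRepo(github_url)
    except GitHubURLError as err:
        return None, str(err)
    gh_repo.load_contents()
    try:
        gh_repo.validate()
    except InvalidPybossaProjectError as err:
        return None, str(err)
    return gh_repo.get_project_json(), None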