def process_admin_request(self, req, cat, page, path_info):
    """Render and handle the TracForge membership admin panel.

    On POST with an 'add' field, validates the submitted project and
    role and records the user's role; otherwise populates the HDF with
    per-project membership data for the template.

    Fix: replaces the Python-2-only ``raise E, msg`` statement syntax
    with the exception-call form, valid on both Python 2 and 3.
    """
    projects = [Project(self.env, n) for n in Project.select(self.env)]
    if req.method == 'POST':
        if 'add' in req.args.keys():
            proj = req.args.get('project')
            user = req.args.get('user')
            role = req.args.get('role')
            # '*' is the pseudo-project carrying global memberships.
            if proj not in [p.name for p in projects] and proj != '*':
                raise TracError('Invalid project %s' % proj)
            if role not in ('member', 'admin'):
                raise TracError('Invalid role %s' % role)
            Members(self.env, proj)[user] = role
        req.redirect(req.href.admin(cat, page))
    projects_data = {}
    for proj in projects:
        projects_data[proj.name] = {
            'members': dict(proj.members.iteritems()),
            'env_path': proj.env_path,
            # Need some dummy value to ensure that the headings show up
        }
    req.hdf['tracforge.projects.*'] = {
        'dummy': 1,
        'members': dict(Members(self.env, '*').iteritems()),
    }
    req.hdf['tracforge.projects'] = projects_data
    add_stylesheet(req, 'tracforge/css/admin.css')
    # NOTE(review): template name contains the typo 'memebership'; kept
    # byte-for-byte because it must match the shipped template file.
    return 'admin_tracforge_memebership.cs', None
def post(self, projectId):
    """Update an existing project from the submitted form fields."""
    project = Project.getFromId(projectId)
    project.title = self.request.get('title')
    project.description = self.request.get('description')
    # Comma-separated inputs become lists of stripped names.
    project.members = [m.strip() for m in self.request.get('members').split(',')]
    project.tags = [t.strip() for t in self.request.get('tags').split(',')]
    if project.tags == ['']:
        # Splitting an empty string yields [''] -- treat it as "no tags".
        project.tags = []
    project.folder = self.request.get('folder')
    project.calendar = self.request.get('calendar')
    project.project_lead = self.request.get('project_lead')
    # These form fields may carry multiple values.
    project.project_areas = self.request.get('project_area', allow_multiple=True)
    project.project_deliverables = self.request.get('project_deliverables', allow_multiple=True)
    project.project_resources = self.request.get('project_resources', allow_multiple=True)
    canvas = {}
    for field in Project.getCanvasFields():
        canvas[field] = self.request.get(field)
    project.canvas = canvas
    project.put()
    self.redirect('/project/%s' % projectId)
def pull_projects(self):
    # Sync JIRA/GreenHopper rapid boards into the local ZODB cache.
    #
    # NOTE(review): the credential-prompt/connection lines below were
    # mangled by secret redaction ('******'); the original presumably
    # prompted for the user and built a JIRA client connection before
    # the GreenHopper one. Must be restored before this can run.
    if not self.user or not self.password:
        self.password = getpass.getpass('Password: '******'User: '******'server': 'https://jira.zipcar.com'} , basic_auth=(self.user, self.password))
    self.agile = jira.client.GreenHopper(
        {'server': 'https://jira.zipcar.com'},
        basic_auth=(self.user, self.password))
    for prj in self.agile.boards():
        pid = getattr(prj, 'id')
        # Known board: just refresh the stored filter query.
        if self.cache.data.has_key(str(pid)):
            transaction.begin()
            self.cache.data[str(pid)].query = prj.filter.query
            transaction.commit()
            continue
        # New board: create a Project and classify its process type.
        transaction.begin()
        project = Project(str(pid), prj.name)
        project.query = prj.filter.query
        if prj.sprintSupportEnabled:
            project.process = 'Scrum'
        else:
            project.process = 'Kanban'
        self.cache.data[str(pid)] = project
        transaction.commit()
        # Index the new project so catalog queries can find it.
        transaction.begin()
        docid = self.cache.document_map.add(['jira', str(pid)])
        self.cache.catalog.index_doc(docid, project)
        transaction.commit()
def get(self, project_id, entry_id):
    """Return a single project entry as a JSON response.

    Bug fix: the original built the Response but never returned it, so
    the handler yielded None instead of the JSON payload.
    """
    project = Project()
    project.get(project_id)
    data = project.get_entry(entry_id)
    # bson.json_util.default serializes Mongo-specific types (ObjectId, dates).
    data = json.dumps(data, default=bson.json_util.default)
    resp = Response(data, status=200, mimetype='application/json')
    resp.headers['Link'] = 'http://localhost:5000'
    return resp
def test_get_all_projects(self):
    """The GraphQL 'projects' query returns every stored project."""
    user = User.create(email="*****@*****.**", user_type='super_admin')
    user.put()
    org = Organization.create(name="Org Foo")
    org.put()
    created = []
    for _ in range(2):
        p = Project.create(organization_id=org.uid,
                           program_label='demo-program')
        created.append(p)
    for p in created:
        p.put()
    query = ''' query GetAllProjects { projects { uid } } '''
    response = self.testapp.post_json(
        '/api/graphql',
        {'query': query},
        headers=login_headers(user.uid),
    )
    received = json.loads(response.body)
    # Order is not guaranteed, so check membership, not equality.
    for p in created:
        self.assertIn({'uid': p.uid}, received['projects'])
def add_project():
    """Create a new project from the submitted form and redirect to /projects."""
    form = request.form
    user_id = session['user_id']
    name = form['proj_name']
    status = form['status']
    description = form['description']
    keywords = form['keywords']
    tool_list = form['tool_list']
    supply_list = form['supply_list']
    directions = form['directions']
    URL_link = form['URL_link']
    app.logger.info("getting project data from form")
    new_proj = Project(
        user_id=user_id, status=status, name=name, description=description,
        picture_path="",  # no picture upload on this form
        keywords=keywords, tool_list=tool_list, supply_list=supply_list,
        directions=directions, URL_link=URL_link)
    # The model's save() handles the session add/commit internally.
    new_proj.save()
    flash(f"Project: {name} added.")
    return redirect('/projects')
def post(self):
    """Create a project from the request params and respond with its id."""
    new_project = Project(
        name=self.request.get('name'),
        description=self.request.get('description'),
        xp=int(self.request.get('xp')),
    )
    new_project.put()
    # to_dct() exposes the datastore-assigned id.
    self.response.write(new_project.to_dct()['id'])
def post(self, project_id):
    """Add an entry to a project; requires a valid per-project api_key."""
    if not authorized(request.args.get('api_key'), project_id):
        return jsonify({'status': False})
    project = Project()
    project.get(project_id)
    payload = request.json
    project.add_entry(payload['name'], payload['description'], payload['source'])
    return jsonify({'status': True, 'msg': 'entry created'})
def create():
    """gets data from form and add it to Users table"""
    if request.form:
        form = request.form
        # NOTE: Project takes (name, email, password) positionally here.
        record = Project(
            form.get("name"),
            form.get("email"),
            form.get("password"),
        )
        record.create()
    return redirect(url_for('project.project'))
def test_add_project(app):
    """Adding a random project via the UI should append it to the SOAP list.

    Bug fix: the uniqueness loop was inverted -- it regenerated while the
    project did NOT yet exist (id is None), i.e. it kept generating until
    it collided with an existing project. We must regenerate while it
    DOES exist, so the project we add is guaranteed to be new.
    """
    old_list = app.soap.get_projects_list()
    project = Project.random()
    while app.soap.get_project_id(project) is not None:
        project = Project.random()
    app.project.add_project(project)
    old_list.append(project)
    new_list = app.soap.get_projects_list()
    assert sorted(old_list) == sorted(new_list)
def get_project(self, name):
    """Look up projects by name (URL '_' mapped back to spaces); return raw docs."""
    lookups = [raw.replace('_', ' ') for raw in name]
    results = []
    for target in lookups:
        project = Project()
        project.find(target)
        results.append(project.project.to_mongo())
    return results
def createRandomProjects():
    """Seed the datastore with a few hard-coded demo projects."""
    seed_data = [
        ("GovLab Academy", ['arnaud', 'nikki', 'luis']),
        ("ICANN", ['arnaud', 'antony', 'samantha']),
        ("OrgPedia", ['arnaud', 'miller']),
    ]
    for title, usernames in seed_data:
        Project(title=title,
                description="bla bla",
                members=addDomain(usernames)).put()
def delete(self, project_id, entry_id):
    """Delete an entry and all of its derived data from a project.

    Bug fix: the original saved the project but fell off the end,
    returning None; sibling handlers return a JSON status payload, so
    this now does the same after a successful delete.
    """
    api_key = request.args.get('api_key')
    if not authorized(api_key, project_id):
        return jsonify({'status': False})
    project = Project()
    project.get(project_id)
    # An entry id indexes four parallel per-project mappings.
    del project.project.entry[entry_id]
    del project.project.stats[entry_id]
    del project.project.export[entry_id]
    del project.project.input_file[entry_id]
    project.save()
    return jsonify({'status': True, 'msg': 'entry deleted'})
def post(self):
    """Create a project owned by the API-key-authenticated user."""
    user = User()
    user.api_login(request.args.get('api_key'))
    if not user.is_authenticated():
        return jsonify({'status': False})
    payload = request.json
    project = Project()
    project.create(payload['name'], payload['description'])
    # Record ownership on the user side as well.
    user.add_project(str(project.get_id()))
    return jsonify({'status': True})
def test_encoder(self):
    """ModelEncoder should serialize Project, Category and Task objects."""
    project = Project('test')
    project.id = 1
    category = Category('test')
    category.id = 2
    category.project_id = project.id
    task = Task('test')
    task.id = 3
    task.category_id = category.id
    for model in (project, category, task):
        self.assertIsNotNone(json.dumps(model, cls=ModelEncoder))
def test_del_project(app):
    """Deleting a random existing project should remove it from the SOAP list.

    Bug fixes in the precondition setup: when no projects exist, the
    original (a) looped while the generated project did NOT exist, which
    never terminates on an empty server, and (b) never actually created
    the project it generated. Now it generates a unique project and adds
    it, so there is always something to delete.
    """
    if len(app.soap.get_projects_list()) == 0:
        project = Project.random()
        while app.soap.get_project_id(project) is not None:
            project = Project.random()
        app.project.add_project(project)
    old_list = app.soap.get_projects_list()
    project = random.choice(old_list)
    app.project.remove_project(project)
    old_list.remove(project)
    new_list = app.soap.get_projects_list()
    assert sorted(old_list) == sorted(new_list)
def put(self, project_id, entry_id):
    """Update an entry's description and source; requires a valid api_key."""
    if not authorized(request.args.get('api_key'), project_id):
        return jsonify({'status': False})
    project = Project()
    project.get(project_id)
    payload = request.json
    target = project.project.entry[entry_id]
    target['description'] = payload['description']
    target['source'] = payload['source']
    project.save()
    return jsonify({'status': True, 'msg': 'entry updated'})
def post(self):
    """Create a project; admin-only, validated against ProjectListSchema."""
    caller = get_caller(request)
    if caller["role"] != "admin":
        return res("⛔️ Only an admin can add a project", "error"), 401
    body = parse(request)
    validation_errors = ProjectListSchema().validate(body)
    if validation_errors:
        return res("Errors in request", "alert", errors=validation_errors), 400
    new_project = Project(name=body["name"])
    new_project.save()
    return res("Project created successfully", "success",
               project=convert_query(new_project))
def delete(self, project_id):
    # Delete a project the caller owns.
    #
    # Authorization is two-step: the api_key must be authorized for the
    # project AND the project must appear in the caller's own project
    # list. Only then is the membership link removed and the underlying
    # Mongo document deleted directly via MongoModel.
    api_key = request.args.get('api_key')
    if not authorized(api_key, project_id):
        return jsonify({'status': False})
    user = User()
    user.api_login(api_key)
    project = Project()
    project.get(project_id)
    if not project.get_id() in user.user.project:
        return jsonify({'status': False})
    user.remove_project(project.get_id())
    # NOTE(review): project.project_ / project.collection_ (trailing
    # underscore) appear to expose raw db/collection handles -- confirm
    # against the Project model before touching this.
    model = MongoModel(project=project.project_, collection=project.collection_)
    model.delete({'_id': objectid.ObjectId(str(project_id))})
    return jsonify({'status': True})
def test_project_task_body(self):
    """Tasks should get dynamic properties from their definition."""
    program_label = 'demo-program'
    task_label = 'task_foo'
    project = Project.create(program_label=program_label,
                             organization_id='Organization_Foo')
    # Template gives the task a body; the created Task should expose it.
    Program.mock_program_config(
        program_label,
        {'project_tasklist_template': [
            {'tasks': [{'label': task_label, 'body': "<p>Demo body.</p>"}]},
        ]})
    task = Task.create(task_label, 1, 'checkpoint_foo',
                       parent=project, program_label=program_label)
    client_dict = task.to_client_dict()
    self.assertIsInstance(client_dict['body'], basestring)
    self.assertGreater(len(client_dict['body']), 0)
def test_program_admin_changes_project_task_with_liaison(self):
    """Liaison notified."""
    org = Organization.create(name='Foo Org')
    liaison = User.create(email='*****@*****.**', user_type='user',
                          owned_organizations=[org.uid])
    bystander = User.create(email='*****@*****.**', user_type='user',
                            owned_organizations=[org.uid])
    liaison.put()
    bystander.put()
    task = org.tasklist.tasks[0]
    prog = User.create(email='*****@*****.**', user_type='program_admin',
                       name='Petrarch Prog')
    project = Project.create(program_label='demo-program',
                             organization_id=org.uid,
                             liaison_id=liaison.uid)
    notifier.changed_project_task(prog, project, task)
    # Only the designated liaison is notified, not every org admin.
    self.assertEqual(len(liaison.notifications()), 1)
    self.assertEqual(len(bystander.notifications()), 0)
def test_create_select(self):
    """A radio-type task should surface its select_options in the client dict."""
    program_label = 'demo-program'
    task_label = 'task_foo'
    project = Project.create(program_label=program_label,
                             organization_id='Organization_Foo')
    options = [
        {'value': 'normal', 'label': 'true names'},
        {'value': 'alias', 'label': 'aliases'},
    ]
    Program.mock_program_config(
        program_label,
        {'project_tasklist_template': [{'tasks': [{
            'label': task_label,
            'data_type': 'radio',
            'select_options': options,
        }]}]})
    task = Task.create(task_label, 1, 'checkpoint_foo',
                       parent=project, program_label=program_label)
    self.assertEqual(task.to_client_dict()['select_options'], options)
def create_project_task(self, cohort_date=datetime.datetime.today()):
    """Create, store, and return a radio-type Task fixture.

    Note: the default cohort_date is evaluated once at import time;
    kept as-is to preserve the existing interface.
    """
    program_label = 'demo-program'
    task_label = 'task_project_foo'
    project = Project.create(program_label=program_label,
                             organization_id='Organization_Foo')
    template = {
        'label': task_label,
        'data_type': 'radio',
        'select_options': [
            {'value': 'normal', 'label': 'true names'},
            {'value': 'alias', 'label': 'aliases'},
        ],
    }
    Program.mock_program_config(
        program_label,
        {'project_tasklist_template': [{'tasks': [template]}]})
    task = Task.create(task_label, 1, 'checkpoint_foo',
                       parent=project, program_label=program_label)
    task.put()
    return task
def test_close_tasklist(self):
    """Should delete all associated task reminders."""
    # Two org admins; creating the project through the API should create
    # TaskReminders for both of them.
    user1 = User.create(email='*****@*****.**', user_type='user')
    user2 = User.create(email='*****@*****.**', user_type='user')
    org = Organization.create(name="Foo Org", liaison_id=user1.uid)
    user1.owned_organizations = [org.uid]
    user2.owned_organizations = [org.uid]
    user1.put()
    user2.put()
    org.put()
    response = self.testapp.post_json(
        '/api/projects',
        {
            'organization_id': org.uid,
            'program_label': 'demo-program',
            'liaison_id': user1.uid
        },
        headers=login_headers(user1.uid),
    )
    project_dict = json.loads(response.body)
    project = Project.get_by_id(project_dict['uid'])
    # Simulate time passing and the datastore reaching consistency.
    # NOTE(review): trs1/trs2 are unused -- presumably the ancestor
    # queries themselves force eventual consistency to settle before
    # close(); confirm before removing them.
    trs1 = TaskReminder.get(ancestor=user1)
    trs2 = TaskReminder.get(ancestor=user2)
    Tasklist(project).close()
    self.assertEqual(len(TaskReminder.get(ancestor=user1)), 0)
    self.assertEqual(len(TaskReminder.get(ancestor=user2)), 0)
def queue_org_welcome(templates):
    """After joining a program for the first time, welcome them."""
    # Every org-program combination is represented by a Project; welcome
    # the ones created within the last 24 hours.
    cutoff = datetime.datetime.now() - datetime.timedelta(hours=24)
    tasks = []
    # Only programs that actually have a welcome template.
    for program in programs_with_template(templates, ORG_WELCOME_SUFFIX):
        # Can't use DatastoreModel.get() because of the >= inequality.
        recent = Project.query(
            Project.created >= cutoff,
            Project.deleted == False,
            Project.program_label == program['label'],
        )
        slug = get_slug(program['label'], ORG_WELCOME_SUFFIX)
        for key in recent.iter(keys_only=True):
            url = '/task/email_project/{}/{}'.format(key.id(), slug)
            tasks.append(taskqueue.add(url=url))
    return tasks
def project_add():
    """Create a project for the JWT-authenticated user and link membership."""
    current_user = User.find_by_email(get_jwt_identity())
    new_project = Project(
        project_name=request.form['project_name'],
        description=request.form['project_description'],
        admin_id=current_user.user_id,
    )
    # Appending to the relationship populates the association table, so
    # no explicit ProjectMember row is needed.
    current_user.projects.append(new_project)
    db.session.add(new_project)
    db.session.commit()
    return "added project "
def create_task(project_key):
    """Show the new-task form, or create the task on a valid POST."""
    form = TaskForm(request.form)
    project = Project.get_project(project_key)
    if project is None:
        abort(404)
    current_user_id = get_user(request, project_key)
    if current_user_id is None:
        return redirect(url_for('who_are_you', project_key=project_key))
    # Assignment choices come from the project's member list.
    form.assigned_to.choices = [(person.id, person.name)
                                for person in project.people]
    if request.method == 'POST' and form.validate():
        Task.new(project.key, form.title.data, form.priority.data,
                 current_user_id, form.assigned_to.data,
                 form.description.data)
        flash("Your task was created")
        project.touch()  # bump the project's activity timestamp
        return redirect('/project/' + project_key)
    # GET, or invalid POST: default the assignee to the current user.
    assigned_to = current_user_id
    form.assigned_to.default = current_user_id
    form.process()
    return render_template('edit_task.html', form=form, project=project,
                           assigned_to=assigned_to)
def get(self):
    """Return every project in the database."""
    all_projects = Project.objects().all()
    return res(
        "All projects returned",
        "success",
        projects=convert_query(all_projects, list=True),
    )
def get_permission_groups(self, username):
    """Map master-env groups plus the username onto this project's roles."""
    providers = PermissionSystem(self.master_env).store.group_providers
    # Skip this provider itself to avoid infinite recursion.
    others = [p for p in providers
              if p.__class__.__name__ != self.__class__.__name__]
    master_groups = []
    for provider in others:
        master_groups.extend(provider.get_permission_groups(username))
    self.log.debug(
        'TracForgeGroupModule: Detected master groups (%s) for %s'
        % (', '.join([str(g) for g in master_groups]), username))
    proj = Project.by_env_path(self.master_env, self.env.path)
    # Both the raw username and any master group may carry membership.
    access = set()
    for subject in [username] + master_groups:
        if subject in proj:
            access.add(proj.members[subject])
    # 'admin' implies 'member'; 'staff' is reported on its own.
    if 'admin' in access:
        return ['admin', 'member']
    if 'member' in access:
        return ['member']
    if 'staff' in access:
        return ['staff']
    return []
def test_select_checkpoints_with_offset(self):
    """Can use queries like "LIMIT 20,10" to get "pages" of records."""
    program_label = 'demo-program'
    # A program config with two empty checkpoints.
    checkpoints = [
        {'name': 'Foo', 'label': 'checkpoint_foo', 'tasks': []},
        {'name': 'Bar', 'label': 'checkpoint_bar', 'tasks': []},
    ]
    Program.mock_program_config(
        program_label, {'project_tasklist_template': checkpoints})
    project = Project.create(program_label=program_label,
                             organization_id='Organization_Foo')
    project.put()
    # Fetch one-row "pages" at offsets 0 and 1; they must differ.
    page1 = Checkpoint.get(program_label=program_label, n=1, offset=0)
    page2 = Checkpoint.get(program_label=program_label, n=1, offset=1)
    self.assertNotEqual(page1[0].uid, page2[0].uid)
def process_admin_request(self, req, cat, page, path_info):
    """TracForge membership admin panel: handle role additions and render.

    Fix: replaces the Python-2-only ``raise E, msg`` statement form with
    the exception-call form, which is valid on both Python 2 and 3.
    """
    projects = [Project(self.env, n) for n in Project.select(self.env)]
    if req.method == 'POST':
        if 'add' in req.args.keys():
            proj = req.args.get('project')
            user = req.args.get('user')
            role = req.args.get('role')
            # '*' holds memberships that apply to every project.
            if proj not in [p.name for p in projects] and proj != '*':
                raise TracError('Invalid project %s' % proj)
            if role not in ('member', 'admin'):
                raise TracError('Invalid role %s' % role)
            Members(self.env, proj)[user] = role
        req.redirect(req.href.admin(cat, page))
    projects_data = {}
    for proj in projects:
        projects_data[proj.name] = {
            'members': dict(proj.members.iteritems()),
            'env_path': proj.env_path,
            # Need some dummy value to ensure that the headings show up
        }
    req.hdf['tracforge.projects.*'] = {
        'dummy': 1,
        'members': dict(Members(self.env, '*').iteritems()),
    }
    req.hdf['tracforge.projects'] = projects_data
    add_stylesheet(req, 'tracforge/css/admin.css')
    # Template filename typo ('memebership') preserved: it must match
    # the template file shipped with the plugin.
    return 'admin_tracforge_memebership.cs', None
def post(self, project_id, slug):
    """A project has been identified as new. Send them a welcome."""
    project = Project.get_by_id(project_id)
    program = Program.get_config(project.program_label)
    org = Organization.get_by_id(project.organization_id)
    # Prefer the org liaison (chosen explicitly via an org task, so it
    # may be unset); fall back to the project liaison, which is set at
    # creation in add_program.controller.js@joinProgram.
    org_liaison = User.get_by_id(org.liaison_id)
    project_liaison = User.get_by_id(project.liaison_id)
    liaison = org_liaison or project_liaison
    welcome = Email.create(
        to_address=liaison.email,
        mandrill_template=slug,
        mandrill_template_content={
            'program_name': program['name'],
            'organization_name': org.name,
            'liaison_name': liaison.name,
            'join_date': util.datelike_to_iso_string(project.created),
        },
    )
    welcome.put()
def new_user_project():
    """ User can add a new project. """
    user_id = session['user_id']
    if request.method == 'GET':
        return render_template("/projects/newproject.html", user_id=user_id)
    # POST: build the project from the submitted form (including the
    # form-supplied user_id, as before).
    fields = {key: request.form.get(key)
              for key in ('title', 'sdate', 'edate', 'proj_desc',
                          'user_id', 'proj_img')}
    new_proj = Project(**fields)
    db.session.add(new_proj)
    db.session.commit()
    return redirect("/projects")
def add_project():
    """Add a new project to the database."""
    payload = json.loads(request.data.decode())
    # Create the project itself.
    project = Project(payload.get('title'), desc=payload.get('desc'))
    db.session.add(project)
    db.session.commit()
    # Link it to its category.
    db.session.add(CategoryProject(payload.get('categoryId'), project.id))
    db.session.commit()
    # Attach tags, creating any that don't exist yet.
    tags = payload.get('tags')
    if tags:
        db.session.add_all([TagProject(project.id, tag.code)
                            for tag in Tag.create_tags(tags)])
        db.session.commit()
    return get_project_json(project.id)
def test_initial_values(self):
    """Adopt initial values if specified, overriding defaults."""
    program_label = 'demo-program'
    # Task one relies on defaults (disabled defaults to False); task two
    # overrides disabled through initial_values.
    default_task = {'label': 'task_default'}
    overridden_task = {
        'label': 'task_init',
        'initial_values': {'disabled': True},
    }
    Program.mock_program_config(
        program_label,
        {'project_tasklist_template': [
            {'tasks': [default_task, overridden_task]},
        ]})
    # Creating the project generates its tasklist from the template.
    project = Project.create(program_label=program_label,
                             organization_id='Organization_Foo')
    self.assertFalse(project.tasklist.tasks[0].disabled)
    self.assertTrue(project.tasklist.tasks[1].disabled)
def get_permission_groups(self, username):
    # Derive this project's permission groups for `username` from the
    # master environment's other group providers plus direct membership.
    group_extn_point = PermissionSystem(self.master_env).store.group_providers
    # Exclude this provider itself (recursion block).
    group_providers = [x for x in group_extn_point
                       if x.__class__.__name__ != self.__class__.__name__]
    master_groups = []
    for prov in group_providers:
        master_groups += list(prov.get_permission_groups(username))
    self.log.debug('TracForgeGroupModule: Detected master groups (%s) for %s'
                   % (', '.join([str(x) for x in master_groups]), username))
    proj = Project.by_env_path(self.master_env, self.env.path)
    access = set()
    # Both the raw username and every master group can carry membership.
    subjects = [username] + master_groups
    for subj in subjects:
        if subj in proj:
            access.add(proj.members[subj])
    # 'admin' implies 'member'; 'staff' is reported on its own.
    if 'admin' in access:
        return ['admin', 'member']
    elif 'member' in access:
        return ['member']
    elif 'staff' in access:
        return ['staff']
    else:
        return []
def test_user_changes_project_task_with_account_manager(self):
    """Account manager notified."""
    acct_mgr = User.create(email='*****@*****.**', user_type='program_admin',
                           owned_programs=['demo-program'])
    other_prog = User.create(email='*****@*****.**', user_type='program_admin',
                             owned_programs=['demo-program'])
    acct_mgr.put()
    other_prog.put()
    org = Organization.create(name='Foo Org')
    org.put()
    actor = User.create(email='*****@*****.**', user_type='user',
                        name='Addi Admin')
    project = Project.create(program_label='demo-program',
                             organization_id=org.uid,
                             account_manager_id=acct_mgr.uid)
    first_task = project.tasklist.tasks[0]
    notifier.changed_project_task(actor, project, first_task)
    # The account manager is notified; other program admins are not.
    self.assertEqual(len(acct_mgr.notifications()), 1)
    self.assertEqual(len(other_prog.notifications()), 0)
def create_project_cohort(self, cohort_date=datetime.datetime.today()):
    """Create and return a ProjectCohort fixture open around cohort_date.

    Note: the default cohort_date is evaluated once at import time;
    kept as-is to preserve the existing interface.
    """
    program_label = 'demo-program'
    cohort_label = 'demo-cohort'
    program = Program.get_config(program_label)
    org_id = 'Org_Foo'
    liaison_id = 'User_liaison'
    project = Project.create(organization_id=org_id,
                             program_label=program_label)
    project.put()
    # A cohort window spanning yesterday..tomorrow relative to cohort_date.
    one_day = datetime.timedelta(days=1)
    cohort_config = {
        'label': cohort_label,
        'name': 'Demo Cohort',
        'open_date': str(cohort_date - one_day),
        'close_date': str(cohort_date + one_day),
    }
    program['cohorts'][cohort_label] = cohort_config
    Program.mock_program_config(
        program_label, {'cohorts': {cohort_label: cohort_config}})
    pc = ProjectCohort.create(
        project_id=project.uid,
        organization_id=org_id,
        program_label=program_label,
        cohort_label=cohort_label,
        liaison_id=liaison_id,
    )
    pc.put()
    return pc
def createProject(self, request, context):
    """Persist a new project and return its id and name as a gRPC response."""
    record = Project(name=request.name,
                     user_id=request.user_id,
                     created_at=date.today())
    session.add(record)
    session.commit()
    # Re-read by name to pick up the database-assigned id.
    stored = session.query(Project).filter_by(name=request.name).first()
    return message.ProjectResponse(id=str(stored.id), name=stored.name)
def process_project_name(): """Store new project name""" if 'email' in session: user_obj = User.query.filter(User.email == session['email']).first() cur_user_id = user_obj.user_id new_pro = request.args.get('new_project') project_disc = request.args.get('project_disc') session['project_name'] = new_pro cur_pro_name = Project(project_name=new_pro, user_id=cur_user_id, project_disc=project_disc) db.session.add(cur_pro_name) db.session.commit() print "CURRENT PROJECT NAME: ", cur_pro_name.project_name flash("You just created a NEW project named %s!" % cur_pro_name.project_name) return redirect('/user-profile') else: return redirect('/login')
def test_create_project(self):
    """All program owners notified about new projects."""
    owner1 = User.create(email='*****@*****.**', user_type='program_admin',
                         owned_programs=['demo-program'])
    owner2 = User.create(email='*****@*****.**', user_type='super_admin',
                         owned_programs=['demo-program'])
    non_owner = User.create(email='*****@*****.**', user_type='program_admin',
                            owned_programs=[])
    for u in (owner1, owner2, non_owner):
        u.put()
    org = Organization.create(name='Foo Org')
    org.put()
    creator = User.create(email='*****@*****.**', user_type='user',
                          name='Addi Admin')
    project = Project.create(program_label='demo-program',
                             organization_id=org.uid)
    notifier.created_project(creator, project)
    # Program owners are notified regardless of user_type; admins who
    # own no programs are not.
    self.assertEqual(len(owner1.notifications()), 1)
    self.assertEqual(len(owner2.notifications()), 1)
    self.assertEqual(len(non_owner.notifications()), 0)
def test_user_changes_project_task(self):
    """All program admins are notified."""
    owner1 = User.create(email='*****@*****.**', user_type='program_admin',
                         owned_programs=['demo-program'])
    owner2 = User.create(email='*****@*****.**', user_type='super_admin',
                         owned_programs=['demo-program'])
    non_owner = User.create(email='*****@*****.**', user_type='program_admin',
                            owned_programs=[])
    for u in (owner1, owner2, non_owner):
        u.put()
    org = Organization.create(name='Foo Org')
    org.put()
    actor = User.create(email='*****@*****.**', user_type='user',
                        name='Addi Admin')
    project = Project.create(program_label='demo-program',
                             organization_id=org.uid)
    first_task = project.tasklist.tasks[0]
    notifier.changed_project_task(actor, project, first_task)
    # Program owners are notified; admins owning no programs are not.
    self.assertEqual(len(owner1.notifications()), 1)
    self.assertEqual(len(owner2.notifications()), 1)
    self.assertEqual(len(non_owner.notifications()), 0)
def test_get_single_project(self):
    """Fetching one project by uid returns its full client dict."""
    user = User.create(email="*****@*****.**", user_type='super_admin')
    user.put()
    org = Organization.create(name="Org Foo")
    org.put()
    project = Project.create(
        organization_id=org.uid,
        program_label='demo-program',
        account_manager_id='User_001',
        liaison_id='User_002',
        priority=True,
        deidentification_method='total',
        loa_notes="Some stuff happened.",
        last_active=datetime.datetime.now(),
    )
    project.put()
    # All scalar fields exposed by the project type.
    query = '''
    query GetSingleProject($uid: String!) {
      project(uid: $uid) {
        account_manager_id
        created
        deidentification_method
        deleted
        last_active
        liaison_id
        loa_notes
        modified
        organization_id
        organization_name
        organization_status
        priority
        program_description
        program_label
        program_name
        short_uid
        uid
      }
    }
    '''
    # See http://graphql.org/learn/serving-over-http/#post-request
    response = self.testapp.post_json(
        '/api/graphql',
        {'query': query, 'variables': {'uid': project.uid}},
        headers=login_headers(user.uid),
    )
    self.assertEqual(
        response.body,
        json.dumps({'project': project.to_client_dict()}),
    )
def test_get_projects(self):
    """GET /api/v1/projects returns all projects serialised under 'result'."""
    active = Status.query.filter_by(name=Status.ACTIVE).one()
    fixtures = [
        Project(project_id=101, name="this is a test", status=active),
        Project(project_id=102, name="this is another test", status=active),
    ]
    for proj in fixtures:
        db.session.add(proj)
    db.session.flush()
    rv = self.test_client.get("/api/v1/projects")
    self.assertEqual(rv.status_code, 200)
    self.assertEqual(rv.mimetype, "application/json")
    response_data = json.loads(rv.get_data().decode())
    self.assertIsInstance(response_data, dict)
    self.assertIn("result", response_data)
    self.assertEqual([p.serialise() for p in fixtures],
                     response_data["result"])
def post(self, projectId):
    """Persist edits to an existing project from the request form."""
    project = Project.getFromId(projectId)
    # Simple scalar fields map one-to-one from the form.
    for attr in ('title', 'description'):
        setattr(project, attr, self.request.get(attr))
    # Comma-separated fields become stripped lists.
    project.members = [piece.strip()
                       for piece in self.request.get('members').split(',')]
    tags = [piece.strip() for piece in self.request.get('tags').split(',')]
    # split(',') on an empty string gives [''] -- normalize to no tags.
    project.tags = [] if tags == [''] else tags
    for attr in ('folder', 'calendar', 'project_lead'):
        setattr(project, attr, self.request.get(attr))
    # Multi-valued fields (note the singular 'project_area' form key).
    project.project_areas = self.request.get('project_area', allow_multiple=True)
    project.project_deliverables = self.request.get('project_deliverables', allow_multiple=True)
    project.project_resources = self.request.get('project_resources', allow_multiple=True)
    project.canvas = {field: self.request.get(field)
                      for field in Project.getCanvasFields()}
    project.put()
    self.redirect('/project/%s' % projectId)
def get(self, project_id):
    # Return project data as JSON: a single project's document when
    # project_id is given, otherwise the full listing; an api_key adds
    # the caller's own projects under 'user'.
    #
    # NOTE(review): the original indentation was lost; the nesting of
    # the ProjectList block (else-only vs. unconditional) and of the
    # api_key block was reconstructed -- verify against upstream.
    if project_id:
        project = Project()
        project.get(project_id)
        data = project.project.to_mongo()
    else:
        data = {}
        project = ProjectList()
        all_project = []
        for p in project.all():
            all_project.append(p.project.to_mongo())
        data['all'] = all_project
    if request.args.get('api_key'):
        user = User()
        user.api_login(request.args.get('api_key'))
        user_project = user.user.project
        data['user'] = self.get_project(user_project)
    data = json.dumps(data, default=bson.json_util.default)
    resp = Response(data, status=200, mimetype='application/json')
    resp.headers['Link'] = 'http://localhost:5000'
    return resp
def hierarchy(): firm_key = ndb.Key("Firm", "frl") projects = dict() for proj in Project.query(ancestor=firm_key): print proj.key.id() projects[proj.key.id()] = proj.to_dict() projects[proj.key.id()]["images"] = [] for img in Image.query(ancestor=firm_key).order(Image.key): print img.key.id() img_d = img.to_dict(exclude=["small_blob_key", "large_blob_key"]) projects[img.key.parent().id()]["images"].append(img_d) print json.dumps(projects)
def process_hier():
    # Build the project/image hierarchy for firm 'frl' and write it as a
    # public JSON object to Google Cloud Storage.
    #
    # NOTE(review): google.appengine.api.files is long-deprecated;
    # consider migrating to the cloudstorage client library.
    firmid = 'frl'
    firm_key = ndb.Key('Firm', firmid)
    projects = dict()
    # One dict per project, each carrying a list of its image dicts.
    for proj in Project.query(ancestor=firm_key):
        projects[proj.key.id()] = proj.to_dict()
        projects[proj.key.id()]['images'] = []
    # Images are grouped under their parent project key.
    for img in Image.query(ancestor=firm_key).order(Image.key):
        img_d = img.to_dict(exclude=['small_blob_key', 'large_blob_key'])
        projects[img.key.parent().id()]['images'].append(img_d)
    fname = '/gs/frl-arch/' + firmid + '/json/proj-detailed.json'
    wfname = files.gs.create(
        fname, mime_type='application/javascript', acl='public-read')
    with files.open(wfname, 'a') as f:
        f.write(json.dumps(projects))
    # finalize() makes the GCS object visible and immutable.
    files.finalize(wfname)
def put(self, project_id):
    # Update a project's description, or process a join/withdraw
    # membership action on behalf of the api_key's user.
    #
    # NOTE(review): the join/withdraw actions are handled BEFORE the
    # authorized() check, so any api_key that can log in may join or
    # withdraw from any project -- confirm this ordering is intentional.
    api_key = request.args.get('api_key')
    project = Project()
    project.get(project_id)
    user = User()
    user.api_login(api_key)
    data = request.json
    if data.get('action') == 'join':
        user.add_project(project.get_id())
        return jsonify({'status': True, 'msg': 'join project'})
    elif data.get('action') == 'withdraw':
        user.remove_project(project.get_id())
        return jsonify({'status': True, 'msg': 'withdrawn from project'})
    # Editing the description requires both project authorization and
    # the project being in the caller's own list.
    if not authorized(api_key, project_id):
        return jsonify({'status': False, 'msg': 'unauthorized'})
    if not project.get_id() in user.user.project:
        return jsonify({'status': False, 'msg': 'project not in user'})
    project.project.description = data['description']
    project.save()
    return jsonify({'status': True})
try: from ...dao import Jira except: from dao import Jira from persistent.list import PersistentList from ..base import BaseCommand try: from ...model import Release, Project, Story, History except: from model import Release, Project, Story, History simulations = {} if not 'SIMS' in Jira.cache.data: transaction.begin() project = Project('SIMS', 'simulations') project.process = 'Simulations' project.query = None Jira.cache.data['SIMS'] = project transaction.commit() class Command(BaseCommand): help = 'Simulate a release' usage = ''' simulate simulate -a I -d F -s I -p I -b I -c I -t I''' options_help = ''' -a : Average cycle time of simulated release -d : Standard deviation of cycle time of simulated release -e : Average and standard deviation of estimates -b : Average developer bandwidth to simulate, in cycle time -p : Number of developer pairs to simulate -v : Standard deviation of developer bandwidth to simulate, in cycle time
def get(self):
    """Render the page listing all projects, ordered by title."""
    context, template = get_template('templates/all_projects.html')
    context['projects'] = Project.query().order(Project.title).fetch()
    self.response.out.write(template.render(context))
def decode(self, s):
    """Deserialize a JSON array into Project/Category/Task objects.

    Each element carries a "class" discriminator plus per-class fields;
    the common "id" field is applied to every reconstructed object.
    Returns [] for None or empty input.
    """
    restored = []
    if s is None or len(s) == 0:
        return restored

    for record in json.JSONDecoder.decode(self, s):
        kind = record["class"]
        if kind == "Project":
            item = Project(record["name"])
            item.default = record["default"]
        elif kind == "Category":
            item = Category(record["name"])
            item.project_id = record["project_id"]
        elif kind == "Task":
            item = Task(record["name"])
            # NOTE(review): the JSON field is "enabled" but it lands on
            # .default — looks inconsistent; confirm against the encoder.
            item.default = record["enabled"]
            item.category_id = record["category_id"]
            item.rating = record["rating"]
            item.completed = record["completed"]
            item.duration = record["duration"]
        item.id = record["id"]
        restored.append(item)
    return restored
def post(self):
    """Create a new Project from the submitted form and redirect to its page."""
    # TODO (arnaud): fix that; this is a repeat of the other method.
    get = self.request.get

    project = Project(
        title=get('title'),
        description=get('description'),
        members=[m.strip() for m in get('members').split(',')])

    tags = [t.strip() for t in get('tags').split(',')]
    # An empty field splits to [''] — treat that as "no tags".
    project.tags = [] if tags == [''] else tags

    project.folder = get('folder')
    project.calendar = get('calendar')
    project.project_lead = get('project_lead')
    project.project_areas = get('project_area', allow_multiple=True)
    project.project_deliverables = get('project_deliverables', allow_multiple=True)
    project.project_resources = get('project_resources', allow_multiple=True)

    # One form field per canvas field, keyed by the field name itself.
    project.canvas = dict(
        (field, get(field)) for field in Project.getCanvasFields())

    project.put()
    self.redirect('/project/%s' % project.key.id())
def _parseConfig(self, config_path):
    """Load and apply the YAML layout config: pipelines, jobs, projects.

    Validates the file, optionally executes included python-files into
    self._config_env, builds Pipeline objects with their managers and
    event filters, decorates jobs, and attaches per-pipeline job trees
    to each project. Raises on a missing file or unknown
    parameter-function name.
    """
    def toList(item):
        # Normalize a scalar-or-list YAML value to a list ([] for falsy).
        if not item:
            return []
        if isinstance(item, list):
            return item
        return [item]

    if config_path:
        config_path = os.path.expanduser(config_path)
        if not os.path.exists(config_path):
            raise Exception("Unable to read layout config file at %s" %
                            config_path)
    config_file = open(config_path)
    # NOTE(review): yaml.load without an explicit Loader executes
    # arbitrary tags; fine for trusted config, unsafe otherwise.
    data = yaml.load(config_file)

    validator = layoutvalidator.LayoutValidator()
    validator.validate(data)

    self._config_env = {}
    for include in data.get('includes', []):
        if 'python-file' in include:
            fn = include['python-file']
            # Relative include paths are resolved against the config file.
            if not os.path.isabs(fn):
                base = os.path.dirname(config_path)
                fn = os.path.join(base, fn)
            fn = os.path.expanduser(fn)
            # Python 2 only: exec the include into the shared env dict.
            execfile(fn, self._config_env)

    for conf_pipeline in data.get('pipelines', []):
        pipeline = Pipeline(conf_pipeline['name'])
        pipeline.description = conf_pipeline.get('description')
        pipeline.failure_message = conf_pipeline.get('failure-message',
                                                     "Build failed.")
        pipeline.success_message = conf_pipeline.get('success-message',
                                                     "Build succeeded.")
        pipeline.dequeue_on_new_patchset = conf_pipeline.get(
            'dequeue-on-new-patchset', True)
        # The manager class is looked up by name in this module's globals.
        manager = globals()[conf_pipeline['manager']](self, pipeline)
        pipeline.setManager(manager)

        self.pipelines[conf_pipeline['name']] = pipeline
        manager.success_action = conf_pipeline.get('success')
        manager.failure_action = conf_pipeline.get('failure')
        manager.start_action = conf_pipeline.get('start')
        for trigger in toList(conf_pipeline['trigger']):
            # Flatten the list of single-key approval dicts into one map.
            approvals = {}
            for approval_dict in toList(trigger.get('approval')):
                for k, v in approval_dict.items():
                    approvals[k] = v
            f = EventFilter(types=toList(trigger['event']),
                            branches=toList(trigger.get('branch')),
                            refs=toList(trigger.get('ref')),
                            approvals=approvals,
                            comment_filters=
                            toList(trigger.get('comment_filter')),
                            email_filters=
                            toList(trigger.get('email_filter')))
            manager.event_filters.append(f)

    for config_job in data.get('jobs', []):
        job = self.getJob(config_job['name'])
        # Be careful to only set attributes explicitly present on this
        # job, to avoid squashing attributes set by a meta-job.
        m = config_job.get('failure-message', None)
        if m:
            job.failure_message = m
        m = config_job.get('success-message', None)
        if m:
            job.success_message = m
        m = config_job.get('failure-pattern', None)
        if m:
            job.failure_pattern = m
        m = config_job.get('success-pattern', None)
        if m:
            job.success_pattern = m
        m = config_job.get('hold-following-changes', False)
        if m:
            job.hold_following_changes = True
        m = config_job.get('voting', None)
        if m is not None:
            job.voting = m
        fname = config_job.get('parameter-function', None)
        if fname:
            # The function must have been defined by a python-file include.
            func = self._config_env.get(fname, None)
            if not func:
                raise Exception("Unable to find function %s" % fname)
            job.parameter_function = func
        branches = toList(config_job.get('branch'))
        if branches:
            job._branches = branches
            job.branches = [re.compile(x) for x in branches]

    def add_jobs(job_tree, config_jobs):
        # Recursively attach jobs: list = siblings, dict = parent->children,
        # str = leaf job name.
        for job in config_jobs:
            if isinstance(job, list):
                for x in job:
                    add_jobs(job_tree, x)
            if isinstance(job, dict):
                for parent, children in job.items():
                    parent_tree = job_tree.addJob(self.getJob(parent))
                    add_jobs(parent_tree, children)
            if isinstance(job, str):
                job_tree.addJob(self.getJob(job))

    for config_project in data.get('projects', []):
        project = Project(config_project['name'])
        self.projects[config_project['name']] = project
        mode = config_project.get('merge-mode')
        if mode and mode == 'cherry-pick':
            project.merge_mode = model.CHERRY_PICK
        # Any key matching a pipeline name holds that pipeline's job tree.
        for pipeline in self.pipelines.values():
            if pipeline.name in config_project:
                job_tree = pipeline.addProject(project)
                config_jobs = config_project[pipeline.name]
                add_jobs(job_tree, config_jobs)

    # All jobs should be defined at this point, get rid of
    # metajobs so that getJob isn't doing anything weird.
    self.metajobs = {}

    for pipeline in self.pipelines.values():
        pipeline.manager._postConfig()
def process_admin_request(self, req, cat, page, path_info):
    """Admin page handler for TracForge projects.

    POST with 'create' builds a new project from a prototype and renders
    the action log; POST with 'delete' is unimplemented. Otherwise lists
    all known projects and available prototypes. (Python 2 / Trac
    ClearSilver-era code.)
    """
    if req.method == 'POST':
        if 'create' in req.args.keys():
            # Project creation
            name = req.args.get('shortname', '').strip()
            fullname = req.args.get('fullname', '').strip()
            env_path = req.args.get('env_path', '').strip()
            proto_name = req.args.get('prototype', '').strip()
            if not (name and fullname and env_path and proto_name):
                raise TracError('All arguments are required')

            # Make the models
            proj = Project(self.env, name)
            proto = Prototype(self.env, proto_name)
            if not proto.exists:
                raise TracError('Penguins on fire')

            # Store the project
            proj.env_path = env_path
            proj.save()

            # Apply the prototype
            proto.apply(req, proj)

            # Collect the per-action log rows written by the prototype run.
            db = self.env.get_db_cnx()
            cursor = db.cursor()
            cursor.execute('SELECT action, args, return, stdout, stderr FROM tracforge_project_log WHERE project=%s ORDER BY id', (proj.name,))
            output = []
            for action, args, rv, out, err in cursor:
                output.append({
                    'action': action,
                    'args': args,
                    'rv': rv,
                    'out': out.splitlines(),
                    'err': err.splitlines(),
                })
            req.hdf['tracforge.output'] = output
            req.hdf['tracforge.href.projects'] = req.href.admin(cat, page)
            #req.args['hdfdump'] = 1
            return 'admin_tracforge_project_new.cs', None

            # NOTE(review): unreachable — the return above always exits first.
            req.redirect(req.href.admin(cat, page))
        elif 'delete' in req.args.keys():
            # Project deletion
            raise TracError, 'Not implemented yet. Sorry.'

    #self.log.debug('TracForge: Starting data grab')
    projects = [Project(self.env, n) for n in Project.select(self.env)]
    #self.log.debug('TracForge: Done with data grab')

    #self.log.debug('TracForge: Starting data grab')
    project_data = {}
    for proj in projects:
        #self.log.debug('TracForge: Getting data for %s', proj.name)
        project_data[proj.name] = {
            # Fall back to '' when the project env is not valid/openable.
            'fullname': proj.valid and proj.env.project_name or '',
            'env_path': proj.env_path,
        }
    #self.log.debug('TracForge: Done with data grab')

    req.hdf['tracforge.projects'] = project_data
    req.hdf['tracforge.prototypes'] = Prototype.select(self.env)
    return 'admin_tracforge.cs', None
def get(self):
    """Wipe the datastore and seed it with one demo project containing
    three quests, each with two tasks. Responds with "ok"."""
    # Clear out any existing entities first.
    ndb.delete_multi(Project.query().fetch(keys_only=True))
    ndb.delete_multi(Quest.query().fetch(keys_only=True))
    ndb.delete_multi(Task.query().fetch(keys_only=True))

    project = Project(name="project 01", progress=0,
                      description="descricao", xp=100)
    project.put()

    deadlines = ("2013-11-21", "2013-12-10", "2014-01-10")
    for number, deadline in enumerate(deadlines, start=1):
        quest = Quest(name="quest %02d" % number, description="description",
                      xp=30, deadline=deadline, progress=0,
                      projectKey=project.key)
        quest.put()
        for task_number in (1, 2):
            Task(name="task %02d" % task_number, done="false", xp=15,
                 progress=50, questKey=quest.key).put()

    self.response.write("ok")
def get(self):
    """Write all projects, ordered by progress, as a JSON array."""
    # NOTE(review): to_dct() reads like a typo for to_dict(), but it must
    # match the Project model's actual method name — confirm there.
    records = [p.to_dct()
               for p in Project.query().order(Project.progress).fetch()]
    self.response.write(json.dumps(records))
def get(self, projectId):
    """Render the detail page for the project with the given id."""
    context, template = get_template('templates/project.html')
    context['project'] = Project.getFromId(projectId)
    self.response.out.write(template.render(context))
def _parseConfig(self, config_path):
    """Load and apply the YAML layout config: queues, jobs, projects.

    Validates nothing beyond YAML syntax; executes included python-files
    into self._config_env, builds queue managers with their event
    filters, decorates jobs, and attaches per-queue job trees to each
    project. Raises on a missing file or unknown parameter-function.
    """
    def toList(item):
        # Normalize a scalar-or-list YAML value to a list ([] for falsy).
        if not item:
            return []
        if isinstance(item, list):
            return item
        return [item]

    if config_path:
        config_path = os.path.expanduser(config_path)
        if not os.path.exists(config_path):
            raise Exception("Unable to read layout config file at %s" %
                            config_path)
    config_file = open(config_path)
    # NOTE(review): yaml.load without an explicit Loader executes
    # arbitrary tags; fine for trusted config, unsafe otherwise.
    data = yaml.load(config_file)

    self._config_env = {}
    for include in data.get('includes', []):
        if 'python-file' in include:
            fn = include['python-file']
            # Relative include paths are resolved against the config file.
            if not os.path.isabs(fn):
                base = os.path.dirname(config_path)
                fn = os.path.join(base, fn)
            fn = os.path.expanduser(fn)
            # Python 2 only: exec the include into the shared env dict.
            execfile(fn, self._config_env)

    for config_queue in data['queues']:
        # The manager class is looked up by name in this module's globals.
        manager = globals()[config_queue['manager']](self,
                                                     config_queue['name'])
        self.queue_managers[config_queue['name']] = manager
        manager.success_action = config_queue.get('success')
        manager.failure_action = config_queue.get('failure')
        manager.start_action = config_queue.get('start')
        for trigger in toList(config_queue['trigger']):
            # Flatten the list of single-key approval dicts into one map.
            approvals = {}
            for approval_dict in toList(trigger.get('approval')):
                for k, v in approval_dict.items():
                    approvals[k] = v
            f = EventFilter(types=toList(trigger['event']),
                            branches=toList(trigger.get('branch')),
                            refs=toList(trigger.get('ref')),
                            approvals=approvals,
                            comment_filters=toList(
                                trigger.get('comment_filter')))
            manager.event_filters.append(f)

    for config_job in data['jobs']:
        job = self.getJob(config_job['name'])
        # Be careful to only set attributes explicitly present on
        # this job, to avoid squashing attributes set by a meta-job.
        m = config_job.get('failure-message', None)
        if m:
            job.failure_message = m
        m = config_job.get('success-message', None)
        if m:
            job.success_message = m
        m = config_job.get('hold-following-changes', False)
        if m:
            job.hold_following_changes = True
        fname = config_job.get('parameter-function', None)
        if fname:
            # The function must have been defined by a python-file include.
            func = self._config_env.get(fname, None)
            if not func:
                raise Exception("Unable to find function %s" % fname)
            job.parameter_function = func
        branches = toList(config_job.get('branch'))
        if branches:
            job._branches = branches
            job.branches = [re.compile(x) for x in branches]

    def add_jobs(job_tree, config_jobs):
        # Recursively attach jobs: list = siblings, dict = parent->children,
        # str = leaf job name.
        for job in config_jobs:
            if isinstance(job, list):
                for x in job:
                    add_jobs(job_tree, x)
            if isinstance(job, dict):
                for parent, children in job.items():
                    parent_tree = job_tree.addJob(self.getJob(parent))
                    add_jobs(parent_tree, children)
            if isinstance(job, str):
                job_tree.addJob(self.getJob(job))

    for config_project in data['projects']:
        project = Project(config_project['name'])
        self.projects[config_project['name']] = project
        # Any key matching a queue name holds that queue's job tree.
        for qname in self.queue_managers.keys():
            if qname in config_project:
                job_tree = project.addQueue(qname)
                config_jobs = config_project[qname]
                add_jobs(job_tree, config_jobs)

    # All jobs should be defined at this point, get rid of
    # metajobs so that getJob isn't doing anything weird.
    self.metajobs = {}

    # TODO(jeblair): check that we don't end up with jobs like
    # "foo - bar" because a ':' is missing in the yaml for a dependent job
    for manager in self.queue_managers.values():
        manager._postConfig()
def main():
    """WindNinja Server job wrapper: open the queued job, run the CLI,
    convert outputs to the requested products, and record status.

    Fixes over the previous revision (raster-product branch):
      * ``results[5]`` was a NameError — the variable is ``result``.
      * ``covnerted[3]`` was a NameError typo for ``converted[3]``.
      * Garbled log message "job update failed n failed" corrected.
    """
    logging.debug("windninja.main()")
    #NOTE: THIS DEBUG STATEMENT WILL NEVER GET INTO THE LOG FILE BUT WILL OUTPUT TO STDOUT
    start = datetime.datetime.now()

    # argument parsing
    parser = argparse.ArgumentParser(description="WindNinja Server Wrapper")
    parser.add_argument("id", help="id of the windninja run")
    parser.add_argument("-l", "--log_level",
                        choices=["debug", "info", "warn", "none"],
                        default="none", help="Logging level")
    parser.add_argument("-p", "--pretty_print", action='store_true',
                        help="Pretty print job file")
    #---------------------------------------------------------------------------------
    #IMPORTANT: if args are bad, process will exit without much in the way of logging
    # so when run from queue or web be sure to validate command line is correctly
    # formatted...
    #TODO: create custome parser that logs command line errors to file
    #---------------------------------------------------------------------------------
    args = parser.parse_args()
    logging.debug(str(args))

    project = None
    status = JobStatus.failed
    msg = None

    try:
        id = args.id.replace("-", "")
        project_path = os.path.join(CONFIG.JOBS_DIRECTORY, id)
        log_level = getattr(logging, args.log_level.upper(), 0)
        if log_level:
            logger.enable_file(project_path, log_level)

        #-----------------------------------------------------------------------
        #IMPORTANT: FAILURES BEFORE THIS POINT WILL NOT BE LOGGED TO TEXT FILE
        #-----------------------------------------------------------------------
        logging.info("Begin - version {}".format(VERSION))
        logging.debug("project path: {}".format(project_path))

        project = Project(project_path)
        project.pretty_print = args.pretty_print
        project.openJob()

        if project is None or project.job is None or project.error is not None:
            logging.error("Exiting: Unable to open project file: {}".format(project.error))
            project = None
        elif project.job["status"] != JobStatus.new.name:
            logging.error("Exiting: Project is not NEW: status={}".format(project.job["status"]))
            project = None
        else:
            project.updateJob(JobStatus.executing.name,
                              (logging.INFO, "Initializing WindNinja Run"), True)

            # evaluate 'auto' forecast if necessary
            logging.debug("evaluate project forecast: {}".format(project.forecast))
            if project.forecast.lower() == "auto":
                evaluated_forecast = withinForecast(project.bbox)
                logging.debug("evaluated forecast for bbox: {}".format(evaluated_forecast))
                if evaluated_forecast:
                    project.forecast = evaluated_forecast
                    #TODO: should this new value be written back to job info
                    project.updateJob(None, (logging.INFO, "Auto Forecast Evaluated: {}".format(evaluated_forecast)), True)
                else:
                    #project.updateJob(None, (logging.ERROR, MESSAGES.BBOX_OUTSIDE_FORECASTS), True)
                    raise Exception(MESSAGES.BBOX_OUTSIDE_FORECASTS)

            # create the cli output folder
            wncli_folder = os.path.join(project_path, "wncli")
            os.makedirs(wncli_folder)

            result = createDem(project.bbox, wncli_folder)
            if result[0]:
                project.demPath = result[1]
                project.updateJob(None, (logging.INFO, "DEM created"), True)

                # execute the cli; parameters are ";"-separated "key:value" pairs
                override_args = {ptr.split(":")[0]: ptr.split(":")[1]
                                 for ptr in project.parameters.split(";")}
                #TODO: rethink "products"
                output_shp = project.products.get("vector", False)
                output_asc = project.products.get("clustered", False)
                output_wx = project.products.get("weather", False)
                result = execute_wncli(wncli_folder, override_args,
                                       project.demPath, project.forecast,
                                       output_shp, output_asc, output_wx)
                #result:
                #   0 : status [True | False]
                #   1 : output_folder | error message [string]
                #   2 : simulations [list of datetime]
                #   3 : windninja_shapefiles [list of string]
                #   4 : windninja_ascfiles [list of string]
                #   5 : weather_shapefiles [list of string]
                if result[0]:
                    project.updateJob(None, (logging.INFO, "WindNinjaCLI executed"), True)
                    results_folder = result[1]

                    # add the simulation times/zone info
                    simulations = result[2]
                    simulations.sort()

                    # initialize some variables used across products
                    wx_infos = wn_infos = None
                    wx_max_speed = wn_max_speed = 0

                    project.output = {
                        "simulations": {
                            "times": ["{:%Y%m%dT%H%M}".format(d) for d in simulations],
                            "utcOffset": "{:%z}".format(result[2][0])
                        }
                    }

                    # generate the desired output products

                    # weather results as geojson vectors
                    #TODO: even though the wx data is small (a few hundred points) if it was aggregated to
                    #      a single file it might help with performance... and size could be reduced if
                    #      using a denormalized format - the geometry json is approx 1/2 the file size.
                    if project.products.get("weather", False):
                        converted_weather = processShapefiles(results_folder, result[5], project.path, True,
                                                              where="speed>0", zip_name="wx_geojson.zip")
                        if converted_weather[0]:
                            project.updateJob(None, (logging.INFO, "Weather converted to geojson"), True)
                            wx_infos = converted_weather[2]
                            wx_max_speed = converted_weather[3]
                            output = project.output["weather"] = {
                                "name": "Weather Json Vectors",
                                "type": "vector",
                                "format": "json",
                                "package": os.path.basename(converted_weather[4]),
                                "files": converted_weather[1],
                                "data": {"maxSpeed": {"overall": wx_max_speed}}
                            }
                            for i in wx_infos:
                                name = i.replace("shp", "json")
                                output["data"]["maxSpeed"][name] = wx_infos[i]["max"]
                        else:
                            project.updateJob(None, (logging.ERROR, converted_weather[1]), True)

                    # windninja results as geojson vectors
                    if project.products.get("vector", False):
                        converted_windninja = processShapefiles(results_folder, result[3], project.path, True,
                                                                zip_name="wn_geojson.zip")
                        if converted_windninja[0]:
                            project.updateJob(None, (logging.INFO, "Output converted to geojson"), True)
                            wn_infos = converted_windninja[2]
                            wn_max_speed = converted_windninja[3]
                            output = project.output["vector"] = {
                                "name": "WindNinja Json Vectors",
                                "type": "vector",
                                "format": "json",
                                "package": os.path.basename(converted_windninja[4]),
                                "files": converted_windninja[1],
                                "data": {"maxSpeed": {"overall": wn_max_speed}}
                            }
                            for i in wn_infos:
                                name = i.replace("shp", "json")
                                output["data"]["maxSpeed"][name] = wn_infos[i]["max"]
                        else:
                            project.updateJob(None, (logging.ERROR, converted_windninja[1]), True)

                    # topofire tiles
                    #TODO: this one could be kicked off in a parrallel process as it doesn't rely on the WN output
                    #TODO: Tile fetch is surprisingly quick but could create a local cache that is auto built as requests come in.
                    if project.products.get("topofire", False):
                        from tilegrabber import grab_tiles
                        topofire_zip_file = grab_tiles(project.bbox, project.path, "topofire")
                        if topofire_zip_file:
                            project.updateJob(None, (logging.INFO, "TopoFire tiles compiled"), True)
                            project.output["topofire"] = {
                                "name": "TopoFire Basemap",
                                "type": "basemap",
                                "format": "tiles",
                                "package": os.path.basename(topofire_zip_file),
                                "files": []
                            }
                        else:
                            project.updateJob(None, (logging.WARNING, "TopoFire tiles unavailable"), True)

                    # windninja results as tile packages
                    if project.products.get("raster", False):
                        from rastertilemaker import make_tiles_for_output
                        wn_shpfiles = result[3]

                        # calculate values if needed
                        if output_wx and not wx_infos:
                            # FIX: was `results[5]` (undefined name).
                            wx_shpfiles = result[5]
                            converted = processShapefiles(results_folder, wx_shpfiles, project.path, False)
                            # FIX: was `covnerted[3]` (undefined name).
                            wx_max_speed = converted[3]
                        if not wn_infos:
                            converted = processShapefiles(results_folder, wn_shpfiles, project.path, False)
                            wn_infos = converted[2]
                            wn_max_speed = converted[3]
                        max_speed = wn_max_speed if (wn_max_speed > wx_max_speed) else wx_max_speed
                        #NOTE: weather points NOT drawn in tiles, but max speed maybe from weather ....
                        #TODO: should this return an error/status?
                        tile_zip = make_tiles_for_output(project.path,
                                                         (results_folder, wn_shpfiles),
                                                         (wn_infos, max_speed),
                                                         project.forecast)
                        project.updateJob(None, (logging.INFO, "Output converted to raster tiles"), True)
                        output = project.output["raster"] = {
                            "name": "WindNinja Raster Tiles",
                            "type": "raster",
                            "format": "tiles",
                            "package": tile_zip,
                            "files": [k.replace(".shp", "") for k in wn_infos.keys()],
                            "data": {"maxSpeed": {"overall": max_speed}}
                        }
                        for i in wn_infos:
                            name = i.replace(".shp", "")
                            output["data"]["maxSpeed"][name] = wn_infos[i]["max"]

                    # windninja results as custom clustered format
                    if project.products.get("clustered", False):
                        from convolve import createClusters

                        # run calculation if not already done
                        if not wn_infos:
                            wn_infos = {}
                            for f in [a for a in result[4] if a.find("vel") > 0]:
                                wn_infos[f] = getRasterInfo(os.path.join(results_folder, f))
                            wn_max_speed = sorted(wn_infos.values(),
                                                  key=lambda x: x["max"],
                                                  reverse=True)[0]["max"]

                        #NOTE: assumes weather max will be covered if created
                        max_speed = wn_max_speed if (wn_max_speed > wx_max_speed) else wx_max_speed

                        #TODO: should this return a status/error
                        # NOTE(review): .values()[0] requires Python 2 (list,
                        # not view) — consistent with the rest of this file.
                        native_wkid = int(wn_infos.values()[0]["native_wkid"])
                        file_format = "json"
                        clustered_file, breakdown = createClusters(results_folder, project.path,
                                                                   "wn_clustered", native_wkid,
                                                                   separate=False,
                                                                   given_max_vel=max_speed,
                                                                   format=file_format)
                        project.updateJob(None, (logging.INFO, "Output converted to cluster"), True)

                        #TODO: zip file
                        zip_name = "wn_clustered.zip"
                        zip_files(os.path.join(project.path, zip_name),
                                  [os.path.join(project.path, f) for f in clustered_file])

                        output = project.output["clustered"] = {
                            "name": "WindNinja Cluster Vectors",
                            "type": "cluster",
                            "format": file_format,
                            "baseUrl": "",
                            "package": zip_name,
                            "files": clustered_file,
                            "data": {"maxSpeed": {"overall": wn_max_speed},
                                     "speedBreaks": breakdown}
                        }
                        for i in wn_infos:
                            name = i.replace("_vel.asc", "").replace(".shp", "")
                            output["data"]["maxSpeed"][name] = wn_infos[i]["max"]

                    # processing complete!
                    status = JobStatus.succeeded
                else:
                    project.updateJob(None, (logging.ERROR, result[1]), True)
            else:
                project.updateJob(None, (logging.ERROR, result[1]), True)
    except Exception as e:
        # Best-effort error reporting; never let logging failures escape.
        try:
            msg = str(e).replace("\n", " ")
            if project is not None:
                project.updateJob(None, (logging.ERROR, msg), True)
            else:
                logging.error(msg)
        except:
            pass

    finish = datetime.datetime.now()
    delta = finish - start

    if project is not None:
        try:
            msg = "Complete - total processing: {}".format(delta)
            project.updateJob(status.name, (logging.INFO, msg), True)
        except Exception as ex:
            # FIX: message was garbled ("job update failed n failed").
            logging.error("job update failed:\t{}".format(str(ex)))

        try:
            project.sendEmail()
        except Exception as ex:
            logging.error("send notification failed:\t{}".format(str(ex)))

    #TODO: should this be a command line flag to skip or try
    try:
        dequeue(args.id)
        logging.info("Job dequeue")
    except Exception as ex:
        logging.error("job dequeue failed:\t{}".format(str(ex)))