def data_to_projects(connection, data):
    """Convert raw project rows into fully-populated ``Project`` objects.

    For each row, dates are normalized via ``dovetail.util.condition_date``
    and the project's work / key-work lists are fetched from the database.

    :param connection: open database connection passed through to work_db
    :param data: iterable of row mappings with project columns
    :return: list of ``Project`` instances, one per input row
    """
    def build(row):
        project = Project(row['id'])
        project.name = row['name']
        # Explicit selects return raw values, so condition the dates here.
        project.target_date = dovetail.util.condition_date(row['target_date'])
        project.est_start_date = dovetail.util.condition_date(row['est_start_date'])
        project.est_end_date = dovetail.util.condition_date(row['est_end_date'])
        project.value = row['value']
        project.work = work_db.select_work_for_project(connection, row['id'])
        project.key_work = work_db.select_key_work_for_project(connection, row['id'])
        return project

    return [build(row) for row in data]
def select_project(connection, project_id):
    """Load a single project plus its participants and work items.

    :param connection: open database connection
    :param project_id: primary key of the project to load
    :return: a ``Project`` populated with name, dates, participants, work
    """
    row = connection.execute(
        database.projects.select(
            database.projects.c.id == project_id)).first()

    project = Project(project_id)
    project.name = row['name']
    # NOTE: We don't need to condition them because we're not doing an explicit select
    # SqlAlchemy takes care of the date manipulation for us
    project.target_date = row['target_date']
    project.est_end_date = row['est_end_date']
    project.participants = people_db.select_project_participants(
        connection, project_id)
    project.work = work_db.select_work_for_project(connection, project_id)
    return project
def edit_project(project_id):
    """Update a project's name, target date, and work items from the request.

    Parses each submitted workline, persists changes per work item, marks
    work items missing from the submission as done, topologically re-sorts
    the project's work, and updates the project record.

    :param project_id: primary key of the project being edited
    :return: a Flask ``Response`` with an empty JSON body and status 200
    """
    # Local import keeps this fix self-contained; move to module level if
    # the rest of the file adopts logging.
    import logging
    logger = logging.getLogger(__name__)

    name = request.values['name']
    target_date = dovetail.util.parse_date(request.values['target_date'])
    worklines = request.values['worklines'].split('\n')
    original_work_ids = set(json.loads(request.values['original_work_ids']))

    work = []
    for workline in worklines:
        try:
            work_data = projects_util.parse_workline(g.connection, workline)
            fields = work_data['fields']
            fields.update(project_id=project_id)
            # Save any changes to the work items
            # TODO: Separate topo sort work so we only have to write to database once
            work_db.update_work(g.connection, work_data)
            fields.update(id=work_data['id'])
            work.append(work_db.fields_to_work_object(fields))
        except Exception:
            # Best-effort per line: skip worklines that fail to parse or
            # save, but log them instead of silently swallowing (the
            # original used a bare ``except: pass``).
            logger.exception("Failed to process workline: %r", workline)

    # Any work id present originally but absent from the submission is
    # considered completed — mark it done.
    returned_work_ids = {w.work_id for w in work}
    done_work_ids = original_work_ids - returned_work_ids
    work_db.mark_work_done(g.connection, done_work_ids)

    project = Project(project_id)
    project.name = name
    project.target_date = target_date
    project.work = work
    project.topo_sort_work()
    work_db.update_work_topo_order(g.connection, project.work)
    #dovetail.scheduler.reschedule_world(g.connection)

    # Update project info
    projects_db.update_project(g.connection, project)

    response_data = {}
    return Response(json.dumps(response_data), status=200,
                    mimetype='application/json')
# NOTE(review): this is a near-byte-for-byte duplicate of an earlier
# ``edit_project`` definition in this file; whichever is defined later
# shadows the other. Confirm which one is routed and delete the other.
def edit_project(project_id):
    """Update a project's name, target date, and work items from the request.

    Parses each submitted workline, persists changes per work item, marks
    work items missing from the submission as done, topologically re-sorts
    the project's work, and updates the project record.

    :param project_id: primary key of the project being edited
    :return: a Flask ``Response`` with an empty JSON body and status 200
    """
    # Local import keeps this fix self-contained; move to module level if
    # the rest of the file adopts logging.
    import logging
    logger = logging.getLogger(__name__)

    name = request.values['name']
    target_date = dovetail.util.parse_date(request.values['target_date'])
    worklines = request.values['worklines'].split('\n')
    original_work_ids = set(json.loads(request.values['original_work_ids']))

    work = []
    for workline in worklines:
        try:
            work_data = projects_util.parse_workline(g.connection, workline)
            fields = work_data['fields']
            fields.update(project_id=project_id)
            # Save any changes to the work items
            # TODO: Separate topo sort work so we only have to write to database once
            work_db.update_work(g.connection, work_data)
            fields.update(id=work_data['id'])
            work.append(work_db.fields_to_work_object(fields))
        except Exception:
            # Best-effort per line: skip worklines that fail to parse or
            # save, but log them instead of silently swallowing (the
            # original used a bare ``except: pass``).
            logger.exception("Failed to process workline: %r", workline)

    # Any work id present originally but absent from the submission is
    # considered completed — mark it done.
    returned_work_ids = {w.work_id for w in work}
    done_work_ids = original_work_ids - returned_work_ids
    work_db.mark_work_done(g.connection, done_work_ids)

    project = Project(project_id)
    project.name = name
    project.target_date = target_date
    project.work = work
    project.topo_sort_work()
    work_db.update_work_topo_order(g.connection, project.work)
    #dovetail.scheduler.reschedule_world(g.connection)

    # Update project info
    projects_db.update_project(g.connection, project)

    response_data = {}
    return Response(json.dumps(response_data), status=200,
                    mimetype='application/json')