def ajax_productivity_per_task_and_date(request, username):
    """Return real vs. expected productivity per day for one employee's task.

    url = employee/ajax_productivity_per_task/<username>

    Response shape:
        {"dates": [...],
         "task": {"task_id": ..., "name": ...,
                  "real_productivity": [...],      # goal units per hour
                  "expected_productivity": [...]}}

    Mandatory GET parameter:
        task_id    - id of the task in question
    Optional GET parameters:
        start_date - YYYY-MM-DD start of the measured period
                     (default: 30 days before today)
        end_date   - YYYY-MM-DD end of the measured period (default: today)
        offset     - time-zone offset in +/-HH:MM format (default: +00:00)
    If provided but badly formatted, an HTTP 400 Bad Request is raised.

    Example:
    /employee/ajax_productivity_per_task_and_date/JoseGavilan?task_id=3&start_date=2017-02-05&end_date=2017-02-16
    returns
    {"dates": ["2017-02-05", "2017-02-06", ..., "2017-02-16"],
     "task": {"name": "Hacer cosas de front",
              "real_productivity": [0, 0, 0, 0, 0, 0, 0, 1.2, 0, 0.225, 0, 0],
              "task_id": 3,
              "expected_productivity": [9.0, 9.0, 9.0, 9.0, 9.0, 9.0, 9.0,
                                        4.0, 4.0, 2.0, 2.0, 2.0]}}
    """
    # Get and parse the dates (raw strings: \d is an invalid escape otherwise)
    start_date = request.GET.get("start_date",
                                 str(date.today() - timedelta(days=30)))
    end_date = request.GET.get("end_date", str(date.today()))
    date_regex = re.compile(r"^\d{4}-\d{2}-\d{2}$")
    if date_regex.match(start_date) is None or date_regex.match(
            end_date) is None:
        raise SuspiciousOperation("Start/end date are not valid")
    offset = request.GET.get("offset", "+00:00")
    offset_regex = re.compile(r"^(\+|-)\d{2}:\d{2}$")
    if offset_regex.match(offset) is None:
        raise SuspiciousOperation("Time offset is not valid")

    # Append time offsets
    start_date += " 00:00" + offset
    end_date += " 00:00" + offset

    # Check that the user is logged in and is an administrator or has
    # permissions
    logged = get_authorized_or_403(request)

    # Check that it's at least PM
    if get_highest_role_tier(logged) < 40:
        raise PermissionDenied

    employee = get_object_or_404(Employee,
                                 user__username=username,
                                 user__is_active=True)

    # Check that the admin has permission to view that employee
    same_company_or_403(logged, employee)

    task_id = request.GET.get("task_id")

    # Find the task with the requested id, restricted to tasks the employee
    # actually works on and that have a production goal
    task = Task.objects.filter(
        pk=task_id,
        active=True,
        projectDepartment_id__projectdepartmentemployeerole__employee_id=
        employee,
        production_goal__isnull=False).distinct().first()
    if task is None:
        raise SuspiciousOperation("The task could not be found")

    # Get all dates between start and end
    dates = []
    str_dates = []
    # Drop the colon from the +/-HH:MM offset so strptime's %z accepts it
    d1 = datetime.strptime(start_date[0:19] + start_date[20:22],
                           '%Y-%m-%d %H:%M%z')
    d2 = datetime.strptime(end_date[0:19] + end_date[20:22],
                           '%Y-%m-%d %H:%M%z')
    delta = d2 - d1  # timedelta
    for i in range(delta.days + 1):
        str_dates.append((d1 + timedelta(days=i)).date().strftime("%Y-%m-%d"))
        dates.append(d1 + timedelta(days=i))

    data = {
        'dates': str_dates,
        'task': {
            'task_id': task.id,
            'name': task.name,
            'real_productivity': [],
            'expected_productivity': []
        }
    }

    # Save productivity for each date:
    # for each date, we will find the associated timelog
    for log_date in dates:
        log = TimeLog.objects.filter(task_id=task.id,
                                     workDate__year=log_date.year,
                                     workDate__month=log_date.month,
                                     workDate__day=log_date.day,
                                     employee_id=employee).first()
        if log is None:
            # The employee did not work that day
            total_productivity = 0
            total_duration = 0
        else:
            total_produced_units = log.produced_units
            total_duration = log.duration
            if total_duration == 0:
                total_productivity = 0
            else:
                # Duration is in minutes, so we multiply by 60
                # (duration is in the denominator)
                total_productivity = 60 * total_produced_units / total_duration

        # Find the registry date of the production goal evolution which is
        # closest to (at or after) the date
        expected_productivity = GoalEvolution.objects.filter(
            task_id_id=task.id, registryDate__gte=log_date).first()

        # If we do not find the goal, or the date is after the last task
        # update, the current task goal applies
        if total_duration == 0:
            expected_productivity = 0
        else:
            if expected_productivity is None or task.registryDate <= log_date:
                expected_productivity = task.production_goal
            else:
                expected_productivity = expected_productivity.production_goal

        data["task"]["real_productivity"].append(
            default_round(total_productivity))
        data["task"]["expected_productivity"].append(
            default_round(expected_productivity))

    return JsonResponse(data)
def ajax_profit_per_date_in_project(request, employee_id, project_id):
    """Return daily and cumulative expenses/income for an employee in a project.

    url = department/ajax_profit_per_date/<department_id>

    Response shape:
        {"dates": [...],
         "expenses": [...], "income": [...],
         "acumExpenses": [...], "acumIncome": [...]}

    Mandatory parameters: none
    Optional GET parameters:
        start_date - YYYY-MM-DD start of the measured period
                     (default: 30 days before today)
        end_date   - YYYY-MM-DD end of the measured period (default: today)
        offset     - time-zone offset in +/-HH:MM format (default: +00:00)
    If provided but badly formatted, an HTTP 400 Bad Request is raised.

    Example: /department/ajaxAcumProfit/1/ returns one entry per day in each
    list, e.g. dates ["2017-03-21", ..., "2017-04-20"] with matching
    expenses/income values and their running totals.
    """
    # Check that it's at least PM
    if get_highest_role_tier(request.user.actor) < 40:
        raise PermissionDenied

    # Get and parse the dates (raw strings: \d is an invalid escape otherwise)
    start_date = request.GET.get("start_date",
                                 str(date.today() - timedelta(days=30)))
    end_date = request.GET.get("end_date", str(date.today()))
    date_regex = re.compile(r"^\d{4}-\d{2}-\d{2}$")
    if date_regex.match(start_date) is None or date_regex.match(
            end_date) is None:
        raise SuspiciousOperation("Start/end date are not valid")
    offset = request.GET.get("offset", "+00:00")
    offset_regex = re.compile(r"^(\+|-)\d{2}:\d{2}$")
    if offset_regex.match(offset) is None:
        raise SuspiciousOperation("Time offset is not valid")

    # Append time offsets
    start_date += " 00:00" + offset
    end_date += " 00:00" + offset

    check_metrics_authorized_for_employee_in_project(request.user, employee_id,
                                                     project_id)

    # Get all dates between start and end
    dates = []
    str_dates = []
    # Drop the colon from the +/-HH:MM offset so strptime's %z accepts it
    d1 = datetime.strptime(start_date[0:19] + start_date[20:22],
                           '%Y-%m-%d %H:%M%z')
    d2 = datetime.strptime(end_date[0:19] + end_date[20:22],
                           '%Y-%m-%d %H:%M%z')
    delta = d2 - d1  # timedelta
    for i in range(delta.days + 1):
        str_dates.append((d1 + timedelta(days=i)).date().strftime("%Y-%m-%d"))
        dates.append(d1 + timedelta(days=i))

    data = {
        'dates': str_dates,
        'expenses': [],
        'income': [],
        'acumExpenses': [],
        'acumIncome': []
    }

    # Profit:
    # for each date, find all logs, calculate the sum and accumulate it
    index = 0
    for log_date in dates:
        logs = TimeLog.objects.filter(
            employee_id=employee_id,
            employee_id__projectdepartmentemployeerole__projectDepartment_id__project_id
            =project_id,
            workDate__year=log_date.year,
            workDate__month=log_date.month,
            workDate__day=log_date.day).distinct()

        # Expenses: worked hours (duration is stored in minutes) times the
        # employee's hourly price
        expenses = logs.aggregate(
            total_expenses=Sum(F("duration") / 60.0 *
                               F("employee_id__price_per_hour"),
                               output_field=FloatField()))["total_expenses"]
        expenses = expenses if expenses is not None else 0

        # Income: produced units times the task's price per unit
        income = logs.aggregate(total_income=Sum(
            F("task_id__price_per_unit") *
            F("produced_units")))["total_income"]
        income = income if income is not None else 0

        data['expenses'].append(default_round(expenses))
        data['income'].append(default_round(income))
        if index == 0:
            data['acumExpenses'].append(default_round(expenses))
            data['acumIncome'].append(default_round(income))
        else:
            data['acumExpenses'].append(data['acumExpenses'][index - 1] +
                                        expenses)
            data['acumIncome'].append(data['acumIncome'][index - 1] + income)
        index += 1

    return JsonResponse(data)
def ajax_productivity_per_task(request):
    """Return aggregated real and expected productivity per day for a task.

    Response shape:
        {"days": [...], "production": [...], "goal_evolution": [...]}

    Mandatory GET parameter:
        task_id    - ID of the task
    Optional GET parameters:
        start_date - YYYY-MM-DD start of the measured period
                     (default: 30 days before today)
        end_date   - YYYY-MM-DD end of the measured period (default: today)
        offset     - time-zone offset in +/-HH:MM format (default: +00:00)
    If provided but badly formatted, an HTTP 400 Bad Request is raised.
    """
    # ------------------------- Courtesy of Agu ------------------------------
    actor = get_actor_or_403(request)
    if "task_id" not in request.GET:
        raise SuspiciousOperation
    task_id = request.GET["task_id"]
    task = get_object_or_404(Task, pk=task_id)
    actor = check_task(request, task, for_view=True)
    same_company_or_403(actor, task.actor_id)

    # Get and parse the dates and the offset
    # (raw strings: \d is an invalid escape otherwise)
    start_date = request.GET.get("start_date",
                                 str(date.today() - timedelta(days=30)))
    end_date = request.GET.get("end_date", str(date.today()))
    date_regex = re.compile(r"^\d{4}-\d{2}-\d{2}$")
    if date_regex.match(start_date) is None or date_regex.match(
            end_date) is None:
        raise SuspiciousOperation("Start/end date are not valid")
    offset = request.GET.get("offset", "+00:00")
    offset_regex = re.compile(r"^(\+|-)\d{2}:\d{2}$")
    if offset_regex.match(offset) is None:
        raise SuspiciousOperation("Time offset is not valid")

    # Append time offsets
    start_date += " 00:00" + offset
    end_date += " 00:00" + offset
    # ------------------------------------------------------------------------

    # Get all dates between start and end
    dates = []
    str_dates = []
    # Drop the colon from the +/-HH:MM offset so strptime's %z accepts it
    d1 = datetime.strptime(start_date[0:19] + start_date[20:22],
                           '%Y-%m-%d %H:%M%z')
    d2 = datetime.strptime(end_date[0:19] + end_date[20:22],
                           '%Y-%m-%d %H:%M%z')
    delta = d2 - d1  # timedelta
    for i in range(delta.days + 1):
        str_dates.append((d1 + timedelta(days=i)).date().strftime("%Y-%m-%d"))
        dates.append(d1 + timedelta(days=i))

    data = {"days": str_dates, "production": [], "goal_evolution": []}

    # Save productivity for each date:
    # for each date, we will find the associated timelogs (aggregated over
    # all employees working on the task)
    for log_date in dates:
        # aggregate() always returns a dict; its values are None when no
        # timelog matched that day (the old `if log is None` branch was
        # unreachable and has been removed)
        log = TimeLog.objects.filter(
            task_id=task_id,
            workDate__year=log_date.year,
            workDate__month=log_date.month,
            workDate__day=log_date.day).aggregate(
                total_duration=Sum(F("duration") / 60.0,
                                   output_field=FloatField()),
                total_produced_units=Sum(F("produced_units"),
                                         output_field=FloatField()))
        total_produced_units = log["total_produced_units"]
        total_duration = log["total_duration"]

        if total_duration == 0 or total_duration is None:
            # No work registered that day
            total_productivity = 0
        elif total_produced_units is None:
            # If nothing was produced but time was spent, 0 productivity
            # (you lazy guy...)
            total_productivity = 0
        else:
            # total_duration is already in hours (duration / 60.0 above)
            total_productivity = total_produced_units / total_duration

        # Find the registry date of the production goal evolution which is
        # closest to (at or after) the date
        expected_productivity = GoalEvolution.objects.filter(
            task_id_id=task_id, registryDate__gte=log_date).first()

        # If we do not find the goal, or the date is after the last task
        # update, the current task goal applies
        if total_duration == 0 or total_duration is None:
            expected_productivity = 0
        elif expected_productivity is None or task.registryDate <= log_date:
            expected_productivity = task.production_goal
        else:
            expected_productivity = expected_productivity.production_goal

        data["production"].append(default_round(total_productivity))
        data["goal_evolution"].append(default_round(expected_productivity))

    return JsonResponse(data)