def create_reports_for_contract(contract):
    """Create one Report per month from the carryover target date to today.

    The first Report (for the carryover target month) carries the contract's
    initial carryover minutes as worktime; every following month up to and
    including the current one starts with a zero worktime.

    :param contract: Contract instance the Reports are created for.
    :return: None
    """
    today = datetime.date.today()
    month = contract.carryover_target_date
    # The very first Report seeds the chain with the initial carryover.
    worktime = datetime.timedelta(minutes=contract.initial_carryover_minutes)
    while True:
        Report.objects.create(
            month_year=month,
            worktime=worktime,
            contract=contract,
            user=contract.user,
            created_by=contract.user,
            modified_by=contract.user,
        )
        month += relativedelta(months=1)
        if month > today:
            break
        # Every subsequent month starts empty.
        worktime = datetime.timedelta(0)
def update_reports(contract, month_year):
    """Recompute the Reports of *contract* from *month_year* onwards.

    The carryover into the first recomputed month comes from the preceding
    month's Report (worktime minus debit_worktime); if no such Report exists,
    the contract's initial carryover minutes are used.  Each later month's
    carryover is chained from the month just recomputed.

    :param contract: Contract whose Reports are updated.
    :param month_year: date marking the first month to recompute.
    :return: None
    """
    # Fix: fetch the previous month's Report once.  The original called
    # ``previous_report.first()`` twice, issuing two separate database
    # queries for the same row.
    previous_report = Report.objects.filter(
        contract=contract,
        month_year=month_year - relativedelta(months=1),
    ).first()
    if previous_report is not None:
        carry_over_worktime = (previous_report.worktime -
                               previous_report.debit_worktime)
    else:
        carry_over_worktime = datetime.timedelta(
            minutes=contract.initial_carryover_minutes)
    # Loop over all Reports starting from the month in which the
    # created/updated shift took place, chaining the carryover forward.
    for report in Report.objects.filter(contract=contract,
                                        month_year__gte=month_year):
        # Sum the durations of all reviewed shifts in the report's month.
        total_work_time = Shift.objects.filter(
            contract=report.contract,
            started__month=report.month_year.month,
            started__year=report.month_year.year,
            was_reviewed=True,
        ).aggregate(total_work_time=Coalesce(
            Sum(F("stopped") - F("started"), output_field=DurationField()),
            datetime.timedelta(0),
        ))["total_work_time"]
        report.worktime = carry_over_worktime + total_work_time
        report.save()
        carry_over_worktime = report.worktime - report.debit_worktime
def create_n_report_objects():
    """Report-object factory fixture.

    Reports are distinguished by id, so there is no specific need for the
    start_stop mechanism; it is nonetheless kept for consistency with
    user_conftest.py.

    :return: Function that creates and returns a list of Report objects.
    """
    month_year = datetime.date(2019, 1, 1)
    hours = datetime.timedelta(0)
    created_at = datetime.datetime(2019, 1, 1, 16).isoformat()
    modified_at = created_at

    def create_reports(start_stop, user, contract, month_year=month_year):
        # One Report per value in range(*start_stop); the index itself is
        # irrelevant, only the count matters.
        return [
            Report.objects.create(
                month_year=month_year,
                hours=hours,
                contract=contract,
                user=user,
                created_by=user,
                modified_by=user,
                created_at=created_at,
                modified_at=modified_at,
            )
            for _ in range(*start_stop)
        ]

    return create_reports
def create_reports_monthly(self):
    """Periodical task: create a Report for every active user's currently
    running contracts on the first of the month.

    A Contract counts as running when its carryover_target_date lies strictly
    before today and its end_date is on or after today.

    :param self: task instance -- NOTE(review): presumably a bound celery
        task; it is never used in the body.
    :return: None
    """
    date_now = datetime.datetime.now().date()
    for user in User.objects.filter(is_active=True, is_staff=False):
        for contract in user.contracts.filter(
                carryover_target_date__lt=date_now, end_date__gte=date_now):
            # NOTE(review): Report.objects.get raises DoesNotExist when last
            # month's Report is missing, which aborts the whole task for all
            # remaining users/contracts -- TODO confirm this is intended.
            last_report = Report.objects.get(contract=contract,
                                             month_year=date_now -
                                             relativedelta(months=1))
            # Carryover = last month's worktime minus the contract's monthly
            # debit.  NOTE(review): other code in this project subtracts
            # ``debit_worktime``; here ``contract.minutes`` is used -- verify
            # these agree.
            carry_over_worktime = last_report.worktime - datetime.timedelta(
                minutes=contract.minutes)
            Report.objects.create(
                month_year=date_now,
                worktime=carry_over_worktime,
                contract=contract,
                user=user,
                created_by=user,
                modified_by=user,
            )
def add15(last_compute_time, interval=15):
    """Return *last_compute_time* advanced by *interval* minutes.

    :param last_compute_time: datetime.datetime to step forward from.
    :param interval: step size in minutes (default 15).
    :return: datetime.datetime of the next compute time.
    """
    # Bug fix: the original computed ``diff_sec = interval * 60.`` but never
    # used it, hard-coding ``15*60`` seconds instead -- so any non-default
    # ``interval`` argument was silently ignored.
    return last_compute_time + datetime.timedelta(seconds=interval * 60)
def get_carry_over_last_month(self, obj):
    """Return the carryover worktime taken into *obj*'s month.

    Looks up the Report of the preceding month on the same contract and
    delegates to ``calculate_carryover``; falls back to the contract's
    initial carryover minutes when no such Report exists.
    """
    previous_month = obj.month_year - relativedelta(months=1)
    try:
        last_report = Report.objects.get(contract=obj.contract,
                                         month_year=previous_month)
    except Report.DoesNotExist:
        # First month of the contract: seed with the configured carryover.
        return datetime.timedelta(
            minutes=obj.contract.initial_carryover_minutes)
    return self.calculate_carryover(last_report)
def test_authorise_request(cognito_settings):
    """Test authorise_request."""
    region = cognito_settings["cognito.region"]
    userpool_id = cognito_settings["cognito.userpool.id"]
    userpool_iss = auth.cognito_userpool_iss(region, userpool_id)
    userpool_keys = [{"kid": "test_kid"}]

    mock_request = Mock()
    mock_request.headers.get = Mock(return_value=ACCESS_TOKEN_EXPIRED)

    ###########################################################################
    # Test case: user id not found in the token
    with raises(Exception), patch(
            "jose.jwt.get_unverified_claims") as mock_jwt:
        mock_jwt.return_value = {"token_use": "none"}
        auth.authorise_request(mock_request, region, userpool_id,
                               userpool_keys)

    ###########################################################################
    # Test case: invalid token (no patches -- the expired token is rejected)
    with raises(Exception):
        auth.authorise_request(mock_request, region, userpool_id,
                               userpool_keys)

    ###########################################################################
    # Test case: valid token -- every jose call is stubbed to succeed
    with patch("jose.jwt.get_unverified_header") as mock_header, \
            patch("jose.jwt.get_unverified_claims") as mock_claims, \
            patch("jose.jwt.decode") as mock_decode, \
            patch("jose.jws.verify") as mock_verify:
        userpool_keys = [{"kid": "test_kid"}]
        expiry = datetime.datetime.utcnow() + datetime.timedelta(days=1)
        future_timestamp = mktime(expiry.timetuple())
        mock_header.return_value = {"kid": "test_kid", "alg": ""}
        mock_claims.return_value = {
            "iss": userpool_iss,
            "token_use": "access",
            "exp": future_timestamp,
            "username": SAMPLE_USERNAME,
        }
        mock_decode.return_value = True
        mock_verify.return_value = True
        result = auth.authorise_request(mock_request, region, userpool_id,
                                        userpool_keys)
        assert result == SAMPLE_USERNAME
def list_images(self, **kwargs):
    """Return the list of glance images, caching the result.

    The returned images have an update() function to change attributes like
    public/private, min_disk, min_ram.
    NOTE: glance.images.list() returns a generator; we always return lists.
    """
    # Expire the cache when it is older than CACHE_TIMEOUT minutes.
    cached_at = getattr(self, 'cache_time', None)
    if cached_at and (datetime.datetime.now() - cached_at >
                      datetime.timedelta(minutes=self.CACHE_TIMEOUT)):
        self.clear_cache()
    if getattr(self, 'all_images', []):
        # Cache hit: hand back a shallow copy so callers can't mutate it.
        logger.info("Returning a cached copy of image-list")
        return list(self.all_images)
    # Cache miss: materialize the generator and remember when we did.
    self.all_images = list(self.glance.images.list(**kwargs))
    self.cache_time = datetime.datetime.now()
    logger.info("Caching a copy of image-list")
    return self.all_images
def list_images(self, **kwargs):
    """List glance images through a time-limited cache.

    These images have an update() function to update attributes like
    public/private, min_disk, min_ram.
    NOTE: glance.images.list() returns a generator, we return lists.
    """
    now = datetime.datetime.now()
    last_fill = getattr(self, 'cache_time', None)
    # Drop a stale cache before deciding whether to refill.
    if last_fill and now - last_fill > datetime.timedelta(
            minutes=self.CACHE_TIMEOUT):
        self.clear_cache()
    images = getattr(self, 'all_images', [])
    if not images:
        # Refill: drain the generator into a list and stamp the fill time.
        self.all_images = list(self.glance.images.list(**kwargs))
        self.cache_time = datetime.datetime.now()
        logger.info("Caching a copy of image-list")
        return self.all_images
    logger.info("Returning a cached copy of image-list")
    return list(images)
def parse_pubsub(message):
    """Process one pub/sub message naming a datastore "webapi" entity.

    Repeatedly pulls one day's worth of data from the entity's web API,
    uploads the CSV data (and schema) to GCS, and advances the entity's
    FromDate by a day, until the API fails or FromDate catches up with
    today (Asia/Kolkata).  The entity's "Running" flag acts as a crude
    lock against concurrent processing.

    :param message: datastore key name of the "webapi" entity to process.
    :return: None
    """
    from google.cloud import datastore, storage
    # NOTE(review): ``get`` is imported but never used below.
    from requests import get, post
    # NOTE(review): ``datetime`` is imported from pytz here, relying on pytz
    # re-exporting it; confirm ``datetime.datetime`` / ``datetime.timedelta``
    # actually resolve at runtime rather than importing datetime directly.
    from pytz import timezone, datetime
    import json, time
    logging.getLogger().info("Starting for : " + message)
    print(message)
    client = datastore.Client()
    storage_client = storage.Client()
    entity = client.get(client.key("webapi", message.strip()))
    # Don't process the entity if we are already processing it.
    if entity["Running"]:
        return
    # Mark the entity as being processed (best-effort lock, not atomic).
    entity["Running"] = True
    # Persist the flag before starting the long-running loop.
    client.put(entity)
    try:
        while True:
            # Re-fetch the entity each iteration; its key is the table name.
            entity = client.get(client.key("webapi", message.strip()))
            url = entity["URL"]
            date = datetime.datetime.strptime(entity["FromDate"], "%d/%m/%Y")
            todate = date + datetime.timedelta(days=1)
            bucket = entity["Bucket"]
            output_folder = entity["OutputFolder"]
            table_name = entity["TableName"]
            # GCS object path for the day's data file.
            output_file = output_folder + "/" + table_name + "/" + date.strftime(
                "%Y/%m/%d") + ".csv"
            # GCS object path for the table's schema file.
            schema_file = "schemas/" + output_folder + "/" + table_name + ".csv"
            data = {
                "FromDate": date.strftime("%m/%d/%Y %H:%M:%S"),
                "ToDate": todate.strftime("%m/%d/%Y %H:%M:%S"),
                "CompressionType": "A"
            }
            logging.getLogger().info("POST Payload : " + json.dumps(data))
            # Call the web api for one day's data.
            res = post(url, json=data)
            # If the api call fails, release the lock and stop.
            if res.status_code != 200:
                entity["Running"] = False
                client.put(entity)
                break
            # Api call succeeded: join values by comma to create csv rows.
            lines = []
            columns = ""
            for line in res.json():
                value = ",".join(map(str, line.values()))
                lines.append(value)
                # Header is rebuilt from every row; the last row's keys win.
                columns = ",".join(map(str, line.keys()))
            # NOTE(review): ``bucket`` is reused here, turning the bucket
            # name (str) into a Bucket object.
            bucket = storage_client.get_bucket(bucket)
            blob = bucket.blob(output_file)
            # Upload the data file to the gcs bucket.
            blob.upload_from_string("\n".join(lines))
            # Upload the schema file only when at least one row was seen.
            if columns:
                blob = bucket.blob(schema_file)
                blob.upload_from_string(columns)
            # Advance FromDate only while it is still behind today's date in
            # Asia/Kolkata; otherwise release the lock and stop.
            if datetime.datetime.now(
                    timezone("Asia/Kolkata")).date() >= todate.date():
                # NOTE(review): ``unicode`` is Python 2 only -- this raises
                # NameError on Python 3; confirm the target runtime.
                entity["FromDate"] = unicode(todate.strftime("%d/%m/%Y"))
                client.put(entity)
            else:
                entity["Running"] = False
                client.put(entity)
                break
    except:
        # NOTE(review): bare except swallows every error (including
        # KeyboardInterrupt); it only releases the "Running" lock.
        entity["Running"] = False
        client.put(entity)
def aggregate_shift_content(self, shifts):
    """
    Method to aggregate a content with all dates at which a shift was worked.

    By creating this dictionary we merge all Shifts on a date to one object
    with the following rules:

    Take the started value of the first Shift of the date as actual started
    value.  Use the stopped value of the last Shift of the date as actual
    stopped value.  Calculate the total work time as sum of stopped - started
    values of each Shift at a date.  Calculate the break time as the actual
    stopped - actual started - worktime.

    E.g.: Assume we have at a given date (1.1.1999) 3 Shifts.
    1. 10:00-11:30
    2. 13:00-15:30
    3. 16:00-18:30
    From this follow the values:
    actual started : 10:00
    actual stopped : 18:30
    work time : 6 hours 30 minutes
    break time : 2 hours

    :param shifts: queryset of Shift objects to aggregate per day.
    :return: dict mapping "DD.MM.YYYY" strings to per-day summaries.
    """
    content = {}
    # We have to use DateTime objects since Date objects ignore timezones.
    # This causes problems with DateTimes which change the day on
    # Localtime -> UTC conversion.  Only works for servertime.
    dates = [
        _datetime.date() for _datetime in shifts.datetimes("started", "day")
    ]
    for date in dates:
        shifts_of_date = shifts.filter(started__date=date)
        # "st" shifts are regular work; everything else is vacation/sick.
        worked_shifts = shifts_of_date.filter(type="st")
        vacation_or_sick_shifts = shifts_of_date.exclude(type="st")
        # Calculate time worked (0 when there were no regular shifts).
        worked_time = worked_shifts.aggregate(work_time=Coalesce(
            Sum(F("stopped") - F("started"), output_field=DurationField()),
            datetime.timedelta(0),
        ))["work_time"]
        # Calculate time not present (0 when no vacation/sick shifts).
        sick_or_vacation_time = vacation_or_sick_shifts.aggregate(
            sick_or_vac_time=Coalesce(
                Sum(F("stopped") - F("started"), output_field=DurationField()),
                datetime.timedelta(0),
            ))["sick_or_vac_time"]
        vacation_or_sick_type = ""
        if vacation_or_sick_shifts.exists():
            # Human-readable label of the first non-work shift of the day.
            vacation_or_sick_type = (
                vacation_or_sick_shifts.first().get_type_display())
        # Day boundaries in local project time -- NOTE(review): relies on the
        # queryset's default ordering so first()/last() are earliest/latest.
        started = shifts_of_date.first().started.astimezone(
            timezone(settings.TIME_ZONE))
        stopped = shifts_of_date.last().stopped.astimezone(
            timezone(settings.TIME_ZONE))
        content[date.strftime("%d.%m.%Y")] = {
            "started": started.time().strftime("%H:%M"),
            "stopped": stopped.time().strftime("%H:%M"),
            "type": vacation_or_sick_type,
            "work_time": timedelta_to_string(stopped - started),
            "net_work_time": timedelta_to_string(worked_time),
            # Break = total presence span minus work and absence time.
            "break_time": timedelta_to_string(stopped - started - worked_time -
                                              sick_or_vacation_time),
            "sick_or_vac_time": timedelta_to_string(sick_or_vacation_time),
        }
    return content
def test_validate_jwt_with_mock(cognito_settings):
    """Test validate_jwt across its failure branches.

    Each case patches only the jose calls that must succeed; the unpatched
    call is what triggers the expected failure message.
    """
    region = cognito_settings["cognito.region"]
    userpool_id = cognito_settings["cognito.userpool.id"]
    userpool_iss = auth.cognito_userpool_iss(region, userpool_id)
    ###########################################################################
    # Test case: passed all -- every jose call stubbed to succeed
    with patch("jose.jwt.get_unverified_header") as mock_header,\
            patch("jose.jwt.get_unverified_claims") as mock_claims,\
            patch("jose.jwt.decode") as mock_decode,\
            patch("jose.jws.verify") as mock_verify:
        userpool_keys = [{"kid": "test_kid"}]
        dt = datetime.datetime.utcnow() + datetime.timedelta(days=1)
        future_timestamp = mktime(dt.timetuple())
        mock_header.return_value = {"kid": "test_kid", "alg": ""}
        mock_claims.return_value = {
            "iss": userpool_iss,
            "token_use": "access",
            "exp": future_timestamp
        }
        mock_decode.return_value = True
        mock_verify.return_value = True
        passed, msg = auth.validate_jwt(ACCESS_TOKEN_EXPIRED, userpool_iss,
                                        userpool_keys)
        assert passed is True and msg is None
    ###########################################################################
    # Test case: failed: Obtained keys are wrong (kid not in userpool_keys)
    with patch("jose.jwt.get_unverified_header") as mock_header:
        mock_header.return_value = {"kid": "", "alg": ""}
        passed, msg = auth.validate_jwt(ACCESS_TOKEN_EXPIRED, userpool_iss,
                                        userpool_keys)
        assert passed is False and msg == "Obtained keys are wrong"
    ###########################################################################
    # Test case: failed: Failed to decode token (jwt.decode left unpatched,
    # so the real decode of the expired sample token fails)
    with patch("jose.jwt.get_unverified_header") as mock_header:
        userpool_keys = [{"kid": "test_kid"}]
        mock_header.return_value = {"kid": "test_kid", "alg": ""}
        passed, msg = auth.validate_jwt(ACCESS_TOKEN_EXPIRED, userpool_iss,
                                        userpool_keys)
        assert passed is False and msg.startswith("Failed to decode token")
    ###########################################################################
    # Test case: failed: Invalid issuer in token (empty "iss" claim)
    with patch("jose.jwt.get_unverified_header") as mock_header, \
            patch("jose.jwt.get_unverified_claims") as mock_claims, \
            patch("jose.jwt.decode") as mock_decode:
        userpool_keys = [{"kid": "test_kid"}]
        mock_header.return_value = {"kid": "test_kid", "alg": ""}
        mock_claims.return_value = {
            "iss": "",
            "token_use": "access",
            "exp": future_timestamp
        }
        mock_decode.return_value = True
        passed, msg = auth.validate_jwt(ACCESS_TOKEN_EXPIRED, userpool_iss,
                                        userpool_keys)
        assert passed is False and msg == "Invalid issuer in token"
    ###########################################################################
    # Test case: failed: Token not of valid use (empty "token_use" claim)
    with patch("jose.jwt.get_unverified_header") as mock_header,\
            patch("jose.jwt.get_unverified_claims") as mock_claims,\
            patch("jose.jwt.decode") as mock_decode:
        userpool_keys = [{"kid": "test_kid"}]
        mock_header.return_value = {"kid": "test_kid", "alg": ""}
        mock_claims.return_value = {
            "iss": userpool_iss,
            "token_use": "",
            "exp": future_timestamp
        }
        mock_decode.return_value = True
        passed, msg = auth.validate_jwt(ACCESS_TOKEN_EXPIRED, userpool_iss,
                                        userpool_keys)
        assert passed is False and msg == "Token not of valid use"
    ###########################################################################
    # Test case: failed: Failed to verify signature (jws.verify unpatched,
    # so the real signature check runs and fails)
    with patch("jose.jwt.get_unverified_header") as mock_header,\
            patch("jose.jwt.get_unverified_claims") as mock_claims,\
            patch("jose.jwt.decode") as mock_decode:
        userpool_keys = [{"kid": "test_kid"}]
        mock_header.return_value = {"kid": "test_kid", "alg": ""}
        mock_claims.return_value = {
            "iss": userpool_iss,
            "token_use": "access",
            "exp": future_timestamp
        }
        mock_decode.return_value = True
        passed, msg = auth.validate_jwt(ACCESS_TOKEN_EXPIRED, userpool_iss,
                                        userpool_keys)
        assert passed is False and msg.startswith("Failed to verify signature")
    ###########################################################################
    # Test case: failed: Token has expired ("exp" one day in the past)
    with patch("jose.jwt.get_unverified_header") as mock_header,\
            patch("jose.jwt.get_unverified_claims") as mock_claims,\
            patch("jose.jwt.decode") as mock_decode,\
            patch("jose.jws.verify") as mock_verify:
        userpool_keys = [{"kid": "test_kid"}]
        dt = datetime.datetime.utcnow() + datetime.timedelta(days=-1)
        future_timestamp = mktime(dt.timetuple())
        mock_header.return_value = {"kid": "test_kid", "alg": ""}
        mock_claims.return_value = {
            "iss": userpool_iss,
            "token_use": "access",
            "exp": future_timestamp
        }
        mock_decode.return_value = True
        mock_verify.return_value = True
        passed, msg = auth.validate_jwt(ACCESS_TOKEN_EXPIRED, userpool_iss,
                                        userpool_keys)
        assert passed is False and msg.startswith("Token has expired")