def post(self, request, pk, now=0):
    sched_obj = self.get_object(pk)
    data_labels_len = len(sched_obj.param_label) - 1
    template_labels_len = len(list(sched_obj.template_id.params.keys()))
    print(now, data_labels_len, template_labels_len)
    try:
        print(now, type(now))
        if not now:
            settings_sched_time_zone = pytimezone(request.data["scheduler_tz"])
            if sched_obj.scheduled_on.replace(tzinfo=datetime.timezone.utc) < timezone.now().astimezone(
                    settings_sched_time_zone).replace(tzinfo=settings_time_zone):
                return Response("Date and time incorrect",
                                status=status.HTTP_400_BAD_REQUEST)
            ## put data in database and then schedule the job for it
            return WhatsappScheduler.setup_whatsapp_template_schedule(
                pk, request.data["scheduler_tz"])
        else:
            ## send now
            res = whatsapp_template_schedule.delay(pk, api=True)
            if res:
                return Response(status=status.HTTP_200_OK)
            return Response(status=status.HTTP_400_BAD_REQUEST)
    except Exception as ex:
        print(ex)
        return Response(status=status.HTTP_400_BAD_REQUEST)
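The view above validates the schedule time by swapping `tzinfo` values before comparing against `timezone.now()`. A minimal, self-contained sketch of the simpler route (names are illustrative, not from the view): make both datetimes aware and compare them directly, letting Python normalize the offsets.

from datetime import datetime, timezone as dt_timezone
from pytz import timezone as pytimezone

def is_in_future(scheduled_on_utc_naive, scheduler_tz_name):
    """Illustrative helper: treat a naive DB timestamp as UTC and compare it with
    the current time in the scheduler's zone. Aware datetimes compare correctly
    across zones, so no manual tzinfo swapping is needed."""
    scheduled_on = scheduled_on_utc_naive.replace(tzinfo=dt_timezone.utc)
    now = datetime.now(pytimezone(scheduler_tz_name))
    return scheduled_on > now

# Example: a job stored as naive UTC, checked against Singapore local time.
print(is_in_future(datetime(2030, 1, 1, 12, 0), 'Asia/Singapore'))  # True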
def datetime_as_sgt(dt):
    """Set a datetime with the SGT timezone and return the datetime.

    Raises:
        AssertionError:
            Raised if `dt` is not of datetime class.
    """
    assert isinstance(dt, datetime)
    return dt.astimezone(pytimezone('Asia/Singapore'))
def datetime_as_sgt(dt):
    """Set a datetime with the SGT timezone and return the datetime.

    Raises:
        ValueError:
            Raised if `dt` is not of datetime class.
    """
    if not isinstance(dt, datetime):
        raise ValueError('dt is not a datetime object.')
    return dt.astimezone(pytimezone('Asia/Singapore'))
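Both variants lean on `datetime.astimezone()`. One general point worth keeping in mind (a property of Python/pytz, not something the snippets state): on Python 3.6+ a naive input is presumed to be system local time before conversion, so only aware inputs convert deterministically. A small usage sketch:

from datetime import datetime, timezone
from pytz import timezone as pytimezone

SGT = pytimezone('Asia/Singapore')

# Aware input: the conversion is unambiguous (08:00 UTC -> 16:00 SGT).
aware = datetime(2019, 7, 1, 8, tzinfo=timezone.utc)
print(aware.astimezone(SGT).hour)  # 16

# Naive input: Python assumes the *system* local zone before converting,
# so the result depends on where the code happens to run.
naive = datetime(2019, 7, 1, 8)
print(naive.astimezone(SGT))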
def schedule_index(request):
    # TODO this needs to be gotten, not hardcoded
    now = datetime.now(pytimezone('US/Eastern'))
    tda = now - timedelta(7)
    filterName = ""
    if request.user.is_authenticated():
        if request.method == 'POST':
            filterName = request.POST.get("show", "")
            if filterName == "":
                eps_list = episode_data.objects.filter(show__user_tv_shows__user=request.user, air_date__gte=tda).order_by('air_date', 'show__air_time', 'eps_number')
            else:
                eps_list = episode_data.objects.filter(show__name=filterName, show__user_tv_shows__user=request.user).order_by('eps_number')
        else:
            eps_list = episode_data.objects.filter(show__user_tv_shows__user=request.user, air_date__gte=tda).order_by('air_date', 'show__air_time', 'eps_number')
        show_list = tv_shows.objects.filter(show_type="tvshow", active=1, user_tv_shows__user=request.user).order_by('name')
    else:
        if request.method == 'POST':
            filterName = request.POST.get("show", "")
            if filterName == "":
                eps_list = episode_data.objects.filter(air_date__gte=tda).order_by('air_date', 'eps_number')
            else:
                eps_list = episode_data.objects.filter(show=tv_shows.objects.get(name=filterName)).order_by('eps_number')
        else:
            eps_list = episode_data.objects.filter(air_date__gte=tda).order_by('air_date', 'eps_number')
        show_list = tv_shows.objects.filter(show_type="tvshow", active=1).order_by('name')
    for e in eps_list:
        air_time = tv_shows.objects.get(id=e.show_id).air_time + int(now.strftime("%z"))
        # integer division so replace() gets an int hour
        e.air_date = e.air_date.replace(hour=(air_time // 100), minute=air_time % 100, tzinfo=now.tzinfo)
        e.air_date_string = e.air_date.strftime("%A, %B %d %Y at %I:%M %p")
        if e.air_date < (now - timedelta(hours=1)):
            e.css_markup = "oldrow"
        elif e.air_date < now:
            e.css_markup = "playing"
        elif e.air_date < now + timedelta(1):
            e.css_markup = "today"
        elif e.air_date >= now + timedelta(7):
            e.css_markup = "futurerow"
        else:
            e.css_markup = "currentrow"
        if e.downloaded:
            e.uri = e.uri.replace("/mnt/raid/", "ftp://192.168.1.3/")
    t = loader.get_template('templates/schedule_index.html')
    future = now + timedelta(7)
    c = Context({
        'eps_list': eps_list,
        'user': request.user,
        'show_list': show_list,
        'show_name': filterName,
    })
    return HttpResponse(t.render(c))
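The loop above splices an HHMM integer air time into a date with `replace()` and a borrowed `tzinfo`. A hedged alternative sketch (function and argument names are mine) that combines the date and time first and localizes once with pytz:

from datetime import datetime, time
from pytz import timezone as pytimezone

EASTERN = pytimezone('US/Eastern')

def airing_datetime(air_date, air_time_hhmm):
    """Combine a date with an HHMM integer (e.g. 2130) and localize it to US/Eastern."""
    naive = datetime.combine(air_date, time(air_time_hhmm // 100, air_time_hhmm % 100))
    return EASTERN.localize(naive)

print(airing_datetime(datetime(2021, 3, 4).date(), 2130))  # 2021-03-04 21:30:00-05:00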
def setup_whatsapp_template_schedule(pk, sched_tz):
    try:
        sched_obj = ScheduleTask.objects.get(pk=pk)
        scheduled_on = sched_obj.scheduled_on
        settings_sched_time_zone = pytimezone(sched_tz)
        print("before changing, ", scheduled_on)
        local_now = datetime.now(pytimezone(sched_tz))
        offset = local_now.utcoffset().total_seconds()
        print(offset)
        offset_delta = timedelta(seconds=offset)
        # scheduled_on = scheduled_on.astimezone(settings_time_zone).replace(tzinfo=settings_sched_time_zone)
        scheduled_on = scheduled_on - offset_delta
        print("scheduling job ", str(scheduled_on))
        print(pk)
        WhatsappScheduler.sched.add_job(whatsapp_template_schedule,
                                        'date',
                                        run_date=str(scheduled_on),
                                        args=[pk],
                                        id=str(pk))
        sched_obj.scheduled_flag = True
        sched_obj.task_done = False
        sched_obj.save()
        return Response({
            "scheduled_flag": True,
            "task_done": False
        }, status=status.HTTP_201_CREATED)
    except Exception as ex:
        print(ex)
        return Response(status=status.HTTP_400_BAD_REQUEST)
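The scheduler above shifts the run time by the zone's current UTC offset and passes it to `add_job` as a string. APScheduler also accepts an aware datetime for `run_date`, which avoids the manual offset arithmetic. A hedged, self-contained sketch with a stand-in job function (not the project's `whatsapp_template_schedule` task):

import time
from datetime import datetime, timedelta, timezone
from apscheduler.schedulers.background import BackgroundScheduler

def send_template(pk):
    print("sending template for schedule", pk)

sched = BackgroundScheduler(timezone='UTC')
sched.start()

# An aware datetime needs no manual offset arithmetic; APScheduler converts it itself.
run_at = datetime.now(timezone.utc) + timedelta(seconds=2)
sched.add_job(send_template, 'date', run_date=run_at, args=[42], id='42')

time.sleep(3)   # give the background thread time to fire in this demo
sched.shutdown()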
from datetime import date, datetime

import pytest
from freezegun import freeze_time
from pytz import timezone as pytimezone

from landtransportsg import timezone

# constants for testing last three months-related dates
GOOD_CUTOFF_DAY = 15
LEAP_YEAR_DATE = freeze_time('2018-03-17')
NON_LEAP_YEAR_DATE = freeze_time('2019-06-03')

@pytest.mark.parametrize(
    ('date_time', 'expected_hour'),
    [
        (datetime(2019, 7, 1, 8), 8),
        (datetime(2019, 7, 1, 8, tzinfo=pytimezone('Asia/Singapore')), 8),
        (datetime(2019, 7, 1, 8, tzinfo=pytimezone('UTC')), 16),
    ],
)
def test_datetime_as_sgt(date_time, expected_hour):
    sgt_date_time = timezone.datetime_as_sgt(date_time)
    assert sgt_date_time.hour == expected_hour

@pytest.mark.parametrize(
    'date_time',
    ['2019-07-13 08:32:17', '2019-07-13 08:32:17+08:00'],
)
def test_datetime_as_sgt_from_bad_datetime(date_time):
    with pytest.raises(ValueError):
        _ = timezone.datetime_as_sgt(date_time)
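One pytz caveat the parametrized cases brush against: passing a pytz zone straight into the `datetime` constructor via `tzinfo=` attaches the zone's first historical offset rather than the offset in force on that date. A small sketch showing why `localize()` is the usual way to build aware pytz datetimes:

from datetime import datetime
import pytz

sgt = pytz.timezone('Asia/Singapore')

# Constructor tzinfo: pytz attaches the historical LMT offset (about +06:55), not +08:00.
direct = datetime(2019, 7, 1, 8, tzinfo=sgt)
print(direct.utcoffset())

# localize() picks the offset that is actually in force on that date.
localized = sgt.localize(datetime(2019, 7, 1, 8))
print(localized.utcoffset())   # 8:00:00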
def parse_idx_feed(self):

    root = self.ygl_file.getroot()

    # Reporting information
    num_houses = 0
    num_of_duplicates = 0
    num_of_value_errors = 0
    num_integrity_error = 0
    num_updated_homes = 0
    num_added_homes = 0

    # Loop through every home
    for house in root.iter('Rental'):
        num_houses += 1
        new_listing = RentDatabaseModel()
        street_number = ""
        street_name = ""

        for element in house:
            try:
                if element.tag == 'ID':
                    new_listing.listing_number = element.text
                elif element.tag == 'StreetNumber':
                    street_number = element.text
                elif element.tag == 'StreetName':
                    street_name = element.text
                elif element.tag == 'City':
                    new_listing.city = element.text
                elif element.tag == 'State':
                    new_listing.state = element.text
                elif element.tag == 'Zip':
                    new_listing.zip_code = element.text
                elif element.tag == 'UnitNumber':
                    new_listing.apartment_number = element.text.lower()
                elif element.tag == 'Latitude':
                    new_listing.latitude = element.text
                elif element.tag == 'Longitude':
                    new_listing.longitude = element.text
                elif element.tag == 'Beds':
                    new_listing.num_bedrooms = int(element.text)
                elif element.tag == 'Baths':
                    # don't support decimals right now
                    new_listing.num_bathrooms = round(float(element.text))
                elif element.tag == 'AvailableDate':
                    date_available = datetime.strptime(element.text, '%m/%d/%Y')
                    new_listing.date_available = date_available
                    # Need to compare non-naive timezone date to non-naive.
                    # This way the dates are comparable
                    date_available = pytimezone('US/Eastern').localize(date_available)
                    if timezone.now() > date_available - timedelta(days=CURRENTLY_AVAILABLE_DELTA_DAYS):
                        new_listing.currently_available = True
                    else:
                        new_listing.currently_available = False
                elif element.tag == 'Pet':
                    if 'Dog Ok' in element.text:
                        new_listing.dogs_allowed = True
                    elif 'Cat Ok' in element.text:
                        new_listing.cats_allowed = True
                    elif 'Pet Friendly' in element.text:
                        new_listing.dogs_allowed = True
                        new_listing.cats_allowed = True
                    elif 'Negotiable' in element.text:
                        new_listing.dogs_allowed = True
                        new_listing.cats_allowed = True
                elif element.tag == 'Parking':
                    if element.text == 'Included':
                        new_listing.parking_spot = True
                elif element.tag == 'Price':
                    new_listing.price = int(element.text)
                elif element.tag == 'Features':
                    # Initialize word scraper
                    word_scraper = WordScraper(element.text)
                    new_listing.laundromat_nearby = word_scraper.look_for_laundromat()
                    new_listing.furnished = word_scraper.look_for_furnished()
                    new_listing.hardwood_floors = word_scraper.look_for_hardwood_floors()
                    new_listing.dishwasher = word_scraper.look_for_dishwasher()
                    new_listing.air_conditioning = word_scraper.look_for_ac()
                    new_listing.pool = word_scraper.look_for_pool()
                    new_listing.patio_balcony = word_scraper.look_for_balcony()
                    new_listing.laundry_in_building = word_scraper.look_for_laundry_in_building()
                    new_listing.laundry_in_unit = word_scraper.look_for_laundry_in_unit()
                    new_listing.gym = word_scraper.look_for_gym()
                    new_listing.storage = word_scraper.look_for_storage()
                    new_listing.remarks = element.text
            except ValueError:
                print("[ VALUE ERROR ] Could not add home")
                num_of_value_errors += 1
                continue

        new_listing.home_type = HomeTypeModel.objects.get(home_type=HomeTypeModel.APARTMENT)
        new_listing.listing_provider = HomeProviderModel.objects.get(provider="YGL")
        new_listing.last_updated = self.update_timestamp
        new_listing.street_address = normalize_street_address("{0} {1}".format(street_number, street_name))

        # Determines if the home already exists as a YGL house
        if RentDatabaseModel.objects \
                .filter(listing_provider=new_listing.listing_provider) \
                .filter(street_address=new_listing.street_address) \
                .filter(city=new_listing.city) \
                .filter(state=new_listing.state) \
                .filter(zip_code=new_listing.zip_code) \
                .filter(apartment_number=new_listing.apartment_number) \
                .exists():

            # Retrieve the home that the home matches
            existing_apartment = RentDatabaseModel.objects.get(
                street_address=new_listing.street_address,
                city=new_listing.city,
                state=new_listing.state,
                zip_code=new_listing.zip_code,
                apartment_number=new_listing.apartment_number
            )
            existing_apartment.update(new_listing)
            existing_apartment.save()
            num_updated_homes += 1
            print("[ UPDATED ] {0}".format(existing_apartment.full_address))

        # Tests if the home exists within another provider
        # If so mark it as a duplicate and don't add it
        elif RentDatabaseModel.objects \
                .filter(street_address=new_listing.street_address) \
                .filter(city=new_listing.city) \
                .filter(state=new_listing.state) \
                .filter(zip_code=new_listing.zip_code) \
                .filter(apartment_number=new_listing.apartment_number) \
                .exists():
            num_of_duplicates += 1
            print("[ DUPLICATE ] " + new_listing.full_address)

        else:
            try:
                new_listing.save()
                print("[ ADDING ] " + new_listing.full_address)
                num_added_homes += 1
            except IntegrityError:
                print("[ Integrity Error ] ")
                num_integrity_error += 1

    manager = HomeProviderModel.objects.get(provider="YGL")
    manager.last_updated_feed = self.update_timestamp
    manager.save()

    print("")
    print("RESULTS:")
    logger.info("\nNumber of houses in database: {0}\n".format(num_houses) +
                "Num added homes: {0}\n".format(num_added_homes) +
                "Num updated homes: {0}\n".format(num_updated_homes) +
                "Update timestamp: {0}\n".format(self.update_timestamp.date()) +
                "Number of duplicates: {0}\n".format(num_of_duplicates) +
                "Number of value errors: {0}\n".format(num_of_value_errors) +
                "Number of integrity error is: {0}\n".format(num_integrity_error))
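parse_idx_feed decides `currently_available` by localizing the feed's naive `AvailableDate` to US/Eastern and comparing it with `timezone.now()`. The same comparison in isolation, as a hedged sketch outside Django (the delta constant mirrors `CURRENTLY_AVAILABLE_DELTA_DAYS`, but its value here is illustrative):

from datetime import datetime, timedelta
import pytz

CURRENTLY_AVAILABLE_DELTA_DAYS = 14  # illustrative value; the feed parser defines its own

def is_currently_available(available_text, now_utc=None):
    """Parse a naive 'MM/DD/YYYY' date, localize it to US/Eastern, and compare it
    against an aware 'now' so the two datetimes are actually comparable."""
    date_available = datetime.strptime(available_text, '%m/%d/%Y')
    date_available = pytz.timezone('US/Eastern').localize(date_available)
    if now_utc is None:
        now_utc = datetime.now(pytz.utc)
    return now_utc > date_available - timedelta(days=CURRENTLY_AVAILABLE_DELTA_DAYS)

print(is_currently_available('01/01/2020'))  # True for any 'now' after late 2019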
def mock_utcnow():
    localtz = pytimezone(config.DEFAULT_TIMEZONE)
    return localtz.localize(datetime(2018, 2, 20, 10, 10))
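mock_utcnow returns a fixed, localized datetime so tests see a deterministic clock. A hedged, standalone sketch of the same idea; the `config` module isn't shown in the snippet, so a literal zone name stands in for `config.DEFAULT_TIMEZONE`:

from datetime import datetime
from pytz import timezone as pytimezone

DEFAULT_TIMEZONE = 'Europe/Berlin'  # stand-in for config.DEFAULT_TIMEZONE

def mock_utcnow():
    localtz = pytimezone(DEFAULT_TIMEZONE)
    return localtz.localize(datetime(2018, 2, 20, 10, 10))

# In a test, this helper would typically be patched in place of the real clock.
fixed = mock_utcnow()
print(fixed.isoformat())   # 2018-02-20T10:10:00+01:00
print(fixed.tzinfo)        # Europe/Berlin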
def get(self, user=None, sub=None, medical_record_number=None, role=None):
    response = None
    format = request.args.get('format')
    try:
        start = None
        if request.args.get('start'):
            start = datetime.strptime(request.args.get('start'), '%Y-%m-%d %H:%M')
        end = None
        if request.args.get('end'):
            end = datetime.strptime(request.args.get('end'), '%Y-%m-%d %H:%M')
            end += timedelta(days=1)
        if not medical_record_number:
            medical_record_number = request.args.get('medical_record_number', None)
        unit_floor = request.args.get('unit_floor')

        report_query = DBPatientHistory.query.join(
            DBPatient,
            DBPatient.medical_record_number == DBPatientHistory.medical_record_number
        ).filter(DBPatient.deleted == False)

        if medical_record_number:
            medical_record_number = medical_record_number.split(',')
            report_query = report_query.filter(DBPatientHistory.medical_record_number.in_(medical_record_number))
        if start:
            report_query = report_query.filter(DBPatientHistory.date_of_record >= start)
        if end:
            report_query = report_query.filter(DBPatientHistory.date_of_record < end)
        if unit_floor:
            report_query = report_query.filter(DBPatientHistory.unit_floor == unit_floor)

        patients = report_query.order_by(DBPatientHistory.name).all()

        response = []
        if patients:
            timezone = None
            if format and format.lower() == CSV:
                try:
                    timezone = pytimezone(
                        requests.get('http://freegeoip.net/json/' + str(request.remote_addr)).json().get('time_zone'))
                except:
                    timezone = None
            for patient in patients:
                patient_json = PatientInfoReportJsonSerializer(
                    timezone=timezone,
                    format='csv' if format and format.lower() == CSV else 'json'
                ).serialize(patient)
                patient_json['username'] = patient_json['user']
                response.append(patient_json)
        response = {'patient_reports': response, 'total_amount': len(response)}
        status_code = status.HTTP_200_OK
    except Exception as e:
        struct_logger.error(instance=LOGGING_INSTANCE,
                            path=request.path,
                            method=request.method,
                            exception=e.message)
        response = e.message
        status_code = status.HTTP_400_BAD_REQUEST
    log(request.remote_addr, request.path, request.method, request.args, status_code, user=user, device=None)
    if status_code == status.HTTP_200_OK and format and format.lower() == CSV:
        return self.output_csv(response['patient_reports'], status_code, CSV_HEADERS)
    else:
        response['patient_reports'] = response['patient_reports'][:1000]
        return response, status_code
from .models import ScheduleTask
from apps.whatsappbot.models import TemplateApproval
from .serializers import WhatsappMakeScheduleRegDetailSerializer
from apps.whatsappbot.events import send_text_template_message, send_media_template_message
import pandas as pd
import json
import io
import requests
from apscheduler.schedulers.background import BackgroundScheduler
import _thread
from django.utils import timezone, dateformat
from pytz import timezone as pytimezone
from datetime import datetime, timedelta
import chatbot.settings as settings
from celery import task

settings_time_zone = pytimezone(settings.TIME_ZONE)
print("this is time: ", dateformat.format(timezone.now(), 'd/m/Y H:i:s'))


def fetch_csv(request, sched_obj):
    csv_url = request.data['data_url']
    s = requests.get(csv_url).content
    print(s)
    csv = pd.read_csv(io.StringIO(s.decode('utf-8')))
    csv_json = csv.to_json(orient='index')
    request.data['data'] = json.loads(csv_json)
    serializer = WhatsappMakeScheduleRegDetailSerializer(sched_obj, data=request.data,
                                                         partial=True)  # the call continues in the source; partial=True mirrors the PUT handler's usage
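fetch_csv downloads a CSV and round-trips it through pandas into the JSON structure the serializer expects. The same transformation in isolation, as a hedged sketch that feeds pandas from an in-memory payload instead of a URL:

import io
import json
import pandas as pd

raw = b"whatsapp_number,p1\n6591234567,Alice\n6598765432,Bob\n"

# Decode the HTTP payload and parse it without touching disk.
frame = pd.read_csv(io.StringIO(raw.decode('utf-8')))

# orient='index' keys each record by its row index, matching the view's usage.
records = json.loads(frame.to_json(orient='index'))
print(records)   # {'0': {'whatsapp_number': 6591234567, 'p1': 'Alice'}, ...}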
def add_home_to_database(self, home):
    new_listing = RentDatabaseModel()
    num_of_value_errors = 0
    try:
        # Home Address info
        new_listing.street_address = "{0} {1}".format(
            home['StreetNumber'], home['StreetName']).replace(',', '')
        new_listing.city = home['City']
        new_listing.state = home['StateOrProvince']
        new_listing.zip_code = home['PostalCode']
        new_listing.latitude = home['Latitude']
        new_listing.longitude = home['Longitude']

        # Home Basic info
        new_listing.price = int(float(home['ListPrice']))
        new_listing.num_bedrooms = int(home['BedroomsTotal'])
        new_listing.num_bathrooms = int(home['BathroomsFull'])
        new_listing.apartment_number = home['UnitNumber']

        # MLS listing information
        new_listing.remarks = home['PublicRemarks']
        new_listing.listing_number = home['ListingId']
        new_listing.listing_agent = home['ListAgentMlsId']
        new_listing.listing_office = home['ListOfficeMlsId']
        new_listing.listing_provider = HomeProviderModel.objects.get_or_create(
            provider=HomeProviderModel.MLSPIN)[0]
        new_listing.showing_instructions = home['ShowingInstructions']
        new_listing.showing_remarks = home['FIRM_RMK1']

        # Amenities
        new_listing.dogs_allowed = 'yes' in home['PETS_ALLOWED'].lower()
        new_listing.cats_allowed = 'yes' in home['PETS_ALLOWED'].lower()
        word_scraper_remarks = WordScraper(new_listing.remarks)
        word_scraper_appliances = WordScraper(home['Appliances'])
        new_listing.air_conditioning = home['AIR_CONDITION'] == 'Yes'
        if word_scraper_remarks.look_for_ac() or word_scraper_appliances.look_for_ac():
            new_listing.air_conditioning = True
        new_listing.furnished = word_scraper_remarks.look_for_furnished() \
            or word_scraper_appliances.look_for_furnished()
        new_listing.hardwood_floors = word_scraper_remarks.look_for_hardwood_floors() \
            or word_scraper_appliances.look_for_hardwood_floors()
        new_listing.dishwasher = word_scraper_remarks.look_for_dishwasher() \
            or word_scraper_appliances.look_for_dishwasher()
        new_listing.laundry_in_building = word_scraper_remarks.look_for_laundry_in_building() \
            or word_scraper_appliances.look_for_laundry_in_building()
        new_listing.pool = word_scraper_remarks.look_for_pool()
        new_listing.patio_balcony = word_scraper_remarks.look_for_balcony()
        new_listing.storage = word_scraper_remarks.look_for_storage()

        new_listing.last_updated = self.update_timestamp

        list_type = home['RN_TYPE']
        if list_type == "Apartment":
            new_listing.home_type = HomeTypeModel.objects.get_or_create(
                home_type=HomeTypeModel.APARTMENT)[0]
        elif list_type == "Single Family":
            new_listing.home_type = HomeTypeModel.objects.get_or_create(
                home_type=HomeTypeModel.SINGLE_FAMILY)[0]
        elif list_type == "Condominium":
            new_listing.home_type = HomeTypeModel.objects.get_or_create(
                home_type=HomeTypeModel.CONDO)[0]
        else:
            new_listing.home_type = HomeTypeModel.objects.get_or_create(
                home_type=HomeTypeModel.OTHER)[0]

        if home['Date_Available']:
            date_available = datetime.strptime(home['Date_Available'], '%Y-%m-%dT%H:%M:%S')
            new_listing.date_available = date_available
            date_available = pytimezone('US/Eastern').localize(date_available)
            if timezone.now() > date_available - timedelta(days=CURRENTLY_AVAILABLE_DELTA_DAYS):
                new_listing.currently_available = True
            else:
                self.num_available_in_future += 1
        else:
            new_listing.currently_available = True

    except ValueError:
        num_of_value_errors += 1
        return

    # Determines if the home already exists as a MLSPIN house
    if RentDatabaseModel.objects \
            .filter(listing_provider=new_listing.listing_provider) \
            .filter(street_address=new_listing.street_address) \
            .filter(city=new_listing.city) \
            .filter(state=new_listing.state) \
            .filter(zip_code=new_listing.zip_code) \
            .filter(apartment_number=new_listing.apartment_number) \
            .exists():

        # Retrieve the home that the home matches
        existing_apartment = RentDatabaseModel.objects.get(
            street_address=new_listing.street_address,
            city=new_listing.city,
            state=new_listing.state,
            zip_code=new_listing.zip_code,
            apartment_number=new_listing.apartment_number)

        # Since the apartments are the same
        # Update the existing apartment with the fields stored in the new listing
        existing_apartment.update(new_listing)
        try:
            existing_apartment.save()
            print("[ UPDATED ] {0}".format(existing_apartment.full_address))
            self.num_updated_homes += 1
        except ValidationError:
            print('Validation error')
            self.num_validation_error += 1

    # Tests if the home exists within another provider
    # If so mark it as a duplicate and don't add it
    elif RentDatabaseModel.objects \
            .filter(street_address=new_listing.street_address) \
            .filter(city=new_listing.city) \
            .filter(state=new_listing.state) \
            .filter(zip_code=new_listing.zip_code) \
            .filter(apartment_number=new_listing.apartment_number) \
            .exists():
        print("[ DUPLICATE ] {0}".format(new_listing.full_address))
        self.num_of_duplicates += 1

    else:
        try:
            new_listing.save()
            self.num_added_homes += 1
            print("[ ADDING ] " + new_listing.full_address)
        except IntegrityError:
            print("[ Integrity Error ] ")
            self.num_integrity_errors += 1
        except ValidationError:
            print("[ Validation Error ] ")
            self.num_validation_error += 1
def get(self, user=None, sub=None, medical_record_number=None, role=None):
    response = None
    events = None
    format = request.args.get('format')
    try:
        start_time = time.time()
        start = None
        if request.args.get('start'):
            start = datetime.strptime(request.args.get('start'), '%Y-%m-%d %H:%M')
        end = None
        if request.args.get('end'):
            end = datetime.strptime(request.args.get('end'), '%Y-%m-%d %H:%M')
            end += timedelta(days=1)
        if not medical_record_number:
            medical_record_number = request.args.get('medical_record_number', None)
        unit_floor = request.args.get('unit_floor')

        report_query = DBEventHistory.query.with_entities(
            DBEventHistory.unit_floor, DBEventHistory.event_type,
            DBEventHistory.occurred, DBEventHistory.location,
            DBEventHistory.sensor_serial, DBEventHistory.battery,
            DBEventHistory.distance, DBEventHistory.alarm_threshold_minutes,
            DBEventHistory.alarm_clear_multiple,
            DBEventHistory.previous_alarm_threshold_hours,
            DBEventHistory.wound_alarm_threshold_minutes,
            DBEventHistory.wound_alarm_clear_multiple
        ).join(
            DBPatient,
            DBPatient.medical_record_number == DBEventHistory.medical_record_number
        ).filter(
            DBPatient.deleted == False
        ).add_columns(
            DBPatient.name, DBPatient.last_name
        ).filter(DBEventHistory.event_type != 'Sensor Stopped')

        if medical_record_number:
            medical_record_number = medical_record_number.split(',')
            report_query = report_query.filter(
                DBEventHistory.medical_record_number.in_(medical_record_number))
            if start:
                report_query = report_query.filter(DBEventHistory.occurred >= start)
            if end:
                report_query = report_query.filter(DBEventHistory.occurred < end)
            if unit_floor:
                report_query = report_query.filter(DBEventHistory.unit_floor == unit_floor)
            events = report_query.order_by(DBEventHistory.occurred.desc()).all()
        else:
            struct_logger.msg(instance=LOGGING_INSTANCE,
                              time='--- %s seconds ---' % (time.time() - start_time))
            # with profiled():
            engine = create_engine(app.config['SQLALCHEMY_DATABASE_URI'], echo=False)
            conn = engine.connect()
            start = None
            if request.args.get('start'):
                start = datetime.strptime(request.args.get('start'), '%Y-%m-%d %H:%M')
                start = start.strftime('%Y-%m-%d %H:%M:%S')
            end = None
            if request.args.get('end'):
                end = datetime.strptime(request.args.get('end'), '%Y-%m-%d %H:%M')
                end += timedelta(days=1)
                end = end.strftime('%Y-%m-%d %H:%M:%S')
            unit_floor = request.args.get('unit_floor')

            query_string = 'SELECT event_history.event_type AS event_type, event_history.unit_floor AS unit_floor, event_history.occurred AS occurred, event_history.location AS location, event_history.sensor_serial AS sensor_serial, event_history.battery AS battery, event_history.distance AS distance, event_history.alarm_threshold_minutes AS alarm_threshold_minutes, event_history.alarm_clear_multiple AS alarm_clear_multiple, event_history.previous_alarm_threshold_hours AS previous_alarm_threshold_hours, event_history.wound_alarm_threshold_minutes AS wound_alarm_threshold_minutes, event_history.wound_alarm_clear_multiple AS wound_alarm_clear_multiple, patient.name AS name, patient.last_name AS last_name FROM event_history INNER JOIN patient ON patient.medical_record_number = event_history.medical_record_number and patient.deleted = false '

            if start or end or unit_floor:
                query_string += 'WHERE '
            AND = False
            if start:
                query_string += 'event_history.occurred >= "' + start + '" '
                AND = True
            if end:
                if AND:
                    query_string += 'and '
                else:
                    AND = True
                query_string += 'event_history.occurred < "' + end + '" '
            if unit_floor:
                if AND:
                    query_string += 'and '
                else:
                    AND = True
                query_string += 'event_history.unit_floor = "' + unit_floor + '" '
            query_string += 'ORDER BY event_history.occurred DESC'

            result = conn.execute(query_string)
            struct_logger.msg(instance=LOGGING_INSTANCE,
                              time='--- %s seconds ---' % (time.time() - start_time))
            events = result.fetchall()

        response = []
        if events:
            timezone = None
            if format and format.lower() == CSV:
                try:
                    timezone = pytimezone(
                        requests.get('http://freegeoip.net/json/' + str(request.remote_addr)).json().get('time_zone'))
                except:
                    timezone = None
            prev_event = None
            for event in events:
                if (event.event_type not in ('Sensor came online', 'Device is plugged into power')
                        or not prev_event
                        or prev_event and event.occurred - prev_event.occurred > timedelta(seconds=15)
                        and (prev_event.event_type == 'Sensor went offline'
                             and event.event_type == 'Sensor came online'
                             or prev_event.event_type == 'Device is unplugged from power'
                             and event.event_type == 'Device is plugged into power')):
                    event_json = EventHistoryReportJsonSerializer(
                        timezone=timezone,
                        format='csv' if format and format.lower() == CSV else 'json').serialize(event)
                    response.append(event_json)
                else:
                    response.pop()
                prev_event = event
        response = {'event_reports': response, 'total_amount': len(response)}
        status_code = status.HTTP_200_OK
    except Exception as e:
        struct_logger.error(instance=LOGGING_INSTANCE,
                            path=request.path,
                            method=request.method,
                            exception=e.message)
        response = e.message
        status_code = status.HTTP_400_BAD_REQUEST
    log(request.remote_addr, request.path, request.method, request.args, status_code, user=user, device=None)
    if status_code == status.HTTP_200_OK and format and format.lower() == CSV:
        return self.output_csv(response['event_reports'], status_code, CSV_HEADERS)
    else:
        struct_logger.msg(instance=LOGGING_INSTANCE,
                          time='--- %s seconds ---' % (time.time() - start_time))
        response['event_reports'] = response['event_reports'][:1000]
        return response, status_code
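Both report endpoints resolve the client's zone from a geo-IP lookup and hand it to the serializer for CSV output. A hedged sketch of the conversion step itself; the serializer internals are not shown in the snippets, so the helper below is illustrative:

from datetime import datetime, timezone
import pytz

def occurred_in_client_zone(occurred_utc, client_tz_name):
    """Render a UTC event timestamp in the client's zone for CSV export."""
    client_tz = pytz.timezone(client_tz_name) if client_tz_name else pytz.utc
    if occurred_utc.tzinfo is None:
        occurred_utc = occurred_utc.replace(tzinfo=timezone.utc)
    return occurred_utc.astimezone(client_tz).strftime('%Y-%m-%d %H:%M:%S %Z')

print(occurred_in_client_zone(datetime(2020, 6, 1, 12, 0), 'America/Chicago'))
# 2020-06-01 07:00:00 CDT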
def put(self, request, pk, format=None):
    sched_obj = self.get_object(pk)
    ## _thread.start_new_thread(fetch_csv, (request, sched_obj))
    try:
        if 'scheduler_excel' in list(request.data.keys()):
            file = request.data['scheduler_excel']
            if "csv" in str(file.name):
                print("csv inside")
                csv = pd.read_csv(file, dtype=str)
            elif "xlsx" in str(file.name):
                print("xlsx inside", file.name)
                csv = pd.read_excel(file, dtype=str)
            else:
                print("unsupported")
                return Response(status=status.HTTP_415_UNSUPPORTED_MEDIA_TYPE)
            csv.dropna(subset=["whatsapp_number"], inplace=True)
            print(csv)
            csv_json = csv.to_json(orient='index')
            request.data['data'] = list(
                json.loads(csv_json).values()
            )  ## [{wa:"", p1:"", p2:"", link:""}, {wa:"", p1:"", p2:"", link:""}]
            param_labels = request.data['data'][0]  ## {wa:"", p1:"", p2:"", link:""}
            param_labels = list(param_labels.keys())
            print(param_labels)

            ## check if parameters match
            if "whatsapp_number" not in param_labels:
                return Response("missing 'whatsapp_number' param ",
                                status=status.HTTP_409_CONFLICT)
            if sched_obj.template_id.template_type != "text":
                if "link" not in param_labels:
                    return Response("missing 'link' param ",
                                    status=status.HTTP_409_CONFLICT)
                param_labels.remove("link")
            if set(param_labels) & set(list(sched_obj.template_id.params.values())) != set(
                    list(sched_obj.template_id.params.values())):
                return Response("param's labels mismatch",
                                status=status.HTTP_409_CONFLICT)
            if len(param_labels) - 1 != len(list(sched_obj.template_id.params.keys())):
                return Response("param's length mismatch",
                                status=status.HTTP_409_CONFLICT)
            if sched_obj.template_id.template_type != "text":
                param_labels.append("link")
            # if all correct
            request.data['param_label'] = param_labels
            request.data["extra"] = {"data_len": len(request.data['data'])}

        if 'scheduled_on' in list(request.data.keys()):
            print(request.data['scheduled_on'])
            print(request.data["scheduler_tz"])
            settings_sched_time_zone = pytimezone(request.data["scheduler_tz"])
            request.data['scheduled_on'] = parse_datetime(request.data['scheduled_on'])
            print(request.data['scheduled_on'].replace(tzinfo=datetime.timezone.utc),
                  " ////// ",
                  timezone.now().astimezone(settings_sched_time_zone).replace(tzinfo=settings_time_zone))
            if request.data['scheduled_on'].replace(tzinfo=datetime.timezone.utc) < timezone.now().astimezone(
                    settings_sched_time_zone).replace(tzinfo=settings_time_zone):
                return Response("scheduled time already passed.",
                                status=status.HTTP_409_CONFLICT)

        serializer = WhatsappMakeScheduleRegDetailSerializer(sched_obj,
                                                             data=request.data,
                                                             partial=True)
        if serializer.is_valid(raise_exception=True):
            serializer.save()
            return Response(serializer.data)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
    except Exception as ex:
        print(ex)
        # the serializer may not exist yet if the exception happened earlier, so return a plain 400
        return Response(status=status.HTTP_400_BAD_REQUEST)
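The PUT handler validates the uploaded sheet by comparing its column labels against the template's expected parameters with set intersection. That check on its own, as a hedged sketch (the example template params are made up):

def validate_param_labels(param_labels, template_params, template_type='text'):
    """Mirror the view's checks: required columns, matching labels, matching count.
    Returns None when everything matches, otherwise the error message."""
    labels = list(param_labels)
    if 'whatsapp_number' not in labels:
        return "missing 'whatsapp_number' param"
    if template_type != 'text':
        if 'link' not in labels:
            return "missing 'link' param"
        labels.remove('link')
    expected = set(template_params.values())
    if set(labels) & expected != expected:
        return "param's labels mismatch"
    if len(labels) - 1 != len(template_params):
        return "param's length mismatch"
    return None

# Example template expecting two placeholders named p1 and p2.
print(validate_param_labels(['whatsapp_number', 'p1', 'p2'], {'1': 'p1', '2': 'p2'}))  # None
print(validate_param_labels(['whatsapp_number', 'p1'], {'1': 'p1', '2': 'p2'}))        # param's labels mismatch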
from rest_framework.permissions import IsAuthenticated
from rest_framework.views import APIView
from decouple import config
from django.utils import timezone
from .events import fetch_csv, whatsapp_template_schedule, WhatsappScheduler
import _thread
import requests
import pandas as pd
import io
import json
from django.utils.dateparse import parse_datetime
import datetime
import chatbot.settings as settings
from pytz import timezone as pytimezone
import pytz

settings_sched_time_zone = pytimezone(settings.SCHED_TIME_ZONE)
settings_time_zone = pytimezone(settings.TIME_ZONE)


class WhatsappMakeSchedule(APIView):
    '''
    Api for getting all existing scheduled jobs and making new scheduled jobs
    '''
    permission_classes = [IsAuthenticated]

    def get(self, request, admin=0, format=None):
        if admin == 1:
            if request.user.role == "SA" and not request.user.user_is_deleted:
                queryset = ScheduleTask.objects.all().order_by('-created_on')
                serializer = WhatsappMakeScheduleListSerializer(queryset,
                                                                many=True)  # the call continues in the source; many=True is assumed, the usual DRF pattern for a queryset
def handle(self, *args, **options):
    # The example listing redacts a span on the next line with "******"; the elided code
    # prompts for the source-database password and opens the MySQL connection that the
    # rest of the method reads from, inside the try block closed by the except at the bottom.
    password = getpass("Enter %s password: " % options['source_db'])
    try:
        self.print("Connected to %s." % options['source_db'])
        donorids_list = []
        migrated_donations = 0
        with transaction.atomic():
            with connection.cursor() as cursor:
                if options['donorids']:
                    # migrate specific donors, beware it is a list of lists
                    donorids_list = [item for sublist in options['donorids'] for item in sublist]
                else:
                    # migrate all donors
                    select_all_donors_query = "select id from wp_give_donors;"
                    cursor.execute(select_all_donors_query)
                    allDonors = cursor.fetchall()
                    donorids_list = [row[0] for row in allDonors]
                total_donor_number = len(donorids_list)

                # get source wordpress db timezone setting
                timezone_query = "select option_value from wp_options where option_name = 'timezone_string';"
                cursor.execute(timezone_query)
                timezoneResult = cursor.fetchall()
                timezone_string = timezoneResult[0][0]
                sourcedb_tz = pytimezone(timezone_string)

                for num, donor_id in enumerate(donorids_list, 1):
                    select_donor_lj_meta_query = "select id, email, name, date_created, dnm.* from wp_give_donors dn left join wp_give_donormeta dnm on dn.id = dnm.donor_id where dn.id = %d;" % donor_id
                    select_donor_donations_query = "select distinct donation_id from wp_give_donationmeta where meta_key = '_give_payment_donor_id' and meta_value = %d;" % donor_id
                    select_subscriptions_query = "select * from wp_give_subscriptions where customer_id = %d;" % donor_id
                    self.print("[%d/%d]...Now processing queries of givewp donor %d" % (num, total_donor_number, donor_id))
                    cursor.execute(select_donor_lj_meta_query)
                    donorMetaResult = cursor.fetchall()
                    cursor.execute(select_donor_donations_query)
                    donationsResult = cursor.fetchall()
                    donationids_list = [row[0] for row in donationsResult]

                    newUser = None
                    um = None
                    for i, row in enumerate(donorMetaResult):
                        givewp_donor_email = row[1]
                        givewp_donor_name = row[2]
                        givewp_donormeta_key = row[6]
                        givewp_donormeta_value = row[7]
                        if i == 0:
                            # Add the user first
                            newUser = User.objects.create_user(email=givewp_donor_email, password=uuid4_str())
                            newUser.save()
                            # save donor email as verified and primary
                            email_obj = EmailAddress(email=givewp_donor_email, verified=True, primary=True, user=newUser)
                            email_obj.save()
                            # save donor's name attribute as UserMeta as I am not sure how to correctly split the name into first and last names
                            um = UserMeta(user=newUser, field_key='_give_donor_name', field_value=givewp_donor_name)
                            um.save()
                        if givewp_donormeta_key == '_give_donor_first_name':
                            newUser.first_name = givewp_donormeta_value
                        elif givewp_donormeta_key == '_give_donor_last_name':
                            newUser.last_name = givewp_donormeta_value
                        else:
                            um = UserMeta(user=newUser, field_key=givewp_donormeta_key, field_value=givewp_donormeta_value)
                            um.save()
                        newUser.save()
                    if options['verbose']:
                        self.print("[√] Created Newstream User (email: %s, name: %s)." % (newUser.email, newUser.fullname))

                    # add subscriptions
                    if options['verbose']:
                        self.print("...Now processing queries of subscriptions of givewp donor %d" % donor_id)
                    cursor.execute(select_subscriptions_query)
                    subscriptionsResult = cursor.fetchall()
                    newSubscription = None
                    for i, row in enumerate(subscriptionsResult):
                        # extract givewp subscription data
                        givewp_subscription_id = row[0]
                        givewp_subscription_initial_amount = row[4]
                        givewp_parent_donation_id = row[8]
                        givewp_subscription_created = row[10]
                        givewp_subscription_status = row[12]
                        subscription_profile_id = row[13]

                        # query data for subscription's parent donation
                        parent_donation_query = "select * from wp_posts where ID = %d;" % givewp_parent_donation_id
                        cursor.execute(parent_donation_query)
                        parentDonationResult = cursor.fetchone()
                        if parentDonationResult is None:
                            # donation_id might not exist in wp_posts, probably deleted
                            # see https://givewp.com/documentation/core/donors/delete-donor
                            self.print("[x] Could not obtain post %i" % givewp_parent_donation_id)
                            # remove parentDonation id from donationids_list
                            donationids_list.remove(givewp_parent_donation_id)
                            # remove renewalDonation ids from donationids_list, query data for renewals of the subscription first
                            renewals_query = "select distinct donation_id from wp_give_donationmeta where meta_key = 'subscription_id' and meta_value = %d;" % givewp_subscription_id
                            cursor.execute(renewals_query)
                            renewalsResult = cursor.fetchall()
                            for renewalID in renewalsResult:
                                donationids_list.remove(renewalID[0])
                            continue
                        parent_donation_status = parentDonationResult[7]
                        parent_donation_datetime_local = sourcedb_tz.localize(parentDonationResult[2])
                        parent_donation_datetime = parent_donation_datetime_local.astimezone(pytz.utc)

                        # query data for parent donation's meta data
                        parent_donationmeta_query = "select * from wp_give_donationmeta where donation_id = %d;" % givewp_parent_donation_id
                        cursor.execute(parent_donationmeta_query)
                        parentDonationMetaResult = cursor.fetchall()
                        parentDonationMetaDict = {}
                        for meta in parentDonationMetaResult:
                            parentDonationMetaDict[meta[2]] = meta[3]

                        # query data for renewals of the subscription
                        renewals_query = "select distinct donation_id from wp_give_donationmeta where meta_key = 'subscription_id' and meta_value = %d;" % givewp_subscription_id
                        cursor.execute(renewals_query)
                        renewalsResult = cursor.fetchall()

                        newSubscription = Subscription(
                            id=givewp_subscription_id,
                            is_test=self.paymentmode_mapping(parentDonationMetaDict['_give_payment_mode']),
                            profile_id=subscription_profile_id,
                            user=newUser,
                            gateway=self.gateway_mapping(parentDonationMetaDict['_give_payment_gateway']),
                            recurring_amount=round_half_up(givewp_subscription_initial_amount, 2),
                            currency=parentDonationMetaDict['_give_payment_currency'],
                            recurring_status=self.subscription_status_mapping(givewp_subscription_status),
                            subscribe_date=givewp_subscription_created.replace(tzinfo=timezone.utc)
                        )
                        newSubscription.save()
                        if options['verbose']:
                            self.print("[√] Created Newstream Subscription (id: %d, profile_id: %s)" % (newSubscription.id, newSubscription.profile_id))

                        # add donations linked to this subscription(need to link with the new subscription id in Newstream)
                        # need to add the parent payment first, so it gets the smallest id among the renewals
                        parentDonation = Donation(
                            id=givewp_parent_donation_id,
                            is_test=newSubscription.is_test,
                            subscription=newSubscription,
                            transaction_id=parentDonationMetaDict['_give_payment_transaction_id'] if '_give_payment_transaction_id' in parentDonationMetaDict else gen_transaction_id(newSubscription.gateway),
                            user=newUser,
                            gateway=newSubscription.gateway,
                            is_recurring=True,
                            donation_amount=round_half_up(parentDonationMetaDict['_give_payment_total'], 2),
                            currency=newSubscription.currency,
                            payment_status=self.donation_status_mapping(parent_donation_status),
                            donation_date=parent_donation_datetime,
                        )
                        parentDonation.save()
                        migrated_donations += 1
                        # remove parentDonation id from donationids_list
                        donationids_list.remove(givewp_parent_donation_id)
                        if options['verbose']:
                            self.print("[√] Created Newstream Parent Donation (id: %d, amount: %s)" % (parentDonation.id, parentDonation.donation_amount))

                        # save all meta data as DonationPaymentMeta
                        for key, value in parentDonationMetaDict.items():
                            # if newUser first name and last name empty, save once again
                            if key == '_give_donor_billing_first_name' and not newUser.first_name:
                                newUser.first_name = value
                                newUser.save()
                            if key == '_give_donor_billing_last_name' and not newUser.last_name:
                                newUser.last_name = value
                                newUser.save()
                            dpm = DonationPaymentMeta(donation=parentDonation, field_key=key, field_value=value)
                            dpm.save()

                        # then add renewals as well
                        for renewalID in renewalsResult:
                            # query data for renewal donation's meta data
                            givewp_renewal_donation_id = renewalID[0]
                            renewal_donation_query = "select * from wp_posts where ID = %d" % givewp_renewal_donation_id
                            cursor.execute(renewal_donation_query)
                            renewalDonationResult = cursor.fetchone()
                            if renewalDonationResult is None:
                                # donation_id fetched from wp_give_donationmeta might not exist in wp_posts, probably deleted
                                # see https://givewp.com/documentation/core/donors/delete-donor
                                self.print("[x] Could not obtain post %i" % givewp_renewal_donation_id)
                                # remove renewalDonation id from donationids_list
                                donationids_list.remove(givewp_renewal_donation_id)
                                continue
                            givewp_renewal_donation_status = renewalDonationResult[7]
                            givewp_renewal_donation_datetime_local = sourcedb_tz.localize(renewalDonationResult[2])
                            givewp_renewal_donation_datetime = givewp_renewal_donation_datetime_local.astimezone(pytz.utc)

                            renewal_donationmeta_query = "select * from wp_give_donationmeta where donation_id = %d;" % givewp_renewal_donation_id
                            cursor.execute(renewal_donationmeta_query)
                            renewalDonationMetaResult = cursor.fetchall()
                            renewalDonationMetaDict = {}
                            for meta in renewalDonationMetaResult:
                                renewalDonationMetaDict[meta[2]] = meta[3]

                            renewalDonation = Donation(
                                id=givewp_renewal_donation_id,
                                is_test=parentDonation.is_test,
                                subscription=newSubscription,
                                transaction_id=renewalDonationMetaDict['_give_payment_transaction_id'] if '_give_payment_transaction_id' in renewalDonationMetaDict else gen_transaction_id(self.gateway_mapping(renewalDonationMetaDict['_give_payment_gateway'])),
                                user=newUser,
                                gateway=self.gateway_mapping(renewalDonationMetaDict['_give_payment_gateway']),
                                is_recurring=True,
                                donation_amount=round_half_up(renewalDonationMetaDict['_give_payment_total'], 2),
                                currency=renewalDonationMetaDict['_give_payment_currency'],
                                payment_status=self.donation_status_mapping(givewp_renewal_donation_status),
                                donation_date=givewp_renewal_donation_datetime,
                            )
                            renewalDonation.save()
                            migrated_donations += 1
                            # remove renewalDonation id from donationids_list
                            donationids_list.remove(givewp_renewal_donation_id)
                            if options['verbose']:
                                self.print("[√] Created Newstream Renewal Donation (id: %d, amount: %s)" % (renewalDonation.id, renewalDonation.donation_amount))

                            # save all meta data as DonationPaymentMeta
                            for key, value in renewalDonationMetaDict.items():
                                dpm = DonationPaymentMeta(donation=renewalDonation, field_key=key, field_value=value)
                                dpm.save()

                    # loop remaining (one-time) donations from donationids_list
                    if options['verbose']:
                        self.print("...Now processing one-time donations of givewp donor %d" % donor_id)
                    for givewp_donation_id in donationids_list:
                        givewp_donation_query = "select * from wp_posts where ID = %d;" % givewp_donation_id
                        cursor.execute(givewp_donation_query)
                        givewpDonationResult = cursor.fetchone()
                        if givewpDonationResult is None:
                            # donation_id fetched from wp_give_donationmeta might not exist in wp_posts, probably deleted
                            # see https://givewp.com/documentation/core/donors/delete-donor
                            self.print("[x] Could not obtain post %i" % givewp_donation_id)
                            continue
                        givewp_donation_status = givewpDonationResult[7]
                        givewp_donation_datetime_local = sourcedb_tz.localize(givewpDonationResult[2])
                        givewp_donation_datetime = givewp_donation_datetime_local.astimezone(pytz.utc)

                        # query data for givewp donation's meta data
                        givewp_donationmeta_query = "select * from wp_give_donationmeta where donation_id = %d;" % givewp_donation_id
                        cursor.execute(givewp_donationmeta_query)
                        givewpDonationMetaResult = cursor.fetchall()
                        givewpDonationMetaDict = {}
                        for meta in givewpDonationMetaResult:
                            givewpDonationMetaDict[meta[2]] = meta[3]

                        singleDonation = Donation(
                            id=givewp_donation_id,
                            is_test=self.paymentmode_mapping(givewpDonationMetaDict['_give_payment_mode']),
                            transaction_id=givewpDonationMetaDict['_give_payment_transaction_id'] if '_give_payment_transaction_id' in givewpDonationMetaDict else gen_transaction_id(self.gateway_mapping(givewpDonationMetaDict['_give_payment_gateway'])),
                            user=newUser,
                            gateway=self.gateway_mapping(givewpDonationMetaDict['_give_payment_gateway']),
                            is_recurring=False,
                            donation_amount=round_half_up(givewpDonationMetaDict['_give_payment_total'], 2),
                            currency=givewpDonationMetaDict['_give_payment_currency'],
                            payment_status=self.donation_status_mapping(givewp_donation_status),
                            donation_date=givewp_donation_datetime,
                        )
                        singleDonation.save()
                        migrated_donations += 1
                        if options['verbose']:
                            self.print("[√] Created Newstream (onetime) Donation (id: %d, amount: %s)" % (singleDonation.id, str(singleDonation.donation_amount)))

        self.print('==============================')
        self.print("Total Migrated Donations: %d" % migrated_donations)

        # reset sequences for donations and subscriptions
        sequence_sql = django_connection.ops.sequence_reset_sql(no_style(), [Donation, Subscription])
        with django_connection.cursor() as cursor:
            for sql in sequence_sql:
                cursor.execute(sql)
    except Exception as e:
        self.print(str(e))
        self.print("...rolling back previous changes.")
        traceback.print_exc()
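The whole migration hinges on one timezone move: WordPress stores post dates in the blog's local zone (read from `wp_options`), so every naive `post_date` is localized with that zone and then converted to UTC before being saved on a Newstream model. That conversion in isolation, as a hedged sketch:

from datetime import datetime
import pytz

def wp_local_to_utc(naive_post_date, wp_timezone_string):
    """Interpret a naive wp_posts.post_date in the blog's timezone_string and
    return an aware UTC datetime, mirroring the command's use of
    sourcedb_tz.localize(...).astimezone(pytz.utc)."""
    sourcedb_tz = pytz.timezone(wp_timezone_string)
    return sourcedb_tz.localize(naive_post_date).astimezone(pytz.utc)

# Example: a donation recorded at 18:30 Hong Kong time is stored as 10:30 UTC.
print(wp_local_to_utc(datetime(2020, 5, 4, 18, 30), 'Asia/Hong_Kong'))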