def main():
    """Main Module of this program"""
    args = args_fetch()
    logger = logger_fetch(args.get('log_level'))
    if args['test']:
        logger.info("Testing phase")
    if args['import_rejected_urls']:
        logger.info("Going to import Rejected URls")
        state_codes = []
        objs = Location.objects.filter(location_type='state')
        for obj in objs:
            state_codes.append(obj.code)
        for state_code in state_codes:
            if state_code != '34':
                file_path = f"../import_data/rejected_url_data/{state_code}.csv"
                dataframe = pd.read_csv(file_path)
                for index, row in dataframe.iterrows():
                    location_code = row['code']
                    report_type = row['report_type']
                    finyear = row['finyear']
                    report_url = row['report_url']
                    my_location = Location.objects.filter(
                        code=location_code).first()
                    if my_location is not None:
                        my_report = Report.objects.create(
                            location=my_location,
                            finyear=finyear,
                            report_type=report_type,
                            report_url=report_url)
                        logger.info(f" Created report {my_report.id}")
    logger.info("...END PROCESSING")
Example #2
def main():
    """Main Module of this program"""
    args = args_fetch()
    logger = logger_fetch(args.get('log_level'))
    secret_key = ''.join(random.SystemRandom().choice(
        'abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)')
                         for i in range(50))
    logger.info(secret_key)
    logger.info("...END PROCESSING")
Example #3
def main():
    """Main Module of this program"""
    args = args_fetch()
    logger = logger_fetch(args.get('log_level'))
    if args['test']:
        logger.info("Testing")
        objs = Context.objects.all()
        for obj in objs:
            logger.info(obj.id)

    logger.info("...END PROCESSING")
Example #4
def main():
    """Main Module of this program"""
    args = args_fetch()
    logger = logger_fetch(args.get('log_level'))
    if args['test']:
        logger.info("Testing")
        phone = '9845065241'
        app_name = '294685'
        PhoneCall.objects.create(phone=phone, exotel_app_no=app_name)
    logger.info("...END PROCESSING")
    if args['execute']:
        # Get Call Status
        exotel_status = ["completed", "busy", "no-answer", "failed"]
        objs = PhoneCall.objects.filter(in_progress=True)
        for obj in objs:
            sid = obj.sid
            res = exotel_call_status(logger, sid)
            res_dict = res["Call"]
            vendor_status = res_dict["Status"]
            if vendor_status in exotel_status:
                obj.vendor_status = vendor_status
                obj.retry = obj.retry + 1
                obj.in_progress = False
                obj.extra_fields = res_dict
                if vendor_status == "completed":
                    obj.status = "completed"
                    obj.is_complete = True
                else:
                    if obj.retry == 10:
                        obj.status = "maxRetryFail"
                        obj.is_complete = True
                obj.save()
            logger.info(res)
        # Place Calls
        objs = PhoneCall.objects.filter(is_complete=False, in_progress=False)
        for obj in objs:
            data = exotel_place_call(logger, obj.phone, obj.exotel_app_no,
                                     obj.id)
            try:
                sid = data["Call"]["Sid"]
            except (KeyError, TypeError):
                sid = None
            if sid is None:
                obj.is_complete = True
                obj.status = "DND"
                obj.save()
            else:
                obj.in_progress = True
                obj.sid = sid
                obj.save()
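Note: exotel_call_status() and exotel_place_call() are project helpers not shown here; judging by how the example indexes their return values, both return a dict whose "Call" entry carries fields such as "Status" and "Sid". A small hedged helper that reads those fields without a bare try/except (the field names come from the example above, everything else is an assumption):

def call_field(response, field, default=None):
    """Safely read response["Call"][field] from an Exotel-style response dict."""
    if not isinstance(response, dict):
        return default
    call = response.get("Call")
    if not isinstance(call, dict):
        return default
    return call.get(field, default)


# Usage in the loops above would then be roughly:
#     vendor_status = call_field(res, "Status")
#     sid = call_field(data, "Sid")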
Example #5
def main():
    """Main Module of this program"""
    args = args_fetch()
    logger = logger_fetch(args.get('log_level'))
    if args['test']:
        logger.info("Testing")
        objs = Location.objects.all()
        for obj in objs:
            logger.info(obj.id)
    if args['import']:
        filename = "../import_data/census_state.csv"
        df = pd.read_csv(filename)
        logger.info(df.head())
        for index, row in df.iterrows():
            code = str(row.get('state_code', '')).strip()
            name = str(row.get('name', '')).strip()
            logger.info(name)
            myLocation = Location.objects.filter(code=code).first()
            if myLocation is None:
                myLocation = Location.objects.create(code=code, name=name)
            myLocation.name = name
            myLocation.state_name = name
            myLocation.state_code = code
            myLocation.location_type = 'state'
            myLocation.save()

        filename = "../import_data/census_district.csv"
        df = pd.read_csv(filename)
        logger.info(df.head())
        for index, row in df.iterrows():
            code = str(row.get('district_code', '')).strip()
            state_code = str(row.get('state_code', '')).strip()
            name = str(row.get('name', '')).strip()
            logger.info(name)
            myLocation = Location.objects.filter(code=code).first()
            if myLocation is None:
                myLocation = Location.objects.create(code=code, name=name)
            myLocation.name = name
            myLocation.district_name = name
            myLocation.district_code = code
            myLocation.state_code = state_code
            myLocation.location_type = 'district'
            myLocation.save()

    logger.info("...END PROCESSING")
Example #6
def main():
    """Main Module of this program"""
    args = args_fetch()
    logger = logger_fetch(args.get('log_level'))
    if args['test']:
        logger.info("Testing phase")
    if args['sample']:
        sample_name = args.get("sampleName", None)
        scheme = args.get("scheme", None)
        percentage = int(args.get("percentage", 0))
        location_type = args.get("locationType", None)
        logger.info("Sampling locations")
        itda_tag = LibtechTag.objects.filter(id=2).first()
        ltarray = [itda_tag]
        my_tag = LibtechTag.objects.filter(name=sample_name).first()
        if my_tag is None:
            my_tag = LibtechTag.objects.create(name=sample_name)
        ltarray = [my_tag]
        objs = Location.objects.filter(libtech_tag__in=ltarray)
        for obj in objs:
            logger.info(obj.code)
        exit(0)
        parent_objs = Location.objects.filter(libtech_tag__in=ltarray,
                                              scheme=scheme)
        for parent_obj in parent_objs:
            location_set = Location.objects.filter(parent_location=parent_obj)
            location_set_size = len(location_set)
            village_code_array = []
            for location in location_set:
                village_code_array.append(location.code)
        # logger.info(village_code_array)
            to_be_sampled = round((percentage * location_set_size) / 100)
            village_code_sampled = random.sample(village_code_array,
                                                 to_be_sampled)
            for village_code in village_code_sampled:
                my_location = Location.objects.filter(code=village_code,
                                                      scheme=scheme).first()
                my_location.libtech_tag.add(my_tag)
                my_location.save()
            logger.info(
                f"{location_set_size}-{to_be_sampled}--{len(village_code_sampled)}"
            )
    logger.info("...END PROCESSING")
Example #7
def main():
    """Main Module of this program"""
    args = args_fetch()
    logger = logger_fetch(args.get('log_level'))
    if args['test']:
        logger.info("Testing")
        objs = Entity.objects.all()
        for obj in objs:
            logger.info(obj.id)
            break
    if args['exportPledges']:
        export_pledges()
    if args['exportRequests']:
        export_requests()
    if args['export']:
        queryset = Entity.objects.filter(record_type='helpseekers')
        filename = ''
        export_entities(queryset, filename, bulk_export=True)
    if args['export1']:
        csv_array = []
        columns = ["title", "state", "status", "urgency", "remarks"]

        columns = [
            'ID', 'Urgency', 'Status', 'Assigned to', 'Remarks',
            'Connected with Govt Scheme', 'Organisation Name', 'Contact',
            'Mobile', 'Help Expected', 'Stranded State', 'Stranded District',
            'StrandedBlock', 'Stranded City/Panchayat', 'Stranded Address',
            'Native State', 'Native District', 'People Stranded',
            'Date Called', 'Contact with Govt Official', 'Contact with anyone',
            'Accom for 14 days', 'If no', 'Where are you staying', 'Ration',
            'Ration Desc', 'Drinking Water', 'Health Issues',
            'Health Issue Desc', 'Urgent Req', 'Need Cash', 'How Much',
            'Urgent Req Desc', 'Donor Involved', 'Donor Name',
            'Type Of donation', 'Donation Value (Rs)', 'Initial Date'
        ]
        objs = Entity.objects.filter(record_type='helpseekers')
        for obj in objs:
            logger.info(obj.id)
            a = [obj.title, obj.state, obj.status, obj.urgency, obj.remarks]
            if obj.assigned_to_user is not None:
                user_name = obj.assigned_to_user.name
            else:
                user_name = ''
            try:
                stranded_district = obj.prefill_json["data"]["contactForm"][
                    "data"]["district"]
            except (KeyError, TypeError):
                stranded_district = ''
            if obj.assigned_to_group is not None:
                org_name = obj.assigned_to_group.name
            else:
                org_name = ''
            #logger.info(f"Stranded District {stranded_district}")
            government_scheme = ''
            contact = ''
            stranded_state = obj.state
            # stranded_district = ''
            stranded_block = ''
            stranded_panchayat = ''
            stranded_address = obj.address
            native_state = ''
            native_district = ''
            how_many_people = obj.how_many_people
            date_called = ''
            govt_official = ''
            contact_with_anyone = ''
            accom_for_14_days = ''
            if_no = ''
            where_are_you_staying = ''
            ration = ''
            ration_desc = ''
            drinking_water = ''
            health_issues = ''
            health_issue_description = ''
            urgent_req = ''
            need_cash = ''
            how_much = ''
            urgent_req_desc = ''
            donor_involved = ''
            donor_name = ''
            donation_type = ''
            donation_value = ''
            initial_date = ''

            a = [
                obj.id, obj.urgency, obj.status, user_name, obj.remarks,
                government_scheme, org_name, contact, obj.phone, obj.what_help,
                stranded_state, stranded_district, stranded_block,
                stranded_panchayat, stranded_address, native_state,
                native_district, how_many_people, date_called, govt_official,
                contact_with_anyone, accom_for_14_days, if_no,
                where_are_you_staying, ration, ration_desc, drinking_water,
                health_issues, health_issue_description, urgent_req, need_cash,
                how_much, urgent_req_desc, donor_involved, donor_name,
                donation_type, donation_value, initial_date
            ]
            csv_array.append(a)
        df = pd.DataFrame(csv_array, columns=columns)
        filename = "export/data.csv"
        file_url = upload_s3(logger, filename, df)
        logger.info(file_url)

    logger.info("...END PROCESSING")
Example #8
def main():
    """Main Module of this program"""
    args = args_fetch()
    logger = logger_fetch(args.get('log_level'))
    if args['importSwanUsers']:
        logger.info("Importing swan users")
        df = pd.read_csv("../import_data/swan_users.csv")
        for index, row in df.iterrows():
            email = row['email']
            name = row['name']
            if isinstance(email, str):
                if "@" in email:
                    password = User.objects.make_random_password()
                    password = '******'
                    logger.info(f"email is {email} and password is {password}")
                    myuser = User.objects.filter(email=email).first()
                    if myuser is None:
                        myuser = User.objects.create(email=email)
                    myteam = Team.objects.filter(name="swanteam").first()
                    myuser.team = myteam
                    myuser.name = name
                    myuser.formio_usergroup = "swan"
                    myuser.user_role = 'usergroupadmin'
                    myuser.set_password(password)
                    myuser.save()

    if args['test']:
        objs = Entity.objects.filter(extra_fields__volunteer="Navmee")
        for obj in objs:
            logger.info(obj.id)
        exit(0)
        facility_json = '{"_id":"5e8a81632776ff4e9ea73357","type":"resource","tags":["common"],"owner":"5e8a3d7bf1f3d54924170187","components":[{"autofocus":false,"input":true,"tableView":true,"inputType":"radio","label":"Is this facility functional?","key":"facilityFunctional","values":[{"value":"yes","label":"Yes","shortcut":"Y"},{"value":"no","label":"No","shortcut":"N"}],"defaultValue":"","protected":false,"fieldSet":false,"persistent":true,"hidden":false,"clearOnHide":true,"validate":{"required":false,"custom":"","customPrivate":false},"type":"radio","labelPosition":"top","optionsLabelPosition":"right","tags":[],"conditional":{"show":"","when":null,"eq":""},"properties":{},"lockKey":true},{"autofocus":false,"input":true,"label":"Submit","tableView":false,"key":"submit","size":"md","leftIcon":"","rightIcon":"","block":false,"action":"submit","disableOnInvalid":false,"theme":"primary","type":"button"}],"display":"form","submissionAccess":[{"roles":["5e8a3d73f1f3d5492417017a"],"type":"create_own"},{"roles":["5e8a3d73f1f3d5492417017a"],"type":"read_own"},{"roles":["5e8a3d73f1f3d5492417017a"],"type":"update_own"},{"roles":["5e8a3d73f1f3d5492417017a"],"type":"delete_own"}],"title":"Help facilities","name":"helpFacilities","path":"map/facility","access":[{"roles":["5e8a3d73f1f3d54924170179","5e8a3d73f1f3d5492417017a","5e8a3d73f1f3d5492417017b"],"type":"read_all"}],"created":"2020-04-06T01:09:55.726Z","modified":"2020-04-06T01:10:33.874Z","machineName":"helpFacilities"}'
        logger.info("Testing")
        objs = Entity.objects.all()
        for obj in objs:
            record_type = obj.record_type
            if record_type == "facility":
                obj.feedback_form_json = facility_json
                obj.save()
        exit(0)
    if args['import']:
        data_dir = "../import_data"
        failed_index_array = []
        filename = args['filename']
        filename = "import_workers.csv"
        filepath = f"{data_dir}/{filename}"
        df = pd.read_csv(filepath)
        logger.info(df.columns)
        record_type = "helpseekers"
        for index, row in df.iterrows():
            name = row['name']
            contact_numbers = row['contact_numbers']
            volunteer = row['volunteer']
            extra_fields = {}
            if isinstance(volunteer, str):
                extra_fields["volunteer"] = volunteer
            address = row['address']
            how_many = row['how_many']
            keywords = f"{name},{contact_numbers}"
            obj = Entity.objects.filter(title=name,
                                        record_type=record_type).first()
            if obj is None:
                obj = Entity.objects.create(title=name,
                                            record_type=record_type)
            obj.contact_numbers = contact_numbers
            obj.address = address
            obj.keywords = keywords
            obj.extra_fields = extra_fields
            obj.save()

        exit(0)
        filename = "volunteer.csv"
        filepath = f"{data_dir}/{filename}"
        df = pd.read_csv(filepath)
        logger.info(df.head())
        record_type = "volunteer"
        icon_url = "./assets/blue-dot.png"
        record_subtype = "orgization NGO"
        for index, row in df.iterrows():
            create_record = False
            place_status = row.get("places_status", None)
            if place_status == "OK":
                create_record = True
            name = row.get("Name", None)
            description = row.get("Work Willing To Do", "")
            latitude = row.get("lat", None)
            longitude = row.get("lng", None)
            information_source = row.get("Source of Information", None)
            contact_numbers = row.get("Phone", None)
            remarks = row.get("Comments", None)
            try:
                obj = Entity.objects.filter(name=name,
                                            record_type=record_type).first()
                if obj is None:
                    obj = Entity.objects.create(name=name,
                                                description=description,
                                                record_type=record_type)
                obj.latitude = latitude
                obj.longitude = longitude
                obj.description = description
                obj.record_subtype = record_subtype
                obj.icon_url = icon_url
                obj.remarks = remarks
                obj.information_source = information_source
                obj.contact_numbers = contact_numbers
                obj.save()
            except Exception:
                failed_index_array.append(index)
        logger.info(f"failed index is {failed_index_array}")

        record_type = "facility"
        filename = "indira.csv"
        filepath = f"{data_dir}/{filename}"
        df = pd.read_csv(filepath)
        icon_url = "http://maps.google.com/mapfiles/ms/icons/blue-dot.png"

        record_subtype = "Indira Food Canteen"
        for index, row in df.iterrows():
            create_record = False
            place_status = row.get("places_status", None)
            if place_status == "OK":
                create_record = True
            name = row.get("Ward No and Loc", None)
            latitude = row.get("lat", None)
            longitude = row.get("lng", None)
            zone = row.get("Zone", "")
            constituency = row.get("Constituency", "")
            address = row.get("address_to_use", "")
            description = f"Zone:{zone} constituency:{constituency} ward:{name} addresss:{address}"
            try:
                obj = Entity.objects.filter(name=name,
                                            record_type=record_type).first()
                if obj is None:
                    obj = Entity.objects.create(name=name,
                                                description=description,
                                                record_type=record_type)
                obj.latitude = latitude
                obj.longitude = longitude
                obj.description = description
                obj.record_subtype = record_subtype
                obj.icon_url = icon_url
                obj.save()
            except Exception:
                failed_index_array.append(index)
        logger.info(f"failed index is {failed_index_array}")

        exit(0)
        record_subtype = "Delhi Govt Food Facility"
        description = "Delhi Government food facility"
        icon_url = "http://maps.google.com/mapfiles/ms/icons/blue-dot.png"
        for index, row in df.iterrows():
            create_record = False
            name = row.get("Description", None)
            latitude = row.get("Where is it located.lat", None)
            longitude = row.get("Where is it located.lon", None)
            information_source = row.get("source", None)
            logger.info(name)
            logger.info(latitude)
            logger.info(longitude)
            logger.info(information_source)
            if (isinstance(latitude, float) and isinstance(longitude, float)):
                create_record = True
            try:
                obj = Entity.objects.filter(name=name,
                                            record_type=record_type).first()
                if obj is None:
                    obj = Entity.objects.create(name=name,
                                                description=description,
                                                record_type=record_type)
                obj.latitude = latitude
                obj.longitude = longitude
                obj.information_source = information_source
                obj.record_subtype = record_subtype
                obj.icon_url = icon_url
                obj.save()
            except Exception:
                failed_index_array.append(index)
        logger.info(f"failed index is {failed_index_array}")

    logger.info("...END PROCESSING")
Example #9
def main():
    """Main Module of this program"""
    args = args_fetch()
    logger = logger_fetch(args.get('log_level'))

    if args['importSwanUsers']:
        logger.info("Importing swan users")
        df = pd.read_csv("../import_data/swan_users_22may20.csv")
        swanteam = Team.objects.filter(name="swanteam").first()
        myorg = Organization.objects.filter(name="swan").first()
        for index, row in df.iterrows():
            email = row['email']
            name = row['name']
            team = str(row['team']).strip()
            if ((team != '') and (team != "nan")):
                myteam = Team.objects.filter(organization=myorg,
                                             name=team).first()
                if myteam is None:
                    myteam = Team.objects.create(organization=myorg, name=team)
            else:
                myteam = swanteam
            if isinstance(email, str):
                if "@" in email:
                    password = User.objects.make_random_password()
                    password = '******'
                    logger.info(
                        f"{index} email is {email} and password is {password}")
                    myuser = User.objects.filter(email=email).first()
                    if myuser is None:
                        myuser = User.objects.create(email=email, name=name)
                    myuser.team = myteam
                    myuser.name = name
                    myuser.formio_usergroup = "swan"
                    myuser.user_role = 'groupadmin'
                    myuser.organization = myorg
                    myuser.set_password(password)
                    myuser.is_locked = False
                    myuser.login_attempt_count = 0
                    myuser.save()

    if args['connectUsersEntity']:
        objs = Entity.objects.filter(record_type="helpseekers")
        myTeam = Team.objects.filter(id=10).first()
        for obj in objs:
            obj.assigned_to_group = myTeam
            logger.info(obj.id)
            obj.save()
        exit(0)
        for obj in objs:
            extra_fields = obj.extra_fields
            try:
                wasan_user_email = obj.extra_fields['common']['user_email']
            except (KeyError, TypeError):
                wasan_user_email = None
            myUser = User.objects.filter(region=wasan_user_email).first()
            if myUser is not None:
                obj.assigned_to_user = myUser
                obj.save()
    if args['importUsers']:
        logger.info("Importing Users")
        role_dict = {
            '1': 'usergroupadmin',
            '2': 'groupadmin',
            '3': 'volunteer',
            '4': 'volunteer',
            '5': 'volunteer',
            '6': 'usergroupadmin'
        }
        df = pd.read_csv("../import_data/wassan_users_2may.csv")
        for index, row in df.iterrows():
            # wasan_id = row['Id']
            name = row['name']
            email = row['email']
            if "@" not in email:
                email = f"{email}@abcd.com"
            password = row['password']
            user_role = row['role']
            wassan_username = row['username']
            group_name = row['group']
            phone = row['mobile']
            myTeam = Team.objects.filter(name=group_name).first()
            if myTeam is None:
                myTeam = Team.objects.create(name=group_name)
            myuser = User.objects.filter(email=email).first()
            if myuser is None:
                myuser = User.objects.create(email=email)
            myuser.group = myTeam
            myuser.name = name
            myuser.set_password(password)
            myuser.phone = phone
            myuser.wassan_username = wassan_username
            # myuser.region = wasan_id#Temporary
            # user_role = role_dict.get(str(role_id), "volunteer")
            myuser.user_role = user_role
            myuser.formio_usergroup = 'wassan'
            myuser.save()

    if args['importRequests']:
        with open('../import_data/support_requests_24may20.json', 'r') as f:
            records = json.load(f)

        for i, record in enumerate(records):
            logger.info(record)
            try:
                email = record['extra_fields']['email']
            except (KeyError, TypeError):
                email = None
            try:
                amount_needed = record['prefill_json']['data']['totalcost']
            except (KeyError, TypeError):
                amount_needed = '0'
            amount_needed = int(str(amount_needed).replace(",", ""))
            prefill_json = record['prefill_json']
            extra_fields = record['extra_fields']
            myuser = None
            if email is not None:
                myuser = User.objects.filter(email=email).first()

            if myuser is not None:
                myorg = myuser.organization
                logger.info(
                    f"{i}-{myuser.email}-{myorg.name}--{amount_needed}")
                title = f"{i}-request from {myorg.name}"
                obj = Request.objects.filter(title=title).first()
                if obj is None:
                    obj = Request.objects.create(title=title)
                obj.user = myuser
                obj.organization = myorg
                obj.amount_needed = amount_needed
                obj.amount_pending = amount_needed
                obj.amount_pledged = 0
                obj.save()
                try:
                    obj.prefill_json = prefill_json
                    obj.data_json = prefill_json['data']
                    obj.extra_fields = extra_fields
                    obj.save()
                except Exception:
                    logger.info("count not prefill")
    if args['importEntities']:
        superadminuser = User.objects.filter(id=1).first()
        #with open('../import_data/2020-05-22_SWAN_Data.json', 'r') as f:
        #    #data = f.read().replace(': NaN,', ': "",').replace(': NaN',':""').replace(',NaN',',""')
        #    data = f.read().replace('NaN','""')
        #with open('/tmp/a.json', 'w') as f:
        #    f.write(data)
        #exit(0)
        with open('../import_data/swan_data_22may20.json', 'r') as f:
            records = json.load(f)
        backend_remarks = "Imported Swan Data on 22 May 2020"
        base_number = 200521 * 10000
        email_array = []
        #for i,record in enumerate(records):
        #    logger.info(i)
        #    try:
        #        email = record["extra_fields"]["common"]["user_email"]
        #    except:
        #        email = None
        #    if email is not None:
        #        if email not in email_array:
        #            email_array.append(email)
        #logger.info(email_array)
        #logger.info(len(email_array))
        #exit(0)
        for i, record in enumerate(records):
            #logger.info(i)
            try:
                email = record["extra_fields"]["common"]["user_email"]
                volunteer = User.objects.filter(email=email).first()
            except Exception:
                volunteer = None
            creator = superadminuser
            if volunteer is not None:
                creator = volunteer

            sr_no = base_number + i
            obj_id = create_entity(logger,
                                   record,
                                   creator,
                                   wasan_id=sr_no,
                                   wasan_org_id=500,
                                   usergroup='swan',
                                   backend_remarks=backend_remarks,
                                   assigned_to_user=volunteer)
            logger.info(f"{i}-{obj_id} creator-{creator} assigned-{volunteer}")
        exit(0)
        usergroup = "wassan"
        objs = Entity.objects.filter(record_type="helpseekers")
        for obj in objs:
            obj.status = STATUS_DICT.get(obj.status, None)
            obj.save()

    if args['populateDistricts']:
        objs = Entity.objects.filter(record_type="helpseekers").order_by("-id")
        #    objs = Entity.objects.filter(id = 74533).order_by("-id")
        for obj in objs:
            try:
                district = obj.prefill_json['data']['contactForm']['data'][
                    'district']
            except (KeyError, TypeError):
                district = None
            logger.info(f"{obj.id} - {district}")
            if district is not None:
                obj.district = district
                obj.save()
    if args['importOrgs']:
        df = pd.read_csv("../import_data/requests_24may20.csv")
        df = df.fillna("")
        password = '******'
        for index, row in df.iterrows():
            logger.info(index)
            org = row.get('org', None)
            phone = row.get('phone', None)
            try:
                phone = str(int(phone))
            except (ValueError, TypeError):
                phone = ''
            email = row.get('email', None)
            name = row.get('name', None)
            if name is None:
                name = org
            logger.info(f"{index}-{org}-{name}-{email}-{phone}")
            if org is not None:
                myorg = Organization.objects.filter(name=org).first()
                if myorg is None:
                    myorg = Organization.objects.create(name=org)
                myorg.contact_phone = phone
                myorg.contact_name = name
                phone_array = [phone]
                data_json = {
                    "contactName": name,
                    "organization": org,
                    "mobile": phone_array
                }

                myorg.data_json = data_json
                myorg.save()
                myTeam = Team.objects.filter(name=org).first()
                if myTeam is None:
                    myTeam = Team.objects.create(name=org, organization=myorg)
                myTeam.organization = myorg
                myTeam.save()
            if email is not None:
                myUser = User.objects.filter(email=email).first()
                if myUser is None:
                    myUser = User.objects.create(email=email, name=name)
                myUser.name = name
                myUser.organization = myorg
                myUser.team = myTeam
                myUser.phone = phone
                myUser.user_role = "groupadmin"
                myUser.set_password(password)
                myUser.save()
    if args['test']:
        objs = Entity.objects.filter(information_source__icontains='APPI')
        i = 0
        for obj in objs:
            i = i + 1
            logger.info(f"{i}-{obj.record_type}-{obj.information_source}")
        exit(0)
        df = pd.read_csv("/tmp/requests.csv")
        logger.info(df.columns)
        password = "******"
        for index, row in df.iterrows():
            logger.info(index)
            org = row.get('org', None)
            phone = row.get('phone', None)
            email = row.get('email', None)
            total = row.get('total', None)
            remarks = row.get('remarks', None)
            logger.info(remarks)
            if org is not None:
                myorg = Organization.objects.filter(name=org).first()
                if myorg is None:
                    myorg = Organization.objects.create(name=org)
                myorg.contact_phone = phone
                myorg.save()
            if email is not None:
                myUser = User.objects.filter(email=email).first()
                if myUser is None:
                    myUser = User.objects.create(email=email, name=org)
                myUser.organization = myorg
                myUser.user_role = "volunteer"
                myUser.set_password(password)
                myUser.save()
                logger.info(f"Created user {myUser.id}")
            try:
                total = int(total)
            except (ValueError, TypeError):
                total = ''
            if isinstance(total, int):
                req_title = f"request from {org}-{index}"
                my_req = Request.objects.filter(title=req_title).first()
                if my_req is None:
                    my_req = Request.objects.create(title=req_title)
                my_req.amount_needed = total
                my_req.organization = myorg
                my_req.user = myUser
                my_req.amount_pending = total
                my_req.amount_pledged = 0
                my_req.notes = remarks
                my_req.save()
        exit(0)
        users = User.objects.filter(formio_usergroup="swan")
        for user in users:
            logger.info(user.id)
        exit(0)
        states = Entity.objects.all().values('state').annotate(c=Count('id'))
        for state in states:
            logger.info(state)
        exit(0)
        org = Organization.objects.filter(id=1).first()
        objs = Team.objects.all()
        for obj in objs:
            obj.organization = org
            obj.save()
        exit(0)
        objs = User.objects.all()
        for obj in objs:
            obj.formio_usergroup = "wassan"
            logger.info(obj.id)
            obj.save()
        objs = Entity.objects.filter(record_type="helpseekers")
        for obj in objs:
            logger.info(obj.id)
            #logger.info(obj.prefill_json)
            obj.what_help = help_sought(obj.prefill_json)
            obj.status = get_status(obj.prefill_json)
            obj.remarks = get_remarks(obj.prefill_json)
            logger.info(obj.what_help)
            obj.save()
        exit(0)
        objs = User.objects.all()
        for obj in objs:
            obj.save()
        exit(0)
        objs = Entity.objects.filter(formio_usergroup="wassan")
        for obj in objs:
            try:
                status = obj.extra_fields["common"]["status"]
            except (KeyError, TypeError):
                status = None
            try:
                urgency = obj.extra_fields["needs"]["urgency"]
            except (KeyError, TypeError):
                urgency = None
            obj.status = slugify(status)
            obj.urgency = urgency
            logger.info(obj.urgency)
            obj.save()

        exit(0)
        csv_array = []
    if args['db2csv']:
        logger.info("test")
        dbhost = "localhost"
        dbuser = "******"
        dbpasswd = "vivek123"
        #db = MySQLdb.connect(host=dbhost, user=dbuser, passwd=dbpasswd, charset='utf8')
        db = dbInitialize(
            db='wasan', charset="utf8"
        )  # The rest is updated automatically in the function
        cur = db.cursor()
        db.autocommit(True)
        query = "SET NAMES utf8"
        cur.execute(query)
        query = "use wasan"
        cur.execute(query)
        query = "show tables"
        cur.execute(query)
        results = cur.fetchall()
        table_names = []
        for row in results:
            table_names.append(row[0])
        logger.info(table_names)
        # table_names = ['blocks', 'districts', 'migrants', 'migrantshistory',   'roles', 'states', 'users']
        # table_names = ["roles"]
        for table_name in table_names:
            csv_array = []
            logger.info(table_name)
            query = f"SHOW COLUMNS from {table_name};"
            cur.execute(query)
            results = cur.fetchall()
            column_headers = []
            for row in results:
                column_headers.append(row[0])
            logger.info(column_headers)
            query = f"select * from {table_name};"
            cur.execute(query)
            results = cur.fetchall()
            for row in results:
                a = []
                for item in row:
                    item = str(item).replace(",", "")
                    a.append(item)
                csv_array.append(a)
            df = pd.DataFrame(csv_array, columns=column_headers)
            df.to_csv(f"/tmp/wasan/{table_name}.csv")
    if args['importRegions']:
        logger.info("importing regions")
        df = pd.read_csv("../import_data/states.csv")
        for index, row in df.iterrows():
            name = row['state']
            myreg = Region.objects.filter(name=name).first()
            if myreg is None:
                Region.objects.create(name=name)
    logger.info("...END PROCESSING")
Example #10
def main():
    """Main Module of this program"""
    args = args_fetch()
    logger = logger_fetch(args.get('log_level'))
    if args['fixfilepath']:
        location_type = args['location_type']
        logger.info("fixing broken file paths")
        objs = Location.objects.all()
        objs = Location.objects.filter(location_type=location_type)
        j = len(objs)
        for obj in objs:
            logger.info(f"processing j - {j}")
            j = j - 1
            base_file_path = obj.parent_location.s3_filepath
            s3_filepath = f"{base_file_path[:-14]}/{obj.slug}/DATA/reports/"
            obj.s3_filepath = s3_filepath
            obj.save()

    if args['crawl']:
        scheme = 'nrega'
        location_type = args['location_type']
        logger.info(f"Crawling data for {location_type}")
        copy_from_parent = [
            "state_code", "state_name", "district_code", "district_name",
            "block_code", "block_name", "panchayat_code", "panchayat_name",
            "crawl_ip", "state_short_code"
        ]
        if location_type == 'state':
            parent_location_type = 'country'
            code_param = 'state_code'
            name_param = 'state_name'
        elif location_type == 'district':
            parent_location_type = 'state'
            code_param = 'district_code'
            name_param = 'district_name'
        elif location_type == 'block':
            parent_location_type = 'district'
            code_param = 'Block_Code'
            name_param = 'block_name'
        elif location_type == 'panchayat':
            parent_location_type = 'block'
            code_param = 'Panchayat_Code'
            name_param = 'Panchayat_name'
        queryset = Location.objects.filter(location_type=parent_location_type)
        queryset = Location.objects.filter(location_type=parent_location_type,
                                           id__gt=1395)

        queryset = Location.objects.filter(location_type=parent_location_type,
                                           id=500)
        #queryset = Location.objects.filter(location_type=parent_location_type)[:1]
        for parent_location in queryset:
            urls_to_process = []
            base_url = parent_location.nic_url
            parent_file_path = parent_location.s3_filepath
            parent_display_name = parent_location.display_name
            logger.info(f"parent file path is {parent_file_path}")
            # rstrip() strips a character set, not a suffix; slice off the literal "/DATA/reports/" instead
            base_filepath = parent_file_path[:-len("/DATA/reports/")] if parent_file_path.endswith("/DATA/reports/") else parent_file_path
            logger.info(f"base_filepath is {base_filepath}")
            myhtml = None
            logger.info(f"Current Processing {base_url}")
            if 'http://nrega.nic.in' in base_url:
                base_url = base_url.replace("http://", "https://", 1)
            res = requests.get(base_url)
            if res.status_code == 200:
                myhtml = res.content
            if myhtml is not None:
                mysoup = BeautifulSoup(myhtml, "lxml")
                links = mysoup.findAll("a")
                for link in links:
                    href = link.get('href', '')
                    if code_param in href:
                        urls_to_process.append(href)

            for href in urls_to_process:
                parsed = urlparse.urlparse(href)
                crawl_ip = parsed.netloc
                params = urlparse.parse_qs(parsed.query)
                #   setattr(my_location,  key.lower(), value)
                code = params[code_param][0]
                name = params[name_param][0]
                logger.info(f"{name}-{code}")
                if is_english(name):
                    name_not_english = False
                    english_name = name
                else:
                    name_not_english = True
                    # This part of the script needs correction; it currently defaults to Hindi
                    english_name = ms_transliterate_word(logger, name)
                my_location = Location.objects.filter(code=code,
                                                      scheme=scheme).first()
                if my_location is None:
                    my_location = Location.objects.create(code=code,
                                                          name=name,
                                                          scheme=scheme)
                for item in copy_from_parent:
                    value = getattr(parent_location, item)
                    setattr(my_location, item, value)
                setattr(my_location, code_param.lower(), code)
                setattr(my_location, name_param.lower(), name)
                my_location.parent_location = parent_location
                if location_type == "state":
                    logger.info(f"href is {href}")
                    my_location.nic_url = href + "&lflag=eng"
                    my_location.display_name = name.title()
                    my_location.crawl_ip = crawl_ip
                    my_location.state_short_code = STATE_SHORT_CODE_DICT.get(
                        code, "")
                else:
                    my_location.nic_url = urljoin(base_url, href)
                    my_location.display_name = f"{parent_display_name}-{english_name.title()}"
                my_location.location_type = location_type
                my_location.name_not_english = name_not_english
                my_location.english_name = english_name
                slug = slugify(english_name)
                my_location.slug = slug
                if my_location.state_code in ['02', '36']:
                    my_location.is_nic = False
                else:
                    my_location.is_nic = True
                filepath = f"{base_filepath}/{slug}/DATA/reports/"
                logger.info(f"file paht is {filepath}")
                my_location.s3_filepath = filepath
                my_location.save()

    logger.info("...END PROCESSING")
Example #11
def main():
    """Main Module of this program"""
    args = args_fetch()
    logger = logger_fetch(args.get('log_level'))
    if args['test']:
        df = pd.read_csv("/tmp/facility.csv")
        for index, row in df.iterrows():
            name = row.get("name", "")
            description = row.get("description", "")
            latitude = row.get("latitude", None)
            longitude = row.get("longitude", None)
            information_source = row.get("information_source", None)
            entity = Entity.objects.filter(name=name,
                                           description=description).first()
            if entity is None:
                entity = Entity.objects.create(name=name,
                                               description=description)
            entity.record_type = "facility"
            entity.record_subtype = "food"
            entity.latitude = latitude
            entity.longitude = longitude
            entity.information_source = information_source
            entity.save()
        exit(0)
        csv_array = []
        column_headers = [
            "name", "descript", "latitude", "longitude", "information_source"
        ]
        objs = Entity.objects.filter(record_type="facility")
        for obj in objs:
            a = [
                obj.name, obj.description, obj.latitude, obj.longitude,
                obj.information_source
            ]
            csv_array.append(a)
        df = pd.DataFrame(csv_array, columns=column_headers)
        df.to_csv("/tmp/facility.csv")
        exit(0)
        objs = Covid.objects.all()
        for obj in objs:
            obj.delete()
        exit(0)
        facility_json = '{"_id":"5e8a81632776ff4e9ea73357","type":"resource","tags":["common"],"owner":"5e8a3d7bf1f3d54924170187","components":[{"autofocus":false,"input":true,"tableView":true,"inputType":"radio","label":"Is this facility functional?","key":"facilityFunctional","values":[{"value":"yes","label":"Yes","shortcut":"Y"},{"value":"no","label":"No","shortcut":"N"}],"defaultValue":"","protected":false,"fieldSet":false,"persistent":true,"hidden":false,"clearOnHide":true,"validate":{"required":false,"custom":"","customPrivate":false},"type":"radio","labelPosition":"top","optionsLabelPosition":"right","tags":[],"conditional":{"show":"","when":null,"eq":""},"properties":{},"lockKey":true},{"autofocus":false,"input":true,"label":"Submit","tableView":false,"key":"submit","size":"md","leftIcon":"","rightIcon":"","block":false,"action":"submit","disableOnInvalid":false,"theme":"primary","type":"button"}],"display":"form","submissionAccess":[{"roles":["5e8a3d73f1f3d5492417017a"],"type":"create_own"},{"roles":["5e8a3d73f1f3d5492417017a"],"type":"read_own"},{"roles":["5e8a3d73f1f3d5492417017a"],"type":"update_own"},{"roles":["5e8a3d73f1f3d5492417017a"],"type":"delete_own"}],"title":"Help facilities","name":"helpFacilities","path":"map/facility","access":[{"roles":["5e8a3d73f1f3d54924170179","5e8a3d73f1f3d5492417017a","5e8a3d73f1f3d5492417017b"],"type":"read_all"}],"created":"2020-04-06T01:09:55.726Z","modified":"2020-04-06T01:10:33.874Z","machineName":"helpFacilities"}'
        logger.info("Testing")
        objs = Entity.objects.all()
        for obj in objs:
            record_type = obj.record_type
            if record_type == "facility":
                obj.feedback_form_json = facility_json
                obj.save()
        exit(0)
    if args['import']:
        data_dir = "../import_data"
        failed_index_array = []
        filename = args['filename']
        filename = "volunteer.csv"
        filepath = f"{data_dir}/{filename}"
        df = pd.read_csv(filepath)
        logger.info(df.head())
        record_type = "volunteer"
        icon_url = "./assets/blue-dot.png"
        record_subtype = "orgization NGO"
        for index, row in df.iterrows():
            create_record = False
            place_status = row.get("places_status", None)
            if place_status == "OK":
                create_record = True
            name = row.get("Name", None)
            description = row.get("Work Willing To Do", "")
            latitude = row.get("lat", None)
            longitude = row.get("lng", None)
            information_source = row.get("Source of Information", None)
            contact_numbers = row.get("Phone", None)
            remarks = row.get("Comments", None)
            try:
                obj = Entity.objects.filter(name=name,
                                            record_type=record_type).first()
                if obj is None:
                    obj = Entity.objects.create(name=name,
                                                description=description,
                                                record_type=record_type)
                obj.latitude = latitude
                obj.longitude = longitude
                obj.description = description
                obj.record_subtype = record_subtype
                obj.icon_url = icon_url
                obj.remarks = remarks
                obj.information_source = information_source
                obj.contact_numbers = contact_numbers
                obj.save()
            except Exception:
                failed_index_array.append(index)
        logger.info(f"failed index is {failed_index_array}")

        record_type = "facility"
        filename = "indira.csv"
        filepath = f"{data_dir}/{filename}"
        df = pd.read_csv(filepath)
        icon_url = "http://maps.google.com/mapfiles/ms/icons/blue-dot.png"

        record_subtype = "Indira Food Canteen"
        for index, row in df.iterrows():
            create_record = False
            place_status = row.get("places_status", None)
            if place_status == "OK":
                create_record = True
            name = row.get("Ward No and Loc", None)
            latitude = row.get("lat", None)
            longitude = row.get("lng", None)
            zone = row.get("Zone", "")
            constituency = row.get("Constituency", "")
            address = row.get("address_to_use", "")
            description = f"Zone:{zone} constituency:{constituency} ward:{name} addresss:{address}"
            try:
                obj = Entity.objects.filter(name=name,
                                            record_type=record_type).first()
                if obj is None:
                    obj = Entity.objects.create(name=name,
                                                description=description,
                                                record_type=record_type)
                obj.latitude = latitude
                obj.longitude = longitude
                obj.description = description
                obj.record_subtype = record_subtype
                obj.icon_url = icon_url
                obj.save()
            except Exception:
                failed_index_array.append(index)
        logger.info(f"failed index is {failed_index_array}")

        exit(0)
        record_subtype = "Delhi Govt Food Facility"
        description = "Delhi Government food facility"
        icon_url = "http://maps.google.com/mapfiles/ms/icons/blue-dot.png"
        for index, row in df.iterrows():
            create_record = False
            name = row.get("Description", None)
            latitude = row.get("Where is it located.lat", None)
            longitude = row.get("Where is it located.lon", None)
            information_source = row.get("source", None)
            logger.info(name)
            logger.info(latitude)
            logger.info(longitude)
            logger.info(information_source)
            if (isinstance(latitude, float) and isinstance(longitude, float)):
                create_record = True
            try:
                obj = Entity.objects.filter(name=name,
                                            record_type=record_type).first()
                if obj is None:
                    obj = Entity.objects.create(name=name,
                                                description=description,
                                                record_type=record_type)
                obj.latitude = latitude
                obj.longitude = longitude
                obj.information_source = information_source
                obj.record_subtype = record_subtype
                obj.icon_url = icon_url
                obj.save()
            except Exception:
                failed_index_array.append(index)
        logger.info(f"failed index is {failed_index_array}")

    logger.info("...END PROCESSING")
Example #12
def main():
    """Main Module of this program"""
    args = args_fetch()
    logger = logger_fetch(args.get('log_level'))
    if args['test']:
        logger.info("Testing phase")
        scheme = "rayatubarosa"
        vskp_code = '520'
        vskp_location = Location.objects.filter(scheme=scheme,
                                                code=vskp_code).first()
        logger.info(vskp_location.id)
        vskp_block_url = "https://libtech-india-data.s3.ap-south-1.amazonaws.com/data/locations/rayatu_barosa/vskp_blocks.csv"
        vskp_village_url = "https://libtech-india-data.s3.ap-south-1.amazonaws.com/data/locations/rayatu_barosa/vskp_villages.csv"
        village_df = pd.read_csv(vskp_village_url)
        logger.info(village_df.columns)
        total = len(village_df)
        for index, row in village_df.iterrows():
            block_code = str(int(row['mandal_code']))
            village_code = str(int(row['village_code']))
            logger.info(f"{index}-{total}-{block_code}-{village_code}")
            village_name = row['village_name_telugu']
            english_name = ms_transliterate_word(logger,
                                                 village_name,
                                                 lang_code='te',
                                                 script_code='Telu')
            parent_location = Location.objects.filter(scheme=scheme,
                                                      code=block_code).first()
            my_location = Location.objects.filter(scheme=scheme,
                                                  code=village_code).first()
            if my_location is None:
                my_location = Location.objects.create(scheme=scheme,
                                                      code=village_code,
                                                      name=village_name)
            my_location.parent_location = parent_location
            parent_display_name = parent_location.display_name
            my_location.location_type = "village"
            my_location.state_name = 'Andhra Pradesh'
            my_location.state_code = '28'
            my_location.district_code = parent_location.parent_location.code
            my_location.district_name = parent_location.parent_location.name
            my_location.block_code = parent_location.code
            my_location.block_name = parent_location.name
            my_location.name_not_english = True
            my_location.english_name = english_name
            my_location.display_name = f"{parent_display_name}-{english_name.title()}"
            slug = slugify(english_name)
            my_location.slug = slug
            my_location.save()

        block_df = pd.read_csv(vskp_block_url)
        logger.info(block_df.columns)
        for index, row in block_df.iterrows():
            block_code = str(int(row['mandal_code']))
            english_name = row['mandal_name_eng']
            block_name = row["block_name_telugu"]
            logger.info(block_code)
            my_location = Location.objects.filter(scheme=scheme,
                                                  code=block_code).first()
            if my_location is None:
                my_location = Location.objects.create(scheme=scheme,
                                                      code=block_code,
                                                      name=block_name)
            my_location.parent_location = vskp_location
            parent_display_name = vskp_location.display_name
            my_location.state_code = '28'
            my_location.state_name = 'Andhra Pradesh'
            my_location.district_code = vskp_location.code
            my_location.district_name = vskp_location.name
            my_location.location_type = "block"
            my_location.block_code = block_code
            my_location.block_name = block_name
            my_location.name_not_english = True
            my_location.english_name = english_name
            my_location.display_name = f"{parent_display_name}-{english_name.title()}"
            slug = slugify(english_name)
            my_location.slug = slug
            my_location.save()

    logger.info("...END PROCESSING")