Example #1
def create_record(author, location, date):
    record = Record(location=location)
    record.author = author
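    # Channel.objects.get raises Channel.DoesNotExist if no channel named
    # "Proyecto Pance" exists, so that channel must be created beforehand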
    record.channel = Channel.objects.get(name="Proyecto Pance")
    record.save()
    record.updateCreationDate(date)
    return record
Example #2
 def test_get_initial_queryset(self):
     config = TestConfiguration()
     other_record = Record(job_id=9348275987)
     # Save the record that points at a nonexistent job so the queryset under
     # test actually has something to exclude
     other_record.save()
     view = DTRecordsJson(kwargs={})
     records = view.get_initial_queryset()
     job_ids = set(map(lambda x: x.job.id, records))
     self.assertNotIn(0, job_ids)
Example #3
    def test_report_success(self):
        record = Record(student_id=self.student_id)
        record.state = Record.LOCKED
        record.save()
        vote_token = AuthToken.generate(self.student_id, str(self.station.external_id), '70')
        vote_token.save()

        url = reverse('report')
        data = {
            'uid': self.student_id,
            'vote_token': vote_token.code,
            'token': self.token,
            'api_key': settings.API_KEY, 'version': settings.API_VERSION,
        }
        response = self.client.post(url, data)
        self.assertEqual(response.data, {'status': 'success'})
Example #4
 def __init__(self):
     self.user = User.objects.create(username='******',
                                     password='******',
                                     is_superuser=True)
     self.org = Organization.objects.create(name="Test Organization")
     self.record_group = RecordGroup.objects.create(
         organization=self.org, name="Test Record Group")
     self.job = Job.objects.create(record_group=self.record_group,
                                   user=self.user,
                                   job_type="HarvestJob",
                                   job_details='{"test_key": "test value"}',
                                   name="Test Job")
     self.downstream_job = Job.objects.create(
         record_group=self.record_group,
         user=self.user,
         job_type="TransformJob",
         job_details='{"test_key": "test value"}',
         name="Test Transform Job")
     JobInput.objects.create(job=self.downstream_job, input_job=self.job)
     # TODO: the test framework should be clearing all the dbs, not just mysql
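     # Record documents appear to live in MongoDB (note the MongoEngine-style
     # Record.objects(...) query below), while Job and the other models above
     # use the Django ORM, hence the manual cleanup here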
     old_records = Record.objects(job_id=self.job.id)
     for record in old_records:
         record.delete()
     self.record = Record.objects.create(job_id=self.job.id,
                                         record_id='testrecord',
                                         document=TEST_DOCUMENT)
     self.job.update_record_count()
Example #5
    def test_complete_success(self):
        record = Record(student_id=self.student_id)
        record.state = Record.VOTING
        record.save()
        token = AuthToken.generate(self.student_id, str(self.station.external_id), '70')
        token.save()

        url = 'https://{0}{1}?callback={2}'.format(
            settings.CALLBACK_DOMAIN, reverse('callback'), token.confirm_code)
        response = self.client.get(url)
        record = Record.objects.get(student_id=self.student_id)
        self.assertEqual(record.state, Record.USED)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data, {
            'status': 'success',
            'message': 'all correct',
        })
Example #6
    def test_confirm_success(self):
        record = Record(student_id=self.student_id)
        record.state = Record.LOCKED
        record.save()
        vote_token = AuthToken.generate(self.student_id, str(self.station.external_id), '70')
        vote_token.save()

        url = reverse('confirm')
        data = {'uid': self.student_id,
                'vote_token': vote_token.code, 'token': self.token,
                'api_key': settings.API_KEY, 'version': settings.API_VERSION}
        response = self.client.post(url, data)
        callback = 'https://{0}{1}?callback={2}'.format(
            settings.CALLBACK_DOMAIN, reverse('callback'), vote_token.confirm_code)
        self.assertEqual(response.data, {
            'status': 'success',
            'ballot': self.authcode.code,
            'callback': callback,
        })
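The vote-record tests in Examples #3, #5 and #6 (and the authenticate views further below) rely on a Record model with a small state machine. A minimal sketch of the fields and constants they assume (field names and types are guesses, not the project's actual models.py):

from django.db import models

class Record(models.Model):
    # States observed in the examples: AVAILABLE -> LOCKED -> VOTING -> USED,
    # with stuck VOTING records reset back to AVAILABLE
    AVAILABLE, LOCKED, VOTING, USED = range(4)
    STATE_CHOICES = (
        (AVAILABLE, 'available'),
        (LOCKED, 'locked'),
        (VOTING, 'voting'),
        (USED, 'used'),
    )

    student_id = models.CharField(max_length=16, unique=True)
    revision = models.IntegerField(default=0)
    state = models.IntegerField(choices=STATE_CHOICES, default=AVAILABLE)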
Example #7
def populateWorldRecords(url, stats_type, countries_df):

    print_info(f"Fetching content for stats_type [{stats_type}]..")
    print_info(f"URL in use [{url}]")
    with requests.Session() as s:
        download = s.get(url)
    decoded_content = download.content.decode('utf-8')

    # local_file_name = f'datasets/{stats_type}.csv'
    # print_info(f"Writing dowloaded content to file[{local_file_name}]..")
    # with open(local_file_name, "w") as outfile:
    #     outfile.write(decoded_content)
    # print_info("Writing summary to local file..Done")

    rows_fetched = list(csv.reader(decoded_content.splitlines(),
                                   delimiter=','))
    print_info(f"Fetching content for stats_type [{stats_type}]..Done")

    # Handle header row
    header_row = rows_fetched.pop(0)  # Header is the first row.
    header_row.pop(0)  # Remove the value 'Province/State'
    header_row.pop(0)  # Remove the value 'Country/Region'
    header_row.pop(0)  # Remove the value 'Lat'
    header_row.pop(0)  # Remove the value 'Long'
    latest_stats_date = rectifyDateFormat(header_row[-1])
    stats_dates_csv = rectifyDateFormat(dates_csv=(",".join(header_row)))

    print_info("Creating objects..")
    objects_list = []
    ignored_countries = [
        'Diamond Princess', 'West Bank and Gaza', 'Kosovo', 'MS Zaandam'
    ]
    for row in rows_fetched[:]:
        state_province = row.pop(0)
        country_region = row.pop(0)
        if country_region in ignored_countries:
            country_alpha3 = '---'
        else:
            country_alpha3 = countries_df.loc[country_region, 'alpha3']
        latitude = row.pop(0)
        longitude = row.pop(0)
        stats_value_csv = ",".join(row)
        latest_stats_value = row[-1] or 0
        # Create model Record instances
        obj = Record(
            state_province=state_province,
            country_region=country_region,
            country_alpha3=country_alpha3,
            latitude=latitude,
            longitude=longitude,
            stats_type=stats_type,
            latest_stats_date=latest_stats_date,
            latest_stats_value=latest_stats_value,
            stats_dates_csv=stats_dates_csv,
            stats_value_csv=stats_value_csv,
        )
        objects_list.append(obj)

    print_info("Creating objects..Done")
    print_info(f"Total objects created = {len(objects_list)}")

    print_info(f"Inserting records for stats_type[{stats_type}]..")
    Record.objects.bulk_create(objects_list)
    print_info(f"Inserting records for stats_type[{stats_type}]..Done")

    return decoded_content
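rectifyDateFormat is referenced above but not shown. A minimal sketch, assuming the upstream CSV uses Johns Hopkins-style M/D/YY column headers (the real helper in the source project may differ):

from datetime import datetime

def rectifyDateFormat(date=None, dates_csv=None):
    """Convert 'M/D/YY' dates to ISO 'YYYY-MM-DD', one date or a CSV of dates."""
    def _fix(value):
        return datetime.strptime(value.strip(), '%m/%d/%y').strftime('%Y-%m-%d')

    if dates_csv is not None:
        return ",".join(_fix(d) for d in dates_csv.split(','))
    return _fix(date)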
Example #8
def populate_records_india(
        url='https://api.rootnet.in/covid19-in/stats/daily'):

    print_info("Handling India records..")
    Record.objects.filter(country_region='India').delete()

    # Reference lat/long for Indian states (the "India" entry is the fallback)
    states_lat_long = {
        "India": {
            "lat": 20.5937,
            "long": 78.9629
        },
        "Andhra Pradesh": {
            "lat": 15.9129,
            "long": 79.7400
        },
        "Assam": {
            "lat": 26.244156,
            "long": 92.537842
        },
        "Bihar": {
            "lat": 25.0961,
            "long": 85.3131
        },
        "Chandigarh": {
            "lat": 30.7333,
            "long": 76.7794
        },
        "Chhattisgarh": {
            "lat": 21.295132,
            "long": 81.828232
        },
        "Delhi": {
            "lat": 28.7041,
            "long": 77.1025
        },
        "Gujarat": {
            "lat": 22.309425,
            "long": 72.136230
        },
        "Haryana": {
            "lat": 29.238478,
            "long": 76.431885
        },
        "Himachal Pradesh": {
            "lat": 32.084206,
            "long": 77.571167
        },
        "Jammu and Kashmir": {
            "lat": 33.7782,
            "long": 76.5762
        },
        "Karnataka": {
            "lat": 15.317277,
            "long": 75.713890
        },
        "Kerala": {
            "lat": 10.850516,
            "long": 76.271080
        },
        "Ladakh": {
            "lat": 34.152588,
            "long": 77.577049
        },
        "Madhya Pradesh": {
            "lat": 23.473324,
            "long": 77.947998
        },
        "Maharashtra": {
            "lat": 19.601194,
            "long": 75.552979
        },
        "Odisha": {
            "lat": 20.940920,
            "long": 84.803467
        },
        "Puducherry": {
            "lat": 11.9416,
            "long": 79.8083
        },
        "Punjab": {
            "lat": 31.1471,
            "long": 75.3412
        },
        "Rajasthan": {
            "lat": 27.391277,
            "long": 73.432617
        },
        "Tamil Nadu": {
            "lat": 11.127123,
            "long": 78.656891
        },
        "Telangana": {
            "lat": 17.123184,
            "long": 79.208824
        },
        "Telengana": {
            "lat": 17.123184,
            "long": 79.208824
        },
        "Tripura": {
            "lat": 23.745127,
            "long": 91.746826
        },
        "Uttar Pradesh": {
            "lat": 28.207609,
            "long": 79.826660
        },
        "Uttarakhand": {
            "lat": 30.0668,
            "long": 79.0193
        },
        "West Bengal": {
            "lat": 22.978624,
            "long": 87.747803
        }
    }

    state_wise_stats = {}
    # Fetch JSON data from url
    r = requests.get(url)
    r_json = r.json()
    for data in r_json['data']:
        date = data['day']  # 2020-03-10
        for regional in data['regional']:
            state = regional['loc']
            if state not in state_wise_stats:
                state_wise_stats[state] = {
                    'confirmed_csv': '',
                    'recovered_csv': '',
                    'deaths_csv': '',
                    'dates_csv': '',
                }

            # Fall back to India's coordinates when the state is missing from
            # the states_lat_long dict
            coords = states_lat_long.get(state, states_lat_long['India'])
            state_wise_stats[state]['lat'] = coords['lat']
            state_wise_stats[state]['long'] = coords['long']

            state_wise_stats[state]['recovered_csv'] += str(regional['discharged']) + ","
            state_wise_stats[state]['confirmed_csv'] += str(
                regional['confirmedCasesIndian'] +
                regional['confirmedCasesForeign']) + ","
            state_wise_stats[state]['deaths_csv'] += str(regional['deaths']) + ","
            state_wise_stats[state]['dates_csv'] += str(date) + ","
            state_wise_stats[state]['confirmed_latest'] = (
                regional['confirmedCasesIndian'] +
                regional['confirmedCasesForeign'])
            state_wise_stats[state]['recovered_latest'] = regional['discharged']
            state_wise_stats[state]['deaths_latest'] = regional['deaths']
            state_wise_stats[state]['date_latest'] = str(date)
    # At this point all the data has been collected into the state_wise_stats dict.
    # Next step: load it into the database with a bulk insert, which takes a
    # list of model instances.
    objects_list = []
    for state in state_wise_stats:
        # For each state, create three Record objects: confirmed, deaths and recovered
        obj = Record(
            state_province=state,
            country_region='India',
            country_alpha3='IND',
            latitude=state_wise_stats[state]['lat'],
            longitude=state_wise_stats[state]['long'],
            stats_type='confirmed',
            latest_stats_date=state_wise_stats[state]['date_latest'],
            latest_stats_value=state_wise_stats[state]['confirmed_latest'],
            stats_dates_csv=state_wise_stats[state]['dates_csv'].rstrip(','),
            stats_value_csv=state_wise_stats[state]['confirmed_csv'].rstrip(
                ','),
        )
        objects_list.append(obj)
        obj = Record(
            state_province=state,
            country_region='India',
            country_alpha3='IND',
            latitude=state_wise_stats[state]['lat'],
            longitude=state_wise_stats[state]['long'],
            stats_type='deaths',
            latest_stats_date=state_wise_stats[state]['date_latest'],
            latest_stats_value=state_wise_stats[state]['deaths_latest'],
            stats_dates_csv=state_wise_stats[state]['dates_csv'].rstrip(','),
            stats_value_csv=state_wise_stats[state]['deaths_csv'].rstrip(','),
        )
        objects_list.append(obj)
        obj = Record(
            latitude=state_wise_stats[state]['lat'],
            longitude=state_wise_stats[state]['long'],
            stats_type='recovered',
            state_province=state,
            country_region='India',
            country_alpha3='IND',
            stats_dates_csv=state_wise_stats[state]['dates_csv'].rstrip(','),
            stats_value_csv=state_wise_stats[state]['recovered_csv'].rstrip(
                ','),
            latest_stats_date=state_wise_stats[state]['date_latest'],
            latest_stats_value=state_wise_stats[state]['recovered_latest'])
        objects_list.append(obj)

    print_info("Inserting INDIA records..")
    Record.objects.bulk_create(objects_list)
    print_info("Inserting INDIA records..Done")
    print_info("Handling India records..Done")
Example #9
def authenticate(request):
    # Check parameters
    internal_id = request.data['cid']
    raw_student_id = request.data['uid']
    station_id = request.station

    if settings.ENFORCE_CARD_VALIDATION:
        # Parse student ID
        if re.match(r'[A-Z]\d{2}[0-9A-Z]\d{6}', raw_student_id) and re.match(r'[0-9a-f]{8}', internal_id) and re.match(r'\d+', station_id):
            # Extract parameters
            student_id = raw_student_id[:-1]
            revision = int(raw_student_id[-1:])
            logger.info('Station %s request for card %s[%s]', station_id, student_id, revision)
        else:
            # Malformed card information
            logger.info('Station %s request for card %s (%s)', station_id, raw_student_id, internal_id)
            return error('card_invalid')

    else:
        # Do not reveal full internal ID as ACA requested
        logger.info('Station %s request for card (%s****)', station_id, internal_id[:4])

    # Call ACA API
    try:
        aca_info = service.to_student_id(internal_id)

    except URLError:
        logger.exception('Failed to connect to ACA server')
        return error('external_error', status.HTTP_502_BAD_GATEWAY)

    except service.ExternalError as e:
        if not settings.ENFORCE_CARD_VALIDATION:
            # We can only reveal full internal ID if it’s an invalid card
            logger.exception('Card rejected by ACA server (%s), reason %s', internal_id, e.reason)
        else:
            logger.exception('Card rejected by ACA server, reason %s', e.reason)

        # Tell clients the exact reason for the error
        if e.reason == 'card_invalid' or e.reason == 'student_not_found':
            return error('card_invalid')
        elif e.reason == 'card_blacklisted':
            return error('card_suspicious')
        return error('external_error', status.HTTP_502_BAD_GATEWAY)

    else:
        if not settings.ENFORCE_CARD_VALIDATION:
            student_id = aca_info.id
            revision = 0
            logger.info('User %s (%s) checked', student_id, aca_info.type)
        elif aca_info.id != student_id:
            logger.info('ID %s returned instead', aca_info.id)
            return error('card_suspicious')

    # Check vote record
    try:
        record = Record.objects.get(student_id=student_id)
        if settings.ENFORCE_CARD_VALIDATION and record.revision != revision:
            # ACA claims the card is valid, but the revision does not match our record
            logger.info('Expect revision %s, recorded %s', revision, record.revision)
            return error('card_suspicious')

        if record.state == Record.VOTING:
            # Automatically unlock stuck record
            record.state = Record.AVAILABLE
            record.save()
            logger.info('Reset %s state from VOTING', student_id)

        if record.state != Record.AVAILABLE:
            logger.error('Duplicate entry (%s)', student_id)
            return error('duplicate_entry')

    except Record.DoesNotExist:
        record = Record(student_id=student_id, revision=revision)

    # Build up kind identifier
    try:
        college = settings.COLLEGE_IDS[aca_info.college]
    except KeyError:
        # Use student ID as an alternative
        logger.warning('No matching college for ACA entry %s', aca_info.college)
        college = student_id[3]

        # In rare cases, we may encounter students without colleges
        if college not in settings.COLLEGE_NAMES:
            logger.warning('No matching college for ID %s', college)
            college = '0'

    # Determine graduate status
    type_code = student_id[0]
    kind = college
    try:
        override = OverrideEntry.objects.get(student_id=student_id)
        kind = override.kind
    except OverrideEntry.DoesNotExist:
        if type_code in settings.GRADUATE_CODES:
            if aca_info.department in settings.JOINT_DEPARTMENT_CODES:
                kind += 'B'
            else:
                kind += '1'
        elif type_code in settings.UNDERGRADUATE_CODES:
            if aca_info.department in settings.JOINT_DEPARTMENT_CODES:
                kind += 'A'
            else:
                kind += '0'

    # Check if student has eligible identity
    if kind not in settings.KINDS:
        return error('unqualified')

    # Generate record and token
    record.state = Record.LOCKED
    record.save()

    token = AuthToken.generate(student_id, station_id, kind)
    token.save()

    logger.info('Auth token issued: %s', token.code)
    return Response({'status': 'success', 'uid': student_id, 'type': settings.KINDS[kind], 'vote_token': token.code})
Example #10
def authenticate(request):
    # Check parameters
    internal_id = request.data['cid']
    raw_student_id = request.data['uid']
    station_id = request.station

    if settings.ENFORCE_CARD_VALIDATION:
        # Parse student ID
        if re.match(r'[A-Z]\d{2}[0-9A-Z]\d{6}', raw_student_id) and re.match(r'[0-9a-f]{8}', internal_id) and re.match(r'\d+', station_id):
            # Extract parameters
            student_id = raw_student_id[:-1]
            revision = int(raw_student_id[-1:])
            logger.info('Station %s request for card %s[%s]', station_id, student_id, revision)
        else:
            # Malformed card information
            logger.info('Station %s request for card %s (%s)', station_id, raw_student_id, internal_id)
            return error('card_invalid')

    else:
        # Do not reveal full internal ID as ACA requested
        logger.info('Station %s request for card (%s****)', station_id, internal_id[:4])

    # Call ACA API
    try:
        aca_info = service.to_student_id(internal_id)

    except URLError:
        logger.exception('Failed to connect to ACA server')
        return error('external_error', status.HTTP_502_BAD_GATEWAY)

    except service.ExternalError as e:
        if not settings.ENFORCE_CARD_VALIDATION:
            # We can only reveal full internal ID if it’s an invalid card
            logger.exception('Card rejected by ACA server (%s), reason %s', internal_id, e.reason)
        else:
            logger.exception('Card rejected by ACA server, reason %s', e.reason)

        # Tell clients the exact reason for the error
        if e.reason == 'card_invalid' or e.reason == 'student_not_found':
            return error('card_invalid')
        elif e.reason == 'card_blacklisted':
            return error('card_suspicious')
        return error('external_error', status.HTTP_502_BAD_GATEWAY)

    else:
        if not settings.ENFORCE_CARD_VALIDATION:
            student_id = aca_info.id
            revision = 0
            logger.info('User %s (%s) checked', student_id, aca_info.type)
        elif aca_info.id != student_id:
            logger.info('ID %s returned instead', aca_info.id)
            return error('card_suspicious')

    # Check vote record
    try:
        record = Record.objects.get(student_id=student_id)
        if settings.ENFORCE_CARD_VALIDATION and record.revision != revision:
            # ACA claims the card is valid, but the revision does not match our record
            logger.info('Expect revision %s, recorded %s', revision, record.revision)
            return error('card_suspicious')

        if record.state == Record.VOTING:
            # Automatically unlock stuck record
            record.state = Record.AVAILABLE
            record.save()
            logger.info('Reset %s state from VOTING', student_id)

        if record.state != Record.AVAILABLE:
            logger.error('Duplicate entry (%s)', student_id)
            return error('duplicate_entry')

    except Record.DoesNotExist:
        record = Record(student_id=student_id, revision=revision)

    # Determine graduate status
    kind = kind_classifier.get_kind(aca_info)

    if kind is None:
        return error('unqualified')

    # Generate record and token
    record.state = Record.LOCKED
    record.save()

    token = AuthToken.generate(student_id, station_id, kind)
    token.save()

    logger.info('Auth token issued: %s', token.code)
    return Response({
        'status': 'success',
        'uid': student_id,
        'type': aca_info.college,
        'college': settings.DPTCODE_NAME[aca_info.department],
        'vote_token': token.code,
    })
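Example #10 factors the eligibility logic out into kind_classifier.get_kind. A rough reconstruction of such a helper from the inline rules in Example #9 (a sketch under those assumptions, not the project's actual module; it assumes aca_info.id carries the student ID and OverrideEntry is importable from the project's models):

from django.conf import settings

from .models import OverrideEntry  # project model, import path assumed

def get_kind(aca_info):
    """Return a kind identifier for an eligible student, or None if unqualified."""
    student_id = aca_info.id

    # Map the ACA college code to the local identifier, falling back to the
    # 4th character of the student ID (and '0' if even that is unknown)
    try:
        college = settings.COLLEGE_IDS[aca_info.college]
    except KeyError:
        college = student_id[3]
        if college not in settings.COLLEGE_NAMES:
            college = '0'

    kind = college
    type_code = student_id[0]

    # Per-student overrides win; otherwise classify by graduate/undergraduate
    # type code and joint-department membership, as in Example #9
    try:
        kind = OverrideEntry.objects.get(student_id=student_id).kind
    except OverrideEntry.DoesNotExist:
        joint = aca_info.department in settings.JOINT_DEPARTMENT_CODES
        if type_code in settings.GRADUATE_CODES:
            kind += 'B' if joint else '1'
        elif type_code in settings.UNDERGRADUATE_CODES:
            kind += 'A' if joint else '0'

    # Only kinds registered in settings.KINDS are eligible
    return kind if kind in settings.KINDS else None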
Example #11
File: job.py Project: fruviad/combine
def job_details(request, org_id, record_group_id, job_id):
    LOGGER.debug('details for job id: %s', job_id)

    # get CombineJob
    cjob = CombineJob.get_combine_job(job_id)

    # update status
    cjob.job.update_status()

    # detailed record count
    record_count_details = cjob.job.get_detailed_job_record_count()

    # get job lineage
    job_lineage = cjob.job.get_lineage()

    # get dpla_bulk_data_match
    dpla_bulk_data_matches = cjob.job.get_dpla_bulk_data_matches()

    # check if limiting to one, pre-existing record
    get_q = request.GET.get('q', None)

    # job details and job type specific augment
    job_detail = cjob.job.job_details_dict

    # mapped field analysis, generate if not part of job_details
    if 'mapped_field_analysis' in job_detail:
        field_counts = job_detail['mapped_field_analysis']
    elif cjob.job.finished:
        field_counts = cjob.count_indexed_fields()
        cjob.job.update_job_details(
            {'mapped_field_analysis': field_counts}, save=True)
    else:
        LOGGER.debug('job not finished, not setting')
        field_counts = {}

    # TODO: What is this accomplishing?
    # OAI Harvest
    if isinstance(cjob, HarvestOAIJob):
        pass

    # Static Harvest
    elif isinstance(cjob, HarvestStaticXMLJob):
        pass

    # Transform
    elif isinstance(cjob, TransformJob):
        pass

    # Merge/Duplicate
    elif isinstance(cjob, MergeJob):
        pass

    # Analysis
    elif isinstance(cjob, AnalysisJob):
        pass

    # get published records, primarily for published sets
    pub_records = PublishedRecords()

    oai_sets = Record.objects(job_id=cjob.job.id).item_frequencies(field='oai_set')

    # get published subsets with PublishedRecords static method
    published_subsets = PublishedRecords.get_subsets()

    # loop through subsets and enrich
    for subset in published_subsets:

        # add counts
        counts = mc_handle.combine.misc.find_one(
            {'_id': 'published_field_counts_%s' % subset['name']})

        # if counts not yet calculated, do now
        if counts is None:
            counts = PublishedRecords(
                subset=subset['name']).count_indexed_fields()
        subset['counts'] = counts

    # get field mappers
    field_mappers = FieldMapper.objects.all()

    # return
    return render(request, 'core/job_details.html', {
        'cjob': cjob,
        'record_group': cjob.job.record_group,
        'record_count_details': record_count_details,
        'field_counts': field_counts,
        'field_mappers': field_mappers,
        'xml2kvp_handle': xml2kvp.XML2kvp(),
        'job_lineage_json': json.dumps(job_lineage),
        'dpla_bulk_data_matches': dpla_bulk_data_matches,
        'q': get_q,
        'job_details': job_detail,
        'pr': pub_records,
        'published_subsets': published_subsets,
        'es_index_str': cjob.esi.es_index_str,
        'breadcrumbs': breadcrumb_parser(request),
        'oai_sets': dict(oai_sets)
    })
Example #12
 def test_record_with_nonexistent_job(self):
     record = Record(job_id=2345789)
     self.assertEqual(record.job.name, 'No Job')
     self.assertEqual(record.job.record_group.name, 'No Record Group')
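The assertions above imply that Record.job resolves a missing Job to an unsaved placeholder rather than raising. One way such a property could be written (a sketch, assuming Record is the MongoEngine document from Examples #4 and #11 and Job/RecordGroup are the Django models from Example #4):

from mongoengine import Document, IntField, StringField

from core.models import Job, RecordGroup  # hypothetical import path

class Record(Document):
    job_id = IntField()
    record_id = StringField()
    document = StringField()

    @property
    def job(self):
        # Return the owning Django Job, or an unsaved placeholder so callers
        # can still read .name and .record_group.name when the Job is missing
        try:
            return Job.objects.get(pk=self.job_id)
        except Job.DoesNotExist:
            return Job(name='No Job',
                       record_group=RecordGroup(name='No Record Group'))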