Example #1
def main():
	'''Just getting the top posts of r/funny'''
	print(main.__doc__)
	var = r.get_subreddit('funny').get_top(limit=2)  # r: a praw Reddit instance created elsewhere
	for x in var:
		print('\n')
		pprintpp.pprint(str(x))
Example #2
def ip_whois(dicsubdominios):
    for (k, v) in dicsubdominios.items():
        hosts = IPWhois(v)  # .lookup_rws()
        results = hosts.lookup_whois()
        print('Host: ', v)
        pprintpp.pprint(results)
        print('\n\n')
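A hedged usage sketch for the helper above, assuming IPWhois comes from the ipwhois package and that the input dict maps discovered subdomains to resolved IP addresses (both names below are illustrative):

from ipwhois import IPWhois  # pip install ipwhois
import pprintpp

# Hypothetical mapping of subdomain -> resolved IP
dicsubdominios = {'www.example.com': '93.184.216.34'}
ip_whois(dicsubdominios)  # prints each host followed by its WHOIS record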
Example #3
    def test_generate_simple_parler_dummies(self):
        generate_simple_parler_dummies()

        assert_pformat_equal(SimpleParlerModel.objects.all().count(), 5)

        with translation.override("en"):
            qs = SimpleParlerModel.objects.language(language_code="en").all()

            info = []
            for instance in qs:
                info.append((instance.slug, list(instance.get_available_languages())))

            pprint(info)

            # Every dummy entry should be translated in de and en:
            assert_pformat_equal(
                info,
                [
                    ("simpleparlermodel-en-1", ["de", "en"]),
                    ("simpleparlermodel-en-2", ["de", "en"]),
                    ("simpleparlermodel-en-3", ["de", "en"]),
                    ("simpleparlermodel-en-4", ["de", "en"]),
                    ("simpleparlermodel-en-5", ["de", "en"]),
                ],
            )
Example #4
    def find_winner(self):
        pprint(self.stats)
        while self.winner is None:
            self.do_a_round()
            pprint(self.stats)

        print('Winner: {}'.format(self.winner))
Example #5
def assert_filenames_and_content(*,
                                 path,
                                 reference,
                                 fromfile="current",
                                 tofile="reference",
                                 **pformat_kwargs):
    if not isinstance(path, Path):
        path = Path(path)

    assert_is_dir(path)

    current_data = []
    for item in sorted(path.iterdir()):
        with item.open("rb") as f:
            current_data.append((item.name, f.read()))

    if current_data != reference:
        print("\nCurrent filenames and content:")
        pprintpp.pprint(current_data, **pformat_kwargs)

    assert_pformat_equal(current_data,
                         reference,
                         fromfile=fromfile,
                         tofile=tofile,
                         **pformat_kwargs)
Example #6
def event_notification(server, headers, tokens):
    event_endpoint = '/dna/intent/api/v1/events?tags=ASSURANCE'
    url = server + event_endpoint
    headers['X-Auth-Token'] = tokens
    resp = requests.get(url, headers=headers, verify=False)
    event_notice = resp.json()  #['response']
    pprint(event_notice)
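The DNA Center helpers in this and the neighboring examples all expect an auth token; a minimal sketch of obtaining one via the standard /dna/system/api/v1/auth/token endpoint (host and credentials below are illustrative):

import requests
from requests.auth import HTTPBasicAuth

server = 'https://sandboxdnac.cisco.com'  # hypothetical DNA Center host
headers = {'Content-Type': 'application/json'}

# A basic-auth POST to the auth API returns {'Token': '...'}
resp = requests.post(server + '/dna/system/api/v1/auth/token',
                     auth=HTTPBasicAuth('user', 'password'),
                     verify=False)
token = resp.json()['Token']

event_notification(server, headers, token)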
Example #7
def get_site_topology(server, headers, tokens):
    all_devices = '/dna/intent/api/v1/topology/site-topology'
    url = server + all_devices
    headers['X-Auth-Token'] = tokens
    resp = requests.get(url, headers=headers, verify=False)
    topology_list = resp.json()['response']
    pprint(topology_list)
Example #8
def main(tmc_api_key):

    tune_v2_session_authenticate = \
        TuneV2SessionAuthenticate(
            logger_level=logging.DEBUG
        )

    try:
        if tune_v2_session_authenticate.get_session_token(
                tmc_api_key=tmc_api_key, request_retry=None):
            session_token = tune_v2_session_authenticate.session_token
            print(session_token)

    except TuneRequestBaseError as tmc_req_ex:
        print_traceback(tmc_req_ex)
        pprint(tmc_req_ex.to_dict())
        print(str(tmc_req_ex))

    except TuneReportingError as tmc_rep_ex:
        pprint(tmc_rep_ex.to_dict())
        print(str(tmc_rep_ex))

    except Exception as ex:
        print_traceback(ex)
        print(get_exception_message(ex))
Example #9
    def get_ec2_instances(self):
        pprintpp.pprint(self.awsservices.aws_region_codes)
        sorted_regions = list(self.awsservices.aws_region_codes)
        sorted_regions.sort()
        for a_region in sorted_regions:
            if a_region in self.exclude_regions:
                continue
            print('Going in region: ' + a_region)
            try:
                self.client = boto3.client('ec2', region_name=a_region)
                ec2_instances = self.client.describe_instances()
                # NOTE: only the first reservation in this region is inspected
                for an_ec2_instance in ec2_instances['Reservations'][0][
                        'Instances']:
                    print('\tInstanceId:{:<30}'.format(
                        an_ec2_instance['InstanceId']))
                    print('\tInstanceType:{:<50}'.format(
                        an_ec2_instance['InstanceType']))
                    print('\tKeyName:{:<30}'.format(
                        an_ec2_instance['KeyName']))
                    print('\tTags:{:<50}'.format(str(an_ec2_instance['Tags'])))
                    print('\tPublicIpAddress:{:<30}'.format(
                        an_ec2_instance['PublicIpAddress']))
                    print('\tPublicDnsName:{:<60}'.format(
                        an_ec2_instance['PublicDnsName']))
                    print('\tPrivateIpAddress:{:<30}'.format(
                        an_ec2_instance['PrivateIpAddress']))
            except Exception:
                # region may be disabled for this account, or no instances exist; skip it
                pass
Example #10
def test_put_dict_to_lambda(jar):
    data = {"list": [1, 23, 4, 5], "xyz": "xyz", "dt1": dt.now()}

    resp = jar.put(data)
    pprint(resp)

    assert resp == 200
Example #11
    def assert_restapi(cls, result, expect):
        """Compare content received from REST API.

        See the description for assert_storage method.

        Args:
            result (dict): Result JSON from REST API.
            expect (dict): Expected JSON in REST API response.
        """

        if result is None and result is expect:
            assert 1
            return

        try:
            cls._schema.validate(result)
        except ValidationError as error:
            print('json validation error: {}'.format(error))
            assert 0
        except SchemaError as error:
            print('json schema error: {}'.format(error))
            assert 0

        result_dict = Content._get_result_restapi(result)
        expect_dict = Content._get_expect_restapi(expect)
        try:
            assert result_dict == expect_dict
        except AssertionError:
            print('result:')
            pprintpp.pprint(result_dict)
            print('expect:')
            pprintpp.pprint(expect_dict)
            raise
Example #12
def get_sites(server, headers, tokens):
    all_devices = '/dna/intent/api/v1/site'
    url = server + all_devices
    headers['X-Auth-Token'] = tokens
    resp = requests.get(url, headers=headers, verify=False)
    site_list = resp.json()
    pprint(site_list)
Example #13
def cadets_list(result):
    print(
        "\n--------------------------------------\nBreakdown of Google Spreadsheet\n--------------------------------------"
    )
    fail_logins, pass_logins, closure_ids, cadet_fail_list, cadet_pass_list = [], [], [], [], []
    values = result['values']
    for lists in values:
        logins_room = []
        if lists[1] == '':
            print(f"NO OCCUPANTS in Room: {lists[0]}")
            continue
        elif len(lists) == 5:
            logins_room = lists[1].split(', ')
            room = room_status.Room(lists[0], logins_room, lists[2], lists[3],
                                    lists[4], closure_ids)
            cadet_fail_list, cadet_pass_list = pass_fail_lists(
                room, logins_room, lists, cadet_fail_list, cadet_pass_list)
        else:
            print(f"Room EMPTY or Data incorrect for room: {lists[0]}")
    print(
        "------------------------------\n-cadets_lists- cadet_failed_groups:\n------------------------------"
    )
    pprintpp.pprint(cadet_fail_list)
    print("\n")
    print(
        "------------------------------\n-cadets_lists- cadet_passed_groups:\n------------------------------"
    )
    pprintpp.pprint(cadet_pass_list)
    return cadet_fail_list, cadet_pass_list
Example #14
def get_task_batches():

    # Build a map of task names to task instances
    name_to_instance = dict((t.name, t) for t in tasks)

    # Build a map of task names to dependency names
    name_to_deps = dict((t.name, set(t.dependencies)) for t in tasks)
    pprint(name_to_deps)

    # This is where we'll store the batches
    batches = []

    # While there are dependencies to solve...
    while name_to_deps:
        # Get all tasks with no dependencies
        ready = {name for name, deps in name_to_deps.items() if not deps}

        # If there aren't any, we have a loop in the graph
        if not ready:
            msg = "Circular dependencies found!\n"
            msg += format_dependencies(name_to_deps)
            raise ValueError(msg)

        # Remove them from the dependency graph
        for name in ready:
            del name_to_deps[name]
        for deps in name_to_deps.values():
            deps.difference_update(ready)

        # Add the batch to the list
        batches.append({name_to_instance[name] for name in ready})

    # Return the list of batches
    return batches
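A small driver for the batching above, assuming each task only needs name and dependencies attributes (the Task namedtuple and the task graph are illustrative):

from collections import namedtuple

Task = namedtuple('Task', 'name dependencies')

# build -> {lint, test} -> deploy
tasks = [
    Task('build', []),
    Task('lint', ['build']),
    Task('test', ['build']),
    Task('deploy', ['test', 'lint']),
]

for i, batch in enumerate(get_task_batches(), start=1):
    print('Batch', i, sorted(t.name for t in batch))
# Batch 1 ['build']
# Batch 2 ['lint', 'test']
# Batch 3 ['deploy']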
Example #15
def assert_pformat_equal(first, second, msg="", **pformat_kwargs):
    """ compare with pprintpp and icdiff output """
    if first != second:
        if isinstance(first, str):
            print(first)
        else:
            pprintpp.pprint(first, **pformat_kwargs)
        assert first == second, f"{msg}{create_icdiff(first=first, second=second, **pformat_kwargs)}"
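Usage reads like a plain assert first == second, except that on mismatch the left-hand value is pretty-printed and the AssertionError message carries a side-by-side icdiff of both values (the dicts below are illustrative):

assert_pformat_equal({'a': 1}, {'a': 1})    # passes silently
assert_pformat_equal({'a': 1, 'b': [2, 3]},
                     {'a': 1, 'b': [2, 4]})  # prints the first dict, then raises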
Example #16
def exportResultsExcel(resultsFilePath, report, df):
    resultsFile = exportFilePrep(resultsFilePath, report, "xlsx")
    df.to_excel(resultsFile, encoding='utf-8', index=True, float_format='%.2f')

    if os.path.exists(resultsFile):
        pprint(f"Results Excel: {resultsFile}")

    return df, resultsFile
Example #17
def get_device(server, headers, tokens):
    all_devices = '/dna/intent/api/v1/network-device'
    url = server + all_devices
    headers['X-Auth-Token'] = tokens
    resp = requests.get(url, headers=headers, verify=False)
    device_list = resp.json()
    for value in device_list['response']:
        pprint(value)
Example #18
def exportResultsTEX(resultsFilePath, report, df):
    resultsFile = exportFilePrep(resultsFilePath, report, "tex")
    with open(resultsFile, 'w') as tf:
        tf.write(df.to_latex())

    if os.path.exists(resultsFile):
        pprint(f"TEX: {resultsFile}")

    return df, resultsFile
Example #19
    def debug(self):
        """Debug databse."""

        with closing(self._connection.cursor()) as cursor:
            cursor.execute('SELECT * FROM contents')
            rows = cursor.fetchall()

        import pprintpp
        pprintpp.pprint(rows)
Example #20
	def extract(self):
		self.default_metadata()
		pprint(self.def_met)

		self.extract_default_metadata()
		self.extract_metadata_from_book()
		self._finishing_touches_()

		pprint(self.extracted_elements)
Example #21
    def get_cur_report(self):
        next_token = str(secrets.token_hex(8))  # NOTE: a real NextToken normally comes from a previous response page
        report = self.client.describe_report_definitions(MaxResults=5,
                                                         NextToken=next_token)

        paginator = self.client.get_paginator('describe_report_definitions')
        print('next_token : ' + next_token)
        pprintpp.pprint(report)
        pprintpp.pprint(paginator.__dict__)
Example #22
def clusterStations(stationList):
    maxRange = 60 
    clusters = list()

    averageLat = sum(station.location.latitude for station in stationList)/len(stationList)
    averageLon = sum(station.location.longitude for station in stationList)/len(stationList)

    if 'GRAND\'PLACE' in stationList[0].name or 'NEYRPIC' in stationList[0].name or 'FLANDRIN - VALMY' in stationList[0].name:
        dd = defaultdict(list)
        for d in stationList:
            dd[(d.road)].append(d)
        return dd.values()

    # General case: if all stations sit within maxRange of their centroid and share a single road, keep them as one cluster
    elif len(stationList) > 2:
        for station in stationList:
            if haversine(station.location.longitude, station.location.latitude, averageLon, averageLat) > maxRange:
                break
        else:
            if len({station.road for station in stationList}) == 1:  # if all stations are on the same road
                result = list()
                result.append(stationList)
                return result

    bb = False
    # if "LA TRONCHE, CIMETIERE" in stationList[0].name and list(stationList[0].lines)[0].mode == "BUS":
    #     bb = True

    while stationList:
        cluster = list()
        station = stationWithTheNearestStation(stationList)
        stationList.remove(station)
        cluster.append(station)
        if bb:
            print("-- ", station)

        stationList.sort(key=lambda p: haversine(station.location.longitude, station.location.latitude, p.location.longitude, p.location.latitude))
        if bb: 
            pprint(stationList)

        for anotherStation in stationList:
            averageLoc = averageLocation(cluster)
            dist = haversine(averageLoc["lon"], averageLoc["lat"], anotherStation.location.longitude, anotherStation.location.latitude)
            same_road = anotherStation.road == cluster[0].road or anotherStation.road == "" or cluster[0].road == ""
            if bb:
                print(dist, anotherStation)
                print(same_road, cluster[0].road, anotherStation.road)
            if (dist < maxRange and same_road) or dist < 15:
                cluster.append(anotherStation)

        stationList = [x for x in stationList if x not in cluster]
        clusters.append(cluster)
        if bb:
            print("Cluster :", cluster ,"\n\n")
    return clusters
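The clustering relies on a haversine(lon1, lat1, lon2, lat2) helper that is not shown; a standard great-circle sketch returning metres, which is consistent with the ~60 m maxRange used above (assumption: the original also returns metres):

from math import radians, sin, cos, asin, sqrt

def haversine(lon1, lat1, lon2, lat2):
    """Great-circle distance between two points, in metres."""
    lon1, lat1, lon2, lat2 = map(radians, (lon1, lat1, lon2, lat2))
    a = sin((lat2 - lat1) / 2) ** 2 + cos(lat1) * cos(lat2) * sin((lon2 - lon1) / 2) ** 2
    return 2 * asin(sqrt(a)) * 6371000  # mean Earth radius in metres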
Example #23
def sub_notice(server, headers, tokens, payload):
    event_endpoint = '/dna/intent/api/v1/event/subscription'
    url = server + event_endpoint
    headers['X-Auth-Token'] = tokens
    resp = requests.post(url,
                         headers=headers,
                         data=json.dumps(payload),
                         verify=False)
    print(resp.status_code)
    pprint(resp.json())
Example #24
def test_merge():
    from masonjar.dictionary.merge import merge

    left = {
        'v1': 1,
        'v2': 2,
        'aset': {1, 2, 3},
        'adict': {
            's1': ['a', 'b', True],
            's2': [],
            'alevel': {
                'leaf': 100,
            }
        }
    }

    right = {
        'v1': 10,
        'another': 100,
        'aset': {3, 4},
        'adict': {
            's1': ['a', 'c'],
            's3': 1024,
            'alevel': {
                'leaf': {
                    'override': True,
                },
                'notleaf': 10.18,
            }
        }
    }

    expected = {
        'adict': {
            'alevel': {
                'leaf': {
                    'override': True
                },
                'notleaf': 10.18,
            },
            's1': ['a', 'b', True, 'a', 'c'],
            's2': [],
            's3': 1024,
        },
        'another': 100,
        'aset': {1, 2, 3, 4},
        'v1': 10,
        'v2': 2,
    }

    merged = merge(left, right)
    pprint(merged)

    ddiff = DeepDiff(merged, expected)
    assert not ddiff
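The test pins down the merge semantics: sets union, lists concatenate, nested dicts merge recursively, and scalars from the right-hand dict win. A minimal sketch with those semantics (the real masonjar implementation may differ):

def merge(left, right):
    """Recursively merge two dicts; right wins on scalar conflicts."""
    out = dict(left)
    for key, rval in right.items():
        lval = out.get(key)
        if isinstance(lval, dict) and isinstance(rval, dict):
            out[key] = merge(lval, rval)   # recurse into nested dicts
        elif isinstance(lval, set) and isinstance(rval, set):
            out[key] = lval | rval         # union sets
        elif isinstance(lval, list) and isinstance(rval, list):
            out[key] = lval + rval         # concatenate lists
        else:
            out[key] = rval                # right overrides
    return out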
Example #25
    def list_service(self, serv_code):
        pprintpp.pprint(list(self.aws_services.keys()))
        search_serv_code = serv_code + "_region"
        if search_serv_code in self.aws_services.keys():
            log_str = str([search_serv_code,
                           str(self.aws_services[search_serv_code][0]),
                           str(self.aws_services[search_serv_code][1])])
            self.log_me.info(log_str)
Example #26
def dump_html(to_dump):
	try:
		with open(r'./tmp/temp_html', 'wb') as html_dump:
			pickle.dump(to_dump, html_dump, -1)  # highest pickle protocol
	except OSError:
		e = sys.exc_info()
		pprint(e)
Example #27
def exportResultsCSV(resultsFilePath, report, df):
    resultsFile = exportFilePrep(resultsFilePath, report, "csv")
    df.to_csv(resultsFile,
              sep=',',
              encoding='utf-8',
              index=True,
              float_format='%.2f')

    if os.path.exists(resultsFile):
        pprint(f"Results CSV: {resultsFile}")

    return df, resultsFile
Example #28
	def event_handler(self, event):
		pprint(event)
		if (event.type == pygame.MOUSEBUTTONDOWN):
			self.set_down()
			return False
		if (event.type == pygame.MOUSEBUTTONUP):
			self.release()
			self.swipe()
			return True
		if self.is_down:
			self.delta()
		return False
Example #29
def create_soup_from_html_dump():
	try:
		html_dump = open(r'./tmp/temp_html', 'rb')
	except OSError:
		e = sys.exc_info()
		pprint(e)
		return

	with html_dump:
		html_soup = bs(pickle.load(html_dump), "lxml")  # parse the cached markup with lxml's HTML parser
	return html_soup
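A hedged round trip of this helper and the dump_html helper from Example #26, assuming a ./tmp directory already exists (the URL is illustrative):

import requests

html = requests.get('https://example.com').text  # fetch some markup
dump_html(html)                                  # pickle it to ./tmp/temp_html
soup = create_soup_from_html_dump()              # rebuild the soup from the cache
print(soup.title)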
Example #30
def main():
    cn_continent = convert_country_2_code_to_continent('US')
    pprint(cn_continent)

    cn_continent = convert_country_2_code_to_continent('AU')
    pprint(cn_continent)
    cn_continent = convert_country_2_code_to_continent('NZ')
    pprint(cn_continent)
    cn_continent = convert_country_2_code_to_continent('JP')
    pprint(cn_continent)

    cn_name = convert_country_2_code_to_country_name('JP')
    pprint(cn_name)
Example #31
    def get_s3_buckets(self):
        if not self.client:
            self.logMe.info('Not connected to S3')
        else:
            owners_buckets = self.client.list_buckets()
            pprintpp.pprint(owners_buckets['Owner'])
            self.logMe.info(owners_buckets['Owner'])
            for indexid, a_bucket in enumerate(owners_buckets['Buckets']):
                self.logMe.info(str(a_bucket))
                pprintpp.pprint(a_bucket)
                input("Press Enter to continue...")
Example #32
    def get_tag_data(self, tag_pattern):
        """Query the soup for tag_pattern

        :param tag_pattern: find this tag in the soup
        :return: None
        """
        self.extracted_tags = self.soup.find_all(re.compile(tag_pattern))
        for a_req_tag in self.extracted_tags:
            self.logMe.info('a_req_tag: ' + str(a_req_tag))
            pprintpp.pprint(a_req_tag)
            print(a_req_tag.previous_sibling)
            print(a_req_tag.next_sibling)
        print('Ends')
Example #33
def from_domain(domain):
    """This prints WHOIS info about a given domain"""

    info = whois.query(domain)
    menu_utils.header("Info retrieved")
    info = {"name": info.name,
            "name servers": info.name_servers,
            "registrar": info.registrar,
            "creation date": info.creation_date,
            "expiration date": info.expiration_date,
            "last updated": info.last_updated}
    pprintpp.pprint(info)
Example #34
    def make_settings(self):
        with self.settings_path.open('w') as settings:
            self.write_info(settings)
            print(file=settings)
            print(self.project.project_config['settings_prelude'], file=settings)
            print(file=settings)
            for key, value in self.config.items():
                if key == 'uwsgi':
                    continue
                if key.islower():
                    print('Invalid Django setting "{key}".'.format(key=key))
                    sys.exit(1)
                print(key, '=', end=' ', file=settings)
                pprintpp.pprint(value, indent=4, stream=settings)
Example #35
async def update_last_days(n):
    import json
    import settings
    from tools import date_to_timestamp

    redis = await aioredis.create_redis(settings.REDIS_HOST)

    pairs = []
    day_objects = last_days(n)
    for day in day_objects:
        pairs += [date_to_timestamp(day), json.dumps(get_nbrb_rates(day))]

    pprint(pairs)

    await redis.hmset(settings.REDIS_KEY_DAILY_RATES, *pairs)  # hmset is a coroutine in aioredis
Example #36
    def render_pretty(self, do_print=True, *args, **kwargs):
        rendr = self.render(*args, **kwargs)
        r = pformat(rendr)  # pformat (from pprint) returns a string; pprint() returns None
        if do_print:
            print(r)
        else:
            return r
Example #37
def auth_view(request):

	REDIRECT_URI = 'http://localhost:8000/oauth2/oauth2callback'


	FLOW = flow_from_clientsecrets(
		CLIENT_SECRETS,
		scope = 'https://www.googleapis.com/auth/calendar',
		redirect_uri=REDIRECT_URI
	)

	user = request.user

	storage = Storage(CredentialsModel, 'id', user, 'credential')
	credential = storage.get()

	if credential is None or credential.invalid is True:
		FLOW.params['state'] = xsrfutil.generate_token(
			settings.SECRET_KEY, user)
		authorize_url = FLOW.step1_get_authorize_url()
		f = FlowModel(id=user, flow=FLOW)
		f.save()
		return HttpResponseRedirect(authorize_url)
	else:
		http = httplib2.Http()
		http = credential.authorize(http)
		service = build('calendar', 'v3', http=http)
		
		now = datetime.datetime.utcnow().isoformat() + 'Z' # 'Z' indicates UTC time
		
		print('Get the last 150 events')
		
		eventsResult = service.events().list(
			calendarId='primary', timeMax=now, maxResults=150, singleEvents=False).execute()
		
		events = eventsResult.get('items', [])
		pprintpp.pprint(events)

		return render(request, 'oauth/index.html', {'events': events})
Example #38
    msg['From'] = sender_email
    msg['To'] = ','.join(receiver_emails)
    content = MIMEText(body, 'plain')
    msg.attach(content)

    f = open(attachment_filename)
    attachment = MIMEText(f.read())
    attachment.add_header('Content-Disposition', 'attachment', filename=attachment_filename)
    msg.attach(attachment)

    try:
        # print(msg)
        smtp_obj = smtplib.SMTP(smtpserver)
        smtp_obj.sendmail(sender_email, receiver_emails, msg.as_string())
        print('Successfully sent email')
    except smtplib.SMTPException:
        print('Error: unable to send email')


if __name__ == '__main__':
    incidents = fetch_incidents()
    pprintpp.pprint(incidents)

    to_csv(incidents, INCIDENT_FILENAME)

    send_email(
        SENDER_NAME, SENDER_EMAIL, RECIPIENTS,
        'ushahidi incidents', 'Incidents file from ushahidi is attached :)',
        INCIDENT_FILENAME, SMTPSERVER
    )
Example #39
def requests_response_json(
    response,
    request_curl,
    request_label=None,
    raise_ex_if_not_json_response=True,
):
    """Get JSON from response from requests

    Args:
        response:
        request_curl:
        request_label:
        raise_ex_if_not_json_response:

    Returns:

    """
    json_response = None
    response_extra = {}
    if request_label:
        response_extra.update({'request_label': request_label})

    try:
        json_response = response.json()
        response_details_source = 'json'
        response_content_length = len(json_response)

        response_extra.update({
            'response_details_source': response_details_source,
            'response_content_length': response_content_length
        })
    except ValueError as json_decode_ex:
        log.error("Validate JSON Response: Failed: JSONDecodeError", extra=response_extra)

        data = dump.dump_all(response)
        pprint(data.decode('utf-8'))

        pprint(response.text)

        handle_json_decode_error(
            response_decode_ex=json_decode_ex,
            response=response,
            response_extra=response_extra,
            request_label=request_label,
            request_curl=request_curl
        )

    except Exception as ex:
        log.error("Validate JSON Response: Failed: Exception", extra=response_extra)

        pprint(response.text)

        handle_json_decode_error(
            response_decode_ex=ex,
            response=response,
            response_extra=response_extra,
            request_label=request_label,
            request_curl=request_curl
        )

    if json_response is None:
        if raise_ex_if_not_json_response:
            log.error("Validate JSON Response: Failed: None", extra=response_extra)

            raise RequestsFortifiedModuleError(
                error_message="Validate JSON Response: Failed: None",
                error_request_curl=request_curl,
                error_code=RequestsFortifiedErrorCodes.REQ_ERR_SOFTWARE
            )
        else:
            log.warning("Validate JSON Response: None", extra=response_extra)
    else:
        log.debug("Validate JSON Response: Valid", extra=response_extra)

    return json_response
Example #40
def handle_json_decode_error(
    response_decode_ex,
    response,
    response_extra=None,
    request_label=None,
    request_curl=None,
):
    """Handle JSON Decode Error

    Args:
        response_decode_ex:
        response:
        response_extra:
        request_label:
        request_curl:

    Returns:

    """
    if response_extra is None:
        response_extra = {}

    if request_label:
        response_extra.update({'request_label': request_label})

    if hasattr(response, 'text') and \
            response.text and \
            len(response.text) > 0:
        response_details = response.text
        response_details_source = 'text'
        response_content_length = len(response_details)

        if response_details.startswith('<html'):
            response_details_source = 'html'
            soup_html = bs4.BeautifulSoup(response_details, "html.parser")
            # kill all script and style elements
            for script in soup_html(["script", "style"]):
                script.extract()  # rip it out
            text_html = soup_html.get_text()
            lines_html = [line for line in text_html.split('\n') if line.strip() != '']
            lines_html = [line.strip(' ') for line in lines_html]
            response_details = lines_html

        elif response_details.startswith('<?xml'):
            response_details_source = 'xml'
            response_details = json.dumps(xmltodict.parse(response_details))
        else:
            pprint(response_details)

        response_extra.update({
            'response_details': response_details,
            'response_details_source': response_details_source,
            'response_content_length': response_content_length,
            'error_exception': base_class_name(response_decode_ex),
            'error_details': get_exception_message(response_decode_ex)
        })

    log.error("Validate JSON Response: Failed: Invalid", extra=response_extra)

    raise RequestsFortifiedModuleError(
        error_message="Validate JSON Response: Failed: Invalid",
        errors=response_decode_ex,
        error_request_curl=request_curl,
        error_code=RequestsFortifiedErrorCodes.REQ_ERR_SOFTWARE
    )
Example #41
        else:
            inputs.append(inp)

    return '\n'.join(inputs)


if __name__ == "__main__":

    table = getstdin()

    lines = table.split('\n')
    matrix = [line.split('\t') for line in lines]
    # print(table)
    tmatrix = transpose(matrix)

    # now each element has the candidate name at 0 and the tags in the rest

    d = dict()
    for line in tmatrix:
        name = line[0]
        d[name] = [e for e in line[1:] if e != '']

    out = sys.argv[1] if len(sys.argv) > 1 else 'terms.py'

    with open(out, 'w') as f:
        pprint(d, stream=f)
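Because pprint writes a valid Python literal, the generated terms.py can be read back without importing it; a small sketch (assuming the dict holds only plain strings and lists):

import ast

with open('terms.py') as f:
    terms = ast.literal_eval(f.read())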



    
Example #42
    # sun rise
    if hour == 0:
        counter = 0
        for item in soup.find_all('span', class_='start'):
            counter += 1
            if counter == 1:
                stats['sun_rise'] = item.getText()

    # day length
    if hour == 0:
        counter = 0
        for item in soup.find_all('span', class_='time'):
            counter += 1
            if counter == 1:
                stats['day_length'] = item.getText()

    # sunset
    if hour == 0:
        counter = 0
        for item in soup.find_all('span', class_='finish'):
            counter += 1
            if counter == 1:
                stats['sunset'] = item.getText()

    print "sleep 1 second..."
    # sleep
    time.sleep(1)

print "\n"
pprint(stats)
Example #43
	def get(self, article):
		return pprintpp.pprint(article)  # NOTE: pprint() prints the article; it returns None
Example #44

mariadb_connection = mariadb.connect(host='localhost', user='******', password='******', database='seleri')
additional_connection = mariadb.connect(host='localhost', user='******', password='******', database='seleri')
cursor = mariadb_connection.cursor()
backup_cursor = additional_connection.cursor()

try:
	cursor.execute("show tables")

	print("The tables in this database are :")
	for (table_name,) in cursor:
		print("-> {}".format(table_name))
		backup_cursor.execute("select * from %s" % table_name)  # table names cannot be bound as query parameters
		table_contents = backup_cursor.fetchall()
		pprint(table_contents)

	### query 1
	cursor.execute(
		'select title, year from starred, movies '
		'where starred.MRN = movies.MRN '
		'and starred.A_ID in (select A_ID from actors '
		'where actors.firstname="Shahrukh" and actors.lastname="Khan")')

	print("\nResult of query no. 1:")
	for title, year in cursor:
		print("movie: {}, released in : {}".format(title, year))

	### query 2
	cursor.execute(
		'select title, year from starred, movies '
		'where starred.MRN = movies.MRN '
		'and starred.A_ID in (select a.A_ID from actors as a '
		'where a.firstname="Shahrukh" and a.lastname="Khan") '
		'and movies.MRN in (select directed.MRN from directed, movies '
		'where directed.MRN = movies.MRN and directed.D_ID in '
		'(select directors.D_ID from directors '
		'where directors.firstname="Shahrukh" and directors.lastname="Khan"))')

	print("\nResult of query no. 2:")
	for title, year in cursor:
		print("movie: {}, released in : {}".format(title, year))
Example #45
                        RACK_DB.file_accn(fake)
                    else:
                        return

    def find_random(n):
        sample = []
        for item in RACK_DB.mem_db:
            sample.append(item)
        mem_nf = 0
        nf = 0
        for item in random.sample(sample, n):
            if len(RACK_DB.find_accn(item)) > 0:
                print("found in memory")
            else:
                nf += 1
            if len(RACK_DB.find_accn_mem(item)) > 0:
                print("found with database")
            else:
                mem_nf += 1
        print("mem_nf: " + str(mem_nf))
        print("nf: " + str(nf))

    # RACK_DB._convert_to_sqlitedb()
    # populate(50)
    # find_random(10)
    RACK_DB._print_database()
    RACK_DB._db_info()
    print_prof_data()
    RACK_DB.clean()
    pprint(RACK_DB.mem_db)
Example #46
def process(ws, data):
    print(':: response:')
    pprint(data)
    print()
Example #47
    def test_quotes(self, name, input, expected):
        stream = p.TextIO()
        p.pprint(input, stream=stream)
        assert_equal(stream.getvalue().rstrip("\n"), expected)
Example #48
request_download = RequestsFortifiedDownload(
    logger_level=logging.DEBUG,
    logger_output=LoggingOutput.FILE,
    logger_format=LoggingFormat.JSON,
)

request_download.logger.note(request_download.logger.getLevelName().lower())
request_download.logger.info("Start".upper())

result = \
    request_download.request(
        request_method='GET',
        request_url=URL_REST_POPULATIONS,
        request_params=None,
        request_retry=None,
        request_headers=HEADER_CONTENT_TYPE_APP_JSON,
        request_label="REST Population"
    )

request_download.logger.info("Completed".upper(), extra=vars(result))

pprint("Logger file path: %s" % request_download.logger.logger_path)

with open(request_download.logger.logger_path, 'r') as logger_fp:
    pprint(logger_fp.readlines())

pprint(request_download.logger.getLevelName())

json_rest_countries = result.json()
pprint(json_rest_countries)
Example #49
    def request_json_download(
        self,
        request_method,
        request_url,
        tmp_json_file_name,
        tmp_directory,
        request_params=None,
        request_data=None,
        request_retry=None,
        request_retry_func=None,
        request_retry_excps=None,
        request_retry_excps_func=None,
        request_headers=None,
        request_auth=None,
        request_label=None,
        build_request_curl=False,
        allow_redirects=True,
        verify=True,
        encoding_write=None,
        encoding_read=None,
    ):
        """Download and Read JSON file.

        Args:
            request_method: request_method for the new :class:`Request` object.
            request_url: URL for the new :class:`Request` object.
            tmp_json_file_name: Provide temporary name for downloaded JSON
            tmp_directory: Provide temporary directory to hold downloaded JSON
            request_params: (optional) Dictionary or bytes to be sent in the query
                string for the :class:`Request`.
            request_data: (optional) Dictionary, bytes, or file-like object to
                send in the body of the :class:`Request`.
            request_retry: (optional) Retry configuration.
            request_headers: (optional) Dictionary of HTTP Headers to
                send with the :class:`Request`.
            request_auth: (optional) Auth tuple to enable
                Basic/Digest/Custom HTTP Auth.
            build_request_curl: (optional) Build a copy-n-paste curl for command line
                that provides same request as this call.
            allow_redirects: (optional) Boolean. Set to True if
                POST/PUT/DELETE redirect following is allowed.
            verify: (optional) whether the SSL cert will be verified. A
                CA_BUNDLE path can also be provided. Defaults to ``True``.
            encoding_write: (optional) Encoding for writing the temporary file.
            encoding_read: (optional) Encoding for reading the temporary file.

        Returns:
            Generator containing JSON data by rows in JSON dictionary format.

        """
        _request_label = "Request Download JSON File"
        request_label = "{0}: {1}".format(request_label, _request_label)  if request_label is not None else _request_label

        log.info(
            "{0}: Start".format(request_label),
            extra={
                'request_url': request_url,
                'encoding_write': encoding_write,
                'encoding_read': encoding_read,
            }
        )

        timer_start = dt.datetime.now()

        _attempts = 0
        _tries = 60
        _delay = 10

        while _tries:
            _attempts += 1

            log.debug(
                "{0}: Download".format(request_label),
                extra={
                    'attempts': _attempts,
                    'request_url': request_url,
                }
            )

            response = self.requests_client.request(
                request_method=request_method,
                request_url=request_url,
                request_params=request_params,
                request_data=request_data,
                request_retry=request_retry,
                request_retry_func=request_retry_func,
                request_retry_excps=request_retry_excps,
                request_retry_excps_func=request_retry_excps_func,
                request_headers=request_headers,
                request_auth=request_auth,
                build_request_curl=build_request_curl,
                allow_redirects=allow_redirects,
                verify=verify,
                stream=True,
                request_label=request_label
            )

            if response is None:
                log.error(
                    "{0}: No response".format(request_label),
                    extra={
                        'request_url': request_url,
                    }
                )

                raise RequestsFortifiedModuleError(
                    error_message="{0}: No response".format(request_label),
                    error_code=RequestsFortifiedErrorCodes.REQ_ERR_REQUEST
                )

            http_status_code = response.status_code

            timer_end = dt.datetime.now()
            timer_delta = timer_end - timer_start
            response_time_secs = timer_delta.seconds
            response_headers = None

            if hasattr(response, 'headers'):
                response_headers = \
                    json.loads(
                        json.dumps(
                            dict(response.headers)
                        )
                    )

            log.debug(
                "{0}: Response Status".format(request_label),
                extra={
                    'http_status_code': http_status_code,
                    'response_time_secs': response_time_secs,
                    'response_url': response.url,
                    'response_headers': safe_dict(response_headers),
                }
            )

            if not os.path.exists(tmp_directory):
                os.mkdir(tmp_directory)

            tmp_json_file_path = "{0}/{1}".format(tmp_directory, tmp_json_file_name)

            if os.path.exists(tmp_json_file_path):
                log.debug(
                    "{0}: Removing".format(request_label),
                    extra={'file_path': tmp_json_file_path},
                )
                os.remove(tmp_json_file_path)

            mode_write = 'wb' if encoding_write is None else 'w'

            log.debug(
                "{0}: Finished".format(request_label),
                extra={
                    'file_path': tmp_json_file_path,
                    'mode_write': mode_write,
                    'encoding_write': encoding_write,
                }
            )

            log.debug(
                "{0}: Usage".format(request_label),
                extra=env_usage(tmp_directory)
            )

            chunk_total_sum = 0

            with open(file=tmp_json_file_path, mode=mode_write, encoding=encoding_write) as json_raw_file_w:
                log.debug(
                    "{0}: Response Raw: Started".format(request_label),
                    extra={
                        'file_path': tmp_json_file_path,
                    }
                )

                _tries -= 1
                error_exception = None
                error_details = None
                chunk_size = 8192
                try:
                    raw_response = response.raw
                    while True:
                        chunk = raw_response.read(chunk_size, decode_content=True)
                        if not chunk:
                            break

                        chunk_total_sum += len(chunk)  # the final chunk may be shorter than chunk_size

                        json_raw_file_w.write(chunk)
                        json_raw_file_w.flush()
                        os.fsync(json_raw_file_w.fileno())

                    log.debug(
                        "{0}: By Chunk: Completed".format(request_label),
                        extra={
                            'file_path': tmp_json_file_path,
                        }
                    )

                    break

                except requests.exceptions.ChunkedEncodingError as chunked_encoding_ex:
                    error_exception = base_class_name(chunked_encoding_ex)
                    error_details = get_exception_message(chunked_encoding_ex)

                    log.warning(
                        "{0}: Error: {1}".format(request_label, error_exception),
                        extra={
                            'error_details': error_details,
                            'chunk_total_sum': chunk_total_sum,
                        }
                    )

                    if not _tries:
                        log.error(
                            "{0}: Exhausted Retries: Error: {1}".format(request_label, error_exception),
                        )
                        raise

                except http_client.IncompleteRead as incomplete_read_ex:
                    error_exception = base_class_name(incomplete_read_ex)
                    error_details = get_exception_message(incomplete_read_ex)

                    log.warning(
                        "{0}: IncompleteRead".format(request_label),
                        extra={
                            'error_exception': error_exception,
                            'error_details': error_details,
                            'chunk_total_sum': chunk_total_sum,
                        }
                    )

                    if not _tries:
                        log.error(
                            "{0}: Exhausted Retries: Error: {1}".format(request_label, error_exception),
                        )
                        raise

                except requests.exceptions.RequestException as request_ex:
                    log.error(
                        "{0}: Request Exception".format(request_label),
                        extra={
                            'error_exception': base_class_name(request_ex),
                            'error_details': get_exception_message(request_ex),
                            'chunk_total_sum': chunk_total_sum,
                        }
                    )
                    raise

                except Exception as ex:
                    log.error(
                        "{0}: Unexpected Exception".format(request_label),
                        extra={
                            'error_exception': base_class_name(ex),
                            'error_details': get_exception_message(ex),
                            'chunk_total_sum': chunk_total_sum,
                        }
                    )
                    raise

                if not _tries:
                    log.error(
                        "{0}: Exhausted Retries".format(request_label),
                        extra={
                            'tries': _tries,
                            'request_url': request_url,
                        }
                    )

                    raise RequestsFortifiedModuleError(
                        error_message="{0}: Exhausted Retries: {1}".format(request_label, request_url),
                        error_request_curl=self.built_request_curl,
                        error_code=RequestsFortifiedErrorCodes.REQ_ERR_RETRY_EXHAUSTED
                    )

                log.info(
                    "{0}: Performing Retry".format(request_label),
                    extra={
                        'tries': _tries,
                        'delay': _delay,
                        'request_url': request_url,
                    }
                )

                time.sleep(_delay)

        tmp_json_file_size = os.path.getsize(tmp_json_file_path)
        bom_enc, bom_len, bom_header = detect_bom(tmp_json_file_path)

        log.info(
            "{0}: By Chunk: Completed: Details".format(request_label),
            extra={
                'file_path': tmp_json_file_path,
                'file_size': bytes_to_human(tmp_json_file_size),
                'chunk_total_sum': chunk_total_sum,
                'bom_encoding': bom_enc,
            }
        )

        if bom_enc == 'gzip':
            tmp_json_gz_file_path = "%s.gz" % tmp_json_file_path

            os.rename(src=tmp_json_file_path, dst=tmp_json_gz_file_path)

            with open(file=tmp_json_file_path, mode=mode_write, encoding=encoding_write) as json_file_w:
                log.debug(
                    "{0}: GZip: Started".format(request_label),
                    extra={
                        'file_path': tmp_json_file_path,
                    }
                )

                with gzip.open(tmp_json_gz_file_path, 'r') as gzip_file_r:
                    json_file_w.write(gzip_file_r.read())

        response_extra = {
            'file_path': tmp_json_file_path,
            'file_size': bytes_to_human(tmp_json_file_size),
        }

        log.info(
            "{0}: Read Downloaded".format(request_label),
            extra=response_extra
        )

        json_download = None
        with open(tmp_json_file_path, mode='r') as json_file_r:
            json_file_content = json_file_r.read()
            try:
                json_download = json.loads(json_file_content)
            except ValueError as json_decode_ex:
                pprint(json_file_content)

                response_extra.update({
                    'json_file_content': json_file_content,
                    'json_file_content_len': len(json_file_content)
                })

                handle_json_decode_error(
                    response_decode_ex=json_decode_ex,
                    response=response,
                    response_extra=response_extra,
                    request_label=request_label,
                    request_curl=self.built_request_curl
                )

            except Exception as ex:
                pprint(json_file_content)

                response_extra.update({
                    'json_file_content': json_file_content,
                    'json_file_content_len': len(json_file_content)
                })

                log.error(
                    "{0}: Failed: Exception".format(request_label),
                    extra=response_extra,
                )

                handle_json_decode_error(
                    response_decode_ex=ex,
                    response=response,
                    response_extra=response_extra,
                    request_label=request_label,
                    request_curl=self.built_request_curl
                )

        response_extra.update({'json_file_content_len': len(json_download)})

        log.info(
            "{0}: Finished".format(request_label),
            extra=response_extra
        )

        return json_download
Example #50
    def test_back_and_forth(self, expected):
        input = eval(expected)
        stream = p.TextIO()
        p.pprint(input, stream=stream)
        assert_equal(stream.getvalue().rstrip("\n"), expected)
Example #51
    def test_expected_input(self, expected, input):
        stream = p.TextIO()
        p.pprint(input, stream=stream)
        assert_equal(stream.getvalue().rstrip("\n"), expected)
Example #52
    def test_unicode(self, name, input, expected, encoding="utf-8"):
        stream = p.TextIO(encoding=encoding)
        p.pprint(input, stream=stream)
        assert_equal(stream.getvalue().rstrip("\n"), expected)
Example #53
NA = object()


def execute(filename, globals=None, locals=None):
    with open(filename) as f:
        script = f.read()

    stmts = list(ast.iter_child_nodes(ast.parse(script)))
    expr = None

    if isinstance(stmts[-1], ast.Expr):
        stmts, expr = stmts[:-1], stmts[-1]

    if stmts:
        # ast.Module requires type_ignores on Python 3.8+
        exec(compile(ast.Module(body=stmts, type_ignores=[]), filename=filename, mode="exec"), globals, locals)

    if expr:
        value = eval(compile(ast.Expression(body=expr.value), filename=filename, mode="eval"), globals, locals)

        if not hasattr(expr.value, 'func') or not hasattr(expr.value.func, 'id') or expr.value.func.id != 'print':
            return value

    return NA


value = execute(sys.argv[1])

if value is not NA:
    pprint.pprint(value)
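A hedged usage demo: run a script whose trailing expression, if it is not already a print(...) call, gets pretty-printed (the file names and contents below are illustrative):

# demo.py:
#     data = {'b': 2, 'a': 1}
#     data
#
# $ python execute_and_pprint.py demo.py
# {'a': 1, 'b': 2}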