from time import sleep  # only stdlib import needed here; the other helpers are project-specific


def brute_force(user, keywords, forms, user_agent):
    passwords = generate_all_passwords(keywords)
    results = {}
    for form in forms:
        print('\nAttempting to brute-force: ' + form + '\n')
        get_header = {'User-Agent': user_agent}
        html_doc = get_request(form, get_header)
        parser = HTMLParser(html_doc)
        done = False
        for password in passwords:
            if not done:
                login = parser.create_login_string(user, password)
                post = {
                    'User-Agent': user_agent,
                    'Content-Type': 'application/x-www-form-urlencoded',
                    'Content-Length': str(len(login))
                }
                response = post_request(form, post, login)
                if get_status(response) >= 500:
                    print('\nHold on, too many failed attempts! Waiting for the '
                          'server to accept more login requests...\n')
                    # Continually retry logging in while there is a server error
                    # (too many failed attempts)
                    while get_status(response) >= 500:
                        response = post_request(form, post, login)
                combination = 'User: ' + user + '\nPassword: ' + password
                print('Attempting to login...\n' + combination)
                if get_status(response) == 302:
                    print('Login Succeeded!\n')
                    done = True
                    results[form] = combination
                else:
                    print('Login Failed...\n')
        if not done:
            print('Ran out of passwords! Bruteforce failed!')
            results[form] = None
        sleep(5)  # Temporary pause between forms to see end result of the current form

    # Print bruteforce results
    print('Bruteforcer Results')
    print('-' * 50)
    for form, combination in results.items():
        print('Form: ' + form)
        if combination is None:
            print('Bruteforce Failed')
        else:
            print(combination)
        print()
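
# A minimal usage sketch for brute_force (all values are hypothetical; the helper
# functions used above are assumed to come from the surrounding project):
#
#   brute_force(user='admin',
#               keywords=['acme', 'password', '2024'],
#               forms=['http://testsite.local/login.php'],
#               user_agent='Mozilla/5.0')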
def work():
    global wait_for
    try:
        ids, messages = fetch_new_emails()
    except OSError:
        log("Failed to connect with e-mail server to parse messages.")
        return
    except NoMessages:
        debug("No e-mails to parse.")
        return
    except Exception:
        log("Unexpected Error.")
        return

    data = parse_messages(messages)
    request_data = to_json({"token": AUTH_TOKEN, "data": data})
    debug("JSON: {}".format(request_data))

    try:
        debug("Connecting to Server to register parsed events.")
        response = post_request(API_ENDPOINT,
                                headers={'Content-Type': 'application/json'},
                                data=request_data)
        debug("Events registered.")

        # Server returns wait_for until next run (in minutes)
        received_wait_for = int(response.text)
        if received_wait_for == -1:
            log("Invalid token.")
            if not DEBUG:
                mark_as_unread(ids)
            return
        elif received_wait_for == -2:
            log("Database error.")
            if not DEBUG:
                mark_as_unread(ids)
            return

        debug("Received {} (minutes) from the server, "
              "to wait until next execution.".format(received_wait_for))
        if 0 < received_wait_for <= MAX_WAITING_PERIOD:
            wait_for = received_wait_for
        else:
            debug("Ignoring {} as it's not between 1 and {}".format(
                received_wait_for, MAX_WAITING_PERIOD))
    except RequestException:
        log("Failed to connect to Server")
        if not DEBUG:
            mark_as_unread(ids)
    except ValueError:
        log("Received {} from the Server, failed to convert to int "
            "to wait for (in minutes)".format(response.text))
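
# Hedged sketch of how work() is presumably driven, given that the `wait_for`
# global it updates is described as minutes to wait until the next execution
# (the loop itself is an assumption, not shown in the snippet above):
#
#   from time import sleep
#
#   while True:
#       work()
#       sleep(wait_for * 60)  # wait_for is in minutes; sleep() takes seconds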
def _post(self, path, data=None, files=None, key=None, admin=False):
    # avoid a mutable default argument; copy so the caller's dict is untouched
    data = {} if data is None else data.copy()
    if not key:
        key = self.api_key if not admin else self.master_api_key
    data['key'] = key
    return post_request("%s/%s" % (self.api_url, path), data=data, files=files)
def send_request(url: str, payload: Dict[str, Any]) -> Response:
    """A Celery task that sends a POST request to the given URL.

    Args:
        url (str): URL to send request to.
        payload (Dict[str, Any]): Request payload. Must be JSON-serializable.

    Returns:
        Response: Received response.
    """
    return post_request(url, json=payload)
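
# The docstring calls send_request a Celery task, so in the source project it is
# presumably registered and queued roughly like this (the `celery_app` instance
# and decorator placement are assumptions, not shown in the snippet above):
#
#   @celery_app.task
#   def send_request(url: str, payload: Dict[str, Any]) -> Response: ...
#
#   send_request.delay('https://example.com/webhook', {'event': 'ping'})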
def _do_request(self, data):
    resp = post_request(self.api, data)
    # DEBUG
    # print(resp.text)
    if resp.status_code != 200:
        raise APIError('bad API response')
    body = resp.json()
    if not body['success']:
        raise APIError('bad API response, success flag not set')
    return body
def __init__(self):
    # Log in to the devicemanager API
    api = baseurl + '/auth/login'
    resp = post_request(api, data=credentials)
    if resp.status_code != 200:
        raise ConnectionError('could not connect to intelligent city server')
    body = resp.json()
    if not body['success']:
        raise ConnectionError('could not connect to intelligent city server')
    self.sessionid = body['sessionid']
    self.api = baseurl + '/devicemanager/python_backend_request'
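
# Hedged usage sketch: __init__ above logs in and stores self.sessionid and
# self.api, after which _do_request (defined earlier) can post to the
# devicemanager endpoint. The class name and the request payload keys below
# are assumptions for illustration only:
#
#   client = DeviceManagerClient()
#   result = client._do_request({'sessionid': client.sessionid, 'action': 'list_devices'})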
def get_wca_access_token_from_auth_code(auth_code):
    """ Calls the WCA with the auth code, and the cubers.io client ID and secret,
    to retrieve the access token for the authenticated user. """

    payload = {
        'grant_type': 'authorization_code',
        'code': auth_code,
        'client_id': __CLIENT_ID,
        'client_secret': __CLIENT_SECRET,
        'redirect_uri': __REDIRECT,
    }

    response = post_request(__ACCESS_TOKEN_URL, data=payload).json()

    error = response.get('error')
    if error:
        raise WCAAuthException(error)

    return response['access_token']
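
# Hedged usage sketch of the OAuth authorization-code exchange above: the `code`
# query parameter arrives on the WCA redirect, e.g. in a Flask callback view
# (the route name and request object are assumptions):
#
#   @app.route('/wca/callback')
#   def wca_callback():
#       token = get_wca_access_token_from_auth_code(request.args['code'])
#       ...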
def fetch_floorplan_details(id):
    from copy import deepcopy  # local import keeps this snippet self-contained
    # copy the request template so repeated calls don't mutate the shared constant
    body = deepcopy(FLOORPLAN_ID_REQUEST)
    body['variables']['amliId'] = id
    response = post_request(GRAPHQL_ENDPOINT,
                            json=body,
                            headers={'Content-Type': 'application/json'})
    if response.status_code != 200:
        raise Exception('Failed to grab floorplan details')
    """
    Return details of floorplan
    {
        "data": {
            "main_image": {
                "url": "https://images.prismic.io/amli-website/b3758197-4bf2-4e38-85ab-f11da2041306_austin_aldrich_A3+update.jpg?auto=compress,format&rect=0,0,650,490&w=650&h=490",
                ...
            },
            ...
        },
        "id": "XMwgnSwAADgA00ur",
        ...
    }
    """
    return response.json()['data']['floorplan']['cms']
from json import dumps as json_dumps


def post_to_mattermost(attachment, channel=CHANNEL, username=USER_NAME, icon=USER_ICON):
    # This function submits the new hook to mattermost
    data = {
        'attachments': [attachment],
        'username': username,
        'icon_url': icon,
        'channel': channel
    }
    # Post the webhook
    response = post_request(MM_URL + "hooks/" + HOOK_ID,
                            data=json_dumps(data).encode('utf-8'),
                            headers={'Content-Type': 'application/json'})
    if response.status_code != 200:
        err = 'Request to mattermost returned an error %s, the response is:\n%s'
        raise ValueError(err % (response.status_code, response.text))
    print("---hook response---")
    print(response.text)
    return "Posted successfully."
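
# Example call with a minimal Mattermost-style message attachment (field names
# follow Mattermost's message-attachment schema; the values are illustrative):
#
#   post_to_mattermost({
#       'fallback': 'Build #42 passed',
#       'color': '#36a64f',
#       'title': 'CI result',
#       'text': 'Build #42 passed on main',
#   })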
def fetch_all_floorplans(move_in_date):
    from copy import deepcopy  # local import keeps this snippet self-contained
    # copy the request template so repeated calls don't mutate the shared constant
    body = deepcopy(APARTMENT_LIST_REQUEST)
    body['variables']['moveInDate'] = move_in_date
    response = post_request(GRAPHQL_ENDPOINT,
                            json=body,
                            headers={'Content-Type': 'application/json'})
    if response.status_code != 200:
        raise Exception('Failed to grab floorplans')
    # Return a list of floorplan data
    """
    [
        {
            "floorplanName": "A3",
            "bathroomMin": 1,
            "bedroomMin": 1,
            "priceMax": 1896,
            "sqftMin": 742,
            "availableUnitCount": 1,
            "floorplanId": "1752"
        },
        ...
    ]
    """
    return response.json()['data']['propertyFloorplansSummary']
def fetch_apartments(floorplan, move_in_date):
    from copy import deepcopy  # local import keeps this snippet self-contained
    # copy the request template so repeated calls don't mutate the shared constant
    body = deepcopy(FLOORPLAN_DETAILS_REQUEST)
    body['variables']['amliFloorplanId'] = floorplan.number_id
    body['variables']['floorplanId'] = floorplan.weird_id
    body['variables']['moveInDate'] = move_in_date
    response = post_request(GRAPHQL_ENDPOINT,
                            json=body,
                            headers={'Content-Type': 'application/json'})
    if response.status_code != 200:
        raise Exception('Failed to grab apartments')
    """
    Return a list of apartment data
    [
        {
            "floor": 1,
            "pets": "Cats",
            "unitNumber": "150",
            "rpAvailableDate": "2020-02-29",
            "rent": 1896
        },
        ...
    ]
    """
    return response.json()['data']['units']
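
# Hedged sketch of how the three GraphQL fetchers above appear to chain together.
# fetch_apartments needs an object with .number_id and .weird_id attributes, so
# some wrapper must be built from the fetch_all_floorplans rows; `make_floorplan`
# below is a hypothetical stand-in for that wrapper:
#
#   plans = fetch_all_floorplans('2020-02-29')
#   for plan in plans:
#       details = fetch_floorplan_details(plan['floorplanId'])
#       units = fetch_apartments(make_floorplan(plan, details), '2020-02-29')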
def _post(self, path, data=None, files=None, key=None):
    # variant of _post without the admin/master-key option
    data = {} if data is None else data.copy()
    if not key:
        key = self.api_key
    data['key'] = key
    return post_request("%s/%s" % (self.api_url, path), data=data, files=files)
def cleanup(self):
    # attempt to log out
    api = baseurl + '/auth/logout'
    data = {'username': credentials['username'], 'sessionid': self.sessionid}
    post_request(api, data)
import logging
import sys
import traceback

import numpy as np
import pandas as pd
from sqlalchemy import inspect


def update_realestate_table():
    """ Updates the realestate market values table """
    # TODO if we hit an out of memory error around here, we should fix how we read in the data
    # TODO should prob check to make sure we dont ge r
    logging.warning("UPDATING REALESTATE DB")
    max_req = 1001
    where_stmt = "1=1"
    if_exists = 'replace'  # replace current database on the first iteration

    # do while loop
    while True:
        # get all the records
        data = {
            'where': where_stmt,  # get a specific set of rows
            'outFields': ", ".join(RE_DATABASE_COLS),  # get specific columns
            'returnGeometry': True,  # do get the extra geometry for geolocation stuff
            'outSR': 4286,  # get coordinate values
            'f': 'json',  # TODO: update to GEOjson format?
            'units': 'esriSRUnit_StatuteMile',  # 1 mile unit
            'resultRecordCount': max_req,  # the number of records to request at this time
            'orderByFields': 'OBJECTID'  # the field to be sorted by
        }

        # make the request; because we do a post request, this should return a full batch in one go
        try:
            db_request = post_request(url=RE_DATABASE_URL, data=data)
        except Exception:
            logging.error(
                'Fatal error when attempting to retrieve realestate database\n' +
                traceback.format_exception_only(*sys.exc_info()[0:2])[0])
            return

        # double check the return values
        if 'json' in db_request.headers.get('Content-Type', ''):
            results = db_request.json()
        else:
            logging.error('Realestate database request content is not in JSON format.')
            return

        # stop once the server reports an error, returns no rows, or signals the final page
        if results.get('error', False) or len(results['features']) == 0 or not results.get('exceededTransferLimit', True):
            if if_exists == 'replace':
                # not a single batch was written, so the table is missing its data
                logging.error('Missing realestate data')
            break

        temp_table = [x['attributes'] for x in results['features']]
        last_id = temp_table[-1]['OBJECTID']
        logging.warning("RECEIVED REALESTATE ID's: " +
                        str(temp_table[0]['OBJECTID']) + " -> " + str(last_id))

        # append the geometry into the array as its own dict item:
        # average every ring vertex to approximate the parcel centroid
        coords_table = [x['geometry']['rings'] for x in results['features']]
        for i, rings in enumerate(coords_table):
            groupavg = [0.0, 0.0]
            n_points = 0
            for ring in rings:
                for point in ring:
                    groupavg[0] += point[0]
                    groupavg[1] += point[1]
                    n_points += 1
            temp_table[i]['longitude'] = groupavg[0] / n_points
            temp_table[i]['latitude'] = groupavg[1] / n_points

        temp_database = pd.DataFrame.from_records(temp_table,
                                                  index="OBJECTID",
                                                  columns=RE_DATABASE_COLS)

        # clean table values
        temp_database['ST_NAME'] = temp_database['ST_NAME'].str.strip()
        temp_database['BLDG_NO'] = temp_database['BLDG_NO'].replace(np.nan, 0)

        # create a column for the estimated realestate value
        temp_database['est_value'] = temp_database.apply(
            lambda row: max(
                row.TAXBASE,
                row.ARTAXBAS,
                row.FULLCASH,
                row.SALEPRIC,
                sum([x if x is not None else 0 for x in [row.BFCVLAND, row.BFCVIMPR]]),
                sum([x if x is not None else 0 for x in [row.LANDEXMP, row.IMPREXMP]]),
                sum([x if x is not None else 0 for x in [row.CURRLAND, row.CURRIMPR]]),
                sum([x if x is not None else 0 for x in [row.EXMPLAND, row.EXMPIMPR]])),
            axis=1)

        # remove the unnecessary cols
        temp_database = temp_database.drop([
            "TAXBASE", "ARTAXBAS", "BFCVLAND", "BFCVIMPR", "LANDEXMP",
            "IMPREXMP", "CURRLAND", "CURRIMPR", "EXMPLAND", "EXMPIMPR",
            "FULLCASH", "SALEPRIC"
        ], axis=1)
        temp_database['YEAR_BUILD'] = temp_database['YEAR_BUILD'].replace(0, np.nan)
        temp_database['SALEDATE'] = pd.to_datetime(temp_database['SALEDATE'],
                                                   errors='coerce',
                                                   format="%m%d%Y")
        #temp_database['SALEDATE'] = temp_database['SALEDATE'].replace('NaT', np.nan)

        # rename column headers
        temp_database = temp_database.rename(columns={
            'PERMHOME': 'perm_home',
            'SALEDATE': 'date_sold',
            'YEAR_BUILD': 'year_built',
            'OWNMDE': 'owner_mode',
            'VACIND': 'vacant',
            'STDIRPRE': 'addr_prefix',
            'ST_NAME': 'addr_name',
            'ST_TYPE': 'addr_suffix',
            'BLDG_NO': 'addr_num'
        }, errors='raise')

        # add to sql database
        temp_database = temp_database.astype({"addr_num": int})
        temp_database.to_sql(REALESTATE_TABLE_NAME,
                             con=DATABASE,
                             index=True,
                             index_label='uid',
                             if_exists=if_exists)

        # setup vars for next loop:
        # make all following writes append and not replace
        if_exists = 'append'
        # setup the next query statement
        where_stmt = "OBJECTID > {} AND OBJECTID < {}".format(last_id, last_id + max_req)
        logging.info([
            " ".join([str(x['name']), str(x['type'])])
            for x in inspect(DATABASE).get_columns(REALESTATE_TABLE_NAME)
        ])
def send(self, title: str, text: str, click_action: str) -> None:
    # `payload` avoids shadowing the stdlib json module name
    payload = self.generate_json(title, text, click_action)
    post_request("https://fcm.googleapis.com/fcm/send",
                 headers=self.HEADERS,
                 json=payload)
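
# Hedged usage sketch: send() above posts to FCM's legacy HTTP endpoint, so
# self.HEADERS presumably carries the server key. The class name and header
# contents below are assumptions for illustration:
#
#   notifier = FcmNotifier()  # with HEADERS = {'Authorization': 'key=<SERVER_KEY>',
#                             #                 'Content-Type': 'application/json'}
#   notifier.send('Title', 'Body text', 'OPEN_ACTIVITY')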