def call(method, api_or_module, url, body='', headers=None, params=None, query_string='', scheme='http', **kwargs):
    """Simulates a round-trip call against the given API / URL.

    Builds a WSGI environ for ``url``/``method``, invokes the API's server
    callable directly (no network), and returns the mock response object
    with ``data``, ``content_type`` etc. populated.

    :param method: HTTP method name (e.g. 'GET', 'POST')
    :param api_or_module: API instance or module accepted by ``API(...)``
    :param url: request path
    :param body: request body; non-str bodies are JSON-encoded when the
        content-type is (or defaults to) JSON
    :param headers: optional dict of request headers
    :param params: optional dict merged with ``**kwargs`` into the query string
    :param query_string: pre-built query string; params are appended to it
    :param scheme: URL scheme placed in the environ
    :returns: the StartResponseMock with the decoded response attached
    """
    api = API(api_or_module).http.server()
    response = StartResponseMock()
    headers = {} if headers is None else headers
    # Non-string bodies are serialized to JSON unless the caller set a
    # non-JSON content-type explicitly.
    if not isinstance(body, str) and 'json' in headers.get('content-type', 'application/json'):
        body = output_format.json(body)
        headers.setdefault('content-type', 'application/json')
    params = params if params else {}
    params.update(kwargs)
    if params:
        query_string = '{}{}{}'.format(query_string, '&' if query_string else '', urlencode(params, True))
    result = api(create_environ(path=url, method=method, headers=headers, query_string=query_string, body=body, scheme=scheme), response)
    if result:
        try:
            # Common case: a single bytes chunk.
            response.data = result[0].decode('utf8')
        except TypeError:
            # Not subscriptable (e.g. a generator): collect all chunks first.
            data = BytesIO()
            for chunk in result:
                data.write(chunk)
            data = data.getvalue()
            try:
                response.data = data.decode('utf8')
            except UnicodeDecodeError:  # pragma: no cover
                response.data = data
        except (UnicodeDecodeError, AttributeError):
            # Binary payload or already-str chunk: pass it through untouched.
            response.data = result[0]
        response.content_type = response.headers_dict['content-type']
        if 'application/json' in response.content_type:
            response.data = json.loads(response.data)
    return response
def load_can_database(file, contents=None):
    """Load a CAN database from a .dbc or .arxml file.

    :param file: path to the database file; its suffix selects the loader
    :param contents: optional raw bytes of the file (read from disk when
        omitted and the file exists)
    :returns: the loaded database object, or None if nothing could be loaded
    """
    file = Path(file)
    dbc = None
    if file.suffix.lower() in ('.dbc', '.arxml'):
        if contents is None and file.exists():
            contents = file.read_bytes()
        if contents:
            import_type = file.suffix.lower().strip('.')
            loads = dbc_load if import_type == 'dbc' else arxml_load
            raw = contents  # keep the raw bytes for the fallback decode path
            try:
                dbc = loads(
                    BytesIO(raw),
                    import_type=import_type,
                    key="db",
                )
            except UnicodeDecodeError:
                # BUG FIX: detect()/.decode() used to be called on the BytesIO
                # wrapper, which has no .decode() and is not what chardet
                # expects — operate on the raw bytes instead.
                encoding = detect(raw)["encoding"]
                text = raw.decode(encoding)
                dbc = loads(
                    text,
                    importType=import_type,
                    import_type=import_type,
                    key="db",
                    encoding=encoding,
                )
    return dbc
def descope_css(css_content: bytes):
    """Prefix every CSS selector with BOOK_HTML_SELECTOR so the stylesheet
    only applies inside the book HTML container.

    :param css_content: UTF-8 encoded CSS text
    :returns: the re-scoped CSS, UTF-8 encoded
    """
    css_content = css_content.decode("utf-8")
    # first handle beginning of selector blocks
    # ([^;\{\}\n]+) is a capture of 1+ chars that are NOT ; { } or \n
    # \{[\s\S]*?\} matches the actual css rules in {}
    # \s\S are needed for newlines (can't use DOTALL because we need newlines for the first rule)
    # *? are non-greedy to ensure we aren't capturing "nested" rules
    pattern = re.compile("([^;\{\}\n]+)\{[\s\S]*?\}")
    rules_to_scope = pattern.finditer(css_content)
    for rule in rules_to_scope:
        old_css_selector = rule.group(1)
        new_css_selector = BOOK_HTML_SELECTOR + " " + old_css_selector
        old_css_rule = rule.group()
        # BUG FIX: previously re.sub() was used with the raw CSS text as the
        # replacement string, so any backslash in the CSS (escapes, content
        # properties) was misread as a group reference and raised/garbled the
        # output. Plain str.replace() treats both sides literally.
        new_css_rule = old_css_rule.replace(old_css_selector, new_css_selector, 1)
        css_content = css_content.replace(old_css_rule, new_css_rule)
    # then handle all multi-selector blocks
    # NOTE(review): this also rewrites commas inside property values such as
    # rgba(1, 2, 3) — matches the original behavior; confirm it is intended.
    css_content = css_content.replace(',', ', ' + BOOK_HTML_SELECTOR)
    return str.encode(css_content)
def dump_message(file, headers, props=None, content=None):
    """Serialize an SVN-dump-style record to *file* as an RFC-822 message.

    :param file: binary file-like object the message is flattened into
    :param headers: iterable of (name, value) header pairs
    :param props: optional mapping of property names to values, emitted in
        the ``K <len>/V <len>`` format terminated by ``PROPS-END``
    :param content: optional raw bytes appended after the properties
    """
    msg = Message()
    for name, value in headers:
        msg[name] = value

    body = bytearray()
    if props is not None:
        for key, value in props.items():
            body += "K {}\n".format(len(key)).encode("ascii")
            body += key.encode("ascii") + b"\n"
            body += "V {}\n".format(len(value)).encode("ascii")
            body += value.encode("ascii") + b"\n"
        body += b"PROPS-END\n"
        msg["Prop-content-length"] = format(len(body))
    if content is not None:
        msg["Text-content-length"] = format(len(content))
        body += content
    if props is not None or content is not None:
        msg["Content-length"] = format(len(body))
        # Workaround for Python issue 18324, "set_payload does not handle
        # binary payloads correctly", http://bugs.python.org/issue18324
        msg.set_payload(bytes(body).decode("ascii", "surrogateescape"))
    BytesGenerator(file, mangle_from_=False).flatten(msg)
def batch_add():
    """Bulk-import books from an uploaded CSV file (Flask view).

    Expects a POSTed ``file`` field ending in ``.csv``; each non-empty line
    is comma-split into the Book fields and committed one by one. Returns an
    inline alert script on the first bad row, otherwise the add page with a
    success count.
    """
    if request.method == 'POST' and 'file' in request.files:
        file = request.files['file']
        if file and file.filename[-4:] == '.csv':
            count = 0
            # BUG FIX: the old BytesIO(file.read()).read() round-trip added
            # nothing — file.read() already returns the raw bytes.
            datas = file.read()
            encode = chardet.detect(datas)['encoding']
            datas = datas.decode(encode).replace('\r\n', '\n')
            for l in datas.split('\n'):
                # BUG FIX: a trailing newline used to yield an empty row that
                # failed on int('') and aborted the whole import with an error
                # alert; skip blank lines instead.
                if not l.strip():
                    continue
                data = l.split(',')
                try:
                    book = Book(id=int(data[0]), type=data[1], name=data[2], publisher=data[3], year=data[4], author=data[5], price=data[6], amount=data[7])
                except Exception:
                    return '<script>alert("处理行\\n'+ l + '\\n时发生问题,请检查文件!,确保以英文逗号分割。该行之前部分记录已经添加成功!"); window.location="/add";</script>'
                try:
                    db.session.add(book)
                    db.session.commit()
                except Exception:
                    return '<script>alert("处理行\\n'+ l + '\\n时发生问题,添加入数据库失败,请检查数据book_id是否有重复。部分记录添加成功"); window.location="/add";</script>'
                count += 1
            return render_template("add.html", message="添加%d套书籍成功成功!" % count)
        else:
            return render_template('add.html', message="文件不合法!")
    return redirect(url_for('app.view.add'))
def disassemble_all_code(self):
    """Disassemble main.dol and every REL into .asm files in the output folder.

    Requires devkitPPC's powerpc-eabi-objdump at the hard-coded Windows path.
    For each file: extract its binary (from RELS.arc or the GCM image,
    decompressing Yaz0 data when present), write it to disk, disassemble it,
    and annotate the assembly with symbols (and relocations for RELs) taken
    from the corresponding .map files.
    """
    if not os.path.isfile(r"C:\devkitPro\devkitPPC\bin\powerpc-eabi-objdump.exe"):
        raise Exception(r"Failed to disassemble code: Could not find devkitPPC. devkitPPC should be installed to: C:\devkitPro\devkitPPC")
    rels_arc = self.get_arc("files/RELS.arc")
    out_dir = os.path.join(self.randomized_output_folder, "disassemble")
    if not os.path.isdir(out_dir):
        os.mkdir(out_dir)
    main_symbols = get_main_symbols(self)
    files_to_disassemble = get_list_of_all_rels(self)
    files_to_disassemble.append("sys/main.dol")
    for file_path in files_to_disassemble:
        basename_with_ext = os.path.basename(file_path)
        print(basename_with_ext)
        # RELs may live inside RELS.arc; fall back to reading from the GCM.
        rel_file_entry = rels_arc.get_file_entry(basename_with_ext)
        if rel_file_entry:
            rel_file_entry.decompress_data_if_necessary()
            data = rel_file_entry.data
        else:
            data = self.gcm.read_file_data(file_path)
            # Files on the GCM may be Yaz0-compressed (magic in first 4 bytes).
            if try_read_str(data, 0, 4) == "Yaz0":
                data = Yaz0.decompress(data)
        basename, file_ext = os.path.splitext(basename_with_ext)
        is_rel = (file_ext == ".rel")
        bin_path = os.path.join(out_dir, basename_with_ext)
        with open(bin_path, "wb") as f:
            data.seek(0)
            f.write(data.read())
        asm_path = os.path.join(out_dir, basename + ".asm")
        disassemble_file(bin_path, asm_path)
        if is_rel:
            # Prefer a pre-demangled map when available; otherwise read the
            # raw map from the disc image.
            demangled_map_path = os.path.join(ASM_PATH, "maps-out", basename + ".map.out")
            if os.path.isfile(demangled_map_path):
                with open(demangled_map_path, "rb") as f:
                    rel_map_data = BytesIO(f.read())
            else:
                rel_map_data = self.gcm.read_file_data("files/maps/" + basename + ".map")
            rel_map_data.seek(0)
            rel_map_data = rel_map_data.read()
            # Copy the map file to the output directory
            rel_map_path = os.path.join(out_dir, basename + ".map")
            with open(rel_map_path, "wb") as f:
                f.write(rel_map_data)
            rel_map_data = rel_map_data.decode("ascii")
            add_relocations_and_symbols_to_rel(asm_path, bin_path, main_symbols, rel_map_data)
        else:
            add_symbols_to_main(asm_path, main_symbols)
def create_qr(dest):
    """Render *dest* as a QR code PNG with the site logo centered on it.

    :param dest: data (typically a URL) to encode in the QR code
    :returns: base64-encoded PNG as an ASCII str
    """
    qr = qrcode.QRCode(
        version=None,
        error_correction=qrcode.constants.ERROR_CORRECT_Q,
        box_size=10,
        border=3,
    )
    qr.add_data(dest)
    qr.make(fit=True)
    img = qr.make_image(fill_color=(83, 132, 63), back_color="white")
    qr_size, _ = img.size
    # Logo occupies the central 30% of the QR image; ERROR_CORRECT_Q keeps
    # the code readable despite the covered modules.
    logo_size = int(qr_size * 0.30)
    # BUG FIX: these were named 'min'/'max', shadowing the builtins.
    box_left = int((qr_size / 2) - (logo_size / 2))
    box_right = int((qr_size / 2) + (logo_size / 2))
    # (also removed a duplicate module_dir assignment)
    module_dir = os.path.dirname(__file__)
    logo = Image.open(module_dir + '/static/logo.png')
    logo = logo.resize((box_right - box_left, box_right - box_left))
    img.paste(logo, (box_left, box_left, box_right, box_right))
    img_str = BytesIO()
    img.save(img_str, format="PNG")
    img_str = base64.b64encode(img_str.getvalue())
    return img_str.decode('ascii')
def validate(self):
    """Validate the uploaded tariff CSV and reopen the wizard.

    Decodes the base64 file, re-encodes it as UTF-8, parses it with the
    configured delimiter, caches the rows on ``reader_info`` and flips
    ``state`` to 'validated'. Raises a Warning when no file is selected or
    the file cannot be parsed.
    """
    if not self.data:
        raise exceptions.Warning(_("You need to select a file!"))
    csv_data = base64.b64decode(self.data)
    csv_data = BytesIO(csv_data.decode(self.encoding).encode('utf-8'))
    csv_iterator = pycompat.csv_reader(csv_data, quotechar="'", delimiter=self.delimeter)
    try:
        self.reader_info = []
        self.reader_info.extend(csv_iterator)
        csv_data.close()
        self.state = 'validated'
    except Exception as e:
        # BUG FIX: parse failures were only print()ed, so the wizard silently
        # reopened as if the file were fine; surface the error to the user.
        raise exceptions.Warning(_("Not a valid file!")) from e
    return {
        'name': ('Tarifs'),
        'view_type': 'form',
        'view_mode': 'form',
        'res_model': 'tarif.import',
        'view_id': False,
        'context': {
            'data': self.data,
            'state': self.state,
            'supplier_id': self.supplier_id.id,
            'tarifs_ids': self._get_tarif_from_csv()
        },
        'type': 'ir.actions.act_window',
        'target': 'new'
    }
def getData(url):
    """Fetch *url* and parse the response body as UTF-8 JSON.

    :param url: any URL urllib can open
    :returns: the deserialized JSON value (json.loads must get a str, so the
        body is decoded first)
    """
    # BUG FIX: removed a dead `response = BytesIO()` that was immediately
    # overwritten, and close the connection deterministically via `with`.
    with urllib.request.urlopen(url) as f:
        response = f.read()
    strResponse = response.decode('utf-8')
    return json.loads(strResponse)
def strip_unwanted_tags(body_content: bytes):
    """Strip <body> wrapper tags and non-breaking spaces from an HTML fragment.

    :param body_content: UTF-8 encoded HTML
    :returns: the cleaned HTML, UTF-8 encoded
    """
    body_content = body_content.decode("utf-8")
    body_content = re.sub(r'</body>', '', body_content)
    # BUG FIX: r'<body.*>' was greedy, so a line like '<body id="x"><p>a</p>'
    # lost everything up to the LAST '>'; match only the opening tag itself.
    body_content = re.sub(r'<body[^>]*>', '', body_content)
    # Drop non-breaking spaces (U+00A0 — presumably what the original literal
    # contained; TODO confirm against the upstream file).
    body_content = re.sub('\xa0', '', body_content)
    #body_content = re.sub(r'\t', '', body_content)
    #body_content = re.sub(r'\n', '', body_content)
    return str.encode(body_content)
def get_file(self, bucket: str, key: str):
    """Download an S3 object and return its body decoded as UTF-8.

    :param bucket: S3 bucket name
    :param key: object key within the bucket
    :returns: the object body as a str
    :raises botocore.exceptions.ClientError: propagated from get_object
    """
    # BUG FIX: removed the pointless BytesIO(obj['Body'].read()).read()
    # round-trip and the no-op `except ClientError as e: raise e` handler —
    # ClientError still propagates unchanged to the caller.
    s3_client = boto3.client('s3')
    obj = s3_client.get_object(Bucket=bucket, Key=key)
    return obj['Body'].read().decode('UTF-8')
def encode_raw(self, data):
    """ encode complex data for raw output

    Resolves the output format from the data's mime_type/encoding/schema
    hints and returns a (rewound byte-stream, content-type string) pair.
    XML and JSON payloads are serialized into a fresh BytesIO; text payloads
    are transcoded to the format's text encoding when needed; anything else
    is treated as a binary stream, optionally run through the format's
    encoder.

    :raises ValueError: when no format matches the data's hints
    """
    def _rewind(fid):
        # Seek the stream back to the start if it supports seeking.
        # NOTE(review): this returns the enclosing `data`, not `fid` — at
        # every call site the two are the same object, so behavior is
        # unaffected, but `return fid` was presumably intended; confirm.
        if hasattr(fid, 'seek'):
            fid.seek(0)
        return data
    mime_type = getattr(data, 'mime_type', None)
    encoding = getattr(data, 'encoding', None)
    schema = getattr(data, 'schema', None)
    format_ = self.get_format(mime_type, encoding, schema)
    # getattr tolerates format_ being None here; the explicit check below
    # raises before format_ is dereferenced directly.
    text_encoding = getattr(format_, 'text_encoding', 'utf-8')
    if format_ is None:
        raise ValueError("Invalid format specification! mime_type=%r, "
                         "encoding=%r, schema=%r" % (mime_type, encoding, schema))
    if isinstance(data, CDObject):
        # Unwrap the complex-data container to its raw payload.
        data = data.data
    if format_.is_xml:
        data = BytesIO(
            etree.tostring(data, pretty_print=False, xml_declaration=True,
                           encoding=text_encoding))
        content_type = "%s; charset=%s" % (format_.mime_type, text_encoding)
    elif format_.is_json:
        data = BytesIO(
            json.dumps(data, ensure_ascii=False).encode(text_encoding))
        content_type = "%s; charset=%s" % (format_.mime_type, text_encoding)
    elif format_.is_text:
        if isinstance(data, (CDTextBuffer, CDAsciiTextBuffer)):
            # These buffers transcode themselves; just set the target encoding.
            data.text_encoding = text_encoding
        else:
            source_text_encoding = getattr(data, 'text_encoding', None)
            if source_text_encoding != text_encoding:
                # Re-encode the stream into the requested text encoding.
                data = _rewind(data).read()
                if source_text_encoding is not None:
                    data = data.decode(source_text_encoding)
                data = BytesIO(data.encode(text_encoding))
        content_type = "%s; charset=%s" % (format_.mime_type, text_encoding)
    else:
        # generic binary byte-stream
        if format_.encoding is not None:
            data_out = BytesIO()
            for chunk in format_.encode(_rewind(data)):
                # if isinstance(chunk, binary_type):
                #     chunk = str(chunk,'utf-8')
                data_out.write(chunk)
            data = data_out
        content_type = format_.mime_type
    return _rewind(data), content_type
def _internal_result(raw_response): try: return raw_response[0].decode('utf8') except TypeError: data = BytesIO() for chunk in raw_response: data.write(chunk) data = data.getvalue() try: return data.decode('utf8') except UnicodeDecodeError: # pragma: no cover return data except (UnicodeDecodeError, AttributeError): return raw_response[0]
def call(method, api_or_module, url, body='', headers=None, params=None, query_string='', scheme='http', **kwargs):
    """Simulates a round-trip call against the given API / URL"""
    server = API(api_or_module).http.server()
    response = StartResponseMock()
    if headers is None:
        headers = {}
    # Non-string bodies default to JSON serialization unless the caller
    # explicitly set a non-JSON content-type.
    wants_json = 'json' in headers.get('content-type', 'application/json')
    if wants_json and not isinstance(body, str):
        body = output_format.json(body)
        headers.setdefault('content-type', 'application/json')
    if not params:
        params = {}
    params.update(kwargs)
    if params:
        encoded = urlencode(params, True)
        separator = '&' if query_string else ''
        query_string = '{}{}{}'.format(query_string, separator, encoded)
    environ = create_environ(path=url, method=method, headers=headers,
                             query_string=query_string, body=body,
                             scheme=scheme)
    result = server(environ, response)
    if result:
        try:
            response.data = result[0].decode('utf8')
        except TypeError:
            # Not indexable: gather every chunk into one buffer first.
            collected = BytesIO()
            for chunk in result:
                collected.write(chunk)
            raw = collected.getvalue()
            try:
                response.data = raw.decode('utf8')
            except UnicodeDecodeError:  # pragma: no cover
                response.data = raw
        except (UnicodeDecodeError, AttributeError):
            # Binary payload or already-str chunk: pass through untouched.
            response.data = result[0]
        response.content_type = response.headers_dict['content-type']
        if 'application/json' in response.content_type:
            response.data = json.loads(response.data)
    return response
def get_data(self):
    """Decode the uploaded base64 CSV and cache its rows on ``reader_info``.

    Flips ``state`` to 'validated' on success; raises a Warning when no file
    is selected and a ValidationError when parsing fails.
    """
    if not self.data:
        raise exceptions.Warning(_("You need to select a file!"))
    decoded = base64.b64decode(self.data)
    # Normalize the file to UTF-8 regardless of the configured encoding.
    stream = BytesIO(decoded.decode(self.encoding).encode('utf-8'))
    rows = pycompat.csv_reader(stream,
                               quotechar=self.quotechar,
                               delimiter=self.delimeter)
    try:
        self.reader_info = []
        self.reader_info.extend(rows)
        stream.close()
        self.state = 'validated'
    except Exception as e:
        raise ValidationError(_("CSV file error : %s") % e)
async def _run_diffoscope(old_binary, new_binary, preexec_fn=None):
    """Run diffoscope on two binaries and return its JSON diff.

    :param old_binary: path to the first file
    :param new_binary: path to the second file
    :param preexec_fn: optional callable run in the child before exec
    :returns: the parsed JSON diff, or None when the files are identical
    :raises DiffoscopeError: when diffoscope fails or emits unparsable JSON
    """
    args = ["diffoscope", "--json=-", "--exclude-directory-metadata=yes"]
    args.extend([old_binary, new_binary])
    # BUG FIX: removed a dead `stdout = BytesIO()` that was immediately
    # overwritten by communicate() below.
    logging.debug("running %r", args)
    p = await asyncio.create_subprocess_exec(*args,
                                             stdin=asyncio.subprocess.PIPE,
                                             stdout=asyncio.subprocess.PIPE,
                                             stderr=asyncio.subprocess.PIPE,
                                             preexec_fn=preexec_fn)
    stdout, stderr = await p.communicate(b"")
    # diffoscope exit codes: 0 = no differences, 1 = differences found,
    # anything else = failure.
    if p.returncode == 0:
        return None
    if p.returncode != 1:
        raise DiffoscopeError(stderr.decode(errors="replace"))
    try:
        return json.loads(stdout.decode("utf-8"))
    except json.JSONDecodeError as e:
        raise DiffoscopeError("Error parsing JSON: %s" % e)
def load_can_database(file, contents=None, **kwargs):
    """Load a CAN database from a .dbc or .arxml file (or raw contents).

    :param file: path whose suffix selects the loader
    :param contents: optional raw bytes (read from disk when omitted and the
        file exists)
    :param kwargs: extra options forwarded to the loader's first attempt
    :returns: the database for the first bus when the loader returns a
        multi-bus dict, otherwise the loaded database, or None
    """
    file = Path(file)
    dbc = None
    if file.suffix.lower() in (".dbc", ".arxml") or contents:
        if contents is None and file.exists():
            contents = file.read_bytes()
        if contents:
            import_type = file.suffix.lower().strip(".")
            loads = dbc_load if import_type == "dbc" else arxml_load
            raw = contents  # keep the raw bytes for the fallback decode path
            # BUG FIX: removed a bare `except: raise` wrapper whose trailing
            # `dbc = None` was unreachable dead code.
            try:
                dbc = loads(BytesIO(raw), import_type=import_type, key="db", **kwargs)
            except UnicodeDecodeError:
                # BUG FIX: detect()/.decode() used to be called on the BytesIO
                # wrapper, which has no .decode(); use the raw bytes instead.
                encoding = detect(raw)["encoding"]
                dbc = loads(
                    raw.decode(encoding),
                    importType=import_type,
                    import_type=import_type,
                    key="db",
                    encoding=encoding,
                )
    if isinstance(dbc, dict):
        # Multi-bus container: return the first bus's database, or None when
        # the container is empty.
        if dbc:
            first_bus = list(dbc)[0]
            dbc = dbc[first_bus]
        else:
            dbc = None
    return dbc
def post(self, url, post):
    """POST *post* (a mapping of form fields) to *url* on the device's
    embedded web server over the raw EWS channel.

    Fields are URL-quoted and joined into an x-www-form-urlencoded body,
    the HTTP request is assembled by hand and written to the device, and
    the full response is drained before parsing the status line.

    :returns: True iff the device answered HTTP 200
    """
    s = []
    for k, v in list(post.items()):
        if PY3:
            s.append("%s=%s" % (k, urllib2_parse.quote(str(v))))
        else:
            s.append("%s=%s" % (k, urllib2_parse.quote(v.encode('utf-8'))))
    s = '&'.join(s)
    log.debug(s)
    # Hand-built HTTP/1.1 request; header block and body separated by the
    # required blank line.
    data = """POST %s HTTP/1.1
Connection: Keep-alive
User-agent: hplip/2.0
Host: %s
Content-length: %d
Cache-control: No-cache

%s""" % (url, self.http_host, len(s), s)
    log.log_data(data)
    self.writeEWS(data.encode('utf-8'))
    # Drain the whole response in 4 KiB reads until the device stops sending.
    ret = BytesIO()
    while self.readEWS(4096, ret, timeout=5):
        pass
    ret = ret.getvalue()
    log.log_data(ret)
    self.closeEWS()
    match = http_result_pat.match(ret.decode('utf-8'))
    try:
        code = int(match.group(1))
    except (ValueError, TypeError):
        # No status line matched (match is None) or non-numeric code.
        code = HTTP_ERROR
    return code == HTTP_OK
def get_file_csv_data(self, file):
    """Parse a base64-encoded CSV attachment into a list of row dicts.

    The first row is used as the header; every following row becomes a dict
    keyed by those header names.
    """
    self.ensure_one()
    raw = base64.b64decode(file)
    # Normalize the file to UTF-8 regardless of the configured encoding.
    stream = BytesIO(raw.decode(self.encoding).encode('utf-8'))
    rows = pycompat.csv_reader(stream, quotechar="'", delimiter=self.delimiter)
    file_reader = []
    try:
        self.reader_info = []
        self.reader_info.extend(rows)
        stream.close()
    except Exception as e:
        raise exceptions.Warning(e)
    header = None
    for index, row in enumerate(self.reader_info):
        if index == 0:
            header = row
        else:
            file_reader.append(dict(zip(header, row)))
    return file_reader
def validate(self):
    """Validate the uploaded stock-inventory CSV and reopen the wizard.

    Decodes the base64 file, parses it with ';' as the delimiter, caches the
    rows on ``reader_info`` and flips ``state`` to 'validated'. Raises a
    Warning when no file is selected or the file cannot be parsed.
    """
    if not self.data:
        raise exceptions.Warning(_("You need to select a file!"))
    csv_data = base64.b64decode(self.data)
    csv_data = BytesIO(csv_data.decode('utf-8').encode('utf-8'))
    csv_iterator = pycompat.csv_reader(csv_data, delimiter=";")
    # BUG FIX: use lazy %-style logging args instead of eager string
    # concatenation (same rendered message, no work when INFO is disabled).
    logging.info("csv_iterator%s", csv_iterator)
    try:
        self.reader_info = []
        self.reader_info.extend(csv_iterator)
        csv_data.close()
        # self.stock_production_lot_ids = self._get_stock_prd_lot_from_csv()
        self.state = 'validated'
    except Exception:
        raise exceptions.Warning(_("Not a valid file!"))
    return {
        'name': ('Assignment Sub'),
        'view_type': 'form',
        'view_mode': 'form',
        'res_model': 'stock.inventory.import',
        'view_id': False,
        'context': {
            'data': self.data,
            'state': self.state,
            'default_dest_categ': self.dest_categ.id,
            'default_new_prd_categ': self.new_prd_categ.id,
            'stock_inventory_ids': self._get_stock_inventory_from_csv()
        },
        'type': 'ir.actions.act_window',
        'target': 'new'
    }
def check(self):
    """Perform a minimal HTTP GET health check against the configured
    host/port/resource.

    :returns: True iff the response status line carries 200, 301 or 302;
        False on connection failure or an unparsable status line.
    """
    if not self.__resource.startswith('/'):
        self.__resource = '/' + self.__resource
    request = "GET %s HTTP/1.1\r\nHost:%s\r\n\r\n" % (self.__resource, self.__address)
    s = socket.socket()
    s.settimeout(self.__timeout)
    try:
        s.connect((self.__address, self.__port))
        # BUG FIX: socket.send() may transmit only part of the buffer;
        # sendall() guarantees the whole request is written.
        s.sendall(request.encode())
        # 100 bytes is plenty for the status line we parse below.
        response = s.recv(100)
    except socket.error as e:
        logger.debug("连接%s 上端口 %s 失败 ,原因为:%s" % (self.__address, self.__port, e))
        return False
    finally:
        s.close()
    line = BytesIO(response).readline()
    try:
        # "HTTP/1.1 200 OK" -> version, status, reason
        (http_version, status, messages) = re.split(r'\s+', line.decode(), 2)
    except ValueError:
        logger.warning("分割响应码失败")
        return False
    if status in ['200', '301', '302']:
        return True
    else:
        return False
def run(self):
    """Drive the whole fax job as a two-level state machine.

    The outer loop walks the job phases (read sender info, pre-render,
    count pages, then per-recipient cover page / merge / send); the inner
    loop inside STATE_SEND_FAX performs the SOAP fax protocol against the
    device (BeginJob, DownloadPage per page, EndJob/CancelJob, close).
    Progress and errors are reported through ``self.write_queue``.
    """
    #results = {} # {'file' : error_code,...}

    STATE_DONE = 0
    STATE_ABORTED = 10
    STATE_SUCCESS = 20
    STATE_BUSY = 25
    STATE_READ_SENDER_INFO = 30
    STATE_PRERENDER = 40
    STATE_COUNT_PAGES = 50
    STATE_NEXT_RECIPIENT = 60
    STATE_COVER_PAGE = 70
    STATE_SINGLE_FILE = 80
    STATE_MERGE_FILES = 90
    # NOTE(review): this second assignment overwrites the value 80 above —
    # one of the two STATE_SINGLE_FILE lines is presumably a typo; confirm
    # against upstream before relying on state numbers 80/100.
    STATE_SINGLE_FILE = 100
    STATE_SEND_FAX = 110
    STATE_CLEANUP = 120
    STATE_ERROR = 130

    next_recipient = self.next_recipient_gen()
    state = STATE_READ_SENDER_INFO
    self.rendered_file_list = []

    while state != STATE_DONE: # --------------------------------- Fax state machine
        if self.check_for_cancel():
            state = STATE_ABORTED

        log.debug("STATE=(%d, 0, 0)" % state)

        if state == STATE_ABORTED: # --------------------------------- Aborted (10, 0, 0)
            log.error("Aborted by user.")
            self.write_queue((STATUS_IDLE, 0, ''))
            state = STATE_CLEANUP

        elif state == STATE_SUCCESS: # --------------------------------- Success (20, 0, 0)
            log.debug("Success.")
            self.write_queue((STATUS_COMPLETED, 0, ''))
            state = STATE_CLEANUP

        elif state == STATE_ERROR: # --------------------------------- Error (130, 0, 0)
            log.error("Error, aborting.")
            self.write_queue((STATUS_ERROR, 0, ''))
            state = STATE_CLEANUP

        elif state == STATE_BUSY: # --------------------------------- Busy (25, 0, 0)
            log.error("Device busy, aborting.")
            self.write_queue((STATUS_BUSY, 0, ''))
            state = STATE_CLEANUP

        elif state == STATE_READ_SENDER_INFO: # --------------------------------- Get sender info (30, 0, 0)
            log.debug("%s State: Get sender info" % ("*"*20))
            state = STATE_PRERENDER
            try:
                try:
                    self.dev.open()
                except Error as e:
                    log.error("Unable to open device (%s)." % e.msg)
                    state = STATE_ERROR
                else:
                    try:
                        # Sender name/fax number are read from the device itself.
                        self.sender_name = self.dev.station_name
                        log.debug("Sender name=%s" % self.sender_name)
                        self.sender_fax = self.dev.phone_num
                        log.debug("Sender fax=%s" % self.sender_fax)
                    except Error:
                        log.error("HTTP GET failed!")
                        state = STATE_ERROR
            finally:
                self.dev.close()

        elif state == STATE_PRERENDER: # --------------------------------- Pre-render non-G4 files (40, 0, 0)
            log.debug("%s State: Pre-render non-G4 files" % ("*"*20))
            state = self.pre_render(STATE_COUNT_PAGES)

        elif state == STATE_COUNT_PAGES: # --------------------------------- Get total page count (50, 0, 0)
            log.debug("%s State: Get total page count" % ("*"*20))
            state = self.count_pages(STATE_NEXT_RECIPIENT)

        elif state == STATE_NEXT_RECIPIENT: # --------------------------------- Loop for multiple recipients (60, 0, 0)
            log.debug("%s State: Next recipient" % ("*"*20))
            state = STATE_COVER_PAGE
            try:
                recipient = next(next_recipient)
                log.debug("Processing for recipient %s" % recipient['name'])
                self.write_queue((STATUS_SENDING_TO_RECIPIENT, 0, recipient['name']))
            except StopIteration:
                # All recipients processed — finish the job.
                state = STATE_SUCCESS
                log.debug("Last recipient.")
                continue
            recipient_file_list = self.rendered_file_list[:]

        elif state == STATE_COVER_PAGE: # --------------------------------- Create cover page (70, 0, 0)
            log.debug("%s State: Render cover page" % ("*"*20))
            state = self.cover_page(recipient)

        elif state == STATE_SINGLE_FILE: # --------------------------------- Special case for single file (no merge) (80, 0, 0)
            log.debug("%s State: Handle single file" % ("*"*20))
            state = self.single_file(STATE_SEND_FAX)

        elif state == STATE_MERGE_FILES: # --------------------------------- Merge multiple G4 files (90, 0, 0)
            log.debug("%s State: Merge multiple files" % ("*"*20))
            state = self.merge_files(STATE_SEND_FAX)

        elif state == STATE_SEND_FAX: # --------------------------------- Send fax state machine (110, 0, 0)
            log.debug("%s State: Send fax" % ("*"*20))
            state = STATE_NEXT_RECIPIENT

            # Inner (per-recipient) SOAP fax protocol state machine.
            FAX_SEND_STATE_DONE = 0
            FAX_SEND_STATE_ABORT = 10
            FAX_SEND_STATE_ERROR = 20
            FAX_SEND_STATE_BUSY = 25
            FAX_SEND_STATE_SUCCESS = 30
            FAX_SEND_STATE_DEVICE_OPEN = 40
            FAX_SEND_STATE_BEGINJOB = 50
            FAX_SEND_STATE_DOWNLOADPAGES = 60
            FAX_SEND_STATE_ENDJOB = 70
            FAX_SEND_STATE_CANCELJOB = 80
            FAX_SEND_STATE_CLOSE_SESSION = 170

            monitor_state = False
            fax_send_state = FAX_SEND_STATE_DEVICE_OPEN

            while fax_send_state != FAX_SEND_STATE_DONE:

                if self.check_for_cancel():
                    log.error("Fax send aborted.")
                    fax_send_state = FAX_SEND_STATE_ABORT

                if monitor_state:
                    # Poll the device's transfer state while a download is active.
                    fax_state = self.getFaxDownloadState()
                    if not fax_state in (pml.UPDN_STATE_XFERACTIVE, pml.UPDN_STATE_XFERDONE):
                        log.error("D/L error state=%d" % fax_state)
                        fax_send_state = FAX_SEND_STATE_ERROR
                        state = STATE_ERROR

                log.debug("STATE=(%d, %d, 0)" % (STATE_SEND_FAX, fax_send_state))

                if fax_send_state == FAX_SEND_STATE_ABORT: # -------------- Abort (110, 10, 0)
                    monitor_state = False
                    fax_send_state = FAX_SEND_STATE_CANCELJOB
                    state = STATE_ABORTED

                elif fax_send_state == FAX_SEND_STATE_ERROR: # -------------- Error (110, 20, 0)
                    log.error("Fax send error.")
                    monitor_state = False
                    fax_send_state = FAX_SEND_STATE_CLOSE_SESSION
                    state = STATE_ERROR

                elif fax_send_state == FAX_SEND_STATE_BUSY: # -------------- Busy (110, 25, 0)
                    log.error("Fax device busy.")
                    monitor_state = False
                    fax_send_state = FAX_SEND_STATE_CLOSE_SESSION
                    state = STATE_BUSY

                elif fax_send_state == FAX_SEND_STATE_SUCCESS: # -------------- Success (110, 30, 0)
                    log.debug("Fax send success.")
                    monitor_state = False
                    fax_send_state = FAX_SEND_STATE_CLOSE_SESSION
                    state = STATE_NEXT_RECIPIENT

                elif fax_send_state == FAX_SEND_STATE_DEVICE_OPEN: # -------------- Device open (110, 40, 0)
                    log.debug("%s State: Open device" % ("*"*20))
                    fax_send_state = FAX_SEND_STATE_BEGINJOB
                    try:
                        self.dev.open()
                    except Error as e:
                        log.error("Unable to open device (%s)." % e.msg)
                        fax_send_state = FAX_SEND_STATE_ERROR
                    else:
                        if self.dev.device_state == DEVICE_STATE_NOT_FOUND:
                            fax_send_state = FAX_SEND_STATE_ERROR

                elif fax_send_state == FAX_SEND_STATE_BEGINJOB: # -------------- BeginJob (110, 50, 0)
                    log.debug("%s State: BeginJob" % ("*"*20))

                    try:
                        ff = open(self.f, 'rb')
                    except IOError:
                        log.error("Unable to read fax file.")
                        fax_send_state = FAX_SEND_STATE_ERROR
                        continue

                    try:
                        header = ff.read(FILE_HEADER_SIZE)
                    except IOError:
                        log.error("Unable to read fax file.")
                        fax_send_state = FAX_SEND_STATE_ERROR
                        continue

                    magic, version, total_pages, hort_dpi, vert_dpi, page_size, \
                        resolution, encoding, reserved1, reserved2 = self.decode_fax_header(header)

                    if magic != b'hplip_g3':
                        log.error("Invalid file header. Bad magic.")
                        fax_send_state = FAX_SEND_STATE_ERROR
                    else:
                        log.debug("Magic=%s Ver=%d Pages=%d hDPI=%d vDPI=%d Size=%d Res=%d Enc=%d" %
                                  (magic, version, total_pages, hort_dpi, vert_dpi, page_size, resolution, encoding))

                        job_id = self.job_id
                        delay = 0
                        faxnum = recipient['fax']
                        speeddial = 0

                        # NOTE(review): no else branch — an unexpected
                        # resolution value would leave `res` unbound; confirm
                        # the header can only carry these three values.
                        if resolution == RESOLUTION_STD:
                            res = "STANDARD"
                        elif resolution == RESOLUTION_FINE:
                            res = "FINE"
                        elif resolution == RESOLUTION_300DPI:
                            res = "SUPERFINE"

                        soap = utils.cat(
                            """<SOAP-ENV:Envelope xmlns:SOAP-ENV="http://schemas.xmlsoap.org/soap/envelope/"><SOAP-ENV:Body><Fax:BeginJob xmlns:Fax="urn:Fax"><ticket xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="Fax:Ticket"><jobId xmlns:xsd="http://www.w3.org/2001/XMLSchema" xsi:type="xsd:string">$job_id</jobId><resolution xsi:type="Fax:Resolution">$res</resolution><delay xmlns:xsd="http://www.w3.org/2001/XMLSchema" xsi:type="xsd:positiveInteger">$delay</delay><phoneNumber xmlns:xsd="http://www.w3.org/2001/XMLSchema" xsi:type="xsd:string">$faxnum</phoneNumber><speedDial xmlns:xsd="http://www.w3.org/2001/XMLSchema" xsi:type="xsd:positiveInteger">$speeddial</speedDial></ticket></Fax:BeginJob></SOAP-ENV:Body></SOAP-ENV:Envelope>""")

                        data = self.format_http(soap.encode('utf-8'))
                        log.log_data(data)
                        if log.is_debug():
                            open('beginjob.log', 'wb').write(data)

                        self.dev.openSoapFax()
                        self.dev.writeSoapFax(data)
                        # Drain the device's response in 8 KiB reads.
                        ret = BytesIO()

                        while self.dev.readSoapFax(8192, ret, timeout=5):
                            pass

                        ret = ret.getvalue()

                        if log.is_debug():
                            open('beginjob_ret.log', 'wb').write(ret)

                        log.log_data(ret)
                        self.dev.closeSoapFax()

                        if self.get_error_code(ret.decode('utf-8')) == HTTP_OK:
                            fax_send_state = FAX_SEND_STATE_DOWNLOADPAGES
                        else:
                            fax_send_state = FAX_SEND_STATE_ERROR

                elif fax_send_state == FAX_SEND_STATE_DOWNLOADPAGES: # -------------- DownloadPages (110, 60, 0)
                    log.debug("%s State: DownloadPages" % ("*"*20))
                    page = BytesIO()
                    for p in range(total_pages):

                        if self.check_for_cancel():
                            fax_send_state = FAX_SEND_STATE_ABORT

                        if fax_send_state == FAX_SEND_STATE_ABORT:
                            break

                        try:
                            header = ff.read(PAGE_HEADER_SIZE)
                        except IOError:
                            log.error("Unable to read fax file.")
                            fax_send_state = FAX_SEND_STATE_ERROR
                            continue

                        page_num, ppr, rpp, bytes_to_read, thumbnail_bytes, reserved2 = \
                            self.decode_page_header(header)

                        log.debug("Page=%d PPR=%d RPP=%d BPP=%d Thumb=%d" %
                                  (page_num, ppr, rpp, bytes_to_read, thumbnail_bytes))

                        if ppr != PIXELS_PER_LINE:
                            log.error("Pixels per line (width) must be %d!" % PIXELS_PER_LINE)

                        # Stage the page data in a reusable buffer.
                        page.write(ff.read(bytes_to_read))
                        thumbnail = ff.read(thumbnail_bytes) # thrown away for now (should be 0 read)
                        page.seek(0)

                        try:
                            data = page.read(bytes_to_read)
                        except IOError:
                            log.error("Unable to read fax file.")
                            fax_send_state = FAX_SEND_STATE_ERROR
                            break

                        if data == b'':
                            log.error("No data!")
                            fax_send_state = FAX_SEND_STATE_ERROR
                            break

                        height = rpp
                        job_id = self.job_id

                        soap = utils.cat(
                            """<SOAP-ENV:Envelope xmlns:SOAP-ENV="http://schemas.xmlsoap.org/soap/envelope/"><SOAP-ENV:Header><jobId xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="xsd:string" SOAP-ENV:mustUnderstand="1">$job_id</jobId></SOAP-ENV:Header><SOAP-ENV:Body><Fax:DownloadPage xmlns:Fax="urn:Fax"><height xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="xsd:positiveInteger">$height</height></Fax:DownloadPage></SOAP-ENV:Body></SOAP-ENV:Envelope>""")

                        # SOAP envelope + G4 page data travel as a DIME message.
                        m = dime.Message()
                        m.add_record(dime.Record(b"cid:id0", b"http://schemas.xmlsoap.org/soap/envelope/",
                            dime.TYPE_T_URI, to_bytes_utf8(soap)))

                        m.add_record(dime.Record(b"", b"image/g4fax", dime.TYPE_T_MIME, data))

                        output = BytesIO()
                        m.generate(output)
                        data = self.format_http(output.getvalue(), content_type="application/dime")
                        log.log_data(data)
                        if log.is_debug():
                            open('downloadpages%d.log' % p, 'wb').write(data)
                        try:
                            self.dev.writeSoapFax(data)
                        except Error:
                            fax_send_state = FAX_SEND_STATE_ERROR

                        ret = BytesIO()
                        try:
                            while self.dev.readSoapFax(8192, ret, timeout=5):
                                pass
                        except Error:
                            fax_send_state = FAX_SEND_STATE_ERROR

                        ret = ret.getvalue()

                        if log.is_debug():
                            open('downloadpages%d_ret.log' % p, 'wb').write(ret)

                        log.log_data(ret)
                        self.dev.closeSoapFax()

                        if self.get_error_code(ret.decode('utf-8')) != HTTP_OK:
                            fax_send_state = FAX_SEND_STATE_ERROR
                            break

                        # Reset the page buffer for the next page.
                        page.truncate(0)
                        page.seek(0)

                    else:
                        # for-else: all pages downloaded without a break.
                        fax_send_state = FAX_SEND_STATE_ENDJOB

                elif fax_send_state == FAX_SEND_STATE_ENDJOB: # -------------- EndJob (110, 70, 0)
                    log.debug("%s State: EndJob" % ("*"*20))

                    job_id = self.job_id

                    soap = utils.cat(
                        """<SOAP-ENV:Envelope xmlns:SOAP-ENV="http://schemas.xmlsoap.org/soap/envelope/"><SOAP-ENV:Header><jobId xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="xsd:string" SOAP-ENV:mustUnderstand="1">$job_id</jobId></SOAP-ENV:Header><SOAP-ENV:Body><Fax:EndJob xmlns:Fax="urn:Fax"><jobId xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="xsd:string">$job_id</jobId></Fax:EndJob></SOAP-ENV:Body></SOAP-ENV:Envelope>""")

                    data = self.format_http(soap.encode('utf-8'))
                    log.log_data(data)
                    if log.is_debug():
                        open('endjob.log', 'wb').write(data)

                    self.dev.writeSoapFax(data)
                    ret = BytesIO()

                    while self.dev.readSoapFax(8192, ret, timeout=5):
                        pass

                    ret = ret.getvalue()

                    if log.is_debug():
                        open('endjob_ret.log', 'wb').write(ret)

                    log.log_data(ret)
                    self.dev.closeSoapFax()

                    if self.get_error_code(ret.decode('utf-8')) == HTTP_OK:
                        fax_send_state = FAX_SEND_STATE_SUCCESS
                    else:
                        fax_send_state = FAX_SEND_STATE_ERROR

                elif fax_send_state == FAX_SEND_STATE_CANCELJOB: # -------------- CancelJob (110, 80, 0)
                    log.debug("%s State: CancelJob" % ("*"*20))

                    job_id = self.job_id

                    soap = utils.cat(
                        """<SOAP-ENV:Envelope xmlns:SOAP-ENV="http://schemas.xmlsoap.org/soap/envelope/"><SOAP-ENV:Header><jobId xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="xsd:string" SOAP-ENV:mustUnderstand="1">$job_id</jobId></SOAP-ENV:Header><SOAP-ENV:Body><Fax:CancelJob xmlns:Fax="urn:Fax"><jobId xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="xsd:string">$job_id</jobId></Fax:CancelJob></SOAP-ENV:Body></SOAP-ENV:Envelope>""")

                    data = self.format_http(soap.encode('utf-8'))
                    log.log_data(data)
                    if log.is_debug():
                        open('canceljob.log', 'wb').write(data)

                    self.dev.writeSoapFax(data)
                    ret = BytesIO()

                    while self.dev.readSoapFax(8192, ret, timeout=5):
                        pass

                    ret = ret.getvalue()

                    if log.is_debug():
                        open('canceljob_ret.log', 'wb').write(ret)

                    log.log_data(ret)
                    self.dev.closeSoapFax()

                    if self.get_error_code(ret.decode('utf-8')) == HTTP_OK:
                        fax_send_state = FAX_SEND_STATE_CLOSE_SESSION
                    else:
                        fax_send_state = FAX_SEND_STATE_ERROR

                elif fax_send_state == FAX_SEND_STATE_CLOSE_SESSION: # -------------- Close session (110, 170, 0)
                    log.debug("%s State: Close session" % ("*"*20))
                    log.debug("Closing session...")

                    # NOTE(review): `mm` is never assigned in this method, so
                    # this always hits NameError; looks like leftover cleanup
                    # from an older mmap-based implementation — confirm.
                    try:
                        mm.close()
                    except NameError:
                        pass

                    try:
                        ff.close()
                    except NameError:
                        pass

                    time.sleep(1)

                    self.dev.closeSoapFax()
                    self.dev.close()

                    fax_send_state = FAX_SEND_STATE_DONE # Exit inner state machine

        elif state == STATE_CLEANUP: # --------------------------------- Cleanup (120, 0, 0)
            log.debug("%s State: Cleanup" % ("*"*20))

            if self.remove_temp_file:
                log.debug("Removing merged file: %s" % self.f)
                try:
                    os.remove(self.f)
                    log.debug("Removed")
                except OSError:
                    log.debug("Not found")

            state = STATE_DONE # Exit outer state machine
def disassemble_all_code(self):
  """Disassemble the game's main DOL and every REL into annotated .asm files.

  Pipeline (order matters — later passes read files written by earlier ones):
    1. Extract each REL / main.dol to raw .bin files in <output>/disassemble.
    2. Parse every REL and its (demangled, if available) map file to collect symbols.
    3. Disassemble each .bin with objdump and annotate it with symbols/relocations.

  Raises:
    Exception: if devkitPPC's powerpc-eabi-objdump.exe is not installed at the
      expected hard-coded Windows path.
  """
  # objdump is invoked later by disassemble_file(); fail fast if it's missing.
  # NOTE(review): path is hard-coded for Windows devkitPro installs.
  if not os.path.isfile(r"C:\devkitPro\devkitPPC\bin\powerpc-eabi-objdump.exe"):
    raise Exception(r"Failed to disassemble code: Could not find devkitPPC. devkitPPC should be installed to: C:\devkitPro\devkitPPC")

  rels_arc = self.get_arc("files/RELS.arc")

  out_dir = os.path.join(self.randomized_output_folder, "disassemble")
  if not os.path.isdir(out_dir):
    os.mkdir(out_dir)

  # Prefer a pre-demangled framework map if one exists on disk; otherwise fall
  # back to the raw map shipped inside the GCM image.
  demangled_map_path = os.path.join(ASM_PATH, "maps-out", "framework.map.out")
  if os.path.isfile(demangled_map_path):
    with open(demangled_map_path, "rb") as f:
      framework_map_contents = BytesIO(f.read())
  else:
    framework_map_contents = self.gcm.read_file_data("files/maps/framework.map")
  framework_map_contents = read_all_bytes(framework_map_contents).decode("ascii")
  main_symbols = get_main_symbols(framework_map_contents)

  all_rel_paths = get_list_of_all_rels(self)
  files_to_disassemble = all_rel_paths.copy()
  files_to_disassemble.append("sys/main.dol")

  # Pass 1: dump every file's raw (decompressed) bytes to out_dir so the later
  # passes can read them from disk.
  for file_path_in_gcm in files_to_disassemble:
    basename_with_ext = os.path.basename(file_path_in_gcm)

    # RELs may live inside RELS.arc rather than as loose GCM files.
    rel_file_entry = rels_arc.get_file_entry(basename_with_ext)
    if rel_file_entry:
      rel_file_entry.decompress_data_if_necessary()
      data = rel_file_entry.data
    else:
      data = self.gcm.read_file_data(file_path_in_gcm)
      if Yaz0.check_is_compressed(data):
        data = Yaz0.decompress(data)

    basename, file_ext = os.path.splitext(basename_with_ext)
    bin_path = os.path.join(out_dir, basename_with_ext)
    with open(bin_path, "wb") as f:
      data.seek(0)
      f.write(data.read())

  # Pass 2: parse each REL (written by pass 1) and its map file to build
  # per-REL symbol tables.
  all_rels_by_path = OrderedDict()
  all_rel_symbols_by_path = OrderedDict()
  for file_path_in_gcm in all_rel_paths:
    basename_with_ext = os.path.basename(file_path_in_gcm)
    basename, file_ext = os.path.splitext(basename_with_ext)
    bin_path = os.path.join(out_dir, basename_with_ext)
    rel = REL()
    rel.read_from_file(bin_path)
    all_rels_by_path[file_path_in_gcm] = rel

    # Same demangled-map-first fallback as for framework.map above.
    demangled_map_path = os.path.join(ASM_PATH, "maps-out", basename + ".map.out")
    if os.path.isfile(demangled_map_path):
      with open(demangled_map_path, "rb") as f:
        rel_map_data = BytesIO(f.read())
    else:
      rel_map_data = self.gcm.read_file_data("files/maps/" + basename + ".map")
    rel_map_data.seek(0)
    rel_map_data = rel_map_data.read()

    # Copy the map file to the output directory
    rel_map_path = os.path.join(out_dir, basename + ".map")
    with open(rel_map_path, "wb") as f:
      f.write(rel_map_data)

    rel_map_data = rel_map_data.decode("ascii")
    all_rel_symbols_by_path[file_path_in_gcm] = get_rel_symbols(rel, rel_map_data)

  # Pass 3: disassemble every binary and annotate the resulting .asm.
  for file_path_in_gcm in files_to_disassemble:
    basename_with_ext = os.path.basename(file_path_in_gcm)
    print(basename_with_ext)  # progress indicator; disassembly is slow
    basename, file_ext = os.path.splitext(basename_with_ext)
    bin_path = os.path.join(out_dir, basename_with_ext)
    asm_path = os.path.join(out_dir, basename + ".asm")
    disassemble_file(bin_path, asm_path)

    is_rel = (file_ext == ".rel")
    if is_rel:
      add_relocations_and_symbols_to_rel(asm_path, bin_path, file_path_in_gcm, main_symbols, all_rel_symbols_by_path, all_rels_by_path)
    else:
      add_symbols_to_main(self, asm_path, main_symbols)
def parse(data, boundary):
    """Parse a multipart/form-data body into a MultiDict.

    Args:
        data: the raw request body as bytes.
        boundary: the boundary string from the Content-Type header (str).

    Returns:
        A MultiDict mapping field names to decoded string values, or to
        ``File(file_name, content_type, raw_bytes)`` for file uploads.

    Raises:
        ValueError: on any malformed input (missing boundary, bad headers,
            unexpected parameters, or truncated data).
    """
    boundary = boundary.encode()
    start = b'--' + boundary
    end = b'--' + boundary + b'--'
    # keepends=True so we can reconstruct the original line endings inside
    # multi-line field values.
    lines = iter(data.splitlines(True))
    data = MultiDict()
    line = next(lines).rstrip(b'\r\n')
    # An immediately-closing boundary means an empty form.
    if line == end:
        return data
    if line != start:
        raise ValueError('invalid form data: missing boundary')
    while True:
        # --- Part headers: read until the blank line separating headers
        # from the part body. Keys are lower-cased bytes.
        headers = {}
        while True:
            try:
                line = next(lines).rstrip(b'\r\n')
            except StopIteration:
                raise ValueError('invalid form data: unexpected end of data')
            if not line:
                break
            try:
                key, value = line.split(b':', 1)
            except ValueError:
                raise ValueError('invalid form data: expected header')
            headers[key.strip().lower()] = value.strip()
        # Content-Disposition is mandatory and must be form-data with a name.
        try:
            disposition, disposition_params = parse_content_disposition(
                headers.pop(b'content-disposition'))
        except KeyError:
            raise ValueError(
                'invalid form data: expected header Content-Disposition')
        except ValueError as e:
            raise ValueError(
                f'invalid form data: invalid Content-Disposition: {e}'
            ) from None
        if disposition != 'form-data':
            raise ValueError(
                'invalid form data: expected form-data Content-Disposition')
        try:
            name = disposition_params.pop('name')
        except KeyError:
            raise ValueError(
                'invalid form data: expected name in Content-Disposition')
        try:
            file_name = disposition_params.pop('filename')
        except KeyError:
            file_name = None
        if disposition_params:
            raise ValueError(
                'invalid form data: unexpected Content-Disposition param ' +
                next(iter(disposition_params)))
        # A filename implies a file upload, which must carry a Content-Type.
        if file_name is None:
            content_type = None
        else:
            try:
                content_type = headers.pop(b'content-type').decode()
            except KeyError:
                raise ValueError(
                    'invalid form data: expected header Content-Type')
        if headers:
            raise ValueError('invalid form data: unexpected header ' +
                             next(iter(headers)).decode())
        # --- Part body: accumulate lines until the next boundary. The line
        # ending *before* a boundary belongs to the boundary delimiter, so
        # each line's ending is only written once the following line proves
        # it is not a boundary.
        value = BytesIO()
        line_end = b''
        while True:
            try:
                line = next(lines)
            except StopIteration:
                raise ValueError('invalid form data: unexpected end of data')
            stripped_line = line.rstrip(b'\r\n')
            if stripped_line in (start, end):
                break
            value.write(line_end)
            value.write(stripped_line)
            line_end = line[len(stripped_line):]
        value = value.getvalue()
        if file_name is not None:
            value = File(file_name, content_type, value)
        else:
            value = value.decode()
        # (removed leftover debug print that leaked every field to stdout)
        data.appendlist(name, value)
        if stripped_line == end:
            break
    return data