def class_define_end(fp):
    """
    End of class definition
    :param fp:
    """
    text = CodeTemplate.java_template.get("class_definition_end")
    FileWriter.writeline_with_endl(fp, text)
def gen_package_name(fp):
    """
    Generate the package name
    :param fp:
    """
    FileWriter.writeline_with_endl(
        fp, CodeTemplate.java_template.get("package_name"))
def gen_copyright(fp):
    """
    Generate the copyright notice
    :param fp:
    """
    FileWriter.writeline_with_endl(
        fp, CodeTemplate.java_template.get("copy_right"))
def main(opts):
    """Main function"""
    # Every option is expected to be an existing directory path
    if not all(os.path.isdir(x) for x in vars(opts).values()):
        raise IOError("No such directory")

    # add a slash at the end of each directory
    slashIt = lambda x: x if x.endswith("/") else x + "/"
    path = slashIt(opts.path)
    pathConverted = slashIt(opts.pathConverted)
    pathManual = slashIt(opts.pathManual)

    manualReplacments = []
    fileNames = []
    for name in FileReader.getFileNames(path):
        # extract the file name from the path
        fileName = FileReader.getFileName(name)
        # get the file lines
        fileLines = FileReader.readFile(name)
        templateConverter = TemplateConverter(fileLines, name)
        # get the converted lines
        convertedLines = templateConverter.getFileLines()
        # collect lines that are inconvertible
        manualReplacments += templateConverter.irreversibleDataList
        fileNames += [x.fileName for x in templateConverter.irreversibleDataList]
        fileName = fileName + ".tmpl"
        # save the jinja2 template
        FileWriter.writeToFile(pathConverted + fileName, convertedLines)

    # save info about inconvertible templates
    print(str(len(set(fileNames))) +
          " file(s) need manual conversion. More information can be found in:\n" +
          pathManual + "manualConversions.txt")
    FileWriter.writeToFile(pathManual + "manualConversions.txt",
                           FileWriter.convertToString(manualReplacments))
def class_define_begin(fp, classname):
    """
    Start of class definition
    :param fp:
    :param classname:
    """
    text = CodeTemplate.java_template.get("class_definition") % classname
    FileWriter.writeline_with_endl(fp, text)
def json_serialize(fp):
    """
    Generate the serialization annotation
    :param fp:
    """
    if CodeTemplate.java_template.get("option_json_serialize"):
        text = CodeTemplate.java_template.get("json_serialize")
        FileWriter.writeline_with_endl(fp, text, 1)
def property_comment(fp, content=""):
    """
    Generate a property comment
    :param content: comment text
    :param fp: file object
    """
    if CodeTemplate.java_template.get("option_comment"):
        text = CodeTemplate.java_template.get("property_comment") % content
        FileWriter.writeline_with_endl(fp, text, 1)
def json_property(fp, field_name):
    """
    Generate the JSON property annotation
    :param fp:
    :param field_name:
    """
    if CodeTemplate.java_template.get("option_json_property"):
        text = CodeTemplate.java_template.get("json_property") % field_name
        FileWriter.writeline_with_endl(fp, text, 1)
def property_define(fp, type_name, field_name):
    """
    Define a property
    :param fp:
    :param type_name:
    :param field_name:
    """
    text = CodeTemplate.java_template.get("property_definition") % (
        type_name, field_name)
    FileWriter.writeline_with_endl(fp, text)
def function_comment(fp, param="", param_desc=""):
    """
    Generate a function comment
    :param param_desc: parameter description
    :param param: parameter name
    :param fp:
    """
    if CodeTemplate.java_template.get("option_comment"):
        text = CodeTemplate.java_template.get("function_comment") % (
            param, param_desc)
        FileWriter.writeline_with_endl(fp, text, 1)
def function_define_get(fp, type_name, field_name, field_name_cap):
    """
    Define a getter function
    :param fp:
    :param type_name:
    :param field_name:
    :param field_name_cap:
    """
    text = CodeTemplate.java_template.get("function_definition_get") % (
        type_name, field_name_cap, field_name)
    FileWriter.writeline_with_endl(fp, text)
def __init__(self, initial_vaccinated, initial_infected, initial_healthy,
             virus, resultsfilename):
    # Set up the initial simulation values
    self.virus = virus
    self.initial_infected = initial_infected
    self.initial_healthy = initial_healthy
    self.initial_vaccinated = initial_vaccinated
    self.population = []
    self.population_size = initial_infected + initial_healthy + initial_vaccinated
    self.total_dead = 0
    self.total_vaccinated = initial_vaccinated
    self.file_writer = FileWriter(resultsfilename)
def eventLogFile(self, eventLogFile):
    """Retrieves a single Event Log File and writes it to the appropriate directory

    Parameters
    ----------
    param: eventLogFile
        ex: {
            'LogFileLength': 5199.0,
            'EventType': 'API',
            'LogDate': '2016-11-22T00:00:00.000+0000',
            'attributes': {
                'url': '/services/data/v32.0/sobjects/EventLogFile/0ATr00000000TWHGA2',
                'type': 'EventLogFile'
            },
            'LogFile': '/services/data/v32.0/sobjects/EventLogFile/0ATr00000000TWHGA2/LogFile',
            'Id': '0ATr00000000TWHGA2'
        }

    Returns
    -------
    CSV containing the event log file
    """
    if self.accessToken == '':
        raise ValueError(
            'accessToken has not been set, run authenticate method to set token')

    eventFileId = eventLogFile['Id']
    headers = {
        'Authorization': 'Bearer ' + self.accessToken,
        'X-PrettyPrint': '1',
        'Accept-Encoding': 'gzip'
    }
    rawResponse = requests.get(
        'https://' + self.sfURL +
        '/services/data/v32.0/sobjects/EventLogFile/' + eventFileId + '/LogFile',
        headers=headers)

    if self.debug:
        print "[DEBUG] eventLogFile >> "
        print rawResponse
        print rawResponse.content

    # if self.log:
    #     w = FileWriter('log', eventFileId)
    #     w.writeFile(rawResponse.content)
    w = FileWriter(eventLogFile)
    w.writeFile(rawResponse.content)
    return rawResponse
def gen_import(fp, modules):
    """
    Generate import statements
    :param modules: list of modules to import
    :param fp:
    """
    for module in modules:
        if module == "list":
            text = "import java.util.List;"
        elif module == "date":
            text = "import java.util.Date;"
        elif module == "big_decimal":
            text = "import java.math.BigDecimal;"
        else:
            # skip unknown module keys instead of reusing a stale value
            continue
        FileWriter.writeline_with_endl(fp, text)
    for import_elem in CodeTemplate.java_template.get("default_import_list"):
        text = "import " + import_elem + ";"
        FileWriter.writeline_with_endl(fp, text)
    if CodeTemplate.java_template.get("option_json_property"):
        for module in CodeTemplate.import_json_property:
            text = "import " + module + ";"
            FileWriter.writeline_with_endl(fp, text)
    if CodeTemplate.java_template.get("option_json_serialize"):
        for module in CodeTemplate.import_json_serialize:
            text = "import " + module + ";"
            FileWriter.writeline_with_endl(fp, text)
def _state_filesize(self, opcode, data):
    # Sanity check the frame
    if opcode != self.OP_TEXT:
        raise ProtocolError("Received non-text frame while waiting for filesize",
                            "Invalid: -1")
    elif not data.startswith(b"Filesize: "):
        raise ProtocolError("Received invalid text while waiting for filesize",
                            "Invalid: -1")

    # Retrieve the file size
    filesize = data.decode("ascii").replace("Filesize: ", "", 1).strip()
    if not filesize.isdigit():
        raise ProtocolError("Invalid filesize received", "Invalid: -1")
    # Make sure the upload is to a permitted destination
    elif (self._permitted_upload_dir is not None and
          not self._url.filename.startswith(self._permitted_upload_dir)):
        raise ProtocolError("Invalid upload directory, upload denied",
                            "Not Permitted: -2")
    # Now check if the file exists
    elif not os.path.isfile(self._url.filename):
        # We're go for file upload!
        self._send_msg(b"Permitted")
        self._filewriter = FileWriter(self._url.filename, int(filesize))
    else:
        # File already exists - let the client know it messed up
        raise ProtocolError("File already exists, upload denied",
                            "Not Permitted: -1")
    return self.ST_FILESEGM
def __update_history(self):
    # Get the current time stamp.
    st = datetime.datetime.fromtimestamp(
        time.time()).strftime('%Y-%m-%d-%H:%M:%S')
    # Stamp the object
    self.data['timeStamp'] = str(st)
    # Get the history file related to the market we are using.
    history = FileReader.read_history(self.data['marketName'])
    # Add/update data in the history file
    history['result'] = StatsGiver.get_market_candles(
        self.data["marketName"], "DAY_1")
    history['data'].append(self.data)
    FileWriter.store_stats(
        history, Parser.reverse_trade_string(self.data['marketName']))
    time.sleep(1.1)
def test_write_file_FileNotFoundError(self):
    """
    Testing if FileNotFoundError is raised and handled correctly
    """
    # pass the callable and its arguments so assertRaises invokes it itself
    self.assertRaises(
        FileNotFoundError,
        FileWriter.write_tex_file, "SomeFile.txt", "a"
    )
def writeFeaturesToFile(sentence, filename):
    fileWriter = FileWriter(OUTPUT_PATH, filename)
    tagged = nltk.pos_tag(word_tokenize(sentence))
    for i in range(len(tagged)):
        # current word and POS tag
        feature = 'I\tcw={} ct={}'.format(tagged[i][0], tagged[i][1])
        # previous word/tag, or BOS at the start of the sentence
        if i == 0:
            feature = feature + ' pw=BOS pt=BOS'
        else:
            feature = feature + ' pw={} pt={}'.format(tagged[i - 1][0], tagged[i - 1][1])
        # next word/tag, or EOS at the end of the sentence
        if i == len(tagged) - 1:
            feature = feature + ' nw=EOS nt=EOS'
        else:
            feature = feature + ' nw={} nt={}'.format(tagged[i + 1][0], tagged[i + 1][1])
        fileWriter.writeToFile(feature)
        fileWriter.writeToFile("\n")
    print(tagged)
def update(self, no):
    if no < 0:
        print("Record is not a student!")
        return
    seekSummary = SeekOffsetCalculator.determineSeekValueAndOffset(
        self.studentFileIndex, no)
    jumpToSeekValue = seekSummary["jumpToSeekValue"]
    currentStudentOffset = seekSummary["currentRecordOffset"]
    firstPartLines = []
    lastPartLines = []
    with FileReader.open(self.filePath) as file:
        file.seek(0)
        firstPartLines = file.read(jumpToSeekValue).splitlines()
        print(firstPartLines)
        file.seek(currentStudentOffset + jumpToSeekValue)
        lastPartLines = file.read().splitlines()
        print(lastPartLines)
    with FileWriter.reset(self.filePath) as file:
        file.seek(0)
        if len(firstPartLines) > 0:
            content = ""
            for line in firstPartLines:
                content += line + "\n"
            file.writelines(content)
    with FileWriter.update(self.filePath) as file:
        file.seek(jumpToSeekValue)
        student = StudentRecorder.create()
        studentSummary = student.prepareSummaryAsWritableFormat()
        file.write(studentSummary)
        newStudentOffSetValue = len(studentSummary)
        self.studentFileIndex[student.no] = newStudentOffSetValue
        print("Öğrenci başarıyla güncellendi.")  # "Student updated successfully."
    with FileWriter.open(self.filePath) as file:
        content = ""
        for line in lastPartLines:
            content += line + "\n"
        file.writelines(content)
def testNormal(self):
    with FileWriter(self.TEST_FILE) as f:
        f.WriteLn("a").IncIndent()
        f.WriteLn("b").DecIndent().WriteLn("c")
    with open(self.TEST_FILE) as file:
        lines = file.readlines()
    self.assertEqual("a\n", lines[0])
    self.assertEqual("\tb\n", lines[1])
    self.assertEqual("c\n", lines[2])
def run_latency(warmup_time_s, upstream_ip, upstream_port, duration_s, ssl):
    switch = Switch(DPID("00:00:00:00:00:00:00:01"), warmup_time_s,
                    upstream_ip, upstream_port, 1, False, ssl)
    switch.start()
    time.sleep(duration_s)
    print("stop!")
    switch.stop()
    FileWriter().write_results_to_file(switch.get_results())
def test_read_file_ValueError(self):
    """
    Testing if ValueError is raised and handled correctly
    when trying to write with a wrong mode
    :return:
    """
    # pass the callable and its arguments so assertRaises invokes it itself
    self.assertRaises(
        ValueError,
        FileWriter.write_tex_file, "SomeFile.txt", "ysa"
    )
def write(self, lesson):
    try:
        with FileWriter.open(self.filePath) as file:
            lessonSummary = lesson.prepareSummaryAsWritableFormat()
            file.write(lessonSummary)
            newLessonOffSetValue = len(lessonSummary)
            self.lessonFileIndex[lesson.code] = newLessonOffSetValue
            print("Ders başarıyla kaydedildi.")  # "Lesson saved successfully."
    except Exception as e:
        print(f"Hata: {e}")  # "Error: ..."
def rollback_write(self, path, name):
    """
    Rollback a filesystem write. If the file does not exist, fail silently.

    Arguments:
    path<string> -- Absolute filesystem path to file.
    name<string> -- Image name.

    Returns:
    True if the image was successfully removed, else False.
    """
    result = False
    writer = FileWriter()
    if writer.file_exists(path + name):
        writer.unwrite(path, name)
        print 'Rolled back write of %s at %s.' % (name, path)
        result = True
    return result
def __init__(self, master):
    self.master = master
    master.title("Dialogue Editor")
    master.geometry("1024x768")
    master.rowconfigure(1, weight=1)
    master.columnconfigure(1, weight=1)
    self._setupMenuBar(master)
    self.writer = FileWriter()
    Content.initData()
    Content.mutateEvent.append(self.refreshViews)
    self.toprow = TopRowMenu(master)
    self.paneltree = PanelTree(master)
    self.paneltext = PanelText(master)
    self.paneldetails = PanelDetails.PanelDetails(master)
    self.refreshViews()
def testRemoveFromLastLine(self):
    with FileWriter(self.TEST_FILE) as f:
        f.WriteLn("abc")
        f.WriteLn("def")
        f.RemoveFromLastLine(1)
        f.WriteLn("123")
    with open(self.TEST_FILE) as file:
        lines = file.readlines()
    self.assertEqual("abc\n", lines[0])
    self.assertEqual("de\n", lines[1])
    self.assertEqual("123\n", lines[2])
def update(self, code):
    print(self.lessonFileIndex)
    seekSummary = SeekOffsetCalculator.determineSeekValueAndOffset(
        self.lessonFileIndex, code)
    jumpToSeekValue = seekSummary["jumpToSeekValue"]
    currentLessonOffset = seekSummary["currentRecordOffset"]
    firstPartLines = []
    lastPartLines = []
    with FileReader.open(self.filePath) as file:
        file.seek(0)
        firstPartLines = file.read(jumpToSeekValue).splitlines()
        print(firstPartLines)
        file.seek(currentLessonOffset + jumpToSeekValue)
        lastPartLines = file.read().splitlines()
        print(lastPartLines)
    with FileWriter.reset(self.filePath) as file:
        file.seek(0)
        if len(firstPartLines) > 0:
            content = ""
            for line in firstPartLines:
                content += line + "\n"
            file.writelines(content)
    with FileWriter.update(self.filePath) as file:
        file.seek(jumpToSeekValue)
        lesson = LessonRecorder.create()
        lessonSummary = lesson.prepareSummaryAsWritableFormat()
        file.write(lessonSummary)
        newLessonOffSetValue = len(lessonSummary)
        self.lessonFileIndex[lesson.code] = newLessonOffSetValue
        print("Ders başarıyla güncellendi.")  # "Lesson updated successfully."
    with FileWriter.open(self.filePath) as file:
        content = ""
        for line in lastPartLines:
            content += line + "\n"
        file.writelines(content)
def __build_dictionary(self):
    print('Building dictionary')
    dict_words = []
    i = 0
    for text in self.data:
        i += 1
        print("FeatureExtraction.__build_dictionary(): Step {} / {}".format(
            i, len(self.data)))
        words = NLP(text=text['content']).get_words_feature()
        dict_words.append(words)
    FileWriter(filePath=Settings.DICTIONARY_PATH).store_dictionary(dict_words)
def write(self, student):
    if student is None:
        return
    try:
        with FileWriter.open(self.filePath) as file:
            studentSummary = student.prepareSummaryAsWritableFormat()
            file.write(studentSummary)
            newStudentOffSetValue = len(studentSummary)
            self.studentFileIndex[student.no] = newStudentOffSetValue
            print("Öğrenci başarıyla kaydedildi.")  # "Student saved successfully."
    except Exception as e:
        print(f"Hata: {e}")  # "Error: ..."
def DriveScrape():
    seedUrl = 'https://in.bookmyshow.com/national-capital-region-ncr/movies/'
    bookTicketsUrl = 'https://in.bookmyshow.com/buytickets/{}-national-capital-region-ncr/movie-ncr-{}-MT/{}'
    # bookTicketsUrl = 'https://in.bookmyshow.com/buytickets/{movieNameFromUrl}-national-capital-region-ncr/movie-ncr-{MovieId}}-MT/{TodaysDate}'
    l = GetMovieNameAndIdList(seedUrl)
    x = l.GetList()

    fw = FileWriter()
    fw.WriteScapeBMSFirstTimeFileForListOfMovies(x[0], x[1])

    details = GetFirstLevelDetails(seedUrl, x[0])
    movieInfoList = details.FillInMovieInfoDetails()
    fw = FileWriter()
    fw.WriteFirstLevelDetailsToCSV(movieInfoList, x[1])

    showDetails = GetShowSeatsMultiPlexDetails(movieInfoList, x[1], bookTicketsUrl)
    venueAndShowTimeInfoLists = showDetails.FillIntheShowAndMultiPlexDetails()
    fw = FileWriter()
    fw.WriteVenueList(venueAndShowTimeInfoLists[0], x[1])
    fw.WriteShowTimeInfoList(venueAndShowTimeInfoLists[1], x[1])
def write_blob(self, blob, path, name):
    """
    Write the image blob and a thumbnail to the filesystem.

    Arguments:
    blob<string> -- Image blob.
    path<string> -- Absolute filesystem path to write to.
    name<string> -- Image name.

    Returns:
    True if the image was successfully written, else False.
    """
    result = False
    writer = FileWriter()
    if not writer.file_exists(path + name):
        writer.write(blob, path, name)
        print 'Wrote %s to %s.' % (name, path)
        result = True
    else:
        print '%s already exists at %s. Skipping write.' % (name, path)
    return result
def __init__(self):
    config = ConfigParser()
    self.client_ID = config.__getattr__('client_id')
    self.client_Secret = config.__getattr__('client_secret')
    self.userName = config.__getattr__('username')
    self.password = config.__getattr__('password')
    self.subReddit = config.__getattr__('subreddit')
    self.limit = config.__getattr__('limit')
    self.logFileName = config.__getattr__('logfile')
    self.userFileName = config.__getattr__('userfile')
    self.start_duration = config.__getattr__('start_duration')
    self.end_duration = config.__getattr__('end_duration')
    self.pushShiftAPI = config.__getattr__('pushshiftapi')
    self.logger = Logger(self.logFileName)
    self.fileWriter = FileWriter(self.userFileName)
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument("-f", "--file",
                        help="Please Enter the file name with -f \n")
    parser.add_argument("-l", "--label",
                        help="Please Enter the number of labels with -l \n",
                        type=int, action=LabelValidator)

    # Read arguments from the command line
    args = parser.parse_args()
    if args.file and args.label:
        # Create the mesh generator object
        meshGenerator = MeshGenerator()
        # Read the file from the directory
        rawData = meshGenerator.readFile(args.file)
        # Process the raw data into a 3D numpy array
        processedData = meshGenerator.processData(rawData)
        # Encode the labels in the volume data
        encodedData, actualLabels = meshGenerator.faceMaker.encodeDataWithLabels(
            processedData, args.label)
        # Check the neighbors and make the faces
        meshGenerator.makeFace(encodedData, actualLabels)
        # Create the writer for the .obj and material files
        writer = FileWriter()
        # Write the material file
        writer.writeMaterialFile(len(actualLabels))
        # Write the .obj files
        writer.writeObjectFile(meshGenerator.verticeList, meshGenerator.faceList)
        print("Successful")
    else:
        usage()
        sys.exit(2)
import matplotlib.pyplot as plt
from FileWriter import FileWriter
from PathMethods import figuresFileName
import os
import sys

vi = sys.version_info
if vi[0] != 2 or vi[1] != 6:
    print "ERROR: python interpreter of wrong version. Requires 2.6.x"
    raise SystemExit
print sys.argv

##-------------------------------------------
from YingRongSpikeInMetArray import DataArray

filepath = sys.argv[1]
denatsFile = sys.argv[2]
pictureFolder = figuresFileName(filepath)
fw = FileWriter(filepath, customFileName="")

##-------------------------------------------
# If a third argument is passed, generate figures.
# Figure generation is slow, so it is often best to skip it,
# as fitting is a <5 second operation.
runFigs = False
try:
    sys.argv[3]
    runFigs = True
    # Check if pictureFolder exists; if not, create it
    if not os.path.exists(pictureFolder):
        os.makedirs(pictureFolder)
except IndexError:
    pass
def printLogToFile(self, filename):
    fw = FileWriter(filename)
    fw.appendALine(self.log)
def scan_map(map_path, resource_dir=None):
    """ Scan and process the given .map file """
    print "scan map: " + map_path
    collect_images, map_width, map_height, lp, rp = read_map(map_path)
    print collect_images

    if resource_dir != None:
        for image_info in collect_images:
            image_name = image_info["image"]
            dir_name = image_name.split("_")[0]
            image_path = os.path.join(resource_dir, dir_name, image_name)
            image_info["image"] = image_path
    print collect_images

    collect_image, lt, rb = splice_images(collect_images, map_width, map_height)

    out_path = os.path.join(os.path.dirname(map_path), "data")
    if os.path.exists(out_path):
        os.remove(out_path)
    out_file = FileWriter(out_path)

    # Overall width and height of the map
    out_file.WriteInt(map_width)
    out_file.WriteInt(map_height)

    # Width and height of the destructible area, given within the stitched image
    rect = Rect((0, 0, collect_image.size[0], collect_image.size[1]))
    if lt.x < 0:
        rect.x0 = -lt.x
    if lt.y < 0:
        rect.y0 = -lt.y
    if rb.x > map_width:
        rect.x1 = collect_image.size[0] - (rb.x - map_width)
    if rb.y > map_height:
        rect.y1 = collect_image.size[1] - (rb.y - map_height)
    print "destroy rect: " + str(rect)
    print "LT: " + str(lt) + " RB: " + str(rb) + \
        " collect WH: " + str(collect_image.size[0]) + " " + str(collect_image.size[1])

    # Write the size of the destructible area
    out_file.WriteInt(rect.width)
    out_file.WriteInt(rect.height)

    # Coordinates of the destructible area:
    # position of the destructible/scan area's bottom-left point
    # relative to the bottom-left point of the whole map
    if lt.x < 0:
        left = 0
    else:
        left = lt.x
    if rb.y > map_height:
        right = 0
    else:
        right = map_height - rb.y
    out_file.WriteInt(left)
    out_file.WriteInt(right)

    crop_image = collect_image.crop(rect.get_rect())
    # crop_image.save("out.png")
    print crop_image
    write_collect_bits(out_file, crop_image, 1)

    # Player positioning data
    # if len(lp) != 0 or len(rp) != 0:
    #     out_file.WriteInt(1)
    #
    #     out_file.WriteInt(len(lp))
    #     for p in lp:
    #         out_file.WriteInt(p.x)
    #         out_file.WriteInt(p.y)
    #
    #     out_file.WriteInt(len(rp))
    #     for p in rp:
    #         out_file.WriteInt(p.x)
    #         out_file.WriteInt(p.y)
    # else:
    #     out_file.WriteInt(0)

    out_file.close()
class Websocket():
    """Handle a websocket connection"""

    # TODO: Make opcode its own object?
    # These are the possible opcodes
    OP_CONT = 0x0  # Continuation
    OP_TEXT = 0x1  # Text frame
    OP_BIN = 0x2   # Binary frame
    OP_CLOS = 0x8  # Close request
    OP_PING = 0x9  # Ping frame
    OP_PONG = 0xA  # Pong frame
    OP_OTH = 0xF   # Other - reserved

    # These are the receiver states
    ST_FILESIZE = "wait for filesize"
    ST_FILESEGM = "waiting for file segment metadata"
    ST_FILESEGD = "waiting for file segment data"
    ST_CLOSE = "close the connection"
    ST_FINISH = "file finish command received"

    def __init__(self, conn, addr, url, settings):
        self._conn = conn
        self._addr = addr
        self._url = url
        self._permitted_upload_dir = None
        if "uploaddir" in settings.keys():
            # Make sure there's one / at the end of the directory
            self._permitted_upload_dir = settings["uploaddir"].rstrip("/ ") + "/"
        self._filewriter = None
        self._curbuf = bytearray()  # Store the buffer of received bytes

    def handle_websocket(self):
        """Handle a websocket connection"""
        self._send_header()
        state = self.ST_FILESIZE  # First, wait for the file size
        # Handle file upload. Close connection nicely in any case
        try:
            while state != self.ST_FINISH:
                opcode, data = self._get_frame()
                if opcode == self.OP_PING:
                    # Handle a ping in any state. Don't do anything else with the data
                    self._send_msg(data, self.OP_PONG)
                elif state == self.ST_FILESIZE:
                    state = self._state_filesize(opcode, data)
                elif state == self.ST_FILESEGM:
                    state = self._state_filesegm(opcode, data)
                elif state == self.ST_FILESEGD:
                    state = self._state_filesegd(opcode, data)
                else:
                    raise StateError("Invalid state")
            # ST_FINISH state
            self._filewriter.finish()
        except (ProtocolError, StateError) as e:
            logging.error("%s: %s", self._addr, e.msg)
            if e.response is not None:
                self._send_msg(e.response.encode())
        else:
            self._send_msg(b"Finished")
        finally:
            self._send_msg(None, self.OP_CLOS)

    def handle_invalid_url(self):
        """Handle a websocket request for an invalid URL."""
        self._send_header()
        self._send_msg("Invalid: -1")
        self._send_msg(None, self.OP_CLOS)

    # Send the header responding to the websocket connection request
    def _send_header(self):
        header = ("HTTP/1.1 101 Switching Protocols\r\nUpgrade: websocket\r\n"
                  "Connection: Upgrade\r\nSec-WebSocket-Accept: " +
                  self._url.accept_key + "\r\n\r\n")
        self._conn.sendall(header.encode())

    # Handle the ST_FILESIZE state
    def _state_filesize(self, opcode, data):
        # Sanity check the frame
        if opcode != self.OP_TEXT:
            raise ProtocolError("Received non-text frame while waiting for filesize",
                                "Invalid: -1")
        elif not data.startswith(b"Filesize: "):
            raise ProtocolError("Received invalid text while waiting for filesize",
                                "Invalid: -1")

        # Retrieve the file size
        filesize = data.decode("ascii").replace("Filesize: ", "", 1).strip()
        if not filesize.isdigit():
            raise ProtocolError("Invalid filesize received", "Invalid: -1")
        # Make sure the upload is to a permitted destination
        elif (self._permitted_upload_dir is not None and
              not self._url.filename.startswith(self._permitted_upload_dir)):
            raise ProtocolError("Invalid upload directory, upload denied",
                                "Not Permitted: -2")
        # Now check if the file exists
        elif not os.path.isfile(self._url.filename):
            # We're go for file upload!
self._send_msg(b"Permitted") self._filewriter = FileWriter(self._url.filename, int(filesize)) else: # File already exists - let the client know it messed up raise ProtocolError("File already exists, upload denied", "Not Permitted: -1") return self.ST_FILESEGM # Handle the ST_FILESEGM state def _state_filesegm(self, opcode, data): if opcode != self.OP_TEXT: raise ProtocolError("Received non-text frame while waiting " "for segment metadata", "Invalid: -1") state = self.ST_FILESEGM if data.startswith(b"File Finish"): # The sender indicated that the file is done state = self.ST_FINISH elif data.startswith(b"Segment Start:"): #TODO: Parse out, verify, the segment start and finish data here # A file data segment will follow state = self.ST_FILESEGD else: raise ProtocolError("Received invalid segment metadata", "Invalid: -1") return state # Handle the ST_FILESEGD state def _state_filesegd(self, opcode, data): if opcode != self.OP_BIN and opcode != self.OP_TEXT: raise ProtocolError("Invalid opcode while waiting for file data", "Invalid: -1") if opcode == self.OP_TEXT: # In this case, the data is base64 encoded. Decode it data = base64.b64decode(bytes(data)) self._filewriter.append(data) if self._filewriter.test_size() > 0: raise ProtocolError("Received data after file met specified size", "Segment Error: -1") return self.ST_FILESEGM # Send a message over the websocket connection def _send_msg(self, data, opcode=None): if opcode == None: opcode = self.OP_TEXT # Start with a final message, blank opcode, no masking, 0 data length frame = bytearray(b"\x80\x00") # Place the opcode frame[0] = frame[0] | opcode # Place the size of the data segment if data is None: pass elif len(data) < 126: frame[1] = frame[1] | len(data) elif len(data) < 65535: frame[1] = frame[1] | 126 packed = struct.pack(">I", len(data)) frame.extend(packed) else: frame[1] = frame[1] | 127 packed = struct.pack(">Q", len(data)) frame.extend(packed) # Append the data if data is not None: frame.extend(data) # Send the frame self._conn.sendall(frame) # Receive a chunk of data into the buffer. 
    # Get at least "amount" of data
    def _recv_data(self, amount, maxtimeouts=2, maxtries=1000):
        tocount = 0
        tries = 0
        while (len(self._curbuf) < amount and tocount < maxtimeouts and
               tries < maxtries):
            tries += 1
            try:
                self._curbuf.extend(bytearray(self._conn.recv(1024 * 256)))
            except socket.error:
                tocount += 1
        if len(self._curbuf) < amount:
            raise ProtocolError("Socket timeout waiting for frame header",
                                "Timeout: -1")

    # Return a byte array representing one single websocket frame
    def _get_frame(self):
        # TODO: handle fin set case - namely, if it's not set
        # Receive the header
        self._recv_data(2)
        headerlen = self._get_headerlen(self._curbuf)
        self._recv_data(headerlen)
        datalen = self._get_datalen(self._curbuf)
        framesize = headerlen + datalen
        # Receive the entire frame
        self._recv_data(framesize)
        frame = self._curbuf[0:framesize]
        del self._curbuf[0:framesize]
        opcode = self._get_opcode(frame)
        data = frame[headerlen:]
        # If there's a mask bit, unmask the frame
        if self._test_mask(frame):
            maskpos = headerlen - 4
            mask = frame[maskpos:(maskpos + 4)]
            for i, byte in enumerate(data):
                data[i] = byte ^ mask[i % 4]
        else:
            raise ProtocolError("Mask bit was not set", "Invalid: -1")
        return opcode, data

    # Return true if the mask bit is set
    def _test_mask(self, data):
        if len(data) >= 2 and data[1] & 0x80:
            return True
        return False

    # Return true if the fin bit is set
    def _test_fin(self, data):
        if len(data) >= 1 and data[0] & 0x80:
            return True
        return False

    # Return true if the specified rsv bit is set
    def _test_rsv(self, data, index=1):
        if (len(data) >= 1 and index > 1 and index < 3 and
                data[0] & (0x80 >> index)):
            return True
        return False

    # Return the value in the length field
    def _get_length_field(self, data):
        if len(data) < 2:
            raise ProtocolError("Invalid header", "Invalid: -1")
        # Remove the mask bit...
        return int(data[1] & 0x7f)

    # Determine the frame's opcode
    def _get_opcode(self, data):
        if len(data) < 1:
            raise StateError("Not enough data to get opcode")
        val = data[0] & 0x0f  # Remove the fin and rsv bits
        opcode = self.OP_OTH
        if val == self.OP_CONT:
            opcode = self.OP_CONT
        elif val == self.OP_TEXT:
            opcode = self.OP_TEXT
        elif val == self.OP_BIN:
            opcode = self.OP_BIN
        elif val == self.OP_CLOS:
            opcode = self.OP_CLOS
        elif val == self.OP_PING:
            opcode = self.OP_PING
        elif val == self.OP_PONG:
            opcode = self.OP_PONG
        else:
            opcode = self.OP_OTH
        return opcode

    # Determine the frame's length based on the header
    def _get_datalen(self, data):
        length = self._get_length_field(data)
        # The length field has magic values 126 and 127. Handle them
        if length == 126:
            if len(data) < 4:
                raise ProtocolError("Invalid header", "Invalid: -1")
            # Unpack bytes 2 and 3 into an integer. Unpack returns a tuple
            length, = struct.unpack(">H", bytes(data[2:4]))
        elif length == 127:
            if len(data) < 10:
                raise ProtocolError("Invalid header", "Invalid: -1")
            # Unpack bytes 2 thru 9 into an integer. Unpack returns a tuple
            length, = struct.unpack(">Q", bytes(data[2:10]))
        return length

    # Determine the header's length based on the header
    def _get_headerlen(self, data):
        length = self._get_length_field(data)
        headerlen = 2
        # The length field has magic values 126 and 127. Handle them
        if length == 126:
            headerlen = 4   # Length field is 3 bytes
        elif length == 127:
            headerlen = 10  # Length field is 9 bytes
        if self._test_mask(data):
            headerlen += 4  # Compensate for mask size
        return headerlen
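For context on the handshake this class relies on: the `accept_key` that `_send_header` reads from `self._url` is, per RFC 6455, the base64-encoded SHA-1 digest of the client's `Sec-WebSocket-Key` header concatenated with a fixed GUID. Below is a minimal sketch of that derivation; the `compute_accept_key` helper and its caller are illustrative assumptions, not part of the class above, which receives the value pre-computed on its `url` object.

import base64
import hashlib

# RFC 6455, section 4.2.2: fixed GUID appended to the client's Sec-WebSocket-Key
WS_MAGIC_GUID = "258EAFA5-E914-47DA-95CA-C5AB0DC85B11"


def compute_accept_key(sec_websocket_key):
    """Derive the Sec-WebSocket-Accept value for a handshake response.

    `sec_websocket_key` is the value of the client's Sec-WebSocket-Key
    request header, as a str (hypothetical helper for illustration).
    """
    digest = hashlib.sha1((sec_websocket_key + WS_MAGIC_GUID).encode("ascii")).digest()
    return base64.b64encode(digest).decode("ascii")


# Example from RFC 6455: "dGhlIHNhbXBsZSBub25jZQ==" -> "s3pPLMBiTxaQ9kYGzzhZRbK+xOo="
# print(compute_accept_key("dGhlIHNhbXBsZSBub25jZQ=="))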