def map(cls, src_path, drive_letter=None, force=False):
    """Map a remote path onto a local drive letter.

    Args:
        src_path: the remote path to map.
        drive_letter: target letter; when None, reuse the current mapping's
            letter or pick any free one.
        force: when True, silence the logger and skip user confirmation.

    Returns:
        SUCCESS, NOT_REMOTE_PATH_ERROR or MAP_DRIVE_FAILURE.

    Raises:
        CustomError: no free drive letter, or mapping ultimately failed.
    """
    # Not remote path, do not need map
    if not is_remote_path(src_path):
        logger.warn(src_path + " is not a remote path, do not need to map.")
        return NOT_REMOTE_PATH_ERROR
    # Disable the logger if choose to force mapping
    if force:
        logger.disable()
    try:
        # Use the available drive letter
        if drive_letter is None:
            if cls.has_been_mapped():
                drive_letter = cls.__cur_drive_letter
            else:
                drive_letter = cls._obtain_available_letter()
            if drive_letter is None:
                raise CustomError("No available drive letter!",
                                  NO_AVAILABLE_DRIVE_LETTER)
        # Try to map the drive if it has not been mapped by me
        result = cls.try_to_map(
            src_path,
            drive_letter) if not cls.has_been_mapped() else MAP_DRIVE_FAILURE
        # If failed, try to cancel the current drive mapping
        if result != SUCCESS:
            confirm = True
            if not force:
                confirm = call_user_confirm(
                    "Mapping drive " + drive_letter +
                    " failed, do you wanna try to force cancelling it?")
            if confirm:
                result = cls.try_to_cancel_mapping(drive_letter)
            else:
                return MAP_DRIVE_FAILURE
            # If succeed to cancel the mapping, try to map the drive again
            if result == SUCCESS:
                result = cls.try_to_map(src_path, drive_letter)
            # If still failed, raise an error
            if result != SUCCESS:
                raise CustomError("Mapping drive " + drive_letter + " failed!",
                                  MAP_DRIVE_FAILURE)
        return SUCCESS
    finally:
        # BUGFIX: re-enable the logger on EVERY exit path. The original only
        # re-enabled it on the success path, so a CustomError (or the
        # MAP_DRIVE_FAILURE early return) left logging permanently disabled
        # whenever force=True.
        if force:
            logger.enable()
def parse_config(self):
    """Load and validate the JSON config file at ``self.cfg_file_path``.

    Populates ``self.cfg_info`` and verifies every key listed in
    ``CocosLibsCompiler.CHECK_KEYS`` is present.

    Raises:
        CustomError: file missing (ERROR_PATH_NOT_FOUND), unparsable
            (ERROR_PARSE_FILE), or a required key absent (ERROR_WRONG_CONFIG).
    """
    if not os.path.isfile(self.cfg_file_path):
        raise CustomError('%s is not a valid config file.' % self.cfg_file_path,
                          CustomError.ERROR_PATH_NOT_FOUND)
    try:
        # BUGFIX: `with` guarantees the handle is closed even when json.load
        # raises (the original leaked the handle on a parse error), and the
        # bare `except:` is narrowed so KeyboardInterrupt/SystemExit are no
        # longer swallowed.
        with open(self.cfg_file_path) as f:
            self.cfg_info = json.load(f)
    except Exception:
        raise CustomError('Parse %s failed.' % self.cfg_file_path,
                          CustomError.ERROR_PARSE_FILE)
    for k in CocosLibsCompiler.CHECK_KEYS:
        # Membership test on the dict directly instead of .keys().
        if k not in self.cfg_info:
            raise CustomError('%s is not found in %s' % (k, self.cfg_file_path),
                              CustomError.ERROR_WRONG_CONFIG)
def do_compile(self):
    """Compile the simulator for ``self.platform`` ('all' builds everything).

    Raises:
        CustomError: the requested platform is not supported on this OS.
    """
    if self.platform == 'all':
        self.compile_all()
        return

    # Resolve the platform list the current operating system supports.
    if utils_cocos.os_is_mac():
        supported = SimulatorCompiler.SUPPORT_PLATFORMS['mac']
    elif utils_cocos.os_is_win32():
        supported = SimulatorCompiler.SUPPORT_PLATFORMS['win']
    else:
        supported = SimulatorCompiler.SUPPORT_PLATFORMS['other']

    if self.platform not in supported:
        raise CustomError('%s is not support in current system.' % self.platform,
                          CustomError.ERROR_WRONG_ARGS)

    # Dispatch table replaces the original if/elif chain.
    builders = {
        'win32': self.compile_for_win32,
        'android': self.compile_for_android,
        'ios': self.compile_for_ios,
        'mac': self.compile_for_osx,
    }
    builder = builders.get(self.platform)
    if builder is not None:
        builder()
def _queryRouteList(self, categoryType=None):
    """POST the queryRouteList API and return the response body text.

    Args:
        categoryType: accepted for interface compatibility; not sent
            by the current implementation.

    Returns:
        str: response body on HTTP 200.

    Raises:
        CustomError: non-200 response.
        Exception: any transport error from requests (re-raised).
    """
    timestamp = str(int(time.time()))
    # BUGFIX: the original read "'×tamp='" -- an HTML-entity mangling of
    # "&timestamp=" -- which corrupted the string being signed.
    signParam = 'nonceStr=' + self._nonceStr + '&timestamp=' + timestamp + '&key=' + self._key
    signValue = hashlib.md5(signParam.encode('utf-8')).hexdigest().upper()
    data = {
        'nonceStr': self._nonceStr,
        'timestamp': timestamp,
        'sign': signValue,
    }
    try:
        response = requests.post(url=self._url_demo + self._url_queryRouteList,
                                 data=data)
        if response.status_code == 200:
            return response.text
        print('请求失败', response.status_code)
        print(response.text)
        # NOTE: this raise is caught by the except below (prints '请求错误')
        # and then re-raised, matching the original control flow. The dead
        # `return None` / `return e.args` statements were removed.
        raise CustomError(
            sys._getframe().f_code.co_name +
            'code {}, {}'.format(response.status_code, response.text))
    except Exception as e:
        print('请求错误')
        raise e
def get_user_entrust_from_cache(market_name):
    """Fetch the user's cached entrust list for the given market.

    Raises:
        CustomError: the market name is unknown.
    """
    market_id = get_market_id_by_name(market_name)
    if not market_id:
        raise CustomError(market_name + '市场不存在')
    endpoint = config_params.EXCHANGE_HOST + config_params.API_GET_USER_ENTRUST_FROM_CACHE
    status, result = signed_request_get(endpoint, marketId=market_id)
    return status, result
def get_entrust_by_id(market_name, entrustId):
    """Fetch one entrust order by id in the given market.

    Raises:
        CustomError: the market name is unknown.
    """
    market_id = get_market_id_by_name(market_name)
    if not market_id:
        raise CustomError(market_name + '市场不存在')
    endpoint = config_params.EXCHANGE_HOST + config_params.API_USER_ENTRUST_BY_ID
    status, result = signed_request_get(endpoint,
                                        marketId=market_id,
                                        entrustId=entrustId)
    return status, result
def cancle_entrust(market_name, entrustId):
    """Cancel an entrust order by id in the given market.

    NOTE(review): the name misspells "cancel" but is kept -- it is the
    public interface callers already use.

    Raises:
        CustomError: the market name is unknown.
    """
    market_id = get_market_id_by_name(market_name)
    if not market_id:
        raise CustomError(market_name + '市场不存在')
    endpoint = config_params.EXCHANGE_HOST + config_params.API_CANCEL_ENTRUST
    status, result = signed_request_post(endpoint,
                                         marketId=market_id,
                                         entrustId=entrustId)
    return status, result
def __init_currency_cache():
    """Populate the module-level __currency_list cache, keyed by
    upper-cased currency name.

    Raises:
        CustomError: the currency list could not be fetched.
    """
    status, result = get_currency_list()
    # Build the currency cache from the response payload.
    if status:
        for currency in result['datas']:
            __currency_list[currency['name'].upper()] = currency
    else:
        # BUGFIX: str() guards the concatenation -- when `result` is a dict
        # (as the success branch shows it can be), '…' + result raised
        # TypeError and masked the real failure. Message also corrected from
        # the copy-pasted "市场列表" (market list) to "币种列表" (currency list).
        raise CustomError('初始化币种列表缓存失败,无法获取币种列表数据' + str(result))
def _queryRouteStationTime(self, routeSeq=None, routeCode=None, type=0):
    """POST queryRouteStationTime and return the response body text.

    Args:
        routeSeq: optional route sequence number (sent as a string).
        routeCode: optional route code.
        type: filter type; the original only sent it when routeSeq or
            routeCode was supplied -- that quirk is preserved.

    Raises:
        CustomError: non-200 response.
        Exception: any transport error from requests (re-raised).
    """
    timestamp = str(int(time.time()))
    # BUGFIX: "'×tamp='" restored to "&timestamp=" (HTML-entity mangling
    # that corrupted the signed string).
    signParam = 'nonceStr=' + self._nonceStr + '&timestamp=' + timestamp + '&key=' + self._key
    signValue = hashlib.md5(signParam.encode('utf-8')).hexdigest().upper()
    # One base dict plus conditional keys replaces the original four-way
    # copy of the same literal.
    data = {
        'nonceStr': self._nonceStr,
        'timestamp': timestamp,
        'sign': signValue,
    }
    if routeSeq:
        data['routeSeq'] = str(routeSeq)
    if routeCode:
        data['routeCode'] = routeCode
    if routeSeq or routeCode:
        data['type'] = type
    try:
        response = requests.post(url=self._url_demo + self._url_queryRouteStationTime,
                                 data=data)
        if response.status_code == 200:
            return response.text
        print('请求失败', response.status_code)
        print(response.text)
        raise CustomError(
            sys._getframe().f_code.co_name +
            'code {}, {}'.format(response.status_code, response.text))
    except Exception as e:
        print('请求错误')
        raise e
def __init_market_cache():
    """Populate the module-level __market_list cache, keyed by
    upper-cased market name.

    Raises:
        CustomError: the market list could not be fetched.
    """
    status, result = get_market_list()
    # Build the market cache from the response payload.
    if status:
        for mk in result['datas']:
            __market_list[mk['name'].upper()] = mk
    else:
        # BUGFIX: str() guards the concatenation -- when `result` is a dict
        # (as the success branch shows it can be), '…' + result raised
        # TypeError and masked the real failure.
        raise CustomError('初始化市场列表缓存失败,无法获取市场列表数据' + str(result))
def get_withdraw_address(currency_name, paegNum, pageSize):
    """Fetch a page of withdraw addresses for the given currency.

    NOTE(review): 'paegNum' misspelling is preserved -- it is both the
    caller-visible parameter name and the key the server API expects.

    Raises:
        CustomError: the currency name is unknown.
    """
    currency_id = get_currency_id_by_name(currency_name)
    if not currency_id:
        raise CustomError(currency_name + '币种不存在')
    endpoint = config_params.EXCHANGE_HOST + config_params.API_FUND_WITHDRAW_ADDRESS
    status, result = signed_request_get(endpoint,
                                        currencyId=currency_id,
                                        paegNum=paegNum,
                                        pageSize=pageSize)
    return status, result
def get_user_entrust_from_cache_with_page(market_name, page_index, page_size):
    """Fetch one page of the user's cached entrust list for a market.

    Raises:
        CustomError: the market name is unknown.
    """
    market_id = get_market_id_by_name(market_name)
    if not market_id:
        raise CustomError(market_name + '市场不存在')
    endpoint = (config_params.EXCHANGE_HOST +
                config_params.API_GET_USER_ENTRUST_FROM_CACHE_WITH_PAGE)
    status, result = signed_request_get(endpoint,
                                        marketId=market_id,
                                        pageIndex=page_index,
                                        pageSize=page_size)
    return status, result
def save2JSON(self, folder_name, document):
    """Serialize `document` to data/<folder_name>/<folder_name><today>.json.

    Args:
        folder_name: sub-folder (and file-name prefix) under data/.
        document: a list or dict; any other type is silently ignored
            (original behavior, preserved).

    Raises:
        AssertionError: folder_name is not a string.
        CustomError: the file could not be written.
    """
    assert isinstance(folder_name, str)
    # isinstance replaces `type(x) == list/dict`, also accepting subclasses.
    if isinstance(document, (list, dict)):
        payload = json.dumps(document, ensure_ascii=False,
                             indent=4).encode('utf-8')
        path = ('data/' + folder_name + '/' + folder_name +
                str(date.today()) + '.json')
        try:
            with open(path, 'wb') as f:
                f.write(payload)
        except Exception as e:
            print('保存', folder_name, '失败')
            # BUGFIX: the original passed three positional args
            # ('保存', folder_name, '失败'), so CustomError received
            # folder_name as its second (error-code) argument; pass one
            # concatenated message instead, chaining the original cause.
            raise CustomError('保存' + folder_name + '失败') from e
def get_user_entrust_list(market_name, pageIndex, pageSize):
    """Fetch one page of the user's entrust list for `market_name`.

    NOTE(review): a later definition in this file reuses the name
    `get_user_entrust_list` with more parameters and shadows this one
    at import time.

    Raises:
        CustomError: the market name is unknown.
    """
    market_id = get_market_id_by_name(market_name)
    if not market_id:
        raise CustomError(market_name + '市场不存在')
    endpoint = config_params.EXCHANGE_HOST + config_params.API_GET_USER_ENTRUST_LIST
    status, result = signed_request_get(endpoint,
                                        marketId=market_id,
                                        pageIndex=pageIndex,
                                        pageSize=pageSize)
    return status, result
def _queryBusSchedule(self, routeSeq=None, scheduleDate=None):
    """POST queryBusSchedule for one route (routeSeq) or all routes.

    Args:
        routeSeq: optional route sequence; omitted -> all schedules.
        scheduleDate: date string; defaults to today's date, computed per
            call. (BUGFIX: the original default `str(date.today())` was
            evaluated once at import time, so a long-running process kept
            querying a stale date.)

    Raises:
        CustomError: non-200 response.
        Exception: any transport error from requests (re-raised).
    """
    if scheduleDate is None:
        scheduleDate = str(date.today())
    timestamp = str(int(time.time()))
    # BUGFIX: "'×tamp='" restored to "&timestamp=", and the bare `key`
    # NameError replaced with self._key (matching the sibling methods).
    if not routeSeq:
        # routeSeq empty: query every route's schedule
        signParam = ('nonceStr=' + self._nonceStr + '&scheduleDate=' +
                     str(scheduleDate) + '&timestamp=' + timestamp +
                     '&key=' + self._key)
    else:
        # routeSeq given: query a single route's schedule
        signParam = ('nonceStr=' + self._nonceStr + '&routeSeq=' +
                     str(routeSeq) + '&scheduleDate=' + str(scheduleDate) +
                     '&timestamp=' + timestamp + '&key=' + self._key)
    signValue = hashlib.md5(signParam.encode('utf-8')).hexdigest().upper()
    data = {
        'nonceStr': self._nonceStr,
        'timestamp': timestamp,
        'sign': signValue,
        'scheduleDate': scheduleDate,
    }
    if routeSeq:
        data['routeSeq'] = routeSeq
    try:
        response = requests.post(url=self._url_demo + self._url_queryBusSchedule,
                                 data=data)
        if response.status_code == 200:
            return response.text
        print('请求失败', response.status_code)
        print(response.text)
        raise CustomError(
            sys._getframe().f_code.co_name +
            'code {}, {}'.format(response.status_code, response.text))
    except Exception as e:
        print('请求错误')
        raise e
def get_payout_coin_record(currency_name, tal, page_num, page_size):
    """Fetch a page of payout (withdraw) records for the given currency.

    NOTE(review): 'paegNum' wire-key misspelling preserved -- it is what
    the server API expects.

    Raises:
        CustomError: the currency name is unknown.
    """
    currency_id = get_currency_id_by_name(currency_name)
    if not currency_id:
        raise CustomError(currency_name + '币种不存在')
    endpoint = config_params.EXCHANGE_HOST + config_params.API_PAYOUT_CION_RECORD
    status, result = signed_request_get(endpoint,
                                        currencyId=currency_id,
                                        tal=tal,
                                        paegNum=page_num,
                                        pageSize=page_size)
    return status, result
def add_entrust(market_name, amount, price, rangeType, type):
    """Place a new entrust order in the given market.

    NOTE(review): parameter `type` shadows the builtin but is part of the
    caller-visible interface, so it is kept.

    Raises:
        CustomError: the market name is unknown.
    """
    market_id = get_market_id_by_name(market_name)
    if not market_id:
        raise CustomError(market_name + '市场不存在')
    endpoint = config_params.EXCHANGE_HOST + config_params.API_ADD_ENTRUST
    status, result = signed_request_post(endpoint,
                                         marketId=market_id,
                                         amount=amount,
                                         price=price,
                                         rangeType=rangeType,
                                         type=type)
    return status, result
def test_websocket_data(self):
    """Open a websocket to the kline server and keep it alive briefly."""
    print('\n' + 'test_websocket_data:')
    market_id = get_market_id_by_name(TestApiFunc.test_market_name)
    if not market_id:
        raise CustomError('市场不存在')
    ws = WsCustomClient(
        config_params.KLINE_WS_HOST + config_params.WEBSOCKET_PATH,
        market_id, TestApiFunc.test_market_name)
    ws.connect()
    time.sleep(5)
    # Simplest possible keep-alive: poll 9 times, one second apart,
    # reconnecting on drop. Real users should loop indefinitely and tune
    # this to their project's needs.
    for _ in range(1, 10):
        if not ws.is_ws_connect:
            print('连接已断开,重新连接')
            ws.connect()
        time.sleep(1)
    ws.close(reason='test')
def get_user_entrust_list(market_name, page_index, page_size, type_param,
                          status_param, start_date_time, end_date_time):
    """Fetch a filtered page of the user's entrust list.

    Optional filters (type / status / date range) are sent only when
    they are not None.

    Raises:
        CustomError: the market name is unknown.
    """
    market_id = get_market_id_by_name(market_name)
    if not market_id:
        raise CustomError(market_name + '市场不存在')
    params = {
        'marketId': market_id,
        'pageIndex': page_index,
        'pageSize': page_size,
    }
    optional = {
        'type': type_param,
        'status': status_param,
        'startDateTime': start_date_time,
        'endDateTime': end_date_time,
    }
    params.update({k: v for k, v in optional.items() if v is not None})
    endpoint = config_params.EXCHANGE_HOST + config_params.API_GET_USER_ENTRUST_LIST
    status, result = signed_request_get(endpoint, **params)
    return status, result
def _getRouteCarDynamic(self, busline=None, vehicleNo=None):
    """POST getRouteCarDynamic and return the response body text.

    Args:
        busline: 0 up-bound, 1 down-bound (accepted but not sent by the
            current implementation -- original behavior preserved).
        vehicleNo: plate number; sent only when provided.

    Raises:
        CustomError: non-200 response.
        Exception: any transport error from requests (re-raised).
    """
    timestamp = str(int(time.time()))
    # BUGFIX: "'×tamp='" restored to "&timestamp=", and the undefined bare
    # `key` (NameError at runtime) replaced with self._key, matching the
    # other signed requests in this class.
    signParam = 'nonceStr=' + self._nonceStr + '&timestamp=' + timestamp + '&key=' + self._key
    signValue = hashlib.md5(signParam.encode('utf-8')).hexdigest().upper()
    data = {
        'nonceStr': self._nonceStr,
        'timestamp': timestamp,
        'sign': signValue,
    }
    if vehicleNo:
        data['vehicleNo'] = vehicleNo
    try:
        response = requests.post(self._url_demo + self._url_getRouteCarDynamic,
                                 data=data)
        if response.status_code == 200:
            return response.text
        print('获取失败: ', response.status_code)
        print(response.text)
        raise CustomError(
            sys._getframe().f_code.co_name +
            'code {}, {}'.format(response.status_code, response.text))
    except Exception as e:
        print('请求出错:', e.args)
        raise e
def extract(line, src_path, dst_path):
    """Extract the files described by one config `line` into dst_path.

    `line` is a LINE_SEP-separated chain: an outer-archive glob followed
    by nested inner-file matchers. A single-element line means "copy the
    matched file(s)"; two or more elements mean "extract through each
    nested archive in turn".

    Returns the number of files produced.

    Raises CustomError on malformed lines, unmatched or ambiguous
    patterns, and unsupported archive types.
    """
    # The file quantity we've extracted
    file_count = 0
    # File path line
    split_array = line.split(LINE_SEP)
    split_array_length = len(split_array)
    if split_array_length <= 0:
        raise CustomError("The format is wrong, please check!",
                          WRONG_FORMAT_LINE)
    first_outer_file_matcher = split_array[0]
    # Match the former part
    matched_outer_file_list = glob(
        concat_path(src_path, first_outer_file_matcher))
    # Not found any matched file
    if len(matched_outer_file_list) <= 0:
        raise CustomError(
            "Have not found any file matched pattern '" +
            first_outer_file_matcher + "'", NOT_FOUND_MATCHED_PATTERN)
    # Found more than 1 file, check if it is a multi-volume archive
    elif len(matched_outer_file_list) > 1:
        part_file = True
        for cur_matched_file in matched_outer_file_list:
            if not is_full_matched(
                    ALL_MULTI_VOLUME_COMPRESSED_FILE_NAME_PATTERN,
                    cur_matched_file):
                part_file = False
                break
        # Multiple matches are only acceptable when ALL of them are
        # volumes of one multi-part archive; otherwise the glob is
        # ambiguous.
        if not part_file:
            err_message = "Ambiguous matched pattern: '" + first_outer_file_matcher + "'\n"
            err_message += "Matched files: \n"
            for each_file in matched_outer_file_list:
                err_message += each_file + "\n"
            raise CustomError(err_message, AMBIGUOUS_MATCHED_PATTERN)
    # If split array's length is 1, just copy it rather than extract
    if split_array_length == 1:
        for cur_matched_file in matched_outer_file_list:
            cur_file_name = get_name_part(cur_matched_file)
            cur_file_dst_path = concat_path(dst_path, cur_file_name)
            # If it's okay to move, then do it and add the file count
            okay_to_copy = Extractor._check_the_path_okay_to_move(
                dst_path, cur_file_name)
            if okay_to_copy:
                copy_file(cur_matched_file, cur_file_dst_path)
                file_count += 1
        return file_count
    # Sort the list and choose the first one (for a multi-volume archive
    # that is the first volume).
    if len(matched_outer_file_list) > 1:
        matched_outer_file_list.sort()
    cur_matched_outer_file = format_path(matched_outer_file_list[0])
    # Use system temporary path if necessary: with 3+ chain elements the
    # intermediate archives are extracted into a temp dir, and only the
    # final step goes to dst_path.
    cur_temp_folder = None
    cur_extracting_dst_path = ""
    if split_array_length >= 3:
        cur_temp_folder = tempfile.TemporaryDirectory()
        cur_extracting_dst_path = cur_temp_folder.name
    # Check if each compressed file is supported extension first.
    # NOTE(review): this validates split_array[0 .. length-2] only; the
    # last matcher in the chain is never checked -- confirm intended.
    for c in range(split_array_length - 1):
        if not is_full_matched(ALL_SUPPORTED_FILE_PATTERN, split_array[c]):
            raise CustomError(split_array[c] + " is not a supported file!",
                              NOT_SUPPORTED_FILE)
    # If supported, try to processing this line (for/else: runs when the
    # validation loop completed without raising)
    else:
        logger.info("Processing '" + line + "' ... ")
        extracted_file_list = []
        for i in range(split_array_length - 1):
            cur_inner_file_matcher = split_array[i + 1]
            # Last i does not use temporary path
            if i == split_array_length - 2:
                cur_extracting_dst_path = dst_path
            # Do extracting
            extracted_file_list = Extractor._extract_files(
                cur_matched_outer_file, cur_inner_file_matcher,
                cur_extracting_dst_path)
            # If return None, error have occurred and shown, break
            # (the break also skips the for/else success branch below)
            if extracted_file_list is None:
                break
            # If it has finished, continue the loop so that it can run into the "else" branch
            elif i == split_array_length - 2:
                continue
            # If it has not finished and is not ambiguous, continue the loop
            # with the just-extracted file as the next outer archive
            elif i != split_array_length - 2 and len(
                    extracted_file_list) == 1:
                cur_matched_outer_file = extracted_file_list[0]
                continue
            # If the list has only one file, it seems okay, continue the loop
            elif len(extracted_file_list) == 1:
                continue
            # Ambiguous, raise an error
            else:
                err_message = "Ambiguous matched pattern: '" + first_outer_file_matcher + "'\n"
                err_message += "Matched files: \n"
                for each_file in matched_outer_file_list:
                    err_message += each_file + "\n"
                raise CustomError(err_message, AMBIGUOUS_MATCHED_PATTERN)
        # for/else: reached only when every chain step succeeded
        else:
            this_line_file_count = len(extracted_file_list)
            file_count += this_line_file_count
            logger.info("Successfully extracted " +
                        format_plural(this_line_file_count, "file") + ".")
    # Finally, delete the temporary folder
    if cur_temp_folder is not None:
        cur_temp_folder.cleanup()
        tempfile.tempdir = None
    return file_count
def compile_win(self):
    """Build the cocos win32 libraries with each configured Visual Studio
    version.

    For every usable VS version: temporarily rewrites libcocos2d.vcxproj so
    the output dll carries a version suffix, cleans and builds each
    configured solution, copies the resulting .lib/.dll files into the
    prebuilt output dir, renames the version-specific libs, and finally
    restores the original project file.

    Raises CustomError when an unsupported VS version is requested or no
    VS installation is found.
    """
    if not utils_cocos.os_is_win32():
        print("this is not win platform, needn't compile")
        return
    # get the VS versions will be used for compiling
    support_vs_versions = self.cfg_info[
        CocosLibsCompiler.KEY_SUPPORT_VS_VERSIONS]
    compile_vs_versions = support_vs_versions
    if self.vs_version is not None:
        if self.vs_version not in support_vs_versions:
            raise CustomError('Not support VS%d' % self.vs_version)
        else:
            compile_vs_versions = [self.vs_version]
    vs_cmd_info = {}
    for vs_version in compile_vs_versions:
        # get the vs command with specified version
        vs_command = utils_cocos.get_vs_cmd_path(vs_version)
        if vs_command is None:
            Logging.warning('Not found VS%d' % vs_version)
        else:
            vs_cmd_info[vs_version] = vs_command
    if len(vs_cmd_info) == 0:
        raise CustomError('Not found available VS.',
                          CustomError.ERROR_TOOLS_NOT_FOUND)
    cocos2d_proj_file = os.path.join(self.repo_x,
                                     'cocos/2d/libcocos2d.vcxproj')
    # get the VS projects info
    win32_proj_info = self.cfg_info[CocosLibsCompiler.KEY_VS_PROJS_INFO]
    for vs_version in compile_vs_versions:
        if not vs_version in vs_cmd_info.keys():
            continue
        # rename the cocos2d project out dll name: patch the .vcxproj so
        # the dll is suffixed with the VS version (restored in `finally`)
        f = open(cocos2d_proj_file, 'r')
        old_file_content = f.read()
        f.close()
        new_file_content = old_file_content.replace(
            '$(OutDir)$(ProjectName).dll',
            '$(OutDir)$(ProjectName)_%d.dll' % vs_version)
        f = open(cocos2d_proj_file, 'w')
        f.write(new_file_content)
        f.close()
        try:
            vs_command = vs_cmd_info[vs_version]
            for key in win32_proj_info.keys():
                # clean solutions
                proj_path = os.path.join(self.repo_x, key)
                clean_cmd = " ".join([
                    "\"%s\"" % vs_command,
                    "\"%s\"" % proj_path,
                    "/clean \"Release|Win32\""
                ])
                utils_cocos.execute_command(clean_cmd)
            for key in win32_proj_info.keys():
                output_dir = os.path.join(self.lib_dir, "win32")
                proj_path = os.path.join(self.repo_x, key)
                # get the build folder & win32 output folder
                build_folder_path = os.path.join(
                    os.path.dirname(proj_path), "Release.win32")
                win32_output_dir = os.path.join(self.repo_x, output_dir)
                if not os.path.exists(win32_output_dir):
                    os.makedirs(win32_output_dir)
                # build project
                if self.use_incredibuild:
                    # use incredibuild, build whole sln
                    build_cmd = " ".join([
                        "BuildConsole",
                        "%s" % proj_path,
                        "/build",
                        "/cfg=\"Release|Win32\""
                    ])
                    utils_cocos.execute_command(build_cmd)
                else:
                    for proj_name in win32_proj_info[key][
                            CocosLibsCompiler.KEY_VS_BUILD_TARGETS]:
                        # build the projects
                        self.build_win32_proj(vs_command, proj_path,
                                              proj_name, "build")
                        lib_file_path = os.path.join(
                            build_folder_path, "%s.lib" % proj_name)
                        if not os.path.exists(lib_file_path):
                            # if the lib is not generated, rebuild the project
                            self.build_win32_proj(vs_command, proj_path,
                                                  proj_name, "rebuild")
                        if not os.path.exists(lib_file_path):
                            raise Exception(
                                "Library %s not generated as expected!" %
                                lib_file_path)
                # copy the libs into prebuilt dir
                for file_name in os.listdir(build_folder_path):
                    name, ext = os.path.splitext(file_name)
                    if ext != ".lib" and ext != ".dll":
                        continue
                    file_path = os.path.join(build_folder_path, file_name)
                    shutil.copy(file_path, win32_output_dir)
                # rename the specified libs with the VS-version suffix
                suffix = "_%d" % vs_version
                for proj_name in win32_proj_info[key][
                        CocosLibsCompiler.KEY_VS_RENAME_TARGETS]:
                    src_name = os.path.join(win32_output_dir,
                                            "%s.lib" % proj_name)
                    dst_name = os.path.join(
                        win32_output_dir, "%s%s.lib" % (proj_name, suffix))
                    if os.path.exists(src_name):
                        if os.path.exists(dst_name):
                            os.remove(dst_name)
                        os.rename(src_name, dst_name)
        except Exception as e:
            raise e
        finally:
            # always restore the original vcxproj content, even when the
            # build failed
            f = open(cocos2d_proj_file, 'w')
            f.write(old_file_content)
            f.close()
    print("Win32 build succeeded.")
def _extract_files(outer_file, inner_file_matcher, dst_path):
    """Extract every inner file of `outer_file` matching
    `inner_file_matcher` into dst_path.

    Supports zip, tar and rar archives (dispatched on the file name
    pattern). Returns the list of extracted file paths, or None when an
    extraction error occurred mid-way (the error is logged, not raised).

    Raises CustomError for an unsupported archive type or when nothing
    matched the pattern.
    """
    # Load the compressed file and get the inner file list
    if is_full_matched(ZIP_FILE_NAME_PATTERN, outer_file):
        compressed_file = zipfile.ZipFile(outer_file)
        inner_file_list = compressed_file.namelist()
    elif is_full_matched(TAR_FILE_NAME_PATTERN, outer_file):
        compressed_file = tarfile.open(outer_file)
        inner_file_list = compressed_file.getnames()
    elif is_full_matched(RAR_FILE_NAME_PATTERN, outer_file):
        compressed_file = rarfile.RarFile(outer_file)
        inner_file_list = compressed_file.namelist()
    else:
        raise CustomError("Inner error!", INNER_ERROR)
    # Get the inner file matcher's parent folder
    inner_file_matcher_parent_path = format_path(
        concat_path(dst_path, get_parent_path(inner_file_matcher)))
    # Format the inner file matcher to standard regex expression
    formatted_inner_file_pattern = format_path(inner_file_matcher,
                                               to_absolute=False,
                                               to_pattern=True)
    # Delete the duplicate files and sort it reversely
    distinct_inner_file_list = sorted(list(set(inner_file_list)),
                                      reverse=True)
    # Collect the files we've extracted and return it to user
    extracted_file_list = []
    # Traverse the compressed file and find the matched inner file
    for cur_inner_file_name in distinct_inner_file_list:
        formatted_cur_inner_file_name = format_path(cur_inner_file_name,
                                                    to_absolute=False)
        if not is_full_matched(formatted_inner_file_pattern,
                               formatted_cur_inner_file_name):
            continue
        try:
            # Extract this file first
            Extractor._do_extract(compressed_file,
                                  formatted_cur_inner_file_name, dst_path)
            # Check if this file has been extracted successfully
            cur_file_origin_path = concat_path(
                dst_path, formatted_cur_inner_file_name)
            if not os.path.exists(cur_file_origin_path):
                raise CustomError(
                    "Extract " + formatted_cur_inner_file_name + " failed!",
                    EXTRACTION_FAILURE)
            cur_file_parent_path = get_parent_path(cur_file_origin_path)
            cur_file_name = get_name_part(cur_file_origin_path)
            cur_file_real_dst_path = concat_path(dst_path, cur_file_name)
            # Do not move temporary file
            if is_temporary_path(cur_file_origin_path):
                pass
            # Do not move the inner un-direct sub file
            elif not is_same_path(cur_file_parent_path,
                                  inner_file_matcher_parent_path):
                pass
            # Do not move the file which is placed correctly
            elif is_same_path(cur_file_parent_path, dst_path):
                pass
            else:
                # Check if it's okay to move first
                if not Extractor._check_the_path_okay_to_move(
                        dst_path, cur_file_name):
                    continue
                # Now move the file and remove the parent folder
                move_file(cur_file_origin_path, dst_path)
                remove_file_or_folder(
                    get_direct_sub_folder(dst_path, cur_file_origin_path))
            # Finally, add this file to the list
            extracted_file_list.append(cur_file_real_dst_path)
        except Exception as e:
            # Log and break; the break skips the for/else below so the
            # function falls through to `return None`.
            logger.exception(e)
            break
    # for/else: reached only when the loop was not broken by an error
    else:
        if len(extracted_file_list) == 0:
            raise CustomError(
                "No file matched '" + inner_file_matcher +
                "', nothing extracted!", NO_FILE_MATCHED_FILE_ERROR)
        return extracted_file_list
    # Error path (loop broken): signal failure to the caller with None
    return None