def f5_devicelist(request, format=None):
    """REST endpoint for the F5 device-list database file.

    GET  -- return the JSON stored in ``devicelist.txt``.
    POST -- after an ``auth_key`` check, poll every device in the database
    over the BIG-IP iControl REST API (via shelled-out ``curl``), rebuild
    device/interface/trunk/vlan/self-IP/cluster information, rewrite the
    database file, and return the refreshed list.

    NOTE(review): py2-era code (``unicode``); ``os.popen`` curl commands are
    built by string concatenation from database contents — confirm inputs are
    trusted, this is shell-injectable otherwise.
    """
    devicelist_file = USER_DATABASES_DIR + "devicelist.txt"
    # get method
    if request.method == 'GET':
        try:
            f = open(devicelist_file, "r")
            string_content = f.readlines()
            f.close()
            # converter string to dict: whole DB is one JSON document on line 1
            stream = BytesIO(string_content[0])
            data_from_databasefile = JSONParser().parse(stream)
            # return
            return Response(data_from_databasefile)
        except:
            # any failure (missing file, bad JSON) is reported as "not existed"
            message = ["device list database is not existed!"]
            return Response(message, status=status.HTTP_400_BAD_REQUEST)
    elif request.method == 'POST':
        try:
            _input_ = JSONParser().parse(request)
            # shared-secret check; falls through (returns None) when it fails
            if re.match(ENCAP_PASSWORD, str(_input_[0]['auth_key'])):
                # log message
                f = open(LOG_FILE, "a")
                _date_ = os.popen("date").read().strip()
                log_msg = _date_ + " from : " + request.META[
                    'REMOTE_ADDR'] + " , method POST request to run f5_devicelist function!\n"
                f.write(log_msg)
                f.close()
                # read database file to lookup ip
                f = open(devicelist_file, "r")
                string_content = f.readlines()
                f.close()
                # get from database file
                stream = BytesIO(string_content[0].strip())
                data_from_databasefile = JSONParser().parse(stream)
                # init database file
                # f = open(devicelist_file,"w")
                # f.close()
                # update database file
                message = []
                for _param_ in data_from_databasefile:
                    # ip information
                    # NOTE(review): the dots in this pattern are unescaped, so it
                    # is looser than a strict IPv4 check — confirm intended.
                    if not re.match("[0-9]+.[0-9]+.[0-9]+.[0-9]+",
                                    str(_param_['ip']).strip()):
                        message = [
                            "device list database is not normal, please check the device database file!"
                        ]
                        return Response(message,
                                        status=status.HTTP_400_BAD_REQUEST)
                    # query cluster-member records for this device
                    curl_command = "curl -sk -u " + USER_NAME + ":" + USER_PASSWORD + " https://" + str(
                        _param_['ip']).strip(
                        ) + "/mgmt/tm/cm/device -H 'Content-Type: application/json'"
                    _response_ = os.popen(curl_command).read().strip()
                    stream = BytesIO(_response_)
                    data_from_response = JSONParser().parse(stream)
                    _result_dict_ = {}
                    for _dictData_ in data_from_response[u'items']:
                        # only enrich the record whose managementIp matches the DB row
                        if re.match(str(_dictData_[u'managementIp']),
                                    str(_param_['mgmtip']).strip()):
                            #if re.match(str(_dictData_[u'managementIp']),str(_param_['ip']).strip()):
                            _result_dict_["failover"] = str(
                                _dictData_[u'failoverState'])
                            _result_dict_["clustername"] = str(
                                _dictData_[u'name'])
                            _result_dict_["devicehostname"] = str(
                                _dictData_[u'hostname'])
                            #_result_dict_["ip"] = str(_dictData_[u'managementIp'])
                            _result_dict_["ip"] = str(_param_['ip']).strip()
                            _result_dict_["mgmtip"] = str(
                                _dictData_[u'managementIp'])
                            # interface information
                            curl_command = "curl -sk -u " + USER_NAME + ":" + USER_PASSWORD + " https://" + _result_dict_[
                                "ip"].strip(
                                ) + "/mgmt/tm/net/interface -H 'Content-Type: application/json'"
                            raw_data = os.popen(curl_command).read().strip()
                            stream = BytesIO(raw_data)
                            data_forinterface_from_request = JSONParser().parse(stream)
                            cache_interface = {}
                            for _target_ in data_forinterface_from_request[u'items']:
                                _interfacename_ = _target_[u"name"]
                                if (u"enabled" in _target_.keys()) or ("enabled" in _target_.keys()):
                                    # treat truthy or textual "true" as enabled
                                    if _target_[u"enabled"] or re.search(
                                            "true",
                                            str(_target_[u"enabled"]).strip(),
                                            re.I):
                                        if _interfacename_ not in cache_interface.keys():
                                            cache_interface[_interfacename_] = {}
                                            cache_interface[_interfacename_][
                                                u"name"] = _interfacename_
                                            cache_interface[_interfacename_][
                                                u"portstatus"] = "enable"
                                            cache_interface[_interfacename_][
                                                u"macAddress"] = _target_[
                                                    u"macAddress"]
                                            cache_interface[_interfacename_][
                                                u"mediaMax"] = _target_[u"mediaMax"]
                            # trunk information
                            curl_command = "curl -sk -u " + USER_NAME + ":" + USER_PASSWORD + " https://" + _result_dict_[
                                "ip"].strip(
                                ) + "/mgmt/tm/net/trunk -H 'Content-Type: application/json'"
                            raw_data = os.popen(curl_command).read().strip()
                            stream = BytesIO(raw_data)
                            data_fortrunk_from_request = JSONParser().parse(stream)
                            cache_trunk = {}
                            for _target_ in data_fortrunk_from_request[u'items']:
                                _trunkname_ = _target_[u"name"]
                                temp_listbox = []
                                for _interfacename_ in _target_[u"interfaces"]:
                                    # only keep member interfaces seen as enabled above
                                    if _interfacename_ in cache_interface.keys():
                                        temp_listbox.append(
                                            cache_interface[_interfacename_])
                                if len(temp_listbox):
                                    cache_trunk[_trunkname_] = {}
                                    cache_trunk[_trunkname_][u"name"] = _trunkname_
                                    cache_trunk[_trunkname_][u"macAddress"] = _target_[
                                        u"macAddress"]
                                    cache_trunk[_trunkname_][u"lacp"] = _target_[
                                        u"lacp"]
                                    cache_trunk[_trunkname_][u"lacpMode"] = _target_[
                                        u"lacpMode"]
                                    cache_trunk[_trunkname_][
                                        u"interfaces"] = temp_listbox
                            # vlan information
                            curl_command = "curl -sk -u " + USER_NAME + ":" + USER_PASSWORD + " https://" + _result_dict_[
                                "ip"].strip(
                                ) + "/mgmt/tm/net/vlan -H 'Content-Type: application/json'"
                            raw_data = os.popen(curl_command).read().strip()
                            stream = BytesIO(raw_data)
                            data_forvlan_from_request = JSONParser().parse(stream)
                            cache_vlan = {}
                            for _target_ in data_forvlan_from_request[u'items']:
                                _vlanname_ = _target_[u"name"]
                                # per-vlan lookup of member interfaces/trunks
                                curl_command = "curl -sk -u " + USER_NAME + ":" + USER_PASSWORD + " https://" + _result_dict_[
                                    "ip"].strip() + "/mgmt/tm/net/vlan/~Common~" + str(
                                        _vlanname_
                                    ) + "/interfaces -H 'Content-Type: application/json'"
                                raw_data = os.popen(curl_command).read().strip()
                                stream = BytesIO(raw_data)
                                vlaninterface_data = JSONParser().parse(stream)
                                temp_listbox = []
                                for _values_inner_dict_ in vlaninterface_data[
                                        u'items']:
                                    if _values_inner_dict_[
                                            u"name"] in cache_trunk.keys():
                                        temp_listbox.append(
                                            cache_trunk[_values_inner_dict_[u"name"]])
                                    if _values_inner_dict_[
                                            u"name"] in cache_interface.keys():
                                        temp_listbox.append(cache_interface[
                                            _values_inner_dict_[u"name"]])
                                if len(temp_listbox):
                                    cache_vlan[_vlanname_] = {}
                                    cache_vlan[_vlanname_][u"name"] = _vlanname_
                                    cache_vlan[_vlanname_][
                                        u"interfaces"] = temp_listbox
                                    cache_vlan[_vlanname_][u"tag"] = _target_[u"tag"]
                            # self information
                            curl_command = "curl -sk -u " + USER_NAME + ":" + USER_PASSWORD + " https://" + _result_dict_[
                                "ip"].strip(
                                ) + "/mgmt/tm/net/self -H 'Content-Type: application/json'"
                            raw_data = os.popen(curl_command).read().strip()
                            stream = BytesIO(raw_data)
                            data_forself_from_request = JSONParser().parse(stream)
                            temp_listbox = {}
                            for _target_ in data_forself_from_request[u'items']:
                                # vlan reference looks like "/Common/<name>"; keep last segment
                                _vlanstring_ = str(
                                    str(_target_[u"vlan"]).strip().split("/")[-1])
                                if (unicode(_vlanstring_) in cache_vlan.keys()) or (
                                        _vlanstring_ in cache_vlan.keys()):
                                    _ipvalues_ = _target_[u"address"]
                                    if _ipvalues_ not in temp_listbox.keys():
                                        temp_listbox[_ipvalues_] = {}
                                        temp_listbox[_ipvalues_][u"floating"] = _target_[
                                            u"floating"]
                                        temp_listbox[_ipvalues_][u"vlan"] = cache_vlan[
                                            unicode(_vlanstring_)]
                            _result_dict_["ipaddress"] = temp_listbox
                            # cluster information
                            curl_command = "curl -sk -u " + USER_NAME + ":" + USER_PASSWORD + " https://" + _result_dict_[
                                "ip"].strip(
                                ) + "/mgmt/tm/cm/trust-domain -H 'Content-Type: application/json'"
                            raw_data = os.popen(curl_command).read().strip()
                            stream = BytesIO(raw_data)
                            data_from_response = JSONParser().parse(stream)
                            _temp_list_ = []
                            for _target_ in data_from_response[u'items']:
                                for _caDevices_ in _target_[u'caDevices']:
                                    _Dname_ = _caDevices_.strip().split('/')[-1]
                                    # collect trust-domain peers other than this device
                                    if not re.match(str(_Dname_),
                                                    str(_result_dict_["clustername"])):
                                        _temp_list_.append(str(_Dname_))
                            # NOTE(review): raises IndexError when no peer exists;
                            # the outer bare except silently converts that to a 400.
                            _result_dict_["haclustername"] = _temp_list_[-1]
                            # find device-group
                            curl_command = "curl -sk -u " + USER_NAME + ":" + USER_PASSWORD + " https://" + _result_dict_[
                                "ip"].strip(
                                ) + "/mgmt/tm/cm/device-group -H 'Content-Type: application/json'"
                            raw_data = os.popen(curl_command).read().strip()
                            stream = BytesIO(raw_data)
                            data_from_response = JSONParser().parse(stream)
                            for _target_ in data_from_response[u'items']:
                                if re.search(str(_target_[u'type']),
                                             str('sync-failover')):
                                    _result_dict_["devicegroupname"] = str(
                                        _target_[u'name'])
                    # result add
                    message.append(_result_dict_)
                # return and update database file
                f = open(devicelist_file, "w")
                f.write(json.dumps(message))
                f.close()
                return Response(message)
        except:
            # any failure during the refresh yields an empty-record 400
            message = [{}]
            return Response(message, status=status.HTTP_400_BAD_REQUEST)
def test_put_file_error(self, mock_ftp):
    """A failed upload must surface as FTPStorageException."""
    storage = self.storage
    storage._start_connection()
    payload = File(BytesIO(b'foo'), 'foo')
    with self.assertRaises(ftp.FTPStorageException):
        storage._put_file('foo', payload)
def get(self, request, format=None):
    """ Return menu of current user.

    Builds the BACKEND menu tree the user is allowed to see: resolves the
    user's effective permissions (via hierarchies, groups and direct
    permissions), then serializes each parent menu with its permitted items.
    """
    # short session: expire after 30 seconds of inactivity
    self.request.session.set_expiry(30)
    menu_module = 'BACKEND'  # which environment's menu to build
    menu_parents = []  # top-level menu entries
    menu_childrens = {}  # menu items per parent (currently unused)
    h_list = []
    user = request.user
    # 1) collect the user's permissions
    permission_list = []  # effective permission ids for this user
    if request.user.is_superuser:
        permission_list = []  # superuser: no restriction applied
    else:
        try:
            # Hierarchies the user can access (through groups or permissions)
            h_list = list(
                col["id"]
                for col in Hierarchy.objects.values("id").filter(
                    Q(userhierarchygroup__user__id=user.id)
                    | Q(userhierarchypermission__user__id=user.id)).
                distinct())
            # h_list = set(h_x_g_list + h_x_p_list)  # merge distinct
            # print ('h_list', h_list)
            group_list = list(
                col["id"] for col in Group.objects.values("id").filter(
                    (Q(userhierarchygroup__user__id=user.id)
                     & Q(userhierarchygroup__hierarchy__in=h_list))
                    # or groups assigned for every hierarchy
                    | Q(user__id=user.id)).distinct())
            # print ('group_list=', group_list)
            # Permissions the user holds through its groups and
            # permissions granted to the user directly
            permission_list = list(
                col["id"] for col in Permission.objects.values("id").filter(
                    Q(group__in=group_list)
                    | (Q(userhierarchypermission__user__id=user.id)
                       & Q(userhierarchypermission__hierarchy__in=h_list))
                    # or permissions valid for every hierarchy
                    | Q(user__id=user.id)).distinct())
        except Exception as e:
            print("Error", e)
            # pass
    print('permission_list=', permission_list)
    # 2) build the menu
    # fetch the child items first and derive their parents, so that a
    # menu with no visible items is not shown at all
    menu_childrens_t = list(col["id"] for col in Menu.objects.values(
        "id").filter(Q(permission__in=permission_list)
                     | Q(id__isnull=True if permission_list else False),
                     module=menu_module,
                     is_active=True).order_by("pos"))
    menu_parents = Menu.objects.filter(
        childrens__in=menu_childrens_t, module=menu_module,
        is_active=True).order_by("pos").distinct()
    menu_json = []
    if menu_parents:
        for menu in menu_parents:
            menu_s = MenuInfoSerializer(menu)
            # items of this parent the user may see
            items = Menu.objects.filter(
                Q(permission__in=permission_list)
                | Q(id__isnull=True if permission_list else False),
                parent_id=menu.id,
                module=menu_module,
                is_active=True).order_by("pos")
            items_s = MenuInfoSerializer(items, many=True)
            # round-trip through JSON to get plain python structures
            content = JSONRenderer().render(items_s.data)
            stream = BytesIO(content)
            data = JSONParser().parse(stream)
            print("=======")
            print((data))
            print("=======")
            menu_s.data['menu_items'].extend(data)
            '''
            menu_json.append({
                'menu': menu_s.data
            })
            '''
            menu_json.append(menu_s.data)
    print('menu_json=', (menu_json))
    # NOTE(review): this dict is built but never returned — only 'menu' is.
    content = {
        'user': user.username,
        'hierarchys': h_list,
        'permissions': permission_list,
        'menu_json': menu_json,
    }
    return Response({'menu': menu_json})
def test_save_error(self):
    """Saving an empty stream under EMPTY_FILE must raise FileSaveError."""
    empty_photo = BytesIO()
    self.assertRaises(FileSaveError, self.storage.save, 'EMPTY_FILE',
                      empty_photo)
'''
Serialize a queryset
'''
qs = Status.objects.all()
# BUG FIX: the original passed an undefined name ``obj`` to the serializer;
# the queryset built on the previous line is what should be serialized.
serializer2 = StatusSerializer(qs, many=True)
serializer2.data
# render the serialized queryset to JSON bytes
json_data2 = JSONRenderer().render(serializer2.data)
print(json_data2)
# parse the JSON bytes back into native python data
stream2 = BytesIO(json_data2)
data2 = JSONParser().parse(stream2)
print(data2)
"""
create dat
"""
# deserialize incoming data into a new Status (validation only shown here)
data = {'user': 1}
serializer = StatusSerializer(data=data)
serializer.is_valid()
def list(self, request, project):
    """
    GET method implementation for log slicer

    Receives a line range and job_id and returns those lines
    """
    job_id = request.query_params.get("job_id")
    log_name = request.query_params.get("name")
    if log_name:
        log_names = [log_name]
    else:
        log_names = ["buildbot_text", "builds-4h"]
    # mozlog logs are JSON-per-line; everything else is treated as text
    format = 'json' if log_name == 'mozlog_json' else 'text'
    file = None
    start_line = request.query_params.get("start_line")
    end_line = request.query_params.get("end_line")
    if not start_line or not end_line:
        return Response(
            "``start_line`` and ``end_line`` parameters are both required",
            status=HTTP_400_BAD_REQUEST)
    try:
        start_line = abs(int(start_line))
        end_line = abs(int(end_line))
    except ValueError:
        return Response("parameters could not be converted to integers",
                        status=HTTP_400_BAD_REQUEST)
    if start_line >= end_line:
        return Response("``end_line`` must be larger than ``start_line``",
                        status=HTTP_400_BAD_REQUEST)
    try:
        job = Job.objects.get(repository__name=project,
                              project_specific_id=job_id)
    except Job.DoesNotExist:
        return Response("Job does not exist", status=HTTP_404_NOT_FOUND)
    try:
        url = JobLog.objects.filter(
            job=job,
            name__in=log_names)[0:1].values_list('url', flat=True)[0]
    except (JobLog.DoesNotExist, IndexError):
        # BUG FIX: slicing/indexing a queryset never raises DoesNotExist —
        # an empty result raises IndexError on the [0] lookup, so the
        # original 404 branch was unreachable. Catch both.
        return Response("Job log does not exist", status=HTTP_404_NOT_FOUND)
    try:
        # cached logs are stored gzipped; fetch and gzip on a cache miss
        file = filesystem.get(url)
        if not file:
            r = make_request(url)
            try:
                file = gzip.GzipFile(fileobj=BytesIO(r.content))
                # read 16 bytes, just to make sure the file is gzipped
                file.read(16)
                file.seek(0)
                filesystem.set(url, file.fileobj)
            except IOError:
                # file is not gzipped, but we should still store / read
                # it as such, to save space
                file = BytesIO(r.content)
                gz_file_content = BytesIO()
                with gzip.GzipFile('none', 'w', fileobj=gz_file_content) as gz:
                    gz.write(r.content)
                filesystem.set(url, gz_file_content)
        else:
            file = gzip.GzipFile(fileobj=file)
        # slice out the requested [start_line, end_line) window
        lines = []
        for i, line in enumerate(file):
            if i < start_line:
                continue
            elif i >= end_line:
                break
            if format == 'json':
                lines.append({"data": json.loads(line), "index": i})
            else:
                lines.append({"text": line, "index": i})
        return Response(lines)
    finally:
        if file:
            file.close()
serializer = CommentSerializer(comment) #comment is an instance, print(serializer.data) #and a data also can be transfered into #the second parameter ############################################################# from rest_framework.renderers import JSONRenderer json = JSONRenderer().render(serializer.data) print(json) ############################################################# from django.utils.six import BytesIO from rest_framework.parsers import JSONParser stream = BytesIO(json) data = JSONParser().parse(stream) serializer = CommentSerializer(data=data) print(serializer.is_valid()) print(serializer.validated_data) ###################################### class CommentSerializer(serializers.Serializer): email = serializers.EmailField() content = serializers.CharField(max_length=200) created = serializers.DateTimeField() def create(self, validated_data): return Comment(**validated_data)
def mini_login(status, code, userInfo):
    """Handle a WeChat mini-program login.

    Exchanges ``code`` for the user's unionid via the WeChat jscode2session
    API, then routes the user: existing users with a complete profile go to
    their profile page, incomplete ones to the save-message form, and unknown
    users are created (with a personal login QR code) first.

    NOTE(review): appid/secret are hard-coded credentials — move to settings.
    """
    appid = 'wx8c822d6f747d1e6a'
    secret = '2419917cec1c48a0e421a9e3513f754e'
    url = 'https://api.weixin.qq.com/sns/jscode2session?appid={0}&secret={1}&js_code={2}&grant_type=authorization_code'.format(
        appid, secret, code)
    res = requests.get(url)
    res = json.loads(res.text)
    # KeyError here if WeChat returns an error payload instead of unionid
    unionid = res['unionid']
    user = User.objects.filter(union_id=unionid).first()
    print('code', code)
    print('userInfo', userInfo)
    print('res', res)
    print('status', status)
    event = Event.objects.all().first()
    if status == 'firstLogin':
        if user:
            if user.name and user.hotel_name:
                # profile complete: go straight to the profile page
                return {
                    'url':
                    'https://pinkslash.metatype.cn/customer_profile/{0}/'.
                    format(user.id)
                }
            else:
                # known user but profile incomplete: collect details first
                return {
                    'url':
                    'https://pinkslash.metatype.cn/mini_customer/save_message/{0}/'
                    .format(user.id)
                }
        else:
            # brand-new user: create the record and a login QR code image
            head_img = userInfo['userInfo']['avatarUrl']
            user = User.objects.create(union_id=unionid,
                                       head_img=head_img,
                                       event=event,
                                       status=0)
            print('user', user)
            qr = qrcode.make(
                'http://pinkslash.metatype.cn/wechat_login/?status=sendcredits_{0}_{1}'
                .format(user.id, event.id))
            buf = BytesIO()
            qr.save(buf)
            qr_data = buf.getvalue()
            # NOTE(review): this appends a second copy of the PNG to the
            # buffer while ``size`` below still reports one copy — looks
            # unintended; confirm against the stored image.
            buf.write(qr_data)
            qr_img = InMemoryUploadedFile(file=buf,
                                          field_name=None,
                                          name='food.png',
                                          content_type='image/png',
                                          size=len(qr_data),
                                          charset=None)
            _user = User.objects.get(id=user.id)
            _user.qrcode = qr_img
            _user.save()
            print('_user', _user)
            return {
                'url':
                'https://pinkslash.metatype.cn/mini_customer/save_message/{0}/'
                .format(user.id)
            }
    else:
        # any non-first login goes to the profile page
        return {
            'url':
            'https://pinkslash.metatype.cn/customer_profile/{0}/'.format(
                user.id)
        }
def test_save(self, mock_sftp):
    """_save should push the wrapped file's bytes through the SFTP client."""
    payload = File(BytesIO(b'foo'), 'foo')
    self.storage._save('foo', payload)
    write_mock = mock_sftp.open.return_value.write
    self.assertTrue(write_mock.called)
def create(self):
    """Initialise an empty, writable in-memory tar archive.

    Stores the backing buffer on ``self.string_buffer`` and the open
    tarfile on ``self._archive``.
    """
    buffer = BytesIO()
    self.string_buffer = buffer
    self._archive = tarfile.TarFile(fileobj=buffer, mode='w')
def create(self):
    """Initialise an empty, writable in-memory zip archive.

    Stores the backing buffer on ``self.string_buffer`` and the open
    ZipFile on ``self._archive``.
    """
    buffer = BytesIO()
    self.string_buffer = buffer
    self._archive = zipfile.ZipFile(buffer, mode='w')
def f5_virtualserverstats_by_rollback(request, virtualservername,
                                      rollback_interval, format=None):
    """Return a rollback window of saved virtual-server statistics.

    For every ``<virtualservername>*.virtual.stats`` file, parse each
    JSON-per-line sample, sort samples by ``updated_time``, locate the sample
    closest to ``last_time - ROLLBAK_INTERVAL * rollback_interval``, and
    return up to STATS_VIEWER_COUNT samples ending at that point, keyed by
    filename and timestamp.

    NOTE(review): py2-era code (``unicode``, ``dict.keys().sort()``).
    """
    # get method
    if request.method == 'GET':
        try:
            matched_filename = str(virtualservername)
            matched_fullpath = USER_VAR_STATS + matched_filename + "*.virtual.stats"
            matched_filelist = glob.glob(matched_fullpath)
            all_stats_list = []
            for _filename_ in matched_filelist:
                stats_inform_dict = {}
                parsed_filename = str(str(_filename_).strip().split("/")[-1])
                stats_inform_dict[unicode(parsed_filename)] = {}
                id_count = int(0)
                f = open(_filename_, 'r')
                _contents_ = f.readlines()
                f.close()
                ## added 0909 to extract rollback parameter
                floatbox = float(0)
                sorting_dict_container = {}
                _copied_contents_ = copy.copy(_contents_)
                for _copied_contents_item_ in _copied_contents_:
                    IOString = _copied_contents_item_.strip()
                    stream = BytesIO(IOString)
                    json_stream = JSONParser().parse(stream)
                    # NOTE(review): with ``or`` this rejects the file when the
                    # key is missing under EITHER str or unicode form — on py2
                    # one of the two is usually absent; ``and`` looks intended.
                    if (matched_filename not in json_stream.keys()) or (
                            unicode(matched_filename) not in json_stream.keys()):
                        return Response(
                            "database is not matched with the server name!")
                    floatbox = float(json_stream[unicode(matched_filename)]
                                     [u'updated_time'])
                    sorting_dict_container[floatbox] = json_stream[unicode(
                        matched_filename)]
                # chronological list of sample timestamps
                unicode_timevalue_list = sorting_dict_container.keys()
                unicode_timevalue_list.sort()
                last_time = unicode_timevalue_list[-1]
                # start end time value calculation
                before_time = int(rollback_interval)
                backtotime_interval = float(
                    int(ROLLBAK_INTERVAL) * int(before_time))
                if float(last_time) < float(backtotime_interval):
                    # cannot roll back that far: clamp to the newest sample
                    predicted_past_time = float(last_time)
                else:
                    predicted_past_time = float(last_time) - float(
                        backtotime_interval)
                # pick the sample timestamp closest to the target time
                findabs_box = {}
                for _univalue_ in unicode_timevalue_list:
                    abs_interval_value = abs(
                        float(_univalue_) - float(predicted_past_time))
                    findabs_box[abs_interval_value] = _univalue_
                findabs_box_keys = findabs_box.keys()
                findabs_box_keys.sort()
                matched_final_time = findabs_box[findabs_box_keys[0]]
                #
                matched_index = unicode_timevalue_list.index(
                    matched_final_time)
                included_matched_index = matched_index + int(1)
                # window of up to STATS_VIEWER_COUNT samples ending at the match
                valid_timevalue = []
                if matched_index <= STATS_VIEWER_COUNT:
                    valid_timevalue = unicode_timevalue_list[:
                                                             included_matched_index]
                else:
                    start_index = int(matched_index - STATS_VIEWER_COUNT)
                    valid_timevalue = unicode_timevalue_list[
                        start_index:included_matched_index]
                ordered_list = []
                for _time_item_ in valid_timevalue:
                    temp_dictbox = {}
                    temp_dictbox[unicode(matched_filename)] = {}
                    temp_dictbox[
                        unicode(matched_filename
                                )] = sorting_dict_container[_time_item_]
                    ordered_list.append(temp_dictbox)
                for datafrom_filestring in ordered_list:
                    # skip malformed entries: exactly one key, matching the server
                    if len(datafrom_filestring.keys()) != 1 or not re.search(
                            matched_filename, str(datafrom_filestring.keys())):
                        continue
                    dictkey_datafrom_filestring = datafrom_filestring.keys()
                    items_keyname_dict = datafrom_filestring[
                        dictkey_datafrom_filestring[0]].keys()
                    if ((u'updated_time') not in items_keyname_dict):
                        continue
                    float_id = float(datafrom_filestring[
                        dictkey_datafrom_filestring[0]][u'updated_time'])
                    stats_inform_dict[unicode(parsed_filename)][float_id] = {}
                    stats_inform_dict[unicode(
                        parsed_filename)][float_id] = datafrom_filestring[
                            dictkey_datafrom_filestring[0]]
                all_stats_list.append(stats_inform_dict)
                # (legacy pre-0909 windowing/readline implementations removed;
                # see VCS history if the old behavior is needed)
            return Response(all_stats_list)
        except:
            return Response("stats data is not normal!")
    # get the result data and return
    # NOTE(review): only reachable for non-GET methods, and ``_status_all_``
    # is not defined in this function — confirm this is a module global.
    message = _status_all_
    return Response(message)
def get_stats_information(category_value, active_device_list):
    """Rank virtual servers per device by a stats category.

    For each active device, read the last JSON sample of every matching
    ``*@*<device>.virtual.stats`` file, pick the stat keys whose lowercase
    name matches ``category_value``, and return the top STATS_TOP_COUNT
    values (descending) with the virtual servers that produced them:
    ``{device: {stat_key: {rank: {"virtualservers": [...], "value": v}}}}``.

    NOTE(review): py2-era code (``unicode``, ``dict.keys().sort()``).
    """
    rank_dict_data = {}
    for _active_device_ in active_device_list:
        stats_filename = "*@*"+_active_device_+".virtual.stats"
        matched_filelist = glob.glob(USER_VAR_STATS+stats_filename)
        rank_dict_data[unicode(_active_device_)] = {}
        compare_container = {}
        for _mfilename_ in matched_filelist:
            f = open(_mfilename_, 'r')
            _read_contents_ = f.readlines()
            f.close()
            # only the newest (last) sample of each file is considered
            _last_string_ = _read_contents_[-1].strip()
            stream = BytesIO(_last_string_)
            _dictdata_ = JSONParser().parse(stream)
            # a valid sample has exactly one top-level key (the vserver name)
            if len(_dictdata_.keys()) != 1:
                continue
            _keyname_ = _dictdata_.keys()[-1].strip()
            _interval_ = _dictdata_[_keyname_][unicode("interval")]
            # stat keys whose name matches the requested category
            _matched_inner_keyname_ = []
            for _inner_keyname_ in _dictdata_[unicode(_keyname_)].keys():
                _value_ = str(_inner_keyname_.lower())
                if re.search(category_value, _value_):
                    _matched_inner_keyname_.append(_inner_keyname_)
            if len(_matched_inner_keyname_) == 0:
                continue
            for _inner_keyname_ in _matched_inner_keyname_:
                if unicode(_inner_keyname_) not in compare_container.keys():
                    compare_container[unicode(_inner_keyname_)] = {}
            # bucket vserver names (portion before '@') by stat value
            for _inner_keyname_ in _matched_inner_keyname_:
                _float_id_ = float(
                    _dictdata_[unicode(_keyname_)][unicode(_inner_keyname_)])
                if _float_id_ not in compare_container[unicode(_inner_keyname_)].keys():
                    compare_container[unicode(_inner_keyname_)][_float_id_] = []
                    _fname_ = str(_mfilename_.strip().split("/")[-1]).split("@")[0]
                    compare_container[unicode(_inner_keyname_)][_float_id_].append(_fname_)
                else:
                    _fname_ = str(_mfilename_.strip().split("/")[-1])
                    _fname_ = str(_mfilename_.strip().split("/")[-1]).split("@")[0]
                    compare_container[unicode(_inner_keyname_)][_float_id_].append(_fname_)
        # rank each matched stat key: largest values first
        _container_keyname_ = compare_container.keys()
        sorted_container = {}
        for _keyname_ in _container_keyname_:
            rank_dict_data[unicode(_active_device_)][unicode(_keyname_)] = {}
            sorted_container = {}
            sorted_id = compare_container[_keyname_].keys()
            sorted_id.sort()
            total_count = int(len(sorted_id))
            # keep at most STATS_TOP_COUNT of the highest values
            if total_count < STATS_TOP_COUNT:
                selected_values = sorted_id
            else:
                selected_values = sorted_id[int(total_count)-int(STATS_TOP_COUNT):]
            selected_values.reverse()
            rank = int(0)
            for _sorted_values_ in selected_values:
                sorted_container[rank] = {}
                sorted_container[rank][unicode(str("virtualservers"))] = compare_container[_keyname_][_sorted_values_]
                sorted_container[rank][unicode(str("value"))] = _sorted_values_
                rank = rank + int(1)
            rank_dict_data[unicode(_active_device_)][unicode(_keyname_)] = sorted_container
    return rank_dict_data
def import_from_url(self, url):
    """Download a CSV document from *url* and feed it to the file importer."""
    response = requests.get(url)
    # Force requests to evaluate as UTF-8
    response.encoding = 'utf-8'
    buffer = BytesIO(response.content)
    self.import_from_file(buffer)
def process_image(self, image, image_format, save_kwargs, width, height):
    """No-op processor: ignore every input and return an empty byte stream."""
    empty_stream = BytesIO()
    return empty_stream
def test_save_in_subdir(self, mock_sftp):
    """Saving into a nested path must create the parent directory first."""
    payload = File(BytesIO(b'foo'), 'foo')
    self.storage._save('bar/foo', payload)
    first_mkdir_args = mock_sftp.mkdir.call_args_list[0][0]
    self.assertEqual(first_mkdir_args, ('bar', ))
    self.assertTrue(mock_sftp.open.return_value.write.called)
def juniper_cachingnat(request, format=None):
    """REST endpoint for the Juniper NAT cache.

    GET  -- return the current cached view (``viewer_information()``).
    POST -- after an ``auth_key`` check, read the device list from the local
    devicelist endpoint, spawn one worker process per secondary device to
    refresh its NAT cache, prune stale ``cachenat_`` cache files, then
    return the refreshed view.
    """
    # NOTE(review): assigned but never used in this function.
    JUNIPER_DEVICELIST_DBFILE = USER_DATABASES_DIR + "devicelist.txt"
    # get method
    if request.method == 'GET':
        try:
            return Response(viewer_information())
        except:
            message = ["error, viewer has some issue!"]
            return Response(message, status=status.HTTP_400_BAD_REQUEST)
    elif request.method == 'POST':
        try:
            _input_ = JSONParser().parse(request)
            # shared-secret check; falls through (returns None) when it fails
            if re.match(ENCAP_PASSWORD, str(_input_[0]['auth_key'])):
                start_time = time.time()
                # log message
                #f = open(LOG_FILE,"a")
                #_date_ = os.popen("date").read().strip()
                #log_msg = _date_+" from : "+request.META['REMOTE_ADDR']+" , method POST request to run f5_devicelist function!\n"
                #f.write(log_msg)
                #f.close()
                # device file read (self-call to the local devicelist endpoint)
                CURL_command = "curl http://0.0.0.0:"+RUNSERVER_PORT+"/juniper/devicelist/"
                get_info = os.popen(CURL_command).read().strip()
                stream = BytesIO(get_info)
                data_from_CURL_command = JSONParser().parse(stream)
                ## policy database file comes from standby device!
                ## at this time, seconday should be used to match for working
                valid_access_ip = []
                ip_device_dict = {}
                for dataDict_value in data_from_CURL_command:
                    _keyname_ = dataDict_value.keys()
                    if (u"failover" not in _keyname_) or ("failover" not in _keyname_):
                        return Response("error, device list should be updated!", status=status.HTTP_400_BAD_REQUEST)
                    else:
                        # only secondary (standby) devices are polled
                        searched_element = re.search(str("secondary"), str(dataDict_value[u"failover"]), re.I)
                        if searched_element:
                            _ipaddress_ = str(dataDict_value[u"apiaccessip"])
                            if _ipaddress_ not in valid_access_ip:
                                ip_device_dict[_ipaddress_] = str(dataDict_value[u"devicehostname"])
                                valid_access_ip.append(_ipaddress_)
                # fan out one worker process per device, then wait for all
                _processor_list_ = []
                for _accessip_ in valid_access_ip:
                    _processor_ = Process(target=cachingnat_processing, args=(_accessip_, ip_device_dict[_accessip_],))
                    _processor_.start()
                    _processor_list_.append(_processor_)
                for _processor_ in _processor_list_:
                    _processor_.join()
                # delete file which name is cachenat_
                # cache files older than this refresh run are removed
                finish_time = time.time()
                spentabs_time = abs(float(finish_time) - float(start_time))
                for _dirctname_ in [USER_VAR_CHCHES]:
                    for _filename_ in os.listdir(_dirctname_):
                        filename_direct = str(_dirctname_.strip() + _filename_.strip())
                        if re.search("cachenat_", filename_direct, re.I):
                            timeabs_value = abs(float(finish_time) - float(os.path.getctime(filename_direct)))
                            if timeabs_value > spentabs_time:
                                remove_cmd = "rm -rf %(filename_direct)s" % {"filename_direct": filename_direct}
                                os.popen(remove_cmd)
                # return
                return Response(viewer_information())
        except:
            message = "Post Algorithm has some problem!"
            return Response(message, status=status.HTTP_400_BAD_REQUEST)
def setUp(self):
    """Prepare an in-memory PNG QR code wrapped as an ImageFile fixture."""
    buffer = BytesIO()
    generated_code = qrcode.make('https://mirumee.com/')
    generated_code.save(buffer)
    self.imagefile = ImageFile(buffer, '01.png')
def render(data, width, height, force=True, padding=None, overlays=(),
           overlay_sources=(), overlay_tints=(), overlay_sizes=None,
           overlay_positions=None, mask=None, mask_source=None,
           center=".5,.5", format=IMAGE_DEFAULT_FORMAT,
           quality=IMAGE_DEFAULT_QUALITY, fill=None, background=None,
           tint=None, pre_rotation=None, post_rotation=None, crop=True,
           grayscale=False):
    """
    Rescale the given image, optionally cropping it to make sure the result
    image has the specified width and height.

    ``data`` is the raw image (bytes, or a str on py2); the processed image
    is returned as encoded bytes in ``format`` at the given ``quality``.
    """
    # pick a byte or text buffer depending on the input type (py2-era shim)
    if not isinstance(data, six.string_types):
        input_file = BytesIO(data)
    else:
        input_file = StringIO(data)
    img = pil.open(input_file)
    # work in RGBA so masks/overlays/tints can rely on an alpha channel
    if img.mode != "RGBA":
        img = img.convert("RGBA")
    # default missing target dimensions to the source size
    if width is None:
        width = img.size[0]
    if height is None:
        height = img.size[1]
    img = do_rotate(img, pre_rotation)
    if crop:
        img = resizeCrop(img, width, height, center, force)
    else:
        img = resizeScale(img, width, height, force)
    if grayscale:
        img = do_grayscale(img)
    # in-place effects first, then ops that return a new image
    do_tint(img, tint)
    img = do_fill(img, fill, width, height)
    img = do_background(img, background)
    do_mask(img, mask, mask_source)
    do_overlays(img, overlays, overlay_tints, overlay_sources, overlay_sizes,
                overlay_positions)
    img = do_padding(img, padding)
    img = do_rotate(img, post_rotation)
    tmp = BytesIO()
    # formats without alpha support must be flattened to RGB before saving
    if not format.upper() in ALPHA_FORMATS:
        img = img.convert("RGB")
    img.save(tmp, format, quality=quality)
    tmp.seek(0)
    output_data = tmp.getvalue()
    input_file.close()
    tmp.close()
    return output_data
# Scratch walk-through of the DRF serialize/deserialize round trip for a
# Person instance: model -> dict -> JSON bytes -> parsed data -> serializer.
from core.models import Person
from core.serializers import PersonSerializer
from rest_framework.renderers import JSONRenderer
from rest_framework.parsers import JSONParser
from django.utils.six import BytesIO
person = Person.objects.get(pk=1)
serializer = PersonSerializer(person)
# serialize: native dict -> JSON bytes
content = JSONRenderer().render(serializer.data)
# deserialize: JSON bytes -> native data -> bound serializer
stream = BytesIO(content)
data = JSONParser().parse(stream)
serializer = PersonSerializer(data=data)
# passando a instancia 'person' ele chama o método update
# (passing the 'person' instance makes it call update())
# PersonSerializer(person, data=data)
serializer.is_valid()
serializer.validated_data
# an unbound serializer's repr shows the declared fields
serializer = PersonSerializer()
print(repr(serializer))
def test_save(self):
    """save() should return a purely numeric identifier for the upload."""
    photo = BytesIO(b"Foo")
    saved_id = self.storage.save('foo', photo)
    self.assertTrue(saved_id.isdigit())
def test_message_serialization(self):
    """
    Test serialization/deserialization of a sample Notification Message
    """
    # build a fully-populated source message to round-trip
    msg = NotificationMessage(
        id=1001,
        msg_type=NotificationType(
            name='edx_notifications.sample',
            renderer='foo.renderer',
        ),
        namespace='my-namespace',
        payload={
            'name1': 'val1',
            'name2': datetime.utcnow(),
        },
        deliver_no_earlier_than=datetime.utcnow(),
        created=datetime.utcnow(),
    )
    serializer = NotificationMessageSerializer(msg)
    json_data = JSONRenderer().render(serializer.data)
    # now deserialize the string and compare resulting objects
    stream = BytesIO(json_data)
    data = JSONParser().parse(stream)
    deserializer = NotificationMessageSerializer(data=data)
    self.assertTrue(deserializer.is_valid())
    # compare the original data object to our deserialized version
    # and make sure they are the same
    msg_payload = json.loads(deserializer.data['payload'])
    msg_output = NotificationMessage(
        id=deserializer.data['id'],
        msg_type=NotificationType(
            name=deserializer.data['msg_type']['name'],
            renderer=deserializer.data['msg_type']['renderer'],
        ),
        namespace=deserializer.data['namespace'],
        payload={
            'name1': msg_payload['name1'],
            'name2': msg_payload['name2'],
        },
        # serialized datetimes come back as strings; reparse as UTC-aware
        deliver_no_earlier_than=parser.parse(
            deserializer.data['deliver_no_earlier_than']
        ).replace(tzinfo=pytz.timezone('UTC')),
        created=parser.parse(deserializer.data['created']).replace(tzinfo=pytz.timezone('UTC')),
    )
    self.assertEqual(msg.namespace, msg_output.namespace)
    self.assertEqual(msg.msg_type, msg_output.msg_type)  # pylint: disable=maybe-no-member
    # now intentionally try to break it
    data['namespace'] = 'busted'
    data['msg_type']['name'] = 'not-same'
    deserializer = NotificationMessageSerializer(data=data)
    self.assertTrue(deserializer.is_valid())
    # compare the original data object to our deserialized version
    # and make sure they are not considered the same
    msg_payload = json.loads(deserializer.data['payload'])
    msg_output = NotificationMessage(
        id=deserializer.data['id'],
        msg_type=NotificationType(
            name=deserializer.data['msg_type']['name'],
            renderer=deserializer.data['msg_type']['renderer'],
        ),
        namespace=deserializer.data['namespace'],
        payload={
            'name1': msg_payload['name1'],
            'name2': msg_payload['name2'],
        },
        deliver_no_earlier_than=parser.parse(
            deserializer.data['deliver_no_earlier_than']
        ).replace(tzinfo=pytz.timezone('UTC')),
        created=parser.parse(deserializer.data['created']).replace(tzinfo=pytz.timezone('UTC')),
    )
    self.assertNotEqual(msg.namespace, msg_output.namespace)
    self.assertNotEqual(msg.msg_type, msg_output.msg_type)  # pylint: disable=maybe-no-member
from status.api.serializers import StatusSerializer
'''
serializing a single object.
'''
# BUG FIX: the default model manager is ``objects`` — ``Status.object``
# raises AttributeError.
qs = Status.objects.first()
serializer = StatusSerializer(qs)
print(serializer.data)  # gives ordered dict data from our model
json_data = JSONRenderer().render(serializer.data)
print(json_data)  # gives byte format of JSON. or Called JSON
# To convert back to Python data: json.loads(json_data)
# will give list or dict data in python
# When we have streaming data:
stream = BytesIO(json_data)
data = JSONParser().parse(stream)
print(data)  # will give list or dict data in python.
'''
Serializing a queryset
'''
# BUG FIX: the queryset demo must serialize a queryset — ``first()`` returns
# a single instance, which breaks ``many=True``; use ``all()``.
qs = Status.objects.all()
serializer = StatusSerializer(qs, many=True)
print(serializer.data)  # gives ordered dict data from our model
json_data = JSONRenderer().render(serializer.data)
print(json_data)  # gives byte format of JSON. or Called JSON
# To convert back to Python data: json.loads(json_data)
# will give list or dict data in python
# When we have streaming data:
def test_address_list(self):
    """The sample mail's Cc header should parse into five addresses."""
    with open(p("test_mail_01.txt"), 'rb') as f:
        raw_bytes = f.read()
    mail = EmailParser().parse(BytesIO(raw_bytes))
    self.assertEqual(len(mail['cc']), 5)
def test_put_file(self, mock_ftp):
    """A simple upload over an open connection should not raise."""
    storage = self.storage
    storage._start_connection()
    payload = File(BytesIO(b'foo'), 'foo')
    storage._put_file('foo', payload)
def write(self, content):
    """Replace the buffered file contents with *content*.

    Raises AttributeError when the file was not opened writable; on
    success marks the buffer as dirty and read.
    """
    mode = self._mode
    if 'w' not in mode:
        raise AttributeError("File was opened for read-only access.")
    buffer = BytesIO(content)
    self.file = buffer
    self._is_dirty = True
    self._is_read = True
def test_save(self, mock_ftp):
    """_save should accept a wrapped in-memory file without error."""
    in_memory = File(BytesIO(b'foo'), 'foo')
    self.storage._save('foo', in_memory)
# Scratch walk-through of GameSerializer: inspect two Game instances,
# serialize/render them, then parse and validate a brand-new game payload.
print(game1.id)
print(game1.name)
print(game1.created_timestamp)
print(game2.id)
print(game2.name)
print(game2.created_timestamp)
# serialize both instances to primitive dicts
game_serializer1 = GameSerializer(game1)
print(game_serializer1.data)
game_serializer2 = GameSerializer(game2)
print(game_serializer2.data)
# render the dicts to JSON bytes
renderer = JSONRenderer()
rendered_game1 = renderer.render(game_serializer1.data)
rendered_game2 = renderer.render(game_serializer2.data)
print(rendered_game1)
print(rendered_game2)
# deserialize a hand-written JSON payload into a new Game
json_string_for_new_game = '{"name":"Red Dead Redemption 2","release_date":"2018-10-26T01:01:00.776594Z","esrb_rating":"M (Mature)"}'
json_bytes_for_new_game = bytes(json_string_for_new_game, encoding="UTF-8")
stream_for_new_game = BytesIO(json_bytes_for_new_game)
parser = JSONParser()
parsed_new_game = parser.parse(stream_for_new_game)
print(parsed_new_game)
new_game_serializer = GameSerializer(data=parsed_new_game)
# only persist when the payload validates
if new_game_serializer.is_valid():
    new_game = new_game_serializer.save()
    print(new_game.name)
def get_json(request):
    """Parse the raw request body as JSON and return the resulting data."""
    body_stream = BytesIO(request.body)
    parsed = JSONParser().parse(body_stream)
    return parsed
def get_reservation_serializer(reservation):
    """Round-trip *reservation* through JSON and return a data-bound serializer."""
    rendered = JSONRenderer().render(ReservationSerializer(reservation).data)
    parsed = JSONParser().parse(BytesIO(rendered))
    return ReservationSerializer(data=parsed)