def list_vms(host=None):
    """Make a list of VMs and expand out their fixed and floating IPs sensibly."""
    flags.parse_args([])
    my_instances = []
    if host is None:
        instances = db.instance_get_all(context.get_admin_context())
    else:
        instances = db.instance_get_all_by_host(
            context.get_admin_context(), host)
    for instance in instances:
        my_inst = dict(instance).copy()
        # Coerce any value the json module cannot encode into a string.
        for (k, v) in my_inst.items():
            try:
                json.dumps(v)
            except TypeError:
                my_inst[k] = str(v)
        ec2_id = db.get_ec2_instance_id_by_uuid(
            context.get_admin_context(), instance.uuid)
        my_inst['ec2_id'] = 'i-' + hex(int(ec2_id)).replace('0x', '').zfill(8)
        try:
            fixed_ips = db.fixed_ip_get_by_instance(
                context.get_admin_context(), instance.uuid)
        except Exception:
            fixed_ips = []
        my_inst['fixed_ips'] = [ip.address for ip in fixed_ips]
        my_inst['floating_ips'] = []
        for ip in fixed_ips:
            my_inst['floating_ips'].extend(
                f_ip.address
                for f_ip in db.floating_ip_get_by_fixed_address(
                    context.get_admin_context(), ip.address))
        my_instances.append(my_inst)
    return my_instances
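
A quick way to exercise the helper, assuming the nova db, context, and flags modules above are importable and configured; the json.dumps round trip works because every non-serializable value was coerced to a string:

if __name__ == '__main__':
    # Pretty-print the serializable instance dicts built above.
    print(json.dumps(list_vms(), indent=2))
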
def get(self, command):
    if command == 'playlists':
        # Serve the cached playlists if present; otherwise fetch and cache them.
        try:
            with open('cache/playlists.p', 'rb') as cache:
                playlists = pickle.load(cache)
        except (OSError, pickle.UnpicklingError):
            # current_user_playlists() returns a dict, not a context manager.
            playlists = self.sp.current_user_playlists()
            print(playlists)
            with open('cache/playlists.p', 'wb') as cache:
                pickle.dump(playlists, cache)
        return playlists
    elif command == 'token':
        try:
            return {"token": self.token, "refresh": 'None'}
        except AttributeError:
            return {"token": "none", "refresh": "none"}
    elif command == 'Search':
        state = json.loads(request.data)
        return self.DB.Search(state['Query'], state['ReturnAmt'], state['Type'])
    else:
        return {"command": "invalid"}
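
The 'playlists' branch is a read-through cache backed by pickle. A generic sketch of the same pattern, standalone (the fetch callable and cache path are illustrative):

import os
import pickle

def cached(path, fetch):
    """Return the pickled value at path, recomputing and caching it on a miss."""
    try:
        with open(path, 'rb') as f:
            return pickle.load(f)
    except (OSError, pickle.UnpicklingError):
        value = fetch()
        os.makedirs(os.path.dirname(path) or '.', exist_ok=True)
        with open(path, 'wb') as f:
            pickle.dump(value, f)
        return value

# e.g. playlists = cached('cache/playlists.p', sp.current_user_playlists)
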
def make_book_json(self, ISBN, Title, Pub_Year, Pub_Name, Pub_City, Pub_URL,
                   Authors=None, Notes=None, Edition=1, Language='English'):
    # Avoid mutable default arguments.
    Authors = Authors or []
    Notes = Notes or []
    notes_list = [{"Text": "{0}".format(note)} for note in Notes]
    book_dict = {
        "ISBN": ISBN,
        "Title": Title,
        "Pub_Year": Pub_Year,
        "Edition": Edition,
        "Language": Language,
        "Authors": self.make_authors_dict_list(Authors),
        "Publisher": {
            "Name": Pub_Name,
            "City": Pub_City,
            "URL": Pub_URL,
        },
        "Notes": notes_list,
    }
    # Equivalent to json.dumps(book_dict).
    return json.JSONEncoder().encode(book_dict)
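
A hypothetical round trip; `catalog` stands in for an instance of the class that defines make_book_json and make_authors_dict_list:

payload = catalog.make_book_json(
    ISBN="9780132350884", Title="Clean Code", Pub_Year=2008,
    Pub_Name="Prentice Hall", Pub_City="Upper Saddle River",
    Pub_URL="https://example.com/clean-code",
    Authors=["Robert C. Martin"], Notes=["First printing"])
book = json.loads(payload)  # the encoded string parses back into a dict
assert book["Publisher"]["City"] == "Upper Saddle River"
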
def post_search_query():
    input_string = ("https://upload.wikimedia.org/wikipedia/commons/7/71/"
                    "2010-kodiak-bear-1.jpg")
    # If the query is an image URL, extract a text query from the image first.
    if re.search(r"(?:https?:)?//.*\.(?:png|jpg)", input_string):
        input_string = string_from_image(input_string)
    # Validate the input.
    if not input_string or len(input_string) > max_query_length:
        return json.dumps({
            "success": False,
            "message": "query empty or too long"
        })
    # Categorize the query.
    category = categorize_string(input_string)
    print(category)
    # Gather wiki data, then the category-specific Wolfram data and summary.
    wiki_data = komila.create_maps(input_string)
    if category == animal:
        wolfram_data = aaron.process_animal(input_string)
        summary = anthony.summarize_animals(wiki_data, wolfram_data)
    elif category == person:
        wolfram_data = aaron.process_person(input_string)
        summary = anthony.summarize_person(wiki_data, wolfram_data)
    elif category == planet:
        wolfram_data = aaron.process_planet(input_string)
        summary = anthony.summarize_planets(wiki_data, wolfram_data)
    elif category == cities:
        wolfram_data = aaron.process_cities(input_string)
        summary = anthony.summarize_cities(wiki_data, wolfram_data)
    else:
        return json.dumps({
            "success": False,
            "message": "could not find category"
        })
    if not summary["success"]:
        return json.dumps({
            "success": False,
            "message": "failure in summarize"
        })
    del summary["success"]
    return json.dumps(summary)
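
Several of these snippets call json.encoder(...) as if it were a function; in the standard library that name is a module, so the call raises TypeError. A minimal demonstration of the callable equivalents:

import json

data = {"success": True}
# json.dumps is the usual entry point; JSONEncoder().encode is equivalent.
assert json.dumps(data) == json.JSONEncoder().encode(data)
# json.encoder.JSONEncoder is the class that json re-exports as json.JSONEncoder.
assert json.encoder.JSONEncoder is json.JSONEncoder
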
def build_transformer(
    vocab_size: int,
    input_length: int = 32,
    model_name: str = "cl-tohoku/bert-base-japanese",
    hopping_num: int = 6,
    head_num: int = 8,
    hidden_dim: int = 512,
    dropout_rate: float = 0.1,
    encoder_output_dim: int = 768,
    target_length: int = 64,
) -> tf.keras.models.Model:
    input_shape = (input_length,)
    input_ids = tf.keras.layers.Input(input_shape, dtype=tf.int32,
                                      name="input_ids")
    input_attention_mask = tf.keras.layers.Input(input_shape, dtype=tf.int32,
                                                 name="input_attention_mask")
    # Use a frozen pretrained BERT as the encoder.
    encoder = TFBertModel.from_pretrained(model_name)  # output_attentions=True
    encoder.trainable = False
    encoder_output = encoder(input_ids, attention_mask=input_attention_mask)
    target_ids = tf.keras.layers.Input((target_length,), dtype=tf.int32,
                                       name="target_ids")
    decoder = Decoder(
        vocab_size=vocab_size,
        hopping_num=hopping_num,
        head_num=head_num,
        hidden_dim=hidden_dim,
        dropout_rate=dropout_rate,
    )
    # encoder_output[0] is the sequence of last hidden states.
    output = decoder(
        input=target_ids,
        encoder_output=encoder_output[0],
        source_mask=input_attention_mask,
    )
    return tf.keras.Model(
        inputs=[input_ids, input_attention_mask, target_ids],
        outputs=[output])
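
A sketch of compiling the assembled model, assuming tensorflow, transformers, and a compatible Decoder implementation are importable; the vocabulary size, optimizer, and the assumption that the decoder emits logits are all illustrative:

model = build_transformer(vocab_size=32000)
model.compile(
    optimizer=tf.keras.optimizers.Adam(1e-4),
    # assumes Decoder outputs unnormalized logits over the vocabulary
    loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True))
model.summary()
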
import json

# b is a list of [site_url, order_url] pairs scraped from Digi-Key.
# Replace each site URL with its country code (the first hostname label).
for i, t in enumerate(b):
    parts = t[0].split('//')
    if len(parts) > 1:
        country = parts[1].split('.')[0]
        if country != "www":
            b[i][0] = country

# Two "www" hosts keep their full URL; key them by hand.
b[73][0] = "us"   # http://www.digikey.com
b[77][0] = "int"  # international ordering site

d = dict(b)
with open("digikey_international.json", "w") as outfile:
    json.dump(d, outfile)
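
Reading the mapping back is the mirror image; a quick sanity check on the dumped file:

with open("digikey_international.json") as infile:
    d = json.load(infile)
print(d["us"])  # the hand-keyed US ordering URL
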
def hello_class():
    # json.encoder is a module, not a callable; serialize the Person's
    # attributes with json.dumps instead.
    return json.dumps(Person('121', '1231231').__dict__)
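
If Person objects need encoding in more places, a default hook avoids repeating the __dict__ trick; the Person class here is a stand-in matching the two-argument constructor above:

import json

class Person:
    def __init__(self, person_id, phone):
        self.person_id = person_id
        self.phone = phone

# vars(obj) returns obj.__dict__, so every attribute is serialized.
print(json.dumps(Person('121', '1231231'), default=vars))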