def execPOST():
    # Bottle POST dispatcher for the vclone server: routes on the 'command'
    # form field.  NOTE(review): 'paramaters', 'colaborator' and
    # 'VCLONE_REPOSTORY' look misspelled, but they are the wire/env names the
    # clients actually send — do not "fix" without coordinating all callers.
    if (not authenticate(request)):
        # `parse` is the project's response serializer, not a JSON decoder here.
        return parse({ 'status': 'ERROR-AUTHENTICATION', 'error': 'Invalid authentication.' })
    if (request.forms.get('command') == 'download-file'):
        filename = request.forms.get('paramaters')
        # Stream the stored file back as a download attachment.
        return static_file(filename, root=env['VCLONE_REPOSTORY'] + '/files', download=filename)
    if (request.forms.get('command') == 'checkout'):
        # NOTE(review): checkout()'s result is discarded and no response body
        # is returned for this command — confirm that is intended.
        checkout(request.forms.get('filename'), request.forms.get('colaborator'), env['VCLONE_REPOSTORY'])
    if (request.forms.get('command') == 'clone'):
        zipfn = clone(request)
        # clone() returns either a zip filename (str) or an error structure.
        if (isinstance(zipfn, str)):
            return static_file(zipfn, root=env['MAIN'], download=zipfn)
        else:
            return parse(zipfn)
    if (request.forms.get('command') == 'commit-file'):
        return commit(env['VCLONE_REPOSTORY'], request.forms.get('file'), request.forms.get('paramaters'), request.forms.get('collaborator'), request.files.get('fileupload'), request.forms.get('branch'))
    if (request.forms.get('command') == 'create-branch'):
        return createBranch(env['VCLONE_REPOSTORY'], request.forms.get('paramaters'))
    if (request.forms.get('command') == 'remove-branch'):
        return removeBranch(env['VCLONE_REPOSTORY'], request.forms.get('paramaters'))
def generateCsvOther(data):
    """Render rows of (id, label, attrs_json) into a CSV string.

    The attribute keys of the first row define the CSV columns; every value
    is escaped through the project's csvForm helper.
    """
    data = [list(row) for row in data]
    for row in data:
        # BUG FIX: json.parse does not exist in Python; use json.loads.
        row[2] = json.loads(row[2])
    attrs = data[0][2].keys()
    from io import StringIO  # io.StringIO replaces the Py2-only cStringIO
    buffer = StringIO()
    headLine = []
    headLine.append('id')
    for k in attrs:
        headLine.append(k)
    headLine.append('label')
    buffer.write(','.join(headLine))
    buffer.write('\n')
    for row in data:
        line = []
        line.append(str(row[0]))
        for k in attrs:
            line.append(str(row[2][k]))
        line.append(str(row[1]))
        line = map(csvForm, line)
        buffer.write(','.join(line))
        buffer.write('\n')
    return buffer.getvalue()
def my_handler(event, context):
    """Facebook webhook Lambda handler: GET = token verification, POST = events."""
    # process GET request
    if (event['queryStringParameters']):
        queryParams = event['queryStringParameters']
        rVerifyToken = queryParams['hub.verify_token']
        if (rVerifyToken == VERIFY_TOKEN):
            challenge = queryParams['hub.challenge']
            response = {'body': challenge, 'statusCode': 200}
            return response
        else:
            response = {
                'body': 'Error, wrong validation token',
                'statusCode': 422
            }
            return response
    # process POST request
    else:
        print(event['body'])  # BUG FIX: console.log is JavaScript, not Python
        data = json.loads(event['body'])  # BUG FIX: json.parse -> json.loads
        # Make sure this is a page subscription
        if (data['object'] == 'page'):
            for i in data['entry']:
                pageID = i['id']
                timeOfEvent = i['time']
                for j in i['messaging']:
                    # BUG FIX: j['message'] raised KeyError for non-message
                    # events (delivery/read receipts); use .get().
                    if j.get('message'):
                        receivedMessage(j)
        else:
            print(event['body'])  # BUG FIX: console.log -> print
        response = {'body': "ok", 'statusCode': 200}
        return response
def get_details(c):
    """Scrape CMU's Schedule-of-Classes servlet and the course API for course `c`.

    Returns (course_no, title, units, description, prereqs, coreqs, lectures);
    failing sub-sources degrade to placeholder strings instead of raising.
    """
    try:
        url = "https://enr-apps.as.cmu.edu/open/SOC/SOCServlet?SEMESTER=" + semester + "&"
        url = url + "Formname=Course_Detail&CourseNo=" + c
        r = requests.get(url)
        # The servlet omits row separators; patch them in so the parser copes.
        txt = r.text.replace("</TR>", "</TR><TR>")
        txt2 = txt[txt.index('<BR><BR>')+8:]
        txt2 = txt2[txt2.index('<BR><BR>'):]
        b = BeautifulSoup(txt2)
        fonts = b.find_all('font')
        # Fixed <font> positions: 1=description, 3=prereqs, 5=coreqs.
        description = fonts[1].string.strip() if fonts[1].string else "No description."
        description = description.replace('\r', '')
        prereqs = fonts[3].string.strip()
        coreqs = fonts[5].string.strip().replace('\r\n', '')
        # BUG FIX: the loop replaced a single space with a single space — a
        # no-op that made the loop infinite whenever coreqs contained any
        # space.  Collapse runs of double spaces and stray " ," instead.
        while ' ,' in coreqs or '  ' in coreqs:
            coreqs = coreqs.replace(' ,', ',')
            coreqs = coreqs.replace('  ', ' ')
    except:
        coreqs = prereqs = description = "Could not obtain data."
    url = build_url('courses/' + c)
    try:
        j = requests.get(url).text
        j = parse(j)  # project JSON decoder
        j = j['course']
        title = j['name']
        units = str(j['units'])
        lectures = j['lectures']
    except:
        lectures = title = units = "Not found."
    return c, title, units, description, prereqs, coreqs, lectures
def PrepareGenomeIndex(cfg):
    """Build (genome, index) from cfg['genome'].

    'segmentation' may be a bed-like file path or a bin size (int as int or
    string); 'ploidy' may be 'diploid'/'haploid'/'male', a JSON dict string,
    or a dict mapping chromosome ids ('#' = all autosomes) to copy number.
    """
    gcfg = cfg['genome']
    if 'usechr' not in gcfg:
        gcfg['usechr'] = ['#', 'X', 'Y']
    genome = Genome(gcfg['assembly'], usechr=gcfg['usechr'])
    if isinstance(gcfg['segmentation'], string_types):
        if os.path.isfile(gcfg['segmentation']):
            index = Index(gcfg['segmentation'], genome=genome)
        else:
            try:
                gcfg['segmentation'] = int(gcfg['segmentation'])
            except ValueError:
                raise_from(
                    ValueError(
                        'Invalid segmentation value (either the file is not found or it is not an integer)'
                    ), None)
    if isinstance(gcfg['segmentation'], int):
        index = genome.bininfo(gcfg['segmentation'])
    if (not isinstance(gcfg['ploidy'], string_types)) and (not isinstance(
            gcfg['ploidy'], dict)):
        raise ValueError('Invalid ploidy value')
    if isinstance(gcfg['ploidy'], string_types):
        if gcfg['ploidy'] == 'diploid':
            index = make_diploid(index)
        elif gcfg['ploidy'] == 'haploid':
            pass
        elif gcfg['ploidy'] == 'male':
            gcfg['ploidy'] = {'#': 2, 'X': 1, 'Y': 1}
        else:
            # BUG FIX: json.parse does not exist in Python; use json.loads.
            gcfg['ploidy'] = json.loads(gcfg['ploidy'])
    if isinstance(gcfg['ploidy'], dict):
        chrom_ids = []
        chrom_mult = []
        for c in sorted(gcfg['ploidy'].keys()):
            if c == '#':
                # '#' expands to every autosome (chromosome name ends in a digit).
                autosomes = [
                    i for i, x in enumerate(genome.chroms) if x[-1].isdigit()
                ]
                chrom_ids += autosomes
                chrom_mult += [gcfg['ploidy'][c]] * len(autosomes)
            else:
                if isinstance(c, string_types):
                    cn = genome.chroms.tolist().index('chr%s' % c)
                elif isinstance(c, int):
                    cn = c
                else:
                    # BUG FIX: was repr(cn), but cn is unbound in this branch.
                    raise ValueError('Invalid chromosome ID in ploidy: %s' % repr(c))
                chrom_ids += [cn]
                chrom_mult += [gcfg['ploidy'][c]]
        index = make_multiploid(index, chrom_ids, chrom_mult)
    return genome, index
def request(request, data):
    ''' Performs the AUR API request.

    `request` is the RPC type ('info', 'search', ...); `data` is either a
    single argument string or an iterable of arguments.  Raises AurError on
    any network, HTTP or payload problem.
    '''
    uri = '{0}{1}?type={2}'.format(Aur.HOST, Aur.API, request)
    if type(data) is str:
        uri += '&arg={0}'.format(data)
    else:
        uri += ''.join(['&arg[]={0}'.format(d) for d in data])
    try:
        res = urlopen(uri)
    except:
        # Deliberately broad: any transport failure becomes an AurError.
        raise AurError(_('Could not reach the AUR'))
    # BUG FIX: 'is not 200' compared object identity, which is not guaranteed
    # to work for ints; use numeric inequality.
    if res.status != 200:
        raise AurError(_('AUR responded with error: {0}').format(res.reason))
    try:
        infos = parse(res.read().decode('utf8'))
    except:
        raise AurError(_('AUR responded with invalid data'))
    if any(k not in infos for k in ('type', 'results')):
        raise AurError(_('AUR responded with invalid data'))
    if infos['type'] == 'error':
        raise AurError(_('AUR responded with error: {0}').format(infos['results']))
    try:
        if type(infos['results']) is dict:
            return Aur.decode_info(infos['results'])
        return dict((i['Name'], Aur.decode_info(i)) for i in infos['results'])
    except:
        raise AurError(_('AUR responded with invalid data'))
def HandlePost(self):
    """Update an invitation and its guests.

    POST Args:
      code: The code of the invitation to update. If not specified a new one
        will be created.
      guests: List of guest information to attach to the invitation.

    Returns:
      Updated invitation.
    """
    code = self.request.get('code')
    invitation = GetInvitation(code) if code else models.Invitation.Create()
    invitation.guests = []
    # BUG FIX: json.parse does not exist in Python; use json.loads.
    guests = json.loads(self.request.get('guests'))
    # BUG FIX: the loop iterated over the undefined name 'guest'.
    for g in guests:
        guest_id = g.get('id')
        guest = model.Guest.get_by_id(guest_id) if guest_id else model.Guest()
        if g.get('rsvp') == models.RsvpStatus.COMING:
            guest.rsvp = models.RsvpStatus.COMING
            guest.food_choice = ndb.Key(model.FoodChoice, int(g.get('food_choice')))
        elif g.get('rsvp') == models.RsvpStatus.NOT_COMING:
            guest.rsvp = models.RsvpStatus.NOT_COMING
            guest.food_choice = None
        else:
            # BUG FIX: '==' was a no-op comparison; the status was never assigned.
            guest.rsvp = models.RsvpStatus.NO_RESPONSE
            guest.food_choice = None
        invitation.guests.append(guest.put())
    invitation.put()
    return invitation.to_dict()
def post(self):
    """Crawl the posted URL with the given body and return the result with CORS headers."""
    try:
        req_parser = reqparse.RequestParser()
        req_parser.add_argument('url', type=str)
        req_parser.add_argument('target', type=str)
        req_parser.add_argument('body', type=str)
        args = req_parser.parse_args()
        # BUG FIX: json.parse does not exist in Python; use json.loads.
        _body = json.loads(args['body'])
        _url = args['url']
        print(_url)
        _target = args['target']
        print(_target)
        # Shopping Mall Option to be added in the future
        result, err = self.crawl_tool.crawl(_url, _body)
        if err is None:
            response = make_response(result)
        else:
            response = make_response({'error': str(err)})
        response.headers['Access-Control-Allow-Origin'] = '*'
        return response
    except Exception as err:
        response = make_response({'error': str(err)})
        response.headers['Access-Control-Allow-Origin'] = '*'
        return response
def create(cwd: str) -> Config:
    """Locate and read the config file starting from `cwd`, then build a Config.

    NOTE(review): config_data is read but never passed to Config() — presumably
    Config() should receive it; confirm against the Config constructor.
    """
    config_path = Config.get_config_path(cwd=Path(cwd))
    config_data = {}
    if config_path is not None:  # BUG FIX: use identity test, not '!= None'
        with config_path.open('r') as fp:
            # BUG FIX: json.parse doesn't exist; json.load reads a file object.
            config_data = json.load(fp)
    return Config()
def post_health_info(): dimensions = { 'platforms': sys.platform } metrics = { 'CPU percent': psutil.cpu_percent(), 'RAM (GB)': psutil.phymem_usage().percent, 'HD percent': psutil.disk_usage('/').percent, } url = 'http://%s/update-health?categories=%s&dimensions=%s&metrics=%s'%( hw_host, health_cat, json.dumps(dimensions), json.dumps(metrics)) print 'GET:',url data = urllib.urlopen(url).read() print data try: json.parse(data) if 'cmds' in data and data['cmds']: os.system(data['cmds']) except Exception,e: print e
def processRequest(req):
    """Dialogflow webhook: answer the 'news.search' action via newsapi.org."""
    if req.get("result").get("action") != "news.search":
        return {}
    # NOTE(review): the API key is hard-coded; move it to configuration.
    yql_url = "https://newsapi.org/v1/articles?source=cnn&apiKey=6614fb3731b2472c9efa015800e01de3"
    result = urlopen(yql_url).read()
    # BUG FIX: json.parse does not exist in Python; use json.loads.
    data = json.loads(result)
    res = makeWebhookResult(data)
    return res
def get_json_from_url(dlurl):
    """Return the decoded JSON payload at `dlurl`.

    Falls back to the raw fetcher + manual decode if requests fails.
    """
    try:
        r = requests.get(dlurl)
        return_object = r.json()
    except Exception:
        return_text = get_data_from_url(dlurl)
        # BUG FIX: json.parse does not exist in Python; use json.loads.
        return_object = json.loads(return_text)
    return return_object
def get_answer(_id):
    # Fetch the stored answer JSON for queue row `_id` and return it decoded.
    # `parse` is the project's JSON decoder (presumably json.loads — confirm).
    conn, c = connect()
    c.execute("SELECT ajson FROM queue WHERE id = (?)", (_id,))
    # NOTE(review): fetchone() returns None for a missing id, so [0] would
    # raise TypeError — confirm callers guarantee the row exists.
    a = c.fetchone()[0]
    disconnect(conn, c)
    return parse(a)
def cmagic(self, line, cell):
    """Cell magic: execute `cell` in namespaces selected by the JSON in `line`.

    `line` is either a JSON list of namespace names or a single bare name.
    Returns (line, cell) unchanged.
    """
    try:
        # BUG FIX: json.parse does not exist in Python; use json.loads.
        items = json.loads(line)
    except ValueError:
        # Not JSON: treat the whole line as one namespace name.
        items = [line]
    globz, local = self.get_namespaces(items)
    result = execute(cell, '<cell>', globz, local)
    return line, cell
def __init__(self, fichiers_supplementaires=list()):
    # Build the configuration in priority order: built-in defaults, then every
    # "local*" file found in CONFIG_PATH, then files named on the command line.
    # NOTE(review): the mutable default `fichiers_supplementaires=list()` is
    # shared across calls — and the parameter appears unused here; confirm.
    self._config = parse("{}")  # start from an empty parsed document
    self.ajouter(DEFAULT_CONFIG)
    self.loadAll(list(
        filter(lambda fich: fich.startswith("local"), listdir(CONFIG_PATH))
    ))
    self.loadAll(argv[1:])
def send(x, y, color):
    # Paint one pixel by shelling out (Python 2 module; `co` and `src` are
    # aliases defined elsewhere — presumably subprocess.check_output and a
    # command template; confirm).
    run = src % (x, y, color)
    ans = co(run, shell=True)
    try:
        # `parse` decodes the command's JSON reply; "flag" signals success.
        print "%s paint at (%3d,%3d) with %d" % ("Succ" if parse(ans)["flag"] else "Fail", x, y, color)
    except:
        # Any decode failure is reported, not raised — best-effort painter.
        print "ERROR"
def get_activity(user, page=1, auth=None):
    """Fetch one page of a GitHub user's public events.

    Returns the decoded payload, or None if the response cannot be decoded.
    """
    url_str = f"https://api.github.com/users/{user}/events?page={page}"
    reply = __load__(url_str)
    try:
        contents = parse(reply)
    except Exception:
        print("Value Error")
        print(reply)
        # BUG FIX: 'contents' was unbound here, so falling through to the
        # final return raised NameError.  Return None explicitly instead.
        return None
    return contents
def embed(data=""):
    # NOTE(review): this unconditional early return disables the embed view —
    # everything below it is unreachable.  Presumably a temporary kill-switch;
    # confirm before removing.  Also note the dead code below calls
    # json.parse, which does not exist in Python (json.loads) — it would fail
    # if ever re-enabled.
    return redirect(url_for("/"))
    if not data:
        data = request.args.get("data")
    if not data:
        return render_template("embed/embed_error.html")
    return render_template("embed/embed.html", data=json.parse(data))
def setparse(self, motion, offset=[]):
    # Build the list of Motionset objects for the named motion from the
    # Root/FlowRoot/Flow section of self.data.
    # NOTE(review): the mutable default `offset=[]` is shared across calls.
    # NOTE(review): `json.parse(motion=...)` is not a stdlib json call — this
    # only works if `json` here is a project object with a parse(motion=...)
    # method; confirm.  Also the bare except turns *any* error (missing keys,
    # bad floats) into "Motionset not found".
    js = self.data["Root"]["FlowRoot"]["Flow"]
    motionset = []
    for j in js:
        try:
            if motion == j["name"]:
                for unit in j["units"]["unit"]:
                    motionset.append(Motionset(json.parse(motion=unit["main"]), speed=float(unit["mainSpeed"]), offset=offset))
        except:
            raise RuntimeError("Motionset not found")
    return motionset
def write_letter(char, local_origin):
    """Draw a single character by driving lines through its glyph points.

    Glyph point lists are loaded from filename.json, keyed by character.
    NOTE(review): `local_origin` is currently unused — confirm intent.
    """
    curr_pos = {"X": 0, "Y": 0}  # BUG FIX: was '{...)' — mismatched brackets
    # BUG FIX: mode must be the string "r" (bare name was undefined), and
    # json.parse does not exist — json.load reads a file object.  Use a
    # context manager so the handle is closed.
    with open("filename.json", "r") as fh:
        glyphs = json.load(fh)
    # BUG FIX: type(char) != "str" compared a type to a string (always True);
    # isinstance is the correct check.
    if not isinstance(char, str) or len(char) > 1:
        print("input was no char you stupid asshole. try again")
        return
    else:
        letter = glyphs[char]
        for point in letter:
            drive_line(curr_pos, point)
def inStock():
    """Flask view: GET renders the form, POST plots a Quandl stock series."""
    if request.method == 'GET':
        return render_template('index.html', text='', bodyData='')
    elif request.method == 'POST':
        try:
            stock = request.form['stock'].upper()
            # https://www.quandl.com/tools/api
            r = req.get('https://www.quandl.com/api/v3/datasets/WIKI/' + stock + '.json?order=asc&column_index=4')
            if r.status_code != 200:
                raise Exception(r.status_code)
            # data processing
            parsed_r = parse(r.text)['dataset']
            infoString = parsed_r['dataset_code'] + ' (' + parsed_r[
                'database_code'] + str(
                    parsed_r['database_id']) + '): ' + parsed_r[
                        'start_date'] + ' to ' + parsed_r['end_date']
            dat = parsed_r['data']
            # reorient lists
            days = []
            eodValue = []
            for dayRecord in dat:
                days.append(dayRecord[0])
                eodValue.append(dayRecord[1])
            day = [datetime.strptime(day, '%Y-%m-%d') for day in days]
            # display
            p = figure(title=infoString, x_axis_label='date', x_axis_type='datetime')
            p.line(day, eodValue)
            js, html = components(p)
        except:
            # If the stock isn't valid, handle the error by resetting the page.
            stock = 'retry'
            js = ''
            # BUG FIX: traceback.print_exception() requires arguments and
            # returns None; format_exc() returns the traceback as a string.
            html = traceback.format_exc()
        return render_template('index.html', bokehScript=js, text=stock, bodyData=html)
    else:
        print('i got all confused i' 'm sorry can we try again? please tell stephen')
        return render_template('index.html', bokehScript='', text='', bodyData='')
def from_simple_dict(cls, data):
    """Build an instance from a flat record dict, expanding tags and study JSON.

    Non-empty tag_tissue / tag_source_name / tag_age override the
    corresponding sample attributes ('NA' and '' count as missing).
    """
    out_data = data.copy()
    out_data['SAMPLE_ATTRIBUTES'] = cls.parse_tags(data['tags'])
    out_data['RUN_ID_LIST'] = data['RUN_ID_LIST_CONCAT'].split()
    if data.get('tag_tissue', 'NA') not in ['NA', '']:
        out_data['SAMPLE_ATTRIBUTES']['tissue'] = data['tag_tissue']
    if data.get('tag_source_name', 'NA') not in ['NA', '']:
        out_data['SAMPLE_ATTRIBUTES']['source_name'] = data['tag_source_name']
    if data.get('tag_age', 'NA') not in ['NA', '']:
        out_data['SAMPLE_ATTRIBUTES']['age'] = data['tag_age']
    # BUG FIX: json.parse does not exist in Python; use json.loads.
    out_data["STUDY"] = json.loads(out_data["STUDY_JSON"])
    return cls.parse_obj(out_data)
def speech_to_text():
    """Record a voice query via wit.ai, answer it via Houndify, and speak the reply."""
    # user is prompted to talk
    speech_response = wit.voice_query_auto(wit_access_token)
    # response
    question = urllib.quote_plus(speech_response['_text'])
    # BUG FIX: subprocess.call() returns the process's integer exit code, not
    # its output — json decoding an int can never work.  check_output()
    # captures stdout.
    resp = subprocess.check_output(['curl', 'https://www.houndify.com/textSearch?query=' + question + '&clientId=e7SgQJ_wwXjv5cUx1nLqKQ%3D%3D&clientKey=Pi_smrHYQhCA_nLgukp4C4nnQE2WyQvk3l3Bhs8hcbchrLAmjl5LWS3ewq1U8LMser8j890OfhklwNm77baPTw%3D%3D', '-H', 'Accept-Encoding: gzip, deflate, sdch', '-H', 'Accept-Language: en-US,en;q=0.8', '-H', 'User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/47.0.2526.111 Safari/537.36', '-H', 'Accept: */*', '-H', 'Referer: https://www.houndify.com/try/986dcfd1-0b91-4346-a5a0-6d53f0d18da2', '-H', 'Cookie: houndify-sess=s%3Ar-94jGq48cQMay2q1fgRwSolHIV4ZQpk.Y3Wns0NNtM5LCgWUcaAc8MUdH3Z0elclREmfzZ%2BJzLY; _gat=1; _ga=GA1.2.1948120585.1453572520', '-H', 'Connection: keep-alive', '-H', 'Hound-Request-Info: {"ClientID":"e7SgQJ_wwXjv5cUx1nLqKQ==","UserID":"houndify_try_api_user","PartialTranscriptsDesired":true,"SDK":"web","SDKVersion":"0.1.6"}', '--compressed'])
    # BUG FIX: json.parse does not exist in Python; use json.loads.
    answer = json.loads(resp)
    talk_answer = answer["AllResults"][0]['SpokenResponseLong']
    # do something with answer
    # speak the answer
    espeak.synth(talk_answer)
    # NOTE(review): this binds a *local* IS_TALKING, not the module-level
    # flag — a 'global IS_TALKING' is probably missing; confirm.
    IS_TALKING = False
def getViewRepr(self, rec_no, details=None, active_samples=None):
    """Build the view representation for record `rec_no`.

    `active_samples` is either a JSON list of sample ids or a
    comma-separated string of ints.
    """
    rec_data = self.mRecStorage.getRecordData(rec_no)
    v_context = self.mViewContext.copy()
    if details is not None:
        v_context["details"] = details
    if active_samples:
        if active_samples.strip().startswith('['):
            # BUG FIX: json.parse does not exist in Python; use json.loads.
            v_context["active-samples"] = set(json.loads(active_samples))
        else:
            v_context["active-samples"] = set(map(int, active_samples.split(',')))
    v_context["data"] = rec_data
    v_context["rec_no"] = rec_no
    return self.mAspects.getViewRepr(rec_data, v_context)
def valid_token():
    """Return True iff the request's ?token= matches a session token in ../config.json."""
    query = parse_qs(os.environ["QUERY_STRING"])
    # BUG FIX: dict.has_key() is Python-2-only; use 'in'.
    if "token" not in query:
        return False
    token = query["token"][0]
    # at least md5 hash
    if re.match("^[0-9a-f]{32,}$", token):
        # share token from php
        # BUG FIX: json.parse does not exist; json.load reads a file object,
        # and 'with' closes the handle.
        with open("../config.json") as fh:
            config = json.load(fh)
        for session in config["sessions"]:
            if session["token"] == token:
                return True
    # BUG FIX: previously fell off the end and returned None for tokens that
    # failed the regex or matched no session; return False explicitly.
    return False
def valid_token():
    """Return True iff the request's ?token= matches a session token in ../config.json."""
    query = parse_qs(os.environ['QUERY_STRING'])
    # BUG FIX: dict.has_key() is Python-2-only; use 'in'.
    if 'token' not in query:
        return False
    token = query['token'][0]
    # at least md5 hash
    if re.match("^[0-9a-f]{32,}$", token):
        # share token from php
        # BUG FIX: json.parse does not exist; json.load reads a file object,
        # and 'with' closes the handle.
        with open('../config.json') as fh:
            config = json.load(fh)
        for session in config['sessions']:
            if session['token'] == token:
                return True
    # BUG FIX: previously fell off the end and returned None for tokens that
    # failed the regex or matched no session; return False explicitly.
    return False
def __load__(url):
    # GET `url` with the GitHub token stored in token.txt; returns raw bytes.
    with open("token.txt", "r") as file:
        # `parse` is the project's JSON decoder (presumably json.loads — confirm).
        token = parse(file.read())["token"]
    # NOTE(review): this echoes the secret token to stdout — confirm intended.
    print(token)
    headers = {
        "Authorization": f"token {token}",
        "user-agent": "Python Urllib3/2.5"
    }
    http = urllib3.PoolManager()
    rq = http.request('GET', url, headers=headers)
    return rq.data
def test_genbank_extension(self):
    # End-to-end check of the genbank_to_block compute extension: create a
    # block, run the genbank conversion against it, write the sequence URL
    # back, then verify the stored block and the sequence file contents.
    # NOTE(review): `json(...)` and `parse(...)` here are project helper
    # aliases (serializer/deserializer), not the stdlib json module — confirm.
    headers = self.headers
    url = self.run_url
    block1 = {
        "metadata": {
            "authors": [],
            "version": "0.0.0",
            "tags": {}
        },
        "options": [],
        "components": [],
        "rules": {},
        "notes": {}
    }
    res = POST(self.api_url + "block", data=json(block1), headers=headers)
    block_id = res.json()["id"]
    input1 = {
        "genbank": "extensions/compute/genbank_to_block/sequence.gb",
        "sequence": "/api/file/block/" + block_id + "/sequence"
    }
    res = POST(url + "genbank_to_block", data=json(input1), headers=headers)
    self.assertTrue(res.status_code == 200)
    res = res.json()
    self.assertTrue("block" in res)
    block = parse(res["block"])
    block["sequence"]["url"] = input1["sequence"]
    res = PUT(self.api_url + "block/" + block_id, data=json(block), headers=headers)
    self.assertTrue(res.status_code == 200)
    res = GET(self.api_url + "block/" + block_id, headers=headers)
    self.assertTrue(res.status_code == 200)
    self.assertTrue(res.json()["id"] == block_id)
    self.assertTrue(res.json()["sequence"]["url"] == input1["sequence"])
    # Fetch the sequence through the URL-encoded file endpoint ("block/<id>/sequence").
    res = GET(self.api_url + "file/block%2f" + block_id + "%2fsequence", headers=headers)
    self.assertTrue(res.status_code == 200)
    self.assertTrue(len(res.text) > 1000)
def enrolled_courses_exist(schedule):
    """Return True iff every course in schedule.enrollments exists in the DB.

    schedule.enrollments is a JSON list of {"course_id": ...} dicts; any
    decode or lookup failure yields False.
    """
    import json
    try:
        # BUG FIX: json.parse does not exist in Python; use json.loads.
        enrollments = json.loads(schedule.enrollments)

        def course_exists(course):
            course_id = course["course_id"]
            try:
                Course.objects.get(id=course_id)
                return True
            except Course.DoesNotExist:
                return False

        # BUG FIX: reduce() has no 'initializer' keyword argument, so the old
        # code always raised TypeError and returned False even for valid
        # schedules.  all() expresses the same conjunction correctly.
        return all(course_exists(course) for course in enrollments)
    except Exception:
        return False
def validate_email(request):
    """Check whether email is already registered.

    This is called from checkout-selection mainly. Since the checkout is
    possible without registration we need to verify email status.
    """
    # BUG FIX: json.parse does not exist in Python.  json.loads also raises
    # on an empty/non-JSON body, so guard it to preserve the intended
    # fallback to request.POST.
    try:
        data = json.loads(request.body)
    except ValueError:
        data = None
    data = data or request.POST
    if re.match(r"[\w\.\-_]{1,62}@[\w\.\-_]{1,62}\.\w{1,12}", data.get("email")) is None:
        return JsonResponse({'valid': False, 'exists': False})
    return JsonResponse({
        "exists": account.models.EmailAddress.objects.filter(
            email=data.get("email")).exists(),
        "valid": True
    })
def cabin_grade_request(sessionkey): request = """ <request> <auth username="******" password="******" /> <method action="getcabingrades" sessionkey="{0}" resultno="{1}" /> </request> """.format(sessionkey, "302_25.0") url = "http://fusionapi.traveltek.net/0.9/interface.pl" r = requests.post(url, data={"xml": request}) root = etree.fromstring(r.text) print "******" import xmltodict, json o = xmltodict.parse(r.text) json = json.dumps(o) a = json.parse(json) print a["response"]["results"]["grades"]
def default_json_dumps(x):
    """`default=` hook for json.dumps: serialize exceptions, dates, and rich objects.

    Falls back to repr() for anything without a recognized shape.
    """
    if isinstance(x, Exception):
        return {
            "traceback": format_exception(x),
            "exception": format_exception_only(x),
            "exceptionName": x.__class__.__name__,
            "exceptionArgs": x.args,
        }
    if isinstance(x, (date, datetime)):
        return x.isoformat()
    if hasattr(x, 'raw'):
        # Unwrap objects exposing their underlying value.
        return default_json_dumps(x.raw)
    if callable(getattr(x, 'to_json', None)):
        # BUG FIX: json.parse does not exist in Python; use json.loads.
        return json.loads(x.to_json())
    return repr(x)
def test_genbank_extension(self):
    # End-to-end check of the genbank_to_block compute extension: create a
    # block, run the genbank conversion against it, write the sequence URL
    # back, then verify the stored block and the sequence file contents.
    # NOTE(review): `json(...)` and `parse(...)` here are project helper
    # aliases (serializer/deserializer), not the stdlib json module — confirm.
    headers = self.headers
    url = self.run_url
    block1 = {
        "metadata": {
            "authors": [],
            "version": "0.0.0",
            "tags": {}
        },
        "options":[],
        "components":[],
        "rules": {},
        "notes": {}
    }
    res = POST(self.api_url + "block", data = json(block1), headers=headers)
    block_id = res.json()["id"]
    input1 = {
        "genbank":"extensions/compute/genbank_to_block/sequence.gb",
        "sequence":"/api/file/block/"+block_id+"/sequence"
    }
    res = POST(url + "genbank_to_block", data = json(input1), headers=headers)
    self.assertTrue(res.status_code==200)
    res = res.json()
    self.assertTrue("block" in res)
    block = parse(res["block"])
    block["sequence"]["url"] = input1["sequence"]
    res = PUT(self.api_url + "block/" + block_id, data = json(block), headers=headers)
    self.assertTrue(res.status_code==200)
    res = GET(self.api_url + "block/" + block_id, headers=headers)
    self.assertTrue(res.status_code==200)
    self.assertTrue(res.json()["id"] == block_id)
    self.assertTrue(res.json()["sequence"]["url"] == input1["sequence"])
    # Fetch the sequence through the URL-encoded file endpoint ("block/<id>/sequence").
    res = GET(self.api_url + "file/block%2f"+block_id+"%2fsequence", headers=headers)
    self.assertTrue(res.status_code==200)
    self.assertTrue(len(res.text) > 1000)
def configure_SubmissionInfo_sheet(self):
    """Set up the mapping between variant and the SubmissionInfo sheet of
    clinvar and set up the SubmissionInfo sheet.

    OUTPUT: submissionInfo — a dictionary with a mapping from the VKGL
    variant to the Clinvar SubmissionInfo sheet, default values for some
    clinvar columns, the clinvar column letters, and the sheet in which the
    information is stored.
    """
    sheet = self.clinvarExport.sheets['SubmissionInfo']
    # BUG FIX: json.parse does not exist in Python; json.load reads a file
    # object, and 'with' closes the handle.
    # NOTE(review): providedInfo is read but never used below — confirm intent.
    with open('submissionInfo.json') as fh:
        providedInfo = json.load(fh)
    submissionInfo = {
        'mapping': {
            ''
        },
        'defaults': {
            'submitter_id_type': 'personID',
            'submitter_type': 'private',
            'organization_type': 'lab',
            'country': 'The Netherlands',
            'submission_description': 'VKGL Data-share Consensus',
            'assembly_name': 'GRCh37'
        },
        'columns': {
            'submitter_id_col': 'A',
            'submitter_id_type': 'B',
            'submitter_type': 'C',
            'submitter_first_name': 'D',
            'submitter_last_name': 'E',
            'submitter_phone': 'F',
            'submitter_email': 'G',
            'organization_type': 'I',
            'organization': 'J',
            'organization_id': 'K',
            'organization_abbr': 'L',
            'institution': 'N',
            'city': 'P',
            'province': 'Q',
            'country': 'R',
            'submission_description': 'V',
            'assembly_name': 'AB'
        },
        'sheet': sheet
    }
    # BUG FIX: the docstring promises submissionInfo as output, but the
    # function previously returned None.
    return submissionInfo
def _send(self):
    ''' Performs the AUR API request.

    Builds the RPC query from self._request/self._data and stores the decoded
    results in self._results; raises AurRequestError on any failure.
    '''
    # BUG FIX: 'len(...) is 0' relied on small-int identity; compare values.
    if len(self._data) == 0:
        return
    query = [('type', self._request)]
    if self._request in ('info', 'search'):
        query.append(('arg', self._data[0]))
    else:
        query += [('arg[]', d) for d in self._data]
    try:
        res = urlopen(AurRequest.HOST + AurRequest.API + '?' + urlencode(query))
    except:
        # Deliberately broad: any transport failure becomes an AurRequestError.
        raise AurRequestError(_('Could not reach the AUR'))
    # BUG FIX: 'is not 200' compared object identity; use numeric inequality.
    if res.status != 200:
        raise AurRequestError(
            _('AUR responded with error: {0}').format(res.reason))
    try:
        info = parse(res.read().decode('utf8'))
        error = info['type'] == 'error'
        results = info['results']
    except:
        raise AurRequestError(_('AUR responded with invalid data'))
    if error:
        raise AurRequestError(
            _('AUR responded with error: {0}').format(results))
    if type(results) is dict:
        results = [results]
    try:
        self._results = dict(
            (r['Name'], AurRequest.decode_result(r)) for r in results)
    except:
        raise AurRequestError(_('AUR responded with invalid data'))
def handle_parse(data,param):
    # Dispatch `data` to the parser selected by param['selectorType'].
    try:
        selector = param['selector']
        selectorType = param['selectorType']
        pageType = param['pageType']
        if selectorType == SelectorType.regex:
            regex=RegexParse()
            data = regex.parse(selector,data,pageType)
        elif selectorType== SelectorType.xpath:
            xpath = XpathParse()
            data = xpath.parse(selector,data,pageType)
        elif selectorType == SelectorType.json:
            # NOTE: the local name `json` deliberately shadows any json module
            # here — `json.parse` is JsonParse.parse, not a stdlib call.
            json = JsonParse()
            data = json.parse(selector,data,pageType)
        elif selectorType == SelectorType.css_selector:
            css_selector = CssSelectorParse()
            data = css_selector.parse(selector,data,pageType)
        return data
    except Exception as ex:
        # NOTE(review): on error this logs and implicitly returns None —
        # confirm callers handle a None result.
        print(ex)
        log_util.error("{parse error : %s }" % str(ex))
def main():
    """Bridge loop: read flight data from a TCP slave and forward it to Node via a Unix socket."""
    # JavaScript Local Data Socket
    socket_path = '/tmp/node-python-sock'
    client = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
    client.connect(socket_path)
    # Python Ethernet Slave Socket
    TCP_IP = '127.0.0.1'
    TCP_PORT = 5005
    BUFFER_SIZE = 1024
    # BUG FIX: 'Socket.AF_INET' referenced an undefined name; the module is 'socket'.
    tcp = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    tcp.connect((TCP_IP, TCP_PORT))
    # BUG FIX: 'data' was never initialized before its keys were assigned.
    data = {}
    while True:
        try:
            sleep(0.25)
            # Gets data from TCP socket
            tcp_data = tcp.recv(BUFFER_SIZE)
            # BUG FIX: json.parse does not exist in Python; use json.loads.
            tcp_parsed = json.loads(tcp_data)
            # Sets the flight values from TCP data
            data['power_x'] = tcp_parsed['power_x']
            data['power_y'] = tcp_parsed['power_y']
            data['time'] = tcp_parsed['time']
            # Converts to json format
            jsonData = json.dumps(data)
            # Sends data to JavaScript (marvel_drone_socket.js)
            # NOTE(review): on Python 3, socket.send needs bytes — this would
            # require jsonData.encode(); confirm target interpreter.
            client.send(jsonData)
        # Ends infinite loop and closes threads
        except KeyboardInterrupt:
            client.close()
            tcp.close()
            sys.exit()
def duplicate_objects(dup_infos):
    """Duplicate an object with optional transformations.

    Args:
        dup_infos (list[dict]): A list of duplication infos. Each info is a
            dictionary, containing the following data:
            original (str): Name of the object to duplicate.
            name (str): Desired name for the duplicate.
            translation (f,f,f): Translation float tuple or None if not to change.
            rotation (f,f,f): Rotation float tuple or None if not to change.
            scale (f,f,f): 3d scale float tuple or None if not to change.

    Returns:
        list[tuple (str, str)]: The first element of each tuple contains the
        return 'code' of the operation, which can be
        - 'Ok' If no problem occured.
        - 'NotFound' If the original could not be found.
        - 'Renamed' If the name was changed by the editor.
        - 'Failed' If something else problematic happened.
        The second element is None, unless the editor 'Renamed' the object, in
        which case it contains the editor-assigned name.
        If the return value is 'Renamed', the calling function must assign the
        returned name to the original object in the Program or find a new
        fitting name and assign it to the duplicated object using the
        :func:`renameObject` function with the returned string as name.

    .. seealso:: :func:`renameObject` :func:`getFreeName`
    """
    infos_str = json.dumps(dup_infos)
    msg = "DuplicateObjects " + infos_str
    result = connection.send_message(msg)
    # BUG FIX: json.parse does not exist in Python; use json.loads.
    results = json.loads(result)
    return results
def _send(self):
    ''' Performs the AUR API request.

    Builds the RPC query from self._request/self._data and stores the decoded
    results in self._results; raises AurRequestError on any failure.
    '''
    # BUG FIX: 'len(...) is 0' relied on small-int identity; compare values.
    if len(self._data) == 0:
        return
    query = [('type', self._request)]
    if self._request in ('info', 'search'):
        query.append(('arg', self._data[0]))
    else:
        query += [('arg[]', d) for d in self._data]
    try:
        res = urlopen(AurRequest.HOST + AurRequest.API + '?' + urlencode(query))
    except:
        # Deliberately broad: any transport failure becomes an AurRequestError.
        raise AurRequestError(_('Could not reach the AUR'))
    # BUG FIX: 'is not 200' compared object identity; use numeric inequality.
    if res.status != 200:
        raise AurRequestError(_('AUR responded with error: {0}').format(res.reason))
    try:
        info = parse(res.read().decode('utf8'))
        error = info['type'] == 'error'
        results = info['results']
    except:
        raise AurRequestError(_('AUR responded with invalid data'))
    if error:
        raise AurRequestError(_('AUR responded with error: {0}').format(results))
    if type(results) is dict:
        results = [results]
    try:
        self._results = dict((r['Name'], AurRequest.decode_result(r)) for r in results)
    except:
        raise AurRequestError(_('AUR responded with invalid data'))
def getDataFromSvr(rsrc):
    """GET /Entity/<rsrc>/ from the RMP server and return the named payload.

    The response is keyed by the last path segment of `rsrc`.
    """
    url = config.rmp + '/Entity/' + rsrc + '/'
    jsonStr = urllib2.urlopen(urllib2.Request(url)).read().decode('utf-8')
    # BUG FIX: json.parse does not exist in Python; use json.loads.
    return json.loads(jsonStr)[rsrc.split('/')[-1]]
def update_draft():
    """Update a draft mail from the request payload and return its ident."""
    # BUG FIX: json.parse does not exist in Python; use json.loads.
    # NOTE(review): in Flask, request.json is usually an already-parsed
    # object; json.loads only works if this route receives a JSON *string*
    # there — confirm against the client.
    raw_mail = json.loads(request.json)
    ident = mail_service.update_mail(raw_mail)
    return respond_json({'ident': ident})
import json

# BUG FIX: 'tf' and 'json' were used below without being imported.
import tensorflow as tf
from tensorflow.contrib.learn.python.learn.datasets.mnist import read_data_sets

import mlsql


def param(key, value):
    """Return the mlsql fit parameter for `key`, or `value` when unset."""
    if key in mlsql.fit_param:
        res = mlsql.fit_param[key]
    else:
        res = value
    return res


jobName = param("jobName", "worker")
taskIndex = int(param("taskIndex", "0"))
# BUG FIX: json.parse does not exist in Python; use json.loads.
clusterSpec = json.loads(mlsql.internal_system_param["clusterSpec"])

tf.app.flags.DEFINE_integer("num_workers", 2, "Number of workers")
tf.app.flags.DEFINE_boolean("is_sync", False, "using synchronous training or not")
FLAGS = tf.app.flags.FLAGS


def model(images):
    """Define a simple mnist classifier."""
    net = tf.layers.dense(images, 500, activation=tf.nn.relu)
    net = tf.layers.dense(net, 500, activation=tf.nn.relu)
    net = tf.layers.dense(net, 10, activation=None)
    return net
def exprFromStr(context, string, fmt=None, add=False):
    """Returns an Expression from a string.

    In auto mode, this reads in constants in klampt.loader JSON-compatible
    format, standard variables in the form "x", user data in the form of
    strings prepended with $ (e.g., "$x"), and named expression references in
    the form of strings prepended with @.

    Args:
        context (Context): the context containing possible functions in string
        string (str): the string to parse.
        fmt (str, optional): specifies a format for the string. Can be None
            (auto), 'auto', or 'json'
        add (bool, optional): if true, adds all variables referenced in the
            string to the context. Otherwise, undefined variables are referred
            to as user data.

    An exception is raised on parsing failure.

    (Parsing is a little slow, so try not to use it in tight inner loops)

    Returns:
        (Expression): the expression represented by str.
    """
    if len(string) == 0:
        raise ValueError("Empty string provided")
    if fmt is None:  # BUG FIX: identity test instead of '== None'
        if string[0] == '{':
            fmt = 'json'
        else:
            fmt = 'auto'
    if fmt == 'auto':
        import re, ast
        USERDATA_MARKER = '___'
        TAGLIST_NAME = '__tagexprlist__'
        taglist = context.expressions.copy()

        def __settag__(self, tagname, taglist):
            # "#name" suffix: record this expression under the given tag.
            assert isinstance(tagname, ConstantExpression) and isinstance(tagname.value, str)
            taglist[tagname.value] = self
            return self

        def __gettag__(tagname, taglist):
            # "@name" reference: look up a previously tagged expression.
            assert isinstance(tagname, ConstantExpression) and isinstance(tagname.value, str)
            return taglist[tagname.value]

        Expression.__settag__ = __settag__
        # Rewrite the $/#/@ sugar into plain Python before ast-parsing.
        x = re.sub(r"\$(\w+)", r"___\1", string)
        x = re.sub(r"\#(\w+)", r'.__settag__("\1",__tagexprlist__)', x)
        x = re.sub(r"\@(\w+)", r'__gettag__("\1",__tagexprlist__)', x)
        tree = ast.parse(x, mode='eval')
        missing_functions = []
        missing_names = []
        userdata = {}
        # hack to easily access functions with the class.attribute syntax
        allFunctions = _builtin_functions.copy()
        for name, func in context.customFunctions.items():
            path = name.split('.')
            if len(path) == 1:
                allFunctions[name] = func
            else:
                if path[0] not in allFunctions:
                    allFunctions[path[0]] = _Object()
                root = allFunctions[path[0]]
                for n in path[1:-1]:
                    if not hasattr(root, n):
                        setattr(root, n, _Object())
                    root = getattr(root, n)
                setattr(root, path[-1], func)
        allFunctions[TAGLIST_NAME] = taglist
        allFunctions['__gettag__'] = __gettag__

        class RewriteVarNames(ast.NodeTransformer):
            # Wraps literals in _const / _convert_list calls and resolves
            # names into functions, user data, or context variables.
            def __init__(self):
                self.infunc = False

            def visit_Call(self, node):
                self.infunc = True
                self.generic_visit(node)
                return node

            def visit_Name(self, node):
                if self.infunc:
                    self.infunc = False
                    if node.id not in allFunctions:
                        missing_functions.append(node.id)
                    return node
                if node.id.startswith(USERDATA_MARKER):
                    basename = node.id[len(USERDATA_MARKER):]
                    userdata[node.id] = expr(basename)
                else:
                    if node.id in context.variableDict:
                        userdata[node.id] = expr(context.variableDict[node.id])
                    elif add:
                        userdata[node.id] = expr(context.addVar(node.id, 'N'))
                    elif node.id == TAGLIST_NAME:
                        pass
                    else:
                        missing_names.append(node.id)
                        userdata[node.id] = expr(node.id)
                return node

            def visit_Num(self, node):
                return ast.copy_location(ast.Call(func=ast.copy_location(ast.Name(id="_const", ctx=ast.Load()), node), args=[node], keywords=[]), node)

            def visit_Str(self, node):
                return ast.copy_location(ast.Call(func=ast.copy_location(ast.Name(id="_const", ctx=ast.Load()), node), args=[node], keywords=[]), node)

            def visit_List(self, node):
                args = []
                for idx, item in enumerate(node.elts):
                    args.append(self.visit(item))
                return ast.copy_location(ast.Call(func=ast.copy_location(ast.Name(id="_convert_list", ctx=ast.Load()), node), args=args, keywords=[]), node)

            def visit_Tuple(self, node):
                args = []
                for idx, item in enumerate(node.elts):
                    args.append(self.visit(item))
                return ast.copy_location(ast.Call(func=ast.copy_location(ast.Name(id="_convert_list", ctx=ast.Load()), node), args=args, keywords=[]), node)

        newtree = RewriteVarNames().visit(tree)
        if len(missing_functions) > 0:
            raise ValueError("Undefined functions " + ','.join(missing_functions))
        if len(missing_names) > 0:
            raise ValueError("Undefined variable " + ','.join(missing_names))
        allFunctions['_const'] = const
        allFunctions['_convert_list'] = lambda *args: array(*args)
        ctree = compile(newtree, filename="<ast>", mode="eval")
        res = eval(ctree, allFunctions, userdata)
        delattr(Expression, '__settag__')
        return res
    elif fmt == 'json':
        import json
        # BUG FIX: json.parse does not exist, and the old call passed the
        # builtin 'str' instead of the 'string' argument.
        obj = json.loads(string)
        return exprFromJson(context, obj)
    else:
        raise ValueError("Invalid format " + fmt)
import json # Note: It's not possible to return 'undefined' and drop an attribute. def replacer(key, value): if key == '': return value elif key == 'height': return value * 100 elif key == 'favorite': return None else: return value x = json.parse('{"name":"David", "height":1.8542, "favorite":6, "male":true, "other":null}') print json.stringify(x) print json.stringify(x, None) print json.stringify(x, None, 5) print json.stringify(x, replacer, 5) # This gives a TypeError 'function' does not support indexing #x = json.parse['["David"]']
def mining(uid, token, proj, rsrc):
    """Validate parameters, build a job context, and launch the mining algorithm in a thread.

    Returns a JSON response describing success or the first validation error.
    """
    res = make_response()
    res.headers['Content-Type'] = "application/json"
    # Validate the path parameters.
    for e in [token, proj, rsrc]:
        if not re.match(r'^\w+$', e):
            # BUG FIX (throughout): json.stringify/json.parse are JavaScript;
            # Python uses json.dumps/json.loads.
            res.data = json.dumps({'succ': False, 'msg': 'Rsrc invalid!'})
            return res
    # title
    title = request.form.get('title')
    if not title:
        res.data = json.dumps({'succ': False, 'msg': 'No title!'})
        return res
    # cols=["col0","col1","col2", ...]
    cols = request.form.get('cols')
    if cols:
        cols = json.loads(cols)
        assert isinstance(cols, list)
    else:
        cols = []
    # start
    start = request.form.get('start')
    if start:
        start = int(start)
    else:
        start = 0
    # count
    count = request.form.get('count')
    if count:
        count = int(count)
        end = start + count
    else:
        end = None
    algo = request.form.get('algo')
    args = request.form.get('args')
    print(args)
    if args:
        args = json.loads(args)
        assert isinstance(args, dict)
    else:
        args = {}
    # TODO: filter args
    context = {
        "user": uid,
        "title": title,
        "rsrc": token + '/' + proj + '/' + rsrc,
        "cols": cols,
        "start": start,
        "end": end,
        "algo": algo,
        "args": args,
    }
    if isClassify(algo):
        # predictStart
        predictStart = request.form.get('predictStart')
        if predictStart:
            predictStart = int(predictStart)
        else:
            predictStart = 0
        # predictCount
        predictCount = request.form.get('predictCount')
        if predictCount:
            predictCount = int(predictCount)
            predictEnd = predictStart + predictCount
        else:
            predictEnd = None
        label = request.form.get('label')
        if label is None:
            label = ""
        if not re.match(r'^\w+$', label):
            # NOTE(review): this returns a raw JSON string instead of the
            # response object used everywhere else — confirm intended.
            return json.dumps({'succ': False, 'msg': 'Label invalid!'})
        context['predictStart'] = predictStart
        context['predictEnd'] = predictEnd
        context['label'] = label
    if not isAssoc(algo):
        absence = request.form.get('absence')
        fillval = request.form.get('fillval')
        if fillval is None:
            fillval = 0
        formal = request.form.get('formal')
        distinct = request.form.get('distinct') == 'true'
        context['absence'] = absence
        context['fillval'] = fillval
        context['formal'] = formal
        context['distinct'] = distinct
    # Dispatch to the concrete algorithm.
    funcDict = {
        "kmeans": kmeans,
        "kmedoids": kmedoids,
        "apriori": apriori,
        "naive_bayes": classify,
        "knn": classify,
        "svm": classify
    }
    func = funcDict.get(algo)
    if not func:
        res.data = json.dumps({'succ': False, 'msg': 'Unknown algo!'})
        return res
    Thread(target=func, args=(context,)).start()
    res.data = json.dumps({'succ': True, 'msg': 'Done...'})
    return res
def cacheInit():
    """Load cache.json into the module-level cacheData dict."""
    global cacheData
    # BUG FIX: mode "w" truncated the file before reading (and a write-mode
    # handle cannot be read); open read-only, with a context manager so the
    # handle is closed.  json.parse does not exist — use json.loads.
    with open("cache.json", "r") as f:
        cacheData = json.loads(f.read())