def download(self, cargo):
    log.info(self.name + ' download')
    projdir = self.garpi.go.projects()
    url = self.garpi.cfg.get(self.name, self.name + '_url')
    from get import get
    get(url, projdir, True)
    return ('%s_CONFIG' % self.NAME, cargo)
def catalogs():
    for i in json.loads(get('http://instances.ckan.org/config/instances.json')):
        url = i['url']
        if url in CKAN_FIX:
            url = CKAN_FIX[url]
        if url is not None:
            yield 'ckan', url
    for row in json.loads(get('https://opendata.socrata.com/api/views/6wk3-4ija/rows.json?accessType=DOWNLOAD'))['data']:
        url = list(filter(None, row[11]))[0]
        if url in SOCRATA_FIX:
            url = SOCRATA_FIX[url]
        if url is not None:
            yield 'socrata', url
    # for url in open('opendatasoft').readlines():
    #     yield 'opendatasoft', url
    # Other interesting ones
    for pair in [
            ('socrata', 'https://data.cityofchicago.org'),
            ('socrata', 'https://data.cityofnewyork.us'),
            ('socrata', 'https://data.austintexas.gov'),
            ('socrata', 'https://data.hawaii.gov'),
            ('ckan', 'https://catalog.data.gov'),
            ]:
        yield pair
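The socrata and opendatasoft downloaders further down consume (type, url) pairs like the ones catalogs() yields. A minimal driver sketch, assuming those two functions are importable alongside catalogs() and that results go under a local downloads/ directory; crawl_all and the handler table are hypothetical names, and CKAN catalogs are skipped here because no CKAN downloader appears in these snippets.

import os

def crawl_all():
    # Map each catalog type to a downloader; types without a handler are skipped.
    handlers = {
        'socrata': lambda url: socrata(url, os.path.join('downloads', 'socrata')),
        'opendatasoft': lambda url: opendatasoft(url, os.path.join('downloads', 'opendatasoft')),
    }
    for catalog_type, url in catalogs():
        handler = handlers.get(catalog_type)
        if handler is None:
            continue  # no downloader for this catalog type in these snippets
        handler(url)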
def download(self):
    '''Download missing or update pre-existing project files.

    As a side effect the program will be in the projects directory
    that contains the downloaded project'''
    log.info(self.name + ' download')
    import fs, ConfigParser
    projdir = fs.projects()
    fs.goto(projdir, True)
    from get import get
    try:
        tag = self.tag()
    except ConfigParser.NoOptionError:
        tag = None
    #print 'url="%s" name="%s" tag="%s"'%(self.url(), self.name, tag)
    get(self.url(), self.name, True, tag=tag)
    tarfile = os.path.basename(self.url())
    if '.tgz' in tarfile or '.tar' in tarfile:
        untar(tarfile)
        dot = tarfile.find('.')
        dirname = tarfile[:dot]
        import shutil
        shutil.move(dirname, self.name)
        pass
    fs.goback()
    return
def get_from_file(fname):
    with open(fname, "r+") as f:
        words = f.readlines()
    words = list(map(lambda x: x.rstrip("\n"), words))
    for index, word in enumerate(words):
        print(str(index) + ") " + word)
        get(word)
def download(self, cargo): log.info("cmt download") target = "%s/%s" % (self.garpi.go.external(), self.tgz()) from get import get get(self.url(), target) self.check_tgz() return ("CMT_UNPACK", cargo)
def main():
    # get.get(os.getenv("CASSANDRA_HOST"), os.getenv("CASSANDRA_KEYSPACE"))
    while True:
        try:
            get.get(os.getenv("CASSANDRA_HOST"), os.getenv("CASSANDRA_KEYSPACE"))
            sleep(5)
        except Exception:
            pass
def POST(self):
    global number_to_ip
    global node_name  # this contains the IP of the node
    #syslog.syslog('AALU: in post')
    mc = pylibmc.Client(["127.0.0.1"], binary=True,
                        behaviors={"tcp_nodelay": True, "ketama": True})
    if mc.get('id') is None:
        mc.set('id', 0)
    req_id = str(mc.get('id'))
    mc['id'] = mc['id'] + 1
    user_data = web.input()
    if len(user_data) == 5:
        # syslog.syslog("AALU: I Got something Good in marketplace_real")
        from_name = user_data['from_name']
        destination = user_data['destination']
        from_number = user_data['from_number']
        ip = user_data['ip']  # ip of the rapidcell node
        if from_number not in number_to_ip:
            number_to_ip[from_number] = ip
        body = str(user_data['body'])
        body = body + "," + from_number + "," + from_name + "," + node_name  # appending useful information to the body
        # syslog.syslog("AALU: Got SMS:" + body)
        if 'sell' in body or 'Sell' in body:
            # syslog.syslog("AALU: this is a sell message and needs to be put in the queue")
            string = "AALU: post: %s,%s,%s,%s" % (str(time.time()), req_id, "MKP", body)
            syslog.syslog(string)
            base.POST(req_id, "MKP", body)
            #mc['id'] = mc['id']+1
        elif 'search' in body or 'Search' in body:
            data_to_be_sent = {}
            #req_id = str(mc['id']+1)
            data_to_be_sent['i'] = req_id
            #mc['id'] = mc['id']+1
            data_to_be_sent['t'] = "MKP"
            data_to_be_sent['d'] = body
            syslog.syslog("AALU: search: %f,%s,%s,%s" % (time.time(), req_id, "MKP", body))
            #thread = get.get('http://ec2-54-93-162-141.eu-central-1.compute.amazonaws.com:8080/server', '', data_to_be_sent)
            thread = get.get('http://ec2-54-93-162-141.eu-central-1.compute.amazonaws.com:8080/server', '', data_to_be_sent)
            thread.start()
        elif 'buy' in body or 'Buy' in body:
            data_to_be_sent = {}
            data_to_be_sent['i'] = req_id
            #mc['id'] = mc['id']+1
            data_to_be_sent['t'] = "MKP"
            data_to_be_sent['d'] = body
            syslog.syslog("AALU: get: %f,%s,%s,%s" % (time.time(), req_id, "MKP", body))
            thread = get.get('http://ec2-54-93-162-141.eu-central-1.compute.amazonaws.com:8080/server', '', data_to_be_sent)
            #thread = get.get('http://0.0.0.0:8888/server', '', data_to_be_sent)
            thread.start()
        else:
            syslog.syslog("AALU: It should not reach here because text contains sell")
def main():
    cmds = sys.argv
    if len(cmds) > 1:
        ui = cmds[1].lower()
        if ui == 'init':
            init()
        if backup.readyToBackup():
            if ui == 'restore' and len(backup.getIndex()) > 0:
                if len(cmds) > 2:
                    restore(os.path.join(backup.restDir, cmds[2]))
                else:
                    print("Restoring to default location %s " % backup.restDir)
                    restore(backup.restDir)
                print("All files restored.")
            elif ui == 'get':
                try:
                    get(cmds[2])
                except IndexError:
                    print("Please give a pattern eg. 'file' ")
            elif ui == 'test':
                fileEntries = validateArchiveIndex()
                invalidFiles = validateFiles()
                #print(fileEntries)
                #print("Correct Entries: %s " % fileEntries['Correct Entries'])
                #print("Incorrect Entries: %s " % archiveList['Incorrect Entries'])
                if fileEntries['Erroneous Paths'] != []:
                    print("Erroneous Paths: %d" % len(fileEntries['Erroneous Paths']))
                    print('\n'.join('  {}: {}'.format(*x)
                                    for x in enumerate(fileEntries['Erroneous Paths'], 1)))
            elif ui == 'list':
                if len(cmds) > 2:
                    print('\n'.join('{}: {}'.format(*x)
                                    for x in enumerate(listFiles(cmds[2]), 1)))
                else:
                    listFiles()
                    print('\n'.join('{}: {}'.format(*x)
                                    for x in enumerate(listFiles(), 1)))
            elif ui == 'store' and len(cmds) > 2:
                if os.path.isdir(cmds[2]):
                    store(cmds[2])
                else:
                    print("invalid directory")
        else:
            print("not ready... run 'cat' init")
    else:
        print("run 'init' to initialise the program")
def test_write_cache():
    downloader = lambda _: b'abcde'
    cachedir = tempfile.mkdtemp()
    sleep = lambda: None
    url = 'http://foo.bar/baz'
    os.makedirs(os.path.join(cachedir, 'foo.bar'))
    get(url, cachedir=cachedir, downloader=downloader, sleep=sleep, load=False)
    with open(os.path.join(cachedir, 'foo.bar', 'baz'), 'rb') as fp:
        read = fp.read()
    assert read == downloader(None)
    shutil.rmtree(cachedir)
def xml(page, url, filename):
    # Merge the XML output of the Fanfou status API.
    end = '<?xml version="1.0" encoding="UTF-8"?>\n<statuses>\n</statuses>\n'
    while True:
        resp = get.get(page, url)
        if resp != end:
            # 'a' appends to the file; 'w' would overwrite the existing content.
            f = open(filename, 'a')
            f.write(resp)
            # Each write() must be followed by close() so the data is flushed.
            f.close()
        else:
            # Concatenate the XML pulled from the Fanfou API, then re-read the
            # file and strip the intermediate XML footers and headers.
            f = open(filename, 'r')
            result = f.read()
            f.close()
            result = string.replace(
                result,
                '\n</statuses>\n<?xml version="1.0" encoding="UTF-8"?>\n<statuses>\n',
                '\n')
            f = open(filename, 'w')
            f.write(result)
            f.close()
            return True
        page = page + 1
def rip(self):
    disk_storage = storage.storage()
    self.clean()
    thread = []
    for i in range(0, self.how_many):
        from_number = self.start_number + i
        print from_number
        from_name = from_number
        node_name = self.myself
        t = "NEW"
        node = nodes[0]
        print node
        data_to_be_sent = {}
        data_to_be_sent['i'] = from_name
        data_to_be_sent['t'] = "NEW"
        from_number = str(from_number)
        data_to_be_sent['d'] = from_number + "," + from_number + "," + from_number + "," + from_number
        syslog.syslog("BALU: uid=%s and time=%s" % (from_name, str(time.time())))
        try:
            thread.append(get.get('http://' + node + ':8080/server', '', data_to_be_sent))
            thread[-1].start()
        except IndexError:
            print "there was an indexError"
        syslog.syslog("BALU: Node selected is %s" % node)
    for i in range(0, self.how_many):
        thread[i].join()
def POST(self):
    # import logging
    # logging.basicConfig(format='%(asctime)s %(module)s %(funcName)s %(lineno)d %(levelname)s %(message)s', filename='/var/log/sms-server.log', level=logging.INFO)
    user_data = web.input()
    if len(user_data) == 4:
        syslog.syslog("BALU: user_data has 4 params")
        from_name = user_data['from_name']
        destination = user_data['destination']
        from_number = user_data['from_number']
        body = user_data['body']
        syslog.syslog("BALU Got SMS: %s" % (body))
        returning = body[::-1]
        data_list = []
        data_to_be_sent = {}
        data_to_be_sent['to'] = from_number
        data_to_be_sent['msisdn'] = destination
        data_to_be_sent['text'] = returning
        data_list.append(data_to_be_sent)
        thread = get.get('http://128.122.140.120:8080/marketplace_aws_handler', '', data_to_be_sent)
        thread.start()
        return "IF was executed"
    else:
        return 'something is wrong'
    # ''' if data_list != []:
def make_new_users(user_number, myself, how_many):
    # global user_number
    # global myself
    #print "New user request came"
    thread = []
    for i in range(0, how_many):
        thread.append('')
    for i in range(0, how_many):
        user_id = user_number
        user_number = user_number + 1
        # If new user then t is NEW and i is the identity on my machine and you have to
        # issue a global identity; myself is the machine it is coming from
        data_to_be_sent = {}
        data_to_be_sent['i'] = user_id
        data_to_be_sent['t'] = "NEW HASH"
        data_to_be_sent['d'] = "something" + "," + str(user_id) + "," + str(user_id) + "," + myself
        syslog.syslog("AALU: uid=%s and time=%s" % (user_id, str(time.time())))
        thread[i] = get.get('http://127.0.0.1:8090/server', '', data_to_be_sent)
        # thread = get.get('http://0.0.0.0:8081/server', '', data_to_be_sent)
        print i
        thread[i].start()
        # thread.join()
    for i in range(0, how_many):
        thread[i].join()
def onetest():
    MH = 1e6 * np.random.rand() + 1e6
    ML = 1e20
    while ML > MH:
        ML = 1e6 * np.random.rand() + 1e6
    Zs = np.random.rand() * 0.1
    nframe = int(np.random.rand() * 80 + 3)
    costsum = 0
    frames = 0
    for i in range(1, 10):
        wave_data_i = get(i)
        ansi, ans01i = double_thresh(wave_data_i, MH, ML, Zs, nframe, i)
        with open('../label/label_' + str(i) + '.txt') as f:
            labelstr = f.readline()
        labelstr = labelstr.strip('[')
        labelstr = labelstr.strip(']')
        labelstr = labelstr.strip()
        labelstr = labelstr.split(',')
        labelstr = np.array(labelstr, dtype='f4')
        costsum += np.sum(np.abs(labelstr - ans01i))
        frames += wave_data_i.shape[0]
    accuracy = ((frames - costsum) / frames)
    #print(costsum)
    #print(str(accuracy*100) + '%')
    return MH, ML, Zs, nframe, costsum, accuracy
def addisbn(isbn):
    try:
        a = get(isbn)
        booklist: Booklist = Booklist()
        booklist.cover_url = a['cover_url']
        booklist.isbn = isbn
        booklist.name = a['title']
        db.session.add(booklist)
        detail: Details = Details()
        detail.isbn = isbn
        detail.abstract = a['abstract']
        detail.author_intro = a['author_intro']
        detail.book_intro = a['book_intro']
        detail.catalog = a['catalog']
        detail.labels = str(a['labels'])
        detail.url = a['url']
        db.session.add(detail)
        rating: Rating = Rating()
        rating.isbn = isbn
        rating.count = a['rating']['count']
        rating.info = a['rating']['rating_info']
        rating.star_count = a['rating']['star_count']
        rating.value = a['rating']['value']
        db.session.add(rating)
        db.session.commit()
    except Exception:
        return "0"  # Failed
    else:
        return "1"  # Success
def implement2():
    wave_data = get(1, False)
    y = naive_DPCM(wave_data)
    save_data('1_4bit.dpc', y, 4)
    data = load_data('1_4bit.dpc', 4)
    z = naive_decode_DPCM(data)
    print('SNR: ' + str(SNR(wave_data, z)))
    writeaspcm('1_4bit', z)
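implement2() above and findarguments() further down both report SNR(wave_data, z), but no SNR definition appears in these snippets. A minimal sketch of the conventional signal-to-noise ratio in decibels, assuming both arguments are equal-length numeric sequences; the helper actually used by this code may be defined differently.

import numpy as np

def SNR(original, reconstructed):
    # Ratio of signal power to reconstruction-error power, in dB.
    original = np.asarray(original, dtype=np.float64)
    reconstructed = np.asarray(reconstructed, dtype=np.float64)
    noise = original - reconstructed
    return 10.0 * np.log10(np.sum(original ** 2) / np.sum(noise ** 2))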
def GET(self):
    user_data = web.input()
    data_to_be_sent = {}
    data_to_be_sent['result'] = user_data['result']
    data_to_be_sent['id'] = user_data['id']  # this is the openvpn ip coming with the request
    syslog.syslog("AALU: search_handler:%s,%s" % (str(time.time()), user_data['id']))
    # node_name coming in each request is the ip of the handler
    thread = get.get('http://' + user_data['ip'] + ':8081/nexmo_search', '', data_to_be_sent)
    thread.start()
def __init__(self, code, keywords, pythonkey):
    self.pythonkey = pythonkey
    self.code = code
    self.keywords = keywords
    self.result = None
    self.get = get.get(code)
    self.tm = self.get.getMark()
    self.tl = self.get.getLanguage()
    self.parse()
def download():
    'Download CMT source tar file into external area.'
    log.info('downloading cmt tar file')
    target = "%s/%s" % (fs.external(), tgz())
    from get import get
    from exception import InconsistentState
    target = get(url(), target)
    if not os.path.exists(target):
        raise InconsistentState('Tar file does not exist: %s%s' % (os.getcwd(), tgz()))
    return target
def GET(self):
    user_data = web.input()
    if user_data['do'] == 'search':
        syslog.syslog("AALU: search: %s,%s" % (str(time.time()), user_data['id']))
        # search on aws for a random file uploaded by someone else. For example if you are
        # calling this from rapidcell you can search with my openvpn IP to try and find a
        # file i have uploaded
    elif user_data['do'] == 'get':
        syslog.syslog("AALU: get: %s,%s" % (str(time.time()), user_data['key']))
        # get a file
    thread = get.get('http://ec2-54-93-162-141.eu-central-1.compute.amazonaws.com:8080/search_and_get', '', user_data)
    thread.start()
def get_data_single(owner, repo, pull_id):
    url = urls.url_single_pull(owner, repo, pull_id)
    response = get.get(url)
    p = response.json()
    username_integrator = ""
    if p['merged_by']:
        username_integrator = p['merged_by']['login']
    return (username_integrator, p['comments'], p['review_comments'],
            p['commits'], p['additions'], p['deletions'], p['changed_files'])
def clean(self):
    i = 0
    data_to_be_sent = {}
    data_to_be_sent['i'] = "a"
    data_to_be_sent['t'] = "CLEAN"
    data_to_be_sent['d'] = 'd,d,d,d,d'
    thread = get.get('http://' + nodes[i] + ':8080/server', '', data_to_be_sent)
    thread.start()
    thread.join()
    syslog.syslog("BALU: Just cleaned node:%s" % nodes[i])
def convert(fecha, filename='out.html', verbose=False):
    """Generate an HTML file with a summary of the unread articles."""
    if verbose:
        print 'Cargando template básico'
    template = open('template.html').read().decode('utf-8')
    if verbose:
        print 'Obteniendo artículos'
    articulos = get()
    html = ''
    indice = ''
    # Open or create the JSON that stores the original tag of each idart
    try:
        arc = open(JSON_DATABASE)
    except IOError:
        # Create the database if it does not exist
        if verbose:
            print 'El JSON no exite, creando'
        json.dump({}, open(JSON_DATABASE, 'w'))
        j = {}
    else:
        if verbose:
            print 'Json cargado correctamente'
        j = json.load(arc)
    key = dict()
    for i in range(len(articulos)):
        art = articulos[i]
        key['ART%s' % i] = art['tag']
        md = ""
        md += '## [%s](%s) (ART%s)\n' % (art['titulo'], art['link'], i)
        md += '#### Link: [%s](%s)\n' % (art['link'], art['link'])
        md += '#### Feed: %s\n' % art['feed']
        md += '#### Autor: %s\n' % art['autor']
        md = markdown(md)
        unread = "Para marcar como no leido, subrayar: __UNREAD__ART%s" % i
        html += '<article id="art-%s">%s\n%s\n%s</article><hr />\n' % (i, md, art['content'], unread)
        indice += '<li>%s: %s (ART%s)</li>' % (art['feed'], art['titulo'], i)
    # Write to the database
    j[filename] = key
    if verbose:
        print 'Escribiendo en JSON'
    json.dump(j, open(JSON_DATABASE, 'w'))
    # Write the file, using the basic HTML in template.html
    if verbose:
        print 'Guardando HTML en', filename
    write = template % dict(html=html, indice=indice, fecha=fecha)
    arc = open(filename, 'w')
    arc.write(write.encode('utf-8'))
    arc.close()
    # Convert the HTML to PDF
    filename = pipes.quote(filename)  # Avoid command execution
    pdf = pipes.quote(filename + '.pdf')
    if verbose:
        print 'Guardando PDF en', pdf
    os.system('%s "%s" "%s"' % (XHTML2HTTP_EXEC, filename, pdf))
def application(env: tp.Dict[str, tp.Any],
                start_response: tp.Callable[..., None]) -> tp.List[tp.Any]:
    """
    uWSGI entry point

    Manages HTTP request and calls specific functions for [GET, POST]

    Args:
        env: HTTP request environment - dict
        start_response: HTTP response headers function

    Returns:
        data: which will be transmitted
    """
    # Parse query string
    query = dict(urllib.parse.parse_qsl(env['QUERY_STRING']))

    # Parse cookie
    raw_json = env.get('HTTP_COOKIE', '')
    cookie_obj = SimpleCookie()
    cookie_obj.load(raw_json)

    # Even though SimpleCookie is dictionary-like, it internally uses a Morsel object
    # which is incompatible with requests. Manually construct a dictionary instead.
    cookie = {}
    for key, morsel in cookie_obj.items():
        cookie[key] = morsel.value

    status = '200 OK'
    headers = []
    data = []

    # Manage admin actions
    if env['PATH_INFO'][:6] == '/admin':
        status, headers, data = admin(env, query, cookie)
    # Main methods
    elif env['REQUEST_METHOD'] == 'GET':
        status, headers, data = get(env, query, cookie)
    elif env['REQUEST_METHOD'] == 'POST':
        status, headers, data = post(env, query, cookie)
    elif env['REQUEST_METHOD'] == 'OPTIONS':
        status = '200 OK'
        headers = [
            ('Access-Control-Allow-Origin', '*'),
            ('Access-Control-Allow-Methods', 'GET, POST, HEAD, OPTIONS'),
            ('Access-Control-Allow-Headers', '*'),
            ('Allow', 'GET, POST, HEAD, OPTIONS')  # TODO: Add content application/json
        ]

    # Setup request status and headers
    start_response(status, headers)
    return data
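For local testing outside uWSGI, the application callable above can be served with the standard-library WSGI server. A quick sketch, assuming the module defining application (and the get/post/admin handlers it dispatches to) is importable; the host and port are arbitrary choices, not part of the original deployment.

from wsgiref.simple_server import make_server

if __name__ == '__main__':
    # Serve the WSGI callable locally for manual testing.
    with make_server('127.0.0.1', 8000, application) as httpd:
        print('Serving on http://127.0.0.1:8000 ...')
        httpd.serve_forever()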
def rip(self):
    for i in range(0, len(nodes) - 1):
        myssh.connect_before(nodes[i], keys[i], self.how_many)
    disk_storage = storage.storage()
    self.clean()
    thread = []
    for i in range(0, self.how_many):
        from_number = self.start_number + i
        print from_number
        from_name = from_number
        node_name = self.myself
        t = "NEW"
        # Pick the node by the last digit of the number (nodes[0] through nodes[9]).
        x = int(from_number) % 10
        node = nodes[x]
        print node
        data_to_be_sent = {}
        data_to_be_sent['i'] = from_name
        data_to_be_sent['t'] = "NEW"
        from_number = str(from_number)
        data_to_be_sent['d'] = from_number + "," + from_number + "," + from_number + "," + from_number
        syslog.syslog("BALU: uid=%s and time=%s" % (from_name, str(time.time())))
        try:
            thread.append(get.get('http://' + node + ':8080/server', '', data_to_be_sent))
            thread[-1].start()
        except IndexError:
            print "there was an indexError"
        syslog.syslog("BALU: Node selected is %s" % node)
    for i in range(0, self.how_many):
        thread[i].join()
    for i in range(0, len(nodes) - 1):
        myssh.connect_after(nodes[i], keys[i], self.how_many)
def test_get():
    downloader = lambda _: b'abcde'
    cachedir = tempfile.mkdtemp()
    sleep = lambda: None
    url = 'http://foo.bar/baz'
    observed = get(url, cachedir=cachedir, downloader=downloader, sleep=sleep, load=True)
    assert observed == downloader(None), (observed, downloader(None))
    shutil.rmtree(cachedir)
def socrata(url, directory):
    page = 1
    while True:
        full_url = urljoin(url, '/api/views?page=%d' % page)
        filename = os.path.join(directory, re.sub('^https?://', '', full_url))
        raw = get(full_url, cachedir=directory)
        try:
            search_results = json.loads(raw)
        except ValueError:
            # The cached response is not valid JSON; delete it and fetch once more.
            os.remove(filename)
            raw = get(full_url, cachedir=directory)
            try:
                search_results = json.loads(raw)
            except ValueError:
                print('**Something is wrong with %s**' % filename)
                break
        else:
            if len(search_results) == 0:
                break
        page += 1
def download(): "Download GIT source tar file into external area." log.info("downloading git tar file") target = os.path.join(fs.external(), tgz()) from get import get from exception import InconsistentState target = get(url(), target) if not os.path.exists(target): raise InconsistentState, "Tar file does not exist: %s%s" % (os.getcwd(), tgz()) return target
def lab1_main():
    for i in range(1, 11):
        wave_data = get(i)
        wave_energy = energy(wave_data)
        wave_zrate = zrate(wave_data)
        with open('../result/lab1/' + str(i) + '_en.txt', 'w') as f:
            for num in wave_energy:
                f.write(str(num) + '\n')
        with open('../result/lab1/' + str(i) + '_zero.txt', 'w') as f:
            for num in wave_zrate:
                f.write(str(num) + '\n')
    return wave_data
def count_apis(catalog):
    'This includes federated APIs.'
    raw = get(
        urljoin(catalog, '/browse?limitTo=apis&utf8=%E2%9C%93'),
        cachedir=os.path.join('downloads', 'socrata-apis'))
    html = lxml.html.fromstring(raw)
    resultCounts = html.xpath('//div[@class="resultCount"]/text()')
    if resultCounts == []:
        return 0
    else:
        m = re.match(r'^Showing ([0-9]+) of ([0-9]+)$', resultCounts[0].strip())
        return m.group(2)
def test_load_off():
    downloader = lambda _: b'abcde'
    cachedir = tempfile.mkdtemp()
    sleep = lambda: None
    url = 'http://foo.bar/baz'
    os.makedirs(os.path.join(cachedir, 'foo.bar'))
    observed = get(url, cachedir=cachedir, downloader=downloader, sleep=sleep, load=False)
    assert observed is None
    shutil.rmtree(cachedir)
def main():
    mc = pylibmc.Client(["127.0.0.1"], binary=True,
                        behaviors={"tcp_nodelay": True, "ketama": True})
    while True:
        head = mc.get('queue_head')
        tail = mc.get('queue_tail')
        if mc.get('queue_head') < mc.get('queue_tail') or mc.get(str(head)) is not None:
            # there is something in the queue
            item = mc.get(str(head))  # this item needs to be uploaded
            item = str(item)
            item = item.split(',')
            data_to_be_sent = {}
            #for i in range(0, len(item)):
            #    syslog.syslog("AALU: item[" + str(i) + "] = " + item[i])
            data_to_be_sent['i'] = item[0]
            data_to_be_sent['t'] = item[1]
            data_to_be_sent['d'] = item[2]  # + "," + item[3] + "," + item[4] + "," + item[5]
            # logging.info('Item 1 %s' % item[0])
            # logging.info('Item 2 %s' % item[1])
            # logging.info('Item 3 %s' % item[2])
            if item[1] == 'MKP':
                data_to_be_sent['d'] = item[2] + "," + item[3] + "," + item[4] + "," + item[5]
                thread = get.get('http://ec2-54-93-162-141.eu-central-1.compute.amazonaws.com:8080/server', '', data_to_be_sent)
                # thread = get.get('http://0.0.0.0:8888/server', '', data_to_be_sent)
                syslog.syslog("AALU: in loop.py for MKP: %s,%s" % (str(time.time()), data_to_be_sent['d']))
                thread.start()
                thread.join()
            # add if/else branches based on the application for which data is being uploaded.
            elif item[1] == 'IVR':
                data_to_be_sent['d'] = item[2]
                # thread = uploader.file_uploader('http://128.122.140.120:8888/ivr_server', '', data_to_be_sent['d'])
                thread = uploader.file_uploader('http://ec2-54-93-162-141.eu-central-1.compute.amazonaws.com:8080/ivr_server', '', data_to_be_sent['d'])
                syslog.syslog("AALU: in loop.py for IVR: %s,%s" % (str(time.time()), data_to_be_sent['d']))
                thread.start()
                thread.join()
            elif item[1] == 'SEN':
                data_to_be_sent['d'] = item[2]
                thread = uploader.file_uploader('http://ec2-54-93-162-141.eu-central-1.compute.amazonaws.com:8080/random_server', '', data_to_be_sent['d'])
                syslog.syslog("AALU: in loop.py for SEN: %s,%s" % (str(time.time()), data_to_be_sent['d']))
                thread.start()
                thread.join()
            if head < tail:
                mc.incr('queue_head')
            # else there is no need to increase the head location
            mc.delete(str(head))
        else:
            # the queue seems to be empty so do nothing.
            # wait for half a second and then go back to the while loop
            doNothing()
def get_dataset():
    dataset = None
    for i in range(1, 10):
        wave_data = get(i)
        wave_label = get_labels(i)
        wave_label = np.expand_dims(wave_label, axis=0)
        wave_label = wave_label.T
        waves = np.hstack((wave_data, wave_label))
        print(waves.shape)
        if dataset is None:
            dataset = waves
        else:
            dataset = np.vstack((dataset, waves))
    return dataset
def predict():
    if request.method == 'POST':
        global inf2
        mov_name = request.form['movie']
        rat = request.form['rating']
        #print(mov)
        mov = get_id(mov_name)
        mov2 = zip([int(mov)], [int(rat)])
        f = get_recommendations(mov2)
        inf = get(f)
        return render_template('index.html', result=inf)
def _get_media_data(url):
    if contains(r'^https?://(www\.)?gfycat\.com', url):
        # if gfycat gif, deals with http and https sites
        url = url.replace('gfycat.com', 'giant.gfycat.com').replace('www.', '') + '.mp4'
    if contains(r'^https?://(?:i\.)?imgur\.com/.+?\.gifv', url):
        # if imgur gifv
        url = url.replace('.gifv', '.mp4')
    format = _get_format(url, _IMG_FILES + _VIDEO_FILES)
    if format:
        # if the media is in one of the acceptable formats
        response = get(url)
        if response.status_code == 200:
            # make sure we got the content properly
            return {'data': response.content, 'format': format}
        else:
            # if there was a bad status code don't download the content
            print "GOT A WEIRD STATUS CODE: {0}".format(response.status_code)
    return None
def opendatasoft(url, directory):
    '''
    Args:
        url: A string for the root of the portal (like "http://demo.ckan.org")
        directory: The directory to which stuff should be saved

    Returns: Nothing
    '''
    # http://parisdata.opendatasoft.com/api/datasets/1.0/search?rows=1000000
    # Make sure the directory exists.
    try:
        os.makedirs(directory)
    except OSError:
        pass
    fn = os.path.join(directory, remove_protocol(url))
    if not os.path.exists(fn):
        try:
            get(url + '/api/datasets/1.0/search?rows=1000000', cachedir=directory)
        except Exception:
            print('**Error downloading %s**' % url)
        else:
            print('  Downloaded %s' % url)
def POST(self): syslog.syslog("BALU: I got something in iver server") x = web.input(myfile={}) filedir = '/home/ec2-user/ivr' # change this to the directory you want to store the file in. if 'myfile' in x: # to check if the file-object is created filepath=x.myfile.filename.replace('\\','/') # replaces the windows-style slashes with linux ones. filename=filepath.split('/')[-1] # splits the and chooses the last part (the filename with extension) fout = open(filedir +'/'+ filename,'w') # creates the file where the uploaded file should be stored fout.write(x.myfile.file.read()) # writes the uploaded file to the newly created file. fout.close() # closes the file, upload complete. syslog.syslog("BALU: ivr file:%s at time:%s" %(filename,str(time.time()))) data_to_be_sent = {}; data_to_be_sent['ret'] = filename data_to_be_sent['app'] = 'IVR' thread = get.get('http://128.122.140.120:8080/aws_file_handler','',data_to_be_sent); #node_name coming in each request is the ip of the handler thread.start();
def test_read_cache():
    downloader = lambda _: b'abcde'
    cachedir = tempfile.mkdtemp()
    sleep = lambda: None
    url = 'http://foo.bar/baz'
    os.makedirs(os.path.join(cachedir, 'foo.bar'))
    expected = b'lalalala'
    with open(os.path.join(cachedir, 'foo.bar', 'baz'), 'wb') as fp:
        fp.write(expected)
    observed = get(url, cachedir=cachedir, downloader=downloader, sleep=sleep, load=True)
    assert observed == expected, (observed, expected)
    shutil.rmtree(cachedir)
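Taken together, test_get, test_write_cache, test_load_off, and test_read_cache pin down the contract of this cached get: the response body is stored under cachedir/&lt;host&gt;/&lt;path&gt;, load=True returns cached bytes when present and otherwise downloads, stores, and returns them, and load=False downloads and stores without returning anything. A minimal sketch consistent with those tests; the real function evidently has defaults for downloader and sleep (other snippets call it with only cachedir), so its details may differ.

import os
import urllib.parse

def get(url, cachedir, downloader, sleep, load=True):
    # Cache path mirrors the URL: cachedir/<netloc>/<path components>.
    parsed = urllib.parse.urlparse(url)
    path = os.path.join(cachedir, parsed.netloc, *parsed.path.lstrip('/').split('/'))
    if load and os.path.isfile(path):
        with open(path, 'rb') as fp:
            return fp.read()
    sleep()  # politeness delay before hitting the network
    body = downloader(url)
    os.makedirs(os.path.dirname(path), exist_ok=True)
    with open(path, 'wb') as fp:
        fp.write(body)
    return body if load else None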
def get_all(owner, repo):
    pulls = []
    page = 1
    after_limit_date = False
    url = urls.url_all_pulls(owner, repo, page)
    while True:
        response = get.get(url)
        after_limit_date = get_data(response, pulls)
        if after_limit_date or not contains_next_page(response):
            break
        else:
            page = page + 1
            url = urls.url_all_pulls(owner, repo, page)
    return pulls
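get_all() pages through pull requests until either get_data() reports that the limit date has been passed or contains_next_page() says there are no more pages. Assuming get.get() returns a requests.Response, a minimal contains_next_page sketch based on GitHub's Link pagination header; the project's actual helper may inspect the header differently.

def contains_next_page(response):
    # requests exposes the parsed Link header as response.links;
    # GitHub includes a rel="next" entry while more pages remain.
    return 'next' in response.links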
def findarguments():
    wave_data = get(1, False)
    wave_data = np.array(wave_data, dtype=np.int16)
    SNRmax = 0
    amax = 1
    for a in range(570, 600):
        y = factor_DPCM(wave_data, a)
        z = factor_decode_DPCM(y, a)
        snr = SNR(wave_data, z)
        if snr > SNRmax:
            SNRmax = snr
            amax = a
    print("The best a :" + str(amax))
    y = factor_DPCM(wave_data, amax, 4)
    save_data('1_4bit.dpc', y, 4)
    data = load_data('1_4bit.dpc', 4)
    z = factor_decode_DPCM(data, amax, 4)
    print('SNR: ' + str(SNR(wave_data, z)))
    writeaspcm('1_4bit', z)
def downloadCh(dlRootPath, url, config_json=None):
    j = get(url)
    if not j:
        return False
    bname = j['bname']
    episodeName = j['cname']
    # e.g. 极黑的布伦希尔德\\jpg\\特别篇
    comicName = re.sub(r'[\\/:*?"<>|]', '_', bname)
    nowComicFolder = os.path.join(dlRootPath, comicName)
    if not os.path.exists(nowComicFolder):
        os.mkdir(nowComicFolder)
    # Write the config file
    configFullName = os.path.join(nowComicFolder, 'config.json')
    if config_json:
        with open(configFullName, 'w') as config:
            config.write(config_json)
    # Target download directory for the current episode
    desDownloadEpisodeFolderPath = os.path.join(dlRootPath, comicName, episodeName)
    length = j['len']
    print('下載 %s %s 中 共%s頁' % (bname, episodeName, length))
    # Check whether the number of images already in the target folder matches the
    # total read from the page; only download when they differ.
    # The directory must exist first; create it if it does not.
    if not os.path.exists(desDownloadEpisodeFolderPath):
        os.mkdir(desDownloadEpisodeFolderPath)
    if coutFiles(desDownloadEpisodeFolderPath) == length:
        print("已经下载,跳过")
        return True
    e = j['sl']['e']
    m = j['sl']['m']
    path = j['path']
    i = 1
    for filename in j['files']:
        pgUrl = 'https://i.hamreus.com' + path + filename
        print(os.path.basename(pgUrl))
        print('%s / %s' % (i, length), end='\r')
        downloadPg(comicName + "_" + episodeName, dlRootPath, desDownloadEpisodeFolderPath, pgUrl, e, m, i)
        # Wait 0.5 seconds between pages
        time.sleep(0.5)
        i += 1
    return True
def GET(self): syslog.syslog("AALU: in aws_file_handler") user_data=web.input(); if len(user_data) is 2: data_to_be_sent = {} data_to_be_sent['ret'] = user_data['ret'] data_to_be_sent['app'] = user_data['app'] if user_data['app'] == 'IVR': syslog.syslog("AALU: post ivr ret:%s,%s" %(user_data['ret'],str(time.time()))) else: syslog.syslog("AALU: post sen ret: %s,%s" %(user_data['ret'],str(time.time()))) global file_to_ip; if data_to_be_sent['ret'] not in file_to_ip: ip="10.8.0.10" # syslog.syslog("AALU: send file ack to ghana") else: ip = file_to_ip[data_to_be_sent['ret']] # syslog.syslog("AALU: send file ack to nyc:%s" %ip) thread = get.get('http://'+ip+':8081/nexmo_file','',data_to_be_sent); thread.start();
def json(page, url, filename):
    # Merge the JSON output of the Fanfou status API.
    end = '[]'
    while True:
        resp = get.get(page, url)
        if resp != end:
            # 'a' appends to the file; 'w' would overwrite the existing content.
            f = open(filename, 'a')
            f.write(resp)
            # Each write() must be followed by close() so the data is flushed.
            f.close()
        else:
            # The JSON handling is crude: concatenate the data pulled from the Fanfou API,
            # then replace the adjoining list tail/head with a comma. For ordinary files
            # this should not hurt performance.
            f = open(filename, 'r')
            result = f.read()
            f.close()
            result = string.replace(result, "]\n\n[", ",")
            f = open(filename, 'w')
            f.write(result)
            f.close()
            return True
        page = page + 1
def POST(self):
    user_data = web.input()
    if len(user_data) == 4:
        syslog.syslog("AALU: user_data has 4 params")
        from_name = user_data['from_name']
        destination = user_data['destination']
        from_number = user_data['from_number']
        body = user_data['body']
        logging.info("Got SMS: %s" % (body))
        returning = body[::-1]
        data_list = []
        data_to_be_sent = {}
        data_to_be_sent['to'] = from_number
        data_to_be_sent['msisdn'] = destination
        data_to_be_sent['text'] = returning
        data_list.append(data_to_be_sent)
        thread = get.get('http://10.8.0.6:8081/nexmo_sms', '', data_to_be_sent)
        thread.start()
        return "IF was executed"
    else:
        return 'something is wrong'
    # ''' if data_list != []:
def test_send_txt(self):
    '''Find a contact and send a text message'''
    time.sleep(5)
    self.driver.swipe(810, 960, 54, 960, 500)  # swipe left
    time.sleep(2)
    self.driver.find_element_by_id("com.yuntongxun.ecdemo:id/card_item_tv").click()
    self.driver.find_element_by_id("content").click()
    self.driver.find_element_by_id("content").send_keys("13311267857")
    self.driver.find_element_by_id("text_right").click()
    # Send a text message
    set.set1()
    text200 = get.get()
    self.driver.find_element_by_id("chatting_content_et").click()
    self.driver.find_element_by_id("chatting_content_et").send_keys(text200)  # hello tester
    self.driver.find_element_by_id("chatting_send_btn").click()  # send
    self.driver.press_keycode('4')  # press the back key
    time.sleep(2)
    # Verify the sent message
    sendoutcontent = self.driver.find_element_by_id("chatting_content_itv").get_attribute("text")
    print sendoutcontent
    receivecontent = self.driver.find_element_by_id("tv_read_unread").get_attribute("text")
    print receivecontent
    assert_equal(sendoutcontent, text200, msg=u'发送的消息验证失败')
    assert_equal(receivecontent, u"已读", msg=u'消息状态验证失败')
    # Delete the sent message
    el = self.driver.find_element_by_id("chatting_content_itv")
    action1 = TouchAction(self.driver)
    action1.long_press(el, duration=5000).perform()
    self.driver.find_element_by_name(u"删除").click()
    self.driver.find_element_by_id("dilaog_button3").click()
    # Verify the received message
    sendoutcontent = self.driver.find_element_by_id("chatting_content_itv").get_attribute("text")
    assert_equal(sendoutcontent, text200, msg=u'接收的消息验证失败')
    el = self.driver.find_element_by_id("chatting_content_itv")
    action1.long_press(el, duration=5000).perform()
    self.driver.find_element_by_name(u"删除").click()
    self.driver.find_element_by_id("dilaog_button3").click()
def cast_attack(fight_id, character):
    fight_req = {
        "pathParameters": {
            "id": fight_id
        }
    }
    fight_res = get(fight_req, None)
    data = json.loads(fight_res['body'])
    #print(data)
    while data['enemy']['status'] != 'DEAD':
        request = {
            "pathParameters": {
                "id": fight_id
            },
            "body": json.dumps({
                "character_id": character['id']
            })
        }
        response = attack(request, None)
        print(json.dumps(response['body']))
        time.sleep(character['attack_speed'] * .001)
        # Refresh the fight state so the loop condition can change.
        fight_res = get(fight_req, None)
        data = json.loads(fight_res['body'])
def GET(self):
    user_data = web.input()
    global random
    if user_data['do'] == 'search':
        # search on aws for a random file uploaded by someone else. For example if you are
        # calling this from rapidcell you can search with my openvpn IP to try and find a
        # file i have uploaded
        result = ""
        if user_data['key'] in random:  # is not None:
            # send a non-empty string to the user_data['ip']
            for i in range(0, min(len(random[user_data['key']]), 5)):
                result = result + "," + random[user_data['key']][i]
        # send result variable back.
        data_to_be_sent = {}
        data_to_be_sent['result'] = result
        data_to_be_sent['ip'] = user_data['ip']  # this is the openvpn ip coming with the request
        data_to_be_sent['id'] = user_data['id']
        # node_name coming in each request is the ip of the handler
        thread = get.get('http://128.122.140.120:8080/search_handler', '', data_to_be_sent)
        thread.start()
    elif user_data['do'] == 'get':
        syslog.syslog("BALU: sending file back %s,%s" % (str(time.time()), user_data['key']))
        filedir = '/home/ec2-user/random'  # change this to the directory you want to store the file in.
        filename = user_data['key']
        thread = uploader.file_uploader('http://128.122.140.120:8080/get_handler', '', filedir + '/' + filename)
        thread.start()
def test_send_txt200(self):
    '''A group member sends a text message'''
    clear_massage(self, name="groupname1")
    clear_massage(self, name=u"系统通知")
    driver = self.driver
    with open(r'F:\Appium\group\groupID.txt', 'r') as f:
        el = f.read()
    driver.find_element_by_id("com.yuntongxun.eckuailiao:id/btn_address_list").click()  # tap Contacts
    driver.find_element_by_id("com.yuntongxun.eckuailiao:id/tv_head_group").click()  # tap Groups
    driver.find_element_by_id("com.yuntongxun.eckuailiao:id/p_list").click()  # tap the group list
    el = u"群组id:" + el
    driver.find_element_by_name(el).click()  # tap the group id; later this should read the id of the group created by the previous test case
    # A group member sends an @ message
    set.set1()
    text200 = get.get()
    driver.find_element_by_id("com.yuntongxun.eckuailiao:id/chatting_content_et").send_keys(text200)  # enter special characters
    driver.find_element_by_id("com.yuntongxun.eckuailiao:id/chatting_send_btn").click()  # tap send
    # Verify the received message
    el = self.driver.find_element_by_id("com.yuntongxun.eckuailiao:id/chatting_content_itv").get_attribute("text")
    assert_equal(el, text200, msg=u'消息验证失败')
    print "群组内发送消息成功"
    # Delete the sent message
    el = self.driver.find_element_by_id("com.yuntongxun.eckuailiao:id/chatting_content_itv")
    action1 = TouchAction(self.driver)
    action1.long_press(el, duration=5000).perform()
    self.driver.find_element_by_name(u"删除").click()
    self.driver.find_element_by_id("dilaog_button3").click()
def GET(self): syslog.syslog("AALU: in GET") user_data=web.input(); if len(user_data) is 3: #syslog.syslog("AALU: I GOt something Good"); data_to_be_sent = {}; data_to_be_sent['to'] = user_data['to']; data_to_be_sent['msisdn'] = user_data['msisdn']; data_to_be_sent['text'] = user_data['text']; syslog.syslog("AALU: Server said: something"); global number_to_ip; if data_to_be_sent['to'] not in number_to_ip: ip = "10.8.0.10" syslog.syslog("AALU: send this to ghana") else: ip = number_to_ip[data_to_be_sent['to']] thread = get.get('http://'+ip+':8081/nexmo_sms','',data_to_be_sent); thread.start(); # # ip = "10.8.0.10" # ew(); else: syslog.syslog("AALU: not enough params");
def test_send_txt(self):
    '''Find a contact and send a 2049-character text message'''
    time.sleep(2)
    clear_massage(self, name=u"qiuqiu")  # on the Messages page, delete the chat history for this nickname
    self.driver.find_element_by_id("com.yuntongxun.eckuailiao:id/btn_address_list").click()  # Contacts
    time.sleep(2)
    self.driver.find_element_by_id("com.yuntongxun.eckuailiao:id/search").send_keys("13311267857")
    #self.driver.find_element_by_id("com.yuntongxun.eckuailiao:id/btn_my").click()  # tap search
    self.driver.find_element_by_id("com.yuntongxun.eckuailiao:id/account").click()  # tap the account
    # Send the text message
    set.set1()
    text2049 = get.get()
    self.driver.find_element_by_id("com.yuntongxun.eckuailiao:id/phone_chat").click()  # tap "send message"
    self.driver.find_element_by_id("chatting_content_et").send_keys(text2049)  # hello tester
    self.driver.find_element_by_id("chatting_send_btn").click()  # send
    time.sleep(2)
    el = self.driver.find_element_by_id("com.yuntongxun.eckuailiao:id/dialog_tv_message").get_attribute("text")
    assert_equal(el, u"您发送的文本超过最大长度限制", msg=u'消息验证失败')
    print u"发送文本长度最大2048验证通过"
    self.driver.find_element_by_id("com.yuntongxun.eckuailiao:id/dilaog_button1").click()  # tap confirm
    time.sleep(2)
    # Delete the message that failed to send
    el = self.driver.find_element_by_id("chatting_content_itv")
    action1 = TouchAction(self.driver)
    action1.long_press(el, duration=5000).perform()
    self.driver.find_element_by_name(u"删除").click()
    self.driver.find_element_by_id("dilaog_button3").click()
base_name = basename(prog.name)
try:
    code = re.search(code_regex, base_name).group(1)
    logging.debug(re.search(code_regex, base_name))
except AttributeError:
    print('Code not found')
    exit(26)  # Without code, we cannot check the cases, so exit
logging.debug("code = {}".format(code))
if command == 'download':
    import download
    download.download(code, db_folder, remaining, verbosity, quiet)
elif command.startswith('get'):
    import get
    get.get(command, code, db_folder, remaining, verbosity, quiet)
elif command == 'test':
    if prog == 0:
        logging.error('No file found, aborting')
        exit(-1)
    # Compile if CPP file
    if prog.name.endswith('.cpp'):
        logging.info('Compiling...')
        compile_to = '_' + basename(prog.name).split('.')[0]
        proc = Popen(['g++', '-g', prog.name, '-o', compile_to],
                     stdout=PIPE, stderr=PIPE)
        out, err = proc.communicate()
def get(self, request, pk):
    return get(request)
def downloadCh(url, config_json=None):
    def downloadPg(url, e, m, counter):
        h = {
            'accept': 'image/webp,image/apng,image/*,*/*;q=0.8',
            'accept-encoding': 'gzip, deflate, br',
            'accept-language': 'zh-TW,zh;q=0.9,en-US;q=0.8,en;q=0.7,zh-CN;q=0.6',
            'cache-control': 'no-cache',
            'pragma': 'no-cache',
            'referer': 'https://www.manhuagui.com/',
            'sec-fetch-dest': 'image',
            'sec-fetch-mode': 'no-cors',
            'sec-fetch-site': 'cross-site',
            'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/81.0.4044.129 Safari/537.36'
        }
        # Maximum number of retries per page
        for i in range(10):
            try:
                res = requests.get(url, params={'e': e, 'm': m}, headers=h, timeout=10)
                res.raise_for_status()
            except Exception:
                print('頁面 %s 下載失敗: %s 重試中...' % (url, res.status_code), end='')
                print('等待2秒...')
                # Interval between retries
                time.sleep(2)
                continue
            filename = str(counter) + '_' + os.path.basename(url)
            file = open(filename, 'wb')
            for chunk in res.iter_content(100000):
                file.write(chunk)
            file.close()
            # Convert the image; set to False to skip conversion
            if True:
                output_filename = filename + '.jpg'
                src_filename = os.path.join('..', '..', 'jpg', cname, output_filename)
                im = Image.open(filename)
                im.save(src_filename, 'jpeg')
            # End of conversion
            return
        print('超過重試次數 跳過此檔案')

    j = get(url)
    if not j:
        return False
    bname = j['bname']
    cname = j['cname']
    chdir(os.path.join(re.sub(r'[\\/:*?"<>|]', '_', bname), 'jpg', cname))
    os.chdir(os.path.join('..', '..'))
    if config_json:
        with open('config.json', 'w') as config:
            config.write(config_json)
    chdir(os.path.join('raw', cname))
    length = j['len']
    print('下載 %s %s 中 共%s頁' % (bname, cname, length))
    e = j['sl']['e']
    m = j['sl']['m']
    path = j['path']
    i = 1
    for filename in j['files']:
        pgUrl = 'https://i.hamreus.com' + path + filename
        print(os.path.basename(pgUrl))
        print('%s / %s' % (i, length), end='\r')
        downloadPg(pgUrl, e, m, i)
        # Wait 0.5 seconds between pages
        time.sleep(0.5)
        i += 1
    os.chdir(os.path.join('..', '..', '..'))
    return True
def hello_world():
    x = get()
    return jsonify(x)
# init.py
__author__ = "Riju"
__maintainer__ = "Riju"
__email__ = "*****@*****.**"
__status__ = "Development"

import get
import config
import consumer_key

foo = "bar"
get.get(config.config_json)
print(config.config_json)
# If the protocol of the url is not http or https, print error
if help == False and (scheme != 'http' and scheme != 'https'):
    parser.print_usage()
    print("URL must contain either http or https protocol")
# No post or get entered
elif help == False and args.get == False and args.post == False:
    parser.print_usage()
    print("You must specify either the -get or -post option")
else:
    # GET request
    if args.get:
        # If httpc.py -get -help
        if help:
            print(GET_HELP)
        else:
            response = get.get(host, path, header)
    # POST request
    elif args.post:
        # If httpc.py -post -help
        if help:
            print(POST_HELP)
        else:
            response = post.post(host, path, data, header)
    # If httpc.py -help print general help message
    elif help:
        parser.print_usage()
        print(HELP)
    # Output the response
    if not help:
        # If -o is specified, output response to file
print("###Test getting rid of an item") print(delete(del_event, None)) print(list(event, None)) print() print('###Test adding an existing item to pop the quantity') # get the item get_basket_event = { 'pathParameters': { 'id': customer_id, 'product_id': '3' } } basket_item_3 = get(get_basket_event, None) print() print(basket_item_3) create_basket_3 = {'body': basket_item_3['body']} basket_item_3 = create(create_basket_3, None) print(basket_item_3) print('###Test update the quantity to be a specific value') basket_item_3 = json.loads(basket_item_3['body']) basket_item_3['quantity'] = -99
#!/usr/bin/env python2.7
import set
import get

set.set()
get.get()