def call_subprocess(cmd, stdin_data=None, stdin_async=True):
    """Call a subprocess asynchronously.

    Args:
        cmd: str, command line to run
        stdin_data: str, data to feed to standard input
        stdin_async: bool, whether to use an async stream for stdin
    """
    stdin = Subprocess.STREAM if stdin_async else subprocess.PIPE
    sub_process = Subprocess(
        shlex.split(cmd),
        stdin=stdin,
        stdout=Subprocess.STREAM,
        stderr=Subprocess.STREAM,
    )
    if stdin_data:
        if stdin_async:
            yield Task(sub_process.stdin.write, stdin_data)
        else:
            sub_process.stdin.write(stdin_data)
    if stdin_async or stdin_data:
        sub_process.stdin.close()
    result, error = yield [
        Task(sub_process.stdout.read_until_close),
        Task(sub_process.stderr.read_until_close),
    ]
    raise Return((result, error))
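# A minimal usage sketch for call_subprocess above, assuming it is decorated
# with @tornado.gen.coroutine (which `raise Return(...)` implies); the driver
# below is hypothetical and not part of the source.
from tornado import gen
from tornado.ioloop import IOLoop

@gen.coroutine
def demo():
    # Run `ls -l` without blocking the IOLoop and print both streams.
    result, error = yield call_subprocess('ls -l')
    print(result.decode(), error.decode())

if __name__ == '__main__':
    IOLoop.instance().run_sync(demo)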
def _call_ipptool(self, request):
    with tempfile.NamedTemporaryFile(delete=False) as temp_file:
        temp_file.write(bytes(request, encoding='utf-8'))
    from tornado.process import Subprocess
    process = Subprocess(
        [self.config['ipptool_path'], self.authenticated_uri,
         '-X', temp_file.name],
        stdin=subprocess.PIPE,
        stdout=Subprocess.STREAM,
        stderr=Subprocess.STREAM,
        io_loop=self.io_loop)
    future = []
    self.io_loop.add_timeout(
        self.io_loop.time() + self.config['timeout'],
        functools.partial(self.timeout_handler, process.proc, future))
    try:
        stdout, stderr = yield [
            Task(process.stdout.read_until_close),
            Task(process.stderr.read_until_close)
        ]
        if future:
            raise TimeoutError
    finally:
        os.unlink(temp_file.name)
    result = plistlib.readPlistFromString(stdout)
    try:
        raise Return(result['Tests'][0])
    except (IndexError, KeyError):
        logger = logging.getLogger(__name__)
        logger.error('ipptool command failed: {} {}'.format(stdout, stderr))
        raise
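# timeout_handler is referenced above but not shown; a plausible sketch of
# the method, matching how _call_ipptool uses it: append to the `future`
# sentinel list, then kill the child so the read_until_close Tasks resolve.
# This is an assumption, not code from the source.
def timeout_handler(self, proc, future):
    future.append(True)  # mark that the deadline fired
    proc.kill()          # EOF on stdout/stderr unblocks the pending reads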
def _validate_request_code(self, code, client_id, callback):
    '''
    _validate_request_code - internal method for verifying the given nonce.
    Also removes the nonce from the data_store, as nonces are intended for
    one-time use.
    '''
    nonce = yield Task(self.data_store.fetch, 'nonce_codes', code=code)
    if not nonce:
        raise Proauth2Error('access_denied',
                            'invalid request code: %s' % code)
    if client_id != nonce['client_id']:
        raise Proauth2Error('access_denied',
                            'invalid request code: %s' % code)
    user_id = nonce['user_id']
    expires = nonce['expires']
    yield Task(self.data_store.remove, 'nonce_codes', code=code,
               client_id=client_id, user_id=user_id)
    if time() > expires:
        raise Proauth2Error('access_denied',
                            'request code %s expired' % code)
    callback(user_id)
def call_subprocess(cmd, stdin_data=None, stdin_async=False):
    """
    Wrapper around subprocess call using Tornado's Subprocess class.
    """
    stdin = STREAM if stdin_async else subprocess.PIPE
    sub_process = tornado.process.Subprocess(
        cmd, stdin=stdin, stdout=STREAM, stderr=STREAM
    )
    if stdin_data:
        if stdin_async:
            yield Task(sub_process.stdin.write, stdin_data)
        else:
            sub_process.stdin.write(stdin_data)
    if stdin_async or stdin_data:
        sub_process.stdin.close()
    result, error = yield [
        Task(sub_process.stdout.read_until_close),
        Task(sub_process.stderr.read_until_close)
    ]
    raise Return((result, error))
def runCommandBackground(cmdlist, shell=False):
    """
    Wrapper around subprocess call using Tornado's Subprocess class.

    This routine can fork a process in the background without blocking
    the main IOLoop, and the forked process can run for a long time
    without problems.
    """
    LOG = logging.getLogger('imgstorage.imgstoragenas.NasDaemon')
    LOG.debug('Executing: ' + str(cmdlist))
    # tornado.process.initialize()
    sub_process = tornado.process.Subprocess(
        cmdlist, stdout=STREAM, stderr=STREAM, shell=shell)
    # We need set_exit_callback to fetch the return value; the callback
    # can even be a no-op, but it must be set or sub_process.returncode
    # will always be None.
    sub_process.set_exit_callback(lambda value: value)
    result, error = yield [Task(sub_process.stdout.read_until_close),
                           Task(sub_process.stderr.read_until_close)]
    if sub_process.returncode:
        raise ActionError('Error executing %s: %s' % (cmdlist, error))
    raise Return((result.splitlines(), error))
def receive(self, data):
    start, length = struct.unpack('2B', data)
    print("len:", length)
    data = yield Task(self.stream.read_bytes, length)
    s_acpi = ''.join(struct.unpack_from('4s', data))  # keep 0x00
    acpi_control = struct.unpack_from('B', data)[0]
    if acpi_control & 1 == 0:  # I-FRAME
        ssn, rsn = acpi.parse_i_frame(s_acpi)
        LOG.debug("ssn: {}, rsn: {}".format(ssn, rsn))
        s_asdu = ConstBitStream(bytes=data, offset=4 * 8)
        o_asdu = asdu.ASDU(s_asdu)
    elif acpi_control & 3 == 1:  # S-FRAME
        print("S-FRAME")
        rsn = acpi.parse_s_frame(s_acpi)
        print(rsn)
    elif acpi_control & 3 == 3:  # U-FRAME
        print("U-FRAME")
        if s_acpi == acpi.STARTDT_CON:
            print('connected')
        if s_acpi == acpi.TESTFR_ACT:
            print('ping')
            yield Task(self.send, acpi.TESTFR_CON)
    self.stream.read_bytes(2, self.receive)
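# The bit tests above follow the IEC 60870-5-104 frame discrimination rules:
# the two low bits of the first control octet select the frame type. A
# standalone sketch (the helper below is illustrative, not from the source):
def frame_type(control_octet):
    if control_octet & 1 == 0:
        return 'I-FRAME'  # bit 0 == 0: numbered information transfer
    if control_octet & 3 == 1:
        return 'S-FRAME'  # bits 1..0 == 01: supervisory acknowledgement
    return 'U-FRAME'      # bits 1..0 == 11: unnumbered control function

assert frame_type(0x00) == 'I-FRAME'
assert frame_type(0x01) == 'S-FRAME'
assert frame_type(0x03) == 'U-FRAME'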
def rpc(self, rpc_request, properties=None, callback=None):
    '''
    Publish an RPC request. Returns a :class:`RPCResponseFuture`.

    :param rpc_request: An instance of :class:`RPCRequest`.
    '''
    callback = stack_context.wrap(callback)
    yield Task(self.ensure_connection)
    yield Task(self.ensure_rpc_queue)
    if not properties:
        correlation_id = str(uuid.uuid4())
        properties = pika.BasicProperties(reply_to=self.rpc_queue,
                                          correlation_id=correlation_id)
    else:
        # Reuse the caller-supplied correlation id so the reply can be
        # matched to the right future.
        correlation_id = properties.correlation_id
    logger.info('Publishing RPC request with key: %s' %
                rpc_request.routing_key)
    self.channel.basic_publish(exchange=rpc_request.exchange,
                               routing_key=rpc_request.routing_key,
                               body=rpc_request.json_params,
                               properties=properties)
    logger.info('Constructing RPC response future with cid: %s' %
                correlation_id)
    future = RPCResponseFuture(correlation_id,
                               timeout=rpc_request.timeout,
                               io_loop=self.io_loop)
    self.futures[correlation_id] = future
    callback(future)
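# Hypothetical caller for rpc() above, assuming the method is wrapped with
# gen.engine so Task can drive it, and that RPCResponseFuture is itself
# yieldable and resolves when the correlated reply arrives; the names below
# (client, call_remote) are illustrative only.
@gen.engine
def call_remote(client, request, callback):
    future = yield Task(client.rpc, request)
    response = yield future  # resolves once the matching reply lands
    callback(response)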
def test_expiration(self):
    email = '*****@*****.**'
    token = yield Task(self.moth.create_token, email=email, expire=-1)
    authed = yield Task(self.moth.auth_token, email=email, token=token)
    self.assertFalse(authed)
    token = yield Task(self.moth.create_token, email=email, expire=1)
    authed = yield Task(self.moth.auth_token, email=email, token=token)
    self.assertTrue(authed)
def test_retval(self):
    email = '*****@*****.**'
    want_retval = 'hope this gets returned'
    token = yield Task(self.moth.create_token, email=email,
                       retval=want_retval)
    got_retval = yield Task(self.moth.auth_token, email=email, token=token)
    self.assertEqual(want_retval, got_retval)
def test_remove_token(self):
    email = '*****@*****.**'
    token = yield Task(self.moth.create_token, email)
    found = self.mongo.tokens.find_one(dict(email=email, token=token))
    self.assertIsNotNone(found)
    yield Task(self.moth.remove_token, email=email, token=token)
    not_found = self.mongo.tokens.find_one(dict(email=email, token=token))
    self.assertIsNone(not_found)
def get(self, image_id=''):
    self.set_json_output()
    download = self.get_argument('download', '')
    if download:
        response = yield Task(
            self.api_call,
            url=self.settings['API_URL'] + '/images?download={}'.format(download),
            method='GET')
        if response and response.code in [200, 201]:
            respdata = loads(response.body.decode('utf-8'))
            links = respdata['data']
            folder = self.settings['static_path'] + '/' + str(uid())
            mkdir(folder)
            for link in links:
                info('Downloading: ' + link['url'])
                with open(folder + '/' + link['filename'], 'wb') as f:
                    c = pycurl.Curl()
                    c.setopt(pycurl.USERAGENT,
                             'Mozilla/5.0 (Windows; U; Windows NT 6.1; it; '
                             'rv:1.9.2.3) Gecko/20100401 Firefox/3.6.3 '
                             '(.NET CLR 3.5.30729)')
                    c.setopt(c.URL, link['url'])
                    c.setopt(c.WRITEDATA, f)
                    c.perform()
                    c.close()
            curpath = dirname(realpath(curdir))
            chdir(folder)
            info('Creating zip file: ' + folder + '.zip')
            with ZipFile(folder + '.zip', 'w') as myzip:
                for link in links:
                    myzip.write(link['filename'])
            chdir(curpath)
            rmtree(folder)
            # Schedule removal of the zip file one hour from now.
            dtexec = datetime.now() + timedelta(hours=1)
            jobid = str(uid())
            self.settings['scheduler'].add_job(
                remove_file,
                trigger='date',
                name='Remove file ' + folder + '.zip at ' + str(dtexec),
                run_date=dtexec,
                args=[self.settings['scheduler'], folder + '.zip', jobid],
                coalesce=True,
                id=jobid)
            self.set_header('Content-Type', 'application/octet-stream')
            self.set_header(
                'Content-Disposition',
                'attachment; filename=' + folder.split('/')[-1] + '.zip')
            with open(folder + '.zip', 'rb') as f:
                self.write(f.read())
            self.finish()
        else:
            self.response(500, 'Failed to get URLs to download the images.')
            return
    else:
        response = yield Task(
            self.api_call,
            url=self.settings['API_URL'] + '/images/{}'.format(image_id),
            method='GET')
        self.set_status(response.code)
        if response.code in [200, 201]:
            self.finish(response.body)
        else:
            self.finish({'status': 'error',
                         'message': 'Failed to get image data.'})
def run_process(self):
    self.pipe = Subprocess(shlex.split(self.cmd),
                           stdout=Subprocess.STREAM,
                           stderr=Subprocess.STREAM)
    self.start = time.time()
    out, err = yield [
        Task(self.pipe.stdout.read_until_close),
        Task(self.pipe.stderr.read_until_close)
    ]
    return (out, err)
def main():
    f = ReadOnlyFileStream('./README.md')
    first = yield Task(f.read_until, "\n")
    deuz = yield Task(f.read_until, "\n")
    yield Task(f.seek, 0)
    refirst = yield Task(f.read_until, "\n")
    f.close()
    print(first, refirst)
    assert first == refirst
    print(first, deuz)
def _insertChangeLog(self, branch, **kw):
    the_dict = dict(option_act=branch, detail=convSimpleJson(kw))
    the_dict['time'] = '%s' % int(time.time())
    try:
        yield Task(self.db.insert, self.forms['changelog']['insert'],
                   **the_dict)
        message = 'Changelog recorded successfully.'
    except Exception:
        message = 'Changelog writing failed.'
    raise Return(message)
def command(self, cmd, cmd_text):
    assert isinstance(cmd_text, str)
    assert self.state == self.STATE_CONNECTED
    assert not self._incommand, "overlapped commands not supported"
    assert not self.current_resultset, "overlapped commands not supported"
    cmd_text = cmd_text.encode(self.charset)
    output = Stream()
    output.write_int(1, cmd)
    output.write(cmd_text)
    yield Task(self.stream.write, output.pack(0))
    print((yield Task(read_packet, self.stream)))
def post(self, animal_id=None, rurl=None):
    lobj = yield Task(self.get_animal_by_id, animal_id)
    if not lobj:
        self.response(404, 'Animal not found for the id: ' + str(animal_id))
        return
    # check data
    id_from = animal_id
    id_to = self.input_data.get('relative_id', None)
    relation = self.input_data.get('relation', None)
    if not id_from or not id_to or not relation:
        self.response(400, 'Invalid request.')
        return
    if int(id_from) == int(id_to):
        self.response(400, 'Animals need to be different to define kinship.')
        return
    try:
        robj = yield Task(self.get_animal_by_id, id_to)
    except Exception as e:
        info(e)
        robj = None
    if not robj:
        self.response(400, 'Relative not found with the id: %d' % int(id_to))
        return
    already_relative_f = yield Task(self.check_relative, animal_id, id_to)
    already_relative_t = yield Task(self.check_relative, id_to, animal_id)
    already_relative = already_relative_f or already_relative_t
    if already_relative:
        self.response(409, 'Relation already defined.', already_relative)
        return
    valid, relation, gender = yield Task(self.relation_is_valid,
                                         lobj, robj, relation)
    if not valid:
        self.response(
            400,
            'Invalid relationship assignment request with the relation: %s.'
            ' (The individual with the id %d is a "%s" animal.)'
            % (relation, int(id_to), gender))
        return
    try:
        radd = yield self.Relatives.insert({
            'id_from': int(animal_id),
            'id_to': int(id_to),
            'relation': relation.lower(),
            'created_at': datetime.now(),
            'updated_at': datetime.now()
        })
    except Exception as e:
        info(e)
        radd = None
    if radd:
        self.response(201, 'Relation added.')
    else:
        self.response(500, 'Failed to add relation.')
def send_users_activity(self):
    """
    Publish the current users to the redis channel.
    """
    users = yield Task(self.application.client.lrange, 'users', 0, -1)
    if users:
        try:
            users = [tornado.escape.json_decode(user) for user in users]
            yield Task(self.application.client.publish, 'users',
                       tornado.escape.json_encode(users))
        except tornado.websocket.WebSocketClosedError:
            logging.warning("Websocket closed when sending message")
def test_can_get_limit_usage(self):
    url = 'http://globo.com'
    key = 'limit-for-%s' % url
    self.cache.redis.delete(key)
    yield Task(self.cache.redis.zadd, key, {'a': 1, 'b': 2, 'c': 3})
    limit = yield Task(self.cache.redis.zcard, key)
    expect(limit).to_equal(3)
    limit = yield self.cache.get_limit_usage(url)
    expect(limit).to_equal(3)
def __FromDNSPOD(self, name, *arg, **kw):
    zid, _ = arg
    status_dict = dict(domain_name=name, status=str())
    api = self.dnspod_api['record']['list']
    f = yield self.initRequest()
    domain_id = yield Task(self._Domainid, f, name)
    origin_record_list = yield Task(f.urlPost, api, domain_id=domain_id)
    record_list = (origin_record_list[1]['records']
                   if origin_record_list[0] else list())
    status = yield Task(self._InsertRecordList, record_list, zid=zid)
    status_dict['status'] = status
    raise Return(status_dict)
def get(self):
    self.write(self.render_string('waiting.html'))
    self.flush()
    user = self.current_user
    user_studies = yield Task(self._get_private, user)
    shared_studies = yield Task(self._get_shared, user)
    all_emails_except_current = yield Task(self._get_all_emails)
    all_emails_except_current.remove(self.current_user.id)
    self.render('private_studies.html', user_studies=user_studies,
                shared_studies=shared_studies,
                all_emails_except_current=all_emails_except_current)
def handle(client, stream):
    client_read = partial(read, client)
    while True:
        data = yield Task(stream.read_until, '{')
        data = data[:-1]
        count = int(data.strip()) - 1
        print('Reading {0} bytes.'.format(count))
        data = '{' + (yield Task(stream.read_bytes, count))
        client_read(data)
        if data == '{x}':
            break
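# A worked trace of the framing handle() appears to expect: each message is
# a decimal length prefix followed by a brace-delimited payload, where the
# prefix counts the payload including both braces (an inference from the
# code above, not documented in the source):
#
#   wire bytes: '3{x}'
#   read_until('{') -> '3{'; int('3') - 1 == 2 bytes still to read
#   read_bytes(2)   -> 'x}'; reassembled message == '{x}' (the stop marker)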
def request_authorization(self, client_id, user_id, response_type,
                          redirect_uri=None, scope=None, state=None,
                          expires=600, callback=None):
    '''
    request_authorization generates a nonce and stores it in the data_store
    along with the client_id, user_id, and expiration timestamp.
    It then returns a dictionary containing the nonce as "code" and the
    passed state.
    ---
    response_type MUST be "code"; this is directly from the OAuth2 spec.
    This probably doesn't need to be checked here, but if it's in the spec
    it should be verified somewhere.
    scope has not been implemented here. It will be stored, but there is
    no scope-checking built in at this time.
    If a redirect_uri is passed, it must match the registered redirect_uri.
    Again, this is per spec.
    '''
    if response_type != 'code':
        raise Proauth2Error('invalid_request',
                            'response_type must be "code"', state=state)
    client = yield Task(self.data_store.fetch, 'applications',
                        client_id=client_id)
    if not client:
        raise Proauth2Error('access_denied')
    if redirect_uri and client['redirect_uri'] != redirect_uri:
        raise Proauth2Error('invalid_request', "redirect_uris don't match")
    nonce_code = self._generate_token()
    expires = time() + expires
    try:
        yield Task(self.data_store.store, 'nonce_codes', code=nonce_code,
                   client_id=client_id, expires=expires, user_id=user_id,
                   scope=scope)
    except Proauth2Error as e:
        e.state = state
        raise e
    callback({'code': nonce_code, 'state': state})
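# A hypothetical end-to-end sketch tying request_authorization above to the
# _validate_request_code method shown earlier: the nonce handed out here is
# the one-time "code" the token endpoint later redeems. Assumes both methods
# are gen.engine-wrapped so Task can drive them; the driver is illustrative.
@gen.engine
def demo_code_grant(auth, client_id, user_id, callback):
    grant = yield Task(auth.request_authorization, client_id, user_id, 'code')
    user = yield Task(auth._validate_request_code, grant['code'], client_id)
    assert user == user_id  # the nonce maps back to the granting user
    callback(user)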
def test_can_get_next_job_list(self):
    key = 'next-job-bucket'
    self.cache.redis.delete(key)
    for x in range(2):
        page = PageFactory.create(uuid='%d' % x, url='http://g%d.com' % x)
        yield Task(self.cache.add_next_job_bucket, page.uuid, page.url)
    data = yield Task(self.cache.get_next_job_list, 1, 10)
    expect([loads(job) for job in data]).to_equal([
        {"url": "http://g0.com", "page": "0"},
        {"url": "http://g1.com", "page": "1"}
    ])
def post(self):
    if 'new_password' in self.input_data.keys():
        if len(self.input_data['new_password']) >= 6:
            ouser = yield Task(self.get_user_by_email,
                               self.current_user['username'])
            if ouser:
                resp = yield Task(self.changePassword, ouser,
                                  self.input_data['new_password'])
                self.response(resp[0], resp[1])
            else:
                self.response(400, 'Invalid user requesting password change.')
        else:
            self.response(400, 'Password must have at least 6 characters.')
    else:
        self.response(400, "To change your password, you must send it in a "
                           "json object with the key 'new_password'.")
def methods_tests(self, storage):
    res = yield Task(storage.make_embedded, project="test_project1",
                     period="month", metrics=self.get_embedded_metrics())
    self.assertTrue(isinstance(res, UUID))
    embedded_res = yield Task(storage.get_embedded, res)
    self.assertEqual(embedded_res['project'], "test_project1")
    self.assertEqual(embedded_res['period'], "month")
    self.assertEqual(len(embedded_res['metrics']),
                     len(self.get_embedded_metrics()))
def receive(self, data):
    self.recived = time.time()
    start, length = struct.unpack('2B', data)
    # print("len:", length)
    data = yield Task(self.stream.read_bytes, length)
    s_acpi = ''.join(struct.unpack_from('4s', data))  # keep 0x00
    acpi_control = struct.unpack_from('B', data)[0]
    if acpi_control & 1 == 0:  # I-FRAME
        self.ssn, self.rsn = acpi.parse_i_frame(s_acpi)
        LOG.debug("ssn: {}, rsn: {}".format(self.ssn, self.rsn))
        # s_asdu = ConstBitStream(bytes=data, offset=5 * 8)
        s_asdu = ConstBitStream(bytes=data, offset=4 * 8)
        o_asdu = asdu.ASDU(s_asdu)
        for o in o_asdu.objs:
            if isinstance(o, asdu.MMeTf1):
                print(o.val)
            if isinstance(o, asdu.MSpTb1):
                print(o.val)
        # Acknowledge the received I-frame with an S-frame.
        yield Task(self.send, acpi.s_frame2(self.ssn + 1))
    elif acpi_control & 3 == 1:  # S-FRAME
        print("S-FRAME")
        self.rsn = acpi.parse_s_frame(s_acpi)
        print(self.rsn)
    elif acpi_control & 3 == 3:  # U-FRAME
        print("U-FRAME")
        if s_acpi == acpi.STARTDT_CON:
            print('connected')
        if s_acpi == acpi.TESTFR_ACT:
            print('ping')
            yield Task(self.send, acpi.TESTFR_CON)
    self.stream.read_bytes(2, self.receive)
def test_increment_page_score(self):
    self.cache.redis.delete('page-scores')
    total = yield Task(self.cache.redis.zcard, 'page-scores')
    expect(int(total)).to_equal(0)
    yield self.cache.increment_page_score('page-1')
    score = yield Task(self.cache.redis.zscore, 'page-scores', 'page-1')
    expect(int(score)).to_equal(1)
    yield self.cache.increment_page_score('page-1')
    score = yield Task(self.cache.redis.zscore, 'page-scores', 'page-1')
    expect(int(score)).to_equal(2)
def test_add_next_job_bucket(self):
    key = 'next-job-bucket'
    self.cache.redis.delete(key)
    prefs = yield Task(self.cache.redis.get, key)
    expect(prefs).to_be_null()
    for x in range(2):
        page = PageFactory.create(uuid='%d' % x, url='http://g%d.com' % x)
        yield Task(self.cache.add_next_job_bucket, page.uuid, page.url)
    data = yield Task(self.cache.redis.zrange, key, 0, 0)
    expect(data).to_be_like([dumps({"url": "http://g0.com", "page": "0"})])
    data = yield Task(self.cache.redis.zrange, key, 1, 1)
    expect(data).to_be_like([dumps({"url": "http://g1.com", "page": "1"})])
def handle_stream(self, stream, address):
    while True:
        line = yield Task(stream.read_until, "\n")
        try:
            metric, timestamp, value = parse(line)
        except Exception:
            stream.close()
            return  # stop handling: the connection is gone
        serialized = struct.pack('!ff', timestamp, value)
        pipe = self.redis.pipeline()
        if metric not in self.metrics:
            pipe.sadd('metrics', metric)
            self.metrics.add(metric)
        pipe.zadd(metric, timestamp, serialized)
        pipe.publish(metric, serialized)
        yield Task(pipe.execute)
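# parse() is not shown in the source; a minimal sketch under the assumption
# that each line carries a graphite-style "metric timestamp value" triple,
# which matches the float packing above (names and format are assumptions):
def parse(line):
    metric, timestamp, value = line.split()
    return metric, float(timestamp), float(value)

# e.g. parse("cpu.load 1415000000 0.75\n") -> ('cpu.load', 1415000000.0, 0.75)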
def test_the_whole_megillah(self):
    email = '*****@*****.**'
    ip = '19.64.2.67'
    expire = 2
    retval = 'Allan Melvin'
    token = yield Task(self.moth.create_token, email=email, ip=ip,
                       expire=expire, retval=retval)
    authed = yield Task(self.moth.auth_token, email=email, token=token,
                        ip=ip)
    self.assertEqual(authed, retval)