def upload_file(self, local_path):
    try:
        upload(self, local_path)
    except Exception as e:
        return str(e)
    else:
        return True
def upload(source=None, target=None, machine=None):
    print green("Current machine: " + env.host)
    if not machine or not source or not target:
        abort("Machine, source path and target path must not be empty")
    if machine and not machine == env.host:
        return
    utils.upload(source, target)
def deploy(machine=None):
    print green("Current machine: " + env.host)
    # if a machine is specified, only deploy to that one
    if machine and not machine == env.host:
        return
    # take this host out of the SLB (load balancer)
    #print env.host
    for slb in env.slbList:
        utils.setSlb(env.slbServer, slb, 0)
    utils.runCmd("sleep 5")
    # back up the current release
    utils.zip(env.online)
    utils.download(env.online, env.localPath)
    # upload the new build
    utils.upload(env.source, env.target)
    # kill the running process
    utils.stopProcess(env.aport)
    utils.runCmd("sleep 10")
    # unpack the new build
    utils.unzip(env.online)
    # start the process
    utils.runCmd(env.start_process)
    # sleep for the configured number of seconds
    utils.runCmd("sleep " + env.sleep_time)
    # check the monitoring URL
    run(env.monitor_url)
    # bring the host back into the SLB
    for slb in env.slbList:
        utils.setSlb(env.slbServer, slb, 100)
def setup_mapper(self, _, line):
    """
    Reads in a dummy line from the input.txt file, ignores it,
    and sets up the job passed to MRLandTrendrJob by reading from
    the input S3 dir for that job.

    Outputs a list of the S3 keys for each of the input rasters
    """
    job = os.environ.get('LT_JOB')
    print 'Setting up %s' % job

    analysis_rasts = [
        k.key for k in utils.get_keys(s.IN_RASTS % job)
        if s.RAST_TRIGGER in k.key
    ]
    if not analysis_rasts:
        raise Exception('No analysis rasters specified for job %s' % job)

    # download template rast for grid
    rast_fn = utils.rast_dl(analysis_rasts[0])

    # set up grid
    grid_fn = utils.keyname2filename(s.OUT_GRID % job)
    utils.rast2grid(rast_fn, out_csv=grid_fn)
    utils.upload([grid_fn])

    # note - must yield at end to ensure grid is created
    for i, keyname in enumerate(analysis_rasts):
        yield i, keyname
def pre_calc_all():
    """Uploads FORMA alerts for all countries in all formats to S3."""
    for iso in get_iso_codes():
        for format in FORMATS:
            print 'Downloading %s in %s format...' % (iso, format)
            response, filename = get_response(iso, format)
            to_file(response, filename)
            print 'Uploading %s to S3...' % filename
            utils.upload(filename, 'gfw_downloads_iso')
def up(num, tmp_file, args):
    url = 'http://' + args.serverAddress + '/upload'
    try:
        for i in range(num, -1, -1):
            file = tmp_file + str(i)
            utils.upload(file, url)
            utils.rmfile(file)
            num -= 1
    except:
        utils.log('err_upload_data', args.file, 'error')
        return utils.log(
            'again_upload_data',
            'python dic.py -u impact --file ' + args.file + ' --n ' +
            str(num) + ' --job ' + args.job, 'error')
def upload():
    # TODO:
    # _ streaming response (http://flask.pocoo.org/docs/patterns/streaming/)

    # Parse JSON payload
    payload = json.loads(request.data)

    # Upload media to Flickr
    if "directMediaLink" in payload:
        url = unicode(payload["directMediaLink"]).encode("utf-8")
        utils.upload(url)

    return OK
async def resetspawn(self, ctx: commands.Context, node: str, server: str, username: str):
    await ctx.send("Resetting player " + username + " to spawn...")
    if node == "london":
        serverid = utils.londonids[server]
    elif node == "canada":
        serverid = utils.canadaids[server]
    elif node == "germany":
        serverid = utils.germanyids[server]
    uuid = utils.getUUID(username)
    if uuid:
        if not os.path.exists(uuid + ".dat"):
            url = serverid + "/world/playerdata/" + uuid + ".dat"
            print("Downloading " + url + "...")
            utils.download(url, uuid + ".dat", node)
        dir_path = os.getcwd()
        nbtfile = nbtlib.load(dir_path + "/" + uuid + ".dat")
        url = serverid + "/world/level.dat"
        print("Downloading " + url + "...")
        utils.download(url, "level.dat", node)
        worldfile = nbtlib.load(dir_path + "/" + "level.dat")
        xCoord = worldfile.root["Data"]["SpawnX"]
        yCoord = worldfile.root["Data"]["SpawnY"]
        zCoord = worldfile.root["Data"]["SpawnZ"]
        print("Resetting " + username + "'s coordinates to " +
              str(xCoord) + "," + str(yCoord) + "," + str(zCoord) + "...")
        await ctx.send("Original coords: " + str(nbtfile.root["Pos"][0]) + ", " +
                       str(nbtfile.root["Pos"][1]) + ", " + str(nbtfile.root["Pos"][2]) +
                       " in dim " + str(nbtfile.root["Dimension"]))
        nbtfile.root["Pos"][0] = Double(xCoord)
        nbtfile.root["Pos"][1] = Double(yCoord)
        nbtfile.root["Pos"][2] = Double(zCoord)
        nbtfile.root["Dimension"] = Int(0)
        nbtfile.save()
        await ctx.send("New coords: " + str(nbtfile.root["Pos"][0]) + ", " +
                       str(nbtfile.root["Pos"][1]) + ", " + str(nbtfile.root["Pos"][2]) +
                       " in dim " + str(nbtfile.root["Dimension"]))
        print("Uploading to server...")
        utils.upload(dir_path + "/" + uuid + ".dat",
                     serverid + "/world/playerdata/" + uuid + ".dat", node)
        print("Uploaded!")
        os.unlink(dir_path + "/" + uuid + ".dat")
        os.unlink(dir_path + "/" + "level.dat")
        await ctx.send("Done!")
def grabThumbs(lon, lat, year, w=8000):
    # Grab landsat thumbnail with all visual parameters already baked
    # in instead of manipulating the image in python
    b = utils.createBox(lon, lat, w=w)
    poly = utils.formatPolygon(b)
    composite = ee.Image('L7_TOA_1YEAR/%s' % year).select('30', '20', '10')
    visparams = {'bands': ['30', '20', '10'], 'gain': [2, 2, 1.7]}
    visual_image = composite.visualize(**visparams)
    params = {'scale': 30, 'crs': 'EPSG:4326', 'region': str(b[:-1])}
    ee_url = visual_image.getThumbUrl(params)
    req = utils.download(ee_url)
    s = re.search('thumbid=(.*)&token=', ee_url)
    thumbid = s.group(1)
    filename = "%s.png" % thumbid
    Image.open("thumb").save(filename)
    aws_url = utils.upload(filename)
    os.remove("thumb")
    os.remove(filename)
    return aws_url
def output_reducer(self, label_key, pix_datas):
    """
    fill the data in to a raster image and return the names of
    the generated images
    """
    # download a template raster
    job = os.environ.get('LT_JOB')
    rast_keys = utils.get_keys(s.IN_RASTS % job)
    tmplt_key = [
        k.key for k in rast_keys if s.RAST_TRIGGER in k.key
    ][0]
    tmplt_rast = utils.rast_dl(tmplt_key)

    # name raster so it uploads to correct location
    rast_key = s.OUT_RAST_KEYNAME % (job, label_key)
    rast_fn = utils.keyname2filename(rast_key)

    # write data to raster
    utils.data2raster(pix_datas, tmplt_rast, out_fn=rast_fn)

    # upload raster
    rast_key = utils.upload([rast_fn])[0]
    yield label_key, [rast_key.key]
def uploadData(year):
    """Format and store the NCEP precipitation data on S3 for the
    supplied year."""
    filename = gen_name(year)
    base = 'ftp://ftp.cdc.noaa.gov/Datasets/ncep.reanalysis/surface_gauss/'
    urllib.urlretrieve(base + filename, filename)
    df = cdfToPandas(filename)
    df.to_csv('ncep.csv', index=False, header=False)
    utils.upload('ncep.csv', 'prate/prate-%s' % year, 'wri-rain')

    # remove the files for the year
    os.remove(filename)
    os.remove('ncep.csv')
def screenshot(self):
    im = grab(childprocess=False)
    if sys.platform == 'win32':
        local_path = os.path.join(os.getenv('TEMP'),
                                  'sc_%s.png' % str(get_timestamp()))
        delete_cmd = "del \"%s\"" % local_path
    else:
        local_path = '/tmp/%s.png' % str(get_timestamp())
        delete_cmd = "rm %s" % local_path
    im.save(local_path)
    upload(self, local_path)
    try:
        self.run(delete_cmd)
        return True
    except:
        return False
def POST(self, web):
    form = myform()
    if not form.validates():
        return Output().pageReturnError("To be able to post you need to add an image!")
    else:
        self._cache.setNotUpdated(0)
        return upload().get(web, self._db, 0)
def process(self, event):
    fileName = os.path.basename(event.src_path)
    if (event.is_directory is False and os.path.exists(event.src_path)
            and os.path.basename(event.src_path).startswith('.') is False
            and os.path.getsize(event.src_path) != 0):
        rand = ''.join(
            random.SystemRandom().choice(string.ascii_uppercase + string.digits)
            for _ in range(6))
        cmd = ["cp", event.src_path, "/tmp/" + fileName + '.' + rand]
        args = utils.args_to_string(cmd)
        p = subprocess.check_output(cmd, shell=False)
        sha256 = utils.get_sha256('/tmp/' + fileName + '.' + rand)
        print '[!] Sending ' + event.src_path + '.' + rand + ' to Viper\n[!] sha256: ' + sha256
        utils.upload('/tmp/' + fileName + '.' + rand)
def POST(self, thread, web):
    form = myform()
    if web.input().Content == "" and web.input(File={}).File.filename == "":
        return Output().pageReturnErrorSubFolder("To be able to post you need to add text or an image!")
    else:
        self._cache.setNotUpdated(0)
        self._cache.setNotUpdated(thread)
        return upload().get(web, self._db, thread)
def main():
    result = scrape(
        'https://scholar.google.com/scholar?hl=en&as_sdt=0,6&q=%22responsible+ai%22&scisbd=1')
    sleep(randint(2, 10))
    result += scrape(
        'https://scholar.google.com/scholar?hl=en&as_sdt=0,6&q=harmful+ai&scisbd=1')
    sleep(randint(2, 10))
    result += scrape(
        'https://scholar.google.com/scholar?hl=en&as_sdt=0,6&q=ethics+ai&scisbd=1')
    sleep(randint(2, 10))
    result += scrape(
        'https://scholar.google.com/scholar?hl=en&as_sdt=0,6&q=%22ai+ethics%22&scisbd=1')
    convert(result)
    upload()
def get(self):
    action = self.get_argument('action')
    if action == 'switch':
        ret = utils.switch_pages()
    elif action == 'bookname':
        name = self.get_argument('name')
        ret = utils.set_bookname(name)
    elif action == 'upload':
        remote = self.get_argument('remote')
        ret = utils.upload(remote)
    self.write(json.dumps(ret))
def predict():
    # upload all images in request to folder on server
    filepaths = upload(request, UPLOAD_FOLDER)

    # parse and resize images then get prediction
    parsed = readresize(filepaths)
    result = getprediction(parsed)

    # cleanup and return result
    cleanup(filepaths)
    return jsonify({'prediction': result.tolist()})
def updateIndex(new_items):
    # Accepts a list of dictionaries with new alerts.  Appends the new
    # alerts to `dict.json`, stored on S3, which contains information
    # on all alerts that already have the landsat images
    json_file = 'dict.json'
    base = "http://landsatpostage.s3.amazonaws.com"
    a = requests.get("%s/validation/%s" % (base, json_file))
    old_items = json.loads(a.content)
    if old_items is not None:
        d = old_items + new_items
    else:
        d = new_items

    # dump the new and old entries into a local json file
    with open(json_file, 'w') as f:
        json.dump(d, f)

    # upload the json file and delete the local copy
    utils.upload(json_file, 'validation/%s' % json_file)
    os.remove(json_file)
    return d
async def resetcoords(self, ctx: commands.Context, node: str, server: str,
                      player: str, x: int, y: int, z: int, dim: int):
    """Reset a player's coords"""
    await ctx.send("Resetting player " + player + " to coords: " + str(x) + ", " +
                   str(y) + ", " + str(z) + " in dimension " + str(dim) + "...")
    if node == "london":
        serverid = utils.londonids[server]
    elif node == "canada":
        serverid = utils.canadaids[server]
    elif node == "germany":
        serverid = utils.germanyids[server]
    uuid = utils.getUUID(player)
    if uuid:
        url = serverid + "/world/playerdata/" + uuid + ".dat"
        print("Downloading " + url + "...")
        utils.download(url, uuid + ".dat", node)
        dir_path = os.getcwd()
        nbtfile = nbtlib.load(dir_path + "/" + uuid + ".dat")
        print("Resetting " + player + "'s coordinates to " + str(x) + "," +
              str(y) + "," + str(z) + "...")
        await ctx.send("Original coords: " + str(nbtfile.root["Pos"][0]) + ", " +
                       str(nbtfile.root["Pos"][1]) + ", " + str(nbtfile.root["Pos"][2]) +
                       " in dim " + str(nbtfile.root["Dimension"]))
        nbtfile.root["Pos"][0] = x
        nbtfile.root["Pos"][1] = y
        nbtfile.root["Pos"][2] = z
        nbtfile.root["Dimension"] = Int(dim)
        nbtfile.save()
        await ctx.send("New coords: " + str(nbtfile.root["Pos"][0]) + ", " +
                       str(nbtfile.root["Pos"][1]) + ", " + str(nbtfile.root["Pos"][2]) +
                       " in dim " + str(nbtfile.root["Dimension"]))
        print("Uploading to server...")
        utils.upload(dir_path + "/" + uuid + ".dat",
                     serverid + "/world/playerdata/" + uuid + ".dat", node)
        print("Uploaded!")
        os.unlink(dir_path + "/" + uuid + ".dat")
        await ctx.send("Done!")
def main():
    # obtain first two results
    result = scrape(
        'https://search.techcrunch.com/search;_ylt=Awr9ImItSrtfWq0AA7ynBWVH;_ylc=X1MDMTE5NzgwMjkxOQRfcgMyBGZyA3RlY2hjcnVuY2gEZ3ByaWQDV1JYVG5TV3JRWHVWXy5tSkNvNzNVQQRuX3JzbHQDMARuX3N1Z2cDMQRvcmlnaW4Dc2VhcmNoLnRlY2hjcnVuY2guY29tBHBvcwMwBHBxc3RyAwRwcXN0cmwDMARxc3RybAM5BHF1ZXJ5A2V0aGljcyUyMGFpBHRfc3RtcAMxNjA2MTA5NzU5?p=ethics+ai&fr2=sb-top&fr=techcrunch')
    sleep(randint(2, 10))
    result += scrape(
        'https://search.techcrunch.com/search;_ylt=Awr9JnE_SrtfNYYAUfynBWVH;_ylu=Y29sbwNncTEEcG9zAzEEdnRpZAMEc2VjA3BhZ2luYXRpb24-?p=ethics+ai&fr=techcrunch&fr2=sb-top&b=11&pz=10&bct=0&xargs=0')

    # scrape others
    current = 21
    base = 'https://search.techcrunch.com/search;_ylt=Awr9CKpUTrtfRGMAlx2nBWVH;_ylu=Y29sbwNncTEEcG9zAzEEdnRpZAMEc2VjA3BhZ2luYXRpb24-?p=ethics+ai&pz=10&fr=techcrunch&fr2=sb-top&bct=0&b='
    end = '&pz=10&bct=0&xargs=0'
    while current <= 121:
        sleep(randint(2, 10))
        result += scrape(base + str(current) + end)
        current += 10

    convert(result)
    upload()
def makeData():
    up = None
    litpic = request.files.get('litpic') or get_argument('litpic')
    if not isinstance(litpic, str):
        up = upload(litpic)
        if up:
            litpic = up.file_name
    type_id = get_argument('type_id', type=int, required=True)
    data = dict(type_id=type_id,
                uid=get_argument('uid', type=int, required=True),
                title=get_argument('title', required=True),
                flag=get_argument('flag'),
                litpic=litpic,
                content=get_argument('content'),
                jumplink='/detail/{}'.format(type_id),
                keywords=get_argument('keywords'),
                description=get_argument('description', required=True))
    return data, up
def putUser(uuid):
    user_data = dict(nickname=get_argument('nickname'),
                     email=get_argument('email'),
                     mobile=get_argument('mobile'),
                     status=get_argument('status'))
    up = None
    avatar = request.files.get('avatar') or get_argument('avatar')
    if not isinstance(avatar, str):
        up = upload(avatar)
        if up:
            avatar = up.file_name
    user_info_data = dict(avatar=avatar,
                          sex=get_argument('sex'),
                          qq=get_argument('qq'),
                          birthday=get_argument('birthday'),
                          info=get_argument('info'))
    ret = user_model.edit(user_data, user_info_data, uuid, up)
    return respData(**ret)
def grabImage(lon, lat, year, w=8000):
    # Note that we cannot do pan-sharpening on pre-composited images,
    # since they don't have Band 8, which Landsat ETM+ does have.
    b = utils.createBox(lon, lat, w=w)
    poly = utils.formatPolygon(b)
    composite = ee.Image('L7_TOA_1YEAR/%s' % year).select('30', '20', '10')
    visparams = {'bands': ['30', '20', '10'], 'gain': [1.4, 1.4, 1.1]}
    visual_image = composite.visualize(**visparams)
    params = {'scale': 30, 'crs': 'EPSG:4326', 'region': str(b[:-1])}
    url = visual_image.getDownloadUrl(params)
    req = requests.get(url)

    # Convert the downloaded tif image to a numpy array
    z = zipfile.ZipFile(StringIO.StringIO(req.content))

    def _toArray(color):
        # Grab the image with the associated color (red, green, or
        # blue) and return a numpy array
        a = filter(lambda x: x.endswith('%s.tif' % color), z.namelist())
        p = z.extract(a[0])
        im = Image.open(p)
        os.remove(p)
        return np.array(im)

    tifs = filter(lambda x: x.endswith('.tif'), z.namelist())
    png_name = '%s.png' % tifs[0].split(".")[0]
    r, g, b = map(_toArray, ['red', 'green', 'blue'])

    # convert three separate image arrays into a square image where
    # each element is a triplet
    triplets = np.array([r, g, b]).swapaxes(0, 2)
    data = np.transpose(triplets, axes=(1, 0, 2))  # correct for axis swap
    img = Image.fromarray(data, 'RGB')
    sharpenImage(img).save(png_name)
    url = utils.upload(png_name)
    os.remove(png_name)
    return url
def makeData():
    dirs = get_argument('dirs', required=True)
    up = None
    litpic = request.files.get('litpic') or get_argument('litpic')
    if not isinstance(litpic, str):
        up = upload(litpic)
        if up:
            litpic = up.file_name
    post_data = dict(id=get_argument('id', type=int),
                     pid=get_argument('pid', type=int),
                     model_id=get_argument('model_id', type=int),
                     typename=get_argument('typename', required=True),
                     jumplink='/category/' + dirs,
                     dirs=dirs,
                     litpic=litpic,
                     desctiption=get_argument('description'),
                     sorts=get_argument('sorts'),
                     keywords=get_argument('keywords'),
                     status=get_argument('status'),
                     icon=get_argument('icon') or '<i class="fa fa-circle-o"></i>')
    return post_data, up
import requests
from bs4 import BeautifulSoup

import config
import json
import utils

with open('queries.json', 'r') as f:
    queries = json.loads(f.read())

for query in queries:
    results = utils.search(query)
    if not results:
        continue
    utils.upload(results)
parser.add_argument("zCoord", nargs=1, type=float) parser.add_argument("dim", nargs=1, type=int) args = parser.parse_args() if args.node[0] == "london": serverid = utils.londonids[args.server[0]] elif args.node[0] == "canada": serverid = utils.canadaids[args.server[0]] elif args.node[0] == "germany": serverid = utils.germanyids[args.server[0]] uuid = utils.getUUID(args.player[0]) if uuid: url = serverid + "/world/playerdata/" + uuid + ".dat" print("Downloading " + url + "...") utils.download(url, uuid + ".dat", args.node[0]) dir_path = cwd = os.getcwd() nbtfile = nbtlib.load(dir_path + "/" + uuid + ".dat") print("Resetting " + args.player[0] + "\'s coordinates to " + str(args.xCoord[0]) + "," + str(args.yCoord[0]) + "," + str(args.zCoord[0]) + "...") nbtfile.root["Pos"][0] = args.xCoord[0] nbtfile.root["Pos"][1] = args.yCoord[0] nbtfile.root["Pos"][2] = args.zCoord[0] nbtfile.root["Dimension"] = Int(args.dim[0]) nbtfile.save() print("Uploading to server...") utils.upload(dir_path + "/" + uuid + ".dat", serverid + "/world/playerdata/" + uuid + ".dat", args.node[0]) print("Uploaded!") os.unlink(dir_path + "/" + uuid + ".dat")
def run(self):
    while not self.request_stop.is_set():
        # wait for input or exit
        AnyEvent(self.processing, self.request_stop).wait()
        if self.request_stop.is_set():
            logging.debug('quitting before starting')
            return

        # rely on processing event to signal that we are good to go,
        # WON'T SCALE to multiple worker threads
        photos = self.input_q.get_nowait()
        logging.debug('got photos')

        frames = []
        for frame in self.gifmaker.make_frames(photos):
            frames.append(frame)
            # time.sleep(2)  # test delay
            logging.debug('got frame')
            self.frame_q.put(frame)

        if self.request_stop.is_set():
            logging.debug('stopping thread before making gif')
            return

        gif = self.gifmaker.make_gif(frames)
        # time.sleep(5)  # test delay
        self.gif_q.put(gif)

        # wait for upload trigger OR quit signal
        AnyEvent(self.upload, self.request_stop).wait()
        if self.request_stop.is_set():
            logging.debug('stopping thread before uploading')
            return

        logging.debug('uploading')
        path_name = config.upload_path_fmt.format(time.strftime(config.upload_ts_fmt))
        path_link = config.upload_path_lnk.format(time.strftime(config.upload_ts_fmt))
        path_qr = config.upload_path_qr.format(time.strftime(config.upload_ts_fmt))
        path_url = config.upload_path_url.format(time.strftime(config.upload_ts_fmt))
        # path_qr_url = 'http://indika.net/booth/output_' + (upload_ts_fmt) + '.html'
        # path_qr_url = config.upload_qr_url.format(time.strftime(config.upload_ts_fmt))

        templateLoader = jinja2.FileSystemLoader(searchpath="/home/pi/booth6/")
        templateEnv = jinja2.Environment(loader=templateLoader)
        TEMPLATE_FILE = "template.j2"
        template = templateEnv.get_template(TEMPLATE_FILE)
        template_vars = {"path_link": path_link}
        outputText = template.render(template_vars)
        # template = templateEnv.get_template(TEMPLATE_FILE).render(booth_url=path_url)
        # env = Environment(loader=FileSystemLoader(current_directory))
        # # Find all files with the j2 extension in the current directory
        # templates = glob.glob('*.j2')
        # render_template = config.social_template
        # render_template = config.render_template
        # return env.get_template(render_template).render(path_link, path_url)
        # for f in templates:
        #     rendered_string = render_template(f)
        #     print(rendered_string)

        ssh = paramiko.SSHClient()
        ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        ssh.load_host_keys('/home/pi/.ssh/known_hosts')
        ssh.connect(upload_server, username=upload_user)
        sftp = ssh.open_sftp()
        # Html_file = sftp.open('output_' + (config.upload_ts_fmt) + '.html', "w")
        Html_file = sftp.open(path_url, "w")
        Html_file.write(outputText)
        Html_file.close()
        # sftp.putfo(stream, path)  # assumes the stream is at the start or wherever the user intended
        # logging.debug('Uploaded {}'.format(path))
        sftp.close()
        ssh.close()

        qr_stream = upload(gif, config.upload_server, config.upload_user,
                           path_name, path_qr)
        # you will need to add another stream to the thread class...
        # or we could re-use the gif stream
        self.upload_q.put(qr_stream)
        # time.sleep(5)  # test delay
        logging.debug('upload done')

        self.upload.clear()
        self.processing.clear()
        self.done.set()

    logging.debug('stopping thread')
def run(dockerid, monitor=False, recurse=False):
    print """
                _               _ 
 _ __ ___   ___| |__   _ __ __ _(_)
| '_ ` _ \ / _ \ '_ \ | '__/ _` | |
| | | | | |  __/ | | || | | (_| | |
|_| |_| |_|\___|_| |_||_|  \__,_|_|
"""
    connector = netlinks.NetlinkConnector()

    observer = Observer()
    # this line may need to be changed, depending on your docker install
    observer.schedule(FileHandler(),
                      path='/var/lib/docker/devicemapper/mnt/' + dockerid + '/rootfs',
                      recursive=recurse)
    observer.start()

    telnet = utils.getTelnetPid()

    try:
        while True:
            events = connector.recv()
            for event in events:
                print event
                if event['event'] == 'EXEC':
                    cmdline = netlinks.pid_to_cmdline(event['process_pid'])
                    print 'EXEC (%d):' % (event['process_pid'])
                    print ' - process exe: %s' % (netlinks.pid_to_exe(event['process_pid']))
                    print ' - process cmdline: %s' % cmdline
                    if 'kill' in cmdline and 'telnetd' in cmdline:
                        print ' [!] respawning telnetd'
                        cmd = ['docker', 'exec', dockerid, 'telnetd', '-b', '0.0.0.0:23']
                        args = utils.args_to_string(cmd)
                        telnet = subprocess.check_output(cmd, shell=False)
                elif event['event'] == 'FORK':
                    print 'FORK (parent: %d, child: %d):' % (event['parent_pid'], event['child_pid'])
                    print ' - parent exe: %s' % (netlinks.pid_to_exe(event['parent_pid']))
                    print ' - parent cmdline: %s' % (netlinks.pid_to_cmdline(event['parent_pid']))
                    print ' \_ child exe: %s' % (netlinks.pid_to_exe(event['child_pid']))
                    print ' \_ child cmdline: %s' % (netlinks.pid_to_cmdline(event['child_pid']))
                    if 'deleted' in netlinks.pid_to_exe(event['child_pid']) and monitor is False:
                        childpid = str(event['child_pid'])
                        print ' [!] killing %s' % childpid
                        rand = ''.join(random.SystemRandom().choice(
                            string.ascii_uppercase + string.digits) for _ in range(6))
                        args = ['cp', '/proc/' + childpid + '/exe', '/tmp/exe.' + rand]
                        cp = subprocess.check_output(args, shell=False)
                        utils.upload('/tmp/exe.' + rand)
                        time.sleep(1)
                        cmd = ['kill', '-9', childpid]
                        args = utils.args_to_string(cmd)
                        proc = subprocess.check_output(cmd, shell=False)
                        cmd = ['docker', 'exec', dockerid, 'telnetd', '-b', '0.0.0.0:23']
                        args = utils.args_to_string(cmd)
                        telnet = subprocess.check_output(cmd, shell=False)
                elif event['event'] == 'EXIT':
                    print 'EXIT (%d):' % (event['process_pid'])
                    print ' - process tgid: %s' % (event['process_tgid'])
                    print ' - process exit code %s' % (event['exit_code'])
                    print ' - process signal %s' % (event['exit_signal'])
                    if event['process_pid'] == telnet:
                        print ' [!] respawning telnetd'
                        cmd = ['docker', 'exec', dockerid, 'telnetd', '-b', '0.0.0.0:23']
                        args = utils.args_to_string(cmd)
                        proc = subprocess.check_output(cmd, shell=False)
                        telnet = utils.getTelnetPid()
                print ''
    except KeyboardInterrupt:
        observer.stop()
        observer.join()