def externals_property(self):
    if self._externals_property == None:
        propget_cmd = Shell(verbose=False, fatal=True).run(
            'svn pg --xml svn:externals "{0}"'.format(self.url))
        contents_xml = propget_cmd.output_xml()
        self._externals_property = contents_xml.findtext('target/property')
    return self._externals_property
def StorePackageHashes(self, projectPathSrc, username, projectname, branchname):
    shell = Shell(Logger())
    con = Database(self.config)
    for dir in os.listdir(projectPathSrc):
        if os.path.isdir(projectPathSrc + "/" + dir):
            packagename = os.path.basename(dir)
            # update hash of each package
            cmd = "find " + projectPathSrc + "/" + dir + " -type f -print0 | sort -z | xargs -0 sha1sum | sha1sum | awk '{print $1}'"
            hash = shell.evaluateshell(cmd)
            # print(packagename + " " + hash)
            cursor = con.execute(
                "SELECT * FROM package WHERE username = ? AND projectname = ? AND packagename = ? AND branchname = ?",
                (username, projectname, packagename, branchname))
            row = cursor.fetchone()
            alreadyuptodate = False
            idToUpdate = None
            if row is not None:
                if row['sourcehash'] == hash:
                    alreadyuptodate = True
                else:
                    idToUpdate = row['id']
            if not alreadyuptodate:
                if idToUpdate is None:
                    stmt = "INSERT INTO package(username, projectname, packagename, branchname, sourcehash) VALUES(?,?,?,?,?)"
                    cursor = con.execute(stmt, (username, projectname, packagename, branchname, hash))
                else:
                    stmt = "UPDATE package SET sourcehash = ? WHERE id = ?"
                    cursor = con.execute(stmt, (hash, idToUpdate))
                    self.MarkPackageAsDirty(con, idToUpdate)
    con.commit()
    con.close()
def shoot(self, event):
    if (self.id == 1):
        if (event.key == pygame.K_SPACE):
            return Shell(self.pointA[0], self.pointA[1], self.theta)
    elif (self.id == 2):
        if (event.key == pygame.K_KP_ENTER):
            return Shell(self.pointA[0], self.pointA[1], self.theta)
def main():
    try:
        shell = Shell(sys.stdin, sys.stdout)
    except ShellError as e:
        print(e.error)
        return 1
    shell.run()
def collecting_traffic(name):
    print(str(name) + ' Thread starts')
    shell = Shell()
    shell.execute(
        "echo \"1234\" | sudo -S tcpdump -i any -v -G 20 not arp and not src 127.0.0.1 and not dst 127.0.0.1 -w data-%S.pcap"
    )
def create_compile_shell(code_name, exe_name, compile_type):
    Log.compile_log('Compile: create compile file')
    shell = Shell(name='compile_' + code_name + '_' + exe_name + '_' + compile_type + '.sh',
                  path=DATA.HOST_SHELL_PATH)
    # generate the compile .sh file
    compile_name = ''
    if compile_type.upper() == 'C':
        compile_name = 'gcc ' + DATA.DOCKER_CODES_PATH + '/' + code_name + '.c -o ' \
                       + DATA.DOCKER_EXES_PATH + '/' + exe_name
    elif compile_type.upper() == 'CPP':
        compile_name = 'g++ ' + DATA.DOCKER_CODES_PATH + '/' + code_name + '.cpp -o ' \
                       + DATA.DOCKER_EXES_PATH + '/' + exe_name
    elif compile_type.upper() == 'JAVA':
        compile_name = 'javac ' + DATA.DOCKER_CODES_PATH + '/' + code_name + '.java\n'
        compile_name += 'mv ' + DATA.DOCKER_CODES_PATH + '/' + exe_name + '.class ' + DATA.DOCKER_EXES_PATH
    elif compile_type.upper() == 'PY':
        compile_name = 'cp ' + DATA.DOCKER_CODES_PATH + '/' + code_name + '.py ' + DATA.DOCKER_EXES_PATH
    Log.compile_log('Compile: Get Compile Name')
    try:
        Log.compile_log('Compile: Create file and write data')
        compile_file = open(shell.get_path() + '/' + shell.get_name(), 'w')
        compile_file.write("#! /bin/bash\n")
        compile_file.write(compile_name + '\n')
        compile_file.write("if [ \"$?\" = \"0\" ]\n")
        compile_file.write("then echo 'success'\nelse\necho 'fail'\nfi")
        compile_file.close()
        Log.compile_log('Compile: write data over')
        os.system("chmod 777 " + shell.get_path() + '/' + shell.get_name())
    except Exception, e:
        compile_file.close()
        print e.message
        error_log = open(DATA.HOST_ERROR_LOG_PATH + '/create_compile_shell_' + str(time.time()) +
                         str(random.randint(1000, 9999)) + '.log', 'w')
        error_log.write(e.message)
        error_log.close()
        return None
def createfolder():
    id = bottle.request.forms.shellid
    foldername = bottle.request.forms.foldername
    path = bottle.request.forms.path
    shellinfo = get_shell_from_id(id)
    shell = Shell(shellinfo['url'], shellinfo['pwd'], shellinfo['plugin'], shellinfo['method'], shellinfo['coding'])
    return shell.mkdir(foldername, path)
def command():
    id = bottle.request.forms.shellid
    cmd = bottle.request.forms.command
    path = bottle.request.forms.path
    shellinfo = get_shell_from_id(id)
    shell = Shell(shellinfo['url'], shellinfo['pwd'], shellinfo['plugin'], shellinfo['method'], shellinfo['coding'])
    return shell.execute_command(cmd, path)
def collecting_traffic(name):
    print(str(name) + ' Thread starts')
    shell = Shell()
    net_data = NetworkData()
    ip = net_data.get_host_ip()
    net = ipaddress.ip_network(ip, strict=False)
    shell.execute("echo \"1996\" | sudo -S tcpdump -i any -v -G 20 net " + str(net) + " -w data-%S.pcap")
def delfile():
    id = bottle.request.forms.shellid
    filename = bottle.request.forms.filename
    path = bottle.request.forms.path
    shellinfo = get_shell_from_id(id)
    shell = Shell(shellinfo['url'], shellinfo['pwd'], shellinfo['plugin'], shellinfo['method'], shellinfo['coding'])
    return shell.del_file(filename, path)
def fire(self):
    if self.ready_to_fire:
        self.ready_to_fire = False
        self.count = 0
        s = Shell(sf.Texture.from_file("res/Shell.png"))
        s.setup(self, self.get_forward_point())
        self.shells.append(s)
        self.shells_fired += 1
def start(argv=None, **kwds):
    """general-purpose entry point for applications"""
    cls = kwds.get('applicationClass')
    kwds = dict(**kwds)
    kwds['argv'] = argv
    app = cls()
    shell = Shell(app)
    shell.run(**kwds)
    return 0
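# Hedged usage sketch (not part of the original source): how the start()
# entry point above might be invoked, assuming a hypothetical MyApplication
# class that the surrounding Shell framework accepts as 'applicationClass':
#
#     import sys
#     exit_code = start(argv=sys.argv[1:], applicationClass=MyApplication)
#     sys.exit(exit_code)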
def collecting_traffic(name):
    print(str(name) + ' Thread starts')
    shell = Shell()
    shell.execute("echo \"abcd\" | sudo -S rm *.csv")
    shell.execute("echo \"abcd\" | sudo -S rm *.pcap")
    shell.execute("echo \"abcd\" | sudo -S stop my")
    shell.execute(
        "echo \"abcd\" | sudo -S tcpdump -i any -v -G 20 not arp and not src 10.3.0.24 and not src 127.0.0.1 and not "
        "dst 10.3.0.24 and not dst 127.0.0.1 -w data-%S.pcap")
def test(self, _, input_string, output_string):
    input_stream = io.StringIO(input_string)
    output_stream = io.StringIO()
    # noinspection PyTypeChecker
    shell = Shell(input_stream, output_stream)
    shell.run()
    self.assertEqual(output_string, output_stream.getvalue())
def getfilelist():
    shellid = bottle.request.forms.shellid
    path = bottle.request.forms.path
    shellinfo = get_shell_from_id(shellid)
    shell = Shell(shellinfo['url'], shellinfo['pwd'], shellinfo['plugin'], shellinfo['method'], shellinfo['coding'])
    info = {
        'sysinfo': shell.get_sys_info(),
        'filelist': shell.get_dir(path),
    }
    return info
def __sort(algs, data):
    if 'insertion' == algs:
        Insertion.sort(data)
    if 'insertion_advance' == algs:
        Insertion.sort_advance(data)
    if 'selection' == algs:
        Selection.sort(data)
    if 'shell' == algs:
        Shell.sort(data)
    if 'merge' == algs:
        Merge.sort(data)
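# Hedged usage sketch (not from the original source): dispatching on the
# algorithm name, assuming Insertion/Selection/Shell/Merge each expose a
# static sort(data) that sorts the list in place:
#
#     data = [5, 2, 9, 1]
#     __sort('shell', data)   # data is now [1, 2, 5, 9]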
def uploadfile():
    id = bottle.request.forms.shellid
    path = bottle.request.forms.path
    formfile = bottle.request.files.file
    formfile.save('./upload', overwrite=True)
    shellinfo = get_shell_from_id(id)
    shell = Shell(shellinfo['url'], shellinfo['pwd'], shellinfo['plugin'], shellinfo['method'], shellinfo['coding'])
    info = shell.upload_file('./upload/%s' % formfile.filename, path)
    os.remove('./upload/%s' % formfile.filename)
    return info
class Handler(FileSystemEventHandler):
    def __init__(self, queue):
        self.queue = queue
        self.shell = Shell()

    def on_any_event(self, event):
        if event.is_directory:
            return None
        elif event.event_type == 'created':
            # Event is created, you can process it now
            print("Watchdog received created event - % s." % event.src_path)
            if ('data' in str(event.src_path)):
                self.queue.put(str(event.src_path)[2:])
                print(self.queue.qsize())
                if (self.queue.qsize() > 1):
                    file = self.queue.get()
                    csv_file_name = socket.gethostname() + 'traffic' + file[5:-5] + '.csv'
                    self.shell.execute("echo \"abcd\" | sudo -S tshark -r " + file + " -T fields -E separator=, -E "
                                       "quote=d -e _ws.col.No. -e "
                                       "_ws.col.Time -e _ws.col.Source -e "
                                       "tcp.srcport -e _ws.col.Destination "
                                       "-e tcp.dstport -e _ws.col.Protocol "
                                       "-e _ws.col.Length -e _ws.col.Info "
                                       "> " + csv_file_name)
                    try:
                        var_password = "******"
                        var_command = "scp -o StrictHostKeychecking=no " + csv_file_name + " [email protected]:/root/GateWay/Profiles"
                        var_child = pexpect.spawn(var_command)
                        # the expect/sendline sequence is partially redacted in the source
                        i = var_child.expect(["password:"******"Got the key or connection timeout")
                        pass
                    except Exception as e:
                        print("Oops Something went wrong buddy")
                        print(e)
class Handler(FileSystemEventHandler):
    def __init__(self, queue, dfQueue):
        self.df_queue = dfQueue
        self.queue = queue
        self.shell = Shell()

    def on_any_event(self, event):
        if event.is_directory:
            return None
        elif event.event_type == 'created':
            # Event is created, you can process it now
            print("Watchdog received created event - % s." % event.src_path)
            if ('data' in str(event.src_path)):
                self.queue.put(str(event.src_path)[2:])
                print(self.queue.qsize())
                if (self.queue.qsize() > 1):
                    file = self.queue.get()
                    csv_file_name = 'traffic' + file[5:-5] + '.csv'
                    self.shell.execute("echo \"1996\" | sudo -S tshark -r " + file + " -T fields -E separator=, -E quote=d -e _ws.col.No. -e _ws.col.Time -e _ws.col.Source -e _ws.col.SourcePort -e _ws.col.Destination -e _ws.col.DestinationPort -e _ws.col.Protocol -e _ws.col.Length -e _ws.col.Info > " + csv_file_name)
                    packets = []
                    csv_file = open(csv_file_name, mode='r')
                    csv_reader = csv.DictReader(csv_file, fieldnames=['no', 'time', 'src_ip', 'src_port', 'dst_ip', 'dst_port', 'protocol', 'length', 'info'])
                    for row in csv_reader:
                        packet = {
                            'time': row['time'],
                            'src_ip': {True: row['src_ip'], False: '0.0.0.0'}[row['src_ip'] != ''],
                            'src_port': {True: row['src_port'], False: '0'}[row['src_port'] != ''],
                            'dst_ip': {True: row['dst_ip'], False: '0.0.0.0'}[row['dst_ip'] != ''],
                            'dst_port': {True: row['dst_port'], False: '0'}[row['dst_port'] != ''],
                            'protocol': row['protocol'],
                            'length': int(row['length']),
                            'info': row['info'],
                            'dir': 'NA'
                        }
                        packets.append(packet)
                    csv_file.close()
                    os.remove(csv_file_name)
                    df = pd.DataFrame(packets)
                    self.df_queue.put(df)
def subdirectories(self):
    if self._subdirectories == None:
        self._subdirectories = []
        contents_xml = Shell(verbose=False).run(
            'svn ls --xml "{0}"'.format(self.url)).output_xml()
        for element in contents_xml.find('list').findall('entry'):
            if not element.get('kind') == 'dir':
                continue
            name = element.findtext('name')
            subdirectory_url = os.path.join(self.url, name)
            subdirectory = SVNDirectory(subdirectory_url, parent=self)
            if subdirectory.should_ignore():
                continue
            self._subdirectories.append(subdirectory)
    return self._subdirectories
def __init__(self, containername, configBuildMachine, logger, packageSrcPath, containertype):
    self.hostname = containername
    self.containertype = containertype
    self.staticMachine = (True if ('static' in configBuildMachine and configBuildMachine['static'] == "t") else False)

    self.port = "22"
    if configBuildMachine['port'] is not None:
        self.port = str(configBuildMachine['port'])
    self.cid = 10
    if configBuildMachine['cid'] is not None:
        self.cid = configBuildMachine['cid']

    self.containername = str(self.cid).zfill(3) + "-" + containername
    if containertype == "lxd":
        self.containername = "l" + str(self.cid).zfill(3) + "-" + containername.replace(".", "-")
    self.containerIP = socket.gethostbyname(self.hostname)
    self.containerPort = str(2000 + int(self.cid))

    if configBuildMachine['local'] is not None and configBuildMachine['local'] == "t":
        # the host server for the build container is actually hosting the LBS application as well
        # or the container is running on localhost
        if containertype == "lxc":
            self.containerIP = self.calculateLocalContainerIP(self.cid)
            self.containerPort = "22"
        if containertype == "lxd":
            self.containerIP = self.calculateLocalContainerIP(self.cid)
            self.containerPort = "22"
        if containertype == "docker":
            self.containerIP = self.calculateLocalContainerIP(1)
            self.containerPort = str(2000 + int(self.cid))

    self.config = Config.LoadConfig()
    self.SSHContainerPath = self.config['lbs']['SSHContainerPath']
    self.logger = logger
    self.shell = Shell(logger)
    # we are reusing the slots, for caches etc
    self.slot = containername
    self.distro = ""
    self.release = ""
    self.arch = ""
    self.staticIP = ""
    self.packageSrcPath = packageSrcPath
class Linux(object):
    def __init__(self, modules, rpyc=None, modules_user=None, rpyc_user=None):
        self._modules = modules
        self._modules_user = modules_user
        self._rpyc = rpyc
        self._rpyc_user = rpyc_user
        self._shell = Shell(self._modules, self._rpyc, self._modules_user)
        if self._rpyc is not None:
            self._ip = self.cmd("netstat -na | grep ':18812.*ESTABLISHED' | head -1 | tr ':' ' ' | awk {'print $4'}",
                                shell=True).stdout.read().strip()
        else:
            self._ip = "127.0.0.1"

    def __del__(self):
        if self._rpyc is not None:
            self._rpyc.close()

    def start(self):
        self._shell.wait_process_by_short_name("Xorg")
        time.sleep(10)
        self._ui = UI(self._shell)
        self._ui.start()

    def stop(self):
        self._ui.stop()
        self.cmd('pkill rpyc', infrastructure=True)

    @property
    def modules(self):
        return self._modules

    @property
    def shell(self):
        return self._shell

    @property
    def ui(self):
        return self._ui

    @property
    def ip(self):
        return self._ip

    def cmd(self, cmdline, *args, **kw):
        return self.shell.cmd(cmdline, *args, **kw)
def create_run_shell(exe_name, interpreter, question, param=[]):
    Log.compile_log('RUN: create run file')
    scale = 1
    suffix = ''
    interpreter_value = ''
    exe_sentence = 'time ('
    if len(param) > 0:
        exe_sentence += 'echo '
        for item in param:
            exe_sentence += (str(item) + ' ')
        exe_sentence += ' | '
    if interpreter.upper() == 'C' or interpreter.upper() == 'CPP':
        interpreter_value = ''
    elif interpreter.upper() == 'PY':
        interpreter_value = 'python'
        scale = 3
        suffix = '.py'
    elif interpreter.upper() == 'JAVA':
        interpreter_value = 'java'
        scale = 2
        suffix = '.java'
    Log.compile_log('RUN: Get interpreter value')
    exe_sentence += (interpreter_value + ' .' + DATA.DOCKER_EXES_PATH + '/' + exe_name + suffix + ' )')
    Log.compile_log('RUN: The Exesentence is : ' + exe_sentence)
    # generate the run .sh file
    try:
        Log.compile_log('RUN: create file and write data')
        shell = Shell(name='run_' + exe_name + '_' + interpreter + '.sh', path=DATA.HOST_SHELL_PATH)
        Log.compile_log('RUN: ' + shell.get_path() + '/' + shell.get_name())
        run_file = open(shell.get_path() + '/' + shell.get_name(), "w")
        run_file.write('#! /bin/bash\n')
        run_file.write('ulimit -s -t ' + str(question.get_time() * scale) + '\n')
        run_file.write(exe_sentence + '\n')
        run_file.close()
        Log.compile_log('RUN: write data over')
        Popen("chmod 777 " + shell.get_path() + '/' + shell.get_name(),
              shell=True, stdin=PIPE, stdout=PIPE, close_fds=True)
    except Exception, e:
        run_file.close()
        Log.compile_log('RUN: The error is : ' + e.message)
        error_log = open(DATA.HOST_ERROR_LOG_PATH + '/create_run_shell_' + str(time.time()) +
                         str(random.randint(1000, 9999)) + '.log', 'w')
        error_log.write(e.message)
        error_log.close()
        return None
def __init__(self, modules, rpyc=None, modules_user=None, rpyc_user=None):
    self._modules = modules
    self._modules_user = modules_user
    self._rpyc = rpyc
    self._rpyc_user = rpyc_user
    self._shell = Shell(self._modules, self._rpyc, self._modules_user)
    if self._rpyc is not None:
        self._ip = self.cmd("netstat -na | grep ':18812.*ESTABLISHED' | head -1 | tr ':' ' ' | awk {'print $4'}",
                            shell=True).stdout.read().strip()
    else:
        self._ip = "127.0.0.1"
def testTerminate(self):
    test = Shell(cmd="calc")
    test.run_background()
    for i in range(2):
        time.sleep(1)
    test.terminate()
    time.sleep(1)
    self.assertTrue(test.is_terminate())
def testDefault(self):
    test = Shell(cmd="echo 123")
    test.run_background()
    for i in range(3):
        time.sleep(1)
    self.assertTrue(test.is_terminate())
    output = test.get_output()
    self.assertTrue(output.startswith("123"))
def main(args=None):
    """
    Entry point for both command-line and shell

    Note that the `args` parameter to this function allows us to do stuff
    from the Python interactive prompt:

    >>> Main.main(['-H', 'pivot02', 'getstate', 'mabramow-test1'])
    getstate(mabramow-test1) = off

    Inspiration from http://www.artima.com/weblogs/viewpost.jsp?thread=4829
    """
    try:
        options, args = optionsParser.parser.parse_args(args or sys.argv[1:])
        if options.serverOp:
            args.insert(0, options.serverOp)
        host = GetHost(options)
        if options.interactive:
            Shell(host).cmdloop()
        else:
            operationName, result = CommandProcessor.Process(host, args)
            if result is not None:
                result = TaskManager().HandleIfTask(result, async=False)
            if not options.quiet:
                if isinstance(result, basestring) or \
                        isinstance(result, int) or isinstance(result, long):
                    sys.stdout.write(GetPrologue(operationName, args))
                if result == 'success':
                    result = 1
                if result == 'error':
                    result = 0
                print(optionsParser.GetFormatter(options).Format(result))
    except CommandProcessor.InvalidOperation:
        sys.stderr.write('Invalid operation specified.\n\n')
        optionsParser.parser.print_help()
        sys.stderr.write('\nInvalid operation specified.\n')
    except CommandProcessor.InvalidParameter, e:
        sys.stderr.write('Invalid parameter specified.\n\n')
        sys.stderr.write('%s\n' % e)
def allowed_traffic_generate(name):
    print(str(name) + ' Thread starts')
    shell = Shell()
    while True:
        shell.execute("echo \"abcd\" | sudo -S curl http://172.24.4.159:8080")
        time.sleep(1)
def __init__(self, deviceId):
    # SysEngine.__init__(self)
    # self.deviceId = deviceId
    AdbServer.__init__(self, deviceId)
    Shell.__init__(self)
class Build:
    'run one specific build of one package'

    def __init__(self, LBS, logger):
        self.LBS = LBS
        self.logger = logger
        self.container = None
        self.finished = False
        self.buildmachine = None
        self.config = Config.LoadConfig()

    def createbuildmachine(self, lxcdistro, lxcrelease, lxcarch, buildmachine, packageSrcPath):
        # create a container on a remote machine
        self.buildmachine = buildmachine
        con = Database(self.config)
        stmt = "SELECT * FROM machine WHERE name = ?"
        cursor = con.execute(stmt, (buildmachine,))
        machine = cursor.fetchone()
        con.close()
        if machine['type'] == 'lxc':
            self.container = LXCContainer(buildmachine, machine, self.logger, packageSrcPath)
        elif machine['type'] == 'docker':
            self.container = DockerContainer(buildmachine, machine, self.logger, packageSrcPath)
        elif machine['type'] == 'copr':
            self.container = CoprContainer(buildmachine, machine, self.logger, packageSrcPath)
        return self.container.createmachine(lxcdistro, lxcrelease, lxcarch, buildmachine)

    def buildpackageOnCopr(self, username, projectname, packagename, branchname, packageSrcPath, lxcdistro, lxcrelease, lxcarch):
        # connect to copr
        coprtoken_filename = self.config['lbs']['SSHContainerPath'] + '/' + username + '/' + projectname + '/copr'
        if not os.path.isfile(coprtoken_filename):
            raise Exception("please download a token file from copr and save in " + coprtoken_filename)

        userconfig = self.config['lbs']['Users'][username]
        copr_projectname = projectname
        if 'CoprProjectName' in userconfig['Projects'][projectname]:
            copr_projectname = userconfig['Projects'][projectname]['CoprProjectName']

        if not self.container.connectToCopr(coprtoken_filename, copr_projectname):
            raise Exception("problem connecting to copr, does the project " + copr_projectname + " already exist?")

        # calculate the release number
        release = self.container.getLatestReleaseFromCopr(packagename)
        if release is not None:
            if release.find('.') > -1:
                releasenumber = int(release[:release.find('.')])
                afterreleasenumber = release[release.find('.'):]
                release = str(releasenumber + 1) + afterreleasenumber
            else:
                release = str(int(release) + 1)

        # build the src rpm locally, and move to public directory
        # simplification: tarball must be in the git repository
        # simplification: lbs must run on Fedora
        self.shell = Shell(self.logger)
        rpmbuildpath = "/run/uwsgi/rpmbuild_" + username + "_" + projectname + "_" + packagename
        self.shell.executeshell("mkdir -p " + rpmbuildpath + "/SOURCES; mkdir -p " + rpmbuildpath + "/SPECS")
        self.shell.executeshell("cp -R " + packageSrcPath + "/* " + rpmbuildpath + "/SOURCES; mv " + rpmbuildpath + "/SOURCES/*.spec " + rpmbuildpath + "/SPECS")
        if release is not None:
            self.shell.executeshell("sed -i 's/^Release:.*/Release: " + release + "/g' " + rpmbuildpath + "/SPECS/*.spec")
        if not self.shell.executeshell("rpmbuild --define '_topdir " + rpmbuildpath + "' -bs " + rpmbuildpath + "/SPECS/" + packagename + ".spec"):
            raise Exception("Problem with building the source rpm file for package " + packagename)

        myPath = username + "/" + projectname
        if 'Secret' in self.config['lbs']['Users'][username]:
            raise Exception("You cannot use a secret path when you are working with Copr")
        repoPath = self.config['lbs']['ReposPath'] + "/" + myPath + "/" + lxcdistro + "/" + lxcrelease + "/src"
        files = os.listdir(rpmbuildpath + "/SRPMS")
        if files is not None and len(files) == 1:
            srcrpmfilename = files[0]
        else:
            raise Exception("cannot find the source rpm, no files in " + rpmbuildpath + "/SRPMS")
        if not os.path.isfile(rpmbuildpath + "/SRPMS/" + srcrpmfilename):
            raise Exception("cannot find the source rpm, " + rpmbuildpath + "/SRPMS/" + srcrpmfilename + " is not a file")
        if not self.shell.executeshell("mkdir -p " + repoPath + " && mv " + rpmbuildpath + "/SRPMS/" + srcrpmfilename + " " + repoPath + " && rm -Rf " + rpmbuildpath):
            raise Exception("Problem moving the source rpm file")

        # tell copr to build this srpm. raise an exception if the build failed.
        if not self.container.buildProject(self.config['lbs']['DownloadUrl'] + "/repos/" + myPath + "/" + lxcdistro + "/" + lxcrelease + "/src/" + srcrpmfilename):
            raise Exception("problem building the package on copr")

    def buildpackageOnContainer(self, username, projectname, packagename, branchname, lxcdistro, lxcrelease, pathSrc):
        # install a mount for the project repo
        myPath = username + "/" + projectname
        if 'Secret' in self.config['lbs']['Users'][username]:
            myPath = username + "/" + self.config['lbs']['Users'][username]['Secret'] + "/" + projectname
        mountPath = self.config['lbs']['ReposPath'] + "/" + myPath + "/" + lxcdistro + "/" + lxcrelease
        if not self.container.installmount(mountPath, "/mnt" + mountPath, "/root/repo"):
            raise Exception("Problem with installmount")
        mountPath = self.config['lbs']['TarballsPath'] + "/" + myPath
        if not self.container.installmount(mountPath, "/mnt" + mountPath, "/root/tarball"):
            raise Exception("Problem with installmount")

        # prepare container, install packages that the build requires; this is specific to the distro
        self.buildHelper = BuildHelperFactory.GetBuildHelper(lxcdistro, self.container, username, projectname, packagename, branchname)
        if not self.buildHelper.PrepareMachineBeforeStart():
            raise Exception("Problem with PrepareMachineBeforeStart")
        if self.container.startmachine():
            self.logger.print("container has been started successfully")
        else:
            raise Exception("Problem with startmachine")
        if not self.buildHelper.PrepareMachineAfterStart():
            raise Exception("Problem with PrepareMachineAfterStart")
        if not self.buildHelper.PrepareForBuilding():
            raise Exception("Problem with PrepareForBuilding")

        # copy the repo to the container
        self.container.rsyncContainerPut(pathSrc + 'lbs-' + projectname, "/root/lbs-" + projectname)
        # copy the keys to the container
        sshContainerPath = self.config['lbs']['SSHContainerPath']
        if os.path.exists(sshContainerPath + '/' + username + '/' + projectname):
            self.container.rsyncContainerPut(sshContainerPath + '/' + username + '/' + projectname + '/*', '/root/.ssh/')
            self.container.executeInContainer('chmod 600 /root/.ssh/*')

        if not self.buildHelper.DownloadSources():
            raise Exception("Problem with DownloadSources")
        if not self.buildHelper.InstallRepositories(self.config['lbs']['DownloadUrl']):
            raise Exception("Problem with InstallRepositories")
        if not self.buildHelper.SetupEnvironment(branchname):
            raise Exception("Setup script did not succeed")
        if not self.buildHelper.InstallRequiredPackages():
            raise Exception("Problem with InstallRequiredPackages")
        # disable the network, so that only code from the tarball is being used
        if not self.buildHelper.DisableOutgoingNetwork():
            raise Exception("Problem with disabling the network")
        if not self.buildHelper.BuildPackage():
            raise Exception("Problem with building the package")

        myPath = username + "/" + projectname
        if 'Secret' in self.config['lbs']['Users'][username]:
            myPath = username + "/" + self.config['lbs']['Users'][username]['Secret'] + "/" + projectname
        srcPath = self.config['lbs']['ReposPath'] + "/" + myPath + "/" + lxcdistro + "/" + lxcrelease
        destPath = srcPath[:srcPath.rindex("/")]
        srcPath = "/mnt" + srcPath
        if not self.container.rsyncHostGet(srcPath, destPath):
            raise Exception("Problem with syncing repos")
        srcPath = self.config['lbs']['TarballsPath'] + "/" + myPath
        destPath = srcPath[:srcPath.rindex("/")]
        srcPath = "/mnt" + srcPath
        if not self.container.rsyncHostGet(srcPath, destPath):
            raise Exception("Problem with syncing tarballs")

        # create repo file
        self.buildHelper.CreateRepoFile()

    def buildpackage(self, username, projectname, packagename, branchname, lxcdistro, lxcrelease, lxcarch, buildmachine, jobId):
        userconfig = self.config['lbs']['Users'][username]
        self.logger.startTimer()
        self.logger.print(" * Starting at " + strftime("%Y-%m-%d %H:%M:%S GMT%z"))
        self.logger.print(" * Preparing the machine...")

        # get the sources of the packaging instructions
        gotPackagingInstructions = False
        try:
            pathSrc = self.LBS.getPackagingInstructions(userconfig, username, projectname, branchname)
            packageSrcPath = pathSrc + '/lbs-' + projectname + '/' + packagename
            gotPackagingInstructions = True
        except Exception as e:
            print(e)
            self.logger.print("LBSERROR: " + str(e) + "; for more details see /var/log/uwsgi.log")
            jobFailed = True

        if not gotPackagingInstructions:
            self.LBS.ReleaseMachine(buildmachine, jobFailed)
        elif self.createbuildmachine(lxcdistro, lxcrelease, lxcarch, buildmachine, packageSrcPath):
            try:
                if type(self.container) is CoprContainer:
                    self.buildpackageOnCopr(username, projectname, packagename, branchname, packageSrcPath, lxcdistro, lxcrelease, lxcarch)
                else:
                    self.buildpackageOnContainer(username, projectname, packagename, branchname, lxcdistro, lxcrelease, pathSrc)
                self.logger.print("Success!")
                self.LBS.MarkPackageAsBuilt(username, projectname, packagename, branchname, lxcdistro, lxcrelease, lxcarch)
                jobFailed = False
            except Exception as e:
                # TODO: logging to log file does not work yet?
                logging.basicConfig(level=logging.DEBUG, filename='/var/log/lbs.log')
                logging.exception("Error happened...")
                self.logger.print("LBSERROR: " + str(e))
            finally:
                self.LBS.ReleaseMachine(buildmachine, jobFailed)
        else:
            self.logger.print("LBSERROR: There is a problem with creating the container!")
            self.LBS.ReleaseMachine(buildmachine, jobFailed)
        self.finished = True

        logpath = self.logger.getLogPath(username, projectname, packagename, branchname, lxcdistro, lxcrelease, lxcarch)
        buildnumber = self.logger.store(self.config['lbs']['DeleteLogAfterDays'], self.config['lbs']['KeepMinimumLogs'], logpath)
        if self.logger.hasLBSERROR() or not self.config['lbs']['SendEmailOnSuccess'] == False:
            if self.config['lbs']['EmailFromAddress'] == '*****@*****.**':
                self.logger.print("Please configure the email settings for sending notification emails")
            else:
                self.logger.email(self.config['lbs']['EmailFromAddress'], userconfig['EmailToAddress'],
                                  "LBS Result for " + projectname + "/" + packagename,
                                  self.config['lbs']['LBSUrl'] + "/logs/" + logpath + "/" + str(buildnumber))

        # now mark the build finished
        con = Database(self.config)
        stmt = "UPDATE build SET status='FINISHED', finished=?, buildsuccess=?, buildnumber=? WHERE id = ?"
        lastBuild = Logger().getLastBuild(username, projectname, packagename, branchname, lxcdistro + "/" + lxcrelease + "/" + lxcarch)
        con.execute(stmt, (datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"), lastBuild["resultcode"], lastBuild["number"], jobId))
        con.commit()
        con.close()
        self.logger.clean()
        return self.logger.get()
class RemoteContainer:
    def __init__(self, containername, configBuildMachine, logger, packageSrcPath, containertype):
        self.hostname = containername
        self.staticMachine = (True if ('static' in configBuildMachine and configBuildMachine['static'] == "t") else False)

        self.port = "22"
        if configBuildMachine['port'] is not None:
            self.port = str(configBuildMachine['port'])
        self.cid = 10
        if configBuildMachine['cid'] is not None:
            self.cid = configBuildMachine['cid']

        self.containername = str(self.cid).zfill(3) + "-" + containername
        self.containerIP = socket.gethostbyname(self.hostname)
        self.containerPort = str(2000 + int(self.cid))

        if configBuildMachine['local'] is not None and configBuildMachine['local'] == "t":
            # the host server for the build container is actually hosting the LBS application as well
            # or the container is running on localhost
            if containertype == "lxc":
                self.containerIP = self.calculateLocalContainerIP(self.cid)
                self.containerPort = "22"
            if containertype == "docker":
                self.containerIP = self.calculateLocalContainerIP(1)
                self.containerPort = str(2000 + int(self.cid))

        self.config = Config.LoadConfig()
        self.SSHContainerPath = self.config['lbs']['SSHContainerPath']
        self.logger = logger
        self.shell = Shell(logger)
        # we are reusing the slots, for caches etc
        self.slot = containername
        self.distro = ""
        self.release = ""
        self.arch = ""
        self.staticIP = ""
        self.packageSrcPath = packageSrcPath

    def calculateLocalContainerIP(self, cid):
        # test if we are inside a container as well
        # we just test if the host server for the build container is actually hosting the LBS application as well
        s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        # just need to connect to any external host to know which is the IP address of the machine that hosts LBS
        s.connect((self.hostname, 80))
        lbsipaddress = s.getsockname()[0].split('.')
        lbsipaddress.pop()
        # on CentOS: /etc/libvirt/qemu/networks/default.xml 192.168.122
        # on Fedora 27: /etc/libvirt/qemu/networks/default.xml 192.168.124
        # on Ubuntu 16.04: /etc/default/lxc-net 10.0.3
        if '.'.join(lbsipaddress) == "192.168.122" or '.'.join(lbsipaddress) == "192.168.124" or '.'.join(lbsipaddress) == "10.0.3":
            return '.'.join(lbsipaddress) + "." + str(cid)

        # we are running uwsgi and lxc/docker on one host
        if os.path.isfile("/etc/redhat-release"):
            file = open("/etc/redhat-release", 'r')
            version = file.read()
            if "Fedora" in version:
                return "192.168.124." + str(cid)
            if "CentOS" in version:
                return "192.168.122." + str(cid)
        elif os.path.isfile("/etc/lsb-release"):
            file = open("/etc/lsb-release", 'r')
            version = file.read()
            if "Ubuntu" in version:
                return "10.0.3." + str(cid)

    def executeOnHost(self, command):
        if self.shell.executeshell('ssh -f -o "StrictHostKeyChecking no" -p ' + self.port + ' -i ' + self.SSHContainerPath + "/container_rsa root@" + self.hostname + " \"export LC_ALL=C; (" + command + ") 2>&1; echo \$?\""):
            return self.logger.getLastLine() == "0"
        return False

    def createmachine(self, distro, release, arch, staticIP):
        # not implemented here
        return False

    def startmachine(self):
        # not implemented here
        return False

    def executeInContainer(self, command):
        """Execute a command in a container via SSH"""
        # not implemented here
        return False

    def destroy(self):
        # not implemented here
        return False

    def stop(self):
        # not implemented here
        return False

    def rsyncContainerPut(self, src, dest):
        # not implemented here
        return False

    def rsyncContainerGet(self, path, dest=None):
        # not implemented here
        return False

    def rsyncHostPut(self, src, dest=None):
        # not implemented here
        return False

    def rsyncHostGet(self, path, dest=None):
        # not implemented here
        return False

    def installmount(self, localpath, hostpath=None):
        # not implemented here
        return False
class methods:
    def __init__(self, fps, clockObject, surface, font, bars, windowsize):
        self.fps = fps
        self.clock = clockObject
        self.surface = surface
        self.font = font
        self.bars = bars
        self.windowsize = windowsize

    def get_array(self, length, mode=0):
        arr = list(range(length))
        if not mode:
            random.shuffle(arr)
        elif mode == 2:
            arr = arr[::-1]
        elif mode == 3:
            for i in range(length - 1):
                if random.randint(0, 10) < 8:
                    tmp = random.randint(4, 15)
                    try:
                        arr[i], arr[i + tmp] = arr[i + tmp], arr[i]
                    except:
                        pass
        return arr

    def setup(self, length, mode=0):
        self.array = self.get_array(length, mode)
        self.display = Display(self.windowsize[0] / length, self.windowsize, self.surface, self.font)
        self.accesses = 0
        self.comparisons = 0
        setattr(self.display, "bars", self.bars)

    bubble = lambda self: Bubble(self.array, self.display, self.clock, self.fps).main()
    quicksort = lambda self: Quicksort(self.array, self.display, self.clock, self.fps).main()
    selection = lambda self: Selection(self.array, self.display, self.clock, self.fps).main()
    cocktail = lambda self: Cocktail(self.array, self.display, self.clock, self.fps).main()
    bogo = lambda self: Bogo(self.array, self.display, self.clock, self.fps).main()
    oddeven = lambda self: Oddeven(self.array, self.display, self.clock, self.fps).main()
    shell = lambda self: Shell(self.array, self.display, self.clock, self.fps).main()
    comb = lambda self: Comb(self.array, self.display, self.clock, self.fps).main()
    insertion = lambda self: Insertion(self.array, self.display, self.clock, self.fps).main()
    mergetd = lambda self: MergeTD(self.array, self.display, self.clock, self.fps).main()
    radixlsd = lambda self: RadixLSD(self.array, self.display, self.clock, self.fps).main()
    counting = lambda self: Counting(self.array, self.display, self.clock, self.fps).main()
    cycle = lambda self: Cycle(self.array, self.display, self.clock, self.fps).main()
    heap = lambda self: Heap(self.array, self.display, self.clock, self.fps).main()
    circle = lambda self: Circle(self.array, self.display, self.clock, self.fps).main()
    gnome = lambda self: Gnome(self.array, self.display, self.clock, self.fps).main()
    binaryinsertion = lambda self: BinaryInsertion(self.array, self.display, self.clock, self.fps).main()
    pancake = lambda self: Pancake(self.array, self.display, self.clock, self.fps).main()
    permutation = lambda self: Permutation(self.array, self.display, self.clock, self.fps).main()
    strand = lambda self: Strand(self.array, self.display, self.clock, self.fps).main()
    bucket = lambda self: Bucket(self.array, self.display, self.clock, self.fps).main()
    minmax = lambda self: MinMax(self.array, self.display, self.clock, self.fps).main()
    mergebu = lambda self: MergeBU(self.array, self.display, self.clock, self.fps).main()
    bitonic = lambda self: Bitonic(self.array, self.display, self.clock, self.fps).main()
    stooge = lambda self: Stooge(self.array, self.display, self.clock, self.fps).main()
    smooth = lambda self: Smooth(self.array, self.display, self.clock, self.fps).main()
    quick3 = lambda self: Quick3(self.array, self.display, self.clock, self.fps).main()
def main():
    # width = 1200
    # height = 850
    width = 700
    height = 600
    blue_color = (97, 159, 182)

    pygame.init()
    screen = pygame.display.set_mode((width, height))
    pygame.display.set_caption('Tanks')
    clock = pygame.time.Clock()

    background = pygame.image.load('images/background_desert1.png')
    background = pygame.transform.scale(background, (width, height))

    the_player = Player("images/tank_bottom_new_cropped3.png", "images/tank_top_new_cropped7.png", 350, 350, screen)
    # do we even need this?
    players = Group()
    players.add(the_player)
    # square = Block( 200, 200, (0,0,0) , 50, 50, screen)
    players.add(square)
    shells = Group()  # what does this do????
    # player_group = Group()
    # player_group.add(the_player)

    # Game initialization
    # gameStart()
    stop_game = False
    while not stop_game:
        for event in pygame.event.get():
            key.move_forward_pressed()
            # Event handling
            if event.type == pygame.QUIT:
                stop_game = True
            if key.move_forward_pressed():
                the_player.move_up()
            if key.move_back_pressed():
                the_player.move_down()
            if not key.move_back_pressed() and not key.move_forward_pressed():
                the_player.stop()
            if key.turn_left_pressed():
                the_player.turn_left()
            if key.turn_right_pressed():
                the_player.turn_right()
            if not key.turn_left_pressed() and not key.turn_right_pressed():
                the_player.stop_turn()
            if event.type == pygame.MOUSEBUTTONDOWN:
                # print math.fabs((pygame.mouse.get_pos()[0] - the_player.x) / math.cos(the_player.top_angle_rad))
                x2 = pygame.mouse.get_pos()[0]
                y2 = pygame.mouse.get_pos()[1]
                current_tick = pygame.time.get_ticks()
                if (current_tick - the_player.last_shot_tick) > the_player.cool_down:
                    shot_length = math.sqrt((x2 - the_player.x)**2 + (y2 - the_player.y)**2)
                    # print shot_length
                    if shot_length > 85:
                        new_shell = Shell("images/tank_shell5.png", screen, the_player)  # tank_shell1_cropped.png
                        # shells.add(new_shell)
                        the_player.last_shot_tick = new_shell.shot_start_tick
                        if new_shell.shot_length == new_shell.shot_length_current:
                            del new_shell
                # elif current_tick - the_player.last_shot_tick < 175:
                #     screen.blit()
                # the_player.shoot()
            # if event.key
            # if event.type == pygame.KEYUP:
            #     the_player.bottom_angle += 3

        # Game logic
        the_player.update(players)

        # Draw background
        screen.blit(background, [0, 0])

        # Game display
        square.render(the_player)
        the_player.draw_me()

        # shell display
        for shell in shells:
            shell.update(the_player, players)
            shell.draw_shot()
            # if shell.beyond_screen():
            #     shells.remove(shell)
            # or if shell reached mouse_pos

        pygame.display.update()
        clock.tick(60)

    pygame.quit()
def getPackagingInstructionsInternal(self, userconfig, username, projectname, branchname, gitprojectname, lbsproject, pathSrc):
    os.makedirs(pathSrc, exist_ok=True)

    needToDownload = True

    # we want a clean clone
    # but do not delete the tree if it is being used by another build
    t = None
    if os.path.isfile(pathSrc + 'lbs-' + projectname + '-lastused'):
        t = os.path.getmtime(pathSrc + 'lbs-' + projectname + '-lastused')
        # delete the tree only if it has not been used within the last 3 minutes
        if (time.time() - t) < 3 * 60:
            needToDownload = False
        # update the timestamp
        os.utime(pathSrc + 'lbs-' + projectname + '-lastused')
    else:
        open(pathSrc + 'lbs-' + projectname + '-lastused', 'a').close()

    headers = {}
    if not 'GitType' in userconfig or userconfig['GitType'] == 'github':
        url = lbsproject + "/archive/" + branchname + ".tar.gz"
    elif userconfig['GitType'] == 'gitlab':
        url = lbsproject + "/repository/archive.tar.gz?ref=" + branchname
        tokenfilename = self.config["lbs"]["SSHContainerPath"] + "/" + username + "/" + projectname + "/gitlab_token"
        if os.path.isfile(tokenfilename):
            with open(tokenfilename, "r") as myfile:
                headers['PRIVATE-TOKEN'] = myfile.read().strip()

    # check if the version we have is still uptodate
    etagFile = pathSrc + 'lbs-' + projectname + '-etag'
    if needToDownload and os.path.isfile(etagFile):
        with open(etagFile, 'r') as content_file:
            Etag = content_file.read()
            headers['If-None-Match'] = Etag
        r = requests.get(url, headers=headers)
        if 'Etag' in r.headers and r.headers['Etag'] == '"' + Etag + '"':
            needToDownload = False

    if not needToDownload and os.path.isdir(pathSrc + 'lbs-' + projectname):
        # we can reuse the existing source, it was used just recently, or has not changed on the server
        self.StorePackageHashes(pathSrc + 'lbs-' + projectname, username, projectname, branchname)
        return

    # delete the working tree
    if os.path.isdir(pathSrc + 'lbs-' + projectname):
        shutil.rmtree(pathSrc + 'lbs-' + projectname)

    sourceFile = pathSrc + "/" + branchname + ".tar.gz"
    if os.path.isfile(sourceFile):
        os.remove(sourceFile)
    r = requests.get(url, headers=headers)
    if r.status_code == 401:
        raise Exception("problem downloading the repository, access denied")
    elif not r.status_code == 200:
        raise Exception("problem downloading the repository " + url + ", HTTP error code " + str(r.status_code))

    chunk_size = 100000
    with open(sourceFile, 'wb') as fd:
        for chunk in r.iter_content(chunk_size):
            fd.write(chunk)
    if 'Etag' in r.headers:
        Etag = r.headers['Etag']
        with open(etagFile, 'w') as fd:
            fd.write(Etag.strip('"'))

    shell = Shell(Logger())
    if not 'GitType' in userconfig or userconfig['GitType'] == 'github':
        cmd = "cd " + pathSrc + ";"
        cmd += "tar xzf " + branchname + ".tar.gz; mv lbs-" + gitprojectname + "-" + branchname + " lbs-" + projectname
        shell.executeshell(cmd)
    elif userconfig['GitType'] == 'gitlab':
        cmd = "cd " + pathSrc + ";"
        cmd += "tar xzf " + branchname + ".tar.gz; mv lbs-" + gitprojectname + "-" + branchname + "-* lbs-" + projectname
        shell.executeshell(cmd)

    if os.path.isfile(sourceFile):
        os.remove(sourceFile)
    if not os.path.isdir(pathSrc + 'lbs-' + projectname):
        raise Exception("Problem with cloning the git repo")

    self.StorePackageHashes(pathSrc + 'lbs-' + projectname, username, projectname, branchname)
def __init__(self):
    Shell.__init__(self)
def __init__(self, queue):
    self.queue = queue
    self.shell = Shell()
import sys

sys.path.append("../android")
sys.path.append("../sunriver")
sys.path.append("../sunriver/applications/DesktopInYourPocket")

import time

from Android import Android
from Sunriver import Sunriver
from Chroot import Chroot
from NetInterfaces import NetInterfaces
from DesktopInYourPocket import DesktopInYourPocket
from Linux import Linux
from Shell import Shell

device_id = Android.devices().keys()[0]
android = Android(device_id)
desktop = DesktopInYourPocket(android)
desktop.start()
chroot = Chroot(android)
interfaces = NetInterfaces(android)
linux = Sunriver.connect(chroot, interfaces)
shell = Shell(linux.modules, linux._rpyc)
ui = UI(shell)
ui.start()
shell.cmd("leafpad")
time.sleep(2)
leafpad = ui.dogtail.tree.root.child("(Untitled)")
print leafpad
def run(self, *args, **kwds):
    from Shell import Shell
    shell = Shell(self)
    shell.run(*args, **kwds)
    return