def server_status(self, port):
    """Check the health of the SciDB server listening on *port*.

    Returns one of:
      * 'online'        -- iquery works and no instance reports a problem
      * 'offline'       -- at least one instance reports error/offline
      * 'no-system-lib' -- the 'system' library could not be listed/loaded

    NOTE(review): the original source was collapsed onto one line, so the
    nesting is reconstructed; the liveliness check is assumed to run
    whether or not the library had to be loaded -- confirm against the
    original layout.
    """
    self.log_it('in server_status()', 5)
    try:
        # List loaded libraries; load 'system' if it is missing.
        op = subprocess.run_cmd('iquery -p ' + port + ' -aq "list(\'libraries\')"')
        self.log_it('Checking if system library is loaded', 3)
        if op.find('libsystem.so') == -1:
            self.log_it('Trying to load system library', 3)
            subprocess.run_cmd('iquery -p ' + port + ' -aq "load_library(\'system\')"')
            time.sleep(10)  # give the cluster time to pick the library up
        try:
            self.log_it('checking server liveliness', 3)
            op = subprocess.run_cmd('iquery -p ' + port + ' -aq "list(\'instances\')"')
            # SciDB returns 'error' for a few seconds until offline state of instances is detected.
            if ((op.find('error') >= 0) or (op.find('offline') >= 0)):
                self.log_it('Atleast one of the SciDB instances is offline.', 1)
                return 'offline'
        except Exception:  # FIX: was a bare except (would swallow SystemExit/KeyboardInterrupt)
            self.log_it('Atleast one of the SciDB instances is offline.', 1)
            # NOTE(review): this path still falls through to return 'online'
            # below -- possibly it should return 'offline'; confirm intent.
    except Exception:  # FIX: was a bare except
        self.log_it('Cannot load \'system\' library!', 1)
        return 'no-system-lib'
    return 'online'
def fetch():
    """Import flaws to RVD from a variety of sources"""
    cyan("Importing...")
    cyan("Creating the default folder for the import process...")
    # FIX: previously shelled out to `mkdir -p /tmp/rvd` via run_cmd;
    # os.makedirs(..., exist_ok=True) has the same semantics (creates
    # parents, no error if the directory exists) without a subprocess.
    import os
    os.makedirs("/tmp/rvd", exist_ok=True)
def start(self, min_mem='1024M', max_mem='3072M', version='1.10.2'):
    """Start the minecraft server.

    Shells out to java to launch the server jar in the background,
    redirecting its output to log.out inside the server directory.

    :param min_mem: initial JVM heap size (-Xms)
    :param max_mem: maximum JVM heap size (-Xmx)
    :param version: minecraft server jar version to launch

    Example (using method defaults):
        java -Xms1024M -Xmx3072M -jar minecraft_server.1.10.2.jar nogui
    """
    java_path = '/usr/bin/java'
    # NOTE(review): hard-coded server directory -- consider making this
    # configurable rather than tied to one machine.
    path = '/home/reed/Github/mcmd/_minecraft_files/_1server'
    server_start_cmd = '{0} -Xms{1} -Xmx{2} -jar {3}/minecraft_server.{4}.jar nogui > {5}/log.out &'
    server_start_cmd = server_start_cmd.format(java_path, min_mem, max_mem, path, version, path)
    logging.debug("Running server command:")
    logging.debug("> {0}".format(server_start_cmd))
    try:
        with cd(path):
            run_cmd([server_start_cmd], shell=True)
    except Exception as e:
        # FIX: logging.warn is a deprecated alias of logging.warning
        logging.warning('Error running server start command: {0}'.format(str(e)))
def run_command(command, debug=False, fail_on_error=False):
    """Run *command* through the shell and return ``(status, output)``.

    :param command: shell command string to execute
    :param debug: when True, echo the command output via printmsg()
    :param fail_on_error: when True and the command exits non-zero,
        print the output via printerror() and exit(1)
    :returns: tuple of (exit status, combined stdout/stderr text)
    """
    try:
        from subprocess import getstatusoutput as run_cmd
    except ImportError:  # FIX: was a bare except, which hid real errors
        # NOTE(review): subprocess.call returns a single int, so the
        # tuple unpack below would fail on this fallback path; it only
        # makes sense for very old interpreters -- confirm it is needed.
        from subprocess import call as run_cmd
    err, out = run_cmd(command)
    if err and fail_on_error:
        printerror(out)
        exit(1)
    if debug:
        printmsg(out)
    return err, out
def server_status(self, port):
    """Return the SciDB server status for *port*.

    Possible return values: 'online', 'offline' (an instance reports
    error/offline), or 'no-system-lib' (the 'system' library could not
    be listed/loaded).

    NOTE(review): nesting reconstructed from a collapsed one-line
    source; the liveliness check is assumed to follow the optional
    library load -- confirm against the original layout.
    """
    self.log_it('in server_status()', 5)
    try:
        # List loaded libraries to see whether 'system' is present.
        op = subprocess.run_cmd('iquery -p ' + port + ' -aq "list(\'libraries\')"')
        self.log_it('Checking if system library is loaded', 3)
        if op.find('libsystem.so') == -1:
            # 'system' library missing: try loading it, then give the
            # cluster a few seconds to settle.
            self.log_it('Trying to load system library', 3)
            subprocess.run_cmd('iquery -p ' + port + ' -aq "load_library(\'system\')"')
            time.sleep(10)
        try:
            self.log_it('checking server liveliness', 3)
            op = subprocess.run_cmd('iquery -p ' + port + ' -aq "list(\'instances\')"')
            # SciDB returns 'error' for a few seconds until offline state of instances is detected.
            if ((op.find('error') >= 0) or (op.find('offline') >= 0)):
                self.log_it('Atleast one of the SciDB instances is offline.', 1)
                return 'offline'
        except:
            # NOTE(review): bare except; logs "offline" but still falls
            # through to return 'online' below -- confirm intent.
            self.log_it('Atleast one of the SciDB instances is offline.', 1)
    except:
        # Bare except: any failure above is treated as the 'system'
        # library being unavailable.
        self.log_it('Cannot load \'system\' library!', 1)
        return 'no-system-lib'
    return 'online'
def get_stack_names():
    """Return stack names for the files touched by the latest commit.

    Runs ``git show --name-only --oneline``, keeps the listed paths that
    exist as files, and strips a trailing ``.yml`` extension from each.

    :returns: list of stack names (changed file paths minus the .yml suffix)
    :raises Exception: if the git command exits non-zero
    """
    # FIX: removed the dead COMMIT_SHA_ENV/getenv lookups -- the values
    # were computed but never used.
    p = run_cmd(['git', 'show', '--name-only', '--oneline'],
                capture_output=True, text=True)
    if p.returncode != 0:
        raise Exception('Git command failed: {}'.format(p.stderr))
    # Ditch the first line which contains short sha and message
    file_names = p.stdout.strip().split('\n')[1:]
    # FIX: renamed `iter` (shadowed the builtin) and made the regex a raw
    # string (r'\.yml$') to avoid the invalid-escape-sequence warning.
    existing_files = filter(lambda f: Path(f).is_file(), file_names)
    return [sub(r'\.yml$', '', f) for f in existing_files]
def server_status(self, port):
    """Return the SciDB server status for *port*.

    Possible return values: "online", "offline" (an instance reports
    error/offline), or "no-system-lib" (the 'system' library could not
    be listed/loaded).

    NOTE(review): nesting reconstructed from a collapsed one-line
    source; confirm against the original layout.
    """
    self.log_it("in server_status()", 5)
    try:
        # List loaded libraries to see whether 'system' is present.
        op = subprocess.run_cmd("iquery -p " + port + " -aq \"list('libraries')\"")
        self.log_it("Checking if system library is loaded", 3)
        if op.find("libsystem.so") == -1:
            # 'system' library missing: load it and wait for the cluster
            # to settle before probing instances.
            self.log_it("Trying to load system library", 3)
            subprocess.run_cmd("iquery -p " + port + " -aq \"load_library('system')\"")
            time.sleep(10)
        try:
            self.log_it("checking server liveliness", 3)
            op = subprocess.run_cmd("iquery -p " + port + " -aq \"list('instances')\"")
            # SciDB returns 'error' for a few seconds until offline state of instances is detected.
            if (op.find("error") >= 0) or (op.find("offline") >= 0):
                self.log_it("Atleast one of the SciDB instances is offline.", 1)
                return "offline"
        except:
            # NOTE(review): bare except; logs "offline" but still falls
            # through to return "online" below -- confirm intent.
            self.log_it("Atleast one of the SciDB instances is offline.", 1)
    except:
        # Bare except: any failure above is treated as the 'system'
        # library being unavailable.
        self.log_it("Cannot load 'system' library!", 1)
        return "no-system-lib"
    return "online"
def exec(self, make_f_p):
    """Invoke gmake on makefile *make_f_p*, forwarding CLI arguments.

    Scans sys.argv for a '-j/--jobs' flag (defaulting the job count to
    the machine's CPU count when none is given), rebuilds the remaining
    argument string, then runs the composed gmake command through a
    shell wrapper that reports failure if SCRAM's build_error marker
    file exists afterwards.  Exits the process if gmake cannot be run.

    NOTE(review): relies on module-level regex_j / regex_number /
    regex_0_or_none patterns and self.GMAKECMD / self.CMDOPTS; layout
    reconstructed from collapsed source -- confirm loop nesting.
    """
    arg = ""
    job_args = 0
    job_val = ""
    for a in sys.argv[1:]:  # arguments passed to script
        # parse each command line argument
        m = regex_j.match(a)
        if m:  # if '-j /--jobs' flag passed, store it
            job_args = 1
            job_val = m.group(2)
            continue
        if job_args and not job_val:  # if flag was matched, but number was not passed
            if regex_number.match(a):  # check if next arguments are numbers and set them
                job_val = a
                continue
            else:
                job_val = "0"  # else default to 0
        arg += " {0}".format(a)  # create arg string minus '-j /--jobs'
    if job_args:  # if '-j /--jobs' flag was passed
        if regex_0_or_none.match(job_val):  # but no core count, get max from the system
            job_val = cpu_count()
        arg += " '-j' '{0}'".format(job_val)
    # generate make command and execute it
    makecmd = self.GMAKECMD + self.CMDOPTS + " -f " + make_f_p + " " + arg
    errfile = environ["SCRAM_INTwork"] + "/build_error"
    try:
        # Remove a stale error marker from a previous build, if any.
        unlink(errfile)
    except Exception as e:
        logging.warning("nothing to unlink " + str(e))
        pass  # nothing to unlink
    # Echo the exact shell command before running it (build-log aid).
    print(
        "({makecmd} && [ ! -e {errfile} ]) || (err=$?; echo gmake: \\*\\*\\* [There are compilation/build "
        "errors. Please see the detail log above.] Error $err && exit $err)"
        .format(makecmd=makecmd, errfile=errfile))
    e, out = run_cmd(
        "({makecmd} && [ ! -e {errfile} ]) || (err=$?; echo gmake: \\*\\*\\* [There are compilation/build "
        "errors. Please see the detail log above.] Error $err && exit $err)"
        .format(makecmd=makecmd, errfile=errfile))
    if e != 0:
        sys.exit(
            "SCRAM MakeInterface::exec(): Unable to run gmake ... {0}".
            format(out))
def contact(request):
    """Django view: quote a 3D-print job from an uploaded STL file.

    GET renders an empty ContactForm.  POST validates the form, saves
    the upload, slices it with CuraEngine at a quality-dependent layer
    thickness, estimates weight/cost from process_stl() output, stores
    a summary in the module-global `body_all`, and redirects on success.

    NOTE(review): reconstructed from collapsed source; the final
    render() is assumed to run for GET and for invalid POSTs --
    confirm layout.
    """
    # takes stl file and form data from user passes through process_stl and returns output
    if request.method == 'GET':
        form = ContactForm()
    else:
        form = ContactForm(request.POST, request.FILES)
        if form.is_valid() and request.FILES['myfile']:
            subject = form.cleaned_data['subject']
            your_email = form.cleaned_data['your_email']
            body = form.cleaned_data['body']
            myfile = request.FILES['myfile']
            fs = FileSystemStorage()
            filename = fs.save('user_upload.stl', myfile)
            quality = form.cleaned_data['quality']
            quantity = form.cleaned_data['quantity']
            global layer_thickness
            if quality == 'Best':
                # Change the path after cd the absolute path for the Cura folder wherever it is
                # ( ...\Fracktal-PrintCostEstimator\Cura if downloaded from Github).
                run_cmd('cd path\to\Fracktal-PrintCostEstimator\Cura && CuraEngine -s layerThickness=100 -s infillSpeed=60 -o ../temp_files/user_upload.gcode ../temp_files/user_upload.stl && cd ../login_app', shell=True)
            elif quality == 'High':
                run_cmd('cd path\to\Fracktal-PrintCostEstimator\Cura && CuraEngine -s layerThickness=200 -s infillSpeed=80 -o ../temp_files/user_upload.gcode ../temp_files/user_upload.stl && cd ../login_app', shell=True)
            elif quality == 'Normal':
                run_cmd('cd path\to\Fracktal-PrintCostEstimator\Cura && CuraEngine -s layerThickness=300 -s infillSpeed=80 -o ../temp_files/user_upload.gcode ../temp_files/user_upload.stl && cd ../login_app', shell=True)
            material = form.cleaned_data['material']
            color = form.cleaned_data['color']
            # presumably (volume, print_hours) -- TODO confirm process_stl contract
            stl_data = process_stl()
            if material == 'PLA':
                weight = 1.25*(float(stl_data[0])/1000.0)
                cost_per_gm = 15.0
            elif material == 'ABS':
                weight = 1.04*(float(stl_data[0])/1000.0)
                cost_per_gm = 20.0
            # NOTE(review): `weight`/`cost_per_gm` are unbound if material
            # is neither PLA nor ABS -- verify the form restricts choices.
            cost_per_hr = 250.0
            cost = quantity*(cost_per_hr*stl_data[1] + weight*cost_per_gm)
            total_cost = cost + 250.0
            global body_all
            body_all = "Your Email Address: " + your_email + "\nSubject: " + subject + "\n\nBody: " + body + "\n\nQuality: " + quality + "\nMaterial: " + material + "\nColor: " + color + "\nVolume: " + stl_data[0] + " mm^3" + "\nPrint time estimate(in hrs): " + str(stl_data[1]) + "\nQuantity: " + str(quantity) + "\nEstimated print cost(INR): " + str(int(cost)) + " + INR 250 setup fee "
            body_short = "Body: " + body + "\n\nQuality: " + quality + "\nMaterial: " + material + "\nColor: " + color + "\nVolume: " + stl_data[0] + " mm^3" + "\nPrint time estimate(in hrs): " + str(stl_data[1]) + "\nEstimated cost(INR): " + str(int(cost))
            """try:
                email = EmailMessage(subject, body_short, your_email, ['*****@*****.**'])
                #email.attach(attachment.name, attachment.read(), attachment.content_type)
                email.send()
            except BadHeaderError:
                return HttpResponse('Invalid Header Found')"""
            return redirect('/login_app/contact_success')
    return render(request, 'login_app/contact_mail.html', {'contact_form':form})
def contact(
        request
):  # takes stl file and form data from user passes through process_stl and returns output
    """Django view: quote a 3D-print job from an uploaded STL file.

    GET renders an empty ContactForm; POST validates, saves the upload,
    slices it with CuraEngine (layer thickness depends on the chosen
    quality), estimates cost from process_stl() output, stores a summary
    in the module-global `body_all`, and redirects on success.

    NOTE(review): reconstructed from collapsed source; the final
    render() is assumed to run for GET and invalid POSTs -- confirm.
    """
    if request.method == 'GET':
        form = ContactForm()
    else:
        form = ContactForm(request.POST, request.FILES)
        if form.is_valid() and request.FILES['myfile']:
            subject = form.cleaned_data['subject']
            your_email = form.cleaned_data['your_email']
            body = form.cleaned_data['body']
            myfile = request.FILES['myfile']
            fs = FileSystemStorage()
            filename = fs.save('user_upload.stl', myfile)
            quality = form.cleaned_data['quality']
            quantity = form.cleaned_data['quantity']
            global layer_thickness
            if quality == 'Best':
                # Change the path after cd the absolute path for the Cura folder wherever it is
                # ( ...\Fracktal-PrintCostEstimator\Cura if downloaded from Github).
                run_cmd(
                    'cd path\to\Fracktal-PrintCostEstimator\Cura && CuraEngine -s layerThickness=100 -s infillSpeed=60 -o ../temp_files/user_upload.gcode ../temp_files/user_upload.stl && cd ../login_app',
                    shell=True)
            elif quality == 'High':
                run_cmd(
                    'cd path\to\Fracktal-PrintCostEstimator\Cura && CuraEngine -s layerThickness=200 -s infillSpeed=80 -o ../temp_files/user_upload.gcode ../temp_files/user_upload.stl && cd ../login_app',
                    shell=True)
            elif quality == 'Normal':
                run_cmd(
                    'cd path\to\Fracktal-PrintCostEstimator\Cura && CuraEngine -s layerThickness=300 -s infillSpeed=80 -o ../temp_files/user_upload.gcode ../temp_files/user_upload.stl && cd ../login_app',
                    shell=True)
            material = form.cleaned_data['material']
            color = form.cleaned_data['color']
            # presumably (volume, print_hours) -- TODO confirm process_stl contract
            stl_data = process_stl()
            if material == 'PLA':
                weight = 1.25 * (float(stl_data[0]) / 1000.0)
                cost_per_gm = 15.0
            elif material == 'ABS':
                weight = 1.04 * (float(stl_data[0]) / 1000.0)
                cost_per_gm = 20.0
            # NOTE(review): `weight`/`cost_per_gm` are unbound if material
            # is neither PLA nor ABS -- verify the form restricts choices.
            cost_per_hr = 250.0
            cost = quantity * (cost_per_hr * stl_data[1] + weight * cost_per_gm)
            total_cost = cost + 250.0
            global body_all
            body_all = "Your Email Address: " + your_email + "\nSubject: " + subject + "\n\nBody: " + body + "\n\nQuality: " + quality + "\nMaterial: " + material + "\nColor: " + color + "\nVolume: " + stl_data[
                0] + " mm^3" + "\nPrint time estimate(in hrs): " + str(
                stl_data[1]) + "\nQuantity: " + str(
                quantity) + "\nEstimated print cost(INR): " + str(
                int(cost)) + " + INR 250 setup fee "
            body_short = "Body: " + body + "\n\nQuality: " + quality + "\nMaterial: " + material + "\nColor: " + color + "\nVolume: " + stl_data[
                0] + " mm^3" + "\nPrint time estimate(in hrs): " + str(
                stl_data[1]) + "\nEstimated cost(INR): " + str(int(cost))
            """try:
                email = EmailMessage(subject, body_short, your_email, ['*****@*****.**'])
                #email.attach(attachment.name, attachment.read(), attachment.content_type)
                email.send()
            except BadHeaderError:
                return HttpResponse('Invalid Header Found')"""
            return redirect('/login_app/contact_success')
    return render(request, 'login_app/contact_mail.html', {'contact_form': form})
'--force-cuda-runtime-version', help= 'Request a specific CUDA runtime version, e.g. 11.4. The default behaviour is to accept any supported CUDA runtime version.', dest='CUDARuntimeVersion', default='') opt = parser.parse_args() if opt.command: opt.command = ' '.join(opt.command) os.environ["CMSSW_DAS_QUERY_SITES"] = opt.dasSites if opt.IBEos: from subprocess import getstatusoutput as run_cmd ibeos_cache = os.path.join(os.getenv("LOCALRT"), "ibeos_cache.txt") if not os.path.exists(ibeos_cache): err, out = run_cmd( "curl -L -s -o %s https://raw.githubusercontent.com/cms-sw/cms-sw.github.io/master/das_queries/ibeos.txt" % ibeos_cache) if err: run_cmd("rm -f %s" % ibeos_cache) print("Error: Unable to download ibeos cache information") print(out) sys.exit(err) for cmssw_env in ["CMSSW_BASE", "CMSSW_RELEASE_BASE"]: cmssw_base = os.getenv(cmssw_env, None) if not cmssw_base: continue cmssw_base = os.path.join(cmssw_base, "src/Utilities/General/ibeos") if os.path.exists(cmssw_base): os.environ["PATH"] = cmssw_base + ":" + os.getenv("PATH") os.environ["CMS_PATH"] = "/cvmfs/cms-ib.cern.ch"
zip_file.extractall(srcdir_path) elif ext == 'rar': print('.rar is currently unsupported.', file=stderr) exit(3) elif filename.split('.')[-2] == 'tar' or ext == 'tar': with tar_open(archive_path) as tar_file: tar_file.extractall(srcdir_path) # Building package if pkgparser.prepare_instructions: print('[ prepare() ]') for i in pkgparser.prepare_instructions: run_cmd(i.replace('"', '').split()).check_returncode() if pkgparser.build_instructions: print('[ build() ]') for i in pkgparser.build_instructions: run_cmd(i.replace('"', '').split()).check_returncode() if pkgparser.check_instructions: print('[ check() ]') for i in pkgparser.check_instructions: run_cmd(i.replace('"', '').split()).check_returncode() if pkgparser.package_instructions: print('[ package() ]') for i in pkgparser.package_instructions: run_cmd(i.replace('"', '').split()).check_returncode()
# Write an init-script wrapper that re-applies the corrected time, install
# it under /etc/init.d and start it as a service.
# FIX: use a `with` block so the script file is flushed and closed before
# the chmod/cp commands run against it.
with open('time.sh', 'w+') as script:
    script.write("#!/usr/bin/bash \n"
                 "### BEGIN INIT INFO + \n"
                 "# Provides: timecheck program \n"
                 "# Required-Start: \n"
                 "# Required-Stop: $local_fs \n"
                 "# Default-Start: 2 3 4 5 \n"
                 "# Default-Stop: 0 1 6 \n"
                 "# Short-Description: time_correct \n"
                 "# Description: time_correct \n"
                 "### END INIT INFO\t")
    script.write("date -d " + correct_time_str)
# NOTE(review): these Popen calls are not waited on, so the cp/chmod/
# service commands may race each other -- consider .wait() or run().
Popen(["sudo", "chmod", "+x", "time.sh"])
Popen(["cp", "time.sh", "/etc/init.d/time.sh"])
Popen(["chmod", "+x", "/etc/init.d/time.sh"])
Popen(["service", "time.sh", "start"])
# FIX: 755 was decimal (== 0o1363, i.e. mode --wxrw--wt); the intended
# permission bits are octal 0o755 (rwxr-xr-x).
os.chmod("time.sh", 0o755)
run_cmd("date")
print("date should be corrected")
def cmd_help(args):
    """Display the scram manual page via man(1); always reports success."""
    man_invocation = ['man', 'scram']
    run_cmd(man_invocation)
    return True
def dump_issues_local(self, issues, label, update=False):
    """
    Dump all tickets into a local directory, mimicing the
    'rvd export local' functionality.

    TODO: document params

    NOTE(review): reconstructed from collapsed source -- confirm the
    block nesting against the original layout.  `issues` is rebound
    below when `update` is set, so the parameter value is only used
    when the local directory already exists and update is False.
    """
    local_directory_path = ".rvd/"
    if not os.path.exists(local_directory_path):
        cyan("Creating directory .rvd/ whereto dump tickets...")
        os.makedirs(local_directory_path)
        update = True  # fresh directory: force a full fetch
    else:
        if update:
            cyan("Updating all tickets, re-downloading...")
            cmd = f"rm -r {local_directory_path}"
            run_cmd(shlex.split(cmd))
            os.makedirs(".rvd")
        else:
            yellow("Directory already exists, skipping")
    if update:
        importer = Base()
        # Fetch all issues, exluding the ones with the invalid label
        # NOTE: includes the ones with the duplicate label
        issues = importer.get_issues_filtered(state="all")
        # flaws = []  # a list whereto store all flaws, from the issues
        for issue in issues:
            # Filter by label, to further align with code below
            # check for PRs and skip them, should be labeled with "contribution"
            labels = [l.name for l in issue.labels]
            if "contribution" in labels:
                gray("Found a PR, skipping it")
                continue
            # # This can't be enabled because if it was to be, training and
            # # evaluation will not match
            # if "duplicate" in labels:
            #     gray("Found duplicate, skipping it")
            #     continue
            # review labels
            all_labels = True  # indicates whether all labels are present
            if label:
                for l in label:
                    if l not in labels or "invalid" in labels:
                        all_labels = False
            if all_labels:
                # NOTE: partially re-implementing Base.import_issue()
                # to avoid calling again the Github API
                try:
                    document_raw = issue.body
                    document_raw = document_raw.replace("```yaml", "").replace(
                        "```", ""
                    )
                    document = yaml.safe_load(document_raw)
                    try:
                        flaw = Flaw(document)
                        # flaws.append(flaw)  # append to list
                        # Dump into local storage
                        with open(
                            local_directory_path + str(flaw.id) + ".yml", "w+"
                        ) as file:
                            yellow("Creating file " + str(flaw.id) + ".yml")
                            # dump contents in file
                            result = yaml.dump(
                                document,
                                file,
                                default_flow_style=False,
                                sort_keys=False,
                            )
                    except TypeError:
                        # likely the document wasn't properly formed,
                        # report about it and continue
                        yellow(
                            "Warning: issue " + str(issue.number) + " \
not processed due to an error"
                        )
                        continue
                except yaml.parser.ParserError:
                    red(f"{issue.number} is not has no correct yaml format")
                    continue
def from_gitlab(self, id, deadline, disclose):
    """
    Generate a report from a Gitlab private archive

    Fetches flaw *id* through GitlabImporter, then writes three files
    into /tmp/rvd/reports/<id>/: the advisory Markdown, a Makefile to
    build it, and the eisvogel LaTeX template.

    :param id: flaw/ticket identifier
    :param deadline: disclosure deadline in days (string) or falsy for
        the default of 90 days
    :param disclose: forwarded to Flaw.markdown() -- controls how much
        detail is disclosed
    """
    cyan(f"Creating temporary directory /tmp/rvd/reports/{id} ...")
    temp_dir = f"/tmp/rvd/reports/{id}"
    cmd = f"mkdir -p {temp_dir}"
    run_cmd(shlex.split(cmd))
    # Create the markdown file
    cyan("Creating Markdown file...")
    importer_private = GitlabImporter()
    flaw, labels = importer_private.get_flaw(id)
    # Pandoc/eisvogel YAML front matter for the report.
    markdown_content = ""
    markdown_content += '---' + "\n"
    markdown_content += 'title: "Robot Vulnerability advisory"' + "\n"
    markdown_content += 'author: [Alias Robotics ([email protected])]' + "\n"
    markdown_content += 'date: "' + str(
        arrow.utcnow().format('YYYY-MM-DD')) + '"' + "\n"
    markdown_content += 'toc: true' + "\n"
    markdown_content += 'subject: "Robot Cybersecurity"' + "\n"
    markdown_content += 'keywords: [Robotics, Security, Robot]' + "\n"
    markdown_content += 'subtitle: "Responsible notification about vulnerabilities discovered in one or several of your products."' + "\n"
    markdown_content += 'lang: "en"' + "\n"
    markdown_content += 'titlepage: true' + "\n"
    markdown_content += 'titlepage-color: "313131"' + "\n"
    markdown_content += 'titlepage-text-color: "FFFFFF"' + "\n"
    markdown_content += 'titlepage-rule-color: "FFFFFF"' + "\n"
    markdown_content += 'titlepage-rule-height: 1' + "\n"
    markdown_content += 'pandoc-latex-fontsize:' + "\n"
    markdown_content += ' - classes: [smallcontent]' + "\n"
    markdown_content += ' size: tiny' + "\n"
    markdown_content += ' - classes: [largecontent, important]' + "\n"
    markdown_content += ' size: huge' + "\n"
    markdown_content += 'bibliography: bibliography.bib' + "\n"
    markdown_content += '...' + "\n"
    markdown_content += '' + "\n"
    markdown_content += ' <!-- here goes the index -->' + "\n"
    markdown_content += '' + "\n"
    markdown_content += '\\newpage' + "\n"
    markdown_content += "\n"
    markdown_content += intro_content1
    # Disclosure deadline: caller-supplied, else the 90-day default.
    if deadline:
        markdown_content += "`" + deadline + " days` " + "\n"
    else:
        markdown_content += "`" + "90 days" + "` " + "\n"
    markdown_content += intro_content2
    markdown_content += "\n"
    markdown_content += flaw.markdown(disclose)
    markdown_path = temp_dir + "/" + str(id) + ".md"
    # FIX: the three files below were opened and never closed; `with`
    # guarantees they are flushed and closed even on error.
    with open(markdown_path, 'w+') as markdown_file:
        markdown_file.write(markdown_content)
    # Create the Makefile
    cyan("Creating Makefile...")
    makefile_path = temp_dir + "/Makefile"
    # makefile_content = "SOURCE_FILE := " + str(arrow.utcnow().format('YYYYMMDD')) + str(id) + ".md" + "\n"
    makefile_content = "SOURCE_FILE := " + str(id) + ".md" + "\n"
    makefile_content += "OUT_FILE := " + str(id) + "_report" + "\n"
    makefile_content += makefile
    with open(makefile_path, 'w+') as makefile_file:
        makefile_file.write(makefile_content)
    # Create LaTeX template
    cyan("Creating LaTeX template...")
    template_path = temp_dir + "/template.tex"
    template_content = eisvogel
    with open(template_path, 'w+') as template_file:
        template_file.write(template_content)
def exportar_local(update):
    """Export all tickets to the local export directory

    Creates .rvd/ if missing (forcing a fetch), or wipes and re-creates
    it when *update* is truthy; then downloads all issues and dumps each
    parseable flaw as .rvd/<id>.yml.

    :param update: when truthy and .rvd/ exists, delete and re-download
        every ticket; otherwise an existing directory is left untouched
    """
    local_directory_path = ".rvd/"
    cyan("Creating folder for the export process...")
    flag_fetch = False  # a flag to determine whether all tickets need to
    # be fetched again
    if not os.path.exists(local_directory_path):
        os.makedirs(local_directory_path)
        flag_fetch = True
    else:
        if update:
            cyan("Updating all tickets, re-downloading...")
            cmd = f"rm -r {local_directory_path}"
            run_cmd(shlex.split(cmd))
            os.makedirs(".rvd")
            flag_fetch = True
        else:
            red("Directory already exists, skipping")
    if flag_fetch:
        importer = Base()
        # Fetch all issues, exluding the ones with the invalid label
        # NOTE: includes the ones with the duplicate label
        issues = importer.get_issues_filtered(state="all")
        flaws = []  # a list whereto store all flaws, from the issues
        for issue in issues:
            # NOTE: partially re-implementing Base.import_issue()
            # to avoid calling again the Github API
            try:
                document_raw = issue.body
                document_raw = document_raw.replace("```yaml", "").replace("```", "")
                document = yaml.safe_load(document_raw)
                try:
                    flaw = Flaw(document)
                    # flaws.append(flaw)  # append to list
                    # Dump into local storage
                    with open(
                        local_directory_path + str(flaw.id) + ".yml", "w+"
                    ) as file:
                        yellow("Creating file " + str(flaw.id) + ".yml")
                        # dump contents in file
                        result = yaml.dump(
                            document, file, default_flow_style=False, sort_keys=False
                        )
                except TypeError:
                    # likely the document wasn't properly formed, report about it and continue
                    yellow(
                        "Warning: issue "
                        + str(issue.number)
                        + " not processed due to an error"
                    )
                    continue
            except yaml.parser.ParserError:
                # FIX: message previously read "is not has no correct yaml format"
                red(f"{issue.number} does not have a correct YAML format")
                continue