    def runCurrentStepFunction(self):
        QtCore.QCoreApplication.processEvents()
        step = self.lesson.steps[self.currentStep]
        self.webView.setEnabled(False)
        execute(step.function)
        self.webView.setEnabled(True)
        self.stepFinished()

    def moveToNextStep(self):
        if self.currentStep == len(self.lesson.steps):
            QtGui.QMessageBox.information(self, "Lesson", "You have reached the end of this lesson")
            self.finishLesson()
        else:
            step = self.lesson.steps[self.currentStep]
            if step.endsignal is not None:
                step.endsignal.connect(self.endSignalEmitted)
            item = self.listSteps.item(self.currentStep)
            item.setBackground(QtCore.Qt.green)
            if os.path.exists(step.description):
                with open(step.description) as f:
                    html = "".join(f.readlines())
                self.webView.setHtml(html, QtCore.QUrl.fromUserInput(step.description))
            else:
                self.webView.setHtml(step.description)
            QtCore.QCoreApplication.processEvents()
            if step.prestep is not None:
                execute(step.prestep)
            if step.function is not None:
                self.btnRunStep.setEnabled(step.steptype != Step.AUTOMATEDSTEP)
                self.btnMove.setEnabled(step.steptype != Step.AUTOMATEDSTEP)
                if step.steptype == Step.AUTOMATEDSTEP:
                    self.runCurrentStepFunction()
            else:
                self.btnRunStep.setEnabled(False)
                self.btnMove.setEnabled(True)
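For context, a minimal sketch of the Step object these methods rely on (field names are inferred from the code above; the real lessons framework may define it differently):

class Step:
    MANUALSTEP, AUTOMATEDSTEP = range(2)  # hypothetical step-type constants

    def __init__(self, description, function=None, prestep=None,
                 endsignal=None, steptype=MANUALSTEP):
        self.description = description  # HTML string or path to an HTML file
        self.function = function        # callable executed by runCurrentStepFunction()
        self.prestep = prestep          # optional callable run before the step is shown
        self.endsignal = endsignal      # optional Qt signal marking the step as finished
        self.steptype = steptype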
Example #3
def close_staff_tab(first_name, last_name):
    utils.execute('''
      UPDATE order_group
      SET is_open = FALSE, closedby = 1, updated = curdate() + interval '4' hour #4am so it won't affect tip pool
      WHERE is_open = TRUE
      AND table_id = concat(%(first_name)s,' ',%(last_name)s)
    ''', args = locals())
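A minimal sketch of the utils.execute helper these SQL examples appear to rely on (hypothetical, assuming a DB-API cursor that binds args itself; the real pos1 utils module may differ):

def execute(sql, args=None, incursor=None):
    # Hypothetical helper: let the DB-API driver bind and escape args.
    # get_cursor() stands in for the module's connection/cursor factory.
    cursor = incursor if incursor is not None else get_cursor()
    try:
        cursor.execute(sql, args)
    finally:
        if incursor is None:
            cursor.close()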
Example #4
def mode_pre(session_dir, args):
    """
    Read from Session file and write to session.pre file
    """
    global gtmpfilename

    endtime_to_update = int(time.time()) - get_changelog_rollover_time(
        args.volume)
    status_file = os.path.join(session_dir, args.volume, "status")
    status_file_pre = status_file + ".pre"

    mkdirp(os.path.dirname(args.outfile), exit_on_err=True, logger=logger)

    # If Pre status file exists and running pre command again
    if os.path.exists(status_file_pre) and not args.regenerate_outfile:
        fail("Post command is not run after last pre, "
             "use --regenerate-outfile")

    start = 0
    try:
        with open(status_file) as f:
            start = int(f.read().strip())
    except ValueError:
        pass
    except (OSError, IOError) as e:
        fail("Error Opening Session file %s: %s"
             % (status_file, e), logger=logger)

    logger.debug("Pre is called - Session: %s, Volume: %s, "
                 "Start time: %s, End time: %s"
                 % (args.session, args.volume, start, endtime_to_update))

    prefix = datetime.now().strftime("%Y%m%d-%H%M%S-%f-")
    gtmpfilename = prefix + next(tempfile._get_candidate_names())

    run_cmd_nodes("pre", args, start=start, end=-1, tmpfilename=gtmpfilename)

    # Merger
    if args.full:
        cmd = ["sort", "-u"] + node_outfiles + ["-o", args.outfile]
        execute(cmd,
                exit_msg="Failed to merge output files "
                "collected from nodes", logger=logger)
    else:
        # Read each Changelogs db and generate finaldb
        create_file(args.outfile, exit_on_err=True, logger=logger)
        outfilemerger = OutputMerger(args.outfile + ".db", node_outfiles)
        write_output(args.outfile, outfilemerger, args.field_separator)

    try:
        os.remove(args.outfile + ".db")
    except (IOError, OSError):
        pass

    run_cmd_nodes("cleanup", args, tmpfilename=gtmpfilename)

    with open(status_file_pre, "w", buffering=0) as f:
        f.write(str(endtime_to_update))

    sys.stdout.write("Generated output file %s\n" % args.outfile)
Example #5
def executeHandlers(type, list=()):
	"""
	Execute all handlers registered for `type`, passing `list` as the argument list
	"""
	handlers = Handlers[type]
	for handler in handlers:
		utils.execute(handler, list)
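A sketch of how the Handlers registry assumed above might be populated (illustrative; only Handlers and utils.execute come from the example):

Handlers = {"startup": [], "shutdown": []}  # hypothetical handler types

def registerHandler(type, handler):
    # executeHandlers(type, args) will later run every handler registered here.
    Handlers.setdefault(type, []).append(handler)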
Example #6
File: tax.py Project: jkobrin/pos1
def populate_pay_stub():

  results = utils.select('''
  select
  DATE(intime) - interval (DAYOFWEEK(intime) -1) DAY as week_of,
  employee_tax_info.person_id,
  last_name, first_name,
  sum(hours_worked) as hours_worked,
  pay_rate, 
  allowances,
  nominal_scale,
  round(sum(hours_worked)*pay_rate) as weekly_pay,
  round(sum(hours_worked)*pay_rate*nominal_scale) as gross_wages,
  married,
  sum(tip_pay) tips,
  round(sum(hours_worked)*pay_rate - weekly_tax) + sum(tip_pay) as total_weekly,
  sum(tip_pay) / sum(hours_worked) + pay_rate as total_hourly_pay
  from hours_worked JOIN employee_tax_info ON hours_worked.person_id = employee_tax_info.person_id
  where yearweek(intime) = yearweek(now() - interval '1' week)
  and intime != 0
  group by employee_tax_info.person_id
  ''',
  incursor = None,
  label = True
  )

  for row in results:
    add_witholding_fields(employee_tax_info = row)
    columns = ','.join(row.keys())
    values = ','.join(map(str,row.values()))
    utils.execute('''INSERT into pay_stub (%s) VALUES (%s)'''%(columns, values))
Example #7
    def uninstall(self, hostname):
        """
        Portal uninstall process
        """
        utils.execute("apt-get -y --purge remove openjdk-7-jdk tomcat7", check_exit_code=False)
        utils.execute("apt-get -y clean", check_exit_code=False)
        return
Example #8
def check_postgresql_server():
    if System.system_name in ('ubuntu', 'debian',):
        command = [
            'sudo',
            'su',
            'postgres',
            '-c',
            'psql -tAc "select version();"'
        ]
        exit_code, output = utils.execute(command,capture_output=True)
        if exit_code:
            System.postgresql_version = None  # Not installed
        else:
            System.postgresql_version = output.split(' ')[1]

    elif System.system_name == 'darwin':
        command = [
            'psql',
            '-tAc',
            'select version();'
        ]
        exit_code, output = utils.execute(command,capture_output=True)
        if exit_code:
            System.postgresql_version = None  # Not installed
        else:
            System.postgresql_version = output.split(' ')[1]  # No need to remove \n
    else:
        raise NotImplementedError()
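A brief usage sketch for the probe above (hypothetical call site, assuming the System holder class shown in the example):

# Probe for a PostgreSQL server before continuing setup.
check_postgresql_server()
if System.postgresql_version is None:
    print('PostgreSQL server not found')
else:
    print('Detected PostgreSQL %s' % System.postgresql_version)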
Example #9
def index(serverpin, in_):
  wantsin = (in_ == 'true')
  wantsout = not wantsin

  cursor = utils.get_cursor()

  isin = _server_is_in(serverpin)
  isout = not isin

  if wantsin and isin:
    resp = 'already clocked in'
  elif wantsin and isout:
    tip_share = server_tip_share(serverpin)
    sqlin = 'INSERT INTO hours VALUES(null, %(serverpin)s, NOW(), 0, %(tip_share)s, null)' % locals()
    utils.execute(sqlin, cursor)
    resp = 'Clocked in at ' + utils.now()
  elif wantsout and isin:
    sqlout = 'UPDATE hours SET outtime = NOW() WHERE person_id = %(serverpin)s AND outtime = 0' % locals()
    res = utils.execute(sqlout, cursor)
    resp = 'Clocked out at ' + utils.now()
  elif wantsout and isout:
    resp = 'already clocked out'
  else:
    resp = 'programming error'
    

  cursor.close()

  return json.dumps(resp)
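A sketch of the _server_is_in helper this view depends on (hypothetical, inferred from the outtime = 0 convention used in the SQL above):

def _server_is_in(serverpin):
    # A server is clocked in while an hours row for them still has outtime = 0.
    rows = utils.select(
        'SELECT 1 FROM hours WHERE person_id = %(serverpin)s AND outtime = 0' % locals())
    return bool(rows)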
Example #10
def node_cmd(host, host_uuid, task, cmd, args, opts):
    """
    Runs command via ssh if host is not local
    """
    localdir = is_host_local(host_uuid)

    # this is so to avoid deleting the ssh keys on local node which otherwise
    # cause ssh password prompts on the console (race conditions)
    # mode_delete() should be cleaning up the session tree
    if localdir and task == "delete":
        return

    pem_key_path = get_pem_key_path(args.session, args.volume)

    if not localdir:
        # prefix with ssh command if not local node
        cmd = ["ssh",
               "-i", pem_key_path,
               "root@%s" % host] + cmd

    execute(cmd, exit_msg="%s - %s failed" % (host, task), logger=logger)

    if opts.get("copy_outfile", False):
        cmd_copy = ["scp",
                    "-i", pem_key_path,
                    "root@%s:/%s" % (host, opts.get("node_outfile")),
                    os.path.dirname(opts.get("node_outfile"))]
        execute(cmd_copy, exit_msg="%s - Copy command failed" % host,
                logger=logger)
Example #11
def index(req, receipts_id, field_name, new_value):
  
  utils.execute(
  '''update receipts_by_server set %(field_name)s = '%(new_value)s' where id = %(receipts_id)s;'''%locals()
  )

  return json.dumps(None)
Example #12
def _ensure_project_folder(project_id):
    if not os.path.exists(ca_path(project_id)):
        start = os.getcwd()
        os.chdir(ca_folder())
        utils.execute('sh', 'geninter.sh', project_id,
                      _project_cert_subject(project_id))
        os.chdir(start)
Example #13
    def moveToNextStep(self):
        if self.currentStep == len(self.lesson.steps):
            dlg = LessonFinishedDialog(self.lesson)
            dlg.exec_()
            if dlg.nextLesson is not None:
                self.init(dlg.nextLesson)
            else:
                self.finishLesson()
        else:
            step = self.lesson.steps[self.currentStep]
            if step.endsignal is not None:
                step.endsignal.connect(self.endSignalEmitted)
            item = self.listSteps.item(self.currentStep)
            item.setBackground(Qt.green)
            if os.path.exists(step.description):
                with open(step.description) as f:
                    html = "".join(f.readlines())
                self.webView.document().setMetaInformation(QTextDocument.DocumentUrl,
                                                           QUrl.fromUserInput(step.description).toString())
                self.webView.setHtml(html)
            else:
                self.webView.setHtml(step.description)
            QCoreApplication.processEvents()
            if step.prestep is not None:
                execute(step.prestep)
            if step.function is not None:
                self.btnRunStep.setEnabled(step.steptype != Step.AUTOMATEDSTEP)
                self.btnMove.setEnabled(step.steptype != Step.AUTOMATEDSTEP and step.endsignal is None)
                if step.steptype == Step.AUTOMATEDSTEP:
                    self.runCurrentStepFunction()
            else:
                self.btnRunStep.setEnabled(False)
                self.btnMove.setEnabled(step.endsignal is None)
Example #14
	def delete_net(self,Id):
		if Id:
			utils.execute("rm -f /var/run/netns/%s" % ("nets-"+Id))
		if os.path.isdir("/sys/class/net/%s" % ("t-"+Id)):
			utils.execute("ip link del %s" % ("t-"+Id))
		else:
			pass
Example #15
def update(req, edits, newrows):
  edits = json.loads(edits)
  newrows = json.loads(newrows)
  insert_ids = {}
  cursor = utils.get_cursor()

  for rowid, fields_and_vals in edits.items():
    setlist = ','.join('%s = %s'%(f, sql_representation(v)) for f, v in fields_and_vals.items() if f != 'estimated_units_remaining')
    sql = "update sku set " + setlist + " where id = " + rowid + "\n"
    utils.execute(sql, cursor)
  for rowid, fields_and_vals in newrows.items():
    for bad_field in ('uid', 'undefined', 'estimated_units_remaining', 'boundindex', 'visibleindex', 'uniqueid'):
      if fields_and_vals.has_key(bad_field): fields_and_vals.pop(bad_field)

    fields = fields_and_vals.keys()
    values = fields_and_vals.values()
    field_list = ','.join(fields)
    value_list = ','.join(sql_representation(v) for v in values)
    sql = "insert into sku ("+field_list+") VALUES ("+value_list+")"
    utils.execute(sql, cursor)
    insert_ids[rowid] = utils.select("select LAST_INSERT_ID()", cursor, False)[0][0]

  cursor.close()

  wineprint.gen_fodt_and_pdf()

  return json.dumps(insert_ids)
Example #16
def populate_pay_stub(temp = True, incursor=None):

  #days_of_tips_calculated = utils.select(
  #  '''select count(distinct date(intime)) from hours 
  #  where yearweek(intime) = yearweek(now() - interval '1' week) and tip_pay is not null''',
  #  label = False
  #  )[0][0]
  #if days_of_tips_calculated != 7:
  #  return 'Tips have been calculated for only %s days last week. When all days tips are calculated, refresh this page to see and print weekly pay for last week.'%days_of_tips_calculated

  results = utils.select('''
  select
  DATE(intime) - interval (DAYOFWEEK(intime) -1) DAY as week_of,
  hours_worked.person_id,
  last_name, first_name,
  sum(hours_worked) as hours_worked,
  pay_rate, 
  COALESCE(allowances, 0) allowances,
  COALESCE(nominal_scale, 0) nominal_scale,
  COALESCE(married, 0) married,
  COALESCE(salary, 0) + COALESCE(round(sum(hours_worked)*pay_rate), 0) as weekly_pay,
  COALESCE(salary, 0) + COALESCE(round(sum(hours_worked)*pay_rate), 0) * COALESCE(nominal_scale,0) as gross_wages,
  COALESCE(sum(tip_pay),0) tips,
  COALESCE(sum(tip_pay) / sum(hours_worked) + pay_rate, 0) as total_hourly_pay
  from hours_worked LEFT OUTER JOIN employee_tax_info ON hours_worked.person_id = employee_tax_info.person_id
  where yearweek(intime) = yearweek(now() - interval '1' week)
  and intime != 0
  group by hours_worked.person_id, yearweek(intime)
  ''',
  incursor = incursor,
  label = True
  )

  if temp:
    utils.execute('''
    create temporary table PAY_STUB_TEMP like PAY_STUB;
    ''', incursor=incursor);
    table_names = ('PAY_STUB_TEMP',)
  else:
    table_names = ('PAY_STUB', 'WEEKLY_PAY_STUB')

  for row in results:
    if not temp and utils.select(
      'select 1 from PAY_STUB where week_of = "%(week_of)s" and person_id = %(person_id)s'%row,
      incursor = incursor
      ):
      continue

    for table_name in table_names:
      if table_name == 'WEEKLY_PAY_STUB':
        row['gross_wages'] = row['weekly_pay'] + float(row['tips'])
        row['pay_rate'] = round(row['total_hourly_pay'])

      tax.add_witholding_fields(row)
      columns = ', '.join(row.keys())
      values = ', '.join(("'%s'" % value for value in row.values()))
      sqltext = 'INSERT into %s (%s) VALUES (%s);'%(table_name, columns, values)
      my_logger.debug('pay stub: ' + sqltext)
      utils.execute(sqltext, incursor=incursor)
Example #17
def abyss_bloom(input_fqs, outputs):
    fq1, fq2 = input_fqs
    for k_mer_size, bf, bf_flag in zip(K_MER_SIZES, BFS, BF_FLAGS):
        cmd = CONFIG['abyss_bloom']['cmd'].format(**locals())
        # cmd = ('abyss-bloom build -v -k {k_mer_size} -j 8 -b 3G -l 2 -q 15 - '
        #        '{fq1} {fq2} '
        #        '| gzip -c > {bf}'.format(**locals()))
        execute(cmd, flag=bf_flag)
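The commented-out command in the loop hints at the template being formatted; a sketch of the corresponding CONFIG entry (hypothetical, for illustration only):

CONFIG = {
    'abyss_bloom': {
        # {k_mer_size}, {fq1}, {fq2} and {bf} are filled in by .format(**locals())
        'cmd': ('abyss-bloom build -v -k {k_mer_size} -j 8 -b 3G -l 2 -q 15 - '
                '{fq1} {fq2} '
                '| gzip -c > {bf}'),
    },
}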
Example #18
def cleanup():
	print("\n***** Cleaning up *****")
	if volume_mounted:
		utils.execute("umount " + mount_point)
	if volume_created:
		vmcreate.detach_and_delete_volume(volume)
	if mount_point_created:
		utils.execute("rm -rf " + mount_point)
Example #19
    def uninstall(self, hostname):
        """
        RabbitMQ uninstall process
        """
        utils.execute("apt-get -y --purge remove rabbitmq-server memcached python-memcache", check_exit_code=False)
        utils.execute("apt-get -y clean", check_exit_code=False)
        shutil.rmtree('/var/lib/rabbitmq', ignore_errors=True)
        return
Example #20
def _ensure_project_folder(project_id):
    if not os.path.exists(ca_path(project_id)):
        geninter_sh_path = os.path.join(FLAGS.ca_path,
                                        'geninter.sh')
        start = os.getcwd()
        os.chdir(ca_folder())
        utils.execute('sh', geninter_sh_path, project_id,
                      _project_cert_subject(project_id))
        os.chdir(start)
Example #21
	def __init__(self,url):
		self.connection=docker.Client(base_url=url)
		if os.path.isdir("/etc/config"):
			pass
		else:
			utils.execute("mkdir /etc/config")
		self.path="/etc/config/"
		self.netpath="/etc/network/"
		self.net=network.network()
Example #22
    def uninstall(self, hostname):
        """
        Keystone uninstall process
        """
        utils.execute("apt-get -y --purge remove keystone python-keystone python-keystoneclient python-mysqldb",
                      check_exit_code=False)
        utils.execute("apt-get -y clean", check_exit_code=False)
        shutil.rmtree('/var/lib/keystone', ignore_errors=True)
        return
Example #23
def revoke_cert(project_id, file_name):
    """Revoke a cert by file name"""
    start = os.getcwd()
    os.chdir(ca_folder(project_id))
    # NOTE(vish): potential race condition here
    utils.execute("openssl ca -config ./openssl.cnf -revoke '%s'" % file_name)
    utils.execute("openssl ca -gencrl -config ./openssl.cnf -out '%s'" %
                  FLAGS.crl_file)
    os.chdir(start)
Example #24
    def uninstall(self, hostname):
        """
        Monitoring (collectd/xymon) uninstall process
        """
        utils.execute("apt-get -y --purge remove collectd-core xymon-client", check_exit_code=False)
        utils.execute("apt-get -y clean", check_exit_code=False)
        shutil.rmtree('/var/lib/collectd/rrd', ignore_errors=True)
        shutil.rmtree('/var/www/rrd', ignore_errors=True)
        return
Example #25
def delivered(req, item_id):
    my_logger.info(req.get_remote_host() + ': delivered on ' + str(item_id))

    utils.execute('''
      UPDATE order_item oi
      set oi.is_delivered = NOT oi.is_delivered, oi.is_held = FALSE, oi.updated = NOW()
      where oi.id = %(item_id)s
    ''' % locals())

    return json.dumps(None)
Example #26
File: vm.py Project: AmesianX/chef
    def import_raw(self, raw: str, force: bool):
        if not os.path.exists(raw):
            utils.fail("%s: file not found" % raw)
            exit(1)

        self.initialise(force)

        utils.pend("copy disk image")
        utils.execute(['cp', raw, self.path_raw])
        utils.ok()
Example #27
    def _removeRepos(self):
        (stdout, stderr) = utils.execute('sed -i /precise-updates/d /etc/apt/sources.list')
        if len(stderr) > 0: return stderr
        (stdout, stderr) = utils.execute('sed -i /precise-security/d /etc/apt/sources.list')
        if len(stderr) > 0: return stderr
        (stdout, stderr) = utils.execute('sed -i /archive.ubuntu.com/d /etc/apt/sources.list')
        if len(stderr) > 0: return stderr
        (stdout, stderr) = utils.execute('rm /etc/apt/sources.list.d/stackops.list', check_exit_code=False)
        (stdout, stderr) = utils.execute('apt-get -y update', check_exit_code=False)
        return ''
Example #28
File: vm.py Project: AmesianX/chef
    def create(self, size: str, force: bool, **kwargs: dict):
        self.initialise(force)

        utils.pend("create %s%sB image"
                   % (size, ('i', '')[size[-1] in '0123456789']))
        utils.execute(['%s/qemu-img' % self.path_executable,
                       'create', '-f', 'raw', self.path_raw, size],
                      msg="execute qemu-img")
        self.size = size
        utils.ok()
Example #29
def patch_hybridcloud_files():
    """Execute a shell script, do this things:
    1. replace python code
    2. update configuration files
    3. install some dependence packages
    4. restart component proc
    """

    utils.execute(["dos2unix", os.path.join(CURRENT_PATH, "install.sh")])
    utils.execute(["sh", os.path.join(CURRENT_PATH, "install.sh")])
Example #30
def revoke_cert(project_id, file_name):
    """Revoke a cert by file name."""
    start = os.getcwd()
    os.chdir(ca_folder(project_id))
    # NOTE(vish): potential race condition here
    utils.execute('openssl', 'ca', '-config', './openssl.cnf', '-revoke',
                  file_name)
    utils.execute('openssl', 'ca', '-gencrl', '-config', './openssl.cnf',
                  '-out', FLAGS.crl_file)
    os.chdir(start)
Example #31
def main(args):
    print("IREE handy-dandy-LLVM-submodule-updater at your service...")
    print(f"  IREE Path: {args.repo}")
    print(f"  LLVM Path: {args.llvm_path}")
    print(f"  LLVM Bazel Path: {args.llvm_bazel_path}")
    print(f"  TensorFlow Path: {args.tensorflow_path}")
    print(f"  MLIR-HLO Path: {args.tensorflow_path}")

    current_llvm_commit = parse_rev(args.llvm_path, "HEAD")
    current_llvm_bazel_commit = parse_rev(args.llvm_bazel_path, "HEAD")
    current_tf_commit = parse_rev(args.tensorflow_path, "HEAD")
    current_mlir_hlo_commit = parse_rev(args.mlir_hlo_path, "HEAD")
    print("Current Commits:")
    print(f"  llvm = {current_llvm_commit}")
    print(f"  llvm_bazel = {current_llvm_bazel_commit}")
    print(f"  tensorflow = {current_tf_commit}")
    print(f"  mlir-hlo = {current_mlir_hlo_commit}")

    # Update LLVM-Bazel
    new_llvm_bazel_commit = find_new_llvm_bazel_commit(args.llvm_bazel_path,
                                                       current_llvm_commit,
                                                       args.llvm_bazel_rev)
    print(f"\n*** Updating LLVM Bazel to {new_llvm_bazel_commit} ***")
    utils.execute(["git", "checkout", new_llvm_bazel_commit],
                  cwd=args.llvm_bazel_path)
    stage_path(args.repo, args.llvm_bazel_path)

    validate_llvm_bazel_commit(current_llvm_commit,
                               args.llvm_bazel_path,
                               exit_on_failure=args.validate)

    # Update TensorFlow
    new_tf_commit = find_new_commit_from_version_file(
        args.tensorflow_path, TF_LLVM_WORKSPACE_FILEPATH, current_llvm_commit,
        args.tensorflow_rev)
    print("\n*** Updating TensorFlow to", new_tf_commit, "***")
    utils.execute(["git", "checkout", new_tf_commit], cwd=args.tensorflow_path)
    stage_path(args.repo, args.tensorflow_path)

    validate_tf_commit(current_llvm_commit,
                       args.tensorflow_path,
                       exit_on_failure=args.validate)

    # Update MLIR-HLO
    new_mlir_hlo_commit = find_new_commit_from_version_file(
        args.mlir_hlo_path, MLIR_HLO_LLVM_VERSION_FILEPATH,
        current_llvm_commit, args.mlir_hlo_rev)
    print("\n*** Updating MLIR-HLO to", new_mlir_hlo_commit, "***")
    utils.execute(["git", "checkout", new_mlir_hlo_commit],
                  cwd=args.mlir_hlo_path)
    stage_path(args.repo, args.mlir_hlo_path)

    validate_mlir_hlo_commit(current_llvm_commit,
                             args.mlir_hlo_path,
                             exit_on_failure=args.validate)

    # Export SUBMODULE_VERSIONS.txt.
    print()  # Add line break.
    submodule_versions.export_versions(args.repo)
Example #32
def import_versions(repo_dir):
    print("*** Importing versions to git submodule state")
    diff_versions = get_diff_versions(repo_dir)
    if not diff_versions:
        print("*** No submodule updates required")
        return
    for path, (current, written) in diff_versions.items():
        if current is None:
            print(("Warning: Submodule %s does not exist but is "
                   "still in the version file") % (path, ))
            continue
        if written is None:
            print("Warning: Submodule %s is not in the version file" %
                  (current, ))
            continue
        # Directly update the submodule commit hash in the index.
        # See: https://stackoverflow.com/questions/33514642
        command = [
            "git", "update-index", "--cacheinfo", "160000", written, path
        ]
        print("Updating", path, "to", written)
        utils.execute(command, cwd=repo_dir)
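For orientation, a sketch of the mapping shape the loop above expects from get_diff_versions (paths and hashes are illustrative):

# {submodule_path: (commit_in_git_index_or_None, commit_in_SUBMODULE_VERSIONS_or_None)}
diff_versions = {
    'third_party/llvm-project': ('1a2b3c...', '4d5e6f...'),
    'third_party/removed-module': (None, '7a8b9c...'),
}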
Example #33
def generate_vpn_files(project_id):
    project_folder = ca_folder(project_id)
    csr_fn = os.path.join(project_folder, 'server.csr')
    crt_fn = os.path.join(project_folder, 'server.crt')

    genvpn_sh_path = os.path.join(os.path.dirname(__file__),
                                  'CA',
                                  'genvpn.sh')
    if os.path.exists(crt_fn):
        return
    _ensure_project_folder(project_id)
    start = os.getcwd()
    os.chdir(ca_folder())
    # TODO(vish): the shell scripts could all be done in python
    utils.execute('sh', genvpn_sh_path,
                  project_id, _vpn_cert_subject(project_id))
    with open(csr_fn, 'r') as csrfile:
        csr_text = csrfile.read()
    (serial, signed_csr) = sign_csr(csr_text, project_id)
    with open(crt_fn, 'w') as crtfile:
        crtfile.write(signed_csr)
    os.chdir(start)
Example #34
def generate_key_pair(bits=1024):
    # what is the magic 65537?

    tmpdir = tempfile.mkdtemp()
    keyfile = os.path.join(tmpdir, 'temp')
    utils.execute('ssh-keygen', '-q', '-b', bits, '-N', '', '-t', 'rsa', '-f',
                  keyfile)
    fingerprint = generate_fingerprint('%s.pub' % (keyfile))
    private_key = open(keyfile).read()
    public_key = open(keyfile + '.pub').read()

    shutil.rmtree(tmpdir)
    # code below returns public key in pem format
    # key = M2Crypto.RSA.gen_key(bits, 65537, callback=lambda: None)
    # private_key = key.as_pem(cipher=None)
    # bio = M2Crypto.BIO.MemoryBuffer()
    # key.save_pub_key_bio(bio)
    # public_key = bio.read()
    # public_key, err = execute('ssh-keygen', '-y', '-f',
    #                           '/dev/stdin', private_key)

    return (private_key, public_key, fingerprint)
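A brief usage sketch for the helper above (file names are illustrative):

private_key, public_key, fingerprint = generate_key_pair(bits=2048)
with open('mykey', 'w') as priv, open('mykey.pub', 'w') as pub:
    priv.write(private_key)
    pub.write(public_key)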
Example #35
    def download(self, src, dst):
        cmd = [
            self.util,
            "--config-file",
            self.conf_file,
            "cp",
            "-f",
            f"{self.path}{src}",
            dst,
        ]
        ret, _ = utils.execute(cmd, print_cmd=True)
        if ret:
            logging.info("Download %s ", src)
Example #36
def get_submodule_versions(repo_dir):
    raw_status = utils.execute(["git", "submodule", "status"],
                               cwd=repo_dir,
                               silent=True,
                               capture_output=True).decode("UTF-8")
    status_lines = []
    for line in raw_status.splitlines():
        # Format is a status char followed by revision, space and path.
        m = re.match(r"""^.([0-9a-z]+)\s+([^\s]+)""", line)
        if m:
            # Output as just the commit hash followed by space and path.
            status_lines.append(m.group(1) + " " + m.group(2))
    return "\n".join(status_lines) + "\n"
Example #37
def mode_query(session_dir, args):
    # Verify volume status
    cmd = ["gluster", 'volume', 'info', args.volume, "--xml"]
    _, data, _ = execute(cmd,
                         exit_msg="Failed to Run Gluster Volume Info",
                         logger=logger)
    try:
        tree = etree.fromstring(data)
        statusStr = tree.find('volInfo/volumes/volume/statusStr').text
    except (ParseError, AttributeError) as e:
        fail("Invalid Volume: %s" % e, logger=logger)

    if statusStr != "Started":
        fail("Volume %s is not online" % args.volume, logger=logger)

    mkdirp(session_dir, exit_on_err=True, logger=logger)
    mkdirp(os.path.join(session_dir, args.volume),
           exit_on_err=True,
           logger=logger)
    mkdirp(os.path.dirname(args.outfile), exit_on_err=True, logger=logger)

    # Configure cluster for password-less SSH
    ssh_setup(args)

    # Enable volume options for changelog capture
    enable_volume_options(args)

    # Start query command processing
    if args.since_time:
        start = args.since_time
        logger.debug("Query is called - Session: %s, Volume: %s, "
                     "Start time: %s" % ("default", args.volume, start))

        run_cmd_nodes("query", args, start=start)

        # Merger
        # Read each Changelogs db and generate finaldb
        create_file(args.outfile, exit_on_err=True, logger=logger)
        outfilemerger = OutputMerger(args.outfile + ".db", node_outfiles)
        write_output(args, outfilemerger)

        try:
            os.remove(args.outfile + ".db")
        except (IOError, OSError):
            pass

        run_cmd_nodes("cleanup", args)

        sys.stdout.write("Generated output file %s\n" % args.outfile)
    else:
        fail("Please specify --since-time option")
Example #38
  def is_reproducible(self, testcase, target_path):
    """Checks if the testcase reproduces.

      Args:
        testcase: The path to the testcase to be tested.
        target_path: The path to the fuzz target to be tested

      Returns:
        True if crash is reproducible and we were able to run the
        binary.

      Raises:
        ReproduceError if we can't attempt to reproduce the crash.
    """

    if not os.path.exists(target_path):
      raise ReproduceError('Target %s not found.' % target_path)

    os.chmod(target_path, stat.S_IRWXO)

    target_dirname = os.path.dirname(target_path)
    command = ['docker', 'run', '--rm', '--privileged']
    container = utils.get_container_name()
    if container:
      command += [
          '--volumes-from', container, '-e', 'OUT=' + target_dirname, '-e',
          'TESTCASE=' + testcase
      ]
    else:
      command += [
          '-v',
          '%s:/out' % target_dirname, '-v',
          '%s:/testcase' % testcase
      ]

    command += [
        '-t', docker.BASE_RUNNER_TAG, 'reproduce', self.target_name, '-runs=100'
    ]

    logging.info('Running reproduce command: %s.', ' '.join(command))
    for _ in range(REPRODUCE_ATTEMPTS):
      _, _, returncode = utils.execute(command)
      if returncode != 0:
        logging.info('Reproduce command returned: %s. Reproducible on %s.',
                     returncode, target_path)

        return True

    logging.info('Reproduce command returned 0. Not reproducible on %s.',
                 target_path)
    return False
Example #39
def node_cmd(host, host_uuid, task, cmd, args, opts):
    """
    Runs command via ssh if host is not local
    """
    try:
        localdir = is_host_local(host_uuid)

        # this is so to avoid deleting the ssh keys on local node which
        # otherwise cause ssh password prompts on the console (race conditions)
        # mode_delete() should be cleaning up the session tree
        if localdir and task == "delete":
            return

        pem_key_path = get_pem_key_path(args.session, args.volume)

        if not localdir:
            # prefix with ssh command if not local node
            cmd = ["ssh",
                   "-oNumberOfPasswordPrompts=0",
                   "-oStrictHostKeyChecking=no",
                   "-t",
                   "-t",
                   "-i", pem_key_path,
                   "root@%s" % host] + cmd

        execute(cmd, exit_msg="%s - %s failed" % (host, task), logger=logger)

        if opts.get("copy_outfile", False) and not localdir:
            cmd_copy = ["scp",
                        "-oNumberOfPasswordPrompts=0",
                        "-oStrictHostKeyChecking=no",
                        "-i", pem_key_path,
                        "root@%s:/%s" % (host, opts.get("node_outfile")),
                        os.path.dirname(opts.get("node_outfile"))]
            execute(cmd_copy, exit_msg="%s - Copy command failed" % host,
                    logger=logger)
    except KeyboardInterrupt:
        sys.exit(2)
Example #40
def main(args):
    print("IREE handy-dandy-LLVM-submodule-updater at your service...")
    print("  IREE Path :", args.repo)
    print("  LLVM Path :", args.llvm)
    print("  TensorFlow Path :", args.tensorflow)
    print("  LLVM Bazel Path :", args.llvm_bazel)
    current_llvm_commit = get_commit(args.llvm)
    current_tensorflow_commit = get_commit(args.tensorflow)

    print("Current Commits: llvm =", current_llvm_commit, "tensorflow =",
          current_tensorflow_commit)

    # Update TensorFlow
    new_tf_commit = find_new_tf_commit(args.tensorflow, current_llvm_commit,
                                       args.tensorflow_commit)
    print("\n*** Updating TensorFlow to", new_tf_commit, "***")
    utils.execute(["git", "checkout", new_tf_commit], cwd=args.tensorflow)
    stage_path(args.repo, args.tensorflow)

    validate_tf_commit(current_llvm_commit,
                       args.tensorflow,
                       exit_on_failure=args.validate)

    new_llvm_bazel_commit = find_new_llvm_bazel_commit(args.llvm_bazel,
                                                       current_llvm_commit,
                                                       args.llvm_bazel_commit)
    print("\n*** Updating LLVM Bazel to", new_llvm_bazel_commit, "***")
    utils.execute(["git", "checkout", new_llvm_bazel_commit],
                  cwd=args.llvm_bazel)
    stage_path(args.repo, args.llvm_bazel)

    validate_llvm_bazel_commit(current_llvm_commit,
                               args.llvm_bazel,
                               exit_on_failure=args.validate)

    # Export SUBMODULE_VERSIONS.
    print()  # Add line break.
    submodule_versions.export_versions(args.repo)
Example #41
def run(min_zoom, max_zoom, bbox_code, quality, only_tiles):
    bbox_code = bbox_code.upper()
    if not hasattr(bbox_cities, bbox_code):
        print('{} bbox code not found in bbox_cities.py'.format(minus))
        sys.exit(-1)

    run_checks()

    if not os.path.isdir(TMP_DIR):
        os.mkdir(TMP_DIR)
    if not os.path.isdir(BUILD_DIR):
        os.mkdir(BUILD_DIR)

    compile_osm_styles(getattr(bbox_cities, bbox_code), min_zoom, max_zoom)

    filename = hashlib.md5('{}{}{}{}{}'.format(bbox_code,
                                               min_zoom, max_zoom, quality,
                                               datetime.now())).hexdigest()

    if not only_tiles:
        make_initial_gpkg()

    render_tiles(getattr(bbox_cities, bbox_code),
                 '{}/osm.xml'.format(STYLES_DIR),
                 TILES_DIR,
                 min_zoom,
                 max_zoom,
                 tms_scheme=True)
    print('{} tiles created'.format(plus))

    make_gpkg_from_tiles(quality, filename)

    if not only_tiles:
        execute('ogr2ogr', '-f', 'GPKG', 'tmp/out.{}.gpkg'.format(filename),
                'tmp/initial.gpkg', '-update', '-progress')

    shutil.copy('tmp/out.{}.gpkg'.format(filename), BUILD_DIR)
    print('{} Final gpkg moved to build dir'.format(plus))
Example #42
    def _clone(self):
        """Creates a clone of the repo in the specified directory.

      Raises:
        ValueError: when the repo is not able to be cloned.
    """
        if not os.path.exists(self.base_dir):
            os.makedirs(self.base_dir)
        self.remove_repo()
        out, _, _ = utils.execute(
            ['git', 'clone', self.repo_url, self.repo_name],
            location=self.base_dir)
        if not self._is_git_repo():
            raise ValueError('%s is not a git repo' % self.repo_url)
Example #43
    def checkout_commit(self, commit, clean=True):
        """Checks out a specific commit from the repo.

    Args:
      commit: The commit SHA to be checked out.

    Raises:
      RuntimeError: when checkout is not successful.
      ValueError: when commit does not exist.
    """
        self.fetch_unshallow()
        if not self.commit_exists(commit):
            raise ValueError('Commit %s does not exist in current branch' %
                             commit)
        utils.execute(['git', 'checkout', '-f', commit],
                      self.repo_dir,
                      check_result=True)
        if clean:
            utils.execute(['git', 'clean', '-fxd'],
                          self.repo_dir,
                          check_result=True)
        if self.get_current_commit() != commit:
            raise RuntimeError('Error checking out commit %s' % commit)
Example #44
def run_nrpspredictor(seq_record, nrpsnames, nrpsseqs, options):
    #NRPSPredictor: extract AMP-binding + 120 residues N-terminal of this domain, extract 8 Angstrom residues and insert this into NRPSPredictor
    logging.getLogger('user_visible').info(
        "Predicting NRPS A domain substrate specificities by NRPSPredictor")
    with TemporaryDirectory(change=True):
        nrpsseqs_file = "nrpsseqs.fasta"

        NRPSPredictor2_dir = utils.get_full_path(__file__, "NRPSPredictor2")
        utils.writefasta(nrpsnames, nrpsseqs, nrpsseqs_file)
        #Get NRPSPredictor2 code predictions, output sig file for input for NRPSPredictor2 SVMs
        nrpscodepred.run_nrpscodepred(options)
        #Run NRPSPredictor2 SVM
        datadir = path.join(NRPSPredictor2_dir, 'data')
        libdir = path.join(NRPSPredictor2_dir, 'lib')
        jarfile = path.join(NRPSPredictor2_dir, 'build', 'NRPSpredictor2.jar')
        classpath = [
            jarfile,
            '%s/java-getopt-1.0.13.jar' % libdir,
            '%s/Utilities.jar' % libdir,
            '%s/libsvm.jar' % libdir
        ]
        if sys.platform == ("linux2") or sys.platform == ("darwin"):
            java_separator = ":"
        elif sys.platform == ("win32"):
            java_separator = ";"
        commands = [
            'java',
            '-Ddatadir=%s' % datadir, '-cp',
            java_separator.join(classpath),
            'org.roettig.NRPSpredictor2.NRPSpredictor2', '-i', 'input.sig',
            '-r',
            path.join(
                options.raw_predictions_outputfolder,
                "ctg" + str(options.record_idx) + '_nrpspredictor2_svm.txt'),
            '-s', '1', '-b', options.eukaryotic and '1' or '0'
        ]
        out, err, retcode = utils.execute(commands)
        if err != '':
            logging.debug('running nrpspredictor2 gave error %r' % err)
        #Copy NRPSPredictor results and move back to original directory
        try:
            os.remove(
                path.join(
                    options.raw_predictions_outputfolder, "ctg" +
                    str(options.record_idx) + "_nrpspredictor2_codes.txt"))
        except:
            pass
        shutil.move(
            "ctg" + str(options.record_idx) + "_nrpspredictor2_codes.txt",
            options.raw_predictions_outputfolder)
Example #45
def node_cmd(host, host_uuid, task, cmd, args, opts):
    """
    Runs command via ssh if host is not local
    """
    localdir = is_host_local(host_uuid)

    pem_key_path = get_pem_key_path(args.session, args.volume)

    if not localdir:
        # prefix with ssh command if not local node
        cmd = ["ssh", "-i", pem_key_path, "root@%s" % host] + cmd

    execute(cmd, exit_msg="%s - %s failed" % (host, task), logger=logger)

    if opts.get("copy_outfile", False):
        cmd_copy = [
            "scp", "-i", pem_key_path,
            "root@%s:/%s" % (host, opts.get("node_outfile")),
            os.path.dirname(opts.get("node_outfile"))
        ]
        execute(cmd_copy,
                exit_msg="%s - Copy command failed" % host,
                logger=logger)
Example #46
    def remove_image_sync(self, repo):
        cmd = [
            "ctr",
            "-n",
            "k8s.io",
            "-a",
            self.__address,
            "images",
            "rm",
            repo,
            "--sync",
        ]
        ret, out = utils.execute(cmd)
        assert ret
Example #47
    def run(self, program_file, harness_file):
        executable = self.compile(program_file, harness_file)
        if executable:
            run_cmd = self._get_run_cmd(executable)
            run_result = utils.execute(run_cmd,
                                       quiet=True,
                                       err_to_output=False)

            if utils.found_err(run_result):
                return [FALSE]
            else:
                return [UNKNOWN]
        else:
            return [ERROR]
Example #48
    def get_remote_url(self):
        """Return remote url."""
        url = execute("git config --get remote.origin.url", self.dir)
        if url[0:8] == "https://":
            pass
        elif url[0:4] == "git@":
            # Remove git@ and .git in beginning/end and replace : with /
            url = url[4:-4]
            url = url.replace(":", "/")
            url = "https://" + url
        else:
            raise RuntimeWarning("No valid url found for remote origin.")

        return url
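Concretely, the rewrite above maps SSH-style remotes onto HTTPS URLs (illustrative values):

# 'git@github.com:user/project.git'  ->  'https://github.com/user/project'
# 'https://github.com/user/project'  ->  returned unchanged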
Example #49
    def execute(self, cmds, addl_env={}, check_exit_code=True):
        if not self._parent.root_helper:
            raise NameError("Sudo privilege is required to run this command.")
        ns_params = []
        if self._parent.namespace:
            ns_params = ['ip', 'netns', 'exec', self._parent.namespace]

        env_params = []
        if addl_env:
            env_params = (['env'] +
                          ['%s=%s' % pair for pair in addl_env.items()])
        return utils.execute(ns_params + env_params + list(cmds),
                             root_helper=self._parent.root_helper,
                             check_exit_code=check_exit_code)
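A usage sketch for this wrapper (names are illustrative), showing how the namespace and extra environment expand into the final command:

# ip_wrapper.execute(['ping', '-c', '1', '10.0.0.1'], addl_env={'LC_ALL': 'C'})
# runs, via utils.execute(..., root_helper=..., check_exit_code=True), roughly:
#   ip netns exec <namespace> env LC_ALL=C ping -c 1 10.0.0.1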
Example #50
def disable_local_mesh_bridge(bridge):
    iface_list = netifaces.interfaces()
    if bridge in iface_list:
        utils.execute('ifconfig', bridge, '0.0.0.0')
        utils.execute('ip', 'link', 'set', bridge, 'down')

    filename = '/etc/network/interfaces.d/ifcfg-%s' % bridge
    if os.path.isfile(filename):
        utils.execute('rm', '-f', filename)
Example #51
    def run(self, program_file, test_case):
        import klee

        klee_prepared_file = utils.get_prepared_name(program_file, klee.name)
        c_version = 'gnu11'
        if not self.executable:
            compile_cmd = ['gcc']
            compile_cmd += [
                '-std={}'.format(c_version), "-L", klee.lib_dir,
                '-D__alias__(x)=', '-o', self.executable_name,
                klee_prepared_file, '-lkleeRuntest', '-lm'
            ]
            result = utils.execute(compile_cmd)
            if result.returncode != 0:
                c_version = 'gnu90'
                compile_cmd = ['gcc']
                compile_cmd += [
                    '-std={}'.format(c_version), "-L", klee.lib_dir,
                    '-D__alias__(x)=', '-o', self.executable_name,
                    klee_prepared_file, '-lkleeRuntest', '-lm'
                ]
                # Retry the compile with the C90 fallback standard.
                result = utils.execute(compile_cmd)
            self.executable = self.executable_name

        if not os.path.exists(self.executable_name):
            return [ERROR]

        curr_env = utils.get_env()
        curr_env['KTEST_FILE'] = test_case.origin

        result = utils.execute([self.executable],
                               env=curr_env,
                               err_to_output=False)

        if utils.found_err(result):
            return [FALSE]
        else:
            return [UNKNOWN]
Example #52
  def commit_exists(self, commit):
    """Checks to see if a commit exists in the project repo.

    Args:
      commit: The commit SHA you are checking.

    Returns:
      True if the commit exits in the project.
    """
    if not commit.rstrip():
      return False

    _, _, err_code = utils.execute(['git', 'cat-file', '-e', commit],
                                   self.repo_dir)
    return not err_code
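A short, hypothetical call site combining this check with the checkout helper shown earlier (repo and candidate_sha are illustrative names):

if repo.commit_exists(candidate_sha):
    repo.checkout_commit(candidate_sha)
else:
    logging.warning('Commit %s not found in the project repo.', candidate_sha)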
Example #53
def insert_purchase():
    def get_db_time():
        return """%d-%d-%d %d:%d""" % (TIME["year"], TIME["month"],
                                       TIME["day"], TIME["hour"],
                                       TIME["minute"])

    found_good_pair = False
    while not found_good_pair:
        pair = _get_pair_customer_product()
        if pair not in _customer_product_pairs:
            _customer_product_pairs.append(pair)
            found_good_pair = True
            break
    if not found_good_pair:
        return
    purchase_customer_id = pair[0]
    purchase_product_id = pair[1]

    utils.execute(
        """INSERT INTO purchase (date_time, customer_id, product_id) VALUES ('{DB_TIME}', {PURCHASE_CUSTOMER_ID}, {PURCHASE_PRODUCT_ID});"""
        .format(DB_TIME=get_db_time(),
                PURCHASE_CUSTOMER_ID=purchase_customer_id,
                PURCHASE_PRODUCT_ID=purchase_product_id))
    advance_time()
Example #54
def check_postgresql_server():
    if System.system_name in (
            'ubuntu',
            'debian',
    ):
        command = [
            'sudo', 'su', 'postgres', '-c', 'psql -tAc "select version();"'
        ]
        exit_code, output = utils.execute(command, capture_output=True)
        if exit_code:
            System.postgresql_version = None  # Not installed
        else:
            System.postgresql_version = output.split(' ')[1]

    elif System.system_name == 'darwin':
        command = ['psql', '-tAc', 'select version();']
        exit_code, output = utils.execute(command, capture_output=True)
        if exit_code:
            System.postgresql_version = None  # Not installed
        else:
            System.postgresql_version = output.split(' ')[
                1]  # No need to remove \n
    else:
        raise NotImplementedError()
Example #55
def mod_novnc():
    astute = utils.get_astute()
    if astute:
        filename = '/etc/nova/nova.conf'
        orig_filename = filename + ".orig"
        if not os.path.exists(orig_filename):
            shutil.copyfile(filename, orig_filename)
        cf = ConfigParser.ConfigParser()
        try:
            cf.read(orig_filename)
            if not cf.has_section('cache'):
                cf.add_section('cache')
            cf.set('cache', 'enable', 'True')
            memcached_servers = cf.get('keystone_authtoken',
                                       'memcached_servers')
            cf.set('cache', 'memcached_servers', memcached_servers)
            cf.set('DEFAULT', 'memcached_servers', memcached_servers)
            with open(filename, 'w') as configfile:
                cf.write(configfile)
            LOG.info('%s created' % filename)
            utils.execute('service', 'nova-novncproxy', 'restart')
            utils.execute('service', 'nova-consoleauth', 'restart')
        except Exception:
            utils.reportError('Cannot set configurations to %s' % filename)
Example #56
    def upload(self, src, dst, force=False):
        if not self.stat(dst) or force:
            cmd = [
                self.util,
                "--config-file",
                self.conf_file,
                "-f",
                "cp",
                src,
                f"{self.path}{dst}",
            ]
            ret, _ = utils.execute(cmd, print_output=True)
            assert ret
            if ret:
                logging.info("Object %s is uploaded", dst)
Example #57
    def ip(self, internal=True):
        if internal and self._internal_ip: return self._internal_ip
        if not internal and self._external_ip: return self._external_ip

        command = 'gcloud compute instances list --filter="name={}" ' \
            '--format "get(networkInterfaces[0].{})"'.format(self.id(),
                'networkIP' if internal else 'accessConfigs[0].natIP')
        desc = '{} {} IP address'.format(
            'internal' if internal else 'external', self.id())

        ip = utils.execute(command, desc)()

        if internal: self._internal_ip = ip
        else: self._external_ip = ip

        return ip
Example #58
    def copy_files(self, dst_dirname):
        copy_file = lambda f: \
            utils.execute('gcloud compute scp {}:epaxos/{} {} --zone {}'.format(
                self.id(),
                f,
                path.join(dst_dirname, f),
                self.zone()
            ), '{}: copying metrics files'.format(self.id()))

        processes = list(map(copy_file, self.metrics_filenames()))

        def handler():
            for p in processes:
                p()

        return handler
Example #59
  def get_git_diff(self):
    """Gets a list of files that have changed from the repo head.

    Returns:
      A list of changed file paths or None on Error.
    """
    self.fetch_unshallow()
    out, err_msg, err_code = utils.execute(
        ['git', 'diff', '--name-only', 'origin...'], self.repo_dir)
    if err_code:
      logging.error('Git diff failed with error message %s.', err_msg)
      return None
    if not out:
      logging.error('No diff was found.')
      return None
    return [line for line in out.splitlines() if line]
Example #60
def run(args):
    """ perform log subcommand """

    try:
        cmd = utils.kubectl_cmd(args) + [
            "exec", "-nkadalu", "kadalu-csi-provisioner-0", "-c",
            "kadalu-provisioner", "--", "/kadalu/heal-info.sh"
        ]
        resp = utils.execute(cmd)
        print(resp.stdout)
        print()

    except utils.CommandError as err:
        utils.command_error(cmd, err.stderr)
    except FileNotFoundError:
        utils.kubectl_cmd_help(args.kubectl_cmd)