Example #1
	def post(self):
		username = self.request.get('quotationrEmail')

		user = self.user_model.get_by_auth_id(username)
		if not user:
			logging.info('Could not find any user entry for username %s', username)
			self.response.out.write('fail:::cant find email')
			return

		user_id = user.get_id()
		token = self.user_model.create_signup_token(user_id)

		verification_url = self.uri_for('verification', type='p', user_id=user_id,
			signup_token=token, _full=True)

		
		logging.info('Verification URL: %s', verification_url)

		mail.send_mail(sender="Quotationr <jason@quotationr.com>",
			to=user.email_address,
			subject="Reset Your Quotationr Password",
			body="Please click the following link to reset your Quotationr password:\n\n" + verification_url)
		
		#self.response.out.write('success:::' + user.email_address)
		self.response.out.write('success:::email sent')
Example #2
  def PublishManifest(self, manifest, version, build_id=None):
    """Publishes the manifest as the manifest for the version to others.

    Args:
      manifest: Path to manifest file to publish.
      version: Manifest version string, e.g. 6102.0.0-rc4
      build_id: Optional integer giving build_id of the build that is
                publishing this manifest. If specified and non-negative,
                build_id will be included in the commit message.
    """
    # Note: This commit message is used by master.cfg for figuring out when to
    #       trigger slave builders.
    commit_message = 'Automatic: Start %s %s %s' % (self.build_names[0],
                                                    self.branch, version)
    if build_id is not None and build_id >= 0:
      commit_message += '\nCrOS-Build-Id: %s' % build_id

    logging.info('Publishing build spec for: %s', version)
    logging.info('Publishing with commit message: %s', commit_message)
    logging.debug('Manifest contents below.\n%s', osutils.ReadFile(manifest))

    # Copy the manifest into the manifest repository.
    spec_file = '%s.xml' % os.path.join(self.all_specs_dir, version)
    osutils.SafeMakedirs(os.path.dirname(spec_file))

    shutil.copyfile(manifest, spec_file)

    # Actually push the manifest.
    self.PushSpecChanges(commit_message)
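A hypothetical call site for the method above; the manager instance, manifest path and version string are illustrative assumptions, not taken from the source:

# `manager` is assumed to be an instance of the class defining PublishManifest.
manager.PublishManifest('/tmp/full.xml', '6102.0.0-rc4', build_id=1234)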
Example #3
    def Adjustment(self):
        """ adjustment & and blunder removing

            :returns: adjusted coordinates or None
        """
        # adjustment loop
        last_res = None
        while True:
            res, blunder = self.g.adjust()
            if res is None or 'east' not in res[0] or 'north' not in res[0] or \
                              'elev' not in res[0]:
                # adjustment failed or too many blunders
                if last_res is not None:
                    logging.warning("blunders are not fully removed")
                    res = last_res
                else:
                    logging.error("adjustment failed")
                break
            elif blunder['std-residual'] < 1.0:
                logging.info("blunders removed")
                break
            else:
                logging.info("%s - %s observation removed" % (blunder['from'], blunder['to']))
                self.g.remove_observation(blunder['from'], blunder['to'])
                last_res = res
        return res
Example #4
def _work_reg():
  # Register a new job: allocate the next id and mark it not yet done.
  with _task_lock:
    jid = _task_seq.next()
    _task_list[jid] = False
    logging.info(str(_task_list))
  return jid
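The function above relies on three module-level names. A minimal sketch of definitions consistent with the Python 2 `.next()` call; these are assumptions, not the original module state:

import itertools
import threading

_task_lock = threading.Lock()   # guards the job registry
_task_seq = itertools.count(1)  # .next() yields 1, 2, 3, ... on Python 2
_task_list = {}                 # maps job id -> completion flag/result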
Example #5
  def test_restart(self):
    """test_restart tests that when starting a second vttablet with the same
    configuration as another one, it will kill the previous process
    and take over listening on the socket.

    If vttablet listens to other ports (like gRPC), this feature will
    break. We believe it is not widely used, so we're OK with this for now.
    (container based installations usually handle tablet restarts
    by using a different set of servers, and do not rely on this feature
    at all).
    """
    if environment.topo_server().flavor() != 'zookeeper':
      logging.info("Skipping this test in non-github tree")
      return
    if tablet_62344.grpc_enabled():
      logging.info("Skipping this test as second gRPC port interferes")
      return

    utils.run_vtctl(['CreateKeyspace', 'test_keyspace'])

    # create the database so vttablets start, as it is serving
    tablet_62344.create_db('vt_test_keyspace')

    tablet_62344.init_tablet('master', 'test_keyspace', '0')
    proc1 = tablet_62344.start_vttablet()
    proc2 = tablet_62344.start_vttablet()
    for timeout in xrange(20):
      logging.debug("Sleeping waiting for first process to die")
      time.sleep(1.0)
      proc1.poll()
      if proc1.returncode is not None:
        break
    if proc1.returncode is None:
      self.fail("proc1 still running")
    tablet_62344.kill_vttablet()
Example #6
    def ModifyMemory(self, request, context):
        logging.info("__INIT__ModifyMemory[VappVmServicer]")
        vapp_vm = VappVm(context)
        res = vapp_vm.modify_memory(request)
        logging.info("__DONE__ModifyMemory[VappVmServicer]")

        return res
Example #7
 def execute(self, email_models):
     logging.debug("In Destiny::execute()")
     if not email_models:
         return
     emails_id = []
     destinations = {}
     for destiny in self._plugins.keys():
         destinations.setdefault(destiny, email_models[-1].get(destiny))
         emails_id.append(email_models[-1].email_id())
     for email_model in email_models[:-1]:
         for destiny in self._plugins.keys():
             d_tables = destinations.get(destiny).get("tables")
             for d_table in d_tables:
                 for k, v in d_table.iteritems():
                     m_tables = email_model.get(destiny).get("tables")
                     for m_table in m_tables:
                         if k in m_table:
                             d_table.setdefault(k, []).extend(m_table[k])
         emails_id.append(email_model.email_id())
     for destiny, models in destinations.iteritems():
         for forward in self._plugins.get(destiny):
             try:
                 forward.execute(models)
             except Exception, e:
                 logging.error("!! Error-execute: %s" % (str(e),))
                 logging.info("Add emails in queure error: %s" % str(emails_id))
                 for email_id in emails_id:
                     self.add_email_error(email_id)
                 continue
Example #8
def move_data():
    db = DBConnection().db

    mobiles = ['18310505991', '13693675352', '13581731204']
    # SMS body (Chinese): "Table T_LOCATION has been fully migrated to
    # T_LOCATION_NEW; please promptly verify the table data is correct and
    # complete."
    message = "数据库T_LOCATION已经完全转移到T_LOCATION_NEW,请及时确认表信息的正确性和完整性。"
    max_row = 250000000
    begin_time = time.gmtime(time.time())
    for i in range(10000, max_row, 10000):
        # Copy rows in batches of 10000, keyed on id.
        sql = "INSERT INTO T_LOCATION_NEW" \
              " SELECT * FROM T_LOCATION WHERE id <= %d AND id > %d" \
              " AND (timestamp BETWEEN 0 AND 1448899200)" % (i, i - 10000)
        logging.info("execute sql: %s", sql)

        n = db.execute(sql)
        logging.info("last record row id = %s", n)
        if i == 240000000:
            for mobile in mobiles:
                SMSHelper.send(mobile, message)
                print "send", mobile
    end_time = time.gmtime(time.time())
    L_bak = "alter table T_LOCATION rename to T_LOCATION_bak"
    NEW_L = "alter table T_LOCATION_NEW rename to T_LOCATION"

    for i in range(1, 5):
        time.sleep(1)
        logging.info("Will rename tables in %d seconds", 5 - i)

    db.execute(L_bak)
    db.execute(NEW_L)
    logging.info("exchange of tables T_LOCATION and T_LOCATION_NEW is complete")
    logging.info("Move table data begin_time: %s, end_time: %s", begin_time, end_time)
Example #9
def analyze_testcases(test_results_section):
    case_names = {}
    result_types = []
    result_fields = {}
    for test_result in test_results_section:
        case_name = test_result['case_name']
        if case_name in case_names:
            case_names[case_name] += 1
        else:
            case_names[case_name] = 1
            result_types.append(test_result)

    for test_result in result_types:
        for field in test_result.iterkeys():
            if field in result_fields:
                result_fields[field] += 1
            else:
                result_fields[field] = 1

    logging.info("Number of different case names: {}\n".format(len(case_names)))
    for case_name, occurrences in case_names.iteritems():
        logging.info("Case name '{}' occurred {} times".format(case_name, occurrences))
    logging.info('')
    for field, occurrences in result_fields.iteritems():
        logging.info("Field '{}' occurred {} times".format(field, occurrences))
    logging.info('')
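The tallying above can be written more compactly with collections.Counter; a behavior-equivalent sketch (the function name is hypothetical):

import collections
import logging

def analyze_testcases_counts(test_results_section):
    # Count how many times each case name occurs.
    case_names = collections.Counter(r['case_name'] for r in test_results_section)
    # Keep one representative result per case name, in first-seen order.
    seen = set()
    result_types = []
    for result in test_results_section:
        if result['case_name'] not in seen:
            seen.add(result['case_name'])
            result_types.append(result)
    # Count how often each field appears across the representatives.
    result_fields = collections.Counter(f for r in result_types for f in r)
    logging.info("Number of different case names: %d", len(case_names))
    for case_name, occurrences in case_names.items():
        logging.info("Case name '%s' occurred %d times", case_name, occurrences)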
Example #10
def read_dependency_xml(moduleName, config):
    qt_path = config.get('default', 'qt')
    xml_path = os.path.join(
        qt_path, 'lib/{}-android-dependencies.xml'.format(moduleName))
    if not os.path.exists(xml_path):
        log.info("module {} do not have xml {}".format(moduleName, xml_path))
        return

    tree = ElementTree.parse(xml_path)
    root = tree.getroot()
    lib_tag = root.find('dependencies/lib')

    name = "" if not lib_tag.attrib else lib_tag.attrib.get('name', "")

    if name != moduleName:
        raise Exception("moduleName({}) and name from xml({}) do not match".format(
            moduleName, name))

    deps_tag = lib_tag.find('depends')
    deps = list()

    for child in deps_tag:
        info = {
            "tag": child.tag
        }
        info.update(child.attrib)
        deps.append(info)

    return deps
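The lookups above imply a dependency file shaped roughly like this sketch; the root tag, file names and attribute values (beyond the dependencies/lib/depends structure and the name attribute) are assumptions:

from xml.etree import ElementTree

# Hypothetical contents of <qt>/lib/QtGui-android-dependencies.xml.
sample = """
<rules>
  <dependencies>
    <lib name="QtGui">
      <depends>
        <jar file="jar/QtAndroid.jar"/>
        <lib file="lib/libQt5Gui.so"/>
      </depends>
    </lib>
  </dependencies>
</rules>
"""
root = ElementTree.fromstring(sample)
lib_tag = root.find('dependencies/lib')  # same lookup as read_dependency_xml
assert lib_tag.attrib.get('name') == 'QtGui'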
Example #11
def main():
    """
    SendDataClient class, used as the flashshot service.
    """

    log.init_log('./logs/send_data_client')

    # If the config file says this region needs an IP proxy, cancel the
    # proxy IP before uploading and restore it after the upload finishes.
    # `uping` is set to 1 so the checkIpProxy job that runs every five
    # minutes will not modify the IP; it is set back to 0 afterwards.
    if config.NEED_PROXY:
        configFile = ConfigParser.ConfigParser()
        configFile.read(CONFIGFILE)
        configFile.set("info", "uping", 1)
        configFile.write(open(CONFIGFILE, "w"))
        logging.info('setProxy("0") ')
        # 在传送图片前,先将本地代理IP关掉
        ipProxy.setProxy("0")

    target_folder = sys.argv[1]
    target_filenames = get_file_list(target_folder)
    upload_files(target_folder, target_filenames)

    # Restore the local proxy IP after the images have been transferred.
    if config.NEED_PROXY:
        configFile = ConfigParser.ConfigParser()
        configFile.read(CONFIGFILE)
        ip1 = configFile.get("info", "ip1")
        configFile.set("info", "uping", 0)
        configFile.write(open(CONFIGFILE, "w"))
        enableProxyScript = "python ipProxy.py " + ip1
        os.popen(enableProxyScript)
        # ipProxy.setProxy(ip1)
        logging.info('setProxy ' + ip1)
Example #12
  def __init__(self):
    """Initializes the manager by reading the config file."""

    self.routers = []
    self.auth_manager = auth_manager.AuthorizationManager()

    self.default_router = self._CreateRouter(config.CONFIG["API.DefaultRouter"])

    if config.CONFIG["API.RouterACLConfigFile"]:
      logging.info("Using API router ACL config file: %s",
                   config.CONFIG["API.RouterACLConfigFile"])

      with open(config.CONFIG["API.RouterACLConfigFile"], mode="rb") as fh:
        acl_list = APIAuthorization.ParseYAMLAuthorizationsList(fh.read())

      if not acl_list:
        raise InvalidAPIAuthorization("No entries added from "
                                      "RouterACLConfigFile.")

      for index, acl in enumerate(acl_list):
        router = self._CreateRouter(acl.router, params=acl.router_params)
        self.routers.append(router)

        router_id = str(index)
        self.auth_manager.DenyAll(router_id)

        for group in acl.groups:
          self.auth_manager.AuthorizeGroup(group, router_id)

        for user in acl.users:
          self.auth_manager.AuthorizeUser(user, router_id)
Example #13
    def get(self, request, *args, **kwargs):
        if 'cname' in request.GET and request.GET['cname'] != "":
            #cname = request.GET['cname']
            cname = " and (Contact_FirstName like '%%"+request.GET['cname']+"%%' or Contact_LastName like '%%"+request.GET['cname']+"%%')"
        else:
            cname = ""

        if 'ccompany' in request.GET and request.GET['ccompany'] != "":
            ccompany = " and Contact_Company like '%%"+request.GET['ccompany']+"%%'"
        else:
            ccompany = ""
        
        if 'ctype' in request.GET and request.GET['ctype'] != "" and request.GET['ctype'] != "Contact Type":
            ctype = " and Contact_Type like '%%"+request.GET['ctype']+"%%'"
        else:
            ctype = ""
        #allcontacts = Tblcontact.objects.raw("select * from tblcontact where Contact_PK>0 "+cname+" "+cemail+" "+ctype+"")
        if 'noof' in request.GET and request.GET['noof'] != "":
            noof = request.GET['noof']
        else:
            noof = 25
        allcalls = Tblcalls.objects.raw(
            "select * from tblcalls where Calls_StaffID = %s"
            " and (Calls_Deleted != 1) limit " + str(int(noof)),
            [request.session['Staff'].staff_pk])
        calltypes = Tblcalltype.objects.all()
        callactions = Tblcallaction.objects.all()
        callsource = Tblcallsource.objects.all()
        logging.info('viewing calls for staff:: %s', request.session['Staff'].staff_pk)
        content = {'page_title': "View Calls",
                   'allitems':allcalls,"calltypes":calltypes,"callactions":callactions,
                   "callsource":callsource,}
        return render_template(request, "viewcalls.htm", content)
Example #14
    def makeJar(self, infile, jardir):
        '''makeJar is the main entry point to JarMaker.

        It takes the input file, the output directory, the source dirs and the
        top source dir as argument, and optionally the l10n dirs.
        '''

        # making paths absolute, guess srcdir if file and add to sourcedirs
        _normpath = lambda p: os.path.normpath(os.path.abspath(p))
        self.topsourcedir = _normpath(self.topsourcedir)
        self.sourcedirs = [_normpath(p) for p in self.sourcedirs]
        if self.localedirs:
            self.localedirs = [_normpath(p) for p in self.localedirs]
        elif self.relativesrcdir:
            self.localedirs = \
                self.generateLocaleDirs(self.relativesrcdir)
        if isinstance(infile, basestring):
            logging.info('processing ' + infile)
            self.sourcedirs.append(_normpath(os.path.dirname(infile)))
        pp = self.pp.clone()
        pp.out = JarManifestParser()
        pp.do_include(infile)

        for info in pp.out:
            self.processJarSection(info, jardir)
Example #15
    def Delete(self, request, context):
        logging.info("__INIT__Delete[VappVmServicer]")
        vapp_vm = VappVm(context)
        res = vapp_vm.delete(request)
        logging.info("__DONE__Delete[VappVmServicer]")

        return res
Example #16
    def post(self):
        args = parser.parse_args()
        ip_address = request.remote_addr
        port = args['port']

        worker = Worker.query.filter_by(ip_address=ip_address, port=port).first()
        if not worker:
            logging.info("New worker connecting from {0}".format(ip_address))
            worker = Worker(hostname=args['hostname'],
                            ip_address=ip_address,
                            port=port,
                            status='enabled',
                            current_task=None,
                            log=None,
                            time_cost=None,
                            activity=None,
                            connection='online',
                            system=args['system'])
        else:
            worker.connection = 'online'
            worker.current_task = None

        db.session.add(worker)
        db.session.commit()

        return '', 204
Example #17
    def Read(self, request, context):
        logging.info("__INIT__Read[VappVmServicer]")
        vapp_vm = VappVm(context)
        res = vapp_vm.read(request)
        logging.info("__DONE__Read[VappVmServicer]")

        return res
Example #18
def parse_files(filelist, doctype='grant', commit_frequency=0):
    """
    Takes in a list of patent file names (from __main__() and start.py) and commits
    them to the database. This method is designed to be used sequentially to
    account for db concurrency.  The optional argument `commit_frequency`
    determines the frequency with which we commit the objects to the database.
    If set to 0, it will commit after all patobjects have been added.  Setting
    `commit_frequency` to be low (but not 0) is helpful for low memory machines.
    """
    if not filelist:
        return
    commit = alchemy.commit
    for filename in filelist:
        print filename
        for i, xmltuple in enumerate(extract_xml_strings(filename)):
            patobj = parse_patent(xmltuple, filename, doctype)
            if doctype == 'grant':
                alchemy.add_grant(patobj)
                commit = alchemy.commit
            else:
                alchemy.add_application(patobj)
                commit = alchemy.commit_application
            if commit_frequency and ((i+1) % commit_frequency == 0):
                commit()
                logging.info("{0} - {1} - {2}".format(filename, (i+1), datetime.datetime.now()))
                print " *", (i+1), datetime.datetime.now()
            
        commit()
        print " *", "Complete", datetime.datetime.now()
    def PowerOn(self, request, context):
        logging.info("__INIT__PowerOn[VappVmServicer]")
        vapp_vm = VappVm(context)
        res = vapp_vm.power_on(request)
        logging.info("__DONE__PowerOn[VappVmServicer]")

        return res
Example #20
def make_parser():
    """ Construct the command line parser """
    logging.info("Constructing parser")
    description = "Store and retrieve snippets of text"
    parser = argparse.ArgumentParser(description=description)

    subparsers = parser.add_subparsers(help="Available commands")

    # Subparser for the put command
    logging.debug("Constructing put subparser")
    put_parser = subparsers.add_parser("put", help="Store a snippet")
    put_parser.add_argument("name", help="The name of the snippet")
    put_parser.add_argument("snippet", help="The snippet text")
    put_parser.add_argument("filename", default="snippets.csv", nargs="?",
                            help="The snippet filename")
    put_parser.set_defaults(command="put")

    # Subparser for the get command
    logging.debug("Constructing get subparser")
    put_parser = subparsers.add_parser("get", help="Retrieve a snippet")
    put_parser.add_argument("name", help="The name of the snippet")
    put_parser.add_argument("filename", default="snippets.csv", nargs="?",
                            help="The snippet filename")
    put_parser.set_defaults(command="get")

    return parser
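A minimal driver sketch for this parser; the dispatch targets put() and get() are hypothetical, not part of the example:

if __name__ == "__main__":
    arguments = make_parser().parse_args()
    logging.info("Dispatching %s command", arguments.command)
    # put(arguments.name, arguments.snippet, arguments.filename) or
    # get(arguments.name, arguments.filename) would be called here.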
Example #21
def slicethread(fname, oname, wname, cfg, jobid):
  retcode = "fail"
  try:
    con = sqlite3.connect('db.sqlite')
    con.row_factory = sqlite3.Row

    cfg = "config.ini" if cfg is None else cfg

    proc = subprocess.Popen(["slic3r",
      "--load", cfg,
      fname, "-o", wname+'.gcode'])
    con.execute('insert into journal(cmd, pid, action, status, timestamp) values(?,?,?,?,DateTime(\'now\'))',
      ('slice {} -c {}'.format(os.path.basename(fname),
                               os.path.basename(cfg)), proc.pid, 'start',
        0 if proc.returncode is None else 1))
    con.commit()
    retcode = proc.wait()
    con.execute('insert into journal(cmd, pid, action, status, timestamp) values(?,?,?,?,DateTime(\'now\'))',
      ('slice {} -c {}'.format(os.path.basename(fname),
                               os.path.basename(cfg)), proc.pid, 'stop',
        proc.returncode))
    con.commit()
    try:
      os.unlink(oname+'.gcode')
    except OSError:
      pass  # no previous output file to remove
    finally:
      try:
        os.rename(wname+'.gcode', oname+'.gcode')
      except Exception:
        logging.info('rename failed: %s -> %s', wname+'.gcode', oname+'.gcode')
  finally:
    _work_done(jobid, val=retcode)
Example #22
    def compare(self, bag):
        if len(self.iplist) > 0 and (self.dev not in bag or len(bag[self.dev]) == 0):
            # Remove all IPs on this device
            logging.info(
                "Will remove all configured addresses on device %s", self.dev)
            self.delete("all")
            app = CsApache(self)
            app.remove()

        # This condition should not really happen but did :)
        # It means an apache file got orphaned after a guest network address
        # was deleted
        if len(self.iplist) == 0 and (self.dev not in bag or len(bag[self.dev]) == 0):
            app = CsApache(self)
            app.remove()

        for ip in self.iplist:
            found = False
            if self.dev in bag:
                for address in bag[self.dev]:
                    self.setAddress(address)
                    if self.hasIP(ip):
                        found = True
                    if self.is_guest_gateway(address, ip):
                        found = True
            if not found:
                self.delete(ip)
Example #23
def handle(data, con, apikey=None):
  d = json.loads(data)

  handlers = {'import': importit, 'ping': ping,
      'listimported': listimported, 'slice': sliceit,
      'listdone': listdone, 'getdone': getdone,
      'importconfig': importconfig, 'listconfig': listconfigs,
      'listprogress': listprogress, 'getstats': getstats,
      'journal': getjournal, 'del': wipefile, 'wait': waitfor}

  hndlr = noop
  cmd = 'noop'
  if d.get('cmd') in handlers:
    cmd = d['cmd']
    hndlr = handlers[cmd]

  logging.info('cmd: ' + cmd)

  if apikey is not None:
    if d.get('key') != apikey:
      logging.info('authentication failed for "{}" key!'.format(
        d.get('key', '')))
      return json.dumps({'r': 'fail',
        'm': 'authentication failed. incorrect apikey'})

  try:
    r = hndlr(d, con)
    result = json.dumps(r)
  except Exception as e:
    logging.error(str(e))
    result = json.dumps({u'm':unicode(e), u'r':u'fail'})
  logaccess(len(data), len(result), unicode(cmd), con)

  return result
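A minimal driver sketch, assuming `con` is the sqlite3 connection used elsewhere in this set (see Example #21) and that the registered 'ping' handler accepts the decoded dict and connection:

import json
import sqlite3

con = sqlite3.connect('db.sqlite')
request = json.dumps({'cmd': 'ping', 'key': 'secret'})
print handle(request, con, apikey='secret')  # JSON reply from the ping handler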
Example #24
    def pre(self, emulator=None):
        """
        _pre_

        Pre execution checks

        """
        if emulator is not None:
            return emulator.emulatePre(self.step)
        logging.info("Pre-executing CMSSW step")
        if hasattr(self.step.application.configuration, "configCacheUrl"):
            # means we have a configuration & tweak in the sandbox
            psetFile = self.step.application.command.configuration
            psetTweak = self.step.application.command.psetTweak
            self.stepSpace.getFromSandbox(psetFile)

            if psetTweak:
                self.stepSpace.getFromSandbox(psetTweak)

        if hasattr(self.step, "pileup"):
            self.stepSpace.getFromSandbox("pileupconf.json")

        # add in the scram env PSet manip script whatever happens
        self.step.runtime.scramPreScripts.append("SetupCMSSWPset")
        return None
Example #25
    def run(self, logger):
        """
        Run test case and gather results
        """
        if self.case_data.skip:
            logging.info("Skipping test case '%s'" % \
                             self.case_data.name)
            return

        logging.info("Running test case '%s' (%s)"
                     % (self.case_data.name,
                        self.case_data.methodname))
        starttime = time()
        try:
            rv = self.test_method(**self.case_data.args)
        except AssertionError, e:
            # The test failed.
            if len(e.args) > 1:
                self.results.append('message', e.args[0].encode("utf-8"))
                self.results.append_screenshot(e.args[1])
            else:
                self.results.append('message', e.args[0].encode("utf-8"))
                self.results.append_screenshot()
            self.results.append('stacktrace', traceback.format_exc())
            self.results['pass'] = 0
Example #26
 def kill_process(self, process):
     """
     Kill the given process.
     """
     logging.info('killing %s', process)
     drone = self._get_drone_for_process(process)
     drone.queue_call('kill_process', process)
Example #27
  def _UploadStatus(self, version, status, message=None, fail_if_exists=False,
                    dashboard_url=None):
    """Upload build status to Google Storage.

    Args:
      version: Version number to use. Must be a string.
      status: Status string.
      message: A failures_lib.BuildFailureMessage object with details
               of builder failure, or None (default).
      fail_if_exists: If set, fail if the status already exists.
      dashboard_url: Optional url linking to builder dashboard for this build.
    """
    data = BuilderStatus(status, message, dashboard_url).AsPickledDict()

    # This HTTP header tells Google Storage to return the PreconditionFailed
    # error message if the file already exists.
    gs_version = 0 if fail_if_exists else None

    logging.info('Recording status %s for %s', status, self.build_names)
    for build_name in self.build_names:
      url = BuildSpecsManager._GetStatusUrl(build_name, version)

      # Do the actual upload.
      ctx = gs.GSContext(dry_run=self.dry_run)
      ctx.Copy('-', url, input=data, version=gs_version)
Example #28
    def read(self, request):
        logging.info("__INIT__read[VappVm]")
        res = vapp_vm_pb2.ReadVappVmResult()
        res.present = False
        org_resource = self.client.get_org()
        org = Org(self.client, resource=org_resource)
        try:
            vdc_resource = org.get_vdc(request.target_vdc)
            vdc = VDC(
                self.client, name=request.target_vdc, resource=vdc_resource)

            vapp_resource = vdc.get_vapp(request.target_vapp)
            vapp = VApp(
                self.client, name=request.target_vapp, resource=vapp_resource)
            read_vapp_vm_resp = vapp.get_vm(request.target_vm_name)
            # Constructing the VM validates the lookup; the handle is unused.
            vm = VM(client=self.client, href=None, resource=read_vapp_vm_resp)

            res.present = True
        except Exception as e:
            errmsg = '__ERROR_read[VappVm] failed for VappVm {0}. __ErrorMessage__ {1}'
            logging.warn(errmsg.format(request.target_vm_name, str(e)))

            return res

        logging.info("__DONE__read[VappVm]")

        return res
Example #29
    def jar(self):
        """Performs the 'jar' command."""
        class_name = getattr(self.flags, "class")
        if (class_name is None) and (len(self.args) > 0):
            class_name = self.pop_args_head()
        assert (class_name is not None), ("No class name specified with [--class=]<class>.")

        lib_jars = []
        if self.flags.jars is not None:
            lib_jars.extend(self.flags.jars)
        classpath = list(self.express.get_classpath(lib_jars=lib_jars))

        java_opts = []
        if self.flags.java_opts is not None:
            java_opts = [self.flags.java_opts]

        user_args = list(self.args)
        logging.info("Running java class %r with parameters: %r", class_name, user_args)

        cmd = [
            "java",
            # This property is only needed in kiji-schema v1.1:
            "-Dorg.kiji.schema.impl.AvroCellEncoder.SCHEMA_VALIDATION=DISABLED",
        ] + java_opts + [
            "-classpath", ":".join(classpath),
            class_name,
        ] + user_args

        logging.debug("Running command:\n%s\n", " \\\n\t".join(map(repr, cmd)))
        return subprocess.call(cmd)
Example #30
def filter_exclude_include(src_list):
    info(u"Applying --exclude/--include")
    cfg = Config()
    exclude_list = FileDict(ignore_case = False)
    for file in src_list.keys():
        debug(u"CHECK: %s" % file)
        excluded = False
        for r in cfg.exclude:
            if r.search(file):
                excluded = True
                debug(u"EXCL-MATCH: '%s'" % (cfg.debug_exclude[r]))
                break
        if excluded:
            ## No need to check for --include if not excluded
            for r in cfg.include:
                if r.search(file):
                    excluded = False
                    debug(u"INCL-MATCH: '%s'" % (cfg.debug_include[r]))
                    break
        if excluded:
            ## Still excluded - ok, action it
            debug(u"EXCLUDE: %s" % file)
            exclude_list[file] = src_list[file]
            del src_list[file]
            continue
        else:
            debug(u"PASS: %r" % (file))
    return src_list, exclude_list