Example #1
def root(module=None):
  """
  Sets the "root module" for helipad.
  
  The root module's directory is used as the base for resolving
  relative paths.
  """
  global _ROOT_MODULE
  
  if module is None:
    return _ROOT_MODULE
  
  if isinstance(module, basestring):
    components = module.split('.')
    module = __import__(module, globals(), locals(), [], -1)
    
    for component in components[1:]:
      module = getattr(module, component)
  
  if inspect.ismodule(module):
    _ROOT_MODULE = module
  else:
    raise ValueError("Invalid module: %s" % module)
  
  # Return a reference to this module (so that we can string together method calls)
  return __import__('helipad', globals(), locals(), [], -1)
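A standalone sketch of the same dotted-path resolution using importlib (Python 3) instead of the __import__/getattr walk; it is an illustration only, not part of helipad.

# Sketch: resolve a dotted module path such as "collections.abc" to a module
# object, the modern equivalent of the __import__/getattr walk above.
import importlib
import inspect

def resolve_module(dotted_name):
    module = importlib.import_module(dotted_name)
    if not inspect.ismodule(module):
        raise ValueError("Invalid module: %s" % module)
    return module

print(resolve_module("collections.abc").__name__)  # collections.abc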
Example #2
 def check_server_disallowed(self):
     """
     Check if server domain name or IP is disallowed in settings.py.
     """
     hostname = self.netloc_parts[2].lower()
     if (hasattr(settings, 'DISALLOWED_DOMAIN_LIST') and
         settings.DISALLOWED_DOMAIN_LIST):
         for domain in settings.DISALLOWED_DOMAIN_LIST:
             if hostname == domain or hostname.endswith('.' + domain):
                 raise ValidationError(unicode(
                     _("Domain name %(domain)s is disallowed.") % locals()))
     try:
         ip = socket.gethostbyname(hostname)
     except socket.error:
         raise ValidationError(unicode(
             _("Could not resolve IP address for %(hostname)s.") %
             locals()))
     if (not hasattr(settings, 'DISALLOWED_SERVER_IP_LIST') or
         not settings.DISALLOWED_SERVER_IP_LIST):
         return
     server = long_ip(ip)
     # print 'server', server, dotted_ip(server), ip
     for disallowed in settings.DISALLOWED_SERVER_IP_LIST:
         disallowed = disallowed.strip()
         if disallowed == '' or disallowed.startswith('#'):
             continue
         mask = bit_mask(32)
         if '/' in disallowed:
             disallowed, bits = disallowed.split('/', 1)
             mask = slash_mask(int(bits))
         identifier = long_ip(disallowed) & mask
         masked = server & mask
         if masked == identifier:
             raise ValidationError(unicode(
                 _("Server IP address %(ip)s is disallowed.") % locals()))
Example #3
def project_detail(request, pk, page=''):
    print pk, page
    try:
        project = Project.objects.get(pk=pk)
    except Project.DoesNotExist:
        raise Http404
    testers = project.testers.all()
    bugs = project.bugs.all()
    if page is None:
        return render_to_response('project_detail.html', locals(),
                                  context_instance=RequestContext(request))
    elif page == '/bugs':
        return render_to_response('project_bugs.html', locals(),
            context_instance=RequestContext(request))
    elif page == '/testers':
        return render_to_response('project_testers.html', locals(),
            context_instance=RequestContext(request))
    elif page == '/enlist':
        user = request.user
        if not user.is_authenticated():
            raise PermissionDenied
        # The currently logged-in user must be a tester
        if not hasattr(user, 'tester'):
            raise PermissionDenied
        tester = user.tester
        project.add_tester(tester)
        return HttpResponseRedirect('/projects/%i/testers' % project.pk)
    else:
        raise Http404
Example #4
def Get_Marshall(GetScript, SetScript, TestScript, User, Group):
    arg_names = list(locals().keys())
    if GetScript is None:
        GetScript = ''
    if SetScript is None:
        SetScript = ''
    if TestScript is None:
        TestScript = ''
    if User is None:
        User = ''
    if Group is None:
        Group = ''

    retval = 0
    (retval, GetScript, SetScript, TestScript, User, Group,
     Result) = Get(GetScript, SetScript, TestScript, User, Group)

    GetScript = protocol.MI_String(GetScript)
    SetScript = protocol.MI_String(SetScript)
    TestScript = protocol.MI_String(TestScript)
    User = protocol.MI_String(User)
    Group = protocol.MI_String(Group)
    Result = protocol.MI_String(Result)
    arg_names.append('Result')

    retd = {}
    ld = locals()
    for k in arg_names:
        retd[k] = ld[k]
    return retval, retd
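A minimal standalone sketch of the pattern Get_Marshall uses: record the parameter names with locals() on entry, then rebuild a dict of the normalised values from locals() on exit. The function and values below are illustrative only.

# Sketch: record parameter names with locals() at entry, then rebuild a dict
# of the normalised values from locals() at exit, as Get_Marshall does above.
def marshall(GetScript=None, SetScript=None, User=None):
    arg_names = list(locals().keys())   # parameter names only, taken at entry
    if GetScript is None:
        GetScript = ''
    if SetScript is None:
        SetScript = ''
    if User is None:
        User = ''
    ld = locals()                       # re-read after normalisation
    return {k: ld[k] for k in arg_names}

print(marshall(GetScript='echo get'))
# {'GetScript': 'echo get', 'SetScript': '', 'User': ''}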
Example #5
 def _reset_database(self, conn_string):
     conn_pieces = urlparse.urlparse(conn_string)
     if conn_string.startswith('sqlite'):
         # We can just delete the SQLite database, which is
         # the easiest and cleanest solution
         db_path = conn_pieces.path.strip('/')
         if db_path and os.path.exists(db_path):
             os.unlink(db_path)
         # No need to recreate the SQLite DB. SQLite will
         # create it for us if it's not there...
     elif conn_string.startswith('mysql'):
         # We can execute the MySQL client to destroy and re-create
         # the MYSQL database, which is easier and less error-prone
         # than using SQLAlchemy to do this via MetaData...trust me.
         database = conn_pieces.path.strip('/')
         loc_pieces = conn_pieces.netloc.split('@')
         host = loc_pieces[1]
         auth_pieces = loc_pieces[0].split(':')
         user = auth_pieces[0]
         password = ""
         if len(auth_pieces) > 1:
             if auth_pieces[1].strip():
                 password = "-p%s" % auth_pieces[1]
         sql = ("drop database if exists %(database)s; "
                "create database %(database)s;") % locals()
         cmd = ("mysql -u%(user)s %(password)s -h%(host)s "
                "-e\"%(sql)s\"") % locals()
         exitcode, out, err = execute(cmd)
         self.assertEqual(0, exitcode)
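A hedged sketch of the same connection-string parsing with urllib.parse (Python 3), which exposes username, password and hostname directly instead of splitting netloc by hand; the URL is an example value.

# Sketch: pull user, password, host and database name out of a connection
# string with urllib.parse instead of splitting netloc by hand.
from urllib.parse import urlparse

conn_string = "mysql://openstack:secret@db.example.com/nova"  # example URL
pieces = urlparse(conn_string)

user = pieces.username             # 'openstack'
password = pieces.password or ""   # 'secret'
host = pieces.hostname             # 'db.example.com'
database = pieces.path.strip("/")  # 'nova'
print(user, password, host, database)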
Example #6
    def migrate_instance_start(self, context, instance_uuid,
                               floating_addresses,
                               rxtx_factor=None, project_id=None,
                               source=None, dest=None):
        # We only care if floating_addresses are provided and we're
        # switching hosts
        if not floating_addresses or (source and source == dest):
            return

        LOG.info(_("Starting migration network for instance"
                   " %(instance_uuid)s"), locals())
        for address in floating_addresses:
            floating_ip = self.db.floating_ip_get_by_address(context,
                                                             address)

            if self._is_stale_floating_ip_address(context, floating_ip):
                LOG.warn(_("Floating ip address |%(address)s| no longer "
                           "belongs to instance %(instance_uuid)s. Will not"
                           "migrate it "), locals())
                continue

            interface = CONF.public_interface or floating_ip['interface']
            fixed_ip = self.db.fixed_ip_get(context,
                                            floating_ip['fixed_ip_id'],
                                            get_network=True)
            self.l3driver.remove_floating_ip(floating_ip['address'],
                                             fixed_ip['address'],
                                             interface,
                                             fixed_ip['network'])

            # NOTE(wenjianhn): Make sure this address will not be bound to the
            # public interface when nova-network restarts on the dest compute node
            self.db.floating_ip_update(context,
                                       floating_ip['address'],
                                       {'host': None})
Example #7
    def autoinc_sql(self, table, column):
        # To simulate auto-incrementing primary keys in Oracle, we have to
        # create a sequence and a trigger.
        sq_name = self._get_sequence_name(table)
        tr_name = self._get_trigger_name(table)
        tbl_name = self.quote_name(table)
        col_name = self.quote_name(column)
        sequence_sql = """
DECLARE
    i INTEGER;
BEGIN
    SELECT COUNT(*) INTO i FROM USER_CATALOG
        WHERE TABLE_NAME = '%(sq_name)s' AND TABLE_TYPE = 'SEQUENCE';
    IF i = 0 THEN
        EXECUTE IMMEDIATE 'CREATE SEQUENCE "%(sq_name)s"';
    END IF;
END;
/""" % locals()
        trigger_sql = """
CREATE OR REPLACE TRIGGER "%(tr_name)s"
BEFORE INSERT ON %(tbl_name)s
FOR EACH ROW
WHEN (new.%(col_name)s IS NULL)
    BEGIN
        SELECT "%(sq_name)s".nextval
        INTO :new.%(col_name)s FROM dual;
    END;
/""" % locals()
        return sequence_sql, trigger_sql
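The SQL above is assembled with the "%(name)s" % locals() idiom; a minimal standalone illustration with made-up table and column names:

# Sketch of the "%(name)s" % locals() idiom: every local variable becomes
# available to the format string by name.
def autoinc_comment(table, column):
    tbl_name = table.upper()
    col_name = column.upper()
    return "-- trigger fills %(col_name)s on insert into %(tbl_name)s" % locals()

print(autoinc_comment("django_site", "id"))
# -- trigger fills ID on insert into DJANGO_SITE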
Example #8
    def __init__(self, host, name, settings=None):
        Component.__init__(self, yadtshell.settings.SERVICE, host, name)

        settings = settings or {}
        self.needs_services = []
        self.needs_artefacts = []
        self.needs.add(host.uri)

        for k in settings:
            setattr(self, k, settings[k])
        extras = settings.get('extra', [])
        for k in extras:
            if hasattr(self, k):
                getattr(self, k).extend(extras[k])
            else:
                setattr(self, k, extras[k])

        for n in self.needs_services:
            if n.startswith(yadtshell.settings.SERVICE):
                self.needs.add(n % locals())
            else:
                self.needs.add(yadtshell.uri.create(
                    yadtshell.settings.SERVICE, host.host, n % locals()))
        for n in self.needs_artefacts:
            self.needs.add(yadtshell.uri.create(yadtshell.settings.ARTEFACT,
                                                host.host,
                                                n % locals() + "/" + yadtshell.settings.CURRENT))

        self.state = yadtshell.settings.STATE_DESCRIPTIONS.get(
            settings.get('state'),
            yadtshell.settings.UNKNOWN)
        self.script = None
Example #9
def _run_scalpel_paired(align_bams, items, ref_file, assoc_files,
                          region=None, out_file=None):
    """Detect indels with Scalpel.

    This is used for paired tumor / normal samples.
    """
    config = items[0]["config"]
    if out_file is None:
        out_file = "%s-paired-variants.vcf.gz" % os.path.splitext(align_bams[0])[0]
    if not utils.file_exists(out_file):
        with file_transaction(config, out_file) as tx_out_file:
            paired = get_paired_bams(align_bams, items)
            if not paired.normal_bam:
                ann_file = _run_scalpel_caller(align_bams, items, ref_file,
                                               assoc_files, region, out_file)
                return ann_file
            vcfstreamsort = config_utils.get_program("vcfstreamsort", config)
            perl_exports = utils.get_perl_exports(os.path.dirname(tx_out_file))
            tmp_path = "%s-scalpel-work" % utils.splitext_plus(out_file)[0]
            db_file = os.path.join(tmp_path, "main", "somatic.db")
            if not os.path.exists(db_file + ".dir"):
                if os.path.exists(tmp_path):
                    utils.remove_safe(tmp_path)
                opts = " ".join(_scalpel_options_from_config(items, config, out_file, region, tmp_path))
                opts += " --ref {}".format(ref_file)
                opts += " --dir %s" % tmp_path
                # calling scalpel-discovery
                cl = ("{perl_exports} && "
                      "scalpel-discovery --somatic {opts} --tumor {paired.tumor_bam} --normal {paired.normal_bam}")
                do.run(cl.format(**locals()), "Genotyping paired variants with Scalpel", {})
            # filtering to adjust input parameters
            bed_opts = " ".join(_scalpel_bed_file_opts(items, config, out_file, region, tmp_path))
            use_defaults = True
            if use_defaults:
                # Uses default filters but can tweak min-alt-count-tumor and min-phred-fisher
                # to swap precision for sensitivity
                scalpel_tmp_file = os.path.join(tmp_path, "main/somatic.indel.vcf")
            else:
                scalpel_tmp_file = os.path.join(tmp_path, "main/somatic-indel-filter.vcf.gz")
                with file_transaction(config, scalpel_tmp_file) as tx_indel_file:
                    cmd = ("{perl_exports} && "
                           "scalpel-export --somatic {bed_opts} --ref {ref_file} --db {db_file} "
                           "--min-alt-count-tumor 5 --min-phred-fisher 10 --min-vaf-tumor 0.1 "
                           "| bgzip -c > {tx_indel_file}")
                    do.run(cmd.format(**locals()), "Scalpel somatic indel filter", {})
            scalpel_tmp_file = bgzip_and_index(scalpel_tmp_file, config)
            scalpel_tmp_file_common = bgzip_and_index(os.path.join(tmp_path, "main/common.indel.vcf"), config)
            compress_cmd = "| bgzip -c" if out_file.endswith("gz") else ""
            bcftools_cmd_chi2 = get_scalpel_bcftools_filter_expression("chi2", config)
            bcftools_cmd_common = get_scalpel_bcftools_filter_expression("reject", config)
            fix_ambig = vcfutils.fix_ambiguous_cl()
            cl2 = ("vcfcat <({bcftools_cmd_chi2} {scalpel_tmp_file}) "
                   "<({bcftools_cmd_common} {scalpel_tmp_file_common}) | "
                   " {fix_ambig} | {vcfstreamsort} {compress_cmd} > {tx_out_file}")
            do.run(cl2.format(**locals()), "Finalising Scalpel variants", {})

    ann_file = annotation.annotate_nongatk_vcf(out_file, align_bams,
                                               assoc_files.get("dbsnp"), ref_file,
                                               config)
    return ann_file
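The pipelines above are assembled with cl.format(**locals()); a tiny standalone sketch of that idiom with made-up paths (an explicit mapping would work the same way and is easier to audit):

# Sketch of cmd.format(**locals()): named placeholders are filled from the
# local variables in scope at the point format() is called.
ref_file = "/refs/hg38.fa"     # example paths/options
tumor_bam = "tumor.bam"
normal_bam = "normal.bam"
opts = "--two-pass"

cl = ("scalpel-discovery --somatic {opts} --ref {ref_file} "
      "--tumor {tumor_bam} --normal {normal_bam}")
print(cl.format(**locals()))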
Example #10
    def test_reusable_scope(self):

        scope = let(a="tacos", b="soup", c="cake")
        d = "godzilla"

        with scope:
            self.assertEquals(a, "tacos")
            self.assertEquals(b, "soup")
            self.assertEquals(c, "cake")
            self.assertEquals(d, "godzilla")

            a = "fajita"
            b = "stew"
            d = "mothra"

        self.assertFalse("a" in locals())
        self.assertFalse("b" in locals())
        self.assertFalse("c" in locals())
        self.assertTrue("d" in locals())

        self.assertFalse("a" in globals())
        self.assertFalse("b" in globals())
        self.assertFalse("c" in globals())
        self.assertFalse("d" in globals())

        self.assertEquals(d, "mothra")

        with scope:
            self.assertEquals(a, "fajita")
            self.assertEquals(b, "stew")
            self.assertEquals(c, "cake")
            self.assertEquals(d, "mothra")
Example #11
def add_param_writer_object(name, base_state, typ, var_type = "", var_index = None, root_node = False):
    var_type1 = "_" + var_type if var_type != "" else ""
    if isinstance(var_index, Number):
        var_index = "uint32_t(" + str(var_index) +")"
    set_varient_index = "serialize(_out, " + var_index +");\n" if var_index is not None else ""
    ret = Template(reindent(4,"""
        ${base_state}__${name}$var_type1 start_${name}$var_type() && {
            $set_varient_index
            return { _out, std::move(_state) };
        }
    """)).substitute(locals())
    if not is_stub(typ) and is_local_type(typ):
        ret += add_param_writer_basic_type(name, base_state, typ, var_type, var_index, root_node)
    if is_stub(typ):
        set_command = "_state.f.end(_out);" if var_type != "" else ""
        return_command = "{ _out, std::move(_state._parent) }" if var_type != "" and not root_node else "{ _out, std::move(_state) }"
        ret += Template(reindent(4, """
            template<typename Serializer>
            after_${base_state}__${name} ${name}$var_type(Serializer&& f) && {
                $set_varient_index
                f(writer_of_$typ(_out));
                $set_command
                return $return_command;
            }""")).substitute(locals())
    return ret
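A minimal standalone sketch of the Template(...).substitute(locals()) mechanism this generator relies on, with illustrative names:

# Sketch: string.Template with substitute(locals()), the same mechanism the
# generator above uses to emit its C++ snippets.
from string import Template

def make_getter(name, base_state):
    signature = "%s__%s" % (base_state, name)
    return Template(
        "$signature start_$name() && {\n"
        "    return { _out, std::move(_state) };\n"
        "}\n"
    ).substitute(locals())

print(make_getter("size", "writer"))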
Example #12
    def create_hatch(self, hatch):
        sidelen = 72
        if hatch in self._hatches:
            return self._hatches[hatch]
        name = 'H%d' % len(self._hatches)
        self._pswriter.write("""\
  << /PatternType 1
     /PaintType 2
     /TilingType 2
     /BBox[0 0 %(sidelen)d %(sidelen)d]
     /XStep %(sidelen)d
     /YStep %(sidelen)d

     /PaintProc {
        pop
        0 setlinewidth
""" % locals())
        self._pswriter.write(
            self._convert_path(Path.hatch(hatch), Affine2D().scale(72.0)))
        self._pswriter.write("""\
          stroke
     } bind
   >>
   matrix
   makepattern
   /%(name)s exch def
""" % locals())
        self._hatches[hatch] = name
        return name
Example #13
def convert_to_kallisto(data):
    files = dd.get_input_sequence_files(data)
    if len(files) == 2:
        fq1, fq2 = files
    else:
        fq1, fq2 = files[0], None
    samplename = dd.get_sample_name(data)
    work_dir = dd.get_work_dir(data)
    kallisto_dir = os.path.join(work_dir, "kallisto", samplename, "fastq")
    out_file = os.path.join(kallisto_dir, "barcodes.batch")
    umis = config_utils.get_program("umis", dd.get_config(data))
    if file_exists(out_file):
        return out_file
    if dd.get_minimum_barcode_depth(data):
        cb_histogram = os.path.join(work_dir, "umis", samplename, "cb-histogram.txt")
        cb_cutoff = dd.get_minimum_barcode_depth(data)
        cb_options = "--cb_histogram {cb_histogram} --cb_cutoff {cb_cutoff}"
        cb_options = cb_options.format(**locals())
    else:
        cb_options = ""
    cmd = ("{umis} kallisto {cb_options} --out_dir {tx_kallisto_dir} {fq1}")
    with file_transaction(data, kallisto_dir) as tx_kallisto_dir:
        safe_makedir(tx_kallisto_dir)
        message = ("Transforming %s to Kallisto singlecell format. "
                   % fq1)
        do.run(cmd.format(**locals()), message)
    return out_file
Example #14
    def head(self, **KWS):



        ## CHEETAH: generated from #def head at line 5, col 1.
        trans = KWS.get("trans")
        if (not trans and not self._CHEETAH__isBuffering and not callable(self.transaction)):
            trans = self.transaction # is None unless self.awake() was called
        if not trans:
            trans = DummyTransaction()
            _dummyTrans = True
        else: _dummyTrans = False
        write = trans.response().write
        SL = self._CHEETAH__searchList
        _filter = self._CHEETAH__currentFilter
        
        ########################################
        ## START - generated method body
        
        write(u'''<script type="text/javascript" src="http://maps.google.com/maps/api/js?sensor=false&language=pt-br"></script>
<script type="text/javascript">
  function initialize() {
    var hotel = new google.maps.LatLng(''')
        _v = VFSL([locals()]+SL+[globals(), builtin],"site.latitude",True) # u'$site.latitude' on line 9, col 40
        if _v is not None: write(_filter(_v, rawExpr=u'$site.latitude')) # from line 9, col 40.
        write(u''', ''')
        _v = VFSL([locals()]+SL+[globals(), builtin],"site.longitude",True) # u'$site.longitude' on line 9, col 56
        if _v is not None: write(_filter(_v, rawExpr=u'$site.longitude')) # from line 9, col 56.
        write(u''');
    var myOptions = {
      zoom: 16,
      center: hotel,
      mapTypeId: google.maps.MapTypeId.ROADMAP
    };
    var map = new google.maps.Map(document.getElementById("map_canvas"), myOptions);
    var hotelMarker = new google.maps.Marker({
      position: hotel, 
      map: map, 
      title:"''')
        _v = VFSL([locals()]+SL+[globals(), builtin],"site.name",True) # u'$site.name' on line 19, col 14
        if _v is not None: write(_filter(_v, rawExpr=u'$site.name')) # from line 19, col 14.
        write(u'''"
\t});
\t
\tvar content = "S\xedtio Tur\xedstico: ''')
        _v = VFSL([locals()]+SL+[globals(), builtin],"site.name",True) # u'$site.name' on line 22, col 34
        if _v is not None: write(_filter(_v, rawExpr=u'$site.name')) # from line 22, col 34.
        write(u'''<br>"
\tvar infoWindow = new google.maps.InfoWindow({content: content});
\tinfoWindow.setPosition(hotel);
    infoWindow.open(map);
  }

</script>
''')
        
        ########################################
        ## END - generated method body
        
        return _dummyTrans and trans.response().getvalue() or ""
Example #15
    def handle(self, *args, **options):
        # Remove plugins which yield an error upon importing
        for plugin in Plugin.objects.all():
            try:
                plugin.get_class()
            except ImportError:
                while True:
                    ans = raw_input('Error on importing {plugin.class_name}. Remove? [y/N]'.format(**locals()))
                    ans = ans.strip().lower()

                    if ans in ("", "n"):
                        break
                    elif ans == "y":
                        plugin.delete()
                        break

        # Look for plugins in plugin directory
        plugin_files = os.listdir(os.path.dirname(plugins.__file__))
        plugin_paths = (os.path.join(os.path.dirname(plugins.__file__), p) for p in plugin_files)
        detected_plugins = get_plugins(filter(is_module, plugin_paths))
        new_plugins = (p for p in detected_plugins if not Plugin.objects.filter(class_name=get_qualified_name(p)).exists())

        for p in new_plugins:
            log.info("Found new plugin: {p}".format(**locals()))

            plugin = Plugin.objects.create(
                label=p.name(),
                class_name=get_qualified_name(p),
                plugin_type=get_plugin_type(p)
            )

            log.info("Created new plugin: {plugin.class_name}".format(**locals()))
Example #16
	def Generate(self):
		#self.ParentNamespace = "Base"
		#self.Namespace = "Base"
		encoding = sys.getfilesystemencoding()
		path = self.path
		if hasattr(path,"decode"): # this is python2. Otherwise this is unicode already
			path = path.decode(encoding)
		exportName = self.export.Name
		if hasattr(exportName,"decode"): # this is python2. Otherwise this is unicode already
			exportName = exportName.decode(encoding)
		dirname = self.dirname
		if hasattr(dirname,"decode"): # this is python2. Otherwise this is unicode already
			dirname = dirname.decode(encoding)
		print("TemplateClassPyExport",path + exportName)
		# Imp.cpp must not exist, neither in path nor in dirname
		if(not os.path.exists(path + exportName + "Imp.cpp")):
			if(not os.path.exists(dirname + exportName + "Imp.cpp")):
				file = open(path + exportName + "Imp.cpp",'wb')
				generateBase.generateTools.replace(self.TemplateImplement,locals(),file)
				file.close()
		file = open(path + exportName + ".cpp",'wb')
		generateBase.generateTools.replace(self.TemplateModule,locals(),file)
		file.close()
		file = open(path + exportName + ".h",'wb')
		generateBase.generateTools.replace(self.TemplateHeader,locals(),file)
		#file.write( generateBase.generateTools.replace(self.Template,locals()))
		file.close()
Example #17
def buildIndirectMaps(infile, outfile, track):
    '''build a map between query and target, linking
    via intermediate targets.'''

    to_cluster = True

    path = P.asList(PARAMS["%s_path" % track])

    E.info("path=%s" % str(path))

    statement = []

    for stage, part in enumerate(path):
        filename = part + ".over.psl.gz"
        if not os.path.exists(filename):
            raise ValueError(
                "required file %s for %s (stage %i) not exist." % (filename, outfile, stage))

        if stage == 0:
            statement.append( '''gunzip < %(filename)s''' % locals() )
        else:
            statement.append( '''
               pslMap stdin <(gunzip < %(filename)s) stdout
            ''' % locals() )

    statement.append("gzip")

    statement = " | ".join(statement) + " > %(outfile)s " % locals()

    P.run()
Example #18
    def getmethods(self, modulePath, Class):
        '''
         This will get the list of methods in given module or class.
         It accepts the module path and class name. If there is no
         class name then it has be mentioned as None.
        '''
        methodList = []
        moduleList = modulePath.split("/")
        newModule = ".".join([moduleList[len(moduleList) - 2],moduleList[len(moduleList) - 1]])
        print "Message : Method list is being obatined , Please wait ..."
        try :
            if Class :
                Module = __import__(moduleList[len(moduleList) - 1], globals(), locals(), [Class], -1)
                ClassList = [x.__name__ for x in Module.__dict__.values() if inspect.isclass(x)]
                self.ClassList = ClassList
                Class = vars(Module)[Class]
                methodList = [x.__name__ for x in Class.__dict__.values() if inspect.isfunction(x)]
            else :
                Module = __import__(moduleList[len(moduleList) - 1], globals(), locals(),[moduleList[len(moduleList) - 2]], -1)
                methodList = [x.__name__ for x in Module.__dict__.values() if inspect.isfunction(x)]
                ClassList = [x.__name__ for x in Module.__dict__.values() if inspect.isclass(x)]
                self.ClassList = ClassList
        except :
            print "Error : " +str(sys.exc_info()[1])


        self.method = methodList
        return self.method
Example #19
    def init_host_floating_ips(self):
        """Configures floating ips owned by host."""

        admin_context = context.get_admin_context()
        try:
            floating_ips = self.db.floating_ip_get_all_by_host(admin_context,
                                                               self.host)
        except exception.NotFound:
            return

        for floating_ip in floating_ips:
            fixed_ip_id = floating_ip.get('fixed_ip_id')
            if fixed_ip_id:
                try:
                    fixed_ip = self.db.fixed_ip_get(admin_context,
                                                    fixed_ip_id,
                                                    get_network=True)
                except exception.FixedIpNotFound:
                    msg = _('Fixed ip %(fixed_ip_id)s not found') % locals()
                    LOG.debug(msg)
                    continue
                interface = CONF.public_interface or floating_ip['interface']
                try:
                    self.l3driver.add_floating_ip(floating_ip['address'],
                                                  fixed_ip['address'],
                                                  interface,
                                                  fixed_ip['network'])
                except exception.ProcessExecutionError:
                    LOG.debug(_('Interface %(interface)s not found'), locals())
                    raise exception.NoFloatingIpInterface(interface=interface)
Example #20
    def getargs(self, moduleName, className, method):
        '''
          This will return the list of arguments in a method of python module of class.
          It accepts method list as an argument.
        '''
        print "Message : Argument list is being obtained for each method"
        methodArgsDict = {}
        if className == None:
            moduleList = moduleName.split(".")
            for index,name in enumerate(method) :
                Module = __import__(moduleList[len(moduleList) -1], globals(), locals(), [moduleList[len(moduleList) -2]], -1)
                try :
                    names = vars(Module)[name]
                except KeyError:
                    print "Message : method '" + name + "'does not exists,Continued with including it. "
                    return False
                argumentList = inspect.getargspec(names) #inspect.getargvalues(name)
                methodArgsDict[name] = argumentList[0]
        else :
            moduleList = moduleName.split(".")
            for index,name in enumerate(method) :
                Module = __import__(moduleList[len(moduleList) - 1], globals(), locals(), [className], -1)
                Class = getattr(Module, className)
                try :
                    names = vars(Class)[name]
                except KeyError :
                    print "Message : method '" + name + "'does not exists,Continued with include it."
                    return False

                argumentList = inspect.getargspec(names) #inspect.getargvalues(name)
                methodArgsDict[name] = argumentList[0]

        return methodArgsDict
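inspect.getargspec is removed in recent Python 3 releases; a hedged standalone sketch of the same argument-name lookup with inspect.signature, run here against the stdlib json module:

# Sketch: collect parameter names per function with inspect.signature, the
# Python 3 replacement for the inspect.getargspec call used above.
import inspect
import json

def method_args(obj, method_names):
    args = {}
    for name in method_names:
        func = getattr(obj, name)
        args[name] = list(inspect.signature(func).parameters)
    return args

print(method_args(json, ["dumps", "loads"]))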
Example #21
	def remove_file(self, path):
		path, name = self.sanitize(path)

		metadata = self._get_metadata(path)

		file_path = os.path.join(path, name)
		if not os.path.exists(file_path):
			return
		if not os.path.isfile(file_path):
			raise RuntimeError("{name} in {path} is not a file".format(**locals()))

		try:
			os.remove(file_path)
		except Exception as e:
			raise RuntimeError("Could not delete {name} in {path}".format(**locals()), e)

		if name in metadata:
			if "hash" in metadata[name]:
				hash = metadata[name]["hash"]
				for m in metadata.values():
					if not "links" in m:
						continue
					for link in m["links"]:
						if "rel" in link and "hash" in link and (link["rel"] == "model" or link["rel"] == "machinecode") and link["hash"] == hash:
							m["links"].remove(link)
			del metadata[name]
			self._save_metadata(path, metadata)
Example #22
def get_ebook_count(field, key, publish_year=None):
    ebook_count_db = get_ebook_count_db()

    # Handle the case of ebook_count_db_parametres not specified in the config.
    if ebook_count_db is None:
        return 0

    def db_lookup(field, key, publish_year=None):
        sql = "select sum(ebook_count) as num from subjects where field=$field and key=$key"
        if publish_year:
            if isinstance(publish_year, (tuple, list)):
                sql += " and publish_year between $y1 and $y2"
                (y1, y2) = publish_year
            else:
                sql += " and publish_year=$publish_year"
        return list(ebook_count_db.query(sql, vars=locals()))[0].num

    total = db_lookup(field, key, publish_year)
    if total:
        return total
    elif publish_year:
        sql = "select ebook_count as num from subjects where field=$field and key=$key limit 1"
        if len(list(ebook_count_db.query(sql, vars=locals()))) != 0:
            return 0
    years = find_ebook_count(field, key)
    if not years:
        return 0
    for year, count in sorted(years.iteritems()):
        ebook_count_db.query(
            "insert into subjects (field, key, publish_year, ebook_count) values ($field, $key, $year, $count)",
            vars=locals(),
        )

    return db_lookup(field, key, publish_year)
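The $field/$key placeholders above are bound from vars=locals() by web.py's database layer; a hedged standalone sketch of the same named-parameter idea using stdlib sqlite3 and an in-memory table:

# Sketch: named query parameters bound from a dict, the same idea as
# web.py's query(sql, vars=locals()), using stdlib sqlite3 in memory.
import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("create table subjects (field text, key text, ebook_count int)")
conn.execute("insert into subjects values ('subject', 'history', 42)")

field, key = "subject", "history"
row = conn.execute(
    "select sum(ebook_count) from subjects where field = :field and key = :key",
    {"field": field, "key": key},
).fetchone()
print(row[0])  # 42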
Example #23
    def get(self, section, key, **kwargs):
        section = str(section).lower()
        key = str(key).lower()

        d = self.defaults

        # first check environment variables
        option = self._get_env_var_option(section, key)
        if option:
            return option

        # ...then the config file
        if self.has_option(section, key):
            return expand_env_var(
                ConfigParser.get(self, section, key, **kwargs))

        # ...then commands
        option = self._get_cmd_option(section, key)
        if option:
            return option

        # ...then the defaults
        if section in d and key in d[section]:
            return expand_env_var(d[section][key])

        else:
            logging.warn("section/key [{section}/{key}] not found "
                         "in config".format(**locals()))

            raise AirflowConfigException(
                "section/key [{section}/{key}] not found "
                "in config".format(**locals()))
Example #24
    def send_new_premium_email(self, force=False):
        subs = UserSubscription.objects.filter(user=self.user)
        message = """Woohoo!
        
User: %(user)s
Feeds: %(feeds)s

Sincerely,
NewsBlur""" % {'user': self.user.username, 'feeds': subs.count()}
        mail_admins('New premium account', message, fail_silently=True)
        
        if not self.user.email or not self.send_emails:
            return
        
        sent_email, created = MSentEmail.objects.get_or_create(receiver_user_id=self.user.pk,
                                                               email_type='new_premium')
        
        if not created and not force:
            return
        
        user    = self.user
        text    = render_to_string('mail/email_new_premium.txt', locals())
        html    = render_to_string('mail/email_new_premium.xhtml', locals())
        subject = "Thanks for going premium on NewsBlur!"
        msg     = EmailMultiAlternatives(subject, text, 
                                         from_email='NewsBlur <%s>' % settings.HELLO_EMAIL,
                                         to=['%s <%s>' % (user, user.email)])
        msg.attach_alternative(html, "text/html")
        msg.send(fail_silently=True)
        
        logging.user(self.user, "~BB~FM~SBSending email for new premium: %s" % self.user.email)
Example #25
def helpModule(module):
    """
    Print the first text chunk for each established method in a module.

    module: module to write output from, format "folder.folder.module"
    """

    # split module.x.y into "from module.x import y" 
    t = module.split(".")
    importName = "from " + ".".join(t[:-1]) + " import " + t[-1]

    # dynamically do the import
    exec(importName)
    moduleName = t[-1]

    # extract all local functions from the imported module, 
    # referenced here by locals()[moduleName]
    functions = [locals()[moduleName].__dict__.get(a) for a in dir(locals()[moduleName]) if isinstance(locals()[moduleName].__dict__.get(a), types.FunctionType)]

    # pull all the doc strings out from said functions and print the top chunk
    for function in functions:
        base = function.func_doc
        base = base.replace("\t", " ")
        doc = "".join(base.split("\n\n")[0].strip().split("\n"))
        # print function.func_name + " : " + doc
        print helpers.formatLong(function.func_name, doc)
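helpModule leans on exec() plus locals() lookups; a hedged standalone sketch of the same docstring summary using importlib and inspect (run here against the stdlib textwrap module), which avoids exec entirely:

# Sketch: print the first docstring paragraph of every function in a module,
# using importlib/inspect rather than exec() and locals() lookups.
import importlib
import inspect

def help_module(dotted_name):
    module = importlib.import_module(dotted_name)
    for name, func in inspect.getmembers(module, inspect.isfunction):
        doc = (func.__doc__ or "").replace("\t", " ")
        summary = " ".join(doc.split("\n\n")[0].split())
        print("%s : %s" % (name, summary))

help_module("textwrap")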
Example #26
 def __init__(self, browser, remote, capabilities, user_agent, environment,
              ignore_ssl, fresh_instance):
     data = {}
     for arg in inspect.getargspec(HolmiumConfig.__init__).args[1:]:
         setattr(self, arg, locals()[arg])
         data[arg] = locals()[arg]
     super(HolmiumConfig, self).__init__(**data)
Example #27
def Get_Marshall(FeatureName, Enable = False, Instances = None, RunIntervalInSeconds = 300, Tag = "default", Format = "tsv", FilterType = "filter_changetracking", Configuration = None):
    arg_names = list(locals().keys())
    init_vars(Instances)
    
    CurrentInstances = Instances
    FeatureName = protocol.MI_String(FeatureName)
    Enable = protocol.MI_Boolean(Enable)
    for instance in CurrentInstances:
        instance['InstanceName'] = protocol.MI_String(instance['InstanceName'])
        instance['ClassName'] = protocol.MI_String(instance['ClassName'])
        if instance['Properties'] is not None and len(instance['Properties']):
            instance['Properties'] = protocol.MI_StringA(instance['Properties'])
    Instances = protocol.MI_InstanceA(CurrentInstances)
    RunIntervalInSeconds = protocol.MI_Uint64(RunIntervalInSeconds)
    Tag = protocol.MI_String(Tag)
    Format = protocol.MI_String(Format)
    FilterType = protocol.MI_String(FilterType)

    if Configuration is None:
        Configuration = []
    if Configuration is not None and len(Configuration):
        Configuration = protocol.MI_StringA(Configuration)

    retd = {}
    ld = locals()
    for k in arg_names:
        retd[k] = ld[k]
    return 0, retd
Example #28
def is_installed(pkg_name):
    """
    Check if a Portage package is installed.
    """
    manager = MANAGER

    with settings(hide("running", "stdout", "stderr", "warnings"),
                  warn_only=True):
        res = run("%(manager)s -p %(pkg_name)s" % locals())

    if not res.succeeded:
        return False

    if pkg_name.startswith("="):
        # The =, which is required when installing/checking for absolute
        # versions, will not appear in the results.
        pkg_name = pkg_name[1:]

    match = re.search(
            r"\n\[ebuild +(?P<code>\w+) *\] .*%(pkg_name)s.*" % locals(),
            res.stdout)
    if match and match.groupdict()["code"] in ("U", "R"):
        return True
    else:
        return False
Example #29
    def _import_transporter(self, transporter):
        """Imports transporter module and class, returns class.
        Input value can be:
        * a full/absolute module path, like
          "MyTransporterPackage.SomeTransporterClass"
        """
        transporter_class = None
        module = None
        alternatives = []
        default_prefix = 'cloud_sync_app.transporter.transporter_'
        if not transporter.startswith(default_prefix):
            alternatives.append('%s%s' % (default_prefix, transporter))
        for module_name in alternatives:
            try:
                module = __import__(module_name, globals(), locals(), ["TRANSPORTER_CLASS"], -1)
            except ImportError:
                import traceback
                traceback.print_exc()
                pass

        if not module:
            msg = "The transporter module '%s' could not be found." % transporter
            if len(alternatives) > 1:
                msg = '%s Tried (%s)' % (msg, ', '.join(alternatives))
            self.logger.error(msg)
        else:
            try:
                classname = module.TRANSPORTER_CLASS
                module = __import__(module_name, globals(), locals(), [classname])
                transporter_class = getattr(module, classname)
            except AttributeError:
                self.logger.error("The Transporter module '%s' was found, but its Transporter class '%s' could not be found." % (module_name, classname))
        return transporter_class
Example #30
def dump_database(database, path='/var/backups/postgres', filename='', format='plain', port=None):
    """
    Generate a database dump at a remote destination path
    Example::

        import fabtools

        fabtools.postgres.dump_database('myapp', path='/var/backups/postgres', filename='myapp-backup.sql')
        # If no filename is specified, it will be saved with a date-based name: database-201312010000.sql
        fabtools.postgres.dump_database('myapp', path='/var/backups/postgres') 
        # If no path is specified, it will be saved at '/var/backups/postgres'
        fabtools.postgres.dump_database('myapp')
        # You can specify pg_dump's custom format (restorable with pg_restore)
        fabtools.postgres.dump_database('myapp', format='custom')

    """
    command_options = []
    port_option = _port_option(port)

    if port_option is not None:
        command_options.append(port_option)

    command_options = ' '.join(command_options)

    if fabtools.files.is_dir(path):
        if database_exists(database):
            date = _date.today().strftime("%Y%m%d%H%M")
            if not filename:
                filename = '%(database)s-%(date)s.sql' % locals()
            dest = quote(posixpath.join(path, filename))
            _run_as_pg('pg_dump %(command_options)s %(database)s --format=%(format)s --blobs --file=%(dest)s' % locals())
        else:
            abort('''Database does not exist: %(database)s''' % locals())
    else:
        abort('''Destination path does not exist: %(path)s''' % locals())
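The pg_dump command above interpolates a quoted destination via % locals(); a minimal standalone sketch of that precaution with shlex.quote and made-up values (the original helpers such as _run_as_pg are not redefined here):

# Sketch: quote interpolated values before assembling a shell command, the
# same precaution the quote(...) call above applies to the dump file name.
from shlex import quote
from datetime import date

database = "myapp"                       # example values
path = "/var/backups/postgres"
filename = "%s-%s.sql" % (database, date.today().strftime("%Y%m%d%H%M"))
dest = quote("%s/%s" % (path, filename))

cmd = "pg_dump %(database)s --format=plain --blobs --file=%(dest)s" % locals()
print(cmd)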