Code example #1
File: pastevent.py Project: wearhacks/main_wearhacks
    def saveProject(self, args):
        print u'%s' % args
        try:
            obj = Project.objects.get(project_name = args['name'],url = args['url'])
            return None
        except Project.DoesNotExist:
            obj = Project(
                submitted_event = self.event,
                project_name = args['name'],
                short_description = args['desc'],
                url = args['url'],
                project_type = args['type']
            )
            img_temp = NamedTemporaryFile(delete=True)

            if args['image']:
                img_temp.write(urllib2.urlopen(args['image']).read())
                img_temp.flush()
                img_temp.seek(0)
                img_filepath = urlparse(args['image']).path.split('/')[-1]
                obj.image.save(img_filepath, File(img_temp))
                obj.save()
            else :
                obj.save()
            return obj
Code example #2
 def test_match_node_project_node(self):
     """
     Tests applying a project to a simple match node query for L{QueryEvaluator}.
     """
     gs = GraphStructure()
     q = QueryEvaluator(gs)
     proj = Project()
     attrs1 = {'Label' : 'Person', 'Name' : 'Alice', 'Salary' : '500'}
     attrs2 = {'Label' : 'Person', 'Name' : 'Bob', 'Salary' : '1000'}
     attrs3 = {'Label' : 'Person', 'Name' : 'John', 'Salary' : '20000'}
     attrs4 = {'Label' : 'Person', 'Name' : 'Donnie', 'Salary' : '100000000'}
     node1 = q.add_node(attrs1)
     node2 = q.add_node(attrs2)
     node3 = q.add_node(attrs3)
     node4 = q.add_node(attrs4)
     # Test matching all nodes and then projecting to a single attribute. 
     match_lst1 = q.match_node({'Label' : 'Person'})
     # Project on salary field. 
     filtered_lst1 = proj.project(match_lst1, ['Salary'])
     self.assertEqual(filtered_lst1, [{'Salary' : '500'}, 
                                      {'Salary' : '1000'}, 
                                      {'Salary' : '20000'}, 
                                      {'Salary' : '100000000'}])
     # Test matching a single node and then projecting to a single attribute. 
     match_lst2 = q.match_node({'Name' : 'Alice'})
     filtered_lst2 = proj.project(match_lst2, ['Salary'])
     self.assertEqual(filtered_lst2, [{'Salary' : '500'}])
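
Based on the assertions in this test, the projection step simply keeps the requested attributes of each matched node. A minimal sketch of such a project method, assuming nodes are plain dictionaries (the real implementation backing this test may differ):

class Project:
    def project(self, matched_nodes, attributes):
        # Keep only the requested attributes of each matched node (sketch).
        return [{attr: node[attr] for attr in attributes if attr in node}
                for node in matched_nodes]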
Code example #3
    def load_project(filename):
        if not os.path.exists(filename):
            raise Exception("File '{0}' not exists".format(filename))

        tree = parse(filename)
        project_node = tree.getroot()
        if project_node.tag != "project":
            raise Exception("Project don't have 'project' root tag")

        project_name = project_node.get("name", "")
 
        files_node = project_node.find("files")

        files_nodes = []
        if files_node is not None:  # element truthiness is False when it has no children
            files_nodes = files_node.findall("file")

        project = Project(filename, project_name)

        for file_el in files_nodes:
            file_path = file_el.get("path")
            if not file_path:
                raise Exception("Some file in project has unspecified 'path' attribute")
            project.add_file(file_path)

        return project
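
A small usage sketch for the loader above, assuming parse comes from xml.etree.ElementTree and that load_project is callable as shown; the XML layout is inferred from the tags and attributes the code reads, and the file name and contents are illustrative:

import os
import tempfile

xml_text = (
    '<project name="demo">\n'
    '    <files>\n'
    '        <file path="src/main.py"/>\n'
    '        <file path="src/util.py"/>\n'
    '    </files>\n'
    '</project>\n'
)
path = os.path.join(tempfile.mkdtemp(), 'demo.xml')
with open(path, 'w') as f:
    f.write(xml_text)

# load_project is assumed here to be a static/class-level helper.
project = load_project(path)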
Code example #4
    def unassign(self, name, project=None):
        '''
        Vnasserver unassign
        Parameters:
            name: name of the vnasserver.
            project: project from which the vnasserver is unassigned.
        Returns:
            JSON response of the unassign request.
        '''
        vnasserverList = self.getvnasserverList(name)
        
        
        request =  {'vnas_server' : vnasserverList }

        if(project):
            proj_object = Project(self.__ipAddr, self.__port)
            pro_uri = proj_object.project_query(project)
                
                      
        body = json.dumps(request)
        (s, h) = common.service_json_request(self.__ipAddr, self.__port,
                                             "PUT",
                                             VnasServer.URI_VNASSERVER_UNASSIGN.format(pro_uri),
                                             body)
        
     
        if(s is not None and len(s) > 0):
            print s
            o = common.json_decode(s)
            return o
Code example #5
File: views.py Project: qzio/timesheet_py
def project_edit(request, project_id):
    p = Project(request.user.id)
    project = p.project(project_id)
    c = RequestContext(request, {
        "project": project
    })
    return render_to_response('project.html', c)
Code example #6
File: manager.py Project: I2PC/scipion
    def createProject(self, projectName, runsView=1, 
                      hostsConf=None, protocolsConf=None, location=None):
        """Create a new project.
        The confs dict can contain custom .conf files
        for menus, protocols, or hosts.
        """
        # If location is not None create project on it (if exists)
        if location is None:
            projectPath = self.getProjectPath(projectName)
        else:
            projectPath = os.path.join(location, projectName)

        # JMRT: Right now the project.create function changes the current
        # working dir (cwd) to the project location; let's store the cwd
        # and restore it after the creation
        cwd = os.getcwd()
        project = Project(projectPath)
        project.create(runsView=runsView, 
                       hostsConf=hostsConf, 
                       protocolsConf=protocolsConf)
        # If location is not the default one create a symlink on self.PROJECTS directory
        if projectPath != self.getProjectPath(projectName):
            # JMRT: Let's create the link to the absolute path, since relative
            # can be broken in systems with different mount points
            pwutils.path.createAbsLink(os.path.abspath(projectPath), 
                                       self.getProjectPath(projectName))

        os.chdir(cwd)  # Restore the cwd changed during project creation

        return project
Code example #7
File: HTTPServer.py Project: csantosbh/Lauen
 def do_GET(self):
     if self.path == '/lau_canvas.nmf' or self.path == '/lau_canvas.pexe' or self.path == '/game':
         # TODO this will be requested the first time
         # the user opens the editor. But since there's no
         # project, it will issue an error. Fix this.
         projFolder = Project.getProjectFolder()
         path = projFolder+'/build/nacl'+self.path
         f = self.send_headers(path)
         if projFolder != None and f != None:
             self.copyfile(f, self.wfile)
             f.close()
             pass
     elif self.path.startswith('/default_assets/') or self.path.startswith('/assets/') or self.path.startswith('/scenes/'):
         projFolder = Project.getProjectFolder()
         path = projFolder+self.path
         f = self.send_headers(path)
         if projFolder != None and f != None and Utils.FileExists(projFolder+self.path):
             self.copyfile(f, self.wfile)
             f.close()
             pass
     else:
         return SimpleHTTPServer.SimpleHTTPRequestHandler.do_GET(self)
         #self.copyfile(open('./'+self.path, 'r'), self.wfile)
         pass
     pass
Code example #8
File: potato.py Project: SomePlaceElse/SomePlaceElse
    def get_result(self, tid):
        self.tid = tid
        potato = Project(user_name=tid, SUPPORT=0.3)

        #potato.shoot_lazy()
        potato.shoot_eager()

        result = potato.ranked_recommendations
        sorted_list = sorted(result.items(), key= lambda (k,v) : v, reverse=True)
        
        """
            turn the dic into array of items, get its first set 
            and then its second value to get the high value in 
            the result map
        """
        highest_value =  sorted_list[0][1]
        self.top = []
        for key, value in result.items():
            if highest_value == value:
                self.top.append(key)

        data = Data([
            Bar(
                x = result.keys(),
                y = result.values()
                )
            ])
        self.unique_url = py.plot(data, filename='basic-bar', auto_open=False)
        self.html_url = self.unique_url + 'html'
        self.embed_url = self.unique_url + '.embed?width=750&height=550'
Code example #9
    def list(self, project, tenant):
        '''
        This function returns the list of consistency group URIs
        for the given project.
        parameters:
            project: Name of the project path.
        return
            returns a list of consistency group ids.
        '''
        if(tenant == None):
            tenant = ""
        projobj = Project(self.__ipAddr, self.__port)
        fullproj = tenant+"/"+project
        projuri = projobj.project_query(fullproj)

        
        (s, h) = common.service_json_request(self.__ipAddr, self.__port, "GET", 
                                             self.URI_CONSISTENCY_GROUPS_SEARCH.format(projuri), None)
        o = common.json_decode(s)
        if not o:
            return []
        
        congroups=[]
        resources = common.get_node_value(o, "resource")
        for resource in resources:
            congroups.append(resource["id"])
       
        return congroups
Code example #10
def search_by_project(projectName, resourceSearchUri, ipAddr, port):
        # check if the URI passed has both project and name parameters
        strUri = str(resourceSearchUri)
        if(strUri.__contains__("search") and strUri.__contains__("?project=")):
            # Get the project URI
            from project import Project
            proj_obj = Project(ipAddr, port)
            project_uri = proj_obj.project_query(projectName)

            (s, h) = service_json_request(
                ipAddr, port, "GET",
                resourceSearchUri.format(project_uri), None)

            o = json_decode(s)
            if not o:
                return None

            resources = get_node_value(o, "resource")

            resource_uris = []
            for resource in resources:
                resource_uris.append(resource["id"])
            return resource_uris
        else:
            raise SOSError(SOSError.VALUE_ERR, "Search URI " + strUri +
                           " is not in the expected format, it should end" +
                           " with ?project={0}")
Code example #11
File: naming_test.py Project: 0-T-0/TACTIC
    def test_all(my):
        
        my.transaction = Transaction.get(create=True)
        try:
            my.create_snapshot()

            my._test_base_alias()

            my._test_file_naming()
            my._test_file_naming_base()
            my._test_dir_naming()
            # this comes after test_dir_naming so the file_object doesn't get polluted
            my._test_file_naming_manual_version()
            my._test_get_naming()
            my._test_checkin_type()
            my._test_naming_util()
        finally:
            my.transaction.rollback()
            Project.set_project('unittest')

            my.test_env.delete()
            my.sample3d_env.delete()

        # reset the unittest project type to whatever it was
        """
Code example #12
File: task_slice.py Project: 151706061/invesalius3
    def SetThresholdModes(self, pubsub_evt):
        (thresh_modes_names, default_thresh) = pubsub_evt.data
        self.combo_thresh.SetItems(thresh_modes_names)
        self.threshold_modes_names = thresh_modes_names
        proj = Project()
        if isinstance(default_thresh, int):
            self.combo_thresh.SetSelection(default_thresh)
            (thresh_min, thresh_max) =\
                self.threshold_modes[thresh_modes_names[default_thresh]]
        elif default_thresh in proj.threshold_modes.keys():
            index = self.threshold_modes_names.index(default_thresh)
            self.combo_thresh.SetSelection(index)
            thresh_min, thresh_max = self.threshold_modes[default_thresh]

        elif default_thresh in proj.threshold_modes.values():
            preset_name = proj.threshold_modes.get_key(default_thresh)[0]
            index = self.threshold_modes_names.index(preset_name)
            self.combo_thresh.SetSelection(index)
            thresh_min, thresh_max = default_thresh
        else:
            index = self.threshold_modes_names.index(_("Custom"))
            self.combo_thresh.SetSelection(index)
            thresh_min, thresh_max = default_thresh
            proj.threshold_modes[_("Custom")] = (thresh_min, thresh_max)

        self.gradient.SetMinValue(thresh_min)
        self.gradient.SetMaxValue(thresh_max)
Code example #13
def search_by_project_and_name(projectName, componentName, searchUri,
                               ipAddr, port):

        # check if the URI passed has both project and name parameters
        strUri = str(searchUri)
        if(strUri.__contains__("search") and
           strUri.__contains__("?project=") and strUri.__contains__("&name=")):
            # Get the project URI
            from project import Project
            proj_obj = Project(ipAddr, port)
            project_uri = proj_obj.project_query(projectName)

            (s, h) = service_json_request(
                ipAddr, port, "GET",
                searchUri.format(project_uri, componentName), None)

            o = json_decode(s)
            if not o:
                return None

            resources = get_node_value(o, "resource")
            if(len(resources) > 0):
                component_uri = resources[0]['id']
                return component_uri
        else:
            raise SOSError(SOSError.VALUE_ERR, "Search URI " + strUri +
                           " is not in the expected format, it should end" +
                           " with ?project={0}&name={1}")
Code example #14
    def exportgroup_list(self, project, tenant):
        '''
        This function returns the list of export group URIs
        for the given project.
        parameters:
            project: Name of the project path.
        return
            returns a list of export group ids.
        '''
        if(tenant == None):
            tenant = ""
        projobj = Project(self.__ipAddr, self.__port)
        fullproj = tenant+"/"+project
        projuri = projobj.project_query(fullproj)
        
        uri = self.URI_EXPORT_GROUP_SEARCH
        
        if ('?' in uri):
            uri += '&project=' + projuri 
        else:
            uri += '?project=' + projuri

        (s, h) = common.service_json_request(self.__ipAddr, self.__port, "GET", 
                                             uri, None)
        o = common.json_decode(s)
        if not o:
            return []
        
        exportgroups=[]
        resources = common.get_node_value(o, "resource")
        for resource in resources:
            exportgroups.append(resource["id"])
       
        return exportgroups
Code example #15
    def cluster_create(self, label, tenant, project, datacenter, vcenter):
        tenant_obj = Tenant(self.__ipAddr, self.__port)
        vdatacenterobj = VcenterDatacenter(self.__ipAddr, self.__port)
        projectobj = Project(self.__ipAddr, self.__port)

        if(tenant == None):
            tenant_uri = tenant_obj.tenant_getid()
        else:
            tenant_uri = tenant_obj.tenant_query(tenant)
                
        parms = {'name': label}
        #project
        if(project):
            if(tenant):
                projectname = tenant + "/" + project
            else:
                projectname = "" + "/" + project
            # on failure, the query raises an exception
            parms['project'] = projectobj.project_query(projectname)
            
        #datacenter
        if(datacenter):
            # on failure, the query raises an exception
            parms['vcenter_data_center'] = vdatacenterobj.vcenterdatacenter_query(datacenter, vcenter)

        body = json.dumps(parms)

        (s, h) = common.service_json_request(self.__ipAddr, self.__port,
                                             "POST",
                                             Cluster.URI_TENANTS_CLUSTERS.format(tenant_uri),
                                             body)
        o = common.json_decode(s)
Code example #16
class Projects:

    def __init__(self, filename):
        self.filename = filename
        self.projects = list()
        self.set_tm_file('tm.po')

        if os.path.isfile(filename):
            os.remove(filename)
            
    def set_tm_file(self, filename):
        self.tm_file = filename
        self.tm_project = Project('Translation memory', self.tm_file)

    def add(self, project):
        self.projects.append(project)

    def add_project(self, project_dto, add_source):
        project = Project(project_dto.name, project_dto.filename)
        project.set_add_source(add_source)
        project.add_filesets(project_dto)
        self.add(project)
        logging.debug(project_dto)

    def __call__(self):
        """Proces all projects"""
        for project in self.projects:
            project.do()

        self.create_tm_for_all_projects()

    def create_tm_for_all_projects(self):
        """Creates the TM memory for all projects"""

        for project in self.projects:
            if os.path.isfile(self.tm_file):
                backup_file = 'tm-previous.po'
                shutil.copy(self.tm_file, backup_file)
                cmd = 'msgcat -tutf-8 --use-first -o {0} {1} {2}'
                os.system(cmd.format(self.tm_file,
                                     backup_file,
                                     project.get_filename()))
                os.remove(backup_file)
            else:
                shutil.copy(project.get_filename(), self.tm_file)

        os.system('msgfmt -c --statistics {0}'.format(self.tm_file))

    def statistics(self):
        for project in self.projects:
            project.statistics()

        self.tm_project.statistics()

    def to_tmx(self):
        for project in self.projects:
            project.to_tmx()

        self.tm_project.to_tmx()
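
A usage sketch for the Projects container above; the project names and .po file names are made up, and Project is assumed to expose the do()/get_filename()/statistics()/to_tmx() methods this class relies on:

projects = Projects('projects-output.po')   # removes a stale output file if present
projects.add(Project('Fedora docs', 'fedora.po'))
projects.add(Project('GNOME extras', 'gnome.po'))
projects()                  # process every project, then build the combined translation memory
projects.statistics()
projects.to_tmx()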
Code example #17
File: project_data.py Project: griblik/scratch
def import_from_api(pid):
    '''Download all activity from the Tracker API for project pid'''

    print('Downloading activity for ' + str(pid))
    project = Project(pid)
    project.get_project_data(API_KEY)

    return project
Code example #18
File: views.py Project: qzio/timesheet_py
def projects_stop(request, project_id):
    if (not request.user.is_authenticated()):
        return redirect('/login')
    p = Project(request.user.id)
    msg = "Failed to stop tracking time"
    if (p.stop_tracking(int(project_id))):
        msg = "successfully stopped tracking time"
    return redirect("/?flash=%s" % msg)
Code example #19
File: views.py Project: qzio/timesheet_py
def projects_start(request, project_id):
    if (not request.user.is_authenticated()):
        return redirect('/login')
    p = Project(request.user.id)
    msg = "failed to start project"
    if (p.start_tracking(int(project_id))):
        msg = "successfully started tracking project"
    return redirect("/?flash=%s" % (msg,))
Code example #20
File: query.py Project: minus7/sdk
def execute(key):
    root = os.getcwd()
    proj = Project(root)
    result = proj.get_config(key)
    if result == None:
        exit(1)
    stdout.write(result)
    exit(0)
Code example #21
File: views.py Project: qzio/timesheet_py
def projects_delete(request, project_id):
    if (not request.user.is_authenticated()):
        return redirect('/login')
    p = Project(request.user.id)
    msg = "failed to delete"
    if p.delete(int(project_id)):
        msg ="success deleting project"

    return redirect("/?flash=%s" % msg)
Code example #22
 def __open_project(self, path):
     project = Project(path)
     LastFilesList.instance().add_file(PROJECTS, project.path)
     project_widget = ProjectWidget(project, self.settings)
     project_widget.import_image.connect(lambda file: self.open_image(file, project=project))
     project_widget.calibrate.connect(lambda file: self.calibrate(file, project=project))
     project_widget.math.connect(lambda file: self.plots_math(file, project=project))
     project_widget.finish.connect(lambda file: self.finish_spectrum(file, project=project))
     self.__add_widget(project_widget, project.get_name())
Code example #23
File: utils.py Project: lilipeng/msmbuilder
def get_project_object(traj_directory, conf_filename, out_filename=None):
    """
    This function constructs a msmbuilder.Project object 
    given a directory of trajectories saved as .lh5's. 

    Note that this is only really necessary when a script
    like ConvertDataToLHDF.py converts the data but fails
    to write out the ProjectInfo.yaml file.

    This function can also be used to combine two projects
    by copying and renaming the trajectories in a new 
    folder. Though, it's probably more efficient to just
    do some bash stuff to cat the ProjectInfo.yaml's 
    together and rename the trajectories.
    
    Inputs:
    -------
    1) traj_directory : directory to find the trajectories
    2) conf_filename : file to find the conformation
    3) out_filename [ None ] : if None, then this function 
        does not save the project file, but if given, the
        function will save the project file and also
        return the object

    Outputs:
    -------
    project : msmbuilder.Project object corresponding to 
        your project.
    """

    traj_paths = sorted(os.listdir(traj_directory), key=keynat) # relative to the traj_directory
    traj_paths = [os.path.join(traj_directory, filename) for filename in traj_paths] # relative to current directory

    traj_lengths = []

    for traj_filename in traj_paths: # Get the length of each trajectory
        logger.info(traj_filename)

        if traj_filename.split('.')[-1] in ['hdf', 'h5', 'lh5']:
            with tables.openFile(traj_filename) as f:
                traj_lengths.append(f.root.coordinates.shape[0])

        else:
            traj_lengths.append(md.load(traj_filename).n_frames) 

    project = Project({'conf_filename': conf_filename,
                       'traj_lengths': traj_lengths,
                       'traj_paths': traj_paths,
                       'traj_errors': [None] * len(traj_paths),
                       'traj_converted_from': [ [None] ] * len(traj_paths) })

    if out_filename is None:
        return project
    else:
        project.save( out_filename )
        logger.info('Saved project file to %s', out_filename)
        return project
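
A usage sketch for the helper above, with illustrative paths; it assumes the msmbuilder Project object exposes the usual attributes such as n_trajs:

project = get_project_object('Trajectories/', 'native.pdb',
                             out_filename='ProjectInfo.yaml')
print('Loaded %d trajectories' % project.n_trajs)   # n_trajs is assumed here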
Code example #24
File: parser.py Project: cpfair/powervcwsf
    def _parse_page(self, pid, contents):
        contents = unicodedata.normalize("NFC", contents)
        logger.info(pid)
        project = Project(
            pid=pid,
            name=re.search("<hr /><b>([^<]+)</b>", contents).group(1),
            synopsis=re.search("<td valign=\"top\">Abstract:</td><td>(.+?)</td>", contents).group(1),
            year=int(re.search("CWSF (\d\d\d\d)", contents).group(1)),
            agecat=self._agecats.Find(re.search("<td>Category:</td><td>([^<]+)</td>", contents).group(1))
        )
        self._projects.append(project)

        provterrcodematch = re.search("<td>City:</td><td>([^<]+\s(\w\w))</td>", contents)
        if provterrcodematch:
            project.ProvTerr = self._provterrs.FindByCode(provterrcodematch.group(2))

        region_match = re.search("<td>Region:</td><td>([^<]+)</td>", contents)

        if region_match:
            project.Region = self._resolve_region(region_match.group(1))
        else:
            project.Region = self._resolve_region("Unknown")

        project.Region.MarkAsSeenInYear(project.Year)
        project.Region.ProvTerr = project.ProvTerr

        finalist_names = []
        finalist_names_match = re.search("<h3 style=\"margin-top: 0px;\">([^<]+)</h3>", contents)
        if finalist_names_match:
            finalist_names = finalist_names_match.group(1).split(",")

        for name in finalist_names:
            finalist = Finalist(name.strip(), project=project)
            self._finalists.append(finalist)
            finalist.Participant = self._resolve_participant(finalist)
            project.Finalists.append(finalist)

        divisions_names = re.search("<td>(Challenge|Division):</td><td>(?P<division>[^<]+)</td>", contents).group(2).split('/')
        for name in divisions_names:
            name = name.strip()
            if name == "None":
                continue
            div = self._divisions.Find(name)
            div.MarkAsSeenInYear(project.Year)
            project.Divisions.append(div)

        project.Awards = self._awardsParser.ParseAwards(contents)

        logger.debug("Project: %s (%d)" % (project.Name, project.Year))
        logger.debug("\tRegion: %s (%s)" % (project.Region, project.ProvTerr))
        logger.debug("\tFinalists: %s" % ", ".join([str(x) for x in project.Finalists]))
        logger.debug("\tDivisions: %s" % ", ".join([str(x) for x in project.Divisions]))
        logger.debug("\tSynopsis: %s..." % project.Synopsis[:80])
        logger.debug("\tAwards:")
        for award in project.Awards:
            logger.debug("\t\t%s" % str(award))
Code example #25
File: EditorApp.py Project: pixpil/gii
	def openProject( self, basePath = None ):
		if self.projectLoaded: return Project.get()
		info = Project.findProject( basePath )
		if not info:
			raise Exception( 'no valid gii project found' )
		proj = Project.get()
		proj.load( info['path'] )
		self.projectLoaded = True
		self.registerDataPath( proj.getEnvPath('data') )
		return proj
Code example #26
File: dsmanager.py Project: alvesjnr/Dist-Share
def clone_copy(dist_file,copy_name,clone_name,license=None):

    with open(dist_file) as f:
        project_dict = pickle.loads(f.read())
    
    project = Project(dumped_project=project_dict['project'])
    project = project.clone_copy(copy_name,clone_name)
    project_dict['project'] = project.dumps()

    with open(dist_file,'w') as f:
        f.write(pickle.dumps(project_dict))
Code example #27
File: views.py Project: qzio/timesheet_py
def projects_archive(request, project_id):
    if (not request.user.is_authenticated()):
        return redirect('/login')
    p = Project(request.user.id)

    if p.archive_tracked_times(int(project_id), request.POST["period"], request.POST.getlist("entries")):
        msg = "Archived tracked time for project %s" % project_id
        return redirect("/?flash={0}".format(msg))
    else:
        msg = "Failed to archive {0}".format(project_id)
        return redirect("/projects/{0}/history?flash={1}".format(project_id, msg))
Code example #28
File: main.py Project: coyf/agl
 def init_project(self, value):
     """
     Initialize the Project object.
     :return: void
     """
     self.init_root_path()
     # set parameters for Project's constructor (feel free to add more)
     kwargs = {"project_name": value, "root_path": self.root_path}
     new_project = Project(**kwargs)
     new_project.create_folder()
     AttributeContainer().current_project = new_project
Code example #29
    def exportgroup_create(self, name, project, tenant, varray, exportgrouptype, export_destination=None):
        """
        This function will take the export group name and project name as input
        and create the export group with the given name.
        parameters:
           name : Name of the export group.
           project: Name of the project path.
           tenant: Container tenant name.
        return
            returns with status of creation.
        """
        # check for existence of the export group.
        try:
            status = self.exportgroup_show(name, project, tenant)
        except SOSError as e:
            if e.err_code == SOSError.NOT_FOUND_ERR:
                if tenant is None:
                    tenant = ""

                fullproj = tenant + "/" + project
                projObject = Project(self.__ipAddr, self.__port)
                projuri = projObject.project_query(fullproj)

                varrayObject = VirtualArray(self.__ipAddr, self.__port)
                nhuri = varrayObject.varray_query(varray)

                parms = {"name": name, "project": projuri, "varray": nhuri, "type": exportgrouptype}

                if exportgrouptype and export_destination:
                    if exportgrouptype == "Cluster":
                        cluster_obj = Cluster(self.__ipAddr, self.__port)
                        try:
                            cluster_uri = cluster_obj.cluster_query(export_destination, fullproj)
                        except SOSError as e:
                            raise e
                        parms["clusters"] = [cluster_uri]
                    elif exportgrouptype == "Host":
                        host_obj = Host(self.__ipAddr, self.__port)
                        try:
                            host_uri = host_obj.query_by_name(export_destination)
                        except SOSError as e:
                            raise e
                        parms["hosts"] = [host_uri]

                body = json.dumps(parms)
                (s, h) = common.service_json_request(self.__ipAddr, self.__port, "POST", self.URI_EXPORT_GROUP, body)

                o = common.json_decode(s)
                return o
            else:
                raise e

        if status:
            raise SOSError(SOSError.ENTRY_ALREADY_EXISTS_ERR, "Export group with name " + name + " already exists")
Code example #30
File: allPythonContent.py Project: Mondego/pyreco
class SbtRunner(OnePerWindow):

    @classmethod
    def is_sbt_running_for(cls, window):
        return cls(window).is_sbt_running()

    def __init__(self, window):
        self._project = Project(window)
        self._proc = None

    def project_root(self):
        return self._project.project_root()

    def sbt_command(self, command):
        cmdline = self._project.sbt_command()
        if command is not None:
            cmdline.append(command)
        return cmdline

    def start_sbt(self, command, on_start, on_stop, on_stdout, on_stderr):
        if self.project_root() and not self.is_sbt_running():
            self._proc = self._try_start_sbt_proc(self.sbt_command(command),
                                                  on_start,
                                                  on_stop,
                                                  on_stdout,
                                                  on_stderr)

    def stop_sbt(self):
        if self.is_sbt_running():
            self._proc.terminate()

    def kill_sbt(self):
        if self.is_sbt_running():
            self._proc.kill()

    def is_sbt_running(self):
        return (self._proc is not None) and self._proc.is_running()

    def send_to_sbt(self, input):
        if self.is_sbt_running():
            self._proc.send(input)

    def _try_start_sbt_proc(self, cmdline, *handlers):
        try:
            return SbtProcess.start(cmdline,
                                    self.project_root(),
                                    self._project.settings,
                                    *handlers)
        except OSError:
            msg = ('Unable to find "%s".\n\n'
                   'You may need to specify the full path to your sbt command.'
                   % cmdline[0])
            sublime.error_message(msg)
Code example #31
class SbtRunner(OnePerWindow):
    @classmethod
    def is_sbt_running_for(cls, window):
        return cls(window).is_sbt_running()

    def __init__(self, window):
        self._project = Project(window)
        self._proc = None
        self._history = []

    def project_root(self):
        return self._project.project_root()

    def sbt_command(self, command):
        cmdline = self._project.sbt_command()
        if command is not None:
            cmdline.append(command)
        return cmdline

    def start_sbt(self, command, on_start, on_stop, on_stdout, on_stderr):
        if self.project_root() and not self.is_sbt_running():
            self._proc = self._try_start_sbt_proc(self.sbt_command(command),
                                                  on_start, on_stop, on_stdout,
                                                  on_stderr)

    def stop_sbt(self):
        if self.is_sbt_running():
            self._proc.terminate()

    def kill_sbt(self):
        if self.is_sbt_running():
            self._proc.kill()

    def is_sbt_running(self):
        return (self._proc is not None) and self._proc.is_running()

    def send_to_sbt(self, input):
        if self.is_sbt_running():
            self.add_to_history(input)
            self._proc.send(input)

    def _try_start_sbt_proc(self, cmdline, *handlers):
        try:
            return SbtProcess.start(cmdline, self.project_root(),
                                    self._project.settings, *handlers)
        except OSError:
            msg = (
                'Unable to find "%s".\n\n'
                'You may need to specify the full path to your sbt command.' %
                cmdline[0])
            sublime.error_message(msg)

    def add_to_history(self, input):
        if input != '' and not input.isspace():
            input = input.rstrip('\n\r')
            self._history = [cmd for cmd in self._history if cmd != input]
            self._history.insert(0, input)
            history_length = self._project.settings.get('history_length') or 20
            del self._history[history_length:]

    def clear_history(self):
        self._history.clear()

    def get_history(self):
        return self._history
Code example #32
File: main.py Project: huxuan/PrefixTreeESpan
def prefixtreeespan():
    """
    PrefixTreeESpan algorithm (Part 1)
    Get first level frequent nodes and corresponding projection database
    """
    # Read the data file and compute useful params
    growth_elems_count = {}
    tree_data = file(TREEDATA)
    for tree_string in tree_data.readlines():

        # Get label list
        tree_labels = tree_string.strip().split(' ')
        # print "Labels:", tree_labels

        # DFS to compute scope
        root = Node(tree_labels[0])
        stack = [(root, 0)]
        tree = [root]
        index = 1
        while stack:
            elem = Node(tree_labels[index])
            if elem.label == '-1':
                stack[-1][0].scope = index
                stack.pop()
            else:
                stack.append((elem, index))
            tree.append(elem)
            index += 1

        # Add the tree to the global TREE_DB
        TREE_DB.append(tree)

        # print '=' * 80
        # print "F1 Elements:"
        # for elem in tree:
        #     print tree.index(elem), elem.label, elem.scope

        # Count all nodes as growth elements in a set
        growth_elems = set([ (elem.label, 0)
            for elem in tree
                if elem.label != '-1' ])
        for growth_elem in growth_elems:
            if growth_elem not in growth_elems_count:
                growth_elems_count[growth_elem] = 0
            growth_elems_count[growth_elem] += 1

    # print '=' * 80
    # print "F1 Growth Elements Count:"
    # for growth_elem in growth_elems_count:
    # print growth_elem, growth_elems_count[growth_elem]

    # Get the frequent first-level nodes
    fre_elems = set([ growth_elem
        for growth_elem in growth_elems_count
            if growth_elems_count[growth_elem] >= MINSUP ])

    # print '=' * 80
    # print "F1 Frequent Elements:"
    # print fre_elems

    # Generate the projection database for each frequent node
    for fre_elem in fre_elems:

        # print '=' * 80
        # print "F1 Frequent Element:"
        # print fre_elem

        pre_tree = [fre_elem[0], '-1']
        output_pre_tree(pre_tree)

        pros_db = []
        for i in xrange(len(TREE_DB)):
            tree = TREE_DB[i]
            for j in xrange(len(tree)):
                if tree[j].label == fre_elem[0] and tree[j + 1].label != '-1':
                    pros = Project(i)
                    pros.add(j + 1, tree[j].scope)
                    pros_db.append(pros)

        # print '=' * 80
        # print 'F1 Projected Database:'
        # for pros in pros_db:
        #     print pros

        get_fre(pre_tree, 1, pros_db)
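
For reference, the tree encoding this code expects in TREEDATA: each line is a preorder label list in which '-1' closes the most recently opened node. A made-up example line and the scopes the DFS above would assign:

# 'A B -1 C -1 -1' encodes root A with two children B and C.
# The DFS above assigns scope values A.scope = 5, B.scope = 2, C.scope = 4
# (the index of each node's closing '-1' in the label list).
example_tree_string = 'A B -1 C -1 -1'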
Code example #33
    cppunit_prefix=os.path.join(sandbox, "local"),
    packageWildcard='*_setup.exe',
    downloadPlatform='win',
    extraLibOptions=
    'crossmingw=1 release=1 sandbox_path=%s audio_backend=portaudio xmlbackend=both'
    % sandbox,
    extraAppOptions=
    'crossmingw=1 release=1 sandbox_path=%s external_dll_path=%s' %
    (sandbox, sandbox + 'local/bin'),
)
repositories = localDefinitions['repositories'].split()

client = Client(localDefinitions['name'])
client.brief_description = localDefinitions['description']

clam = Task(project=Project('CLAM'),
            client=client,
            task_name='with svn update')
clam.set_check_for_new_commits(
    checking_cmd=
    'cd %(sandbox)s && svn status -u %(repositories)s | grep \'[*!]\'' %
    localDefinitions,
    minutes_idle=15)
clam.add_subtask(
    'List of new commits',
    [
        'cd %(sandbox)s/' % localDefinitions,
    ] + [
        # 'true' is needed so that testfarm does not catch the 'cd'
        {
            CMD: 'true ; cd %s; svn log -r BASE:HEAD; cd -' % repo,
Code example #34
def _licenses(args):
    '''Generate licenses for this project.

Use command hdinfo to get hardware information.
    '''
    logging.info('Generate licenses for project %s ...', args.project)

    project = Project()
    project.open(args.project)

    licpath = os.path.join(args.project, 'licenses')
    if not os.path.exists(licpath):
        logging.info('Make output path of licenses: %s', licpath)
        os.mkdir(licpath)

    if args.expired is None:
        fmt = ''
    else:
        fmt = '*TIME:%.0f\n' % \
              time.mktime(time.strptime(args.expired, '%Y-%m-%d'))

    if project.get('disable_restrict_mode'):
        fmt = '%s*FLAGS:%c' % (fmt, 1)

    if args.bind_disk:
        fmt = '%s*HARDDISK:%s' % (fmt, args.bind_disk)

    if args.bind_mac:
        fmt = '%s*IFMAC:%s' % (fmt, args.bind_mac)

    if args.bind_ipv4:
        fmt = '%s*IFIPV4:%s' % (fmt, args.bind_ipv4)

    # if args.bind_ipv6:
    #     fmt = '%s*IFIPV6:%s' % (fmt, args.bind_ipv6)

    if args.bind_domain:
        fmt = '%s*DOMAIN:%s' % (fmt, args.bind_domain)

    # if args.bind_file:
    #     if os.path.exists(args.bind_file):
    #         f = open(args.bind_file, 'rb')
    #         s = f.read()
    #         f.close()
    #         if sys.version_info[0] == 3:
    #             fmt = '%s*FIXKEY:%s;%s' % (fmt, key, s.decode())
    #         else:
    #             fmt = '%s*FIXKEY:%s;%s' % (fmt, key, s)
    #     else:
    #         raise RuntimeError('Bind file %s not found' % bindfile)

    # Prefix of registration code
    fmt = fmt + '*CODE:'
    capsule = build_path(project.capsule, args.project)
    for rcode in args.codes:
        output = os.path.join(licpath, rcode)
        if not os.path.exists(output):
            logging.info('Make path: %s', output)
            os.mkdir(output)

        licfile = os.path.join(output, license_filename)
        licode = fmt + rcode
        txtinfo = licode.replace('\n', r'\n')
        if args.expired:
            txtinfo = '"Expired:%s%s"' % (args.expired,
                                          txtinfo[txtinfo.find(r'\n') + 2:])
        logging.info('Generate license: %s', txtinfo)
        make_project_license(capsule, licode, licfile)
        logging.info('Write license file: %s', licfile)

        logging.info('Write information to %s.txt', licfile)
        with open(os.path.join(licfile + '.txt'), 'w') as f:
            f.write(txtinfo)

    logging.info('Generate %d licenses OK.', len(args.codes))
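
For illustration, the fmt string assembled above is a marker-prefixed spec that is finally concatenated with '*CODE:' and the registration code; with an expiry date and a bound disk serial it would look roughly like this (values made up, and the TIME value depends on the local timezone):

# e.g. --expired 2020-01-01 --bind-disk 100304PBN2081SF3NJ5T, code "Customer-A"
example_license_spec = '*TIME:1577836800\n*HARDDISK:100304PBN2081SF3NJ5T*CODE:Customer-A'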
Code example #35
 def _reporter(self, view):
     for window in maybe(view.window()):
         return Project(window).error_reporter
Code example #36
    def get_from_db_naming(self, protocol):

        project_code = Project.get_project_code()
        if project_code in ["admin", "sthpw"]:
            return None

        # get the naming object
        naming = Naming.get(self.sobject, self.snapshot)
        if not naming:
            return None

        if not self.verify_checkin_type(naming):
            return None

        if protocol == 'sandbox':
            mode = 'sandbox_dir'
        else:
            mode = 'dir'

        # Provide a mechanism for a custom class
        naming_class = naming.get_value("class_name", no_exception=True)
        #naming_class = "pyasm.biz.TestFileNaming"
        if naming_class:
            kwargs = {
                'sobject': self.sobject,
                'snapshot': self.snapshot,
                'file_object': self._file_object,
                #'ext': self.get_ext(),
                'file_type': self.file_type,
                'mode': mode
            }
            naming = Common.create_from_class_path(naming_class, [], kwargs)
            dirname = naming.get_dir()
            if dirname:
                return dirname


        # provide a mechanism for a custom client side script
        script_path = naming.get_value("script_path", no_exception=True)
        if script_path:
            project_code = self.sobject.get_project_code()
            input = {
                'sobject': self.sobject,
                'snapshot': self.snapshot,
                'file_object': self._file_object,
                #'ext': self.get_ext(),
                'file_type': self.file_type,
                'mode': mode,
                'project': project_code
            }
            from tactic.command import PythonCmd

            cmd = PythonCmd(script_path=script_path, input=input)
            results = cmd.execute()
            if results:
                return results


        naming_util = NamingUtil()

        naming_expr = ''
        if protocol == 'sandbox':
            naming_expr = naming.get_value("sandbox_dir_naming")

        if not naming_expr:
            naming_expr = naming.get_value("dir_naming")

        # so it can take the default
        if not naming_expr:
            return None

        file_type = self.get_file_type()

        alias = naming.get_value("base_dir_alias", no_exception=True)

        # build the dir name
        dir_name = naming_util.naming_to_dir(naming_expr, self.sobject, self.snapshot, file=self._file_object, file_type=file_type)

        return dir_name
Code example #37
d = input(">>> codecool_bp.find_mentor_by_full_name(self, 'Brian Python'): ")
if d == "4":
    pass
else:
    codecool_bp.find_mentor_by_full_name('Brian Python')

pause()
e = input("""It's 9 in the morning. The overall energy level is {}.
The day starts with a coding dojo.

>>> coding_dojo = Project("Miki", codecool_bp.students, 0, 100): """.format(
    codecool_bp.overall_energy()))
if e == "5":
    pass
else:
    coding_dojo = Project("Miki", codecool_bp.students, 0, 100)
# print("Overall energy level decreased with {} points.".format())

pause()
print("OK, it was tiring, let's have a coffee!")
f = input("\n>>> coffee_break = Break(): ")
if f == "6":
    pass
else:
    student_energy = [
        student.change_levels("energy_level", 2)
        for student in codecool_bp.students
    ]
    print('''
Having a coffee increases the energy level of students!
New energy levels:
Code example #38
def _build(args):
    '''Build project, obfuscate all scripts in the project.'''
    project = Project()
    project.open(args.project)
    logging.info('Build project %s ...', args.project)

    capsule = build_path(project.capsule, args.project)
    logging.info('Use capsule: %s', capsule)

    output = build_path(project.output, args.project) \
        if args.output is None else args.output
    logging.info('Output path is: %s', output)

    if not args.only_runtime:
        files = project.get_build_files(args.force)
        src = project.src
        soutput = os.path.join(output, os.path.basename(src)) \
            if project.get('is_package') else output

        logging.info('Save obfuscated scripts to "%s"', soutput)
        if not os.path.exists(soutput):
            os.makedirs(soutput)

        logging.info('Read public key from capsule')
        prokey = get_product_key(capsule)

        logging.info('%s increment build',
                     'Disable' if args.force else 'Enable')
        logging.info('Search scripts from %s', src)

        logging.info('Obfuscate scripts with mode:')
        if hasattr(project, 'obf_mod'):
            obf_mod = project.obf_mod
        else:
            obf_mod = project.obf_module_mode == 'des'
        if hasattr(project, 'wrap_mode'):
            wrap_mode = project.wrap_mode
            obf_code = project.obf_code
        elif project.obf_code_mode == 'wrap':
            wrap_mode = 1
            obf_code = 1
        else:
            wrap_mode = 0
            obf_code = 0 if project.obf_code_mode == 'none' else 1

        def v(t):
            return 'on' if t else 'off'

        logging.info('Obfuscating the whole module is %s', v(obf_mod))
        logging.info('Obfuscating each function is %s', v(obf_code))
        logging.info('Autowrap each code object mode is %s', v(wrap_mode))

        entry = os.path.abspath(project.entry) if project.entry else None
        protection = project.cross_protection \
            if hasattr(project, 'cross_protection') else 1
        for x in files:
            a, b = os.path.join(src, x), os.path.join(soutput, x)
            logging.info('\t%s -> %s', x, b)

            d = os.path.dirname(b)
            if not os.path.exists(d):
                os.makedirs(d)

            pcode = entry and (os.path.abspath(a) == entry) and protection
            encrypt_script(prokey,
                           a,
                           b,
                           obf_code=obf_code,
                           obf_mod=obf_mod,
                           wrap_mode=wrap_mode,
                           protection=pcode,
                           rpath=project.runtime_path)

        logging.info('%d scripts has been obfuscated', len(files))
        project['build_time'] = time.time()
        project.save(args.project)

        if project.entry:
            make_entry(project.entry,
                       project.src,
                       output,
                       rpath=project.runtime_path,
                       ispackage=project.get('is_package'))

    if not args.no_runtime:
        routput = os.path.join(output, os.path.basename(project.src)) \
            if project.get('is_package') else output
        if not os.path.exists(routput):
            logging.info('Make path: %s', routput)
            os.mkdir(routput)
        logging.info('Make runtime files to %s', routput)
        make_runtime(capsule, routput)
        if project.get('disable_restrict_mode'):
            licode = '*FLAGS:%c*CODE:PyArmor-Project' % chr(1)
            licfile = os.path.join(routput, license_filename)
            logging.info('Generate no restrict mode license file: %s', licfile)
            make_project_license(capsule, licode, licfile)

    else:
        logging.info('\tIn order to import obfuscated scripts, insert ')
        logging.info('\t2 lines in entry script:')
        logging.info('\t\tfrom pytransform import pyarmor_runtime')
        logging.info('\t\tpyarmor_runtime()')

    logging.info('Build project OK.')
Code example #39
def _info(args):
    '''Show project information'''
    project = Project()
    project.open(args.project)
    logging.info('Project %s information\n%s', args.project, project.info())
Code example #40
 def loadProject(self, projId, **kwargs):
     """ Retrieve a project object, given its id. """
     project = Project(self.getProjectPath(projId))
     project.load(**kwargs)
     return project
Code example #41
File: dump.py Project: ES-Alexander/productive-flow
import os, sys

argc = len(sys.argv)
name = '_main'
path = 'projects'
if argc > 1:
    name = sys.argv[1]
    if argc > 2:
        path = sys.argv[2]

if '/' in path and not os.path.isfile(path + '/{}.txt'.format(name)):
    # parent needs to know about new sub_project being added to
    prev_name_ind = path.rindex('/')
    prev_name = path[prev_name_ind:]
    prev_path = path[:prev_name_ind]
    prev = Project(prev_name, path=prev_path)
    proj = prev.create_sub_project(name, path=path)
else:
    # adding to main, or adding to known sub_project
    proj = Project(name, path=path)

section = '-' * 50
print('',
      '#' * 50,
      '',
      'Enter item names, with optional elaboration.',
      "To quit, enter nothing as the name of the next item.",
      'To elaborate, enter parameters and values in the form:',
      'param = value,',
      'making sure to denote values with appropriate Python syntax',
      "(e.g. my_str = 'String',).\n",
Code example #42
from project import Project
from pyfmi import load_fmu
model = load_fmu(Project.get_fmu("PS_40TCL_om12.fmu"))
import logging
import time

from algo_procedures.double_dqn_2_delta_procedure import Double_DQN_2_Delta_Wrapper
from algo_procedures.dqn_2_delta_procedure import DQN_2_Delta_Wrapper
from algo_procedures.dqn_2_procedure import DQN_2_Wrapper
from pipeline.experiment import GymExperiment
from pipeline.ps_env import save_ps_output

if __name__ == '__main__':
    env_config = {
        "env_name": "PSwithFMU-v0",
        "entry_point": "pipeline:PSEnvV1",
        'p_reff': 2.3,
        'time_step': 1,
        'log_level': logging.INFO,
        'compute_reward': None,
        'p_reff_amplitude': 0,
        'p_reff_period': 200,
        'get_seed': lambda: round(time.time()),
        'path': Project.get_fmu("PS_40TCL_om12.fmu"),
        'simulation_start_time': 0
    }
    test_env_config = {
        "entry_point": "pipeline:PSEnvV1",
        'p_reff': 2.3,
        'time_step': 1,
        'log_level': logging.INFO,
Code example #43
def project_without_dependencies(cartridge_cmd, tmpdir):
    project = Project(cartridge_cmd, 'empty-project', tmpdir, 'cartridge')

    remove_all_dependencies(project)
    return project
Code example #44
def default_project(cartridge_cmd, module_tmpdir):
    project = Project(cartridge_cmd, 'default-project', module_tmpdir,
                      'cartridge')
    return project
Code example #45
    outdir = None
    outfile = None
if grid_search:
    from studies.DT_studies import DT_grid_search
    best_params = DT_grid_search(data,
                                 data_opts=opts,
                                 k=10,
                                 n_repeats=10,
                                 filename=outfile,
                                 seed=split_seed)
else:
    best_params = {"max_depth": None, "ccp_alpha": 0.0}

tree = Project(data,
               "tree",
               best_params,
               opts,
               split_seed=split_seed,
               verbosity=verbosity)
tree.evaluate_model_accuracy(outdir=outdir)
if verbosity > 0:
    print('\n')

learners.append(tree)

### === Perceptron
###############################################################################
if output:
    outdir = os.path.join(outbase, "Perceptron", "P_" + case)
    outfile = os.path.join(outdir, "_".join(["P", "gridsearch", opt_ID
                                             ])) + ".pdf"
else:
Code example #46
def _licenses(args):
    '''Generate licenses for obfuscated scripts.

Examples,

* Expired license for global capsule

    pyarmor licenses --expired=2018-05-12 Customer-Jordan

* Bind license to fixed harddisk and expired someday for project

    cd projects/myproject
    ./pyarmor licenses -e 2018-05-12 \\
              --bind-disk '100304PBN2081SF3NJ5T' Customer-Tom
    '''
    if os.path.exists(os.path.join(args.project, config_filename)):
        logging.info('Generate licenses for project %s ...', args.project)
        project = Project()
        project.open(args.project)
        capsule = build_path(project.capsule, args.project) \
            if args.capsule is None else args.capsule
    else:
        if args.project != '':
            logging.warning('Ignore option --project, no project in %s',
                            args.project)
        capsule = DEFAULT_CAPSULE if args.capsule is None else args.capsule
        if not (os.path.exists(capsule) and check_capsule(capsule)):
            logging.info('Generate capsule %s', capsule)
            make_capsule(capsule)
        logging.info('Generate licenses with capsule %s ...', capsule)
        project = {
            'disable_restrict_mode': 0 if args.restrict else 1,
        }

    licpath = os.path.join(
        args.project if args.output is None else args.output, 'licenses')
    if os.path.exists(licpath):
        logging.info('Output path of licenses: %s', licpath)
    else:
        logging.info('Make output path of licenses: %s', licpath)
        os.mkdir(licpath)

    if args.expired is None:
        fmt = ''
    else:
        fmt = '*TIME:%.0f\n' % \
              time.mktime(time.strptime(args.expired, '%Y-%m-%d'))

    if project.get('disable_restrict_mode'):
        logging.info('The license files generated is in disable restrict mode')
        fmt = '%s*FLAGS:%c' % (fmt, 1)
    else:
        logging.info('The license files generated is in restrict mode')

    if args.bind_disk:
        fmt = '%s*HARDDISK:%s' % (fmt, args.bind_disk)

    if args.bind_mac:
        fmt = '%s*IFMAC:%s' % (fmt, args.bind_mac)

    if args.bind_ipv4:
        fmt = '%s*IFIPV4:%s' % (fmt, args.bind_ipv4)

    # if args.bind_ipv6:
    #     fmt = '%s*IFIPV6:%s' % (fmt, args.bind_ipv6)

    if args.bind_domain:
        fmt = '%s*DOMAIN:%s' % (fmt, args.bind_domain)

    if args.bind_file:
        bind_file, bind_key = args.bind_file.split(';', 2)
        if os.path.exists(bind_file):
            f = open(bind_file, 'rb')
            s = f.read()
            f.close()
            if sys.version_info[0] == 3:
                fmt = '%s*FIXKEY:%s;%s' % (fmt, bind_key, s.decode())
            else:
                fmt = '%s*FIXKEY:%s;%s' % (fmt, bind_key, s)
        else:
            raise RuntimeError('Bind file %s not found' % bind_file)

    # Prefix of registration code
    fmt = fmt + '*CODE:'

    for rcode in args.codes:
        output = os.path.join(licpath, rcode)
        if not os.path.exists(output):
            logging.info('Make path: %s', output)
            os.mkdir(output)

        licfile = os.path.join(output, license_filename)
        licode = fmt + rcode
        txtinfo = licode.replace('\n', r'\n')
        if args.expired:
            txtinfo = '"Expired:%s%s"' % (args.expired,
                                          txtinfo[txtinfo.find(r'\n') + 2:])
        logging.info('Generate license: %s', txtinfo)
        make_project_license(capsule, licode, licfile)
        logging.info('Write license file: %s', licfile)

        logging.info('Write information to %s.txt', licfile)
        with open(os.path.join(licfile + '.txt'), 'w') as f:
            f.write(txtinfo)

    logging.info('Generate %d licenses OK.', len(args.codes))
Code example #47
File: manifest_xml.py Project: wangsy93/repo-push
    def _ParseProject(self, node, parent=None, **extra_proj_attrs):
        """
    reads a <project> element from the manifest file
    """
        name = self._reqatt(node, 'name')
        if parent:
            name = self._JoinName(parent.name, name)

        remote = self._get_remote(node)
        if remote is None:
            remote = self._default.remote
        if remote is None:
            raise ManifestParseError("no remote for project %s within %s" %
                                     (name, self.manifestFile))

        revisionExpr = node.getAttribute('revision') or remote.revision
        if not revisionExpr:
            revisionExpr = self._default.revisionExpr
        if not revisionExpr:
            raise ManifestParseError("no revision for project %s within %s" %
                                     (name, self.manifestFile))

        path = node.getAttribute('path')
        if not path:
            path = name
        if path.startswith('/'):
            raise ManifestParseError(
                "project %s path cannot be absolute in %s" %
                (name, self.manifestFile))

        rebase = node.getAttribute('rebase')
        if not rebase:
            rebase = True
        else:
            rebase = rebase.lower() in ("yes", "true", "1")

        sync_c = node.getAttribute('sync-c')
        if not sync_c:
            sync_c = False
        else:
            sync_c = sync_c.lower() in ("yes", "true", "1")

        sync_s = node.getAttribute('sync-s')
        if not sync_s:
            sync_s = self._default.sync_s
        else:
            sync_s = sync_s.lower() in ("yes", "true", "1")

        clone_depth = node.getAttribute('clone-depth')
        if clone_depth:
            try:
                clone_depth = int(clone_depth)
                if clone_depth <= 0:
                    raise ValueError()
            except ValueError:
                raise ManifestParseError('invalid clone-depth %s in %s' %
                                         (clone_depth, self.manifestFile))

        dest_branch = node.getAttribute(
            'dest-branch') or self._default.destBranchExpr

        upstream = node.getAttribute('upstream')

        groups = ''
        if node.hasAttribute('groups'):
            groups = node.getAttribute('groups')
        groups = self._ParseGroups(groups)

        if parent is None:
            relpath, worktree, gitdir, objdir = self.GetProjectPaths(
                name, path)
        else:
            relpath, worktree, gitdir, objdir = \
                self.GetSubprojectPaths(parent, name, path)

        default_groups = ['all', 'name:%s' % name, 'path:%s' % relpath]
        groups.extend(set(default_groups).difference(groups))

        if self.IsMirror and node.hasAttribute('force-path'):
            if node.getAttribute('force-path').lower() in ("yes", "true", "1"):
                gitdir = os.path.join(self.topdir, '%s.git' % path)

        project = Project(manifest=self,
                          name=name,
                          remote=remote.ToRemoteSpec(name),
                          gitdir=gitdir,
                          objdir=objdir,
                          worktree=worktree,
                          relpath=relpath,
                          revisionExpr=revisionExpr,
                          revisionId=None,
                          rebase=rebase,
                          groups=groups,
                          sync_c=sync_c,
                          sync_s=sync_s,
                          clone_depth=clone_depth,
                          upstream=upstream,
                          parent=parent,
                          dest_branch=dest_branch,
                          **extra_proj_attrs)

        for n in node.childNodes:
            if n.nodeName == 'copyfile':
                self._ParseCopyFile(project, n)
            if n.nodeName == 'linkfile':
                self._ParseLinkFile(project, n)
            if n.nodeName == 'annotation':
                self._ParseAnnotation(project, n)
            if n.nodeName == 'project':
                project.subprojects.append(
                    self._ParseProject(n, parent=project))

        return project
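
For reference, a hypothetical manifest fragment that this parser would accept, using only attributes and child elements handled above (names and values are illustrative):

# <project name="platform/build" path="build" revision="refs/heads/master"
#          groups="tools,notdefault" sync-c="true" clone-depth="1">
#   <copyfile src="core/root.mk" dest="Makefile"/>
#   <linkfile src="CleanSpec.mk" dest="CleanSpec.mk"/>
#   <annotation name="branch" value="master"/>
# </project>
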
コード例 #48
0
def _obfuscate(args):
    '''Obfuscate scripts without project'''
    if args.src is None and args.entry is None and not args.scripts:
        raise RuntimeError('No entry script')

    entry = args.entry or (args.scripts and args.scripts[0])
    path = os.path.abspath(
        os.path.dirname(entry) if args.src is None else args.src)
    logging.info('Obfuscate scripts in path "%s"', path)

    capsule = args.capsule if args.capsule else DEFAULT_CAPSULE
    if os.path.exists(capsule) and check_capsule(capsule):
        logging.info('Use cached capsule %s', capsule)
    else:
        logging.info('Generate capsule %s', capsule)
        make_capsule(capsule)

    output = args.output
    if os.path.abspath(output) == path:
        raise RuntimeError('Output path can not be same as src')
    if args.recursive:
        pats = ['global-include *.py', 'prune build', 'prune dist']
        if os.path.abspath(output).startswith(path):
            x = os.path.abspath(output)[len(path):].strip('/\\')
            pats.append('prune %s' % x)
        if hasattr('', 'decode'):
            pats = [p.decode() for p in pats]
        files = Project.build_manifest(pats, path)
    else:
        files = Project.build_globfiles(['*.py'], path)

    logging.info('Save obfuscated scripts to "%s"', output)
    if not os.path.exists(output):
        os.makedirs(output)

    logging.info('Read public key from capsule')
    prokey = get_product_key(capsule)

    logging.info('Obfuscate scripts with default mode')
    for x in files:
        a, b = os.path.join(path, x), os.path.join(output, x)
        logging.info('\t%s -> %s', x, b)
        protection = args.cross_protection and entry \
            and (os.path.abspath(a) == os.path.abspath(entry))

        d = os.path.dirname(b)
        if not os.path.exists(d):
            os.makedirs(d)

        encrypt_script(prokey, a, b, protection=protection)
    logging.info('%d scripts have been obfuscated', len(files))

    make_runtime(capsule, output)

    if entry and entry.endswith('__init__.py') and args.restrict is None:
        logging.info('Disable restrict mode for package by default')
        restrict = 0
    else:
        restrict = 1 if args.restrict is None else args.restrict
        logging.info('Obfuscate scripts with restrict mode %s',
                     'on' if restrict else 'off')
    if not restrict:
        licode = '*FLAGS:%c*CODE:PyArmor-Project' % chr(1)
        licfile = os.path.join(output, license_filename)
        logging.info('Generate no restrict mode license file: %s', licfile)
        make_project_license(capsule, licode, licfile)

    if entry:
        make_entry(os.path.basename(entry), path, output)
    for script in args.scripts[1:]:
        make_entry(os.path.basename(script), path, output)

    logging.info('Obfuscate %d scripts OK.', len(files))
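
A hypothetical way to drive _obfuscate() directly; the attribute names come from the function above, and the values are illustrative:

import argparse

args = argparse.Namespace(
    src=None, entry='main.py', scripts=['main.py'],
    output='dist', capsule=None, recursive=False,
    restrict=None, cross_protection=True)
# _obfuscate(args)   # would obfuscate main.py into ./dist and generate the runtime files
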
コード例 #49
0
class SbtWindowCommand(sublime_plugin.WindowCommand):
    def __init__(self, *args):
        super(SbtWindowCommand, self).__init__(*args)
        self._project = Project(self.window)
        self._runner = SbtRunner(self.window)
        self._sbt_view = SbtView(self.window)
        self._error_view = ErrorView(self.window)
        self._error_reporter = self._project.error_reporter
        self._error_report = self._project.error_report
        self._monitor_compile_output = BuildOutputMonitor(self._project)

    def is_sbt_project(self):
        return self._project.is_sbt_project()

    def is_play_project(self):
        return self._project.is_play_project()

    def is_sbt_running(self):
        return self._runner.is_sbt_running()

    def start_sbt(self, command=None):
        self._runner.start_sbt(command,
                               on_start=self._sbt_view.start,
                               on_stop=self._sbt_view.finish,
                               on_stdout=self._on_stdout,
                               on_stderr=self._on_stderr)

    def stop_sbt(self):
        self._runner.stop_sbt()

    def kill_sbt(self):
        self._runner.kill_sbt()

    def show_sbt(self):
        self._sbt_view.show()

    def hide_sbt(self):
        self._sbt_view.hide()

    def focus_sbt(self):
        self._sbt_view.focus()

    def take_input(self):
        return self._sbt_view.take_input()

    def send_to_sbt(self, cmd):
        self._runner.send_to_sbt(cmd)

    @delayed(0)
    def show_error(self, error):
        self._error_report.focus_error(error)
        self._error_reporter.show_errors()
        self._error_view.show_error(error)
        self.goto_error(error)

    @delayed(0)
    def goto_error(self, error):
        self.window.open_file(error.encoded_position(),
                              sublime.ENCODED_POSITION)

    def show_error_output(self):
        self._error_view.show()

    def setting(self, name):
        return self._project.setting(name)

    def _on_stdout(self, output):
        self._monitor_compile_output(output)
        self._show_output(output)

    def _on_stderr(self, output):
        self._show_output(output)

    @delayed(0)
    def _show_output(self, output):
        output = self._work_around_JLine_bug(output)
        self._sbt_view.show_output(output)

    # If the output is a single character followed by a space and a CR, we are
    # probably seeing a JLine bug which inserts the space/CR pair at column 80
    # of a prompt line. Remove the pair so that it doesn't mess up the display
    # of the prompt line (i.e. hide the text before the CR).
    def _work_around_JLine_bug(self, output):
        return re.sub(r'^(.) \r$', r'\1', output)
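
A quick, illustrative sanity check of the workaround above (not part of the plugin):

import re

def _work_around_JLine_bug(output):
    return re.sub(r'^(.) \r$', r'\1', output)

assert _work_around_JLine_bug('> \r') == '>'    # stray space/CR pair removed
assert _work_around_JLine_bug('[info] Compiling 1 Scala source') == '[info] Compiling 1 Scala source'
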
コード例 #50
0
 def _projects_default(self):
     return [Project(main=self.main)]
コード例 #51
0
    def _ParseProject(self, node, parent=None):
        """
    reads a <project> element from the manifest file
    """
        name = self._reqatt(node, 'name')
        if parent:
            name = self._JoinName(parent.name, name)

        remote = self._get_remote(node)
        if remote is None:
            remote = self._default.remote
        if remote is None:
            raise ManifestParseError, \
                  "no remote for project %s within %s" % \
                  (name, self.manifestFile)

        revisionExpr = node.getAttribute('revision')
        if not revisionExpr:
            revisionExpr = self._default.revisionExpr
        if not revisionExpr:
            raise ManifestParseError, \
                  "no revision for project %s within %s" % \
                  (name, self.manifestFile)

        path = node.getAttribute('path')
        if not path:
            path = name
        if path.startswith('/'):
            raise ManifestParseError, \
                  "project %s path cannot be absolute in %s" % \
                  (name, self.manifestFile)

        rebase = node.getAttribute('rebase')
        if not rebase:
            rebase = True
        else:
            rebase = rebase.lower() in ("yes", "true", "1")

        sync_c = node.getAttribute('sync-c')
        if not sync_c:
            sync_c = False
        else:
            sync_c = sync_c.lower() in ("yes", "true", "1")

        sync_s = node.getAttribute('sync-s')
        if not sync_s:
            sync_s = self._default.sync_s
        else:
            sync_s = sync_s.lower() in ("yes", "true", "1")

        upstream = node.getAttribute('upstream')

        groups = ''
        if node.hasAttribute('groups'):
            groups = node.getAttribute('groups')
        groups = [x for x in re.split(r'[,\s]+', groups) if x]

        if parent is None:
            relpath, worktree, gitdir = self.GetProjectPaths(name, path)
        else:
            relpath, worktree, gitdir = self.GetSubprojectPaths(parent, path)

        default_groups = ['all', 'name:%s' % name, 'path:%s' % relpath]
        groups.extend(set(default_groups).difference(groups))

        project = Project(manifest=self,
                          name=name,
                          remote=remote.ToRemoteSpec(name),
                          gitdir=gitdir,
                          worktree=worktree,
                          relpath=relpath,
                          revisionExpr=revisionExpr,
                          revisionId=None,
                          rebase=rebase,
                          groups=groups,
                          sync_c=sync_c,
                          sync_s=sync_s,
                          upstream=upstream,
                          parent=parent)

        for n in node.childNodes:
            if n.nodeName == 'copyfile':
                self._ParseCopyFile(project, n)
            if n.nodeName == 'annotation':
                self._ParseAnnotation(project, n)
            if n.nodeName == 'project':
                project.subprojects.append(
                    self._ParseProject(n, parent=project))

        return project
コード例 #52
0
    def load_pdef(self, pdef):

        tables = []
        try:
            tables = pdef['dbinfo']['tables']
        except KeyError:
            # 'dbinfo' or 'tables' may be absent in the project definition
            pass

        # print pdef
        # Load the project definition info
        if 'group' not in pdef['project']:
            pdef['project']['group'] = "qdingweb-v3"

        if 'type' not in pdef['dbinfo']:
            pdef['dbinfo']['type'] = "mysql"

        self.project = Project(
            pdef['project']['folder_name'], pdef['project']['name'],
            pdef['project']['desc'], pdef['dbinfo']['type'],
            pdef['dbinfo']['host'], pdef['dbinfo']['port'],
            pdef['dbinfo']['name'], pdef['dbinfo']['username'],
            pdef['dbinfo']['password'], tables, pdef['project']['packageName'],
            self.group_def[pdef['project']['group']])

        db_url = 'mysql://%s:%s@%s:%s/%s'
        schema_sql = """
            select TABLE_SCHEMA,TABLE_NAME,COLUMN_NAME,COLUMN_COMMENT 
            from information_schema.`COLUMNS`  where 
            TABLE_SCHEMA = '%s' 
            """ % self.project.db_name
        if self.project.db_type == 'oracle':
            db_url = 'oracle://%s:%s@%s:%s/%s'
            schema_sql = """
            select a.OWNER,a.TABLE_NAME,a.COLUMN_NAME,b.COMMENTS 
            from all_tab_cols a inner join user_col_comments b on a.COLUMN_NAME = b.COLUMN_NAME
            where a.owner = '%s'
            """ % self.project.db_username

        engine = create_engine(
            db_url %
            (self.project.db_username, self.project.db_password,
             self.project.db_host, self.project.db_port, self.project.db_name),
            encoding='utf8')

        #engine = create_engine('oracle://*****:*****@10.37.223.134:1521/orcl')

        insp = reflection.Inspector.from_engine(engine)
        # print insp.get_table_names()

        self.dbconn = engine.connect()

        self.field_dict = {}
        try:

            results = self.dbconn.execute(schema_sql).fetchall()

            print results
            for r in results:
                # print r[3].encode('utf-8')
                if r[3]:
                    self.field_dict[(r[1], r[2])] = r[3].encode('utf-8')
        except Exception, e:
            print e
            print schema_sql
コード例 #53
0
def _build(args):
    '''Build project, obfuscate all files in the project'''
    project = Project()
    project.open(args.project)
    logging.info('Build project %s ...', args.project)
    capsule = build_path(project.capsule, args.project)

    # 'output' is needed below even when only the runtime files are built
    output = project.output
    if not args.only_runtime:
        mode = project.get_obfuscate_mode()
        files = project.get_build_files(args.force)
        src = project.src
        filepairs = [(os.path.join(src, x), os.path.join(output, x))
                     for x in files]

        logging.info('%s increment build',
                     'Disable' if args.force else 'Enable')
        logging.info('Search scripts from %s', src)
        logging.info('Obfuscate %d scripts with mode %s', len(files), mode)
        for x in files:
            logging.info('\t%s', x)
        logging.info('Save obfuscated scripts to %s', output)

        obfuscate_scripts(filepairs, mode, capsule, output)

        # for x in targets:
        #     output = os.path.join(project.output, x)
        #     pairs = [(os.path.join(src, x), os.path.join(output, x))
        #              for x in files]
        #     for src, dst in pairs:
        #         try:
        #             shutil.copy2(src, dst)
        #         except Exception:
        #             os.makedirs(os.path.dirname(dst))
        #             shutil.copy2(src, dst)
        project['build_time'] = time.time()
        project.save(args.project)

    if not args.no_runtime:
        if project.runtime_path is None:
            routput = output
            logging.info('Make runtime files to %s', routput)
            make_runtime(capsule, routput)
        else:
            routput = os.path.join(args.project, 'runtimes')
            if not os.path.exists(routput):
                logging.info('Make path: %s', routput)
                os.mkdir(routput)
            logging.info('Make runtime files to %s', routput)
            make_runtime(capsule, routput)
        if project.get('disable_restrict_mode'):
            licode = '*FLAGS:%c*CODE:Pyarmor-Project' % chr(1)
            licfile = os.path.join(routput, license_filename)
            logging.info('Generate no restrict mode license file: %s', licfile)
            make_project_license(capsule, licode, licfile)

    if project.entry:
        for x in project.entry.split(','):
            filename = os.path.join(output, x.strip())
            if not os.path.exists(filename):
                shutil.copy(os.path.join(project.src, x.strip()), filename)
            logging.info('Insert bootstrap code to entry script %s', filename)
            make_entry(filename, project.runtime_path)
    else:
        logging.info('\tIn order to import obfuscated scripts, insert ')
        logging.info('\t2 lines in entry script:')
        logging.info('\t\tfrom pytransform import pyarmor_runtime')
        logging.info('\t\tpyarmor_runtime()')

    logging.info('Build project OK.')
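
As the log messages above indicate, when no entry script is configured the caller of the obfuscated package adds the two-line bootstrap itself; a minimal sketch (the package name is illustrative):

# entry.py -- hypothetical caller of the obfuscated scripts in `output`
from pytransform import pyarmor_runtime
pyarmor_runtime()        # initialize the PyArmor runtime before importing obfuscated modules

import mypkg             # illustrative obfuscated module
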
コード例 #54
0
def main(config):

    project = Project()

    if not (project.inputs_dir / "df_folds.csv").is_file():
        print("splitting dataset..")
        data.split_folds(project.inputs_dir)

    fold = config["val_fold"]
    logging.info(f"val fold = {fold}")

    df_folds = pd.read_csv(project.inputs_dir / "df_folds.csv")
    marking = pd.read_csv(project.inputs_dir / "marking.csv")

    df_train = df_folds[df_folds["fold"] != fold]
    df_valid = df_folds[df_folds["fold"] == fold]

    train_dataset = WheatDataset(
        image_ids=df_train["image_id"].values,
        dataframe=marking,
        path=project.inputs_dir / "train",
        transforms=get_train_transforms(),
    )

    valid_dataset = WheatDataset(
        image_ids=df_valid["image_id"].values,
        dataframe=marking,
        path=project.inputs_dir / "train",
        transforms=get_valid_transforms(),
    )

    train_data_loader = DataLoader(
        train_dataset,
        batch_size=config["batch_size"],
        shuffle=False,
        num_workers=4,
        collate_fn=collate_fn,
    )

    valid_data_loader = DataLoader(
        valid_dataset,
        batch_size=config["batch_size"],
        shuffle=False,
        num_workers=4,
        collate_fn=collate_fn,
    )

    model = model_detr.DETRModel(
        num_classes=config["num_classes"], num_queries=config["num_queries"]
    )

    model = model.to(device)
    matcher = HungarianMatcher()
    weight_dict = {"loss_ce": 1, "loss_bbox": 1, "loss_giou": 1}
    losses = ["labels", "boxes", "cardinality"]
    criterion = SetCriterion(
        config["num_classes"] - 1,
        matcher,
        weight_dict,
        eos_coef=config["null_class_coef"],
        losses=losses,
    )
    criterion = criterion.to(device)

    optimizer = torch.optim.AdamW(model.parameters(), lr=config["lr"])

    best_loss = float("inf")
    for epoch in range(config["num_epochs"]):
        train_loss = train_fn(
            train_data_loader, model, criterion, optimizer, config=config, epoch=epoch
        )
        valid_loss = eval_fn(valid_data_loader, model, criterion)

        print(
            f"|EPOCH {epoch+1}| TRAIN_LOSS {train_loss.avg}| VALID_LOSS {valid_loss.avg}|"
        )

        logging.info(
            f"|EPOCH {epoch+1}| TRAIN_LOSS {train_loss.avg}| VALID_LOSS {valid_loss.avg}|"
        )

        if valid_loss.avg < best_loss:
            best_loss = valid_loss.avg
            print(f"New best model in epoch {epoch+1}")
            torch.save(model.state_dict(), project.checkpoint_dir / f"detr_best_{fold}.pth")
コード例 #55
0
ファイル: main.py プロジェクト: huxuan/PrefixTreeESpan
def get_fre(pre_tree, n, pros_db):
    """
    PrefixTreeESpan Algorithm (Part 2)
    Get n+1 order frequent subtree according to
    prefix tree and projection database
    """

    # Count growth elements
    growth_elems_count = {}
    for pros in pros_db:
        tree = TREE_DB[pros.tid]
        # Traverse each projected database
        for i in xrange(len(pros.scope_list)):
            for j in xrange(pros.offset_list[i], pros.scope_list[i]):
                if tree[j].label != '-1':
                    growth_elem = (tree[j].label, i + 1)
                    # Treat the count as a set of tree ids
                    if growth_elem not in growth_elems_count:
                        growth_elems_count[growth_elem] = set([])
                    growth_elems_count[growth_elem].add(pros.tid)

    # print '=' * 80
    # print "F%d Growth Elements Counts:" % (n + 1, )
    # for growth_elem in growth_elems_count:
    #     print growth_elem, len(growth_elems_count[growth_elem])

    # Get frequent growth elements via comparison with MINSUP
    fre_elems = set([ growth_elem
        for growth_elem in growth_elems_count
            if len(growth_elems_count[growth_elem]) >= MINSUP ])

    # print '=' * 80
    # print "F%d Frequent Elements:" % (n + 1, )
    # print fre_elems

    # Get the projection database for each frequent growth element
    for fre_elem in fre_elems:

        # print '=' * 80
        # print "F%d Frequent Element:" % (n + 1, )
        # print fre_elem

        # Generate new prefix tree
        pre_tree_new = pre_tree[:]
        pre_tree_new.insert(- fre_elem[1], fre_elem[0])
        pre_tree_new.insert(- fre_elem[1], '-1')
        # Output the result
        output_pre_tree(pre_tree_new)

        # Generate new projection database
        pros_db_new = []
        for i in xrange(len(pros_db)):
            pros = pros_db[i]
            tree = TREE_DB[pros.tid]
            for j in xrange(len(pros.offset_list)):
                for k in xrange(pros.offset_list[j], pros.scope_list[j]):
                    if tree[k].label == fre_elem[0]:
                        pros_new = Project(pros.tid)
                        if tree[k + 1].label != '-1':
                            pros_new.add(k + 1, tree[k].scope)
                        l = tree[k].scope + 1
                        while l < pros.scope_list[j] and tree[l].label != "-1":
                            pros_new.add(l, tree[l].scope)
                            l = tree[l].scope + 1
                        pros_db_new.append(pros_new)

        # print '=' * 80
        # print 'F%d Projected Database:' % (n + 1, )
        # for pros in pros_db_new:
        #     print pros

        # Get next level frequent subtree
        get_fre(pre_tree_new, n + 1, pros_db_new)

    return 0
コード例 #56
0
ファイル: publish.py プロジェクト: michal-tomlein/gitx
def publish_cmd(args):
    label = "pre" if args.prerelease else None

    project = Project(os.getcwd(), "release", label)

    print "Preparing release {}".format(project.release_tag_name())
    if not args.force:
        helpers.assert_clean()
        helpers.assert_branch(project.release_branch())
        helpers.set_version(project.build_version(), project.label())

    print("Building: {}".format(project.build_product()))
    build.build(project)

    print("Signing product with identity \"{}\"".format(
        project.codesign_identity()))
    sign.sign_everything_in_app(project.build_product(),
                                project.codesign_identity())

    print("Packaging {} to {} as {}".format(project.build_product(),
                                            project.image_path(),
                                            project.image_name()))
    package(project.build_product(), project.image_path(),
            project.image_name())

    print("Generating release notes...")
    generate_release_notes(project)

    print("Committing and tagging \"{}\"".format(project.release_tag_name()))
    if not args.force:  # We didn't set_version, so nothing should have changed
        commit_release(project)
    tag_release(project.release_tag_name(), args.force)

    publish_release(project, args.prerelease, args.draft, args.dry_run)
コード例 #57
0
 def __init__(self, name):
     self.name = name
     self.projects = [Project('ICMP', 'ICMP Protocol', Tree()), Project('HTTP', 'HTTP Proto', Tree())]
     self.selected_project = 0
コード例 #58
0
        self.setCentralWidget(widget)

    def keyPressEvent(self, ev):
        self.ws_controller.keyPressEvent(ev)


if __name__ == '__main__':
    app = QtWidgets.QApplication([])
    pg.setConfigOption('background', 'w')

    app.setStyleSheet(open('style/darkorange.stylesheet').read())

    pg.setConfigOption('background', 0.1)
    pg.setConfigOption('foreground', 'w')

    project = Project()
    project_bar = ProjectBar(project)
    ws_view = WorkspaceView()
    ws_controller = WorkspaceController(project, ws_view)

    post_processor = PostProcessor()
    controller = KlipperController(project, post_processor)
    controller_ui = KlipperControllerUI(controller)

    main_window = MainWindow(ws_view, ws_controller, project_bar,
                             controller_ui)
    main_window.show()
    controller_ui.console.user_input_w.setFocus()

    app.exec_()
コード例 #59
0
 def __init__(self, window):
     self._project = Project(window)
     self._proc = None
     self._history = []
コード例 #60
0
#!/usr/bin/env python3
from project import Project

p = Project()
p.parse()

print("Send commands")

while True:
    command = input("$ ")
    splitted = command.split(' ')
    if splitted[0] == "show" and splitted[1] == "data":
        print(p.generate_json())
    if splitted[0] == "update":
        print(p.update_expression((int(splitted[1]), int(splitted[2])), splitted[3], " ".join(splitted[4:])))
    if splitted[0] == "clear":
        print(p.clear_cell((int(splitted[1]), int(splitted[2]))))