Exemplo n.º 1
0
def monitorSubreddits():
    """Listens for new entries (submissions and comments) in the monitored subreddits.

    The list of subreddits to monitor is specified in configuration.json.
    Comments are consumed on a worker thread while submissions are consumed
    on the main thread; both loops share the module-level stop_event so a
    SIGINT can stop them together.
    """

    # Gets the configuration to use. These are settings regarding how to connect to
    # the DB and which subreddits to listen to.
    config = configuration.getConfiguration()

    print "Monitoring the following subreddits for new submissions/comments: " + str(
        config["subreddits"])

    # Object used to access the Reddit API.
    reddit = RedditWrapper(config["reddit"], config["subreddits"])
    db = DBWrapper(config["database"])

    # Create a new thread which will listen for new comments.
    # NOTE(review): stop_event and exitGracefully are module-level names
    # defined elsewhere in this file — presumably a threading.Event and a
    # SIGINT handler that sets it; confirm against the rest of the module.
    threadPost = threading.Thread(target=monitorForNewEntriesAndSaveInDB,
                                  args=(reddit.getCommentsStream(), db,
                                        stop_event))

    # From this point on, change the handler used when the user sends SIGINT.
    signal.signal(signal.SIGINT, exitGracefully)

    # Do work in the other thread. Listen for new comments.
    threadPost.start()

    # Do work in main thread. Listen for new submissions.
    monitorForNewEntriesAndSaveInDB(reddit.getSubmissionsStream(), db,
                                    stop_event)

    # Wait for the comment-listening thread to drain before exiting.
    threadPost.join()

    print "Bye!"
Exemplo n.º 2
0
def start(program):
    log = logging.getLogger(__name__)
    parser = argparse.ArgumentParser(parents=opentuner.argparsers())
    parser.add_argument('--program', help='Name of the StreamJit application')

    argv = ['--program', program, '--test-limit', '6000']
    args = parser.parse_args(argv)

    if not args.database:
        args.database = 'sqlite:///' + program + '.db'

    try:
        conn = sqlite3.connect('streamjit.db')
        c = conn.cursor()
        query = 'SELECT configuration FROM apps WHERE name="%s"' % program
        c.execute(query)
        row = c.fetchone()
        if not row:
            data = raw_input(
                "No entry found with name = %s \nPlease press anykey to exit" %
                program)
            sys.exit(1)
        cfgString = row[0]
        cfg = configuration.getConfiguration(cfgString)
        cfgparams = cfg.getAllParameters()
    except Exception, e:
        print 'Exception occured'
        traceback.print_exc()
        data = raw_input("Press Keyboard to exit...")
Exemplo n.º 3
0
def start(program):
	log = logging.getLogger(__name__)
	parser = argparse.ArgumentParser(parents=opentuner.argparsers())
	parser.add_argument('--program', help='Name of the StreamJit application')

	argv = ['--program', program,  '--test-limit', '6000']
	args = parser.parse_args(argv)

	if not args.database:
    		args.database = 'sqlite:///' + program + '.db'

	try: 
		conn = sqlite3.connect('streamjit.db')
		c = conn.cursor()
		query = 'SELECT configuration FROM apps WHERE name="%s"'%program
		c.execute(query)
		row = c.fetchone()
		if not row:
			data = raw_input ( "No entry found with name = %s \nPlease press anykey to exit"%program )
			sys.exit(1)
		cfgString = row[0]
		cfg = configuration.getConfiguration(cfgString)
		cfgparams = cfg.getAllParameters()
	except Exception, e:
		print 'Exception occured'
		traceback.print_exc()
		data = raw_input ( "Press Keyboard to exit..." )
Exemplo n.º 4
0
 def get_device(self):
     """Return connection parameters for a NETIO device.

     If the stored configuration contains previously used devices and the
     user opts in, a popup menu of those devices is shown; otherwise a form
     collects hostname, TCP port and username, followed by a password
     prompt. Returns a list [devicename, host, tcp_port, username,
     password], or -1 when the popup selection is cancelled.
     """
     # Entries loaded from configuration have the form
     # [devicename, host, tcp_port, username, password].
     known_devices = configuration.getConfiguration()
     if len(known_devices) > 0 and appuifw.query(u"Choose a previously used device?", "query"):
         # Build the menu labels: "name - host", or just the host when unnamed.
         labels = []
         for entry in known_devices:
             entry_name = entry[0]
             entry_host = entry[1]
             if entry_name == '':
                 labels.append(unicode(entry_host))
             else:
                 labels.append(unicode("%s - %s" % (entry_name, entry_host)))
         # Let the user pick one of the known devices.
         choice = appuifw.popup_menu(labels, u"Previously connected")
         if choice is None:
             return -1
         return known_devices[choice]
     # No stored device chosen: collect the details via a form.
     # Field tuples are (label, type, value); label is a unicode string and
     # type is one of 'text', 'number', 'date', 'time' or 'combo'.
     # See <http://gist.github.com/322309#file_form.py>.
     fields = [(u'Hostname/IP', 'text', u''),
               (u'TCP Port', 'number', 1234),
               (u'Username', 'text', u'admin')]
     # Edit-only form, saved through the instance's save hook.
     form = appuifw.Form(fields, appuifw.FFormEditModeOnly)
     form.save_hook = self.save_input
     # Show the form and block until the user is done.
     form.execute()
     self.host = form[0][2]
     self.tcp_port = form[1][2]
     self.username = form[2][2]
     password = appuifw.query(u"Password", 'code')
     return ['', self.host, self.tcp_port, self.username, password]
Exemplo n.º 5
0
 def loadConfigurationSettings(self):
     """Apply proxy settings from the stored configuration, if present.

     Nothing happens unless an httpProxy entry exists; the https proxy and
     the proxy credentials are optional and default to None.
     """
     conf = configuration.getConfiguration()
     # Without an HTTP proxy there is nothing to configure.
     if not conf.hasKey("httpProxy"):
         return
     http = conf.httpProxy
     https = conf.httpsProxy if conf.hasKey("httpsProxy") else None
     user = conf.proxyUser if conf.hasKey("proxyUser") else None
     pas = conf.proxyPassword if conf.hasKey("proxyPassword") else None
     domaindata.set_proxy_environment(http, https, user, pas)
Exemplo n.º 6
0
def listAll():
    """Display the entire contents of the DB."""
    config = configuration.getConfiguration()
    db = DBWrapper(config["database"])

    # Fetch every item, newest first, omitting the Mongo id and raw content.
    projection = {"_id": 0, "content": 0}
    ordering = [("timestamp", pymongo.DESCENDING)]
    cursor = db.items.find(filter={}, projection=projection, sort=ordering)

    docs = list(cursor)

    # Serialize the documents as a JSON HTTP response.
    return Response(response=json.dumps(docs),
                    status=200,
                    mimetype='application/json')
Exemplo n.º 7
0
    def run(self):
        """Serve tuning requests from the connected peer in a loop.

        Protocol (one command per message): 'program\n' is followed by the
        program name on the next line, 'confg\n' by a serialized
        configuration on the next line, and 'exit\n' ends the session.
        """
        while 1:
            data = self.recvmsg()
            if (data == 'exit\n'):
                print data, "I have received exit. I am gonna exit."
                break
            elif (data == 'program\n'):
                # The program name arrives on the line after the command.
                self.program = self.file.readline()

            elif (data == 'confg\n'):
                print "Config received."
                cfgString = self.file.readline()
                try:
                    # Deserialize the configuration and hand it to the
                    # online tuner together with the previously received
                    # program name.
                    cfg = configuration.getConfiguration(cfgString)
                    argv = ['--program', self.program, '--test-limit', '5000']
                    onlinetuner.start(argv, cfg, self)
                except Exception, e:
                    # A tuner failure ends the session instead of looping.
                    print "Exception occured : %s" % e
                    traceback.print_exc()
                    #data = raw_input ( "Press Keyboard to exit..." )
                    break

            else:
                print "###Invalid data received. Please check...:", data
Exemplo n.º 8
0
def main():
    """Synchronize releases and epics between Aha and Zenhub.

    Loads the configuration, builds a connector for each service, maps the
    releases between the two systems, logs pre-sync statistics, then creates
    or updates release data and epic data.
    """
    config = configuration.getConfiguration()
    logger.info("Configuration loaded : {0}".format(configuration.global_configuration))

    # Connectors wrapping the two services' APIs.
    Zenhub = zenhub_connector.ZenhubConnector(config)
    Aha = Aha_connector.AhaConnector(config)

    #data = Zenhub.github.get_issue(1777)
    #issue = Zenhub.github.get_issues()
    #logger.info("Data got for issue is {0}".format(data.is_closed))
    #raise NotImplementedError
    
    # Establish the release mapping first; the statistics below read the
    # maps this call populates on the Aha connector.
    mapAhaZenhubReleases(Aha, Zenhub)
    #mapAhaZenhubEpics(Aha, Zenhub)
  

    logger.info("--------------Before the Sync starts: -------------------------")
    logger.info("No. of Releases in Aha: {0}".format(len(Aha.getReleases())))
    logger.info("No. of UnReleased Releases in Aha {0}".format(len(Aha.openReleasesMap)))
    logger.info("No. of Releases in Zenhub: {0}".format(len(Zenhub.releases)))
    logger.info("No. of Unmapped Releases {0}".format(len(Aha.unmappedAhaReleases)))
    logger.info("No. of Epics in Ahaq {0}".format(len(Aha.epicsMap)))

    # For Each UnReleased Release, sync all the zenhub / github update to the Release
    # If the Release is not found create it.
    CreateOrUpdateReleaseData(Aha, Zenhub)
    
    # for release in mappedAhaZenhubReleases:
    #     logger.info("Mapped Releases: {0}".format(release))

    # for release in unmappedAhaReleases:
    #     logger.info("Unmapped Aha Releases {0}".format(release))
    
    #Once the Releases are synced the Epics can be synced now. 
    #We Take all the Epics from Aha and sync it to zenhub.
    CreateOrUpdateEpicsData(Aha, Zenhub)
Exemplo n.º 9
0
	def run(self):
		while 1:
		        data = self.recvmsg()
		        if ( data == 'exit\n'):
				print data, "I have received exit. I am gonna exit."
				break;
			elif ( data == 'program\n'):
				self.program = self.file.readline()
				
			elif ( data == 'confg\n' ):
				print "Config received."
				cfgString = self.file.readline()
				try:
					cfg = configuration.getConfiguration(cfgString)
					argv = ['--program', self.program,  '--test-limit', '5000']
					onlinetuner.start(argv, cfg, self)
				except Exception, e:
					print "Exception occured : %s"%e
					traceback.print_exc()
					#data = raw_input ( "Press Keyboard to exit..." )
					break;
					
			else:
				print "###Invalid data received. Please check...:" , data
Exemplo n.º 10
0
def handler(event, context=None):
    """Entry point for ghi GitHub webhook events (AWS Lambda or local server).

    Validates the request shape, configures logging, loads the
    configuration, optionally verifies the GitHub signature for the matched
    pool, converts the payload into IRC messages and sends them. Returns a
    Lambda-style dict with 'statusCode' and a JSON 'body'.
    """
    # ensure it's a valid request
    if event and "body" in event and "headers" in event:

        # AWS Lambda configures the logger before executing this script
        # We want to remove their configurations and set our own.
        # Fix: iterate over a COPY of the handler list — removing from the
        # live list while iterating it skips every other handler. The loop
        # variable is also renamed so it no longer shadows this function.
        log = logging.getLogger()
        for existing_handler in list(log.handlers):
            log.removeHandler(existing_handler)

        if "X-Ghi-Server" in event["headers"]:
            # was invoked by local server
            logging.basicConfig(
                level=logging.INFO,
                format="%(asctime)s [ghi] %(message)s",
                datefmt="%Y-%m-%d %H:%M:%S"
            )
        else:
            logging.basicConfig(
                level=logging.INFO,
                format="%(message)s"
            )

        # By default ghi will respond to the request immediately,
        # then invoke itself to actually process the event.
        # This can be disabled by setting GHI_LONG_RESPONSE="true"
        if "requestContext" in event:
            from aws import InvokeSelf
            # Was invoked by AWS
            if "GHI_LONG_RESPONSE" in os.environ and os.getenv("GHI_LONG_RESPONSE"):
                pass
            elif "X-Ghi-Invoked" not in event["headers"]:
                return InvokeSelf(event)

        # validate and load configuration file
        configuration = getConfiguration()
        if configuration["statusCode"] != 200:
            return configuration

        # Enable debug if set in config
        if configuration["debug"]:
            logging.getLogger().setLevel(logging.DEBUG)

        # verify the request is from GitHub
        githubPayload = event["body"]

        # Enhanced logging if debug is set
        logging.debug("Ghi Version:")
        logging.debug(__version__)
        logging.debug("Payload:")
        logging.debug(githubPayload)
        logging.debug("Headers:")
        logging.debug(event["headers"])

        # figure out which pool this should belong to so we can use its secret
        pool = getPool(githubPayload, configuration["pools"])
        if pool["statusCode"] != 200:
            return pool

        try:
            if pool["verify"]:
                githubSignature = event["headers"]["X-Hub-Signature"]
            # GitHub's event header capitalization differs between proxies;
            # accept both spellings.
            try:
                githubEvent = event["headers"]["X-GitHub-Event"]
            except KeyError:
                githubEvent = event["headers"]["X-Github-Event"]
        except KeyError as e:
            errorMessage = "missing header in request: %s" % e
            logging.error(errorMessage)
            return {
                "statusCode": 400,
                "body": json.dumps({
                    "success": False,
                    "message": errorMessage
                })
            }

        # check signatures of request
        if pool["verify"]:
            validPayload = validatePayload(
                payload=githubPayload,
                signature=githubSignature,
                secret=pool["secret"]
            )
            if not validPayload:
                logging.error("GitHub payload validation failed")
                return {
                    "statusCode": 401,
                    "body": json.dumps({
                        "success": False,
                        "message": "payload validation failed"
                    })
                }
        else:
            logging.debug("Skipping payload verification because 'verify' set to False.")

        getMessages = parsePayload(githubEvent, githubPayload, pool["pool"].repos, pool["pool"].shorten)
        if getMessages["statusCode"] != 200:
            return getMessages

        logging.debug("Messages:")
        logging.debug(getMessages["messages"])

        # Send messages to the designated IRC channel(s)
        sendToIrc = sendMessages(pool["pool"], getMessages["messages"])
        if sendToIrc["statusCode"] != 200:
            return sendToIrc

        result = "Successfully notified IRC."
        logging.info(result)
        return {
            "statusCode": 200,
            "body": json.dumps({
                "success": True,
                "message": result
            })

        }

    else:
        return {
            "statusCode": 400,
            "body": json.dumps({
                "success": False,
                "message": "bad event data"
            })
        }
Exemplo n.º 11
0
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with DSCE.  If not, see <http://www.gnu.org/licenses/>.
#
# Copyright (c) 2011 Klaus Melcher ([email protected])
"""This is a wrapper around locating and loading of XRC files within the dsce.
"""
import wx
import wx.xrc as xrc

import os
import configuration

# Absolute path of the directory holding the XRC resource files; resolved
# once at import time from the application configuration.
_RESPATH_ = configuration.getConfiguration().xrcDir

def _getXrcsAbsPath(fn):
    """Return the absolute path of XRC file *fn* inside _RESPATH_.

    Raises IOError when the file does not exist. The previous code raised
    the overly broad BaseException, which even escapes ``except Exception``
    handlers; IOError is a BaseException subclass, so existing callers that
    caught BaseException still catch it.
    """
    ap = os.path.join(_RESPATH_, fn)
    if not os.path.isfile(ap):
        raise IOError("File %s does not exist" % ap)
    return ap

def loadPanel(parent, fn, n):
    """Load and return panel resource *n* from XRC file *fn*.

    parent -- widget to use as the panel's parent
    fn     -- XmlResource file name; the file must be located in _RESPATH_
    n      -- name of the resource to load
    """
    resource_path = _getXrcsAbsPath(fn)
    resource = xrc.XmlResource(resource_path)
    return resource.LoadPanel(parent, n)
Exemplo n.º 12
0
from autologging import logged, traced, TRACE
import logging
import sys
from datetime import datetime
from configuration import getConfiguration
from urllib.parse import urljoin
import time

# Root logger set-up: INFO level, single stream handler, and a verbose
# format carrying file, line and function for every record.
logging.basicConfig(
    level=logging.INFO,
    format=
    "%(levelname)s:%(filename)s,%(lineno)d:%(name)s.%(funcName)s:%(message)s",
    handlers=[logging.StreamHandler()])
logger = logging.getLogger()

# Application configuration, loaded once at import time.
config = getConfiguration()

# API tokens for the three services involved in the sync.
AHA_TOKEN = config.AHA_TOKEN
ZENHUB_TOKEN = config.ZENHUB_TOKEN
GITHUB_TOKEN = config.GITHUB_TOKEN
# Presumably a cache of Aha releases filled in later — TODO confirm against
# the rest of the module.
RELEASES_AHA = None

# Default request headers for the Aha and Zenhub REST APIs.
AHA_HEADER = {
    'Authorization': AHA_TOKEN,
    'Content-Type': "application/json",
    "User-Agent": "*****@*****.**"
}
ZENHUB_HEADER = {'X-Authentication-Token': ZENHUB_TOKEN}
# Presumably maps Zenhub issues to their release — TODO confirm.
ZH_ISSUE_RELEASE_MAP = {}

########################DATA_STORE##################################
Exemplo n.º 13
0
if __name__ == '__main__':
    logging.basicConfig(level=logging.INFO)
    parser = argparse.ArgumentParser(parents=opentuner.argparsers())
    parser.add_argument('--program',
                        help='StreamJIT benchmark to tune (with first input)')
    parser.add_argument('--timestamp',
                        help='timestamp to use for final config/errors',
                        default=time.strftime('%Y%m%d-%H%M%S'))
    args = parser.parse_args()
    (cfg_json,
     error_str) = call_java([], "edu.mit.streamjit.tuner.ConfigGenerator2", [
         "edu.mit.streamjit.impl.compiler2.Compiler2BlobFactory", args.program
     ])
    if len(error_str) > 0:
        sys.exit("Getting config JSON: " + error_str)
    cfg = configuration.getConfiguration(cfg_json)
    jvm_options = make_jvm_options()

    manipulator = StreamJITConfigurationManipulator(cfg)
    for p in cfg.getAllParameters().values() + jvm_options.values():
        manipulator.add_parameter(p)

    # create seed configurations
    seed_multipliers = [1024, 4096, 128]
    seed_configs = []
    for m in seed_multipliers:
        seed_config = manipulator.seed_config()
        for p in cfg.getAllParameters().values() + jvm_options.values():
            if isinstance(p, sjparameters.sjCompositionParameter):
                p.equal_division(seed_config)
            elif isinstance(p, sjparameters.sjPermutationParameter):
Exemplo n.º 14
0
def handler(event, context=None, sysd=None):
    """Entry point for ghi GitHub webhook events (AWS Lambda, systemd or
    local server).

    Validates the request shape, loads the configuration, configures
    logging for the detected server environment, optionally verifies the
    GitHub signature for the matched pool, then fans the payload out to the
    pool's configured outlets (IRC and/or Mastodon). Returns a Lambda-style
    dict with 'statusCode' and a JSON 'body'.
    """
    # ensure it's a valid request
    if event and "body" in event and "headers" in event:
        # validate and load configuration file
        configuration = getConfiguration()
        if configuration["statusCode"] != 200:
            return configuration

        # configure logging according to server-environment
        if sysd == "systemd":
            setup_server_logging("systemd", configuration["debug"])
        elif "X-Ghi-Server" in event["headers"]:
            setup_server_logging("plain", configuration["debug"])
        else:
            setup_server_logging("aws", configuration["debug"])

        # verify the request is from GitHub
        githubPayload = event["body"]

        # Enhanced logging if debug is set
        logging.debug("Ghi Version:")
        logging.debug(__version__)
        logging.debug("Payload:")
        logging.debug(githubPayload)
        logging.debug("Headers:")
        logging.debug(event["headers"])

        # By default ghi will respond to the request immediately,
        # then invoke itself to actually process the event.
        # This can be disabled by setting GHI_LONG_RESPONSE="true"
        if "requestContext" in event:
            from aws import InvokeSelf
            # Was invoked by AWS
            if "GHI_LONG_RESPONSE" in os.environ and os.getenv(
                    "GHI_LONG_RESPONSE"):
                pass
            elif "X-Ghi-Invoked" not in event["headers"]:
                return InvokeSelf(event)

        # figure out which pool this should belong to so we can use its secret
        pool = getPool(githubPayload, configuration["pools"])
        if pool["statusCode"] != 200:
            return pool

        try:
            if pool["verify"]:
                githubSignature = event["headers"]["X-Hub-Signature"]
            # GitHub's event header capitalization differs between proxies;
            # accept both spellings.
            try:
                githubEvent = event["headers"]["X-GitHub-Event"]
            except KeyError as e:
                githubEvent = event["headers"]["X-Github-Event"]
        except KeyError as e:
            errorMessage = "missing header in request: %s" % e
            logging.error(errorMessage)
            return {
                "statusCode": 400,
                "body": json.dumps({
                    "success": False,
                    "message": errorMessage
                })
            }

        # check signatures of request
        if pool["verify"]:
            validPayload = validatePayload(payload=githubPayload,
                                           signature=githubSignature,
                                           secret=pool["secret"])
            if not validPayload:
                logging.error("GitHub payload validation failed")
                return {
                    "statusCode":
                    401,
                    "body":
                    json.dumps({
                        "success": False,
                        "message": "payload validation failed"
                    })
                }
        else:
            logging.debug(
                "Skipping payload verification because 'verify' set to False.")

        getMessages = parsePayload(githubEvent, githubPayload,
                                   pool["pool"].repos, pool["pool"].shorten)
        if getMessages["statusCode"] != 200:
            return getMessages

        # Track per-outlet success so the final response can name which
        # outlets were notified.
        ircCheck = False
        mastCheck = False
        failure = False

        if "irc" in pool["pool"].outlets:
            logging.debug("IRC Messages:")
            logging.debug(getMessages["ircMessages"])

            # Send messages to the designated IRC channel(s)
            sendToIrc = sendMessages(pool["pool"], getMessages["ircMessages"])
            if sendToIrc["statusCode"] != 200:
                failure = True
                ircResult = "Something went wrong while trying to notify IRC."
            else:
                ircResult = "Successfully notified IRC."
                ircCheck = True
            logging.info(ircResult)

        githubPayload = json.loads(githubPayload)

        # The Mastodon merge filter suppresses toots for pull_request events
        # unless the PR was actually closed-and-merged.
        if githubEvent == "pull_request":
            if not (githubPayload["action"] == "closed"
                    and githubPayload["pull_request"]["merged"]):
                mastAppliedMergeFilter = pool["pool"].mastMergeFilter
            else:
                mastAppliedMergeFilter = False
        else:
            mastAppliedMergeFilter = pool["pool"].mastMergeFilter

        if "mastodon" in pool["pool"].outlets and not mastAppliedMergeFilter:
            logging.debug("Mastodon Messages:")
            logging.debug(getMessages["mastMessages"])

            # Send messages to Mastodon's instance's user's timeline
            sendToMastodon = sendToots(pool["pool"],
                                       getMessages["mastMessages"])
            if sendToMastodon["statusCode"] != 200:
                failure = True
                mastResult = "Something went wrong while trying to notify Mastodon."
            else:
                mastResult = "Succesfully notified Mastodon."
                mastCheck = True
            logging.info(mastResult)

        # NOTE(review): 'and' binds tighter than 'or', so this parses as
        # ircCheck or (not mastAppliedMergeFilter and not failure) — confirm
        # whether (ircCheck or not mastAppliedMergeFilter) and not failure
        # was intended. Also, if neither outlet is configured and the merge
        # filter is on, no branch assigns 'result' before it is returned
        # below — confirm that configuration cannot occur.
        if ircCheck or not mastAppliedMergeFilter and not failure:
            result = "Succesfully notified {both0}{IRC}{both1}{Mastodon}.".format(
                both0="both " if ircCheck and mastCheck else "",
                both1=" and " if ircCheck and mastCheck else "",
                IRC="IRC" if ircCheck else "",
                Mastodon="Mastodon" if mastCheck else "")
            if "mastodon" in pool["pool"].outlets and mastAppliedMergeFilter:
                mastResult = "Didn't toot because of the merge filter."
                logging.info(mastResult)
                result = result[:-1] + ", but not Mastodon because of the merge filter."
        elif "mastodon" in pool["pool"].outlets and mastAppliedMergeFilter:
            mastResult = "Event received, but didn't toot because of the merge filter."
            logging.info(mastResult)
            result = "Event received, but didn't toot because of the merge filter."

        if failure:
            result = "Something went wrong."
            return {
                "statusCode": 500,
                "body": json.dumps({
                    "success": False,
                    "message": result
                })
            }
        else:
            return {
                "statusCode": 200,
                "body": json.dumps({
                    "success": True,
                    "message": result
                })
            }

    else:
        return {
            "statusCode": 400,
            "body": json.dumps({
                "success": False,
                "message": "bad event data"
            })
        }
import logging
import wx



# Locate the project's "lib" directory as a sibling of the "bin" directory
# containing this script, and make it importable. Exit hard if it's missing,
# since the application modules imported below live there.
# NOTE(review): relies on os and sys being imported earlier in this file —
# confirm against the full module.
p=os.path.abspath(os.path.dirname(sys.argv[0])).replace("bin","lib")
if os.path.isdir(p):
    sys.path.append(p)
else:
    logging.fatal("Libraries directory %s does not exist" % p)
    sys.exit(1)


import configuration

# Configure root logging from the application's configuration settings.
config = configuration.getConfiguration()
logging.basicConfig(format= config.logFormat,
                    datefmt = config.logDateFormat,
                    level= config.debugLevel)

import application 


if __name__ == "__main__":
    # Entry point: build the GUI application object and hand over control
    # until the user closes it.
    logging.debug("Start DomainSharedContactsEditor version %s" % _version_)

    dsce_app = application.Application(sys.argv)
    dsce_app.run()

    logging.debug("DomainSharedContactsEditor closed")
Exemplo n.º 16
0
                           },
                           sort=[("timestamp", pymongo.DESCENDING)])

    results = list(cursor)

    response = Response(response=json.dumps(results),
                        status=200,
                        mimetype='application/json')

    return response


if __name__ == '__main__':
    try:
        # Gets the configuration to use. These are settings regarding which port and host the server should use.
        server_config = configuration.getConfiguration()

        # Connect to the database.
        db_connection = DBWrapper(server_config["database"])

        # Start the web server (presumably a Flask app defined above —
        # confirm); this call blocks until the process is stopped.
        app.run(host=server_config["webserver"]["host"],
                port=int(server_config["webserver"]["port"]))

    except R2D2_ConfigurationError as e:
        # Distinct exit code so callers can tell config errors apart.
        print "An error occured when trying to read the configuration file 'Configuration.json'."
        print e.message
        sys.exit(3)

    except R2D2_DatabaseError as e:
        print "An error occured when trying to connect to the database."
        print e.message
Exemplo n.º 17
0
    def __init__(self, controller):
        """Build the device-selection window.

        Shows auto-detected devices and previously used devices (from the
        stored configuration) in a sortable tree, plus buttons to connect
        to a selected or a new device.
        """
        self.controller = controller
        # Create a new window
        self.window = gtk.Window(gtk.WINDOW_TOPLEVEL)
        self.window.set_title("Select a Device")
        self.window.set_icon_from_file(getAbsoluteFilepath(PROGRAM_ICON))

        self.window.set_size_request(470, 220)
        self.window.connect("delete_event", self.delete_event)

        # create a TreeStore with two string columns to use as the model
        self.treestore = gtk.TreeStore(str,str,str,str,str)

        # First section: devices found by network auto-detection.
        devices = netio230a.get_all_detected_devices()
        if len(devices) > 0:
            self.auto_iter = self.treestore.append(None,['auto-detected devices','','','',''])
        else:
            if os.name == 'nt':
                self.treestore.append(None,['auto-detection is not working on Windows','','','',''])
            else:
                self.treestore.append(None,['no auto-detected devices','','','',''])
        for device in devices:
            #   device name, IP, port, user, password
            self.treestore.append(self.auto_iter,[device[0],str(device[1][0])+'.'+str(device[1][1])+'.'+str(device[1][2])+'.'+str(device[1][3]),'','',''])
        
        # Second section: devices remembered from earlier sessions.
        # devices from configuration has the form [devicename, host, port, username, password]
        devices = configuration.getConfiguration()
        if len(devices) > 0:
            self.recently_iter = self.treestore.append(None,['previously used devices','','','',''])
        for device in devices:
            #   device name, IP, port, user, password
            self.treestore.append(self.recently_iter,[device[0],device[1],str(device[2]),device[3],device[4]])
        
        # more on TreeViews: <http://www.thesatya.com/blog/2007/10/pygtk_treeview.html>
        # and <http://www.pygtk.org/pygtk2tutorial/ch-TreeViewWidget.html#sec-TreeViewOverview>
        # create the TreeView using treestore
        self.treeview = gtk.TreeView(self.treestore)
        # create the TreeViewColumn to display the data
        self.tvc_device_name = gtk.TreeViewColumn('Device Name')
        self.tvc_ip = gtk.TreeViewColumn('IP Address')
        self.tvc_tcp_port = gtk.TreeViewColumn('TCP Port')
        self.tvc_user_name = gtk.TreeViewColumn('User Name')
        # set alignment of the column titles to right
        #self.tvc_ip.set_alignment(1.0)
        #self.tvc_tcp_port.set_alignment(1.0)
        # add tvcolumn to treeview
        self.treeview.append_column(self.tvc_device_name)
        self.treeview.append_column(self.tvc_ip)
        self.treeview.append_column(self.tvc_tcp_port)
        self.treeview.append_column(self.tvc_user_name)
        # create a CellRendererText to render the data
        self.cell = gtk.CellRendererText()
        self.cell_right_align = gtk.CellRendererText()
        self.cell_right_align.set_property('xalign', 1.0)
        # add the cell to the tvcolumn and allow it to expand
        self.tvc_device_name.pack_start(self.cell, True)
        self.tvc_ip.pack_start(self.cell_right_align, True)
        self.tvc_tcp_port.pack_start(self.cell_right_align, True)
        self.tvc_user_name.pack_start(self.cell, True)
        # set the cell "text" attribute to column 0 - retrieve text from that column in treestore
        self.tvc_device_name.add_attribute(self.cell, 'text', 0)
        self.tvc_ip.add_attribute(self.cell_right_align, 'text', 1)
        self.tvc_tcp_port.add_attribute(self.cell_right_align, 'text', 2)
        self.tvc_user_name.add_attribute(self.cell, 'text', 3)
        # make it searchable
        self.treeview.set_search_column(0)
        # Allow sorting on the column
        self.tvc_device_name.set_sort_column_id(0)
        self.tvc_ip.set_sort_column_id(1)
        self.tvc_tcp_port.set_sort_column_id(2)
        self.tvc_user_name.set_sort_column_id(3)
        # Allow drag and drop reordering of rows
        self.treeview.set_reorderable(True)
        self.treeview.expand_all()
        self.treeview.set_size_request(-1,200)

        spacing, homogeneous, expand, fill, padding = 2, False, True, True, 2
        # Create a new hbox with the appropriate homogeneous
        # and spacing settings
        box = gtk.HBox(homogeneous, spacing)
        
        # create the buttons
        button = gtk.Button("other device")
        box.pack_start(button, expand, fill, padding)
        button.connect("clicked",self.connect_clicked)
        button = gtk.Button("connect")
        box.pack_start(button, expand, fill, padding)
        button.connect("clicked",self.connect_clicked, self.treeview)
        
        # Scroll only vertically; the tree defines its own width.
        scroll = gtk.ScrolledWindow()
        scroll.set_policy(gtk.POLICY_NEVER, gtk.POLICY_AUTOMATIC);
        scroll.add(self.treeview);

        spacing, homogeneous, expand, fill, padding = 1, False, False, True, 2
        superbox = gtk.VBox(homogeneous, spacing)
        superbox.pack_start(scroll, True, True, 1)
        superbox.pack_start(box, False, False, 0)
        
        self.superbox = superbox
        
        # Assemble and show everything.
        self.window.add(self.superbox)
        self.window.show_all()
Exemplo n.º 18
0
	enabledJvmOptions = [aggressiveOpts, compileThreshold, clipInlining, freqInlineSize,
		maxInlineSize, maxInlineLevel, eliminateArrays, useNuma, bindGCTaskThreadsToCPUs]
	return {x.name:x for x in enabledJvmOptions}

if __name__ == '__main__':
	logging.basicConfig(level=logging.INFO)
	parser = argparse.ArgumentParser(parents=opentuner.argparsers())
	parser.add_argument('--program', help='StreamJIT benchmark to tune (with first input)')
	parser.add_argument('--timestamp', help='timestamp to use for final config/errors',
		default=time.strftime('%Y%m%d-%H%M%S'))
	args = parser.parse_args()
	(cfg_json, error_str) = call_java([], "edu.mit.streamjit.tuner.ConfigGenerator2",
		["edu.mit.streamjit.impl.compiler2.Compiler2BlobFactory", args.program])
	if len(error_str) > 0:
		sys.exit("Getting config JSON: "+error_str)
	cfg = configuration.getConfiguration(cfg_json)
	jvm_options = make_jvm_options();

	manipulator = StreamJITConfigurationManipulator(cfg)
	for p in cfg.getAllParameters().values() + jvm_options.values():
		manipulator.add_parameter(p)

	# create seed configurations
	seed_multipliers = [1024, 4096, 128]
	seed_configs = []
	for m in seed_multipliers:
		seed_config = manipulator.seed_config()
		for p in cfg.getAllParameters().values() + jvm_options.values():
			if isinstance(p, sjparameters.sjCompositionParameter):
				p.equal_division(seed_config)
			elif isinstance(p, sjparameters.sjPermutationParameter):