コード例 #1
0
 def get_config(self):
     """Serialize this object's hyperparameters for Keras (de)serialization."""
     base = super().get_config().copy()
     extra = {
         'd_model': self.d_model,
         'warmup_steps': self.warmup_steps,
     }
     base.update(extra)
     return base
コード例 #2
0
def updateWifi(cli, resp):
    """Handle the WiFi-setup form POST: delete or update stored credentials.

    cli/resp: request/response objects from the embedded web server.
    Returns the rendered WiFi setup page with a status message.
    """
    data = cli.ReadRequestPostedFormData()
    c = config.get()
    if 'deletewifi' in data:
        # Blank out the stored credentials rather than removing the section.
        if 'wifi' in c:
            w = c['wifi']
            w['SSID'] = ''
            w['password'] = ''
            config.update(c)
        return getWifiSetup(cli, resp, "Wifi setup deleted")

    SSID = data['SSID']
    password = data['pass']
    if 'wifi' not in c:
        c['wifi'] = {'password': ''}
    # Set password if SSID changes or a new password was entered; otherwise
    # keep the previously stored password.
    if password or SSID != c['wifi'].get('SSID'):
        c['wifi']['password'] = password
    c['wifi']['SSID'] = SSID
    config.update(c)
    try:
        wifi.connect(timeoutmillis=30 * 1000)
        # BUG FIX: corrected typo in the user-visible message ("successully").
        return getWifiSetup(cli, resp, "Connected successfully to " + SSID)
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # still propagate.
        return getWifiSetup(cli, resp, "Failed to connect")
コード例 #3
0
    def predictByUnique(self, text, threshold=float(1e40)):
        """
        Predicts a coordinate for a chunk of text.
        Only keeps the ordinarily-frequent unique words before predicting.
        Input: text
        Output: coordinate (lon, lat)
        """
        # Rough word-count estimate: ~8.3 characters per word on average.
        lenText = len(text)
        lenWords = int(lenText / 8.3)
        lowerPercent = 0.00008 # Gives a frequency median of 3
        lowerBound = int(lenWords*lowerPercent) 
        topBound = int(lenWords/300.0)
        
        # For short texts topBound truncates to 0; disable the upper cutoff
        # with an effectively infinite bound instead.
        if topBound == 0:
            topBound = 999999999999999999999999
            
        print "low ", lowerBound, "top ", topBound
        
        # Count word frequencies in the cleaned text.
        words = self.cleanData(text).split()
        c = Counter()
        c.update(words)

        # Keep only words whose frequency is strictly inside (lowerBound, topBound).
        wordsInSpan = [t[0] for t in c.most_common() if t[1] > lowerBound and t[1] < topBound]
        print "lenord i spann", len(wordsInSpan)
        text = " ".join(wordsInSpan)
                
        return self.predict(text, threshold=threshold)
コード例 #4
0
ファイル: admin.py プロジェクト: asdil12/mcweb
def remove(name):
	"""Remove *name* from the admin accounts; return True iff it existed."""
	accounts = config.get('admin_accounts')
	if name in accounts:
		del accounts[name]
		config.update(admin_accounts=accounts)
		return True
	return False
コード例 #5
0
ファイル: main.py プロジェクト: Sleska4/TelegramBot
 def del_end(message):
     """Finalize array replacement: apply it only if the user confirmed."""
     confirmed = message.text == 'ПРИНЯТЬ ✅'
     if confirmed:
         config.arr = arr_test
         config.update()
     reply = 'Массив изменён' if confirmed else 'Изменения не были добавлены'
     bot.send_message(message.chat.id, reply, reply_markup=config.menu())
コード例 #6
0
ファイル: app.py プロジェクト: Orteko/bitcoin-arbitrage-ui
def update_markets():
    """Persist the set of watched market/currency pairs posted by the form."""
    markets = {}
    # Form entries look like "market_cur1_cur2"; group pairs by market.
    for pair_id in request.form.getlist("markets"):
        market, currency1, currency2 = pair_id.split("_")
        markets.setdefault(market, []).append((currency1, currency2))

    config.update({"markets": markets})
    flash("Updated markets being watched.")

    return redirect("/markets")
コード例 #7
0
ファイル: app.py プロジェクト: ryepdx/bitcoin-arbitrage-ui
def update_markets():
    """Rebuild the watched-markets mapping from the posted form and store it."""
    markets = {}
    for entry in request.form.getlist("markets"):
        # Each entry encodes "market_currencyA_currencyB".
        market, cur_a, cur_b = tuple(entry.split("_"))
        if market in markets:
            markets[market].append((cur_a, cur_b))
        else:
            markets[market] = [(cur_a, cur_b)]

    config.update({"markets": markets})
    flash("Updated markets being watched.")

    return redirect("/markets")
コード例 #8
0
ファイル: app.py プロジェクト: Orteko/bitcoin-arbitrage-ui
def update_settings():
    """Store bot settings from the form; enable TraderBot when markets are chosen."""
    new_config = request.form.to_dict()
    new_config["traderbot_markets"] = request.form.getlist("traderbot_markets")

    observers = ["Logger", "WebSocket"]
    # TraderBot only runs when at least one market was selected.
    if new_config["traderbot_markets"]:
        observers.append("TraderBot")
    new_config["observers"] = observers

    config.update(new_config)
    flash("Updated bot settings.")
    return redirect("/settings")
コード例 #9
0
ファイル: app.py プロジェクト: ryepdx/bitcoin-arbitrage-ui
def update_settings():
    """Persist posted bot settings and derive the active observer list."""
    settings_map = request.form.to_dict()
    traderbot_markets = request.form.getlist("traderbot_markets")
    settings_map["traderbot_markets"] = traderbot_markets

    # Base observers are always on; TraderBot is added only when at least
    # one traderbot market was selected in the form.
    observers = ["Logger", "WebSocket"]
    if len(traderbot_markets) > 0:
        observers = observers + ["TraderBot"]
    settings_map["observers"] = observers

    config.update(settings_map)

    flash("Updated bot settings.")

    return redirect("/settings")
コード例 #10
0
ファイル: control.py プロジェクト: kollehond/ChatterPi
def event_handler():
    """Run one triggered event sequence: eyes on, trigger-out pulse, audio,
    eyes off.

    Calls c.update() first so configuration changes take effect per event.
    """
    c.update()
    if c.EYES == 'ON':
        eyesPin.on()
    if c.TRIGGER_OUT == 'ON':
        # Pulse the output trigger for half a second.
        triggerOut.on()
        time.sleep(0.5)
        triggerOut.off()
    if c.SOURCE == 'FILES':
        tracks.play_vocal()
    else:
        # BUG FIX: the original referenced the method without calling it
        # (`a.play_vocal_track`), which evaluated the attribute and did
        # nothing.
        a.play_vocal_track()
    if c.EYES == 'ON':
        eyesPin.off()
コード例 #11
0
async def set_channel(ctx):
    """
    Sets which channel the bot should send its messages in.
    """
    global con
    # Only the linked owner account may change the bot channel.
    if ctx.message.author.id != con["user"]:
        return

    con["channel"] = ctx.channel.id
    config.update(con)

    await ctx.message.channel.send(
        f"✅ Set bot channel for {ctx.message.author} to #{ctx.channel}")
コード例 #12
0
ファイル: main.py プロジェクト: Sleska4/TelegramBot
def data_add(message):
    """Parse the user's reply as either a weekday mode ('1'..'7' -> 'modN')
    or a 'DD.MM' date, append it to the last entry of config.arr, and
    persist via config.update(); re-prompts on invalid input."""
    data = message.text
    if data in ['1', '2', '3', '4', '5', '6', '7']:
        # Digits 1-7 select a recurring mode ('mod1'..'mod7').
        config.arr[len(config.arr) - 1].append('mod' + str(data))
        bot.send_message(message.chat.id,
                         'Дата изменена на {}'.format('mod' + str(data)),
                         reply_markup=config.menu())
    else:
        try:
            data_boolean = False
            data = str(data)
            data = data.split('.')
            # Valid calendar date: day in 1..31 and month in 1..12.
            if 1 <= int(data[0]) <= 31:
                if 1 <= int(data[1]) <= 12:
                    data_boolean = True
            if data_boolean:
                data = '.'.join(data)
                config.arr[len(config.arr) - 1].append(data)
                bot.send_message(message.chat.id,
                                 'Данные успешно обновлены ✅',
                                 reply_markup=config.menu())
            else:
                # Out-of-range date: re-prompt and re-register this handler.
                bot.send_message(
                    message.chat.id,
                    'Данные введены некоректно, попробуйте снова.',
                    reply_markup=config.menu())
                data = bot.send_message(message.chat.id,
                                        'Хотите указать дату?',
                                        reply_markup=config.cancel())
                bot.register_next_step_handler(data, data_add)
        except:
            # NOTE(review): reaching here means int() raised above, so `data`
            # is still the list produced by split('.') — hence the list
            # comparison below.
            if data == ['ОТМЕНА ❌']:  # still under question (original TODO)
                config.arr[len(config.arr) - 1].append(
                    'mod1')  # still under question (original TODO)
                bot.send_message(message.chat.id,
                                 'Дата поставленна на каждый день',
                                 reply_markup=config.menu())
            else:
                bot.send_message(
                    message.chat.id,
                    'Данные введены некоректно, попробуйте снова.',
                    reply_markup=config.menu())
                data = bot.send_message(message.chat.id,
                                        'Хотите указать дату?',
                                        reply_markup=config.cancel())
                bot.register_next_step_handler(data, data_add)

    config.update()
コード例 #13
0
ファイル: msgHandle.py プロジェクト: kang2453/python
 def msgHandle(self, cmd, value):
     """Route an incoming (cmd, value) message onto the message queue."""
     if cmd == 'OPTION':
         # Apply the option change here, then tell sitelist.py to re-read
         # its configuration; the received value is stored as-is.
         config.update(value)
         self.msgQue.append('RELOAD!config.conf')
     elif cmd in ('CMD', 'UPDATE'):
         # Command-execution and update messages are forwarded unchanged.
         self.msgQue.append(value)
     else:
         writeLog.PrintLog("%s:%s msg is not define" % (cmd, value))
コード例 #14
0
    def test_post(self):
        # Verify that POST creates a snippet: expects 200, permissive CORS
        # headers, a valid JSON body, and a 'key' derived from the title.
        print '\tTesting POST method'
        headers = {'Authorization': self.ACCESS_TOKEN}
        r = requests.post(self.API_URL, headers=headers, data=self.SNIPPET)

        testfor.status_code(self, r, 200)
        testfor.cors_headers(self, r, {'Origin': '*'})
        testfor.valid_json(self, r)

        # Expected key prefix: the snippet title, lowercased, with spaces
        # replaced by underscores.
        expected_key = json.loads(self.SNIPPET) \
                           ['snippetTitle']     \
                           .replace(' ', '_')   \
                           .lower()
        testfor.key_val_start(self, r, 'key', expected_key)

        # Update config file with received key for later tests
        config.update('snippet_id', r.json()['key'])
コード例 #15
0
def load_manifest(filename, dirname=None, extra_config=None):
    """Load autoscaler settings from a Cloud Foundry manifest YAML file.

    filename: manifest path, resolved relative to *dirname* (defaults to
    the parent of this module's directory).
    extra_config: optional dict merged over the derived settings.
    Returns a dict with autoscaler_api_url, token, secret, validate_ssl.
    """
    autoscaler_web_dir = dirname or path.normpath(
        path.join(path.dirname(path.abspath(__file__)), '..'))
    filename = path.join(autoscaler_web_dir, filename)
    with open(filename, 'r') as f:
        # NOTE(review): yaml.load without an explicit Loader is unsafe on
        # untrusted input and deprecated since PyYAML 5.1 — consider
        # yaml.safe_load (confirm no custom YAML tags are required).
        manifest = yaml.load(f)
    # The env block of the first application holds the CFAS_* settings.
    config = manifest['applications'][0]['env']
    validate_ssl = config.get('CFAS_VALIDATE_SSL', True)
    # Manifest values may arrive as strings; anything other than 'false'
    # counts as True.  (`basestring` implies this module targets Python 2.)
    if isinstance(validate_ssl, basestring):
        validate_ssl = 'false' != validate_ssl
    config = {
        'autoscaler_api_url': config['CFAS_API_URL'],
        'token': config['CFAS_TOKEN'],
        'secret': config['CFAS_SECRET'],
        'validate_ssl': validate_ssl
    }
    if extra_config:
        config.update(extra_config)
    return config
コード例 #16
0
async def getUser():
    """
    Allows the user to link their discord account to the bot through their user id
    """
    global con

    def checkIfDm(message: discord.Message):
        # Accept only direct messages that were not sent by the bot itself.
        return (isinstance(message.channel, discord.channel.DMChannel)
                and message.author != client.user)

    # One-time 7-digit code the user must DM back to prove ownership.
    token = ''.join(str(random.randint(1, 10)) for _ in range(7))

    print(
        "[Info] To link your discord account to the bot, open a new direct message with the bot and send it the following code:\n {}"
        .format(token))

    while True:

        message = await client.wait_for('message', check=checkIfDm)

        if message.content == token:

            await message.channel.send("Linking account...")
            # BUG FIX: corrected typo in the log message ("recieved").
            print("[Info] Token received, linking account")

            # Persist the linked account id in the shared config.
            con['user'] = message.author.id
            config.update(con)

            await message.channel.send("Account Linked")
            print('[Info] Account linked.')
            break

        else:

            print(
                "[Info] That is not the correct token, please send the correct token printed previously"
            )
コード例 #17
0
def updateConfig(cli, resp):
    """Handle the settings form POST: set time, timezone, CSV timetable,
    notification settings, or Mawaqit sync.  Returns the status page."""
    data = cli.ReadRequestPostedFormData()
    if 'settime' in data:
        # Date arrives as YYYY-MM-DD, time as HH:MM.
        y, m, d = [int(x) for x in data["date"].split('-')]
        h, mi = [int(x) for x in data["time"].split(":")]
        settime(y, m, d, 0, h, mi, 0)

        return getStatus(cli, resp, "Time updated")
    if 'settz' in data:
        tzmin = int(data['tzmin'])
        c = config.get()
        if 'rtc' not in c:  # idiomatic form of `not 'rtc' in c`
            c['rtc'] = {}
        c['rtc']['timezoneDeltaMinutes'] = tzmin
        config.update(c)

        return getStatus(cli, resp, "Timezone updated")
    if 'loadcsv' in data:
        try:
            sdb.importcsv(data['csv'])
            return getStatus(cli, resp, "Salat timetable updated successfully")
        except Exception as err:
            sys.print_exception(err)
            return getStatus(cli, resp, "Error with CSV data : %s" % str(err))

    if 'updatenotif' in data:
        # Per-salat alarm delay and volume for the six entries.
        for sidx in range(0, 6):
            sdb.setsalarmdelay(sidx, int(data['salat%dalm' % sidx]))
            sdb.setsvolume(sidx, int(data['salat%dvol' % sidx]))
        sdb.save()
    if 'syncmawaqit' in data:
        try:
            import mawaqit
            mawaqit.dosync(sdb)
            return getStatus(cli, resp, "Mawaqit Sync successful")
        except Exception as err:
            sys.print_exception(err)
            # BUG FIX: this branch previously reported "Error with CSV data",
            # copied from the loadcsv handler above.
            return getStatus(cli, resp, "Error with Mawaqit sync : %s" % str(err))

    return getStatus(cli, resp)
コード例 #18
0
    def predictByGrammar(self, text, threshold=float(1e40), clipping=True):
        """
        Predicts a coordinate for a chunk of text.
        Implementation of the grammar approach.
        Input: text
        Output: shortened text containing only the words the grammar matched
        """
    
        # Rough word-count estimate: ~8.3 characters per word on average.
        lenText = len(text)
        lenWords = int(lenText / 8.3)
        lowerPercent = 0.00008 # Gives a frequency median of 3
        
        if clipping:
            lowerBound = int(lenWords*lowerPercent) 
            topBound = int(lenWords/300.0)
        else:
            # No clipping: accept every match frequency.
            lowerBound = 0
            topBound = 999999999999999999999999
        
        # For short texts topBound truncates to 0; disable the upper cutoff.
        if topBound == 0:
            topBound = 999999999999999999999999
            
        print "low ", lowerBound, "top ", topBound

        # Count occurrences of every grammar-pattern match in the text.
        c = Counter()
        text = text.lower()
        for pattern in self.patterns:
            found = re.findall(pattern, text)
            if found:
                c.update(found)

        # Keep matches whose frequency is strictly inside (lowerBound, topBound).
        wordsInSpan = [t[0] for t in c.most_common() if t[1] > lowerBound and t[1] < topBound]
        print "lenord i spann", len(wordsInSpan)
        text = " ".join(wordsInSpan)
                
        #return self.predict(text, threshold=threshold)
        return text 
コード例 #19
0
 def get_config(self):
     """Override needed so the model can be saved in serialized .h5 format."""
     cfg = super(Embedding, self).get_config()
     cfg["embed_dim"] = self.embed_dim
     return cfg
コード例 #20
0
ファイル: __init__.py プロジェクト: petronius/dyssh
        # arguments manually.
        positional = []
        for args, kwargs in ARGS.items():
            if len(args) == 1 and not args[0].startswith('-'):
                positional.append(args[0])
            elif args[0].startswith('-'):
                optparser.add_option(*args, **kwargs)
        options, args = optparser.parse_args()
        # Add these to the options object so that config.update() checks them
        for k, v in zip(positional, args):
            if not hasattr(options, k):
                setattr(options, k, v)
        argv = options

    try:
        config.update(argv)
    except ValueError, e:
        error('', ' '.join(e.args))
        error('', __doc__)
        sys.exit(os.EX_USAGE)

    try:
        import atexit
        import readline
        histfile = os.path.join(config.get('histfile'))
        try:
            readline.read_history_file(histfile)
        except IOError:
            pass
        atexit.register(readline.write_history_file, histfile)
    except ImportError:
コード例 #21
0
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sun May 17 22:19:49 2020

@author: Mike McGurrin
"""
# import and initialize common constants and class variables
import config as c
# Load/refresh the configuration before anything else reads it.
c.update()

import control
# run control, which handles the triggers and event handling
control.controls()
    
    
        


    
        
コード例 #22
0
ファイル: application.py プロジェクト: lexdene/testdrape
	def systemInit(self):
		'''
		System-level initialization.
		In theory this function should run only once, at boot / server
		startup, and never again afterwards.
		'''
		config.update(self.edconfig())
コード例 #23
0
ファイル: example2.py プロジェクト: StudioProcess/helios-py
        mat = matrix.keystone(cfg.keystonex / 4095, cfg.keystoney / 4095, 2047,
                              2047) * mat  # 7. apply keystone correction
        square = transform(square, mat)

        square = interpolate(square, cfg.interpolation, close=True)
        square = barrel_distort(
            square, cfg.barrel, 2047, 2047
        )  # use this on interpolated points, this transform doesn't preserve straight lines
        frame = Helios.Frame(*square)
        # frame = Helios.Frame( make_point(1, 1) )
        if 'fps' in cfg and cfg.fps > 0: pps = len(frame) * cfg.fps
        else: pps = cfg.pps
        pps = min(pps, 24000)  # limit this
        new_info = {
            'points': len(frame),
            'pps': pps,
            'fps': int(pps / len(frame))
        }
        if new_info != info:
            info = new_info
            print(
                f'points: {info["points"]}, pps: {info["pps"]}, fps: {info["fps"]}'
            )
        Helios.WriteFrame(0, pps, Helios.FLAGS_DEFAULT, frame, len(frame))

        config.update(1)
except KeyboardInterrupt:
    Helios.CloseDevices()
    exit()

Helios.CloseDevices()
コード例 #24
0
ファイル: application.py プロジェクト: asdil12/mcweb
def server(action=None):
	"""Server control view.

	GET renders the server status page; POST dispatches one of the actions
	power / memory / autos / update / announce / timeset and redirects back.
	Requires a logged-in session.
	"""
	if 'username' not in session: return goto_login(fname(), fparms())
	if request.method == 'POST':
		if action == 'power':
			# Start / stop / restart the Minecraft server process.
			task = request.form.get('task')
			if task == 'start':
				if mcs.start():
					flash('Server started.', 'success')
				else:
					flash('Server already running.', 'info')
			elif task == 'stop':
				if mcs.stop():
					flash('Server stopped.', 'success')
				else:
					flash('Server not running.', 'info')
			elif task == 'restart':
				mcs.stop()
				if mcs.start():
					flash('Server restarted.', 'success')
				else:
					flash('Server did not start.', 'error')
		elif action == 'memory':
			# Accept server memory in MB, bounded to a sane range.
			memory = int(request.form.get('mem', 512))
			if 512 <= memory <= 64000:
				config.update(server_memory=memory)
				flash('Server memory updated. <span class="halflink" onclick="document.getElementById(\'restartform\').submit();">Restart</span> the server to apply your changes.', 'success')
			else:
				flash('Memory value out of range: %d.' % memory, 'error')
		elif action == 'autos':
			# Toggle autostart; checkbox sends 'on' when checked.
			autostart = (request.form.get('auto') == 'on')
			flash('Server autostart %s.' % ('enabled' if autostart else 'disabled'), 'success')
			config.update(server_autostart=bool(autostart))
		elif action == 'update':
			# Replace the server binary, then compare versions to report.
			old_version = mcs.get_version()
			update_server_binary()
			new_version = mcs.get_version()
			if old_version != new_version:
				flash('Server updated. <span class="halflink" onclick="document.getElementById(\'restartform\').submit();">Restart</span> the server to apply your changes.', 'success')
			else:
				flash('Server version unchanged. You already have the current version.', 'info')
		elif action == 'announce':
			# Broadcast an in-game message; newlines are stripped.
			message = request.form.get('message').replace("\n", '')
			try:
				mcs.cmd('say %s' % message)
				flash('Announcement sent.', 'success')
			except mcserver.NotRunning:
				flash('Announcement impossible when server is not running.', 'error')
		elif action == 'timeset':
			# Minecraft in-game time wraps at 24000 ticks.
			newtime = int(request.form.get('time', '1')) % 24000
			try:
				mcs.cmd('time set %d' % newtime)
				flash('Time set to value <i>%s</i>.' % newtime, 'success')
			except mcserver.NotRunning:
				flash('Time setting impossible when server is not running.', 'error')
		return redirect(url_for('server'))
	info = mcs.info()
	mem = config.get('server_memory')
	autos = config.get('server_autostart')
	username = request.args.get('username', 'default')
	mcs.prepare_nbt()
	return render_template('server.html', navigation=get_navi(fname()), info=info, mem=mem, autos=autos, username=username, servertime=mcs.get_time())
コード例 #25
0
ファイル: main.py プロジェクト: hw233/gsdld_pokemon2
 def init_config(self, config_dict):
     """ Initialize the environment: overwrite module-level config values
     with *config_dict*, then refresh the global configuration. """
     import config
     config.__dict__.update(config_dict)
     # Refresh global configuration (log level etc.)
     config.update()
コード例 #26
0
import config

# Site-wide public defaults; merged into the active configuration below.
public_settings = {'number_of_questions_per_page': 100, '...': '...'}

config.update(public_settings)
コード例 #27
0
--- twistedcaldav/__init__.py.orig	2016-01-07 15:18:44 UTC
+++ twistedcaldav/__init__.py
@@ -33,6 +33,10 @@ File.contentTypes = loadMimeTypes(("/etc
 # Register additional WebDAV XML elements
 #
 
+# Make sure the default config is loaded and updated early on to avoid race conditions during startup. (upstream: r15635)
+from twistedcaldav.config import config 
+config.update()
+
 import twistedcaldav.caldavxml
 import twistedcaldav.carddavxml
 import twistedcaldav.mkcolxml
コード例 #28
0
ファイル: __init__.py プロジェクト: petronius/dyssh
        # arguments manually.
        positional = []
        for args, kwargs in ARGS.items():
            if len(args) == 1 and not args[0].startswith('-'):
                positional.append(args[0])
            elif args[0].startswith('-'):
                optparser.add_option(*args, **kwargs)
        options, args = optparser.parse_args()
        # Add these to the options object so that config.update() checks them
        for k, v in zip(positional, args):
            if not hasattr(options, k):
                setattr(options, k, v)
        argv = options

    try:
        config.update(argv)
    except ValueError, e:
        error('',' '.join(e.args))
        error('',__doc__)
        sys.exit(os.EX_USAGE)
        
    try:
        import atexit
        import readline
        histfile = os.path.join(config.get('histfile'))
        try:
            readline.read_history_file(histfile)
        except IOError:
            pass
        atexit.register(readline.write_history_file, histfile)
    except ImportError:
コード例 #29
0
ファイル: start.py プロジェクト: Zhuyike/Ke_scientist
            handlers=route.route_list,
            template_path=os.path.join(os.path.dirname(__file__), "templates"),
            static_path=os.path.join(os.path.dirname(__file__), "static"),
            login_url='/login',
            cookie_secret=self.mongodb['keientist'].cookie_secret.find_one()
            ['key'],
            **app_settings)


if __name__ == "__main__":
    # Console logging with level/time/module:line prefix.
    logging.basicConfig(
        level=logging.DEBUG,
        format=
        '[%(levelname)1.1s %(asctime)s %(module)s:%(lineno)d] %(message)s',
        datefmt='%y%m%d %H:%M:%S')
    logging.info("Keientist: Hello")
    # Command-line flags: debug mode and listen port.
    argp = argparse.ArgumentParser()
    argp.add_argument('--debug', default=1, type=int)
    argp.add_argument('--port', default=32410, type=int)
    args = argp.parse_args()
    # Load the config file, then override the port from the CLI.
    config.load('./server.conf')
    config.update('port', args.port)
    app = RunKeientist(args)
    # xheaders=True: honor X-Real-Ip/X-Forwarded-For from a fronting proxy.
    http_server = tornado.httpserver.HTTPServer(app, xheaders=True)
    http_server.listen(args.port)
    logging.info("Keientist: start service at " + time.ctime() + "\n")
    try:
        tornado.ioloop.IOLoop.instance().start()
    except (KeyboardInterrupt, SystemExit):
        logging.info('Keientist: exit service at {}'.format(time.ctime()))
コード例 #30
0
ファイル: ClassifiedImport.py プロジェクト: genegis/genegis
def main(input_table=None, sr=None, output_loc=None,
    output_gdb=None, output_fc=None, genetic=None,
    identification=None, location=None, other=None,
    mode='toolbox', protected_map=config.protected_columns):
    """Import a classified SRGD table into a new file geodatabase.

    Steps: create the GDB, import and validate the input table, add a
    formatted date column, build an XY event layer from the location
    columns, copy it to a permanent feature class, emit a haplotype
    summary table, and dump the column classification to the shared
    config file for the calling add-in.

    NOTE(review): parameter semantics are inferred from usage below —
    confirm against the toolbox documentation.  Exits the process via
    sys.exit() on any error.
    """
    # set mode based on how script is called.
    settings.mode = mode

    # First, create a geodatabase for all our future results.
    # TODO: can we generate this from a single value?
    gdb_path = os.path.abspath(os.path.join(output_loc, output_gdb + '.gdb'))

    # if the user is calling this from the command-line, they won't have necessarily
    # entered a full path for the FC output. Infer it from the input instead.
    if output_fc.lower().find('gdb') == -1:
        output_fc =  os.path.join(gdb_path, output_fc)

    # check if we received a value spatial reference -- if not, use WGS84.
    if sr in ('', None):
        # default spatial reference can be redefined.
        sr = config.sr

    try:
        # only try to create this GDB if it doesn't already exist.
        if not os.path.exists(gdb_path):
            # Process: Create File GDB
            # SYNTAX: CreateFileGDB_management (out_folder_path, out_name, {out_version})
            arcpy.CreateFileGDB_management(output_loc, output_gdb, "CURRENT")
            utils.msg("File geodatabase successfully created: %s" % gdb_path)
        else:
            utils.msg("File geodatabase already exists, skipping creation.")
    except Exception as e:
        utils.msg("Error creating file geodatabase", mtype='error', exception=e)
        sys.exit()

    # TODO: WE NEED TO DO A FULL CLASSIFICATION OF THE INPUT AND MANUALLY BUILD UP THE LAYER...
    # We'll have two columns per locus, need to import correctly

    # Start things off by importing the table directly. We still need to edit the header
    # because of ArcGIS' restrictions on table names.

    # do we have a text-based file?
    file_type = utils.file_type(input_table)
    if file_type == 'Text':
        # Generate a temporary copy of the input CSV which corrects it for
        # ArcGIS, stripping invalid column label characters.
        data_table = utils.validate_table(input_table)

        # TODO: use field mapping to handle the date-time field?
        utils.protect_columns(data_table, protected_map)
    else:
        data_table = input_table

    # write out our table, after additional validation.
    try:
        arcpy.env.overwriteOutput = settings.overwrite

        # generate table name based on input name
        (label, ext) = os.path.splitext(os.path.basename(input_table))

        # Validate label will produce a valid table name from our input file
        validated_label = arcpy.ValidateTableName(label)

        # write out our filtered table to ArcGIS
        arcpy.TableToTable_conversion(data_table, gdb_path, validated_label)

        if file_type == 'Text':
            # Delete the temporary table with validated names;
            # temp file is stored in the same spot as the original.
            temp_dir = os.path.dirname(input_table)
            temp_path = os.path.join(temp_dir, data_table)
            os.remove(temp_path)

    except Exception as e:
        utils.msg("Error converting table %s to GDB" % input_table, mtype='error', exception=e)
        sys.exit()

    input_csv = os.path.join(gdb_path, validated_label)
    utils.msg("Table successfully imported: \n %s" % input_csv)
    fields = [f.name.lower() for f in arcpy.ListFields(input_csv)]

    # intially, our date column is imported as text to prevent ArcGIS
    # from inadvertently munging it. Add a formatted date column.
    try:
        # TODO: make date field defined elsewhere.
        input_time_field = "Date_Time"
        field_name = 'Date_formatted'
        expression = 'formatDate(!{input_time_field}!)'.format(
            input_time_field=input_time_field)
        # Python snippet executed by ArcGIS' field calculator, not here.
        code_block = """
import dateutil.parser
def formatDate(input_date):
    parsed_date = dateutil.parser.parse(input_date)
    return parsed_date.strftime("%m/%d/%Y %H:%M:%S")"""
        # check if a formatted date field exists; if so skip this step
        if field_name.lower() not in fields:
            arcpy.AddField_management(input_csv, field_name, 'DATE')
            arcpy.CalculateField_management(input_csv, field_name, expression, "PYTHON_9.3", code_block)
            utils.msg("Added a formatted date field: {field_name}.".format(field_name=field_name))
    except Exception as e:
        utils.msg("Error parsing date information", mtype='error', exception=e)
        sys.exit()

    # coordinate columns
    x = y = None

    # Convert the table to a temporary spatial feature
    try:
        if location is None:
            raise Exception("Required location columns not set.")

        # A temporary XY Layer needed to create the feature class.
        # NOTE: This table is deleted when the script finishes
        temporary_layer = input_csv + '_xy_temp'

        # 'location', ArcGIS passes semicolon separated values
        loc_parts = location.split(";")

        # TODO: ArcGIS doesn't preserve order; do we need separate fields for these? or some other approach?
        if loc_parts[0].lower() in ['x', 'longitude', 'lon']:
            (x, y) = loc_parts[:2]
        else:
            (y, x) = loc_parts[:2]

        # Process: Make XY Event Layer.  This layer is temporary and will be
        # deleted upon script completion.
        # SYNTAX: arcpy.MakeXYEventLayer_management(table, in_x_field,
        #           in_y_field, out_layer, {spatial_reference}, {in_z_field})
        arcpy.MakeXYEventLayer_management(input_csv, x, y, temporary_layer, sr)
    except Exception as e:
        utils.msg("Error making XY Event Layer", mtype='error', exception=e)
        sys.exit()

    utils.msg("XY event layer successfully created.")


    # Copy our features to a permanent layer
    try:
        # for this step, overwrite any existing results
        arcpy.env.overwriteOutput = True

        # Process: Copy Features
        # SYNTAX: CopyFeatures_management (in_features, out_feature_class, {config_keyword}, {spatial_grid_1}, {spatial_grid_2}, {spatial_grid_3})
        arcpy.CopyFeatures_management(temporary_layer, output_fc, "", "0", "0", "0")
        utils.msg("Features succesfully created: \n %s" % output_fc)

    except Exception as e:
        utils.msg("Error copying features to a feature class", mtype='error', exception=e)
        sys.exit()

    utils.msg("Feature Class successfully created, your SRGD file has been imported!")

    try:
        haplotype_table = os.path.join(gdb_path, "{}_{}".format(validated_label, 'Haplotypes'))

        # look up our haplotype data
        haplotypes = utils.Haplotype(output_fc)

        # create a dictionary for inserting records
        dts = {'names': ('code', 'haplotype', 'count'),
                       'formats': (numpy.uint16, 'S6', numpy.uint8)}

        # create a numpy formatted structure from this data
        array = numpy.rec.fromrecords(haplotypes.indexed, dtype=dts)

        # output the new table
        arcpy.da.NumPyArrayToTable(array, haplotype_table)

        utils.msg("Haplotype table created: \n {}".format(haplotype_table))

    except Exception as e:
        utils.msg("Error creating supplemental haplotype table", mtype='error', exception=e)
        sys.exit()



    # Because we can't pass around objects between this process and the calling
    # addin environment, dump out the settings to our shared configuration file.
    try:
        config.update('fc_path', output_fc.strip())
        config.update('x_coord', x)
        config.update('y_coord', y)

        var_types = {
                'identification': identification,
                'genetic': genetic,
                'location': location,
                'other': other
        }

        if identification is None:
            raise Exception("Required Identification columns not entered.")

        # the first ID field should be used as the default key.
        id_cols = identification.split(";")
        id_field = id_cols[0]
        for (i, col) in enumerate(id_cols):
            # FIXME this will always set individual_id to the primary key.
            if col.lower() == 'individual_id':
                id_field = id_cols[i]
        config.update('id_field', id_field)

        for (var, val) in var_types.items():
            if val is None:
                val = ''
            config.update('%s_columns' % var, val.strip())

    except Exception as e:
        msg = "Error creating output configuration file: %s" % config.config_path
        utils.msg(msg, mtype='error', exception=e)
        sys.exit()

    # clean up: remove intermediate steps.
    try:
        arcpy.Delete_management(temporary_layer)
    except Exception as e:
        utils.msg("Unable to delete temporary layer", mtype='error', exception=e)
        sys.exit()
コード例 #31
0
ファイル: ClassifiedImport.py プロジェクト: UGAROY/genegis
def main(input_table=None,
         sr=None,
         output_loc=None,
         output_gdb=None,
         output_fc=None,
         genetic=None,
         identification=None,
         location=None,
         other=None,
         mode='toolbox',
         protected_map=config.protected_columns):
    """Import a classified SRGD table into a new file geodatabase feature class.

    Pipeline (each stage calls sys.exit() via utils.msg on failure):
      1. Create the output file geodatabase if it does not exist.
      2. Validate/copy the input table into the GDB (text inputs are first
         sanitized via utils.validate_table / utils.protect_columns).
      3. Add a formatted DATE column parsed from the text 'Date_Time' field.
      4. Build a temporary XY event layer from the 'location' columns and
         copy it to the permanent feature class `output_fc`.
      5. Write a supplemental haplotype table next to the feature class.
      6. Persist column classifications and paths to the shared config file
         so the calling add-in can read them back.
      7. Delete the temporary XY event layer.

    Parameters:
      input_table: path to the source SRGD table (CSV/text or GDB table).
      sr: spatial reference; falls back to config.sr when empty/None.
      output_loc: folder in which the file geodatabase is created.
      output_gdb: geodatabase name (without the '.gdb' extension).
      output_fc: output feature class name or full path.
      genetic, identification, location, other: semicolon-separated column
        lists classifying the input fields; 'identification' and 'location'
        are required (an exception is raised when they are None).
      mode: execution context ('toolbox' or command-line); stored in settings.
      protected_map: column-protection mapping passed to utils.protect_columns.
        NOTE(review): default is evaluated once at import time from
        config.protected_columns — later config changes won't be seen here.

    Side effects: creates a GDB, tables and a feature class on disk; mutates
    the shared config file; may delete a temporary CSV copy; exits the
    process on any error.
    """

    # set mode based on how script is called.
    settings.mode = mode

    # First, create a geodatabase for all our future results.
    # TODO: can we generate this from a single value?
    gdb_path = os.path.abspath(os.path.join(output_loc, output_gdb + '.gdb'))

    # if the user is calling this from the command-line, they won't have necessarily
    # entered a full path for the FC output. Infer it from the input instead.
    # NOTE(review): a substring test — any path containing 'gdb' anywhere
    # (e.g. a folder named 'gdb_backups') skips the join; verify intent.
    if output_fc.lower().find('gdb') == -1:
        output_fc = os.path.join(gdb_path, output_fc)

    # check if we received a value spatial reference -- if not, use WGS84.
    if sr in ('', None):
        # default spatial reference can be redefined.
        sr = config.sr

    try:
        # only try to create this GDB if it doesn't already exist.
        if not os.path.exists(gdb_path):
            # Process: Create File GDB
            # SYNTAX: CreateFileGDB_management (out_folder_path, out_name, {out_version})
            arcpy.CreateFileGDB_management(output_loc, output_gdb, "CURRENT")
            utils.msg("File geodatabase successfully created: %s" % gdb_path)
        else:
            utils.msg("File geodatabase already exists, skipping creation.")
    except Exception as e:
        utils.msg("Error creating file geodatabase",
                  mtype='error',
                  exception=e)
        sys.exit()

    # TODO: WE NEED TO DO A FULL CLASSIFICATION OF THE INPUT AND MANUALLY BUILD UP THE LAYER...
    # We'll have two columns per locus, need to import correctly

    # Start things off by importing the table directly. We still need to edit the header
    # because of ArcGIS' restrictions on table names.

    # do we have a text-based file?
    file_type = utils.file_type(input_table)
    if file_type == 'Text':
        # Generate a temporary copy of the input CSV which corrects it for
        # ArcGIS, stripping invalid column label characters.
        data_table = utils.validate_table(input_table)

        # TODO: use field mapping to handle the date-time field?
        utils.protect_columns(data_table, protected_map)
    else:
        # non-text inputs (e.g. GDB tables) are imported as-is.
        data_table = input_table

    # write out our table, after additional validation.
    try:
        arcpy.env.overwriteOutput = settings.overwrite

        # generate table name based on input name
        (label, ext) = os.path.splitext(os.path.basename(input_table))

        # Validate label will produce a valid table name from our input file
        validated_label = arcpy.ValidateTableName(label)

        # write out our filtered table to ArcGIS
        arcpy.TableToTable_conversion(data_table, gdb_path, validated_label)

        if file_type == 'Text':
            # Delete the temporary table with validated names;
            # temp file is stored in the same spot as the original.
            # NOTE(review): assumes validate_table returned a bare filename,
            # not a full path — confirm, otherwise the join is redundant/wrong.
            temp_dir = os.path.dirname(input_table)
            temp_path = os.path.join(temp_dir, data_table)
            os.remove(temp_path)

    except Exception as e:
        utils.msg("Error converting table %s to GDB" % input_table,
                  mtype='error',
                  exception=e)
        sys.exit()

    # the imported table now lives inside the GDB under its validated name.
    input_csv = os.path.join(gdb_path, validated_label)
    utils.msg("Table successfully imported: \n %s" % input_csv)
    # lower-cased field names, used below for case-insensitive membership tests.
    fields = [f.name.lower() for f in arcpy.ListFields(input_csv)]

    # intially, our date column is imported as text to prevent ArcGIS
    # from inadvertently munging it. Add a formatted date column.
    try:
        # TODO: make date field defined elsewhere.
        input_time_field = "Date_Time"
        field_name = 'Date_formatted'
        # CalculateField expression; formatDate comes from the code block below.
        expression = 'formatDate(!{input_time_field}!)'.format(
            input_time_field=input_time_field)
        code_block = """
import dateutil.parser
def formatDate(input_date):
    parsed_date = dateutil.parser.parse(input_date)
    return parsed_date.strftime("%m/%d/%Y %H:%M:%S")"""
        # check if a formatted date field exists; if so skip this step
        if field_name.lower() not in fields:
            arcpy.AddField_management(input_csv, field_name, 'DATE')
            arcpy.CalculateField_management(input_csv, field_name, expression,
                                            "PYTHON_9.3", code_block)
            utils.msg("Added a formatted date field: {field_name}.".format(
                field_name=field_name))
    except Exception as e:
        utils.msg("Error parsing date information", mtype='error', exception=e)
        sys.exit()

    # coordinate columns; resolved below from the 'location' parameter and
    # persisted to config near the end of this function.
    x = y = None

    # Convert the table to a temporary spatial feature
    try:
        if location is None:
            raise Exception("Required location columns not set.")

        # A temporary XY Layer needed to create the feature class.
        # NOTE: This table is deleted when the script finishes
        temporary_layer = input_csv + '_xy_temp'

        # 'location', ArcGIS passes semicolon separated values
        loc_parts = location.split(";")

        # TODO: ArcGIS doesn't preserve order; do we need separate fields for these? or some other approach?
        # Heuristic: if the first column name looks like a longitude, take
        # (x, y) in given order, otherwise assume (y, x).
        if loc_parts[0].lower() in ['x', 'longitude', 'lon']:
            (x, y) = loc_parts[:2]
        else:
            (y, x) = loc_parts[:2]

        # Process: Make XY Event Layer.  This layer is temporary and will be
        # deleted upon script completion.
        # SYNTAX: arcpy.MakeXYEventLayer_management(table, in_x_field,
        #           in_y_field, out_layer, {spatial_reference}, {in_z_field})
        arcpy.MakeXYEventLayer_management(input_csv, x, y, temporary_layer, sr)
    except Exception as e:
        utils.msg("Error making XY Event Layer", mtype='error', exception=e)
        sys.exit()

    utils.msg("XY event layer successfully created.")

    # Copy our features to a permanent layer
    try:
        # for this step, overwrite any existing results
        arcpy.env.overwriteOutput = True

        # Process: Copy Features
        # SYNTAX: CopyFeatures_management (in_features, out_feature_class, {config_keyword}, {spatial_grid_1}, {spatial_grid_2}, {spatial_grid_3})
        arcpy.CopyFeatures_management(temporary_layer, output_fc, "", "0", "0",
                                      "0")
        utils.msg("Features succesfully created: \n %s" % output_fc)

    except Exception as e:
        utils.msg("Error copying features to a feature class",
                  mtype='error',
                  exception=e)
        sys.exit()

    utils.msg(
        "Feature Class successfully created, your SRGD file has been imported!"
    )

    # Build the supplemental haplotype lookup table alongside the main table.
    try:
        haplotype_table = os.path.join(
            gdb_path, "{}_{}".format(validated_label, 'Haplotypes'))

        # look up our haplotype data
        haplotypes = utils.Haplotype(output_fc)

        # create a dictionary for inserting records
        # NOTE(review): uint16/uint8 cap code at 65535 and count at 255 —
        # confirm these bounds hold for large datasets.
        dts = {
            'names': ('code', 'haplotype', 'count'),
            'formats': (numpy.uint16, 'S6', numpy.uint8)
        }

        # create a numpy formatted structure from this data
        array = numpy.rec.fromrecords(haplotypes.indexed, dtype=dts)

        # output the new table
        arcpy.da.NumPyArrayToTable(array, haplotype_table)

        utils.msg("Haplotype table created: \n {}".format(haplotype_table))

    except Exception as e:
        utils.msg("Error creating supplemental haplotype table",
                  mtype='error',
                  exception=e)
        sys.exit()

    # Because we can't pass around objects between this process and the calling
    # addin environment, dump out the settings to our shared configuration file.
    try:
        config.update('fc_path', output_fc.strip())
        config.update('x_coord', x)
        config.update('y_coord', y)

        # column classifications keyed by the config-entry prefix used below.
        var_types = {
            'identification': identification,
            'genetic': genetic,
            'location': location,
            'other': other
        }

        if identification is None:
            raise Exception("Required Identification columns not entered.")

        # the first ID field should be used as the default key.
        id_cols = identification.split(";")
        id_field = id_cols[0]
        for (i, col) in enumerate(id_cols):
            # FIXME this will always set individual_id to the primary key.
            if col.lower() == 'individual_id':
                id_field = id_cols[i]
        config.update('id_field', id_field)

        # persist each classification list; None becomes an empty string.
        for (var, val) in var_types.items():
            if val is None:
                val = ''
            config.update('%s_columns' % var, val.strip())

    except Exception as e:
        msg = "Error creating output configuration file: %s" % config.config_path
        utils.msg(msg, mtype='error', exception=e)
        sys.exit()

    # clean up: remove intermediate steps.
    try:
        arcpy.Delete_management(temporary_layer)
    except Exception as e:
        utils.msg("Unable to delete temporary layer",
                  mtype='error',
                  exception=e)
        sys.exit()
コード例 #32
0
         Player2.moveup(1)
     if keys[ord('s')]:
         Player2.movedown(1)
 # starts with game logic here
 # first, the ocean and and shores are drawn
 screen.fill(config.blue)
 pygame.draw.rect(screen, config.green, (0, 0, 700, 40), 0)
 pygame.draw.rect(screen, config.green, (0, 460, 700, 40), 0)
 pygame.draw.rect(screen, config.green, (0, 92, 700, 40), 0)
 pygame.draw.rect(screen, config.green, (0, 184, 700, 40), 0)
 pygame.draw.rect(screen, config.green, (0, 276, 700, 40), 0)
 pygame.draw.rect(screen, config.green, (0, 368, 700, 40), 0)
 # if player 1 wins, increase speed for player 1
 # if player 2 wins, increase speed for player 2
 if cp == 1:
     config.update(Obstacle3, speed11)
     config.update(Obstacle4, speed12)
 if cp == 2:
     config.update(Obstacle3, speed21)
     config.update(Obstacle4, speed22)
 # if any player collects treasure, give them 20 points
 if (Player1.rect.x > 340 and Player1.rect.x < 380 and
     Player1.rect.y > 184 and Player1.rect.y < 224):
         if tc1 == 0:
             score1 += 20
             tc1 = 1
         Treasure.rect.x = 1000
         Treasure.rect.y = 1000
 if (Player2.rect.x > 340 and Player2.rect.x < 380 and
     Player2.rect.y > 184 and Player2.rect.y < 224):
         if tc2 == 0:
import config

# Per-environment database credentials, merged into the shared configuration
# at import time via config.update() below.
# NOTE(review): the '******' / '?' values are placeholders — this file is a
# template meant to be filled in with real credentials per deployment and
# kept out of version control; confirm against project docs.
private_settings = {
    'db_settings': {
        # local development database
        'dev': {
            'db_name': 'your_db_name',
            'host': 'localhost',
            'user': '******',
            'password': '******'
        },
        # test environment (values not yet filled in)
        'test': {
            'db_name': '?',
            'host': '?',
            'user': '******',
            'password': '******'
        },
        # production environment (values not yet filled in)
        'prod': {
            'db_name': '?',
            'host': '?',
            'user': '******',
            'password': '******'
        }
    }
}

# Side effect on import: push these settings into the global config object.
config.update(private_settings)
コード例 #34
0
ファイル: admin.py プロジェクト: asdil12/mcweb
def set(name, password):
	"""Create or update the admin account `name` with the given password.

	The password is stored hashed (via the module-level _hash helper), and
	the whole admin_accounts mapping is written back through config.update.
	NOTE(review): this function name shadows the `set` builtin — presumably
	intentional for the module's CLI/API surface; confirm before renaming.
	"""
	admin_accounts = config.get('admin_accounts')
	admin_accounts[name] = _hash(password)
	config.update(admin_accounts=admin_accounts)