Code Example #1
File: depot.py  Project: freemind/Djl
    def run(self):
        '''Write the files...'''
        if self.methode == 1:
            import cPickle
            rep = config(info=2) + '/' + config(info=14) + '/'
            fichier_liste_jeux = rep + 'liste_jeux.cPickle'

            fichier_liste_jeux = open(fichier_liste_jeux, 'wb')
            cPickle.dump(self.liste,  fichier_liste_jeux,  protocol=2)
            fichier_liste_jeux.close()
        else:
            rep = config(info=2) + '/' + config(info=14) + '/'
            fichier_liste_jeux = rep + 'liste_jeux.txt'
            
            #Open the cache file (for writing)
            fichier_liste_jeux = open(fichier_liste_jeux, 'w')
            
            #Iterate over every entry in the repository...
            for i in range(len(self.liste)):
                ligne = ""
                #Iterate over every element of the entry...
                for x in range(len(self.liste[i]['value'][0])):
                    info = self.liste[i]['value'][0][x]['value']
                    if x == 0:
                        ligne = info
                    else:
                        ligne = ligne + ";" + info
                    
                #print ligne
                if i > 0:
                    fichier_liste_jeux.write("\n")
                fichier_liste_jeux.write(ligne)
            fichier_liste_jeux.close()
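
For context, a minimal read-back sketch for the cache written above (a hypothetical helper, not part of the project; Python 2, since the example uses cPickle). The rep argument stands for the same directory built from config(info=2) and config(info=14):

import cPickle

def load_liste_jeux(rep):
    # Read back the pickled game list written by run() above (protocol 2).
    with open(rep + 'liste_jeux.cPickle', 'rb') as f:
        return cPickle.load(f)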
Code Example #2
File: dialog.py  Project: ak1394/bookshelf
    def __init__(self, parent):
        JDialog.__init__(self, parent)
        e = c.resource('entry')

        self.language = PyLanguageComboBox(c.config('SUPPORTED_LOCALES'), Locale.getDefault())
        self.language.set_selection(c.config('DEFAULT_TEXT_LANGUAGE'))
        
        self.encoding = PyComboBox(c.SUPPORTED_ENCODINGS)

        self.para_start = PyComboBox([(x, x) for x in range(11)])
        self.para_start.set_selection(c.config('DEFAULT_PARAGRAPH_START'))

        main_panel = PyGridBagPanel(title='')
        main_panel.add(JLabel(e['language']), gridx=0, gridy=0, anchor='LINE_START', weightx=0.5)
        main_panel.add(self.language, gridx=1, gridy=0, anchor='LINE_END', weightx=0.1, fill='HORIZONTAL')
        main_panel.add(JLabel(e['encoding']), gridx=0, gridy=1, anchor='LINE_START', weightx=0.5)
        main_panel.add(self.encoding, gridx=1, gridy=1, anchor='LINE_END', weightx=0.1, fill='HORIZONTAL')
        main_panel.add(JLabel(e['source_para_start']), gridx=0, gridy=2, anchor='LINE_START', weightx=0.5)
        main_panel.add(self.para_start, gridx=1, gridy=2, anchor='LINE_END', weightx=0.1, fill='HORIZONTAL')
        main_panel.setPreferredSize(Dimension(300, 70))

        button_panel=PyPanel([JButton(_d['save'], actionPerformed=self.action_ok), JButton(_d['discard'], actionPerformed=self.action_cancel)])
        
        panel=PyGridBagPanel(title='')
        panel.add(main_panel)
        panel.add(button_panel, gridy=1)        

        self.title = _d['text_input_title']
        self.contentPane.add(panel)
Code Example #3
File: auth.py  Project: andrew-morris/docker-kippo2
    def checkUserPass(self, theusername, thepassword, ip):
        #  UserDB is the default auth_class
        authname = UserDB
        parameters = None

        # Is the auth_class defined in the config file?
        if config().has_option('honeypot', 'auth_class'):
            authclass = config().get('honeypot', 'auth_class')

            # Check if authclass exists in this module
            if hasattr(modules[__name__], authclass):
                authname = getattr(modules[__name__], authclass)

                # Are there auth_class parameters?
                if config().has_option('honeypot', 'auth_class_parameters'):
                    parameters = config().get('honeypot', 'auth_class_parameters')
            else:
                log.msg('auth_class: %s not found in %s' % (authclass, __name__))

        if parameters:
            theauth = authname(parameters)
        else:
            theauth = authname()

        if theauth.checklogin(theusername, thepassword, ip):
            log.msg(eventid='KIPP0002',
                format='login attempt [%(username)s/%(password)s] succeeded',
                username=theusername, password=thepassword)
            return True
        else:
            log.msg(eventid='KIPP0003',
                format='login attempt [%(username)s/%(password)s] failed',
                username=theusername, password=thepassword)
            return False
Code Example #4
File: connect.py  Project: artekw/node2mqtt
    def __init__(self, taskQ, resultQ, debug=False):
        self._logger = logging.getLogger(__name__)
        multiprocessing.Process.__init__(self)
        self.taskQ = taskQ
        self.resultQ = resultQ
        self.debug = debug
        self.connected = False

        try:
            host = config().get("app", ['serial', 'host'])
            port = config().get("app", ['serial', 'port'])

            if self.debug:
                self._logger.debug('Trying connect to %s:%s' % (host, str(port)))
        except:
            print "Can't read from config."
            sys.exit(3)

        try:
            self.soc = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            self.soc.connect((host, port))
            self.connected = True

            if self.debug:
                self._logger.debug('Connected to %s:%s' % (host, str(port)))

        except socket.error:
            self.connected = False

            self._logger.warning("Can't connect to %s:%s" % (host, str(port)))
            sys.exit(3)
Code Example #5
File: add_game.py  Project: freemind/Djl
    def save(self):
        '''Save the shortcut'''
        #If the user doesn't give a name to the game, it doesn't save:
        if self.game_name.text() == '':
            self.close()
        #Otherwise, save:
        else:
            #Save data in a standard .desktop file:
            if self.file == "":
                file = open(config(info=2) + '/shortcuts/' + unicode(self.nom_game.text()).encode('utf-8').lower() + '.desktop', 'w')
            else:
                #If the shortcut already exists, overwrite it rather than creating a new file
                file = open(config(info=2) + '/shortcuts/' + self.file, 'w')

            file.write('[Desktop Entry]' + '\n')
            file.write('Name=' + self.nom_game.text() + '\n')
            file.write('Icon=' + self.icon.text() + '\n')
            file.write('Exec=' + self.cmd_game.text() + '\n')
            file.write('Path=' + self.rep_game.text() + '\n')
            file.write('Type=Application')
            file.close()
            
            #Create the temporary file that asks the main interface to refresh:
            self.make_update_file()
            #Close the window:
            self.close()
Code Example #6
def connection():
    conn = None

    try:
        # region read connection parameters
        params = config.config('db')
        schema_path = config.config('schema')
        # schema = schema_path['schema']+"""_3dot1_pcornet"""
        schema = [(re.sub('_pedsnet', '', schema_path['schema']) + """_3dot1_pcornet"""),
                  (re.sub('_pedsnet', '', schema_path['schema']) + """_3dot1_start2001_pcornet""")]
        # endregion

        # region connect to the PostgreSQL server
        print('Connecting to the PostgreSQL database...')
        conn = psycopg2.connect(**params)
        # endregion

        # region Check if there is already a configuration file
        # if os.path.isfile(configfile_name):
        # delete the file
        # os.remove(configfile_name)
        # endregion

        # Return the connection together with the list of target schema names
        return conn, schema

    except (Exception, psycopg2.OperationalError) as error:
        print(error)
    except (Exception, psycopg2.DatabaseError) as error:
        print(error)
    except (Exception, psycopg2.ProgrammingError) as error:
        print(error)
    finally:
        if conn is not None:
            conn.close()
            print('Database connection closed.')
Code Example #7
File: pierc.py  Project: dlech/pierc
def main():
    mysql_settings = config.config("mysql_config.txt")
    irc_settings = config.config("irc_config.txt")
    webdav_settings = config.config("webdav_config.txt")
    c = Logger(
        irc_settings["server"],
        int(irc_settings["port"]),
        irc_settings["channel"],
        irc_settings["nick"],
        irc_settings.get("password", None),
        irc_settings.get("username", None),
        irc_settings.get("ircname", None),
        irc_settings.get("topic", None),
        irc_settings.get("localaddress", ""),
        int(irc_settings.get("localport", 0)),
        bool(irc_settings.get("ssl", False)),
        bool(irc_settings.get("ipv6", False)),

        mysql_settings["server"],
        int(mysql_settings["port"]),
        mysql_settings["database"],
        mysql_settings["user"],
        mysql_settings["password"],

        webdav_settings
    )
    c.start()
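
The format of mysql_config.txt / irc_config.txt is not shown in this listing. A minimal sketch of a key=value parser, defined in a module named config, that would satisfy the dict-style access used above (indexing plus .get with defaults) -- an assumption, not the project's actual config module:

def config(path):
    # Hypothetical stand-in for config.config(path): parse "key = value" lines
    # into a plain dict, which is all the call site above relies on.
    settings = {}
    with open(path) as f:
        for line in f:
            line = line.strip()
            if not line or line.startswith('#'):
                continue
            key, _, value = line.partition('=')
            settings[key.strip()] = value.strip()
    return settings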
Code Example #8
File: ui.py  Project: agbiotec/galaxy-tools-vcr
    def __init__(self, src=None):
        self._buffers = []
        self.quiet = self.verbose = self.debugflag = self.tracebackflag = False
        self._reportuntrusted = True
        self._ocfg = config.config() # overlay
        self._tcfg = config.config() # trusted
        self._ucfg = config.config() # untrusted
        self._trustusers = set()
        self._trustgroups = set()

        if src:
            self.fout = src.fout
            self.ferr = src.ferr
            self.fin = src.fin

            self._tcfg = src._tcfg.copy()
            self._ucfg = src._ucfg.copy()
            self._ocfg = src._ocfg.copy()
            self._trustusers = src._trustusers.copy()
            self._trustgroups = src._trustgroups.copy()
            self.environ = src.environ
            self.fixconfig()
        else:
            self.fout = sys.stdout
            self.ferr = sys.stderr
            self.fin = sys.stdin

            # shared read-only environment
            self.environ = os.environ
            # we always trust global config files
            for f in scmutil.rcpath():
                self.readconfig(f, trust=True)
Code Example #9
def setup():
    from config import config 
    with open('loaded_modules', 'wb') as f:
        f.write('foobar')
    
    config().write('channels', 'channel_list', '#channel1')
    config().write('general', 'admin', 'foobar')
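
The write(section, option, value) call above suggests an INI-backed store. A hedged sketch of what such a config class could look like; the class layout and the bot.cfg path are assumptions, not the project's real code:

import ConfigParser  # Python 2, matching the surrounding snippets

class config(object):
    def __init__(self, path='bot.cfg'):  # placeholder path
        self.path = path
        self.parser = ConfigParser.ConfigParser()
        self.parser.read(path)

    def write(self, section, option, value):
        # Persist a single option, creating the section if needed.
        if not self.parser.has_section(section):
            self.parser.add_section(section)
        self.parser.set(section, option, value)
        with open(self.path, 'w') as f:
            self.parser.write(f)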
Code Example #10
def readFile():
    print "---------------------------------------------------------------------"
    print "construct data structure..."
    featureNumber = cf.config("featureNumber")
    labelName = cf.config("labelName")  
    dataset = {"data":[],"labelName":labelName}
    label = []
    for i in range(0,featureNumber):
        label.append([])
        
    ###############################################################
    #### usage:
    #### dataset["data"][label][feature]
    ###############################################################
    for i in range(0,len(labelName)):
        dataset["data"].append(copy.deepcopy(label))

    #### read files
    for idx_label in labelName:        
        #### read csv
        print "import dataset/train/"+labelName[idx_label]+".csv..."
        readTemp = None
        readTemp = csv.reader(open("dataset/train/"+labelName[idx_label]+".csv","rb"))
        for sample in readTemp:
            for i in range(0,len(sample)):
                dataset["data"][idx_label][i].append(float(sample[i]))

    #### return
    print "---------------------------------------------------------------------"
    return dataset
Code Example #11
File: mcstatus.py  Project: Jake0oo0/PythonSkype
def choose(chat, message, args, sender):
    if len(args) == 1:
        if args[0].lower() == '--listen':
            listeners.append(chat.Name)
            conf = config.config()
            config_operators = conf.get('mc_listens', [])
            if chat.Name in config_operators:
                chat.SendMessage("This chat is already listening to Minecraft service statuses.")
                return
            config_operators.append(chat.Name)
            conf['mc_listens'] = config_operators
            config.save(conf)
            return
        elif args[0].lower() == '--unlisten':
            conf = config.config()
            config_operators = conf.get('mc_listens', [])
            if chat.Name not in config_operators:
                chat.SendMessage("This chat is not currently listening to Minecraft service statuses.")
                return
            config_operators.remove(chat.Name)
            conf['mc_listens'] = config_operators
            config.save(conf)
            return
        else:
            service = args[0]
            if service in get_statuses():
                chat.SendMessage(format_status(service))
                return
            else:
                chat.SendMessage("Service not found.")
                return
    chat.SendMessage(format_status())
Code Example #12
File: interface.py  Project: freemind/Djl
    def ch_tab(self):
        '''When the current tab changes...'''
        if self.tabWidget.currentIndex() == 2: #Repository...
            if self.connecte_ws == 0: #If we are not yet connected to the web service, connect now
                #self.Thread_depot()
                self.listWidget.clear()
                if int(config(info=3)) == 1 and QtGui.QSystemTrayIcon.isSystemTrayAvailable():
                    self.menu_lance.clear()
                #Build a fresh list of the installed games
                self.liste_jeux_installe()
                #Same for the shortcuts
                self.liste_raccourcis()
            try:
                self.maj_description()
            except AttributeError:
                pass
            self.widget_liste_jeux.setFocus() #Give focus to the game list
            
        elif self.tabWidget.currentIndex() == 3: #IRC.
            #When the IRC channel is shown, restore the "normal" tab title, which may have been changed when a new message arrived
            self.clignote_IRC = 0
            self.tabWidget.setTabIcon(3, QtGui.QIcon(dossier_racine + '/res/irc_crystal.png'))
            if int(config(info=3)) == 1 and QtGui.QSystemTrayIcon.isSystemTrayAvailable():
                self.tray.setIcon(QtGui.QIcon(dossier_racine + '/icone.png'))
            self.tabWidget.setTabText(3, QtGui.QApplication.translate("MainWindow", _("IRC"), None, QtGui.QApplication.UnicodeUTF8))

            #Give focus to the text field used to send the message:
            self.line_Edit_chat.setFocus()
            
        elif self.tabWidget.currentIndex() == 1: #Main game list.
            #Give focus to the search bar.
            self.recherche_jeu.setFocus()
        elif self.tabWidget.currentIndex() == 4: #Modules
            self.chTabMod()
Code Example #13
File: recorder.py  Project: wschurman/kittenmash
    def start(self):
        val = raw_input("Is this a cat? (enter \"y\" for cat): ")
        if val.lower()[0] == "y":
            self.type = config("type_kitten")
        else:
            self.type = config("type_not_kitten")

        self.root = tk.Tk()

        if self.type == config("type_kitten"):
            print "Please focus GUI and place a cat on the keyboard (esc to exit):"
        else:
            print "Please focus GUI and type something (esc to exit):"

        self.root.bind_all('<Key>', self.key)
        self.root.mainloop()

        c = Clusterer(self.raw_sequence)
        clusters = c.cluster()

        output = open(self.filename, 'wb')
        pickle.dump(self.type, output, -1)
        pickle.dump(clusters, output, -1)
        output.close()

        return clusters
Code Example #14
File: logger_init.py  Project: cai-lw/KBGAN
def logger_init():
    logging.basicConfig(level=logging.DEBUG, format='%(module)15s %(asctime)s %(message)s', datefmt='%H:%M:%S')
    if config().log.to_file:
        log_filename = os.path.join(config().log.dir,
                                    config().log.prefix + datetime.datetime.now().strftime("%m%d%H%M%S"))
        logging.getLogger().addHandler(logging.FileHandler(log_filename))
    if config().log.dump_config:
        dump_config()
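
config() is accessed here through nested attributes (config().log.to_file, .dir, .prefix, .dump_config). A minimal sketch of an object with that shape, with illustrative values only, not the project's real configuration:

from types import SimpleNamespace

def config():
    # Hypothetical shape only; the real project builds this from its own config file.
    return SimpleNamespace(
        log=SimpleNamespace(
            to_file=True,       # also write logs to a file
            dir='logs',         # directory for log files
            prefix='run_',      # filename prefix before the timestamp
            dump_config=False,  # whether logger_init() should call dump_config()
        )
    )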
Code Example #15
def etl_only():
    schema_path = config.config('schema')
    schema = re.sub('_pedsnet', '', schema_path['schema'])

    query.get_etl_ready(schema)
    # subprocess.call("ls -la", shell=True)  stdout=subprocess.PIPE,

    filelist = glob.glob(os.path.join(etl_dir, '*.sql'))
    for infile in sorted(filelist):
        args = infile

        print 'starting ETL \t:' + datetime.datetime.fromtimestamp(time.time()).strftime('%Y-%m-%d %H:%M:%S') + "\n"
        proc = subprocess.Popen(['bash_script/etl_bash.sh', args], stderr=subprocess.STDOUT)
        output, error = proc.communicate()

        if output:
            with open("logs/log_file.log", "a") as logfile:
                logfile.write("\n" + datetime.datetime.fromtimestamp(time.time()).strftime('%Y-%m-%d %H:%M:%S') + "\n")
                logfile.write(output)
        if error:
            with open("logs/log_file.log", "a") as logfile:
                logfile.write("\n" + datetime.datetime.fromtimestamp(time.time()).strftime('%Y-%m-%d %H:%M:%S') + "\n")
                logfile.write(error)

    # create the upper case views
    conn = None
    try:
        # region read connection parameters
        params = config.config('db')
        schema_path = config.config('schema')
        # schema = schema_path['schema']+"""_3dot1_pcornet"""
        schema = [(re.sub('_pedsnet', '', schema_path['schema']) + """_3dot1_pcornet"""),
                  (re.sub('_pedsnet', '', schema_path['schema']) + """_3dot1_start2001_pcornet""")]
        # endregion

        # region connect to the PostgreSQL server
        print('Connecting to the PostgreSQL database...')
        conn = psycopg2.connect(**params)
        # endregion
        cur = conn.cursor()
        cur.execute(open(view, "r").read())
        conn.commit()
        for schemas in schema:
            cur.execute("""select count(*) from capitalview(\'""" + params[1] + """\',\'""" + schemas + """\')""")
            conn.commit()
        cur.close()
    except (Exception, psycopg2.OperationalError) as error:
        print(error)
    except (Exception, psycopg2.DatabaseError) as error:
        print(error)
    except (Exception, psycopg2.ProgrammingError) as error:
        print(error)
    finally:
        if conn is not None:
            conn.close()
            print('Database connection closed.')
    print 'ETL is complete'
Code Example #16
def test_part():
    from submodules.admin_part import admin_part
    from config import config 
    config().write('channels', 'channel_list', '#channel1, #channel2')
    assert admin_part().handler(['#channel1', '#channel2', '#channel3'], {'name': 'foobar'}) == \
            [('send_data', 'PART #channel1'),
             ('send_data', 'PRIVMSG foobar :Parted channel: #channel1'),
             ('send_data', 'PART #channel2'),
             ('send_data', 'PRIVMSG foobar :Parted channel: #channel2'),
             ('send_data', 'PRIVMSG foobar :#channel3 is not a channel we are in (hopefully)')]
Code Example #17
def wikipedia_view():
    wikipedia_zim_dir = config().get('ZIM', 'wikipedia_zim_dir')
    library_file = config().get('ZIM', 'kiwix_library_file')
    old_library_file = config().get('ZIM', 'old_kiwix_library_file')
    # Old location before being moved, for backwards compatibility
    if not os.path.exists(library_file):
        logger.info("Kiwix library file not found at: %s, using old location: %s" % (library_file, old_library_file))
        library_file = old_library_file
    langs = organize_books_by_language(glob(os.path.join(wikipedia_zim_dir, "*.zim")), library_file)
    return render_template('wikipedia_index.html', languages=langs)
Code Example #18
File: depot.py  Project: freemind/Djl
 def run(self):
     '''Download the images...'''
     rep = config(info=2) + '/' + config(info=14) + '/imgs/'
     lien = "http://djl.jeuxlinux.fr/images/"
     for i in range(len(self.liste)):
         type_= self.liste[i][0]['value'][0][0]['value']
         if int(type_) == 1:
             nom = self.liste[i][0]['value'][0][1]['value']
             if not os.path.exists(rep+nom):
                 urllib.urlretrieve(lien+nom, rep+nom,  reporthook=None)
Code Example #19
File: custom_install.py  Project: adityaup22/Cloop
def main():
	d=FeildStorage()
	aip.main()
	f=open("aip.txt")
	ip=f.read().split()
	f.close()
	nnip=d.getvalue('nn')
	jtip=d.getvalue('jt')
	count=0
	for i in ip:
		if i==nnip:
			swap(ip[count],ip[0])
		elif i==jtip:
			swap(ip[count],ip[1])
		else:
			pass
		count += 1	
	
	
	config.config(ip[0],ip[1])
	count=0
	inst=[]
	for i in ip:
		t=threading.Thread(target=install,args=(i,))
		inst.append(t)
		t.start()
	for i in inst:
		i.join()

	abc=[]
	for i in ip:
		if count == 0:
			nn(i)
		elif count == 1:
			jt(i)
		else:
			t=threading.Thread(target=dntt,args=(i,))
			t.start()
			abc.append(t)
		count += 1
	for i in abc:
		i.join()

	print "\n\n\t\t\tSETUP COMPLETE"
	print("Namenode is "+ip[0])
	print("Jobtracker is "+ip[1])
	out("clear")
Code Example #20
File: application.py  Project: ak1394/bookshelf
def main():
    load('bookshelf.conf')
    if config('LOCALE') is not None and config('LOCALE') != '':
        Locale.setDefault(Locale(config('LOCALE')))
    from window import BookshelfView
    from logic import Bookshelf
    UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName())
    view = BookshelfView(Bookshelf())
    screenSize = Toolkit.getDefaultToolkit().getScreenSize();
    view.setLocation(screenSize.width/5,  screenSize.height/5)
    view.setVisible(1)
Code Example #21
def init_db():
    global is_init
    global flask_app
    if not is_init:
        if flask_app is None:
            raise Exception("init_db called when flask app not set")
        # set global config variables referenced by SQLAlchemy
        flask_app.config['SQLALCHEMY_ECHO'] = config().getboolean('GUTENBERG', 'sqlalchemy_echo')
        database_path = config().get_path('GUTENBERG', 'sqlalchemy_database_uri')
        flask_app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///' + os.path.abspath(database_path)
        db.init_app(flask_app)
        is_init = True
Code Example #22
File: logic.py  Project: ak1394/bookshelf
 def save(self, f):
     # TODO check if previews been generated / ie book rendered
     builder = MidletBuilder(self.reader_package, self.platform)
     builder.language = config('MIDLET_LOCALE')
     for book in self.books:
         builder.add(book.book)
     # add plugins
     for plugin in config('PLUGIN_LIST'):
         builder.addPlugin(plugin)
     # always add pager
     builder.addPlugin('pager')
     builder.write(f, config('MAKE_FOLDER'))
Code Example #23
File: dialog.py  Project: ak1394/bookshelf
    def __init__(self, parent):
        JDialog.__init__(self, parent)
        self._parent = parent
        e = c.resource('entry')

        self.open_file_chooser = JFileChooser(c.config('DEFAULT_OPEN_DIRECTORY'))
        self.open_file_chooser.multiSelectionEnabled = 0
        self.open_file_chooser.fileSelectionMode = JFileChooser.DIRECTORIES_ONLY
        self.open_file_dir = c.config('DEFAULT_OPEN_DIRECTORY')
        self.open_file_label = JLabel(self.open_file_dir)
        
        self.save_file_chooser = JFileChooser(c.config('DEFAULT_SAVE_DIRECTORY'))
        self.save_file_chooser.multiSelectionEnabled = 0
        self.save_file_chooser.fileSelectionMode = JFileChooser.DIRECTORIES_ONLY
        self.save_file_dir = c.config('DEFAULT_SAVE_DIRECTORY')
        self.save_file_label = JLabel(self.save_file_dir)

        self.language = PyLanguageComboBox(c.config('SUPPORTED_LOCALES'), Locale.getDefault())
        self.language.set_selection(Locale.getDefault().language)

        if c.config('MIDLET_LOCALE') is not None and c.config('MIDLET_LOCALE') != '':
            midlet_locale = Locale(c.config('MIDLET_LOCALE'))
        else:
            midlet_locale = Locale.getDefault()
        self.midlet_language = PyLanguageComboBox(c.config('SUPPORTED_LOCALES'))
        self.midlet_language.set_selection(midlet_locale.language)

        main = PyGridBagPanel(title="")
        main.setPreferredSize(Dimension(400, 200))

        lang_panel = PyBorderPanel(west=JLabel(e['language']), east=self.language, title="")
        main.add(lang_panel, gridx=0, gridy=0, fill="HORIZONTAL", anchor="LINE_START", weightx=0.5, weighty=0.5)

        midlet_lang_panel = PyBorderPanel(west=JLabel(_d['midlet_language']), east=self.midlet_language, title="")
        main.add(midlet_lang_panel, gridx=0, gridy=1, fill="HORIZONTAL", anchor="LINE_START", weightx=0.5, weighty=0.5)

        open_panel = PyBorderPanel(west=JLabel(_d['open_dir']), east=JButton(_d['select'], actionPerformed=self.action_set_open), south=self.open_file_label, title="")
        main.add(open_panel, gridx=0, gridy=2, fill="HORIZONTAL", anchor="LINE_START", weightx=0.5, weighty=0.5)

        save_panel = PyBorderPanel(west=JLabel(_d['save_dir']), east=JButton(_d['select'], actionPerformed=self.action_set_save), south=self.save_file_label, title="")
        main.add(save_panel, gridx=0, gridy=3, fill="HORIZONTAL", anchor="LINE_START", weightx=0.5, weighty=0.5)

        self.make_folder = JCheckBox()
        makefolder_panel = PyBorderPanel(west=JLabel(_d['make_dir']), east=self.make_folder, title="")
        main.add(makefolder_panel, gridx=0, gridy=4, fill="HORIZONTAL", anchor="LINE_START", weightx=0.5, weighty=0.5)
        self.make_folder.selected = c.config('MAKE_FOLDER')
        
        button_panel=PyPanel([JButton(_d['save'], actionPerformed=self.action_ok), JButton(_d['discard'], actionPerformed=self.action_cancel)])
        panel=PyBorderPanel(south=button_panel, west=main)

        self.title = _d['interface_title']
        self.contentPane.add(panel)
Code Example #24
File: top_views.py  Project: dionis/internet-in-a-box
def index():
    error = None
    if config().get_knowledge_dir() is None:
        error = _('Could not find knowledge directory containing the Internet-in-a-Box dataset.  ')
        error += _("The configured '<strong>knowledge_dir</strong>' path is '<strong>%(config_dir)s</strong>' ", config_dir=config().get('DEFAULT', 'knowledge_dir'))
        error += _(" and the '<strong>search_for_knowledge_dir</strong>' setting is ")
        if config().getboolean('DEFAULT', 'search_for_knowledge_dir'):
            error += _('<strong>ON</strong>, so all mounted filesystems were checked.')
        else:
            error += _('<strong>OFF</strong>, so other mounted filesystems were NOT checked.')
    if error is None:
        return render_template("home.html")
    else:
        return render_template("home.html", error=Markup(error))
Code Example #25
    def __init__(self,configfile):
        
        self._mqttQueue = Queue.Queue()

        self._configfile = configfile
        self._configAll = config()
        print configfile
        self._configAll.Open(self._configfile)
        self._configGeneral = config(self._configAll.subsection('Config','General'))
#        print "TesT",self._configALL
        self._pidfile = str(self._configGeneral.get(0,'PIDFILE','/var/run/gpio2mqtt.pid'))
        self._loghandle = None
        
        daemon.__init__(self, self._pidfile)  
Code Example #26
File: user.py  Project: jonas-lu/ZhihuCrawler
    def __init__(self, user):
        logger = logging.getLogger(__name__)
        logger.warning("Storing %s(%s), %s followings", user['name'], user['domain'], len(user['followings']))

        rh.add_crawled_user(user['domain'])

        self.followings = user['followings']
        del user['followings']
        self.user = user

        py2neo.authenticate(config('neo4j.server'), config('neo4j.username'), config('neo4j.password'))
        self.g = py2neo.Graph()

        self.src_node = self.merge_node(user)
Code Example #27
File: dialog.py  Project: ak1394/bookshelf
    def __init__(self, parent):
        JDialog.__init__(self, parent)
        self.supported = c.supported()

        brand = c.config('PLATFORM_BRAND')
        model = c.config('PLATFORM_MODEL')
        platform = c.config('PLATFORM_JAR')

        self.allow_events = 1
        
        supported_brands = self.supported.keys()
        supported_brands.sort()
        self.brand_list = brand_list = JList(supported_brands, selectionMode=ListSelectionModel.SINGLE_SELECTION)
        brand_list.addListSelectionListener(ListSelectionListenerProxy(self.action_brand))
        brand_list_pane=JScrollPane(brand_list)
        brand_list_pane.setPreferredSize(Dimension(75, 150))
        brand_panel = PyPanel([brand_list_pane], title=_d['brand'])

        self.model_list_model = DefaultListModel()
        self.model_list = model_list = JList(self.model_list_model, selectionMode=ListSelectionModel.SINGLE_SELECTION)
        model_list.addListSelectionListener(ListSelectionListenerProxy(self.action_model))      
        model_list_pane=JScrollPane(model_list)
        model_list_pane.setPreferredSize(Dimension(75, 150))
        model_panel = PyPanel([model_list_pane], title=_d['model'])

        self.platform_list_model = DefaultListModel()
        self.platform_list = platform_list = JList(self.platform_list_model, selectionMode=ListSelectionModel.SINGLE_SELECTION)
        platform_list.addListSelectionListener(ListSelectionListenerProxy(self.action_platform))      
        platform_list_pane=JScrollPane(platform_list)
        platform_list_pane.setPreferredSize(Dimension(75, 150))
        platform_panel = PyPanel([platform_list_pane], title=_d['platform'])

        self.text = text = JTextArea();
        text.editable = 0
        text.enabled = 0
        text.lineWrap = 1
        text.wrapStyleWord = 1
        text.setPreferredSize(Dimension(200, 150))
        text_panel = PyPanel([text], title=_d['description'])

        all_panel = PyPanel([brand_panel, model_panel, platform_panel, text_panel], title="")

        button_panel=PyPanel([JButton(_d['save'], actionPerformed=self.action_ok), JButton(_d['discard'], actionPerformed=self.action_cancel)])
        panel=PyBorderPanel(south=button_panel, west=all_panel)
        self.title = _d['phone_model_title']
        self.contentPane.add(panel)
        
        brand_list.setSelectedValue(brand, 1)
        model_list.setSelectedValue(model, 1)
        platform_list.setSelectedValue(platform, 1)
Code Example #28
File: cylc_xdot.py  Project: tomgreen66/cylc
 def load_config( self, reload=False ):
     if reload:
         print 'Reloading the suite.rc file.'
         try:
             self.suiterc = config.config( self.suite, self.file, collapsed=self.suiterc.closed_families )
         except:
             print >> sys.stderr, "Failed to reload suite.rc file (parsing error?)."
             return False
     else:
         try:
             self.suiterc = config.config( self.suite, self.file )
         except:
             print >> sys.stderr, "Failed to load suite.rc file (parsing error?)."
             return False
     return True
Code Example #29
File: pierc.py  Project: rnarian/pierc
def main():
	mysql_settings = config.config("mysql_config.txt")
	irc_settings = config.config("irc_config.txt")
	
	c = Logger(
				irc_settings["server"], 
				int(irc_settings["port"]), 
				irc_settings["channel"], 
				irc_settings["nick"],
				mysql_settings["server"],
				int(mysql_settings["port"]),
				mysql_settings["database"],
				mysql_settings["user"],
				mysql_settings["password"] ) 
	c.start()
Code Example #30
File: listspider.py  Project: gukefei/Spider
 def __init__(self, url_id):
     self.database = db()
     sql = 'SELECT * FROM sp_list_urls WHERE ID=%d LIMIT 1' % url_id
     data = self.database.findone(sql)
     self.website_id = data['website_id']
     self.url = data['url']
     self.rule_id = data['rule_id']
     sql = 'SELECT * FROM sp_websites WHERE ID=%d LIMIT 1' % self.website_id
     data = self.database.findone(sql)
     self.currency = data['currency']
     sql = 'SELECT * FROM sp_list_pattern WHERE ID=%d LIMIT 1' % self.rule_id
     data = self.database.findone(sql)
     self.pattern = data['pattern']  # general scraping-scope rule
     self.name_pattern = data['name_pattern']  # product-name scraping rule
     self.price_pattern = data['price_pattern']  # price scraping rule
     self.img_pattern = data['img_pattern']  # image scraping rule
     self.img_pattern_attr = data['img_pattern_attr']  # image attribute-extraction rule
     self.page = data['page']  # page-number variable name
     self.page_step = int(data['page_step'])  # page-number increment
     self.detail_url_pattern = data['url_pattern']  # product detail-page URL match rule
     self.detail_url_pattern_attr = data['url_pattern_attr']  # product detail-page extraction rule
     conf = config()
     self.__phantomjs = conf.phantomjs_path
     self.__list_spide_pages = conf.list_spide_pages
     self.__timewait = conf.timewait
Code Example #31
def create_tables():
    """ create tables in the PostgreSQL database"""
    commands = ("""
          CREATE TABLE log(
                id SERIAL PRIMARY KEY,
                ts TIMESTAMP NOT NULL,
                phrase VARCHAR (128) NOT NULL,
                letters VARCHAR(32) NOT NULL,
                ip VARCHAR(16) NOT NULL,
                browser_string VARCHAR(256) NOT NULL,
                results VARCHAR(64) NOT NULL)""", """
         CREATE TABLE vendors (
                vendor_id SERIAL PRIMARY KEY,
                vendor_name VARCHAR(255) NOT NULL
                )
        """, """ 
        CREATE TABLE parts (
                part_id SERIAL PRIMARY KEY,
                part_name VARCHAR(255) NOT NULL
                )
        """, """
        CREATE TABLE part_drawings (
                part_id INTEGER PRIMARY KEY,
                file_extension VARCHAR(5) NOT NULL,
                drawing_data BYTEA NOT NULL,
                FOREIGN KEY (part_id)
                REFERENCES parts (part_id)
                ON UPDATE CASCADE ON DELETE CASCADE
        )
        """, """
        CREATE TABLE vendor_parts (
                vendor_id INTEGER NOT NULL,
                part_id INTEGER NOT NULL,
                PRIMARY KEY (vendor_id , part_id),
                FOREIGN KEY (vendor_id)
                    REFERENCES vendors (vendor_id)
                    ON UPDATE CASCADE ON DELETE CASCADE,
                FOREIGN KEY (part_id)
                    REFERENCES parts (part_id)
                    ON UPDATE CASCADE ON DELETE CASCADE
        )
        """)
    conn = None
    try:
        # read the connection parameters
        params = config()
        # connect to the PostgreSQL server
        conn = psycopg2.connect(**params)
        cur = conn.cursor()
        # create table one by one
        for command in commands:
            cur.execute(command)
        # close communication with the PostgreSQL database server
        cur.close()
        # commit the changes
        conn.commit()
    except (Exception, psycopg2.DatabaseError) as error:
        print(error)
    finally:
        if conn is not None:
            conn.close()
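
Here config() returns keyword arguments for psycopg2.connect(**params). A common way to implement such a helper, shown as an assumption (the INI filename, section name, and keys may differ in the real project):

from configparser import ConfigParser

def config(filename='database.ini', section='postgresql'):
    # Read one INI section and return it as a dict of connection parameters.
    parser = ConfigParser()
    parser.read(filename)
    if not parser.has_section(section):
        raise Exception('Section %s not found in %s' % (section, filename))
    return dict(parser.items(section))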
Code Example #32
File: trafficfilter.py  Project: kuteng01/xmrmouse
    def getTraffic(self):
        #print ('into getTraffic\n')
        #self.TrafficTimer()
        #item = [src_tag, sp_tag, dst_tag, dp_tag, self.ticketCnt, iptablesObj]
        for ptime, pktdata in self.cap:
            #print "for\n"
            pkt = dpkt.ethernet.Ethernet(pktdata)
            if pkt.data.data.__class__.__name__ != 'TCP':
                continue
            ipsrc_tag = 0
            ipdst_tag = 0
            sport_tag = 0
            dport_tag = 0

            ipdata = pkt.data
            sip = '%d.%d.%d.%d' % tuple(map(ord, list(ipdata.src)))
            dip = '%d.%d.%d.%d' % tuple(map(ord, list(ipdata.dst)))

            tcpdata = pkt.data.data
            sport = tcpdata.sport
            dport = tcpdata.dport

            content = self.findContent

            src_tag = sip
            dst_tag = dip
            sp_tag = str(sport)
            dp_tag = str(dport)
            #if ord(list(ipdata.src)[0]) > ord(list(ipdata.dst)[0]):
                #temp = dst_tag
                #dst_tag = src_tag
                #src_tag = temp
            dowell = 0
            ip = get_host_ip()
            if ip != None:
                if src_tag != ip:
                    temp=dst_tag
                    dst_tag=src_tag
                    src_tag=temp

                    temp=sp_tag
                    sp_tag=dp_tag
                    dp_tag=temp
                dowell = 1
            if dowell != 1:
                if sport < dport:
                    temp = dst_tag
                    dst_tag = src_tag
                    src_tag = temp

                    temp = sp_tag
                    sp_tag = dp_tag
                    dp_tag = temp

            #name = src_tag + '_' + sp_tag + '_' + dst_tag + '_' + dp_tag
            name=dst_tag + '_' + dp_tag

            cg=config.config()
            if dst_tag in cg.ip_list and dp_tag == cg.dport:
                continue
            #print("%s\n" %name)
            if (name) in self.files4out:
                 item = self.files4out[name]
                 item[4] = self.timerout
                 #print(name,item)
            else:
                appdata = tcpdata.data
                if appdata.find(content) == -1:
                    continue
                #print('new item')

                self.proxy._connect(sp_tag, dst_tag, dp_tag, True)
                iptablesObj = iptablesfilter.iptablesfilter('',dst_tag,dp_tag)
                iptablesObj.setIptables()
                item = [src_tag, sp_tag, dst_tag, dp_tag, self.ticketCnt , iptablesObj]
                self.files4out[name] = item

                kill_other(sp_tag)
Code Example #33
File: apiify.py  Project: sgnls/apiify
import expiring_cache
import datetime
import threading
import socketserver
import time
import urllib
import re
import json
import config
import subprocess
import functools
import resource
import pathlib
import code

config = config.config("apiify.yaml")
cacheable = lambda _:True
if config.get('no_caching_this_error'):
    cacheable = lambda x:config['no_caching_this_error'].encode() not in x.lower()


@expiring_cache.expiring_cache(maxsize=config['cached_max_items'], cacheable=cacheable, hours_to_live=config['item_hours_in_cache'])
def exec_command(arguments):
    try:
        if config['block_command_injection']:
            cli = config['base_command'].split()+ arguments.split()
            ph = subprocess.Popen(cli, stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin= subprocess.PIPE)
        else:
            cli = config['base_command']+ " " + arguments
            ph = subprocess.Popen(cli, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin= subprocess.PIPE)
        out,err = ph.communicate()
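
config.config("apiify.yaml") is used as a dict above (config.get(...), config['base_command'], config['cached_max_items']). A minimal sketch of a YAML-backed loader with that behaviour; the use of PyYAML is an assumption:

import yaml  # PyYAML; assumed dependency

def config(path):
    # Load the YAML file into a plain dict (empty dict if the file is empty).
    with open(path) as f:
        return yaml.safe_load(f) or {}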
Code Example #34
File: tasker.py  Project: Thomaxius/faceitapp
import main
import asyncio
import db_endpoints as db
from config import config
import logger
import traceback

log = logger.get("TASKER")

faceit_config = config(section="faceit")['faceit']
CHECK_INTERVAL = int(faceit_config['check_interval_s'])


async def tasker():
    log.info("Starting tasker..")
    while True:
        log.info("Doing tasks..")
        try:
            records = await db.get_all_player_guids()
            for record in records:
                for task in [
                        main.check_for_elo_change,
                        main.check_and_handle_nickname_change,
                        main.check_for_new_matches
                ]:
                    try:
                        await task(record['player_guid'])
                    except Exception as e:
                        log.error(
                            "Error doing task %s, error: %s, trackeback: %s" %
                            (task, e, traceback.format_exc()))
Code Example #35
File: config.py  Project: ebilling/pi-pool-control
    def get(self, name):
        element = self._config
        names = name.split('.')
        for word in names:
            if word in element:
                element = element[word]
            else:
                return None
        if isinstance(element, dict):
            return element
        return str(element)

    def _readConfig(self, filename):
        self._config = {}
        f = open(filename, "r")
        self._config = json.load(f)
        f.close()


if __name__ == "__main__":

    import config

    conf = config.config("config.json")
    capacitance = conf.get("capacitance.gpio.24")

    print "Whole Config:\n" + str(conf._config)

    print "\n\nCapacitance:\n" + capacitance
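
Given how get() above splits dotted names and walks nested dicts, a config.json shaped like the following would satisfy conf.get("capacitance.gpio.24"); the value is illustrative only:

import json

sample = {"capacitance": {"gpio": {"24": "0.47uF"}}}  # placeholder value
with open("config.json", "w") as f:
    json.dump(sample, f, indent=2)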

Code Example #36
File: pg.py  Project: jamesszm/mlsh-adv
        self.logger.info(msg)
        return avg_reward

    def record(self):
        env = gym.make(self.config.env_name)
        env = gym.wrappers.Monitor(
            env,
            self.config.record_path,
            video_callable=lambda x: True,
            resume=True)
        self.evaluate(env, 1)

    def run(self):
        self.initialize()
        if self.config.record:
            self.record()
        self.train()
        if self.config.record:
            self.record()

    def set_seed(self, seed=None):
        self.seed = seed


if __name__ == "__main__":
    print(envs)
    config = config('VanillaPolicyGradient')
    env = gym.make(config.env_name)
    model = PolicyGradient(env, config)
    model.run()
Code Example #37
import serial
import matplotlib.pyplot as plt
import numpy as np
import time
import datetime
import os
import zmq
import config

config = config.config()
(LOC_MIC1, LOC_MIC2, LOC_MIC3, LOC_MIC4) = config.getMicLocs()
SAMPLING_RATE = config.getSamplingRate()
DISTANCE_TEST = config.getTestDistance()
SPEED_SOUND = config.getSpeedSound()


def distanceBetween(l1, l2):
    a1 = np.array(l1)
    a2 = np.array(l2)
    r = a1 - a2
    return np.sqrt(r.dot(r))


def getDataAtTime(t):
    t = 0
    LOC_TEST = (DISTANCE_TEST * np.cos(2 * np.pi * 1.0 / 100 * t),
                DISTANCE_TEST * np.sin(2 * np.pi * 1.0 / 100 * t), 0)
    print LOC_TEST[0], LOC_TEST[1], np.sqrt(LOC_TEST[0]**2 +
                                            LOC_TEST[1]**2), np.arctan(
                                                LOC_TEST[1] / LOC_TEST[0])
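
In this snippet config.config() exposes getter methods. A hypothetical minimal class with that interface, using placeholder values rather than the project's real settings:

class config(object):
    def getMicLocs(self):
        # Four microphone positions (x, y, z); placeholder coordinates.
        return ((0.0, 0.0, 0.0), (1.0, 0.0, 0.0),
                (0.0, 1.0, 0.0), (1.0, 1.0, 0.0))

    def getSamplingRate(self):
        return 44100      # Hz

    def getTestDistance(self):
        return 1.0        # distance used for the test signal

    def getSpeedSound(self):
        return 343.0      # m/s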
Code Example #38
    def eliminarAlbum(self):
        conexion=None
        try:
            params = config()

            #print(params)
            # Connect to the PostgreSQL server
            #print('Conectando a la base de datos PostgreSQL...')
            conexion = psycopg2.connect(**params)

            # Create the cursor
            cur = conexion.cursor()

            # Run a query to verify the connection
            print('La version de PostgreSQL es la:')
            cur.execute('SELECT version()')

            # Se obtienen los resultados
            db_version = cur.fetchone()
            nombre=self.nombreInput.text()
            id=self.id
            if nombre != '':
                #Check that this album exists
                cur.execute("SELECT album.albumid FROM album WHERE album.title = '{0}'".format(nombre))
                IDAlbum=cur.fetchall()
                if(len(IDAlbum)!=0):
                    #If it exists, get its ID and delete it
                    IDoficial=(IDAlbum[0][0])
                    cur.execute("DELETE FROM creador_track WHERE creador_track.trackid IN (SELECT track.trackid FROM track WHERE track.albumid = %s)",(IDoficial,))
                    cur.execute("DELETE FROM playlisttrack WHERE playlisttrack.trackid IN (SELECT track.trackid FROM track WHERE track.albumid = %s)",(IDoficial,))
                    cur.execute("DELETE FROM invoiceline WHERE invoiceline.trackid IN (SELECT track.trackid FROM track WHERE track.albumid = %s)",(IDoficial,))
                    cur.execute("DELETE FROM actividad_track WHERE actividad_track.trackid IN (SELECT track.trackid FROM track WHERE track.albumid = %s)",(IDoficial,))
                    print ("Ya va a llegar")
                    #cur.execute("SELECT track.name FROM track WHERE track.albumid = '{0}'".format(IDoficial))
                    #tracks=cur.fetchall()
                    #for a in tracks :
                        #print (a[0])
                        #cur.execute("""SELECT add_bitacora(%s::numeric, %s::varchar, 3::numeric, 1::numeric ) """, (id, a[0])) 
                    #print ("Sí llego")
                    cur.execute("""UPDATE track set u_deleted=%s, u_updated=%s WHERE track.albumid = %s""", (id,id, IDoficial))
                    cur.execute("DELETE FROM track WHERE track.albumid = %s",(IDoficial,))
                    cur.execute("""UPDATE album set u_deleted=%s, u_updated=%s WHERE album.albumid = %s""", (id, id, IDoficial))
                    cur.execute("DELETE FROM album WHERE album.title = '{0}'".format(nombre))
                    #cur.execute("""SELECT add_bitacora(%s::numeric, %s::varchar, 3::numeric, 2::numeric )""", (id, nombre))
                    conexion.commit()
                    """cur.execute("SELECT * FROM album ORDER BY album.albumid ASC LIMIT 10")
                    # Recorremos los resultados y los mostramos
                    for a,b,c in cur.fetchall() :
                            print(a,b,c)
                    print("--------------------------------------------------")"""
                    addedSong=QMessageBox()
                    addedSong.setIcon(QMessageBox.Information)
                    addedSong.setWindowTitle("Listo")
                    addedSong.setText("Album eliminado exitosamente")
                    addedSong.exec()
                else:
                    #If it does not exist, show an error
                    blank=QMessageBox()
                    blank.setIcon(QMessageBox.Information)
                    blank.setWindowTitle("ERROR")
                    blank.setText("Ese album no existe en la base de datos")
                    blank.exec()                      
            else:
                blank=QMessageBox()
                blank.setIcon(QMessageBox.Information)
                blank.setWindowTitle("INCOMPLETO")
                blank.setText("Por favor ingresa el nombre del album a borrar")
                blank.exec()
        except (Exception, psycopg2.DatabaseError) as error:
            print(error)
        finally:
            if conexion is not None:
                conexion.close()
Code Example #39
File: watcher_xrp.py  Project: kenzaki5/develop
class watcher_xrp:
    config = config()
    API_KEY = config.getApiKey()
    API_SECRET = config.getApiSecret()
    count = int(config.getCount())
    CURRENCY_PAIR = "xrp_jpy"
    sellMean = []
    buyMean = []
    highMean = []
    lowMean = []
    lastMean = []
    sellList = []
    buyList = []
    highList = []
    lowList = []
    lastList = []
    init = 0

    bbservice = bbService(API_KEY, API_SECRET, CURRENCY_PAIR)
    tradeStop = tradeStop()
    slackService = slackService()

    def watch(self):
        res = False
        for num in range(((int(self.count) * 3)) // int(self.count)):
            for num in range(int(self.count)):
                if self.init == 0:
                    self.sellMean = []
                    self.buyMean = []
                    self.highMean = []
                    self.lowMean = []
                    self.lastMean = []
                    self.sellList = []
                    self.buyList = []
                    self.highList = []
                    self.lowList = []
                    self.lastList = []

                ticker = self.getTicker()
                self.sellList.append(float(ticker['sell']))
                self.buyList.append(float(ticker['buy']))
                self.highList.append(float(ticker['high']))
                self.lowList.append(float(ticker['low']))
                self.lastList.append(float(ticker['last']))
                self.init += 1

            self.sellMean.append(mean(self.sellList))
            self.buyMean.append(mean(self.buyList))
            self.highMean.append(mean(self.highList))
            self.lowMean.append(mean(self.lowList))
            self.lastMean.append(mean(self.lastList))

        over0 = self.sellMean[0] > self.buyMean[0]
        print(over0)
        over1 = self.sellMean[1] > self.buyMean[1]
        print(over1)
        over2 = self.sellMean[2] > self.buyMean[2]
        print(over2)
        golden = (self.getTake(over0, over1, over2) == True)
        dead = (self.getTake(over0, over1, over2) == False)
        print("golden {0}".format(golden))
        print("dead {0}".format(dead))

        if golden:
            self.slackService.requestOnSlack("Log : golden {0}".format(golden))
            res = True
        elif dead:
            self.slackService.requestOnSlack("Log : dead {0}".format(dead))
            res = False

        print("finish {0}".format(self.count))
        return res

    def getTicker(self):
        ticker = self.bbservice.getTicker(self.CURRENCY_PAIR)
        sleep(5)
        return ticker

    def getTake(self, a, b, c):
        return (not (a) and b
                and c) or (a and not (b)
                           and c) or (a and b and not (c)) or (a and b and c)

    def calculate_mean(self, data):
        s = sum(data)
        N = len(data)
        mean = s / N

        return mean

    #Compute each value's deviation from the mean
    def find_difference(self, data):
        mean = self.calculate_mean(data)
        diff = []

        for num in data:
            diff.append(num - mean)
        return diff

    def calculate_variance(self, data):
        diff = self.find_difference(data)
        #Square each deviation
        squared_diff = []
        for d in diff:
            squared_diff.append(d**2)

        #Compute the variance
        sum_squared_diff = sum(squared_diff)
        variance = sum_squared_diff / len(data)
        return variance
Code Example #40
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(levelname)s - %(name)s -   %(message)s',
    datefmt='%m/%d/%Y %H:%M:%S',
    handlers=[
        logging.FileHandler(logfile),
        logging.StreamHandler(sys.stdout)
    ])

logger = logging.getLogger()



import config
rc_list=json.loads(config.config(section='rediscluster')['rediscluster'])


rediscluster_client = RedisCluster(startup_nodes=rc_list, decode_responses=True)

import os
from concurrent.futures import ThreadPoolExecutor, as_completed
n_cpus = os.cpu_count()
logger.info(f'Number of CPUs: {n_cpus}')
executor = ThreadPoolExecutor(max_workers=n_cpus)

cwd=Path.cwd()
datapath=cwd.joinpath('./data/')
print(datapath)

import argparse
Code Example #41
            loss = criterion(outputs, targets)
            decoder.zero_grad()
            encoder.zero_grad()
            loss.backward()
            optimizer.step()

            # Print log info
            if i % args.log_step == 0:
                print('Epoch [{}/{}], Step [{}/{}], '
                      'Loss: {:.4f}, Perplexity: {:5.4f}'.format(
                          epoch, args.num_epochs, i, total_step, loss.item(),
                          np.exp(loss.item())))

            # Save the model checkpoints
            if (i + 1) % args.save_step == 0:
                torch.save(
                    decoder.state_dict(),
                    os.path.join(args.model_path,
                                 'decoder-{}-{}.ckpt'.format(epoch + 1,
                                                             i + 1)))
                torch.save(
                    encoder.state_dict(),
                    os.path.join(args.model_path,
                                 'encoder-{}-{}.ckpt'.format(epoch + 1,
                                                             i + 1)))


if __name__ == '__main__':
    args = config()
    main(args)
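
Here config() evidently returns an argparse-style namespace: the training loop reads args.log_step, args.num_epochs, args.save_step and args.model_path. A hedged sketch with illustrative defaults (any other flags the real script defines are omitted):

import argparse

def config():
    parser = argparse.ArgumentParser()
    parser.add_argument('--model_path', type=str, default='models/')
    parser.add_argument('--log_step', type=int, default=10)
    parser.add_argument('--save_step', type=int, default=1000)
    parser.add_argument('--num_epochs', type=int, default=5)
    return parser.parse_args()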
Code Example #42
File: pierc.py  Project: tlan/pierc
                connection.privmsg(self.channel, "Goodbye.")
                self.on_ping(connection, event)
                sys.exit(0)

            if text.split(" ")[1] and text.split(" ")[1] == "ping":
                self.on_ping(connection, event)
                return


def main():
    mysql_settings = config.config("mysql_config.txt")
    irc_settings = config.config("irc_config.txt")

    c = Logger(irc_settings["server"], int(irc_settings["port"]),
               irc_settings["channel"],
               irc_settings["nick"], mysql_settings["server"],
               int(mysql_settings["port"]), mysql_settings["database"],
               mysql_settings["user"], mysql_settings["password"])
    c.start()


if __name__ == "__main__":
    irc_settings = config.config("irc_config.txt")
    reconnect_interval = irc_settings["reconnect"]
    while True:
        try:
            main()
        except irclib.ServerNotConnectedError:
            print "Server Not Connected! Let's try again!"
        time.sleep(float(reconnect_interval))
Code Example #43
File: post.py  Project: PythoNove/api_service_python
__author__ = "Inove Coding School"
__email__ = "*****@*****.**"
__version__ = "1.0"

import os
import requests

from config import config

# Obtener la path de ejecución actual del script
script_path = os.path.dirname(os.path.realpath(__file__))

# Obtener los parámetros del archivo de configuración
config_path_name = os.path.join(script_path, 'config.ini')
server = config('server', config_path_name)

ip = server['host']
port = server['port']
endpoint = 'registro'

url = f'http://{ip}:{port}/{endpoint}'

if __name__ == "__main__":
    try:
        name = str(input('Ingrese el nombre de la persona:'))
        age = int(input('Ingrese la edad:'))
        nationality = str(input('Ingrese la nacionalidad:'))
        post_data = {"name": name, "age": age, "nationality": nationality}
        x = requests.post(url, data=post_data)
        print('POST enviado a:', url)
Code Example #44
				type    VARCHAR(10),
				hidden  CHAR(1)
			) engine = InnoDB;
			
			""")

    def insert_line(self, channel, name, time, message, msgtype, hidden="F"):
        """
		Sample line: "sfucsss, danly, 12:33-09/11/2009, I love hats, normal, 0"
		"""
        query = "INSERT INTO main (channel, name, time, message, type, hidden) VALUES" + \
        "(\""+self.conn.escape_string(channel)+ "\"," + \
        "\""+self.conn.escape_string(name)+"\"," + \
        "\""+time+"\"," + \
        "\""+self.conn.escape_string(message)+"\"," + \
        "\""+self.conn.escape_string(msgtype)+"\"," + \
        "\""+self.conn.escape_string(hidden)+"\")"

        self.cursor.execute(query)

    def commit(self):
        self.conn.commit()


if __name__ == "__main__":
    mysql_config = config.config("mysql_config.txt")
    db = Pierc_DB(mysql_config["server"], int(mysql_config["port"]),
                  mysql_config["database"], mysql_config["user"],
                  mysql_config["password"])
    db.create_table()
Code Example #45
File: ta_features.py  Project: WeiyuLee/TB-ETF
def ta_preprocess(member_ID, Date, org_data):

    print("========== TA Preprocess Start ==========")

    feature_list = []
    TA_conf = conf.config('feature_conf').config['TA']
    if TA_conf["enable"] is False:

        print("TA features are disabled.")

    else:
        ID_pbar = tqdm(range(len(member_ID)))
        for ID_idx in ID_pbar:
            curr_ID_data = org_data.loc[member_ID[ID_idx]]

            curr_high_price_seq = []
            curr_low_price_seq = []
            curr_close_price_seq = []
            curr_trade_price_seq = []

            for Date_idx in range(len(Date)):
                try:
                    curr_high_price = curr_ID_data[Date[Date_idx]][1]
                    curr_low_price = curr_ID_data[Date[Date_idx]][2]
                    curr_close_price = curr_ID_data[Date[Date_idx]][3]
                    curr_trade_price = curr_ID_data[Date[Date_idx]][4]
                except:
                    curr_high_price = float(np.NAN)
                    curr_low_price = float(np.NAN)
                    curr_close_price = float(np.NAN)
                    curr_trade_price = float(np.NAN)

                curr_high_price_seq.append(curr_high_price)
                curr_low_price_seq.append(curr_low_price)
                curr_close_price_seq.append(curr_close_price)
                curr_trade_price_seq.append(curr_trade_price)

            curr_high_price_seq = np.array(curr_high_price_seq)
            curr_low_price_seq = np.array(curr_low_price_seq)
            curr_close_price_seq = np.array(curr_close_price_seq)
            curr_trade_price_seq = np.array(curr_trade_price_seq)

            ########################### Overlap Studies ###########################

            # Moving Average
            MA_conf = conf.config('feature_conf').config['MA']
            if MA_conf["enable"] is True:
                MA_seqs, MA_feature_list = ta_MA(MA_conf, curr_close_price_seq)
                if ID_idx == 0: feature_list.extend(MA_feature_list)

            # Hilbert Transform - Instantaneous Trendline
            HT_TRENDLINE_conf = conf.config(
                'feature_conf').config['HT_TRENDLINE']
            if HT_TRENDLINE_conf["enable"] is True:
                HT_TRENDLINE_seqs, HT_TRENDLINE_feature_list = ta_HT_TRENDLINE(
                    HT_TRENDLINE_conf, curr_close_price_seq)
                if ID_idx == 0: feature_list.extend(HT_TRENDLINE_feature_list)

            # MidPoint over period
            MIDPOINT_conf = conf.config('feature_conf').config['MIDPOINT']
            if MIDPOINT_conf["enable"] is True:
                MIDPOINT_seqs, MIDPOINT_feature_list = ta_MIDPOINT(
                    MIDPOINT_conf, curr_close_price_seq)
                if ID_idx == 0: feature_list.extend(MIDPOINT_feature_list)

            # Midpoint Price over period
            MIDPRICE_conf = conf.config('feature_conf').config['MIDPRICE']
            if MIDPRICE_conf["enable"] is True:
                MIDPRICE_seqs, MIDPRICE_feature_list = ta_MIDPRICE(
                    MIDPRICE_conf, curr_high_price_seq, curr_low_price_seq)
                if ID_idx == 0: feature_list.extend(MIDPRICE_feature_list)

            ######################### Momentum Indicators #########################

            # Commodity Channel Index
            CCI_conf = conf.config('feature_conf').config['CCI']
            if CCI_conf["enable"] is True:
                CCI_seqs, CCI_feature_list = ta_CCI(CCI_conf,
                                                    curr_high_price_seq,
                                                    curr_low_price_seq,
                                                    curr_close_price_seq)
                if ID_idx == 0: feature_list.extend(CCI_feature_list)

            # Moving Average Convergence/Divergence
            MACD_conf = conf.config('feature_conf').config['MACD']
            if MACD_conf["enable"] is True:
                MACD_seqs, MACD_feature_list = ta_MACD(MACD_conf,
                                                       curr_close_price_seq)
                if ID_idx == 0: feature_list.extend(MACD_feature_list)

            # Relative Strength Index
            RSI_conf = conf.config('feature_conf').config['RSI']
            if RSI_conf["enable"] is True:
                RSI_seqs, RSI_feature_list = ta_RSI(RSI_conf,
                                                    curr_close_price_seq)
                if ID_idx == 0: feature_list.extend(RSI_feature_list)

            # Stochastic (STOCH) KDJ
            KDJ_conf = conf.config('feature_conf').config['KDJ']
            if KDJ_conf["enable"] is True:
                KDJ_seqs, KDJ_feature_list = ta_KDJ(KDJ_conf,
                                                    curr_high_price_seq,
                                                    curr_low_price_seq,
                                                    curr_close_price_seq)
                if ID_idx == 0: feature_list.extend(KDJ_feature_list)

            ########################## Volume Indicators ##########################

            # Chaikin A/D Line
            AD_conf = conf.config('feature_conf').config['AD']
            if AD_conf["enable"] is True:
                AD_seqs, AD_feature_list = ta_AD(AD_conf, curr_high_price_seq,
                                                 curr_low_price_seq,
                                                 curr_close_price_seq,
                                                 curr_trade_price_seq)
                if ID_idx == 0: feature_list.extend(AD_feature_list)

            # Chaikin A/D Oscillator
            ADOSC_conf = conf.config('feature_conf').config['ADOSC']
            if ADOSC_conf["enable"] is True:
                ADOSC_seqs, ADOSC_feature_list = ta_ADOSC(
                    ADOSC_conf, curr_high_price_seq, curr_low_price_seq,
                    curr_close_price_seq, curr_trade_price_seq)
                if ID_idx == 0: feature_list.extend(ADOSC_feature_list)

            # On Balance Volume
            OBV_conf = conf.config('feature_conf').config['OBV']
            if OBV_conf["enable"] is True:
                OBV_seqs, OBV_feature_list = ta_OBV(OBV_conf,
                                                    curr_close_price_seq,
                                                    curr_trade_price_seq)
                if ID_idx == 0: feature_list.extend(OBV_feature_list)

            ########################### Cycle Indicators ##########################

            # Hilbert Transform - Dominant Cycle Period
            HT_DCPERIOD_conf = conf.config(
                'feature_conf').config['HT_DCPERIOD']
            if HT_DCPERIOD_conf["enable"] is True:
                HT_DCPERIOD_seqs, HT_DCPERIOD_feature_list = ta_HT_DCPERIOD(
                    HT_DCPERIOD_conf, curr_close_price_seq)
                if ID_idx == 0: feature_list.extend(HT_DCPERIOD_feature_list)

            # Hilbert Transform - Dominant Cycle Phase
            HT_DCPHASE_conf = conf.config('feature_conf').config['HT_DCPHASE']
            if HT_DCPHASE_conf["enable"] is True:
                HT_DCPHASE_seqs, HT_DCPHASE_feature_list = ta_HT_DCPHASE(
                    HT_DCPHASE_conf, curr_close_price_seq)
                if ID_idx == 0: feature_list.extend(HT_DCPHASE_feature_list)

            ######################## Volatility Indicators ########################

            # Average True Range
            ATR_conf = conf.config('feature_conf').config['ATR']
            if ATR_conf["enable"] is True:
                ATR_seqs, ATR_feature_list = ta_ATR(ATR_conf,
                                                    curr_high_price_seq,
                                                    curr_low_price_seq,
                                                    curr_close_price_seq)
                if ID_idx == 0: feature_list.extend(ATR_feature_list)

            # Normalized Average True Range
            NATR_conf = conf.config('feature_conf').config['NATR']
            if NATR_conf["enable"] is True:
                NATR_seqs, NATR_feature_list = ta_NATR(NATR_conf,
                                                       curr_high_price_seq,
                                                       curr_low_price_seq,
                                                       curr_close_price_seq)
                if ID_idx == 0: feature_list.extend(NATR_feature_list)

            # Append the features to the original DataFrame
            for Date_idx in range(len(Date)):
                temp_ta_features = []

                if MA_conf["enable"] is True:
                    for i in range(len(MA_feature_list)):
                        temp_ta_features.append(MA_seqs[i][Date_idx])

                if HT_TRENDLINE_conf["enable"] is True:
                    temp_ta_features.append(HT_TRENDLINE_seqs[Date_idx])

                if MIDPOINT_conf["enable"] is True:
                    for i in range(len(MIDPOINT_conf["period"])):
                        temp_ta_features.append(MIDPOINT_seqs[i][Date_idx])

                if MIDPRICE_conf["enable"] is True:
                    for i in range(len(MIDPRICE_conf["period"])):
                        temp_ta_features.append(MIDPRICE_seqs[i][Date_idx])

                if CCI_conf["enable"] is True:
                    for i in range(len(CCI_conf["period"])):
                        temp_ta_features.append(CCI_seqs[i][Date_idx])

                if MACD_conf["enable"] is True:
                    for i in range(len(MACD_conf["period"])):
                        temp_ta_features.append(MACD_seqs[i][0][Date_idx])
                        temp_ta_features.append(MACD_seqs[i][1][Date_idx])
                        temp_ta_features.append(MACD_seqs[i][2][Date_idx])

                if RSI_conf["enable"] is True:
                    for i in range(len(RSI_conf["period"])):
                        temp_ta_features.append(RSI_seqs[i][Date_idx])

                if KDJ_conf["enable"] is True:
                    for i in range(len(KDJ_conf["period"])):
                        temp_ta_features.append(KDJ_seqs[i][0][Date_idx])
                        temp_ta_features.append(KDJ_seqs[i][1][Date_idx])

                if AD_conf["enable"] is True:
                    temp_ta_features.append(AD_seqs[Date_idx])

                if ADOSC_conf["enable"] is True:
                    for i in range(len(ADOSC_conf["period"])):
                        temp_ta_features.append(ADOSC_seqs[i][Date_idx])

                if OBV_conf["enable"] is True:
                    temp_ta_features.append(OBV_seqs[Date_idx])

                if HT_DCPERIOD_conf["enable"] is True:
                    temp_ta_features.append(HT_DCPERIOD_seqs[Date_idx])

                if HT_DCPHASE_conf["enable"] is True:
                    temp_ta_features.append(HT_DCPHASE_seqs[Date_idx])

                if ATR_conf["enable"] is True:
                    for i in range(len(ATR_conf["period"])):
                        temp_ta_features.append(ATR_seqs[i][Date_idx])

                if NATR_conf["enable"] is True:
                    for i in range(len(NATR_conf["period"])):
                        temp_ta_features.append(NATR_seqs[i][Date_idx])

                try:
                    org_data[Date[Date_idx]].loc[member_ID[ID_idx]].extend(
                        temp_ta_features)
                except Exception:
                    # The cell holds a scalar rather than a list: wrap it first.
                    org_data[Date[Date_idx]].loc[member_ID[ID_idx]] = [
                        org_data[Date[Date_idx]].loc[member_ID[ID_idx]]
                    ]
                    org_data[Date[Date_idx]].loc[member_ID[ID_idx]].extend(
                        temp_ta_features)

            ID_pbar.set_description("Process: {}/{}".format(
                ID_idx + 1, len(member_ID)))

    print("========== TA Preprocess Done! ==========")

    return org_data, feature_list
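
Every indicator above reads its settings from `conf.config('feature_conf').config[...]`, and the access pattern implies each entry is a dict with an `enable` flag and, for most indicators, a list under `period`. A hypothetical fragment of such a configuration, with illustrative values that are not taken from the project, could be:

# Hypothetical feature_conf contents inferred from the keys read above.
feature_conf = {
    'TA':           {'enable': True},
    'MA':           {'enable': True,  'period': [5, 10, 20]},
    'HT_TRENDLINE': {'enable': True},
    'RSI':          {'enable': True,  'period': [14]},
    'MACD':         {'enable': True,  'period': [(12, 26, 9)]},  # each entry yields MACD/signal/hist
    'MIDPOINT':     {'enable': False, 'period': [14]},
}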
コード例 #46
0
def create_tables():
    """ create tables in the PostgreSQL database"""
    commands = (
        """
        CREATE TABLE app_user (
            user_id SERIAL PRIMARY KEY,
            city TEXT NOT NULL,
            favourite_music_type TEXT DEFAULT NULL,
            isTheaterFan BOOL NOT NULL,
            isCinemaFan BOOL NOT NULL
        )
        """,
        """ CREATE TABLE auth (
                auth_id SERIAL PRIMARY KEY,
                email_address TEXT NOT NULL UNIQUE,
                password TEXT NOT NULL,
                isOrganizer BOOL NOT NULL,
                isAdmin BOOL NOT NULL
                )
        """,
        """
        CREATE TABLE organizer (
                organizer_id SERIAL PRIMARY KEY,
                event_type TEXT DEFAULT NULL
                )
        """,
        """
        CREATE TABLE city (
                city_id SERIAL PRIMARY KEY,
                city TEXT NOT NULL,
                country TEXT NOT NULL
                )
        """,
        """
        CREATE TABLE place (
                place_id SERIAL PRIMARY KEY,
                place TEXT NOT NULL,
                city_id INTEGER REFERENCES city
                )
        """,
        """
        CREATE TABLE comment (
                comment_id SERIAL PRIMARY KEY,
                event_comment_id TEXT NOT NULL,
                comment_date TIMESTAMP NOT NULL,
                comment TEXT NOT NULL
                )
        """,
        """
        CREATE TABLE event (
                event_id SERIAL PRIMARY KEY,
                title TEXT NOT NULL,
                event_type TEXT NOT NULL,
                description TEXT NOT NULL,
                date TIMESTAMP NOT NULL,
                place_id INTEGER REFERENCES place, 
                price FLOAT8 NOT NULL,
                event_comment_id INTEGER REFERENCES comment,
                image_link TEXT DEFAULT NULL
               )
        """,
        """
        CREATE TABLE event_ticket (
                ticket_id SERIAL PRIMARY KEY,
                price float8 NOT NULL,
                event_id INTEGER REFERENCES event,
                amount INTEGER NOT NULL
                )
        """,
        """
        CREATE TABLE id_table (
                user_id SERIAL PRIMARY KEY,
                auth_id INTEGER REFERENCES auth,
                organizer_id INTEGER REFERENCES organizer
                )
        """,
        """
        CREATE TABLE payment (
                payment_id SERIAL PRIMARY KEY,
                user_id INTEGER REFERENCES app_user,
                email TEXT NOT NULL,
                name TEXT NOT NULL,
                phone_number TEXT NOT NULL,
                event_id INTEGER REFERENCES event, 
                shipping_address TEXT DEFAULT NULL
               )
        """,
        """
        CREATE TABLE basket (
                basket_id SERIAL PRIMARY KEY,
                user_id INTEGER REFERENCES app_user,
                event_id INTEGER REFERENCES event, 
                price FLOAT8 NOT NULL,
                quantity TEXT NOT NULL
                )
        """,
        """
        CREATE TABLE public.session_tokens(
            user_id INTEGER REFERENCES app_user,
            token character varying(256) COLLATE pg_catalog."default" NOT NULL
        )
        """,
    )
    conn = None
    try:
        # read the connection parameters
        params = config()
        # connect to the PostgreSQL server
        conn = psycopg2.connect(**params)
        cur = conn.cursor()
        # create table one by one
        for command in commands:
            cur.execute(command)
        # close communication with the PostgreSQL database server
        cur.close()
        # commit the changes
        conn.commit()
    except (Exception, psycopg2.DatabaseError) as error:
        print(error)
    finally:
        if conn is not None:
            conn.close()
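
Here `config()` must return a dict of psycopg2 connection keywords (host, database, user, password). The helper itself is not shown; a common pattern, and only an assumption for this example, is a small configparser wrapper over a `database.ini` file:

# Sketch of a typical config() helper, assuming a database.ini file with a
# [postgresql] section; this is a common pattern, not this project's own code.
from configparser import ConfigParser

def config(filename='database.ini', section='postgresql'):
    parser = ConfigParser()
    parser.read(filename)
    if not parser.has_section(section):
        raise Exception(f'Section {section} not found in {filename}')
    # Return the section as a plain dict, e.g. {'host': ..., 'database': ...}
    return dict(parser.items(section))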
コード例 #47
0
ファイル: model_final.py プロジェクト: dodo2code/NAACL18
# -*- coding: utf-8 -*-

import os
os.environ["CUDA_VISIBLE_DEVICES"] = '0'
'''
import sys
reload(sys)
sys.setdefaultencoding('utf-8')
'''

import tensorflow as tf
import numpy as np
import config
conf = config.config()

#data preparation
import data_process

train_doc2id,   train_query2id,   train_summ2id,  \
train_doc_mask, train_query_mask, train_sum_mask, \
train_doc_len,  train_que_len,    train_sum_len,  \
train_sent_seg, train_seg_mask, \
_, _= data_process.train_data

test_doc2id,   test_query2id,   test_summ2id,  \
test_doc_mask, test_query_mask, test_sum_mask, \
test_doc_len,  test_que_len,    test_sum_len,  \
test_sent_seg, test_seg_mask, \
_, _= data_process.test_data

コード例 #48
0
    def SetupPackage(self, get):
        name = get.dname
        site_name = get.site_name
        php_version = get.php_version
        # Get the basic site info
        find = public.M('sites').where(
            'name=?', (site_name, )).field('id,path,name').find()
        path = find['path']
        if path.replace('//', '/') == '/':
            return public.returnMsg(False, 'Dangerous website root directory!')

        # Get the package info
        pinfo = self.GetPackageInfo(name)
        if not pinfo:
            return public.returnMsg(False,
                                    'The specified package does not exist!')
        id = pinfo['id']

        # Check the local package cache
        self.WriteLogs(
            json.dumps({
                'name': 'Verifying package...',
                'total': 0,
                'used': 0,
                'pre': 0,
                'speed': 0
            }))
        pack_path = self.__panelPath + '/package'
        if not os.path.exists(pack_path): os.makedirs(pack_path, 384)
        packageZip = pack_path + '/' + name + '.zip'
        isDownload = False
        if os.path.exists(packageZip):
            md5str = self.GetFileMd5(packageZip)
            if md5str != pinfo['versions'][0]['md5']: isDownload = True
        else:
            isDownload = True

        # Download the package file
        if isDownload:
            self.WriteLogs(
                json.dumps({
                    'name': 'Downloading file ...',
                    'total': 0,
                    'used': 0,
                    'pre': 0,
                    'speed': 0
                }))
            if pinfo['versions'][0]['download']:
                self.DownloadFile(
                    'http://www.bt.cn/api/Pluginother/get_file?fname=' +
                    pinfo['versions'][0]['download'], packageZip)

        if not os.path.exists(packageZip):
            return public.returnMsg(False,
                                    'File download failed!' + packageZip)

        pinfo = self.set_temp_file(packageZip, path)
        if not pinfo:
            return public.returnMsg(
                False,
                'Cannot find [aaPanel Auto Deployment Configuration File] in the installation package'
            )

        # Set file permissions
        self.WriteLogs(
            json.dumps({
                'name': 'Setting permissions',
                'total': 0,
                'used': 0,
                'pre': 0,
                'speed': 0
            }))
        os.system('chmod -R 755 ' + path)
        os.system('chown -R www.www ' + path)
        if pinfo['chmod']:
            for chm in pinfo['chmod']:
                os.system('chmod -R ' + str(chm['mode']) + ' ' +
                          (path + '/' + chm['path']).replace('//', '/'))

        # Install the required PHP extensions
        self.WriteLogs(
            json.dumps({
                'name': 'Install the necessary PHP extensions',
                'total': 0,
                'used': 0,
                'pre': 0,
                'speed': 0
            }))
        import files
        mfile = files.files()
        if type(pinfo['php_ext']) == str:
            pinfo['php_ext'] = pinfo['php_ext'].strip().split(',')
        for ext in pinfo['php_ext']:
            if ext == 'pathinfo':
                import config
                con = config.config()
                get.version = php_version
                get.type = 'on'
                con.setPathInfo(get)
            else:
                get.name = ext
                get.version = php_version
                get.type = '1'
                mfile.InstallSoft(get)

        # Re-enable required PHP functions
        if 'enable_functions' in pinfo:
            try:
                if type(pinfo['enable_functions']) == str:
                    pinfo['enable_functions'] = pinfo[
                        'enable_functions'].strip().split(',')
                php_f = public.GetConfigValue(
                    'setup_path') + '/php/' + php_version + '/etc/php.ini'
                php_c = public.readFile(php_f)
                rep = r"disable_functions\s*=\s{0,1}(.*)\n"
                tmp = re.search(rep, php_c).groups()
                disable_functions = tmp[0].split(',')
                for fun in pinfo['enable_functions']:
                    fun = fun.strip()
                    if fun in disable_functions: disable_functions.remove(fun)
                disable_functions = ','.join(disable_functions)
                php_c = re.sub(
                    rep, 'disable_functions = ' + disable_functions + "\n",
                    php_c)
                public.writeFile(php_f, php_c)
                public.phpReload(php_version)
            except:
                pass

        # Run an extra shell script to install dependencies
        self.WriteLogs(
            json.dumps({
                'name': 'Execute extra SHELL',
                'total': 0,
                'used': 0,
                'pre': 0,
                'speed': 0
            }))
        if os.path.exists(path + '/install.sh'):
            os.system('cd ' + path + ' && bash ' + 'install.sh ' +
                      find['name'] + " &> install.log")
            os.system('rm -f ' + path + '/install.sh')

        # Run Composer if a composer.json is present
        if os.path.exists(path + '/composer.json'):
            self.WriteLogs(
                json.dumps({
                    'name': 'Execute Composer',
                    'total': 0,
                    'used': 0,
                    'pre': 0,
                    'speed': 0
                }))
            if not os.path.exists(path + '/composer.lock'):
                execPHP = '/www/server/php/' + php_version + '/bin/php'
                if execPHP:
                    if public.get_url().find('125.88') != -1:
                        os.system(
                            'cd ' + path + ' && ' + execPHP +
                            ' /usr/bin/composer config repo.packagist composer https://packagist.phpcomposer.com'
                        )
                    import panelSite
                    phpini = '/www/server/php/' + php_version + '/etc/php.ini'
                    phpiniConf = public.readFile(phpini)
                    phpiniConf = phpiniConf.replace(
                        'proc_open,proc_get_status,', '')
                    public.writeFile(phpini, phpiniConf)
                    os.system(
                        'nohup cd ' + path + ' && ' + execPHP +
                        ' /usr/bin/composer install -vvv > /tmp/composer.log 2>&1 &'
                    )

        # Write the URL rewrite rules
        self.WriteLogs(
            json.dumps({
                'name': 'Set URL rewrite',
                'total': 0,
                'used': 0,
                'pre': 0,
                'speed': 0
            }))
        swfile = path + '/nginx.rewrite'
        if os.path.exists(swfile):
            rewriteConf = public.readFile(swfile)
            dwfile = self.__panelPath + '/vhost/rewrite/' + site_name + '.conf'
            public.writeFile(dwfile, rewriteConf)

        # Remove the rewrite source files
        public.ExecShell("rm -f " + path + '/*.rewrite')

        # Remove redundant files
        rm_file = path + '/index.html'
        if os.path.exists(rm_file):
            rm_file_body = public.readFile(rm_file)
            if rm_file_body.find('panel-heading') != -1: os.remove(rm_file)

        # Set the run directory
        self.WriteLogs(
            json.dumps({
                'name': 'Set the run directory',
                'total': 0,
                'used': 0,
                'pre': 0,
                'speed': 0
            }))
        if pinfo['run_path'] != '/':
            import panelSite
            siteObj = panelSite.panelSite()
            mobj = obj()
            mobj.id = find['id']
            mobj.runPath = pinfo['run_path']
            siteObj.SetSiteRunPath(mobj)

        # Import the database dump
        self.WriteLogs(
            json.dumps({
                'name': 'Import database',
                'total': 0,
                'used': 0,
                'pre': 0,
                'speed': 0
            }))
        if os.path.exists(path + '/import.sql'):
            databaseInfo = public.M('databases').where(
                'pid=?', (find['id'], )).field('username,password').find()
            if databaseInfo:
                os.system('/www/server/mysql/bin/mysql -u' +
                          databaseInfo['username'] + ' -p' +
                          databaseInfo['password'] + ' ' +
                          databaseInfo['username'] + ' < ' + path +
                          '/import.sql')
                os.system('rm -f ' + path + '/import.sql')
                siteConfigFile = (path + '/' + pinfo['db_config']).replace(
                    '//', '/')
                if os.path.exists(siteConfigFile):
                    siteConfig = public.readFile(siteConfigFile)
                    siteConfig = siteConfig.replace('BT_DB_USERNAME',
                                                    databaseInfo['username'])
                    siteConfig = siteConfig.replace('BT_DB_PASSWORD',
                                                    databaseInfo['password'])
                    siteConfig = siteConfig.replace('BT_DB_NAME',
                                                    databaseInfo['username'])
                    public.writeFile(siteConfigFile, siteConfig)

        # Clean up files and directories
        self.WriteLogs(
            json.dumps({
                'name': 'Cleaning up redundant files',
                'total': 0,
                'used': 0,
                'pre': 0,
                'speed': 0
            }))
        if type(pinfo['remove_file']) == str:
            pinfo['remove_file'] = pinfo['remove_file'].strip().split(',')
        for f_path in pinfo['remove_file']:
            filename = (path + '/' + f_path).replace('//', '/')
            if os.path.exists(filename):
                if not os.path.isdir(filename):
                    if f_path.find('.user.ini') != -1:
                        public.ExecShell("chattr -i " + filename)
                    os.remove(filename)
                else:
                    public.ExecShell("rm -rf " + filename)

        public.serviceReload()
        if id: self.depTotal(id)
        self.WriteLogs(
            json.dumps({
                'name': 'Ready to deploy',
                'total': 0,
                'used': 0,
                'pre': 0,
                'speed': 0
            }))
        return public.returnMsg(True, pinfo)
コード例 #49
0
                            if (globalvars.meteo_data.abs_pressure == 0):
                                globalvars.meteo_data.abs_pressure = None
                            if (self.cfg.use_bmp085):
                                sensor.Sensor.ReadBMP085_temp_in(self)

                            globalvars.meteo_data.CalcStatistics()
                            globalvars.meteo_data.LogDataToDB()
                        else:
                            log("Meteo : Error in getting data - status = " +
                                str(globalvars.meteo_data.status))

                        self.error = False

            except IOError as e:
                #raise
                log("ERROR with PCE-FWS20  %s . Will retry ..." % e)
                #            ret,self.model,self.idd,self.bus = self.Detect()
                #            usbdevice = "/dev/bus/usb/%s/%s" % (self.idd , self.bus )
                #            os.system( "./usbreset %s" % (usbdevice) )
                self.__init__(self.cfg)
                self.error = True

if __name__ == '__main__':

    configfile = 'swpi.cfg'

    if not os.path.isfile(configfile):
        "Configuration file not found"
        exit(1)
    cfg = config.config(configfile)
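
The snippet only requires that `config.config` read `swpi.cfg` and expose each option as an attribute (for example `self.cfg.use_bmp085`). A minimal attribute-style stand-in under that assumption is sketched below; the real class presumably also converts option types, which this sketch skips by keeping every value as a string.

# Minimal stand-in for the config class assumed above: parse an INI-style file
# and expose each option as an attribute (e.g. cfg.use_bmp085).
import configparser

class config:
    def __init__(self, filename):
        parser = configparser.ConfigParser()
        parser.read(filename)
        for section in parser.sections():
            for key, value in parser.items(section):
                setattr(self, key, value)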
コード例 #50
0
    def __init__(self):
        self.__data = data.data()
        self.__database = database.database()
        self.__config = config.config()
コード例 #51
0
__author__ = "Inove Coding School"
__email__ = "*****@*****.**"
__version__ = "1.1"

import os
import sqlite3
from datetime import datetime, timedelta

from config import config
# Get the current execution path of the script
script_path = os.path.dirname(os.path.realpath(__file__))

# Get the parameters from the configuration file
config_path_name = os.path.join(script_path, 'config.ini')
db = config('db', config_path_name)


def create_schema():

    # Connect to the database
    # If the file does not exist, it is created
    # as an empty database
    conn = sqlite3.connect(db['database'])

    # Create the cursor so we can run queries
    c = conn.cursor()

    # Get the real path of the schema file
    script_path = os.path.dirname(os.path.realpath(__file__))
    schema_path_name = os.path.join(script_path, db['schema'])
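
The excerpt stops right after the schema path is resolved. With sqlite3 the usual next step is to read that file and run it with `executescript`; a sketch of how `create_schema` would typically continue (an assumption, since the rest of the original is not shown):

    # Likely continuation (not shown in the original excerpt): apply the schema
    # file and close the connection.
    with open(schema_path_name, 'r') as schema_file:
        c.executescript(schema_file.read())

    conn.commit()
    conn.close()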
コード例 #52
0
ファイル: app.py プロジェクト: camboya/api_webapp_python
from matplotlib.backends.backend_agg import FigureCanvasAgg as FigureCanvas
from matplotlib.figure import Figure
import matplotlib.image as mpimg

import persona
from config import config


app = Flask(__name__)

# Get the current execution path of the script
script_path = os.path.dirname(os.path.realpath(__file__))

# Get the parameters from the configuration file
config_path_name = os.path.join(script_path, 'config.ini')
db = config('db', config_path_name)
server = config('server', config_path_name)

persona.db = db


@app.route("/")
def index():
    try:
        # Print the available endpoints
        result = "<h1>Bienvenido!!</h1>"
        result += "<h2>Endpoints disponibles:</h2>"
        result += "<h3>[GET] /reset --> borrar y crear la base de datos</h3>"
        result += "<h3>[GET] /personas --> mostrar la tabla de personas (el HTML)</h3>"
        result += "<h3>[POST] /personas --> enviar el JSON para completar la tabla</h3>"
        result += "<h3>[GET] /registro --> mostrar el HTML con el formulario de registro de persona</h3>"
コード例 #53
0
ファイル: scan.py プロジェクト: a4a881d4/R7-OCM
def main():
    g = config()
    s = scan(g)
    sp = s.spectrum(800e6, 1000e6)
    print(sp)
コード例 #54
0
import urlparse
import urllib
import json
import time
import re

import config as c
config = c.config()


def main():
    if config.sendPush:
        from pushbullet import Pushbullet
        pb = Pushbullet(config.pushbullet_access_token)
    iPhoneList = config.iPhoneList
    iPhone = {}
    triedTimes = 0

    try:

        for i in range(0, len(iPhoneList)):
            parsed = urlparse.urlparse(iPhoneList[i]['reserve_url'])
            partNumber = urlparse.parse_qs(parsed.query)['partNumber'][0]
            iPhone[partNumber] = {
                "name": iPhoneList[i]["name"],
                "reserve_url": iPhoneList[i]["reserve_url"],
                "pushed": False,
                "available": False
            }

        print("Started, auto update every 10s, stop using ctrl+c")
コード例 #55
0
ファイル: app.py プロジェクト: zekalarcon/api_service_python
from heart_orm import db
import heart_orm as heart
#import heart as heart  # You can pick this option if you don't want to use the ORM

from config import config

# Create the Flask server
app = Flask(__name__)

# Get the current execution path of the script
script_path = os.path.dirname(os.path.realpath(__file__))

# Get the parameters from the configuration file
config_path_name = os.path.join(script_path, 'config.ini')
db_config = config('db', config_path_name)
server_config = config('server', config_path_name)

# Tell the system (app) where to read the database from
app.config["SQLALCHEMY_DATABASE_URI"] = f"sqlite:///{db_config['database']}"
# Bind our database controller to the application
db.init_app(app)


# Route reached via the URL 127.0.0.1:5000
@app.route("/")
def index():
    try:
        # Print the available endpoints
        result = "<h1>Bienvenido!!</h1>"
        result += "<h2>Endpoints disponibles:</h2>"
コード例 #56
0
def main():

    cfg = config()
    cfg['device'] = torch.device(
        "cuda" if torch.cuda.is_available() else "cpu")

    timestr = time.strftime("%Y%m%d-%H%M%S")
    cfg['logdir'] += timestr
    pprint(cfg)

    train_df, test_df = balance_data(cfg['train_csv_path'])
    print("Train Stats:")
    print("No DR:", len(train_df[train_df['diagnosis'] == 0]))
    print("Mild:", len(train_df[train_df['diagnosis'] == 1]))
    print("Moderate:", len(train_df[train_df['diagnosis'] == 2]))
    print("Severe:", len(train_df[train_df['diagnosis'] == 3]))
    print("Proliferative DR:", len(train_df[train_df['diagnosis'] == 4]))
    print("\nTest Stats:")
    print("No DR:", len(test_df[test_df['diagnosis'] == 0]))
    print("Mild:", len(test_df[test_df['diagnosis'] == 1]))
    print("Moderate:", len(test_df[test_df['diagnosis'] == 2]))
    print("Severe:", len(test_df[test_df['diagnosis'] == 3]))
    print("Proliferative DR:", len(test_df[test_df['diagnosis'] == 4]))

    train_transforms, test_transforms = get_transforms()
    train_dataset = AptosDataset(
        img_root=cfg['img_root'],
        df=train_df,
        img_transforms=train_transforms,
        is_train=True,
    )

    test_dataset = AptosDataset(
        img_root=cfg['img_root'],
        df=test_df,
        img_transforms=test_transforms,
        is_train=False,
    )
    print(
        f"Training set size:{len(train_dataset)}, Test set size:{len(test_dataset)}"
    )

    train_loader = DataLoader(train_dataset,
                              cfg['batch_size'],
                              shuffle=True,
                              num_workers=1)
    test_loader = DataLoader(test_dataset,
                             cfg['test_batch_size'],
                             shuffle=False,
                             num_workers=1)

    loaders = {'train': train_loader, 'valid': test_loader}

    model = AptosModel(arch=cfg['arch'], freeze=cfg['freeze'])
    criterion = nn.CrossEntropyLoss()
    optimizer = torch.optim.Adam(model.parameters(), lr=cfg['lr'])
    scheduler = torch.optim.lr_scheduler.ReduceLROnPlateau(optimizer,
                                                           factor=0.5,
                                                           patience=2)

    runner = SupervisedRunner(device=cfg['device'])

    runner.train(
        model=model,
        criterion=criterion,
        optimizer=optimizer,
        scheduler=scheduler,
        loaders=loaders,
        callbacks=[
            AccuracyCallback(num_classes=cfg['num_classes'],
                             threshold=0.5,
                             activation="Sigmoid"),
            #             PrecisionRecallF1ScoreCallback(
            #                 class_names=cfg['class_names'],
            #                 num_classes=cfg['num_classes']
            #             )
        ],
        logdir=cfg['logdir'],
        num_epochs=cfg['num_epochs'],
        verbose=cfg['verbose'],
        # set this true to run for 3 epochs only
        check=cfg['check'])
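
In this example `config()` returns a plain dict whose keys are read directly in `main()`. Based only on the keys accessed above, a hypothetical configuration could look like the following; every value is a placeholder, not the project's actual setting.

# Hypothetical cfg contents inferred from the keys used in main().
def config():
    return {
        'train_csv_path': 'data/train.csv',
        'img_root': 'data/train_images',
        'batch_size': 32,
        'test_batch_size': 64,
        'arch': 'resnet34',
        'freeze': False,
        'lr': 1e-4,
        'num_classes': 5,
        'class_names': ['No DR', 'Mild', 'Moderate', 'Severe', 'Proliferative DR'],
        'logdir': 'logs/aptos_',
        'num_epochs': 20,
        'verbose': True,
        'check': False,
    }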
コード例 #57
0
ファイル: database.py プロジェクト: hebelsan/IRT_Postgre
def get_connection():
    params = config()
    # connect to the PostgreSQL server
    print("Connecting to the PostgreSQL database...")
    return psycopg2.connect(**params)
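
A short usage sketch for `get_connection` (assuming the same `config()` helper is importable): psycopg2 cursors work as context managers, so a one-off query can be run and cleaned up like this.

# Example usage of get_connection(); the query is just an illustration.
def print_server_version():
    conn = get_connection()
    try:
        with conn.cursor() as cur:
            cur.execute("SELECT version();")
            print(cur.fetchone()[0])
    finally:
        conn.close()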
コード例 #58
0
ファイル: init_db.py プロジェクト: wakin-/mastodon_autofollow
import sqlite3
from contextlib import closing
from config import config

dbname = config('dbname')

with closing(sqlite3.connect(dbname)) as conn:
    c = conn.cursor()

    create_table = '''create table oauth_applications (id integer PRIMARY KEY AUTOINCREMENT, domain varchar(256),
                      uid varchar(256), secret varchar(256))'''
    c.execute(create_table)
    create_table = '''create table uuid (uuid varchar(256), user_id varchar(256), 
                      domain varchar(256), disable boolean)'''
    c.execute(create_table)
    create_table = '''create table user (id integer PRIMARY KEY AUTOINCREMENT, access_token varchar(256), 
                      user_id varchar(256), domain varchar(256), avatar varchar(256))'''
    c.execute(create_table)

    create_table = '''create table zodiac (id integer PRIMARY KEY AUTOINCREMENT, title varchar(256),
                      bot_access_token varchar(256), bot_base_url varchar(256), uri varchar(256))'''
    c.execute(create_table)

    create_table = '''create table user_zodiac (user_id integer, zodiac_id integer, 
                      FOREIGN KEY(user_id) references user(id) ON DELETE CASCADE,
                      FOREIGN KEY(zodiac_id) references zodiac(id) ON DELETE CASCADE,
                      UNIQUE(user_id, zodiac_id))
                      '''
    c.execute(create_table)

    create_table = '''create table session (id integer PRIMARY KEY AUTOINCREMENT, 
コード例 #59
0
ファイル: utils.py プロジェクト: hellorusk-sandbox/TSMH
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import math
from config import config
from nltk.tokenize import word_tokenize
from copy import copy, deepcopy
import sys
import json
config=config()
sys.path.insert(0,config.skipthoughts_path)
sys.path.insert(0,config.emb_path)
# sys.path.insert(0,'../utils/dict_emb')
from dict_use import dict_use
sys.path.insert(0,'../bert')
from bert_scorer import BERT_Scorer, GPT2_Scorer
bert_scorer = BERT_Scorer(config.bert_path)
gpt2_scorer = GPT2_Scorer(config.gpt2_path)
tokenizer = bert_scorer.tokenizer
PAD_IDX = tokenizer._convert_token_to_id('[PAD]')  # 0
MASK_IDX = tokenizer._convert_token_to_id('[MASK]')  # 103
QMASK_IDX = tokenizer._convert_token_to_id('?')
WORD_START_IDX = 1996

if config.mode[0] == 's':
	import fasttext
	# model_dir = './yelp_polarity_model'
	model_dir = '../sentiment/yelp_polarity_model'
	model = fasttext.load_model(model_dir)
コード例 #60
0
from corrupter import BernCorrupter, BernCorrupterMulti
from read_data import index_ent_rel, graph_size, read_data
from config import config, overwrite_config_with_args
from logger_init import logger_init
from data_utils import inplace_shuffle, heads_tails
from select_gpu import select_gpu
from trans_e import TransE
from trans_d import TransD
from distmult import DistMult
from compl_ex import ComplEx

logger_init()
torch.cuda.set_device(select_gpu())
overwrite_config_with_args()

task_dir = config().task.dir
# kb_index = index_ent_rel(os.path.join(task_dir, 'train2id.txt'),
#                          os.path.join(task_dir, 'valid2id.txt'),
#                          os.path.join(task_dir, 'test2id.txt'))
kb_index = index_ent_rel(os.path.join(task_dir, 'train2id.txt'),
                         os.path.join(task_dir, 'test2id.txt'))
n_ent, n_rel = graph_size(kb_index)

train_data = read_data(os.path.join(task_dir, 'train2id.txt'), kb_index)
inplace_shuffle(*train_data)
# valid_data = read_data(os.path.join(task_dir, 'valid2id.txt'), kb_index)
test_data = read_data(os.path.join(task_dir, 'test2id.txt'), kb_index)
# heads, tails = heads_tails(n_ent, train_data, valid_data, test_data)
heads, tails = heads_tails(n_ent, train_data, test_data)
# valid_data = [torch.LongTensor(vec) for vec in valid_data]
test_data = [torch.LongTensor(vec) for vec in test_data]