Beispiel #1
0
    def import_snippets(self, filenames):
        """Import each snippet file given as a URI, reporting failures.

        Non-file:// URIs are skipped.  After processing every entry the
        snippet model is rebuilt, and a confirmation dialog is shown when
        no import failed.
        """
        all_ok = True

        for uri in filenames:
            # Only local file:// URIs can be imported.
            if not Pluma.utils_uri_has_file_scheme(uri):
                continue

            # Resolve the URI to a plain filesystem path.
            path = Gio.file_new_for_uri(uri).get_path()

            err = Importer(path).run()
            if err:
                all_ok = False
                message_dialog(
                    self.dlg, Gtk.MessageType.ERROR,
                    _('The following error occurred while importing: %s') % err)

        self.build_model(True)

        if all_ok:
            message_dialog(self.dlg, Gtk.MessageType.INFO,
                           _('Import successfully completed'))
Beispiel #2
0
    def _reload(self, filenames):
        """Reload every loaded module matching *filenames*, then refresh routing.

        The original used ``for ... else``; since the loop body never
        ``break``s, the ``else`` clause always executed.  The post-loop
        steps are written sequentially here to make that unconditional
        behaviour explicit.
        """
        # Snapshot matching modules first: reloading can mutate sys.modules.
        modules = [m for m in sys.modules.values()
                   if self._check(filenames, m)]

        for mod in modules:
            self._logger.info("Reloading module %s", mod.__name__)
            Importer.reload(mod)

        # Always re-sync routing after (re)loads — even when nothing matched.
        j25._load_routing()
        j25._update_mapper()
        j25._dispatcher.register_all_apps_router()
Beispiel #3
0
    def __init__(self, settings, config, sql=None, parent=None, cli=False):
        """Set up the auto-import engine.

        Parameters:
            settings: dict of fpdb settings; db-* keys are read below.
            config:   parsed configuration object.
            sql:      optional SQL helper passed through to the Importer.
            parent:   optional owning widget/window.
            cli:      when falsy (default) the GTK GUI is built; when truthy
                      a non-GUI mode is intended (currently a TODO stub).
        """
        self.importtimer = 0
        self.settings = settings
        self.config = config
        self.sql = sql
        self.parent = parent

        self.input_settings = {}
        self.pipe_to_hud = None

        # Importer configured for continuous (auto) mode with HUD updates.
        self.importer = Importer.Importer(self, self.settings, self.config,
                                          self.sql)
        self.importer.setCallHud(True)
        self.importer.setQuiet(False)
        self.importer.setHandCount(0)
        self.importer.setMode('auto')

        self.server = settings['db-host']
        self.user = settings['db-user']
        self.password = settings['db-password']
        self.database = settings['db-databaseName']

        # Idiomatic truthiness test (was `cli == False`; cli is a bool flag).
        if not cli:
            self.setupGui()
        else:
            # TODO: Separate the code that grabs the directories from config
            #       Separate the calls to the Importer API
            #       Create a timer interface that doesn't rely on GTK
            pass
Beispiel #4
0
    def __init__(self):
        """Create the news/updates service: wire up config, modem, connection
        and hotspot helpers, then load the cached news database when news
        support is enabled for the current country."""
        gobject.GObject.__init__(self)

        self.conf = Config.Config(tgcm.country_support)
        self.modem_manager = MainModem.MainModem()
        self.conn_manager = ConnectionManager.ConnectionManager()
        self.device_dialer = FreeDesktop.DeviceDialer()
        self.importer = Importer.Importer()
        self.hs_service = HotSpotsService.HotSpotsService()

        # Callbacks assigned later by the owner of this object.
        self.install_callback = None
        self.refresh_callback = None

        # Nothing else to do when news support is disabled for this country.
        if not self.conf.is_news_available():
            return

        # Load news and updates database
        self.db_name = 'news-%s.db' % tgcm.country_support
        self.db_filepath = os.path.join(tgcm.config_dir, self.db_name)
        if os.path.exists(self.db_filepath):
            # NOTE(review): pickle.load on a cached file is unsafe if the
            # cache directory can be tampered with — confirm it is trusted.
            db_file = open(self.db_filepath, 'r')
            self.db_contents = pickle.load(db_file)
            db_file.close()
        else:
            # First run: start with empty news/updates maps.
            self.db_contents = {}
            self.db_contents['news'] = {}
            self.db_contents['updates'] = {}

        self.__update_rss_db_if_possible(APP_START)
        self.device_dialer.connect("connected", self.__connected_cb)
        self.modem_manager.connect('main-modem-changed',
                                   self.__main_modem_changed_cb)
        self.modem_manager.connect('main-modem-removed',
                                   self.__main_modem_removed_cb)
def send():
    """Renders the main page of the US Parks Finder Website"""
    # Plain GET: just show the landing page.
    if request.method != 'POST':
        return render_template("index.html")

    name = request.form['Name']
    state = request.form['State']
    loc = request.form['Location']
    mtd = int(request.form['maxTravelDistance'])

    user = MDL.User(name, state, loc, float(mtd))
    parks = IMPRT.getParks(user)

    zoom = getZoom(mtd)
    park_names = getPNames(parks)
    park_names.sort()
    lats, lngs = getPLatLngs(parks)
    park_tuples = getPTups(parks)

    return render_template('test.html',
                           name=name,
                           state=state,
                           location=loc,
                           tdist=mtd,
                           numParks=len(park_names),
                           parks=park_names,
                           usrLat=user.lat,
                           usrLng=user.lng,
                           zoom=zoom,
                           pLa=lats,
                           pLo=lngs,
                           parkObjs=park_tuples)
Beispiel #6
0
    def __init__(self, settings, config, sql=None, parent=None):
        """Qt bulk-import widget: a directory entry with Browse button, a
        Bulk Import button, and an Importer bound to the given
        settings/config/sql.  Also counts the hands already stored in the
        database (result kept in self.n_hands_in_db)."""
        QWidget.__init__(self, parent)
        self.settings = settings
        self.config = config

        self.importer = Importer.Importer(self, self.settings, config, sql,
                                          self)

        self.setLayout(QVBoxLayout())

        # Directory chooser row: line edit plus Browse button.
        self.importDir = QLineEdit(self.settings['bulkImport-defaultPath'])
        hbox = QHBoxLayout()
        hbox.addWidget(self.importDir)
        self.chooseButton = QPushButton('Browse...')
        self.chooseButton.clicked.connect(self.browseClicked)
        hbox.addWidget(self.chooseButton)
        self.layout().addLayout(hbox)

        self.load_button = QPushButton(_('Bulk Import'))
        self.load_button.clicked.connect(self.load_clicked)
        self.layout().addWidget(self.load_button)

        #    see how many hands are in the db and adjust accordingly
        tcursor = self.importer.database.cursor
        tcursor.execute("Select count(1) from Hands")
        row = tcursor.fetchone()
        tcursor.close()
        # rollback releases the implicit transaction opened by the SELECT.
        self.importer.database.rollback()
        self.n_hands_in_db = row[0]
def getMoreParkData(pName, usrLatLng):
    """Fetch one park and enrich it for the detail ("more") page.

    Args:
        pName: park name (string).
        usrLatLng: the user's (lat, lng) tuple.
    Returns the park object with weather/temperature and distance populated.
    """
    result = IMPRT.getPark(pName)
    result.setWeatherAndTemp(run=True)
    result.getDistance(latlng=usrLatLng)
    return result
Beispiel #8
0
    def __init__(self, settings, config, sql = None, parent = None):
        """GTK bulk-import pane (compact variant): file chooser, an Import
        button, a progress bar, and an Importer bound to settings/config/sql.
        Also counts the hands already stored in the database
        (kept in self.n_hands_in_db)."""
        self.settings = settings
        self.config = config
        self.parent = parent

        self.importer = Importer.Importer(self, self.settings, config, sql, parent)

        self.vbox = gtk.VBox(False, 0)
        self.vbox.show()

        # Multi-select chooser starting at the configured default path.
        self.chooser = gtk.FileChooserWidget()
        self.chooser.set_filename(self.settings['bulkImport-defaultPath'])
        self.chooser.set_select_multiple(True)
        self.chooser.set_show_hidden(True)
        self.vbox.add(self.chooser)
        self.chooser.show()

#    Table widget to hold the progress bar and load button
        #self.table = gtk.Table(rows=5, columns=5, homogeneous=False)
        self.table = gtk.Table(rows=2, columns=4, homogeneous=False)
        self.vbox.add(self.table)
        self.table.show()

#    label - spacer (fills in column one in table)
        self.lab_spacer = gtk.Label()
#         self.table.attach(self.lab_spacer, 3, 5, 3, 4, xpadding=0, ypadding=0,
#                           yoptions=gtk.SHRINK)
        self.table.attach(self.lab_spacer,1,2,1,2, xpadding=0, ypadding=0, yoptions=gtk.SHRINK)
        self.lab_spacer.show()

#    button - Import
        self.load_button = gtk.Button(_('_Bulk Import'))  # todo: rename variables to import too
        self.load_button.connect('clicked', self.load_clicked,
                                 _('Import clicked'))
#         self.table.attach(self.load_button, 2, 3, 4, 5, xpadding=0, ypadding=0,
#                           yoptions=gtk.SHRINK)
        self.table.attach(self.load_button, 2, 3, 1, 2, xpadding=0, ypadding=0, yoptions=gtk.SHRINK)
        self.load_button.show()

#    label - info

        self.progressbar = gtk.ProgressBar()
        self.table.attach(self.progressbar, 3, 4, 1, 2, xpadding=0, ypadding=0, yoptions=gtk.SHRINK)
        self.progressbar.set_text(_("Waiting..."))
        self.progressbar.set_fraction(0)
        self.progressbar.show()

#    see how many hands are in the db and adjust accordingly
        tcursor = self.importer.database.cursor
        tcursor.execute("Select count(1) from Hands")
        row = tcursor.fetchone()
        tcursor.close()
        # rollback releases the implicit transaction opened by the SELECT.
        self.importer.database.rollback()
        self.n_hands_in_db = row[0]
Beispiel #9
0
def import_activity(import_str):
    """Import a JSON-encoded activity upload into the database.

    *import_str* must be a JSON object with keys 'username', 'user_id',
    'uploaded_file_data' and 'uploaded_file_name'.  The uploaded data is
    written to a temporary local file, imported, optionally queued for
    analysis, and the temporary file is removed.  Errors are logged, not
    raised.
    """
    local_file_name = ""

    try:
        import_obj = json.loads(import_str)
        username = import_obj['username']
        user_id = import_obj['user_id']
        uploaded_file_data = import_obj['uploaded_file_data']
        uploaded_file_name = import_obj['uploaded_file_name']

        # Generate a random name for the local file.
        print("Generating local file name...")
        root_dir = os.path.dirname(os.path.abspath(__file__))
        tempfile_dir = os.path.join(root_dir, 'tempfile')
        if not os.path.exists(tempfile_dir):
            os.makedirs(tempfile_dir)
        upload_path = os.path.normpath(tempfile_dir)
        uploaded_file_name, uploaded_file_ext = os.path.splitext(
            uploaded_file_name)
        local_file_name = os.path.join(upload_path, str(uuid.uuid4()))
        local_file_name = local_file_name + uploaded_file_ext

        # Write the file.
        print("Write the data to a local file...")
        with open(local_file_name, 'wb') as local_file:
            local_file.write(uploaded_file_data.encode("utf-8"))

        # Import the file into the database.
        print("Import the data to the database...")
        data_mgr = DataMgr.DataMgr("", None, None, None)
        importer = Importer.Importer(data_mgr)
        success, _, activity_id = importer.import_file(username, user_id,
                                                       local_file_name,
                                                       uploaded_file_name,
                                                       uploaded_file_ext)

        # If the import was successful, then schedule the activity for analysis.
        if success:
            print("Import was successful, perform analysis...")
            analysis_scheduler = AnalysisScheduler.AnalysisScheduler()
            activity = data_mgr.retrieve_activity(activity_id)
            analysis_scheduler.add_to_queue(activity, user_id, data_mgr)
        else:
            print("Import was not successful.")
    except Exception:
        # Was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt; Exception keeps the best-effort behaviour
        # without hiding interpreter-level signals.
        log_error("Exception when importing activity data: " + str(import_str))
        log_error(traceback.format_exc())
        # str() the exception type — the original passed the raw type object.
        log_error(str(sys.exc_info()[0]))
    finally:
        # Remove the local file; guard against a failure before the file
        # was actually created (os.remove would raise inside finally).
        if local_file_name and os.path.exists(local_file_name):
            print("Removing local file...")
            os.remove(local_file_name)
Beispiel #10
0
def wacot_import(args):
    """Run the dump importer and (optionally) the analysis passes.

    args.from_dumps selects which dumps to import ('all', 'xml' or 'cat');
    args.only_import, when true, skips the analysis phase.
    """
    importer = Importer.Importer()
    # Membership tests replace the repeated equality-or chains.
    if args.from_dumps in ('all', 'xml'):
        importer.import_xml()
    if args.from_dumps in ('all', 'cat'):
        importer.import_categories()
    if not args.only_import:
        analyzer = Analyzer.Analyzer()
        analyzer.compute_article_contributions()
        analyzer.compute_category_contributions()
        analyzer.compute_bot_flags()
        analyzer.count_article_contributions()
        analyzer.count_category_contributions()
def upload():
    """
    Imports a file and saves to DB
    :return: JSON with the saved file name and its log file name, or None
             when no file was posted
    """
    datafile = request.files['file']
    c = MySQL.get_connection(DATABASE)
    try:
        if datafile:
            logfile = os.path.splitext(datafile.filename)[0] + str(
                int(time.time())) + '.log'  # given name + current timestamp
            f = logging.FileHandler(os.path.join(LOG_DIR, logfile), 'w')
            try:
                Config.setup_logging(f)

                filepath = os.path.join(UPLOADS_DIR, datafile.filename)
                datafile.save(filepath)  # to file system
                Importer.run(filepath, c, {"normalization": request.form['normalization']})
            finally:
                # BUG FIX: the handler was only detached/closed on success,
                # leaking an open log file whenever the import raised.
                logger.removeHandler(f)
                f.close()
            return jsonify({"name": datafile.filename, 'log': logfile})
    finally:
        # BUG FIX: the connection was never closed when no file was posted.
        c.close()
Beispiel #12
0
def upload():
    """
    Imports a file and saves to DB
    :return: JSON with the saved file name and its log file name, or None
             when no file was posted
    """
    datafile = request.files['file']
    c = MySQL.get_connection(DATABASE)
    try:
        if datafile:
            logfile = os.path.splitext(datafile.filename)[0] + str(
                int(time.time())) + '.log'  # given name + current timestamp
            f = logging.FileHandler(os.path.join(LOG_DIR, logfile), 'w')
            try:
                Config.setup_logging(f)

                filepath = os.path.join(UPLOADS_DIR, datafile.filename)
                datafile.save(filepath)  # to file system
                Importer.run(filepath, c,
                             {"normalization": request.form['normalization']})
            finally:
                # BUG FIX: the handler was only detached/closed on success,
                # leaking an open log file whenever the import raised.
                logger.removeHandler(f)
                f.close()
            return jsonify({"name": datafile.filename, 'log': logfile})
    finally:
        # BUG FIX: the connection was never closed when no file was posted.
        c.close()
Beispiel #13
0
        def import_snippets(self, filenames):
                """Import snippets from the given URIs, reporting errors via dialogs."""
                ok = True

                for uri in filenames:
                        # Skip anything that is not a local file:// URI.
                        if not gedit.utils.uri_has_file_scheme(uri):
                                continue

                        # Turn the URI into a plain filesystem path.
                        path = gio.File(uri).get_path()

                        err = Importer(path).run()
                        if err:
                                ok = False
                                message_dialog(self.dlg, gtk.MESSAGE_ERROR,
                                               _('The following error occurred while importing: %s') % err)

                self.build_model(True)

                if ok:
                        message_dialog(self.dlg, gtk.MESSAGE_INFO,
                                       _('Import successfully completed'))
Beispiel #14
0
def main(argv=None):
    """main can also be called in the python interpreter, by supplying the command line as the argument."""
    if argv is None:
        argv = sys.argv[1:]

    def destroy(*args):  # call back for terminating the main eventloop
        gtk.main_quit()

    Configuration.set_logfile("fpdb-log.txt")
    (options, argv) = Options.fpdb_options()

    if options.sitename:
        options.sitename = Options.site_alias(options.sitename)

    if options.usage == True:
        #Print usage examples and exit
        print _("USAGE:")
        sys.exit(0)

    Configuration.set_logfile("GuiBulkImport-log.txt")
    if options.config:
        config = Configuration.Config(options.config)
    else:
        config = Configuration.Config()

    settings = {}
    if os.name == 'nt': settings['os'] = 'windows'
    else: settings['os'] = 'linuxmac'

    settings.update(config.get_db_parameters())
    settings.update(config.get_import_parameters())
    settings.update(config.get_default_paths())

    #Do something useful
    importer = Importer.Importer(False, settings, config, None)
    importer.addBulkImportImportFileOrDir(os.path.expanduser(options.filename))
    importer.setCallHud(False)
    if options.archive:
        importer.setStarsArchive(True)
        importer.setFTPArchive(True)
    if options.testData:
        importer.setPrintTestData(True)
    (stored, dups, partial, errs, ttime) = importer.runImport()
    importer.clearFileList()
    print(_('Bulk import done: Stored: %d, Duplicates: %d, Partial: %d, Errors: %d, Time: %s seconds, Stored/second: %.0f')\
                     % (stored, dups, partial, errs, ttime, (stored+0.0) / ttime))
Beispiel #15
0
class Parser(object):
    """Loads object data (vertices/normals/faces/textures) via an Importer
    and draws registered objects with OpenGL quads."""

    def __init__(self):
        self.objects = {}          # name -> parsed object data dict
        self.importer = Importer()

    def _import(self, file_path):
        """Parse *file_path*; return (data, base_name_without_extension).

        BUG FIX: the old helper was a lambda that took a `path` argument but
        closed over `file_path` instead, silently ignoring its parameter.
        """
        base_name = splitext(split(file_path)[1])[0]
        with open(file_path) as fh:  # avoid shadowing the builtin `file`
            data = self.importer.do_import(fh)
        return data, base_name

    def _add_obj(self, data, file_name):
        """Register the object in the parser."""
        vertices, normals, faces, textures = data
        self.objects[file_name] = {
            'vertices': vertices,
            'normals': normals,
            'faces': faces,
            'textures': textures,
        }

    def _parse(self, name, use_texture : bool):
        # NOTE(review): `use_texture` is currently unused — texture
        # coordinates are always emitted; confirm whether it should gate
        # the glTexCoord2fv call.
        obj = self.objects[name]
        glBegin(GL_QUADS)

        # Each vertex entry in a face is a vertex/texture/normal index triple.
        for face in obj['faces']:
            for v in face:
                vi, ti, ni = v
                glNormal3fv(obj['normals'][ni])
                glTexCoord2fv(obj['textures'][ti])
                glVertex3fv(obj['vertices'][vi])
        glEnd()

    def load(self, file_path):
        """Load the file and register its object under its base name."""
        data, file_name = self._import(file_path)
        self._add_obj(data, file_name)

    def parse(self, name, texture=False):
        """Draw the named object."""
        self._parse(name, texture)
Beispiel #16
0
import Importer
Importer.loadImports('data/blueprints/')
from data.blueprints import *


class blueprint:
    """A buildable blueprint: an ID, dimensions, and a footprint of tiles
    expressed relative to an anchor position."""

    def __init__(self, ID, width, height, tiles):
        self.ID = ID
        # Each tile entry appears to be (row_off, col_off, ?, resource) —
        # inferred from getRel/getRequiredResources; TODO confirm layout.
        self.tiles = tiles
        self.width = width
        self.height = height

    def getRel(self, colRow):
        """Return the tile list translated to the absolute (col, row) anchor."""
        toRet = []

        for t in range(len(self.tiles)):
            toRet.append(
                (colRow[1] - self.tiles[t][0], self.tiles[t][1] + colRow[0],
                 self.tiles[t][2], self.tiles[t][3]))

        return toRet

    def getRequiredResources(self):
        # NOTE(review): this method looks truncated in this copy of the
        # file — `req` is built but nothing is appended or returned after
        # the `found` flag is set.  Verify against the original source.
        req = []

        for t in range(len(self.tiles)):
            found = False
            for i in range(len(req)):
                if req[i][0] == self.tiles[t][3]:
                    found = True
 def setUp(self):
     # Fresh Importer instance for each test.  NOTE(review): this fragment
     # uses unusual 1-space indentation and its enclosing class is not
     # visible here — likely a paste artifact; verify against the original.
     self.imp = Importer()
Beispiel #18
0
    def __init__(self, settings, config, sql=None, parent=None):
        """Build the GTK bulk-import pane: file chooser, import options
        (status frequency, thread count, hands/file, index and hudcache
        dropping, site filter), the Import button and a progress bar.

        Also queries the Hands table; when the database is empty the
        drop-index / drop-hudcache combos are forced to 'drop' and disabled.

        NOTE(review): self.allowThreads is read below but never assigned
        here — presumably set by a subclass or caller beforehand; confirm.
        """
        self.settings = settings
        self.config = config
        self.parent = parent

        self.importer = Importer.Importer(self, self.settings, config, sql,
                                          parent)

        self.vbox = gtk.VBox(False, 0)
        self.vbox.show()

        # Multi-select chooser starting at the configured default path.
        self.chooser = gtk.FileChooserWidget()
        self.chooser.set_filename(self.settings['bulkImport-defaultPath'])
        self.chooser.set_select_multiple(True)
        self.chooser.set_show_hidden(True)
        self.vbox.add(self.chooser)
        self.chooser.show()

        #    Table widget to hold the settings
        self.table = gtk.Table(rows=5, columns=5, homogeneous=False)
        self.vbox.add(self.table)
        self.table.show()

        #    checkbox - print start/stop?
        self.chk_st_st = gtk.CheckButton(_('Print Start/Stop Info'))
        self.table.attach(self.chk_st_st,
                          0,
                          1,
                          0,
                          1,
                          xpadding=10,
                          ypadding=0,
                          yoptions=gtk.SHRINK)
        self.chk_st_st.show()
        self.chk_st_st.set_active(True)

        #    label - status
        self.lab_status = gtk.Label(_("Hands/status print:"))
        self.table.attach(self.lab_status,
                          1,
                          2,
                          0,
                          1,
                          xpadding=0,
                          ypadding=0,
                          yoptions=gtk.SHRINK)
        self.lab_status.show()
        self.lab_status.set_justify(gtk.JUSTIFY_RIGHT)
        self.lab_status.set_alignment(1.0, 0.5)

        #    spin button - status
        status_adj = gtk.Adjustment(
            value=100,
            lower=0,
            upper=300,
            step_incr=10,
            page_incr=1,
            page_size=0)  #not sure what upper value should be!
        self.spin_status = gtk.SpinButton(adjustment=status_adj,
                                          climb_rate=0.0,
                                          digits=0)
        self.table.attach(self.spin_status,
                          2,
                          3,
                          0,
                          1,
                          xpadding=10,
                          ypadding=0,
                          yoptions=gtk.SHRINK)
        self.spin_status.show()

        #    label - threads
        self.lab_threads = gtk.Label(_("Number of threads:"))
        self.table.attach(self.lab_threads,
                          3,
                          4,
                          0,
                          1,
                          xpadding=0,
                          ypadding=0,
                          yoptions=gtk.SHRINK)
        self.lab_threads.show()
        if not self.allowThreads:
            self.lab_threads.set_sensitive(False)
        self.lab_threads.set_justify(gtk.JUSTIFY_RIGHT)
        self.lab_threads.set_alignment(1.0, 0.5)

        #    spin button - threads
        threads_adj = gtk.Adjustment(
            value=0, lower=0, upper=32, step_incr=1, page_incr=1,
            page_size=0)  #not sure what upper value should be!
        self.spin_threads = gtk.SpinButton(adjustment=threads_adj,
                                           climb_rate=0.0,
                                           digits=0)
        self.table.attach(self.spin_threads,
                          4,
                          5,
                          0,
                          1,
                          xpadding=10,
                          ypadding=0,
                          yoptions=gtk.SHRINK)
        self.spin_threads.show()
        if not self.allowThreads:
            self.spin_threads.set_sensitive(False)


#    checkbox - archive file?
        self.is_archive = gtk.CheckButton(_('Archive File'))
        self.table.attach(self.is_archive,
                          0,
                          1,
                          1,
                          2,
                          xpadding=10,
                          ypadding=0,
                          yoptions=gtk.SHRINK)
        self.is_archive.show()

        #    label - hands
        self.lab_hands = gtk.Label(_("Hands/file:"))
        self.table.attach(self.lab_hands,
                          1,
                          2,
                          1,
                          2,
                          xpadding=0,
                          ypadding=0,
                          yoptions=gtk.SHRINK)
        self.lab_hands.show()
        self.lab_hands.set_justify(gtk.JUSTIFY_RIGHT)
        self.lab_hands.set_alignment(1.0, 0.5)

        #    spin button - hands to import
        hands_adj = gtk.Adjustment(
            value=0, lower=0, upper=10, step_incr=1, page_incr=1,
            page_size=0)  #not sure what upper value should be!
        self.spin_hands = gtk.SpinButton(adjustment=hands_adj,
                                         climb_rate=0.0,
                                         digits=0)
        self.table.attach(self.spin_hands,
                          2,
                          3,
                          1,
                          2,
                          xpadding=10,
                          ypadding=0,
                          yoptions=gtk.SHRINK)
        self.spin_hands.show()

        #    label - drop indexes
        self.lab_drop = gtk.Label(_("Drop indexes:"))
        self.table.attach(self.lab_drop,
                          3,
                          4,
                          1,
                          2,
                          xpadding=0,
                          ypadding=0,
                          yoptions=gtk.SHRINK)
        self.lab_drop.show()
        self.lab_drop.set_justify(gtk.JUSTIFY_RIGHT)
        self.lab_drop.set_alignment(1.0, 0.5)

        #    ComboBox - drop indexes
        self.cb_dropindexes = gtk.combo_box_new_text()
        self.cb_dropindexes.append_text(_('auto'))
        self.cb_dropindexes.append_text(_("don't drop"))
        self.cb_dropindexes.append_text(_('drop'))
        self.cb_dropindexes.set_active(0)
        self.table.attach(self.cb_dropindexes,
                          4,
                          5,
                          1,
                          2,
                          xpadding=10,
                          ypadding=0,
                          yoptions=gtk.SHRINK)
        self.cb_dropindexes.show()

        self.cb_testmode = gtk.CheckButton(_('HUD Test mode'))
        self.table.attach(self.cb_testmode,
                          0,
                          1,
                          2,
                          3,
                          xpadding=10,
                          ypadding=0,
                          yoptions=gtk.SHRINK)
        self.cb_testmode.show()

        #    label - filter
        self.lab_filter = gtk.Label(_("Site filter:"))
        self.table.attach(self.lab_filter,
                          1,
                          2,
                          2,
                          3,
                          xpadding=0,
                          ypadding=0,
                          yoptions=gtk.SHRINK)
        self.lab_filter.show()
        self.lab_filter.set_justify(gtk.JUSTIFY_RIGHT)
        self.lab_filter.set_alignment(1.0, 0.5)

        #    ComboBox - filter
        self.cbfilter = gtk.combo_box_new_text()
        disabled_sites = []  # move disabled sites to bottom of list
        self.cbfilter.append_text(_("Please select site"))
        for w in self.config.hhcs:
            try:
                if self.config.supported_sites[
                        w].enabled:  # include enabled ones first
                    print w
                    self.cbfilter.append_text(w)
                else:
                    disabled_sites.append(w)
            except:  # self.supported_sites[w] may not exist if hud_config is bad
                disabled_sites.append(w)
        for w in disabled_sites:  # then disabled ones
            print w
            self.cbfilter.append_text(w)
        self.cbfilter.set_active(0)
        self.table.attach(self.cbfilter,
                          2,
                          3,
                          2,
                          3,
                          xpadding=10,
                          ypadding=1,
                          yoptions=gtk.SHRINK)
        self.cbfilter.show()

        #    label - drop hudcache
        self.lab_hdrop = gtk.Label(_("Drop HudCache:"))
        self.table.attach(self.lab_hdrop,
                          3,
                          4,
                          2,
                          3,
                          xpadding=0,
                          ypadding=0,
                          yoptions=gtk.SHRINK)
        self.lab_hdrop.show()
        self.lab_hdrop.set_justify(gtk.JUSTIFY_RIGHT)
        self.lab_hdrop.set_alignment(1.0, 0.5)

        #    ComboBox - drop hudcache
        self.cb_drophudcache = gtk.combo_box_new_text()
        self.cb_drophudcache.append_text(_('auto'))
        self.cb_drophudcache.append_text(_("don't drop"))
        self.cb_drophudcache.append_text(_('drop'))
        self.cb_drophudcache.set_active(0)
        self.table.attach(self.cb_drophudcache,
                          4,
                          5,
                          2,
                          3,
                          xpadding=10,
                          ypadding=0,
                          yoptions=gtk.SHRINK)
        self.cb_drophudcache.show()

        #    button - Import
        self.load_button = gtk.Button(
            _('_Bulk Import'))  # todo: rename variables to import too
        self.load_button.connect('clicked', self.load_clicked,
                                 _('Import clicked'))
        self.table.attach(self.load_button,
                          2,
                          3,
                          4,
                          5,
                          xpadding=0,
                          ypadding=0,
                          yoptions=gtk.SHRINK)
        self.load_button.show()

        #    label - spacer (keeps rows 3 & 5 apart)
        self.lab_spacer = gtk.Label()
        self.table.attach(self.lab_spacer,
                          3,
                          5,
                          3,
                          4,
                          xpadding=0,
                          ypadding=0,
                          yoptions=gtk.SHRINK)
        self.lab_spacer.show()

        #    label - info
        #        self.lab_info = gtk.Label()
        #        self.table.attach(self.lab_info, 3, 5, 4, 5, xpadding = 0, ypadding = 0, yoptions=gtk.SHRINK)
        #        self.lab_info.show()
        self.progressbar = gtk.ProgressBar()
        self.table.attach(self.progressbar,
                          3,
                          5,
                          4,
                          5,
                          xpadding=0,
                          ypadding=0,
                          yoptions=gtk.SHRINK)
        self.progressbar.set_text(_("Waiting..."))
        self.progressbar.set_fraction(0)
        self.progressbar.show()

        #    see how many hands are in the db and adjust accordingly
        tcursor = self.importer.database.cursor
        tcursor.execute("Select count(1) from Hands")
        row = tcursor.fetchone()
        tcursor.close()
        # rollback releases the implicit transaction opened by the SELECT.
        self.importer.database.rollback()
        self.n_hands_in_db = row[0]
        # Empty database: dropping indexes/hudcache is always best, so force
        # 'drop' and grey out the choices.
        if self.n_hands_in_db == 0:
            self.cb_dropindexes.set_active(2)
            self.cb_dropindexes.set_sensitive(False)
            self.lab_drop.set_sensitive(False)
            self.cb_drophudcache.set_active(2)
            self.cb_drophudcache.set_sensitive(False)
            self.lab_hdrop.set_sensitive(False)
Beispiel #19
0
import Importer
Importer.loadImports('data/tiles/')
from data.tiles import *


class tileList:
    """Registry of tile factories discovered in the module globals."""

    def __init__(self):
        # Collect the `setter` callable of every global whose name is
        # 'Tile' or starts with 'Tile-' (removed the unused `key = None`).
        self.tiles = []
        self.finderList = []  # lazily-built probe instances, parallel to self.tiles
        for key in globals().keys():
            if key.split('-')[0] == 'Tile':
                self.tiles.append(globals()[key].setter)

    def getTileByType(self, world, tType):
        """Return the tile factory whose probe instance reports *tType*.

        Returns None when no registered tile matches.
        """
        if not self.finderList:  # idiomatic emptiness test (was `== []`)
            # One probe instance per factory, same order as self.tiles so
            # the indexes line up.
            self.finderList = [factory(world, (0, 0)) for factory in self.tiles]
        for i, probe in enumerate(self.finderList):
            if probe.tileType == tType:
                return self.tiles[i]
class TestImporter(unittest.TestCase):
    """Exercises the KEGG Importer against the bundled fixture files."""

    # Every test configures the importer from the same fixture file.
    CONF_FILE = './fixtures/keggimporter.conf'

    def setUp(self):
        self.imp = Importer()

    def _configure(self, importer):
        """Start *importer* and point it at the fixture configuration."""
        importer.startImporter()
        importer.setConfigurationFile(self.CONF_FILE)

    def test_startImporter(self):
        self.imp.startImporter()

    def test_getConfiguration(self):
        self._configure(self.imp)

        config = self.imp.getConfiguration('directories', 'inserts')

        self.assertEqual(config, './fixtures/inserts')

    def test_openInsertFile(self):
        self._configure(self.imp)

        handle = self.imp.openInsertFile('remove_this_file')

        self.assertTrue(type(handle) is file)

        handle.close()

    def test_nextPrimaryKey(self):
        self._configure(self.imp)

        # Keys are handed out consecutively per table, starting at 1.
        for expected in (1, 2, 3):
            self.assertEqual(self.imp.nextPrimaryKey('test'), expected)

    def test_writeFile(self):
        self._configure(self.imp)

        out = open('./fixtures/remove_this_thing', 'a')

        # writeFile returns a running record count for the table.
        for expected in (1, 2, 3):
            self.assertEqual(
                self.imp.writeFile(out, 'test_table', ['bla', 'ble', 'bli']),
                expected)

    def test_write_importer_files(self):
        # The sub-importers must be configured before the main importer runs.
        for importer in (self.imp.importerPathway, self.imp.importerEc,
                         self.imp.importerOrganism, self.imp):
            self._configure(importer)

        self.imp.writeTaxonomies()
        self.imp.writePathways()
        self.imp.writeEcs()
        self.imp.writeOrganisms()
        self.imp.writeProteins()
        self.imp.writeProteinRelations()
        self.imp.writeOrganismTaxonomies()
        self.imp.writeProteinAccessions()
        self.imp.writeEcMaps()

        expectedFiles = [
            'accessionsInsert.psql', 'organismTaxonomiesInsert.psql',
            'proteinMapsInsert.psql', 'ecMapsInsert.psql',
            'pathwayClassesInsert.psql', 'proteinsInsert.psql',
            'ecsInsert.psql', 'pathwayNamesInsert.psql',
            'organismEcsInsert.psql', 'pathwaySuperClassesInsert.psql',
            'taxonomiesInsert.psql', 'organismMapsInsert.psql',
            'proteinAccessionsInsert.psql', 'organismsInsert.psql',
            'proteinEcsInsert.psql'
        ]

        for insertFile in expectedFiles:
            f = open('./fixtures/inserts/' + insertFile)
            self.assertTrue(type(f) is file)

            count = sum(1 for _ in f)

            # Random number greater than 1
            if count == 10:
                break

            # Random number around an expected minimum amount of records.
            # Empty files will raise an error (what we want to caught).
            if count < 5:
                print('EMPTY FILE: ' + insertFile)

            self.assertTrue(count > 5)

            f.close()
Beispiel #21
0
    def __init__(self, parent, controller):
        """Build the food-selection page.

        Args:
            parent: Parent widget this tk.Frame is attached to.
            controller: Application controller; must provide
                show_frame(name) for page navigation.
        """
        tk.Frame.__init__(self, parent)
        self.controller = controller
        self.parent = parent

        # Food data objects loaded from the importer module
        # (each is assumed to expose a .foodname attribute, used below).
        self.gimbab = importer.food('Gimbab')
        self.bulgogi = importer.food('Bulgogi')
        self.kimchi = importer.food('Kimchi_Jjigae')

        # Container frame the other widgets are placed into
        self.frame = tk.Frame(self, bg='white')
        self.frame.place(relwidth=1, relheight=1)

        # Fixed-size area that shows the picture of the selected food
        self.image_frame = tk.Frame(self, bg='white')
        self.image_frame.place(relx=0.5,
                               rely=0.2,
                               width=400,
                               height=450,
                               anchor='n')

        # Food image obtained via importer.get_food_image_path;
        # 'food2' is the default image shown before any selection
        self.food_image = importer.get_food_image_path('food2')
        self.food_label = tk.Label(self.image_frame, image=self.food_image)
        self.food_label.pack()

        # Button that switches to the timer view; its .place() call is
        # commented out, so it is created but not shown here
        self.timer_button = tk.Button(self.frame, text='Show timer')
        #self.timer_button.place(relx=0.5, rely=0, relwidth=0.8, relheight=0.05, anchor='n')

        # Button that navigates to the recipe page ("Secondpage").
        # It starts disabled and is presumably re-enabled once the user
        # selects a food (likely in option_changed) — confirm.
        self.button2 = tk.Button(
            self.frame,
            text='Show the recipe',
            command=lambda: controller.show_frame("Secondpage"))
        self.button2["state"] = "disabled"
        self.button2.place(relx=0.5,
                           rely=0.9,
                           relwidth=0.8,
                           relheight=0.05,
                           anchor='n')

        # Dropdown menu listing the available recipes
        self.dropdown_selected = tk.StringVar()
        self.dropdown_selected.set(
            "Select the desired recipe")  # Default value
        self.food_list = (self.gimbab.foodname, self.kimchi.foodname,
                          self.bulgogi.foodname)

        self.list_of_food = tk.OptionMenu(self.frame, self.dropdown_selected,
                                          *self.food_list)
        # Fire self.option_changed whenever the selection variable is
        # written ("w" trace mode)
        self.dropdown_selected.trace("w", self.option_changed)
        self.list_of_food.place(relx=0.5,
                                rely=0.1,
                                relwidth=0.8,
                                relheight=0.05,
                                anchor='n')
Beispiel #22
0
# Configure the root logger to mirror every message (DEBUG and up) to
# both stdout and a log file.
log = logging.getLogger('')
log.setLevel(logging.DEBUG)
# Renamed from 'format' so the builtin format() is not shadowed.
formatter = logging.Formatter(
    "%(asctime)s - %(name)s - %(levelname)s - %(message)s")

ch = logging.StreamHandler(sys.stdout)
ch.setFormatter(formatter)
log.addHandler(ch)

# maxBytes=0 / backupCount=0 disables rotation: the file grows unbounded.
# 'logFile' is expected to be defined earlier in this script — confirm.
fh = logging.handlers.RotatingFileHandler(logFile, maxBytes=0, backupCount=0)
fh.setFormatter(formatter)
log.addHandler(fh)

log.info('keggimporter: START')

# Run the KEGG import phases in order, logging around each one.
imp = Importer()

imp.startImporter()

log.info('writeTaxonomies: START')
imp.writeTaxonomies()
log.info('writeTaxonomies: DONE')

log.info('writePathways: START')
imp.writePathways()
log.info('writePathways: DONE')

log.info('writeEcs: START')
imp.writeEcs()
log.info('writeEcs: DONE')
Beispiel #23
0
def main(argv=None):
    if argv is None:
        argv = sys.argv[1:]

    Configuration.set_logfile("fpdb-log.txt")
    (options, argv) = Options.fpdb_options()

    test_all_sites = True

    if options.usage == True:
        usage()

    single_file_test = False

    if options.sitename:
        options.sitename = Options.site_alias(options.sitename)
        if options.sitename == False:
            usage()
        if options.filename:
            print "Testing single hand: '%s'" % options.filename
            single_file_test = True
        else:
            print "Only regression testing '%s' files" % (options.sitename)
        test_all_sites = False

    config = Configuration.Config(file = "HUD_config.test.xml")
    db = Database.Database(config)
    settings = {}
    settings.update(config.get_db_parameters())
    settings.update(config.get_import_parameters())
    settings.update(config.get_default_paths())
    db.recreate_tables()
    importer = Importer.Importer(False, settings, config, None)
    importer.setDropIndexes("don't drop")
    importer.setThreads(-1)
    importer.setCallHud(False)
    importer.setFakeCacheHHC(True)

    AbsoluteErrors    = FpdbError('Absolute Poker')
    BetfairErrors     = FpdbError('Betfair')
    BetOnlineErrors   = FpdbError('BetOnline')
    BossErrors        = FpdbError('Boss')
    BovadaErrors      = FpdbError('Bovada')
    CakeErrors        = FpdbError('Cake')
    EnetErrors        = FpdbError('Enet')
    EntractionErrors  = FpdbError('Entraction')
    EverleafErrors    = FpdbError('Everleaf Poker')
    EverestErrors     = FpdbError('Everest Poker')
    FTPErrors         = FpdbError('Full Tilt Poker')
    iPokerErrors      = FpdbError('iPoker')
    MergeErrors      = FpdbError('Merge')
    MicrogamingErrors = FpdbError('Microgaming')
    OnGameErrors      = FpdbError('OnGame')
    PacificPokerErrors= FpdbError('PacificPoker')
    PartyPokerErrors  = FpdbError('Party Poker')
    PokerStarsErrors  = FpdbError('PokerStars')
    PKRErrors         = FpdbError('PKR')
    PTErrors          = FpdbError('PokerTracker')
    WinamaxErrors     = FpdbError('Winamax')

    ErrorsList = [
                    AbsoluteErrors, BetfairErrors, BetOnlineErrors, BossErrors, CakeErrors, EntractionErrors,
                    EverleafErrors, EverestErrors, FTPErrors, iPokerErrors, MergeErrors, MicrogamingErrors,
                    OnGameErrors, PacificPokerErrors, PartyPokerErrors, PokerStarsErrors, PKRErrors,
                    PTErrors, WinamaxErrors, BovadaErrors, EnetErrors,
                ]

    sites = {
                'Absolute' : False,
                'Betfair' : False,
                'BetOnline': False,
                'Boss' : False,
                'Bovada' : False,
                'Cake' : False,
                'Enet' : False,
                'Entraction' : False,
                'Everleaf' : False,
                'Everest' : False,
                'Full Tilt Poker' : False,
                'iPoker' : False,
                'Merge' : False,
                'Microgaming': False,
                'OnGame' : False,
                'Pkr' : False,
                'PacificPoker' : False,
                'PartyPoker' : False,
                'PokerStars' : False,
                'PokerTracker' : False,
                'Winamax' : False,
            }

    if test_all_sites == True:
        for s in sites:
            sites[s] = True
    else:
        sites[options.sitename] = True

    if sites['PacificPoker'] == True and not single_file_test:
        walk_testfiles("regression-test-files/cash/PacificPoker/", compare, importer, PacificPokerErrors, "PacificPoker")
        walk_testfiles("regression-test-files/tour/PacificPoker/", compare, importer, PacificPokerErrors, "PacificPoker")
        walk_testfiles("regression-test-files/summaries/PacificPoker/", compare, importer, PacificPokerErrors, "PacificPoker")
    elif sites['PacificPoker'] == True and single_file_test:
        walk_testfiles(options.filename, compare, importer, PacificPokerErrors, "PacificPoker")

    if sites['PokerStars'] == True and not single_file_test:
        walk_testfiles("regression-test-files/cash/Stars/", compare, importer, PokerStarsErrors, "PokerStars")
        walk_testfiles("regression-test-files/tour/Stars/", compare, importer, PokerStarsErrors, "PokerStars")
        walk_testfiles("regression-test-files/summaries/Stars/", compare, importer, PokerStarsErrors, "PokerStars")
    elif sites['PokerStars'] == True and single_file_test:
        walk_testfiles(options.filename, compare, importer, PokerStarsErrors, "PokerStars")

    if sites['Full Tilt Poker'] == True and not single_file_test:
        walk_testfiles("regression-test-files/cash/FTP/", compare, importer, FTPErrors, "Full Tilt Poker")
        walk_testfiles("regression-test-files/tour/FTP/", compare, importer, FTPErrors, "Full Tilt Poker")
        walk_testfiles("regression-test-files/summaries/FTP/", compare, importer, FTPErrors, "Full Tilt Poker")
    elif sites['Full Tilt Poker'] == True and single_file_test:
        walk_testfiles(options.filename, compare, importer, FTPErrors, "Full Tilt Poker")
    if sites['PartyPoker'] == True and not single_file_test:
        walk_testfiles("regression-test-files/cash/PartyPoker/", compare, importer, PartyPokerErrors, "PartyPoker")
        walk_testfiles("regression-test-files/tour/PartyPoker/", compare, importer, PartyPokerErrors, "PartyPoker")
    elif sites['PartyPoker'] == True and single_file_test:
        walk_testfiles(options.filename, compare, importer, PartyPokerErrors, "PartyPoker")
    if sites['Betfair'] == True and not single_file_test:
        walk_testfiles("regression-test-files/cash/Betfair/", compare, importer, BetfairErrors, "Betfair")
    elif sites['Betfair'] == True and single_file_test:
        walk_testfiles(options.filename, compare, importer, BetfairErrors, "Betfair")
    if sites['OnGame'] == True and not single_file_test:
        walk_testfiles("regression-test-files/cash/OnGame/", compare, importer, OnGameErrors, "OnGame")
        walk_testfiles("regression-test-files/tour/OnGame/", compare, importer, OnGameErrors, "OnGame")
    elif sites['OnGame'] == True and single_file_test:
        walk_testfiles(options.filename, compare, importer, OnGameErrors, "OnGame")
    if sites['Absolute'] == True and not single_file_test:
        walk_testfiles("regression-test-files/cash/Absolute/", compare, importer, AbsoluteErrors, "Absolute")
        walk_testfiles("regression-test-files/tour/Absolute/", compare, importer, AbsoluteErrors, "Absolute")
    elif sites['Absolute'] == True and single_file_test:
        walk_testfiles(options.filename, compare, importer, AbsoluteErrors, "Absolute")
    if sites['Everleaf'] == True and not single_file_test:
        walk_testfiles("regression-test-files/cash/Everleaf/", compare, importer, EverleafErrors, "Everleaf")
        walk_testfiles("regression-test-files/tour/Everleaf/", compare, importer, EverleafErrors, "Everleaf")
    elif sites['Everleaf'] == True and single_file_test:
        walk_testfiles(options.filename, compare, importer, EverleafErrors, "Everleaf")
    if sites['Everest'] == True and not single_file_test:
        walk_testfiles("regression-test-files/cash/Everest/", compare, importer, EverestErrors, "Everest")
        walk_testfiles("regression-test-files/tour/Everest/", compare, importer, EverestErrors, "Everest")
    elif sites['Everest'] == True and single_file_test:
        walk_testfiles(options.filename, compare, importer, EverestErrors, "Everest")
    if sites['Merge'] == True and not single_file_test:
        walk_testfiles("regression-test-files/cash/Merge/", compare, importer, MergeErrors, "Merge")
        walk_testfiles("regression-test-files/tour/Merge/", compare, importer, MergeErrors, "Merge")
    elif sites['Merge'] == True and single_file_test:
        walk_testfiles(options.filename, compare, importer, MergeErrors, "Merge")
    if sites['Pkr'] == True and not single_file_test:
        walk_testfiles("regression-test-files/cash/PKR/", compare, importer, PKRErrors, "PKR")
        walk_testfiles("regression-test-files/tour/PKR/", compare, importer, PKRErrors, "PKR")
    elif sites['Pkr'] == True and single_file_test:
        walk_testfiles(options.filename, compare, importer, PKRErrors, "PKR")
    if sites['iPoker'] == True and not single_file_test:
        walk_testfiles("regression-test-files/cash/iPoker/", compare, importer, iPokerErrors, "iPoker")
        walk_testfiles("regression-test-files/tour/iPoker/", compare, importer, iPokerErrors, "iPoker")
    elif sites['iPoker'] == True and single_file_test:
        walk_testfiles(options.filename, compare, importer, iPokerErrors, "iPoker")
    if sites['Boss'] == True and not single_file_test:
        walk_testfiles("regression-test-files/cash/Boss/", compare, importer, BossErrors, "Boss")
        walk_testfiles("regression-test-files/tour/Boss/", compare, importer, BossErrors, "Boss")
    elif sites['Boss'] == True and single_file_test:
        walk_testfiles(options.filename, compare, importer, BossErrors, "Boss")
    if sites['Entraction'] == True and not single_file_test:
        walk_testfiles("regression-test-files/cash/Entraction/", compare, importer, EntractionErrors, "Entraction")
        walk_testfiles("regression-test-files/tour/Entraction/", compare, importer, EntractionErrors, "Entraction")
    elif sites['Entraction'] == True and single_file_test:
        walk_testfiles(options.filename, compare, importer, EntractionErrors, "Entraction")
    if sites['BetOnline'] == True and not single_file_test:
        walk_testfiles("regression-test-files/cash/BetOnline/", compare, importer, BetOnlineErrors, "BetOnline")
        walk_testfiles("regression-test-files/tour/BetOnline/", compare, importer, BetOnlineErrors, "BetOnline")
    elif sites['BetOnline'] == True and single_file_test:
        walk_testfiles(options.filename, compare, importer, BetOnlineErrors, "BetOnline")
    if sites['Microgaming'] == True and not single_file_test:
        walk_testfiles("regression-test-files/cash/Microgaming/", compare, importer, MicrogamingErrors, "Microgaming")
        walk_testfiles("regression-test-files/tour/Microgaming/", compare, importer, MicrogamingErrors, "Microgaming")
    elif sites['Microgaming'] == True and single_file_test:
        walk_testfiles(options.filename, compare, importer, MicrogamingErrors, "Microgaming")
    if sites['Cake'] == True and not single_file_test:
        walk_testfiles("regression-test-files/cash/Cake/", compare, importer, CakeErrors, "Cake")
        walk_testfiles("regression-test-files/tour/Cake/", compare, importer, CakeErrors, "Cake")
    elif sites['Cake'] == True and single_file_test:
        walk_testfiles(options.filename, compare, importer, CakeErrors, "Cake")
    if sites['PokerTracker'] == True and not single_file_test:
        walk_testfiles("regression-test-files/cash/PokerTracker/", compare, importer, PTErrors, "PokerTracker")
        walk_testfiles("regression-test-files/tour/PokerTracker/", compare, importer, PTErrors, "PokerTracker")
    elif sites['PokerTracker'] == True and single_file_test:
        walk_testfiles(options.filename, compare, importer, PTErrors, "PokerTracker")
    if sites['Winamax'] == True and not single_file_test:
        walk_testfiles("regression-test-files/cash/Winamax/", compare, importer, WinamaxErrors, "Winamax")
        walk_testfiles("regression-test-files/tour/Winamax/", compare, importer, WinamaxErrors, "Winamax")
    elif sites['Winamax'] == True and single_file_test:
        walk_testfiles(options.filename, compare, importer, WinamaxErrors, "Winamax")
    if sites['Bovada'] == True and not single_file_test:
        walk_testfiles("regression-test-files/cash/Bovada/", compare, importer, BovadaErrors, "Bovada")
        walk_testfiles("regression-test-files/tour/Bovada/", compare, importer, BovadaErrors, "Bovada")
    elif sites['Bovada'] == True and single_file_test:
        walk_testfiles(options.filename, compare, importer, BovadaErrors, "Bovada")
    if sites['Enet'] == True and not single_file_test:
        walk_testfiles("regression-test-files/cash/Enet/", compare, importer, EnetErrors, "Enet")
    elif sites['Enet'] == True and single_file_test:
        walk_testfiles(options.filename, compare, importer, EnetErrors, "Enet")



    totalerrors = 0

    for i, site in enumerate(ErrorsList):
        totalerrors += ErrorsList[i].errorcount

    for i, site in enumerate(ErrorsList):
        ErrorsList[i].print_histogram()

    # Merge the dicts of stats from the various error objects
    statdict = {}
    for i, site in enumerate(ErrorsList):
        tmp = ErrorsList[i].statcount
        for stat in tmp:
            if stat in statdict:
                statdict[stat] += tmp[stat]
            else:
                statdict[stat] = tmp[stat]

    print "\n"
    print "---------------------"
    print "Errors by stat:"
    print "---------------------"
    #for stat in statdict:
    #    print "(%3d) : %s" %(statdict[stat], stat)

    sortedstats = sorted([(value,key) for (key,value) in statdict.items()])
    for num, stat in sortedstats:
        print "(%3d) : %s" %(num, stat)

    print "---------------------"
    print "Total Errors: %d" % totalerrors
    print "---------------------"

    print "-------- Parse Error List --------"
    for i, site in enumerate(ErrorsList):
        ErrorsList[i].print_parse_list()
Beispiel #24
0
import copy, Importer
Importer.loadImports('data/items/')
from data.items import *


class itemList:
    """Registry of item classes discovered in this module's globals.

    Globals whose name's first '-'-separated token is 'Item' contribute
    their ``setter`` attribute; *itemPictures* is kept for lookups by
    the caller.
    """

    def __init__(self, itemPictures):
        self.itemPictures = itemPictures
        # .setter of every matching global, in discovery order.
        self.items = [obj.setter for name, obj in globals().items()
                      if name.split('-')[0] == 'Item']
        # Probe instances, built lazily on the first ID lookup.
        self.finderList = []

    def getItems(self):
        """Return the list of registered item classes."""
        return self.items

    def getItemByID(self, ID):
        """Return the item class whose probe instance has *ID*, else None."""
        if not self.finderList:
            self.finderList = [make() for make in self.items]
        for index, probe in enumerate(self.finderList):
            if probe.ID == ID:
                return self.items[index]

    def getPictures(self):
        """Return the picture store handed to the constructor."""
        return self.itemPictures
Beispiel #25
0
import Importer
Importer.loadImports('data/tiles/')
from data.tiles import *

class tileList:
    """Collects the tile classes registered in this module's globals."""

    def __init__(self):
        # Tile globals are recognised by the first '-'-separated token
        # of their name being 'Tile'; keep each one's .setter.
        self.tiles = []
        self.finderList = []
        for name, value in globals().items():
            if name.split('-')[0] == 'Tile':
                self.tiles.append(value.setter)

    def getTileByType(self, world, tType):
        """Return the tile class whose instance reports *tType*.

        Probe instances are created on demand at (0, 0) in *world*;
        returns None when no registered tile matches.
        """
        if not self.finderList:
            self.finderList = [make(world, (0, 0)) for make in self.tiles]
        for tile_cls, probe in zip(self.tiles, self.finderList):
            if probe.tileType == tType:
                return tile_cls
Beispiel #26
0
def main(input_path, output_path, days, max_missing_days):
    """Flatten decision snapshots into one CSV row per decision.

    Reads snapshot files matching the *input_path* glob (newest first)
    and writes a ';'-separated file to *output_path*: a header, then one
    row per unique DecisionId with id, instrument, decision, confirmed,
    time, and one close-rate column per calendar day covering the last
    *days* days (days without a rate carry the previous rate forward).
    Snapshots with fewer than (weekdays - max_missing_days) usable rates
    are skipped.  Output is written to a '.incomplete' temp file and
    renamed on success; a 'kill' file next to the output aborts the run.
    """

    # Approximate count of trading days in the window.
    # NOTE(review): true division yields a float here — presumably
    # integer division (//) was intended; confirm.
    weekdays = (5 * days) / 7

    output_dir = os.path.dirname(os.path.abspath(output_path))
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)

    # Presence of this file requests a graceful abort (checked
    # periodically via maybe_check_killfile).
    kill_path = output_dir + '/kill'
    killfile_monitor = KillFileMonitor(kill_path, 1)

    output_path_temp = output_path + '.incomplete'
    try:
        with open(output_path_temp, 'w', encoding='utf8') as out_file:
            # Header: fixed columns, then day offsets -days+1 .. 0.
            out_file.write('id;instrument;decision;confirmed;time;')
            for day in range(-days + 1, 1):
                out_file.write(str(day))
                if day < 0:
                    out_file.write(';')

            progress = FileItemProgress('flatten: ', 1, None, None)
            # DecisionIds already written; newest file wins on duplicates.
            known_ids = set()

            for file_path in sorted(glob.iglob(input_path), reverse=True):
                killfile_monitor.maybe_check_killfile()
                with open(file_path, 'r', encoding='utf8') as in_file:
                    progress.set_file(file_path, in_file)

                    # Callback invoked by the Importer for each snapshot
                    # in the stream; writes at most one row per id.
                    def flatten_snapshot(snapshot):
                        killfile_monitor.maybe_check_killfile()

                        id = snapshot.DecisionId
                        if id in known_ids:
                            return

                        if snapshot.Decision is not None and snapshot.snapshotrates is not None:

                            known_ids.add(id)

                            snapshot_date = datetime.datetime.strptime(
                                snapshot.Time, '%Y-%m-%d %H:%M:%S').date()
                            first_date = snapshot_date - datetime.timedelta(
                                days - 1)

                            # Keep only rates with a positive close, then
                            # reduce each to a (date, close) Rate pair.
                            rates = list(
                                filter(
                                    lambda r: r.Close is not None and r.Close >
                                    0, snapshot.snapshotrates))
                            rates = list(
                                map(
                                    lambda r: Rate(
                                        datetime.datetime.strptime(
                                            r.Time, '%Y-%m-%d %H:%M:%S').date(
                                            ), r.Close), rates))

                            if len(rates) > 0:
                                # Seed for carry-forward: last rate before
                                # the window, else the earliest rate.
                                previous_rate = lastOrDefault(
                                    (r for r in rates if r.Date < first_date),
                                    rates[0])
                                remaining_rates = list(
                                    itertools.dropwhile(
                                        lambda r: r.Date < first_date, rates))

                                if len(remaining_rates) >= (weekdays -
                                                            max_missing_days):
                                    out_file.write('\n')

                                    out_file.write(str(id))
                                    out_file.write(';')

                                    out_file.write(str(snapshot.instrument.ID))
                                    out_file.write(';')

                                    out_file.write(snapshot.Decision)
                                    out_file.write(';')

                                    out_file.write(str(snapshot.Confirmed))
                                    out_file.write(';')

                                    out_file.write(
                                        snapshot_date.strftime('%Y%m%d'))
                                    out_file.write(';')

                                    # One column per calendar day in the
                                    # window; carry the last known rate
                                    # forward over gaps.
                                    snapshot_days = (snapshot_date -
                                                     first_date).days + 1
                                    for day in range(snapshot_days):

                                        date = first_date + datetime.timedelta(
                                            days=day)

                                        remaining_rates = list(
                                            itertools.dropwhile(
                                                lambda r: r.Date < date,
                                                remaining_rates))

                                        rate = previous_rate
                                        if len(remaining_rates) > 0:
                                            firstRate = remaining_rates[0]
                                            if firstRate.Date == date:
                                                rate = firstRate
                                            # else: #encountered future rate. use previous rate.
                                        # else: #no remaining rates. use previous rate.

                                        value = '%.2f' % (rate.Close)
                                        out_file.write(value)

                                        if day < snapshot_days - 1:
                                            out_file.write(';')

                                        previous_rate = rate

                                else:
                                    print_flush(
                                        "%s has insufficient rates: %d < %d - %d"
                                        % (snapshot.instrument.InstrumentName,
                                           len(remaining_rates), weekdays,
                                           max_missing_days))

                        progress.add_item()
                        progress.maybe_print()

                    importer = Importer()
                    importer.import_stream(in_file, flatten_snapshot)

        # Success: atomically swap the temp file into place.
        if os.path.exists(output_path):
            os.remove(output_path)
        os.rename(output_path_temp, output_path)

    except KilledException:
        # Aborted via the kill file: clean up the partial output.
        killfile_monitor.delete_killfile()
        if os.path.exists(output_path_temp):
            os.remove(output_path_temp)
        print_flush('Killed.')
Beispiel #27
0
def main():
    """Starts the tests.

    Imports every .gpx/.tcx/.csv file under --dir, then prints record
    summaries, an estimated max heart rate, pass/fail counts and the
    average processing time per file.
    """

    logging.basicConfig(filename=ERROR_LOG,
                        filemode='w',
                        level=logging.DEBUG,
                        format='%(asctime)s %(message)s',
                        datefmt='%m/%d/%Y %I:%M:%S %p')

    passed_files = []
    failed_files = []

    # Parse the command line arguments.
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument("--dir",
                            type=str,
                            action="store",
                            default=os.path.dirname(os.path.realpath(__file__)),
                            help="Directory of files to be processed",
                            required=True)
    args = arg_parser.parse_args()

    store = TestActivityWriter()
    importer = Importer.Importer(store)
    test_dir = os.path.abspath(os.path.join('.', args.dir))

    # Process each file in the specified directory and its subdirectories.
    total_time = 0
    num_files_processed = 0
    for subdir, _, files in os.walk(test_dir):

        title_str = "Processing all files in " + test_dir + ":"
        print(title_str + "\n")
        for current_file in files:

            # My test file repo has a description file that we should skip.
            if current_file == "description.csv":
                continue

            full_path = os.path.join(subdir, current_file)
            _, temp_file_ext = os.path.splitext(full_path)
            if temp_file_ext not in ['.gpx', '.tcx', '.csv']:
                continue

            banner = "Processing: " + full_path
            print("=" * len(banner))
            print(banner)
            print("=" * len(banner))
            start_time = time.time()
            success, device_id, activity_id = importer.import_file(
                "test user", "", full_path, current_file, temp_file_ext)
            if not success:
                print("Failure!\n")
                failed_files.append(current_file)
                continue

            elapsed_time = time.time() - start_time
            total_time = total_time + elapsed_time
            num_files_processed = num_files_processed + 1
            print("Elapsed Processing Time: " + str(elapsed_time) +
                  " seconds")
            print("Success!\n")
            passed_files.append(current_file)

    # Print the summary data.
    for record_type in (Keys.TYPE_RUNNING_KEY, Keys.TYPE_CYCLING_KEY,
                        Keys.TYPE_OPEN_WATER_SWIMMING_KEY,
                        Keys.TYPE_POOL_SWIMMING_KEY):
        print_records(store, record_type)

    # Print the maximum heart rate and heart rate zone calculators.
    max_hr = store.summarizer.hr_calc.estimate_max_hr(45)
    print("Estimated Maximum Heart Rate: {:.2f} bpm\n".format(max_hr))

    # Print the success and failure summary.
    title_str = "Summary:"
    print(title_str)
    print("=" * len(title_str))
    print("Num success: " + str(len(passed_files)))
    print("Num failures: " + str(len(failed_files)))
    for failure in failed_files:
        print("- " + failure)

    # Print the time summary.
    if num_files_processed > 0:
        print("Average time per sample: " +
              str(total_time / num_files_processed) + " seconds\n")
    else:
        print("No files processed.\n")
Beispiel #28
0
 def __init__(self):
     """Create an empty object registry and the Importer that feeds it."""
     # Registry of loaded objects — presumably keyed by name and filled
     # on demand via self.importer; confirm with callers.
     self.objects = {}
     # Project-level Importer (defined elsewhere in this codebase).
     self.importer = Importer()
Beispiel #29
0
# Kept for parity with the sweep configuration: multiplying by one
# leaves the previously computed iteration count unchanged.
total_iterations *= 1

print(f"Total iterations: {total_iterations}")

# Reset the sweep bookkeeping before the material loop starts.
step = 0
material_number = 0
state = {"negative": False, "positive": False}
margin_list = []
mass_list = []
margin_details = []
input_details = []
first_iteration = True

print(f"-- Importing materials --")
material_list = Importer.import_all_materials("material_sheet",
                                              ("metal", "none"))
# Echo the type of each of the first seven imported materials.
print(*(material_list[index].type for index in range(7)))

# All three structural roles currently share material #6.
material_properties_attachment = material_list[6]
material_properties_fastener = material_properties_attachment
material_properties_vehicle = material_properties_attachment

print(
    f"Material: {material_properties_attachment.name} {material_properties_attachment.type}"
)
material_details = [
    material_properties_attachment.name, material_properties_attachment.type
]
material_number += 1
Beispiel #30
0
#!/usr/bin/env python

import Splitter
import Tagger
import Scorer
import Importer

import yaml
from pprint import pprint

if __name__ == "__main__":
    # Pull article summaries from the Google News RSS feed for "apple".
    rss = Importer.RSSImporter(
        'https://news.google.com/news/feeds?q=apple&output=rss')
    input_text = rss.parse()

    s = Splitter.Splitter()
    # Sentiment dictionaries: positive/negative words plus incrementers,
    # decrementers and inverters used by the tagger.
    tagger = Tagger.DictionaryTagger([
        'dicts/positive.yml', 'dicts/negative.yml', 'dicts/inc.yml',
        'dicts/dec.yml', 'dicts/inv.yml'
    ])
    scorer = Scorer.Scorer()
    total = 0
    # Split each summary into tokens, tag them against the dictionaries,
    # score the tagged result, and accumulate the overall total.
    for summary in input_text:

        split = s.split(summary)

        tagged = tagger.tag(split)

        score = scorer.score(tagged)
        print "%s -> %d" % (summary, score)  # NOTE: Python 2 print statement
        total += score
Beispiel #31
0
def import_activity(import_str, internal_task_id):
    """Import an uploaded activity file described by a JSON payload.

    `import_str` is a JSON string containing: username, user_id,
    uploaded_file_data (Base64-encoded file contents), uploaded_file_name,
    and desired_activity_id. The data is decoded to a uniquely named temp
    file, imported into the database, and the deferred task identified by
    `internal_task_id` is updated with the outcome. On success the uploaded
    file is stored and the activity is scheduled for analysis.

    All failures are logged; nothing is raised to the caller. The temp file
    is always removed.
    """
    local_file_name = ""

    try:
        import_obj = json.loads(import_str)
        username = import_obj['username']
        user_id = import_obj['user_id']
        uploaded_file_data = import_obj['uploaded_file_data']
        uploaded_file_name = import_obj['uploaded_file_name']
        desired_activity_id = import_obj['desired_activity_id']
        data_mgr = DataMgr.DataMgr(None, "", None, None, None)
        importer = Importer.Importer(data_mgr)

        # Generate a random name for the local file.
        print("Generating local file name...")
        root_dir = os.path.dirname(os.path.abspath(__file__))
        tempfile_dir = os.path.join(root_dir, 'tempfile')
        if not os.path.exists(tempfile_dir):
            os.makedirs(tempfile_dir)
        upload_path = os.path.normpath(tempfile_dir)
        uploaded_file_name, uploaded_file_ext = os.path.splitext(
            uploaded_file_name)
        local_file_name = os.path.join(upload_path, str(uuid.uuid4()))
        local_file_name = local_file_name + uploaded_file_ext

        # Decode and write the file.
        print("Writing the data to a local file...")
        with open(local_file_name, 'wb') as local_file:

            # Data to import is expected to be Base 64 encoded. This is because, at this point, we don't distinguish between
            # text and binary files.
            print("Base64 decoding...")
            uploaded_file_data = uploaded_file_data.replace(
                " ", "+"
            )  # Some JS base64 encoders replace plus with space, so we need to undo that.
            decoded_file_data = base64.b64decode(uploaded_file_data)
            print("Writing...")
            local_file.write(decoded_file_data)

        # Update the status of the analysis in the database.
        print("Updating status...")
        data_mgr.update_deferred_task(user_id, internal_task_id, None,
                                      Keys.TASK_STATUS_STARTED)

        # Import the file into the database.
        print("Importing the data to the database...")
        success, _, activity_id = importer.import_activity_from_file(
            username, user_id, local_file_name, uploaded_file_name,
            uploaded_file_ext, desired_activity_id)

        # The import was successful, do more stuff.
        if success:

            # Save the file to the database.
            print("Saving the file to the database...")
            data_mgr.create_uploaded_file(activity_id, decoded_file_data)

            # Update the status of the analysis in the database.
            print("Updating status...")
            data_mgr.update_deferred_task(user_id, internal_task_id,
                                          activity_id,
                                          Keys.TASK_STATUS_FINISHED)

            # Schedule the activity for analysis.
            print("Importing was successful, performing analysis...")
            data_mgr.analyze_activity_by_id(activity_id, user_id)

        # The import failed.
        else:

            # Update the status of the analysis in the database.
            print("Import was not successful.")
            data_mgr.update_deferred_task(user_id, internal_task_id,
                                          activity_id, Keys.TASK_STATUS_ERROR)
    # Catch Exception, not everything: a bare `except:` would also swallow
    # SystemExit and KeyboardInterrupt.
    except Exception:
        log_error("Exception when importing activity data: " + str(import_str))
        log_error(traceback.format_exc())
        # str() the exception type; the original passed the class object itself.
        log_error(str(sys.exc_info()[0]))
    finally:
        # Remove the local file, if it was actually created.
        if len(local_file_name) > 0 and os.path.exists(local_file_name):
            print("Removing local file...")
            os.remove(local_file_name)
Beispiel #32
0
def main(argv=None):
    """Run the LSCE pipeline: import raw .mat data, format it as HDF5,
    run the demo filter analysis, and plot filtered vs. raw signals.

    Each stage can be skipped via --skip-importer / --skip-formatter /
    --skip-analysis. Two positional args are required: mat_source and
    hdf5_dest.

    NOTE(review): Python 2 code (print statements, `except E, msg`); the
    outer try's handler appears to be truncated in this excerpt.
    """
    if argv is None:
        argv = sys.argv
    # Stage toggles, cleared by the corresponding --skip-* flags below.
    run_importer = True
    run_formatter = True
    run_analysis = True
    try:
        try:
            opts, args = getopt.getopt(argv[1:], "h", ["help", "skip-importer", "skip-formatter", "skip-analysis"])
            print "got args"
        except getopt.error, msg:
                raise Usage(msg)
        for option, data in opts:
            if('-h' == option or '--help' == option):
                print "LSCE test script. Usage: \"python testscript.py [--skip-importer] [--skip-formatter] [--skip-analysis]" +\
                    " mat_source hdf5_dest" +\
                    "\"\n\nSupply the following arguments to run pipeline:\n\n\tmat_source: " +\
                    "The path to the raw .mat files to be imported.\n\thdf5_dest: the name to save hdf5 output file under" +\
                    "\n\nAvailable modes:\n\t--skip-importer: skip importation step. Formatter wil still run using" +\
                    " mat_source as src directory." +\
                    "\n\t--skip-formatter: skip formatting step. Importer will use mat_source as usual. \n\t\t\t  Analysis will" +\
                    " use hdf5_dest if it exists." + \
                    "\n\t--skip-analysis: skip computation of analysis data. Formatter will still output to hdf5_dest. "
                return
            if('--skip-importer' == option):
                run_importer = False
            if('--skip-formatter' == option):
                run_formatter = False
            if('--skip-analysis' == option):
                run_analysis = False
        # Two positional arguments are required: mat_source and hdf5_dest.
        if(len(args) < 2):
            raise Usage("Insufficient arguments supplied.")
        else:
            print args.__repr__()
        print "Welcome to LSCE test script.\nThis script will perform a " + \
            "complete iteration of our pipeline, starting with the data importer."
        # Stage 1: import raw .mat files from args[0].
        if(run_importer):
            print "Importing data from directory "+args[0]
            Importer.loadFromMat(args[0])
        else:
            print "Skipped importing data."
        # Stage 2: write the imported data to an HDF5 file named args[1].
        if(run_formatter):
            print "Formatting data as hdf5 in "+args[1]+".hdf5"
            DataFormatter.formatData(args[0], args[1])
        else:
            print "Skipped formatting data."
        os.system("PAUSE")
        testing = None
        raw_data = None
        # Stage 3: run the demo high-pass filter, or fall back to
        # previously computed placeholder datasets in the HDF5 file.
        if(run_analysis):
            dtool = DataAnalysis.data_analysis()
            dtool.load_hdf5(args[1], dataset_name="Electrode_12_master", group_name="raw_data")
            dtool.sampling_rate = 1000
            testing = dtool.high_demo_filter(20)
            raw_data = dtool.f["raw_data"]["Electrode_12_master"]
        else:
            print "Skipped data analysis.\nPlaceholder groups " + \
                "\"/data_analysis/demo_filter_results\" and \"/raw_data/Electrode_12_master\" will be used."
            hdfile = h5py.File(args[1]+".hdf5", "r+")
            if("data_analysis" not in hdfile or "demo_filter_results" not in hdfile["data_analysis"]):
                print "Skipping graphs..."
                return
            testing = hdfile["data_analysis"]["demo_filter_results"]
            raw_data = hdfile["raw_data"]["Electrode_12_master"]
        os.system("PAUSE")
        # Plot the filtered output above the raw signal for comparison.
        plt.subplot(2, 1, 1)
        plt.plot(testing)
        plt.subplot(2, 1, 2)
        plt.plot(raw_data)
        plt.show()
        if(run_analysis):
            dtool.close()
Beispiel #33
0
import Importer
# Dynamically load every blueprint module from the data directory, then
# wildcard-import their definitions into this namespace.
Importer.loadImports('data/blueprints/')
from data.blueprints import *

class blueprint:
    def __init__(self, ID, width, height, tiles):
        """Store the blueprint's identifier, dimensions and tile list."""
        self.ID = ID
        self.width = width
        self.height = height
        self.tiles = tiles

    def getRel(self, colRow):
        """Return this blueprint's tiles translated relative to colRow.

        Each tile's first field becomes colRow[1] minus that field, its
        second field is shifted by colRow[0], and fields 3 and 4 are kept
        unchanged. Only the first four fields of each tile are used.
        # NOTE(review): assumes colRow is (col, row) — confirm with callers.
        """
        return [(colRow[1] - t[0], t[1] + colRow[0], t[2], t[3])
                for t in self.tiles]

    def getRequiredResources(self):
        req = []

        for t in range(len(self.tiles)):
            found = False
            for i in range(len(req)):
                if req[i][0] == self.tiles[t][3]:
                    found = True
                    
            if found == True:
                for l in range(len(req)):
                    if req[l][0] == self.tiles[t][3]:
Beispiel #34
0
def run_unit_tests():
    """Entry point for the unit tests.

    Walks the test directory (from the module-level ``args.dir``), imports
    every .gpx/.tcx/.csv file through the Importer, then prints per-sport
    record summaries, an estimated max heart rate, and success/failure and
    timing summaries.

    Returns:
        True if every processed file imported successfully.
    """
    successes = []
    failures = []

    store = TestActivityWriter()
    importer = Importer.Importer(store)
    # NOTE(review): relies on a module-level `args` object — confirm it is
    # parsed before this function runs.
    test_dir = os.path.abspath(os.path.join('.', args.dir))

    # Announce the run once. (Previously this header was re-printed for
    # every subdirectory visited by os.walk, though it names the fixed
    # top-level directory.)
    title_str = "Processing all files in " + test_dir + ":"
    print(title_str + "\n")

    # Process each file in the specified directory as well as its subdirectories.
    total_time = 0
    num_files_processed = 0
    for subdir, _, files in os.walk(test_dir):
        for current_file in files:

            # My test file repo has a description file that we should skip.
            if current_file == "description.csv":
                continue

            # Only GPX, TCX and CSV activity files are importable.
            full_path = os.path.join(subdir, current_file)
            _, temp_file_ext = os.path.splitext(full_path)
            if temp_file_ext not in ['.gpx', '.tcx', '.csv']:
                continue

            title_str = "Processing: " + full_path
            print("=" * len(title_str))
            print(title_str)
            print("=" * len(title_str))
            start_time = time.time()
            success, _, _ = importer.import_activity_from_file(
                "test user", "", full_path, current_file, temp_file_ext)
            if success:
                elapsed_time = time.time() - start_time
                total_time += elapsed_time
                num_files_processed += 1
                print(f"Elapsed Processing Time: {elapsed_time} seconds")
                print("Success!\n")
                successes.append(current_file)
            else:
                print("Failure!\n")
                failures.append(current_file)

    # Print the per-sport record summaries.
    print_records(store, Keys.TYPE_RUNNING_KEY)
    print_records(store, Keys.TYPE_CYCLING_KEY)
    print_records(store, Keys.TYPE_OPEN_WATER_SWIMMING_KEY)
    print_records(store, Keys.TYPE_POOL_SWIMMING_KEY)

    # Print the estimated maximum heart rate (for age 45).
    max_hr = store.summarizer.hr_calc.estimate_max_hr(45)
    print("Estimated Maximum Heart Rate: {:.2f} bpm\n".format(max_hr))

    # Print the success and failure summary.
    title_str = "Summary:"
    print(title_str)
    print("=" * len(title_str))
    print("Num success: " + str(len(successes)))
    print("Num failures: " + str(len(failures)))
    for failure in failures:
        print("- " + failure)

    # Print the time summary.
    if num_files_processed > 0:
        print("Average time per sample: " +
              str(total_time / num_files_processed) + " seconds\n")
    else:
        print("No files processed.\n")

    return len(failures) == 0
Beispiel #35
0
def main(argv=None):
    """Run the LSCE pipeline: import raw .mat data, format it as HDF5,
    run the demo filter analysis, and plot filtered vs. raw signals.

    Each stage can be skipped via --skip-importer / --skip-formatter /
    --skip-analysis. Two positional args are required: mat_source and
    hdf5_dest.

    NOTE(review): Python 2 code (print statements, `except E, msg`); the
    outer try's handler may continue past this excerpt.
    """
    if argv is None:
        argv = sys.argv
    # Stage toggles, cleared by the corresponding --skip-* flags below.
    run_importer = True
    run_formatter = True
    run_analysis = True
    try:
        try:
            opts, args = getopt.getopt(
                argv[1:], "h",
                ["help", "skip-importer", "skip-formatter", "skip-analysis"])
            print "got args"
        except getopt.error, msg:
            raise Usage(msg)
        for option, data in opts:
            if ('-h' == option or '--help' == option):
                print "LSCE test script. Usage: \"python testscript.py [--skip-importer] [--skip-formatter] [--skip-analysis]" +\
                    " mat_source hdf5_dest" +\
                    "\"\n\nSupply the following arguments to run pipeline:\n\n\tmat_source: " +\
                    "The path to the raw .mat files to be imported.\n\thdf5_dest: the name to save hdf5 output file under" +\
                    "\n\nAvailable modes:\n\t--skip-importer: skip importation step. Formatter wil still run using" +\
                    " mat_source as src directory." +\
                    "\n\t--skip-formatter: skip formatting step. Importer will use mat_source as usual. \n\t\t\t  Analysis will" +\
                    " use hdf5_dest if it exists." + \
                    "\n\t--skip-analysis: skip computation of analysis data. Formatter will still output to hdf5_dest. "
                return
            if ('--skip-importer' == option):
                run_importer = False
            if ('--skip-formatter' == option):
                run_formatter = False
            if ('--skip-analysis' == option):
                run_analysis = False
        # Two positional arguments are required: mat_source and hdf5_dest.
        if (len(args) < 2):
            raise Usage("Insufficient arguments supplied.")
        else:
            print args.__repr__()
        print "Welcome to LSCE test script.\nThis script will perform a " + \
            "complete iteration of our pipeline, starting with the data importer."
        # Stage 1: import raw .mat files from args[0].
        if (run_importer):
            print "Importing data from directory " + args[0]
            Importer.loadFromMat(args[0])
        else:
            print "Skipped importing data."
        # Stage 2: write the imported data to an HDF5 file named args[1].
        if (run_formatter):
            print "Formatting data as hdf5 in " + args[1] + ".hdf5"
            DataFormatter.formatData(args[0], args[1])
        else:
            print "Skipped formatting data."
        os.system("PAUSE")
        testing = None
        raw_data = None
        # Stage 3: run the demo high-pass filter, or fall back to
        # previously computed placeholder datasets in the HDF5 file.
        if (run_analysis):
            dtool = DataAnalysis.data_analysis()
            dtool.load_hdf5(args[1],
                            dataset_name="Electrode_12_master",
                            group_name="raw_data")
            dtool.sampling_rate = 1000
            testing = dtool.high_demo_filter(20)
            raw_data = dtool.f["raw_data"]["Electrode_12_master"]
        else:
            print "Skipped data analysis.\nPlaceholder groups " + \
                "\"/data_analysis/demo_filter_results\" and \"/raw_data/Electrode_12_master\" will be used."
            hdfile = h5py.File(args[1] + ".hdf5", "r+")
            if ("data_analysis" not in hdfile
                    or "demo_filter_results" not in hdfile["data_analysis"]):
                print "Skipping graphs..."
                return
            testing = hdfile["data_analysis"]["demo_filter_results"]
            raw_data = hdfile["raw_data"]["Electrode_12_master"]
        os.system("PAUSE")
        # Plot the filtered output above the raw signal for comparison.
        plt.subplot(2, 1, 1)
        plt.plot(testing)
        plt.subplot(2, 1, 2)
        plt.plot(raw_data)
        plt.show()
        if (run_analysis):
            dtool.close()