Example #1
 def goArchive(self):
     text = str(self.folder)+'\n'
     newArchive = Archive(self.ims,self.archive.get(),self.folder)
     newArchive.autoArchive()
     for i in range(0,len(newArchive.dirs)):
         text+=' -'+str(newArchive.dirs[i])+'\n'
     self.output.set(text)
Example #2
 def call_Menu(self,event):
     global ARCHIVE,PRODUIT,CLIENT
     if(ARCHIVE):
         Archive.destroy_Gerer_les_Archives()
         ARCHIVE=0
     if(PRODUIT):
         Produit.destroy_Gerer_les_Produits()
         PRODUIT=0 
     if(CLIENT):
         Client.destroy_Gerer_les_Clients()
         CLIENT=0                  
Example #3
    def assemble_data_arrays(self):
        #create a vector of launch epochs - this is always the first column of the archive
        self.launch_epoch = self.archive.ArchiveItems[0]

        #create vectors of flight times
        self.flight_times = []
        for item in self.archive.ArchiveItems:
            if 'phase flight time' in item.name:
                self.flight_times.append(item)

        #create vectors of wait times
        self.wait_times = []
        for item in self.archive.ArchiveItems:
            if 'wait time' in item.name:
                self.wait_times.append(item)

        #create vector of mass at each encounter
        self.arrival_masses = []
        for item in self.archive.ArchiveItems:
            if 'arrival mass' in item.name:
                self.arrival_masses.append(item)

        #C3
        self.C3 = Archive.ArchiveItem('C3')
        for item in self.archive.ArchiveItems:
            if 'j0p0: magnitude of outgoing velocity asymptote' in item.name:
                for entry in item.values:
                    self.C3.values.append(entry**2)

        #DLA
        self.DLA = Archive.ArchiveItem('DLA')
        for item in self.archive.ArchiveItems:
            if 'j0p0: DEC of departure asymptote' in item.name:
                for entry in item.values:
                    self.DLA.values.append(entry**2)

        #solution time stamp
        self.solution_timestamps = self.archive.ArchiveItems[-2]

        #step count
        self.solution_step_count = self.archive.ArchiveItems[-3]

        #objective function - this is always the last entry
        self.objective_function = self.archive.ArchiveItems[-1]

        #compute total flight time
        self.total_flight_time = Archive.ArchiveItem('Total flight time')
        for i in range(0, len(self.archive.ArchiveItems[0].values) - 1):
            temp = 0.0
            for flight_time in self.flight_times:
                temp += flight_time.values[i]

            self.total_flight_time.values.append(temp)
Example #4
def channels():
    conn = data_base.conn
    channels = Archive.Channels_()
    table_channels = channels.GetChannels(conn)
    d = defaultdict(list)
    for i in table_channels:
        d[i.Id].append({"name": str(i.name), "value": str(i.types)})
    js = json.dumps(d)
    return js
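
For reference, here is a minimal, self-contained sketch of the defaultdict grouping and JSON serialization used in channels() above; the Row class and the sample values are hypothetical stand-ins for the rows returned by GetChannels:

from collections import defaultdict
import json

class Row:
    # Hypothetical stand-in for a channel record with Id, name and types fields.
    def __init__(self, Id, name, types):
        self.Id, self.name, self.types = Id, name, types

rows = [Row(1, "news", "tv"), Row(1, "sports", "tv"), Row(2, "music", "radio")]

# Group the records by Id, exactly as channels() does, then serialize to JSON.
d = defaultdict(list)
for i in rows:
    d[i.Id].append({"name": str(i.name), "value": str(i.types)})

print(json.dumps(d))
# {"1": [{"name": "news", "value": "tv"}, {"name": "sports", "value": "tv"}], "2": [{"name": "music", "value": "radio"}]}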
Example #5
 def __init__(self, lexico, nomeArquivo):
     self.linha = 1
     self.tokenAtual = None
     self.analisadorLexico = lexico
     self.tokenList = TokensClass.TokensClass()
     self.arquivo = Archive.Archive(nomeArquivo, None)
     self.listIdent = []
     self.listTipo = []
     self.listLinha = []
     self.cont = 0
     self.tipoAnterior = None
Example #6
def read_from_file(filename):
    file = open(filename, "r")
    archive = None
    ## Handle one input line at a time
    for line in file:
        info = line.strip().split("*")
        ## Output an error if the number of values read in is not 4
        if len(info) != 4:
            print(info)
            print("Error: Incorrect number of inputs")
            file.close()
            return None
        movie = Movie(info[0].strip(), info[3].strip(), info[1].strip(),
                      info[2].strip())
        ## Add the movie to the archive, creating the archive on the first movie
        if archive is None:
            archive = Archive(movie)
        else:
            archive.addMovie(movie)
    file.close()
    return archive
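
As a rough illustration of the input format read_from_file() expects: each line carries four fields separated by "*", with surrounding whitespace stripped from each field. The sample line below is hypothetical; only the four-field, "*"-delimited shape is implied by the code.

# Hypothetical sample line in the four-field, "*"-delimited format.
line = "The Shawshank Redemption * 1994 * Frank Darabont * Drama"
info = line.strip().split("*")
fields = [field.strip() for field in info]
print(len(fields), fields)   # 4 ['The Shawshank Redemption', '1994', 'Frank Darabont', 'Drama']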
Example #7
 def _download(self, otr, remote_url=False):
     if not remote_url:
         remote_url = self._downloadqueue(otr, call.params['url'])
         xbmc.log('got remote download url <%s> %s' % (type(remote_url), remote_url))
     if isinstance(remote_url, str) or isinstance(remote_url, unicode):
         archive = Archive.Archive()
         local_path = archive.downloadEpgidItem(call.params['epgid'], call.params['name'], remote_url)
         if local_path and __addon__.getSetting('otrAskPlayAfterDownload') == 'true':
             xbmc.executebuiltin("Container.Refresh")
             if xbmcgui.Dialog().yesno(
                 __title__,
                 _('download completed, play file now?'),
                 str(remote_url.split('/').pop()) ):
                 self._play(otr, local_path)
     return True
Example #8
    def _deleteLocalCopies(self, otr):

        if not __addon__.getSetting('otrAskDeleteLocal') == 'false':
            if not xbmcgui.Dialog().yesno(
                __title__,
                _('do you want to delete existing local copies?')):
                    return False

        archive = Archive.Archive()
        if 'epgid' in call.params and call.params['epgid']:
            archive.deleteLocalEpgidPath(epgid=call.params['epgid'])
        elif 'file' in call.params and call.params['file']:
            archive.deleteLocalEpgidPath(file=call.params['file'])
        xbmc.executebuiltin("Container.Refresh")
        return True
Example #9
def main():

    population_size = 100

    N = 10
    k = 6
    p = 0.99

    lamda = [0,1,1]

    number_of_neighbors = 25

    number_of_generation = 100
    mut_prob = 1./N
 
    seed = 1000   
    

    landscape = Landscape.Landscape(N,k,seed,p) 
    evolution = Evolution.Evolution(landscape,population_size, 1, number_of_neighbors,Archive.Archive(10,""), N) 
    mth = ["None","N","R"]
    for i in range(3): 
        print " ====================================================================="
        archive =  Archive.Archive(10,mth[i]) 
        evolution.archiving = archive
        #evolution.lamda = lamda[i]
        
        history = evolution.run(number_of_generation, mut_prob)
        maxs = []
        for gen in history:
            values = [genotype.fitness for genotype in gen]
            maxs.append(numpy.max(values))
        plt.plot(maxs, label=r"$method = $" +str(mth[i]))
        
    
    plt.xlabel("Generation")
    plt.ylabel("Max Fitness")

    plt.title("Novelty Vs Fitness")
    plt.legend(loc="lower right", shadow=True, fontsize='12')
    plt.show()
Example #10
def blocoPrincipal(codeFile, fileTable):
    # Do not print the symbol table to a file:
    if fileTable is None:
        lexico = Lexico.Lexico()
        sintatico = Sintatico.Sintatico(lexico, codeFile)
        parser = sintatico.parser()
    # Print the symbol table to a file:
    else:
        lexico = Lexico.Lexico()
        sintatico = Sintatico.Sintatico(lexico, codeFile)
        parser = sintatico.parser()
        tabela = Archive.Archive(None, fileTable)
        listReserv = []
        aux = []
        # Collect the reserved words
        for r in lexico.reservadas:
            aux.append(r.split())
        # Keep only the needed part of the full string:
        for i in aux:
            a = len(str(i)) - 2
            listReserv.append(str(i)[2:a])
        # Build the table:
        tabela.criaTabela(sintatico.listIdent, sintatico.listTipo, sintatico.listLinha, listReserv)
Example #11
def adv_evolution_with_archiving(ref_ind, init_pop, number_of_generation,
                                 mut_probs, lamda, k, log_folder, mut_bp,
                                 archive_size):

    print(" Starting of evolution ")
    prev_population = numpy.copy(init_pop)  #Initialize the population of RNA
    population_size = len(init_pop)
    n = number_of_generation

    logger = Logger.Logger(str(log_folder), str(lamda))
    logger.save_population(init_pop, 0)
    maxfitness = max([ind.fitness for ind in prev_population])

    archive = Archive.Archive(archive_size, "N")

    while (n > 0) and (maxfitness < 1):

        print('Generation ' + str(number_of_generation - n))
        newgeneration = []
        newgeneration = reproduce(prev_population, int(0.1 * population_size))
        selected_ind = optimal_select_with_archving(
            numpy.insert(prev_population, len(prev_population),
                         archive.archiving), population_size, lamda, k,
            log_folder, archive)
        newgeneration = numpy.insert(
            newgeneration, len(newgeneration),
            RNAEvolution.adv_mutateAll(ref_ind.RNA_structure, selected_ind,
                                       mut_probs, mut_bp))

        prev_population = numpy.copy(newgeneration)
        maxfitness = max([ind.fitness for ind in prev_population])
        n -= 1
        print "Size of the archive =============== ", len(archive.archiving)
        logger.save_population(newgeneration, number_of_generation - n)

    return newgeneration
Example #12
def simpleArchiveTest(ims):
    printAll(ims)
    print "1.Archive by FocalLength"
    print "2.Archive by DateTime"
    print "3.Release"
    print "4.Scene Archive"
    
    f = input("Enter #: ")
    if(f == 1):
        newArchive = Archive(ims,'FocalLength')
        newArchive.autoArchive()
    elif(f == 2):
        newArchive = Archive(ims,'DateTime')
        newArchive.autoArchive()
    elif(f == 3):
        newArchive = Archive(ims,'')
        newArchive.deArchive()
    elif(f == 4):
        newArchive = Archive(ims,'')
        newArchive.sceneArchive()
Example #13
try:
    from Tkinter import *
except ImportError:
    from tkinter import *

try:
    import ttk
    py3 = False
except ImportError:
    import tkinter.ttk as ttk
    py3 = True

def init(top, gui, *args, **kwargs):
    global w, top_level, root
    w = gui
    top_level = top
    root = top

def destroy_window():
    # Function which closes the window.
    global top_level
    top_level.destroy()
    top_level = None

if __name__ == '__main__':
    import Archive
    Archive.vp_start_gui()


Example #14
if __name__ == '__main__':
    # Root directory for XML files
    xml_dirs = '/mnt/Drobo/XML_files/'
    # Root directory for PDF files
    pdf_dirs = '/mnt/Drobo/PDF_files/'
    # Root directory for TIFF files
    tif_dirs = '/mnt/Drobo/TIF_files/'
    # Root directory for POS files
    pos_dirs = '/mnt/Drobo/POS_files/'

    fr = open('not_registered.txt', 'r')
    for l in fr:
        try:
            xml_path = l.rstrip()
            ret = archive.xml_elements(xml_path)
            pub_nr = ret['publn_nr']
            print(ret['publn_nr'])
            print(xml_path)
            dirs = ret['kind-of-jp'] + '/' + pub_nr[0:4] + '/' + pub_nr[
                4:7] + '000'

            # Register in the DB
            sql_element = archive.generate_insert_sql(ret)
            archive.register_xml_elements(sql_element)

            # copy xml file
            os.makedirs(xml_dirs + dirs, exist_ok=True)
            target_xml = xml_dirs + dirs + '/' + pub_nr + '.xml'
            if not os.path.isfile(target_xml):
                shutil.copy(xml_path, target_xml)
Example #15
    if not os.path.isdir(d_path):
        print("ディレクトリが存在しません")
        sys.exit()

    d_path += '/DOCUMENT/' + in_filetype
    d_path = Path(d_path)
    path_list = list(d_path.glob("**/*.xml"))

    for pl in path_list:

        dirs = ""
        pub_nr = ""

        try:
            xml_path = str(pl)
            ret = archive.xml_elements(xml_path)
            pub_nr = ret['publn_nr']
            print(ret['publn_nr'])
            print(xml_path)
            dirs = ret['kind-of-jp'] + '/' + pub_nr[0:4] + '/' + pub_nr[
                4:7] + '000'

            # Register in the DB
            sql_element = archive.generate_insert_sql(ret)
            archive.register_xml_elements(sql_element)

            # copy xml file
            os.makedirs(xml_dirs + dirs, exist_ok=True)
            target_xml = xml_dirs + dirs + '/' + pub_nr + '.xml'
            if not os.path.isfile(target_xml):
                shutil.copy(xml_path, target_xml)
Example #16
    def _createRecordingList(self, otr): 
        """
        wrapper um createList fuer recordings aufzurufen

        @param otr: OtrHandler
        @type  otr: OtrHandler Instanz
        """

        def get_recording_list_item(archive, recording):

            li = xbmcgui.ListItem(
                recording['label'],
                recording['filename'],
                archive.getImageUrl(recording['epgid'], recording['icon_image']),
                archive.getImageUrl(recording['epgid'], recording['thumbnail_image'])
                )

            contextmenueitems = [tuple((
                _('delete local copies'),
                "XBMC.RunPlugin(\"%s\")" % call.format('/deletelocalcopies', params={'epgid': recording['epgid']})
                )), tuple((
                _('delete'),
                "XBMC.RunPlugin(\"%s\")" % call.format('/deletejob', params={'epgid': recording['epgid']})
                )), tuple((
                _('refresh listing'),
                "XBMC.RunPlugin(\"%s\")" % call.format('/refreshlisting', params={'epgid': recording['epgid']})
                )), tuple((
                _('userinfo'),
                "XBMC.RunPlugin(\"%s\")" % call.format('/userinfo')
                ))]
            li.addContextMenuItems(contextmenueitems, replaceItems=True )

            infos = dict(
                filter(
                    lambda r: r[0] in ['duration', 'title', 'studio', 'date', 'plot'],
                    recording.items()
                    ) )
            li.setInfo('video', infos)
            return [
                call.format(params={ 'epgid': recording['epgid'] }),
                li,
                True
                ]

        def get_recordingstreams_list_item(archive, recording):
            if 'streams' not in recording: return
            streams = sorted(recording['streams'].keys())
            for stream in streams:

                li = xbmcgui.ListItem(
                    "%s %s" % (_('stream:'), recording['streams'][stream]['name']),
                    recording['streams'][stream]['type'],
                    archive.getImageUrl(recording['epgid'], recording['icon_image']),
                    archive.getImageUrl(recording['epgid'], recording['thumbnail_image'])
                    )

                contextmenueitems = []
                contextmenueitems.append( tuple((
                    _('play'),
                    "PlayWith()" )))

                if 'copies' in recording:
                    if not str(recording['streams'][stream]['file'].split('/').pop()) in recording['copies']:
                        contextmenueitems.append( tuple((
                            _('download'),
                            "XBMC.RunPlugin(\"%s\")" % call.format('/download', params={
                                'url': recording['streams'][stream]['file'],
                                'epgid': recording['epgid'],
                                'name': recording['streams'][stream]['name']
                            }) )) )

                contextmenueitems.append( tuple((
                    _('userinfo'),
                    "XBMC.RunPlugin(\"%s\")" % call.format('/userinfo')
                    )) )
                li.addContextMenuItems(contextmenueitems, replaceItems=True )

                yield [
                    call.format('/play', params={
                        'url': recording['streams'][stream]['file'],
                        'epgid': recording['epgid']}),
                    li,
                    False,
                    ]

        def get_recordingcopies_list_item(archive, recording):
            if 'copies' not in recording: return
            for copy in recording['copies'].keys():

                li = xbmcgui.ListItem(
                    "%s %s" % (_('local copy:'), recording['copies'][copy]['name']),
                    '',
                    archive.getImageUrl(recording['epgid'], recording['icon_image']),
                    archive.getImageUrl(recording['epgid'], recording['thumbnail_image'])
                )

                contextmenueitems = [tuple((
                    _('play'),
                    "PlayWith()" )), tuple((
                    _('delete'),
                    "XBMC.RunPlugin(\"%s\")" % call.format('/deletelocalcopies',
                        params={'file': recording['copies'][copy]['file']})
                    )), tuple((
                    _('userinfo'),
                    "XBMC.RunPlugin(\"%s\")" % call.format('/userinfo')
                    ))]
                li.addContextMenuItems(contextmenueitems, replaceItems=True )

                yield [
                    recording['copies'][copy]['file'],
                    li,
                    False,
                    ]


        listing = list()
        archive = Archive.Archive()
        archive.load()
        print "last: %s" % archive.LastFile(archive).last()

        if archive.LastFile(archive).last() < 0 or archive.LastFile(archive).last() > 900:
            self.__login()
            archive.refresh(otr)
            archive.load()

        if 'epgid' not in call.params:
            for epgid in archive.recordings:
                listing.append(get_recording_list_item(archive, archive.recordings[epgid]))
        else:
            epgid = call.params['epgid']
            for stream in get_recordingstreams_list_item(archive, archive.recordings[epgid]):
                listing.append(stream)
            for stream in get_recordingcopies_list_item(archive, archive.recordings[epgid]):
                listing.append(stream)

        return listing
Example #17
    def __init__(self, max_iterations, evaluations):
        """
        objective_functions: objective function array
        bounds: Bounds array (bounds for each objective value)
        objective_types: array of objective type (min or max)
        num_particles: Array of the number of particles for each swarm
        max_iterations: Number of iterations
        dimensions: Number of dimensions (length of particle position vector)
        """
        best_swarm_global_fitness_values = []  # best error for group
        swarm_gbest_positions = []  # best position for group
        evaluations = evaluations
        objective_functions = evaluations.get_objective_functions()
        num_particles = evaluations.get_num_particles()
        archive = Archive.Archive(sum(num_particles), evaluations)
        constants = evaluations.get_constants()
        objective_types = evaluations.get_objective_types()
        dimensions = evaluations.get_num_dimensions()
        bounds = evaluations.get_bounds()

        for objective_index in range(len(objective_functions)):
            if objective_types[objective_index] == "min":
                best_swarm_global_fitness_values.append(float('inf'))
            else:
                best_swarm_global_fitness_values.append(float('-inf'))
            swarm_gbest_positions.append([])

        # for each objective make a swarm
        swarms = []
        for objective_index in range(len(objective_functions)):
            # establish the swarm
            swarm = []
            for particle in range(0, num_particles[objective_index]):
                swarm.append(
                    Particle.Particle(dimensions,
                                      objective_types[objective_index], bounds,
                                      constants[0], constants[1], constants[2],
                                      constants[3]))
            swarms.append(copy.deepcopy(swarm))

        # begin optimization loop
        iteration = 0
        while iteration < max_iterations:
            print("iteration: " + str(iteration))
            for objective_index in range(len(objective_functions)):
                # cycle through particles in objective swarm and evaluate fitness
                for particle_index in range(0, num_particles[objective_index]):
                    swarms[objective_index][particle_index].evaluate(
                        objective_functions[objective_index])
                    # check to see if the current position is an individual best
                    if (objective_types[objective_index] == "min" and swarms[objective_index][particle_index].fitness_function_value < swarms[objective_index][particle_index].best_fitness_value) \
                            or (objective_types[objective_index] == "max" and swarms[objective_index][particle_index].fitness_function_value > swarms[objective_index][particle_index].best_fitness_value) \
                            or swarms[objective_index][particle_index].best_fitness_value == -1:
                        swarms[objective_index][
                            particle_index].pbest_position_indexes = copy.deepcopy(
                                swarms[objective_index]
                                [particle_index].position_indexes)
                        swarms[objective_index][
                            particle_index].best_fitness_value = float(
                                swarms[objective_index]
                                [particle_index].fitness_function_value)

                    # determine if current particle is the best (globally) in its swarm
                    if (objective_types[objective_index] == "min" and swarms[objective_index][particle_index].best_fitness_value < best_swarm_global_fitness_values[objective_index])\
                            or (objective_types[objective_index] == "max" and swarms[objective_index][particle_index].best_fitness_value > best_swarm_global_fitness_values[objective_index])\
                            or best_swarm_global_fitness_values[objective_index] == -1:
                        swarm_gbest_positions[objective_index] = copy.deepcopy(
                            swarms[objective_index]
                            [particle_index].pbest_position_indexes)
                        best_swarm_global_fitness_values[
                            objective_index] = float(
                                swarms[objective_index]
                                [particle_index].best_fitness_value)

                    # update the archive with the solution
                    archive.add_to_archive(
                        swarms[objective_index][particle_index])

            # for each objective
            for objective_index in range(len(objective_functions)):
                # cycle through swarm and update velocities and position
                for particle_index in range(0, num_particles[objective_index]):
                    # get the guide particle
                    guide_particle = archive.get_guide()
                    swarms[objective_index][particle_index].update_velocity(
                        swarm_gbest_positions[objective_index], guide_particle)
                    swarms[objective_index][particle_index].update_position()
            iteration += 1

        # print final results
        print('FINAL:')
Example #18
            p.setoffset(o)
            if p.load().serialize() == '\x00\x00\xff\xff':
                continue

#            yield self.new(Object.File, __name__='Member[%d]'% index, offset=o)
            yield self.new(Object.File, offset=o)
        return

if __name__ == '__main__':
    import Archive
    from ptypes import *
    source = ptypes.file('~/python26/libs/python26.lib')

    print 'Reading .lib header'
#    Archive.File = ptypes.debugrecurse(Archive.File)
    self = Archive.File()
#    self.source = provider.file('../../obj/test.lib')
    self.source = ptypes.file('~/python26/libs/python26.lib')
    self.load()

#    print self['SymbolNames']['Header']
#    print self['SymbolNames']['Member']
#    print self['MemberNames']['Header']
#    print self['MemberNames']['Member']
#    print self['LongNames']['Header']
#    print self['LongNames']['Member']
#    print '-'*79

    ## enumerate all objects that are dll imports
    ## enumerate all objects that are actual object files
Example #19
 def _refreshListing(self, otr):
     self.__login()
     archive = Archive.Archive()
     archive.refresh(otr)
     xbmc.executebuiltin("Container.Refresh")
Example #20
from flask import Flask, request, abort
import simplejson as json
import flask
from collections import defaultdict
import datetime
import Archive
import Query
import urllib
import req
import redis_storage
import parsconfig

#config = {"size_stream" : 8000, "density" : 100}
config = parsconfig.Config('/home/olga/projects/config.yaml')
data_base = Archive.Archive(config.config)

app = Flask(__name__)
app.debug = True
app.logger.debug('Value for debugging')


@app.route("/")
def main():
    return flask.redirect("/chunk")


@app.route("/channels")
def channels():
    conn = data_base.conn
    channels = Archive.Channels_()
    table_channels = channels.GetChannels(data_base.conn)
Example #21
 def goRelease(self):
     newArchive = Archive(self.ims,'',self.folder)
     newArchive.deArchive()
     self.output.set(str(self.folder)+'\n'+'released')
Example #22
 def goSceneArchive(self):
     self.output.set('Scene Archiving...')
     newArchive = Archive(self.ims,'',self.folder)
     newArchive.sceneArchive()
     self.output.set(str(self.folder)+'\n'+'Scene Archiving Result:\n'+str(len(newArchive.dirs))+' scenes')
Example #23
            os.makedirs(d_path, exist_ok=True)
            subprocess.call(('sudo mount ' + f_name + ' ' + d_path),
                            shell=True)

        print(d_path)

        path_list = []
        for ft in in_filetype.split(','):
            tar_path = Path(d_path + 'DOCUMENT/' + ft)
            path_list += list(tar_path.glob("**/*.xml"))

        for pl in path_list:

            try:
                xml_path = str(pl)
                ret = archive.xml_elements(xml_path)
                # For re-publications, replace the publication date with the re-publication date
                if 'corrected-publication-date' in ret:
                    ret['pub_date'] = ret['corrected-publication-date']
                pub_nr = ret['publn_nr']
                print(pub_nr)
                print(xml_path)
                dirs = ret['type'] + '/' + pub_nr[0:4] + '/' + pub_nr[
                    4:7] + '000'

            except:
                except_str = traceback.format_exc()
                print(except_str)
                message = "--------------------------------------------------\n"
                message += "XML_PATH:" + xml_path + "\n"
                message += "ERROR_MSG:" + except_str + "\n"
Example #24
    def OpenFile(self, e):
        dlg = wx.FileDialog(self,
                            message="Open an EMTG file",
                            defaultDir=self.dirname,
                            defaultFile="",
                            wildcard="*.emtgopt;*.emtg_universe;*.emtg;",
                            style=wx.FD_OPEN)
        if dlg.ShowModal() == wx.ID_OK:
            self.filename = dlg.GetFilename()
            self.dirname = dlg.GetDirectory()

            fileparts = self.filename.split(".")

            #before we actually open the new file, we need to clear memory associated with whatever file we currently have open
            if self.mode == "options":
                self.missionoptions = []
                self.optionsnotebook.Destroy()

            elif self.mode == "mission":
                self.mission = []
                self.missionpanel.Destroy()

            elif self.mode == "universe":
                self.universe = []
                self.universenotebook.Destroy()

            elif self.mode == "archive":
                self.archive = []
                self.archivepanel.Destroy()

            self.mode = ""

            #next open the new file
            if fileparts[1] == "emtgopt":

                import sys
                import inspect
                currentdir = os.path.dirname(
                    os.path.abspath(inspect.getfile(inspect.currentframe())))
                sys.path.append(currentdir + "/" + 'Converters')
                from Convert_emtgopt_v1_to_v2 import Convert_emtgopt_v1_to_v2

                self.missionoptions = Convert_emtgopt_v1_to_v2(
                    os.path.join(self.dirname, self.filename))

                if self.missionoptions.success == 1:
                    self.mode = "options"
                    self.lblWelcome.Show(False)
                    self.InitializeMissionOptionsEditor()
                    self.fileMenu.Enable(wx.ID_SAVE, True)
                    self.fileMenu.Enable(wx.ID_EDIT, True)

            elif fileparts[1] == "emtg":
                self.mission = Mission.Mission(
                    os.path.join(self.dirname, self.filename))
                if self.mission.success == 1:
                    self.mode = "mission"
                    self.lblWelcome.Show(False)
                    self.missionpanel = MissionPanel.MissionPanel(
                        self, self.mission)
                    self.missionpanel.SetSize(self.GetSize())
                    self.fileMenu.Enable(wx.ID_EDIT, True)

            elif fileparts[1] == "emtg_universe":
                self.universe = Universe.Universe(
                    os.path.join(self.dirname, self.filename))
                if self.universe.success == 1:
                    self.mode = "universe"
                    self.lblWelcome.Show(False)
                    self.InitializeUniverseOptionsEditor()
                    self.fileMenu.Enable(wx.ID_SAVE, True)
                    self.fileMenu.Enable(wx.ID_EDIT, True)

            elif fileparts[1] == "emtg_archive":
                self.archive = Archive.Archive(
                    os.path.join(self.dirname, self.filename))
                if self.archive.success == 1:
                    self.mode = "archive"
                    self.lblWelcome.Show(False)
                    self.InitializeArchiveProcessor()
                    self.fileMenu.Enable(wx.ID_EDIT, True)
                    self.fileMenu.Enable(wx.ID_SAVE, False)

            else:
                errordlg = wx.MessageDialog(self, "Unrecognized file type.",
                                            "EMTG Error", wx.OK)
                errordlg.ShowModal()
                errordlg.Destroy()

        dlg.Destroy()