Example #1
    def __init__(self, root, canvas, scrollbar):
        # Initialize the file manager controller
        self.FileManager = FileManager()
        # Store the external objects as attributes of the class
        self.root = root
        self.canvas = canvas
        self.scrollbar = scrollbar

        # Initialize the icons
        self.init_icons(root)

        # Build a dictionary of shortcuts to the main user folders, depending on the operating system
        home = expanduser("~")
        print("User home folder: " + home)

        # Windows
        if name == "nt":
            self.listadir = {"Documentos": "\\Documents", "Escritorio": "\\Desktop", "Descargas": "\\Downloads", "Imagenes": "\\Pictures"}
            for key, value in self.listadir.items():
                self.listadir[key] = home + value
            #print(self.listadir)
        # Linux
        else:
            self.dirHome = home + "/Files"
            self.dirImagenes = home + "/Files/Imagenes"
            self.dirTexto = home + "/Files/Documentos"

        # Get the current directory, which is where the application was started
        self.directorio_actual = getcwd()

        # Initialize the canvas with the icons of "directorio_actual"
        self.actualizar_canvas()
Example #2
def datatoexcel(tipo="padrao"):
    fm = FileManager("arquivos/filtrados/")

    # use creds to create a client to interact with the google drive api
    scope = [
        'https://spreadsheets.google.com/feeds',
        'https://www.googleapis.com/auth/drive'
    ]
    creds = ServiceAccountCredentials.from_json_keyfile_name(
        "arquivos/config/credentials.json", scope)
    client = gspread.authorize(creds)

    # send consolidated data
    if tipo == 1:
        sheet_dados_consolidado = client.open(
            "Teste Data Studio Black").worksheet("Prog Display - Consolidado")
        arquivo_consolidado = fm.verificar_arquivo("dados_consolidado.json")
        with open(arquivo_consolidado, 'r') as arq:
            dados_consolidado = dict(json.load(arq))
            for e, value in enumerate(dados_consolidado.values()):
                sheet_dados_consolidado.update_cell(2, e + 1, value)

    # -------------------------------------------------------

    # send daily data
    if tipo == 2:
        sheet_dados_dia = client.open("Teste Data Studio Black").worksheet(
            "Prog Display - Dia")
        arquivo_dia = fm.verificar_arquivo("dados_dia.json")
        with open(arquivo_dia, 'r') as arq:
            dados_dia = dict(json.load(arq))
            days = dados_dia["dia"]
            for d, day in enumerate(days, 2):
                for v, value in enumerate(day.values(), 1):
                    sheet_dados_dia.update_cell(d, v, value)
Example #3
 def __init__(self, url, depth=-1, basic_auth="", allowed_urls=[]):
     self.spider = Spider(url,
                          depth=depth,
                          basic_auth=basic_auth,
                          allowed_urls=allowed_urls)
     self.filemanager = FileManager()
     self.shown_urls = []
Example #4
    def setUp(self):

        manager = FileManager("products.csv", "urls.db")
        manager.cleanup_database()
        log = Log(verbose=False)
        self.crawler = Crawler("https://www.epocacosmeticos.com.br", manager,
                               log)
Example #5
def save_results_for_KM(root_dir, res_dict, method_name, dat_name):
    """
       The function is used to save generated results for K-means and its variants
    """
    res_dir = os.path.join(
        root_dir, 'results', method_name,
        dat_name)  # get the result directory where the result is stored
    f_manager = FileManager(res_dir)
    f_path = os.path.join(res_dir, 'cls_quality.csv')
    f_manager.add_file(f_path)

    print f_path

    # Then, we save the results to one csv file like
    #        "seed_num"  "time"   "Purity"     "ARI"      "ACC"     "NMI" ...
    #     1      1         000      000         000        000       000  ...
    #     2      2         000      000         000        000       000  ...
    field_names = ['seed', 'time', 'Purity', 'ARI', 'ACC', 'NMI',
                   'd_W']  # fill out the field names for CSV

    with open(f_path,
              mode='wb') as csv_file:  # open the file, if not exist, create it
        writer = csv.DictWriter(
            csv_file, fieldnames=field_names
        )  # create a writer which maps the dictionaries onto output rows in CSV
        writer.writeheader()  # write the field names to the header
        for key in res_dict.keys():
            writer.writerow(res_dict[key])
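The comment block above describes the intended CSV layout; here is a minimal standalone sketch of the same csv.DictWriter pattern, with illustrative (not project) data and written in Python 3 style:

import csv

# Illustrative results keyed by seed; each value becomes one CSV row.
res_dict = {
    1: {'seed': 1, 'time': 0.42, 'Purity': 0.91, 'ARI': 0.80, 'ACC': 0.88, 'NMI': 0.79, 'd_W': 0.01},
    2: {'seed': 2, 'time': 0.40, 'Purity': 0.90, 'ARI': 0.78, 'ACC': 0.87, 'NMI': 0.77, 'd_W': 0.02},
}
field_names = ['seed', 'time', 'Purity', 'ARI', 'ACC', 'NMI', 'd_W']

with open('cls_quality.csv', mode='w', newline='') as csv_file:
    writer = csv.DictWriter(csv_file, fieldnames=field_names)
    writer.writeheader()                # first row: the field names
    for key in sorted(res_dict):
        writer.writerow(res_dict[key])  # one row per seed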
Example #6
class CoordinateManager:
    def __init__(self, parent, atom_coordinates, interaction_manager):
        self.parent = parent
        self.atom_coordinates = atom_coordinates
        self.old_atom_coordinates = np.copy(atom_coordinates)

        self.interaction_manager = interaction_manager
        self.best_response_value = self.calculate_response_value()
        self.file_manager = FileManager()
        self.ival_matrix = np.zeros_like(
            self.interaction_manager.interaction_matrix, dtype=float)
        self.temperature = 10
        self.number_atoms = self.interaction_manager.number_atoms

    def start_iter(self):
        self.old_atom_coordinates = np.copy(self.atom_coordinates)

    def reset(self):
        self.best_response_value = 1000000

    def update(self, atom_coordinates):
        for x in [0, 1, 2]:
            atom_coordinates[:, x] -= min(atom_coordinates[:, x])

        self.atom_coordinates = atom_coordinates

    def verify_global(self):
        self.temperature /= 1.001

        response = self.calculate_response_value()
        deltaE = response - self.best_response_value
        if deltaE <= 0:
            self.best_response_value = response
            print("New Best Response: %s at iteration %s" %
                  (response, self.parent.iteration_number))
            self.calculate_response_value(True)
            self.file_manager.write_numpy_to_mol("tempfile.mol",
                                                 self.interaction_manager,
                                                 self.atom_coordinates)
            return True
        else:
            self.revert()
            return False

    def revert(self):
        self.atom_coordinates = self.old_atom_coordinates

    def calculate_response_value(self, debug=False):
        response = 0
        if debug:
            print("_*_" * 3)
        for i, v1 in enumerate(self.atom_coordinates):
            for j, v2 in enumerate(self.atom_coordinates):
                if j < i:
                    response += self.interaction_manager.interaction_response(
                        i, j, v1, v2, debug)
        return response

    def get_coordinates(self):
        return np.copy(self.atom_coordinates)
Example #7
    def __init__(self, axis_width, interaction_matrix, type_array, shift_data,
                 distance_matrix):
        """
        Args:
            axis_width: Number of points to generate function mapping for

        Returns:
            None
        """
        self.x_axis = range(10 * axis_width)
        self.interaction_map = {}
        self.interaction_matrix = interaction_matrix

        initial_coordinates = np.random.rand(len(interaction_matrix), 3)
        self.coordinate_manager = CoordinateManager(initial_coordinates)

        self.interaction_matrix[self.interaction_matrix ==
                                1] = InteractionValues.DEFAULT
        self.interaction_matrix_original = np.copy(interaction_matrix)
        self.file_manager = FileManager()
        self.bonds = []
        self.atoms = []
        self.initialise_atoms(type_array, shift_data)
        self.system_states = []
        self.distance_matrix = distance_matrix
Example #8
  def start(self):

    # create base data/conf directory
    FileManager.createDirectory("%s/data/solr" % (self.configuration["rootDirectory"]))
    FileManager.createDirectory("%s/log/solr" % (self.configuration["rootDirectory"]))

    for port in self.configuration["solr"]["ports"]:
      print "Starting solr on port %d" % port

      # create required items
      dataDirectory = "%s/data/solr/instance-%d" % (self.configuration["rootDirectory"], port)
      logDirectory = "%s/log/solr/instance-%d" % (self.configuration["rootDirectory"], port)
      FileManager.createDirectory(logDirectory)
      FileManager.createDirectory(dataDirectory)
      FileManager.createDirectory("%s/home" % dataDirectory)

      solrXmlFile = "%s/home/solr.xml" % (dataDirectory)
      configLines = self.createConfigLines(port)
      with open(solrXmlFile, 'w') as f:
        f.writelines(configLines)

      zookeeperConnectionString = ",".join(["localhost:%d" % zkPort for zkPort in self.configuration["zk"]["ports"]])

      # actually start solr
      arguments = [
        "%s/bin/solr/bin/solr" % (self.configuration["rootDirectory"]),
        "-cloud",
        "-m", self.configuration["solr"]["memory"],
        "-p", "%s" % port,
        "-s", "%s/home" % (dataDirectory),
        "-z", zookeeperConnectionString
      ]
      os.system(" ".join(arguments))
Example #9
    def test_filemanager_populate(self):
        sta = False
        types = ['atm', 'ice', 'ocn', 'rest', 'streams.cice', 'streams.ocean']
        database = 'test.db'
        simstart = 51
        simend = 60
        experiment = '20171011.beta2_FCT2-icedeep_branch.A_WCYCL1850S.ne30_oECv3_ICG.edison'
        filemanager = FileManager(mutex=self.mutex,
                                  sta=sta,
                                  types=types,
                                  database=database,
                                  remote_endpoint=self.remote_endpoint,
                                  remote_path=self.remote_path,
                                  local_endpoint=self.local_endpoint,
                                  local_path=self.local_path)
        filemanager.populate_file_list(simstart=simstart,
                                       simend=simend,
                                       experiment=experiment)

        simlength = simend - simstart + 1
        atm_file_names = [
            x.name for x in DataFile.select().where(DataFile.datatype == 'atm')
        ]
        self.assertTrue(len(atm_file_names) == (simlength * 12))

        for year in range(simstart, simend + 1):
            for month in range(1, 13):
                name = '{exp}.cam.h0.{year:04d}-{month:02d}.nc'.format(
                    exp=experiment, year=year, month=month)
                self.assertTrue(name in atm_file_names)
Example #10
 def __init__(self,
              url,
              number_of_threads=20,
              allowed_urls=[],
              blocked_urls=[],
              basic_auth=(),
              depth=-1):
     self.url = url
     self.number_of_threads = number_of_threads
     self.allowed_urls = allowed_urls
     # self.blocked_urls = blocked_urls
     self.lost_url = set()
     self.basic_auth = basic_auth
     self.depth = depth
     self.crawl = True
     self.visited = {}
     self.general_visited = set()
     self.unvisited = set()
     self.general_unvisited = {self.url}
     self.fetched_url_record = dict()
     self.csv_table = CsvFormat([
         "url", "status code", "title", "keyword", "description", "h1",
         "h2", "h3", "h4", "h5", "h6", "index", "open tags",
         "external links", "h_tag_format"
     ])
     self.downloaded_pages = {}
     self.record = []
     self.url_parser = UrlParser(url)
     self.parser = HtmlParser()
     self.filemanager = FileManager()
Example #11
def infer(model, directory):
    """  infer a set of images for classification"""
    click.clear()
    click.echo()
    click.echo(lr.APP_TITLE)

    fm = FileManager()
    directory = fm.adjustDirectoryPath(directory)
    logTrainer = LogisticImageTrainer()
    if os.path.exists(lr.LOCAL_SAVED_MODEL_LR):
        logTrainer.loadModel(lr.LOCAL_SAVED_MODEL_LR)
        print("== model", lr.LOCAL_SAVED_MODEL_LR, "loaded")
        it = ImageTools()
        imageList = fm.folderFileListOfImages(directory)
        if len(imageList) > 0:
            for filename in imageList:
                code,descr = logTrainer.infer(directory,filename)
                out = "== FILE {0:s} Class {1:s}->{2:s}".format(filename, code ,descr)
                click.echo(out)
            for filename in imageList:
                code,descr = logTrainer.infer(directory,filename)
                
                out = "== {0:s} -> {1:s}".format( code ,descr)
                it.showImage(directory+filename,out)
        else:
            click.echo(':: No files in \'%s\' directory' % directory)
    else:
        print("== model", lr.LOCAL_SAVED_MODEL_LR, "required for predictions do not exist!")
Example #12
def test_all():
	input_filenames = listdir(path='./testing/inputs')
	scraper = Scraper()
	filemanager = FileManager()
	for input_filename in input_filenames:
		input_data = filemanager.read_input_file( \
					'testing/inputs/' + input_filename)
		scraper_data = []
		for word in input_data[0]:
			scraper_data.append(scraper.scrape_word(word))
		for sentence_tuple in input_data[1]:
			scraper_data.append(scraper.scrape_sentence( \
						sentence_tuple[0]))
		output_data = []
		output_filename = 'output' + input_filename[5:]
		output_data = filemanager.read_output_file( \
					'testing/outputs/' + output_filename)
		for data in output_data:
			if '#####' in  data[1]:
				audio_bytes = b''
			else:
				audio_bytes = filemanager.read_audio_file('testing/audio/' + data[2] + '.mp3')
			data[1] = audio_bytes
		for i in range(len(scraper_data)):
			for j in range(4):
				if scraper_data[i][j] != output_data[i][j]:
					print()
					print(scraper_data[i][2])
					print()
				assert scraper_data[i][j] == output_data[i][j]
Example #13
def get_user_input():
    print("Choose process: (delete, copy, move)")
    process = input()
    print("Enter regex for input files:")
    regex = input()
    print("Enter origin directory:")
    origin = FilePath.from_string(input())

    file_manager = FileManager()
    origin_files = file_manager.get_folder_contents(origin)

    if process in ["copy", "move"]:
        print("Enter destination directory:")
        destination = FilePath.from_string(input())
    else:
        destination = None

    file_rule = create_file_rule(regex)
    process = create_process(file_manager, process)

    rule = Rule(file_rule, process, destination)
    filtered = rule.apply_files_rule(origin_files)

    for p in rule.apply_process(filtered):
        input("Enter to continue...")
Example #14
    def __init__(self):
        super(MainForm, self).__init__()
        uic.loadUi("ui/MainForm.ui", self)

        names = []
        paths = []
        manager = FileManager()
        model = QtGui.QStandardItemModel()

        self.connect(self.addButton, QtCore.SIGNAL("clicked()"), self.add_show)
        self.connect(self.removeButton, QtCore.SIGNAL("clicked()"), self.remove_show)
        self.connect(self.playButton, QtCore.SIGNAL("clicked()"), self.play)
        self.connect(self.editButton, QtCore.SIGNAL("clicked()"), self.change_episode)

        for show in manager.get_shows():
            name = QtGui.QStandardItem(show["name"])
            names.append(name)

            path = QtGui.QStandardItem(show["current_episode"])
            path.setFlags(QtCore.Qt.ItemIsEnabled)
            paths.append(path)

        model.appendColumn(names)
        model.appendColumn(paths)

        self.tableView.setModel(model)

        # Configure Table
        header = self.tableView.horizontalHeader()
        header.setResizeMode(QtGui.QHeaderView.ResizeToContents)
        self.tableView.setEditTriggers(QtGui.QAbstractItemView.NoEditTriggers)
        self.tableView.setSelectionMode(QtGui.QAbstractItemView.SingleSelection)
Example #15
 def __init__(self):
     self.filesToWatch = [
         WatchedFile(
             MatroxCommandManager.reloadAnnouncements,
             FileManager.Instance().formatPath(
                 FileManager.Instance().ROOT_FILEPATH,
                 FileManager.Instance().ANNOUNCEMENTS_FILENAME))
     ]
Example #16
 def removeFavorite(self, server):
     """
     Removes a server from the favorites list.
     """
     Log.log.debug('[GuiController] removeFavorite called for server with ' \
                   + 'address ' + server.getaddress())
     fm = FileManager()
     fm.removeFavorite(server)
Example #17
    def __init__(self):

        options = DeckOptions()
        self.myConfig = Configuration(options.pathToConfig())
        self.t = NoteTranslator()
        self.sm = MidiStringMaker(self.t)
        self.tm = MidiTrackMaker(self.t, self.sm)
        self.fm = FileManager(self.myConfig, self.tm)
Example #18
 def __init__(self, interaction_manager):
     self.interaction_manager = interaction_manager
     self.interaction_manager.plot_all_interactions()
     self.file_manager = FileManager()
     self.best_atom_coordinates = self.interaction_manager.get_initial_coordinates(
     )
     self.best_response_value = 0
     self.iterations = 0
     self.date = self.get_now()
Example #19
 def __init__(self):
     self.__crawler = Crawler()
     self.__cleaner = Cleaner()
     self.__file_manager = FileManager()
     self.__search_engine = GoogleSearch(config.SEARCH_TOPIC,
                                         config.MAX_ITEM,
                                         config.NUMBER_OF_RESULTS_PER_PAGE,
                                         config.PAUSE_BTW_REQUEST)
     self.__csf_manager = CSFManager()
Example #20
    def status(self):
        print "Zookeeper Status:"

        # Check installation
        installPath = "%s/bin/zookeeper" % self.configuration["rootDirectory"]
        if os.path.exists(installPath):
            print "  Installed: Yes, at %s" % (installPath)
        else:
            print "  Installed: No, expected at %s" % (installPath)

        # Check running status
        print "  Instances:"
        for port in self.configuration["zk"]["ports"]:
            print "    Port %d:" % (port)

            dataDirectory = "%s/data/zookeeper/instance-%d" % (
                self.configuration["rootDirectory"], port)
            if os.path.exists(dataDirectory):
                print "      Data Directory: Yes, at %s" % (dataDirectory)
            else:
                print "      Data Directory: No, expected at %s" % (
                    dataDirectory)

            logDirectory = "%s/log/zookeeper/instance-%d" % (
                self.configuration["rootDirectory"], port)
            if os.path.exists(logDirectory):
                print "      Log Directory: Yes, at %s" % (logDirectory)
            else:
                print "      Log Directory: No, expected at %s" % (
                    logDirectory)

            configurationFilePath = "%s/data/zookeeper/instance-%d.cfg" % (
                self.configuration["rootDirectory"], port)
            if os.path.exists(configurationFilePath):
                print "      Configuration File: Yes, at %s" % (
                    configurationFilePath)
            else:
                print "      Configuration File: No, expected at %s" % (
                    configurationFilePath)

            pidFilePath = "%s/data/zookeeper/instance-%d/zookeeper_server.pid" % (
                self.configuration["rootDirectory"], port)
            if os.path.exists(pidFilePath):
                print "      Pid File: Yes, at %s" % (pidFilePath)
                pid = FileManager.readPid(pidFilePath)
                if pid:
                    if FileManager.isRunning(pid):
                        print "      Running: Yes, under pid %d" % (pid)
                    else:
                        print "      Running: Maybe, expecting process under pid %d but unable to find it" % (
                            pid)
                else:
                    print "      Running: Maybe, but unable to find pid"
            else:
                print "      Pid File: No, expected at %s" % (pidFilePath)
                print "      Running: No"
Example #21
def main():
    """
    Submitter 0.3
    Tools for submitting solutions to stepik.org
    """
    file_manager = FileManager()
    try:
        file_manager.create_dir(APP_FOLDER)
    except OSError:
        exit_util("Can't do anything. Not enough rights to edit folders.")
Example #22
    def decode(self, inputfile, outputfile=None):
        byte_array = FileManager.load_bytes(inputfile)

        freq, code = FileManager.parse_bytes(byte_array, self.separator)
        decoded = self._decode(code, freq)

        if outputfile:
            FileManager.export(decoded, outputfile, "w")

        return decoded
Example #23
 def addFavorite(self, server):
     """
     Adds a favorite server.
     
     The favorites are persisted in a CSV file.
     """
     Log.log.debug('[GuiController] addFavorite called for server with ' \
                   + 'address ' + server.getaddress())
     fm = FileManager()
     fm.addFavorite(server)
Example #24
    def install(self):
        print "Installing Zookeeper to %s/bin/zookeeper" % (
            self.configuration["rootDirectory"])

        # create install directories
        FileManager.createDirectory("%s" %
                                    (self.configuration["rootDirectory"]))
        FileManager.createDirectory("%s/packages" %
                                    (self.configuration["rootDirectory"]))
        FileManager.createDirectory("%s/bin" %
                                    (self.configuration["rootDirectory"]))

        # download zookeeper
        FileManager.download(
            self.configuration["zk"]["artifactUrl"],
            "%s/packages/zookeeper.tgz" %
            (self.configuration["rootDirectory"]))

        # unpack zookeeper
        extractionDir = FileManager.extractTarball(
            "%s/packages/zookeeper.tgz" %
            (self.configuration["rootDirectory"]),
            "%s/bin" % (self.configuration["rootDirectory"]))
        shutil.move(
            "%s/bin/%s" % (self.configuration["rootDirectory"], extractionDir),
            "%s/bin/zookeeper" % (self.configuration["rootDirectory"]))
Example #25
class App:
    def __init__(self, url, depth=-1, basic_auth="", allowed_urls=[]):
        self.spider = Spider(url,
                             depth=depth,
                             basic_auth=basic_auth,
                             allowed_urls=allowed_urls)
        self.filemanager = FileManager()
        self.shown_urls = []

    def run(self):
        try:
            self.spider.start()
        except Exception as e:
            self.filemanager.save_to_log(e)
            return
        data = {
            "visited": list(self.spider.general_visited),
            "unvisited": list(self.spider.general_unvisited)
        }
        self.filemanager.save_to_json(data)

        return

    def stop(self):
        self.spider.crawl = False

    def get_csv_table(self):
        return self.spider.csv_table.get_table(), self.spider.csv_table.column

    def remaining(self):
        print(f"number of urls found: {len(self.spider.general_unvisited)}")
        print(f"number of urls fetched: {len(self.spider.general_visited)}")

    def retrive_lost_urls(self):
        self.spider.general_visited = self.spider.general_visited - self.spider.lost_url
        self.spider.csv_table.remove_table()
        return

    def has_lost_url(self):
        if self.spider.lost_url:
            return True
        else:
            return False

    def show_lost_urls(self):
        return self.spider.lost_url

    def get_urls_to_show(self):
        urls_to_show = {}
        for url in self.spider.visited:
            if url not in self.shown_urls:
                urls_to_show[url] = self.spider.visited[url]
                self.shown_urls.append(url)
        return urls_to_show
Example #26
 def removeRecent(self, server):
     """
     Removes a server from the list of recent servers
     
     @param server - server to be removed
     """
     Log.log.debug('[GuiController] removeRecent called for server with ' \
                   + 'address ' + server.getaddress())
     
     fm = FileManager()
     fm.removeRecentServer(server)
Example #27
 def clearRecentServers(self, tab):
     """
     clears the recent server list.
     
     @param tab - tab requesting the action
     """
     Log.log.debug('[GuiController] clearRecentServers called ')
     
     fm = FileManager()
     fm.clearRecentServerList()
     tab.clearServerList()
Example #28
class CaptureSession:
    """
    Interfaces between card, gui and data buffers
    """
    def __init__(self, globalSession, name):

        self.name = name

        self.globalSession = globalSession
        self.settings = self.globalSession.settings

        self.dataManager = DataManager(self.settings, self.globalSession)

        self.fitted = False
        self.acquisitionDone = False
        self.dataInMemory = False

        # initialize file manager
        self.fileManager = FileManager(self.settings)

    def run(self):
        # Guarantee latest settings before running
        self.dataManager.updateSettings(self.globalSession.settings)
        self.fileManager.updateSettings(self.globalSession.settings)

        self.storeThread = threading.Timer(0.005,
                                           self.dataManager.storeData).start()
        time.sleep(0.25)
        self.saveData()

    def stopRun(self):
        self.acquisitionDone = True
        self.dataInMemory = True

        try:
            self.saveThread.cancel()
        except AttributeError:
            pass

        self.saveData()

    def readData(self):
        if self.acquisitionDone and not self.dataInMemory:
            allData = self.fileManager.readData(self.name)
            self.dataManager.setData(allData)
            self.dataInMemory = True

    def saveData(self):
        """
        Save session to current filename
        """
        self.fileManager.saveData(self.name, self.dataManager.getAllData())
        if self.globalSession.running and not self.globalSession.stopProgram:
            self.saveThread = threading.Timer(5, self.saveData).start()
Example #29
def filemanager(request, path, course_id):
    course = get_object_or_404(Course, id=course_id)
    if course.user_is_teacher(request.user):
        course_folder = UPLOAD_ROOT + "/" + course.name
        if os.path.exists(course_folder):
            fm = FileManager(course_folder)
        else:
            os.mkdir(course_folder)
            fm = FileManager(course_folder)
        return fm.render(request,path)
    else:
        return HttpResponseForbidden()
Example #30
    def __init__(self, parent, atom_coordinates, interaction_manager):
        self.parent = parent
        self.atom_coordinates = atom_coordinates
        self.old_atom_coordinates = np.copy(atom_coordinates)

        self.interaction_manager = interaction_manager
        self.best_response_value = self.calculate_response_value()
        self.file_manager = FileManager()
        self.ival_matrix = np.zeros_like(
            self.interaction_manager.interaction_matrix, dtype=float)
        self.temperature = 10
        self.number_atoms = self.interaction_manager.number_atoms
Example #31
    def add_show(self):
        file_name = self.open_file_dialog("/Users/i_see_dead_ppl/Projects/python/")
        if file_name != "":
            manager = FileManager()
            new_show = manager.add_show(file_name)

            model = self.tableView.model()

            name = QtGui.QStandardItem(new_show["name"])
            path = QtGui.QStandardItem(new_show["current_episode"])

            model.appendRow([name, path])
Example #32
    def encode(self, inputfile, outputfile=None):
        string = FileManager.load_text(inputfile) + \
                 self.break_symbol * (self.bits // 8)

        table = self.build_intervals(string)
        encoded = self._encode(string, table)

        if outputfile:
            data = FileManager.merge_data(table, self.separator, encoded)
            FileManager.export(data, outputfile, "wb")

        return encoded
Example #33
class CaptureSession:
    """
    Interfaces between card, gui and data buffers
    """
    def __init__(self, globalSession,name):

        self.name = name

        self.globalSession = globalSession
        self.settings = self.globalSession.settings

        self.dataManager = DataManager(self.settings, self.globalSession)

        self.fitted = False
        self.acquisitionDone = False
        self.dataInMemory = False
        
        # initialize file manager
        self.fileManager = FileManager(self.settings)

    def run(self):
        # Guarantee latest settings before running
        self.dataManager.updateSettings(self.globalSession.settings)
        self.fileManager.updateSettings(self.globalSession.settings)

        self.storeThread = threading.Timer(0.005, self.dataManager.storeData).start()
        time.sleep(0.25)
        self.saveData()

    def stopRun(self):
        self.acquisitionDone = True
        self.dataInMemory = True

        try:
            self.saveThread.cancel()
        except AttributeError:
            pass

        self.saveData()
   
    def readData(self):
        if self.acquisitionDone and not self.dataInMemory:
            allData = self.fileManager.readData(self.name)
            self.dataManager.setData(allData)
            self.dataInMemory = True

    def saveData(self):
        """
        Save session to current filename
        """
        self.fileManager.saveData(self.name,self.dataManager.getAllData())
        if self.globalSession.running and not self.globalSession.stopProgram:
            self.saveThread = threading.Timer(5, self.saveData).start()
Example #34
    def runDistTestCase(self, testcase, met_calc):
        #create logdir
        if not os.path.isdir(testcase["testlogdir"]):
            os.makedirs(testcase["testlogdir"])
            os.makedirs(testcase["testoutputdir"])
        
        # Create docker command to run testcase: get all parameters
        bytesreq = testcase["bytesreq"]
        isquic = testcase["tcp_config"] == ""
        cmd = (
            "CURTIME=" + testcase["curtime"] + " "
            "SERVER_LOGS=" + testcase["testlogdir"] + " "
            "CLIENT_LOGS=" + testcase["testlogdir"] + " "
            "SCENARIO=\"" + testcase["scenario"] + "\" "
            "CLIENT=" + testcase["client"] + " "
            "SERVER=" + testcase["server"] + " "
            "BYTESREQ=" + bytesreq + " "
            "CLIENT_PARAMS=\"" + testcase["client_params"] + "\" "
            "SERVER_PARAMS=\"" + testcase["server_params"] + "\" "
            "CL_COMMIT=\"" + testcase['cl_commit'] + "\" "
            "SV_COMMIT=\"" + testcase['sv_commit'] + "\" "
        )
        # Check which simulator to run
        if testcase["sim"] == "qns":
            o_file = "/qns.out"
            cmd = cmd + "docker-compose -f ../quic-network-simulator/docker-compose.yml " + testcase["tcp_config"] + "up --abort-on-container-exit"
        else:
            o_file = "/min.out"
            cmd = cmd + "docker-compose -f ../containernet/docker-compose.yml up --abort-on-container-exit"
        print("Server: " + testcase["server"] + ". Client: " + testcase["client"] + ". Test case: " + testcase["scenario"] + ". Simulation: " + testcase["sim"])
        runsuccess = False
        while not runsuccess:
            try:
                r = subprocess.run(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, timeout=90)
                output = r.stdout
            except subprocess.TimeoutExpired as ex:
                output = ex.stdout
                expired = True


            with open(testcase["testoutputdir"] + o_file, "w+") as outputfile:
                outputfile.write(output.decode('utf-8'))

            filemngr = FileManager()
            
            clpars = testcase["client_params"]
            clpars = clpars.replace("$CURTIME" , testcase["curtime"])
            clpars = clpars.replace("$BYTESREQ", bytesreq)
            svpars = testcase["server_params"]
            svpars = svpars.replace("$CURTIME" , testcase["curtime"])
            filemngr.addTestInfo(testcase["testlogdir"], testcase["scenario"], clpars, svpars, testcase["client"], testcase["server"], testcase["sim"])
            runsuccess = filemngr.pcaptojson(testcase["testlogdir"], testcase["sim"], met_calc, isquic, testcase["run"])
Example #35
  def start(self):

    # create base data/conf directory
    FileManager.createDirectory("%s/data/zookeeper" % (self.configuration["rootDirectory"]))
    FileManager.createDirectory("%s/log/zookeeper" % (self.configuration["rootDirectory"]))

    for port in self.configuration["zk"]["ports"]:
      print "Starting zookeeper on port %d" % port

      # create required items
      dataDirectory = "%s/data/zookeeper/instance-%d" % (self.configuration["rootDirectory"], port)
      logDirectory = "%s/log/zookeeper/instance-%d" % (self.configuration["rootDirectory"], port)
      configurationFilePath = "%s/data/zookeeper/instance-%d.cfg" % (self.configuration["rootDirectory"], port)
      myidFilePath = "%s/data/zookeeper/instance-%d/myid" % (self.configuration["rootDirectory"], port)

      FileManager.createDirectory(logDirectory)
      FileManager.createDirectory(dataDirectory)

      configLines = self.createConfigLines(port, dataDirectory)
      with open(configurationFilePath, 'w') as f:
        f.writelines(configLines)

      with open(myidFilePath, 'w') as f:
        f.writelines(["%s" % port])

      # actually start zookeeper
      arguments = [
        "ZOOCFGDIR=%s/data/zookeeper" % (self.configuration["rootDirectory"]),
        "ZOOCFG=instance-%d.cfg" % (port),
        "ZOO_LOG_DIR=%s" % (logDirectory),
        "%s/bin/zookeeper/bin/zkServer.sh" % (self.configuration["rootDirectory"]),
        "start"
      ]
      os.system(" ".join(arguments))
Example #36
def bluetooth():
	"""Bluetooth OBD-II Range Test

	This method manages all range testing of a bluetooth OBD-II adapter.
	"""

	# Scan for all adapters.
	adapters = scanner.scan( "OBD" )

	# No adapters were found.
	if len( adapters ) == 0:
		print "[!]\tNo adapters were found that have 'OBD' in their name.\nExiting..."

	# Adapters were found.
	else:
		# Grab the first adapter returned.
		# adapter = OBD( adapters[0]['addr'], adapters[0]['name'], BAUD )
		adapter = OBD( type="bluetooth", addr=adapters[0]['addr'], name=adapters[0]['name'], baud=BAUD )
		adapter.bind()
		adapter.connect()

		# Setup the file manager.
		fm = FileManager()

		# Write header to CSV file.
		fm.writeCSV( csvfile, [ "Iteration", "RX/TX Time" ] )

		# Save the starting time.
		starttime = datetime.now()

		###
		# Run the range test.
		###
		test( adapter, fm )

		# Get the time when testing completes.
		finishtime = datetime.now()

		# Create a plot of the values.
		columns = getColumns( fm.readCSV( csvfile ) )

		# Create plot.
		figurename = plotter.generateLinePlot( columns["Iteration"][1:len(columns["Iteration"])], columns["RX/TX Time"][1:len(columns["RX/TX Time"])], "Bluetooth Range Test", "Iteration", "(RX - TX) Time [sec]", ("rangetest_" + finishtime.strftime( "%H_%M_%S" )), "png" )

		# Write ending results.
		print "\tTime to completion: " + str( finishtime - starttime )
		print "\tCSV File: " + csvfile
		print "\tPlot Image: " + figurename
Example #37
def loadConfigFile(path):
  try:
    config = FileManager.loadAsJson(path)
    return config
  except ValueError as e:
    print "Invalid or non-existant config file at %s - %s" % (path, e.message) 
    return None
Example #38
  def status(self):
    print "Zookeeper Status:"

    # Check installation
    installPath = "%s/bin/zookeeper" % self.configuration["rootDirectory"]
    if os.path.exists(installPath):
      print "  Installed: Yes, at %s" % (installPath)
    else:
      print "  Installed: No, expected at %s" % (installPath)

    # Check running status
    print "  Instances:"
    for port in self.configuration["zk"]["ports"]:
      print "    Port %d:" % (port)

      dataDirectory = "%s/data/zookeeper/instance-%d" % (self.configuration["rootDirectory"], port)
      if os.path.exists(dataDirectory):
        print "      Data Directory: Yes, at %s" % (dataDirectory)
      else:
        print "      Data Directory: No, expected at %s" % (dataDirectory)

      logDirectory = "%s/log/zookeeper/instance-%d" % (self.configuration["rootDirectory"], port)
      if os.path.exists(logDirectory):
        print "      Log Directory: Yes, at %s" % (logDirectory)
      else:
        print "      Log Directory: No, expected at %s" % (logDirectory)

      configurationFilePath = "%s/data/zookeeper/instance-%d.cfg" % (self.configuration["rootDirectory"], port)
      if os.path.exists(configurationFilePath):
        print "      Configuration File: Yes, at %s" % (configurationFilePath)
      else:
        print "      Configuration File: No, expected at %s" % (configurationFilePath)

      pidFilePath = "%s/data/zookeeper/instance-%d/zookeeper_server.pid" % (self.configuration["rootDirectory"], port)
      if os.path.exists(pidFilePath):
        print "      Pid File: Yes, at %s" % (pidFilePath)
        pid = FileManager.readPid(pidFilePath)
        if pid:
          if FileManager.isRunning(pid):
            print "      Running: Yes, under pid %d" % (pid)
          else:
            print "      Running: Maybe, expecting process under pid %d but unable to find it" % (pid)
        else:
          print "      Running: Maybe, but unable to find pid"
      else:
        print "      Pid File: No, expected at %s" % (pidFilePath)
        print "      Running: No"
Example #39
 def add_name_to_buddylist(self, name, tab):
     """
     Adds a new name to the buddylist
     
     @param name = name to add to the buddylist
     """
     Log.log.debug('[GuiController] add_name_to_buddylist called for buddy ' \
                   + 'with name ' + name)
     
     fm = FileManager()
     buddylist = fm.get_buddies()
     buddylist.append(name)
     t = Thread(target=fm.save_buddies)
     t.setDaemon(True)
     t.start()
     if tab:
         tab.append_buddy_to_list(name)
Example #40
def wifi():
    """WiFi OBD-II Range Test

	This method manages all range testing of a wifi OBD-II adapter.
	"""
    global IP, PORT

    adapter = OBD(type="wifi", name="OBD", ip=IP, port=PORT)
    adapter.connect()

    # Setup the file manager.
    fm = FileManager()

    # Write header to CSV file.
    fm.writeCSV(csvfile, ["Iteration", "RX/TX Time"])

    # Save the starting time.
    starttime = datetime.now()

    ###
    # Run the range test.
    ###
    test(adapter, fm)

    # Get the time when testing completes.
    finishtime = datetime.now()

    # Create a plot of the values.
    columns = getColumns(fm.readCSV(csvfile))

    # Create plot.
    figurename = plotter.generateLinePlot(
        columns["Iteration"][1 : len(columns["Iteration"])],
        columns["RX/TX Time"][1 : len(columns["RX/TX Time"])],
        "WiFi Range Test",
        "Iteration",
        "(RX - TX) Time [sec]",
        ("rangetest_" + finishtime.strftime("%H_%M_%S")),
        "png",
    )

    # Write ending results.
    print "\tTime to completion: " + str(finishtime - starttime)
    print "\tCSV File: " + csvfile
    print "\tPlot Image: " + figurename
Example #41
  def install(self):
    print "Installing Zookeeper to %s/bin/zookeeper" % (self.configuration["rootDirectory"])
    
    # create install directories
    FileManager.createDirectory("%s" % (self.configuration["rootDirectory"]))
    FileManager.createDirectory("%s/packages" % (self.configuration["rootDirectory"]))
    FileManager.createDirectory("%s/bin" % (self.configuration["rootDirectory"]))

    # download zookeeper
    FileManager.download(self.configuration["zk"]["artifactUrl"], "%s/packages/zookeeper.tgz" % (self.configuration["rootDirectory"]))

    # unpack zookeeper
    extractionDir = FileManager.extractTarball("%s/packages/zookeeper.tgz" % (self.configuration["rootDirectory"]), "%s/bin" % (self.configuration["rootDirectory"]))
    shutil.move("%s/bin/%s" % (self.configuration["rootDirectory"], extractionDir), "%s/bin/zookeeper" % (self.configuration["rootDirectory"]))
Example #42
 def startFavoritesLoadingThread(self, tab):
     """
     Starts loading the favorites
     
     @param tab - the tab requesting the favoriteslist
     """
     fm = FileManager()
     self.tab = tab
     self.filter = None
     
     serverlist = fm.getFavorites().values()
     for server in serverlist:
         self.serverqueue.put(server)
         
     self.servercount = len(serverlist)
     
     #notify the coordinator thread, that the serverlist is loaded
     self.messageque.put('serverlist_loaded')
Example #43
 def startRecentServersLoadingThread(self, tab):
     """
     Starts loading the recent servers list
     
     @param tab - tab requesting the recent servers
     """
     fm = FileManager()
     self.tab = tab
     self.filter = None
     
     serverdict = fm.getRecentServers()
     for key in serverdict:
         self.serverqueue.put(serverdict[key])
     
     self.servercount = len(serverdict)
     
     #notify the coordinator thread, that the serverlist is loaded
     self.messageque.put('serverlist_loaded')
Example #44
    def get_pixbuf(self):
        scanner = FileScanner()
        files = scanner.get_files_from_dir(self.directory)

        if files:
            file_manager = FileManager(on_list_modified=lambda: None)
            file_manager.set_files(files)
            file_manager.sort_by_date(True)
            file_manager.go_first()
            return self.get_mixed_thumbnail(file_manager.get_current_file(), self.default_thumbnail_size)
        else:
            dir_icon = GTKIconImage(gtk.STOCK_DIRECTORY, self.default_gtk_icon_size)
            return dir_icon.get_pixbuf()
Example #45
    def __init__(self, client, filename, port, peer_id):
        self._client = client
        self._filename = filename
        self._port = port
        self._peer_id = peer_id

        # _peers is a list of peers that the TorrentManager is trying
        # to communicate with
        self._peers = []

        # _bitfields is a dictionary mapping peers to a bitfield of the pieces
        # each has
        self._bitfields = {}

        try:
            self._metainfo = Metainfo(filename)
        except (IOError, ValueError) as err:
            if isinstance(err, IOError):
                message = err.strerror+' ('+filename+')'
            else:
                message = err.message+' ('+filename+')'
            logger.error(message)
            raise TorrentManagerError(message)

        # _have is the bitfield for this torrent. It is initialized to reflect
        # which pieces are already available on disk.
        self._filemanager = FileManager(self._metainfo)
        self._have = self._filemanager.have()

        try:
            self._tracker_proxy = TrackerProxy(self._metainfo, self._port,
                                               self._peer_id)
        except TrackerError as err:
            logger.critical("Could not connect to tracker at {}"
                            .format(self._metainfo.announce))
            logger.debug("    TrackerError: {}".format(err.message))
            raise TorrentManagerError(err.message)

        self._needed = {piece: (0, []) for piece
                        in list(self._have.findall('0b0'))}

        self._interested = {}

        self._requesting = {}

        self._partial = []

        self._reactor = Reactor()
        self._reactor.schedule_timer(_TIMER_INTERVAL, self.timer_event)
        self._tick = 1

        print "Starting to serve torrent {} off of Cracker website...".format(filename)

        self._connect_to_peers(20)
Example #46
    def load_buddies(self, tab, execute):
        """
        Loads the buddylist and appends the values to the treeview on the
        passed tab

        @param tab - the tab requesting the buddylist
        @param execute - boolean value. If True, immediately start a buddy
                         search after loading the buddylist
        """
        Log.log.debug('[GuiController] load_buddies called...')
        fm = FileManager()
        buddylist = fm.get_buddies()
        for name in buddylist:
            gobject.idle_add(tab.append_buddy_to_list, name)

        
        if execute:
            tab.filter.lock()
        
            #create a filter object
            filter = Filter(FilterType.BUDDY_FILTER, tab)
            filter.playerlist = fm.get_buddies()
            tab.set_all_buddies_to_offline()
            self.executeMasterServerQuery(filter, tab)
Example #47
    def __init__(self, globalSession,name):

        self.name = name

        self.globalSession = globalSession
        self.settings = self.globalSession.settings

        self.dataManager = DataManager(self.settings, self.globalSession)

        self.fitted = False
        self.acquisitionDone = False
        self.dataInMemory = False
        
        # initialize file manager
        self.fileManager = FileManager(self.settings)
Example #48
    def __init__(self, bootstrapper):
        self.server = NodeServer()
        NodeServer.gossiper = self
        self.server.start()
        self.filemanager = FileManager()
        self.file_lock = threading.RLock()

        self.manager = ManagerNode(self)
        self.bootstrapper = bootstrapper
        self.hosts = bootstrapper.hosts
        self.gossip_queue = []
        self.gossip_dict = {}
        self.current_gossip = set()

        self.timed_gossip()
        self.timed_hostcheck()
        print "Gossip Server Started..."
Example #49
    def __init__(self, globalSession, errorFnc):
        self.settings = SessionSettings()
        self.fileManager = FileManager(self.settings)
        self.dmanager = DataManager(self.settings, 
                                    globalSession,
                                    errorFnc) 

        self.needsSaved = False # captured data that needs saved
        
        # method that updates statusbar
        self.globalSession = globalSession
        
        # method displays error popup
        self.errorFnc = errorFnc

        # if we load in a session from file, we don't want to
        # wipe it over in a new capture, so we set the
        # read only flag to true
        self.readOnly = False 

        self.running = True
        self.dmanager.running = self.running
Example #50
 def __connect(self, server):
     """
     launch Urban Terror and connect to the passed server
     """
     fm = FileManager()
     
     
     #build the connect parameters
     #format of the commandline command:
     #urbanterror + connect <address> + password <pw>
     
     #get the executablename, the path and the additional commands
     #from the configuration
     config = fm.getConfiguration()
     executable = config[cfgkey.URT_EXE]
     path = config[cfgkey.URT_EXE_PATH]
     additionalcommands = config[cfgkey.URT_EXE_PARAMS]
             
     if not os.path.exists(os.path.join(path, executable)):
         Log.log.warning('path to Urban Terror unreachable : ' + os.path.join(path, executable))
     params = ' +connect ' + server.getaddress()
     if server.needsPassword():
         params = params + ' +password ' + server.getPassword()
         if server.getRememberPassword():
             if server.isFavorite():
                 fm.saveFavorites()
         else:
             server.setPassword('')
     
     #add additional params    
     params = params + ' ' + additionalcommands
             
     #add server to recent servers list
     fm.addRecent(server)
             
     Log.log.info('launching UrT with cmd = ' + os.path.join(path,\
                                                  executable) + ' ' + params)
     #use shlex.split to turn the command string into a sequence
     #that works with subprocess.popen
     args = shlex.split(executable + ' ' + params)
     
     #finally execute the command 
     self.urt_process = subprocess.Popen(args, executable=os.path.join(path,\
                                     executable), cwd=os.path.normpath(path))
     
Example #51
class CaptureSession:
    """
    Interfaces between card, gui and data buffers
    """    
    def __init__(self, globalSession, errorFnc):
        self.settings = SessionSettings()
        self.fileManager = FileManager(self.settings)
        self.dmanager = DataManager(self.settings, 
                                    globalSession,
                                    errorFnc) 

        self.needsSaved = False # captured data that needs saved
        
        # method that updates statusbar
        self.globalSession = globalSession
        
        # method displays error popup
        self.errorFnc = errorFnc

        # if we load in a session from file, we don't want to
        # wipe it over in a new capture, so we set the
        # read only flag to true
        self.readOnly = False 

        self.running = True
        self.dmanager.running = self.running


    def setName(self, name):
        """
        Change the name of the current session
        """
        self.settings.name = name

    def getName(self):
        return self.settings.name


    def loadSettings(self, path):
        """
        Loads just settings from file
        """
        self.settings = self.fileManager.getSettings(path)
        self.settings.filename = ""


    def saveSession(self):
        """
        Save session to current filename
        """
        self.fileManager.writeCapture(self.dmanager.getRawCountData(),
                                      self.dmanager.getRawAIData(),
                                      self.dmanager.getCombinedData())
        self.needsSaved = False


    def saveSessionAs(self, path):
        """
        Save session to new filename
        """
        self.settings.filename = path
        self.saveSession()

    def startCapture(self):
        """
        Need to reinitialise dmanager. Can't rely on
        the fact that self.settings is a pointer as
        need to recalculate stuff like voltage intervals 
        """
        self.settings.sanitise()
        
        # reinitialise data manager to do things like voltage calcs
        self.dmanager.initialise(self.settings, 
                                 self.globalSession,
                                 self.errorFnc) 

        # queue for passing data between acquisition and dmanager
        q = Queue()

        # set up acquisition process and start
        self.captureProcess = Process(target=acquire,
                                      args=(self.settings, 
                                            q, self.running))
        self.captureProcess.start()

        # set up data capture process and start
        self.dAcqThread = Thread(target=self.dmanager.dataCallback, 
                                 args=(q,))
        self.dAcqThread.start()

        self.needsSaved = True

    def registerGraphManager(self, graphManager):
        self.dmanager.registerGraphManager(graphManager)


    def getRange(self):
        """
        Returns the range required for gauge
        """
        self.settings.sanitise()
        return self.settings.scans

        
    def clearGraph(self):
        self.dmanager.graphManager.clearPlot()

    def killCapture(self):
        """
        Kills running capture. Program's behaviour
        may become undefined
        """
        try:
            self.captureProcess.terminate()
        except:
            self.errorFnc("Could not stop capture process")

        self.dAcqThread._Thread__stop()
        self.clearDevice()

    def stopCapture(self):
        """
        Stops running capture at end of scan
        (bit of cheat, just deletes the scan before)
        """
        self.running = False
        self.dmanager.running = False
        try:
            self.captureProcess.terminate()
        except:
            self.errorFnc("Could not stop capture process")


    def isCapturing(self):
        """
        Returns true if capturing is in progress
        """
        return self.captureProcess.is_alive()

    def setGlobalSettings(self, settings):
        self.globalSettings = settings
Example #52
class TorrentManager(object):
    def __init__(self, client, filename, port, peer_id):
        self._client = client
        self._filename = filename
        self._port = port
        self._peer_id = peer_id

        # _peers is a list of peers that the TorrentManager is trying
        # to communicate with
        self._peers = []

        # _bitfields is a dictionary mapping peers to a bitfield of the pieces
        # each has
        self._bitfields = {}

        try:
            self._metainfo = Metainfo(filename)
        except (IOError, ValueError) as err:
            if isinstance(err, IOError):
                message = err.strerror+' ('+filename+')'
            else:
                message = err.message+' ('+filename+')'
            logger.error(message)
            raise TorrentManagerError(message)

        # _have is the bitfield for this torrent. It is initialized to reflect
        # which pieces are already available on disk.
        self._filemanager = FileManager(self._metainfo)
        self._have = self._filemanager.have()

        try:
            self._tracker_proxy = TrackerProxy(self._metainfo, self._port,
                                               self._peer_id)
        except TrackerError as err:
            logger.critical("Could not connect to tracker at {}"
                            .format(self._metainfo.announce))
            logger.debug("    TrackerError: {}".format(err.message))
            raise TorrentManagerError(err.message)

        self._needed = {piece: (0, []) for piece
                        in list(self._have.findall('0b0'))}

        self._interested = {}

        self._requesting = {}

        self._partial = []

        self._reactor = Reactor()
        self._reactor.schedule_timer(_TIMER_INTERVAL, self.timer_event)
        self._tick = 1

        print "Starting to serve torrent {} off of Cracker website...".format(filename)

        self._connect_to_peers(20)

    def _connect_to_peers(self, n):
        # Get addresses of n peers from the tracker and try to establish
        # a connection with each
        addrs = self._tracker_proxy.get_peers(n)
        for addr in addrs:
            peer = PeerProxy(self, self._peer_id, (addr['ip'], addr['port']),
                             info_hash=self._metainfo.info_hash)
            self._peers.append(peer)
            self._bitfields[peer] = BitArray(self._metainfo.num_pieces)

    def _remove_peer(self, peer):
        # Clean up references to the peer in various data structures
        self._peers.remove(peer)

        pieces = list(self._bitfields[peer].findall('0b1'))
        for piece in pieces:
            if piece in self._needed:
                occurences, peers = self._needed[piece]
                if peer in peers:
                    peers.remove(peer)
                    self._needed[piece] = (occurences-1, peers)

        del self._bitfields[peer]

        if peer in self._interested:
            del self._interested[peer]
        elif peer in self._requesting:
            # If the peer is in the middle of downloading a piece, save
            # the state in the partial list
            index, offset, sha1, _, _ = self._requesting[peer]
            self._partial.append((index, offset, sha1))
            del self._requesting[peer]

    def _rarest(self):
        # Returns a list of tuples which includes a piece index sorted by
        # the number of peers which have the piece in ascending order
        return sorted([(occurences, peers, index)
                       for (index, (occurences, peers)) in self._needed.items()
                       if occurences != 0])

    def _show_interest(self, peer):
        if not peer.is_interested():
            logger.debug("Expressing interest in peer {}"
                         .format(str(peer.addr())))
            peer.interested()

        if not peer.is_peer_choked():
            self._request(peer)

    def _check_interest(self, peer):
        # If the peer is not already interested or requesting, identify a piece
        # for it to download and show interest to the peer.
        if peer not in self._interested and peer not in self._requesting:
            # Compute the set of needed pieces which the peer has that are not
            # already designated for another peer
            needed = self._have.copy()
            needed.invert()
            of_interest = list((needed & self._bitfields[peer]).findall('0b1'))
            dont_consider = [i for i, _, _, _ in self._interested.values()]
            dont_consider.extend([i for i, _, _, _, _
                                  in self._requesting.values()])

            if len(of_interest) > 0:
                for index, offset, sha1 in self._partial:
                    if index in of_interest:
                        self._partial.remove((index, offset, sha1))
                        self._interested[peer] = (index, offset, sha1,
                                                  self._tick)
                        self._show_interest(peer)
                        return
                for _, _, index in self._rarest():
                    if index in of_interest and index not in dont_consider:
                        self._interested[peer] = (index, 0, hashlib.sha1(),
                                                  self._tick)
                        self._show_interest(peer)
                        return
            if peer not in self._interested and peer.is_interested():
                logger.debug("Expressing lack of interest in peer {}"
                             .format(str(peer.addr())))
                peer.not_interested()
                self._connect_to_peers(1)

    def _request(self, peer):
        if peer in self._interested:
            index, offset, sha1, _ = self._interested[peer]
            del self._interested[peer]
            self._requesting[peer] = (index, offset, sha1, self._tick, 0)

        index, received_bytes, _, _, _ = self._requesting[peer]

        bytes_to_request = self._bytes_to_request(index, received_bytes)
        logger.debug("Requesting pc: {} off: {} len: {} from {}"
                     .format(index, received_bytes, bytes_to_request,
                             str(peer.addr())))
        peer.request(index, received_bytes, bytes_to_request)

    def _is_last_piece(self, index):
        return index == self._metainfo.num_pieces-1

    def _length_of_last_piece(self):
        return (self._metainfo.total_length -
                (self._metainfo.num_pieces-1)*self._metainfo.piece_length)

    def _length_of_piece(self, index):
        if self._is_last_piece(index):
            return self._length_of_last_piece()
        else:
            return self._metainfo.piece_length

    def _in_last_block(self, index, offset):
        if self._is_last_piece(index):
            piece_length = self._length_of_last_piece()
        else:
            piece_length = self._metainfo.piece_length

        return piece_length-offset < _BLOCK_SIZE

    def _bytes_to_request(self, index, offset):
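        # Request a full block unless the request reaches into the final,
        # possibly shorter, block of the piece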
        if not self._in_last_block(index, offset):
            return _BLOCK_SIZE
        else:
            return self._length_of_piece(index) - offset

    def info_hash(self):
        return self._metainfo.info_hash

    # PeerProxy callbacks

    def get_bitfield(self):
        return self._have

    def peer_unconnected(self, peer):
        logger.info("Peer {} is unconnected".format(str(peer.addr())))
        self._remove_peer(peer)
        self._connect_to_peers(1)

    def peer_bitfield(self, peer, bitfield):
        # Validate the bitfield
        length = len(bitfield)
        if (length < self._metainfo.num_pieces or
            (length > self._metainfo.num_pieces and
             bitfield[self._metainfo.num_pieces:length].any(1))):
            logger.debug("Invalid bitfield from peer {}"
                         .format(str(peer.addr())))
            peer.drop_connection()
            self._remove_peer(peer)
            self._connect_to_peers(1)
            return

        # Set the peer's bitfield and update _needed to reflect which pieces
        # the peer has
        logger.debug("Peer at {} sent bitfield".format(str(peer.addr())))
        self._bitfields[peer] = bitfield[0:self._metainfo.num_pieces]
        pieces = list(self._bitfields[peer].findall('0b1'))
        for piece in pieces:
            if piece in self._needed:
                occurences, peers = self._needed[piece]
                if peer not in peers:
                    peers.append(peer)
                    self._needed[piece] = (occurences+1, peers)

        # Check whether there is any interest in obtaining a piece from this peer
        self._check_interest(peer)

    def peer_has(self, peer, index):
        # Update the peer's bitfield and needed to reflect the availability
        # of the piece
        logger.debug("Peer at {} has piece {}".format(str(peer.addr()), index))
        if index < self._metainfo.num_pieces:
            self._bitfields[peer][index] = 1
        else:
            raise IndexError

        if index in self._needed:
            occurences, peers = self._needed[index]
            if peer not in peers:
                peers.append(peer)
                self._needed[index] = (occurences+1, peers)

            # Check whether there is any interest in obtaining a piece from
            # this peer
            self._check_interest(peer)

    def peer_choked(self, peer):
        logger.debug("Peer {} choked".format(str(peer.addr())))
        if peer in self._interested:
            del self._interested[peer]
        elif peer in self._requesting:
            # When choked in the middle of obtaining a piece, save the
            # progress in the partial list
            index, offset, sha1, _, _ = self._requesting[peer]
            self._partial.append((index, offset, sha1))
            del self._requesting[peer]

    def peer_unchoked(self, peer):
        logger.debug("Peer {} unchoked".format(str(peer.addr())))
        if peer in self._interested:
            self._request(peer)

    def peer_sent_block(self, peer, index, begin, buf):
        if peer not in self._requesting:
            # If a peer is very slow to respond, a block can arrive after the
            # request has already timed out.  Ignore the late data from that
            # peer.
            logger.debug("Received block from peer {} which has timed out"
                         .format(str(peer.addr())))
            return

        piece, received_bytes, sha1, _, _ = self._requesting[peer]
        if piece == index and begin == received_bytes:
            # When the next expected block is received, update the hash value
            # and write the block to file
            sha1.update(buf)
            self._filemanager.write_block(index, begin, buf)
            self._requesting[peer] = (piece, received_bytes + len(buf),
                                      sha1, self._tick, 0)

            if received_bytes + len(buf) < self._length_of_piece(index):
                # Request the next block in the piece
                self._request(peer)
            else:
                # On receipt of the last block in the piece, verify the hash
                # and update the records to reflect receipt of the piece
                if sha1.digest() == self._metainfo.piece_hash(index):
                    logger.info("Successfully got piece {} from {}"
                                .format(index, str(peer.addr())))
                    del self._needed[index]
                    percent = 100 * (1 - (len(self._needed) /
                                          float(self._metainfo.num_pieces)))
                    print "{0}: Downloaded {1:1.4f}%".format(self._filename,
                                                             percent)
                    self._have[index] = 1
                else:
                    logger.info("Unsuccessfully got piece {} from {}"
                                .format(index, str(peer.addr())))
                del self._requesting[peer]

                if self._needed:
                    # Try to find another piece for this peer to get
                    self._check_interest(peer)
                else:
                    logger.info("Successfully downloaded entire torrent {}"
                                .format(self._filename))
                    self._client.download_complete(self._filename)

    def peer_interested(self, peer):
        pass

    def peer_not_interested(self, peer):
        pass

    def peer_request(self, peer, index, begin, length):
        pass

    def peer_canceled(self, peer, index, begin, length):
        pass

    # Reactor callback

    def timer_event(self):
        self._reactor.schedule_timer(_TIMER_INTERVAL, self.timer_event)
        self._tick += 1

        # For any peer we have been interested in but which has not unchoked
        # us for an excessive period of time, withdraw interest, free up the
        # assigned piece and connect to another peer
        for peer, (_, _, _, tick) in list(self._interested.items()):
            if tick + 4 == self._tick:
                logger.debug("Timed out on interest for peer {}"
                             .format(str(peer.addr())))
                peer.not_interested()
                del self._interested[peer]
                self._connect_to_peers(1)

        # For any peer that has an outstanding request for an excessive period
        # of time, resend the request message in case it got lost or is being
        # ignored
        for peer, (index, offset, sha1, tick, retries) \
                in list(self._requesting.items()):
            if tick + 5 == self._tick:
                logger.debug("Timed out on request for peer {}"
                             .format(str(peer.addr())))
                if retries < _MAX_RETRIES:
                    self._requesting[peer] = (index, offset, sha1,
                                              self._tick, retries+1)
                    self._request(peer)
                else:
                    self._partial.append((index, offset, sha1))
                    del self._requesting[peer]
                    peer.not_interested()
                    self._connect_to_peers(1)
Пример #53
0
from networkmanager import NetworkManager
from filemanager import FileManager

fmanager=FileManager()
catalog=fmanager.load_courses()
faculty=fmanager.load_profs()
user=fmanager.load_user(username, password)
Пример #54
0
# Standard-library modules used by this snippet; the project-local classes
# (NodeServer, ManagerNode, FileManager, dict_convert) are assumed to live
# elsewhere in the project and are not shown here
import json
import socket
import threading
from random import choice
from subprocess import Popen, PIPE


class GossipServer:
    """

    Possible Gossip Information:
      File Request -
        ('filereq', {destination_ip}, {filename}, {manager-ip})
      Send Chunk -
        ('send_chunk', {destination_ip}, {start}, {end}, filereq)
      Chunk -
        ('chunk', {start}, {end}, {data}, filereq)
      Has File -
        ('has_file', {source_ip}, {filesize}, filereq)
    """

    def __init__(self, bootstrapper):
        self.server = NodeServer()
        NodeServer.gossiper = self
        self.server.start()
        self.filemanager = FileManager()
        self.file_lock = threading.RLock()

        self.manager = ManagerNode(self)
        self.bootstrapper = bootstrapper
        self.hosts = bootstrapper.hosts
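        # gossip_queue holds (destination host, item) pairs to send directly,
        # gossip_dict maps gossip items to their remaining time-to-live, and
        # current_gossip records items that have already been processed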
        self.gossip_queue = []
        self.gossip_dict = {}
        self.current_gossip = set()

        self.timed_gossip()
        self.timed_hostcheck()
        print "Gossip Server Started..."

    @classmethod
    def encrypt(cls, msg, key):
        data = str(msg)

        start = 0
        block_sz = 224
        result = []
        fout = open("/tmp/pub-key.pem", 'w')
        fout.write(key)
        fout.close()
        while start < len(data) - 1:
            block = data[start:start + block_sz]
            p = Popen(['openssl', 'rsautl', '-encrypt', '-inkey',
                       '/tmp/pub-key.pem', '-pubin'], stdin=PIPE, stdout=PIPE)
            out, err = p.communicate(block)
            result.append(out)
            start += block_sz
        expand = []
        for c in result:
            for char in c:
                expand.append(ord(char))
        return ''.join([(hex(c)[2:] if len(hex(c)) == 4
                         else '0' + hex(c)[2:]) for c in expand])



    def process_gossip(self, data):
        #print "PROCESS:", data

        for item, ttl in data.items():
            if item not in self.current_gossip:
                self.current_gossip.add(item)
                print "\tProcessing Gossip:", item, self.gossip_dict
                if item[0] == 'filereq':
                    file_offer = self.gen_file_offer(item)
                    if file_offer:
                        print "\tHave File:", item
                        manager_ip = item[3]
                        if manager_ip == self.bootstrapper.myip:
                            self.manager.manage(file_offer)
                        else:
                            self.gossip_queue.append((manager_ip, file_offer))
                elif item[0] == 'chunk':
                    print "RECEIVED CHUNK"
                    # chunk_data keeps the payload distinct from the `data` argument
                    tag, start, end, chunk_data, filereq = item
                    tag, destip, filename, mip = filereq
                    with self.file_lock:
                        self.filemanager.receive_chunk('files/' + filename,
                                                       start, end, chunk_data)
                elif item[0] == 'send_chunk':
                    tag, dest_ip, start, end, filereq = item
                    tag, destip, filename, mip = filereq

                    file_chunk = self.filemanager.find_chunk('files/' + filename, start, end)
                    if destip in self.hosts:
                        encrypted_chunk = self.encrypt(file_chunk, self.hosts[destip])
                        chunk_descriptor = ('chunk', start, end, encrypted_chunk, filereq)
                        self.gossip_queue.append((destip, chunk_descriptor))
                    else:
                        print "NNNOOOOOTTTTT", self.hosts, destip
                elif item[0] == 'has_file':
                    self.manager.manage(item)

    def send_chunk_request(self, req, ip):
        if ip == self.bootstrapper.myip:
            self.process_gossip({req : 100})
        else:
            self.gossip_queue.append((ip, req))


    def gen_file_offer(self, item):
        tag, dest_ip, filename, manager_ip = item
        self.gossip_dict[(tag, dest_ip, filename, manager_ip)] = 100
        filesize = self.filemanager.find_file('files/' + filename)
        if filesize is not None:
            return ('has_file', self.bootstrapper.myip, filesize, item)
        return None

    def init_file_request(self, filename):
        print "You requested:", filename
        manager = self.choose_random_host()
        while manager == self.bootstrapper.myip:
            manager = self.choose_random_host()

        filereq = ('filereq', self.bootstrapper.myip, filename, manager)
        self.gossip_dict[filereq] = 100

    def timed_gossip(self):
        self.gossip()
        threading.Timer(3, self.timed_gossip, ()).start()

    def timed_lease_check(self):
        #threading.Timer(30, self.timed_lease_check, ()).start()
        pass

    def timed_hostcheck(self):
        self.hosts = self.bootstrapper.hosts
        #for ip, pkey in self.bootstrapper.hosts.items():
        #    self.hosts[ip] = pkey
        threading.Timer(3, self.timed_hostcheck, ()).start()

    def choose_random_host(self):
        if len(self.hosts) == 0:
            return None
        return choice(list(self.hosts.keys()))

    def gossip(self):
        if len(self.gossip_queue) > 0:
            host, item = self.gossip_queue.pop(0)
        else:
            host = self.choose_random_host()
            item = None
        if not host:
            return
        self.send(host, item)

    def gossip_data(self, item):
        return json.dumps(dict_convert(self.gossip_dict, item))

    def send(self, host, item):
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        try:
            data = self.gossip_data(item)
            s.settimeout(2)
            s.connect((host, 7060))
            s.send(data)
        except Exception:
            # Unreachable or slow hosts are expected in a gossip network;
            # just drop the message
            pass
        finally:
            s.close()
Пример #55
0
def view(request, path):
    extensions = ['html', 'htm', 'zip', 'py', 'css', 'js', 'jpeg', 'jpg', 'png']
    fm = FileManager(settings.MEDIA_ROOT, extensions=extensions)
    return fm.render(request, path)
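
# A minimal sketch (assumed, not taken from the snippet above) of wiring this
# view into a Django URLconf, using the old-style django.conf.urls.url helper;
# the module path filebrowser.views is hypothetical
from django.conf.urls import url

from filebrowser.views import view  # assumed location of the view above

urlpatterns = [
    # everything after files/ is captured and passed to the view as `path`
    url(r'^files/(?P<path>.*)$', view, name='filemanager'),
]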