Example #1
class PromotionPattern:
	def __init__(self,title,length,relativeDays,lang,mp,pattern,has_gamma=True,start=0,archLink=None,archLink_fmt=None,archTime_fmt=None):
		""" Promotion pattern in a given Wiki

		title: Name of the Promotion pattern (type: str)
		length: Length of display during promotion (type: int)
		relativeDays:
		lang: ISO 639-1 language code of the Wiki (type: str)
		mp: Name of the language's main page in the pv database (e.g. Main_Page for en)
		pattern: Regular expression for the promotion pattern (type: regex)
		has_gamma: True if there is a sharp decay in the page view pattern (e.g. for Today's Featured Article)
		start: The start time of the promotion pattern (e.g. 25 for On this Day)
		archLink: Link to a site with an archive of the promotion pattern (type: str)
		archLink_fmt: strftime format of the time specified in the archive link
		"""
		self.title = title
		self.relativeDays = relativeDays
		self.length = length
		self.mp_title = mp
		self.has_gamma = has_gamma
		self.start = start
		try:
			languages.get(iso639_1_code=lang)
			self.lang = lang
		except KeyError:
			raise KeyError('Language "'+lang+'" does not exist')

		if archLink:
			self.scrapePattern = ScrapePattern(lang,pattern,archLink,archLink_fmt,archTime_fmt)

	def getArchive(self,start,end):
		self.arch = Archive(start,end,self.scrapePattern,self.title)
		self.arch.getArticles()
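
A minimal usage sketch for the class above; every argument value below is a hypothetical placeholder, not taken from a real configuration:

tfa = PromotionPattern(
	title="Today's featured article",
	length=1,
	relativeDays=0,
	lang='en',
	mp='Main_Page',
	pattern=r"\{\{TFA title\|(.*?)\}\}",
	archLink='https://en.wikipedia.org/wiki/Wikipedia:Today%27s_featured_article/',
	archLink_fmt='%B_%d,_%Y',
	archTime_fmt='%Y-%m-%d',
)
tfa.getArchive('2020-01-01', '2020-01-31')  # start/end formats are whatever Archive expects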
Example #2
    def setUp(self):
        self.archive = Archive()
        exd = ExperimentDescription(DetectionEntity(None))
        res1 = Result('res_0', exd, None)
        res2 = Result('res_1', exd, None)
        res3 = Result('res_2', exd, None)
        res4 = Result('res_3', exd, None)
        exp1 = Experiment('exp_0', [res1])
        exp2 = Experiment('exp_1', [res2])
        exp3 = Experiment('exp_2', [res3])
        exp4 = Experiment('exp_3', [res4])
        r1 = NewResults(exp1)
        r2 = NewResults(exp2)
        r3 = NewResults(exp3)
        r4 = NewResults(exp4)

        self.mod1 = Model('m_0', [1], [], [])
        self.mod1.ignored_results = frozenset([res3])
        self.mod1.results_covered = frozenset([res1, res4])

        self.mod2 = Model('m_1', [2], [], [])
        self.mod2.ignored_results = frozenset([res3])
        self.mod2.results_covered = frozenset([res1, res2, res4])

        m = AdditionalModels([self.mod1, self.mod2])

        self.archive.record(r1)
        self.archive.record(m)
        self.archive.record(r2)
        self.archive.record(r3)
        self.archive.record(r4)
Example #3
def prepare_forsale():
    archive = Archive()
    df = archive.read_forsale_baseline()

    df = df[df['alias'].isin(['V','R'])]
    df = add_days_ago(df, 'created_date', 'days')
    df = df[df['days'] < 30]

    df['maps_url'] = df.apply(lambda x: f'https://www.google.com/maps?q={x.lat},{x.lon}', axis=1)
    df['boliga_url'] = df.apply(lambda x: f'https://www.boliga.dk/bolig/{x.estate_id}', axis=1)
    df['city'] = df.apply(lambda x: 'Lyngby' if x.city == 'Kongens Lyngby' else x.city, axis=1)
    df['list_price'] = df.apply(lambda x: fix_pricing(x.list_price), axis=1)
    df['sqm_price'] = df.apply(lambda x: fix_pricing(x.sqm_price), axis=1)

    cols = [
        'city',
        'address',
        'alias',
        'rooms',
        'living_area',
        'lot_area',
        'energy_class',
        'build_year',
        'list_price',
        'sqm_price',
        'boliga_url',
        'estate_url',
        'maps_url',
        'days'
    ]
    df = df[cols]
    df = df.sort_values(by=['city', 'days']).reset_index(drop=True)
    df = df.rename(columns={'alias':'type', 'energy_class':'energy', 'build_year':'built'})
    return df
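
A vectorized sketch of the same column transformations (assumes the DataFrame columns used in prepare_forsale above; behavior-equivalent to the row-wise apply calls and typically faster):

import pandas as pd

def vectorize_columns(df: pd.DataFrame) -> pd.DataFrame:
    # whole-column string concatenation instead of df.apply(..., axis=1)
    df['maps_url'] = 'https://www.google.com/maps?q=' + df['lat'].astype(str) + ',' + df['lon'].astype(str)
    df['boliga_url'] = 'https://www.boliga.dk/bolig/' + df['estate_id'].astype(str)
    # scalar remap without apply
    df['city'] = df['city'].replace({'Kongens Lyngby': 'Lyngby'})
    return df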
Example #4
    def _import_sample(self, name, directory):
        sample_file = pjoin(Settings.SAMPLES_PATH,
                            self._args.sample + '.tar.gz')

        archive = Archive()
        archive.decompress(sample_file, self.directory, 'tarball', strip=1)
        return 0
Example #5
    def unpack_archive(self, replace=True):
        name = self.name

        tmp_file = tempfile.NamedTemporaryFile(suffix=os.path.splitext(name)[-1])
        archive_file = self.file
        self.open()
        shutil.copyfileobj(archive_file, tmp_file.file)
        tmp_file.file.flush()
        archive = Archive(tmp_file.name)
        temp_dir = tempfile.mkdtemp()
        archive.extract(temp_dir)

        unpack_folder = self._generate_unpack_folder(name)
        try:
            for root, dirs, files in os.walk(temp_dir):
                for _filename in files:
                    abs_path = os.path.join(root, _filename)
                    storage_path = os.path.join(unpack_folder, os.path.relpath(abs_path, temp_dir))
                    if self.unpack_storage.exists(storage_path):
                        if not replace:
                            continue
                        self.unpack_storage.delete(storage_path)

                    with open(abs_path, 'rb') as f:
                        self.unpack_storage.save(storage_path, File(f))
        finally:
            shutil.rmtree(temp_dir)
Example #6
	def test_calculate_max_number_activities(self):
		# model with activities, archive with results that have add activity in interventions
		a1 = mnm_repr.Activity('act1', None, ['a'], [])
		a2 = mnm_repr.Activity('act1', None, ['b'], [])
		a3 = mnm_repr.Activity('act1', None, ['c'], [])
		a4 = mnm_repr.Activity('act1', None, ['d'], [])

		base_model = mnm_repr.Model('m1', [], [a1], [])

		des1 = exp_repr.ExperimentDescription(None, [mnm_repr.Add(a2), mnm_repr.Add(a1)])
		des2 = exp_repr.ExperimentDescription(None, [mnm_repr.Add(a3)])
		des3 = exp_repr.ExperimentDescription(None, [mnm_repr.Add(a4), mnm_repr.Add(a3)])

		res1 = exp_repr.Result('r1', des1, None)
		res2 = exp_repr.Result('r2', des2, None)
		res3 = exp_repr.Result('r3', des3, None)

		exp1 = exp_repr.Experiment('exp1', [res1])
		exp2 = exp_repr.Experiment('exp2', [res2])
		exp3 = exp_repr.Experiment('exp3', [res3])

		arch = Archive()
		arch.known_results = [exp1, exp2, exp3]

		rev = RevisionModule(arch)

		out = rev.calculate_max_number_activities(base_model)

		self.assertEqual(out, 4)
Example #7
 def __unpack(self, archive_name) -> None:
     first = time.time()
     parent_dir = os.getcwd()
     frame_archive = Archive(archive_name)
     folder = frame_archive.name_woextension
     # If the extraction folder already exists, clear its old contents first.
     if os.path.exists(frame_archive.name_woextension):
         for filename in os.listdir(folder):
             file_path = os.path.join(folder, filename)
             try:
                 if os.path.isfile(file_path) or os.path.islink(file_path):
                     os.unlink(file_path)
                 elif os.path.isdir(file_path):
                     shutil.rmtree(file_path)
             except Exception as e:
                 print('Failed to delete %s. Reason: %s' % (file_path, e))
     frame_archive.extract()
     os.chdir(parent_dir)
     os.remove("./%s" % frame_archive.file_name)
     print(f"{archive_name} unzipped and archived")
     frame_packet = sorted(os.listdir(f"{archive_name[:-4]}/Frames"))
     frame_packet = [
         f"{archive_name[:-4]}/Frames/{x}" for x in frame_packet
     ]
     self.new_packets.append(frame_packet)
     print(f"{time.time() - first} to unpack")
Example #8
def main(args):
    confs = CameraConfig(args.config_file)
    ncams = confs.n_cameras()
    cams = CameraList()
    archive = Archive(confs)
    framenos = [0] * ncams
    location = confs.data['location']
    sleep = confs.data['cameras'][0]['sleep']
    start_video = confs.data['cameras'][0]['start_video']
    stop_video = confs.data['cameras'][0]['stop_video']

    for c in confs.data['cameras']:
        camid = c['id']
        addr = c['url']
        cam = Camera(addr, camid)
        cams.append(cam)
        framenos[cam.camid] = int(archive.next_prefix(cam.camid))

    while True:
        if is_daylight(location) or args.debug:

            for i, cam in enumerate(cams):
                im = cam.grab()
                timestamp = datetime.now()

                if im is None:
                    break

                framenos[i] += 1
                archive.save_image(cam.camid, timestamp, framenos[i], im)

            time.sleep(sleep)
Example #9
	def test_RevCIAddB_revise_ignoring(self):
		met1 = mnm_repr.Metabolite('met1')
		met2 = mnm_repr.Metabolite('met2')
		comp1 = mnm_repr.Medium()
		cond_subst_1 = mnm_repr.PresentEntity(met1, comp1)
		cond_subst_2 = mnm_repr.PresentEntity(met2, comp1)
		a1 = mnm_repr.Reaction('act1', [], [cond_subst_1, cond_subst_2])
		a1.remove_cost = None
		a1.reversibility = False
		a2 = mnm_repr.Reaction('act1', [], [cond_subst_1])
		a2.reversibility = False
		# model to be revised
		mod = mnm_repr.Model('m_0', [], [a1], [])
		# results
		des1 = exp_repr.ExperimentDescription(exp_repr.DetectionEntity('met1'), [])
		des2 = exp_repr.ExperimentDescription(exp_repr.DetectionEntity('met2'), [])
		res1 = exp_repr.Result('res_0', des1, 'false')
		res2 = exp_repr.Result('res_1', des2, 'true')
		exp1 = exp_repr.Experiment('exp_0', [res1])
		exp2 = exp_repr.Experiment('exp_1', [res2])
		# archive with results and parts for revision
		arch = Archive()
		arch.known_results = [exp1, exp2]
		arch.mnm_activities = [a1, a2]
		arch.mnm_entities = [met1, met2]
		arch.mnm_compartments = [comp1]

		rev = RevCIAddB(arch)
		out = rev.revise(mod)
		self.assertEqual(out[0], [])
		self.assertEqual(out[1], True)
Example #10
 def test_function_pad_the_mask_of_sar_based_on_size_amsr(self):
     """test the correct behaviour of padding"""
     mask_sar_size = np.arange(6,12).reshape(2, 3)
     # mask_sar_size = array([[ 6,  7,  8],
     #                        [ 9, 10, 11]])
     mask_amsr = np.arange(36).reshape(6, 6)
     # mask_amsr = array([[ 0,  1,  2,  3,  4,  5],
     #                    [ 6,  7,  8,  9, 10, 11],
     #                    [12, 13, 14, 15, 16, 17],
     #                    [18, 19, 20, 21, 22, 23],
     #                    [24, 25, 26, 27, 28, 29],
     #                    [30, 31, 32, 33, 34, 35]])
     mask_sar_size, pads = Archive.pad_the_mask_of_sar_based_on_size_amsr(mask_amsr, mask_sar_size)
     np.testing.assert_equal(mask_sar_size, np.array([[ 1,  1,  1,  1,  1,  1],
                                                      [ 1,  1,  1,  1,  1,  1],
                                                      [ 1,  6,  7,  8,  1,  1],
                                                      [ 1,  9, 10, 11,  1,  1],
                                                      [ 1,  1,  1,  1,  1,  1],
                                                      [ 1,  1,  1,  1,  1,  1]]))
     self.assertEqual(pads, (2, 2, 1, 2))
     mask_sar_size = np.arange(6,12).reshape(3, 2)
     # mask_sar_size = array([[ 6,  7],
     #                        [ 8,  9],
     #                        [10, 11]])
     mask_sar_size, pads = Archive.pad_the_mask_of_sar_based_on_size_amsr(mask_amsr, mask_sar_size)
     np.testing.assert_equal(mask_sar_size, np.array([[ 1,  1,  1,  1,  1,  1],
                                                      [ 1,  1,  6,  7,  1,  1],
                                                      [ 1,  1,  8,  9,  1,  1],
                                                      [ 1,  1, 10, 11,  1,  1],
                                                      [ 1,  1,  1,  1,  1,  1],
                                                      [ 1,  1,  1,  1,  1,  1]]))
     self.assertEqual(pads, (1, 2, 2, 2))
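     # Note (inferred from the two assertions above, not a documented
     # contract): pads appears to be ordered (top, bottom, left, right)
     # in padded rows/columns.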
Example #12
    def postInit(self):
        #
        # initialize game content
        #
        # camera
        base.camLens.setNearFar(1.0, 10000)
        base.camLens.setFov(75)

        a = 33
        base.camera.setPos(0, -a, a + 3)  #80)
        # collision
        base.cTrav = CollisionTraverser("base collision traverser")
        base.cHandler = CollisionHandlerEvent()
        base.cPusher = CollisionHandlerPusher()
        base.cQueue = CollisionHandlerQueue()
        base.globalClock = ClockObject.getGlobalClock()
        base.cHandler.addInPattern('%fn-into-%in')
        base.cHandler.addOutPattern('%fn-out-%in')
        base.cHandler.addAgainPattern('%fn-again-%in')
        # ai init
        base.AIworld = AIWorld(render)
        # 3d manager
        base.audio3d = Audio3DManager(base.sfxManagerList[0], camera)
        # manager
        self.archiveManager = ArchiveManager()
        self.mapManager = MapManager()
        self.initHeroInfo = None
        # Lock
        self.lock = threading.Lock()
        self.gameThread = None

        self.filters = CommonFilters(base.win, base.cam)
        # UI
        self.menu = Menu()
        self.option = Option()
        self.archive = Archive()
        self.death = Death()
        # self.oobe()
        #self.Archive_status = 0
        # self.menuMusic = loader.loadMusic("assets/audio/menuMusic.ogg")
        # self.menuMusic.setLoop(True)
        # self.fightMusic = loader.loadMusic("assets/audio/fightMusic.ogg")
        # self.fightMusic.setLoop(True)
        # base.audio3d = Audio3DManager(base.sfxManagerList[0], camera)

        self.titleVideo, self.titleCard = loadVideo('title.mp4')

        self.isInited = False
        self.isSkip = False
        self.isRenew = False
        #
        # Event handling
        #
        self.accept("escape", self.__escape)

        #
        # Start with the menu
        #
        self.request("Menu")
Example #13
def test_create_tags(test_dir, monkeypatch, tags, expected):
    """Test setting tags.
    """
    monkeypatch.chdir(test_dir)
    archive_path = Path(archive_name(tags=["tags"], counter="create_tags"))
    Archive().create(archive_path, "", [Path("base")], tags=tags)
    with Archive().open(archive_path) as archive:
        assert archive.manifest.tags == expected
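
Archive API shape inferred from the create/open tests in this listing (a sketch, not the library's documented signature):

#   Archive().create(path, compression, [paths], fileinfos=..., excludes=..., tags=...)
#   with Archive().open(path) as archive:
#       archive.manifest, archive.basedir, archive.verify()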
Example #14
def player_save(data):
    speaker = data["speaker"]

    if is_player(speaker):
        a = Archive()
        a.save(speaker)
        data["speaker"].send_line("Game saved.")
    else:
        speaker.send_line("Only players can save.")
Example #15
class Job(object):

    Current = None

    def __init__(self, name):
        self.__name = name
        self.__layers = [ [], {} ]
        self.__archive = None
        self.__path = None

    def getPath(self):
        return self.__path

    def getName(self):
        return self.__name

    def getLayer(self, name):
        try:
            return self.__layers[1][name]
        except KeyError:
            raise LayerException("Layer %s does not exist." % name)

    def addLayer(self, layer):
        if layer.getName() in self.__layers[1]:
            raise LayerException("Invalid layer name: %s, duplicate name." % layer)

        self.__layers[0].append(layer)
        self.__layers[1][layer.getName()] = layer
        layer.setJob(self)

    def getLayers(self):
        return self.__layers[0]

    def loadArchive(self):
        self.__archive = Archive(self)

    def getArchive(self):
        return self.__archive

    def putData(self, key, value):
        return self.__archive.putData(key, value)

    def setup(self):

        self.__archive = Archive(self)
        self.__path = self.__archive.getPath()

        for layer in self.__layers[0]:
            layer.setup()

        archive = self.__archive
        try:
            self.__archive = None
            archive.putData("blueprint.yaml", self)
        finally:
            self.__archive = archive
Example #16
 def test_function_get_unprocessed_files(self):
     """ Without having a file named 'processed_files.json' in the output folder,
     'files' attribute must be filled with what comes out of litsdir of input folder. """
     test_archive = Archive()
     test_archive.OUTPATH = ""
     with mock.patch("os.listdir", return_value=["a.nc"]):
         test_archive.get_unprocessed_files()
     self.assertEqual(test_archive.files, ['a.nc'])
     self.assertEqual(test_archive.processed_files, [])
Example #17
    def enter(self):
        self.password = self.PasswordLineEdit.text()

        if self.password == 'durara':
            self.archive = Archive()
            self.archive.show()
            self.close()
        else:
            self.error.setText("password isn't correct")
            self.PasswordLineEdit.setText('')
Example #18
def test_create_fileinfos_generator(test_dir, monkeypatch):
    """Create the archive from FileInfo.iterpaths() which returns a generator.
    """
    monkeypatch.chdir(test_dir)
    fileinfos = FileInfo.iterpaths([Path("base")], set())
    archive_path = Path("archive-fi-generator.tar")
    Archive().create(archive_path, "", fileinfos=fileinfos)
    with Archive().open(archive_path) as archive:
        check_manifest(archive.manifest, testdata)
        archive.verify()
Example #19
def test_create_fileinfos_list(test_dir, monkeypatch):
    """Create the archive from a list of FileInfo objects.
    """
    monkeypatch.chdir(test_dir)
    fileinfos = list(FileInfo.iterpaths([Path("base")], set()))
    archive_path = Path("archive-fi-list.tar")
    Archive().create(archive_path, "", fileinfos=fileinfos)
    with Archive().open(archive_path) as archive:
        check_manifest(archive.manifest, testdata)
        archive.verify()
Example #20
def players():
    log("SCRIPT", "Booting core server functionality")
    a = Archive()
    handler.load_handlers()
    importer.process_xml()
    log("SCRIPT", "Finished loading core functionality")

    log("SCRIPT", "Retreiving player information from database")
    players = a.list()
    log("SCRIPT", "Loaded %d player%s from database" % (len(players), '' if len(players) == 1 else 's'))
    print

    i = 0
    names = players.keys()
    names.sort()
    for p in names:
        i += 1
        print '%d: %s' % (i, p)
    print

    n = raw_input('Load player index (blank to cancel): ')
    name = None
    try:
        n = int(n)
        if n < 1 or n > len(names):
            print 'Cancelled.'
            sys.exit(0)
        name = names[n - 1]
    except (ValueError, IndexError):
        sys.exit(1)

    player = Player()
    if not a.load(name, player):
        choice = raw_input('Player could not be loaded properly.  Delete? (Y/N): ')
        if choice.upper() == 'Y':
            a.delete(name)
        sys.exit(0)

    print
    print player.name
    print player.gender, player.race
    for stat, value in player.stats.items():
        print ' %s: %d' % (stat, value)
    print
    action = raw_input('Action ([p]assword, [d]elete, [c]ancel): ')
    if action == '':
        sys.exit(0)
    elif 'password'.startswith(action.lower()):
        player.password = encrypt_password(raw_input('New password: '))
        print 'Password written.'
    elif 'delete'.startswith(action.lower()):
        confirm = raw_input('Really delete? (Y/N): ')
        if confirm.upper() == 'Y':
            a.delete(name)
            print 'Deletion complete.'
        else:
            print 'Deletion cancelled.'
    else:
        print 'Cancelled.'
Example #21
def test_dir(tmpdir):
    with tmp_chdir(tmpdir):
        rel_paths = archive_paths(Path(""), False)
        abs_paths = archive_paths(Path(""), True)
        for i, data in enumerate(testdata):
            base = next(filter(lambda e: e.type == 'd', data)).path
            setup_testdata(tmpdir, data)
            Archive().create(rel_paths[i], "bz2", [base])
            Archive().create(abs_paths[i], "bz2", [tmpdir / base])
            shutil.rmtree(base)
    return tmpdir
Example #22
def test_create_default_basedir_rel(test_dir, monkeypatch):
    """Check the default basedir with relative paths.  (Issue #8)
    """
    monkeypatch.chdir(test_dir)
    archive_path = Path("archive-rel.tar")
    p = Path("base", "data")
    Archive().create(archive_path, "", [p])
    with Archive().open(archive_path) as archive:
        assert archive.basedir == Path("base")
        check_manifest(archive.manifest, testdata)
        archive.verify()
Example #23
def test_create_fileinfos_manifest(test_dir, monkeypatch):
    """Create the archive from a Manifest.
    A Manifest is an iterable of FileInfo objects.
    """
    monkeypatch.chdir(test_dir)
    manifest = Manifest(paths=[Path("base")])
    archive_path = Path("archive-fi-manifest.tar")
    Archive().create(archive_path, "", fileinfos=manifest)
    with Archive().open(archive_path) as archive:
        check_manifest(archive.manifest, testdata)
        archive.verify()
Example #24
def test_create_add_symlink(test_dir, monkeypatch):
    """Check adding explicitly adding a symbolic link.  (Issue #37)
    """
    monkeypatch.chdir(test_dir)
    archive_path = Path("archive-symlink.tar")
    paths = [Path("base", "data", "misc"), Path("base", "data", "s.dat")]
    data = [ testdata[i] for i in (1,3,4) ]
    Archive().create(archive_path, "", paths)
    with Archive().open(archive_path) as archive:
        check_manifest(archive.manifest, data)
        archive.verify()
Example #25
def test_create_default_basedir_abs(test_dir, monkeypatch):
    """Check the default basedir with absolute paths.  (Issue #8)
    """
    monkeypatch.chdir(test_dir)
    archive_path = Path("archive-abs.tar")
    p = test_dir / Path("base", "data")
    Archive().create(archive_path, "", [p])
    with Archive().open(archive_path) as archive:
        assert archive.basedir == Path("archive-abs")
        check_manifest(archive.manifest, testdata, prefix_dir=test_dir)
        archive.verify()
Example #26
def getmsgs():
    """Yield a couple of test mails along with appropriate folder names.
    """
    mails = Archive().open(testdata)
    idx = yaml.safe_load(mails.get_metadata(".index.yaml").fileobj)
    for folder in sorted(idx.keys()):
        for msg_path in idx[folder]:
            msgbytes = mails._file.extractfile(msg_path).read()
            msg = email.message_from_bytes(msgbytes)
            testmails.append((folder, msg))
            yield (folder, msgbytes)
Example #27
def test_create_exclude_file(test_dir, testname, monkeypatch):
    """Exclude one single file.
    """
    monkeypatch.chdir(str(test_dir))
    name = archive_name(tags=[testname])
    paths = [Path("base")]
    excludes = [Path("base", "msg.txt")]
    data = sub_testdata(testdata, excludes[0])
    Archive().create(Path(name), "", paths, excludes=excludes)
    with Archive().open(Path(name)) as archive:
        check_manifest(archive.manifest, data)
        archive.verify()
Example #28
 def test_function_calculate_mask(self, mock_full, mock_down, mock_or, mock_pad, mock_ams, mock_sar):
     """test the calling of methods correctly in this function"""
     fil = mock.Mock()
     test_archive = Archive()
     test_archive.apply_instead_of_training = True
     test_archive.calculate_mask(fil)
     mock_ams.assert_called()
     mock_sar.assert_called()
     mock_or.assert_called_with('rp1', 'aout0')
     mock_pad.assert_called_with('aout0', 'sarout')
     mock_down.assert_called_with(np.array([[1]]), 'aout1', 'aout2')
     mock_full.assert_called()
Example #29
def test_create_exclude_samelevel(test_dir, testname, monkeypatch):
    """Exclude a directory explictely named in paths.
    """
    monkeypatch.chdir(test_dir)
    name = archive_name(tags=[testname])
    paths = [Path("base", "data"), Path("base", "empty")]
    excludes = [paths[1]]
    data = sub_testdata(testdata, Path("base"), paths[0])
    Archive().create(Path(name), "", paths, excludes=excludes)
    with Archive().open(Path(name)) as archive:
        check_manifest(archive.manifest, data)
        archive.verify()
Example #30
def test_create_exclude_subdir(test_dir, testname, monkeypatch):
    """Exclude a subdirectory.
    """
    monkeypatch.chdir(test_dir)
    name = archive_name(tags=[testname])
    paths = [Path("base")]
    excludes = [Path("base", "data")]
    data = sub_testdata(testdata, excludes[0])
    Archive().create(Path(name), "", paths, excludes=excludes)
    with Archive().open(Path(name)) as archive:
        check_manifest(archive.manifest, data)
        archive.verify()
Example #31
def test_create_long_directory_name(tmpdir, monkeypatch):
    """An archive containing a directory with a long path name.

    Verification fails if the archive is created in the GNU tar format.
    """
    setup_testdata(tmpdir, testdata_long_dir)
    monkeypatch.chdir(tmpdir)
    archive_path = Path("archive-longdir.tar")
    Archive().create(archive_path, "", [long_dir_path])
    with Archive().open(archive_path) as archive:
        check_manifest(archive.manifest, testdata_long_dir)
        archive.verify()
Example #32
def test_create_exclude_explicit_include(test_dir, testname, monkeypatch):
    """Exclude a directory, but explicitely include an item in that
    directory.
    """
    monkeypatch.chdir(test_dir)
    name = archive_name(tags=[testname])
    paths = [Path("base"), Path("base", "data", "rnd1.dat")]
    excludes = [Path("base", "data")]
    data = sub_testdata(testdata, excludes[0], paths[1])
    Archive().create(Path(name), "", paths, excludes=excludes)
    with Archive().open(Path(name)) as archive:
        check_manifest(archive.manifest, data)
        archive.verify()
Example #33
def test_create_fileinfos_subset(test_dir, monkeypatch):
    """Do not include the content of a directory.
    This test verifies that creating an archive from fileinfos does
    not implicitly descend subdirectories.
    """
    monkeypatch.chdir(test_dir)
    excludes = [Path("base", "data", "rnd.dat")]
    fileinfos = FileInfo.iterpaths([Path("base")], set(excludes))
    data = sub_testdata(testdata, excludes[0])
    archive_path = Path("archive-fi-subset.tar")
    Archive().create(archive_path, "", fileinfos=fileinfos)
    with Archive().open(archive_path) as archive:
        check_manifest(archive.manifest, data)
        archive.verify()
Example #34
def test_create_invalid_file_socket(test_dir, testname, monkeypatch):
    """Create an archive from a directory containing a socket.
    """
    monkeypatch.chdir(str(test_dir))
    name = archive_name(tags=[testname])
    p = Path("base")
    fp = p / "socket"
    with tmp_socket(fp):
        with pytest.warns(ArchiveWarning, match="%s: socket ignored" % fp):
            Archive().create(name, "", [p])
    with Archive().open(name) as archive:
        assert archive.basedir == Path("base")
        check_manifest(archive.manifest, testdata)
        archive.verify()
Example #35
def test_create_invalid_file_fifo(test_dir, testname, monkeypatch):
    """Create an archive from a directory containing a FIFO.
    """
    monkeypatch.chdir(test_dir)
    archive_path = Path(archive_name(tags=[testname]))
    p = Path("base")
    fp = p / "fifo"
    with tmp_fifo(fp):
        with pytest.warns(ArchiveWarning, match="%s: FIFO ignored" % fp):
            Archive().create(archive_path, "", [p])
    with Archive().open(archive_path) as archive:
        assert archive.basedir == Path("base")
        check_manifest(archive.manifest, testdata)
        archive.verify()
Example #36
class AWSOutput(IOutput):
    """AWS client for uploading packages."""
    def __init__(self, config):
        self.attempts = config.get("attempts", 3)
        self.timeout = config.get("timeout", 50)
        self.archiver = Archive(config["id"], config["secret"], config["url"],
                                config["bucket"], config["prefix"])

    def push(self, data):
        archive_name = "%s/%s.json.xz" % (data["schema"],
                                          data["data"]["timestamp"])
        # Put single message into an array for backward compatibility
        # when the source of ingest was from archives of messages
        # from Kafka identified by offsets.
        self.archiver.save(archive_name, [data])
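
A hedged usage sketch for the class above; all configuration values and the message payload are hypothetical:

out = AWSOutput({"id": "ACCESS_KEY", "secret": "SECRET_KEY",
                 "url": "https://s3.example.net", "bucket": "telemetry",
                 "prefix": "v1", "attempts": 3, "timeout": 50})
out.push({"schema": "crash_report", "data": {"timestamp": "20240101120000"}})
# -> uploads the payload as "crash_report/20240101120000.json.xz"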
Example #38
def detect_deleted_file(sys, node_name, node_path, dir, filename):
	archive = Archive.from_sig_path(os.path.join(dir, filename), node_name, node_path, sys)

	if not archive.original_present:
		sys.delete_file(archive)
Example #39
    def handle_close(self):
        # Shunt output to parent (avoids recursion in simultaneous logouts)
        self.parent.send = lambda s: None

        if self.parent.location:
            libsigma.report(libsigma.ROOM, "$actor has left the game.", self.parent)
            self.parent.location.characters.remove(self.parent)

        w = World()
        if self.parent in w.players:
            a = Archive()
            a.save(self.parent)
            w.players.remove(self.parent)

        log("NETWORK", "Client at %s closed connection" % self.addr[0])
        self.parent.socket = None
        self.close()
Example #40
File: imports.py  Project: qnap-dev/qdk2
    def _import_archive(self, filename, directory):
        # initialize so the finally block below cannot raise a NameError
        # when filename is a local path
        download_file = None
        if filename.startswith(('http://', 'https://', 'ftp://')):
            download_file = self._download(filename)
            filename = download_file

        if filename is None:
            return -1

        try:
            archive = Archive()
            ftype = archive.file_type(filename)
            if ftype is None:
                error('Invalid archive format: ' + filename)
                return -1
            archive.decompress(filename, directory, ftype)
        finally:
            if download_file is not None:
                rmtree(pdirname(download_file))
        return 0
Example #41
	def test_check_consistency_negative(self):
		met1 = mnm_repr.Metabolite('met1')
		met2 = mnm_repr.Metabolite('met2')
		comp1 = mnm_repr.Medium()
		cond_subst_1 = mnm_repr.PresentEntity(met1, comp1)
		cond_subst_2 = mnm_repr.PresentEntity(met2, comp1)
		base_model = mnm_repr.Model('m1', [cond_subst_1, cond_subst_2], [], [])
		exd = exp_repr.ExperimentDescription(exp_repr.DetectionEntity('met1'), [])
		res = exp_repr.Result('r1', exd, 'false')
		exp = exp_repr.Experiment('exp1', [res])

		arch = Archive()
		arch.mnm_entities = [met1, met2]
		arch.mnm_compartments = [comp1]
		ev1 = AdditionalModels([base_model])
		ev2 = AcceptedResults(exp)
		arch.record(ev1)
		arch.record(ev2)

		rev = RevisionModule(arch)
		out = rev.check_consistency(base_model)
		self.assertEqual(False, out)
Example #46
class Repository:
    def __init__(self, dir):
        import os
        import os.path
        index_dir_name = 'index'
        archive_file_name = 'documents'
        self.dir = dir

        if not os.path.exists(self.dir):
            os.mkdir(self.dir)

        self.index = DocumentDB(os.path.join(dir, index_dir_name))
        self.index.setup()

        self.archive = Archive(os.path.join(dir, archive_file_name))

        self.doc_map = {}

    def file_path(self, repofile):
        return self.archive.token_path(repofile.token)

    """
    Adds a document consisting of the arguments as data and returns it.
    The filenames should be strings, they will be stored in the document
    as the appropriate references (not strings).
    """
    def add_document(self, sender, recipient, date, keywords, files):
        assert isinstance(keywords, unicode)

        doc = self._create_document(sender, recipient, date, keywords, files)
        self.index.add(doc)

        return doc

    def remove_file(self, doc, file):
        doc.remove_file(file)
        self.index.update_doc(doc)
        self.archive.remove_file(file)

    def remove_document(self, doc):
        self.archive.remove_document(doc)
        self.index.remove(doc)

    def has_document_uuid(self, uuid):
        return self.index.has_uuid(uuid)

    def search_keywords(self, keywords):
        matches = self.index.search_keywords(keywords)
        docs = []

        for match in matches:
            doc = Document()
            for key in match.keys():
                setattr(doc, key, match[key])
            docs.append(doc)

        return docs

    """
    Creates a document consisting of the arguments, and enters
    the files into the archive.
    """
    def _create_document(self, sender, recipient, date, keywords, files):
        doc = Document()

        self.archive.add_document(doc)
        
        def create_repo_file(filename):
            import os.path
            token = ArchiveToken(filename, doc.uuid)
            file_type = os.path.splitext(filename)
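            # (note: os.path.splitext returns a (root, ext) tuple, so
            # file_type above is a tuple rather than just the extension)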
            return RepoFile(token, file_type)

        for file in files:
            repo_file = create_repo_file(file)

            doc.add_file(repo_file)
            self.archive.add_file(file, repo_file.token)

        doc.sender = sender
        doc.recipient = recipient
        doc.date = date
        doc.keywords = keywords

        return doc
Example #48
 def clear_archive(self):
     a = Archive()
     players = a.list()
     for p in players.keys():
         a.delete(p)
Example #49
File: init.py  Project: wtbhk/DDD_console
print 'Loading...'

if not os.path.exists('./DDD'): os.makedirs('./DDD')
d = Download()
config = open('./config.txt')
blogger = []
while True:
	line = config.readline()
	if not line:
		break
	blogger.append(line[:-1])

for i in blogger:
	if not os.path.exists('./DDD/'+i): os.makedirs('./DDD/'+i)
	all_ids = os.listdir('./DDD/' + i)
	a = Archive('http://' + i + '.diandian.com/archive')
	d.reg_callback(print_)
	archives = a.getAllArchive()
	for j in archives:
		for k in archives[j]:
			print k
			try:
				if re.search(r'post\/.*\/(.*)', k).group(1) not in all_ids:
					d.add(k)
			except:
				print 'err:'+k


d.start()

Example #50
File: pss.py  Project: marcioweck/PSSLib
def main():

    benchmark = pycec2013.Benchmark(17)

    lbounds = tuple(benchmark.get_lbounds())
    ubounds = tuple(benchmark.get_ubounds())
    min_ = min(lbounds)
    max_ = max(ubounds)

    toolbox = base.Toolbox()
    toolbox.register("generate", generate, creator.Individual)
    toolbox.register("update", map, updateHive)
    toolbox.register("feval", map, benchmark.evaluate)
    # toolbox.register("fdist", nearest_better_tree)
    toolbox.register("hive", generateHive, cma.Strategy)
    toolbox.register("bounds", ensure_bounds(lbounds, ubounds))
    toolbox.decorate("generate", toolbox.bounds)

    dim = benchmark.ndim
    nmin = benchmark.ngoptima
    leftfes = benchmark.max_fes
    ngoptima = 0
    max_ngoptima = benchmark.ngoptima

    def similarity_func(a, b):
        if np.isnan(np.sum(a)) or np.isnan(np.sum(b)):
            pdb.set_trace()

        d = euclidean(a, b)
        return d < 0.06

    hof = Archive(max_ngoptima, similarity_func)

    distribs = [stats.uniform for i in range(dim)]

    samples = sampler(distribs, (min_, max_), dim, 20*dim)

    #samples = np.loadtxt("/home/weckwar/inputs.txt", delimiter=',', ndmin=2)

    seeds, _ = nbcdm.raw_data_seeds_sel(benchmark.evaluate, samples, 20, useDM=True, maskmode='NEA1')
    # xpoints = np.array([x for x,c,_ in seeds])
    # np.savetxt("/home/weckwar/inputs.txt", xpoints, delimiter=',')
    #plotseeds(benchmark.evaluate, min_, max_, dim, samples=xpoints)
    #return

    hives = list()
    population = list()
    norm = float(np.sqrt(dim))
    for (xstart, c, (f1, f2)) in seeds:
        ind = creator.Individual(xstart)
        ind.fitness.values = (f1, f2)
        population.append(ind)
        hives.append(toolbox.hive((ind, c/norm)))

    verbose = True

    logbook = tools.Logbook()
    logbook.header = "gen", "nswarm", "ngoptima", "muerror", "dispersion"

    generation = 0
    logbook.record(gen=generation, nswarm=len(hives), ngoptima=ngoptima,
                   muerror=averageFitnessQC(population),
                   dispersion=solowPolaskyQC(population, 1.0/dim))

    while leftfes > 0  and ngoptima < max_ngoptima:

        swarms = toolbox.generate(hives)

        blob = list(chain(*swarms))
        D = squareform(pdist(blob))
        fitnesses = toolbox.feval(blob)

        nelem = len(swarms[0])
        for i, swarm in enumerate(swarms):
            k = i*nelem
            nbidx = np.arange(k, k+nelem)
            for j, ind in enumerate(swarm):
                D[k+j,nbidx] = np.inf
                sortedline = np.argsort(D[k+j,:])
                bestidx = next((l for l in sortedline
                                if fitnesses[l] > fitnesses[k+j]), -1)

                ind.fitness.values = (fitnesses[k+j], D[k+j, bestidx])

        checks = toolbox.update(hives, swarms)

        nextgen = [hives[i] for i, ok in enumerate(checks) if ok]

        xstarts = [creator.Centroid(x.centroid) for x in nextgen]
        cfit = toolbox.feval(xstarts)
        for x, fit in izip(xstarts, cfit):
            x.fitness.values = (fit,)

        uniques = list(remove_overlaps(benchmark.evaluate, xstarts, nextgen))
        hives = [nextgen[i] for i in uniques]
        xstarts = [xstarts[i]  for i in uniques]

        hof.update(xstarts)
        hfit = [x.fitness.values[0] for x in hof]

        ngoptima = benchmark.count_goptima(hof, hfit, 1e-5)

        if len(hives) < 2:
            samples = sampler(distribs, (min_, max_), dim, 2.*dim)

            seeds, _ = nbcdm.raw_data_seeds_sel(benchmark.evaluate, samples, 10.)

            for (xstart, c, (f1, f2)) in seeds:
                ind = creator.Individual(xstart)
                ind.fitness.values = (f1, f2)
                hives.append(toolbox.hive((ind, 0.5*c/norm)))

            leftfes -= len(samples)

        leftfes -= len(swarms)*nelem + len(xstarts)

        generation += 1
        logbook.record(gen=generation, nswarm=len(hives), ngoptima=ngoptima,
                   muerror=0,#averageFitnessQC(xstarts),
                   dispersion=0)#solowPolaskyQC(xstarts, 1.0/dim))
        print logbook.stream

    print "Used FEs: {0}".format(benchmark.max_fes - leftfes)
    print ngoptima
    for ind in hof:
        print "x: {0} -> {1}".format(ind, ind.fitness.values[0])
    plotseeds(benchmark.evaluate, min_, max_, dim, samples=hof)
Example #51
class QualityModuleTest(unittest.TestCase):
	def setUp(self):
		self.archive = Archive()
		exd = ExperimentDescription(DetectionEntity(None))
		res1 = Result('res_0', exd, None)
		res2 = Result('res_1', exd, None)
		res3 = Result('res_2', exd, None)
		res4 = Result('res_3', exd, None)
		exp1 = Experiment('exp_0', [res1])
		exp2 = Experiment('exp_1', [res2])
		exp3 = Experiment('exp_2', [res3])
		exp4 = Experiment('exp_3', [res4])
		r1 = NewResults(exp1)
		r2 = NewResults(exp2)
		r3 = NewResults(exp3)
		r4 = NewResults(exp4)

		self.mod1 = Model('m_0', [1], [], [])
		self.mod1.ignored_results = frozenset([res3])
		self.mod1.results_covered = frozenset([res1, res4])

		self.mod2 = Model('m_1', [2], [], [])
		self.mod2.ignored_results = frozenset([res3])
		self.mod2.results_covered = frozenset([res1, res2, res4])

		m = AdditionalModels([self.mod1, self.mod2])

		self.archive.record(r1)
		self.archive.record(m)
		self.archive.record(r2)
		self.archive.record(r3)
		self.archive.record(r4)

	def tearDown(self):
		self.archive = None
		self.mod1 = None
		self.mod2 = None

	def test_AllCovered(self):
		q = AllCovered(self.archive)
		q.check_and_update_qualities()
		self.assertEqual(self.mod1.quality, 2)
		self.assertEqual(self.mod2.quality, 3)

	def test_AllCoveredMinusIgnored(self):
		q = AllCoveredMinusIgnored(self.archive)
		q.check_and_update_qualities()
		self.assertEqual(self.mod1.quality, 1)
		self.assertEqual(self.mod2.quality, 2)

	def test_NewCovered(self):
		q = NewCovered(self.archive)
		q.check_and_update_qualities()
		self.assertEqual(self.mod1.quality, 1)
		self.assertEqual(self.mod2.quality, 2)

	def test_NewCoveredMinusIgnored(self):
		q = NewCoveredMinusIgnored(self.archive)
		q.check_and_update_qualities()
		self.assertEqual(self.mod1.quality, 1)
		self.assertEqual(self.mod2.quality, 2)
Example #52
	def system_configuration_generator(self, case, first_suffix):
		for qual in [AllCoveredMinusIgnored]: # NewCoveredMinusIgnored
			for rev in [RevCIAddR]: # RevCIAddB
				for threshold_addit_mods in [4]: #2, , 8
					for stop_threshold in [8]: #2, 4, 

						suffix = 'conf%s_%s' % (self.get_suffix((qual, rev, threshold_addit_mods, stop_threshold)), first_suffix)

						archive_ = Archive()
						archive_.mnm_compartments = self.compartments # added access to all compartments
						archive_.model_of_ref = case['model_of_ref']

						# recording entities with proper IDs: base versions and derived versions
						# these entities were involved in producing new versions and are handled below, not here
						entities_to_skip = list(case['ents_base_and_derived'].keys())
						for list_of_ents in case['ents_base_and_derived'].values():
							entities_to_skip.extend(list_of_ents)

						for ent in case['all_entities']:
							if ent in entities_to_skip:
								continue
							# not-skipped entities:
							ent.ID = archive_.get_new_ent_id()
							archive_.mnm_entities.append(ent)

						for ent in case['ents_base_and_derived'].keys():
							derv_ents = case['ents_base_and_derived'][ent]# need to copy this now, dictionary stops working after ID change
							ent.ID = archive_.get_new_ent_id()
							archive_.mnm_entities.append(ent)
							for derv_ent in derv_ents:
								derv_ent.ID = ent.ID
								archive_.mnm_entities.append(derv_ent)

						for act in case['all_activities']:
							act.ID = archive_.get_new_act_id()
							archive_.mnm_activities.append(act)

						for act in case['add_import_activities']:
							act.ID = archive_.get_new_act_id()
							archive_.import_activities.append(act)
		
						archive_.record(InitialModels(case['initial_models']))

						qual_m = qual(archive_)
						rev_m = rev(archive_, sfx=suffix)

						cost_model = CostModel(case['all_entities'],
							self.compartments, case['all_activities'],
							case['model_of_ref'].setup_conditions,
							case['add_import_activities'])
						cost_model.set_all_basic_costs_to_1()
						cost_model.calculate_derived_costs(case['all_activities'])
						cost_model.remove_None_valued_elements()

						exp_m = BasicExpModuleWithCosts(archive_, cost_model, sfx=suffix) # !!!!! switched from no costs
						oracle_ = Oracle(archive_, case['entities_ref'],
							case['activities_ref'], case['model_of_ref'],
							case['all_entities'], self.compartments,
							case['all_activities'], sfx=suffix)

						max_numb_cycles = 1000 # 
						max_time = 4 # 

						yield OverseerWithModQuality(archive_, rev_m, exp_m,
							oracle_, threshold_addit_mods, qual_m, max_numb_cycles,
							max_time, suffix, stop_threshold)
Example #54
class Backup:
	"""Handles complete backup flow. Check if file exists orhas been modified.
	Compresses, encrypts and uploads the file to th destination."""

	db = None
	job = None
	compress = None
	encrypt = None
	
	def __init__(self, job, db):
		self.job = job
		self.db = db
		
		self.archive = Archive()
		self.crypt = Crypt()
	
	def backup(self):
		"""Start backup process."""
		
		if not isdir(self.job['path']):
			self._list_files(
				None, dirname(self.job['path']), [basename(self.job['path']), ]
			)
		else:
			walk(self.job['path'], self._list_files, None)
		
	def _list_files(self, dir, basepath, files):
		"""Callback for walker. Iterates over filelist, builds absolute path
		and checks wheather to skip or upload the file."""
		
		for file in files:
			# absolute path
			path = join(basepath, file)
			
			# only work on files
			if isfile(path)	or (
				not isdir(path) and self.job.has_key('pre_command')):
				
				item = self.db.get_file(self.job['name'], path)
				
				# file is not in db
				if not item:
					self._backup_file(path)
				else:
					# file exists in db, but has a different mtime
					if isfile(path):
						mtime = getmtime(path)
						
						if int(item['mtime']) != int(mtime):
							self._backup_file(path)
					else:
						self._backup_file(path)
						
	def _execute_command(self, command):
		"""Execute pre- or postcommand."""
		if self.job.has_key(command):
			try:
				logging.info('[%s] Executing %s' 
					% (self.job['name'], self.job[command]))
				system(self.job[command])
			except:
				logging.warn('[%s] Command failed %s' 
					% (self.job['name'], self.job[command]))

	def _backup_file(self, path):
		"""Back ups specific file to desired storage device."""
		
		print('[%s] Starting backup for %s' % (self.job['name'], path))
		
		# precommand
		self._execute_command('pre_command')
		
		# get size, mtime
		file_info = self._file_info(path)
		
		# get storage wrapper
		storage = self._get_account(self.job['destination'])
		dest = join(tempfile.gettempdir(), 'umaticssync')
		# initialize flags so the db.add_file call below cannot raise an
		# UnboundLocalError when neither option is configured
		compressed = 0
		encrypted = 0
		
		# is compression desired? bzip2 file
		if self.job.has_key('compress') and self.job['compress'] == 'true':
			logging.info('[%s] Compressing %s' % (self.job['name'], path))
			self.archive.compress(path, dest)
			old_dest = dest
			compressed = 1
		
		# is encryption desired? encrypt with user id
		if self.job.has_key('encrypt') and self.job['encrypt']:
			logging.info('[%s] Encrypting %s' % (self.job['name'], path))
			self.crypt.encrypt(self.job['encrypt'], dest)
			dest = dest + '.gpg'
			if compressed:
				remove(old_dest)
			encrypted = 1
		
		# add file/increase revision
		info = self.db.add_file(
			self.job['name'], path, file_info['mtime'], file_info['size'], 
			encrypted, compressed)
		
		# build key and upload, cleanup
		key = normpath('%s/%s.r%%s' % (self.job['name'], path))
		logging.info('[%s] Uploading %s.r%s' 
			% (self.job['name'], path, info['revision']))
		storage.store_file(key % info['revision'], dest)
		remove(dest)
		
		# cleanup old revisions
		revision = int(info['revision']) - int(self.job['revisions'])
		if revision >= 0:
			print "del", key % revision
			storage.del_file(key % revision)
			
		# postcommand
		self._execute_command('post_command')
		
	def _file_info(self, path):
		"""Returns size and mtime."""
		return {'size': getsize(path), 'mtime': getmtime(path)}
		
	def _get_account(self, uri):
		"""Return storage engine object based on the provided URI string."""
		uri = urlsplit(uri)
		
		# s3 backend
		if uri[0] == 's3':
			a_key, s_key = uri[2][2:].split('@')[0].split(':')
			bucket = uri[2][2:].split('@')[1]
			
			from wrapper.S3Wrapper import S3Wrapper
			return S3Wrapper(a_key, s_key, bucket)
		# ftp server
		elif uri[0] == 'ftp':
			user, passwd = uri[2][2:].split('@')[0].split(':')
			host = uri[2][2:].split('@')[1]
			path = uri[2]
			
			from wrapper.FTPWrapper import FTPWrapper
			return FTPWrapper(host, user, passwd, path)
		# @todo: implement
		elif uri[0] == 'scp':
			pass
		# local storage backend
		elif uri[0] == 'file':
			path = uri[1]
			
			from wrapper.FileWrapper import FileWrapper
			return FileWrapper(path)

	def restore(self, revision):
		files = self.db.get_files(self.job['name'])
		
		if len(files) == 0:
			#logging.info('[%s] No files found for backup job')
			return False
		
		# get storage instance
		storage = self._get_account(self.job['destination'])
		
		# iterate through files
		for file in files:
			try:
				# is given revision in allowed range?
				rev_diff = int(file['revision']) - int(self.job['revisions'])
				if int(revision) in range(rev_diff, file['revision'] + 1):
					rev = revision
				else:
					# fallback to latest file revision
					rev = file['revision']
			except:
				rev = file['revision']
			
			logging.info('[%s] Restoring %s.r%s' 
				% (self.job['name'], file['path'], rev))
			
			# get file
			key = normpath('%s/%s.r%s' % (self.job['name'], file['path'], rev))
			dest = join(tempfile.gettempdir(), 'umaticssync')
			
			logging.info('[%s] Downloading %s' 
				% (self.job['name'], file['path']))
			storage.get_file(key, dest)
			
			if file['encrypted'] == 1:
				logging.info('[%s] Decrypting %s' 
					% (self.job['name'], file['path']))
				self.crypt.decrypt(self.job['encrypt'], dest, dest)
			
			if file['compressed'] == 1:
				logging.info('[%s] Extracting %s' 
					% (self.job['name'], file['path']))
				self.archive.extract(dest)
			else:
				rename(dest, file['path'])
Example #55
def process_commands():
    while len(command_queue) > 0:
        speaker, message = command_queue.pop(0)
        prompt = True

        if speaker.state == STATE_NAME:
            if message.lower() == "new":
                speaker.state = STATE_CONFIG_NAME
            else:
                a = Archive()
                name = message.strip()
                if a.load(name, speaker):
                    speaker.state = STATE_PASSWORD
                else:
                    speaker.send_line("I do not know that name.", 2)

        elif speaker.state == STATE_PASSWORD:
            password = encrypt_password(message)

            if password == speaker.password:
                # Do a dupe check to ensure no double logins
                # before entering STATE_PLAYING
                w = World()
                dupe = False
                for p in w.players:
                    if p.name == speaker.name:
                        dupe = True
                        speaker.send_line("That name is already active.", 2)
                        speaker.reset()
                        speaker.state = STATE_NAME
                        break

                if not dupe:
                    log("LOGIN", "User <%s> logged in at %s" % (speaker.name, time_string()))
                    speaker.state = STATE_PLAYING

                    # Add player to master players list
                    w.players.append(speaker)

                    # Insert player into default start room and "look"
                    libsigma.enter_room(speaker, w.rooms[options["default_start"]])
                    libsigma.report(libsigma.ROOM, "$actor has entered the game.", speaker)
                    speaker.send_line("", 2)
                    libsigma.queue_command(speaker, "look")
                    prompt = False

            else:
                speaker.send_line("Incorrect password.", 2)
                speaker.reset()
                speaker.state = STATE_NAME

        elif speaker.state == STATE_CONFIG_NAME:
            name = message.strip()
            a = Archive()
            if a.find(name):
                speaker.send_line("That name is already taken. Please choose another.", breaks=2)
            elif not valid_name(name):
                speaker.send_line("You cannot go by that name here.", breaks=2)
            else:
                speaker.name=name
                speaker.state=STATE_CONFIG_PASSWORD

        elif speaker.state == STATE_CONFIG_PASSWORD:
            if not valid_password(message):
                speaker.send_line("Please make your password at least five simple characters.", breaks=2)
            else:
                speaker.password = encrypt_password(message)
                speaker.state=STATE_CONFIG_CHAR
                creation.send_options(speaker)

        elif speaker.state == STATE_CONFIG_CHAR:
            if not creation.check_choice(speaker, message.lstrip()):
                speaker.send_line("Please make a valid choice.")
            if creation.is_configured(speaker):
                for stat in stats:
                    if speaker.stats[stat]==DEFAULT_STAT:
                        speaker.stats[stat]=3
                libsigma.add_points(speaker,5)

                speaker.state = STATE_PLAYING
                w = World()
                w.players.append(speaker)
                libsigma.enter_room(speaker, w.rooms[options["default_start"]])
                libsigma.report(libsigma.ROOM, "$actor has entered the game.", speaker)
                speaker.send_line("", 2)
                libsigma.queue_command(speaker, "look")
                speaker.HP=speaker.max_HP
            else:
                creation.send_options(speaker)

        elif speaker.state == STATE_PLAYING:
            if not run_command(speaker, message):
                speaker.send_line("What?")

        if speaker.socket and prompt:
            speaker.send_prompt()
Example #56
class ArchiveTest(unittest.TestCase):
	def setUp(self):
		self.archive = Archive()

	def test_record_ChosenExperiment(self):
		event = archive.ChosenExperiment('exp')
		self.archive.record(event)
		self.assertIn(event, self.archive.development_history)

	def test_record_Results(self):
		res = exp_repr.Experiment('res')
		event = archive.AcceptedResults(res)
		self.archive.record(event)
		self.assertIn(event, self.archive.development_history)
		self.assertIn(res, self.archive.known_results)

	def test_record_RefutedModels(self):
		mod = 'mod'
		self.archive.working_models.update([mod])
		event = archive.RefutedModels([mod])
		self.archive.record(event)
		self.assertIn(event, self.archive.development_history)
		self.assertNotIn(mod, self.archive.working_models)

	def test_record_RevisedModel(self):
		old = mnm_repr.Model(None, [1], [], [])
		new = mnm_repr.Model(None, [2], [], [])
		event = archive.RevisedModel(old, [new])
		self.archive.record(event)
		self.assertIn(event, self.archive.development_history)
		self.assertIn(new, self.archive.working_models)

	def test_record_AdditionalModels(self):
		mod1 = mnm_repr.Model(None, [1], [], [])
		mod2 = mnm_repr.Model(None, [2], [], [])
		event = archive.AdditionalModels([mod1,mod2])
		self.archive.record(event)
		self.assertIn(event, self.archive.development_history)
		self.assertIn(mod1, self.archive.working_models)
		self.assertIn(mod2, self.archive.working_models)

	def test_record_UpdatedModelQuality(self):
		event = archive.UpdatedModelQuality('mod', 1)
		self.archive.record(event)
		self.assertIn(event, self.archive.development_history)

	def test_record_InitialModels(self):
		mod1 = mnm_repr.Model(None, [1], [], [])
		mod2 = mnm_repr.Model(None, [2], [], [])
		event = archive.InitialModels([mod1, mod2])
		self.archive.record(event)
		self.assertIn(event, self.archive.development_history)
		self.assertIn(mod1, self.archive.working_models)
		self.assertIn(mod2, self.archive.working_models)

	def test_record_InitialResults(self):
		exp1 = exp_repr.Experiment('exp1')
		exp2 = exp_repr.Experiment('exp2')
		event = archive.InitialResults([exp1, exp2])
		self.archive.record(event)
		self.assertIn(event, self.archive.development_history)
		self.assertIn(exp1, self.archive.known_results)
		self.assertIn(exp2, self.archive.known_results)

	def test_record_TypeError(self):
		event = archive.Event
		self.assertRaises(TypeError, self.archive.record, event)

	def test_get_model_origin_event(self):
		mod1 = 'mod1'
		mod2 = 'mod2'
		exp1 = exp_repr.Experiment('exp1', ['res1'])
		exp2 = exp_repr.Experiment('exp2', ['res2'])
		adit = archive.AdditionalModels([mod1, mod2])
		self.archive.development_history.extend([adit, archive.NewResults(exp1), archive.NewResults(exp2)])
		origin_event = self.archive.get_model_origin_event(mod1)
		self.assertEqual(origin_event, adit)

	def test_get_events_after_event(self):
		mod1 = 'mod1'
		mod2 = 'mod2'
		exp1 = exp_repr.Experiment('exp1', ['res1'])
		exp2 = exp_repr.Experiment('exp2', ['res2'])
		res1 = archive.NewResults(exp1)
		res2 = archive.NewResults(exp2)
		self.archive.development_history.extend([archive.AdditionalModels([mod1, mod2]), res1, res2])
		origin_event = self.archive.get_model_origin_event(mod1)
		events = self.archive.get_events_after_event(origin_event)
		self.assertIn(res1, events)
		self.assertIn(res2, events)

	def test_get_results_after_model(self):
		mod1 = 'mod1'
		mod2 = 'mod2'
		exp1 = exp_repr.Experiment('exp1', ['res1'])
		exp2 = exp_repr.Experiment('exp2', ['res2'])
		self.archive.development_history.extend([archive.AdditionalModels([mod1, mod2]), archive.NewResults(exp1), archive.NewResults(exp2)])
		res = self.archive.get_results_after_model(mod1)
		self.assertIn('res1', res)
		self.assertIn('res2', res)