Example 1
 def testOneChunk(self):
     self.assertEquals(chunkify(self.data, 1, 1), self.data)
Example 2
 def test_magical35(self):
     self.assertNotEqual(list(chunkify(xrange(1, 36), 10, 10)), [])
Example 3
def rbmclassprobs(rbm, x, batchsize):
    n_visible = rbm.W.shape[1]
    if not rbm.classRBM:
        raise ValueError(
            "Class probabilities can only be calculated for classification RBMs")
    if x.shape[1] != n_visible:
        raise ValueError("x has wrong dimensions")

    n_samples = x.shape[0]

    # check whether the result should be calculated in batches
    # (with a fixed three-parameter signature this branch is always taken)
    sig = signature(rbmclassprobs)
    if len(sig.parameters) == 3:
        numbatches = n_samples / batchsize
        assert numbatches % 1 == 0, "numbatches is not an integer"
        chunks = chunkify(batchsize, x)
    else:
        chunks = chunkify(n_samples, x)

    # accumulate class probabilities across all minibatches
    class_prob_res = np.empty((0, rbm.U.shape[1]))
    for i in range(len(chunks)):
        minibatch = x[chunks[i]['start']:chunks[i]['end'], :]
        class_prob, _ = rbmpygivenx(rbm, minibatch, 'test')
        class_prob_res = np.append(class_prob_res, class_prob, axis=0)
        # class_prob_res = np.concatenate((class_prob_res, class_prob), axis=0)

    return class_prob_res
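
The function above consumes chunkify(batchsize, x) as a list of row-index ranges, each a dict with 'start' and 'end' keys. A minimal sketch of a helper with that shape, assuming that call order; the name and details are illustrative, not the project's actual implementation:

def chunkify(batchsize, x):
    # Split the rows of x into consecutive index ranges of at most
    # `batchsize` rows, returned as {'start': ..., 'end': ...} dicts.
    n_samples = x.shape[0]
    return [{'start': start, 'end': min(start + batchsize, n_samples)}
            for start in range(0, n_samples, batchsize)]

Each returned range can then be used to slice x[start:end, :] exactly as the minibatch loop above does.
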
Example 4
def loader(kind, path, config, params, loaded_tasks):
    not_for_locales = config.get("not-for-locales", [])
    locales_per_chunk = config["locales-per-chunk"]

    filtered_locales = [
        locale for locale in get_screenshots_locales()
        if locale not in not_for_locales
    ]
    chunks, remainder = divmod(len(filtered_locales), locales_per_chunk)
    if remainder:
        # We need one last chunk to include locales in remainder
        chunks = int(chunks + 1)

    # Taskcluster sorts task names alphabetically, we need numbers to be zero-padded.
    max_number_of_digits = _get_number_of_digits(chunks)

    jobs = {
        str(this_chunk).zfill(max_number_of_digits): {
            "attributes": {
                "chunk_locales": chunkify(filtered_locales, this_chunk,
                                          chunks),
                "l10n_chunk": str(this_chunk),
            }
        }
        # Chunk numbers start at 1 (not 0)
        for this_chunk in range(1, chunks + 1)
    }

    config["jobs"] = jobs

    return base_loader(kind, path, config, params, loaded_tasks)
Example 5
def sync(user, secret, friends, user_dir, chunk_dir, backup_dir):
    did_anything = False
    
    encryption_key = 'shhh!'
    deleted_chunks, created_chunks = chunkify.chunkify([user_dir], encryption_key, chunk_dir)
    postjson('/update_chunks', {"user": user, "secret": secret, "created": created_chunks, "deleted": deleted_chunks})
    
    expired_chunks = postjson('/chunks_expired', {"user": user, "secret": secret})['expired']
    for chunk in expired_chunks:
        path = os.path.join(backup_dir, chunk['user'], chunk['chunk_id'])
        if os.path.exists(path):
            did_anything = True
            os.remove(path)
    
    # try restoring any pending files:
    restorables = postjson('/get_restorable_chunks', {"user": user, "secret": secret})['chunks']
    for chunk in restorables:
        did_anything = True
        data = r.post(root+'/download_chunk/'+chunk['key'], data=json.dumps({"user": user, "secret": secret}), headers={"Content-Type": "application/json"}).content
        path = os.path.join(user_dir, chunk['chunk_id'])
        #open(path, 'wb').write(data)
        archive = zipfile.ZipFile(StringIO.StringIO(data))
        archive.extractall(user_dir)
    
    # try uploading any files that need to be restored:
    files_to_restore = postjson('/files_to_restore', {"user": user, "secret": secret, "friends": friends})['chunks']
    for chunk in files_to_restore:
        path = os.path.join(backup_dir, chunk['user'], chunk['chunk_id'])
        if os.path.exists(path):
            did_anything = True
            r.post(root+'/upload_chunk/'+chunk['key'], data=open(path, 'rb').read(), headers={"Content-Type": "application/octet-stream"})
    
    while True:
        chunks = postjson('/chunks_to_upload', {"user": user, "secret": secret})['chunks']
        if len(chunks)==0:
            break
        did_anything = True
        chunk = chunks[0]
        data = open(os.path.join(chunk_dir, chunk['chunk_id']))
        print r.post(root+'/upload_chunk/'+chunk['key'], data=data, headers={"Content-Type": "application/octet-stream"})
        
    
    while True:
        chunks = postjson('/chunks_to_download', {"user": user, "secret": secret, "friends": friends})['chunks']
        if len(chunks)==0:
            break
        did_anything = True
        chunk = chunks[0]
        resp = r.post(root+'/download_chunk/'+chunk['key'], data=json.dumps({"user": user, "secret": secret}), headers={"Content-Type": "application/json"})
        #print resp
        data = resp.content
        if data and len(data)>0:
            user_dir = os.path.join(backup_dir, chunk['user'])
            if not os.path.exists(user_dir):
                os.mkdir(os.path.join(backup_dir, chunk['user']))
            open(os.path.join(user_dir, chunk['chunk_id']), 'wb').write(data)
    
    return did_anything
Example 6
 def do(self, type, job, hashtypes, id):
     """
     Answer to a DO command. Used to send a job.
     """
     if debug:
         print "MasterProtocol.do(id=%s)" % (id)
     chunks = chunkify(type, job, id)
     if id not in self.factory.fathers:
         self.factory.fathers[id] = dict()
     nb = len(chunks)
     father = "%s:%s" % (type, job)
     self.factory.fathers[id][father] = nb
     self.factory.api.sendFather(father, nb, id)
     for chunk in chunks:
         self.addTask(type, chunk, father, hashtypes, id)
Example 7
def sync(user, secret, friends, user_dir, chunk_dir, backup_dir):
    did_anything = False
    
    encryption_key = 'shhh!'
    deleted_chunks, created_chunks = chunkify.chunkify([user_dir], encryption_key, chunk_dir)
    postjson('/update_chunks', {"user": user, "secret": secret, "created": created_chunks, "deleted": deleted_chunks})
    
    expired_chunks = postjson('/chunks_expired', {"user": user, "secret": secret})['expired']
    for chunk in expired_chunks:
        path = os.path.join(backup_dir, chunk['user'], chunk['chunk_id'])
        if os.path.exists(path):
            did_anything = True
            os.remove(path)
    
    while True:
        chunks = postjson('/chunks_to_upload', {"user": user, "secret": secret})['chunks']
        if len(chunks)==0:
            break
        did_anything = True
        chunk = chunks[0]
        data = open(os.path.join(chunk_dir, chunk['chunk_id']))
        print r.post(root+'/upload_chunk/'+chunk['key'], data=data, headers={"Content-Type": "application/octet-stream"})
        
    
    while True:
        chunks = postjson('/chunks_to_download', {"user": user, "secret": secret, "friends": friends})['chunks']
        if len(chunks)==0:
            break
        did_anything = True
        chunk = chunks[0]
        resp = r.post(root+'/download_chunk/'+chunk['key'], data=json.dumps({"user": user, "secret": secret}), headers={"Content-Type": "application/json"})
        #print resp
        data = resp.content
        if data:
            user_dir = os.path.join(backup_dir, chunk['user'])
            if not os.path.exists(user_dir):
                os.mkdir(os.path.join(backup_dir, chunk['user']))
            open(os.path.join(user_dir, chunk['chunk_id']), 'wb').write(data)
    
    return did_anything
Example 8
def test_should_reject_a_list_of_strings_given_for_fill():
    with pytest.raises(TypeError):
        chunkify("ABCDEFG", 3, ["x"])
Example 9
def sync(user, secret, friends, user_dir, chunk_dir, backup_dir):
    did_anything = False

    encryption_key = 'shhh!'
    deleted_chunks, created_chunks = chunkify.chunkify([user_dir],
                                                       encryption_key,
                                                       chunk_dir)
    postjson(
        '/update_chunks', {
            "user": user,
            "secret": secret,
            "created": created_chunks,
            "deleted": deleted_chunks
        })

    expired_chunks = postjson('/chunks_expired', {
        "user": user,
        "secret": secret
    })['expired']
    for chunk in expired_chunks:
        path = os.path.join(backup_dir, chunk['user'], chunk['chunk_id'])
        if os.path.exists(path):
            did_anything = True
            os.remove(path)

    while True:
        chunks = postjson('/chunks_to_upload', {
            "user": user,
            "secret": secret
        })['chunks']
        if len(chunks) == 0:
            break
        did_anything = True
        chunk = chunks[0]
        data = open(os.path.join(chunk_dir, chunk['chunk_id']))
        print r.post(root + '/upload_chunk/' + chunk['key'],
                     data=data,
                     headers={"Content-Type": "application/octet-stream"})

    while True:
        chunks = postjson('/chunks_to_download', {
            "user": user,
            "secret": secret,
            "friends": friends
        })['chunks']
        if len(chunks) == 0:
            break
        did_anything = True
        chunk = chunks[0]
        resp = r.post(root + '/download_chunk/' + chunk['key'],
                      data=json.dumps({
                          "user": user,
                          "secret": secret
                      }),
                      headers={"Content-Type": "application/json"})
        #print resp
        data = resp.content
        if data:
            user_dir = os.path.join(backup_dir, chunk['user'])
            if not os.path.exists(user_dir):
                os.mkdir(os.path.join(backup_dir, chunk['user']))
            open(os.path.join(user_dir, chunk['chunk_id']), 'wb').write(data)

    return did_anything
Example 10
def test_should_reject_negative_chunk_size():
    with pytest.raises(ValueError):
        chunkify("ABCDEFG", -1, "x")
Example 11
def test_should_reject_s_given_a_float():
    with pytest.raises(TypeError):
        chunkify(5.5, 1, "x")
Example 12
def test_should_reject_s_given_a_list_of_ints():
    with pytest.raises(TypeError):
        chunkify([1, 2, 3, 4, 5], 1, "x")
Example 13
 def testNotEvenlyDivisibleWithoutExtraSubsequentChunk(self):
     self.assertEquals(chunkify(self.data, 4, 4), [9, 10])
Example 14
 def testNotEvenlyDivisibleWithExtra(self):
     self.assertEquals(chunkify(self.data, 2, 4), [4, 5, 6])
Example 15
def test_should_reject_given_a_string_with_more_than_a_single_character_for_fill():
    with pytest.raises(ValueError):
        chunkify("ABCDEFG", 3, "xx")
Example 16
def test_should_reject_given_a_list_for_fill():
    with pytest.raises(ValueError):
        chunkify("ABCDEFG", 3, [])
Example 17
def test_accept_empty_string():
    assert chunkify("", 5, "x") == ["xxxxx"]
Example 18
def test_should_reject_given_a_float_for_fill():
    with pytest.raises(TypeError):
        chunkify("ABCDEFG", 3, 1.2)
Example 19
 def testMultipleChunks(self):
     self.assertEquals(chunkify(self.data, 3, 5), [5, 6])
Example 20
def test_chunks_of_size_one():
    assert chunkify("ABCDEFG", 1, "z") == ["A", "B", "C", "D", "E", "F", "G"]
Example 21
 def testNotEvenlyDivisibleWithoutExtra(self):
     self.assertEquals(chunkify(self.data, 3, 4), [7, 8])
Example 22
def test_should_chunkify_the_example_correctly():
    assert chunkify("ABCDEFG", 3, "x") == ["ABC", "DEF", "Gxx"]
Example 23
 def testIterable(self):
     self.assertEquals(list(chunkify(xrange(1, 11), 2, 5)), [3, 4])
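
Examples 1, 2, 4, 13, 14, 19, 21 and 23 all use the three-argument form chunkify(things, this_chunk, chunks) with a 1-based chunk index: the sequence is split into `chunks` near-equal pieces, the remainder going to the earliest pieces, and the `this_chunk`-th piece is returned. A minimal sketch consistent with those assertions, offered as an illustration rather than the library's actual code:

def chunkify(things, this_chunk, chunks):
    # Split `things` into `chunks` near-equal pieces and return the
    # `this_chunk`-th piece (1-based); earlier pieces absorb the remainder.
    things = list(things)
    if not 1 <= this_chunk <= chunks:
        raise ValueError("this_chunk must be between 1 and chunks")
    base, extra = divmod(len(things), chunks)
    sizes = [base + 1 if i < extra else base for i in range(chunks)]
    start = sum(sizes[:this_chunk - 1])
    return things[start:start + sizes[this_chunk - 1]]

For ten items split into four chunks the sizes come out as [3, 3, 2, 2], which matches the expectations in Examples 13, 14 and 21.
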
Example 24
def test_should_work_just_the_same_given_a_list_of_strings():
    assert chunkify(["A", "B", "C", "D", "E", "F", "G"], 3, "x") == [
        "ABC",
        "DEF",
        "Gxx",
    ]
Example 25
def test_should_reject_chunk_size_of_zero():
    with pytest.raises(ValueError):
        chunkify("ABCDEFG", 0, "x")
Example 26
def sync(user, secret, friends, user_dir, chunk_dir, backup_dir):
    did_anything = False

    encryption_key = 'shhh!'
    deleted_chunks, created_chunks = chunkify.chunkify([user_dir],
                                                       encryption_key,
                                                       chunk_dir)
    postjson(
        '/update_chunks', {
            "user": user,
            "secret": secret,
            "created": created_chunks,
            "deleted": deleted_chunks
        })

    expired_chunks = postjson('/chunks_expired', {
        "user": user,
        "secret": secret
    })['expired']
    for chunk in expired_chunks:
        path = os.path.join(backup_dir, chunk['user'], chunk['chunk_id'])
        if os.path.exists(path):
            did_anything = True
            os.remove(path)

    # try restoring any pending files:
    restorables = postjson('/get_restorable_chunks', {
        "user": user,
        "secret": secret
    })['chunks']
    for chunk in restorables:
        did_anything = True
        data = r.post(root + '/download_chunk/' + chunk['key'],
                      data=json.dumps({
                          "user": user,
                          "secret": secret
                      }),
                      headers={
                          "Content-Type": "application/json"
                      }).content
        path = os.path.join(user_dir, chunk['chunk_id'])
        #open(path, 'wb').write(data)
        archive = zipfile.ZipFile(StringIO.StringIO(data))
        archive.extractall(user_dir)

    # try uploading any files that need to be restored:
    files_to_restore = postjson('/files_to_restore', {
        "user": user,
        "secret": secret,
        "friends": friends
    })['chunks']
    for chunk in files_to_restore:
        path = os.path.join(backup_dir, chunk['user'], chunk['chunk_id'])
        if os.path.exists(path):
            did_anything = True
            r.post(root + '/upload_chunk/' + chunk['key'],
                   data=open(path, 'rb').read(),
                   headers={"Content-Type": "application/octet-stream"})

    while True:
        chunks = postjson('/chunks_to_upload', {
            "user": user,
            "secret": secret
        })['chunks']
        if len(chunks) == 0:
            break
        did_anything = True
        chunk = chunks[0]
        data = open(os.path.join(chunk_dir, chunk['chunk_id']))
        print r.post(root + '/upload_chunk/' + chunk['key'],
                     data=data,
                     headers={"Content-Type": "application/octet-stream"})

    while True:
        chunks = postjson('/chunks_to_download', {
            "user": user,
            "secret": secret,
            "friends": friends
        })['chunks']
        if len(chunks) == 0:
            break
        did_anything = True
        chunk = chunks[0]
        resp = r.post(root + '/download_chunk/' + chunk['key'],
                      data=json.dumps({
                          "user": user,
                          "secret": secret
                      }),
                      headers={"Content-Type": "application/json"})
        #print resp
        data = resp.content
        if data and len(data) > 0:
            user_dir = os.path.join(backup_dir, chunk['user'])
            if not os.path.exists(user_dir):
                os.mkdir(os.path.join(backup_dir, chunk['user']))
            open(os.path.join(user_dir, chunk['chunk_id']), 'wb').write(data)

    return did_anything