Example #1
def make_file(output_format):
    output_format["bin"] += 1
    util.make_folder(output_format)
    name = util.file_name(output_format)
    output_file = "/tmp/" + name
    f = open(output_file, "wb+")
    return [f, name]
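A minimal usage sketch for make_file; every key in output_format below is an assumption borrowed from the other examples on this page (prefix, timestamp, nonce, bin, ...), since the real schema lives in util.file_name:

# Hypothetical usage; the output_format keys are illustrative only.
output_format = {"prefix": 0, "timestamp": 0.0, "nonce": 1, "bin": 0,
                 "num_bins": 1, "file_id": 1, "num_files": 1, "ext": "bin"}
f, name = make_file(output_format)
try:
    f.write(b"payload")
finally:
    f.close()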
Example #2
def each_emotion(i):
    li = lists[i]
    out = outdir%emotions[i][1]
    util.make_folder(out)
    
    j = 0
    while len(os.listdir(out)) < 100000:
        j += 1
        try:
            filename = li[j].split('/')[-1]
            filename = filename.split('.')[0]
            
            suffixes = ('orig', 'flip', 'crop1', 'crop2')
            if all(os.path.isfile('%s/%s_%s.jpg' % (out, filename, s)) for s in suffixes):
                continue
            
            im = cv.imread(li[j])
            flip = cv.flip(im,1)
            
            h = im.shape[0]
            w = im.shape[1]
            size = min(h,w)
            
            cv.imwrite('%s/%s_orig.jpg'%(out,filename), cv.resize(im,(64,64)))
            cv.imwrite('%s/%s_flip.jpg'%(out,filename), cv.resize(flip,(64,64)))
        
            cv.imwrite('%s/%s_crop1.jpg'%(out,filename), cv.resize(im[0:size,0:size],(64,64)))
            cv.imwrite('%s/%s_crop2.jpg'%(out,filename), cv.resize(im[h-size:h,w-size:w],(64,64)))
            
        except Exception:
            # Report the failing index and file, then keep scanning the list.
            print('%s:%d (%s)' % (emotions[i], j, filename))
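each_emotion is indexed by position, so a natural driver maps it over range(len(lists)). A sketch, assuming lists and emotions are parallel module-level sequences defined elsewhere in this script:

from multiprocessing import Pool

if __name__ == '__main__':
    # Augment every emotion's image list in parallel; the pool size is arbitrary.
    with Pool(4) as pool:
        pool.map(each_emotion, range(len(lists)))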
Example #3
def download(course, item):
    """
    Download announcement JSON.
    :param course: A Course object.
    :param item: {
        "close_time": 2147483647,
        "user_id": 1069689,
        "open_time": 1411654451,
        "title": "Coursera",
        "deleted": 0,
        "email_announcements": "email_sent",
        "section_id": "14",
        "order": "6",
        "item_type": "announcement",
        "__type": "announcement",
        "published": 1,
        "item_id": "39",
        "message": "Hello, everyone.",
        "uid": "announcement39",
        "id": 39,
        "icon": ""
    }
    :return: None.
    """
    path = '{}/announcement/{}.json'
    path = path.format(course.get_folder(), item['item_id'])

    util.make_folder(path, True)
    util.write_json(path, item)

    content = util.read_file(path)
    content = util.remove_coursera_bad_formats(content)

    util.write_file(path, content)
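A hedged invocation of the function above; Course here is assumed to be the project's course class exposing get_folder(), and the item dict mirrors the docstring schema:

# Illustrative only: the Course constructor below is hypothetical.
course = Course('example-course')
item = {'item_id': '39', 'title': 'Coursera',
        'message': 'Hello, everyone.', 'uid': 'announcement39'}
download(course, item)  # writes <course folder>/announcement/39.json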
Example #4
def save_model(model, model_name, mode, n_hiddens, act_fun, n_comps,
               batch_norm):

    assert is_data_loaded(), "Dataset hasn't been loaded"

    savedir = root_output + data_name + '/'
    util.make_folder(savedir)
    filename = create_model_id(model_name, mode, n_hiddens, act_fun, n_comps,
                               batch_norm)

    util.save(model, savedir + filename + '.pkl')
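Assuming the dataset has been loaded and model is a trained object, a call could look like the following; every argument value is a placeholder, since the valid choices depend on create_model_id:

# Placeholder arguments for illustration only.
save_model(model, model_name='maf', mode='sequential',
           n_hiddens=[100, 100], act_fun='relu',
           n_comps=10, batch_norm=True)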
Example #5
  def test_basic(self):
    database: TestDatabase = TestDatabase()
    table1: TestTable = database.create_table("table1")
    with open("spacenet/1-1-1-tide.knn", "rb") as f:
      entry1: TestEntry = table1.add_entry("5/1550529206.039528-957/1-1/1-1-1-tide.knn", f.read())
    with open("spacenet/3band_AOI_1_RIO_img147.tif", "rb") as f:
      entry2: TestEntry = table1.add_entry("0/1550529206.039528-957/1-1/1-1-1-tide.tiff", f.read())

    params = {
      "bucket": table1.name,
      "input_prefix": 0
    }
    input_format = util.parse_file_name(entry1.key)
    output_format = dict(input_format)
    output_format["prefix"] = 6
    util.make_folder(output_format)
    draw_borders.run(database, entry1.key, params, input_format, output_format, [])
Example #6
File: peer.py Project: kq2/Ricin
def download(course, item):
    """
    Download peer-grading JSON.
    :param course: A Course object.
    :param item: This JSON item is directly written into saved file.
    :return: None.
    """
    path = "{}/peer_assessment/{}.json"
    path = path.format(course.get_folder(), item["item_id"])

    util.make_folder(path, True)
    util.write_json(path, item)

    content = util.read_file(path)
    content = util.remove_coursera_bad_formats(content)

    util.write_file(path, content)
Example #7
def run_application(database, bucket_name: str, key: str,
                    input_format: Dict[str, Any],
                    output_format: Dict[str, Any],
                    offsets: List[int], params: Dict[str, Any]):
    temp_file = "/tmp/{0:s}".format(key)
    util.make_folder(util.parse_file_name(key))

    if len(offsets) == 0:
        database.download(bucket_name, key, temp_file)
    else:
        obj = database.get_entry(bucket_name, key)
        format_lib = importlib.import_module("formats." +
                                             params["input_format"])
        iterator_class = getattr(format_lib, "Iterator")
        iterator = iterator_class(obj, OffsetBounds(offsets[0], offsets[1]))
        items = iterator.get(iterator.get_start_index(),
                             iterator.get_end_index())
        with open(temp_file, "wb+") as f:
            items = list(items)
            iterator_class.from_array(items, f, iterator.get_extra())

    application_lib = importlib.import_module("applications." +
                                              params["application"])
    application_method = getattr(application_lib, "run")
    output_files = application_method(database, temp_file, params,
                                      input_format, output_format)

    for output_file in output_files:
        p = util.parse_file_name(output_file.replace("/tmp/", ""))
        if p is None:
            index = output_file.rfind(".")
            ext = output_file[index + 1:]
            output_format["ext"] = ext
            new_key = util.file_name(output_format)
        else:
            new_key = util.file_name(p)

        with open(output_file, "rb") as f:
            database.put(params["bucket"], new_key, f, {})
    return True
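A sketch of a whole-object invocation (empty offsets), assuming database implements the download/get_entry/put interface used above; the bucket and format names are placeholders:

# Placeholder call; the params keys mirror those read inside run_application.
input_format = util.parse_file_name(key)
output_format = dict(input_format)
run_application(database, "my-bucket", key, input_format, output_format,
                offsets=[], params={"application": "draw_borders",
                                    "input_format": "tiff",
                                    "bucket": "my-bucket"})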
Example #8
def combine(database: Database, table_name, key, input_format, output_format,
            offsets, params):
    output_format["file_id"] = input_format["bin"]
    output_format["bin"] = 1
    output_format["num_bins"] = 1
    output_format["num_files"] = input_format["num_bins"]
    file_name = util.file_name(output_format)
    util.make_folder(output_format)
    should_combine, last_file, keys = util.combine_instance(table_name, key, params)
    if should_combine:
        msg = "Combining TIMESTAMP {0:f} NONCE {1:d} BIN {2:d} FILE {3:d}"
        msg = msg.format(input_format["timestamp"], input_format["nonce"],
                         input_format["bin"], input_format["file_id"])
        print(msg)

        format_lib = importlib.import_module("formats." +
                                             params["output_format"])
        iterator_class = getattr(format_lib, "Iterator")
        temp_name = "/tmp/{0:s}".format(file_name)
        # Make this deterministic and combine in the same order
        keys.sort()
        entries: List[Entry] = [database.get_entry(table_name, k) for k in keys]
        metadata: Dict[str, str] = {}
        if database.contains(table_name, file_name):
            return True

        with open(temp_name, "wb+") as f:
            metadata = iterator_class.combine(entries, f, params)

        found = database.contains(table_name, file_name)
        if not found:
            with open(temp_name, "rb") as f:
                database.put(params["bucket"], file_name, f, metadata, True)
        os.remove(temp_name)
        return True
    else:
        return database.contains(table_name, file_name) or key != last_file
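The two contains() checks make combine safe to retry: a duplicate invocation either returns early or skips the final put. A hypothetical call, with placeholder table and format names:

# Placeholder invocation; format dicts follow util.parse_file_name's schema.
input_format = util.parse_file_name(key)
output_format = dict(input_format)
done = combine(database, "my-table", key, input_format, output_format,
               offsets=[], params={"output_format": "tiff", "bucket": "my-table"})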
Example #9
import sys
import math

import chainer
from chainer.utils import type_check
from chainer import function
import chainer.functions as F
import chainer.links as L

# make the local python_util package importable
sys.path.append('/home/dl-box/study/.package/python_util/')
import util, Feature

nz = 100    # dimensionality of z

emo = 'all'

repeat = 50  # number of images to generate (number of repetitions)

model_root = '../model/'  # path to the emotion-polarity classification model
util.make_folder('generated_images/%s/'%emo)
output_images = 'generated_images/%s/'%emo + '%s'
model_file = 'generate_model/%s_gen.h5'%emo

# definition of the emotion-polarity classification model
model = [model_root + 'mean.npy',
         model_root + 'deploy.prototxt',
         model_root + 'finetuned.caffemodel',
         model_root + 'synset_words.txt']
cls = Feature.Classify(model)


class Generator(chainer.Chain):
    def __init__(self):
        super(Generator, self).__init__(
            l0z = L.Linear(nz, 4*4*512, wscale=0.02*math.sqrt(nz)),
            dc1 = L.Deconvolution2D(512, 256, 4, stride=2, pad=1, wscale=0.02*math.sqrt(4*4*512)),
            dc2 = L.Deconvolution2D(256, 128, 4, stride=2, pad=1, wscale=0.02*math.sqrt(4*4*256)),
            dc3 = L.Deconvolution2D(128, 64, 4, stride=2, pad=1, wscale=0.02*math.sqrt(4*4*128)),
Example #10
def collect_results(data, n_hiddens, n_layers, n_comps, n_layers_comps, act_funs, modes, has_cond):

    print('collecting for {0}...'.format(data))
    ex.load_data(data)

    # create file to write to
    filename = ('{0}_{1}_bpp.txt' if bits_per_pixel else '{0}_{1}.txt').format(data, split)
    util.make_folder(root_results)
    f = open(root_results + filename, 'w')
    f.write('Results for {0}\n'.format(data))
    f.write('\n')

    for act, mode in itertools.product(act_funs, modes):

        f.write('actf: {0}\n'.format(act))
        f.write('mode: {0}\n'.format(mode))
        f.write('\n')

        # gaussian
        f.write('Gaussian\n')
        res, err = ex.fit_and_evaluate_gaussian(split, cond=False, use_image_space=bits_per_pixel)
        if bits_per_pixel:
            res, err = calc_bits_per_pixel(res, err)
        f.write('  {0:.2f} +/- {1:.2f}\n'.format(res, n_err * err))
        if has_cond:
            f.write('conditional\n')
            res, err = ex.fit_and_evaluate_gaussian(split, cond=True, use_image_space=bits_per_pixel)
            if bits_per_pixel:
                res, err = calc_bits_per_pixel(res, err)
            f.write('  {0:.2f} +/- {1:.2f}\n'.format(res, n_err * err))
        f.write('\n')

        # made
        f.write('MADE 1 comp\n')
        for nh in n_hiddens:
            f.write('  [1 x {0}]: {1}\n'.format(nh, result('made', mode, [nh]*1, act)))
            f.write('  [2 x {0}]: {1}\n'.format(nh, result('made', mode, [nh]*2, act)))
        if has_cond:
            f.write('conditional\n')
            for nh in n_hiddens:
                f.write('  [1 x {0}]: {1}\n'.format(nh, result('made_cond', mode, [nh]*1, act)))
                f.write('  [2 x {0}]: {1}\n'.format(nh, result('made_cond', mode, [nh]*2, act)))
        f.write('\n')

        # mog made
        for nc in n_comps:
            f.write('MADE {0} comp\n'.format(nc))
            for nh in n_hiddens:
                f.write('  [1 x {0}]: {1}\n'.format(nh, result('made', mode, [nh]*1, act, nc)))
                f.write('  [2 x {0}]: {1}\n'.format(nh, result('made', mode, [nh]*2, act, nc)))
            if has_cond:
                f.write('conditional\n')
                for nh in n_hiddens:
                    f.write('  [1 x {0}]: {1}\n'.format(nh, result('made_cond', mode, [nh]*1, act, nc)))
                    f.write('  [2 x {0}]: {1}\n'.format(nh, result('made_cond', mode, [nh]*2, act, nc)))
            f.write('\n')

        # real nvp
        for nl in n_layers:
            f.write('RealNVP {0} layers\n'.format(nl))
            for nh in n_hiddens:
                f.write('  [1 x {0}]: {1}\n'.format(nh, result('realnvp', None, [nh]*1, 'tanhrelu', nl, True)))
                f.write('  [2 x {0}]: {1}\n'.format(nh, result('realnvp', None, [nh]*2, 'tanhrelu', nl, True)))
            if has_cond:
                f.write('conditional\n')
                for nh in n_hiddens:
                    f.write('  [1 x {0}]: {1}\n'.format(nh, result('realnvp_cond', None, [nh]*1, 'tanhrelu', nl, True)))
                    f.write('  [2 x {0}]: {1}\n'.format(nh, result('realnvp_cond', None, [nh]*2, 'tanhrelu', nl, True)))
            f.write('\n')

        # maf
        for nl in n_layers:
            f.write('MAF {0} layers\n'.format(nl))
            for nh in n_hiddens:
                f.write('  [1 x {0}]: {1}\n'.format(nh, result('maf', mode, [nh]*1, act, nl, True)))
                f.write('  [2 x {0}]: {1}\n'.format(nh, result('maf', mode, [nh]*2, act, nl, True)))
            if has_cond:
                f.write('conditional\n')
                for nh in n_hiddens:
                    f.write('  [1 x {0}]: {1}\n'.format(nh, result('maf_cond', mode, [nh]*1, act, nl, True)))
                    f.write('  [2 x {0}]: {1}\n'.format(nh, result('maf_cond', mode, [nh]*2, act, nl, True)))
            f.write('\n')

        # maf on made
        for nl, nc in n_layers_comps:
            f.write('MAF {0} layers on MADE {1} comp\n'.format(nl, nc))
            for nh in n_hiddens:
                f.write('  [1 x {0}]: {1}\n'.format(nh, result('maf_on_made', mode, [nh]*1, act, [nl, nc], True)))
                f.write('  [2 x {0}]: {1}\n'.format(nh, result('maf_on_made', mode, [nh]*2, act, [nl, nc], True)))
            if has_cond:
                f.write('conditional\n')
                for nh in n_hiddens:
                    f.write('  [1 x {0}]: {1}\n'.format(nh, result('maf_on_made_cond', mode, [nh]*1, act, [nl, nc], True)))
                    f.write('  [2 x {0}]: {1}\n'.format(nh, result('maf_on_made_cond', mode, [nh]*2, act, [nl, nc], True)))
            f.write('\n')

    # close file
    f.close()
Example #11
#initial_delta = timedelta(days=2)

#Stats
total_num_of_repo_queried = 0
total_num_of_repo_downloaded = 0
total_seconds_of_download = 0
total_seconds_of_analyzing = 0


if __name__ == '__main__':
	#Token to raise GitHub rate limit constraint
	if not token:
		print('Forgot to export your token')
	#Create folder data if it does not exist already
	if not os.path.exists(data_dir):
		util.make_folder(data_dir)
	#Create result folder and file
	if not os.path.exists(result_dir):
		util.make_folder(result_dir)
	result_file_dir = result_dir + '/' + result_file
	if not os.path.isfile(result_file_dir):
		util.make_file(result_file_dir) 
	#Collect url list
	ps = None
	pe = None
	#Use this set to download prior as well
	cs = None
	ce = starting_date
	#Use this set to download inclusive
	#cs = starting_date
	#ce = cs + initial_delta