def derivative_Gaussian(x, complete=True, p=1):
    """Derivative-of-Gaussian filter evaluated on x, returned normalized.

    The bandwidth b is 10 for the 'complete' variant and 1.25 otherwise;
    the result is passed through normalization with order p.
    """
    b = 10 if complete else 1.25
    scaled = x * b
    filt = scaled * np.exp(-scaled ** 2 / 2)
    return normalization(filt, p=p)
# ---- Example #2 (0) ----
def test():
    """Register every moving T1T2 volume to its fixed counterpart with Elastix.

    Side effects only: reads images from the fixed/ and moving/ directories,
    runs elastix registration per patient, normalizes the result, and writes
    it to the output/ directory under the same filename.
    """
    print('start')
    # Hoist the base paths so each one is spelled exactly once.
    moving_dir = '/mnt/hd1/puwenbo/Dataset/T1T2/moving/'
    fixed_dir = '/mnt/hd1/puwenbo/Dataset/T1T2/fixed/'
    output_dir = '/mnt/hd1/puwenbo/Dataset/T1T2/output/'
    for patient in os.listdir(moving_dir):
        # Fixed: original printed 'do <patient>now' with no separating space.
        print('do ' + patient + ' now')
        elastixImageFilter = sitk.ElastixImageFilter()
        fix = sitk.ReadImage(fixed_dir + patient)
        elastixImageFilter.SetFixedImage(fix)
        elastixImageFilter.SetMovingImage(sitk.ReadImage(moving_dir + patient))
        elastixImageFilter.SetParameterMap(helper.p)
        elastixImageFilter.SetOutputDirectory(output_dir)
        elastixImageFilter.Execute()
        # Normalize the registered array before writing it back as an image.
        res = helper.normalization(sitk.GetArrayFromImage(elastixImageFilter.GetResultImage()))
        sitk.WriteImage(sitk.GetImageFromArray(res), output_dir + patient)
# ---- Example #3 (0) ----
def conv_filter(x, name='Gaussian', complete=True, p=1):
    """Build a named convolution filter over x and return it normalized.

    Parameters
    ----------
    x : array-like sample positions the filter is evaluated at.
    name : one of 'Gaussian', 'derivative', 'averaging'.
    complete : selects bandwidth b = 10 (True) or 1.25 (False);
        the 'averaging' filter always forces b = 1.25.
    p : normalization order forwarded to normalization().

    Raises
    ------
    ValueError
        If `name` is not an implemented filter.
    """
    b = 10 if complete else 1.25
    if name == 'Gaussian':
        filt = np.exp(-(x * b)**2 / 2)
    elif name == 'derivative':
        filt = (x * b) * np.exp(-(x * b)**2 / 2)
    elif name == 'averaging':
        b = 1.25  # averaging ignores `complete` and always uses the narrow bandwidth
        filt = np.exp(-(x * b)**2 / 2)
    else:
        # Fixed: the original printed the message to stdout and raised a bare
        # ValueError; put the diagnostic in the exception where callers see it.
        raise ValueError('%s Filter not implemented' % name)
    return normalization(filt, p=p)
# ---- Example #4 (0) ----
# The pair of easily-confused words this run trains a disambiguator for.
confusion_set = ['than', 'then']

# minimum occurrence of tokens in training data;
# tokens with fewer occurrences will be substituted to 'U' for unknown.
# 'U' can also serve as substitute for unseen tokens at test time.
min_occurrence = 20

### END SETTINGS ###

# init

# Ensure the working directory exists before any checkpoints/errors are written.
if not os.path.exists(work_dir):
    os.makedirs(work_dir)

# Each corpus line appears to hold space-separated 'token|tag' pairs; keep only
# the lowercased, normalized token part — TODO confirm corpus file format.
with open(corpus_file) as f:
    sents = [[helper.normalization(twp.split('|')[0].lower()) for twp in line.split()] for line in f]

# Build training sentences and the token-embedding vocabulary (tokens rarer
# than min_occurrence presumably collapse to 'U' — see note above).
train_sents = list(helper.acs(sents, preserve_tokens))
token_embeddings = helper.TokenEmbeddings(train_sents, min_occurrence)

# Resume from a previous run when a full (timestamp, epoch, iteration)
# checkpoint triple is configured; otherwise start training from scratch.
if timestamp and start_epoch and start_iteration:
    errors = helper.load_errors('%s-%d-%d.errors' % (timestamp, start_epoch, start_iteration), work_dir)
    load_weights = '%s-%d-%d.weights' % (timestamp, start_epoch, start_iteration)
    print('init previous states...')
    print('timestamp: ', timestamp)
    print('start_epoch: ', start_epoch)
    print('start_iteration: ', start_iteration)
else:
    errors = []
    start_epoch = 0
    start_iteration = 0
def StandingStill(x):
    """Flat (all-ones) filter with the same shape as x, normalized."""
    return normalization(np.ones(x.shape))
def TurnDown(x):
    """Falling logistic filter: near 1 for negative x, decaying toward 0."""
    steepness = 10
    filt = 1 / (1 + np.exp(steepness * x))
    return normalization(filt)
def TurnUp(x):
    """Rising logistic filter: near 0 for negative x, climbing toward 1."""
    steepness = 10
    filt = 1 / (1 + np.exp(-steepness * x))
    return normalization(filt)