Skip to content

Commit

Permalink
Do not augment when validating
Browse files Browse the repository at this point in the history
  • Loading branch information
gzuidhof committed Jun 3, 2016
1 parent 64bc449 commit 2691943
Show file tree
Hide file tree
Showing 3 changed files with 51 additions and 7 deletions.
45 changes: 45 additions & 0 deletions config/smaller.ini
Original file line number Diff line number Diff line change
@@ -0,0 +1,45 @@
[info]
experiment: U-net default settings
name: unet

[dataset]
pixels: 512
n_classes: 2
channels: 1
subset: None
filenames_train: './../data/1_1_1mm_512_x_512_lung_slices/subset[0-7]/*.pkl.gz'
filenames_validation: './../data/1_1_1mm_512_x_512_lung_slices/subset[8]/*.pkl.gz'

[network]
architecture: unet
input_size: 512
depth: 4
branching_factor: 4

[updates]
optimization: nesterov
learning_rate: 0.00001
momentum: 0.985
l2_lambda: 1e-4
batch_size_train: 2
batch_size_validation: 4
n_epochs: 200

[preprocessing]
erode_segmentation: 3

[normalization]
zero_center: True
mean_pixel: 0.66200809792889126

[augmentation]
augment: True
flip: True
zoom: 0.08 ;zoom augmentation not working yet
rotation: 16
translation: 3


[misc]
multiprocess_load_augmentation: False
save_every_n_epoch: 4
11 changes: 5 additions & 6 deletions src/dataset.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,14 +13,14 @@

_EPSILON = 1e-8

def get_image(filename):
def get_image(filename, deterministic):
with gzip.open(filename,'rb') as f:
lung = pickle.load(f)

with gzip.open(filename.replace('lung','nodule'),'rb') as f:
truth = np.array(pickle.load(f),dtype=np.float32)

if P.AUGMENT:
if P.AUGMENT and not deterministic:
lung, truth = augment([lung,truth])

truth = np.array(np.round(truth),dtype=np.int64)
Expand Down Expand Up @@ -52,9 +52,8 @@ def get_image(filename):

return lung, truth

def load_images(filenames):
filenames = filter(lambda x: x!='.', filenames)
slices = map(get_image, filenames)
def load_images(filenames, deterministic=False):
slices = [get_image(filename, deterministic) for filename in filenames]
lungs, truths = zip(*slices)

l = np.concatenate(lungs,axis=0)
Expand All @@ -64,7 +63,7 @@ def load_images(filenames):
# get set to 0 (the background is -1)
w = loss_weighting.weight_by_class_balance(t, classes=[0,1])

#Set -1 labels back to label 0
#Set -1 labels back to label 0
t = np.clip(t, 0, 100000)

return l, t, w
2 changes: 1 addition & 1 deletion src/learn.py
Original file line number Diff line number Diff line change
Expand Up @@ -103,7 +103,7 @@
val_batches = 0

np.random.shuffle(filenames_val)
val_gen = ParallelBatchIterator(load_images, filenames_val, ordered=False,
val_gen = ParallelBatchIterator(partial(load_images,deterministic=True), filenames_val, ordered=False,
batch_size=P.BATCH_SIZE_VALIDATION,
multiprocess=P.MULTIPROCESS_LOAD_AUGMENTATION)

Expand Down

0 comments on commit 2691943

Please sign in to comment.