Skip to content

Commit 11c4651

Browse files
authored
Merge pull request #204 from lisa-lab/segmentation
Fully convolutional networks for 2D segmentation
2 parents 7f06bc2 + 479f645 commit 11c4651

37 files changed

+3172
-11
lines changed

.gitignore

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,4 @@
1+
.idea
12
code/*.pyc
23
code/*_plots
34
code/tmp*
@@ -13,3 +14,5 @@ html
1314
*.pyc
1415
*~
1516
*.swp
17+
# This directory may be created by scripts from segmentation tutorials.
18+
save_models

README.rst

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -37,4 +37,4 @@ Subdirectories:
3737
Build instructions
3838
------------------
3939

40-
To build the html version of the tutorials, install sphinx and run doc/Makefile
40+
To build the html version of the tutorials, run python doc/scripts/docgen.py

code/cnn_1D_segm/data_loader/__init__.py

Whitespace-only changes.
Lines changed: 185 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,185 @@
1+
import os
2+
import time
3+
4+
import numpy as np
5+
from PIL import Image
6+
import re
7+
import warnings
8+
9+
from dataset_loaders.parallel_loader import ThreadedDataset
10+
from parallel_loader_1D import ThreadedDataset_1D
11+
12+
floatX = 'float32'
13+
14+
class Cortical6LayersDataset(ThreadedDataset_1D):
    """The Cortical Layers Dataset.

    One-dimensional segmentation dataset: each sample is a 200-element
    intensity profile across the cortex, labelled per element with one
    of six cortical layers (plus a padding class 0).

    Parameters
    ----------
    which_set: string
        A string in ['train', 'val', 'valid', 'test'], corresponding to
        the set to be returned ('valid' is accepted as an alias of 'val').
    split: float
        A float indicating the dataset split between training and
        validation. For example, if split=0.85, 85% of the images will be
        used for training, whereas 15% will be used for validation.
    shuffle_at_each_epoch: bool
        Whether samples are reshuffled every epoch (handled by the
        parent loader).
    smooth_or_raw: string
        One of 'raw', 'smooth' or 'both': which input channel(s) each
        sample carries (raw intensities, smoothed ones, or both).
    """
    name = 'cortical_layers'

    non_void_nclasses = 7
    GTclasses = [0, 1, 2, 3, 4, 5, 6]
    _cmap = {
        0: (128, 128, 128),  # padding
        1: (128, 0, 0),      # layer 1
        # NOTE(review): original entry was the 2-tuple (128, 64,), which
        # is not a valid RGB triple; padded with 0 -- TODO confirm the
        # intended blue component.
        2: (128, 64, 0),     # layer 2
        3: (128, 64, 128),   # layer 3
        4: (0, 0, 128),      # layer 4
        5: (0, 0, 64),       # layer 5
        6: (64, 64, 128),    # layer 6
    }
    # 'layers1' fixed to 'layer1' for consistency with the other labels.
    _mask_labels = {0: 'padding', 1: 'layer1', 2: 'layer2', 3: 'layer3',
                    4: 'layer4', 5: 'layer5', 6: 'layer6'}
    _void_labels = []

    # Lazily-built cache for the `filenames` property.
    _filenames = None

    @property
    def filenames(self):
        """Return the (cached) list of sample indices for this set.

        The mask file has one sample per line; indices are shuffled with
        a fixed seed so the train/val splits are reproducible and
        disjoint, then sliced according to ``self.split``.
        """
        if self._filenames is None:
            # Count samples; use a context manager so the handle is not
            # leaked (the original left the file open).
            with open(self.mask_path) as f:
                nfiles = sum(1 for _ in f)
            # list() keeps this shuffleable on Python 3, where range()
            # is a lazy object.
            filenames = list(range(nfiles))
            np.random.seed(1609)
            np.random.shuffle(filenames)

            if self.which_set == 'train':
                filenames = filenames[:int(nfiles * self.split)]
            elif self.which_set == 'val':
                filenames = filenames[-(nfiles - int(nfiles * self.split)):]

            # Save the filenames list so shuffling/splitting runs once.
            self._filenames = filenames

        return self._filenames

    def __init__(self,
                 which_set="train",
                 split=0.85,
                 shuffle_at_each_epoch=True,
                 smooth_or_raw='both',
                 *args, **kwargs):

        self.task = 'segmentation'

        self.n_layers = 6
        n_layers_path = str(self.n_layers) + "layers_segmentation"

        # Accept 'valid' as an alias of 'val'.
        self.which_set = "val" if which_set == "valid" else which_set
        if self.which_set not in ("train", "val", "test"):
            raise ValueError("Unknown argument to which_set %s" %
                             self.which_set)

        self.split = split

        # self.path is supplied by the parent dataset-loader class --
        # assumed to point at the dataset root; TODO confirm.
        self.image_path_raw = os.path.join(self.path, n_layers_path,
                                           "training_raw.txt")
        self.image_path_smooth = os.path.join(self.path, n_layers_path,
                                              "training_geo.txt")
        self.mask_path = os.path.join(self.path, n_layers_path,
                                      "training_cls.txt")
        self.regions_path = os.path.join(self.path, n_layers_path,
                                         "training_regions.txt")

        self.smooth_raw_both = smooth_or_raw

        # Two input channels when both raw and smoothed profiles are
        # returned, otherwise one; each profile is 200 elements long.
        if smooth_or_raw == 'both':
            self.data_shape = (200, 2)
        else:
            self.data_shape = (200, 1)

        super(Cortical6LayersDataset, self).__init__(*args, **kwargs)

    def get_names(self):
        """Return a dict of names, per prefix/subset."""
        return {'default': self.filenames}
103+
104+
105+
106+
def test_6layers():
107+
train_iter = Cortical6LayersDataset(
108+
which_set='train',
109+
smooth_or_raw = 'both',
110+
batch_size=500,
111+
data_augm_kwargs={},
112+
return_one_hot=False,
113+
return_01c=False,
114+
return_list=True,
115+
use_threads=False)
116+
117+
valid_iter = Cortical6LayersDataset(
118+
which_set='valid',
119+
smooth_or_raw = 'smooth',
120+
batch_size=500,
121+
data_augm_kwargs={},
122+
return_one_hot=False,
123+
return_01c=False,
124+
return_list=True,
125+
use_threads=False)
126+
127+
valid_iter2 = Cortical6LayersDataset(
128+
which_set='valid',
129+
smooth_or_raw = 'raw',
130+
batch_size=500,
131+
data_augm_kwargs={},
132+
return_one_hot=False,
133+
return_01c=False,
134+
return_list=True,
135+
use_threads=False)
136+
137+
138+
139+
train_nsamples = train_iter.nsamples
140+
train_nbatches = train_iter.nbatches
141+
valid_nbatches = valid_iter.nbatches
142+
valid_nbatches2 = valid_iter2.nbatches
143+
144+
145+
146+
# Simulate training
147+
max_epochs = 1
148+
print "Simulate training for", str(max_epochs), "epochs"
149+
start_training = time.time()
150+
for epoch in range(max_epochs):
151+
print "Epoch #", str(epoch)
152+
153+
start_epoch = time.time()
154+
155+
print "Iterate on the training set", train_nbatches, "minibatches"
156+
for mb in range(train_nbatches):
157+
start_batch = time.time()
158+
batch = train_iter.next()
159+
if mb%5 ==0:
160+
print("Minibatch train {}: {} sec".format(mb, (time.time() -
161+
start_batch)))
162+
163+
print "Iterate on the validation set", valid_nbatches, "minibatches"
164+
for mb in range(valid_nbatches):
165+
start_batch = time.time()
166+
batch = valid_iter.next()
167+
if mb%5 ==0:
168+
print("Minibatch valid {}: {} sec".format(mb, (time.time() -
169+
start_batch)))
170+
171+
print "Iterate on the validation set (second time)", valid_nbatches2, "minibatches"
172+
for mb in range(valid_nbatches2):
173+
start_batch = time.time()
174+
batch = valid_iter2.next()
175+
if mb%5==0:
176+
print("Minibatch valid {}: {} sec".format(mb, (time.time() -
177+
start_batch)))
178+
179+
print("Epoch time: %s" % str(time.time() - start_epoch))
180+
print("Training time: %s" % str(time.time() - start_training))
181+
182+
if __name__ == '__main__':
183+
print "Loading the dataset 1 batch at a time"
184+
test_6layers()
185+
print "Success!"

0 commit comments

Comments
 (0)