Commit 19068d4

StephanieLarocque authored and notoraptor committed
load data from train file
1 parent 1adcc24 commit 19068d4

4 files changed: +62 -10 lines


code/fcn_2D_segm/data_loader.py

Lines changed: 0 additions & 1 deletion

@@ -3,7 +3,6 @@
 
 from dataset_loaders.images.polyps912 import Polyps912Dataset
 from dataset_loaders.images.camvid import CamvidDataset
-from dataset_loaders.images.polyps912 import Polyps912Dataset
 from dataset_loaders.images.isbi_em_stacks import IsbiEmStacksDataset
 
 
code/fcn_2D_segm/train_fcn8.py

Lines changed: 35 additions & 4 deletions

@@ -13,7 +13,7 @@
 import lasagne
 from lasagne.regularization import regularize_network_params
 
-from data_loader import load_data
+from dataset_loaders.images.polyps912 import Polyps912Dataset
 from fcn8 import buildFCN8
 
 
@@ -156,10 +156,41 @@ def train(dataset, learn_step=0.005,
         bs = batch_size
     else:
         bs = [10, 1, 1]
+    train_iter = Polyps912Dataset(which_set='train',
+                                  batch_size=batch_size[0],
+                                  seq_per_subset=0,
+                                  seq_length=0,
+                                  data_augm_kwargs=train_data_augm_kwargs,
+                                  return_one_hot=one_hot,
+                                  return_01c=False,
+                                  overlap=0,
+                                  use_threads=False,
+                                  shuffle_at_each_epoch=shuffle_train,
+                                  return_list=True,
+                                  return_0_255=return_0_255)
+    val_iter = Polyps912Dataset(which_set='val',
+                                batch_size=batch_size[1],
+                                seq_per_subset=0,
+                                seq_length=0,
+                                return_one_hot=one_hot,
+                                return_01c=False,
+                                overlap=0,
+                                use_threads=False,
+                                shuffle_at_each_epoch=False,
+                                return_list=True,
+                                return_0_255=return_0_255)
+    test_iter = Polyps912Dataset(which_set='test',
+                                 batch_size=batch_size[2],
+                                 seq_per_subset=0,
+                                 seq_length=0,
+                                 return_one_hot=one_hot,
+                                 return_01c=False,
+                                 overlap=0,
+                                 use_threads=False,
+                                 shuffle_at_each_epoch=False,
+                                 return_list=True,
+                                 return_0_255=return_0_255)
 
-    train_iter, val_iter, test_iter = \
-        load_data(dataset, data_augmentation,
-                  one_hot=False, batch_size=bs, return_0_255=train_from_0_255)
 
     n_batches_train = train_iter.nbatches
     n_batches_val = val_iter.nbatches
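Note on the iterators introduced above: each Polyps912Dataset instance exposes nbatches and a next() method, and with return_list=True each call to next() returns the images and ground-truth masks of one mini-batch. A minimal consumption sketch follows; it assumes the dataset_loaders package and the Polyps 912 data are installed, and the literal argument values and the loop are illustrative only, not part of the commit.

import numpy as np
from dataset_loaders.images.polyps912 import Polyps912Dataset

# Illustrative values; the training script passes its own batch_size,
# one_hot, shuffle_train and return_0_255 settings instead.
train_iter = Polyps912Dataset(which_set='train',
                              batch_size=10,
                              seq_per_subset=0,
                              seq_length=0,
                              return_one_hot=False,
                              return_01c=False,
                              overlap=0,
                              use_threads=False,
                              shuffle_at_each_epoch=True,
                              return_list=True,
                              return_0_255=False)

for _ in range(train_iter.nbatches):
    X, y = train_iter.next()  # one mini-batch of images and segmentation masks
    print(np.shape(X), np.shape(y))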

code/unet/data_loader.py

Lines changed: 0 additions & 1 deletion

@@ -1,7 +1,6 @@
 import os
 import time
 
-from dataset_loaders.images.polyps912 import Polyps912Dataset
 from dataset_loaders.images.camvid import CamvidDataset
 from dataset_loaders.images.polyps912 import Polyps912Dataset
 from dataset_loaders.images.isbi_em_stacks import IsbiEmStacksDataset

code/unet/train_unet.py

Lines changed: 27 additions & 4 deletions

@@ -13,7 +13,8 @@
 import lasagne
 from lasagne.regularization import regularize_network_params
 
-from data_loader import load_data
+
+from dataset_loaders.images.isbi_em_stacks import IsbiEmStacksDataset
 from Unet_lasagne_recipes import build_UNet
 # from metrics import jaccard, accuracy, crossentropy
 
@@ -159,9 +160,31 @@ def train(dataset, learn_step=0.005,
     else:
         bs = [10, 1, 1]
 
-    train_iter, val_iter, test_iter = \
-        load_data(dataset, data_augmentation,
-                  one_hot=False, batch_size=bs, return_0_255=train_from_0_255)
+
+    train_iter = IsbiEmStacksDataset(which_set='train',
+                                     batch_size=batch_size[0],
+                                     seq_per_subset=0,
+                                     seq_length=0,
+                                     data_augm_kwargs=data_augmentation,
+                                     return_one_hot=one_hot,
+                                     return_01c=False,
+                                     overlap=0,
+                                     use_threads=True,
+                                     shuffle_at_each_epoch=shuffle_train,
+                                     return_list=True,
+                                     return_0_255=return_0_255)
+
+    val_iter = IsbiEmStacksDataset(which_set='val',
+                                   batch_size=batch_size[1],
+                                   seq_per_subset=0,
+                                   seq_length=0,
+                                   return_one_hot=one_hot,
+                                   return_01c=False,
+                                   use_threads=True,
+                                   shuffle_at_each_epoch=False,
+                                   return_list=True,
+                                   return_0_255=return_0_255)
+    test_iter = None
 
     batch = train_iter.next()
     input_dim = (np.shape(batch[0])[2], np.shape(batch[0])[3]) #(x,y) image shape
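The unchanged lines right after the new iterators peek at one batch to recover the input image size: with return_01c=False the images come back channel-first, so batch[0] has shape (batch_size, channels, height, width) and indices 2 and 3 are the spatial dimensions. A self-contained illustration with a stand-in array (the 1x512x512 shape is only an assumption about the ISBI EM stacks, not taken from the commit):

import numpy as np

# Stand-in for batch[0] as returned by train_iter.next() with return_01c=False:
# axes are (batch_size, channels, height, width).
images = np.zeros((10, 1, 512, 512), dtype='float32')

input_dim = (np.shape(images)[2], np.shape(images)[3])  # spatial (height, width)
print(input_dim)  # -> (512, 512)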
