# Imports required by the code below (Keras 2.x style, consistent with the
# lr=/val_acc usage later in the script).
import math

from keras.applications.xception import Xception
from keras.models import Sequential, Model
from keras.layers import Dense, GlobalAveragePooling2D
from keras.optimizers import Adam
from keras.preprocessing.image import ImageDataGenerator
from keras.callbacks import ReduceLROnPlateau

# IMG_HEIGHT, IMG_WIDTH, classes, BATCH_SIZE, EPOCH and the *_memmap arrays
# are assumed to be defined earlier in the original script.

print(type(X_train_memmap))
print(len(X_train_memmap))
print(X_train_memmap.shape)
print(type(Y_train_memmap))
print(len(Y_train_memmap))
print(Y_train_memmap.shape)
print(type(X_test_memmap))
print(len(X_test_memmap))
print(X_test_memmap.shape)
print(type(Y_test_memmap))
print(len(Y_test_memmap))
print(Y_test_memmap.shape)

"""
<class 'numpy.memmap'>
637
(637, 360, 480, 3)
<class 'numpy.memmap'>
637
(637, 5)
<class 'numpy.memmap'>
215
(215, 360, 480, 3)
<class 'numpy.memmap'>
215
(215, 5)
"""

base_model = Xception(
	include_top=False,
	input_shape=(IMG_HEIGHT, IMG_WIDTH, 3),
	pooling=None  # None, not the string 'None': pooling is added manually below
)

# Build the new fully connected (classifier) layers

top_model = Sequential()
top_model.add(GlobalAveragePooling2D())
top_model.add(Dense(1024, activation='relu'))
top_model.add(Dense(len(classes), activation='softmax'))

# Join the base model (with its fully connected layers removed) to the custom classifier built above
model = Model(inputs=base_model.input, outputs=top_model(base_model.output))

# Make every layer trainable
for layer in model.layers:
	layer.trainable = True

model.compile(optimizer=Adam(lr=0.001), loss='categorical_crossentropy', metrics=['accuracy'])
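
# Optional sanity check: print the combined architecture and confirm the final
# softmax layer has len(classes) units.
model.summary()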

# Deliberately no data augmentation, in order to induce overfitting
image_data_generator = ImageDataGenerator(
	featurewise_center = False,
	samplewise_center = False,
	featurewise_std_normalization = False,
	samplewise_std_normalization = False,
	zca_whitening = False,
	rotation_range = 0,
	width_shift_range = 0.0,
	height_shift_range = 0.0,
	horizontal_flip = False,
	vertical_flip = False
)
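
# For contrast, a sketch of what enabling augmentation could look like; the
# values are illustrative assumptions and this generator is not used here:
#
# augmenting_generator = ImageDataGenerator(
#     rotation_range=15,
#     width_shift_range=0.1,
#     height_shift_range=0.1,
#     horizontal_flip=True
# )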

# reduce learning rate
reduce_lr = ReduceLROnPlateau(
	monitor = 'val_acc',
	factor = 0.5,
	patience = 5,
	verbose = 1
)

history = model.fit_generator(
	image_data_generator.flow(X_train_memmap, Y_train_memmap, batch_size=BATCH_SIZE),
	steps_per_epoch=math.ceil(len(X_train_memmap) / BATCH_SIZE),  # must be an integer
	epochs=EPOCH,
	validation_data=(X_test_memmap, Y_test_memmap),
	callbacks = [reduce_lr],
	initial_epoch=0
)
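
# To see the overfitting this setup is designed to produce, plot the training
# curves. matplotlib is assumed to be available; this Keras version records
# accuracy under the 'acc'/'val_acc' history keys.
import matplotlib.pyplot as plt

plt.plot(history.history['acc'], label='train acc')
plt.plot(history.history['val_acc'], label='val acc')
plt.xlabel('epoch')
plt.ylabel('accuracy')
plt.legend()
plt.show()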
