سلام
دیتای ما تصویر یک صفحه ی بازی هست که بی ربط به دیتاست Imagenet هست آیا
1- استفاده از fine tuning اثر داره ؟
2- برای استفاده از بهترین مدل های موجود مثل vgg, resnet ,... به چه تعداد دیتا نیاز داریم که از اول وزن هارو آموزش بدیم ؟ آیا تعداد دیتا تابعی از تعداد پارامتر های مدل هست ؟
3- میخوام با cross validation بهترین پارامتر و ساختار و .. رو پیدا کنم، مشکل اینه که ورودی با image generator تولید میشه و تابع grid search cv این ورودی رو قبول نمیکنه، گشتم تونستم کد زیر رو پیدا کنم ولی نمیفهمم برای تابع fit (خط آخر) چه‌جوری generator یا flow_from_directory رو بهش ورودی بدم!؟
from __future__ import print_function

import copy
import os  # dealing with directories
import types
from random import shuffle  # mixing up or currently ordered data that might lead our network astray in training.

import numpy as np  # dealing with arrays

import keras
from keras import backend as K
from keras.datasets import mnist
from keras.layers import Activation, Conv2D, Dense, Dropout, Flatten, MaxPooling2D
from keras.models import Sequential
from keras.preprocessing.image import ImageDataGenerator
from keras.utils import to_categorical
from keras.wrappers.scikit_learn import KerasClassifier
from scipy.sparse import issparse

# sklearn.grid_search was removed in scikit-learn 0.20; fall back for very
# old installs (< 0.18) only.
try:
    from sklearn.model_selection import GridSearchCV
except ImportError:
    from sklearn.grid_search import GridSearchCV

from tqdm import tqdm  # a nice pretty percentage bar for tasks. Thanks to viewer Daniel Bühler for this suggestion
num_classes = 10
# input image dimensions
img_rows, img_cols = 28, 28
input_shape = (img_rows, img_cols, 1)
def make_model(dense_layer_sizes, filters, kernel_size, pool_size):
'''Creates model comprised of 2 convolutional layers followed by dense layers
dense_layer_sizes: List of layer sizes.
This list has one number for each layer
filters: Number of convolutional filters in each convolutional layer
kernel_size: Convolutional kernel size
pool_size: Size of pooling area for max pooling
'''
model = Sequential()
model.add(Conv2D(filters, kernel_size,
padding='valid',
input_shape=input_shape))
model.add(Activation('relu'))
model.add(Conv2D(filters, kernel_size))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=pool_size))
model.add(Dropout(0.25))
model.add(Flatten())
for layer_size in dense_layer_sizes:
model.add(Dense(layer_size))
model.add(Activation('relu'))
model.add(Dropout(0.5))
model.add(Dense(num_classes))
model.add(Activation('softmax'))
model.compile(loss='categorical_crossentropy',
optimizer='adadelta',
metrics=['accuracy'])
return model
class KerasClassifier(KerasClassifier):
""" adds sparse matrix handling using batch generator
"""
def fit(self, x, y, **kwargs):
""" adds sparse matrix handling """
if not issparse(x):
return super().fit(x, y, **kwargs)
############ adapted from KerasClassifier.fit ######################
if self.build_fn is None:
self.model = self.__call__(**self.filter_sk_params(self.__call__))
elif not isinstance(self.build_fn, types.FunctionType):
self.model = self.build_fn(
**self.filter_sk_params(self.build_fn.__call__))
else:
self.model = self.build_fn(**self.filter_sk_params(self.build_fn))
loss_name = self.model.loss
if hasattr(loss_name, '__name__'):
loss_name = loss_name.__name__
if loss_name == 'categorical_crossentropy' and len(y.shape) != 2:
y = to_categorical(y)
### fit => fit_generator
fit_args = copy.deepcopy(self.filter_sk_params(Sequential.fit_generator))
fit_args.update(kwargs)
############################################################
self.model.fit_generator(
self.get_batch(x, y, self.sk_params["batch_size"]),
samples_per_epoch=x.shape[0],
**fit_args)
return self
def get_batch(self, x, y=None, batch_size=32):
""" batch generator to enable sparse input """
index = np.arange(x.shape[0])
start = 0
while True:
if start == 0 and y is not None:
np.random.shuffle(index)
batch = index[start:start+batch_size]
if y is not None:
yield x[batch].toarray(), y[batch]
else:
yield x[batch].toarray()
start += batch_size
if start >= x.shape[0]:
start = 0
def predict_proba(self, x):
""" adds sparse matrix handling """
if not issparse(x):
return super().predict_proba(x)
preds = self.model.predict_generator(
self.get_batch(x, None, self.sk_params["batch_size"]),
val_samples=x.shape[0])
return preds
dense_size_candidates = [[32], [64], [32, 32], [64, 64]]
my_classifier = KerasClassifier(make_model, batch_size=32)
validator = GridSearchCV(my_classifier,
param_grid={'dense_layer_sizes': dense_size_candidates,
# epochs is avail for tuning even when not
# an argument to model building function
'epochs': [3, 6],
'filters': [8],
'kernel_size': [3],
'pool_size': [2]},
scoring='neg_log_loss',
n_jobs=1)
batch_size = 20
validation_datagen = ImageDataGenerator(rescale=1./255)
train_datagen = ImageDataGenerator(rescale=1./255)
test_datagen = ImageDataGenerator(rescale=1./255)
train_generator = train_datagen.flow_from_directory(
'd:/train', # this is the target directory
target_size=(width, height), # all images will be resized to 150x150
batch_size=batch_size,
color_mode= "grayscale",
class_mode='binary',
shuffle=True
# ,save_to_dir='preview', save_prefix='cat', save_format='png'
) # since we use binary_crossentropy loss, we need binary labels
# this is a similar generator, for validation data
validation_generator = validation_datagen.flow_from_directory(
'd:/validation',
target_size=(width, height),
batch_size=batch_size,
color_mode= "grayscale",
class_mode='binary')
test_generator = test_datagen.flow_from_directory(
'd:/test',
target_size=(width, height),
batch_size=batch_size,
color_mode= "grayscale",
class_mode='binary')
validator.fit(??????
با تشکر