from tensorflow.keras.preprocessing.image import ImageDataGenerator
# preprocess_input must match the base network fine-tuned below (ResNet50),
# so it is imported from the resnet50 module rather than mobilenet.
from tensorflow.keras.applications.resnet50 import preprocess_input

test_path = 'train_data/test/'
train_path = 'train_data/train/'
val_path = 'train_data/val/'

WIDTH = 224
HEIGHT = 224
BATCH_SIZE = 64

# Train DataSet generator (preprocessing only here; an augmentation sketch
# follows the model definition below)
print("\nTraining Data Set")
train_generator = ImageDataGenerator(preprocessing_function=preprocess_input)
train_flow = train_generator.flow_from_directory(
    train_path,
    target_size=(HEIGHT, WIDTH),
    batch_size=BATCH_SIZE
)

# Validation DataSet generator (preprocessing only)
print("\nValidation Data Set")
val_generator = ImageDataGenerator(preprocessing_function=preprocess_input)
val_flow = val_generator.flow_from_directory(
    val_path,
    target_size=(HEIGHT, WIDTH),
    batch_size=BATCH_SIZE
)

# Test DataSet generator (preprocessing only)
print("\nTest Data Set")
test_generator = ImageDataGenerator(preprocessing_function=preprocess_input)
test_flow = test_generator.flow_from_directory(
    test_path,
    target_size=(HEIGHT, WIDTH),
    batch_size=BATCH_SIZE
)

from tensorflow.keras.models import Model
from tensorflow.keras.callbacks import ModelCheckpoint, EarlyStopping, TensorBoard, CSVLogger
from tensorflow.keras import optimizers, models
from tensorflow.keras.layers import Dense, GlobalAveragePooling2D
from tensorflow.keras import applications
from tensorflow.keras import backend as K
import tensorflow as tf
import os

NUM_PARALLEL_EXEC_UNITS = 24

# Set performance parameters for MKL and TensorFlow using the Keras backend
# TensorFlow threading (TF 1.x API)
config = tf.ConfigProto(
    intra_op_parallelism_threads=NUM_PARALLEL_EXEC_UNITS,
    inter_op_parallelism_threads=1
)
session = tf.Session(config=config)
K.set_session(session)

# MKL and OpenMP
os.environ["OMP_NUM_THREADS"] = str(NUM_PARALLEL_EXEC_UNITS)
os.environ["KMP_BLOCKTIME"] = "1"
os.environ["KMP_SETTINGS"] = "1"
os.environ["KMP_AFFINITY"] = "granularity=fine,verbose,compact,1,0"

# Initialize ResNet50 for transfer learning: ImageNet weights, no classifier head
base_model = applications.ResNet50(weights='imagenet',
                                   include_top=False,
                                   input_shape=(WIDTH, HEIGHT, 3))

# add a global spatial average pooling layer
x = base_model.output
x = GlobalAveragePooling2D()(x)
# and a fully connected layer
x = Dense(1024, activation='relu')(x)
# final softmax over the classes found in the training directory
predictions = Dense(len(train_flow.class_indices), activation='softmax')(x)

# this is the model we will train
model = Model(inputs=base_model.input, outputs=predictions)
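# The three generators above only apply preprocess_input; no actual image
# augmentation is configured. As a minimal sketch (not part of the original
# listing, and the ranges below are illustrative assumptions), the training
# generator could be swapped for an augmenting one before training starts:
train_generator = ImageDataGenerator(
    preprocessing_function=preprocess_input,
    rotation_range=15,       # random rotations up to 15 degrees
    width_shift_range=0.1,   # horizontal shifts up to 10% of the image width
    height_shift_range=0.1,  # vertical shifts up to 10% of the image height
    horizontal_flip=True     # random left-right flips
)
train_flow = train_generator.flow_from_directory(
    train_path,
    target_size=(HEIGHT, WIDTH),
    batch_size=BATCH_SIZE
)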
# first: train only the top layers (which were randomly initialized),
# i.e. freeze all convolutional ResNet50 layers
for layer in base_model.layers:
    layer.trainable = False

# compile the model (should be done *after* setting layers to non-trainable)
model.compile(optimizer=optimizers.Adam(lr=0.001),
              metrics=['accuracy', 'top_k_categorical_accuracy'],
              loss='categorical_crossentropy')
model.summary()

import math

top_layers_file_path = "resnet50.hdf5"

checkpoint = ModelCheckpoint(top_layers_file_path, monitor='loss', verbose=1,
                             save_best_only=True, mode='min')
tb = TensorBoard(log_dir='./logs', batch_size=val_flow.batch_size,
                 write_graph=True, update_freq='batch')
early = EarlyStopping(monitor="loss", mode="min", patience=5)
csv_logger = CSVLogger('./logs/mn-log.csv', append=True)

history = model.fit_generator(train_flow,
                              epochs=1,
                              verbose=1,
                              validation_data=val_flow,
                              validation_steps=math.ceil(val_flow.samples / val_flow.batch_size),
                              steps_per_epoch=math.ceil(train_flow.samples / train_flow.batch_size),
                              callbacks=[checkpoint, early, tb, csv_logger])

# reload the best checkpointed weights and evaluate on the held-out test set
model.load_weights(top_layers_file_path)
loss, acc, top_5 = model.evaluate_generator(
    test_flow,
    verbose=1,
    steps=math.ceil(test_flow.samples / test_flow.batch_size))
print("Loss: ", loss)
print("Acc: ", acc)
print("Top 5: ", top_5)

# write the class labels, in index order, next to the model
label = [k for k, v in sorted(train_flow.class_indices.items(), key=lambda kv: kv[1])]
with open('labels.txt', 'w') as file:
    file.write("\n".join(label))

# Freeze the trained Keras model into a TensorFlow .pb graph (TF 1.x API)
from tensorflow.python.framework import graph_util
from tensorflow.python.framework import graph_io

input_model_path = top_layers_file_path
output_model_name = "resnet50.pb"
output_model_dir = "tf_model"

K.set_learning_phase(0)
sess = K.get_session()
test_model = models.load_model(input_model_path)
orig_output_node_names = [node.op.name for node in test_model.outputs]

constant_graph = graph_util.convert_variables_to_constants(
    sess,
    sess.graph.as_graph_def(),
    orig_output_node_names)

graph_io.write_graph(
    constant_graph,
    output_model_dir,
    output_model_name,
    as_text=False)
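# A minimal sketch (not part of the original listing) of loading the frozen
# graph back and classifying one image with it. The input tensor name and the
# image path are assumptions: Keras usually names the ResNet50 input "input_1",
# but verify the real node names in your graph before relying on this.
import numpy as np
from tensorflow.keras.preprocessing import image

frozen_graph_path = os.path.join(output_model_dir, output_model_name)
with tf.gfile.GFile(frozen_graph_path, "rb") as f:
    graph_def = tf.GraphDef()
    graph_def.ParseFromString(f.read())

with tf.Graph().as_default() as inference_graph:
    tf.import_graph_def(graph_def, name="")
    input_tensor = inference_graph.get_tensor_by_name("input_1:0")  # assumed node name
    output_tensor = inference_graph.get_tensor_by_name(orig_output_node_names[0] + ":0")

    # "sample.jpg" is a hypothetical test image
    img = image.load_img("sample.jpg", target_size=(HEIGHT, WIDTH))
    batch = preprocess_input(np.expand_dims(image.img_to_array(img), axis=0))

    with tf.Session(graph=inference_graph) as inference_sess:
        probs = inference_sess.run(output_tensor, feed_dict={input_tensor: batch})

with open('labels.txt') as f:
    class_names = f.read().splitlines()
print("Predicted class:", class_names[int(np.argmax(probs[0]))])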