Commit 498ab90 (1 parent: 420faa7)
Showing 9 changed files with 329 additions and 0 deletions.
@@ -0,0 +1,130 @@
import h5py
import tensorflow as tf
import numpy as np
import sys
from tensorflow.python.keras.models import load_model
from tensorflow.python.keras.applications.resnet50 import ResNet50, preprocess_input
from tensorflow.python.keras.preprocessing.image import load_img
from tensorflow.python.keras import backend as K
from scipy.misc import imresize  # removed in SciPy >= 1.3; see note below
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
from math import sqrt, floor, ceil


def plot_layer(model):
    """Plot one channel of every layer's output for the global `img`."""
    layer_num = len(model.layers)
    print(layer_num)
    x = floor(sqrt(layer_num))
    y = ceil(layer_num / x)
    for i in range(layer_num):
        extract_layer = K.function([model.layers[0].input], [model.layers[i].output])
        f = extract_layer([img])[0]
        print(f.shape)
        show_img = f[:, :, :, 1]
        show_img.shape = (f.shape[1], f.shape[2])
        plt.subplot(x, y, i + 1)
        plt.imshow(show_img, cmap='gray')
        plt.axis('off')
    plt.show()


def plot_feature(model, layer):
    """Plot every feature map produced by one layer for the global `img`."""
    extract_layer = K.function([model.layers[0].input], [model.layers[layer].output])
    f = extract_layer([img])[0]
    print(f.shape)
    x = floor(sqrt(f.shape[-1]))
    y = ceil(f.shape[-1] / x)
    print(x, y)
    for i in range(f.shape[-1]):
        show_img = f[:, :, :, i]
        show_img.shape = (f.shape[1], f.shape[2])
        plt.subplot(x, y, i + 1)
        plt.imshow(show_img, cmap='gray')
        plt.axis('off')
    plt.show()


def plot_filter(model, input_img):
    """Plot the 64 feature maps produced by the first convolution layer."""
    extract_layer = K.function([model.layers[0].input], [model.layers[1].output])
    f = extract_layer([input_img])[0]
    print(f.shape)

    for i in range(64):
        show_img = f[:, :, :, i]
        show_img.shape = (f.shape[1], f.shape[2])
        plt.subplot(8, 8, i + 1)
        plt.imshow(show_img, cmap='gray')
        plt.axis('off')
    plt.show()


def decode_predictions(preds, labels, top=5):
    """Return the top-N (label, probability) pairs for each prediction row."""
    results = []
    for pred in preds:
        top_indices = pred.argsort()[-top:][::-1]
        result = [(labels[i], pred[i]) for i in top_indices]
        results.append(result)
    return results


def show_labels(image, pred, name):
    """Show a horizontal bar chart of the top predictions next to the image."""
    fig = plt.figure(figsize=(15, 5))
    gs = gridspec.GridSpec(1, 2, width_ratios=[1, 1])
    ax1 = plt.subplot(gs[0])
    x = [pred[i][0] for i in range(len(pred))][::-1]
    y = [pred[i][1] * 100 for i in range(len(pred))][::-1]
    print(x, y)
    colors = ['#edf8fb', '#b2e2e2', '#66c2a4', '#2ca25f', '#006d2c']
    width = 0.4
    ind = np.arange(len(y))
    ax1.barh(ind, y, width, align='center', color=colors)
    ax1.set_yticks(ind + width / 2)
    ax1.set_yticklabels(x, minor=False, fontsize=10)
    for i, v in enumerate(y):
        ax1.text(v + 1, i, '%5.2f%%' % v)
    plt.title('Probability Output')
    ax2 = plt.subplot(gs[1])
    ax2.axis('off')
    ax2.imshow(image)
    plt.title(name)
    plt.show()


def get_weights(epoch, logs):
    """Example end-of-epoch callback (not used below); indexOfTheConvLayer is a placeholder."""
    wsAndBs = model.layers[indexOfTheConvLayer].get_weights()
    # or model.get_layer("layerName").get_weights()

    weights = wsAndBs[0]
    biases = wsAndBs[1]
    # do what you need to do with them
    # you can see the epoch and the logs too:
    print("end of epoch: " + str(epoch))  # for instance


image_name = sys.argv[1]
label_name = sys.argv[2]

labels = [line.strip() for line in open(label_name, 'r')]

image = np.array(load_img(image_name))
img = preprocess_input(np.array([imresize(image, (200, 200, 3))]).astype('float32'))

model = ResNet50(weights='imagenet', include_top=False, input_shape=(200, 200, 3))
# model.summary()
input_img = model.input
print(input_img.shape)

for i in range(len(model.layers[:10])):
    print(model.layers[i].name)
    weights = model.layers[i].get_weights()
    print([w.shape for w in weights])
    # plot_feature(model, i)

# feed the preprocessed image batch, not the symbolic model.input tensor
plot_filter(model, img)

bottleneck_feature = model.predict(img)

print(bottleneck_feature.shape)

model = load_model('resnet_model.h5')
# model.summary()

pred = model.predict(bottleneck_feature)
result = decode_predictions(pred, labels, 5)
print(result[0][0])

# show_labels(image, result[0], image_name)
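Note: scipy.misc.imresize was deprecated and then removed in SciPy 1.3, so the resize call above fails on a current SciPy install. A minimal drop-in sketch using Pillow, assuming uint8 RGB input and the same (200, 200) target used in this script, could look like this; the helper name imresize is only kept so the call sites stay unchanged:

import numpy as np
from PIL import Image

def imresize(arr, size):
    # size is (height, width[, channels]); PIL's resize takes (width, height)
    return np.array(Image.fromarray(np.uint8(arr)).resize((size[1], size[0])))

# hypothetical usage matching the script above:
# img = preprocess_input(np.array([imresize(image, (200, 200, 3))]).astype('float32'))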
@@ -0,0 +1,46 @@
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt
from tensorflow.python.keras.models import load_model
from sklearn.metrics import classification_report, confusion_matrix
import seaborn as sn
import pandas as pd


def decode_predictions(preds, labels, top=5):
    """Return the top-N (label, probability) pairs for each prediction row."""
    results = []
    for pred in preds:
        top_indices = pred.argsort()[-top:][::-1]
        result = [(labels[i], pred[i]) for i in top_indices]
        results.append(result)
    return results


labels = [line.strip() for line in open('label.txt', 'r')]

# Evaluate on all saved bottleneck features (train + test); labels are one-hot,
# so recover the class index per sample with argmax.
X_test = np.concatenate([np.load('resnet_features_train.npy'),
                         np.load('resnet_features_test.npy')])
y_test = np.concatenate([np.load('resnet_labels_train.npy'),
                         np.load('resnet_labels_test.npy')])
y_test = y_test.tolist()
y_test = np.array([np.argmax(np.array(i)) for i in y_test])

model = load_model('resnet_model.h5')
model.summary()

# score = model.evaluate(X_test, y_test)
# print('Accuracy on the Test Images: ', score[1])

y_pred = model.predict_classes(X_test)
print(classification_report(y_test, y_pred, target_names=labels))
cm = confusion_matrix(y_test, y_pred)
df_cm = pd.DataFrame(cm, index=labels, columns=labels)
print(df_cm)
df_cm.to_csv('cm')
plt.figure()
sn.heatmap(df_cm, annot=True)
plt.show()

# pred = model.predict(X_test)
# result = decode_predictions(pred, labels, 1)
# print(result)
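Note: Sequential.predict_classes only exists in older tf.keras releases (it was removed in TensorFlow 2.6). If this evaluation has to run on a newer stack, a small sketch of the equivalent arg-max over the softmax output, assuming the same loaded model and feature arrays, would be:

import numpy as np

# class probabilities from the softmax head, then the most likely class per sample
probs = model.predict(X_test)
y_pred = np.argmax(probs, axis=1)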
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -0,0 +1,48 @@
from sklearn.manifold import TSNE
from sklearn.utils import shuffle
import itertools
import numpy as np
import matplotlib
import matplotlib.pyplot as plt
import seaborn as sns

palette = np.array(sns.color_palette("hls", 21))

# Load the saved bottleneck features and one-hot labels, then recover class indices.
X = np.concatenate([np.load('resnet_features_train.npy'),
                    np.load('resnet_features_test.npy')])
y = np.concatenate([np.load('resnet_labels_train.npy'),
                    np.load('resnet_labels_test.npy')])
y = y.tolist()
y = np.array([np.argmax(np.array(i)) for i in y])
print(y)

style_labels = list(np.loadtxt('label.txt', str, delimiter='\n'))
print(style_labels)
X = X.reshape((-1, 2048))

X, y = shuffle(X, y, random_state=0)

# X = X[0:1000]
# y = y[0:1000]
print(X.shape, y.shape)
X_tsne = TSNE(n_components=2, early_exaggeration=10.0,
              random_state=20180705).fit_transform(X)
# X_tsne = PCA().fit_transform(X)
print(X_tsne.shape)

print('end')
# fig = plt.figure()
# ax = Axes3D(fig)

# cycle through matplotlib's filled markers, one marker style per class
markers = itertools.cycle(matplotlib.markers.MarkerStyle.filled_markers)

f = plt.figure(figsize=(15, 5))
ax = plt.subplot(aspect='equal')
print(X_tsne)
for i in range(21):
    ax.scatter(X_tsne[y == i, 0], X_tsne[y == i, 1],
               marker=next(markers), c=palette[i], label=style_labels[i])
plt.legend(loc=2, numpoints=1, ncol=2, fontsize=12, bbox_to_anchor=(1.05, 0.8))
ax.axis('off')
plt.savefig('t_sne.png')
plt.show()
@@ -0,0 +1,105 @@
import tensorflow as tf
import numpy as np
import os
from tensorflow.python.keras.datasets import cifar10
from tensorflow.python.keras.callbacks import ModelCheckpoint
from tensorflow.python.keras.utils import to_categorical
from tensorflow.python.keras.models import Sequential
from tensorflow.python.keras.layers import Dense, Conv2D, MaxPooling2D
from tensorflow.python.keras.layers import Dropout, Flatten, GlobalAveragePooling2D
from tensorflow.python.keras.backend import clear_session
from PIL import Image
from sklearn.preprocessing import LabelBinarizer
from sklearn.model_selection import StratifiedShuffleSplit
from tensorflow.python.keras.applications.resnet50 import ResNet50, preprocess_input
from scipy.misc import imresize

data_dir = 'images/'
contents = [item for item in os.listdir(data_dir) if not item.startswith('.')]
classes = [each for each in contents if os.path.isdir(data_dir + each)]

batch = []
labels = []

# Load every image, resize it to 224x224 and remember its class name.
for each in classes:
    print("Starting {} images".format(each))
    class_path = data_dir + each
    files = os.listdir(class_path)
    for ii, file in enumerate(files, 1):
        img = tf.keras.preprocessing.image.load_img(os.path.join(class_path, file)).resize((224, 224), Image.ANTIALIAS)
        img = np.array(img)
        batch.append(img.reshape((1, 224, 224, 3)))
        labels.append(each)

codes = np.concatenate(batch)

# Turn the class names into integer indices.
lb = LabelBinarizer()
lb.fit(labels)
labels_vecs = lb.transform(labels)
labels_vecs = np.where(labels_vecs == 1)[1].reshape((-1, 1))

ss = StratifiedShuffleSplit(n_splits=1, test_size=0.2)

train_idx, test_idx = next(ss.split(codes, labels))

X_train, y_train = codes[train_idx], labels_vecs[train_idx]
X_test, y_test = codes[test_idx], labels_vecs[test_idx]

print("There are {} train images and {} test images.".format(X_train.shape[0], X_test.shape[0]))
print('There are {} unique classes to predict.'.format(np.unique(y_train).shape[0]))

# One-hot encoding the labels
num_classes = 21
y_train = to_categorical(y_train, num_classes)
y_test = to_categorical(y_test, num_classes)

# Creating a checkpointer
checkpointer = ModelCheckpoint(filepath='scratchmodel.best.hdf5',
                               verbose=1, save_best_only=True)

# Loading the ResNet50 model with pre-trained ImageNet weights
model = ResNet50(weights='imagenet', include_top=False, input_shape=(200, 200, 3))

# Reshaping the training data
X_train_new = np.array([imresize(X_train[i], (200, 200, 3)) for i in range(0, len(X_train))]).astype('float32')

# Preprocessing the data, so that it can be fed to the pre-trained ResNet50 model.
resnet_train_input = preprocess_input(X_train_new)

# Creating bottleneck features for the training data
train_features = model.predict(resnet_train_input)

# Saving the bottleneck features as a plain .npy array, matching the
# np.load('resnet_features_train.npy') calls in the evaluation and t-SNE scripts.
np.save('resnet_features_train.npy', train_features)

# Reshaping the testing data
X_test_new = np.array([imresize(X_test[i], (200, 200, 3)) for i in range(0, len(X_test))]).astype('float32')

# Preprocessing the data, so that it can be fed to the pre-trained ResNet50 model.
resnet_test_input = preprocess_input(X_test_new)

# Creating bottleneck features for the testing data
test_features = model.predict(resnet_test_input)

# Saving the bottleneck features
np.save('resnet_features_test.npy', test_features)

# Saving the one-hot labels, which the evaluation and t-SNE scripts load as
# resnet_labels_train.npy / resnet_labels_test.npy.
np.save('resnet_labels_train.npy', y_train)
np.save('resnet_labels_test.npy', y_test)

# Small classifier trained on top of the bottleneck features.
model = Sequential()
model.add(GlobalAveragePooling2D(input_shape=train_features.shape[1:]))
model.add(Dropout(0.3))
model.add(Dense(num_classes, activation='softmax'))
model.summary()

model.compile(loss='categorical_crossentropy', optimizer='adam',
              metrics=['accuracy'])

model.fit(train_features, y_train, batch_size=32, epochs=10,
          validation_split=0.2, callbacks=[checkpointer], verbose=1, shuffle=True)

# Evaluate the model on the test data
score = model.evaluate(test_features, y_test)

# Accuracy on test data
print('Accuracy on the Test Images: ', score[1])

# Save the trained classifier under the name the other scripts load.
model.save('resnet_model.h5')

clear_session()