# Classification d'images avec Python, TensorFlow et un réseau de neurones
#
# Pour créer des images à partir d'une vidéo / to create pictures from a video:
#   ffmpeg -i mavideo.mp4 -vf fps=10 image_%d.jpg
import numpy as np
import os
from matplotlib import pyplot as plt
import cv2
import random
import pickle
file_list = []
class_list = []

# Dataset locations.  NOTE(review): absolute paths rooted at '/' — these look
# like notebook/Drive mount points; adjust for the local machine.
DATASETDIR = '/dataset_des/'
DATADIR = '/dataset_des/data'

# Catégories / categories: one class per sub-directory of DATADIR.
# Sorted so the class index is stable across runs (the original appended in a
# loop whose variable shadowed the builtin `dir`).
CATEGORIES = sorted(os.listdir(DATADIR))
print(CATEGORIES)

# Taille des images / size of the images (every image is resized to
# IMG_SIZE x IMG_SIZE before training and inference).
IMG_SIZE = 50
# Read-check pass over every image in every category directory.
# NOTE(review): the loaded arrays are discarded immediately, so this loop is
# pure leftover preview code — it only verifies the files open without error.
# Consider removing it to speed up startup.
for category in CATEGORIES:
    path = os.path.join(DATADIR, category)
    for img in os.listdir(path):
        img_array = cv2.imread(os.path.join(path, img), cv2.IMREAD_GRAYSCALE)
training_data = []

def create_training_data():
    """Populate the global ``training_data`` list with [image, class_index] pairs.

    Every image under ``DATADIR/<category>`` is loaded as grayscale and
    resized to ``IMG_SIZE`` x ``IMG_SIZE``.  Unreadable or corrupt files are
    skipped (best-effort, like the original, but no longer with a silent
    catch-all ``except``).
    """
    for category in CATEGORIES:
        path = os.path.join(DATADIR, category)
        class_num = CATEGORIES.index(category)
        for img in os.listdir(path):
            img_array = cv2.imread(os.path.join(path, img), cv2.IMREAD_GRAYSCALE)
            # cv2.imread returns None (no exception) for unreadable files;
            # cv2.resize would then raise — skip those images explicitly.
            if img_array is None:
                continue
            try:
                new_array = cv2.resize(img_array, (IMG_SIZE, IMG_SIZE))
            except cv2.error:
                continue
            training_data.append([new_array, class_num])
create_training_data()

# Shuffle so that training batches mix classes instead of arriving
# category-by-category.
random.shuffle(training_data)

X = []
y = []
for features, label in training_data:
    X.append(features)
    y.append(label)

# Shape: (num_samples, IMG_SIZE, IMG_SIZE, 1) — one grayscale channel.
X = np.array(X).reshape(-1, IMG_SIZE, IMG_SIZE, 1)

# Persist the prepared dataset; `with` guarantees the files are closed
# (the original paired explicit open()/close() calls).
with open(os.path.join(DATASETDIR, "X.pickle"), "wb") as pickle_out:
    pickle.dump(X, pickle_out)
with open(os.path.join(DATASETDIR, "y.pickle"), "wb") as pickle_out:
    pickle.dump(y, pickle_out)
import tensorflow as tf
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Dropout, Activation, Flatten, Conv2D, MaxPooling2D
import pickle
from keras.models import model_from_json
from keras.models import load_model
import matplotlib.pyplot as plt
# Reload the prepared dataset.  `with` closes the files — the original
# `pickle.load(open(...))` pattern leaked the file handles.
with open(os.path.join(DATASETDIR, "X.pickle"), "rb") as f:
    X = pickle.load(f)
with open(os.path.join(DATASETDIR, "y.pickle"), "rb") as f:
    y = pickle.load(f)

# Scale pixel values from [0, 255] to [0.0, 1.0].
X = X / 255.0
# Construction du modèle / building the model: three Conv+ReLU+MaxPool stages,
# then dropout, two fully connected ReLU layers, and a softmax over the classes.
model = Sequential()

for stage, n_filters in enumerate((32, 64, 64)):
    if stage == 0:
        # First layer carries the input shape: (IMG_SIZE, IMG_SIZE, 1).
        model.add(Conv2D(n_filters, (3, 3), input_shape=X.shape[1:]))
    else:
        model.add(Conv2D(n_filters, (3, 3)))
    model.add(Activation("relu"))
    model.add(MaxPooling2D(pool_size=(2, 2)))

model.add(Dropout(0.25))
model.add(Flatten())

for _ in range(2):
    model.add(Dense(128))
    model.add(Activation("relu"))

# One output unit per category, softmax-normalised into probabilities.
model.add(Dense(len(CATEGORIES)))
model.add(Activation("softmax"))
# Compilation du modèle / compiling the model.
# sparse_categorical_crossentropy expects integer class labels (no one-hot).
model.compile(loss="sparse_categorical_crossentropy",
              optimizer="adam",
              metrics=["accuracy"])

# Entrainement du modèle / training the model (80% train - 20% validation).
# `y` was unpickled as a plain Python list; recent TensorFlow versions reject
# list labels in `fit`, so convert to a NumPy array once here.
y = np.array(y)
history = model.fit(X, y, batch_size=32, epochs=10, validation_split=0.2)
# Sauvegarde du modèle / saving the model three ways: architecture as JSON,
# weights as HDF5, and the complete SavedModel directory.
with open(os.path.join(DATASETDIR, "model.json"), "w") as json_file:
    json_file.write(model.to_json())
model.save_weights(os.path.join(DATASETDIR, "model.h5"))
print("Saved model to disk")
model.save(os.path.join(DATASETDIR, "model"))
print(history.history.keys())

# Courbes de précision / accuracy curves for train and validation.
# TF 1.x logged the metric under 'acc'/'val_acc'; TF 2.x uses
# 'accuracy'/'val_accuracy' — the original hard-coded the TF1 keys and
# raises KeyError on TF2.  Pick whichever key is actually present.
acc_key = 'acc' if 'acc' in history.history else 'accuracy'
plt.figure(1)
plt.plot(history.history[acc_key])
plt.plot(history.history['val_' + acc_key])
plt.title('model accuracy')
plt.ylabel('accuracy')
plt.xlabel('epoch')
plt.legend(['train', 'validation'], loc='lower right')
import cv2
import tensorflow as tf
from keras.models import load_model
import pickle
from google.colab.patches import cv2_imshow
import matplotlib.pyplot as plt
import numpy as np
# Reload the trained network from the SavedModel directory written above.
model = tf.keras.models.load_model(os.path.join(DATASETDIR, "model"))
def prepare(file):
    """Load an image file as a (1, IMG_SIZE, IMG_SIZE, 1) float batch.

    Mirrors the training preprocessing: grayscale, pixel values scaled to
    [0, 1], resized to IMG_SIZE x IMG_SIZE.

    Raises:
        ValueError: if the file cannot be read as an image.
    """
    IMG_SIZE = 50
    img_array = cv2.imread(file, cv2.IMREAD_GRAYSCALE)
    # cv2.imread returns None instead of raising on a bad path/file.
    if img_array is None:
        raise ValueError(f"cannot read image: {file}")
    # Bug fix: astype() returns a new array — the original discarded the
    # result, so the cast to float32 never took effect.
    img_array = img_array.astype('float32')
    img_array = img_array / 255
    new_array = cv2.resize(img_array, (IMG_SIZE, IMG_SIZE))
    return new_array.reshape(-1, IMG_SIZE, IMG_SIZE, 1)
# Tests et résultats / tests and results
image = os.path.join(DATASETDIR, "tests/de_2.jpg")
# prepare() already returns a batch of shape (1, 50, 50, 1); the original's
# extra list wrapper around it was redundant.
prediction = model.predict(prepare(image))
prediction_list = list(prediction[0])

# Per-class probability printout.
for category, score in zip(CATEGORIES, prediction_list):
    print(f'{category} : {score*100:.2f} %')
print('Valeur du dé : ', CATEGORIES[prediction_list.index(max(prediction_list))])

# Side-by-side figure: the test image and a horizontal bar chart of the scores.
fig = plt.figure()
fig.set_figwidth(6)
fig.set_figheight(3)

plt.subplot(1, 2, 1)
plt.imshow(cv2.imread(image))
plt.gca().set_aspect('equal', adjustable='datalim')

plt.subplot(1, 2, 2)
pourcents = [p * 100 for p in prediction_list]
plt.barh(CATEGORIES, pourcents, align='center', color='#2980b9')
plt.xlim((0, 100))
for i, v in enumerate(pourcents):
    # Label inside the bar (white) when it fits, otherwise just after it.
    if v > 70:
        plt.text(v - 20, i - 0.1, f'{v:.1f} %', color='white', fontsize=8)
    else:
        plt.text(v + 4, i - 0.1, f'{v:.1f} %', color='#7f8c8d', fontsize=8)
plt.tight_layout()
plt.show()