"""Train a CNN letter classifier on pre-extracted 32x32 grayscale images.

Loads images and labels from .npy files, trains a small convolutional
network with early stopping, reports held-out test accuracy, and saves
the trained model to ``model.keras``.
"""

import numpy as np
from sklearn.model_selection import train_test_split
from tensorflow import keras
from tensorflow.keras.callbacks import EarlyStopping
from tensorflow.keras.optimizers import Adam
from tensorflow.keras.utils import to_categorical

# Load input data: images and their letter labels.
X = np.load('images_big.npy')
Y = np.load('labels_big.npy')

# Map the raw labels to integer class indices; unique_labels[i] is the
# original label for class i (used below to size the output layer).
unique_labels, Y_numeric = np.unique(Y, return_inverse=True)

# Reshape to the 4D layout Keras expects: (samples, rows, cols, channels).
# NOTE(review): pixel values are used exactly as loaded; if they are raw
# 0-255 uint8, consider scaling to [0, 1] first — TODO confirm input range.
X = X.reshape(-1, 32, 32, 1)
Y_categorical = to_categorical(Y_numeric)  # one-hot encode the labels

# Hold out 10% as the final test set, then carve a further validation
# split off the remaining training data so early stopping never sees the
# test set. (Previously the test set doubled as the validation set, which
# leaks it into model selection and inflates the reported test accuracy.)
X_train, X_test, Y_train, Y_test = train_test_split(
    X, Y_categorical, test_size=0.1, random_state=42
)
X_train, X_val, Y_train, Y_val = train_test_split(
    X_train, Y_train, test_size=0.1, random_state=42
)

# Small CNN: two conv / batch-norm / max-pool stages, then a dense head.
model = keras.Sequential([
    keras.layers.Conv2D(64, (3, 3), activation='relu', input_shape=(32, 32, 1)),
    keras.layers.BatchNormalization(),
    keras.layers.MaxPooling2D(pool_size=(2, 2)),
    keras.layers.Conv2D(128, (3, 3), activation='relu'),
    keras.layers.BatchNormalization(),
    keras.layers.MaxPooling2D(pool_size=(2, 2)),
    keras.layers.Flatten(),
    keras.layers.Dense(256, activation='relu'),
    keras.layers.Dense(len(unique_labels), activation='softmax'),  # one unit per class
])

optimizer = Adam(learning_rate=0.001)
# Stop once validation loss plateaus for 5 epochs and roll back to the
# best weights seen so far.
early_stopping = EarlyStopping(
    monitor='val_loss', patience=5, restore_best_weights=True
)

# Categorical cross-entropy matches the one-hot encoded labels above.
model.compile(
    optimizer=optimizer,
    loss='categorical_crossentropy',
    metrics=['accuracy'],
)
model.summary()

model.fit(
    X_train, Y_train,
    epochs=20,
    batch_size=128,
    validation_data=(X_val, Y_val),
    callbacks=[early_stopping],
)

# Final, unbiased evaluation on the untouched test set.
test_loss, test_accuracy = model.evaluate(X_test, Y_test)
print(f'Testgenauigkeit: {test_accuracy:.4f}')

model.save("model.keras")