Creating a Neural Network Based Mobile App with a Neural Network Designed and Trained in Google Colab

Having done a master's degree in Artificial Intelligence and then switched my attention to writing cross-platform mobile apps, I now want to bring my focus back to AI while continuing to work on .NET MAUI. So I'm doing some experimental work creating and training neural networks in Python/Keras on Google Colab and then exporting them into .onnx files so they can be deployed into .NET MAUI apps — seeing what's possible before deciding what interesting NN-based apps I might want to make.

Stephen Moreton-Howell

5/8/2024 · 2 min read

Getting Started

I started with a very simple neural network in Google Colab:

import tensorflow as tf

from tensorflow import keras

import numpy as np

# Hard-coded training data for a toy "sum classifier".
# Each example is 10 numbers (features); the target class is decided by
# the sum of the row:
#   0 = Low    (sum < 30)
#   1 = Medium (30 <= sum < 70)
#   2 = High   (sum >= 70)
X_train = np.array([
    [1, 2, 3, 4, 5, 6, 7, 8, 9, 10],           # sum = 55  -> Medium
    [0, 1, 0, 1, 0, 1, 0, 1, 0, 1],            # sum = 5   -> Low
    [10, 10, 10, 10, 10, 10, 10, 10, 10, 10],  # sum = 100 -> High
    [2, 2, 2, 2, 2, 2, 2, 2, 2, 2],            # sum = 20  -> Low
    [5, 5, 5, 5, 5, 5, 5, 5, 5, 5],            # sum = 50  -> Medium
    [8, 9, 8, 9, 8, 9, 8, 9, 8, 9],            # sum = 85  -> High
    [1, 1, 1, 1, 1, 1, 1, 1, 1, 1],            # sum = 10  -> Low
    [5, 5, 5, 5, 5, 5, 5, 5, 5, 5],            # sum = 50  -> Medium (original comment said 45)
    [9, 9, 9, 9, 9, 9, 9, 9, 9, 9],            # sum = 90  -> High
    [3, 3, 3, 3, 3, 3, 3, 3, 3, 3],            # sum = 30  -> Medium (30 is the Low/Medium boundary)
    [1, 2, 3, 4, 5, 6, 7, 8, 9, 10],           # sum = 55  -> Medium
    [0, 1, 0, 1, 0, 1, 0, 1, 0, 1],            # sum = 5   -> Low
    [5, 5, 5, 5, 5, 5, 5, 5, 5, 5],            # sum = 50  -> Medium
    [2, 2, 2, 2, 2, 2, 2, 2, 2, 2],            # sum = 20  -> Low
    [5, 5, 5, 5, 5, 5, 5, 5, 5, 5],            # sum = 50  -> Medium
    [8, 9, 8, 9, 8, 9, 8, 9, 8, 9],            # sum = 85  -> High
    [1, 1, 1, 1, 1, 1, 1, 1, 1, 1],            # sum = 10  -> Low
    [5, 5, 5, 5, 5, 5, 5, 5, 5, 5],            # sum = 50  -> Medium (original comment said 45)
    [9, 9, 9, 9, 9, 9, 9, 9, 9, 9],            # sum = 90  -> High
    [3, 3, 3, 3, 3, 3, 3, 3, 3, 3],            # sum = 30  -> Medium
], dtype=np.float32)

# Derive the labels from the rule above instead of hand-typing them; this
# guarantees the labels can never drift out of sync with the data.
# (Produces the same values as the original hand-written array:
#  [1, 0, 2, 0, 1, 2, 0, 1, 2, 1, 1, 0, 1, 0, 1, 2, 0, 1, 2, 1].)
row_sums = X_train.sum(axis=1)
y_train = np.select([row_sums < 30, row_sums < 70], [0, 1], default=2).astype(np.int32)

# Human-readable names for the three class ids.
CLASS_NAMES = ('Low', 'Medium', 'High')

print("Training Data Shape:", X_train.shape)
print("Training Labels Shape:", y_train.shape)
print("\nFirst training example:")
print("Input:", X_train[0])
print("Label:", y_train[0], f"({CLASS_NAMES[y_train[0]]})")

# Define a small fully connected classifier:
# 10 inputs -> Dense(128, relu) -> Dropout(0.2) -> Dense(64, relu)
# -> 3-way softmax over the classes (0=Low, 1=Medium, 2=High).
model = keras.Sequential([
keras.layers.Dense(128, activation='relu', input_shape=(10,)),
keras.layers.Dropout(0.2),
keras.layers.Dense(64, activation='relu'),
keras.layers.Dense(3, activation='softmax') # 3 output classes
])
# Compile the model. sparse_categorical_crossentropy is used because
# y_train holds integer class ids, not one-hot vectors.
model.compile(
optimizer='adam',
loss='sparse_categorical_crossentropy',
metrics=['accuracy']
)
# Train on the full (tiny) dataset; no validation split is used.
history = model.fit(X_train, y_train, epochs=100, verbose=1)
# Sanity-check the trained model on one hand-made example.
# NOTE(review): this row sums to 50, so the expected class is Medium (1).
# The original comment claiming "Sum = 10 (should be Low)" was incorrect.
test_input = np.array([[5, 5, 5, 5, 5, 5, 5, 5, 5, 5]], dtype=np.float32) # Sum = 50 (should be Medium)
prediction = model.predict(test_input)
predicted_class = np.argmax(prediction)
print("\nTest prediction:")
print("Input:", test_input[0])
print("Predictions:", prediction[0])
print("Predicted class:", predicted_class, "(0=Low, 1=Medium, 2=High)")

# Save the model

converter = tf.lite.TFLiteConverter.from_keras_model(model)

tflite_model = converter.convert()

with open('model.tflite', 'wb') as f:

f.write(tflite_model)

from google.colab import files

files.download('model.tflite')

# Save the model as ONNX

!pip install tf2onnx

# Convert to ONNX

!python -m tf2onnx.convert --keras saved_model.keras --output my_model.onnx

# Download

from google.colab import files

files.download('my_model.onnx')