initial commit

2025-12-19 18:01:44 +01:00
commit 1ab6c147da
4 changed files with 233 additions and 0 deletions

.gitignore vendored Normal file (+3)

@@ -0,0 +1,3 @@
zig-out/
.zig-cache/

build.zig Normal file (+38)

@@ -0,0 +1,38 @@
const std = @import("std");

pub fn build(b: *std.Build) void {
    const target = b.standardTargetOptions(.{});
    const optimize = b.standardOptimizeOption(.{});

    const exe = b.addExecutable(.{
        .name = "machine_learning",
        .root_module = b.createModule(.{
            .root_source_file = b.path("src/main.zig"),
            .target = target,
            .optimize = optimize,
            .imports = &.{},
        }),
    });
    b.installArtifact(exe);

    const run_step = b.step("run", "Run the app");
    const run_cmd = b.addRunArtifact(exe);
    run_step.dependOn(&run_cmd.step);
    run_cmd.step.dependOn(b.getInstallStep());
    if (b.args) |args| {
        run_cmd.addArgs(args);
    }

    const tests = b.addTest(.{
        .root_module = exe.root_module,
    });
    const run_tests = b.addRunArtifact(tests);
    const test_step = b.step("test", "Run tests");
    test_step.dependOn(&run_tests.step);
}
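
With this build script, zig build installs the executable to zig-out/, zig build run builds and runs it (arguments after -- are forwarded to the program via b.args), and zig build test compiles and runs the tests, which live in the same root module as the executable.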

build.zig.zon Normal file (+12)

@@ -0,0 +1,12 @@
.{
    .name = .machine_learning,
    .version = "0.0.0",
    .fingerprint = 0x46268de1d0f2801,
    .minimum_zig_version = "0.15.2",
    .dependencies = .{},
    .paths = .{
        "build.zig",
        "build.zig.zon",
        "src",
    },
}
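
A note on the manifest: .fingerprint uniquely identifies the package to the Zig package manager and should stay stable across versions, .paths lists the files included when the package is hashed, and .minimum_zig_version records the oldest toolchain the project is expected to build with.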

src/main.zig Normal file (+180)

@@ -0,0 +1,180 @@
const std = @import("std");
// Simple neural network that learns whether x > 50
const HIDDEN_SIZE = 4;
const LEARNING_RATE = 0.1;
const EPOCHS = 5000;
const TRAINING_SIZE = 100;
// Activation function: sigmoid
fn sigmoid(x: f64) f64 {
    return 1.0 / (1.0 + @exp(-x));
}

// Derivative of sigmoid for backpropagation
fn sigmoid_derivative(x: f64) f64 {
    const s = sigmoid(x);
    return s * (1.0 - s);
}
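
// Added sketch, not part of the original commit: a sanity check of the
// activation math above. sigmoid(0) = 0.5, so the derivative there is
// 0.5 * (1 - 0.5) = 0.25.
test "sigmoid and its derivative at zero" {
    try std.testing.expectApproxEqAbs(@as(f64, 0.5), sigmoid(0.0), 1e-12);
    try std.testing.expectApproxEqAbs(@as(f64, 0.25), sigmoid_derivative(0.0), 1e-12);
}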
// Forward pass result containing all intermediate values
const ForwardResult = struct {
    output: f64,
    hidden: [HIDDEN_SIZE]f64,
    hidden_raw: [HIDDEN_SIZE]f64,
    output_raw: f64,
};
// Neural network structure
const NeuralNetwork = struct {
    // Weights: input -> hidden
    weights_ih: [HIDDEN_SIZE]f64,
    bias_h: [HIDDEN_SIZE]f64,
    // Weights: hidden -> output
    weights_ho: [HIDDEN_SIZE]f64,
    bias_o: f64,

    fn init(rng: *std.Random) NeuralNetwork {
        var nn: NeuralNetwork = undefined;
        // Initialize weights randomly between -1 and 1
        for (&nn.weights_ih) |*w| {
            w.* = rng.float(f64) * 2.0 - 1.0;
        }
        for (&nn.weights_ho) |*w| {
            w.* = rng.float(f64) * 2.0 - 1.0;
        }
        // Initialize biases
        for (&nn.bias_h) |*b| {
            b.* = rng.float(f64) * 2.0 - 1.0;
        }
        nn.bias_o = rng.float(f64) * 2.0 - 1.0;
        return nn;
    }

    // Map an input in [0, 100] to [0.0, 1.0]
    fn normalize(input: u64) f64 {
        return @as(f64, @floatFromInt(input)) / 100.0;
    }

    fn forward(self: *NeuralNetwork, input: u64) ForwardResult {
        var result: ForwardResult = undefined;
        const normalized = normalize(input);
        // Hidden layer
        for (0..HIDDEN_SIZE) |i| {
            const sum = normalized * self.weights_ih[i] + self.bias_h[i];
            result.hidden_raw[i] = sum;
            result.hidden[i] = sigmoid(sum);
        }
        // Output layer
        var sum: f64 = 0.0;
        for (0..HIDDEN_SIZE) |i| {
            sum += result.hidden[i] * self.weights_ho[i];
        }
        sum += self.bias_o;
        result.output_raw = sum;
        result.output = sigmoid(sum);
        return result;
    }

    // Backward pass: update weights based on error
    fn backward(self: *NeuralNetwork, input: u64, target: f64, forward_result: ForwardResult) void {
        const normalized = normalize(input);
        const output_error = target - forward_result.output;
        const output_delta = output_error * sigmoid_derivative(forward_result.output_raw);
        var hidden_error: [HIDDEN_SIZE]f64 = undefined;
        var hidden_delta: [HIDDEN_SIZE]f64 = undefined;
        // Calculate hidden layer errors and deltas
        for (0..HIDDEN_SIZE) |i| {
            hidden_error[i] = output_delta * self.weights_ho[i];
            hidden_delta[i] = hidden_error[i] * sigmoid_derivative(forward_result.hidden_raw[i]);
        }
        // Update output layer weights
        for (0..HIDDEN_SIZE) |i| {
            self.weights_ho[i] += LEARNING_RATE * output_delta * forward_result.hidden[i];
        }
        self.bias_o += LEARNING_RATE * output_delta;
        // Update hidden layer weights
        for (0..HIDDEN_SIZE) |i| {
            self.weights_ih[i] += LEARNING_RATE * hidden_delta[i] * normalized;
            self.bias_h[i] += LEARNING_RATE * hidden_delta[i];
        }
    }

    fn train(self: *NeuralNetwork, input: u64, target: f64) f64 {
        const forward_result = self.forward(input);
        self.backward(input, target, forward_result);
        const err = target - forward_result.output;
        // Return squared error
        return err * err;
    }
};
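
// Added sketch, not part of the original commit: a quick property test.
// Repeated gradient steps on one fixed example should shrink the squared
// error; this assumes the single-sample update converges at LEARNING_RATE,
// which holds for a lone sigmoid output trained on one example.
test "training on one example reduces its squared error" {
    var prng = std.Random.DefaultPrng.init(42);
    var rng = prng.random();
    var nn = NeuralNetwork.init(&rng);
    const first = nn.train(80, 1.0);
    var last = first;
    for (0..200) |_| {
        last = nn.train(80, 1.0);
    }
    try std.testing.expect(last < first);
}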
pub fn main() !void {
    var prng = std.Random.DefaultPrng.init(@intCast(std.time.timestamp()));
    var rng = prng.random();
    std.debug.print("Training neural network to learn if x > 50...\n\n", .{});
    var nn = NeuralNetwork.init(&rng);

    // Training loop
    var epoch: u64 = 0;
    while (epoch < EPOCHS) : (epoch += 1) {
        var total_loss: f64 = 0.0;
        var i: u64 = 0;
        while (i < TRAINING_SIZE) : (i += 1) {
            const num = rng.intRangeAtMost(u64, 0, 100);
            const target: f64 = if (num > 50) 1.0 else 0.0;
            const loss = nn.train(num, target);
            total_loss += loss;
        }
        // Print progress every 500 epochs
        if (@mod(epoch, 500) == 0) {
            const avg_loss = total_loss / @as(f64, @floatFromInt(TRAINING_SIZE));
            std.debug.print("Epoch {d}: Average Loss = {d:.6}\n", .{ epoch, avg_loss });
        }
    }
    std.debug.print("\nTraining complete!\n\n", .{});
    std.debug.print("Enter x to test if x > 50\n", .{});

    const stdin = std.fs.File.stdin();
    var buf: [4096]u8 = undefined;
    var reader = stdin.reader(&buf);
    while (true) {
        // Peek at the next line (up to, not including, the newline), then
        // discard the buffered input; leave the loop cleanly at end of input.
        const data = reader.interface.peekDelimiterExclusive('\n') catch break;
        reader.interface.tossBuffered();
        const int = std.fmt.parseInt(u64, data, 10) catch {
            std.debug.print("Invalid input\n", .{});
            continue;
        };
        const result = nn.forward(int);
        const prediction = result.output > 0.5;
        std.debug.print("Is {} > 50? {} ({d:.6})\n", .{ int, prediction, result.output });
    }
    std.debug.print("\nGoodbye!\n", .{});
}