Put your Vanilla code here
by hbyte » Fri Jan 12, 2024 11:25 pm
Code:
#include <stdio.h>
#include <stdlib.h>
#include <math.h>

#define INPUT_SIZE 2
#define HIDDEN_SIZE 4
#define OUTPUT_SIZE 1
#define LEARNING_RATE 0.1
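/* Network topology: INPUT_SIZE inputs -> HIDDEN_SIZE sigmoid hidden
   units -> OUTPUT_SIZE sigmoid output, i.e. a 2-4-1 network for XOR. */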
typedef struct {
    double *weights;
    double *biases;
    double *outputs;
    double *errors;
    int input_size;
    int output_size;
} Layer;

typedef struct {
    Layer input_layer;
    Layer hidden_layer;
    Layer output_layer;
} NeuralNetwork;

double sigmoid(double x) {
    return 1.0 / (1.0 + exp(-x));
}
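/* The backward pass relies on the sigmoid derivative
   sigmoid'(x) = sigmoid(x) * (1 - sigmoid(x)).
   Since each layer stores sigmoid(x) in outputs[], the code below can
   write the derivative as outputs[i] * (1 - outputs[i]). */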
void initialize_layer(Layer *layer, int input_size, int output_size) {
    layer->input_size = input_size;
    layer->output_size = output_size;

    // Allocate memory for weights, biases, outputs, and errors
    layer->weights = (double *)malloc(input_size * output_size * sizeof(double));
    layer->biases = (double *)malloc(output_size * sizeof(double));
    layer->outputs = (double *)malloc(output_size * sizeof(double));
    layer->errors = (double *)malloc(output_size * sizeof(double));

    // Initialize weights and biases with random values in [-1, 1]
    for (int i = 0; i < input_size * output_size; ++i) {
        layer->weights[i] = ((double)rand() / RAND_MAX) * 2.0 - 1.0;
    }
    for (int i = 0; i < output_size; ++i) {
        layer->biases[i] = ((double)rand() / RAND_MAX) * 2.0 - 1.0;
    }
}
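/* Weight layout: weights[j * output_size + i] is the weight from input j
   to neuron i, i.e. the matrix is stored row-major with one row per input.
   The forward and backward passes below both index it this way. */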
void forward_pass(Layer *input_layer, Layer *hidden_layer, Layer *output_layer, double *input) {
    // Set input layer outputs
    for (int i = 0; i < input_layer->output_size; ++i) {
        input_layer->outputs[i] = input[i];
    }

    // Calculate hidden layer outputs
    for (int i = 0; i < hidden_layer->output_size; ++i) {
        hidden_layer->outputs[i] = 0;
        for (int j = 0; j < hidden_layer->input_size; ++j) {
            hidden_layer->outputs[i] += input_layer->outputs[j] * hidden_layer->weights[j * hidden_layer->output_size + i];
        }
        hidden_layer->outputs[i] += hidden_layer->biases[i];
        hidden_layer->outputs[i] = sigmoid(hidden_layer->outputs[i]);
    }

    // Calculate output layer outputs
    for (int i = 0; i < output_layer->output_size; ++i) {
        output_layer->outputs[i] = 0;
        for (int j = 0; j < output_layer->input_size; ++j) {
            output_layer->outputs[i] += hidden_layer->outputs[j] * output_layer->weights[j * output_layer->output_size + i];
        }
        output_layer->outputs[i] += output_layer->biases[i];
        output_layer->outputs[i] = sigmoid(output_layer->outputs[i]);
    }
}
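/* Each trainable layer computes out = sigmoid(W * in + b); the input
   layer only copies the raw inputs into its outputs[] array. */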
void backpropagation(NeuralNetwork *network, double *target) {
    Layer *output_layer = &(network->output_layer);
    Layer *hidden_layer = &(network->hidden_layer);
    Layer *input_layer = &(network->input_layer);

    // Calculate output layer errors: delta = (target - out) * sigmoid'(net)
    for (int i = 0; i < output_layer->output_size; ++i) {
        output_layer->errors[i] = (target[i] - output_layer->outputs[i]) * output_layer->outputs[i] * (1 - output_layer->outputs[i]);
    }

    // Update output layer weights and biases
    for (int i = 0; i < output_layer->output_size; ++i) {
        for (int j = 0; j < hidden_layer->output_size; ++j) {
            output_layer->weights[j * output_layer->output_size + i] += LEARNING_RATE * output_layer->errors[i] * hidden_layer->outputs[j];
        }
        output_layer->biases[i] += LEARNING_RATE * output_layer->errors[i];
    }

    // Calculate hidden layer errors by backpropagating through the output weights
    for (int i = 0; i < hidden_layer->output_size; ++i) {
        hidden_layer->errors[i] = 0;
        for (int j = 0; j < output_layer->output_size; ++j) {
            hidden_layer->errors[i] += output_layer->errors[j] * output_layer->weights[i * output_layer->output_size + j];
        }
        hidden_layer->errors[i] *= hidden_layer->outputs[i] * (1 - hidden_layer->outputs[i]);
    }

    // Update hidden layer weights and biases
    for (int i = 0; i < hidden_layer->output_size; ++i) {
        for (int j = 0; j < input_layer->output_size; ++j) {
            hidden_layer->weights[j * hidden_layer->output_size + i] += LEARNING_RATE * hidden_layer->errors[i] * input_layer->outputs[j];
        }
        hidden_layer->biases[i] += LEARNING_RATE * hidden_layer->errors[i];
    }
}
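/* Note: the error terms already carry the sign of the squared-error
   gradient, d/dw [0.5 * (target - out)^2] = -(target - out) * d(out)/dw,
   so the updates use += and still perform gradient descent. */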
int main() {
    // Seed the random number generator (fixed seed for reproducibility)
    srand(1);

    // XOR training data: {input A, input B, target}
    double training_data[4][3] = {
        {0, 0, 0},
        {0, 1, 1},
        {1, 0, 1},
        {1, 1, 0}
    };

    // Create and initialize the neural network
    NeuralNetwork network;
    // Each layer takes (input_size, output_size). The input layer's
    // weights and biases are never used, only its outputs[] array.
    initialize_layer(&(network.input_layer), INPUT_SIZE, INPUT_SIZE);
    initialize_layer(&(network.hidden_layer), INPUT_SIZE, HIDDEN_SIZE);
    initialize_layer(&(network.output_layer), HIDDEN_SIZE, OUTPUT_SIZE);
    // Train the network: 80000 epochs over the four XOR patterns
    for (int epoch = 0; epoch < 80000; ++epoch) {
        for (int i = 0; i < 4; ++i) {
            // Forward pass on the two inputs, then backpropagate
            // against the target stored in the third column
            forward_pass(&(network.input_layer), &(network.hidden_layer), &(network.output_layer), training_data[i]);
            backpropagation(&network, &training_data[i][2]);
        }
    }

    // Test the trained network
    printf("Testing trained network:\n");
    for (int i = 0; i < 4; ++i) {
        forward_pass(&(network.input_layer), &(network.hidden_layer), &(network.output_layer), training_data[i]);
        printf("Input: %d XOR %d = %.2lf\n", (int)training_data[i][0], (int)training_data[i][1], network.output_layer.outputs[0]);
    }
    // Free allocated memory
    free(network.input_layer.weights);
    free(network.input_layer.biases);
    free(network.input_layer.outputs);
    free(network.input_layer.errors);
    free(network.hidden_layer.weights);
    free(network.hidden_layer.biases);
    free(network.hidden_layer.outputs);
    free(network.hidden_layer.errors);
    free(network.output_layer.weights);
    free(network.output_layer.biases);
    free(network.output_layer.outputs);
    free(network.output_layer.errors);
    return 0;
}
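To build and run it, link against the math library for exp() (the file name here is just an example):

Code:
gcc -o xor xor.c -lm
./xor

After 80000 epochs the printed values should sit close to the XOR targets: near 0 for inputs (0,0) and (1,1), near 1 for (0,1) and (1,0). The exact decimals depend on your C library's rand(), since the seed is fixed but rand() itself is implementation-defined.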