
Commit

have a totally functioning forward pass
fm94 committed Jun 1, 2024
1 parent be266d4 commit 221a228
Showing 5 changed files with 33 additions and 17 deletions.
2 changes: 1 addition & 1 deletion README.md
@@ -1,6 +1,6 @@
 # Tipousi - a tiny and fast deep learning framework from scratch in C++
 
-[![Tipous Build Status](https://github.com/fm94/Tipousi/actions/workflows/cmake-multi-platform.yml/badge.svg?branch=master)](https://github.com/fm94/Tipousi/actions/workflows/cmake-multi-platform.yml)
+[![Tipousi Build Status](https://github.com/fm94/Tipousi/actions/workflows/cmake-multi-platform.yml/badge.svg?branch=master)](https://github.com/fm94/Tipousi/actions/workflows/cmake-multi-platform.yml)
 
 This is a WIP project that implements an entire deep learning framework from scratch in C++.<br/>
 The goal is to implement as many layers and modules as possible and have a Python interface at the end.<br/>
15 changes: 9 additions & 6 deletions include/graph/sequential.hpp
@@ -18,14 +18,17 @@ namespace Tipousi
         // traverse all nodes in backward pass
         // and delete all of them sequentially
         Node *current_node = m_output_node;
-        bool all_cleaned = false;
-        while (!all_cleaned)
+        while (true)
         {
-            // TODO : hacky approach deleting only first node -> should
-            // be recursive
-            Node *next_cleaned = current_node->get_inputs()[0];
+            // TODO : hacky approach deleting only first node
+            // -> should be recursive
+            auto input_nodes = current_node->get_inputs();
+            if (input_nodes.size() == 0 || !input_nodes[0])
+            {
+                break;
+            }
             delete current_node;
-            current_node = next_cleaned;
+            current_node = input_nodes[0];
         }
     }
 
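The destructor above tears the graph down by walking from m_output_node and always following input index 0, which the TODO flags as hacky. A minimal sketch of the recursive cleanup the TODO hints at, assuming Node::get_inputs() behaves as used above and that every node has exactly one consumer (no shared inputs), purely illustrative and not part of this commit:

    // Hypothetical helper: recursively delete a node and everything
    // feeding into it. Double deletion would occur if two nodes ever
    // shared an input, so this only fits single-owner graphs.
    void destroy_from(Node *node)
    {
        if (!node)
        {
            return;
        }
        for (Node *input : node->get_inputs())
        {
            destroy_from(input); // clean every branch, not just index 0
        }
        delete node;
    }

For the purely sequential chains built so far, the committed loop and this sketch delete the same set of nodes.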
3 changes: 1 addition & 2 deletions src/activation/softmax.cpp
@@ -10,8 +10,7 @@ namespace Tipousi
     {
         Eigen::MatrixXf expX = in.array().exp();
         Eigen::VectorXf sumExpX = expX.rowwise().sum();
-        out =
-            (expX.array().rowwise() / sumExpX.transpose().array()).matrix();
+        out = (expX.array().colwise() / sumExpX.array()).matrix();
     }
 
     void Softmax::backward(const Eigen::MatrixXf &dout,
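The old expression broadcast rowwise() against the transposed sum vector, which only lines up when the batch size happens to equal the number of classes; dividing colwise() by the per-row sums normalizes each sample correctly. A standalone Eigen sanity check (illustrative only, not part of the commit) that mirrors the new line and verifies every row of the output sums to one:

    #include <Eigen/Dense>
    #include <iostream>

    int main()
    {
        // 4 samples, 3 classes
        Eigen::MatrixXf in = Eigen::MatrixXf::Random(4, 3);
        Eigen::MatrixXf expX = in.array().exp();
        Eigen::VectorXf sumExpX = expX.rowwise().sum();
        Eigen::MatrixXf out =
            (expX.array().colwise() / sumExpX.array()).matrix();
        // every printed entry should be 1
        std::cout << out.rowwise().sum().transpose() << std::endl;
        return 0;
    }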
7 changes: 4 additions & 3 deletions src/graph/sequential.cpp
@@ -16,21 +16,22 @@ namespace Tipousi
         // while keeping the original data intact
         Eigen::MatrixXf data_copy = in;
         Node *current_node = m_input_node;
-        bool is_finished = false;
-        while (!is_finished)
+        // continue until no more nodes
+        while (true)
         {
             if (current_node)
             {
                 // TODO hacky approach: always take number 0
                 auto output_nodes = current_node->get_outputs();
                 if (output_nodes.size() == 0 || !output_nodes[0])
                 {
-                    is_finished = true;
+                    break;
                 }
                 current_node->forward(data_copy);
                 current_node = output_nodes[0];
             }
         }
         out = data_copy;
     }
 
     void Sequential::backward()
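One detail worth noting in the rewritten loop: when current_node is null (for instance an unset m_input_node), the if (current_node) guard skips the body but while (true) never exits. A hedged sketch of an equivalent traversal that also terminates in that case, keeping the same break-before-forward order as the committed code (illustrative, not what this commit contains):

    // Hypothetical variant of the traversal in Sequential::forward.
    // Assumes the same Node::get_outputs() interface used above.
    Node *current_node = m_input_node;
    while (current_node)
    {
        auto output_nodes = current_node->get_outputs();
        if (output_nodes.size() == 0 || !output_nodes[0])
        {
            break;
        }
        current_node->forward(data_copy);
        current_node = output_nodes[0];
    }
    out = data_copy;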
23 changes: 18 additions & 5 deletions tests/main.cpp
@@ -4,6 +4,8 @@
 #include "graph/sequential.hpp"
 #include "layer/dense.hpp"
 #include "loss/mse.hpp"
+#include <chrono>
+#include <iostream>
 #include <memory>
 
 using namespace Tipousi;
@@ -14,12 +16,15 @@ using namespace Loss;
 
 void test_create_net()
 {
+    int n_features{2};
+    int n_labels{2};
+
     // create layer nodes
     // these are raw ptrs and ownership will go to the graph,
     // it is responsible for cleaning them!
-    Node *node1 = Node::create<Dense>(5, 32);
+    Node *node1 = Node::create<Dense>(n_features, 32);
     Node *node2 = Node::create<ReLU>();
-    Node *node3 = Node::create<Dense>(32, 1);
+    Node *node3 = Node::create<Dense>(32, n_labels);
     Node *node4 = Node::create<Softmax>();
 
     // build the dependencies
@@ -32,13 +37,21 @@ void test_create_net()
 
     // test inference
     int n_samples{32};
-    int n_features{2};
-    int n_labels{1};
     auto features = Eigen::MatrixXf::Random(n_samples, n_features);
     auto labels = Eigen::MatrixXf(n_samples, n_labels);
-    // forward pass
+
+    // forward pass with time measurement
     Eigen::MatrixXf preds;
+    auto start = std::chrono::high_resolution_clock::now();
     net.forward(features, preds);
+    auto end = std::chrono::high_resolution_clock::now();
+    auto duration =
+        std::chrono::duration_cast<std::chrono::microseconds>(end - start)
+            .count();
+
+    std::cout << "Forward pass execution time: " << duration << " microseconds"
+              << std::endl;
+
     // backward pass
     net.backward();
 }
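The timing added here is plain std::chrono boilerplate around a single forward call. If more passes end up being measured, a small helper like the hypothetical one below (not in the repo) could wrap it:

    #include <chrono>

    // Hypothetical utility: run any callable and return its wall-clock
    // duration in microseconds.
    template <typename F>
    long long time_us(F &&fn)
    {
        auto start = std::chrono::high_resolution_clock::now();
        fn();
        auto end = std::chrono::high_resolution_clock::now();
        return std::chrono::duration_cast<std::chrono::microseconds>(end - start)
            .count();
    }

    // possible usage in the test:
    // auto duration = time_us([&] { net.forward(features, preds); });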
