README.md (8 changes: 4 additions & 4 deletions)
@@ -95,7 +95,7 @@ from deeptensor import (
Conv2D,
MaxPooling2D,
Flatten,
-FeedForwardLayer,
+LinearLayer,

# activation layers
GeLu,
@@ -124,11 +124,11 @@ from deeptensor import (

model = Model(
[
-FeedForwardLayer(2, 16),
+LinearLayer(2, 16),
ReLu(),
-FeedForwardLayer(16, 16),
+LinearLayer(16, 16),
LeakyReLu(0.1),
-FeedForwardLayer(16, 1),
+LinearLayer(16, 1),
Sigmoid(),
],
False, # using_cuda
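For reference, the README snippet after this rename, consolidated into one piece (a sketch: only part of the import list is visible in the hunk above, so the `Model` and activation imports are assumed to come from the same `deeptensor` package):

```python
from deeptensor import (
    Model,        # assumed to be exported alongside the layers
    LinearLayer,
    ReLu,
    LeakyReLu,
    Sigmoid,
)

# The 2 -> 16 -> 16 -> 1 stack from the README example, now built with LinearLayer.
model = Model(
    [
        LinearLayer(2, 16),
        ReLu(),
        LinearLayer(16, 16),
        LeakyReLu(0.1),
        LinearLayer(16, 1),
        Sigmoid(),
    ],
    False,  # using_cuda
)
```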
csrc/layers/feed_forward_layer.h → csrc/layers/linear_layer.h (renamed; 10 changes: 5 additions & 5 deletions)
@@ -6,7 +6,7 @@
#include "../tensor.h"
#include "../utils.h"

-class FeedForwardLayer : public Layer {
+class LinearLayer : public Layer {
private:
int nin; // no_of_inputs
int nout; // no_of_outputs
@@ -42,14 +42,14 @@ class FeedForwardLayer {
}

public:
-FeedForwardLayer(int nin, int nout) : nin(nin), nout(nout) {
+LinearLayer(int nin, int nout) : nin(nin), nout(nout) {
_initialize();
}
-FeedForwardLayer(int nin, int nout, int seed)
+LinearLayer(int nin, int nout, int seed)
: nin(nin), nout(nout), seed(seed) {
_initialize();
}
-FeedForwardLayer(
+LinearLayer(
int nin,
int nout,
int seed,
@@ -90,7 +90,7 @@ class FeedForwardLayer {
}

std::string printMe() override {
std::string s = "FeedForwardLayer(" + std::to_string(this->nin) + "," +
std::string s = "LinearLayer(" + std::to_string(this->nin) + "," +
std::to_string(this->nout) + ")";
return s;
}
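The three constructor overloads are only renamed, not reworked; they map one-to-one onto the `py::init<...>` bindings in `csrc/main.cc` below. A minimal sketch of how they look from Python, assuming the `deeptensor` import path from the README; the meaning of the two string arguments (weight initializer and distribution) is inferred from the `"XAVIER"`/`"NORMAL"` values used in `ctests/nn_test.cc`, and the `__repr__` output follows `printMe()` above.

```python
from deeptensor import LinearLayer

layer = LinearLayer(2, 16)                           # nin, nout
seeded = LinearLayer(2, 16, 42)                      # nin, nout, seed
custom = LinearLayer(2, 16, 42, "XAVIER", "NORMAL")  # string args: initializer / distribution (assumed)

print(layer)  # LinearLayer(2,16) -- format produced by printMe()
```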
csrc/main.cc (14 changes: 7 additions & 7 deletions)
@@ -1,7 +1,7 @@
#include <pybind11/pybind11.h>
#include <pybind11/stl.h>
#include "layers/convolutional_layer.h"
#include "layers/feed_forward_layer.h"
#include "layers/linear_layer.h"
#include "layers/flatten.h"
#include "layers/non_linear_layer.h"
#include "loss.h"
@@ -137,15 +137,15 @@ PYBIND11_MODULE(_core, m) {
.def("parameters", &Layer::parameters)
.def("__repr__", &Layer::printMe);

py::class_<FeedForwardLayer, Layer, std::shared_ptr<FeedForwardLayer>>(
m, "FeedForwardLayer")
py::class_<LinearLayer, Layer, std::shared_ptr<LinearLayer>>(
m, "LinearLayer")
.def(py::init<int, int>())
.def(py::init<int, int, int>())
.def(py::init<int, int, int, std::string, std::string>())
.def("zero_grad", &FeedForwardLayer::zero_grad)
.def("parameters", &FeedForwardLayer::parameters)
.def("__call__", &FeedForwardLayer::call)
.def("__repr__", &FeedForwardLayer::printMe);
.def("zero_grad", &LinearLayer::zero_grad)
.def("parameters", &LinearLayer::parameters)
.def("__call__", &LinearLayer::call)
.def("__repr__", &LinearLayer::printMe);

py::class_<Conv2D, Layer, std::shared_ptr<Conv2D>>(m, "Conv2D")
.def(py::init<int, int, int>())
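Only the exposed class name changes in the `LinearLayer` binding above; the bound surface (`zero_grad`, `parameters`, `__call__`, `__repr__`) carries over unchanged. A quick sanity-check sketch, assuming the compiled extension is importable as `deeptensor`; the element type returned by `parameters()` and the argument expected by `__call__` are not visible in this diff, so they are left alone here.

```python
from deeptensor import LinearLayer

layer = LinearLayer(4, 3, 0)  # py::init<int, int, int>: nin, nout, seed
params = layer.parameters()   # trainable values, bound from LinearLayer::parameters
layer.zero_grad()             # resets gradients, bound from LinearLayer::zero_grad
print(layer)                  # "LinearLayer(4,3)" via __repr__ / printMe()
```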
ctests/nn_test.cc (6 changes: 3 additions & 3 deletions)
@@ -1,7 +1,7 @@
#include <gtest/gtest.h>
#include <memory>
#include <vector>
#include "layers/feed_forward_layer.h"
#include "layers/linear_layer.h"
#include "layers/non_linear_layer.h"
#include "neural_network.h"

@@ -31,7 +31,7 @@
// int seed = 42;
// std::shared_ptr<Model> model = std::make_shared<Model>(
// std::vector<std::shared_ptr<Layer>>{
// std::make_shared<FeedForwardLayer>(2, 2, seed, "XAVIER", "NORMAL"),
// std::make_shared<LinearLayer>(2, 2, seed, "XAVIER", "NORMAL"),
// },
// false);

@@ -91,7 +91,7 @@
// int seed = 42;
// std::shared_ptr<Model> model = std::make_shared<Model>(
// std::vector<std::shared_ptr<Layer>>{
// std::make_shared<FeedForwardLayer>(2, 2, seed, "XAVIER", "NORMAL"),
// std::make_shared<LinearLayer>(2, 2, seed, "XAVIER", "NORMAL"),
// std::make_shared<ReLu>(),
// },
// false);
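The commented-out tests only swap the class name. Their setup corresponds to the following Python-side construction (a sketch mirroring the test comments; `Model` and `ReLu` are assumed to be exported from `deeptensor` as in the README):

```python
from deeptensor import LinearLayer, Model, ReLu

seed = 42
model = Model(
    [
        LinearLayer(2, 2, seed, "XAVIER", "NORMAL"),  # seeded layer, as in the commented-out test
        ReLu(),
    ],
    False,  # using_cuda
)
```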