Cześć, bawię się Arduino i znalazłem bibliotekę https://github.com/GiorgosXou/NeuralNetworks pozwalającą na uczenie sieci neuronowych na Arduino. Wszystko działa bardzo fajnie, tylko że chciałbym wpuścić na tę sieć taki zestaw danych uczących, którego Arduino nie przełknie. W związku z tym chciałbym uczyć sieć na komputerze, a potem na Arduino wpiszę tylko odpowiednie wagi neuronów. Tak więc skopiowałem tę bibliotekę i przepisałem funkcje typowe dla Arduino na C++, jednakże nie potrafię tego skompilować. Piszę w Visual Studio Code.
Dostaję błędy:
In function `main':
undefined reference to `NeuralNetwork::NeuralNetwork(unsigned int const*, unsigned int const&)'
undefined reference to `NeuralNetwork::FeedForward(float const*)'
undefined reference to `NeuralNetwork::BackProp(float const*)'
undefined reference to `NeuralNetwork::FeedForward(float const*)'
undefined reference to `NeuralNetwork::print(bool)'
undefined reference to `NeuralNetwork::~NeuralNetwork()'
undefined reference to `NeuralNetwork::~NeuralNetwork()'
collect2.exe: error: ld returned 1 exit status
Mimo to wydaje mi się, że te funkcje są zdefiniowane w pliku "NeuralNetwork.h".
Kod pliku main.cpp
#include <cstddef>
#include <iomanip>
#include <iostream>

#include "NeuralNetwork.h"
#define NumberOf(arg) ((unsigned int) (sizeof (arg) / sizeof (arg [0]))) //calculates the amount of layers (in this case 4)
unsigned int layers[] = {3, 9, 9, 1}; // 4 layers (1st)layer with 3 input neurons (2nd & 3rd)layer 9 hidden neurons each and (4th)layer with 1 output neuron
float *outputs; // 4th layer's outputs (in this case output)
//Default Inputs
const float inputs[8][3] = {
{0, 0, 0}, //0
{0, 0, 1}, //1
{0, 1, 0}, //1
{0, 1, 1}, //0
{1, 0, 0}, //1
{1, 0, 1}, //0
{1, 1, 0}, //0
{1, 1, 1} //1
};
const float expectedOutput[8][1] = {{0}, {1}, {1}, {0}, {1}, {0}, {0}, {1}}; // values that we were expecting to get from the 4th/(output)layer of Neural-network, in other words something like a feedback to the Neural-network.
int main()
{
NeuralNetwork NN(layers, NumberOf(layers)); // Creating a NeuralNetwork with default learning-rates
//Trains the NeuralNetwork for 8000 epochs = Training loops
for (int i = 0; i < 8000; i++)
{
for (int j = 0; j < NumberOf(inputs); j++)
{
NN.FeedForward(inputs[j]); // Feeds-Forward the inputs to the first layer of the NN and Gets the output.
NN.BackProp(expectedOutput[j]); // Tells to the NN if the output was right/the-expectedOutput and then, teaches it.
}
}
//Goes through all inputs
for (int i = 0; i < NumberOf(inputs); i++)
{
outputs = NN.FeedForward(inputs[i]); // Feeds-Forward the inputs[i] to the first layer of the NN and Gets the output
std::cout<<(outputs[0], 7)<<std::endl; // prints the first 7 digits after the comma.
}
NN.print(); // prints the weights and biases of each layer
}
Kod pliku "NeuralNetwork.h"
#if defined(__AVR_ATtiny2313__) || defined(__AVR_ATtiny4313__) || defined(__AVR_ATtiny24__) || defined(__AVR_ATtiny44__) || defined(__AVR_ATtiny84__) || defined(__AVR_ATtiny25__) || defined(__AVR_ATtiny45__) || defined(__AVR_ATtiny85__)
#define As__AVR_ATtinyX__
#endif
// Defines a list of microcontroller series (as) As__No_Common_Serial_Support (in that moment)
#if defined(As__AVR_ATtinyX__) // or etc.
#define As__No_Common_Serial_Support
#endif
// - This prevents problems if someone accidently #include's your library twice.
#ifndef NeuralNetwork_h
#define NeuralNetwork_h
// - That gives you access to the standard types and constants of the Arduino language.
//#include <math.h>
// - And code goes here...
class NeuralNetwork
{
private:
bool FIRST_TIME_FDFp = false; // determines if there are trashes left in last outputs .
unsigned int _numberOflayers; // # of layers .
const float *_inputs; // Pointer to primary/first Inputs Array from Sketch .
// (Used for backpropagation) .
class Layer
{
public:
unsigned int _numberOfInputs; // # of neurons in the previous layer.
unsigned int _numberOfOutputs; // # of neurons in the current layer.
float *bias; // bias of this layer
float *outputs; // outputs of this layer [1D Array] pointers.
float **weights; // weights of this layer [2D Array] pointers.
float *preLgamma; // gamma of previous layer [1D Array] pointers.
// Default Constractor .
// #0 Constructor .
// #1 Constructor With default/("probably") preptained, weights and biases.
Layer();
Layer(const unsigned int &NumberOfInputs, const unsigned int &NumberOfOutputs); // #0
Layer(const unsigned int &NumberOfInputs, const unsigned int &NumberOfOutputs, float *default_Weights, float *default_Bias); // #1
Layer(const unsigned int &NumberOfInputs, const unsigned int &NumberOfOutputs, float *default_Weights, float *default_Bias, const bool NO_OUTPUTS); // #1
void FeedForward(const float *inputs); // Calculates the outputs() of layer.
void FdF_PROGMEM(const float *inputs);
void BackPropOutput(const float *_expected_, const float *inputs, const NeuralNetwork *NN);
void BackPropHidden(const Layer *frontLayer, const float *inputs, const NeuralNetwork *NN);
float Sigmoid(const float &x); // Sigmoid Activation Function 1/(1+e^(-x)) .
float SigmDer(const float &x); // Derivative of Sigmoid Activation Function.
void print_PROGMEM();
void print();
};
public:
Layer *layers; // layers in the network [1D Array].
//unsigned float doesn't exist..?
float LearningRateOfWeights = 0.33; // Learning Rate of Weights.
float LearningRateOfBiases = 0.066; // Learning Rate of Biases .
~NeuralNetwork(); // Destractor.
NeuralNetwork(const unsigned int *_layer, const unsigned int &NumberOflayers); // #0
NeuralNetwork(const unsigned int *_layer, const unsigned int &NumberOflayers, const float &LRw, const float &LRb); // #0
NeuralNetwork(const unsigned int *_layer, float *default_Weights, float *default_Bias, const unsigned int &NumberOflayers); // #1
NeuralNetwork(const unsigned int *_layer, float *default_Weights, float *default_Bias, const unsigned int &NumberOflayers, bool NO_OUTPUTS);
// NeuralNetwork(const unsigned int *_layer, const PROGMEM float *default_Weights, const PROGMEM float *default_Bias, const unsigned int &NumberOflayers , bool isProgmem); // isProgmem (because of the Error #777) ? i get it in a way but ..
float *FeedForward(const float *inputs); // Moves Calculated outputs as inputs to next layer.
float *FeedForward(const float *inputs, const bool IS__PROGMEM);
void BackProp(const float *expected); // BackPropopagation - (error, delta-weights, etc.).
void print(bool IS__PROGMEM = false);
};
#endif
Jest jeszcze plik Layer.cpp i NeuralNetwork.cpp, ale one chyba nie są teraz ważne. Ewentualnie czy mógłby ktoś polecić prostą bibliotekę C++ do sieci neuronowych? Tak żebym mógł zdefiniować warstwy i liczbę neuronów i abym miał dostęp do wag neuronów? Żadnych Kerasów, TensorFlowów i innych wielkich bibliotek, tylko coś prostego jak ta do Arduino, bo koniec końców chcę uruchomić tę sieć właśnie na Arduino.