cwNN.h/cpp/Makefile.am : Initial commit.

This commit is contained in:
kevin 2020-08-19 20:12:51 -04:00
parent ca4487d09b
commit d492374df7
3 changed files with 143 additions and 0 deletions

View File

@ -4,6 +4,8 @@ libcwSRC =
# Core utility headers/sources every other libcw module depends on.
libcwHDR += src/libcw/cwCommon.h src/libcw/cwCommonImpl.h src/libcw/cwMem.h src/libcw/cwLog.h src/libcw/cwUtility.h
libcwSRC += src/libcw/cwCommonImpl.cpp src/libcw/cwMem.cpp src/libcw/cwLog.cpp src/libcw/cwUtility.cpp
# cwMtx.h has no matching .cpp entry — presumably header-only (templates); verify.
libcwHDR += src/libcw/cwMtx.h
# File system, text, file I/O, time, lexer and numeric-conversion utilities.
libcwHDR += src/libcw/cwFileSys.h src/libcw/cwText.h src/libcw/cwFile.h src/libcw/cwTime.h src/libcw/cwLex.h src/libcw/cwNumericConvert.h
libcwSRC += src/libcw/cwFileSys.cpp src/libcw/cwText.cpp src/libcw/cwFile.cpp src/libcw/cwTime.cpp src/libcw/cwLex.cpp
@ -16,6 +18,8 @@ libcwSRC += src/libcw/cwThread.cpp src/libcw/cwMutex.cpp src/libcw/cwThreadMach
# Concurrent queue/buffer modules. cwMpScNbQueue.h has no .cpp entry —
# presumably header-only; verify.
libcwHDR += src/libcw/cwMpScNbQueue.h src/libcw/cwSpScBuf.h src/libcw/cwSpScQueueTmpl.h
libcwSRC += src/libcw/cwSpScBuf.cpp src/libcw/cwSpScQueueTmpl.cpp
# SVG output support.
libcwHDR += src/libcw/cwSvg.h
libcwSRC += src/libcw/cwSvg.cpp
# Websocket server and dynamic library loading support.
libcwHDR += src/libcw/cwWebSock.h src/libcw/cwWebSockSvr.h src/libcw/cwLib.h
libcwSRC += src/libcw/cwWebSock.cpp src/libcw/cwWebSockSvr.cpp src/libcw/cwLib.cpp
@ -49,3 +53,5 @@ libcwHDR += src/libcw/cwMdns.h src/libcw/cwEuCon.h src/libcw/cwDnsSd.h src
# mDNS / EuCon / DNS-SD network discovery sources.
libcwSRC += src/libcw/cwMdns.cpp src/libcw/cwEuCon.cpp src/libcw/cwDnsSd.cpp src/libcw/dns_sd/dns_sd.cpp src/libcw/dns_sd/dns_sd_print.cpp src/libcw/dns_sd/fader.cpp src/libcw/dns_sd/rpt.cpp
# Dataset support — presumably provides the dataset<R> type consumed by cwNN.h; verify.
libcwHDR += src/libcw/cwDataSets.h
libcwSRC += src/libcw/cwDataSets.cpp

75
cwNN.cpp Normal file
View File

@ -0,0 +1,75 @@
#include <cassert>
#include <cmath>

#include "cwCommon.h"
#include "cwLog.h"
#include "cwCommonImpl.h"
#include "cwMem.h"
#include "cwFile.h"
#include "cwNN.h"
namespace cw
{
namespace nn
{
template< typename R >
struct input_str
{
R* x;
unsigned dimN;
unsigned* dimV;
};
typedef struct dense_str
{
unsigned xN; // count of neurons in src layer
unsigned yN; // count of neurons in this layer
real_t* wM; // wM[ xN, yN ] weight matrix
real_t* bV; // bV[ yN ] bias vector
real_t* yV; // scaled input + bias
real_t* aV; // activation output
real_t* dV; // contribution to cost for each neurode
real_t* gV; // C gradient wrt weight at each neurode
} dense_t;
typedef struct layer_str
{
} layer_t;
typedef struct nn_str
{
} nn_t;
void _mtx_mul( R* z, R* m, R* x, unsigned mN, unsigned mM )
{
}
void _add( R* y, R* x, unsigned n )
{
}
void _activation( dense_t* l )
{
}
void _dense_forward( dense_t* l0, dense_t* l1 )
{
assert( l1->wM.dimV[1] == l0->yN );
assert( l1->wM.dimV[0] == l1->yN );
_mtx_mult( l1->zV, l1->wM.base, l0->aV, l0->yN, l1->yN );
_add( l1->zV, l1->bV, l1->yN );
_activation(l1)
}
}
}

62
cwNN.h Normal file
View File

@ -0,0 +1,62 @@
#ifndef cwNN_H
#define cwNN_H
namespace cw
{
namespace nn
{
// Opaque handle to a neural-network instance.
// NOTE(review): handle<> and rc_t are not declared here — assumed to come
// from cwCommon.h; confirm.
typedef handle<struct nn_str> handle_t;
// Activation function selector ids (per layer).
enum
{
kSigmoidActId,
kReluActId
};
// Layer type selector ids.
// NOTE(review): kConv1DConvId breaks the k<Name>LayerId naming pattern of
// its siblings — kConv1dLayerId was probably intended; renaming would
// change the public interface, so flagging only.
enum
{
kInputLayerId,
kDenseLayerId,
kConv1DConvId
};
// Weight/bias initialization strategy ids.
enum
{
kZeroInitId,
kUniformInitId,
kNormalInitId
};
// Configuration for a single layer.
typedef struct layer_args_str
{
unsigned typeId;       // one of k???LayerId
unsigned actId;        // one of k???ActId
unsigned weightInitId; // one of k???InitId — weight matrix initialization
unsigned biasInitId;   // one of k???InitId — bias vector initialization
unsigned dimN;         // count of elements in dimV[]
const unsigned* dimV;  // dimV[ dimN ] layer dimensions
} layer_args_t;
// Configuration for a whole network: an ordered array of layer configs.
typedef struct network_args_t
{
layer_args_t* layers; // layers[ layerN ]
unsigned layerN;      // count of layers
} network_args_t;
// Fill 'args' from a parsed cfg. object.
// NOTE(review): object_t is not declared here — presumably the cw cfg.
// object type; confirm against its header.
rc_t parse_args( const object_t& o, network_args_t& args );
// Allocate a network per 'args'. Release with destroy().
rc_t create( handle_t& h, const network_args_t& args );
// Release a network created by create(). The handle is invalid afterward.
rc_t destroy( handle_t& h );
// Train for 'epochN' epochs in batches of 'batchN' samples drawn from
// 'trainDs'. dataset<R> presumably comes from cwDataSets.h — confirm.
template< typename R >
rc_t train( handle_t h, unsigned epochN, unsigned batchN, const dataset<R>& trainDs );
// Run inference over every sample in 'ds'.
template< typename R >
rc_t infer( handle_t h, const dataset<R>& ds );
}
}
#endif