module des.isys.neiro.layer.net;

import std.math;
import std.conv;
import std.range;
import std.traits;
import std.algorithm;

import des.isys.neiro.neiron;
import des.isys.neiro.func;

import des.isys.neiro.layer.neiron;
import des.isys.neiro.layer.structure;

version(unittest) import std.stdio;

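/// Common interface of a network processor: bind a `NetStructure`
/// and map an input vector to an output vector.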
interface NetProcessor(T)
    if( isFloatingPoint!T )
{
    void setStructure( NetStructure!T );
    T[] process( in T[] src );
}

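/// Network processor with error-backpropagation learning.
/// Caches the last result, the last summed error and the last maximal
/// weight delta; the latter two scale the effective learning rate.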
class LearnNetProcessor(T) : NetProcessor!T
    if( isFloatingPoint!T )
{
package:

    NetStructure!T structure;

    @property
    {
        ValueNeiron!T[] input() { return structure.input; }
        BPLayer!T[] layers() { return structure.layers; }
        BPNeiron!T[] output() { return layers[$-1].neirons; }
    }

    T[] last_result;

    T _nu, _alpha;
    T last_error, last_delta;

public:

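    /// Params:
    ///     Nu = base learning-rate factor, see `currentNu`
    ///     Alpha = second coefficient handed to `BPLayer.correct`
    ///             (presumably a momentum term; its exact meaning is
    ///             defined by the layer implementation)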
    this( T Nu=1, T Alpha=0.01 )
    in
    {
        assert( Nu > 0 );
        assert( Alpha > 0 );
    }
    body
    {
        _nu = Nu;
        _alpha = Alpha;
    }

    void setStructure( NetStructure!T structure )
    in { assert( structure !is null ); } body
    { this.structure = structure; }

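    /// Feed `src` into the input neirons, process every layer in order
    /// and return a copy of the output values.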
    T[] process( in T[] src )
    {
        prepareInput( src );
        processForEachLayer();
        copyResult();

        return last_result;
    }

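    /// Learning parameters and the cached result of the last `process` call;
    /// the setters keep the same positivity contracts as the constructor.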
    @property
    {
        T nu() const { return _nu; }
        T nu( in T Nu )
        in { assert( Nu > 0 ); } body
        {
            _nu = Nu;
            return _nu;
        }

        T alpha() const { return _alpha; }
        T alpha( in T Alpha )
        in { assert( Alpha > 0 ); } body
        {
            _alpha = Alpha;
            return _alpha;
        }

        const(T)[] lastResult() const { return last_result; }
    }

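    /// Train on a single sample: `steps` times process `src`, compute the
    /// errors against `standard`, push them into the output neirons and
    /// backpropagate. Returns the error sum of the last step.
    ///
    /// A minimal usage sketch (mirrors the unittest below; `someStructure`
    /// stands for any prepared `NetStructure!float`, and the resulting
    /// numbers depend on that structure and its weights):
    /// ---
    /// auto p = new LearnNetProcessor!float( 1, 0.001 );
    /// p.setStructure( someStructure );
    /// auto err = p.learn( [1], [1], 100 );
    /// ---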
    T learn( T[] src, T[] standard, uint steps )
    {
        last_delta = 1;

        foreach( k; 0 .. steps )
        {
            process( src );

            auto errors = calcErrors( standard );

            addErrorsToOutput( errors );

            last_error = calcErrorSum( errors );

            backpropagation( nu, alpha );

            last_delta = getMaxLinkDeltaWeight();
        }

        return last_error;
    }

protected:

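    /// Write the source vector into the input neirons pairwise;
    /// `zip` stops at the shorter of the two sequences.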
    void prepareInput( in T[] src )
    {
        foreach( ref n, v; zip(input,src) )
            n.value = v;
    }

    void processForEachLayer() { foreach( l; layers ) l.process(); }

    void copyResult()
    {
        last_result.length = output.length;
        foreach( i; 0 .. output.length )
            last_result[i] = output[i].output;
    }

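    /// Element-wise difference `standard[] - last_result[]`.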
    auto calcErrors( in T[] standard )
    {
        auto ret = new T[](last_result.length);
        ret[] = standard[] - last_result[];
        return ret;
    }

    void addErrorsToOutput( in T[] errors )
    {
        foreach( n, e; zip(output, errors) )
            n.addError( e );
    }

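    /// Half of the sum of squared errors: E = (1/2) * sum(e_i^2).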
    auto calcErrorSum( in T[] errors )
    { return reduce!((a,b)=>a+b*b)(cast(T)0,errors) / 2.0; }

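    /// Correct the layers in reverse order (output towards input) with the
    /// current adaptive rate; note the `nu` parameter is currently unused,
    /// the rate comes from `currentNu`.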
    void backpropagation( T nu, T alpha )
    {
        auto cnu = currentNu;
        foreach_reverse( layer; layers )
            layer.correct( cnu, alpha );
    }

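    /// Adaptive learning rate: the base `nu` scaled by the last error sum
    /// and the last maximal weight delta.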
    @property T currentNu() { return last_delta * last_error * nu; }

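    /// Maximal absolute `deltaWeight` over all links of all neirons.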
    T getMaxLinkDeltaWeight()
    {
        T maxd = 0;

        foreach( layer; layers )
            foreach( neiron; layer.neirons )
                foreach( link; neiron.bpLinks )
                    maxd = max( maxd, abs(link.deltaWeight) );

        return maxd;
    }
}

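// Smoke test: two single-neiron layers, all initial weights equal to 1 and a
// LinearDependence activation; learning towards [1] must pull the output of 2
// down towards 1 without overshooting.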
unittest
{
    auto nw = new class NetWeight!float
    { float opIndex(size_t,size_t,size_t) { return 1; } };

    auto calns = new ConsistentlyAssociatedLayereNetStructure!float(
                         new LinearDependence!float(2), [1,1], nw );

    auto lnp = new LearnNetProcessor!float( 1, 0.001 );
    lnp.setStructure( calns );

    assert( lnp.process( [1] ) == [2] );

    lnp.learn( [1], [1], 100 );
    auto r = lnp.process([1])[0];
    assert( r > 1 && r < 2 );
}