function [Y]=MLPval(x,W1,W2,F1,F2)
%MLPval compute output values from a trained MLP
%
% [Y]=MLPval(X,W1,W2,F1,F2)
%
% X  the input data, one observation per row           (n x d)
% W1 weight matrix input -> hidden layer, last row is
%    the bias weights                                  ((d+1) x m)
% W2 weight matrix hidden layer -> output, last row is
%    the bias weights                                  ((m+1) x q)
% F1 (default 'tah') activation function of the hidden layer units:
%    'tah' -> tanh, 'sig' -> sigmoid (phi)
% F2 (default 'lin') activation function of the output layer units:
%    'lin' -> identity, 'tah' -> tanh, 'sig' -> sigmoid (phi), 'exp' -> exp
%
% Y  the output of the network at point X              (n x q)
%
% 21/04/97 S. Canu
if nargin < 3
   help MLPval
   error(sprintf('\n *** MLPval error: invalid call***\n\n\t[Y]=MLPval(X,W1,W2,F1,F2);\n\n'));
end
% Check that matrix (X) and matrices (W1,W2) have compatible dimensions.
[n,d]   = size(x);
[dp1,m] = size(W1);
[mp1,q] = size(W2);
if (d+1)~=dp1
   error('The number of columns in x plus one must equal the number of rows in W1');
end
if (m+1)~=mp1
   error('The number of columns in W1 plus one must equal the number of rows in W2');
end
% Complete unspecified names of activation functions.
if nargin < 5
   F2 = 'lin';
end
if nargin < 4
   F1 = 'tah';
end
% Forward pass, hidden layer.  Dispatch with switch/case rather than
% comparing char arrays with '==', which is element-wise and errors out
% when the supplied name has a different length than the expected one.
a1 = [x ones(n,1)]*W1;        % pre-activations, bias column appended
switch F1
case 'tah'
   x1 = tanh(a1);
case 'sig'
   % phi: sigmoid activation -- defined elsewhere in this toolbox
   % (NOTE(review): not visible in this file; confirm its definition).
   x1 = phi(a1);
otherwise
   error('The name of the activation function is incorrect')
end
% Forward pass, output layer.
a2 = [x1 ones(n,1)]*W2;       % pre-activations, bias column appended
switch F2
case 'lin'
   Y = a2;
case 'tah'
   Y = tanh(a2);
case 'sig'
   Y = phi(a2);
case 'exp'
   Y = exp(a2);
otherwise
   error('The name of the activation function is incorrect')
end