mirror of
https://github.com/lxsang/antd-lua-plugin
synced 2025-07-26 10:49:51 +02:00
migrating from another repo
This commit is contained in:
30
lib/ann/tests/callback.lua
Executable file
30
lib/ann/tests/callback.lua
Executable file
@ -0,0 +1,30 @@
|
||||
#! /usr/bin/env lua
-- Test fann.Data.create_from_callback: the callback must be invoked once
-- per row, receive the user-data value, and supply num_input + num_output
-- values for that row.

require("lfann")

-- Tolerant float comparison used across this test suite.
-- (Localized: was an accidental global in the original.)
local function eq(x, y)
    return math.abs(x - y) < 0.00001
end

local num = 1

-- Row generator: receives (user_data, row_index, n_inputs, n_outputs) and
-- returns the 2 input values followed by the 5 output values for the row.
-- Value for column j of row nData is j * nData, which the loop below checks.
local function callback(ud, nData, nIn, nOut)
    assert(ud == "Hi!")
    return 1 * nData, 2 * nData, 3 * nData, 4 * nData, 5 * nData, 6 * nData, 7 * nData
end

local data = fann.Data.create_from_callback(num, 2, 5, callback, "Hi!")

-- Every value in row i must equal i * column_index, as produced above.
for i = 1, num do
    local row = data:get_row(i)

    for j, k in ipairs(row) do
        assert(k == i * j)
    end
end

print("End.")

-- With the "mem" argument, block on stdin so memory use can be inspected.
if arg[1] == "mem" then
    io.read("*n")
end
|
64
lib/ann/tests/cascade.lua
Executable file
64
lib/ann/tests/cascade.lua
Executable file
@ -0,0 +1,64 @@
|
||||
#! /usr/bin/env lua
-- Exercise the cascade-training parameter setters and verify each value
-- round-trips through the corresponding getter.

require("lfann")

-- Tolerant float comparison (localized; was an accidental global).
local function eq(x, y)
    return math.abs(x - y) < 0.00001
end

local net = fann.Net.create_shortcut{2, 1}
net:set_callback() -- Disable logging

net:set_cascade_candidate_change_fraction(0.1)
assert(eq(net:get_cascade_candidate_change_fraction(), 0.1))

net:set_cascade_candidate_stagnation_epochs(5)
-- FIX: the getter takes no argument; the original passed 5 to it.
assert(net:get_cascade_candidate_stagnation_epochs() == 5)

net:set_cascade_output_change_fraction(0.2)
assert(eq(net:get_cascade_output_change_fraction(), 0.2))

net:set_cascade_output_stagnation_epochs(10)
-- FIX: spurious argument removed from the getter call.
assert(net:get_cascade_output_stagnation_epochs() == 10)

net:set_cascade_weight_multiplier(0.359)
assert(eq(net:get_cascade_weight_multiplier(), 0.359))

net:set_cascade_candidate_limit(800)
assert(eq(net:get_cascade_candidate_limit(), 800))

net:set_cascade_max_cand_epochs(100)
assert(net:get_cascade_max_cand_epochs() == 100)

net:set_cascade_max_out_epochs(120)
assert(net:get_cascade_max_out_epochs() == 120)

net:set_cascade_min_out_epochs(20)
assert(net:get_cascade_min_out_epochs() == 20)

net:set_cascade_min_cand_epochs(30)
assert(net:get_cascade_min_cand_epochs() == 30)

net:set_cascade_activation_functions{fann.SIGMOID, fann.GAUSSIAN}
local funcs = net:get_cascade_activation_functions()
assert(funcs[1] == fann.SIGMOID)
assert(funcs[2] == fann.GAUSSIAN)
assert(#funcs == net:get_cascade_activation_functions_count())

net:set_cascade_num_candidate_groups(3)
-- BUG FIX: the original read assert(net:get_..._groups(), 3) -- the comma
-- made 3 the assert *message*, so the value was never actually compared.
assert(net:get_cascade_num_candidate_groups() == 3)

net:set_cascade_activation_steepnesses{0.5, 0.25, 0.75}
local steps = net:get_cascade_activation_steepnesses()
assert(eq(steps[1], 0.5), steps[1])
assert(eq(steps[2], 0.25))
assert(eq(steps[3], 0.75))
assert(#steps == net:get_cascade_activation_steepnesses_count())

net:cascade_train_on_file("train.data", 10, 1, 0.1)

print("End.")

-- With the "mem" argument, block on stdin so memory use can be inspected.
if arg[1] == "mem" then
    io.read("*n")
end
|
72
lib/ann/tests/creation.lua
Executable file
72
lib/ann/tests/creation.lua
Executable file
@ -0,0 +1,72 @@
|
||||
#! /usr/bin/env lua
-- Stress the network creation/copy paths (300 iterations) and verify the
-- resulting topology each time; also churn the callback slot to check for
-- reference leaks under the garbage collector.

require("lfann")

-- Tolerant float comparison (localized; was an accidental global).
local function eq(x, y)
    return math.abs(x - y) < 0.00001
end

-- Assert that net's layer sizes match the expected list.
-- (Replaces four copies of the same inline check, each of which
-- re-declared `local layers` and shadowed the previous one.)
local function check_layers(net, expected)
    local layers = net:get_layer_array()
    assert(#layers == #expected)
    for i = 1, #expected do
        assert(layers[i] == expected[i])
    end
end

for i = 1, 300 do
    local net = fann.Net.create_standard({2, 3, 2})
    check_layers(net, {2, 3, 2})

    -- One bias neuron per layer except the output layer.
    local biases = net:get_bias_array()
    assert(#biases == 3)
    assert(biases[1] == 1)
    assert(biases[2] == 1)
    assert(biases[3] == 0)

    net:set_weight(2, 6, 0.67)
    net:set_weight(3, 6, 0.89)

    local conn = net:get_connection_array()
    assert(#conn == 7)
    assert(eq(conn[2][6], 0.67))
    assert(eq(conn[3][6], 0.89))

    net = fann.Net.create_sparse(0.5, {2, 3, 2})
    check_layers(net, {2, 3, 2})

    net = fann.Net.create_shortcut({2, 3, 2})
    check_layers(net, {2, 3, 2})

    net = fann.Net.create_from_file("xor.test")
    check_layers(net, {2, 3, 1})

    net = net:copy()
    check_layers(net, {2, 3, 1})

    -- Repeatedly replacing the callback must not leak the old closures.
    for c = 1, 300 do
        net:set_callback(function() end, 100)
    end

    collectgarbage()
end

print("End.")

-- With the "mem" argument, block on stdin so memory use can be inspected.
if arg[1] == "mem" then
    io.read("*n")
end
|
64
lib/ann/tests/data.lua
Executable file
64
lib/ann/tests/data.lua
Executable file
@ -0,0 +1,64 @@
|
||||
#! /usr/bin/env lua
-- Test fann.Data: file loading, subset/merge, row access, and
-- callback-based construction, repeated 1000 times to shake out GC issues.

require("lfann")

for i = 1, 1000 do
    local data = fann.Data.read_from_file("train.data")
    assert(data:length() == 4)
    local data2 = data:subset(0, data:length() - 1)
    assert(data2:length() == 3)
    local data3 = data:merge(data2)
    assert(data3:length() == data:length() + data2:length())
    assert(data:num_input() == 2)
    assert(data:num_output() == 1)
    assert(data2:num_input() == 2)
    assert(data2:num_output() == 1)
    assert(data3:num_input() == 2)
    assert(data3:num_output() == 1)

    -- train.data holds the four XOR patterns: two inputs then one output.
    local row = data:get_row(1)
    assert(row[1] == 1 and row[2] == 1 and row[3] == -1)
    row = data:get_row(2)
    assert(row[1] == 1 and row[2] == 0 and row[3] == 1)
    row = data:get_row(3)
    assert(row[1] == 0 and row[2] == 1 and row[3] == 1)
    row = data:get_row(4)
    assert(row[1] == 0 and row[2] == 0 and row[3] == -1)

    local n_rows, n_inp, n_out = 50, 10, 20
    -- FIX: callback parameters renamed (inputs/outputs); the originals
    -- shadowed the outer n_inp/n_out locals used by the checks below.
    local data = fann.Data.create_from_callback(n_rows, n_inp, n_out,
        function(ud, line, inputs, outputs)
            local out = {}

            -- Input j of row `line` is line + j ...
            for i = 1, inputs do
                table.insert(out, line + i)
            end

            -- ... and output j is line * 2 - j.
            for i = 1, outputs do
                table.insert(out, line * 2 - i)
            end

            return unpack(out)
        end
    )

    -- Verify every generated value landed in the right slot.
    for r = 1, n_rows do
        local row = data:get_row(r)

        for i = 1, n_inp do
            assert(row[i] == r + i)
        end

        for i = 1, n_out do
            assert(row[i + n_inp] == r * 2 - i)
        end
    end

    collectgarbage("collect")
end

print("End.")

-- With the "mem" argument, block on stdin so memory use can be inspected.
if arg[1] == "mem" then
    io.read("*n")
end
|
44
lib/ann/tests/params.lua
Executable file
44
lib/ann/tests/params.lua
Executable file
@ -0,0 +1,44 @@
|
||||
#! /usr/bin/env lua
-- Test miscellaneous per-network parameter getters/setters plus the total
-- neuron and connection counts of a fully connected standard network.

require("lfann")

-- Tolerant float comparison (localized; was an accidental global).
local function eq(x, y)
    return math.abs(x - y) < 0.00001
end

local neurons = {4, 2, 2, 5, 4}
local net = fann.Net.create_standard(neurons)
assert(eq(net:get_connection_rate(), 1))

net:set_learning_rate(0.65)
assert(eq(net:get_learning_rate(), 0.65))

net:set_bit_fail_limit(2)
assert(eq(net:get_bit_fail_limit(), 2))

-- Total neurons = sum of layer sizes plus one bias neuron for every
-- layer except the output layer (#neurons - 1 of them).
local n = #neurons - 1
for _, size in ipairs(neurons) do n = n + size end
assert(net:get_total_neurons() == n)

net:set_activation_steepness(0.345, 2, 1)
assert(eq(net:get_activation_steepness(2, 1), 0.345))

net:set_activation_steepness_hidden(0.32)
assert(eq(net:get_activation_steepness(2, 1), 0.32))
assert(eq(net:get_activation_steepness(3, 2), 0.32))

net:set_activation_steepness_output(0.8)
assert(eq(net:get_activation_steepness(5, 1), 0.8))
assert(eq(net:get_activation_steepness(5, 4), 0.8))

-- Fully connected: each adjacent layer pair contributes size_i * size_i+1
-- weights plus size_i+1 bias connections.
local n = 0
for i = 1, #neurons - 1 do
    n = n + neurons[i] * neurons[i + 1] + neurons[i + 1]
end
assert(net:get_total_connections() == n)

print("End.")

-- With the "mem" argument, block on stdin so memory use can be inspected.
if arg[1] == "mem" then
    io.read("*n")
end
|
17
lib/ann/tests/run.lua
Executable file
17
lib/ann/tests/run.lua
Executable file
@ -0,0 +1,17 @@
|
||||
#! /usr/bin/env lua
-- Run a network loaded from disk many times to stress the run() binding;
-- only sanity-check that the single output stays within [-1, 1].

require("lfann")

local iterations = 400000
local net = fann.Net.create_from_file("xor.test")

for i = 1, iterations do
    -- Second input flips from 0 to 1 halfway through the run.
    local second = (i > iterations / 2) and 1 or 0
    local out = net:run{i % 1000, second}
    assert(out[1] >= -1 and out[1] <= 1)
end

print("End.")

-- With the "mem" argument, block on stdin so memory use can be inspected.
if arg[1] == "mem" then
    io.read("*n")
end
|
59
lib/ann/tests/scale.lua
Executable file
59
lib/ann/tests/scale.lua
Executable file
@ -0,0 +1,59 @@
|
||||
#! /usr/bin/env lua
-- Test fann.Data scaling: bounds queries on the whole set / inputs /
-- outputs, and in-place rescaling of inputs and of everything.

require("lfann")

-- Tolerant float comparison (localized; was an accidental global).
local function eq(x, y)
    return math.abs(x - y) < 0.00001
end

-- Single row: first 4 values are inputs, remaining 5 are outputs.
local vals = {-1, 10, 20, 0, -10, 5, -1, 1, 30}

local function create_callback()
    return unpack(vals)
end

local data = fann.Data.create_from_callback(1, 4, 5, create_callback)

local rmin, rmax = data:get_bounds()
assert( eq(rmin, -10) and eq(rmax, 30) )

local rmin, rmax = data:get_bounds_input()
assert( eq(rmin, -1) and eq(rmax, 20) )

local rmin, rmax = data:get_bounds_output()
assert( eq(rmin, -10) and eq(rmax, 30) )

-- Scaling to the current overall bounds must leave every value unchanged.
data:scale(-10, 30)

local rmin, rmax = data:get_bounds()
assert( eq(rmin, -10) and eq(rmax, 30) )

for i, j in ipairs(data:get_row(1)) do
    assert( eq(j, vals[i]) )
end

-- Scale only the input columns into [-1, 1]; outputs keep their range, so
-- the overall bounds (driven by the outputs) stay [-10, 30].
data:scale_input(-1, 1)

local rmin, rmax = data:get_bounds()
assert( eq(rmin, -10) and eq(rmax, 30) )

local row = data:get_row(1)
assert( eq(row[3], 1) )        -- input max (20) now maps to 1
assert( not eq(row[5], -1) )   -- outputs untouched
assert( not eq(row[9], 1) )

-- Now scale everything into [-1, 1].
data:scale(-1, 1)

local rmin, rmax = data:get_bounds()
assert( eq(rmin, -1) and eq(rmax, 1) )

local row = data:get_row(1)
assert( not eq(row[3], 1) )    -- 20 is no longer the global max
assert( eq(row[5], -1) )       -- global min (-10) maps to -1
assert( eq(row[9], 1) )        -- global max (30) maps to 1

print("End.")

-- With the "mem" argument, block on stdin so memory use can be inspected.
if arg[1] == "mem" then
    io.read("*n")
end
|
9
lib/ann/tests/train.data
Normal file
9
lib/ann/tests/train.data
Normal file
@ -0,0 +1,9 @@
|
||||
4 2 1
|
||||
1 1
|
||||
-1
|
||||
1 0
|
||||
1
|
||||
0 1
|
||||
1
|
||||
0 0
|
||||
-1
|
36
lib/ann/tests/xor.net
Normal file
36
lib/ann/tests/xor.net
Normal file
@ -0,0 +1,36 @@
|
||||
FANN_FLO_2.1
|
||||
num_layers=3
|
||||
learning_rate=0.700000
|
||||
connection_rate=1.000000
|
||||
network_type=0
|
||||
learning_momentum=0.000000
|
||||
training_algorithm=2
|
||||
train_error_function=1
|
||||
train_stop_function=0
|
||||
cascade_output_change_fraction=0.010000
|
||||
quickprop_decay=-0.000100
|
||||
quickprop_mu=1.750000
|
||||
rprop_increase_factor=1.200000
|
||||
rprop_decrease_factor=0.500000
|
||||
rprop_delta_min=0.000000
|
||||
rprop_delta_max=50.000000
|
||||
rprop_delta_zero=0.100000
|
||||
cascade_output_stagnation_epochs=12
|
||||
cascade_candidate_change_fraction=0.010000
|
||||
cascade_candidate_stagnation_epochs=12
|
||||
cascade_max_out_epochs=150
|
||||
cascade_min_out_epochs=50
|
||||
cascade_max_cand_epochs=150
|
||||
cascade_min_cand_epochs=50
|
||||
cascade_num_candidate_groups=2
|
||||
bit_fail_limit=3.49999999999999977796e-01
|
||||
cascade_candidate_limit=1.00000000000000000000e+03
|
||||
cascade_weight_multiplier=4.00000000000000022204e-01
|
||||
cascade_activation_functions_count=10
|
||||
cascade_activation_functions=3 5 7 8 10 11 14 15 16 17
|
||||
cascade_activation_steepnesses_count=4
|
||||
cascade_activation_steepnesses=2.50000000000000000000e-01 5.00000000000000000000e-01 7.50000000000000000000e-01 1.00000000000000000000e+00
|
||||
layer_sizes=3 4 2
|
||||
scale_included=0
|
||||
neurons (num_inputs, activation_function, activation_steepness)=(0, 0, 0.00000000000000000000e+00) (0, 0, 0.00000000000000000000e+00) (0, 0, 0.00000000000000000000e+00) (3, 8, 5.00000000000000000000e-01) (3, 8, 5.00000000000000000000e-01) (3, 8, 5.00000000000000000000e-01) (0, 8, 0.00000000000000000000e+00) (4, 8, 5.00000000000000000000e-01) (0, 8, 0.00000000000000000000e+00)
|
||||
connections (connected_to_neuron, weight)=(0, 2.80526386706506980495e-01) (1, -6.32915057544128911715e-01) (2, 2.43256971299247087837e-01) (0, 1.07956202553892222085e+00) (1, -1.57046491452944914080e+00) (2, 3.60570095466195006395e-01) (0, -3.03603668158796224930e+00) (1, 3.25717842790857625701e+00) (2, 3.21682713436011402663e-01) (3, -4.83919214899166116872e-01) (4, -2.23388962577329319714e+00) (5, -1.97655141071750106718e+00) (6, -5.13441786611898431936e-01)
|
36
lib/ann/tests/xor.test
Normal file
36
lib/ann/tests/xor.test
Normal file
@ -0,0 +1,36 @@
|
||||
FANN_FLO_2.1
|
||||
num_layers=3
|
||||
learning_rate=0.700000
|
||||
connection_rate=1.000000
|
||||
network_type=0
|
||||
learning_momentum=0.000000
|
||||
training_algorithm=2
|
||||
train_error_function=1
|
||||
train_stop_function=0
|
||||
cascade_output_change_fraction=0.010000
|
||||
quickprop_decay=-0.000100
|
||||
quickprop_mu=1.750000
|
||||
rprop_increase_factor=1.200000
|
||||
rprop_decrease_factor=0.500000
|
||||
rprop_delta_min=0.000000
|
||||
rprop_delta_max=50.000000
|
||||
rprop_delta_zero=0.100000
|
||||
cascade_output_stagnation_epochs=12
|
||||
cascade_candidate_change_fraction=0.010000
|
||||
cascade_candidate_stagnation_epochs=12
|
||||
cascade_max_out_epochs=150
|
||||
cascade_min_out_epochs=50
|
||||
cascade_max_cand_epochs=150
|
||||
cascade_min_cand_epochs=50
|
||||
cascade_num_candidate_groups=2
|
||||
bit_fail_limit=3.49999999999999977796e-01
|
||||
cascade_candidate_limit=1.00000000000000000000e+03
|
||||
cascade_weight_multiplier=4.00000000000000022204e-01
|
||||
cascade_activation_functions_count=10
|
||||
cascade_activation_functions=3 5 7 8 10 11 14 15 16 17
|
||||
cascade_activation_steepnesses_count=4
|
||||
cascade_activation_steepnesses=2.50000000000000000000e-01 5.00000000000000000000e-01 7.50000000000000000000e-01 1.00000000000000000000e+00
|
||||
layer_sizes=3 4 2
|
||||
scale_included=0
|
||||
neurons (num_inputs, activation_function, activation_steepness)=(0, 0, 0.00000000000000000000e+00) (0, 0, 0.00000000000000000000e+00) (0, 0, 0.00000000000000000000e+00) (3, 8, 5.00000000000000000000e-01) (3, 8, 5.00000000000000000000e-01) (3, 8, 5.00000000000000000000e-01) (0, 8, 0.00000000000000000000e+00) (4, 8, 5.00000000000000000000e-01) (0, 8, 0.00000000000000000000e+00)
|
||||
connections (connected_to_neuron, weight)=(0, 3.33489874939374764118e-01) (1, -3.63063380595726348243e-01) (2, 8.42711779204087380046e-02) (0, -1.04596746931312378770e+00) (1, 9.60473346716757214736e-01) (2, 7.86632414989053563303e-02) (0, 1.19807412288840020587e+00) (1, -9.30247929134354722969e-01) (2, 4.15395392027860382855e-02) (3, -6.28203027065782459992e-01) (4, -1.84799059663882059290e+00) (5, -1.74061332936564361340e+00) (6, -6.26874907503542244669e-01)
|
Reference in New Issue
Block a user