## ~dieggsy/chicken-genann

19916f54cb9d0fd4ebb8968002a77330e5eabb53 — dieggsy 4 years ago
```Add comments to scheme examples
```
```4 files changed, 18 insertions(+), 2 deletions(-)

M example1.scm
M example2.scm
M example3.scm
M example4.scm
```
`M example1.scm => example1.scm +5 -2`
```@@ 5,12 5,15 @@

(printf "Genann example 1.~n")
(printf "Train a small ANN to the XOR function using backpropagation.~n")
-(define inputs #(#f64(0 0) #f64(0 1) #f64(1 0) #f64(1 1)))

+;; Input and expected output data for the XOR function.
+(define inputs #(#f64(0 0) #f64(0 1) #f64(1 0) #f64(1 1)))
(define outputs #(#f64(0) #f64(1) #f64(1) #f64(0)))

+;; New network with 2 inputs, 1 hidden layer of 2 neurons, and 1 output.
(define ann (make-genann 2 1 2 1))

+;; Train on the four labeled data points many times.
((= i 300))
(genann-train ann (vector-ref inputs 0) (vector-ref outputs 0) 3)

@@ 18,7 21,7 @@
(genann-train ann (vector-ref inputs 2) (vector-ref outputs 2) 3)
(genann-train ann (vector-ref inputs 3) (vector-ref outputs 3) 3))

-
+;; Run the network and see what it predicts.
((= i 4))
(printf "Output for ~a is ~a~n"

```
`M example2.scm => example2.scm +8 -0`
```@@ 8,8 8,10 @@
(printf "Genann example 1.~n")
(printf "Train a small ANN to the XOR function using random search.~n");

+;; Input and expected output data for the XOR function.
(define inputs #(#f64(0 0) #f64(0 1) #f64(1 0) #f64(1 1)))

+;; New network with 2 inputs, 1 hidden layer of 2 neurons, and 1 output.
(define outputs #(#f64(0) #f64(1) #f64(1) #f64(0)))

(let loop ((ann (make-genann 2 1 2 1))

@@ 17,12 19,16 @@
(count 1))
(let ((save (genann-copy* ann)))
(when (= 0 (modulo count 1000))
+      ;; We're stuck, start over.
(genann-randomize ann)
(set! last-err 1000.0))
+
+    ;; Take a random guess at the ANN weights.
((= i (genann-total-weights ann)))
(set! (genann-weight-ref ann i) (+ (genann-weight-ref ann i)
(- (pseudo-random-real) .5))))
+    ;; See how we did
(let ((err
(apply + (list-tabulate
4

@@ 34,6 40,7 @@

(cond ((<= err 0.01)
(printf "Finished in ~a loops~n" count)
+             ;; Run the network and see what it predicts.
((= i 4))
(printf "Output for ~a is ~a~n"

@@ 41,6 48,7 @@
(inexact->exact
(round
(f64vector-ref (genann-run ann (vector-ref inputs i)) 0))))))
+            ;; Keep these weights if they're an improvement.
((< err last-err)
(else

```
`M example3.scm => example3.scm +2 -0`
```@@ 11,8 11,10 @@
(define ann (call-with-input-file save-name

+;; Input data for the XOR function.
(define inputs #(#f64(0 0) #f64(0 1) #f64(1 0) #f64(1 1)))

+;; Run the network and see what it predicts.
((= i 4))
(printf "Output for ~a is ~a~n"

```
`M example4.scm => example4.scm +3 -0`
```@@ 33,16 33,19 @@
inputs)
(cons (alist-ref (car clist) class-names string=?) class))))))))

(printf "Loaded ~a data points from ~a~n" samples iris-data)

+;; 4 inputs. 1 hidden layer(s) of 4 neurons. 3 outputs (1 per class)
(define ann (make-genann 4 1 4 3))

(define loops 5000)

(printf "Training for ~a loops over data.\n" loops)

+;; Train the network with backpropagation.