;;;; PERCEPTRON LEARNING EXAMPLE
;;; This is a simple perceptron learner written to illustrate the
;;; concepts in connectionist learning.
;;; by Nilufer Onder for CS4811
;;; Last modification: April 4, 2006
;;;
;;; To run the program:
;;;  - Set values for the initial weights   (look for CHANGE WEIGHTS)
;;;  - Set the learning constant            (look for CHANGE LEARNING CONSTANT)
;;;  - Set the example set or code a new one (look for CHANGE EXAMPLES)
;;;  - Run lisp and enter (train-it)
;;; If you don't enter an iteration limit, it is set to 999.
;;; Passes are made over the examples until the weights stop changing
;;; (convergence) or the iteration limit is reached, whichever comes first.
;;; To enter an iteration limit of 100 say: (train-it 100)

(defvar weights '(0 0 0))          ; Initial weights
(defvar learning-constant 0.5)     ; The multiplier
(defvar positive-output 1)         ; Most commonly used
(defvar negative-output 0)         ; Sometimes people use -1

(defvar examples1                  ; Simple logical or
  '((-1 0 0 0)
    (-1 0 1 1)
    (-1 1 0 1)
    (-1 1 1 1)))

(defvar examples2                  ; The points in Luger's textbook.
  '((-1 1.0 1.0 1)                 ; The bias and output are different.
    (-1 9.4 6.4 0)
    (-1 2.5 2.1 1)
    (-1 8.0 7.7 0)
    (-1 0.5 2.2 1)
    (-1 7.9 8.4 0)
    (-1 7.0 7.0 0)
    (-1 2.8 0.8 1)
    (-1 1.2 3.0 1)
    (-1 7.8 6.1 0)))
    ;; (-1 3.0 3.0 0)

;; Internal bookkeeping variables.  All globals used with SETF are
;; declared special here; the original relied on implicit globals,
;; which is undefined behavior in Common Lisp.
(defvar not-converged t)           ; T while the last pass changed weights
(defvar iteration-count 0)         ; Current pass number
(defvar examples nil)              ; The training set in use
(defvar threshold 0)               ; Activation threshold (fixed at 0)
(defvar num-inputs 0)              ; Inputs per example, incl. the bias

;;;; TRAIN-IT is the main function.
;;; It does the initializations and runs the loop of iterations,
;;; making passes over EXAMPLES until the weights stop changing or
;;; MAX-ITERATIONS passes have been made.
(defun train-it (&optional (max-iterations 999))
  ;; The changeable aspects are below:
  ;; CHANGE WEIGHTS if desired.  Note: use LIST rather than a quoted
  ;; literal -- the weights are destructively updated with
  ;; (SETF (NTH ...)), and modifying a quoted constant list is
  ;; undefined behavior.
  (setf weights (list 0 0 0))
  ;; (setf weights (list 0.75 0.5 -0.6))
  ;; CHANGE LEARNING CONSTANT if desired.
  (setf learning-constant 0.5)
  ;; CHANGE EXAMPLES if desired.
  (setf examples examples1)
  ;; This is really not meant to be changed because we are using the
  ;; threshold.
  (setf threshold 0)
  ;; Output useful information.
  (format t "Iteration limit is ~S~%" max-iterations)
  (format t "Learning constant is ~S~%" learning-constant)
  (format t "Initial weights vector is ~S~%" weights)
  ;; Can output initial X and Y intercepts too.
  ;; Set the control variables.
  (setf iteration-count 1)
  (setf not-converged t)
  ;; Compute the number of inputs in the examples.
  ;; The first one is the bias, it IS included in the count.
  ;; The last one is the output, it IS NOT included in the count.
  (setf num-inputs (- (length (first examples)) 1))
  (loop while not-converged
        do (cond ((> iteration-count max-iterations)
                  ;; Still not converged: stop and say so.  (The
                  ;; original fell through and printed the "Converged"
                  ;; message even in this case.)
                  (format t "Iteration limit exceeded (limit = ~S).~%"
                          max-iterations)
                  (loop-finish))
                 (t
                  ;; TRAIN-ONCE sets NOT-CONVERGED back to T whenever
                  ;; it updates the weights.
                  (setf not-converged nil)
                  (format t "~%~%DOING ITERATION ~S~%" iteration-count)
                  (train-once)
                  (incf iteration-count))))
  (unless not-converged
    ;; -2 because the count was incremented past the final pass, and
    ;; the final pass made no changes (it only confirmed convergence).
    (format t "Converged after ~S iterations.~%" (- iteration-count 2))))

;;;; TRAIN-ONCE makes one pass over EXAMPLES, updating WEIGHTS by the
;;;; perceptron rule whenever the computed output disagrees with the
;;;; desired output.  Sets NOT-CONVERGED when any weight changes.
(defun train-once ()
  (dolist (x1 examples)
    ;; First train the perceptron on this example.
    (format t "~%Doing example ~S~%~%" x1)
    ;; Add the products, including the bias, i.e., compute the sum.
    (let ((sum 0))
      (dotimes (i1 num-inputs)
        (incf sum (* (nth i1 weights) (nth i1 x1))))
      (format t " Sum is ~S.~%" sum)
      ;; f-of-x is the output: if the sum is greater than the
      ;; threshold, the output is positive; otherwise (less or equal),
      ;; it is negative.  The desired output is the example's last
      ;; element.
      (let ((f-of-x (if (> sum threshold) positive-output negative-output))
            (desired (nth num-inputs x1)))
        (format t " Output is ~S, desired output is ~S~%" f-of-x desired)
        ;; If the result is correct, do nothing (go on with the next
        ;; example).  If it is incorrect, update the weights.
        (if (equal f-of-x desired)
            (format t "~% Results equal, do nothing.~%")
            ;; else
            (progn
              ;; Will do another iteration because the weights were
              ;; updated in this iteration.
              (setq not-converged t)
              ;; Perceptron rule: w_i += c * (desired - actual) * x_i.
              (dotimes (i1 num-inputs)
                (setf (nth i1 weights)
                      (+ (nth i1 weights)
                         (* learning-constant (- desired f-of-x) (nth i1 x1)))))
              (if (equal desired 1)
                  (progn
                    (format t "~% The output should have been 1.")
                    (format t "~% Add the scaled inputs to the weights.~%"))
                  (progn
                    (format t "~% The output should have been 0.")
                    (format t "~% Subtract the scaled inputs from the weights.~%")))
              (format t "~% New weights is ~S~%" weights)
              ;; Output the X and Y intercepts of the line learned:
              ;; w1*x + w2*y = w0 (the bias input is -1).
              ;; Check for divide-by-zero before dividing; ZEROP also
              ;; catches float zeros, which (EQUAL ... 0) did not.
              (unless (or (zerop (third weights))
                          (zerop (second weights)))
                (format t " Y intercept is ~S, X intercept is ~S~%"
                        (/ (first weights) (third weights))
                        (/ (first weights) (second weights))))))))))