Skip to content

Commit

Permalink
Converted matrixMultiplyScalar and multiplyScalar to a multimethod. Moved the build properties into a separate file.
Browse files Browse the repository at this point in the history
  • Loading branch information
Jason Hurt authored and Jason Hurt committed Dec 4, 2009
1 parent 1358275 commit de5e2da
Show file tree
Hide file tree
Showing 7 changed files with 43 additions and 48 deletions.
13 changes: 2 additions & 11 deletions NeuralNetwork.iml
Original file line number Diff line number Diff line change
Expand Up @@ -12,25 +12,16 @@
<sourceFolder url="file://$MODULE_DIR$/test/src" isTestSource="true" />
</content>
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
<orderEntry type="module-library">
<library>
<CLASSES>
<root url="jar://$MODULE_DIR$/../../clojure/clojure.jar!/" />
</CLASSES>
<JAVADOC />
<SOURCES />
</library>
</orderEntry>
<orderEntry type="module-library">
<library>
<CLASSES>
<root url="jar://$MODULE_DIR$/lib/junit-4.7.jar!/" />
<root url="jar://$MODULE_DIR$/../../../Dev/clojure-read-only/clojure.jar!/" />
</CLASSES>
<JAVADOC />
<SOURCES />
</library>
</orderEntry>
<orderEntry type="sourceFolder" forTests="false" />
</component>
</module>

12 changes: 6 additions & 6 deletions NeuralNetwork.ipr
Original file line number Diff line number Diff line change
Expand Up @@ -90,10 +90,10 @@
</profile>
</profiles>
<list size="4">
<item index="0" class="java.lang.String" itemvalue="INFO" />
<item index="1" class="java.lang.String" itemvalue="WARNING" />
<item index="2" class="java.lang.String" itemvalue="ERROR" />
<item index="3" class="java.lang.String" itemvalue="SERVER PROBLEM" />
<item index="0" class="java.lang.String" itemvalue="SERVER PROBLEM" />
<item index="1" class="java.lang.String" itemvalue="INFO" />
<item index="2" class="java.lang.String" itemvalue="WARNING" />
<item index="3" class="java.lang.String" itemvalue="ERROR" />
</list>
</component>
<component name="JavacSettings">
Expand Down Expand Up @@ -254,14 +254,14 @@
</component>
<component name="ProjectFileVersion" converted="true" />
<component name="ProjectKey">
<option name="state" value="project:///root/workspace/NeuralNetwork/NeuralNetwork.ipr" />
<option name="state" value="project:///Users/jleehurt/Documents/workspace/NeuralNetwork/NeuralNetwork.ipr" />
</component>
<component name="ProjectModuleManager">
<modules>
<module fileurl="file://$PROJECT_DIR$/NeuralNetwork.iml" filepath="$PROJECT_DIR$/NeuralNetwork.iml" />
</modules>
</component>
<component name="ProjectRootManager" version="2" languageLevel="JDK_1_6" assert-keyword="true" jdk-15="true" project-jdk-name="1.6 (3)" project-jdk-type="JavaSDK">
<component name="ProjectRootManager" version="2" languageLevel="JDK_1_6" assert-keyword="true" jdk-15="true" project-jdk-name="1.6" project-jdk-type="JavaSDK">
<output url="file://$PROJECT_DIR$/out" />
</component>
<component name="ResourceManagerContainer">
Expand Down
15 changes: 3 additions & 12 deletions build.xml
Original file line number Diff line number Diff line change
@@ -1,14 +1,6 @@
<?xml version="1.0" ?>
<project name="NeuralNetwork" default="build">
<!-- this should point to the junit library -->
<property name="junit.lib.dir" location="/root/workspace/NeuralNetwork/lib"/>

<!--this should point to the location of the JOGL jars and the native OpenGL
libraries for your platform (.so for Linux, .dll for windows)-->
<property name="jogl.lib.dir" location="/root/jogl-1.1.1-linux-i586/lib"/>

<!--this should point to the location of the Clojure library -->
<property name="clojure.lib.dir" location="/root/clojure"/>
<property file="build.properties" />

<property name="bin.dir" location="bin"/>
<property name="test.bin.dir" location="test-bin"/>
Expand All @@ -35,9 +27,8 @@
<fileset dir="${clojure.lib.dir}">
<include name="clojure.jar"/>
</fileset>
<fileset dir="${bin.dir}">
<include name="**/*.class"/>
</fileset>
<pathelement path="${bin.dir}" />

</path>

<target name="init">
Expand Down
41 changes: 27 additions & 14 deletions src/com/jhurt/Math.clj
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,6 @@

(ns com.jhurt.Math)


;; Lazy, infinite sequence of pseudo-random doubles produced by
;; repeatedly calling rand. Realized elements are cached, so a given
;; position always yields the same value once computed.
(def randomNumbers (repeatedly rand))

;; Matrix Functions
Expand Down Expand Up @@ -44,12 +43,6 @@
(matrixSubtract (rest matrixA) (rest matrixB))
(map - (first matrixA) (first matrixB)))))

;; Multiply every element of matrixA (a sequence of row sequences)
;; by scalar, returning the scaled rows in the original order.
;; Recurses row-by-row; conj prepends the current scaled row onto the
;; (list) result of the recursive call, which preserves row order.
;; Returns nil when called with an empty matrix (the `if` has no
;; else branch).
(defn matrixMultiplyScalar [matrixA scalar]
(if (seq matrixA)
(conj
(matrixMultiplyScalar (rest matrixA) scalar)
(map (fn [arg] (* arg scalar)) (first matrixA)))))

;; True when x and y have the same length and all corresponding
;; elements are equal.
;; Fixes two defects in the original reduce-based version:
;; - empty inputs threw ArityException (reduce with no init value
;;   calls the two-arg fn with zero args on an empty collection)
;; - sequences of different lengths compared equal, because map
;;   stops at the shorter input
(defn areListsEqual [x y]
  (and (= (count x) (count y))
       (every? true? (map = x y))))

Expand All @@ -59,15 +52,9 @@
(if (not (nil? v))
(transposeMatrix (vector v))))

;; Scale each element of vector v by scalar, returning a lazy seq.
(defn vectorMultiplyScalar [v scalar]
  (map (fn [element] (* element scalar)) v))

;; Boxes each element of array in a single-element list:
;; (transposeArray [1 2 3]) => ((1) (2) (3)).
;; NOTE(review): despite the name, this is not a matrix transpose.
;; map calls (fn [& column] column) with exactly one argument per
;; element, so each element is merely wrapped in a list. Callers such
;; as arrayTransposeByAnother immediately unwrap with `first`, making
;; the wrap/unwrap a no-op -- confirm the intended semantics before
;; relying on the name.
(defn transposeArray [array]
(map (fn [& column] column) array))

;; Multiply every element of array by scalar, returning a lazy seq.
(defn multiplyScalar [array scalar]
  (map (fn [x] (* scalar x)) array))

;; Dot product of x and y: the sum of pairwise products.
;; The original routed x through transposeArray and immediately
;; unwrapped each boxed element with `first`; that round trip is the
;; identity, so it is inlined away here -- same result for every
;; input.
(defn arrayTransposeByAnother [x y]
  (reduce + (map * x y)))

Expand Down Expand Up @@ -106,7 +93,33 @@
of vectorA[i] * vectorB[j]"
(map (fn [x] (map (fn [y] (* x y)) vectorB)) vectorA))

;; Functions shared between Matrix and Vector
;; getArityMulti computes the nesting depth of its second argument:
;; 0 for a scalar, 1 for a flat seq/vector, 2 for a seq of seqs, ...
;; Dispatches on (class x) and recurses into (first x), incrementing
;; the accumulator a at each level.
(defmulti getArityMulti (fn [a x] (class x)))

(defmethod getArityMulti clojure.lang.ISeq [a x]
(getArityMulti (inc a) (first x)))

(defmethod getArityMulti clojure.lang.IPersistentVector [a x]
(getArityMulti (inc a) (first x)))

;; Anything that is neither a seq nor a vector (including nil from
;; (first empty)) terminates the recursion at the accumulated depth.
(defmethod getArityMulti :default [a x] a)

;; Dispatch adapter for multiplyScalar: ignores the scalar argument
;; and returns the nesting depth of x.
(defn getArity [x dummy] (getArityMulti 0 x))

;; multiplyScalar dispatches on the nesting depth of its first
;; argument: 2 => matrix, 1 => vector, anything else => scalar.
(defmulti multiplyScalar getArity)

;; Matrix case: scale each row. conj prepends the current scaled row
;; onto the result of the recursive call, preserving row order. The
;; recursive call re-dispatches, so an empty tail lands in the
;; depth-1 method; the (seq matrixA) guard only matters for a direct
;; call with an empty matrix, which returns nil (no else branch).
(defmethod multiplyScalar 2 [matrixA scalar]
(if (seq matrixA)
(conj
(multiplyScalar (rest matrixA) scalar)
(map (fn [arg] (* arg scalar)) (first matrixA)))))

;; Vector case: pairwise multiply against the scalar repeated to the
;; vector's length.
(defmethod multiplyScalar 1 [array scalar]
(map * (repeat (count array) scalar) array))

;; Scalar case: plain numeric multiplication.
;; NOTE(review): input nested 3+ levels deep also dispatches here and
;; will throw, since * does not accept sequences -- confirm that
;; depth > 2 is out of scope.
(defmethod multiplyScalar :default [x scalar]
(* x scalar))

;TODO put this somewhere else
;; For each column of weight matrix w (rows of (transposeMatrix w))
;; paired with the corresponding element of input vector i, return
;; the sum of that column's weights each scaled by the input value.
(defn weightsByInput [w i]
  (map (fn [column input]
         (reduce + (map (fn [weight] (* input weight)) column)))
    (transposeMatrix w) i))

4 changes: 2 additions & 2 deletions src/com/jhurt/nn/BackPropagation.clj
Original file line number Diff line number Diff line change
Expand Up @@ -83,8 +83,8 @@
(if (= (count nodeOutputs) (count deltas))
deltas
(let [delta (if (= 0 (count deltas))
(matrixMultiplyScalar (makeMatrix extendedInput (nth errors errorIndex)) gamma)
(matrixMultiplyScalar
(multiplyScalar (makeMatrix extendedInput (nth errors errorIndex)) gamma)
(multiplyScalar
(makeMatrix (nth nodeOutputs nodeValueIndex) (nth errors errorIndex)) gamma))]
(recur (dec errorIndex) (inc nodeValueIndex) (concat deltas (list delta))))))))

Expand Down
4 changes: 2 additions & 2 deletions src/com/jhurt/nn/PerceptronHaykin.clj
Original file line number Diff line number Diff line change
Expand Up @@ -53,15 +53,15 @@
;return an updated weight vector of the perceptron
(defn getAdaptedWeightVector [weights inputs desiredResponse actualResponse]
(let [etaDeltaDesiredActual (* learningRateParameter (- desiredResponse actualResponse))]
(println "adapted weight vector" (matrixAdd weights (matrixMultiplyScalar inputs etaDeltaDesiredActual)))
(println "adapted weight vector" (matrixAdd weights (multiplyScalar inputs etaDeltaDesiredActual)))
(println "desiredResponse" desiredResponse)
(println "actualResponse" actualResponse)
(println "learningRateParameter" learningRateParameter)
(println "etaDeltaDesiredActual" etaDeltaDesiredActual)
(println "weights" weights)
(println "inputs" inputs "\n")
(if (not (= 0.0 etaDeltaDesiredActual))
(cons 1 (rest (matrixAdd weights (matrixMultiplyScalar inputs etaDeltaDesiredActual))))
(cons 1 (rest (matrixAdd weights (multiplyScalar inputs etaDeltaDesiredActual))))
weights)))

;train the perceptron with the inputs and corresponding known outputs
Expand Down
2 changes: 1 addition & 1 deletion test/src/com/jhurt/MathTest.clj
Original file line number Diff line number Diff line change
Expand Up @@ -53,7 +53,7 @@

(defn -testMatrixMultiplyScalar [_]
(Assert/assertTrue (areListsEqual [[3 3 3] [3 3 3] [3 3 3]]
(matrixMultiplyScalar [[1 1 1] [1 1 1] [1 1 1]] 3))))
(multiplyScalar [[1 1 1] [1 1 1] [1 1 1]] 3))))

(defn -testTransposeMatrix [_]
(Assert/assertEquals [[1 3] [2 4]] (transposeMatrix [[1 2] [3 4]]))
Expand Down

0 comments on commit de5e2da

Please sign in to comment.