From f50c5c60e1e6eb35ce0562dad905d2bd9e3e5e4c Mon Sep 17 00:00:00 2001
From: Jeffrey Phillips Freeman <jeffrey.freeman@syncleus.com>
Date: Wed, 12 Nov 2014 12:42:23 -0500
Subject: [PATCH] Removed the neural package; it will be moved to its own
 project.

Issue: GRL-16
Change-Id: I0e9e8dd963fff78dbf0bcd2d65b9d9bf29d66b21
---
 .gitignore                                    |   1 +
 .../grail/graph/GrailGraphFactory.java        |   7 +-
 .../neural/AbstractActivationNeuron.java      |  64 -----
 .../grail/neural/ActivationNeuron.java        |  80 -------
 .../neural/activation/ActivationFunction.java |  47 ----
 .../activation/GausianActivationFunction.java |  70 ------
 .../HyperbolicSecantActivationFunction.java   |  72 ------
 .../HyperbolicTangentActivationFunction.java  |  66 ------
 .../IdentityActivationFunction.java           |  70 ------
 .../activation/SineActivationFunction.java    |  63 -----
 .../backprop/AbstractBackpropNeuron.java      |  49 ----
 .../grail/neural/backprop/BackpropNeuron.java |  80 -------
 .../neural/backprop/BackpropSynapse.java      |  32 ---
 .../syncleus/grail/neural/package-info.java   |  19 --
 .../AbstractPrioritySerialTriggerTest.java    |   2 -
 .../neural/AbstractActivationNeuronTest.java  |  68 ------
 .../activation/ActivationBoundsTest.java      |  77 ------
 .../activation/ActivationValuesTest.java      | 148 ------------
 .../BadAccessActivationFunction.java          |  49 ----
 .../GausianActivationFunctionTest.java        |  86 -------
 ...yperbolicSecantActivationFunctionTest.java |  82 -------
 ...perbolicTangentActivationFunctionTest.java |  82 -------
 .../IdentityActivationFunctionTest.java       |  82 -------
 ...oDefaultConstructorActivationFunction.java |  49 ----
 .../SineActivationFunctionTest.java           |  82 -------
 .../backprop/ActionTriggerXor3InputTest.java  | 219 ------------------
 .../grail/neural/backprop/SimpleOrTest.java   | 122 ----------
 .../neural/backprop/SimpleXor2InputTest.java  | 161 -------------
 .../neural/backprop/SimpleXor3InputTest.java  | 171 --------------
 29 files changed, 2 insertions(+), 2198 deletions(-)
 delete mode 100644 src/main/java/com/syncleus/grail/neural/AbstractActivationNeuron.java
 delete mode 100644 src/main/java/com/syncleus/grail/neural/ActivationNeuron.java
 delete mode 100644 src/main/java/com/syncleus/grail/neural/activation/ActivationFunction.java
 delete mode 100644 src/main/java/com/syncleus/grail/neural/activation/GausianActivationFunction.java
 delete mode 100644 src/main/java/com/syncleus/grail/neural/activation/HyperbolicSecantActivationFunction.java
 delete mode 100644 src/main/java/com/syncleus/grail/neural/activation/HyperbolicTangentActivationFunction.java
 delete mode 100644 src/main/java/com/syncleus/grail/neural/activation/IdentityActivationFunction.java
 delete mode 100644 src/main/java/com/syncleus/grail/neural/activation/SineActivationFunction.java
 delete mode 100644 src/main/java/com/syncleus/grail/neural/backprop/AbstractBackpropNeuron.java
 delete mode 100644 src/main/java/com/syncleus/grail/neural/backprop/BackpropNeuron.java
 delete mode 100644 src/main/java/com/syncleus/grail/neural/backprop/BackpropSynapse.java
 delete mode 100644 src/main/java/com/syncleus/grail/neural/package-info.java
 delete mode 100644 src/test/java/com/syncleus/grail/neural/AbstractActivationNeuronTest.java
 delete mode 100644 src/test/java/com/syncleus/grail/neural/activation/ActivationBoundsTest.java
 delete mode 100644 src/test/java/com/syncleus/grail/neural/activation/ActivationValuesTest.java
 delete mode 100644 src/test/java/com/syncleus/grail/neural/activation/BadAccessActivationFunction.java
 delete mode 100644 src/test/java/com/syncleus/grail/neural/activation/GausianActivationFunctionTest.java
 delete mode 100644 src/test/java/com/syncleus/grail/neural/activation/HyperbolicSecantActivationFunctionTest.java
 delete mode 100644 src/test/java/com/syncleus/grail/neural/activation/HyperbolicTangentActivationFunctionTest.java
 delete mode 100644 src/test/java/com/syncleus/grail/neural/activation/IdentityActivationFunctionTest.java
 delete mode 100644 src/test/java/com/syncleus/grail/neural/activation/NoDefaultConstructorActivationFunction.java
 delete mode 100644 src/test/java/com/syncleus/grail/neural/activation/SineActivationFunctionTest.java
 delete mode 100644 src/test/java/com/syncleus/grail/neural/backprop/ActionTriggerXor3InputTest.java
 delete mode 100644 src/test/java/com/syncleus/grail/neural/backprop/SimpleOrTest.java
 delete mode 100644 src/test/java/com/syncleus/grail/neural/backprop/SimpleXor2InputTest.java
 delete mode 100644 src/test/java/com/syncleus/grail/neural/backprop/SimpleXor3InputTest.java

diff --git a/.gitignore b/.gitignore
index 2f7896d..e08c794 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1 +1,2 @@
 target/
+.DS_Store
diff --git a/src/main/java/com/syncleus/grail/graph/GrailGraphFactory.java b/src/main/java/com/syncleus/grail/graph/GrailGraphFactory.java
index 20bcfc7..46223b0 100644
--- a/src/main/java/com/syncleus/grail/graph/GrailGraphFactory.java
+++ b/src/main/java/com/syncleus/grail/graph/GrailGraphFactory.java
@@ -19,8 +19,6 @@
 package com.syncleus.grail.graph;
 
 import com.syncleus.grail.graph.action.*;
-import com.syncleus.grail.neural.ActivationNeuron;
-import com.syncleus.grail.neural.backprop.*;
 import com.tinkerpop.frames.FramedGraphFactory;
 import com.tinkerpop.frames.modules.Module;
 import com.tinkerpop.frames.modules.gremlingroovy.GremlinGroovyModule;
@@ -32,12 +30,9 @@ import java.util.*;
 public class GrailGraphFactory extends FramedGraphFactory {
     private static final Set<Class<?>> BUILT_IN_TYPES = new HashSet<Class<?>>(Arrays.asList(new Class<?>[]{
                                                                           SignalMultiplyingEdge.class,
-                                                                          BackpropNeuron.class,
-                                                                          BackpropSynapse.class,
                                                                           PrioritySerialTrigger.class,
                                                                           ActionTriggerEdge.class,
-                                                                          PrioritySerialTriggerEdge.class,
-                                                                          ActivationNeuron.class}));
+                                                                          PrioritySerialTriggerEdge.class}));
 
     public GrailGraphFactory() {
         super(GrailGraphFactory.constructModules(Collections.<Module>emptySet()));
diff --git a/src/main/java/com/syncleus/grail/neural/AbstractActivationNeuron.java b/src/main/java/com/syncleus/grail/neural/AbstractActivationNeuron.java
deleted file mode 100644
index 54d97e8..0000000
--- a/src/main/java/com/syncleus/grail/neural/AbstractActivationNeuron.java
+++ /dev/null
@@ -1,64 +0,0 @@
-/******************************************************************************
- *                                                                             *
- *  Copyright: (c) Syncleus, Inc.                                              *
- *                                                                             *
- *  You may redistribute and modify this source code under the terms and       *
- *  conditions of the Open Source Community License - Type C version 1.0       *
- *  or any later version as published by Syncleus, Inc. at www.syncleus.com.   *
- *  There should be a copy of the license included with this file. If a copy   *
- *  of the license is not included you are granted no right to distribute or   *
- *  otherwise use this file except through a legal and valid license. You      *
- *  should also contact Syncleus, Inc. at the information below if you cannot  *
- *  find a license:                                                            *
- *                                                                             *
- *  Syncleus, Inc.                                                             *
- *  2604 South 12th Street                                                     *
- *  Philadelphia, PA 19148                                                     *
- *                                                                             *
- ******************************************************************************/
-package com.syncleus.grail.neural;
-
-import com.syncleus.grail.neural.activation.*;
-import com.syncleus.grail.graph.*;
-import com.syncleus.grail.neural.backprop.*;
-import com.tinkerpop.frames.modules.javahandler.Initializer;
-
-public abstract class AbstractActivationNeuron implements ActivationNeuron {
-
-    private ActivationFunction activationFunction;
-
-    @Initializer
-    public void init() {
-        this.setActivationFunctionClass(HyperbolicTangentActivationFunction.class);
-        this.setActivity(0.0);
-    }
-
-    protected ActivationFunction getActivationFunction() {
-        final Class<? extends ActivationFunction> activationClass = this.getActivationFunctionClass();
-        if( (this.activationFunction != null) && (this.activationFunction.getClass().equals(activationClass)) )
-            return this.activationFunction;
-
-        this.activationFunction = null;
-        try {
-            this.activationFunction = activationClass.newInstance();
-        }
-        catch( final InstantiationException caughtException ) {
-            throw new IllegalStateException("activation function does not have a public default constructor", caughtException);
-        }
-        catch( final IllegalAccessException caughtException ) {
-            throw new IllegalStateException("activation function does not have a public default constructor", caughtException);
-        }
-
-        return this.activationFunction;
-    }
-
-    @Override
-    public void propagate() {
-        this.setActivity(0.0);
-        for (final SignalMultiplyingEdge currentSynapse : this.getSourceEdges(BackpropSynapse.class)) {
-            currentSynapse.propagate();
-            this.setActivity(this.getActivity() + currentSynapse.getSignal());
-        }
-        this.setSignal( this.getActivationFunction().activate(this.getActivity()) );
-    }
-}
diff --git a/src/main/java/com/syncleus/grail/neural/ActivationNeuron.java b/src/main/java/com/syncleus/grail/neural/ActivationNeuron.java
deleted file mode 100644
index 89b30f5..0000000
--- a/src/main/java/com/syncleus/grail/neural/ActivationNeuron.java
+++ /dev/null
@@ -1,80 +0,0 @@
-/******************************************************************************
- *                                                                             *
- *  Copyright: (c) Syncleus, Inc.                                              *
- *                                                                             *
- *  You may redistribute and modify this source code under the terms and       *
- *  conditions of the Open Source Community License - Type C version 1.0       *
- *  or any later version as published by Syncleus, Inc. at www.syncleus.com.   *
- *  There should be a copy of the license included with this file. If a copy   *
- *  of the license is not included you are granted no right to distribute or   *
- *  otherwise use this file except through a legal and valid license. You      *
- *  should also contact Syncleus, Inc. at the information below if you cannot  *
- *  find a license:                                                            *
- *                                                                             *
- *  Syncleus, Inc.                                                             *
- *  2604 South 12th Street                                                     *
- *  Philadelphia, PA 19148                                                     *
- *                                                                             *
- ******************************************************************************/
-package com.syncleus.grail.neural;
-
-import com.syncleus.grail.neural.activation.ActivationFunction;
-import com.syncleus.grail.graph.*;
-import com.syncleus.grail.graph.action.Action;
-import com.tinkerpop.blueprints.Direction;
-import com.tinkerpop.frames.*;
-import com.tinkerpop.frames.modules.javahandler.*;
-import com.tinkerpop.frames.modules.typedgraph.*;
-
-@TypeValue("ActivationNeuron")
-@JavaHandlerClass(AbstractActivationNeuron.class)
-public interface ActivationNeuron extends SignalNode {
-    @JavaHandler
-    @Action("propagate")
-    void propagate();
-
-    @Property("activity")
-    Double getActivity();
-
-    @Property("activity")
-    void setActivity(double activity);
-
-    @Property("activationFunction")
-    Class<? extends ActivationFunction> getActivationFunctionClass();
-
-    @Property("activationFunction")
-    void setActivationFunctionClass(Class<? extends ActivationFunction> activationFunctionClass);
-
-    @Adjacency(label="signals", direction= Direction.IN)
-    Iterable<? extends Node> getSources();
-
-    @TypedAdjacency(label="signals", direction=Direction.IN)
-    <N extends Node> Iterable<? extends N> getSources(Class<? extends N> type);
-
-    @Adjacency(label="signals", direction=Direction.IN)
-    void setSources(Iterable<? extends SignalNode> targets);
-
-    @Adjacency(label="signals", direction=Direction.IN)
-    void removeSource(SignalNode target);
-
-    @Adjacency(label="signals", direction=Direction.IN)
-    <N extends SignalNode> N addSource(N target);
-
-    @Adjacency(label="signals", direction=Direction.IN)
-    Signaler addSource();
-
-    @TypedAdjacency(label="signals", direction=Direction.IN)
-    <N extends SignalNode> N addSource(Class<? extends N> type);
-
-    @Incidence(label = "signals", direction=Direction.IN)
-    Iterable<? extends SignalMultiplyingEdge> getSourceEdges();
-
-    @TypedIncidence(label="signals", direction=Direction.IN)
-    <E extends SignalMultiplyingEdge> Iterable<? extends E> getSourceEdges(Class<? extends E> type);
-
-    @Incidence(label = "signals", direction=Direction.IN)
-    <E extends SignalMultiplyingEdge> E addSourceEdge(SignalMultiplyingEdge target);
-
-    @Incidence(label = "signals", direction=Direction.IN)
-    void removeSourceEdge(SignalMultiplyingEdge source);
-}
diff --git a/src/main/java/com/syncleus/grail/neural/activation/ActivationFunction.java b/src/main/java/com/syncleus/grail/neural/activation/ActivationFunction.java
deleted file mode 100644
index 960d66c..0000000
--- a/src/main/java/com/syncleus/grail/neural/activation/ActivationFunction.java
+++ /dev/null
@@ -1,47 +0,0 @@
-/******************************************************************************
- *                                                                             *
- *  Copyright: (c) Syncleus, Inc.                                              *
- *                                                                             *
- *  You may redistribute and modify this source code under the terms and       *
- *  conditions of the Open Source Community License - Type C version 1.0       *
- *  or any later version as published by Syncleus, Inc. at www.syncleus.com.   *
- *  There should be a copy of the license included with this file. If a copy   *
- *  of the license is not included you are granted no right to distribute or   *
- *  otherwise use this file except through a legal and valid license. You      *
- *  should also contact Syncleus, Inc. at the information below if you cannot  *
- *  find a license:                                                            *
- *                                                                             *
- *  Syncleus, Inc.                                                             *
- *  2604 South 12th Street                                                     *
- *  Philadelphia, PA 19148                                                     *
- *                                                                             *
- ******************************************************************************/
-package com.syncleus.grail.neural.activation;
-
-public interface ActivationFunction {
-    /**
-     * The activation function.
-     *
-     * @param activity the neuron's current activity.
-     * @return The result of the activation function. Usually a bound value
-     * between 1 and -1 or 1 and 0. However this bound range is not
-     * required.
-     * @since 1.0
-     */
-    double activate(double activity);
-
-    /**
-     * The derivative of the activation function.
-     *
-     * @param activity The neuron's current activity.
-     * @return The result of the derivative of the activation function.
-     * @since 1.0
-     */
-    double activateDerivative(double activity);
-
-    boolean isBound();
-
-    double getUpperLimit();
-
-    double getLowerLimit();
-}
diff --git a/src/main/java/com/syncleus/grail/neural/activation/GausianActivationFunction.java b/src/main/java/com/syncleus/grail/neural/activation/GausianActivationFunction.java
deleted file mode 100644
index 204f191..0000000
--- a/src/main/java/com/syncleus/grail/neural/activation/GausianActivationFunction.java
+++ /dev/null
@@ -1,70 +0,0 @@
-/******************************************************************************
- *                                                                             *
- *  Copyright: (c) Syncleus, Inc.                                              *
- *                                                                             *
- *  You may redistribute and modify this source code under the terms and       *
- *  conditions of the Open Source Community License - Type C version 1.0       *
- *  or any later version as published by Syncleus, Inc. at www.syncleus.com.   *
- *  There should be a copy of the license included with this file. If a copy   *
- *  of the license is not included you are granted no right to distribute or   *
- *  otherwise use this file except through a legal and valid license. You      *
- *  should also contact Syncleus, Inc. at the information below if you cannot  *
- *  find a license:                                                            *
- *                                                                             *
- *  Syncleus, Inc.                                                             *
- *  2604 South 12th Street                                                     *
- *  Philadelphia, PA 19148                                                     *
- *                                                                             *
- ******************************************************************************/
-package com.syncleus.grail.neural.activation;
-
-/**
- * An implementation of an activation function using a gausian function.
- *
- * @author Jeffrey Phillips Freeman
- * @since 1.0
- */
-public class GausianActivationFunction implements ActivationFunction {
-    private static final double UPPER_LIMIT = 1.0;
-    private static final double LOWER_LIMIT = 0.0;
-
-    /**
-     * The gausian activation function.
-     *
-     * @param activity the neuron's current activity.
-     * @return The result of the gausian activation function bound between 0 and
-     * 1.
-     * @since 1.0
-     */
-    @Override
-    public double activate(final double activity) {
-        return Math.pow(Math.E, (-1.0 * Math.pow(activity, 2)));
-    }
-
-    /**
-     * The derivative of the gausian activation function.
-     *
-     * @param activity The neuron's current activity.
-     * @return The result of the derivative of the gausian activation function.
-     * @since 1.0
-     */
-    @Override
-    public double activateDerivative(final double activity) {
-        return (-2.0 * Math.log10(Math.E) * activity) / Math.pow(Math.E, Math.pow(activity, 2));
-    }
-
-    @Override
-    public boolean isBound() {
-        return true;
-    }
-
-    @Override
-    public double getUpperLimit() {
-        return UPPER_LIMIT;
-    }
-
-    @Override
-    public double getLowerLimit() {
-        return LOWER_LIMIT;
-    }
-}
diff --git a/src/main/java/com/syncleus/grail/neural/activation/HyperbolicSecantActivationFunction.java b/src/main/java/com/syncleus/grail/neural/activation/HyperbolicSecantActivationFunction.java
deleted file mode 100644
index 1ca8660..0000000
--- a/src/main/java/com/syncleus/grail/neural/activation/HyperbolicSecantActivationFunction.java
+++ /dev/null
@@ -1,72 +0,0 @@
-/******************************************************************************
- *                                                                             *
- *  Copyright: (c) Syncleus, Inc.                                              *
- *                                                                             *
- *  You may redistribute and modify this source code under the terms and       *
- *  conditions of the Open Source Community License - Type C version 1.0       *
- *  or any later version as published by Syncleus, Inc. at www.syncleus.com.   *
- *  There should be a copy of the license included with this file. If a copy   *
- *  of the license is not included you are granted no right to distribute or   *
- *  otherwise use this file except through a legal and valid license. You      *
- *  should also contact Syncleus, Inc. at the information below if you cannot  *
- *  find a license:                                                            *
- *                                                                             *
- *  Syncleus, Inc.                                                             *
- *  2604 South 12th Street                                                     *
- *  Philadelphia, PA 19148                                                     *
- *                                                                             *
- ******************************************************************************/
-package com.syncleus.grail.neural.activation;
-
-/**
- * An implementation of an activation function using a hyperbolic secant
- * function.
- *
- * @author Jeffrey Phillips Freeman
- * @since 1.0
- */
-public class HyperbolicSecantActivationFunction implements ActivationFunction {
-    private static final double UPPER_LIMIT = 1.0;
-    private static final double LOWER_LIMIT = 0.0;
-
-    /**
-     * The hyperbolic secant activation function.
-     *
-     * @param activity the neuron's current activity.
-     * @return The result of the hyperbolic secant activation function bound
-     * between 0 and 1.
-     * @since 1.0
-     */
-    @Override
-    public double activate(final double activity) {
-        return 1.0 / Math.cosh(activity);
-    }
-
-    /**
-     * The derivative of the hyperbolic secant activation function.
-     *
-     * @param activity The neuron's current activity.
-     * @return The result of the derivative of the hyperbolic secand activation
-     * function.
-     * @since 1.0
-     */
-    @Override
-    public double activateDerivative(final double activity) {
-        return -1.0 * Math.tanh(activity) * this.activate(activity);
-    }
-
-    @Override
-    public boolean isBound() {
-        return true;
-    }
-
-    @Override
-    public double getUpperLimit() {
-        return UPPER_LIMIT;
-    }
-
-    @Override
-    public double getLowerLimit() {
-        return LOWER_LIMIT;
-    }
-}
diff --git a/src/main/java/com/syncleus/grail/neural/activation/HyperbolicTangentActivationFunction.java b/src/main/java/com/syncleus/grail/neural/activation/HyperbolicTangentActivationFunction.java
deleted file mode 100644
index 0e9693d..0000000
--- a/src/main/java/com/syncleus/grail/neural/activation/HyperbolicTangentActivationFunction.java
+++ /dev/null
@@ -1,66 +0,0 @@
-/******************************************************************************
- *                                                                             *
- *  Copyright: (c) Syncleus, Inc.                                              *
- *                                                                             *
- *  You may redistribute and modify this source code under the terms and       *
- *  conditions of the Open Source Community License - Type C version 1.0       *
- *  or any later version as published by Syncleus, Inc. at www.syncleus.com.   *
- *  There should be a copy of the license included with this file. If a copy   *
- *  of the license is not included you are granted no right to distribute or   *
- *  otherwise use this file except through a legal and valid license. You      *
- *  should also contact Syncleus, Inc. at the information below if you cannot  *
- *  find a license:                                                            *
- *                                                                             *
- *  Syncleus, Inc.                                                             *
- *  2604 South 12th Street                                                     *
- *  Philadelphia, PA 19148                                                     *
- *                                                                             *
- ******************************************************************************/
-package com.syncleus.grail.neural.activation;
-
-public class HyperbolicTangentActivationFunction implements ActivationFunction {
-    private static final double UPPER_LIMIT = 1.0;
-    private static final double LOWER_LIMIT = -1.0;
-    private static final double DERIVATIVE_EXP = 2.0;
-
-    /**
-     * The hyperbolic tangent activation function.
-     *
-     * @param activity the neuron's current activity.
-     * @return The result of the hyperbolic tangent activation function bound
-     * between -1 and 1.
-     * @since 1.0
-     */
-    @Override
-    public double activate(final double activity) {
-        return Math.tanh(activity);
-    }
-
-    /**
-     * The derivative of the hyperbolic tangent activation function.
-     *
-     * @param activity The neuron's current activity.
-     * @return The result of the derivative of the hyperbolic tangent activation
-     * function.
-     * @since 1.0
-     */
-    @Override
-    public double activateDerivative(final double activity) {
-        return 1.0 - Math.pow(this.activate(activity), HyperbolicTangentActivationFunction.DERIVATIVE_EXP);
-    }
-
-    @Override
-    public boolean isBound() {
-        return true;
-    }
-
-    @Override
-    public double getUpperLimit() {
-        return UPPER_LIMIT;
-    }
-
-    @Override
-    public double getLowerLimit() {
-        return LOWER_LIMIT;
-    }
-}
diff --git a/src/main/java/com/syncleus/grail/neural/activation/IdentityActivationFunction.java b/src/main/java/com/syncleus/grail/neural/activation/IdentityActivationFunction.java
deleted file mode 100644
index 08dc1cf..0000000
--- a/src/main/java/com/syncleus/grail/neural/activation/IdentityActivationFunction.java
+++ /dev/null
@@ -1,70 +0,0 @@
-/******************************************************************************
- *                                                                             *
- *  Copyright: (c) Syncleus, Inc.                                              *
- *                                                                             *
- *  You may redistribute and modify this source code under the terms and       *
- *  conditions of the Open Source Community License - Type C version 1.0       *
- *  or any later version as published by Syncleus, Inc. at www.syncleus.com.   *
- *  There should be a copy of the license included with this file. If a copy   *
- *  of the license is not included you are granted no right to distribute or   *
- *  otherwise use this file except through a legal and valid license. You      *
- *  should also contact Syncleus, Inc. at the information below if you cannot  *
- *  find a license:                                                            *
- *                                                                             *
- *  Syncleus, Inc.                                                             *
- *  2604 South 12th Street                                                     *
- *  Philadelphia, PA 19148                                                     *
- *                                                                             *
- ******************************************************************************/
-package com.syncleus.grail.neural.activation;
-
-/**
- * This activation function always returns the current activity.
- *
- * @author Jeffrey Phillips Freeman
- * @since 2.0
- */
-public class IdentityActivationFunction implements ActivationFunction {
-    private static final double UPPER_LIMIT = Double.MAX_VALUE;
-    private static final double LOWER_LIMIT = -1.0 * Double.MAX_VALUE;
-
-    /**
-     * The activation function.
-     *
-     * @param activity the neuron's current activity.
-     * @return The result of the activation function. Usually a bound value between
-     * 1 and -1 or 1 and 0. However this bound range is not required.
-     * @since 2.0
-     */
-    @Override
-    public double activate(final double activity) {
-        return activity;
-    }
-
-    /**
-     * The derivative of the activation function.
-     *
-     * @param activity The neuron's current activity.
-     * @return The result of the derivative of the activation function.
-     * @since 2.0
-     */
-    @Override
-    public double activateDerivative(final double activity) {
-        return 1.0;
-    }
-
-    @Override
-    public boolean isBound() {
-        return false;
-    }
-
-    @Override
-    public double getUpperLimit() {
-        return UPPER_LIMIT;
-    }
-
-    @Override
-    public double getLowerLimit() {
-        return LOWER_LIMIT;
-    }
-}
diff --git a/src/main/java/com/syncleus/grail/neural/activation/SineActivationFunction.java b/src/main/java/com/syncleus/grail/neural/activation/SineActivationFunction.java
deleted file mode 100644
index 5706b1f..0000000
--- a/src/main/java/com/syncleus/grail/neural/activation/SineActivationFunction.java
+++ /dev/null
@@ -1,63 +0,0 @@
-/******************************************************************************
- *                                                                             *
- *  Copyright: (c) Syncleus, Inc.                                              *
- *                                                                             *
- *  You may redistribute and modify this source code under the terms and       *
- *  conditions of the Open Source Community License - Type C version 1.0       *
- *  or any later version as published by Syncleus, Inc. at www.syncleus.com.   *
- *  There should be a copy of the license included with this file. If a copy   *
- *  of the license is not included you are granted no right to distribute or   *
- *  otherwise use this file except through a legal and valid license. You      *
- *  should also contact Syncleus, Inc. at the information below if you cannot  *
- *  find a license:                                                            *
- *                                                                             *
- *  Syncleus, Inc.                                                             *
- *  2604 South 12th Street                                                     *
- *  Philadelphia, PA 19148                                                     *
- *                                                                             *
- ******************************************************************************/
-package com.syncleus.grail.neural.activation;
-
-public class SineActivationFunction implements ActivationFunction {
-    private static final double UPPER_LIMIT = 1.0;
-    private static final double LOWER_LIMIT = -1.0;
-
-    /**
-     * The sine activation function.
-     *
-     * @param activity the neuron's current activity.
-     * @return The result of the sine activation function bound between -1 and 1.
-     * @since 1.0
-     */
-    @Override
-    public double activate(final double activity) {
-        return Math.sin(activity);
-    }
-
-    /**
-     * The derivative of the sine activation function.
-     *
-     * @param activity The neuron's current activity.
-     * @return The result of the derivative of the sine activation function.
-     * @since 1.0
-     */
-    @Override
-    public double activateDerivative(final double activity) {
-        return Math.cos(activity);
-    }
-
-    @Override
-    public boolean isBound() {
-        return true;
-    }
-
-    @Override
-    public double getUpperLimit() {
-        return UPPER_LIMIT;
-    }
-
-    @Override
-    public double getLowerLimit() {
-        return LOWER_LIMIT;
-    }
-}
diff --git a/src/main/java/com/syncleus/grail/neural/backprop/AbstractBackpropNeuron.java b/src/main/java/com/syncleus/grail/neural/backprop/AbstractBackpropNeuron.java
deleted file mode 100644
index 85b7b64..0000000
--- a/src/main/java/com/syncleus/grail/neural/backprop/AbstractBackpropNeuron.java
+++ /dev/null
@@ -1,49 +0,0 @@
-/******************************************************************************
- *                                                                             *
- *  Copyright: (c) Syncleus, Inc.                                              *
- *                                                                             *
- *  You may redistribute and modify this source code under the terms and       *
- *  conditions of the Open Source Community License - Type C version 1.0       *
- *  or any later version as published by Syncleus, Inc. at www.syncleus.com.   *
- *  There should be a copy of the license included with this file. If a copy   *
- *  of the license is not included you are granted no right to distribute or   *
- *  otherwise use this file except through a legal and valid license. You      *
- *  should also contact Syncleus, Inc. at the information below if you cannot  *
- *  find a license:                                                            *
- *                                                                             *
- *  Syncleus, Inc.                                                             *
- *  2604 South 12th Street                                                     *
- *  Philadelphia, PA 19148                                                     *
- *                                                                             *
- ******************************************************************************/
-package com.syncleus.grail.neural.backprop;
-
-import com.syncleus.grail.neural.*;
-import com.tinkerpop.frames.modules.javahandler.Initializer;
-
-public abstract class AbstractBackpropNeuron extends AbstractActivationNeuron implements BackpropNeuron {
-    private static final double DEFAULT_LEARNING_RATE = 0.0175;
-    @Initializer
-    public void init() {
-        this.setLearningRate(AbstractBackpropNeuron.DEFAULT_LEARNING_RATE);
-        this.setDeltaTrain(0.0);
-    }
-
-    @Override
-    public void backpropagate() {
-        for (final BackpropSynapse synapse : this.getTargetEdges(BackpropSynapse.class)) {
-            final BackpropNeuron target = synapse.getTarget();
-            synapse.setWeight(synapse.getWeight() + (target.getDeltaTrain() * target.getLearningRate() * this.getSignal()));
-        }
-
-        double newDeltaTrain = 0.0;
-        for (final BackpropSynapse synapse : this.getTargetEdges(BackpropSynapse.class)) {
-            final BackpropNeuron target = synapse.getTarget();
-            assert synapse.getWeight() != null;
-            assert target.getDeltaTrain() != null;
-            newDeltaTrain += (synapse.getWeight() * target.getDeltaTrain());
-        }
-        newDeltaTrain *= this.getActivationFunction().activateDerivative(this.getActivity());
-        this.setDeltaTrain(newDeltaTrain);
-    }
-}
diff --git a/src/main/java/com/syncleus/grail/neural/backprop/BackpropNeuron.java b/src/main/java/com/syncleus/grail/neural/backprop/BackpropNeuron.java
deleted file mode 100644
index 9e48743..0000000
--- a/src/main/java/com/syncleus/grail/neural/backprop/BackpropNeuron.java
+++ /dev/null
@@ -1,80 +0,0 @@
-/******************************************************************************
- *                                                                             *
- *  Copyright: (c) Syncleus, Inc.                                              *
- *                                                                             *
- *  You may redistribute and modify this source code under the terms and       *
- *  conditions of the Open Source Community License - Type C version 1.0       *
- *  or any later version as published by Syncleus, Inc. at www.syncleus.com.   *
- *  There should be a copy of the license included with this file. If a copy   *
- *  of the license is not included you are granted no right to distribute or   *
- *  otherwise use this file except through a legal and valid license. You      *
- *  should also contact Syncleus, Inc. at the information below if you cannot  *
- *  find a license:                                                            *
- *                                                                             *
- *  Syncleus, Inc.                                                             *
- *  2604 South 12th Street                                                     *
- *  Philadelphia, PA 19148                                                     *
- *                                                                             *
- ******************************************************************************/
-package com.syncleus.grail.neural.backprop;
-
-import com.syncleus.grail.graph.*;
-import com.syncleus.grail.graph.action.Action;
-import com.syncleus.grail.neural.*;
-import com.tinkerpop.frames.*;
-import com.tinkerpop.frames.modules.javahandler.*;
-import com.tinkerpop.frames.modules.typedgraph.TypeValue;
-
-@TypeValue("BackpropNeuron")
-@JavaHandlerClass(AbstractBackpropNeuron.class)
-public interface BackpropNeuron extends ActivationNeuron {
-    @JavaHandler
-    @Action("backpropagate")
-    void backpropagate();
-
-    @Property("learningRate")
-    Double getLearningRate();
-
-    @Property("learningRate")
-    void setLearningRate(double learningRate);
-
-    @Property("deltaTrain")
-    Double getDeltaTrain();
-
-    @Property("deltaTrain")
-    void setDeltaTrain(double deltaTrain);
-
-    @Adjacency(label="signals")
-    Iterable<? extends BackpropNeuron> getTargets();
-
-    @TypedAdjacency(label="signals")
-    <N extends BackpropNeuron> Iterable<? extends N> getTargets(Class<? extends N> type);
-
-    @Adjacency(label="signals")
-    void setTargets(Iterable<? extends BackpropNeuron> targets);
-
-    @Adjacency(label="signals")
-    void removeTarget(BackpropNeuron target);
-
-    @Adjacency(label="signals")
-    <N extends BackpropNeuron> N addTarget(N target);
-
-    @Adjacency(label="signals")
-    BackpropNeuron addTarget();
-
-    @TypedAdjacency(label="signals")
-    <N extends BackpropNeuron> N addTarget(Class<? extends N> type);
-
-    @Incidence(label = "signals")
-    Iterable<? extends BackpropSynapse> getTargetEdges();
-
-    @TypedIncidence(label="signals")
-    <E extends BackpropSynapse> Iterable<? extends E> getTargetEdges(Class<? extends E> type);
-
-    @Incidence(label = "signals")
-    <E extends BackpropSynapse> E addTargetEdge(E target);
-
-    @Incidence(label = "signals")
-    void removeTargetEdge(BackpropSynapse target);
-
-}
diff --git a/src/main/java/com/syncleus/grail/neural/backprop/BackpropSynapse.java b/src/main/java/com/syncleus/grail/neural/backprop/BackpropSynapse.java
deleted file mode 100644
index dd554f7..0000000
--- a/src/main/java/com/syncleus/grail/neural/backprop/BackpropSynapse.java
+++ /dev/null
@@ -1,32 +0,0 @@
-/******************************************************************************
- *                                                                             *
- *  Copyright: (c) Syncleus, Inc.                                              *
- *                                                                             *
- *  You may redistribute and modify this source code under the terms and       *
- *  conditions of the Open Source Community License - Type C version 1.0       *
- *  or any later version as published by Syncleus, Inc. at www.syncleus.com.   *
- *  There should be a copy of the license included with this file. If a copy   *
- *  of the license is not included you are granted no right to distribute or   *
- *  otherwise use this file except through a legal and valid license. You      *
- *  should also contact Syncleus, Inc. at the information below if you cannot  *
- *  find a license:                                                            *
- *                                                                             *
- *  Syncleus, Inc.                                                             *
- *  2604 South 12th Street                                                     *
- *  Philadelphia, PA 19148                                                     *
- *                                                                             *
- ******************************************************************************/
-package com.syncleus.grail.neural.backprop;
-
-import com.syncleus.grail.graph.SignalMultiplyingEdge;
-import com.tinkerpop.frames.*;
-import com.tinkerpop.frames.modules.typedgraph.TypeValue;
-
-@TypeValue("BackpropSynapse")
-public interface BackpropSynapse extends SignalMultiplyingEdge {
-    @InVertex
-    BackpropNeuron getTarget();
-
-    @OutVertex
-    BackpropNeuron getSource();
-}
diff --git a/src/main/java/com/syncleus/grail/neural/package-info.java b/src/main/java/com/syncleus/grail/neural/package-info.java
deleted file mode 100644
index 4edf6ab..0000000
--- a/src/main/java/com/syncleus/grail/neural/package-info.java
+++ /dev/null
@@ -1,19 +0,0 @@
-/******************************************************************************
- *                                                                             *
- *  Copyright: (c) Syncleus, Inc.                                              *
- *                                                                             *
- *  You may redistribute and modify this source code under the terms and       *
- *  conditions of the Open Source Community License - Type C version 1.0       *
- *  or any later version as published by Syncleus, Inc. at www.syncleus.com.   *
- *  There should be a copy of the license included with this file. If a copy   *
- *  of the license is not included you are granted no right to distribute or   *
- *  otherwise use this file except through a legal and valid license. You      *
- *  should also contact Syncleus, Inc. at the information below if you cannot  *
- *  find a license:                                                            *
- *                                                                             *
- *  Syncleus, Inc.                                                             *
- *  2604 South 12th Street                                                     *
- *  Philadelphia, PA 19148                                                     *
- *                                                                             *
- ******************************************************************************/
-package com.syncleus.grail.neural;
diff --git a/src/test/java/com/syncleus/grail/graph/action/AbstractPrioritySerialTriggerTest.java b/src/test/java/com/syncleus/grail/graph/action/AbstractPrioritySerialTriggerTest.java
index 0e1d5bf..66a0e59 100644
--- a/src/test/java/com/syncleus/grail/graph/action/AbstractPrioritySerialTriggerTest.java
+++ b/src/test/java/com/syncleus/grail/graph/action/AbstractPrioritySerialTriggerTest.java
@@ -19,8 +19,6 @@
 package com.syncleus.grail.graph.action;
 
 import com.syncleus.grail.graph.BlankGraphFactory;
-import com.syncleus.grail.neural.ActivationNeuron;
-import com.syncleus.grail.neural.activation.*;
 import com.tinkerpop.frames.FramedTransactionalGraph;
 import junit.framework.Assert;
 import org.junit.Test;
diff --git a/src/test/java/com/syncleus/grail/neural/AbstractActivationNeuronTest.java b/src/test/java/com/syncleus/grail/neural/AbstractActivationNeuronTest.java
deleted file mode 100644
index 58c709a..0000000
--- a/src/test/java/com/syncleus/grail/neural/AbstractActivationNeuronTest.java
+++ /dev/null
@@ -1,68 +0,0 @@
-/******************************************************************************
- *                                                                             *
- *  Copyright: (c) Syncleus, Inc.                                              *
- *                                                                             *
- *  You may redistribute and modify this source code under the terms and       *
- *  conditions of the Open Source Community License - Type C version 1.0       *
- *  or any later version as published by Syncleus, Inc. at www.syncleus.com.   *
- *  There should be a copy of the license included with this file. If a copy   *
- *  of the license is not included you are granted no right to distribute or   *
- *  otherwise use this file except through a legal and valid license. You      *
- *  should also contact Syncleus, Inc. at the information below if you cannot  *
- *  find a license:                                                            *
- *                                                                             *
- *  Syncleus, Inc.                                                             *
- *  2604 South 12th Street                                                     *
- *  Philadelphia, PA 19148                                                     *
- *                                                                             *
- ******************************************************************************/
-package com.syncleus.grail.neural;
-
-import com.syncleus.grail.graph.BlankGraphFactory;
-import com.syncleus.grail.neural.activation.*;
-import com.tinkerpop.frames.FramedTransactionalGraph;
-import junit.framework.Assert;
-import org.junit.Test;
-
-import java.lang.reflect.*;
-
-public class AbstractActivationNeuronTest {
-
-    @Test( expected = UndeclaredThrowableException.class )
-    public void testBadAccessActivation() {
-        final FramedTransactionalGraph<?> graph = BlankGraphFactory.makeTinkerGraph();
-        final ActivationNeuron neuron = graph.addVertex(null, ActivationNeuron.class);
-        neuron.setActivationFunctionClass(BadAccessActivationFunction.class);
-        try {
-            neuron.propagate();
-        }
-        catch( final UndeclaredThrowableException caught ) {
-            Assert.assertTrue(InvocationTargetException.class.equals(caught.getUndeclaredThrowable().getClass()));
-            throw caught;
-        }
-    }
-
-    @Test( expected = UndeclaredThrowableException.class )
-    public void testNoDefaultConstructorActivation() {
-        final FramedTransactionalGraph<?> graph = BlankGraphFactory.makeTinkerGraph();
-        final ActivationNeuron neuron = graph.addVertex(null, ActivationNeuron.class);
-        neuron.setActivationFunctionClass(NoDefaultConstructorActivationFunction.class);
-        try {
-            neuron.propagate();
-        }
-        catch( final UndeclaredThrowableException caught ) {
-            Assert.assertTrue(InvocationTargetException.class.equals(caught.getUndeclaredThrowable().getClass()));
-            throw caught;
-        }
-    }
-
-    @Test
-    public void testPropagateTwice() {
-        final FramedTransactionalGraph<?> graph = BlankGraphFactory.makeTinkerGraph();
-        final ActivationNeuron neuron = graph.addVertex(null, ActivationNeuron.class);
-        neuron.setActivationFunctionClass(HyperbolicTangentActivationFunction.class);
-        neuron.propagate();
-        neuron.propagate();
-        Assert.assertEquals(HyperbolicTangentActivationFunction.class, neuron.getActivationFunctionClass());
-    }
-}
diff --git a/src/test/java/com/syncleus/grail/neural/activation/ActivationBoundsTest.java b/src/test/java/com/syncleus/grail/neural/activation/ActivationBoundsTest.java
deleted file mode 100644
index 49837e1..0000000
--- a/src/test/java/com/syncleus/grail/neural/activation/ActivationBoundsTest.java
+++ /dev/null
@@ -1,77 +0,0 @@
-/******************************************************************************
- *                                                                             *
- *  Copyright: (c) Syncleus, Inc.                                              *
- *                                                                             *
- *  You may redistribute and modify this source code under the terms and       *
- *  conditions of the Open Source Community License - Type C version 1.0       *
- *  or any later version as published by Syncleus, Inc. at www.syncleus.com.   *
- *  There should be a copy of the license included with this file. If a copy   *
- *  of the license is not included you are granted no right to distribute or   *
- *  otherwise use this file except through a legal and valid license. You      *
- *  should also contact Syncleus, Inc. at the information below if you cannot  *
- *  find a license:                                                            *
- *                                                                             *
- *  Syncleus, Inc.                                                             *
- *  2604 South 12th Street                                                     *
- *  Philadelphia, PA 19148                                                     *
- *                                                                             *
- ******************************************************************************/
-package com.syncleus.grail.neural.activation;
-
-import org.junit.*;
-
-import java.util.*;
-
-public class ActivationBoundsTest {
-    private static final Random RANDOM = new Random();
-    private static final double UPPER_TEST_VALUE = 1000000000.0;
-    private static final double UPPER_CUTOFF_VALUE = 100.0;
-    private static final double LOWER_TEST_VALUE = -1000000000.0;
-    private static final double LOWER_CUTOFF_VALUE = -100.0;
-    private static final double TEST_INCREMENT = 10.0;
-    private static final int RANDOM_TEST_ITERATIONS = 10000;
-    private static final double RANDOM_TEST_RANGE = 1000.0;
-    private static final List<ActivationFunction> activationFunctions = new ArrayList<ActivationFunction>(Arrays.asList(
-                                                                            new HyperbolicTangentActivationFunction(),
-                                                                            new SineActivationFunction(),
-                                                                            new GausianActivationFunction(),
-                                                                            new HyperbolicTangentActivationFunction()));
-
-    @Test
-    public void testBounds() {
-        for (final ActivationFunction currentActivationFunction : this.activationFunctions) {
-            double currentIn = UPPER_TEST_VALUE;
-            while (currentIn >= UPPER_CUTOFF_VALUE) {
-                currentActivationFunction.activateDerivative(currentIn);
-                final double result = currentActivationFunction.activate(currentIn);
-                Assert.assertTrue("Transfer out of bounds. In: " + currentIn + ", result: " + result, (result <= currentActivationFunction.getUpperLimit()) && (result >= currentActivationFunction.getLowerLimit()));
-                currentIn = currentIn / TEST_INCREMENT;
-            }
-            while (currentIn > 0.0) {
-                currentActivationFunction.activateDerivative(currentIn);
-                final double result = currentActivationFunction.activate(currentIn);
-                Assert.assertTrue("Transfer out of bounds. In: " + currentIn + ", result: " + result, (result <= currentActivationFunction.getUpperLimit()) && (result >= currentActivationFunction.getLowerLimit()));
-                currentIn--;
-            }
-            currentIn = LOWER_TEST_VALUE;
-            while (currentIn <= LOWER_CUTOFF_VALUE) {
-                currentActivationFunction.activateDerivative(currentIn);
-                final double result = currentActivationFunction.activate(currentIn);
-                Assert.assertTrue("Transfer out of bounds. In: " + currentIn + ", result: " + result, (result <= currentActivationFunction.getUpperLimit()) && (result >= currentActivationFunction.getLowerLimit()));
-                currentIn = currentIn / TEST_INCREMENT;
-            }
-            while (currentIn <= 0.0) {
-                currentActivationFunction.activateDerivative(currentIn);
-                final double result = currentActivationFunction.activate(currentIn);
-                Assert.assertTrue("Transfer out of bounds. In: " + currentIn + ", result: " + result, (result <= currentActivationFunction.getUpperLimit()) && (result >= currentActivationFunction.getLowerLimit()));
-                currentIn++;
-            }
-            for (int count = 0; count < RANDOM_TEST_ITERATIONS; count++) {
-                currentIn = ((RANDOM.nextDouble() * 2.0) - 1.0) * RANDOM_TEST_RANGE;
-                currentActivationFunction.activateDerivative(currentIn);
-                final double result = currentActivationFunction.activate(currentIn);
-                Assert.assertTrue("Transfer out of bounds. In: " + currentIn + ", result: " + result, (result <= currentActivationFunction.getUpperLimit()) && (result >= currentActivationFunction.getLowerLimit()));
-            }
-        }
-    }
-}
\ No newline at end of file
diff --git a/src/test/java/com/syncleus/grail/neural/activation/ActivationValuesTest.java b/src/test/java/com/syncleus/grail/neural/activation/ActivationValuesTest.java
deleted file mode 100644
index 70373db..0000000
--- a/src/test/java/com/syncleus/grail/neural/activation/ActivationValuesTest.java
+++ /dev/null
@@ -1,148 +0,0 @@
-/******************************************************************************
- *                                                                             *
- *  Copyright: (c) Syncleus, Inc.                                              *
- *                                                                             *
- *  You may redistribute and modify this source code under the terms and       *
- *  conditions of the Open Source Community License - Type C version 1.0       *
- *  or any later version as published by Syncleus, Inc. at www.syncleus.com.   *
- *  There should be a copy of the license included with this file. If a copy   *
- *  of the license is not included you are granted no right to distribute or   *
- *  otherwise use this file except through a legal and valid license. You      *
- *  should also contact Syncleus, Inc. at the information below if you cannot  *
- *  find a license:                                                            *
- *                                                                             *
- *  Syncleus, Inc.                                                             *
- *  2604 South 12th Street                                                     *
- *  Philadelphia, PA 19148                                                     *
- *                                                                             *
- ******************************************************************************/
-package com.syncleus.grail.neural.activation;
-
-import org.junit.*;
-
-public class ActivationValuesTest {
-    private static final ActivationFunction GAUSIAN_ACTIVATION_FUNCTION = new GausianActivationFunction();
-    private static final double[] GAUSIAN_ACTIVITY = {-10000.0,
-                                                             -100.0,
-                                                             -10.0,
-                                                             -1.0,
-                                                             -0.1,
-                                                             0.0,
-                                                             0.1,
-                                                             1.0,
-                                                             10.0,
-                                                             100.0,
-                                                             10000.0};
-    private static final double[] GAUSIAN_TRANSFERED = {scientific(6.451709693, -43429449),
-                                                               scientific(1.135483865, -4343),
-                                                               scientific(3.720075976, -44),
-                                                               0.3678794412,
-                                                               0.9900498337,
-                                                               1.0,
-                                                               0.9900498337,
-                                                               0.3678794412,
-                                                               scientific(3.720075976, -44),
-                                                               scientific(1.135483865, -4343),
-                                                               scientific(6.451709693, -43429449)};
-    private static final ActivationFunction IDENTITY_ACTIVATION_FUNCTION = new IdentityActivationFunction();
-    private static final double[] IDENTITY_ACTIVITY = {-10000.0, -5000.0, -1000.0, -500.0, -100.0,
-                                                              -50.0, -10.0, -5.0, -1.0, -0.5, -0.1, -0.05,
-                                                              0.0, 0.05, 0.1, 0.5, 1.0, 5.0, 10.0, 50.0,
-                                                              100.0, 500.0, 1000.0, 5000.0, 10000.0};
-    private static final double[] IDENTITY_TRANSFERED = IDENTITY_ACTIVITY;
-    private static final ActivationFunction HYPERBOLIC_SECANT_ACTIVATION_FUNCTION = new HyperbolicSecantActivationFunction();
-    private static final double[] HYPERBOLIC_SECANT_ACTIVITY = {-10000.0,
-                                                                       -100.0,
-                                                                       -10.0,
-                                                                       -1.0,
-                                                                       -0.1,
-                                                                       0.0,
-                                                                       0.1,
-                                                                       1.0,
-                                                                       10.0,
-                                                                       100.0,
-                                                                       10000.0};
-    private static final double[] HYPERBOLIC_SECANT_TRANSFERED = {scientific(2.270967731, -4343),
-                                                                         scientific(7.440151952, -44),
-                                                                         0.00009079985934,
-                                                                         0.6480542737,
-                                                                         0.9950207489,
-                                                                         1.0,
-                                                                         0.9950207489,
-                                                                         0.6480542737,
-                                                                         0.00009079985934,
-                                                                         scientific(7.440151952, -44),
-                                                                         scientific(2.270967731, -4343)};
-    private static final ActivationFunction HYPERBOLIC_TANGENT_ACTIVATION_FUNCTION = new HyperbolicTangentActivationFunction();
-    private static final double[] HYPERBOLIC_TANGENT_ACTIVITY = {-10000.0,
-                                                                        -100.0,
-                                                                        -10.0,
-                                                                        -1.0,
-                                                                        -0.1,
-                                                                        0.0,
-                                                                        0.1,
-                                                                        1.0,
-                                                                        10.0,
-                                                                        100.0,
-                                                                        10000.0};
-    private static final double[] HYPERBOLIC_TANGENT_TRANSFERED = {-1.0,
-                                                                          -1.0,
-                                                                          -0.9999999959,
-                                                                          -0.7615941560,
-                                                                          -0.09966799462,
-                                                                          0.0,
-                                                                          0.09966799462,
-                                                                          0.7615941560,
-                                                                          0.9999999959,
-                                                                          1.0,
-                                                                          1.0};
-    private static final ActivationFunction SINE_ACTIVATION_FUNCTION = new SineActivationFunction();
-    private static final double[] SINE_ACTIVITY = {-10000.0,
-                                                          -100.0,
-                                                          -10.0,
-                                                          -1.0,
-                                                          -0.1,
-                                                          0.0,
-                                                          0.1,
-                                                          1.0,
-                                                          10.0,
-                                                          100.0,
-                                                          10000.0};
-    private static final double[] SINE_TRANSFERED = {0.3056143889,
-                                                            0.5063656411,
-                                                            0.5440211109,
-                                                            -0.8414709848,
-                                                            -0.09983341665,
-                                                            0.0,
-                                                            0.09983341665,
-                                                            0.8414709848,
-                                                            -0.5440211109,
-                                                            -0.5063656411,
-                                                            -0.3056143889};
-
-    private static boolean checkFunction(final ActivationFunction function, final double[] activities, final double[] transfered) {
-        for (int testIndex = 0; testIndex < activities.length; testIndex++) {
-            final double result = function.activate(activities[testIndex]);
-            if (!checkResult(result, transfered[testIndex]))
-                return false;
-        }
-        return true;
-    }
-
-    private static boolean checkResult(final double firstValue, final double secondValue) {
-        return (Math.abs(firstValue - secondValue) < 0.0000001);
-    }
-
-    private static double scientific(final double value, final double exponent) {
-        return value * Math.pow(10.0, exponent);
-    }
-
-    @Test
-    public void testActivations() {
-        Assert.assertTrue("Gausian failed!", checkFunction(GAUSIAN_ACTIVATION_FUNCTION, GAUSIAN_ACTIVITY, GAUSIAN_TRANSFERED));
-        Assert.assertTrue("Identity failed!", checkFunction(IDENTITY_ACTIVATION_FUNCTION, IDENTITY_ACTIVITY, IDENTITY_TRANSFERED));
-        Assert.assertTrue("SecH failed!", checkFunction(HYPERBOLIC_SECANT_ACTIVATION_FUNCTION, HYPERBOLIC_SECANT_ACTIVITY, HYPERBOLIC_SECANT_TRANSFERED));
-        Assert.assertTrue("TanH failed!", checkFunction(HYPERBOLIC_TANGENT_ACTIVATION_FUNCTION, HYPERBOLIC_TANGENT_ACTIVITY, HYPERBOLIC_TANGENT_TRANSFERED));
-        Assert.assertTrue("Sine failed!", checkFunction(SINE_ACTIVATION_FUNCTION, SINE_ACTIVITY, SINE_TRANSFERED));
-    }
-}
\ No newline at end of file
diff --git a/src/test/java/com/syncleus/grail/neural/activation/BadAccessActivationFunction.java b/src/test/java/com/syncleus/grail/neural/activation/BadAccessActivationFunction.java
deleted file mode 100644
index d30e1d0..0000000
--- a/src/test/java/com/syncleus/grail/neural/activation/BadAccessActivationFunction.java
+++ /dev/null
@@ -1,49 +0,0 @@
-/******************************************************************************
- *                                                                             *
- *  Copyright: (c) Syncleus, Inc.                                              *
- *                                                                             *
- *  You may redistribute and modify this source code under the terms and       *
- *  conditions of the Open Source Community License - Type C version 1.0       *
- *  or any later version as published by Syncleus, Inc. at www.syncleus.com.   *
- *  There should be a copy of the license included with this file. If a copy   *
- *  of the license is not included you are granted no right to distribute or   *
- *  otherwise use this file except through a legal and valid license. You      *
- *  should also contact Syncleus, Inc. at the information below if you cannot  *
- *  find a license:                                                            *
- *                                                                             *
- *  Syncleus, Inc.                                                             *
- *  2604 South 12th Street                                                     *
- *  Philadelphia, PA 19148                                                     *
- *                                                                             *
- ******************************************************************************/
-package com.syncleus.grail.neural.activation;
-
-public class BadAccessActivationFunction implements ActivationFunction {
-    private BadAccessActivationFunction() {
-    }
-
-    @Override
-    public double activate(final double activity) {
-        return 0;
-    }
-
-    @Override
-    public double activateDerivative(final double activity) {
-        return 0;
-    }
-
-    @Override
-    public boolean isBound() {
-        return false;
-    }
-
-    @Override
-    public double getUpperLimit() {
-        return 0;
-    }
-
-    @Override
-    public double getLowerLimit() {
-        return 0;
-    }
-}
diff --git a/src/test/java/com/syncleus/grail/neural/activation/GausianActivationFunctionTest.java b/src/test/java/com/syncleus/grail/neural/activation/GausianActivationFunctionTest.java
deleted file mode 100644
index 0c24414..0000000
--- a/src/test/java/com/syncleus/grail/neural/activation/GausianActivationFunctionTest.java
+++ /dev/null
@@ -1,86 +0,0 @@
-/******************************************************************************
- *                                                                             *
- *  Copyright: (c) Syncleus, Inc.                                              *
- *                                                                             *
- *  You may redistribute and modify this source code under the terms and       *
- *  conditions of the Open Source Community License - Type C version 1.0       *
- *  or any later version as published by Syncleus, Inc. at www.syncleus.com.   *
- *  There should be a copy of the license included with this file. If a copy   *
- *  of the license is not included you are granted no right to distribute or   *
- *  otherwise use this file except through a legal and valid license. You      *
- *  should also contact Syncleus, Inc. at the information below if you cannot  *
- *  find a license:                                                            *
- *                                                                             *
- *  Syncleus, Inc.                                                             *
- *  2604 South 12th Street                                                     *
- *  Philadelphia, PA 19148                                                     *
- *                                                                             *
- ******************************************************************************/
-package com.syncleus.grail.neural.activation;
-
-import junit.framework.Assert;
-import org.junit.Test;
-
-public class GausianActivationFunctionTest {
-    private final static ActivationFunction ACTIVATION_FUNCTION = new GausianActivationFunction();
-    private final static double[][] ACTIVATION_TRUTH_TABLE = new double[][]{
-            {0.0, 1.0},
-            {0.25, 0.9394130628134758},
-            {1.0, 0.36787944117144233},
-            {10.0, scientific(3.7200759760208555, -44.0)},
-            {1000000.0, 0.0},
-            {-0.25, 0.9394130628134758},
-            {-1.0, 0.36787944117144233},
-            {-10.0, scientific(3.7200759760208555, -44.0)},
-            {-1000000.0, 0.0} };
-    private final static double[][] DERIVATIVE_TRUTH_TABLE = new double[][]{
-            {0.0, -0.0},
-            {0.25, -0.20399095470386272},
-            {1.0, -0.3195360226128187},
-            {10.0, scientific(-3.231216937293423, -43)},
-            {1000000.0, -0.0},
-            {-0.25, 0.20399095470386272},
-            {-1.0, 0.3195360226128187},
-            {-10.0, scientific(-3.231216937293423, -43)},
-            {-1000000.0, 0.0} };
-    private final static boolean IS_BOUND = true;
-    private final static double UPPER_LIMIT = 1.0;
-    private final static double LOWER_LIMIT = 0.0;
-
-    @Test
-    public void testActivation() {
-        for( int index = 0; index < ACTIVATION_TRUTH_TABLE.length ; index++ )
-            Assert.assertTrue(checkResult(ACTIVATION_FUNCTION.activate(ACTIVATION_TRUTH_TABLE[index][0]), ACTIVATION_TRUTH_TABLE[index][1]));
-    }
-
-    @Test
-    public void testDerivative() {
-        for( int index = 0; index < DERIVATIVE_TRUTH_TABLE.length ; index++ )
-            Assert.assertTrue(checkResult(ACTIVATION_FUNCTION.activateDerivative(DERIVATIVE_TRUTH_TABLE[index][0]), DERIVATIVE_TRUTH_TABLE[index][1]));
-    }
-
-    @Test
-    public void testIsBound() {
-        Assert.assertTrue(ACTIVATION_FUNCTION.isBound() == IS_BOUND);
-    }
-
-    @Test
-    public void testUpperLimit() {
-        final double upperLimit = ACTIVATION_FUNCTION.getUpperLimit();
-        Assert.assertTrue( checkResult(upperLimit, UPPER_LIMIT));
-    }
-
-    @Test
-    public void testLowerLimit() {
-        final double lowerLimit = ACTIVATION_FUNCTION.getLowerLimit();
-        Assert.assertTrue( checkResult(lowerLimit, LOWER_LIMIT));
-    }
-
-    private static boolean checkResult(final double firstValue, final double secondValue) {
-        return (Math.abs(firstValue - secondValue) < 0.0000001);
-    }
-
-    private static double scientific(final double value, final double exponent) {
-        return value * Math.pow(10.0, exponent);
-    }
-}
diff --git a/src/test/java/com/syncleus/grail/neural/activation/HyperbolicSecantActivationFunctionTest.java b/src/test/java/com/syncleus/grail/neural/activation/HyperbolicSecantActivationFunctionTest.java
deleted file mode 100644
index 57ac301..0000000
--- a/src/test/java/com/syncleus/grail/neural/activation/HyperbolicSecantActivationFunctionTest.java
+++ /dev/null
@@ -1,82 +0,0 @@
-/******************************************************************************
- *                                                                             *
- *  Copyright: (c) Syncleus, Inc.                                              *
- *                                                                             *
- *  You may redistribute and modify this source code under the terms and       *
- *  conditions of the Open Source Community License - Type C version 1.0       *
- *  or any later version as published by Syncleus, Inc. at www.syncleus.com.   *
- *  There should be a copy of the license included with this file. If a copy   *
- *  of the license is not included you are granted no right to distribute or   *
- *  otherwise use this file except through a legal and valid license. You      *
- *  should also contact Syncleus, Inc. at the information below if you cannot  *
- *  find a license:                                                            *
- *                                                                             *
- *  Syncleus, Inc.                                                             *
- *  2604 South 12th Street                                                     *
- *  Philadelphia, PA 19148                                                     *
- *                                                                             *
- ******************************************************************************/
-package com.syncleus.grail.neural.activation;
-
-import junit.framework.Assert;
-import org.junit.Test;
-
-public class HyperbolicSecantActivationFunctionTest {
-    private final static ActivationFunction ACTIVATION_FUNCTION = new HyperbolicSecantActivationFunction();
-    private final static double[][] ACTIVATION_TRUTH_TABLE = new double[][]{
-            {0.0, 1.0},
-            {0.25, 0.9695436291402145},
-            {1.0, 0.6480542736638853},
-            {10.0, 0.00009079985933781724},
-            {1000000.0, 0.0},
-            {-0.25, 0.9695436291402145},
-            {-1.0, 0.6480542736638853},
-            {-10.0, 0.00009079985933781724},
-            {-1000000.0, 0.0} };
-    private final static double[][] DERIVATIVE_TRUTH_TABLE = new double[][]{
-            {0.0, -0.0},
-            {0.25, -0.23745932879105916},
-            {1.0, -0.493554347564573},
-            {10.0, -0.00009079985896351232},
-            {1000000.0, -0.0},
-            {-0.25, 0.23745932879105916},
-            {-1.0, 0.493554347564573},
-            {-10.0, 0.00009079985896351232},
-            {-1000000.0, 0.0} };
-    private final static boolean IS_BOUND = true;
-    private final static double UPPER_LIMIT = 1.0;
-    private final static double LOWER_LIMIT = 0.0;
-
-    @Test
-    public void testActivation() {
-        for( int index = 0; index < ACTIVATION_TRUTH_TABLE.length ; index++ )
-            Assert.assertTrue(checkResult(ACTIVATION_FUNCTION.activate(ACTIVATION_TRUTH_TABLE[index][0]), ACTIVATION_TRUTH_TABLE[index][1]));
-    }
-
-    @Test
-    public void testDerivative() {
-        for( int index = 0; index < DERIVATIVE_TRUTH_TABLE.length ; index++ )
-            Assert.assertTrue(checkResult(ACTIVATION_FUNCTION.activateDerivative(DERIVATIVE_TRUTH_TABLE[index][0]), DERIVATIVE_TRUTH_TABLE[index][1]));
-    }
-
-    @Test
-    public void testIsBound() {
-        Assert.assertTrue(ACTIVATION_FUNCTION.isBound() == IS_BOUND);
-    }
-
-    @Test
-    public void testUpperLimit() {
-        final double upperLimit = ACTIVATION_FUNCTION.getUpperLimit();
-        Assert.assertTrue( checkResult(upperLimit, UPPER_LIMIT));
-    }
-
-    @Test
-    public void testLowerLimit() {
-        final double lowerLimit = ACTIVATION_FUNCTION.getLowerLimit();
-        Assert.assertTrue( checkResult(lowerLimit, LOWER_LIMIT));
-    }
-
-    private static boolean checkResult(final double firstValue, final double secondValue) {
-        return (Math.abs(firstValue - secondValue) < 0.0000001);
-    }
-}
diff --git a/src/test/java/com/syncleus/grail/neural/activation/HyperbolicTangentActivationFunctionTest.java b/src/test/java/com/syncleus/grail/neural/activation/HyperbolicTangentActivationFunctionTest.java
deleted file mode 100644
index 9e533a1..0000000
--- a/src/test/java/com/syncleus/grail/neural/activation/HyperbolicTangentActivationFunctionTest.java
+++ /dev/null
@@ -1,82 +0,0 @@
-/******************************************************************************
- *                                                                             *
- *  Copyright: (c) Syncleus, Inc.                                              *
- *                                                                             *
- *  You may redistribute and modify this source code under the terms and       *
- *  conditions of the Open Source Community License - Type C version 1.0       *
- *  or any later version as published by Syncleus, Inc. at www.syncleus.com.   *
- *  There should be a copy of the license included with this file. If a copy   *
- *  of the license is not included you are granted no right to distribute or   *
- *  otherwise use this file except through a legal and valid license. You      *
- *  should also contact Syncleus, Inc. at the information below if you cannot  *
- *  find a license:                                                            *
- *                                                                             *
- *  Syncleus, Inc.                                                             *
- *  2604 South 12th Street                                                     *
- *  Philadelphia, PA 19148                                                     *
- *                                                                             *
- ******************************************************************************/
-package com.syncleus.grail.neural.activation;
-
-import junit.framework.Assert;
-import org.junit.Test;
-
-public class HyperbolicTangentActivationFunctionTest {
-    private final static ActivationFunction ACTIVATION_FUNCTION = new HyperbolicTangentActivationFunction();
-    private final static double[][] ACTIVATION_TRUTH_TABLE = new double[][]{
-            {0.0, 0.0},
-            {0.25, 0.24491866240370913},
-            {1.0, 0.7615941559557649},
-            {10.0, 0.9999999958776927},
-            {1000000.0, 1.0},
-            {-0.25, -0.24491866240370913},
-            {-1.0, -0.7615941559557649},
-            {-10.0, -0.9999999958776927},
-            {-1000000.0, -1.0} };
-    private final static double[][] DERIVATIVE_TRUTH_TABLE = new double[][]{
-            {0.0, 1.0},
-            {0.25, 0.940014848806378},
-            {1.0, 0.41997434161402614},
-            {10.0, 0.000000008244614546626394},
-            {1000000.0, 0.0},
-            {-0.25, 0.940014848806378},
-            {-1.0, 0.41997434161402614},
-            {-10.0, 0.000000008244614546626394},
-            {-1000000.0, 0.0} };
-    private final static boolean IS_BOUND = true;
-    private final static double UPPER_LIMIT = 1.0;
-    private final static double LOWER_LIMIT = -1.0;
-
-    @Test
-    public void testActivation() {
-        for( int index = 0; index < ACTIVATION_TRUTH_TABLE.length ; index++ )
-            Assert.assertTrue(checkResult(ACTIVATION_FUNCTION.activate(ACTIVATION_TRUTH_TABLE[index][0]), ACTIVATION_TRUTH_TABLE[index][1]));
-    }
-
-    @Test
-    public void testDerivative() {
-        for( int index = 0; index < DERIVATIVE_TRUTH_TABLE.length ; index++ )
-            Assert.assertTrue(checkResult(ACTIVATION_FUNCTION.activateDerivative(DERIVATIVE_TRUTH_TABLE[index][0]), DERIVATIVE_TRUTH_TABLE[index][1]));
-    }
-
-    @Test
-    public void testIsBound() {
-        Assert.assertTrue(ACTIVATION_FUNCTION.isBound() == IS_BOUND);
-    }
-
-    @Test
-    public void testUpperLimit() {
-        final double upperLimit = ACTIVATION_FUNCTION.getUpperLimit();
-        Assert.assertTrue( checkResult(upperLimit, UPPER_LIMIT));
-    }
-
-    @Test
-    public void testLowerLimit() {
-        final double lowerLimit = ACTIVATION_FUNCTION.getLowerLimit();
-        Assert.assertTrue( checkResult(lowerLimit, LOWER_LIMIT));
-    }
-
-    private static boolean checkResult(final double firstValue, final double secondValue) {
-        return (Math.abs(firstValue - secondValue) < 0.0000001);
-    }
-}
diff --git a/src/test/java/com/syncleus/grail/neural/activation/IdentityActivationFunctionTest.java b/src/test/java/com/syncleus/grail/neural/activation/IdentityActivationFunctionTest.java
deleted file mode 100644
index b63d146..0000000
--- a/src/test/java/com/syncleus/grail/neural/activation/IdentityActivationFunctionTest.java
+++ /dev/null
@@ -1,82 +0,0 @@
-/******************************************************************************
- *                                                                             *
- *  Copyright: (c) Syncleus, Inc.                                              *
- *                                                                             *
- *  You may redistribute and modify this source code under the terms and       *
- *  conditions of the Open Source Community License - Type C version 1.0       *
- *  or any later version as published by Syncleus, Inc. at www.syncleus.com.   *
- *  There should be a copy of the license included with this file. If a copy   *
- *  of the license is not included you are granted no right to distribute or   *
- *  otherwise use this file except through a legal and valid license. You      *
- *  should also contact Syncleus, Inc. at the information below if you cannot  *
- *  find a license:                                                            *
- *                                                                             *
- *  Syncleus, Inc.                                                             *
- *  2604 South 12th Street                                                     *
- *  Philadelphia, PA 19148                                                     *
- *                                                                             *
- ******************************************************************************/
-package com.syncleus.grail.neural.activation;
-
-import junit.framework.Assert;
-import org.junit.Test;
-
-public class IdentityActivationFunctionTest {
-    private final static ActivationFunction ACTIVATION_FUNCTION = new IdentityActivationFunction();
-    private final static double[][] ACTIVATION_TRUTH_TABLE = new double[][]{ {0.0, 0.0},
-                                                                             {0.25, 0.25},
-                                                                             {1.0, 1.0},
-                                                                             {10.0, 10.0},
-                                                                             {1000000.0, 1000000.0},
-                                                                             {-0.25, -0.25},
-                                                                             {-1.0, -1.0},
-                                                                             {-10.0, -10.0},
-                                                                             {-1000000.0, -1000000.0} };
-    private final static double[][] DERIVATIVE_TRUTH_TABLE = new double[][]{ {0.0, 1.0},
-                                                                             {0.25, 1.0},
-                                                                             {1.0, 1.0},
-                                                                             {10.0, 1.0},
-                                                                             {1000000.0, 1.0},
-                                                                             {-0.25, 1.0},
-                                                                             {-1.0, 1.0},
-                                                                             {-10.0, 1.0},
-                                                                             {-1000000.0, 1.0} };
-    private final static boolean IS_BOUND = false;
-    private final static double UPPER_LIMIT = Double.MAX_VALUE;
-    private final static double LOWER_LIMIT = -1.0 * Double.MAX_VALUE;
-
-    @Test
-    public void testActivation() {
-        for( int index = 0; index < ACTIVATION_TRUTH_TABLE.length ; index++ )
-            Assert.assertTrue(IdentityActivationFunctionTest.checkResult(ACTIVATION_FUNCTION.activate(ACTIVATION_TRUTH_TABLE[index][0]), ACTIVATION_TRUTH_TABLE[index][1]));
-    }
-
-    @Test
-    public void testDerivative() {
-        for( int index = 0; index < DERIVATIVE_TRUTH_TABLE.length ; index++ )
-            Assert.assertTrue(IdentityActivationFunctionTest.checkResult(ACTIVATION_FUNCTION.activateDerivative(DERIVATIVE_TRUTH_TABLE[index][0]), DERIVATIVE_TRUTH_TABLE[index][1]));
-    }
-
-    @Test
-    public void testIsBound() {
-        Assert.assertTrue(ACTIVATION_FUNCTION.isBound() == IS_BOUND);
-    }
-
-    @Test
-    public void testUpperLimit() {
-        final double upperLimit = ACTIVATION_FUNCTION.getUpperLimit();
-        Assert.assertTrue( upperLimit > 1.0 );
-        Assert.assertTrue( IdentityActivationFunctionTest.checkResult(upperLimit, UPPER_LIMIT));
-    }
-
-    @Test
-    public void testLowerLimit() {
-        final double lowerLimit = ACTIVATION_FUNCTION.getLowerLimit();
-        Assert.assertTrue( lowerLimit < -1.0 );
-        Assert.assertTrue( IdentityActivationFunctionTest.checkResult(lowerLimit, LOWER_LIMIT));
-    }
-
-    private static boolean checkResult(final double firstValue, final double secondValue) {
-        return (Math.abs(firstValue - secondValue) < 0.0000001);
-    }
-}
diff --git a/src/test/java/com/syncleus/grail/neural/activation/NoDefaultConstructorActivationFunction.java b/src/test/java/com/syncleus/grail/neural/activation/NoDefaultConstructorActivationFunction.java
deleted file mode 100644
index 8dbf04a..0000000
--- a/src/test/java/com/syncleus/grail/neural/activation/NoDefaultConstructorActivationFunction.java
+++ /dev/null
@@ -1,49 +0,0 @@
-/******************************************************************************
- *                                                                             *
- *  Copyright: (c) Syncleus, Inc.                                              *
- *                                                                             *
- *  You may redistribute and modify this source code under the terms and       *
- *  conditions of the Open Source Community License - Type C version 1.0       *
- *  or any later version as published by Syncleus, Inc. at www.syncleus.com.   *
- *  There should be a copy of the license included with this file. If a copy   *
- *  of the license is not included you are granted no right to distribute or   *
- *  otherwise use this file except through a legal and valid license. You      *
- *  should also contact Syncleus, Inc. at the information below if you cannot  *
- *  find a license:                                                            *
- *                                                                             *
- *  Syncleus, Inc.                                                             *
- *  2604 South 12th Street                                                     *
- *  Philadelphia, PA 19148                                                     *
- *                                                                             *
- ******************************************************************************/
-package com.syncleus.grail.neural.activation;
-
-public class NoDefaultConstructorActivationFunction implements ActivationFunction {
-    public NoDefaultConstructorActivationFunction(String something) {
-    }
-
-    @Override
-    public double activate(final double activity) {
-        return 0;
-    }
-
-    @Override
-    public double activateDerivative(final double activity) {
-        return 0;
-    }
-
-    @Override
-    public boolean isBound() {
-        return false;
-    }
-
-    @Override
-    public double getUpperLimit() {
-        return 0;
-    }
-
-    @Override
-    public double getLowerLimit() {
-        return 0;
-    }
-}
diff --git a/src/test/java/com/syncleus/grail/neural/activation/SineActivationFunctionTest.java b/src/test/java/com/syncleus/grail/neural/activation/SineActivationFunctionTest.java
deleted file mode 100644
index e2d9a8e..0000000
--- a/src/test/java/com/syncleus/grail/neural/activation/SineActivationFunctionTest.java
+++ /dev/null
@@ -1,82 +0,0 @@
-/******************************************************************************
- *                                                                             *
- *  Copyright: (c) Syncleus, Inc.                                              *
- *                                                                             *
- *  You may redistribute and modify this source code under the terms and       *
- *  conditions of the Open Source Community License - Type C version 1.0       *
- *  or any later version as published by Syncleus, Inc. at www.syncleus.com.   *
- *  There should be a copy of the license included with this file. If a copy   *
- *  of the license is not included you are granted no right to distribute or   *
- *  otherwise use this file except through a legal and valid license. You      *
- *  should also contact Syncleus, Inc. at the information below if you cannot  *
- *  find a license:                                                            *
- *                                                                             *
- *  Syncleus, Inc.                                                             *
- *  2604 South 12th Street                                                     *
- *  Philadelphia, PA 19148                                                     *
- *                                                                             *
- ******************************************************************************/
-package com.syncleus.grail.neural.activation;
-
-import junit.framework.Assert;
-import org.junit.Test;
-
-public class SineActivationFunctionTest {
-    private final static ActivationFunction ACTIVATION_FUNCTION = new SineActivationFunction();
-    private final static double[][] ACTIVATION_TRUTH_TABLE = new double[][]{
-            {0.0, 0.0},
-            {0.25, 0.24740395925452294},
-            {1.0, 0.8414709848078965},
-            {10.0, -0.5440211108893698},
-            {1000000.0, -0.34999350217129294},
-            {-0.25, -0.24740395925452294},
-            {-1.0, -0.8414709848078965},
-            {-10.0, 0.5440211108893698},
-            {-1000000.0, 0.34999350217129294} };
-    private final static double[][] DERIVATIVE_TRUTH_TABLE = new double[][]{
-            {0.0, 1.0},
-            {0.25, 0.9689124217106447},
-            {1.0, 0.5403023058681398},
-            {10.0, -0.8390715290764524},
-            {1000000.0, 0.9367521275331447},
-            {-0.25, 0.9689124217106447},
-            {-1.0, 0.5403023058681398},
-            {-10.0, -0.8390715290764524},
-            {-1000000.0, 0.9367521275331447} };
-    private final static boolean IS_BOUND = true;
-    private final static double UPPER_LIMIT = 1.0;
-    private final static double LOWER_LIMIT = -1.0;
-
-    @Test
-    public void testActivation() {
-        for( int index = 0; index < ACTIVATION_TRUTH_TABLE.length ; index++ )
-            Assert.assertTrue(checkResult(ACTIVATION_FUNCTION.activate(ACTIVATION_TRUTH_TABLE[index][0]), ACTIVATION_TRUTH_TABLE[index][1]));
-    }
-
-    @Test
-    public void testDerivative() {
-        for( int index = 0; index < DERIVATIVE_TRUTH_TABLE.length ; index++ )
-            Assert.assertTrue(checkResult(ACTIVATION_FUNCTION.activateDerivative(DERIVATIVE_TRUTH_TABLE[index][0]), DERIVATIVE_TRUTH_TABLE[index][1]));
-    }
-
-    @Test
-    public void testIsBound() {
-        Assert.assertTrue(ACTIVATION_FUNCTION.isBound() == IS_BOUND);
-    }
-
-    @Test
-    public void testUpperLimit() {
-        final double upperLimit = ACTIVATION_FUNCTION.getUpperLimit();
-        Assert.assertTrue( checkResult(upperLimit, UPPER_LIMIT));
-    }
-
-    @Test
-    public void testLowerLimit() {
-        final double lowerLimit = ACTIVATION_FUNCTION.getLowerLimit();
-        Assert.assertTrue( checkResult(lowerLimit, LOWER_LIMIT));
-    }
-
-    private static boolean checkResult(final double firstValue, final double secondValue) {
-        return (Math.abs(firstValue - secondValue) < 0.0000001);
-    }
-}
diff --git a/src/test/java/com/syncleus/grail/neural/backprop/ActionTriggerXor3InputTest.java b/src/test/java/com/syncleus/grail/neural/backprop/ActionTriggerXor3InputTest.java
deleted file mode 100644
index f97e9c0..0000000
--- a/src/test/java/com/syncleus/grail/neural/backprop/ActionTriggerXor3InputTest.java
+++ /dev/null
@@ -1,219 +0,0 @@
-/******************************************************************************
- *                                                                             *
- *  Copyright: (c) Syncleus, Inc.                                              *
- *                                                                             *
- *  You may redistribute and modify this source code under the terms and       *
- *  conditions of the Open Source Community License - Type C version 1.0       *
- *  or any later version as published by Syncleus, Inc. at www.syncleus.com.   *
- *  There should be a copy of the license included with this file. If a copy   *
- *  of the license is not included you are granted no right to distribute or   *
- *  otherwise use this file except through a legal and valid license. You      *
- *  should also contact Syncleus, Inc. at the information below if you cannot  *
- *  find a license:                                                            *
- *                                                                             *
- *  Syncleus, Inc.                                                             *
- *  2604 South 12th Street                                                     *
- *  Philadelphia, PA 19148                                                     *
- *                                                                             *
- ******************************************************************************/
-package com.syncleus.grail.neural.backprop;
-
-import com.syncleus.grail.neural.activation.*;
-import com.syncleus.grail.graph.BlankGraphFactory;
-import com.syncleus.grail.graph.action.*;
-import com.tinkerpop.frames.*;
-import org.junit.*;
-
-import java.lang.reflect.UndeclaredThrowableException;
-import java.util.*;
-
-public class ActionTriggerXor3InputTest {
-
-    private static final ActivationFunction ACTIVATION_FUNCTION = new HyperbolicTangentActivationFunction();
-
-    @Test
-    public void testXor() {
-        final FramedTransactionalGraph<?> graph = BlankGraphFactory.makeTinkerGraph();
-
-        //
-        //Construct the Neural Graph
-        //
-        final List<BackpropNeuron> newInputNeurons = new ArrayList<BackpropNeuron>(2);
-        newInputNeurons.add(ActionTriggerXor3InputTest.createNeuron(graph, "input"));
-        newInputNeurons.add(ActionTriggerXor3InputTest.createNeuron(graph, "input"));
-        newInputNeurons.add(ActionTriggerXor3InputTest.createNeuron(graph, "input"));
-        final List<BackpropNeuron> newHiddenNeurons = new ArrayList<BackpropNeuron>(4);
-        newHiddenNeurons.add(ActionTriggerXor3InputTest.createNeuron(graph, "hidden"));
-        newHiddenNeurons.add(ActionTriggerXor3InputTest.createNeuron(graph, "hidden"));
-        newHiddenNeurons.add(ActionTriggerXor3InputTest.createNeuron(graph, "hidden"));
-        final BackpropNeuron newOutputNeuron = ActionTriggerXor3InputTest.createNeuron(graph, "output");
-        final BackpropNeuron biasNeuron = ActionTriggerXor3InputTest.createNeuron(graph, "bias");
-        biasNeuron.setSignal(1.0);
-
-        //connect all input neurons to hidden neurons
-        for (final BackpropNeuron inputNeuron : newInputNeurons) {
-            for (final BackpropNeuron hiddenNeuron : newHiddenNeurons) {
-                graph.addEdge(null, inputNeuron.asVertex(), hiddenNeuron.asVertex(), "signals", BackpropSynapse.class);
-            }
-        }
-        //connect all hidden neurons to the output neuron
-        for (final BackpropNeuron hiddenNeuron : newHiddenNeurons) {
-            graph.addEdge(null, hiddenNeuron.asVertex(), newOutputNeuron.asVertex(), "signals", BackpropSynapse.class);
-
-            //connect the bias neuron to this hidden neuron
-            graph.addEdge(null, biasNeuron.asVertex(), hiddenNeuron.asVertex(), "signals", BackpropSynapse.class);
-        }
-        //connect the bias neuron to the output neuron
-        graph.addEdge(null, biasNeuron.asVertex(), newOutputNeuron.asVertex(), "signals", BackpropSynapse.class);
-
-        //
-        //Construct the Action Triggers for the neural Graph
-        //
-        //First let's handle the output layer for propagation
-        final PrioritySerialTrigger propagateOutputTrigger = ActionTriggerXor3InputTest.createPrioritySerialTrigger(graph);
-        //connect it to the output neuron with a priority of 1000 (highest priority)
-        final PrioritySerialTriggerEdge outputTriggerEdge = graph.addEdge(null, propagateOutputTrigger.asVertex(), newOutputNeuron.asVertex(), "triggers", PrioritySerialTriggerEdge.class);
-        outputTriggerEdge.setTriggerPriority(1000);
-        outputTriggerEdge.setTriggerAction("propagate");
-
-        //now let's handle the hidden layer for propagation
-        final PrioritySerialTrigger propagateHiddenTrigger = ActionTriggerXor3InputTest.createPrioritySerialTrigger(graph);
-        propagateHiddenTrigger.asVertex().setProperty("triggerPointer", "propagate");
-        //connect it to each of the hidden neurons with a priority of 1000 (highest priority)
-        for (final BackpropNeuron hiddenNeuron : newHiddenNeurons) {
-            final PrioritySerialTriggerEdge newEdge = graph.addEdge(null, propagateHiddenTrigger.asVertex(), hiddenNeuron.asVertex(), "triggers", PrioritySerialTriggerEdge.class);
-            newEdge.setTriggerPriority(1000);
-            newEdge.setTriggerAction("propagate");
-        }
-
-        //chain the propagation of the hidden layer to the propagation of the output layer, but make sure it has a lower priority than the other triggers
-        final PrioritySerialTriggerEdge chainTriggerPropagateEdge = graph.addEdge(null, propagateHiddenTrigger.asVertex(), propagateOutputTrigger.asVertex(), "triggers", PrioritySerialTriggerEdge.class);
-        chainTriggerPropagateEdge.setTriggerPriority(0);
-        chainTriggerPropagateEdge.setTriggerAction("actionTrigger");
-
-        //next let's handle the input layer for backpropagation
-        final PrioritySerialTrigger backpropInputTrigger = ActionTriggerXor3InputTest.createPrioritySerialTrigger(graph);
-        //connect it to each of the input neurons
-        for (final BackpropNeuron inputNeuron : newInputNeurons) {
-            final PrioritySerialTriggerEdge newEdge = graph.addEdge(null, backpropInputTrigger.asVertex(), inputNeuron.asVertex(), "triggers", PrioritySerialTriggerEdge.class);
-            newEdge.setTriggerPriority(1000);
-            newEdge.setTriggerAction("backpropagate");
-        }
-        //also connect it to the bias neuron
-        final PrioritySerialTriggerEdge biasTriggerBackpropEdge = graph.addEdge(null, backpropInputTrigger.asVertex(), biasNeuron.asVertex(), "triggers", PrioritySerialTriggerEdge.class);
-        biasTriggerBackpropEdge.setTriggerPriority(1000);
-        biasTriggerBackpropEdge.setTriggerAction("backpropagate");
-
-        //create backpropagation trigger for the hidden layer
-        final PrioritySerialTrigger backpropHiddenTrigger = ActionTriggerXor3InputTest.createPrioritySerialTrigger(graph);
-        backpropHiddenTrigger.asVertex().setProperty("triggerPointer", "backpropagate");
-        //connect it to each of the hidden neurons with a priority of 1000 (highest priority)
-        for (final BackpropNeuron hiddenNeuron : newHiddenNeurons) {
-            final PrioritySerialTriggerEdge newEdge = graph.addEdge(null, backpropHiddenTrigger.asVertex(), hiddenNeuron.asVertex(), "triggers", PrioritySerialTriggerEdge.class);
-            newEdge.setTriggerPriority(1000);
-            newEdge.setTriggerAction("backpropagate");
-        }
-
-        //chain the hidden layer's backpropagation to the input layer's trigger
-        final PrioritySerialTriggerEdge chainTriggerBackpropEdge = graph.addEdge(null, backpropHiddenTrigger.asVertex(), backpropInputTrigger.asVertex(), "triggers", PrioritySerialTriggerEdge.class);
-        chainTriggerBackpropEdge.setTriggerPriority(0);
-        chainTriggerBackpropEdge.setTriggerAction("actionTrigger");
-
-        //commit everything
-        graph.commit();
-
-        //
-        // Graph is constructed, just need to train and test our network now.
-        //
-        final int maxCycles = 10000;
-        final int completionPeriod = 50;
-        final double maxError = 0.75;
-        for (int cycle = maxCycles; cycle >= 0; cycle--) {
-            int finished = 0;
-            for (int in1 = -1; in1 <= 1; in1 += 2) {
-                for (int in2 = -1; in2 <= 1; in2 += 2) {
-                    for (int in3 = -1; in3 <= 1; in3 += 2) {
-                        boolean bi = in1 >= 0;
-                        boolean bj = in2 >= 0;
-                        boolean bk = in3 >= 0;
-                        boolean expect = bi ^ bj ^ bk;
-                        double expectD = expect ? 1.0 : -1.0;
-
-                        train(graph, in1, in2, in3, expectD);
-
-                        if (cycle % completionPeriod == 0 && calculateError(graph, in1, in2, in3, expectD) < maxError) {
-                            finished++;
-                        }
-                    }
-                }
-            }
-            if (finished == 8)
-                break;
-        }
-
-        Assert.assertTrue(ActionTriggerXor3InputTest.propagate(graph, 1.0, 1.0, 1.0) > 0.0);
-        Assert.assertTrue(ActionTriggerXor3InputTest.propagate(graph, -1.0, 1.0, 1.0) < 0.0);
-        Assert.assertTrue(ActionTriggerXor3InputTest.propagate(graph, 1.0, -1.0, 1.0) < 0.0);
-        Assert.assertTrue(ActionTriggerXor3InputTest.propagate(graph, 1.0, 1.0, -1.0) < 0.0);
-        Assert.assertTrue(ActionTriggerXor3InputTest.propagate(graph, -1.0, -1.0, 1.0) > 0.0);
-        Assert.assertTrue(ActionTriggerXor3InputTest.propagate(graph, -1.0, 1.0, -1.0) > 0.0);
-        Assert.assertTrue(ActionTriggerXor3InputTest.propagate(graph, 1.0, -1.0, -1.0) > 0.0);
-        Assert.assertTrue(ActionTriggerXor3InputTest.propagate(graph, -1.0, -1.0, -1.0) < 0.0);
-    }
-
-    private static double calculateError(FramedTransactionalGraph<?> graph, double in1, double in2, double in3, double expect) {
-        double actual = ActionTriggerXor3InputTest.propagate(graph, in1, in2, in3);
-        return Math.abs(actual - expect) / Math.abs(expect);
-    }
-
-    private static void train(final FramedTransactionalGraph<?> graph, final double input1, final double input2, final double input3, final double expected) {
-        ActionTriggerXor3InputTest.propagate(graph, input1, input2, input3);
-
-        final Iterator<BackpropNeuron> outputNeurons = graph.getVertices("layer", "output", BackpropNeuron.class).iterator();
-        final BackpropNeuron outputNeuron = outputNeurons.next();
-        Assert.assertTrue(!outputNeurons.hasNext());
-        outputNeuron.setDeltaTrain((expected - outputNeuron.getSignal()) * ACTIVATION_FUNCTION.activateDerivative(outputNeuron.getActivity()));
-        graph.commit();
-
-        final Iterator<PrioritySerialTrigger> backpropTriggers = graph.getVertices("triggerPointer", "backpropagate", PrioritySerialTrigger.class).iterator();
-        final PrioritySerialTrigger backpropTrigger = backpropTriggers.next();
-        Assert.assertTrue(!backpropTriggers.hasNext());
-        backpropTrigger.trigger();
-        graph.commit();
-    }
-
-    private static double propagate(final FramedTransactionalGraph<?> graph, final double input1, final double input2, final double input3) {
-        final Iterator<BackpropNeuron> inputNeurons = graph.getVertices("layer", "input", BackpropNeuron.class).iterator();
-        inputNeurons.next().setSignal(input1);
-        inputNeurons.next().setSignal(input2);
-        inputNeurons.next().setSignal(input3);
-        Assert.assertTrue(!inputNeurons.hasNext());
-        graph.commit();
-
-        final Iterator<PrioritySerialTrigger> propagateTriggers = graph.getVertices("triggerPointer", "propagate", PrioritySerialTrigger.class).iterator();
-        final PrioritySerialTrigger propagateTrigger = propagateTriggers.next();
-        Assert.assertTrue(!propagateTriggers.hasNext());
-        try {
-            propagateTrigger.trigger();
-        } catch (final UndeclaredThrowableException caught) {
-            caught.getUndeclaredThrowable().printStackTrace();
-            throw caught;
-        }
-        graph.commit();
-
-        final Iterator<BackpropNeuron> outputNeurons = graph.getVertices("layer", "output", BackpropNeuron.class).iterator();
-        final BackpropNeuron outputNeuron = outputNeurons.next();
-        Assert.assertTrue(!outputNeurons.hasNext());
-        return outputNeuron.getSignal();
-    }
-
-    private static BackpropNeuron createNeuron(final FramedGraph<?> graph, final String layer) {
-        final BackpropNeuron neuron = graph.addVertex(null, BackpropNeuron.class);
-        neuron.asVertex().setProperty("layer", layer);
-        return neuron;
-    }
-
-    private static PrioritySerialTrigger createPrioritySerialTrigger(final FramedGraph<?> graph) {
-        return graph.addVertex(null, PrioritySerialTrigger.class);
-    }
-}
diff --git a/src/test/java/com/syncleus/grail/neural/backprop/SimpleOrTest.java b/src/test/java/com/syncleus/grail/neural/backprop/SimpleOrTest.java
deleted file mode 100644
index 37e85a1..0000000
--- a/src/test/java/com/syncleus/grail/neural/backprop/SimpleOrTest.java
+++ /dev/null
@@ -1,122 +0,0 @@
-/******************************************************************************
- *                                                                             *
- *  Copyright: (c) Syncleus, Inc.                                              *
- *                                                                             *
- *  You may redistribute and modify this source code under the terms and       *
- *  conditions of the Open Source Community License - Type C version 1.0       *
- *  or any later version as published by Syncleus, Inc. at www.syncleus.com.   *
- *  There should be a copy of the license included with this file. If a copy   *
- *  of the license is not included you are granted no right to distribute or   *
- *  otherwise use this file except through a legal and valid license. You      *
- *  should also contact Syncleus, Inc. at the information below if you cannot  *
- *  find a license:                                                            *
- *                                                                             *
- *  Syncleus, Inc.                                                             *
- *  2604 South 12th Street                                                     *
- *  Philadelphia, PA 19148                                                     *
- *                                                                             *
- ******************************************************************************/
-package com.syncleus.grail.neural.backprop;
-
-import com.syncleus.grail.neural.activation.*;
-import com.syncleus.grail.graph.*;
-import com.tinkerpop.frames.*;
-import org.junit.*;
-import java.util.*;
-
-public class SimpleOrTest {
-
-    private static final ActivationFunction ACTIVATION_FUNCTION = new SineActivationFunction();
-
-    @Test
-    public void testOr() {
-        final FramedTransactionalGraph<?> graph = BlankGraphFactory.makeTinkerGraph();
-
-        final List<BackpropNeuron> newInputNeurons = new ArrayList<BackpropNeuron>(2);
-        newInputNeurons.add(SimpleOrTest.createNeuron(graph, "input"));
-        newInputNeurons.add(SimpleOrTest.createNeuron(graph, "input"));
-        final BackpropNeuron newOutputNeuron = SimpleOrTest.createNeuron(graph, "output");
-
-        //connect all input neurons to the output neuron
-        for( final BackpropNeuron inputNeuron : newInputNeurons ) {
-            graph.addEdge(null, inputNeuron.asVertex(), newOutputNeuron.asVertex(), "signals", BackpropSynapse.class);//.asEdge().setProperty("type", "BackpropSynapse");
-        }
-        //create bias neuron for output neuron
-        final BackpropNeuron biasNeuron = SimpleOrTest.createNeuron(graph, "bias");
-        biasNeuron.setSignal(1.0);
-        graph.addEdge(null, biasNeuron.asVertex(), newOutputNeuron.asVertex(), "signals", BackpropSynapse.class);//.asEdge().setProperty("type", "BackpropSynapse");
-        graph.commit();
-
-        for(int i = 0; i < 10000; i++) {
-            SimpleOrTest.train(graph, -1.0, 1.0, 1.0);
-            SimpleOrTest.train(graph, 1.0, -1.0, 1.0);
-            SimpleOrTest.train(graph, 1.0, 1.0, 1.0);
-            SimpleOrTest.train(graph, -1.0, -1.0, -1.0);
-            if( i%50 == 0 && SimpleOrTest.calculateError(graph) < 0.2 )
-                break;
-        }
-
-        Assert.assertTrue("expected >0.0, got: " + SimpleOrTest.propagate(graph, 1.0, 1.0), SimpleOrTest.propagate(graph, 1.0, 1.0) > 0.0);
-        Assert.assertTrue("expected <0.0, got: " + SimpleOrTest.propagate(graph, -1.0, -1.0), SimpleOrTest.propagate(graph, -1.0, -1.0) < 0.0);
-        Assert.assertTrue("expected >0.0, got: " + SimpleOrTest.propagate(graph, 1.0, -1.0), SimpleOrTest.propagate(graph, 1.0, -1.0) > 0.0);
-        Assert.assertTrue("expected >0.0, got: " + SimpleOrTest.propagate(graph, -1.0, 1.0), SimpleOrTest.propagate(graph, -1.0, 1.0) > 0.0);
-    }
-
-    private static double calculateError(FramedTransactionalGraph<?> graph) {
-        double actual = SimpleOrTest.propagate(graph, 1.0, 1.0);
-        double error = Math.abs(actual - 1.0) / 2.0;
-
-        actual = SimpleOrTest.propagate(graph, -1.0, -1.0);
-        error += Math.abs(actual + 1.0) / 2.0;
-
-        actual = SimpleOrTest.propagate(graph, 1.0, -1.0);
-        error += Math.abs(actual - 1.0) / 2.0;
-
-        actual = SimpleOrTest.propagate(graph, -1.0, 1.0);
-        error += Math.abs(actual - 1.0) / 2.0;
-
-        return error/4.0;
-    }
-
-    private static void train(final FramedTransactionalGraph<?> graph, final double input1, final double input2, final double expected) {
-        SimpleOrTest.propagate(graph, input1, input2);
-
-        final Iterator<BackpropNeuron> outputNeurons = graph.getVertices("layer", "output", BackpropNeuron.class).iterator();
-        final BackpropNeuron outputNeuron = outputNeurons.next();
-        Assert.assertTrue(!outputNeurons.hasNext());
-        outputNeuron.setDeltaTrain((expected - outputNeuron.getSignal()) * ACTIVATION_FUNCTION.activateDerivative(outputNeuron.getActivity()));
-        graph.commit();
-
-        final Iterator<BackpropNeuron> inputNeurons = graph.getVertices("layer", "input", BackpropNeuron.class).iterator();
-        inputNeurons.next().backpropagate();
-        inputNeurons.next().backpropagate();
-        Assert.assertTrue(!inputNeurons.hasNext());
-        graph.commit();
-
-        final Iterator<BackpropNeuron> biasNeurons = graph.getVertices("layer", "bias", BackpropNeuron.class).iterator();
-        biasNeurons.next().backpropagate();
-        Assert.assertTrue(!biasNeurons.hasNext());
-        graph.commit();
-    }
-
-    private static double propagate(final FramedTransactionalGraph<?> graph, final double input1, final double input2) {
-        final Iterator<BackpropNeuron> inputNeurons = graph.getVertices("layer", "input", BackpropNeuron.class).iterator();
-        inputNeurons.next().setSignal(input1);
-        inputNeurons.next().setSignal(input2);
-        Assert.assertTrue(!inputNeurons.hasNext());
-        graph.commit();
-
-        final Iterator<BackpropNeuron> outputNeurons = graph.getVertices("layer", "output", BackpropNeuron.class).iterator();
-        final BackpropNeuron outputNeuron = outputNeurons.next();
-        Assert.assertTrue(!outputNeurons.hasNext());
-        outputNeuron.propagate();
-        graph.commit();
-        return outputNeuron.getSignal();
-    }
-
-    private static BackpropNeuron createNeuron(final FramedGraph<?> graph, final String layer) {
-        final BackpropNeuron neuron = graph.addVertex(null, BackpropNeuron.class);
-        neuron.asVertex().setProperty("layer", layer);
-        return neuron;
-    }
-}
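(For reference: the removed OR test above scores the trained network as the mean of |actual - expected| / 2 over its four training rows, the division by two normalizing against the [-1, 1] signal range, and every 50 training iterations it stops once that mean drops below 0.2. A minimal, self-contained sketch of the same metric in plain Java follows; the MeanErrorSketch class and the stand-in orNetwork lambda are illustrative inventions, not part of the removed sources.)

    import java.util.function.ToDoubleFunction;

    /** Illustrative sketch (not part of the removed sources) of the normalized error the tests compute. */
    public final class MeanErrorSketch {

        /** Mean of |actual - expected| / 2 over a truth table: 0.0 is a perfect fit, 1.0 is maximally wrong. */
        static double meanError(final double[][] inputs, final double[] expected,
                                final ToDoubleFunction<double[]> network) {
            double error = 0.0;
            for (int i = 0; i < inputs.length; i++) {
                error += Math.abs(network.applyAsDouble(inputs[i]) - expected[i]) / 2.0;
            }
            return error / inputs.length;
        }

        public static void main(final String[] args) {
            // OR truth table in the -1/+1 encoding used by the removed test.
            final double[][] inputs   = { {1.0, 1.0}, {-1.0, -1.0}, {1.0, -1.0}, {-1.0, 1.0} };
            final double[]   expected = {        1.0,         -1.0,         1.0,         1.0 };
            // Stand-in "network" that already computes OR exactly (hypothetical, for demonstration only).
            final ToDoubleFunction<double[]> orNetwork = in -> (in[0] > 0.0 || in[1] > 0.0) ? 1.0 : -1.0;
            System.out.println(meanError(inputs, expected, orNetwork)); // prints 0.0
        }
    }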
diff --git a/src/test/java/com/syncleus/grail/neural/backprop/SimpleXor2InputTest.java b/src/test/java/com/syncleus/grail/neural/backprop/SimpleXor2InputTest.java
deleted file mode 100644
index e237521..0000000
--- a/src/test/java/com/syncleus/grail/neural/backprop/SimpleXor2InputTest.java
+++ /dev/null
@@ -1,161 +0,0 @@
-/******************************************************************************
- *                                                                             *
- *  Copyright: (c) Syncleus, Inc.                                              *
- *                                                                             *
- *  You may redistribute and modify this source code under the terms and       *
- *  conditions of the Open Source Community License - Type C version 1.0       *
- *  or any later version as published by Syncleus, Inc. at www.syncleus.com.   *
- *  There should be a copy of the license included with this file. If a copy   *
- *  of the license is not included you are granted no right to distribute or   *
- *  otherwise use this file except through a legal and valid license. You      *
- *  should also contact Syncleus, Inc. at the information below if you cannot  *
- *  find a license:                                                            *
- *                                                                             *
- *  Syncleus, Inc.                                                             *
- *  2604 South 12th Street                                                     *
- *  Philadelphia, PA 19148                                                     *
- *                                                                             *
- ******************************************************************************/
-package com.syncleus.grail.neural.backprop;
-
-import com.syncleus.grail.neural.activation.*;
-import com.syncleus.grail.graph.*;
-import com.tinkerpop.frames.*;
-import org.junit.*;
-import java.util.*;
-
-public class SimpleXor2InputTest {
-    private static final ActivationFunction ACTIVATION_FUNCTION = new HyperbolicTangentActivationFunction();
-
-    @Test
-    public void testXor() {
-        final FramedTransactionalGraph<?> graph = BlankGraphFactory.makeTinkerGraph();
-
-        final List<BackpropNeuron> newInputNeurons = new ArrayList<BackpropNeuron>(2);
-        newInputNeurons.add(SimpleXor2InputTest.createNeuron(graph, "input"));
-        newInputNeurons.add(SimpleXor2InputTest.createNeuron(graph, "input"));
-        final List<BackpropNeuron> newHiddenNeurons = new ArrayList<BackpropNeuron>(4);
-        newHiddenNeurons.add(SimpleXor2InputTest.createNeuron(graph, "hidden"));
-        newHiddenNeurons.add(SimpleXor2InputTest.createNeuron(graph, "hidden"));
-        newHiddenNeurons.add(SimpleXor2InputTest.createNeuron(graph, "hidden"));
-        newHiddenNeurons.add(SimpleXor2InputTest.createNeuron(graph, "hidden"));
-        final BackpropNeuron newOutputNeuron = SimpleXor2InputTest.createNeuron(graph, "output");
-        newOutputNeuron.setActivationFunctionClass(HyperbolicTangentActivationFunction.class);
-        newOutputNeuron.setLearningRate(0.09);
-        final BackpropNeuron biasNeuron = SimpleXor2InputTest.createNeuron(graph, "bias");
-        biasNeuron.setSignal(1.0);
-        biasNeuron.setActivationFunctionClass(HyperbolicTangentActivationFunction.class);
-        biasNeuron.setLearningRate(0.09);
-
-        //connect all input neurons to hidden neurons
-        for( BackpropNeuron inputNeuron : newInputNeurons ) {
-            //make sure all input neurons use the tanh activation function
-            inputNeuron.setActivationFunctionClass(HyperbolicTangentActivationFunction.class);
-            inputNeuron.setLearningRate(0.09);
-            for( BackpropNeuron hiddenNeuron : newHiddenNeurons ) {
-                graph.addEdge(null, inputNeuron.asVertex(), hiddenNeuron.asVertex(), "signals", BackpropSynapse.class);
-            }
-        }
-        //connect all hidden neurons to the output neuron
-        for( BackpropNeuron hiddenNeuron : newHiddenNeurons ) {
-            graph.addEdge(null, hiddenNeuron.asVertex(), newOutputNeuron.asVertex(), "signals", BackpropSynapse.class);
-
-            //all hidden neurons should use the tanh activation function
-            hiddenNeuron.setActivationFunctionClass(HyperbolicTangentActivationFunction.class);
-            hiddenNeuron.setLearningRate(0.09);
-
-            //connect the bias neuron to this hidden neuron
-            graph.addEdge(null, biasNeuron.asVertex(), hiddenNeuron.asVertex(), "signals", BackpropSynapse.class);
-        }
-        //connect the bias neuron to the output neuron
-        graph.addEdge(null, biasNeuron.asVertex(), newOutputNeuron.asVertex(), "signals", BackpropSynapse.class);
-        graph.commit();
-
-        for(int i = 0; i < 10000; i++) {
-            SimpleXor2InputTest.train(graph, -1.0, 1.0, 1.0);
-            SimpleXor2InputTest.train(graph, 1.0, -1.0, 1.0);
-            SimpleXor2InputTest.train(graph, 1.0, 1.0, -1.0);
-            SimpleXor2InputTest.train(graph, -1.0, -1.0, -1.0);
-            if( i%50 == 0 && SimpleXor2InputTest.calculateError(graph) < 0.1 )
-                break;
-        }
-        Assert.assertTrue(SimpleXor2InputTest.propagate(graph, 1.0, 1.0) < 0.0);
-        Assert.assertTrue(SimpleXor2InputTest.propagate(graph, -1.0, -1.0) < 0.0);
-        Assert.assertTrue(SimpleXor2InputTest.propagate(graph, 1.0, -1.0) > 0.0);
-        Assert.assertTrue(SimpleXor2InputTest.propagate(graph, -1.0, 1.0) > 0.0);
-    }
-
-    private static double calculateError(FramedTransactionalGraph<?> graph) {
-        double actual = SimpleXor2InputTest.propagate(graph, 1.0, 1.0);
-        double error = Math.abs(actual + 1.0) / 2.0;
-
-        actual = SimpleXor2InputTest.propagate(graph, -1.0, -1.0);
-        error += Math.abs(actual + 1.0) / 2.0;
-
-        actual = SimpleXor2InputTest.propagate(graph, 1.0, -1.0);
-        error += Math.abs(actual - 1.0) / 2.0;
-
-        actual = SimpleXor2InputTest.propagate(graph, -1.0, 1.0);
-        error += Math.abs(actual - 1.0) / 2.0;
-
-        return error/4.0;
-    }
-
-    private static void train(final FramedTransactionalGraph<?> graph, final double input1, final double input2, final double expected) {
-        SimpleXor2InputTest.propagate(graph, input1, input2);
-
-        final Iterator<BackpropNeuron> outputNeurons = graph.getVertices("layer", "output", BackpropNeuron.class).iterator();
-        final BackpropNeuron outputNeuron = outputNeurons.next();
-        Assert.assertTrue(!outputNeurons.hasNext());
-        outputNeuron.setDeltaTrain((expected - outputNeuron.getSignal()) * ACTIVATION_FUNCTION.activateDerivative(outputNeuron.getActivity()));
-        graph.commit();
-
-        final Iterator<BackpropNeuron> hiddenNeurons = graph.getVertices("layer", "hidden", BackpropNeuron.class).iterator();
-        hiddenNeurons.next().backpropagate();
-        hiddenNeurons.next().backpropagate();
-        hiddenNeurons.next().backpropagate();
-        hiddenNeurons.next().backpropagate();
-        Assert.assertTrue(!hiddenNeurons.hasNext());
-        graph.commit();
-
-        final Iterator<BackpropNeuron> inputNeurons = graph.getVertices("layer", "input", BackpropNeuron.class).iterator();
-        inputNeurons.next().backpropagate();
-        inputNeurons.next().backpropagate();
-        Assert.assertTrue(!inputNeurons.hasNext());
-        graph.commit();
-
-        final Iterator<BackpropNeuron> biasNeurons = graph.getVertices("layer", "bias", BackpropNeuron.class).iterator();
-        biasNeurons.next().backpropagate();
-        Assert.assertTrue(!biasNeurons.hasNext());
-        graph.commit();
-    }
-
-    private static double propagate(final FramedTransactionalGraph<?> graph, final double input1, final double input2) {
-        final Iterator<BackpropNeuron> inputNeurons = graph.getVertices("layer", "input", BackpropNeuron.class).iterator();
-        inputNeurons.next().setSignal(input1);
-        inputNeurons.next().setSignal(input2);
-        Assert.assertTrue(!inputNeurons.hasNext());
-        graph.commit();
-
-        final Iterator<BackpropNeuron> hiddenNeurons = graph.getVertices("layer", "hidden", BackpropNeuron.class).iterator();
-        hiddenNeurons.next().propagate();
-        hiddenNeurons.next().propagate();
-        hiddenNeurons.next().propagate();
-        hiddenNeurons.next().propagate();
-        Assert.assertTrue(!hiddenNeurons.hasNext());
-        graph.commit();
-
-        final Iterator<BackpropNeuron> outputNeurons = graph.getVertices("layer", "output", BackpropNeuron.class).iterator();
-        final BackpropNeuron outputNeuron = outputNeurons.next();
-        Assert.assertTrue(!outputNeurons.hasNext());
-        outputNeuron.propagate();
-        graph.commit();
-        return outputNeuron.getSignal();
-    }
-
-    private static BackpropNeuron createNeuron(final FramedGraph<?> graph, final String layer) {
-        final BackpropNeuron neuron = graph.addVertex(null, BackpropNeuron.class);
-        neuron.asVertex().setProperty("layer", layer);
-        return neuron;
-    }
-}
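(For reference: the removed 2-input XOR test trains a 2-4-1 tanh network, since XOR is not linearly separable and needs a hidden layer. As an illustration of the kind of function that network converges to, below is a hand-weighted two-layer tanh XOR in the same -1/+1 encoding; the TanhXorSketch class, the GAIN constant, and the specific weights are picked by hand for this sketch and do not come from the removed code. With a gain of 5 the tanh units saturate to roughly ±1, so the composition behaves like "OR and not AND", which is XOR.)

    /** Hand-weighted tanh XOR in the -1/+1 encoding used by the removed test (illustrative only). */
    public final class TanhXorSketch {
        private static final double GAIN = 5.0; // steep enough that tanh behaves almost like sign()

        static double xor(final double x1, final double x2) {
            final double or  = Math.tanh(GAIN * (x1 + x2 + 1.0)); // +1 unless both inputs are -1
            final double and = Math.tanh(GAIN * (x1 + x2 - 1.0)); // +1 only when both inputs are +1
            return Math.tanh(GAIN * (or - and - 1.0));            // "OR and not AND"
        }

        public static void main(final String[] args) {
            System.out.println(xor( 1.0,  1.0)); // < 0
            System.out.println(xor(-1.0, -1.0)); // < 0
            System.out.println(xor( 1.0, -1.0)); // > 0
            System.out.println(xor(-1.0,  1.0)); // > 0
        }
    }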
diff --git a/src/test/java/com/syncleus/grail/neural/backprop/SimpleXor3InputTest.java b/src/test/java/com/syncleus/grail/neural/backprop/SimpleXor3InputTest.java
deleted file mode 100644
index 05dfb6e..0000000
--- a/src/test/java/com/syncleus/grail/neural/backprop/SimpleXor3InputTest.java
+++ /dev/null
@@ -1,171 +0,0 @@
-/******************************************************************************
- *                                                                             *
- *  Copyright: (c) Syncleus, Inc.                                              *
- *                                                                             *
- *  You may redistribute and modify this source code under the terms and       *
- *  conditions of the Open Source Community License - Type C version 1.0       *
- *  or any later version as published by Syncleus, Inc. at www.syncleus.com.   *
- *  There should be a copy of the license included with this file. If a copy   *
- *  of the license is not included you are granted no right to distribute or   *
- *  otherwise use this file except through a legal and valid license. You      *
- *  should also contact Syncleus, Inc. at the information below if you cannot  *
- *  find a license:                                                            *
- *                                                                             *
- *  Syncleus, Inc.                                                             *
- *  2604 South 12th Street                                                     *
- *  Philadelphia, PA 19148                                                     *
- *                                                                             *
- ******************************************************************************/
-package com.syncleus.grail.neural.backprop;
-
-import com.syncleus.grail.neural.activation.*;
-import com.syncleus.grail.graph.BlankGraphFactory;
-import com.tinkerpop.frames.*;
-import org.junit.*;
-
-import java.util.*;
-
-public class SimpleXor3InputTest {
-    private static final ActivationFunction ACTIVATION_FUNCTION = new HyperbolicTangentActivationFunction();
-
-    @Test
-    public void testXor() {
-        final FramedTransactionalGraph<?> graph = BlankGraphFactory.makeTinkerGraph();
-
-        final List<BackpropNeuron> newInputNeurons = new ArrayList<BackpropNeuron>(3);
-        newInputNeurons.add(SimpleXor3InputTest.createNeuron(graph, "input"));
-        newInputNeurons.add(SimpleXor3InputTest.createNeuron(graph, "input"));
-        newInputNeurons.add(SimpleXor3InputTest.createNeuron(graph, "input"));
-        final List<BackpropNeuron> newHiddenNeurons = new ArrayList<BackpropNeuron>(3);
-        newHiddenNeurons.add(SimpleXor3InputTest.createNeuron(graph, "hidden"));
-        newHiddenNeurons.add(SimpleXor3InputTest.createNeuron(graph, "hidden"));
-        newHiddenNeurons.add(SimpleXor3InputTest.createNeuron(graph, "hidden"));
-        final BackpropNeuron newOutputNeuron = SimpleXor3InputTest.createNeuron(graph, "output");
-        final BackpropNeuron biasNeuron = SimpleXor3InputTest.createNeuron(graph, "bias");
-        biasNeuron.setSignal(1.0);
-
-        //connect all input neurons to hidden neurons
-        for( BackpropNeuron inputNeuron : newInputNeurons ) {
-            for( BackpropNeuron hiddenNeuron : newHiddenNeurons ) {
-                graph.addEdge(null, inputNeuron.asVertex(), hiddenNeuron.asVertex(), "signals", BackpropSynapse.class);
-            }
-        }
-        //connect all hidden neurons to the output neuron
-        for( BackpropNeuron hiddenNeuron : newHiddenNeurons ) {
-            graph.addEdge(null, hiddenNeuron.asVertex(), newOutputNeuron.asVertex(), "signals", BackpropSynapse.class);
-
-            //create bias connection
-            graph.addEdge(null, biasNeuron.asVertex(), hiddenNeuron.asVertex(), "signals", BackpropSynapse.class);
-        }
-        //connect the bias neuron to the output neuron
-        graph.addEdge(null, biasNeuron.asVertex(), newOutputNeuron.asVertex(), "signals", BackpropSynapse.class);
-        graph.commit();
-
-        for(int i = 0; i < 10000 ; i++) {
-            SimpleXor3InputTest.train(graph, 1.0, 1.0, 1.0, 1.0);
-            SimpleXor3InputTest.train(graph, -1.0, 1.0, 1.0, -1.0);
-            SimpleXor3InputTest.train(graph, 1.0, -1.0, 1.0, -1.0);
-            SimpleXor3InputTest.train(graph, 1.0, 1.0, -1.0, -1.0);
-            SimpleXor3InputTest.train(graph, -1.0, -1.0, 1.0, 1.0);
-            SimpleXor3InputTest.train(graph, -1.0, 1.0, -1.0, 1.0);
-            SimpleXor3InputTest.train(graph, 1.0, -1.0, -1.0, 1.0);
-            SimpleXor3InputTest.train(graph, -1.0, -1.0, -1.0, -1.0);
-            if( i%50 == 0 && SimpleXor3InputTest.calculateError(graph) < 0.1 )
-                break;
-        }
-        Assert.assertTrue(SimpleXor3InputTest.propagate(graph, 1.0, 1.0, 1.0) > 0.0);
-        Assert.assertTrue(SimpleXor3InputTest.propagate(graph, -1.0, 1.0, 1.0) < 0.0);
-        Assert.assertTrue(SimpleXor3InputTest.propagate(graph, 1.0, -1.0, 1.0) < 0.0);
-        Assert.assertTrue(SimpleXor3InputTest.propagate(graph, 1.0, 1.0, -1.0) < 0.0);
-        Assert.assertTrue(SimpleXor3InputTest.propagate(graph, -1.0, -1.0, 1.0) > 0.0);
-        Assert.assertTrue(SimpleXor3InputTest.propagate(graph, -1.0, 1.0, -1.0) > 0.0);
-        Assert.assertTrue(SimpleXor3InputTest.propagate(graph, 1.0, -1.0, -1.0) > 0.0);
-        Assert.assertTrue(SimpleXor3InputTest.propagate(graph, -1.0, -1.0, -1.0) < 0.0);
-    }
-
-    private static double calculateError(FramedTransactionalGraph<?> graph) {
-        double actual = SimpleXor3InputTest.propagate(graph, 1.0, 1.0, 1.0);
-        double error = Math.abs(actual - 1.0) / 2.0;
-
-        actual = SimpleXor3InputTest.propagate(graph, -1.0, 1.0, 1.0);
-        error += Math.abs(actual + 1.0) / 2.0;
-
-        actual = SimpleXor3InputTest.propagate(graph, 1.0, -1.0, 1.0);
-        error += Math.abs(actual + 1.0) / 2.0;
-
-        actual = SimpleXor3InputTest.propagate(graph, 1.0, 1.0, -1.0);
-        error += Math.abs(actual + 1.0) / 2.0;
-
-        actual = SimpleXor3InputTest.propagate(graph, 1.0, -1.0, -1.0);
-        error += Math.abs(actual - 1.0) / 2.0;
-
-        actual = SimpleXor3InputTest.propagate(graph, -1.0, 1.0, -1.0);
-        error += Math.abs(actual - 1.0) / 2.0;
-
-        actual = SimpleXor3InputTest.propagate(graph, -1.0, -1.0, 1.0);
-        error += Math.abs(actual - 1.0) / 2.0;
-
-        actual = SimpleXor3InputTest.propagate(graph, -1.0, -1.0, -1.0);
-        error += Math.abs(actual + 1.0) / 2.0;
-
-        return error/8.0;
-    }
-
-    private static void train(final FramedTransactionalGraph<?> graph, final double input1, final double input2, final double input3, final double expected) {
-        SimpleXor3InputTest.propagate(graph, input1, input2, input3);
-
-        final Iterator<BackpropNeuron> outputNeurons = graph.getVertices("layer", "output", BackpropNeuron.class).iterator();
-        final BackpropNeuron outputNeuron = outputNeurons.next();
-        Assert.assertTrue(!outputNeurons.hasNext());
-        outputNeuron.setDeltaTrain((expected - outputNeuron.getSignal()) * ACTIVATION_FUNCTION.activateDerivative(outputNeuron.getActivity()));
-        graph.commit();
-
-        final Iterator<BackpropNeuron> hiddenNeurons = graph.getVertices("layer", "hidden", BackpropNeuron.class).iterator();
-        hiddenNeurons.next().backpropagate();
-        hiddenNeurons.next().backpropagate();
-        hiddenNeurons.next().backpropagate();
-        Assert.assertTrue(!hiddenNeurons.hasNext());
-        graph.commit();
-
-        final Iterator<BackpropNeuron> inputNeurons = graph.getVertices("layer", "input", BackpropNeuron.class).iterator();
-        inputNeurons.next().backpropagate();
-        inputNeurons.next().backpropagate();
-        inputNeurons.next().backpropagate();
-        Assert.assertTrue(!inputNeurons.hasNext());
-        graph.commit();
-
-        final Iterator<BackpropNeuron> biasNeurons = graph.getVertices("layer", "bias", BackpropNeuron.class).iterator();
-        biasNeurons.next().backpropagate();
-        Assert.assertTrue(!biasNeurons.hasNext());
-        graph.commit();
-    }
-
-    private static double propagate(final FramedTransactionalGraph<?> graph, final double input1, final double input2, final double input3) {
-        final Iterator<BackpropNeuron> inputNeurons = graph.getVertices("layer", "input", BackpropNeuron.class).iterator();
-        inputNeurons.next().setSignal(input1);
-        inputNeurons.next().setSignal(input2);
-        inputNeurons.next().setSignal(input3);
-        Assert.assertTrue(!inputNeurons.hasNext());
-        graph.commit();
-
-        final Iterator<BackpropNeuron> hiddenNeurons = graph.getVertices("layer", "hidden", BackpropNeuron.class).iterator();
-        hiddenNeurons.next().propagate();
-        hiddenNeurons.next().propagate();
-        hiddenNeurons.next().propagate();
-        Assert.assertTrue(!hiddenNeurons.hasNext());
-        graph.commit();
-
-        final Iterator<BackpropNeuron> outputNeurons = graph.getVertices("layer", "output", BackpropNeuron.class).iterator();
-        final BackpropNeuron outputNeuron = outputNeurons.next();
-        Assert.assertTrue(!outputNeurons.hasNext());
-        outputNeuron.propagate();
-        graph.commit();
-        return outputNeuron.getSignal();
-    }
-
-    private static BackpropNeuron createNeuron(final FramedGraph<?> graph, final String layer) {
-        final BackpropNeuron neuron = graph.addVertex(null, BackpropNeuron.class);
-        neuron.asVertex().setProperty("layer", layer);
-        return neuron;
-    }
-}
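(For reference: the 3-input "XOR" exercised by the removed SimpleXor3InputTest is odd parity; in the -1/+1 encoding the target equals the product of the three inputs, being +1 exactly when an odd number of inputs are +1. The small self-contained check below verifies that reading against the test's own training table; the ParityTableSketch class name is illustrative.)

    /** Checks that the expected column of the removed test's training table is the product of its inputs. */
    public final class ParityTableSketch {
        public static void main(final String[] args) {
            final double[][] table = {
                { 1,  1,  1,  1}, {-1,  1,  1, -1}, { 1, -1,  1, -1}, { 1,  1, -1, -1},
                {-1, -1,  1,  1}, {-1,  1, -1,  1}, { 1, -1, -1,  1}, {-1, -1, -1, -1},
            };
            for (final double[] row : table) {
                final double expected = row[3];
                final double product = row[0] * row[1] * row[2]; // +1 exactly when an odd number of inputs are +1
                System.out.println(product == expected);          // prints true for every row of the training set
            }
        }
    }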
-- 
GitLab