encog(一)

encog库主要是一个包含神经网络算法的java库。

BasicNetwork类是最基本的前馈神经网络。

以下是BasicNetwork类的方法

void addLayer(Layer layer)
向网络中添加新层
void addWeight(int fromLayer, int fromNeuron, int toNeuron, double value)   
改变权重。 W[第fromLayer+1层](fromNeuron+1, toNeuron+1) = W[第fromLayer+1层](fromNeuron+1, toNeuron+1) + value

network.addWeight(0, 1, 2, -1);  //将第一层的第二个神经元连接到第二层的第三个神经元的权重增加-1

double calculateError(MLDataSet data)
Calculate the error for this neural network.
int calculateNeuronCount()
Calculate the total number of neurons in the network across all layers.
int classify(MLData input)
Classify the input into a group.
void clearContext()
Clear any data from any context layers.
Object clone()
Return a clone of this neural network.
void compute(double[] input, double[] output)
Compute the output for this network.
MLData compute(MLData input)
Compute the output for a given input to the neural network.
void decodeFromArray(double[] encoded)
Decode an array to this object.
String dumpWeights() 
void enableConnection(int fromLayer, int fromNeuron, int toNeuron, boolean enable)
关闭或开启网络中的某个连接
int encodedArrayLength()
void encodeToArray(double[] encoded)
Encode the object to the specified array.
boolean equals(BasicNetwork other, int precision)
Determine if this neural network is equal to another.
boolean equals(Object other)
Compare the two neural networks.
ActivationFunction getActivation(int layer)
Get the activation function for the specified layer.
String getFactoryArchitecture()
String getFactoryType()
FlatNetwork getFlat()
int getInputCount()
double getLayerBiasActivation(int l)
Get the bias activation for the specified layer.
int getLayerCount() 
int getLayerNeuronCount(int l)
Get the neuron count.
double getLayerOutput(int layer, int neuronNumber)
Get the layer output for the specified neuron.
int getLayerTotalNeuronCount(int l)
Get the total (including bias and context) neuron count for a layer.
int getOutputCount()
NeuralStructure getStructure() 
double getWeight(int fromLayer, int fromNeuron, int toNeuron)
Get the weight between the two layers.
int hashCode()
Generate a hash code.
boolean isConnected(int layer, int fromNeuron, int toNeuron)
Determine if the specified connection is enabled.
boolean isLayerBiased(int l)
Determine if the specified layer is biased.
void reset()
随机设置网络权重
void reset(int seed)
通过随机数种子随机设置网络权重
void setBiasActivation(double activation)
Sets the bias activation for every layer that supports bias.
void setLayerBiasActivation(int l, double value)
Set the bias activation for the specified layer.
void setWeight(int fromLayer, int fromNeuron, int toNeuron, double value)
Set the weight between the two specified neurons.
String toString()
void updateProperties()
Update any objects when a property changes.
void validateNeuron(int targetLayer, int neuron)
Validate that the specified targetLayer and neuron are valid.
int winner(MLData input)
Determine the winner for the specified input.
 

 BasicLayer类是最基本的层

构造函数
BasicLayer(ActivationFunction activationFunction, boolean hasBias, int neuronCount)
第一个参数:激活函数类型
第二个参数:是否有偏置节点
第三个参数:神经元个数
BasicLayer(int neuronCount)
Construct this layer with a sigmoid activation function.
返回值成员函数
ActivationFunction getActivationFunction() 
BasicNetwork getNetwork() 
int getNeuronCount() 
void setNetwork(BasicNetwork network)
Set the network for this layer.
import org.encog.Encog;
import org.encog.engine.network.activation.ActivationSigmoid;
import org.encog.ml.data.MLData;
import org.encog.ml.data.MLDataPair;
import org.encog.ml.data.MLDataSet;
import org.encog.ml.data.basic.BasicMLDataSet;
import org.encog.neural.networks.BasicNetwork;
import org.encog.neural.networks.layers.BasicLayer;
import org.encog.neural.networks.training.propagation.resilient.ResilientPropagation;

/**
神经网络实现异或逻辑
 */
public class text {

    /** Input patterns covering the full XOR truth table. */
    public static double XOR_INPUT[][] = { { 0.0, 0.0 }, { 1.0, 0.0 },
            { 0.0, 1.0 }, { 1.0, 1.0 } };

    /** Expected (ideal) output for each XOR input pattern. */
    public static double XOR_IDEAL[][] = { { 0.0 }, { 1.0 }, { 1.0 }, { 0.0 } };
    
    /**
     * Builds a 2-3-3-1 feed-forward network, trains it on XOR with
     * resilient propagation (RPROP) until the training error drops
     * below 0.01, then prints the trained network's output for every
     * training pattern.
     *
     * @param args No arguments are used.
     */
    public static void main(final String args[]) {

        // Assemble the network layer by layer, without using a factory.
        // A null activation on the input layer means inputs pass through unchanged.
        final BasicNetwork net = new BasicNetwork();
        net.addLayer(new BasicLayer(null, true, 2));                     // input layer
        net.addLayer(new BasicLayer(new ActivationSigmoid(), true, 3));  // hidden layer 1
        net.addLayer(new BasicLayer(new ActivationSigmoid(), true, 3));  // hidden layer 2
        net.addLayer(new BasicLayer(new ActivationSigmoid(), false, 1)); // output layer
        net.getStructure().finalizeStructure();
        net.reset(); // randomize the initial weights

        // Wrap the raw input/ideal arrays as a training set.
        final MLDataSet data = new BasicMLDataSet(XOR_INPUT, XOR_IDEAL);

        // Train with RPROP until the error target is reached.
        final ResilientPropagation trainer = new ResilientPropagation(net, data);
        int iteration = 0;
        do {
            trainer.iteration();
            iteration++;
            System.out.println("Epoch #" + iteration + " Error:" + trainer.getError());
        } while (trainer.getError() > 0.01);
        trainer.finishTraining();

        // Evaluate the trained network on every training pattern.
        System.out.println("Neural Network Results:");
        for (final MLDataPair pair : data) {
            final MLData out = net.compute(pair.getInput());
            System.out.println(pair.getInput().getData(0) + "," + pair.getInput().getData(1)
                    + ", actual=" + out.getData(0) + ",ideal=" + pair.getIdeal().getData(0));
        }

        // Release Encog's internal thread pools so the JVM can exit cleanly.
        Encog.getInstance().shutdown();
    }
}
原文地址:https://www.cnblogs.com/codeDog123/p/6753593.html