Neural Network: Backpropagation

Time: 2011-12-19 05:39:54

Tags: c++ neural-network backpropagation

I've been scratching my head over this one... Basically, I'm trying to train a neural network with backpropagation.

I believe my error backpropagation is correct, but it doesn't seem to train my neural network properly...
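For reference, a sketch of the textbook per-sample delta-rule updates this code appears to implement (assuming a squared-error loss; f is the activation, d its derivative, t the label, y the output, and eta the learning rate):

$$\delta_j^{(\mathrm{out})} = d(net_j)\,(t_j - y_j), \qquad \delta_j^{(\mathrm{hidden})} = d(net_j)\sum_k w_{jk}\,\delta_k, \qquad \Delta w_{ij} = \eta\,\delta_j\,f(net_i)$$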

#include <memory>
#include <valarray>
#include <vector>

// Neuron, NeuronBackpropagation, Synapse, NeuronLayer and
// NeuralNetworkFeedForwardLayered are defined elsewhere in the project.
template<class T, class U>
class Backpropagation {
public:
  static void train(std::shared_ptr<NeuralNetworkFeedForwardLayered<T, U> > neural_network, std::valarray<double> input, std::valarray<double> label, double eta = 0.10) {
    std::valarray<double> output;

    // go forward and calculate our values
    neural_network->f(input, output);

    // error at output nodes
    std::valarray<double> error = label - output;
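    // note: this is the raw difference (t - y); unlike the hidden layers
    // below, no activation-derivative factor d() is applied at the output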

    std::vector<std::shared_ptr<Neuron> > neurons = neural_network->get_outputs();

    // set the error on each of our output neurons
    for (std::vector<std::shared_ptr<Neuron> >::size_type i = 0; i < neurons.size(); i++) {
      std::shared_ptr<NeuronBackpropagation> neuron = std::dynamic_pointer_cast<NeuronBackpropagation>(neurons[i]);

      neuron->set_error(error[i]);
    }

    // backpropagate our errors
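    // (i is an unsigned size_type: once i-- wraps past zero it exceeds
    // get_layers_count(), so the loop condition terminates the walk at layer 0)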
    for (typename std::vector<std::shared_ptr<NeuronLayer<T> > >::size_type i = neural_network->get_layers_count() - 2; i <= neural_network->get_layers_count(); i--) {
      // go through each neuron in the layer
      std::shared_ptr<NeuronLayer<T> > layer = neural_network->get_layer(i);

      for (typename std::vector<std::shared_ptr<Neuron> >::size_type j = 0; j < layer->get_neurons_count(); j++) {
        std::shared_ptr<NeuronBackpropagation> neuron = std::dynamic_pointer_cast<NeuronBackpropagation>(layer->get_neuron(j));

        double sum = 0;

        std::vector<std::shared_ptr<Synapse> > connections = neuron->get_synapse_outputs();
        for (typename std::vector<std::shared_ptr<Synapse> >::size_type k = 0; k < connections.size(); k++) {
          std::shared_ptr<NeuronBackpropagation> post = std::dynamic_pointer_cast<NeuronBackpropagation>(connections[k]->get_neuron_post());
          sum += connections[k]->w() * post->get_error();
        }

        neuron->set_error(neuron->d() * sum);
      }
    }

    // now it's time to calculate the weight deltas (dw = eta * error * f(pre)) and update our weights
    for (typename std::vector<std::shared_ptr<NeuronLayer<T> > >::size_type i = 0; i < neural_network->get_layers_count(); i++) {
      std::shared_ptr<NeuronLayer<T> > layer = neural_network->get_layer(i);

      for (typename std::vector<std::shared_ptr<Neuron> >::size_type j = 0; j < layer->get_neurons_count(); j++) {
        std::shared_ptr<NeuronBackpropagation> neuron = std::dynamic_pointer_cast<NeuronBackpropagation>(layer->get_neuron(j));
        std::vector<std::shared_ptr<Synapse> > connections = neuron->get_synapse_inputs();

        double error = neuron->get_error();
        for (typename std::vector<std::shared_ptr<Synapse> >::size_type k = 0; k < connections.size(); k++) {
          std::shared_ptr<Neuron> pre = connections[k]->get_neuron_pre();
          // std::cout << "set_weight: " << connections[k]->w() + eta * error * pre->f() << " " << error << " " << pre->f() << std::endl;

          connections[k]->set_weight(connections[k]->w() + eta * error * pre->f());
        }
      }
    }
  }

};
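For completeness, here is a minimal sketch of how train() might be driven, assuming the rest of the project provides a way to build the network and the training set (make_network(), inputs, and labels below are hypothetical placeholders, not part of the post):

// minimal training-loop sketch -- make_network(), inputs and labels are
// hypothetical placeholders; only Backpropagation::train() is from the post
std::shared_ptr<NeuralNetworkFeedForwardLayered<double, double> > network = make_network();
std::vector<std::valarray<double> > inputs;  // filled with training samples
std::vector<std::valarray<double> > labels;  // matching target outputs

for (std::size_t epoch = 0; epoch < 10000; ++epoch) {
  // one stochastic (per-sample) update per training pattern
  for (std::size_t s = 0; s < inputs.size(); ++s) {
    Backpropagation<double, double>::train(network, inputs[s], labels[s], 0.10);
  }
}

Shuffling the samples each epoch and tracking the mean squared error across epochs makes it much easier to see whether the weights are converging at all or merely oscillating.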

0 Answers:

No answers yet.