如何在Encog 3.4(Github中当前正在开发的版本)中暂停遗传算法?

我正在使用Encog的Java版本。

我正在尝试修改Encog随附的Lunar示例。我想暂停/序列化遗传算法,然后在以后继续/反序列化。

当我调用 train.pause(); 时,它只返回 null——从源代码可以清楚地看出原因:该方法的实现始终返回 null。

我认为这将是非常简单的,因为在某些情况下,我想训练神经网络,将其用于一些预测,然后在获得更多数据之前继续使用遗传算法进行训练,然后再恢复更多的预测-无需从头开始重新训练。

请注意,我不是在尝试序列化或持久化神经网络,而是整个遗传算法。

最佳答案

并不是Encog中的所有训练器都支持简单的暂停/恢复。如果不支持,该方法就会像这里一样返回null。遗传算法训练器比支持暂停/恢复的简单传播训练器复杂得多。要保存遗传算法的状态,必须保存整个种群以及评分函数(后者可能可序列化,也可能不可序列化)。我修改了Lunar Lander示例,向您展示如何通过保存/重新加载神经网络种群来做到这一点。

您会看到它训练了50次迭代,然后往返(加载/保存)遗传算法,然后再训练了50次。

package org.encog.examples.neural.lunar;

import java.io.File;
import java.io.IOException;

import org.encog.Encog;
import org.encog.engine.network.activation.ActivationTANH;
import org.encog.ml.MLMethod;
import org.encog.ml.MLResettable;
import org.encog.ml.MethodFactory;
import org.encog.ml.ea.population.Population;
import org.encog.ml.genetic.MLMethodGeneticAlgorithm;
import org.encog.ml.genetic.MLMethodGenomeFactory;
import org.encog.neural.networks.BasicNetwork;
import org.encog.neural.pattern.FeedForwardPattern;
import org.encog.util.obj.SerializeObject;

/**
 * Lunar Lander example, modified to demonstrate pausing/resuming an
 * Encog genetic algorithm by serializing its {@link Population} to disk
 * and rebuilding the trainer around the reloaded population.
 *
 * <p>Flow: train 50 epochs, round-trip the GA through a file, train 50
 * more epochs, then (as in the original example) train a final 50 epochs
 * and score the winning network.</p>
 */
public class LunarLander {

    /**
     * Where the serialized population is written during the round trip.
     * Built from the system temp directory so the example is portable
     * (the original used a hard-coded per-user path).
     */
    private static final String TRAINER_FILE =
            new File(System.getProperty("java.io.tmpdir"), "trainer.bin").getPath();

    /**
     * Builds the 3-input / 50-hidden / 1-output feed-forward network
     * used both as the GA's phenotype template and for each genome.
     *
     * @return a freshly generated and randomized {@link BasicNetwork}
     */
    public static BasicNetwork createNetwork()
    {
        FeedForwardPattern pattern = new FeedForwardPattern();
        pattern.setInputNeurons(3);
        pattern.addHiddenLayer(50);
        pattern.setOutputNeurons(1);
        pattern.setActivationFunction(new ActivationTANH());
        BasicNetwork network = (BasicNetwork)pattern.generate();
        network.reset(); // randomize weights
        return network;
    }

    /**
     * Serializes the GA's population to {@code file}.
     *
     * <p>NOTE(review): the genome factory wraps an anonymous
     * {@code MethodFactory}, which is not serializable, so it is cleared
     * before saving. This mutates the live GA: after this call the
     * in-memory population has a {@code null} genome factory, and the
     * caller is expected to rebuild the trainer via
     * {@link #loadMLMethodGeneticAlgorithm(String)} before training again.</p>
     *
     * @param file path to write the serialized population to
     * @param ga   the trainer whose population should be saved
     * @throws IOException if the file cannot be written
     */
    public static void saveMLMethodGeneticAlgorithm(String file, MLMethodGeneticAlgorithm ga ) throws IOException
    {
        ga.getGenetic().getPopulation().setGenomeFactory(null);
        SerializeObject.save(new File(file),ga.getGenetic().getPopulation());
    }

    /**
     * Reloads a population from {@code filename} and wraps it in a new
     * {@link MLMethodGeneticAlgorithm}.
     *
     * <p>The genome factory (discarded on save) is reattached first. The
     * trainer is constructed with a throwaway population of size 1 and the
     * deserialized population is swapped in, so no time is wasted creating
     * genomes that are immediately replaced.</p>
     *
     * @param filename path of a previously saved population
     * @return a trainer ready to resume iterating
     * @throws ClassNotFoundException if the serialized classes are missing
     * @throws IOException            if the file cannot be read
     */
    public static MLMethodGeneticAlgorithm loadMLMethodGeneticAlgorithm(String filename) throws ClassNotFoundException, IOException {
        Population pop = (Population) SerializeObject.load(new File(filename));
        pop.setGenomeFactory(new MLMethodGenomeFactory(new MethodFactory(){
            @Override
            public MLMethod factor() {
                final BasicNetwork result = createNetwork();
                ((MLResettable)result).reset();
                return result;
            }},pop));

        MLMethodGeneticAlgorithm result = new MLMethodGeneticAlgorithm(new MethodFactory(){
            @Override
            public MLMethod factor() {
                return createNetwork();
            }},new PilotScore(),1);

        result.getGenetic().setPopulation(pop);

        return result;
    }

    /**
     * Runs {@code iterations} GA iterations, printing the score per epoch,
     * then calls {@link MLMethodGeneticAlgorithm#finishTraining()}.
     *
     * @param train      the trainer to iterate
     * @param iterations how many iterations to run
     * @param epoch      the epoch number to start logging from
     * @return the next epoch number, so logging can stay continuous
     */
    private static int trainEpochs(MLMethodGeneticAlgorithm train, int iterations, int epoch) {
        for (int i = 0; i < iterations; i++) {
            train.iteration();
            System.out
                    .println("Epoch #" + epoch + " Score:" + train.getError());
            epoch++;
        }
        train.finishTraining();
        return epoch;
    }

    /**
     * Entry point: trains, round-trips the GA through a file, resumes
     * training, then scores the winning network with a visible pilot run.
     */
    public static void main(String args[])
    {
        MLMethodGeneticAlgorithm train;

        train = new MLMethodGeneticAlgorithm(new MethodFactory(){
            @Override
            public MLMethod factor() {
                final BasicNetwork result = createNetwork();
                ((MLResettable)result).reset();
                return result;
            }},new PilotScore(),500);

        try {
            int epoch = trainEpochs(train, 50, 1);

            // Round trip the GA and then train again
            LunarLander.saveMLMethodGeneticAlgorithm(TRAINER_FILE, train);
            train = LunarLander.loadMLMethodGeneticAlgorithm(TRAINER_FILE);

            // Train again with the reloaded trainer
            trainEpochs(train, 50, epoch);
        } catch (IOException | ClassNotFoundException ex) {
            // NOTE(review): if the round trip failed after save(), the live GA's
            // genome factory may have been cleared, so the loop below could
            // misbehave; surface the error and continue as the original did.
            ex.printStackTrace();
        }

        // Final training pass (epoch numbering restarts at 1, as in the
        // original example).
        trainEpochs(train, 50, 1);

        System.out.println("\nHow the winning network landed:");
        BasicNetwork network = (BasicNetwork)train.getMethod();
        NeuralPilot pilot = new NeuralPilot(network,true);
        System.out.println(pilot.scorePilot());
        Encog.getInstance().shutdown();
    }
}

10-08 04:16