Added connect.bat and made minor changes to kgsGtp.ini. ScratchGo can now easily connect to the gokgs.com server.

It will look for room whf4cs6999 as whf4human (to avoid creating more accounts), and will connect to or wait for whf4cs6999.
2012-12-12 19:34:32 -05:00
parent 28dc44b61e
commit 14bc769493
50 changed files with 2462 additions and 264 deletions
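
As a rough sketch of the setup described in the commit message above, assuming the standard kgsGtp option names (engine, name, password, room, mode, opponent): the values below simply follow the commit message, the engine command and password are placeholders, and the actual files are in this commit and may differ.

# kgsGtp.ini (sketch only; the committed file may differ)
name=whf4human
password=<whf4human's password>
room=whf4cs6999
mode=custom
opponent=whf4cs6999
engine=<command line that starts the ScratchGo GTP engine>

rem connect.bat (sketch): run the kgsGtp client with the config above
java -jar kgsGtp.jar kgsGtp.ini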

View File

@@ -1,100 +0,0 @@
package net.woodyfolsom.msproj.ann;

import java.io.File;
import java.io.IOException;
import java.util.List;

import net.woodyfolsom.msproj.ann.NNData;
import net.woodyfolsom.msproj.ann.NNDataPair;
import net.woodyfolsom.msproj.ann.NeuralNetFilter;
import net.woodyfolsom.msproj.ann.TTTFilter;
import net.woodyfolsom.msproj.tictactoe.GameRecord;
import net.woodyfolsom.msproj.tictactoe.NNDataSetFactory;
import net.woodyfolsom.msproj.tictactoe.Referee;

import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;

public class TTTFilterTest {
    private static final String FILENAME = "tttPerceptron.net";

    @AfterClass
    public static void deleteNewNet() {
        File file = new File(FILENAME);
        if (file.exists()) {
            file.delete();
        }
    }

    @BeforeClass
    public static void deleteSavedNet() {
        File file = new File(FILENAME);
        if (file.exists()) {
            file.delete();
        }
    }

    @Test
    public void testLearn() throws IOException {
        double alpha = 0.5;
        double lambda = 0.0;
        int maxEpochs = 1000;
        NeuralNetFilter nnLearner = new TTTFilter(alpha, lambda, maxEpochs);

        // Create trainingSet from a tournament of random games.
        // Future iterations will use Epsilon-greedy play from a policy based on
        // this network to generate additional datasets.
        List<GameRecord> tournament = new Referee().play(1);
        List<List<NNDataPair>> trainingSet = NNDataSetFactory
                .createDataSet(tournament);
        System.out.println("Generated " + trainingSet.size()
                + " datasets from random self-play.");

        nnLearner.learnSequences(trainingSet);
        System.out.println("Learned network after "
                + nnLearner.getActualTrainingEpochs() + " training epochs.");

        double[][] validationSet = new double[7][];
        // empty board
        validationSet[0] = new double[] { 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
                0.0, 0.0 };
        // center
        validationSet[1] = new double[] { 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0,
                0.0, 0.0 };
        // top edge
        validationSet[2] = new double[] { 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0,
                0.0, 0.0 };
        // left edge
        validationSet[3] = new double[] { 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0,
                0.0, 0.0 };
        // corner
        validationSet[4] = new double[] { 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
                0.0, 0.0 };
        // win
        validationSet[5] = new double[] { 1.0, 1.0, 1.0, -1.0, -1.0, 0.0, 0.0,
                -1.0, 0.0 };
        // loss
        validationSet[6] = new double[] { -1.0, 1.0, 0.0, 1.0, -1.0, 1.0, 0.0,
                0.0, -1.0 };

        String[] inputNames = new String[] { "00", "01", "02", "10", "11",
                "12", "20", "21", "22" };
        String[] outputNames = new String[] { "values" };

        System.out.println("Output from eval set (learned network):");
        testNetwork(nnLearner, validationSet, inputNames, outputNames);
    }

    private void testNetwork(NeuralNetFilter nnLearner,
            double[][] validationSet, String[] inputNames, String[] outputNames) {
        for (int valIndex = 0; valIndex < validationSet.length; valIndex++) {
            NNDataPair dp = new NNDataPair(new NNData(inputNames,
                    validationSet[valIndex]), new NNData(outputNames,
                    validationSet[valIndex]));
            System.out.println(dp + " => " + nnLearner.compute(dp));
        }
    }
}
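
The comment in testLearn about epsilon-greedy self-play describes future work. As a hedged illustration of the idea (not code from this project), selecting a move epsilon-greedily from a learned value function might look like the sketch below; the Value interface and the select method are hypothetical stand-ins for evaluating a candidate board with the trained network (e.g. via NeuralNetFilter.compute).

import java.util.List;
import java.util.Random;

// Sketch of the epsilon-greedy play mentioned in the comment above.
// The Value interface is a hypothetical stand-in for scoring a candidate
// board with the trained network; it is not part of the project code.
public class EpsilonGreedySketch {

    interface Value {
        double of(double[] board);
    }

    private final Random rnd = new Random();

    // Returns a random candidate with probability epsilon, otherwise the
    // candidate the value function rates highest.
    double[] select(List<double[]> candidates, Value value, double epsilon) {
        if (rnd.nextDouble() < epsilon) {
            return candidates.get(rnd.nextInt(candidates.size()));
        }
        double[] best = candidates.get(0);
        double bestValue = value.of(best);
        for (double[] board : candidates) {
            double v = value.of(board);
            if (v > bestValue) {
                bestValue = v;
                best = board;
            }
        }
        return best;
    }
}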

View File

@@ -81,10 +81,10 @@ public class XORFilterTest {
     @Test
     public void testLearnSaveLoad() throws IOException {
-        NeuralNetFilter nnLearner = new XORFilter(0.5,0.0);
+        NeuralNetFilter nnLearner = new XORFilter(0.05,0.0);
         // create training set (logical XOR function)
-        int size = 2;
+        int size = 1;
         double[][] trainingInput = new double[4 * size][];
         double[][] trainingOutput = new double[4 * size][];
         for (int i = 0; i < size; i++) {
@@ -106,7 +106,7 @@ public class XORFilterTest {
             trainingSet.add(new NNDataPair(new NNData(inputNames,trainingInput[i]),new NNData(outputNames,trainingOutput[i])));
         }
-        nnLearner.setMaxTrainingEpochs(1);
+        nnLearner.setMaxTrainingEpochs(10000);
         nnLearner.learnPatterns(trainingSet);
         System.out.println("Learned network after " + nnLearner.getActualTrainingEpochs() + " training epochs.");