From: Michael Ortmann <41313082+michaelortmann@users.noreply.github.com>
Date: Tue, 30 Nov 2021 20:01:34 +0000 (+0100)
Subject: Fix typos in comments, adjust readme
X-Git-Url: https://git.sesse.net/?p=stockfish;a=commitdiff_plain;h=4b86ef8c4f8755850b38f2eca026cb9da20c4d01;ds=sidebyside

Fix typos in comments, adjust readme

closes https://github.com/official-stockfish/Stockfish/pull/3822

also adjusts readme as requested in https://github.com/official-stockfish/Stockfish/pull/3816

No functional change
---

diff --git a/README.md b/README.md
index 79db8170..330d19ed 100644
--- a/README.md
+++ b/README.md
@@ -175,8 +175,12 @@ on the evaluations of millions of positions at moderate search depth. The NNUE
 evaluation was first introduced in shogi, and ported to Stockfish afterward.
 It can be evaluated efficiently on CPUs, and exploits the fact that only parts
 of the neural network need to be updated after a typical chess move.
-[The nodchip repository](https://github.com/nodchip/Stockfish) provides additional
-tools to train and develop the NNUE networks. On CPUs supporting modern vector instructions
+[The nodchip repository](https://github.com/nodchip/Stockfish) provided the first version of
+the needed tools to train and develop the NNUE networks. Today, more advanced training tools are available
+in [the nnue-pytorch repository](https://github.com/glinscott/nnue-pytorch/), while data generation tools
+are available in [a dedicated branch](https://github.com/official-stockfish/Stockfish/tree/tools).
+
+On CPUs supporting modern vector instructions
 (avx2 and similar), the NNUE evaluation results in much stronger playing strength, even
 if the nodes per second computed by the engine is somewhat lower (roughly 80% of nps
 is typical).
diff --git a/src/nnue/nnue_common.h b/src/nnue/nnue_common.h
index 75ac7862..74eaae17 100644
--- a/src/nnue/nnue_common.h
+++ b/src/nnue/nnue_common.h
@@ -109,7 +109,7 @@ namespace Stockfish::Eval::NNUE {
 
   // write_little_endian() is our utility to write an integer (signed or unsigned, any size)
   // to a stream in little-endian order. We swap the byte order before the write if
-  // necessary to always write in little endian order, independantly of the byte
+  // necessary to always write in little endian order, independently of the byte
   // ordering of the compiling machine.
   template <typename IntType>
   inline void write_little_endian(std::ostream& stream, IntType value) {
diff --git a/src/syzygy/tbprobe.cpp b/src/syzygy/tbprobe.cpp
index 96b2970f..41e867c0 100644
--- a/src/syzygy/tbprobe.cpp
+++ b/src/syzygy/tbprobe.cpp
@@ -769,7 +769,7 @@ Ret do_probe_table(const Position& pos, T* entry, WDLScore wdl, ProbeState* resu
         goto encode_remaining; // With pawns we have finished special treatments
     }
 
-    // In positions withouth pawns, we further flip the squares to ensure leading
+    // In positions without pawns, we further flip the squares to ensure leading
     // piece is below RANK_5.
     if (rank_of(squares[0]) > RANK_4)
         for (int i = 0; i < size; ++i)
@@ -812,7 +812,7 @@ Ret do_probe_table(const Position& pos, T* entry, WDLScore wdl, ProbeState* resu
     // Rs "together" in 62 * 61 / 2 ways (we divide by 2 because rooks can be
     // swapped and still get the same position.)
     //
-    // In case we have at least 3 unique pieces (inlcuded kings) we encode them
+    // In case we have at least 3 unique pieces (included kings) we encode them
     // together.
     if (entry->hasUniquePieces) {
 
@@ -827,7 +827,7 @@ Ret do_probe_table(const Position& pos, T* entry, WDLScore wdl, ProbeState* resu
                    + (squares[1] - adjust1)) * 62
                    + squares[2] - adjust2;
 
-        // First piece is on a1-h8 diagonal, second below: map this occurence to
+        // First piece is on a1-h8 diagonal, second below: map this occurrence to
         // 6 to differentiate from the above case, rank_of() maps a1-d4 diagonal
         // to 0...3 and finally MapB1H1H7[] maps the b1-h1-h7 triangle to 0..27.
         else if (off_A1H8(squares[1]))
@@ -857,7 +857,7 @@ encode_remaining:
     idx *= d->groupIdx[0];
     Square* groupSq = squares + d->groupLen[0];
 
-    // Encode remainig pawns then pieces according to square, in ascending order
+    // Encode remaining pawns then pieces according to square, in ascending order
     bool remainingPawns = entry->hasPawns && entry->pawnCount[1];
 
     while (d->groupLen[++next])
@@ -885,7 +885,7 @@ encode_remaining:
 
 // Group together pieces that will be encoded together. The general rule is that
 // a group contains pieces of same type and color. The exception is the leading
-// group that, in case of positions withouth pawns, can be formed by 3 different
+// group that, in case of positions without pawns, can be formed by 3 different
 // pieces (default) or by the king pair when there is not a unique piece apart
 // from the kings. When there are pawns, pawns are always first in pieces[].
 //
@@ -917,7 +917,7 @@ void set_groups(T& e, PairsData* d, int order[], File f) {
     //
     // This ensures unique encoding for the whole position. The order of the
     // groups is a per-table parameter and could not follow the canonical leading
-    // pawns/pieces -> remainig pawns -> remaining pieces. In particular the
+    // pawns/pieces -> remaining pawns -> remaining pieces. In particular the
     // first group is at order[0] position and the remaining pawns, when present,
     // are at order[1] position.
     bool pp = e.hasPawns && e.pawnCount[1]; // Pawns on both sides
@@ -937,7 +937,7 @@ void set_groups(T& e, PairsData* d, int order[], File f) {
             d->groupIdx[1] = idx;
             idx *= Binomial[d->groupLen[1]][48 - d->groupLen[0]];
         }
-        else // Remainig pieces
+        else // Remaining pieces
         {
             d->groupIdx[next] = idx;
             idx *= Binomial[d->groupLen[next]][freeSquares];
@@ -947,7 +947,7 @@ void set_groups(T& e, PairsData* d, int order[], File f) {
     d->groupIdx[n] = idx;
 }
 
-// In Recursive Pairing each symbol represents a pair of childern symbols. So
+// In Recursive Pairing each symbol represents a pair of children symbols. So
 // read d->btree[] symbols data and expand each one in his left and right child
 // symbol until reaching the leafs that represent the symbol value.
 uint8_t set_symlen(PairsData* d, Sym s, std::vector<bool>& visited) {
@@ -1317,7 +1317,7 @@ void Tablebases::init(const std::string& paths) {
     for (auto p : bothOnDiagonal)
         MapKK[p.first][p.second] = code++;
 
-    // Binomial[] stores the Binomial Coefficents using Pascal rule. There
+    // Binomial[] stores the Binomial Coefficients using Pascal rule. There
     // are Binomial[k][n] ways to choose k elements from a set of n elements.
     Binomial[0][0] = 1;
 
@@ -1337,7 +1337,7 @@ void Tablebases::init(const std::string& paths) {
     for (int leadPawnsCnt = 1; leadPawnsCnt <= 5; ++leadPawnsCnt)
         for (File f = FILE_A; f <= FILE_D; ++f)
         {
-            // Restart the index at every file because TB table is splitted
+            // Restart the index at every file because TB table is split
             // by file, so we can reuse the same index for different files.
             int idx = 0;
 
diff --git a/src/syzygy/tbprobe.h b/src/syzygy/tbprobe.h
index 56734af9..cf61b767 100644
--- a/src/syzygy/tbprobe.h
+++ b/src/syzygy/tbprobe.h
@@ -38,7 +38,7 @@ enum WDLScore {
 // Possible states after a probing operation
 enum ProbeState {
   FAIL              = 0, // Probe failed (missing file table)
-  OK                = 1, // Probe succesful
+  OK                = 1, // Probe successful
   CHANGE_STM        = -1, // DTZ should check the other side
   ZEROING_BEST_MOVE = 2  // Best move zeroes DTZ (capture or pawn move)
 };
diff --git a/src/tune.h b/src/tune.h
index b5c715b3..53d52a65 100644
--- a/src/tune.h
+++ b/src/tune.h
@@ -84,7 +84,7 @@ class Tune {
 
   static Tune& instance() { static Tune t; return t; } // Singleton
 
-  // Use polymorphism to accomodate Entry of different types in the same vector
+  // Use polymorphism to accommodate Entry of different types in the same vector
   struct EntryBase {
     virtual ~EntryBase() = default;
     virtual void init_option() = 0;
diff --git a/tests/reprosearch.sh b/tests/reprosearch.sh
index c1167f7f..e16ba4ae 100755
--- a/tests/reprosearch.sh
+++ b/tests/reprosearch.sh
@@ -43,7 +43,7 @@ cat << EOF > repeat.exp
 expect eof
 EOF
 
-# to increase the likelyhood of finding a non-reproducible case,
+# to increase the likelihood of finding a non-reproducible case,
 # the allowed number of nodes are varied systematically
 for i in `seq 1 20`
 do
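
The comment patched in src/nnue/nnue_common.h above describes byte-order-independent
serialization. As a rough illustration only (not the Stockfish implementation; the
function name and byte-by-byte strategy below are this note's own), emitting the least
significant byte first via shifts yields little-endian output regardless of the host's
byte ordering:

#include <cstddef>
#include <ostream>
#include <type_traits>

// Sketch: write an integer to a stream least-significant byte first, so the
// result is little endian independently of the compiling machine's byte order.
template<typename IntType>
void write_little_endian_sketch(std::ostream& stream, IntType value) {
    using Unsigned = typename std::make_unsigned<IntType>::type;
    Unsigned u = static_cast<Unsigned>(value);  // avoid sign-extension surprises
    for (std::size_t i = 0; i < sizeof(IntType); ++i)
        stream.put(static_cast<char>((u >> (8 * i)) & 0xFF));
}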
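
Similarly, the Binomial[] comment fixed in Tablebases::init() refers to building a
table of binomial coefficients with Pascal's rule, C(n, k) = C(n-1, k-1) + C(n-1, k).
A minimal standalone sketch of that idea (array bounds and names here are illustrative,
not the engine's actual constants):

#include <cstdint>

// Sketch: binomial[k][n] holds the number of ways to choose k elements from a
// set of n elements, built with Pascal's rule from the single seed entry.
constexpr int MaxK = 7, MaxN = 65;    // illustrative bounds only
std::uint64_t binomial[MaxK][MaxN];   // globals start zero-initialized

void init_binomial_sketch() {
    binomial[0][0] = 1;
    for (int n = 1; n < MaxN; ++n)
        for (int k = 0; k < MaxK && k <= n; ++k)
            binomial[k][n] =  (k > 0 ? binomial[k - 1][n - 1] : 0)
                            + (k < n ? binomial[k    ][n - 1] : 0);
}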