// History and stats update bonus, based on depth
int stat_bonus(Depth d) {
- return std::min((9 * d + 270) * d - 311 , 2145);
+ return std::min((8 * d + 240) * d - 276 , 1907);
}
// Add a small random component to draw evaluations to avoid 3-fold blindness
int failedHighCnt = 0;
while (true)
{
- Depth adjustedDepth = std::max(1, rootDepth - failedHighCnt - searchAgainCounter);
+ // Adjust the effective depth searched, but ensure at least one effective increment for every
+ // four searchAgain steps (see issue #2717).
+ Depth adjustedDepth = std::max(1, rootDepth - failedHighCnt - 3 * (searchAgainCounter + 1) / 4);
bestValue = Stockfish::search<Root>(rootPos, ss, alpha, beta, adjustedDepth, false);
// Bring the best move to the front. It is critical that sorting
double reduction = (1.56 + mainThread->previousTimeReduction) / (2.20 * timeReduction);
double bestMoveInstability = 1 + 1.7 * totBestMoveChanges / Threads.size();
int complexity = mainThread->complexityAverage.value();
- double complexPosition = std::clamp(1.0 + (complexity - 277) / 1819, 0.5, 1.5);
+ double complexPosition = std::min(1.0 + (complexity - 277) / 1819.1, 1.5);
double totalTime = Time.optimum() * fallingEval * reduction * bestMoveInstability * complexPosition;
Move ttMove, move, excludedMove, bestMove;
Depth extension, newDepth;
Value bestValue, value, ttValue, eval, maxValue, probCutBeta;
- bool givesCheck, improving, didLMR, priorCapture;
- bool capture, doFullDepthSearch, moveCountPruning, ttCapture;
+ bool givesCheck, improving, priorCapture, singularQuietLMR;
+ bool capture, moveCountPruning, ttCapture;
Piece movedPiece;
int moveCount, captureCount, quietCount, improvement, complexity;
// Step 1. Initialize node
Thread* thisThread = pos.this_thread();
- thisThread->depth = depth;
ss->inCheck = pos.checkers();
priorCapture = pos.captured_piece();
Color us = pos.side_to_move();
// At non-PV nodes we check for an early TT cutoff
if ( !PvNode
&& ss->ttHit
- && tte->depth() > depth - ((int)thisThread->id() & 0x1)
+ && tte->depth() > depth - (tte->bound() == BOUND_EXACT)
&& ttValue != VALUE_NONE // Possible in case of TT access race
&& (tte->bound() & (ttValue >= beta ? BOUND_LOWER : BOUND_UPPER)))
{
else // Fall back to (semi)classical complexity for TT hits, the NNUE complexity is lost
complexity = abs(ss->staticEval - pos.psq_eg_stm());
- // Randomize draw evaluation
- if (eval == VALUE_DRAW)
- eval = value_draw(thisThread);
-
// ttValue can be used as a better position evaluation (~4 Elo)
if ( ttValue != VALUE_NONE
&& (tte->bound() & (ttValue > eval ? BOUND_LOWER : BOUND_UPPER)))
ss->killers);
value = bestValue;
- moveCountPruning = false;
+ moveCountPruning = singularQuietLMR = false;
// Indicate PvNodes that will probably fail low if the node was searched
// at a depth equal or greater than the current depth, and the result of this search was a fail low.
&& history < -3875 * (depth - 1))
continue;
- history += thisThread->mainHistory[us][from_to(move)];
+ history += 2 * thisThread->mainHistory[us][from_to(move)];
// Futility pruning: parent node (~9 Elo)
if ( !ss->inCheck
if (value < singularBeta)
{
extension = 1;
+ singularQuietLMR = !ttCapture;
// Avoid search explosion by limiting the number of double extensions
if ( !PvNode
// Step 16. Make the move
pos.do_move(move, st, givesCheck);
- bool doDeeperSearch = false;
-
// Step 17. Late moves reduction / extension (LMR, ~98 Elo)
// We use various heuristics for the sons of a node after the first son has
// been searched. In general we would like to reduce them, but there are many
r--;
// Increase reduction for cut nodes (~3 Elo)
- if (cutNode && move != ss->killers[0])
+ if (cutNode)
r += 2;
// Increase reduction if ttMove is a capture (~3 Elo)
if (PvNode)
r -= 1 + 15 / (3 + depth);
+ // Decrease reduction if ttMove has been singularly extended (~1 Elo)
+ if (singularQuietLMR)
+ r--;
+
// Increase reduction if next ply has a lot of fail high else reset count to 0
if ((ss+1)->cutoffCnt > 3 && !PvNode)
r++;
- ss->statScore = thisThread->mainHistory[us][from_to(move)]
+ ss->statScore = 2 * thisThread->mainHistory[us][from_to(move)]
+ (*contHist[0])[movedPiece][to_sq(move)]
+ (*contHist[1])[movedPiece][to_sq(move)]
+ (*contHist[3])[movedPiece][to_sq(move)]
value = -search<NonPV>(pos, ss+1, -(alpha+1), -alpha, d, true);
- // If the son is reduced and fails high it will be re-searched at full depth
- doFullDepthSearch = value > alpha && d < newDepth;
- doDeeperSearch = value > (alpha + 78 + 11 * (newDepth - d));
- didLMR = true;
- }
- else
- {
- doFullDepthSearch = !PvNode || moveCount > 1;
- didLMR = false;
- }
-
- // Step 18. Full depth search when LMR is skipped or fails high
- if (doFullDepthSearch)
- {
- value = -search<NonPV>(pos, ss+1, -(alpha+1), -alpha, newDepth + doDeeperSearch, !cutNode);
-
- // If the move passed LMR update its stats
- if (didLMR)
+ // Do a full-depth search when the reduced LMR search fails high
+ if (value > alpha && d < newDepth)
{
+ const bool doDeeperSearch = value > (alpha + 78 + 11 * (newDepth - d));
+ value = -search<NonPV>(pos, ss+1, -(alpha+1), -alpha, newDepth + doDeeperSearch, !cutNode);
+
int bonus = value > alpha ? stat_bonus(newDepth)
: -stat_bonus(newDepth);
}
}
+ // Step 18. Full depth search when LMR is skipped
+ else if (!PvNode || moveCount > 1)
+ {
+ value = -search<NonPV>(pos, ss+1, -(alpha+1), -alpha, newDepth, !cutNode);
+ }
+
// For PV nodes only, do a full PV search on the first move or after a fail
// high (in the latter case search only if value < beta), otherwise let the
// parent node fail low with value <= alpha and try another move.