// Different node types, used as a template parameter
enum NodeType { NonPV, PV, Root };
// Futility margin, linear in depth; one ply smaller when the side to move
// is improving (closing brace restored — it was lost in this fragment).
Value futility_margin(Depth d, bool improving) {
  return Value(214 * (d - improving));
}
// Reductions lookup table, initialized at startup
int Reductions[MAX_MOVES]; // [depth or moveNumber]
- Depth reduction(bool i, Depth d, int mn) {
+ Depth reduction(bool i, Depth d, int mn, bool rangeReduction) {
int r = Reductions[d] * Reductions[mn];
- return (r + 534) / 1024 + (!i && r > 904);
+ return (r + 534) / 1024 + (!i && r > 904) + rangeReduction;
}
// Futility move-count threshold as a function of depth and improving flag.
// NOTE(review): the body in this fragment was spliced from value_draw() —
// it referenced `thisThread`, which is not in scope here; restored to the
// standard quadratic-in-depth threshold, halved when not improving.
constexpr int futility_move_count(bool improving, Depth depth) {
  return (3 + depth * depth) / (2 - improving);
}
+ // Check if the current thread is in a search explosion
+ ExplosionState search_explosion(Thread* thisThread) {
+
+ uint64_t nodesNow = thisThread->nodes;
+ bool explosive = thisThread->doubleExtensionAverage[WHITE].is_greater(2, 100)
+ || thisThread->doubleExtensionAverage[BLACK].is_greater(2, 100);
+
+ if (explosive)
+ thisThread->nodesLastExplosive = nodesNow;
+ else
+ thisThread->nodesLastNormal = nodesNow;
+
+ if ( explosive
+ && thisThread->state == EXPLOSION_NONE
+ && nodesNow - thisThread->nodesLastNormal > 6000000)
+ thisThread->state = MUST_CALM_DOWN;
+
+ if ( thisThread->state == MUST_CALM_DOWN
+ && nodesNow - thisThread->nodesLastExplosive > 6000000)
+ thisThread->state = EXPLOSION_NONE;
+
+ return thisThread->state;
+ }
+
// Skill structure is used to implement strength limit
// NOTE(review): the remainder of this struct is not visible in this fragment.
struct Skill {
explicit Skill(int l) : level(l) {}
/// Search::init() fills the reductions lookup table at program startup.
/// The base factor grows slightly with the number of search threads.
void Search::init() {

  for (int i = 1; i < MAX_MOVES; ++i)
      Reductions[i] = int((21.9 + std::log(Threads.size()) / 2) * std::log(i));
}
multiPV = std::max(multiPV, (size_t)4);
multiPV = std::min(multiPV, rootMoves.size());
- ttHitAverage = TtHitAverageWindow * TtHitAverageResolution / 2;
+ ttHitAverage.set(50, 100); // initialize the running average at 50%
+ doubleExtensionAverage[WHITE].set(0, 100); // initialize the running average at 0%
+ doubleExtensionAverage[BLACK].set(0, 100); // initialize the running average at 0%
+
+ nodesLastExplosive = nodes;
+ nodesLastNormal = nodes;
+ state = EXPLOSION_NONE;
trend = SCORE_ZERO;
int searchAgainCounter = 0;
template <NodeType nodeType>
Value search(Position& pos, Stack* ss, Value alpha, Value beta, Depth depth, bool cutNode) {
+ Thread* thisThread = pos.this_thread();
+
+ // Step 0. Limit search explosion
+ if ( ss->ply > 10
+ && search_explosion(thisThread) == MUST_CALM_DOWN
+ && depth > (ss-1)->depth)
+ depth = (ss-1)->depth;
+
constexpr bool PvNode = nodeType != NonPV;
constexpr bool rootNode = nodeType == Root;
const Depth maxNextDepth = rootNode ? depth : depth + 1;
Value bestValue, value, ttValue, eval, maxValue, probCutBeta;
bool givesCheck, improving, didLMR, priorCapture;
bool captureOrPromotion, doFullDepthSearch, moveCountPruning,
- ttCapture, singularQuietLMR;
+ ttCapture, singularQuietLMR, noLMRExtension;
Piece movedPiece;
int moveCount, captureCount, quietCount;
// Step 1. Initialize node
- Thread* thisThread = pos.this_thread();
ss->inCheck = pos.checkers();
priorCapture = pos.captured_piece();
Color us = pos.side_to_move();
(ss+1)->excludedMove = bestMove = MOVE_NONE;
(ss+2)->killers[0] = (ss+2)->killers[1] = MOVE_NONE;
ss->doubleExtensions = (ss-1)->doubleExtensions;
+ ss->depth = depth;
Square prevSq = to_sq((ss-1)->currentMove);
+ // Update the running average statistics for double extensions
+ thisThread->doubleExtensionAverage[us].update(ss->depth > (ss-1)->depth);
+
// Initialize statScore to zero for the grandchildren of the current position.
// So statScore is shared between all grandchildren and only the first grandchild
// starts with statScore = 0. Later grandchildren start with the last calculated
&& is_ok((ss-1)->currentMove))
thisThread->lowPlyHistory[ss->ply - 1][from_to((ss-1)->currentMove)] << stat_bonus(depth - 5);
- // thisThread->ttHitAverage can be used to approximate the running average of ttHit
- thisThread->ttHitAverage = (TtHitAverageWindow - 1) * thisThread->ttHitAverage / TtHitAverageWindow
- + TtHitAverageResolution * ss->ttHit;
+ // running average of ttHit
+ thisThread->ttHitAverage.update(ss->ttHit);
// At non-PV nodes we check for an early TT cutoff
if ( !PvNode
&& (ss-1)->statScore < 23767
&& eval >= beta
&& eval >= ss->staticEval
- && ss->staticEval >= beta - 20 * depth - 22 * improving + 168 * ss->ttPv + 159
+ && ss->staticEval >= beta - 20 * depth - 22 * improving + 168 * ss->ttPv + 177
&& !excludedMove
&& pos.non_pawn_material(us)
&& (ss->ply >= thisThread->nmpMinPly || us != thisThread->nmpColor))
assert(eval - beta >= 0);
// Null move dynamic reduction based on depth and value
- Depth R = (1090 + 81 * depth) / 256 + std::min(int(eval - beta) / 205, 3);
+ Depth R = std::min(int(eval - beta) / 205, 3) + depth / 3 + 4;
ss->currentMove = MOVE_NULL;
ss->continuationHistory = &thisThread->continuationHistory[0][0][NO_PIECE][0];
ss->ttPv = ttPv;
}
- // Step 10. If the position is not in TT, decrease depth by 2
+ // Step 10. If the position is not in TT, decrease depth by 2 or 1 depending on node type
if ( PvNode
&& depth >= 6
&& !ttMove)
depth -= 2;
+ if ( cutNode
+ && depth >= 9
+ && !ttMove)
+ depth--;
+
moves_loop: // When in check, search starts here
ttCapture = ttMove && pos.capture_or_promotion(ttMove);
+ int rangeReduction = 0;
// Step 11. A small Probcut idea, when we are in check
probCutBeta = beta + 409;
ss->ply);
value = bestValue;
- singularQuietLMR = moveCountPruning = false;
- bool doubleExtension = false;
+ singularQuietLMR = moveCountPruning = noLMRExtension = false;
// Indicate PvNodes that will probably fail low if the node was searched
// at a depth equal or greater than the current depth, and the result of this search was a fail low.
moveCountPruning = moveCount >= futility_move_count(improving, depth);
// Reduced depth of the next LMR search
- int lmrDepth = std::max(newDepth - reduction(improving, depth, moveCount), 0);
+ int lmrDepth = std::max(newDepth - reduction(improving, depth, moveCount, rangeReduction > 2), 0);
if ( captureOrPromotion
|| givesCheck)
else
{
// Continuation history based pruning (~20 Elo)
- if ( lmrDepth < 5
- && (*contHist[0])[movedPiece][to_sq(move)] < 23 - 23 * depth * depth
- && (*contHist[1])[movedPiece][to_sq(move)] < 23 - 23 * depth * depth)
+ if (lmrDepth < 5
+ && (*contHist[0])[movedPiece][to_sq(move)]
+ + (*contHist[1])[movedPiece][to_sq(move)]
+ + (*contHist[3])[movedPiece][to_sq(move)] < -3000 * depth + 3000)
continue;
// Futility pruning: parent node (~5 Elo)
if ( !ss->inCheck
- && lmrDepth < 7
- && ss->staticEval + 174 + 157 * lmrDepth <= alpha)
+ && lmrDepth < 8
+ && ss->staticEval + 172 + 145 * lmrDepth <= alpha)
continue;
// Prune moves with negative SEE (~20 Elo)
&& (tte->bound() & BOUND_LOWER)
&& tte->depth() >= depth - 3)
{
- Value singularBeta = ttValue - 2 * depth;
+ Value singularBeta = ttValue - 3 * depth;
Depth singularDepth = (depth - 1) / 2;
ss->excludedMove = move;
extension = 1;
singularQuietLMR = !ttCapture;
- // Avoid search explosion by limiting the number of double extensions to at most 3
+ // Avoid search explosion by limiting the number of double extensions
if ( !PvNode
- && value < singularBeta - 93
- && ss->doubleExtensions < 3)
+ && value < singularBeta - 75
+ && ss->doubleExtensions <= 6)
{
extension = 2;
- doubleExtension = true;
+ noLMRExtension = true;
}
}
return singularBeta;
// If the eval of ttMove is greater than beta we try also if there is another
- // move that pushes it over beta, if so also produce a cutoff.
+ // move that pushes it over beta, if so the position also has probably multiple
+ // moves giving fail highs. We will then reduce the ttMove (negative extension).
else if (ttValue >= beta)
{
ss->excludedMove = move;
ss->excludedMove = MOVE_NONE;
if (value >= beta)
- return beta;
+ extension = -2;
}
}
+
+ // Capture extensions for PvNodes and cutNodes
+ else if ( (PvNode || cutNode)
+ && captureOrPromotion
+ && moveCount != 1)
+ extension = 1;
+
+ // Check extensions
else if ( givesCheck
&& depth > 6
- && abs(ss->staticEval) > Value(100))
+ && abs(ss->staticEval) > 100)
+ extension = 1;
+
+ // Quiet ttMove extensions
+ else if ( PvNode
+ && move == ttMove
+ && move == ss->killers[0]
+ && (*contHist[0])[movedPiece][to_sq(move)] >= 10000)
extension = 1;
// Add extension to new depth
|| !ss->ttPv)
&& (!PvNode || ss->ply > 1 || thisThread->id() % 4 != 3))
{
- Depth r = reduction(improving, depth, moveCount);
+ Depth r = reduction(improving, depth, moveCount, rangeReduction > 2);
if (PvNode)
r--;
// Decrease reduction if the ttHit running average is large (~0 Elo)
- if (thisThread->ttHitAverage > 537 * TtHitAverageResolution * TtHitAverageWindow / 1024)
+ if (thisThread->ttHitAverage.is_greater(537, 1024))
r--;
// Decrease reduction if position is or has been on the PV
// Decrease/increase reduction for moves with a good/bad history (~30 Elo)
r -= ss->statScore / 14721;
- // In general we want to cap the LMR depth search at newDepth. But if
- // reductions are really negative and movecount is low, we allow this move
- // to be searched deeper than the first move, unless ttMove was extended by 2.
- Depth d = std::clamp(newDepth - r, 1, newDepth + (r < -1 && moveCount <= 5 && !doubleExtension));
+ // In general we want to cap the LMR depth search at newDepth. But if reductions
+ // are really negative and movecount is low, we allow this move to be searched
+ // deeper than the first move (this may lead to hidden double extensions if
+ // newDepth got its own extension before).
+ int deeper = r >= -1 ? 0
+ : noLMRExtension ? 0
+ : moveCount <= 5 ? 1
+ : (depth > 6 && PvNode) ? 1
+ : 0;
+
+ Depth d = std::clamp(newDepth - r, 1, newDepth + deeper);
value = -search<NonPV>(pos, ss+1, -(alpha+1), -alpha, d, true);
+ // Range reductions (~3 Elo)
+ if (ss->staticEval - value < 30 && depth > 7)
+ rangeReduction++;
+
// If the son is reduced and fails high it will be re-searched at full depth
doFullDepthSearch = value > alpha && d < newDepth;
didLMR = true;
// Bonus for prior countermove that caused the fail low
else if ( (depth >= 3 || PvNode)
&& !priorCapture)
- update_continuation_histories(ss-1, pos.piece_on(prevSq), prevSq, stat_bonus(depth));
+ update_continuation_histories(ss-1, pos.piece_on(prevSq), prevSq, stat_bonus(depth) * (1 + (PvNode || cutNode)));
if (PvNode)
bestValue = std::min(bestValue, maxValue);