int MaxSearchTime, AbsoluteMaxSearchTime, ExtraSearchTime, ExactMaxTime;
bool UseTimeManagement, InfiniteSearch, PonderSearch, StopOnPonderhit;
bool AbortSearch, Quit;
- bool FailHigh, FailLow, Problem;
+ bool FailLow, Problem;
// Show current line?
bool ShowCurrentLine;
bool UseLogFile;
std::ofstream LogFile;
- // Natural logarithmic lookup table and its getter function
- float lnArray[512];
- inline float ln(int i) { return lnArray[i]; }
+ // Reduction lookup tables and their getter functions
+ // Initialized at startup
+ int8_t PVReductionMatrix[64][64]; // [depth][moveNumber]
+ int8_t NonPVReductionMatrix[64][64]; // [depth][moveNumber]
+
+ inline Depth pv_reduction(Depth d, int mn) { return (Depth) PVReductionMatrix[Min(d / 2, 63)][Min(mn, 63)]; }
+ inline Depth nonpv_reduction(Depth d, int mn) { return (Depth) NonPVReductionMatrix[Min(d / 2, 63)][Min(mn, 63)]; }
// MP related variables
int ActiveThreads = 1;
bool ok_to_prune(const Position& pos, Move m, Move threat);
bool ok_to_use_TT(const TTEntry* tte, Depth depth, Value beta, int ply);
Value refine_eval(const TTEntry* tte, Value defaultEval, int ply);
- void reduction_parameters(float base, float Inhibitor, Depth depth, float& logLimit, float& gradient);
- Depth reduction(int moveCount, const float LogLimit, const float BaseRed, const float Gradient);
void update_history(const Position& pos, Move move, Depth depth, Move movesSearched[], int moveCount);
void update_killers(Move m, SearchStack& ss);
void update_gains(const Position& pos, Move move, Value before, Value after);
- bool fail_high_ply_1();
int current_search_time();
int nps();
void poll();
// Initialize global search variables
Idle = StopOnPonderhit = AbortSearch = Quit = false;
- FailHigh = FailLow = Problem = false;
+ FailLow = Problem = false;
NodesSincePoll = 0;
SearchStartTime = get_system_time();
ExactMaxTime = maxTime;
for (int i = 0; i < THREAD_MAX; i++)
{
Threads[i].nodes = 0ULL;
- Threads[i].failHighPly1 = false;
}
if (button_was_pressed("New Game"))
pthread_t pthread[1];
#endif
- // Init our logarithmic lookup table
- for (i = 0; i < 512; i++)
- lnArray[i] = float(log(double(i))); // log() returns base-e logarithm
-
- for (i = 0; i < THREAD_MAX; i++)
- Threads[i].activeSplitPoints = 0;
+ // Init our reduction lookup tables
+ for (i = 1; i < 64; i++) // i == depth
+ for (int j = 1; j < 64; j++) // j == moveNumber
+ {
+ double pvRed = 0.5 + log(double(i)) * log(double(j)) / 6.0;
+ double nonPVRed = 0.5 + log(double(i)) * log(double(j)) / 3.0;
+ PVReductionMatrix[i][j] = (int8_t) ( pvRed >= 1.0 ? floor( pvRed * int(OnePly)) : 0);
+ NonPVReductionMatrix[i][j] = (int8_t) (nonPVRed >= 1.0 ? floor(nonPVRed * int(OnePly)) : 0);
+ }
// Init futility margins array
FutilityMargins[0] = FutilityMargins[1] = Value(0);
FutilityMargins[i] = Value(112 * bitScanReverse32(i * i / 2)); // FIXME: test using log instead of BSR
}
+ for (i = 0; i < THREAD_MAX; i++)
+ Threads[i].activeSplitPoints = 0;
+
// Initialize global locks
lock_init(&MPLock, NULL);
lock_init(&IOLock, NULL);
break; // Value cannot be trusted. Break out immediately!
//Save info about search result
- ValueByIterationInfo[Iteration] = value;
+ ValueByIteration[Iteration] = value;
// Drop the easy move if it differs from the new best move
if (ss[0].pv[0] != EasyMove)
while (1) // Fail low loop
{
- // Loop through all the moves in the root move list
- for (int i = 0; i < rml.move_count() && !AbortSearch; i++)
- {
- if (alpha >= beta)
+ // Loop through all the moves in the root move list
+ for (int i = 0; i < rml.move_count() && !AbortSearch; i++)
{
- // We failed high, invalidate and skip next moves, leave node-counters
- // and beta-counters as they are and quickly return, we will try to do
- // a research at the next iteration with a bigger aspiration window.
- rml.set_move_score(i, -VALUE_INFINITE);
- continue;
- }
-
- RootMoveNumber = i + 1;
- FailHigh = false;
+ if (alpha >= beta)
+ {
+ // We failed high, invalidate and skip next moves, leave node-counters
+ // and beta-counters as they are and quickly return, we will try to do
+ // a research at the next iteration with a bigger aspiration window.
+ rml.set_move_score(i, -VALUE_INFINITE);
+ continue;
+ }
- // Save the current node count before the move is searched
- nodes = nodes_searched();
+ RootMoveNumber = i + 1;
- // Reset beta cut-off counters
- BetaCounter.clear();
+ // Save the current node count before the move is searched
+ nodes = nodes_searched();
- // Pick the next root move, and print the move and the move number to
- // the standard output.
- move = ss[0].currentMove = rml.get_move(i);
+ // Reset beta cut-off counters
+ BetaCounter.clear();
- if (current_search_time() >= 1000)
- cout << "info currmove " << move
- << " currmovenumber " << RootMoveNumber << endl;
+ // Pick the next root move, and print the move and the move number to
+ // the standard output.
+ move = ss[0].currentMove = rml.get_move(i);
- // Decide search depth for this move
- moveIsCheck = pos.move_is_check(move);
- captureOrPromotion = pos.move_is_capture_or_promotion(move);
- depth = (Iteration - 2) * OnePly + InitialDepth;
- ext = extension(pos, move, true, captureOrPromotion, moveIsCheck, false, false, &dangerous);
- newDepth = depth + ext;
+ if (current_search_time() >= 1000)
+ cout << "info currmove " << move
+ << " currmovenumber " << RootMoveNumber << endl;
- value = - VALUE_INFINITE;
+ // Decide search depth for this move
+ moveIsCheck = pos.move_is_check(move);
+ captureOrPromotion = pos.move_is_capture_or_promotion(move);
+ depth = (Iteration - 2) * OnePly + InitialDepth;
+ ext = extension(pos, move, true, captureOrPromotion, moveIsCheck, false, false, &dangerous);
+ newDepth = depth + ext;
- // Precalculate reduction parameters
- float LogLimit, Gradient, BaseReduction = 0.5;
- reduction_parameters(BaseReduction, 6.0, depth, LogLimit, Gradient);
+ value = - VALUE_INFINITE;
- while (1) // Fail high loop
- {
+ while (1) // Fail high loop
+ {
- // Make the move, and search it
- pos.do_move(move, st, ci, moveIsCheck);
+ // Make the move, and search it
+ pos.do_move(move, st, ci, moveIsCheck);
- if (i < MultiPV || value > alpha)
- {
- // Aspiration window is disabled in multi-pv case
- if (MultiPV > 1)
- alpha = -VALUE_INFINITE;
+ if (i < MultiPV || value > alpha)
+ {
+ // Aspiration window is disabled in multi-pv case
+ if (MultiPV > 1)
+ alpha = -VALUE_INFINITE;
- value = -search_pv(pos, ss, -beta, -alpha, newDepth, 1, 0);
+ value = -search_pv(pos, ss, -beta, -alpha, newDepth, 1, 0);
- // If the value has dropped a lot compared to the last iteration,
- // set the boolean variable Problem to true. This variable is used
- // for time managment: When Problem is true, we try to complete the
- // current iteration before playing a move.
- Problem = ( Iteration >= 2
- && value <= ValueByIteration[Iteration - 1] - ProblemMargin);
+ // If the value has dropped a lot compared to the last iteration,
+ // set the boolean variable Problem to true. This variable is used
+            // for time management: When Problem is true, we try to complete the
+ // current iteration before playing a move.
+ Problem = ( Iteration >= 2
+ && value <= ValueByIteration[Iteration - 1] - ProblemMargin);
- if (Problem && StopOnPonderhit)
- StopOnPonderhit = false;
- }
- else
- {
- // Try to reduce non-pv search depth by one ply if move seems not problematic,
- // if the move fails high will be re-searched at full depth.
- bool doFullDepthSearch = true;
-
- if ( depth >= 3*OnePly // FIXME was newDepth
- && !dangerous
- && !captureOrPromotion
- && !move_is_castle(move))
- {
- ss[0].reduction = reduction(RootMoveNumber - MultiPV + 1, LogLimit, BaseReduction, Gradient);
- if (ss[0].reduction)
- {
- value = -search(pos, ss, -alpha, newDepth-ss[0].reduction, 1, true, 0);
- doFullDepthSearch = (value > alpha);
+ if (Problem && StopOnPonderhit)
+ StopOnPonderhit = false;
}
- }
-
- if (doFullDepthSearch)
- {
- ss[0].reduction = Depth(0);
- value = -search(pos, ss, -alpha, newDepth, 1, true, 0);
-
- if (value > alpha)
+ else
{
- // Fail high! Set the boolean variable FailHigh to true, and
- // re-search the move using a PV search. The variable FailHigh
- // is used for time managment: We try to avoid aborting the
- // search prematurely during a fail high research.
- FailHigh = true;
- value = -search_pv(pos, ss, -beta, -alpha, newDepth, 1, 0);
+ // Try to reduce non-pv search depth by one ply if move seems not problematic,
+                // if the move fails high it will be re-searched at full depth.
+ bool doFullDepthSearch = true;
+
+ if ( depth >= 3*OnePly // FIXME was newDepth
+ && !dangerous
+ && !captureOrPromotion
+ && !move_is_castle(move))
+ {
+ ss[0].reduction = pv_reduction(depth, RootMoveNumber - MultiPV + 1);
+ if (ss[0].reduction)
+ {
+ value = -search(pos, ss, -alpha, newDepth-ss[0].reduction, 1, true, 0);
+ doFullDepthSearch = (value > alpha);
+ }
+ }
+
+ if (doFullDepthSearch)
+ {
+ ss[0].reduction = Depth(0);
+ value = -search(pos, ss, -alpha, newDepth, 1, true, 0);
+
+ if (value > alpha)
+ value = -search_pv(pos, ss, -beta, -alpha, newDepth, 1, 0);
+ }
}
- }
- }
-
- pos.undo_move(move);
-
- // Can we exit fail high loop ?
- if (AbortSearch || value < beta)
- break;
-
- // We are failing high and going to do a research. It's important to update score
- // before research in case we run out of time while researching.
- rml.set_move_score(i, value);
- update_pv(ss, 0);
- TT.extract_pv(pos, ss[0].pv, PLY_MAX);
- rml.set_move_pv(i, ss[0].pv);
-
- // Print search information to the standard output
- cout << "info depth " << Iteration
- << " score " << value_to_string(value)
- << ((value >= beta) ? " lowerbound" :
- ((value <= alpha)? " upperbound" : ""))
- << " time " << current_search_time()
- << " nodes " << nodes_searched()
- << " nps " << nps()
- << " pv ";
-
- for (int j = 0; ss[0].pv[j] != MOVE_NONE && j < PLY_MAX; j++)
- cout << ss[0].pv[j] << " ";
-
- cout << endl;
-
- if (UseLogFile)
- {
- ValueType type = (value >= beta ? VALUE_TYPE_LOWER
- : (value <= alpha ? VALUE_TYPE_UPPER : VALUE_TYPE_EXACT));
-
- LogFile << pretty_pv(pos, current_search_time(), Iteration,
- nodes_searched(), value, type, ss[0].pv) << endl;
- }
-
- // Prepare for a research after a fail high, each time with a wider window
- researchCount++;
- beta = Min(beta + AspirationDelta * (1 << researchCount), VALUE_INFINITE);
- } // End of fail high loop
+ pos.undo_move(move);
- // Finished searching the move. If AbortSearch is true, the search
- // was aborted because the user interrupted the search or because we
- // ran out of time. In this case, the return value of the search cannot
- // be trusted, and we break out of the loop without updating the best
- // move and/or PV.
- if (AbortSearch)
- break;
-
- // Remember beta-cutoff and searched nodes counts for this move. The
- // info is used to sort the root moves at the next iteration.
- int64_t our, their;
- BetaCounter.read(pos.side_to_move(), our, their);
- rml.set_beta_counters(i, our, their);
- rml.set_move_nodes(i, nodes_searched() - nodes);
-
- assert(value >= -VALUE_INFINITE && value <= VALUE_INFINITE);
-
- if (value <= alpha && i >= MultiPV)
- rml.set_move_score(i, -VALUE_INFINITE);
- else
- {
- // PV move or new best move!
-
- // Update PV
- rml.set_move_score(i, value);
- update_pv(ss, 0);
- TT.extract_pv(pos, ss[0].pv, PLY_MAX);
- rml.set_move_pv(i, ss[0].pv);
+ // Can we exit fail high loop ?
+ if (AbortSearch || value < beta)
+ break;
- if (MultiPV == 1)
- {
- // We record how often the best move has been changed in each
- // iteration. This information is used for time managment: When
- // the best move changes frequently, we allocate some more time.
- if (i > 0)
- BestMoveChangesByIteration[Iteration]++;
+ // We are failing high and going to do a research. It's important to update score
+ // before research in case we run out of time while researching.
+ rml.set_move_score(i, value);
+ update_pv(ss, 0);
+ TT.extract_pv(pos, ss[0].pv, PLY_MAX);
+ rml.set_move_pv(i, ss[0].pv);
// Print search information to the standard output
cout << "info depth " << Iteration
LogFile << pretty_pv(pos, current_search_time(), Iteration,
nodes_searched(), value, type, ss[0].pv) << endl;
}
- if (value > alpha)
- alpha = value;
- // Reset the global variable Problem to false if the value isn't too
- // far below the final value from the last iteration.
- if (value > ValueByIteration[Iteration - 1] - NoProblemMargin)
- Problem = false;
- }
- else // MultiPV > 1
+ // Prepare for a research after a fail high, each time with a wider window
+ researchCount++;
+ beta = Min(beta + AspirationDelta * (1 << researchCount), VALUE_INFINITE);
+
+ } // End of fail high loop
+
+ // Finished searching the move. If AbortSearch is true, the search
+ // was aborted because the user interrupted the search or because we
+ // ran out of time. In this case, the return value of the search cannot
+ // be trusted, and we break out of the loop without updating the best
+ // move and/or PV.
+ if (AbortSearch)
+ break;
+
+ // Remember beta-cutoff and searched nodes counts for this move. The
+ // info is used to sort the root moves at the next iteration.
+ int64_t our, their;
+ BetaCounter.read(pos.side_to_move(), our, their);
+ rml.set_beta_counters(i, our, their);
+ rml.set_move_nodes(i, nodes_searched() - nodes);
+
+ assert(value >= -VALUE_INFINITE && value <= VALUE_INFINITE);
+
+ if (value <= alpha && i >= MultiPV)
+ rml.set_move_score(i, -VALUE_INFINITE);
+ else
{
- rml.sort_multipv(i);
- for (int j = 0; j < Min(MultiPV, rml.move_count()); j++)
+ // PV move or new best move!
+
+ // Update PV
+ rml.set_move_score(i, value);
+ update_pv(ss, 0);
+ TT.extract_pv(pos, ss[0].pv, PLY_MAX);
+ rml.set_move_pv(i, ss[0].pv);
+
+ if (MultiPV == 1)
{
- cout << "info multipv " << j + 1
- << " score " << value_to_string(rml.get_move_score(j))
- << " depth " << ((j <= i)? Iteration : Iteration - 1)
- << " time " << current_search_time()
+ // We record how often the best move has been changed in each
+                    // iteration. This information is used for time management: When
+ // the best move changes frequently, we allocate some more time.
+ if (i > 0)
+ BestMoveChangesByIteration[Iteration]++;
+
+ // Print search information to the standard output
+ cout << "info depth " << Iteration
+ << " score " << value_to_string(value)
+ << ((value >= beta) ? " lowerbound" :
+ ((value <= alpha)? " upperbound" : ""))
+ << " time " << current_search_time()
<< " nodes " << nodes_searched()
- << " nps " << nps()
+ << " nps " << nps()
<< " pv ";
- for (int k = 0; rml.get_move_pv(j, k) != MOVE_NONE && k < PLY_MAX; k++)
- cout << rml.get_move_pv(j, k) << " ";
+ for (int j = 0; ss[0].pv[j] != MOVE_NONE && j < PLY_MAX; j++)
+ cout << ss[0].pv[j] << " ";
cout << endl;
+
+ if (UseLogFile)
+ {
+ ValueType type = (value >= beta ? VALUE_TYPE_LOWER
+ : (value <= alpha ? VALUE_TYPE_UPPER : VALUE_TYPE_EXACT));
+
+ LogFile << pretty_pv(pos, current_search_time(), Iteration,
+ nodes_searched(), value, type, ss[0].pv) << endl;
+ }
+ if (value > alpha)
+ alpha = value;
+
+ // Reset the global variable Problem to false if the value isn't too
+ // far below the final value from the last iteration.
+ if (value > ValueByIteration[Iteration - 1] - NoProblemMargin)
+ Problem = false;
}
- alpha = rml.get_move_score(Min(i, MultiPV-1));
- }
- } // PV move or new best move
+ else // MultiPV > 1
+ {
+ rml.sort_multipv(i);
+ for (int j = 0; j < Min(MultiPV, rml.move_count()); j++)
+ {
+ cout << "info multipv " << j + 1
+ << " score " << value_to_string(rml.get_move_score(j))
+ << " depth " << ((j <= i)? Iteration : Iteration - 1)
+ << " time " << current_search_time()
+ << " nodes " << nodes_searched()
+ << " nps " << nps()
+ << " pv ";
+
+ for (int k = 0; rml.get_move_pv(j, k) != MOVE_NONE && k < PLY_MAX; k++)
+ cout << rml.get_move_pv(j, k) << " ";
+
+ cout << endl;
+ }
+ alpha = rml.get_move_score(Min(i, MultiPV-1));
+ }
+ } // PV move or new best move
- assert(alpha >= oldAlpha);
+ assert(alpha >= oldAlpha);
- FailLow = (alpha == oldAlpha);
- }
+ FailLow = (alpha == oldAlpha);
+ }
- // Can we exit fail low loop ?
- if (AbortSearch || alpha > oldAlpha)
- break;
+ // Can we exit fail low loop ?
+ if (AbortSearch || alpha > oldAlpha)
+ break;
- // Prepare for a research after a fail low, each time with a wider window
- researchCount++;
- alpha = Max(alpha - AspirationDelta * (1 << researchCount), -VALUE_INFINITE);
- oldAlpha = alpha;
+ // Prepare for a research after a fail low, each time with a wider window
+ researchCount++;
+ alpha = Max(alpha - AspirationDelta * (1 << researchCount), -VALUE_INFINITE);
+ oldAlpha = alpha;
} // Fail low loop
CheckInfo ci(pos);
MovePicker mp = MovePicker(pos, ttMove, depth, H, &ss[ply]);
- // Precalculate reduction parameters
- float LogLimit, Gradient, BaseReduction = 0.5;
- reduction_parameters(BaseReduction, 6.0, depth, LogLimit, Gradient);
-
// Loop through all legal moves until no moves remain or a beta cutoff
// occurs.
while ( alpha < beta
&& !move_is_castle(move)
&& !move_is_killer(move, ss[ply]))
{
- ss[ply].reduction = reduction(moveCount, LogLimit, BaseReduction, Gradient);
+ ss[ply].reduction = pv_reduction(depth, moveCount);
if (ss[ply].reduction)
{
value = -search(pos, ss, -alpha, newDepth-ss[ply].reduction, ply+1, true, threadID);
ss[ply].reduction = Depth(0);
value = -search(pos, ss, -alpha, newDepth, ply+1, true, threadID);
if (value > alpha && value < beta)
- {
- // When the search fails high at ply 1 while searching the first
- // move at the root, set the flag failHighPly1. This is used for
- // time managment: We don't want to stop the search early in
- // such cases, because resolving the fail high at ply 1 could
- // result in a big drop in score at the root.
- if (ply == 1 && RootMoveNumber == 1)
- Threads[threadID].failHighPly1 = true;
-
- // A fail high occurred. Re-search at full window (pv search)
value = -search_pv(pos, ss, -beta, -alpha, newDepth, ply+1, threadID);
- Threads[threadID].failHighPly1 = false;
- }
}
}
pos.undo_move(move);
MovePicker mp = MovePicker(pos, ttMove, depth, H, &ss[ply]);
CheckInfo ci(pos);
- // Precalculate reduction parameters
- float LogLimit, Gradient, BaseReduction = 0.5;
- reduction_parameters(BaseReduction, 3.0, depth, LogLimit, Gradient);
-
// Loop through all legal moves until no moves remain or a beta cutoff occurs
while ( bestValue < beta
&& (move = mp.get_next_move()) != MOVE_NONE
Depth predictedDepth = newDepth;
//FIXME: We are ignoring condition: depth >= 3*OnePly, BUG??
- ss[ply].reduction = reduction(moveCount, LogLimit, BaseReduction, Gradient);
+ ss[ply].reduction = nonpv_reduction(depth, moveCount);
if (ss[ply].reduction)
predictedDepth -= ss[ply].reduction;
if (predictedDepth >= OnePly)
preFutilityValueMargin = FutilityMargins[int(predictedDepth)];
- preFutilityValueMargin += H.gain(pos.piece_on(move_from(move)), move_from(move), move_to(move)) + 45;
+ preFutilityValueMargin += H.gain(pos.piece_on(move_from(move)), move_to(move)) + 45;
futilityValueScaled = ss[ply].eval + preFutilityValueMargin - moveCount * IncrementalFutilityMargin;
&& !move_is_castle(move)
&& !move_is_killer(move, ss[ply]))
{
- ss[ply].reduction = reduction(moveCount, LogLimit, BaseReduction, Gradient);
+ ss[ply].reduction = nonpv_reduction(depth, moveCount);
if (ss[ply].reduction)
{
value = -search(pos, ss, -(beta-1), newDepth-ss[ply].reduction, ply+1, true, threadID);
const int FutilityMoveCountMargin = 3 + (1 << (3 * int(sp->depth) / 8));
- // Precalculate reduction parameters
- float LogLimit, Gradient, BaseReduction = 0.5;
- reduction_parameters(BaseReduction, 3.0, sp->depth, LogLimit, Gradient);
-
while ( lock_grab_bool(&(sp->lock))
&& sp->bestValue < sp->beta
&& !thread_should_stop(threadID)
&& !move_is_castle(move)
&& !move_is_killer(move, ss[sp->ply]))
{
- ss[sp->ply].reduction = reduction(moveCount, LogLimit, BaseReduction, Gradient);
+ ss[sp->ply].reduction = nonpv_reduction(sp->depth, moveCount);
if (ss[sp->ply].reduction)
{
value = -search(pos, ss, -(sp->beta-1), newDepth-ss[sp->ply].reduction, sp->ply+1, true, threadID);
int moveCount;
Move move;
- // Precalculate reduction parameters
- float LogLimit, Gradient, BaseReduction = 0.5;
- reduction_parameters(BaseReduction, 6.0, sp->depth, LogLimit, Gradient);
-
while ( lock_grab_bool(&(sp->lock))
&& sp->alpha < sp->beta
&& !thread_should_stop(threadID)
&& !move_is_castle(move)
&& !move_is_killer(move, ss[sp->ply]))
{
- ss[sp->ply].reduction = reduction(moveCount, LogLimit, BaseReduction, Gradient);
+ ss[sp->ply].reduction = pv_reduction(sp->depth, moveCount);
if (ss[sp->ply].reduction)
{
Value localAlpha = sp->alpha;
if (value > localAlpha && value < sp->beta)
{
- // When the search fails high at ply 1 while searching the first
- // move at the root, set the flag failHighPly1. This is used for
- // time managment: We don't want to stop the search early in
- // such cases, because resolving the fail high at ply 1 could
- // result in a big drop in score at the root.
- if (sp->ply == 1 && RootMoveNumber == 1)
- Threads[threadID].failHighPly1 = true;
-
// If another thread has failed high then sp->alpha has been increased
// to be higher or equal then beta, if so, avoid to start a PV search.
localAlpha = sp->alpha;
value = -search_pv(pos, ss, -sp->beta, -localAlpha, newDepth, sp->ply+1, threadID);
else
assert(thread_should_stop(threadID));
-
- Threads[threadID].failHighPly1 = false;
}
}
pos.undo_move(move);
}
- // reduction_parameters() precalculates some parameters used later by reduction. Becasue
- // floating point operations are involved we try to recalculate reduction at each move, but
- // we do the most consuming computation only once per node.
-
- void reduction_parameters(float baseReduction, float reductionInhibitor, Depth depth, float& logLimit, float& gradient)
- {
- // Precalculate some parameters to avoid to calculate the following formula for each move:
- //
- // red = baseReduction + ln(moveCount) * ln(depth / 2) / reductionInhibitor;
- //
- logLimit = depth > OnePly ? (1 - baseReduction) * reductionInhibitor / ln(depth / 2) : 1000;
- gradient = depth > OnePly ? ln(depth / 2) / reductionInhibitor : 0;
- }
-
-
- // reduction() returns reduction in plies based on moveCount and depth.
- // Reduction is always at least one ply.
-
- Depth reduction(int moveCount, float logLimit, float baseReduction, float gradient) {
-
- if (ln(moveCount) < logLimit)
- return Depth(0);
-
- float red = baseReduction + ln(moveCount) * gradient;
- return Depth(int(floor(red * int(OnePly))));
- }
-
-
// update_history() registers a good move that produced a beta-cutoff
// in history and marks as failures all the other moves of that ply.
&& pos.captured_piece() == NO_PIECE_TYPE
&& !move_is_castle(m)
&& !move_is_promotion(m))
- H.set_gain(pos.piece_on(move_to(m)), move_from(m), move_to(m), -(before + after));
- }
-
-
- // fail_high_ply_1() checks if some thread is currently resolving a fail
- // high at ply 1 at the node below the first root node. This information
- // is used for time management.
-
- bool fail_high_ply_1() {
-
- for (int i = 0; i < ActiveThreads; i++)
- if (Threads[i].failHighPly1)
- return true;
-
- return false;
+ H.set_gain(pos.piece_on(move_to(m)), move_to(m), -(before + after));
}
&& !FailLow
&& t > MaxSearchTime + ExtraSearchTime;
- bool noProblemFound = !FailHigh
- && !FailLow
- && !fail_high_ply_1()
- && !Problem
- && t > 6 * (MaxSearchTime + ExtraSearchTime);
-
bool noMoreTime = t > AbsoluteMaxSearchTime
- || stillAtFirstMove //FIXME: We are not checking any problem flags, BUG?
- || noProblemFound;
+ || stillAtFirstMove;
if ( (Iteration >= 3 && UseTimeManagement && noMoreTime)
|| (ExactMaxTime && t >= ExactMaxTime)
&& !FailLow
&& t > MaxSearchTime + ExtraSearchTime;
- bool noProblemFound = !FailHigh
- && !FailLow
- && !fail_high_ply_1()
- && !Problem
- && t > 6 * (MaxSearchTime + ExtraSearchTime);
-
bool noMoreTime = t > AbsoluteMaxSearchTime
- || stillAtFirstMove
- || noProblemFound;
+ || stillAtFirstMove;
if (Iteration >= 3 && UseTimeManagement && (noMoreTime || StopOnPonderhit))
AbortSearch = true;