
Commit

bench: 1538811
FauziAkram authored Oct 27, 2023
1 parent 0024133 commit 4626800
Showing 1 changed file with 21 additions and 13 deletions.
src/search.cpp: 34 changes (21 additions & 13 deletions)
@@ -77,24 +77,24 @@ enum NodeType {
 
 // Futility margin
 Value futility_margin(Depth d, bool noTtCutNode, bool improving) {
-    return Value((126 - 42 * noTtCutNode) * (d - improving));
+    return Value((125 - 43 * noTtCutNode) * (d - improving));
 }
 
 // Reductions lookup table initialized at startup
 int Reductions[MAX_MOVES]; // [depth or moveNumber]
 
 Depth reduction(bool i, Depth d, int mn, Value delta, Value rootDelta) {
     int reductionScale = Reductions[d] * Reductions[mn];
-    return (reductionScale + 1560 - int(delta) * 945 / int(rootDelta)) / 1024
-         + (!i && reductionScale > 791);
+    return (reductionScale + 1487 - int(delta) * 976 / int(rootDelta)) / 1024
+         + (!i && reductionScale > 808);
 }
 
 constexpr int futility_move_count(bool improving, Depth depth) {
     return improving ? (3 + depth * depth) : (3 + depth * depth) / 2;
 }
 
 // History and stats update bonus, based on depth
-int stat_bonus(Depth d) { return std::min(334 * d - 531, 1538); }
+int stat_bonus(Depth d) { return std::min(357 * d - 483, 1511); }
 
 // Add a small random component to draw evaluations to avoid 3-fold blindness
 Value value_draw(const Thread* thisThread) {
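The two retuned helpers above depend only on their arguments, so the old and new values can be compared in isolation. Below is a minimal standalone sketch (not part of the commit; the _old/_new names are introduced here and Stockfish's Value/Depth types are replaced by plain int):

#include <algorithm>
#include <iostream>

// Standalone stand-ins for the two retuned helpers, using plain int everywhere.
int futility_margin_old(int d, bool noTtCutNode, bool improving) {
    return (126 - 42 * noTtCutNode) * (d - improving);
}
int futility_margin_new(int d, bool noTtCutNode, bool improving) {
    return (125 - 43 * noTtCutNode) * (d - improving);
}
int stat_bonus_old(int d) { return std::min(334 * d - 531, 1538); }
int stat_bonus_new(int d) { return std::min(357 * d - 483, 1511); }

int main() {
    for (int d = 1; d <= 8; ++d)
        std::cout << "d=" << d << "  futility_margin old/new: " << futility_margin_old(d, false, false)
                  << '/' << futility_margin_new(d, false, false) << "  stat_bonus old/new: "
                  << stat_bonus_old(d) << '/' << stat_bonus_new(d) << '\n';
}

With these constants, the futility coefficient moves from 126 to 125 when noTtCutNode is false and from 84 to 82 when it is true, while stat_bonus grows at shallow depths (137 to 231 at d = 2) and reaches its slightly lower cap one ply earlier (1511 from d = 6 instead of 1538 from d = 7).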
@@ -761,11 +761,19 @@ Value search(Position& pos, Stack* ss, Value alpha, Value beta, Depth depth, boo
     // If eval is really low check with qsearch if it can exceed alpha, if it can't,
     // return a fail low.
     // Adjust razor margin according to cutoffCnt. (~1 Elo)
-    if (eval < alpha - 492 - (257 - 200 * ((ss + 1)->cutoffCnt > 3)) * depth * depth)
+    if (eval < alpha - 474 - (270 - 174 * ((ss + 1)->cutoffCnt > 3)) * depth * depth)
     {
         value = qsearch<NonPV>(pos, ss, alpha - 1, alpha);
         if (value < alpha)
+        {
+            if (!priorCapture && prevSq != SQ_NONE)
+            {
+                int bonus = (depth > 6) + (PvNode || cutNode) + (value < alpha - 658) + ((ss-1)->moveCount > 11);
+                update_continuation_histories(ss-1, pos.piece_on(prevSq), prevSq, stat_bonus(depth) * bonus);
+                thisThread->mainHistory[~us][from_to((ss-1)->currentMove)] << stat_bonus(depth) * bonus * 0.57;
+            }
             return value;
+        }
     }
 
     // Step 8. Futility pruning: child node (~40 Elo)
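Razoring drops into qsearch only when eval sits below alpha by more than a depth-dependent margin. A minimal sketch of that margin before and after this commit (an illustration, not part of the commit; razor_margin_old/new and manyCutoffs are names introduced here, over plain int):

#include <iostream>

// Stand-ins for the margin subtracted from alpha in the razoring condition above.
int razor_margin_old(int depth, bool manyCutoffs) {
    return 492 + (257 - 200 * manyCutoffs) * depth * depth;
}
int razor_margin_new(int depth, bool manyCutoffs) {
    return 474 + (270 - 174 * manyCutoffs) * depth * depth;
}

int main() {
    for (int depth = 1; depth <= 4; ++depth)
        std::cout << "depth=" << depth << "  margin old/new: " << razor_margin_old(depth, false) << '/'
                  << razor_margin_new(depth, false) << "  with (ss+1)->cutoffCnt > 3: "
                  << razor_margin_old(depth, true) << '/' << razor_margin_new(depth, true) << '\n';
}

The margin for the (ss + 1)->cutoffCnt > 3 case grows the most (for example 1005 to 1338 at depth 3), so razoring fires somewhat less often there. The inserted block is the only structural change in this diff: when the verification qsearch confirms the fail low, the prior countermove now receives the same kind of history bonus that the fail-low code near the end of search already applies.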
@@ -991,22 +999,22 @@ Value search(Position& pos, Stack* ss, Value alpha, Value beta, Depth depth, boo
                           + (*contHist[3])[movedPiece][to_sq(move)];
 
                 // Continuation history based pruning (~2 Elo)
-                if (lmrDepth < 6 && history < -3498 * depth)
+                if (lmrDepth < 6 && history < -3645 * depth)
                     continue;
 
                 history += 2 * thisThread->mainHistory[us][from_to(move)];
 
-                lmrDepth += history / 7815;
-                lmrDepth = std::max(lmrDepth, -2);
+                lmrDepth += history / 7836;
+                lmrDepth = std::max(lmrDepth, -1);
 
                 // Futility pruning: parent node (~13 Elo)
-                if (!ss->inCheck && lmrDepth < 13 && ss->staticEval + 80 + 122 * lmrDepth <= alpha)
+                if (!ss->inCheck && lmrDepth < 13 && ss->staticEval + 77 + 124 * lmrDepth <= alpha)
                     continue;
 
                 lmrDepth = std::max(lmrDepth, 0);
 
                 // Prune moves with negative SEE (~4 Elo)
-                if (!pos.see_ge(move, Value(-27 * lmrDepth * lmrDepth)))
+                if (!pos.see_ge(move, Value(-26 * lmrDepth * lmrDepth)))
                     continue;
             }
         }
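A small tabulation of the two retuned pruning thresholds above (an illustration, not part of the commit; the helper names are introduced here and lmrDepth is taken as a plain int). Parent-node futility pruning skips the move when ss->staticEval plus this margin is still no better than alpha, and SEE pruning skips it when see_ge fails against the quadratic threshold:

#include <iostream>

// Stand-ins for the two retuned pruning thresholds shown in the hunk above.
int parent_futility_margin_new(int lmrDepth) { return 77 + 124 * lmrDepth; }
int see_prune_threshold_new(int lmrDepth) { return -26 * lmrDepth * lmrDepth; }

int main() {
    for (int lmrDepth = 0; lmrDepth <= 6; ++lmrDepth)
        std::cout << "lmrDepth=" << lmrDepth
                  << "  futility margin: " << parent_futility_margin_new(lmrDepth)
                  << "  SEE threshold: " << see_prune_threshold_new(lmrDepth) << '\n';
}

Elsewhere in this hunk, continuation-history pruning now requires a more negative history score (-3645 * depth instead of -3498 * depth), the divisor feeding history into lmrDepth moves from 7815 to 7836, and the lmrDepth floor rises from -2 to -1.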
@@ -1316,12 +1324,12 @@ Value search(Position& pos, Stack* ss, Value alpha, Value beta, Depth depth, boo
     // Bonus for prior countermove that caused the fail low
     else if (!priorCapture && prevSq != SQ_NONE)
     {
-        int bonus = (depth > 6) + (PvNode || cutNode) + (bestValue < alpha - 653)
-                  + ((ss - 1)->moveCount > 11);
+        int bonus = (depth > 6) + (PvNode || cutNode) + (bestValue < alpha - 657)
+                  + ((ss - 1)->moveCount > 10);
         update_continuation_histories(ss - 1, pos.piece_on(prevSq), prevSq,
                                       stat_bonus(depth) * bonus);
         thisThread->mainHistory[~us][from_to((ss - 1)->currentMove)]
-          << stat_bonus(depth) * bonus / 2;
+          << stat_bonus(depth) * bonus * 0.61;
     }
 
     if (PvNode)
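To gauge the scale of the fail-low countermove bonus above, here is a minimal sketch of the raw values fed into the two history updates (an illustration, not part of the commit; the variable names and the chosen scenario are assumptions). Stockfish's << operator on history tables folds the bonus into the existing entry rather than assigning it directly, which is not reproduced here:

#include <algorithm>
#include <iostream>

int stat_bonus_new(int d) { return std::min(357 * d - 483, 1511); }

int main() {
    // Assumed scenario for illustration: depth 8 at a PV node, a fail low far
    // below alpha, and more than 10 moves searched at the previous ply.
    int  depth          = 8;
    bool deepEnough     = depth > 6;   // (depth > 6)
    bool pvOrCut        = true;        // (PvNode || cutNode)
    bool farBelowAlpha  = true;        // (bestValue < alpha - 657)
    bool manyPriorMoves = true;        // ((ss - 1)->moveCount > 10)

    int bonus         = deepEnough + pvOrCut + farBelowAlpha + manyPriorMoves;  // 0..4
    int contHistBonus = stat_bonus_new(depth) * bonus;
    std::cout << "continuation-history bonus: " << contHistBonus << '\n'
              << "mainHistory bonus (x 0.61): " << int(contHistBonus * 0.61) << '\n';
}

Relative to the previous code, the moveCount condition loosens from > 11 to > 10 and the mainHistory share rises from one half to a 0.61 factor; the razoring bonus added earlier in this commit uses the same shape with a 0.57 factor and the > 11 condition.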
