summary refs log tree commit diff
path: root/reftable/stack.c
diff options
context:
space:
mode:
author: Karthik Nayak <karthik.188@gmail.com> 2025-11-08 22:51:54 +0100
committer: Junio C Hamano <gitster@pobox.com> 2025-11-10 09:28:47 -0800
commit: e35155588aa9f0355eb7e116ea418c189479f62d (patch)
treea0241c0a8cbaedfa98d729c94c28119e4e8d1c45 /reftable/stack.c
parent135f491f83d4763bdc61642eb0126ce2e6ada286 (diff)
reftable/stack: add function to check if optimization is required
The reftable backend performs auto-compaction as part of its regular flow, which is required to keep the number of tables part of a stack at bay. This allows it to stay optimized. Compaction can also be triggered voluntarily by the user via the 'git pack-refs' or the 'git refs optimize' command. However, currently there is no way for the user to check if optimization is required without actually performing it. Extract out the heuristics logic from 'reftable_stack_auto_compact()' into an internal function 'update_segment_if_compaction_required()'. Then use this to add and expose `reftable_stack_compaction_required()` which will allow users to check if the reftable backend can be optimized. Signed-off-by: Karthik Nayak <karthik.188@gmail.com> Acked-by: Patrick Steinhardt <ps@pks.im> Signed-off-by: Junio C Hamano <gitster@pobox.com>
Diffstat (limited to 'reftable/stack.c')
-rw-r--r-- reftable/stack.c | 42
1 file changed, 37 insertions(+), 5 deletions(-)
diff --git a/reftable/stack.c b/reftable/stack.c
index 49387f9344..1c9f21dfe1 100644
--- a/reftable/stack.c
+++ b/reftable/stack.c
@@ -1647,19 +1647,51 @@ static int stack_segments_for_compaction(struct reftable_stack *st,
return 0;
}
/*
 * Decide whether the stack is in need of compaction and store the
 * verdict in `*required`.
 *
 * With `use_geometric` set, the segment-selection heuristic is
 * consulted and the segment that would be compacted is written to
 * `*seg`; without it, having two or more tables is by itself taken to
 * mean compaction is required (and `*seg` is left untouched).
 *
 * Returns 0 on success, or a nonzero error code propagated from
 * stack_segments_for_compaction().
 */
static int update_segment_if_compaction_required(struct reftable_stack *st,
						 struct segment *seg,
						 bool use_geometric,
						 bool *required)
{
	int err;

	/* A stack with fewer than two tables cannot be compacted further. */
	if (st->merged->tables_len < 2) {
		*required = false;
		return 0;
	}

	/*
	 * When the caller opted out of the heuristic, the mere presence
	 * of multiple tables means compaction is worthwhile.
	 */
	if (!use_geometric) {
		*required = true;
		return 0;
	}

	err = stack_segments_for_compaction(st, seg);
	if (err)
		return err;

	/* A non-empty candidate segment signals that compaction would help. */
	*required = segment_size(seg) > 0;
	return 0;
}
+
/*
 * Check whether the stack would benefit from compaction without
 * actually performing it. `use_heuristics` toggles the geometric
 * segment-selection heuristic; the verdict is stored in `*required`.
 * Returns 0 on success or a nonzero error code.
 */
int reftable_stack_compaction_required(struct reftable_stack *st,
				       bool use_heuristics,
				       bool *required)
{
	/* Scratch segment; callers only care about the boolean verdict. */
	struct segment seg;
	return update_segment_if_compaction_required(st, &seg, use_heuristics,
						     required);
}
+
+int reftable_stack_auto_compact(struct reftable_stack *st)
+{
+ struct segment seg;
+ bool required;
+ int err;
- err = stack_segments_for_compaction(st, &seg);
+ err = update_segment_if_compaction_required(st, &seg, true, &required);
if (err)
return err;
- if (segment_size(&seg) > 0)
+ if (required)
return stack_compact_range(st, seg.start, seg.end - 1,
NULL, STACK_COMPACT_RANGE_BEST_EFFORT);