mirror of
https://git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
synced 2026-05-16 10:11:38 -04:00
maple_tree: extract use of big node from mas_wr_spanning_store()
Isolate big node use into its own function. No functional changes intended.

Link: https://lkml.kernel.org/r/20260130205935.2559335-4-Liam.Howlett@oracle.com
Signed-off-by: Liam R. Howlett <Liam.Howlett@oracle.com>
Cc: Alice Ryhl <aliceryhl@google.com>
Cc: Andrew Ballance <andrewjballance@gmail.com>
Cc: Arnd Bergmann <arnd@arndb.de>
Cc: Christian Kujau <lists@nerdbynature.de>
Cc: Geert Uytterhoeven <geert@linux-m68k.org>
Cc: Kuninori Morimoto <kuninori.morimoto.gx@renesas.com>
Cc: Matthew Wilcox (Oracle) <willy@infradead.org>
Cc: SeongJae Park <sj@kernel.org>
Cc: Sidhartha Kumar <sidhartha.kumar@oracle.com>
Cc: Suren Baghdasaryan <surenb@google.com>
Cc: Vlastimil Babka <vbabka@suse.cz>
Signed-off-by: Andrew Morton <akpm@linux-foundation.org>
This commit is contained in:
committed by
Andrew Morton
parent
3e302560b9
commit
df11f9ee8f
@@ -2748,6 +2748,30 @@ static void mas_spanning_rebalance(struct ma_state *mas,
|
||||
mas_spanning_rebalance_loop(mas, mast, count);
|
||||
}
|
||||
|
||||
|
||||
static noinline void mas_wr_spanning_rebalance(struct ma_state *mas,
|
||||
struct maple_subtree_state *mast, unsigned char height,
|
||||
struct ma_wr_state *l_wr_mas)
|
||||
{
|
||||
struct maple_big_node b_node;
|
||||
|
||||
memset(&b_node, 0, sizeof(struct maple_big_node));
|
||||
/* Copy l_mas and store the value in b_node. */
|
||||
mas_store_b_node(l_wr_mas, &b_node, mast->orig_l->end);
|
||||
/* Copy r_mas into b_node if there is anything to copy. */
|
||||
if (mast->orig_r->max > mast->orig_r->last)
|
||||
mas_mab_cp(mast->orig_r, mast->orig_r->offset,
|
||||
mast->orig_r->end, &b_node, b_node.b_end + 1);
|
||||
else
|
||||
b_node.b_end++;
|
||||
|
||||
/* Stop spanning searches by searching for just index. */
|
||||
mast->orig_l->index = mast->orig_l->last = mas->index;
|
||||
|
||||
mast->bn = &b_node;
|
||||
/* Combine l_mas and r_mas and split them up evenly again. */
|
||||
return mas_spanning_rebalance(mas, mast, height);
|
||||
}
|
||||
/*
|
||||
* mas_rebalance() - Rebalance a given node.
|
||||
* @mas: The maple state
|
||||
@@ -3400,10 +3424,9 @@ static inline void mas_new_root(struct ma_state *mas, void *entry)
|
||||
* span.
|
||||
* @wr_mas: The maple write state
|
||||
*/
|
||||
static noinline void mas_wr_spanning_store(struct ma_wr_state *wr_mas)
|
||||
static void mas_wr_spanning_store(struct ma_wr_state *wr_mas)
|
||||
{
|
||||
struct maple_subtree_state mast;
|
||||
struct maple_big_node b_node;
|
||||
struct ma_state *mas;
|
||||
unsigned char height;
|
||||
|
||||
@@ -3467,24 +3490,9 @@ static noinline void mas_wr_spanning_store(struct ma_wr_state *wr_mas)
|
||||
return mas_new_root(mas, wr_mas->entry);
|
||||
}
|
||||
|
||||
memset(&b_node, 0, sizeof(struct maple_big_node));
|
||||
/* Copy l_mas and store the value in b_node. */
|
||||
mas_store_b_node(&l_wr_mas, &b_node, l_mas.end);
|
||||
/* Copy r_mas into b_node if there is anything to copy. */
|
||||
if (r_mas.max > r_mas.last)
|
||||
mas_mab_cp(&r_mas, r_mas.offset, r_mas.end,
|
||||
&b_node, b_node.b_end + 1);
|
||||
else
|
||||
b_node.b_end++;
|
||||
|
||||
/* Stop spanning searches by searching for just index. */
|
||||
l_mas.index = l_mas.last = mas->index;
|
||||
|
||||
mast.bn = &b_node;
|
||||
mast.orig_l = &l_mas;
|
||||
mast.orig_r = &r_mas;
|
||||
/* Combine l_mas and r_mas and split them up evenly again. */
|
||||
return mas_spanning_rebalance(mas, &mast, height + 1);
|
||||
mas_wr_spanning_rebalance(mas, &mast, height + 1, &l_wr_mas);
|
||||
}
|
||||
|
||||
/*
|
||||
|
||||
Reference in New Issue
Block a user