Diffstat (limited to 'lib/maple_tree.c')
-rw-r--r--   lib/maple_tree.c | 34 ++++++++++++++++++++--------------
1 file changed, 20 insertions(+), 14 deletions(-)
diff --git a/lib/maple_tree.c b/lib/maple_tree.c
index 05d5db255c39..ee1ff0c59fd7 100644
--- a/lib/maple_tree.c
+++ b/lib/maple_tree.c
@@ -4106,6 +4106,7 @@ static inline unsigned char mas_wr_new_end(struct ma_wr_state *wr_mas)
 /*
  * mas_wr_append: Attempt to append
  * @wr_mas: the maple write state
+ * @new_end: The end of the node after the modification
  *
  * This is currently unsafe in rcu mode since the end of the node may be cached
  * by readers while the node contents may be updated which could result in
@@ -4114,42 +4115,46 @@ static inline unsigned char mas_wr_new_end(struct ma_wr_state *wr_mas)
  * Return: True if appended, false otherwise
  */
 static inline bool mas_wr_append(struct ma_wr_state *wr_mas,
-                unsigned char new_end)
+                                 unsigned char new_end)
 {
-        unsigned char end = wr_mas->node_end;
-        struct ma_state *mas = wr_mas->mas;
-        unsigned char node_pivots = mt_pivots[wr_mas->type];
+        struct ma_state *mas;
+        void __rcu **slots;
+        unsigned char end;
 
+        mas = wr_mas->mas;
         if (mt_in_rcu(mas->tree))
                 return false;
 
         if (mas->offset != wr_mas->node_end)
                 return false;
 
-        if (new_end < node_pivots) {
+        end = wr_mas->node_end;
+        if (mas->offset != end)
+                return false;
+
+        if (new_end < mt_pivots[wr_mas->type]) {
                 wr_mas->pivots[new_end] = wr_mas->pivots[end];
-                ma_set_meta(wr_mas->node, maple_leaf_64, 0, new_end);
+                ma_set_meta(wr_mas->node, wr_mas->type, 0, new_end);
         }
 
-        if (new_end == wr_mas->node_end + 1) {
+        slots = wr_mas->slots;
+        if (new_end == end + 1) {
                 if (mas->last == wr_mas->r_max) {
                         /* Append to end of range */
-                        rcu_assign_pointer(wr_mas->slots[new_end],
-                                           wr_mas->entry);
+                        rcu_assign_pointer(slots[new_end], wr_mas->entry);
                         wr_mas->pivots[end] = mas->index - 1;
                         mas->offset = new_end;
                 } else {
                         /* Append to start of range */
-                        rcu_assign_pointer(wr_mas->slots[new_end],
-                                           wr_mas->content);
+                        rcu_assign_pointer(slots[new_end], wr_mas->content);
                         wr_mas->pivots[end] = mas->last;
-                        rcu_assign_pointer(wr_mas->slots[end], wr_mas->entry);
+                        rcu_assign_pointer(slots[end], wr_mas->entry);
                 }
         } else {
                 /* Append to the range without touching any boundaries. */
-                rcu_assign_pointer(wr_mas->slots[new_end], wr_mas->content);
+                rcu_assign_pointer(slots[new_end], wr_mas->content);
                 wr_mas->pivots[end + 1] = mas->last;
-                rcu_assign_pointer(wr_mas->slots[end + 1], wr_mas->entry);
+                rcu_assign_pointer(slots[end + 1], wr_mas->entry);
                 wr_mas->pivots[end] = mas->index - 1;
                 mas->offset = end + 1;
         }
@@ -4157,6 +4162,7 @@ static inline bool mas_wr_append(struct ma_wr_state *wr_mas,
         if (!wr_mas->content || !wr_mas->entry)
                 mas_update_gap(mas);
 
+        trace_ma_write(__func__, mas, new_end, wr_mas->entry);
         return true;
 }
 
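For context, a minimal sketch (not part of the patch above) of a store that can reach the mas_wr_append() fast path through the public maple tree API. The names demo_tree, demo_a, demo_b and demo_append_last_range and the chosen ranges are hypothetical; whether the append path is actually taken depends on the leaf layout at the time of the write and on the tree not being in RCU mode, per the mt_in_rcu() check above.

/*
 * Hypothetical illustration only.  demo_tree is declared with default flags,
 * so mt_in_rcu() is false and the append fast path is not disabled.
 */
#include <linux/gfp.h>
#include <linux/limits.h>
#include <linux/maple_tree.h>

static DEFINE_MTREE(demo_tree);
static int demo_a, demo_b;

static int demo_append_last_range(void)
{
        int ret;

        /* Root leaf then holds [0, 99] -> NULL, [100, ULONG_MAX] -> &demo_a. */
        ret = mtree_store_range(&demo_tree, 100, ULONG_MAX, &demo_a, GFP_KERNEL);
        if (ret)
                return ret;

        /*
         * This write targets the last occupied slot and ends at its r_max,
         * so mas_wr_append() may take the "Append to end of range" branch:
         * one new slot for &demo_b plus one pivot update for &demo_a,
         * without rebuilding the node.
         */
        return mtree_store_range(&demo_tree, 200, ULONG_MAX, &demo_b, GFP_KERNEL);
}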