btrfs: reloc: refactor useless nodes handling into its own function
This patch also adds some comments for the cleanup.

Reviewed-by: Josef Bacik <josef@toxicpanda.com>
Signed-off-by: Qu Wenruo <wqu@suse.com>
Reviewed-by: David Sterba <dsterba@suse.com>
Signed-off-by: David Sterba <dsterba@suse.com>
commit 29db137b6b
parent 1f87292466
@@ -1192,6 +1192,80 @@ static int finish_upper_links(struct backref_cache *cache,
         return 0;
 }
 
+/*
+ * For useless nodes, do two major clean ups:
+ *
+ * - Cleanup the children edges and nodes
+ *   If child node is also orphan (no parent) during cleanup, then the child
+ *   node will also be cleaned up.
+ *
+ * - Freeing up leaves (level 0), keeps nodes detached
+ *   For nodes, the node is still cached as "detached"
+ *
+ * Return false if @node is not in the @useless_nodes list.
+ * Return true if @node is in the @useless_nodes list.
+ */
+static bool handle_useless_nodes(struct reloc_control *rc,
+                                 struct backref_node *node)
+{
+        struct backref_cache *cache = &rc->backref_cache;
+        struct list_head *useless_node = &cache->useless_node;
+        bool ret = false;
+
+        while (!list_empty(useless_node)) {
+                struct backref_node *cur;
+
+                cur = list_first_entry(useless_node, struct backref_node,
+                                       list);
+                list_del_init(&cur->list);
+
+                /* Only tree root nodes can be added to @useless_nodes */
+                ASSERT(list_empty(&cur->upper));
+
+                if (cur == node)
+                        ret = true;
+
+                /* The node is the lowest node */
+                if (cur->lowest) {
+                        list_del_init(&cur->lower);
+                        cur->lowest = 0;
+                }
+
+                /* Cleanup the lower edges */
+                while (!list_empty(&cur->lower)) {
+                        struct backref_edge *edge;
+                        struct backref_node *lower;
+
+                        edge = list_entry(cur->lower.next,
+                                          struct backref_edge, list[UPPER]);
+                        list_del(&edge->list[UPPER]);
+                        list_del(&edge->list[LOWER]);
+                        lower = edge->node[LOWER];
+                        free_backref_edge(cache, edge);
+
+                        /* Child node is also orphan, queue for cleanup */
+                        if (list_empty(&lower->upper))
+                                list_add(&lower->list, useless_node);
+                }
+                /* Mark this block processed for relocation */
+                mark_block_processed(rc, cur);
+
+                /*
+                 * Backref nodes for tree leaves are deleted from the cache.
+                 * Backref nodes for upper level tree blocks are left in the
+                 * cache to avoid unnecessary backref lookup.
+                 */
+                if (cur->level > 0) {
+                        list_add(&cur->list, &cache->detached);
+                        cur->detached = 1;
+                } else {
+                        rb_erase(&cur->rb_node, &cache->rb_root);
+                        free_backref_node(cache, cur);
+                }
+        }
+        return ret;
+}
+
 /*
  * Build backref tree for a given tree block. Root of the backref tree
  * corresponds the tree block, leaves of the backref tree correspond roots of
@@ -1266,43 +1340,8 @@ static noinline_for_stack struct backref_node *build_backref_tree(
                 goto out;
         }
 
-        /*
-         * process useless backref nodes. backref nodes for tree leaves
-         * are deleted from the cache. backref nodes for upper level
-         * tree blocks are left in the cache to avoid unnecessary backref
-         * lookup.
-         */
-        while (!list_empty(&cache->useless_node)) {
-                upper = list_first_entry(&cache->useless_node,
-                                         struct backref_node, list);
-                list_del_init(&upper->list);
-                ASSERT(list_empty(&upper->upper));
-                if (upper == node)
-                        node = NULL;
-                if (upper->lowest) {
-                        list_del_init(&upper->lower);
-                        upper->lowest = 0;
-                }
-                while (!list_empty(&upper->lower)) {
-                        edge = list_first_entry(&upper->lower,
-                                                struct backref_edge, list[UPPER]);
-                        list_del(&edge->list[UPPER]);
-                        list_del(&edge->list[LOWER]);
-                        lower = edge->node[LOWER];
-                        free_backref_edge(cache, edge);
-
-                        if (list_empty(&lower->upper))
-                                list_add(&lower->list, &cache->useless_node);
-                }
-                mark_block_processed(rc, upper);
-                if (upper->level > 0) {
-                        list_add(&upper->list, &cache->detached);
-                        upper->detached = 1;
-                } else {
-                        rb_erase(&upper->rb_node, &cache->rb_root);
-                        free_backref_node(cache, upper);
-                }
-        }
+        if (handle_useless_nodes(rc, node))
+                node = NULL;
 out:
         btrfs_backref_iter_free(iter);
         btrfs_free_path(path);
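The boolean return replaces the old `upper == node` check in the removed loop: handle_useless_nodes() may free or detach the very node that build_backref_tree() was about to return, so it reports that case and the caller drops its pointer rather than handing back a stale node. A minimal sketch of that calling convention, with hypothetical names unrelated to the btrfs code:

#include <stdbool.h>
#include <stdlib.h>

struct item { int useless; };

/*
 * Free @it if it is marked useless.  Return true when it was freed so the
 * caller knows its pointer is now dangling and must be dropped.
 */
static bool reclaim_if_useless(struct item *it)
{
        if (!it->useless)
                return false;
        free(it);
        return true;
}

/* Build an item; return NULL rather than a pointer to freed memory. */
static struct item *build_item(int useless)
{
        struct item *it = malloc(sizeof(*it));

        if (!it)
                return NULL;
        it->useless = useless;

        if (reclaim_if_useless(it))
                it = NULL;      /* mirrors "node = NULL" in build_backref_tree() */
        return it;
}

int main(void)
{
        struct item *a = build_item(0); /* kept */
        struct item *b = build_item(1); /* reclaimed, so NULL comes back */

        free(a);
        return b == NULL ? 0 : 1;
}

Reporting the condition through the return value keeps the helper's signature simple while still letting the caller avoid a use-after-free.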