batman-adv: Place kref_get for gw_node near use
It is hard to understand why the refcount is increased when the increment isn't placed near the point where the new reference is actually used. Calling kref_get right before the place that requires the reference, and in the same function, helps to avoid accidental problems caused by incorrect reference counting. Signed-off-by: Sven Eckelmann <sven@narfation.org> Signed-off-by: Marek Lindner <mareklindner@neomailbox.ch> Signed-off-by: Simon Wunderlich <sw@simonwunderlich.de>
This commit is contained in:
parent
6a51e09d8b
commit
f665fa7e85
|
@ -339,14 +339,15 @@ static void batadv_gw_node_add(struct batadv_priv *bat_priv,
|
|||
if (!gw_node)
|
||||
return;
|
||||
|
||||
kref_init(&gw_node->refcount);
|
||||
INIT_HLIST_NODE(&gw_node->list);
|
||||
kref_get(&orig_node->refcount);
|
||||
gw_node->orig_node = orig_node;
|
||||
gw_node->bandwidth_down = ntohl(gateway->bandwidth_down);
|
||||
gw_node->bandwidth_up = ntohl(gateway->bandwidth_up);
|
||||
kref_init(&gw_node->refcount);
|
||||
|
||||
spin_lock_bh(&bat_priv->gw.list_lock);
|
||||
kref_get(&gw_node->refcount);
|
||||
hlist_add_head_rcu(&gw_node->list, &bat_priv->gw.list);
|
||||
spin_unlock_bh(&bat_priv->gw.list_lock);
|
||||
|
||||
|
@ -357,6 +358,9 @@ static void batadv_gw_node_add(struct batadv_priv *bat_priv,
|
|||
ntohl(gateway->bandwidth_down) % 10,
|
||||
ntohl(gateway->bandwidth_up) / 10,
|
||||
ntohl(gateway->bandwidth_up) % 10);
|
||||
|
||||
/* don't return reference to new gw_node */
|
||||
batadv_gw_node_put(gw_node);
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
Loading…
Reference in New Issue