af_unix: Skip GC if no cycle exists.

We do not need to run GC if there is no possible cyclic reference.
We use unix_graph_maybe_cyclic to decide if we should run GC.

If an fd of an AF_UNIX socket is passed to an already inflight AF_UNIX
socket, they could form a cyclic reference.  In that case, we set
unix_graph_maybe_cyclic to true and later run Tarjan's algorithm to
group the sockets into SCCs (strongly connected components).
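
As a purely illustrative user-space sketch (not part of this patch; the
send_fd() helper below is hypothetical), such a cycle can be built by
passing each socket's fd over the other pair with SCM_RIGHTS:

  #include <string.h>
  #include <sys/socket.h>
  #include <sys/uio.h>

  /* Pass fd as SCM_RIGHTS ancillary data over the AF_UNIX socket 'via'. */
  static int send_fd(int via, int fd)
  {
  	char data = 'x';
  	struct iovec iov = { .iov_base = &data, .iov_len = 1 };
  	union {
  		char buf[CMSG_SPACE(sizeof(int))];
  		struct cmsghdr align;
  	} u;
  	struct msghdr msg = {
  		.msg_iov = &iov,
  		.msg_iovlen = 1,
  		.msg_control = u.buf,
  		.msg_controllen = sizeof(u.buf),
  	};
  	struct cmsghdr *cmsg = CMSG_FIRSTHDR(&msg);

  	cmsg->cmsg_level = SOL_SOCKET;
  	cmsg->cmsg_type = SCM_RIGHTS;
  	cmsg->cmsg_len = CMSG_LEN(sizeof(int));
  	memcpy(CMSG_DATA(cmsg), &fd, sizeof(int));

  	return sendmsg(via, &msg, 0);
  }

  int a[2], b[2];

  socketpair(AF_UNIX, SOCK_DGRAM, 0, a);
  socketpair(AF_UNIX, SOCK_DGRAM, 0, b);

  send_fd(b[1], a[0]);	/* a[0] now sits inflight in b[0]'s queue */
  send_fd(a[1], b[0]);	/* b[0] inflight in a[0]'s queue: a cycle */

If all four fds are then close()d, only the GC can reclaim the sockets.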

Once we run Tarjan's algorithm, we know for sure whether cyclic
references exist.  If there is no cycle, we set unix_graph_maybe_cyclic
to false and can skip the entire garbage collection next time.

When finalising an SCC, we set unix_graph_maybe_cyclic to true if the
SCC consists of multiple vertices.

Even if an SCC is a single vertex, a cycle can still exist via self-fd
passing.  Given that this corner case is rare, we detect it by checking
all edges of the vertex and set unix_graph_maybe_cyclic to true if any
edge points back to the vertex itself.
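
Reusing the illustrative send_fd() helper from the sketch above, the
single-vertex case is simply a socket queued onto itself:

  int p[2];

  socketpair(AF_UNIX, SOCK_DGRAM, 0, p);

  /* Sending p[0]'s fd via its peer lands it in p[0]'s own receive
   * queue, so the SCC { p[0] } has an edge back to itself.
   */
  send_fd(p[1], p[0]);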

With this change, __unix_gc() is just a spin_lock() dance in normal
usage.
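
In condensed form (eliding the candidate selection and purge), the fast
path this patch creates looks like:

  static void __unix_gc(struct work_struct *work)
  {
  	spin_lock(&unix_gc_lock);

  	if (!unix_graph_maybe_cyclic)
  		goto skip_gc;	/* the common case: lock, test, unlock */

  	unix_walk_scc();

  	/* ... select and free garbage candidates ... */

  skip_gc:
  	/* Paired with READ_ONCE() in wait_for_unix_gc(). */
  	WRITE_ONCE(gc_in_progress, false);

  	spin_unlock(&unix_gc_lock);
  }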

Signed-off-by: Kuniyuki Iwashima <kuniyu@amazon.com>
Acked-by: Paolo Abeni <pabeni@redhat.com>
Link: https://lore.kernel.org/r/20240325202425.60930-11-kuniyu@amazon.com
Signed-off-by: Jakub Kicinski <kuba@kernel.org>

@@ -112,6 +112,19 @@ static struct unix_vertex *unix_edge_successor(struct unix_edge *edge)
 	return edge->successor->vertex;
 }
 
+static bool unix_graph_maybe_cyclic;
+
+static void unix_update_graph(struct unix_vertex *vertex)
+{
+	/* If the receiver socket is not inflight, no cyclic
+	 * reference could be formed.
+	 */
+	if (!vertex)
+		return;
+
+	unix_graph_maybe_cyclic = true;
+}
+
 static LIST_HEAD(unix_unvisited_vertices);
 
 enum unix_vertex_index {
@@ -138,12 +151,16 @@ static void unix_add_edge(struct scm_fp_list *fpl, struct unix_edge *edge)
 
 	vertex->out_degree++;
 	list_add_tail(&edge->vertex_entry, &vertex->edges);
+
+	unix_update_graph(unix_edge_successor(edge));
 }
 
 static void unix_del_edge(struct scm_fp_list *fpl, struct unix_edge *edge)
 {
 	struct unix_vertex *vertex = edge->predecessor->vertex;
 
+	unix_update_graph(unix_edge_successor(edge));
+
 	list_del(&edge->vertex_entry);
 	vertex->out_degree--;
@@ -227,6 +244,7 @@ void unix_del_edges(struct scm_fp_list *fpl)
 void unix_update_edges(struct unix_sock *receiver)
 {
 	spin_lock(&unix_gc_lock);
+	unix_update_graph(unix_sk(receiver->listener)->vertex);
 	receiver->listener = NULL;
 	spin_unlock(&unix_gc_lock);
 }
@@ -268,6 +286,26 @@ void unix_destroy_fpl(struct scm_fp_list *fpl)
 	unix_free_vertices(fpl);
 }
 
+static bool unix_scc_cyclic(struct list_head *scc)
+{
+	struct unix_vertex *vertex;
+	struct unix_edge *edge;
+
+	/* SCC containing multiple vertices ? */
+	if (!list_is_singular(scc))
+		return true;
+
+	vertex = list_first_entry(scc, typeof(*vertex), scc_entry);
+
+	/* Self-reference or an embryo-listener circle ? */
+	list_for_each_entry(edge, &vertex->edges, vertex_entry) {
+		if (unix_edge_successor(edge) == vertex)
+			return true;
+	}
+
+	return false;
+}
+
 static LIST_HEAD(unix_visited_vertices);
 static unsigned long unix_vertex_grouped_index = UNIX_VERTEX_INDEX_MARK2;
@@ -353,6 +391,9 @@ static void __unix_walk_scc(struct unix_vertex *vertex)
 			vertex->index = unix_vertex_grouped_index;
 		}
 
+		if (!unix_graph_maybe_cyclic)
+			unix_graph_maybe_cyclic = unix_scc_cyclic(&scc);
+
 		list_del(&scc);
 	}
@@ -363,6 +404,8 @@ static void __unix_walk_scc(struct unix_vertex *vertex)
 
 static void unix_walk_scc(void)
 {
+	unix_graph_maybe_cyclic = false;
+
 	/* Visit every vertex exactly once.
 	 * __unix_walk_scc() moves visited vertices to unix_visited_vertices.
 	 */
@@ -524,6 +567,9 @@ static void __unix_gc(struct work_struct *work)
 
 	spin_lock(&unix_gc_lock);
 
+	if (!unix_graph_maybe_cyclic)
+		goto skip_gc;
+
 	unix_walk_scc();
 
 	/* First, select candidates for garbage collection. Only
@@ -617,7 +663,7 @@ static void __unix_gc(struct work_struct *work)
 	/* All candidates should have been detached by now. */
 	WARN_ON_ONCE(!list_empty(&gc_candidates));
-
+skip_gc:
 	/* Paired with READ_ONCE() in wait_for_unix_gc(). */
 	WRITE_ONCE(gc_in_progress, false);