ENH: Cache graph objects when converting to a backend #7345

Merged
13 commits merged on Mar 31, 2024
2 changes: 1 addition & 1 deletion networkx/__init__.py
@@ -17,7 +17,7 @@
from networkx.exception import *

from networkx import utils
from networkx.utils.backends import _dispatchable, config
from networkx.utils import _clear_cache, _dispatchable, config

from networkx import classes
from networkx.classes import filters
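The re-export above makes nx._clear_cache available at the package top level; it is the helper the rest of this diff calls after in-place mutations. A minimal sketch of what such a helper could look like, assuming the cache is the __networkx_cache__ dict attribute that other hunks in this PR set to None to disable caching (the actual helper in networkx.utils may differ):

    def _clear_cache(G):
        # Sketch only: assumes the cache is a plain dict stored on
        # G.__networkx_cache__, or None when caching has been disabled.
        cache = getattr(G, "__networkx_cache__", None)
        if cache is not None:
            cache.clear()
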
6 changes: 4 additions & 2 deletions networkx/algorithms/approximation/traveling_salesman.py
@@ -340,7 +340,7 @@ def traveling_salesman_problem(G, weight="weight", nodes=None, cycle=True, metho

@not_implemented_for("undirected")
@py_random_state(2)
@nx._dispatchable(edge_attrs="weight")
@nx._dispatchable(edge_attrs="weight", mutates_input=True)
def asadpour_atsp(G, weight="weight", seed=None, source=None):
"""
Returns an approximate solution to the traveling salesman problem.
@@ -492,7 +492,7 @@ def asadpour_atsp(G, weight="weight", seed=None, source=None):
return _shortcutting(circuit)


@nx._dispatchable(edge_attrs="weight", returns_graph=True)
@nx._dispatchable(edge_attrs="weight", mutates_input=True, returns_graph=True)
def held_karp_ascent(G, weight="weight"):
"""
Minimizes the Held-Karp relaxation of the TSP for `G`
@@ -767,6 +767,7 @@ def find_epsilon(k, d):
for u, v, d in G.edges(data=True):
d[weight] = original_edge_weights[(u, v)] + pi_dict[u]
dir_ascent, k_d = direction_of_ascent()
nx._clear_cache(G)
# k_d is no longer an individual 1-arborescence but rather a set of
# minimal 1-arborescences at the maximum point of the polytope and should
# be reflected as such
@@ -777,6 +778,7 @@ def find_epsilon(k, d):
for k in k_max:
if len([n for n in k if k.degree(n) == 2]) == G.order():
# Tour found
# TODO: this branch does not restore original_edge_weights of G!
return k.size(weight), k

# Write the original edge weights back to G and every member of k_max at
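The nx._clear_cache(G) call above follows a loop that rewrites edge weights in place; without it a backend could keep serving a conversion of G made before the weights changed. A hedged toy illustration of the failure mode being guarded against (not code from the PR):

    import networkx as nx

    G = nx.complete_graph(4)
    nx.set_edge_attributes(G, 1, "weight")
    # ... suppose a dispatched call converts G for a backend and caches it here ...
    for u, v, d in G.edges(data=True):
        d["weight"] += 5          # in-place mutation the dispatcher cannot observe
    nx._clear_cache(G)            # so the cached conversion must be dropped explicitly
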
1 change: 1 addition & 0 deletions networkx/algorithms/centrality/group.py
@@ -350,6 +350,7 @@ def prominent_group(
else:
nodes = list(G.nodes)
DF_tree = nx.Graph()
DF_tree.__networkx_cache__ = None # Disable caching
PB, sigma, D = _group_preprocessing(G, nodes, weight)
betweenness = pd.DataFrame.from_dict(PB)
if C is not None:
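Setting __networkx_cache__ = None opts a graph out of caching altogether, which suits DF_tree: it is a purely internal scratch graph that is mutated constantly, so caching conversions for it would only add overhead. A small sketch of the same pattern with a hypothetical scratch graph:

    import networkx as nx

    scratch = nx.Graph()
    scratch.__networkx_cache__ = None   # internal, heavily mutated: never cache conversions
    scratch.add_edge(0, 1)
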
1 change: 1 addition & 0 deletions networkx/algorithms/community/lukes.py
@@ -175,6 +175,7 @@ def _concatenate_or_merge(partition_1, partition_2, x, i, ref_weight):
t_G.nodes[inner][PKEY] = {}
slot = safe_G.nodes[inner][node_weight]
t_G.nodes[inner][PKEY][slot] = [{inner}]
nx._clear_cache(t_G)

# CORE ALGORITHM -----------------------
while True:
12 changes: 2 additions & 10 deletions networkx/algorithms/connectivity/connectivity.py
@@ -31,11 +31,7 @@
]


@nx._dispatchable(
graphs={"G": 0, "auxiliary?": 4, "residual?": 5},
preserve_edge_attrs={"residual": {"capacity": float("inf")}},
preserve_graph_attrs={"auxiliary", "residual"},
)
@nx._dispatchable(graphs={"G": 0, "auxiliary?": 4}, preserve_graph_attrs={"auxiliary"})
def local_node_connectivity(
G, s, t, flow_func=None, auxiliary=None, residual=None, cutoff=None
):
@@ -490,11 +486,7 @@ def all_pairs_node_connectivity(G, nbunch=None, flow_func=None):
return all_pairs


@nx._dispatchable(
graphs={"G": 0, "auxiliary?": 4, "residual?": 5},
preserve_edge_attrs={"residual": {"capacity": float("inf")}},
preserve_graph_attrs={"residual"},
)
@nx._dispatchable(graphs={"G": 0, "auxiliary?": 4})
def local_edge_connectivity(
G, s, t, flow_func=None, auxiliary=None, residual=None, cutoff=None
):
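The simplified decorators above stop declaring the residual argument to the dispatcher; only G and the optional auxiliary graph remain, with the trailing ? marking an argument that may be None and the value giving its positional index. A hedged sketch of declaring a function this way (the function itself is hypothetical):

    import networkx as nx

    @nx._dispatchable(graphs={"G": 0, "auxiliary?": 4}, preserve_graph_attrs={"auxiliary"})
    def my_local_connectivity(G, s, t, flow_func=None, auxiliary=None):
        # "auxiliary?" is optional and sits at positional index 4
        ...
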
14 changes: 5 additions & 9 deletions networkx/algorithms/connectivity/cuts.py
@@ -22,12 +22,9 @@


@nx._dispatchable(
graphs={"G": 0, "auxiliary?": 4, "residual?": 5},
preserve_edge_attrs={
"auxiliary": {"capacity": float("inf")},
"residual": {"capacity": float("inf")},
},
preserve_graph_attrs={"auxiliary", "residual"},
graphs={"G": 0, "auxiliary?": 4},
preserve_edge_attrs={"auxiliary": {"capacity": float("inf")}},
preserve_graph_attrs={"auxiliary"},
)
def minimum_st_edge_cut(G, s, t, flow_func=None, auxiliary=None, residual=None):
"""Returns the edges of the cut-set of a minimum (s, t)-cut.
@@ -162,10 +159,9 @@ def minimum_st_edge_cut(G, s, t, flow_func=None, auxiliary=None, residual=None):


@nx._dispatchable(
graphs={"G": 0, "auxiliary?": 4, "residual?": 5},
preserve_edge_attrs={"residual": {"capacity": float("inf")}},
graphs={"G": 0, "auxiliary?": 4},
preserve_node_attrs={"auxiliary": {"id": None}},
preserve_graph_attrs={"auxiliary", "residual"},
preserve_graph_attrs={"auxiliary"},
)
def minimum_st_node_cut(G, s, t, flow_func=None, auxiliary=None, residual=None):
r"""Returns a set of nodes of minimum cardinality that disconnect source
13 changes: 4 additions & 9 deletions networkx/algorithms/connectivity/disjoint_paths.py
@@ -20,12 +20,8 @@


@nx._dispatchable(
graphs={"G": 0, "auxiliary?": 5, "residual?": 6},
preserve_edge_attrs={
"auxiliary": {"capacity": float("inf")},
"residual": {"capacity": float("inf")},
},
preserve_graph_attrs={"residual"},
graphs={"G": 0, "auxiliary?": 5},
preserve_edge_attrs={"auxiliary": {"capacity": float("inf")}},
)
def edge_disjoint_paths(
G, s, t, flow_func=None, cutoff=None, auxiliary=None, residual=None
@@ -235,10 +231,9 @@ def edge_disjoint_paths(


@nx._dispatchable(
graphs={"G": 0, "auxiliary?": 5, "residual?": 6},
preserve_edge_attrs={"residual": {"capacity": float("inf")}},
graphs={"G": 0, "auxiliary?": 5},
preserve_node_attrs={"auxiliary": {"id": None}},
preserve_graph_attrs={"auxiliary", "residual"},
preserve_graph_attrs={"auxiliary"},
)
def node_disjoint_paths(
G, s, t, flow_func=None, cutoff=None, auxiliary=None, residual=None
1 change: 1 addition & 0 deletions networkx/algorithms/connectivity/stoerwagner.py
@@ -94,6 +94,7 @@ def stoer_wagner(G, weight="weight", heap=BinaryHeap):
G = nx.Graph(
(u, v, {"weight": e.get(weight, 1)}) for u, v, e in G.edges(data=True) if u != v
)
G.__networkx_cache__ = None # Disable caching

for u, v, e in G.edges(data=True):
if e["weight"] < 0:
2 changes: 1 addition & 1 deletion networkx/algorithms/cycles.py
@@ -764,7 +764,7 @@ def _chordless_cycle_search(F, B, path, length_bound):


@not_implemented_for("undirected")
@nx._dispatchable
@nx._dispatchable(mutates_input=True)
def recursive_simple_cycles(G):
"""Find simple cycles (elementary circuits) of a directed graph.

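mutates_input=True tells the dispatch machinery that recursive_simple_cycles modifies the graph it is given, so a cached backend conversion of that graph cannot be trusted afterwards. A hedged sketch of flagging a mutating helper the same way (hypothetical function):

    import networkx as nx

    @nx._dispatchable(mutates_input=True)
    def prune_self_loops(G):
        # Mutates G, so the dispatcher must not keep a stale cached conversion of it.
        G.remove_edges_from(list(nx.selfloop_edges(G)))
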
2 changes: 2 additions & 0 deletions networkx/algorithms/distance_measures.py
@@ -629,6 +629,8 @@ def barycenter(G, weight=None, attr=None, sp=None):
barycenter_vertices = [v]
elif barycentricity == smallest:
barycenter_vertices.append(v)
if attr is not None:
nx._clear_cache(G)
return barycenter_vertices


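barycenter only writes to the graph when attr is given, so the cache is cleared on that branch alone; when attr is None the graph is untouched and any cached conversion stays valid. A hedged sketch of the same guard with a hypothetical helper:

    import networkx as nx

    def annotate_scores(G, scores, attr=None):
        if attr is not None:
            nx.set_node_attributes(G, scores, attr)
            nx._clear_cache(G)    # graph was written to: invalidate cached conversions
        return scores
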
9 changes: 2 additions & 7 deletions networkx/algorithms/flow/boykovkolmogorov.py
@@ -10,13 +10,7 @@
__all__ = ["boykov_kolmogorov"]


@nx._dispatchable(
graphs={"G": 0, "residual?": 4},
edge_attrs={"capacity": float("inf")},
preserve_edge_attrs={"residual": {"capacity": float("inf")}},
preserve_graph_attrs={"residual"},
returns_graph=True,
)
@nx._dispatchable(edge_attrs={"capacity": float("inf")}, returns_graph=True)
def boykov_kolmogorov(
G, s, t, capacity="capacity", residual=None, value_only=False, cutoff=None
):
@@ -162,6 +156,7 @@ def boykov_kolmogorov(
"""
R = boykov_kolmogorov_impl(G, s, t, capacity, residual, cutoff)
R.graph["algorithm"] = "boykov_kolmogorov"
nx._clear_cache(R)
return R


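Each max-flow wrapper now calls nx._clear_cache(R) on the residual network before returning it, because the *_impl routines build and mutate R outside the dispatcher's view; the identical line appears in dinitz, edmonds_karp, preflow_push, and shortest_augmenting_path below. A hedged sketch of the shared shape (the wrapper name is hypothetical):

    import networkx as nx

    def _run_flow(G, s, t, impl, name):
        R = impl(G, s, t)          # residual network, mutated while the flow is computed
        R.graph["algorithm"] = name
        nx._clear_cache(R)         # drop anything cached on R before handing it back
        return R
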
9 changes: 2 additions & 7 deletions networkx/algorithms/flow/dinitz_alg.py
@@ -10,13 +10,7 @@
__all__ = ["dinitz"]


@nx._dispatchable(
graphs={"G": 0, "residual?": 4},
edge_attrs={"capacity": float("inf")},
preserve_edge_attrs={"residual": {"capacity": float("inf")}},
preserve_graph_attrs={"residual"},
returns_graph=True,
)
@nx._dispatchable(edge_attrs={"capacity": float("inf")}, returns_graph=True)
def dinitz(G, s, t, capacity="capacity", residual=None, value_only=False, cutoff=None):
"""Find a maximum single-commodity flow using Dinitz' algorithm.

@@ -141,6 +135,7 @@ def dinitz(G, s, t, capacity="capacity", residual=None, value_only=False, cutoff
"""
R = dinitz_impl(G, s, t, capacity, residual, cutoff)
R.graph["algorithm"] = "dinitz"
nx._clear_cache(R)
return R


15 changes: 2 additions & 13 deletions networkx/algorithms/flow/edmondskarp.py
@@ -8,12 +8,6 @@
__all__ = ["edmonds_karp"]


@nx._dispatchable(
graphs="R",
preserve_edge_attrs={"R": {"capacity": float("inf"), "flow": 0}},
preserve_graph_attrs=True,
mutates_input=True,
)
def edmonds_karp_core(R, s, t, cutoff):
"""Implementation of the Edmonds-Karp algorithm."""
R_nodes = R.nodes
@@ -123,13 +117,7 @@ def edmonds_karp_impl(G, s, t, capacity, residual, cutoff):
return R


@nx._dispatchable(
graphs={"G": 0, "residual?": 4},
edge_attrs={"capacity": float("inf")},
preserve_edge_attrs={"residual": {"capacity": float("inf")}},
preserve_graph_attrs={"residual"},
returns_graph=True,
)
@nx._dispatchable(edge_attrs={"capacity": float("inf")}, returns_graph=True)
def edmonds_karp(
G, s, t, capacity="capacity", residual=None, value_only=False, cutoff=None
):
@@ -249,4 +237,5 @@ def edmonds_karp(
"""
R = edmonds_karp_impl(G, s, t, capacity, residual, cutoff)
R.graph["algorithm"] = "edmonds_karp"
nx._clear_cache(R)
return R
9 changes: 2 additions & 7 deletions networkx/algorithms/flow/preflowpush.py
@@ -288,13 +288,7 @@ def global_relabel(from_sink):
return R


@nx._dispatchable(
graphs={"G": 0, "residual?": 4},
edge_attrs={"capacity": float("inf")},
preserve_edge_attrs={"residual": {"capacity": float("inf")}},
preserve_graph_attrs={"residual"},
returns_graph=True,
)
@nx._dispatchable(edge_attrs={"capacity": float("inf")}, returns_graph=True)
def preflow_push(
G, s, t, capacity="capacity", residual=None, global_relabel_freq=1, value_only=False
):
@@ -427,4 +421,5 @@ def preflow_push(
"""
R = preflow_push_impl(G, s, t, capacity, residual, global_relabel_freq, value_only)
R.graph["algorithm"] = "preflow_push"
nx._clear_cache(R)
return R
9 changes: 2 additions & 7 deletions networkx/algorithms/flow/shortestaugmentingpath.py
@@ -163,13 +163,7 @@ def relabel(u):
return R


@nx._dispatchable(
graphs={"G": 0, "residual?": 4},
edge_attrs={"capacity": float("inf")},
preserve_edge_attrs={"residual": {"capacity": float("inf")}},
preserve_graph_attrs={"residual"},
returns_graph=True,
)
@nx._dispatchable(edge_attrs={"capacity": float("inf")}, returns_graph=True)
def shortest_augmenting_path(
G,
s,
@@ -302,4 +296,5 @@ def shortest_augmenting_path(
"""
R = shortest_augmenting_path_impl(G, s, t, capacity, residual, two_phase, cutoff)
R.graph["algorithm"] = "shortest_augmenting_path"
nx._clear_cache(R)
return R
1 change: 1 addition & 0 deletions networkx/algorithms/flow/utils.py
@@ -102,6 +102,7 @@ def build_residual_network(G, capacity):
raise nx.NetworkXError("MultiGraph and MultiDiGraph not supported (yet).")

R = nx.DiGraph()
R.__networkx_cache__ = None # Disable caching
R.add_nodes_from(G)

inf = float("inf")
2 changes: 2 additions & 0 deletions networkx/algorithms/planarity.py
@@ -951,6 +951,7 @@ def remove_node(self, n):
raise nx.NetworkXError(
f"The node {n} is not in the planar embedding."
) from err
nx._clear_cache(self)

def remove_nodes_from(self, nodes):
"""Remove multiple nodes.
@@ -1233,6 +1234,7 @@ def remove_edge(self, u, v):
raise nx.NetworkXError(
f"The edge {u}-{v} is not in the planar embedding."
) from err
nx._clear_cache(self)

def remove_edges_from(self, ebunch):
"""Remove all edges specified in ebunch.
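PlanarEmbedding defines its own mutating methods, so each of them clears the cache on self; any Graph subclass that mutates itself outside the stock add_*/remove_* bookkeeping needs the same treatment. A hedged sketch with a hypothetical subclass:

    import networkx as nx

    class AnnotatedGraph(nx.Graph):
        def drop_isolates(self):
            # Custom mutation: invalidate this instance's conversion cache afterwards.
            self.remove_nodes_from(list(nx.isolates(self)))
            nx._clear_cache(self)
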