From 581d0a1b150cdbb01996d4915f674c493786d264 Mon Sep 17 00:00:00 2001 From: Deven Date: Tue, 27 Jan 2015 13:01:38 -0500 Subject: [PATCH 1/7] Cacti changes made --- src/setup.py | 4 +- src/zen/control/__init__.py | 3 +- src/zen/control/reachability.py | 96 ++++++++++++++++++++++++++++++++- src/zen/test.py | 3 ++ 4 files changed, 103 insertions(+), 3 deletions(-) diff --git a/src/setup.py b/src/setup.py index 3300343..b9f3313 100644 --- a/src/setup.py +++ b/src/setup.py @@ -45,7 +45,9 @@ Extension('zen.algorithms.community.lpa', ['zen/algorithms/community/lpa.pyx'], include_dirs=[numpy_include_dir]), Extension('zen.algorithms.community.label_rank', ['zen/algorithms/community/label_rank.pyx'], include_dirs=[numpy_include_dir]), Extension('zen.algorithms.community.spectral_modularity', ['zen/algorithms/community/spectral_modularity.pyx'], include_dirs=[numpy_include_dir]), -Extension('zen.algorithms.community.louvain', ['zen/algorithms/community/louvain.pyx'], include_dirs=[numpy_include_dir])] +Extension('zen.algorithms.community.louvain', ['zen/algorithms/community/louvain.pyx'], include_dirs=[numpy_include_dir]), +Extension('zen.util.max_weight_bmatching', ['zen/util/max_weight_bmatching.pyx'], \ + language='c++', include_dirs=[fiboheap_include_dir], libraries=['python2.7','m','util','dl'] )] setup( diff --git a/src/zen/control/__init__.py b/src/zen/control/__init__.py index c9f4b6a..78b45bd 100644 --- a/src/zen/control/__init__.py +++ b/src/zen/control/__init__.py @@ -36,4 +36,5 @@ from profile import profile from reachability import num_min_controls -from pplot import profile_plot, profile_heatmap \ No newline at end of file +from pplot import profile_plot, profile_heatmap +from cacti import build_cacti diff --git a/src/zen/control/reachability.py b/src/zen/control/reachability.py index 4da1792..a7ac3db 100644 --- a/src/zen/control/reachability.py +++ b/src/zen/control/reachability.py @@ -5,6 +5,7 @@ from zen import DiGraph, maximum_matching_ from zen.exceptions import type_check +import zen def num_min_controls(G): """ @@ -18,4 +19,97 @@ def num_min_controls(G): matched_edges = maximum_matching_(G) return max([len(G) - len(matched_edges),1]) - \ No newline at end of file + + +def generic_rank_(G,controls): + """ + Find the maximum matching for the directed graph such that all + matching paths (vis a vis the Hopcroft Karp algorithm) start with one of the seeds. + + Return the generic rank. + """ + + ##### + # apply the transformation to produce a bipartite graph which combines the controls + GT = zen.BipartiteGraph() + tnode2node = {} + node2unode = {} + node2vnode = {} + vnode2unode = {} + + ##### + # first transform the graph itself + for i in G.nodes_iter_(): + unode = GT.add_u_node() + vnode = GT.add_v_node() + tnode2node[unode] = i + tnode2node[vnode] = i + node2unode[i] = unode + node2vnode[i] = vnode + vnode2unode[vnode] = unode + + # add the edges + for i in G.edges_iter_(): + u,v = G.endpoints_(i) + GT.add_edge_(node2unode[u],node2vnode[v],i) + + # now add the controls in + for control in controls: + cnode = GT.add_u_node() + for v in control: + GT.add_edge_(cnode,node2vnode[v]) + + ##### + # run the bipartite matching + max_matching = zen.matching.hopcroft_karp_(GT) + + return len(max_matching) + + +def kalman_generic_rank(G,controls,repeats=100): + """ + Finds the reachability corresponding to a graph (adjacency matrix A) and its + controls (a matrix B) by brute force computing the Kalman rank condition: + rank [B AB A^2B A^3B ... A^(n-1)B]. 
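A minimal standalone sketch of the rank test above (not code from this module; the A, B and n below are made-up stand-ins for a 2-node toy system):

    import numpy as np

    A = np.array([[0., 0.],
                  [1., 0.]])   # toy adjacency: node 0 drives node 1
    B = np.array([[1.],
                  [0.]])       # a single control attached to node 0
    n = A.shape[0]

    # C = [B  AB  A^2B ... A^(n-1)B]; full rank <=> the pair (A, B) is controllable
    C = np.hstack([np.linalg.matrix_power(A, k).dot(B) for k in range(n)])
    print np.linalg.matrix_rank(C)   # 2, so both nodes are controllable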
+ In order to compute the rank generically, we generate random entries for A and B, + subject to their zero/non-zero sparcity patterns and compute the true rank. We + repeat this "repeats" times (default is 100) and return the largest value. + """ + from numpy import array, zeros, nonzero, eye, dot + from numpy.linalg import matrix_rank + from numpy.random import rand as nprand + + rank = 0 + N = G.max_node_idx+1 # there could be some missing indexes in the graph (N > n) + n = G.num_nodes + A = G.matrix().T + + # build the B matrix + m = len(controls) + B = zeros((N,m)) + for i in range(m): + for d_idx in controls[i]: + B[d_idx,i] = 1 + + nonzero_idxs = nonzero(A>0) + num_nonzero_idxs = len(nonzero_idxs[0]) + for r in range(repeats): + # create a randomized instance of A + A1 = zeros((N,N)) + A1[nonzero_idxs] = nprand(num_nonzero_idxs) + + # compute the controllability matrix + An = eye(N) + C = zeros((N,m*N)) + C[:,0:m] = B + for i in range(1,N): + An = dot(An,A1) + C[:,i*m:i*m+m] = dot(An,B) + + # generic rank is the max of all instance ranks + new_rank = matrix_rank(C) + if new_rank > rank: + rank = new_rank + + return rank + diff --git a/src/zen/test.py b/src/zen/test.py index c9a0542..02789d8 100644 --- a/src/zen/test.py +++ b/src/zen/test.py @@ -73,5 +73,8 @@ from tests.community import * from tests.communityset import * +# cacti test +from tests.cacti import * + if __name__ == '__main__': unittest.main() From 55e2b43c57857e45bb300a66035952cc5ec49b94 Mon Sep 17 00:00:00 2001 From: Deven Date: Tue, 27 Jan 2015 13:01:59 -0500 Subject: [PATCH 2/7] Cacti changes made --- src/zen/control/cacti.py | 372 ++++++++++++++++++++++++++ src/zen/tests/cacti.py | 201 ++++++++++++++ src/zen/util/max_weight_bmatching.pyx | 231 ++++++++++++++++ src/zen/util/max_weight_matching.hpp | 213 +++++++++++++++ 4 files changed, 1017 insertions(+) create mode 100644 src/zen/control/cacti.py create mode 100644 src/zen/tests/cacti.py create mode 100644 src/zen/util/max_weight_bmatching.pyx create mode 100644 src/zen/util/max_weight_matching.hpp diff --git a/src/zen/control/cacti.py b/src/zen/control/cacti.py new file mode 100644 index 0000000..4e709d9 --- /dev/null +++ b/src/zen/control/cacti.py @@ -0,0 +1,372 @@ +""" +This module provides functionality for building, querying, and manipulating the cacti structure of a directed network. + +For information cacti structures, see the following reference: +Commault, Dion, Van der Woude. Characterization of generic properties of linear structured systems for efficient computations. Kybernetika, 38(5):503-520, 2002. +""" +import sys, zen +from zen.control.reachability import generic_rank_ +from zen.util.max_weight_bmatching import control_rooted_max_weight_matching +from collections import OrderedDict +from numpy import * +import itertools as it +sys.setrecursionlimit(2**27) + +class Stem: + """ + An instance of this class represents a single stem (and any associated buds) in a cacti structure. + """ + + def __init__(self,node_seq): + self._node_dict = OrderedDict.fromkeys(node_seq) + + def __contains__(self,x): + """ + Return True if x is a node in the stem (containment doesn't extend to buds). + """ + return x in self._node_dict + + def __len__(self): + """ + Number of nodes in stem + """ + return len(self._node_dict) + + def __iter__(self): + """ + Iterate over the nodes in the stem itself. 
+ """ + return self._node_dict.iterkeys() + + def _clear(self): + self._node_dict.clear() + + def origin(self): + """ + The origin or starting node of the stem + """ + return next(self._node_dict.iterkeys(), None) + + def terminus(self): + """ + The terminus or end node of the stem + """ + x = None + for x in self._node_dict.iterkeys(): + pass + return x + + def _extend(self, nodes): + for u in nodes: + self._node_dict[u] = None + + def length_with_buds(self): + """ + Returns length of stem including the buds attached to it + """ + l = len(self._node_dict) + for bud in self._node_dict.itervalues(): + if bud is not None: + l += len(bud) + return l + + def _add_bud(self,dnode,cycle,dnode_target): + cycle._reorder_origin(dnode_target) + if dnode == self.terminus(): + self._extend(cycle) + return False + else: + self._node_dict[dnode] = cycle + cycle._set_stem(self, dnode) + return True + + def buds(self): + """ + Returns list of buds (Cycle objects) that are attached to this stem + """ + return list(self.buds_iter()) + + def buds_iter(self): + """ + Returns iterator over buds (Cycle objects) that are attached to this stem + """ + return (x for x in self._node_dict.itervalues() if x is not None) + +class Cycle: + """ + Cycles can either be buds that are connected to a stem by a distinguished node or independent cycles (no associated stem). + """ + def __init__(self,node_seq): + self._node_dict = OrderedDict.fromkeys(node_seq) + self._stem = None + self._dnode = None + + def __contains__(self,x): + """ + Return True if x is a node in the cycle + """ + return x in self._node_dict + + def __len__(self): + """ + Number of nodes in cycle + """ + return len(self._node_dict) + + def __iter__(self): + """ + Iterate over nodes in the cycle + """ + return self._node_dict.iterkeys() + + def _clear(self): + self._node_dict.clear() + + def origin(self): + """ + A starting node of this cycle (in case of bud this is the node where + distinguished edge is pointing to) + """ + return next(self._node_dict.iterkeys(), None) + + def _reorder_origin(self, x): + if x not in self._node_dict.iterkeys(): + return + toremove = list(it.takewhile(lambda a: a!=x, self._node_dict.iterkeys())) + for k in toremove: + t = self._node_dict[k] + del self._node_dict[k] + self._node_dict[k] = t + + def _set_stem(self,stem,dnode): + self._stem = stem + self._dnode = dnode + + def stem(self): + """ + Return stem to which this cycle is attached if it is a bud, otherwise + return None + """ + return self._stem + + def dist_node(self): + """ + Return distinguished node of the stem to which this cycle is attached + if it is a bud, otherwise returh None + """ + return self._dnode + + def is_bud(self): + """ + Returns True if this cycle is a bud + """ + return self._stem != None + +# helper function to construct stems and cycles from a given matching +def _stems_cycs_from_matching(matching, roots=None, origins=[]): + outmap = {a:b for a,b in matching} + vis = set() + indegmap = {} + for a,b in matching: + indegmap.setdefault(a,0) + indegmap[b] = indegmap.get(b, 0) + 1 + + if roots is None: + roots = [u for u in indegmap if indegmap[u] == 0] + + def recur(z,cur,stems,cycs): + if z in vis: + cycs.append(Cycle(cur)) + return + + vis.add(z) + cur.append(z) + + if z not in outmap: + stems.append(Stem(cur)) + return + + recur(outmap[z],cur,stems,cycs) + + stems, cycs = [],[] + + for r in roots: + if r not in vis: + recur(r,[],stems,cycs) + + origins = set(origins) + roots = [x for x in outmap.iterkeys() if x not in vis] + roots.sort(key=lambda 
x: 0 if x in origins else 1) + for r in roots: + recur(r,[],stems,cycs) + + return stems, cycs + + +def build_cacti(G, forced_ctls=None, num_ctls=None, shuffle=False, with_cycles=False): + """ + This method constructs cacti for a given directed graph G. + (The nodes in forced_ctls should be indices of DiGraph nodes and not node objects) + Inputs: 1) A directed Graph G + 2) if forced_ctls is not None, it has to be a list of tuples + representing controls. Each tuple represents a control node such + that the control is to be attached to each node in the tuple. For + example, [(1,), (4,)] would mean that two controls are attached to + nodes 1 and 4 respectively. Use this option for when controls are + fixed. Weigthed maximum matching is performed. + 3) num_ctls : This option is not implemented yet and should be kept + None + 4) if shuffle is True, matching algorithm is randomized so that + each call to build cacti would result in a different cacti + structure. This option is implemented yet for case of unweighted + maximum matching + 5) if forced_ctls is given, with_cycles option dictates whether any + independent cycles that are not reachable from the forced_ctls + controls should be included in the cacti + 6) if none of the force_ctls or num_ctls is given, unweighted + matching is performed such that minimum number of controls required + is calculated and the cacti represents the corresponding maximum matching + + Returns Cacti object + """ + cact = Cacti(G) + + if forced_ctls is not None: + cact._forced_control_case(forced_ctls, shuffle, with_cycles) + elif num_ctls is not None: + cact._free_control_case(num_ctls, shuffle) + else: + cact._min_controls_case() + + cact._build_cacti_from_stemscycles() + return cact + + +class Cacti: + """ + The Cacti class represents the cacti control structure of a given network. It can be used to find the location (and number) of inputs required to control the network as + well as to access the underlying cacti structure. It can also be used to find the extent to which a network can be controlled + by a specified set of controls and the related constrained cacti. 
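A minimal usage sketch (mirroring zen/tests/cacti.py; assumes zen.control re-exports build_cacti as in zen/control/__init__.py):

    import zen

    G = zen.DiGraph()
    G.add_edge('x', 'y')
    G.add_edge('x', 'z')
    G.add_edge('y', 'z')

    # unweighted matching: minimum number of inputs for full control
    cact = zen.control.build_cacti(G)
    print cact.num_controls()               # 1
    print cact.controls()                   # [(G.node_idx('x'),)]

    # weighted matching with an input fixed to 'x': how much can be controlled?
    cact = zen.control.build_cacti(G, forced_ctls=[(G.node_idx('x'),)])
    print cact.num_controllable_nodes()     # 3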
+ """ + + def __init__(self,G): + self._G = G + + self._stems = [] + self._cycles = [] + self._matching = [] + self._controls = [] + self._controllable_nodes=set() + + def num_controls(self): + """ + Return number of controls inputs + """ + return len(self._controls) + + def stems(self): + return list(self._stems) + + def cycles(self): + return list(self._cycles) + + def non_bud_cycles(self): + """ + Return the non-bud (independent cycles) + """ + return filter(lambda x: not x.is_bud(), self._cycles) + + + # helper function to build cacti from stems and cycles + def _build_cacti_from_stemscycles(self): + self._stems.sort(key=lambda x:len(x), reverse= True) + self._cycles.sort(key=lambda x:len(x), reverse= True) + + for stem in self._stems: + for u in stem: + for v in self._G.out_neighbors_(u): + cyc = [c for c in self._cycles if not c.is_bud() and v in c] + if cyc: + if not stem._add_bud(u,cyc[0],v): + cyc[0]._clear() + + self._cycles = [c for c in self._cycles if len(c) > 0] + + controls = [ [stem.origin()] for stem in self._stems ] + + if controls: + idx = 0 + for cyc in self._cycles: + if not cyc.is_bud(): + controls[idx].append(cyc.origin()) + idx = (idx+1) % len(controls) + controls = [tuple(a) for a in controls] + else: + controls = [ tuple(cyc.origin() for cyc in self._cycles) ] + + self._controls = controls + + ctlable_nodes = set() + ctlable_nodes.update([a for s in self._stems for a in s]) + ctlable_nodes.update([a for c in self._cycles for a in c]) + self._controllable_nodes = ctlable_nodes + + + # To find minimum controls required to fully control a network using + # Unweighted maximum + # Matching + def _min_controls_case(self): + #TODO Shuffle + G = self._G + matching = set(zen.matching.maximum_matching_(G)) + matching = [G.endpoints_(eidx) for eidx in matching] + stems, cycles = _stems_cycs_from_matching(matching) + for u in set(G.nodes_iter_()).difference(set(x for y in matching for x in y)) : + stems.append(Stem([u])) + self._matching, self._stems, self._cycles = matching, stems, cycles + + # To find cacti and hence controllable nodes given a fixed set of controls + # (forced_ctls) using maximum weighted matching + def _forced_control_case(self, forced_ctls, doshuffle=False, indcyc=False): + G = self._G + origins = [a for b in forced_ctls for a in b] + matching,roots = control_rooted_max_weight_matching(G, forced_ctls, + doshuffle=doshuffle, indcyc=indcyc)[1:3] + matching = [G.endpoints_(eidx) for eidx in matching] + self._stems, self._cycles = _stems_cycs_from_matching(matching, roots, origins) + self._matching = matching + + + def _free_control_case(self, num_ctls, doshuffle=False): + raise NotImplementedError("Yet to be implemented.") + + + def controls(self): + """ + Returns list of the controls that are required for maximum control of + the network. In case of unweigthed matching, it is the minimum number + of controls required for full control of the network. In case of + weighted matching (when fixed set of controls are given), this method + returns the controls that are sufficent for controlling the maximum + possible nodes of the network. 
+ """ + return list(self._controls) + + def controllable_nodes(self): + """ + Returns set of nodes that are controllable + """ + return set(self._controllable_nodes) + + def num_controllable_nodes(self): + """ + Return number of nodes that are controllable + """ + return len(self._controllable_nodes) + + def matching(self): + """ + Return matching as calculated by the maximum matching algorithm (unweighted or weighted as the case maybe) + """ + return list(self._matching) + diff --git a/src/zen/tests/cacti.py b/src/zen/tests/cacti.py new file mode 100644 index 0000000..dae5003 --- /dev/null +++ b/src/zen/tests/cacti.py @@ -0,0 +1,201 @@ +import unittest +from numpy.random import randint +from zen import * + +class CactiUnweightedMatching(unittest.TestCase): + + def test_simple(self): + + G = DiGraph() + G.add_edge('x','y') + G.add_edge('x','z') + G.add_edge('y','z') + + cacti = control.build_cacti(G) + self.assertEqual(cacti.num_controls(),1) + + controls = cacti.controls() + self.assertEqual(controls[0][0],G.node_idx('x')) + + # change the graph + G.rm_edge('y','z') + cacti = control.build_cacti(G) + self.assertEqual(cacti.num_controls(),2) + controls = set(cacti.controls()) + sol1 = set([(G.node_idx('x'),),(G.node_idx('z'),)]) + sol2 = set([(G.node_idx('x'),),(G.node_idx('y'),)]) + self.assertTrue(controls == sol1 or controls == sol2) + + def test_non_bud_cycles(self): + # create a network consisting of three cycles + G = DiGraph() + G.add_edge(1,2) + G.add_edge(2,1) + c1 = set([G.node_idx(1),G.node_idx(2)]) + + G.add_edge(3,4) + G.add_edge(4,3) + c2 = set([G.node_idx(3),G.node_idx(4)]) + + G.add_edge(5,6) + G.add_edge(6,7) + G.add_edge(7,5) + c3 = set([G.node_idx(5),G.node_idx(6),G.node_idx(7)]) + + cacti = control.build_cacti(G) + self.assertEqual(cacti.num_controls(),1) + + controls = set(cacti.controls()[0]) + self.assertTrue(len(c1.intersection(controls)) == 1) + self.assertTrue(len(c2.intersection(controls)) == 1) + self.assertTrue(len(c3.intersection(controls)) == 1) + + def test_stem_cycle(self): + G = DiGraph() + G.add_edge(1,2) + G.add_edge(2,3) + G.add_edge(4,5) + G.add_edge(5,4) + + cacti = control.build_cacti(G) + self.assertEqual(cacti.num_controls(),1) + + def test_stem_bud(self): + G = DiGraph() + G.add_edge(1,2) + G.add_edge(2,3) + G.add_edge(2,4) + G.add_edge(4,5) + G.add_edge(5,4) + + cacti = control.build_cacti(G) + self.assertEqual(cacti.num_controls(),1) + + +class CactiWeightedMatchingFixedControls(unittest.TestCase): + ''' + graphs.append(G) + G = barabasi_albert(N,k,directed=True,seed=int(time()+getpid()*1000)) + graphs.append(G) + G = local_attachment(N,k,max(int(.25*k),1)) + graphs.append(G) + G = local_attachment(N,k,max(int(.5*k),1)) + graphs.append(G) + G = local_attachment(N,k,max(int(.75*k),1)) + graphs.append(G) + ''' + def _test_with_kalman(self, G, ctls=None): + if ctls is None: + ctls = set( randint(len(G)) for _ in xrange(randint(1,len(G))) ) + ctls = [tuple([a]) for a in ctls] + + cact = control.build_cacti(G, forced_ctls=ctls, shuffle=True) + + NC = cact.num_controllable_nodes() + K = control.reachability.kalman_generic_rank(G, ctls) + + self.assertEqual(NC, K) + + return cact + + + def test_random_graphs_ER(self): + N = 20 + for k in [2,4,6]: + G = generating.erdos_renyi(N, float(k)/N, directed=True, seed=2**20) + + self._test_with_kalman(G) + + def test_random_graphs_BA(self): + N = 20 + for k in [2,4,6]: + G = generating.barabasi_albert(N,k,directed=True,seed=2**20) + + self._test_with_kalman(G) + + def test_stem_cycle1(self): + G = DiGraph() + 
G.add_nodes(5) + G.add_edge_(0,1) + G.add_edge_(1,2) + G.add_edge_(3,4) + G.add_edge_(4,3) + + ctls = [(0,)] + cact = control.build_cacti(G, forced_ctls=ctls, shuffle=True, with_cycles=True) + self.assertEqual(cact.num_controllable_nodes(),5) + + def test_stem_cycle2(self): + G = DiGraph() + G.add_nodes(5) + G.add_edge_(0,1) + G.add_edge_(1,2) + G.add_edge_(3,4) + G.add_edge_(4,3) + + ctls = [(0,)] + cact = control.build_cacti(G, forced_ctls=ctls, shuffle=True) + self.assertEqual(cact.num_controllable_nodes(),3) + + def test_stem_cycle3(self): + G = DiGraph() + G.add_nodes(5) + G.add_edge_(0,1) + G.add_edge_(1,2) + G.add_edge_(3,4) + G.add_edge_(4,3) + + ctls = [(3,)] + cact = control.build_cacti(G, forced_ctls=ctls, shuffle=True) + self.assertEqual(cact.num_controllable_nodes(),2) + + def test_stem_bud1(self): + G = DiGraph() + G.add_nodes(5) + G.add_edge_(0,1) + G.add_edge_(1,2) + G.add_edge_(1,3) + G.add_edge_(3,4) + G.add_edge_(4,3) + + ctls = [(0,)] + cact = control.build_cacti(G, forced_ctls=ctls, shuffle=True) + self.assertEqual(cact.num_controllable_nodes(),5) + + def test_stem_bud2(self): + G = DiGraph() + G.add_nodes(5) + G.add_edge_(0,1) + G.add_edge_(1,2) + G.add_edge_(1,3) + G.add_edge_(3,4) + G.add_edge_(4,3) + + ctls = [(1,)] + cact = control.build_cacti(G, forced_ctls=ctls, shuffle=True) + self.assertEqual(cact.num_controllable_nodes(),4) + + def test_cacti_form1(self): + G = DiGraph() + G.add_nodes(5) + G.add_edge_(0,1) + G.add_edge_(1,2) + G.add_edge_(1,3) + G.add_edge_(3,4) + G.add_edge_(4,3) + + ctls = [(0,)] + cact = control.build_cacti(G, forced_ctls=ctls, shuffle=True) + stems, cycles = cact.stems(), cact.cycles() + + self.assertEqual(len(stems), 1) + self.assertEqual(len(cycles), 1) + self.assertEqual(stems[0].origin(), 0) + self.assertEqual(cycles[0].origin(), 3) + self.assertEqual(len(stems[0].buds()), 1) + + self.assertIsNotNone(cycles[0].stem()) + self.assertEqual(cycles[0].dist_node(),1) + +if __name__ == '__main__': + unittest.main() diff --git a/src/zen/util/max_weight_bmatching.pyx b/src/zen/util/max_weight_bmatching.pyx new file mode 100644 index 0000000..5366351 --- /dev/null +++ b/src/zen/util/max_weight_bmatching.pyx @@ -0,0 +1,231 @@ +#cython: boundscheck=False, wraparound=False, nonecheck=False, infer_types=True + +import zen, numpy as np +import networkx as nx +import matplotlib.pyplot as plt +from time import time +from os import getpid +from numpy.random import randint + +from libcpp.vector cimport vector +from libc.stdlib cimport malloc, free +from cython.operator cimport dereference as dref +from cython.operator cimport preincrement as incr + + +cdef extern from "max_weight_matching.hpp": + cdef struct edge_t: + int src, tgt + double w + edge_t(int, int, double) + cdef void mwb_matching(vector[vector[int]] &G, vector[edge_t] &E, \ + vector[int] &U, vector[int] &V, vector[int] &M) + + + +def control_rooted_max_weight_matching(Gi,controls, doshuffle=False, indcyc=False): + cdef: + vector[int] *U + vector[int] *V + vector[edge_t] *E + vector[vector[int]] *G + vector[int].iterator it + vector[edge_t].iterator ite + unsigned int u,v,N,t,x,y,r,s,e + double w,MAXW + edge_t temp_edge + vector[int] *M + + U = new vector[int]() + V = new vector[int]() + M = new vector[int]() + E = new vector[edge_t]() + + if controls is None: + controls = [] + node2uv, uv2node = {}, {} + + np.random.seed(int(time()+getpid()*np.random.random()*1000)) + + #remove non reachable nodes + if controls: + vis = set() + + def dfs(r): + vis.add(r) + for v in Gi.out_neighbors_(r): + 
if v not in vis: + dfs(v) + + for ctl in controls: + for driver in ctl: + if driver not in vis: + dfs(driver) + else: + vis = set(Gi.nodes_()) + + + # first transform the graph itself + N = 0 + #for t in vis: + for t in Gi.nodes_iter_(): + if indcyc or t in vis: + u, v = 2*N, 2*N+1 + node2uv[t] = N + uv2node[N] = t + N += 1 + U.push_back(u) + V.push_back(v) + if doshuffle: + x, y = randint(U.size()), U.size()-1 + s = dref(U)[x] + dref(U)[x] = dref(U)[y] + dref(U)[y] = s + x, y = randint(V.size()), V.size()-1 + s = dref(V)[x] + dref(V)[x] = dref(V)[y] + dref(V)[y] = s + + + MAXW = sum(len(ctl) for ctl in controls) + Gi.size() + 100.0 + + # add the edges + for e in Gi.edges_iter_(): + x, y = Gi.endpoints_(e) + #if x in vis and y in vis: + if indcyc or (x in vis and y in vis): + u,v = 2*node2uv[x], 2*node2uv[y]+1 + temp_edge.src = u + temp_edge.tgt = v + temp_edge.w = MAXW + 1.0 + E.push_back(temp_edge) + + + # add control nodes and forward edges with weight 1 + START_CNODE = N + for ctl in controls: + cnode = N + N += 1 + #if len(ctl) > 0: + #d = ctl[0] + for d in ctl: + u, v = 2*cnode, 2*node2uv[d]+1 + temp_edge.src = u + temp_edge.tgt = v + temp_edge.w = MAXW + 1.0 + E.push_back(temp_edge) + + for x in vis: + u, v = 2*node2uv[x], 2*cnode+1 + temp_edge.src = u + temp_edge.tgt = v + temp_edge.w = MAXW + E.push_back(temp_edge) + + u, v = 2*cnode, 2*cnode+1 + U.push_back(u) + V.push_back(v) + if doshuffle: + x, y = randint(U.size()), U.size()-1 + s = dref(U)[x] + dref(U)[x] = dref(U)[y] + dref(U)[y] = s + x, y = randint(V.size()), V.size()-1 + s = dref(V)[x] + dref(V)[x] = dref(V)[y] + dref(V)[y] = s + + + # add self loops with weight 0 + for s in xrange(N): + if s >= START_CNODE or not Gi.has_edge_(uv2node[s],uv2node[s]): + u, v = 2*s, 2*s+1 + temp_edge.src = u + temp_edge.tgt = v + temp_edge.w = MAXW + E.push_back(temp_edge) + + G = new vector[vector[int]](2*N, vector[int]()) + + for e in xrange(E.size()): + u,v,w = dref(E)[e].src, dref(E)[e].tgt, dref(E)[e].w + assert u%2==0 and v%2==1 + dref(G)[u].push_back(e) + dref(G)[v].push_back(e) + if doshuffle: + x, y = randint(dref(G)[u].size()), dref(G)[u].size()-1 + s = dref(G)[u][x] + dref(G)[u][x] = dref(G)[u][y] + dref(G)[u][y] = s + x, y = randint(dref(G)[v].size()), dref(G)[v].size()-1 + s = dref(G)[v][x] + dref(G)[v][x] = dref(G)[v][y] + dref(G)[v][y] = s + + ##### + # run the weighted bipartite matching + + mwb_matching(dref(G),dref(E),dref(U),dref(V),dref(M)) + + result, roots = [], [] + num_matched = 0 + for x in xrange(M.size()): + e = dref(M)[x] + u,v,w = dref(E)[e].src, dref(E)[e].tgt, dref(E)[e].w + assert u%2==0 and v%2==1 + if w > MAXW: + num_matched += 1 + if (u/2 < START_CNODE): + result.append(Gi.edge_idx_(uv2node[u/2],uv2node[v/2])) + else: + roots.append(uv2node[v/2]) + + + + # free memory + del G, U, V, E, M + + return num_matched, result, roots + + +if __name__ == '__main__': + + print 'Hello World' + + ''' + print 'MAXW = ', MAXW + print 'num_matched = ', num_matched + print 'U nodes--------' + it = U.begin() + while it != U.end(): + print dref(it) + incr(it) + + print 'V nodes--------' + it = V.begin() + while it != V.end(): + print dref(it) + incr(it) + + + ite = E.begin() + while ite != E.end(): + u,v,w = dref(ite).src, dref(ite).tgt, dref(ite).w + incr(ite) + print u/2,v/2,w + + for i in xrange(G.size()): + print i/2 ,' ----> ', + it = dref(G)[i].begin() + while it != dref(G)[i].end(): + u,v,w = dref(E)[dref(it)].src,dref(E)[dref(it)].tgt,dref(E)[dref(it)].w + incr(it) + if u == i: + print (u/2,v/2,w),' ;; ', + + print 
'' + + print 'result = ', [Gi.endpoints_(e) for e in result] + ''' + + diff --git a/src/zen/util/max_weight_matching.hpp b/src/zen/util/max_weight_matching.hpp new file mode 100644 index 0000000..6813d1b --- /dev/null +++ b/src/zen/util/max_weight_matching.hpp @@ -0,0 +1,213 @@ + +using namespace std; +#include +#include +#include +#include +#include + +struct edge_t { + int src, tgt; + double w; + edge_t(int s, int t, double w): src(s), tgt(t), w(w){} + edge_t() {} +}; + +void _augment_path_to(vector &E, int pred[], int v) { + int eidx = pred[v]; + while(eidx != -1) { + edge_t &edge = E[eidx]; + eidx = pred[edge.src]; + int t = edge.src; + edge.src = edge.tgt; + edge.tgt = t; + } +} + + +void _relax(vector< vector > &G, vector &E, int pred[], double dist[], + fibnode_t *nodes, vector &RB, double pot[], FiboHeap &pq, int a) +{ + vector &edges = G[a]; + for(vector::iterator iv = edges.begin(); iv != edges.end(); iv++) { + int eidx = *iv; + edge_t &edge = E[eidx]; + if(edge.src != a) + continue; + + int b = edge.tgt; + double db = dist[a] + (pot[a] + pot[b] - edge.w); + if(pred[b] == -1) { + dist[b] = db; pred[b] = eidx ; + RB.push_back(b); + nodes[b].init(); + nodes[b].val = b; nodes[b].key = db; nodes[b].inheap=true; + pq.insert(&nodes[b]); + } else if(db < dist[b]) { + dist[b] = db; pred[b] = eidx; + pq.decrease_key(&nodes[b],db); + } + } + +} + + +void mwb_matching(vector< vector > &G, vector &E, + vector &U, vector &V, vector &M) { + FiboHeap pq; + int N = G.size(); + double *pot = new double[N]; + bool *isfree = new bool[N]; + int *pred = new int[N]; + double *dist = new double[N]; + fibnode_t *nodes = new fibnode_t[N]; + vector RA, RB; + RA.reserve(N); RB.reserve(N); + + for(int i = 0; i < N; i++) { + pot[i] = dist[i] = 0.0; + isfree[i] = true; + pred[i] = -1; + nodes[i].init(); + } + + for(vector::iterator it = U.begin(); it != U.end(); it++) { + int a = *it; + double maxw = -INF; + edge_t *maxe = NULL; + + vector &edges = G[a]; + for(vector::iterator iv = edges.begin(); iv != edges.end(); iv++) { + edge_t &edge = E[*iv]; + if(edge.src != a) + continue; + + if(edge.w > maxw) { + maxw = edge.w; + maxe = &edge; + } + + } + + pot[a] = maxw; + if(maxe != NULL) { + assert( a != maxe->tgt); + if(isfree[maxe->tgt]) { + maxe->src = maxe->tgt; maxe->tgt = a; + isfree[maxe->src] = isfree[maxe->tgt] = false; + } + } + } + + for(vector::iterator it = U.begin(); it != U.end(); it++) { + int a = *it; + + if(!isfree[a]) continue; + + int bestA = a; + double minA = pot[a]; + dist[a] = 0; + RA.clear(); RB.clear(); + RA.push_back(a); + + _relax(G, E, pred, dist, nodes, RB, pot, pq, a); + + double delta=0.0; + while(true) { + int b = -1; + double db = -INF; + if(!pq.is_empty()) { + fibnode_t *bnode = pq.delete_min(); + b = bnode->val; + bnode->inheap = false; + db = dist[b]; + } + + //distinguish 3 cases + if(b == -1 || db >= minA) { + delta = minA; + // aug to best in A + _augment_path_to(E, pred, bestA); + isfree[a] = false; + isfree[bestA] = true; + break; + } else { + if(isfree[b]) { + delta = db; + //aug to b + _augment_path_to(E, pred, b); + isfree[a] = isfree[b] = false; + break; + } else { + //conti SP comp + int eidx = -1, a1 = -1; + vector &edges = G[b]; + for(vector::iterator iv = edges.begin(); iv != edges.end(); iv++) { + edge_t &edge = E[*iv]; + if(edge.src != b) + continue; + eidx = *iv; a1 = edge.tgt; + break; + } + assert(eidx != -1); + //a1,w1 = G[b].iteritems().next() + pred[a1] = eidx; dist[a1] = db; + RA.push_back(a1); + + if(db + pot[a1] < minA) { + bestA = a1; + minA = db + 
pot[a1]; + } + + _relax(G, E, pred, dist, nodes, RB, pot, pq, a1); + } + } + } + + // augment: pot update and reinitialize + while(RA.size() > 0) { + int ra = RA.back(); RA.pop_back(); + pred[ra] = -1; + double dpot = delta - dist[ra]; + if(dpot <= 0) continue; + pot[ra] = pot[ra] - dpot; + } + + while(RB.size() > 0) { + int rb = RB.back(); RB.pop_back(); + pred[rb] = -1; double dpot = delta - dist[rb]; + if(nodes[rb].inheap) { + pq.delete_node(&nodes[rb]); + nodes[rb].inheap = false; + } + if(dpot <= 0) continue; + pot[rb] = pot[rb] + dpot; + } + + } + + + for(vector::iterator it = V.begin(); it != V.end(); it++) { + int b = *it; + vector &edges = G[b]; + for(vector::iterator iv = edges.begin(); iv != edges.end(); iv++) { + int e = *iv; + edge_t &edge = E[e]; + if(edge.src != b) + continue; + + M.push_back(e); + + int t = edge.src; + edge.src = edge.tgt; + edge.tgt = t; + } + } + + delete[] pot, delete[] isfree, delete[] pred; + delete[] dist, delete[] nodes; + +} + + + From 708bec29170ed4e9fcfb31ed69ed2f6a4cda642c Mon Sep 17 00:00:00 2001 From: Deven Date: Tue, 27 Jan 2015 13:06:07 -0500 Subject: [PATCH 3/7] Cacti changes made --- src/zen/util/fiboheap.hpp | 213 ++++++++++++++++++++++++++++++++++++++ 1 file changed, 213 insertions(+) create mode 100644 src/zen/util/fiboheap.hpp diff --git a/src/zen/util/fiboheap.hpp b/src/zen/util/fiboheap.hpp new file mode 100644 index 0000000..6b28fba --- /dev/null +++ b/src/zen/util/fiboheap.hpp @@ -0,0 +1,213 @@ +using namespace std; +#include +#include + +const double INF = 1e20; + +struct fibnode_t { + unsigned int degree; + int val; + double key; + bool is_marked; + bool inheap; + fibnode_t *next, *prev, *parent,*child; + fibnode_t() {init(); } + void init() { + degree = val = 0; key = 0.0; + is_marked = inheap = false; + next = prev = this; + parent = child = NULL; + } +}; + +class FiboHeap { + fibnode_t *min_node; + int hsize; + + public: + + FiboHeap() { + hsize=0; min_node = NULL; + } + ~FiboHeap() { + // TODO free memory if required + } + + void insert(fibnode_t *node) { + min_node = merge_heaps(min_node, node); + hsize++; + } + + fibnode_t* get_min() { + return min_node; + } + + bool is_empty() { + return min_node == NULL; + } + + int size() { + return hsize; + } + + fibnode_t* delete_min() { + if(is_empty()) + return NULL; + + hsize--; + fibnode_t *min_elem = min_node; + + if(min_node->next == min_node) + min_node = NULL; + else { + min_node->prev->next = min_node->next; + min_node->next->prev = min_node->prev; + min_node = min_node->next; + } + + fibnode_t *curr; + if(min_elem->child != NULL) { + curr = min_elem->child; + + do { + curr->parent = NULL; + curr = curr->next; + } while(curr != min_elem->child); + } + + min_node = merge_heaps(min_node, min_elem->child); + + if(min_node == NULL) + return min_elem; + + curr = min_node; + vector treetable, tovisit; + + while(tovisit.empty() || curr != min_node) { + tovisit.push_back(curr); + curr = curr->next; + } + + for(vector::iterator it = tovisit.begin(); it != tovisit.end(); it++) { + curr = *it; + + while (true) { + while(curr->degree >= treetable.size()) + treetable.push_back(NULL); + + if(treetable[curr->degree] == NULL) { + treetable[curr->degree] = curr; + break; + } + + fibnode_t *other = treetable[curr->degree]; + treetable[curr->degree] = NULL; + + fibnode_t *tmin = other->key < curr->key ? other : curr; + fibnode_t *tmax = other->key < curr->key ? 
curr : other; + + tmax->next->prev = tmax->prev; + tmax->prev->next = tmax->next; + + tmax->next = tmax->prev = tmax; + tmin->child = merge_heaps(tmin->child, tmax); + + tmax->parent = tmin; + tmax->is_marked = false; + tmin->degree += 1; + + curr = tmin; + } + + if(curr->key <= min_node->key) + min_node = curr; + } + + return min_elem; + } + + + void decrease_key(fibnode_t *node, double new_key) { + if(new_key > node->key) + return; + + node->key = new_key; + + if(node->parent != NULL && node->key <= node->parent->key) + cut_node(node); + + if(node->key <= min_node->key) + min_node = node; + + } + void delete_node(fibnode_t *node) { + decrease_key(node, -INF); + delete_min(); + } + + void cut_node(fibnode_t *node) { + node->is_marked = false; + if(node->parent == NULL) + return; + + if(node->next != node) { + node->next->prev = node->prev; + node->prev->next = node->next; + } + + if(node->parent->child == node) { + if(node->next != node) + node->parent->child = node->next; + else + node->parent->child = NULL; + } + + (node->parent->degree)--; + + node->prev = node->next = node; + min_node = merge_heaps(min_node, node); + + if(node->parent->is_marked) + cut_node(node->parent); + else + node->parent->is_marked = true; + + node->parent = NULL; + + } + + + FiboHeap* meld_heaps(FiboHeap *one,FiboHeap *two) { + FiboHeap *result = new FiboHeap(); + + result->min_node = merge_heaps(one->min_node, two->min_node); + result->hsize = one->hsize + two->hsize; + + one->hsize = two->hsize = 0; + one->min_node = two->min_node = NULL; + + return result; + } + + + fibnode_t* merge_heaps(fibnode_t* one, fibnode_t* two) { + if(one == NULL && two == NULL) + return NULL; + if(one == NULL && two != NULL) + return two; + if(one != NULL && two == NULL) + return one; + + fibnode_t *one_next = one->next; + one->next = two->next; + one->next->prev = one; + two->next = one_next; + two->next->prev = two; + + return one->key < two->key ? 
one : two; + } + + +}; + + From 7bcea2e70311b75277ec9ec622424ffcd7e573c5 Mon Sep 17 00:00:00 2001 From: Deven Date: Sun, 8 Feb 2015 08:15:23 -0500 Subject: [PATCH 4/7] Made changes after the feedback from last meeting --- src/Makefile | 3 +- src/setup.py | 6 +- src/zen/algorithms/matching.pyx | 483 ++++++++++++------ .../max_weight_matching.hpp | 211 +++++++- src/zen/control/__init__.py | 2 +- src/zen/control/cacti.py | 170 +++--- src/zen/control/reachability.py | 27 + src/zen/tests/cacti.py | 26 +- src/zen/util/fiboheap.hpp | 213 -------- src/zen/util/max_weight_bmatching.pyx | 231 --------- 10 files changed, 693 insertions(+), 679 deletions(-) rename src/zen/{util => algorithms}/max_weight_matching.hpp (54%) delete mode 100644 src/zen/util/fiboheap.hpp delete mode 100644 src/zen/util/max_weight_bmatching.pyx diff --git a/src/Makefile b/src/Makefile index 9f953b8..e2f409e 100644 --- a/src/Makefile +++ b/src/Makefile @@ -31,4 +31,5 @@ clean: rm -f zen/generating/*.c zen/generating/*.so zen/generating/*.pyc; \ rm -f zen/io/*.c zen/io/*.so zen/io/*.pyc; \ rm -f zen/layout/*.c zen/layout/*.so zen/layout/*.pyc; \ - rm -f zen/util/fiboheap.c zen/util/queue.c zen/util/*.so zen/util/*.pyc; \ No newline at end of file + rm -f zen/util/fiboheap.c zen/util/queue.c zen/util/*.so zen/util/*.pyc; \ + rm -rf zen/algorithms/matching.cpp diff --git a/src/setup.py b/src/setup.py index b9f3313..0af9a00 100644 --- a/src/setup.py +++ b/src/setup.py @@ -38,16 +38,14 @@ Extension('zen.algorithms.properties', ['zen/algorithms/properties.pyx'], include_dirs=[numpy_include_dir]), Extension('zen.algorithms.modularity', ['zen/algorithms/modularity.pyx'], include_dirs=[numpy_include_dir]), Extension('zen.algorithms.spanning', ['zen/algorithms/spanning.pyx'], include_dirs=[numpy_include_dir, fiboheap_include_dir]), -Extension('zen.algorithms.matching', ['zen/algorithms/matching.pyx'], include_dirs=[numpy_include_dir]), +Extension('zen.algorithms.matching', ['zen/algorithms/matching.pyx'], language='c++', include_dirs=[numpy_include_dir], libraries=['python2.7','m','util','dl']), Extension('zen.generating.rgm', ['zen/generating/rgm.pyx'], include_dirs=[numpy_include_dir]), Extension('zen.algorithms.community.communityset', ['zen/algorithms/community/communityset.pyx'], include_dirs=[numpy_include_dir]), Extension('zen.algorithms.community.overlapping_communityset', ['zen/algorithms/community/overlapping_communityset.pyx'], include_dirs=[numpy_include_dir]), Extension('zen.algorithms.community.lpa', ['zen/algorithms/community/lpa.pyx'], include_dirs=[numpy_include_dir]), Extension('zen.algorithms.community.label_rank', ['zen/algorithms/community/label_rank.pyx'], include_dirs=[numpy_include_dir]), Extension('zen.algorithms.community.spectral_modularity', ['zen/algorithms/community/spectral_modularity.pyx'], include_dirs=[numpy_include_dir]), -Extension('zen.algorithms.community.louvain', ['zen/algorithms/community/louvain.pyx'], include_dirs=[numpy_include_dir]), -Extension('zen.util.max_weight_bmatching', ['zen/util/max_weight_bmatching.pyx'], \ - language='c++', include_dirs=[fiboheap_include_dir], libraries=['python2.7','m','util','dl'] )] +Extension('zen.algorithms.community.louvain', ['zen/algorithms/community/louvain.pyx'], include_dirs=[numpy_include_dir])] setup( diff --git a/src/zen/algorithms/matching.pyx b/src/zen/algorithms/matching.pyx index 0573304..aa651eb 100644 --- a/src/zen/algorithms/matching.pyx +++ b/src/zen/algorithms/matching.pyx @@ -1,3 +1,4 @@ +#cython: boundscheck=False, wraparound=False, 
nonecheck=False, infer_types=True """ The ``zen.algorithms.matching`` module provides routines for computing `maximum-matchings `_ on various types of graphs. @@ -11,169 +12,357 @@ The ``zen.algorithms.matching`` module provides routines for computing `maximum- from zen.bipartite cimport BipartiteGraph from zen.digraph cimport DiGraph from zen.exceptions import * -import numpy as np +import numpy as np, time, os +from numpy.random import randint cimport numpy as np from Queue import Queue +from libcpp.vector cimport vector +from libc.stdlib cimport malloc, free +from cython.operator cimport dereference as dref +from cython.operator cimport preincrement as incr + __all__ = ['maximum_matching','maximum_matching_','hopcroft_karp_'] # TODO(druths): Add support for matching undirected networks def maximum_matching(G): - """ - Find a set of edges that comprise a maximum-matching for the graph ``G``. - - * If the graph is a bipartite graph (:py:class:`zen.BipartiteGraph`), then the standard bipartite maximum-matching - problem is solved using the Hopcroft-Karp algorithm. - * If the graph is a directed graph (:py:class:`zen.DiGraph`), then the edge subset is found such that no two edges - in the subset share a common starting vertex or a common ending vertex. - * If the graph is undirected, an error is thrown as this is not currently supported. - - **Returns**: - :py:class:`list`. The list of edge endpoint pairs that comprise the edges belonging to a maximum-matching for the graph. - - **Raises**: - :py:exc:`zen.ZenException`: if ``G`` is an undirected graph. - """ - eidx_matching = maximum_matching_(G) - - return [G.endpoints(eidx) for eidx in eidx_matching] + """ + Find a set of edges that comprise a maximum-matching for the graph ``G``. + + * If the graph is a bipartite graph (:py:class:`zen.BipartiteGraph`), then the standard bipartite maximum-matching + problem is solved using the Hopcroft-Karp algorithm. + * If the graph is a directed graph (:py:class:`zen.DiGraph`), then the edge subset is found such that no two edges + in the subset share a common starting vertex or a common ending vertex. + * If the graph is undirected, an error is thrown as this is not currently supported. + + **Returns**: + :py:class:`list`. The list of edge endpoint pairs that comprise the edges belonging to a maximum-matching for the graph. + + **Raises**: + :py:exc:`zen.ZenException`: if ``G`` is an undirected graph. + """ + eidx_matching = maximum_matching_(G) + + return [G.endpoints(eidx) for eidx in eidx_matching] cpdef maximum_matching_(G): - """ - Find a set of edges that comprise a maximum-matching for the graph ``G``. - - * If the graph is a bipartite graph (:py:class:`zen.BipartiteGraph`), then the standard bipartite maximum-matching - problem is solved using the Hopcroft-Karp algorithm. - * If the graph is a directed graph (:py:class:`zen.DiGraph`), then the edge subset is found such that no two edges - in the subset share a common starting vertex or a common ending vertex. - * If the graph is undirected, an error is thrown as this is not currently supported. - - **Returns**: - :py:class:`list`. The list of edge indices that indicate the edges belonging to a maximum-matching for the graph. - - **Raises**: - :py:exc:`zen.ZenException`: if ``G`` is an undirected graph. 
- """ - if type(G) == BipartiteGraph: - return __bipartite_hopcroft_karp_(G) - elif type(G) == DiGraph: - return __directed_hopcroft_karp_(G) - else: - raise ZenException, 'Only bipartite and directed graphs are currently supported' + """ + Find a set of edges that comprise a maximum-matching for the graph ``G``. + + * If the graph is a bipartite graph (:py:class:`zen.BipartiteGraph`), then the standard bipartite maximum-matching + problem is solved using the Hopcroft-Karp algorithm. + * If the graph is a directed graph (:py:class:`zen.DiGraph`), then the edge subset is found such that no two edges + in the subset share a common starting vertex or a common ending vertex. + * If the graph is undirected, an error is thrown as this is not currently supported. + + **Returns**: + :py:class:`list`. The list of edge indices that indicate the edges belonging to a maximum-matching for the graph. + + **Raises**: + :py:exc:`zen.ZenException`: if ``G`` is an undirected graph. + """ + if type(G) == BipartiteGraph: + return __bipartite_hopcroft_karp_(G) + elif type(G) == DiGraph: + return __directed_hopcroft_karp_(G) + else: + raise ZenException, 'Only bipartite and directed graphs are currently supported' def hopcroft_karp_(G): - """ - Find a set of edges that comprise a maximum-matching for the bipartite graph ``G`` using the `Hopcroft-Karp algorithm `_. - - **Returns**: - :py:class:`list`. The list of edge indices that indicate the edges belonging to a maximum-matching for the graph. - - **Raises**: - :py:exc:`zen.ZenException`: if ``G`` is not a bipartite graph. - """ - if type(G) == BipartiteGraph: - return __bipartite_hopcroft_karp_(G) - else: - raise ZenException, 'Only bipartite graphs are currently supported' + """ + Find a set of edges that comprise a maximum-matching for the bipartite graph ``G`` using the `Hopcroft-Karp algorithm `_. + + **Returns**: + :py:class:`list`. The list of edge indices that indicate the edges belonging to a maximum-matching for the graph. + + **Raises**: + :py:exc:`zen.ZenException`: if ``G`` is not a bipartite graph. 
+ """ + if type(G) == BipartiteGraph: + return __bipartite_hopcroft_karp_(G) + else: + raise ZenException, 'Only bipartite graphs are currently supported' cpdef __directed_hopcroft_karp_(DiGraph G): - - cdef int unode, vnode, i - - ##### - # apply the transformation to produce a bipartite graph - GT = BipartiteGraph() - tnode2node = {} - node2unode = {} - node2vnode = {} - - # add the nodes - for i in G.nodes_iter_(): - unode = GT.add_u_node() - vnode = GT.add_v_node() - tnode2node[unode] = i - tnode2node[vnode] = i - node2unode[i] = unode - node2vnode[i] = vnode - - # add the edges - for i in G.edges_iter_(): - u,v = G.endpoints_(i) - #print u,node2unode[u],GT.is_in_U_(node2unode[u]) - #print v,node2vnode[v],GT.is_in_U_(node2vnode[v]) - GT.add_edge_(node2unode[u],node2vnode[v],i) - - ##### - # run the bipartite matching - max_matching = __bipartite_hopcroft_karp_(GT) - - ##### - # transform the maximum matching back into the directed graph - di_max_matching = [GT.edge_data_(i) for i in max_matching] - - return di_max_matching - + + cdef int unode, vnode, i + + ##### + # apply the transformation to produce a bipartite graph + GT = BipartiteGraph() + tnode2node = {} + node2unode = {} + node2vnode = {} + + # add the nodes + for i in G.nodes_iter_(): + unode = GT.add_u_node() + vnode = GT.add_v_node() + tnode2node[unode] = i + tnode2node[vnode] = i + node2unode[i] = unode + node2vnode[i] = vnode + + # add the edges + for i in G.edges_iter_(): + u,v = G.endpoints_(i) + #print u,node2unode[u],GT.is_in_U_(node2unode[u]) + #print v,node2vnode[v],GT.is_in_U_(node2vnode[v]) + GT.add_edge_(node2unode[u],node2vnode[v],i) + + ##### + # run the bipartite matching + max_matching = __bipartite_hopcroft_karp_(GT) + + ##### + # transform the maximum matching back into the directed graph + di_max_matching = [GT.edge_data_(i) for i in max_matching] + + return di_max_matching + cpdef __bipartite_hopcroft_karp_(BipartiteGraph G): - cdef int NIL_V = G.next_node_idx - cdef np.ndarray[np.int_t, ndim=1] pairs = np.ones(G.next_node_idx+1, np.int) * NIL_V - cdef np.ndarray[np.int_t, ndim=1] layers = np.ones(G.next_node_idx+1, np.int) * -1 - cdef np.ndarray[np.int_t, ndim=1] u_nodes = G.U_() - matching_list = [] - - while __bhk_bfs(G,pairs,layers,u_nodes): - for v in u_nodes: - if pairs[v] == NIL_V: - __bhk_dfs(G,v,pairs,layers) - - # construct the matching list - for u in u_nodes: - if pairs[u] != NIL_V: - matching_list.append(G.edge_idx_(u,pairs[u])) - - return matching_list + cdef int NIL_V = G.next_node_idx + cdef np.ndarray[np.int_t, ndim=1] pairs = np.ones(G.next_node_idx+1, np.int) * NIL_V + cdef np.ndarray[np.int_t, ndim=1] layers = np.ones(G.next_node_idx+1, np.int) * -1 + cdef np.ndarray[np.int_t, ndim=1] u_nodes = G.U_() + matching_list = [] + + while __bhk_bfs(G,pairs,layers,u_nodes): + for v in u_nodes: + if pairs[v] == NIL_V: + __bhk_dfs(G,v,pairs,layers) + + # construct the matching list + for u in u_nodes: + if pairs[u] != NIL_V: + matching_list.append(G.edge_idx_(u,pairs[u])) + + return matching_list cdef __bhk_bfs(BipartiteGraph G, np.ndarray[np.int_t, ndim=1] pairs, np.ndarray[np.int_t, ndim=1] layers, np.ndarray[np.int_t,ndim=1] u_nodes): - cdef int NIL_V = G.next_node_idx - cdef int i, v - Q = Queue() - - for v in u_nodes: - if pairs[v] == NIL_V: - layers[v] = 0 - Q.put(v) - else: - layers[v] = -1 - layers[NIL_V] = -1 - - while not Q.empty(): - v = Q.get() - - if v is NIL_V: - continue - - for i in range(G.node_info[v].degree): - u = G.endpoint_(G.node_info[v].elist[i],v) - if layers[pairs[u]] == 
-1: - layers[pairs[u]] = layers[v] + 1 - Q.put(pairs[u]) - - return layers[NIL_V] != -1 - + cdef int NIL_V = G.next_node_idx + cdef int i, v + Q = Queue() + + for v in u_nodes: + if pairs[v] == NIL_V: + layers[v] = 0 + Q.put(v) + else: + layers[v] = -1 + layers[NIL_V] = -1 + + while not Q.empty(): + v = Q.get() + + if v is NIL_V: + continue + + for i in range(G.node_info[v].degree): + u = G.endpoint_(G.node_info[v].elist[i],v) + if layers[pairs[u]] == -1: + layers[pairs[u]] = layers[v] + 1 + Q.put(pairs[u]) + + return layers[NIL_V] != -1 + cdef __bhk_dfs(BipartiteGraph G, int v, np.ndarray[np.int_t, ndim=1] pairs, np.ndarray[np.int_t, ndim=1] layers): - cdef int NIL_V = G.next_node_idx - cdef int i - - if v != NIL_V: - for i in range(G.node_info[v].degree): - u = G.endpoint_(G.node_info[v].elist[i],v) - if layers[pairs[u]] == layers[v] + 1: - if __bhk_dfs(G, pairs[u], pairs, layers): - pairs[u] = v - pairs[v] = u - return True - layers[v] = -1 - return False - return True - \ No newline at end of file + cdef int NIL_V = G.next_node_idx + cdef int i + + if v != NIL_V: + for i in range(G.node_info[v].degree): + u = G.endpoint_(G.node_info[v].elist[i],v) + if layers[pairs[u]] == layers[v] + 1: + if __bhk_dfs(G, pairs[u], pairs, layers): + pairs[u] = v + pairs[v] = u + return True + layers[v] = -1 + return False + return True + + +cdef extern from "max_weight_matching.hpp": + cdef struct edge_t: + int src, tgt + double w + edge_t(int, int, double) + cdef void mwb_matching(vector[vector[int]] &G, vector[edge_t] &E, \ + vector[int] &U, vector[int] &V, vector[int] &M) + + +#TODO write wrapper around mwb_matching for BipartiteGraph +#def max_weight_matching_(G): +# + +def __max_weight_matching(Gi, **kwargs): + cdef: + vector[int] *U + vector[int] *V + vector[edge_t] *E + vector[vector[int]] *G + vector[int].iterator it + vector[edge_t].iterator ite + unsigned int u,v,N,t,x,y,r,s,e + double w,MAXW + edge_t temp_edge + vector[int] *M + + U = new vector[int]() + V = new vector[int]() + M = new vector[int]() + E = new vector[edge_t]() + + controls = kwargs.pop('controls', None) + doshuffle = kwargs.pop('randomize',False) + indcyc = kwargs.pop('with_cycles',False) + + if controls is None: + controls = [] + node2uv, uv2node = {}, {} + + if doshuffle: + np.random.seed(int(time.time()+os.getpid()*np.random.random()*1000)) + + #remove non reachable nodes + if controls: + vis = set() + + def dfs(r): + vis.add(r) + for v in Gi.out_neighbors_(r): + if v not in vis: + dfs(v) + + for ctl in controls: + for driver in ctl: + if driver not in vis: + dfs(driver) + else: + vis = set(Gi.nodes_()) + + + # first transform the graph itself + N = 0 + #for t in vis: + for t in Gi.nodes_iter_(): + if indcyc or t in vis: + u, v = 2*N, 2*N+1 + node2uv[t] = N + uv2node[N] = t + N += 1 + U.push_back(u) + V.push_back(v) + if doshuffle: + x, y = randint(U.size()), U.size()-1 + s = dref(U)[x] + dref(U)[x] = dref(U)[y] + dref(U)[y] = s + x, y = randint(V.size()), V.size()-1 + s = dref(V)[x] + dref(V)[x] = dref(V)[y] + dref(V)[y] = s + + + MAXW = sum(len(ctl) for ctl in controls) + Gi.size() + 100.0 + + # add the edges + for e in Gi.edges_iter_(): + x, y = Gi.endpoints_(e) + #if x in vis and y in vis: + if indcyc or (x in vis and y in vis): + u,v = 2*node2uv[x], 2*node2uv[y]+1 + temp_edge.src = u + temp_edge.tgt = v + temp_edge.w = MAXW + 1.0 + E.push_back(temp_edge) + + + # add control nodes and forward edges with weight 1 + START_CNODE = N + for ctl in controls: + cnode = N + N += 1 + #if len(ctl) > 0: + #d = ctl[0] + for d in 
ctl: + u, v = 2*cnode, 2*node2uv[d]+1 + temp_edge.src = u + temp_edge.tgt = v + temp_edge.w = MAXW + 1.0 + E.push_back(temp_edge) + + for x in vis: + u, v = 2*node2uv[x], 2*cnode+1 + temp_edge.src = u + temp_edge.tgt = v + temp_edge.w = MAXW + E.push_back(temp_edge) + + u, v = 2*cnode, 2*cnode+1 + U.push_back(u) + V.push_back(v) + if doshuffle: + x, y = randint(U.size()), U.size()-1 + s = dref(U)[x] + dref(U)[x] = dref(U)[y] + dref(U)[y] = s + x, y = randint(V.size()), V.size()-1 + s = dref(V)[x] + dref(V)[x] = dref(V)[y] + dref(V)[y] = s + + + # add self loops with weight 0 + for s in xrange(N): + if s >= START_CNODE or not Gi.has_edge_(uv2node[s],uv2node[s]): + u, v = 2*s, 2*s+1 + temp_edge.src = u + temp_edge.tgt = v + temp_edge.w = MAXW + E.push_back(temp_edge) + + G = new vector[vector[int]](2*N, vector[int]()) + + for e in xrange(E.size()): + u,v,w = dref(E)[e].src, dref(E)[e].tgt, dref(E)[e].w + assert u%2==0 and v%2==1 + dref(G)[u].push_back(e) + dref(G)[v].push_back(e) + if doshuffle: + x, y = randint(dref(G)[u].size()), dref(G)[u].size()-1 + s = dref(G)[u][x] + dref(G)[u][x] = dref(G)[u][y] + dref(G)[u][y] = s + x, y = randint(dref(G)[v].size()), dref(G)[v].size()-1 + s = dref(G)[v][x] + dref(G)[v][x] = dref(G)[v][y] + dref(G)[v][y] = s + + ##### + # run the weighted bipartite matching + + mwb_matching(dref(G),dref(E),dref(U),dref(V),dref(M)) + + result, roots = [], [] + num_matched = 0 + for x in xrange(M.size()): + e = dref(M)[x] + u,v,w = dref(E)[e].src, dref(E)[e].tgt, dref(E)[e].w + assert u%2==0 and v%2==1 + if w > MAXW: + num_matched += 1 + if (u/2 < START_CNODE): + result.append(Gi.edge_idx_(uv2node[u/2],uv2node[v/2])) + else: + roots.append(uv2node[v/2]) + + + + # free memory + del G, U, V, E, M + + return num_matched, result, roots diff --git a/src/zen/util/max_weight_matching.hpp b/src/zen/algorithms/max_weight_matching.hpp similarity index 54% rename from src/zen/util/max_weight_matching.hpp rename to src/zen/algorithms/max_weight_matching.hpp index 6813d1b..f13a376 100644 --- a/src/zen/util/max_weight_matching.hpp +++ b/src/zen/algorithms/max_weight_matching.hpp @@ -1,11 +1,218 @@ - using namespace std; #include #include -#include #include #include +const double INF = 1e20; + +struct fibnode_t { + unsigned int degree; + int val; + double key; + bool is_marked; + bool inheap; + fibnode_t *next, *prev, *parent,*child; + fibnode_t() {init(); } + void init() { + degree = val = 0; key = 0.0; + is_marked = inheap = false; + next = prev = this; + parent = child = NULL; + } +}; + +class FiboHeap { + fibnode_t *min_node; + int hsize; + + public: + + FiboHeap() { + hsize=0; min_node = NULL; + } + ~FiboHeap() { + // TODO free memory if required + } + + void insert(fibnode_t *node) { + min_node = merge_heaps(min_node, node); + hsize++; + } + + fibnode_t* get_min() { + return min_node; + } + + bool is_empty() { + return min_node == NULL; + } + + int size() { + return hsize; + } + + fibnode_t* delete_min() { + if(is_empty()) + return NULL; + + hsize--; + fibnode_t *min_elem = min_node; + + if(min_node->next == min_node) + min_node = NULL; + else { + min_node->prev->next = min_node->next; + min_node->next->prev = min_node->prev; + min_node = min_node->next; + } + + fibnode_t *curr; + if(min_elem->child != NULL) { + curr = min_elem->child; + + do { + curr->parent = NULL; + curr = curr->next; + } while(curr != min_elem->child); + } + + min_node = merge_heaps(min_node, min_elem->child); + + if(min_node == NULL) + return min_elem; + + curr = min_node; + vector treetable, tovisit; + + 
while(tovisit.empty() || curr != min_node) { + tovisit.push_back(curr); + curr = curr->next; + } + + for(vector::iterator it = tovisit.begin(); it != tovisit.end(); it++) { + curr = *it; + + while (true) { + while(curr->degree >= treetable.size()) + treetable.push_back(NULL); + + if(treetable[curr->degree] == NULL) { + treetable[curr->degree] = curr; + break; + } + + fibnode_t *other = treetable[curr->degree]; + treetable[curr->degree] = NULL; + + fibnode_t *tmin = other->key < curr->key ? other : curr; + fibnode_t *tmax = other->key < curr->key ? curr : other; + + tmax->next->prev = tmax->prev; + tmax->prev->next = tmax->next; + + tmax->next = tmax->prev = tmax; + tmin->child = merge_heaps(tmin->child, tmax); + + tmax->parent = tmin; + tmax->is_marked = false; + tmin->degree += 1; + + curr = tmin; + } + + if(curr->key <= min_node->key) + min_node = curr; + } + + return min_elem; + } + + + void decrease_key(fibnode_t *node, double new_key) { + if(new_key > node->key) + return; + + node->key = new_key; + + if(node->parent != NULL && node->key <= node->parent->key) + cut_node(node); + + if(node->key <= min_node->key) + min_node = node; + + } + void delete_node(fibnode_t *node) { + decrease_key(node, -INF); + delete_min(); + } + + void cut_node(fibnode_t *node) { + node->is_marked = false; + if(node->parent == NULL) + return; + + if(node->next != node) { + node->next->prev = node->prev; + node->prev->next = node->next; + } + + if(node->parent->child == node) { + if(node->next != node) + node->parent->child = node->next; + else + node->parent->child = NULL; + } + + (node->parent->degree)--; + + node->prev = node->next = node; + min_node = merge_heaps(min_node, node); + + if(node->parent->is_marked) + cut_node(node->parent); + else + node->parent->is_marked = true; + + node->parent = NULL; + + } + + + FiboHeap* meld_heaps(FiboHeap *one,FiboHeap *two) { + FiboHeap *result = new FiboHeap(); + + result->min_node = merge_heaps(one->min_node, two->min_node); + result->hsize = one->hsize + two->hsize; + + one->hsize = two->hsize = 0; + one->min_node = two->min_node = NULL; + + return result; + } + + + fibnode_t* merge_heaps(fibnode_t* one, fibnode_t* two) { + if(one == NULL && two == NULL) + return NULL; + if(one == NULL && two != NULL) + return two; + if(one != NULL && two == NULL) + return one; + + fibnode_t *one_next = one->next; + one->next = two->next; + one->next->prev = one; + two->next = one_next; + two->next->prev = two; + + return one->key < two->key ? one : two; + } + + +}; + + struct edge_t { int src, tgt; double w; diff --git a/src/zen/control/__init__.py b/src/zen/control/__init__.py index 78b45bd..7802e22 100644 --- a/src/zen/control/__init__.py +++ b/src/zen/control/__init__.py @@ -37,4 +37,4 @@ from profile import profile from reachability import num_min_controls from pplot import profile_plot, profile_heatmap -from cacti import build_cacti +from cacti import build_cacti, build_cacti_fixed_controls_ diff --git a/src/zen/control/cacti.py b/src/zen/control/cacti.py index 4e709d9..e40a377 100644 --- a/src/zen/control/cacti.py +++ b/src/zen/control/cacti.py @@ -5,24 +5,28 @@ Commault, Dion, Van der Woude. Characterization of generic properties of linear structured systems for efficient computations. Kybernetika, 38(5):503-520, 2002. 
""" import sys, zen -from zen.control.reachability import generic_rank_ -from zen.util.max_weight_bmatching import control_rooted_max_weight_matching +from zen.control.reachability import generic_rank_, control_rooted_max_weight_matching_ from collections import OrderedDict from numpy import * +from exceptions import * import itertools as it sys.setrecursionlimit(2**27) class Stem: """ - An instance of this class represents a single stem (and any associated buds) in a cacti structure. + An instance of :py:class:`Stem` represents a single stem (and any associated buds) in a cacti structure. """ def __init__(self,node_seq): + """ + Creates a new :py:class:`Stem` from a sequence of nodes given by + ``node_seq``. + """ self._node_dict = OrderedDict.fromkeys(node_seq) def __contains__(self,x): """ - Return True if x is a node in the stem (containment doesn't extend to buds). + Return ``True`` if ``x`` is a node in the stem (containment doesn't extend to buds). """ return x in self._node_dict @@ -41,15 +45,15 @@ def __iter__(self): def _clear(self): self._node_dict.clear() - def origin(self): + def origin_(self): """ - The origin or starting node of the stem + Return the origin or starting node (index) of the stem """ return next(self._node_dict.iterkeys(), None) - def terminus(self): + def terminus_(self): """ - The terminus or end node of the stem + Returns the terminus or end node (index) of the stem. """ x = None for x in self._node_dict.iterkeys(): @@ -62,7 +66,7 @@ def _extend(self, nodes): def length_with_buds(self): """ - Returns length of stem including the buds attached to it + Returns length of stem including the buds attached to it. """ l = len(self._node_dict) for bud in self._node_dict.itervalues(): @@ -72,7 +76,7 @@ def length_with_buds(self): def _add_bud(self,dnode,cycle,dnode_target): cycle._reorder_origin(dnode_target) - if dnode == self.terminus(): + if dnode == self.terminus_(): self._extend(cycle) return False else: @@ -94,9 +98,14 @@ def buds_iter(self): class Cycle: """ - Cycles can either be buds that are connected to a stem by a distinguished node or independent cycles (no associated stem). + :py:class:Cycle can either be a bud that is connected to a stem by a + distinguished node or an independent cycle (no associated stem). """ def __init__(self,node_seq): + """ + Creates a new :py:class:`Cycle` from a sequence of nodes given by + ``node_seq``. + """ self._node_dict = OrderedDict.fromkeys(node_seq) self._stem = None self._dnode = None @@ -122,7 +131,7 @@ def __iter__(self): def _clear(self): self._node_dict.clear() - def origin(self): + def origin_(self): """ A starting node of this cycle (in case of bud this is the node where distinguished edge is pointing to) @@ -149,9 +158,9 @@ def stem(self): """ return self._stem - def dist_node(self): + def dist_node_(self): """ - Return distinguished node of the stem to which this cycle is attached + Return distinguished node (index) of the stem to which this cycle is attached if it is a bud, otherwise returh None """ return self._dnode @@ -203,45 +212,76 @@ def recur(z,cur,stems,cycs): return stems, cycs -def build_cacti(G, forced_ctls=None, num_ctls=None, shuffle=False, with_cycles=False): +def build_cacti_fixed_controls(G, fixed_ctls, **kwargs): """ - This method constructs cacti for a given directed graph G. - (The nodes in forced_ctls should be indices of DiGraph nodes and not node objects) - Inputs: 1) A directed Graph G - 2) if forced_ctls is not None, it has to be a list of tuples - representing controls. 
Each tuple represents a control node such - that the control is to be attached to each node in the tuple. For - example, [(1,), (4,)] would mean that two controls are attached to - nodes 1 and 4 respectively. Use this option for when controls are - fixed. Weigthed maximum matching is performed. - 3) num_ctls : This option is not implemented yet and should be kept - None - 4) if shuffle is True, matching algorithm is randomized so that - each call to build cacti would result in a different cacti - structure. This option is implemented yet for case of unweighted - maximum matching - 5) if forced_ctls is given, with_cycles option dictates whether any - independent cycles that are not reachable from the forced_ctls - controls should be included in the cacti - 6) if none of the force_ctls or num_ctls is given, unweighted - matching is performed such that minimum number of controls required - is calculated and the cacti represents the corresponding maximum matching - - Returns Cacti object + This method constructs :py:class:`Cacti` for a given directed graph ``G`` + and set of controls that are fixed to some nodes in G. Maximum perfect + weighted matching is used to calculate reachable/controllable nodes given + the fixed controls. + + **Args**: see method build_cacti_fixed_controls_ + Only difference here is that fixed_ctls are node objs and not + indices. + + Returns a py:class:`Cacti` object + """ + ctls = [tuple(G.node_idx(a) for a in c) for c in fixed_ctls] + return build_cacti_fixed_controls_(G, ctls, **kwargs) + + +def build_cacti_fixed_controls_(G, fixed_ctls, **kwargs): + """ + This method constructs :py:class:`Cacti` for a given directed graph ``G`` + and set of controls that are fixed to some nodes in G. Maximum perfect + weighted matching is used to calculate reachable/controllable nodes given + the fixed controls. + + **Args**: + + *``fixed_ctls`` (``LIST_OF_TUPLES``) + *``LIST_OF_TUPLES``: Represents control nodes that are + attached to the nodes in G. e.g. [(1,),(3,)] represents two controls + that are attached to node indices 1 and 3 in G. + + **KwArgs**: + *``randomize[=False]`` (``Boolean``). Indicates whether the matching + should be randomized + *``with_cycles[=False]`` (``Boolean``). Indicates whether + independent cycles not reachable from the ``fixed_ctls`` should be + included in the matching/cacti + + **Raises**: + ``ZenException``: if fixed_ctls is None + + Returns a py:class:`Cacti` object """ + cact = Cacti(G) - if forced_ctls is not None: - cact._forced_control_case(forced_ctls, shuffle, with_cycles) - elif num_ctls is not None: - cact._free_control_case(num_ctls, shuffle) - else: - cact._min_controls_case() + if fixed_ctls is None: + raise ZenException, "fixed_ctls cannot be None." + + cact._fixed_control_case(fixed_ctls, **kwargs) cact._build_cacti_from_stemscycles() return cact +def build_cacti(G): + """ + This method constructs :py:class:`Cacti` for a given directed graph ``G`` + using maximum unweighted matching. The resulting matching forms a cacti + which also gives the minimum number and locations of controls required for the full control. + + Returns a py:class:`Cacti` object + """ + cact = Cacti(G) + + cact._min_controls_case() + + cact._build_cacti_from_stemscycles() + return cact + class Cacti: """ The Cacti class represents the cacti control structure of a given network. 
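A short usage sketch of the two builders defined above; the graph and the asserted counts are illustrative only, and it is assumed that zen.DiGraph.add_edge creates missing nodes:

    import zen
    from zen import control

    G = zen.DiGraph()
    G.add_edge('a', 'b')
    G.add_edge('b', 'c')
    G.add_edge('d', 'e')      # a second component, not reachable from 'a'

    # Unweighted matching: minimum number of controls for full control.
    cact = control.build_cacti(G)
    assert cact.num_controls() == 2             # one per stem: (a,b,c) and (d,e)

    # Weighted matching with a control fixed to the index of node 'a'.
    cact = control.build_cacti_fixed_controls_(G, [(G.node_idx('a'),)])
    assert cact.num_controllable_nodes() == 3   # only a -> b -> c is reachable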
It can be used to find the location (and number) of inputs required to control the network as @@ -260,7 +300,7 @@ def __init__(self,G): def num_controls(self): """ - Return number of controls inputs + Return number of controls inputs nodes. """ return len(self._controls) @@ -292,17 +332,17 @@ def _build_cacti_from_stemscycles(self): self._cycles = [c for c in self._cycles if len(c) > 0] - controls = [ [stem.origin()] for stem in self._stems ] + controls = [ [stem.origin_()] for stem in self._stems ] if controls: idx = 0 for cyc in self._cycles: if not cyc.is_bud(): - controls[idx].append(cyc.origin()) + controls[idx].append(cyc.origin_()) idx = (idx+1) % len(controls) controls = [tuple(a) for a in controls] else: - controls = [ tuple(cyc.origin() for cyc in self._cycles) ] + controls = [ tuple(cyc.origin_() for cyc in self._cycles) ] self._controls = controls @@ -313,10 +353,9 @@ def _build_cacti_from_stemscycles(self): # To find minimum controls required to fully control a network using - # Unweighted maximum - # Matching + # Unweighted maximum Matching def _min_controls_case(self): - #TODO Shuffle + #TODO Randomize matching G = self._G matching = set(zen.matching.maximum_matching_(G)) matching = [G.endpoints_(eidx) for eidx in matching] @@ -325,26 +364,23 @@ def _min_controls_case(self): stems.append(Stem([u])) self._matching, self._stems, self._cycles = matching, stems, cycles + # To find cacti and hence controllable nodes given a fixed set of controls - # (forced_ctls) using maximum weighted matching - def _forced_control_case(self, forced_ctls, doshuffle=False, indcyc=False): + # (fixed_ctls) using maximum weighted matching + def _fixed_control_case(self, fixed_ctls, **kwargs): + kwargs['controls'] = fixed_ctls G = self._G - origins = [a for b in forced_ctls for a in b] - matching,roots = control_rooted_max_weight_matching(G, forced_ctls, - doshuffle=doshuffle, indcyc=indcyc)[1:3] + origins = [a for b in fixed_ctls for a in b] + matching,roots = control_rooted_max_weight_matching_(G, **kwargs)[1:3] matching = [G.endpoints_(eidx) for eidx in matching] self._stems, self._cycles = _stems_cycs_from_matching(matching, roots, origins) self._matching = matching - def _free_control_case(self, num_ctls, doshuffle=False): - raise NotImplementedError("Yet to be implemented.") - - - def controls(self): + def controls_(self): """ - Returns list of the controls that are required for maximum control of - the network. In case of unweigthed matching, it is the minimum number + Returns list of nodes (indices) where controls should be attached. + In case of unweighted matching, it is the minimum number of controls required for full control of the network. 
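The control assembly above appends each independent (non-bud) cycle to an existing control in round-robin fashion, so a disconnected cycle widens a control rather than adding one. A hedged sketch with an illustrative graph, assuming add_edge creates missing nodes:

    import zen
    from zen import control

    G = zen.DiGraph()
    G.add_edge('a', 'b')      # a 2-node stem
    G.add_edge('c', 'd')      # an independent 2-cycle, disconnected from the stem
    G.add_edge('d', 'c')

    cact = control.build_cacti(G)
    assert cact.num_controls() == 1           # the cycle rides on the stem's control
    assert len(cact.controls_()[0]) == 2      # (stem origin, one node of the cycle)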
In case of weighted matching (when a fixed set of controls is given), this method
		returns the controls that are sufficient for controlling the maximum possible nodes of the network.
@@ -352,9 +388,9 @@ def controls(self):
 		"""
 		return list(self._controls)
 
-	def controllable_nodes(self):
+	def controllable_nodes_(self):
 		"""
-		Returns set of nodes that are controllable
+		Returns the set of node indices that are controllable
 		"""
 		return set(self._controllable_nodes)
 
@@ -364,9 +400,9 @@ def num_controllable_nodes(self):
 		"""
 		return len(self._controllable_nodes)
 
-	def matching(self):
+	def matching_(self):
 		"""
-		Return matching as calculated by the maximum matching algorithm (unweighted or weighted as the case maybe)
+		Returns a matching (a list of edges; an edge is a tuple of node indices (u,v)) as calculated by the maximum matching algorithm (unweighted or weighted, as the case may be)
 		"""
 		return list(self._matching)
diff --git a/src/zen/control/reachability.py b/src/zen/control/reachability.py
index a7ac3db..6a3f950 100644
--- a/src/zen/control/reachability.py
+++ b/src/zen/control/reachability.py
@@ -113,3 +113,30 @@ def kalman_generic_rank(G,controls,repeats=100):
 
     return rank
 
+#TODO move appropriate code from zen.matching here
+def control_rooted_max_weight_matching_(G, **kwargs):
+	"""
+	Find a maximum weighted matching for the given directed graph ``G``.
+
+	**KwArgs**:
+		*``controls[=None]`` (LIST_OF_TUPLES)
+			*``LIST_OF_TUPLES``: Representing control nodes that are
+			attached to the nodes in G, e.g. [(1,),(3,)] represents two controls
+			that are attached to node indices 1 and 3.
+			When controls is not given (or None), the result will consist
+			of a matching in which all cycles are found (because cycles don't
+			need any controls).
+		*``randomize[=False]`` (``Boolean``). Indicates whether the matching
+			should be randomized
+		*``with_cycles[=False]`` (``Boolean``). Indicates whether
+			independent cycles not reachable from the ``controls`` should be
+			included in the matching
+
+	**Returns**:
+		``(n,M,R)``: where ``n`` (int) is the number of matched nodes,
+			``M`` is a list of edge indices representing the matching, and
+			``R`` is a list of node indices representing the origins of
+			the stems. 
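A hedged sketch of consuming the ``(n,M,R)`` triple described above; the graph is illustrative only, and it assumes the underlying zen.matching routine that this wrapper delegates to is available:

    import zen
    from zen.control.reachability import control_rooted_max_weight_matching_

    G = zen.DiGraph()
    G.add_edge('a', 'b')
    G.add_edge('b', 'c')

    n, M, R = control_rooted_max_weight_matching_(G, controls=[(G.node_idx('a'),)])
    matched = [G.endpoints_(eidx) for eidx in M]   # edge indices -> (u,v) index pairs
    # R holds the node indices at which stems are rooted; with the control sitting
    # on 'a', a single stem rooted at G.node_idx('a') and n == 3 is expected here.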
+ """ + return zen.matching.__max_weight_matching(G, **kwargs) + diff --git a/src/zen/tests/cacti.py b/src/zen/tests/cacti.py index dae5003..0b5d4ab 100644 --- a/src/zen/tests/cacti.py +++ b/src/zen/tests/cacti.py @@ -14,14 +14,14 @@ def test_simple(self): cacti = control.build_cacti(G) self.assertEqual(cacti.num_controls(),1) - controls = cacti.controls() + controls = cacti.controls_() self.assertEqual(controls[0][0],G.node_idx('x')) # change the graph G.rm_edge('y','z') cacti = control.build_cacti(G) self.assertEqual(cacti.num_controls(),2) - controls = set(cacti.controls()) + controls = set(cacti.controls_()) sol1 = set([(G.node_idx('x'),),(G.node_idx('z'),)]) sol2 = set([(G.node_idx('x'),),(G.node_idx('y'),)]) self.assertTrue(controls == sol1 or controls == sol2) @@ -45,7 +45,7 @@ def test_non_bud_cycles(self): cacti = control.build_cacti(G) self.assertEqual(cacti.num_controls(),1) - controls = set(cacti.controls()[0]) + controls = set(cacti.controls_()[0]) self.assertTrue(len(c1.intersection(controls)) == 1) self.assertTrue(len(c2.intersection(controls)) == 1) self.assertTrue(len(c3.intersection(controls)) == 1) @@ -89,7 +89,7 @@ def _test_with_kalman(self, G, ctls=None): ctls = set( randint(len(G)) for _ in xrange(randint(1,len(G))) ) ctls = [tuple([a]) for a in ctls] - cact = control.build_cacti(G, forced_ctls=ctls, shuffle=True) + cact = control.build_cacti_fixed_controls_(G, ctls, randomize=True) NC = cact.num_controllable_nodes() K = control.reachability.kalman_generic_rank(G, ctls) @@ -122,7 +122,7 @@ def test_stem_cycle1(self): G.add_edge_(4,3) ctls = [(0,)] - cact = control.build_cacti(G, forced_ctls=ctls, shuffle=True, with_cycles=True) + cact = control.build_cacti_fixed_controls_(G, ctls, randomize=True, with_cycles=True) self.assertEqual(cact.num_controllable_nodes(),5) def test_stem_cycle2(self): @@ -134,7 +134,7 @@ def test_stem_cycle2(self): G.add_edge_(4,3) ctls = [(0,)] - cact = control.build_cacti(G, forced_ctls=ctls, shuffle=True) + cact = control.build_cacti_fixed_controls_(G, ctls, randomize=True) self.assertEqual(cact.num_controllable_nodes(),3) def test_stem_cycle3(self): @@ -146,7 +146,7 @@ def test_stem_cycle3(self): G.add_edge_(4,3) ctls = [(3,)] - cact = control.build_cacti(G, forced_ctls=ctls, shuffle=True) + cact = control.build_cacti_fixed_controls_(G, ctls, randomize=True) self.assertEqual(cact.num_controllable_nodes(),2) def test_stem_bud1(self): @@ -159,7 +159,7 @@ def test_stem_bud1(self): G.add_edge_(4,3) ctls = [(0,)] - cact = control.build_cacti(G, forced_ctls=ctls, shuffle=True) + cact = control.build_cacti_fixed_controls_(G, ctls, randomize=True) self.assertEqual(cact.num_controllable_nodes(),5) def test_stem_bud2(self): @@ -172,7 +172,7 @@ def test_stem_bud2(self): G.add_edge_(4,3) ctls = [(1,)] - cact = control.build_cacti(G, forced_ctls=ctls, shuffle=True) + cact = control.build_cacti_fixed_controls_(G, ctls, randomize=True) self.assertEqual(cact.num_controllable_nodes(),4) def test_cacti_form1(self): @@ -185,17 +185,17 @@ def test_cacti_form1(self): G.add_edge_(4,3) ctls = [(0,)] - cact = control.build_cacti(G, forced_ctls=ctls, shuffle=True) + cact = control.build_cacti_fixed_controls_(G, ctls, randomize=True) stems, cycles = cact.stems(), cact.cycles() self.assertEqual(len(stems), 1) self.assertEqual(len(cycles), 1) - self.assertEqual(stems[0].origin(), 0) - self.assertEqual(cycles[0].origin(), 3) + self.assertEqual(stems[0].origin_(), 0) + self.assertEqual(cycles[0].origin_(), 3) self.assertEqual(len(stems[0].buds()), 1) 
self.assertIsNotNone(cycles[0].stem()) - self.assertEqual(cycles[0].dist_node(),1) + self.assertEqual(cycles[0].dist_node_(),1) if __name__ == '__main__': unittest.main() diff --git a/src/zen/util/fiboheap.hpp b/src/zen/util/fiboheap.hpp deleted file mode 100644 index 6b28fba..0000000 --- a/src/zen/util/fiboheap.hpp +++ /dev/null @@ -1,213 +0,0 @@ -using namespace std; -#include -#include - -const double INF = 1e20; - -struct fibnode_t { - unsigned int degree; - int val; - double key; - bool is_marked; - bool inheap; - fibnode_t *next, *prev, *parent,*child; - fibnode_t() {init(); } - void init() { - degree = val = 0; key = 0.0; - is_marked = inheap = false; - next = prev = this; - parent = child = NULL; - } -}; - -class FiboHeap { - fibnode_t *min_node; - int hsize; - - public: - - FiboHeap() { - hsize=0; min_node = NULL; - } - ~FiboHeap() { - // TODO free memory if required - } - - void insert(fibnode_t *node) { - min_node = merge_heaps(min_node, node); - hsize++; - } - - fibnode_t* get_min() { - return min_node; - } - - bool is_empty() { - return min_node == NULL; - } - - int size() { - return hsize; - } - - fibnode_t* delete_min() { - if(is_empty()) - return NULL; - - hsize--; - fibnode_t *min_elem = min_node; - - if(min_node->next == min_node) - min_node = NULL; - else { - min_node->prev->next = min_node->next; - min_node->next->prev = min_node->prev; - min_node = min_node->next; - } - - fibnode_t *curr; - if(min_elem->child != NULL) { - curr = min_elem->child; - - do { - curr->parent = NULL; - curr = curr->next; - } while(curr != min_elem->child); - } - - min_node = merge_heaps(min_node, min_elem->child); - - if(min_node == NULL) - return min_elem; - - curr = min_node; - vector treetable, tovisit; - - while(tovisit.empty() || curr != min_node) { - tovisit.push_back(curr); - curr = curr->next; - } - - for(vector::iterator it = tovisit.begin(); it != tovisit.end(); it++) { - curr = *it; - - while (true) { - while(curr->degree >= treetable.size()) - treetable.push_back(NULL); - - if(treetable[curr->degree] == NULL) { - treetable[curr->degree] = curr; - break; - } - - fibnode_t *other = treetable[curr->degree]; - treetable[curr->degree] = NULL; - - fibnode_t *tmin = other->key < curr->key ? other : curr; - fibnode_t *tmax = other->key < curr->key ? 
curr : other; - - tmax->next->prev = tmax->prev; - tmax->prev->next = tmax->next; - - tmax->next = tmax->prev = tmax; - tmin->child = merge_heaps(tmin->child, tmax); - - tmax->parent = tmin; - tmax->is_marked = false; - tmin->degree += 1; - - curr = tmin; - } - - if(curr->key <= min_node->key) - min_node = curr; - } - - return min_elem; - } - - - void decrease_key(fibnode_t *node, double new_key) { - if(new_key > node->key) - return; - - node->key = new_key; - - if(node->parent != NULL && node->key <= node->parent->key) - cut_node(node); - - if(node->key <= min_node->key) - min_node = node; - - } - void delete_node(fibnode_t *node) { - decrease_key(node, -INF); - delete_min(); - } - - void cut_node(fibnode_t *node) { - node->is_marked = false; - if(node->parent == NULL) - return; - - if(node->next != node) { - node->next->prev = node->prev; - node->prev->next = node->next; - } - - if(node->parent->child == node) { - if(node->next != node) - node->parent->child = node->next; - else - node->parent->child = NULL; - } - - (node->parent->degree)--; - - node->prev = node->next = node; - min_node = merge_heaps(min_node, node); - - if(node->parent->is_marked) - cut_node(node->parent); - else - node->parent->is_marked = true; - - node->parent = NULL; - - } - - - FiboHeap* meld_heaps(FiboHeap *one,FiboHeap *two) { - FiboHeap *result = new FiboHeap(); - - result->min_node = merge_heaps(one->min_node, two->min_node); - result->hsize = one->hsize + two->hsize; - - one->hsize = two->hsize = 0; - one->min_node = two->min_node = NULL; - - return result; - } - - - fibnode_t* merge_heaps(fibnode_t* one, fibnode_t* two) { - if(one == NULL && two == NULL) - return NULL; - if(one == NULL && two != NULL) - return two; - if(one != NULL && two == NULL) - return one; - - fibnode_t *one_next = one->next; - one->next = two->next; - one->next->prev = one; - two->next = one_next; - two->next->prev = two; - - return one->key < two->key ? 
one : two; - } - - -}; - - diff --git a/src/zen/util/max_weight_bmatching.pyx b/src/zen/util/max_weight_bmatching.pyx deleted file mode 100644 index 5366351..0000000 --- a/src/zen/util/max_weight_bmatching.pyx +++ /dev/null @@ -1,231 +0,0 @@ -#cython: boundscheck=False, wraparound=False, nonecheck=False, infer_types=True - -import zen, numpy as np -import networkx as nx -import matplotlib.pyplot as plt -from time import time -from os import getpid -from numpy.random import randint - -from libcpp.vector cimport vector -from libc.stdlib cimport malloc, free -from cython.operator cimport dereference as dref -from cython.operator cimport preincrement as incr - - -cdef extern from "max_weight_matching.hpp": - cdef struct edge_t: - int src, tgt - double w - edge_t(int, int, double) - cdef void mwb_matching(vector[vector[int]] &G, vector[edge_t] &E, \ - vector[int] &U, vector[int] &V, vector[int] &M) - - - -def control_rooted_max_weight_matching(Gi,controls, doshuffle=False, indcyc=False): - cdef: - vector[int] *U - vector[int] *V - vector[edge_t] *E - vector[vector[int]] *G - vector[int].iterator it - vector[edge_t].iterator ite - unsigned int u,v,N,t,x,y,r,s,e - double w,MAXW - edge_t temp_edge - vector[int] *M - - U = new vector[int]() - V = new vector[int]() - M = new vector[int]() - E = new vector[edge_t]() - - if controls is None: - controls = [] - node2uv, uv2node = {}, {} - - np.random.seed(int(time()+getpid()*np.random.random()*1000)) - - #remove non reachable nodes - if controls: - vis = set() - - def dfs(r): - vis.add(r) - for v in Gi.out_neighbors_(r): - if v not in vis: - dfs(v) - - for ctl in controls: - for driver in ctl: - if driver not in vis: - dfs(driver) - else: - vis = set(Gi.nodes_()) - - - # first transform the graph itself - N = 0 - #for t in vis: - for t in Gi.nodes_iter_(): - if indcyc or t in vis: - u, v = 2*N, 2*N+1 - node2uv[t] = N - uv2node[N] = t - N += 1 - U.push_back(u) - V.push_back(v) - if doshuffle: - x, y = randint(U.size()), U.size()-1 - s = dref(U)[x] - dref(U)[x] = dref(U)[y] - dref(U)[y] = s - x, y = randint(V.size()), V.size()-1 - s = dref(V)[x] - dref(V)[x] = dref(V)[y] - dref(V)[y] = s - - - MAXW = sum(len(ctl) for ctl in controls) + Gi.size() + 100.0 - - # add the edges - for e in Gi.edges_iter_(): - x, y = Gi.endpoints_(e) - #if x in vis and y in vis: - if indcyc or (x in vis and y in vis): - u,v = 2*node2uv[x], 2*node2uv[y]+1 - temp_edge.src = u - temp_edge.tgt = v - temp_edge.w = MAXW + 1.0 - E.push_back(temp_edge) - - - # add control nodes and forward edges with weight 1 - START_CNODE = N - for ctl in controls: - cnode = N - N += 1 - #if len(ctl) > 0: - #d = ctl[0] - for d in ctl: - u, v = 2*cnode, 2*node2uv[d]+1 - temp_edge.src = u - temp_edge.tgt = v - temp_edge.w = MAXW + 1.0 - E.push_back(temp_edge) - - for x in vis: - u, v = 2*node2uv[x], 2*cnode+1 - temp_edge.src = u - temp_edge.tgt = v - temp_edge.w = MAXW - E.push_back(temp_edge) - - u, v = 2*cnode, 2*cnode+1 - U.push_back(u) - V.push_back(v) - if doshuffle: - x, y = randint(U.size()), U.size()-1 - s = dref(U)[x] - dref(U)[x] = dref(U)[y] - dref(U)[y] = s - x, y = randint(V.size()), V.size()-1 - s = dref(V)[x] - dref(V)[x] = dref(V)[y] - dref(V)[y] = s - - - # add self loops with weight 0 - for s in xrange(N): - if s >= START_CNODE or not Gi.has_edge_(uv2node[s],uv2node[s]): - u, v = 2*s, 2*s+1 - temp_edge.src = u - temp_edge.tgt = v - temp_edge.w = MAXW - E.push_back(temp_edge) - - G = new vector[vector[int]](2*N, vector[int]()) - - for e in xrange(E.size()): - u,v,w = dref(E)[e].src, 
dref(E)[e].tgt, dref(E)[e].w - assert u%2==0 and v%2==1 - dref(G)[u].push_back(e) - dref(G)[v].push_back(e) - if doshuffle: - x, y = randint(dref(G)[u].size()), dref(G)[u].size()-1 - s = dref(G)[u][x] - dref(G)[u][x] = dref(G)[u][y] - dref(G)[u][y] = s - x, y = randint(dref(G)[v].size()), dref(G)[v].size()-1 - s = dref(G)[v][x] - dref(G)[v][x] = dref(G)[v][y] - dref(G)[v][y] = s - - ##### - # run the weighted bipartite matching - - mwb_matching(dref(G),dref(E),dref(U),dref(V),dref(M)) - - result, roots = [], [] - num_matched = 0 - for x in xrange(M.size()): - e = dref(M)[x] - u,v,w = dref(E)[e].src, dref(E)[e].tgt, dref(E)[e].w - assert u%2==0 and v%2==1 - if w > MAXW: - num_matched += 1 - if (u/2 < START_CNODE): - result.append(Gi.edge_idx_(uv2node[u/2],uv2node[v/2])) - else: - roots.append(uv2node[v/2]) - - - - # free memory - del G, U, V, E, M - - return num_matched, result, roots - - -if __name__ == '__main__': - - print 'Hello World' - - ''' - print 'MAXW = ', MAXW - print 'num_matched = ', num_matched - print 'U nodes--------' - it = U.begin() - while it != U.end(): - print dref(it) - incr(it) - - print 'V nodes--------' - it = V.begin() - while it != V.end(): - print dref(it) - incr(it) - - - ite = E.begin() - while ite != E.end(): - u,v,w = dref(ite).src, dref(ite).tgt, dref(ite).w - incr(ite) - print u/2,v/2,w - - for i in xrange(G.size()): - print i/2 ,' ----> ', - it = dref(G)[i].begin() - while it != dref(G)[i].end(): - u,v,w = dref(E)[dref(it)].src,dref(E)[dref(it)].tgt,dref(E)[dref(it)].w - incr(it) - if u == i: - print (u/2,v/2,w),' ;; ', - - print '' - - print 'result = ', [Gi.endpoints_(e) for e in result] - ''' - - From b1008db8b326ad0e1de2e12dc6bc23b42e8f22ee Mon Sep 17 00:00:00 2001 From: Deven Date: Fri, 13 Mar 2015 12:44:08 -0400 Subject: [PATCH 5/7] Updated code to fix issues discussed in the review doc --- src/zen/control/cacti.py | 86 +++++++++++++++++++++++++++------------- 1 file changed, 59 insertions(+), 27 deletions(-) diff --git a/src/zen/control/cacti.py b/src/zen/control/cacti.py index e40a377..7eaf12b 100644 --- a/src/zen/control/cacti.py +++ b/src/zen/control/cacti.py @@ -19,8 +19,7 @@ class Stem: def __init__(self,node_seq): """ - Creates a new :py:class:`Stem` from a sequence of nodes given by - ``node_seq``. + Creates a new :py:class:`Stem` from a sequence of nodes given by ``node_seq``. """ self._node_dict = OrderedDict.fromkeys(node_seq) @@ -42,9 +41,6 @@ def __iter__(self): """ return self._node_dict.iterkeys() - def _clear(self): - self._node_dict.clear() - def origin_(self): """ Return the origin or starting node (index) of the stem @@ -61,9 +57,20 @@ def terminus_(self): return x def _extend(self, nodes): + """ + Extend stem by adding nodes at the end. Needed when a bud is attached + at terminus of the stem, in which case the bud is broken down and stem + is extended. + """ for u in nodes: self._node_dict[u] = None + def _set_bud(self, x, bud): + """ + Private method to add bud to a node x in this stem. + """ + self._node_dict[x] = bud + def length_with_buds(self): """ Returns length of stem including the buds attached to it. 
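For reference, the Cython matcher relocated earlier in this series builds its bipartite instance by splitting every node i into a left copy 2*i and a right copy 2*i+1, and weighting edges so that real graph edges and control-to-driver edges are always preferred over the filler edges. A hedged pure-Python sketch of that weighting scheme (it omits the reachability pruning, the shuffling, and the existing-self-loop check; bipartite_transform is an illustrative name, not part of the patch):

    def bipartite_transform(edges, n_nodes, controls, MAXW):
        """edges: (u,v) node-index pairs; controls: list of node-index tuples."""
        E = []
        for u, v in edges:                        # original edges: weight MAXW + 1
            E.append((2*u, 2*v + 1, MAXW + 1.0))
        cnode = n_nodes
        for ctl in controls:
            for d in ctl:                         # control -> driver edges: MAXW + 1
                E.append((2*cnode, 2*d + 1, MAXW + 1.0))
            for x in range(n_nodes):              # node -> control back edges: MAXW
                E.append((2*x, 2*cnode + 1, MAXW))
            cnode += 1
        for s in range(cnode):                    # self loops keep a perfect matching feasible
            E.append((2*s, 2*s + 1, MAXW))
        return E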
@@ -74,15 +81,6 @@ def length_with_buds(self): l += len(bud) return l - def _add_bud(self,dnode,cycle,dnode_target): - cycle._reorder_origin(dnode_target) - if dnode == self.terminus_(): - self._extend(cycle) - return False - else: - self._node_dict[dnode] = cycle - cycle._set_stem(self, dnode) - return True def buds(self): """ @@ -128,17 +126,22 @@ def __iter__(self): """ return self._node_dict.iterkeys() - def _clear(self): - self._node_dict.clear() - def origin_(self): """ - A starting node of this cycle (in case of bud this is the node where - distinguished edge is pointing to) + A starting node of this cycle, in case of bud this is the node where + distinguished edge is pointing to. In case of non-bud cycle, returns + None """ + if not self.is_bud(): + return None + return next(self._node_dict.iterkeys(), None) def _reorder_origin(self, x): + """ + Private method to make target node of the distinguished edge as the + origin of this cycle when this cycle is a bud. + """ if x not in self._node_dict.iterkeys(): return toremove = list(it.takewhile(lambda a: a!=x, self._node_dict.iterkeys())) @@ -148,6 +151,10 @@ def _reorder_origin(self, x): self._node_dict[k] = t def _set_stem(self,stem,dnode): + """ + Private method to set stem and distinguished node of this cycle when + this cycle is a bud. + """ self._stem = stem self._dnode = dnode @@ -156,6 +163,9 @@ def stem(self): Return stem to which this cycle is attached if it is a bud, otherwise return None """ + if not self.is_bud(): + return None + return self._stem def dist_node_(self): @@ -163,8 +173,22 @@ def dist_node_(self): Return distinguished node (index) of the stem to which this cycle is attached if it is a bud, otherwise returh None """ + if not self.is_bud(): + return None + return self._dnode + def dist_edge_(self): + """ + Return distinguished edge (u,v) (node indices) where u is in the stem + and v is in this cycle/bud of the stem. + Returns None if this cycle is not a bud. + """ + if not self.is_bud(): + return None + + return (self._dnode, self.origin_()) + def is_bud(self): """ Returns True if this cycle is a bud @@ -290,6 +314,9 @@ class Cacti: """ def __init__(self,G): + """ + Private constructor. Should not be used directly. build_cacti methods should be used, which return Cacti object. 
+ """ self._G = G self._stems = [] @@ -325,12 +352,17 @@ def _build_cacti_from_stemscycles(self): for stem in self._stems: for u in stem: for v in self._G.out_neighbors_(u): - cyc = [c for c in self._cycles if not c.is_bud() and v in c] - if cyc: - if not stem._add_bud(u,cyc[0],v): - cyc[0]._clear() - - self._cycles = [c for c in self._cycles if len(c) > 0] + for icyc,cyc in enumerate(self._cycles): + if not cyc.is_bud() and v in cyc: + cyc._reorder_origin(v) + if u == stem.terminus_(): + stem._extend(cyc) + self._cycles[icyc] = None + else: + stem._set_bud(u, cyc) + cyc._set_stem(stem, u) + + self._cycles = [c for c in self._cycles if c is not None and len(c) > 0] controls = [ [stem.origin_()] for stem in self._stems ] @@ -338,11 +370,11 @@ def _build_cacti_from_stemscycles(self): idx = 0 for cyc in self._cycles: if not cyc.is_bud(): - controls[idx].append(cyc.origin_()) + controls[idx].append(list(cyc)[0]) idx = (idx+1) % len(controls) controls = [tuple(a) for a in controls] else: - controls = [ tuple(cyc.origin_() for cyc in self._cycles) ] + controls = [ tuple(list(cyc)[0] for cyc in self._cycles) ] self._controls = controls From 95deb2d9a5de4649b4de13b1d9db4888912ba88f Mon Sep 17 00:00:00 2001 From: Deven Date: Sat, 14 Mar 2015 06:29:14 -0400 Subject: [PATCH 6/7] synced with upstream master --- src/zen/control/__init__.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/zen/control/__init__.py b/src/zen/control/__init__.py index e6f18af..e65946c 100644 --- a/src/zen/control/__init__.py +++ b/src/zen/control/__init__.py @@ -38,3 +38,5 @@ from profile import profile from reachability import num_min_controls #, kalman_generic_rank from pplot import profile_plot, profile_heatmap, profile_heatmap_weighted +from cacti import build_cacti, build_cacti_fixed_controls_ + From 0483e3258f62d1219b69c39c64a1dc38e61c7a22 Mon Sep 17 00:00:00 2001 From: Deven Date: Thu, 19 Mar 2015 16:08:04 -0400 Subject: [PATCH 7/7] added support for foo and foo_ versions of methods --- src/zen/control/cacti.py | 115 ++++++++++++++++++++++++++++++--------- 1 file changed, 89 insertions(+), 26 deletions(-) diff --git a/src/zen/control/cacti.py b/src/zen/control/cacti.py index 7eaf12b..9878f3b 100644 --- a/src/zen/control/cacti.py +++ b/src/zen/control/cacti.py @@ -17,11 +17,12 @@ class Stem: An instance of :py:class:`Stem` represents a single stem (and any associated buds) in a cacti structure. """ - def __init__(self,node_seq): + def __init__(self,Gi,node_seq): """ Creates a new :py:class:`Stem` from a sequence of nodes given by ``node_seq``. """ self._node_dict = OrderedDict.fromkeys(node_seq) + self._G = Gi def __contains__(self,x): """ @@ -41,12 +42,24 @@ def __iter__(self): """ return self._node_dict.iterkeys() + def origin(self): + """ + Return the origin or starting node (object) of the stem + """ + return self._G.node_object(self.origin_()) + def origin_(self): """ Return the origin or starting node (index) of the stem """ return next(self._node_dict.iterkeys(), None) + def terminus(self): + """ + Returns the terminus or end node (object) of the stem. + """ + return self._G.node_object(self.terminus_()) + def terminus_(self): """ Returns the terminus or end node (index) of the stem. @@ -99,12 +112,13 @@ class Cycle: :py:class:Cycle can either be a bud that is connected to a stem by a distinguished node or an independent cycle (no associated stem). """ - def __init__(self,node_seq): + def __init__(self, Gi, node_seq): """ Creates a new :py:class:`Cycle` from a sequence of nodes given by ``node_seq``. 
""" self._node_dict = OrderedDict.fromkeys(node_seq) + self._G = Gi self._stem = None self._dnode = None @@ -126,16 +140,23 @@ def __iter__(self): """ return self._node_dict.iterkeys() + def origin(self): + """ + A starting node (object) of this cycle, in case of bud this is the node where + distinguished edge is pointing to. In case of non-bud cycle, returns None + """ + res = self.origin_() + return None if res == -1 else self._G.node_object(res) + def origin_(self): """ - A starting node of this cycle, in case of bud this is the node where - distinguished edge is pointing to. In case of non-bud cycle, returns - None + A starting node (index) of this cycle, in case of bud this is the node where + distinguished edge is pointing to. In case of non-bud cycle, returns -1 """ if not self.is_bud(): - return None + return -1 - return next(self._node_dict.iterkeys(), None) + return next(self._node_dict.iterkeys(), -1) def _reorder_origin(self, x): """ @@ -168,26 +189,41 @@ def stem(self): return self._stem + def dist_node(self): + """ + Return distinguished node (object) of the stem to which this cycle is attached + if it is a bud, otherwise return None + """ + res = self.dist_node_() + return None if res == -1 else self._G.node_object(res) + def dist_node_(self): """ Return distinguished node (index) of the stem to which this cycle is attached - if it is a bud, otherwise returh None + if it is a bud, otherwise return -1 """ if not self.is_bud(): - return None + return -1 return self._dnode - def dist_edge_(self): + def dist_edge(self): """ - Return distinguished edge (u,v) (node indices) where u is in the stem - and v is in this cycle/bud of the stem. + Return distinguished edge (u,v) (node objects) where u is in the stem and v is in this cycle/bud of the stem. Returns None if this cycle is not a bud. """ + res = self.dist_edge_() + return None if res == -1 else self._G.endpoints(res) + + def dist_edge_(self): + """ + Return distinguished edge e (edge index) where src(e) is in the stem and tgt(e) is in this cycle/bud of the stem. + Returns -1 if this cycle is not a bud. + """ if not self.is_bud(): - return None + return -1 - return (self._dnode, self.origin_()) + return self._G.edge_idx_(self._dnode, self.origin_()) def is_bud(self): """ @@ -196,7 +232,7 @@ def is_bud(self): return self._stem != None # helper function to construct stems and cycles from a given matching -def _stems_cycs_from_matching(matching, roots=None, origins=[]): +def _stems_cycs_from_matching(Gi, matching, roots=None, origins=[]): outmap = {a:b for a,b in matching} vis = set() indegmap = {} @@ -209,14 +245,14 @@ def _stems_cycs_from_matching(matching, roots=None, origins=[]): def recur(z,cur,stems,cycs): if z in vis: - cycs.append(Cycle(cur)) + cycs.append(Cycle(Gi, cur)) return vis.add(z) cur.append(z) if z not in outmap: - stems.append(Stem(cur)) + stems.append(Stem(Gi, cur)) return recur(outmap[z],cur,stems,cycs) @@ -249,6 +285,9 @@ def build_cacti_fixed_controls(G, fixed_ctls, **kwargs): Returns a py:class:`Cacti` object """ + if fixed_ctls is None: + raise ValueError, "fixed_ctls cannot be None." 
+
+	ctls = [tuple(G.node_idx(a) for a in c) for c in fixed_ctls]
 	return build_cacti_fixed_controls_(G, ctls, **kwargs)
 
 
@@ -275,15 +314,15 @@ def build_cacti_fixed_controls_(G, fixed_ctls, **kwargs):
 			included in the matching/cacti
 
 	**Raises**:
-		``ZenException``: if fixed_ctls is None
+		``ValueError``: if fixed_ctls is None
 
 	Returns a py:class:`Cacti` object
 	"""
-	cact = Cacti(G)
-
 	if fixed_ctls is None:
-		raise ZenException, "fixed_ctls cannot be None."
+		raise ValueError, "fixed_ctls cannot be None."
+
+	cact = Cacti(G)
 
 	cact._fixed_control_case(fixed_ctls, **kwargs)
 
@@ -391,9 +430,9 @@ def _min_controls_case(self):
 		G = self._G
 		matching = set(zen.matching.maximum_matching_(G))
 		matching = [G.endpoints_(eidx) for eidx in matching]
-		stems, cycles = _stems_cycs_from_matching(matching)
+		stems, cycles = _stems_cycs_from_matching(G, matching)
 		for u in set(G.nodes_iter_()).difference(set(x for y in matching for x in y)) :
-			stems.append(Stem([u]))
+			stems.append(Stem(G, [u]))
 		self._matching, self._stems, self._cycles = matching, stems, cycles
 
 
@@ -405,10 +444,22 @@ def _fixed_control_case(self, fixed_ctls, **kwargs):
 		origins = [a for b in fixed_ctls for a in b]
 		matching,roots = control_rooted_max_weight_matching_(G, **kwargs)[1:3]
 		matching = [G.endpoints_(eidx) for eidx in matching]
-		self._stems, self._cycles = _stems_cycs_from_matching(matching, roots, origins)
+		self._stems, self._cycles = _stems_cycs_from_matching(G, matching, roots, origins)
 		self._matching = matching
 
 
+	def controls(self):
+		"""
+		Returns a list of nodes (objects) where controls should be attached.
+		In case of unweighted matching, it is the minimum number
+		of controls required for full control of the network. In case of
+		weighted matching (when a fixed set of controls is given), this method
+		returns the controls that are sufficient for controlling the maximum
+		possible nodes of the network.
+		"""
+		ctls = self.controls_()
+		return [tuple(self._G.node_object(x) for x in t) for t in ctls]
+
 	def controls_(self):
 		"""
 		Returns list of nodes (indices) where controls should be attached.
@@ -420,6 +471,12 @@ def controls_(self):
 		"""
 		return list(self._controls)
 
+	def controllable_nodes(self):
+		"""
+		Returns the set of node objects that are controllable
+		"""
+		return set([self._G.node_object(a) for a in self._controllable_nodes])
+
 	def controllable_nodes_(self):
 		"""
 		Returns the set of node indices that are controllable
@@ -432,9 +489,15 @@ def num_controllable_nodes(self):
 		"""
 		return len(self._controllable_nodes)
 
+	def matching(self):
+		"""
+		Returns a matching in the form of a list of edges (u,v), where u and v are node objects, as calculated by the maximum matching algorithm (unweighted or weighted, as the case may be)
+		"""
+		return [self._G.endpoints(e) for e in self.matching_()]
+
 	def matching_(self):
 		"""
-		Returns a matching (a list of edges; an edge is a tuple of node indices (u,v)) as calculated by the maximum matching algorithm (unweighted or weighted as the case maybe)
+		Returns a matching as a list of edge indices, as calculated by the maximum matching algorithm (unweighted or weighted, as the case may be)
 		"""
-		return list(self._matching)
+		return [self._G.edge_idx_(u,v) for u,v in self._matching]
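Taken together, this final commit leaves every accessor with an index-based form (trailing underscore) and an object-based form. A closing hedged sketch of the pairing, using an illustrative graph and assuming add_edge creates missing nodes:

    import zen
    from zen import control

    G = zen.DiGraph()
    G.add_edge('a', 'b')
    G.add_edge('b', 'c')

    cact = control.build_cacti(G)
    stem = cact.stems()[0]
    assert stem.origin() == 'a'                            # node object
    assert stem.origin_() == G.node_idx('a')               # node index
    assert cact.controls() == [('a',)]                     # objects ...
    assert cact.controls_() == [(G.node_idx('a'),)]        # ... and indices
    assert len(cact.matching()) == len(cact.matching_())   # (u,v) endpoints vs edge indices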