Added basis for incremental matching (incremental.py). Currently only used (as a test case) by MergePR.

Re-added decompositionView.py (missing from SVN tree since revision 8).
This commit is contained in:
kwikrick 2010-07-01 21:09:51 +00:00
parent 2683d68d44
commit 5288253718
6 changed files with 951 additions and 240 deletions

View File

@@ -8,11 +8,16 @@ This distribution consists of:
- a graphical geometric constraint solving workbench (solvergui)
- a python wrapper for qhull (Delny)
Delny is included in this distribution, but not created or maintained
by the authors of GeoSolver.
INSTALLATION INSTRUCTIONS
-------------------------
geosolver package
-----------------
geosolver module and test script
---------------------------------
You'll need Python 2.5 or higher (but it's probably not compatible
with Python 3)
@@ -22,8 +27,9 @@ Copy directory 'geosolver' to the python library
geosolver to PYTHONPATH (e.g. if geosolver is in /home/user/python, then
add /home/user/python to the python search path)
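For example, a script could make the package importable without changing the environment (a minimal editor's sketch, using the example path above; adjust the path to wherever you placed the geosolver directory):

import sys
sys.path.append("/home/user/python")  # the directory that contains the 'geosolver' package
import geosolver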
solvergui application
---------------------
workbench
---------
You'll need to install the geosolver package, as described above.
@@ -31,29 +37,40 @@ In addition, you'll need to install Delny, which is
a python wrapper to the qhull library, which is
used to compute convex hulls.
Delny is included in this distribution, but not otherwise related.
See the README in the Delny subdirectory for installation instructions.
Note that you will need the development version of qhull (libqhull5-dev)
and the python development header files (python-dev) before installing Delny.
You will also need to have the following packages installed.
These are not included in this distribution, but are
available for most operating systems (e.g. as optional
packages in most Linux distros, downloads for Windows)
- qhull (libqhull5) -- see http://www.qhull.org/
- qhull (development version) -- see http://www.qhull.org/
- pyQt4 -- see http://qt.nokia.com/
- pyOpenGL -- see http://pyopengl.sourceforge.net/
- Numpy -- http://numpy.scipy.org/
+ LAPACK extensions
On Debian and Ubuntu systems, installing the following
packages will satisfy all necessary dependencies.
- python-dev
- libqhull5-dev
- python-qt4-opengl
- python-numpy-ext
RUNNING
-------
To run geosolver tests:
>cd solvertest
>cd test
>python test.py
To run solvergui:
>cd solvergui
>cd workbench
>python main.py

View File

@@ -2,11 +2,6 @@
This module provides basic functionality for
ClusterSolver2D and ClusterSolver3D.
The solver finds a generic solution
for problems formulated by Clusters. The generic solution
is a directed acyclic graph of Clusters and Methods. Particular problems
and solutions are represented by a Configuration for each cluster.
"""
from graph import Graph
@@ -18,213 +13,7 @@ from cluster import *
from configuration import Configuration
from gmatch import gmatch
from method import OrMethod
# -----------------------------------------------------------
# ----------Method classes used by ClusterSolver -------------
# -----------------------------------------------------------
class ClusterMethod(MultiMethod):
"""A method that determines a single output cluster from a set of input clusters.
Subclasses should provide a static class variable 'patterngraph', which is a graph,
describing the pattern that is matched by the solver and used to instantiate the Method.
(see function pattern2graph)
Alternatively, subclasses may implement the static class method 'handcoded_match', which should
return a list of matches (given a new cluster and all connected clusters).
Subclasses should implement function _multi_execute such that the output cluster satisfies all
the constraints in the input clusters.
instance vars:
overconstrained - True iff the merge is locally overconstrained
consistent - True iff the merge is generically consistent
(these variables are automatically set by the solver for debugging purposes)
"""
def __init__(self):
self.overconstrained = None
self.consistent = None
MultiMethod.__init__(self)
def prototype_constraints(self):
"""Return a list of SelectionConstraint"""
return []
def status_str(self):
s = ""
if self.consistent == True:
s += "consistent "
elif self.consistent == False:
s += "inconsistent "
if self.overconstrained == True:
s += "overconstrained"
elif self.overconstrained == False:
s += "well-constrained"
return s
def input_clusters(self):
return filter(lambda var: isinstance(var, Cluster), self.inputs())
class PrototypeMethod(MultiMethod):
"""A PrototypeMethod selects those solutions of a cluster for which
the prototype and the solution satisfy the same constraints.
"""
def __init__(self, incluster, selclusters, outcluster, constraints, enabled):
self._inputs = [incluster]+selclusters+[enabled]
self._outputs = [outcluster]
self._constraints = constraints
MultiMethod.__init__(self)
def multi_execute(self, inmap):
diag_print("PrototypeMethod.multi_execute called","clmethods")
incluster = self._inputs[0]
selclusters = []
for i in range(1,len(self._inputs)-1):
selclusters.append(self._inputs[i])
enabledvar = self._inputs[-1]
diag_print("input cluster"+str(incluster), "PrototypeMethod.multi_execute")
diag_print("selection clusters"+str(selclusters), "PrototypeMethod.multi_execute")
diag_print("enabledvar"+str(enabledvar), "PrototypeMethod.multi_execute")
# get confs/values
enabledval = inmap[enabledvar]
inconf = inmap[incluster]
selmap = {}
for cluster in selclusters:
conf = inmap[cluster]
assert len(conf.vars()) == 1
var = conf.vars()[0]
selmap[var] = conf.map[var]
if len(selmap) == 0:
selconf = {}
else:
selconf = Configuration(selmap)
diag_print("input configuration = "+str(inconf), "PrototypeMethod.multi_execute")
diag_print("selection configurations = "+str(selconf), "PrototypeMethod.multi_execute")
diag_print("enabled value = "+str(enabledval), "PrototypeMethod.multi_execute")
# do test
if enabledval == True:
sat = True
for con in self._constraints:
satcon = con.satisfied(inconf.map) == con.satisfied(selconf.map)
diag_print("constraint = "+str(con), "PrototypeMethod.multi_execute")
diag_print("constraint satisfied? "+str(satcon), "PrototypeMethod.multi_execute")
sat = sat and satcon
diag_print("prototype satisfied? "+str(sat), "PrototypeMethod.multi_execute")
if sat:
return [inconf]
else:
return []
else:
return [inconf]
def __str__(self):
return "PrototypeMethod#%d(%s->%s)"%(id(self),str(self._inputs[0]), str(self._outputs[0]))
class SelectionMethod(MultiMethod):
"""A SelectionMethod selects those solutions of a cluster for which
all selectionconstraints are satisfied.
"""
def __init__(self, incluster, outcluster):
self._inputs = [incluster]
self._outputs = [outcluster]
self._constraints = []
MultiMethod.__init__(self)
def add_constraint(self, con):
self._constraints.append(con)
def rem_constraint(self, con):
self._constraints.remove(con)
def iter_constraints(self):
return iter(self._constraints)
def multi_execute(self, inmap):
diag_print("SelectionMethod.multi_execute called","SelectionMethod.multi_execute")
incluster = self._inputs[0]
inconf = inmap[incluster]
diag_print("input configuration = "+str(inconf), "SelectionMethod.multi_execute")
sat = True
for con in self._constraints:
diag_print("constraint = "+str(con), "SelectionMethod.multi_execute")
satcon = con.satisfied(inconf.map)
diag_print("satisfied = "+str(satcon), "SelectionMethod.multi_execute")
sat = sat and satcon
diag_print("all satisfied = "+str(sat), "SelectionMethod.multi_execute")
if sat:
return [inconf]
else:
return []
def __str__(self):
return "SelectionMethod#%d(%s & %s ->%s)"%(id(self),str(self._inputs[0]), str(self._constraints), str(self._outputs[0]))
# --------------------------------------
# helper functions for pattern matching
# --------------------------------------
def pattern2graph(pattern):
"""Convert a pattern to a pattern graph, used before graph based matching.
The pattern is a list of tuples (pattype, patname, patvars), where
pattype is one of "point", "distance", "rigid", "balloon" or "hedgehog"
patname is a string, which is the name of a variable which will be associated with a cluster
patvars is a list of strings, where each string is a variable to be associated with a point variable
If pattype is point or distance, then the length of the cluster is fixed to 1 or 2 points.
Otherwise, clusters with any number of variables are matched.
If pattype is hedgehog, then the first variable in patvars is the center variable.
"""
pgraph = Graph()
pgraph.add_vertex("point")
pgraph.add_vertex("distance")
pgraph.add_vertex("rigid")
pgraph.add_vertex("balloon")
pgraph.add_vertex("hedgehog")
for clpattern in pattern:
(pattype, patname, patvars) = clpattern
pgraph.add_edge(pattype, patname)
for var in patvars:
pgraph.add_edge(patname, var)
if pattype == "hedgehog":
pgraph.add_edge("cvar"+"#"+patname, patvars[0])
pgraph.add_edge(patname, "cvar"+"#"+patname)
#diag_print("pattern graph:"+str(pgraph),"match");
return pgraph
def reference2graph(nlet):
"""Convert a set of (supposedly connected) clusters to a reference graph, used before graph-based matching."""
rgraph = Graph()
rgraph.add_vertex("point")
rgraph.add_vertex("distance")
rgraph.add_vertex("rigid")
rgraph.add_vertex("balloon")
rgraph.add_vertex("hedgehog")
for cluster in nlet:
for var in cluster.vars:
rgraph.add_edge(cluster, var)
if isinstance(cluster, Rigid):
rgraph.add_edge("rigid", cluster)
if len(cluster.vars) == 1:
rgraph.add_edge("point", cluster)
elif len(cluster.vars) == 2:
rgraph.add_edge("distance", cluster)
if isinstance(cluster, Balloon):
rgraph.add_edge("balloon", cluster)
if isinstance(cluster, Hedgehog):
rgraph.add_edge("hedgehog", cluster)
rgraph.add_edge("cvar"+"#"+str(id(cluster)), cluster.cvar)
rgraph.add_edge(cluster, "cvar"+"#"+str(id(cluster)))
#diag_print("reference graph:"+str(rgraph),"match");
return rgraph
def rootname(cluster):
"""returns the name of the root variable associated with the name of a cluster variable"""
return "root#"+str(id(cluster))
from incremental import MutableSet,Union
# --------------------------------------------------
# ---------- ClusterSolver main class --------------
@@ -254,13 +43,14 @@ class ClusterSolver(Notifier):
# init superclasses
Notifier.__init__(self)
# store arguments
self.methodclasses = methodclasses
self.pattern_methods = filter(lambda m: hasattr(m,"patterngraph"),self.methodclasses)
self.handcoded_methods = filter(lambda m: hasattr(m,"handcoded_match"),self.methodclasses)
self._methodclasses = methodclasses
self._pattern_methods = filter(lambda m: hasattr(m,"patterngraph"),self._methodclasses)
self._handcoded_methods = filter(lambda m: hasattr(m,"handcoded_match"),self._methodclasses)
self._incremental_methods = filter(lambda m: hasattr(m,"incremental_matcher"),self._methodclasses)
# init instance vars
self._graph = Graph()
#self._graph.add_vertex("_root")
self._graph.add_vertex("_toplevel")
# self._graph.add_vertex("_toplevel")
self._graph.add_vertex("_variables")
self._graph.add_vertex("_clusters")
self._new = []
@@ -273,6 +63,12 @@ class ClusterSolver(Notifier):
self._selection_method = {}
# store root cluster (will be assigned when first cluster added)
self._rootcluster = None
# an incrementally updated toplevel set
self._toplevel = MutableSet()
# incrementally updated set of applicable methods
self._incremental_matchers = map(lambda method: method.incremental_matcher(self), self._incremental_methods)
print "incremental matchers:",self._incremental_matchers
self._applicable_methods = Union(*self._incremental_matchers)
# ------- methods for setting up constraint problems ------------
@@ -349,12 +145,14 @@ class ClusterSolver(Notifier):
return self._graph.outgoing_vertices("_methods")
def top_level(self):
"""get top-level clusters"""
return self._graph.outgoing_vertices("_toplevel")
"""return IncrementalSet of top-level clusters"""
return self._toplevel
# return self._graph.outgoing_vertices("_toplevel")
def is_top_level(self, object):
"""Returns True iff given cluster is a top-level cluster"""
return self._graph.has_edge("_toplevel",object)
#return self._graph.has_edge("_toplevel",object)
return object in self._toplevel
def find_dependend(self, object):
"""Return a list of objects that depend on given object directly."""
@@ -395,14 +193,16 @@ class ClusterSolver(Notifier):
l = self._graph.ingoing_vertices(needer)
return filter(lambda x: self._graph.get(x,needer) == "needed_by", l)
def _add_top_level(self, object):
self._graph.add_edge("_toplevel",object)
self._new.append(object)
def _add_top_level(self, cluster):
# self._graph.add_edge("_toplevel",cluster)
self._new.append(cluster)
self._toplevel.add(cluster)
def _rem_top_level(self, object):
self._graph.rem_edge("_toplevel",object)
# self._graph.rem_edge("_toplevel",object)
if object in self._new:
self._new.remove(object)
self._toplevel.remove(object)
def _find_descendend(self,v):
"""find all descendend objects of v (i.e.. directly or indirectly dependend)"""
@@ -433,7 +233,7 @@ class ClusterSolver(Notifier):
diag_print("_add_cluster "+str(newcluster),"clsolver")
# check if not already exists
if self._graph.has_vertex(newcluster):
raise StandardError, "cluster %s already in clsolver"%(str(cluster))
raise StandardError, "cluster %s already in clsolver"%(str(newcluster))
# update graph
self._add_to_group("_clusters", newcluster)
for var in newcluster.vars:
@@ -551,6 +351,14 @@ class ClusterSolver(Notifier):
# --------------
def _process_new(self):
# try incremental matchers
while len(self._applicable_methods) > 0:
method = iter(self._applicable_methods).next()
print "applicable methods:", map(str, self._applicable_methods)
print "found applicable method:", method
self._add_method_complete(method)
# try old style matching
while len(self._new) > 0:
newobject = self._new.pop()
diag_print("search from "+str(newobject), "clsolver")
@@ -573,7 +381,7 @@ class ClusterSolver(Notifier):
diag_print("search: connected clusters="+str(connected),"clsolver3D")
# first try handcoded matching
for methodclass in self.handcoded_methods:
for methodclass in self._handcoded_methods:
diag_print("trying incremental matching for "+str(methodclass), "clsolver3D")
matches = methodclass.handcoded_match(self, newcluster, connected)
if self._try_matches(methodclass, matches):
@@ -590,7 +398,7 @@ class ClusterSolver(Notifier):
and returns True iff successful
"""
refgraph = reference2graph(nlet)
for methodclass in self.pattern_methods:
for methodclass in self._pattern_methods:
diag_print("trying generic pattern matching for "+str(methodclass), "clsolver3D")
matches = gmatch(methodclass.patterngraph, refgraph)
if self._try_matches(methodclass,matches):
@@ -726,6 +534,8 @@ class ClusterSolver(Notifier):
# remove from _new list
if item in self._new:
self._new.remove(item)
# remove from incremental top_level
self._toplevel.remove(item)
# remove from methodgraph
if isinstance(item, Method):
# note: method may have been removed because variable removed
@@ -745,8 +555,6 @@ class ClusterSolver(Notifier):
for cluster in torestore:
if self._graph.has_vertex(cluster):
self._add_top_level(cluster)
# re-solve
self._process_new()
##def _contains_root(self, input_cluster):
@@ -911,3 +719,219 @@ class ClusterSolver(Notifier):
# class ClusterSolver
# -----------------------------------------------------------
# ----------Method classes used by ClusterSolver -------------
# -----------------------------------------------------------
class ClusterMethod(MultiMethod):
"""A method that determines a single output cluster from a set of input clusters.
Subclasses should provide a static class variable 'patterngraph', which is a graph,
describing the pattern that is matched by the solver and used to instantiate the Method.
(see function pattern2graph)
Alternatively, subclasses may implement the static class method 'handcoded_match', which should
return a list of matches (given a new cluster and all connected clusters).
Subclasses should implement function _multi_execute such that the output cluster satisfies all
the constraints in the input clusters.
instance vars:
overconstrained - True iff the merge is locally overconstrained
consistent - True iff the merge is generically consistent
(these variables are automatically set by the solver for debugging purposes)
"""
def __init__(self):
self.overconstrained = None
self.consistent = None
MultiMethod.__init__(self)
def prototype_constraints(self):
"""Return a list of SelectionConstraint"""
return []
def status_str(self):
s = ""
if self.consistent == True:
s += "consistent "
elif self.consistent == False:
s += "inconsistent "
if self.overconstrained == True:
s += "overconstrained"
elif self.overconstrained == False:
s += "well-constrained"
return s
def input_clusters(self):
return filter(lambda var: isinstance(var, Cluster), self.inputs())
def __eq__(self, other):
if self.__class__ == other.__class__:
return self._inputs == other._inputs
else:
return False
def __hash__(self):
return hash(tuple(self._inputs)+tuple([self.__class__]))
class PrototypeMethod(MultiMethod):
"""A PrototypeMethod selects those solutions of a cluster for which
the prototype and the solution satisfy the same constraints.
"""
def __init__(self, incluster, selclusters, outcluster, constraints, enabled):
self._inputs = [incluster]+selclusters+[enabled]
self._outputs = [outcluster]
self._constraints = constraints
MultiMethod.__init__(self)
def multi_execute(self, inmap):
diag_print("PrototypeMethod.multi_execute called","clmethods")
incluster = self._inputs[0]
selclusters = []
for i in range(1,len(self._inputs)-1):
selclusters.append(self._inputs[i])
enabledvar = self._inputs[-1]
diag_print("input cluster"+str(incluster), "PrototypeMethod.multi_execute")
diag_print("selection clusters"+str(selclusters), "PrototypeMethod.multi_execute")
diag_print("enabledvar"+str(enabledvar), "PrototypeMethod.multi_execute")
# get confs/values
enabledval = inmap[enabledvar]
inconf = inmap[incluster]
selmap = {}
for cluster in selclusters:
conf = inmap[cluster]
assert len(conf.vars()) == 1
var = conf.vars()[0]
selmap[var] = conf.map[var]
if len(selmap) == 0:
selconf = {}
else:
selconf = Configuration(selmap)
diag_print("input configuration = "+str(inconf), "PrototypeMethod.multi_execute")
diag_print("selection configurations = "+str(selconf), "PrototypeMethod.multi_execute")
diag_print("enabled value = "+str(enabledval), "PrototypeMethod.multi_execute")
# do test
if enabledval == True:
sat = True
for con in self._constraints:
satcon = con.satisfied(inconf.map) == con.satisfied(selconf.map)
diag_print("constraint = "+str(con), "PrototypeMethod.multi_execute")
diag_print("constraint satisfied? "+str(satcon), "PrototypeMethod.multi_execute")
sat = sat and satcon
diag_print("prototype satisfied? "+str(sat), "PrototypeMethod.multi_execute")
if sat:
return [inconf]
else:
return []
else:
return [inconf]
def __str__(self):
return "PrototypeMethod#%d(%s->%s)"%(id(self),str(self._inputs[0]), str(self._outputs[0]))
class SelectionMethod(MultiMethod):
"""A SelectionMethod selects those solutions of a cluster for which
all selectionconstraints are satisfied.
"""
def __init__(self, incluster, outcluster):
self._inputs = [incluster]
self._outputs = [outcluster]
self._constraints = []
MultiMethod.__init__(self)
def add_constraint(self, con):
self._constraints.append(con)
def rem_constraint(self, con):
self._constraints.remove(con)
def iter_constraints(self):
return iter(self._constraints)
def multi_execute(self, inmap):
diag_print("SelectionMethod.multi_execute called","SelectionMethod.multi_execute")
incluster = self._inputs[0]
inconf = inmap[incluster]
diag_print("input configuration = "+str(inconf), "SelectionMethod.multi_execute")
sat = True
for con in self._constraints:
diag_print("constraint = "+str(con), "SelectionMethod.multi_execute")
satcon = con.satisfied(inconf.map)
diag_print("satisfied = "+str(satcon), "SelectionMethod.multi_execute")
sat = sat and satcon
diag_print("all satisfied = "+str(sat), "SelectionMethod.multi_execute")
if sat:
return [inconf]
else:
return []
def __str__(self):
return "SelectionMethod#%d(%s & %s ->%s)"%(id(self),str(self._inputs[0]), str(self._constraints), str(self._outputs[0]))
# --------------------------------------
# helper functions for pattern matching
# --------------------------------------
def pattern2graph(pattern):
"""Convert a pattern to a pattern graph, used before graph based matching.
The pattern is a list of tuples (pattype, patname, patvars), where
pattype is one of "point", "distance", "rigid", "balloon" or "hedgehog"
patname is a string, which is the name of a variable which will be associated with a cluster
patvars is a list of strings, where each string is a variable to be associated with a point variable
If pattype is point or distance, then the length of the cluster is fixed to 1 or 2 points.
Otherwise, clusters with any number of variables are matched.
If pattype is hedgehog, then the first variable in patvars is the center variable.
"""
pgraph = Graph()
pgraph.add_vertex("point")
pgraph.add_vertex("distance")
pgraph.add_vertex("rigid")
pgraph.add_vertex("balloon")
pgraph.add_vertex("hedgehog")
for clpattern in pattern:
(pattype, patname, patvars) = clpattern
pgraph.add_edge(pattype, patname)
for var in patvars:
pgraph.add_edge(patname, var)
if pattype == "hedgehog":
pgraph.add_edge("cvar"+"#"+patname, patvars[0])
pgraph.add_edge(patname, "cvar"+"#"+patname)
#diag_print("pattern graph:"+str(pgraph),"match");
return pgraph
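To make the tuple format documented above concrete, here is a minimal editor's sketch (the specific pattern is hypothetical, not one used by the solver): a one-point cluster $p and a distance cluster $d sharing the point variable $a.

example_pattern = [
    ("point", "$p", ["$a"]),
    ("distance", "$d", ["$a", "$b"]),
]
example_patterngraph = pattern2graph(example_pattern)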
def reference2graph(nlet):
"""Convert a set of (supposedly connected) clusters to a reference graph, used before graph-based matching."""
rgraph = Graph()
rgraph.add_vertex("point")
rgraph.add_vertex("distance")
rgraph.add_vertex("rigid")
rgraph.add_vertex("balloon")
rgraph.add_vertex("hedgehog")
for cluster in nlet:
for var in cluster.vars:
rgraph.add_edge(cluster, var)
if isinstance(cluster, Rigid):
rgraph.add_edge("rigid", cluster)
if len(cluster.vars) == 1:
rgraph.add_edge("point", cluster)
elif len(cluster.vars) == 2:
rgraph.add_edge("distance", cluster)
if isinstance(cluster, Balloon):
rgraph.add_edge("balloon", cluster)
if isinstance(cluster, Hedgehog):
rgraph.add_edge("hedgehog", cluster)
rgraph.add_edge("cvar"+"#"+str(id(cluster)), cluster.cvar)
rgraph.add_edge(cluster, "cvar"+"#"+str(id(cluster)))
#diag_print("reference graph:"+str(rgraph),"match");
return rgraph
def rootname(cluster):
"""returns the name of the root variable associated with the name of a cluster variable"""
return "root#"+str(id(cluster))

View File

@@ -7,6 +7,8 @@ from intersections import *
from configuration import Configuration
from cluster import *
from map import Map
import incremental
class ClusterSolver3D(ClusterSolver):
"""A generic 3D geometric constraint solver. See ClusterSolver for details."""
@@ -223,6 +225,14 @@ class MergePR(ClusterMethod):
self._outputs = [out]
ClusterMethod.__init__(self)
def _incremental_matcher(solver):
toplevel = solver.top_level()
rigids = incremental.Filter(lambda c: isinstance(c, Rigid), toplevel)
points = incremental.Filter(lambda c: len(c.vars)==1, rigids)
connectedpairs = ConnectedPairs(solver, points, rigids)
matcher = incremental.Map(lambda (p,r): MergePR({"$p":p, "$r":r}), connectedpairs)
return matcher
incremental_matcher = staticmethod(_incremental_matcher)
def _handcoded_match(problem, newcluster, connected):
connected = set()
@@ -767,4 +777,80 @@ def solve_3p3d(v1,v2,v3,v4,p1,p2,p3,d14,d24,d34):
solutions.append(solution)
return solutions
# --------- incremental sets ----------
class Connected(incremental.IncrementalSet):
def __init__(self, solver, incrset):
"""Creates an incremental set of all pairs of connected clusters in incrset, according to solver"""
self._solver = solver
self._incrset = incrset
incremental.IncrementalSet.__init__(self, [incrset])
return
def _receive_add(self,source, object):
connected = set()
for var in object.vars:
dependend = self._solver.find_dependend(var)
dependend = filter(lambda x: x in self._incrset, dependend)
connected.update(dependend)
connected.remove(object)
for object2 in connected:
self._add(frozenset((object, object2)))
def _receive_remove(self,source, object):
for frozen in list(self):
if object in frozen:
self._remove(frozen)
def __eq__(self, other):
if isinstance(other, Connected):
return self._solver == other._solver and self._incrset == other._incrset
else:
return False
def __hash__(self):
return hash((self._solver, self._incrset))
class ConnectedPairs(incremental.IncrementalSet):
def __init__(self, solver, incrset1, incrset2):
"""Creates an incremental set of all pairs (c1, c2) from incrset1 and incrset2 respectively, that are connected according to solver"""
self._solver = solver
self._incrset1 = incrset1
self._incrset2 = incrset2
incremental.IncrementalSet.__init__(self, [incrset1, incrset2])
return
def _receive_add(self,source, object):
connected = set()
for var in object.vars:
dependend = self._solver.find_dependend(var)
if source == self._incrset1:
dependend = filter(lambda x: x in self._incrset2, dependend)
elif source == self._incrset2:
dependend = filter(lambda x: x in self._incrset1, dependend)
connected.update(dependend)
if object in connected:
connected.remove(object)
for object2 in connected:
if source == self._incrset1:
self._add((object, object2))
elif source == self._incrset2:
self._add((object2, object))
def _receive_remove(self,source, object):
for (c1,c2) in list(self):
if c1==object or c2==object:
self._remove((c1,c2))
def __eq__(self, other):
if isinstance(other, ConnectedPairs):
return self._solver == other._solver and self._incrset1 == other._incrset1 and self._incrset2 == other._incrset2
else:
return False
def __hash__(self):
return hash((self._solver, self._incrset1, self._incrset2))
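For reference, a template of what a method class provides to take part in the new incremental matching, modelled on MergePR._incremental_matcher above. This is an editor's sketch: the class itself, the pattern names and the Rigid(vars) output construction are assumptions, not part of this commit.

class ExampleMergeDR(ClusterMethod):
    """Hypothetical merge of a distance cluster into a connected rigid (illustration only)."""
    def __init__(self, map):
        # 'map' is produced by the incremental matcher below, keyed by pattern names
        in1 = map["$d"]
        in2 = map["$r"]
        outvars = set(in1.vars).union(in2.vars)
        self._inputs = [in1, in2]
        self._outputs = [Rigid(outvars)]      # Rigid(vars) constructor assumed here
        ClusterMethod.__init__(self)

    def _incremental_matcher(solver):
        toplevel = solver.top_level()         # IncrementalSet of top-level clusters
        rigids = incremental.Filter(lambda c: isinstance(c, Rigid), toplevel)
        distances = incremental.Filter(lambda c: len(c.vars) == 2, rigids)
        connectedpairs = ConnectedPairs(solver, distances, rigids)
        return incremental.Map(lambda (d, r): ExampleMergeDR({"$d": d, "$r": r}), connectedpairs)
    incremental_matcher = staticmethod(_incremental_matcher)

    def multi_execute(self, inmap):
        # the actual solving code is omitted in this sketch
        return []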

geosolver/incremental.py (new executable file, 440 lines added)
View File

@@ -0,0 +1,440 @@
"""This module provides various incrementally updated set-like containers."""
import notify
import weakref
class IncrementalSet(notify.Notifier, notify.Listener):
"""This is the base class for various incrementally updated set-like containers.
The represented set can change when it is notified of changes in other IncrementalSets,
and it can notify other IncrementalSets when objects are added or removed.
Its contents can be iterated using 'iter', and it supports the 'in' and 'len' queries.
All objects in the set are unique, and objects must implement __hash__ and __eq__.
Note that this class does not provide public methods for adding and removing objects.
See MutableSet for a user-modifiable subclass.
Subclasses should implement the _receive_add and _receive_remove methods, which are
called when an incset is notified by another incset. If your subclass uses the
Listener/Notifier scheme for notification from other objects, your receive_notify
function should call IncrementalSet.receive_notify.
IncrementalSets are unique. Multiple instances of equivalent incsets refer to the
first of their kind, so update operations are executed only once, even if multiple
instances of an equivalent incset exist. IncrementalSets must also define the __eq__ and
__hash__ methods.
"""
# keep track of all IncrementalSets, so we can re-use identical IncrementalSets
# _all[x] maps to a tuple (wr, cnt)
# where wr is a weak reference to the original, first instantiated incset equal to x
# and cnt is the total number of instantiations (past and present) of x (never decreases)
_all = weakref.WeakKeyDictionary()
def __init__(self, inputs=[]):
"""Instantiate a new incset, that listens for changes from given incsets.
If an equivalent incset allready exists, this object stores only a
reference, and all IncrementalSet methods will use this reference.
"""
if self in self._all:
# set self._ref and update self._all
(self._ref, count) = self._all[self]
count += 1
self._all[self] = (self._ref, count)
else:
# set self._ref and update self._all
self._ref = None
count = 1
self._all[self] = (weakref.ref(self), count)
# initialise instance variables
self._objects = set()
self._inputs = set(inputs)
notify.Notifier.__init__(self)
notify.Listener.__init__(self)
# add incsets
for incset in self._inputs:
self._add_input(incset)
# update from initial state of all incsets
for incset in self._inputs:
for obj in incset:
self._receive_add(incset, obj)
def _add_input(self, incset):
"""Add an incset, listen for notifications from it"""
if self._ref:
# add object to ref if given
self._ref()._add_input(incset)
else:
self.add_notifier(incset)
def receive_notify(self, source, message):
if source in self.notifiers:
(action, object) = message
if action == "add":
self._receive_add(source, object)
elif action == "remove":
self._receive_remove(source, object)
else:
print "Warning:",self,"reveiced unknown message"
else:
print "Warning:", self, "reveiced notification from unknown source"
def _receive_add(self,source, object):
raise Exception("This method is abstract. Subclasses should implement it")
def _receive_remove(self,source, object):
raise Exception("This method is abstract. Subclasses should implement it")
def _add(self, object):
"""Add an object and send notification to listeners"""
if self._ref:
# add object to ref if given
self._ref()._add(object)
else:
# else add object to self
if object not in self._objects:
self._objects.add(object)
self.send_notify(("add", object))
def _remove(self, object):
"""Remove an object and send notification to listeners"""
if self._ref:
# remove object from ref, if given
self._ref()._remove(object)
else:
# else remove object from self
if object in self._objects:
self._objects.remove(object)
self.send_notify(("remove", object))
def __iter__(self):
"""Returns an iterator for the objects contained here.
Note that the iterator will become invalid when objects are added or removed, which
may be a side-effect of changing other IncrementalSets.
"""
if self._ref:
# iterate over ref, if given
return iter(self._ref())
else:
return iter(self._objects)
def __contains__(self, obj):
if self._ref:
# test membership in ref, if given
return obj in self._ref()
else:
return obj in self._objects
def __len__(self):
if self._ref:
# take length of ref, if given
return len(self._ref())
else:
return len(self._objects)
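To make the sharing behaviour described in the class docstring concrete, a small editor's sketch using MutableSet and Filter defined below (assuming the notify module delivers add/remove notifications as the code above expects):

s = MutableSet([1, 2, 3])
keep_big = lambda x: x > 1
a = Filter(keep_big, s)
b = Filter(keep_big, s)   # compares equal to 'a', so it shares a's state via _ref
s.add(4)
assert set(a) == set(b) == set([2, 3, 4])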
class MutableSet(IncrementalSet):
"""A set-like container that can notify other objects of changes, when objects are added or removed"""
def __init__(self, seq=[]):
IncrementalSet.__init__(self)
for obj in seq:
self._add(obj)
def add(self, object):
self._add(object)
def remove(self, object):
self._remove(object)
def __repr__(self):
return "MutableSet#%s"%str(id(self))
class Union(IncrementalSet):
def __init__(self, *args):
IncrementalSet.__init__(self, args)
def _receive_add(self, source, obj):
self._add(obj)
def _receive_remove(self, source, obj):
count = 0
for incset in self._inputs:
if obj in incset:
count += 1
if count == 0:
self._remove(obj)
class Intersection(IncrementalSet):
def __init__(self, *args):
IncrementalSet.__init__(self, args)
def _receive_add(self, source, obj):
for incset in self._inputs:
if obj not in incset:
return
self._add(obj)
def _receive_remove(self, source, obj):
self._remove(obj)
class Difference(IncrementalSet):
def __init__(self, pos, neg):
self._pos = pos
self._neg = neg
IncrementalSet.__init__(self, [pos, neg])
def _receive_add(self, source, obj):
if source == self._pos and obj not in self._neg:
self._add(obj)
elif source == self._neg: # and obj in self:
self._remove(obj)
def _receive_remove(self, source, obj):
if source == self._pos:
self._remove(obj)
elif source == self._neg and obj in self._pos:
self._add(obj)
class Filter(IncrementalSet):
"""A set-like container that incrementally filters its input (MutableSet or other IncrementalSet)"""
def __init__(self, testfunction, incrset):
self._incrset = incrset
self._testfunction = testfunction
IncrementalSet.__init__(self,[incrset])
def _receive_add(self, source, object):
if self._testfunction(object):
self._add(object)
def _receive_remove(self, source, object):
if self._testfunction(object):
self._remove(object)
def __eq__(self, other):
if isinstance(other, Filter):
return (self._incrset, self._testfunction)==(other._incrset,other._testfunction)
else:
return False
def __hash__(self):
return hash((self._incrset, self._testfunction))
def __repr__(self):
return "Filter(%s,%s)"%(str(self._incrset),str(self._testfunction))
class Map(IncrementalSet):
"""A set-like container that incrementally maps its input through a function.
Note that the mapping function must always return the same output for the same input."""
def __init__(self, mapfunction, incrset):
self._incrset = incrset
self._mapfunction = mapfunction
IncrementalSet.__init__(self, [incrset])
def _receive_add(self, source, object):
self._add(self._mapfunction(object))
def _receive_remove(self, source, object):
self._remove(self._mapfunction(object))
def __eq__(self, other):
if isinstance(other, Map):
return (self._incrset, self._mapfunction)==(other._incrset,other._mapfunction)
else:
return False
def __hash__(self):
return hash((self._incrset, self._mapfunction))
def __repr__(self):
return "Map(%s,%s)"%(str(self._incrset),str(self._mapfunction))
class Permutations(IncrementalSet):
"""A set-like container that incrementally determines all permutations of its inputs (IncrementalSets)"""
def __init__(self, partmatchers):
self._partmatchers = list(partmatchers)
IncrementalSet.__init__(self, partmatchers)
def _receive_add(self, source, obj):
if source not in self._partmatchers:
raise Exception("Unknown source")
index = self._partmatchers.index(source)
parts = []
for i in range(len(self._partmatchers)):
if i == index:
parts.append([obj])
else:
parts.append(iter(self._partmatchers[i]))
newmatches = permutations(parts)
for match in newmatches:
self._add(match)
def _receive_remove(self, source, obj):
if source not in self._partmatchers:
raise Exception("Unknown source")
index = self._partmatchers.index(source)
toremove = []
for match in iter(self):
if match[index] == obj:
toremove.append(match)
for match in toremove:
self._remove(match)
def __eq__(self, other):
if isinstance(other, Permutations):
return self._partmatchers == other._partmatchers
else:
return False
def __hash__(self):
return hash(tuple(self._partmatchers))
def __repr__(self):
return "Permutations(%s)"%str(self._partmatchers)
def permutations(listofiters):
"""All permuations of the objects in each iter, returned as a list of tuples"""
if len(listofiters)==0:
return []
elif len(listofiters)==1:
return map(lambda element: tuple([element]), listofiters[0])
else:
l = list(listofiters[0])
p = permutations(listofiters[1:])
z = []
for e in l:
for y in p:
z.append(tuple([e])+y)
return z
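For concreteness (editor's note): permutations takes one object from each iterable in order, i.e. it effectively computes their Cartesian product. For example:

assert permutations([[1, 2], ["x", "y"]]) == [(1, "x"), (1, "y"), (2, "x"), (2, "y")]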
class Combinations(IncrementalSet):
"""A set-like container that incrementally determines all combinations of its inputs (IncrementalSets)"""
def __init__(self, partmatchers):
self._partmatchers = list(partmatchers)
IncrementalSet.__init__(self, partmatchers)
def _receive_add(self, source, obj):
if source not in self._partmatchers:
raise Exception("Unknown source")
index = self._partmatchers.index(source)
parts = []
for i in range(len(self._partmatchers)):
if i == index:
parts.append([obj])
else:
parts.append(iter(self._partmatchers[i]))
newmatches = combinations(parts)
for match in newmatches:
self._add(match)
def _receive_remove(self, source, obj):
if source not in self._partmatchers:
raise Exception("Unknown source")
index = self._partmatchers.index(source)
toremove = []
for match in iter(self):
if obj in match:
toremove.append(match)
for match in toremove:
self._remove(match)
def __eq__(self, other):
if isinstance(other, Combinations):
return self._partmatchers == other._partmatchers
else:
return False
def __hash__(self):
return hash(tuple(self._partmatchers))
def __repr__(self):
return "Combinations(%s)"%str(self._partmatchers)
def combinations(listofiters):
"""All combinations of the objects in each iter, returned as a set of tuples"""
#Note: this implementation could be faster; now as expensive as permutations
if len(listofiters)==0:
return []
elif len(listofiters)==1:
return map(lambda element: frozenset([element]), listofiters[0])
else:
l = list(listofiters[0])
p = combinations(listofiters[1:])
z = set()
for e in l:
for y in p:
if e not in y:
z.add(tuple(frozenset([e]).union(y)))
return z
def test1():
s = MutableSet([5,-3])
s.add(1)
s.add(2)
print list(s)
t = Filter(lambda x: x > 1,s)
print list(t)
s.remove(2)
s.add(3)
print list(t)
p = MutableSet([1,2])
q = MutableSet(['x', 'y'])
r = Permutations((p,q))
print list(r)
p.add(3)
print list(r)
q.remove('x')
print list(r)
u = MutableSet(['a', 'b', 'c'])
w = Combinations((u,u))
print list(w)
u.add('d')
print list(w)
u.remove('a')
print list(w)
print list(IncrementalSet._all)
def test2():
integers = MutableSet([1,2,3,4,5,6,7,8,9,10])
odd = lambda x: x%2 == 1
square = lambda x: x**2
odds1 = Filter(odd,integers)
odds2 = Filter(odd,integers)
sq1 = Map(square,odds1)
sq2 = Map(square,odds2)
print set(sq1), set(sq2)
print list(IncrementalSet._all)
def test3():
integers5 = MutableSet([1,2,3,4,5])
integers10 = MutableSet([1,2,3,4,5,6,7,8,9,10])
union = Union(integers5, integers10)
intersection = Intersection(integers5, integers10)
difference = Difference(integers10, integers5)
integers5.remove(1)
integers5.remove(2)
integers10.remove(1)
integers10.remove(10)
print set(union)
print set(intersection)
print set(difference)
if __name__ == '__main__':
test1()
test2()
test3()
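The solver consumes these containers as a work queue (see ClusterSolver._process_new in the diff above): it unions the incremental matchers and pops applicable items until the set runs empty. A standalone editor's sketch of that pattern, with plain integers standing in for candidate methods:

import incremental

tasks = incremental.MutableSet([1, 2, 3])
evens = incremental.Filter(lambda x: x % 2 == 0, tasks)
odds = incremental.Filter(lambda x: x % 2 == 1, tasks)
applicable = incremental.Union(evens, odds)

while len(applicable) > 0:
    item = iter(applicable).next()   # same Python 2 idiom as _process_new
    print "processing", item
    tasks.remove(item)               # removing the source object shrinks the Union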

View File

@@ -720,7 +720,7 @@ def test(problem, use_prototype=True):
solver = GeometricSolver(problem, use_prototype)
print "drplan:"
print solver.dr
print "top-level rigids:",solver.dr.top_level()
print "top-level rigids:",list(solver.dr.top_level())
result = solver.get_result()
print "result:"
print result
@@ -885,7 +885,7 @@ def selection_test():
def runtests():
#diag_select("clsolver3D")
#test(double_tetrahedron_problem())
test(double_tetrahedron_problem())
#test(ada_tetrahedron_problem())
#test(double_banana_problem())
#test(double_banana_plus_one_problem())
@@ -897,7 +897,7 @@ def runtests():
#diag_select("SelectionMethod.*")
#test(selection_problem(),False)
#selection_test()
test(overconstrained_tetra())
#test(overconstrained_tetra())
if __name__ == "__main__": runtests()

workbench/decompositionView.py (new executable file, 144 lines added)
View File

@@ -0,0 +1,144 @@
from includes import *
from ui_compositionView import Ui_compositionView
#from tree import Tree
from cvitems import CVCluster, CVConnection
from parameters import Settings
import random
import math
class DecompositionView(QtGui.QDialog):
""" A view where the decomposition of the system of constraints is visualised as a directed acyclic graph"""
def __init__(self, viewport, viewportMngr, vpType, prototypeMngr, parent=None):
""" Initialization of the CompositionView class
Parameters:
viewportMngr - the manager of the viewports where the composition view can reside in
prototypeMngr - the manager of the prototypes is used to obtain the results of the solver
"""
QtGui.QDialog.__init__(self, parent)
self.prototypeManager = prototypeMngr
self.viewport = viewport
self.viewportManager = viewportMngr
self.settings = Settings()
self.setWindowFlags(QtCore.Qt.Window)
self.timer = QtCore.QObject()
"""map GeometricDecomposition to CVCluster"""
self.map = {}
self.ui = Ui_compositionView()
self.ui.setupUi(self)
self.ui.graphicsView.setupViewport(QtOpenGL.QGLWidget(QtOpenGL.QGLFormat(QtOpenGL.QGL.SampleBuffers|QtOpenGL.QGL.DoubleBuffer)))
self.ui.graphicsView.setRenderHints(QtGui.QPainter.Antialiasing | QtGui.QPainter.SmoothPixmapTransform)
self.currentTool = None
self.viewportType = vpType
self.orientation = TreeOrientation.BOTTOM
self.overConstrainedColor = QtGui.QColor(0,0,255)
self.underConstrainedColor = QtGui.QColor(255,0,0)
self.wellConstrainedColor = QtGui.QColor(0,255,0)
self.unsolvedColor = QtGui.QColor(125,124,255)
self.createScene()
self.createTriggers()
def createTriggers(self):
""" Create the triggers for the components in the graphical window """
QtCore.QObject.connect(self.ui.zoomInButton,QtCore.SIGNAL("clicked()"),self.zoomIn)
QtCore.QObject.connect(self.ui.zoomOutButton,QtCore.SIGNAL("clicked()"),self.zoomOut)
QtCore.QObject.connect(self.ui.fitButton, QtCore.SIGNAL("clicked()"), self.fit)
#QtCore.QObject.connect(self.ui.collapseButton, QtCore.SIGNAL("clicked()"), self.collapse)
QtCore.QObject.connect(self.ui.graphicsScene, QtCore.SIGNAL("changed(const QList<QRectF> & )"), self.updateSceneRect)
QtCore.QObject.connect(self.ui.verticalSlider,QtCore.SIGNAL("valueChanged(int)"),self.setupMatrix)
#QtCore.QObject.connect(self.settings.dvData,QtCore.SIGNAL("treeOrientationChanged()"), self.updateTreeOrientation)
def getViewportType(self):
return self.viewportType
def updateGL(self):
self.update()
def createDecomposition(self):
""" Create a new decomposition. If an older one exists it will be removed. """
self.clearScene()
self.createScene()
def clearScene(self):
self.map = {}
if self.ui.graphicsScene != None:
for item in self.ui.graphicsView.items():
item.hide()
if item.parentItem() == None:
self.ui.graphicsScene.removeItem(item)
def createScene(self):
""" Updating the view with new data and nodes for the visualisation of the tree """
if self.prototypeManager.result != None:
# get all clusters from result
new = [self.prototypeManager.result]
clusters = set()
while len(new) > 0:
c = new.pop()
clusters.add(c)
for child in c.subs:
if child not in clusters:
new.append(child)
# create N layers for clusters with 1-N variables
N = len(self.prototypeManager.result.variables)
layers = []
for n in range(0,N+1):
layers.append([])
# add clusters to layers
for c in clusters:
n = len(c.variables)
layers[n].append(c)
# sort clusters in layers
# ??
# map GeometricDecompositions to CVClusters
for n in range(0,N+1):
layer = layers[n]
for k in range(0,len(layer)):
c = layer[k]
y = n * 50.0
x = (k - len(layer)/2.0) * 30.0 * n
cvcluster = CVCluster(self, c, x,y)
self.ui.graphicsScene.addItem(cvcluster)
self.map[c] = cvcluster
self.map[cvcluster] = c
# add CVConnections
for c in clusters:
for child in c.subs:
self.ui.graphicsScene.addItem(CVConnection(self, self.map[c], self.map[child]))
def updateViewports(self):
self.viewportManager.updateViewports()
def updateSceneRect(self, rectList=None):
self.ui.graphicsScene.setSceneRect(self.ui.graphicsScene.itemsBoundingRect())
def zoomIn(self):
""" Zoom in the graphics view, by updating the vertical slider """
self.ui.verticalSlider.setValue(self.ui.verticalSlider.value() + 1)
def zoomOut(self):
""" Zoom out the graphics view, by updating the vertical slider """
self.ui.verticalSlider.setValue(self.ui.verticalSlider.value() - 1)
def fit(self):
""" Fits the tree exactly in the graphics view """
self.ui.graphicsView.fitInView(0.0, 0.0, self.ui.graphicsScene.width(), self.ui.graphicsScene.height(), QtCore.Qt.KeepAspectRatio)
""" Update the slider """
value = (math.log(self.ui.graphicsView.matrix().m11(),2)*50) + 250.0
self.ui.verticalSlider.setValue(value)
def setupMatrix(self, value):
""" Zoom in/out the graphics view, depending on the value of the slider
Parameters
value - value of the updated slider
"""
scale = math.pow(2.0, (self.ui.verticalSlider.value()-250.0)/50.0)
matrix = QtGui.QMatrix()
matrix.scale(scale,scale)
self.ui.graphicsView.setMatrix(matrix)
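For reference (editor's note), the slider/zoom mapping used by fit() and setupMatrix() above is scale = 2 ** ((value - 250) / 50), so a slider value of 250 corresponds to a 1:1 view and every 50 slider units doubles or halves the zoom:

import math

def slider_to_scale(value):
    # same formula as DecompositionView.setupMatrix
    return math.pow(2.0, (value - 250.0) / 50.0)

assert slider_to_scale(250.0) == 1.0   # midpoint: no zoom
assert slider_to_scale(300.0) == 2.0   # +50 slider units: zoom in 2x
assert slider_to_scale(200.0) == 0.5   # -50 slider units: zoom out 2x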