diff --git a/Delny-0.2.0/build/lib.linux-i686-2.5/delaunay/__init__.py b/Delny-0.2.0/build/lib.linux-i686-2.5/delaunay/__init__.py deleted file mode 100755 index 23770e5..0000000 --- a/Delny-0.2.0/build/lib.linux-i686-2.5/delaunay/__init__.py +++ /dev/null @@ -1,42 +0,0 @@ -# __init__.py - initialisation script of the Delny package -# -# Copyright 2004 Floris Bruynooghe -# -# This file is part of Delny. -# -# Delny is free software; you can redistribute it and/or modify it -# under the terms of the GNU General Public License as published by -# the Free Software Foundation; either version 2 of the License, or -# (at your option) any later version. -# -# Delny is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Delny; if not, write to the Free Software Foundation, -# Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. -# -# -# Authors: Floris Bruynooghe (flub) - -"""Delaunay triangulation. - -This package provides a python interface to the Delaunay triangulation -provided by the qhull software package using the C library libqhull. -It is possible to make a triangulations of any dimension you want. - -The Delaunay module imports the core module directy in it's namespace -so you can for example use `Delaunay.Triangulation()' after a `import -Delaunay'. So see the Delaunay.core documentation for the use of the -module. - -Whenever a "sequence object" is mentioned in this package all the -builtin Python sequences are accepted as well as an array from the -Numeric module. -""" - -from core import * - -__all__ = ["core", "_qhull"] diff --git a/Delny-0.2.0/build/lib.linux-i686-2.5/delaunay/core.py b/Delny-0.2.0/build/lib.linux-i686-2.5/delaunay/core.py deleted file mode 100755 index 833a00a..0000000 --- a/Delny-0.2.0/build/lib.linux-i686-2.5/delaunay/core.py +++ /dev/null @@ -1,110 +0,0 @@ -# core.py - the main user interface to the Delny package -# -# Copyright 2004-2006 Floris Bruynooghe -# -# This file is part of Delny. -# -# Delny is free software; you can redistribute it and/or modify it -# under the terms of the GNU General Public License as published by -# the Free Software Foundation; either version 2 of the License, or -# (at your option) any later version. -# -# Delny is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Delny; if not, write to the Free Software Foundation, -# Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. -# -# -# Authors: Floris Bruynooghe (flub) - -"""The main user interface to the Delny package - -Most users will want to use the Triangulate class to access the -routines provided in the package. -""" - -import Numeric -import _qhull - - -class Triangulation: - """Represents a Delaunay triangulation of a set of points - - All sequences used in this class can be either Python builtin - sequences or Numeric sequences. Results are returned as Python - builtin sequences however. 
- """ - # Data attributes: - # self.neighbours - # self.facets - # self.indices - def __init__(self, inputset, dim=None): - """Creates the Delaunay triangulation - - The `set' can be a 2 dimensional sequence with the last - dimension being the coordinates of every point. - - Alternatively the set is a sequence objet of any dimension. - In this case the `dim' argument should be given and indicate - the number of dimensions. In this case the sequence will be - flattened (made 1-dimensional) and the first point will be - represented by the `dim' first elements of it and so on. - """ - if(dim != None): - self.set = Numeric.array(inputset) - self.set.shape = (-1, dim) - else: - self.set = inputset - self.neighbours, self.facets, self.indices = _qhull.delny(self.set) - - def get_set(self): - """Returns the set as it is being used by this class - - This could be any sequence object, however if the `dim' - argument was not passed along to the constructor of the object - you can be sure this is the same sequence object as you passed - to the constructor. - """ - return self.set - - def get_neighbours(self): - """Returns the neighbours of each point in the set - - This is a dictionnary with the points of the sets as key and a - list of it's nearest neighbours as value. Every neighbour is - a tuple with floats. - """ - return self.neighbours - - def get_elements(self): - """Returns the elements of the Delaunay triangulation - - This is a list of elements where every element is a list of - nodes and every node is a tuple of floats. An - element is a triangle in 2D and a tetraheron in 3D. - """ - return self.facets - - def get_elements_indices(self): - """Returns the elements of the Delaunay triangulation - - This is a list of elements where every element is a list of - node indices corresponding to the point index given in the inputset. - An element is a triangle in 2D and a tetraheron in 3D. - """ - return self.indices - - - def update_set(self, newset): - """Recalculate the neighbours with a new input set - - This has the same effect as creating a new instance but - without doing so. - """ - #FIXME: should this be renamed to set_set()? - #FIXME: should this also take the `dim' argument? - self.__init__(newset) diff --git a/Delny-0.2.0/build/lib.linux-x86_64-2.5/delaunay/__init__.py b/Delny-0.2.0/build/lib.linux-x86_64-2.5/delaunay/__init__.py deleted file mode 100644 index 23770e5..0000000 --- a/Delny-0.2.0/build/lib.linux-x86_64-2.5/delaunay/__init__.py +++ /dev/null @@ -1,42 +0,0 @@ -# __init__.py - initialisation script of the Delny package -# -# Copyright 2004 Floris Bruynooghe -# -# This file is part of Delny. -# -# Delny is free software; you can redistribute it and/or modify it -# under the terms of the GNU General Public License as published by -# the Free Software Foundation; either version 2 of the License, or -# (at your option) any later version. -# -# Delny is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Delny; if not, write to the Free Software Foundation, -# Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. -# -# -# Authors: Floris Bruynooghe (flub) - -"""Delaunay triangulation. 
- -This package provides a python interface to the Delaunay triangulation -provided by the qhull software package using the C library libqhull. -It is possible to make a triangulations of any dimension you want. - -The Delaunay module imports the core module directy in it's namespace -so you can for example use `Delaunay.Triangulation()' after a `import -Delaunay'. So see the Delaunay.core documentation for the use of the -module. - -Whenever a "sequence object" is mentioned in this package all the -builtin Python sequences are accepted as well as an array from the -Numeric module. -""" - -from core import * - -__all__ = ["core", "_qhull"] diff --git a/Delny-0.2.0/build/lib.linux-x86_64-2.5/delaunay/core.py b/Delny-0.2.0/build/lib.linux-x86_64-2.5/delaunay/core.py deleted file mode 100644 index 833a00a..0000000 --- a/Delny-0.2.0/build/lib.linux-x86_64-2.5/delaunay/core.py +++ /dev/null @@ -1,110 +0,0 @@ -# core.py - the main user interface to the Delny package -# -# Copyright 2004-2006 Floris Bruynooghe -# -# This file is part of Delny. -# -# Delny is free software; you can redistribute it and/or modify it -# under the terms of the GNU General Public License as published by -# the Free Software Foundation; either version 2 of the License, or -# (at your option) any later version. -# -# Delny is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Delny; if not, write to the Free Software Foundation, -# Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. -# -# -# Authors: Floris Bruynooghe (flub) - -"""The main user interface to the Delny package - -Most users will want to use the Triangulate class to access the -routines provided in the package. -""" - -import Numeric -import _qhull - - -class Triangulation: - """Represents a Delaunay triangulation of a set of points - - All sequences used in this class can be either Python builtin - sequences or Numeric sequences. Results are returned as Python - builtin sequences however. - """ - # Data attributes: - # self.neighbours - # self.facets - # self.indices - def __init__(self, inputset, dim=None): - """Creates the Delaunay triangulation - - The `set' can be a 2 dimensional sequence with the last - dimension being the coordinates of every point. - - Alternatively the set is a sequence objet of any dimension. - In this case the `dim' argument should be given and indicate - the number of dimensions. In this case the sequence will be - flattened (made 1-dimensional) and the first point will be - represented by the `dim' first elements of it and so on. - """ - if(dim != None): - self.set = Numeric.array(inputset) - self.set.shape = (-1, dim) - else: - self.set = inputset - self.neighbours, self.facets, self.indices = _qhull.delny(self.set) - - def get_set(self): - """Returns the set as it is being used by this class - - This could be any sequence object, however if the `dim' - argument was not passed along to the constructor of the object - you can be sure this is the same sequence object as you passed - to the constructor. - """ - return self.set - - def get_neighbours(self): - """Returns the neighbours of each point in the set - - This is a dictionnary with the points of the sets as key and a - list of it's nearest neighbours as value. Every neighbour is - a tuple with floats. 
- """ - return self.neighbours - - def get_elements(self): - """Returns the elements of the Delaunay triangulation - - This is a list of elements where every element is a list of - nodes and every node is a tuple of floats. An - element is a triangle in 2D and a tetraheron in 3D. - """ - return self.facets - - def get_elements_indices(self): - """Returns the elements of the Delaunay triangulation - - This is a list of elements where every element is a list of - node indices corresponding to the point index given in the inputset. - An element is a triangle in 2D and a tetraheron in 3D. - """ - return self.indices - - - def update_set(self, newset): - """Recalculate the neighbours with a new input set - - This has the same effect as creating a new instance but - without doing so. - """ - #FIXME: should this be renamed to set_set()? - #FIXME: should this also take the `dim' argument? - self.__init__(newset) diff --git a/geosolver/clsolver.py b/geosolver/clsolver.py index de14725..3148a0a 100644 --- a/geosolver/clsolver.py +++ b/geosolver/clsolver.py @@ -67,7 +67,7 @@ class ClusterSolver(Notifier): self._toplevel = MutableSet() # incrementally updated set of applicable methods self._incremental_matchers = map(lambda method: method.incremental_matcher(self), self._incremental_methods) - print "incremental matchers:",self._incremental_matchers + #print "incremental matchers:",self._incremental_matchers self._applicable_methods = Union(*self._incremental_matchers) # ------- methods for setting up constraint problems ------------ @@ -347,26 +347,28 @@ class ClusterSolver(Notifier): # -------------- - # search methods + # isearch methods # -------------- def _process_new(self): - # try incremental matchers - while len(self._applicable_methods) > 0: - method = iter(self._applicable_methods).next() - print "applicable methods:", map(str, self._applicable_methods) - print "found applicable method:", method - self._add_method_complete(method) - - # try old style matching - while len(self._new) > 0: - newobject = self._new.pop() - diag_print("search from "+str(newobject), "clsolver") - succes = self._search(newobject) - if succes and self.is_top_level(newobject): - # maybe more rules applicable.... push back on stack - self._new.append(newobject) - # while + # try incremental matchers and old style matching alternatingly + while len(self._applicable_methods) > 0 or len(self._new) > 0: + # check incremental matches + if len(self._applicable_methods) > 0: + method = iter(self._applicable_methods).next() + #print "applicable methods:", map(str, self._applicable_methods) + print "incremental search found:", method + self._add_method_complete(method) + else: + newobject = self._new.pop() + diag_print("search from "+str(newobject), "clsolver") + succes = self._search(newobject) + if succes and self.is_top_level(newobject): + # maybe more rules applicable.... 
push back on stack + self._new.append(newobject) + #endif + # endif + # endwhile #end def def _search(self, newcluster): @@ -382,7 +384,7 @@ class ClusterSolver(Notifier): # first try handcoded matching for methodclass in self._handcoded_methods: - diag_print("trying incremental matching for "+str(methodclass), "clsolver3D") + diag_print("trying handcoded match for "+str(methodclass), "clsolver3D") matches = methodclass.handcoded_match(self, newcluster, connected) if self._try_matches(methodclass, matches): return True @@ -423,7 +425,7 @@ class ClusterSolver(Notifier): return False def _add_method_complete(self, merge): - # diag_print("add_method_complete "+str(merge), "clsolver") + diag_print("add_method_complete "+str(merge), "clsolver") # check that method has one output if len(merge.outputs()) != 1: raise StandardError, "merge number of outputs != 1" @@ -488,7 +490,8 @@ class ClusterSolver(Notifier): # do not remove rigids from toplevel if method does not consider root if isinstance(cluster, Rigid): if hasattr(merge,"noremove") and merge.noremove == True: - continue + diag_print("block top-level", "clsolver") + break # remove input clusters when all its constraints are in output cluster if num_constraints(cluster.intersection(output)) >= num_constraints(cluster): diag_print("remove from top-level: "+str(cluster),"clsolver") diff --git a/geosolver/clsolver2D.py b/geosolver/clsolver2D.py index 13cc65d..440150c 100644 --- a/geosolver/clsolver2D.py +++ b/geosolver/clsolver2D.py @@ -1,890 +1,223 @@ -"""A generic 2D geometric constraint solver. - -The solver finds a generic solution -for problems formulated by Clusters. The generic solution -is a directed acyclic graph of Clusters and Methods. Particilar problems -and solutions are represented by a Configuration for each cluster. - -Note: this module is now depricated, and will be removed or replaced -in the future. - -""" +"""A generic 2D geometric constraint solver.""" from clsolver import * -from multimethod import MultiMethod -from sets import Set -from diagnostic import diag_print -from selconstr import NotCounterClockwiseConstraint, NotClockwiseConstraint, NotAcuteConstraint, NotObtuseConstraint +from diagnostic import diag_print, diag_select +from selconstr import * from intersections import * from configuration import Configuration from cluster import * - -class Merge(ClusterMethod): - """A derive is a method such that a single ouput cluster is a - subconsraint of a single input cluster.""" - - def __init__(self): - ClusterMethod.__init__(self) - - - -class Derive(ClusterMethod): - """A merge is a method such that a single ouput cluster satisfies - all constraints in several input clusters. The output cluster - replaces the input clusters in the constriant problem""" - - def __init__(self): - ClusterMethod.__init__(self) - +from map import Map +import incremental class ClusterSolver2D(ClusterSolver): - """A generic 2D geometric constraint solver. - - Finds a geneneric solution for problems formulated by cluster-constraints. - - Constraints are Clusers: Rigids, Hedgehogs and Balloons. - Cluster are added and removed using the add and remove methods. - After adding each Cluster, the solver tries to merge it with - other clusters, resulting in new Clusters and Methods. - - For each Cluster a set of Configurations can be set using the - set method. Configurations are propagated via Methods and can - be retrieved with the get method. - """ - - # ------- PUBLIC METHODS -------- + """A 2D geometric constraint solver. 
See ClusterSolver for details.""" + # ------- PUBLIC METHODS -------- def __init__(self): """Instantiate a ClusterSolver2D""" - ClusterSolver.__init__(self, dimension=2) - - # ------------ INTERNALLY USED METHODS -------- - - # -------------- - # search methods - # -------------- - - def _search(self, newcluster): - if isinstance(newcluster, Rigid): - self._search_from_rigid(newcluster) - elif isinstance(newcluster, Hedgehog): - self._search_from_hog(newcluster) - elif isinstance(newcluster, Balloon): - self._search_from_balloon(newcluster) - else: - raise StandardError, "don't know how to search from "+str(newcluster) - # end _search - - def _search_from_balloon(self, balloon): - if self._search_absorb_from_balloon(balloon): - return - if self._search_balloon_from_balloon(balloon): - return - if self._search_cluster_from_balloon(balloon): - return - self._search_hogs_from_balloon(balloon) - #end def _search_from_baloon - - def _search_from_hog(self, hog): - if self._search_absorb_from_hog(hog): - return - if self._search_merge_from_hog(hog): - return - if self._search_balloon_from_hog(hog): - return - self._search_hogs_from_hog(hog) - #end def _search_from_hog - - def _search_from_rigid(self, cluster): - if self._search_absorb_from_cluster(cluster): - return - if self._search_balloonclustermerge_from_cluster(cluster): - return - if self._search_merge_from_cluster(cluster): - return - self._search_hogs_from_cluster(cluster) - # end def _search_from_cluster - - # ------ Absorb hogs ------- - - def _search_absorb_from_balloon(self, balloon): - for cvar in balloon.vars: - # find all incident hogs - hogs = self._find_hogs(cvar) - # determine shared vertices per hog - for hog in hogs: - shared = Set(hog.xvars).intersection(balloon.vars) - if len(shared) == len(hog.xvars): - return self._merge_balloon_hog(balloon, hog) - - def _search_absorb_from_cluster(self, cluster): - for cvar in cluster.vars: - # find all incident hogs - hogs = self._find_hogs(cvar) - # determine shared vertices per hog - for hog in hogs: - shared = Set(hog.xvars).intersection(cluster.vars) - if len(shared) == len(hog.xvars): - return self._merge_cluster_hog(cluster, hog) - - def _search_absorb_from_hog(self, hog): - dep = self.find_dependend(hog.cvar) - # case BH (overconstrained): - balloons = filter(lambda x: isinstance(x,Balloon) and self.is_top_level(x), dep) - sharecx = filter(lambda x: len(Set(hog.xvars).intersection(x.vars)) >=1, balloons) - for balloon in sharecx: - sharedcx = Set(balloon.vars).intersection(hog.xvars) - if len(sharedcx) == len(hog.xvars): - return self._merge_balloon_hog(balloon, hog) - # case CH (overconstrained) - clusters = filter(lambda x: isinstance(x,Rigid) and self.is_top_level(x), dep) - sharecx = filter(lambda x: len(Set(hog.xvars).intersection(x.vars)) >=1, clusters) - for cluster in sharecx: - sharedcx = Set(cluster.vars).intersection(hog.xvars) - if len(sharedcx) == len(hog.xvars): - return self._merge_cluster_hog(cluster, hog) - - # ------- DEALING WITH BALLOONS --------- - - def _find_balloons(self, variables): - balloons = Set() - for var in variables: - deps = self.find_dependend(var) - balls = filter(lambda x: isinstance(x,Balloon), deps) - balloons = balloons.intersection(balls) - return balloons - - def _make_balloon(self, var1, var2, var3, hog1, hog2): - diag_print("_make_balloon "+str(var1)+","+str(var2)+","+str(var3),"clsolver") - # derive sub-hogs if nessecairy - vars = Set([var1, var2, var3]) - subvars1 = vars.intersection(hog1.xvars) - if len(hog1.xvars) > 2: - hog1 = 
self._derive_subhog(hog1, subvars1) - subvars2 = vars.intersection(hog2.xvars) - if len(hog2.xvars) > 2: - hog2 = self._derive_subhog(hog2, subvars2) - # create balloon and method - balloon = Balloon([var1, var2, var3]) - balloonmethod = BalloonFromHogs(hog1, hog2, balloon) - self._add_merge(balloonmethod) - return balloon - - def _search_balloon_from_hog(self,hog): - newballoons = [] - var1 = hog.cvar - for var2 in hog.xvars: - hogs = self._find_hogs(var2) - for hog2 in hogs: - if var1 in hog2.xvars: - for var3 in hog2.xvars: - if var3 != var2 and var3 in hog.xvars: - if not self._known_angle(var1, var3, var2): - newballoons.append(self._make_balloon(var1, var2, var3, hog, hog2)) - if len(newballoons) > 0: - return newballoons - else: - return None - - def _search_balloon_from_balloon(self, balloon): - map = {} # map from adjacent balloons to variables shared with input balloon - for var in balloon.vars: - deps = self.find_dependend(var) - balloons = filter(lambda x: isinstance(x,Balloon), deps) - balloons = filter(lambda x: self.is_top_level(x), balloons) - for bal2 in balloons: - if bal2 != balloon: - if bal2 in map: - map[bal2].union_update([var]) - else: - map[bal2] = Set([var]) - for bal2 in map: - nvars = len(map[bal2]) - if nvars >= 2: - return self._merge_balloons(balloon, bal2) - return None - - def _search_cluster_from_balloon(self, balloon): - diag_print("_search_cluster_from_balloon", "clsolver") - map = {} # map from adjacent clusters to variables shared with input balloon - for var in balloon.vars: - deps = self.find_dependend(var) - clusters = filter(lambda x: isinstance(x,Rigid) or isinstance(x,Distance), deps) - clusters = filter(lambda x: self.is_top_level(x), clusters) - for c in clusters: - if c in map: - map[c].union_update([var]) - else: - map[c] = Set([var]) - for cluster in map: - nvars = len(map[cluster]) - if nvars >= 2: - return self._merge_balloon_cluster(balloon, cluster) - return None - - def _search_balloonclustermerge_from_cluster(self, rigid): - diag_print("_search_balloonclustermerge_from_cluster", "clsolver") - map = {} # map from adjacent clusters to variables shared with input balloon - for var in rigid.vars: - deps = self.find_dependend(var) - balloons = filter(lambda x: isinstance(x,Balloon), deps) - balloons = filter(lambda x: self.is_top_level(x), balloons) - for b in balloons: - if b in map: - map[b].union_update([var]) - else: - map[b] = Set([var]) - for balloon in map: - nvars = len(map[balloon]) - if nvars >= 2: - return self._merge_balloon_cluster(balloon, rigid) - return None - - def _merge_balloons(self, bal1, bal2): - # create new balloon and merge method - vars = Set(bal1.vars).union(bal2.vars) - newballoon = Balloon(vars) - merge = BalloonMerge(bal1,bal2,newballoon) - self._add_merge(merge) - return newballoon - - def _merge_balloon_cluster(self, balloon, cluster): - # create new cluster and method - vars = Set(balloon.vars).union(cluster.vars) - newcluster = Rigid(list(vars)) - merge = BalloonRigidMerge(balloon,cluster,newcluster) - self._add_merge(merge) - return newcluster - - - # ------- DEALING WITH HEDEGHOGS --------- - - def _find_hogs(self, cvar): - deps = self.find_dependend(cvar) - hogs = filter(lambda x: isinstance(x,Hedgehog), deps) - hogs = filter(lambda x: x.cvar == cvar, hogs) - hogs = filter(lambda x: self.is_top_level(x), hogs) - return hogs - - def _make_hog_from_cluster(self, cvar, cluster): - xvars = Set(cluster.vars) - xvars.remove(cvar) - hog = Hedgehog(cvar,xvars) - self._add_hog(hog) - method = Rigid2Hog(cluster, 
hog) - self._add_method(method) - return hog - - def _make_hog_from_balloon(self, cvar, balloon): - xvars = Set(balloon.vars) - xvars.remove(cvar) - hog = Hedgehog(cvar,xvars) - self._add_hog(hog) - method = Balloon2Hog(balloon, hog) - self._add_method(method) - return hog - - def _search_hogs_from_balloon(self, newballoon): - #diag_print("_search_hogs_from_balloon "+str(newballoon),"clsolver") - if self.dimension != 2: - return None - if len(newballoon.vars) <= 2: - return None - # create/merge hogs - for cvar in newballoon.vars: - # potential new hog - xvars = Set(newballoon.vars) - xvars.remove(cvar) - # find all incident hogs - hogs = self._find_hogs(cvar) - # determine shared vertices per hog - for hog in hogs: - shared = Set(hog.xvars).intersection(xvars) - if len(shared) >= 1 and len(shared) < len(hog.xvars) and len(shared) < len(xvars): - tmphog = Hedgehog(cvar, xvars) - if not self._graph.has_vertex(tmphog): - newhog = self._make_hog_from_balloon(cvar,newballoon) - self._merge_hogs(hog, newhog) - #end for - #end for - - def _search_hogs_from_cluster(self, newcluster): - #diag_print("_search_hogs_from_cluster "+str(newcluster),"clsolver") - if self.dimension != 2: - return None - if len(newcluster.vars) <= 2: - return None - # create/merge hogs - for cvar in newcluster.vars: - # potential new hog - xvars = Set(newcluster.vars) - xvars.remove(cvar) - # find all incident hogs - hogs = self._find_hogs(cvar) - # determine shared vertices per hog - for hog in hogs: - shared = Set(hog.xvars).intersection(xvars) - if len(shared) >= 1 and len(shared) < len(hog.xvars) and len(shared) < len(xvars): - tmphog = Hedgehog(cvar, xvars) - if not self._graph.has_vertex(tmphog): - newhog = self._make_hog_from_cluster(cvar,newcluster) - self._merge_hogs(hog, newhog) - #end for - #end for - - def _search_hogs_from_hog(self, newhog): - #diag_print("_search_hogs_from_hog "+str(newhog),"newhog") - if self.dimension != 2: - return None - # find adjacent clusters - dep = self.find_dependend(newhog.cvar) - top = filter(lambda c: self.is_top_level(c), dep) - clusters = filter(lambda x: isinstance(x,Rigid), top) - balloons = filter(lambda x: isinstance(x,Balloon), top) - hogs = self._find_hogs(newhog.cvar) - tomerge = [] - for cluster in clusters: - if len(cluster.vars) < 3: - continue - # determine shared vars - xvars = Set(cluster.vars) - xvars.remove(newhog.cvar) - shared = Set(newhog.xvars).intersection(xvars) - if len(shared) >= 1 and len(shared) < len(xvars) and len(shared) < len(newhog.xvars): - tmphog = Hedgehog(newhog.cvar, xvars) - if not self._graph.has_vertex(tmphog): - newnewhog = self._make_hog_from_cluster(newhog.cvar, cluster) - tomerge.append(newnewhog) - for balloon in balloons: - # determine shared vars - xvars = Set(balloon.vars) - xvars.remove(newhog.cvar) - shared = Set(newhog.xvars).intersection(xvars) - if len(shared) >= 1 and len(shared) < len(xvars) and len(shared) < len(newhog.xvars): - tmphog = Hedgehog(newhog.cvar, xvars) - if not self._graph.has_vertex(tmphog): - newnewhog = self._make_hog_from_balloon(newhog.cvar, balloon) - tomerge.append(newnewhog) - for hog in hogs: - if hog == newhog: - continue - # determine shared vars - shared = Set(newhog.xvars).intersection(hog.xvars) - if len(shared) >= 1 and len(shared) < len(hog.xvars) and len(shared) < len(newhog.xvars): - # if mergeable, then create new hog - tomerge.append(hog) + ClusterSolver.__init__(self, [MergePR, MergeRR, DeriveDDD]) - if len(tomerge) == 0: - return None - else: - lasthog = newhog - for hog in tomerge: - 
lasthog = self._merge_hogs(lasthog, hog) - return lasthog - - # end def - - def _merge_hogs(self, hog1, hog2): - diag_print("merging "+str(hog1)+"+"+str(hog2), "clsolver") - # create new hog and method - xvars = Set(hog1.xvars).union(hog2.xvars) - mergedhog = Hedgehog(hog1.cvar, xvars) - method = MergeHogs(hog1, hog2, mergedhog) - self._add_merge(method) - return mergedhog - - # end def _merge_hogs - - # ------ DEALING WITH CLUSTER MERGES ------- - - - def _search_merge_from_hog(self, hog): - - # case CH (overconstrained) - dep = self.find_dependend(hog.cvar) - clusters = filter(lambda x: isinstance(x,Rigid) and self.is_top_level(x), dep) - sharecx = filter(lambda x: len(Set(hog.xvars).intersection(x.vars)) >=1, clusters) - for cluster in sharecx: - sharedcx = Set(cluster.vars).intersection(hog.xvars) - if len(sharedcx) == len(hog.xvars): - return self._merge_cluster_hog(cluster, hog) - - # case CHC - for i in range(len(sharecx)): - c1 = sharecx[i] - for j in range(i+1, len(sharecx)): - c2 = sharecx[j] - return self._merge_cluster_hog_cluster(c1, hog, c2) - - # case CCH - sharex = Set() - for var in hog.xvars: - dep = self.find_dependend(var) - sharex.union_update(filter(lambda x: isinstance(x,Rigid) and self.is_top_level(x), dep)) - for c1 in sharecx: - for c2 in sharex: - if c1 == c2: continue - shared12 = Set(c1.vars).intersection(c2.vars) - sharedh2 = Set(hog.xvars).intersection(c2.vars) - shared2 = shared12.union(sharedh2) - if len(shared12) >= 1 and len(sharedh2) >= 1 and len(shared2) == 2: - return self._merge_cluster_cluster_hog(c1, c2, hog) - return None - - - def _search_merge_from_cluster(self, newcluster): - diag_print ("_search_merge "+str(newcluster), "clsolver") - # find clusters overlapping with new cluster - overlap = {} - for var in newcluster.vars: - # get dependent objects - dep = self._graph.outgoing_vertices(var) - # only clusters - dep = filter(lambda c: self._graph.has_edge("_rigids",c), dep) - # only top level - dep = filter(lambda c: self.is_top_level(c), dep) - # remove newcluster - if newcluster in dep: - dep.remove(newcluster) - for cluster in dep: - if cluster in overlap: - overlap[cluster].append(var) - else: - overlap[cluster] = [var] - - # point-cluster merge - for cluster in overlap: - if len(overlap[cluster]) == 1: - if len(cluster.vars) == 1: - return self._merge_point_cluster(cluster, newcluster) - elif len(newcluster.vars) == 1: - return self._merge_point_cluster(newcluster, cluster) - - # two cluster merge (overconstrained) - for cluster in overlap: - if len(overlap[cluster]) >= self.dimension: - return self._merge_cluster_pair(cluster, newcluster) - - # three cluster merge - clusterlist = overlap.keys() - for i in range(len(clusterlist)): - c1 = clusterlist[i] - for j in range(i+1, len(clusterlist)): - c2 = clusterlist[j] - shared12 = Set(c1.vars).intersection(c2.vars) - shared13 = Set(c1.vars).intersection(newcluster.vars) - shared23 = Set(c2.vars).intersection(newcluster.vars) - shared1 = shared12.union(shared13) - shared2 = shared12.union(shared23) - shared3 = shared13.union(shared23) - if len(shared1) == self.dimension and\ - len(shared1) == self.dimension and\ - len(shared2) == self.dimension: - return self._merge_cluster_triple(c1, c2, newcluster) - - # merge with an angle, case 1 - for cluster in overlap: - ovars = overlap[cluster] - if len(ovars) == 1: - cvar = ovars[0] - else: - raise StandardError, "unexpected case" - hogs = self._find_hogs(cvar) - for hog in hogs: - sharedch = Set(cluster.vars).intersection(hog.xvars) - sharednh = 
Set(newcluster.vars).intersection(hog.xvars) - sharedh = sharedch.union(sharednh) - if len(sharedch) >= 1 and len(sharednh) >= 1 and len(sharedh) >= 2: - return self._merge_cluster_hog_cluster(cluster, hog, newcluster) - - # merge with an angle, case 2 - #print "case c2" - for var in newcluster.vars: - hogs = self._find_hogs(var) - for hog in hogs: - sharednh = Set(newcluster.vars).intersection(hog.xvars) - if len(sharednh) < 1: - continue - for cluster in overlap: - sharednc = Set(newcluster.vars).intersection(cluster.vars) - if len(sharednc) != 1: - raise StandardError, "unexpected case" - if hog.cvar in cluster.vars: - #raise StandardError, "unexpected case" - continue - sharedch = Set(cluster.vars).intersection(hog.xvars) - sharedc = sharedch.union(sharednc) - if len(sharedch) >= 1 and len(sharedc) >= 2: - return self._merge_cluster_cluster_hog(newcluster, cluster, hog) - #print "end case 2" - - # merge with an angle, case 3 - #print "case c3" - for cluster in overlap: - sharednc = Set(newcluster.vars).intersection(cluster.vars) - if len(sharednc) != 1: - raise StandardError, "unexpected case" - for var in cluster.vars: - hogs = self._find_hogs(var) - for hog in hogs: - if hog.cvar in newcluster.vars: - # raise StandardError, "unexpected case" - continue - sharedhc = Set(newcluster.vars).intersection(hog.xvars) - sharedhn = Set(cluster.vars).intersection(hog.xvars) - sharedh = sharedhn.union(sharedhc) - sharedc = sharedhc.union(sharednc) - if len(sharedhc) >= 1 and len(sharedhn) >= 1 and len(sharedh) >= 2 and len(sharedc) == 2: - return self._merge_cluster_cluster_hog(cluster, newcluster, hog) - #print "end case 3" - # end def _search_merge - - def _merge_point_cluster(self, pointc, cluster): - diag_print("_merge_point_cluster "+str(pointc)+","+str(cluster),"clsolver") - #create new cluster and method - allvars = Set(pointc.vars).union(cluster.vars) - newcluster = Rigid(allvars) - merge = Merge1C(pointc,cluster,newcluster) - self._add_merge(merge) - return newcluster - #def - - def _merge_cluster_pair(self, c1, c2): - """Merge a pair of clusters, structurally overconstrained. - Rigid which contains root is used as origin. - Returns resulting cluster. 
- """ - diag_print("_merge_cluster_pair "+str(c1)+","+str(c2),"clsolver") - # always use root cluster as first cluster, swap if needed - if not self._contains_root(c1) and not self._contains_root(c2): - #raise "StandardError", "no root cluster" - pass - elif self._contains_root(c1) and self._contains_root(c2): - raise "StandardError", "two root clusters" - elif self._contains_root(c2): - diag_print("swap cluster order","clsolver") - return self._merge_cluster_pair(c2, c1) - #create new cluster and merge - allvars = Set(c1.vars).union(c2.vars) - newcluster = Rigid(allvars) - merge = Merge2C(c1,c2,newcluster) - self._add_merge(merge) - return newcluster - #def - - def _merge_cluster_hog(self, cluster, hog): - """merge cluster and hog (absorb hog, overconstrained)""" - diag_print("_merge_cluster_hog "+str(cluster)+","+str(hog),"clsolver") - #create new cluster and merge - newcluster = Rigid(cluster.vars) - merge = MergeCH(cluster,hog, newcluster) - self._add_merge(merge) - return newcluster - - def _merge_balloon_hog(self, balloon, hog): - """merge balloon and hog (absorb hog, overconstrained)""" - diag_print("_merge_balloon_hog "+str(balloon)+","+str(hog),"clsolver") - #create new balloon and merge - newballoon = Balloon(balloon.vars) - merge = MergeBH(balloon, hog, newballoon) - self._add_merge(merge) - return newballoon - - def _merge_cluster_triple(self, c1, c2, c3): - """Merge a triple of clusters. - Rigid which contains root is used as origin. - Returns resulting cluster. - """ - diag_print("_merge_cluster_triple "+str(c1)+","+str(c2)+","+str(c3),"clsolver") - # always use root cluster as first cluster, swap if needed - if self._contains_root(c2): - diag_print("swap cluster order","clsolver") - return self._merge_cluster_triple(c2, c1, c3) - elif self._contains_root(c3): - diag_print("swap cluster order","clsolver") - return self._merge_cluster_triple(c3, c1, c2) - #create new cluster and method - allvars = Set(c1.vars).union(c2.vars).union(c3.vars) - newcluster = Rigid(allvars) - merge = Merge3C(c1,c2,c3,newcluster) - self._add_merge(merge) - return newcluster - #def - - def _merge_cluster_hog_cluster(self, c1, hog, c2): - """merge c1 and c2 with a hog, with hog center in c1 and c2""" - diag_print("_merge_cluster_hog_cluster "+str(c1)+","+str(hog)+","+str(c2),"clsolver") - # always use root cluster as first cluster, swap if needed - if self._contains_root(c2): - diag_print("swap cluster order","clsolver") - return self._merge_cluster_hog_cluster(c2, hog, c1) - # derive sub-hog if nessecairy - allvars = Set(c1.vars).union(c2.vars) - xvars = Set(hog.xvars).intersection(allvars) - if len(xvars) < len(hog.xvars): - diag_print("deriving sub-hog","clsolver") - hog = self._derive_subhog(hog, xvars) - #create new cluster and merge - allvars = Set(c1.vars).union(c2.vars) - newcluster = Rigid(allvars) - merge = MergeCHC(c1,hog,c2,newcluster) - self._add_merge(merge) - return newcluster - - def _derive_subhog(self, hog, xvars): - subvars = Set(hog.xvars).intersection(xvars) - assert len(subvars) == len(xvars) - subhog = Hedgehog(hog.cvar, xvars) - method = SubHog(hog, subhog) - self._add_hog(subhog) - self._add_method(method) - return subhog - - def _merge_cluster_cluster_hog(self, c1, c2, hog): - """merge c1 and c2 with a hog, with hog center only in c1""" - diag_print("_merge_cluster_cluster_hog "+str(c1)+","+str(c2)+","+str(hog),"clsolver") - # always use root cluster as first cluster, swap if needed - if self._contains_root(c1) and self._contains_root(c2): - raise StandardError, "two root 
clusters!" - elif not self._contains_root(c1) and not self._contains_root(c2): - #raise StandardError, "no root cluster" - pass - elif self._contains_root(c2): - return self._merge_cluster_cluster_hog(c2, c1, hog) - # derive subhog if nessecairy - allvars = Set(c1.vars).union(c2.vars) - xvars = Set(hog.xvars).intersection(allvars) - if len(xvars) < len(hog.xvars): - diag_print("deriving sub-hog","clsolver") - hog = self._derive_subhog(hog, xvars) - # create new cluster and method - newcluster = Rigid(allvars) - merge = MergeCCH(c1,c2,hog,newcluster) - self._add_merge(merge) - return newcluster - -# class ClusterSolver2D - +# ---------------------------------------------- # ---------- Methods for 2D solving ------------- +# ---------------------------------------------- +# Merge methods take root cluster in considerations +# Derive methods do not take root cluster in consideration -class Merge1C(Merge): - """Represents a merging of a one-point cluster with any other cluster - The first cluster determines the orientation of the resulting - cluster - """ - def __init__(self, in1, in2, out): - self._inputs = [in1, in2] +class MergePR(ClusterMethod): + """Represents a merging of a one-point cluster with any other rigid.""" + def __init__(self, map): + # check inputs + in1 = map["$p"] + in2 = map["$r"] + # create ouput + outvars = set(in1.vars).union(in2.vars) + out = Rigid(outvars) + # set method properties + in1root = rootname(in1) + in2root = rootname(in2) + self._inputs = [in1, in2, in1root, in2root] self._outputs = [out] - self.overconstrained = False - self.consistent = True - MultiMethod.__init__(self) + ClusterMethod.__init__(self) + def _incremental_matcher(solver): + toplevel = solver.top_level() + rigids = Rigids(solver) + points = Points(solver) + connectedpairs = ConnectedPairs(solver, points, rigids) + matcher = incremental.Map(lambda (p,r): MergePR({"$p":p, "$r":r}), connectedpairs) + return matcher + + incremental_matcher = staticmethod(_incremental_matcher) + def __str__(self): - s = "merge1C("+str(self._inputs[0])+"+"+str(self._inputs[1])+"->"+str(self._outputs[0])+")" + s = "MergePR("+str(self._inputs[0])+"+"+str(self._inputs[1])+"->"+str(self._outputs[0])+")" s += "[" + self.status_str()+"]" return s def multi_execute(self, inmap): - diag_print("Merge1C.multi_execute called","clmethods") + diag_print("MergePR.multi_execute called","clmethods") + #c1 = self._inputs[0] + #c2 = self._inputs[1] + conf1 = inmap[self._inputs[0]] + conf2 = inmap[self._inputs[1]] + isroot1 = inmap[self._inputs[2]] + isroot2 = inmap[self._inputs[3]] + if isroot1: + res = conf1.merge(conf2) + elif isroot2: + res = conf2.merge(conf1) + else: # cheapest - just copy reference + res = conf2 + return [res] + +class MergeRR(ClusterMethod): + """Represents a merging of two rigids sharing two points.""" + def __init__(self, map): + # check inputs + in1 = map["$r1"] + in2 = map["$r2"] + # create output + out = Rigid(set(in1.vars).union(in2.vars)) + # set method parameters + in1root = rootname(in1) + in2root = rootname(in2) + self._inputs = [in1, in2, in1root, in2root] + self._outputs = [out] + ClusterMethod.__init__(self) + + #def _pattern(): + # pattern = [["rigid","$r1",["$a","$b"]], ["rigid", "$r2", ["$a", "$b"]]] + # return pattern2graph(pattern) + #pattern = staticmethod(_pattern) + #patterngraph = _pattern() + + def _incremental_matcher(solver): + toplevel = solver.top_level() + rigids = Rigids(solver) + connectedpairs = Connected(solver, rigids) + twoconnectedpairs = incremental.Filter(lambda (r1,r2): 
len(r1.vars.intersection(r2.vars))==2, connectedpairs); + matcher = incremental.Map(lambda (r1,r2): MergeRR({"$r1":r1, "$r2":r2}), twoconnectedpairs) + return matcher + + incremental_matcher = staticmethod(_incremental_matcher) + + + def __str__(self): + s = "MergeRR("+str(self._inputs[0])+"+"+str(self._inputs[1])+"->"+str(self._outputs[0])+")" + s += "[" + self.status_str()+"]" + return s + + def multi_execute(self, inmap): + diag_print("MergeRR.multi_execute called","clmethods") c1 = self._inputs[0] c2 = self._inputs[1] conf1 = inmap[c1] conf2 = inmap[c2] - #res = conf1.merge2D(conf2) - #return [res] - if len(c1.vars) == 1: - return [conf2.copy()] + isroot1 = inmap[self._inputs[2]] + isroot2 = inmap[self._inputs[3]] + if isroot1 and not isroot2: + res = conf1.merge(conf2) + elif isroot2 and not isroot1: + res = conf2.merge(conf1) + elif len(c1.vars) < len(c2.vars): # cheapest - transform smallest config + res = conf2.merge(conf1) else: - return [conf1.copy()] + res = conf1.merge(conf2) + return [res] -class Merge2C(Merge): - """Represents a merging of two clusters (overconstrained) - The first cluster determines the orientation of the resulting - cluster - """ - def __init__(self, in1, in2, out): - self.input1 = in1 - self.input2 = in2 - self.output = out - self._inputs = [in1, in2] + +def triplet2ddd(triplet): + (d_ab,d_ac,d_bc) = triplet + a = list(d_ab.vars.intersection(d_ac.vars))[0] + b = list(d_ab.vars.intersection(d_bc.vars))[0] + c = list(d_ac.vars.intersection(d_bc.vars))[0] + return DeriveDDD({"$d_ab":d_ab, "$d_ac":d_ac, "$d_bc":d_bc, "$a": a, "$b":b, "$c":c}) + + +class DeriveDDD(ClusterMethod): + """Represents a merging of three distances""" + def __init__(self, map): + # check inputs + self.d_ab = map["$d_ab"] + self.d_ac = map["$d_ac"] + self.d_bc = map["$d_bc"] + self.a = map["$a"] + self.b = map["$b"] + self.c = map["$c"] + # create output + out = Rigid([self.a,self.b,self.c]) + # get roots + self.root_ab = rootname(self.d_ab) + self.root_ac = rootname(self.d_ac) + self.root_bc = rootname(self.d_bc) + # set method parameters + self._inputs = [self.d_ab, self.d_ac, self.d_bc, self.root_ab, self.root_ac, self.root_bc] self._outputs = [out] - self.overconstrained = True - self.consistent = True - MultiMethod.__init__(self) + ClusterMethod.__init__(self) + # do not remove input clusters (because root not considered here) + # self.noremove = True + + #def _pattern(): + # pattern = [["rigid","$d_ab",["$a", "$b"]], + # ["rigid", "$d_ac",["$a", "$c"]], + # ["rigid", "$d_bc",["$b","$c"]]] + # return pattern2graph(pattern) + #pattern = staticmethod(_pattern) + #patterngraph = _pattern() + + def _incremental_matcher(solver): + triplets = Triplets(solver, Rigids(solver)) + matcher = incremental.Map(triplet2ddd, triplets) + + return matcher + + incremental_matcher = staticmethod(_incremental_matcher) def __str__(self): - s = "merge2C("+str(self.input1)+"+"+str(self.input2)+"->"+str(self.output)+")" + s = "DeriveDDD("+str(self._inputs[0])+"+"+str(self._inputs[1])+"+"+str(self._inputs[2])+"->"+str(self._outputs[0])+")" s += "[" + self.status_str()+"]" return s def multi_execute(self, inmap): - diag_print("Merge2C.multi_execute called","clmethods") - c1 = self._inputs[0] - c2 = self._inputs[1] - conf1 = inmap[c1] - conf2 = inmap[c2] - return [conf1.merge2D(conf2)] + diag_print("DeriveDDD.multi_execute called","clmethods") + c12 = inmap[self.d_ab] + c13 = inmap[self.d_ac] + c23 = inmap[self.d_bc] + v1 = self.a + v2 = self.b + v3 = self.c + d12 = distance_2p(c12.get(v1),c12.get(v2)) + d31 
= distance_2p(c13.get(v1),c13.get(v3)) + d23 = distance_2p(c23.get(v2),c23.get(v3)) + solutions = solve_ddd(v1,v2,v3,d12,d23,d31) -class MergeCH(Merge): - """Represents a merging of a cluster and a hog (where - the hog is absorbed by the cluster). Overconstrained. - """ - def __init__(self, cluster, hog, out): - self.cluster = cluster - self.hog = hog - self.output = out - self._inputs = [cluster, hog] - self._outputs = [out] - self.overconstrained = True - self.consistent = True - MultiMethod.__init__(self) - - def __str__(self): - s = "mergeCH("+str(self.cluster)+"+"+str(self.hog)+"->"+str(self.output)+")" - s += "[" + self.status_str()+"]" - return s - - def multi_execute(self, inmap): - diag_print("MergeCH.multi_execute called","clmethods") - conf1 = inmap[self.cluster] - #conf2 = inmap[self.hog] - return [conf1.copy()] - -class MergeBH(Merge): - """Represents a merging of a balloon and a hog (where - the hog is absorbed by the balloon). Overconstrained. - """ - def __init__(self, balloon, hog, out): - self.balloon = balloon - self.hog = hog - self.output = out - self._inputs = [balloon, hog] - self._outputs = [out] - self.overconstrained = True - self.consistent = True - MultiMethod.__init__(self) - - def __str__(self): - s = "mergeBH("+str(self.balloon)+"+"+str(self.hog)+"->"+str(self.output)+")" - s += "[" + self.status_str()+"]" - return s - - def multi_execute(self, inmap): - diag_print("MergeBH.multi_execute called","clmethods") - conf1 = inmap[self.balloon] - #conf2 = inmap[self.hog] - return [conf1.copy()] - - -class Merge3C(Merge): - """Represents a merging of three clusters - The first cluster determines the orientation of the resulting - cluster - """ - def __init__(self, c1, c2, c3, out): - self.input1 = c1 - self.input2 = c2 - self.input3 = c3 - self.output = out - self._inputs = [c1, c2, c3] - self._outputs = [out] - self.overconstrained = False - self.consistent = True - MultiMethod.__init__(self) - # check coincidence - shared12 = Set(c1.vars).intersection(c2.vars) - shared13 = Set(c1.vars).intersection(c3.vars) - shared23 = Set(c2.vars).intersection(c3.vars) - shared1 = shared12.union(shared13) - shared2 = shared12.union(shared23) - shared3 = shared13.union(shared23) - if len(shared12) < 1: - raise StandardError, "underconstrained c1 and c2" - elif len(shared12) > 1: - diag_print("overconstrained CCC - c1 and c2", "clmethods") - self.overconstrained = True - if len(shared13) < 1: - raise StandardError, "underconstrained c1 and c3" - elif len(shared13) > 1: - diag_print("overconstrained CCC - c1 and c3", "clmethods") - self.overconstrained = True - if len(shared23) < 1: - raise StandardError, "underconstrained c2 and c3" - elif len(shared23) > 1: - diag_print("overconstrained CCC - c2 and c3", "clmethods") - self.overconstrained = True - if len(shared1) < 2: - raise StandardError, "underconstrained c1" - elif len(shared1) > 2: - diag_print("overconstrained CCC - c1", "clmethods") - self.overconstrained = True - if len(shared2) < 2: - raise StandardError, "underconstrained c2" - elif len(shared2) > 2: - diag_print("overconstrained CCC - c2", "clmethods") - self.overconstrained = True - if len(shared3) < 2: - raise StandardError, "underconstrained c3" - elif len(shared3) > 2: - diag_print("overconstrained CCC - c3", "clmethods") - self.overconstrained = True - - def __str__(self): - s = "merge3C("+str(self.input1)+"+"+str(self.input2)+"+"+str(self.input3)+"->"+str(self.output)+")" - s += "[" + self.status_str()+"]" - return s - - def multi_execute(self, inmap): - 
diag_print("Merge3C.multi_execute called","clmethods") - c1 = inmap[self._inputs[0]] - c2 = inmap[self._inputs[1]] - c3 = inmap[self._inputs[2]] - shared12 = Set(c1.vars()).intersection(c2.vars()).difference(c3.vars()) - shared13 = Set(c1.vars()).intersection(c3.vars()).difference(c2.vars()) - shared23 = Set(c2.vars()).intersection(c3.vars()).difference(c1.vars()) - v1 = list(shared12)[0] - v2 = list(shared13)[0] - v3 = list(shared23)[0] - assert v1 != v2 - assert v1 != v3 - assert v2 != v3 - p11 = c1.get(v1) - p21 = c1.get(v2) - d12 = vector.norm(p11-p21) - p23 = c3.get(v2) - p33 = c3.get(v3) - d23 = vector.norm(p23-p33) - p32 = c2.get(v3) - p12 = c2.get(v1) - d31 = vector.norm(p32-p12) - ddds = solve_ddd(v1,v2,v3,d12,d23,d31) - solutions = [] - for s in ddds: - solution = c1.merge2D(s).merge2D(c2).merge2D(c3) - solutions.append(solution) + # transform solutions to align with root input cluster + isroot_ab = inmap[self.root_ab] + isroot_ac = inmap[self.root_ac] + isroot_bc = inmap[self.root_bc] + for i in range(len(solutions)): + if isroot_ab: + solutions[i] = c12.merge(solutions[i]) + elif isroot_ac: + solutions[i] = c13.merge(solutions[i]) + elif isroot_bc: + solutions[i] = c23.merge(solutions[i]) return solutions def prototype_constraints(self): - c1 = self._inputs[0] - c2 = self._inputs[1] - c3 = self._inputs[2] - shared12 = Set(c1.vars).intersection(c2.vars).difference(c3.vars) - shared13 = Set(c1.vars).intersection(c3.vars).difference(c2.vars) - shared23 = Set(c2.vars).intersection(c3.vars).difference(c1.vars) - v1 = list(shared12)[0] - v2 = list(shared13)[0] - v3 = list(shared23)[0] - assert v1 != v2 - assert v1 != v3 - assert v2 != v3 constraints = [] - constraints.append(NotCounterClockwiseConstraint(v1,v2,v3)) - constraints.append(NotClockwiseConstraint(v1,v2,v3)) + constraints.append(SelectionConstraint(fnot(is_clockwise),[self.a,self.b,self.c])) + constraints.append(SelectionConstraint(fnot(is_counterclockwise),[self.a,self.b,self.c])) return constraints + +# --------------------------------------------------------- +# ------- functions to determine configurations ---------- +# --------------------------------------------------------- + def solve_ddd(v1,v2,v3,d12,d23,d31): diag_print("solve_ddd: %s %s %s %f %f %f"%(v1,v2,v3,d12,d23,d31),"clmethods") p1 = vector.vector([0.0,0.0]) @@ -894,546 +227,192 @@ def solve_ddd(v1,v2,v3,d12,d23,d31): for p3 in p3s: solution = Configuration({v1:p1, v2:p2, v3:p3}) solutions.append(solution) + diag_print("solve_ddd solutions"+str(solutions),"clmethods") return solutions -class MergeCHC(Merge): - """Represents a merging of two clusters and a hedgehog - The first cluster determines the orientation of the resulting - cluster - """ - def __init__(self, c1, hog, c2, out): - self.c1 = c1 - self.hog = hog - self.c2 = c2 - self.output = out - self._inputs = [c1, hog, c2] - self._outputs = [out] - self.overconstrained = False - self.consistent = True - MultiMethod.__init__(self) - # check coincidence - if not (hog.cvar in c1.vars and hog.cvar in c2.vars): - raise StandardError, "hog.cvar not in c1.vars and c2.vars" - shared12 = Set(c1.vars).intersection(c2.vars) - shared1h = Set(c1.vars).intersection(hog.xvars) - shared2h = Set(c2.vars).intersection(hog.xvars) - shared1 = shared12.union(shared1h) - shared2 = shared12.union(shared2h) - sharedh = shared1h.union(shared2h) - if len(shared12) < 1: - raise StandardError, "underconstrained c1 and c2" - elif len(shared12) > 1: - diag_print("overconstrained CHC - c1 and c2", "clmethods") - 
self.overconstrained = True - if len(shared1h) < 1: - raise StandardError, "underconstrained c1 and hog" - elif len(shared1h) > 1: - diag_print("overconstrained CHC - c1 and hog", "clmethods") - self.overconstrained = True - if len(shared2h) < 1: - raise StandardError, "underconstrained c2 and hog" - elif len(shared2h) > 1: - diag_print("overconstrained CHC - c2 and hog", "clmethods") - self.overconstrained = True - if len(shared1) < 2: - raise StandardError, "underconstrained c1" - elif len(shared1) > 2: - diag_print("overconstrained CHC - c1", "clmethods") - self.overconstrained = True - if len(shared2) < 2: - raise StandardError, "underconstrained c2" - elif len(shared2) > 2: - diag_print("overconstrained CHC - c2", "clmethods") - self.overconstrained = True - if len(sharedh) < 2: - raise StandardError, "underconstrained hog" - elif len(shared1) > 2: - diag_print("overconstrained CHC - hog", "clmethods") - self.overconstrained = True - - def __str__(self): - s = "mergeCHC("+str(self.c1)+"+"+str(self.hog)+"+"+str(self.c2)+"->"+str(self.output)+")" - s += "[" + self.status_str()+"]" - return s +# --------- incremental sets ---------- - def multi_execute(self, inmap): - diag_print("MergeCHC.multi_execute called","clmethods") - # determine vars - shared1 = Set(self.hog.xvars).intersection(self.c1.vars) - shared2 = Set(self.hog.xvars).intersection(self.c2.vars) - v1 = list(shared1)[0] - v2 = self.hog.cvar - v3 = list(shared2)[0] - # get configs - conf1 = inmap[self.c1] - confh = inmap[self.hog] - conf2 = inmap[self.c2] - # determine angle - p1h = confh.get(v1) - p2h = confh.get(v2) - p3h = confh.get(v3) - a123 = angle_3p(p1h, p2h, p3h) - # d1c - p11 = conf1.get(v1) - p21 = conf1.get(v2) - d12 = distance_2p(p11,p21) - # d2c - p32 = conf2.get(v3) - p22 = conf2.get(v2) - d23 = distance_2p(p32,p22) - # solve - dads = solve_dad(v1,v2,v3,d12,a123,d23) - solutions = [] - for s in dads: - solution = conf1.merge2D(s).merge2D(conf2) - solutions.append(solution) - return solutions +class Connected(incremental.IncrementalSet): + + def __init__(self, solver, incrset): + """Creates an incremental set of all pairs of connected clusters in incrset, according to solver""" + self._solver = solver + self._incrset = incrset + incremental.IncrementalSet.__init__(self, [incrset]) + return -def solve_dad(v1,v2,v3,d12,a123,d23): - diag_print("solve_dad: %s %s %s %f %f %f"%(v1,v2,v3,d12,a123,d23),"clmethods") - p2 = vector.vector([0.0, 0.0]) - p1 = vector.vector([d12, 0.0]) - p3s = [ vector.vector([d23*math.cos(a123), d23*math.sin(a123)]) ] - solutions = [] - for p3 in p3s: - solution = Configuration({v1:p1, v2:p2, v3:p3}) - solutions.append(solution) - return solutions + def _receive_add(self,source, obj): + connected = set() + for var in obj.vars: + dependend = self._solver.find_dependend(var) + dependend = filter(lambda x: x in self._incrset, dependend) + connected.update(dependend) + connected.remove(obj) + for obj2 in connected: + self._add(frozenset((obj, obj2))) -class MergeCCH(Merge): - """Represents a merging of two clusters and a hedgehog - The first cluster determines the orientation of the resulting - cluster - """ - def __init__(self, c1, c2, hog, out): - # init - self.c1 = c1 - self.c2 = c2 - self.hog = hog - self.output = out - self._inputs = [c1, c2, hog] - self._outputs = [out] - self.overconstrained = False - self.consistent = True - MultiMethod.__init__(self) - # check coincidence - if hog.cvar not in c1.vars: - raise StandardError, "hog.cvar not in c1.vars" - if hog.cvar in c2.vars: - raise 
StandardError, "hog.cvar in c2.vars" - shared12 = Set(c1.vars).intersection(c2.vars) - shared1h = Set(c1.vars).intersection(hog.xvars) - shared2h = Set(c2.vars).intersection(hog.xvars) - shared1 = shared12.union(shared1h) - shared2 = shared12.union(shared2h) - sharedh = shared1h.union(shared2h) - if len(shared12) < 1: - raise StandardError, "underconstrained c1 and c2" - elif len(shared12) > 1: - diag_print("overconstrained CCH - c1 and c2", "clmethods") - self.overconstrained = True - if len(shared1h) < 1: - raise StandardError, "underconstrained c1 and hog" - elif len(shared1h) > 1: - diag_print("overconstrained CCH - c1 and hog", "clmethods") - self.overconstrained = True - if len(shared2h) < 1: - raise StandardError, "underconstrained c2 and hog" - elif len(shared2h) > 2: - diag_print("overconstrained CCH - c2 and hog", "clmethods") - self.overconstrained = True - if len(shared1) < 1: - raise StandardError, "underconstrained c1" - elif len(shared1) > 1: - diag_print("overconstrained CCH - c1", "clmethods") - self.overconstrained = True - if len(shared2) < 2: - raise StandardError, "underconstrained c2" - elif len(shared2) > 2: - diag_print("overconstrained CCH - c2", "clmethods") - self.overconstrained = True - if len(sharedh) < 2: - raise StandardError, "underconstrained hog" - elif len(sharedh) > 2: - diag_print("overconstrained CCH - hog", "clmethods") - self.overconstrained = True - - #end __init__ - - def __str__(self): - s = "mergeCCH("+str(self.c1)+"+"+str(self.c2)+"+"+str(self.hog)+"->"+str(self.output)+")" - s += "[" + self.status_str()+"]" - return s + def _receive_remove(self,source, obj): + for frozen in list(self): + if obj in frozen: + self._remove(frozen) - def multi_execute(self, inmap): - diag_print("MergeCCH.multi_execute called","clmethods") - # assert hog.cvar in c1 - if self.hog.cvar in self.c1.vars: - c1 = self.c1 - c2 = self.c2 + def __eq__(self, other): + if isinstance(other, Connected): + return self._solver == other._solver and self._incrset == other._incrset else: - c1 = self.c2 - c2 = self.c1 - # get v1 - v1 = self.hog.cvar - # get v2 - candidates2 = Set(self.hog.xvars).intersection(c1.vars).intersection(c2.vars) - assert len(candidates2) >= 1 - v2 = list(candidates2)[0] - # get v3 - candidates3 = Set(self.hog.xvars).intersection(c2.vars).difference([v1, v2]) - assert len(candidates3) >= 1 - v3 = list(candidates3)[0] - # check - assert v1 != v2 - assert v1 != v3 - assert v2 != v3 - # get configs - confh = inmap[self.hog] - conf1 = inmap[c1] - conf2 = inmap[c2] - # get angle - p1h = confh.get(v1) - p2h = confh.get(v2) - p3h = confh.get(v3) - a312 = angle_3p(p3h, p1h, p2h) - # get distance d12 - p11 = conf1.get(v1) - p21 = conf1.get(v2) - d12 = distance_2p(p11, p21) - # get distance d23 - p22 = conf2.get(v2) - p32 = conf2.get(v3) - d23 = distance_2p(p22, p32) - adds = solve_add(v1,v2,v3,a312,d12,d23) - solutions = [] - # do merge (note, order c1 c2 restored) - conf1 = inmap[self.c1] - conf2 = inmap[self.c2] - for s in adds: - solution = conf1.merge2D(s).merge2D(conf2) - solutions.append(solution) - return solutions + return False - def prototype_constraints(self): - # assert hog.cvar in c1 - if self.hog.cvar in self.c1.vars: - c1 = self.c1 - c2 = self.c2 + def __hash__(self): + return hash((self._solver, self._incrset)) + +class ConnectedPairs(incremental.IncrementalSet): + + def __init__(self, solver, incrset1, incrset2): + """Creates an incremental set of all pairs (c1, c2) from incrset1 and incrset2 respectively, that are connected according to solver""" + 
self._solver = solver + self._incrset1 = incrset1 + self._incrset2 = incrset2 + incremental.IncrementalSet.__init__(self, [incrset1, incrset2]) + return + + def _receive_add(self,source, obj): + connected = set() + for var in obj.vars: + dependend = self._solver.find_dependend(var) + if source == self._incrset1: + dependend = filter(lambda x: x in self._incrset2, dependend) + elif source == self._incrset2: + dependend = filter(lambda x: x in self._incrset1, dependend) + connected.update(dependend) + if obj in connected: + connected.remove(obj) + for obj2 in connected: + if source == self._incrset1: + self._add((obj, obj2)) + elif source == self._incrset2: + self._add((obj2, obj)) + + def _receive_remove(self,source, obj): + for (c1,c2) in list(self): + if c1==obj or c2==obj: + self._remove((c1,c2)) + + def __eq__(self, other): + if isinstance(other, ConnectedPairs): + return self._solver == other._solver and self._incrset1 == other._incrset1 and self._incrset2 == other._incrset2 else: - c1 = self.c2 - c2 = self.c1 - shared1h = Set(self.hog.xvars).intersection(c1.vars).difference([self.hog.cvar]) - shared2h = Set(self.hog.xvars).intersection(c2.vars).difference(shared1h) - # get vars - v1 = self.hog.cvar - v2 = list(shared1h)[0] - v3 = list(shared2h)[0] - assert v1 != v2 - assert v1 != v3 - assert v2 != v3 - constraints = [] - constraints.append(NotAcuteConstraint(v2,v3,v1)) - constraints.append(NotObtuseConstraint(v2,v3,v1)) - return constraints + return False -def solve_add(a,b,c, a_cab, d_ab, d_bc): - diag_print("solve_dad: %s %s %s %f %f %f"%(a,b,c,a_cab,d_ab,d_bc),"clmethods") - p_a = vector.vector([0.0,0.0]) - p_b = vector.vector([d_ab,0.0]) - dir = vector.vector([math.cos(-a_cab),math.sin(-a_cab)]) - solutions = cr_int(p_b, d_bc, p_a, dir) - rval = [] - for s in solutions: - p_c = s - map = {a:p_a, b:p_b, c:p_c} - rval.append(Configuration(map)) - return rval + def __hash__(self): + return hash((self._solver, self._incrset1, self._incrset2)) -class BalloonFromHogs(Merge): - """Represent a balloon merged from two hogs""" - def __init__(self, hog1, hog2, balloon): - """Create a new balloon from two angles - - keyword args: - hog1 - a Hedghog - hog2 - a Hedehog - balloon - a Balloon instance - """ - self.hog1 = hog1 - self.hog2 = hog2 - self.balloon = balloon - self._inputs = [hog1, hog2] - self._outputs = [balloon] - self.overconstrained = False - self.consistent = True - MultiMethod.__init__(self) - # check coincidence - if hog1.cvar == hog2.cvar: - raise StandardError, "hog1.cvar is hog2.cvar" - shared12 = Set(hog1.xvars).intersection(hog2.xvars) - if len(shared12) < 1: - raise StandardError, "underconstrained" - #elif len(shared12) > 1: - # raise StandardError, "overconstrained" - def __str__(self): - s = "hog2balloon("+str(self.hog1)+"+"+str(self.hog2)+"->"+str(self.balloon)+")" - s += "[" + self.status_str()+"]" - return s +class Rigids(incremental.Filter): + + def __init__(self, solver): + self._solver = solver + incremental.Filter.__init__(self, lambda c: isinstance(c, Rigid), self._solver.top_level()) - def multi_execute(self, inmap): - diag_print("BalloonFromHogs.multi_execute called","clmethods") - v1 = self.hog1.cvar - v2 = self.hog2.cvar - shared = Set(self.hog1.xvars).intersection(self.hog2.xvars).difference([v1,v2]) - v3 = list(shared)[0] - assert v1 != v2 - assert v1 != v3 - assert v2 != v3 - # determine angle312 - conf1 = inmap[self.hog1] - p31 = conf1.get(v3) - p11 = conf1.get(v1) - p21 = conf1.get(v2) - a312 = angle_3p(p31,p11,p21) - # determine distance d12 - d12 = 
1.0 - # determine angle123 - conf2 = inmap[self.hog2] - p12 = conf2.get(v1) - p22 = conf2.get(v2) - p32 = conf2.get(v3) - a123 = angle_3p(p12,p22,p32) - # solve - return solve_ada(v1,v2,v3, a312, d12, a123) + def __hash__(self): + return hash((self.__class__, self._solver)) -def solve_ada(a, b, c, a_cab, d_ab, a_abc): - diag_print("solve_ada: %s %s %s %f %f %f"%(a,b,c,a_cab,d_ab,a_abc),"clmethods") - p_a = vector.vector([0.0,0.0]) - p_b = vector.vector([d_ab, 0.0]) - dir_ac = vector.vector([math.cos(-a_cab),math.sin(-a_cab)]) - dir_bc = vector.vector([-math.cos(-a_abc),math.sin(-a_abc)]) - if tol_eq(math.sin(a_cab), 0.0) and tol_eq(math.sin(a_abc),0.0): - m = d_ab/2 + math.cos(-a_cab)*d_ab - math.cos(-a_abc)*d_ab - p_c = vector.vector([m,0.0]) - # p_c = (p_a + p_b) / 2 - map = {a:p_a, b:p_b, c:p_c} - cluster = _Configuration(map) - cluster.underconstrained = True - rval = [cluster] + def __eq__(self, other): + if isinstance(other, self.__class__): + return self._solver == other._solver else: - solutions = rr_int(p_a,dir_ac,p_b,dir_bc) - rval = [] - for s in solutions: - p_c = s - map = {a:p_a, b:p_b, c:p_c} - rval.append(Configuration(map)) - #endif - return rval + return False -class BalloonMerge(Merge): - """Represents a merging of two balloons - """ - def __init__(self, in1, in2, out): - self.input1 = in1 - self.input2 = in2 - self.output = out - self.shared = list(Set(self.input1.vars).intersection(self.input2.vars)) - self._inputs = [in1, in2] - self._outputs = [out] - self.consistent = True - MultiMethod.__init__(self) - # check coincidence - self.overconstrained = False - shared = Set(in1.vars).intersection(in2.vars) - if len(shared) < 2: - raise StandardError, "underconstrained" - elif len(shared) > 2: - diag_print("overconstrained balloon merge", "clmethods") - self.overconstrained = True - - def __str__(self): - s = "balloonmerge("+str(self.input1)+"+"+str(self.input2)+"->"+str(self.output)+")" - s += "[" + self.status_str()+"]" - return s - - def multi_execute(self, inmap): - diag_print("BalloonMerge.multi_execute called","clmethods") - c1 = self._inputs[0] - c2 = self._inputs[1] - conf1 = inmap[c1] - conf2 = inmap[c2] - return [conf1.merge_scale_2D(conf2)] - -class BalloonRigidMerge(Merge): - """Represents a merging of a balloon and a cluster - """ - def __init__(self, balloon, cluster, output): - self.balloon = balloon - self.cluster= cluster - self.output = output - self.shared = list(Set(self.balloon.vars).intersection(self.cluster.vars)) - self._inputs = [balloon, cluster] - self._outputs = [output] - self.overconstrained = False - self.consistent = True - MultiMethod.__init__(self) - # check coincidence - shared = Set(balloon.vars).intersection(cluster.vars) - if len(shared) < 2: - raise StandardError, "underconstrained balloon-cluster merge" - elif len(shared) > 2: - diag_print("overconstrained merge "+str(balloon)+"&"+str(cluster), "clmethods") - self.overconstrained = True - - def __str__(self): - s = "balloonclustermerge("+str(self.balloon)+"+"+str(self.cluster)+"->"+str(self.output)+")" - s += "[" + self.status_str()+"]" - return s - - def multi_execute(self, inmap): - diag_print("BalloonRigidMerge.multi_execute called","clmethods") - rigid = inmap[self.cluster] - balloon = inmap[self.balloon] - return [rigid.merge_scale_2D(balloon)] - #return [balloon.copy()] - -class MergeHogs(Merge): - """Represents a merging of two hogs to form a new hog - """ - def __init__(self, hog1, hog2, output): - self.hog1 = hog1 - self.hog2 = hog2 - self.output = output - self._inputs = 
[hog1, hog2] - self._outputs = [output] - self.consistent = True - MultiMethod.__init__(self) - # check coincidence - self.overconstrained = False - if hog1.cvar != hog2.cvar: - raise StandardError, "hog1.cvar != hog2.cvar" - shared = Set(hog1.xvars).intersection(hog2.xvars) - if len(shared) < 1: - raise StandardError, "underconstrained balloon-cluster merge" - elif len(shared) > 1: - diag_print("overconstrained merge "+str(hog1)+"&"+str(hog2), "clmethods") - self.overconstrained = True - - def __str__(self): - s = "mergeHH("+str(self.hog1)+"+"+str(self.hog2)+"->"+str(self.output)+")" - s += "[" + self.status_str()+"]" - return s - - def multi_execute(self, inmap): - diag_print("MergeHogs.multi_execute called","clmethods") - conf1 = inmap[self._inputs[0]] - conf2 = inmap[self._inputs[1]] - shared = Set(self.hog1.xvars).intersection(self.hog2.xvars) - conf12 = conf1.merge_scale_2D(conf2, [self.hog1.cvar, list(shared)[0]]) - return [conf12] + def __repr__(self): + return "Rigids("+repr(self._solver)+")" -# ---------- derive methods ------- -class Rigid2Hog(Derive): - """Represents a derivation of a hog from a c)luster - """ - def __init__(self, cluster, hog): - self.cluster = cluster - self.hog = hog - self._inputs = [cluster] - self._outputs = [hog] - MultiMethod.__init__(self) +class Points(incremental.Filter): + + def __init__(self, solver): + self._solver = solver + rigids = Rigids(solver) + incremental.Filter.__init__(self, lambda c: len(c.vars)==1, rigids) - def __str__(self): - s = "rigid2hog("+str(self.cluster)+"->"+str(self.hog)+")" - return s + def __hash__(self): + return hash((self.__class__, self._solver)) - def multi_execute(self, inmap): - diag_print("Rigid2Hog.multi_execute called","clmethods") - conf1 = inmap[self._inputs[0]] - vars = list(self._outputs[0].xvars) + [self._outputs[0].cvar] - conf = conf1.select(vars) - return [conf] - -class Balloon2Hog(Derive): - """Represents a derivation of a hog from a balloon - """ - def __init__(self, balloon, hog): - self.balloon = balloon - self.hog = hog - self._inputs = [balloon] - self._outputs = [hog] - MultiMethod.__init__(self) - - def __str__(self): - s = "balloon2hog("+str(self.balloon)+"->"+str(self.hog)+")" - return s - - def multi_execute(self, inmap): - diag_print("Balloon2Hog.multi_execute called","clmethods") - conf1 = inmap[self._inputs[0]] - vars = list(self._outputs[0].xvars) + [self._outputs[0].cvar] - conf = conf1.select(vars) - return [conf] - -class SubHog(Derive): - def __init__(self, hog, sub): - self.hog = hog - self.sub = sub - self._inputs = [hog] - self._outputs = [sub] - MultiMethod.__init__(self) - - def __str__(self): - s = "subhog("+str(self.hog)+"->"+str(self.sub)+")" - return s - - def multi_execute(self, inmap): - diag_print("SubHog.multi_execute called","clmethods") - conf1 = inmap[self._inputs[0]] - vars = list(self._outputs[0].xvars) + [self._outputs[0].cvar] - conf = conf1.select(vars) - return [conf] - -class PrototypeMethod(MultiMethod): - - def __init__(self, incluster, selclusters, outcluster, constraints): - self._inputs = [incluster]+selclusters - self._outputs = [outcluster] - self._constraints = constraints - MultiMethod.__init__(self) - - def multi_execute(self, inmap): - diag_print("PrototypeMethod.multi_execute called","clmethods") - incluster = self._inputs[0] - selclusters = [] - for i in range(1,len(self._inputs)): - selclusters.append(self._inputs[i]) - print "incluster", incluster - print "selclusters", map(str, selclusters) - # get confs - inconf = inmap[incluster] - selmap = {} - 
for cluster in selclusters: - conf = inmap[cluster] - assert len(conf.vars()) == 1 - var = conf.vars()[0] - selmap[var] = conf.map[var] - selconf = Configuration(selmap) - sat = True - print "inconf:",inconf - print "selconf:",selconf - for con in self._constraints: - print "con:",con, - if con.satisfied(inconf.map) != con.satisfied(selconf.map): - sat = False - print sat - if sat: - return [inconf] + def __eq__(self, other): + if isinstance(other, self.__class__): + return self._solver == other._solver else: - return [] + return False + def __repr__(self): + return "Points("+repr(self._solver)+")" +class Distances(incremental.Filter): + + def __init__(self, solver): + self._solver = solver + rigids = Rigids(solver) + incremental.Filter.__init__(self, lambda c: len(c.vars)==2, rigids) + + def __hash__(self): + return hash((self.__class__, self._solver)) + + def __eq__(self, other): + if isinstance(other, self.__class__): + return self._solver == other._solver + else: + return False + + def __repr__(self): + return "Distances("+repr(self._solver)+")" + +class Triplets(incremental.IncrementalSet): + + def __init__(self, solver, incrset): + """Creates an incremental set of all triplets of connected clusters in incrset, according to solver""" + self._solver = solver + self._incrset = incrset + incremental.IncrementalSet.__init__(self, [incrset]) + return + + def _receive_add(self,source, obj): + connected = set() + for var in obj.vars: + dependend = self._solver.find_dependend(var) + dependend = filter(lambda x: x in self._incrset, dependend) + dependend = filter(lambda x: len(x.vars.intersection(obj.vars))==1, dependend) + connected.update(dependend) + if obj in connected: + connected.remove(obj) + obj1 = obj + if len(connected) >= 2: + l = list(connected) + for i in range(len(l)): + obj2 = l[i] + shared12 = obj1.vars.intersection(obj2.vars) + for j in range(i): + obj3 = l[j] + if len(obj2.vars.intersection(obj3.vars))==1: + shared23 = obj2.vars.intersection(obj3.vars) + shared13 = obj1.vars.intersection(obj3.vars) + shared = shared12.union(shared23).union(shared13) + if len(shared)==3: + self._add(frozenset((obj1,obj2,obj3))) + + def _receive_remove(self,source, obj): + for frozen in list(self): + if obj in frozen: + self._remove(frozen) + + def __eq__(self, other): + if isinstance(other, self.__class__): + return self._solver == other._solver and self._incrset == other._incrset + else: + return False + + def __hash__(self): + return hash((self.__class__, self._solver, self._incrset)) + + def __repr__(self): + return "Triplets("+repr(self._solver)+","+repr(self._incrset)+")" + + diff --git a/geosolver/clsolver3D.py b/geosolver/clsolver3D.py index ff756cf..ecc3ccc 100644 --- a/geosolver/clsolver3D.py +++ b/geosolver/clsolver3D.py @@ -11,7 +11,7 @@ import incremental class ClusterSolver3D(ClusterSolver): - """A generic 3D geometric constraint solver. See ClusterSolver for details.""" + """A 3D geometric constraint solver. 
See ClusterSolver for details.""" # ------- PUBLIC METHODS -------- def __init__(self): @@ -227,44 +227,44 @@ class MergePR(ClusterMethod): def _incremental_matcher(solver): toplevel = solver.top_level() - rigids = incremental.Filter(lambda c: isinstance(c, Rigid), toplevel) - points = incremental.Filter(lambda c: len(c.vars)==1, rigids) + rigids = Rigids(solver) + points = Points(solver) connectedpairs = ConnectedPairs(solver, points, rigids) matcher = incremental.Map(lambda (p,r): MergePR({"$p":p, "$r":r}), connectedpairs) return matcher incremental_matcher = staticmethod(_incremental_matcher) - def _handcoded_match(problem, newcluster, connected): - connected = set() - for var in newcluster.vars: - dependend = problem.find_dependend(var) - dependend = filter(lambda x: problem.is_top_level(x), dependend) - connected.update(dependend) - matches = []; - if isinstance(newcluster, Rigid) and len(newcluster.vars)==1: - points = [newcluster] - distances = filter(lambda x: isinstance(x, Rigid) and len(x.vars)==2, connected) - elif isinstance(newcluster, Rigid) and len(newcluster.vars)==2: - distances = [newcluster] - points = filter(lambda x: isinstance(x, Rigid) and len(x.vars)==1, connected) - else: - return [] - for p in points: - for d in distances: - m = Map({ - "$p": p, - "$r": d, - "$a": list(p.vars)[0] - }) - matches.append(m) - return matches; - handcoded_match = staticmethod(_handcoded_match) + #def _handcoded_match(problem, newcluster, connected): + # connected = set() + # for var in newcluster.vars: + # dependend = problem.find_dependend(var) + # dependend = filter(lambda x: problem.is_top_level(x), dependend) + # connected.update(dependend) + # matches = []; + # if isinstance(newcluster, Rigid) and len(newcluster.vars)==1: + # points = [newcluster] + # distances = filter(lambda x: isinstance(x, Rigid) and len(x.vars)==2, connected) + # elif isinstance(newcluster, Rigid) and len(newcluster.vars)==2: + # distances = [newcluster] + # points = filter(lambda x: isinstance(x, Rigid) and len(x.vars)==1, connected) + # else: + # return [] + # for p in points: + # for d in distances: + # m = Map({ + # "$p": p, + # "$r": d, + # "$a": list(p.vars)[0] + # }) + # matches.append(m) + # return matches; + #handcoded_match = staticmethod(_handcoded_match) - def _pattern(): - pattern = [["point","$p",["$a"]], ["rigid", "$r", ["$a"]]] - return pattern2graph(pattern) - pattern = staticmethod(_pattern) - patterngraph = _pattern() + #def _pattern(): + # pattern = [["point","$p",["$a"]], ["rigid", "$r", ["$a"]]] + # return pattern2graph(pattern) + #pattern = staticmethod(_pattern) + #patterngraph = _pattern() def __str__(self): s = "MergePR("+str(self._inputs[0])+"+"+str(self._inputs[1])+"->"+str(self._outputs[0])+")" @@ -303,11 +303,28 @@ class MergeDR(ClusterMethod): self._outputs = [out] ClusterMethod.__init__(self) - def _pattern(): - pattern = [["distance","$d",["$a","$b"]], ["rigid", "$r",["$a", "$b"]]] - return pattern2graph(pattern) - pattern = staticmethod(_pattern) - patterngraph = _pattern() + + def _incremental_matcher(solver): + toplevel = solver.top_level() + rigids = Rigids(solver) + distances = Distances(solver) + connectedpairs = ConnectedPairs(solver, distances, rigids) + twoconnectedpairs = incremental.Filter(lambda (d,r): len(d.vars.intersection(r.vars))==2, connectedpairs); + matcher = incremental.Map(lambda (d,r): MergeDR({"$d":d, "$r":r}), twoconnectedpairs) + # + #global debugger + #debugger = incremental.Debugger(connectedpairs) + # + return matcher + + incremental_matcher = 
staticmethod(_incremental_matcher) + + + #def _pattern(): + # pattern = [["distance","$d",["$a","$b"]], ["rigid", "$r",["$a", "$b"]]] + # return pattern2graph(pattern) + #pattern = staticmethod(_pattern) + #patterngraph = _pattern() def __str__(self): s = "MergeDR("+str(self._inputs[0])+"+"+str(self._inputs[1])+"->"+str(self._outputs[0])+")" @@ -788,19 +805,19 @@ class Connected(incremental.IncrementalSet): incremental.IncrementalSet.__init__(self, [incrset]) return - def _receive_add(self,source, object): + def _receive_add(self,source, obj): connected = set() - for var in object.vars: + for var in obj.vars: dependend = self._solver.find_dependend(var) dependend = filter(lambda x: x in self._incrset, dependend) connected.update(dependend) - connected.remove(object) - for object2 in connected: - self._add(frozenset((object, object2))) + connected.remove(obj) + for obj2 in connected: + self._add(frozenset((obj, obj2))) - def _receive_remove(self,source, object): + def _receive_remove(self,source, obj): for frozen in list(self): - if object in frozen: + if obj in frozen: self._remove(frozen) def __eq__(self, other): @@ -822,26 +839,26 @@ class ConnectedPairs(incremental.IncrementalSet): incremental.IncrementalSet.__init__(self, [incrset1, incrset2]) return - def _receive_add(self,source, object): + def _receive_add(self,source, obj): connected = set() - for var in object.vars: + for var in obj.vars: dependend = self._solver.find_dependend(var) if source == self._incrset1: dependend = filter(lambda x: x in self._incrset2, dependend) elif source == self._incrset2: dependend = filter(lambda x: x in self._incrset1, dependend) connected.update(dependend) - if object in connected: - connected.remove(object) - for object2 in connected: + if obj in connected: + connected.remove(obj) + for obj2 in connected: if source == self._incrset1: - self._add((object, object2)) + self._add((obj, obj2)) elif source == self._incrset2: - self._add((object2, object)) + self._add((obj2, obj)) - def _receive_remove(self,source, object): + def _receive_remove(self,source, obj): for (c1,c2) in list(self): - if c1==object or c2==object: + if c1==obj or c2==obj: self._remove((c1,c2)) def __eq__(self, other): @@ -854,3 +871,63 @@ class ConnectedPairs(incremental.IncrementalSet): return hash((self._solver, self._incrset1, self._incrset2)) +class Rigids(incremental.Filter): + + def __init__(self, solver): + self._solver = solver + incremental.Filter.__init__(self, lambda c: isinstance(c, Rigid), self._solver.top_level()) + + def __hash__(self): + return hash((self.__class__, self._solver)) + + def __eq__(self, other): + if isinstance(other, self.__class__): + return self._solver == other._solver + else: + return False + + def __repr__(self): + return "Rigids("+repr(self._solver)+")" + + + +class Points(incremental.Filter): + + def __init__(self, solver): + self._solver = solver + rigids = Rigids(solver) + incremental.Filter.__init__(self, lambda c: len(c.vars)==1, rigids) + + def __hash__(self): + return hash((self.__class__, self._solver)) + + def __eq__(self, other): + if isinstance(other, self.__class__): + return self._solver == other._solver + else: + return False + + def __repr__(self): + return "Points("+repr(self._solver)+")" + +class Distances(incremental.Filter): + + def __init__(self, solver): + self._solver = solver + rigids = Rigids(solver) + incremental.Filter.__init__(self, lambda c: len(c.vars)==2, rigids) + + def __hash__(self): + return hash((self.__class__, self._solver)) + + def __eq__(self, other): + if 
isinstance(other, self.__class__): + return self._solver == other._solver + else: + return False + + def __repr__(self): + return "Distances("+repr(self._solver)+")" + + + diff --git a/geosolver/geometric.py b/geosolver/geometric.py index eeec908..2d4ea62 100644 --- a/geosolver/geometric.py +++ b/geosolver/geometric.py @@ -5,6 +5,7 @@ import vector import math from clsolver import PrototypeMethod, SelectionMethod from clsolver3D import ClusterSolver3D +from clsolver2D import ClusterSolver2D from cluster import * from configuration import Configuration from diagnostic import diag_print diff --git a/geosolver/incremental.py b/geosolver/incremental.py index a82e305..5047a21 100755 --- a/geosolver/incremental.py +++ b/geosolver/incremental.py @@ -41,6 +41,9 @@ class IncrementalSet(notify.Notifier, notify.Listener): (self._ref, count) = self._all[self] count += 1 self._all[self] = (self._ref, count) + + self.listeners = self._ref().listeners + self.notifiers = self._ref().notifiers else: # set self._ref and update self._all self._ref = None @@ -378,6 +381,32 @@ def combinations(listofiters): z.add(tuple(frozenset([e]).union(y))) return z +class Debugger(IncrementalSet): + """An IncrementalSet that prints every addition to and removal from the watched IncrementalSet, for debugging""" + def __init__(self, watch_iset): + self._watch = watch_iset + IncrementalSet.__init__(self, [self._watch]) + + def _receive_add(self, source, obj): + print "add", obj, "to", source + + def _receive_remove(self, source, obj): + print "remove", obj, "from", source + + def __eq__(self, other): + if isinstance(other, Debugger): + return self._watch == other._watch + else: + return False + + def __hash__(self): + return hash((self.__class__, self._watch)) + + def __repr__(self): + return "Debugger(%s)"%str(self._watch) + + + def test1(): s = MutableSet([5,-3]) @@ -434,6 +463,7 @@ def test3(): print set(intersection) print set(difference) + if __name__ == '__main__': test1() test2() diff --git a/geosolver/notify.py b/geosolver/notify.py index 5eb90aa..70c4484 100644 --- a/geosolver/notify.py +++ b/geosolver/notify.py @@ -5,8 +5,8 @@ # This also implies that there is no order of notifications sent, and objects can only register as listener/notifier once! # Notes: -# - member variables "listeners" and "notifiers" are not hidden, but should never be modified independently, so be careful! -# - subclasses will need to override the receive_notify class. +# - member variables "listeners" and "notifiers" are not hidden, but should never be modified directly, so be careful! +# - subclasses of Listener will want to override the receive_notify method. # - Notifier/Listener subclasses __init__ method must call Notifier/Listener.__init__(self) import weakref @@ -19,20 +19,15 @@ class Notifier: """ def __init__(self): - #self.listeners = [] self.listeners = weakref.WeakKeyDictionary() def add_listener(self, listener): """add a listener to the list (and self to listers' list)""" - #self.listeners.add(listener) - #listener.notifiers.add(self) self.listeners[listener] = True listener.notifiers[self] = True def rem_listener(self, listener): """remove a listener from the list (and self from listers' list)""" - #self.listeners.remove(listener) - #listener.notifiers.remove(self) del self.listeners[listener] del listener.notifiers[self] @@ -41,6 +36,16 @@ class Notifier: for dest in self.listeners: dest.receive_notify(self, message) + def __getstate__(self): + """when pickling... 
do not save self.listeners""" + dict = self.__dict__.copy() + del dict['listeners'] + return dict + + def __setstate__(self, dict): + """when unpickling... create new self.listeners""" + self.__dict__ = dict + self.listeners = weakref.WeakKeyDictionary() class Listener: """A listener is notified by one or more Notifiers. @@ -50,26 +55,31 @@ class Listener: """ def __init__(self): - # 20090521 - replaced list by weakKeyDict, do when listerner deleted, it is removed from list - #self.notifiers = [] self.notifiers = weakref.WeakKeyDictionary(); def add_notifier(self, notifier): """add a notifier to the list (and self to notifiers' list)""" - #self.notifiers.add(notifier) - #notifier.listeners.add(self) self.notifiers[notifier] = True notifier.listeners[self] = True def rem_notifier(self, notifier): """remove a notifier from the list (and self from notifiers' list)""" - #self.notifiers.remove(notifier) - #notifier.listeners.remove(self) del self.notifiers[notifier] del notifier.listeners[self] def receive_notify(self, source, message): """receive a message from a notifier. Implementing classes should override this.""" - print self,"receive_notify",source,message + print "receive_notify", self, source,message + + def __getstate__(self): + """when pickling... do not save self.notifiers""" + dict = self.__dict__.copy() + del dict['notifiers'] + return dict + + def __setstate__(self, dict): + """when unpickling... create new self.notifiers""" + self.__dict__ = dict + self.notifiers = weakref.WeakKeyDictionary() + - diff --git a/test/test.py b/test/test.py index 1300dc6..38e7f9a 100644 --- a/test/test.py +++ b/test/test.py @@ -374,6 +374,43 @@ def double_triangle(): problem.add_constraint(DistanceConstraint('v3', 'v4', 10.0)) return problem +def triple_double_triangle(): + problem = GeometricProblem(dimension=2) + problem.add_point('QX', vector([0.0, 0.0])) + problem.add_point('QA2', vector([1.0, 0.0])) + problem.add_point('QA3', vector([0.0, 1.0])) + problem.add_point('QY', vector([1.0, 1.0])) + problem.add_constraint(DistanceConstraint('QX', 'QA2', 10.0)) + problem.add_constraint(DistanceConstraint('QX', 'QA3', 10.0)) + problem.add_constraint(DistanceConstraint('QA2', 'QA3', 10.0)) + problem.add_constraint(DistanceConstraint('QA2', 'QY', 10.0)) + problem.add_constraint(DistanceConstraint('QA3', 'QY', 10.0)) + + #problem.add_point('QX', vector([0.0, 0.0])) + problem.add_point('QB2', vector([1.0, 0.0])) + problem.add_point('QZ', vector([0.0, 1.0])) + problem.add_point('QB4', vector([1.0, 1.0])) + problem.add_constraint(DistanceConstraint('QX', 'QB2', 10.0)) + problem.add_constraint(DistanceConstraint('QX', 'QZ', 10.0)) + problem.add_constraint(DistanceConstraint('QB2', 'QZ', 10.0)) + problem.add_constraint(DistanceConstraint('QB2', 'QB4', 10.0)) + problem.add_constraint(DistanceConstraint('QZ', 'QB4', 10.0)) + + #problem.add_point('QY', vector([0.0, 0.0])) + problem.add_point('QC2', vector([1.0, 0.0])) + #problem.add_point('QZ', vector([0.0, 1.0])) + problem.add_point('QC4', vector([1.0, 1.0])) + problem.add_constraint(DistanceConstraint('QY', 'QC2', 10.0)) + problem.add_constraint(DistanceConstraint('QY', 'QZ', 10.0)) + problem.add_constraint(DistanceConstraint('QC2', 'QZ', 10.0)) + problem.add_constraint(DistanceConstraint('QC2', 'QC4', 10.0)) + problem.add_constraint(DistanceConstraint('QZ', 'QC4', 10.0)) + + return problem + + + + def hog1(): # double triangle with inter-angle (needs angle propagation) problem = GeometricProblem(dimension=2) @@ -717,7 +754,9 @@ def test(problem, 
use_prototype=True): print "problem:" print problem print "use_prototype=",use_prototype + print "Solving..." solver = GeometricSolver(problem, use_prototype) + print "...done" print "drplan:" print solver.dr print "top-level rigids:",list(solver.dr.top_level()) @@ -883,8 +922,8 @@ def selection_test(): print len(solver.get_solutions()), "solutions" -def runtests(): - #diag_select("clsolver3D") +def test3d(): + #diag_select("clsolver") test(double_tetrahedron_problem()) #test(ada_tetrahedron_problem()) #test(double_banana_problem()) @@ -899,5 +938,9 @@ def runtests(): #selection_test() #test(overconstrained_tetra()) +def test2d(): + #test(ddd_problem()) + #test(double_triangle()) + test(triple_double_triangle()) -if __name__ == "__main__": runtests() +if __name__ == "__main__": test2d() diff --git a/workbench/run b/workbench/run.sh similarity index 100% rename from workbench/run rename to workbench/run.sh
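Reviewer note (not part of the patch): the incremental matchers introduced in clsolver3D.py replace the old pattern-graph and hand-coded matching with a composition of small incremental sets. Below is a minimal sketch of that composition in the patch's own Python 2 style; it simply mirrors MergePR._incremental_matcher and assumes the Rigids, Points and ConnectedPairs classes and incremental.Map behave as defined above (the helper name merge_pr_matcher is only for illustration):

    import incremental

    def merge_pr_matcher(solver):
        # all Rigid clusters currently on the solver's top level
        rigids = Rigids(solver)
        # the subset of those rigids with exactly one variable, i.e. points
        points = Points(solver)
        # (point, rigid) pairs that share a variable, kept up to date
        # as clusters are added to or removed from the solver
        pairs = ConnectedPairs(solver, points, rigids)
        # turn each connected pair into a candidate MergePR method;
        # the Map is itself incremental, so no re-matching from scratch
        return incremental.Map(lambda (p, r): MergePR({"$p": p, "$r": r}), pairs)

MergeDR._incremental_matcher follows the same shape, using Distances instead of Points and an extra incremental.Filter that keeps only the pairs sharing two variables.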