From 492f71cc1955aa485dd70d86b045cda0ff38faa3 Mon Sep 17 00:00:00 2001 From: Gabi Roeger Date: Thu, 1 Feb 2024 15:48:54 +0100 Subject: [PATCH 1/5] [issue1133] Fix bug in invariant synthesis and revise its implementation. It was possible that an unbalanced action passed the balance check. This was a conceptual gap in the original algorithm for which we had to significantly revise the invariant synthesis. We used the opportunity to revise wide parts of its implementation, restructuring and renaming the different components to improve clarity. --- src/translate/constraints.py | 249 +++++++------- src/translate/invariant_finder.py | 17 +- src/translate/invariants.py | 535 ++++++++++++++++++------------ src/translate/tools.py | 19 -- 4 files changed, 469 insertions(+), 351 deletions(-) diff --git a/src/translate/constraints.py b/src/translate/constraints.py index 6190acc0f7..a26a2c9e7f 100644 --- a/src/translate/constraints.py +++ b/src/translate/constraints.py @@ -1,41 +1,30 @@ import itertools +from typing import Iterable, List, Tuple -class NegativeClause: - # disjunction of inequalities - def __init__(self, parts): +class InequalityDisjunction: + def __init__(self, parts: List[Tuple[str, str]]): self.parts = parts assert len(parts) def __str__(self): - disj = " or ".join(["(%s != %s)" % (v1, v2) - for (v1, v2) in self.parts]) - return "(%s)" % disj - - def is_satisfiable(self): - for part in self.parts: - if part[0] != part[1]: - return True - return False - - def apply_mapping(self, m): - new_parts = [(m.get(v1, v1), m.get(v2, v2)) for (v1, v2) in self.parts] - return NegativeClause(new_parts) + disj = " or ".join([f"({v1} != {v2})" for (v1, v2) in self.parts]) + return f"({disj})" -class Assignment: - def __init__(self, equalities): - self.equalities = tuple(equalities) - # represents a conjunction of expressions ?x = ?y or ?x = d - # with ?x, ?y being variables and d being a domain value +class EqualityConjunction: + def __init__(self, equalities: List[Tuple[str, 
str]]): + self.equalities = equalities + # A conjunction of expressions x = y, where x,y are either strings + # that denote objects or variables, or ints that denote invariant + # parameters. - self.consistent = None - self.mapping = None - self.eq_classes = None + self._consistent = None + self._representative = None # dictionary + self._eq_classes = None def __str__(self): - conj = " and ".join(["(%s = %s)" % (v1, v2) - for (v1, v2) in self.equalities]) - return "(%s)" % conj + conj = " and ".join([f"({v1} = {v2})" for (v1, v2) in self.equalities]) + return f"({conj})" def _compute_equivalence_classes(self): eq_classes = {} @@ -48,113 +37,141 @@ def _compute_equivalence_classes(self): c1.update(c2) for elem in c2: eq_classes[elem] = c1 - self.eq_classes = eq_classes + self._eq_classes = eq_classes - def _compute_mapping(self): - if not self.eq_classes: + def _compute_representatives(self): + if not self._eq_classes: self._compute_equivalence_classes() - # create mapping: each key is mapped to the smallest - # element in its equivalence class (with objects being - # smaller than variables) - mapping = {} - for eq_class in self.eq_classes.values(): - variables = [item for item in eq_class if item.startswith("?")] - constants = [item for item in eq_class if not item.startswith("?")] - if len(constants) >= 2: - self.consistent = False - self.mapping = None + # Choose a representative for each equivalence class. Objects are + # prioritized over variables and ints, but at most one object per + # equivalence class is allowed (otherwise the conjunction is + # inconsistent). 
+        representative = {}
+        for eq_class in self._eq_classes.values():
+            if next(iter(eq_class)) in representative:
+                continue  # we already processed this equivalence class
+            variables = [item for item in eq_class if isinstance(item, int) or
+                         item.startswith("?")]
+            objects = [item for item in eq_class if not isinstance(item, int)
+                       and not item.startswith("?")]
+
+            if len(objects) >= 2:
+                self._consistent = False
+                self._representative = None
                 return
-            if constants:
-                set_val = constants[0]
+            if objects:
+                set_val = objects[0]
             else:
-                set_val = min(variables)
+                set_val = variables[0]
             for entry in eq_class:
-                mapping[entry] = set_val
-        self.consistent = True
-        self.mapping = mapping
+                representative[entry] = set_val
+        self._consistent = True
+        self._representative = representative
 
     def is_consistent(self):
-        if self.consistent is None:
-            self._compute_mapping()
-        return self.consistent
+        if self._consistent is None:
+            self._compute_representatives()
+        return self._consistent
 
-    def get_mapping(self):
-        if self.consistent is None:
-            self._compute_mapping()
-        return self.mapping
+    def get_representative(self):
+        if self._consistent is None:
+            self._compute_representatives()
+        return self._representative
 
 
 class ConstraintSystem:
+    """A ConstraintSystem stores two parts, both talking about the equality or
+    inequality of strings and ints (strings representing objects or
+    variables, ints representing invariant parameters):
+    - equality_DNFs is a list containing lists of EqualityConjunctions.
+      Each EqualityConjunction represents an expression of the form
+      (x1 = y1 and ... and xn = yn). A list of EqualityConjunctions can be
+      interpreted as a disjunction of such expressions. So
+      self.equality_DNFs represents a formula of the form "⋀ ⋁ ⋀ (x = y)"
+      as a list of lists of EqualityConjunctions.
+    - ineq_disjunctions is a list of InequalityDisjunctions. Each of them
+      represents an expression of the form (u1 != v1 or ... or um != vm).
+    - not_constant is a list of strings.
+ + We say that the system is solvable if we can pick from each list of + EqualityConjunctions in equality_DNFs one EquivalenceConjunction such + that the finest equivalence relation induced by all the equivalences in + the conjunctions is + - consistent, i.e. no equivalence class contains more than one object, + - for every disjunction in ineq_disjunctions there is at least one + inequality such that the two terms are in different equivalence + classes. + - every element of not_constant is not in the same equivalence class + as a constant. + We refer to the equivalence relation as the solution of the system.""" + def __init__(self): - self.combinatorial_assignments = [] - self.neg_clauses = [] + self.equality_DNFs = [] + self.ineq_disjunctions = [] + self.not_constant = [] def __str__(self): - combinatorial_assignments = [] - for comb_assignment in self.combinatorial_assignments: - disj = " or ".join([str(assig) for assig in comb_assignment]) + equality_DNFs = [] + for eq_DNF in self.equality_DNFs: + disj = " or ".join([str(eq_conjunction) + for eq_conjunction in eq_DNF]) disj = "(%s)" % disj - combinatorial_assignments.append(disj) - assigs = " and\n".join(combinatorial_assignments) - - neg_clauses = [str(clause) for clause in self.neg_clauses] - neg_clauses = " and ".join(neg_clauses) - return assigs + "(" + neg_clauses + ")" - - def _all_clauses_satisfiable(self, assignment): - mapping = assignment.get_mapping() - for neg_clause in self.neg_clauses: - clause = neg_clause.apply_mapping(mapping) - if not clause.is_satisfiable(): - return False - return True - - def _combine_assignments(self, assignments): - new_equalities = [] - for a in assignments: - new_equalities.extend(a.equalities) - return Assignment(new_equalities) - - def add_assignment(self, assignment): - self.add_assignment_disjunction([assignment]) - - def add_assignment_disjunction(self, assignments): - self.combinatorial_assignments.append(assignments) - - def add_negative_clause(self, 
negative_clause): - self.neg_clauses.append(negative_clause) - - def combine(self, other): - """Combines two constraint systems to a new system""" - combined = ConstraintSystem() - combined.combinatorial_assignments = (self.combinatorial_assignments + - other.combinatorial_assignments) - combined.neg_clauses = self.neg_clauses + other.neg_clauses - return combined - - def copy(self): - other = ConstraintSystem() - other.combinatorial_assignments = list(self.combinatorial_assignments) - other.neg_clauses = list(self.neg_clauses) - return other - - def dump(self): - print("AssignmentSystem:") - for comb_assignment in self.combinatorial_assignments: - disj = " or ".join([str(assig) for assig in comb_assignment]) - print(" ASS: ", disj) - for neg_clause in self.neg_clauses: - print(" NEG: ", str(neg_clause)) + equality_DNFs.append(disj) + eq_part = " and\n".join(equality_DNFs) + + ineq_disjunctions = [str(clause) for clause in self.ineq_disjunctions] + ineq_part = " and ".join(ineq_disjunctions) + return f"{eq_part} ({ineq_part}) (not constant {self.not_constant}" + + def _combine_equality_conjunctions(self, eq_conjunctions: + Iterable[EqualityConjunction]) -> None: + all_eq = itertools.chain.from_iterable(c.equalities + for c in eq_conjunctions) + return EqualityConjunction(list(all_eq)) + + def add_equality_conjunction(self, eq_conjunction: EqualityConjunction): + self.add_equality_DNF([eq_conjunction]) + + def add_equality_DNF(self, equality_DNF: List[EqualityConjunction]) -> None: + self.equality_DNFs.append(equality_DNF) + + def add_inequality_disjunction(self, ineq_disj: InequalityDisjunction): + self.ineq_disjunctions.append(ineq_disj) + + def add_not_constant(self, not_constant: str) -> None: + self.not_constant.append(not_constant) + + def extend(self, other: "ConstraintSystem") -> None: + self.equality_DNFs.extend(other.equality_DNFs) + self.ineq_disjunctions.extend(other.ineq_disjunctions) + self.not_constant.extend(other.not_constant) def is_solvable(self): 
- """Check whether the combinatorial assignments include at least - one consistent assignment under which the negative clauses - are satisfiable""" - for assignments in itertools.product(*self.combinatorial_assignments): - combined = self._combine_assignments(assignments) + # cf. top of class for explanation + def inequality_disjunction_ok(ineq_disj, representative): + for inequality in ineq_disj.parts: + a, b = inequality + if representative.get(a, a) != representative.get(b, b): + return True + return False + + for eq_conjunction in itertools.product(*self.equality_DNFs): + combined = self._combine_equality_conjunctions(eq_conjunction) if not combined.is_consistent(): continue - if self._all_clauses_satisfiable(combined): - return True + # check whether with the finest equivalence relation induced by the + # combined equality conjunction there is no element of not_constant + # in the same equivalence class as a constant and that in each + # inequality disjunction there is an inequality where the two terms + # are in different equivalence classes. + representative = combined.get_representative() + if any(not isinstance(representative.get(s, s), int) and + representative.get(s, s)[0] != "?" 
+ for s in self.not_constant): + continue + if any(not inequality_disjunction_ok(d, representative) + for d in self.ineq_disjunctions): + continue + return True return False diff --git a/src/translate/invariant_finder.py b/src/translate/invariant_finder.py index a8f32ffa59..8aaee3fb99 100755 --- a/src/translate/invariant_finder.py +++ b/src/translate/invariant_finder.py @@ -79,9 +79,12 @@ def get_fluents(task): def get_initial_invariants(task): for predicate in get_fluents(task): all_args = list(range(len(predicate.arguments))) - for omitted_arg in [-1] + all_args: - order = [i for i in all_args if i != omitted_arg] - part = invariants.InvariantPart(predicate.name, order, omitted_arg) + part = invariants.InvariantPart(predicate.name, all_args, None) + yield invariants.Invariant((part,)) + for omitted in range(len(predicate.arguments)): + inv_args = (all_args[0:omitted] + [invariants.COUNTED] + + all_args[omitted:-1]) + part = invariants.InvariantPart(predicate.name, inv_args, omitted) yield invariants.Invariant((part,)) def find_invariants(task, reachable_action_params): @@ -118,7 +121,13 @@ def useful_groups(invariants, initial_facts): if isinstance(atom, pddl.Assign): continue for invariant in predicate_to_invariants.get(atom.predicate, ()): - group_key = (invariant, tuple(invariant.get_parameters(atom))) + parameters = invariant.get_parameters(atom) + # we need to make the parameters dictionary hashable, so + # we store the values as a tuple + parameters_tuple = tuple(parameters[var] + for var in range(invariant.arity())) + + group_key = (invariant, parameters_tuple) if group_key not in nonempty_groups: nonempty_groups.add(group_key) else: diff --git a/src/translate/invariants.py b/src/translate/invariants.py index ed98e4edca..f91eb11a68 100644 --- a/src/translate/invariants.py +++ b/src/translate/invariants.py @@ -8,38 +8,35 @@ # Notes: # All parts of an invariant always use all non-counted variables # -> the arity of all predicates covered by an invariant is 
either the -# number of the invariant variables or this value + 1 +# number of the invariant parameters or this value + 1 # -# we currently keep the assumption that each predicate occurs at most once -# in every invariant. - -def invert_list(alist): - result = defaultdict(list) - for pos, arg in enumerate(alist): - result[arg].append(pos) - return result +# We only consider invariants where each predicate occurs in at most one part. +COUNTED = -1 def instantiate_factored_mapping(pairs): + """Input pairs is a list [(preimg1, img1), ..., (preimgn, imgn)]. + For entry (preimg, img), preimg is a list of numbers and img a list of + invariant parameters or COUNTED of the same length. All preimages (and + all images) are pairwise disjoint, as well as the components of each + preimage/image. + + The function determines all possible bijections between the union of + preimgs and the union of imgs such that for every entry (preimg, img), + all values from preimg are mapped to values from img. + It yields one permutation after the other, each represented as a list + of pairs (x,y), meaning x is mapped to y. + """ + # for every entry (preimg, img) in pairs, determine all possible bijections + # from preimg to img. 
part_mappings = [[list(zip(preimg, perm_img)) for perm_img in itertools.permutations(img)] for (preimg, img) in pairs] - return tools.cartesian_product(part_mappings) - - -def find_unique_variables(action, invariant): - # find unique names for invariant variables - params = {p.name for p in action.parameters} - for eff in action.effects: - params.update([p.name for p in eff.parameters]) - inv_vars = [] - counter = itertools.count() - for _ in range(invariant.arity()): - while True: - new_name = "?v%i" % next(counter) - if new_name not in params: - inv_vars.append(new_name) - break - return inv_vars + # all possibilities to pick one bijection for each entry + if not part_mappings: + yield [] + else: + for x in itertools.product(*part_mappings): + yield list(itertools.chain.from_iterable(x)) def get_literals(condition): @@ -54,17 +51,27 @@ def ensure_conjunction_sat(system, *parts): conjunction of all parts is satisfiable. Each part must be an iterator, generator, or an iterable over - literals.""" + literals. + + We add the following constraints for each literal to the system: + + - for (not (= x y)): x != y (as an InequalityDisjunction with one entry + (x,y)), + - for (= x y): x = y + - for predicates that occur with a positive and negative literal, we + consider every combination of a positive one (e.g. P(x, y, z)) and + a negative one (e.g. 
(not P(a, b, c))) and add a constraint + (x != a or y != b or z != c).""" pos = defaultdict(set) neg = defaultdict(set) for literal in itertools.chain(*parts): if literal.predicate == "=": # use (in)equalities in conditions if literal.negated: - n = constraints.NegativeClause([literal.args]) - system.add_negative_clause(n) + d = constraints.InequalityDisjunction([literal.args]) + system.add_inequality_disjunction(d) else: - a = constraints.Assignment([literal.args]) - system.add_assignment_disjunction([a]) + a = constraints.EqualityConjunction([literal.args]) + system.add_equality_DNF([a]) else: if literal.negated: neg[literal.predicate].add(literal) @@ -77,125 +84,192 @@ def ensure_conjunction_sat(system, *parts): for negatom in neg[pred]: parts = list(zip(negatom.args, posatom.args)) if parts: - negative_clause = constraints.NegativeClause(parts) - system.add_negative_clause(negative_clause) + ineq_disj = constraints.InequalityDisjunction(parts) + system.add_inequality_disjunction(ineq_disj) -def ensure_cover(system, literal, invariant, inv_vars): - """Modifies the constraint system such that it is only solvable if the - invariant covers the literal""" - a = invariant.get_covering_assignments(inv_vars, literal) - assert len(a) == 1 - # if invariants could contain several parts of one predicate, this would - # not be true but the depending code in parts relies on this assumption - system.add_assignment_disjunction(a) +def ensure_cover(system, literal, invariant): + """Modifies the constraint system such that in every solution the invariant + covers the literal (= invariant parameters are equivalent to the + corresponding argument in the literal).""" + cover = invariant._get_cover_equivalence_conjunction(literal) + system.add_equality_DNF([cover]) def ensure_inequality(system, literal1, literal2): """Modifies the constraint system such that it is only solvable if the literal instantiations are not equal (ignoring whether one is negated and - the other is not)""" - 
if (literal1.predicate == literal2.predicate and - literal1.args): + the other is not). + + If the literals have different predicates, there is nothing to do. + Otherwise we add for P(x, y, z) and P(a, b, c) a contraint + (x != a or y != b or z != c).""" + if (literal1.predicate == literal2.predicate and literal1.args): parts = list(zip(literal1.args, literal2.args)) - system.add_negative_clause(constraints.NegativeClause(parts)) + system.add_inequality_disjunction(constraints.InequalityDisjunction(parts)) class InvariantPart: - def __init__(self, predicate, order, omitted_pos=-1): + def __init__(self, predicate, args, omitted_pos=None): + """There is one InvariantPart for every predicate mentioned in the + invariant. The arguments args contain numbers 0,1,... for the + invariant parameters and COUNTED at the omitted position. + If no position is omitted, omitted_pos is None, otherwise it is the + index of COUNTED in args.""" self.predicate = predicate - self.order = order + self.args = tuple(args) self.omitted_pos = omitted_pos def __eq__(self, other): # This implies equality of the omitted_pos component. 
- return self.predicate == other.predicate and self.order == other.order + return self.predicate == other.predicate and self.args == other.args def __ne__(self, other): - return self.predicate != other.predicate or self.order != other.order + return self.predicate != other.predicate or self.args != other.args def __le__(self, other): - return self.predicate <= other.predicate or self.order <= other.order + return (self.predicate, self.args) <= (other.predicate, other.args) def __lt__(self, other): - return self.predicate < other.predicate or self.order < other.order + return (self.predicate, self.args) < (other.predicate, other.args) def __hash__(self): - return hash((self.predicate, tuple(self.order))) + return hash((self.predicate, self.args)) def __str__(self): - var_string = " ".join(map(str, self.order)) - omitted_string = "" - if self.omitted_pos != -1: - omitted_string = " [%d]" % self.omitted_pos - return "%s %s%s" % (self.predicate, var_string, omitted_string) + return f"{self.predicate}({self.args}) [omitted_pos = {self.omitted_pos}]" def arity(self): - return len(self.order) - - def get_assignment(self, parameters, literal): - equalities = [(arg, literal.args[argpos]) - for arg, argpos in zip(parameters, self.order)] - return constraints.Assignment(equalities) + if self.omitted_pos is None: + return len(self.args) + else: + return len(self.args) - 1 def get_parameters(self, literal): - return [literal.args[pos] for pos in self.order] - - def instantiate(self, parameters): - args = ["?X"] * (len(self.order) + (self.omitted_pos != -1)) - for arg, argpos in zip(parameters, self.order): - args[argpos] = arg + """Returns a dictionary, mapping the invariant parameters to the + corresponding values in the literal.""" + return dict((arg, literal.args[pos]) + for pos, arg in enumerate(self.args) + if pos != self.omitted_pos) + + def instantiate(self, parameters_tuple): + args = [parameters_tuple[arg] if arg != COUNTED else "?X" + for arg in self.args] return 
pddl.Atom(self.predicate, args) def possible_mappings(self, own_literal, other_literal): - allowed_omissions = len(other_literal.args) - len(self.order) + """This method is used when an action had an unbalanced add effect + own_literal. The action has a delete effect on literal + other_literal, so we try to refine the invariant such that it also + covers the delete effect. + + From own_literal, we can determine a variable or object for every + invariant parameter, where potentially several invariant parameters + can have the same value. + + From the arguments of other_literal, we determine all possibilities + how we can use the invariant parameters as arguments of + other_literal so that the values match (possibly covering one + parameter with a placeholder/counted variable). Since there also can + be duplicates in the argumets of other_literal, we cannot operate on + the arguments directly, but instead operate on the positions. + + The method returns [] if there is no possible mapping and otherwise + yields the mappings from the positions of other to the invariant + variables or COUNTED one by one. + """ + allowed_omissions = len(other_literal.args) - self.arity() + # All parts of an invariant always use all non-counted variables, of + # which we have arity many. So we must omit allowed_omissions many + # arguments of other_literal when matching invariant parameters with + # arguments. if allowed_omissions not in (0, 1): + # There may be at most one counted variable. 
             return []
         own_parameters = self.get_parameters(own_literal)
-        arg_to_ordered_pos = invert_list(own_parameters)
-        other_arg_to_pos = invert_list(other_literal.args)
+        # own_parameters is a dictionary mapping the invariant parameters to
+        # the corresponding parameter of own_literal
+        ownarg_to_invariant_parameters = defaultdict(list)
+        for x, y in own_parameters.items():
+            ownarg_to_invariant_parameters[y].append(x)
+
+        # other_arg_to_pos maps every argument of other_literal to the
+        # lists of positions in which it is occurring in other_literal, e.g.
+        # for P(?a, ?b, ?a), other_arg_to_pos["?a"] = [0, 2].
+        other_arg_to_pos = defaultdict(list)
+        for pos, arg in enumerate(other_literal.args):
+            other_arg_to_pos[arg].append(pos)
+
         factored_mapping = []
+        # We iterate over all values occurring as arguments in other_literal
+        # and compare the number of occurrences in other_literal to those in
+        # own_literal. If the difference of these numbers allows us to cover
+        # other_literal with the (still) permitted number of counted variables,
+        # we store the correspondence of all argument positions of
+        # other_literal for this value to the invariant parameters at these
+        # positions in factored_mapping. If all values can be covered, we
+        # instantiate the complete factored_mapping, computing all possibilities
+        # to map positions from other_literal to invariant parameters (or
+        # COUNTED if the position is omitted).
         for key, other_positions in other_arg_to_pos.items():
-            own_positions = arg_to_ordered_pos.get(key, [])
-            len_diff = len(own_positions) - len(other_positions)
+            inv_params = ownarg_to_invariant_parameters[key]
+            # all positions at which key occurs as argument in own_literal
+            len_diff = len(inv_params) - len(other_positions)
             if len_diff >= 1 or len_diff <= -2 or len_diff == -1 and not allowed_omissions:
+                # mapping of the literals is not possible with at most one
+                # counted variable.
                 return []
             if len_diff:
-                own_positions.append(-1)
+                inv_params.append(COUNTED)
                 allowed_omissions = 0
-            factored_mapping.append((other_positions, own_positions))
+            factored_mapping.append((other_positions, inv_params))
         return instantiate_factored_mapping(factored_mapping)
 
     def possible_matches(self, own_literal, other_literal):
+        """This method is used when an action had an unbalanced add effect
+        on own_literal. The action has a delete effect on literal
+        other_literal, so we try to refine the invariant such that it also
+        covers the delete effect.
+
+        For this purpose, we consider all possible mappings from the
+        parameter positions of other_literal to the parameter positions of
+        own_literal such that the extended invariant can use other_literal
+        to balance own_literal. From these position mappings, we can extract
+        the new invariant part.
+
+        Consider for an example of the "self" InvariantPart "forall ?@v0,
+        ?@v1, ?@v2 P(?@v0, ?@v1, ?@v2) is non-increasing" and let
+        own_literal be P(?a, ?b, ?c) and other_literal be Q(?b, ?c, ?d, ?a).
+        The only possible mapping from positions of Q to invariant variables
+        of P (or COUNTED) is [0->?@v1, 1->?@v2, 2->COUNTED, 3->?@v0] for
+        which we create a new Invariant Part Q(?@v1, ?@v2, _, ?@v0) with the
+        third argument being counted.
+ """ assert self.predicate == own_literal.predicate - result = [] for mapping in self.possible_mappings(own_literal, other_literal): - new_order = [None] * len(self.order) - omitted = -1 - for (key, value) in mapping: - if value == -1: - omitted = key + args = [COUNTED] * len(other_literal.args) + omitted = None + for (other_pos, inv_var) in mapping: + if inv_var == COUNTED: + omitted = other_pos else: - new_order[value] = key - result.append(InvariantPart(other_literal.predicate, new_order, omitted)) - return result - - def matches(self, other, own_literal, other_literal): - return self.get_parameters(own_literal) == other.get_parameters(other_literal) + args[other_pos] = inv_var + yield InvariantPart(other_literal.predicate, args, omitted) class Invariant: # An invariant is a logical expression of the type - # forall V1...Vk: sum_(part in parts) weight(part, V1, ..., Vk) <= 1. + # forall ?@v1...?@vk: sum_(part in parts) weight(part, ?@v1, ..., ?@vk) <= 1. # k is called the arity of the invariant. - # A "part" is a symbolic fact only variable symbols in {V1, ..., Vk, X}; - # the symbol X may occur at most once. + # A "part" is an atom that only contains arguments from {?@v1, ..., ?@vk, + # COUNTED} but instead of ?@vi, we store it as int i; COUNTED may occur at + # most once. 
def __init__(self, parts): self.parts = frozenset(parts) - self.predicates = {part.predicate for part in parts} self.predicate_to_part = {part.predicate: part for part in parts} + self.predicates = set(self.predicate_to_part.keys()) assert len(self.parts) == len(self.predicates) def __eq__(self, other): @@ -214,7 +288,7 @@ def __hash__(self): return hash(self.parts) def __str__(self): - return "{%s}" % ", ".join(str(part) for part in self.parts) + return "{%s}" % ", ".join(sorted(str(part) for part in self.parts)) def __repr__(self): return '' % self @@ -228,11 +302,25 @@ def get_parameters(self, atom): def instantiate(self, parameters): return [part.instantiate(parameters) for part in self.parts] - def get_covering_assignments(self, parameters, atom): - part = self.predicate_to_part[atom.predicate] - return [part.get_assignment(parameters, atom)] - # if there were more parts for the same predicate the list - # contained more than one element + def _get_cover_equivalence_conjunction(self, literal): + """This is only called for atoms with a predicate for which the + invariant has a part. It returns an equivalence conjunction that + requires every invariant parameter to be equal to the corresponding + argument of the given literal. For the result, we do not consider + whether the literal is negated. + + Example: If the literal is P(?a, ?b, ?c), the invariant part for P + is P(?@v0, _, ?@v1) then the method returns the constraint (?@v0 = ?a + and ?@v1 = ?c). + """ + part = self.predicate_to_part[literal.predicate] + equalities = [(arg, literal.args[pos]) + for pos, arg in enumerate(part.args) + if arg != COUNTED] + return constraints.EqualityConjunction(equalities) + # If there were more parts for the same predicate, we would have to + # consider more than one assignment (disjunctively). + # We assert earlier that this is not the case. def check_balance(self, balance_checker, enqueue_func): # Check balance for this hypothesis. 
@@ -241,17 +329,16 @@ def check_balance(self, balance_checker, enqueue_func): actions_to_check |= balance_checker.get_threats(part.predicate) for action in actions_to_check: heavy_action = balance_checker.get_heavy_action(action) - if self.operator_too_heavy(heavy_action): + if self._operator_too_heavy(heavy_action): return False - if self.operator_unbalanced(action, enqueue_func): + if self._operator_unbalanced(action, enqueue_func): return False return True - def operator_too_heavy(self, h_action): + def _operator_too_heavy(self, h_action): add_effects = [eff for eff in h_action.effects if not eff.literal.negated and self.predicate_to_part.get(eff.literal.predicate)] - inv_vars = find_unique_variables(h_action, self) if len(add_effects) <= 1: return False @@ -259,8 +346,8 @@ def operator_too_heavy(self, h_action): for eff1, eff2 in itertools.combinations(add_effects, 2): system = constraints.ConstraintSystem() ensure_inequality(system, eff1.literal, eff2.literal) - ensure_cover(system, eff1.literal, self, inv_vars) - ensure_cover(system, eff2.literal, self, inv_vars) + ensure_cover(system, eff1.literal, self) + ensure_cover(system, eff2.literal, self) ensure_conjunction_sat(system, get_literals(h_action.precondition), get_literals(eff1.condition), get_literals(eff2.condition), @@ -270,8 +357,7 @@ def operator_too_heavy(self, h_action): return True return False - def operator_unbalanced(self, action, enqueue_func): - inv_vars = find_unique_variables(action, self) + def _operator_unbalanced(self, action, enqueue_func): relevant_effs = [eff for eff in action.effects if self.predicate_to_part.get(eff.literal.predicate)] add_effects = [eff for eff in relevant_effs @@ -279,59 +365,80 @@ def operator_unbalanced(self, action, enqueue_func): del_effects = [eff for eff in relevant_effs if eff.literal.negated] for eff in add_effects: - if self.add_effect_unbalanced(action, eff, del_effects, inv_vars, - enqueue_func): + if self._add_effect_unbalanced(action, eff, 
del_effects, + enqueue_func): return True return False - def minimal_covering_renamings(self, action, add_effect, inv_vars): - """computes the minimal renamings of the action parameters such - that the add effect is covered by the action. - Each renaming is an constraint system""" - - # add_effect must be covered - assigs = self.get_covering_assignments(inv_vars, add_effect.literal) - - # renaming of operator parameters must be minimal - minimal_renamings = [] - params = [p.name for p in action.parameters] - for assignment in assigs: - system = constraints.ConstraintSystem() - system.add_assignment(assignment) - mapping = assignment.get_mapping() - if len(params) > 1: - for (n1, n2) in itertools.combinations(params, 2): - if mapping.get(n1, n1) != mapping.get(n2, n2): - negative_clause = constraints.NegativeClause([(n1, n2)]) - system.add_negative_clause(negative_clause) - minimal_renamings.append(system) - return minimal_renamings - - def add_effect_unbalanced(self, action, add_effect, del_effects, - inv_vars, enqueue_func): - - minimal_renamings = self.minimal_covering_renamings(action, add_effect, - inv_vars) - - lhs_by_pred = defaultdict(list) + def _add_effect_unbalanced(self, action, add_effect, del_effects, + enqueue_func): + # We build for every delete effect that is possibly covered by this + # invariant a constraint system that will be solvable if the delete + # effect balances the add effect. Large parts of the constraint system + # are independent of the delete effect, so we precompute them first. + + # Dictionary add_effect_produced_by_pred describes what must be true so + # that the action is applicable and produces the add effect. It is + # stored as a map from predicate names to literals (overall + # representing a conjunction of these literals). 
+ add_effect_produced_by_pred = defaultdict(list) for lit in itertools.chain(get_literals(action.precondition), get_literals(add_effect.condition), get_literals(add_effect.literal.negate())): - lhs_by_pred[lit.predicate].append(lit) + add_effect_produced_by_pred[lit.predicate].append(lit) + + # add_cover is an equality conjunction that sets each invariant + # parameter equal to its value in add_effect.literal. + add_cover = self._get_cover_equivalence_conjunction(add_effect.literal) + + # add_cover can imply equivalences between variables (and with + # constants). For example if the invariant part is P(_ ?@v0 ?@v1 ?@v2) + # and the add effect is P(?x ?y ?y a) then we would know that the + # invariant part is only threatened by the add effect if the first two + # invariant parameters are equal and the third parameter is a. + + # The add effect must be balanced in all threatening action + # applications. We thus must adapt the constraint system such that it + # prevents restricting solution that set action parameters or + # quantified variables of the add effect equal to each other or to + # a specific constant if this is not already implied by the threat. + params = [p.name for p in itertools.chain(action.parameters, + add_effect.parameters)] + param_system = constraints.ConstraintSystem() + representative = add_cover.get_representative() + # Dictionary representative maps every term to its representative in + # the finest equivalence relation induced by the equalities in + # add_cover. If the equivalence class contains an object, the + # representative is an object. + for param in params: + r = representative.get(param, param) + if isinstance(r, int) or r[0] == "?": + # for the add effect being a threat to the invariant, param + # does not need to be a specific constant. So we may not bind + # it to a constant when balancing the add effect. We store this + # information here. 
+ param_system.add_not_constant(param) + for (n1, n2) in itertools.combinations(params, 2): + if representative.get(n1, n1) != representative.get(n2, n2): + # n1 and n2 don't have to be equivalent to cover the add + # effect, so we require for the solutions that they do not + # make n1 and n2 equvalent. + ineq_disj = constraints.InequalityDisjunction([(n1, n2)]) + param_system.add_inequality_disjunction(ineq_disj) for del_effect in del_effects: - minimal_renamings = self.unbalanced_renamings( - del_effect, add_effect, inv_vars, lhs_by_pred, minimal_renamings) - if not minimal_renamings: + if self._balances(del_effect, add_effect, + add_effect_produced_by_pred, add_cover, + param_system): return False - # Otherwise, the balance check fails => Generate new candidates. - self.refine_candidate(add_effect, action, enqueue_func) + # The balance check failed => Generate new candidates. + self._refine_candidate(add_effect, action, enqueue_func) return True - def refine_candidate(self, add_effect, action, enqueue_func): - """refines the candidate for an add effect that is unbalanced in the - action and adds the refined one to the queue""" + def _refine_candidate(self, add_effect, action, enqueue_func): + """Refines the candidate for an add effect that is unbalanced in the + action and adds the refined one to the queue.""" part = self.predicate_to_part[add_effect.literal.predicate] for del_eff in [eff for eff in action.effects if eff.literal.negated]: if del_eff.literal.predicate not in self.predicate_to_part: @@ -339,75 +446,79 @@ def refine_candidate(self, add_effect, action, enqueue_func): del_eff.literal): enqueue_func(Invariant(self.parts.union((match,)))) - def unbalanced_renamings(self, del_effect, add_effect, inv_vars, - lhs_by_pred, unbalanced_renamings): - """returns the renamings from unbalanced renamings for which - the del_effect does not balance the add_effect.""" + def _balances(self, del_effect, add_effect, produced_by_pred, + add_cover, param_system): + 
"""Returns whether the del_effect is guaranteed to balance the add effect + where the input is such that: + - produced_by_pred must be true for the add_effect to be produced, + - add_cover is an equality conjunction that sets each invariant + parameter equal to its value in add_effect. These equivalences + must be true for the add effect threatening the invariant. + - param_system contains contraints that action and add_effect + parameters are not fixed to be equivalent or a certain constant + (except the add effect is otherwise not threat).""" + + balance_system = self._balance_system(add_effect, del_effect, + produced_by_pred) + if not balance_system: + # it is impossible to guarantee that every production by add_effect + # implies a consumption by del effect. + return False + # We will overall build a system that is solvable if the delete effect + # is guaranteed to balance the add effect for this invariant. system = constraints.ConstraintSystem() - ensure_cover(system, del_effect.literal, self, inv_vars) - - # Since we may only rename the quantified variables of the delete effect - # we need to check that "renamings" of constants are already implied by - # the unbalanced_renaming (of the of the operator parameters). The - # following system is used as a helper for this. It builds a conjunction - # that formulates that the constants are NOT renamed accordingly. We - # below check that this is impossible with each unbalanced renaming. 
- check_constants = False - constant_test_system = constraints.ConstraintSystem() - for a, b in system.combinatorial_assignments[0][0].equalities: - # first 0 because the system was empty before we called ensure_cover - # second 0 because ensure_cover only adds assignments with one entry - if b[0] != "?": - check_constants = True - neg_clause = constraints.NegativeClause([(a, b)]) - constant_test_system.add_negative_clause(neg_clause) + system.add_equality_conjunction(add_cover) + # In every solution, the invariant parameters must equal the + # corresponding arguments of the add effect atom. - ensure_inequality(system, add_effect.literal, del_effect.literal) + ensure_cover(system, del_effect.literal, self) + # In every solution, the invariant parameters must equal the + # corresponding arguments of the delete effect atom. - still_unbalanced = [] - for renaming in unbalanced_renamings: - if check_constants: - new_sys = constant_test_system.combine(renaming) - if new_sys.is_solvable(): - # it is possible that the operator arguments are not - # mapped to constants as required for covering the delete - # effect - still_unbalanced.append(renaming) - continue - - new_sys = system.combine(renaming) - if self.lhs_satisfiable(renaming, lhs_by_pred): - implies_system = self.imply_del_effect(del_effect, lhs_by_pred) - if not implies_system: - still_unbalanced.append(renaming) - continue - new_sys = new_sys.combine(implies_system) - if not new_sys.is_solvable(): - still_unbalanced.append(renaming) - return still_unbalanced - - def lhs_satisfiable(self, renaming, lhs_by_pred): - system = renaming.copy() - ensure_conjunction_sat(system, *itertools.chain(lhs_by_pred.values())) - return system.is_solvable() - - def imply_del_effect(self, del_effect, lhs_by_pred): - """returns a constraint system that is solvable if lhs implies - the del effect (only if lhs is satisfiable). 
If a solvable - lhs never implies the del effect, return None.""" - # del_effect.cond and del_effect.atom must be implied by lhs - implies_system = constraints.ConstraintSystem() + system.extend(balance_system) + # In every solution a production by the add effect guarantees + # a consumption by the delete effect. + + system.extend(param_system) + # A solution may not restrict action parameters (must be balanced + # independent of the concrete action instantiation). + + if not system.is_solvable(): + return False + return True + + def _balance_system(self, add_effect, del_effect, literals_by_pred): + """Returns a constraint system that is solvable if + - the conjunction of literals occurring as values in dictionary + literals_by_pred (characterizing a threat for the invariant + through an actual production by add_effect) implies the + consumption of the atom of the delete effect, and + - the produced and consumed atom are different (otherwise by + add-after-delete semantics, the delete effect would not balance + the add effect). + + We return None if we detect that the constraint system would never + be solvable (by an incomplete cheap test). + """ + system = constraints.ConstraintSystem() for literal in itertools.chain(get_literals(del_effect.condition), [del_effect.literal.negate()]): - poss_assignments = [] - for match in lhs_by_pred[literal.predicate]: - if match.negated != literal.negated: - continue - else: - a = constraints.Assignment(list(zip(literal.args, match.args))) - poss_assignments.append(a) - if not poss_assignments: + possibilities = [] + # possible equality conjunctions that establish that the literals + # in literals_by_pred logically imply the current literal. 
+ for match in literals_by_pred[literal.predicate]: + if match.negated == literal.negated: + # match implies literal iff they agree on each argument + ec = constraints.EqualityConjunction(list(zip(literal.args, + match.args))) + possibilities.append(ec) + if not possibilities: return None - implies_system.add_assignment_disjunction(poss_assignments) - return implies_system + system.add_equality_DNF(possibilities) + + # if the add effect and the delete effect affect the same predicate + # then their arguments must differ in at least one position (because of + # the add-after-delete semantics). + ensure_inequality(system, add_effect.literal, del_effect.literal) + return system diff --git a/src/translate/tools.py b/src/translate/tools.py index ad244b8e99..12ac84a9b0 100644 --- a/src/translate/tools.py +++ b/src/translate/tools.py @@ -1,22 +1,3 @@ -def cartesian_product(sequences): - # TODO: Rename this. It's not good that we have two functions - # called "product" and "cartesian_product", of which "product" - # computes cartesian products, while "cartesian_product" does not. - - # This isn't actually a proper cartesian product because we - # concatenate lists, rather than forming sequences of atomic elements. - # We could probably also use something like - # map(itertools.chain, product(*sequences)) - # but that does not produce the same results - if not sequences: - yield [] - else: - temp = list(cartesian_product(sequences[1:])) - for item in sequences[0]: - for sequence in temp: - yield item + sequence - - def get_peak_memory_in_kb(): try: # This will only work on Linux systems. From db426cd8de43b603c4dfb79f4acf157da55dc71f Mon Sep 17 00:00:00 2001 From: Gabi Roeger Date: Thu, 1 Feb 2024 16:11:11 +0100 Subject: [PATCH 2/5] [issue913] Handle actions with uninitialized numeric expressions in instantiation. If an action increases total-cost by the value of a numeric fluent that has not been defined in the initial state, the action should not be instantiated. 
We adapted the relaxed reachability analysis of the grounding component to prevent such instantiations from the beginning (instead of failing with an AssertionError). --------- Co-authored-by: remochristen --- src/translate/normalize.py | 39 +++++++++++++++++++++++++++++++-- src/translate/pddl_to_prolog.py | 4 ++++ 2 files changed, 41 insertions(+), 2 deletions(-) diff --git a/src/translate/normalize.py b/src/translate/normalize.py index 375dc67e9c..4b5df01dda 100755 --- a/src/translate/normalize.py +++ b/src/translate/normalize.py @@ -1,6 +1,7 @@ #! /usr/bin/env python3 import copy +from typing import Sequence import pddl @@ -23,7 +24,19 @@ def delete_owner(self, task): def build_rules(self, rules): action = self.owner rule_head = get_action_predicate(action) - rule_body = condition_to_rule_body(action.parameters, self.condition) + + # If the action cost is based on a primitive numeric expression, + # we need to require that it has a value defined in the initial state. + # We hand it over to condition_to_rule_body to include this in the rule + # body. 
+ pne = None + if (isinstance(action.cost, pddl.Increase) and + isinstance(action.cost.expression, + pddl.PrimitiveNumericExpression)): + pne = action.cost.expression + + rule_body = condition_to_rule_body(action.parameters, self.condition, + pne) rules.append((rule_body, rule_head)) def get_type_map(self): return self.owner.type_map @@ -117,6 +130,9 @@ def get_axiom_predicate(axiom): variables += [par.name for par in axiom.condition.parameters] return pddl.Atom(name, variables) +def get_pne_definition_predicate(pne: pddl.PrimitiveNumericExpression): + return pddl.Atom(f"@def-{pne.symbol}", pne.args) + def all_conditions(task): for action in task.actions: yield PreconditionProxy(action) @@ -366,10 +382,24 @@ def build_exploration_rules(task): proxy.build_rules(result) return result -def condition_to_rule_body(parameters, condition): +def condition_to_rule_body(parameters: Sequence[pddl.TypedObject], + condition: pddl.conditions.Condition, + pne: pddl.PrimitiveNumericExpression = None): + """The rule body requires that + - all parameters (including existentially quantified variables in the + condition) are instantiated with objecst of the right type, + - all positive atoms in the condition (which must be normalized) are + true in the Prolog model, and + - the primitive numeric expression (from the action cost) has a defined + value (in the initial state).""" result = [] + # Require parameters to be instantiated with objects of the right type. for par in parameters: result.append(par.get_atom()) + + # Require each positive literal in the condition to be reached and + # existentially quantified variables of the condition to be instantiated + # with objects of the right type. 
if not isinstance(condition, pddl.Truth): if isinstance(condition, pddl.ExistentialCondition): for par in condition.parameters: @@ -388,6 +418,11 @@ def condition_to_rule_body(parameters, condition): assert isinstance(part, pddl.Literal), "Condition not normalized: %r" % part if not part.negated: result.append(part) + + # Require the primitive numeric expression (from the action cost) to be + # defined. + if pne is not None: + result.append(get_pne_definition_predicate(pne)) return result if __name__ == "__main__": diff --git a/src/translate/pddl_to_prolog.py b/src/translate/pddl_to_prolog.py index fee70f7c3b..7950451bec 100755 --- a/src/translate/pddl_to_prolog.py +++ b/src/translate/pddl_to_prolog.py @@ -155,6 +155,10 @@ def translate_facts(prog, task): assert isinstance(fact, pddl.Atom) or isinstance(fact, pddl.Assign) if isinstance(fact, pddl.Atom): prog.add_fact(fact) + else: + # Add a fact to indicate that the primitive numeric expression in + # fact.fluent has been defined. + prog.add_fact(normalize.get_pne_definition_predicate(fact.fluent)) def translate(task): # Note: The function requires that the task has been normalized. From 57f34f186e41795179b4d9d96b38e8dd0407fcb0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Clemens=20B=C3=BCchner?= Date: Wed, 7 Feb 2024 13:54:13 +0100 Subject: [PATCH 3/5] [trivial] Remove unused function from landmark factories. 
--- src/search/landmarks/landmark_factory.h | 7 ------- src/search/landmarks/landmark_factory_h_m.cc | 4 ---- src/search/landmarks/landmark_factory_h_m.h | 1 - src/search/landmarks/landmark_factory_merged.cc | 9 --------- src/search/landmarks/landmark_factory_merged.h | 1 - .../landmarks/landmark_factory_reasonable_orders_hps.cc | 4 ---- .../landmarks/landmark_factory_reasonable_orders_hps.h | 1 - src/search/landmarks/landmark_factory_rpg_exhaust.cc | 4 ---- src/search/landmarks/landmark_factory_rpg_exhaust.h | 1 - src/search/landmarks/landmark_factory_rpg_sasp.cc | 4 ---- src/search/landmarks/landmark_factory_rpg_sasp.h | 1 - src/search/landmarks/landmark_factory_zhu_givan.cc | 4 ---- src/search/landmarks/landmark_factory_zhu_givan.h | 1 - 13 files changed, 42 deletions(-) diff --git a/src/search/landmarks/landmark_factory.h b/src/search/landmarks/landmark_factory.h index a0b78fbadc..2258f6bc61 100644 --- a/src/search/landmarks/landmark_factory.h +++ b/src/search/landmarks/landmark_factory.h @@ -32,13 +32,6 @@ class LandmarkFactory { std::shared_ptr compute_lm_graph(const std::shared_ptr &task); - /* - TODO: Currently reasonable orders are not supported for admissible landmark count - heuristics, which is why the heuristic needs to know whether the factory computes - reasonable orders. Once issue383 is dealt with we should be able to use reasonable - orders for admissible heuristics and this method can be removed. 
- */ - virtual bool computes_reasonable_orders() const = 0; virtual bool supports_conditional_effects() const = 0; bool achievers_are_calculated() const { diff --git a/src/search/landmarks/landmark_factory_h_m.cc b/src/search/landmarks/landmark_factory_h_m.cc index f6e92362f4..2894b41a30 100644 --- a/src/search/landmarks/landmark_factory_h_m.cc +++ b/src/search/landmarks/landmark_factory_h_m.cc @@ -1008,10 +1008,6 @@ void LandmarkFactoryHM::generate_landmarks( postprocess(task_proxy); } -bool LandmarkFactoryHM::computes_reasonable_orders() const { - return false; -} - bool LandmarkFactoryHM::supports_conditional_effects() const { return false; } diff --git a/src/search/landmarks/landmark_factory_h_m.h b/src/search/landmarks/landmark_factory_h_m.h index 7d6f0737fd..d8a411faab 100644 --- a/src/search/landmarks/landmark_factory_h_m.h +++ b/src/search/landmarks/landmark_factory_h_m.h @@ -140,7 +140,6 @@ class LandmarkFactoryHM : public LandmarkFactory { public: explicit LandmarkFactoryHM(const plugins::Options &opts); - virtual bool computes_reasonable_orders() const override; virtual bool supports_conditional_effects() const override; }; } diff --git a/src/search/landmarks/landmark_factory_merged.cc b/src/search/landmarks/landmark_factory_merged.cc index 61e6bb60a1..f67fb9a3b2 100644 --- a/src/search/landmarks/landmark_factory_merged.cc +++ b/src/search/landmarks/landmark_factory_merged.cc @@ -132,15 +132,6 @@ void LandmarkFactoryMerged::postprocess() { lm_graph->set_landmark_ids(); } -bool LandmarkFactoryMerged::computes_reasonable_orders() const { - for (const shared_ptr &lm_factory : lm_factories) { - if (lm_factory->computes_reasonable_orders()) { - return true; - } - } - return false; -} - bool LandmarkFactoryMerged::supports_conditional_effects() const { for (const shared_ptr &lm_factory : lm_factories) { if (!lm_factory->supports_conditional_effects()) { diff --git a/src/search/landmarks/landmark_factory_merged.h b/src/search/landmarks/landmark_factory_merged.h 
index 9d332f91c2..06556d9e1d 100644 --- a/src/search/landmarks/landmark_factory_merged.h +++ b/src/search/landmarks/landmark_factory_merged.h @@ -15,7 +15,6 @@ class LandmarkFactoryMerged : public LandmarkFactory { public: explicit LandmarkFactoryMerged(const plugins::Options &opts); - virtual bool computes_reasonable_orders() const override; virtual bool supports_conditional_effects() const override; }; } diff --git a/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc b/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc index 9d6a16baff..3469c0c1a1 100644 --- a/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc +++ b/src/search/landmarks/landmark_factory_reasonable_orders_hps.cc @@ -349,10 +349,6 @@ bool LandmarkFactoryReasonableOrdersHPS::effect_always_happens( return eff.empty(); } -bool LandmarkFactoryReasonableOrdersHPS::computes_reasonable_orders() const { - return true; -} - bool LandmarkFactoryReasonableOrdersHPS::supports_conditional_effects() const { return lm_factory->supports_conditional_effects(); } diff --git a/src/search/landmarks/landmark_factory_reasonable_orders_hps.h b/src/search/landmarks/landmark_factory_reasonable_orders_hps.h index 91a7f2f991..01758afd49 100644 --- a/src/search/landmarks/landmark_factory_reasonable_orders_hps.h +++ b/src/search/landmarks/landmark_factory_reasonable_orders_hps.h @@ -21,7 +21,6 @@ class LandmarkFactoryReasonableOrdersHPS : public LandmarkFactory { public: LandmarkFactoryReasonableOrdersHPS(const plugins::Options &opts); - virtual bool computes_reasonable_orders() const override; virtual bool supports_conditional_effects() const override; }; } diff --git a/src/search/landmarks/landmark_factory_rpg_exhaust.cc b/src/search/landmarks/landmark_factory_rpg_exhaust.cc index a67a91d2f1..51e5d79579 100644 --- a/src/search/landmarks/landmark_factory_rpg_exhaust.cc +++ b/src/search/landmarks/landmark_factory_rpg_exhaust.cc @@ -53,10 +53,6 @@ void 
LandmarkFactoryRpgExhaust::generate_relaxed_landmarks( } } -bool LandmarkFactoryRpgExhaust::computes_reasonable_orders() const { - return false; -} - bool LandmarkFactoryRpgExhaust::supports_conditional_effects() const { return false; } diff --git a/src/search/landmarks/landmark_factory_rpg_exhaust.h b/src/search/landmarks/landmark_factory_rpg_exhaust.h index 027432ea11..7107893565 100644 --- a/src/search/landmarks/landmark_factory_rpg_exhaust.h +++ b/src/search/landmarks/landmark_factory_rpg_exhaust.h @@ -12,7 +12,6 @@ class LandmarkFactoryRpgExhaust : public LandmarkFactoryRelaxation { public: explicit LandmarkFactoryRpgExhaust(const plugins::Options &opts); - virtual bool computes_reasonable_orders() const override; virtual bool supports_conditional_effects() const override; }; } diff --git a/src/search/landmarks/landmark_factory_rpg_sasp.cc b/src/search/landmarks/landmark_factory_rpg_sasp.cc index af70a805ba..97a5e6b7da 100644 --- a/src/search/landmarks/landmark_factory_rpg_sasp.cc +++ b/src/search/landmarks/landmark_factory_rpg_sasp.cc @@ -627,10 +627,6 @@ void LandmarkFactoryRpgSasp::discard_disjunctive_landmarks() { } } -bool LandmarkFactoryRpgSasp::computes_reasonable_orders() const { - return false; -} - bool LandmarkFactoryRpgSasp::supports_conditional_effects() const { return true; } diff --git a/src/search/landmarks/landmark_factory_rpg_sasp.h b/src/search/landmarks/landmark_factory_rpg_sasp.h index 1489743dc7..9922974fd7 100644 --- a/src/search/landmarks/landmark_factory_rpg_sasp.h +++ b/src/search/landmarks/landmark_factory_rpg_sasp.h @@ -64,7 +64,6 @@ class LandmarkFactoryRpgSasp : public LandmarkFactoryRelaxation { public: explicit LandmarkFactoryRpgSasp(const plugins::Options &opts); - virtual bool computes_reasonable_orders() const override; virtual bool supports_conditional_effects() const override; }; } diff --git a/src/search/landmarks/landmark_factory_zhu_givan.cc b/src/search/landmarks/landmark_factory_zhu_givan.cc index 
111cd54e85..349fac8954 100644 --- a/src/search/landmarks/landmark_factory_zhu_givan.cc +++ b/src/search/landmarks/landmark_factory_zhu_givan.cc @@ -300,10 +300,6 @@ void LandmarkFactoryZhuGivan::add_operator_to_triggers(const OperatorProxy &op) triggers[lm.var][lm.value].push_back(op_or_axiom_id); } -bool LandmarkFactoryZhuGivan::computes_reasonable_orders() const { - return false; -} - bool LandmarkFactoryZhuGivan::supports_conditional_effects() const { return true; } diff --git a/src/search/landmarks/landmark_factory_zhu_givan.h b/src/search/landmarks/landmark_factory_zhu_givan.h index 1b49f2cf0f..dbc1569a3f 100644 --- a/src/search/landmarks/landmark_factory_zhu_givan.h +++ b/src/search/landmarks/landmark_factory_zhu_givan.h @@ -77,7 +77,6 @@ class LandmarkFactoryZhuGivan : public LandmarkFactoryRelaxation { public: explicit LandmarkFactoryZhuGivan(const plugins::Options &opts); - virtual bool computes_reasonable_orders() const override; virtual bool supports_conditional_effects() const override; }; } From 36e703140785ffe100d14ab27b099aa76ed7a3a4 Mon Sep 17 00:00:00 2001 From: Gabi Roeger Date: Thu, 8 Feb 2024 21:05:46 +0100 Subject: [PATCH 4/5] [issue879] make translator deterministic On some domains such as citycar, repeated runs of the translator could lead to different finite-domain encodings. In this issue we fixed several sources of this non-deterministic behaviour. 
--- src/translate/invariant_finder.py | 19 ++++++---- src/translate/invariants.py | 28 +++++++++----- .../pddl_parser/parsing_functions.py | 38 ++++++++++--------- 3 files changed, 51 insertions(+), 34 deletions(-) diff --git a/src/translate/invariant_finder.py b/src/translate/invariant_finder.py index 8aaee3fb99..9dded6dfab 100755 --- a/src/translate/invariant_finder.py +++ b/src/translate/invariant_finder.py @@ -3,6 +3,7 @@ from collections import deque, defaultdict import itertools +import random import time from typing import List @@ -13,7 +14,8 @@ class BalanceChecker: def __init__(self, task, reachable_action_params): - self.predicates_to_add_actions = defaultdict(set) + self.predicates_to_add_actions = defaultdict(list) + self.random = random.Random(314159) self.action_to_heavy_action = {} for act in task.actions: action = self.add_inequality_preconds(act, reachable_action_params) @@ -27,7 +29,9 @@ def __init__(self, task, reachable_action_params): too_heavy_effects.append(eff.copy()) if not eff.literal.negated: predicate = eff.literal.predicate - self.predicates_to_add_actions[predicate].add(action) + add_actions = self.predicates_to_add_actions[predicate] + if not add_actions or add_actions[-1] is not action: + add_actions.append(action) if create_heavy_act: heavy_act = pddl.Action(action.name, action.parameters, action.num_external_parameters, @@ -38,7 +42,7 @@ def __init__(self, task, reachable_action_params): self.action_to_heavy_action[action] = heavy_act def get_threats(self, predicate): - return self.predicates_to_add_actions.get(predicate, set()) + return self.predicates_to_add_actions.get(predicate, list()) def get_heavy_action(self, action): return self.action_to_heavy_action[action] @@ -115,7 +119,7 @@ def useful_groups(invariants, initial_facts): for predicate in invariant.predicates: predicate_to_invariants[predicate].append(invariant) - nonempty_groups = set() + nonempty_groups = dict() # dict instead of set because it is stable 
overcrowded_groups = set() for atom in initial_facts: if isinstance(atom, pddl.Assign): @@ -129,17 +133,18 @@ def useful_groups(invariants, initial_facts): group_key = (invariant, parameters_tuple) if group_key not in nonempty_groups: - nonempty_groups.add(group_key) + nonempty_groups[group_key] = True else: overcrowded_groups.add(group_key) - useful_groups = nonempty_groups - overcrowded_groups + useful_groups = [group_key for group_key in nonempty_groups.keys() + if group_key not in overcrowded_groups] for (invariant, parameters) in useful_groups: yield [part.instantiate(parameters) for part in sorted(invariant.parts)] # returns a list of mutex groups (parameters instantiated, counted variables not) def get_groups(task, reachable_action_params=None) -> List[List[pddl.Atom]]: with timers.timing("Finding invariants", block=True): - invariants = sorted(find_invariants(task, reachable_action_params)) + invariants = list(find_invariants(task, reachable_action_params)) with timers.timing("Checking invariant weight"): result = list(useful_groups(invariants, task.init)) return result diff --git a/src/translate/invariants.py b/src/translate/invariants.py index f91eb11a68..a4d593a893 100644 --- a/src/translate/invariants.py +++ b/src/translate/invariants.py @@ -278,12 +278,6 @@ def __eq__(self, other): def __ne__(self, other): return self.parts != other.parts - def __lt__(self, other): - return self.parts < other.parts - - def __le__(self, other): - return self.parts <= other.parts - def __hash__(self): return hash(self.parts) @@ -324,10 +318,24 @@ def _get_cover_equivalence_conjunction(self, literal): def check_balance(self, balance_checker, enqueue_func): # Check balance for this hypothesis. - actions_to_check = set() - for part in self.parts: - actions_to_check |= balance_checker.get_threats(part.predicate) - for action in actions_to_check: + actions_to_check = dict() + # We will only use the keys of the dictionary. 
We do not use a set + # because it's not stable and introduces non-determinism in the + # invariance analysis. + for part in sorted(self.parts): + for a in balance_checker.get_threats(part.predicate): + actions_to_check[a] = True + + actions = list(actions_to_check.keys()) + while actions: + # For a better expected perfomance, we want to randomize the order + # in which actions are checked. Since candidates are often already + # discarded by an early check, we do not want to shuffle the order + # but instead always draw the next action randomly from those we + # did not yet consider. + pos = balance_checker.random.randrange(len(actions)) + actions[pos], actions[-1] = actions[-1], actions[pos] + action = actions.pop() heavy_action = balance_checker.get_heavy_action(action) if self._operator_too_heavy(heavy_action): return False diff --git a/src/translate/pddl_parser/parsing_functions.py b/src/translate/pddl_parser/parsing_functions.py index 33cbb00c4f..e21e81b9d3 100644 --- a/src/translate/pddl_parser/parsing_functions.py +++ b/src/translate/pddl_parser/parsing_functions.py @@ -573,8 +573,7 @@ def parse_axioms_and_actions(context, entries, type_dict, predicate_dict): def parse_init(context, alist): initial = [] - initial_true = set() - initial_false = set() + initial_proposition_values = dict() initial_assignments = dict() for no, fact in enumerate(alist[1:], start=1): with context.layer(f"Parsing {no}. 
element in init block"): @@ -611,15 +610,16 @@ def parse_init(context, alist): if not isinstance(fact, list) or not fact: context.error("Invalid negated fact.", syntax=SYNTAX_LITERAL_NEGATED) atom = pddl.Atom(fact[0], fact[1:]) - check_atom_consistency(context, atom, initial_false, initial_true, False) - initial_false.add(atom) + check_atom_consistency(context, atom, + initial_proposition_values, False) + initial_proposition_values[atom] = False else: - if len(fact) < 1: - context.error(f"Expecting {SYNTAX_LITERAL} for atoms.") atom = pddl.Atom(fact[0], fact[1:]) - check_atom_consistency(context, atom, initial_true, initial_false) - initial_true.add(atom) - initial.extend(initial_true) + check_atom_consistency(context, atom, + initial_proposition_values, True) + initial_proposition_values[atom] = True + initial.extend(atom for atom, val in initial_proposition_values.items() + if val is True) return initial @@ -802,14 +802,18 @@ def parse_task_pddl(context, task_pddl, type_dict, predicate_dict): assert False, "This line should be unreachable" -def check_atom_consistency(context, atom, same_truth_value, other_truth_value, atom_is_true=True): - if atom in other_truth_value: - context.error(f"Error in initial state specification\n" - f"Reason: {atom} is true and false.") - if atom in same_truth_value: - if not atom_is_true: - atom = atom.negate() - print(f"Warning: {atom} is specified twice in initial state specification") +def check_atom_consistency(context, atom, initial_proposition_values, + atom_value): + if atom in initial_proposition_values: + prev_value = initial_proposition_values[atom] + if prev_value != atom_value: + context.error(f"Error in initial state specification\n" + f"Reason: {atom} is true and false.") + else: + if atom_value is False: + atom = atom.negate() + print(f"Warning: {atom} is specified twice in initial state specification") + def check_for_duplicates(context, elements, errmsg, finalmsg): seen = set() From 
61646d7e7e70bcd4319190f2175578d7d86b6b8f Mon Sep 17 00:00:00 2001 From: Gabi Roeger Date: Fri, 9 Feb 2024 14:09:30 +0100 Subject: [PATCH 5/5] [issue1127] support negative literals in goals We previously did not fully support the occurrence of negative literals in conjunctive goals. We now support them, enforcing that such atoms are encoded with a propositional variable (or rather a FDR variable with a domain of size 2). --- src/translate/fact_groups.py | 11 ++++++++--- src/translate/translate.py | 5 ++++- 2 files changed, 12 insertions(+), 4 deletions(-) diff --git a/src/translate/fact_groups.py b/src/translate/fact_groups.py index daec4a5875..8847047223 100644 --- a/src/translate/fact_groups.py +++ b/src/translate/fact_groups.py @@ -68,7 +68,11 @@ def _update_top(self): self.groups_by_size[len(candidate)].append(candidate) self.max_size -= 1 -def choose_groups(groups, reachable_facts): +def choose_groups(groups, reachable_facts, negative_in_goal): + if negative_in_goal: + # we remove atoms that occur negatively in the goal from the groups to + # enforce them to be encoded with a binary variable. 
+ groups = [set(group) - negative_in_goal for group in groups] queue = GroupCoverQueue(groups) uncovered_facts = reachable_facts.copy() result = [] @@ -107,7 +111,8 @@ def sort_groups(groups): return sorted(sorted(group) for group in groups) def compute_groups(task: pddl.Task, atoms: Set[pddl.Literal], - reachable_action_params: Dict[pddl.Action, List[str]]) -> Tuple[ + reachable_action_params: Dict[pddl.Action, List[str]], + negative_in_goal: Set[pddl.Atom]) -> Tuple[ List[List[pddl.Atom]], # groups # -> all selected mutex groups plus singleton groups for uncovered facts List[List[pddl.Atom]], # mutex_groups @@ -128,7 +133,7 @@ def compute_groups(task: pddl.Task, atoms: Set[pddl.Literal], with timers.timing("Collecting mutex groups"): mutex_groups = collect_all_mutex_groups(groups, atoms) with timers.timing("Choosing groups", block=True): - groups = choose_groups(groups, atoms) + groups = choose_groups(groups, atoms, negative_in_goal) groups = sort_groups(groups) with timers.timing("Building translation key"): translation_key = build_translation_key(groups) diff --git a/src/translate/translate.py b/src/translate/translate.py index d2652f0fe6..e060e8a591 100755 --- a/src/translate/translate.py +++ b/src/translate/translate.py @@ -555,12 +555,15 @@ def pddl_to_sas(task): elif goal_list is None: return unsolvable_sas_task("Trivially false goal") + negative_in_goal = set() for item in goal_list: assert isinstance(item, pddl.Literal) + if item.negated: + negative_in_goal.add(item.negate()) with timers.timing("Computing fact groups", block=True): groups, mutex_groups, translation_key = fact_groups.compute_groups( - task, atoms, reachable_action_params) + task, atoms, reachable_action_params, negative_in_goal) with timers.timing("Building STRIPS to SAS dictionary"): ranges, strips_to_sas = strips_to_sas_dictionary(