/*
* Copyright (C) 2012-2015 Matthias Heizmann (heizmann@informatik.uni-freiburg.de)
* Copyright (C) 2009-2015 University of Freiburg
*
* This file is part of the ULTIMATE Automata Library.
*
* The ULTIMATE Automata Library is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* The ULTIMATE Automata Library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with the ULTIMATE Automata Library. If not, see <http://www.gnu.org/licenses/>.
*
* Additional permission under GNU GPL version 3 section 7:
* If you modify the ULTIMATE Automata Library, or any covered work, by linking
* or combining it with Eclipse RCP (or a modified version of Eclipse RCP),
* containing parts covered by the terms of the Eclipse Public License, the
* licensors of the ULTIMATE Automata Library grant you additional permission
* to convey the resulting work.
*/
package de.uni_freiburg.informatik.ultimate.automata.petrinet.unfolding;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.stream.Collectors;
import de.uni_freiburg.informatik.ultimate.automata.AutomataLibraryException;
import de.uni_freiburg.informatik.ultimate.automata.AutomataLibraryServices;
import de.uni_freiburg.informatik.ultimate.automata.GeneralOperation;
import de.uni_freiburg.informatik.ultimate.automata.nestedword.INestedWordAutomaton;
import de.uni_freiburg.informatik.ultimate.automata.nestedword.NestedRun;
import de.uni_freiburg.informatik.ultimate.automata.nestedword.operations.IsIncluded;
import de.uni_freiburg.informatik.ultimate.automata.petrinet.IPetriNetAndAutomataInclusionStateFactory;
import de.uni_freiburg.informatik.ultimate.automata.petrinet.netdatastructures.BoundedPetriNet;
import de.uni_freiburg.informatik.ultimate.automata.petrinet.netdatastructures.Transition;
import de.uni_freiburg.informatik.ultimate.automata.petrinet.operations.PetriNet2FiniteAutomaton;
import de.uni_freiburg.informatik.ultimate.automata.statefactory.IFinitePrefix2PetriNetStateFactory;
import de.uni_freiburg.informatik.ultimate.util.datastructures.HashDeque;
import de.uni_freiburg.informatik.ultimate.util.datastructures.ImmutableSet;
import de.uni_freiburg.informatik.ultimate.util.datastructures.UnionFind;
import de.uni_freiburg.informatik.ultimate.util.datastructures.relation.HashRelation;
import de.uni_freiburg.informatik.ultimate.util.datastructures.relation.HashRelation3;
import de.uni_freiburg.informatik.ultimate.util.datastructures.relation.Pair;
/**
* Converts a finite prefix (branching process) of a Petri net unfolding back into a Petri net.
*
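* A minimal usage sketch (the variables {@code services}, {@code stateFactory} and {@code bp} are assumed to be
* available, e.g. {@code bp} as the finite prefix computed by an unfolding operation):
*
* <pre>
* {@code
* FinitePrefix2PetriNet<LETTER, PLACE> op = new FinitePrefix2PetriNet<>(services, stateFactory, bp);
* BoundedPetriNet<LETTER, PLACE> net = op.getResult();
* }
* </pre>
*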
* @author Matthias Heizmann (heizmann@informatik.uni-freiburg.de)
* @param <LETTER>
* letter type
* @param <PLACE>
* content type
*/
public final class FinitePrefix2PetriNet<LETTER, PLACE>
extends GeneralOperation<LETTER, PLACE, IPetriNetAndAutomataInclusionStateFactory<PLACE>> {
private final BranchingProcess<LETTER, PLACE> mInput;
private final BoundedPetriNet<LETTER, PLACE> mNet;
private final HashDeque<Pair<Event<LETTER, PLACE>, Event<LETTER, PLACE>>> mMergingCandidates;
private final HashRelation<Event<LETTER, PLACE>, Condition<LETTER, PLACE>> mConditionPredecessors =
new HashRelation<>();
private final HashRelation<Condition<LETTER, PLACE>, Event<LETTER, PLACE>> mEventSuccessors = new HashRelation<>();
private final UnionFind<Condition<LETTER, PLACE>> mConditionRepresentatives;
private final UnionFind<Event<LETTER, PLACE>> mEventRepresentatives;
private final IFinitePrefix2PetriNetStateFactory<PLACE> mStateFactory;
private final HashRelation<PLACE, PLACE> mOldToNewPlaces = new HashRelation<>();
private final HashRelation<Transition<LETTER, PLACE>, Transition<LETTER, PLACE>> mOldToNewTransitions =
new HashRelation<>();
private final boolean mUsePetrification = false;
private final boolean mRemoveDeadTransitions;
private int mNumberOfCallsOfMergeCandidates = 0;
private int mNumberOfMergingCandidates = 0;
private int mNumberOfMergedEventPairs = 0;
private int mNumberOfAddOperationsToTheCandQueue = 0;
private final Set<Event<LETTER, PLACE>> mVitalRepresentatives = new HashSet<>();
public FinitePrefix2PetriNet(final AutomataLibraryServices services,
final IFinitePrefix2PetriNetStateFactory<PLACE> stateFactory, final BranchingProcess<LETTER, PLACE> bp) {
this(services, stateFactory, bp, false);
}
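/**
 * @param services
 * Ultimate services.
 * @param stateFactory
 * state factory that translates conditions of the finite prefix into places of the resulting net
 * @param bp
 * branching process (finite prefix) that is folded back into a Petri net
 * @param removeDeadTransitions
 * if true, events whose representative is not backward-reachable from an accepting condition (via predecessor
 * events, the companion/cut-off relation, and the co-relation) are omitted from the result
 */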
public FinitePrefix2PetriNet(final AutomataLibraryServices services,
final IFinitePrefix2PetriNetStateFactory<PLACE> stateFactory, final BranchingProcess<LETTER, PLACE> bp,
final boolean removeDeadTransitions) {
super(services);
mStateFactory = stateFactory;
// TODO implement merging for markings?
mInput = bp;
mRemoveDeadTransitions = removeDeadTransitions;
if (mLogger.isInfoEnabled()) {
mLogger.info(startMessage());
}
final BoundedPetriNet<LETTER, PLACE> oldNet = (BoundedPetriNet<LETTER, PLACE>) mInput.getNet();
if (mUsePetrification) {
mNet = buildPetrification(bp);
mMergingCandidates = null;
mConditionRepresentatives = null;
mEventRepresentatives = null;
} else {
mMergingCandidates = new HashDeque<>();
mNet = new BoundedPetriNet<>(mServices, oldNet.getAlphabet(), false);
mConditionRepresentatives = new UnionFind<>();
mEventRepresentatives = new UnionFind<>();
constructNet(bp);
}
if (mLogger.isInfoEnabled()) {
mLogger.info(exitMessage());
}
}
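/**
 * Builds the result net from the finite prefix. Initially, every event and every condition forms its own
 * equivalence class. For each cut-off event, its condition marking is merged with the condition marking of its
 * companion; merging conditions may in turn make two events mergeable (same transition and same predecessor
 * conditions), which is checked via the {@link #mMergingCandidates} worklist, and merging events triggers merging
 * of their successor conditions. Finally, one place is added per condition representative and one transition per
 * event representative (restricted to vital events if dead transitions are removed).
 */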
private void constructNet(final BranchingProcess<LETTER, PLACE> bp) {
if (mLogger.isDebugEnabled()) {
mLogger.debug("CONDITIONS:");
for (final Condition<LETTER, PLACE> c : bp.getConditions()) {
mLogger.debug(c);
}
mLogger.debug("EVENTS:");
for (final Event<LETTER, PLACE> e : bp.getEvents()) {
mLogger.debug(e.getPredecessorConditions() + " || " + e + " || " + e.getSuccessorConditions());
}
}
for (final Event<LETTER, PLACE> e : bp.getEvents()) {
mEventRepresentatives.makeEquivalenceClass(e);
for (final Condition<LETTER, PLACE> c : e.getSuccessorConditions()) {
assert mConditionRepresentatives.find(c) == null;
mConditionRepresentatives.makeEquivalenceClass(c);
}
for (final Condition<LETTER, PLACE> c : e.getPredecessorConditions()) {
mEventSuccessors.addPair(c, e);
}
mConditionPredecessors.addAllPairs(e, e.getPredecessorConditions());
}
// equality intended here
for (final Event<LETTER, PLACE> e : bp.getEvents()) {
if (e.isCutoffEvent()) {
final Event<LETTER, PLACE> companion = e.getCompanion();
final ConditionMarking<LETTER, PLACE> companionCondMark = companion.getConditionMark();
final ConditionMarking<LETTER, PLACE> eCondMark = e.getConditionMark();
mergeConditions(eCondMark.getConditions(), companionCondMark.getConditions());
while (!mMergingCandidates.isEmpty()) {
mNumberOfMergingCandidates++;
final Pair<Event<LETTER, PLACE>, Event<LETTER, PLACE>> candidate = mMergingCandidates.poll();
final Event<LETTER, PLACE> e1 = mEventRepresentatives.find(candidate.getFirst());
final Event<LETTER, PLACE> e2 = mEventRepresentatives.find(candidate.getSecond());
if (!e1.equals(e2)
&& mConditionPredecessors.getImage(e1).equals(mConditionPredecessors.getImage(e2))) {
mEventRepresentatives.union(e1, e2);
final Event<LETTER, PLACE> rep = mEventRepresentatives.find(e1);
final Event<LETTER, PLACE> nonRep;
if (rep.equals(e1)) {
nonRep = e2;
} else {
nonRep = e1;
}
for (final Condition<LETTER, PLACE> c : mConditionPredecessors.getImage(nonRep)) {
mEventSuccessors.removePair(c, nonRep);
mEventSuccessors.addPair(c, rep);
}
mConditionPredecessors.addAllPairs(rep, mConditionPredecessors.getImage(nonRep));
mConditionPredecessors.removeDomainElement(nonRep);
mNumberOfMergedEventPairs++;
mergeConditions(e1.getSuccessorConditions(), e2.getSuccessorConditions());
}
}
}
}
final Set<Event<LETTER, PLACE>> releventEvents = new HashSet<>(mEventRepresentatives.getAllRepresentatives());
releventEvents.remove(mInput.getDummyRoot());
if (mRemoveDeadTransitions) {
final HashRelation<Event<LETTER, PLACE>, Event<LETTER, PLACE>> companion2cutoff = new HashRelation<>();
for (final Event<LETTER, PLACE> e : bp.getEvents()) {
if (e.isCutoffEvent()) {
companion2cutoff.addPair(e.getCompanion(), e);
}
}
final ArrayDeque<Event<LETTER, PLACE>> worklist = new ArrayDeque<>();
// TODO: Try if the condition representatives are sufficient
for (final Condition<LETTER, PLACE> c : bp.getAcceptingConditions()) {
final Event<LETTER, PLACE> predRepresentative = mEventRepresentatives.find(c.getPredecessorEvent());
if (mVitalRepresentatives.add(predRepresentative)) {
worklist.add(predRepresentative);
}
for (final Event<LETTER, PLACE> e : bp.getCoRelation().computeCoRelatatedEvents(c)) {
if (mVitalRepresentatives.add(mEventRepresentatives.find(e))) {
worklist.add(mEventRepresentatives.find(e));
}
}
}
while (!worklist.isEmpty()) {
final Event<LETTER, PLACE> representative = worklist.removeFirst();
for (final Event<LETTER, PLACE> e : mEventRepresentatives.getEquivalenceClassMembers(representative)) {
for (final Event<LETTER, PLACE> predEvent : e.getPredecessorEvents()) {
final Event<LETTER, PLACE> predEventRep = mEventRepresentatives.find(predEvent);
if (mVitalRepresentatives.add(predEventRep)) {
worklist.add(predEventRep);
}
}
for (final Event<LETTER, PLACE> cutoffEvent : companion2cutoff.getImage(e)) {
final Event<LETTER, PLACE> cutoffEventRep = mEventRepresentatives.find(cutoffEvent);
if (mVitalRepresentatives.add(cutoffEventRep)) {
worklist.add(cutoffEventRep);
}
}
}
}
mVitalRepresentatives.remove(bp.getDummyRoot());
releventEvents.retainAll(mVitalRepresentatives);
}
final Map<Condition<LETTER, PLACE>, PLACE> placeMap = new HashMap<>();
for (final Condition<LETTER, PLACE> c : mConditionRepresentatives.getAllRepresentatives()) {
final boolean isInitial =
containsInitial(mConditionRepresentatives.getEquivalenceClassMembers(c), bp.initialConditions());
final boolean isAccepting = bp.getNet().isAccepting(c.getPlace());
final PLACE place = mStateFactory.finitePrefix2net(c);
mOldToNewPlaces.addPair(c.getPlace(), place);
final boolean newlyAdded = mNet.addPlace(place, isInitial, isAccepting);
if (!newlyAdded) {
throw new AssertionError("Must not add place twice: " + place);
}
placeMap.put(c, place);
}
for (final Event<LETTER, PLACE> e : releventEvents) {
final Set<PLACE> preds = new HashSet<>();
final Set<PLACE> succs = new HashSet<>();
for (final Condition<LETTER, PLACE> c : e.getPredecessorConditions()) {
final Condition<LETTER, PLACE> representative = mConditionRepresentatives.find(c);
preds.add(placeMap.get(representative));
}
for (final Condition<LETTER, PLACE> c : e.getSuccessorConditions()) {
final Condition<LETTER, PLACE> representative = mConditionRepresentatives.find(c);
succs.add(placeMap.get(representative));
}
final Transition<LETTER, PLACE> newTransition =
mNet.addTransition(e.getTransition().getSymbol(), ImmutableSet.of(preds), ImmutableSet.of(succs));
mOldToNewTransitions.addPair(newTransition, e.getTransition());
}
}
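/**
 * @return the transitions of all vital event representatives determined during the construction; only available
 * if the operation was constructed with {@code removeDeadTransitions == true}.
 */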
public Set<Transition<LETTER, PLACE>> computeVitalTransitions() {
assert mRemoveDeadTransitions : "remove dead transitions must be enabled";
return mVitalRepresentatives.stream().map(Event::getTransition).collect(Collectors.toSet());
}
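/**
 * @return true iff at least one of the given initial conditions is a member of the given equivalence class.
 */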
private boolean containsInitial(final Set<Condition<LETTER, PLACE>> equivalenceClassMembers,
final Collection<Condition<LETTER, PLACE>> initialConditions) {
return initialConditions.stream().anyMatch(equivalenceClassMembers::contains);
}
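/**
 * Checks language equivalence of the two given bounded Petri nets by translating both into finite automata (via
 * {@link PetriNet2FiniteAutomaton}) and checking inclusion in both directions; counterexamples are logged.
 */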
private boolean petriNetLanguageEquivalence(final BoundedPetriNet<LETTER, PLACE> oldNet,
final BoundedPetriNet<LETTER, PLACE> newNet,
final IPetriNetAndAutomataInclusionStateFactory<PLACE> stateFactory) throws AutomataLibraryException {
if (mLogger.isInfoEnabled()) {
mLogger.info("Testing Petri net language equivalence");
}
final INestedWordAutomaton<LETTER, PLACE> finAuto1 =
new PetriNet2FiniteAutomaton<>(mServices, stateFactory, oldNet).getResult();
final INestedWordAutomaton<LETTER, PLACE> finAuto2 =
new PetriNet2FiniteAutomaton<>(mServices, stateFactory, newNet).getResult();
final NestedRun<LETTER, PLACE> subsetCounterex =
new IsIncluded<>(mServices, stateFactory, finAuto1, finAuto2).getCounterexample();
final boolean subset = subsetCounterex == null;
if (!subset && mLogger.isErrorEnabled()) {
mLogger.error("Only accepted by first: " + subsetCounterex.getWord());
}
final NestedRun<LETTER, PLACE> supersetCounterex =
new IsIncluded<>(mServices, stateFactory, finAuto2, finAuto1).getCounterexample();
final boolean superset = supersetCounterex == null;
if (!superset && mLogger.isErrorEnabled()) {
mLogger.error("Only accepted by second: " + supersetCounterex.getWord());
}
final boolean result = subset && superset;
if (mLogger.isInfoEnabled()) {
mLogger.info("Finished Petri net language equivalence");
}
return result;
}
@Override
public String startMessage() {
return "Start " + getOperationName() + ". Input " + mInput.sizeInformation();
}
@Override
public String exitMessage() {
return "Finished " + getOperationName() + ". Result " + mNet.sizeInformation() + ". Original net "
+ mInput.getNet().sizeInformation() + ".";
}
@Override
public BoundedPetriNet<LETTER, PLACE> getResult() {
return mNet;
}
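/**
 * Merges the conditions of {@code set2} with the conditions of {@code set1} that are labeled with the same place.
 * Whenever two merged conditions have successor events that are labeled with the same transition, the pair of
 * event representatives is added to {@link #mMergingCandidates} so that the construction can later check whether
 * these events have to be merged as well. The auxiliary relations {@link #mEventSuccessors} and
 * {@link #mConditionPredecessors} are rewired to the new representatives.
 */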
private void mergeConditions(final Set<Condition<LETTER, PLACE>> set1, final Set<Condition<LETTER, PLACE>> set2) {
mNumberOfCallsOfMergeCandidates++;
final Map<PLACE, Condition<LETTER, PLACE>> origPlace2Condition = new HashMap<>();
final Set<Condition<LETTER, PLACE>> s2 =
set2.stream().map(mConditionRepresentatives::find).collect(Collectors.toSet());
for (final Condition<LETTER, PLACE> c1 : set1) {
final Condition<LETTER, PLACE> c1representative = mConditionRepresentatives.find(c1);
if (!s2.remove(c1representative)) {
origPlace2Condition.put(c1.getPlace(), c1representative);
}
}
for (final Condition<LETTER, PLACE> c2 : s2) {
final PLACE p2 = c2.getPlace();
assert p2 != null : "no place for condition " + c2;
final Condition<LETTER, PLACE> c1 = origPlace2Condition.get(p2);
assert c1 != null : "no condition for place " + p2;
for (final Event<LETTER, PLACE> e1 : mEventSuccessors.getImage(c1)) {
for (final Event<LETTER, PLACE> e2 : mEventSuccessors.getImage(c2)) {
if (e1.getTransition().equals(e2.getTransition())) {
mMergingCandidates
.add(new Pair<>(mEventRepresentatives.find(e1), mEventRepresentatives.find(e2)));
mNumberOfAddOperationsToTheCandQueue++;
}
}
}
mConditionRepresentatives.union(c1, c2);
final Condition<LETTER, PLACE> rep = mConditionRepresentatives.find(c1);
final Condition<LETTER, PLACE> nonRep;
if (rep.equals(c1)) {
nonRep = c2;
} else {
nonRep = c1;
}
for (final Event<LETTER, PLACE> e : mEventSuccessors.getImage(nonRep)) {
mConditionPredecessors.removePair(e, nonRep);
mConditionPredecessors.addPair(e, rep);
}
mEventSuccessors.addAllPairs(rep, mEventSuccessors.getImage(nonRep));
mEventSuccessors.removeDomainElement(nonRep);
}
}
public HashRelation<PLACE, PLACE> getOldToNewPlaces() {
return mOldToNewPlaces;
}
public HashRelation<Transition<LETTER, PLACE>, Transition<LETTER, PLACE>> getOldToNewTransitions() {
return mOldToNewTransitions;
}
/**
* A transition set.
*
* @author Matthias Heizmann (heizmann@informatik.uni-freiburg.de)
*/
class TransitionSet {
private final Map<LETTER, Map<Set<PLACE>, Set<Set<PLACE>>>> mLetter2Predset2Succsets = new HashMap<>();
void addTransition(final LETTER letter, final Set<PLACE> predset, final Set<PLACE> succset) {
Map<Set<PLACE>, Set<Set<PLACE>>> predsets2succsets = mLetter2Predset2Succsets.get(letter);
if (predsets2succsets == null) {
predsets2succsets = new HashMap<>();
mLetter2Predset2Succsets.put(letter, predsets2succsets);
}
Set<Set<PLACE>> succsets = predsets2succsets.get(predset);
if (succsets == null) {
succsets = new HashSet<>();
predsets2succsets.put(predset, succsets);
}
succsets.add(succset);
}
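/**
 * Adds one transition per stored (letter, predecessor set, successor set) triple to the given net.
 */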
void addAllTransitionsToNet(final BoundedPetriNet<LETTER, PLACE> net) {
for (final Entry<LETTER, Map<Set<PLACE>, Set<Set<PLACE>>>> entry1 : mLetter2Predset2Succsets.entrySet()) {
final LETTER letter = entry1.getKey();
final Map<Set<PLACE>, Set<Set<PLACE>>> predsets2succsets = entry1.getValue();
for (final Entry<Set<PLACE>, Set<Set<PLACE>>> entry2 : predsets2succsets.entrySet()) {
final Set<PLACE> predset = entry2.getKey();
final Set<Set<PLACE>> succsets = entry2.getValue();
for (final Set<PLACE> succset : succsets) {
net.addTransition(letter, ImmutableSet.copyOf(predset), ImmutableSet.copyOf(succset));
}
}
}
}
}
/**
* @return false iff there exists a transition t such that c1 and c2 both have an outgoing event labeled with t.
*/
private static <LETTER, PLACE> boolean areIndependent(final Condition<LETTER, PLACE> c1,
final Condition<LETTER, PLACE> c2) {
final Set<Transition<LETTER, PLACE>> c1SuccTrans =
c1.getSuccessorEvents().stream().map(Event::getTransition).collect(Collectors.toSet());
final Set<Transition<LETTER, PLACE>> c2SuccTrans =
c2.getSuccessorEvents().stream().map(Event::getTransition).collect(Collectors.toSet());
return Collections.disjoint(c1SuccTrans, c2SuccTrans);
}
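/**
 * Collects the successor conditions of all non-cut-off events of the input finite prefix. (Despite its name, this
 * method returns conditions, not events.)
 */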
public LinkedHashSet<Condition<LETTER, PLACE>> collectRelevantEvents() {
final LinkedHashSet<Condition<LETTER, PLACE>> conditions = new LinkedHashSet<>();
for (final Event<LETTER, PLACE> e : mInput.getEvents()) {
if (!e.isCutoffEvent()) {
conditions.addAll(e.getSuccessorConditions());
}
}
return conditions;
}
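/**
 * Groups the given conditions by place and, per place, puts two conditions into the same equivalence class if they
 * are not independent in the sense of {@link #areIndependent(Condition, Condition)} (closed transitively via the
 * union-find data structure).
 */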
private Map<PLACE, UnionFind<Condition<LETTER, PLACE>>>
computeEquivalenceClasses(final Collection<Condition<LETTER, PLACE>> conditions) {
final Map<PLACE, UnionFind<Condition<LETTER, PLACE>>> result = new HashMap<>();
for (final Condition<LETTER, PLACE> c : conditions) {
final UnionFind<Condition<LETTER, PLACE>> uf = result.computeIfAbsent(c.getPlace(), x -> new UnionFind<>());
final List<Condition<LETTER, PLACE>> mergeRequired = new ArrayList<>();
for (final Set<Condition<LETTER, PLACE>> eqClass : uf.getAllEquivalenceClasses()) {
for (final Condition<LETTER, PLACE> otherCond : eqClass) {
final boolean areIndependent = areIndependent(c, otherCond);
if (!areIndependent) {
mergeRequired.add(otherCond);
// no need to check the other members of this equivalence class, they will be merged anyway
break;
}
}
}
uf.makeEquivalenceClass(c);
for (final Condition<LETTER, PLACE> otherCond : mergeRequired) {
uf.union(c, otherCond);
}
}
return result;
}
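/**
 * Alternative construction (currently disabled, see {@link #mUsePetrification}): computes equivalence classes of
 * conditions per place, adds one place per class representative, and adds one transition per event, where the
 * successor conditions of a cut-off event are taken from its companion.
 */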
private BoundedPetriNet<LETTER, PLACE> buildPetrification(final BranchingProcess<LETTER, PLACE> bp) {
final LinkedHashSet<Condition<LETTER, PLACE>> relevantConditions = collectRelevantEvents();
final Map<PLACE, UnionFind<Condition<LETTER, PLACE>>> equivalenceClasses =
computeEquivalenceClasses(relevantConditions);
final Map<Condition<LETTER, PLACE>, PLACE> condition2Place =
computeCondition2Place(equivalenceClasses, mStateFactory);
final BoundedPetriNet<LETTER, PLACE> result = new BoundedPetriNet<>(mServices, bp.getAlphabet(), false);
for (final Entry<Condition<LETTER, PLACE>, PLACE> entry : condition2Place.entrySet()) {
if (!result.getPlaces().contains(entry.getValue())) {
final boolean isInitial = entry.getKey().getPredecessorEvent() == bp.getDummyRoot();
final boolean isAccepting = bp.getNet().isAccepting(entry.getKey().getPlace());
final boolean newlyAdded = result.addPlace(entry.getValue(), isInitial, isAccepting);
if (!newlyAdded) {
throw new AssertionError("Must not add place twice: " + entry.getValue());
}
}
}
computeTransitions(bp.getEvents(), condition2Place)
.forEach(x -> result.addTransition(x.getFirst(), x.getSecond(), x.getThird()));
return result;
}
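/**
 * @return a relation containing, for every non-auxiliary event, its letter together with the places of its
 * predecessor conditions and the places of its successor conditions (the successor conditions of the companion
 * are used for cut-off events).
 */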
private HashRelation3<LETTER, ImmutableSet<PLACE>, ImmutableSet<PLACE>> computeTransitions(
final Collection<Event<LETTER, PLACE>> events, final Map<Condition<LETTER, PLACE>, PLACE> condition2Place) {
final HashRelation3<LETTER, ImmutableSet<PLACE>, ImmutableSet<PLACE>> letterPredecessorsSuccessors =
new HashRelation3<>();
for (final Event<LETTER, PLACE> event : events) {
// skip auxiliary initial event
if (event.getTransition() != null) {
final ImmutableSet<PLACE> predecessors = event.getPredecessorConditions().stream()
.map(condition2Place::get).collect(ImmutableSet.collector());
assert !predecessors.contains(null);
final ImmutableSet<PLACE> successors;
if (event.getCompanion() != null) {
final Event<LETTER, PLACE> companion = event.getCompanion();
if (companion.getTransition() != event.getTransition()) {
throw new UnsupportedOperationException("finite prefix with same transition cut-off required");
}
successors = companion.getSuccessorConditions().stream().map(condition2Place::get)
.collect(ImmutableSet.collector());
} else {
successors = event.getSuccessorConditions().stream().map(condition2Place::get)
.collect(ImmutableSet.collector());
}
assert !successors.contains(null);
letterPredecessorsSuccessors.addTriple(event.getTransition().getSymbol(), predecessors, successors);
}
}
return letterPredecessorsSuccessors;
}
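/**
 * @return a map that assigns to every condition the place created for the representative of its equivalence class.
 */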
private Map<Condition<LETTER, PLACE>, PLACE> computeCondition2Place(
final Map<PLACE, UnionFind<Condition<LETTER, PLACE>>> equivalenceClasses,
final IFinitePrefix2PetriNetStateFactory<PLACE> stateFactory) {
final Map<Condition<LETTER, PLACE>, PLACE> result = new HashMap<>();
for (final Entry<PLACE, UnionFind<Condition<LETTER, PLACE>>> entry : equivalenceClasses.entrySet()) {
for (final Condition<LETTER, PLACE> rep : entry.getValue().getAllRepresentatives()) {
final PLACE resultPlace = stateFactory.finitePrefix2net(rep);
for (final Condition<LETTER, PLACE> eqMember : entry.getValue().getEquivalenceClassMembers(rep)) {
result.put(eqMember, resultPlace);
}
}
}
return result;
}
@Override
public boolean checkResult(final IPetriNetAndAutomataInclusionStateFactory<PLACE> stateFactory)
throws AutomataLibraryException {
if (!(mInput.getNet() instanceof BoundedPetriNet)) {
throw new AssertionError("implement result checking for on-demand inputs");
}
final BoundedPetriNet<LETTER, PLACE> originalNet = (BoundedPetriNet<LETTER, PLACE>) mInput.getNet();
final boolean languagesEquivalent = petriNetLanguageEquivalence(originalNet, mNet, stateFactory);
if (!languagesEquivalent) {
mLogger.error("The result of the " + FinitePrefix2PetriNet.class.getSimpleName()
+ " recognizes a different language than the original net.");
}
return languagesEquivalent;
}
}