/*
* Copyright (C) 2017 Daniel Dietsch (dietsch@informatik.uni-freiburg.de)
* Copyright (C) 2017 University of Freiburg
*
* This file is part of the ULTIMATE UnitTest Library.
*
* The ULTIMATE UnitTest Library is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* The ULTIMATE UnitTest Library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with the ULTIMATE UnitTest Library. If not, see <http://www.gnu.org/licenses/>.
*
* Additional permission under GNU GPL version 3 section 7:
* If you modify the ULTIMATE UnitTest Library, or any covered work, by linking
* or combining it with Eclipse RCP (or a modified version of Eclipse RCP),
* containing parts covered by the terms of the Eclipse Public License, the
* licensors of the ULTIMATE UnitTest Library grant you additional permission
* to convey the resulting work.
*/
package de.uni_freiburg.informatik.ultimate.test.benchexec;
import java.io.File;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;
import jakarta.xml.bind.JAXBElement;
import jakarta.xml.bind.JAXBException;
import javax.xml.namespace.QName;
import de.uni_freiburg.informatik.ultimate.test.UltimateRunDefinition;
import de.uni_freiburg.informatik.ultimate.test.UltimateTestSuite;
import de.uni_freiburg.informatik.ultimate.test.benchexec.benchmark.Benchmark;
import de.uni_freiburg.informatik.ultimate.test.benchexec.benchmark.Option;
import de.uni_freiburg.informatik.ultimate.test.benchexec.benchmark.Rundefinition;
import de.uni_freiburg.informatik.ultimate.test.benchexec.benchmark.Tasks;
import de.uni_freiburg.informatik.ultimate.test.reporting.BaseTestLogfile;
import de.uni_freiburg.informatik.ultimate.test.reporting.IPreTestLog;
import de.uni_freiburg.informatik.ultimate.util.CoreUtil;
import de.uni_freiburg.informatik.ultimate.util.datastructures.relation.Pair;
/**
* The {@link BenchexecRundefinitionGeneratorPreLog} generates a benchexec run definition from a test suite such that
* the test suite can be run with benchexec.
*
* @author Daniel Dietsch (dietsch@informatik.uni-freiburg.de)
*
*/
public final class BenchexecRundefinitionGeneratorPreLog extends BaseTestLogfile implements IPreTestLog {
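// Maximum JVM heap size, formatted as a human-readable byte count; used below as the memory limit of the generated benchmark.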
private static final String MAX_MEMORY = CoreUtil.humanReadableByteCount(Runtime.getRuntime().maxMemory(), true);
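// QName for <include> child elements of benchexec <tasks> entries.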
private static final QName QNAME_INCLUDE = new QName("", "include");
private Benchmark mBenchexecBenchmark;
public BenchexecRundefinitionGeneratorPreLog(final Class<? extends UltimateTestSuite> ultimateTestSuite) {
super(ultimateTestSuite);
}
/**
 * Define a function that determines the main input and the additional inputs and their naming convention. You can
 * use the following variables for benchexec:
 * <ul>
 * <li>${benchmark_name} Name of benchmark execution</li>
 * <li>${benchmark_date} Timestamp of benchmark execution</li>
 * <li>${benchmark_path} Directory of benchmark XML file</li>
 * <li>${benchmark_path_abs} Directory of benchmark XML file (absolute path)</li>
 * <li>${benchmark_file} Name of benchmark XML file (without path)</li>
 * <li>${logfile_path} Directory where tool-output files will be stored</li>
 * <li>${logfile_path_abs} Directory where tool-output files will be stored (absolute path)</li>
 * <li>${rundefinition_name} Name of current run definition</li>
 * <li>${inputfile_name} Name of current input file (without path)</li>
 * <li>${inputfile_path} Directory of current input file</li>
 * <li>${inputfile_path_abs} Directory of current input file (absolute path)</li>
 * </ul>
 *
 * @param ultimateTestSuite
 * @param funGetMainInput
 * @param additionalInputs
 */
public BenchexecRundefinitionGeneratorPreLog(final Class<? extends UltimateTestSuite> ultimateTestSuite,
final Function funGetMainInput, final String[] additionalInputs) {
super(ultimateTestSuite);
}
@Override
public String getFilenameExtension() {
return ".xml";
}
@Override
public String getLog() {
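// Serialize the benchmark definition assembled in addAllTests to its XML representation.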
try {
return BenchmarkSerializer.toString(mBenchexecBenchmark);
} catch (final JAXBException e) {
throw new RuntimeException(e);
}
}
@Override
public void addAllTests(final List<UltimateRunDefinition> runs) {
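// The generated benchmark has a single global time limit, so all run definitions must agree on one timeout,
// and each run definition may only have a single input file.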
final Set<Long> timeouts = runs.stream().map(a -> a.getTimeout()).collect(Collectors.toSet());
if (timeouts.size() != 1) {
throw new IllegalArgumentException(
"Cannot handle more than one timeout per test suite, but saw " + timeouts);
}
if (runs.stream().anyMatch(a -> a.getInput().length != 1)) {
throw new IllegalArgumentException("Cannot handle multiple input files in a run definition");
}
final long timeout = timeouts.iterator().next();
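// Convert the Ultimate timeout from milliseconds to seconds and add a 25% grace period for the hard time limit.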
final long timeoutInS = timeout / 1000;
final long hardTimeoutInS = (long) (timeoutInS * 1.25);
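// Global benchmark settings: time limits in seconds, number of CPU cores, memory limit, and tool name.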
mBenchexecBenchmark = new Benchmark();
mBenchexecBenchmark.setTimelimit(String.valueOf(timeoutInS));
mBenchexecBenchmark.setHardtimelimit(String.valueOf(hardTimeoutInS));
mBenchexecBenchmark.setCpuCores("2");
mBenchexecBenchmark.setMemlimit(MAX_MEMORY);
mBenchexecBenchmark.setTool("ultimateautomizer");
// add run definitions to the benchmark
final Set<Pair<File, File>> tcSettings =
runs.stream().map(a1 -> new Pair<>(a1.getToolchain(), a1.getSettings())).collect(Collectors.toSet());
final Map<Rundefinition, List<Tasks>> rundefsToTasks = new HashMap<>();
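// Build one benchexec run definition per unique (toolchain, settings) pair.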
for (final Pair<File, File> tcSetting : tcSettings) {
final File toolchain = tcSetting.getFirst();
final File setting = tcSetting.getSecond();
final Rundefinition rd = new Rundefinition();
rd.setName(toolchain.getName() + "+" + setting.getName());
final List