Mentions légales du service

Skip to content
Snippets Groups Projects

Compare revisions

Changes are shown as if the source revision was being merged into the target revision. Learn more about comparing revisions.

Source

Select target project
No results found

Target

Select target project
  • rules/integraal
  • ulliana/integraal
2 results
Show changes
Commits on Source (10)
Showing
with 1330 additions and 227 deletions
......@@ -28,7 +28,8 @@ public interface AtomicQueryExplainer<ExplanationType> extends QueryExplainer<Ex
/**
* Runs the pipeline (with timing) on the given dataset and queries.
*/
void pipelineWithTimer(String datasetName, Collection<Atom> queries);
void pipelineWithTimerDetailed(String datasetName, Collection<Atom> queries);
void pipelineIn2StepsWithTimer(String datasetName, Collection<Atom> queries);
}
package fr.boreal.explanation.configuration;
import com.opencsv.CSVWriter;
import fr.boreal.model.kb.api.KnowledgeBase;
import java.io.ByteArrayOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintStream;
import java.nio.charset.StandardCharsets;
import java.util.*;
public class StatsUtil {
......@@ -13,8 +17,8 @@ public class StatsUtil {
static Map<String, Map<String, Map<String, Long>>> timingRecords = new TreeMap<>();
static Map<String, LinkedHashSet<String>> operationOrder = new HashMap<>();
static LinkedHashSet<String> datasetOrder = new LinkedHashSet<>();
static Map<String, Integer> datasetToFBsize = new HashMap<>();
static Map<String, Integer> rbSize = new HashMap<>();
static Map<String, Long> datasetToFBsize = new HashMap<>();
static Map<String, Integer> datasetToRBsize = new HashMap<>();
static Map<String, Integer> avgExplanationNbPerQuery = new HashMap<>();
......@@ -27,7 +31,7 @@ public class StatsUtil {
* @param operation A label describing what is being timed (e.g. "Transforming rules").
* @param datasetName The identifier for the dataset (e.g. "Dataset1").
*/
public static void timer(Runnable function, String explainerName, String operation, String datasetName) {
public static void timerSumAllQueries(Runnable function, String explainerName, String operation, String datasetName) {
long startTime = System.currentTimeMillis();
function.run();
long elapsedTime = System.currentTimeMillis() - startTime;
......@@ -40,11 +44,7 @@ public class StatsUtil {
.computeIfAbsent(explainerName, ignore -> new HashMap<>())
.computeIfAbsent(operation, ignore -> new HashMap<>());
if (!datasetNameToTime.containsKey(datasetName)) {
datasetNameToTime.put(datasetName, elapsedTime);
} else {
datasetNameToTime.put(datasetName, datasetNameToTime.get(datasetName) + elapsedTime);
}
datasetNameToTime.merge(datasetName, elapsedTime, Long::sum);
// Keep track of the order of execution
operationOrder.computeIfAbsent(explainerName, ignore -> new LinkedHashSet<>()).add(operation);
......@@ -52,12 +52,205 @@ public class StatsUtil {
// Keep track of the order of datasets
datasetOrder.add(datasetName);
}
public static void recordFBSize(String datasetName, int FBSize) {
};
/**
 * Times a single invocation of {@code function} and records it under a unique,
 * per-invocation operation name ("op", "op #1", "op #2", ...) so repeated calls
 * for the same dataset are kept as separate rows instead of being summed.
 *
 * @param function      work to time; {@code null} records a sentinel of -1 ms
 * @param explainerName label of the explainer configuration being measured
 * @param operation     base label of the operation being timed
 * @param datasetName   dataset identifier
 */
public static void timerPerEachQuery(Runnable function,
                                     String explainerName,
                                     String operation,
                                     String datasetName) {
    long durationMs = -1;
    if (function != null) {
        final long begin = System.currentTimeMillis();
        function.run();
        durationMs = System.currentTimeMillis() - begin;
    }

    System.out.printf("[Dataset=%s] %s took %d ms%n",
            datasetName, operation, durationMs);

    // Operation name -> (dataset -> elapsed time) for this explainer.
    Map<String, Map<String, Long>> operationsForExplainer =
            timingRecords.computeIfAbsent(explainerName, k -> new TreeMap<>());

    // Count how many times this operation was already recorded for this dataset,
    // matching both the bare name and previously numbered variants ("op #1", ...).
    long priorInvocations = 0;
    for (Map.Entry<String, Map<String, Long>> entry : operationsForExplainer.entrySet()) {
        String recordedName = entry.getKey();
        boolean sameOperation = recordedName.equals(operation)
                || recordedName.startsWith(operation + " #");
        if (sameOperation && entry.getValue().containsKey(datasetName)) {
            priorInvocations++;
        }
    }

    // First call keeps the bare name; repeats get a " #<n>" suffix.
    String recordKey = (priorInvocations == 0)
            ? operation
            : operation + " #" + priorInvocations;

    operationsForExplainer
            .computeIfAbsent(recordKey, k -> new TreeMap<>())
            .put(datasetName, durationMs);

    // Preserve execution order for the CSV export.
    operationOrder
            .computeIfAbsent(explainerName, k -> new LinkedHashSet<>())
            .add(recordKey);
    datasetOrder.add(datasetName);
}
/**
 * Times a single chase invocation and additionally records the fact-base and
 * rule-base sizes of {@code kb} before and after the run, each under its own
 * "sizeFactBaseBefore/After ..." / "sizeRuleBaseBefore/After ..." key.
 * Like timerPerEachQuery, repeated calls for the same dataset are kept as
 * separate numbered rows ("op", "op #1", ...). A null {@code function} records
 * -1 sentinels for the time and all four sizes (used to keep rows aligned when
 * a pipeline branch is skipped).
 *
 * @param function      the chase step to run and time; may be null (skipped)
 * @param kb            knowledge base whose fact/rule base sizes are sampled
 * @param explainerName label of the explainer configuration being measured
 * @param operation     base label of the operation being timed
 * @param datasetName   dataset identifier
 */
public static void timerChasePerEachQuery(Runnable function,
KnowledgeBase kb,
String explainerName,
String operation,
String datasetName) {
// -1 sentinels mean "not run"; the CSV exporter prints "_" for negatives.
long elapsedTime = -1;
long factBaseBefore = -1;
long factBaseAfter = -1;
int ruleBaseBefore = -1;
int ruleBaseAfter = -1;
if (function != null) {
factBaseBefore = kb.getFactBase().size();
ruleBaseBefore = kb.getRuleBase().getRules().size();
long startTime = System.currentTimeMillis();
function.run();
elapsedTime = System.currentTimeMillis() - startTime;
factBaseAfter = kb.getFactBase().size();
ruleBaseAfter = kb.getRuleBase().getRules().size();
}
System.out.printf("[Dataset=%s] %s took %d ms%n", datasetName, operation, elapsedTime);
// 1) Get or create timing records map
Map<String, Map<String, Long>> opsMap =
timingRecords.computeIfAbsent(explainerName, k -> new TreeMap<>());
// 2) Count existing invocations for this op and dataset
// (matches the bare name and previously numbered variants "op #n")
long existingCount = opsMap.entrySet().stream()
.filter(e -> {
String key = e.getKey();
boolean matchesBaseOrNumbered =
key.equals(operation) || key.startsWith(operation + " #");
return matchesBaseOrNumbered && e.getValue().containsKey(datasetName);
})
.count();
// 3) Create numbered op name (first invocation keeps the bare name)
String numberedOp = existingCount == 0
? operation
: operation + " #" + existingCount;
// 4) Record execution time
Map<String, Long> datasetToTime = opsMap.computeIfAbsent(numberedOp, k -> new TreeMap<>());
datasetToTime.put(datasetName, elapsedTime);
// 5) Record fact base sizes (one row each, keyed off the numbered op)
recordSize(explainerName, "sizeFactBaseBefore " + numberedOp, datasetName, factBaseBefore);
recordSize(explainerName, "sizeFactBaseAfter " + numberedOp, datasetName, factBaseAfter);
// 6) Record rule base sizes
recordSize(explainerName, "sizeRuleBaseBefore " + numberedOp, datasetName, ruleBaseBefore);
recordSize(explainerName, "sizeRuleBaseAfter " + numberedOp, datasetName, ruleBaseAfter);
// 7) Maintain execution order so the size rows appear right after the time row
LinkedHashSet<String> opOrderSet = operationOrder.computeIfAbsent(explainerName, k -> new LinkedHashSet<>());
opOrderSet.add(numberedOp);
opOrderSet.add("sizeFactBaseBefore " + numberedOp);
opOrderSet.add("sizeFactBaseAfter " + numberedOp);
opOrderSet.add("sizeRuleBaseBefore " + numberedOp);
opOrderSet.add("sizeRuleBaseAfter " + numberedOp);
datasetOrder.add(datasetName);
}
/** Stores a size measurement under explainer/fullKey/dataset in timingRecords. */
private static void recordSize(String explainerName, String fullKey, String datasetName, long size) {
    timingRecords
            .computeIfAbsent(explainerName, k -> new TreeMap<>())
            .computeIfAbsent(fullKey, k -> new TreeMap<>())
            .put(datasetName, size);
}
/**
 * Times one MARCO solver run covering all queries at once.
 *
 * The solver prints its own statistics (including a "total : X.XXX" seconds
 * line) to stdout; this method captures that output, parses the reported
 * total via {@link #parseTotalSeconds(String)}, converts it to milliseconds
 * and records it under {@code explainerName}/{@code operation} for
 * {@code datasetName}. Repeated calls for the same keys are summed.
 *
 * @param function      the solver invocation to run (must not be null)
 * @param explainerName label of the explainer configuration being measured
 * @param operation     label of the operation being timed
 * @param datasetName   dataset identifier
 * @throws IllegalStateException if no parsable "total" line is found
 */
public static void timerMarcoAllQueries(
        Runnable function,
        String explainerName,
        String operation,
        String datasetName
) {
    // 1) Capture everything sent to System.out while the solver runs.
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    PrintStream oldOut = System.out;
    System.setOut(new PrintStream(baos, true));
    try {
        // 2) Run the function (it prints its own stats to stdout).
        function.run();
    } finally {
        // 3) BUGFIX: restore stdout even when the solver throws; previously an
        // exception here left all subsequent JVM output redirected to the buffer.
        System.out.flush();
        System.setOut(oldOut);
    }

    // 4) Extract the captured text and parse the solver-reported total.
    String stdout = baos.toString(StandardCharsets.UTF_8);
    double totalSeconds = parseTotalSeconds(stdout);
    long totalMs = Math.round(totalSeconds * 1000);

    // 5) Print a summary to the (restored) real stdout.
    System.out.printf(
            "[Dataset=%s] %s total = %.3f s → %d ms%n",
            datasetName, operation, totalSeconds, totalMs
    );

    // 6) Record only the total in ms; TreeMaps keep keys sorted, consistent
    // with the other timer methods of this class.
    timingRecords
            .computeIfAbsent(explainerName, k -> new TreeMap<>())
            .computeIfAbsent(operation, k -> new TreeMap<>())
            .merge(datasetName, totalMs, Long::sum);

    // 7) Keep the ordering structures up to date for the CSV export.
    operationOrder
            .computeIfAbsent(explainerName, k -> new LinkedHashSet<>())
            .add(operation);
    datasetOrder.add(datasetName);
}
/**
 * Scans captured solver output for the first line of the form
 * "total : X.XXX" and returns the seconds value as a double.
 *
 * Lines that start with "total" but whose value after the colon is not a
 * parsable number are skipped instead of aborting the scan (previously a
 * stray non-numeric "total..." line threw NumberFormatException).
 *
 * @param stdout the captured solver output
 * @return the first parsable total, in seconds
 * @throws IllegalStateException if no parsable "total" line exists
 */
private static double parseTotalSeconds(String stdout) {
    for (String line : stdout.split("\\r?\\n")) {
        String trimmed = line.trim();
        if (trimmed.startsWith("total")) {
            int colon = trimmed.indexOf(':');
            if (colon >= 0 && colon + 1 < trimmed.length()) {
                String numPart = trimmed.substring(colon + 1).trim();
                try {
                    return Double.parseDouble(numPart);
                } catch (NumberFormatException ignored) {
                    // Looked like a total line but was not numeric
                    // (e.g. "total time: n/a") — keep scanning.
                }
            }
        }
    }
    throw new IllegalStateException(
            "Could not find a line beginning with 'total:' in:\n" + stdout
    );
}
/**
 * Records the fact-base size measured for the given dataset (exported in the
 * header row of the CSV).
 *
 * @param datasetName dataset identifier
 * @param FBSize      number of atoms in the fact base
 */
public static void recordFBSize(String datasetName, long FBSize) {
    datasetToFBsize.put(datasetName, FBSize);
}
/**
 * Records the rule-base size measured for the given dataset (exported in the
 * header row of the CSV).
 *
 * @param datasetName dataset identifier
 * @param RBSize      number of rules in the rule base
 */
public static void recordRBSize(String datasetName, int RBSize) {
    datasetToRBsize.put(datasetName, RBSize);
}
/**
* Exports pivoted timings so that:
......@@ -77,20 +270,28 @@ public class StatsUtil {
columns.addAll(datasetOrder);
writer.writeNext(columns.toArray(new String[0]));
// 2) Record # Rules & # Facts
// for (String datasetName : datasetOrder) {
// if (operationToRecord.containsKey(datasetName)) {
// row.add(operationToRecord.get(datasetName).toString());
// } else {
// row.add("");
// }
// }
// 2) Write RB and FB Size
List<String> row = new ArrayList<>();
row.add("");
row.add("");
for (String datasetName : datasetOrder) {
if (datasetToFBsize.containsKey(datasetName) && datasetToRBsize.containsKey(datasetName)) {
row.add(datasetToFBsize.get(datasetName).toString() + "," + datasetToRBsize.get(datasetName).toString());
} else {
row.add("");
}
}
writer.writeNext(row.toArray(new String[0]));
// 3) For each explainer
for (String explainerName : timingRecords.keySet()) {
// For each operation for this explainer
writer.writeNext(new String[0]);
if (operationOrder.get(explainerName) == null) {
throw new IllegalStateException("No operations for explainer " + explainerName);
}
for (String operation : operationOrder.get(explainerName)) {
// Print explainer name and operation name
Map<String, Long> operationToRecord = timingRecords.get(explainerName).get(operation);
......@@ -99,14 +300,19 @@ public class StatsUtil {
continue;
}
List<String> row = new ArrayList<>();
row = new ArrayList<>();
row.add(explainerName);
row.add(operation);
// Then print all dataset values in order
for (String datasetName : datasetOrder) {
if (operationToRecord.containsKey(datasetName)) {
row.add(operationToRecord.get(datasetName).toString());
long value = operationToRecord.get(datasetName);
if (value < 0) {
row.add("_");
} else {
row.add(operationToRecord.get(datasetName).toString());
}
} else {
row.add("");
}
......
package fr.boreal.explanation.configuration.query_selection;
import fr.boreal.explanation.configuration.DefaultChaseForExplanations;
import fr.boreal.forward_chaining.chase.Chase;
import fr.boreal.forward_chaining.chase.ChaseBuilder;
import fr.boreal.model.kb.api.FactBase;
import fr.boreal.model.kb.api.KnowledgeBase;
import fr.boreal.model.kb.api.RuleBase;
import fr.boreal.model.kb.impl.RuleBaseImpl;
import fr.boreal.model.logicalElements.api.Atom;
import fr.boreal.model.logicalElements.api.Constant;
import fr.boreal.model.logicalElements.api.Predicate;
import fr.boreal.model.logicalElements.impl.AtomImpl;
import fr.boreal.model.rule.api.FORule;
import fr.boreal.storage.natives.SimpleInMemoryGraphStore;
import java.util.*;
import java.util.Collections;
import java.util.stream.Collectors;
public class QuerySelector {
// public static Optional<Collection<Atom>> select(KnowledgeBase kb, int nbFacts, Boolean explainFacts) {
// Collection<FORule> rules = new ArrayList<FORule>(kb.getRuleBase().getRules());
//
// RuleBase rb = new RuleBaseImpl(rules);
// FactBase fb = new SimpleInMemoryGraphStore(kb.getFactBase().getAtoms().collect(Collectors.toList()));
//
// KnowledgeBase kbCopy = Utils.copyKnowledgeBase(kb);
//
// Collection<Atom> predicateConstantAtoms = new ArrayList<Atom>();
// Collection<Constant> predicateConstants = new ArrayList<Constant>();
//
// if (!explainFacts) {
// for (FORule ruleToConstant : kbCopy.getRuleBase().getRules()) {
// for (Predicate predicateToConstant : ruleToConstant.getBody().getPredicates()) {
// if (predicateToConstant.getArity() == 1) {
// Constant constantPred = termFactory.createOrGetConstant("CONS_"+predicateToConstant.toString());
// Atom predicateConstantAtom = new AtomImpl(predicateToConstant, constantPred);
//
// kbCopy.getFactBase().add(predicateConstantAtom);
// predicateConstantAtoms.add(predicateConstantAtom);
// predicateConstants.add(constantPred);}
// }
// }
// }
//
// Chase chase = DefaultChaseForExplanations.chase(kb);
//
// int depth = 0;
//
// List<Set<Atom>> foundAxiomEntailments = new ArrayList<>();
//
// foundAxiomEntailments.add(new HashSet<>());
//
// chase.applyGlobalPretreatments();
//
// while (chase.hasNextStep()) {
// chase.applyPretreatments();
// chase.nextStep();
// chase.applyEndOfStepTreatments();
// depth++;
// foundAxiomEntailments.add(chase
// .getLastStepResults()
// .created_facts_as_factbase()
// .getAtoms()
// .collect(Collectors.toSet()));
// }
//
// System.out.println(foundAxiomEntailments);
// if (kbCopy.getFactBase().size() - predicateConstantAtoms.size() > nbFacts) {
// List<Atom> selectedAtoms = new ArrayList<>();
// for (int i = foundAxiomEntailments.size()-1; selectedAtoms.size() < nbFacts; i--) {
// if (i == -1) {
// System.out.println("Doesn't have enough entailments");
// return Optional.empty();
// } else {
// for (Atom a : foundAxiomEntailments.get(i)) {
// selectedAtoms.add(a);
// if (selectedAtoms.size() == nbFacts) {
// return Optional.of(selectedAtoms);
// }
// }
// }
// }
// }
//
// System.out.println("Doesn't have enough entailments");
// return Optional.empty();
// }
private static final Random RANDOM = new Random();
/**
 * Selects benchmark query atoms from the entailments of {@code kb}.
 *
 * Runs an oblivious chase on a copy of the knowledge base, collects the
 * facts produced at each chase step (skipping steps that produced nothing),
 * samples atoms from the first, middle and last non-empty steps via
 * {@link #sampleQueries(List)}, and returns them in shuffled order.
 *
 * NOTE(review): the chase mutates {@code fb}, a fresh in-memory copy of the
 * input fact base, so the caller's KB appears untouched — confirm against
 * the ChaseBuilder contract.
 *
 * @param kb the knowledge base to derive queries from
 * @return a shuffled list of sampled entailed atoms
 */
public static List<Atom> selectQueries(KnowledgeBase kb) {
// Work on copies so the chase does not alter the caller's rule/fact bases.
Collection<FORule> rules = new ArrayList<FORule>(kb.getRuleBase().getRules());
RuleBase rb = new RuleBaseImpl(rules);
FactBase fb = new SimpleInMemoryGraphStore(kb.getFactBase().getAtoms().collect(Collectors.toList()));
Chase chase = ChaseBuilder.defaultBuilder(fb, rb)
.useObliviousChecker()
.useNaiveComputer()
.useByPredicateRuleScheduler()
.useBreadthFirstApplier()
.build().get();
// One entry per chase step that actually produced new facts.
List<List<Atom>> foundAxiomEntailments = new ArrayList<>();
chase.applyGlobalPretreatments();
while (chase.hasNextStep()) {
chase.applyPretreatments();
chase.nextStep();
chase.applyEndOfStepTreatments();
List<Atom> factsProduced = chase
.getLastStepResults()
.created_facts_as_factbase()
.getAtoms()
.collect(Collectors.toList());
if (!factsProduced.isEmpty()) {
foundAxiomEntailments.add(factsProduced);
}
}
// Sample from early/middle/late steps, then shuffle to avoid ordering bias.
List<Atom> queries = sampleQueries(foundAxiomEntailments);
Collections.shuffle(queries, RANDOM);
return queries;
}
/**
 * Samples query atoms from the per-chase-step entailment lists using the
 * default split: 1 atom from the first step, 1 from the middle step and
 * 8 from the last step.
 *
 * @param lists non-empty list of per-step entailments (each sublist non-empty)
 * @return the sampled atoms, in first/middle/last order
 * @throws IllegalArgumentException if {@code lists} is null or empty
 */
public static List<Atom> sampleQueries(List<List<Atom>> lists) {
    // Delegate to the generalized overload with the historical 1/1/8 split.
    return sampleQueries(lists, 1, 1, 8);
}

/**
 * Generalized sampling: picks {@code firstCount} atoms from the first list,
 * {@code midCount} from the middle list (index size()/2) and
 * {@code lastCount} from the last list, via {@link #sampleList(List, int)}
 * (without replacement when a list is large enough, otherwise with
 * replacement).
 *
 * @param lists      non-empty list of per-step entailments
 * @param firstCount atoms to pick from the first list
 * @param midCount   atoms to pick from the middle list
 * @param lastCount  atoms to pick from the last list
 * @return the sampled atoms, in first/middle/last order
 * @throws IllegalArgumentException if {@code lists} is null or empty
 */
public static List<Atom> sampleQueries(List<List<Atom>> lists,
                                       int firstCount, int midCount, int lastCount) {
    if (lists == null || lists.isEmpty()) {
        throw new IllegalArgumentException("The outer list must not be null or empty.");
    }
    List<Atom> result = new ArrayList<>(firstCount + midCount + lastCount);
    // 1) First list
    result.addAll(sampleList(lists.get(0), firstCount));
    // 2) Middle list (e.g. index = size()/2 gives 5th list when size=9)
    int midIndex = lists.size() / 2;
    result.addAll(sampleList(lists.get(midIndex), midCount));
    // 3) Last list
    result.addAll(sampleList(lists.get(lists.size() - 1), lastCount));
    return result;
}
/**
* Helper to sample exactly `count` items from `list`.
* If list.size() >= count, sampling is done without replacement;
* otherwise with replacement.
*/
/**
 * Samples exactly {@code count} atoms from {@code list}: without replacement
 * when the list has at least {@code count} elements, with replacement
 * otherwise.
 *
 * @throws IllegalArgumentException if {@code list} is null or empty
 */
private static List<Atom> sampleList(List<Atom> list, int count) {
    if (list == null || list.isEmpty()) {
        throw new IllegalArgumentException("Sublists must not be null or empty.");
    }
    if (list.size() >= count) {
        // Enough elements: shuffle a copy and keep the leading 'count' entries.
        List<Atom> shuffled = new ArrayList<>(list);
        Collections.shuffle(shuffled, RANDOM);
        return new ArrayList<>(shuffled.subList(0, count));
    }
    // Too few elements: draw 'count' times with replacement.
    List<Atom> drawn = new ArrayList<>(count);
    for (int i = 0; i < count; i++) {
        drawn.add(list.get(RANDOM.nextInt(list.size())));
    }
    return drawn;
}
}
......@@ -85,7 +85,11 @@ public abstract class AbstractDynamicGRIBasedExplainer_KBGRI<ExplanationType> im
abstract ExplanationProcessor_GRI getGMUSProcessor(Solver solver);
@Override
public void pipelineWithTimer(String datasetName, Collection<Atom> queries) {
public void pipelineWithTimerDetailed(String datasetName, Collection<Atom> queries) {
throw new NotImplementedException();
}
@Override
public void pipelineIn2StepsWithTimer(String datasetName, Collection<Atom> queries) {
throw new NotImplementedException();
}
// abstract RuleTransformer getRuleTransformer();
......
......@@ -100,6 +100,10 @@ public abstract class AbstractIncrementalGRIBasedExplainer_KBGRI<ExplanationType
this.solver = solver;
}
public void staticStep() {
// there is no static step (placeholder for static util)
}
public void dynamicStep(Atom query) {
......@@ -117,11 +121,14 @@ public abstract class AbstractIncrementalGRIBasedExplainer_KBGRI<ExplanationType
if (seenRules.isEmpty()) {
transformRulesForFirstStaticKB();
chaseKBforGRI();
} else if (!seenRules.containsAll(queryAncestorRules)) {
transformUnseenRules();
chaseKBforGRI();
}
chaseKBforGRI();
// REL tracing
prepareRelTracing(query);
......@@ -143,66 +150,85 @@ public abstract class AbstractIncrementalGRIBasedExplainer_KBGRI<ExplanationType
}
public void pipelineWithTimer(String datasetName, Collection<Atom> queries) {
public void pipelineWithTimerDetailed(String datasetName, Collection<Atom> queries) {
String explainerName = this.getClass().getSimpleName() + " + " + this.solver.getClass().getSimpleName();
StatsUtil.timer(this::recordInitialFactbase, explainerName, "recordInitialFactbase", datasetName);
StatsUtil.timerSumAllQueries(this::recordInitialFactbase, explainerName, "recordInitialFactbase", datasetName);
for (Atom query : queries) {
StatsUtil.timer(() -> checkQueryInInitialFB(query), explainerName,
StatsUtil.timerSumAllQueries(() -> checkQueryInInitialFB(query), explainerName,
"checkQueryInInitialFB", datasetName);
if (!queryInInitialFB) {
if (queryAncestorRules != null) {
StatsUtil.timer(this::clearAncestorRules, explainerName,
StatsUtil.timerSumAllQueries(this::clearAncestorRules, explainerName,
"clearAncestorRules", datasetName);
}
StatsUtil.timer(() -> computeAncestorRules(query), explainerName,
StatsUtil.timerPerEachQuery(() -> computeAncestorRules(query), explainerName,
"computeAncestorRules", datasetName);
if (seenRules.isEmpty()) {
StatsUtil.timer(() -> transformRulesForFirstStaticKB(), explainerName,
StatsUtil.timerPerEachQuery(() -> transformRulesForFirstStaticKB(), explainerName,
"Transforming the rules for building GRI for first query", datasetName);
StatsUtil.timerChasePerEachQuery(this::chaseKBforGRI, staticKB, explainerName,
"chaseKBforGRI", datasetName);
StatsUtil.timerPerEachQuery(null, explainerName,
"Transforming the rules previously unseen", datasetName);
} else if (!seenRules.containsAll(queryAncestorRules)) {
StatsUtil.timer(() -> transformUnseenRules(), explainerName,
StatsUtil.timerPerEachQuery(this::transformUnseenRules, explainerName,
"Transforming the rules previously unseen", datasetName);
StatsUtil.timerChasePerEachQuery(this::chaseKBforGRI, staticKB, explainerName,
"chaseKBforGRI", datasetName);
} else {
StatsUtil.timerPerEachQuery(null, explainerName,
"Transforming the rules previously unseen", datasetName);
StatsUtil.timerChasePerEachQuery(null, staticKB, explainerName,
"chaseKBforGRI", datasetName);
}
StatsUtil.timer(this::chaseKBforGRI, explainerName,
"chaseKBforGRI", datasetName);
// filtering
StatsUtil.timer(() -> prepareRelTracing(query), explainerName,
StatsUtil.timerSumAllQueries(() -> prepareRelTracing(query), explainerName,
"prepareRelTracing", datasetName);
StatsUtil.timer(this::chaseForRELPropagation, explainerName,
StatsUtil.timerChasePerEachQuery(this::chaseForRELPropagation, dynamicKB, explainerName,
"chaseForRELPropagation", datasetName);
StatsUtil.timer(() -> encodeClauses(query), explainerName,
StatsUtil.timerSumAllQueries(() -> encodeClauses(query), explainerName,
"encodeClauses", datasetName);
if (solver instanceof MARCOGMUSSolver) {
StatsUtil.timer(this::writeMarcoGCNF, explainerName,
StatsUtil.timerSumAllQueries(this::writeMarcoGCNF, explainerName,
"writeMarcoGCNF", datasetName);
StatsUtil.timer(this::solveGMUSviaMarco, explainerName,
StatsUtil.timerMarcoAllQueries(this::solveGMUSviaMarco, explainerName,
"solveGMUSviaMarco", datasetName);
StatsUtil.timer(this::decodeGMUSesFromMarco, explainerName,
StatsUtil.timerSumAllQueries(this::decodeGMUSesFromMarco, explainerName,
"decodeGMUSesFromMarco", datasetName);
} else if (solver instanceof Sat4JSolver) {
StatsUtil.timer(this::solveGMUSviaSat4j, explainerName,
StatsUtil.timerSumAllQueries(this::solveGMUSviaSat4j, explainerName,
"solveGMUSviaSat4j", datasetName);
StatsUtil.timer(this::decodeGMUSesFromSat4j, explainerName,
StatsUtil.timerSumAllQueries(this::decodeGMUSesFromSat4j, explainerName,
"decodeGMUSesFromSat4j", datasetName
);
}
} else {
StatsUtil.timer(() -> returnExplanationForQueryInFB(query), explainerName,
StatsUtil.timerSumAllQueries(() -> returnExplanationForQueryInFB(query), explainerName,
"returnExplanationForQueryInFB", datasetName);
}
}
}
public void pipelineIn2StepsWithTimer (String datasetName, Collection<Atom> queries) {
String explainerName = this.getClass().getSimpleName() + " + " + this.solver.getClass().getSimpleName();
StatsUtil.timerPerEachQuery(this::staticStep, explainerName, "Static Step", datasetName);
for (Atom query : queries) {
StatsUtil.timerPerEachQuery(() -> dynamicStep(query), explainerName, "Dynamic Step", datasetName);
}
}
/**
* This methods takes in a query and
* (1) compute queryAncestorRules of the query
......@@ -370,5 +396,8 @@ public abstract class AbstractIncrementalGRIBasedExplainer_KBGRI<ExplanationType
explanations = getGMUSProcessor(solver).translateSAT4JGMUS(sat4jGmuses, encodingResult.factIDMap(), encodingResult.ruleIDMap());
}
public void placeHolder() {
}
abstract ExplanationProcessor_GRI getGMUSProcessor(Solver solver);
}
......@@ -144,44 +144,55 @@ public abstract class AbstractStaticGRIBasedExplainer_KBGRI<ExplanationType> imp
* Pipelines
*/
public void pipelineWithTimer(String datasetName, Collection<Atom> queries) {
public void pipelineWithTimerDetailed(String datasetName, Collection<Atom> queries) {
String explainerName = this.getClass().getSimpleName() + " + " + this.solver.getClass().getSimpleName();
StatsUtil.timer(this::transformRules, explainerName, "Transforming rules", datasetName);
StatsUtil.timer(this::prepareKBforGRI, explainerName, "Preparing KB for GRI", datasetName);
StatsUtil.timer(this::chaseKBforGRI, explainerName, "Chasing on staticKB for GRI", datasetName);
StatsUtil.timer(this::recordInitialFactbase, explainerName, "Recording initial factbase", datasetName);
StatsUtil.timerSumAllQueries(this::transformRules, explainerName, "Transforming rules", datasetName);
StatsUtil.timerSumAllQueries(this::prepareKBforGRI, explainerName, "Preparing KB for GRI", datasetName);
StatsUtil.timerPerEachQuery(this::chaseKBforGRI, explainerName, "Chasing on staticKB for GRI", datasetName);
StatsUtil.timerSumAllQueries(this::recordInitialFactbase, explainerName, "Recording initial factbase", datasetName);
for (Atom query : queries) {
if (this.explanations != null) {
StatsUtil.timer(this::dynamicStepClear, explainerName,
StatsUtil.timerSumAllQueries(this::dynamicStepClear, explainerName,
"Clearing gmuses and explanations for the previous query",
datasetName);
}
StatsUtil.timer(() -> checkQueryEntailment(query), explainerName, "Check if query is entailed", datasetName);
StatsUtil.timer(() -> checkQueryInInitialFB(query), explainerName, "Check if query is already in input FB", datasetName);
StatsUtil.timerSumAllQueries(() -> checkQueryEntailment(query), explainerName, "Check if query is entailed", datasetName);
StatsUtil.timerSumAllQueries(() -> checkQueryInInitialFB(query), explainerName, "Check if query is already in input FB", datasetName);
if (queryEntailed && !queryInInitialFB) {
StatsUtil.timer(() -> prepareRelTracing(query), explainerName, "Preparing Rel Tracing", datasetName);
StatsUtil.timer(this::chaseForRELPropagation, explainerName, "Chase For Rel Tracing", datasetName);
StatsUtil.timer(() -> encodeClauses(query), explainerName, "Encoding clauses", datasetName);
StatsUtil.timerSumAllQueries(() -> prepareRelTracing(query), explainerName, "Preparing Rel Tracing", datasetName);
StatsUtil.timerPerEachQuery(this::chaseForRELPropagation, explainerName, "Chase For Rel Tracing", datasetName);
StatsUtil.timerSumAllQueries(() -> encodeClauses(query), explainerName, "Encoding clauses", datasetName);
if (solver instanceof MARCOGMUSSolver) {
StatsUtil.timer(this::writeMarcoGCNF, explainerName, "Writing GCNF", datasetName);
StatsUtil.timer(this::solveGMUSviaMarco, explainerName, "Computing GMUSes", datasetName);
StatsUtil.timer(this::decodeGMUSesFromMarco, explainerName, "Decoding GMUSes", datasetName);
StatsUtil.timerSumAllQueries(this::writeMarcoGCNF, explainerName, "Writing GCNF", datasetName);
StatsUtil.timerMarcoAllQueries(this::solveGMUSviaMarco, explainerName, "Computing GMUSes", datasetName);
StatsUtil.timerSumAllQueries(this::decodeGMUSesFromMarco, explainerName, "Decoding GMUSes", datasetName);
} else if (solver instanceof Sat4JSolver) {
StatsUtil.timer(this::solveGMUSviaSat4j, explainerName, "Computing GMUSes with sat4j", datasetName);
StatsUtil.timer(this::decodeGMUSesFromSat4j, explainerName, "Decoding GMUSes from sat4j", datasetName);
StatsUtil.timerSumAllQueries(this::solveGMUSviaSat4j, explainerName, "Computing GMUSes with sat4j", datasetName);
StatsUtil.timerSumAllQueries(this::decodeGMUSesFromSat4j, explainerName, "Decoding GMUSes from sat4j", datasetName);
}
} else if (queryInInitialFB) {
StatsUtil.timer(() -> returnExplanationForQueryInFB(query), explainerName, "Returning the explanation for query already in FB", datasetName);
StatsUtil.timerSumAllQueries(() -> returnExplanationForQueryInFB(query), explainerName, "Returning the explanation for query already in FB", datasetName);
} else {
returnEmptySetForExplanation();
}
}
}
public void pipelineIn2StepsWithTimer (String datasetName, Collection<Atom> queries) {
String explainerName = this.getClass().getSimpleName() + " + " + this.solver.getClass().getSimpleName();
StatsUtil.timerPerEachQuery(this::staticStep, explainerName, "Static Step", datasetName);
for (Atom query : queries) {
StatsUtil.timerPerEachQuery(() -> dynamicStep(query), explainerName, "Dynamic Step", datasetName);
}
}
/**
* Saturates the GRI with Tracing rules to compute relevant bodyAtoms and triggers for the query.
* Then calls the solver.
......
package fr.boreal.explanation.kb_gri.explainers.static_gri_baseline;
import com.google.common.collect.BiMap;
import fr.boreal.explanation.api.encoders.GSATEncodingResult_GRI;
import fr.boreal.explanation.api.explainers.AtomicQueryExplainer;
import fr.boreal.explanation.api.processors.ExplanationProcessor_GRI;
import fr.boreal.explanation.api.solver.Solver;
import fr.boreal.explanation.configuration.DefaultChaseForExplanations;
import fr.boreal.explanation.configuration.StatsUtil;
import fr.boreal.explanation.kb_gri.rule_transformation.GRIRuleTransformer;
import fr.boreal.explanation.kb_gri.rule_transformation.static_gri.RulesetForStaticGRIBuildingAndTracing;
import fr.boreal.explanation.solving_enumerating.marco.MARCOGMUSSolver;
import fr.boreal.explanation.solving_enumerating.sat4j.Sat4JSolver;
import fr.boreal.model.kb.api.KnowledgeBase;
import fr.boreal.model.kb.impl.KnowledgeBaseImpl;
import fr.boreal.model.logicalElements.api.Atom;
import fr.boreal.model.logicalElements.api.Predicate;
import fr.boreal.model.logicalElements.impl.AtomImpl;
import fr.boreal.model.rule.api.FORule;
import fr.boreal.storage.natives.SimpleInMemoryGraphStore;
import fr.boreal.views.FederatedFactBase;
import java.io.File;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import static fr.boreal.explanation.configuration.PathFinder.ensureFilePath;
public abstract class AbstractStaticGRIBasedExplainer_Baseline<ExplanationType> implements AtomicQueryExplainer<ExplanationType> {
/**
* Attributes to be defined in the constructor
*/
private final Solver solver;
protected KnowledgeBase inputKB;
private FederatedFactBase federatedFactBase;
/**
* RuleTransforming
*/
GRIRuleTransformer ruleTransformer = GRIRuleTransformer.instance();
private RulesetForStaticGRIBuildingAndTracing transformedRB;
/**
* Static and dynamic knowledge bases
*/
protected KnowledgeBase staticKB;
protected KnowledgeBase dynamicKB;
/**
* Intermediate objects from the pipeline
*/
// check if the static step is done
boolean staticStepDone = false;
// preprocessing
java.util.function.Predicate<Atom> belongsToInitialFactbase;
boolean queryEntailed = false;
boolean queryInInitialFB = false;
// encoding
GSATEncodingResult_GRI encodingResult;
// s4j
Sat4JSolver sat4JSolver = new Sat4JSolver();
List<List<List<Integer>>> sat4jGmuses;
// marco
MARCOGMUSSolver marcoSolver = new MARCOGMUSSolver();
File gcnffile = new File(ensureFilePath("integraal-explanation/gsat.gcnf", "gsat.gcnf"));
// decoding
protected Set explanations;
protected Set<String> gmuses;
/**
 * Builds a baseline static-GRI explainer over the given knowledge base.
 * Clears any state left in the shared rule transformer, keeps a reference to
 * the input KB, and wraps an empty in-memory store in a federated fact base.
 *
 * @param kb     the knowledge base whose entailments will be explained
 * @param solver the GMUS solver to use (MARCO or Sat4J)
 */
public AbstractStaticGRIBasedExplainer_Baseline(KnowledgeBase kb, Solver solver) {
ruleTransformer.clear();
inputKB = kb;
federatedFactBase = new FederatedFactBase(new SimpleInMemoryGraphStore());
this.solver = solver;
}
/**
 * Static (query-independent) step of the pipeline: transforms the rules,
 * prepares and chases the KB to build the GRI, snapshots the initial fact
 * base, and prepares the clause encoding. Run once before any query.
 */
public void staticStep() {
transformRules();
prepareKBforGRI();
chaseKBforGRI();
recordInitialFactbase();
prepareEncoding();
}
/**
 * Dynamic (per-query) step: clears state left over from the previous query,
 * tests whether the query is entailed / already an input fact, then computes
 * explanations with the configured solver.
 *
 * @param query the atomic query to explain
 */
public void dynamicStep(Atom query) {
    if (this.explanations != null) {
        dynamicStepClear();
    }
    checkQueryEntailment(query);
    checkQueryInInitialFB(query);
    // Use short-circuit '&&' (was bitwise '&'), consistent with the identical
    // check in pipelineWithTimerDetailed.
    if (queryEntailed && !queryInInitialFB) {
        encodeClauses(query);
        if (solver instanceof MARCOGMUSSolver) {
            writeMarcoGCNF();
            solveGMUSviaMarco();
            decodeGMUSesFromMarco();
        } else if (solver instanceof Sat4JSolver) {
            solveGMUSviaSat4j();
            decodeGMUSesFromSat4j();
        }
    } else if (queryInInitialFB) {
        // The query is an input fact: its explanation is itself.
        returnExplanationForQueryInFB(query);
    } else {
        // Not entailed: there is no explanation.
        returnEmptySetForExplanation();
    }
}
/**
 * Clears the explanations and GMUS result containers produced for the
 * previous query so the next dynamic step starts from a clean slate.
 * gmuses/sat4jGmuses may be null when the corresponding solver was not used.
 */
private void dynamicStepClear() {
explanations.clear();
if (gmuses != null) {
gmuses.clear();
}
if (sat4jGmuses != null) {
sat4jGmuses.clear();
}
}
/*
* Pipelines
*/
/**
 * Timed pipeline producing one timing record per pipeline stage: the static stages
 * run once, then the per-query dynamic stages are accumulated over all queries via
 * {@code StatsUtil.timerSumAllQueries}.
 * NOTE(review): this does not set {@code staticStepDone}, so a later call to
 * getAllExplanations() would redo the static step — confirm whether intended.
 */
public void pipelineWithTimerDetailed(String datasetName, Collection<Atom> queries) {
    String explainerName = this.getClass().getSimpleName() + " + " + this.solver.getClass().getSimpleName();
    // Static, query-independent stages (same order as staticStep()).
    StatsUtil.timerSumAllQueries(this::transformRules, explainerName, "Transforming rules", datasetName);
    StatsUtil.timerSumAllQueries(this::prepareKBforGRI, explainerName, "Preparing KB for GRI", datasetName);
    StatsUtil.timerSumAllQueries(this::chaseKBforGRI, explainerName, "Chasing on staticKB for GRI", datasetName);
    StatsUtil.timerSumAllQueries(this::recordInitialFactbase, explainerName, "Recording initial factbase", datasetName);
    StatsUtil.timerSumAllQueries(this::prepareEncoding, explainerName, "Preparing Encoding", datasetName);
    // Per-query dynamic stages (mirrors dynamicStep(), each call timed separately).
    for (Atom query : queries) {
        if (this.explanations != null) {
            StatsUtil.timerSumAllQueries(this::dynamicStepClear, explainerName,
                    "Clearing gmuses and explanations for the previous query",
                    datasetName);
        }
        StatsUtil.timerSumAllQueries(() -> checkQueryEntailment(query), explainerName, "Check if query is entailed", datasetName);
        StatsUtil.timerSumAllQueries(() -> checkQueryInInitialFB(query), explainerName, "Check if query is already in input FB", datasetName);
        if (queryEntailed && !queryInInitialFB) {
            StatsUtil.timerSumAllQueries(() -> encodeClauses(query), explainerName, "Encoding clauses", datasetName);
            // Solver-specific branch: MARCO needs an intermediate GCNF file, Sat4J is in-process.
            if (solver instanceof MARCOGMUSSolver) {
                StatsUtil.timerSumAllQueries(this::writeMarcoGCNF, explainerName, "Writing GCNF", datasetName);
                StatsUtil.timerMarcoAllQueries(this::solveGMUSviaMarco, explainerName, "Computing GMUSes", datasetName);
                StatsUtil.timerSumAllQueries(this::decodeGMUSesFromMarco, explainerName, "Decoding GMUSes", datasetName);
            } else if (solver instanceof Sat4JSolver) {
                StatsUtil.timerSumAllQueries(this::solveGMUSviaSat4j, explainerName, "Computing GMUSes with sat4j", datasetName);
                StatsUtil.timerSumAllQueries(this::decodeGMUSesFromSat4j, explainerName, "Decoding GMUSes from sat4j", datasetName);
            }
        } else if (queryInInitialFB) {
            StatsUtil.timerSumAllQueries(() -> returnExplanationForQueryInFB(query), explainerName, "Returning the explanation for query already in FB", datasetName);
        } else {
            // Not entailed: trivial, not worth timing.
            returnEmptySetForExplanation();
        }
    }
}
/**
 * Timed pipeline split in two phases: a single "Static Step" measurement,
 * then one "Dynamic Step" measurement per query.
 */
public void pipelineIn2StepsWithTimer(String datasetName, Collection<Atom> queries) {
    String solverLabel = this.solver.getClass().getSimpleName();
    String explainerName = this.getClass().getSimpleName() + " + " + solverLabel;
    StatsUtil.timerPerEachQuery(this::staticStep, explainerName, "Static Step", datasetName);
    for (Atom q : queries) {
        StatsUtil.timerPerEachQuery(() -> dynamicStep(q), explainerName, "Dynamic Step", datasetName);
    }
}
/**
 * Returns all explanations of the given ground atomic query.
 * Runs the static step (once per explainer instance), then the per-query
 * dynamic step, and exposes its result.
 *
 * @param query the ground atomic query to explain
 * @return the set of explanations computed by the dynamic step
 */
public Set<ExplanationType> getAllExplanations(Atom query) {
    ensureStaticStepDone();
    dynamicStep(query);
    return explanations;
}

/** Runs the static step exactly once per explainer instance. */
private void ensureStaticStepDone() {
    if (staticStepDone) {
        return;
    }
    staticStep();
    staticStepDone = true;
}
/** Rewrites the input rule base into the static-GRI-building/tracing rule sets. */
public void transformRules() {
    transformedRB = ruleTransformer.createStaticGRIBuildingRB(inputKB);
}
/** Pairs the untouched input facts with the transformed static (GRI-building) rules. */
public void prepareKBforGRI() {
    var staticRules = transformedRB.getStaticRuleBase();
    staticKB = new KnowledgeBaseImpl(inputKB.getFactBase(), staticRules);
}
/** Saturates the static KB, materializing the GRI in its factbase. */
public void chaseKBforGRI() {
    DefaultChaseForExplanations.chase(staticKB);
}
// Installs the membership test used by checkQueryInInitialFB.
// NOTE(review): the predicate queries staticKB's factbase lazily, and it is
// installed AFTER the chase in staticStep(); it therefore tests membership in the
// saturated factbase, not a pre-chase snapshot. This is presumably safe because
// the chase only adds annotated/GRI atoms rather than plain input-shaped atoms —
// confirm against GRIRuleTransformer's output format.
public void recordInitialFactbase() {
    belongsToInitialFactbase = atom -> staticKB.getFactBase().contains(atom);
}
/**
 * Records in {@code queryEntailed} whether the (function-term-annotated) query atom
 * is present in the saturated static factbase.
 * <p>
 * Fix: the flag is now (re)assigned on every call. The previous version only ever
 * set it to {@code true}, so once one query was entailed the stale flag made every
 * subsequent query in a multi-query pipeline be treated as entailed as well.
 *
 * @param query the ground atomic query
 */
public void checkQueryEntailment(Atom query) {
    Atom queryWithFuncId = GRIRuleTransformer.instance().createAtomWithStoredFnTermIdentifier(query);
    queryEntailed = staticKB.getFactBase().contains(queryWithFuncId);
}
/** Records that the query has no explanation (it is not entailed). */
public void returnEmptySetForExplanation() {
    // Canonical immutable empty set.
    this.explanations = Set.of();
}
/**
 * Records in {@code queryInInitialFB} whether the query atom is already present in
 * the initial factbase (as seen by {@code belongsToInitialFactbase}).
 * <p>
 * Fix: the flag is now (re)assigned on every call. The previous version only ever
 * set it to {@code true}, so once one query was found in the factbase the stale
 * flag made every subsequent query return the trivial "already in FB" explanation.
 *
 * @param query the ground atomic query
 */
public void checkQueryInInitialFB(Atom query) {
    queryInInitialFB = belongsToInitialFactbase.test(query);
}
/** Records the trivial explanation for a query that is itself an input fact. */
public void returnExplanationForQueryInFB(Atom query) {
    var processor = getGMUSProcessor(solver);
    explanations = processor.getQueryIsInTheIntitialFactbaseExplanation(query);
}
/**
 * Prepares the dynamic KB used to trace the atoms relevant to an input query.
 * <p>
 * The encoding factbase collects (i) one REL atom per identifier of a
 * "+"-annotated atom produced by the static chase (the identifier is the atom's
 * last term), and (ii) one relevant-edge atom per recorded rule-application edge.
 * It is paired with the dynamic (tracing) rule base; inferences land in this
 * local store, which can be easily cleared between queries.
 */
public void prepareEncoding() {
    SimpleInMemoryGraphStore encodingFB = new SimpleInMemoryGraphStore();
    BiMap<FORule, Predicate> edgePredicates = ruleTransformer.getRuleToEdgePredicateMap();
    BiMap<FORule, Predicate> relEdgePredicates = ruleTransformer.getRuleToRelEdgePredicateMap();
    // (i) every "+"-annotated fact contributes a REL atom over its identifier term.
    Iterator<Predicate> predicateIt = staticKB.getFactBase().getPredicates();
    while (predicateIt.hasNext()) {
        Predicate predicate = predicateIt.next();
        if (!predicate.label().endsWith("+")) {
            continue;
        }
        Iterator<Atom> factIt = staticKB.getFactBase().getAtomsByPredicate(predicate);
        while (factIt.hasNext()) {
            Atom fact = factIt.next();
            int idPosition = fact.getPredicate().arity() - 1;
            encodingFB.add(new AtomImpl(GRIRuleTransformer.REL, fact.getTerm(idPosition)));
        }
    }
    // (ii) every edge atom of an input rule is copied under its rel-edge predicate.
    for (FORule rule : inputKB.getRuleBase().getRules()) {
        Predicate edgePredicate = edgePredicates.get(rule);
        Predicate relEdgePredicate = relEdgePredicates.get(rule);
        Iterator<Atom> edgeIt = staticKB.getFactBase().getAtomsByPredicate(edgePredicate);
        while (edgeIt.hasNext()) {
            Atom edge = edgeIt.next();
            encodingFB.add(new AtomImpl(relEdgePredicate, edge.getTerms()));
        }
    }
    dynamicKB = new KnowledgeBaseImpl(encodingFB, transformedRB.getDynamicRuleBase());
}
/** Runs the GSAT encoder on the dynamic KB for the given query. */
public void encodeClauses(Atom query) {
    var processor = getGMUSProcessor(solver);
    encodingResult = processor.getEncoder().encode(dynamicKB, query, belongsToInitialFactbase);
}
/** Serializes the encoded clauses as a group-CNF file for the MARCO solver. */
public void writeMarcoGCNF() {
    var result = encodingResult;
    marcoSolver.writeGCNF(result.propVarIDMap(), result.clauses(), result.nbGroup());
}
/** Invokes MARCO on the written GCNF file and keeps its raw GMUS output. */
public void solveGMUSviaMarco() {
    this.gmuses = marcoSolver.getGMUSes(gcnffile);
}
/** Translates MARCO's raw GMUSes back into explanations via the processor. */
public void decodeGMUSesFromMarco() {
    var result = encodingResult;
    explanations = getGMUSProcessor(solver).translateMARCOGMUS(gmuses, result.factIDMap(), result.ruleIDMap());
}
/** Enumerates the GMUSes of the encoded clauses in-process with Sat4J. */
public void solveGMUSviaSat4j() {
    this.sat4jGmuses = sat4JSolver.enumerateGMUSes(encodingResult.clauses());
}
/** Translates Sat4J's raw GMUSes back into explanations via the processor. */
public void decodeGMUSesFromSat4j() {
    var result = encodingResult;
    explanations = getGMUSProcessor(solver).translateSAT4JGMUS(sat4jGmuses, result.factIDMap(), result.ruleIDMap());
}
abstract ExplanationProcessor_GRI getGMUSProcessor(Solver solver);
}
package fr.boreal.explanation.kb_gri.explainers.static_gri_baseline;
import fr.boreal.explanation.api.processors.ExplanationProcessor_GRI;
import fr.boreal.explanation.api.solver.Solver;
import fr.boreal.explanation.kb_gri.processors.FactSupportGMUSProcessor_KBGRI;
import fr.boreal.explanation.solving_enumerating.marco.MARCOGMUSSolver;
import fr.boreal.model.kb.api.FactBase;
import fr.boreal.model.kb.api.KnowledgeBase;
/**
 * Computes fact-support explanations for a knowledge base and a ground atomic query.
 * The explanation type is {@link FactBase}: each explanation is a set of input facts.
 */
public class FactSupportExplainer_Baseline extends AbstractStaticGRIBasedExplainer_Baseline<FactBase> {

    /** Uses the default MARCO-based GMUS solver. */
    public FactSupportExplainer_Baseline(KnowledgeBase kb) {
        this(kb, new MARCOGMUSSolver());
    }

    /** Uses the given GMUS solver backend. */
    public FactSupportExplainer_Baseline(KnowledgeBase kb, Solver solver) {
        super(kb, solver);
    }

    @Override
    ExplanationProcessor_GRI getGMUSProcessor(Solver solver) {
        return new FactSupportGMUSProcessor_KBGRI(solver);
    }
}
package fr.boreal.explanation.kb_gri.explainers.static_gri_baseline;
import fr.boreal.explanation.api.processors.ExplanationProcessor_GRI;
import fr.boreal.explanation.api.solver.Solver;
import fr.boreal.explanation.kb_gri.processors.KBSupportGMUSProcessor_KBGRI;
import fr.boreal.explanation.solving_enumerating.marco.MARCOGMUSSolver;
import fr.boreal.model.kb.api.KnowledgeBase;
/**
 * Computes kb-support explanations for a knowledge base and a ground atomic query.
 * The explanation type is {@link KnowledgeBase} (facts plus rules).
 */
public class KBSupportExplainer_Baseline extends AbstractStaticGRIBasedExplainer_Baseline<KnowledgeBase> {

    /** Uses the default MARCO-based GMUS solver. */
    public KBSupportExplainer_Baseline(KnowledgeBase kb) {
        this(kb, new MARCOGMUSSolver());
    }

    /** Uses the given GMUS solver backend. */
    public KBSupportExplainer_Baseline(KnowledgeBase kb, Solver solver) {
        super(kb, solver);
    }

    @Override
    ExplanationProcessor_GRI getGMUSProcessor(Solver solver) {
        return new KBSupportGMUSProcessor_KBGRI(solver);
    }
}
package fr.boreal.explanation.kb_gri.explainers.static_gri_baseline;
import fr.boreal.explanation.api.processors.ExplanationProcessor_GRI;
import fr.boreal.explanation.api.solver.Solver;
import fr.boreal.explanation.kb_gri.processors.RuleSupportGMUSProcessor_KBGRI;
import fr.boreal.explanation.solving_enumerating.marco.MARCOGMUSSolver;
import fr.boreal.model.kb.api.KnowledgeBase;
import fr.boreal.model.kb.api.RuleBase;
/**
 * Computes rule-support explanations for a knowledge base and a ground atomic query.
 * The explanation type is {@link RuleBase}: each explanation is a set of rules.
 */
public class RuleSupportExplainer_Baseline extends AbstractStaticGRIBasedExplainer_Baseline<RuleBase> {

    /** Uses the default MARCO-based GMUS solver. */
    public RuleSupportExplainer_Baseline(KnowledgeBase kb) {
        this(kb, new MARCOGMUSSolver());
    }

    /** Uses the given GMUS solver backend. */
    public RuleSupportExplainer_Baseline(KnowledgeBase kb, Solver solver) {
        super(kb, solver);
    }

    @Override
    ExplanationProcessor_GRI getGMUSProcessor(Solver solver) {
        return new RuleSupportGMUSProcessor_KBGRI(solver);
    }
}
......@@ -17,7 +17,13 @@ public class FactSupportProvenanceExplainer_KBGRI implements AtomicQueryExplaine
}
@Override
public void pipelineWithTimer(String datasetName, Collection<Atom> queries) {
public void pipelineWithTimerDetailed(String datasetName, Collection<Atom> queries) {
throw new NotImplementedException();
}
/** Timed two-step pipeline: not supported by this explainer. */
@Override
public void pipelineIn2StepsWithTimer(String datasetName, Collection<Atom> queries) {
    throw new NotImplementedException();
}
}
......@@ -3,14 +3,17 @@ package fr.boreal.explanation.pipeline_with_timer;
import fr.boreal.explanation.InternalDLGPParser;
import fr.boreal.explanation.api.explainers.AtomicQueryExplainer;
import fr.boreal.explanation.configuration.StatsUtil;
import fr.boreal.explanation.configuration.query_selection.QuerySelector;
import fr.boreal.explanation.kb_gri.explainers.incremental_gri.KBSupportExplainer_IncrementalKBGRI;
import fr.boreal.explanation.kb_gri.explainers.static_gri.FactSupportExplainer_KBGRI;
import fr.boreal.explanation.kb_gri.explainers.static_gri.KBSupportExplainer_KBGRI;
import fr.boreal.explanation.kb_gri.explainers.static_gri.RuleSupportExplainer_KBGRI;
import fr.boreal.explanation.kb_gri.explainers.static_gri_baseline.KBSupportExplainer_Baseline;
import fr.boreal.explanation.solving_enumerating.sat4j.Sat4JSolver;
import fr.boreal.explanation.tracker_gri.explainers.FactSupportExplainer_TrackerGRI;
import fr.boreal.explanation.tracker_gri.explainers.KBSupportExplainer_TrackerGRI;
import fr.boreal.explanation.tracker_gri.explainers.RuleSupportExplainer_TrackerGRI;
import fr.boreal.explanation.tracker_gri.explainers.UnambiguousProofTreeWhyProvenanceExplainer_TrackerGRI;
import fr.boreal.model.kb.api.KnowledgeBase;
import fr.boreal.model.logicalElements.api.Atom;
......@@ -23,47 +26,35 @@ public class compareExplainers {
/*
input
*/
static String benchPath = new File("").getAbsolutePath() + "/integraal/integraal-explanation/src/test/resources/AAAI_datasets/";
static String benchPath = "integraal-explanation/src/test/resources/ruleml24/";
// static String benchPath = "integraal-explanation/src/test/resources/mowl-bench-main/";
public static List<String> datasets = List.of(
// "algorithmTypes",
// "DataEncoding",
// "HARMONISE",
// "NEMO_annotation_properties",
// "Ontology1260916270538",
// "platformOntology",
// "snoopy",
// "transitive-ancestor",
// "catalogue",
// "ddex",
// "ncbi_rank",
// "oil",
// "Ontology-Kassel-Material",
// "PlatOntology",
// "socialsimulationdescription",
// "WNAffect"
// "andersen_1000",
"andersen_10000"//,
// "andersen_100000", // dropped as it is too large for now
// "andersen_5000",
// "andersen_50000",
// "csda_httpd", // dropped as it is too large for now
// "csda_linux", // dropped as it is too large for now
// "csda_postgresql", // dropped as it is too large for now
/* "doctors_q01_100k",
"doctors_q02_100k",
"doctors_q03_100k",
"doctors_q04_100k",
"doctors_q05_100k",
"doctors_q06_100k",
"doctors_q07_100k",
"galen_15",
"galen_25",
"galen_40",
"galen_50",
"graph_analytics_bitcoin",
"graph_analytics_facebook"*/
"warmup",
// "218a127c-8d13-40af-9957-63c28b0a091e_elontology",
// "3cb9dec1-627b-496f-b49a-543dd1c77837_snoopy",
// "b80a039c-8975-4928-b45f-017126159882_sification",
// "98523b14-b04e-4af0-9597-15a297e226eb_taEncoding",
// "d09d7487-43dd-452a-976a-b69bc44d9b06_onOntology",
// "08cb4085-d900-4f82-8fa8-f523e382008d_ESG_test_1",
// "04c0af47-ff23-4761-8a4e-8fec2389f89f_ESG_test_2",
// "0b1f5620-f71d-4a01-8a16-d1818f69c187_ncbi_rank",
// "7b46d780-2032-46e3-ac89-90ab183e22aa_properties",
// "bb6535f4-d6a3-4197-a678-7f464e737862_properties",
// "4c3c8c62-d8f4-42c0-a96c-c994ece58345_catalogue",
// "734871d0-bf0d-47b3-bc19-b484567f5f4f_rmOntology",
// "93c3774c-58c0-4fa9-a592-539c130ee2d7_AlgorithmTypes",
// "f8a8e732-205b-4703-b568-2ce69e524055_instances",
// "f32019cc-43f3-49ce-b4f2-235f7db97132_oil",
// "8ee7a7f5-ad8c-4ac5-afbe-207e3ff285c8_HARMONISE",
// "ddd6d023-65dd-429a-a01f-2784c1d61f65_WNAffect",
"2d09369e-392a-4ef2-a7c9-28334cbdc6f8_ddex",
"dce07111-010a-450b-8c3a-2d55a8b48a21_LSCOM",
"3b327e7f-0409-4f52-8e69-7062a62f7ba1_undin_area",
"c669669d-7a04-410c-9474-1f6e46230556_BioLinks",
"1dd81ef3-1e3e-4b2f-9bd0-230123497968_atOntology",
"80bf8685-0e15-4780-8e61-7130e4e777c6_00Ontology",
"ab726941-765f-4e5e-b6c9-2d51b652e841_00Ontology"
);
......@@ -71,21 +62,20 @@ public class compareExplainers {
static KnowledgeBase inputKB;
enum AtomicQueryExplainerType {
TRACKER_GRI_KB_MARCO,
// STATIC_GRI_FB_MARCO,
// STATIC_GRI_RB_MARCO,
STATIC_GRI_KB_MARCO,
STATIC_GRI_FB_MARCO,
STATIC_GRI_RB_MARCO,
STATIC_GRI_KB_S4J,
STATIC_GRI_FB_S4J,
STATIC_GRI_RB_S4J,
TRACKER_GRI_FB_MARCO,
TRACKER_GRI_RB_MARCO,
TRACKER_GRI_FB_S4J,
TRACKER_GRI_KB_S4J,
TRACKER_GRI_RB_S4J,
INCREMENTAL_GRI_KB_MARCO
// STATIC_GRI_KB_S4J,
// STATIC_GRI_FB_S4J,
// STATIC_GRI_RB_S4J,
// TRACKER_GRI_FB_MARCO,
// TRACKER_GRI_KB_MARCO,
// TRACKER_GRI_RB_MARCO,
// TRACKER_GRI_FB_S4J,
// TRACKER_GRI_KB_S4J,
// TRACKER_GRI_RB_S4J,
INCREMENTAL_GRI_KB_MARCO,
// BASELINE_GRI_KB_MARCO
// TRACKER_GRI_UNAMBIGUOUS_PROOF_TREE_WHY_PROVENANCE
}
......@@ -93,19 +83,20 @@ public class compareExplainers {
AtomicQueryExplainerType type,
KnowledgeBase kb) {
return switch (type) {
case STATIC_GRI_FB_MARCO -> new FactSupportExplainer_KBGRI(kb);
case STATIC_GRI_RB_MARCO -> new RuleSupportExplainer_KBGRI(kb);
// case STATIC_GRI_FB_MARCO -> new FactSupportExplainer_KBGRI(kb);
// case STATIC_GRI_RB_MARCO -> new RuleSupportExplainer_KBGRI(kb);
case STATIC_GRI_KB_MARCO -> new KBSupportExplainer_KBGRI(kb);
case STATIC_GRI_KB_S4J -> new KBSupportExplainer_KBGRI(kb, new Sat4JSolver());
case STATIC_GRI_FB_S4J -> new FactSupportExplainer_KBGRI(kb, new Sat4JSolver());
case STATIC_GRI_RB_S4J -> new RuleSupportExplainer_KBGRI(kb, new Sat4JSolver());
case TRACKER_GRI_FB_MARCO -> new FactSupportExplainer_TrackerGRI(kb);
case TRACKER_GRI_KB_MARCO -> new KBSupportExplainer_TrackerGRI(kb);
case TRACKER_GRI_RB_MARCO -> new RuleSupportExplainer_TrackerGRI(kb);
case TRACKER_GRI_FB_S4J -> new FactSupportExplainer_TrackerGRI(kb, new Sat4JSolver());
case TRACKER_GRI_KB_S4J -> new KBSupportExplainer_TrackerGRI(kb, new Sat4JSolver());
case TRACKER_GRI_RB_S4J -> new RuleSupportExplainer_TrackerGRI(kb, new Sat4JSolver());
// case STATIC_GRI_KB_S4J -> new KBSupportExplainer_KBGRI(kb, new Sat4JSolver());
// case STATIC_GRI_FB_S4J -> new FactSupportExplainer_KBGRI(kb, new Sat4JSolver());
// case STATIC_GRI_RB_S4J -> new RuleSupportExplainer_KBGRI(kb, new Sat4JSolver());
// case TRACKER_GRI_FB_MARCO -> new FactSupportExplainer_TrackerGRI(kb);
// case TRACKER_GRI_KB_MARCO -> new KBSupportExplainer_TrackerGRI(kb);
// case TRACKER_GRI_RB_MARCO -> new RuleSupportExplainer_TrackerGRI(kb);
// case TRACKER_GRI_FB_S4J -> new FactSupportExplainer_TrackerGRI(kb, new Sat4JSolver());
// case TRACKER_GRI_KB_S4J -> new KBSupportExplainer_TrackerGRI(kb, new Sat4JSolver());
// case TRACKER_GRI_RB_S4J -> new RuleSupportExplainer_TrackerGRI(kb, new Sat4JSolver());
case INCREMENTAL_GRI_KB_MARCO -> new KBSupportExplainer_IncrementalKBGRI(kb);
// case BASELINE_GRI_KB_MARCO -> new KBSupportExplainer_Baseline(kb);
// case TRACKER_GRI_UNAMBIGUOUS_PROOF_TREE_WHY_PROVENANCE -> new UnambiguousProofTreeWhyProvenanceExplainer_TrackerGRI(kb);
};
}
......@@ -114,26 +105,35 @@ public class compareExplainers {
StatsUtil.clearStats();
for (AtomicQueryExplainerType type : AtomicQueryExplainerType.values()) {
for (String datasetName : datasets) {
inputKB = loadKB(datasetName);
Collection<Atom> queries = loadQuery(datasetName);
AtomicQueryExplainer<?> explainer = getExplainer(type, inputKB);
// Collection<Atom> queries = loadQuery(datasetName);
Collection<Atom> queries = QuerySelector.selectQueries(inputKB);
System.out.println("Loading KB completed");
explainer.pipelineWithTimer(datasetName, queries);
StatsUtil.recordFBSize(datasetName, inputKB.getFactBase().size());
StatsUtil.recordRBSize(datasetName, inputKB.getRuleBase().getRules().size());
explainer.pipelineWithTimerDetailed(datasetName, queries);
// explainer.pipelineIn2StepsWithTimer(datasetName, queries);
}
}
StatsUtil.exportPivotStatsToCSV("timings.csv");
StatsUtil.exportPivotStatsToCSV("ruleml24_timings.csv");
}
static KnowledgeBase loadKB(String ontology_name) throws FileNotFoundException {
return InternalDLGPParser.ParserForDLGP.parseKB(
new File(benchPath +ontology_name+"/"+ontology_name+".dlgp"));
new File(benchPath +ontology_name+"/"+ontology_name+"_kb.dlgp"));
}
static Collection<Atom> loadQuery(String ontology_name) throws FileNotFoundException {
return InternalDLGPParser.ParserForDLGP.parseQueries(
new File(benchPath +ontology_name+"/"+ontology_name+"_query_5.dlgp"));
new File(benchPath +ontology_name+"/"+ontology_name+"_queries.dlgp"));
}
}
......@@ -106,9 +106,9 @@ public class MARCOGMUSSolver implements Solver {
ProcessBuilder build = new ProcessBuilder().redirectErrorStream(true);
String command;
if (timeout == 0)
build.command(ensureFilePath("integraal-explanation/MARCO-MUS/marco.py", "MARCO-MUS/marco.py"), "-v", "-b", "MUSes", gcnfFile.getAbsolutePath(), " -a");
build.command(ensureFilePath("integraal-explanation/MARCO-MUS/marco.py", "MARCO-MUS/marco.py"), "-s", "-v", "-b", "MUSes", gcnfFile.getAbsolutePath());
else
build.command(ensureFilePath("integraal-explanation/MARCO-MUS/marco.py", "MARCO-MUS/marco.py"), "-v", "-b", "MUSes", "-T", Long.toString(timeout), gcnfFile.getAbsolutePath(), " -a");
build.command(ensureFilePath("integraal-explanation/MARCO-MUS/marco.py", "MARCO-MUS/marco.py"), "-a", "-v", "-b", "MUSes", "-T", Long.toString(timeout), gcnfFile.getAbsolutePath());
Process proc;
try {
proc = build.start();
......
......@@ -119,37 +119,46 @@ public abstract class AbstractStaticGRIBasedExplainer_TrackerGRI<ExplanationType
}
}
/**
 * Timed pipeline split in two phases: a single "Static Step" measurement,
 * then one "Dynamic Step" measurement per query.
 */
@Override
public void pipelineIn2StepsWithTimer (String datasetName, Collection<Atom> queries) {
    String explainerName = this.getClass().getSimpleName() + " + " + this.solver.getClass().getSimpleName();
    StatsUtil.timerPerEachQuery(this::staticStep, explainerName, "Static Step", datasetName);
    for (Atom query : queries) {
        StatsUtil.timerPerEachQuery(() -> dynamicStep(query), explainerName, "Dynamic Step", datasetName);
    }
}
public void pipelineWithTimer(String datasetName, Collection<Atom> queries) {
@Override
public void pipelineWithTimerDetailed(String datasetName, Collection<Atom> queries) {
String explainerName = this.getClass().getSimpleName() + " + " + this.solver.getClass().getSimpleName();
StatsUtil.timer(this::recordInitialFactbase, explainerName, "Recording initial factbase", datasetName);
StatsUtil.timer(this::trackerChase, explainerName, "Chase for building lineage tracker", datasetName);
StatsUtil.timerSumAllQueries(this::recordInitialFactbase, explainerName, "Recording initial factbase", datasetName);
StatsUtil.timerSumAllQueries(this::trackerChase, explainerName, "Chase for building lineage tracker", datasetName);
for (Atom query : queries) {
if (this.explanations != null) {
StatsUtil.timer(this::dynamicStepClear, explainerName,
StatsUtil.timerSumAllQueries(this::dynamicStepClear, explainerName,
"Clearing gmuses and explanations for the previous query",
datasetName);
}
StatsUtil.timer(() -> checkQueryEntailment(query), explainerName, "Check if query is entailed", datasetName);
StatsUtil.timer(() -> checkQueryInInitialFB(query), explainerName, "Check if query is already in input FB", datasetName);
StatsUtil.timerSumAllQueries(() -> checkQueryEntailment(query), explainerName, "Check if query is entailed", datasetName);
StatsUtil.timerSumAllQueries(() -> checkQueryInInitialFB(query), explainerName, "Check if query is already in input FB", datasetName);
if (queryEntailed && !queryInInitialFB) {
StatsUtil.timer(() -> encodeTrackerClauses(query), explainerName, "Preparing Rel Tracing", datasetName);
StatsUtil.timerSumAllQueries(() -> encodeTrackerClauses(query), explainerName, "Preparing Rel Tracing", datasetName);
if (solver instanceof MARCOGMUSSolver) {
StatsUtil.timer(this::writeMarcoGCNF, explainerName, "Writing GCNF", datasetName);
StatsUtil.timer(this::solveGMUSviaMarco, explainerName, "Computing GMUSes", datasetName);
StatsUtil.timer(this::decodeGMUSesFromMarco, explainerName, "Decoding GMUSes", datasetName);
StatsUtil.timerSumAllQueries(this::writeMarcoGCNF, explainerName, "Writing GCNF", datasetName);
StatsUtil.timerSumAllQueries(this::solveGMUSviaMarco, explainerName, "Computing GMUSes", datasetName);
StatsUtil.timerSumAllQueries(this::decodeGMUSesFromMarco, explainerName, "Decoding GMUSes", datasetName);
} else if (solver instanceof Sat4JSolver) {
StatsUtil.timer(this::solveGMUSviaSat4j, explainerName, "Computing GMUSes with sat4j", datasetName);
StatsUtil.timer(this::decodeGMUSesFromSat4j, explainerName, "Decoding GMUSes from sat4j", datasetName);
StatsUtil.timerSumAllQueries(this::solveGMUSviaSat4j, explainerName, "Computing GMUSes with sat4j", datasetName);
StatsUtil.timerSumAllQueries(this::decodeGMUSesFromSat4j, explainerName, "Decoding GMUSes from sat4j", datasetName);
}
} else if (queryInInitialFB) {
StatsUtil.timer(() -> returnExplanationForQueryInFB(query), explainerName, "Preparing Rel Tracing", datasetName);
StatsUtil.timerSumAllQueries(() -> returnExplanationForQueryInFB(query), explainerName, "Preparing Rel Tracing", datasetName);
} else {
returnEmptySetForExplanation();
}
......
......@@ -82,7 +82,12 @@ public class UnambiguousProofTreeWhyProvenanceExplainer_TrackerGRI implements At
}
@Override
public void pipelineWithTimer(String datasetName, Collection<Atom> queries) {
public void pipelineWithTimerDetailed(String datasetName, Collection<Atom> queries) {
throw new NotImplementedException();
}
/** Timed two-step pipeline: not supported by this explainer. */
@Override
public void pipelineIn2StepsWithTimer(String datasetName, Collection<Atom> queries) {
    throw new NotImplementedException();
}
......
......@@ -17,4 +17,8 @@ module fr.boreal.explanation {
requires jfreechart;
requires com.opencsv;
requires commons.collections;
exports fr.boreal.explanation.api.explainers;
exports fr.boreal.explanation.kb_gri.explainers.static_gri;
exports fr.boreal.explanation.kb_gri.explainers.incremental_gri;
exports fr.boreal.explanation.tracker_gri.explainers;
}
\ No newline at end of file
......@@ -103,13 +103,13 @@ public class TestData {
public static String mowl_bench_path = "./src/test/resources/mowl-bench-main/";
public static List<String> default_mowl_bench_datasets = List.of(
"algorithmTypes"
// "DataEncoding",
// "HARMONISE",
// "NEMO_annotation_properties",
// "Ontology1260916270538",
// "platformOntology",
// "snoopy",
"algorithmTypes",
"DataEncoding",
"HARMONISE",
"NEMO_annotation_properties",
"Ontology1260916270538",
"platformOntology",
"snoopy"
// "transitive-ancestor"
// "catalogue",
// "ddex",
......
......@@ -4,6 +4,7 @@ import fr.boreal.explanation.InternalDLGPParser;
import fr.boreal.explanation.api.explainers.AtomicQueryExplainer;
import fr.boreal.explanation.kb_gri.explainers.incremental_gri.KBSupportExplainer_IncrementalKBGRI;
import fr.boreal.explanation.kb_gri.explainers.static_gri.KBSupportExplainer_KBGRI;
import fr.boreal.explanation.kb_gri.explainers.static_gri_baseline.KBSupportExplainer_Baseline;
import fr.boreal.explanation.solving_enumerating.sat4j.Sat4JSolver;
import fr.boreal.explanation.tracker_gri.explainers.KBSupportExplainer_TrackerGRI;
import fr.boreal.model.kb.api.KnowledgeBase;
......@@ -36,7 +37,8 @@ public class KBSupport_MowlOntologyTest {
STATIC_GRI_KB_S4J,
TRACKER_STATIC_GRI_KB_MARCO,
TRACKER_STATIC_GRI_KB_S4J,
INCREMENTAL_GRI_KB_MARCO
INCREMENTAL_GRI_KB_MARCO,
BASELINE_GRI_KB_MARCO
}
static AtomicQueryExplainer<?> getExplainer(
AtomicQueryExplainerType type,
......@@ -47,6 +49,7 @@ public class KBSupport_MowlOntologyTest {
case TRACKER_STATIC_GRI_KB_MARCO -> new KBSupportExplainer_TrackerGRI(kb);
case TRACKER_STATIC_GRI_KB_S4J -> new KBSupportExplainer_TrackerGRI(kb, new Sat4JSolver());
case INCREMENTAL_GRI_KB_MARCO -> new KBSupportExplainer_IncrementalKBGRI(kb);
case BASELINE_GRI_KB_MARCO -> new KBSupportExplainer_Baseline(kb);
};
}
......
package fr.boreal.test.explanation.unit_tests.explainer.kb_gri_baseline_marco;
import fr.boreal.explanation.kb_gri.explainers.static_gri.FactSupportExplainer_KBGRI;
import fr.boreal.explanation.kb_gri.explainers.static_gri_baseline.FactSupportExplainer_Baseline;
import fr.boreal.explanation.kb_gri.rule_transformation.GRIRuleTransformer;
import fr.boreal.model.kb.api.FactBase;
import fr.boreal.model.kb.api.KnowledgeBase;
import fr.boreal.model.kb.api.RuleBase;
import fr.boreal.model.kb.impl.KnowledgeBaseImpl;
import fr.boreal.model.kb.impl.RuleBaseImpl;
import fr.boreal.model.logicalElements.api.Atom;
import fr.boreal.model.logicalElements.api.Predicate;
import fr.boreal.storage.natives.SimpleInMemoryGraphStore;
import fr.boreal.test.explanation.TestData;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import java.util.List;
import java.util.Set;
public class FactSupportExplainerTest {
FactSupportExplainer_Baseline explainer;
GRIRuleTransformer griRuleTransformer = GRIRuleTransformer.instance();
Predicate REL = GRIRuleTransformer.REL;
@BeforeEach
public void setUp() {
}
@Test
public void FactSupportExplainerTestCyclic3() {
Atom query = TestData.ta;
FactBase factBase = new SimpleInMemoryGraphStore(List.of(TestData.pa));
RuleBase ruleBase = new RuleBaseImpl(List.of(TestData.r1_px_qx,TestData.r10_qx_tx, TestData.r11_tx_px));
KnowledgeBase kb = new KnowledgeBaseImpl(factBase, ruleBase);
explainer = new FactSupportExplainer_Baseline(kb);
Set<FactBase> explanations = explainer.getAllExplanations(query);
FactBase expectedFactBase1 = new SimpleInMemoryGraphStore(Set.of(TestData.pa));
Assertions.assertTrue(explanations.contains(expectedFactBase1), "(Completeness issue) Explanation missing: " + expectedFactBase1);
Assertions.assertEquals(1, explanations.size(), "(Soundness issue) identify non-explanation as explanation");
}
@Test
public void FactSupportExplainerTestCyclic2() {
Atom query = TestData.qa;
FactBase factBase = new SimpleInMemoryGraphStore(List.of(TestData.pa));
RuleBase ruleBase = new RuleBaseImpl(List.of(TestData.r1_px_qx,TestData.r10_qx_tx, TestData.r11_tx_px));
KnowledgeBase kb = new KnowledgeBaseImpl(factBase, ruleBase);
explainer = new FactSupportExplainer_Baseline(kb);
Set<FactBase> explanations = explainer.getAllExplanations(query);
FactBase expectedFactBase1 = new SimpleInMemoryGraphStore(Set.of(TestData.pa));
Assertions.assertTrue(explanations.contains(expectedFactBase1), "(Completeness issue) Explanation missing: " + expectedFactBase1);
Assertions.assertEquals(1, explanations.size(), "(Soundness issue) identify non-explanation as explanation");
}
@Test
public void FactSupportExplainerTestCyclic1() {
Atom query = TestData.qa;
FactBase factBase = new SimpleInMemoryGraphStore(List.of(TestData.pa));
RuleBase ruleBase = new RuleBaseImpl(List.of(TestData.r1_px_qx,TestData.r9_qx_px));
KnowledgeBase kb = new KnowledgeBaseImpl(factBase, ruleBase);
explainer = new FactSupportExplainer_Baseline(kb);
Set<FactBase> explanations = explainer.getAllExplanations(query);
FactBase expectedFactBase1 = new SimpleInMemoryGraphStore(Set.of(TestData.pa));
Assertions.assertTrue(explanations.contains(expectedFactBase1), "(Completeness issue) Explanation missing: " + expectedFactBase1);
Assertions.assertEquals(1, explanations.size(), "(Soundness issue) identify non-explanation as explanation");
}
@Test
public void FactSupportExplainerTest4() {
Atom query = TestData.pa;
FactBase factBase = new SimpleInMemoryGraphStore(List.of(TestData.saa, TestData.sab));
RuleBase ruleBase = new RuleBaseImpl(List.of(TestData.r6_sxy_px,TestData.r7_sxx_px));
KnowledgeBase kb = new KnowledgeBaseImpl(factBase, ruleBase);
explainer = new FactSupportExplainer_Baseline(kb);
Set<FactBase> explanations = explainer.getAllExplanations(query);
FactBase expectedFactBase1 = new SimpleInMemoryGraphStore(Set.of(TestData.saa));
FactBase expectedFactBase2 = new SimpleInMemoryGraphStore(Set.of(TestData.sab));
Assertions.assertTrue(explanations.contains(expectedFactBase1), "(Completeness issue) Explanation missing: " + expectedFactBase1);
Assertions.assertTrue(explanations.contains(expectedFactBase2), "(Completeness issue) Explanation missing: " + expectedFactBase2);
Assertions.assertEquals(2, explanations.size(), "(Soundness issue) identify non-explanation as explanation");
}
@Test
public void FactSupportExplainerTest3() {
Atom query = TestData.qa;
FactBase factBase = new SimpleInMemoryGraphStore(List.of(TestData.saa, TestData.sbc, TestData.tab));
RuleBase ruleBase = new RuleBaseImpl(List.of(TestData.r6_sxy_px, TestData.r7_sxx_px, TestData.r8_pxpytxy_qx));
KnowledgeBase kb = new KnowledgeBaseImpl(factBase, ruleBase);
explainer = new FactSupportExplainer_Baseline(kb);
Set<FactBase> explanations = explainer.getAllExplanations(query);
FactBase expectedFactBase = new SimpleInMemoryGraphStore(Set.of(TestData.saa, TestData.sbc, TestData.tab));
Assertions.assertTrue(explanations.contains(expectedFactBase), "(Completeness issue) Explanation missing: " + expectedFactBase);
Assertions.assertEquals(1, explanations.size(), "(Soundness issue) identify non-explanation as explanation");
}
@Test
public void FactSupportExplainerTest2() {
Atom query = TestData.qa;
FactBase factBase = new SimpleInMemoryGraphStore(List.of(TestData.saa, TestData.sbc, TestData.tab));
RuleBase ruleBase = new RuleBaseImpl(List.of(TestData.r6_sxy_px, TestData.r7_sxx_px, TestData.r8_pxpytxy_qx));
KnowledgeBase kb = new KnowledgeBaseImpl(factBase, ruleBase);
explainer = new FactSupportExplainer_Baseline(kb);
Set<FactBase> explanations = explainer.getAllExplanations(query);
FactBase expectedFactBase = new SimpleInMemoryGraphStore(Set.of(TestData.saa, TestData.sbc, TestData.tab));
Assertions.assertTrue(explanations.contains(expectedFactBase), "(Completeness issue) Explanation missing: " + expectedFactBase);
Assertions.assertEquals(1, explanations.size(), "(Soundness issue) identify non-explanation as explanation");
}
@Test
public void FactSupportExplainerTest1() {
Atom query = TestData.qa;
FactBase factBase = new SimpleInMemoryGraphStore(List.of(TestData.pa, TestData.ta));
RuleBase ruleBase = new RuleBaseImpl(List.of(TestData.r1_px_qx, TestData.r4_qx_txy));
KnowledgeBase kb = new KnowledgeBaseImpl(factBase, ruleBase);
explainer = new FactSupportExplainer_Baseline(kb);
Set<FactBase> explanations = explainer.getAllExplanations(query);
FactBase expectedFactBase = new SimpleInMemoryGraphStore(Set.of(TestData.pa));
Assertions.assertTrue(explanations.contains(expectedFactBase), "(Completeness issue) Explanation missing: " + expectedFactBase);
Assertions.assertEquals(1, explanations.size(), "(Soundness issue) identify non-explanation as explanation");
}
@Test
public void QueryNotEntailedTest() {
Atom query = TestData.sa;
FactBase factBase = new SimpleInMemoryGraphStore(List.of(TestData.pa, TestData.ta));
RuleBase ruleBase = new RuleBaseImpl(List.of(TestData.r1_px_qx, TestData.r4_qx_txy));
KnowledgeBase kb = new KnowledgeBaseImpl(factBase, ruleBase);
explainer = new FactSupportExplainer_Baseline(kb);
Set<FactBase> explanations = explainer.getAllExplanations(query);
Assertions.assertEquals(0, explanations.size(), "(Soundness issue) identify non-explanation as explanation");
}
@Test
public void QueryInFactBaseTest() {
    // The query p(a) already appears in the fact base; its only fact support
    // should be the singleton {p(a)} itself.
    FactBase facts = new SimpleInMemoryGraphStore(List.of(TestData.pa, TestData.ta));
    RuleBase rules = new RuleBaseImpl(List.of(TestData.r1_px_qx, TestData.r4_qx_txy));
    explainer = new FactSupportExplainer_Baseline(new KnowledgeBaseImpl(facts, rules));

    Set<FactBase> actual = explainer.getAllExplanations(TestData.pa);

    FactBase expected = new SimpleInMemoryGraphStore(Set.of(TestData.pa));
    Assertions.assertTrue(actual.contains(expected),
            "(Completeness issue) Explanation missing: " + expected);
    Assertions.assertEquals(1, actual.size(),
            "(Soundness issue) identify non-explanation as explanation");
}
}
package fr.boreal.test.explanation.unit_tests.explainer.kb_gri_baseline_marco;
import fr.boreal.explanation.kb_gri.explainers.static_gri.KBSupportExplainer_KBGRI;
import fr.boreal.explanation.kb_gri.explainers.static_gri_baseline.KBSupportExplainer_Baseline;
import fr.boreal.explanation.kb_gri.rule_transformation.GRIRuleTransformer;
import fr.boreal.model.kb.api.FactBase;
import fr.boreal.model.kb.api.KnowledgeBase;
import fr.boreal.model.kb.api.RuleBase;
import fr.boreal.model.kb.impl.KnowledgeBaseImpl;
import fr.boreal.model.kb.impl.RuleBaseImpl;
import fr.boreal.model.logicalElements.api.Atom;
import fr.boreal.model.logicalElements.api.Predicate;
import fr.boreal.storage.natives.SimpleInMemoryGraphStore;
import fr.boreal.test.explanation.TestData;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import java.util.List;
import java.util.Set;
/**
 * Unit tests for {@link KBSupportExplainer_Baseline}.
 *
 * <p>Each test builds a small knowledge base (fact base + rule base), asks the
 * explainer for all KB supports of a ground atomic query, and checks the result
 * for completeness (every expected support is returned) and soundness (no extra
 * support is returned).
 *
 * <p>Cleanup vs. previous revision: removed the unused {@code griRuleTransformer}
 * and {@code REL} fields and the empty {@code @BeforeEach setUp()} method —
 * none of them was referenced by any test.
 */
public class KBSupportExplainerTest {

    /** Explainer under test; re-created in each test with that test's KB. */
    KBSupportExplainer_Baseline explainer;

    /** Cyclic rules p->q->t->p: the support of t(a) must not include the cycle-closing rule. */
    @Test
    public void KBSupportExplainerTestCyclic3() {
        Atom query = TestData.ta;
        FactBase factBase = new SimpleInMemoryGraphStore(List.of(TestData.pa));
        RuleBase ruleBase = new RuleBaseImpl(List.of(TestData.r1_px_qx, TestData.r10_qx_tx, TestData.r11_tx_px));
        KnowledgeBase kb = new KnowledgeBaseImpl(factBase, ruleBase);
        explainer = new KBSupportExplainer_Baseline(kb);
        Set<KnowledgeBase> explanations = explainer.getAllExplanations(query);

        // Expected: {p(a)} with the two rules actually used to derive t(a).
        FactBase expectedFactBase1 = new SimpleInMemoryGraphStore(Set.of(TestData.pa));
        RuleBase expectedRuleBase1 = new RuleBaseImpl(Set.of(TestData.r1_px_qx, TestData.r10_qx_tx));
        KnowledgeBase expectedExpl1 = new KnowledgeBaseImpl(expectedFactBase1, expectedRuleBase1);
        Assertions.assertTrue(explanations.contains(expectedExpl1), "(Completeness issue) Explanation missing: " + expectedExpl1);
        Assertions.assertEquals(1, explanations.size(), "(Soundness issue) identify non-explanation as explanation");
    }

    /** Cyclic rules p->q->t->p: the support of q(a) needs only the first rule. */
    @Test
    public void KBSupportExplainerTestCyclic2() {
        Atom query = TestData.qa;
        FactBase factBase = new SimpleInMemoryGraphStore(List.of(TestData.pa));
        RuleBase ruleBase = new RuleBaseImpl(List.of(TestData.r1_px_qx, TestData.r10_qx_tx, TestData.r11_tx_px));
        KnowledgeBase kb = new KnowledgeBaseImpl(factBase, ruleBase);
        explainer = new KBSupportExplainer_Baseline(kb);
        Set<KnowledgeBase> explanations = explainer.getAllExplanations(query);

        FactBase expectedFactBase1 = new SimpleInMemoryGraphStore(Set.of(TestData.pa));
        RuleBase expectedRuleBase1 = new RuleBaseImpl(Set.of(TestData.r1_px_qx));
        KnowledgeBase expectedExpl1 = new KnowledgeBaseImpl(expectedFactBase1, expectedRuleBase1);
        Assertions.assertTrue(explanations.contains(expectedExpl1), "(Completeness issue) Explanation missing: " + expectedExpl1);
        Assertions.assertEquals(1, explanations.size(), "(Soundness issue) identify non-explanation as explanation");
    }

    /** Two-rule cycle p<->q: the cycle-closing rule must not appear in the support. */
    @Test
    public void KBSupportExplainerTestCyclic1() {
        Atom query = TestData.qa;
        FactBase factBase = new SimpleInMemoryGraphStore(List.of(TestData.pa));
        RuleBase ruleBase = new RuleBaseImpl(List.of(TestData.r1_px_qx, TestData.r9_qx_px));
        KnowledgeBase kb = new KnowledgeBaseImpl(factBase, ruleBase);
        explainer = new KBSupportExplainer_Baseline(kb);
        Set<KnowledgeBase> explanations = explainer.getAllExplanations(query);

        FactBase expectedFactBase1 = new SimpleInMemoryGraphStore(Set.of(TestData.pa));
        RuleBase expectedRuleBase1 = new RuleBaseImpl(Set.of(TestData.r1_px_qx));
        KnowledgeBase expectedExpl1 = new KnowledgeBaseImpl(expectedFactBase1, expectedRuleBase1);
        Assertions.assertTrue(explanations.contains(expectedExpl1), "(Completeness issue) Explanation missing: " + expectedExpl1);
        Assertions.assertEquals(1, explanations.size(), "(Soundness issue) identify non-explanation as explanation");
    }

    /** p(a) is derivable in three distinct ways; all three KB supports must be returned. */
    @Test
    public void KBSupportExplainerTest4() {
        Atom query = TestData.pa;
        FactBase factBase = new SimpleInMemoryGraphStore(List.of(TestData.saa, TestData.sab));
        RuleBase ruleBase = new RuleBaseImpl(List.of(TestData.r6_sxy_px, TestData.r7_sxx_px));
        KnowledgeBase kb = new KnowledgeBaseImpl(factBase, ruleBase);
        explainer = new KBSupportExplainer_Baseline(kb);
        Set<KnowledgeBase> explanations = explainer.getAllExplanations(query);

        // s(a,a) with s(x,y)->p(x)
        FactBase expectedFactBase1 = new SimpleInMemoryGraphStore(Set.of(TestData.saa));
        RuleBase expectedRuleBase1 = new RuleBaseImpl(Set.of(TestData.r6_sxy_px));
        KnowledgeBase expectedExpl1 = new KnowledgeBaseImpl(expectedFactBase1, expectedRuleBase1);
        // s(a,b) with s(x,y)->p(x)
        FactBase expectedFactBase2 = new SimpleInMemoryGraphStore(Set.of(TestData.sab));
        RuleBase expectedRuleBase2 = new RuleBaseImpl(Set.of(TestData.r6_sxy_px));
        KnowledgeBase expectedExpl2 = new KnowledgeBaseImpl(expectedFactBase2, expectedRuleBase2);
        // s(a,a) with s(x,x)->p(x)
        FactBase expectedFactBase3 = new SimpleInMemoryGraphStore(Set.of(TestData.saa));
        RuleBase expectedRuleBase3 = new RuleBaseImpl(Set.of(TestData.r7_sxx_px));
        KnowledgeBase expectedExpl3 = new KnowledgeBaseImpl(expectedFactBase3, expectedRuleBase3);

        Assertions.assertTrue(explanations.contains(expectedExpl1), "(Completeness issue) Explanation missing: " + expectedExpl1);
        Assertions.assertTrue(explanations.contains(expectedExpl2), "(Completeness issue) Explanation missing: " + expectedExpl2);
        Assertions.assertTrue(explanations.contains(expectedExpl3), "(Completeness issue) Explanation missing: " + expectedExpl3);
        Assertions.assertEquals(3, explanations.size(), "(Soundness issue) identify non-explanation as explanation");
    }

    /**
     * Multi-atom rule body: the single support of q(a) uses all three facts.
     * NOTE(review): this test is byte-identical to {@link #KBSupportExplainerTest2()};
     * consider consolidating or differentiating the two scenarios.
     */
    @Test
    public void KBSupportExplainerTest3() {
        Atom query = TestData.qa;
        FactBase factBase = new SimpleInMemoryGraphStore(List.of(TestData.saa, TestData.sbc, TestData.tab));
        RuleBase ruleBase = new RuleBaseImpl(List.of(TestData.r6_sxy_px, TestData.r7_sxx_px, TestData.r8_pxpytxy_qx));
        KnowledgeBase kb = new KnowledgeBaseImpl(factBase, ruleBase);
        explainer = new KBSupportExplainer_Baseline(kb);
        Set<KnowledgeBase> explanations = explainer.getAllExplanations(query);

        FactBase expectedFactBase = new SimpleInMemoryGraphStore(Set.of(TestData.saa, TestData.sbc, TestData.tab));
        RuleBase expectedRuleBase = new RuleBaseImpl(Set.of(TestData.r6_sxy_px, TestData.r8_pxpytxy_qx));
        KnowledgeBase expectedExpl = new KnowledgeBaseImpl(expectedFactBase, expectedRuleBase);
        Assertions.assertTrue(explanations.contains(expectedExpl), "(Completeness issue) Explanation missing: " + expectedExpl);
        Assertions.assertEquals(1, explanations.size(), "(Soundness issue) identify non-explanation as explanation");
    }

    /** Multi-atom rule body: the single support of q(a) uses all three facts. */
    @Test
    public void KBSupportExplainerTest2() {
        Atom query = TestData.qa;
        FactBase factBase = new SimpleInMemoryGraphStore(List.of(TestData.saa, TestData.sbc, TestData.tab));
        RuleBase ruleBase = new RuleBaseImpl(List.of(TestData.r6_sxy_px, TestData.r7_sxx_px, TestData.r8_pxpytxy_qx));
        KnowledgeBase kb = new KnowledgeBaseImpl(factBase, ruleBase);
        explainer = new KBSupportExplainer_Baseline(kb);
        Set<KnowledgeBase> explanations = explainer.getAllExplanations(query);

        FactBase expectedFactBase = new SimpleInMemoryGraphStore(Set.of(TestData.saa, TestData.sbc, TestData.tab));
        RuleBase expectedRuleBase = new RuleBaseImpl(Set.of(TestData.r6_sxy_px, TestData.r8_pxpytxy_qx));
        KnowledgeBase expectedExpl = new KnowledgeBaseImpl(expectedFactBase, expectedRuleBase);
        Assertions.assertTrue(explanations.contains(expectedExpl), "(Completeness issue) Explanation missing: " + expectedExpl);
        Assertions.assertEquals(1, explanations.size(), "(Soundness issue) identify non-explanation as explanation");
    }

    /** Single derivation step: support of q(a) is {p(a)} with rule p(x)->q(x). */
    @Test
    public void KBSupportExplainerTest1() {
        Atom query = TestData.qa;
        FactBase factBase = new SimpleInMemoryGraphStore(List.of(TestData.pa, TestData.ta));
        RuleBase ruleBase = new RuleBaseImpl(List.of(TestData.r1_px_qx, TestData.r4_qx_txy));
        KnowledgeBase kb = new KnowledgeBaseImpl(factBase, ruleBase);
        explainer = new KBSupportExplainer_Baseline(kb);
        Set<KnowledgeBase> explanations = explainer.getAllExplanations(query);

        FactBase expectedFactBase = new SimpleInMemoryGraphStore(Set.of(TestData.pa));
        RuleBase expectedRuleBase = new RuleBaseImpl(Set.of(TestData.r1_px_qx));
        KnowledgeBase expectedExpl = new KnowledgeBaseImpl(expectedFactBase, expectedRuleBase);
        Assertions.assertTrue(explanations.contains(expectedExpl), "(Completeness issue) Explanation missing: " + expectedExpl);
        Assertions.assertEquals(1, explanations.size(), "(Soundness issue) identify non-explanation as explanation");
    }

    /** A query that is not entailed by the KB must yield no explanation at all. */
    @Test
    public void QueryNotEntailedTest() {
        Atom query = TestData.sa;
        FactBase factBase = new SimpleInMemoryGraphStore(List.of(TestData.pa, TestData.ta));
        RuleBase ruleBase = new RuleBaseImpl(List.of(TestData.r1_px_qx, TestData.r4_qx_txy));
        KnowledgeBase kb = new KnowledgeBaseImpl(factBase, ruleBase);
        explainer = new KBSupportExplainer_Baseline(kb);
        Set<KnowledgeBase> explanations = explainer.getAllExplanations(query);

        Assertions.assertEquals(0, explanations.size(), "(Soundness issue) identify non-explanation as explanation");
    }

    /** A query already in the fact base is supported by itself with an empty rule set. */
    @Test
    public void QueryInFactBaseTest() {
        Atom query = TestData.pa;
        FactBase factBase = new SimpleInMemoryGraphStore(List.of(TestData.pa, TestData.ta));
        RuleBase ruleBase = new RuleBaseImpl(List.of(TestData.r1_px_qx, TestData.r4_qx_txy));
        KnowledgeBase kb = new KnowledgeBaseImpl(factBase, ruleBase);
        explainer = new KBSupportExplainer_Baseline(kb);
        Set<KnowledgeBase> explanations = explainer.getAllExplanations(query);

        FactBase expectedFactBase = new SimpleInMemoryGraphStore(Set.of(TestData.pa));
        RuleBase expectedRuleBase = new RuleBaseImpl(Set.of());
        KnowledgeBase expectedExpl = new KnowledgeBaseImpl(expectedFactBase, expectedRuleBase);
        Assertions.assertTrue(explanations.contains(expectedExpl), "(Completeness issue) Explanation missing: " + expectedExpl);
        Assertions.assertEquals(1, explanations.size(), "(Soundness issue) identify non-explanation as explanation");
    }
}