Mentions légales du service

Skip to content
Snippets Groups Projects

Compare revisions

Changes are shown as if the source revision was being merged into the target revision. Learn more about comparing revisions.

Source

Select target project
No results found

Target

Select target project
  • rules/integraal
  • ulliana/integraal
2 results
Show changes
Showing changes with 1313 additions and 81 deletions
package fr.boreal.explanation.api.processors;
import fr.boreal.model.kb.api.KnowledgeBase;
import java.util.Iterator;
import java.util.Set;
/**
 * Uses query rewriting to compute the Explanations of a Query.
 * Supports different types of Explanations, and Query types.
 *
 * @param <ExplanationType> the type of the computed explanations
 * @param <QueryType> the type of queries that can be explained
 */
public interface ExplanationProcessor_Rewriting<ExplanationType, QueryType> {
/**
 * Computes every explanation of the query with respect to the knowledge base.
 *
 * @param kb the knowledge base the query is explained against
 * @param query the query to explain
 * @return the set of all explanations of a query given the kb
 */
Set<ExplanationType> computeAllExplanations(KnowledgeBase kb, QueryType query);
/**
 * Iterator view over the explanations. Note: the default implementation
 * eagerly computes the full set before returning its iterator.
 *
 * @param kb the knowledge base the query is explained against
 * @param query the query to explain
 * @return an iterator for all explanations of a query given the kb
 */
default Iterator<ExplanationType> computeExplanations(KnowledgeBase kb, QueryType query) {
return this.computeAllExplanations(kb, query).iterator();
}
}
package fr.boreal.explanation.api.solver;
/**
 * Marker interface for solvers used by the explanation processors
 * (e.g. GMUS/SAT solvers). Declares no methods of its own.
 */
public interface Solver {
}
package fr.boreal.explanation.api.validators;
import fr.boreal.model.formula.api.FOFormulaConjunction;
import fr.boreal.model.kb.api.KnowledgeBase;
import fr.boreal.model.logicalElements.api.Atom;
import java.util.Arrays;
/**
 * Validates that a knowledge base and an atomic query satisfy the restrictions
 * required by the atomic-KB explanation machinery: the query must be ground and
 * every rule body/head must be a single atom or a conjunction.
 */
public interface AtomicKBExplanationValidator {

    /**
     * @param kb    the knowledge base to validate
     * @param query the atomic query to validate
     * @return true iff the query contains only constants/literals and every rule
     *         body and head is either an {@code Atom} or a {@code FOFormulaConjunction}
     * @throws IllegalStateException if a rule has a null or blank label
     */
    static boolean isValid(KnowledgeBase kb, Atom query) {
        // the query must be ground: every term is a constant or a literal
        var queryOK = Arrays.stream(query.getTerms()).allMatch(term -> term.isConstant() || term.isLiteral());
        // check that all rules body and head are conjunctions
        // TODO : check more in depth that the FOFormulaConjunction contains only bodyAtoms
        // TODO : check that in the data (be careful of federations) there are only constants and literals
        // TODO : check that in the rules there are no functional terms
        var rulesOK = true;
        for (var r : kb.getRuleBase().getRules()) {
            // rules must have a label (labels identify rules in explanations)
            if (r.getLabel() == null || r.getLabel().isBlank()) {
                throw new IllegalStateException("rule label is empty: " + r);
            }
            if (!(r.getBody() instanceof FOFormulaConjunction || r.getBody() instanceof Atom)) {
                rulesOK = false;
                break;
            }
            if (!(r.getHead() instanceof FOFormulaConjunction || r.getHead() instanceof Atom)) {
                rulesOK = false;
                break;
            }
        }
        return queryOK && rulesOK;
    }
}
package fr.boreal.explanation.configuration; package fr.boreal.explanation.configuration;
import fr.boreal.forward_chaining.api.ForwardChainingAlgorithm; import fr.boreal.forward_chaining.chase.Chase;
import fr.boreal.forward_chaining.chase.ChaseBuilder; import fr.boreal.forward_chaining.chase.ChaseBuilder;
import fr.boreal.forward_chaining.chase.lineage.LineageTracker;
import fr.boreal.forward_chaining.chase.rule_applier.trigger_applier.facts_handler.DelegatedApplication;
import fr.boreal.forward_chaining.chase.rule_applier.trigger_applier.tracking.TriggerApplierWithTrackerImpl;
import fr.boreal.forward_chaining.chase.rule_applier.trigger_applier.facts_handler.DirectApplication;
import fr.boreal.forward_chaining.chase.rule_applier.trigger_applier.renamer.FreshRenamer;
import fr.boreal.model.kb.api.KnowledgeBase; import fr.boreal.model.kb.api.KnowledgeBase;
import fr.boreal.model.kb.api.FactBase;
import fr.boreal.model.kb.api.RuleBase;
import fr.boreal.model.logicalElements.factory.impl.SameObjectTermFactory;
/** /**
* sets the default chase used for computing explanations * sets the default chase used for computing explanations
*/ */
public class DefaultChaseForExplanations { public class DefaultChaseForExplanations {
public static void chase(KnowledgeBase kb) { public static Chase chase(FactBase fb, RuleBase rb) {
ForwardChainingAlgorithm chase = ChaseBuilder.defaultBuilder(kb.getFactBase(), kb.getRuleBase()) if (rb.getRules().size() < 10000) {
Chase chase = ChaseBuilder.defaultBuilder(fb, rb)
.useObliviousChecker()
.useNaiveComputer()
.useByPredicateRuleScheduler()
.useBreadthFirstApplier()
//.useMultiThreadRuleApplier()
.build().get();
chase.execute();
return chase;
} else {
Chase chase = ChaseBuilder.defaultBuilder(fb, rb)
.useObliviousChecker()
.useNaiveComputer()
.useByPredicateRuleScheduler()
.useMultiThreadRuleApplier()
.build().get();
chase.execute();
return chase;
}
}
public static Chase chase(KnowledgeBase kb) {
return chase(kb.getFactBase(), kb.getRuleBase());
}
public static TrackingChase chaseForTracker(FactBase fb, RuleBase rb) {
var triggerTracker = new TriggerApplierWithTrackerImpl(new FreshRenamer(SameObjectTermFactory.instance()), new DelegatedApplication());
Chase chase = ChaseBuilder.defaultBuilder(fb, rb)
.setTriggerApplier(triggerTracker)
.useObliviousChecker() .useObliviousChecker()
.useNaiveComputer() .useNaiveComputer()
.useNaiveRuleScheduler() .useNaiveRuleScheduler()
.useAllTransformer()
.build().get(); .build().get();
chase.execute(); chase.execute();
return new TrackingChase(chase,triggerTracker.getTracker());
}
public static TrackingChase chaseForTracker(KnowledgeBase kb) {
return chaseForTracker(kb.getFactBase(), kb.getRuleBase());
} }
public record TrackingChase(Chase chase, LineageTracker tracker){}
} }
package fr.boreal.explanation.configuration;
import java.io.File;
/**
 * Small helper for resolving a file path with a fallback.
 */
public class PathFinder {

    /**
     * Resolves a path: the primary path is returned when a file exists there,
     * otherwise the fallback path is returned (whether or not it exists).
     *
     * @param primaryPath  the preferred path
     * @param fallbackPath the path used when nothing exists at primaryPath
     * @return primaryPath if a file exists there, else fallbackPath
     */
    public static String ensureFilePath(String primaryPath, String fallbackPath) {
        return new File(primaryPath).exists() ? primaryPath : fallbackPath;
    }
}
package fr.boreal.explanation.configuration;
import com.opencsv.CSVWriter;
import java.io.FileWriter;
import java.io.IOException;
import java.util.*;
/**
 * In-memory collection of timing statistics for explainer benchmarks,
 * with CSV export in pivoted form (one row per explainer/operation,
 * one column per dataset).
 */
public class StatsUtil {

    // explainer -> operation -> dataset -> accumulated time in ms
    // (TreeMap so that explainers are ordered by name)
    static Map<String, Map<String, Map<String, Long>>> timingRecords = new TreeMap<>();
    // explainer -> operations, in first-execution order
    static Map<String, LinkedHashSet<String>> operationOrder = new HashMap<>();
    // datasets, in first-seen order
    static LinkedHashSet<String> datasetOrder = new LinkedHashSet<>();
    // dataset -> number of facts
    static Map<String, Integer> datasetToFBsize = new HashMap<>();
    // dataset -> number of rules (not yet populated anywhere visible here)
    static Map<String, Integer> rbSize = new HashMap<>();
    // dataset -> average number of explanations per query (not yet populated here)
    static Map<String, Integer> avgExplanationNbPerQuery = new HashMap<>();

    /**
     * Times the given Runnable, prints the duration, and accumulates
     * (explainer, operation, dataset) -> elapsed time for later export.
     * Repeated calls for the same triple sum their times.
     *
     * @param function      The operation to time.
     * @param explainerName The explainer the timing belongs to.
     * @param operation     A label describing what is being timed (e.g. "Transforming rules").
     * @param datasetName   The identifier for the dataset (e.g. "Dataset1").
     */
    public static void timer(Runnable function, String explainerName, String operation, String datasetName) {
        long startTime = System.currentTimeMillis();
        function.run();
        long elapsedTime = System.currentTimeMillis() - startTime;
        // Print to console
        System.out.printf("[Dataset=%s] %s took %d ms%n", datasetName, operation, elapsedTime);
        // Sum the total time used across queries for this (explainer, operation, dataset)
        timingRecords
                .computeIfAbsent(explainerName, ignore -> new HashMap<>())
                .computeIfAbsent(operation, ignore -> new HashMap<>())
                .merge(datasetName, elapsedTime, Long::sum);
        // Keep track of the order of execution
        operationOrder.computeIfAbsent(explainerName, ignore -> new LinkedHashSet<>()).add(operation);
        // Keep track of the order of datasets
        datasetOrder.add(datasetName);
    }

    /**
     * Records the fact-base size of a dataset.
     * (Previously an empty stub: the value was silently dropped.)
     *
     * @param datasetName the dataset identifier
     * @param FBSize      the number of facts in the dataset
     */
    public static void recordFBSize(String datasetName, int FBSize) {
        datasetToFBsize.put(datasetName, FBSize);
    }

    /**
     * Exports pivoted timings so that:
     * - Each row = one (Explainer, Operation)
     * - Each column = one Dataset
     * - Each cell = the SUM of times for that (explainer, operation, dataset).
     *
     * CSV columns are: [Explainer, Operation, Dataset1, Dataset2, ...].
     *
     * @param fileName path of the CSV file to write
     * @throws RuntimeException wrapping any IOException from the writer
     */
    public static void exportPivotStatsToCSV(String fileName) {
        try (CSVWriter writer = new CSVWriter(new FileWriter(fileName))) {
            // 1) Write column names
            List<String> columns = new ArrayList<>();
            columns.add("Explainer");
            columns.add("Operation");
            columns.addAll(datasetOrder);
            writer.writeNext(columns.toArray(new String[0]));
            // TODO: also export # rules and # facts per dataset (datasetToFBsize / rbSize)
            // 2) For each explainer
            for (String explainerName : timingRecords.keySet()) {
                // blank separator row between explainers
                writer.writeNext(new String[0]);
                for (String operation : operationOrder.get(explainerName)) {
                    Map<String, Long> operationToRecord = timingRecords.get(explainerName).get(operation);
                    if (operationToRecord == null) {
                        continue;
                    }
                    List<String> row = new ArrayList<>();
                    row.add(explainerName);
                    row.add(operation);
                    // Then print all dataset values in order (empty cell when missing)
                    for (String datasetName : datasetOrder) {
                        if (operationToRecord.containsKey(datasetName)) {
                            row.add(operationToRecord.get(datasetName).toString());
                        } else {
                            row.add("");
                        }
                    }
                    // finally, write the row
                    writer.writeNext(row.toArray(new String[0]));
                }
            }
            System.out.println("Exported pivoted timings to CSV: " + fileName);
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Clears ALL recorded stats for a fresh run
     * (previously only timingRecords was cleared, leaving stale state behind).
     */
    public static void clearStats() {
        timingRecords.clear();
        operationOrder.clear();
        datasetOrder.clear();
        datasetToFBsize.clear();
        rbSize.clear();
        avgExplanationNbPerQuery.clear();
    }
}
package fr.boreal.explanation.configuration.query_selection;
import fr.boreal.explanation.configuration.DefaultChaseForExplanations;
import fr.boreal.forward_chaining.chase.Chase;
import fr.boreal.model.kb.api.FactBase;
import fr.boreal.model.kb.api.KnowledgeBase;
import fr.boreal.model.kb.api.RuleBase;
import fr.boreal.model.kb.impl.RuleBaseImpl;
import fr.boreal.model.logicalElements.api.Atom;
import fr.boreal.model.logicalElements.api.Constant;
import fr.boreal.model.logicalElements.api.Predicate;
import fr.boreal.model.logicalElements.impl.AtomImpl;
import fr.boreal.model.rule.api.FORule;
import fr.boreal.storage.natives.SimpleInMemoryGraphStore;
import java.util.*;
import java.util.stream.Collectors;
public class QuerySelector {
/**
 * Selects up to nbFacts entailed atoms, preferring atoms produced at the deepest
 * chase steps, to serve as benchmark queries.
 *
 * @param kb the knowledge base to select queries from
 * @param nbFacts the number of query atoms requested
 * @param explainFacts when false, unary body predicates are seeded with fresh witness constants
 * @return the selected atoms, or empty when not enough entailments exist
 */
public static Optional<Collection<Atom>> select(KnowledgeBase kb, int nbFacts, Boolean explainFacts) {
// NOTE(review): rules/rb/fb below are built but never used afterwards — dead code? confirm
Collection<FORule> rules = new ArrayList<FORule>(kb.getRuleBase().getRules());
RuleBase rb = new RuleBaseImpl(rules);
FactBase fb = new SimpleInMemoryGraphStore(kb.getFactBase().getAtoms().collect(Collectors.toList()));
KnowledgeBase kbCopy = Utils.copyKnowledgeBase(kb);
Collection<Atom> predicateConstantAtoms = new ArrayList<Atom>();
Collection<Constant> predicateConstants = new ArrayList<Constant>();
if (!explainFacts) {
// Seed each unary body predicate of the copy with a fresh "CONS_<pred>" witness atom.
for (FORule ruleToConstant : kbCopy.getRuleBase().getRules()) {
for (Predicate predicateToConstant : ruleToConstant.getBody().getPredicates()) {
if (predicateToConstant.getArity() == 1) {
// NOTE(review): termFactory is not declared in this class — presumably a shared
// term factory (e.g. SameObjectTermFactory.instance()); as written this does not compile. Confirm.
Constant constantPred = termFactory.createOrGetConstant("CONS_"+predicateToConstant.toString());
Atom predicateConstantAtom = new AtomImpl(predicateToConstant, constantPred);
kbCopy.getFactBase().add(predicateConstantAtom);
predicateConstantAtoms.add(predicateConstantAtom);
predicateConstants.add(constantPred);}
}
}
}
// NOTE(review): DefaultChaseForExplanations.chase(kb) already executes the chase;
// stepping it again below may iterate zero times — confirm the intended chase instance.
Chase chase = DefaultChaseForExplanations.chase(kb);
int depth = 0;
// foundAxiomEntailments.get(d) = atoms created at chase step d (index 0 is an empty placeholder)
List<Set<Atom>> foundAxiomEntailments = new ArrayList<>();
foundAxiomEntailments.add(new HashSet<>());
chase.applyGlobalPretreatments();
while (chase.hasNextStep()) {
chase.applyPretreatments();
chase.nextStep();
chase.applyEndOfStepTreatments();
depth++;
foundAxiomEntailments.add(chase
.getLastStepResults()
.created_facts_as_factbase()
.getAtoms()
.collect(Collectors.toSet()));
}
System.out.println(foundAxiomEntailments);
// Only select when there are strictly more (non-witness) facts than requested.
if (kbCopy.getFactBase().size() - predicateConstantAtoms.size() > nbFacts) {
List<Atom> selectedAtoms = new ArrayList<>();
// Walk entailment levels from deepest to shallowest until nbFacts atoms are collected.
for (int i = foundAxiomEntailments.size()-1; selectedAtoms.size() < nbFacts; i--) {
if (i == -1) {
System.out.println("Doesn't have enough entailments");
return Optional.empty();
} else {
for (Atom a : foundAxiomEntailments.get(i)) {
selectedAtoms.add(a);
if (selectedAtoms.size() == nbFacts) {
return Optional.of(selectedAtoms);
}
}
}
}
}
System.out.println("Doesn't have enough entailments");
return Optional.empty();
}
}
package fr.boreal.explanation.explainers;
import fr.boreal.explanation.api.AtomicQueryExplanationEnumerator;
import fr.boreal.explanation.ruleFactories.StaticGRIRuleTransformer;
import fr.boreal.model.kb.api.FactBase;
import fr.boreal.model.logicalElements.api.Atom;
import java.util.Set;
/**
* Computes fact-support explanations for a knowledge base and a ground atomic query
*/
public class FactSupportExplainer implements AtomicQueryExplanationEnumerator<FactBase> {
// Shared rule transformer; currently unused by the stub below.
static StaticGRIRuleTransformer staticRuleTransformerFactory = StaticGRIRuleTransformer.instance();
/**
 * Unimplemented stub: always returns the empty set.
 * TODO(review): implement fact-support explanation enumeration.
 *
 * @param query the ground atomic query to explain
 * @return currently always the empty set
 */
public Set<FactBase> explain(Atom query) {
return Set.of();
}
}
package fr.boreal.explanation.explainers;
import fr.boreal.explanation.api.AtomicQueryExplanationEnumerator;
import fr.boreal.explanation.GRI.dynamicProcessing.RELTracer;
import fr.boreal.explanation.solving_enumerating.GMUSProcessor;
import fr.boreal.explanation.ruleFactories.StaticGRIRuleTransformer;
import fr.boreal.explanation.GRI.staticProcessing.GRIBuilder;
import fr.boreal.model.kb.api.FactBase;
import fr.boreal.model.kb.api.KnowledgeBase;
import fr.boreal.model.logicalElements.api.Atom;
import org.apache.commons.lang3.tuple.Pair;
import java.util.Set;
/**
* Computes kb-support explanations for a knowledge base and a ground atomic query
*/
/**
 * Computes kb-support explanations for a knowledge base and a ground atomic query.
 */
public class KBSupportExplainer implements AtomicQueryExplanationEnumerator<KnowledgeBase> {
    KnowledgeBase gri;
    StaticGRIRuleTransformer ruleTransformer;

    /**
     * Sets the initial KB and computes the GRI.
     *
     * @param kb the current kb
     */
    public KBSupportExplainer(KnowledgeBase kb) {
        var built = GRIBuilder.buildGRI(kb);
        this.gri = built.getLeft();
        this.ruleTransformer = built.getRight();
    }

    /**
     * Filters the GRI to the query-relevant part and delegates to the GMUS processor.
     *
     * @param query the ground atomic query to explain
     * @return the set of kb-support explanations
     */
    public Set<KnowledgeBase> explain(Atom query) {
        var relTracer = new RELTracer();
        var queryRelevantGRI = relTracer.computeQueryRelevant(gri, ruleTransformer, query);
        return new GMUSProcessor().computeExplanations(queryRelevantGRI, ruleTransformer, query);
    }
}
package fr.boreal.explanation.explainers;
import fr.boreal.explanation.api.AtomicQueryExplanationEnumerator;
import fr.boreal.explanation.ruleFactories.StaticGRIRuleTransformer;
import fr.boreal.model.kb.api.RuleBase;
import fr.boreal.model.logicalElements.api.Atom;
import java.util.Set;
/**
* Computes rule-support explanations for a knowledge base and a ground atomic query
*/
public class RuleSupportExplainer implements AtomicQueryExplanationEnumerator<RuleBase> {
// Shared rule transformer; currently unused by the stub below.
static StaticGRIRuleTransformer staticRuleTransformerFactory = StaticGRIRuleTransformer.instance();
/**
 * Unimplemented stub: always returns the empty set.
 * TODO(review): implement rule-support explanation enumeration.
 *
 * @param query the ground atomic query to explain
 * @return currently always the empty set
 */
public Set<RuleBase> explain(Atom query) {
return Set.of();
}
}
package fr.boreal.explanation.kb_gri.encoders;
import com.google.common.collect.BiMap;
import com.google.common.collect.HashBiMap;
import fr.boreal.explanation.api.encoders.GSATEncoder_GRI;
import fr.boreal.explanation.api.encoders.GSATEncodingResult_GRI;
import fr.boreal.explanation.kb_gri.rule_transformation.GRIRuleTransformer;
import fr.boreal.explanation.util.HornClausePrinter;
import fr.boreal.model.kb.api.FactBase;
import fr.boreal.model.kb.api.KnowledgeBase;
import fr.boreal.model.logicalElements.api.*;
import fr.boreal.model.rule.api.FORule;
import java.util.*;
/**
 * This abstract class contains the main algorithm for encoding a factbase, with 4
 * abstract methods that subclasses use to configure grouping.
 * <p>
 * It computes an encoding of explanations as GCNF formulas that can later be processed
 * by a MUS solver. Ground atoms are represented as propositional variables.
 * <p>
 * 1. propVarIDMap maps a propositional variable to its corresponding (int) ID starting at 1
 * 2. clauses is a list of lists of integers. A clause has the form:
 *    nbGroup, i                        for every atom a_i in the chase
 *    nbGroup, -i1, -i2, ..., i         for every grounded rule a_i1 ^ a_i2 ... -> a_i
 *    where each integer i is the ID of a propositional variable (as stored in propVarIDMap)
 * 3. nbGroup tracks the latest group number (starting at 0)
 * <p>
 * Two extra structures are computed for translating the GMUSes back to the KB:
 * 1. factIDMap (group number -> atom)
 * 2. ruleIDMap (group number -> ungrounded rule)
 */
public abstract class AbstractGSATEncoder_KBGRI implements GSATEncoder_GRI<KnowledgeBase, Atom> {

    GRIRuleTransformer griRuleTransformer = GRIRuleTransformer.instance();
    Predicate REL = GRIRuleTransformer.REL;

    /** Propositional variable (atom) -> integer ID, starting at 1. */
    public BiMap<Atom, Integer> propVarIDMap = HashBiMap.create();
    /** Group number -> initial fact it encodes. */
    public BiMap<Integer, Atom> factIDMap = HashBiMap.create();
    /** Group number -> (ungrounded) rule it encodes. */
    public BiMap<Integer, FORule> ruleIDMap = HashBiMap.create();
    /** GCNF clauses; each clause is [groupNumber, lit1, lit2, ...]. */
    public List<List<Integer>> clauses = new ArrayList<>();
    /** Latest group number assigned (starting value defined by the subclass). */
    protected int nbGroup;

    public AbstractGSATEncoder_KBGRI() {
        this.nbGroup = getStartingGroupNumber();
    }

    /**
     * @return the starting group number for the encoding
     */
    abstract int getStartingGroupNumber();

    /**
     * @return the number of groups
     */
    public int getNumberOfGroups() {
        return this.nbGroup;
    }

    /**
     * This method takes a (filtered) KB and the query and returns the GCNF encoding.
     *
     * @param filteredGRI the query-relevant part of the GRI
     * @param query the ground atomic query
     * @param belongsToInitialFactbase tests whether an atom is an initial (non-inferred) fact
     * @return the encoding result (variable/fact/rule maps, clauses, group count)
     */
    public GSATEncodingResult_GRI encode(
            KnowledgeBase filteredGRI,
            Atom query,
            java.util.function.Predicate<Atom> belongsToInitialFactbase) {
        assignDefaultGroupNumberAndCreateClauseForStartQuery(query);
        assignGroupNumbersAndComputeClausesForRELEdges(filteredGRI.getFactBase(), query, belongsToInitialFactbase);
        return new GSATEncodingResult_GRI(propVarIDMap, factIDMap, ruleIDMap, clauses, nbGroup);
    }

    /**
     * Assigns the next free ID to the query in propVarIDMap and adds the (negated)
     * query clause in group 0. Modifies propVarIDMap and clauses.
     *
     * @param query the ground atomic query
     */
    public void assignDefaultGroupNumberAndCreateClauseForStartQuery(Atom query) {
        propVarIDMap.put(query, propVarIDMap.size() + 1);
        clauses.add(List.of(0, propVarIDMap.get(query) * -1));
    }

    /**
     * For every *relevant* rule r:
     * (1) Gives a group number [nbGroup] to every relevant rule r and hence to every REL_EDGE_r predicate
     * (r is relevant if it is used to compute SupRel and marked by the presence of a REL_EDGE_r atom).
     * <p>
     * (2) Computes a set of clauses encoding the usage of r.
     * Every clause is derived from an atom of the form
     * REL_EDGE_r(id_grounded_body1, id_grounded_body2, ..., id_grounded_head)
     * and encoded as a list of integers:
     * nbGroup -id_grounded_body1 -id_grounded_body2 ... id_grounded_head
     *
     * @param filteredGRI the query-relevant GRI factbase
     * @param query the ground atomic query (unused here, kept for subclass symmetry)
     * @param belongsToInitialFactbase tests whether an atom is an initial fact
     */
    public void assignGroupNumbersAndComputeClausesForRELEdges(FactBase filteredGRI, Atom query, java.util.function.Predicate<Atom> belongsToInitialFactbase) {
        BiMap<Predicate, FORule> relEdgePredicateToRule = griRuleTransformer.getRuleToRelEdgePredicateMap().inverse();
        // Looping over all relEdge_r predicates.
        // Each clause represents a grounding of the rule (i.e. a particular relEdge_r atom).
        var predIt = filteredGRI.getPredicates();
        while (predIt.hasNext()) {
            Predicate relEdgePred = predIt.next();
            if (relEdgePred.label().startsWith(GRIRuleTransformer.REL_EDGE_prefix)) {
                // All clauses/groundings of the same rule will be in the same group
                var currentRule = relEdgePredicateToRule.get(relEdgePred);
                // We assign a new group to the currentRule
                int currentRuleID = computeRuleID();
                if (needRule()) {
                    ruleIDMap.put(currentRuleID, currentRule);
                }
                // Set (was a List with O(n) contains) of already-processed REL_EDGE atoms,
                // to avoid trigger-clause redundancy.
                Set<Atom> seenRelEdgeAtoms = new HashSet<>();
                // We produce a clause for every REL_EDGE_currentRule atom (each represents a grounding)
                var it = filteredGRI.getAtomsByPredicate(relEdgePred);
                while (it.hasNext()) {
                    Atom relEdgeAtom = it.next();
                    // add() returns false when already seen — skip duplicates
                    if (seenRelEdgeAtoms.add(relEdgeAtom)) {
                        // Prepare the resulting clause; the currentRuleID is the group number
                        List<Integer> relEdgeAtomClause = new ArrayList<>();
                        relEdgeAtomClause.addFirst(currentRuleID);
                        // We collect all identifiers of relevant bodyAtoms
                        Term[] relevantAtomIdentifierList = relEdgeAtom.getTerms();
                        // Checking (circular) redundancy: p ^ q -> q (e.g.) is not helpful so we skip it
                        boolean redundant = false;
                        for (int i = 0; i < relevantAtomIdentifierList.length - 1; i++) {
                            if (relevantAtomIdentifierList[i].equals(relevantAtomIdentifierList[relevantAtomIdentifierList.length - 1])) {
                                redundant = true;
                                break;
                            }
                        }
                        if (redundant) {
                            continue;
                        }
                        // Process the atom
                        for (int i = 0; i < relevantAtomIdentifierList.length; i++) {
                            var relevantAtomIdentifier = (LogicalFunctionalTerm) relevantAtomIdentifierList[i];
                            Atom relevantAtom = griRuleTransformer.getAtomFromFnTerm(relevantAtomIdentifier);
                            // Check that
                            // (1) the relevant atom has not already been encoded (sufficient: not in propVarIDMap)
                            // (2) the relevant atom belongs to the original factbase
                            if (!propVarIDMap.containsKey(relevantAtom)) {
                                // Assign an integer to the relevant atom if unseen
                                propVarIDMap.put(relevantAtom, propVarIDMap.size() + 1);
                                // Moreover, if the relevant atom is an initial fact, we create a clause for it
                                if (belongsToInitialFactbase.test(relevantAtom)) {
                                    // Assign a group to the atom
                                    int relAtomGroupID = computeRelAtomGroupID();
                                    if (needFact()) {
                                        factIDMap.put(relAtomGroupID, relevantAtom);
                                    }
                                    // Create a singleton group with the rel atom in the gcnf
                                    var atomClause = List.of(relAtomGroupID, propVarIDMap.get(relevantAtom));
                                    clauses.add(atomClause);
                                }
                            }
                            // Add each relevant atom to the relEdgeAtom clause, negating it for now
                            relEdgeAtomClause.add(propVarIDMap.get(relevantAtom) * -1);
                        }
                        // Flip the last literal (the head) back to positive
                        relEdgeAtomClause.set(relEdgeAtomClause.size() - 1, relEdgeAtomClause.getLast() * -1);
                        // The clause is ready
                        if (needRule()) {
                            clauses.add(relEdgeAtomClause);
                        } else {
                            if (!clauses.contains(relEdgeAtomClause)) {
                                clauses.add(relEdgeAtomClause);
                            }
                        }
                    }
                }
            }
        }
        // Check that there is no redundancy
        if (clauses.size() > clauses.stream().distinct().count()) {
            throw new IllegalStateException("There is a redundant clause.");
        }
    }

    /** @return the group number to use for the current rule's clauses */
    public abstract int computeRuleID();

    /** @return the group number to use for an initial-fact clause */
    public abstract int computeRelAtomGroupID();

    /** @return whether rule group IDs must be recorded in ruleIDMap */
    public abstract boolean needRule();

    /** @return whether fact group IDs must be recorded in factIDMap */
    public abstract boolean needFact();
}
package fr.boreal.explanation.kb_gri.encoders;
import fr.boreal.explanation.api.encoders.GSATEncodingResult_GRI;
import fr.boreal.model.kb.api.KnowledgeBase;
import fr.boreal.model.logicalElements.api.Atom;
/**
 * GSAT encoder for fact-support explanations:
 * all rules share group 1; each initial fact forms its own singleton group.
 */
public class FactSupportGSATEncoder_KBGRI extends AbstractGSATEncoder_KBGRI {

    /** Groups start at 1 since group 1 is reserved for all rule clauses. */
    @Override
    int getStartingGroupNumber() {
        return 1;
    }

    /**
     * This is used to configure the main algorithm in AbstractGSATEncoder_KBGRI.
     * For fact-support the rules are all in one group (group 1).
     * @return 1
     */
    @Override
    public int computeRuleID() {
        return 1;
    }

    /**
     * This is used to configure the main algorithm in AbstractGSATEncoder_KBGRI.
     * For fact-support each REL fact forms a singleton group: increment the
     * group number and use it as the fact's group ID.
     * @return the new group number
     */
    @Override
    public int computeRelAtomGroupID() {
        return ++nbGroup;
    }

    /**
     * This is used to configure the main algorithm in AbstractGSATEncoder_KBGRI.
     * Fact-support does not need to record rules.
     * @return false
     */
    @Override
    public boolean needRule() {
        return false;
    }

    /**
     * This is used to configure the main algorithm in AbstractGSATEncoder_KBGRI.
     * Fact-support needs to record facts.
     * @return true
     */
    @Override
    public boolean needFact() {
        return true;
    }
}
package fr.boreal.explanation.kb_gri.encoders;
/**
 * GSAT encoder for KB-support explanations:
 * each rule gets its own group (shared by all its groundings) and each
 * initial fact forms its own singleton group.
 */
public class KBSupportKBGSATEncoder_KBGRI extends AbstractGSATEncoder_KBGRI {

    /** Groups start at 0; group numbers are allocated incrementally from there. */
    @Override
    int getStartingGroupNumber() {
        return 0;
    }

    /**
     * This is used to configure the main algorithm in AbstractGSATEncoder_KBGRI.
     * For KB-support all groundings of the same rule are in the same group:
     * increment the group number and use it as the ruleID shared across groundings.
     * @return the new group number
     */
    @Override
    public int computeRuleID() {
        return ++nbGroup;
    }

    /**
     * This is used to configure the main algorithm in AbstractGSATEncoder_KBGRI.
     * For KB-support each REL fact forms a singleton group: increment the
     * group number and use it as the fact's group ID.
     * @return the new group number
     */
    @Override
    public int computeRelAtomGroupID() {
        return ++nbGroup;
    }

    /**
     * This is used to configure the main algorithm in AbstractGSATEncoder_KBGRI.
     * KB-support needs to record rule IDs.
     * @return true
     */
    @Override
    public boolean needRule() {
        return true;
    }

    /**
     * This is used to configure the main algorithm in AbstractGSATEncoder_KBGRI.
     * KB-support needs to record facts.
     * @return true
     */
    @Override
    public boolean needFact() {
        return true;
    }
}
package fr.boreal.explanation.kb_gri.encoders;
/**
 * GSAT encoder for rule-support explanations:
 * each rule gets its own group (shared by all its groundings) and all
 * initial facts share group 1.
 */
public class RuleSupportKBGSATEncoder_KBGRI extends AbstractGSATEncoder_KBGRI {

    /** Groups start at 1 since group 1 is reserved for all fact clauses. */
    @Override
    int getStartingGroupNumber() {
        return 1;
    }

    /**
     * This is used to configure the main algorithm in AbstractGSATEncoder_KBGRI.
     * For rule-support all groundings of the same rule are in the same group:
     * increment the group number and use it as the ruleID shared across groundings.
     * @return the new group number
     */
    @Override
    public int computeRuleID() {
        return ++nbGroup;
    }

    /**
     * This is used to configure the main algorithm in AbstractGSATEncoder_KBGRI.
     * For rule-support all REL facts form one group (group 1).
     * @return 1
     */
    @Override
    public int computeRelAtomGroupID() {
        return 1;
    }

    /**
     * This is used to configure the main algorithm in AbstractGSATEncoder_KBGRI.
     * Rule-support needs to record rule IDs.
     * @return true
     */
    @Override
    public boolean needRule() {
        return true;
    }

    /**
     * This is used to configure the main algorithm in AbstractGSATEncoder_KBGRI.
     * Rule-support does not need to record fact IDs.
     * @return false
     */
    @Override
    public boolean needFact() {
        return false;
    }
}
package fr.boreal.explanation.kb_gri.explainers.dynamic_gri;
import fr.boreal.explanation.api.explainers.AtomicQueryExplainer;
import fr.boreal.explanation.api.processors.ExplanationProcessor_GRI;
import fr.boreal.explanation.api.solver.Solver;
import fr.boreal.explanation.configuration.DefaultChaseForExplanations;
import fr.boreal.explanation.kb_gri.rule_transformation.GRIRuleTransformer;
import fr.boreal.explanation.solving_enumerating.marco.MARCOGMUSSolver;
import fr.boreal.forward_chaining.chase.Chase;
import fr.boreal.model.kb.api.KnowledgeBase;
import fr.boreal.model.kb.api.RuleBase;
import fr.boreal.model.kb.impl.KnowledgeBaseImpl;
import fr.boreal.model.logicalElements.api.Atom;
import fr.boreal.model.logicalElements.impl.AtomImpl;
import org.apache.commons.lang3.NotImplementedException;
import java.util.*;
import java.util.function.Predicate;
/**
 * Base class for explainers that build the GRI dynamically (driven by the query)
 * and delegate explanation enumeration to a GMUS processor.
 *
 * @param <ExplanationType> type of explanation produced by the concrete subclass
 */
public abstract class AbstractDynamicGRIBasedExplainer_KBGRI<ExplanationType> implements AtomicQueryExplainer<ExplanationType> {

    /** Atoms of the original factbase, kept to tell initial facts apart from inferred ones. */
    private final Set<Atom> initialAtoms;

    public GRIRuleTransformer ruleTransformer = GRIRuleTransformer.instance();

    /**
     * The rulebase used to build the GRI
     */
    private final RuleBase transformedRB;

    /**
     * A KB whose factbase will contain the GRI and whose rulebase is the transformedRB
     */
    protected KnowledgeBase downwardClosure;

    /**
     * The chase object used to compute the GRI, so that it can be reused for tracing.
     */
    protected Chase griKBChase;

    /**
     * Sets the initial KB and computes the GRI using the rule transformation proposed in RuleML22.
     *
     * @param kb the current kb
     * @param query the ground atomic query driving the dynamic GRI construction
     */
    public AbstractDynamicGRIBasedExplainer_KBGRI(KnowledgeBase kb, Atom query) {
        this.initialAtoms = new HashSet<>();
        kb.getFactBase().getAtoms().forEach(initialAtoms::add);
        ruleTransformer.clear();
        // Saturate the input KB first (the chase mutates kb's factbase in place).
        DefaultChaseForExplanations.chase(kb);
        this.transformedRB = ruleTransformer.createDynamicGRIBuilderRB(kb, query);
        this.downwardClosure = new KnowledgeBaseImpl(kb.getFactBase(), transformedRB);
        DefaultChaseForExplanations.chase(downwardClosure);
    }

    /**
     * Saturates the GRI with Tracing rules to compute relevant atoms and triggers for the query.
     * Then calls the solver.
     *
     * @param query the ground atomic query to explain
     * @return the set of explanations, empty when the query is not entailed
     */
    public Set<ExplanationType> getAllExplanations(Atom query) {
        Atom queryWithFuncId = GRIRuleTransformer.instance().createAtomWithStoredFnTermIdentifier(query);
        if (!downwardClosure.getFactBase().contains(queryWithFuncId)) {
            return Set.of();
        }
        // add seed atom to the factbase
        Atom RELquery = new AtomImpl(GRIRuleTransformer.REL, GRIRuleTransformer.createFnTermIdentifier(query));
        downwardClosure.getFactBase().add(RELquery);
        // chase only with the tracing rules
        griKBChase = DefaultChaseForExplanations.chase(downwardClosure.getFactBase(), this.transformedRB);
        // NOTE(review): DefaultChaseForExplanations.chase already calls execute(); this second
        // call looks redundant — confirm Chase.execute() is idempotent, or drop it.
        griKBChase.execute();
        Predicate<Atom> belongsToInitialFactbase = this.initialAtoms::contains;
        return getGMUSProcessor(new MARCOGMUSSolver()).computeAllExplanations(downwardClosure, query, belongsToInitialFactbase);
    }

    /** @return the GMUS processor matching the subclass's explanation type */
    abstract ExplanationProcessor_GRI getGMUSProcessor(Solver solver);

    @Override
    public void pipelineWithTimer(String datasetName, Collection<Atom> queries) {
        throw new NotImplementedException();
    }
}
package fr.boreal.explanation.kb_gri.explainers.dynamic_gri;
import fr.boreal.explanation.api.processors.ExplanationProcessor_GRI;
import fr.boreal.explanation.api.solver.Solver;
import fr.boreal.explanation.kb_gri.processors.FactSupportGMUSProcessor_KBGRI;
import fr.boreal.explanation.kb_gri.processors.KBSupportGMUSProcessor_KBGRI;
import fr.boreal.model.kb.api.KnowledgeBase;
import fr.boreal.model.logicalElements.api.Atom;
/**
 * Computes fact-support explanations for a knowledge base and a ground atomic query.
 */
public class FactSupportExplainer_DynamicKBGRI extends AbstractDynamicGRIBasedExplainer_KBGRI<KnowledgeBase> {
/**
 * @param kb the knowledge base to explain against
 * @param query the ground atomic query
 */
public FactSupportExplainer_DynamicKBGRI(KnowledgeBase kb, Atom query) {
super(kb, query);
}
/**
 * @return the processor producing fact-support explanations
 */
@Override
ExplanationProcessor_GRI getGMUSProcessor(Solver solver) {
return new FactSupportGMUSProcessor_KBGRI(solver);
}
}
package fr.boreal.explanation.kb_gri.explainers.dynamic_gri;
import fr.boreal.explanation.api.processors.ExplanationProcessor_GRI;
import fr.boreal.explanation.api.solver.Solver;
import fr.boreal.explanation.kb_gri.processors.KBSupportGMUSProcessor_KBGRI;
import fr.boreal.model.kb.api.KnowledgeBase;
import fr.boreal.model.logicalElements.api.Atom;
/**
 * Computes kb-support explanations for a knowledge base and a ground atomic query.
 */
public class KBSupportExplainer_DynamicKBGRI extends AbstractDynamicGRIBasedExplainer_KBGRI<KnowledgeBase> {
/**
 * @param kb the knowledge base to explain against
 * @param query the ground atomic query
 */
public KBSupportExplainer_DynamicKBGRI(KnowledgeBase kb, Atom query) {
super(kb, query);
}
/**
 * @return the processor producing kb-support explanations
 */
@Override
ExplanationProcessor_GRI getGMUSProcessor(Solver solver) {
return new KBSupportGMUSProcessor_KBGRI(solver);
}
}
package fr.boreal.explanation.kb_gri.explainers.dynamic_gri;
import fr.boreal.explanation.api.processors.ExplanationProcessor_GRI;
import fr.boreal.explanation.api.solver.Solver;
import fr.boreal.explanation.kb_gri.processors.KBSupportGMUSProcessor_KBGRI;
import fr.boreal.explanation.kb_gri.processors.RuleSupportGMUSProcessor_KBGRI;
import fr.boreal.model.kb.api.KnowledgeBase;
import fr.boreal.model.logicalElements.api.Atom;
/**
 * Computes rule-support explanations for a knowledge base and a ground atomic query.
 */
public class RuleSupportExplainer_DynamicKBGRI extends AbstractDynamicGRIBasedExplainer_KBGRI<KnowledgeBase> {
/**
 * @param kb the knowledge base to explain against
 * @param query the ground atomic query
 */
public RuleSupportExplainer_DynamicKBGRI(KnowledgeBase kb, Atom query) {
super(kb, query);
}
/**
 * @return the processor producing rule-support explanations
 */
@Override
ExplanationProcessor_GRI getGMUSProcessor(Solver solver) {
return new RuleSupportGMUSProcessor_KBGRI(solver);
}
}
package fr.boreal.explanation.kb_gri.explainers.incremental_gri;
import com.google.common.collect.BiMap;
import fr.boreal.explanation.api.encoders.GSATEncodingResult_GRI;
import fr.boreal.explanation.api.explainers.AtomicQueryExplainer;
import fr.boreal.explanation.api.processors.ExplanationProcessor_GRI;
import fr.boreal.explanation.api.solver.Solver;
import fr.boreal.explanation.configuration.DefaultChaseForExplanations;
import fr.boreal.explanation.configuration.StatsUtil;
import fr.boreal.explanation.kb_gri.rule_transformation.GRIRuleTransformer;
import fr.boreal.explanation.kb_gri.rule_transformation.static_gri.RulesetForStaticGRIBuildingAndTracing;
import fr.boreal.explanation.solving_enumerating.marco.MARCOGMUSSolver;
import fr.boreal.explanation.solving_enumerating.sat4j.Sat4JSolver;
import fr.boreal.grd.api.GraphOfFORuleDependencies;
import fr.boreal.grd.impl.GRDImpl;
import fr.boreal.model.kb.api.KnowledgeBase;
import fr.boreal.model.kb.api.RuleBase;
import fr.boreal.model.kb.impl.KnowledgeBaseImpl;
import fr.boreal.model.kb.impl.RuleBaseImpl;
import fr.boreal.model.logicalElements.api.Atom;
import fr.boreal.model.logicalElements.api.Predicate;
import fr.boreal.model.logicalElements.impl.AtomImpl;
import fr.boreal.model.query.factory.FOQueryFactory;
import fr.boreal.model.rule.api.FORule;
import fr.boreal.storage.natives.SimpleInMemoryGraphStore;
import fr.boreal.unifier.QueryUnifier;
import fr.boreal.unifier.QueryUnifierAlgorithm;
import fr.boreal.views.FederatedFactBase;
import java.io.File;
import java.util.*;
import static fr.boreal.explanation.configuration.PathFinder.ensureFilePath;
/**
 * Abstract explainer that incrementally maintains a partial Graph of Rule Instantiations (GRI)
 * across successive ground atomic queries over the same knowledge base. For each query, only
 * the ancestor rules of the query (w.r.t. the graph of rule dependencies) that were not already
 * processed are transformed and chased; the resulting SAT encoding is then handed to a GMUS
 * solver (MARCO or Sat4J) and the group-MUSes are decoded into explanations.
 *
 * <p>Fixes w.r.t. previous revision: {@code grd} is no longer {@code static} (a static field
 * assigned in the constructor meant the last-constructed explainer silently replaced the GRD
 * used by every other instance), and an unused local in {@link #transformUnseenRules()} was
 * removed.</p>
 *
 * @param <ExplanationType> the type of explanation produced by the concrete subclass
 */
public abstract class AbstractIncrementalGRIBasedExplainer_KBGRI<ExplanationType> implements AtomicQueryExplainer<ExplanationType> {

    /**
     * Attributes defined in the constructor
     */
    private final Solver solver;
    protected KnowledgeBase inputKB;
    private FederatedFactBase federatedFactBase;
    // Graph of rule dependencies of the input KB. Instance-level on purpose:
    // it was previously static, so two explainers could not coexist safely.
    private GraphOfFORuleDependencies grd;

    /**
     * RuleTransforming
     */
    GRIRuleTransformer ruleTransformer = GRIRuleTransformer.instance();
    private RulesetForStaticGRIBuildingAndTracing transformedRB;

    /**
     * Static and dynamic knowledge bases
     */
    protected KnowledgeBase staticKB;
    protected KnowledgeBase dynamicKB;
    private RuleBase dynamicRB = new RuleBaseImpl();

    /**
     * Data structures for finding ancestor rules.
     * seenRules accumulates every rule already transformed; queryAncestorRules is
     * recomputed (cleared then filled) for each query.
     */
    private Set<FORule> seenRules = new HashSet<>();
    private Set<FORule> queryAncestorRules = new HashSet<>();

    // preprocessing: membership test w.r.t. the factbase as it was before any chase
    java.util.function.Predicate<Atom> belongsToInitialFactbase;
    boolean queryInInitialFB = false;

    // encoding
    GSATEncodingResult_GRI encodingResult;

    // sat4j
    Sat4JSolver sat4JSolver = new Sat4JSolver();
    List<List<List<Integer>>> sat4jGmuses;

    // marco
    MARCOGMUSSolver marcoSolver = new MARCOGMUSSolver();
    File gcnffile = new File(ensureFilePath("integraal-explanation/gsat.gcnf", "gsat.gcnf"));

    // decoding
    // NOTE(review): raw Set kept because the processors' translate* return types are not
    // visible from here; consider parameterizing as Set<ExplanationType> once confirmed.
    protected Set explanations;
    protected Set<String> gmuses;

    /**
     * Sets the initial KB and the solver used for GMUS enumeration.
     *
     * @param kb     the current kb
     * @param solver the GMUS solver (MARCO or Sat4J)
     */
    public AbstractIncrementalGRIBasedExplainer_KBGRI(KnowledgeBase kb, Solver solver) {
        ruleTransformer.clear();
        inputKB = kb;
        federatedFactBase = new FederatedFactBase(new SimpleInMemoryGraphStore());
        grd = new GRDImpl(kb.getRuleBase());
        this.solver = solver;
    }

    /**
     * Runs the full per-query pipeline: ancestor-rule computation, incremental GRI
     * construction, REL tracing, encoding, solving and decoding. The result is left
     * in {@link #explanations}.
     *
     * @param query the ground atomic query to explain
     */
    public void dynamicStep(Atom query) {
        recordInitialFactbase();
        checkQueryInInitialFB(query);
        if (!queryInInitialFB) {
            if (queryAncestorRules != null) {
                clearAncestorRules();
            }
            // partially computing GRI
            computeAncestorRules(query);
            if (seenRules.isEmpty()) {
                transformRulesForFirstStaticKB();
            } else if (!seenRules.containsAll(queryAncestorRules)) {
                transformUnseenRules();
            }
            chaseKBforGRI();
            // REL tracing
            prepareRelTracing(query);
            chaseForRELPropagation();
            encodeClauses(query);
            if (solver instanceof MARCOGMUSSolver) {
                writeMarcoGCNF();
                solveGMUSviaMarco();
                decodeGMUSesFromMarco();
            } else if (solver instanceof Sat4JSolver) {
                solveGMUSviaSat4j();
                decodeGMUSesFromSat4j();
            }
        } else {
            returnExplanationForQueryInFB(query);
        }
    }

    /**
     * Same pipeline as {@link #dynamicStep(Atom)}, but each step is wrapped in
     * {@link StatsUtil#timer} so per-step timings can be recorded for a benchmark run.
     *
     * @param datasetName label used when reporting timings
     * @param queries     the ground atomic queries to explain, in order
     */
    public void pipelineWithTimer(String datasetName, Collection<Atom> queries) {
        String explainerName = this.getClass().getSimpleName() + " + " + this.solver.getClass().getSimpleName();
        StatsUtil.timer(this::recordInitialFactbase, explainerName, "recordInitialFactbase", datasetName);
        for (Atom query : queries) {
            StatsUtil.timer(() -> checkQueryInInitialFB(query), explainerName,
                    "checkQueryInInitialFB", datasetName);
            if (!queryInInitialFB) {
                if (queryAncestorRules != null) {
                    StatsUtil.timer(this::clearAncestorRules, explainerName,
                            "clearAncestorRules", datasetName);
                }
                StatsUtil.timer(() -> computeAncestorRules(query), explainerName,
                        "computeAncestorRules", datasetName);
                if (seenRules.isEmpty()) {
                    StatsUtil.timer(() -> transformRulesForFirstStaticKB(), explainerName,
                            "Transforming the rules for building GRI for first query", datasetName);
                } else if (!seenRules.containsAll(queryAncestorRules)) {
                    StatsUtil.timer(() -> transformUnseenRules(), explainerName,
                            "Transforming the rules previously unseen", datasetName);
                }
                StatsUtil.timer(this::chaseKBforGRI, explainerName,
                        "chaseKBforGRI", datasetName);
                // filtering
                StatsUtil.timer(() -> prepareRelTracing(query), explainerName,
                        "prepareRelTracing", datasetName);
                StatsUtil.timer(this::chaseForRELPropagation, explainerName,
                        "chaseForRELPropagation", datasetName);
                StatsUtil.timer(() -> encodeClauses(query), explainerName,
                        "encodeClauses", datasetName);
                if (solver instanceof MARCOGMUSSolver) {
                    StatsUtil.timer(this::writeMarcoGCNF, explainerName,
                            "writeMarcoGCNF", datasetName);
                    StatsUtil.timer(this::solveGMUSviaMarco, explainerName,
                            "solveGMUSviaMarco", datasetName);
                    StatsUtil.timer(this::decodeGMUSesFromMarco, explainerName,
                            "decodeGMUSesFromMarco", datasetName);
                } else if (solver instanceof Sat4JSolver) {
                    StatsUtil.timer(this::solveGMUSviaSat4j, explainerName,
                            "solveGMUSviaSat4j", datasetName);
                    StatsUtil.timer(this::decodeGMUSesFromSat4j, explainerName,
                            "decodeGMUSesFromSat4j", datasetName
                    );
                }
            } else {
                StatsUtil.timer(() -> returnExplanationForQueryInFB(query), explainerName,
                        "returnExplanationForQueryInFB", datasetName);
            }
        }
    }

    /**
     * This method takes in a query and
     * (1) computes queryAncestorRules of the query
     * (2) checks whether queryAncestorRules is contained in the set of seenRules; if yes, skips (3) - (4)
     * (3) transforms the unseen rules (queryAncestorRules \ seenRules) into (i) edgeBuildingRule (ii) relPropagationRule
     * (4) adds all queryAncestorRules to seenRules
     * (5) runs the chase on the edgeBuildingRules
     * (6) adds the query seed
     * (7) reruns the chase
     * (8) computes explanations from reledges
     * (9) removes all the rel and reledge atoms
     *
     * @param query the ground atomic query to explain
     * @return the set of all explanations of the query
     */
    public Set<ExplanationType> getAllExplanations(Atom query) {
        dynamicStep(query);
        return explanations;
    }

    /**
     * Computes the ancestor rules of an atom: the rules (nodes) of the subgraph of the
     * Graph of Rule Dependencies containing all ancestors of every rule whose head
     * piece-unifies with the atom.
     *
     * @param rb rulebase
     * @param a  atom
     * @return the set of ancestor rules of {@code a} w.r.t. {@code rb}
     */
    private Set<FORule> getAncestorRules(RuleBase rb, Atom a) {
        Set<FORule> ancestorRules = new LinkedHashSet<>();
        for (FORule rule : rb.getRulesByHeadPredicate(a.getPredicate())) {
            QueryUnifierAlgorithm QUA = new QueryUnifierAlgorithm();
            Collection<QueryUnifier> unifiers = QUA.getMostGeneralSinglePieceUnifiers(
                    FOQueryFactory.instance().createOrGetQuery(a, List.of()),
                    rule
            );
            if (!unifiers.isEmpty()) {
                ancestorRules.addAll(grd.getAncestorRules(rule));
            }
        }
        return ancestorRules;
    }

    /**
     * Sets {@link #queryInInitialFB} when the query already belongs to the initial factbase,
     * in which case no chase/solving is needed.
     */
    public void checkQueryInInitialFB(Atom query) {
        if (belongsToInitialFactbase.test(query)) {
            queryInInitialFB = true;
        }
    }

    /** Produces the trivial explanation for a query that is already in the initial factbase. */
    public void returnExplanationForQueryInFB(Atom query) {
        explanations = getGMUSProcessor(solver).getQueryIsInTheIntitialFactbaseExplanation(query);
    }

    /** Resets the per-query ancestor-rule set. */
    public void clearAncestorRules() {
        queryAncestorRules.clear();
    }

    /** Fills {@link #queryAncestorRules} for the given query. */
    public void computeAncestorRules(Atom query) {
        queryAncestorRules.addAll(getAncestorRules(inputKB.getRuleBase(), query));
    }

    /**
     * This operation transforms rules and prepares
     * (1) static KB for static chase
     * (2) dynamic RB for dynamic chase
     */
    public void transformRulesForFirstStaticKB() {
        // building static KB (partial GRI)
        KnowledgeBase queryAncestorRuleKB = new KnowledgeBaseImpl(
                new SimpleInMemoryGraphStore(inputKB.getFactBase().getAtomsInMemory()),
                new RuleBaseImpl(queryAncestorRules));
        this.transformedRB = ruleTransformer.createStaticGRIBuildingRB(queryAncestorRuleKB);
        staticKB = new KnowledgeBaseImpl(inputKB.getFactBase(), transformedRB.getStaticRuleBase());
        // preparing rules for dynamic chase
        for (FORule rule : queryAncestorRules) {
            FORule transformedRelPropRule = ruleTransformer.createRelPropagationRule(rule);
            dynamicRB.add(transformedRelPropRule);
        }
        // recording the rules already seen
        seenRules.addAll(queryAncestorRules);
    }

    /**
     * Transforms the ancestor rules of the current query that were not processed for any
     * earlier query, adding their edge-building rule to the static KB and their
     * REL-propagation rule to the dynamic rulebase.
     */
    public void transformUnseenRules() {
        Set<FORule> rulesToAdd = new HashSet<>(queryAncestorRules);
        rulesToAdd.removeAll(seenRules);
        for (FORule rule : rulesToAdd) {
            FORule transformedEdgeBuildingRule = ruleTransformer.createGRIEdgeBuildingRule(rule);
            FORule transformedRelPropRule = ruleTransformer.createRelPropagationRule(rule);
            staticKB.getRuleBase().add(transformedEdgeBuildingRule);
            dynamicRB.add(transformedRelPropRule);
            seenRules.add(rule);
        }
    }

    /**
     * Captures a membership test over the factbase as it is right now, i.e. before any
     * chase adds inferred atoms. NOTE(review): this is a live closure over the factbase,
     * not a snapshot — it assumes chase inferences land elsewhere (e.g. in the federated
     * default storage); confirm against the chase configuration.
     */
    public void recordInitialFactbase() {
        belongsToInitialFactbase = atom -> inputKB.getFactBase().contains(atom);
    }

    /** Saturates the static KB, materializing the (partial) GRI edges. */
    public void chaseKBforGRI() {
        DefaultChaseForExplanations.chase(this.staticKB);
    }

    /**
     * Prepares a dynamic KB that can be used for tracing relevant atoms for the input query.
     * The dynamic KB uses a federated factbase; inferences are put on the local default
     * storage, which can be easily cleared between queries.
     *
     * @param query the ground atomic query being traced
     */
    public void prepareRelTracing(Atom query) {
        Atom RELQuery = new AtomImpl(GRIRuleTransformer.REL, GRIRuleTransformer.createFnTermIdentifier(query));
        BiMap<FORule, Predicate> ruleToEdgePredicateMap = ruleTransformer.getRuleToEdgePredicateMap();
        for (var edge : ruleToEdgePredicateMap.inverse().keySet()) {
            federatedFactBase.addStorage(edge, staticKB.getFactBase());
        }
        federatedFactBase.setDefaultStorage(new SimpleInMemoryGraphStore());
        federatedFactBase.add(RELQuery);
        if (federatedFactBase.size() != 1) {
            throw new IllegalStateException("Problem with clearing federated fact base.");
        }
        dynamicKB = new KnowledgeBaseImpl(federatedFactBase, dynamicRB);
    }

    /** Saturates the dynamic KB, propagating REL markers from the query seed. */
    public void chaseForRELPropagation() {
        DefaultChaseForExplanations.chase(dynamicKB);
    }

    /** Builds the group-SAT encoding of the traced dynamic KB for the given query. */
    public void encodeClauses(Atom query) {
        encodingResult = getGMUSProcessor(solver).getEncoder().encode(dynamicKB, query, belongsToInitialFactbase);
    }

    /** Writes the encoding as a GCNF file for MARCO. */
    public void writeMarcoGCNF() {
        marcoSolver.writeGCNF(encodingResult.propVarIDMap(), encodingResult.clauses(), encodingResult.nbGroup());
    }

    /** Runs MARCO on the written GCNF file. */
    public void solveGMUSviaMarco() {
        gmuses = marcoSolver.getGMUSes(gcnffile);
    }

    /** Decodes MARCO's GMUSes back into explanations. */
    public void decodeGMUSesFromMarco() {
        explanations = getGMUSProcessor(solver).translateMARCOGMUS(gmuses, encodingResult.factIDMap(), encodingResult.ruleIDMap());
    }

    /** Runs Sat4J GMUS enumeration directly on the in-memory clauses. */
    public void solveGMUSviaSat4j() {
        sat4jGmuses = sat4JSolver.enumerateGMUSes(encodingResult.clauses());
    }

    /** Decodes Sat4J's GMUSes back into explanations. */
    public void decodeGMUSesFromSat4j() {
        explanations = getGMUSProcessor(solver).translateSAT4JGMUS(sat4jGmuses, encodingResult.factIDMap(), encodingResult.ruleIDMap());
    }

    /**
     * @param solver the GMUS enumeration solver to delegate to
     * @return the explanation-type-specific GMUS processor
     */
    abstract ExplanationProcessor_GRI getGMUSProcessor(Solver solver);
}
package fr.boreal.explanation.kb_gri.explainers.incremental_gri;
import fr.boreal.explanation.api.processors.ExplanationProcessor_GRI;
import fr.boreal.explanation.api.solver.Solver;
import fr.boreal.explanation.kb_gri.processors.FactSupportGMUSProcessor_KBGRI;
import fr.boreal.explanation.solving_enumerating.marco.MARCOGMUSSolver;
import fr.boreal.model.kb.api.KnowledgeBase;
/**
 * Computes fact-support explanations for a knowledge base and a ground atomic query.
 * (The previous Javadoc said "kb-support", but this explainer delegates to the
 * fact-support GMUS processor.)
 */
public class FactSupportExplainer_IncrementalKBGRI extends AbstractIncrementalGRIBasedExplainer_KBGRI<KnowledgeBase> {

    /**
     * Creates an explainer using the default MARCO GMUS solver.
     *
     * @param kb the knowledge base against which explanations are computed
     */
    public FactSupportExplainer_IncrementalKBGRI(KnowledgeBase kb) {
        super(kb, new MARCOGMUSSolver());
    }

    /**
     * @param kb     the knowledge base against which explanations are computed
     * @param solver the GMUS enumeration solver to use
     */
    public FactSupportExplainer_IncrementalKBGRI(KnowledgeBase kb, Solver solver) {
        super(kb, solver);
    }

    /**
     * @param solver the GMUS enumeration solver to delegate to
     * @return the fact-support GMUS processor wrapping the given solver
     */
    @Override
    ExplanationProcessor_GRI getGMUSProcessor(Solver solver) {
        return new FactSupportGMUSProcessor_KBGRI(solver);
    }
}