Compare revisions

Changes are shown as if the source revision was being merged into the target revision.


Commits on Source (1)
Showing with 664 additions and 92 deletions
@@ -273,6 +273,7 @@ public enum InteGraalKeywords {
* Query Explanation
*/
QUERY_EXPLANATION,
BASELINE_EXPLANATION,
/**
* CSV Rules Encoding
*/
...
@@ -12,10 +12,16 @@ import fr.boreal.model.logicalElements.api.Term;
import fr.boreal.model.rule.api.FORule;
import fr.boreal.storage.natives.SimpleInMemoryGraphStore;
import java.io.BufferedWriter;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.*;
import java.util.Collections;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
public class QuerySelector {
@@ -97,9 +103,9 @@ public class QuerySelector {
.created_facts_as_factbase()
.getAtoms()
.filter(a -> a.getPredicate().label().startsWith("query"))
.filter(a -> !a.getPredicate().label().startsWith("constant"))
.filter(a -> !a.getPredicate().label().startsWith("owl:Thing"))
.filter(a -> Arrays.stream(a.getTerms()).noneMatch(Term::isVariable))
.filter(a -> Arrays.stream(a.getTerms()).noneMatch(t -> t.toString().startsWith("constant")))
.collect(Collectors.toList());
if (!factsProduced.isEmpty()) {
@@ -107,19 +113,8 @@
}
}
// List<List<Atom>> allQueries = new ArrayList<>();
// for (int i = 0; i < foundAxiomEntailments.size(); i++) {
// List<Atom> l = new ArrayList<>();
// for (Atom a : foundAxiomEntailments.get(i)) {
// if (a.getPredicate().label().startsWith("query")) {
// l.add(a);
// }
// }
// if (!l.isEmpty()) {
// allQueries.add(l);
// }
// }
writePredicateMatrixCsv(foundAxiomEntailments, Paths.get("predicates.csv"));
writeDlgpFiles(foundAxiomEntailments, Paths.get("dlgps"));
for (List<Atom> l : foundAxiomEntailments) {
System.out.println(l.size());
}
@@ -141,6 +136,125 @@ public class QuerySelector {
return queries;
}
public static List<Atom> getBalancedStrike(List<List<Atom>> listOfLists, int nbQueries) {
List<Atom> flatList = new ArrayList<>();
for (List<Atom> sublist : listOfLists) {
flatList.addAll(sublist);
}
if (flatList.isEmpty()) {
System.out.println("No atoms available.");
return Collections.emptyList();
}
// Group atoms by predicate
Map<String, List<Atom>> atomsByPredicate = new HashMap<>();
for (Atom atom : flatList) {
String predicate = atom.getPredicate().label();
atomsByPredicate.computeIfAbsent(predicate, k -> new ArrayList<>()).add(atom);
}
int predicateCount = atomsByPredicate.size();
int baseCount = nbQueries / predicateCount;
int leftover = nbQueries % predicateCount;
Set<Atom> resultSet = new LinkedHashSet<>();
Random random = new Random();
// Step 1: Select baseCount atoms per predicate
for (Map.Entry<String, List<Atom>> entry : atomsByPredicate.entrySet()) {
List<Atom> atoms = entry.getValue();
Collections.shuffle(atoms, random);
int limit = Math.min(baseCount, atoms.size());
resultSet.addAll(atoms.subList(0, limit));
}
// Step 2: Distribute leftovers to predicates that have room
List<Atom> leftovers = new ArrayList<>();
for (Map.Entry<String, List<Atom>> entry : atomsByPredicate.entrySet()) {
List<Atom> atoms = entry.getValue();
for (Atom atom : atoms) {
if (!resultSet.contains(atom)) {
leftovers.add(atom);
}
}
}
Collections.shuffle(leftovers, random);
Iterator<Atom> iter = leftovers.iterator();
while (resultSet.size() < nbQueries && iter.hasNext()) {
resultSet.add(iter.next());
}
// Count atoms per predicate
Map<String, Integer> predicateCounts = new TreeMap<>();
for (Atom atom : resultSet) {
String predicate = atom.getPredicate().label();
predicateCounts.put(predicate, predicateCounts.getOrDefault(predicate, 0) + 1);
}
System.out.println("Selected " + resultSet.size() + " atoms with " + predicateCounts.size() + " distinct predicates:");
for (Map.Entry<String, Integer> entry : predicateCounts.entrySet()) {
System.out.println(" " + entry.getKey() + ": " + entry.getValue());
}
return new ArrayList<>(resultSet);
}
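The selection above splits nbQueries evenly across predicates, then tops the result up from the leftover pool. A minimal, self-contained sketch of the quota arithmetic, with strings standing in for atoms (all names and counts below are hypothetical, not taken from the benchmark):

import java.util.List;
import java.util.Map;

public class BalancedStrikeQuotaDemo {
    public static void main(String[] args) {
        // Stand-in for atomsByPredicate: atoms grouped by predicate label.
        Map<String, List<String>> atomsByPredicate = Map.of(
                "worksFor", List.of("worksFor(a,u0)", "worksFor(b,u1)", "worksFor(c,u2)"),
                "teacherOf", List.of("teacherOf(p,c1)"),
                "advisorOf", List.of("advisorOf(s,p)", "advisorOf(t,q)"));
        int nbQueries = 4;
        int predicateCount = atomsByPredicate.size(); // 3 predicates
        int baseCount = nbQueries / predicateCount;   // 1 atom drawn per predicate
        int leftover = nbQueries % predicateCount;    // 1 extra, drawn from the leftover pool
        System.out.println(baseCount + " per predicate, " + leftover + " from leftovers");
    }
}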
public static List<Atom> getSimpleStrikes(List<List<Atom>> listOfLists, int nbQueries) {
List<Atom> flatList = new ArrayList<>();
for (List<Atom> sublist : listOfLists) {
flatList.addAll(sublist);
}
if (flatList.isEmpty()) {
return Collections.emptyList();
}
// Separate atoms by predicate
List<Atom> q01Atoms = new ArrayList<>();
List<Atom> q03Atoms = new ArrayList<>();
List<Atom> otherAtoms = new ArrayList<>();
for (Atom atom : flatList) {
String predicate = atom.getPredicate().label(); // Adjust as needed
if (predicate.contains("query01")) {
q01Atoms.add(atom);
} else if (predicate.contains("query03")) {
q03Atoms.add(atom);
} else {
otherAtoms.add(atom);
}
}
Set<Atom> resultSet = new LinkedHashSet<>();
resultSet.addAll(q01Atoms);
resultSet.addAll(q03Atoms);
int remaining = nbQueries - resultSet.size();
if (remaining <= 0) {
// Already have nbQueries or more atoms; just return the first nbQueries
return new ArrayList<>(resultSet).subList(0, nbQueries);
}
List<Atom> workingPool = new ArrayList<>(otherAtoms);
Random random = new Random();
while (resultSet.size() < nbQueries) {
int sizeBefore = resultSet.size();
Collections.shuffle(workingPool, random);
for (Atom atom : workingPool) {
resultSet.add(atom);
if (resultSet.size() == nbQueries) {
break;
}
}
// Guard: once the pool is exhausted, a full pass adds nothing; stop rather than loop forever.
if (resultSet.size() == sizeBefore) {
break;
}
}
return new ArrayList<>(resultSet);
}
public static List<Atom> sampleQueries(List<List<Atom>> lists, int nbQueries) {
if (lists == null || lists.isEmpty()) {
throw new IllegalArgumentException("The outer list must not be null or empty.");
@@ -195,4 +309,106 @@ public class QuerySelector {
return picked;
}
public static void writePredicateMatrixCsv(List<List<Atom>> levels,
Path csvTarget) throws IOException {
Map<String,int[]> matrix = buildCountMatrix(levels); // ← counts, not mere presence
int levelCount = levels.size();
try (BufferedWriter w = Files.newBufferedWriter(csvTarget)) {
// Header
w.write("Predicate");
for (int i = 0; i < levelCount; i++) w.write(",L" + i);
w.write(",Total"); // ← summary column (optional)
w.newLine();
for (Map.Entry<String,int[]> e : matrix.entrySet()) {
w.write(e.getKey());
int sum = 0;
for (int c : e.getValue()) {
w.write(',' + Integer.toString(c)); // "0" if absent
sum += c;
}
w.write(',' + Integer.toString(sum)); // ← total over all levels
w.newLine();
}
}
}
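For reference, the CSV written above has one row per predicate (in alphabetical order, since the matrix is a TreeMap), one count column per level, and a trailing Total column. A hypothetical two-level output:

Predicate,L0,L1,Total
advisorOf,2,0,2
teacherOf,3,1,4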
private static Map<String,int[]> buildCountMatrix(List<List<Atom>> levels) {
int levelCount = levels.size();
Map<String,int[]> matrix = new TreeMap<>();
Pattern name = Pattern.compile("^\\s*([^(\\s]+)\\s*\\(");
for (int lvl = 0; lvl < levelCount; lvl++) {
for (Atom a : levels.get(lvl)) {
String fact = a.toString();
Matcher m = name.matcher(fact);
if (m.find()) {
String p = m.group(1);
matrix.computeIfAbsent(p, k -> new int[levelCount])[lvl]++; // ← ++
}
}
}
return matrix;
}
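The pattern used above captures the token before the first opening parenthesis of an atom's string form. A small self-contained check (the fact string is made up for illustration):

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class PredicateNameRegexDemo {
    public static void main(String[] args) {
        Pattern name = Pattern.compile("^\\s*([^(\\s]+)\\s*\\(");
        Matcher m = name.matcher("worksFor(ann, uni0).");
        if (m.find()) {
            System.out.println(m.group(1)); // prints: worksFor
        }
    }
}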
/**
* Creates one DLGP file per level.
*
* @param levels List of levels, each level being a list of atoms (serialized as fact strings)
* @param outputFolder Directory to receive the files (must exist or be creatable)
* @throws IOException if writing fails
*/
public static void writeDlgpFiles(List<List<Atom>> levels,
Path outputFolder) throws IOException {
if (!Files.exists(outputFolder))
Files.createDirectories(outputFolder);
for (int i = 0; i < levels.size(); i++) {
Path file = outputFolder.resolve("level_" + i + ".dlgp");
try (BufferedWriter w = Files.newBufferedWriter(file)) {
// Write the DLGP section header once per file, not once per fact.
w.write("@facts");
w.write(System.lineSeparator());
for (Atom a : levels.get(i)) {
String fact = a.toString();
w.write(fact.trim());
if (!fact.trim().endsWith(".")) w.write(".");
w.write(System.lineSeparator());
}
}
}
}
}
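With the section header written once per file, a generated level_0.dlgp would look like the following (the facts are hypothetical; the writer appends the trailing period whenever the serialized atom lacks one):

@facts
worksFor(a,u0).
teacherOf(p,c1).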
// ──────────────────────────────────────────────────────────────────────────
// Helpers
// ──────────────────────────────────────────────────────────────────────────
private static final Pattern PREDICATE_NAME =
Pattern.compile("^\\s*([^(\\s]+)\\s*\\(");
/** Builds a predicate-to-level presence matrix */
private static Map<String, boolean[]> buildPresenceMatrix(List<List<Atom>> levels) {
int levelCount = levels.size();
Map<String, boolean[]> matrix = new TreeMap<>(); // alpha order for readability
for (int lvl = 0; lvl < levelCount; lvl++) {
for (Atom a : levels.get(lvl)) {
String fact = a.toString();
Matcher m = PREDICATE_NAME.matcher(fact);
if (m.find()) {
String pred = m.group(1);
matrix.computeIfAbsent(pred, k -> new boolean[levelCount])[lvl] = true;
} else {
// silently ignore malformed lines; could also throw
}
}
}
return matrix;
}
}
@@ -283,6 +283,7 @@ public abstract class AbstractStaticGRIBasedExplainer_KBGRI<ExplanationType> imp
StatsUtil.timerWithOrderedQuery(this::decodeGMUSesFromSat4j, queryNumber, explainerName, "Decoding GMUSes from sat4j", datasetName);
}
StatsUtil.recordSize((long) encodingResult.clauses().size(), queryNumber, explainerName, "# Clauses", datasetName);
StatsUtil.recordSize((long) explanations.size(), queryNumber, explainerName, "Explanations", datasetName);
} else if (queryInInitialFB) {
...
@@ -9,25 +9,28 @@ import fr.boreal.explanation.configuration.DefaultChaseForExplanations;
import fr.boreal.explanation.configuration.StatsUtil;
import fr.boreal.explanation.kb_gri.rule_transformation.GRIRuleTransformer;
import fr.boreal.explanation.kb_gri.rule_transformation.static_gri.RulesetForStaticGRIBuildingAndTracing;
import fr.boreal.explanation.solving_enumerating.hybrid.HybridSAT4JMARCOSolver;
import fr.boreal.explanation.solving_enumerating.marco.MARCOGMUSSolver;
import fr.boreal.explanation.solving_enumerating.sat4j.Sat4JSolver;
import fr.boreal.grd.api.GraphOfFORuleDependencies;
import fr.boreal.grd.impl.GRDImpl;
import fr.boreal.model.kb.api.KnowledgeBase;
import fr.boreal.model.kb.api.RuleBase;
import fr.boreal.model.kb.impl.KnowledgeBaseImpl;
import fr.boreal.model.kb.impl.RuleBaseImpl;
import fr.boreal.model.logicalElements.api.Atom;
import fr.boreal.model.logicalElements.api.Predicate;
import fr.boreal.model.logicalElements.impl.AtomImpl;
import fr.boreal.model.query.factory.FOQueryFactory;
import fr.boreal.model.rule.api.FORule;
import fr.boreal.storage.natives.SimpleInMemoryGraphStore;
import fr.boreal.unifier.QueryUnifier;
import fr.boreal.unifier.QueryUnifierAlgorithm;
import fr.boreal.views.FederatedFactBase;
import org.apache.commons.lang3.NotImplementedException;
import java.io.File;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import static fr.boreal.explanation.configuration.PathFinder.ensureFilePath;
import java.io.IOException;
import java.util.*;
public abstract class AbstractStaticGRIBasedExplainer_Baseline<ExplanationType> implements AtomicQueryExplainer<ExplanationType> {
@@ -39,6 +42,13 @@ public abstract class AbstractStaticGRIBasedExplainer_Baseline<ExplanationType>
protected KnowledgeBase inputKB;
private FederatedFactBase federatedFactBase;
/**
* Ancestor Rules
*/
private static GraphOfFORuleDependencies grd;
private Set<FORule> queryAncestorRules = new HashSet<>();
/**
* RuleTransforming
*/
@@ -49,10 +59,16 @@ public abstract class AbstractStaticGRIBasedExplainer_Baseline<ExplanationType>
/**
* Static and dynamic knowledge bases
*/
protected KnowledgeBase staticKB;
protected KnowledgeBase dynamicKB;
/**
* Rule History
*/
private List<RuleBase> staticRuleHistory = new ArrayList<>();
private List<RuleBase> dynamicRuleHistory = new ArrayList<>();
/**
* Intermediate objects from the pipeline
*/
@@ -62,8 +78,10 @@ public abstract class AbstractStaticGRIBasedExplainer_Baseline<ExplanationType>
// preprocessing
java.util.function.Predicate<Atom> belongsToInitialFactbase;
boolean queryEntailed = false;
boolean queryInInitialFB = false;
protected boolean queryEntailed = false;
protected boolean queryInInitialFB = false;
// memory of entailed queries
Set<Atom> entailedQueryCache = new HashSet<>();
// encoding
GSATEncodingResult_GRI encodingResult;
@@ -74,10 +92,13 @@ public abstract class AbstractStaticGRIBasedExplainer_Baseline<ExplanationType>
// marco
MARCOGMUSSolver marcoSolver = new MARCOGMUSSolver();
File gcnffile = new File(ensureFilePath("integraal-explanation/gsat.gcnf", "gsat.gcnf"));
String tmpFileName = "gsat.gcnf";
File gcnffile = new File(tmpFileName);
// decoding
protected Set explanations;
protected Set<ExplanationType> explanations;
protected Set<String> gmuses;
@@ -108,14 +129,13 @@ public abstract class AbstractStaticGRIBasedExplainer_Baseline<ExplanationType>
* Dynamic step
*
* @param query the atomic query to explain
*/
public void dynamicStep(Atom query) {
if (this.explanations != null) {
dynamicStepClear();
}
checkQueryEntailment(query);
checkQueryInInitialFB(query);
checkQueryEntailment(query);
if (queryEntailed && !queryInInitialFB) {
encodeClauses(query);
if (solver instanceof MARCOGMUSSolver) {
@@ -125,6 +145,8 @@ public abstract class AbstractStaticGRIBasedExplainer_Baseline<ExplanationType>
} else if (solver instanceof Sat4JSolver) {
solveGMUSviaSat4j();
decodeGMUSesFromSat4j();
} else if (solver instanceof HybridSAT4JMARCOSolver) {
solveViaHybridSolver();
}
} else if (queryInInitialFB) {
returnExplanationForQueryInFB(query);
@@ -133,14 +155,10 @@ public abstract class AbstractStaticGRIBasedExplainer_Baseline<ExplanationType>
}
}
private void dynamicStepClear() {
explanations.clear();
if (gmuses != null) {
gmuses.clear();
}
if (sat4jGmuses != null) {
sat4jGmuses.clear();
}
public void dynamicStepClear() {
explanations = new HashSet<>();
gmuses = new HashSet<>();
sat4jGmuses = new ArrayList<>();
}
/*
@@ -152,9 +170,8 @@ public abstract class AbstractStaticGRIBasedExplainer_Baseline<ExplanationType>
StatsUtil.timerSumAllQueries(this::transformRules, explainerName, "Transforming rules", datasetName);
StatsUtil.timerSumAllQueries(this::prepareKBforGRI, explainerName, "Preparing KB for GRI", datasetName);
StatsUtil.timerSumAllQueries(this::chaseKBforGRI, explainerName, "Chasing on staticKB for GRI", datasetName);
StatsUtil.timerChasePerEachQuery(this::chaseKBforGRI, staticKB, explainerName, "Chasing on staticKB for GRI", datasetName);
StatsUtil.timerSumAllQueries(this::recordInitialFactbase, explainerName, "Recording initial factbase", datasetName);
StatsUtil.timerSumAllQueries(this::prepareEncoding, explainerName, "Preparing Encoding", datasetName);
for (Atom query : queries) {
if (this.explanations != null) {
@@ -185,7 +202,7 @@ public abstract class AbstractStaticGRIBasedExplainer_Baseline<ExplanationType>
}
public void pipelineIn2StepsWithTimer (String datasetName, Collection<Atom> queries) {
public void pipelineIn2StepsWithTimer(String datasetName, Collection<Atom> queries) {
String explainerName = this.getClass().getSimpleName() + " + " + this.solver.getClass().getSimpleName();
StatsUtil.timerPerEachQuery(this::staticStep, explainerName, "Static Step", datasetName);
@@ -198,9 +215,80 @@ public abstract class AbstractStaticGRIBasedExplainer_Baseline<ExplanationType>
@Override
public void pipelineWithOrderedQuery(String datasetName, Collection<Atom> queries) {
throw new NotImplementedException();
String explainerName = this.getClass().getSimpleName() + " + " + this.solver.getClass().getSimpleName();
StatsUtil.timerWithOrderedQuery(this::transformRules, "Pre-first-query", explainerName, "Transforming rules", datasetName);
StatsUtil.timerWithOrderedQuery(this::prepareKBforGRI, "Pre-first-query", explainerName, "Preparing KB for GRI", datasetName);
StatsUtil.timerWithOrderedQuery(this::chaseKBforGRI, "Pre-first-query", explainerName, "Chasing on staticKB for GRI", datasetName);
StatsUtil.timerWithOrderedQuery(this::recordInitialFactbase, "Pre-first-query", explainerName, "Recording initial factbase", datasetName);
StatsUtil.timerWithOrderedQuery(this::buildingGRD, "Pre-first-query", explainerName, "Building GRD", datasetName);
int queryIndex = 0;
for (Atom query : queries) {
queryIndex++;
String queryNumber = "Q" + queryIndex;
// String queryNumber = query.toString();
tmpFileName = datasetName + queryIndex + "_static_gsat.gcnf";
String prefix = datasetName + queryIndex + "_static_gsat_";
String suffix = ".gcnf";
// gcnffile = new File(tmpFileName);
// No local declaration here: assign the gcnffile field so writeMarcoGCNF and solveGMUSviaMarco use this per-query temp file.
try {
gcnffile = File.createTempFile(prefix, suffix);
} catch (IOException e) {
throw new RuntimeException(e);
}
gcnffile.deleteOnExit();
StatsUtil.recordSize((long) staticKB.getFactBase().size(), queryNumber, explainerName, "GRI size", datasetName);
if (this.explanations != null) {
StatsUtil.timerWithOrderedQuery(this::dynamicStepClear, queryNumber, explainerName,
"Clearing gmuses and explanations for the previous query",
datasetName);
}
StatsUtil.timerWithOrderedQuery(() -> checkQueryEntailment(query), queryNumber, explainerName, "Checking if query is entailed", datasetName);
StatsUtil.timerWithOrderedQuery(() -> checkQueryInInitialFB(query), queryNumber, explainerName, "Checking if query is already in input FB", datasetName);
if (queryEntailed && !queryInInitialFB) {
StatsUtil.timerWithOrderedQuery(this::clearAncestorRules, queryNumber, explainerName, "Clearing Ancestor Rules", datasetName);
StatsUtil.timerWithOrderedQuery(() -> computeAncestorRules(query), queryNumber, explainerName, "Computing Ancestor Rules for each query", datasetName);
staticRuleHistory.add(staticKB.getRuleBase());
StatsUtil.timerWithOrderedQuery(() -> prepareEncoding(), queryNumber, explainerName, "Prepare Encoding", datasetName);
StatsUtil.timerWithOrderedQuery(() -> encodeClauses(query), queryNumber, explainerName, "Encoding clauses", datasetName);
if (solver instanceof MARCOGMUSSolver) {
StatsUtil.timerWithOrderedQuery(this::writeMarcoGCNF, queryNumber, explainerName, "Writing GCNF", datasetName);
StatsUtil.timerMarcoOrderedQueries(this::solveGMUSviaMarco, queryNumber, explainerName, "Computing GMUSes via Marco", datasetName);
StatsUtil.timerWithOrderedQuery(this::decodeGMUSesFromMarco, queryNumber, explainerName, "Decoding GMUSes from Marco", datasetName);
} else if (solver instanceof Sat4JSolver) {
StatsUtil.timerWithOrderedQuery(this::solveGMUSviaSat4j, queryNumber, explainerName, "Computing GMUSes with sat4j", datasetName);
StatsUtil.timerWithOrderedQuery(this::decodeGMUSesFromSat4j, queryNumber, explainerName, "Decoding GMUSes from sat4j", datasetName);
}
StatsUtil.recordSize((long) encodingResult.clauses().size(), queryNumber, explainerName, "# Clauses", datasetName);
StatsUtil.recordSize((long) explanations.size(), queryNumber, explainerName, "Explanations", datasetName);
} else if (queryInInitialFB) {
StatsUtil.timerWithOrderedQuery(() -> returnExplanationForQueryInFB(query), queryNumber, explainerName, "Returning the explanation for query already in FB", datasetName);
} else {
returnEmptySetForExplanation();
}
// System.out.println("Static GRI Query Ancestor Rules: " + queryAncestorRules);
// System.out.println("query: " + query);
}
// System.out.println("Static GRI REL-tracing RB: " + dynamicRuleHistory);
}
/**
* Saturates the GRI with Tracing rules to compute relevant bodyAtoms and triggers for the query.
* Then calls the solver.
@@ -217,6 +305,38 @@ public abstract class AbstractStaticGRIBasedExplainer_Baseline<ExplanationType>
return explanations;
}
/**
* Computes the query's ancestor rules: the set of rules (nodes of the Graph of Rule
* Dependencies) that are ancestors of some rule able to produce the query atom.
*
* @param rb the rule base
* @param a the query atom
* @return the ancestor rules of every rule whose head unifies with the given atom
*/
private static Set<FORule> getAncestorRules(RuleBase rb, Atom a) {
Set<FORule> ancestorRules = new LinkedHashSet<>();
for (FORule rule : rb.getRulesByHeadPredicate(a.getPredicate())) {
QueryUnifierAlgorithm QUA = new QueryUnifierAlgorithm();
Collection<QueryUnifier> unifiers = QUA.getMostGeneralSinglePieceUnifiers(
FOQueryFactory.instance().createOrGetQuery(a, List.of()),
rule
);
if (!unifiers.isEmpty()) {
ancestorRules.addAll(grd.getAncestorRules(rule, ancestorRules));
}
}
return ancestorRules;
}
public void buildingGRD() {
grd = new GRDImpl(inputKB.getRuleBase(), List.of());
}
public void transformRules() {
this.transformedRB = ruleTransformer.createStaticGRIBuildingRB(inputKB);
}
@@ -237,11 +357,12 @@ public abstract class AbstractStaticGRIBasedExplainer_Baseline<ExplanationType>
Atom queryWithFuncId = GRIRuleTransformer.instance().createAtomWithStoredFnTermIdentifier(query);
if (staticKB.getFactBase().contains(queryWithFuncId)) {
queryEntailed = true;
this.entailedQueryCache.add(query);
}
}
public void returnEmptySetForExplanation() {
explanations = Set.of();
explanations = new HashSet<>();
}
public void checkQueryInInitialFB(Atom query) {
@@ -254,11 +375,14 @@ public abstract class AbstractStaticGRIBasedExplainer_Baseline<ExplanationType>
explanations = getGMUSProcessor(solver).getQueryIsInTheIntitialFactbaseExplanation(query);
}
public void clearAncestorRules() {
queryAncestorRules.clear();
}
public void computeAncestorRules(Atom query) {
queryAncestorRules.addAll(getAncestorRules(inputKB.getRuleBase(), query));
}
/**
* Prepares a dynamic KB that can be used for tracing relevant atoms for the input query.
* The dynamic KB uses a federated factbase; inferences will be put on the local storage, which can be eas
*/
public void prepareEncoding() {
SimpleInMemoryGraphStore encodingFB = new SimpleInMemoryGraphStore();
@@ -286,19 +410,19 @@ public abstract class AbstractStaticGRIBasedExplainer_Baseline<ExplanationType>
dynamicKB = new KnowledgeBaseImpl(encodingFB, transformedRB.getDynamicRuleBase());
}
public void encodeClauses(Atom query) {
encodingResult = getGMUSProcessor(solver).getEncoder().encode(dynamicKB, query, belongsToInitialFactbase);
}
public void writeMarcoGCNF() {
marcoSolver.writeGCNF(encodingResult.propVarIDMap(), encodingResult.clauses(), encodingResult.nbGroup());
marcoSolver.writeGCNF(encodingResult.propVarIDMap(), encodingResult.clauses(), encodingResult.nbGroup(), gcnffile);
}
public void solveGMUSviaMarco() {
gmuses = marcoSolver.getGMUSes(gcnffile);
}
public void decodeGMUSesFromMarco() {
explanations = getGMUSProcessor(solver).translateMARCOGMUS(gmuses, encodingResult.factIDMap(), encodingResult.ruleIDMap());
}
@@ -311,15 +435,67 @@ public abstract class AbstractStaticGRIBasedExplainer_Baseline<ExplanationType>
explanations = getGMUSProcessor(solver).translateSAT4JGMUS(sat4jGmuses, encodingResult.factIDMap(), encodingResult.ruleIDMap());
}
public void solveViaHybridSolver() {
var result = ((HybridSAT4JMARCOSolver) solver).solve(encodingResult);
switch (result.solver()) {
case MARCOGMUSSolver ignored -> {
explanations = getGMUSProcessor(solver).translateMARCOGMUS((Set<String>) result.solverResult(), encodingResult.factIDMap(), encodingResult.ruleIDMap());
}
case Sat4JSolver ignored ->
explanations = getGMUSProcessor(solver).translateSAT4JGMUS((List<List<List<Integer>>>) result.solverResult(), encodingResult.factIDMap(), encodingResult.ruleIDMap());
default -> throw new IllegalStateException("Unsupported solver");
}
}
abstract ExplanationProcessor_GRI getGMUSProcessor(Solver solver);
public boolean isQueryEntailed(Atom query){
throw new NotImplementedException();
public boolean isQueryEntailed(Atom query) {
if (this.entailedQueryCache.contains(query)) {
return true;
}
Atom queryWithFuncId = GRIRuleTransformer.instance().createAtomWithStoredFnTermIdentifier(query);
if (staticKB.getFactBase().contains(queryWithFuncId)) {
entailedQueryCache.add(query);
return true;
}
return false;
}
public boolean isQueryInInitialFB(Atom query) {
return this.belongsToInitialFactbase.test(query);
}
public Set<ExplanationType> getLastComputedExplanations() {
return explanations;
}
public boolean isQueryInInitialFB(Atom query){
throw new NotImplementedException();
public GSATEncodingResult_GRI getEncoding() {
return encodingResult;
}
public String getPrintableStatistics() {
StringBuilder result = new StringBuilder();
result.append("\n\nStatistics for: " + this.getClass().getSimpleName() + "\n");
result.append("GRInstance size " + this.staticKB.getFactBase().size() + "\n");
result.append("number of clauses " + this.encodingResult.clauses().size() + "\n");
result.append("number of explanations " + this.explanations.size() + "\n");
result.append("\n");
return result.toString();
}
public String getPrintableStatisticsXML() {
StringBuilder result = new StringBuilder();
result.append("<statistics>\n");
result.append(" <component>").append(this.getClass().getSimpleName()).append("</component>\n");
result.append(" <grInstanceSize>").append(this.staticKB.getFactBase().size()).append("</grInstanceSize>\n");
result.append(" <numberOfClauses>").append(this.encodingResult.clauses().size()).append("</numberOfClauses>\n");
result.append(" <numberOfExplanations>").append(this.explanations.size()).append("</numberOfExplanations>\n");
result.append("</statistics>");
return result.toString();
}
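Rendered with hypothetical sizes, the XML above comes out as:

<statistics>
  <component>KBSupportExplainer_Baseline</component>
  <grInstanceSize>10432</grInstanceSize>
  <numberOfClauses>2871</numberOfClauses>
  <numberOfExplanations>3</numberOfExplanations>
</statistics>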
}
@@ -6,6 +6,7 @@ import fr.boreal.explanation.configuration.StatsUtil;
import fr.boreal.explanation.configuration.query_selection.QuerySelector;
import fr.boreal.explanation.kb_gri.explainers.incremental_gri.KBSupportExplainer_IncrementalKBGRI;
import fr.boreal.explanation.kb_gri.explainers.static_gri.KBSupportExplainer_KBGRI;
import fr.boreal.explanation.kb_gri.explainers.static_gri_baseline.KBSupportExplainer_Baseline;
import fr.boreal.model.kb.api.KnowledgeBase;
import fr.boreal.model.logicalElements.api.Atom;
import fr.boreal.io.dlgp.DlgpWriter;
@@ -30,45 +31,49 @@ public class compareExplainers {
// static String benchPath = "integraal-explanation/src/test/resources/mowl-bench-main/";
public static List<String> datasets = List.of(
"warmup",
// "218a127c-8d13-40af-9957-63c28b0a091e_elontology",
// "3cb9dec1-627b-496f-b49a-543dd1c77837_snoopy",
// "b80a039c-8975-4928-b45f-017126159882_sification",
// "98523b14-b04e-4af0-9597-15a297e226eb_taEncoding",
// "d09d7487-43dd-452a-976a-b69bc44d9b06_onOntology",
// "08cb4085-d900-4f82-8fa8-f523e382008d_ESG_test_1",
// "04c0af47-ff23-4761-8a4e-8fec2389f89f_ESG_test_2",
// "0b1f5620-f71d-4a01-8a16-d1818f69c187_ncbi_rank",
// "7b46d780-2032-46e3-ac89-90ab183e22aa_properties",
// "bb6535f4-d6a3-4197-a678-7f464e737862_properties",
// "4c3c8c62-d8f4-42c0-a96c-c994ece58345_catalogue",
// "734871d0-bf0d-47b3-bc19-b484567f5f4f_rmOntology",
// "93c3774c-58c0-4fa9-a592-539c130ee2d7_AlgorithmTypes",
// "f8a8e732-205b-4703-b568-2ce69e524055_instances",
// "f32019cc-43f3-49ce-b4f2-235f7db97132_oil",
// "8ee7a7f5-ad8c-4ac5-afbe-207e3ff285c8_HARMONISE",
// "ddd6d023-65dd-429a-a01f-2784c1d61f65_WNAffect",
// "2d09369e-392a-4ef2-a7c9-28334cbdc6f8_ddex",
// "dce07111-010a-450b-8c3a-2d55a8b48a21_LSCOM",
// "3b327e7f-0409-4f52-8e69-7062a62f7ba1_undin_area"
// "c669669d-7a04-410c-9474-1f6e46230556_BioLinks",
// "1dd81ef3-1e3e-4b2f-9bd0-230123497968_atOntology",
// "80bf8685-0e15-4780-8e61-7130e4e777c6_00Ontology",
// "ab726941-765f-4e5e-b6c9-2d51b652e841_00Ontology"
"lubm001"
// "warmup",
// "lubm001"
// "lubm010"
// "andersen_1000"
"andersen_1000"
// "andersen_5000"
// "andersen_50000"
// "andersen_10000"
// "andersen_100000"
);
// "dataset_1",
// "dataset_2",
// "dataset_3",
// "dataset_4",
// "dataset_5",
// "dataset_6",
// "dataset_7",
// "dataset_8",
// "dataset_9",
// "dataset_10",
// "dataset_11",
// "dataset_12",
// "dataset_13",
// "dataset_14",
// "dataset_15",
// "dataset_16",
// "dataset_17",
// "dataset_18",
// "dataset_19",
// "dataset_20",
// "dataset_21",
// "dataset_22",
// "dataset_23",
// "dataset_24",
// "dataset_25",
// "dataset_26",
// "dataset_27"
);
enum AtomicQueryExplainerType {
// STATIC_GRI_FB_MARCO,
// STATIC_GRI_RB_MARCO,
// STATIC_GRI_KB_MARCO
// STATIC_GRI_KB_MARCO,
// STATIC_GRI_KB_S4J,
// STATIC_GRI_FB_S4J,
// STATIC_GRI_RB_S4J,
@@ -78,8 +83,8 @@ public class compareExplainers {
// TRACKER_GRI_FB_S4J,
// TRACKER_GRI_KB_S4J,
// TRACKER_GRI_RB_S4J,
INCREMENTAL_GRI_KB_MARCO
// BASELINE_GRI_KB_MARCO
// INCREMENTAL_GRI_KB_MARCO
BASELINE_GRI_KB_MARCO
// TRACKER_GRI_UNAMBIGUOUS_PROOF_TREE_WHY_PROVENANCE
}
@@ -99,15 +104,15 @@ public class compareExplainers {
// case TRACKER_GRI_FB_S4J -> new FactSupportExplainer_TrackerGRI(kb, new Sat4JSolver());
// case TRACKER_GRI_KB_S4J -> new KBSupportExplainer_TrackerGRI(kb, new Sat4JSolver());
// case TRACKER_GRI_RB_S4J -> new RuleSupportExplainer_TrackerGRI(kb, new Sat4JSolver());
case INCREMENTAL_GRI_KB_MARCO -> new KBSupportExplainer_IncrementalKBGRI(kb);
// case BASELINE_GRI_KB_MARCO -> new KBSupportExplainer_Baseline(kb);
// case INCREMENTAL_GRI_KB_MARCO -> new KBSupportExplainer_IncrementalKBGRI(kb);
case BASELINE_GRI_KB_MARCO -> new KBSupportExplainer_Baseline(kb);
// case TRACKER_GRI_UNAMBIGUOUS_PROOF_TREE_WHY_PROVENANCE -> new UnambiguousProofTreeWhyProvenanceExplainer_TrackerGRI(kb);
};
}
public static void main(String[] args) throws IOException, InterruptedException {
String strike = "10_4_queries";
String strike = "1";
if (args.length > 0) {
strike = args[0];
}
@@ -167,8 +172,7 @@ public class compareExplainers {
static Collection<Atom> loadQueryLubm(KnowledgeBase inputKB, String datasetName, Boolean record, int nbQueries, String strike) throws IOException {
Collection<Atom> queries = new ArrayList<>();
// String strPath = benchPath + datasetName + "/"+datasetName + "_" + nbQueries + "_queries_strike_" + strike + ".dlgp";
// /home/akira/IdeaProjects/integraal/integraal/integraal-explanation/src/test/resources/ruleml24/lubm001/all_queries/output/
String strPath = benchPath + datasetName + "/all_queries/output/query"+strike+".dlgp";
String strPath = "/home/akira/git/owlapi-comparison/data/lubm001/full/queries/complex_strikes/without_iri/strike1.dlgp";
// System.out.println(strPath);
File queryFile = new File(strPath);
@@ -199,12 +203,13 @@ public class compareExplainers {
static Collection<Atom> loadQueryStrike(KnowledgeBase inputKB, String datasetName, Boolean record, int nbQueries, String strike) throws IOException {
Collection<Atom> queries = new ArrayList<>();
String strPath = benchPath + datasetName + "/"+datasetName + "_" + nbQueries + "_queries_strike_" + strike + ".dlgp";
String strPath = benchPath + datasetName + "/"+datasetName + "_strike" + strike + ".dlgp";
System.out.println(strPath);
File queryFile = new File(strPath);
if (queryFile.exists()) {
System.out.println("yes");
queries = InternalDLGPParser.ParserForDLGP.parseQueries(queryFile);
} else {
queries = QuerySelector.selectQueries(inputKB, nbQueries);
...
@@ -134,7 +134,7 @@ public class MARCOGMUSSolver implements Solver {
try {
while ((line = in.readLine()) != null) {
System.out.println(line);
// System.out.println(line);
if (line.startsWith("Traceback (most recent call last)")) {
System.out.println("Locally installed MARCO had an execution error. Trying to see if the docker-image is active. \n");
@@ -142,13 +142,13 @@ public class MARCOGMUSSolver implements Solver {
}
if (line.startsWith("U")) {
gmuses.add(line.split("U ")[1]);
System.out.println(line);
// System.out.println(line);
} else {
if (line.startsWith("S")) {
// satisfiable set
System.out.println(line);
// System.out.println(line);
} else {
System.err.println(line);
System.out.println(line);
}
}
}
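For context, the loop above parses MARCO's line-oriented output: as far as this parsing logic assumes, U-prefixed lines carry unsatisfiable subsets (the GMUSes collected here), S-prefixed lines carry satisfiable subsets, and anything else is treated as diagnostics. A hypothetical output fragment:

S 1 2 4
U 2 3
U 1 5 6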
...