Commit 9cb71b1c authored by lafabregue's avatar lafabregue

switch to gdal to compute overviews

parent d9add86f
......@@ -100,6 +100,9 @@ public abstract class Data implements Cloneable, Serializable, Iterable<DataObje
/** the ponderation weight of each constraints */
private Vector<Double> constraintsWeights = null;
/** a set of constraints attached to the Data */
protected Vector<Constraint> constraints = null;
/** types to define the source represented by the Data */
static public int DEFAULT_TYPE = 0;
static public int NOT_IMAGE_FILE_TYPE = 1;
......@@ -1484,9 +1487,10 @@ public abstract class Data implements Cloneable, Serializable, Iterable<DataObje
* @param r1 le premier resultat
* @param r2 le second resultat
* @param data2 les données du deuxième agent
* @param onWholeData specify if the confusion has to be done on the whole data or the sample
* @return une matrice de confusion entre r1 et r2
*/
public Matrix confusion(final ClusteringResult r1, final ClusteringResult r2, final Data data2) {
public Matrix confusion(final ClusteringResult r1, final ClusteringResult r2, final Data data2, boolean onWholeData) {
int cor1[], cor2[];
int w, h;
......@@ -1498,9 +1502,9 @@ public abstract class Data implements Cloneable, Serializable, Iterable<DataObje
final Matrix mat = new Matrix(w, h);
for (int p = 0; p < r1.getNbObjects(); p++) {
// cluster de p dans r1
final int i = r1.indice(cor1, r1.getClass(p, false), w);
final int i = r1.indice(cor1, r1.getClass(p, onWholeData), w);
// cluster de p dans r2
final int j = r1.indice(cor2, r2.getClass(p, false), h);
final int j = r1.indice(cor2, r2.getClass(p, onWholeData), h);
mat.matrix[i + j * w]++;
}
......@@ -1757,26 +1761,48 @@ public abstract class Data implements Cloneable, Serializable, Iterable<DataObje
*
* @return the updated set of constraints
*/
public abstract Vector<Constraint> updateAndSetConstraintsToSample(Vector<Constraint> constraints);
public Vector<Constraint> updateAndSetConstraintsToSample(Vector<Constraint> constraints) {
	// Remap the given constraints to the current sample, then store the
	// result as this Data's own constraint set before handing it back.
	final Vector<Constraint> updated = updateConstraintsToSample(constraints);
	this.constraints = updated;
	return updated;
}
/**
* Return the set of constraints attached to the data
* Update a set of constraints to match the Data, but the Data constraints are not updated.
* Be aware that if the data is sampled it might change the size of the sample
* to integrate the constrained indexes.
*
* @param constraints
* the set of constraints to add
*
* @return the updated set of constraints
*/
public abstract Vector<Constraint> updateConstraintsToSample(Vector<Constraint> constraints);
/**
* Return the set of constraints attached to the data.
* This method should never return a null pointer.
*
* @return the set of constraints
*/
public abstract Vector<Constraint> getConstraints();
/**
* Return the weights associated to the constraints
* Return the weights associated to the constraints.
* This method should never return a null pointer.
*
* @return the constraintsWeights, null if all constraints have the same weight
*/
public Vector<Double> getConstraintsWeights() {
	// Lazily create the weight vector so callers never receive null.
	Vector<Double> weights = this.constraintsWeights;
	if (weights == null) {
		weights = new Vector<Double>();
		this.constraintsWeights = weights;
	}
	return weights;
}
/**
* Return the weight associate to a constraint
* Return the weight associated with a constraint.
*
* @return the constraint weight, 1 is returned if no weight is set
*/
......
......@@ -20,6 +20,7 @@ import jcl.data.attribute.AttributeNumerical;
import jcl.data.attribute.AttributeSequence;
import jcl.data.attribute.AttributeSymbolicCategorial;
import jcl.data.mask.Mask;
import jcl.data.sampling.Sampler;
import jcl.utils.RandomizeTools;
import jcl.weights.GlobalWeights;
import jcl.weights.Weights;
......@@ -45,9 +46,6 @@ public class SimpleData extends Data {
private double estimatedMaxDistance = 0;
private double estimatedMinDistance = 0;
private boolean extremaComputed = false;
/** a set of constraints attached to the Data */
private Vector<Constraint> constraints = null;
/**
* <p>
......@@ -345,9 +343,8 @@ public class SimpleData extends Data {
}
/**
* Constructeur a partir d'un nombre d'objets.
* Constructor from a List of DataObjects
* @param data les objets contenus dans cet ensemble de donnees
* @param structure la structure des donnees
* @deprecated should specify a model; use rather {@link #Data(DataObject[], Model)}
*/
public SimpleData(final List<DataObject> data) {
......@@ -366,6 +363,29 @@ public class SimpleData extends Data {
this.wholeDataNbObjects = data.size();
}
/**
 * Constructor from a Sampler.
 * The current view and the whole-data size are both read from the sampler.
 * @param sampler the sampler to use to generate the list of DataObjects
 * @deprecated should specify a model; use rather {@link #SimpleData(Sampler, Model)}
 */
public SimpleData(final Sampler sampler) {
// the sampler provides the objects of the current view
this.currentView = sampler.getDataObjects();
// and the total number of objects in the underlying data
this.wholeDataNbObjects = sampler.getDataSize();
setSampler(sampler);
}
/**
 * Constructor from a Sampler.
 * The current view and the whole-data size are both read from the sampler.
 * @param sampler the sampler to use to generate the list of DataObjects
 * @param model the model of the desired dataset
 */
public SimpleData(final Sampler sampler, Model model) {
// the sampler provides the objects of the current view
this.currentView = sampler.getDataObjects();
// and the total number of objects in the underlying data
this.wholeDataNbObjects = sampler.getDataSize();
this.model = model;
setSampler(sampler);
}
/**
* @param data
* @param structure
......@@ -855,41 +875,45 @@ public class SimpleData extends Data {
@SuppressWarnings("unchecked")
@Override
public Vector<Constraint> updateAndSetConstraintsToSample(Vector<Constraint> oldConstraints) {
public Vector<Constraint> updateConstraintsToSample(Vector<Constraint> oldConstraints) {
Vector<Constraint> newConstraints;
if(getSampler() == null) {
// if there is no sampler the index reference is the same
this.constraints = (Vector<Constraint>) oldConstraints.clone();
newConstraints = (Vector<Constraint>) oldConstraints.clone();
} else {
// otherwise we need to rematch the indexes to the sample and include non sampled elements
this.constraints = new Vector<Constraint>();
newConstraints = new Vector<Constraint>();
for(Constraint c : oldConstraints) {
switch (c.getType()) {
case Constraint.CANNOT_LINK_TYPE :
this.constraints.add(new CannotLinkConstraint(
newConstraints.add(new CannotLinkConstraint(
getSampledIndex(((CannotLinkConstraint) c).getFirstIndex()),
getSampledIndex(((CannotLinkConstraint) c).getSecondIndex())));
break;
case Constraint.MUST_LINK_TYPE :
this.constraints.add(new MustLinkConstraint(
newConstraints.add(new MustLinkConstraint(
getSampledIndex(((MustLinkConstraint) c).getFirstIndex()),
getSampledIndex(((MustLinkConstraint) c).getSecondIndex())));
break;
case Constraint.LABEL_TYPE :
this.constraints.add(new LabelConstraint(
newConstraints.add(new LabelConstraint(
getSampledIndex(((LabelConstraint) c).getIndex()),
((LabelConstraint) c).getClassID()));
break;
default :
this.constraints.add(c);
newConstraints.add(c);
}
}
}
return this.constraints;
return newConstraints;
}
@Override
public Vector<Constraint> getConstraints() {
	// Lazy initialisation: guarantees a non-null (possibly empty) set,
	// as required by the contract declared on Data.getConstraints().
	if (constraints == null) {
		constraints = new Vector<Constraint>();
	}
	return constraints;
}
......
......@@ -89,11 +89,13 @@ public class ClusteringEvaluation {
* les donnees de la classification
* @param qualityIndex
* l'indice de qualite e calculer
* @param constraints
* set of constraints to evaluate, null if none
* @return la qualite du resultat
*/
public static QualityIndex getQuality(
final ClusteringResult clusteringResult, final Data data,
final int qualityIndex) {
final int qualityIndex, final Vector<Constraint> constraints) {
double quality = -Double.MAX_VALUE;
String name = "";
int optimization = QualityIndex.MAX;
......@@ -172,7 +174,7 @@ public class ClusteringEvaluation {
optimization = QualityIndex.MIN;
break;
case ClusteringEvaluation.BACKGROUND_KNOWLEDGE:
quality = ClusteringEvaluation.getBackgroundKnowledge(clusteringResult, data);
quality = ClusteringEvaluation.getBackgroundKnowledge(clusteringResult, data, constraints);
optimization = QualityIndex.MAX;
break;
}
......@@ -244,8 +246,7 @@ public class ClusteringEvaluation {
for (int p = 0; p < clusteringResult.getCluster(k).getCard(); p++) {
double distMin = Double.MAX_VALUE;
final DataObject object = clusteringResult.getCluster(k).getObject(
p);
final DataObject object = clusteringResult.getCluster(k).getObject(p);
for (int i = 0; i < clusteringResult.getNbClusters(); i++) {
if ((i != k) && (clusteringResult.getCard(i) > 0)) {
......@@ -944,9 +945,12 @@ public class ClusteringEvaluation {
* the clustering result to evaluate
* @param data
* the associated data
* @param constraints
* set of constraints to evaluate, null if none
* @return the index value between 0 and 1, 1 being all constraints are fulfilled
*/
private static double getBackgroundKnowledge(ClusteringResult clusteringResult, Data data) {
private static double getBackgroundKnowledge(ClusteringResult clusteringResult,
Data data, final Vector<Constraint> constraints) {
double labelGlobalWeight = 0.0;
double totalWeight = 0.0;
......@@ -955,36 +959,42 @@ public class ClusteringEvaluation {
Vector<LabelConstraint> labelConstraints = new Vector<LabelConstraint>();
Vector<Double> labelConstraintsWeights = new Vector<Double>();
for (int i = 0 ; i < data.getConstraints().size() ; i++) {
// Label constraints are evaluated separately because they are
// evaluated as a similarity between the clustering and the label classification;
// we use the Rand index
if (data.getConstraints().get(i) instanceof LabelConstraint) {
labelConstraints.add((LabelConstraint) data.getConstraints().get(i));
labelConstraintsWeights.add(data.getConstraintWeight(i));
labelGlobalWeight += data.getConstraintWeight(i);
} else {
// other constraints are evaluated normally
// we ignore if the constraints can't be evaluated (negative value)
if (qualityValue >= 0) {
totalWeight += data.getConstraintWeight(i);
qualityValue += data.getConstraints().get(i).evaluate(clusteringResult);
}
if (constraints != null) {
for (int i = 0 ; i < constraints.size() ; i++) {
// Label constraints are evaluated separately because they are
// evaluated as a similarity between the clustering and the label classification;
// we use the Rand index
if (constraints.get(i) instanceof LabelConstraint) {
labelConstraints.add((LabelConstraint) constraints.get(i));
labelConstraintsWeights.add(data.getConstraintWeight(i));
labelGlobalWeight += data.getConstraintWeight(i);
} else {
// other constraints are evaluated normally
// we ignore if the constraints can't be evaluated (negative value)
if (qualityValue >= 0) {
totalWeight += data.getConstraintWeight(i);
qualityValue += data.getConstraints().get(i).evaluate(clusteringResult);
}
}
}
}
//TODO :
// compute Rand index for label constraints
//labelGlobalWeight +=
qualityValue /= totalWeight;
// the quality value may need an adjustment:
// we rescale the index so that the first 0.3 is ignored
if (qualityValue < 0.3) {
qualityValue = 0.0;
}
qualityValue -= 0.3;
if (qualityValue < 0) {
qualityValue = 0;
}
qualityValue = qualityValue / 0.7 ;
return qualityValue;
......
......@@ -67,7 +67,7 @@ public class ClusteringQuality implements Serializable {
this.quality = new QualityIndex[result.getQualityCriteria().length];
for (int i = 0; i < result.getQualityCriteria().length; i++) {
this.quality[i] = ClusteringEvaluation.getQuality(result,
result.getData(), result.getQualityCriteria()[i]);
result.getData(), result.getQualityCriteria()[i], result.getData().getConstraints());
}
}
......
......@@ -255,7 +255,7 @@ public class ServerHybridClassificationMonitor implements Observer {
this.addInfos("*** FIN DE L'APPRENTISSAGE : " + tours + " tours");
this.addInfos("*** GAMMA FINAL : " + this.classification.getGammaMax());
this.addInfos("*** NB DE CLASSES : "
+ this.classification.unification().getNbClusters());
+ this.classification.unification(false).getNbClusters());
this.addInfos("****************************************");
this.addInfos("");
this.addInfos("Finished");
......
package jcl.learning;

import java.util.Vector;

import jcl.clustering.constraints.Constraint;

/**
 * Allow a classification to be run in an iterative manner
 *
 * @author Baptiste LAFABREGUE
 *
 */
public interface IterativeClassification {

/** Set of constraints used to compute the result */
// NOTE(review): interface fields are implicitly public static final in Java,
// so this is a shared constant that is permanently null — it cannot hold
// per-instance state. Confirm whether these two members were meant to be
// fields of the implementing classes (or accessor methods) instead.
Vector<Constraint> constraintsUsed = null;
/** Set of constraints that should be used for a next iteration */
// NOTE(review): same remark — this is a permanently-null constant.
Vector<Constraint> constraintsToUse = null;

/**
 * Launch a new iteration of the classification
 *
 * @param constraints the set of constraints to use for this iteration
 */
public void newIteration(Vector<Constraint> constraints);

/**
 * <p>
 * Return the last set of constraints used to generate this Classification.
 * <p>
 *
 * @return the set of constraints
 */
public Vector<Constraint> getConstraintsUsed();
}
......@@ -364,7 +364,9 @@ public class ClassifierKmeans extends LearningMethod {
/**
 * Merge the clusters listed in {@code cr} into one, then reclassify the data.
 * NOTE: this body reconstructs the post-change version of the method — the
 * flattened diff had left both the old and new return statements in place,
 * which is invalid Java (unreachable duplicate return).
 *
 * @param learningResult the learning result holding the seeds to merge
 * @param _result the current clustering result
 * @param data the data to reclassify
 * @param cr the indexes of the clusters to merge
 * @return the new clustering result, or null if learningResult is null
 */
public ClusteringResult merge(LearningResult learningResult, ClusteringResult _result, Data data, Vector cr) {
	// NOTE(review): the cast before the null check behaves like
	// "learningResult != null" but throws ClassCastException for any other
	// LearningResult subtype — confirm that this is intended.
	if ((LearningResultKmeans) learningResult != null) {
		((LearningResultKmeans) learningResult).mergeSeeds(cr, _result.getClusterMap());
		// run on the sample if the clustering result is not on the whole data
		return ((LearningResultKmeans) learningResult).classify(data,
				_result.getClusterMap().length != data.getWholeDataNbObjects());
	}
	return null;
}
......@@ -374,7 +376,9 @@ public class ClassifierKmeans extends LearningMethod {
if (((LearningResultKmeans) learningResult) != null) {
((LearningResultKmeans) learningResult).removeSeed(c);
// this is mandatory because of the LightCluster implementation
return ((LearningResultKmeans) learningResult).classify(data, false);
// run on the sample if clustering result is not on the whole data
return ((LearningResultKmeans) learningResult).classify(data,
_result.getClusterMap().length != data.getWholeDataNbObjects());
}
return null;
}
......@@ -391,7 +395,9 @@ public class ClassifierKmeans extends LearningMethod {
// remplace le noyau initialial du cluster c par les nouveaux noyaux
((LearningResultKmeans) learningResult).replaceSeeds(c, splitLearnResult.getSeeds());
// reclasse les donnees
ClusteringResult result = ((LearningResultKmeans) learningResult).classify(data, false);
// run on the sample if clustering result is not on the whole data
ClusteringResult result = ((LearningResultKmeans) learningResult).classify(data,
_result.getClusterMap().length != data.getWholeDataNbObjects());
result.setMethode(this.getType());
return result;
}
......
......@@ -882,7 +882,7 @@ public class LearningResultKmeans extends LearningResult {
// }
int i = startIndex;
while(i < stopIndex) {
while(i <= stopIndex) {
DataObject obj = dataset.next();
double distTemp;
double distMin = seeds.get(0).distance(obj, weights.getWeights(0));
......@@ -1000,6 +1000,7 @@ public class LearningResultKmeans extends LearningResult {
break;
case JCLModelExchange.MODEL_WEIGHT_SECTION :
weights = JCLModelExchange.stringToWeights(JCLModelExchange.getSectionBody(s));
break;
}
}
......
......@@ -160,11 +160,11 @@ public class WrapperFeatureRanking extends Wrapper {
this.initViewers(bestLearningResult);
this.display();
quality = ClusteringEvaluation.getQuality(nextClusteringResult,
data, ClusteringEvaluation.WG);
data, ClusteringEvaluation.WG, data.getConstraints());
} else {
final QualityIndex nextQuality = ClusteringEvaluation
.getQuality(nextClusteringResult, data, this
.getClusteringCriterion());
.getClusteringCriterion(), data.getConstraints());
if (nextQuality.betterThan(quality)) {
bestLearningResult = nextLearningResult;
this.updateViewers(bestLearningResult);
......
......@@ -6,6 +6,7 @@ import java.io.Serializable;
import java.util.Vector;
import jcl.clustering.ClusteringResult;
import jcl.clustering.constraints.Constraint;
import jcl.data.Data;
import jcl.learning.LearningMethod;
import jcl.learning.LearningParameters;
......@@ -279,10 +280,12 @@ public class LearningAgent implements Runnable, Serializable {
* le deuxieme agent d'apprentissage
* @param config
* la configuration de Samarah
* @param constraints
* set of constraints to evaluate, null if none
* @return la concordance locale entre l'agent courant et l'agent ag2
*/
public double localConcordance(final LearningAgent ag2,
final SamarahConfig config) {
final SamarahConfig config, final Vector<Constraint> constraints) {
double gamma = 0.0;
SamarahEval eval = null;
......@@ -297,7 +300,7 @@ public class LearningAgent implements Runnable, Serializable {
tmp2.setId(1);
ag_list.add(tmp2);
ag_list.add(tmp1);
eval = new SamarahEval(ag_list, config, null);
eval = new SamarahEval(ag_list, config, null, constraints);
gamma = eval.gamma[0][1];
}
return gamma;
......
......@@ -67,6 +67,11 @@ public class SamarahConfig implements Cloneable, Serializable {
* </p>
*/
public double importanceSimilitude = 0.5;
/**
* Weight of the constraints in the quality index computation
*/
public double importance_constraints = 0;
/** */
public int maxBadTours = 10;
......@@ -117,11 +122,6 @@ public class SamarahConfig implements Cloneable, Serializable {
* </p>
*/
public int unification = SamarahConfig.WITHOUT_UNCLASSIFIED_UNIFICATION;
/**
* Weight of the constraints in the qulity index computation
*/
public double importance_constraints = 0;
/**
* <p>
......
package jcl.learning.methods.multistrategy.samarah;
import java.io.BufferedWriter;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.Vector;
import jcl.clustering.constraints.CannotLinkConstraint;
import jcl.clustering.constraints.Constraint;
import jcl.clustering.constraints.MustLinkConstraint;
import jcl.utils.exceptions.MethodNotImplementedException;
/**
......@@ -81,18 +88,139 @@ public class SamarahConflict {
* l'evaluation des resultats
* @param tour
* le numero du tour actuel
* @param constraints
* set of constraints to evaluate, null if none
* @return une solution comprenant deux nouveaux agents avec leur nouvelle
* solution
* @throws MethodNotImplementedException
*/
public SamarahConflict operatorsApplication(final Vector cr,
final SamarahConfig config, final SamarahEval eval, final int tour)
public SamarahConflict operatorsApplication(final Vector<Integer> cr, final SamarahConfig config,
final SamarahEval eval, final int tour, final Vector<Constraint> constraints)
throws MethodNotImplementedException {
SamarahConflict solution = null;
LearningAgent ag1, ag2;
final int n = cr.size();
// System.out.println("#n: "+n);
double gamma1 = 0.0, gamma2 = 0.0, gamma3 = 0.0, gamma4 = 0.0;
//<<< test - start
FileWriter fw = null;
try {
fw = new FileWriter("log_samarah.log", true);
} catch (IOException e) {
e.printStackTrace();
}
BufferedWriter bw = new BufferedWriter(fw);
PrintWriter out = new PrintWriter(bw);
out.println("new conflict resolution =========================================================== (round "+tour+")");
Vector<MustLinkConstraint> involvedMLa1_sat = new Vector<MustLinkConstraint>();
Vector<CannotLinkConstraint> involvedCLa1_sat = new Vector<CannotLinkConstraint>();
Vector<MustLinkConstraint> involvedMLa2_sat = new Vector<MustLinkConstraint>();
Vector<CannotLinkConstraint> involvedCLa2_sat = new Vector<CannotLinkConstraint>();
Vector<MustLinkConstraint> involvedMLa1_unsat = new Vector<MustLinkConstraint>();
Vector<CannotLinkConstraint> involvedCLa1_unsat = new Vector<CannotLinkConstraint>();
Vector<MustLinkConstraint> involvedMLa2_unsat = new Vector<MustLinkConstraint>();
Vector<CannotLinkConstraint> involvedCLa2_unsat = new Vector<CannotLinkConstraint>();
for (Constraint c : constraints) {
if (c instanceof MustLinkConstraint) {
MustLinkConstraint mlc = (MustLinkConstraint) c;
int firstLabela1 = this.agent1.getClassification().getClusteringResult().getClusterMap()[mlc.getFirstIndex()];
int secondLabela1 = this.agent1.getClassification().getClusteringResult().getClusterMap()[mlc.getSecondIndex()];
int firstLabela2 = this.agent2.getClassification().getClusteringResult().getClusterMap()[mlc.getFirstIndex()];
int secondLabela2 = this.agent2.getClassification().getClusteringResult().getClusterMap()[mlc.getSecondIndex()];
boolean firstIna1 = false;
boolean secondIna1 = false;
boolean firstIna2 = false;
boolean secondIna2 = false;
if (this.classe == firstLabela1)
firstIna1 = true;
if (this.classe == secondLabela1)
secondIna1 = true;
for (int i : cr) {
if (i == firstLabela2)
firstIna2 = true;
if (i == secondLabela2)
secondIna2 = true;
}
if (firstIna1) {
if (secondIna1) {
involvedMLa1_sat.add(mlc);
} else {
involvedMLa1_unsat.add(mlc);
}
} else {
if (secondIna1) {
involvedMLa1_unsat.add(mlc);
}
}
if (firstIna2) {
if (secondIna2) {
involvedMLa2_sat.add(mlc);
} else {
involvedMLa2_unsat.add(mlc);
}
} else {
if (secondIna2) {
involvedMLa2_unsat.add(mlc);
}
}
} else {
CannotLinkConstraint clc = (CannotLinkConstraint) c;
int firstLabela1 = this.agent1.getClassification().getClusteringResult().getClusterMap()[clc.getFirstIndex()];
int secondLabela1 = this.agent1.getClassification().getClusteringResult().getClusterMap()[clc.getSecondIndex()];
int firstLabela2 = this.agent2.getClassification().getClusteringResult().getClusterMap()[clc.getFirstIndex()];
int secondLabela2 = this.agent2.getClassification().getClusteringResult().getClusterMap()[clc.getSecondIndex()];
boolean firstIna1 = false;
boolean secondIna1 = false;
boolean firstIna2 = false;
boolean secondIna2 = false;
if (this.classe == firstLabela1)
firstIna1 = true;
if (this.classe == secondLabela1)
secondIna1 = true;
for (int i : cr) {
if (i == firstLabela2)
firstIna2 = true;
if (i == secondLabela2)
secondIna2 = true;
}
if (firstIna1) {
if (secondIna1) {
involvedCLa1_unsat.add(clc);
} else {
involvedCLa1_sat.add(clc);
}
} else {
if (secondIna1) {
involvedCLa1_sat.add(clc);
}
}
if (firstIna2) {
if (secondIna2) {
involvedCLa2_unsat.add(clc);
} else {
involvedCLa2_sat.add(clc);
}
} else {
if (secondIna2) {
involvedCLa2_sat.add(clc);
}
}
}
}
String cr_toString = "";
for (int i : cr) {
cr_toString += i+", ";
}
//>>> test - end
if (n > 1) {
// Scission agent 1
ag1 = (LearningAgent) this.agent1.clone();
......@@ -100,17 +228,68 @@ public class SamarahConflict {
// Fusion agent 2
ag2 = (LearningAgent) this.agent2.clone();
ag2.merge(cr);
//<<< test - start
for (MustLinkConstraint mlc : involvedMLa1_sat) {
out.println("ml " + mlc.toString() + " : sat constraint for a"+this.agent1.getId()+" -> split " + this.classe);
}
for (MustLinkConstraint mlc : involvedMLa1_unsat) {
out.println("ml " + mlc.toString() + " : unsat constraint for a"+this.agent1.getId()+" -> split " + this.classe);
}
for (CannotLinkConstraint clc : involvedCLa1_sat) {
out.println("cl " + clc.toString() + " : sat constraint for a"+this.agent1.getId()+" -> split " + this.classe);
}
for (CannotLinkConstraint clc : involvedCLa1_unsat) {
out.println("cl " + clc.toString() + " : unsat constraint for a"+this.agent1.getId()+" -> split " + this.classe);
}
for (MustLinkConstraint mlc : involvedMLa2_sat) {
out.println("ml " + mlc.toString() + " : sat constraint for a"+this.agent2.getId()+" -> merge " + cr_toString);
}
for (MustLinkConstraint mlc : involvedMLa2_unsat) {
out.println("ml " + mlc.toString() + " : unsat constraint for a"+this.agent2.getId()+" -> merge " + cr_toString);
}
for (CannotLinkConstraint clc : involvedCLa2_sat) {
out.println("cl " + clc.toString() + " : sat constraint for a"+this.agent2.getId()+" -> merge " + cr_toString);
}
for (CannotLinkConstraint clc : involvedCLa2_unsat) {
out.println("cl " + clc.toString() + " : unsat constraint for a"+this.agent2.getId()+" -> merge " + cr_toString);
}
//>>> test - end
} else {
// Reclassement par l'agent 1
ag1 = (LearningAgent) this.agent1.clone();
ag1.reclass(this.classe);
ag2 = null;
//<<< test - start
for (MustLinkConstraint mlc : involvedMLa1_sat) {
out.println("ml " + mlc.toString() + " : sat constraint for a"+this.agent1.getId()+" -> delete " + this.classe);
}
for (MustLinkConstraint mlc : involvedMLa1_unsat) {
out.println("ml " + mlc.toString() + " : unsat constraint for a"+this.agent1.getId()+" -> delete " + this.classe);
}
for (CannotLinkConstraint clc : involvedCLa1_sat) {
out.println("cl " + clc.toString() + " : sat constraint for a"+this.agent1.getId()+" -> delete " + this.classe);
}
for (CannotLinkConstraint clc : involvedCLa1_unsat) {
out.println("cl " + clc.toString() + " : unsat constraint for a"+this.agent1.getId()+" -> delete " + this.classe);
}
//>>> test - end
}
//<<< test - start