Commit ee095199 authored by lafabregue's avatar lafabregue

finalisation of merge with master

parent d6fa1b3c
......@@ -7,7 +7,6 @@ import jcl.data.distance.Distance;
import jcl.data.distance.DistanceModel;
import jcl.data.distance.DistanceParameter;
import jcl.data.distance.MetaDistance;
import jcl.weights.Weights;
/**
* <p>
......
......@@ -48,6 +48,7 @@ public class SimpleData extends Data {
* </p>
*/
public SimpleData() {
super();
}
/**
......
......@@ -212,11 +212,6 @@ public class AttributeHistogram extends Attribute {
}
}
@Override
public double distance(final Attribute a) {
return AttributeHistogram.distanceDTW(this, (AttributeHistogram) a);
}
/**
* Calcule la distance DTW entre 2 séquences sans omettre de points
......
......@@ -248,20 +248,6 @@ public class AttributeSequence extends Attribute {
}
/**
* @deprecated
*/
@Override
public double distance(final Attribute a) {
AttributeSequence b = (AttributeSequence) a;
if (AttributeSequence.mode == AttributeSequence.EUCLIDIEN) {
return AttributeSequence.distanceEuc(this, (AttributeSequence) b);
} else {
return AttributeSequence.distanceDTW(this, (AttributeSequence) b);
}
}
/**
* Calcule la distance DTW entre 2 séquences sans omettre de points
*
......@@ -418,20 +404,6 @@ public class AttributeSequence extends Attribute {
}
public static Attribute mean(final Attribute[] tabSequence) {
switch (AttributeSequence.mode) {
case EUCLIDIEN:
return AttributeSequence.euclidianMean(tabSequence);
case DTW_BARYCENTRE:
return AttributeSequence.mean(tabSequence, null);
case DYNAMIC_TIME_WARPING:
return AttributeSequence.NLAAFMean(tabSequence);
default:
return null;
}
}
public static Attribute medoid(final AttributeSequence[] tabSequence) {
double distanceTmp = 0.0;
double tampon;
......@@ -453,65 +425,6 @@ public class AttributeSequence extends Attribute {
}
public static Attribute mean(final Attribute[] tabSequence, AttributeSequence oldCenter) {
Attribute res = null;
switch (AttributeSequence.mode) {
case EUCLIDIEN:
res = AttributeSequence.euclidianMean(tabSequence);
break;
case DTW_BARYCENTRE:
// calcul de la moyenne
if (oldCenter != null) {
res = AttributeSequence.DBAMean(oldCenter, tabSequence);
} else {
int alea = (int) (Math.round(Math.random() * (tabSequence.length - 1)));
res = AttributeSequence.DBAMean(tabSequence[alea], tabSequence);
}
for (int i = 0; i < AttributeSequence.NB_ITERATIONS; i++) {
res = AttributeSequence.DBAMean(res, tabSequence);
}
if (AttributeSequence.simplifyFrom != -1 && ((AttributeSequence) res).getNbTuples() > 1) {
// Reduction du centre
double distance = 0.0;
double tampon;
for (int i = 0; i < tabSequence.length; i++) {
tampon = res.distance(tabSequence[i]);
distance += tampon * tampon;
}
distance = Math.sqrt(distance);
double distanceTmp = distance;
final AttributeSequence tmp = (AttributeSequence) res.clone();
AttributeSequence best;
System.out.println("Debut reduction centre");
do {
best = (AttributeSequence) tmp.clone();
distance = distanceTmp;
// System.out.println("Reduction distance : " +
// distanceTmp);
tmp.simplifyOnce();
distanceTmp = 0.0;
for (int i = 0; i < tabSequence.length; i++) {
tampon = tmp.distance(tabSequence[i]);
distanceTmp += tampon * tampon;
}
distanceTmp = Math.sqrt(distanceTmp);
} while (distanceTmp < distance && tmp.getNbTuples() > 1);
res = best;
}
break;
case DYNAMIC_TIME_WARPING:
res = AttributeSequence.NLAAFMean(tabSequence);
break;
}
return res;
}
protected static Attribute euclidianMean(final Attribute[] tabSequence) {
final int nbTuples = ((AttributeSequence) tabSequence[0]).getNbTuples();
final double[] tuplesAverageSeq = new double[nbTuples];
......
......@@ -249,16 +249,6 @@ public class AttributeSymbolicCategorial extends Attribute{
/** TODO definir cette methode */
}
/**
* @deprecated use rather the {@link CategorialDistance} framework
*/
@Override
public double distance(final Attribute a) {
final AttributeSymbolicCategorial att = (AttributeSymbolicCategorial) a;
return CategorialDistance.getInstance().compute(this, att, EmptyDistanceParameter.getInstance());
}
/**
* <p>
* Methode renvoyant le nombre de valeurs possible pour cet attribut.
......
package jcl.data.attribute;
import jcl.data.distance.sequential.ParameterDTWSymbolic;
import jcl.data.distance.sequential.DistanceDTWSymbolic;
/**
* This class represents a sequence of symbolic attributes. Warning: this has to be used with DTW distance
*
......@@ -14,8 +11,6 @@ public class AttributeSymbolicSequence extends Attribute {
private static final int MAX_SEQ_LENGTH = 600;
private static double[][] matriceW = new double[AttributeSymbolicSequence.MAX_SEQ_LENGTH][AttributeSymbolicSequence.MAX_SEQ_LENGTH];
/**
* Identifiant geo de l'ilot
*/
......@@ -41,17 +36,6 @@ public class AttributeSymbolicSequence extends Attribute {
return this.sequence.length;
}
/**
*
* @param another attribute to be compared to
* @return the distance
* @deprecated use rather the {@link DistanceDTWSymbolic} framework
*/
@Override
public synchronized double distance(final Attribute a) {
return DistanceDTWSymbolic.getInstance().compute(this, (AttributeSymbolicSequence) a, new ParameterDTWSymbolic(matriceW, similarity));
}
@Override
public Object clone() {
return new AttributeSymbolicSequence(this.idGEo, this.sequence.clone(), this.similarity);
......
......@@ -76,7 +76,7 @@ public class DistanceDTWMDMRSingleLink implements Distance<AttributeMDMRSequence
return matriceW[tailleS - 1][tailleT - 1];
}
protected static final double distanceTo(double[][] s1, double[][] s2) {
public static final double distanceTo(double[][] s1, double[][] s2) {
double minDistance = Double.MAX_VALUE;
double currentDistance, tmp;
for (double[] p1 : s1) {
......
......@@ -15,20 +15,20 @@ import jcl.data.Data;
import jcl.data.DataObject;
import jcl.data.SimpleData;
import jcl.data.attribute.Attribute;
import jcl.data.attribute.AttributeNumerical;
import jcl.data.attribute.AttributeMultiDimSequence;
import jcl.data.attribute.AttributeSequence;
import jcl.data.distance.Distance;
import jcl.data.distance.DistanceModel;
import jcl.data.distance.DistanceParameter;
import jcl.data.distance.EmptyDistanceParameter;
import jcl.data.distance.MetaDistance;
import jcl.data.distance.MetaDistanceEuclidean;
import jcl.data.distance.NumericalEuclideanDistance;
import jcl.data.distance.average.Average;
import jcl.data.distance.average.sequential.AverageMDDBAMean;
import jcl.data.distance.sequential.DistanceDTWMD;
import jcl.data.distance.sequential.ParameterDTW;
import jcl.learning.methods.monostrategy.kmeans.ClassifierKmeans;
import jcl.learning.methods.monostrategy.kmeans.LearningResultKmeans;
import jcl.learning.methods.monostrategy.kmeans.ParametersKmeans;
import jcl.weights.GlobalWeights;
public class Kmeans {
......@@ -82,10 +82,11 @@ public class Kmeans {
// distances[0] = NumericalEuclideanDistance.getInstance(); // first attribute compared with an euclidean distance between numericals
distances[0] = jcl.data.distance.sequential.DistanceDTW.getInstance(); // second attribute (sequential) compared with the DTW distance
MetaDistance metaDistance = MetaDistanceEuclidean.getInstance(); // defines the way the two scores are combined (possibility to weight)
DistanceModel model = new DistanceModel(distances, metaDistance);
//~ The dataset can be created with the model
dataset = new SimpleData(objects, model);
Average[] averages = new Average[1];
averages[0] = AverageMDDBAMean.getInstance();// uses MultiDim DBA mean for sequential attributes ...
DistanceModel model = new DistanceModel(distances, metaDistance, averages);
//~ It is now necessary to parametrize the Kmeans algorithm
//~ The Kmeans algorithm requires a set of distance parameters for each thread
......@@ -96,9 +97,12 @@ public class Kmeans {
// distanceParameters[th][0] = EmptyDistanceParameter.getInstance(); //no parameter for a numerical euclidean distance
distanceParameters[th][0] = new ParameterDTW(new double[SEQUENCE_LENGTH][SEQUENCE_LENGTH]); //but yes for DTW (requires a matrix to work in)
}
param = new ParametersKmeans(NB_CLUSTERS, NB_TURNS, NB_THREADS,
new GlobalWeights(dataset.getOneDataObject()), distanceParameters);
//~ The dataset can be created with the model
dataset = new SimpleData(objects, model, distanceParameters);
param = new ParametersKmeans(NB_CLUSTERS, NB_TURNS, NB_THREADS, model,
distanceParameters, DistanceModel.generateDefaultAverageParameters(model, dataset));
//~ OK now, then launch the algorithm
method = new ClassifierKmeans(param, null);
result = (LearningResultKmeans) method.learn(dataset);
......
......@@ -280,55 +280,57 @@ public class XmlResultReader extends DataFileReader {
}
//TODO : the distance should be load, otherwise it will not work
// so it should also be saved
LearningParameters params = null;
if (name.equals("Kmeans")) {
params = new ParametersKmeans(nbClusters, nbIters, weights);
params = new ParametersKmeans(nbClusters, nbIters, null, null, null);
}
if (name.equals("COBWEB")) {
params = new ParametersCobweb(acuteness, mapi, maxDepth, nbpasses,
minCard, weights);
minCard);
}
if (name.equals("S.O.M.")) {
params = new ParametersSOM(width, heigth, nbIters, epsilon, weights);
params = new ParametersSOM(width, heigth, nbIters, epsilon, null, null);
}
if (name.equals("PERCEPTRON")) {
params = new ParametersPerceptron(nbClusters, nbEpochs, 1, .99,
weights);
null, null);
}
if (name.equals("EM")) {
params = new ParametersEM(nbClusters, nbIters, weights);
params = new ParametersEM(nbClusters, nbIters, null, null, null);
}
if (name.equals("FUZZY-C-MEANS")) {
params = new ParametersKmeans(nbClusters, nbIters, m, weights);
params = new ParametersKmeans(nbClusters, nbIters, m, null, null, null);
}
if (name.equals("RANDOM")) {
params = new ParametersRandom(nbClusters, weights);
params = new ParametersRandom(nbClusters);
}
if (name.equals("GW-K-MEANS")) {
params = new ParametersKmeans(nbClusters, nbIters, beta,
ParametersKmeans.GLOBAL_FEATURE_WEIGHTING, weights);
ParametersKmeans.GLOBAL_FEATURE_WEIGHTING, null, null, null);
}
if (name.equals("FUZZY-GW-C-MEANS")) {
params = new ParametersKmeans(nbClusters, nbIters, m, beta,
ParametersKmeans.GLOBAL_FEATURE_WEIGHTING, weights);
ParametersKmeans.GLOBAL_FEATURE_WEIGHTING, null, null, null);
}
if (name.equals("LW-K-MEANS")) {
params = new ParametersKmeans(nbClusters, nbIters, beta,
ParametersKmeans.LOCAL_FEATURE_WEIGHTING, weights);
ParametersKmeans.LOCAL_FEATURE_WEIGHTING, null, null, null);
}
if (name.equals("FUZZY-LW-C-MEANS")) {
params = new ParametersKmeans(nbClusters, nbIters, m, beta,
ParametersKmeans.LOCAL_FEATURE_WEIGHTING, weights);
ParametersKmeans.LOCAL_FEATURE_WEIGHTING, null, null, null);
}
......
......@@ -165,24 +165,48 @@ public abstract class LearningParameters implements Serializable {
public void setSamples(Vector<DataObject> samples) {
this.samples = samples;
}
public DistanceModel getModel() {
return distanceModel;
}
/**
* Set the distance used by this method
* @param model
* the distance used
*/
public void setModel(DistanceModel model) {
this.distanceModel = model;
}
/**
* The distance used by this method
* @return the distance used
*/
public DistanceModel getModel() {
return distanceModel;
}
/**
* Return the parameters used by the distance, the first dimension is the number of thread,
* each thread has its own parameters
* @return the parameters
*/
public DistanceParameter[][] getDistanceParameters() {
return distanceParameters;
}
/**
* Return the parameters used by the average of the distance model
* @return the parameters
*/
public AverageParameter[] getAverageParameters() {
return averageParameters;
}
/**
* Set the parameters used by the distance model
* @param distanceParameters
* parameters for the distance itself
* @param averageParameters
* parameters to the attached average
*/
public void setParameters(DistanceParameter[][] distanceParameters, AverageParameter[] averageParameters) {
this.distanceParameters = distanceParameters;
this.averageParameters = averageParameters;
......
......@@ -51,9 +51,8 @@ public class ParametersCobweb extends LearningParameters {
* poids sur les attributs
*/
public ParametersCobweb(final double acuteness, final double mapi,
final int maxDepth, final int nombrepasses, final int minCard,
DistanceModel model, DistanceParameter[][] distParam) {
super(model,distParam);
final int maxDepth, final int nombrepasses, final int minCard) {
super(null, null, null);
this.acuteness = acuteness;
this.mapi = mapi;
this.maxDepth = maxDepth;
......
......@@ -37,7 +37,7 @@ public class ParametersCure extends LearningParameters {
*/
public ParametersCure(final int nbClusters, final int nbPrototypes,
final double alpha, DistanceModel model, DistanceParameter[][] distanceParameters) {
super(model, distanceParameters);
super(model, distanceParameters, null);
this.nbClusters = nbClusters;
this.nbPrototypes = nbPrototypes;
this.alpha = alpha;
......
......@@ -4,10 +4,10 @@ import java.util.Vector;
import jcl.data.distance.DistanceModel;
import jcl.data.distance.DistanceParameter;
import jcl.data.distance.average.AverageParameter;
import jcl.learning.LearningMethod;
import jcl.learning.LearningParameters;
import jcl.learning.methods.monostrategy.kmeans.ParametersKmeans;
import jcl.weights.ClassificationWeights;
/**
* <p>
......@@ -36,11 +36,12 @@ public class ParametersEM extends LearningParameters {
* @param weights
*/
public ParametersEM(final int nbClusters, final int nbIters,
final DistanceModel model, DistanceParameter[][] distParam) {
super(model, distParam);
final DistanceModel model, DistanceParameter[][] distParam,
AverageParameter[] averageParam) {
super(model, distParam, averageParam);
this.nbClusters = nbClusters;
this.nbIters = nbIters;
this.init = new ParametersKmeans(this.nbClusters, 0, model, distParam);
this.init = new ParametersKmeans(this.nbClusters, 0, model, distParam, averageParam);
}
/**
......@@ -51,12 +52,13 @@ public class ParametersEM extends LearningParameters {
* @param weights
*/
public ParametersEM(final int nbClusters, final int nbIters,
final int nbKmeansIters, final DistanceModel model, DistanceParameter[][] distParam) {
super(model,distParam);
final int nbKmeansIters, final DistanceModel model, DistanceParameter[][] distParam,
AverageParameter[] averageParam) {
super(model, distParam, averageParam);
this.nbClusters = nbClusters;
this.nbIters = nbIters;
this.init = new ParametersKmeans(this.nbClusters, nbKmeansIters,
model, distParam);
model, distParam, averageParam);
}
/**
......
......@@ -4,6 +4,7 @@ import java.util.Vector;
import jcl.data.distance.DistanceModel;
import jcl.data.distance.DistanceParameter;
import jcl.data.distance.average.AverageParameter;
import jcl.learning.LearningMethod;
import jcl.learning.LearningParameters;
import jcl.weights.ClassificationWeights;
......@@ -43,10 +44,10 @@ public class ParametersEvidence extends LearningParameters {
* @param pourcentage
* @param weights
*/
public ParametersEvidence(final int nbClassifications,
final int nbClustersMax, final int nbClustersMin,
final double pourcentage, final DistanceModel model, DistanceParameter[][] distParam) {
super(model,distParam);
public ParametersEvidence(final int nbClassifications, final int nbClustersMax,
final int nbClustersMin, final double pourcentage, final DistanceModel model,
DistanceParameter[][] distParam, AverageParameter[] averageParam) {
super(model,distParam, averageParam);
this.nbClassifications = nbClassifications;
this.nbClustersMax = nbClustersMax;
this.nbClustersMin = nbClustersMin;
......
......@@ -99,7 +99,7 @@ public class ParametersGNG extends LearningParameters {
*/
public ParametersGNG(final int nbNodes, final int age, final int etapes,
final int freq_new_node, DistanceModel model, DistanceParameter[][] distanceParameters) {
super(model, distanceParameters);
super(model, distanceParameters, null);
this.maxNodes = nbNodes;
this.MAX_EDGE_AGE = age;
this.steps = etapes;
......
......@@ -64,11 +64,16 @@ public class ParametersKmeans extends LearningParameters {
*
* @param nbClusters Nombre de classes
* @param nbIters Nombre d'iterations
* @param weights Ponderations initiales
* @param model
* the model used to compute the distance
* @param distParameters
* the list of parameters used by the distance model
* @param averageParameters
* the list of parameters used by the distance model for the average
*/
public ParametersKmeans(final int nbClusters, final int nbIters, DistanceModel model,
DistanceParameter[][] distParam, AverageParameter[] averageParameters) {
super(model,distParam, averageParameters);
DistanceParameter[][] distParameters, AverageParameter[] averageParameters) {
super(model,distParameters, averageParameters);
this.nbClusters = nbClusters;
this.nbIters = nbIters;
this.fuzzy = false;
......
package jcl.learning.methods.monostrategy.kmeans.constraint;
import java.util.Vector;
import jcl.clustering.ClusteringResult;
import jcl.clustering.constraints.Constraint;
import jcl.data.Data;
import jcl.learning.LearningMethod;
import jcl.learning.LearningResult;
import multiCube.tools.util.exceptions.MethodNotImplementedException;
/**
 * K-means variant that takes partial supervision into account: a subset of the
 * objects have a known cluster assignment ({@code knowledge}), used either as
 * hard constraints re-imposed at every iteration ({@link #CONSTRAINT_KMEANS})
 * or only as initial seeds ({@link #SEEDED_KMEANS}).
 */
public class ConstraintKmeans extends LearningMethod {

	private static final long serialVersionUID = 1L;

	// cluster index for each known object (supervision); semantics of unknown
	// entries depend on ResultConstraintKmeans — TODO confirm encoding
	int[] knowledge;

	// either CONSTRAINT_KMEANS or SEEDED_KMEANS
	int type;

	/** Mode: constraints are re-imposed on every iteration. */
	public static final int CONSTRAINT_KMEANS = 0;

	/** Mode: known assignments only seed the initial clustering. */
	public static final int SEEDED_KMEANS = 1;

	/**
	 * @param parameters algorithm parameters (number of clusters, iterations, …)
	 * @param samples    optional sample objects forwarded to the superclass
	 * @param knowledge  known cluster index for each supervised object
	 * @param type       {@link #CONSTRAINT_KMEANS} or {@link #SEEDED_KMEANS}
	 */
	public ConstraintKmeans(final ParametersConstraintKmeans parameters,
			final Vector samples, final int knowledge[], final int type) {
		super(parameters, samples);
		this.knowledge = knowledge;
		this.type = type;
	}

	/**
	 * Runs the constrained k-means loop: seeds are (re)defined and objects are
	 * re-affected to clusters for the configured number of iterations.
	 *
	 * @param data the dataset to cluster
	 * @return the clustering result after the final iteration
	 */
	@Override
	public LearningResult learn(final Data data) {
		final ResultConstraintKmeans result = new ResultConstraintKmeans(data,
				(ParametersConstraintKmeans) this.parameters, this.samples,
				this.knowledge, this.type);
		final int nbIters = ((ParametersConstraintKmeans) this.parameters).nbIters;
		for (int i = 0; i < nbIters; i++) {
			// redefine seeds
			result.redefineSeeds();
			// cluster affectation
			result.clusterAffectation(data, true);
			this.display();
			this.progress += 1;
		}
		return result;
	}

	/** ****************************************** */

	@Override
	public int getType() {
		// TODO Auto-generated method stub
		return 0;
	}

	@Override
	public Object clone() {
		// BUGFIX: previous implementation returned this.clone(), which recurses
		// infinitely and throws StackOverflowError. Build a shallow copy instead.
		return new ConstraintKmeans((ParametersConstraintKmeans) this.parameters,
				this.samples, this.knowledge, this.type);
	}

	@Override
	public String getHTMLParameters() {
		// TODO Auto-generated method stub
		return "";
	}

	@Override
	public String getTextParameters() {
		// TODO Auto-generated method stub
		return "";
	}

	@Override
	public ClusteringResult merge(final LearningResult learningResult,
			final ClusteringResult result, final Data data, final Vector cr)
			throws MethodNotImplementedException {
		throw new MethodNotImplementedException();
	}

	@Override
	public ClusteringResult reclass(final LearningResult learningResult,
			final ClusteringResult result, final Data data, final int c)
			throws MethodNotImplementedException {
		throw new MethodNotImplementedException();
	}

	@Override
	public ClusteringResult split(final LearningResult learningResult,
			final ClusteringResult result, final Data data, final int c,
			final int n) throws MethodNotImplementedException {
		throw new MethodNotImplementedException();
	}

	@Override
	public ClusteringResult injectConstraints(LearningResult learningResult, ClusteringResult result, Data data,
			Vector<Constraint> constraints) throws MethodNotImplementedException {
		throw new MethodNotImplementedException();
	}
}
package jcl.learning.methods.monostrategy.kmeans.constraint;
import java.util.Vector;
import jcl.data.distance.DistanceModel;
import jcl.data.distance.DistanceParameter;
import jcl.learning.LearningMethod;
import jcl.learning.LearningParameters;
import jcl.weights.ClassificationWeights;
public class ParametersConstraintKmeans extends LearningParameters {
/**
*
*/
private static final long serialVersionUID = 1L;
// number of clusters
int nbClusters = 0;
// number of iteration
int nbIters = 0;
// list the known objects (with cluster)
int knowledge[];
// type constraint or seeded
int type;
public ConstraintKmeans method = null;
/**
* @param nbClusters
* @param nbIters
* @param weights
*/
public ParametersConstraintKmeans(final int nbClusters, final int nbIters,
DistanceModel model, DistanceParameter[][] distanceParameters, final int knowledge[],
final int type) {
super(model, distanceParameters);
this.nbClusters = nbClusters;
this.nbIters = nbIters;
this.knowledge = knowledge;
this.type = type;
}
@Override
public Object clone() {
return this.clone();
}
@Override