Mustic · Commit 33a0b68d
Authored Aug 06, 2018 by lafabregue
Parent: b14022b0

change test files
Showing 3 changed files with 131 additions and 192 deletions (+131 / -192)

    src/test/TestA2CNES.java                        +60   -108
    src/test/TestA2CNESIterative.java               +28   -8
    src/test/TestA2CNESIterativeSelectedCst.java    +43   -76
src/test/TestA2CNES.java (view file @ 33a0b68d)
package test;

import java.beans.PropertyVetoException;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
...
@@ -17,7 +16,6 @@ import org.joda.time.DateTime;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;
import jcl.Classification;
import jcl.clustering.constraints.CannotLinkConstraint;
import jcl.clustering.constraints.Constraint;
import jcl.clustering.constraints.MustLinkConstraint;
...
@@ -33,37 +31,37 @@ import jcl.weights.ClassificationWeights;
import jcl.weights.GlobalWeights;
import multiCube.tools.image.ImageHelper;
import mustic.gui.ClassificationFrame;
import mustic.gui.ClassificationImage;
import mustic.gui.DataDesktopFrame;
import mustic.gui.DataSession;
import mustic.gui.MainFrame;
import mustic.models.thread.ClassificationThread;
import mustic.utils.io.CSVUtils;

public class TestA2CNES {

-    public void customClassify() {
+    public static void main(String[] args) {
        HybridClassification classification = new HybridClassification();

-        String datasetName = "FacesUCR";
-        String datasetPath = "FacesUCR";
-        String dataPath = "/home/baptiste/A2CNES/";
-        String resultPath = "/home/baptiste/A2CNES/results/";
-        String testResultPath = "/home/baptiste/A2CNES/Train_results/";
+        final String datasetName = "Frogs_MFCCs";
+        final String datasetPath = "Frogs_MFCCs";
+        final String dataPath = System.getProperty("user.home") + "/A2CNES/";
+        final String resultPath = System.getProperty("user.home") + "/A2CNES/results/";
+        final int nInf = 54;
+        final int nSup = 68;
+        final int[] ag_seeds = {52, 64, 71};
+        final int nb_iter = 15;
+        double constraintsWgt = 80;

        File directory = new File("log");
        if (!directory.exists()) {
            directory.mkdir();
        }

-        Data dataTrain = getDataFromFile(dataPath + datasetPath + "/train/" + datasetName + ".data", '\t', "train", null);
+        Data dataTest = getDataFromFile(dataPath + datasetPath + "/test/" + datasetName + ".data", '\t', "test", null);

-//        AttributeMultiDimSequence.setMode(AttributeMultiDimSequence.EUCLIDIEN);
-        AttributeMultiDimSequence.setMode(AttributeMultiDimSequence.DTW_BARYCENTRE);
+        AttributeMultiDimSequence.setMode(AttributeMultiDimSequence.EUCLIDIEN);
+//        AttributeMultiDimSequence.setMode(AttributeMultiDimSequence.DTW_BARYCENTRE);

        DataDesktopFrame[] desktopFrames = MainFrame.getInstance().getDesktop().getAllDataDesktopFrames();
        DataSession testSession = desktopFrames[desktopFrames.length - 2].getDataSession();
        final DateTime startTime = DateTime.now();

-        int nInf = 12;
-        int nSup = 17;
        // parameter for the precision of conflicts
        double minC = 0.9;
...
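The hunk above swaps the hard-coded `/home/baptiste/A2CNES/` prefixes for `System.getProperty("user.home")`, so the test data can sit under any user's home directory. A minimal, self-contained sketch of that pattern (the `A2CNES` and `Frogs_MFCCs` names come from the diff; the rest is plain `java.nio` and only illustrative):

    import java.nio.file.Path;
    import java.nio.file.Paths;

    public class DataPaths {
        // Resolve the dataset root relative to the current user's home directory
        // instead of hard-coding an absolute path such as /home/baptiste/A2CNES/.
        static Path datasetRoot() {
            return Paths.get(System.getProperty("user.home"), "A2CNES");
        }

        public static void main(String[] args) {
            // e.g. /home/<user>/A2CNES/Frogs_MFCCs/test/Frogs_MFCCs.data
            Path testData = datasetRoot().resolve("Frogs_MFCCs/test/Frogs_MFCCs.data");
            System.out.println(testData);
        }
    }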
@@ -74,7 +72,6 @@ public class TestA2CNES {
        // quality/similarity parameter
        double ps = 0.2;
        double pq = 1.0 - ps;
-        double constraintsWgt = 80;
        double valueKExtern = 60;
        double kExtern = valueKExtern * ((100 - constraintsWgt) / 100);
        double kIntern = (100 - valueKExtern) * ((100 - constraintsWgt) / 100);
...
@@ -89,72 +86,43 @@ public class TestA2CNES {
        classification.setAdvancedParameters(degradation, classRatio, solutionType, kIntern, kExtern, unificationType, criterion, constraintsWgt);

-        ClassificationWeights weights = new GlobalWeights(dataTrain);
+        ClassificationWeights weights = new GlobalWeights(dataTest);

        final Vector<Thread> threadList = new Vector<Thread>();
        final Vector<Classification> classifList = new Vector<Classification>();

        // we search for all constraints files
// <<<< START FOR PARAM config
// for (int i = 0 ; i < 10 ; i++) {
// HybridClassification classif = (HybridClassification) classification.clone();
// if (i % 2 == 1 ) {
// classification.setParameters(nInf, nSup, minC, 0.4, 0.6, pcr);
// }
// if (i <= 5)
// classif.addAgent(new ParametersKmeans(8, 25, weights), dataTrain);
// if (i <= 7)
// classif.addAgent(new ParametersKmeans(10, 25, weights), dataTrain);
// classif.addAgent(new ParametersKmeans(13, 25, weights), dataTrain);
// classif.addAgent(new ParametersKmeans(16, 25, weights), dataTrain);
// if (i >= 2)
// classif.addAgent(new ParametersKmeans(19, 25, weights), dataTrain);
// if (i >= 4)
// classif.addAgent(new ParametersKmeans(22, 25, weights), dataTrain);
//// classif.addAgent(new ParametersKmeans(3, 10, weights), dataTest);
//// classif.addAgent(new ParametersKmeans(4, 10, weights), dataTest);
//// classif.addAgent(new ParametersKmeans(6, 10, weights), dataTest);
//
// classif.setName(testResultPath+datasetName+"/clusteing"+i);
// classif.setData(dataTrain);
// >>>> END FOR PARAM config1
// for (int i = 0 ; i < 20 ; i++) {
// HybridClassification classif = (HybridClassification) classification.clone();
// classif.addAgent(new ParametersKmeans(13, 20, weights), dataTest);
// classif.addAgent(new ParametersKmeans(16, 20, weights), dataTest);
// classif.addAgent(new ParametersKmeans(19, 20, weights), dataTest);
// final HybridClassification classif = (HybridClassification) classification.clone();
// Data currentData = (Data) dataTest.clone();
// final String path_to_add = resultPath + datasetName + "/";
// classif.setName(datasetName+"_unconstrained"+"-"+i+
// ".clustering");
// classif.setData(currentData);
//
// for (int a : ag_seeds) {
// classif.addAgent(new ParametersKmeans(a, nb_iter, weights), currentData);
// }
//
// classif.setName(resultPath+datasetPath+"/clustering"+i);
// classif.setData(dataTest);
//// >>>> REPLACE PARAM
// Thread t = null;
//
// ClassificationImage classificationImage = new ClassificationImage(testSession,
// Messages.getString("ClassifierPanel.73") + DataSession.nbClustering, //$NON-NLS-1$
// true);
//
//
// t = new ClassificationThread(classif, classificationImage.getProgressBar(),
// classificationImage, null);
// Thread t = new Thread() {
// @Override
// public void run() {
// classif.classify();
// System.out.println(classif.getName());
// try {
// new CSVResultWriter(classif, path_to_add + classif.getName()).write();
// } catch (IOException e) {
// e.printStackTrace();
// }
// }
// };
// t.start();
// try {
// testSession.associatedFrame.setMaximum(true);
// } catch (PropertyVetoException e1) {}
// testSession.addClassifier(classificationImage);
// try {
// testSession.associatedFrame.setSelected(true);
// testSession.associatedFrame.toFront();
// } catch (PropertyVetoException e) {
// e.printStackTrace();
// }
// classificationImage.setVisible(true);
//
// threadList.add(t);
// classifList.add(classif);
// }
// <<<< CONSTRAINTS
        try (DirectoryStream<Path> dirStream = Files.newDirectoryStream(
-                Paths.get(dataPath + datasetName + "/train/"), "*1_10.constraints")) {
+                Paths.get(dataPath + datasetName + "/test/"), "*.constraints")) {

            Iterator<Path> iter = dirStream.iterator();

            while (iter.hasNext()) {
                Vector<Constraint> constraints = new Vector<Constraint>();
...
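The `Files.newDirectoryStream` call above now scans the `/test/` folder and matches every `*.constraints` file instead of the narrower `*1_10.constraints` glob. A small stand-alone sketch of the same listing idiom (directory layout and extension taken from the diff; the iteration itself is standard `java.nio.file`):

    import java.io.IOException;
    import java.nio.file.DirectoryStream;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.Paths;

    public class ListConstraintFiles {
        public static void main(String[] args) throws IOException {
            Path testDir = Paths.get(System.getProperty("user.home"), "A2CNES", "Frogs_MFCCs", "test");
            // Iterate every *.constraints file in the test folder, as the updated test does.
            try (DirectoryStream<Path> dirStream = Files.newDirectoryStream(testDir, "*.constraints")) {
                for (Path constraintsFile : dirStream) {
                    System.out.println(constraintsFile.getFileName());
                }
            }
        }
    }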
@@ -191,43 +159,34 @@ public class TestA2CNES {
                }

                for (int i = 0; i < 1; i++) {
-                    HybridClassification classif = (HybridClassification) classification.clone();
+                    final HybridClassification classif = (HybridClassification) classification.clone();
                    Data currentData = (Data) dataTest.clone();
                    currentData.updateAndSetConstraintsToSample(constraints);

-                    classif.addAgent(new ParametersKmeans(16, 15, weights), currentData);
-                    classif.addAgent(new ParametersKmeans(20, 15, weights), currentData);
-                    classif.addAgent(new ParametersKmeans(24, 15, weights), currentData);
+                    for (int a : ag_seeds) {
+                        classif.addAgent(new ParametersKmeans(a, nb_iter, weights), currentData);
+                    }

-                    classif.setName(resultPath + datasetName + "/" + ImageHelper.stripExtension(filename) + "-" + i +
+                    final String path_to_add = resultPath + datasetName + "/";
+                    classif.setName(ImageHelper.stripExtension(filename) + "-" + i +
                            ".clustering");
                    classif.setData(currentData);

-                    Thread t = null;
-                    ClassificationImage classificationImage = new ClassificationImage(testSession,
-                            classif.getName(), //$NON-NLS-1$
-                            true);
-                    t = new ClassificationThread(classif, classificationImage.getProgressBar(),
-                            classificationImage, null, false, null);
+                    Thread t = new Thread() {
+                        @Override
+                        public void run() {
+                            classif.classify();
+                            System.out.println(classif.getName());
+                            try {
+                                new CSVResultWriter(classif, path_to_add + classif.getName()).write();
+                            } catch (IOException e) {
+                                e.printStackTrace();
+                            }
+                        }
+                    };
                    t.start();

-                    try {
-                        testSession.associatedFrame.setMaximum(true);
-                    } catch (PropertyVetoException e1) {}
-                    testSession.addClassifier(classificationImage);
-                    try {
-                        testSession.associatedFrame.setSelected(true);
-                        testSession.associatedFrame.toFront();
-                    } catch (PropertyVetoException e) {
-                        e.printStackTrace();
-                    }
-                    classificationImage.setVisible(true);

                    threadList.add(t);
                    classifList.add(classif);
                }
            }
        } catch (IOException e2) {
...
@@ -245,13 +204,6 @@ public class TestA2CNES {
            } catch (InterruptedException e) {
                e.printStackTrace();
            }

-        for (Classification cl : classifList) {
-            try {
-                new CSVResultWriter(cl, cl.getName()).write();
-            } catch (IOException e) {
-                e.printStackTrace();
-            }
-        }

        DateTimeFormatter formatter = DateTimeFormat.forPattern("MM/dd/yyyy HH:mm:ss");
        System.out.println("Start at " + formatter.print(startTime));
        System.out.println("wrote final results at " + formatter.print(DateTime.now()));
...
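In the updated TestA2CNES each clustering run is wrapped in a plain anonymous `Thread` that calls `classify()` and writes its own CSV result, replacing the GUI-bound `ClassificationThread`/`ClassificationImage` path, so the trailing loop that wrote every result after the joins could be dropped. A generic launch-and-join sketch of that pattern (the task and result here are placeholders, not the project's `HybridClassification` API):

    import java.util.Vector;

    public class WorkerSketch {
        public static void main(String[] args) throws InterruptedException {
            Vector<Thread> threadList = new Vector<Thread>();
            for (int i = 0; i < 3; i++) {
                final int runId = i;
                // Each worker performs its run and persists its own result before exiting,
                // so nothing remains to be written after the join loop.
                Thread t = new Thread() {
                    @Override
                    public void run() {
                        String result = "run-" + runId;   // stand-in for classify() + CSV output
                        System.out.println("finished " + result);
                    }
                };
                t.start();
                threadList.add(t);
            }
            for (Thread t : threadList) {
                t.join();
            }
            System.out.println("all runs done");
        }
    }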
src/test/TestA2CNESIterative.java (view file @ 33a0b68d)
...
@@ -53,10 +53,13 @@ public class TestA2CNESIterative {
        final String resultPath = System.getProperty("user.home") + "/A2CNES/results_iter/";

        final int nInf = 12;
        final int nSup = 17;
-        final int ag1_seeds = 16;
-        final int ag2_seeds = 20;
-        final int ag3_seeds = 24;
+        final int[] ag_seeds = {16, 20, 24};
        final int nb_iter = 15;

+        File directory = new File("log");
+        if (!directory.exists()) {
+            directory.mkdir();
+        }

//        String testResultPath = System.getProperty("user.home")+"/A2CNES/Train_results/";
...
@@ -64,8 +67,8 @@ public class TestA2CNESIterative {
        final Data dataTest = TestA2CNES.getDataFromFile(dataPath + datasetPath + "/test/" + datasetName + ".data", '\t', "test", null);

-//        AttributeMultiDimSequence.setMode(AttributeMultiDimSequence.EUCLIDIEN);
-        AttributeMultiDimSequence.setMode(AttributeMultiDimSequence.DTW_BARYCENTRE);
+        AttributeMultiDimSequence.setMode(AttributeMultiDimSequence.EUCLIDIEN);
+//        AttributeMultiDimSequence.setMode(AttributeMultiDimSequence.DTW_BARYCENTRE);

        final DateTime startTime = DateTime.now();
...
@@ -206,9 +209,10 @@ public class TestA2CNESIterative {
//                    extractAndAddConstraints(subset, constraints, subsetSize, null);
//                    currentData.updateAndSetConstraintsToSample(subset);

-                    classif.addAgent(new ParametersKmeans(ag1_seeds, nb_iter, weights), currentData);
-                    classif.addAgent(new ParametersKmeans(ag2_seeds, nb_iter, weights), currentData);
-                    classif.addAgent(new ParametersKmeans(ag3_seeds, nb_iter, weights), currentData);
+                    for (int a : ag_seeds) {
+                        classif.addAgent(new ParametersKmeans(a, nb_iter, weights), currentData);
+                    }

                    final String path_to_add = resultPath + datasetName + "/";
                    classif.setName(ImageHelper.stripExtension(filename) + "-" + i +
...
@@ -249,6 +253,7 @@ public class TestA2CNESIterative {
            distanceParameters[0] = new ParameterDTW(new double[seqLength][seqLength]);
            // but yes for DTW (requires a matrix to work in)

            for (int i = 0; i < 5; i++) {
                int[] clustMap = classif.getClusteringResult().getClusterMap();
                int[] satisifiedMap = new int[constraints.size()];
                for (int j = 0; j < constraints.size(); j++) {
...
@@ -295,6 +300,21 @@ public class TestA2CNESIterative {
                classif.setAdvancedParameters(degradation, classRatio, solutionType, kIntern, kExtern, unificationType, criterion, 95);
                classif.newIteration(subset);

+                FileWriter fw2 = null;
+                try {
+                    fw2 = new FileWriter("log/" + rand + "sat_cst" + classif.getName() + ".log", true);
+                } catch (IOException e) {
+                    e.printStackTrace();
+                }
+                BufferedWriter bw2 = new BufferedWriter(fw2);
+                PrintWriter out2 = new PrintWriter(bw2);
+
+                int countSat = 0;
+                for (Constraint c : subset) {
+                    if (c.evaluate(classif.getClusteringResult()) == 1) {
+                        countSat++;
+                    }
+                }
+                out2.write(subset.size() + ";" + countSat);

                try {
                    new CSVResultWriter(classif, path_to_add + classif.getName() + "_" + (i + 1)).write();
...
src/test/TestA2CNESIterativeSelectedCst.java (view file @ 33a0b68d)
...
@@ -36,6 +36,8 @@ import jcl.io.results.CSVResultWriter;
import jcl.learning.methods.monostrategy.kmeans.ParametersKmeans;
import jcl.learning.methods.multistrategy.samarah.HybridClassification;
import jcl.learning.methods.multistrategy.samarah.SamarahConfig;
import jcl.learning.methods.multistrategy.samarahConstrained.HybridClassificationConstrained;
import jcl.learning.methods.multistrategy.samarahConstrained.HybridClassificationConstrained2;
import jcl.utils.RandomizeTools;
import jcl.weights.ClassificationWeights;
import jcl.weights.GlobalWeights;
...
@@ -45,18 +47,22 @@ import mustic.utils.io.CSVUtils;
public class TestA2CNESIterativeSelectedCst {

    public static void main(String[] args) {

-        HybridClassification classification = new HybridClassification();
+        HybridClassificationConstrained2 classification = new HybridClassificationConstrained2();
//        HybridClassificationConstrained2 classification = new HybridClassificationConstrained2();

-        final String datasetName = "FacesUCR";
-        final String datasetPath = "FacesUCR";
+        final String datasetName = "Frogs_MFCCs";
+        final String datasetPath = "Frogs_MFCCs";
        final String dataPath = System.getProperty("user.home") + "/A2CNES/";
        final String resultPath = System.getProperty("user.home") + "/A2CNES/results_iter/";

-        final int nInf = 12;
-        final int nSup = 17;
-        final int ag1_seeds = 16;
-        final int ag2_seeds = 20;
-        final int ag3_seeds = 24;
+        final int nInf = 54;
+        final int nSup = 68;
+        final int[] ag_seeds = {52, 64, 71};
        final int nb_iter = 15;

+        File directory = new File("log");
+        if (!directory.exists()) {
+            directory.mkdir();
+        }

//        String testResultPath = System.getProperty("user.home")+"/A2CNES/Train_results/";
...
@@ -64,8 +70,8 @@ public class TestA2CNESIterativeSelectedCst {
        final Data dataTest = TestA2CNES.getDataFromFile(dataPath + datasetPath + "/test/" + datasetName + ".data", '\t', "test", null);

-//        AttributeMultiDimSequence.setMode(AttributeMultiDimSequence.EUCLIDIEN);
-        AttributeMultiDimSequence.setMode(AttributeMultiDimSequence.DTW_BARYCENTRE);
+        AttributeMultiDimSequence.setMode(AttributeMultiDimSequence.EUCLIDIEN);
+//        AttributeMultiDimSequence.setMode(AttributeMultiDimSequence.DTW_BARYCENTRE);

        final DateTime startTime = DateTime.now();
...
@@ -98,68 +104,9 @@ public class TestA2CNESIterativeSelectedCst {
        final Vector<Thread> threadList = new Vector<Thread>();
        final Vector<Classification> classifList = new Vector<Classification>();

        // we search for all constraints files
// <<<< START FOR PARAM config
// for (int i = 0 ; i < 10 ; i++) {
// HybridClassification classif = (HybridClassification) classification.clone();
// if (i % 2 == 1 ) {
// classification.setParameters(nInf, nSup, minC, 0.4, 0.6, pcr);
// }
// if (i <= 5)
// classif.addAgent(new ParametersKmeans(8, 25, weights), dataTrain);
// if (i <= 7)
// classif.addAgent(new ParametersKmeans(10, 25, weights), dataTrain);
// classif.addAgent(new ParametersKmeans(13, 25, weights), dataTrain);
// classif.addAgent(new ParametersKmeans(16, 25, weights), dataTrain);
// if (i >= 2)
// classif.addAgent(new ParametersKmeans(19, 25, weights), dataTrain);
// if (i >= 4)
// classif.addAgent(new ParametersKmeans(22, 25, weights), dataTrain);
//// classif.addAgent(new ParametersKmeans(3, 10, weights), dataTest);
//// classif.addAgent(new ParametersKmeans(4, 10, weights), dataTest);
//// classif.addAgent(new ParametersKmeans(6, 10, weights), dataTest);
//
// classif.setName(testResultPath+datasetName+"/clusteing"+i);
// classif.setData(dataTrain);
// >>>> END FOR PARAM config1
// for (int i = 0 ; i < 20 ; i++) {
// HybridClassification classif = (HybridClassification) classification.clone();
// classif.addAgent(new ParametersKmeans(13, 20, weights), dataTest);
// classif.addAgent(new ParametersKmeans(16, 20, weights), dataTest);
// classif.addAgent(new ParametersKmeans(19, 20, weights), dataTest);
//
// classif.setName(resultPath+datasetPath+"/clustering"+i);
// classif.setData(dataTest);
//// >>>> REPLACE PARAM
// Thread t = null;
//
// ClassificationImage classificationImage = new ClassificationImage(testSession,
// Messages.getString("ClassifierPanel.73") + DataSession.nbClustering, //$NON-NLS-1$
// true);
//
//
// t = new ClassificationThread(classif, classificationImage.getProgressBar(),
// classificationImage, null);
// t.start();
// try {
// testSession.associatedFrame.setMaximum(true);
// } catch (PropertyVetoException e1) {}
// testSession.addClassifier(classificationImage);
// try {
// testSession.associatedFrame.setSelected(true);
// testSession.associatedFrame.toFront();
// } catch (PropertyVetoException e) {
// e.printStackTrace();
// }
// classificationImage.setVisible(true);
//
// threadList.add(t);
// classifList.add(classif);
// }
// <<<< CONSTRAINTS
        try (DirectoryStream<Path> dirStream = Files.newDirectoryStream(
-                Paths.get(dataPath + datasetName + "/train/"), "*0.1_*")) {
+                Paths.get(dataPath + datasetName + "/test/"), "*0.1_*")) {

            Iterator<Path> iter = dirStream.iterator();
            final String rand = Integer.toString((int) (Math.random() * 1000));

            while (iter.hasNext()) {
...
@@ -229,7 +176,8 @@ public class TestA2CNESIterativeSelectedCst {
                    e.printStackTrace();
                }

-                final int subsetSize = (int) Math.ceil(constraintsCL.size() * 0.04);
+                final int subsetSize = (int) Math.ceil(constraintsCL.size() * 0.1);
+                System.out.println("subset size : " + subsetSize);

                for (int i = 0; i < 1; i++) {
                    final HybridClassification classif = (HybridClassification) classification.clone();
...
@@ -239,9 +187,9 @@ public class TestA2CNESIterativeSelectedCst {
//                    extractAndAddConstraints(subset, constraints, subsetSize, null);
//                    currentData.updateAndSetConstraintsToSample(subset);

-                    classif.addAgent(new ParametersKmeans(ag1_seeds, nb_iter, weights), currentData);
-                    classif.addAgent(new ParametersKmeans(ag2_seeds, nb_iter, weights), currentData);
-                    classif.addAgent(new ParametersKmeans(ag3_seeds, nb_iter, weights), currentData);
+                    for (int a : ag_seeds) {
+                        classif.addAgent(new ParametersKmeans(a, nb_iter, weights), currentData);
+                    }

                    final String path_to_add = resultPath + datasetName + "/";
                    classif.setName(ImageHelper.stripExtension(filename) + "-" + i +
...
@@ -271,6 +219,15 @@ public class TestA2CNESIterativeSelectedCst {
                BufferedWriter bw = new BufferedWriter(fw);
                PrintWriter out = new PrintWriter(bw);

+                FileWriter fw2 = null;
+                try {
+                    fw2 = new FileWriter("log/" + rand + "sat_cst" + classif.getName() + ".log", true);
+                } catch (IOException e) {
+                    e.printStackTrace();
+                }
+                BufferedWriter bw2 = new BufferedWriter(fw2);
+                PrintWriter out2 = new PrintWriter(bw2);

                Distance[] distances = new Distance[1];
                // a distance is set for every attribute
                distances[0] = jcl.data.distance.sequential.DistanceDTWMD.getInstance();
                // second attribute (sequential) compared with the DTW distance
                MetaDistance metaDistance = MetaDistanceEuclidean.getInstance();
                // defines the way the two scores are combined (possibility to weight)
...
@@ -302,7 +259,7 @@ public class TestA2CNESIterativeSelectedCst {
                    Vector<Constraint> subsetCL = extractConstraintsRandom(constraintsCL, subsetSize, satisfactionMaskCL);
                    Mask satisfactionMaskML = new IntArrayMask(satisifiedMapML, 1, true);
                    Vector<Constraint> subset = extractConstraintsRandom(constraintsML, subsetSize, satisfactionMaskML);
-                    subset.addAll(constraintsCL);
+                    subset.addAll(subsetCL);

                    out.println("------- new iter : " + (i + 1) + " --------");
                    for (Constraint c : subset) {
                        if (c instanceof MustLinkConstraint) {
...
@@ -331,6 +288,13 @@ public class TestA2CNESIterativeSelectedCst {
                    classif.setAdvancedParameters(degradation, classRatio, solutionType, kIntern, kExtern, unificationType, criterion, 95);
                    classif.newIteration(subset);

+                    int countSat = 0;
+                    for (Constraint c : subset) {
+                        if (c.evaluate(classif.getClusteringResult()) == 1) {
+                            countSat++;
+                        }
+                    }
+                    out2.write(subset.size() + ";" + countSat + "\n");

                    try {
                        new CSVResultWriter(classif, path_to_add + classif.getName() + "_" + (i + 1)).write();
...
@@ -343,6 +307,9 @@ public class TestA2CNESIterativeSelectedCst {
                out.close();
                bw.close();
                fw.close();
+                out2.close();
+                bw2.close();
+                fw2.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
...
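The new `fw2`/`bw2`/`out2` writer appends one `subsetSize;countSat` line per iteration and is closed explicitly at the end of the run. A try-with-resources variant of the same logging (file name pattern copied from the diff; `rand`, `name`, `subsetSize`, and `countSat` stand in for values the test computes) would guarantee the close even when an exception is thrown mid-iteration:

    import java.io.BufferedWriter;
    import java.io.FileWriter;
    import java.io.IOException;
    import java.io.PrintWriter;

    public class SatisfactionLog {
        // Append "<subsetSize>;<countSat>" to log/<rand>sat_cst<name>.log,
        // letting try-with-resources close the writer chain in every case.
        static void append(String rand, String name, int subsetSize, int countSat) {
            try (PrintWriter out2 = new PrintWriter(
                    new BufferedWriter(new FileWriter("log/" + rand + "sat_cst" + name + ".log", true)))) {
                out2.write(subsetSize + ";" + countSat + "\n");
            } catch (IOException e) {
                e.printStackTrace();
            }
        }

        public static void main(String[] args) {
            append("42", "demo", 10, 7);
        }
    }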