lafabregue / MultiCube · Commits

Commit 7482508b
authored Jun 11, 2018 by balanche
work in progress: cleaning of the old distance implementation
parent 965162f4
Showing 6 changed files with 15 additions and 829 deletions (+15 −829)

src/mustic/gui/MainFrame.java (+0 −330)
src/mustic/gui/panels/ImageResultPanel.java (+0 −12)
src/mustic/gui/panels/classifier/ClassifierPanel.java (+6 −5)
src/mustic/gui/panels/classifier/components/HybridClassificationPanel.java (+9 −1)
src/mustic/io/ImageData.java (+0 −6)
src/mustic/io/ImageMultiDimSequenceData.java (+0 −475)
src/mustic/gui/MainFrame.java
...
...
@@ -10,12 +10,6 @@ import java.awt.event.*;
import java.awt.image.BufferedImage;
import java.beans.PropertyVetoException;
import java.io.*;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Vector;
import java.util.zip.ZipEntry;
...
...
@@ -27,28 +21,16 @@ import javax.swing.border.EmptyBorder;
import javax.swing.event.*;
import javax.swing.tree.*;
import org.joda.time.DateTime;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;
import jcl.Classification;
import jcl.clustering.Cluster;
import jcl.clustering.ClusteringResult;
import jcl.clustering.constraints.CannotLinkConstraint;
import jcl.clustering.constraints.Constraint;
import jcl.clustering.constraints.MustLinkConstraint;
import jcl.data.Data;
import jcl.data.DataObject;
import jcl.data.SimpleData;
import jcl.data.attribute.AttributeMultiDimSequence;
import jcl.io.results.CSVResultWriter;
import jcl.learning.LearningMethod;
import jcl.learning.methods.monostrategy.SingleClassification;
import jcl.learning.methods.monostrategy.kmeans.ParametersKmeans;
import jcl.learning.methods.multistrategy.samarah.HybridClassification;
import jcl.learning.methods.multistrategy.samarah.SamarahConfig;
import jcl.weights.ClassificationWeights;
import jcl.weights.GlobalWeights;
import jsl.Segmentation;
import mustic.gui.dialog.*;
import mustic.gui.dialog.arff.ArffLoadDialog;
...
...
@@ -68,13 +50,9 @@ import mustic.gui.progressbar.OpenRawImage;
import mustic.gui.segmentation.*;
import mustic.io.*;
import mustic.models.gui.panels.*;
import mustic.models.thread.ClassificationThread;
import mustic.utils.UnZipSub;
import mustic.utils.filters.ImageFileFilter;
import mustic.utils.filters.ZipFileFilter;
import mustic.utils.image.ImageHelper;
import mustic.utils.io.CSVUtils;
import mustic.utils.io.dataExchange.DataCsvExchange;
import mustic.utils.tools.ProgressStatus;
import net.infonode.tabbedpanel.*;
import net.infonode.tabbedpanel.titledtab.TitledTab;
...
...
@@ -736,16 +714,6 @@ public class MainFrame extends JFrame {
});
OGRMenu.add(menuItemLoadOGRToData);
//TODO to delete when no more used
JMenuItem customToDelete = new JMenuItem("custom"); //$NON-NLS-1$
customToDelete.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
customClassify();
}
});
CSVMenu.add(customToDelete);
// menuItemResultToCSV = new JMenuItem(Messages.getString("MainFrame.305")); //$NON-NLS-1$
// menuItemResultToCSV.addActionListener(new ActionListener() {
// @Override
...
...
@@ -3006,14 +2974,6 @@ public class MainFrame extends JFrame {
classifAfterSegmentation = true;
}
else if (entry.getName().equals(Messages.getString("MainFrame.186") + i + "/ParameterMode.ser")) { //$NON-NLS-1$ //$NON-NLS-2$
/**
* Loading the sequence comparison mode
* @author Francois Petitjean
*/
ObjectInputStream ois = new ObjectInputStream(in);
AttributeMultiDimSequence.setMode((Integer) ois.readObject());
}
else if (entry.getName().equals(Messages.getString("MainFrame.188") + i + "/ParameterSimplify.ser")) { //$NON-NLS-1$ //$NON-NLS-2$
/**
* Loading the sequence simplification type
...
...
@@ -3798,296 +3758,6 @@ public class MainFrame extends JFrame {
this.launchOrthoRect = launchOrthoRect;
}
private void customClassify() {
HybridClassification classification = new HybridClassification();
String datasetName = "FacesUCR";
String datasetPath = "FacesUCR";
String dataPath = "/home/baptiste/A2CNES/";
String resultPath = "/home/baptiste/A2CNES/results/";
String testResultPath = "/home/baptiste/A2CNES/Train_results/";
Data dataTrain = getDataFromFile(dataPath+datasetPath+"/train/"+datasetName+".data", '\t', "train", null);
Data dataTest = getDataFromFile(dataPath+datasetPath+"/test/"+datasetName+".data", '\t', "test", null);
// AttributeMultiDimSequence.setMode(AttributeMultiDimSequence.EUCLIDIEN);
AttributeMultiDimSequence.setMode(AttributeMultiDimSequence.DTW_BARYCENTRE);
DataDesktopFrame[] desktopFrames = MainFrame.getInstance().getDesktop().getAllDataDesktopFrames();
DataSession testSession = desktopFrames[desktopFrames.length-2].getDataSession();
final DateTime startTime = DateTime.now();
int nInf = 12;
int nSup = 17;
// parameter for conflict precision
double minC = 0.9;
// parameter for representative classes
double pcr = 0.2;
// quality/similarity parameter
double ps = 0.2;
double pq = 1.0 - ps;
double constraintsWgt = 80;
double valueKExtern = 60;
double kExtern = valueKExtern * ((100-constraintsWgt)/100);
double kIntern = (100 - valueKExtern) * ((100-constraintsWgt)/100);
int solutionType = 3;
double classRatio = 0.01;
int criterion = 0;
double degradation = 0.9;
int unificationType = SamarahConfig.WITHOUT_UNCLASSIFIED_UNIFICATION;
classification.setParameters(nInf, nSup, minC, ps, pq, pcr);
classification.setAdvancedParameters(degradation, classRatio, solutionType, kIntern,
kExtern, unificationType, criterion, constraintsWgt);
ClassificationWeights weights = new GlobalWeights(dataTrain);
final Vector<Thread> threadList = new Vector<Thread>();
final Vector<Classification> classifList = new Vector<Classification>();
// we search for all constraints files
// <<<< START FOR PARAM config
// for (int i = 0 ; i < 10 ; i++) {
// HybridClassification classif = (HybridClassification) classification.clone();
// if (i % 2 == 1 ) {
// classification.setParameters(nInf, nSup, minC, 0.4, 0.6, pcr);
// }
// if (i <= 5)
// classif.addAgent(new ParametersKmeans(8, 25, weights), dataTrain);
// if (i <= 7)
// classif.addAgent(new ParametersKmeans(10, 25, weights), dataTrain);
// classif.addAgent(new ParametersKmeans(13, 25, weights), dataTrain);
// classif.addAgent(new ParametersKmeans(16, 25, weights), dataTrain);
// if (i >= 2)
// classif.addAgent(new ParametersKmeans(19, 25, weights), dataTrain);
// if (i >= 4)
// classif.addAgent(new ParametersKmeans(22, 25, weights), dataTrain);
//// classif.addAgent(new ParametersKmeans(3, 10, weights), dataTest);
//// classif.addAgent(new ParametersKmeans(4, 10, weights), dataTest);
//// classif.addAgent(new ParametersKmeans(6, 10, weights), dataTest);
//
// classif.setName(testResultPath+datasetName+"/clusteing"+i);
// classif.setData(dataTrain);
// >>>> END FOR PARAM config1
// for (int i = 0 ; i < 20 ; i++) {
// HybridClassification classif = (HybridClassification) classification.clone();
// classif.addAgent(new ParametersKmeans(13, 20, weights), dataTest);
// classif.addAgent(new ParametersKmeans(16, 20, weights), dataTest);
// classif.addAgent(new ParametersKmeans(19, 20, weights), dataTest);
//
// classif.setName(resultPath+datasetPath+"/clustering"+i);
// classif.setData(dataTest);
//// >>>> REPLACE PARAM
// Thread t = null;
//
// ClassificationImage classificationImage = new ClassificationImage(testSession,
// Messages.getString("ClassifierPanel.73") + DataSession.nbClustering, //$NON-NLS-1$
// true);
//
//
// t = new ClassificationThread(classif, classificationImage.getProgressBar(),
// classificationImage, null);
// t.start();
// try {
// testSession.associatedFrame.setMaximum(true);
// } catch (PropertyVetoException e1) {}
// testSession.addClassifier(classificationImage);
// try {
// testSession.associatedFrame.setSelected(true);
// testSession.associatedFrame.toFront();
// } catch (PropertyVetoException e) {
// e.printStackTrace();
// }
// classificationImage.setVisible(true);
//
// threadList.add(t);
// classifList.add(classif);
// }
// <<<< CONSTRAINTS
try (DirectoryStream<Path> dirStream = Files.newDirectoryStream(
Paths.get(dataPath+datasetName+"/train/"), "*1_10.constraints")) {
Iterator<Path> iter = dirStream.iterator();
while(iter.hasNext()) {
Vector<Constraint> constraints = new Vector<Constraint>();
String filename = iter.next().getFileName().toString();
System.out.println(filename);
CSVUtils reader = null;
try {
reader = new CSVUtils(new File(dataPath+datasetName+"/test/"+filename), '\t');
List<String> line;
while((line = reader.readNext()) != null) {
switch (Integer.parseInt(line.get(line.size()-1))) {
case Constraint.MUST_LINK_TYPE :
try {
int index1 = Integer.parseInt(line.get(0))-1;
int index2 = Integer.parseInt(line.get(1))-1;
constraints.add(new MustLinkConstraint(index1, index2));
} catch(Exception e) {
e.printStackTrace();
}
break;
case Constraint.CANNOT_LINK_TYPE :
try {
int index1 = Integer.parseInt(line.get(0))-1;
int index2 = Integer.parseInt(line.get(1))-1;
constraints.add(new CannotLinkConstraint(index1, index2));
} catch(Exception e) {
e.printStackTrace();
}
break;
}
}
} catch (FileNotFoundException e) {
e.printStackTrace();
}
for(int i = 0 ; i < 1; i++) {
HybridClassification classif = (HybridClassification) classification.clone();
Data currentData = (Data) dataTest.clone();
currentData.updateAndSetConstraintsToSample(constraints);
classif.addAgent(new ParametersKmeans(16, 15, weights), currentData);
classif.addAgent(new ParametersKmeans(20, 15, weights), currentData);
classif.addAgent(new ParametersKmeans(24, 15, weights), currentData);
classif.setName(resultPath+datasetName+"/"+
ImageHelper.stripExtension(filename)+"-"+i+
".clustering");
classif.setData(currentData);
Thread t = null;
ClassificationImage classificationImage = new ClassificationImage(testSession,
classif.getName(), //$NON-NLS-1$
true);
t = new ClassificationThread(classif, classificationImage.getProgressBar(),classificationImage.getStatusBar(),
classificationImage, null, false, null);
t.start();
try {
testSession.associatedFrame.setMaximum(true);
} catch (PropertyVetoException e1) {}
testSession.addClassifier(classificationImage);
try {
testSession.associatedFrame.setSelected(true);
testSession.associatedFrame.toFront();
} catch (PropertyVetoException e) {
e.printStackTrace();
}
classificationImage.setVisible(true);
threadList.add(t);
classifList.add(classif);
}
}
} catch (IOException e2) {
e2.printStackTrace();
}
// >>>> CONSTRAINTS
new Thread() {
@Override
public void run() {
try {
for (Thread thread : threadList) {
thread.join();
}
} catch (InterruptedException e) {
e.printStackTrace();
}
for(Classification cl : classifList) {
try {
new CSVResultWriter(cl, cl.getName()).write();
} catch (IOException e) {
e.printStackTrace();
}
}
DateTimeFormatter formatter = DateTimeFormat.forPattern("MM/dd/yyyy HH:mm:ss");
System.out.println("Start at "+ formatter.print(startTime));
System.out.println("wrote final results at "+ formatter.print(DateTime.now()));
}
}.start();
ClassificationFrame.getInstance().updateClassifierPanel();
ClassificationFrame.getInstance().dispose();
}
private Data getDataFromFile(String path, char separator, String name, String mask) {
Data data = null;
// we create the two data sessions
// the first session is used for training, the second for classification
try {
List<DataObject> result = new ArrayList<DataObject>();
CSVUtils reader = null;
CSVUtils maskReader = null;
try {
File file = new File(path);
reader = new CSVUtils(file, separator);
List<String> line;
if (mask != null) {
maskReader = new CSVUtils(new File(mask), separator);
}
List<String> maskLine = null;
while((line = reader.readNext()) != null) {
if (mask != null) {
maskLine = maskReader.readNext();
}
extractObjectFromLine(result, line, 1, maskLine);
}
} catch (FileNotFoundException e) {
e.printStackTrace();
result = null;
}
data = new SimpleData(result);
data.setDataName(name);
MainFrame.getInstance().createDataSession(data);
} catch (Exception e) {
e.printStackTrace();
}
return data;
}
/**
* Construct a DataObject from a set of Strings and add it to a List
* @param list
* the list to add the DataObject to
* @param line
* the line to interpret
* @param featureNb
* the number of features per sequence element
* @param maskLine
* the mask line matching the data line (entries equal to 0 are skipped), or null
*/
private void extractObjectFromLine(List<DataObject> list, List<String> line, int featureNb, List<String> maskLine) {
DataObject obj = new DataObject(1);
double[][] seq = new double[line.size()/featureNb][];
for (int i = 0 ; i < line.size()/featureNb ; i++) {
if (maskLine != null) {
if(Integer.parseInt(maskLine.get(i)) == 0)
continue;
}
seq[i] = new double[featureNb];
for (int j = 0 ; j < featureNb ; j++) {
seq[i][j] = Double.parseDouble(line.get(i*featureNb + j));
}
}
obj.setAttribute(0, new AttributeMultiDimSequence(seq));
list.add(obj);
}
private void extractcustomESV() {
DataCsvExchange.dataToCSV("/home/baptiste/dataExtraite.csv", this.currentDataSession.getData(),
"\t", false);
}
/**
* Return the currently selected session
* @return the DataSession/ImageSession
...
...
src/mustic/gui/panels/ImageResultPanel.java
...
...
@@ -1199,12 +1199,6 @@ public class ImageResultPanel extends ResultPanel implements TreeSelectionListen
out.closeEntry();
}
out.putNextEntry(new ZipEntry(name + '/' + "ParameterMode.ser"));
oos = new ObjectOutputStream(out);
oos.writeObject(AttributeMultiDimSequence.getMode());
oos.flush();
out.closeEntry();
out.putNextEntry(new ZipEntry(name + '/' + "ParameterSimplify.ser"));
oos = new ObjectOutputStream(out);
oos.writeObject(AttributeMultiDimSequence.getSimplifyFrom());
...
...
@@ -1311,12 +1305,6 @@ public class ImageResultPanel extends ResultPanel implements TreeSelectionListen
out.closeEntry();
}
out.putNextEntry(new ZipEntry(name + '/' + "ParameterMode.ser"));
oos = new ObjectOutputStream(out);
oos.writeObject(AttributeMultiDimSequence.getMode());
oos.flush();
out.closeEntry();
out.putNextEntry(new ZipEntry(name + '/' + "ParameterSimplify.ser"));
oos = new ObjectOutputStream(out);
oos.writeObject(AttributeMultiDimSequence.getSimplifyFrom());
...
...
src/mustic/gui/panels/classifier/ClassifierPanel.java
...
...
@@ -941,20 +941,22 @@ public class ClassifierPanel extends JPanel {
ClassificationWeights weights = new GlobalWeights(data);
if (this.attributeSelectionPanel != null)
    weights = this.attributeSelectionPanel.getWeights();
DistanceModel model = null;
/* generates model */
if (this.useNaiveDist) {
    // AttributeMultiDimSequence.setMode(AttributeMultiDimSequence.EUCLIDIEN);
    model = DistanceModel.generateNaiveModel(data.getOneDataObject());
    model = DistanceModel.generateNaiveModel(data.getOneDataObject(), weights);
} else {
    // AttributeMultiDimSequence.setMode(AttributeMultiDimSequence.DTW_BARYCENTRE);
    model = DistanceModel.generateDefaultModel(data.getOneDataObject());
    model = DistanceModel.generateDefaultModel(data.getOneDataObject(), weights);
}
if (this.attributeSelectionPanel != null)
    weights = this.attributeSelectionPanel.getWeights();
int choice = this.modele.getSelectedApproach();
...
...
@@ -1042,7 +1044,6 @@ public class ClassifierPanel extends JPanel {
else {
    distanceParameters[th][i] = EmptyDistanceParameter.getInstance();
    // no parameter for a numerical euclidean distance
    averageParameters[i] = EmptyAverageParameter.getInstance();
}
i++;
}
...
...
src/mustic/gui/panels/classifier/components/HybridClassificationPanel.java
...
...
@@ -10,7 +10,11 @@ import javax.swing.*;
import javax.swing.border.TitledBorder;
import jcl.data.Data;
import jcl.data.attribute.Attribute;
import jcl.data.attribute.AttributeMultiDimSequence;
import jcl.data.distance.Distance;
import jcl.data.distance.DistanceParameter;
import jcl.data.distance.sequential.DistanceEuclidean;
import jcl.learning.LearningParameters;
import jcl.learning.methods.multistrategy.samarah.*;
import mustic.gui.MainFrame;
...
...
@@ -151,7 +155,11 @@ public class HybridClassificationPanel extends JPanel implements AgentListManage
else {
    file += "\t\t<isSimplifySequence>true</isSimplifySequence>\n";
}
if (AttributeMultiDimSequence.getMode() == AttributeMultiDimSequence.EUCLIDIEN) {
    for (Distance<Attribute, DistanceParameter> d : agent.getParams().getModel().getDistances()) {
        if (!(d instanceof DistanceEuclidean))
}
if (AttributeMultiDimSequence.getMode() == AttributeMultiDimSequence.EUCLIDIEN
        && agent.getParams().getModel()) {
    file += "\t\t<isUseNaiveDist>true</isUseNaiveDist>\n";
}
else {
...
...
src/mustic/io/ImageData.java
...
...
@@ -533,12 +533,6 @@ public class ImageData extends SimpleData implements Serializable {
public void setResolution(double resolution) {
    this.resolution = resolution;
}

private void setProperties(int mode, int simplifyFrom) {
    AttributeMultiDimSequence.setMode(mode);
    AttributeMultiDimSequence.setSimplifyFrom(simplifyFrom);
}
/**
* <p>
...
...
src/mustic/io/ImageMultiDimSequenceData.java
deleted 100644 → 0
package mustic.io;

import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.concurrent.ConcurrentHashMap;

import jcl.data.Data;
import jcl.data.DataObject;
import jcl.data.SimpleData;
import jcl.data.attribute.AttributeMultiDimSequence;
import jcl.data.mask.Mask;

/**
 * @author Francois Petitjean
 * @deprecated the ImageData class supports sequences of images; it should be used instead
 */
public class ImageMultiDimSequenceData extends SimpleData implements Serializable {

    private static final long serialVersionUID = 1L;

    /**
     * positions of the values in the image when a global mask is used
     */
    public long[] indexInImage;

    public ImageMultiDimSequenceData(RawImage[] seqImages) {
        launch(seqImages, null);
    }

    public ImageMultiDimSequenceData(RawImage[] seqImages, int mode, int simplifyFrom) {
        setProperties(mode, simplifyFrom);
        launch(seqImages, null);
    }

    public ImageMultiDimSequenceData(RawImage[] seqImages, RawImage[] masks) {
        launch(seqImages, masks);
    }

    public ImageMultiDimSequenceData(RawImage[] seqImages, RawImage[] masks, int mode, int simplifyFrom) {
        setProperties(mode, simplifyFrom);
        launch(seqImages, masks);
    }

    private void launch(RawImage[] seqImages, RawImage[] maskImages) {
        if (seqImages == null) {
            PiLLiTools.error("Invalid image provided", "data/ImageData.java", "Constructeur aleatoire");
            return;
        }
        if (seqImages[0].getMask() == null) {
            if (maskImages == null) {
                System.out.println("launchWithoutMasks");
                launchWithoutMasks(seqImages);
            } else {
                System.out.println("launchWithLocalMasksForDTW");
                launchWithLocalMasksForDTW(seqImages, maskImages);
            }
        } else {
            if (maskImages == null) {
                System.out.println("launchWithGlobalMask");
                launchWithGlobalMask(seqImages);
            } else {
                if (AttributeMultiDimSequence.getMode() == AttributeMultiDimSequence.DTW_BARYCENTRE) {
                    System.out.println("launchWithGlobalAndLocalMasks");
                    launchWithGlobalAndLocalMasks(seqImages, maskImages);
                } else {
                    System.err.println("Demande de masques locaux en euclidien dans "
                            + this.getClass().getName() + "#launch(RawImage[], RawImage[])");
                    return;
                }
            }
        }
    }

    private void launchWithGlobalAndLocalMasks(final RawImage[] seqImages, final RawImage[] maskImages) {
        /* creating the band names */
        int nbBands = seqImages[0].getNbSelectedBands();
        String[] bandNames = new String[nbBands];
        for (int i = 0; i < bandNames.length; i++) {
            bandNames[i] = "Band " + i;
        }
        String[] attNames = { "Sequence" };
        this.setAttributesNames(attNames);
        Mask globalMask = seqImages[0].getMask();
        int nbPixels = seqImages[0].getWidth() * seqImages[0].getHeight() - seqImages[0].getNbMaskedPixels();
        int nbImages = seqImages.length;
        System.out.println(nbPixels);
        ConcurrentHashMap<Integer, ArrayList<Double[]>> values = new ConcurrentHashMap<Integer, ArrayList<Double[]>>();
        /*
         * We create the data array. We need as many DataObject items as there are
         * pixels in the image. Each item will contain a tuple.
         */
        this.setLocked(new boolean[nbPixels]);
        this.currentView = new ArrayList<DataObject>(nbPixels);
        this.indexInImage = new long[nbPixels];
        int xdim = seqImages[0].getWidth(), ydim = seqImages[0].getHeight();
        double[][][] buffer;
        double[][][] maskBuffer;
        ArrayList<Double[]> tmp;
        Double[] tmpTab;
        boolean wasNull;
        for (int i = 0; i < seqImages.length; i++) {
            System.out.println("Image " + i);
            buffer = seqImages[i].load(0, 0, seqImages[i].getWidth(), seqImages[i].getHeight());
            seqImages[i] = null;
            maskBuffer = maskImages[i].load(0, 0, maskImages[i].getWidth(), maskImages[i].getHeight());
            maskImages[i] = null;
            System.gc();
            for (int x = 0; x < xdim; x++) {
                for (int y = 0; y < ydim; y++) {
                    if (!globalMask.isMasked(x + y * ydim)) {
                        if (maskBuffer[0][x][y] == 0) {
                            tmp = values.get(y * xdim + x);
                            wasNull = (tmp == null);
                            if (wasNull) {
                                tmp = new ArrayList<Double[]>(nbImages / 2);
                            }
                            tmpTab = new Double[nbBands];
                            for (int b = 0; b < nbBands; b++) {
                                tmpTab[b] = buffer[b][x][y];
                            }
                            tmp.add(tmpTab);
                            if (i == 40)
                                tmp.trimToSize();
                            if (wasNull) {
                                values.put(y * xdim + x, tmp);
                            }
                        }