Commit 2c25660f authored by Hurstel

Git init

Project component details:
/Alpha
-> precompiled executable for Windows with an x86 architecture
/FIC_scripts
-> python scripts to help analyze the OscilloPic2's signal data output.
/libOscilloPic2
-> C++ library that provides functions to interact with the OscilloPic2 device. Optionally
used by the SeriesProtocol program
/SeriesDoc
-> quick documentation about the programs
/SeriesGenerator
-> a Qt user-interface program that generates 'Feel In Controls' series description files.
/SeriesProtocol
-> a program that interprets and runs 'Feel In Controls' series description files.
Make sure to check the SeriesDoc and each component's individual README.txt files
<config>
<curve_display>
<curve_view_width>5.0</curve_view_width>
<curve_view_min>-35.0</curve_view_min>
<curve_view_max>100.0</curve_view_max>
</curve_display>
<global_params>
<gl_update_delay>500</gl_update_delay>
<save_results>true</save_results>
<phantom_sampling_rate>1600</phantom_sampling_rate>
<auto_seed>true</auto_seed>
<save_path>data</save_path>
</global_params>
<device active='yes'>
<threshold>10</threshold>
<timeout>500</timeout>
<com_port>14</com_port>
<baudrate>115200</baudrate>
</device>
</config>
<signal_config>
<img_path>[default display]</img_path>
<default_color red="255" blue="0" green="0"/>
<size width="100" height="100"/>
<snd_path>[default sound]</snd_path>
</signal_config>
<config>
<curve_display>
<curve_view_width>5.0</curve_view_width>
<curve_view_min>-35.0</curve_view_min>
<curve_view_max>100.0</curve_view_max>
</curve_display>
<global_params>
<gl_update_delay>500</gl_update_delay>
<save_results>true</save_results>
<phantom_sampling_rate>1600</phantom_sampling_rate>
<auto_seed>true</auto_seed>
<save_path>data</save_path>
</global_params>
<device active='yes'>
<threshold>10</threshold>
<timeout>500</timeout>
<com_port>14</com_port>
<baudrate>115200</baudrate>
</device>
</config>
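
The block above appears to be the tools' XML configuration file. As a rough illustration only
(the consuming programs are C++/Qt, and the file name 'config.xml' below is a hypothetical
placeholder), the same values could be read from Python 2.7 with the standard library:

import xml.etree.ElementTree as ET

tree = ET.parse("config.xml")            # hypothetical path to the configuration file
root = tree.getroot()                    # the <config> element

device = root.find("device")
if device is not None and device.get("active") == "yes":
    com_port = int(device.find("com_port").text)       # 14
    baudrate = int(device.find("baudrate").text)       # 115200
    threshold = int(device.find("threshold").text)     # 10
    print "device on COM%d at %d bauds (threshold %d)" % (com_port, baudrate, threshold)

save_results = (root.find("global_params/save_results").text == "true")
save_path = root.find("global_params/save_path").text  # "data"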
To use the 'SignalAnalyze' scripts you'll need Python 2.7.
Check /install/README.txt to install the Python 2.7 dependencies before you can
use the scripts.
SignalAnalyze.py is the GUI version.
SignalAnalyze_cmd.py is the console version.
# python SignalAnalyze.py -h
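
For example, assuming a recorded CSV file named 'record_001.csv' and a report path
'report_001.txt' (both names are hypothetical placeholders):
# python SignalAnalyze_cmd.py record_001.csv HV --report report_001.txt
# python SignalAnalyze_cmd.py record_001.csv AH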
'''
Created on 27 August 2014
@author: hurstel
'''
import sys
import os.path
from argparse import ArgumentParser
from argparse import RawDescriptionHelpFormatter
from gooey.gooey_decorator import Gooey
from dev.CSVFileLooker import CSVFileLooker
from dev.HapticAnalyzer import HapticAnalyzer
from dev.AuditiveAnalyzer import AuditiveAnalyzer
from dev.VisualAnalyzer import VisualAnalyzer
def writeReport(reportPath, sigConf, clusters):
print "Writing report"
path= reportPath
i=1
while ( os.path.isfile(path) ) :
i+=1
path_split= reportPath.split('.')
if len(path_split)>1:
path= ('.'.join(path_split[:-1]))+("_{0}".format(i))+'.'+path_split[-1:][0]
else:
path= reportPath+("_{0}".format(i))
reportFile= open(path, 'w')
print "---- writing at: "+path
l1= len(clusters[0])
l2= len(clusters[1])
if (l1 != l2) :
reportFile.write("# (W) The same amount of clusters hasn't been detected in both entries\n\n")
reportFile.write("signal ; cluster_detect_position ; time\n")
groupClust= [ (c,e) for e in [0,1] for c in clusters[e] ]
groupClust.sort()
print "fuu: %s" % str(groupClust)
lg= len(groupClust)
for i in range(0, lg):
if i<(lg-1) and groupClust[i][0]==groupClust[i+1][0]:
w_s= sigConf[0]+sigConf[1]
else :
w_s= str(sigConf[groupClust[i][1]])
p= groupClust[i][0]
reportFile.write("{0} ; {1} ; {2}\n".format(w_s, p, float(p)/4000.0))
reportFile.close()
@Gooey
def main(argv=None):
if (argv is None):
argv= sys.argv
else:
sys.argv.extend(argv)
parser= ArgumentParser(description="Test", formatter_class=RawDescriptionHelpFormatter)
parser.add_argument("csvFilePath", help="path to the csv file to analyze")
parser.add_argument("signalConfig", help="signal configuration for both entries (ex: 'HV' for haptic on entry\
1 and visual on entry 2; 'AH' for auditive on entry 1 and haptic on entry 2)")
parser.add_argument("--report", dest="report", help="print a report file (give path)")
args= parser.parse_args()
looker= CSVFileLooker(args.csvFilePath)
looker.read()
sigConf= args.signalConfig[:2].upper()
print "\n\n\n"
print "---------"
print "This is a help script."
print "Its goal is to find quickly the signals occurence in the given sampled dataSet."
print "There are no warranty nor reliability for the results."
print "(especially regarding the auditive signal which is highly unstable)"
print "You'll probably want to (and should) double check manually the dataSet."
print "Feel free to edit and improve this script."
print "---------\n\n"
class SignalConfigError(Exception):
def __init__(self):
Exception.__init__(self)
def __str__(self):
return "The script's 'signalConfig' argument is invalid."
analyzers= [None,None]
if (len(sigConf)>=2):
data1= [v[0] for v in looker.data]
data2= [v[1] for v in looker.data]
if sigConf[0]=='A':
analyzers[0]= AuditiveAnalyzer(data1)
elif sigConf[0]=='V':
analyzers[0]= VisualAnalyzer(data1)
else:
analyzers[0]= HapticAnalyzer(data1)
if sigConf[1]=='H':
analyzers[1]= HapticAnalyzer(data2)
elif sigConf[1]=='V':
analyzers[1]= VisualAnalyzer(data2)
else:
analyzers[1]= AuditiveAnalyzer(data2)
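# Note: any letter other than 'A'/'V' for entry 1 falls back to HapticAnalyzer, and any letter
# other than 'H'/'V' for entry 2 falls back to AuditiveAnalyzer.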
foundClusters= [None, None]
for i in [0,1]:
analyzers[i].compute()
foundClusters[i]= analyzers[i].findCluster()
ev= [None,None]
for i in [0,1]:
print '%c signal:' % sigConf[i]
print '---- foundClusters: %s' % foundClusters[i]
v= []
for clust in foundClusters[i]:
v.append(analyzers[i].probableSignalClusterSignificantPos(clust))
print '---- signal occurrences in dataSet: %s' % v
print '---- signal occurrence times (s) in the sampling: %s' % str([ float(p)/4000.0 for p in v])
ev[i]=v
if args.report is not None :
writeReport(args.report, sigConf, ev)
else :
raise SignalConfigError
return 0
if __name__ == "__main__":
sys.exit(main())
'''
Created on 27 August 2014
@author: hurstel
'''
import sys
import os.path
from argparse import ArgumentParser
from argparse import RawDescriptionHelpFormatter
from dev.CSVFileLooker import CSVFileLooker
from dev.HapticAnalyzer import HapticAnalyzer
from dev.AuditiveAnalyzer import AuditiveAnalyzer
from dev.VisualAnalyzer import VisualAnalyzer
def writeReport(reportPath, sigConf, clusters):
print "Writing report"
path= reportPath
i=1
while ( os.path.isfile(path) ) :
i+=1
path_split= reportPath.split('.')
if len(path_split)>1:
path= ('.'.join(path_split[:-1]))+("_{0}".format(i))+'.'+path_split[-1:][0]
else:
path= reportPath+("_{0}".format(i))
reportFile= open(path, 'w')
print "---- writing at: "+path
l1= len(clusters[0])
l2= len(clusters[1])
if (l1 != l2) :
reportFile.write("# (W) The same amount of clusters hasn't been detected in both entries\n\n")
reportFile.write("signal ; cluster_detect_position ; time\n")
groupClust= [ (c,e) for e in [0,1] for c in clusters[e] ]
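# groupClust holds (cluster_position, entry_index) pairs merged from both entries, sorted by position below.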
groupClust.sort()
print "fuu: %s" % str(groupClust)
lg= len(groupClust)
for i in range(0, lg):
if i<(lg-1) and groupClust[i][0]==groupClust[i+1][0]:
w_s= sigConf[0]+sigConf[1]
else :
w_s= str(sigConf[groupClust[i][1]])
p= groupClust[i][0]
reportFile.write("{0} ; {1} ; {2}\n".format(w_s, p, float(p)/4000.0))
reportFile.close()
def main(argv=None):
if (argv is None):
argv= sys.argv
else:
sys.argv.extend(argv)
parser= ArgumentParser(description="Test", formatter_class=RawDescriptionHelpFormatter)
parser.add_argument("csvFilePath", help="path to the csv file to analyze")
parser.add_argument("signalConfig", help="signal configuration for both entries (ex: 'HV' for haptic on entry\
1 and visual on entry 2; 'AH' for auditive on entry 1 and haptic on entry 2)")
parser.add_argument("--report", dest="report", help="print a report file (give path)")
args= parser.parse_args()
looker= CSVFileLooker(args.csvFilePath)
looker.read()
sigConf= args.signalConfig[:2].upper()
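# Only the first two characters are used: one letter per entry, among 'A' (auditive), 'H' (haptic) and 'V' (visual).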
print "\n\n\n"
print "---------"
print "This is a help script."
print "Its goal is to find quickly the signals occurence in the given sampled dataSet."
print "There are no warranty nor reliability for the results."
print "(especially regarding the auditive signal which is highly unstable)"
print "You'll probably want to (and should) double check manually the dataSet."
print "Feel free to edit and improve this script."
print "---------\n\n"
class SignalConfigError(Exception):
def __init__(self):
Exception.__init__(self)
def __str__(self):
return "The script's 'signalConfig' argument is invalid."
analyzers= [None,None]
if (len(sigConf)>=2):
data1= [v[0] for v in looker.data]
data2= [v[1] for v in looker.data]
if sigConf[0]=='A':
analyzers[0]= AuditiveAnalyzer(data1)
elif sigConf[0]=='V':
analyzers[0]= VisualAnalyzer(data1)
else:
analyzers[0]= HapticAnalyzer(data1)
if sigConf[1]=='H':
analyzers[1]= HapticAnalyzer(data2)
elif sigConf[1]=='V':
analyzers[1]= VisualAnalyzer(data2)
else:
analyzers[1]= AuditiveAnalyzer(data2)
foundClusters= [None, None]
for i in [0,1]:
analyzers[i].compute()
foundClusters[i]= analyzers[i].findCluster()
ev= [None,None]
for i in [0,1]:
print '%c signal:' % sigConf[i]
print '---- foundClusters: %s' % foundClusters[i]
v= []
for clust in foundClusters[i]:
v.append(analyzers[i].probableSignalClusterSignificantPos(clust))
print '---- signal occurrences in dataSet: %s' % v
print '---- signal occurrence times (s) in the sampling: %s' % str([ float(p)/4000.0 for p in v])
ev[i]=v
if args.report is not None :
writeReport(args.report, sigConf, ev)
else :
raise SignalConfigError
return 0
if __name__ == "__main__":
sys.exit(main())
'''
Created on 27 August 2014
@author: hurstel
'''
import re
class CSVFileLooker:
"""
A class whose goal is to parse and store the sampled data
saved into a given file so that it can be accessed
later for analysis purposes.
"""
class Error:
"""
Container class for
CSVFileLooker's methods' errors
"""
class OpenFailError(Exception):
"""
Error emitted if the file containing the sampled data
can't be read
"""
def __init__(self, filePath):
"""
Constructor - do not call directly
"""
self._filePath= filePath
def __str__(self):
"""
str method overload - do not call directly, use str()
"""
return "The registered file ({0}) hasn't been open properly".format(self._filePath)
class ErrorLineConstruct(Exception):
"""
Error emitted if a line of the data-containing file isn't
correctly formatted, making the read impossible
"""
def __init__(self, filePath, lineNumber):
"""
Constructor - do not call directly
"""
self._filePath= filePath
self._lineNumber= lineNumber
def __str__(self):
"""
str method overload - do not call directly, use str()
"""
return "It appears that the read file ({0}) isn't well constructed\
(error at line {1})".format(self._filePath,self._lineNumber)
def __init__(self, filePath, separator=' '):
"""
Constructor - do not call directly
"""
self._filePath= filePath
"""private attribute - do not call directly
the path of the data containing file to read"""
self._separator=separator
"""private attribute - do not call directly
line separator for the data into the data file lines"""
self.data= []
"""public attribute
stores the read data under the form
[v1,v2] where v1 is the sampled data from the first signal entry
and v2 the sampled data from the second signal entry.
The size of this attribute will be the number of 'sampling' recorded which should be
(sampling_duration)*(sampling_frequency)
(with sampling duration in seconds (s), and
sampling_frequency= 4000(Hz), base and OscilloPic2 specs)"""
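# Example: a 2-second recording sampled at 4000 Hz gives len(self.data) == 8000,
# and self.data[0] could look like (12, 503) (illustrative values only).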
def _whatIsSeparator(self, line):
"""
private method - do not call directly
determines the single character data separator in
a given line
returns the detected separator (char), or None if the
line is in an invalid format.
"""
rx= r'([0-9]+)(.)([0-9]+)(.)([0-9]+)'
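#capture groups: 1= sampling index, 2= separator, 3= first entry value, 4= separator again (must equal group 2), 5= second entry value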
m= re.match(rx,line)
if (m is None) or (m.lastindex != 5) or (m.group(2)!= m.group(4)) :
return None
else:
return m.group(2)
def _extractLineData(self, line, separator):
"""
private method - do not call directly
given a line and separator, extract the line's
data.
returns a size 3 tuple (a,b,c) with:
a: the occurrence index in the sampling
b: the data value sampled on first (signal) entry
c: the data value sampled on second (signal) entry
or returns None if the line isn't formatted correctly or
no data could be extracted
"""
lineDataRaw= line.split(separator)
if len(lineDataRaw)<3 :
return None
try:
lineData= (int(lineDataRaw[0]),int(lineDataRaw[1]), int(lineDataRaw[2]))
except ValueError:
lineData= None
return lineData
def read(self):
"""
public method
opens the sampled-data file associated with the current instance of this object
and stores the extracted data into the public attribute 'data'
Raises Error.OpenFailError exception if the file couldn't be read
Raises Error.ErrorLineConstruct exception if for a certain line,
the data extraction fails
"""
try:
self._file= open(self._filePath, 'r')
except IOError as e:
print 'Error occurred while opening file - IOError : (%s) {%s}' % (e.errno, e.strerror)
raise CSVFileLooker.Error.OpenFailError(self._filePath)
print 'Reading file: %s' % self._filePath
lineNumber= 0
firstLine= self._file.readline()
s= self._whatIsSeparator(firstLine)
#if no separator could be extracted from first line,
#the file is badly constructed, raise error
if (s is None) :
raise CSVFileLooker.Error.ErrorLineConstruct(self._filePath, lineNumber)
lineData= self._extractLineData(firstLine, s)
#if no data could be extracted from first line,
#the file is badly constructed, raise error
if (lineData is None) or (lineData[0]!=lineNumber) :
raise CSVFileLooker.Error.ErrorLineConstruct(self._filePath, lineNumber)
self.data.append((lineData[1],lineData[2]))
for line in self._file:
if ( len(line.split())==0 ) :
break
lineNumber+=1
lineData= self._extractLineData(line, s)
#if no data could be extracted from current line, or
#the current counted line number doesn't match the sampling occurrence,
#the file is badly constructed, raise error
if (lineData is None) or (lineData[0]!=lineNumber) :
raise CSVFileLooker.Error.ErrorLineConstruct(self._filePath, lineNumber)
self.data.append((lineData[1],lineData[2]))
print '---- file is read'
self._file.close()
print '---- file closed'
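
For reference, a minimal sketch of how CSVFileLooker can be used on its own (the CSV file
name is a hypothetical placeholder; SignalAnalyze.py and SignalAnalyze_cmd.py do the same
through their command-line arguments):

from dev.CSVFileLooker import CSVFileLooker

looker = CSVFileLooker("record_001.csv")   # hypothetical sampled-data file
looker.read()                              # may raise Error.OpenFailError or Error.ErrorLineConstruct
print "samples read: %d" % len(looker.data)
print "first sample (entry1, entry2): %s" % str(looker.data[0])
print "recording duration: %.3f s" % (float(len(looker.data)) / 4000.0)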
'''
Created on 28 August 2014
@author: hurstel
'''
import bisect
import math
class DataAnalyzer:
"""
Abstract class.
This class is defined to be a 'skeleton' for
the analyzing classes, which depend on the signal type to
process.
It already provides basic common functions
(such as 'compute()', which computes generic values
like 'average', 'standard deviation', etc.) for the given
sampled data.
"""
class Error:
"""
Container class for
DataAnalyzer's methods' errors
"""
class CallFindButNoCompute(Exception):
"""