scandocs/uni/masterarbeit/source/simulations/moversight/prepareAndExtractStatsData.py
2014-06-30 13:58:10 +02:00


#! /opt/python-2.7/bin/python
import sys
import os
import subprocess
#SIM_PATH="/home/jgaebler/ubeeme/source/simulations"
SIM_PATH="/home/gschneid/ubeeme/simulations"
RESULT_FOLDER="results"
SIM_RESULT_PATH=SIM_PATH+"/"+RESULT_FOLDER
scaveCommand = "scavetool"
scaveOptionListUniqueStatNames="l -n "
RESULT_FILE_EXT_VECTOR="vec"
RESULT_FILE_EXT_SCALAR="sca"
RESULT_FILE_EXT_INDEX="vci"
GROUP_SIZE_VARIABLE = "groupSize"
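# Result file layout (derived from the build*FilePath helpers below):
#   source files : SIM_RESULT_PATH/<config>-<run>.<vec|sca|vci>
#   sink files   : SIM_RESULT_PATH/<config>-<run>-<variable>   (extension appended by scavetool)
#   group size   : SIM_RESULT_PATH/<config>-<run>-groupSize-node-<index>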
def usage():
    print """Usage: %s <omnet-configuration-name> <run-number>\n
Prepares collected moversight statistic data for gnuplot. The script prepares and extracts data from omnet vector and scalar result files.
""" % (os.path.basename(sys.argv[0]))
def main():
    """the main routine, manages the statistic data processing"""
    if len(sys.argv) != 3:
        usage()
        sys.exit(1)
    # sys.argv[0] script name
    # sys.argv[1] omnet-configuration-name
    configName=sys.argv[1]
    # sys.argv[2] run-number
    runNumber=sys.argv[2]
    #prepare vector results
    vectorResultFile=buildAnalyseSourceFilePath(configName, runNumber, RESULT_FILE_EXT_VECTOR)
    if os.path.exists( vectorResultFile ):
        resultListVector=extractAttributes(configName, runNumber, RESULT_FILE_EXT_VECTOR)
        for elem in resultListVector:
            print elem
            if elem == GROUP_SIZE_VARIABLE:
                extractGroupSizeStatisticDataFromStatisticSource(configName, runNumber)
            else:
                extractVectorStatisticDataFromStatisticSource(elem, configName, runNumber)
                recordedNodesPerVariable=countRecordedNodesPerVariable(elem, configName, runNumber)
                plotVectorData(elem, configName, runNumber, recordedNodesPerVariable)
                plotVectorDataWithGroupSize(elem, configName, runNumber, recordedNodesPerVariable)
    else:
        print "no data found to extract - "+vectorResultFile+" does not exist"
    #prepare scalar results
    #scalarResultFile=buildAnalyseSourceFilePath(configName, runNumber, RESULT_FILE_EXT_SCALAR)
    #if os.path.exists( scalarResultFile ):
    #    resultListVector=extractAttributes(configName, runNumber, RESULT_FILE_EXT_VECTOR)
def countRecordedNodesPerVariable(variable, configName, runNumber):
    """counts, via the vector index file, how many nodes have recorded data for the given variable"""
    sourceFilePath = buildAnalyseSourceFilePath(configName, runNumber, RESULT_FILE_EXT_INDEX)
    #execute
    p1 = subprocess.Popen(["cat "+ sourceFilePath], shell=True, stdout=subprocess.PIPE)
    p2 = subprocess.Popen(["grep", "-c", variable], stdin=p1.stdout, stdout=subprocess.PIPE)
    p1.stdout.close() # Allow p1 to receive a SIGPIPE if p2 exits.
    #retrieve the grep output
    std_output_value = p2.communicate()[0]
    resultList = stringToList(std_output_value)
    if len(resultList) != 1:
        raise Exception("unable to count recorded nodes per variable")
    return int(resultList[0])
def gnuplot(handle, string):
    """writes the given command to a gnuplot handle"""
    handle.write(string+";\n")
def plotVectorData(variable, configName, runNumber, recordedNodesPerVariable):
    """plots the extracted vector data via gnuplot"""
    resultFile = buildAnalyseResultFilePath(configName, runNumber, variable)
    sFile = resultFile+"."+RESULT_FILE_EXT_VECTOR
    print "process: "+sFile
    gp = os.popen('/usr/bin/gnuplot','w')
    gnuplot(gp, 'set title "'+configName+' run: '+runNumber+' variable: '+variable+'"')
    gnuplot(gp, 'set xlabel "time [s]"')
    gnuplot(gp, 'set ylabel "delay [s]"')
    gnuplot(gp, 'set key left top')
    #plot the file, skipping the header lines, plotting column 3 against column 4
    gnuplot(gp, 'plot "'+sFile+'" every ::'+str((recordedNodesPerVariable+1))+' using 3:4 with lines title "delay"')
    #export settings png
    gnuplot(gp, 'set terminal push')
    gnuplot(gp, 'set terminal png')
    gnuplot(gp, 'set output "'+SIM_RESULT_PATH+"/"+configName+'-'+runNumber+'-'+variable+'.png"')
    #repeat the most recent plot command, the output now going to the png file
    gnuplot(gp, 'replot')
    #export settings eps
    gnuplot(gp, 'set terminal push')
    gnuplot(gp, 'set term postscript eps enhanced')
    gnuplot(gp, 'set output "'+SIM_RESULT_PATH+"/"+configName+'-'+runNumber+'-'+variable+'.eps"')
    #repeat the most recent plot command, the output now going to the eps file
    gnuplot(gp, 'replot')
    gnuplot(gp, 'set output')
    gnuplot(gp, 'set terminal pop')
    gnuplot(gp, 'unset output')
    gp.close()
def plotVectorDataWithGroupSize(variable, configName, runNumber, recordedNodesPerVariable):
    """plots the extracted vector data via gnuplot compared to a group size graph"""
    #skip if the variable itself is the group size variable
    if variable == GROUP_SIZE_VARIABLE:
        return
    resultFile = buildAnalyseResultFilePath(configName, runNumber, variable)
    sFile = resultFile+"."+RESULT_FILE_EXT_VECTOR
    groupSizeResultFile = buildAnalyseResultFilePath(configName, runNumber, GROUP_SIZE_VARIABLE)
    groupSFile = determineMaxGroupSizeNodeResultFileName(configName, runNumber)
    if groupSFile == "FILE_NAME_NOT_SET":
        print "unable to extract data, because the group size result file was not found"
        return
    print "process: "+sFile
    print " with : "+groupSFile
    gp = os.popen('/usr/bin/gnuplot','w')
    gnuplot(gp, 'set title "'+configName+' run: '+runNumber+' variable: '+variable+' vs. '+GROUP_SIZE_VARIABLE+'"')
    gnuplot(gp, 'set xlabel "time [s]"')
    gnuplot(gp, 'set ylabel "delay [s]"')
    gnuplot(gp, 'set y2range [0:]')
    gnuplot(gp, 'set ytics nomirror')
    gnuplot(gp, 'set y2label "size"')
    gnuplot(gp, 'set y2tics 0, 2')
    gnuplot(gp, 'set key left top')
    gnuplot(gp, 'plot "'+groupSFile+'" every ::'+str((recordedNodesPerVariable))+' using 3:4 with lines title "'+GROUP_SIZE_VARIABLE+'" axis x1y2, "'+sFile +'" every ::'+str((recordedNodesPerVariable+1))+' using 3:4 with lines title "'+variable+'" axis x1y1')
    #export settings png
    gnuplot(gp, 'set terminal push')
    gnuplot(gp, 'set terminal png')
    gnuplot(gp, 'set output "'+SIM_RESULT_PATH+"/"+configName+'-'+runNumber+'-'+variable+'-with-'+GROUP_SIZE_VARIABLE+'.png"')
    #repeat the most recent plot command, the output now going to the png file
    gnuplot(gp, 'replot')
    #export settings eps
    gnuplot(gp, 'set terminal push')
    gnuplot(gp, 'set term postscript eps enhanced')
    gnuplot(gp, 'set output "'+SIM_RESULT_PATH+"/"+configName+'-'+runNumber+'-'+variable+'-with-'+GROUP_SIZE_VARIABLE+'.eps"')
    #repeat the most recent plot command, the output now going to the eps file
    gnuplot(gp, 'replot')
    gnuplot(gp, 'set output')
    gnuplot(gp, 'set terminal pop')
    gnuplot(gp, 'unset output')
    gp.close()
def determineMaxGroupSizeNodeResultFileName(configName, runNumber):
    """determines the result file of the node which has collected the most groupSize data"""
    #search for all per-node group size result files
    searchPattern = buildAnalyseGroupSizeResultFilePath(configName, runNumber, "*")+"."+RESULT_FILE_EXT_VECTOR
    searchCommand = "ls "+searchPattern
    proc = subprocess.Popen([searchCommand], shell=True, stdout=subprocess.PIPE)
    #retrieve the output, transform it into a list and remove empty lines
    std_output_value = proc.communicate()[0]
    resultList = stringToList(std_output_value)
    count = 0
    fileName = "FILE_NAME_NOT_SET"
    for elem in resultList:
        #wc -l prints "<line count> <file name>"
        countCommand = "wc -l "+elem
        p2 = subprocess.Popen([countCommand], shell=True, stdout=subprocess.PIPE)
        output_value = p2.communicate()[0]
        tCount, found, tail = output_value.partition(" ")
        if found:
            if count < int(tCount):
                fileName = elem
                count = int(tCount)
    return fileName
def extractGroupSizeStatisticDataFromStatisticSource(configName, runNumber):
    """extracts the group size data recorded at the different nodes"""
    sourceFile = buildAnalyseSourceFilePath(configName, runNumber, RESULT_FILE_EXT_VECTOR)
    #determine the nodes that recorded group size data within the last run
    #the nodes are identified by their omnet module path names
    runningNodesCommand = "scavetool l -m "+sourceFile
    proc = subprocess.Popen([runningNodesCommand], shell=True, stdout=subprocess.PIPE)
    resultList = stringToList(proc.communicate()[0])
    if len(resultList) <= 0:
        raise Exception("unable to extract running nodes")
    #process the group size for the retrieved omnet modules
    for elem in resultList:
        index = elem[elem.index('[')+1:elem.rindex(']')]
        outputFile = buildAnalyseGroupSizeResultFilePath(configName, runNumber, index)
        scaveCommand = "scavetool v -p \"module("+elem+") AND "+GROUP_SIZE_VARIABLE+"\" -O "+outputFile+" "+sourceFile
        p1 = subprocess.call([scaveCommand], shell=True, stdout=subprocess.PIPE)
def extractVectorStatisticDataFromStatisticSource(variable, configName, runNumber):
    """extracts the statistic data per variable from the vector source file and puts it into an output vector file"""
    resultFile = buildAnalyseResultFilePath(configName, runNumber, variable)
    sourceFile = buildAnalyseSourceFilePath(configName, runNumber, RESULT_FILE_EXT_VECTOR)
    scaveOptionsExtractVectorStats = "v -p 'name( "+variable+" )' -O "+resultFile+" "+sourceFile
    scave = scaveCommand + " " + scaveOptionsExtractVectorStats
    #execute
    proc = subprocess.Popen([scave], shell=True, stdout=subprocess.PIPE)
    proc.wait()
    syncDisk()
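# For illustration only: with the hypothetical variable "joinDelay", the command assembled
# above expands to something like
#   scavetool v -p 'name( joinDelay )' -O <results>/<config>-<run>-joinDelay <results>/<config>-<run>.vec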
def buildAnalyseGroupSizeResultFilePath(configName, runNumber, nodeIndex):
    """the path to the result file of the group size. the created file path is without a file extension, because scavetool appends it automatically"""
    return buildAnalyseResultFilePath(configName, runNumber, GROUP_SIZE_VARIABLE)+"-node-"+nodeIndex
def buildAnalyseResultFilePath(configName, runNumber, variable):
    """creates the path including filename to the result file for the given parameters, used as sink for the analysis. the created path is without a file extension, because scavetool appends it automatically"""
    return SIM_RESULT_PATH +"/"+configName+"-"+runNumber+"-"+variable
def buildAnalyseSourceFilePath(configName, runNumber, resultFileExt):
    """creates the path including filename to the result file for the given parameters, used as source for the analysis"""
    return SIM_RESULT_PATH +"/"+configName+"-"+runNumber+"."+resultFileExt
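# e.g. buildAnalyseSourceFilePath("MoversightBaseline", "0", RESULT_FILE_EXT_VECTOR)
#   -> "/home/gschneid/ubeeme/simulations/results/MoversightBaseline-0.vec"
# ("MoversightBaseline" is a hypothetical configuration name)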
def extractAttributes(configName, runNumber, resultFileExt):
    """extracts the attributes present within the given omnet statistic result file\n note: currently this only works for vector files, not scalar files"""
    #check input
    if (resultFileExt != RESULT_FILE_EXT_VECTOR) and (resultFileExt != RESULT_FILE_EXT_SCALAR):
        raise AttributeError("The given result file extension '"+resultFileExt+"' is not supported")
    #build the scave command string
    scave = scaveCommand + " " + scaveOptionListUniqueStatNames +" "+ buildAnalyseSourceFilePath(configName, runNumber, resultFileExt)
    #execute
    proc = subprocess.Popen([scave], shell=True, stdout=subprocess.PIPE)
    #retrieve scave output
    std_output_value = proc.communicate()[0]
    syncDisk()
    #prepare result list
    if resultFileExt == RESULT_FILE_EXT_VECTOR:
        resultList = stringToList(std_output_value)
        return resultList
    else:
        print "postprocessing of "+resultFileExt+" files not supported"
        return std_output_value
def stringToList(inputString):
    """transforms a string into a list, using \\n as the element separator"""
    resultList = inputString.split("\n")
    if resultList[len(resultList)-1] == '':
        del resultList[len(resultList)-1]
    return resultList
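# e.g. stringToList("a\nb\n") -> ["a", "b"]; the empty element caused by the trailing newline is dropped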
def syncDisk():
    """calls the sync command to flush the file system buffers"""
    command = "sync"
    #execute
    p = subprocess.Popen([command], shell=True, stdout=subprocess.PIPE)
    p.wait()
if __name__=="__main__":
    main()