TrackMate help with scripting edge displacement output

Hi @tinevez,

I am having a hard time trying to figure out how to export data from TrackMate via Jython scripting. I need to process hundreds to thousands of videos, so automation is key :slight_smile:

What I want to export, as completely as possible, is:

  • edge id of some sort
  • the track it belongs to (track id)
  • that track's median quality
  • that track's median speed
  • quality of the spots in that edge
  • spot ids in that edge
  • edge displacement (this is the main metric of interest)
  • frame, or at least, the frames of the source and target spots

What I intend to recreate is something similar to the Edge displacement vs Time (frame) plot, but I would like to further process the edges based on track and spot qualities, etc.

I would prefer a way to save this as a table to a .csv, but as long as I can at least output the data to the logger, I can deal with that (this is my current strategy).

Here is the code I have so far; however, I have only managed to output the edges as pairs of spot IDs, in a one-line string from which I do not know how to extract the edge displacement.

from fiji.plugin.trackmate import Model
from fiji.plugin.trackmate import Settings
from fiji.plugin.trackmate import TrackMate
from fiji.plugin.trackmate import SelectionModel
from fiji.plugin.trackmate import Logger
from fiji.plugin.trackmate.detection import DogDetectorFactory
import fiji.plugin.trackmate.tracking.sparselap.SparseLAPTrackerFactory as SparseLAPTrackerFactory
from fiji.plugin.trackmate.tracking import LAPUtils
import fiji.plugin.trackmate.visualization.hyperstack.HyperStackDisplayer as HyperStackDisplayer
import fiji.plugin.trackmate.features.FeatureFilter as FeatureFilter
import sys
import fiji.plugin.trackmate.features.track.TrackSpotQualityFeatureAnalyzer as TrackSpotQualityFeatureAnalyzer

from ij import IJ, ImagePlus, ImageStack
import fiji.plugin.trackmate.detection.DetectorKeys as DetectorKeys
import fiji.plugin.trackmate.features.FeatureAnalyzer as FeatureAnalyzer
import fiji.plugin.trackmate.action.ExportStatsToIJAction as ExportStatsToIJAction
import fiji.plugin.trackmate.features.ModelFeatureUpdater as ModelFeatureUpdater
import fiji.plugin.trackmate.features.SpotFeatureCalculator as SpotFeatureCalculator
import fiji.plugin.trackmate.features.track.TrackSpeedStatisticsAnalyzer as TrackSpeedStatisticsAnalyzer
import fiji.plugin.trackmate.features.track.TrackDurationAnalyzer as TrackDurationAnalyzer
import fiji.plugin.trackmate.util.TMUtils as TMUtils

import fiji.plugin.trackmate.features.edges.EdgeTargetAnalyzer as EdgeTargetAnalyzer
import fiji.plugin.trackmate.features.edges.EdgeVelocityAnalyzer as EdgeVelocityAnalyzer
import fiji.plugin.trackmate.features.edges.EdgeTimeLocationAnalyzer as EdgeTimeLocationAnalyzer

# Open image
mainDir = "C:\\Users\\Julia\\Desktop\\Mac Documents\\Lab Stuff\\Lenti\\171006 R Phase beat tracking\\"
openDir = "videos\\"
imageTitle = "170928 1d 5kpa plate 1 well16.tif"
imp = IJ.openImage(mainDir + openDir + imageTitle)

imp.show()

#----------------------------
# Create the model object now
#----------------------------
    
model = Model()

# Send all messages to ImageJ log window.
model.setLogger(Logger.IJ_LOGGER)


   
#------------------------
# Prepare settings object
#------------------------
   
settings = Settings()
settings.setFrom(imp)
   
# Configure detector - We use the Strings for the keys
settings.detectorFactory = DogDetectorFactory()
settings.detectorSettings = { 
    'DO_SUBPIXEL_LOCALIZATION' : False,
    'RADIUS' : 0.1,
    'THRESHOLD' : 0.15,
    'DO_MEDIAN_FILTERING' : True,
}  

# Configure spot filters - Classical filter on quality
filter1 = FeatureFilter('QUALITY', 0.37, True)
settings.addSpotFilter(filter1)
 
# Configure tracker
# Note: Sparse is the name for Simple LAP tracker
settings.trackerFactory = SparseLAPTrackerFactory()
settings.trackerSettings = LAPUtils.getDefaultLAPSettingsMap() # this sets tens of mandatory settings

settings.trackerSettings['ALLOW_TRACK_SPLITTING'] = False
settings.trackerSettings['ALLOW_TRACK_MERGING'] = False
settings.trackerSettings['LINKING_MAX_DISTANCE'] = 0.1
settings.trackerSettings['GAP_CLOSING_MAX_DISTANCE'] = 0.2
settings.trackerSettings['MAX_FRAME_GAP'] = 2


# Add track quality analyzer to be able to filter on track quality

settings.addTrackAnalyzer(TrackSpotQualityFeatureAnalyzer())

# filter tracks on quality

filter2 = FeatureFilter('MEDIAN_QUALITY', 0.5, True)
settings.addTrackFilter(filter2)

# add more track and edge analyzers for later output

settings.addTrackAnalyzer(TrackSpeedStatisticsAnalyzer())
settings.addTrackAnalyzer(TrackDurationAnalyzer())
settings.addEdgeAnalyzer(EdgeTargetAnalyzer())
settings.addEdgeAnalyzer(EdgeVelocityAnalyzer())
settings.addEdgeAnalyzer(EdgeTimeLocationAnalyzer())

#-------------------
# Instantiate plugin
#-------------------

trackmate = TrackMate(model, settings)
   
#--------
# Process
#--------

ok = trackmate.checkInput()
if not ok:
    sys.exit(str(trackmate.getErrorMessage()))

ok = trackmate.process()
if not ok:
    sys.exit(str(trackmate.getErrorMessage()))

   
#----------------
# Display results
#----------------
model.getLogger().log('Found ' + str(model.getTrackModel().nTracks(True)) + ' tracks.')

selectionModel = SelectionModel(model)
displayer =  HyperStackDisplayer(model, selectionModel, imp)
displayer.render()
displayer.refresh()
   
# The feature model, that stores edge and track features.
fm = model.getFeatureModel()
   
for id in model.getTrackModel().trackIDs(True):
   
    # Fetch the track feature from the feature model.
    v = fm.getTrackFeature(id, 'TRACK_MEDIAN_SPEED')
    q = fm.getTrackFeature(id, 'TRACK_MEDIAN_QUALITY')
    model.getLogger().log('')
    model.getLogger().log('Track ' + str(id) + ': median speed = ' + str(v) + ' ' + model.getSpaceUnits() + '/' + model.getTimeUnits())
    model.getLogger().log('Track ' + str(id) + ': median quality = ' + str(q) )

    # output the spots that make up the edges of the track
    edgs = model.getTrackModel().trackEdges(id)
    model.getLogger().log('Track ' + str(id) + ': edges = ' + str(edgs))

    #d = fm.getEdgeFeature (edgs, 'DISPLACEMENT')
    #model.getLogger().log('Track ' + str(id) + ': edges displacement = ' + str(d))
    # these are not working at all, I don't know how to get an edge id of some sort

    # output all spots in each track
    track = model.getTrackModel().trackSpots(id)
    for spot in track:
        sid = spot.ID()
        # Fetch spot features directly from spot. 
        x=spot.getFeature('POSITION_X')
        y=spot.getFeature('POSITION_Y')
        t=spot.getFeature('FRAME')
        q=spot.getFeature('QUALITY')
        model.getLogger().log('\tspot ID = ' + str(sid) + ': x='+str(x)+', y='+str(y)+', t='+str(t)+', q='+str(q))

# Echo results with the logger we set at start:
model.getLogger().log(str(model))

# save to txt file
IJ.saveAs("Text", mainDir + "tracking\\test1.csv") 

IJ.selectWindow("Log")
IJ.run("Close")

IJ.selectWindow(imageTitle)
IJ.run("Close")

Sorry, I just don’t understand how to access edges and then extract data that’s been calculated for them.

Thanks a lot for any help/suggestions!


Hi @l-jaye

Feature values for edges are accessed via the FeatureModel (unlike Spot, which directly contains its feature values).

So you would do something like this:

		// Get the top model. I am actually engaged to (another) one.
		final Model model = trackmate.getModel();

		// Get the feature model, where feature values of edges and tracks are
		// stored.
		final FeatureModel featureModel = model.getFeatureModel();

		// Get the track model
		final TrackModel trackModel = model.getTrackModel();

		// This method returns the IDs of all visible tracks.
		final Set< Integer > trackIDs = trackModel.trackIDs( true );
		for ( final Integer trackId : trackIDs )
		{
			System.out.println( "For track with id = " + trackId + ", named " + trackModel.name( trackId ) );

			// This method returns all the edges of the track with the specified ID.
			// They are unsorted, though.
			final Set< DefaultWeightedEdge > edges = trackModel.trackEdges( trackId );
			for ( final DefaultWeightedEdge edge : edges )
			{
				// Feature values for edges are accessed like this. You need to
				// know the String name of the feature you want to access.
				final double velocity = featureModel.getEdgeFeature( edge, EdgeVelocityAnalyzer.VELOCITY );
				System.out.println( " - edge " + edge + " v = " + velocity );
			}
		}
		// Et voila !
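
In Jython, which the script above uses, the same lookup would read roughly as below. This is just a line-for-line translation of the Java snippet, assuming trackmate.process() has already been run with EdgeVelocityAnalyzer added to the settings:

import fiji.plugin.trackmate.features.edges.EdgeVelocityAnalyzer as EdgeVelocityAnalyzer

fm = trackmate.getModel().getFeatureModel()
tm = trackmate.getModel().getTrackModel()

for trackId in tm.trackIDs(True):
    print('For track with id = ' + str(trackId) + ', named ' + tm.name(trackId))
    for edge in tm.trackEdges(trackId):
        # Edge feature values are looked up per edge object, by feature key.
        velocity = fm.getEdgeFeature(edge, EdgeVelocityAnalyzer.VELOCITY)
        print(' - edge ' + str(edge) + ' v = ' + str(velocity))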

Hello @tinevez,

Thanks a lot for your help!

The critical piece of information here was that we can extract the “edge” objects from the list of edges.

I doubt this is the most elegant way to deal with the output, but I have essentially constructed a comma-separated table in the logger with all the variables I need (metadata, edge displacement values, etc.), then saved the logger to text as a .csv, and from there, life is easy :slight_smile:
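
As a side note, the same rows could also be written straight to a file with Python's built-in csv module, skipping the Log-window round trip. A minimal sketch, assuming the same fm, tm, mainDir and analyzer imports as in the script below (the file name is a placeholder):

import csv

# Sketch only: one row per edge, written directly to disk instead of the IJ log.
f = open(mainDir + "tracking\\edges.csv", 'wb')  # 'wb' because Fiji's Jython is Python 2
writer = csv.writer(f)
writer.writerow(['track.id', 'edge.displacement', 'edge.time'])
for id in tm.trackIDs(True):
    for edge in tm.trackEdges(id):
        writer.writerow([id,
                         fm.getEdgeFeature(edge, EdgeVelocityAnalyzer.DISPLACEMENT),
                         fm.getEdgeFeature(edge, EdgeTimeLocationAnalyzer.TIME)])
f.close()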

This is your solution, adapted to the parameters I am exporting, implemented as a Jython script for Fiji:

# import functions from the plugin and the Python standard library
import sys
from ij import IJ, ImagePlus, ImageStack
from fiji.plugin.trackmate import Model
from fiji.plugin.trackmate import Settings
from fiji.plugin.trackmate import TrackMate
from fiji.plugin.trackmate import SelectionModel
from fiji.plugin.trackmate import Logger
from fiji.plugin.trackmate.detection import DogDetectorFactory
import fiji.plugin.trackmate.tracking.sparselap.SparseLAPTrackerFactory as SparseLAPTrackerFactory
from fiji.plugin.trackmate.tracking import LAPUtils
import fiji.plugin.trackmate.visualization.hyperstack.HyperStackDisplayer as HyperStackDisplayer
import fiji.plugin.trackmate.features.FeatureFilter as FeatureFilter
import fiji.plugin.trackmate.features.track.TrackSpotQualityFeatureAnalyzer as TrackSpotQualityFeatureAnalyzer
import fiji.plugin.trackmate.features.track.TrackSpeedStatisticsAnalyzer as TrackSpeedStatisticsAnalyzer
import fiji.plugin.trackmate.features.track.TrackDurationAnalyzer as TrackDurationAnalyzer
import fiji.plugin.trackmate.features.edges.EdgeTargetAnalyzer as EdgeTargetAnalyzer
import fiji.plugin.trackmate.features.edges.EdgeVelocityAnalyzer as EdgeVelocityAnalyzer
import fiji.plugin.trackmate.features.edges.EdgeTimeLocationAnalyzer as EdgeTimeLocationAnalyzer


# Open image
mainDir = "C:\\fakeImageDir\\"
openDir = "fakeRawImageDir\\"
# Note: a loop here will be constructed to loop through imageTitles and run the analysis on each image
imageTitle = "fakeImageTitle.tif"
imp = IJ.openImage(mainDir + openDir + imageTitle)
# convert z to frame somewhere here (IJ.run (""))
imp.show()

#----------------------------
# Create the model object now
#----------------------------

# Some of the parameters we configure below need to have a reference to the model at creation. So we create an empty model now.

model = Model()

# Send all messages to ImageJ log window, which will display results in a comma separated format, to be later saved as .csv
model.setLogger(Logger.IJ_LOGGER)


#------------------------
# Prepare settings object
#------------------------
   
settings = Settings()
settings.setFrom(imp) # this gets image settings, such as frames, pixels, etc from the stack
   
# Configure detector - We use the Strings for the keys
settings.detectorFactory = DogDetectorFactory() # still not sure if DoG or LoG is better?
settings.detectorSettings = { 
    'DO_SUBPIXEL_LOCALIZATION' : False,
    'RADIUS' : 0.1, # these definitely need tinkering with, but should be consistent for all analyzed samples
    'THRESHOLD' : 0.15,
    'DO_MEDIAN_FILTERING' : True,
}  

# Configure spot filters - Classical filter on quality
filter1 = FeatureFilter('QUALITY', 0.37, True) # also can be tinkered with, but can be post-processed in R, as spot quality is exported
settings.addSpotFilter(filter1)
 
# Configure tracker
# Note: Sparse is the name for Simple LAP tracker
settings.trackerFactory = SparseLAPTrackerFactory()
settings.trackerSettings = LAPUtils.getDefaultLAPSettingsMap() # this sets tens of mandatory settings

settings.trackerSettings['ALLOW_TRACK_SPLITTING'] = False
settings.trackerSettings['ALLOW_TRACK_MERGING'] = False
settings.trackerSettings['LINKING_MAX_DISTANCE'] = 0.1 # these can also be tinkered with
settings.trackerSettings['GAP_CLOSING_MAX_DISTANCE'] = 0.2
settings.trackerSettings['MAX_FRAME_GAP'] = 2


# Add track quality analyzer to be able to filter on track quality

settings.addTrackAnalyzer(TrackSpotQualityFeatureAnalyzer())

# filter tracks on quality

filter2 = FeatureFilter('MEDIAN_QUALITY', 0.5, True)
settings.addTrackFilter(filter2)

# add more track and edge analyzers for various calculations

settings.addTrackAnalyzer(TrackSpeedStatisticsAnalyzer())
settings.addTrackAnalyzer(TrackDurationAnalyzer())
settings.addEdgeAnalyzer(EdgeTargetAnalyzer())
settings.addEdgeAnalyzer(EdgeVelocityAnalyzer())
settings.addEdgeAnalyzer(EdgeTimeLocationAnalyzer())

#-------------------
# Instantiate plugin
#-------------------

trackmate = TrackMate(model, settings)
   
#--------
# Process
#--------

ok = trackmate.checkInput()
if not ok:
    sys.exit(str(trackmate.getErrorMessage()))

ok = trackmate.process()
if not ok:
    sys.exit(str(trackmate.getErrorMessage()))

   
#----------------
# Display results
#----------------
model.getLogger().log('Found ' + str(model.getTrackModel().nTracks(True)) + ' tracks.')

selectionModel = SelectionModel(model)
displayer =  HyperStackDisplayer(model, selectionModel, imp)
displayer.render()
displayer.refresh()

# Echo results with the logger we set at start:
model.getLogger().log(str(model))

# Extract the feature and track models, that stores edge and track features (metadata and calculated parameters).
fm = model.getFeatureModel()
tm = model.getTrackModel()

spaceUnit = model.getSpaceUnits() # extract the distance units
timeUnit = model.getTimeUnits() # extract the time units

model.getLogger().log('') # adds an empty line to the logger
model.getLogger().log('track.id, track.median.speed ('+spaceUnit+'/'+timeUnit+'), track.median.quality, edge.displacement ('+spaceUnit+'), edge.time ('+timeUnit+'), start.spot.id, end.spot.id, start.frame, end.frame, start.spot.quality, end.spot.quality')
# the above line in the logger will become table column headings. Commas are important to delimit the table properly in the final .csv

for id in model.getTrackModel().trackIDs(True): # go by tracks

    # Fetch the track feature from the feature model.
    v = fm.getTrackFeature(id, 'TRACK_MEDIAN_SPEED')
    q = fm.getTrackFeature(id, 'TRACK_MEDIAN_QUALITY')

    edges = tm.trackEdges(id) # extract all edges from each track
    for edge in edges: # go by edges in each track
        d = fm.getEdgeFeature(edge, EdgeVelocityAnalyzer.DISPLACEMENT)
        t = fm.getEdgeFeature(edge, EdgeTimeLocationAnalyzer.TIME) # I believe this is the calculated time (frame), the mean of the start and end spot frames
        startSpot = tm.getEdgeSource(edge)
        endSpot = tm.getEdgeTarget(edge)

        startID = startSpot.ID()
        endID = endSpot.ID()

        startFrame = startSpot.getFeature('FRAME')
        endFrame = endSpot.getFeature('FRAME')

        startQ = startSpot.getFeature('QUALITY')
        endQ = endSpot.getFeature('QUALITY')

        # output all declared variables to the logger (order to coincide with table headings). The commas are important to properly save as .csv
        model.getLogger().log(str(id) +
            ',' + str(v) +
            ',' + str(q) +
            ',' + str(d) +
            ',' + str(t) +
            ',' + str(startID) +
            ',' + str(endID) +
            ',' + str(startFrame) +
            ',' + str(endFrame) +
            ',' + str(startQ) +
            ',' + str(endQ))


# save to .csv file
IJ.saveAs("Text", mainDir + "tracking\\test1.csv") # note, the file name will reflect the imageTitle, or some loop identifier

IJ.selectWindow("Log")
IJ.run("Close")

IJ.selectWindow(imageTitle)
IJ.run("Close")
## loop should end here
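
The batch part mentioned in the comments ("a loop here will be constructed to loop through imageTitles", "loop should end here") can be structured by wrapping everything from IJ.openImage() down to the final IJ.run("Close") in a function and calling it for every .tif in the raw-image folder. A minimal sketch, where process_image is a hypothetical wrapper around the per-image code above:

import os

def run_batch(mainDir, openDir):
    # analyze every .tif found in the raw-image folder
    for imageTitle in os.listdir(mainDir + openDir):
        if imageTitle.lower().endswith('.tif'):
            process_image(mainDir, openDir, imageTitle)

run_batch(mainDir, openDir)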