anima / Applications C Elegans classifier

This example pipeline classifies C. elegans worm images as 'mostly dead' or 'mostly alive' based on worm morphology.

It downloads the data (the BBBC010 image set) directly from the Broad Institute. Note that MATLAB is required by the pipeline.
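The pipeline is run with the Anduril launcher. Assuming the listing is saved as worms2.scala (the file name referenced by the SyntaxHighlight component at the end of the script), an invocation along the following lines should work; the shebang line also allows executing the file directly once it is marked executable:

anduril run worms2.scala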

#!/usr/bin/env anduril
//$OPT --wrapper slurm-prefix
//$OPT --pipe "tee $ANDURIL_EXECUTION_DIR/_log"
//$OPT --pipe "anduril-pager --ls -t --title $ANDURIL_EXECUTION_DIR"
//$OPT --threads 4
//$POST anduril-result-browser --unused "$ANDURIL_EXECUTION_DIR"
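// Note: the //$OPT lines above pass options to the Anduril launcher (a
// SLURM submission wrapper, piping of the log through tee and a pager,
// and a limit of 4 concurrent threads); the //$POST line opens the
// result browser once the run finishes.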

import anduril.builtin._
import anduril.tools._
import anduril.anima._
import org.anduril.runtime._

object Analysis {
// Download the data set
val filedownload=URLInput(url="https://www.broadinstitute.org/bbbc/BBBC010/BBBC010_v1_images.zip")

// Custom script to extract the archive
val fileunarchive=QuickBash(in=filedownload, script="mkdir $out; cd $out; unzip -j $in")

// Define the folder of input images
val filedir=fileunarchive.out
// Create a list of files matching a pattern
val transmitted=Folder2Array(folder1=filedir,
                        filePattern="(.*w2.*tif)")

//----------------------------------------------------------------------
// Convert the images to grayscale PNG
val signal=ImageMagick(ina1=transmitted,command="@ina1@ -fx R -auto-level @out@")
// Preprocess images: invert colors, equalize intensity and remove the background
val sigprocess=MatlabOp(in1=signal,
           script="foo=imclose(adapthisteq(mat2gray(imcomplement(in1))),strel('disk',2,0));"+
                  "out=(foo-medfilt2(foo,[21 21]));")
// Segment images with the Otsu method and a constant correction, removing small clutter
val wormask=SegmentImage(in=sigprocess.out,
                        method="otsu",minArea=200,
                        corr=0.6,fill=false,
                        maxArea=0,minInt=0.001)
// Thin the mask to skeletons
val skeleton=MatlabOp(in1=wormask.mask,
           script="out=bwmorph(imclose(in1,strel('disk',2,0)),'thin',Inf);")
// Measure the features from skeletons
val skfeatures=SkeletonFeatures(skeleton=skeleton.out,
                      images=sigprocess.out,
                      maxDist=150)
// Create a visualization of the segmentation and skeletonization:
// signal in green, mask perimeter in red, skeleton in cyan
val seg_visu=MergeImage(in=makeArray(sigprocess.out,wormask.perimeter,skeleton.out), colors="G,R,C")

// The same overlay composed with plain ImageMagick, as a FOSS alternative
val seg_visu_foss=ImageMagick(in=makeArray(sigprocess.out,wormask.perimeter,skeleton.out),
                command=" -compose Plus \\( @a1@ -normalize +level-colors Black,Green \\) "+
                        " \\( @a2@ -auto-level +level-colors ,'rgb(220,0,0)' \\) -composite "+
                        " \\( @a3@ -auto-level +level-colors ,Cyan \\) -composite "+
                        "@out@")
// Remove short skeletons from the data
val measurements_filt=CSVFilter(skfeatures.out, lowBound="Length=20")
// Calculate an additional feature from the measurements
val measurements_calc=CSVTransformer(csv1=measurements_filt,
        transform1="csv1[,c('File','Distance','Alpha')]",
        transform2="csv1[,'Distance']/csv1[,'Length']",
        columnNames="c('File','D','Alpha','DLRatio')")
// Calculate per-image summaries
val measurements_median=CSVSummary(measurements_calc, 
                    summaryType="median", clusterCol="File", counts=false)
// Parse the filename for ground truth
val measurements_grouped=CSVTransformer(measurements_median,
        transform1="paste('G',as.integer(gsub('(.*)(..)(_w2).*','\\\\2',csv1[,'File'])>12),sep='')",
        transform2="csv1",
        columnNames="c('Group',colnames(csv1))")
// Split the data into per-image CSV files keyed by file name
val images_split=CSVSplit(measurements_grouped, labelCol="File")

// Divide the images in half into training and validation sets,
// splitting the visualizations the same way
val sets_split=ArraySplit(images_split, N=2)
val sets_visu=FolderSplit(seg_visu, N=2)

val training_set=CSVListJoin(in=sets_split.out1, fileCol="")
val validation_set=CSVListJoin(in=sets_split.out2, fileCol="")
// Train and classify with a random forest classifier
val rf_classifier=WekaClassifier(data=training_set,
               testdata=validation_set,
               classifydata=validation_set,
               classColumn="Group",
               columnsToRemove="File",
               crossValidation=500,
               methodClass="weka.classifiers.trees.RandomForest")
// Train and classify with a naive Bayes classifier
val nbc_classifier=WekaClassifier(data=training_set,
               testdata=validation_set,
               classifydata=validation_set,
               classColumn="Group",
               columnsToRemove="File",
               crossValidation=500,
               methodClass="weka.classifiers.bayes.NaiveBayes")
// Create annotations for the classified images
val annotations=CSVTransformer(csv1=validation_set,
                           csv2=rf_classifier.predictedClasses,
            transform1="csv1[,'File']",
            transform2="paste('Class:',csv2[,'Group'],'Prediction:',"+
            "csv2[,'PredictedClass'],'<br>',csv1[,'File'])",
            columnNames="c('File','Annotation')")
// Create a result HTML image gallery with annotations
val gallery=ImageGallery(inRoot=sets_visu.out("2"),
                     csvRoot=annotations,
                     annotationCol="Annotation")

// Collect results from the different classifiers
val compare_classifiers=CSVListJoin(in=makeArray("RF"->rf_classifier.confusion, 
                                      "NBC"->nbc_classifier.confusion), 
                                      fileCol="Method")


// Produce a syntax-highlighted listing of this pipeline script
val highlight_self=SyntaxHighlight(in = INPUT("worms2.scala"),
                    language        = "Anduril",
                    rowNumbers      = true,
                    title           = "Code listing",
                    PNGFont         = "Andale Mono",
                    PNGSize         = 14,
                    PNGPad          = 5)


}
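After a successful run, each component's results are stored under the Anduril execution directory. The outputs of most interest are the annotated HTML image gallery (gallery) and the confusion matrices of the two classifiers joined into one table (compare_classifiers).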
