<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE article
  PUBLIC "-//NLM//DTD Journal Publishing DTD v3.0 20080202//EN" "http://dtd.nlm.nih.gov/publishing/3.0/journalpublishing3.dtd">
<article xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" article-type="research-article" dtd-version="3.0" xml:lang="en">
  <front>
    <journal-meta><journal-id journal-id-type="nlm-ta">PLoS ONE</journal-id><journal-id journal-id-type="publisher-id">plos</journal-id><journal-id journal-id-type="pmc">plosone</journal-id><issn pub-type="epub">1932-6203</issn><publisher>
        <publisher-name>Public Library of Science</publisher-name>
        <publisher-loc>San Francisco, USA</publisher-loc>
      </publisher></journal-meta>
    <article-meta><article-id pub-id-type="publisher-id">PONE-D-12-00187</article-id><article-id pub-id-type="doi">10.1371/journal.pone.0041642</article-id><article-categories>
        <subj-group subj-group-type="heading">
          <subject>Research Article</subject>
        </subj-group>
        <subj-group subj-group-type="Discipline-v2">
          <subject>Biology</subject>
          <subj-group>
            <subject>Computational biology</subject>
            <subj-group>
              <subject>Computational neuroscience</subject>
              <subj-group>
                <subject>Sensory systems</subject>
              </subj-group>
            </subj-group>
          </subj-group>
          <subj-group>
            <subject>Model organisms</subject>
            <subj-group>
              <subject>Animal models</subject>
              <subj-group>
                <subject>Drosophila melanogaster</subject>
              </subj-group>
            </subj-group>
          </subj-group>
          <subj-group>
            <subject>Neuroscience</subject>
            <subj-group>
              <subject>Computational neuroscience</subject>
              <subj-group>
                <subject>Sensory systems</subject>
              </subj-group>
            </subj-group>
            <subj-group>
              <subject>Behavioral neuroscience</subject>
              <subject>Motor systems</subject>
              <subject>Neuroethology</subject>
              <subject>Sensory perception</subject>
              <subject>Sensory systems</subject>
            </subj-group>
          </subj-group>
        </subj-group>
        <subj-group subj-group-type="Discipline-v2">
          <subject>Computer science</subject>
          <subj-group>
            <subject>Software engineering</subject>
            <subj-group>
              <subject>Software tools</subject>
            </subj-group>
          </subj-group>
        </subj-group>
        <subj-group subj-group-type="Discipline">
          <subject>Computational Biology</subject>
          <subject>Neuroscience</subject>
          <subject>Computer Science</subject>
        </subj-group>
      </article-categories><title-group><article-title>Automated Tracking of Animal Posture and Movement during Exploration and Sensory Orientation Behaviors</article-title><alt-title alt-title-type="running-head">Tracking Animal Posture and Movement</alt-title></title-group><contrib-group>
        <contrib contrib-type="author" xlink:type="simple">
          <name name-style="western">
            <surname>Gomez-Marin</surname>
            <given-names>Alex</given-names>
          </name>
          <xref ref-type="aff" rid="aff1">
            <sup>1</sup>
          </xref>
          <xref ref-type="corresp" rid="cor1">
            <sup>*</sup>
          </xref>
        </contrib>
        <contrib contrib-type="author" xlink:type="simple">
          <name name-style="western">
            <surname>Partoune</surname>
            <given-names>Nicolas</given-names>
          </name>
          <xref ref-type="aff" rid="aff1">
            <sup>1</sup>
          </xref>
          <xref ref-type="aff" rid="aff2">
            <sup>2</sup>
          </xref>
        </contrib>
        <contrib contrib-type="author" xlink:type="simple">
          <name name-style="western">
            <surname>Stephens</surname>
            <given-names>Greg J.</given-names>
          </name>
          <xref ref-type="aff" rid="aff3">
            <sup>3</sup>
          </xref>
        </contrib>
        <contrib contrib-type="author" xlink:type="simple">
          <name name-style="western">
            <surname>Louis</surname>
            <given-names>Matthieu</given-names>
          </name>
          <xref ref-type="aff" rid="aff1">
            <sup>1</sup>
          </xref>
          <xref ref-type="corresp" rid="cor1">
            <sup>*</sup>
          </xref>
        </contrib>
      </contrib-group><aff id="aff1">
        <label>1</label>
        <addr-line>European Molecular Biology Laboratory/Center for Genomic Regulation Systems Biology Unit, Center for Genomic Regulation &amp; Universitat Pompeu Fabra, Barcelona, Spain</addr-line>
      </aff><aff id="aff2">
        <label>2</label>
        <addr-line>Department of Electrical Engineering and Computer Science, Université de Liège, Liege Sart-Tilman, Belgium</addr-line>
      </aff><aff id="aff3">
        <label>3</label>
        <addr-line>Joseph Henry Laboratories of Physics &amp; Lewis-Sigler Institute for Integrative Genomics Princeton University, Princeton, New Jersey, United States of America</addr-line>
      </aff><contrib-group>
        <contrib contrib-type="editor" xlink:type="simple">
          <name name-style="western">
            <surname>Krapp</surname>
            <given-names>Holger G.</given-names>
          </name>
          <role>Editor</role>
          <xref ref-type="aff" rid="edit1"/>
        </contrib>
      </contrib-group><aff id="edit1">
        <addr-line>Imperial College London, United Kingdom</addr-line>
      </aff><author-notes>
        <corresp id="cor1">* E-mail: <email xlink:type="simple">agomezmarin@gmail.com</email> (AGM); <email xlink:type="simple">matthieu.louis@crg.eu</email> (ML)</corresp>
        <fn fn-type="conflict">
          <p>ML is a PLoS ONE Editorial Board member. This does not alter the authors' adherence to all the PLoS ONE policies on sharing data and materials.</p>
        </fn>
        <fn fn-type="con">
          <p>Conceived and designed the experiments: AGM ML. Performed the experiments: AGM. Analyzed the data: AGM. Contributed reagents/materials/analysis tools: AGM NP GJS. Wrote the paper: AGM ML.</p>
        </fn>
      </author-notes><pub-date pub-type="collection">
        <year>2012</year>
      </pub-date><pub-date pub-type="epub">
        <day>9</day>
        <month>8</month>
        <year>2012</year>
      </pub-date><volume>7</volume><issue>8</issue><elocation-id>e41642</elocation-id><history>
        <date date-type="received">
          <day>19</day>
          <month>12</month>
          <year>2011</year>
        </date>
        <date date-type="accepted">
          <day>28</day>
          <month>6</month>
          <year>2012</year>
        </date>
      </history><permissions>
        <copyright-year>2012</copyright-year>
        <copyright-holder>Gomez-Marin et al</copyright-holder>
        <license xlink:type="simple">
          <license-p>This is an open-access article distributed under the terms of the Creative Commons Attribution License, which permits unrestricted use, distribution, and reproduction in any medium, provided the original author and source are credited.</license-p>
        </license>
      </permissions><abstract>
        <sec>
          <title>Background</title>
          <p>The nervous functions of an organism are primarily reflected in the behavior it is capable of. Measuring behavior quantitatively, at high-resolution and in an automated fashion provides valuable information about the underlying neural circuit computation. Accordingly, computer-vision applications for animal tracking are becoming a key complementary toolkit to genetic, molecular and electrophysiological characterization in systems neuroscience.</p>
        </sec>
        <sec>
          <title>Methodology/Principal Findings</title>
          <p>We present Sensory Orientation Software (<italic>SOS</italic>) to measure behavior and infer sensory experience correlates. <italic>SOS</italic> is a simple and versatile system to track body posture and motion of single animals in two-dimensional environments. In the presence of a sensory landscape, tracking the trajectory of the animal's sensors and its postural evolution provides a quantitative framework to study sensorimotor integration. To illustrate the utility of <italic>SOS</italic>, we examine the orientation behavior of fruit fly larvae in response to odor, temperature and light gradients. We show that <italic>SOS</italic> is suitable to carry out high-resolution behavioral tracking for a wide range of organisms including flatworms, fishes and mice.</p>
        </sec>
        <sec>
          <title>Conclusions/Significance</title>
          <p>Our work contributes to the growing repertoire of behavioral analysis tools for collecting rich and fine-grained data to draw and test hypotheses about the functioning of the nervous system. By providing open-access to our code and documenting the software design, we aim to encourage the adaptation of <italic>SOS</italic> by a wide community of non-specialists to their particular model organism and questions of interest.</p>
        </sec>
      </abstract><funding-group>
        <funding-statement>AGM is supported by the Juan de la Cierva program from the Spanish Ministry of Science and Innovation. NP was funded through an ERASMUS scholarship granted by the University of Liège. GJS is supported in part by grants PHY-0650617 and IIS-0613435 from the National Science Foundation, by grant P50 GM071508 from the National Institutes of Health, and by the Swartz Foundation. ML acknowledges funding from the Spanish Ministry of Science and Innovation (BFU2008-00362 and BFU2009-07757-E/BMC), the European Molecular Biology Laboratory/Center for Genomic Regulation Systems Biology Program and a Marie Curie Reintegration Grant (PIRG02-GA-2007-224791). The funders had no role in study design, data collection and analysis, decision to publish, or preparation of the manuscript.</funding-statement>
      </funding-group><counts>
        <page-count count="9"/>
      </counts></article-meta>
  </front>
  <body>
    <sec id="s1">
      <title>Introduction</title>
      <p>In a similar way that detailed knowledge of the dynamics of ion channels enhances our understanding of neurons, precise behavioral characterizations help to unravel the function of neural circuits. However, natural behaviors are usually complex, variable and multidimensional, with no universal language such as that of action potentials. Therefore, quantifying behavior at high-resolution, efficiently, and in an unbiased fashion remains a challenge in most neurobiological studies <xref ref-type="bibr" rid="pone.0041642-Leshner1">[1]</xref>. Indeed, though manual annotation is common, <italic>ad hoc</italic> performance indices defined by the experimenter may fail to capture the information relevant to the transformation of sensory input into behavioral output. An alternative approach consists in measuring unconstrained behavior from its most fundamental components — the time course of the animal's posture — to search for principles simplifying the apparent complexity of the phenomenon <xref ref-type="bibr" rid="pone.0041642-Stephens1">[2]</xref>. This requires new techniques to systematically collect and analyze behavioral data.</p>
      <p>Computer-vision offers a fundamental tool in the study of animal behavior. Several companies provide commercial software specifically devised to study a particular paradigm (e.g. the Morris water maze for rodents). Although these solutions can be onerous and difficult to customize, they have the advantage of working out of the box for the specific tasks they were designed for. In addition, a series of custom-made tracking software written by neurobiologists is now available, enabling behavioral measurements of individual animals at an unprecedented resolution in nematodes <xref ref-type="bibr" rid="pone.0041642-Huang1">[3]</xref>, <xref ref-type="bibr" rid="pone.0041642-Baek1">[4]</xref>, <xref ref-type="bibr" rid="pone.0041642-Ramot1">[5]</xref>, flies <xref ref-type="bibr" rid="pone.0041642-Straw1">[6]</xref>, <xref ref-type="bibr" rid="pone.0041642-Gilestro1">[7]</xref>, <xref ref-type="bibr" rid="pone.0041642-Kohlhoff1">[8]</xref>, <xref ref-type="bibr" rid="pone.0041642-Lott1">[9]</xref> and rodents <xref ref-type="bibr" rid="pone.0041642-Drai1">[10]</xref>. Software capable of tracking multiple animals simultaneously <xref ref-type="bibr" rid="pone.0041642-Branson1">[11]</xref>, <xref ref-type="bibr" rid="pone.0041642-Gershow1">[12]</xref>, <xref ref-type="bibr" rid="pone.0041642-Swierczek1">[13]</xref> has augmented the toolkit for high-throughput screening. While the use of these tools is becoming common practice, it takes considerable effort to adapt and extend the codes to different behavioral paradigms or model organisms. We believe that there exists a scope for free, simple, and customizable software between sophisticated freeware and commercial packages.</p>
      <p>Multipurpose tracking systems that measure motor responses and simultaneously infer the corresponding sensory input during unconstrained orientation behavior are scarce. To assess the sensory information accessible to an animal, it is important to determine not only the position of the center of mass (the animal being described as a moving dot in space), but also its posture and the kinematics of specific points along the body. For instance, while olfactory inputs are collected by sensors at the tip of the head in <italic>Drosophila</italic> larva, thermosensory and visual inputs arise from sensory neurons covering the whole body <xref ref-type="bibr" rid="pone.0041642-Garrity1">[14]</xref>, <xref ref-type="bibr" rid="pone.0041642-Gerber1">[15]</xref>, <xref ref-type="bibr" rid="pone.0041642-Xiang1">[16]</xref>. Similarly, escape responses and turning maneuvers in fishes involve intricate muscle activity patterns where body curvature and tail acceleration play a key role. In general, it is valuable to know not only where the animal is located in space but also what inputs are stimulating its sensors (visual and otherwise) together with its relative orientation to particular landmarks or other organisms.</p>
      <p>Here we have developed Sensory Orientation Software (<italic>SOS</italic>) to extract and analyze fine-grained information about the posture and motion of single animals behaving in sensory landscapes. The <italic>SOS</italic> system consists of a series of custom-made Matlab codes for online animal tracking and offline processing and analysis. We provide access to all our scripts as <xref ref-type="supplementary-material" rid="pone.0041642.s003">File S1</xref>. The scripts are commented and documented in a step-by-step tutorial. We provide a test dataset (<xref ref-type="supplementary-material" rid="pone.0041642.s003">File S1</xref>) and include a movie illustrating the application of <italic>SOS</italic> to track different animals (<xref ref-type="supplementary-material" rid="pone.0041642.s002">Movie S1</xref>). Our software targets a community of non-experts in computer-vision or programming: it offers a flexible basis adaptable to several paradigms and organisms. Together with an accompanying manuscript by Colomb <italic>et al.</italic>, this work presents a free, customizable and pedagogical tool for behavioral tracking and analysis.</p>
      <p>The structure of the paper is as follows. First, we describe the online tracking system. We estimate relevant spatial, temporal and data constraints related both to the animal's characteristics and the tracking procedure itself. Next, we explain how to compute postures from raw body shape images. We illustrate this approach in fruit fly larvae, flatworms, fish and mice. In the <italic>Drosophila</italic> larva, we show how to accurately infer the sensory stimuli to which particular loci along the larval body are exposed in a sensory landscape. We validate our approach by examining, at high-resolution, the sensorimotor trajectories of larvae in odor, temperature and light gradients.</p>
    </sec>
    <sec id="s2" sec-type="materials|methods">
      <title>Materials and Methods</title>
      <sec id="s2a">
        <title>Fly stocks and animal preparation</title>
        <p>Fly stocks were maintained on conventional cornmeal-agar molasses medium at 22°C and kept in a 12 h dark-light cycle. The <italic>Drosophila melanogaster</italic> Canton-S strain was used as ‘wild type’. In all behavioral experiments, 6-day-old third instar foraging larvae were tested during the day. Room temperature was kept between 21 and 23°C and relative humidity between 50 and 60%. Larvae were washed from food medium by pouring a solution of 15% sucrose in the food vial. Individuals floating at the surface of the sucrose solution were transferred to the arena for behavioral tracking. Single animals were monitored while crawling on a 3% agarose slab.</p>
      </sec>
      <sec id="s2b">
        <title>Sensory landscapes</title>
        <p>Orientation behavior in <italic>Drosophila</italic> larvae was tested in controlled odor, temperature and light gradients. For chemotaxis, an airborne odor gradient was created by loading an odor droplet on the condensation ring of the lid of a 96-well plate, which was inverted on a surface of agarose to form a closed arena <xref ref-type="bibr" rid="pone.0041642-Louis1">[17]</xref>. Ethyl butyrate (CAS number 105-54-4) was used as odorant. The odor concentration was estimated via infrared spectrometry <xref ref-type="bibr" rid="pone.0041642-Louis1">[17]</xref>. For thermotaxis, a linear spatial thermal gradient was created with two thermoelectric temperature controllers (TC-48-20, TE Technology) maintaining the two extremes of the plate at different constant temperatures. The agarose layer was placed right on top of the metal plate and its temperature was directly measured with a thermometer (MM2000 Handhold Thermometer, TM Electronics). The experiment started after the establishment of a stationary gradient in the agar. For phototaxis, a bright light pad (5000 Kelvin color temperature radiation, Slimlite Lightbox, Kaiser) was placed perpendicularly to the agarose layer surface where the animal crawled, creating a sideways gradient.</p>
      </sec>
      <sec id="s2c">
        <title>Behavioral arenas</title>
        <p>A video camera (Stingray Camera, Allied Vision Technologies; Computar lens, 12–36 mm, 1∶2∶8, 2/3″ C) fixed on a stand was used to monitor larval behavior. Larval tracking lasted a maximum of five minutes and was interrupted when the animal left the field of view. Frames were streamed at 7 Hz live from the camera by the Image Acquisition toolbox of Matlab (The MathWorks, Natick, USA), which automatically recognizes DCAM compatible FireWire cameras upon installation of the CMU 1394 Digital Camera Driver. The installation of the Image Processing toolbox of Matlab is necessary to ensure the functionality of <italic>SOS</italic>. To maximize the effectiveness of the image processing, different conditions of illumination were designed to study each modality. For chemotaxis, a light pad (5000 Kelvin color temperature radiation, Slimlite Lightbox, Kaiser) illuminated the arena from above creating uniform daylight conditions, while the camera recorded images from below. This configuration allowed us to reduce the shadow from the condensation rings of the lid of the arena. For thermotaxis, the camera was placed above the agarose layer and the setup was illuminated by sideways red LEDs (620 nm wavelength, 30 lm luminous flux, Lumitronix LED, Technik GmbH). For phototaxis, the camera was placed above the agarose layer and sideways white-light illumination (Slimlite Lightbox, Kaiser) was sufficient to enable tracking. The light intensity was assumed to decay from the source position.</p>
      </sec>
    </sec>
    <sec id="s3">
      <title>Results</title>
      <sec id="s3a">
        <title>Online tracking: from video streaming to animal postures</title>
        <p>During online tracking, camera frames are acquired live and preprocessed. Raw images of the animal in the arena are cropped into a bounding box enclosing the two-dimensional projection of its body shape. Together with the coordinates of the box, the sequence of cropped images is saved for offline analysis, optimizing the storage of raw data and making the subsequent processing more efficient. The operations that allow monitoring of the behavior of a single animal in real time are schematized in the flowchart diagram of <xref ref-type="fig" rid="pone-0041642-g001">Figure 1</xref>.</p>
        <fig id="pone-0041642-g001" orientation="portrait" position="float">
          <object-id pub-id-type="doi">10.1371/journal.pone.0041642.g001</object-id>
          <label>Figure 1</label>
          <caption>
            <title>From experiments to animal shapes.</title>
            <p>(<bold>A</bold>) Flowchart of the sequential steps of the online software. (<bold>B</bold>) Illustration of the image processing. The quality of the object segmentation depends critically on the threshold used to binarize the image. Frames streamed from the camera are used to reconstruct the background, detect the animal, and track its motion. The body posture and location of points of interest are saved as the animal freely moves in the arena.</p>
          </caption>
          <graphic mimetype="image" orientation="portrait" position="float" xlink:href="info:doi/10.1371/journal.pone.0041642.g001" xlink:type="simple"/>
        </fig>
        <p>The program is initiated by typing <italic>track(x,y)</italic> in the command prompt, where the argument <italic>x</italic> specifies the sampling interval in seconds and <italic>y</italic> is the total number of frames to be tracked. The tracking frequency is controlled by a built-in timer function. Messages are displayed at the prompt guiding the user and indicating the status of the tracker. The software connects to the camera and displays a live image of the arena on screen so that the experimenter can adjust, amongst other features, the field of view, the focus and the illumination intensity. This is important since the target recognition procedure is based on differences in contrast. The tracker detects the animal as the largest object with the highest contrast (darkest or brightest, depending on the illumination and the arena). The input grayscale frame is converted into a binary black-and-white image based on a threshold operation, whose value can be iteratively and automatically adjusted by the user before tracking starts.</p>
        <p>Before capturing the body contour to be used for posture tracking, a steady background image of the whole field of view is acquired. By subtracting the background from the current image, the animal posture can be automatically and robustly segmented. Depending on the illumination, the stimulus delivery system, and the particularities of the arena, segmentation can be either straightforward or more complex. In the simplest case, the animal is the only salient object and the background is essentially uniform. Then, no background subtraction is necessary. When static objects are present in the image, the background can be built as an average over the whole time sequence <xref ref-type="bibr" rid="pone.0041642-Khurana1">[18]</xref>. However, that solution only works for offline tracking. Provided that there are no slight displacements of the arena during the experiment, saving an initial snapshot of the background before the animal is introduced in the arena should work. More generally, <italic>SOS</italic> reconstructs the background after the animal is loaded in the arena via the following procedure: it detects the animal as the largest salient object; it crops a small box around it; it saves the outer image; finally, it completes the outer image with the inner image of the bounding box once the animal has moved away from it. This prevents confounding the animal with water droplets, small pieces of dirt, and shades produced by the arena. Animal postures can be faithfully tracked anywhere in the arena (see <xref ref-type="supplementary-material" rid="pone.0041642.s001">Text S1</xref>).</p>
        <p>The background is subtracted from the current frame and the remaining grayscale image is transformed through a threshold into a binary (black and white) image. Unsuitable thresholds can lead to fragmented or noisy binary postures. Even with the appropriate threshold, the binary image may still contain undesirable objects due to pixel noise artifacts or impurities in the substrate. Since these objects are usually smaller than the animal, they are easily erased by retaining only the largest object in the image via standard Matlab functions. When an image of the animal shape is the only object left, a smooth contour is easily obtained offline.</p>
        <p>Once the tracker has successfully detected the animal from the background, it creates a small bounding box around it. This region of the arena corresponds to the most likely area where the animal will be found in the next frame. From then on, computations will only take place in that region of interest, speeding up the image processing. The software enters a loop where the animal shape is found and saved, together with the coordinate positions of the cropped bounding box in the arena system of reference. Tracking stops when the animal leaves the field of view, or if the preset sampling interval is shorter than the time the computer takes to acquire and process each frame. Data acquisition can also be terminated at any time by following the prompts generated by the program. At the end of each trial, the data (background, postures and coordinates) are saved in a folder named after the current date and time of the experiment.</p>
      </sec>
      <sec id="s3b">
        <title>Getting to scale: selecting appropriate temporal and spatial resolutions of a tracking experiment</title>
        <p>Spatial resolution, temporal resolution and data storage requirements are mutually dependent. It is therefore convenient to estimate the tracking resolution limits given the arena constraints, animal features, and hardware-software computational characteristics (<xref ref-type="fig" rid="pone-0041642-g002">Figure 2A</xref>). These typical scales can be estimated, related and exploited in a useful manner. We summarize the most relevant relationships in <xref ref-type="fig" rid="pone-0041642-g002">Figure 2</xref> as a guide for behavioral tracking experiments.</p>
        <fig id="pone-0041642-g002" orientation="portrait" position="float">
          <object-id pub-id-type="doi">10.1371/journal.pone.0041642.g002</object-id>
          <label>Figure 2</label>
          <caption>
            <title>Spatial and temporal scales of a tracking experiment.</title>
            <p>(<bold>A</bold>) Scheme of the main spatial and temporal scales of the tracking: organism typical size (λ), behavioral arena field of view length (L), temporal resolution of the tracking (τ), and total time of the experiment (T). (<bold>B</bold>) Scaling of the organism posture resolution in pixels as a function of the relative field of view (considering a 1024-pixel frame resolution in length). Labeled areas represent different resolution limits. (<bold>C</bold>) Tracking time before the animal reaches the arena's edge (corresponding to an arena/organism ratio of 20 at a posture resolution of 50 pixels) as a function of the typical motion speed of an organism expressed in body length units. Labeled areas represent different locomotion speeds in relationship to the time to reach the boundaries of the arena. (<bold>D</bold>) Total data storage requirements for an experiment lasting 5 minutes in the conditions of the previous panel, as a function of the tracking frequency in frames per second. The continuous line corresponds to disk space usage to save only a bounding box enclosing the organism posture, while the dashed line involves saving the complete frame, which considerably increases the storage needs.</p>
          </caption>
          <graphic mimetype="image" orientation="portrait" position="float" xlink:href="info:doi/10.1371/journal.pone.0041642.g002" xlink:type="simple"/>
        </fig>
        <p>The level of detail at which the animal posture is tracked determines the number of body pixels to be processed and saved, therefore setting a trade-off between posture resolution, tracking frequency, and data size. Along the same lines, zooming in to acquire higher resolution postures implies tracking a smaller field of view, thus restricting the area where the animal can be tracked (<xref ref-type="fig" rid="pone-0041642-g002">Figure 2B</xref>). The animal-to-arena ratio provides a useful quantity to be related to the typical speed of the animal, image size, and frames per second. How fast the animal moves imposes a lower bound on the tracking speed and an upper bound on the total tracking time before the animal is likely to contact the arena boundaries (<xref ref-type="fig" rid="pone-0041642-g002">Figure 2C</xref>). In turn, given a particular posture resolution, a field of view and tracking frequency, the duration of the experiment will determine the total amount of data to be saved for that particular experiment (<xref ref-type="fig" rid="pone-0041642-g002">Figure 2D</xref>).</p>
        <p>Spatial constraints related to animal posture resolution and motion in the arena arise mainly for setups where the camera is fixed. In closed-loop tracking systems where the camera follows the animal, very high postural resolution can be achieved in large arenas <xref ref-type="bibr" rid="pone.0041642-Faumont1">[19]</xref>, <xref ref-type="bibr" rid="pone.0041642-Leifer1">[20]</xref>. Spatial and temporal constraints of the online tracking can be relaxed by acquiring frames as fast as needed without any preprocessing, at the expense of generating large volumes of video data. Sequences of high-resolution raw images for a single experiment can easily fill gigabytes of disk space. Our system is well suited to minimize the data stored upon completion of an experiment, saving only the relevant information as the experiment takes place, reducing the volume of data storage and making the subsequent offline analysis more efficient.</p>
      </sec>
      <sec id="s3c">
        <title>Offline processing: analyzing sequences of animal posture</title>
        <p>During the offline analysis, each image is automatically processed to obtain animal-centric posture descriptors and loci of interest. The main steps to obtain postures are depicted in <xref ref-type="fig" rid="pone-0041642-g003">Figure 3A</xref>. We show how similar principles and operations can be applied for the high-resolution tracking of animals as distinct as fruit fly larvae, fishes or mice.</p>
        <fig id="pone-0041642-g003" orientation="portrait" position="float">
          <object-id pub-id-type="doi">10.1371/journal.pone.0041642.g003</object-id>
          <label>Figure 3</label>
          <caption>
            <title>Postures in motion.</title>
            <p>(<bold>A</bold>) Flow chart of the sequential image processing steps to obtain high-resolution trajectories of animal postures. (<bold>B</bold>) Visual representation of the step schematics in (A) for three different organisms: fruit fly larvae, fish and mice. One can either perform a thinning operation on the binary image of the animal shape to find a skeleton whose endpoints will correspond to head and tail, or alternatively compute contour curvature maxima to determine the position of the head and tail. (<bold>C</bold>) Illustration of the tracking method for a temporal sequence of postures (blue silhouettes) and head positions (red dots) for a crawling larva, a swimming fish and a walking mouse.</p>
          </caption>
          <graphic mimetype="image" orientation="portrait" position="float" xlink:href="info:doi/10.1371/journal.pone.0041642.g003" xlink:type="simple"/>
        </fig>
        <p>From the raw frames collected during the experimental session, the goal now is to generate high-resolution trajectories of the animal's posture that quantitatively describe the time course of its behavior. In short, we automatically trace the animal contour to compute its curvature and skeleton, in order to extract the position of several points of interest, including head and tail.</p>
        <p>Once the original image has been segmented and binarized, the body edge is detected as the boundary between black and white pixels. If necessary, the shape can be smoothed by standard image processing procedures (see <xref ref-type="supplementary-material" rid="pone.0041642.s001">Text S1</xref>). Next, we find the skeleton using a thinning operation, which recursively removes pixels from the boundary until a single-pixel-wide connected skeleton is obtained (<xref ref-type="fig" rid="pone-0041642-g003">Figure 3B</xref>). The body skeleton is convenient for two reasons. First, it gives access to the local curvature of the animal along its anterior-posterior axis — a representation which made it possible to make fundamental discoveries in <italic>C. elegans</italic> <xref ref-type="bibr" rid="pone.0041642-Stephens1">[2]</xref>. Second, the two end-points of the skeleton can be used as a proxy for animal head and tail positions, as is discussed below. The thinning process can produce spurious branches that lead to skeletons with more than two endpoints. The code keeps track of the fraction of such cases. If the contour is smooth and regular, as is the case for the <italic>Drosophila</italic> larva, spurs are rare and problematic frames can be discarded from the analysis. Depending on the particular organism that is being tracked, one could make use of such extra branches of the skeleton to detect relevant posture features such as legs, wings or fins. Alternatively, a second approach to obtain head and tail positions relies on the identification of points of maximum curvature along the animal's contour (see <xref ref-type="fig" rid="pone-0041642-g003">Figure 3B</xref>). The skeleton is then built from such points by tracing bisectors along left and right sides of the animal perimeter <xref ref-type="bibr" rid="pone.0041642-Swierczek1">[13]</xref>.</p>
        <p>Differentiating the head from the tail is necessary to robustly reconstruct trajectories. When the temporal resolution is high, classification is achieved via a simple “distance rule”: the head in the current frame is identified as the closest locus to the head in the previous frame. This rule only requires human intervention to define the head position in the first frame. The program displays the first grayscale image of the animal on the screen and asks the user to click first near the head and then near the tail positions. From then on, head and tail are sorted automatically. This simple clustering algorithm can robustly detect the head position even if the animal is not moving or if it is engaged in backward locomotion. The algorithm only requires a tracking frequency that is faster than the typical speed of motion expressed in units of body length (<xref ref-type="fig" rid="pone-0041642-g002">Figure 2</xref>). When the animal bends to such an extent that a blob-like shape is created, the head and the tail can be swapped. We automatically flag these events during the first round of processing to allow for a correction if necessary (see <xref ref-type="supplementary-material" rid="pone.0041642.s001">Text S1</xref>). Furthermore, a visual animation is displayed at the end of the image processing so that the user can easily review any potential problems in loci assignment. For all frames associated with a potential error, the program pauses and invites the user to disambiguate the classification.</p>
        <p>To illustrate the use of <italic>SOS</italic>, we tracked the postural dynamics of four model organisms in neurobiology: a fruit fly larva (third instar <italic>Drosophila melanogaster</italic> larva) crawling on an agarose surface, a flatworm (planarian <italic>Schmidtea mediterranea</italic>) swimming away from a light source, a fish (adult zebrafish <italic>Danio rerio</italic>) swimming in a Petri dish, and a mouse (<italic>Mus musculus</italic>) walking on a square arena and swimming in a water maze. The raw data was either generated by the authors or kindly provided by other labs. <xref ref-type="fig" rid="pone-0041642-g003">Figure 3</xref> illustrates in detail the application of the segmentation scheme for a fly larva, fish and mouse. The resulting movies for all organisms can be found as <xref ref-type="supplementary-material" rid="pone.0041642.s002">Movie S1</xref>.</p>
        <p>From the contour and skeleton images, the area, the perimeter size and skeleton length are saved. This information can be used, for instance, to normalize the size of the animal, to measure rhythmic patterns of locomotion from body contractions or, potentially, to detect hunching or rearing. Coordinates including the animal's centroid position and the middle point of the skeleton are extracted as well. Next, the software translates all high-resolution coordinates (positions of head, tail, centroid, midpoint, ordered skeleton from head to tail, and contour) to the laboratory frame of reference, and it converts these features from pixels to units of physical length. Calibration implies a multiplication of the coordinates by a conversion factor calculated from landmarks of the arena available in the field of view. To define customized landmarks, the code displays the arena background on the screen and asks the user to click on key positions separated by distances known to the experimenter. The spatial scale (pixels per millimeter) and the temporal resolution (frames per second) of the tracking are saved together with the arena landmarks.</p>
      </sec>
      <sec id="s3d">
        <title>High-resolution sensorimotor trajectories</title>
        <p>In the presence of a stimulus landscape or during a particular behavioral task, postural data can be used to infer the sensory input to which the animal is exposed during the course of an experiment. This yields detailed sensorimotor trajectories. By projecting the body shapes onto the plane of locomotion and calculating postures in time and space, we can map the position of the animal's head with the corresponding stimulus intensity and local gradient's strength and direction. Aligning the motor data with the reconstructed stimulus landscape, we can obtain, for instance, the stimulus intensity at the head, its time derivative and the relative orientation angle of the animal with respect to the local gradient (<xref ref-type="fig" rid="pone-0041642-g004">Figure 4</xref>).</p>
        <fig id="pone-0041642-g004" orientation="portrait" position="float">
          <object-id pub-id-type="doi">10.1371/journal.pone.0041642.g004</object-id>
          <label>Figure 4</label>
          <caption>
            <title>High-resolution sensorimotor trajectories.</title>
            <p>The posture and position of each animal are mapped onto the arena system of reference and then used to infer, from the gradient landscape, the sensory dynamics at different loci of interest. The temporal sequences of all sensory and motor variables corresponding to different animals are compiled in a single data file.</p>
          </caption>
          <graphic mimetype="image" orientation="portrait" position="float" xlink:href="info:doi/10.1371/journal.pone.0041642.g004" xlink:type="simple"/>
        </fig>
        <p>After testing several animals independently (for instance corresponding to the same sensory stimulus, genotype, developmental stage, etc.), our offline analysis software allows for merging files from all trials in a consistent way. Together with continuous kinematic variables, <italic>SOS</italic> detects and saves discrete behavioral events such as runs, turns and casts, by finding abrupt reorientation speeds and large head bending angles <xref ref-type="bibr" rid="pone.0041642-GomezMarin1">[21]</xref>. On the whole, the process produces a temporal sequence of high-resolution sensory and behavioral data for all animals tested. Hypotheses about neural computation can then be drawn from statistical correlations between sensory inputs and motor outputs.</p>
      </sec>
      <sec id="s3e">
        <title>Orientation in sensory landscapes: chemotaxis, thermotaxis and phototaxis in the <italic>Drosophila melanogaster</italic> larva</title>
        <p>From online tracking to offline processing and the generation of sensorimotor trajectories, we illustrate the potential of the whole <italic>SOS</italic> system by examining <italic>Drosophila</italic> larvae orienting to odor, temperature and light gradients <xref ref-type="bibr" rid="pone.0041642-GomezMarin2">[22]</xref>. Our present aim is not to conduct an exhaustive study of each modality, but to show how the analysis can reveal interesting aspects of sensory orientation in the larva. The trajectory of particular points along the body is used to reconstruct both the positional dynamics of the sensory organs and the behavior of the entire animal. As shown in <xref ref-type="fig" rid="pone-0041642-g005">Figure 5</xref>, we infer the sensory input at the body locations where receptors are located.</p>
        <fig id="pone-0041642-g005" orientation="portrait" position="float">
          <object-id pub-id-type="doi">10.1371/journal.pone.0041642.g005</object-id>
          <label>Figure 5</label>
          <caption>
            <title><italic>Drosophila</italic> larval orientation in odor, temperature and light gradients.</title>
            <p>Top: stimulus landscape overlaid with one representative head-and-tail trajectory for every sensory modality. Bottom: reconstructed time course of a sensory variable relevant to the behavioral modality under study. (<bold>A</bold>) Concentration changes due to head casts as the animal returns to the odor source. (<bold>B</bold>) Thermal differences along the body axis skeleton with respect to the head as the animal reorients in the temperature gradient. (<bold>C</bold>) Percentage of light on the body contour as the larva moves away from the light source. For all panels, gradient direction and direction of motion are illustrated with grey and black arrows, respectively. Numbers indicate the occurrence of sampling events reported in the bottom panel.</p>
          </caption>
          <graphic mimetype="image" orientation="portrait" position="float" xlink:href="info:doi/10.1371/journal.pone.0041642.g005" xlink:type="simple"/>
        </fig>
        <p><italic>Drosophila</italic> larvae move towards increasing concentrations of attractive odors <xref ref-type="bibr" rid="pone.0041642-Gershow1">[12]</xref>, <xref ref-type="bibr" rid="pone.0041642-Louis1">[17]</xref>, <xref ref-type="bibr" rid="pone.0041642-GomezMarin1">[21]</xref>, <xref ref-type="bibr" rid="pone.0041642-AcevesPina1">[23]</xref>, <xref ref-type="bibr" rid="pone.0041642-Monte1">[24]</xref>, <xref ref-type="bibr" rid="pone.0041642-Fishilevich1">[25]</xref>. The larval “nose” is located at the tip of the head, where a pair of olfactory organs (dorsal organs) host 21 olfactory sensory neurons <xref ref-type="bibr" rid="pone.0041642-Gerber1">[15]</xref>, <xref ref-type="bibr" rid="pone.0041642-Cobb1">[26]</xref>. It has been shown that larval chemotaxis does not require stereo-olfaction, namely, the detection of concentration differences between the left and the right “noses” <xref ref-type="bibr" rid="pone.0041642-Louis1">[17]</xref>. Reconstruction and analysis of the odor concentration dynamics at the tip of the head during free exploration has shown that head casts are a key process of the reorientation mechanism <xref ref-type="bibr" rid="pone.0041642-GomezMarin1">[21]</xref>. Such trajectories revealed that temporal integration of stimulus changes during head motion represents the main input signal controlling the timing and direction of turns. Lateral head sweeps constitute an active sampling process that allows the animal to reorient in the odor gradient <xref ref-type="bibr" rid="pone.0041642-GomezMarin1">[21]</xref>. As quantified in <xref ref-type="fig" rid="pone-0041642-g005">Figure 5A</xref>, lateral head movements can be associated with rapid changes in odor concentration on the order of a 5% relative difference (50 nM changes in a 1 µM concentration background).</p>
        <p><italic>Drosophila</italic> larvae sense temperature by using a pair of detectors located at the tip of the head (terminal organs) and chordotonal neurons scattered along the body wall <xref ref-type="bibr" rid="pone.0041642-Garrity1">[14]</xref>. When exposed to a temperature gradient, first instar larvae modulate the acceptance of a new direction of motion during lateral head sweeps <xref ref-type="bibr" rid="pone.0041642-Luo1">[27]</xref>. As argued in reference <xref ref-type="bibr" rid="pone.0041642-Garrity1">[14]</xref>, thermal equilibration from the surface to the internal structure of first instar larvae should take place in less than a tenth of a second. Applying the same scaling argument to third instar larvae, we find that a few seconds are necessary for thermal equilibration along the lateral axis of the body. In contrast, the characteristic time for thermal equilibration along the longitudinal axis (4 mm) is nearly one minute. This slow timescale implies that, in principle, third instar larvae could measure temperature differences along the antero-posterior axis. By reconstructing the sensorimotor trajectory of a larva navigating from low to high temperatures in a linear temperature gradient of slope 0.1°C/mm, <italic>SOS</italic> provides a quantitative estimate of the temperature differences along the larva's body during orientation. As shown in <xref ref-type="fig" rid="pone-0041642-g005">Figure 5B</xref>, we find that variations along the anteroposterior axis are of the same order of magnitude as those associated with head casts.</p>
        <p><italic>Drosophila</italic> larvae display a strong photophobic behavior lasting until wandering stage <xref ref-type="bibr" rid="pone.0041642-Sawin1">[28]</xref>. Two sensory organs (Bolwig's organs) are located in the head. Moreover, non-conventional photoreceptors tile the entire body wall <xref ref-type="bibr" rid="pone.0041642-Xiang1">[16]</xref>. From genetic dissections of light driven behaviors in the fruit fly larva <xref ref-type="bibr" rid="pone.0041642-Hassan1">[29]</xref>, <xref ref-type="bibr" rid="pone.0041642-Busto1">[30]</xref>, <xref ref-type="bibr" rid="pone.0041642-Scantlebury1">[31]</xref>, <xref ref-type="bibr" rid="pone.0041642-Sprecher1">[32]</xref>, <xref ref-type="bibr" rid="pone.0041642-Keene1">[33]</xref>, there is a growing interest in the quantification of orientation responses in light gradients <xref ref-type="bibr" rid="pone.0041642-Kane1">[34]</xref>. Instead of exposing individuals to all-or-none light flashes, we tested larval behavior in response to a sideways light gradient (<xref ref-type="fig" rid="pone-0041642-g005">Figure 5C</xref>). The orientation mechanism controlling light responses is thought to involve body bends or head casts <xref ref-type="bibr" rid="pone.0041642-Sawin1">[28]</xref>. Turning direction might also be inferred from a comparison of the light exposure on the left and on the right sides of the body. In our arena the animal controls light exposure by modifying its orientation with respect to the direction of the source. In <xref ref-type="fig" rid="pone-0041642-g005">Figure 5C</xref>, we use <italic>SOS</italic> to quantify the temporal evolution of the percentage of body surface exposed to light. Together with the light intensity at the Bolwig's organs, our system allows us to estimate subtle differences in illumination along the left and right sides of the body — a possible parallel source of information exploited by the larva during phototaxis.</p>
      </sec>
    </sec>
    <sec id="s4">
      <title>Discussion</title>
      <p>Adaptive behavior refers to the ability of an animal to produce and react to changes in internal and external signals by means of motion <xref ref-type="bibr" rid="pone.0041642-Huston1">[35]</xref>. At the same time, behavior often implies an active process to collect sensory information, rather than a passive response. Motion, perception, and proprioception are not independent but are interwoven in a sensorimotor feedback loop. With the aim of improving and sharing behavioral quantification tools, we have developed software for high-resolution tracking of single animals that are freely moving in two-dimensional sensory environments. By monitoring the postural changes of an individual over time, our system reveals the stimulus history to which specific sensors are exposed in space and time. This reconstruction of trajectories in sensory and motor spaces represents a necessary step in the analysis of the neural processes controlling active sampling during orientation behavior.</p>
      <p>Across the animal kingdom, quantitative measures of motor responses have provided invaluable information about the computation underlying orientation behavior <xref ref-type="bibr" rid="pone.0041642-Gershow1">[12]</xref>, <xref ref-type="bibr" rid="pone.0041642-GomezMarin3">[36]</xref>, <xref ref-type="bibr" rid="pone.0041642-Fraenkel1">[37]</xref>, <xref ref-type="bibr" rid="pone.0041642-Webster1">[38]</xref>, <xref ref-type="bibr" rid="pone.0041642-Carde1">[39]</xref>. Historically, the advent of high-resolution tracking of <italic>Escherichia coli</italic> in chemical gradients <xref ref-type="bibr" rid="pone.0041642-Min1">[40]</xref>, <xref ref-type="bibr" rid="pone.0041642-Berg1">[41]</xref> laid the foundation for an understanding of the biochemical pathways controlling chemotaxis in unicellular organisms. Similar behavioral analysis conducted in <italic>Caenorhabditis elegans</italic> <xref ref-type="bibr" rid="pone.0041642-PierceShimomura1">[42]</xref>, <xref ref-type="bibr" rid="pone.0041642-Lockery1">[43]</xref>, <xref ref-type="bibr" rid="pone.0041642-Iino1">[44]</xref> and in <italic>Drosophila melanogaster</italic> larvae <xref ref-type="bibr" rid="pone.0041642-Louis1">[17]</xref>, <xref ref-type="bibr" rid="pone.0041642-GomezMarin1">[21]</xref>, <xref ref-type="bibr" rid="pone.0041642-Luo1">[27]</xref> have shed light on the neural mechanisms controlling active sampling and decision making. Here, we demonstrate the use of our tracking software on three sensory modalities in the fruit fly larva: chemotaxis, thermotaxis and phototaxis. Our analysis produces behavioral features associated with the detection and computation of sensory stimuli in each modality: odor concentration changes due to side-to-side head movements during chemotaxis; temperature gradients along the larval body during thermotaxis; and differences in photostimulation between the left and right sides of the animal during phototaxis.</p>
      <p>The software <italic>SOS</italic> is a tracking and analysis system that can be used in behavioral screens to characterize subtle sensorimotor deficiencies associated with selected phenotypic traits. Precise behavioral tracking is also convenient for phenotypic scoring in genetic mapping studies <xref ref-type="bibr" rid="pone.0041642-Orgogozo1">[45]</xref>. <italic>SOS</italic> can be adapted to make use of anatomical features specific to the organism under study such as sharp edges, large protrusions, darker parts, and expression of fluorescent markers or tags. It can also be extended to analyze orientation behavior in other sensory landscapes such as humidity, gravity, or simply foraging strategies in homogeneous sensory landscapes. Finally, <italic>SOS</italic> can be applied to monitor a wide range of organisms from <italic>Planarian</italic> <xref ref-type="bibr" rid="pone.0041642-Inoue1">[46]</xref> and marine zooplankton <xref ref-type="bibr" rid="pone.0041642-Jekely1">[47]</xref> to larvae from other species <xref ref-type="bibr" rid="pone.0041642-Crespo1">[48]</xref>, fishes and mice. We hope that novel ways of measuring and analyzing animal behavior will contribute to the development of new concepts, theories and principles about how neurons process sensory information to produce coordinated motor responses <xref ref-type="bibr" rid="pone.0041642-Stephens2">[49]</xref>.</p>
    </sec>
    <sec id="s5">
      <title>Supporting Information</title>
      <supplementary-material id="pone.0041642.s001" mimetype="application/pdf" orientation="portrait" position="float" xlink:href="info:doi/10.1371/journal.pone.0041642.s001" xlink:type="simple">
        <label>Text S1</label>
        <caption>
          <p>
            <bold>Step-by-step tutorial detailing the use and functionality of </bold>
            <bold>
              <italic>SOS</italic>
            </bold>
            <bold>.</bold>
          </p>
          <p>(PDF)</p>
        </caption>
      </supplementary-material>
      <supplementary-material id="pone.0041642.s002" mimetype="video/x-m4v" orientation="portrait" position="float" xlink:href="info:doi/10.1371/journal.pone.0041642.s002" xlink:type="simple">
        <label>Movie S1</label>
        <caption>
          <p>
            <bold>Illustrative movie of posture tracking in flatworms, fruit fly larvae, fishes and mice.</bold>
          </p>
          <p>(M4V)</p>
        </caption>
      </supplementary-material>
      <supplementary-material id="pone.0041642.s003" mimetype="application/zip" orientation="portrait" position="float" xlink:href="info:doi/10.1371/journal.pone.0041642.s003" xlink:type="simple">
        <label>File S1</label>
        <caption>
          <p><bold>Tracking and analysis codes of </bold><bold><italic>SOS</italic></bold><bold> together with a test dataset generated from larvae behaving in an odor gradient.</bold> Updated code versions will be uploaded on the website of the Louis lab: <ext-link ext-link-type="uri" xlink:href="http://www.crg.es/matthieu_louis" xlink:type="simple">http://www.crg.es/matthieu_louis</ext-link>.</p>
          <p>(ZIP)</p>
        </caption>
      </supplementary-material>
    </sec>
  </body>
  <back>
    <ack>
      <p>We are grateful to Julia Riedl and Andreas Braun for help in the development of the thermotaxis assay. We thank Alfonso Perez-Escudero, Robert Hinz and Gonzalo de Polavieja for generously sharing zebrafish movies. We thank Ignasi Sahun and Mara Dierssen for mouse movies. We thank Beatriz Calvo and Emili Salo for flatworm movies. We also thank Gus Lott, Rex Kerr, and Ev Yemini for discussion about computer-vision algorithms for animal tracking. Our manuscript has benefited from exchanges with Bjoern Brembs, Julien Colomb and Simon Sprecher. We are grateful to two anonymous referees for their thorough reading of the manuscript and useful comments.</p>
    </ack>
    <ref-list>
      <title>References</title>
      <ref id="pone.0041642-Leshner1">
        <label>1</label>
        <mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Leshner</surname><given-names>A</given-names></name>, <name name-style="western"><surname>Pfaff</surname><given-names>DW</given-names></name> (<year>2011</year>) <article-title>Quantification of behavior</article-title>. <source>Proc Natl Acad Sci U S A</source> <volume>108 Suppl 3</volume>: <fpage>15537</fpage>–<lpage>15541</lpage>.</mixed-citation>
      </ref>
      <ref id="pone.0041642-Stephens1">
        <label>2</label>
        <mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Stephens</surname><given-names>GJ</given-names></name>, <name name-style="western"><surname>Johnson-Kerner</surname><given-names>B</given-names></name>, <name name-style="western"><surname>Bialek</surname><given-names>W</given-names></name>, <name name-style="western"><surname>Ryu</surname><given-names>WS</given-names></name> (<year>2008</year>) <article-title>Dimensionality and dynamics in the behavior of C. elegans</article-title>. <source>PLoS Comput Biol</source> <volume>4</volume>: <fpage>e1000028</fpage>.</mixed-citation>
      </ref>
      <ref id="pone.0041642-Huang1">
        <label>3</label>
        <mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Huang</surname><given-names>KM</given-names></name>, <name name-style="western"><surname>Cosman</surname><given-names>P</given-names></name>, <name name-style="western"><surname>Schafer</surname><given-names>WR</given-names></name> (<year>2006</year>) <article-title>Machine vision based detection of omega bends and reversals in C. elegans</article-title>. <source>J Neurosci Methods</source> <volume>158</volume>: <fpage>323</fpage>–<lpage>336</lpage>.</mixed-citation>
      </ref>
      <ref id="pone.0041642-Baek1">
        <label>4</label>
        <mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Baek</surname><given-names>JH</given-names></name>, <name name-style="western"><surname>Cosman</surname><given-names>P</given-names></name>, <name name-style="western"><surname>Feng</surname><given-names>Z</given-names></name>, <name name-style="western"><surname>Silver</surname><given-names>J</given-names></name>, <name name-style="western"><surname>Schafer</surname><given-names>WR</given-names></name> (<year>2002</year>) <article-title>Using machine vision to analyze and classify Caenorhabditis elegans behavioral phenotypes quantitatively</article-title>. <source>J Neurosci Methods</source> <volume>118</volume>: <fpage>9</fpage>–<lpage>21</lpage>.</mixed-citation>
      </ref>
      <ref id="pone.0041642-Ramot1">
        <label>5</label>
        <mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Ramot</surname><given-names>D</given-names></name>, <name name-style="western"><surname>Johnson</surname><given-names>BE</given-names></name>, <name name-style="western"><surname>Berry</surname><given-names>TL</given-names><suffix>Jr</suffix></name>, <name name-style="western"><surname>Carnell</surname><given-names>L</given-names></name>, <name name-style="western"><surname>Goodman</surname><given-names>MB</given-names></name> (<year>2008</year>) <article-title>The Parallel Worm Tracker: a platform for measuring average speed and drug-induced paralysis in nematodes</article-title>. <source>PLoS One</source> <volume>3</volume>: <fpage>e2208</fpage>.</mixed-citation>
      </ref>
      <ref id="pone.0041642-Straw1">
        <label>6</label>
        <mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Straw</surname><given-names>AD</given-names></name>, <name name-style="western"><surname>Branson</surname><given-names>K</given-names></name>, <name name-style="western"><surname>Neumann</surname><given-names>TR</given-names></name>, <name name-style="western"><surname>Dickinson</surname><given-names>MH</given-names></name> (<year>2011</year>) <article-title>Multi-camera real-time three-dimensional tracking of multiple flying animals</article-title>. <source>J R Soc Interface</source> <volume>8</volume>: <fpage>395</fpage>–<lpage>409</lpage>.</mixed-citation>
      </ref>
      <ref id="pone.0041642-Gilestro1">
        <label>7</label>
        <mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Gilestro</surname><given-names>GF</given-names></name>, <name name-style="western"><surname>Cirelli</surname><given-names>C</given-names></name> (<year>2009</year>) <article-title>pySolo: a complete suite for sleep analysis in Drosophila</article-title>. <source>Bioinformatics</source> <volume>25</volume>: <fpage>1466</fpage>–<lpage>1467</lpage>.</mixed-citation>
      </ref>
      <ref id="pone.0041642-Kohlhoff1">
        <label>8</label>
        <mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Kohlhoff</surname><given-names>KJ</given-names></name>, <name name-style="western"><surname>Jahn</surname><given-names>TR</given-names></name>, <name name-style="western"><surname>Lomas</surname><given-names>DA</given-names></name>, <name name-style="western"><surname>Dobson</surname><given-names>CM</given-names></name>, <name name-style="western"><surname>Crowther</surname><given-names>DC</given-names></name>, <etal>et al</etal>. (<year>2011</year>) <article-title>The iFly tracking system for an automated locomotor and behavioural analysis of Drosophila melanogaster</article-title>. <source>Integr Biol (Camb)</source> <volume>3</volume>: <fpage>755</fpage>–<lpage>760</lpage>.</mixed-citation>
      </ref>
      <ref id="pone.0041642-Lott1">
        <label>9</label>
        <mixed-citation publication-type="other" xlink:type="simple">Lott GK (2010) gVision: Scientific video and image acquisition. http://gvision-hhmisourceforgenet/.</mixed-citation>
      </ref>
      <ref id="pone.0041642-Drai1">
        <label>10</label>
        <mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Drai</surname><given-names>D</given-names></name>, <name name-style="western"><surname>Golani</surname><given-names>I</given-names></name> (<year>2001</year>) <article-title>SEE: a tool for the visualization and analysis of rodent exploratory behavior</article-title>. <source>Neurosci Biobehav Rev</source> <volume>25</volume>: <fpage>409</fpage>–<lpage>426</lpage>.</mixed-citation>
      </ref>
      <ref id="pone.0041642-Branson1">
        <label>11</label>
        <mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Branson</surname><given-names>K</given-names></name>, <name name-style="western"><surname>Robie</surname><given-names>AA</given-names></name>, <name name-style="western"><surname>Bender</surname><given-names>J</given-names></name>, <name name-style="western"><surname>Perona</surname><given-names>P</given-names></name>, <name name-style="western"><surname>Dickinson</surname><given-names>MH</given-names></name> (<year>2009</year>) <article-title>High-throughput ethomics in large groups of Drosophila</article-title>. <source>Nat Methods</source> <volume>6</volume>: <fpage>451</fpage>–<lpage>457</lpage>.</mixed-citation>
      </ref>
      <ref id="pone.0041642-Gershow1">
        <label>12</label>
        <mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Gershow</surname><given-names>M</given-names></name>, <name name-style="western"><surname>Berck</surname><given-names>M</given-names></name>, <name name-style="western"><surname>Mathew</surname><given-names>D</given-names></name>, <name name-style="western"><surname>Luo</surname><given-names>L</given-names></name>, <name name-style="western"><surname>Kane</surname><given-names>EA</given-names></name>, <etal>et al</etal>. (<year>2012</year>) <article-title>Controlling airborne cues to study small animal navigation</article-title>. <source>Nat Methods</source> <volume>9</volume>: <fpage>290</fpage>–<lpage>296</lpage>.</mixed-citation>
      </ref>
      <ref id="pone.0041642-Swierczek1">
        <label>13</label>
        <mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Swierczek</surname><given-names>NA</given-names></name>, <name name-style="western"><surname>Giles</surname><given-names>AC</given-names></name>, <name name-style="western"><surname>Rankin</surname><given-names>CH</given-names></name>, <name name-style="western"><surname>Kerr</surname><given-names>RA</given-names></name> (<year>2010</year>) <article-title>High-throughput behavioral analysis in C. elegans</article-title>. <source>Nat Methods</source> <volume>8</volume>: <fpage>592</fpage>–<lpage>598</lpage>.</mixed-citation>
      </ref>
      <ref id="pone.0041642-Garrity1">
        <label>14</label>
        <mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Garrity</surname><given-names>PA</given-names></name>, <name name-style="western"><surname>Goodman</surname><given-names>MB</given-names></name>, <name name-style="western"><surname>Samuel</surname><given-names>AD</given-names></name>, <name name-style="western"><surname>Sengupta</surname><given-names>P</given-names></name> (<year>2010</year>) <article-title>Running hot and cold: behavioral strategies, neural circuits, and the molecular machinery for thermotaxis in C. elegans and Drosophila</article-title>. <source>Genes Dev</source> <volume>24</volume>: <fpage>2365</fpage>–<lpage>2382</lpage>.</mixed-citation>
      </ref>
      <ref id="pone.0041642-Gerber1">
        <label>15</label>
        <mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Gerber</surname><given-names>B</given-names></name>, <name name-style="western"><surname>Stocker</surname><given-names>RF</given-names></name> (<year>2007</year>) <article-title>The Drosophila larva as a model for studying chemosensation and chemosensory learning: a review</article-title>. <source>Chem Senses</source> <volume>32</volume>: <fpage>65</fpage>–<lpage>89</lpage>.</mixed-citation>
      </ref>
      <ref id="pone.0041642-Xiang1">
        <label>16</label>
        <mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Xiang</surname><given-names>Y</given-names></name>, <name name-style="western"><surname>Yuan</surname><given-names>Q</given-names></name>, <name name-style="western"><surname>Vogt</surname><given-names>N</given-names></name>, <name name-style="western"><surname>Looger</surname><given-names>LL</given-names></name>, <name name-style="western"><surname>Jan</surname><given-names>LY</given-names></name>, <etal>et al</etal>. (<year>2010</year>) <article-title>Light-avoidance-mediating photoreceptors tile the Drosophila larval body wall</article-title>. <source>Nature</source> <volume>468</volume>: <fpage>921</fpage>–<lpage>926</lpage>.</mixed-citation>
      </ref>
      <ref id="pone.0041642-Louis1">
        <label>17</label>
        <mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Louis</surname><given-names>M</given-names></name>, <name name-style="western"><surname>Huber</surname><given-names>T</given-names></name>, <name name-style="western"><surname>Benton</surname><given-names>R</given-names></name>, <name name-style="western"><surname>Sakmar</surname><given-names>TP</given-names></name>, <name name-style="western"><surname>Vosshall</surname><given-names>LB</given-names></name> (<year>2008</year>) <article-title>Bilateral olfactory sensory input enhances chemotaxis behavior</article-title>. <source>Nat Neurosci</source> <volume>11</volume>: <fpage>187</fpage>–<lpage>199</lpage>.</mixed-citation>
      </ref>
      <ref id="pone.0041642-Khurana1">
        <label>18</label>
        <mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Khurana</surname><given-names>S</given-names></name>, <name name-style="western"><surname>Li</surname><given-names>WK</given-names></name>, <name name-style="western"><surname>Atkinson</surname><given-names>NS</given-names></name> (<year>2010</year>) <article-title>Image enhancement for tracking the translucent larvae of Drosophila melanogaster</article-title>. <source>PLoS One</source> <volume>5</volume>: <fpage>e15259</fpage>.</mixed-citation>
      </ref>
      <ref id="pone.0041642-Faumont1">
        <label>19</label>
        <mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Faumont</surname><given-names>S</given-names></name>, <name name-style="western"><surname>Rondeau</surname><given-names>G</given-names></name>, <name name-style="western"><surname>Thiele</surname><given-names>TR</given-names></name>, <name name-style="western"><surname>Lawton</surname><given-names>KJ</given-names></name>, <name name-style="western"><surname>McCormick</surname><given-names>KE</given-names></name>, <etal>et al</etal>. (<year>2011</year>) <article-title>An Image-Free Opto-Mechanical System for Creating Virtual Environments and Imaging Neuronal Activity in Freely Moving <italic>Caenorhabditis elegans</italic></article-title>. <source>PLoS One</source> <volume>6</volume>: <fpage>e24666</fpage>.</mixed-citation>
      </ref>
      <ref id="pone.0041642-Leifer1">
        <label>20</label>
        <mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Leifer</surname><given-names>AM</given-names></name>, <name name-style="western"><surname>Fang-Yen</surname><given-names>C</given-names></name>, <name name-style="western"><surname>Gershow</surname><given-names>M</given-names></name>, <name name-style="western"><surname>Alkema</surname><given-names>MJ</given-names></name>, <name name-style="western"><surname>Samuel</surname><given-names>AD</given-names></name> (<year>2011</year>) <article-title>Optogenetic manipulation of neural activity in freely moving Caenorhabditis elegans</article-title>. <source>Nat Methods</source> <volume>8</volume>: <fpage>147</fpage>–<lpage>152</lpage>.</mixed-citation>
      </ref>
      <ref id="pone.0041642-GomezMarin1">
        <label>21</label>
        <mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Gomez-Marin</surname><given-names>A</given-names></name>, <name name-style="western"><surname>Stephens</surname><given-names>GJ</given-names></name>, <name name-style="western"><surname>Louis</surname><given-names>M</given-names></name> (<year>2011</year>) <article-title>Active sampling and decision making in Drosophila chemotaxis</article-title>. <source>Nat Commun</source> <volume>2</volume>: <fpage>441</fpage>.</mixed-citation>
      </ref>
      <ref id="pone.0041642-GomezMarin2">
        <label>22</label>
        <mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Gomez-Marin</surname><given-names>A</given-names></name>, <name name-style="western"><surname>Louis</surname><given-names>M</given-names></name> (<year>2012</year>) <article-title>Active sensation during orientation behavior in the Drosophila larva: more sense than luck</article-title>. <source>Curr Opin Neurobiol</source> <volume>22</volume>: <fpage>208</fpage>–<lpage>215</lpage>.</mixed-citation>
      </ref>
      <ref id="pone.0041642-AcevesPina1">
        <label>23</label>
        <mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Aceves-Pina</surname><given-names>EO</given-names></name>, <name name-style="western"><surname>Quinn</surname><given-names>WG</given-names></name> (<year>1979</year>) <article-title>Learning in Normal and Mutant Drosophila Larvae</article-title>. <source>Science</source> <volume>206</volume>: <fpage>93</fpage>–<lpage>96</lpage>.</mixed-citation>
      </ref>
      <ref id="pone.0041642-Monte1">
        <label>24</label>
        <mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Monte</surname><given-names>P</given-names></name>, <name name-style="western"><surname>Woodard</surname><given-names>C</given-names></name>, <name name-style="western"><surname>Ayer</surname><given-names>R</given-names></name>, <name name-style="western"><surname>Lilly</surname><given-names>M</given-names></name>, <name name-style="western"><surname>Sun</surname><given-names>H</given-names></name>, <etal>et al</etal>. (<year>1989</year>) <article-title>Characterization of the larval olfactory response in <italic>Drosophila</italic> and its genetic basis</article-title>. <source>Behav Genet</source> <volume>19</volume>: <fpage>267</fpage>–<lpage>283</lpage>.</mixed-citation>
      </ref>
      <ref id="pone.0041642-Fishilevich1">
        <label>25</label>
        <mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Fishilevich</surname><given-names>E</given-names></name>, <name name-style="western"><surname>Domingos</surname><given-names>AI</given-names></name>, <name name-style="western"><surname>Asahina</surname><given-names>K</given-names></name>, <name name-style="western"><surname>Naef</surname><given-names>F</given-names></name>, <name name-style="western"><surname>Vosshall</surname><given-names>LB</given-names></name>, <etal>et al</etal>. (<year>2005</year>) <article-title>Chemotaxis behavior mediated by single larval olfactory neurons in Drosophila</article-title>. <source>Curr Biol</source> <volume>15</volume>: <fpage>2086</fpage>–<lpage>2096</lpage>.</mixed-citation>
      </ref>
      <ref id="pone.0041642-Cobb1">
        <label>26</label>
        <mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Cobb</surname><given-names>M</given-names></name> (<year>1999</year>) <article-title>What and how do maggots smell?</article-title> <source>Biol Rev</source> <volume>74</volume>: <fpage>425</fpage>–<lpage>459</lpage>.</mixed-citation>
      </ref>
      <ref id="pone.0041642-Luo1">
        <label>27</label>
        <mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Luo</surname><given-names>L</given-names></name>, <name name-style="western"><surname>Gershow</surname><given-names>M</given-names></name>, <name name-style="western"><surname>Rosenzweig</surname><given-names>M</given-names></name>, <name name-style="western"><surname>Kang</surname><given-names>K</given-names></name>, <name name-style="western"><surname>Fang-Yen</surname><given-names>C</given-names></name>, <etal>et al</etal>. (<year>2010</year>) <article-title>Navigational decision making in Drosophila thermotaxis</article-title>. <source>J Neurosci</source> <volume>30</volume>: <fpage>4261</fpage>–<lpage>4272</lpage>.</mixed-citation>
      </ref>
      <ref id="pone.0041642-Sawin1">
        <label>28</label>
        <mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Sawin</surname><given-names>E</given-names></name>, <name name-style="western"><surname>Harris</surname><given-names>L</given-names></name>, <name name-style="western"><surname>Campos</surname><given-names>A</given-names></name>, <name name-style="western"><surname>Sokolowski</surname><given-names>M</given-names></name> (<year>1994</year>) <article-title>Sensorimotor transformation from light reception to phototactic behavior in <italic>Drosophila</italic> larvae (Diptera: Drosophilidae)</article-title>. <source>Journal of Insect Behavior</source> <volume>7</volume>: <fpage>553</fpage>–<lpage>567</lpage>.</mixed-citation>
      </ref>
      <ref id="pone.0041642-Hassan1">
        <label>29</label>
        <mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Hassan</surname><given-names>J</given-names></name>, <name name-style="western"><surname>Busto</surname><given-names>M</given-names></name>, <name name-style="western"><surname>Iyengar</surname><given-names>B</given-names></name>, <name name-style="western"><surname>Campos</surname><given-names>AR</given-names></name> (<year>2000</year>) <article-title>Behavioral characterization and genetic analysis of the Drosophila melanogaster larval response to light as revealed by a novel individual assay</article-title>. <source>Behav Genet</source> <volume>30</volume>: <fpage>59</fpage>–<lpage>69</lpage>.</mixed-citation>
      </ref>
      <ref id="pone.0041642-Busto1">
        <label>30</label>
        <mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Busto</surname><given-names>M</given-names></name>, <name name-style="western"><surname>Iyengar</surname><given-names>B</given-names></name>, <name name-style="western"><surname>Campos</surname><given-names>AR</given-names></name> (<year>1999</year>) <article-title>Genetic dissection of behavior: modulation of locomotion by light in the Drosophila melanogaster larva requires genetically distinct visual system functions</article-title>. <source>J Neurosci</source> <volume>19</volume>: <fpage>3337</fpage>–<lpage>3344</lpage>.</mixed-citation>
      </ref>
      <ref id="pone.0041642-Scantlebury1">
        <label>31</label>
        <mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Scantlebury</surname><given-names>N</given-names></name>, <name name-style="western"><surname>Sajic</surname><given-names>R</given-names></name>, <name name-style="western"><surname>Campos</surname><given-names>AR</given-names></name> (<year>2007</year>) <article-title>Kinematic analysis of Drosophila larval locomotion in response to intermittent light pulses</article-title>. <source>Behav Genet</source> <volume>37</volume>: <fpage>513</fpage>–<lpage>524</lpage>.</mixed-citation>
      </ref>
      <ref id="pone.0041642-Sprecher1">
        <label>32</label>
        <mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Sprecher</surname><given-names>SG</given-names></name>, <name name-style="western"><surname>Desplan</surname><given-names>C</given-names></name> (<year>2008</year>) <article-title>Switch of rhodopsin expression in terminally differentiated Drosophila sensory neurons</article-title>. <source>Nature</source> <volume>454</volume>: <fpage>533</fpage>–<lpage>537</lpage>.</mixed-citation>
      </ref>
      <ref id="pone.0041642-Keene1">
        <label>33</label>
        <mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Keene</surname><given-names>AC</given-names></name>, <name name-style="western"><surname>Mazzoni</surname><given-names>EO</given-names></name>, <name name-style="western"><surname>Zhen</surname><given-names>J</given-names></name>, <name name-style="western"><surname>Younger</surname><given-names>MA</given-names></name>, <name name-style="western"><surname>Yamaguchi</surname><given-names>S</given-names></name>, <etal>et al</etal>. (<year>2011</year>) <article-title>Distinct visual pathways mediate Drosophila larval light avoidance and circadian clock entrainment</article-title>. <source>J Neurosci</source> <volume>31</volume>: <fpage>6527</fpage>–<lpage>6534</lpage>.</mixed-citation>
      </ref>
      <ref id="pone.0041642-Kane1">
        <label>34</label>
        <mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Kane</surname><given-names>E</given-names></name>, <name name-style="western"><surname>Gershow</surname><given-names>M</given-names></name>, <name name-style="western"><surname>Sprecher</surname><given-names>SG</given-names></name>, <name name-style="western"><surname>Samuel</surname><given-names>AD</given-names></name> <article-title>Sensorimotor basis for Drosophila larva phototaxis</article-title>. in preparation.</mixed-citation>
      </ref>
      <ref id="pone.0041642-Huston1">
        <label>35</label>
        <mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Huston</surname><given-names>SJ</given-names></name>, <name name-style="western"><surname>Jayaraman</surname><given-names>V</given-names></name> (<year>2011</year>) <article-title>Studying sensorimotor integration in insects</article-title>. <source>Curr Opin Neurobiol</source> <volume>21</volume>: <fpage>527</fpage>–<lpage>534</lpage>.</mixed-citation>
      </ref>
      <ref id="pone.0041642-GomezMarin3">
        <label>36</label>
        <mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Gomez-Marin</surname><given-names>A</given-names></name>, <name name-style="western"><surname>Duistermars</surname><given-names>BJ</given-names></name>, <name name-style="western"><surname>Frye</surname><given-names>MA</given-names></name>, <name name-style="western"><surname>Louis</surname><given-names>M</given-names></name> (<year>2010</year>) <article-title>Mechanisms of odor-tracking: multiple sensors for enhanced perception and behavior</article-title>. <source>Front Cell Neurosci</source> <volume>4</volume>: <fpage>6</fpage>.</mixed-citation>
      </ref>
      <ref id="pone.0041642-Fraenkel1">
        <label>37</label>
        <mixed-citation publication-type="book" xlink:type="simple"><name name-style="western"><surname>Fraenkel</surname><given-names>GS</given-names></name>, <name name-style="western"><surname>Gunn</surname><given-names>DL</given-names></name> (<year>1961</year>) <source>The Orientation of Animals</source>. <publisher-loc>New York</publisher-loc>: <publisher-name>Dover Publications</publisher-name>.</mixed-citation>
      </ref>
      <ref id="pone.0041642-Webster1">
        <label>38</label>
        <mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Webster</surname><given-names>DR</given-names></name>, <name name-style="western"><surname>Weissburg</surname><given-names>MJ</given-names></name> (<year>2009</year>) <article-title>The Hydrodynamics of Chemical Cues Among Aquatic Organisms</article-title>. <source>Annual Review of Fluid Mechanics</source> <volume>41</volume>: <fpage>73</fpage>–<lpage>90</lpage>.</mixed-citation>
      </ref>
      <ref id="pone.0041642-Carde1">
        <label>39</label>
        <mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Carde</surname><given-names>RT</given-names></name>, <name name-style="western"><surname>Willis</surname><given-names>MA</given-names></name> (<year>2008</year>) <article-title>Navigational strategies used by insects to find distant, wind-borne sources of odor</article-title>. <source>J Chem Ecol</source> <volume>34</volume>: <fpage>854</fpage>–<lpage>866</lpage>.</mixed-citation>
      </ref>
      <ref id="pone.0041642-Min1">
        <label>40</label>
        <mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Min</surname><given-names>TL</given-names></name>, <name name-style="western"><surname>Mears</surname><given-names>PJ</given-names></name>, <name name-style="western"><surname>Chubiz</surname><given-names>LM</given-names></name>, <name name-style="western"><surname>Rao</surname><given-names>CV</given-names></name>, <name name-style="western"><surname>Golding</surname><given-names>I</given-names></name>, <etal>et al</etal>. (<year>2009</year>) <article-title>High-resolution, long-term characterization of bacterial motility using optical tweezers</article-title>. <source>Nat Methods</source> <volume>6</volume>: <fpage>831</fpage>–<lpage>835</lpage>.</mixed-citation>
      </ref>
      <ref id="pone.0041642-Berg1">
        <label>41</label>
        <mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Berg</surname><given-names>HC</given-names></name>, <name name-style="western"><surname>Brown</surname><given-names>DA</given-names></name> (<year>1972</year>) <article-title>Chemotaxis in Escherichia coli analysed by three-dimensional tracking</article-title>. <source>Nature</source> <volume>239</volume>: <fpage>500</fpage>–<lpage>504</lpage>.</mixed-citation>
      </ref>
      <ref id="pone.0041642-PierceShimomura1">
        <label>42</label>
        <mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Pierce-Shimomura</surname><given-names>JT</given-names></name>, <name name-style="western"><surname>Morse</surname><given-names>TM</given-names></name>, <name name-style="western"><surname>Lockery</surname><given-names>SR</given-names></name> (<year>1999</year>) <article-title>The fundamental role of pirouettes in Caenorhabditis elegans chemotaxis</article-title>. <source>J Neurosci</source> <volume>19</volume>: <fpage>9557</fpage>–<lpage>9569</lpage>.</mixed-citation>
      </ref>
      <ref id="pone.0041642-Lockery1">
        <label>43</label>
        <mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Lockery</surname><given-names>SR</given-names></name> (<year>2011</year>) <article-title>The computational worm: spatial orientation and its neuronal basis in C. elegans</article-title>. <source>Curr Opin Neurobiol</source></mixed-citation>
      </ref>
      <ref id="pone.0041642-Iino1">
        <label>44</label>
        <mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Iino</surname><given-names>Y</given-names></name>, <name name-style="western"><surname>Yoshida</surname><given-names>K</given-names></name> (<year>2009</year>) <article-title>Parallel use of two behavioral mechanisms for chemotaxis in Caenorhabditis elegans</article-title>. <source>J Neurosci</source> <volume>29</volume>: <fpage>5370</fpage>–<lpage>5380</lpage>.</mixed-citation>
      </ref>
      <ref id="pone.0041642-Orgogozo1">
        <label>45</label>
        <mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Orgogozo</surname><given-names>V</given-names></name>, <name name-style="western"><surname>Broman</surname><given-names>KW</given-names></name>, <name name-style="western"><surname>Stern</surname><given-names>DL</given-names></name> (<year>2006</year>) <article-title>High-resolution quantitative trait locus mapping reveals sign epistasis controlling ovariole number between two Drosophila species</article-title>. <source>Genetics</source> <volume>173</volume>: <fpage>197</fpage>–<lpage>205</lpage>.</mixed-citation>
      </ref>
      <ref id="pone.0041642-Inoue1">
        <label>46</label>
        <mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Inoue</surname><given-names>T</given-names></name>, <name name-style="western"><surname>Kumamoto</surname><given-names>H</given-names></name>, <name name-style="western"><surname>Okamoto</surname><given-names>K</given-names></name>, <name name-style="western"><surname>Umesono</surname><given-names>Y</given-names></name>, <name name-style="western"><surname>Sakai</surname><given-names>M</given-names></name>, <etal>et al</etal>. (<year>2004</year>) <article-title>Morphological and functional recovery of the planarian photosensing system during head regeneration</article-title>. <source>Zoolog Sci</source> <volume>21</volume>: <fpage>275</fpage>–<lpage>283</lpage>.</mixed-citation>
      </ref>
      <ref id="pone.0041642-Jekely1">
        <label>47</label>
        <mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Jekely</surname><given-names>G</given-names></name>, <name name-style="western"><surname>Colombelli</surname><given-names>J</given-names></name>, <name name-style="western"><surname>Hausen</surname><given-names>H</given-names></name>, <name name-style="western"><surname>Guy</surname><given-names>K</given-names></name>, <name name-style="western"><surname>Stelzer</surname><given-names>E</given-names></name>, <etal>et al</etal>. (<year>2008</year>) <article-title>Mechanism of phototaxis in marine zooplankton</article-title>. <source>Nature</source> <volume>456</volume>: <fpage>395</fpage>–<lpage>399</lpage>.</mixed-citation>
      </ref>
      <ref id="pone.0041642-Crespo1">
        <label>48</label>
        <mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Crespo</surname><given-names>JE</given-names></name>, <name name-style="western"><surname>Lazzari</surname><given-names>CR</given-names></name>, <name name-style="western"><surname>Castelo</surname><given-names>MK</given-names></name> (<year>2011</year>) <article-title>Orientation mechanisms and sensory organs involved in host location in a dipteran parasitoid larva</article-title>. <source>Journal of Insect Physiology</source> <volume>57</volume>: <fpage>191</fpage>–<lpage>196</lpage>.</mixed-citation>
      </ref>
      <ref id="pone.0041642-Stephens2">
        <label>49</label>
        <mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Stephens</surname><given-names>GJ</given-names></name>, <name name-style="western"><surname>Osborne</surname><given-names>LC</given-names></name>, <name name-style="western"><surname>Bialek</surname><given-names>W</given-names></name> (<year>2011</year>) <article-title>Searching for simplicity in the analysis of neurons and behavior</article-title>. <source>Proc Natl Acad Sci U S A</source> <volume>108 Suppl 3</volume>: <fpage>15565</fpage>–<lpage>15571</lpage>.</mixed-citation>
      </ref>
    </ref-list>
  </back>
</article>