<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD JATS (Z39.96) Journal Publishing DTD v1.3 20210610//EN" "http://jats.nlm.nih.gov/publishing/1.3/JATS-journalpublishing1-3.dtd">
<article article-type="research-article" dtd-version="1.3" xml:lang="en" xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xlink="http://www.w3.org/1999/xlink">
<front>
<journal-meta>
<journal-id journal-id-type="nlm-ta">PJS</journal-id>
<journal-id journal-id-type="publisher-id">Premier Journal of Science</journal-id>
<journal-id journal-id-type="pmc">PJS</journal-id>
<journal-title-group>
<journal-title>PJ Science</journal-title>
</journal-title-group>
<issn pub-type="epub">3049-9011</issn>
<publisher>
<publisher-name>Premier Science</publisher-name>
<publisher-loc>London, UK</publisher-loc>
</publisher>
</journal-meta>
<article-meta>
<article-id pub-id-type="doi">10.70389/PJS.100144</article-id>
<article-categories>
<subj-group subj-group-type="heading">
<subject>ORIGINAL RESEARCH</subject>
</subj-group>
<subj-group subj-group-type="Discipline-v3"><subject>Engineering and technology</subject><subj-group><subject>Mechanical engineering</subject><subj-group><subject>Robotics</subject></subj-group></subj-group></subj-group>
<subj-group subj-group-type="Discipline-v3"><subject>Computer and information sciences</subject><subj-group><subject>Artificial intelligence</subject><subj-group><subject>Machine learning</subject><subj-group><subject>Deep learning</subject></subj-group></subj-group></subj-group></subj-group>
<subj-group subj-group-type="Discipline-v3"><subject>Computer and information sciences</subject><subj-group><subject>Computer vision</subject><subj-group><subject>Object detection</subject></subj-group></subj-group></subj-group>
<subj-group subj-group-type="Discipline-v3"><subject>Ecology and environmental sciences</subject><subj-group><subject>Pollution</subject><subj-group><subject>Water pollution</subject></subj-group></subj-group></subj-group>
<subj-group subj-group-type="Discipline-v3"><subject>Earth sciences</subject><subj-group><subject>Marine and aquatic sciences</subject><subj-group><subject>Bodies of water</subject><subj-group><subject>Canals</subject></subj-group></subj-group></subj-group></subj-group>
<subj-group subj-group-type="Discipline-v3"><subject>Engineering and technology</subject><subj-group><subject>Environmental engineering</subject><subj-group><subject>Waste management</subject></subj-group></subj-group></subj-group>
<subj-group subj-group-type="Discipline-v3"><subject>Engineering and technology</subject><subj-group><subject>Signal processing</subject><subj-group><subject>Image processing</subject></subj-group></subj-group></subj-group>
</article-categories>
<title-group>
<article-title>Autonomous Watercraft for Cleanup of Floating Waste in Water Bodies Using YOLO</article-title>
</title-group>
<contrib-group>
<contrib contrib-type="author" corresp="yes">
<name>
<surname>Ashok</surname>
<given-names>S</given-names>
</name>
<xref ref-type="aff" rid="aff1"><sup>1</sup></xref>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Anne Joan Venita</surname>
<given-names>V</given-names>
</name>
<xref ref-type="aff" rid="aff2"><sup>2</sup></xref>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Gowri Shankari</surname>
<given-names>E</given-names>
</name>
<xref ref-type="aff" rid="aff2"><sup>2</sup></xref>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Ranjana</surname>
<given-names>M</given-names>
</name>
<xref ref-type="aff" rid="aff2"><sup>2</sup></xref>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Prabhu</surname>
<given-names>V</given-names>
</name>
<xref ref-type="aff" rid="aff3"><sup>3</sup></xref>
</contrib>
<aff id="aff1"><sup>1</sup><institution>Associate Professor, Dept of ECE Vel Tech Multi Tech Dr. Rangarajan Dr. Sakunthala Engineering College (Autonomous)</institution>, <city>Chennai</city>, <state>Tamil Nadu</state>, <country>India</country></aff>
<aff id="aff2"><sup>2</sup><institution>UG Scholar, Dept of ECE, Vel Tech Multi Tech Dr. Rangarajan Dr. Sakunthala Engineering College (Autonomous)</institution>, <city>Chennai</city>, <state>Tamil Nadu</state>, <country>India</country></aff>
<aff id="aff3"><sup>3</sup><institution>Professor Dept of ECE, Dept of ECE, Vel Tech Multi Tech Dr. Rangarajan Dr. Sakunthala Engineering College (Autonomous)</institution>, <city>Chennai</city>, <state>Tamil Nadu</state>, <country>India</country></aff>
</contrib-group>
<author-notes>
<corresp id="cor001"><bold>Correspondence to:</bold> S Ashok, <email>sashok@veltechmultitech.org</email></corresp>
<fn fn-type="other"><p>Peer Review</p></fn>
</author-notes>
<pub-date pub-type="epub">
<day>27</day>
<month>12</month>
<year>2025</year>
</pub-date>
<pub-date pub-type="collection">
<month>12</month>
<year>2025</year>
</pub-date>
<volume>15</volume>
<issue>1</issue>
<elocation-id>100144</elocation-id>
<history>
<date date-type="received">
<day>13</day>
<month>08</month>
<year>2025</year>
</date>
<date date-type="rev-recd">
<day>26</day>
<month>09</month>
<year>2025</year>
</date>
<date date-type="accepted">
<day>11</day>
<month>10</month>
<year>2025</year>
</date>
</history>
<permissions>
<copyright-year>2025</copyright-year>
<copyright-holder>S Ashok, V Anne Joan Venita, E Gowri Shankari, M Ranjana and V Prabhu</copyright-holder>
<license xlink:href="http://creativecommons.org/licenses/by/4.0/" xlink:type="simple">
<license-p>This is an open access article distributed under the terms of the <ext-link ext-link-type="uri" xlink:href="http://creativecommons.org/licenses/by/4.0/" xlink:type="simple">Creative Commons Attribution License</ext-link>, which permits unrestricted use, distribution, and reproduction in any medium, provided the original author and source are credited.</license-p>
</license>
</permissions>
<self-uri content-type="pdf" xlink:href="info:doi/10.70389/PJS.100144"/>
<abstract>
<p>The proposed work deals with the very pertinent issue of plastic waste within enclosed water bodies through the design and development of an innovative Unmanned Surface Vehicle (USV) targeted specifically towards effective debris gathering. The modular approach draws upon a catamaran-like configuration, applying PVC hulls to provide added buoyancy and stability while introducing a modular debris collection system designed to improve capture and removal rates of floating waste. Actuated by DC motors, the controls are exercised through an ESP32 microcontroller that makes the USV equally functional in remote-controlled as well as autonomous modes. Thus, it is highly adaptable to various operational environments. In the autonomous mode of operation, an onboard camera has been installed so that Python-based computer vision algorithms can be utilized for real-time detection of debris and corresponding navigation, ensuring accurate collection of waste materials. It is an agile yet eco-friendly solution that works efficiently under many conditions rather than just one, enabling better plastic waste management. The USV provides a more robust, automated and repeatable approach to debris gathering, which reduces workforce intervention during remediation operations and supports efforts aimed at preserving aquatic ecosystems.</p>
</abstract>
<kwd-group kwd-group-type="author">
<kwd>Catamaran USV</kwd>
<kwd>YOLOv8 debris detection</kwd>
<kwd>ESP32 motor control</kwd>
<kwd>Modular floating waste collector</kwd>
<kwd>Autonomous narrow-canal navigation</kwd>
</kwd-group>
<counts>
<fig-count count="5"/>
<table-count count="1"/>
<page-count count="5"/>
</counts>
<custom-meta-group>
<custom-meta>
<meta-name>Version accepted</meta-name>
<meta-value>3</meta-value>
</custom-meta>
</custom-meta-group>
</article-meta>
</front>
<body>
<sec>
<title><ext-link ext-link-type="uri" xlink:href="https://premierscience.com/wp-content/uploads/2025/15/pjs-25-1164.pdf">Source-File: pjs-25-1164.pdf</ext-link></title>
</sec>
<sec id="sec001" sec-type="intro">
<title>Introduction</title>
<p>Floating waste pollution has emerged as one of the most pressing global environmental challenges of the 21st century. It severely impacts biodiversity, disrupts fragile aquatic ecosystems, hampers the livelihoods of communities relying on water resources, and endangers public health. The problem is especially acute in urban and semi-urban regions where garbage often accumulates in narrow canals, lakes, and drainage systems. These water bodies, while small in size, play a critical role in water circulation and community sanitation but are frequently neglected due to the difficulty of accessing and cleaning them. Traditional methods for cleaning such polluted water bodies rely heavily on manual labour or bulky mechanical skimmers and nets. Manual cleaning exposes workers to harmful pathogens and chemicals found in contaminated water, posing serious health risks. Moreover, these efforts are time-consuming, labour-intensive, and not scalable. On the other hand, large mechanical solutions are not only expensive to deploy and maintain but also impractical for narrow or shallow water bodies due to their size and limited manoeuvrability. Although some autonomous robotic solutions have been developed to address the issue of floating waste collection, most are designed for open or large water bodies. These existing systems often face significant limitations in confined environments&#x2014;they lack the agility to navigate tight spaces, the precision needed for obstacle avoidance in cluttered areas, or the ability to efficiently collect waste without obstructing their own movement. In some cases, their waste collection mechanisms are bulky, inefficient, or unable to adapt to varying water flow and waste patterns in constrained settings. Our project aims to bridge these gaps by designing and building a compact, remote-controlled watercraft with autonomous navigation capabilities specifically tailored for narrow and restricted water bodies. 
This watercraft is engineered for stability and agility, allowing it to move efficiently through tight spaces without losing balance or direction. Unlike conventional robots, our solution includes a built-in scooping mechanism that collects floating debris without interrupting the watercraft&#x2019;s movement. This design minimizes drag and prevents entanglement, ensuring continuous and efficient operation. The use of embedded systems, smart sensors, and real-time object detection using computer vision technologies (like YOLOv8) enables the craft to autonomously detect and avoid obstacles, recognize waste materials, and make navigation decisions dynamically. By automating the entire waste collection process, our solution reduces the need for human intervention, lowers the risk of exposure to harmful substances, and enables consistent cleanup operations in areas that are typically neglected due to their inaccessibility.</p>
</sec>
<sec id="sec002">
<title>Earlier Works</title>
<p>Numerous efforts have been made to tackle the challenges of waste management, particularly in automating the processes of waste sorting and removal. One study proposed a hybrid approach that combines automated and manual techniques using Near-Infrared (NIR) and Visible (VIS) optical sorting systems. While this method improves sorting purity and efficiency, its performance heavily depends on system calibration, material composition, and other environmental factors, limiting its adaptability in complex, dynamic environments.<sup><xref ref-type="bibr" rid="ref1">1</xref></sup> Another study introduced RWCNet, a deep learning model based on the TrashNet dataset, to classify recyclable garbage into six distinct categories. This demonstrated the growing potential of AI and machine learning in waste sorting, especially in handling the variability of waste characteristics. However, its primary focus remained on classification accuracy within controlled datasets, not on real-time application in water-based environments.<sup><xref ref-type="bibr" rid="ref2">2</xref></sup> Further research addressed robotic systems for sorting mixed industrial waste, focusing on three technical aspects: adaptive end-effectors, high-accuracy sensing, and advanced planning algorithms. These studies made significant strides in improving robotic dexterity and object identification, but were largely designed for solid-ground industrial settings, not aquatic or floating waste collection.<sup><xref ref-type="bibr" rid="ref3">3</xref></sup> Work in sustainable construction also explored reconfigurable systems using modular self-locking blocks (SL-blocks), offering insights into structural adaptability. 
Though innovative, this research remains in the construction domain and does not intersect with waste-cleaning robotics.<sup><xref ref-type="bibr" rid="ref4">4</xref></sup> One of the most relevant works for our project introduced a floating robotic platform equipped with a conveyor belt and oil-water separation system for river cleanup. This early-stage concept illustrated a promising direction for aquatic waste collection. However, it lacked detailed implementation for narrow or highly restricted water bodies, and did not address dynamic navigation, debris detection, or integration of real-time computer vision.<sup><xref ref-type="bibr" rid="ref5">5</xref></sup></p>
</sec>
<sec id="sec003">
<title>Proposed Methodology</title>
<p>The methodology of the autonomous watercraft is to develop a cost-efficient system to clean floating waste from water bodies, especially in constrained locations such as canals and small rivers. This system is required to work autonomously, using computer vision and path planning for garbage detection and collection. The system must be able to operate in different environmental conditions while focusing on being eco-friendly, requiring low human maintenance intervention and reasonable cost.<sup><xref ref-type="bibr" rid="ref6">6</xref></sup> The use of real-time waste detection and path planning algorithms allows the continuous operation of the automated watercraft, which helps in promoting environmental conservation with the help of technology. The hardware system of the autonomous watercraft is designed to efficiently clean up floating waste in water bodies. The watercraft has a catamaran-style frame made of lightweight PVC pipe for durability, buoyancy and stability. Propulsion is driven by DC motors connected to propellers, whose speed and direction are controlled by an L298N motor driver. The ESP32 microcontroller acts as a central processing device, integrating information from sensors and cameras, performing motion commands and managing wireless communications. The schematic diagram of the autonomous boat for collecting waste is shown in <xref ref-type="fig" rid="F1">Figure 1</xref>.<sup><xref ref-type="bibr" rid="ref7">7</xref></sup></p>
<fig id="F1" position="float">
<object-id pub-id-type="doi">10.70389/journal.PJS.100144.g001</object-id>
<label>Fig 1</label>
<caption><title>Schematic diagram of the autonomous boat for collecting waste</title></caption>
<p><ext-link ext-link-type="uri" xlink:href="https://i0.wp.com/premierscience.com/wp-content/uploads/2025/15/pjs-25-1164-Figure-1.webp?">Figure 1</ext-link></p>
</fig>
<p>In <xref ref-type="fig" rid="F1">Figure 1</xref>, the ESP32 microcontroller acts as a central unit for controlling all components. It uses the Wi-Fi and Bluetooth capabilities of the ESP32 for wirelessly controlling all the components of the system. Lithium cell holders are used for powering the ESP32, the DC motors, and the other components. L298N motor drivers enable speed and direction control of the four DC motors that drive the attached propellers for movement. The 3-inch pipes and caps form a buoyant structural framework that provides the buoyancy needed to keep the system stable, and the square basket setup serves to collect debris or for other tasks as needed. Together, these components make a modular and efficient design for an autonomous or remotely operated watercraft.<sup><xref ref-type="bibr" rid="ref8">8</xref></sup></p>
<p><xref ref-type="fig" rid="F2">Figure 2</xref> shows the working process of the autonomous boat, which first loads the YOLO object detection model, then connects the camera, establishes a WebSocket for communication purposes, and captures live video frames continuously. These live video frames are processed for the purpose of detecting waste in them using the YOLO model. If no waste is detected, the system keeps processing the incoming frames. If waste is found, the boat computes the Euclidean distance between the waste and the boat. The boat sends all commands through the ESP32 microcontroller, which controls the motors by setting their direction to approach the waste. After aligning toward and following the waste, it proceeds to pick it up. Once the waste is collected into the basket, the movement of the boat comes to a stop. However, if the waste is still in the water, the system dynamically recalculates directions and moves accordingly until the task is complete.<sup><xref ref-type="bibr" rid="ref9">9</xref></sup></p>
<fig id="F2" position="float">
<object-id pub-id-type="doi">10.70389/journal.PJS.100144.g002</object-id>
<label>Fig 2</label>
<caption><title>Flowchart representation of the work flow of the autonomous watercraft using YOLOv8</title></caption>
<p><ext-link ext-link-type="uri" xlink:href="https://i0.wp.com/premierscience.com/wp-content/uploads/2025/15/pjs-25-1164-Figure-2.webp?">Figure 2</ext-link></p>
</fig>
</sec>
<sec id="sec004">
<title>Result and Discussions</title>
<p>The autonomous, self-driven watercraft designed in this study applies computer vision for identifying and retrieving floating debris within an aquatic ecosystem. The current system uses a YOLO-based framework, and its accuracy has reached up to 92% for detecting surface debris such as plastics and other waste materials under different environmental conditions.</p>
<p><xref ref-type="fig" rid="F3">Figure 3</xref> shows the hardware setup of the autonomous boat used for collecting waste which has integrated proximity sensors and navigation algorithms for obstacle avoidance, hence assuring dependability even in highly confined aquatic systems like canals and narrow rivers.<sup><xref ref-type="bibr" rid="ref10">10</xref>,<xref ref-type="bibr" rid="ref11">11</xref></sup> Hardware is made of a PVC catamaran style frame, with DC motor propulsion controlled by an ESP32 microcontroller. It not only automates the process of waste cleanup but also minimizes human intervention towards sustainable water resource management, which further improves the health of the aquatic ecosystem.</p>
<fig id="F3" position="float">
<object-id pub-id-type="doi">10.70389/journal.PJS.100144.g003</object-id>
<label>Fig 3</label>
<caption><title>Hardware setup of the autonomous boat used for collecting waste</title></caption>
<p><ext-link ext-link-type="uri" xlink:href="https://i0.wp.com/premierscience.com/wp-content/uploads/2025/15/pjs-25-1164-Figure-3.webp?">Figure 3</ext-link></p>
</fig>
<p><xref ref-type="fig" rid="F4">Figure 4</xref> demonstrates the YOLO-based object detection model applied to identify and classify different kinds of waste. Each object, such as bottles, brushes and containers, appears inside a bounding box with a class label and its corresponding confidence score. The model analyses the scene and localizes multiple objects even when they overlap.</p>
<fig id="F4" position="float">
<object-id pub-id-type="doi">10.70389/journal.PJS.100144.g004</object-id>
<label>Fig 4</label>
<caption><title>Visual representation of waste detection and classification using YOLOv8</title></caption>
<p><ext-link ext-link-type="uri" xlink:href="https://i0.wp.com/premierscience.com/wp-content/uploads/2025/15/pjs-25-1164-Figure-4.webp?">Figure 4</ext-link></p>
</fig>
<p><xref ref-type="fig" rid="F5">Figure 5</xref> represents the training and validation progression of our YOLOv8 model regarding floating debris detection. The box_loss and cls_loss depict a gradually decreasing trend, meaning the model learns to predict correct bounding boxes, classify debris and localize objects. In the same way, the validation metrics, such as precision and recall, increase progressively, indicating good generalization to unseen data. In other words, increased precision means that the model becomes more accurate at pinpointing actual debris, whereas increasing recall indicates that the model is correctly detecting more and more floating debris. This denotes the general effectiveness of the model in detecting and locating debris with high precision and reliability.<sup><xref ref-type="bibr" rid="ref12">12</xref>,<xref ref-type="bibr" rid="ref13">13</xref></sup></p>
<fig id="F5" position="float">
<object-id pub-id-type="doi">10.70389/journal.PJS.100144.g005</object-id>
<label>Fig 5</label>
<caption><title>Detection accuracy versus scale metrics of the trained YOLOv8 model</title></caption>
<p><ext-link ext-link-type="uri" xlink:href="https://i0.wp.com/premierscience.com/wp-content/uploads/2025/15/pjs-25-1164-Figure-5.webp?">Figure 5</ext-link></p>
</fig>
<p><xref ref-type="table" rid="T1">Table 1</xref> depicts the training history of the YOLOv8 model over 35 epochs, showing key metrics for both training and validation. The training losses, box_loss, cls_loss and dfl_loss, are all declining, indicating that the model is getting better at predicting accurate bounding boxes, classifying debris, and locating objects. Validation losses also tend to follow a downward trend, indicating good generalization to unseen data. Metrics such as precision and recall indicate how well the model is doing in detecting true positives and minimizing false negatives.<sup><xref ref-type="bibr" rid="ref14">14</xref>,<xref ref-type="bibr" rid="ref15">15</xref></sup> Further, the learning rates for the different parameter groups decrease step by step to ensure fine-tuning during training.</p>
<table-wrap id="T1">
<label>Table 1</label>
<caption><title>Training and validation metrics from the testing optimization of the YOLOv8 model</title></caption>
<table cellspacing="5" cellpadding="5" frame="hsides" rules="groups">
<thead>
<tr>
<th valign="top" align="left">S. No</th>
<th valign="top" align="center">train/box_loss</th>
<th valign="top" align="center">train/cls_loss</th>
<th valign="top" align="center">train/dfl_loss</th>
<th valign="top" align="center">metrics/precision(B)</th>
<th valign="top" align="center">metrics/recall(B)</th>
<th valign="top" align="center">metrics/mAP50(B)</th>
<th valign="top" align="center">metrics/mAP50-95(B)</th>
<th valign="top" align="center">val/box_loss</th>
<th valign="top" align="center">val/cls_loss</th>
<th valign="top" align="center">val/dfl_loss</th>
<th valign="top" align="center">lr/pg0</th>
<th valign="top" align="center">lr/pg1</th>
<th valign="top" align="center">lr/pg2</th>
</tr>
</thead>
<tbody>
<tr>
<td valign="top" align="left">1</td>
<td valign="top" align="center">0.86832</td>
<td valign="top" align="center">1.8963</td>
<td valign="top" align="center">1.4268</td>
<td valign="top" align="center">0.5437</td>
<td valign="top" align="center">0.36758</td>
<td valign="top" align="center">0.385138</td>
<td valign="top" align="center">0.22156</td>
<td valign="top" align="center">1.3113</td>
<td valign="top" align="center">2.611</td>
<td valign="top" align="center">1.9672</td>
<td valign="top" align="center">0.000255</td>
<td valign="top" align="center">0.000255</td>
<td valign="top" align="center">0.000255</td>
</tr>
<tr>
<td valign="top" align="left">2</td>
<td valign="top" align="center">0.99207</td>
<td valign="top" align="center">1.4881</td>
<td valign="top" align="center">1.5159</td>
<td valign="top" align="center">0.46</td>
<td valign="top" align="center">0.40753</td>
<td valign="top" align="center">0.361938</td>
<td valign="top" align="center">0.21568</td>
<td valign="top" align="center">1.3179</td>
<td valign="top" align="center">1.9743</td>
<td valign="top" align="center">1.9173</td>
<td valign="top" align="center">0.000497</td>
<td valign="top" align="center">0.000497</td>
<td valign="top" align="center">0.000497</td>
</tr>
<tr>
<td valign="top" align="left">3</td>
<td valign="top" align="center">1.0091</td>
<td valign="top" align="center">1.4252</td>
<td valign="top" align="center">1.5522</td>
<td valign="top" align="center">0.53095</td>
<td valign="top" align="center">0.46609</td>
<td valign="top" align="center">0.409931</td>
<td valign="top" align="center">0.27141</td>
<td valign="top" align="center">1.1884</td>
<td valign="top" align="center">1.591</td>
<td valign="top" align="center">1.5972</td>
<td valign="top" align="center">0.000724</td>
<td valign="top" align="center">0.000724</td>
<td valign="top" align="center">0.000724</td>
</tr>
<tr>
<td valign="top" align="left">4</td>
<td valign="top" align="center">0.94573</td>
<td valign="top" align="center">1.3029</td>
<td valign="top" align="center">1.4467</td>
<td valign="top" align="center">0.61742</td>
<td valign="top" align="center">0.40943</td>
<td valign="top" align="center">0.429511</td>
<td valign="top" align="center">0.34066</td>
<td valign="top" align="center">0.99779</td>
<td valign="top" align="center">1.5138</td>
<td valign="top" align="center">1.5929</td>
<td valign="top" align="center">0.000704</td>
<td valign="top" align="center">0.000704</td>
<td valign="top" align="center">0.000704</td>
</tr>
<tr>
<td valign="top" align="left">5</td>
<td valign="top" align="center">0.87881</td>
<td valign="top" align="center">1.1988</td>
<td valign="top" align="center">1.4226</td>
<td valign="top" align="center">0.78834</td>
<td valign="top" align="center">0.50579</td>
<td valign="top" align="center">0.609746</td>
<td valign="top" align="center">0.34915</td>
<td valign="top" align="center">1.0369</td>
<td valign="top" align="center">1.2697</td>
<td valign="top" align="center">1.6996</td>
<td valign="top" align="center">0.000682</td>
<td valign="top" align="center">0.000682</td>
<td valign="top" align="center">0.000682</td>
</tr>
<tr>
<td valign="top" align="left">6</td>
<td valign="top" align="center">0.81641</td>
<td valign="top" align="center">1.1097</td>
<td valign="top" align="center">1.4452</td>
<td valign="top" align="center">0.5128</td>
<td valign="top" align="center">0.552</td>
<td valign="top" align="center">0.387346</td>
<td valign="top" align="center">0.32945</td>
<td valign="top" align="center">0.94654</td>
<td valign="top" align="center">1.2847</td>
<td valign="top" align="center">1.6188</td>
<td valign="top" align="center">0.000662</td>
<td valign="top" align="center">0.000662</td>
<td valign="top" align="center">0.000662</td>
</tr>
<tr>
<td valign="top" align="left">7</td>
<td valign="top" align="center">0.74164</td>
<td valign="top" align="center">0.9971</td>
<td valign="top" align="center">1.3475</td>
<td valign="top" align="center">0.81303</td>
<td valign="top" align="center">0.5802</td>
<td valign="top" align="center">0.682933</td>
<td valign="top" align="center">0.35371</td>
<td valign="top" align="center">0.78115</td>
<td valign="top" align="center">0.95851</td>
<td valign="top" align="center">1.3164</td>
<td valign="top" align="center">0.000637</td>
<td valign="top" align="center">0.000637</td>
<td valign="top" align="center">0.000637</td>
</tr>
<tr>
<td valign="top" align="left">8</td>
<td valign="top" align="center">0.73875</td>
<td valign="top" align="center">0.94551</td>
<td valign="top" align="center">1.295</td>
<td valign="top" align="center">0.81909</td>
<td valign="top" align="center">0.61411</td>
<td valign="top" align="center">0.691162</td>
<td valign="top" align="center">0.36176</td>
<td valign="top" align="center">0.70689</td>
<td valign="top" align="center">0.91412</td>
<td valign="top" align="center">1.384</td>
<td valign="top" align="center">0.000617</td>
<td valign="top" align="center">0.000617</td>
<td valign="top" align="center">0.000617</td>
</tr>
<tr>
<td valign="top" align="left">9</td>
<td valign="top" align="center">0.70927</td>
<td valign="top" align="center">0.86547</td>
<td valign="top" align="center">1.2857</td>
<td valign="top" align="center">0.87931</td>
<td valign="top" align="center">0.59844</td>
<td valign="top" align="center">0.749034</td>
<td valign="top" align="center">0.6087</td>
<td valign="top" align="center">0.66538</td>
<td valign="top" align="center">0.82743</td>
<td valign="top" align="center">1.2789</td>
<td valign="top" align="center">0.000595</td>
<td valign="top" align="center">0.000595</td>
<td valign="top" align="center">0.000595</td>
</tr>
<tr>
<td valign="top" align="left">10</td>
<td valign="top" align="center">0.69069</td>
<td valign="top" align="center">0.85253</td>
<td valign="top" align="center">1.271</td>
<td valign="top" align="center">0.87355</td>
<td valign="top" align="center">0.61984</td>
<td valign="top" align="center">0.705898</td>
<td valign="top" align="center">0.58454</td>
<td valign="top" align="center">0.61765</td>
<td valign="top" align="center">0.75923</td>
<td valign="top" align="center">1.2222</td>
<td valign="top" align="center">0.000573</td>
<td valign="top" align="center">0.000573</td>
<td valign="top" align="center">0.000573</td>
</tr>
<tr>
<td valign="top" align="left">11</td>
<td valign="top" align="center">0.68632</td>
<td valign="top" align="center">0.78908</td>
<td valign="top" align="center">1.2435</td>
<td valign="top" align="center">0.74821</td>
<td valign="top" align="center">0.66196</td>
<td valign="top" align="center">0.681902</td>
<td valign="top" align="center">0.54009</td>
<td valign="top" align="center">0.61803</td>
<td valign="top" align="center">0.71931</td>
<td valign="top" align="center">1.2135</td>
<td valign="top" align="center">0.000551</td>
<td valign="top" align="center">0.000551</td>
<td valign="top" align="center">0.000551</td>
</tr>
<tr>
<td valign="top" align="left">12</td>
<td valign="top" align="center">0.6447</td>
<td valign="top" align="center">0.76846</td>
<td valign="top" align="center">1.2339</td>
<td valign="top" align="center">0.76995</td>
<td valign="top" align="center">0.62253</td>
<td valign="top" align="center">0.684673</td>
<td valign="top" align="center">0.56453</td>
<td valign="top" align="center">0.61994</td>
<td valign="top" align="center">0.69437</td>
<td valign="top" align="center">1.2315</td>
<td valign="top" align="center">0.00053</td>
<td valign="top" align="center">0.00053</td>
<td valign="top" align="center">0.00053</td>
</tr>
<tr>
<td valign="top" align="left">13</td>
<td valign="top" align="center">0.60653</td>
<td valign="top" align="center">0.7142</td>
<td valign="top" align="center">1.2098</td>
<td valign="top" align="center">0.90191</td>
<td valign="top" align="center">0.72174</td>
<td valign="top" align="center">0.772803</td>
<td valign="top" align="center">0.75807</td>
<td valign="top" align="center">0.63872</td>
<td valign="top" align="center">0.69376</td>
<td valign="top" align="center">1.255</td>
<td valign="top" align="center">0.000508</td>
<td valign="top" align="center">0.000508</td>
<td valign="top" align="center">0.000508</td>
</tr>
<tr>
<td valign="top" align="left">14</td>
<td valign="top" align="center">0.5855</td>
<td valign="top" align="center">0.675</td>
<td valign="top" align="center">1.198</td>
<td valign="top" align="center">0.78955</td>
<td valign="top" align="center">0.60034</td>
<td valign="top" align="center">0.671593</td>
<td valign="top" align="center">0.65807</td>
<td valign="top" align="center">0.63879</td>
<td valign="top" align="center">0.67395</td>
<td valign="top" align="center">1.229</td>
<td valign="top" align="center">0.000486</td>
<td valign="top" align="center">0.000486</td>
<td valign="top" align="center">0.000486</td>
</tr>
<tr>
<td valign="top" align="left">15</td>
<td valign="top" align="center">0.57202</td>
<td valign="top" align="center">0.65889</td>
<td valign="top" align="center">1.1789</td>
<td valign="top" align="center">0.78338</td>
<td valign="top" align="center">0.64587</td>
<td valign="top" align="center">0.68871</td>
<td valign="top" align="center">0.67513</td>
<td valign="top" align="center">0.54675</td>
<td valign="top" align="center">0.62129</td>
<td valign="top" align="center">1.1466</td>
<td valign="top" align="center">0.000443</td>
<td valign="top" align="center">0.000443</td>
<td valign="top" align="center">0.000443</td>
</tr>
<tr>
<td valign="top" align="left">16</td>
<td valign="top" align="center">0.56275</td>
<td valign="top" align="center">0.64614</td>
<td valign="top" align="center">1.1689</td>
<td valign="top" align="center">0.73862</td>
<td valign="top" align="center">0.64565</td>
<td valign="top" align="center">0.670156</td>
<td valign="top" align="center">0.65758</td>
<td valign="top" align="center">0.53836</td>
<td valign="top" align="center">0.59921</td>
<td valign="top" align="center">1.1567</td>
<td valign="top" align="center">0.000421</td>
<td valign="top" align="center">0.000421</td>
<td valign="top" align="center">0.000421</td>
</tr>
<tr>
<td valign="top" align="left">17</td>
<td valign="top" align="center">0.54307</td>
<td valign="top" align="center">0.62712</td>
<td valign="top" align="center">1.1572</td>
<td valign="top" align="center">0.77577</td>
<td valign="top" align="center">0.64456</td>
<td valign="top" align="center">0.685806</td>
<td valign="top" align="center">0.69082</td>
<td valign="top" align="center">0.54836</td>
<td valign="top" align="center">0.59921</td>
<td valign="top" align="center">1.1567</td>
<td valign="top" align="center">0.000399</td>
<td valign="top" align="center">0.000399</td>
<td valign="top" align="center">0.000399</td>
</tr>
<tr>
<td valign="top" align="left">18</td>
<td valign="top" align="center">0.53164</td>
<td valign="top" align="center">0.59858</td>
<td valign="top" align="center">1.153</td>
<td valign="top" align="center">0.92937</td>
<td valign="top" align="center">0.64187</td>
<td valign="top" align="center">0.78645</td>
<td valign="top" align="center">0.78645</td>
<td valign="top" align="center">0.50232</td>
<td valign="top" align="center">0.54711</td>
<td valign="top" align="center">1.1081</td>
<td valign="top" align="center">0.000377</td>
<td valign="top" align="center">0.000377</td>
<td valign="top" align="center">0.000377</td>
</tr>
<tr>
<td valign="top" align="left">19</td>
<td valign="top" align="center">0.51416</td>
<td valign="top" align="center">0.578</td>
<td valign="top" align="center">1.134</td>
<td valign="top" align="center">0.92119</td>
<td valign="top" align="center">0.71787</td>
<td valign="top" align="center">0.78645</td>
<td valign="top" align="center">0.78645</td>
<td valign="top" align="center">0.50329</td>
<td valign="top" align="center">0.55647</td>
<td valign="top" align="center">1.1181</td>
<td valign="top" align="center">0.000356</td>
<td valign="top" align="center">0.000356</td>
<td valign="top" align="center">0.000356</td>
</tr>
<tr>
<td valign="top" align="left">20</td>
<td valign="top" align="center">0.50436</td>
<td valign="top" align="center">0.57356</td>
<td valign="top" align="center">1.1277</td>
<td valign="top" align="center">0.93078</td>
<td valign="top" align="center">0.71698</td>
<td valign="top" align="center">0.79542</td>
<td valign="top" align="center">0.79542</td>
<td valign="top" align="center">0.49177</td>
<td valign="top" align="center">0.55558</td>
<td valign="top" align="center">1.1003</td>
<td valign="top" align="center">0.000334</td>
<td valign="top" align="center">0.000334</td>
<td valign="top" align="center">0.000334</td>
</tr>
<tr>
<td valign="top" align="left">21</td>
<td valign="top" align="center">0.48775</td>
<td valign="top" align="center">0.56036</td>
<td valign="top" align="center">1.1096</td>
<td valign="top" align="center">0.72123</td>
<td valign="top" align="center">0.70048</td>
<td valign="top" align="center">0.72123</td>
<td valign="top" align="center">0.70048</td>
<td valign="top" align="center">0.50368</td>
<td valign="top" align="center">0.55558</td>
<td valign="top" align="center">1.1003</td>
<td valign="top" align="center">0.000312</td>
<td valign="top" align="center">0.000312</td>
<td valign="top" align="center">0.000312</td>
</tr>
<tr>
<td valign="top" align="left">22</td>
<td valign="top" align="center">0.48336</td>
<td valign="top" align="center">0.54022</td>
<td valign="top" align="center">1.1162</td>
<td valign="top" align="center">0.89957</td>
<td valign="top" align="center">0.66901</td>
<td valign="top" align="center">0.7952</td>
<td valign="top" align="center">0.7952</td>
<td valign="top" align="center">0.4917</td>
<td valign="top" align="center">0.54238</td>
<td valign="top" align="center">1.0923</td>
<td valign="top" align="center">0.00029</td>
<td valign="top" align="center">0.00029</td>
<td valign="top" align="center">0.00029</td>
</tr>
<tr>
<td valign="top" align="left">23</td>
<td valign="top" align="center">0.47529</td>
<td valign="top" align="center">0.53429</td>
<td valign="top" align="center">1.1023</td>
<td valign="top" align="center">0.91776</td>
<td valign="top" align="center">0.70694</td>
<td valign="top" align="center">0.83048</td>
<td valign="top" align="center">0.75306</td>
<td valign="top" align="center">0.46452</td>
<td valign="top" align="center">0.51651</td>
<td valign="top" align="center">1.0631</td>
<td valign="top" align="center">0.000269</td>
<td valign="top" align="center">0.000269</td>
<td valign="top" align="center">0.000269</td>
</tr>
<tr>
<td valign="top" align="left">24</td>
<td valign="top" align="center">0.45589</td>
<td valign="top" align="center">0.51191</td>
<td valign="top" align="center">1.0982</td>
<td valign="top" align="center">0.87713</td>
<td valign="top" align="center">0.68308</td>
<td valign="top" align="center">0.84585</td>
<td valign="top" align="center">0.74317</td>
<td valign="top" align="center">0.47591</td>
<td valign="top" align="center">0.49128</td>
<td valign="top" align="center">1.0882</td>
<td valign="top" align="center">0.000248</td>
<td valign="top" align="center">0.000248</td>
<td valign="top" align="center">0.000248</td>
</tr>
<tr>
<td valign="top" align="left">25</td>
<td valign="top" align="center">0.37746</td>
<td valign="top" align="center">0.40325</td>
<td valign="top" align="center">1.0423</td>
<td valign="top" align="center">0.77715</td>
<td valign="top" align="center">0.70046</td>
<td valign="top" align="center">0.77023</td>
<td valign="top" align="center">0.83499</td>
<td valign="top" align="center">0.46452</td>
<td valign="top" align="center">0.51651</td>
<td valign="top" align="center">1.0631</td>
<td valign="top" align="center">0.000225</td>
<td valign="top" align="center">0.000225</td>
<td valign="top" align="center">0.000225</td>
</tr>
<tr>
<td valign="top" align="left">26</td>
<td valign="top" align="center">0.3647</td>
<td valign="top" align="center">0.39237</td>
<td valign="top" align="center">1.0243</td>
<td valign="top" align="center">0.78517</td>
<td valign="top" align="center">0.70046</td>
<td valign="top" align="center">0.77023</td>
<td valign="top" align="center">0.83499</td>
<td valign="top" align="center">0.42631</td>
<td valign="top" align="center">0.48437</td>
<td valign="top" align="center">1.0381</td>
<td valign="top" align="center">0.00018</td>
<td valign="top" align="center">0.00018</td>
<td valign="top" align="center">0.00018</td>
</tr>
<tr>
<td valign="top" align="left">27</td>
<td valign="top" align="center">0.34487</td>
<td valign="top" align="center">0.37477</td>
<td valign="top" align="center">1.0101</td>
<td valign="top" align="center">0.74345</td>
<td valign="top" align="center">0.77056</td>
<td valign="top" align="center">0.79362</td>
<td valign="top" align="center">0.82993</td>
<td valign="top" align="center">0.42631</td>
<td valign="top" align="center">0.48437</td>
<td valign="top" align="center">1.0381</td>
<td valign="top" align="center">0.000162</td>
<td valign="top" align="center">0.000162</td>
<td valign="top" align="center">0.000162</td>
</tr>
<tr>
<td valign="top" align="left">28</td>
<td valign="top" align="center">0.31551</td>
<td valign="top" align="center">0.33927</td>
<td valign="top" align="center">0.98727</td>
<td valign="top" align="center">0.80184</td>
<td valign="top" align="center">0.70831</td>
<td valign="top" align="center">0.84776</td>
<td valign="top" align="center">0.76596</td>
<td valign="top" align="center">0.40212</td>
<td valign="top" align="center">0.46618</td>
<td valign="top" align="center">1.0042</td>
<td valign="top" align="center">0.000138</td>
<td valign="top" align="center">0.000138</td>
<td valign="top" align="center">0.000138</td>
</tr>
<tr>
<td valign="top" align="left">29</td>
<td valign="top" align="center">0.30228</td>
<td valign="top" align="center">0.32856</td>
<td valign="top" align="center">0.97748</td>
<td valign="top" align="center">0.79178</td>
<td valign="top" align="center">0.70835</td>
<td valign="top" align="center">0.84628</td>
<td valign="top" align="center">0.75956</td>
<td valign="top" align="center">0.4166</td>
<td valign="top" align="center">0.47328</td>
<td valign="top" align="center">1.0143</td>
<td valign="top" align="center">0.000116</td>
<td valign="top" align="center">0.000116</td>
<td valign="top" align="center">0.000116</td>
</tr>
<tr>
<td valign="top" align="left">30</td>
<td valign="top" align="center">0.29649</td>
<td valign="top" align="center">0.31335</td>
<td valign="top" align="center">0.97101</td>
<td valign="top" align="center">0.79717</td>
<td valign="top" align="center">0.795</td>
<td valign="top" align="center">0.84628</td>
<td valign="top" align="center">0.75956</td>
<td valign="top" align="center">0.4166</td>
<td valign="top" align="center">0.47328</td>
<td valign="top" align="center">1.0143</td>
<td valign="top" align="center">0.0000947</td>
<td valign="top" align="center">0.0000947</td>
<td valign="top" align="center">0.0000947</td>
</tr>
<tr>
<td valign="top" align="left">31</td>
<td valign="top" align="center">0.28664</td>
<td valign="top" align="center">0.29155</td>
<td valign="top" align="center">0.94969</td>
<td valign="top" align="center">0.78387</td>
<td valign="top" align="center">0.74271</td>
<td valign="top" align="center">0.84329</td>
<td valign="top" align="center">0.74329</td>
<td valign="top" align="center">0.40457</td>
<td valign="top" align="center">0.47546</td>
<td valign="top" align="center">1.0073</td>
<td valign="top" align="center">0.0000729</td>
<td valign="top" align="center">0.0000729</td>
<td valign="top" align="center">0.0000729</td>
</tr>
<tr>
<td valign="top" align="left">32</td>
<td valign="top" align="center">0.26844</td>
<td valign="top" align="center">0.29155</td>
<td valign="top" align="center">0.94969</td>
<td valign="top" align="center">0.78387</td>
<td valign="top" align="center">0.74271</td>
<td valign="top" align="center">0.84329</td>
<td valign="top" align="center">0.74329</td>
<td valign="top" align="center">0.40457</td>
<td valign="top" align="center">0.47546</td>
<td valign="top" align="center">1.0073</td>
<td valign="top" align="center">0.0000729</td>
<td valign="top" align="center">0.0000729</td>
<td valign="top" align="center">0.0000729</td>
</tr>
<tr>
<td valign="top" align="left">33</td>
<td valign="top" align="center">0.26346</td>
<td valign="top" align="center">0.29155</td>
<td valign="top" align="center">0.94969</td>
<td valign="top" align="center">0.78387</td>
<td valign="top" align="center">0.74271</td>
<td valign="top" align="center">0.84329</td>
<td valign="top" align="center">0.74329</td>
<td valign="top" align="center">0.40457</td>
<td valign="top" align="center">0.47546</td>
<td valign="top" align="center">1.0073</td>
<td valign="top" align="center">0.0000729</td>
<td valign="top" align="center">0.0000729</td>
<td valign="top" align="center">0.0000729</td>
</tr>
<tr>
<td valign="top" align="left">34</td>
<td valign="top" align="center">0.26274</td>
<td valign="top" align="center">0.29155</td>
<td valign="top" align="center">0.94969</td>
<td valign="top" align="center">0.78387</td>
<td valign="top" align="center">0.74271</td>
<td valign="top" align="center">0.84329</td>
<td valign="top" align="center">0.74329</td>
<td valign="top" align="center">0.40457</td>
<td valign="top" align="center">0.47546</td>
<td valign="top" align="center">1.0073</td>
<td valign="top" align="center">0.0000729</td>
<td valign="top" align="center">0.0000729</td>
<td valign="top" align="center">0.0000729</td>
</tr>
<tr>
<td valign="top" align="left">35</td>
<td valign="top" align="center">0.26274</td>
<td valign="top" align="center">0.28261</td>
<td valign="top" align="center">0.94172</td>
<td valign="top" align="center">0.84074</td>
<td valign="top" align="center">0.73638</td>
<td valign="top" align="center">0.84248</td>
<td valign="top" align="center">0.73659</td>
<td valign="top" align="center">0.38568</td>
<td valign="top" align="center">0.44934</td>
<td valign="top" align="center">0.99912</td>
<td valign="top" align="center">0.0000294</td>
<td valign="top" align="center">0.0000294</td>
<td valign="top" align="center">0.0000294</td>
</tr>
</tbody>
</table>
</table-wrap>
</sec>
<sec id="sec005">
<title>Conclusion and Future Scope</title>
<p>The proposed work focuses on the development of an autonomous watercraft to address the growing problem of floating waste within water bodies, especially in narrow canals and small rivers. Equipped with smart hardware and intelligent software, this watercraft can identify waste in real time, navigate efficiently and collect debris effectively. The modular design allows it to adapt to different environments, while the autonomous features minimize the level of human intervention. It presents a practical solution to water pollution, which helps in the protection of aquatic ecosystems and contributes towards clean water bodies.</p>
<p>In the future, AI watercraft can be used for the better detection and classification of waste. The system can be made even more useful by adding environmental sensors that can monitor water quality parameters, such as pH and turbidity. Improvements in solar panel efficiency would increase the time of operation, and the design could be scaled up for larger water bodies or modified for specific tasks like cleaning up oil spills. Incorporating IoT technology for remote monitoring and management would give better control and data insights. Such upgrades might make the watercraft an essential tool in addressing global water pollution.</p>
</sec>
</body>
<back>
<fn-group>
<fn id="n1" fn-type="other">
<p>Additional material is published online only. To view please visit the journal online.</p>
<p><bold>Cite this as:</bold> Ashok S, Anne Joan Venita V, Gowri Shankari E, Ranjana M and Prabhu V. Wearables, Autonomous Watercraft for Cleanup of Floating Waste in Water Bodies Using YOLO. Premier Journal of Science 2025;15:100144</p>
<p><bold>DOI:</bold> <ext-link ext-link-type="uri" xlink:href="https://doi.org/10.70389/PJS.100144">https://doi.org/10.70389/PJS.100144</ext-link></p>
</fn>
<fn id="n2" fn-type="other">
<p><bold>Ethical approval</bold></p>
<p>N/a</p>
</fn>
<fn id="n3" fn-type="other">
<p><bold>Consent</bold></p>
<p>N/a</p>
</fn>
<fn id="n4" fn-type="other">
<p><bold>Funding</bold></p>
<p>No industry funding</p>
</fn>
<fn id="n5" fn-type="conflict">
<p><bold>Conflicts of interest</bold></p>
<p>N/a</p>
</fn>
<fn id="n6" fn-type="other">
<p><bold>Author contribution</bold></p>
<p>S Ashok, V Anne Joan Venita, E Gowri Shankari, M Ranjana and V Prabhu &#x2013; Conceptualization, Writing &#x2013; original draft, review and editing</p>
</fn>
<fn id="n7" fn-type="other">
<p><bold>Guarantor</bold></p>
<p>S Ashok</p>
</fn>
<fn id="n8" fn-type="other">
<p><bold>Provenance and peer-review</bold></p>
<p>Unsolicited and externally peer-reviewed</p>
</fn>
<fn id="n9" fn-type="other">
<p><bold>Data availability statement</bold></p>
<p>N/a</p>
</fn>
</fn-group>
<ref-list>
<title>References</title>
<ref id="ref1"><label>1</label><mixed-citation publication-type="journal"><string-name><surname>Ahmad</surname> <given-names>RW</given-names></string-name>, <string-name><surname>Salah</surname> <given-names>K</given-names></string-name>, <string-name><surname>Jayaraman</surname> <given-names>R</given-names></string-name>, <string-name><surname>Yaqoob</surname> <given-names>I</given-names></string-name>, <string-name><surname>Omar</surname> <given-names>M</given-names></string-name>. <article-title>Blockchain in Smart City Waste Management: Enhancing Transparency and Efficiency</article-title>. <source>Sustain Urban Syst</source>. <year>2021</year>;<volume>13</volume>(<issue>2</issue>):<fpage>311</fpage>&#x2013;<lpage>330</lpage>.</mixed-citation></ref>
<ref id="ref2"><label>2</label><mixed-citation publication-type="journal"><string-name><surname>Elsheikh</surname> <given-names>M</given-names></string-name>, <string-name><surname>Ali</surname> <given-names>H</given-names></string-name>, <string-name><surname>Saleh</surname> <given-names>A</given-names></string-name>. <article-title>Water Care Robots: Integrating IoT and AI for Environmental Sustainability</article-title>. <source>Robotics in Aquatic Conservation</source>. <year>2021</year>;<volume>11</volume>(<issue>2</issue>):<fpage>176</fpage>&#x2013;<lpage>92</lpage>.</mixed-citation></ref>
<ref id="ref3"><label>3</label><mixed-citation publication-type="journal"><string-name><surname>Hossen</surname> <given-names>MM</given-names></string-name>, <string-name><surname>Majid</surname> <given-names>ME</given-names></string-name>, <string-name><surname>Kashem</surname> <given-names>SBA</given-names></string-name>, <string-name><surname>Khandakar</surname> <given-names>A</given-names></string-name>, <string-name><surname>Nashbat</surname> <given-names>M</given-names></string-name>, <string-name><surname>Ashraf</surname> <given-names>A</given-names></string-name>, <etal>et al.</etal> <article-title>Rwcnet: A deep learning model for recyclable waste classification with high accuracy</article-title>. <source>AI For Environmental Applications</source>. <year>2024</year>;<volume>9</volume>(<issue>1</issue>):<fpage>112</fpage>&#x2013;<lpage>30</lpage>.</mixed-citation></ref>
<ref id="ref4"><label>4</label><mixed-citation publication-type="journal"><string-name><surname>Huang</surname> <given-names>W</given-names></string-name>, <string-name><surname>Chen</surname> <given-names>D</given-names></string-name>, <string-name><surname>Li</surname> <given-names>X</given-names></string-name>. <article-title>Dual-Purpose Systems for Water Quality Monitoring and Cleaning: Integrating Real-Time Sensors and Navigation Algorithms</article-title>. <source>Environmental Innovation Review</source>. <year>2021</year>;<volume>16</volume>(<issue>1</issue>):<fpage>45</fpage>&#x2013;<lpage>62</lpage>.</mixed-citation></ref>
<ref id="ref5"><label>5</label><mixed-citation publication-type="journal"><string-name><surname>Sivasaravanababu</surname> <given-names>S</given-names></string-name>, <string-name><surname>Prabhu</surname> <given-names>V</given-names></string-name>, <string-name><surname>Parthasarathy</surname> <given-names>V</given-names></string-name>, <string-name><surname>Kumar</surname> <given-names>GS</given-names></string-name>. <article-title>A Heuristic-Concatenated Feature Classification Algorithm (H-CFCA) for autism and epileptic seizure detection</article-title>. <source>Biomed Signal Process Control</source>. <year>2023</year> Jul;<volume>85</volume>:<fpage>105245</fpage>. <ext-link ext-link-type="uri" xlink:href="https://doi.org/10.1016/j.bspc.2023.105245">https://doi.org/10.1016/j.bspc.2023.105245</ext-link>. </mixed-citation></ref>
<ref id="ref6"><label>6</label><mixed-citation publication-type="journal"><string-name><surname>Kiyokawa</surname> <given-names>T</given-names></string-name>, <string-name><surname>Takamatsu</surname> <given-names>J</given-names></string-name>, <string-name><surname>Koyanaka</surname> <given-names>S</given-names></string-name>. <article-title>Challenges and solution for robotic sorting of mixed industrial waste</article-title>. <source>Rob Autom Lett</source>. <year>2024</year>;<volume>14</volume>(<issue>2</issue>):<fpage>201</fpage>&#x2013;<lpage>220</lpage>.</mixed-citation></ref>
<ref id="ref7"><label>7</label><mixed-citation publication-type="journal"><string-name><surname>Kong</surname> <given-names>S</given-names></string-name>, <string-name><surname>Lee</surname> <given-names>J</given-names></string-name>, <string-name><surname>Kim</surname> <given-names>H</given-names></string-name>. <article-title>Intelligent water surface cleaner robots for aquatic pollution management</article-title>. <source>Robotics for Sustainability</source>. <year>2021</year>;<volume>12</volume>(<issue>4</issue>):<fpage>287</fpage>&#x2013;<lpage>300</lpage>.</mixed-citation></ref>
<ref id="ref8"><label>8</label><mixed-citation publication-type="journal"><string-name><surname>Liu</surname> <given-names>X</given-names></string-name>. <article-title>SL-Block System: A Modular Approach to Sustainable Construction Using Robotics and Machine Learning</article-title>. <source>Smart Construction Review</source>. <year>2024</year>;<volume>12</volume>(<issue>4</issue>):<fpage>334</fpage>&#x2013;<lpage>56</lpage>.</mixed-citation></ref>
<ref id="ref9"><label>9</label><mixed-citation publication-type="journal"><string-name><surname>Moura</surname> <given-names>P</given-names></string-name>, <string-name><surname>Almeida</surname> <given-names>J</given-names></string-name>, <string-name><surname>Santos</surname> <given-names>R</given-names></string-name>, <string-name><surname>Oliveira</surname> <given-names>F</given-names></string-name>. <article-title>Innovative Rover Cleaning Mechanisms for Plastic and Oil Waste Management</article-title>. <source>Water Pollution Solutions</source>. <year>2024</year>;<volume>18</volume>(<issue>1</issue>):<fpage>87</fpage>&#x2013;<lpage>105</lpage>.</mixed-citation></ref>
<ref id="ref10"><label>10</label><mixed-citation publication-type="journal"><string-name><surname>Nawar</surname> <given-names>A</given-names></string-name>, <string-name><surname>Ahmed</surname> <given-names>F</given-names></string-name>, <string-name><surname>Rahman</surname> <given-names>S</given-names></string-name>. <article-title>Robotic systems for river surface cleaning and oil spill mitigation: a study on South Asian water pollution</article-title>. <source>Environmental Robotics Journal</source>. <year>2023</year>;<volume>8</volume>(<issue>4</issue>):<fpage>199</fpage>&#x2013;<lpage>216</lpage>.</mixed-citation></ref>
<ref id="ref11"><label>11</label><mixed-citation publication-type="journal"><string-name><surname>Prochazka</surname> <given-names>R</given-names></string-name>, <string-name><surname>Valicek</surname> <given-names>J</given-names></string-name>, <string-name><surname>Harnicarova</surname> <given-names>M</given-names></string-name>, <string-name><surname>Kusenerova</surname> <given-names>M</given-names></string-name>, <string-name><surname>Tozan</surname> <given-names>H</given-names></string-name>, <string-name><surname>Borzan</surname> <given-names>C</given-names></string-name>, <etal>et al.</etal> <article-title>Sorting of plastic packaging using advanced NIR/VIS optical systems for enhanced purity</article-title>. <source>J Environ Sustain</source>. <year>2024</year>;<volume>15</volume>(<issue>3</issue>):<fpage>452</fpage>&#x2013;<lpage>67</lpage>. </mixed-citation></ref>
<ref id="ref12"><label>12</label><mixed-citation publication-type="journal"><string-name><surname>Rianmora</surname> <given-names>S</given-names></string-name>, <string-name><surname>Punsawt</surname> <given-names>P</given-names></string-name>, <string-name><surname>Yutisavanuwat</surname> <given-names>C</given-names></string-name>, <string-name><surname>Tongtan</surname> <given-names>Y</given-names></string-name>. <article-title>Intelligent Conveyor Belt Systems for Plastic Bottle Sorting in Recycling SMEs</article-title>. <source>Recycling and Automation Advances</source>. <year>2023</year>;<volume>11</volume>(<issue>3</issue>):<fpage>122</fpage>&#x2013;<lpage>40</lpage>.</mixed-citation></ref>
<ref id="ref13"><label>13</label><mixed-citation publication-type="journal"><string-name><surname>Sebastin</surname> <given-names>Suresh</given-names></string-name>, <string-name><surname>Prabhu</surname> <given-names>V</given-names></string-name>, <string-name><surname>Parthasarathy</surname> <given-names>V</given-names></string-name>, <string-name><surname>Boddu</surname> <given-names>R</given-names></string-name>, <string-name><surname>Sucharitha</surname> <given-names>Y</given-names></string-name>, <string-name><surname>Teshite</surname> <given-names>G</given-names></string-name>. <article-title>A Novel Routing Protocol for Low-Energy Wireless Sensor Networks</article-title>. <source>Journal of Sensors</source>. <year>2022</year>;<volume>2022</volume>. <ext-link ext-link-type="uri" xlink:href="https://doi.org/10.1155/2022/8244176">https://doi.org/10.1155/2022/8244176</ext-link>.</mixed-citation></ref>
<ref id="ref14"><label>14</label><mixed-citation publication-type="journal"><string-name><surname>Wang</surname> <given-names>W</given-names></string-name>, <string-name><surname>Zhang</surname> <given-names>H</given-names></string-name>, <string-name><surname>Li</surname> <given-names>Y</given-names></string-name>. <article-title>Detachable Surface Garbage Cleaning Robots: Low-Cost Solutions for Small Water Bodies</article-title>. <source>Robotics Environ Appl</source>. <year>2024</year>;<volume>10</volume>(<issue>2</issue>):<fpage>67</fpage>&#x2013;<lpage>81</lpage>.</mixed-citation></ref>
<ref id="ref15"><label>15</label><mixed-citation publication-type="journal"><string-name><surname>Yim</surname> <given-names>M</given-names></string-name>. <article-title>Modular Self-Reconfigurable Robots for Adaptive Tasks in Complex Environments</article-title>. <source>Adv Rob Manuf</source>. <year>2021</year>;<volume>14</volume>(<issue>2</issue>):<fpage>145</fpage>&#x2013;<lpage>62</lpage>.</mixed-citation></ref>
</ref-list>
</back>
</article>
