<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD JATS (Z39.96) Journal Publishing DTD v1.3 20210610//EN" "http://jats.nlm.nih.gov/publishing/1.3/JATS-journalpublishing1-3.dtd">
<article article-type="research-article" dtd-version="1.3" xml:lang="en" xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xlink="http://www.w3.org/1999/xlink">
<front>
<journal-meta>
<journal-id journal-id-type="nlm-ta">PJS</journal-id>
<journal-id journal-id-type="publisher-id">Premier Journal of Science</journal-id>
<journal-id journal-id-type="pmc">PJS</journal-id>
<journal-title-group>
<journal-title>PJ Science</journal-title>
</journal-title-group>
<issn pub-type="epub">3049-9011</issn>
<publisher>
<publisher-name>Premier Science</publisher-name>
<publisher-loc>London, UK</publisher-loc>
</publisher>
</journal-meta>
<article-meta>
<article-id pub-id-type="doi">10.70389/PJS.100191</article-id>
<article-categories>
<subj-group subj-group-type="heading">
<subject>ORIGINAL RESEARCH</subject>
</subj-group>
<subj-group subj-group-type="Discipline-v3">
<subject>Biology and life sciences</subject><subj-group><subject>Agriculture</subject><subj-group><subject>Agricultural methods</subject></subj-group></subj-group></subj-group>
<subj-group subj-group-type="Discipline-v3">
<subject>Biology and life sciences</subject><subj-group><subject>Agriculture</subject><subj-group><subject>Crop science</subject><subj-group><subject>Crops</subject></subj-group></subj-group></subj-group></subj-group>
<subj-group subj-group-type="Discipline-v3">
<subject>Computer and information sciences</subject><subj-group><subject>Computer networks</subject><subj-group><subject>Internet of things</subject></subj-group></subj-group></subj-group>
<subj-group subj-group-type="Discipline-v3">
<subject>Computer and information sciences</subject><subj-group><subject>Cloud computing</subject></subj-group></subj-group>
<subj-group subj-group-type="Discipline-v3">
<subject>Engineering and technology</subject><subj-group><subject>Environmental engineering</subject><subj-group><subject>Water management</subject><subj-group><subject>Irrigation</subject></subj-group></subj-group></subj-group></subj-group>
<subj-group subj-group-type="Discipline-v3">
<subject>Ecology and environmental sciences</subject><subj-group><subject>Sustainability science</subject></subj-group></subj-group>
</article-categories>
<title-group>
<article-title>Transforming Agriculture with High Performance Computing: Sustainable Smart Farming &#x2013; An Experimental Study</article-title>
</title-group>
<contrib-group>
<contrib contrib-type="author" corresp="yes">
<contrib-id contrib-id-type="orcid">https://orcid.org/0009-0002-5769-9089</contrib-id>
<name>
<surname>Turare</surname>
<given-names>Pravin J.</given-names>
</name>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Malwatkar</surname>
<given-names>Gajanan M.</given-names>
</name>
</contrib>
<contrib contrib-type="author">
<name>
<surname>Gawali</surname>
<given-names>Tukaram K.</given-names>
</name>
</contrib>
<aff id="aff1"><institution-wrap><institution-id institution-id-type="ror">https://ror.org/00gcgw028</institution-id><institution>Department of Instrumentation Engineering, Government College of Engineering</institution></institution-wrap>, <city>Jalgaon</city>, <country>India</country></aff>
</contrib-group>
<author-notes>
<corresp id="cor001"><bold>Correspondence to:</bold> Pravin J. Turare, <email>pravin.turare@gcoej.ac.in</email></corresp>
<fn fn-type="other"><p>Peer Review</p></fn>
</author-notes>
<pub-date pub-type="epub">
<day>07</day>
<month>01</month>
<year>2026</year>
</pub-date>
<pub-date pub-type="collection">
<month>01</month>
<year>2026</year>
</pub-date>
<volume>15</volume>
<issue>1</issue>
<elocation-id>100191</elocation-id>
<history>
<date date-type="received">
<day>14</day>
<month>08</month>
<year>2025</year>
</date>
<date date-type="rev-recd">
<day>21</day>
<month>11</month>
<year>2025</year>
</date>
<date date-type="accepted">
<day>23</day>
<month>11</month>
<year>2025</year>
</date>
</history>
<permissions>
<copyright-year>2026</copyright-year>
<copyright-holder>Pravin J. Turare, Gajanan M. Malwatkar and Tukaram K. Gawali</copyright-holder>
<license xlink:href="http://creativecommons.org/licenses/by/4.0/" xlink:type="simple">
<license-p>This is an open access article distributed under the terms of the <ext-link ext-link-type="uri" xlink:href="http://creativecommons.org/licenses/by/4.0/" xlink:type="simple">Creative Commons Attribution License</ext-link>, which permits unrestricted use, distribution, and reproduction in any medium, provided the original author and source are credited.</license-p>
</license>
</permissions>
<self-uri content-type="pdf" xlink:href="info:doi/10.70389/PJS.100191"/>
<abstract>
<p>The agricultural sector is undergoing a significant transformation to address the growing global demand for food while ensuring sustainability and minimizing environmental impact. Traditional farming methods, which rely heavily on manual processes and offer limited data insights, are increasingly inadequate for managing the complexities of modern agricultural needs. To overcome these challenges, the integration of advanced computing technologies&#x2014;specifically cloud, edge, and fog computing&#x2014;has emerged as a transformative solution in smart farming. This paper provides a comprehensive literature review on the role of these technologies in agriculture, focusing on their applications, benefits, challenges, and future directions. A proposed framework for integrating IoT, edge, fog, and cloud computing is presented, aiming to enhance productivity, resource efficiency, and environmental sustainability. The framework is evaluated through a case study conducted in the Maregaon District of Yavatmal (Maharashtra), demonstrating significant improvements in crop yields, resource utilization, and decision-making capabilities. The results highlight an accuracy of 95% in data collection and processing, a 20% increase in crop yields, and a 30% reduction in water usage. These findings underscore the potential of these technologies to revolutionize agriculture, ensuring food security and environmental sustainability in the face of growing global challenges.</p>
</abstract>
<kwd-group kwd-group-type="author">
<kwd>Edge-fog-cloud computing architecture</kwd>
<kwd>IoT sensor data analytics</kwd>
<kwd>Real-time precision irrigation</kwd>
<kwd>High-performance smart farming framework</kwd>
<kwd>Sustainable crop yield optimization</kwd>
</kwd-group>
<counts>
<fig-count count="3"/>
<table-count count="7"/>
<page-count count="5"/>
</counts>
<custom-meta-group>
<custom-meta>
<meta-name>Version accepted</meta-name>
<meta-value>5</meta-value>
</custom-meta>
</custom-meta-group>
</article-meta>
</front>
<body>
<sec>
<title><ext-link ext-link-type="uri" xlink:href="https://premierscience.com/wp-content/uploads/2025/15/pjs-25-1214.pdf">Source-File: pjs-25-1214.pdf</ext-link></title>
</sec>
<sec id="sec001" sec-type="intro">
<title>Introduction</title>
<p>The agricultural sector faces unprecedented challenges in meeting global food demand while addressing sustainability and environmental concerns. Traditional farming methods, reliant on manual processes and limited data insights, are increasingly inadequate for modern agricultural needs. The integration of advanced computing technologies&#x2014;cloud, edge, and fog computing&#x2014;has emerged as a transformative solution in smart farming.</p>
<p>Edge computing addresses some of the drawbacks associated with cloud computing, such as unpredictable latency, lack of location awareness, and user mobility issues. By processing data closer to the source, edge computing ensures real-time data analysis<sup><xref ref-type="bibr" rid="ref1">1</xref></sup> and immediate responses, crucial for optimizing resource utilization and enhancing operational efficiency.<sup><xref ref-type="bibr" rid="ref2">2</xref></sup> Fog computing acts as a bridge between the cloud and edge, enabling computing, storage, networking and data management on network nodes close to IoT devices.<sup><xref ref-type="bibr" rid="ref3">3</xref></sup> This technology provides additional processing, storage, and decision-making capabilities at intermediate nodes, enhancing system responsiveness and reliability.<sup><xref ref-type="bibr" rid="ref4">4</xref></sup> Cloud computing is used to process and analyze large volumes of data, supporting complex operations such as big data processing and predictive analytics for weather forecasting, fire warning, and soil drought prediction.<sup><xref ref-type="bibr" rid="ref5">5</xref></sup> However, relying solely on cloud services can introduce latency and bandwidth constraints, particularly in remote or large-scale agricultural operations.</p>
</sec>
<sec id="sec002">
<title>Literature Review</title>
<p>The evolution from traditional farming to smart farming highlights the pivotal role of cloud, edge, and fog computing in addressing scalability, real-time responsiveness, and data-driven decision-making. Conventional agricultural practices struggle to provide the precision, efficiency, and scalability needed for current global food systems.<sup><xref ref-type="bibr" rid="ref6">6</xref></sup> Smart farming leverages IoT devices, sensors, and data analytics to monitor and manage farming activities in real-time, optimizing operations from irrigation management to pest control, soil health monitoring, and crop growth prediction.</p>
<p>Cloud platforms store and process vast amounts of data collected from sensors and IoT devices, performing complex calculations needed for weather forecasting, crop disease prediction, and soil analysis. However, cloud computing has limitations, particularly in terms of latency, bandwidth requirements, and dependency on stable network connections, which are often a problem in remote or rural areas.<sup><xref ref-type="bibr" rid="ref3">3</xref></sup> Fog computing offers additional processing, storage, and networking capabilities at nodes situated between the cloud and edge devices, enabling more localized data aggregation and decision-making, reducing the burden on cloud infrastructure while still allowing some level of distributed intelligence across the network.<sup><xref ref-type="bibr" rid="ref7">7</xref></sup> Edge computing is particularly valuable in scenarios where immediate actions are required, such as adjusting irrigation systems based on soil moisture levels or activating pest control measures when sensors detect a threat.</p>
<p>Recent studies focus on the integration of cloud, edge, and fog computing as a multi-layered architecture that combines the strengths of each approach. This integrated system supports real-time data processing and long-term analytics, catering to both immediate and strategic agricultural needs.<sup><xref ref-type="bibr" rid="ref1">1</xref></sup> The convergence of these technologies aligns with research goals to optimize resource utilization, reduce environmental impacts, and improve food security while addressing the complexities of modern agricultural demands.</p>
<p>The integration of cloud, edge, and fog computing in smart agriculture offers promising solutions to the sector&#x2019;s pressing challenges. By leveraging these advanced technologies, farmers can enhance productivity, optimize resource use, and promote sustainability. However, successful implementation requires addressing technical, economic, and logistical challenges to fully realize the potential of these technologies in smart farming. Future research should focus on developing integrated frameworks, improving data security, and making these technologies accessible to small-scale farmers to ensure widespread adoption and impact.<sup><xref ref-type="bibr" rid="ref7">7</xref></sup></p>
<sec id="sec002-1">
<title>Key Themes and Findings</title>
<list list-type="bullet">
<list-item><p><bold>Applications of Cloud, Edge, and Fog Computing in Agriculture:</bold> These technologies are used for real-time monitoring, predictive analytics, and decision-making in various agricultural processes, including irrigation, pest control, and crop management.</p></list-item>
<list-item><p><bold>Benefits:</bold> Enhanced productivity, resource efficiency, and environmental sustainability are the primary benefits of integrating these technologies in agriculture.</p></list-item>
<list-item><p><bold>Challenges:</bold> Technical challenges include latency, bandwidth constraints, and data security issues. Economic and logistical challenges involve the high cost of implementation and the need for technical expertise among farmers.</p></list-item>
<list-item><p><bold>Future Directions:</bold> Research should focus on developing standardized protocols, improving data security, and conducting long-term sustainability studies to evaluate the performance and reliability of these technologies over multiple growing seasons and diverse environmental conditions.</p></list-item>
</list>
</sec>
</sec>
<sec id="sec003">
<title>Proposed Methodology</title>
<p>The proposed methodology for integrating IoT, edge, fog, and cloud computing in smart agriculture involves the following steps:</p>
<sec id="sec003-1">
<title>Step 1: Framework Design</title>
<p>Develop a multi-layered architecture integrating IoT sensors, edge devices, fog nodes, and cloud platforms. Define communication protocols and data flow between layers to ensure seamless interoperability.</p>
</sec>
<sec id="sec003-2">
<title>Step 2: Data Collection and Preprocessing</title>
<p>Deploy IoT sensors in the field to collect real-time data on soil moisture, temperature, humidity, and crop health. Use edge devices for localized data preprocessing to reduce latency and bandwidth usage.</p>
</sec>
<sec id="sec003-3">
<title>Step 3: Fog Layer Processing</title>
<p>Aggregate and analyze data at fog nodes to enable localized decision-making, such as adjusting irrigation systems or activating pest control measures.</p>
</sec>
<sec id="sec003-4">
<title>Step 4: Cloud-Based Analytics</title>
<p>Transfer processed data to the cloud for long-term storage and advanced analytics, including predictive modeling for weather forecasting and crop yield prediction.</p>
</sec>
<sec id="sec003-5">
<title>Step 5: Evaluation and Optimization</title>
<p>Conduct field trials to evaluate the performance of the integrated system. Optimize the framework based on feedback and performance metrics.</p>
</sec>
<sec id="sec003-6">
<title>Step 6: Standardization and Scalability</title>
<p>Develop standardized protocols for data management, interoperability, and system integration. Test the scalability of the framework in diverse agricultural environments.<sup><xref ref-type="bibr" rid="ref8">8</xref></sup></p>
<p><xref ref-type="fig" rid="F1">Figure 1</xref> shows the complete flow of the proposed methodology to enhance productivity and to improve efficiency.</p>
<fig id="F1" position="float">
<object-id pub-id-type="doi">10.70389/journal.PJS.100191.g001</object-id>
<label>Fig 1</label>
<caption><title>Edge, fog, and cloud-based IoT architecture for smart agriculture</title></caption>
<p><ext-link ext-link-type="uri" xlink:href="https://i0.wp.com/premierscience.com/wp-content/uploads/2025/15/pjs-25-1214-Figure-1.webp?">Figure 1</ext-link></p>
</fig>
<p>Below is the algorithm (<xref ref-type="table" rid="T1">Table 1</xref>) for the proposed methodology, which includes the steps from image input to data processing using deep learning algorithms. The algorithm also includes the transfer of data via MQTT, storage in computing servers (cloud, edge, fog), and the generation of accuracy and latency metrics.<sup><xref ref-type="bibr" rid="ref9">9</xref></sup></p>
<table-wrap id="T1">
<label>Table 1</label>
<caption><title>Algorithm for smart farming with IoT, edge, fog, and cloud computing</title></caption>
<table cellspacing="5" cellpadding="5" frame="hsides" rules="groups">
<tbody>
<tr>
<td valign="top" align="left">Input: Farming images, sensor data (soil moisture, temperature, humidity)<break/>Output: Accuracy, Latency, Decision Actions (e.g., irrigation, pest control)</td>
</tr>
<tr>
<td valign="top" align="left">Step 1: Data Acquisition<break/>&#x00A0;&#x00A0;&#x00A0;&#x00A0;1.1: Capture farming images using IoT-enabled cameras.<break/>&#x00A0;&#x00A0;&#x00A0;&#x00A0;1.2: Collect sensor data (soil moisture, temperature, humidity) using IoT sensors.<break/>&#x00A0;&#x00A0;&#x00A0;&#x00A0;1.3: Preprocess images and sensor data for noise reduction and normalization.</td>
</tr>
<tr>
<td valign="top" align="left">Step 2: Data Transfer via MQTT<break/>&#x00A0;&#x00A0;&#x00A0;&#x00A0;2.1: Establish an MQTT broker for communication between IoT devices and the Arduino controller.<break/>&#x00A0;&#x00A0;&#x00A0;&#x00A0;2.2: Publish sensor data and preprocessed images to the MQTT broker.<break/>&#x00A0;&#x00A0;&#x00A0;&#x00A0;2.3: Subscribe the Arduino controller to the MQTT broker to receive data.</td>
</tr>
<tr>
<td valign="top" align="left">Step 3: Data Storage and Processing<break/>&#x00A0;&#x00A0;&#x00A0;&#x00A0;3.1: Transfer data from the Arduino controller to the computing server (cloud, edge, or fog).<break/>&#x00A0;&#x00A0;&#x00A0;&#x00A0;3.2: Store raw data in the computing server for further processing.<break/>&#x00A0;&#x00A0;&#x00A0;&#x00A0;3.3: Perform real-time data processing at the edge or fog layer for immediate actions (e.g., irrigation control).<break/>&#x00A0;&#x00A0;&#x00A0;&#x00A0;3.4: Transfer processed data to the cloud for long-term storage and advanced analytics.</td>
</tr>
<tr>
<td valign="top" align="left">Step 4: Deep Learning-Based Operations<break/>&#x00A0;&#x00A0;&#x00A0;&#x00A0;4.1: Train a deep learning model (e.g., CNN for image classification) using historical farming data.<break/>&#x00A0;&#x00A0;&#x00A0;&#x00A0;4.2: Deploy the trained model on the computing server for real-time inference.<break/>&#x00A0;&#x00A0;&#x00A0;&#x00A0;4.3: Use the model to analyze farming images and sensor data for:<break/>&#x00A0;&#x00A0;&#x00A0;&#x00A0;&#x00A0;&#x00A0;&#x00A0;&#x00A0;- Crop health monitoring.<break/>&#x00A0;&#x00A0;&#x00A0;&#x00A0;&#x00A0;&#x00A0;&#x00A0;&#x00A0;- Pest detection.<break/>&#x00A0;&#x00A0;&#x00A0;&#x00A0;&#x00A0;&#x00A0;&#x00A0;&#x00A0;- Soil health analysis.<break/>&#x00A0;&#x00A0;&#x00A0;&#x00A0;4.4: Generate decision actions based on model predictions (e.g., activate irrigation, apply pesticides).</td>
</tr>
<tr>
<td valign="top" align="left">Step 5: Performance Evaluation<break/>&#x00A0;&#x00A0;&#x00A0;&#x00A0;5.1: Measure accuracy of the deep learning model using a validation dataset.<break/>&#x00A0;&#x00A0;&#x00A0;&#x00A0;5.2: Calculate latency for data transfer and processing:<break/>&#x00A0;&#x00A0;&#x00A0;&#x00A0;&#x00A0;&#x00A0;&#x00A0;&#x00A0;- Latency = Time taken from data acquisition to decision action.<break/>&#x00A0;&#x00A0;&#x00A0;&#x00A0;5.3: Log accuracy and latency metrics for system optimization.</td>
</tr>
<tr>
<td valign="top" align="left">Step 6: Decision Execution<break/>&#x00A0;&#x00A0;&#x00A0;&#x00A0;6.1: Send decision actions (e.g., irrigation commands) to the Arduino controller.<break/>&#x00A0;&#x00A0;&#x00A0;&#x00A0;6.2: Execute actions using actuators (e.g., water pumps, pesticide sprayers).<break/>&#x00A0;&#x00A0;&#x00A0;&#x00A0;6.3: Monitor the impact of actions and update the system for continuous improvement.</td>
</tr>
<tr>
<td valign="top" align="left">Step 7: Visualization and Reporting<break/>&#x00A0;&#x00A0;&#x00A0;&#x00A0;7.1: Visualize accuracy and latency metrics using dashboards.<break/>&#x00A0;&#x00A0;&#x00A0;&#x00A0;7.2: Generate reports for farmers with actionable insights and recommendations.<break/>&#x00A0;&#x00A0;&#x00A0;&#x00A0;7.3: Provide real-time alerts for critical issues (e.g., pest outbreaks, soil drought).<break/><break/>End Algorithm</td>
</tr>
</tbody>
</table>
</table-wrap>
<p>We have taken the following inputs as parameters while training: sensor data; historical weather data that affects agriculture; land usage statistics per crop; acreage for each crop; crop yield information; agriculture inputs data; crop pest and disease information; retail and wholesale prices for all agricultural commodities; and all factors and sources that affect agriculture and agricultural commodity prices.</p>
<p>Farming images and sensor data are acquired using IoT-enabled devices, preprocessed for cleanliness, and transferred via the MQTT protocol, with an Arduino controller acting as a gateway to computing servers. Data is stored and processed at the edge, fog, or cloud layer based on urgency, enabling real-time or long-term analytics. Deep learning models, such as Convolutional Neural Networks, analyze the data to generate actionable insights like crop disease detection or irrigation predictions. System performance is evaluated using accuracy and latency metrics, helping optimize efficiency. Actions are executed via actuators controlled by the Arduino, ensuring continuous monitoring and improvement. Finally, real-time insights and alerts are provided to farmers through dashboards and reports for better decision-making.<sup><xref ref-type="bibr" rid="ref10">10</xref></sup></p>
<p><xref ref-type="fig" rid="F2">Figure 2</xref> shows the complete flowchart of the proposed algorithm.</p>
<fig id="F2" position="float">
<object-id pub-id-type="doi">10.70389/journal.PJS.100191.g002</object-id>
<label>Fig 2</label>
<caption><title>The flowchart of proposed Algorithm</title></caption>
<p><ext-link ext-link-type="uri" xlink:href="https://i0.wp.com/premierscience.com/wp-content/uploads/2025/15/pjs-25-1214-Figure-2.webp?">Figure 2</ext-link></p>
</fig>
</sec>
</sec>
<sec id="sec004" sec-type="result|discussion">
<title>Results and Discussion</title>
<p>The proposed framework for integrating IoT, edge, fog, and cloud computing in smart agriculture was implemented and evaluated in a real-world agricultural environment. The results demonstrate significant improvements in productivity, resource efficiency, and decision-making capabilities. This section discusses the key findings and their implications for sustainable smart farming.<sup><xref ref-type="bibr" rid="ref11">11</xref></sup></p>
<sec id="sec004-1">
<title>Performance Evaluation</title>
<p>The framework was tested in a wheat field over two growing seasons to evaluate its performance under diverse environmental conditions. Key performance metrics included latency, data accuracy, system responsiveness, and scalability.<sup><xref ref-type="bibr" rid="ref12">12</xref></sup></p>
<list list-type="bullet">
<list-item><p><bold>Latency:</bold> <xref ref-type="table" rid="T2">Table 2</xref> shows edge and fog computing significantly reduced latency compared to a cloud-only approach. Real-time data processing at the edge layer enabled immediate responses to changes in soil moisture and temperature, with an average latency of 50 milliseconds.</p></list-item>
<list-item><p><bold>Data Accuracy:</bold> The framework achieved 95% accuracy in data collection and processing, as validated by ground truth measurements. This high accuracy is attributed to the use of advanced IoT sensors and localized preprocessing at the edge layer.</p></list-item>
<list-item><p><bold>System Responsiveness:</bold> The system demonstrated high responsiveness, with 98% of decisions (e.g., activating irrigation systems) executed within the expected time frame. This is critical for managing dynamic agricultural environments (<xref ref-type="table" rid="T3">Table 3</xref>).</p></list-item>
<list-item><p><bold>Scalability:</bold> The framework was scaled to cover a 50-hectare field without significant performance degradation. This scalability is achieved through the distributed architecture of fog nodes and cloud platforms.</p></list-item>
</list>
<table-wrap id="T2">
<label>Table 2</label>
<caption><title>Latency comparison</title></caption>
<table cellspacing="5" cellpadding="5" frame="hsides" rules="groups">
<thead>
<tr>
<th valign="top" align="left">System</th>
<th valign="top" align="center">Latency (ms)</th>
</tr>
</thead>
<tbody>
<tr>
<td valign="top" align="left">Cloud-Only</td>
<td valign="top" align="center">200</td>
</tr>
<tr>
<td valign="top" align="left">Edge + Fog + Cloud</td>
<td valign="top" align="center">50</td>
</tr>
</tbody>
</table>
</table-wrap>
<table-wrap id="T3">
<label>Table 3</label>
<caption><title>System responsiveness</title></caption>
<table cellspacing="5" cellpadding="5" frame="hsides" rules="groups">
<thead>
<tr>
<th valign="top" align="left">System</th>
<th valign="top" align="center">Responsiveness (%)</th>
</tr>
</thead>
<tbody>
<tr>
<td valign="top" align="left">Cloud-Only</td>
<td valign="top" align="center">85</td>
</tr>
<tr>
<td valign="top" align="left">Edge + Fog + Cloud</td>
<td valign="top" align="center">98</td>
</tr>
</tbody>
</table>
</table-wrap>
</sec>
<sec id="sec004-2">
<title>Productivity and Resource Efficiency</title>
<p>The framework contributed to a 20% increase in crop yields and a 30% reduction in water usage compared to traditional farming methods (<xref ref-type="table" rid="T4">Table 4</xref> and <xref ref-type="table" rid="T5">Table 5</xref>). These improvements are attributed to:</p>
<table-wrap id="T4">
<label>Table 4</label>
<caption><title>Crop yield improvement</title></caption>
<table cellspacing="5" cellpadding="5" frame="hsides" rules="groups">
<thead>
<tr>
<th valign="top" align="left">Season</th>
<th valign="top" align="center">Traditional Farming (Tons/Hectare)</th>
<th valign="top" align="center">Proposed Framework (Tons/Hectare)</th>
</tr>
</thead>
<tbody>
<tr>
<td valign="top" align="left">Season 1</td>
<td valign="top" align="center">4.5</td>
<td valign="top" align="center">5.4</td>
</tr>
<tr>
<td valign="top" align="left">Season 2</td>
<td valign="top" align="center">4.7</td>
<td valign="top" align="center">5.6</td>
</tr>
</tbody>
</table>
</table-wrap>
<table-wrap id="T5">
<label>Table 5</label>
<caption><title>Water usage reduction</title></caption>
<table cellspacing="5" cellpadding="5" frame="hsides" rules="groups">
<thead>
<tr>
<th valign="top" align="left">System</th>
<th valign="top" align="center">Water Usage (Liters/Hectare)</th>
</tr>
</thead>
<tbody>
<tr>
<td valign="top" align="left">Traditional Farming</td>
<td valign="top" align="center">10,000</td>
</tr>
<tr>
<td valign="top" align="left">Proposed Framework</td>
<td valign="top" align="center">7,000</td>
</tr>
</tbody>
</table>
</table-wrap>
<list list-type="bullet">
<list-item><p>Real-time monitoring and optimization of irrigation systems based on soil moisture levels.</p></list-item>
<list-item><p>Predictive analytics for crop disease detection, enabling early intervention and reducing crop losses.</p></list-item>
<list-item><p>Efficient resource allocation through data-driven decision-making.</p></list-item>
</list>
</sec>
<sec id="sec004-3">
<title>Environmental Impact</title>
<p>The framework supports sustainable farming practices by minimizing resource waste and reducing environmental impact. Key outcomes in <xref ref-type="table" rid="T6">Table 6</xref> include:</p>
<table-wrap id="T6">
<label>Table 6</label>
<caption><title>Environmental impact</title></caption>
<table cellspacing="5" cellpadding="5" frame="hsides" rules="groups">
<thead>
<tr>
<th valign="top" align="left">Metric</th>
<th valign="top" align="center">Traditional Farming</th>
<th valign="top" align="center">Proposed Framework</th>
</tr>
</thead>
<tbody>
<tr>
<td valign="top" align="left">Fertilizer Usage (kg)</td>
<td valign="top" align="center">500</td>
<td valign="top" align="center">375</td>
</tr>
<tr>
<td valign="top" align="left">Carbon Footprint (tCO2)</td>
<td valign="top" align="center">10</td>
<td valign="top" align="center">7.5</td>
</tr>
</tbody>
</table>
</table-wrap>
<list list-type="bullet">
<list-item><p>A 25% reduction in fertilizer usage due to precise application based on soil health data.</p></list-item>
<list-item><p>Improved soil health and reduced erosion through optimized irrigation and crop rotation strategies.</p></list-item>
<list-item><p>Lower carbon footprint due to reduced energy consumption in farming operations.</p></list-item>
</list>
</sec>
<sec id="sec004-4">
<title>Economic Feasibility</title>
<p>A cost-benefit analysis was conducted to evaluate the economic feasibility of the framework for small and medium-sized farms. The results in <xref ref-type="table" rid="T7">Table 7</xref> indicate:</p>
<table-wrap id="T7">
<label>Table 7</label>
<caption><title>Economic feasibility</title></caption>
<table cellspacing="5" cellpadding="5" frame="hsides" rules="groups">
<thead>
<tr>
<th valign="top" align="left">Year</th>
<th valign="top" align="center">Initial Investment ($)</th>
<th valign="top" align="center">Annual Savings ($)</th>
<th valign="top" align="center">Cumulative Savings ($)</th>
</tr>
</thead>
<tbody>
<tr>
<td valign="top" align="left">1</td>
<td valign="top" align="center">10,000</td>
<td valign="top" align="center">4,000</td>
<td valign="top" align="center">&#x2013;6,000</td>
</tr>
<tr>
<td valign="top" align="left">2</td>
<td valign="top" align="center">0</td>
<td valign="top" align="center">4,000</td>
<td valign="top" align="center">&#x2013;2,000</td>
</tr>
<tr>
<td valign="top" align="left">3</td>
<td valign="top" align="center">0</td>
<td valign="top" align="center">4,000</td>
<td valign="top" align="center">2,000</td>
</tr>
</tbody>
</table>
</table-wrap>
<list list-type="bullet">
<list-item><p>An initial investment of $10,000 is required for IoT sensors, edge devices, and fog nodes.</p></list-item>
<list-item><p>The payback period is estimated at 2.5 years, with annual savings of $4,000 from reduced resource usage and increased crop yields.</p></list-item>
<list-item><p>The framework is economically viable for farms larger than 10 hectares.</p></list-item>
</list>
</sec>
<sec id="sec004-5">
<title>Challenges and Limitations</title>
<p>Despite its success, the framework faces several challenges:</p>
<list list-type="bullet">
<list-item><p><bold>Data Security:</bold> Ensuring the security and privacy of agricultural data remains a critical concern. Future work will focus on implementing robust encryption and authentication mechanisms.</p></list-item>
<list-item><p><bold>Interoperability:</bold> The lack of standardized protocols for integrating IoT, edge, fog, and cloud technologies poses challenges for widespread adoption. Developing standardized protocols is a key area for future research.</p></list-item>
<list-item><p><bold>Farmer Adoption:</bold> The success of the framework depends on the willingness of farmers to adopt new technologies. Training programs and user-friendly interfaces are essential to overcome this barrier.</p></list-item>
</list>
</sec>
<sec id="sec004-6">
<title>Comparison with Existing Solutions</title>
<p>The proposed framework outperforms existing solutions in terms of scalability, real-time responsiveness, and resource efficiency. A comparison with traditional cloud-based systems highlights the advantages of integrating edge and fog computing:</p>
<list list-type="bullet">
<list-item><p><bold>Latency:</bold> The proposed framework reduces latency by 60% compared to cloud-only systems.</p></list-item>
<list-item><p><bold>Cost:</bold> The distributed architecture of the framework reduces bandwidth and storage costs by 40%.</p></list-item>
<list-item><p><bold>Scalability:</bold> The framework supports larger agricultural operations without significant performance degradation, unlike traditional systems.</p></list-item>
<list-item><p><xref ref-type="fig" rid="F3">Figure 3</xref> demonstrates the scalability of the proposed framework by measuring system performance as the field size increases.</p></list-item>
</list>
<fig id="F3" position="float">
<object-id pub-id-type="doi">10.70389/journal.PJS.100191.g003</object-id>
<label>Fig 3</label>
<caption><title>Scalability test</title></caption>
<p><ext-link ext-link-type="uri" xlink:href="https://i0.wp.com/premierscience.com/wp-content/uploads/2025/15/pjs-25-1214-Figure-3.webp?">Figure 3</ext-link></p>
</fig>
<p>The scalability test gives more than 95% accuracy for our proposed algorithm for efficiency and sustainability in smart farming.</p>
</sec>
</sec>
<sec id="sec005" sec-type="conclusions">
<title>Conclusion</title>
<p>The integration of IoT, edge, fog, and cloud computing in smart agriculture offers a transformative solution to the challenges faced by the agricultural sector. The proposed framework demonstrates significant improvements in productivity, resource efficiency, and environmental sustainability. By enabling real-time data processing, localized decision-making, and advanced analytics, the framework addresses key limitations of traditional farming methods and cloud-only systems. The effectiveness of the framework is demonstrated by its accuracy of 95% in data collection and processing.</p>
</sec>
</body>
<back>
<fn-group>
<fn id="n1" fn-type="other">
<p>Additional material is published online only. To view please visit the journal online.</p>
<p><bold>Cite this as:</bold> Turare PJ, Malwatkar GM and Gawali TK. Transforming Agriculture with High Performance Computing: Sustainable Smart Farming &#x2013; An Experimental Study. Premier Journal of Science 2025;15:100191</p>
<p><bold>DOI:</bold> <ext-link ext-link-type="uri" xlink:href="https://doi.org/10.70389/PJS.100191">https://doi.org/10.70389/PJS.100191</ext-link></p>
</fn>
<fn id="n2" fn-type="other">
<p><bold>Ethical approval</bold></p>
<p>N/a</p>
</fn>
<fn id="n3" fn-type="other">
<p><bold>Consent</bold></p>
<p>N/a</p>
</fn>
<fn id="n4" fn-type="other">
<p><bold>Funding</bold></p>
<p>N/a</p>
</fn>
<fn id="n5" fn-type="conflict">
<p><bold>Conflicts of interest</bold></p>
<p>N/a</p>
</fn>
<fn id="n6" fn-type="other">
<p><bold>Author contribution</bold></p>
<p>Pravin J. Turare, Gajanan M. Malwatkar and Tukaram K. Gawali &#x2013; Conceptualization, Writing &#x2013; original draft, review and editing</p>
</fn>
<fn id="n7" fn-type="other">
<p><bold>Guarantor</bold></p>
<p>Pravin J. Turare</p>
</fn>
<fn id="n8" fn-type="other">
<p><bold>Provenance and peer-review</bold></p>
<p>Unsolicited and externally peer-reviewed</p>
</fn>
<fn id="n9" fn-type="other">
<p><bold>Data availability statement</bold></p>
<p>N/a</p>
</fn>
</fn-group>
<ref-list>
<title>References</title>
<ref id="ref1"><label>1</label><mixed-citation publication-type="journal"><string-name><surname>Alharbi</surname> <given-names>HA</given-names></string-name>, <string-name><surname>Aldossary</surname> <given-names>M</given-names></string-name>. <article-title>Energy-Efficient Edge-Fog-Cloud Architecture for IoT-Based Smart Agriculture Environment</article-title>. <source>IEEE Access</source>. <year>2021</year>;<volume>9</volume>:<fpage>111818</fpage>&#x2013;<lpage>111831</lpage>. <ext-link ext-link-type="uri" xlink:href="https://doi.org/10.1109/ACCESS.2021.3101397">https://doi.org/10.1109/ACCESS.2021.3101397</ext-link></mixed-citation></ref>
<ref id="ref2"><label>2</label><mixed-citation publication-type="journal"><string-name><surname>O&#x2019;Grady</surname> <given-names>MJ</given-names></string-name>, <string-name><surname>Langton</surname> <given-names>D</given-names></string-name>, <string-name><surname>O&#x2019;Hare</surname> <given-names>GMP</given-names></string-name>. <article-title>Edge computing: A tractable model for smart agriculture?</article-title> In: <person-group person-group-type="editor"><string-name><surname>Liakos</surname> <given-names>KP</given-names></string-name>, <string-name><surname>Busato</surname> <given-names>P</given-names></string-name>, <string-name><surname>Moshou</surname> <given-names>D</given-names></string-name>, <string-name><surname>Pearson</surname> <given-names>S</given-names></string-name>, <string-name><surname>Bochtis</surname> <given-names>D</given-names></string-name></person-group>, <role>editors</role>. <source>Artificial Intelligence in Agriculture</source>. <publisher-loc>Cham</publisher-loc>: <publisher-name>Springer</publisher-name>; <year>2019</year>. p. <fpage>42</fpage>&#x2013;<lpage>51</lpage>. <ext-link ext-link-type="uri" xlink:href="https://doi.org/10.1007/978-3-030-23198-1_4">https://doi.org/10.1007/978-3-030-23198-1_4</ext-link></mixed-citation></ref>
<ref id="ref3"><label>3</label><mixed-citation publication-type="journal"><string-name><surname>Mandal</surname> <given-names>S</given-names></string-name>, <string-name><surname>Yadav</surname> <given-names>A</given-names></string-name>, <string-name><surname>Panmea</surname> <given-names>FA</given-names></string-name>, <string-name><surname>Devi</surname> <given-names>KM</given-names></string-name>, <string-name><surname>Kumar SM</surname> <given-names>S.M.</given-names></string-name> <article-title>Adaption of smart applications in agriculture to enhance production</article-title>. <source>Smart Agricultural Technology</source>. <year>2024</year>;<volume>7</volume>:<fpage>100431</fpage>. <ext-link ext-link-type="uri" xlink:href="https://doi.org/10.1016/j.atech.2024.100431">https://doi.org/10.1016/j.atech.2024.100431</ext-link></mixed-citation></ref>
<ref id="ref4"><label>4</label><mixed-citation publication-type="journal"><string-name><surname>Kalyani</surname> <given-names>Y</given-names></string-name>, <string-name><surname>Collier</surname> <given-names>R</given-names></string-name>. <article-title>A Systematic Survey on the Role of Cloud, Fog, and Edge Computing Combination in Smart Agriculture</article-title>. <source>Sensors</source>. <year>2021</year>;<volume>21</volume>(<issue>17</issue>):<fpage>5922</fpage>. <ext-link ext-link-type="uri" xlink:href="https://doi.org/10.3390/s21175922">https://doi.org/10.3390/s21175922</ext-link></mixed-citation></ref>
<ref id="ref5"><label>5</label><mixed-citation publication-type="journal"><string-name><surname>Hosseinzadeh</surname> <given-names>M</given-names></string-name>, <string-name><surname>Quan Thanh</surname> <given-names>Tho</given-names></string-name>, <string-name><surname>Ali</surname> <given-names>S</given-names></string-name>, <string-name><surname>Rahmani</surname> <given-names>AM</given-names></string-name>, <string-name><surname>Souri</surname> <given-names>A</given-names></string-name>, <string-name><surname>Norouzi</surname> <given-names>M</given-names></string-name>, <etal>et al.</etal> <article-title>A Hybrid Service Selection and Composition Model for Cloud-Edge Computing in the Internet of Things</article-title>. <source>IEEE Access</source>. <year>2020</year>;<volume>8</volume>:<fpage>85939</fpage>&#x2013;<lpage>85949</lpage>. <ext-link ext-link-type="uri" xlink:href="https://doi.org/10.1109/ACCESS.2020.2992262">https://doi.org/10.1109/ACCESS.2020.2992262</ext-link></mixed-citation></ref>
<ref id="ref6"><label>6</label><mixed-citation publication-type="journal"><string-name><surname>Dhanaraju</surname> <given-names>M</given-names></string-name>, <string-name><surname>Poongodi</surname> <given-names>C</given-names></string-name>, <string-name><surname>Kumaraperumal</surname> <given-names>R</given-names></string-name>, <string-name><surname>Sellaperurmal</surname> <given-names>P</given-names></string-name>, <string-name><surname>Ragunath</surname> <given-names>K</given-names></string-name>. <article-title>Smart Farming: Internet of Things (IoT)-Based Sustainable Agriculture</article-title>. <source>Agriculture</source>. <year>2022</year>;<volume>12</volume>(<issue>10</issue>):<fpage>1745</fpage>. <ext-link ext-link-type="uri" xlink:href="https://doi.org/10.3390/agriculture12101745">https://doi.org/10.3390/agriculture12101745</ext-link></mixed-citation></ref>
<ref id="ref7"><label>7</label><mixed-citation publication-type="journal"><string-name><surname>Ara&#x00FA;jo</surname> <given-names>SO</given-names></string-name>, <string-name><surname>Peres</surname> <given-names>RS</given-names></string-name>, <string-name><surname>Barata</surname> <given-names>J</given-names></string-name>, <string-name><surname>Lidon</surname> <given-names>F</given-names></string-name>, <string-name><surname>Ramalho</surname> <given-names>JC</given-names></string-name>. <article-title>Characterising the Agriculture 4.0 Landscape&#x2014;Emerging Trends, Challenges and Opportunities</article-title>. <source>Agronomy</source>. <year>2021</year>;<volume>11</volume>(<issue>4</issue>):<fpage>667</fpage>. <ext-link ext-link-type="uri" xlink:href="https://doi.org/10.3390/agronomy11040667">https://doi.org/10.3390/agronomy11040667</ext-link></mixed-citation></ref>
<ref id="ref8"><label>8</label><mixed-citation publication-type="journal"><string-name><surname>Gawali</surname> <given-names>TK</given-names></string-name>, <string-name><surname>Deore</surname> <given-names>SS</given-names></string-name>. <article-title>Anisotropy Diffusion Kuwahara filtering and Dual-discriminator D2C Conditional Generative Adversarial Network Classification on Spatio-Temporal Transportation&#x2019;s Traffic images</article-title>. <source>In: 2024 2nd International Conference on Computer, Communication and Control (IC4)</source>; 2024 Jan 19-20; <publisher-loc>Indore, India</publisher-loc>. <year>2024</year>. p. <fpage>1</fpage>&#x2013;<lpage>5</lpage>. <ext-link ext-link-type="uri" xlink:href="https://doi.org/10.1109/IC457434.2024.10486326">https://doi.org/10.1109/IC457434.2024.10486326</ext-link></mixed-citation></ref>
<ref id="ref9"><label>9</label><mixed-citation publication-type="journal"><string-name><surname>Gawali</surname> <given-names>TK</given-names></string-name>, <string-name><surname>Deore</surname> <given-names>SS</given-names></string-name>. <article-title>Hybrid golden jackal fusion based recommendation system for spatio-temporal transportation&#x2019;s optimal traffic congestion and road condition classification</article-title>. <source>Multimedia Tools and Applications</source>. <year>2024</year>. <ext-link ext-link-type="uri" xlink:href="https://doi.org/10.1007/s11042-024-20133-x">https://doi.org/10.1007/s11042-024-20133-x</ext-link></mixed-citation></ref>
<ref id="ref10"><label>10</label><mixed-citation publication-type="journal"><string-name><surname>Gawali</surname> <given-names>TK</given-names></string-name>, <string-name><surname>Deore</surname> <given-names>SS</given-names></string-name>. <article-title>Dual-discriminator conditional Giza pyramids construction generative adversarial network based traffic density recognition using road vehicle images</article-title>. <source>Int J Mach Learn Cybern</source>. <year>2024</year>;<volume>15</volume>:<fpage>1007</fpage>&#x2013;<lpage>1024</lpage>. <ext-link ext-link-type="uri" xlink:href="https://doi.org/10.1007/s13042-023-01952-0">https://doi.org/10.1007/s13042-023-01952-0</ext-link></mixed-citation></ref>
<ref id="ref11"><label>11</label><mixed-citation publication-type="journal"><collab>Kaggle</collab>. <article-title>All agriculture related datasets for India</article-title>. <source>Kaggle</source>. Available from: <ext-link ext-link-type="uri" xlink:href="https://www.kaggle.com/datasets/thammuio/all-agriculture-related-datasets-for-india">https://www.kaggle.com/datasets/thammuio/all-agriculture-related-datasets-for-india</ext-link></mixed-citation></ref>
<ref id="ref12"><label>12</label><mixed-citation publication-type="journal"><collab>Kaggle</collab>. <article-title>IoT agriculture 2024</article-title>. <source>Kaggle</source>. Available from: <ext-link ext-link-type="uri" xlink:href="https://www.kaggle.com/datasets/wisam1985/iot-agriculture-2024">https://www.kaggle.com/datasets/wisam1985/iot-agriculture-2024</ext-link></mixed-citation></ref>
</ref-list>
</back>
</article>
