<?xml version="1.0" encoding="utf-8" standalone="yes" ?>
<rss version="2.0" xmlns:atom="http://www.w3.org/2005/Atom">
  <channel>
    <title>Tkwer</title>
    <link>http://tkwer.site/</link>
      <atom:link href="http://tkwer.site/index.xml" rel="self" type="application/rss+xml" />
    <description>Tkwer</description>
    <generator>Wowchemy (https://wowchemy.com)</generator><language>en-us</language><lastBuildDate>Mon, 27 Oct 2025 00:00:00 +0000</lastBuildDate>
    <image>
      <url>http://tkwer.site/media/icon_hu7b223d34e440dc6c77cd8a3432909a83_429855_512x512_fill_lanczos_center_3.png</url>
      <title>Tkwer</title>
      <link>http://tkwer.site/</link>
    </image>
    
    <item>
      <title>MMEngine</title>
      <link>http://tkwer.site/project/mmengine/</link>
      <pubDate>Mon, 27 Oct 2025 00:00:00 +0000</pubDate>
      <guid>http://tkwer.site/project/mmengine/</guid>
      <description>&lt;h1 id=&#34;overview&#34;&gt;Overview&lt;/h1&gt;
&lt;p&gt;MMEngine is designed as an efficient, next-generation multimodal data acquisition engine. Built with a blueprint node architecture, it provides an intuitive and user-friendly interaction experience for complex data processing workflows. The system enables seamless integration of multiple data sources with real-time processing and visualization capabilities.&lt;/p&gt;
&lt;div class=&#34;alert alert-note&#34;&gt;
  &lt;div&gt;
    &lt;strong&gt;Current Progress:&lt;/strong&gt; We are collecting multimodal air-writing text data from over 100 volunteers using MMEngine. Our research aims to recognize free OOV (out-of-vocabulary) text in single-radar scenarios through cross-modal contrastive distillation. The ultimate goal is to enable air-writing interaction as seen in science fiction movies.
  &lt;/div&gt;
&lt;/div&gt;


















&lt;figure  id=&#34;figure-mmengine-blueprint-node-interface-with-real-time-processing&#34;&gt;
  &lt;div class=&#34;d-flex justify-content-center&#34;&gt;
    &lt;div class=&#34;w-100&#34; &gt;&lt;img alt=&#34;MMEngine blueprint node interface with real-time processing.&#34;
           src=&#34;http://tkwer.site/project/mmengine/featured.gif&#34;
           loading=&#34;lazy&#34; data-zoomable /&gt;&lt;/div&gt;
  &lt;/div&gt;&lt;figcaption&gt;
      MMEngine blueprint node interface with real-time processing.
    &lt;/figcaption&gt;&lt;/figure&gt;

&lt;h1 id=&#34;core-features&#34;&gt;Core Features&lt;/h1&gt;
&lt;h2 id=&#34;blueprint-node-design&#34;&gt;Blueprint Node Design&lt;/h2&gt;
&lt;ul&gt;
&lt;li&gt;&lt;strong&gt;Visual Programming&lt;/strong&gt;: Drag-and-drop node-based workflow creation&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Modular Architecture&lt;/strong&gt;: Reusable processing blocks for different data types&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Real-time Connection&lt;/strong&gt;: Live data flow between nodes with instant feedback&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;User-Friendly Interface&lt;/strong&gt;: Intuitive design accessible to both beginners and experts&lt;/li&gt;
&lt;/ul&gt;
&lt;h2 id=&#34;multimodal-data-support&#34;&gt;Multimodal Data Support&lt;/h2&gt;
&lt;ul&gt;
&lt;li&gt;&lt;strong&gt;Leap Motion Integration&lt;/strong&gt;: ✅ Complete data acquisition and visualization&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Radar Data Nodes&lt;/strong&gt;: ✅ Complete mmWave radar integration&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;IMU Data Nodes&lt;/strong&gt;: ✅ Complete IMU data acquisition and visualization&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Extensible Framework&lt;/strong&gt;: Support for additional sensor types and data sources&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Synchronized Processing&lt;/strong&gt;: Multi-sensor data fusion with timestamp alignment&lt;/li&gt;
&lt;/ul&gt;
&lt;h2 id=&#34;advanced-processing-capabilities&#34;&gt;Advanced Processing Capabilities&lt;/h2&gt;
&lt;ul&gt;
&lt;li&gt;&lt;strong&gt;Deep Learning Nodes&lt;/strong&gt;: Pre-built neural network modules for real-time inference&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Signal Processing&lt;/strong&gt;: JSON-configurable processing nodes for custom algorithms&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Real-time Visualization&lt;/strong&gt;: Live data streaming and interactive displays&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Synchronized Annotation&lt;/strong&gt;: Multi-modal data labeling and ground truth generation&lt;/li&gt;
&lt;/ul&gt;
&lt;h1 id=&#34;technical-architecture&#34;&gt;Technical Architecture&lt;/h1&gt;
&lt;h2 id=&#34;node-system&#34;&gt;Node System&lt;/h2&gt;
&lt;ul&gt;
&lt;li&gt;&lt;strong&gt;Data Acquisition Nodes&lt;/strong&gt;: Sensor input interfaces (Leap Motion, Radar, etc.)&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Processing Nodes&lt;/strong&gt;: Signal processing, filtering, and transformation&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;ML Inference Nodes&lt;/strong&gt;: Real-time deep learning model execution&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Visualization Nodes&lt;/strong&gt;: Real-time plotting and data display&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Export Nodes&lt;/strong&gt;: Data saving and format conversion&lt;/li&gt;
&lt;/ul&gt;
&lt;h2 id=&#34;configuration-system&#34;&gt;Configuration System&lt;/h2&gt;
&lt;ul&gt;
&lt;li&gt;&lt;strong&gt;JSON-Based&lt;/strong&gt;: Flexible node parameter configuration&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Hot-Reload&lt;/strong&gt;: Runtime parameter updates without restart&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Template Library&lt;/strong&gt;: Pre-configured node templates for common tasks&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Custom Nodes&lt;/strong&gt;: User-defined processing blocks&lt;/li&gt;
&lt;/ul&gt;
&lt;h2 id=&#34;current-implementation-status&#34;&gt;Current Implementation Status&lt;/h2&gt;
&lt;h3 id=&#34;-completed-features&#34;&gt;✅ Completed Features&lt;/h3&gt;
&lt;ul&gt;
&lt;li&gt;&lt;strong&gt;Leap Motion Module&lt;/strong&gt;: Full data acquisition and visualization pipeline&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Blueprint Editor&lt;/strong&gt;: Visual node connection and workflow design&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Real-time Processing&lt;/strong&gt;: Low-latency data streaming architecture&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Basic Visualization&lt;/strong&gt;: Multi-dimensional data plotting capabilities&lt;/li&gt;
&lt;/ul&gt;
&lt;h3 id=&#34;-in-development&#34;&gt;🚧 In Development&lt;/h3&gt;
&lt;ul&gt;
&lt;li&gt;&lt;strong&gt;Advanced ML Nodes&lt;/strong&gt;: Extended deep learning model support&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Annotation Tools&lt;/strong&gt;: Enhanced labeling and ground truth features&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Performance Optimization&lt;/strong&gt;: GPU acceleration and memory management&lt;/li&gt;
&lt;/ul&gt;
&lt;h3 id=&#34;-planned-features&#34;&gt;🔮 Planned Features&lt;/h3&gt;
&lt;ul&gt;
&lt;li&gt;&lt;strong&gt;Multi-Sensor Fusion&lt;/strong&gt;: Advanced sensor data synchronization&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Cloud Integration&lt;/strong&gt;: Remote processing and data storage&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Plugin System&lt;/strong&gt;: Third-party node development framework&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Collaborative Tools&lt;/strong&gt;: Multi-user annotation and workflow sharing&lt;/li&gt;
&lt;/ul&gt;
&lt;h1 id=&#34;key-advantages&#34;&gt;Key Advantages&lt;/h1&gt;
&lt;h2 id=&#34;ease-of-use&#34;&gt;Ease of Use&lt;/h2&gt;
&lt;ul&gt;
&lt;li&gt;&lt;strong&gt;No Coding Required&lt;/strong&gt;: Visual programming eliminates complex scripting&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Rapid Prototyping&lt;/strong&gt;: Quick workflow creation and testing&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Real-time Feedback&lt;/strong&gt;: Instant visualization of processing results&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Flexible Configuration&lt;/strong&gt;: Easy parameter adjustment through GUI&lt;/li&gt;
&lt;/ul&gt;
&lt;h2 id=&#34;performance--scalability&#34;&gt;Performance &amp;amp; Scalability&lt;/h2&gt;
&lt;ul&gt;
&lt;li&gt;&lt;strong&gt;Efficient Processing&lt;/strong&gt;: Optimized data pipelines for real-time operation&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Modular Design&lt;/strong&gt;: Scalable architecture for complex workflows&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Resource Management&lt;/strong&gt;: Intelligent memory and CPU utilization&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Cross-Platform&lt;/strong&gt;: Compatible with Windows, Linux, and macOS&lt;/li&gt;
&lt;/ul&gt;
&lt;p&gt;MMEngine represents the future of multimodal data processing, making advanced sensor fusion and machine learning accessible through an intuitive visual interface.&lt;/p&gt;
</description>
    </item>
    
    <item>
      <title>RadarEngine</title>
      <link>http://tkwer.site/project/radarengine/</link>
      <pubDate>Wed, 27 Aug 2025 00:00:00 +0000</pubDate>
      <guid>http://tkwer.site/project/radarengine/</guid>
      <description>&lt;h1 id=&#34;overview&#34;&gt;Overview&lt;/h1&gt;
&lt;p&gt;RadarEngine is a C/C++ radar data acquisition and visualization software built with the ImGui framework. This tool is designed to dramatically simplify radar operation by integrating all essential functions into a single, user-friendly interface. Users can configure all radar parameters, update firmware, collect data, and visualize results in real-time without complex setup procedures.&lt;/p&gt;


















&lt;figure  id=&#34;figure-radarengine-imgui-interface-with-real-time-visualization&#34;&gt;
  &lt;div class=&#34;d-flex justify-content-center&#34;&gt;
    &lt;div class=&#34;w-100&#34; &gt;&lt;img alt=&#34;RadarEngine ImGui interface with real-time visualization.&#34;
           src=&#34;http://tkwer.site/project/radarengine/featured.gif&#34;
           loading=&#34;lazy&#34; data-zoomable /&gt;&lt;/div&gt;
  &lt;/div&gt;&lt;figcaption&gt;
      RadarEngine ImGui interface with real-time visualization.
    &lt;/figcaption&gt;&lt;/figure&gt;

&lt;h1 id=&#34;core-features&#34;&gt;Core Features&lt;/h1&gt;
&lt;h2 id=&#34;integrated-radar-management&#34;&gt;Integrated Radar Management&lt;/h2&gt;
&lt;ul&gt;
&lt;li&gt;&lt;strong&gt;Firmware Update&lt;/strong&gt;: Built-in firmware flashing and update capabilities&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Parameter Configuration&lt;/strong&gt;: All radar settings configurable through GUI&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Real-time Control&lt;/strong&gt;: Instant parameter adjustment with immediate feedback&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Device Detection&lt;/strong&gt;: Automatic radar module recognition and connection&lt;/li&gt;
&lt;/ul&gt;
&lt;h2 id=&#34;data-acquisition--processing&#34;&gt;Data Acquisition &amp;amp; Processing&lt;/h2&gt;
&lt;ul&gt;
&lt;li&gt;&lt;strong&gt;Raw Data Collection&lt;/strong&gt;: Direct radar signal capture and storage&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Signal Processing&lt;/strong&gt;: Built-in FFT, filtering, and preprocessing algorithms&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Real-time Processing&lt;/strong&gt;: Live signal analysis with minimal latency&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Data Export&lt;/strong&gt;: Multiple format support for further analysis&lt;/li&gt;
&lt;/ul&gt;
&lt;h2 id=&#34;visualization--monitoring&#34;&gt;Visualization &amp;amp; Monitoring&lt;/h2&gt;
&lt;ul&gt;
&lt;li&gt;&lt;strong&gt;Real-time Display&lt;/strong&gt;: Instant visualization of processed radar data&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Multiple Views&lt;/strong&gt;: Range-Doppler maps, spectrograms, and time-domain plots&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Interactive Interface&lt;/strong&gt;: ImGui-based responsive controls and displays&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Customizable Layout&lt;/strong&gt;: Flexible window arrangement for different use cases&lt;/li&gt;
&lt;/ul&gt;
&lt;h1 id=&#34;technical-details&#34;&gt;Technical Details&lt;/h1&gt;
&lt;h2 id=&#34;development-stack&#34;&gt;Development Stack&lt;/h2&gt;
&lt;ul&gt;
&lt;li&gt;&lt;strong&gt;Language&lt;/strong&gt;: C/C++ for high performance&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;GUI Framework&lt;/strong&gt;: ImGui for immediate mode interface&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Graphics&lt;/strong&gt;: OpenGL for real-time rendering&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Platform&lt;/strong&gt;: Cross-platform compatibility (Windows/Linux)&lt;/li&gt;
&lt;/ul&gt;
&lt;h2 id=&#34;current-status&#34;&gt;Current Status&lt;/h2&gt;
&lt;p&gt;This project is currently in &lt;strong&gt;beta/development stage&lt;/strong&gt;. Core functionalities are implemented and working, with ongoing improvements in:&lt;/p&gt;
&lt;ul&gt;
&lt;li&gt;User interface refinement&lt;/li&gt;
&lt;li&gt;Additional signal processing algorithms&lt;/li&gt;
&lt;li&gt;Enhanced visualization options&lt;/li&gt;
&lt;li&gt;Performance optimization&lt;/li&gt;
&lt;/ul&gt;
&lt;h1 id=&#34;key-advantages&#34;&gt;Key Advantages&lt;/h1&gt;
&lt;h2 id=&#34;simplified-operation&#34;&gt;Simplified Operation&lt;/h2&gt;
&lt;ul&gt;
&lt;li&gt;&lt;strong&gt;One-Stop Solution&lt;/strong&gt;: All radar operations in a single application&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;No Complex Setup&lt;/strong&gt;: Eliminates traditional multi-step radar configuration&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Intuitive Interface&lt;/strong&gt;: Easy-to-use GUI for both beginners and experts&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Real-time Feedback&lt;/strong&gt;: Immediate visualization of parameter changes&lt;/li&gt;
&lt;/ul&gt;
&lt;h2 id=&#34;performance--efficiency&#34;&gt;Performance &amp;amp; Efficiency&lt;/h2&gt;
&lt;ul&gt;
&lt;li&gt;&lt;strong&gt;Low Latency&lt;/strong&gt;: C/C++ implementation for optimal performance&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Memory Efficient&lt;/strong&gt;: Optimized for continuous operation&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Real-time Processing&lt;/strong&gt;: Live data analysis without delays&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Responsive UI&lt;/strong&gt;: ImGui ensures smooth user interaction&lt;/li&gt;
&lt;/ul&gt;
&lt;p&gt;This software aims to bridge the gap between complex radar hardware and user-friendly operation, making radar technology more accessible for research, development, and practical applications.&lt;/p&gt;
</description>
    </item>
    
    <item>
      <title>Domain-Generalized mmWave Gesture Recognition via Multi-View Learning</title>
      <link>http://tkwer.site/publication/preprint1/</link>
      <pubDate>Mon, 07 Apr 2025 00:00:00 +0000</pubDate>
      <guid>http://tkwer.site/publication/preprint1/</guid>
      <description>&lt;div class=&#34;alert alert-note&#34;&gt;
  &lt;div&gt;
    Create your slides in Markdown - click the &lt;em&gt;Slides&lt;/em&gt; button to check out the example.
  &lt;/div&gt;
&lt;/div&gt;
&lt;p&gt;Supplementary notes can be added here, including &lt;a href=&#34;https://wowchemy.com/docs/writing-markdown-latex/&#34; target=&#34;_blank&#34; rel=&#34;noopener&#34;&gt;code, math, and images&lt;/a&gt;.&lt;/p&gt;
</description>
    </item>
    
    <item>
      <title>mmScribe: Streaming End-to-End Aerial Handwriting Text Translation via mmWave Radar</title>
      <link>http://tkwer.site/publication/preprint/</link>
      <pubDate>Mon, 07 Apr 2025 00:00:00 +0000</pubDate>
      <guid>http://tkwer.site/publication/preprint/</guid>
      <description>&lt;div class=&#34;alert alert-note&#34;&gt;
  &lt;div&gt;
    Create your slides in Markdown - click the &lt;em&gt;Slides&lt;/em&gt; button to check out the example.
  &lt;/div&gt;
&lt;/div&gt;
&lt;p&gt;Supplementary notes can be added here, including &lt;a href=&#34;https://wowchemy.com/docs/writing-markdown-latex/&#34; target=&#34;_blank&#34; rel=&#34;noopener&#34;&gt;code, math, and images&lt;/a&gt;.&lt;/p&gt;
</description>
    </item>
    
    <item>
      <title>mmScribe</title>
      <link>http://tkwer.site/project/mmscribe/</link>
      <pubDate>Fri, 27 Sep 2024 00:00:00 +0000</pubDate>
      <guid>http://tkwer.site/project/mmscribe/</guid>
      <description>&lt;h1 id=&#34;overview&#34;&gt;Overview&lt;/h1&gt;
&lt;p&gt;&lt;strong&gt;mmScribe&lt;/strong&gt; is an innovative Aerial Handwriting system that enables contactless human-computer interaction through millimeter-wave radar technology. The system accurately captures user handwriting and converts it into text input, providing a novel approach to human-computer interaction.&lt;/p&gt;


















&lt;figure  id=&#34;figure-mmscribe-real-time-aerial-handwriting-system&#34;&gt;
  &lt;div class=&#34;d-flex justify-content-center&#34;&gt;
    &lt;div class=&#34;w-100&#34; &gt;&lt;img alt=&#34;mmScribe Real-time Aerial Handwriting System&#34;
           src=&#34;http://tkwer.site/project/mmscribe/featured.gif&#34;
           loading=&#34;lazy&#34; data-zoomable /&gt;&lt;/div&gt;
  &lt;/div&gt;&lt;figcaption&gt;
      mmScribe Real-time Aerial Handwriting System
    &lt;/figcaption&gt;&lt;/figure&gt;

&lt;h1 id=&#34;key-features&#34;&gt;Key Features&lt;/h1&gt;
&lt;ul&gt;
&lt;li&gt;🎯 &lt;strong&gt;Streaming Aerial Handwriting Recognition&lt;/strong&gt; - Real-time gesture-to-text conversion&lt;/li&gt;
&lt;li&gt;📱 &lt;strong&gt;Cross-platform Compatibility&lt;/strong&gt; - Supports Android, Windows, and Raspberry Pi&lt;/li&gt;
&lt;li&gt;⚡ &lt;strong&gt;Real-time Response&lt;/strong&gt; - Low latency for seamless interaction&lt;/li&gt;
&lt;li&gt;🔒 &lt;strong&gt;Privacy-Preserving&lt;/strong&gt; - No camera required, protecting user privacy&lt;/li&gt;
&lt;li&gt;🛠️ &lt;strong&gt;Easy Integration&lt;/strong&gt; - Simple integration with existing systems&lt;/li&gt;
&lt;li&gt;📊 &lt;strong&gt;Comprehensive Tools&lt;/strong&gt; - Complete data analysis and processing toolkit&lt;/li&gt;
&lt;/ul&gt;
&lt;h1 id=&#34;platform-support&#34;&gt;Platform Support&lt;/h1&gt;
&lt;p&gt;mmScribe supports multiple platforms through our runtime system:&lt;/p&gt;
&lt;table&gt;
&lt;thead&gt;
&lt;tr&gt;
&lt;th&gt;Platform&lt;/th&gt;
&lt;th&gt;Status&lt;/th&gt;
&lt;th&gt;Description&lt;/th&gt;
&lt;/tr&gt;
&lt;/thead&gt;
&lt;tbody&gt;
&lt;tr&gt;
&lt;td&gt;&lt;strong&gt;Android&lt;/strong&gt;&lt;/td&gt;
&lt;td&gt;✅ Released&lt;/td&gt;
&lt;td&gt;Full APK available for download&lt;/td&gt;
&lt;/tr&gt;
&lt;tr&gt;
&lt;td&gt;&lt;strong&gt;Windows&lt;/strong&gt;&lt;/td&gt;
&lt;td&gt;✅ Source Code&lt;/td&gt;
&lt;td&gt;Complete source code and libraries&lt;/td&gt;
&lt;/tr&gt;
&lt;tr&gt;
&lt;td&gt;&lt;strong&gt;Raspberry Pi&lt;/strong&gt;&lt;/td&gt;
&lt;td&gt;✅ Source Code&lt;/td&gt;
&lt;td&gt;Optimized for embedded systems&lt;/td&gt;
&lt;/tr&gt;
&lt;/tbody&gt;
&lt;/table&gt;
&lt;h2 id=&#34;hardware-requirements&#34;&gt;Hardware Requirements&lt;/h2&gt;
&lt;ul&gt;
&lt;li&gt;&lt;strong&gt;ESP32-BGT60TR13 Radar Module&lt;/strong&gt;
&lt;ul&gt;
&lt;li&gt;58-63GHz mmWave Radar&lt;/li&gt;
&lt;li&gt;USB/UART Interface&lt;/li&gt;
&lt;li&gt;5V Power Supply&lt;/li&gt;
&lt;/ul&gt;
&lt;/li&gt;
&lt;/ul&gt;
&lt;h2 id=&#34;quick-installation&#34;&gt;Quick Installation&lt;/h2&gt;
&lt;h3 id=&#34;android&#34;&gt;Android&lt;/h3&gt;
&lt;div class=&#34;highlight&#34;&gt;&lt;pre tabindex=&#34;0&#34; class=&#34;chroma&#34;&gt;&lt;code class=&#34;language-bash&#34; data-lang=&#34;bash&#34;&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;&lt;span class=&#34;c1&#34;&gt;# Download and install APK&lt;/span&gt;
&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;wget https://github.com/Tkwer/mmScribe/releases/latest/download/mmScribe.apk
&lt;/span&gt;&lt;/span&gt;&lt;/code&gt;&lt;/pre&gt;&lt;/div&gt;&lt;h3 id=&#34;windowsraspberry-pi&#34;&gt;Windows/Raspberry Pi&lt;/h3&gt;
&lt;div class=&#34;highlight&#34;&gt;&lt;pre tabindex=&#34;0&#34; class=&#34;chroma&#34;&gt;&lt;code class=&#34;language-bash&#34; data-lang=&#34;bash&#34;&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;&lt;span class=&#34;c1&#34;&gt;# Clone the repository&lt;/span&gt;
&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;git clone https://github.com/Tkwer/mmScribe.git
&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;&lt;span class=&#34;nb&#34;&gt;cd&lt;/span&gt; mmScribe
&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;
&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;&lt;span class=&#34;c1&#34;&gt;# Install dependencies&lt;/span&gt;
&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;pip install -r requirements.txt
&lt;/span&gt;&lt;/span&gt;&lt;/code&gt;&lt;/pre&gt;&lt;/div&gt;&lt;h1 id=&#34;dataset&#34;&gt;Dataset&lt;/h1&gt;
&lt;p&gt;We provide a comprehensive dataset for aerial handwriting recognition using millimeter-wave radar:&lt;/p&gt;
&lt;ul&gt;
&lt;li&gt;🧑‍🤝‍🧑 &lt;strong&gt;12 participants&lt;/strong&gt; (6 males, 6 females)&lt;/li&gt;
&lt;li&gt;📝 &lt;strong&gt;15,488 total samples&lt;/strong&gt; across all participants&lt;/li&gt;
&lt;li&gt;📊 &lt;strong&gt;Rich feature set&lt;/strong&gt; including micro-Doppler and range-time data&lt;/li&gt;
&lt;li&gt;🎯 &lt;strong&gt;Ground truth data&lt;/strong&gt; from Leap Motion controller&lt;/li&gt;
&lt;/ul&gt;
&lt;h3 id=&#34;dataset-structure&#34;&gt;Dataset Structure&lt;/h3&gt;
&lt;div class=&#34;highlight&#34;&gt;&lt;pre tabindex=&#34;0&#34; class=&#34;chroma&#34;&gt;&lt;code class=&#34;language-fallback&#34; data-lang=&#34;fallback&#34;&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;dataset/
&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;├── datas1/    # Reserved dataset
&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;├── datas2/    # Participant 001 (1212 samples)
&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;├── datas3/    # Participant 002 (1202 samples)
&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;...
&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;└── datas14/   # Participant 013 (1192 samples)
&lt;/span&gt;&lt;/span&gt;&lt;/code&gt;&lt;/pre&gt;&lt;/div&gt;&lt;h1 id=&#34;quick-start&#34;&gt;Quick Start&lt;/h1&gt;
&lt;h3 id=&#34;prerequisites&#34;&gt;Prerequisites&lt;/h3&gt;
&lt;ul&gt;
&lt;li&gt;Python 3.8 or higher&lt;/li&gt;
&lt;li&gt;CUDA-compatible GPU (optional, for faster processing)&lt;/li&gt;
&lt;li&gt;Compatible radar hardware (ESP32-BGT60TR13)&lt;/li&gt;
&lt;/ul&gt;
&lt;h3 id=&#34;basic-usage&#34;&gt;Basic Usage&lt;/h3&gt;
&lt;ol&gt;
&lt;li&gt;&lt;strong&gt;Select Dataset&lt;/strong&gt; - Choose from our comprehensive dataset&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Run Training&lt;/strong&gt;:
&lt;div class=&#34;highlight&#34;&gt;&lt;pre tabindex=&#34;0&#34; class=&#34;chroma&#34;&gt;&lt;code class=&#34;language-bash&#34; data-lang=&#34;bash&#34;&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;python main.py
&lt;/span&gt;&lt;/span&gt;&lt;/code&gt;&lt;/pre&gt;&lt;/div&gt;&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Deploy&lt;/strong&gt; - Use the trained model on your target platform&lt;/li&gt;
&lt;/ol&gt;
&lt;h1 id=&#34;technical-details&#34;&gt;Technical Details&lt;/h1&gt;
&lt;p&gt;The system leverages millimeter-wave radar technology to capture fine-grained hand movements in 3D space. By analyzing micro-Doppler signatures and range-time data, mmScribe can accurately recognize air handwriting without requiring any physical contact or camera-based tracking.&lt;/p&gt;
&lt;h3 id=&#34;key-advantages&#34;&gt;Key Advantages&lt;/h3&gt;
&lt;ul&gt;
&lt;li&gt;&lt;strong&gt;Privacy-First Design&lt;/strong&gt;: No visual data collection&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Works in Any Lighting&lt;/strong&gt;: Independent of ambient light conditions&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Low Power Consumption&lt;/strong&gt;: Efficient radar-based sensing&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Robust Performance&lt;/strong&gt;: Resistant to environmental interference&lt;/li&gt;
&lt;/ul&gt;
&lt;h1 id=&#34;applications&#34;&gt;Applications&lt;/h1&gt;
&lt;ul&gt;
&lt;li&gt;📝 Contactless text input for smart devices&lt;/li&gt;
&lt;li&gt;🏥 Sterile environment interaction (medical settings)&lt;/li&gt;
&lt;li&gt;🎮 Gaming and entertainment interfaces&lt;/li&gt;
&lt;li&gt;🏭 Industrial control systems&lt;/li&gt;
&lt;li&gt;♿ Accessibility solutions for users with mobility challenges&lt;/li&gt;
&lt;/ul&gt;
&lt;h1 id=&#34;documentation&#34;&gt;Documentation&lt;/h1&gt;
&lt;p&gt;For detailed documentation, including:&lt;/p&gt;
&lt;ul&gt;
&lt;li&gt;Runtime setup guides&lt;/li&gt;
&lt;li&gt;Dataset usage instructions&lt;/li&gt;
&lt;li&gt;API references&lt;/li&gt;
&lt;li&gt;Training tutorials&lt;/li&gt;
&lt;/ul&gt;
&lt;p&gt;Please visit the &lt;a href=&#34;https://github.com/Tkwer/mmScribe&#34; target=&#34;_blank&#34; rel=&#34;noopener&#34;&gt;GitHub Repository&lt;/a&gt;.&lt;/p&gt;
&lt;h1 id=&#34;license&#34;&gt;License&lt;/h1&gt;
&lt;p&gt;This project is licensed under the MIT License - see the LICENSE file for details.&lt;/p&gt;
&lt;hr&gt;
&lt;p&gt;⭐ &lt;strong&gt;If you find this project useful, please consider giving it a star on GitHub!&lt;/strong&gt;&lt;/p&gt;
</description>
    </item>
    
    <item>
      <title>Joint position estimation for hand motion using MIMO FMCW mmWave radar</title>
      <link>http://tkwer.site/publication/journal-article1/</link>
      <pubDate>Sun, 01 Sep 2024 00:00:00 +0000</pubDate>
      <guid>http://tkwer.site/publication/journal-article1/</guid>
      <description>&lt;div class=&#34;alert alert-note&#34;&gt;
  &lt;div&gt;
    Click the &lt;em&gt;Cite&lt;/em&gt; button above to demo the feature to enable visitors to import publication metadata into their reference management software.
  &lt;/div&gt;
&lt;/div&gt;
&lt;div class=&#34;alert alert-note&#34;&gt;
  &lt;div&gt;
    Create your slides in Markdown - click the &lt;em&gt;Slides&lt;/em&gt; button to check out the example.
  &lt;/div&gt;
&lt;/div&gt;
&lt;p&gt;Supplementary notes can be added here, including &lt;a href=&#34;https://wowchemy.com/docs/writing-markdown-latex/&#34; target=&#34;_blank&#34; rel=&#34;noopener&#34;&gt;code, math, and images&lt;/a&gt;.&lt;/p&gt;
</description>
    </item>
    
    <item>
      <title>HCI Gesture Control</title>
      <link>http://tkwer.site/project/hci_example/</link>
      <pubDate>Sat, 27 Jul 2024 00:00:00 +0000</pubDate>
      <guid>http://tkwer.site/project/hci_example/</guid>
      <description>&lt;h1 id=&#34;overview&#34;&gt;Overview&lt;/h1&gt;
&lt;p&gt;This project presents a revolutionary gesture control system for smart TVs and home entertainment devices using mmWave radar technology. The system enables mid-to-long range gesture recognition without requiring users to hold any physical devices, providing a truly hands-free smart home experience.&lt;/p&gt;


















&lt;figure  id=&#34;figure-device-free-gesture-control-system-for-smart-tv&#34;&gt;
  &lt;div class=&#34;d-flex justify-content-center&#34;&gt;
    &lt;div class=&#34;w-100&#34; &gt;&lt;img alt=&#34;Device-free gesture control system for smart TV.&#34;
           src=&#34;http://tkwer.site/project/hci_example/featured.gif&#34;
           loading=&#34;lazy&#34; data-zoomable /&gt;&lt;/div&gt;
  &lt;/div&gt;&lt;figcaption&gt;
      Device-free gesture control system for smart TV.
    &lt;/figcaption&gt;&lt;/figure&gt;

&lt;h1 id=&#34;key-features&#34;&gt;Key Features&lt;/h1&gt;
&lt;h2 id=&#34;device-free-interaction&#34;&gt;Device-Free Interaction&lt;/h2&gt;
&lt;ul&gt;
&lt;li&gt;&lt;strong&gt;No Remote Required&lt;/strong&gt;: Control your TV and smart devices through natural hand gestures&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Mid-to-Long Range Detection&lt;/strong&gt;: Effective gesture recognition at distances of 1-5 meters&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Multiple Gesture Support&lt;/strong&gt;: Swipe, point, wave, and custom gesture patterns&lt;/li&gt;
&lt;/ul&gt;
&lt;h2 id=&#34;seamless-integration&#34;&gt;Seamless Integration&lt;/h2&gt;
&lt;ul&gt;
&lt;li&gt;&lt;strong&gt;HID Device Simulation&lt;/strong&gt;: Acts as a standard Human Interface Device (HID)&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Universal Compatibility&lt;/strong&gt;: Works with any smart TV, streaming device, or smart home system&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Plug-and-Play&lt;/strong&gt;: No additional software installation required on target devices&lt;/li&gt;
&lt;/ul&gt;
&lt;h2 id=&#34;smart-home-ready&#34;&gt;Smart Home Ready&lt;/h2&gt;
&lt;ul&gt;
&lt;li&gt;&lt;strong&gt;Multi-Device Control&lt;/strong&gt;: Switch between controlling TV, sound system, lights, and other IoT devices&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Context-Aware&lt;/strong&gt;: Automatically adapts gesture mapping based on active device&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Real-Time Response&lt;/strong&gt;: Low-latency gesture recognition for smooth user experience&lt;/li&gt;
&lt;/ul&gt;
&lt;h1 id=&#34;technical-advantages&#34;&gt;Technical Advantages&lt;/h1&gt;
&lt;h2 id=&#34;mmwave-radar-technology&#34;&gt;mmWave Radar Technology&lt;/h2&gt;
&lt;ul&gt;
&lt;li&gt;&lt;strong&gt;Privacy-Friendly&lt;/strong&gt;: No camera required, protecting user privacy&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Lighting Independent&lt;/strong&gt;: Works in complete darkness or bright light&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Weather Resistant&lt;/strong&gt;: Unaffected by ambient conditions&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;High Precision&lt;/strong&gt;: Accurate gesture detection and classification&lt;/li&gt;
&lt;/ul&gt;
&lt;h2 id=&#34;hid-integration&#34;&gt;HID Integration&lt;/h2&gt;
&lt;ul&gt;
&lt;li&gt;&lt;strong&gt;Standard Protocol&lt;/strong&gt;: Uses USB HID protocol for universal compatibility&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Custom Mapping&lt;/strong&gt;: Configurable gesture-to-command mapping&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Multi-Modal Output&lt;/strong&gt;: Supports keyboard, mouse, and media control commands&lt;/li&gt;
&lt;/ul&gt;
&lt;h1 id=&#34;applications&#34;&gt;Applications&lt;/h1&gt;
&lt;table&gt;
&lt;thead&gt;
&lt;tr&gt;
&lt;th style=&#34;text-align:center&#34;&gt;Smart TV Control&lt;/th&gt;
&lt;th style=&#34;text-align:center&#34;&gt;Audio System&lt;/th&gt;
&lt;th style=&#34;text-align:center&#34;&gt;Lighting Control&lt;/th&gt;
&lt;/tr&gt;
&lt;/thead&gt;
&lt;tbody&gt;
&lt;tr&gt;
&lt;td style=&#34;text-align:center&#34;&gt;

















&lt;figure  id=&#34;figure-volume-channel-menu-navigation&#34;&gt;
  &lt;div class=&#34;d-flex justify-content-center&#34;&gt;
    &lt;div class=&#34;w-100&#34; &gt;&lt;img alt=&#34;Volume, channel, menu navigation&#34; srcset=&#34;
               /project/hci_example/img/tv_control_huc58653a2c88c2e23774ea095083fc802_1738281_12f7ceca4b8f3ca8f0c683c5f2c301e2.webp 400w,
               /project/hci_example/img/tv_control_huc58653a2c88c2e23774ea095083fc802_1738281_6e35c399ccb67b0ee32215ad24862c20.webp 760w,
               /project/hci_example/img/tv_control_huc58653a2c88c2e23774ea095083fc802_1738281_1200x1200_fit_q75_h2_lanczos_3.webp 1200w&#34;
               src=&#34;http://tkwer.site/project/hci_example/img/tv_control_huc58653a2c88c2e23774ea095083fc802_1738281_12f7ceca4b8f3ca8f0c683c5f2c301e2.webp&#34;
               width=&#34;760&#34;
               height=&#34;760&#34;
               loading=&#34;lazy&#34; data-zoomable /&gt;&lt;/div&gt;
  &lt;/div&gt;&lt;figcaption&gt;
      Volume, channel, menu navigation
    &lt;/figcaption&gt;&lt;/figure&gt;
&lt;/td&gt;
&lt;td style=&#34;text-align:center&#34;&gt;

















&lt;figure  id=&#34;figure-play-pause-volume-control&#34;&gt;
  &lt;div class=&#34;d-flex justify-content-center&#34;&gt;
    &lt;div class=&#34;w-100&#34; &gt;&lt;img alt=&#34;Play, pause, volume control&#34; srcset=&#34;
               /project/hci_example/img/audio_control_hufc2c9e63252b0df53ac1e7f3895c654a_1715998_67caf8a811fc38f7c11814d024c8493b.webp 400w,
               /project/hci_example/img/audio_control_hufc2c9e63252b0df53ac1e7f3895c654a_1715998_6a3a769d74aa34012260424292a758ae.webp 760w,
               /project/hci_example/img/audio_control_hufc2c9e63252b0df53ac1e7f3895c654a_1715998_1200x1200_fit_q75_h2_lanczos_3.webp 1200w&#34;
               src=&#34;http://tkwer.site/project/hci_example/img/audio_control_hufc2c9e63252b0df53ac1e7f3895c654a_1715998_67caf8a811fc38f7c11814d024c8493b.webp&#34;
               width=&#34;760&#34;
               height=&#34;760&#34;
               loading=&#34;lazy&#34; data-zoomable /&gt;&lt;/div&gt;
  &lt;/div&gt;&lt;figcaption&gt;
      Play, pause, volume control
    &lt;/figcaption&gt;&lt;/figure&gt;
&lt;/td&gt;
&lt;td style=&#34;text-align:center&#34;&gt;

















&lt;figure  id=&#34;figure-onoff-brightness-color&#34;&gt;
  &lt;div class=&#34;d-flex justify-content-center&#34;&gt;
    &lt;div class=&#34;w-100&#34; &gt;&lt;img alt=&#34;On/off, brightness, color&#34; srcset=&#34;
               /project/hci_example/img/light_control_hu378304d52cc44c2b3d08348ea4bc9a88_1701034_1a93c36b92ee58ca2649a2b9f7407e7e.webp 400w,
               /project/hci_example/img/light_control_hu378304d52cc44c2b3d08348ea4bc9a88_1701034_761c6758f88b1b6e405e74c8159c337f.webp 760w,
               /project/hci_example/img/light_control_hu378304d52cc44c2b3d08348ea4bc9a88_1701034_1200x1200_fit_q75_h2_lanczos_3.webp 1200w&#34;
               src=&#34;http://tkwer.site/project/hci_example/img/light_control_hu378304d52cc44c2b3d08348ea4bc9a88_1701034_1a93c36b92ee58ca2649a2b9f7407e7e.webp&#34;
               width=&#34;760&#34;
               height=&#34;760&#34;
               loading=&#34;lazy&#34; data-zoomable /&gt;&lt;/div&gt;
  &lt;/div&gt;&lt;figcaption&gt;
      On/off, brightness, color
    &lt;/figcaption&gt;&lt;/figure&gt;
&lt;/td&gt;
&lt;/tr&gt;
&lt;/tbody&gt;
&lt;/table&gt;
&lt;h2 id=&#34;use-cases&#34;&gt;Use Cases&lt;/h2&gt;
&lt;ul&gt;
&lt;li&gt;&lt;strong&gt;Living Room Entertainment&lt;/strong&gt;: Control TV, streaming services, and audio systems&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Smart Home Automation&lt;/strong&gt;: Manage lights, curtains, and climate control&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Accessibility&lt;/strong&gt;: Hands-free control for users with mobility limitations&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Hygiene-Critical Environments&lt;/strong&gt;: Touch-free interaction in medical or food service settings&lt;/li&gt;
&lt;/ul&gt;
&lt;h1 id=&#34;system-architecture&#34;&gt;System Architecture&lt;/h1&gt;
&lt;p&gt;The system consists of:&lt;/p&gt;
&lt;ol&gt;
&lt;li&gt;&lt;strong&gt;mmWave Radar Sensor&lt;/strong&gt;: Captures gesture data in real-time&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;AI Processing Unit&lt;/strong&gt;: Classifies gestures using machine learning&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;HID Interface&lt;/strong&gt;: Translates gestures to standard device commands&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Configuration Software&lt;/strong&gt;: Allows custom gesture mapping and device profiles&lt;/li&gt;
&lt;/ol&gt;
&lt;p&gt;This innovative approach bridges the gap between natural human interaction and smart home technology, making device control more intuitive and accessible than ever before.&lt;/p&gt;
</description>
    </item>
    
    <item>
      <title>MMHTSR: In-air handwriting trajectory sensing and reconstruction based on mmWave radar</title>
      <link>http://tkwer.site/publication/journal-article/</link>
      <pubDate>Fri, 01 Sep 2023 00:00:00 +0000</pubDate>
      <guid>http://tkwer.site/publication/journal-article/</guid>
      <description>&lt;div class=&#34;alert alert-note&#34;&gt;
  &lt;div&gt;
    Click the &lt;em&gt;Cite&lt;/em&gt; button above to demo the feature to enable visitors to import publication metadata into their reference management software.
  &lt;/div&gt;
&lt;/div&gt;
&lt;div class=&#34;alert alert-note&#34;&gt;
  &lt;div&gt;
    Create your slides in Markdown - click the &lt;em&gt;Slides&lt;/em&gt; button to check out the example.
  &lt;/div&gt;
&lt;/div&gt;
&lt;p&gt;Supplementary notes can be added here, including &lt;a href=&#34;https://wowchemy.com/docs/writing-markdown-latex/&#34; target=&#34;_blank&#34; rel=&#34;noopener&#34;&gt;code, math, and images&lt;/a&gt;.&lt;/p&gt;
</description>
    </item>
    
    <item>
      <title>MMHTSR</title>
      <link>http://tkwer.site/project/mmhtsr/</link>
      <pubDate>Sun, 27 Aug 2023 00:00:00 +0000</pubDate>
      <guid>http://tkwer.site/project/mmhtsr/</guid>
      <description>&lt;!-- &lt;link rel=&#34;stylesheet&#34; type=&#34;text/css&#34; href=&#34;css/zzsc.css&#34;&gt; --&gt;
&lt;!-- &lt;link rel=&#39;stylesheet&#39; id=&#39;jquery-gif-css&#39;  href=&#39;css/jquery.gif.css&#39; type=&#39;text/css&#39; media=&#39;all&#39; /&gt; --&gt;
&lt;h1 id=&#34;overview&#34;&gt;Overview&lt;/h1&gt;
&lt;p&gt;MMHTSR is a framework based on Texas Instruments&amp;rsquo; low-cost millimeter-wave radar for aerial trajectory sensing and reconstruction. We will be presenting our research work on this system. Additionally, we have developed a real-time system capable of online reconstruction and accurate recognition of in-air handwritten trajectory characters and gestures, as shown in the animated image below.&lt;/p&gt;


















&lt;figure  id=&#34;figure-real-time-system&#34;&gt;
  &lt;div class=&#34;d-flex justify-content-center&#34;&gt;
    &lt;div class=&#34;w-100&#34; &gt;&lt;img alt=&#34;Real-time System. &#34; srcset=&#34;
               /project/mmhtsr/img/1_huf4e62de5c55bbd1d4e23cd8aaf8d3cd9_437855_b5a4a7a28ae55b553073bb7487d1ef80.webp 400w,
               /project/mmhtsr/img/1_huf4e62de5c55bbd1d4e23cd8aaf8d3cd9_437855_e26c6082f74e04e596b39314db76f3a1.webp 760w,
               /project/mmhtsr/img/1_huf4e62de5c55bbd1d4e23cd8aaf8d3cd9_437855_1200x1200_fit_q75_h2_lanczos.webp 1200w&#34;
               src=&#34;http://tkwer.site/project/mmhtsr/img/1_huf4e62de5c55bbd1d4e23cd8aaf8d3cd9_437855_b5a4a7a28ae55b553073bb7487d1ef80.webp&#34;
               width=&#34;760&#34;
               height=&#34;413&#34;
               loading=&#34;lazy&#34; data-zoomable /&gt;&lt;/div&gt;
  &lt;/div&gt;&lt;figcaption&gt;
      Real-time System.
    &lt;/figcaption&gt;&lt;/figure&gt;

&lt;h1 id=&#34;dataset-vs-visualization&#34;&gt;Dataset vs Visualization&lt;/h1&gt;
&lt;h2 id=&#34;dataset&#34;&gt;Dataset&lt;/h2&gt;
&lt;p&gt;At the same time, we have made our dataset publicly available, comprising over 10,000 data samples totaling approximately 130GB of raw radar signal data. This dataset encompasses 30 different types of in-air handwritten characters and gestures.&lt;/p&gt;
&lt;!-- &lt;img src=&#34;img\2.jpg&#34; style=&#34;zoom:50%&#34; /&gt; --&gt;


















&lt;figure  id=&#34;figure-26-uppercase-english-alphabet-symbols-and-4-types-of-gesture-actions-vs-dataset-folder&#34;&gt;
  &lt;div class=&#34;d-flex justify-content-center&#34;&gt;
    &lt;div class=&#34;w-100&#34; &gt;&lt;img alt=&#34;26 uppercase English alphabet symbols and 4 types of gesture actions vs dataset folder. &#34; srcset=&#34;
               /project/mmhtsr/img/2_huc93afc0df973e9ecb2479aa6055b47fc_363322_374059362076b2556cff79c431414718.webp 400w,
               /project/mmhtsr/img/2_huc93afc0df973e9ecb2479aa6055b47fc_363322_206b0d1b554bf0d88d5dd3cc038f351d.webp 760w,
               /project/mmhtsr/img/2_huc93afc0df973e9ecb2479aa6055b47fc_363322_1200x1200_fit_q75_h2_lanczos.webp 1200w&#34;
               src=&#34;http://tkwer.site/project/mmhtsr/img/2_huc93afc0df973e9ecb2479aa6055b47fc_363322_374059362076b2556cff79c431414718.webp&#34;
               width=&#34;70%&#34;
               height=&#34;70%&#34;
               loading=&#34;lazy&#34; data-zoomable /&gt;&lt;/div&gt;
  &lt;/div&gt;&lt;figcaption&gt;
      26 uppercase English alphabet symbols and 4 types of gesture actions vs dataset folder.
    &lt;/figcaption&gt;&lt;/figure&gt;

&lt;!-- ![&#34;example&#34;](img\2.jpg ){:height=&#34;10%&#34; width=&#34;10%&#34;} --&gt;
&lt;p&gt;&lt;strong&gt;Dataset link:&lt;/strong&gt; &lt;a href=&#34;https://pan.baidu.com/s/1zwzfdnttbouxvKiKfAV6pg?pwd=zjvx&#34; target=&#34;_blank&#34; rel=&#34;noopener&#34;&gt;https://pan.baidu.com/s/1zwzfdnttbouxvKiKfAV6pg?pwd=zjvx&lt;/a&gt; code: zjvx&lt;/p&gt;
&lt;h2 id=&#34;visualization&#34;&gt;Visualization&lt;/h2&gt;
&lt;p&gt;In traditional network training and signal processing, writing and debugging code can be a complex and time-consuming task. However, through our platform, you can accomplish these tasks using an intuitive graphical interface, without the need for an in-depth understanding of programming details. This makes it easy even for researchers to engage in network training and signal processing, saving a significant amount of time and effort. Moreover, our platform offers rich visualization capabilities to help you comprehend and analyze your data as well as your model&amp;rsquo;s performance. These visualization tools provide you with deeper insights, assisting you in making more informed decisions.&lt;/p&gt;


















&lt;figure  id=&#34;figure-visualization-platform&#34;&gt;
  &lt;div class=&#34;d-flex justify-content-center&#34;&gt;
    &lt;div class=&#34;w-100&#34; &gt;&lt;img alt=&#34;Visualization platform.&#34;
           src=&#34;http://tkwer.site/project/mmhtsr/img/3.gif&#34;
           loading=&#34;lazy&#34; data-zoomable /&gt;&lt;/div&gt;
  &lt;/div&gt;&lt;figcaption&gt;
      Visualization platform.
    &lt;/figcaption&gt;&lt;/figure&gt;

&lt;!-- 
# Interference of the Body with Signals  
Even when the human body is not directly in the line of sight (LOS) of the radar, approaching the radar can still result in receiving signals from the human body (possibly from the main beam, sidelobes, multipath, etc.), which could potentially interfere with gesture signals. To validate this impact, we conducted the following experiments. As shown in the figure, when a person stands in front of the radar without applying static clutter removal, the human target is clearly visible in the radar heatmap. When static clutter removal is applied, even slight body movements (of very small amplitude) can lead to noticeable target motion in the radar heatmap. This phenomenon becomes even more pronounced with larger-scale movements.&lt;font color=&#34;#006666&#34;&gt;(Corresponding to **Fig.** 25)&lt;/font&gt;&lt;br /&gt; 

| | | | |
|:-:|:-:|:-:|:-:|
| 

















&lt;figure  id=&#34;figure-static&#34;&gt;
  &lt;div class=&#34;d-flex justify-content-center&#34;&gt;
    &lt;div class=&#34;w-100&#34; &gt;&lt;img alt=&#34;static&#34;
           src=&#34;http://tkwer.site/project/mmhtsr/img/4.gif&#34;
           loading=&#34;lazy&#34; data-zoomable /&gt;&lt;/div&gt;
  &lt;/div&gt;&lt;figcaption&gt;
      static
    &lt;/figcaption&gt;&lt;/figure&gt;
 | 

















&lt;figure  id=&#34;figure-jogging&#34;&gt;
  &lt;div class=&#34;d-flex justify-content-center&#34;&gt;
    &lt;div class=&#34;w-100&#34; &gt;&lt;img alt=&#34;jogging&#34;
           src=&#34;http://tkwer.site/project/mmhtsr/img/5.gif&#34;
           loading=&#34;lazy&#34; data-zoomable /&gt;&lt;/div&gt;
  &lt;/div&gt;&lt;figcaption&gt;
      jogging
    &lt;/figcaption&gt;&lt;/figure&gt;
 | 

















&lt;figure  id=&#34;figure-shake&#34;&gt;
  &lt;div class=&#34;d-flex justify-content-center&#34;&gt;
    &lt;div class=&#34;w-100&#34; &gt;&lt;img alt=&#34;shake&#34;
           src=&#34;http://tkwer.site/project/mmhtsr/img/6.gif&#34;
           loading=&#34;lazy&#34; data-zoomable /&gt;&lt;/div&gt;
  &lt;/div&gt;&lt;figcaption&gt;
      shake
    &lt;/figcaption&gt;&lt;/figure&gt;
 | 

















&lt;figure  id=&#34;figure-writing&#34;&gt;
  &lt;div class=&#34;d-flex justify-content-center&#34;&gt;
    &lt;div class=&#34;w-100&#34; &gt;&lt;img alt=&#34;writing&#34;
           src=&#34;http://tkwer.site/project/mmhtsr/img/7.gif&#34;
           loading=&#34;lazy&#34; data-zoomable /&gt;&lt;/div&gt;
  &lt;/div&gt;&lt;figcaption&gt;
      writing
    &lt;/figcaption&gt;&lt;/figure&gt;
 |
| | | | |

We dynamically demonstrate the processing of an aerial handwritten trajectory &#39;B&#39; obtained from radar signals. When performing only basic preprocessing, interference cannot be removed. Our method robustly achieves the reconstruction of the trajectory. &lt;font color=&#34;#006666&#34;&gt;(Corresponding to **Fig.** 27)&lt;/font&gt;&lt;br /&gt; 



















&lt;figure  id=&#34;figure-raw-trajectory&#34;&gt;
  &lt;div class=&#34;d-flex justify-content-center&#34;&gt;
    &lt;div class=&#34;w-100&#34; &gt;&lt;img alt=&#34;raw trajectory. &#34;
           src=&#34;http://tkwer.site/project/mmhtsr/img/9.gif&#34;
           loading=&#34;lazy&#34; data-zoomable width=&#34;80%&#34; height=&#34;80%,&#34; /&gt;&lt;/div&gt;
  &lt;/div&gt;&lt;figcaption&gt;
      raw trajectory.
    &lt;/figcaption&gt;&lt;/figure&gt;




















&lt;figure  id=&#34;figure-processed-trajectory&#34;&gt;
  &lt;div class=&#34;d-flex justify-content-center&#34;&gt;
    &lt;div class=&#34;w-100&#34; &gt;&lt;img alt=&#34;processed trajectory. &#34;
           src=&#34;http://tkwer.site/project/mmhtsr/img/8.gif&#34;
           loading=&#34;lazy&#34; data-zoomable width=&#34;80%&#34; height=&#34;80%,&#34; /&gt;&lt;/div&gt;
  &lt;/div&gt;&lt;figcaption&gt;
      processed trajectory.
    &lt;/figcaption&gt;&lt;/figure&gt;



# Independent GPR vs Joint GPR 
&lt;font color=&#34;#006666&#34;&gt;(Corresponding to **Fig.** 29)&lt;/font&gt;&lt;br /&gt;  
Dynamically demonstrated two GPR prediction methods.

| | |
|:-:|:-:|
| 

















&lt;figure  id=&#34;figure-igpr&#34;&gt;
  &lt;div class=&#34;d-flex justify-content-center&#34;&gt;
    &lt;div class=&#34;w-100&#34; &gt;&lt;img alt=&#34;IGPR&#34;
           src=&#34;http://tkwer.site/project/mmhtsr/img/10.gif&#34;
           loading=&#34;lazy&#34; data-zoomable /&gt;&lt;/div&gt;
  &lt;/div&gt;&lt;figcaption&gt;
      IGPR
    &lt;/figcaption&gt;&lt;/figure&gt;
 | 

















&lt;figure  id=&#34;figure-jgpr&#34;&gt;
  &lt;div class=&#34;d-flex justify-content-center&#34;&gt;
    &lt;div class=&#34;w-100&#34; &gt;&lt;img alt=&#34;JGPR&#34;
           src=&#34;http://tkwer.site/project/mmhtsr/img/11.gif&#34;
           loading=&#34;lazy&#34; data-zoomable /&gt;&lt;/div&gt;
  &lt;/div&gt;&lt;figcaption&gt;
      JGPR
    &lt;/figcaption&gt;&lt;/figure&gt;
 | 
| | | 

# Comparing the processing of [14] with our processing. 
&lt;font color=&#34;#006666&#34;&gt;(Corresponding to **Fig.** 30)&lt;/font&gt;&lt;br /&gt;  


















&lt;figure  id=&#34;figure-comparing-the-processing-a-of-14-with-our-processing-b-the-example-of-in-air-handwriting-the-letter-m&#34;&gt;
  &lt;div class=&#34;d-flex justify-content-center&#34;&gt;
    &lt;div class=&#34;w-100&#34; &gt;&lt;img alt=&#34;Comparing the processing (a) of [14] with our processing (b), the example of in-air handwriting the letter M. &#34;
           src=&#34;http://tkwer.site/project/mmhtsr/img/12_1.gif&#34;
           loading=&#34;lazy&#34; data-zoomable width=&#34;80%&#34; height=&#34;80%,&#34; /&gt;&lt;/div&gt;
  &lt;/div&gt;&lt;figcaption&gt;
      Comparing the processing (a) of [14] with our processing (b), the example of in-air handwriting the letter M.
    &lt;/figcaption&gt;&lt;/figure&gt;


# Imitating the Hand Tremors in Elderly Gesture Movements

&lt;video controls&gt;
  &lt;source src=&#34;img/2023-08-23 11-21-43_1.mp4&#34; type=&#34;video/mp4&#34;&gt;
  Your browser does not support the video tag.
&lt;/video&gt; --&gt;
&lt;h1 id=&#34;application&#34;&gt;Application&lt;/h1&gt;
&lt;table&gt;
&lt;thead&gt;
&lt;tr&gt;
&lt;th style=&#34;text-align:center&#34;&gt;&lt;/th&gt;
&lt;th style=&#34;text-align:center&#34;&gt;&lt;/th&gt;
&lt;th style=&#34;text-align:center&#34;&gt;&lt;/th&gt;
&lt;/tr&gt;
&lt;/thead&gt;
&lt;tbody&gt;
&lt;tr&gt;
&lt;td style=&#34;text-align:center&#34;&gt;

















&lt;figure  id=&#34;figure-games&#34;&gt;
  &lt;div class=&#34;d-flex justify-content-center&#34;&gt;
    &lt;div class=&#34;w-100&#34; &gt;&lt;img alt=&#34;games&#34;
           src=&#34;http://tkwer.site/project/mmhtsr/img/double12345678.gif&#34;
           loading=&#34;lazy&#34; data-zoomable /&gt;&lt;/div&gt;
  &lt;/div&gt;&lt;figcaption&gt;
      games
    &lt;/figcaption&gt;&lt;/figure&gt;
&lt;/td&gt;
&lt;td style=&#34;text-align:center&#34;&gt;

















&lt;figure  id=&#34;figure-menu&#34;&gt;
  &lt;div class=&#34;d-flex justify-content-center&#34;&gt;
    &lt;div class=&#34;w-100&#34; &gt;&lt;img alt=&#34;menu&#34;
           src=&#34;http://tkwer.site/project/mmhtsr/img/double123456789.gif&#34;
           loading=&#34;lazy&#34; data-zoomable /&gt;&lt;/div&gt;
  &lt;/div&gt;&lt;figcaption&gt;
      menu
    &lt;/figcaption&gt;&lt;/figure&gt;
&lt;/td&gt;
&lt;td style=&#34;text-align:center&#34;&gt;

















&lt;figure  id=&#34;figure-input-method&#34;&gt;
  &lt;div class=&#34;d-flex justify-content-center&#34;&gt;
    &lt;div class=&#34;w-100&#34; &gt;&lt;img alt=&#34;input method&#34;
           src=&#34;http://tkwer.site/project/mmhtsr/img/double1234567890.gif&#34;
           loading=&#34;lazy&#34; data-zoomable /&gt;&lt;/div&gt;
  &lt;/div&gt;&lt;figcaption&gt;
      input method
    &lt;/figcaption&gt;&lt;/figure&gt;
&lt;/td&gt;
&lt;/tr&gt;
&lt;tr&gt;
&lt;td style=&#34;text-align:center&#34;&gt;&lt;/td&gt;
&lt;td style=&#34;text-align:center&#34;&gt;&lt;/td&gt;
&lt;td style=&#34;text-align:center&#34;&gt;&lt;/td&gt;
&lt;/tr&gt;
&lt;/tbody&gt;
&lt;/table&gt;
</description>
    </item>
    
    <item>
      <title>RadarSensing</title>
      <link>http://tkwer.site/project/radarsensing/</link>
      <pubDate>Sun, 27 Aug 2023 00:00:00 +0000</pubDate>
      <guid>http://tkwer.site/project/radarsensing/</guid>
      <description>&lt;h1 id=&#34;overview&#34;&gt;Overview&lt;/h1&gt;
&lt;p&gt;RadarSensing is a comprehensive upper computer software platform designed for mmWave radar applications. The system integrates real-time data acquisition, advanced signal processing, time-frequency visualization, vital signs monitoring, and intelligent motion recognition into a unified interface, providing researchers and developers with powerful tools for radar-based sensing applications.&lt;/p&gt;
&lt;h1 id=&#34;core-features&#34;&gt;Core Features&lt;/h1&gt;
&lt;h2 id=&#34;real-time-data-acquisition&#34;&gt;Real-Time Data Acquisition&lt;/h2&gt;
&lt;ul&gt;
&lt;li&gt;&lt;strong&gt;Multi-Radar Support&lt;/strong&gt;: Compatible with TI AWR series, IWR series, and other mmWave radar platforms&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;High-Speed Streaming&lt;/strong&gt;: Real-time data capture at up to 1000+ frames per second&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Configurable Parameters&lt;/strong&gt;: Adjustable chirp configuration, sampling rate, and frame structure&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Data Recording&lt;/strong&gt;: Automatic data logging with timestamp and metadata&lt;/li&gt;
&lt;/ul&gt;
&lt;h2 id=&#34;time-frequency-signal-visualization&#34;&gt;Time-Frequency Signal Visualization&lt;/h2&gt;
&lt;ul&gt;
&lt;li&gt;&lt;strong&gt;Range-Doppler Maps&lt;/strong&gt;: Real-time heatmap visualization of range and velocity information&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Range-Angle Processing&lt;/strong&gt;: 2D/3D spatial mapping with beamforming algorithms&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Spectrogram Analysis&lt;/strong&gt;: Time-frequency domain representation for signal analysis&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Customizable Display&lt;/strong&gt;: Multiple visualization modes with adjustable color schemes and scaling&lt;/li&gt;
&lt;/ul&gt;
&lt;h2 id=&#34;vital-signs-monitoring&#34;&gt;Vital Signs Monitoring&lt;/h2&gt;
&lt;ul&gt;
&lt;li&gt;&lt;strong&gt;Heart Rate Detection&lt;/strong&gt;: Non-contact cardiac rhythm monitoring with high accuracy&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Respiratory Rate&lt;/strong&gt;: Real-time breathing pattern analysis and rate calculation&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Heart Rate Variability (HRV)&lt;/strong&gt;: Advanced cardiac health assessment metrics&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Multi-Target Tracking&lt;/strong&gt;: Simultaneous monitoring of multiple subjects&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Medical-Grade Accuracy&lt;/strong&gt;: Validated against clinical standards for healthcare applications&lt;/li&gt;
&lt;/ul&gt;
&lt;h2 id=&#34;motion-recognition--classification&#34;&gt;Motion Recognition &amp;amp; Classification&lt;/h2&gt;
&lt;ul&gt;
&lt;li&gt;&lt;strong&gt;Gesture Recognition&lt;/strong&gt;: Real-time hand gesture classification with machine learning&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Activity Detection&lt;/strong&gt;: Human activity recognition (walking, sitting, falling, etc.)&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Gait Analysis&lt;/strong&gt;: Detailed biomechanical movement assessment&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Custom Training&lt;/strong&gt;: User-defined gesture and motion pattern learning&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Multi-Class Classification&lt;/strong&gt;: Support for 50+ predefined motion categories&lt;/li&gt;
&lt;/ul&gt;
&lt;h1 id=&#34;technical-architecture&#34;&gt;Technical Architecture&lt;/h1&gt;
&lt;h2 id=&#34;signal-processing-pipeline&#34;&gt;Signal Processing Pipeline&lt;/h2&gt;
&lt;ol&gt;
&lt;li&gt;&lt;strong&gt;Raw Data Preprocessing&lt;/strong&gt;: Noise reduction, calibration, and filtering&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;FFT Processing&lt;/strong&gt;: Range and Doppler FFT with windowing functions&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;CFAR Detection&lt;/strong&gt;: Constant False Alarm Rate target detection&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Tracking Algorithms&lt;/strong&gt;: Kalman filtering for multi-target tracking&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Feature Extraction&lt;/strong&gt;: Advanced signal features for classification&lt;/li&gt;
&lt;/ol&gt;
&lt;h2 id=&#34;machine-learning-integration&#34;&gt;Machine Learning Integration&lt;/h2&gt;
&lt;ul&gt;
&lt;li&gt;&lt;strong&gt;Deep Learning Models&lt;/strong&gt;: CNN/RNN architectures for pattern recognition&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Real-Time Inference&lt;/strong&gt;: Optimized models for low-latency processing&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Transfer Learning&lt;/strong&gt;: Pre-trained models adaptable to specific applications&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Model Training Tools&lt;/strong&gt;: Built-in dataset management and training utilities&lt;/li&gt;
&lt;/ul&gt;
&lt;h2 id=&#34;hardware-integration&#34;&gt;Hardware Integration&lt;/h2&gt;
&lt;ul&gt;
&lt;li&gt;&lt;strong&gt;USB/Ethernet Interface&lt;/strong&gt;: Multiple connection options for radar modules&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;GPIO Control&lt;/strong&gt;: External trigger and synchronization support&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Multi-Threading&lt;/strong&gt;: Parallel processing for real-time performance&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Memory Management&lt;/strong&gt;: Efficient buffer handling for continuous operation&lt;/li&gt;
&lt;/ul&gt;
&lt;h1 id=&#34;applications&#34;&gt;Applications&lt;/h1&gt;
&lt;table&gt;
&lt;thead&gt;
&lt;tr&gt;
&lt;th style=&#34;text-align:center&#34;&gt;Healthcare Monitoring&lt;/th&gt;
&lt;th style=&#34;text-align:center&#34;&gt;Smart Home&lt;/th&gt;
&lt;th style=&#34;text-align:center&#34;&gt;Security &amp;amp; Surveillance&lt;/th&gt;
&lt;/tr&gt;
&lt;/thead&gt;
&lt;tbody&gt;
&lt;tr&gt;
&lt;td style=&#34;text-align:center&#34;&gt;

















&lt;figure  id=&#34;figure-patient-monitoring&#34;&gt;
  &lt;div class=&#34;d-flex justify-content-center&#34;&gt;
    &lt;div class=&#34;w-100&#34; &gt;&lt;img src=&#34;http://tkwer.site/project/radarsensing/img/healthcare.gif&#34; alt=&#34;Patient monitoring&#34; loading=&#34;lazy&#34; data-zoomable /&gt;&lt;/div&gt;
  &lt;/div&gt;&lt;figcaption&gt;
      Patient monitoring
    &lt;/figcaption&gt;&lt;/figure&gt;
&lt;/td&gt;
&lt;td style=&#34;text-align:center&#34;&gt;

















&lt;figure  id=&#34;figure-occupancy-detection&#34;&gt;
  &lt;div class=&#34;d-flex justify-content-center&#34;&gt;
    &lt;div class=&#34;w-100&#34; &gt;&lt;img src=&#34;http://tkwer.site/project/radarsensing/img/smart_home.gif&#34; alt=&#34;Occupancy detection&#34; loading=&#34;lazy&#34; data-zoomable /&gt;&lt;/div&gt;
  &lt;/div&gt;&lt;figcaption&gt;
      Occupancy detection
    &lt;/figcaption&gt;&lt;/figure&gt;
&lt;/td&gt;
&lt;td style=&#34;text-align:center&#34;&gt;

















&lt;figure  id=&#34;figure-intrusion-detection&#34;&gt;
  &lt;div class=&#34;d-flex justify-content-center&#34;&gt;
    &lt;div class=&#34;w-100&#34; &gt;&lt;img src=&#34;http://tkwer.site/project/radarsensing/img/security.gif&#34; alt=&#34;Intrusion detection&#34; loading=&#34;lazy&#34; data-zoomable /&gt;&lt;/div&gt;
  &lt;/div&gt;&lt;figcaption&gt;
      Intrusion detection
    &lt;/figcaption&gt;&lt;/figure&gt;
&lt;/td&gt;
&lt;/tr&gt;
&lt;/tbody&gt;
&lt;/table&gt;
&lt;h2 id=&#34;use-cases&#34;&gt;Use Cases&lt;/h2&gt;
&lt;ul&gt;
&lt;li&gt;&lt;strong&gt;Medical Monitoring&lt;/strong&gt;: Non-contact patient vital signs in hospitals and clinics&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Elderly Care&lt;/strong&gt;: Fall detection and health monitoring for assisted living&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Smart Buildings&lt;/strong&gt;: Occupancy sensing and energy management&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Automotive&lt;/strong&gt;: In-cabin monitoring and driver state assessment&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Research &amp;amp; Development&lt;/strong&gt;: Academic research and algorithm prototyping&lt;/li&gt;
&lt;/ul&gt;
&lt;h1 id=&#34;key-advantages&#34;&gt;Key Advantages&lt;/h1&gt;
&lt;h2 id=&#34;user-friendly-interface&#34;&gt;User-Friendly Interface&lt;/h2&gt;
&lt;ul&gt;
&lt;li&gt;&lt;strong&gt;Intuitive GUI&lt;/strong&gt;: Modern interface with drag-and-drop configuration&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Real-Time Feedback&lt;/strong&gt;: Instant visualization of processing results&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Customizable Layouts&lt;/strong&gt;: Flexible workspace arrangement for different applications&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Export Capabilities&lt;/strong&gt;: Data export in multiple formats (CSV, MAT, HDF5)&lt;/li&gt;
&lt;/ul&gt;
&lt;h2 id=&#34;performance-optimization&#34;&gt;Performance Optimization&lt;/h2&gt;
&lt;ul&gt;
&lt;li&gt;&lt;strong&gt;GPU Acceleration&lt;/strong&gt;: CUDA support for intensive signal processing&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Multi-Core Processing&lt;/strong&gt;: Parallel algorithms utilizing all CPU cores&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Memory Efficiency&lt;/strong&gt;: Optimized memory usage for long-term operation&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Low Latency&lt;/strong&gt;: Sub-millisecond processing delays for real-time applications&lt;/li&gt;
&lt;/ul&gt;
&lt;h2 id=&#34;extensibility&#34;&gt;Extensibility&lt;/h2&gt;
&lt;ul&gt;
&lt;li&gt;&lt;strong&gt;Plugin Architecture&lt;/strong&gt;: Modular design for custom algorithm integration&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;API Support&lt;/strong&gt;: RESTful API for external system integration&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Scripting Interface&lt;/strong&gt;: Python/MATLAB scripting for automation&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Open Source Components&lt;/strong&gt;: Extensible with community contributions&lt;/li&gt;
&lt;/ul&gt;
&lt;p&gt;This comprehensive platform bridges the gap between raw radar data and practical applications, making advanced radar sensing accessible to researchers, developers, and industry professionals.&lt;/p&gt;
</description>
    </item>
    
    <item>
      <title>RadarStream</title>
      <link>http://tkwer.site/project/radarstream/</link>
      <pubDate>Tue, 15 Mar 2022 00:00:00 +0000</pubDate>
      <guid>http://tkwer.site/project/radarstream/</guid>
      <description>&lt;h1 id=&#34;overview&#34;&gt;Overview&lt;/h1&gt;
&lt;p&gt;&lt;strong&gt;RadarStream&lt;/strong&gt; is a comprehensive real-time raw data acquisition, processing, and visualization system designed for Texas Instruments&amp;rsquo; MIMO mmWave radar series. The system features a &lt;strong&gt;multi-threaded architecture&lt;/strong&gt; with C-wrapped data acquisition modules to overcome Python&amp;rsquo;s GIL limitations, enabling true real-time, frame-loss-free radar data processing.&lt;/p&gt;


















&lt;figure  id=&#34;figure-radarstream-real-time-system-interface&#34;&gt;
  &lt;div class=&#34;d-flex justify-content-center&#34;&gt;
    &lt;div class=&#34;w-100&#34; &gt;&lt;img alt=&#34;RadarStream Real-time System Interface&#34;
           src=&#34;http://tkwer.site/project/radarstream/featured.gif&#34;
           loading=&#34;lazy&#34; data-zoomable /&gt;&lt;/div&gt;
  &lt;/div&gt;&lt;figcaption&gt;
      RadarStream Real-time System Interface
    &lt;/figcaption&gt;&lt;/figure&gt;

&lt;h1 id=&#34;key-features-&#34;&gt;Key Features ✨&lt;/h1&gt;
&lt;h2 id=&#34;real-time-multi-threaded-data-acquisition-&#34;&gt;Real-time Multi-threaded Data Acquisition 🧵&lt;/h2&gt;
&lt;ul&gt;
&lt;li&gt;&lt;strong&gt;Multi-threaded Architecture&lt;/strong&gt;: Separate threads for data acquisition and processing&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;C-Wrapped Acquisition Module&lt;/strong&gt; 🚀: Overcomes Python&amp;rsquo;s Global Interpreter Lock (GIL) for true multi-core processing&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Frame-Loss-Free Operation&lt;/strong&gt;: Near real-time data capture and handling&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;TI MIMO mmWave Radar Support&lt;/strong&gt;: Compatible with IWR series radar sensors&lt;/li&gt;
&lt;/ul&gt;
&lt;h2 id=&#34;multi-dimensional-feature-extraction-&#34;&gt;Multi-dimensional Feature Extraction 📊&lt;/h2&gt;
&lt;p&gt;The system extracts comprehensive radar features for advanced signal processing:&lt;/p&gt;
&lt;ul&gt;
&lt;li&gt;&lt;strong&gt;RTI&lt;/strong&gt; - Range-Time Information&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;DTI&lt;/strong&gt; - Doppler-Time Information&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;RDI&lt;/strong&gt; - Range-Doppler Information&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;RAI&lt;/strong&gt; - Range-Azimuth Information&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;REI&lt;/strong&gt; - Range-Elevation Information&lt;/li&gt;
&lt;/ul&gt;
&lt;h2 id=&#34;interactive-visualization-interface-&#34;&gt;Interactive Visualization Interface 🖥️&lt;/h2&gt;
&lt;ul&gt;
&lt;li&gt;&lt;strong&gt;PyQt5-based GUI&lt;/strong&gt;: Modern, intuitive user interface&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Real-time Plotting&lt;/strong&gt;: PyQtGraph for high-performance visualization&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Multi-view Display&lt;/strong&gt;: Simultaneous visualization of multiple feature dimensions&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Configuration Management&lt;/strong&gt;: Easy radar parameter configuration&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Data Recording&lt;/strong&gt;: Capture training data for machine learning models&lt;/li&gt;
&lt;/ul&gt;
&lt;h1 id=&#34;hardware-support&#34;&gt;Hardware Support&lt;/h1&gt;
&lt;h2 id=&#34;tested-radar-platforms&#34;&gt;Tested Radar Platforms&lt;/h2&gt;
&lt;p&gt;The system has been validated with the following TI mmWave radar platforms:&lt;/p&gt;
&lt;ul&gt;
&lt;li&gt;✅ &lt;strong&gt;IWR6843ISK&lt;/strong&gt; - 60 GHz Industrial Radar Sensor&lt;/li&gt;
&lt;li&gt;✅ &lt;strong&gt;IWR6843ISK-OBS&lt;/strong&gt; - Out-of-Box Solution variant&lt;/li&gt;
&lt;li&gt;✅ &lt;strong&gt;IWR1843ISK&lt;/strong&gt; - 77 GHz Automotive Radar Sensor&lt;/li&gt;
&lt;/ul&gt;
&lt;h2 id=&#34;required-hardware&#34;&gt;Required Hardware&lt;/h2&gt;
&lt;table&gt;
&lt;thead&gt;
&lt;tr&gt;
&lt;th&gt;Component&lt;/th&gt;
&lt;th&gt;Description&lt;/th&gt;
&lt;/tr&gt;
&lt;/thead&gt;
&lt;tbody&gt;
&lt;tr&gt;
&lt;td&gt;&lt;strong&gt;TI mmWave Radar&lt;/strong&gt;&lt;/td&gt;
&lt;td&gt;IWR6843ISK, IWR6843ISK-OBS, or IWR1843ISK&lt;/td&gt;
&lt;/tr&gt;
&lt;tr&gt;
&lt;td&gt;&lt;strong&gt;DCA1000 EVM&lt;/strong&gt;&lt;/td&gt;
&lt;td&gt;Essential for raw data capture (ADC data streaming)&lt;/td&gt;
&lt;/tr&gt;
&lt;tr&gt;
&lt;td&gt;&lt;strong&gt;Power Supply&lt;/strong&gt;&lt;/td&gt;
&lt;td&gt;5V 3A DC power adapter&lt;/td&gt;
&lt;/tr&gt;
&lt;tr&gt;
&lt;td&gt;&lt;strong&gt;Ethernet Cable&lt;/strong&gt;&lt;/td&gt;
&lt;td&gt;For DCA1000 data connection&lt;/td&gt;
&lt;/tr&gt;
&lt;tr&gt;
&lt;td&gt;&lt;strong&gt;Micro USB Cable&lt;/strong&gt;&lt;/td&gt;
&lt;td&gt;For radar CLI interface&lt;/td&gt;
&lt;/tr&gt;
&lt;tr&gt;
&lt;td&gt;&lt;strong&gt;PC&lt;/strong&gt;&lt;/td&gt;
&lt;td&gt;Windows OS recommended&lt;/td&gt;
&lt;/tr&gt;
&lt;/tbody&gt;
&lt;/table&gt;
&lt;h3 id=&#34;connection-steps&#34;&gt;Connection Steps&lt;/h3&gt;
&lt;ol&gt;
&lt;li&gt;&lt;strong&gt;Power Connection&lt;/strong&gt;: Connect 5V 3A DC power to the radar board&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Ethernet Connection&lt;/strong&gt;: Connect DCA1000 EVM to PC via Ethernet cable&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;USB Connection&lt;/strong&gt;: Connect radar board to PC via micro USB for CLI&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Network Configuration&lt;/strong&gt;: Configure IPv4 settings for DCA1000 (similar to mmWaveStudio setup)&lt;/li&gt;
&lt;/ol&gt;
&lt;h3 id=&#34;platform-comparison&#34;&gt;Platform Comparison&lt;/h3&gt;
&lt;table&gt;
&lt;thead&gt;
&lt;tr&gt;
&lt;th&gt;Platform&lt;/th&gt;
&lt;th&gt;Performance&lt;/th&gt;
&lt;th&gt;Recommendation&lt;/th&gt;
&lt;/tr&gt;
&lt;/thead&gt;
&lt;tbody&gt;
&lt;tr&gt;
&lt;td&gt;&lt;strong&gt;Windows PC&lt;/strong&gt;&lt;/td&gt;
&lt;td&gt;✅ Excellent - Full frame rate, no data loss&lt;/td&gt;
&lt;td&gt;⭐ Recommended&lt;/td&gt;
&lt;/tr&gt;
&lt;tr&gt;
&lt;td&gt;&lt;strong&gt;Raspberry Pi 4B&lt;/strong&gt;&lt;/td&gt;
&lt;td&gt;⚠️ Limited - Low frame rate, prone to data loss&lt;/td&gt;
&lt;td&gt;Not recommended for real-time&lt;/td&gt;
&lt;/tr&gt;
&lt;/tbody&gt;
&lt;/table&gt;
&lt;h1 id=&#34;software-architecture&#34;&gt;Software Architecture&lt;/h1&gt;
&lt;h2 id=&#34;technology-stack&#34;&gt;Technology Stack&lt;/h2&gt;
&lt;ul&gt;
&lt;li&gt;&lt;strong&gt;Python 3.6+&lt;/strong&gt; - Main programming language&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;PyQt5&lt;/strong&gt; - GUI framework&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;PyQtGraph&lt;/strong&gt; - High-performance plotting&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;NumPy&lt;/strong&gt; - Numerical computing&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;PyTorch&lt;/strong&gt; - Machine learning integration&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;C Extensions&lt;/strong&gt; - Performance-critical data acquisition&lt;/li&gt;
&lt;/ul&gt;
&lt;h2 id=&#34;project-structure&#34;&gt;Project Structure&lt;/h2&gt;
&lt;div class=&#34;highlight&#34;&gt;&lt;pre tabindex=&#34;0&#34; class=&#34;chroma&#34;&gt;&lt;code class=&#34;language-fallback&#34; data-lang=&#34;fallback&#34;&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;RadarStream/
&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;├── config/              # Radar configuration files
&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;├── gesture_icons/       # Gesture visualization icons
&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;├── libs/                # Radar communication libraries
&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;├── STL3D/              # 3D printed mount files
&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;├── iwr6843_tlv/        # TLV protocol implementation
&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;├── dsp/                # Digital signal processing modules
&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;├── main.py             # Application entry point
&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;├── real_time_process.py # Real-time processing engine
&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;├── radar_config.py     # Configuration utilities
&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;└── UI_interface.py     # PyQt5 user interface
&lt;/span&gt;&lt;/span&gt;&lt;/code&gt;&lt;/pre&gt;&lt;/div&gt;&lt;h2 id=&#34;multi-threaded-architecture&#34;&gt;Multi-threaded Architecture&lt;/h2&gt;
&lt;div class=&#34;highlight&#34;&gt;&lt;pre tabindex=&#34;0&#34; class=&#34;chroma&#34;&gt;&lt;code class=&#34;language-fallback&#34; data-lang=&#34;fallback&#34;&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;Main Thread (GUI)
&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;    ↓
&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;Data Acquisition Thread (C-wrapped)
&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;    ↓
&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;Processing Thread (Python)
&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;    ↓
&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;Visualization Thread (PyQtGraph)
&lt;/span&gt;&lt;/span&gt;&lt;/code&gt;&lt;/pre&gt;&lt;/div&gt;&lt;p&gt;&lt;strong&gt;Key Design Decisions&lt;/strong&gt;:&lt;/p&gt;
&lt;ul&gt;
&lt;li&gt;✅ C-wrapped acquisition overcomes GIL limitations&lt;/li&gt;
&lt;li&gt;✅ Separate threads prevent GUI blocking&lt;/li&gt;
&lt;li&gt;✅ Lock-free queues for inter-thread communication&lt;/li&gt;
&lt;li&gt;✅ Efficient memory management for continuous operation&lt;/li&gt;
&lt;/ul&gt;
&lt;h1 id=&#34;getting-started&#34;&gt;Getting Started&lt;/h1&gt;
&lt;h2 id=&#34;software-requirements&#34;&gt;Software Requirements&lt;/h2&gt;
&lt;p&gt;Install the required Python dependencies:&lt;/p&gt;
&lt;div class=&#34;highlight&#34;&gt;&lt;pre tabindex=&#34;0&#34; class=&#34;chroma&#34;&gt;&lt;code class=&#34;language-bash&#34; data-lang=&#34;bash&#34;&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;pip install pyqt5 pyqtgraph numpy torch matplotlib pyserial
&lt;/span&gt;&lt;/span&gt;&lt;/code&gt;&lt;/pre&gt;&lt;/div&gt;&lt;p&gt;&lt;strong&gt;Dependency List&lt;/strong&gt;:&lt;/p&gt;
&lt;ul&gt;
&lt;li&gt;Python 3.6+&lt;/li&gt;
&lt;li&gt;PyQt5 - GUI framework&lt;/li&gt;
&lt;li&gt;PyQtGraph - Real-time plotting&lt;/li&gt;
&lt;li&gt;NumPy - Numerical computing&lt;/li&gt;
&lt;li&gt;PyTorch - Machine learning support&lt;/li&gt;
&lt;li&gt;Matplotlib - Additional visualization&lt;/li&gt;
&lt;li&gt;PySerial - Serial communication&lt;/li&gt;
&lt;/ul&gt;
&lt;h2 id=&#34;firmware-requirements&#34;&gt;Firmware Requirements&lt;/h2&gt;
&lt;p&gt;The radar firmware must be selected from the TI mmWave Industrial Toolbox:&lt;/p&gt;
&lt;p&gt;&lt;strong&gt;Firmware Path&lt;/strong&gt;: &lt;code&gt;mmwave_industrial_toolbox_4_10_1/labs/Out_Of_Box_Demo/prebuilt_binaries/&lt;/code&gt;&lt;/p&gt;
&lt;p&gt;&lt;strong&gt;Note&lt;/strong&gt;: Version 4.10.1 is not strictly required - other versions of the mmWave Industrial Toolbox are also compatible.&lt;/p&gt;
&lt;h2 id=&#34;installation--setup&#34;&gt;Installation &amp;amp; Setup&lt;/h2&gt;
&lt;h3 id=&#34;1-clone-the-repository&#34;&gt;1. Clone the Repository&lt;/h3&gt;
&lt;div class=&#34;highlight&#34;&gt;&lt;pre tabindex=&#34;0&#34; class=&#34;chroma&#34;&gt;&lt;code class=&#34;language-bash&#34; data-lang=&#34;bash&#34;&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;git clone https://github.com/Tkwer/RadarStream.git
&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;&lt;span class=&#34;nb&#34;&gt;cd&lt;/span&gt; RadarStream
&lt;/span&gt;&lt;/span&gt;&lt;/code&gt;&lt;/pre&gt;&lt;/div&gt;&lt;h3 id=&#34;2-hardware-connection&#34;&gt;2. Hardware Connection&lt;/h3&gt;
&lt;ul&gt;
&lt;li&gt;Connect TI mmWave radar sensor to PC via micro USB&lt;/li&gt;
&lt;li&gt;Connect DCA1000 EVM to PC via Ethernet cable&lt;/li&gt;
&lt;li&gt;Connect 5V 3A DC power supply to radar board&lt;/li&gt;
&lt;li&gt;Configure network IPv4 settings for DCA1000&lt;/li&gt;
&lt;/ul&gt;
&lt;h3 id=&#34;3-network-configuration&#34;&gt;3. Network Configuration&lt;/h3&gt;
&lt;p&gt;Configure your PC&amp;rsquo;s Ethernet adapter with static IP settings (similar to mmWaveStudio setup):&lt;/p&gt;
&lt;ul&gt;
&lt;li&gt;&lt;strong&gt;IP Address&lt;/strong&gt;: 192.168.33.30&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Subnet Mask&lt;/strong&gt;: 255.255.255.0&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Default Gateway&lt;/strong&gt;: 192.168.33.1&lt;/li&gt;
&lt;/ul&gt;
&lt;h2 id=&#34;usage&#34;&gt;Usage&lt;/h2&gt;
&lt;h3 id=&#34;1-launch-the-application&#34;&gt;1. Launch the Application&lt;/h3&gt;
&lt;div class=&#34;highlight&#34;&gt;&lt;pre tabindex=&#34;0&#34; class=&#34;chroma&#34;&gt;&lt;code class=&#34;language-bash&#34; data-lang=&#34;bash&#34;&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;python main.py
&lt;/span&gt;&lt;/span&gt;&lt;/code&gt;&lt;/pre&gt;&lt;/div&gt;&lt;h3 id=&#34;2-configure-the-radar&#34;&gt;2. Configure the Radar&lt;/h3&gt;
&lt;ol&gt;
&lt;li&gt;&lt;strong&gt;Select COM Port&lt;/strong&gt;: Choose the appropriate COM port for the radar CLI interface&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Load Configuration&lt;/strong&gt;: Select a radar configuration file from the &lt;code&gt;config/&lt;/code&gt; directory&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Send Config&lt;/strong&gt;: Click &amp;ldquo;Send Config&amp;rdquo; to initialize the radar with selected parameters&lt;/li&gt;
&lt;/ol&gt;
&lt;h3 id=&#34;3-start-data-acquisition&#34;&gt;3. Start Data Acquisition&lt;/h3&gt;
&lt;ul&gt;
&lt;li&gt;Click &amp;ldquo;Start&amp;rdquo; to begin real-time data acquisition and visualization&lt;/li&gt;
&lt;li&gt;Use the interface controls to:
&lt;ul&gt;
&lt;li&gt;Visualize radar data in real-time across multiple feature dimensions&lt;/li&gt;
&lt;li&gt;Adjust visualization parameters&lt;/li&gt;
&lt;li&gt;Capture training data for machine learning models&lt;/li&gt;
&lt;li&gt;Record data sessions for offline analysis&lt;/li&gt;
&lt;/ul&gt;
&lt;/li&gt;
&lt;/ul&gt;
&lt;h3 id=&#34;4-gesture-recognition-optional&#34;&gt;4. Gesture Recognition (Optional)&lt;/h3&gt;
&lt;ul&gt;
&lt;li&gt;Load a pre-trained gesture recognition model&lt;/li&gt;
&lt;li&gt;Enable real-time gesture classification&lt;/li&gt;
&lt;li&gt;View recognized gestures with visual feedback&lt;/li&gt;
&lt;/ul&gt;
&lt;h1 id=&#34;applications&#34;&gt;Applications&lt;/h1&gt;
&lt;p&gt;RadarStream enables a wide range of radar-based applications:&lt;/p&gt;
&lt;h2 id=&#34;human-computer-interaction-&#34;&gt;Human-Computer Interaction 🖐️&lt;/h2&gt;
&lt;ul&gt;
&lt;li&gt;&lt;strong&gt;Gesture Recognition&lt;/strong&gt;: Contactless gesture control for smart devices&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Air Writing&lt;/strong&gt;: Handwriting recognition in 3D space&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Sign Language Recognition&lt;/strong&gt;: Accessibility applications&lt;/li&gt;
&lt;/ul&gt;
&lt;h2 id=&#34;motion-analysis-&#34;&gt;Motion Analysis 🏃&lt;/h2&gt;
&lt;ul&gt;
&lt;li&gt;&lt;strong&gt;Activity Recognition&lt;/strong&gt;: Classify human activities (walking, sitting, falling)&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Gait Analysis&lt;/strong&gt;: Biomechanical movement assessment&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Sports Analytics&lt;/strong&gt;: Performance monitoring and analysis&lt;/li&gt;
&lt;/ul&gt;
&lt;h2 id=&#34;smart-home--iot-&#34;&gt;Smart Home &amp;amp; IoT 🏠&lt;/h2&gt;
&lt;ul&gt;
&lt;li&gt;&lt;strong&gt;Occupancy Detection&lt;/strong&gt;: Presence sensing for energy management&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Vital Signs Monitoring&lt;/strong&gt;: Non-contact heart rate and respiration&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Fall Detection&lt;/strong&gt;: Elderly care and safety monitoring&lt;/li&gt;
&lt;/ul&gt;
&lt;h2 id=&#34;research--development-&#34;&gt;Research &amp;amp; Development 🔬&lt;/h2&gt;
&lt;ul&gt;
&lt;li&gt;&lt;strong&gt;Algorithm Prototyping&lt;/strong&gt;: Test new signal processing algorithms&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Dataset Collection&lt;/strong&gt;: Gather training data for machine learning&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Radar Education&lt;/strong&gt;: Teaching tool for radar signal processing&lt;/li&gt;
&lt;/ul&gt;
&lt;h1 id=&#34;technical-highlights&#34;&gt;Technical Highlights&lt;/h1&gt;
&lt;h2 id=&#34;performance-optimization&#34;&gt;Performance Optimization&lt;/h2&gt;
&lt;h3 id=&#34;c-wrapped-data-acquisition&#34;&gt;C-Wrapped Data Acquisition&lt;/h3&gt;
&lt;p&gt;The critical data acquisition module is implemented in C and wrapped for Python, providing:&lt;/p&gt;
&lt;ul&gt;
&lt;li&gt;&lt;strong&gt;True Multi-core Processing&lt;/strong&gt;: Bypasses Python&amp;rsquo;s GIL limitation&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Near-Zero Latency&lt;/strong&gt;: Minimal delay between radar and processing&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Frame-Loss-Free&lt;/strong&gt;: Reliable data capture even at high frame rates&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Memory Efficient&lt;/strong&gt;: Optimized buffer management&lt;/li&gt;
&lt;/ul&gt;
&lt;h3 id=&#34;real-time-processing-pipeline&#34;&gt;Real-time Processing Pipeline&lt;/h3&gt;
&lt;div class=&#34;highlight&#34;&gt;&lt;pre tabindex=&#34;0&#34; class=&#34;chroma&#34;&gt;&lt;code class=&#34;language-fallback&#34; data-lang=&#34;fallback&#34;&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;Radar Hardware → DCA1000 → Ethernet → C Acquisition Module
&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;                                              ↓
&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;                                    Lock-free Queue
&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;                                              ↓
&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;                            Python Processing Thread
&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;                                              ↓
&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;                                    Feature Extraction
&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;                                              ↓
&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;                            PyQtGraph Visualization
&lt;/span&gt;&lt;/span&gt;&lt;/code&gt;&lt;/pre&gt;&lt;/div&gt;&lt;h2 id=&#34;digital-signal-processing&#34;&gt;Digital Signal Processing&lt;/h2&gt;
&lt;p&gt;The &lt;code&gt;dsp/&lt;/code&gt; module provides comprehensive radar signal processing:&lt;/p&gt;
&lt;ul&gt;
&lt;li&gt;&lt;strong&gt;Range FFT&lt;/strong&gt;: Extract range information from ADC data&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Doppler FFT&lt;/strong&gt;: Compute velocity information&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Angle Estimation&lt;/strong&gt;: MUSIC/Bartlett beamforming for angle-of-arrival&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;CFAR Detection&lt;/strong&gt;: Constant False Alarm Rate target detection&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Clutter Removal&lt;/strong&gt;: Static clutter filtering&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Calibration&lt;/strong&gt;: Phase and amplitude calibration&lt;/li&gt;
&lt;/ul&gt;
&lt;p&gt;&lt;strong&gt;Acknowledgment&lt;/strong&gt;: DSP module references &lt;a href=&#34;https://github.com/PreSenseRadar/OpenRadar&#34; target=&#34;_blank&#34; rel=&#34;noopener&#34;&gt;OpenRadar&lt;/a&gt; by PreSenseRadar.&lt;/p&gt;
&lt;h1 id=&#34;acknowledgements&#34;&gt;Acknowledgements&lt;/h1&gt;
&lt;p&gt;This project builds upon and references:&lt;/p&gt;
&lt;ul&gt;
&lt;li&gt;&lt;strong&gt;&lt;a href=&#34;https://github.com/AndyYu0010/real-time-radar&#34; target=&#34;_blank&#34; rel=&#34;noopener&#34;&gt;real-time-radar&lt;/a&gt;&lt;/strong&gt; by AndyYu0010 - Real-time processing architecture&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;&lt;a href=&#34;https://github.com/PreSenseRadar/OpenRadar&#34; target=&#34;_blank&#34; rel=&#34;noopener&#34;&gt;OpenRadar&lt;/a&gt;&lt;/strong&gt; by PreSenseRadar - DSP module implementation&lt;/li&gt;
&lt;/ul&gt;
&lt;p&gt;Special thanks to the open-source radar community for their contributions and support.&lt;/p&gt;
&lt;h1 id=&#34;future-development&#34;&gt;Future Development&lt;/h1&gt;
&lt;h2 id=&#34;planned-improvements-&#34;&gt;Planned Improvements 🚀&lt;/h2&gt;
&lt;ul&gt;
&lt;li&gt;&lt;input disabled=&#34;&#34; type=&#34;checkbox&#34;&gt; &lt;strong&gt;Validate More RF Boards&lt;/strong&gt;: Test compatibility with additional TI radar platforms (AWR series, etc.)&lt;/li&gt;
&lt;li&gt;&lt;input disabled=&#34;&#34; type=&#34;checkbox&#34;&gt; &lt;strong&gt;Migrate to PySide6&lt;/strong&gt;: Update from PyQt5 to PySide6 for better licensing and features&lt;/li&gt;
&lt;li&gt;&lt;input disabled=&#34;&#34; type=&#34;checkbox&#34;&gt; &lt;strong&gt;Flexible API&lt;/strong&gt;: Make the &lt;code&gt;libs/&lt;/code&gt; folder API more modular and extensible&lt;/li&gt;
&lt;li&gt;&lt;input disabled=&#34;&#34; type=&#34;checkbox&#34;&gt; &lt;strong&gt;Enhanced ML Integration&lt;/strong&gt;: Add more pre-trained models and training utilities&lt;/li&gt;
&lt;li&gt;&lt;input disabled=&#34;&#34; type=&#34;checkbox&#34;&gt; &lt;strong&gt;Cross-platform Support&lt;/strong&gt;: Improve Linux and macOS compatibility&lt;/li&gt;
&lt;li&gt;&lt;input disabled=&#34;&#34; type=&#34;checkbox&#34;&gt; &lt;strong&gt;Performance Profiling&lt;/strong&gt;: Add built-in performance monitoring tools&lt;/li&gt;
&lt;/ul&gt;
&lt;h2 id=&#34;community-contributions&#34;&gt;Community Contributions&lt;/h2&gt;
&lt;p&gt;If you encounter any issues or have suggestions for improvements, please:&lt;/p&gt;
&lt;ul&gt;
&lt;li&gt;🐛 &lt;strong&gt;Report Bugs&lt;/strong&gt;: Submit issues on GitHub&lt;/li&gt;
&lt;li&gt;💡 &lt;strong&gt;Feature Requests&lt;/strong&gt;: Propose new features or enhancements&lt;/li&gt;
&lt;li&gt;🔧 &lt;strong&gt;Pull Requests&lt;/strong&gt;: Contribute code improvements&lt;/li&gt;
&lt;li&gt;📖 &lt;strong&gt;Documentation&lt;/strong&gt;: Help improve documentation and tutorials&lt;/li&gt;
&lt;/ul&gt;
&lt;hr&gt;
&lt;p&gt;&lt;strong&gt;RadarStream&lt;/strong&gt; provides a powerful, flexible platform for real-time mmWave radar data acquisition and processing, making advanced radar sensing accessible for research, development, and practical applications.&lt;/p&gt;
</description>
    </item>
    
    <item>
      <title>mmWave Radar Near-Field Imaging System</title>
      <link>http://tkwer.site/project/radarimage/</link>
      <pubDate>Tue, 15 Jun 2021 00:00:00 +0000</pubDate>
      <guid>http://tkwer.site/project/radarimage/</guid>
      <description>&lt;h1 id=&#34;overview&#34;&gt;Overview&lt;/h1&gt;
&lt;p&gt;This project presents a &lt;strong&gt;DIY millimeter-wave radar near-field imaging system&lt;/strong&gt; built from scratch using a custom 2D linear rail platform. By integrating GRBL motion control with radar data acquisition and imaging algorithms into a single compact program, the system achieves vendor-independent operation and rapid prototyping capabilities.&lt;/p&gt;
&lt;blockquote&gt;
&lt;p&gt;&lt;strong&gt;Honest Assessment&lt;/strong&gt;: While the imaging quality is admittedly subpar due to open-loop motor control and rapid scanning causing step loss, this project served as a valuable learning experience in radar imaging principles and mechatronic system integration.&lt;/p&gt;
&lt;/blockquote&gt;
&lt;h1 id=&#34;system-architecture&#34;&gt;System Architecture&lt;/h1&gt;
&lt;h2 id=&#34;hardware-platform&#34;&gt;Hardware Platform&lt;/h2&gt;
&lt;h3 id=&#34;2d-linear-rail-system&#34;&gt;2D Linear Rail System&lt;/h3&gt;


















&lt;figure  id=&#34;figure-diy-2d-linear-rail-imaging-platform&#34;&gt;
  &lt;div class=&#34;d-flex justify-content-center&#34;&gt;
    &lt;div class=&#34;w-100&#34; &gt;&lt;img alt=&#34;DIY 2D Linear Rail Imaging Platform&#34; srcset=&#34;
               /project/radarimage/%E5%AE%9E%E9%AA%8C%E5%B9%B3%E5%8F%B0_hu3825acf69f706ab6cc74f2ccf48b2792_281752_e2389994569017be8fd4c0842ae37b09.webp 400w,
               /project/radarimage/%E5%AE%9E%E9%AA%8C%E5%B9%B3%E5%8F%B0_hu3825acf69f706ab6cc74f2ccf48b2792_281752_169387aea5d8e32f3c688cf65f024f17.webp 760w,
               /project/radarimage/%E5%AE%9E%E9%AA%8C%E5%B9%B3%E5%8F%B0_hu3825acf69f706ab6cc74f2ccf48b2792_281752_1200x1200_fit_q75_h2_lanczos.webp 1200w&#34;
               src=&#34;http://tkwer.site/project/radarimage/%E5%AE%9E%E9%AA%8C%E5%B9%B3%E5%8F%B0_hu3825acf69f706ab6cc74f2ccf48b2792_281752_e2389994569017be8fd4c0842ae37b09.webp&#34;
               width=&#34;760&#34;
               height=&#34;570&#34;
               loading=&#34;lazy&#34; data-zoomable /&gt;&lt;/div&gt;
  &lt;/div&gt;&lt;figcaption&gt;
      DIY 2D Linear Rail Imaging Platform
    &lt;/figcaption&gt;&lt;/figure&gt;

&lt;p&gt;The mechanical platform consists of:&lt;/p&gt;
&lt;ul&gt;
&lt;li&gt;&lt;strong&gt;X-Y Linear Rails&lt;/strong&gt;: Custom-built 2D scanning mechanism&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Stepper Motors&lt;/strong&gt;: NEMA 17 stepper motors for both axes&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;GRBL Controller&lt;/strong&gt;: GRBL-based motion control board&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;mmWave Radar&lt;/strong&gt;: Mounted on the moving platform for scanning&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Frame Structure&lt;/strong&gt;: Aluminum extrusion frame for rigidity&lt;/li&gt;
&lt;/ul&gt;
&lt;h3 id=&#34;motion-control-system&#34;&gt;Motion Control System&lt;/h3&gt;
&lt;p&gt;&lt;strong&gt;GRBL-Based Control&lt;/strong&gt;&lt;/p&gt;
&lt;ul&gt;
&lt;li&gt;&lt;strong&gt;Open-Loop Stepper Control&lt;/strong&gt;: Cost-effective but prone to step loss&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;G-Code Commands&lt;/strong&gt;: Standard CNC control protocol&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;USB Serial Interface&lt;/strong&gt;: Direct PC communication&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Configurable Speed&lt;/strong&gt;: Adjustable scanning speed (trade-off: speed vs. accuracy)&lt;/li&gt;
&lt;/ul&gt;
&lt;p&gt;&lt;strong&gt;Key Limitation&lt;/strong&gt;: The use of &lt;strong&gt;open-loop stepper motors&lt;/strong&gt; without position feedback means:&lt;/p&gt;
&lt;ul&gt;
&lt;li&gt;⚠️ Rapid scanning can cause &lt;strong&gt;step loss&lt;/strong&gt;&lt;/li&gt;
&lt;li&gt;⚠️ No error correction for missed steps&lt;/li&gt;
&lt;li&gt;⚠️ Position accuracy degrades over time&lt;/li&gt;
&lt;li&gt;⚠️ Vibration and acceleration affect precision&lt;/li&gt;
&lt;/ul&gt;
&lt;h2 id=&#34;software-integration&#34;&gt;Software Integration&lt;/h2&gt;
&lt;h3 id=&#34;all-in-one-program&#34;&gt;All-in-One Program&lt;/h3&gt;
&lt;p&gt;One of the key advantages of this DIY approach is &lt;strong&gt;vendor independence&lt;/strong&gt;. Unlike commercial systems requiring proprietary software, this system integrates everything into a single lightweight program.&lt;/p&gt;
&lt;p&gt;&lt;strong&gt;Benefits&lt;/strong&gt;:&lt;/p&gt;
&lt;ul&gt;
&lt;li&gt;✅ No dependency on vendor-specific software&lt;/li&gt;
&lt;li&gt;✅ Easy to modify and experiment&lt;/li&gt;
&lt;li&gt;✅ Lightweight and fast&lt;/li&gt;
&lt;li&gt;✅ Complete control over scanning patterns&lt;/li&gt;
&lt;li&gt;✅ Integrated data processing pipeline&lt;/li&gt;
&lt;/ul&gt;
&lt;h3 id=&#34;imaging-algorithm&#34;&gt;Imaging Algorithm&lt;/h3&gt;
&lt;p&gt;&lt;strong&gt;Near-Field SAR Processing&lt;/strong&gt;&lt;/p&gt;
&lt;ol&gt;
&lt;li&gt;&lt;strong&gt;Data Collection&lt;/strong&gt;: Radar samples at each grid point&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Range Profile Extraction&lt;/strong&gt;: FFT processing of raw radar data&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Spatial Sampling&lt;/strong&gt;: 2D grid scanning pattern&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Back-Projection Algorithm&lt;/strong&gt;: Coherent summation for image formation&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Image Reconstruction&lt;/strong&gt;: 2D/3D visualization of targets&lt;/li&gt;
&lt;/ol&gt;
&lt;p&gt;&lt;strong&gt;Implemented Algorithms&lt;/strong&gt;:&lt;/p&gt;
&lt;ul&gt;
&lt;li&gt;Range-Doppler processing&lt;/li&gt;
&lt;li&gt;Back-projection imaging&lt;/li&gt;
&lt;li&gt;Frequency-domain focusing&lt;/li&gt;
&lt;li&gt;Basic clutter suppression&lt;/li&gt;
&lt;/ul&gt;
&lt;h1 id=&#34;experimental-results&#34;&gt;Experimental Results&lt;/h1&gt;
&lt;h2 id=&#34;imaging-performance&#34;&gt;Imaging Performance&lt;/h2&gt;


















&lt;figure  id=&#34;figure-near-field-imaging-results&#34;&gt;
  &lt;div class=&#34;d-flex justify-content-center&#34;&gt;
    &lt;div class=&#34;w-100&#34; &gt;&lt;img alt=&#34;Near-Field Imaging Results&#34; srcset=&#34;
               /project/radarimage/%E6%88%90%E5%83%8F%E7%BB%93%E6%9E%9C_hu5c513d026fb32ddae18b64f335c582cd_313100_5c04f8db991555e9b9921275c8e8616e.webp 400w,
               /project/radarimage/%E6%88%90%E5%83%8F%E7%BB%93%E6%9E%9C_hu5c513d026fb32ddae18b64f335c582cd_313100_8c7646d4f671982e8462494786d7be0d.webp 760w,
               /project/radarimage/%E6%88%90%E5%83%8F%E7%BB%93%E6%9E%9C_hu5c513d026fb32ddae18b64f335c582cd_313100_1200x1200_fit_q75_h2_lanczos.webp 1200w&#34;
               src=&#34;http://tkwer.site/project/radarimage/%E6%88%90%E5%83%8F%E7%BB%93%E6%9E%9C_hu5c513d026fb32ddae18b64f335c582cd_313100_5c04f8db991555e9b9921275c8e8616e.webp&#34;
               width=&#34;760&#34;
               height=&#34;427&#34;
               loading=&#34;lazy&#34; data-zoomable /&gt;&lt;/div&gt;
  &lt;/div&gt;&lt;figcaption&gt;
      Near-Field Imaging Results
    &lt;/figcaption&gt;&lt;/figure&gt;

&lt;h3 id=&#34;honest-assessment&#34;&gt;Honest Assessment&lt;/h3&gt;
&lt;p&gt;&lt;strong&gt;Image Quality&lt;/strong&gt;: ⭐⭐☆☆☆ (Poor to Fair)&lt;/p&gt;
&lt;p&gt;The imaging results are &lt;strong&gt;admittedly subpar&lt;/strong&gt; due to several factors:&lt;/p&gt;
&lt;p&gt;&lt;strong&gt;Root Causes of Poor Quality&lt;/strong&gt;:&lt;/p&gt;
&lt;ol&gt;
&lt;li&gt;
&lt;p&gt;&lt;strong&gt;Open-Loop Control&lt;/strong&gt; 🔴&lt;/p&gt;
&lt;ul&gt;
&lt;li&gt;No position feedback&lt;/li&gt;
&lt;li&gt;Accumulated positioning errors&lt;/li&gt;
&lt;li&gt;Step loss during rapid movements&lt;/li&gt;
&lt;/ul&gt;
&lt;/li&gt;
&lt;li&gt;
&lt;p&gt;&lt;strong&gt;Speed vs. Accuracy Trade-off&lt;/strong&gt; 🔴&lt;/p&gt;
&lt;ul&gt;
&lt;li&gt;Prioritized fast scanning (to reduce total scan time)&lt;/li&gt;
&lt;li&gt;High acceleration causes mechanical vibration&lt;/li&gt;
&lt;li&gt;Insufficient settling time at each position&lt;/li&gt;
&lt;/ul&gt;
&lt;/li&gt;
&lt;li&gt;
&lt;p&gt;&lt;strong&gt;Mechanical Limitations&lt;/strong&gt; 🟡&lt;/p&gt;
&lt;ul&gt;
&lt;li&gt;Frame rigidity not optimal&lt;/li&gt;
&lt;li&gt;Backlash in linear rail system&lt;/li&gt;
&lt;li&gt;Motor vibration during movement&lt;/li&gt;
&lt;/ul&gt;
&lt;/li&gt;
&lt;li&gt;
&lt;p&gt;&lt;strong&gt;Limited Calibration&lt;/strong&gt; 🟡&lt;/p&gt;
&lt;ul&gt;
&lt;li&gt;Basic calibration only&lt;/li&gt;
&lt;li&gt;No real-time position verification&lt;/li&gt;
&lt;li&gt;Environmental factors not compensated&lt;/li&gt;
&lt;/ul&gt;
&lt;/li&gt;
&lt;/ol&gt;
&lt;h3 id=&#34;what-worked&#34;&gt;What Worked&lt;/h3&gt;
&lt;p&gt;Despite the limitations, the system successfully demonstrated:&lt;/p&gt;
&lt;ul&gt;
&lt;li&gt;✅ Basic near-field imaging capability&lt;/li&gt;
&lt;li&gt;✅ Automated 2D scanning&lt;/li&gt;
&lt;li&gt;✅ Real-time data acquisition and processing&lt;/li&gt;
&lt;li&gt;✅ Vendor-independent operation&lt;/li&gt;
&lt;li&gt;✅ Rapid prototyping and experimentation&lt;/li&gt;
&lt;/ul&gt;
&lt;h1 id=&#34;lessons-learned&#34;&gt;Lessons Learned&lt;/h1&gt;
&lt;p&gt;This project provided valuable insights into radar imaging systems:&lt;/p&gt;
&lt;h2 id=&#34;technical-insights&#34;&gt;Technical Insights&lt;/h2&gt;
&lt;ol&gt;
&lt;li&gt;
&lt;p&gt;&lt;strong&gt;Position Accuracy is Critical&lt;/strong&gt; 🎯&lt;/p&gt;
&lt;ul&gt;
&lt;li&gt;Imaging quality is directly tied to positioning precision&lt;/li&gt;
&lt;li&gt;Open-loop control is insufficient for high-quality imaging&lt;/li&gt;
&lt;li&gt;Sub-millimeter accuracy needed for good results&lt;/li&gt;
&lt;/ul&gt;
&lt;/li&gt;
&lt;li&gt;
&lt;p&gt;&lt;strong&gt;Speed-Accuracy Trade-off&lt;/strong&gt; ⚖️&lt;/p&gt;
&lt;ul&gt;
&lt;li&gt;Faster scanning reduces total time but degrades quality&lt;/li&gt;
&lt;li&gt;Need to balance throughput with positioning accuracy&lt;/li&gt;
&lt;li&gt;Acceleration profiles matter significantly&lt;/li&gt;
&lt;/ul&gt;
&lt;/li&gt;
&lt;li&gt;
&lt;p&gt;&lt;strong&gt;System Integration Complexity&lt;/strong&gt; 🔧&lt;/p&gt;
&lt;ul&gt;
&lt;li&gt;Synchronizing motion and data acquisition is challenging&lt;/li&gt;
&lt;li&gt;Timing jitter affects image quality&lt;/li&gt;
&lt;li&gt;Need robust error handling&lt;/li&gt;
&lt;/ul&gt;
&lt;/li&gt;
&lt;li&gt;
&lt;p&gt;&lt;strong&gt;DIY Advantages&lt;/strong&gt; 💡&lt;/p&gt;
&lt;ul&gt;
&lt;li&gt;Complete control over system parameters&lt;/li&gt;
&lt;li&gt;Easy to experiment with different algorithms&lt;/li&gt;
&lt;li&gt;Low cost for learning and prototyping&lt;/li&gt;
&lt;li&gt;No vendor lock-in&lt;/li&gt;
&lt;/ul&gt;
&lt;/li&gt;
&lt;/ol&gt;
&lt;h1 id=&#34;future-improvements&#34;&gt;Future Improvements&lt;/h1&gt;
&lt;h2 id=&#34;next-generation-platform-planned-&#34;&gt;Next-Generation Platform (Planned) 🚀&lt;/h2&gt;
&lt;p&gt;I plan to build an improved desktop-scale imaging platform with:&lt;/p&gt;
&lt;h3 id=&#34;mechanical-upgrades&#34;&gt;Mechanical Upgrades&lt;/h3&gt;
&lt;ul&gt;
&lt;li&gt;&lt;strong&gt;CoreXY Structure&lt;/strong&gt; 🔄
&lt;ul&gt;
&lt;li&gt;Better speed and acceleration&lt;/li&gt;
&lt;li&gt;Reduced moving mass&lt;/li&gt;
&lt;li&gt;Improved positioning accuracy&lt;/li&gt;
&lt;li&gt;Smoother motion profiles&lt;/li&gt;
&lt;/ul&gt;
&lt;/li&gt;
&lt;/ul&gt;
&lt;h3 id=&#34;motion-control-upgrades&#34;&gt;Motion Control Upgrades&lt;/h3&gt;
&lt;ul&gt;
&lt;li&gt;&lt;strong&gt;Closed-Loop Stepper Motors&lt;/strong&gt; 🔁
&lt;ul&gt;
&lt;li&gt;Real-time position feedback&lt;/li&gt;
&lt;li&gt;Automatic error correction&lt;/li&gt;
&lt;li&gt;Stall detection and recovery&lt;/li&gt;
&lt;li&gt;Precise position verification&lt;/li&gt;
&lt;/ul&gt;
&lt;/li&gt;
&lt;/ul&gt;
&lt;h3 id=&#34;radar-upgrades&#34;&gt;Radar Upgrades&lt;/h3&gt;
&lt;ul&gt;
&lt;li&gt;&lt;strong&gt;Custom DIY Mini Radar Board&lt;/strong&gt; 📡
&lt;ul&gt;
&lt;li&gt;Compact form factor&lt;/li&gt;
&lt;li&gt;Optimized for near-field imaging&lt;/li&gt;
&lt;li&gt;Better integration with platform&lt;/li&gt;
&lt;li&gt;Lower cost and higher flexibility&lt;/li&gt;
&lt;/ul&gt;
&lt;/li&gt;
&lt;/ul&gt;
&lt;h3 id=&#34;software-enhancements&#34;&gt;Software Enhancements&lt;/h3&gt;
&lt;ul&gt;
&lt;li&gt;Advanced imaging algorithms (RMA, Omega-K)&lt;/li&gt;
&lt;li&gt;Real-time autofocus&lt;/li&gt;
&lt;li&gt;Motion compensation&lt;/li&gt;
&lt;li&gt;Enhanced visualization&lt;/li&gt;
&lt;/ul&gt;
&lt;h2 id=&#34;expected-improvements&#34;&gt;Expected Improvements&lt;/h2&gt;
&lt;p&gt;With these upgrades, the next version should achieve:&lt;/p&gt;
&lt;ul&gt;
&lt;li&gt;📈 &lt;strong&gt;10x better positioning accuracy&lt;/strong&gt; (closed-loop control)&lt;/li&gt;
&lt;li&gt;📈 &lt;strong&gt;5x faster scanning&lt;/strong&gt; (CoreXY kinematics)&lt;/li&gt;
&lt;li&gt;📈 &lt;strong&gt;Significantly improved image quality&lt;/strong&gt; (better hardware + algorithms)&lt;/li&gt;
&lt;li&gt;📈 &lt;strong&gt;More reliable operation&lt;/strong&gt; (error detection and correction)&lt;/li&gt;
&lt;/ul&gt;
&lt;hr&gt;
&lt;p&gt;&lt;strong&gt;Stay Tuned!&lt;/strong&gt; 🎬&lt;/p&gt;
&lt;p&gt;The next-generation platform is in the planning stages. Follow this space for updates on the improved desktop mmWave imaging system!&lt;/p&gt;
&lt;h1 id=&#34;applications&#34;&gt;Applications&lt;/h1&gt;
&lt;p&gt;Despite current limitations, near-field radar imaging has promising applications:&lt;/p&gt;
&lt;ul&gt;
&lt;li&gt;🔍 &lt;strong&gt;Non-Destructive Testing&lt;/strong&gt;: Detecting defects in materials&lt;/li&gt;
&lt;li&gt;📦 &lt;strong&gt;Security Screening&lt;/strong&gt;: Concealed object detection&lt;/li&gt;
&lt;li&gt;🏥 &lt;strong&gt;Medical Imaging&lt;/strong&gt;: Complementary to other modalities&lt;/li&gt;
&lt;li&gt;🔬 &lt;strong&gt;Research &amp;amp; Education&lt;/strong&gt;: Learning radar imaging principles&lt;/li&gt;
&lt;li&gt;🛠️ &lt;strong&gt;Prototyping&lt;/strong&gt;: Testing imaging algorithms&lt;/li&gt;
&lt;/ul&gt;
&lt;h1 id=&#34;technical-specifications&#34;&gt;Technical Specifications&lt;/h1&gt;
&lt;h2 id=&#34;current-system&#34;&gt;Current System&lt;/h2&gt;
&lt;table&gt;
&lt;thead&gt;
&lt;tr&gt;
&lt;th&gt;Component&lt;/th&gt;
&lt;th&gt;Specification&lt;/th&gt;
&lt;/tr&gt;
&lt;/thead&gt;
&lt;tbody&gt;
&lt;tr&gt;
&lt;td&gt;&lt;strong&gt;Scanning Area&lt;/strong&gt;&lt;/td&gt;
&lt;td&gt;300mm × 300mm&lt;/td&gt;
&lt;/tr&gt;
&lt;tr&gt;
&lt;td&gt;&lt;strong&gt;Position Resolution&lt;/strong&gt;&lt;/td&gt;
&lt;td&gt;~1mm (theoretical, degraded in practice)&lt;/td&gt;
&lt;/tr&gt;
&lt;tr&gt;
&lt;td&gt;&lt;strong&gt;Scanning Speed&lt;/strong&gt;&lt;/td&gt;
&lt;td&gt;50-100 mm/s&lt;/td&gt;
&lt;/tr&gt;
&lt;tr&gt;
&lt;td&gt;&lt;strong&gt;Radar Frequency&lt;/strong&gt;&lt;/td&gt;
&lt;td&gt;77 GHz (mmWave)&lt;/td&gt;
&lt;/tr&gt;
&lt;tr&gt;
&lt;td&gt;&lt;strong&gt;Range Resolution&lt;/strong&gt;&lt;/td&gt;
&lt;td&gt;~5 cm&lt;/td&gt;
&lt;/tr&gt;
&lt;tr&gt;
&lt;td&gt;&lt;strong&gt;Total Scan Time&lt;/strong&gt;&lt;/td&gt;
&lt;td&gt;5-10 minutes (full area)&lt;/td&gt;
&lt;/tr&gt;
&lt;tr&gt;
&lt;td&gt;&lt;strong&gt;Control Interface&lt;/strong&gt;&lt;/td&gt;
&lt;td&gt;USB Serial (GRBL)&lt;/td&gt;
&lt;/tr&gt;
&lt;tr&gt;
&lt;td&gt;&lt;strong&gt;Software&lt;/strong&gt;&lt;/td&gt;
&lt;td&gt;Custom Python/C++ program&lt;/td&gt;
&lt;/tr&gt;
&lt;/tbody&gt;
&lt;/table&gt;
&lt;hr&gt;
&lt;p&gt;&lt;strong&gt;Conclusion&lt;/strong&gt;: This project represents an honest exploration of DIY radar imaging. While the results fell short of commercial systems, the learning experience and system integration knowledge gained were invaluable. The next iteration will address the identified shortcomings with better hardware and control strategies.&lt;/p&gt;
</description>
    </item>
    
    <item>
      <title>Radar-Camera Fusion</title>
      <link>http://tkwer.site/project/radarcamerafusion/</link>
      <pubDate>Thu, 27 Aug 2020 00:00:00 +0000</pubDate>
      <guid>http://tkwer.site/project/radarcamerafusion/</guid>
      <description>&lt;h1 id=&#34;overview&#34;&gt;Overview&lt;/h1&gt;
&lt;p&gt;&lt;strong&gt;Radar-Camera Fusion&lt;/strong&gt; is an early-stage research project from my master&amp;rsquo;s studies, exploring multi-sensor fusion techniques for enhanced target detection and tracking. This project implements a &lt;strong&gt;late fusion strategy&lt;/strong&gt; that combines the complementary strengths of mmWave radar and camera sensors to achieve robust object localization and tracking in various scenarios.&lt;/p&gt;
&lt;blockquote&gt;
&lt;p&gt;&lt;strong&gt;Note&lt;/strong&gt;: This was one of my initial research explorations during my master&amp;rsquo;s program. While I may not recall every implementation detail, the core methodology and system architecture remain documented here.&lt;/p&gt;
&lt;/blockquote&gt;
&lt;h1 id=&#34;system-architecture&#34;&gt;System Architecture&lt;/h1&gt;
&lt;p&gt;The system implements a &lt;strong&gt;late fusion strategy&lt;/strong&gt; where radar and camera data are processed independently before being fused at the decision level. This approach leverages the strengths of both sensors:&lt;/p&gt;
&lt;ul&gt;
&lt;li&gt;&lt;strong&gt;Camera&lt;/strong&gt;: Provides rich visual information for object classification&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;mmWave Radar&lt;/strong&gt;: Offers accurate range, velocity, and robust performance in adverse conditions&lt;/li&gt;
&lt;/ul&gt;
&lt;h2 id=&#34;fusion-pipeline&#34;&gt;Fusion Pipeline&lt;/h2&gt;
&lt;div class=&#34;highlight&#34;&gt;&lt;pre tabindex=&#34;0&#34; class=&#34;chroma&#34;&gt;&lt;code class=&#34;language-fallback&#34; data-lang=&#34;fallback&#34;&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;Camera Stream               Range/Azimuth Extraction
&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;     ↓                                ↓
&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;  ROI Crop                  Point Cloud Processing
&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;     ↓                                ↓
&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;YOLO Detection               Radar Point Cloud
&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;     ↓                                ↓
&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;DeepSORT Tracking                      ↓
&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;     ↓                                ↓
&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;Monocular Ranging                      ↓
&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;     ↓                                ↓
&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;     └────→ Data Fusion ←────┘
&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;              ↓
&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;      Fused Target Info
&lt;/span&gt;&lt;/span&gt;&lt;/code&gt;&lt;/pre&gt;&lt;/div&gt;&lt;h1 id=&#34;core-components&#34;&gt;Core Components&lt;/h1&gt;
&lt;h2 id=&#34;1-camera-processing-pipeline&#34;&gt;1. Camera Processing Pipeline&lt;/h2&gt;
&lt;h3 id=&#34;roi-extraction&#34;&gt;ROI Extraction&lt;/h3&gt;
&lt;ul&gt;
&lt;li&gt;&lt;strong&gt;Image Preprocessing&lt;/strong&gt;: Frame capture and region of interest cropping&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Adaptive ROI&lt;/strong&gt;: Dynamic region selection based on scene context&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Resolution Optimization&lt;/strong&gt;: Balanced processing speed and detection accuracy&lt;/li&gt;
&lt;/ul&gt;
&lt;h3 id=&#34;yolo-object-detection&#34;&gt;YOLO Object Detection&lt;/h3&gt;
&lt;ul&gt;
&lt;li&gt;&lt;strong&gt;Real-time Detection&lt;/strong&gt;: YOLOv5 for fast and accurate object detection&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Multi-Class Recognition&lt;/strong&gt;: Supports pedestrians, vehicles, and other objects&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Bounding Box Output&lt;/strong&gt;: Precise object localization in image coordinates&lt;/li&gt;
&lt;/ul&gt;
&lt;h3 id=&#34;deepsort-tracking&#34;&gt;DeepSORT Tracking&lt;/h3&gt;
&lt;ul&gt;
&lt;li&gt;&lt;strong&gt;Multi-Object Tracking&lt;/strong&gt;: Maintains consistent IDs across frames&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Appearance Features&lt;/strong&gt;: Deep learning-based appearance descriptor&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Kalman Filtering&lt;/strong&gt;: Smooth trajectory prediction and association&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;ID Management&lt;/strong&gt;: Handles occlusions and re-identification&lt;/li&gt;
&lt;/ul&gt;
&lt;h3 id=&#34;monocular-distance-estimation&#34;&gt;Monocular Distance Estimation&lt;/h3&gt;
&lt;ul&gt;
&lt;li&gt;&lt;strong&gt;Pinhole Camera Model&lt;/strong&gt;: Geometric-based distance calculation&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Calibration&lt;/strong&gt;: Camera intrinsic parameter calibration&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Height-Based Ranging&lt;/strong&gt;: Estimates distance using object height and camera parameters&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Azimuth Calculation&lt;/strong&gt;: Computes horizontal angle from image coordinates&lt;/li&gt;
&lt;/ul&gt;
&lt;h2 id=&#34;2-radar-processing-pipeline&#34;&gt;2. Radar Processing Pipeline&lt;/h2&gt;
&lt;h3 id=&#34;point-cloud-generation&#34;&gt;Point Cloud Generation&lt;/h3&gt;
&lt;ul&gt;
&lt;li&gt;&lt;strong&gt;mmWave Radar&lt;/strong&gt;: Processes raw radar data to extract point clouds&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Range-Azimuth-Elevation&lt;/strong&gt;: 3D spatial information for each detection&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Velocity Information&lt;/strong&gt;: Doppler-based velocity measurement&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;SNR Filtering&lt;/strong&gt;: Signal quality-based point filtering&lt;/li&gt;
&lt;/ul&gt;
&lt;h3 id=&#34;target-extraction&#34;&gt;Target Extraction&lt;/h3&gt;
&lt;ul&gt;
&lt;li&gt;&lt;strong&gt;Clustering&lt;/strong&gt;: Groups radar points into distinct targets&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Centroid Calculation&lt;/strong&gt;: Computes target center position&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Velocity Estimation&lt;/strong&gt;: Extracts radial velocity for each target&lt;/li&gt;
&lt;/ul&gt;
&lt;h2 id=&#34;3-late-fusion-strategy&#34;&gt;3. Late Fusion Strategy&lt;/h2&gt;
&lt;h3 id=&#34;spatial-alignment&#34;&gt;Spatial Alignment&lt;/h3&gt;
&lt;ul&gt;
&lt;li&gt;&lt;strong&gt;Coordinate Transformation&lt;/strong&gt;: Converts camera and radar coordinates to common reference frame&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Calibration Matrix&lt;/strong&gt;: Extrinsic calibration between camera and radar&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Temporal Synchronization&lt;/strong&gt;: Aligns timestamps between sensors&lt;/li&gt;
&lt;/ul&gt;
&lt;h3 id=&#34;data-association&#34;&gt;Data Association&lt;/h3&gt;
&lt;ul&gt;
&lt;li&gt;&lt;strong&gt;Position Matching&lt;/strong&gt;: Associates camera detections with radar points based on spatial proximity&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Gating Threshold&lt;/strong&gt;: Defines acceptable matching distance&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Confidence Weighting&lt;/strong&gt;: Combines detection confidences from both sensors&lt;/li&gt;
&lt;/ul&gt;
&lt;h3 id=&#34;fused-output&#34;&gt;Fused Output&lt;/h3&gt;
&lt;ul&gt;
&lt;li&gt;&lt;strong&gt;Enhanced Localization&lt;/strong&gt;: Improved position accuracy by combining both sensors&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Velocity Information&lt;/strong&gt;: Radar-provided velocity enriches camera detections&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Robust Detection&lt;/strong&gt;: Maintains tracking even when one sensor fails&lt;/li&gt;
&lt;/ul&gt;
&lt;h1 id=&#34;technical-highlights&#34;&gt;Technical Highlights&lt;/h1&gt;
&lt;h2 id=&#34;advantages-of-late-fusion&#34;&gt;Advantages of Late Fusion&lt;/h2&gt;
&lt;ul&gt;
&lt;li&gt;✅ &lt;strong&gt;Modularity&lt;/strong&gt;: Independent processing allows easy sensor replacement or upgrade&lt;/li&gt;
&lt;li&gt;✅ &lt;strong&gt;Robustness&lt;/strong&gt;: System continues functioning if one sensor fails&lt;/li&gt;
&lt;li&gt;✅ &lt;strong&gt;Flexibility&lt;/strong&gt;: Can adjust fusion weights based on environmental conditions&lt;/li&gt;
&lt;li&gt;✅ &lt;strong&gt;Interpretability&lt;/strong&gt;: Clear understanding of each sensor&amp;rsquo;s contribution&lt;/li&gt;
&lt;/ul&gt;
&lt;h2 id=&#34;challenges-addressed&#34;&gt;Challenges Addressed&lt;/h2&gt;
&lt;ul&gt;
&lt;li&gt;&lt;strong&gt;Calibration Complexity&lt;/strong&gt;: Precise spatial and temporal alignment between sensors&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Coordinate Transformation&lt;/strong&gt;: Accurate mapping between different sensor coordinate systems&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Association Ambiguity&lt;/strong&gt;: Resolving which radar points correspond to which camera detections&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Computational Efficiency&lt;/strong&gt;: Real-time processing of dual sensor streams&lt;/li&gt;
&lt;/ul&gt;
&lt;h1 id=&#34;applications&#34;&gt;Applications&lt;/h1&gt;
&lt;p&gt;The radar-camera fusion system is particularly well-suited for scenarios requiring both visual recognition and precise ranging:&lt;/p&gt;
&lt;h2 id=&#34;autonomous-driving&#34;&gt;Autonomous Driving&lt;/h2&gt;
&lt;ul&gt;
&lt;li&gt;&lt;strong&gt;Pedestrian Detection&lt;/strong&gt;: Enhanced detection accuracy in various lighting conditions&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Vehicle Tracking&lt;/strong&gt;: Robust tracking with velocity information&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Obstacle Avoidance&lt;/strong&gt;: Reliable distance measurement for path planning&lt;/li&gt;
&lt;/ul&gt;
&lt;h2 id=&#34;intelligent-surveillance&#34;&gt;Intelligent Surveillance&lt;/h2&gt;
&lt;ul&gt;
&lt;li&gt;&lt;strong&gt;Perimeter Security&lt;/strong&gt;: Combined visual identification and range verification&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Intrusion Detection&lt;/strong&gt;: Multi-modal confirmation reduces false alarms&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Crowd Monitoring&lt;/strong&gt;: Track multiple targets with unique identities&lt;/li&gt;
&lt;/ul&gt;
&lt;h2 id=&#34;smart-transportation&#34;&gt;Smart Transportation&lt;/h2&gt;
&lt;ul&gt;
&lt;li&gt;&lt;strong&gt;Traffic Flow Analysis&lt;/strong&gt;: Vehicle counting and speed measurement&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Parking Management&lt;/strong&gt;: Occupancy detection with vehicle classification&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Intersection Monitoring&lt;/strong&gt;: Multi-target tracking in complex scenarios&lt;/li&gt;
&lt;/ul&gt;
&lt;h2 id=&#34;robotics&#34;&gt;Robotics&lt;/h2&gt;
&lt;ul&gt;
&lt;li&gt;&lt;strong&gt;Navigation&lt;/strong&gt;: Obstacle detection and avoidance&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Human-Robot Interaction&lt;/strong&gt;: Safe distance maintenance and gesture recognition&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Object Manipulation&lt;/strong&gt;: Precise localization for grasping tasks&lt;/li&gt;
&lt;/ul&gt;
&lt;h1 id=&#34;system-demonstration&#34;&gt;System Demonstration&lt;/h1&gt;


















&lt;figure  id=&#34;figure-radar-camera-fusion-system-in-action&#34;&gt;
  &lt;div class=&#34;d-flex justify-content-center&#34;&gt;
    &lt;div class=&#34;w-100&#34; &gt;&lt;img alt=&#34;Radar-Camera Fusion System in Action&#34;
           src=&#34;http://tkwer.site/project/radarcamerafusion/featured.gif&#34;
           loading=&#34;lazy&#34; data-zoomable /&gt;&lt;/div&gt;
  &lt;/div&gt;&lt;figcaption&gt;
      Radar-Camera Fusion System in Action
    &lt;/figcaption&gt;&lt;/figure&gt;

&lt;p&gt;The demonstration shows the system&amp;rsquo;s ability to:&lt;/p&gt;
&lt;ul&gt;
&lt;li&gt;Detect and classify objects using camera-based YOLO&lt;/li&gt;
&lt;li&gt;Track multiple targets with DeepSORT&lt;/li&gt;
&lt;li&gt;Fuse camera-derived positions with radar point clouds&lt;/li&gt;
&lt;li&gt;Provide accurate range and velocity information&lt;/li&gt;
&lt;/ul&gt;
&lt;h1 id=&#34;technical-specifications&#34;&gt;Technical Specifications&lt;/h1&gt;
&lt;h2 id=&#34;camera-module&#34;&gt;Camera Module&lt;/h2&gt;
&lt;ul&gt;
&lt;li&gt;&lt;strong&gt;Resolution&lt;/strong&gt;: 1920×1080 (Full HD)&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Frame Rate&lt;/strong&gt;: 30 FPS&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Field of View&lt;/strong&gt;: 60° horizontal&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Detection Network&lt;/strong&gt;: YOLOv5&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Tracking Algorithm&lt;/strong&gt;: DeepSORT&lt;/li&gt;
&lt;/ul&gt;
&lt;h2 id=&#34;radar-module&#34;&gt;Radar Module&lt;/h2&gt;
&lt;ul&gt;
&lt;li&gt;&lt;strong&gt;Frequency&lt;/strong&gt;: 77 GHz mmWave&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Range Resolution&lt;/strong&gt;: 0.1-0.2 m&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Velocity Range&lt;/strong&gt;: ±50 m/s&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Angular Resolution&lt;/strong&gt;: 15° (azimuth)&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Update Rate&lt;/strong&gt;: 10-20 Hz&lt;/li&gt;
&lt;/ul&gt;
&lt;h2 id=&#34;fusion-performance&#34;&gt;Fusion Performance&lt;/h2&gt;
&lt;ul&gt;
&lt;li&gt;&lt;strong&gt;Latency&lt;/strong&gt;: &amp;lt; 100 ms (end-to-end)&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Position Accuracy&lt;/strong&gt;: ±0.3 m (fused)&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Tracking Range&lt;/strong&gt;: 0.5-50 m&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Max Targets&lt;/strong&gt;: 10+ simultaneous objects&lt;/li&gt;
&lt;/ul&gt;
&lt;h1 id=&#34;lessons-learned&#34;&gt;Lessons Learned&lt;/h1&gt;
&lt;p&gt;This early research project provided valuable insights into multi-sensor fusion:&lt;/p&gt;
&lt;ul&gt;
&lt;li&gt;✅ &lt;strong&gt;Complementary Strengths&lt;/strong&gt;: Camera excels at classification, radar at ranging&lt;/li&gt;
&lt;li&gt;✅ &lt;strong&gt;Calibration is Critical&lt;/strong&gt;: Accurate sensor alignment is essential for fusion&lt;/li&gt;
&lt;li&gt;✅ &lt;strong&gt;Late Fusion Trade-offs&lt;/strong&gt;: Simpler implementation but may miss low-level correlations&lt;/li&gt;
&lt;li&gt;✅ &lt;strong&gt;Real-time Challenges&lt;/strong&gt;: Synchronization and computational efficiency are key&lt;/li&gt;
&lt;/ul&gt;
&lt;h1 id=&#34;future-directions&#34;&gt;Future Directions&lt;/h1&gt;
&lt;p&gt;While this project explored late fusion, modern approaches might consider:&lt;/p&gt;
&lt;ul&gt;
&lt;li&gt;&lt;strong&gt;Early Fusion&lt;/strong&gt;: Fusing raw sensor data for richer feature extraction&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Deep Learning Fusion&lt;/strong&gt;: End-to-end neural networks for joint processing&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Adaptive Fusion&lt;/strong&gt;: Dynamic weighting based on environmental conditions&lt;/li&gt;
&lt;li&gt;&lt;strong&gt;Additional Sensors&lt;/strong&gt;: Incorporating LiDAR or thermal cameras&lt;/li&gt;
&lt;/ul&gt;
</description>
    </item>
    
    <item>
      <title>Slides</title>
      <link>http://tkwer.site/slides/example/</link>
      <pubDate>Tue, 05 Feb 2019 00:00:00 +0000</pubDate>
      <guid>http://tkwer.site/slides/example/</guid>
      <description>&lt;h1 id=&#34;create-slides-in-markdown-with-wowchemy&#34;&gt;Create slides in Markdown with Wowchemy&lt;/h1&gt;
&lt;p&gt;&lt;a href=&#34;https://wowchemy.com/&#34; target=&#34;_blank&#34; rel=&#34;noopener&#34;&gt;Wowchemy&lt;/a&gt; | &lt;a href=&#34;https://wowchemy.com/docs/content/slides/&#34; target=&#34;_blank&#34; rel=&#34;noopener&#34;&gt;Documentation&lt;/a&gt;&lt;/p&gt;
&lt;hr&gt;
&lt;h2 id=&#34;features&#34;&gt;Features&lt;/h2&gt;
&lt;ul&gt;
&lt;li&gt;Efficiently write slides in Markdown&lt;/li&gt;
&lt;li&gt;3-in-1: Create, Present, and Publish your slides&lt;/li&gt;
&lt;li&gt;Supports speaker notes&lt;/li&gt;
&lt;li&gt;Mobile friendly slides&lt;/li&gt;
&lt;/ul&gt;
&lt;hr&gt;
&lt;h2 id=&#34;controls&#34;&gt;Controls&lt;/h2&gt;
&lt;ul&gt;
&lt;li&gt;Next: &lt;code&gt;Right Arrow&lt;/code&gt; or &lt;code&gt;Space&lt;/code&gt;&lt;/li&gt;
&lt;li&gt;Previous: &lt;code&gt;Left Arrow&lt;/code&gt;&lt;/li&gt;
&lt;li&gt;Start: &lt;code&gt;Home&lt;/code&gt;&lt;/li&gt;
&lt;li&gt;Finish: &lt;code&gt;End&lt;/code&gt;&lt;/li&gt;
&lt;li&gt;Overview: &lt;code&gt;Esc&lt;/code&gt;&lt;/li&gt;
&lt;li&gt;Speaker notes: &lt;code&gt;S&lt;/code&gt;&lt;/li&gt;
&lt;li&gt;Fullscreen: &lt;code&gt;F&lt;/code&gt;&lt;/li&gt;
&lt;li&gt;Zoom: &lt;code&gt;Alt + Click&lt;/code&gt;&lt;/li&gt;
&lt;li&gt;&lt;a href=&#34;https://revealjs.com/pdf-export/&#34; target=&#34;_blank&#34; rel=&#34;noopener&#34;&gt;PDF Export&lt;/a&gt;&lt;/li&gt;
&lt;/ul&gt;
&lt;hr&gt;
&lt;h2 id=&#34;code-highlighting&#34;&gt;Code Highlighting&lt;/h2&gt;
&lt;p&gt;Inline code: &lt;code&gt;variable&lt;/code&gt;&lt;/p&gt;
&lt;p&gt;Code block:&lt;/p&gt;
&lt;div class=&#34;highlight&#34;&gt;&lt;pre tabindex=&#34;0&#34; class=&#34;chroma&#34;&gt;&lt;code class=&#34;language-python&#34; data-lang=&#34;python&#34;&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;&lt;span class=&#34;n&#34;&gt;porridge&lt;/span&gt; &lt;span class=&#34;o&#34;&gt;=&lt;/span&gt; &lt;span class=&#34;s2&#34;&gt;&amp;#34;blueberry&amp;#34;&lt;/span&gt;
&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;&lt;span class=&#34;k&#34;&gt;if&lt;/span&gt; &lt;span class=&#34;n&#34;&gt;porridge&lt;/span&gt; &lt;span class=&#34;o&#34;&gt;==&lt;/span&gt; &lt;span class=&#34;s2&#34;&gt;&amp;#34;blueberry&amp;#34;&lt;/span&gt;&lt;span class=&#34;p&#34;&gt;:&lt;/span&gt;
&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;    &lt;span class=&#34;nb&#34;&gt;print&lt;/span&gt;&lt;span class=&#34;p&#34;&gt;(&lt;/span&gt;&lt;span class=&#34;s2&#34;&gt;&amp;#34;Eating...&amp;#34;&lt;/span&gt;&lt;span class=&#34;p&#34;&gt;)&lt;/span&gt;
&lt;/span&gt;&lt;/span&gt;&lt;/code&gt;&lt;/pre&gt;&lt;/div&gt;&lt;hr&gt;
&lt;h2 id=&#34;math&#34;&gt;Math&lt;/h2&gt;
&lt;p&gt;In-line math: $x + y = z$&lt;/p&gt;
&lt;p&gt;Block math:&lt;/p&gt;
&lt;p&gt;$$
f\left( x \right) = \;\frac{{2\left( {x + 4} \right)\left( {x - 4} \right)}}{{\left( {x + 4} \right)\left( {x + 1} \right)}}
$$&lt;/p&gt;
&lt;hr&gt;
&lt;h2 id=&#34;fragments&#34;&gt;Fragments&lt;/h2&gt;
&lt;p&gt;Make content appear incrementally&lt;/p&gt;
&lt;div class=&#34;highlight&#34;&gt;&lt;pre tabindex=&#34;0&#34; class=&#34;chroma&#34;&gt;&lt;code class=&#34;language-fallback&#34; data-lang=&#34;fallback&#34;&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;{{% fragment %}} One {{% /fragment %}}
&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;{{% fragment %}} **Two** {{% /fragment %}}
&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;{{% fragment %}} Three {{% /fragment %}}
&lt;/span&gt;&lt;/span&gt;&lt;/code&gt;&lt;/pre&gt;&lt;/div&gt;&lt;p&gt;Press &lt;code&gt;Space&lt;/code&gt; to play!&lt;/p&gt;
&lt;span class=&#34;fragment &#34; &gt;
  One
&lt;/span&gt;
&lt;span class=&#34;fragment &#34; &gt;
  &lt;strong&gt;Two&lt;/strong&gt;
&lt;/span&gt;
&lt;span class=&#34;fragment &#34; &gt;
  Three
&lt;/span&gt;
&lt;hr&gt;
&lt;p&gt;A fragment can accept two optional parameters:&lt;/p&gt;
&lt;ul&gt;
&lt;li&gt;&lt;code&gt;class&lt;/code&gt;: use a custom style (requires definition in custom CSS)&lt;/li&gt;
&lt;li&gt;&lt;code&gt;weight&lt;/code&gt;: sets the order in which a fragment appears&lt;/li&gt;
&lt;/ul&gt;
&lt;hr&gt;
&lt;h2 id=&#34;speaker-notes&#34;&gt;Speaker Notes&lt;/h2&gt;
&lt;p&gt;Add speaker notes to your presentation&lt;/p&gt;
&lt;div class=&#34;highlight&#34;&gt;&lt;pre tabindex=&#34;0&#34; class=&#34;chroma&#34;&gt;&lt;code class=&#34;language-markdown&#34; data-lang=&#34;markdown&#34;&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;{{% speaker_note %}}
&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;
&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;&lt;span class=&#34;k&#34;&gt;-&lt;/span&gt; Only the speaker can read these notes
&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;&lt;span class=&#34;k&#34;&gt;-&lt;/span&gt; Press &lt;span class=&#34;sb&#34;&gt;`S`&lt;/span&gt; key to view
&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;  {{% /speaker_note %}}
&lt;/span&gt;&lt;/span&gt;&lt;/code&gt;&lt;/pre&gt;&lt;/div&gt;&lt;p&gt;Press the &lt;code&gt;S&lt;/code&gt; key to view the speaker notes!&lt;/p&gt;
&lt;aside class=&#34;notes&#34;&gt;
  &lt;ul&gt;
&lt;li&gt;Only the speaker can read these notes&lt;/li&gt;
&lt;li&gt;Press &lt;code&gt;S&lt;/code&gt; key to view&lt;/li&gt;
&lt;/ul&gt;

&lt;/aside&gt;
&lt;hr&gt;
&lt;h2 id=&#34;themes&#34;&gt;Themes&lt;/h2&gt;
&lt;ul&gt;
&lt;li&gt;black: Black background, white text, blue links (default)&lt;/li&gt;
&lt;li&gt;white: White background, black text, blue links&lt;/li&gt;
&lt;li&gt;league: Gray background, white text, blue links&lt;/li&gt;
&lt;li&gt;beige: Beige background, dark text, brown links&lt;/li&gt;
&lt;li&gt;sky: Blue background, thin dark text, blue links&lt;/li&gt;
&lt;/ul&gt;
&lt;hr&gt;
&lt;ul&gt;
&lt;li&gt;night: Black background, thick white text, orange links&lt;/li&gt;
&lt;li&gt;serif: Cappuccino background, gray text, brown links&lt;/li&gt;
&lt;li&gt;simple: White background, black text, blue links&lt;/li&gt;
&lt;li&gt;solarized: Cream-colored background, dark green text, blue links&lt;/li&gt;
&lt;/ul&gt;
&lt;hr&gt;

&lt;section data-noprocess data-shortcode-slide
  
      
      data-background-image=&#34;/media/boards.jpg&#34;
  &gt;

&lt;h2 id=&#34;custom-slide&#34;&gt;Custom Slide&lt;/h2&gt;
&lt;p&gt;Customize the slide style and background&lt;/p&gt;
&lt;div class=&#34;highlight&#34;&gt;&lt;pre tabindex=&#34;0&#34; class=&#34;chroma&#34;&gt;&lt;code class=&#34;language-markdown&#34; data-lang=&#34;markdown&#34;&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;{{&lt;span class=&#34;p&#34;&gt;&amp;lt;&lt;/span&gt; &lt;span class=&#34;nt&#34;&gt;slide&lt;/span&gt; &lt;span class=&#34;na&#34;&gt;background-image&lt;/span&gt;&lt;span class=&#34;o&#34;&gt;=&lt;/span&gt;&lt;span class=&#34;s&#34;&gt;&amp;#34;/media/boards.jpg&amp;#34;&lt;/span&gt; &lt;span class=&#34;p&#34;&gt;&amp;gt;&lt;/span&gt;}}
&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;{{&lt;span class=&#34;p&#34;&gt;&amp;lt;&lt;/span&gt; &lt;span class=&#34;nt&#34;&gt;slide&lt;/span&gt; &lt;span class=&#34;na&#34;&gt;background-color&lt;/span&gt;&lt;span class=&#34;o&#34;&gt;=&lt;/span&gt;&lt;span class=&#34;s&#34;&gt;&amp;#34;#0000FF&amp;#34;&lt;/span&gt; &lt;span class=&#34;p&#34;&gt;&amp;gt;&lt;/span&gt;}}
&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;{{&lt;span class=&#34;p&#34;&gt;&amp;lt;&lt;/span&gt; &lt;span class=&#34;nt&#34;&gt;slide&lt;/span&gt; &lt;span class=&#34;na&#34;&gt;class&lt;/span&gt;&lt;span class=&#34;o&#34;&gt;=&lt;/span&gt;&lt;span class=&#34;s&#34;&gt;&amp;#34;my-style&amp;#34;&lt;/span&gt; &lt;span class=&#34;p&#34;&gt;&amp;gt;&lt;/span&gt;}}
&lt;/span&gt;&lt;/span&gt;&lt;/code&gt;&lt;/pre&gt;&lt;/div&gt;&lt;hr&gt;
&lt;h2 id=&#34;custom-css-example&#34;&gt;Custom CSS Example&lt;/h2&gt;
&lt;p&gt;Let&amp;rsquo;s make headers navy colored.&lt;/p&gt;
&lt;p&gt;Create &lt;code&gt;assets/css/reveal_custom.css&lt;/code&gt; with:&lt;/p&gt;
&lt;div class=&#34;highlight&#34;&gt;&lt;pre tabindex=&#34;0&#34; class=&#34;chroma&#34;&gt;&lt;code class=&#34;language-css&#34; data-lang=&#34;css&#34;&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;&lt;span class=&#34;p&#34;&gt;.&lt;/span&gt;&lt;span class=&#34;nc&#34;&gt;reveal&lt;/span&gt; &lt;span class=&#34;nt&#34;&gt;section&lt;/span&gt; &lt;span class=&#34;nt&#34;&gt;h1&lt;/span&gt;&lt;span class=&#34;o&#34;&gt;,&lt;/span&gt;
&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;&lt;span class=&#34;p&#34;&gt;.&lt;/span&gt;&lt;span class=&#34;nc&#34;&gt;reveal&lt;/span&gt; &lt;span class=&#34;nt&#34;&gt;section&lt;/span&gt; &lt;span class=&#34;nt&#34;&gt;h2&lt;/span&gt;&lt;span class=&#34;o&#34;&gt;,&lt;/span&gt;
&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;&lt;span class=&#34;p&#34;&gt;.&lt;/span&gt;&lt;span class=&#34;nc&#34;&gt;reveal&lt;/span&gt; &lt;span class=&#34;nt&#34;&gt;section&lt;/span&gt; &lt;span class=&#34;nt&#34;&gt;h3&lt;/span&gt; &lt;span class=&#34;p&#34;&gt;{&lt;/span&gt;
&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;  &lt;span class=&#34;k&#34;&gt;color&lt;/span&gt;&lt;span class=&#34;p&#34;&gt;:&lt;/span&gt; &lt;span class=&#34;kc&#34;&gt;navy&lt;/span&gt;&lt;span class=&#34;p&#34;&gt;;&lt;/span&gt;
&lt;/span&gt;&lt;/span&gt;&lt;span class=&#34;line&#34;&gt;&lt;span class=&#34;cl&#34;&gt;&lt;span class=&#34;p&#34;&gt;}&lt;/span&gt;
&lt;/span&gt;&lt;/span&gt;&lt;/code&gt;&lt;/pre&gt;&lt;/div&gt;&lt;hr&gt;
&lt;h1 id=&#34;questions&#34;&gt;Questions?&lt;/h1&gt;
&lt;p&gt;&lt;a href=&#34;https://discord.gg/z8wNYzb&#34; target=&#34;_blank&#34; rel=&#34;noopener&#34;&gt;Ask&lt;/a&gt;&lt;/p&gt;
&lt;p&gt;&lt;a href=&#34;https://wowchemy.com/docs/content/slides/&#34; target=&#34;_blank&#34; rel=&#34;noopener&#34;&gt;Documentation&lt;/a&gt;&lt;/p&gt;
</description>
    </item>
    
    <item>
      <title>An example conference paper</title>
      <link>http://tkwer.site/publication/conference-paper/</link>
      <pubDate>Mon, 01 Jul 2013 00:00:00 +0000</pubDate>
      <guid>http://tkwer.site/publication/conference-paper/</guid>
      <description>&lt;div class=&#34;alert alert-note&#34;&gt;
  &lt;div&gt;
    Click the &lt;em&gt;Cite&lt;/em&gt; button above to demo the feature to enable visitors to import publication metadata into their reference management software.
  &lt;/div&gt;
&lt;/div&gt;
&lt;div class=&#34;alert alert-note&#34;&gt;
  &lt;div&gt;
    Create your slides in Markdown - click the &lt;em&gt;Slides&lt;/em&gt; button to check out the example.
  &lt;/div&gt;
&lt;/div&gt;
&lt;p&gt;Supplementary notes can be added here, including &lt;a href=&#34;https://wowchemy.com/docs/writing-markdown-latex/&#34; target=&#34;_blank&#34; rel=&#34;noopener&#34;&gt;code, math, and images&lt;/a&gt;.&lt;/p&gt;
</description>
    </item>
    
    <item>
      <title></title>
      <link>http://tkwer.site/admin/config.yml</link>
      <pubDate>Mon, 01 Jan 0001 00:00:00 +0000</pubDate>
      <guid>http://tkwer.site/admin/config.yml</guid>
      <description></description>
    </item>
    
  </channel>
</rss>
