Compare commits: 862a48131e...main (44 commits)
Commits:
cddd5317c3, b1a7baae19, c993f74511, 39551f3794, bfaa0eca2f, 373b17d8a5, ca742aa204, 831322e44a, 24b65a8ee5, 005292adfc, a512e710fa, e43f8b0efb, 2ba4528c1d, 0af76ed532, 2347158093, faccc3d20b, 4bdd3a8411, 34931f9362, 6bcc541c86, e74fa87103, 0ff8d2b1fb, a917f75ce0, 49f9aa98ed, 5acd03828d, bba0e3a093, fe440960f4, 4f638cdd64, c29499d9b0, acf9b2c4c4, d661bf27ae, 679a87bf45, d9cb5986ee, fccc2ba2e5, 17c7d0e555, 7d75ad7596, 45ec502eca, fa5a16a8ea, 82d11e868f, 85bda0fa27, 0ec9d70cd6, 1e59c59aca, 787bdfe5f7, d6c5058f2e, 3ea0c74e7f
.env.example (new file, 1 line)
@@ -0,0 +1 @@
THERMALCAM_IFACE=enp1s0f0
.gitignore (vendored, 1 line changed)
@@ -2,3 +2,4 @@
/frames
**/*.png
target
.env
Cargo.lock (generated, 4638 lines changed): file diff suppressed because it is too large.
Cargo.toml (21 lines changed)
@@ -5,14 +5,27 @@ edition = "2021"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[lib]
name = "thermaldecoder"
crate-type = ["rlib", "cdylib"]

[dependencies]
anyhow = "1.0.77"
axum = "0.7.4"
bracket-color = "0.8.7"
clap = { version = "4.5.1", features = ["derive"] }
crossbeam = "0.8.4"
crossbeam-channel = "0.5.11"
crossterm = { version = "0.27.0", features = ["event-stream"] }
dotenv = "0.15.0"
eframe = "0.26.2"
egui = "0.26.2"
futures = "0.3.30"
futures-timer = "3.0.3"
indicatif = "0.17.7"
pcap = { version = "1.2.0", features = ["capture-stream"] }
pcap-parser = { version = "0.14.1", features = ["data"] }
png = "0.17.10"
pyo3 = { version = "0.20.0", "features" = ["extension-module"] }
reqwest = { version = "0.11.24", features = ["json"] }
serde = { version = "1.0.193", features = ["derive", "serde_derive", "alloc"] }
tokio = { version = "1.36.0", features = ["full"] }
tracing-subscriber = "0.3.18"
tui-textarea = "0.4.0"
v4l = { version = "0.14.0", features = ["v4l2"], default-features = false }
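The new `[lib]` section builds the crate both as a regular Rust library (`rlib`) and as a Python extension module (`cdylib`, with pyo3's `extension-module` feature). A minimal sketch of the Python-side usage, assuming the extension has already been built and is importable as `thermaldecoder`; the import name, the `decode()` call, and the 384x288 reshape all match `test_rust.py` further down in this compare:

```python
# Sketch only: assumes the pyo3 extension is built and on the Python path,
# and that a capture file named in.pcap exists (as in test_rust.py).
import numpy as np
from thermaldecoder import decode

for i, frame in enumerate(decode("in.pcap")):
    arr = np.array(frame)      # flat 16-bit pixel buffer
    arr.shape = (384, 288)     # sensor resolution used throughout this diff
    print(i, arr.min(), arr.max())
```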
README.md (25 lines changed)
@@ -1,8 +1,31 @@
### Thermal decoder

https://wiki.telavivmakers.org/tamiwiki/projects/thermalcam
https://telavivmakers.org/tamiwiki/projects/thermalcam


### Starting the stream

#### Enable jumbo frames

```
sudo ip link set eth0 mtu 9000
```

#### Send start packet
You need to send a special packet.

Sending it via sudo because of raw sockets:
```bash
sudo ./venv/bin/python ./replay.py
```

To send it you need the capability to open sockets in raw mode, but that does not work well with scripts (see [1]).

[1] setcap for executables, not helpful for python scripts:
```
setcap cap_net_raw,cap_net_admin=eip ./replay.py
```

### Rust lib usage

# if you don't already have a virtualenv. Linux specific, adjust to your OS.
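For context on the jumbo-frame step in the README: the camera's data packets are 6972-byte UDP datagrams (the size checked throughout `decode.py` and `src/stream.rs`), which is larger than the usual 1500-byte Ethernet MTU. A small sanity-check sketch; the 6972 and 9000 values come from this diff, 1500 is the common Ethernet default:

```python
# Why the capture interface is configured for jumbo frames.
PACKET_LEN = 6972     # camera packet size expected by decode.py / stream.rs
DEFAULT_MTU = 1500    # typical Ethernet default
JUMBO_MTU = 9000      # value used in the README and start.sh

print(PACKET_LEN > DEFAULT_MTU)   # True: larger than a standard frame
print(PACKET_LEN <= JUMBO_MTU)    # True: fits once the MTU is raised
```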
cvview.py (116 lines changed)
@@ -2,6 +2,7 @@ import os
import cv2
import argparse
import numpy as np
from datetime import datetime

# Set up the argument parser
parser = argparse.ArgumentParser(description="Visualize image files and display pixel values on hover.")
@@ -28,51 +29,31 @@ def calibrate(x):
    #print('{}..{}'.format(ret.max(), ret.min()))
    return ret


class state:
    calibrate = False


# Function to display the image and pixel values along with the frame index
def show_pixel_values(image_path):
    def mouse_event(event, x, y, flags, param):
        if event == cv2.EVENT_MOUSEMOVE:
            pixel_value = img[y, x]
            text = f'Value: {pixel_value}, Location: ({x},{y})'
            img_text = img.copy()
            # Overlay the frame index
            frame_index = get_frame_index(image_path)
            cv2.putText(img_text, f'Frame: {frame_index}', (10, img_text.shape[0] - 10), cv2.FONT_HERSHEY_SIMPLEX, 0.7, (255, 255, 255), 1, cv2.LINE_AA)
            cv2.putText(img_text, text, (50, 30), cv2.FONT_HERSHEY_SIMPLEX, 0.7, (255, 255, 255), 1, cv2.LINE_AA)
            cv2.imshow('Image', img_text)

    img = cv2.imread(image_path, cv2.IMREAD_UNCHANGED)
    if img is None:
        print(f"Failed to load image at {image_path}. Check the file path and integrity.")
        return False
    if state.calibrate:
        img = calibrate(img)
    cv2.namedWindow('Image')
    cv2.setMouseCallback('Image', mouse_event)
    cv2.imshow('Image', img)
    return True
# Global variables for the last mouse position
last_x, last_y = 0, 0
img, calibrated_img = None, None


# Function to get the frame index from the filename
def get_frame_index(filename):
    return os.path.splitext(os.path.basename(filename))[0][-4:]
    return os.path.splitext(os.path.basename(filename))[0][-5:]


# Function to modify the numeric part of the filename
def modify_filename(filename, increment=True):
def modify_filename(filename, frame_increment=1):
    directory, basename = os.path.split(filename)
    basename_no_ext, ext = os.path.splitext(basename)
    print(f"Modifying filename {basename_no_ext} in directory {directory}.")
    if len(basename_no_ext) < 4 or not basename_no_ext[-4:].isdigit():
    if len(basename_no_ext) < 5 or not basename_no_ext[-5:].isdigit():
        raise ValueError("Filename does not end with five digits.")
    num_part = basename_no_ext[-4:]
    num = int(num_part) + (1 if increment else -1)
    new_name = f"{basename_no_ext[:-4]}{num:04d}{ext}"

    num_part = basename_no_ext[-5:]
    num = int(num_part) + frame_increment

    # Handle rollover
    num = num % 100000 # Modulo 100000 for 5 digits

    new_name = f"{basename_no_ext[:-5]}{num:05d}{ext}"
    new_path = os.path.join(directory, new_name)
    if not os.path.exists(new_path):
        print(f"No file found at {new_path}.")
@@ -80,6 +61,49 @@ def modify_filename(filename, increment=True):
    return new_path


# Function to display the image and pixel values along with the frame index
def show_pixel_values(image_path):
    global img, calibrated_img, last_x, last_y

    def mouse_event(event, x, y, flags, param):
        global last_x, last_y
        if event == cv2.EVENT_MOUSEMOVE:
            last_x, last_y = x, y
            update_display(x, y)

    img = cv2.imread(image_path, cv2.IMREAD_UNCHANGED)
    if img is None:
        print(f"Failed to load image at {image_path}. Check the file path and integrity.")
        return False

    calibrated_img = calibrate(img) # Calibrate the image for display

    cv2.namedWindow('Image')
    cv2.setMouseCallback('Image', mouse_event)
    update_display(last_x, last_y) # Initial display update
    return True

# Function to update the display with pixel values
def update_display(x, y):
    global img, calibrated_img
    original_pixel_value = img[y, x]
    calibrated_pixel_value = calibrated_img[y, x]
    text_original = f'Original: {original_pixel_value}, Loc: ({x},{y})'
    text_calibrated = f'Calibrated: {calibrated_pixel_value}'
    img_text = img.copy()
    frame_index = get_frame_index(img_path)
    cv2.putText(img_text, f'Frame: {frame_index}', (10, img_text.shape[0] - 20), cv2.FONT_HERSHEY_SIMPLEX, 0.4, (255, 255, 255), 1, cv2.LINE_AA)
    cv2.putText(img_text, text_original, (5, 30), cv2.FONT_HERSHEY_SIMPLEX, 0.4, (255, 255, 255), 1, cv2.LINE_AA)
    cv2.putText(img_text, text_calibrated+"c", (5, 50), cv2.FONT_HERSHEY_SIMPLEX, 0.4, (255, 255, 255), 1, cv2.LINE_AA)
    cv2.imshow('Image', img_text)
    return img_text # Return the image with text for saving

def save_frame(img_text):
    current_time = datetime.now().strftime("%Y%m%d_%H%M%S")
    save_path = f"frame_{current_time}.png"
    cv2.imwrite(save_path, img_text)
    print(f"Frame saved as {save_path}")

# Ensure the provided path is a valid file
if not os.path.isfile(img_path):
    print("The provided path is not a valid file.")
@@ -94,15 +118,25 @@ while True:
    key = cv2.waitKey(0)
    if key == 27: # ESC key to exit
        break
    elif key == 91: # '[' key
        img_path = modify_filename(img_path, increment=False)
    elif key in [91, 93, ord('{'), ord('}')]: # Keys for frame navigation
        if key == 91: # '[' key
            img_path = modify_filename(img_path, frame_increment=-1)
        elif key == 93: # ']' key
            img_path = modify_filename(img_path, increment=True)
        elif key == ord('c'):
            state.calibrate = not state.calibrate
            img_path = modify_filename(img_path, frame_increment=1)
        elif key == ord('{'): # Shift + '['
            img_path = modify_filename(img_path, frame_increment=-50)
        elif key == ord('}'): # Shift + ']'
            img_path = modify_filename(img_path, frame_increment=50)

        # Show the new image
        if not show_pixel_values(img_path):
            break # Exit the loop if the new image cannot be loaded
            break # Exit if the new image cannot be loaded
        else:
            update_display(last_x, last_y) # Update display with last known mouse position

    elif key == ord('s'): # 's' key for saving
        # Update the display to get the latest overlay and save it
        img_text_with_overlays = update_display(last_x, last_y)
        save_frame(img_text_with_overlays)
        continue # Skip the frame reload if saving

cv2.destroyAllWindows()
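The navigation keys above step between saved frames by rewriting the 5-digit counter at the end of the filename and wrapping modulo 100000. A tiny worked example of that renaming arithmetic (standalone sketch; `bar_00099.png` is a made-up filename):

```python
import os

def next_name(path, step):
    # Mirror of modify_filename's arithmetic: 5 trailing digits, modulo rollover.
    base, ext = os.path.splitext(path)
    num = (int(base[-5:]) + step) % 100000
    return f"{base[:-5]}{num:05d}{ext}"

print(next_name("frames/bar_00099.png", 1))    # frames/bar_00100.png
print(next_name("frames/bar_00000.png", -1))   # frames/bar_99999.png
print(next_name("frames/bar_00099.png", 50))   # frames/bar_00149.png
```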
decode.py (161 lines changed, Executable file → Normal file)
@@ -1,9 +1,12 @@
#!/usr/bin/env python3
import argparse
from pathlib import Path
import os
import subprocess
from io import BytesIO
import numpy as np
from tqdm import tqdm
from datetime import datetime
import pandas as pd
import pcapng
from struct import unpack
@@ -12,6 +15,7 @@ from PIL import Image

# Create the parser
parser = argparse.ArgumentParser(description="Process a pcap file.")
parser.add_argument("--live", action="store_true", help="Process images live")

# Add an argument for the pcap file, with a default value
parser.add_argument('input_file', nargs='?', default='in.pcap', help='The pcap file to process')
@@ -19,39 +23,41 @@ parser.add_argument('input_file', nargs='?', default='in.pcap', help='The pcap f
# Parse the arguments
args = parser.parse_args()

# Now use args.input_file as the file to process
input_file = args.input_file
basename = os.path.splitext(os.path.basename(input_file))[0]


# Read packets from a pcap file
scanner = pcapng.scanner.FileScanner(open(input_file, "rb"))
blocks = tqdm(scanner)

# Helper function to safely get an attribute from an object
def tryget(obj, att):
    if hasattr(obj, att):
        return getattr(obj, att)
    return None

# TODO - probably a better way to do this
def live_capture_cb(cb):
    def outer(pkt):
        data = bytes(pkt)
        l = len(data)
        if l == 6972:
            cb(data)
    scapy.all.sniff(iface="enp1s0f0", filter='udp', prn=outer)


def rightsize(it):
    for i, obj in enumerate(it):
        if isinstance(obj, bytes):
            l = len(obj)
            data = obj
        else:
            if not hasattr(obj, 'packet_len'):
                continue
            len = obj.packet_len
            if len != 6972:
            l = obj.packet_len
            data = obj.packet_data
        if l != 6972:
            continue
        yield obj.packet_data
        yield data


def removestart(it):
    "Remove the UDP header from the packets"
    for x in it:
        yield x[0x2A:]
        yield removestart_inner(x)


def removestart_inner(x):
    return x[0x2A:]


# Function to parse packet data
def parse(data):
@@ -70,29 +76,73 @@ def parsed(it):
        yield parse(x)


class FrameCollector:
    def __init__(self):
        self.current = []

    def handle(self, obj):
        ret = None
        if obj['part'] == 0:
            if len(self.current) > 0:
                ret = b"".join(self.current)
                self.current = []
        #otherdata = []
        self.current.append(obj["data"])
        return ret
        #otherdata.append(obj)

    def last(self):
        if len(self.current) > 0:
            return b"".join(current)
        return None


# Function to group data into frames
def frames(it):
    current = []
    handler = FrameCollector()
    #otherdata = []
    for obj in it:
        if obj['part'] == 0:
            if len(current) > 0:
                yield b"".join(current)
                current = []
        current.append(obj["data"])
    if len(current) > 0:
        yield b"".join(current)
        ret = handler.handle(obj)
        if ret:
            yield ret
    last = handler.last()
    if last:
        yield last


def iterimages(it, width, height, pixelformat=">H"):
WIDTH = 384
HEIGHT = 288


def bad_frame(frame, width=WIDTH, height=HEIGHT):
    return len(frame) != width * height * 2 # 16 bpp


def skip_bad_frames(it, width=WIDTH, height=HEIGHT):
    for frame in it:
        if len(frame) != width * height * 2: # 16 bpp
        if bad_frame(frame): # 16 bpp
            # Will be fixed when we stopped doing restarts
            #print(f'{len(frame)} != {width} * {height} * 2')
            continue
        yield frame


def iterimages(it, width=WIDTH, height=HEIGHT, pixelformat=">H"):
    for frame in it:
        yield Image.fromarray(np.frombuffer(frame, dtype=pixelformat).reshape(width, height))


def process_video():
    # Now use args.input_file as the file to process
    input_file = args.input_file
    basename = os.path.splitext(os.path.basename(input_file))[0]
    stream = open(input_file, 'rb')
    # Read packets from a pcap file
    scanner = pcapng.scanner.FileScanner(stream)
    blocks = tqdm(scanner)

    # Get frames and convert them to images
    frames = frames(parsed(removestart(rightsize(blocks))))
    images = iterimages(it=frames, width=384, height=288)
    frames = skip_bad_frames(frames(parsed(removestart(rightsize(blocks)))))

    # Create the directory for frames if not exists
    frame_dir = f"frames/{basename}"
@@ -100,10 +150,9 @@ if not os.path.exists(frame_dir):
        os.makedirs(frame_dir)

    # Save each image as a PNG file
    images = iterimages(it=frames)
    for i, img in enumerate(images):
        img.save(f'frames/{basename}/{basename}_{i:04}.png')

    # Produce a video from the saved images
    ffmpeg_input = f"frames/{basename}/{basename}_%04d.png"
    command = [
        "ffmpeg",
@@ -115,11 +164,55 @@ command = [
        "-i", ffmpeg_input, # Input file pattern
        "-vf", "transpose=1", # Video filter for transposing
        "-s", "384x288", # Size of one frame
        "-vcodec", "libx264", # Video codec
        "-vcodec", "libopenh264", # Video codec
        "-pix_fmt", "yuv420p", # Pixel format: YUV 4:2:0
        "thermal.mp4", # Output file in MP4 container
    ]

    subprocess.run(command)

    print("to play: ffplay thermal.mp4")


if args.live:
    # TODO: to video via ffmpeg; right now just a single png
    # of the last frame
    def todo_live_ffmpeg():
        output = 'to_ffmpeg'
        # live: write to named pipe
        if not Path(output).exists():
            print(f'making fifo at {output}')
            os.mkfifo(output)
        fd = open(output, 'wb')
        for frame in frames:
            fd.write(frame)

    print('live stream, import scapy')
    import scapy.all
    print('open stream')

    class PacketHandler:
        def __init__(self, cb):
            self.frame_collector = FrameCollector()
            self.cb = cb

        def handle(self, pkt):
            pkt = removestart_inner(pkt)
            parsed = parse(pkt)
            frame_maybe = self.frame_collector.handle(parsed)
            if not frame_maybe or bad_frame(frame_maybe):
                return
            self.cb(frame_maybe)

    progress = tqdm()

    def on_frame(frame):
        progress.update(1)
        Image.fromarray(np.frombuffer(frame, dtype='>H').reshape(WIDTH, HEIGHT)).save(f'live.new.png')
        os.rename('live.new.png', 'live.png')

    handler = PacketHandler(on_frame)
    live_capture_cb(handler.handle)

else:
    process_video()
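The numbers behind the frame-assembly logic above, as a small sketch (all constants appear in this diff; the packets-per-frame figure is derived arithmetic, not something stated in the source):

```python
# One full frame, as checked by bad_frame():
WIDTH, HEIGHT = 384, 288
BYTES_PER_PIXEL = 2                 # ">H": 16-bit big-endian pixels
FRAME_LEN = WIDTH * HEIGHT * BYTES_PER_PIXEL
print(FRAME_LEN)                    # 221184 bytes

# Each captured packet is 6972 bytes, with 0x2A bytes of link/IP/UDP header
# stripped by removestart(); so a frame spans many packets, and
# FrameCollector uses part == 0 to detect the start of the next frame.
PAYLOAD = 6972 - 0x2A
print(FRAME_LEN / PAYLOAD)          # roughly 32 packets per frame
```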
examples/cutoff.rs (new file, 111 lines)
@@ -0,0 +1,111 @@
use std::{collections::HashMap, io::stdout, time::Duration};

use futures::{future::FutureExt, select, StreamExt};
use futures_timer::Delay;

use crossterm::{
    cursor::position,
    event::{DisableMouseCapture, EnableMouseCapture, Event, EventStream, KeyCode, KeyEvent},
    execute,
    terminal::{disable_raw_mode, enable_raw_mode},
};

async fn set_cutoff(cutoff: f64, freq: f64) -> Result<(), reqwest::Error> {
    // Some simple CLI args requirements...
    let url = format!("http://localhost:3000/cutoff");
    let mut map = HashMap::new();
    map.insert("min_cutoff", cutoff);
    map.insert("max_cutoff", cutoff + 10.0);
    map.insert("freq_hz", freq);
    let client = reqwest::Client::new();
    let res = client.post(url).json(&map).send().await?;

    // eprintln!("Response: {:?} {}", res.version(), res.status());
    // eprintln!("Headers: {:#?}\n", res.headers());

    // let body = res.text().await?;
    // println!("{body}");

    Ok(())
}

const HELP: &str = r#"EventStream based on futures_util::Stream with tokio
 - Keyboard, mouse and terminal resize events enabled
 - Prints "." every second if there's no event
 - Hit "c" to print current cursor position
 - Use Esc to quit
"#;

async fn print_events() {
    let mut reader = EventStream::new();
    let mut cutoff = 30.0;
    let mut last_cutoff = cutoff;
    let mut freq = 1.0;

    loop {
        let mut delay = Delay::new(Duration::from_millis(1_000)).fuse();
        let mut event = reader.next().fuse();
        let mut change = false;
        select! {
            _ = delay => {
            },
            maybe_event = event => {
                match maybe_event {
                    Some(Ok(event)) => {
                        if event == Event::Key(KeyCode::Char('c').into()) {
                            println!("Cursor position: {:?}\r", position());
                        }

                        if event == Event::Key(KeyCode::Esc.into()) {
                            break;
                        }
                        if let Event::Key(k) = event {
                            if let KeyCode::Char(c) = k.code {
                                change = true;
                                match c {
                                    '[' => {
                                        cutoff -= 1.0;
                                    }
                                    ']' => {
                                        cutoff += 1.0;
                                    }
                                    '1' => {
                                        freq *= 0.9;
                                    }
                                    '2' => {
                                        freq *= 1.1;
                                    }
                                    _ => {
                                        change = false;
                                    }
                                }
                            }
                        }
                        if change {
                            set_cutoff(cutoff, freq).await.unwrap();
                            println!("cutoff = {}\r", cutoff);
                        }
                    }
                    Some(Err(e)) => println!("Error: {:?}\r", e),
                    None => break,
                }
            }
        };
    }
}

#[tokio::main]
async fn main() -> std::io::Result<()> {
    println!("{}", HELP);

    enable_raw_mode()?;

    let mut stdout = stdout();
    execute!(stdout, EnableMouseCapture)?;

    print_events().await;

    execute!(stdout, DisableMouseCapture)?;

    disable_raw_mode()
}
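For reference, the JSON body this example sends to the `/cutoff` endpoint, as a sketch using only the Python standard library. The URL and field names are taken from `set_cutoff` above; the axum server that would answer the request is not shown in this compare, so the call itself is left commented out:

```python
import json
from urllib.request import Request, urlopen

payload = {"min_cutoff": 30.0, "max_cutoff": 40.0, "freq_hz": 1.0}
req = Request(
    "http://localhost:3000/cutoff",
    data=json.dumps(payload).encode(),
    headers={"Content-Type": "application/json"},
)
# Uncomment once the server from this branch is actually running:
# urlopen(req)
print(json.dumps(payload))
```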
@@ -6,7 +6,6 @@ fn main() -> anyhow::Result<()> {
    let mut arg = env::args();
    arg.next(); // skip executable
    let filename = arg.next().ok_or(anyhow::anyhow!("unexpected"))?;
    let frames = arg.next().unwrap_or("frames".into());
    decode_to_files(&filename, &frames)?;
    decode_to_files(&filename)?;
    Ok(())
}
examples/replay.rs (new file, 17 lines)
@@ -0,0 +1,17 @@
use std::net::UdpSocket;

fn main() -> std::io::Result<()> {
    {
        let socket = UdpSocket::bind("192.168.0.1:8091")?;

        // Receives a single datagram message on the socket. If `buf` is too small to hold
        // the message, it will be cut off.
        let buf = [
            1, 0x20, 1, 0x80, 0x1b, 0x40, 0, 0x20, 0, 0, 0, 0, 0, 0, 0, 0x0f, 0, 0, 0, 1, 0, 0, 1,
            0, 0, 0x20, 0x2b, 0,
        ];
        socket.set_broadcast(true)?;
        socket.send_to(&buf, "192.168.0.255:8092")?;
    }
    Ok(())
}
listen.py (new file, 9 lines)
@@ -0,0 +1,9 @@
from socket import socket, AF_INET, SOCK_DGRAM

s = socket(AF_INET, SOCK_DGRAM)
s.bind(('', 8090))

while True:
    d = s.recvfrom(1024)
    print(d)
live_vid.sh (new executable file, 7 lines)
@@ -0,0 +1,7 @@
#!/bin/bash
# Enable to get a dot file, to turn to png: dot -Tpng -osomething.png something.dot
#export GST_DEBUG_DUMP_DOT_DIR=$(pwd)
gst-launch-1.0 filesrc location=output.raw \
    ! rawvideoparse use_sink_caps=false height=384 width=288 format=gray16-be \
    ! videoconvertscale \
    ! autovideosink
pyproject.toml (new file, 10 lines)
@@ -0,0 +1,10 @@
[project]
name = "thermalcam"
version = "0.1.0"
description = "Add your description here"
readme = "README.md"
requires-python = ">=3.13"
dependencies = [
    "python-dotenv>=1.2.1",
    "scapy>=2.6.1",
]
replay.py (new executable file, 27 lines)
@@ -0,0 +1,27 @@
#!/usr/bin/env python3
#replay the "trigger" packet.
#this packets will start the source broadcasting its packets.

import base64
from scapy.all import *
from dotenv import load_dotenv
import os

load_dotenv()

# Base64 encoded packet data
encoded_packet = "////////AAFsWfAKCABFAAA4KB0AAIARkEfAqAABwKgA/x+bH5wA2QAAASABgBtAACAAAAAAAAAADwAAAAEAAAEAACArAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP//////////////////////////////////////////AAAAAAAAAAIBAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"


# Decode the Base64 encoded packet
decoded_packet = base64.b64decode(encoded_packet)

# Load packet with Scapy
packet = Ether(decoded_packet)
#print(packet.show(dump=True))

iface = os.environ.get('THERMALCAM_IFACE', 'enp1s0f0')
print(f'using interface {iface}')

# (packet)
sendp(packet, iface=iface)
run_dhcp_server.sh (new executable file, 2 lines)
@@ -0,0 +1,2 @@
#!/bin/bash
sudo dnsmasq -i enp1s0f0 --local-service --dhcp-range=192.168.0.10,192.168.0.100 --dhcp-leasefile=dhcp.lease -d
run_live.sh (new executable file, 13 lines)
@@ -0,0 +1,13 @@
#!/bin/bash
cd $(dirname $0)

set -e

# Python works but stutters
#sudo ./venv/bin/python ./decode.py --live
cargo build --release
TARGET=./target/release/thermaldecoder
# setcap does not work yet (EPERM on socket AF_PACKET)
# sudo setcap cap_net_raw,cap_net_admin=eip $TARGET
#sudo strace -f -o live.strace $TARGET /dev/video0
sudo RUST_BACKTRACE=full $TARGET "$@"
run_live_debug.sh (new executable file, 13 lines)
@@ -0,0 +1,13 @@
#!/bin/bash
cd $(dirname $0)

set -e

# Python works but stutters
#sudo ./venv/bin/python ./decode.py --live
cargo build --example live
TARGET=./target/debug/examples/live
# setcap does not work yet (EPERM on socket AF_PACKET)
# sudo setcap cap_net_raw,cap_net_admin=eip $TARGET
#sudo strace -f -o live.strace $TARGET /dev/video0
sudo RUST_BACKTRACE=full $TARGET "$@"
rustdecode.sh (new executable file, 2 lines)
@@ -0,0 +1,2 @@
#!/bin/bash
cargo run --release --example main -- "$@"
@@ -89,7 +89,7 @@ impl PacketsIterator {
pub struct Header {
    c1: u32,
    c2: u16,
    part: u16,
    pub part: u16,
    a: u16,
    ffaa: u16,
    b: u16,
@@ -98,7 +98,7 @@ pub struct Header {
}

impl Header {
    fn read(data: &[u8]) -> anyhow::Result<Self> {
    pub fn read(data: &[u8]) -> anyhow::Result<Self> {
        Ok(Header {
            c1: u32::from_be_bytes([data[0], data[1], data[2], data[3]]),
            c2: u16::from_be_bytes([data[4], data[5]]),
@@ -122,12 +122,12 @@ impl Header {
    }
}

const HDR_SIZE: usize = std::mem::size_of::<Header>();
pub const HDR_SIZE: usize = std::mem::size_of::<Header>();

pub struct Frame {
    #[allow(dead_code)]
    header: Header,
    raw: Vec<u8>,
    pub header: Header,
    pub raw: Vec<u8>,
}

impl Frame {
@@ -184,7 +184,8 @@ impl Iterator for Decoder {
    }
}

fn write_raw_frame(name: &str, data: &[u8]) -> anyhow::Result<()> {
#[allow(dead_code)]
pub fn write_raw_frame(name: &str, data: &[u8]) -> anyhow::Result<()> {
    let path = Path::new(&name);
    let file = File::create(path)?;
    let ref mut w = BufWriter::new(file);
@@ -196,6 +197,7 @@ fn write_raw_frame(name: &str, data: &[u8]) -> anyhow::Result<()> {
    Ok(())
}

#[allow(dead_code)]
fn write_calibrated_frame(name: &str, data: &[u16]) -> anyhow::Result<()> {
    let path = Path::new(&name);
    let file = File::create(path).unwrap();
@@ -251,15 +253,27 @@ fn decode(filename: &str) -> PyResult<PyFrameIterator> {
    Ok(iter.into())
}

pub fn decode_to_files(filename: &str, frames_root: &str) -> anyhow::Result<()> {
/// writes to frames/<basename of filename>
#[allow(dead_code)]
pub fn decode_to_files(filename: &str) -> anyhow::Result<()> {
    let frameiter = Decoder::new(filename)?;
    let basename = std::path::Path::new(filename)
        .file_stem()
        .ok_or(anyhow::anyhow!("cannot get basename"))?
        .to_str()
        .ok_or(anyhow::anyhow!("cannot convert to utf-8 from os name"))?;
    let target_dir = format!("frames/{}", basename);
    let target_dir = std::path::Path::new(&target_dir);
    if !target_dir.exists() {
        std::fs::create_dir(target_dir)?;
    }
    for (i, frame) in frameiter.enumerate() {
        let name = format!("{}/{:05}.png", frames_root, i);
        let name = format!("frames/{}/{:05}.png", basename, i);
        if let Err(_e) = write_raw_frame(&name, &frame.raw) {
            println!("skipping bad frame {}", i);
            continue;
        }
        let name = format!("{}/temp_{:05}.png", frames_root, i);
        let name = format!("{}/temp_{:05}.png", target_dir.display(), i);
        let pixels = frame.pixels();
        write_calibrated_frame(&name, &pixels)?;
    }
src/stream.rs (new file, 209 lines)
@@ -0,0 +1,209 @@
use crate::offline::{Header, HDR_SIZE};
use bracket_color::prelude::*;
use clap::Parser;
use dotenv::dotenv;
use std::time::SystemTime;
use std::{
    io::Write,
    sync::{Arc, Mutex},
    thread::spawn,
};
use v4l::video::Output;

#[derive(Parser, Debug)]
#[command(version, about, long_about = None)]
struct Args {
    #[arg(short, long, default_value_t = false)]
    temperature: bool,
    #[arg(short, long, default_value = "/dev/video0")]
    device: String,
    #[arg(short, long)]
    red_cutoff: Option<f64>,
}

fn pixel_to_celcius(x: u16) -> u16 {
    let x: f64 = x.into();
    let x = x / 256.0;
    let t = (-1.665884e-08) * x.powf(4.)
        + (1.347094e-05) * x.powf(3.)
        + (-4.396264e-03) * x.powf(2.)
        + (9.506939e-01) * x
        + (-6.353247e+01);
    (t * 256.0) as u16
}

/// https://en.wikipedia.org/wiki/HSL_and_HSV
/// convert to the expected dynamic range first. We insert values in [0..256)
/// h in [0, 360] degrees
/// s in [0, 1]
/// v in [0, 1]
fn once_upon_a_time_hsv2rgb(h: u8, s: u8, v: u8) -> (u8, u8, u8) {
    let h = (h as f64) / 256.0 * 360.0;
    let s = (s as f64) / 256.0;
    let v = (v as f64) / 256.0;
    (0, 0, 0)
}

fn rgb_to_u8s(rgb: &RGB) -> (u8, u8, u8) {
    (
        (rgb.r * 256.) as u8,
        (rgb.g * 256.) as u8,
        (rgb.b * 256.) as u8,
    )
}

pub(crate) struct Streamer {
    pub(crate) min_cutoff: f64,
    pub(crate) max_cutoff: f64,
    pub(crate) freq_hz: f64,
}

pub(crate) fn initialize() -> Arc<Mutex<Streamer>> {
    let args = Args::parse();
    Arc::new(Mutex::new(Streamer {
        min_cutoff: args.red_cutoff.unwrap_or(26.),
        max_cutoff: args.red_cutoff.unwrap_or(26.) + 10.0,
        freq_hz: 1.0,
    }))
}

fn main(streamer: Arc<Mutex<Streamer>>) -> anyhow::Result<()> {
    dotenv().ok();
    let args = Args::parse();
    let device = match std::env::var("THERMALCAM_IFACE=enp1s0f0") {
        Ok(d) => {
            let device = pcap::Device::list()
                .expect("device list failed")
                .into_iter()
                .find(|x| x.name == d)
                .expect(&format!("could not find device {}", d));
            device
        }
        Err(_) => pcap::Device::lookup()
            .expect("device lookup failed")
            .expect("no device available"),
    };
    // get the default Device

    println!("Using device {}", device.name);
    let output = args.device;
    println!("Using output v4l2loopback device {}", output);

    const WIDTH: usize = 288;
    const HEIGHT: usize = 384;
    println!("reading cutoff");
    let start = SystemTime::now()
        .duration_since(SystemTime::UNIX_EPOCH)
        .unwrap()
        .as_secs_f64();
    let greyscale = !args.temperature;
    let fourcc_repr = if greyscale {
        [
            b'Y', // | 0b10000000
            b'1', b'6',
            b' ', // Note: not using b' ' | 0x80, (V4L2_PIX_FMT_Y16_BE)
                  // because VID_S_FMT ioctl returns EINVAL, so just swap the bytes here
        ]
    } else {
        // RGB32 is 4 bytes R, G, B, A
        [b'R', b'G', b'B', b'4']
    };
    println!("using four cc {:?}", fourcc_repr);
    let bytes_per_pixel = if greyscale { 2 } else { 4 };
    let fourcc = v4l::format::FourCC { repr: fourcc_repr };
    let mut out = v4l::Device::with_path(output)?;
    // To find the fourcc code, use v4l2-ctl --list-formats-out /dev/video0
    // (or read the source :)
    // flip axes
    let format = v4l::Format::new(HEIGHT as u32, WIDTH as u32, fourcc);
    Output::set_format(&out, &format)?;

    // Setup Capture
    let mut cap = pcap::Capture::from_device(device)
        .unwrap()
        .immediate_mode(true)
        .open()
        .unwrap();

    // get a packet and print its bytes
    const PACKET_LEN: usize = 6972;
    // input is grayscale 16 bits per pixel
    const FRAME_LEN: usize = WIDTH * HEIGHT * 2;
    let mut frame = [0u8; FRAME_LEN];
    let mut len = 0;
    let output_frame_len = WIDTH * HEIGHT * bytes_per_pixel;
    let mut swapped_vec = vec![0u8; output_frame_len];
    let swapped = &mut swapped_vec;
    while let Ok(p) = cap.next_packet() {
        let data = p.data;
        if data.len() != PACKET_LEN {
            continue;
        }
        let data = &data[0x2a..];
        let header = match Header::read(data) {
            Ok(header) => header,
            Err(_) => continue,
        };
        let data = &data[HDR_SIZE..];
        if (header.part == 0 && len > 0)
            // do not write out of bounds - would panic, instead just skip
            || (data.len() + len > FRAME_LEN)
        {
            if len == FRAME_LEN {
                // read once per frame, can make it lower if need be
                let state = streamer.lock().unwrap();
                let mid = (state.min_cutoff + state.max_cutoff) / 2.0;
                let range = state.max_cutoff - state.min_cutoff;
                let hz = state.freq_hz;
                let now = SystemTime::now()
                    .duration_since(SystemTime::UNIX_EPOCH)
                    .unwrap()
                    .as_secs_f64();
                let dt = now - start;
                let cutoff = mid + f64::sin(dt * hz) * 0.5 * range;
                // swap the bytes, we are using LE, not BE, 16 bit grayscale
                // possibly limitation of current v4l2loopback or v4l rust wrapper or libv4l2
                for i in 0..FRAME_LEN / 2 {
                    let x = i % WIDTH;
                    let y = (i / WIDTH) % HEIGHT;
                    let mut pixel = u16::from_be_bytes([frame[i * 2], frame[i * 2 + 1]]);
                    if greyscale {
                        if args.temperature {
                            pixel = pixel_to_celcius(pixel);
                        }
                        let pixel_swapped = pixel.to_le_bytes();
                        let out_i = ((HEIGHT - 1 - y) + (WIDTH - 1 - x) * HEIGHT) * 2;
                        swapped[out_i..out_i + 2].copy_from_slice(&pixel_swapped);
                    } else {
                        pixel = pixel_to_celcius(pixel);
                        let (r, g, b) = if pixel > (256.0 * cutoff) as u16 {
                            let p = pixel - (256.0 * cutoff) as u16;
                            let rgb = HSV::from_f32(0.0, (p as f32) / 256.0, 0.0).to_rgb();
                            rgb_to_u8s(&rgb)
                        } else {
                            let rgb =
                                HSV::from_f32(pixel as f32 / 65536.0, 0.0, pixel as f32 / 65536.0)
                                    .to_rgb();
                            rgb_to_u8s(&rgb)
                        };
                        let out_i = ((HEIGHT - 1 - y) + (WIDTH - 1 - x) * HEIGHT) * 4;
                        swapped[out_i..out_i + 4].copy_from_slice(&[0, r, g, b]);
                    }
                }
                out.write_all(&swapped[..])?;
            }
            len = 0;
        }
        frame[len..len + data.len()].copy_from_slice(data);
        len += data.len();
    }
    Ok(())
}

pub(crate) fn start_stream_thread(streamer: Arc<Mutex<Streamer>>) {
    spawn(move || {
        if let Err(e) = main(streamer) {
            println!("oops: {:?}", e);
        }
    });
}
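The raw-pixel-to-temperature mapping in `pixel_to_celcius` above is a fitted fourth-degree polynomial over the pixel value divided by 256. A quick Python transcription for sanity-checking values offline, using the same coefficients as the Rust code; the example input value is arbitrary:

```python
def pixel_to_celsius(raw: int) -> float:
    # Same polynomial as pixel_to_celcius() in src/stream.rs, without the
    # final *256 re-quantisation back to a u16.
    x = raw / 256.0
    return ((-1.665884e-08) * x**4
            + (1.347094e-05) * x**3
            + (-4.396264e-03) * x**2
            + (9.506939e-01) * x
            + (-6.353247e+01))

print(pixel_to_celsius(30000))  # arbitrary raw value, prints an approximate temperature in C
```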
start.sh (new executable file, 12 lines)
@@ -0,0 +1,12 @@
#!/bin/bash
. .env
IFACE=$THERMALCAM_IFACE
echo "using iface $IFACE"
echo "checking mtu"
mtu=$(ip link show ${IFACE} | grep -o 'mtu [0-9]*' | gawk '{print $2}')
echo "mtu = $mtu"
if [ $mtu -lt 9000 ]; then
    echo "setting mtu to 9000"
    sudo ip link set $IFACE mtu 9000
fi
sudo uv run ./replay.py
test_rust.py (26 lines changed)
@@ -2,11 +2,31 @@
from pathlib import Path
from thermaldecoder import decode
import numpy as np
import subprocess
import matplotlib.pyplot as plt


# Create a directory to store the frames if it doesn't exist
root = Path('frames')
root.mkdir(exist_ok=True)
frames = list(decode('in.pcap'))
f = np.array(frames[0])

# Decode the frames from the pcap file
frames = list(decode('indesk.pcapng'))

# Iterate over the frames
for i, frame in enumerate(frames):
    try:
        # Convert the frame to an image file
        img_path = root / f"frame_{i}.png"
        f = np.array(frame)
        f.shape = (384, 288)
        plt.imshow(f)
        plt.show()
        plt.axis('off')
        plt.savefig(img_path, bbox_inches='tight', pad_inches=0)
        plt.close()

        # Use ffmpeg to display the image
        subprocess.run(['ffmpeg', '-i', str(img_path), '-vf', 'scale=800:600', '-framerate', '25', '-f', 'image2pipe', '-'], check=True)

    except ValueError as e:
        print(f"Error processing frame {i}: {e}")
thermal.mp4 (BIN): binary file not shown.