Compare commits

..

44 Commits

Author SHA1 Message Date
devdesk
cddd5317c3 use uv for start.sh 2025-11-11 22:40:06 +02:00
devdesk
b1a7baae19 bug fix 2024-02-23 02:12:30 +02:00
devdesk
c993f74511 hertz inverted 2024-02-23 02:10:49 +02:00
devdesk
39551f3794 web can update pulse 2024-02-23 01:54:56 +02:00
devdesk
bfaa0eca2f pulse threshold 2024-02-23 01:48:11 +02:00
devdesk
373b17d8a5 web interface 2024-02-23 01:38:32 +02:00
devdesk
ca742aa204 add hsv to rgb, not better looking 2024-02-22 23:08:30 +02:00
devdesk
831322e44a initial example axum, streamer runs in thread 2024-02-20 23:29:33 +02:00
devdesk
24b65a8ee5 turn into an executable instead of a library
burns the python library to the ground though. but I'm just doing
a web service now, so it would be simpler to just use that,
and for live python has the implementation as well still
2024-02-20 21:23:26 +02:00
devdesk
005292adfc tweaking the color 2024-02-20 21:02:14 +02:00
devdesk
a512e710fa correct for both 4 bytes per pixel and 2 bytes 2024-02-19 22:38:58 +02:00
devdesk
e43f8b0efb correct for rotation and axes for greyscale 2024-02-19 22:31:07 +02:00
devdesk
2ba4528c1d rgb for cutoff temperature 2024-02-19 22:26:25 +02:00
devdesk
0af76ed532 add cutoff grayscale 2024-02-19 22:02:27 +02:00
devdesk
2347158093 add command line options and temperature calibration
without temperature calibration 0.3%
with ~5%

model name	: Intel(R) Core(TM) i7-3615QM CPU @ 2.30GHz
2024-02-19 21:53:25 +02:00
devdesk
faccc3d20b add clap dep 2024-02-19 21:52:49 +02:00
devdesk
4bdd3a8411 git ignore .env 2024-02-17 17:23:02 +02:00
devdesk
34931f9362 add .env.example 2024-02-17 17:21:22 +02:00
devdesk
6bcc541c86 working v4l2 in rust, swapping bytes 2024-02-17 17:21:01 +02:00
devdesk
e74fa87103 fourcc wip 2024-02-17 16:59:29 +02:00
devdesk
0ff8d2b1fb v4l by rust process, still sudo, endianess reversed 2024-02-17 16:55:53 +02:00
devdesk
a917f75ce0 avoid reallocating the frame, just use a static buffer 2024-02-17 15:56:05 +02:00
devdesk
49f9aa98ed remove indicatif, add dotenv, write to stdout, gst to v4lsink, usable with ffplay, still some hiccups, not cpu related 2024-02-17 15:43:20 +02:00
devdesk
5acd03828d start.sh sets mtu and sends the start packet 2024-02-17 15:31:29 +02:00
devdesk
bba0e3a093 rename and include comment in live playback utilty 2024-02-16 15:56:45 +02:00
devdesk
fe440960f4 adding deps to make standalone video app 2024-02-16 00:42:24 +02:00
devdesk
4f638cdd64 remove unused import 2024-02-16 00:42:01 +02:00
devdesk
c29499d9b0 to fifo and gst-launch 2024-02-16 00:41:39 +02:00
devdesk
acf9b2c4c4 rust implementation, fast enough it looks 2024-02-15 23:24:22 +02:00
devdesk
d661bf27ae rewrite to use scapy.all.sniff callback parameter, better, still sucks 2024-02-15 20:56:18 +02:00
devdesk
679a87bf45 stop writing to udp.bytes 2024-02-15 20:23:51 +02:00
devdesk
d9cb5986ee live working with image 2024-02-15 00:30:45 +02:00
devdesk
fccc2ba2e5 almost live, but horrible hack for stream capture, and not live ffmpeg, just live save of images 2024-02-15 00:27:48 +02:00
devdesk
17c7d0e555 almost live, but horrible hack for stream capture, and not live ffmpeg, just live save of images 2024-02-15 00:26:10 +02:00
devdesk
7d75ad7596 README: fix wiki link 2024-02-14 21:33:40 +02:00
devdesk
45ec502eca add a rust replay example, packet has identical data, src port, dest port, dest address
existing:

397	2041.095533360	192.168.0.1	192.168.0.255	UDP	252	8091 → 8092 [BAD UDP LENGTH 217 > IP PAYLOAD LENGTH] Len=209

new:

396	2008.927877440	192.168.0.1	192.168.0.255	UDP	70	8091 → 8092 Len=28

old bytes:

0000   ff ff ff ff ff ff 00 01 6c 59 f0 0a 08 00 45 00   ........lY....E.
0010   00 38 28 1d 00 00 80 11 90 47 c0 a8 00 01 c0 a8   .8(......G......
0020   00 ff 1f 9b 1f 9c 00 d9 00 00 01 20 01 80 1b 40   ........... ...@
0030   00 20 00 00 00 00 00 00 00 0f 00 00 00 01 00 00   . ..............
0040   01 00 00 20 2b 00 00 00 00 00 00 00 00 00 00 00   ... +...........
0050   00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00   ................
0060   00 ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff   ................
0070   ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff   ................
0080   ff 00 00 00 00 00 00 00 02 01 00 00 00 00 00 00   ................
0090   00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00   ................
00a0   00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00   ................
00b0   00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00   ................
00c0   00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00   ................
00d0   00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00   ................
00e0   00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00   ................
00f0   00 00 00 00 00 00 00 00 00 00 00 00               ............

new bytes:

0000   ff ff ff ff ff ff a8 20 66 11 cb 49 08 00 45 00   ....... f..I..E.
0010   00 38 2b 75 40 00 40 11 8c ef c0 a8 00 01 c0 a8   .8+u@.@.........
0020   00 ff 1f 9b 1f 9c 00 24 82 86 01 20 01 80 1b 40   .......$... ...@
0030   00 20 00 00 00 00 00 00 00 0f 00 00 00 01 00 00   . ..............
0040   01 00 00 20 2b 00                                 ... +.
2024-01-19 23:39:23 +02:00
devdesk
fa5a16a8ea try to run a dhcp server 2024-01-19 23:31:26 +02:00
devdesk
82d11e868f Merge branch 'main' of git.telavivmakers.space:tami/thermalcam_decoder 2024-01-07 02:03:45 +02:00
devdesk
85bda0fa27 replay file 2024-01-07 02:02:52 +02:00
yair
0ec9d70cd6 add c to temp value 2023-12-31 20:25:51 +02:00
yair
1e59c59aca merge pixel view to show 16bit and calibrated resaults. 50frame step, allow to save frame 2023-12-31 18:58:30 +02:00
yair
787bdfe5f7 merge pixel view to show 16bit and calibrated resaults. allow to save frame 2023-12-31 18:56:49 +02:00
Alon Levy
d6c5058f2e add rustdecode.sh 2023-12-31 14:18:30 +02:00
Alon Levy
3ea0c74e7f rust: create target dir, use frames/basename as decode.py 2023-12-31 14:17:03 +02:00
23 changed files with 5372 additions and 142 deletions

1
.env.example Normal file
View File

@@ -0,0 +1 @@
THERMALCAM_IFACE=enp1s0f0

1
.gitignore vendored
View File

@@ -2,3 +2,4 @@
/frames /frames
**/*.png **/*.png
target target
.env

4638
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -5,14 +5,27 @@ edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[lib]
name = "thermaldecoder"
crate-type = ["rlib", "cdylib"]
[dependencies] [dependencies]
anyhow = "1.0.77" anyhow = "1.0.77"
axum = "0.7.4"
bracket-color = "0.8.7"
clap = { version = "4.5.1", features = ["derive"] }
crossbeam = "0.8.4"
crossbeam-channel = "0.5.11"
crossterm = { version = "0.27.0", features = ["event-stream"] }
dotenv = "0.15.0"
eframe = "0.26.2"
egui = "0.26.2"
futures = "0.3.30"
futures-timer = "3.0.3"
indicatif = "0.17.7" indicatif = "0.17.7"
pcap = { version = "1.2.0", features = ["capture-stream"] }
pcap-parser = { version = "0.14.1", features = ["data"] } pcap-parser = { version = "0.14.1", features = ["data"] }
png = "0.17.10" png = "0.17.10"
pyo3 = { version = "0.20.0", "features" = ["extension-module"] } pyo3 = { version = "0.20.0", "features" = ["extension-module"] }
reqwest = { version = "0.11.24", features = ["json"] }
serde = { version = "1.0.193", features = ["derive", "serde_derive", "alloc"] } serde = { version = "1.0.193", features = ["derive", "serde_derive", "alloc"] }
tokio = { version = "1.36.0", features = ["full"] }
tracing-subscriber = "0.3.18"
tui-textarea = "0.4.0"
v4l = { version = "0.14.0", features = ["v4l2"], default-features = false }

View File

@@ -1,8 +1,31 @@
### Thermal decoder ### Thermal decoder
https://wiki.telavivmakers.org/tamiwiki/projects/thermalcam https://telavivmakers.org/tamiwiki/projects/thermalcam
### Starting the stream
#### Enable jumbo frames
```
sudo ip link set eth0 mtu 9000
```
#### Send start packet
You need to send a special packet.
Sending it via sudo because of raw sockets:
```bash
sudo ./venv/bin/python ./replay.py
```
To send it you need the capability to open sockets in raw mode, but that does not work well with scripts (see [1]).
[1] setcap for executables, not helpful for python scripts:
```
setcap cap_net_raw,cap_net_admin=eip ./replay.py
```
### Rust lib usage ### Rust lib usage
# if you don't already have a virtualenv. Linux specific, adjust to your OS. # if you don't already have a virtualenv. Linux specific, adjust to your OS.

116
cvview.py
View File

@@ -2,6 +2,7 @@ import os
import cv2 import cv2
import argparse import argparse
import numpy as np import numpy as np
from datetime import datetime
# Set up the argument parser # Set up the argument parser
parser = argparse.ArgumentParser(description="Visualize image files and display pixel values on hover.") parser = argparse.ArgumentParser(description="Visualize image files and display pixel values on hover.")
@@ -28,51 +29,31 @@ def calibrate(x):
#print('{}..{}'.format(ret.max(), ret.min())) #print('{}..{}'.format(ret.max(), ret.min()))
return ret return ret
# Global variables for the last mouse position
class state: last_x, last_y = 0, 0
calibrate = False img, calibrated_img = None, None
# Function to display the image and pixel values along with the frame index
def show_pixel_values(image_path):
def mouse_event(event, x, y, flags, param):
if event == cv2.EVENT_MOUSEMOVE:
pixel_value = img[y, x]
text = f'Value: {pixel_value}, Location: ({x},{y})'
img_text = img.copy()
# Overlay the frame index
frame_index = get_frame_index(image_path)
cv2.putText(img_text, f'Frame: {frame_index}', (10, img_text.shape[0] - 10), cv2.FONT_HERSHEY_SIMPLEX, 0.7, (255, 255, 255), 1, cv2.LINE_AA)
cv2.putText(img_text, text, (50, 30), cv2.FONT_HERSHEY_SIMPLEX, 0.7, (255, 255, 255), 1, cv2.LINE_AA)
cv2.imshow('Image', img_text)
img = cv2.imread(image_path, cv2.IMREAD_UNCHANGED)
if img is None:
print(f"Failed to load image at {image_path}. Check the file path and integrity.")
return False
if state.calibrate:
img = calibrate(img)
cv2.namedWindow('Image')
cv2.setMouseCallback('Image', mouse_event)
cv2.imshow('Image', img)
return True
# Function to get the frame index from the filename # Function to get the frame index from the filename
def get_frame_index(filename): def get_frame_index(filename):
return os.path.splitext(os.path.basename(filename))[0][-4:] return os.path.splitext(os.path.basename(filename))[0][-5:]
# Function to modify the numeric part of the filename # Function to modify the numeric part of the filename
def modify_filename(filename, increment=True): def modify_filename(filename, frame_increment=1):
directory, basename = os.path.split(filename) directory, basename = os.path.split(filename)
basename_no_ext, ext = os.path.splitext(basename) basename_no_ext, ext = os.path.splitext(basename)
print(f"Modifying filename {basename_no_ext} in directory {directory}.") print(f"Modifying filename {basename_no_ext} in directory {directory}.")
if len(basename_no_ext) < 4 or not basename_no_ext[-4:].isdigit(): if len(basename_no_ext) < 5 or not basename_no_ext[-5:].isdigit():
raise ValueError("Filename does not end with five digits.") raise ValueError("Filename does not end with five digits.")
num_part = basename_no_ext[-4:]
num = int(num_part) + (1 if increment else -1) num_part = basename_no_ext[-5:]
new_name = f"{basename_no_ext[:-4]}{num:04d}{ext}" num = int(num_part) + frame_increment
# Handle rollover
num = num % 100000 # Modulo 100000 for 5 digits
new_name = f"{basename_no_ext[:-5]}{num:05d}{ext}"
new_path = os.path.join(directory, new_name) new_path = os.path.join(directory, new_name)
if not os.path.exists(new_path): if not os.path.exists(new_path):
print(f"No file found at {new_path}.") print(f"No file found at {new_path}.")
@@ -80,6 +61,49 @@ def modify_filename(filename, increment=True):
return new_path return new_path
# Function to display the image and pixel values along with the frame index
def show_pixel_values(image_path):
global img, calibrated_img, last_x, last_y
def mouse_event(event, x, y, flags, param):
global last_x, last_y
if event == cv2.EVENT_MOUSEMOVE:
last_x, last_y = x, y
update_display(x, y)
img = cv2.imread(image_path, cv2.IMREAD_UNCHANGED)
if img is None:
print(f"Failed to load image at {image_path}. Check the file path and integrity.")
return False
calibrated_img = calibrate(img) # Calibrate the image for display
cv2.namedWindow('Image')
cv2.setMouseCallback('Image', mouse_event)
update_display(last_x, last_y) # Initial display update
return True
# Function to update the display with pixel values
def update_display(x, y):
global img, calibrated_img
original_pixel_value = img[y, x]
calibrated_pixel_value = calibrated_img[y, x]
text_original = f'Original: {original_pixel_value}, Loc: ({x},{y})'
text_calibrated = f'Calibrated: {calibrated_pixel_value}'
img_text = img.copy()
frame_index = get_frame_index(img_path)
cv2.putText(img_text, f'Frame: {frame_index}', (10, img_text.shape[0] - 20), cv2.FONT_HERSHEY_SIMPLEX, 0.4, (255, 255, 255), 1, cv2.LINE_AA)
cv2.putText(img_text, text_original, (5, 30), cv2.FONT_HERSHEY_SIMPLEX, 0.4, (255, 255, 255), 1, cv2.LINE_AA)
cv2.putText(img_text, text_calibrated+"c", (5, 50), cv2.FONT_HERSHEY_SIMPLEX, 0.4, (255, 255, 255), 1, cv2.LINE_AA)
cv2.imshow('Image', img_text)
return img_text # Return the image with text for saving
def save_frame(img_text):
current_time = datetime.now().strftime("%Y%m%d_%H%M%S")
save_path = f"frame_{current_time}.png"
cv2.imwrite(save_path, img_text)
print(f"Frame saved as {save_path}")
# Ensure the provided path is a valid file # Ensure the provided path is a valid file
if not os.path.isfile(img_path): if not os.path.isfile(img_path):
print("The provided path is not a valid file.") print("The provided path is not a valid file.")
@@ -94,15 +118,25 @@ while True:
key = cv2.waitKey(0) key = cv2.waitKey(0)
if key == 27: # ESC key to exit if key == 27: # ESC key to exit
break break
elif key == 91: # '[' key elif key in [91, 93, ord('{'), ord('}')]: # Keys for frame navigation
img_path = modify_filename(img_path, increment=False) if key == 91: # '[' key
img_path = modify_filename(img_path, frame_increment=-1)
elif key == 93: # ']' key elif key == 93: # ']' key
img_path = modify_filename(img_path, increment=True) img_path = modify_filename(img_path, frame_increment=1)
elif key == ord('c'): elif key == ord('{'): # Shift + '['
state.calibrate = not state.calibrate img_path = modify_filename(img_path, frame_increment=-50)
elif key == ord('}'): # Shift + ']'
img_path = modify_filename(img_path, frame_increment=50)
# Show the new image
if not show_pixel_values(img_path): if not show_pixel_values(img_path):
break # Exit the loop if the new image cannot be loaded break # Exit if the new image cannot be loaded
else:
update_display(last_x, last_y) # Update display with last known mouse position
elif key == ord('s'): # 's' key for saving
# Update the display to get the latest overlay and save it
img_text_with_overlays = update_display(last_x, last_y)
save_frame(img_text_with_overlays)
continue # Skip the frame reload if saving
cv2.destroyAllWindows() cv2.destroyAllWindows()

181
decode.py Executable file → Normal file
View File

@@ -1,9 +1,12 @@
#!/usr/bin/env python3 #!/usr/bin/env python3
import argparse import argparse
from pathlib import Path
import os import os
import subprocess import subprocess
from io import BytesIO
import numpy as np import numpy as np
from tqdm import tqdm from tqdm import tqdm
from datetime import datetime
import pandas as pd import pandas as pd
import pcapng import pcapng
from struct import unpack from struct import unpack
@@ -12,6 +15,7 @@ from PIL import Image
# Create the parser # Create the parser
parser = argparse.ArgumentParser(description="Process a pcap file.") parser = argparse.ArgumentParser(description="Process a pcap file.")
parser.add_argument("--live", action="store_true", help="Process images live")
# Add an argument for the pcap file, with a default value # Add an argument for the pcap file, with a default value
parser.add_argument('input_file', nargs='?', default='in.pcap', help='The pcap file to process') parser.add_argument('input_file', nargs='?', default='in.pcap', help='The pcap file to process')
@@ -19,39 +23,41 @@ parser.add_argument('input_file', nargs='?', default='in.pcap', help='The pcap f
# Parse the arguments # Parse the arguments
args = parser.parse_args() args = parser.parse_args()
# Now use args.input_file as the file to process
input_file = args.input_file
basename = os.path.splitext(os.path.basename(input_file))[0]
# Read packets from a pcap file
scanner = pcapng.scanner.FileScanner(open(input_file, "rb"))
blocks = tqdm(scanner)
# Helper function to safely get an attribute from an object
def tryget(obj, att):
if hasattr(obj, att):
return getattr(obj, att)
return None
# TODO - probably a better way to do this
def live_capture_cb(cb):
def outer(pkt):
data = bytes(pkt)
l = len(data)
if l == 6972:
cb(data)
scapy.all.sniff(iface="enp1s0f0", filter='udp', prn=outer)
def rightsize(it): def rightsize(it):
for i, obj in enumerate(it): for i, obj in enumerate(it):
if isinstance(obj, bytes):
l = len(obj)
data = obj
else:
if not hasattr(obj, 'packet_len'): if not hasattr(obj, 'packet_len'):
continue continue
len = obj.packet_len l = obj.packet_len
if len != 6972: data = obj.packet_data
if l != 6972:
continue continue
yield obj.packet_data yield data
def removestart(it): def removestart(it):
"Remove the UDP header from the packets" "Remove the UDP header from the packets"
for x in it: for x in it:
yield x[0x2A:] yield removestart_inner(x)
def removestart_inner(x):
return x[0x2A:]
# Function to parse packet data # Function to parse packet data
def parse(data): def parse(data):
@@ -70,42 +76,85 @@ def parsed(it):
yield parse(x) yield parse(x)
class FrameCollector:
def __init__(self):
self.current = []
def handle(self, obj):
ret = None
if obj['part'] == 0:
if len(self.current) > 0:
ret = b"".join(self.current)
self.current = []
#otherdata = []
self.current.append(obj["data"])
return ret
#otherdata.append(obj)
def last(self):
if len(self.current) > 0:
return b"".join(current)
return None
# Function to group data into frames # Function to group data into frames
def frames(it): def frames(it):
current = [] handler = FrameCollector()
#otherdata = []
for obj in it: for obj in it:
if obj['part'] == 0: ret = handler.handle(obj)
if len(current) > 0: if ret:
yield b"".join(current) yield ret
current = [] last = handler.last()
current.append(obj["data"]) if last:
if len(current) > 0: yield last
yield b"".join(current)
def iterimages(it, width, height, pixelformat=">H"): WIDTH = 384
HEIGHT = 288
def bad_frame(frame, width=WIDTH, height=HEIGHT):
return len(frame) != width * height * 2 # 16 bpp
def skip_bad_frames(it, width=WIDTH, height=HEIGHT):
for frame in it: for frame in it:
if len(frame) != width * height * 2: # 16 bpp if bad_frame(frame): # 16 bpp
# Will be fixed when we stopped doing restarts
#print(f'{len(frame)} != {width} * {height} * 2')
continue continue
yield frame
def iterimages(it, width=WIDTH, height=HEIGHT, pixelformat=">H"):
for frame in it:
yield Image.fromarray(np.frombuffer(frame, dtype=pixelformat).reshape(width, height)) yield Image.fromarray(np.frombuffer(frame, dtype=pixelformat).reshape(width, height))
# Get frames and convert them to images def process_video():
frames = frames(parsed(removestart(rightsize(blocks)))) # Now use args.input_file as the file to process
images = iterimages(it=frames, width=384, height=288) input_file = args.input_file
basename = os.path.splitext(os.path.basename(input_file))[0]
stream = open(input_file, 'rb')
# Read packets from a pcap file
scanner = pcapng.scanner.FileScanner(stream)
blocks = tqdm(scanner)
# Create the directory for frames if not exists # Get frames and convert them to images
frame_dir = f"frames/{basename}" frames = skip_bad_frames(frames(parsed(removestart(rightsize(blocks)))))
if not os.path.exists(frame_dir):
# Create the directory for frames if not exists
frame_dir = f"frames/{basename}"
if not os.path.exists(frame_dir):
os.makedirs(frame_dir) os.makedirs(frame_dir)
# Save each image as a PNG file # Save each image as a PNG file
for i, img in enumerate(images): images = iterimages(it=frames)
for i, img in enumerate(images):
img.save(f'frames/{basename}/{basename}_{i:04}.png') img.save(f'frames/{basename}/{basename}_{i:04}.png')
ffmpeg_input = f"frames/{basename}/{basename}_%04d.png"
# Produce a video from the saved images command = [
ffmpeg_input = f"frames/{basename}/{basename}_%04d.png"
command = [
"ffmpeg", "ffmpeg",
"-y", # Overwrite output file without asking "-y", # Overwrite output file without asking
"-hide_banner", # Hide banner "-hide_banner", # Hide banner
@@ -115,11 +164,55 @@ command = [
"-i", ffmpeg_input, # Input file pattern "-i", ffmpeg_input, # Input file pattern
"-vf", "transpose=1", # Video filter for transposing "-vf", "transpose=1", # Video filter for transposing
"-s", "384x288", # Size of one frame "-s", "384x288", # Size of one frame
"-vcodec", "libx264", # Video codec "-vcodec", "libopenh264", # Video codec
"-pix_fmt", "yuv420p", # Pixel format: YUV 4:2:0 "-pix_fmt", "yuv420p", # Pixel format: YUV 4:2:0
"thermal.mp4", # Output file in MP4 container "thermal.mp4", # Output file in MP4 container
] ]
subprocess.run(command) subprocess.run(command)
print("to play: ffplay thermal.mp4")
if args.live:
# TODO: to video via ffmpeg; right now just a single png
# of the last frame
def todo_live_ffmpeg():
output = 'to_ffmpeg'
# live: write to named pipe
if not Path(output).exists():
print(f'making fifo at {output}')
os.mkfifo(output)
fd = open(output, 'wb')
for frame in frames:
fd.write(frame)
print('live stream, import scapy')
import scapy.all
print('open stream')
class PacketHandler:
def __init__(self, cb):
self.frame_collector = FrameCollector()
self.cb = cb
def handle(self, pkt):
pkt = removestart_inner(pkt)
parsed = parse(pkt)
frame_maybe = self.frame_collector.handle(parsed)
if not frame_maybe or bad_frame(frame_maybe):
return
self.cb(frame_maybe)
progress = tqdm()
def on_frame(frame):
progress.update(1)
Image.fromarray(np.frombuffer(frame, dtype='>H').reshape(WIDTH, HEIGHT)).save(f'live.new.png')
os.rename('live.new.png', 'live.png')
handler = PacketHandler(on_frame)
live_capture_cb(handler.handle)
else:
process_video()
print("to play: ffplay thermal.mp4")

111
examples/cutoff.rs Normal file
View File

@@ -0,0 +1,111 @@
use std::{collections::HashMap, io::stdout, time::Duration};
use futures::{future::FutureExt, select, StreamExt};
use futures_timer::Delay;
use crossterm::{
cursor::position,
event::{DisableMouseCapture, EnableMouseCapture, Event, EventStream, KeyCode, KeyEvent},
execute,
terminal::{disable_raw_mode, enable_raw_mode},
};
/// POST new cutoff/frequency values to the streamer's web interface.
///
/// `cutoff` seeds `min_cutoff`; `max_cutoff` is kept 10 degrees above it,
/// mirroring the initialization in the streamer. `freq` is the pulse
/// frequency in Hz.
///
/// # Errors
/// Propagates any `reqwest` connection/request error.
async fn set_cutoff(cutoff: f64, freq: f64) -> Result<(), reqwest::Error> {
    // Endpoint is fixed; no interpolation needed (was a useless `format!`).
    let url = "http://localhost:3000/cutoff";
    let mut map = HashMap::new();
    map.insert("min_cutoff", cutoff);
    map.insert("max_cutoff", cutoff + 10.0);
    map.insert("freq_hz", freq);
    let client = reqwest::Client::new();
    // Fire-and-forget: response status/body intentionally ignored,
    // transport errors still propagate via `?`.
    client.post(url).json(&map).send().await?;
    Ok(())
}
/// Help text printed at startup; describes the keys this tool actually
/// handles (the previous text was copied from a crossterm example and
/// described behavior this program does not have).
const HELP: &str = r#"Interactive cutoff controller (POSTs to http://localhost:3000/cutoff)
- "[" / "]" decrease / increase the cutoff by 1 degree
- "1" / "2" decrease / increase the pulse frequency by 10%
- Hit "c" to print current cursor position
- Use Esc to quit
"#;
/// Event loop: read terminal key events and push cutoff / frequency
/// updates to the streamer's HTTP endpoint via [`set_cutoff`].
/// Exits on Esc or when the event stream ends.
async fn print_events() {
    let mut reader = EventStream::new();
    // Current cutoff value sent as `min_cutoff` (degrees).
    let mut cutoff = 30.0;
    // NOTE(review): never read after initialization — dead variable.
    let mut last_cutoff = cutoff;
    // Pulse frequency in Hz.
    let mut freq = 1.0;
    loop {
        // Wake at least once a second even when no key arrives,
        // so the select! below never blocks forever on the reader.
        let mut delay = Delay::new(Duration::from_millis(1_000)).fuse();
        let mut event = reader.next().fuse();
        // Set to true only when a key actually changed a parameter.
        let mut change = false;
        select! {
            _ = delay => {
            },
            maybe_event = event => {
                match maybe_event {
                    Some(Ok(event)) => {
                        // 'c': debug aid — print the cursor position.
                        if event == Event::Key(KeyCode::Char('c').into()) {
                            println!("Cursor position: {:?}\r", position());
                        }
                        if event == Event::Key(KeyCode::Esc.into()) {
                            break;
                        }
                        if let Event::Key(k) = event {
                            if let KeyCode::Char(c) = k.code {
                                change = true;
                                match c {
                                    // '[' / ']': step the cutoff by one degree.
                                    '[' => {
                                        cutoff -= 1.0;
                                    }
                                    ']' => {
                                        cutoff += 1.0;
                                    }
                                    // '1' / '2': scale the frequency by ±10%.
                                    '1' => {
                                        freq *= 0.9;
                                    }
                                    '2' => {
                                        freq *= 1.1;
                                    }
                                    _ => {
                                        change = false;
                                    }
                                }
                            }
                        }
                        if change {
                            // Push the new values; panics if the server
                            // is unreachable (unwrap on the request).
                            set_cutoff(cutoff, freq).await.unwrap();
                            println!("cutoff = {}\r", cutoff);
                        }
                    }
                    Some(Err(e)) => println!("Error: {:?}\r", e),
                    None => break,
                }
            }
        };
    }
}
#[tokio::main]
async fn main() -> std::io::Result<()> {
    println!("{}", HELP);
    // Raw mode: deliver single keypresses immediately, without Enter.
    enable_raw_mode()?;
    let mut stdout = stdout();
    execute!(stdout, EnableMouseCapture)?;
    print_events().await;
    // Restore the terminal before exiting.
    execute!(stdout, DisableMouseCapture)?;
    disable_raw_mode()
}

View File

@@ -6,7 +6,6 @@ fn main() -> anyhow::Result<()> {
let mut arg = env::args(); let mut arg = env::args();
arg.next(); // skip executable arg.next(); // skip executable
let filename = arg.next().ok_or(anyhow::anyhow!("unexpected"))?; let filename = arg.next().ok_or(anyhow::anyhow!("unexpected"))?;
let frames = arg.next().unwrap_or("frames".into()); decode_to_files(&filename)?;
decode_to_files(&filename, &frames)?;
Ok(()) Ok(())
} }

17
examples/replay.rs Normal file
View File

@@ -0,0 +1,17 @@
use std::net::UdpSocket;
/// Broadcast the "start streaming" trigger datagram to the camera.
///
/// Binds the local side to 192.168.0.1:8091 and broadcasts the 28-byte
/// payload to 192.168.0.255:8092 — the same payload `replay.py` sends,
/// minus the Ethernet/IP/UDP headers that the OS adds for us.
fn main() -> std::io::Result<()> {
    // Trigger payload captured from the original sender (see the hex
    // dumps in the commit history).
    const TRIGGER: [u8; 28] = [
        1, 0x20, 1, 0x80, 0x1b, 0x40, 0, 0x20, 0, 0, 0, 0, 0, 0, 0, 0x0f, 0, 0, 0, 1, 0, 0, 1,
        0, 0, 0x20, 0x2b, 0,
    ];
    let socket = UdpSocket::bind("192.168.0.1:8091")?;
    socket.set_broadcast(true)?;
    socket.send_to(&TRIGGER, "192.168.0.255:8092")?;
    Ok(())
}

9
listen.py Normal file
View File

@@ -0,0 +1,9 @@
# Minimal debug listener: print every UDP datagram that arrives on port 8090.
from socket import socket, AF_INET, SOCK_DGRAM
s = socket(AF_INET, SOCK_DGRAM)
# '' binds to all local interfaces.
s.bind(('', 8090))
while True:
    # recvfrom returns (data, (src_addr, src_port)); datagrams longer than
    # 1024 bytes are silently truncated.
    d = s.recvfrom(1024)
    print(d)

7
live_vid.sh Executable file
View File

@@ -0,0 +1,7 @@
#!/bin/bash
# Play back a raw 16-bit big-endian grayscale dump (output.raw) with GStreamer.
# NOTE(review): height=384/width=288 is the transpose of the 384x288 frame
# size used elsewhere in this repo -- confirm the intended orientation.
# Enable to get a dot file, to turn to png: dot -Tpng -osomething.png something.dot
#export GST_DEBUG_DUMP_DOT_DIR=$(pwd)
gst-launch-1.0 filesrc location=output.raw \
    ! rawvideoparse use_sink_caps=false height=384 width=288 format=gray16-be \
    ! videoconvertscale \
    ! autovideosink

10
pyproject.toml Normal file
View File

@@ -0,0 +1,10 @@
[project]
name = "thermalcam"
version = "0.1.0"
description = "Thermal camera stream decoder and viewer utilities"
readme = "README.md"
requires-python = ">=3.13"
dependencies = [
"python-dotenv>=1.2.1",
"scapy>=2.6.1",
]

27
replay.py Executable file
View File

@@ -0,0 +1,27 @@
#!/usr/bin/env python3
# Replay the "trigger" packet.
# This packet tells the camera to start broadcasting its data packets.
import base64
from scapy.all import *
from dotenv import load_dotenv
import os
# Pull THERMALCAM_IFACE (and friends) from a local .env file, if present.
load_dotenv()
# Base64 encoded packet data: a full Ethernet frame, presumably captured
# from the original sender (see the hex dumps in the commit log).
encoded_packet = "////////AAFsWfAKCABFAAA4KB0AAIARkEfAqAABwKgA/x+bH5wA2QAAASABgBtAACAAAAAAAAAADwAAAAEAAAEAACArAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP//////////////////////////////////////////AAAAAAAAAAIBAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
# Decode the Base64 encoded packet
decoded_packet = base64.b64decode(encoded_packet)
# Load packet with Scapy
packet = Ether(decoded_packet)
#print(packet.show(dump=True))
iface = os.environ.get('THERMALCAM_IFACE', 'enp1s0f0')
print(f'using interface {iface}')
# Send at layer 2 (raw socket) -- requires root or CAP_NET_RAW,
# hence the `sudo` invocation documented in the README.
sendp(packet, iface=iface)

2
run_dhcp_server.sh Executable file
View File

@@ -0,0 +1,2 @@
#!/bin/bash
# Run dnsmasq in the foreground (-d) as a DHCP server on enp1s0f0,
# handing out leases in 192.168.0.10-100 (lease file: dhcp.lease).
sudo dnsmasq -i enp1s0f0 --local-service --dhcp-range=192.168.0.10,192.168.0.100 --dhcp-leasefile=dhcp.lease -d

13
run_live.sh Executable file
View File

@@ -0,0 +1,13 @@
#!/bin/bash
# Build the release streamer binary and run it against the live feed.
cd $(dirname $0)
set -e
# Python works but stutters
#sudo ./venv/bin/python ./decode.py --live
cargo build --release
TARGET=./target/release/thermaldecoder
# setcap does not work yet (EPERM on socket AF_PACKET)
# sudo setcap cap_net_raw,cap_net_admin=eip $TARGET
#sudo strace -f -o live.strace $TARGET /dev/video0
# Root is required for the raw capture socket; extra args are forwarded.
sudo RUST_BACKTRACE=full $TARGET "$@"

13
run_live_debug.sh Executable file
View File

@@ -0,0 +1,13 @@
#!/bin/bash
# Same as run_live.sh, but builds/runs the debug `live` example instead
# of the release binary -- for debugging with backtraces.
cd $(dirname $0)
set -e
# Python works but stutters
#sudo ./venv/bin/python ./decode.py --live
cargo build --example live
TARGET=./target/debug/examples/live
# setcap does not work yet (EPERM on socket AF_PACKET)
# sudo setcap cap_net_raw,cap_net_admin=eip $TARGET
#sudo strace -f -o live.strace $TARGET /dev/video0
sudo RUST_BACKTRACE=full $TARGET "$@"

2
rustdecode.sh Executable file
View File

@@ -0,0 +1,2 @@
#!/bin/bash
# Decode a capture with the Rust `main` example (release build);
# all arguments (e.g. the input pcap path) are forwarded.
cargo run --release --example main -- "$@"

View File

@@ -89,7 +89,7 @@ impl PacketsIterator {
pub struct Header { pub struct Header {
c1: u32, c1: u32,
c2: u16, c2: u16,
part: u16, pub part: u16,
a: u16, a: u16,
ffaa: u16, ffaa: u16,
b: u16, b: u16,
@@ -98,7 +98,7 @@ pub struct Header {
} }
impl Header { impl Header {
fn read(data: &[u8]) -> anyhow::Result<Self> { pub fn read(data: &[u8]) -> anyhow::Result<Self> {
Ok(Header { Ok(Header {
c1: u32::from_be_bytes([data[0], data[1], data[2], data[3]]), c1: u32::from_be_bytes([data[0], data[1], data[2], data[3]]),
c2: u16::from_be_bytes([data[4], data[5]]), c2: u16::from_be_bytes([data[4], data[5]]),
@@ -122,12 +122,12 @@ impl Header {
} }
} }
const HDR_SIZE: usize = std::mem::size_of::<Header>(); pub const HDR_SIZE: usize = std::mem::size_of::<Header>();
pub struct Frame { pub struct Frame {
#[allow(dead_code)] #[allow(dead_code)]
header: Header, pub header: Header,
raw: Vec<u8>, pub raw: Vec<u8>,
} }
impl Frame { impl Frame {
@@ -184,7 +184,8 @@ impl Iterator for Decoder {
} }
} }
fn write_raw_frame(name: &str, data: &[u8]) -> anyhow::Result<()> { #[allow(dead_code)]
pub fn write_raw_frame(name: &str, data: &[u8]) -> anyhow::Result<()> {
let path = Path::new(&name); let path = Path::new(&name);
let file = File::create(path)?; let file = File::create(path)?;
let ref mut w = BufWriter::new(file); let ref mut w = BufWriter::new(file);
@@ -196,6 +197,7 @@ fn write_raw_frame(name: &str, data: &[u8]) -> anyhow::Result<()> {
Ok(()) Ok(())
} }
#[allow(dead_code)]
fn write_calibrated_frame(name: &str, data: &[u16]) -> anyhow::Result<()> { fn write_calibrated_frame(name: &str, data: &[u16]) -> anyhow::Result<()> {
let path = Path::new(&name); let path = Path::new(&name);
let file = File::create(path).unwrap(); let file = File::create(path).unwrap();
@@ -251,15 +253,27 @@ fn decode(filename: &str) -> PyResult<PyFrameIterator> {
Ok(iter.into()) Ok(iter.into())
} }
pub fn decode_to_files(filename: &str, frames_root: &str) -> anyhow::Result<()> { /// writes to frames/<basename of filename>
#[allow(dead_code)]
pub fn decode_to_files(filename: &str) -> anyhow::Result<()> {
let frameiter = Decoder::new(filename)?; let frameiter = Decoder::new(filename)?;
let basename = std::path::Path::new(filename)
.file_stem()
.ok_or(anyhow::anyhow!("cannot get basename"))?
.to_str()
.ok_or(anyhow::anyhow!("cannot convert to utf-8 from os name"))?;
let target_dir = format!("frames/{}", basename);
let target_dir = std::path::Path::new(&target_dir);
if !target_dir.exists() {
std::fs::create_dir(target_dir)?;
}
for (i, frame) in frameiter.enumerate() { for (i, frame) in frameiter.enumerate() {
let name = format!("{}/{:05}.png", frames_root, i); let name = format!("frames/{}/{:05}.png", basename, i);
if let Err(_e) = write_raw_frame(&name, &frame.raw) { if let Err(_e) = write_raw_frame(&name, &frame.raw) {
println!("skipping bad frame {}", i); println!("skipping bad frame {}", i);
continue; continue;
} }
let name = format!("{}/temp_{:05}.png", frames_root, i); let name = format!("{}/temp_{:05}.png", target_dir.display(), i);
let pixels = frame.pixels(); let pixels = frame.pixels();
write_calibrated_frame(&name, &pixels)?; write_calibrated_frame(&name, &pixels)?;
} }

209
src/stream.rs Normal file
View File

@@ -0,0 +1,209 @@
use crate::offline::{Header, HDR_SIZE};
use bracket_color::prelude::*;
use clap::Parser;
use dotenv::dotenv;
use std::time::SystemTime;
use std::{
io::Write,
sync::{Arc, Mutex},
thread::spawn,
};
use v4l::video::Output;
// Command-line options for the streamer binary.
// NB: plain `//` comments are used on purpose — `///` doc comments on a
// clap derive struct/fields would be picked up as --help/about text and
// change the CLI output.
#[derive(Parser, Debug)]
#[command(version, about, long_about = None)]
struct Args {
    // When set, raw sensor values are converted through the calibration
    // polynomial (`pixel_to_celcius`) before being written out.
    #[arg(short, long, default_value_t = false)]
    temperature: bool,
    // Path of the v4l2loopback output device frames are written to.
    #[arg(short, long, default_value = "/dev/video0")]
    device: String,
    // Lower bound for the red-highlight cutoff sweep; defaults to 26.0
    // in `initialize` when omitted (units appear to be degrees Celsius —
    // matches `pixel_to_celcius` output scaled by 256; confirm).
    #[arg(short, long)]
    red_cutoff: Option<f64>,
}
fn pixel_to_celcius(x: u16) -> u16 {
let x: f64 = x.into();
let x = x / 256.0;
let t = (-1.665884e-08) * x.powf(4.)
+ (1.347094e-05) * x.powf(3.)
+ (-4.396264e-03) * x.powf(2.)
+ (9.506939e-01) * x
+ (-6.353247e+01);
(t * 256.0) as u16
}
/// https://en.wikipedia.org/wiki/HSL_and_HSV
/// Convert an HSV color given as three bytes in [0, 256) to RGB bytes.
/// Internally scales to the conventional ranges first:
/// h in [0, 360) degrees, s in [0, 1), v in [0, 1).
///
/// Fix: the original computed the scaled h/s/v and then returned a
/// hard-coded (0, 0, 0); this implements the standard piecewise
/// hue-sector conversion the doc comment promises.
fn once_upon_a_time_hsv2rgb(h: u8, s: u8, v: u8) -> (u8, u8, u8) {
    let h = (h as f64) / 256.0 * 360.0;
    let s = (s as f64) / 256.0;
    let v = (v as f64) / 256.0;
    // Chroma and the intermediate component for the current 60° sector.
    let c = v * s;
    let hp = h / 60.0;
    let x = c * (1.0 - (hp % 2.0 - 1.0).abs());
    // hp is in [0, 6); truncation picks the hue sector.
    let (r1, g1, b1) = match hp as u32 {
        0 => (c, x, 0.0),
        1 => (x, c, 0.0),
        2 => (0.0, c, x),
        3 => (0.0, x, c),
        4 => (x, 0.0, c),
        _ => (c, 0.0, x),
    };
    // Lift by m = v - c to match the requested value, then scale to bytes.
    let m = v - c;
    (
        ((r1 + m) * 255.0).round() as u8,
        ((g1 + m) * 255.0).round() as u8,
        ((b1 + m) * 255.0).round() as u8,
    )
}
/// Scale an `RGB` with unit-range float components to three bytes.
///
/// Each channel is multiplied by 256 and truncated; the float->int `as`
/// cast saturates, so a component of exactly 1.0 still yields 255.
fn rgb_to_u8s(rgb: &RGB) -> (u8, u8, u8) {
    let (red, green, blue) = (rgb.r, rgb.g, rgb.b);
    ((red * 256.) as u8, (green * 256.) as u8, (blue * 256.) as u8)
}
// Shared, mutable streaming parameters. Created by `initialize`, updated
// externally (the web interface, per the commit history — confirm), and
// read once per completed frame by the capture loop in `main`.
pub(crate) struct Streamer {
    // Lower bound of the sinusoidal cutoff sweep (same units as
    // `pixel_to_celcius` output / 256 — presumably degrees Celsius).
    pub(crate) min_cutoff: f64,
    // Upper bound of the cutoff sweep.
    pub(crate) max_cutoff: f64,
    // Sweep rate passed to sin(dt * hz) in `main`; note this is an
    // angular rate in rad/s there, not cycles per second.
    pub(crate) freq_hz: f64,
}
/// Build the shared streamer state from the command line.
///
/// `--red-cutoff` (defaulting to 26.0 when absent) seeds the lower
/// cutoff; the upper cutoff starts 10 degrees above it and the sweep
/// frequency at 1.0.
pub(crate) fn initialize() -> Arc<Mutex<Streamer>> {
    let cutoff_low = Args::parse().red_cutoff.unwrap_or(26.);
    let state = Streamer {
        min_cutoff: cutoff_low,
        max_cutoff: cutoff_low + 10.0,
        freq_hz: 1.0,
    };
    Arc::new(Mutex::new(state))
}
/// Capture camera packets from the network, reassemble them into raw
/// 16-bit frames, convert each frame (grayscale or RGB false-color), and
/// write the result to a v4l2loopback output device.
///
/// Loops until packet capture fails; intended to run on a worker thread
/// (see `start_stream_thread`). Per-frame display parameters are read
/// from the shared `streamer` state.
fn main(streamer: Arc<Mutex<Streamer>>) -> anyhow::Result<()> {
    dotenv().ok();
    let args = Args::parse();
    // Pick the capture interface named by THERMALCAM_IFACE (loaded from
    // .env above), falling back to pcap's default device.
    // BUG FIX: the variable name previously included its value too
    // ("THERMALCAM_IFACE=enp1s0f0"), so the lookup always failed and the
    // configured interface was silently ignored.
    let device = match std::env::var("THERMALCAM_IFACE") {
        Ok(d) => {
            let device = pcap::Device::list()
                .expect("device list failed")
                .into_iter()
                .find(|x| x.name == d)
                // unwrap_or_else avoids building the message on success
                // (clippy: expect_fun_call); panic text is unchanged.
                .unwrap_or_else(|| panic!("could not find device {}", d));
            device
        }
        Err(_) => pcap::Device::lookup()
            .expect("device lookup failed")
            .expect("no device available"),
    };
    // get the default Device
    println!("Using device {}", device.name);
    let output = args.device;
    println!("Using output v4l2loopback device {}", output);
    // Sensor frame dimensions; the output is rotated, so width/height are
    // swapped when the v4l format is created below.
    const WIDTH: usize = 288;
    const HEIGHT: usize = 384;
    println!("reading cutoff");
    // Wall-clock start time, used to phase the sinusoidal cutoff sweep.
    let start = SystemTime::now()
        .duration_since(SystemTime::UNIX_EPOCH)
        .unwrap()
        .as_secs_f64();
    let greyscale = !args.temperature;
    let fourcc_repr = if greyscale {
        [
            b'Y', // | 0b10000000
            b'1', b'6',
            b' ', // Note: not using b' ' | 0x80, (V4L2_PIX_FMT_Y16_BE)
            // because VID_S_FMT ioctl returns EINVAL, so just swap the bytes here
        ]
    } else {
        // RGB32 is 4 bytes R, G, B, A
        [b'R', b'G', b'B', b'4']
    };
    println!("using four cc {:?}", fourcc_repr);
    let bytes_per_pixel = if greyscale { 2 } else { 4 };
    let fourcc = v4l::format::FourCC { repr: fourcc_repr };
    let mut out = v4l::Device::with_path(output)?;
    // To find the fourcc code, use v4l2-ctl --list-formats-out /dev/video0
    // (or read the source :)
    // flip axes
    let format = v4l::Format::new(HEIGHT as u32, WIDTH as u32, fourcc);
    Output::set_format(&out, &format)?;
    // Setup Capture
    let mut cap = pcap::Capture::from_device(device)
        .unwrap()
        .immediate_mode(true)
        .open()
        .unwrap();
    // Only packets of exactly this length carry frame data.
    const PACKET_LEN: usize = 6972;
    // input is grayscale 16 bits per pixel
    const FRAME_LEN: usize = WIDTH * HEIGHT * 2;
    let mut frame = [0u8; FRAME_LEN];
    let mut len = 0;
    let output_frame_len = WIDTH * HEIGHT * bytes_per_pixel;
    // Output buffer is allocated once and reused for every frame.
    let mut swapped_vec = vec![0u8; output_frame_len];
    let swapped = &mut swapped_vec;
    while let Ok(p) = cap.next_packet() {
        let data = p.data;
        if data.len() != PACKET_LEN {
            continue;
        }
        // Skip 0x2a (42) bytes of headers before the camera payload —
        // presumably Ethernet+IPv4+UDP; confirm against the capture.
        let data = &data[0x2a..];
        let header = match Header::read(data) {
            Ok(header) => header,
            Err(_) => continue,
        };
        let data = &data[HDR_SIZE..];
        // A new frame start (part == 0) or an overflow means the current
        // accumulation is finished: emit it if complete, then reset.
        if (header.part == 0 && len > 0)
            // do not write out of bounds - would panic, instead just skip
            || (data.len() + len > FRAME_LEN)
        {
            if len == FRAME_LEN {
                // read once per frame, can make it lower if need be
                let state = streamer.lock().unwrap();
                let mid = (state.min_cutoff + state.max_cutoff) / 2.0;
                let range = state.max_cutoff - state.min_cutoff;
                let hz = state.freq_hz;
                let now = SystemTime::now()
                    .duration_since(SystemTime::UNIX_EPOCH)
                    .unwrap()
                    .as_secs_f64();
                let dt = now - start;
                // Sweep the highlight cutoff sinusoidally between min and max.
                let cutoff = mid + f64::sin(dt * hz) * 0.5 * range;
                // swap the bytes, we are using LE, not BE, 16 bit grayscale
                // possibly limitation of current v4l2loopback or v4l rust wrapper or libv4l2
                for i in 0..FRAME_LEN / 2 {
                    let x = i % WIDTH;
                    let y = (i / WIDTH) % HEIGHT;
                    let mut pixel = u16::from_be_bytes([frame[i * 2], frame[i * 2 + 1]]);
                    if greyscale {
                        // NOTE(review): greyscale == !args.temperature, so this
                        // branch can never run — dead leftover; confirm intent.
                        if args.temperature {
                            pixel = pixel_to_celcius(pixel);
                        }
                        let pixel_swapped = pixel.to_le_bytes();
                        // Rotate/transpose into the swapped-axes output frame.
                        let out_i = ((HEIGHT - 1 - y) + (WIDTH - 1 - x) * HEIGHT) * 2;
                        swapped[out_i..out_i + 2].copy_from_slice(&pixel_swapped);
                    } else {
                        pixel = pixel_to_celcius(pixel);
                        let (r, g, b) = if pixel > (256.0 * cutoff) as u16 {
                            let p = pixel - (256.0 * cutoff) as u16;
                            // NOTE(review): HSV value is 0.0 here, which renders
                            // black regardless of saturation — looks suspicious
                            // for a "hot" highlight; confirm intended color.
                            let rgb = HSV::from_f32(0.0, (p as f32) / 256.0, 0.0).to_rgb();
                            rgb_to_u8s(&rgb)
                        } else {
                            let rgb =
                                HSV::from_f32(pixel as f32 / 65536.0, 0.0, pixel as f32 / 65536.0)
                                    .to_rgb();
                            rgb_to_u8s(&rgb)
                        };
                        let out_i = ((HEIGHT - 1 - y) + (WIDTH - 1 - x) * HEIGHT) * 4;
                        swapped[out_i..out_i + 4].copy_from_slice(&[0, r, g, b]);
                    }
                }
                out.write_all(&swapped[..])?;
            }
            len = 0;
        }
        frame[len..len + data.len()].copy_from_slice(data);
        len += data.len();
    }
    Ok(())
}
/// Spawn the capture/stream loop (`main`) on a detached background thread.
///
/// Errors from the loop are printed rather than propagated — the thread
/// has no caller to return them to.
pub(crate) fn start_stream_thread(streamer: Arc<Mutex<Streamer>>) {
    spawn(move || match main(streamer) {
        Ok(()) => (),
        Err(e) => println!("oops: {:?}", e),
    });
}

12
start.sh Executable file
View File

@@ -0,0 +1,12 @@
#!/bin/bash
# Prepare the capture interface for jumbo frames and start the replay
# service. Expects THERMALCAM_IFACE to be set in .env.
. .env
IFACE=$THERMALCAM_IFACE
echo "using iface $IFACE"
echo "checking mtu"
# Extract N from the "mtu N" token of `ip link show`.
# (awk instead of gawk: plain '{print $2}' needs no GNU extensions.)
mtu=$(ip link show "${IFACE}" | grep -o 'mtu [0-9]*' | awk '{print $2}')
echo "mtu = $mtu"
# Guard against an empty $mtu (unknown/missing interface): the original
# unquoted `[ $mtu -lt 9000 ]` became `[ -lt 9000 ]`, a syntax error.
if [ -z "$mtu" ]; then
    echo "could not determine mtu for interface '$IFACE'" >&2
    exit 1
fi
if [ "$mtu" -lt 9000 ]; then
    echo "setting mtu to 9000"
    sudo ip link set "$IFACE" mtu 9000
fi
sudo uv run ./replay.py

View File

@@ -2,11 +2,31 @@
from pathlib import Path from pathlib import Path
from thermaldecoder import decode from thermaldecoder import decode
import numpy as np import numpy as np
import subprocess
import matplotlib.pyplot as plt import matplotlib.pyplot as plt
# Create a directory to store the frames if it doesn't exist
root = Path('frames') root = Path('frames')
root.mkdir(exist_ok=True) root.mkdir(exist_ok=True)
frames = list(decode('in.pcap'))
f = np.array(frames[0]) # Decode the frames from the pcap file
f.shape = (384, 288) frames = list(decode('indesk.pcapng'))
plt.imshow(f)
plt.show() # Iterate over the frames
for i, frame in enumerate(frames):
try:
# Convert the frame to an image file
img_path = root / f"frame_{i}.png"
f = np.array(frame)
f.shape = (384, 288)
plt.imshow(f)
plt.axis('off')
plt.savefig(img_path, bbox_inches='tight', pad_inches=0)
plt.close()
# Use ffmpeg to display the image
subprocess.run(['ffmpeg', '-i', str(img_path), '-vf', 'scale=800:600', '-framerate', '25', '-f', 'image2pipe', '-'], check=True)
except ValueError as e:
print(f"Error processing frame {i}: {e}")

Binary file not shown.