Gilles Soulier
2026-01-05 13:13:08 +01:00
parent 8e14adafc6
commit 1d177e96a6
149 changed files with 29541 additions and 1 deletion

@@ -0,0 +1,151 @@
// Created by: Claude
// Date: 2026-01-04
// Purpose: Unit tests for file transfer
// Refs: protocol_events_v_2.md
use mesh_agent::p2p::protocol::FileMessage;
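
// For reference, these tests assume a FileMessage enum shaped roughly like the
// sketch below. The real definition lives in mesh_agent::p2p::protocol (see
// protocol_events_v_2.md); the "t" tag and the FILE_META name come from
// test_file_message_tag_format, while the other variant tags and the exact
// field types (u64 size/offset, Vec<u8> data) are assumptions, not confirmed:
//
//     #[derive(Serialize, Deserialize)]
//     #[serde(tag = "t")]
//     pub enum FileMessage {
//         #[serde(rename = "FILE_META")]
//         Meta { name: String, size: u64, hash: String },
//         Chunk { offset: u64, data: Vec<u8> },
//         Done { hash: String },
//     }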

#[test]
fn test_file_message_meta_serialization() {
    // Round-trip a Meta message through serde_json and check every field survives.
    let meta = FileMessage::Meta {
        name: "test.txt".to_string(),
        size: 1024,
        hash: "abc123".to_string(),
    };
    let json = serde_json::to_string(&meta).unwrap();
    let deserialized: FileMessage = serde_json::from_str(&json).unwrap();
    match deserialized {
        FileMessage::Meta { name, size, hash } => {
            assert_eq!(name, "test.txt");
            assert_eq!(size, 1024);
            assert_eq!(hash, "abc123");
        }
        _ => panic!("Wrong variant"),
    }
}

#[test]
fn test_file_message_chunk_serialization() {
    // Round-trip a Chunk message and check the offset and payload bytes are preserved.
    let chunk = FileMessage::Chunk {
        offset: 1024,
        data: vec![1, 2, 3, 4, 5],
    };
    let json = serde_json::to_string(&chunk).unwrap();
    let deserialized: FileMessage = serde_json::from_str(&json).unwrap();
    match deserialized {
        FileMessage::Chunk { offset, data } => {
            assert_eq!(offset, 1024);
            assert_eq!(data, vec![1, 2, 3, 4, 5]);
        }
        _ => panic!("Wrong variant"),
    }
}

#[test]
fn test_file_message_done_serialization() {
    // Round-trip a Done message and check the final hash is preserved.
    let done = FileMessage::Done {
        hash: "final_hash_123".to_string(),
    };
    let json = serde_json::to_string(&done).unwrap();
    let deserialized: FileMessage = serde_json::from_str(&json).unwrap();
    match deserialized {
        FileMessage::Done { hash } => {
            assert_eq!(hash, "final_hash_123");
        }
        _ => panic!("Wrong variant"),
    }
}
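
// A small additional sketch (not part of the original suite): serde round-trips
// for edge-case values such as an empty chunk and a zero-size file. It assumes
// these values are legal for the FileMessage fields, which the protocol spec
// would need to confirm.
#[test]
fn test_file_message_edge_case_serialization() {
    // Empty chunk payload at offset 0
    let empty_chunk = FileMessage::Chunk {
        offset: 0,
        data: Vec::new(),
    };
    let json = serde_json::to_string(&empty_chunk).unwrap();
    match serde_json::from_str::<FileMessage>(&json).unwrap() {
        FileMessage::Chunk { offset, data } => {
            assert_eq!(offset, 0);
            assert!(data.is_empty());
        }
        _ => panic!("Wrong variant"),
    }
    // Non-ASCII file name and zero size
    let meta = FileMessage::Meta {
        name: "café.txt".to_string(),
        size: 0,
        hash: "empty".to_string(),
    };
    let json = serde_json::to_string(&meta).unwrap();
    match serde_json::from_str::<FileMessage>(&json).unwrap() {
        FileMessage::Meta { name, size, .. } => {
            assert_eq!(name, "café.txt");
            assert_eq!(size, 0);
        }
        _ => panic!("Wrong variant"),
    }
}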

#[tokio::test]
async fn test_blake3_hash() {
    use blake3::Hasher;
    let data = b"Hello, Mesh!";
    let hash = Hasher::new().update(data).finalize().to_hex().to_string();
    // Blake3 hash is 32 bytes = 64 hex chars
    assert_eq!(hash.len(), 64);
    // Verify hash is deterministic
    let hash2 = Hasher::new().update(data).finalize().to_hex().to_string();
    assert_eq!(hash, hash2);
}

#[tokio::test]
async fn test_blake3_chunked_hash() {
    use blake3::Hasher;
    let data = b"Hello, Mesh! This is a longer message to test chunked hashing.";
    // Hash all at once
    let hash_full = Hasher::new().update(data).finalize().to_hex().to_string();
    // Hash in chunks
    let mut hasher = Hasher::new();
    hasher.update(&data[0..20]);
    hasher.update(&data[20..40]);
    hasher.update(&data[40..]);
    let hash_chunked = hasher.finalize().to_hex().to_string();
    // Should be identical
    assert_eq!(hash_full, hash_chunked);
}
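
// A minimal end-to-end sketch of the flow the protocol implies: Meta announces
// the size and Blake3 hash, Chunks carry offset + data, and Done repeats the
// hash so the receiver can verify the reassembled bytes. The 16-byte chunk size
// is arbitrary, and u64 offsets/sizes with Vec<u8> data are assumptions about
// the FileMessage field types, not confirmed against mesh_agent.
#[test]
fn test_meta_chunk_done_flow_sketch() {
    use blake3::Hasher;
    let payload = b"Some file content split across several chunks for this sketch.";
    let full_hash = Hasher::new().update(payload).finalize().to_hex().to_string();
    // Sender side: one Meta, fixed-size Chunks in order, then Done.
    let meta = FileMessage::Meta {
        name: "sketch.bin".to_string(),
        size: payload.len() as u64,
        hash: full_hash.clone(),
    };
    let chunks: Vec<FileMessage> = payload
        .chunks(16)
        .enumerate()
        .map(|(i, part)| FileMessage::Chunk {
            offset: (i * 16) as u64,
            data: part.to_vec(),
        })
        .collect();
    let done = FileMessage::Done {
        hash: full_hash.clone(),
    };
    // Receiver side: append chunks in order and hash incrementally.
    let mut received = Vec::new();
    let mut hasher = Hasher::new();
    for msg in &chunks {
        if let FileMessage::Chunk { data, .. } = msg {
            received.extend_from_slice(data);
            hasher.update(data);
        }
    }
    let received_hash = hasher.finalize().to_hex().to_string();
    // The reassembled bytes must match both the announced and the final hash.
    if let FileMessage::Meta { size, hash, .. } = &meta {
        assert_eq!(*size, received.len() as u64);
        assert_eq!(*hash, received_hash);
    }
    if let FileMessage::Done { hash } = &done {
        assert_eq!(*hash, received_hash);
    }
}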

#[test]
fn test_file_message_tag_format() {
    let meta = FileMessage::Meta {
        name: "test.txt".to_string(),
        size: 100,
        hash: "hash".to_string(),
    };
    let json = serde_json::to_string(&meta).unwrap();
    // Verify it has the "t" field for the type tag
    assert!(json.contains(r#""t":"FILE_META""#));
}

#[tokio::test]
async fn test_length_prefixed_encoding() {
    use tokio::io::{AsyncReadExt, AsyncWriteExt};
    let msg = FileMessage::Meta {
        name: "test.txt".to_string(),
        size: 1024,
        hash: "abc123".to_string(),
    };
    // Encode: 4-byte big-endian length prefix followed by the JSON payload
    let json = serde_json::to_vec(&msg).unwrap();
    let len = (json.len() as u32).to_be_bytes();
    let mut buffer = Vec::new();
    buffer.write_all(&len).await.unwrap();
    buffer.write_all(&json).await.unwrap();
    // Decode: read the length prefix, then exactly that many payload bytes
    let mut cursor = std::io::Cursor::new(buffer);
    let mut len_buf = [0u8; 4];
    cursor.read_exact(&mut len_buf).await.unwrap();
    let msg_len = u32::from_be_bytes(len_buf) as usize;
    let mut msg_buf = vec![0u8; msg_len];
    cursor.read_exact(&mut msg_buf).await.unwrap();
    let decoded: FileMessage = serde_json::from_slice(&msg_buf).unwrap();
    match decoded {
        FileMessage::Meta { name, size, hash } => {
            assert_eq!(name, "test.txt");
            assert_eq!(size, 1024);
            assert_eq!(hash, "abc123");
        }
        _ => panic!("Wrong variant"),
    }
}
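
// A follow-on sketch for the decode path: with an untrusted peer the length
// prefix should be sanity-checked before allocating the message buffer. The
// 1 MiB cap below is an illustrative value, not a limit defined by mesh_agent.
#[tokio::test]
async fn test_length_prefix_oversize_check() {
    use tokio::io::AsyncReadExt;
    const MAX_MSG_LEN: usize = 1024 * 1024;
    // A frame claiming a 16 MiB payload but carrying no data at all.
    let bogus_len = (16u32 * 1024 * 1024).to_be_bytes();
    let mut cursor = std::io::Cursor::new(bogus_len.to_vec());
    let mut len_buf = [0u8; 4];
    cursor.read_exact(&mut len_buf).await.unwrap();
    let msg_len = u32::from_be_bytes(len_buf) as usize;
    // A real reader should reject this instead of allocating msg_len bytes.
    assert!(msg_len > MAX_MSG_LEN);
}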