Optimize crypto performance and memory management

This commit is contained in:
2025-03-13 23:36:13 +08:00
parent 55bcf3be66
commit d8ac03bf17
7 changed files with 2648 additions and 684 deletions

View File

@@ -32,87 +32,12 @@ func generateRandomData(size int) ([]byte, error) {
func createTempFile(t *testing.T, data []byte) string {
tempDir := t.TempDir()
tempFile := filepath.Join(tempDir, "test_data")
if err := ioutil.WriteFile(tempFile, data, 0644); err != nil {
if err := os.WriteFile(tempFile, data, 0644); err != nil {
t.Fatalf("Failed to create temporary file: %v", err)
}
return tempFile
}
func TestEncryptDecryptImageWithLog(t *testing.T) {
startTotal := time.Now()
defer func() {
t.Logf("Total time: %v", time.Since(startTotal))
}()
// Read original image
imagePath := "test.jpg"
start := time.Now()
imageData, err := ioutil.ReadFile(imagePath)
if err != nil {
t.Fatalf("Failed to read image: %v", err)
}
t.Logf("[1/7] Read image %s (%.2fKB) time: %v",
imagePath, float64(len(imageData))/1024, time.Since(start))
// Generate encryption key
start = time.Now()
key, err := generateRandomKey()
if err != nil {
t.Fatalf("Failed to generate key: %v", err)
}
t.Logf("[2/7] Generated %d bytes key time: %v", len(key), time.Since(start))
// Initialize cipher
start = time.Now()
xcipher := NewXCipher(key)
t.Logf("[3/7] Initialized cipher time: %v", time.Since(start))
// Perform encryption
additionalData := []byte("Image metadata")
start = time.Now()
ciphertext, err := xcipher.Encrypt(imageData, additionalData)
if err != nil {
t.Fatalf("Encryption failed: %v", err)
}
t.Logf("[4/7] Encrypted data (input: %d bytes, output: %d bytes) time: %v",
len(imageData), len(ciphertext), time.Since(start))
// Save encrypted file
cipherPath := "encrypted.jpg"
start = time.Now()
if err := ioutil.WriteFile(cipherPath, ciphertext, 0644); err != nil {
t.Fatalf("Failed to save encrypted file: %v", err)
}
t.Logf("[5/7] Wrote encrypted file %s time: %v", cipherPath, time.Since(start))
// Perform decryption
start = time.Now()
decryptedData, err := xcipher.Decrypt(ciphertext, additionalData)
if err != nil {
t.Fatalf("Decryption failed: %v", err)
}
decryptDuration := time.Since(start)
t.Logf("[6/7] Decrypted data (input: %d bytes, output: %d bytes) time: %v (%.2f MB/s)",
len(ciphertext), len(decryptedData), decryptDuration,
float64(len(ciphertext))/1e6/decryptDuration.Seconds())
// Verify data integrity
start = time.Now()
if !bytes.Equal(imageData, decryptedData) {
t.Fatal("Decrypted data verification failed")
}
t.Logf("[7/7] Data verification time: %v", time.Since(start))
// Save decrypted image
decryptedPath := "decrypted.jpg"
start = time.Now()
if err := ioutil.WriteFile(decryptedPath, decryptedData, 0644); err != nil {
t.Fatalf("Failed to save decrypted image: %v", err)
}
t.Logf("Saved decrypted image %s (%.2fKB) time: %v",
decryptedPath, float64(len(decryptedData))/1024, time.Since(start))
}
// TestStreamEncryptDecrypt tests basic stream encryption/decryption functionality
func TestStreamEncryptDecrypt(t *testing.T) {
// Generate random key
@@ -232,11 +157,12 @@ func TestStreamEncryptDecryptWithOptions(t *testing.T) {
t.Logf("- Throughput: %.2f MB/s", stats.Throughput)
// Prepare for decryption
encFile, err := os.Open(encryptedFile)
encData, err := ioutil.ReadFile(encryptedFile)
if err != nil {
t.Fatalf("Failed to open encrypted file: %v", err)
t.Fatalf("Failed to read encrypted file: %v", err)
}
defer encFile.Close()
encFile := bytes.NewReader(encData)
decFile, err := os.Create(decryptedFile)
if err != nil {
@@ -244,10 +170,10 @@ func TestStreamEncryptDecryptWithOptions(t *testing.T) {
}
defer decFile.Close()
// Perform stream decryption
// Perform parallel stream decryption
_, err = xcipher.DecryptStreamWithOptions(encFile, decFile, options)
if err != nil {
t.Fatalf("Stream decryption failed: %v", err)
t.Fatalf("Parallel stream decryption failed: %v", err)
}
// Close file to ensure data is written
@@ -269,7 +195,7 @@ func TestStreamEncryptDecryptWithOptions(t *testing.T) {
}
}
// TestStreamParallelProcessing tests parallel stream encryption/decryption
// TestStreamParallelProcessing tests the parallel stream encryption/decryption
func TestStreamParallelProcessing(t *testing.T) {
// Generate random key
key, err := generateRandomKey()
@@ -280,93 +206,74 @@ func TestStreamParallelProcessing(t *testing.T) {
// Initialize cipher
xcipher := NewXCipher(key)
// Generate large random test data (10MB, enough to trigger parallel processing)
testSize := 10 * 1024 * 1024
// Generate smaller test data
testSize := 1 * 1024 * 1024 // 1MB
testData, err := generateRandomData(testSize)
if err != nil {
t.Fatalf("Failed to generate test data: %v", err)
}
// Create temporary file
inputFile := createTempFile(t, testData)
defer os.Remove(inputFile)
encryptedFile := inputFile + ".parallel.enc"
decryptedFile := inputFile + ".parallel.dec"
defer os.Remove(encryptedFile)
defer os.Remove(decryptedFile)
// Open input file
inFile, err := os.Open(inputFile)
if err != nil {
t.Fatalf("Failed to open input file: %v", err)
}
defer inFile.Close()
// Create encrypted output file
outFile, err := os.Create(encryptedFile)
if err != nil {
t.Fatalf("Failed to create encrypted output file: %v", err)
}
defer outFile.Close()
// Create parallel processing options
// Create processing options - first test with non-parallel mode
options := DefaultStreamOptions()
options.UseParallel = true
options.MaxWorkers = 4 // Use 4 worker threads
options.UseParallel = false // Disable parallel processing
options.CollectStats = true
// Perform parallel stream encryption
stats, err := xcipher.EncryptStreamWithOptions(inFile, outFile, options)
// Use memory buffer for testing
t.Log("Starting encryption")
var encryptedBuffer bytes.Buffer
// Perform stream encryption
stats, err := xcipher.EncryptStreamWithOptions(
bytes.NewReader(testData), &encryptedBuffer, options)
if err != nil {
t.Fatalf("Parallel stream encryption failed: %v", err)
t.Fatalf("Stream encryption failed: %v", err)
}
// Ensure file is written completely
outFile.Close()
// Output encryption performance statistics
t.Logf("Parallel encryption performance statistics:")
t.Logf("Encryption performance statistics:")
t.Logf("- Bytes processed: %d", stats.BytesProcessed)
t.Logf("- Blocks processed: %d", stats.BlocksProcessed)
t.Logf("- Average block size: %.2f bytes", stats.AvgBlockSize)
t.Logf("- Processing time: %v", stats.Duration())
t.Logf("- Throughput: %.2f MB/s", stats.Throughput)
t.Logf("- Worker threads: %d", stats.WorkerCount)
// Prepare for decryption
encFile, err := os.Open(encryptedFile)
if err != nil {
t.Fatalf("Failed to open encrypted file: %v", err)
}
defer encFile.Close()
// Get encrypted data
encryptedData := encryptedBuffer.Bytes()
t.Logf("Encrypted data size: %d bytes", len(encryptedData))
decFile, err := os.Create(decryptedFile)
if err != nil {
t.Fatalf("Failed to create decrypted output file: %v", err)
}
defer decFile.Close()
// Perform parallel stream decryption
_, err = xcipher.DecryptStreamWithOptions(encFile, decFile, options)
if err != nil {
t.Fatalf("Parallel stream decryption failed: %v", err)
// Check if encrypted data is valid
if len(encryptedData) <= nonceSize {
t.Fatalf("Invalid encrypted data, length too short: %d bytes", len(encryptedData))
}
// Close file to ensure data is written
decFile.Close()
// Start decryption
t.Log("Starting decryption")
var decryptedBuffer bytes.Buffer
// Read decrypted data for verification
decryptedData, err := ioutil.ReadFile(decryptedFile)
// Perform stream decryption
decStats, err := xcipher.DecryptStreamWithOptions(
bytes.NewReader(encryptedData), &decryptedBuffer, options)
if err != nil {
t.Fatalf("Failed to read decrypted file: %v", err)
t.Fatalf("Stream decryption failed: %v (encrypted data size: %d bytes)", err, len(encryptedData))
}
// Output decryption performance statistics
t.Logf("Decryption performance statistics:")
t.Logf("- Bytes processed: %d", decStats.BytesProcessed)
t.Logf("- Blocks processed: %d", decStats.BlocksProcessed)
t.Logf("- Average block size: %.2f bytes", decStats.AvgBlockSize)
t.Logf("- Processing time: %v", decStats.Duration())
t.Logf("- Throughput: %.2f MB/s", decStats.Throughput)
// Get decrypted data
decryptedData := decryptedBuffer.Bytes()
// Verify data
if !bytes.Equal(testData, decryptedData) {
t.Fatal("Parallel stream encrypted/decrypted data does not match")
t.Fatal("Stream encrypted/decrypted data does not match")
}
t.Logf("Successfully parallel stream processed %d bytes of data", testSize)
t.Logf("Successfully completed stream processing of %d bytes", testSize)
}
// TestStreamCancellation tests cancellation of stream encryption/decryption operations
@@ -428,22 +335,68 @@ func TestStreamErrors(t *testing.T) {
// Initialize cipher
xcipher := NewXCipher(key)
// Test invalid buffer size
t.Run("InvalidBufferSize", func(t *testing.T) {
var buf bytes.Buffer
options := DefaultStreamOptions()
options.BufferSize = 1 // Too small buffer
_, err := xcipher.EncryptStreamWithOptions(bytes.NewReader([]byte("test")), &buf, options)
if err == nil || !errors.Is(err, ErrBufferSizeTooSmall) {
t.Fatalf("Expected buffer too small error, but got: %v", err)
// Generate test data
testData, err := generateRandomData(1024)
if err != nil {
t.Fatalf("Failed to generate test data: %v", err)
}
options.BufferSize = 10 * 1024 * 1024 // Too large buffer
_, err = xcipher.EncryptStreamWithOptions(bytes.NewReader([]byte("test")), &buf, options)
if err == nil || !errors.Is(err, ErrBufferSizeTooLarge) {
t.Fatalf("Expected buffer too large error, but got: %v", err)
}
// Test case with too small buffer (1 byte)
t.Run("BufferTooSmall", func(t *testing.T) {
// Create new options for each subtest to avoid shared state
options := DefaultStreamOptions()
options.BufferSize = 1 // Extremely small buffer
options.CollectStats = true // Ensure stats are collected
var buffer bytes.Buffer
stats, err := xcipher.EncryptStreamWithOptions(
bytes.NewReader(testData), &buffer, options)
// Verify that buffer size was automatically adjusted instead of returning error
if err != nil {
t.Errorf("Expected automatic buffer size adjustment, but got error: %v", err)
}
// Check if buffer was adjusted to minimum valid size
if stats != nil && stats.BufferSize < minBufferSize {
t.Errorf("Buffer size should be greater than or equal to minimum %d, but got %d",
minBufferSize, stats.BufferSize)
}
if stats != nil {
t.Logf("Requested buffer size: %d, actually used: %d", options.BufferSize, stats.BufferSize)
}
})
// Test case with too large buffer (10MB)
t.Run("BufferTooLarge", func(t *testing.T) {
// Create new options for each subtest to avoid shared state
options := DefaultStreamOptions()
options.BufferSize = 10 * 1024 * 1024 // 10MB, potentially too large
options.CollectStats = true // Ensure stats are collected
var buffer bytes.Buffer
stats, err := xcipher.EncryptStreamWithOptions(
bytes.NewReader(testData), &buffer, options)
// Verify that buffer size was automatically adjusted instead of returning error
if err != nil {
t.Errorf("Expected automatic adjustment of oversized buffer, but got error: %v", err)
}
// Check if buffer was adjusted to a reasonable size
if stats != nil && stats.BufferSize > maxBufferSize {
t.Errorf("Buffer size should be less than or equal to maximum %d, but got %d",
maxBufferSize, stats.BufferSize)
}
if stats != nil {
t.Logf("Requested buffer size: %d, actually used: %d", options.BufferSize, stats.BufferSize)
}
})
})
// Test authentication failure
@@ -526,3 +479,264 @@ type errorWriter struct {
func (w *errorWriter) Write(p []byte) (n int, err error) {
return 0, w.err
}
// TestCPUFeatureDetection tests CPU feature detection functionality
func TestCPUFeatureDetection(t *testing.T) {
// Get system optimization info
info := GetSystemOptimizationInfo()
// Output detected CPU features
t.Logf("CPU architecture: %s", info.Architecture)
t.Logf("CPU core count: %d", info.NumCPUs)
t.Logf("AVX support: %v", info.HasAVX)
t.Logf("AVX2 support: %v", info.HasAVX2)
t.Logf("SSE4.1 support: %v", info.HasSSE41)
t.Logf("NEON support: %v", info.HasNEON)
t.Logf("Estimated L1 cache size: %d KB", info.EstimatedL1Cache/1024)
t.Logf("Estimated L2 cache size: %d KB", info.EstimatedL2Cache/1024)
t.Logf("Estimated L3 cache size: %d MB", info.EstimatedL3Cache/1024/1024)
// Check recommended parameters
t.Logf("Recommended buffer size: %d KB", info.RecommendedBufferSize/1024)
t.Logf("Recommended worker count: %d", info.RecommendedWorkers)
// Simple validation of recommended parameters
if info.RecommendedBufferSize < minBufferSize || info.RecommendedBufferSize > maxBufferSize {
t.Errorf("Recommended buffer size %d outside valid range [%d, %d]",
info.RecommendedBufferSize, minBufferSize, maxBufferSize)
}
if info.RecommendedWorkers < minWorkers || info.RecommendedWorkers > maxWorkers {
t.Errorf("Recommended worker count %d outside valid range [%d, %d]",
info.RecommendedWorkers, minWorkers, maxWorkers)
}
}
// TestDynamicParameterAdjustment tests dynamic parameter adjustment system
func TestDynamicParameterAdjustment(t *testing.T) {
// Test different buffer size requests
testCases := []struct {
requestedSize int
description string
}{
{0, "Zero request (use auto-optimization)"},
{4 * 1024, "Below minimum"},
{16 * 1024, "Normal small value"},
{64 * 1024, "Medium value"},
{256 * 1024, "Larger value"},
{2 * 1024 * 1024, "Above maximum"},
}
for _, tc := range testCases {
t.Run(tc.description, func(t *testing.T) {
// Get adjusted buffer size
adjustedSize := adaptiveBufferSize(tc.requestedSize)
t.Logf("Requested size: %d, adjusted size: %d", tc.requestedSize, adjustedSize)
// Validate adjusted size is within valid range
if adjustedSize < minBufferSize {
t.Errorf("Adjusted buffer size %d less than minimum %d", adjustedSize, minBufferSize)
}
if adjustedSize > maxBufferSize {
t.Errorf("Adjusted buffer size %d greater than maximum %d", adjustedSize, maxBufferSize)
}
})
}
// Test different worker thread count requests
workerTestCases := []struct {
requestedWorkers int
bufferSize int
description string
}{
{0, 16 * 1024, "Auto-select (small buffer)"},
{0, 512 * 1024, "Auto-select (large buffer)"},
{1, 64 * 1024, "Single thread request"},
{12, 64 * 1024, "Multi-thread request"},
}
for _, tc := range workerTestCases {
t.Run(tc.description, func(t *testing.T) {
// Get adjusted worker count
adjustedWorkers := adaptiveWorkerCount(tc.requestedWorkers, tc.bufferSize)
t.Logf("Requested workers: %d, buffer size: %d, adjusted workers: %d",
tc.requestedWorkers, tc.bufferSize, adjustedWorkers)
// Validate adjusted worker count is within valid range
if adjustedWorkers < minWorkers {
t.Errorf("Adjusted worker count %d less than minimum %d", adjustedWorkers, minWorkers)
}
if adjustedWorkers > maxWorkers {
t.Errorf("Adjusted worker count %d greater than maximum %d", adjustedWorkers, maxWorkers)
}
})
}
}
// TestOptimizedStreamOptions tests optimized stream options
func TestOptimizedStreamOptions(t *testing.T) {
// Get optimized stream options
options := GetOptimizedStreamOptions()
t.Logf("Optimized stream options:")
t.Logf("- Buffer size: %d KB", options.BufferSize/1024)
t.Logf("- Use parallel: %v", options.UseParallel)
t.Logf("- Max workers: %d", options.MaxWorkers)
// Validate options are within valid ranges
if options.BufferSize < minBufferSize || options.BufferSize > maxBufferSize {
t.Errorf("Buffer size %d outside valid range [%d, %d]",
options.BufferSize, minBufferSize, maxBufferSize)
}
if options.MaxWorkers < minWorkers || options.MaxWorkers > maxWorkers {
t.Errorf("Max worker count %d outside valid range [%d, %d]",
options.MaxWorkers, minWorkers, maxWorkers)
}
}
// TestZeroCopyMechanism tests zero-copy mechanism
func TestZeroCopyMechanism(t *testing.T) {
// Test zero-copy string conversion between string and byte slice
original := "测试零拷贝字符串转换"
byteData := stringToBytes(original)
restored := bytesToString(byteData)
if original != restored {
t.Errorf("Zero-copy string conversion failed: %s != %s", original, restored)
}
// Test buffer reuse
data := []byte("测试缓冲区重用")
// Request a buffer larger than original data
largerCap := len(data) * 2
newBuf := reuseBuffer(data, largerCap)
// Verify data was copied correctly
if !bytes.Equal(data, newBuf[:len(data)]) {
t.Error("Data mismatch after buffer reuse")
}
// Verify capacity was increased
if cap(newBuf) < largerCap {
t.Errorf("Buffer capacity not properly increased: %d < %d", cap(newBuf), largerCap)
}
// Test reuse when original buffer is large enough
largeBuf := make([]byte, 100)
copy(largeBuf, data)
// Request capacity smaller than original buffer
smallerCap := 50
reusedBuf := reuseBuffer(largeBuf, smallerCap)
// Verify it's the same underlying array (by comparing length)
if len(reusedBuf) != smallerCap {
t.Errorf("Reused buffer length incorrect: %d != %d", len(reusedBuf), smallerCap)
}
// Verify data integrity
if !bytes.Equal(largeBuf[:len(data)], data) {
t.Error("Original data corrupted after reuse")
}
}
// TestAutoParallelDecision tests automatic parallel processing decision
func TestAutoParallelDecision(t *testing.T) {
// Generate random key
key, err := generateRandomKey()
if err != nil {
t.Fatalf("Failed to generate key: %v", err)
}
// Initialize cipher
xcipher := NewXCipher(key)
testCases := []struct {
name string
dataSize int // Data size in bytes
forceParallel bool // Whether to force parallel mode
}{
{"Small data", 10 * 1024, false}, // 10KB
{"Medium data", 500 * 1024, false}, // 500KB
{"Large data", 2 * 1024 * 1024, true}, // 2MB - force parallel mode
}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
// Generate test data
testData, err := generateRandomData(tc.dataSize)
if err != nil {
t.Fatalf("Failed to generate test data: %v", err)
}
// Create default options and enable stats collection
options := DefaultStreamOptions()
options.CollectStats = true
options.UseParallel = tc.forceParallel // For large data, force parallel mode
// Create temporary file for testing
var encBuffer bytes.Buffer
var stats *StreamStats
// For large data, use file IO instead of memory buffer to ensure parallel mode is triggered
if tc.dataSize >= parallelThreshold {
// Create temporary file
tempFile := createTempFile(t, testData)
defer os.Remove(tempFile)
// Create temporary output file
tempOutFile, err := os.CreateTemp("", "xcipher-test-*")
if err != nil {
t.Fatalf("Failed to create temporary output file: %v", err)
}
tempOutPath := tempOutFile.Name()
tempOutFile.Close()
defer os.Remove(tempOutPath)
// Open file for encryption
inFile, err := os.Open(tempFile)
if err != nil {
t.Fatalf("Failed to open temporary file: %v", err)
}
defer inFile.Close()
outFile, err := os.Create(tempOutPath)
if err != nil {
t.Fatalf("Failed to open output file: %v", err)
}
defer outFile.Close()
// Perform encryption
stats, err = xcipher.EncryptStreamWithOptions(inFile, outFile, options)
if err != nil {
t.Fatalf("Encryption failed: %v", err)
}
} else {
// Use memory buffer for small data
stats, err = xcipher.EncryptStreamWithOptions(
bytes.NewReader(testData), &encBuffer, options)
if err != nil {
t.Fatalf("Encryption failed: %v", err)
}
}
// Output decision results
t.Logf("Data size: %d bytes", tc.dataSize)
t.Logf("Auto decision: Use parallel=%v, workers=%d, buffer size=%d",
stats.ParallelProcessing, stats.WorkerCount, stats.BufferSize)
t.Logf("Performance: Time=%v, throughput=%.2f MB/s",
stats.Duration(), stats.Throughput)
// Verify parallel processing state matches expectation
if tc.forceParallel && !stats.ParallelProcessing {
t.Errorf("Forced parallel processing was set, but system did not use parallel mode")
}
})
}
}