# Q-TensorFormer / tests / test_quantum_layers.py
# Author: Premchan369
# v3.0.0: Tests (commit bcadbf4, verified)
"""
Tests for quantum layers (PennyLane required).
Verifies:
- Angle embedding output range
- Entanglement monitor behavior
- Classical fallback when no PennyLane
"""
import sys
import os
# Prepend the repository root so `from src... import ...` works inside the
# test methods without installing the package. Must run before any src import.
sys.path.insert(0, os.path.join(os.path.dirname(__file__), ".."))
import torch
import pytest
# PennyLane is optional: HAS_PENNYLANE gates the quantum-layer tests via
# skipif, while the classical-fallback tests run regardless.
try:
    import pennylane as qml
    HAS_PENNYLANE = True
except ImportError:
    HAS_PENNYLANE = False
@pytest.mark.skipif(not HAS_PENNYLANE, reason="PennyLane not installed")
class TestQuantumAngleEmbedding:
    """Shape, value-range, and gradient checks for QuantumAngleEmbedding."""

    def test_output_shape(self):
        """A (batch, n_qubits) input maps to a (batch, n_outputs) output."""
        from src.quantum_layers import QuantumAngleEmbedding
        embedding = QuantumAngleEmbedding(n_qubits=4, n_layers=2, n_outputs=4)
        result = embedding(torch.randn(8, 4))
        assert result.shape == (8, 4)

    def test_output_range(self):
        """PauliZ expectation values must stay within [-1, 1] (small slack)."""
        from src.quantum_layers import QuantumAngleEmbedding
        embedding = QuantumAngleEmbedding(n_qubits=4, n_layers=2, n_outputs=4)
        embedding.eval()
        with torch.no_grad():
            result = embedding(torch.randn(16, 4))
        # Expectation values of PauliZ are in [-1, 1]
        assert result.min() >= -1.1
        assert result.max() <= 1.1

    def test_batch_dimensions(self):
        """An extra sequence axis is preserved: (B, S, Q) -> (B, S, O)."""
        from src.quantum_layers import QuantumAngleEmbedding
        embedding = QuantumAngleEmbedding(n_qubits=4, n_layers=2, n_outputs=3)
        sequence_batch = torch.randn(2, 10, 4)  # batch, seq, n_qubits
        assert embedding(sequence_batch).shape == (2, 10, 3)

    def test_gradient_flow(self):
        """Backprop populates a gradient on every qlayer parameter."""
        from src.quantum_layers import QuantumAngleEmbedding
        embedding = QuantumAngleEmbedding(n_qubits=4, n_layers=1)
        output = embedding(torch.randn(4, 4, requires_grad=False))
        output.sum().backward()
        assert all(p.grad is not None for p in embedding.qlayer.parameters())
class TestEntanglementMonitor:
    """Entropy-based entanglement proxy computed from attention maps."""

    def test_output_shape(self):
        """One entropy value is produced per (batch, head) pair."""
        from src.quantum_layers import EntanglementMonitor
        monitor = EntanglementMonitor(n_qubits=4)
        # Simulated attention weights: (batch, heads, seq, seq)
        weights = torch.randn(2, 4, 16, 16).softmax(dim=-1)
        assert monitor(weights).shape == (2, 4)

    def test_uniform_attention(self):
        """Perfectly uniform attention over 8 keys gives maximum entropy."""
        from src.quantum_layers import EntanglementMonitor
        monitor = EntanglementMonitor(n_qubits=4)
        uniform = torch.ones(2, 2, 8, 8) / 8
        max_entropy = -torch.log(torch.tensor(1.0 / 8))
        assert torch.allclose(monitor(uniform), max_entropy, atol=0.2)
class TestFallback:
    """The classical stand-in used when PennyLane is not installed."""

    def test_classical_fallback(self):
        """Fallback matches the quantum layer's shape and tanh output range."""
        from src.quantum_layers import ClassicalQuantumFallback
        fallback = ClassicalQuantumFallback(n_qubits=4, n_layers=2, n_outputs=4)
        output = fallback(torch.randn(8, 4))
        assert output.shape == (8, 4)
        # Tanh output in [-1, 1]
        assert output.min() >= -1.0
        assert output.max() <= 1.0
class TestCreateQuantumEmbedding:
    """Factory helper that builds an embedding for a given model dimension."""

    def test_factory(self):
        """The constructed layer preserves the batch dimension."""
        from src.quantum_layers import create_quantum_embedding
        embedding = create_quantum_embedding(128, n_qubits=4, n_layers=2)
        output = embedding(torch.randn(4, 128))
        assert output.shape[0] == 4