Laurent Mazare
2023-10-13 17:21:20 +02:00
committed by GitHub
parent eefad2b95f
commit 07af87a1d8

@@ -60,7 +60,7 @@ if __name__ == "__main__":
     loss = torch.nn.L1Loss()
     error = loss(hf_pooled, candle_pooled).mean().item()
-    print(f"Mean error between torch-referenze and candle: {error}")
+    print(f"Mean error between torch-reference and candle: {error}")
     # Quantize all attention 'weights'
     quantized_tensors = {}
@@ -101,4 +101,4 @@ if __name__ == "__main__":
     candle_pooled_2 = average_pool(torch.tensor(encoder_out_2.values()), hf_tokenized["attention_mask"])
     error = loss(hf_pooled, candle_pooled_2).mean().item()
-    print(f"Mean error between torch-referenze and quantized-candle: {error}")
+    print(f"Mean error between torch-reference and quantized-candle: {error}")