From 8900d645c8e03bfd0c585e411c5e462051dddba4 Mon Sep 17 00:00:00 2001
From: Sambhav Dixit <94298612+sambhavnoobcoder@users.noreply.github.com>
Date: Sun, 27 Oct 2024 19:12:14 +0530
Subject: [PATCH] Add integration test for token counting in log_step_metadata

- Implemented test_token_counting_integration to verify the correct total
  token count when using a mocked tokenizer, ensuring that prompt and
  response token counts are accurately aggregated.
---
 tests/agents/test_agent_logging.py | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/tests/agents/test_agent_logging.py b/tests/agents/test_agent_logging.py
index 33e59843..98b0f8a5 100644
--- a/tests/agents/test_agent_logging.py
+++ b/tests/agents/test_agent_logging.py
@@ -41,3 +41,9 @@ class TestAgentLogging(unittest.TestCase):
     def test_log_step_metadata_timestamp(self):
         log_result = self.agent.log_step_metadata(1, "prompt", "response")
         self.assertIn('timestamp', log_result)
+
+    def test_token_counting_integration(self):
+        self.mock_tokenizer.count_tokens.side_effect = [150, 250]
+        log_result = self.agent.log_step_metadata(1, "prompt", "response")
+
+        self.assertEqual(log_result['tokens']['total'], 400)
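
Note for reviewers: the new test relies on a setUp fixture (not shown in this
diff) that injects self.mock_tokenizer into self.agent, and it assumes that
log_step_metadata calls count_tokens once for the prompt and once for the
response, summing the two counts into tokens['total']. The following is a
minimal, standalone sketch of that assumed behaviour; the free-standing
log_step_metadata helper is hypothetical and used purely for illustration, not
the project's actual implementation.

from unittest.mock import MagicMock

def log_step_metadata(tokenizer, step, prompt, response):
    # Assumed aggregation: one count_tokens call for the prompt, one for the
    # response, with the sum exposed as the 'total' field the test asserts on.
    prompt_tokens = tokenizer.count_tokens(prompt)
    response_tokens = tokenizer.count_tokens(response)
    return {
        "step": step,
        "tokens": {
            "prompt": prompt_tokens,
            "response": response_tokens,
            "total": prompt_tokens + response_tokens,
        },
    }

# Mirrors the test: side_effect feeds 150 then 250, so the total should be 400.
mock_tokenizer = MagicMock()
mock_tokenizer.count_tokens.side_effect = [150, 250]
assert log_step_metadata(mock_tokenizer, 1, "prompt", "response")["tokens"]["total"] == 400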