|
1 | 1 | """High-value tests for Stream - focusing on lifecycle, resource cleanup, and state management.""" |
2 | 2 |
|
3 | 3 | from collections.abc import AsyncIterator |
4 | | -from typing import Any, Unpack |
| 4 | +from typing import Any, ClassVar, Unpack |
5 | 5 | from unittest.mock import AsyncMock |
6 | 6 |
|
7 | 7 | import pytest |
8 | 8 | from pydantic import Field |
9 | 9 |
|
10 | | -from celeste.exceptions import StreamNotExhaustedError |
| 10 | +from celeste.exceptions import StreamEventError, StreamNotExhaustedError |
11 | 11 | from celeste.io import Chunk, FinishReason, Output, Usage |
12 | 12 | from celeste.parameters import Parameters |
13 | 13 | from celeste.streaming import Stream |
@@ -720,3 +720,159 @@ def _parse_output( # type: ignore[override] |
720 | 720 | assert output.finish_reason is not None |
721 | 721 | assert isinstance(output.finish_reason, TypedFinishReason) |
722 | 722 | assert output.finish_reason.reason == "stop" |
| 723 | + |
| 724 | + |
class PipelineStream(Stream[ConcreteOutput, Parameters, Chunk]):
    """Stream exercising the base _parse_chunk pipeline (for error-detection tests).

    ConcreteStream replaces _parse_chunk wholesale; this class instead overrides
    only _parse_chunk_content and _aggregate_content, so incoming events flow
    through the base _parse_stream_error → StreamEventError machinery.
    """

    _chunk_class: ClassVar[type[Chunk]] = Chunk
    _output_class: ClassVar[type[Output]] = ConcreteOutput
    _empty_content: ClassVar[str] = ""

    def _parse_chunk_content(self, event_data: dict[str, Any]) -> str | None:
        """Pull streamed text out of the event's delta field (falsy → None)."""
        delta = event_data.get("delta")
        return delta if delta else None

    def _aggregate_content(self, chunks: list[Chunk]) -> str:
        """Concatenate the textual content of every received chunk."""
        parts = [str(c.content) for c in chunks]
        return "".join(parts)
| 745 | + |
class TestStreamErrorDetection:
    """Exercise Stream error detection via the base _parse_stream_error pipeline."""

    async def test_type_based_error_raises_stream_event_error(self) -> None:
        """Type-based error pattern (Anthropic) must raise StreamEventError."""
        error_event = {
            "type": "error",
            "error": {"type": "overloaded_error", "message": "Server overloaded"},
        }
        stream = PipelineStream(
            _async_iter([error_event]),
            stream_metadata={"provider": "anthropic"},
        )
        with pytest.raises(StreamEventError, match="Server overloaded") as exc_info:
            async for _ in stream:
                pass
        raised = exc_info.value
        assert raised.error_type == "overloaded_error"
        assert raised.provider == "anthropic"
        assert raised.event_data == error_event

    async def test_field_based_error_raises_stream_event_error(self) -> None:
        """Field-based error pattern (ChatCompletions) must raise StreamEventError."""
        error_event = {"error": {"type": "invalid_request", "message": "Bad request"}}
        stream = PipelineStream(
            _async_iter([error_event]),
            stream_metadata={"provider": "openai"},
        )
        with pytest.raises(StreamEventError, match="Bad request") as exc_info:
            async for _ in stream:
                pass
        assert exc_info.value.error_type == "invalid_request"
        assert exc_info.value.provider == "openai"

    async def test_field_based_error_falls_back_to_code_field(self) -> None:
        """Field-based error without 'type' must fall back to 'code' field."""
        error_event = {"error": {"code": "rate_limit_exceeded", "message": "Rate limited"}}
        stream = PipelineStream(_async_iter([error_event]))
        with pytest.raises(StreamEventError) as exc_info:
            async for _ in stream:
                pass
        assert exc_info.value.error_type == "rate_limit_exceeded"

    async def test_type_based_error_with_string_error_value(self) -> None:
        """Type-based error with non-dict error value must use string fallback."""
        error_event = {"type": "error", "error": "Something went wrong"}
        stream = PipelineStream(_async_iter([error_event]))
        with pytest.raises(StreamEventError, match="Something went wrong") as exc_info:
            async for _ in stream:
                pass
        # No structured error dict, so no type could be extracted.
        assert exc_info.value.error_type is None

    async def test_error_type_fields_classvar_override(self) -> None:
        """ClassVar override of _error_type_fields must change field lookup order."""

        class GoogleLikeStream(PipelineStream):
            _error_type_fields: ClassVar[tuple[str, ...]] = ("status", "code")

        payload = {
            "error": {
                "status": "PERMISSION_DENIED",
                "code": 403,
                "message": "Forbidden",
            },
        }
        stream = GoogleLikeStream(_async_iter([payload]))
        with pytest.raises(StreamEventError) as exc_info:
            async for _ in stream:
                pass
        # "status" comes first in the override, so it wins over "code".
        assert exc_info.value.error_type == "PERMISSION_DENIED"

    async def test_non_error_events_pass_through(self) -> None:
        """Normal events must not trigger error detection."""
        stream = PipelineStream(_async_iter([{"delta": "Hello"}, {"delta": " world"}]))
        received = [chunk async for chunk in stream]
        assert len(received) == 2
        assert stream.output.content == "Hello world"

    async def test_error_after_successful_chunks(self) -> None:
        """Error mid-stream (after successful chunks) must raise StreamEventError."""
        stream = PipelineStream(
            _async_iter(
                [
                    {"delta": "Hello"},
                    {
                        "type": "error",
                        "error": {"type": "server_error", "message": "Internal error"},
                    },
                ]
            ),
            stream_metadata={"provider": "test"},
        )
        seen: list[Chunk] = []
        with pytest.raises(StreamEventError, match="Internal error"):
            async for chunk in stream:
                seen.append(chunk)
        # The chunk emitted before the error event must have been yielded.
        assert len(seen) == 1
        assert seen[0].content == "Hello"

    async def test_error_with_no_message_uses_default(self) -> None:
        """Error event without message field must use 'Unknown error' default."""
        stream = PipelineStream(_async_iter([{"error": {"type": "mystery_error"}}]))
        with pytest.raises(StreamEventError, match="Unknown error"):
            async for _ in stream:
                pass

    async def test_error_provides_full_event_data(self) -> None:
        """StreamEventError must include the full original event data."""
        payload = {
            "type": "error",
            "error": {"type": "api_error", "message": "Fail"},
            "extra": "data",
        }
        stream = PipelineStream(_async_iter([payload]))
        with pytest.raises(StreamEventError) as exc_info:
            async for _ in stream:
                pass
        assert exc_info.value.event_data == payload
0 commit comments