From 427bf48ebf49a003eb5990d35d0dd78ec0284bce Mon Sep 17 00:00:00 2001
From: Sanskar Jethi <sansyrox@gmail.com>
Date: Sun, 15 Dec 2024 06:37:46 +0000
Subject: [PATCH 01/14] feat: add support for streaming responses

---
 integration_tests/base_routes.py | 129 +++++++++++++++++++++++++++++++
 src/types/response.rs            | 126 ++++++++++++++++++++++++------
 2 files changed, 233 insertions(+), 22 deletions(-)

diff --git a/integration_tests/base_routes.py b/integration_tests/base_routes.py
index 54e2e475e..e441d12f7 100644
--- a/integration_tests/base_routes.py
+++ b/integration_tests/base_routes.py
@@ -1082,6 +1082,135 @@ def create_item(request, body: CreateItemBody, query: CreateItemQueryParamsParam
     return CreateItemResponse(success=True, items_changed=2)
 
 
+# --- Streaming responses ---
+
+@app.get("/stream/sync")
+def sync_stream():
+    def number_generator():
+        for i in range(5):
+            yield f"Chunk {i}\n".encode()
+    
+    return Response(
+        status_code=200,
+        headers={"Content-Type": "text/plain"},
+        description=number_generator()
+    )
+
+@app.get("/stream/async")
+async def async_stream():
+    async def async_generator():
+        import asyncio
+        for i in range(5):
+            await asyncio.sleep(1)  # Simulate async work
+            yield f"Async Chunk {i}\n".encode()
+    
+    return Response(
+        status_code=200,
+        headers={"Content-Type": "text/plain"},
+        description=async_generator()
+    )
+
+@app.get("/stream/mixed")
+async def mixed_stream():
+    async def mixed_generator():
+        import asyncio
+        # Binary data
+        yield b"Binary chunk\n"
+        await asyncio.sleep(0.5)
+        
+        # String data
+        yield "String chunk\n".encode()
+        await asyncio.sleep(0.5)
+        
+        # Integer data
+        yield str(42).encode() + b"\n"
+        await asyncio.sleep(0.5)
+        
+        # JSON data
+        import json
+        data = {"message": "JSON chunk", "number": 123}
+        yield json.dumps(data).encode() + b"\n"
+    
+    return Response(
+        status_code=200,
+        headers={"Content-Type": "text/plain"},
+        description=mixed_generator()
+    )
+
+@app.get("/stream/events")
+async def server_sent_events():
+    async def event_generator():
+        import asyncio
+        import json
+        import time
+        
+        # Regular event
+        yield f"event: message\ndata: {json.dumps({'time': time.time(), 'type': 'start'})}\n\n".encode()
+        await asyncio.sleep(1)
+        
+        # Event with ID
+        yield f"id: 1\nevent: update\ndata: {json.dumps({'progress': 50})}\n\n".encode()
+        await asyncio.sleep(1)
+        
+        # Multiple data lines
+        data = json.dumps({'status': 'complete', 'results': [1, 2, 3]}, indent=2)
+        yield f"event: complete\ndata: {data}\n\n".encode()
+    
+    return Response(
+        status_code=200,
+        headers={
+            "Content-Type": "text/event-stream",
+            "Cache-Control": "no-cache",
+            "Connection": "keep-alive"
+        },
+        description=event_generator()
+    )
+
+@app.get("/stream/large-file")
+async def stream_large_file():
+    async def file_generator():
+        # Simulate streaming a large file in chunks
+        chunk_size = 1024  # 1KB chunks
+        total_size = 10 * chunk_size  # 10KB total
+        
+        for offset in range(0, total_size, chunk_size):
+            # Simulate reading file chunk
+            chunk = b"X" * min(chunk_size, total_size - offset)
+            yield chunk
+    
+    return Response(
+        status_code=200,
+        headers={
+            "Content-Type": "application/octet-stream",
+            "Content-Disposition": "attachment; filename=large-file.bin"
+        },
+        description=file_generator()
+    )
+
+@app.get("/stream/csv")
+async def stream_csv():
+    async def csv_generator():
+        # CSV header
+        yield "id,name,value\n".encode()
+        
+        import asyncio
+        import random
+        
+        # Generate rows
+        for i in range(5):
+            await asyncio.sleep(0.5)  # Simulate data processing
+            row = f"{i},item-{i},{random.randint(1, 100)}\n"
+            yield row.encode()
+    
+    return Response(
+        status_code=200,
+        headers={
+            "Content-Type": "text/csv",
+            "Content-Disposition": "attachment; filename=data.csv"
+        },
+        description=csv_generator()
+    )
+
 def main():
     app.set_response_header("server", "robyn")
     app.serve_directory(
diff --git a/src/types/response.rs b/src/types/response.rs
index 124e465cc..775a8a529 100644
--- a/src/types/response.rs
+++ b/src/types/response.rs
@@ -1,27 +1,74 @@
 use actix_http::{body::BoxBody, StatusCode};
-use actix_web::{HttpRequest, HttpResponse, HttpResponseBuilder, Responder};
+use actix_web::{HttpRequest, HttpResponse, HttpResponseBuilder, Responder, Error, web::Bytes};
 use pyo3::{
     exceptions::PyIOError,
     prelude::*,
-    types::{PyBytes, PyDict},
+    types::{PyBytes, PyDict, PyList},
 };
+use futures::stream::Stream;
+use futures_util::StreamExt;
+use std::pin::Pin;
 
 use crate::io_helpers::{apply_hashmap_headers, read_file};
 use crate::types::{check_body_type, check_description_type, get_description_from_pyobject};
 
 use super::headers::Headers;
 
-#[derive(Debug, Clone, FromPyObject)]
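+/// The body of an HTTP response: either a fully buffered payload or a
+/// sequence of pre-collected chunks sent with chunked transfer encoding.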
+#[derive(Debug, Clone)]
+pub enum ResponseBody {
+    Static(Vec<u8>),
+    Streaming(Vec<Vec<u8>>),
+}
+
+#[derive(Debug, Clone)]
 pub struct Response {
     pub status_code: u16,
     pub response_type: String,
     pub headers: Headers,
-    // https://pyo3.rs/v0.19.2/function.html?highlight=from_py_#per-argument-options
-    #[pyo3(from_py_with = "get_description_from_pyobject")]
-    pub description: Vec<u8>,
+    pub body: ResponseBody,
     pub file_path: Option<String>,
 }
 
+impl<'a> FromPyObject<'a> for Response {
+    fn extract(ob: &'a PyAny) -> PyResult<Self> {
+        let status_code = ob.getattr("status_code")?.extract()?;
+        let response_type = ob.getattr("response_type")?.extract()?;
+        let headers = ob.getattr("headers")?.extract()?;
+        let description = ob.getattr("description")?;
+        let file_path = ob.getattr("file_path")?.extract()?;
+
+        // str and bytes are themselves iterable in Python, so exclude them
+        // before treating the description as a stream of chunks.
+        let is_stream = !description.is_instance_of::<pyo3::types::PyString>()
+            && !description.is_instance_of::<pyo3::types::PyBytes>()
+            && description.iter().is_ok();
+
+        let body = if is_stream {
+            let mut chunks = Vec::new();
+            for item in description.iter()? {
+                let item = item?;
+                let chunk = if item.is_instance_of::<pyo3::types::PyBytes>() {
+                    item.extract::<Vec<u8>>()?
+                } else if item.is_instance_of::<pyo3::types::PyString>() {
+                    item.extract::<String>()?.into_bytes()
+                } else if item.is_instance_of::<pyo3::types::PyInt>() {
+                    item.extract::<i64>()?.to_string().into_bytes()
+                } else {
+                    return Err(PyErr::new::<pyo3::exceptions::PyTypeError, _>(
+                        "Stream items must be bytes, str, or int"
+                    ));
+                };
+                chunks.push(chunk);
+            }
+            ResponseBody::Streaming(chunks)
+        } else {
+            ResponseBody::Static(get_description_from_pyobject(description)?)
+        };
+
+        Ok(Response {
+            status_code,
+            response_type,
+            headers,
+            body,
+            file_path,
+        })
+    }
+}
+
 impl Responder for Response {
     type Body = BoxBody;
 
@@ -29,7 +76,17 @@ impl Responder for Response {
         let mut response_builder =
             HttpResponseBuilder::new(StatusCode::from_u16(self.status_code).unwrap());
         apply_hashmap_headers(&mut response_builder, &self.headers);
-        response_builder.body(self.description)
+        
+        match self.body {
+            ResponseBody::Static(data) => response_builder.body(data),
+            ResponseBody::Streaming(chunks) => {
+                let stream = Box::pin(
+                    futures::stream::iter(chunks.into_iter())
+                        .map(|chunk| Ok::<Bytes, Error>(Bytes::from(chunk)))
+                ) as Pin<Box<dyn Stream<Item = Result<Bytes, Error>>>>;
+                response_builder.streaming(stream)
+            }
+        }
     }
 }
 
@@ -44,7 +101,7 @@ impl Response {
             status_code: 404,
             response_type: "text".to_string(),
             headers,
-            description: "Not found".to_owned().into_bytes(),
+            body: ResponseBody::Static("Not found".to_owned().into_bytes()),
             file_path: None,
         }
     }
@@ -59,7 +116,7 @@ impl Response {
             status_code: 500,
             response_type: "text".to_string(),
             headers,
-            description: "Internal server error".to_owned().into_bytes(),
+            body: ResponseBody::Static("Internal server error".to_owned().into_bytes()),
             file_path: None,
         }
     }
@@ -68,11 +125,21 @@ impl Response {
 impl ToPyObject for Response {
     fn to_object(&self, py: Python) -> PyObject {
         let headers = self.headers.clone().into_py(py).extract(py).unwrap();
-        // The description should only be either string or binary.
-        // it should raise an exception otherwise
-        let description = match String::from_utf8(self.description.to_vec()) {
-            Ok(description) => description.to_object(py),
-            Err(_) => PyBytes::new(py, &self.description.to_vec()).into(),
+        
+        let description = match &self.body {
+            ResponseBody::Static(data) => {
+                match String::from_utf8(data.to_vec()) {
+                    Ok(description) => description.to_object(py),
+                    Err(_) => PyBytes::new(py, data).into(),
+                }
+            },
+            ResponseBody::Streaming(chunks) => {
+                let list = PyList::empty(py);
+                for chunk in chunks {
+                    list.append(PyBytes::new(py, chunk)).unwrap();
+                }
+                list.to_object(py)
+            }
         };
 
         let response = PyResponse {
@@ -111,15 +178,22 @@ impl PyResponse {
         headers: &PyAny,
         description: Py<PyAny>,
     ) -> PyResult<Self> {
-        check_body_type(py, &description)?;
+        // A description is a stream if it is iterable but not a plain str or
+        // bytes (both of which are themselves iterable in Python).
+        let desc_ref = description.as_ref(py);
+        let is_stream = !desc_ref.is_instance_of::<pyo3::types::PyString>()
+            && !desc_ref.is_instance_of::<pyo3::types::PyBytes>()
+            && desc_ref.iter().is_ok();
+
+        // Streaming bodies skip the type check here; each chunk is validated
+        // when it is yielded.
+        if !is_stream {
+            check_body_type(py, &description)?;
+        }
 
         let headers_output: Py<Headers> = if let Ok(headers_dict) = headers.downcast::<PyDict>() {
-            // Here you'd have logic to create a Headers instance from a PyDict
-            // For simplicity, let's assume you have a method `from_dict` on Headers for this
-            let headers = Headers::new(Some(headers_dict)); // Hypothetical method
+            let headers = Headers::new(Some(headers_dict));
             Py::new(py, headers)?
         } else if let Ok(headers) = headers.extract::<Py<Headers>>() {
-            // If it's already a Py<Headers>, use it directly
             headers
         } else {
             return Err(PyErr::new::<pyo3::exceptions::PyTypeError, _>(
@@ -129,8 +203,7 @@ impl PyResponse {
 
         Ok(Self {
             status_code,
-            // we should be handling based on headers but works for now
-            response_type: "text".to_string(),
+            response_type: if is_stream { "stream".to_string() } else { "text".to_string() },
             headers: headers_output,
             description,
             file_path: None,
@@ -139,7 +212,16 @@ impl PyResponse {
 
     #[setter]
     pub fn set_description(&mut self, py: Python, description: Py<PyAny>) -> PyResult<()> {
-        check_description_type(py, &description)?;
+        // Iterable but not plain str/bytes means a streaming description;
+        // str and bytes are iterable too, so they must be excluded.
+        let desc_ref = description.as_ref(py);
+        let is_stream = !desc_ref.is_instance_of::<pyo3::types::PyString>()
+            && !desc_ref.is_instance_of::<pyo3::types::PyBytes>()
+            && desc_ref.iter().is_ok();
+
+        if is_stream {
+            self.response_type = "stream".to_string();
+        } else {
+            check_description_type(py, &description)?;
+            self.response_type = "text".to_string();
+        }
+        
         self.description = description;
         Ok(())
     }

From 56e3ab6981e2f59be12a65ede06793ee8ecddefb Mon Sep 17 00:00:00 2001
From: Sanskar Jethi <sansyrox@gmail.com>
Date: Sun, 15 Dec 2024 06:53:37 +0000
Subject: [PATCH 02/14] update docs

---
 .../documentation/api_reference/streaming.mdx | 212 ++++++++++++++++++
 docs_src/src/pages/documentation/streaming.md |   1 +
 integration_tests/test_streaming_responses.py | 194 ++++++++++++++++
 3 files changed, 407 insertions(+)
 create mode 100644 docs_src/src/pages/documentation/api_reference/streaming.mdx
 create mode 100644 docs_src/src/pages/documentation/streaming.md
 create mode 100644 integration_tests/test_streaming_responses.py

diff --git a/docs_src/src/pages/documentation/api_reference/streaming.mdx b/docs_src/src/pages/documentation/api_reference/streaming.mdx
new file mode 100644
index 000000000..0971a5787
--- /dev/null
+++ b/docs_src/src/pages/documentation/api_reference/streaming.mdx
@@ -0,0 +1,212 @@
+## Streaming Responses
+
+Like Batman's gadgets streaming from the Batcave to his utility belt, Robyn provides built-in support for streaming responses. This allows you to send data in chunks, perfect for large files, real-time updates, and server-sent events.
+
+Streaming responses are perfect for handling large datasets or real-time updates without consuming excessive memory.
+
+## Response
+
+When the Bat-Signal needs to stream continuously through the night sky, you'll want to use a generator or iterator as the `description` parameter:
+
+<Row>
+  <CodeGroup>
+    ```python
+    from robyn import Response
+
+    @app.get("/bat-signal")
+    async def stream_signal():
+        async def signal_generator():
+            while True:
+                yield b"Bat-Signal Active\n"
+                await asyncio.sleep(1)
+        
+        return Response(
+            status_code=200,
+            headers={"Content-Type": "text/plain"},
+            description=signal_generator()
+        )
+    ```
+  </CodeGroup>
+  
+  <CodeGroup>
+    ```bash
+    curl http://localhost:8000/bat-signal
+    ```
+  </CodeGroup>
+</Row>
+
+### Parameters
+
+| Name | Type | Description | Default |
+|------|------|-------------|---------|
+| status_code | int | Response status code | 200 |
+| headers | Dict[str, str] | Response headers | None |
+| description | Union[str, bytes, Generator, AsyncGenerator] | Content to stream | None |
+
+### Supported Types
+
+Like Batman's versatile arsenal, the streaming response system supports multiple data types:
+
+<Row>
+  <CodeGroup>
+    ```python
+    # Raw binary data (like Batcomputer logs)
+    yield b"Batcomputer Log Entry\n"
+    ```
+  </CodeGroup>
+  <CodeGroup>
+    ```python
+    # Text messages (like Alfred's updates)
+    yield "Master Wayne, your tea is ready\n".encode()
+    ```
+  </CodeGroup>
+  <CodeGroup>
+    ```python
+    # Numbers (like Batmobile telemetry)
+    yield str(speed).encode()
+    ```
+  </CodeGroup>
+  <CodeGroup>
+    ```python
+    # JSON data (like Gotham City surveillance)
+    yield json.dumps({"location": "Crime Alley"}).encode()
+    ```
+  </CodeGroup>
+</Row>
+
+## Server-Sent Events
+
+For real-time updates from the Batcomputer:
+
+<Row>
+  <CodeGroup>
+    ```python
+    @app.get("/batcomputer/events")
+    async def batcomputer_feed():
+        async def event_generator():
+            while True:
+                data = {
+                    "time": time.time(),
+                    "alerts": get_gotham_alerts()
+                }
+                yield f"data: {json.dumps(data)}\n\n".encode()
+                await asyncio.sleep(1)
+        
+        return Response(
+            status_code=200,
+            headers={
+                "Content-Type": "text/event-stream",
+                "Cache-Control": "no-cache",
+                "Connection": "keep-alive"
+            },
+            description=event_generator()
+        )
+    ```
+  </CodeGroup>
+  <CodeGroup>
+    ```javascript
+    const evtSource = new EventSource("/batcomputer/events");
+    evtSource.onmessage = (event) => {
+        console.log(JSON.parse(event.data));
+    };
+    ```
+  </CodeGroup>
+</Row>
+
+## File Downloads
+
+For streaming large files from the Batcomputer archives:
+
+<Row>
+  <CodeGroup>
+    ```python
+    @app.get("/batcomputer/files")
+    async def download_files():
+        async def file_generator():
+            chunk_size = 8192  # Size of a Batarang
+            with open("case_files.dat", "rb") as f:
+                while chunk := f.read(chunk_size):
+                    yield chunk
+        
+        return Response(
+            status_code=200,
+            headers={
+                "Content-Type": "application/octet-stream",
+                "Content-Disposition": "attachment; filename=evidence.dat"
+            },
+            description=file_generator()
+        )
+    ```
+  </CodeGroup>
+  <CodeGroup>
+    ```bash
+    curl -O http://localhost:8000/batcomputer/files
+    ```
+  </CodeGroup>
+</Row>
+
+## Common Headers
+
+<Steps>
+### Plain Text
+```python
+headers = {"Content-Type": "text/plain"}
+```
+
+### Server-Sent Events
+```python
+headers = {
+    "Content-Type": "text/event-stream",
+    "Cache-Control": "no-cache",
+    "Connection": "keep-alive"
+}
+```
+
+### File Downloads
+```python
+headers = {
+    "Content-Type": "application/octet-stream",
+    "Content-Disposition": "attachment; filename=file.dat"
+}
+```
+</Steps>
+
+## Error Handling
+
+Even Batman needs contingency plans:
+
+<Callout type="warning">
+  Always handle errors gracefully in your streaming responses to prevent connection hangs.
+</Callout>
+
+```python
+async def generator():
+    try:
+        for item in evidence_items:
+            yield process(item)
+    except Exception as e:
+        yield f"Alert: Batcomputer Error - {str(e)}".encode()
+        return
+```
+
+## Testing
+
+Test your streaming responses like Batman testing his equipment:
+
+```python
+@pytest.mark.asyncio
+async def test_bat_signal():
+    async with app.test_client() as client:
+        response = await client.get("/bat-signal")
+        signals = []
+        async for signal in response.content:
+            signals.append(signal)
+        assert len(signals) > 0
+```
+
+## What's next?
+
+Now, Batman wanted to scale his application across multiple cores. Robyn led him to Scaling.
+
+- [Scaling](/documentation/api_reference/scaling)
+
diff --git a/docs_src/src/pages/documentation/streaming.md b/docs_src/src/pages/documentation/streaming.md
new file mode 100644
index 000000000..0519ecba6
--- /dev/null
+++ b/docs_src/src/pages/documentation/streaming.md
@@ -0,0 +1 @@
+ 
\ No newline at end of file
diff --git a/integration_tests/test_streaming_responses.py b/integration_tests/test_streaming_responses.py
new file mode 100644
index 000000000..3ba872a58
--- /dev/null
+++ b/integration_tests/test_streaming_responses.py
@@ -0,0 +1,194 @@
+"""
+Test module for Robyn's streaming response functionality.
+
+This module contains tests for various streaming response scenarios including:
+- Basic synchronous streaming
+- Asynchronous streaming
+- Mixed content type streaming (bytes, str, int, json)
+- Server-Sent Events (SSE)
+- Large file streaming
+- CSV streaming
+
+Each test verifies both the response headers and the streamed content.
+"""
+
+import json
+import pytest
+from robyn import Robyn
+from robyn.robyn import Request
+from integration_tests.base_routes import app
+
+
+@pytest.mark.asyncio
+async def test_sync_stream():
+    """Test basic synchronous streaming response.
+    
+    Verifies that:
+    1. Response has correct content type
+    2. Chunks are received in correct order
+    3. Each chunk has expected format
+    """
+    async with app.test_client() as client:
+        response = await client.get("/stream/sync")
+        assert response.status_code == 200
+        assert response.headers["Content-Type"] == "text/plain"
+
+        chunks = []
+        async for chunk in response.content:
+            chunks.append(chunk.decode())
+
+        assert len(chunks) == 5
+        for i, chunk in enumerate(chunks):
+            assert chunk == f"Chunk {i}\n"
+
+
+@pytest.mark.asyncio
+async def test_async_stream():
+    """Test asynchronous streaming response.
+    
+    Verifies that:
+    1. Response has correct content type
+    2. Chunks are received in correct order with delays
+    3. Each chunk has expected format
+    """
+    async with app.test_client() as client:
+        response = await client.get("/stream/async")
+        assert response.status_code == 200
+        assert response.headers["Content-Type"] == "text/plain"
+
+        chunks = []
+        async for chunk in response.content:
+            chunks.append(chunk.decode())
+
+        assert len(chunks) == 5
+        for i, chunk in enumerate(chunks):
+            assert chunk == f"Async Chunk {i}\n"
+
+
+@pytest.mark.asyncio
+async def test_mixed_stream():
+    """Test streaming of mixed content types.
+    
+    Verifies that:
+    1. Response handles different content types:
+       - Binary data
+       - String data
+       - Integer data
+       - JSON data
+    2. Each chunk is correctly encoded
+    """
+    async with app.test_client() as client:
+        response = await client.get("/stream/mixed")
+        assert response.status_code == 200
+        assert response.headers["Content-Type"] == "text/plain"
+
+        expected = [
+            b"Binary chunk\n",
+            b"String chunk\n",
+            b"42\n",
+            json.dumps({"message": "JSON chunk", "number": 123}).encode() + b"\n"
+        ]
+
+        chunks = []
+        async for chunk in response.content:
+            chunks.append(chunk)
+
+        assert len(chunks) == len(expected)
+        for chunk, expected_chunk in zip(chunks, expected):
+            assert chunk == expected_chunk
+
+
+@pytest.mark.asyncio
+async def test_server_sent_events():
+    """Test Server-Sent Events (SSE) streaming.
+    
+    Verifies that:
+    1. Response has correct SSE headers
+    2. Events are properly formatted with:
+       - Event type
+       - Event ID (when provided)
+       - Event data
+    """
+    async with app.test_client() as client:
+        response = await client.get("/stream/events")
+        assert response.status_code == 200
+        assert response.headers["Content-Type"] == "text/event-stream"
+        assert response.headers["Cache-Control"] == "no-cache"
+        assert response.headers["Connection"] == "keep-alive"
+
+        events = []
+        async for chunk in response.content:
+            events.append(chunk.decode())
+
+        # Test first event (message)
+        assert "event: message\n" in events[0]
+        assert "data: {" in events[0]
+        event_data = json.loads(events[0].split("data: ")[1].strip())
+        assert "time" in event_data
+        assert event_data["type"] == "start"
+
+        # Test second event (with ID)
+        assert "id: 1\n" in events[1]
+        assert "event: update\n" in events[1]
+        event_data = json.loads(events[1].split("data: ")[1].strip())
+        assert event_data["progress"] == 50
+
+        # Test third event (complete)
+        assert "event: complete\n" in events[2]
+        event_data = json.loads(events[2].split("data: ")[1].strip())
+        assert event_data["status"] == "complete"
+        assert event_data["results"] == [1, 2, 3]
+
+
+@pytest.mark.asyncio
+async def test_large_file_stream():
+    """Test streaming of large files in chunks.
+    
+    Verifies that:
+    1. Response has correct headers for file download
+    2. Content is streamed in correct chunk sizes
+    3. Total content length matches expected size
+    """
+    async with app.test_client() as client:
+        response = await client.get("/stream/large-file")
+        assert response.status_code == 200
+        assert response.headers["Content-Type"] == "application/octet-stream"
+        assert response.headers["Content-Disposition"] == "attachment; filename=large-file.bin"
+
+        total_size = 0
+        async for chunk in response.content:
+            assert len(chunk) <= 1024  # Max chunk size
+            total_size += len(chunk)
+
+        assert total_size == 10 * 1024  # 10KB total
+
+
+@pytest.mark.asyncio
+async def test_csv_stream():
+    """Test streaming of CSV data.
+    
+    Verifies that:
+    1. Response has correct CSV headers
+    2. CSV content is properly formatted
+    3. All rows are received in correct order
+    """
+    async with app.test_client() as client:
+        response = await client.get("/stream/csv")
+        assert response.status_code == 200
+        assert response.headers["Content-Type"] == "text/csv"
+        assert response.headers["Content-Disposition"] == "attachment; filename=data.csv"
+
+        lines = []
+        async for chunk in response.content:
+            lines.extend(chunk.decode().splitlines())
+
+        # Verify header
+        assert lines[0] == "id,name,value"
+        
+        # Verify data rows
+        assert len(lines) == 6  # Header + 5 data rows
+        for i, line in enumerate(lines[1:], 0):
+            id_, name, value = line.split(',')
+            assert int(id_) == i
+            assert name == f"item-{i}"
+            assert 1 <= int(value) <= 100 
\ No newline at end of file

From ed5838f891dede4333214d823d30c96310e94179 Mon Sep 17 00:00:00 2001
From: Sanskar Jethi <sansyrox@gmail.com>
Date: Sun, 15 Dec 2024 06:58:05 +0000
Subject: [PATCH 03/14] update docs

---
 .../components/documentation/Navigation.jsx   |  4 ++
 .../documentation/api_reference/streaming.mdx | 61 ++++++++++++++-----
 2 files changed, 51 insertions(+), 14 deletions(-)

diff --git a/docs_src/src/components/documentation/Navigation.jsx b/docs_src/src/components/documentation/Navigation.jsx
index 2ad4b7225..2a1e8704a 100644
--- a/docs_src/src/components/documentation/Navigation.jsx
+++ b/docs_src/src/components/documentation/Navigation.jsx
@@ -289,6 +289,10 @@ export const navigation = [
         href: '/documentation/api_reference/exceptions',
         title: 'Exceptions',
       },
+      {
+        href: '/documentation/api_reference/streaming',
+        title: 'Streaming',
+      },
       {
         href: '/documentation/api_reference/scaling',
         title: 'Scaling the Application',
diff --git a/docs_src/src/pages/documentation/api_reference/streaming.mdx b/docs_src/src/pages/documentation/api_reference/streaming.mdx
index 0971a5787..7e9b0aaee 100644
--- a/docs_src/src/pages/documentation/api_reference/streaming.mdx
+++ b/docs_src/src/pages/documentation/api_reference/streaming.mdx
@@ -1,15 +1,28 @@
+---
+title: 'Streaming'
+description: 'Stream data efficiently with Robyn'
+---
+
+import { Row, CodeGroup, Steps, Callout } from '@/components/mdx'
+
+## Coming From
+
+If you're coming from [File Handling](/documentation/api_reference/file_handling), you'll find streaming provides a more efficient way to handle large files.
+
 ## Streaming Responses
 
 Like Batman's gadgets streaming from the Batcave to his utility belt, Robyn provides built-in support for streaming responses. This allows you to send data in chunks, perfect for large files, real-time updates, and server-sent events.
 
-Streaming responses are perfect for handling large datasets or real-time updates without consuming excessive memory.
+<Callout type="info">
+  Streaming responses are perfect for handling large datasets or real-time updates without consuming excessive memory.
+</Callout>
 
 ## Response
 
 When the Bat-Signal needs to stream continuously through the night sky, you'll want to use a generator or iterator as the `description` parameter:
 
 <Row>
-  <CodeGroup>
+  <CodeGroup title="Server">
     ```python
     from robyn import Response
 
@@ -28,7 +41,7 @@ When the Bat-Signal needs to stream continuously through the night sky, you'll w
     ```
   </CodeGroup>
   
-  <CodeGroup>
+  <CodeGroup title="Client">
     ```bash
     curl http://localhost:8000/bat-signal
     ```
@@ -48,25 +61,25 @@ When the Bat-Signal needs to stream continuously through the night sky, you'll w
 Like Batman's versatile arsenal, the streaming response system supports multiple data types:
 
 <Row>
-  <CodeGroup>
+  <CodeGroup title="Binary">
     ```python
     # Raw binary data (like Batcomputer logs)
     yield b"Batcomputer Log Entry\n"
     ```
   </CodeGroup>
-  <CodeGroup>
+  <CodeGroup title="Text">
     ```python
     # Text messages (like Alfred's updates)
     yield "Master Wayne, your tea is ready\n".encode()
     ```
   </CodeGroup>
-  <CodeGroup>
+  <CodeGroup title="Numbers">
     ```python
     # Numbers (like Batmobile telemetry)
     yield str(speed).encode()
     ```
   </CodeGroup>
-  <CodeGroup>
+  <CodeGroup title="JSON">
     ```python
     # JSON data (like Gotham City surveillance)
     yield json.dumps({"location": "Crime Alley"}).encode()
@@ -79,7 +92,7 @@ Like Batman's versatile arsenal, the streaming response system supports multiple
 For real-time updates from the Batcomputer:
 
 <Row>
-  <CodeGroup>
+  <CodeGroup title="Server">
     ```python
     @app.get("/batcomputer/events")
     async def batcomputer_feed():
@@ -103,7 +116,7 @@ For real-time updates from the Batcomputer:
         )
     ```
   </CodeGroup>
-  <CodeGroup>
+  <CodeGroup title="Client">
     ```javascript
     const evtSource = new EventSource("/batcomputer/events");
     evtSource.onmessage = (event) => {
@@ -118,7 +131,7 @@ For real-time updates from the Batcomputer:
 For streaming large files from the Batcomputer archives:
 
 <Row>
-  <CodeGroup>
+  <CodeGroup title="Server">
     ```python
     @app.get("/batcomputer/files")
     async def download_files():
@@ -138,7 +151,7 @@ For streaming large files from the Batcomputer archives:
         )
     ```
   </CodeGroup>
-  <CodeGroup>
+  <CodeGroup title="Client">
     ```bash
     curl -O http://localhost:8000/batcomputer/files
     ```
@@ -204,9 +217,29 @@ async def test_bat_signal():
         assert len(signals) > 0
 ```
 
-## What's next?
+## Best Practices
+
+<Steps>
+### Encode Data
+Always encode strings to bytes (like encrypting Bat-communications)
+
+### Chunk Size
+Use appropriate chunk sizes (8KB-64KB for efficient data transfer)
+
+### Resource Management
+Clean up resources (like Batman cleaning up Gotham)
+
+### Memory Usage
+Don't accumulate data in memory (keep the Batcomputer running smoothly)
+
+### Timeouts
+Implement timeouts (even Batman needs sleep); see the sketch after these steps
+</Steps>
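+
+As a rough sketch of the timeout step, a generator can bound how long it
+waits for each chunk with `asyncio.wait_for`; `fetch_next_chunk` below is a
+hypothetical stand-in for whatever actually feeds your stream:
+
+```python
+import asyncio
+
+async def fetch_next_chunk() -> bytes:
+    # Stand-in source; replace with your real data feed
+    await asyncio.sleep(0.1)
+    return b"Bat-telemetry\n"
+
+async def bounded_generator():
+    for _ in range(100):
+        try:
+            # Allow at most 5 seconds per chunk before giving up
+            chunk = await asyncio.wait_for(fetch_next_chunk(), timeout=5.0)
+        except asyncio.TimeoutError:
+            yield b"stream timed out\n"
+            break
+        yield chunk
+```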
+
+## What's Next?
 
-Now, Batman wanted to scale his application across multiple cores. Robyn led him to Scaling.
+Now that you've mastered streaming, you might want to explore:
 
-- [Scaling](/documentation/api_reference/scaling)
+- [WebSockets](/documentation/api_reference/websockets) - For real-time bidirectional communication
+- [Scaling](/documentation/api_reference/scaling) - Scale your streaming applications across multiple cores
 

From a4dfbf1337fa599d1835d7637ff74d93a0026831 Mon Sep 17 00:00:00 2001
From: Sanskar Jethi <sansyrox@gmail.com>
Date: Sun, 15 Dec 2024 07:01:50 +0000
Subject: [PATCH 04/14] fix docs

---
 .../src/pages/documentation/api_reference/streaming.mdx   | 8 +++-----
 1 file changed, 3 insertions(+), 5 deletions(-)

diff --git a/docs_src/src/pages/documentation/api_reference/streaming.mdx b/docs_src/src/pages/documentation/api_reference/streaming.mdx
index 7e9b0aaee..2605cd9d5 100644
--- a/docs_src/src/pages/documentation/api_reference/streaming.mdx
+++ b/docs_src/src/pages/documentation/api_reference/streaming.mdx
@@ -1,9 +1,7 @@
----
-title: 'Streaming'
-description: 'Stream data efficiently with Robyn'
----
+export const description =
+  'On this page, we’ll dive into streaming responses in Robyn: sending data in chunks for large files, real-time updates, and server-sent events.'
+
 
-import { Row, CodeGroup, Steps, Callout } from '@/components/mdx'
 
 ## Coming From
 

From 2d283588196c046fc5d1d7f3995be7f01b5cdd29 Mon Sep 17 00:00:00 2001
From: Sanskar Jethi <sansyrox@gmail.com>
Date: Sun, 15 Dec 2024 07:04:45 +0000
Subject: [PATCH 05/14] fix docs

---
 .../src/pages/documentation/api_reference/streaming.mdx     | 6 +-----
 1 file changed, 1 insertion(+), 5 deletions(-)

diff --git a/docs_src/src/pages/documentation/api_reference/streaming.mdx b/docs_src/src/pages/documentation/api_reference/streaming.mdx
index 2605cd9d5..bd9770101 100644
--- a/docs_src/src/pages/documentation/api_reference/streaming.mdx
+++ b/docs_src/src/pages/documentation/api_reference/streaming.mdx
@@ -11,9 +11,7 @@ If you're coming from [File Handling](/documentation/api_reference/file_handling
 
 Like Batman's gadgets streaming from the Batcave to his utility belt, Robyn provides built-in support for streaming responses. This allows you to send data in chunks, perfect for large files, real-time updates, and server-sent events.
 
-<Callout type="info">
-  Streaming responses are perfect for handling large datasets or real-time updates without consuming excessive memory.
-</Callout>
+Streaming responses are perfect for handling large datasets or real-time updates without consuming excessive memory.
 
 ## Response
 
@@ -186,9 +184,7 @@ headers = {
 
 Even Batman needs contingency plans:
 
-<Callout type="warning">
   Always handle errors gracefully in your streaming responses to prevent connection hangs.
-</Callout>
 
 ```python
 async def generator():

From 97cb9ce41684bbe4bd70fd385844611a2276c1ac Mon Sep 17 00:00:00 2001
From: Sanskar Jethi <sansyrox@gmail.com>
Date: Sun, 15 Dec 2024 07:10:54 +0000
Subject: [PATCH 06/14] fix docs

---
 docs_src/src/pages/documentation/api_reference/streaming.mdx | 4 ----
 1 file changed, 4 deletions(-)

diff --git a/docs_src/src/pages/documentation/api_reference/streaming.mdx b/docs_src/src/pages/documentation/api_reference/streaming.mdx
index bd9770101..9a52849a2 100644
--- a/docs_src/src/pages/documentation/api_reference/streaming.mdx
+++ b/docs_src/src/pages/documentation/api_reference/streaming.mdx
@@ -156,7 +156,6 @@ For streaming large files from the Batcomputer archives:
 
 ## Common Headers
 
-<Steps>
 ### Plain Text
 ```python
 headers = {"Content-Type": "text/plain"}
@@ -178,7 +177,6 @@ headers = {
     "Content-Disposition": "attachment; filename=file.dat"
 }
 ```
-</Steps>
 
 ## Error Handling
 
@@ -213,7 +211,6 @@ async def test_bat_signal():
 
 ## Best Practices
 
-<Steps>
 ### Encode Data
 Always encode strings to bytes (like encrypting Bat-communications)
 
@@ -228,7 +225,6 @@ Don't accumulate data in memory (keep the Batcomputer running smoothly)
 
 ### Timeouts
 Implement timeouts (even Batman needs sleep); see the sketch after these steps
-</Steps>
 
 ## What's Next?
 

From 24323b0e3fd3b3993657194f68fac8114628d6b4 Mon Sep 17 00:00:00 2001
From: Sanskar Jethi <sansyrox@gmail.com>
Date: Sun, 15 Dec 2024 07:11:25 +0000
Subject: [PATCH 07/14] fix docs

---
 docs_src/src/pages/documentation/streaming.md | 1 -
 1 file changed, 1 deletion(-)
 delete mode 100644 docs_src/src/pages/documentation/streaming.md

diff --git a/docs_src/src/pages/documentation/streaming.md b/docs_src/src/pages/documentation/streaming.md
deleted file mode 100644
index 0519ecba6..000000000
--- a/docs_src/src/pages/documentation/streaming.md
+++ /dev/null
@@ -1 +0,0 @@
- 
\ No newline at end of file

From 67b7c5dae1aefb490fd75ed16804288df59ad127 Mon Sep 17 00:00:00 2001
From: Sanskar Jethi <sansyrox@gmail.com>
Date: Mon, 16 Dec 2024 03:00:42 +0000
Subject: [PATCH 08/14] update

---
 docs_src/src/pages/documentation/streaming.md | 207 ++++++++++
 integration_tests/base_routes.py              |  58 ++-
 integration_tests/conftest.py                 |   5 +-
 integration_tests/test_streaming_responses.py | 288 ++++++-------
 poetry.lock                                   | 163 +++++++-
 pyproject.toml                                |   4 +
 robyn/__init__.py                             |   3 +-
 robyn/responses.py                            |  16 +-
 robyn/router.py                               |  24 +-
 src/base_routes.rs                            | 119 ++++++
 src/lib.rs                                    |   3 +-
 src/types/mod.rs                              |  27 +-
 src/types/request.rs                          |   2 +-
 src/types/response.rs                         | 379 ++++++++++++++----
 14 files changed, 998 insertions(+), 300 deletions(-)
 create mode 100644 docs_src/src/pages/documentation/streaming.md
 create mode 100644 src/base_routes.rs

diff --git a/docs_src/src/pages/documentation/streaming.md b/docs_src/src/pages/documentation/streaming.md
new file mode 100644
index 000000000..e55d34b11
--- /dev/null
+++ b/docs_src/src/pages/documentation/streaming.md
@@ -0,0 +1,207 @@
+# Streaming Responses in Robyn
+
+Robyn supports streaming responses for various use cases including real-time data, large file downloads, and server-sent events (SSE). This document explains how to use streaming responses effectively.
+
+## Basic Usage
+
+### Simple Streaming Response
+
+```python
+@app.get("/stream")
+async def stream():
+    async def generator():
+        for i in range(5):
+            yield f"Chunk {i}\n".encode()
+    
+    return Response(
+        status_code=200,
+        headers={"Content-Type": "text/plain"},
+        description=generator()
+    )
+```
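+
+To watch the chunks arrive as they are sent, use curl with `-N` to disable
+output buffering (this assumes the app listens on the default port 8080):
+
+```bash
+curl -N http://localhost:8080/stream
+```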
+
+## Supported Types
+
+Robyn's streaming response system supports multiple data types:
+
+1. **Binary Data** (`bytes`)
+```python
+yield b"Binary data"
+```
+
+2. **Text Data** (`str`)
+```python
+yield "String data".encode()
+```
+
+3. **Numbers** (`int`, `float`)
+```python
+yield str(42).encode()
+```
+
+4. **JSON Data**
+```python
+import json
+data = {"key": "value"}
+yield json.dumps(data).encode()
+```
+
+## Use Cases
+
+### 1. Server-Sent Events (SSE)
+
+SSE allows real-time updates from server to client:
+
+```python
+@app.get("/events")
+async def sse():
+    async def event_generator():
+        yield f"event: message\ndata: {json.dumps(data)}\n\n".encode()
+        
+    return Response(
+        status_code=200,
+        headers={
+            "Content-Type": "text/event-stream",
+            "Cache-Control": "no-cache",
+            "Connection": "keep-alive"
+        },
+        description=event_generator()
+    )
+```
+
+Client usage:
+```javascript
+const evtSource = new EventSource("/events");
+evtSource.onmessage = (event) => {
+    console.log(JSON.parse(event.data));
+};
+```
+
+### 2. Large File Downloads
+
+Stream large files in chunks to manage memory usage:
+
+```python
+@app.get("/download")
+async def download():
+    async def file_generator():
+        chunk_size = 8192  # 8KB chunks
+        with open("large_file.bin", "rb") as f:
+            while chunk := f.read(chunk_size):
+                yield chunk
+    
+    return Response(
+        status_code=200,
+        headers={
+            "Content-Type": "application/octet-stream",
+            "Content-Disposition": "attachment; filename=file.bin"
+        },
+        description=file_generator()
+    )
+```
+
+### 3. CSV Generation
+
+Stream CSV data as it's generated:
+
+```python
+@app.get("/csv")
+async def csv():
+    async def csv_generator():
+        yield "header1,header2\n".encode()
+        for item in data:
+            yield f"{item.field1},{item.field2}\n".encode()
+    
+    return Response(
+        status_code=200,
+        headers={
+            "Content-Type": "text/csv",
+            "Content-Disposition": "attachment; filename=data.csv"
+        },
+        description=csv_generator()
+    )
+```
+
+## Best Practices
+
+1. **Always encode your data**
+   - Convert strings to bytes using `.encode()`
+   - Use `json.dumps().encode()` for JSON data
+
+2. **Set appropriate headers**
+   - Use correct Content-Type
+   - Add Content-Disposition for downloads
+   - Set Cache-Control for SSE
+
+3. **Handle errors gracefully**
+   ```python
+   async def generator():
+       try:
+           for item in items:
+               yield process(item)
+       except Exception as e:
+           yield f"Error: {str(e)}".encode()
+   ```
+
+4. **Memory management** (see the sketch after this list)
+   - Use appropriate chunk sizes
+   - Don't hold entire dataset in memory
+   - Clean up resources after streaming
+
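+A minimal sketch of the cleanup point above: a `try`/`finally` inside the
+generator releases the file handle even if the client disconnects and the
+generator is closed before it finishes:
+
+```python
+async def file_generator():
+    f = open("large_file.bin", "rb")
+    try:
+        while chunk := f.read(8192):
+            yield chunk
+    finally:
+        f.close()  # Runs even if iteration stops early
+```
+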
+## Testing
+
+Test streaming responses using the test client:
+
+```python
+@pytest.mark.asyncio
+async def test_stream():
+    async with app.test_client() as client:
+        response = await client.get("/stream")
+        chunks = []
+        async for chunk in response.content:
+            chunks.append(chunk)
+        # Assert on chunks
+```
+
+## Common Issues
+
+1. **Forgetting to encode data**
+   ```python
+   # Wrong
+   yield "data"  # Will fail
+   # Correct
+   yield "data".encode()
+   ```
+
+2. **Not setting correct headers**
+   ```python
+   # SSE needs specific headers
+   headers = {
+       "Content-Type": "text/event-stream",
+       "Cache-Control": "no-cache",
+       "Connection": "keep-alive"
+   }
+   ```
+
+3. **Memory leaks**
+   ```python
+   # Wrong
+   data = []
+   async def generator():
+       for i in range(1000000):
+           data.append(i)  # Memory leak
+           yield str(i).encode()
+   
+   # Correct
+   async def generator():
+       for i in range(1000000):
+           yield str(i).encode()
+   ```
+
+## Performance Considerations
+
+1. Use appropriate chunk sizes (typically 8KB-64KB)
+2. Implement backpressure handling
+3. Consider using async file I/O for large files (see the sketch after this list)
+4. Monitor memory usage during streaming
+5. Implement timeouts for long-running streams
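+
+A minimal sketch of point 3, assuming the third-party `aiofiles` package is
+installed (it is not a Robyn dependency):
+
+```python
+import aiofiles
+
+async def async_file_generator():
+    # Read 64KB chunks without blocking the event loop
+    async with aiofiles.open("large_file.bin", "rb") as f:
+        while chunk := await f.read(64 * 1024):
+            yield chunk
+```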
diff --git a/integration_tests/base_routes.py b/integration_tests/base_routes.py
index e441d12f7..44e512a01 100644
--- a/integration_tests/base_routes.py
+++ b/integration_tests/base_routes.py
@@ -5,7 +5,7 @@
 
 from integration_tests.subroutes import di_subrouter, sub_router
 from integration_tests.views import AsyncView, SyncView
-from robyn import Headers, Request, Response, Robyn, WebSocket, WebSocketConnector, jsonify, serve_file, serve_html
+from robyn import Headers, Request, Response, Robyn, WebSocket, WebSocketConnector, jsonify, serve_file, serve_html, StreamingResponse
 from robyn.authentication import AuthenticationHandler, BearerGetter, Identity
 from robyn.robyn import QueryParams, Url
 from robyn.templating import JinjaTemplate
@@ -159,7 +159,10 @@ def sync_before_request(request: Request):
 @app.after_request("/sync/middlewares")
 def sync_after_request(response: Response):
     response.headers.set("after", "sync_after_request")
-    response.description = response.description + " after"
+    if isinstance(response.description, bytes):
+        response.description = response.description + b" after"
+    else:
+        response.description = response.description + " after"
     return response
 
 
@@ -180,7 +183,10 @@ async def async_before_request(request: Request):
 @app.after_request("/async/middlewares")
 async def async_after_request(response: Response):
     response.headers.set("after", "async_after_request")
-    response.description = response.description + " after"
+    if isinstance(response.description, bytes):
+        response.description = response.description + b" after"
+    else:
+        response.description = response.description + " after"
     return response
 
 
@@ -1085,56 +1091,42 @@ def create_item(request, body: CreateItemBody, query: CreateItemQueryParamsParam
 # --- Streaming responses ---
 
 @app.get("/stream/sync")
-def sync_stream():
-    def number_generator():
+async def sync_stream():
+    def generator():
         for i in range(5):
             yield f"Chunk {i}\n".encode()
     
-    return Response(
+    headers = Headers({"Content-Type": "text/plain"})
+    return StreamingResponse(
         status_code=200,
-        headers={"Content-Type": "text/plain"},
-        description=number_generator()
+        description=generator(),
+        headers=headers
     )
 
 @app.get("/stream/async")
 async def async_stream():
-    async def async_generator():
-        import asyncio
+    async def generator():
         for i in range(5):
-            await asyncio.sleep(1)  # Simulate async work
             yield f"Async Chunk {i}\n".encode()
     
-    return Response(
+    return StreamingResponse(
         status_code=200,
         headers={"Content-Type": "text/plain"},
-        description=async_generator()
+        description=generator()
     )
 
 @app.get("/stream/mixed")
 async def mixed_stream():
-    async def mixed_generator():
-        import asyncio
-        # Binary data
+    async def generator():
         yield b"Binary chunk\n"
-        await asyncio.sleep(0.5)
-        
-        # String data
         yield "String chunk\n".encode()
-        await asyncio.sleep(0.5)
-        
-        # Integer data
         yield str(42).encode() + b"\n"
-        await asyncio.sleep(0.5)
-        
-        # JSON data
-        import json
-        data = {"message": "JSON chunk", "number": 123}
-        yield json.dumps(data).encode() + b"\n"
+        yield json.dumps({"message": "JSON chunk", "number": 123}).encode() + b"\n"
     
-    return Response(
+    return StreamingResponse(
         status_code=200,
         headers={"Content-Type": "text/plain"},
-        description=mixed_generator()
+        description=generator()
     )
 
 @app.get("/stream/events")
@@ -1156,7 +1148,7 @@ async def event_generator():
         data = json.dumps({'status': 'complete', 'results': [1, 2, 3]}, indent=2)
         yield f"event: complete\ndata: {data}\n\n".encode()
     
-    return Response(
+    return StreamingResponse(
         status_code=200,
         headers={
             "Content-Type": "text/event-stream",
@@ -1178,7 +1170,7 @@ async def file_generator():
             chunk = b"X" * min(chunk_size, total_size - offset)
             yield chunk
     
-    return Response(
+    return StreamingResponse(
         status_code=200,
         headers={
             "Content-Type": "application/octet-stream",
@@ -1202,7 +1194,7 @@ async def csv_generator():
             row = f"{i},item-{i},{random.randint(1, 100)}\n"
             yield row.encode()
     
-    return Response(
+    return StreamingResponse(
         status_code=200,
         headers={
             "Content-Type": "text/csv",
diff --git a/integration_tests/conftest.py b/integration_tests/conftest.py
index 77c661e0b..c450919df 100644
--- a/integration_tests/conftest.py
+++ b/integration_tests/conftest.py
@@ -8,10 +8,12 @@
 from typing import List
 
 import pytest
+import pytest_asyncio
+from robyn import Robyn
+from integration_tests.base_routes import app
 
 from integration_tests.helpers.network_helpers import get_network_host
 
-
 def spawn_process(command: List[str]) -> subprocess.Popen:
     if platform.system() == "Windows":
         command[0] = "python"
@@ -127,3 +129,4 @@ def env_file():
     env_path.unlink()
     del os.environ["ROBYN_PORT"]
     del os.environ["ROBYN_HOST"]
+
diff --git a/integration_tests/test_streaming_responses.py b/integration_tests/test_streaming_responses.py
index 3ba872a58..4e16f9504 100644
--- a/integration_tests/test_streaming_responses.py
+++ b/integration_tests/test_streaming_responses.py
@@ -14,181 +14,129 @@
 
 import json
 import pytest
-from robyn import Robyn
-from robyn.robyn import Request
-from integration_tests.base_routes import app
+import aiohttp
 
+# Mark all tests in this module as async
+pytestmark = pytest.mark.asyncio
 
-@pytest.mark.asyncio
 async def test_sync_stream():
-    """Test basic synchronous streaming response.
-    
-    Verifies that:
-    1. Response has correct content type
-    2. Chunks are received in correct order
-    3. Each chunk has expected format
-    """
-    async with app.test_client() as client:
-        response = await client.get("/stream/sync")
-        assert response.status_code == 200
-        assert response.headers["Content-Type"] == "text/plain"
-
-        chunks = []
-        async for chunk in response.content:
-            chunks.append(chunk.decode())
-
-        assert len(chunks) == 5
-        for i, chunk in enumerate(chunks):
-            assert chunk == f"Chunk {i}\n"
-
-
-@pytest.mark.asyncio
+    """Test basic synchronous streaming response."""
+    async with aiohttp.ClientSession() as client:
+        async with client.get("http://127.0.0.1:8080/stream/sync") as response:
+            assert response.status == 200
+            assert response.headers["Content-Type"] == "text/plain"
+
+            chunks = []
+            async for chunk in response.content:
+                chunks.append(chunk.decode())
+
+            assert len(chunks) == 5
+            for i, chunk in enumerate(chunks):
+                assert chunk == f"Chunk {i}\n"
+
 async def test_async_stream():
-    """Test asynchronous streaming response.
-    
-    Verifies that:
-    1. Response has correct content type
-    2. Chunks are received in correct order with delays
-    3. Each chunk has expected format
-    """
-    async with app.test_client() as client:
-        response = await client.get("/stream/async")
-        assert response.status_code == 200
-        assert response.headers["Content-Type"] == "text/plain"
-
-        chunks = []
-        async for chunk in response.content:
-            chunks.append(chunk.decode())
-
-        assert len(chunks) == 5
-        for i, chunk in enumerate(chunks):
-            assert chunk == f"Async Chunk {i}\n"
-
-
-@pytest.mark.asyncio
+    """Test asynchronous streaming response."""
+    async with aiohttp.ClientSession() as client:
+        async with client.get("http://127.0.0.1:8080/stream/async") as response:
+            assert response.status == 200
+            assert response.headers["Content-Type"] == "text/plain"
+
+            chunks = []
+            async for chunk in response.content:
+                chunks.append(chunk.decode())
+
+            assert len(chunks) == 5
+            for i, chunk in enumerate(chunks):
+                assert chunk == f"Async Chunk {i}\n"
+
 async def test_mixed_stream():
-    """Test streaming of mixed content types.
-    
-    Verifies that:
-    1. Response handles different content types:
-       - Binary data
-       - String data
-       - Integer data
-       - JSON data
-    2. Each chunk is correctly encoded
-    """
-    async with app.test_client() as client:
-        response = await client.get("/stream/mixed")
-        assert response.status_code == 200
-        assert response.headers["Content-Type"] == "text/plain"
-
-        expected = [
-            b"Binary chunk\n",
-            b"String chunk\n",
-            b"42\n",
-            json.dumps({"message": "JSON chunk", "number": 123}).encode() + b"\n"
-        ]
-
-        chunks = []
-        async for chunk in response.content:
-            chunks.append(chunk)
-
-        assert len(chunks) == len(expected)
-        for chunk, expected_chunk in zip(chunks, expected):
-            assert chunk == expected_chunk
-
-
-@pytest.mark.asyncio
+    """Test streaming of mixed content types."""
+    async with aiohttp.ClientSession() as client:
+        async with client.get("http://127.0.0.1:8080/stream/mixed") as response:
+            assert response.status == 200
+            assert response.headers["Content-Type"] == "text/plain"
+
+            expected = [
+                b"Binary chunk\n",
+                b"String chunk\n",
+                b"42\n",
+                json.dumps({"message": "JSON chunk", "number": 123}).encode() + b"\n"
+            ]
+
+            chunks = []
+            async for chunk in response.content:
+                chunks.append(chunk)
+
+            assert len(chunks) == len(expected)
+            for chunk, expected_chunk in zip(chunks, expected):
+                assert chunk == expected_chunk
+
 async def test_server_sent_events():
-    """Test Server-Sent Events (SSE) streaming.
-    
-    Verifies that:
-    1. Response has correct SSE headers
-    2. Events are properly formatted with:
-       - Event type
-       - Event ID (when provided)
-       - Event data
-    """
-    async with app.test_client() as client:
-        response = await client.get("/stream/events")
-        assert response.status_code == 200
-        assert response.headers["Content-Type"] == "text/event-stream"
-        assert response.headers["Cache-Control"] == "no-cache"
-        assert response.headers["Connection"] == "keep-alive"
-
-        events = []
-        async for chunk in response.content:
-            events.append(chunk.decode())
-
-        # Test first event (message)
-        assert "event: message\n" in events[0]
-        assert "data: {" in events[0]
-        event_data = json.loads(events[0].split("data: ")[1].strip())
-        assert "time" in event_data
-        assert event_data["type"] == "start"
-
-        # Test second event (with ID)
-        assert "id: 1\n" in events[1]
-        assert "event: update\n" in events[1]
-        event_data = json.loads(events[1].split("data: ")[1].strip())
-        assert event_data["progress"] == 50
-
-        # Test third event (complete)
-        assert "event: complete\n" in events[2]
-        event_data = json.loads(events[2].split("data: ")[1].strip())
-        assert event_data["status"] == "complete"
-        assert event_data["results"] == [1, 2, 3]
-
-
-@pytest.mark.asyncio
+    """Test Server-Sent Events (SSE) streaming."""
+    async with aiohttp.ClientSession() as client:
+        async with client.get("http://127.0.0.1:8080/stream/events") as response:
+            assert response.status == 200
+            assert response.headers["Content-Type"] == "text/event-stream"
+            assert response.headers["Cache-Control"] == "no-cache"
+            assert response.headers["Connection"] == "keep-alive"
+
+            events = []
+            async for chunk in response.content:
+                events.append(chunk.decode())
+
+            # Test first event (message)
+            assert "event: message\n" in events[0]
+            assert "data: {" in events[0]
+            event_data = json.loads(events[0].split("data: ")[1].strip())
+            assert "time" in event_data
+            assert event_data["type"] == "start"
+
+            # Test second event (with ID)
+            assert "id: 1\n" in events[1]
+            assert "event: update\n" in events[1]
+            event_data = json.loads(events[1].split("data: ")[1].strip())
+            assert event_data["progress"] == 50
+
+            # Test third event (complete)
+            assert "event: complete\n" in events[2]
+            event_data = json.loads(events[2].split("data: ")[1].strip())
+            assert event_data["status"] == "complete"
+            assert event_data["results"] == [1, 2, 3]
+
 async def test_large_file_stream():
-    """Test streaming of large files in chunks.
-    
-    Verifies that:
-    1. Response has correct headers for file download
-    2. Content is streamed in correct chunk sizes
-    3. Total content length matches expected size
-    """
-    async with app.test_client() as client:
-        response = await client.get("/stream/large-file")
-        assert response.status_code == 200
-        assert response.headers["Content-Type"] == "application/octet-stream"
-        assert response.headers["Content-Disposition"] == "attachment; filename=large-file.bin"
-
-        total_size = 0
-        async for chunk in response.content:
-            assert len(chunk) <= 1024  # Max chunk size
-            total_size += len(chunk)
-
-        assert total_size == 10 * 1024  # 10KB total
-
-
-@pytest.mark.asyncio
+    """Test streaming of large files in chunks."""
+    async with aiohttp.ClientSession() as client:
+        async with client.get("http://127.0.0.1:8080/stream/large-file") as response:
+            assert response.status == 200
+            assert response.headers["Content-Type"] == "application/octet-stream"
+            assert response.headers["Content-Disposition"] == "attachment; filename=large-file.bin"
+
+            total_size = 0
+            async for chunk in response.content:
+                assert len(chunk) <= 1024  # Max chunk size
+                total_size += len(chunk)
+
+            assert total_size == 10 * 1024  # 10KB total
+
 async def test_csv_stream():
-    """Test streaming of CSV data.
-    
-    Verifies that:
-    1. Response has correct CSV headers
-    2. CSV content is properly formatted
-    3. All rows are received in correct order
-    """
-    async with app.test_client() as client:
-        response = await client.get("/stream/csv")
-        assert response.status_code == 200
-        assert response.headers["Content-Type"] == "text/csv"
-        assert response.headers["Content-Disposition"] == "attachment; filename=data.csv"
-
-        lines = []
-        async for chunk in response.content:
-            lines.extend(chunk.decode().splitlines())
-
-        # Verify header
-        assert lines[0] == "id,name,value"
-        
-        # Verify data rows
-        assert len(lines) == 6  # Header + 5 data rows
-        for i, line in enumerate(lines[1:], 0):
-            id_, name, value = line.split(',')
-            assert int(id_) == i
-            assert name == f"item-{i}"
-            assert 1 <= int(value) <= 100 
\ No newline at end of file
+    """Test streaming of CSV data."""
+    async with aiohttp.ClientSession() as client:
+        async with client.get("http://127.0.0.1:8080/stream/csv") as response:
+            assert response.status == 200
+            assert response.headers["Content-Type"] == "text/csv"
+            assert response.headers["Content-Disposition"] == "attachment; filename=data.csv"
+
+            lines = []
+            async for chunk in response.content:
+                lines.extend(chunk.decode().splitlines())
+
+            # Verify header
+            assert lines[0] == "id,name,value"
+            
+            # Verify data rows
+            assert len(lines) == 6  # Header + 5 data rows
+            for i, line in enumerate(lines[1:], 0):
+                id_, name, value = line.split(',')
+                assert int(id_) == i
+                assert name == f"item-{i}"
+                assert 1 <= int(value) <= 100
\ No newline at end of file
diff --git a/poetry.lock b/poetry.lock
index a0029ddb9..3b34ae21a 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -261,6 +261,83 @@ termcolor = {version = ">=1.1,<3", markers = "python_version >= \"3.7\""}
 tomlkit = ">=0.5.3,<1.0.0"
 typing-extensions = ">=4.0.1,<5.0.0"
 
+[[package]]
+name = "coverage"
+version = "7.6.9"
+description = "Code coverage measurement for Python"
+optional = false
+python-versions = ">=3.9"
+files = [
+    {file = "coverage-7.6.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:85d9636f72e8991a1706b2b55b06c27545448baf9f6dbf51c4004609aacd7dcb"},
+    {file = "coverage-7.6.9-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:608a7fd78c67bee8936378299a6cb9f5149bb80238c7a566fc3e6717a4e68710"},
+    {file = "coverage-7.6.9-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:96d636c77af18b5cb664ddf12dab9b15a0cfe9c0bde715da38698c8cea748bfa"},
+    {file = "coverage-7.6.9-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d75cded8a3cff93da9edc31446872d2997e327921d8eed86641efafd350e1df1"},
+    {file = "coverage-7.6.9-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f7b15f589593110ae767ce997775d645b47e5cbbf54fd322f8ebea6277466cec"},
+    {file = "coverage-7.6.9-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:44349150f6811b44b25574839b39ae35291f6496eb795b7366fef3bd3cf112d3"},
+    {file = "coverage-7.6.9-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:d891c136b5b310d0e702e186d70cd16d1119ea8927347045124cb286b29297e5"},
+    {file = "coverage-7.6.9-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:db1dab894cc139f67822a92910466531de5ea6034ddfd2b11c0d4c6257168073"},
+    {file = "coverage-7.6.9-cp310-cp310-win32.whl", hash = "sha256:41ff7b0da5af71a51b53f501a3bac65fb0ec311ebed1632e58fc6107f03b9198"},
+    {file = "coverage-7.6.9-cp310-cp310-win_amd64.whl", hash = "sha256:35371f8438028fdccfaf3570b31d98e8d9eda8bb1d6ab9473f5a390969e98717"},
+    {file = "coverage-7.6.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:932fc826442132dde42ee52cf66d941f581c685a6313feebed358411238f60f9"},
+    {file = "coverage-7.6.9-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:085161be5f3b30fd9b3e7b9a8c301f935c8313dcf928a07b116324abea2c1c2c"},
+    {file = "coverage-7.6.9-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ccc660a77e1c2bf24ddbce969af9447a9474790160cfb23de6be4fa88e3951c7"},
+    {file = "coverage-7.6.9-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c69e42c892c018cd3c8d90da61d845f50a8243062b19d228189b0224150018a9"},
+    {file = "coverage-7.6.9-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0824a28ec542a0be22f60c6ac36d679e0e262e5353203bea81d44ee81fe9c6d4"},
+    {file = "coverage-7.6.9-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4401ae5fc52ad8d26d2a5d8a7428b0f0c72431683f8e63e42e70606374c311a1"},
+    {file = "coverage-7.6.9-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:98caba4476a6c8d59ec1eb00c7dd862ba9beca34085642d46ed503cc2d440d4b"},
+    {file = "coverage-7.6.9-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ee5defd1733fd6ec08b168bd4f5387d5b322f45ca9e0e6c817ea6c4cd36313e3"},
+    {file = "coverage-7.6.9-cp311-cp311-win32.whl", hash = "sha256:f2d1ec60d6d256bdf298cb86b78dd715980828f50c46701abc3b0a2b3f8a0dc0"},
+    {file = "coverage-7.6.9-cp311-cp311-win_amd64.whl", hash = "sha256:0d59fd927b1f04de57a2ba0137166d31c1a6dd9e764ad4af552912d70428c92b"},
+    {file = "coverage-7.6.9-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:99e266ae0b5d15f1ca8d278a668df6f51cc4b854513daab5cae695ed7b721cf8"},
+    {file = "coverage-7.6.9-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9901d36492009a0a9b94b20e52ebfc8453bf49bb2b27bca2c9706f8b4f5a554a"},
+    {file = "coverage-7.6.9-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:abd3e72dd5b97e3af4246cdada7738ef0e608168de952b837b8dd7e90341f015"},
+    {file = "coverage-7.6.9-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff74026a461eb0660366fb01c650c1d00f833a086b336bdad7ab00cc952072b3"},
+    {file = "coverage-7.6.9-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65dad5a248823a4996724a88eb51d4b31587aa7aa428562dbe459c684e5787ae"},
+    {file = "coverage-7.6.9-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:22be16571504c9ccea919fcedb459d5ab20d41172056206eb2994e2ff06118a4"},
+    {file = "coverage-7.6.9-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f957943bc718b87144ecaee70762bc2bc3f1a7a53c7b861103546d3a403f0a6"},
+    {file = "coverage-7.6.9-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0ae1387db4aecb1f485fb70a6c0148c6cdaebb6038f1d40089b1fc84a5db556f"},
+    {file = "coverage-7.6.9-cp312-cp312-win32.whl", hash = "sha256:1a330812d9cc7ac2182586f6d41b4d0fadf9be9049f350e0efb275c8ee8eb692"},
+    {file = "coverage-7.6.9-cp312-cp312-win_amd64.whl", hash = "sha256:b12c6b18269ca471eedd41c1b6a1065b2f7827508edb9a7ed5555e9a56dcfc97"},
+    {file = "coverage-7.6.9-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:899b8cd4781c400454f2f64f7776a5d87bbd7b3e7f7bda0cb18f857bb1334664"},
+    {file = "coverage-7.6.9-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:61f70dc68bd36810972e55bbbe83674ea073dd1dcc121040a08cdf3416c5349c"},
+    {file = "coverage-7.6.9-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8a289d23d4c46f1a82d5db4abeb40b9b5be91731ee19a379d15790e53031c014"},
+    {file = "coverage-7.6.9-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e216d8044a356fc0337c7a2a0536d6de07888d7bcda76febcb8adc50bdbbd00"},
+    {file = "coverage-7.6.9-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c026eb44f744acaa2bda7493dad903aa5bf5fc4f2554293a798d5606710055d"},
+    {file = "coverage-7.6.9-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e77363e8425325384f9d49272c54045bbed2f478e9dd698dbc65dbc37860eb0a"},
+    {file = "coverage-7.6.9-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:777abfab476cf83b5177b84d7486497e034eb9eaea0d746ce0c1268c71652077"},
+    {file = "coverage-7.6.9-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:447af20e25fdbe16f26e84eb714ba21d98868705cb138252d28bc400381f6ffb"},
+    {file = "coverage-7.6.9-cp313-cp313-win32.whl", hash = "sha256:d872ec5aeb086cbea771c573600d47944eea2dcba8be5f3ee649bfe3cb8dc9ba"},
+    {file = "coverage-7.6.9-cp313-cp313-win_amd64.whl", hash = "sha256:fd1213c86e48dfdc5a0cc676551db467495a95a662d2396ecd58e719191446e1"},
+    {file = "coverage-7.6.9-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:ba9e7484d286cd5a43744e5f47b0b3fb457865baf07bafc6bee91896364e1419"},
+    {file = "coverage-7.6.9-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e5ea1cf0872ee455c03e5674b5bca5e3e68e159379c1af0903e89f5eba9ccc3a"},
+    {file = "coverage-7.6.9-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d10e07aa2b91835d6abec555ec8b2733347956991901eea6ffac295f83a30e4"},
+    {file = "coverage-7.6.9-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:13a9e2d3ee855db3dd6ea1ba5203316a1b1fd8eaeffc37c5b54987e61e4194ae"},
+    {file = "coverage-7.6.9-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c38bf15a40ccf5619fa2fe8f26106c7e8e080d7760aeccb3722664c8656b030"},
+    {file = "coverage-7.6.9-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:d5275455b3e4627c8e7154feaf7ee0743c2e7af82f6e3b561967b1cca755a0be"},
+    {file = "coverage-7.6.9-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:8f8770dfc6e2c6a2d4569f411015c8d751c980d17a14b0530da2d7f27ffdd88e"},
+    {file = "coverage-7.6.9-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:8d2dfa71665a29b153a9681edb1c8d9c1ea50dfc2375fb4dac99ea7e21a0bcd9"},
+    {file = "coverage-7.6.9-cp313-cp313t-win32.whl", hash = "sha256:5e6b86b5847a016d0fbd31ffe1001b63355ed309651851295315031ea7eb5a9b"},
+    {file = "coverage-7.6.9-cp313-cp313t-win_amd64.whl", hash = "sha256:97ddc94d46088304772d21b060041c97fc16bdda13c6c7f9d8fcd8d5ae0d8611"},
+    {file = "coverage-7.6.9-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:adb697c0bd35100dc690de83154627fbab1f4f3c0386df266dded865fc50a902"},
+    {file = "coverage-7.6.9-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:be57b6d56e49c2739cdf776839a92330e933dd5e5d929966fbbd380c77f060be"},
+    {file = "coverage-7.6.9-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1592791f8204ae9166de22ba7e6705fa4ebd02936c09436a1bb85aabca3e599"},
+    {file = "coverage-7.6.9-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4e12ae8cc979cf83d258acb5e1f1cf2f3f83524d1564a49d20b8bec14b637f08"},
+    {file = "coverage-7.6.9-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb5555cff66c4d3d6213a296b360f9e1a8e323e74e0426b6c10ed7f4d021e464"},
+    {file = "coverage-7.6.9-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:b9389a429e0e5142e69d5bf4a435dd688c14478a19bb901735cdf75e57b13845"},
+    {file = "coverage-7.6.9-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:592ac539812e9b46046620341498caf09ca21023c41c893e1eb9dbda00a70cbf"},
+    {file = "coverage-7.6.9-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a27801adef24cc30871da98a105f77995e13a25a505a0161911f6aafbd66e678"},
+    {file = "coverage-7.6.9-cp39-cp39-win32.whl", hash = "sha256:8e3c3e38930cfb729cb8137d7f055e5a473ddaf1217966aa6238c88bd9fd50e6"},
+    {file = "coverage-7.6.9-cp39-cp39-win_amd64.whl", hash = "sha256:e28bf44afa2b187cc9f41749138a64435bf340adfcacb5b2290c070ce99839d4"},
+    {file = "coverage-7.6.9-pp39.pp310-none-any.whl", hash = "sha256:f3ca78518bc6bc92828cd11867b121891d75cae4ea9e908d72030609b996db1b"},
+    {file = "coverage-7.6.9.tar.gz", hash = "sha256:4a8d8977b0c6ef5aeadcb644da9e69ae0dcfe66ec7f368c89c72e058bd71164d"},
+]
+
+[package.dependencies]
+tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""}
+
+[package.extras]
+toml = ["tomli"]
+
 [[package]]
 name = "decli"
 version = "0.5.2"
@@ -312,6 +389,20 @@ files = [
 [package.extras]
 test = ["pytest (>=6)"]
 
+[[package]]
+name = "execnet"
+version = "2.1.1"
+description = "execnet: rapid multi-Python deployment"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "execnet-2.1.1-py3-none-any.whl", hash = "sha256:26dee51f1b80cebd6d0ca8e74dd8745419761d3bef34163928cbebbdc4749fdc"},
+    {file = "execnet-2.1.1.tar.gz", hash = "sha256:5189b52c6121c24feae288166ab41b32549c7e2348652736540b9e6e7d4e72e3"},
+]
+
+[package.extras]
+testing = ["hatch", "pre-commit", "pytest", "tox"]
+
 [[package]]
 name = "filelock"
 version = "3.16.1"
@@ -814,6 +905,24 @@ tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""}
 [package.extras]
 testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"]
 
+[[package]]
+name = "pytest-asyncio"
+version = "0.21.0"
+description = "Pytest support for asyncio"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "pytest-asyncio-0.21.0.tar.gz", hash = "sha256:2b38a496aef56f56b0e87557ec313e11e1ab9276fc3863f6a7be0f1d0e415e1b"},
+    {file = "pytest_asyncio-0.21.0-py3-none-any.whl", hash = "sha256:f2b3366b7cd501a4056858bd39349d5af19742aed2d81660b7998b6341c7eb9c"},
+]
+
+[package.dependencies]
+pytest = ">=7.0.0"
+
+[package.extras]
+docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"]
+testing = ["coverage (>=6.2)", "flaky (>=3.5.0)", "hypothesis (>=5.7.1)", "mypy (>=0.931)", "pytest-trio (>=0.7.0)"]
+
 [[package]]
 name = "pytest-codspeed"
 version = "1.2.2"
@@ -833,6 +942,58 @@ pytest = ">=3.8"
 compatibility = ["pytest-benchmarks (>=3.4.1,<3.5.0)"]
 dev = ["black (>=22.3.0,<22.4.0)", "flake8 (>=5.0.4,<5.1.0)", "hatchling (>=1.11.1,<1.12.0)", "isort (>=5.8.0,<5.9.0)", "mypy (>=0.961,<1.0)", "pytest (>=7.0,<8.0)", "pytest-cov (>=4.0.0,<4.1.0)", "ruff (>=0.0.100,<0.1.0)"]
 
+[[package]]
+name = "pytest-cov"
+version = "4.0.0"
+description = "Pytest plugin for measuring coverage."
+optional = false
+python-versions = ">=3.6"
+files = [
+    {file = "pytest-cov-4.0.0.tar.gz", hash = "sha256:996b79efde6433cdbd0088872dbc5fb3ed7fe1578b68cdbba634f14bb8dd0470"},
+    {file = "pytest_cov-4.0.0-py3-none-any.whl", hash = "sha256:2feb1b751d66a8bd934e5edfa2e961d11309dc37b73b0eabe73b5945fee20f6b"},
+]
+
+[package.dependencies]
+coverage = {version = ">=5.2.1", extras = ["toml"]}
+pytest = ">=4.6"
+
+[package.extras]
+testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"]
+
+[[package]]
+name = "pytest-timeout"
+version = "2.1.0"
+description = "pytest plugin to abort hanging tests"
+optional = false
+python-versions = ">=3.6"
+files = [
+    {file = "pytest-timeout-2.1.0.tar.gz", hash = "sha256:c07ca07404c612f8abbe22294b23c368e2e5104b521c1790195561f37e1ac3d9"},
+    {file = "pytest_timeout-2.1.0-py3-none-any.whl", hash = "sha256:f6f50101443ce70ad325ceb4473c4255e9d74e3c7cd0ef827309dfa4c0d975c6"},
+]
+
+[package.dependencies]
+pytest = ">=5.0.0"
+
+[[package]]
+name = "pytest-xdist"
+version = "3.6.1"
+description = "pytest xdist plugin for distributed testing, most importantly across multiple CPUs"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "pytest_xdist-3.6.1-py3-none-any.whl", hash = "sha256:9ed4adfb68a016610848639bb7e02c9352d5d9f03d04809919e2dafc3be4cca7"},
+    {file = "pytest_xdist-3.6.1.tar.gz", hash = "sha256:ead156a4db231eec769737f57668ef58a2084a34b2e55c4a8fa20d861107300d"},
+]
+
+[package.dependencies]
+execnet = ">=2.1"
+pytest = ">=7.0.0"
+
+[package.extras]
+psutil = ["psutil (>=3.0)"]
+setproctitle = ["setproctitle"]
+testing = ["filelock"]
+
 [[package]]
 name = "pyyaml"
 version = "6.0.2"
@@ -1218,4 +1379,4 @@ templating = ["jinja2"]
 [metadata]
 lock-version = "2.0"
 python-versions = "^3.9"
-content-hash = "5702106e5e2249e2af3b6c58b861c5f8a64cb24b2130cfa68d5fd0066c587697"
+content-hash = "bac2cff41c35ede85e4a7dddb0481022bb2fb8ab465f7087cdfc9bbfdfe53038"
diff --git a/pyproject.toml b/pyproject.toml
index fb99e81a2..3c502c49a 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -82,6 +82,10 @@ optional = true
 
 [tool.poetry.group.test.dependencies]
 pytest = "7.2.1"
+pytest-asyncio = "0.21.0"
+pytest-cov = "4.0.0"
+pytest-xdist = "3.6.1"
+pytest-timeout = "2.1.0"
 pytest-codspeed = "1.2.2"
 requests = "2.28.2"
 nox = "2023.4.22"
diff --git a/robyn/__init__.py b/robyn/__init__.py
index 929c09718..849e28023 100644
--- a/robyn/__init__.py
+++ b/robyn/__init__.py
@@ -20,7 +20,7 @@
 from robyn.processpool import run_processes
 from robyn.reloader import compile_rust_files
 from robyn.responses import html, serve_file, serve_html
-from robyn.robyn import FunctionInfo, Headers, HttpMethod, Request, Response, WebSocketConnector, get_version
+from robyn.robyn import FunctionInfo, Headers, HttpMethod, Request, Response, WebSocketConnector, get_version, StreamingResponse
 from robyn.router import MiddlewareRouter, MiddlewareType, Router, WebSocketRouter
 from robyn.types import Directory
 from robyn.ws import WebSocket
@@ -673,6 +673,7 @@ def cors_middleware(request):
     "Robyn",
     "Request",
     "Response",
+    "StreamingResponse",
     "status_codes",
     "jsonify",
     "serve_file",
diff --git a/robyn/responses.py b/robyn/responses.py
index f9750a3fe..07726db10 100644
--- a/robyn/responses.py
+++ b/robyn/responses.py
@@ -1,6 +1,6 @@
 import mimetypes
 import os
-from typing import Optional
+from typing import Any, Optional
 
 from robyn.robyn import Headers, Response
 
@@ -18,6 +18,20 @@ def __init__(
         self.headers = headers or Headers({"Content-Disposition": "attachment"})
 
 
+class StreamingResponse:
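+    """Response whose ``description`` is a sync or async generator of chunks.
+
+    ``response_type`` is fixed to "stream" so the Rust extractor can tell it
+    apart from a regular Response.
+    """
+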
+    def __init__(
+        self,
+        status_code: int = 200,
+        description: Optional[Any] = None,
+        headers: Optional[Headers] = None,
+    ):
+        self.status_code = status_code
+        self.description = description or []
+        self.headers = headers or Headers({})
+        self.response_type = "stream"
+        self.file_path = None
+
+
 def html(html: str) -> Response:
     """
     This function will help in serving a simple html string
diff --git a/robyn/router.py b/robyn/router.py
index 655aa5258..ea1ba0742 100644
--- a/robyn/router.py
+++ b/robyn/router.py
@@ -12,7 +12,7 @@
 from robyn.dependency_injection import DependencyMap
 from robyn.jsonify import jsonify
 from robyn.responses import FileResponse
-from robyn.robyn import FunctionInfo, Headers, HttpMethod, Identity, MiddlewareType, QueryParams, Request, Response, Url
+from robyn.robyn import FunctionInfo, Headers, HttpMethod, Identity, MiddlewareType, QueryParams, Request, Response, StreamingResponse, Url
 from robyn.types import Body, Files, FormData, IPAddress, Method, PathParams
 from robyn.ws import WebSocket
 
@@ -47,12 +47,20 @@ def __init__(self) -> None:
         super().__init__()
         self.routes: List[Route] = []
 
-    def _format_tuple_response(self, res: tuple) -> Response:
+    def _format_tuple_response(self, res: tuple) -> Union[Response, StreamingResponse]:
         if len(res) != 3:
             raise ValueError("Tuple should have 3 elements")
 
         description, headers, status_code = res
-        description = self._format_response(description).description
+        formatted_response = self._format_response(description)
+        
+        # Handle StreamingResponse case
+        if isinstance(formatted_response, StreamingResponse):
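+            # merge tuple-provided headers over the response's own
+            # (assuming Headers.update follows dict.update semantics)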
+            formatted_response.headers.update(headers)
+            formatted_response.status_code = status_code
+            return formatted_response
+            
+        # Regular Response case
         new_headers: Headers = Headers(headers)
         if new_headers.contains("Content-Type"):
             headers.set("Content-Type", new_headers.get("Content-Type"))
@@ -60,16 +68,20 @@ def _format_tuple_response(self, res: tuple) -> Response:
         return Response(
             status_code=status_code,
             headers=headers,
-            description=description,
+            description=formatted_response.description,
         )
 
     def _format_response(
         self,
-        res: Union[Dict, Response, bytes, tuple, str],
-    ) -> Response:
+        res: Union[Dict, Response, StreamingResponse, bytes, tuple, str],
+    ) -> Union[Response, StreamingResponse]:
         if isinstance(res, Response):
             return res
 
+        # Special handling for StreamingResponse
+        if isinstance(res, StreamingResponse):
+            return res
+
         if isinstance(res, dict):
             return Response(
                 status_code=status_codes.HTTP_200_OK,
diff --git a/src/base_routes.rs b/src/base_routes.rs
new file mode 100644
index 000000000..318558c19
--- /dev/null
+++ b/src/base_routes.rs
@@ -0,0 +1,119 @@
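+//! Draft Actix handlers that bridge requests into Python: each tries to
+//! extract a StreamingResponse first, falls back to a plain Response, and
+//! maps any Python-side failure to a 500.
+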
+use actix_web::{web, HttpRequest, HttpResponse};
+use pyo3::prelude::*;
+use pyo3::types::PyDict;
+
+use crate::types::{Headers, Response, StreamingResponse};
+
+pub async fn handle_request(
+    req: HttpRequest,
+    path: web::Path<String>,
+    query: web::Query<std::collections::HashMap<String, String>>,
+    _payload: web::Payload,
+    app_state: web::Data<PyObject>,
+) -> HttpResponse {
+    let path = path.into_inner();
+    let query = query.into_inner();
+
+    Python::with_gil(|py| {
+        let app = app_state.as_ref();
+
+        // Convert query params to Python dict
+        let query_dict = PyDict::new(py);
+        for (key, value) in query {
+            query_dict.set_item(key, value).unwrap();
+        }
+
+        // Create headers dict
+        let headers = Headers::new(None);
+
+        // Call the route handler
+        let result = app.call_method1(
+            py,
+            "handle_request",
+            (path, req.method().as_str(), query_dict, headers),
+        );
+
+        match result {
+            Ok(response) => {
+                // Try to extract as StreamingResponse first
+                match response.extract::<StreamingResponse>(py) {
+                    Ok(streaming_response) => streaming_response.respond_to(&req),
+                    Err(_) => {
+                        // If not a StreamingResponse, try as regular Response
+                        match response.extract::<Response>(py) {
+                            Ok(response) => response.respond_to(&req),
+                            Err(_) => {
+                                // If extraction fails, return 500 error
+                                let headers = Headers::new(None);
+                                Response::internal_server_error(Some(&headers)).respond_to(&req)
+                            }
+                        }
+                    }
+                }
+            }
+            Err(_) => {
+                // Handle Python error by returning 500
+                let headers = Headers::new(None);
+                Response::internal_server_error(Some(&headers)).respond_to(&req)
+            }
+        }
+    })
+}
+
+pub async fn handle_request_with_body(
+    req: HttpRequest,
+    path: web::Path<String>,
+    query: web::Query<std::collections::HashMap<String, String>>,
+    payload: web::Payload,
+    app_state: web::Data<PyObject>,
+) -> HttpResponse {
+    let path = path.into_inner();
+    let query = query.into_inner();
+
+    Python::with_gil(|py| {
+        let app = app_state.as_ref();
+
+        // Convert query params to Python dict
+        let query_dict = PyDict::new(py);
+        for (key, value) in query {
+            query_dict.set_item(key, value).unwrap();
+        }
+
+        // Create headers dict
+        let headers = Headers::new(None);
+
+        // Call the route handler
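+        // NOTE: forwarding the raw actix `Payload` into the Python call below
+        // is a placeholder in this draft; a real implementation would buffer
+        // the body to bytes first.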
+        let result = app.call_method1(
+            py,
+            "handle_request_with_body",
+            (path, req.method().as_str(), query_dict, headers, payload),
+        );
+
+        match result {
+            Ok(response) => {
+                // Try to extract as StreamingResponse first
+                match response.extract::<StreamingResponse>(py) {
+                    Ok(streaming_response) => streaming_response.respond_to(&req),
+                    Err(_) => {
+                        // If not a StreamingResponse, try as regular Response
+                        match response.extract::<Response>(py) {
+                            Ok(response) => response.respond_to(&req),
+                            Err(_) => {
+                                // If extraction fails, return 500 error
+                                let headers = Headers::new(None);
+                                Response::internal_server_error(Some(&headers)).respond_to(&req)
+                            }
+                        }
+                    }
+                }
+            }
+            Err(_) => {
+                // Handle Python error by returning 500
+                let headers = Headers::new(None);
+                Response::internal_server_error(Some(&headers)).respond_to(&req)
+            }
+        }
+    })
+}
\ No newline at end of file
diff --git a/src/lib.rs b/src/lib.rs
index d8209d4d2..5ffc8800b 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -17,7 +17,7 @@ use types::{
     identity::Identity,
     multimap::QueryParams,
     request::PyRequest,
-    response::PyResponse,
+    response::{PyResponse, PyStreamingResponse},
     HttpMethod, Url,
 };
 
@@ -42,6 +42,7 @@ pub fn robyn(_py: Python<'_>, m: &PyModule) -> PyResult<()> {
     m.add_class::<Identity>()?;
     m.add_class::<PyRequest>()?;
     m.add_class::<PyResponse>()?;
+    m.add_class::<PyStreamingResponse>()?;
     m.add_class::<Url>()?;
     m.add_class::<QueryParams>()?;
     m.add_class::<MiddlewareType>()?;
diff --git a/src/types/mod.rs b/src/types/mod.rs
index a272ee682..eb1760a8f 100644
--- a/src/types/mod.rs
+++ b/src/types/mod.rs
@@ -1,4 +1,3 @@
-use log::debug;
 use pyo3::{
     exceptions::PyValueError,
     prelude::*,
@@ -79,8 +78,7 @@ pub fn get_body_from_pyobject(body: &PyAny) -> PyResult<Vec<u8>> {
     } else if let Ok(b) = body.downcast::<PyBytes>() {
         Ok(b.as_bytes().to_vec())
     } else {
-        debug!("Could not convert specified body to bytes");
-        Ok(vec![])
+        Err(PyValueError::new_err("Body must be either string or bytes"))
     }
 }
 
@@ -89,26 +87,31 @@ pub fn get_description_from_pyobject(description: &PyAny) -> PyResult<Vec<u8>> {
         Ok(s.to_string().into_bytes())
     } else if let Ok(b) = description.downcast::<PyBytes>() {
         Ok(b.as_bytes().to_vec())
+    } else if let Ok(i) = description.extract::<i64>() {
+        Ok(i.to_string().into_bytes())
     } else {
-        debug!("Could not convert specified response description to bytes");
-        Ok(vec![])
+        Err(PyValueError::new_err("Description must be string, bytes, or integer"))
     }
 }
 
 pub fn check_body_type(py: Python, body: &Py<PyAny>) -> PyResult<()> {
-    if body.downcast::<PyString>(py).is_err() && body.downcast::<PyBytes>(py).is_err() {
+    let body_ref = body.as_ref(py);
+    if !body_ref.is_instance_of::<PyString>() && !body_ref.is_instance_of::<PyBytes>() {
         return Err(PyValueError::new_err(
-            "Could not convert specified body to bytes",
+            "Body must be either string or bytes"
         ));
-    };
+    }
     Ok(())
 }
 
-pub fn check_description_type(py: Python, body: &Py<PyAny>) -> PyResult<()> {
-    if body.downcast::<PyString>(py).is_err() && body.downcast::<PyBytes>(py).is_err() {
+pub fn check_description_type(py: Python, description: &Py<PyAny>) -> PyResult<()> {
+    let desc_ref = description.as_ref(py);
+    if !desc_ref.is_instance_of::<PyString>() && 
+       !desc_ref.is_instance_of::<PyBytes>() && 
+       !desc_ref.is_instance_of::<pyo3::types::PyInt>() {
         return Err(PyValueError::new_err(
-            "Could not convert specified response description to bytes",
+            "Description must be string, bytes, or integer"
         ));
-    };
+    }
     Ok(())
 }
diff --git a/src/types/request.rs b/src/types/request.rs
index d5c4d9c57..8e42ae6d1 100644
--- a/src/types/request.rs
+++ b/src/types/request.rs
@@ -180,7 +180,7 @@ impl Request {
         Self {
             query_params,
             headers,
-            method: req.method().as_str().to_owned(),
+            method: req.method().as_str().to_string(),
             path_params: HashMap::new(),
             body,
             url,
diff --git a/src/types/response.rs b/src/types/response.rs
index 775a8a529..ce6a65d61 100644
--- a/src/types/response.rs
+++ b/src/types/response.rs
@@ -6,17 +6,15 @@ use pyo3::{
     types::{PyBytes, PyDict, PyList},
 };
 use futures::stream::Stream;
-use futures_util::StreamExt;
 use std::pin::Pin;
 
 use crate::io_helpers::{apply_hashmap_headers, read_file};
-use crate::types::{check_body_type, check_description_type, get_description_from_pyobject};
-
 use super::headers::Headers;
 
 #[derive(Debug, Clone)]
 pub enum ResponseBody {
-    Static(Vec<u8>),
+    Text(String),
+    Binary(Vec<u8>),
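+    // Kept for backwards compatibility; streaming now lives in
+    // StreamingResponse, and every match on this variant below rejects it.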
     Streaming(Vec<Vec<u8>>),
 }
 
@@ -29,41 +27,95 @@ pub struct Response {
     pub file_path: Option<String>,
 }
 
+#[derive(Debug, Clone)]
+pub struct StreamingResponse {
+    pub status_code: u16,
+    pub headers: Headers,
+    pub description: Py<PyAny>,
+    pub response_type: String,
+    pub file_path: Option<String>,
+}
+
 impl<'a> FromPyObject<'a> for Response {
     fn extract(ob: &'a PyAny) -> PyResult<Self> {
-        let status_code = ob.getattr("status_code")?.extract()?;
-        let response_type = ob.getattr("response_type")?.extract()?;
-        let headers = ob.getattr("headers")?.extract()?;
+        // First check if this is a streaming response by checking response_type
+        if let Ok(response_type) = ob.getattr("response_type")?.extract::<String>() {
+            if response_type == "stream" {
+                return Err(PyErr::new::<pyo3::exceptions::PyValueError, _>(
+                    "Use StreamingResponse for streaming data"
+                ));
+            }
+        }
+
+        let status_code: u16 = ob.getattr("status_code")?.extract()?;
+        let headers: Headers = ob.getattr("headers")?.extract()?;
         let description = ob.getattr("description")?;
-        let file_path = ob.getattr("file_path")?.extract()?;
-
-        let body = if let Ok(iter) = description.iter() {
-            let mut chunks = Vec::new();
-            for item in iter {
-                let item = item?;
-                let chunk = if item.is_instance_of::<pyo3::types::PyBytes>() {
-                    item.extract::<Vec<u8>>()?
-                } else if item.is_instance_of::<pyo3::types::PyString>() {
-                    item.extract::<String>()?.into_bytes()
-                } else if item.is_instance_of::<pyo3::types::PyInt>() {
-                    item.extract::<i64>()?.to_string().into_bytes()
-                } else {
-                    return Err(PyErr::new::<pyo3::exceptions::PyTypeError, _>(
-                        "Stream items must be bytes, str, or int"
-                    ));
-                };
-                chunks.push(chunk);
+        let file_path: Option<String> = ob.getattr("file_path")?.extract()?;
+
+        // For non-streaming responses, convert to appropriate type
+        if description.is_none() {
+            return Err(PyErr::new::<pyo3::exceptions::PyValueError, _>(
+                "Description cannot be None"
+            ));
+        } else if description.is_instance_of::<pyo3::types::PyBytes>() {
+            let body = ResponseBody::Binary(description.extract::<Vec<u8>>()?);
+            Ok(Response {
+                status_code,
+                response_type: "binary".to_string(),
+                headers,
+                body,
+                file_path,
+            })
+        } else if description.is_instance_of::<pyo3::types::PyString>() {
+            let body = ResponseBody::Text(description.extract::<String>()?);
+            Ok(Response {
+                status_code,
+                response_type: "text".to_string(),
+                headers,
+                body,
+                file_path,
+            })
+        } else {
+            // If description is not bytes or str, it might be a streaming response
+            Err(PyErr::new::<pyo3::exceptions::PyValueError, _>(
+                "Description must be bytes or str"
+            ))
+        }
+    }
+}
+
+impl<'a> FromPyObject<'a> for StreamingResponse {
+    fn extract(ob: &'a PyAny) -> PyResult<Self> {
+        // First check if this is a streaming response by checking response_type
+        if let Ok(response_type) = ob.getattr("response_type")?.extract::<String>() {
+            if response_type != "stream" {
+                return Err(PyErr::new::<pyo3::exceptions::PyValueError, _>(
+                    format!("Not a streaming response (response_type = {})", response_type)
+                ));
             }
-            ResponseBody::Streaming(chunks)
         } else {
-            ResponseBody::Static(get_description_from_pyobject(description)?)
-        };
+            return Err(PyErr::new::<pyo3::exceptions::PyValueError, _>(
+                "response_type must be a string"
+            ));
+        }
+
+        let status_code: u16 = ob.getattr("status_code")?.extract()?;
+        let headers: Headers = ob.getattr("headers")?.extract()?;
+        let description = ob.getattr("description")?;
+        let file_path: Option<String> = ob.getattr("file_path")?.extract()?;
+
+        // Check if description is a generator or iterator
+        if !description.hasattr("__iter__")? && !description.hasattr("__aiter__")? {
+            return Err(PyErr::new::<pyo3::exceptions::PyValueError, _>(
+                "Description must be an iterator or async iterator"
+            ));
+        }
 
-        Ok(Response {
+        Ok(StreamingResponse {
             status_code,
-            response_type,
             headers,
-            body,
+            description: description.into_py(ob.py()),
+            response_type: "stream".to_string(),
             file_path,
         })
     }
@@ -75,21 +127,119 @@ impl Responder for Response {
     fn respond_to(self, _req: &HttpRequest) -> HttpResponse<Self::Body> {
         let mut response_builder =
             HttpResponseBuilder::new(StatusCode::from_u16(self.status_code).unwrap());
+        
+        // Set content type based on body type if not already set
+        if !self.headers.headers.contains_key("content-type") {
+            match &self.body {
+                ResponseBody::Text(_) => {
+                    response_builder.insert_header(("content-type", "text/plain; charset=utf-8"));
+                }
+                ResponseBody::Binary(_) => {
+                    response_builder.insert_header(("content-type", "application/octet-stream"));
+                }
+                ResponseBody::Streaming(_) => {
+                    panic!("Use StreamingResponse for streaming data");
+                }
+            };
+        }
+        
+        // Apply headers after content-type
         apply_hashmap_headers(&mut response_builder, &self.headers);
         
         match self.body {
-            ResponseBody::Static(data) => response_builder.body(data),
-            ResponseBody::Streaming(chunks) => {
-                let stream = Box::pin(
-                    futures::stream::iter(chunks.into_iter())
-                        .map(|chunk| Ok::<Bytes, Error>(Bytes::from(chunk)))
-                ) as Pin<Box<dyn Stream<Item = Result<Bytes, Error>>>>;
-                response_builder.streaming(stream)
+            ResponseBody::Text(text) => response_builder.body(text),
+            ResponseBody::Binary(data) => response_builder.body(data),
+            ResponseBody::Streaming(_) => {
+                panic!("Use StreamingResponse for streaming data")
             }
         }
     }
 }
 
+impl Responder for StreamingResponse {
+    type Body = BoxBody;
+
+    fn respond_to(self, _req: &HttpRequest) -> HttpResponse<Self::Body> {
+        let mut response_builder =
+            HttpResponseBuilder::new(StatusCode::from_u16(self.status_code).unwrap());
+        
+        // Apply headers
+        apply_hashmap_headers(&mut response_builder, &self.headers);
+        
+        // Create streaming body
+        let description = self.description;
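+        // unfold re-enters Python on every poll; this assumes `description`
+        // is a generator (an iterator that returns itself from __iter__), so
+        // repeated iter() calls resume the same object instead of restarting.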
+        let stream = Box::pin(futures::stream::unfold(description, move |description| {
+            Box::pin(async move {
+                let result = Python::with_gil(|py| {
+                    let desc = description.as_ref(py);
+                    
+                    // Handle sync iterator
+                    if desc.hasattr("__iter__").unwrap_or(false) {
+                        if let Ok(mut iter) = desc.iter() {
+                            if let Some(Ok(item)) = iter.next() {
+                                let chunk = if item.is_instance_of::<pyo3::types::PyBytes>() {
+                                    item.extract::<Vec<u8>>().ok()
+                                } else if item.is_instance_of::<pyo3::types::PyString>() {
+                                    item.extract::<String>().ok().map(|s| s.into_bytes())
+                                } else if item.is_instance_of::<pyo3::types::PyInt>() {
+                                    item.extract::<i64>().ok().map(|i| i.to_string().into_bytes())
+                                } else {
+                                    None
+                                };
+                                
+                                if let Some(chunk) = chunk {
+                                    return Some((Ok(Bytes::from(chunk)), description));
+                                }
+                            }
+                        }
+                    }
+                    // Handle async generator
+                    else if desc.hasattr("__aiter__").unwrap_or(false) {
+                        if let Ok(agen) = desc.call_method0("__aiter__") {
+                            if let Ok(anext) = agen.call_method0("__anext__") {
+                                // Convert Python awaitable to Rust Future
+                                if let Ok(future) = pyo3_asyncio::tokio::into_future(anext) {
+                                    // Block on the awaitable to pull the next chunk
+                                    let handle = tokio::runtime::Handle::current();
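+                                    // CAUTION: Handle::block_on panics on a runtime
+                                    // worker thread; this draft assumes respond_to is
+                                    // driven from outside the async executor.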
+                                    match handle.block_on(future) {
+                                        Ok(item) => {
+                                            let chunk = Python::with_gil(|py| {
+                                                let item = item.as_ref(py);
+                                                if item.is_none() {
+                                                    return None;
+                                                }
+                                                
+                                                if item.is_instance_of::<pyo3::types::PyBytes>() {
+                                                    item.extract::<Vec<u8>>().ok()
+                                                } else if item.is_instance_of::<pyo3::types::PyString>() {
+                                                    item.extract::<String>().ok().map(|s| s.into_bytes())
+                                                } else if item.is_instance_of::<pyo3::types::PyInt>() {
+                                                    item.extract::<i64>().ok().map(|i| i.to_string().into_bytes())
+                                                } else {
+                                                    None
+                                                }
+                                            });
+
+                                            if let Some(chunk) = chunk {
+                                                return Some((Ok(Bytes::from(chunk)), description));
+                                            }
+                                        }
+                                        Err(_) => return None
+                                    }
+                                }
+                            }
+                        }
+                    }
+                    None
+                });
+                result
+            })
+        })) as Pin<Box<dyn Stream<Item = Result<Bytes, Error>>>>;
+        
+        response_builder.streaming(stream)
+    }
+}
+
 impl Response {
     pub fn not_found(headers: Option<&Headers>) -> Self {
         let headers = match headers {
@@ -101,7 +251,7 @@ impl Response {
             status_code: 404,
             response_type: "text".to_string(),
             headers,
-            body: ResponseBody::Static("Not found".to_owned().into_bytes()),
+            body: ResponseBody::Text("Not found".to_string()),
             file_path: None,
         }
     }
@@ -116,7 +266,7 @@ impl Response {
             status_code: 500,
             response_type: "text".to_string(),
             headers,
-            body: ResponseBody::Static("Internal server error".to_owned().into_bytes()),
+            body: ResponseBody::Text("Internal server error".to_string()),
             file_path: None,
         }
     }
@@ -127,18 +277,10 @@ impl ToPyObject for Response {
         let headers = self.headers.clone().into_py(py).extract(py).unwrap();
         
         let description = match &self.body {
-            ResponseBody::Static(data) => {
-                match String::from_utf8(data.to_vec()) {
-                    Ok(description) => description.to_object(py),
-                    Err(_) => PyBytes::new(py, data).into(),
-                }
-            },
-            ResponseBody::Streaming(chunks) => {
-                let list = PyList::empty(py);
-                for chunk in chunks {
-                    list.append(PyBytes::new(py, chunk)).unwrap();
-                }
-                list.to_object(py)
+            ResponseBody::Text(text) => text.clone().into_py(py),
+            ResponseBody::Binary(data) => PyBytes::new(py, data).into(),
+            ResponseBody::Streaming(_) => {
+                panic!("Use StreamingResponse for streaming data")
             }
         };
 
@@ -153,6 +295,21 @@ impl ToPyObject for Response {
     }
 }
 
+impl ToPyObject for StreamingResponse {
+    fn to_object(&self, py: Python) -> PyObject {
+        let headers = self.headers.clone().into_py(py).extract(py).unwrap();
+        
+        let response = PyStreamingResponse {
+            status_code: self.status_code,
+            headers,
+            description: self.description.clone_ref(py),
+            response_type: self.response_type.clone(),
+            file_path: self.file_path.clone(),
+        };
+        Py::new(py, response).unwrap().as_ref(py).into()
+    }
+}
+
 #[pyclass(name = "Response")]
 #[derive(Debug, Clone)]
 pub struct PyResponse {
@@ -168,9 +325,59 @@ pub struct PyResponse {
     pub file_path: Option<String>,
 }
 
+#[pyclass(name = "StreamingResponse")]
+#[derive(Debug, Clone)]
+pub struct PyStreamingResponse {
+    #[pyo3(get)]
+    pub status_code: u16,
+    #[pyo3(get)]
+    pub headers: Py<Headers>,
+    #[pyo3(get)]
+    pub description: Py<PyAny>,
+    #[pyo3(get)]
+    pub response_type: String,
+    #[pyo3(get)]
+    pub file_path: Option<String>,
+}
+
+#[pymethods]
+impl PyStreamingResponse {
+    #[new]
+    #[pyo3(signature = (status_code=200, description=None, headers=None))]
+    pub fn new(py: Python, status_code: u16, description: Option<Py<PyAny>>, headers: Option<&PyAny>) -> PyResult<Self> {
+        let headers_output: Py<Headers> = if let Some(headers) = headers {
+            if let Ok(headers_dict) = headers.downcast::<PyDict>() {
+                let headers = Headers::new(Some(headers_dict));
+                Py::new(py, headers)?
+            } else if let Ok(headers) = headers.extract::<Py<Headers>>() {
+                headers
+            } else {
+                return Err(PyErr::new::<pyo3::exceptions::PyTypeError, _>(
+                    "headers must be a Headers instance or a dict",
+                ));
+            }
+        } else {
+            let headers = Headers::new(None);
+            Py::new(py, headers)?
+        };
+
+        let description = match description {
+            Some(d) => d,
+            None => PyList::empty(py).into(),
+        };
+
+        Ok(Self {
+            status_code,
+            headers: headers_output,
+            description,
+            response_type: "stream".to_string(),
+            file_path: None,
+        })
+    }
+}
+
 #[pymethods]
 impl PyResponse {
-    // To do: Add check for content-type in header and change response_type accordingly
     #[new]
     pub fn new(
         py: Python,
@@ -178,16 +385,21 @@ impl PyResponse {
         headers: &PyAny,
         description: Py<PyAny>,
     ) -> PyResult<Self> {
-        // Check if description is an iterator/generator
-        let is_stream = Python::with_gil(|py| {
-            description.as_ref(py).iter().is_ok()
-        });
-
-        if is_stream {
-            // For streaming responses, we don't need to check body type
-            // as we'll validate each chunk when it's yielded
-        } else {
-            check_body_type(py, &description)?;
+        // Validate description type first
+        let desc = description.as_ref(py);
+        if desc.is_none() {
+            return Err(PyErr::new::<pyo3::exceptions::PyValueError, _>(
+                "Description cannot be None"
+            ));
+        }
+        
+        // Only allow string or bytes
+        if !desc.is_instance_of::<pyo3::types::PyBytes>()
+            && !desc.is_instance_of::<pyo3::types::PyString>()
+        {
+            return Err(PyErr::new::<pyo3::exceptions::PyValueError, _>(
+                "Description must be bytes or str"
+            ));
         }
 
         let headers_output: Py<Headers> = if let Ok(headers_dict) = headers.downcast::<PyDict>() {
@@ -201,9 +413,16 @@ impl PyResponse {
             ));
         };
 
+        // Infer the response type: bytes -> binary, anything else (str) -> text
+        let response_type = if desc.is_instance_of::<pyo3::types::PyBytes>() {
+            "binary".to_string()
+        } else {
+            "text".to_string()
+        };
+
         Ok(Self {
             status_code,
-            response_type: if is_stream { "stream".to_string() } else { "text".to_string() },
+            response_type,
             headers: headers_output,
             description,
             file_path: None,
@@ -212,24 +431,38 @@ impl PyResponse {
 
     #[setter]
     pub fn set_description(&mut self, py: Python, description: Py<PyAny>) -> PyResult<()> {
-        // Check if description is an iterator/generator
-        let is_stream = description.as_ref(py).iter().is_ok();
-
-        if is_stream {
-            self.response_type = "stream".to_string();
-        } else {
-            check_description_type(py, &description)?;
-            self.response_type = "text".to_string();
+        // Validate description type
+        let desc = description.as_ref(py);
+        if desc.is_none() {
+            return Err(PyErr::new::<pyo3::exceptions::PyValueError, _>(
+                "Description cannot be None"
+            ));
         }
         
+        // Only allow string or bytes
+        if !desc.is_instance_of::<pyo3::types::PyBytes>()
+            && !desc.is_instance_of::<pyo3::types::PyString>()
+        {
+            return Err(PyErr::new::<pyo3::exceptions::PyValueError, _>(
+                "Description must be bytes or str"
+            ));
+        }
+
+        // Update response type based on new description
+        self.response_type = if desc.is_instance_of::<pyo3::types::PyBytes>() {
+            "binary".to_string()
+        } else {
+            "text".to_string()
+        };
+
         self.description = description;
         Ok(())
     }
 
     #[setter]
     pub fn set_file_path(&mut self, py: Python, file_path: &str) -> PyResult<()> {
-        self.response_type = "static_file".to_string();
         self.file_path = Some(file_path.to_string());
+        self.response_type = "binary".to_string();
 
         match read_file(file_path) {
             Ok(content) => {

From b29793486079eb07d410ed720c38772f53741374 Mon Sep 17 00:00:00 2001
From: Sanskar Jethi <sansyrox@gmail.com>
Date: Wed, 18 Dec 2024 00:36:22 +0000
Subject: [PATCH 09/14] update

---
 integration_tests/base_routes.py |  38 ++--
 robyn/__init__.py                | 149 ++++++++++---
 robyn/processpool.py             |   3 +-
 robyn/responses.py               |  22 +-
 robyn/robyn.pyi                  |   6 +-
 robyn/router.py                  |  54 +++--
 src/base_routes.rs               |  40 ++--
 src/lib.rs                       |   3 +-
 src/types/response.rs            | 368 ++++++++++++-------------------
 9 files changed, 336 insertions(+), 347 deletions(-)

diff --git a/integration_tests/base_routes.py b/integration_tests/base_routes.py
index 44e512a01..d9000cee4 100644
--- a/integration_tests/base_routes.py
+++ b/integration_tests/base_routes.py
@@ -5,7 +5,7 @@
 
 from integration_tests.subroutes import di_subrouter, sub_router
 from integration_tests.views import AsyncView, SyncView
-from robyn import Headers, Request, Response, Robyn, WebSocket, WebSocketConnector, jsonify, serve_file, serve_html, StreamingResponse
+from robyn import Headers, Request, Response, Robyn, WebSocket, WebSocketConnector, jsonify, serve_file, serve_html
 from robyn.authentication import AuthenticationHandler, BearerGetter, Identity
 from robyn.robyn import QueryParams, Url
 from robyn.templating import JinjaTemplate
@@ -807,12 +807,12 @@ async def async_without_decorator():
     return "Success!"
 
 
-app.add_route("GET", "/sync/get/no_dec", sync_without_decorator)
-app.add_route("PUT", "/sync/put/no_dec", sync_without_decorator)
-app.add_route("POST", "/sync/post/no_dec", sync_without_decorator)
-app.add_route("GET", "/async/get/no_dec", async_without_decorator)
-app.add_route("PUT", "/async/put/no_dec", async_without_decorator)
-app.add_route("POST", "/async/post/no_dec", async_without_decorator)
+app.add_route(route_type="GET", endpoint="/sync/get/no_dec", handler=sync_without_decorator)
+app.add_route(route_type="PUT", endpoint="/sync/put/no_dec", handler=sync_without_decorator)
+app.add_route(route_type="POST", endpoint="/sync/post/no_dec", handler=sync_without_decorator)
+app.add_route(route_type="GET", endpoint="/async/get/no_dec", handler=async_without_decorator)
+app.add_route(route_type="PUT", endpoint="/async/put/no_dec", handler=async_without_decorator)
+app.add_route(route_type="POST", endpoint="/async/post/no_dec", handler=async_without_decorator)
 
 # ===== Dependency Injection =====
 
@@ -1090,32 +1090,32 @@ def create_item(request, body: CreateItemBody, query: CreateItemQueryParamsParam
 
 # --- Streaming responses ---
 
-@app.get("/stream/sync")
+@app.get("/stream/sync", streaming=True)
 async def sync_stream():
     def generator():
         for i in range(5):
             yield f"Chunk {i}\n".encode()
     
     headers = Headers({"Content-Type": "text/plain"})
-    return StreamingResponse(
+    return Response(
         status_code=200,
         description=generator(),
         headers=headers
     )
 
-@app.get("/stream/async")
+@app.get("/stream/async", streaming=True)
 async def async_stream():
     async def generator():
         for i in range(5):
             yield f"Async Chunk {i}\n".encode()
     
-    return StreamingResponse(
+    return Response(
         status_code=200,
         headers={"Content-Type": "text/plain"},
         description=generator()
     )
 
-@app.get("/stream/mixed")
+@app.get("/stream/mixed", streaming=True)
 async def mixed_stream():
     async def generator():
         yield b"Binary chunk\n"
@@ -1123,13 +1123,13 @@ async def generator():
         yield str(42).encode() + b"\n"
         yield json.dumps({"message": "JSON chunk", "number": 123}).encode() + b"\n"
     
-    return StreamingResponse(
+    return Response(
         status_code=200,
         headers={"Content-Type": "text/plain"},
         description=generator()
     )
 
-@app.get("/stream/events")
+@app.get("/stream/events", streaming=True)
 async def server_sent_events():
     async def event_generator():
         import asyncio
@@ -1148,7 +1148,7 @@ async def event_generator():
         data = json.dumps({'status': 'complete', 'results': [1, 2, 3]}, indent=2)
         yield f"event: complete\ndata: {data}\n\n".encode()
     
-    return StreamingResponse(
+    return Response(
         status_code=200,
         headers={
             "Content-Type": "text/event-stream",
@@ -1158,7 +1158,7 @@ async def event_generator():
         description=event_generator()
     )
 
-@app.get("/stream/large-file")
+@app.get("/stream/large-file", streaming=True)
 async def stream_large_file():
     async def file_generator():
         # Simulate streaming a large file in chunks
@@ -1170,7 +1170,7 @@ async def file_generator():
             chunk = b"X" * min(chunk_size, total_size - offset)
             yield chunk
     
-    return StreamingResponse(
+    return Response(
         status_code=200,
         headers={
             "Content-Type": "application/octet-stream",
@@ -1179,7 +1179,7 @@ async def file_generator():
         description=file_generator()
     )
 
-@app.get("/stream/csv")
+@app.get("/stream/csv", streaming=True)
 async def stream_csv():
     async def csv_generator():
         # CSV header
@@ -1194,7 +1194,7 @@ async def csv_generator():
             row = f"{i},item-{i},{random.randint(1, 100)}\n"
             yield row.encode()
     
-    return StreamingResponse(
+    return Response(
         status_code=200,
         headers={
             "Content-Type": "text/csv",
diff --git a/robyn/__init__.py b/robyn/__init__.py
index 849e28023..f2c5369b2 100644
--- a/robyn/__init__.py
+++ b/robyn/__init__.py
@@ -20,7 +20,7 @@
 from robyn.processpool import run_processes
 from robyn.reloader import compile_rust_files
 from robyn.responses import html, serve_file, serve_html
-from robyn.robyn import FunctionInfo, Headers, HttpMethod, Request, Response, WebSocketConnector, get_version, StreamingResponse
+from robyn.robyn import FunctionInfo, Headers, HttpMethod, Request, Response, WebSocketConnector, get_version
 from robyn.router import MiddlewareRouter, MiddlewareType, Router, WebSocketRouter
 from robyn.types import Directory
 from robyn.ws import WebSocket
@@ -97,10 +97,12 @@ def _handle_dev_mode(self):
 
     def add_route(
         self,
+        *,
         route_type: Union[HttpMethod, str],
         endpoint: str,
         handler: Callable,
         is_const: bool = False,
+        streaming: bool = False,
         auth_required: bool = False,
     ):
         """
@@ -110,11 +112,10 @@ def add_route(
         :param endpoint str: endpoint for the route added
         :param handler function: represents the sync or async function passed as a handler for the route
         :param is_const bool: represents if the handler is a const function or not
+        :param streaming bool: represents if the response should be streamed
         :param auth_required bool: represents if the route needs authentication or not
         """
 
-        """ We will add the status code here only
-        """
         injected_dependencies = self.dependencies.get_dependency_map(self)
 
         if auth_required:
@@ -137,6 +138,7 @@ def add_route(
             endpoint=endpoint,
             handler=handler,
             is_const=is_const,
+            streaming=streaming,
             exception_handler=self.exception_handler,
             injected_dependencies=injected_dependencies,
         )
@@ -336,7 +338,14 @@ def get_functions(view) -> List[Tuple[HttpMethod, Callable]]:
 
         handlers = get_functions(view)
         for route_type, handler in handlers:
-            self.add_route(route_type, endpoint, handler, const)
+            self.add_route(
+                route_type=route_type,
+                endpoint=endpoint,
+                handler=handler,
+                is_const=const,
+                streaming=False,
+                auth_required=False
+            )
 
     def view(self, endpoint: str, const: bool = False):
         """
@@ -354,6 +363,7 @@ def get(
         self,
         endpoint: str,
         const: bool = False,
+        streaming: bool = False,
         auth_required: bool = False,
         openapi_name: str = "",
         openapi_tags: List[str] = ["get"],
@@ -363,6 +373,7 @@ def get(
 
         :param endpoint str: endpoint for the route added
         :param const bool: represents if the handler is a const function or not
+        :param streaming bool: represents if the response should be streamed
         :param auth_required bool: represents if the route needs authentication or not
         :param openapi_name: str -- the name of the endpoint in the openapi spec
         :param openapi_tags: List[str] -- for grouping of endpoints in the openapi spec
@@ -371,13 +382,21 @@ def get(
         def inner(handler):
             self.openapi.add_openapi_path_obj("get", endpoint, openapi_name, openapi_tags, handler)
 
-            return self.add_route(HttpMethod.GET, endpoint, handler, const, auth_required)
+            return self.add_route(
+                route_type=HttpMethod.GET,
+                endpoint=endpoint,
+                handler=handler,
+                is_const=const,
+                streaming=streaming,
+                auth_required=auth_required
+            )
 
         return inner
 
     def post(
         self,
         endpoint: str,
+        streaming: bool = False,
         auth_required: bool = False,
         openapi_name: str = "",
         openapi_tags: List[str] = ["post"],
@@ -386,6 +405,7 @@ def post(
         The @app.post decorator to add a route with POST method
 
         :param endpoint str: endpoint for the route added
+        :param streaming bool: represents if the response should be streamed
         :param auth_required bool: represents if the route needs authentication or not
         :param openapi_name: str -- the name of the endpoint in the openapi spec
         :param openapi_tags: List[str] -- for grouping of endpoints in the openapi spec
@@ -394,13 +414,21 @@ def post(
         def inner(handler):
             self.openapi.add_openapi_path_obj("post", endpoint, openapi_name, openapi_tags, handler)
 
-            return self.add_route(HttpMethod.POST, endpoint, handler, auth_required=auth_required)
+            return self.add_route(
+                route_type=HttpMethod.POST,
+                endpoint=endpoint,
+                handler=handler,
+                is_const=False,
+                streaming=streaming,
+                auth_required=auth_required
+            )
 
         return inner
 
     def put(
         self,
         endpoint: str,
+        streaming: bool = False,
         auth_required: bool = False,
         openapi_name: str = "",
         openapi_tags: List[str] = ["put"],
@@ -409,6 +437,7 @@ def put(
-        The @app.put decorator to add a get route with PUT method
+        The @app.put decorator to add a route with PUT method
 
         :param endpoint str: endpoint for the route added
+        :param streaming bool: represents if the response should be streamed
         :param auth_required bool: represents if the route needs authentication or not
         :param openapi_name: str -- the name of the endpoint in the openapi spec
         :param openapi_tags: List[str] -- for grouping of endpoints in the openapi spec
@@ -417,13 +446,21 @@ def put(
         def inner(handler):
             self.openapi.add_openapi_path_obj("put", endpoint, openapi_name, openapi_tags, handler)
 
-            return self.add_route(HttpMethod.PUT, endpoint, handler, auth_required=auth_required)
+            return self.add_route(
+                route_type=HttpMethod.PUT,
+                endpoint=endpoint,
+                handler=handler,
+                is_const=False,
+                streaming=streaming,
+                auth_required=auth_required
+            )
 
         return inner
 
     def delete(
         self,
         endpoint: str,
+        streaming: bool = False,
         auth_required: bool = False,
         openapi_name: str = "",
         openapi_tags: List[str] = ["delete"],
@@ -432,6 +469,7 @@ def delete(
         The @app.delete decorator to add a route with DELETE method
 
         :param endpoint str: endpoint for the route added
+        :param streaming bool: represents if the response should be streamed
         :param auth_required bool: represents if the route needs authentication or not
         :param openapi_name: str -- the name of the endpoint in the openapi spec
         :param openapi_tags: List[str] -- for grouping of endpoints in the openapi spec
@@ -440,13 +478,21 @@ def delete(
         def inner(handler):
             self.openapi.add_openapi_path_obj("delete", endpoint, openapi_name, openapi_tags, handler)
 
-            return self.add_route(HttpMethod.DELETE, endpoint, handler, auth_required=auth_required)
+            return self.add_route(
+                route_type=HttpMethod.DELETE,
+                endpoint=endpoint,
+                handler=handler,
+                is_const=False,
+                streaming=streaming,
+                auth_required=auth_required
+            )
 
         return inner
 
     def patch(
         self,
         endpoint: str,
+        streaming: bool = False,
         auth_required: bool = False,
         openapi_name: str = "",
         openapi_tags: List[str] = ["patch"],
@@ -455,6 +501,7 @@ def patch(
         The @app.patch decorator to add a route with PATCH method
 
         :param endpoint str: endpoint for the route added
+        :param streaming bool: represents if the response should be streamed
         :param auth_required bool: represents if the route needs authentication or not
         :param openapi_name: str -- the name of the endpoint in the openapi spec
         :param openapi_tags: List[str] -- for grouping of endpoints in the openapi spec
@@ -463,13 +510,21 @@ def patch(
         def inner(handler):
             self.openapi.add_openapi_path_obj("patch", endpoint, openapi_name, openapi_tags, handler)
 
-            return self.add_route(HttpMethod.PATCH, endpoint, handler, auth_required=auth_required)
+            return self.add_route(
+                route_type=HttpMethod.PATCH,
+                endpoint=endpoint,
+                handler=handler,
+                is_const=False,
+                streaming=streaming,
+                auth_required=auth_required
+            )
 
         return inner
 
     def head(
         self,
         endpoint: str,
+        streaming: bool = False,
         auth_required: bool = False,
         openapi_name: str = "",
         openapi_tags: List[str] = ["head"],
@@ -478,6 +533,7 @@ def head(
         The @app.head decorator to add a route with HEAD method
 
         :param endpoint str: endpoint for the route added
+        :param streaming bool: represents if the response should be streamed
         :param auth_required bool: represents if the route needs authentication or not
         :param openapi_name: str -- the name of the endpoint in the openapi spec
         :param openapi_tags: List[str] -- for grouping of endpoints in the openapi spec
@@ -486,13 +542,21 @@ def head(
         def inner(handler):
             self.openapi.add_openapi_path_obj("head", endpoint, openapi_name, openapi_tags, handler)
 
-            return self.add_route(HttpMethod.HEAD, endpoint, handler, auth_required=auth_required)
+            return self.add_route(
+                route_type=HttpMethod.HEAD,
+                endpoint=endpoint,
+                handler=handler,
+                is_const=False,
+                streaming=streaming,
+                auth_required=auth_required
+            )
 
         return inner
 
     def options(
         self,
         endpoint: str,
+        streaming: bool = False,
         auth_required: bool = False,
         openapi_name: str = "",
         openapi_tags: List[str] = ["options"],
@@ -501,6 +565,7 @@ def options(
         The @app.options decorator to add a route with OPTIONS method
 
         :param endpoint str: endpoint for the route added
+        :param streaming bool: represents if the response should be streamed
         :param auth_required bool: represents if the route needs authentication or not
         :param openapi_name: str -- the name of the endpoint in the openapi spec
         :param openapi_tags: List[str] -- for grouping of endpoints in the openapi spec
@@ -509,13 +574,21 @@ def options(
         def inner(handler):
             self.openapi.add_openapi_path_obj("options", endpoint, openapi_name, openapi_tags, handler)
 
-            return self.add_route(HttpMethod.OPTIONS, endpoint, handler, auth_required=auth_required)
+            return self.add_route(
+                route_type=HttpMethod.OPTIONS,
+                endpoint=endpoint,
+                handler=handler,
+                is_const=False,
+                streaming=streaming,
+                auth_required=auth_required
+            )
 
         return inner
 
     def connect(
         self,
         endpoint: str,
+        streaming: bool = False,
         auth_required: bool = False,
         openapi_name: str = "",
         openapi_tags: List[str] = ["connect"],
@@ -524,6 +597,7 @@ def connect(
         The @app.connect decorator to add a route with CONNECT method
 
         :param endpoint str: endpoint for the route added
+        :param streaming bool: represents if the response should be streamed
         :param auth_required bool: represents if the route needs authentication or not
         :param openapi_name: str -- the name of the endpoint in the openapi spec
         :param openapi_tags: List[str] -- for grouping of endpoints in the openapi spec
@@ -531,13 +605,21 @@ def connect(
 
         def inner(handler):
             self.openapi.add_openapi_path_obj("connect", endpoint, openapi_name, openapi_tags, handler)
-            return self.add_route(HttpMethod.CONNECT, endpoint, handler, auth_required=auth_required)
+            return self.add_route(
+                route_type=HttpMethod.CONNECT,
+                endpoint=endpoint,
+                handler=handler,
+                is_const=False,
+                streaming=streaming,
+                auth_required=auth_required
+            )
 
         return inner
 
     def trace(
         self,
         endpoint: str,
+        streaming: bool = False,
         auth_required: bool = False,
         openapi_name: str = "",
         openapi_tags: List[str] = ["trace"],
@@ -546,6 +628,7 @@ def trace(
         The @app.trace decorator to add a route with TRACE method
 
         :param endpoint str: endpoint for the route added
+        :param streaming bool: represents if the response should be streamed
         :param auth_required bool: represents if the route needs authentication or not
         :param openapi_name: str -- the name of the endpoint in the openapi spec
         :param openapi_tags: List[str] -- for grouping of endpoints in the openapi spec
@@ -554,7 +637,14 @@ def trace(
         def inner(handler):
             self.openapi.add_openapi_path_obj("trace", endpoint, openapi_name, openapi_tags, handler)
 
-            return self.add_route(HttpMethod.TRACE, endpoint, handler, auth_required=auth_required)
+            return self.add_route(
+                route_type=HttpMethod.TRACE,
+                endpoint=endpoint,
+                handler=handler,
+                is_const=False,
+                streaming=streaming,
+                auth_required=auth_required
+            )
 
         return inner
 
@@ -596,29 +686,29 @@ def __init__(self, file_object: str, prefix: str = "", config: Config = Config()
     def __add_prefix(self, endpoint: str):
         return f"{self.prefix}{endpoint}"
 
-    def get(self, endpoint: str, const: bool = False, auth_required: bool = False, openapi_name: str = "", openapi_tags: List[str] = ["get"]):
-        return super().get(endpoint=self.__add_prefix(endpoint), const=const, auth_required=auth_required, openapi_name=openapi_name, openapi_tags=openapi_tags)
+    def get(self, endpoint: str, const: bool = False, streaming: bool = False, auth_required: bool = False, openapi_name: str = "", openapi_tags: List[str] = ["get"]):
+        return super().get(endpoint=self.__add_prefix(endpoint), const=const, streaming=streaming, auth_required=auth_required, openapi_name=openapi_name, openapi_tags=openapi_tags)
 
-    def post(self, endpoint: str, auth_required: bool = False, openapi_name: str = "", openapi_tags: List[str] = ["post"]):
-        return super().post(endpoint=self.__add_prefix(endpoint), auth_required=auth_required, openapi_name=openapi_name, openapi_tags=openapi_tags)
+    def post(self, endpoint: str, streaming: bool = False, auth_required: bool = False, openapi_name: str = "", openapi_tags: List[str] = ["post"]):
+        return super().post(endpoint=self.__add_prefix(endpoint), streaming=streaming, auth_required=auth_required, openapi_name=openapi_name, openapi_tags=openapi_tags)
 
-    def put(self, endpoint: str, auth_required: bool = False, openapi_name: str = "", openapi_tags: List[str] = ["put"]):
-        return super().put(endpoint=self.__add_prefix(endpoint), auth_required=auth_required, openapi_name=openapi_name, openapi_tags=openapi_tags)
+    def put(self, endpoint: str, streaming: bool = False, auth_required: bool = False, openapi_name: str = "", openapi_tags: List[str] = ["put"]):
+        return super().put(endpoint=self.__add_prefix(endpoint), streaming=streaming, auth_required=auth_required, openapi_name=openapi_name, openapi_tags=openapi_tags)
 
-    def delete(self, endpoint: str, auth_required: bool = False, openapi_name: str = "", openapi_tags: List[str] = ["delete"]):
-        return super().delete(endpoint=self.__add_prefix(endpoint), auth_required=auth_required, openapi_name=openapi_name, openapi_tags=openapi_tags)
+    def delete(self, endpoint: str, streaming: bool = False, auth_required: bool = False, openapi_name: str = "", openapi_tags: List[str] = ["delete"]):
+        return super().delete(endpoint=self.__add_prefix(endpoint), streaming=streaming, auth_required=auth_required, openapi_name=openapi_name, openapi_tags=openapi_tags)
 
-    def patch(self, endpoint: str, auth_required: bool = False, openapi_name: str = "", openapi_tags: List[str] = ["patch"]):
-        return super().patch(endpoint=self.__add_prefix(endpoint), auth_required=auth_required, openapi_name=openapi_name, openapi_tags=openapi_tags)
+    def patch(self, endpoint: str, streaming: bool = False, auth_required: bool = False, openapi_name: str = "", openapi_tags: List[str] = ["patch"]):
+        return super().patch(endpoint=self.__add_prefix(endpoint), streaming=streaming, auth_required=auth_required, openapi_name=openapi_name, openapi_tags=openapi_tags)
 
-    def head(self, endpoint: str, auth_required: bool = False, openapi_name: str = "", openapi_tags: List[str] = ["head"]):
-        return super().head(endpoint=self.__add_prefix(endpoint), auth_required=auth_required, openapi_name=openapi_name, openapi_tags=openapi_tags)
+    def head(self, endpoint: str, streaming: bool = False, auth_required: bool = False, openapi_name: str = "", openapi_tags: List[str] = ["head"]):
+        return super().head(endpoint=self.__add_prefix(endpoint), streaming=streaming, auth_required=auth_required, openapi_name=openapi_name, openapi_tags=openapi_tags)
 
-    def trace(self, endpoint: str, auth_required: bool = False, openapi_name: str = "", openapi_tags: List[str] = ["trace"]):
-        return super().trace(endpoint=self.__add_prefix(endpoint), auth_required=auth_required, openapi_name=openapi_name, openapi_tags=openapi_tags)
+    def trace(self, endpoint: str, streaming: bool = False, auth_required: bool = False, openapi_name: str = "", openapi_tags: List[str] = ["trace"]):
+        return super().trace(endpoint=self.__add_prefix(endpoint), streaming=streaming, auth_required=auth_required, openapi_name=openapi_name, openapi_tags=openapi_tags)
 
-    def options(self, endpoint: str, auth_required: bool = False, openapi_name: str = "", openapi_tags: List[str] = ["options"]):
-        return super().options(endpoint=self.__add_prefix(endpoint), auth_required=auth_required, openapi_name=openapi_name, openapi_tags=openapi_tags)
+    def options(self, endpoint: str, streaming: bool = False, auth_required: bool = False, openapi_name: str = "", openapi_tags: List[str] = ["options"]):
+        return super().options(endpoint=self.__add_prefix(endpoint), streaming=streaming, auth_required=auth_required, openapi_name=openapi_name, openapi_tags=openapi_tags)
 
 
 def ALLOW_CORS(app: Robyn, origins: Union[List[str], str]):
@@ -673,7 +763,6 @@ def cors_middleware(request):
     "Robyn",
     "Request",
     "Response",
-    "StreamingResponse",
     "status_codes",
     "jsonify",
     "serve_file",
diff --git a/robyn/processpool.py b/robyn/processpool.py
index f1b0839f6..ebd2ad9ac 100644
--- a/robyn/processpool.py
+++ b/robyn/processpool.py
@@ -182,7 +182,8 @@ def spawn_process(
     server.set_response_headers_exclude_paths(excluded_response_headers_paths)
 
     for route in routes:
-        route_type, endpoint, function, is_const = route
+        # The route tuple now carries a fifth `streaming` element; the Rust server
+        # only needs the first four, since streaming is flagged on the Response itself.
+        route_type, endpoint, function, is_const = route[:4]
         server.add_route(route_type, endpoint, function, is_const)
 
     for middleware_type, middleware_function in global_middlewares:
diff --git a/robyn/responses.py b/robyn/responses.py
index 07726db10..dd081d0d3 100644
--- a/robyn/responses.py
+++ b/robyn/responses.py
@@ -1,6 +1,6 @@
 import mimetypes
 import os
-from typing import Optional, Any
+from typing import Optional, Any, Union, Callable, Iterator, AsyncIterator
 
 from robyn.robyn import Headers, Response
 
@@ -18,30 +18,18 @@ def __init__(
         self.headers = headers or Headers({"Content-Disposition": "attachment"})
 
 
-class StreamingResponse:
-    def __init__(
-        self,
-        status_code: int = 200,
-        description: Optional[Any] = None,
-        headers: Optional[Headers] = None,
-    ):
-        self.status_code = status_code
-        self.description = description or []
-        self.headers = headers or Headers({})
-        self.response_type = "stream"
-        self.file_path = None
-
-
-def html(html: str) -> Response:
+def html(html: Union[str, Iterator, AsyncIterator], streaming: bool = False) -> Response:
     """
-    This function will help in serving a simple html string
+    This function will help in serving a simple html string or stream
 
-    :param html str: html to serve as a response
+    :param html: html string, or a chunk iterator/generator when streaming
+    :param streaming bool: whether to treat the response as a streaming response
     """
     return Response(
         description=html,
         status_code=200,
         headers=Headers({"Content-Type": "text/html"}),
+        streaming=streaming,
     )
 
 
diff --git a/robyn/robyn.pyi b/robyn/robyn.pyi
index ca0866ce1..03527ec4c 100644
--- a/robyn/robyn.pyi
+++ b/robyn/robyn.pyi
@@ -287,13 +287,15 @@ class Response:
         status_code (int): The status code of the response. e.g. 200, 404, 500 etc.
         response_type (Optional[str]): The response type of the response. e.g. text, json, html, file etc.
         headers (Union[Headers, dict]): The headers of the response or Headers directly. e.g. {"Content-Type": "application/json"}
-        description (Union[str, bytes]): The body of the response. If the response is a JSON, it will be a dict.
+        description (Union[str, bytes, Iterator, AsyncIterator]): The body of the response. Can be a string, bytes, or an iterator/generator for streaming.
         file_path (Optional[str]): The file path of the response. e.g. /home/user/file.txt
+        streaming (bool): Whether the response is a streaming response. If True, description should be an iterator/generator.
     """
 
     status_code: int
     headers: Union[Headers, dict]
-    description: Union[str, bytes]
+    description: Union[str, bytes, Iterator, AsyncIterator]
+    streaming: bool = False
     response_type: Optional[str] = None
     file_path: Optional[str] = None
 
diff --git a/robyn/router.py b/robyn/router.py
index ea1ba0742..c44276e51 100644
--- a/robyn/router.py
+++ b/robyn/router.py
@@ -5,14 +5,14 @@
 from functools import wraps
 from inspect import signature
 from types import CoroutineType
-from typing import Callable, Dict, List, NamedTuple, Optional, Union
+from typing import Callable, Dict, List, NamedTuple, Optional, Union, Iterator, AsyncIterator
 
 from robyn import status_codes
 from robyn.authentication import AuthenticationHandler, AuthenticationNotConfiguredError
 from robyn.dependency_injection import DependencyMap
 from robyn.jsonify import jsonify
 from robyn.responses import FileResponse
-from robyn.robyn import FunctionInfo, Headers, HttpMethod, Identity, MiddlewareType, QueryParams, Request, Response, StreamingResponse, Url
+from robyn.robyn import FunctionInfo, Headers, HttpMethod, Identity, MiddlewareType, QueryParams, Request, Response, Url
 from robyn.types import Body, Files, FormData, IPAddress, Method, PathParams
 from robyn.ws import WebSocket
 
@@ -24,6 +24,7 @@ class Route(NamedTuple):
     route: str
     function: FunctionInfo
     is_const: bool
+    streaming: bool = False
 
 
 class RouteMiddleware(NamedTuple):
@@ -47,79 +48,82 @@ def __init__(self) -> None:
         super().__init__()
         self.routes: List[Route] = []
 
-    def _format_tuple_response(self, res: tuple) -> Union[Response, StreamingResponse]:
+    def _format_tuple_response(self, res: tuple) -> Response:
         if len(res) != 3:
             raise ValueError("Tuple should have 3 elements")
 
         description, headers, status_code = res
         formatted_response = self._format_response(description)
-        
-        # Handle StreamingResponse case
-        if isinstance(formatted_response, StreamingResponse):
-            formatted_response.headers.update(headers)
-            formatted_response.status_code = status_code
-            return formatted_response
-            
-        # Regular Response case
         new_headers: Headers = Headers(headers)
         if new_headers.contains("Content-Type"):
             headers.set("Content-Type", new_headers.get("Content-Type"))
 
         return Response(
+            description=formatted_response.description,
             status_code=status_code,
             headers=headers,
-            description=formatted_response.description,
+            streaming=formatted_response.streaming,
         )
 
     def _format_response(
         self,
-        res: Union[Dict, Response, StreamingResponse, bytes, tuple, str],
-    ) -> Union[Response, StreamingResponse]:
+        res: Union[Dict, Response, bytes, tuple, str, Iterator, AsyncIterator],
+    ) -> Response:
         if isinstance(res, Response):
             return res
 
-        # Special handling for StreamingResponse
-        if isinstance(res, StreamingResponse):
-            return res
-
         if isinstance(res, dict):
             return Response(
+                description=jsonify(res),
                 status_code=status_codes.HTTP_200_OK,
                 headers=Headers({"Content-Type": "application/json"}),
-                description=jsonify(res),
+                streaming=False,
             )
 
         if isinstance(res, FileResponse):
             response: Response = Response(
+                description=res.file_path,
                 status_code=res.status_code,
                 headers=res.headers,
-                description=res.file_path,
+                streaming=False,
             )
             response.file_path = res.file_path
             return response
 
         if isinstance(res, bytes):
             return Response(
+                description=res,
                 status_code=status_codes.HTTP_200_OK,
                 headers=Headers({"Content-Type": "application/octet-stream"}),
-                description=res,
+                streaming=False,
             )
 
         if isinstance(res, tuple):
             return self._format_tuple_response(tuple(res))
 
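+        # Auto-detect streaming: handlers may return a generator or async
+        # generator directly; it is wrapped into a streaming Response below.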
+        if isinstance(res, (Iterator, AsyncIterator)):
+            return Response(
+                description=res,
+                status_code=status_codes.HTTP_200_OK,
+                headers=Headers({"Content-Type": "text/plain"}),
+                streaming=True,
+            )
+
         return Response(
+            description=str(res).encode("utf-8"),
             status_code=status_codes.HTTP_200_OK,
             headers=Headers({"Content-Type": "text/plain"}),
-            description=str(res).encode("utf-8"),
+            streaming=False,
         )
 
     def add_route(  # type: ignore
         self,
+        *,
         route_type: HttpMethod,
         endpoint: str,
         handler: Callable,
         is_const: bool,
+        streaming: bool,
         exception_handler: Optional[Callable],
         injected_dependencies: dict,
     ) -> Union[Callable, CoroutineType]:
@@ -197,6 +201,7 @@ async def async_inner_handler(*args, **kwargs):
                 response = self._format_response(
                     await wrapped_handler(*args, **kwargs),
                 )
+                # OR with the route flag so auto-detected streaming is not clobbered
+                response.streaming = response.streaming or streaming
             except Exception as err:
                 if exception_handler is None:
                     raise
@@ -211,6 +216,7 @@ def inner_handler(*args, **kwargs):
                 response = self._format_response(
                     wrapped_handler(*args, **kwargs),
                 )
+                # OR with the route flag so auto-detected streaming is not clobbered
+                response.streaming = response.streaming or streaming
             except Exception as err:
                 if exception_handler is None:
                     raise
@@ -238,7 +244,7 @@ def inner_handler(*args, **kwargs):
                 params,
                 new_injected_dependencies,
             )
-            self.routes.append(Route(route_type, endpoint, function, is_const))
+            self.routes.append(Route(route_type, endpoint, function, is_const, streaming))
             return async_inner_handler
         else:
             function = FunctionInfo(
@@ -248,7 +254,7 @@ def inner_handler(*args, **kwargs):
                 params,
                 new_injected_dependencies,
             )
-            self.routes.append(Route(route_type, endpoint, function, is_const))
+            self.routes.append(Route(route_type, endpoint, function, is_const, streaming))
             return inner_handler
 
     def get_routes(self) -> List[Route]:
diff --git a/src/base_routes.rs b/src/base_routes.rs
index 318558c19..73a3018c8 100644
--- a/src/base_routes.rs
+++ b/src/base_routes.rs
@@ -2,7 +2,7 @@ use actix_web::{web, HttpRequest, HttpResponse};
 use pyo3::prelude::*;
 use pyo3::types::PyDict;
 
-use crate::types::{Headers, Response, StreamingResponse};
+use crate::types::{Headers, Response};
 
 pub async fn handle_request(
     req: HttpRequest,
@@ -36,19 +36,12 @@ pub async fn handle_request(
 
         match result {
             Ok(response) => {
-                // Try to extract as StreamingResponse first
-                match response.extract::<StreamingResponse>(py) {
-                    Ok(streaming_response) => streaming_response.respond_to(&req),
-                    Err(_) => {
-                        // If not a StreamingResponse, try as regular Response
-                        match response.extract::<Response>(py) {
-                            Ok(response) => response.respond_to(&req),
-                            Err(e) => {
-                                // If extraction fails, return 500 error
-                                let headers = Headers::new(None);
-                                Response::internal_server_error(Some(&headers)).respond_to(&req)
-                            }
-                        }
+                match response.extract::<Response>(py) {
+                    Ok(response) => response.respond_to(&req),
+                    Err(_) => {
+                        // If extraction fails, return 500 error
+                        let headers = Headers::new(None);
+                        Response::internal_server_error(Some(&headers)).respond_to(&req)
                     }
                 }
             }
@@ -93,19 +86,12 @@ pub async fn handle_request_with_body(
 
         match result {
             Ok(response) => {
-                // Try to extract as StreamingResponse first
-                match response.extract::<StreamingResponse>(py) {
-                    Ok(streaming_response) => streaming_response.respond_to(&req),
-                    Err(_) => {
-                        // If not a StreamingResponse, try as regular Response
-                        match response.extract::<Response>(py) {
-                            Ok(response) => response.respond_to(&req),
-                            Err(e) => {
-                                // If extraction fails, return 500 error
-                                let headers = Headers::new(None);
-                                Response::internal_server_error(Some(&headers)).respond_to(&req)
-                            }
-                        }
+                match response.extract::<Response>(py) {
+                    Ok(response) => response.respond_to(&req),
+                    Err(_) => {
+                        // If extraction fails, return 500 error
+                        let headers = Headers::new(None);
+                        Response::internal_server_error(Some(&headers)).respond_to(&req)
                     }
                 }
             }
diff --git a/src/lib.rs b/src/lib.rs
index 5ffc8800b..d8209d4d2 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -17,7 +17,7 @@ use types::{
     identity::Identity,
     multimap::QueryParams,
     request::PyRequest,
-    response::{PyResponse, PyStreamingResponse},
+    response::PyResponse,
     HttpMethod, Url,
 };
 
@@ -42,7 +42,6 @@ pub fn robyn(_py: Python<'_>, m: &PyModule) -> PyResult<()> {
     m.add_class::<Identity>()?;
     m.add_class::<PyRequest>()?;
     m.add_class::<PyResponse>()?;
-    m.add_class::<PyStreamingResponse>()?;
     m.add_class::<Url>()?;
     m.add_class::<QueryParams>()?;
     m.add_class::<MiddlewareType>()?;
diff --git a/src/types/response.rs b/src/types/response.rs
index ce6a65d61..dc38f2d7b 100644
--- a/src/types/response.rs
+++ b/src/types/response.rs
@@ -3,7 +3,7 @@ use actix_web::{HttpRequest, HttpResponse, HttpResponseBuilder, Responder, Error
 use pyo3::{
     exceptions::PyIOError,
     prelude::*,
-    types::{PyBytes, PyDict, PyList},
+    types::{PyBytes, PyDict},
 };
 use futures::stream::Stream;
 use std::pin::Pin;
@@ -15,7 +15,7 @@ use super::headers::Headers;
 pub enum ResponseBody {
     Text(String),
     Binary(Vec<u8>),
-    Streaming(Vec<Vec<u8>>),
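+    // Holds the Python iterator/async generator that yields response chunks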
+    Streaming(Py<PyAny>),
 }
 
 #[derive(Debug, Clone)]
@@ -25,35 +25,33 @@ pub struct Response {
     pub headers: Headers,
     pub body: ResponseBody,
     pub file_path: Option<String>,
-}
-
-#[derive(Debug, Clone)]
-pub struct StreamingResponse {
-    pub status_code: u16,
-    pub headers: Headers,
-    pub description: Py<PyAny>,
-    pub response_type: String,
-    pub file_path: Option<String>,
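+    // True when the body should be sent incrementally as a chunk stream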
+    pub streaming: bool,
 }
 
 impl<'a> FromPyObject<'a> for Response {
     fn extract(ob: &'a PyAny) -> PyResult<Self> {
-        // First check if this is a streaming response by checking response_type
-        if let Ok(response_type) = ob.getattr("response_type")?.extract::<String>() {
-            if response_type == "stream" {
-                return Err(PyErr::new::<pyo3::exceptions::PyValueError, _>(
-                    "Use StreamingResponse for streaming data"
-                ));
-            }
-        }
-
         let status_code: u16 = ob.getattr("status_code")?.extract()?;
         let headers: Headers = ob.getattr("headers")?.extract()?;
         let description = ob.getattr("description")?;
         let file_path: Option<String> = ob.getattr("file_path")?.extract()?;
+        let streaming: bool = ob.getattr("streaming")?.extract().unwrap_or(false);
 
-        // For non-streaming responses, convert to appropriate type
-        if description.is_none() {
+        // For streaming responses, handle iterator/generator
+        if streaming {
+            if !description.hasattr("__iter__")? && !description.hasattr("__aiter__")? {
+                return Err(PyErr::new::<pyo3::exceptions::PyValueError, _>(
+                    "Description must be an iterator or async iterator when streaming=True"
+                ));
+            }
+            Ok(Response {
+                status_code,
+                response_type: "stream".to_string(),
+                headers,
+                body: ResponseBody::Streaming(description.into_py(ob.py())),
+                file_path,
+                streaming,
+            })
+        } else if description.is_none() {
             return Err(PyErr::new::<pyo3::exceptions::PyValueError, _>(
                 "Description cannot be None"
             ));
@@ -65,6 +63,7 @@ impl<'a> FromPyObject<'a> for Response {
                 headers,
                 body,
                 file_path,
+                streaming: false,
             })
         } else if description.is_instance_of::<pyo3::types::PyString>() {
             let body = ResponseBody::Text(description.extract::<String>()?);
@@ -74,53 +73,26 @@ impl<'a> FromPyObject<'a> for Response {
                 headers,
                 body,
                 file_path,
+                streaming: false,
+            })
+        } else if description.hasattr("__iter__")? || description.hasattr("__aiter__")? {
+            Ok(Response {
+                status_code,
+                response_type: "stream".to_string(),
+                headers,
+                body: ResponseBody::Streaming(description.into_py(ob.py())),
+                file_path,
+                // Iterator bodies must stream even when streaming=True was not set
+                streaming: true,
             })
         } else {
             // If description is not bytes or str, it might be a streaming response
             Err(PyErr::new::<pyo3::exceptions::PyValueError, _>(
-                "Description must be bytes or str"
+                "Description must be bytes, str, or an iterator"
             ))
         }
     }
 }
 
-impl<'a> FromPyObject<'a> for StreamingResponse {
-    fn extract(ob: &'a PyAny) -> PyResult<Self> {
-        // First check if this is a streaming response by checking response_type
-        if let Ok(response_type) = ob.getattr("response_type")?.extract::<String>() {
-            if response_type != "stream" {
-                return Err(PyErr::new::<pyo3::exceptions::PyValueError, _>(
-                    format!("Not a streaming response (response_type = {})", response_type)
-                ));
-            }
-        } else {
-            return Err(PyErr::new::<pyo3::exceptions::PyValueError, _>(
-                "Missing response_type attribute"
-            ));
-        }
-
-        let status_code: u16 = ob.getattr("status_code")?.extract()?;
-        let headers: Headers = ob.getattr("headers")?.extract()?;
-        let description = ob.getattr("description")?;
-        let file_path: Option<String> = ob.getattr("file_path")?.extract()?;
-
-        // Check if description is a generator or iterator
-        if !description.hasattr("__iter__")? && !description.hasattr("__aiter__")? {
-            return Err(PyErr::new::<pyo3::exceptions::PyValueError, _>(
-                "Description must be an iterator or async iterator"
-            ));
-        }
-
-        Ok(StreamingResponse {
-            status_code,
-            headers,
-            description: description.into_py(ob.py()),
-            response_type: "stream".to_string(),
-            file_path,
-        })
-    }
-}
-
 impl Responder for Response {
     type Body = BoxBody;
 
@@ -138,7 +110,9 @@ impl Responder for Response {
                     response_builder.insert_header(("content-type", "application/octet-stream"));
                 }
                 ResponseBody::Streaming(_) => {
-                    panic!("Use StreamingResponse for streaming data");
+                    if !self.headers.headers.contains_key("content-type") {
+                        response_builder.insert_header(("content-type", "text/plain; charset=utf-8"));
+                    }
                 }
             };
         }
@@ -146,97 +120,91 @@ impl Responder for Response {
         // Apply headers after content-type
         apply_hashmap_headers(&mut response_builder, &self.headers);
         
-        match self.body {
-            ResponseBody::Text(text) => response_builder.body(text),
-            ResponseBody::Binary(data) => response_builder.body(data),
-            ResponseBody::Streaming(_) => {
-                panic!("Use StreamingResponse for streaming data")
-            }
-        }
-    }
-}
-
-impl Responder for StreamingResponse {
-    type Body = BoxBody;
-
-    fn respond_to(self, _req: &HttpRequest) -> HttpResponse<Self::Body> {
-        let mut response_builder =
-            HttpResponseBuilder::new(StatusCode::from_u16(self.status_code).unwrap());
-        
-        // Apply headers
-        apply_hashmap_headers(&mut response_builder, &self.headers);
-        
-        // Create streaming body
-        let description = self.description;
-        let stream = Box::pin(futures::stream::unfold(description, move |description| {
-            Box::pin(async move {
-                let result = Python::with_gil(|py| {
-                    let desc = description.as_ref(py);
-                    
-                    // Handle sync iterator
-                    if desc.hasattr("__iter__").unwrap_or(false) {
-                        if let Ok(mut iter) = desc.iter() {
-                            if let Some(Ok(item)) = iter.next() {
-                                let chunk = if item.is_instance_of::<pyo3::types::PyBytes>() {
-                                    item.extract::<Vec<u8>>().ok()
-                                } else if item.is_instance_of::<pyo3::types::PyString>() {
-                                    item.extract::<String>().ok().map(|s| s.into_bytes())
-                                } else if item.is_instance_of::<pyo3::types::PyInt>() {
-                                    item.extract::<i64>().ok().map(|i| i.to_string().into_bytes())
-                                } else {
-                                    None
-                                };
+        if self.streaming {
+            match self.body {
+                ResponseBody::Streaming(description) => {
+                    // Convert one yielded Python object into raw bytes
+                    fn to_chunk(item: &PyAny) -> Option<Vec<u8>> {
+                        if item.is_instance_of::<pyo3::types::PyBytes>() {
+                            item.extract::<Vec<u8>>().ok()
+                        } else if item.is_instance_of::<pyo3::types::PyString>() {
+                            item.extract::<String>().ok().map(|s| s.into_bytes())
+                        } else if item.is_instance_of::<pyo3::types::PyInt>() {
+                            item.extract::<i64>().ok().map(|i| i.to_string().into_bytes())
+                        } else {
+                            None
+                        }
+                    }
+
+                    // Pull chunks from the Python iterator one at a time. The GIL is
+                    // held only while talking to Python; an async generator's
+                    // __anext__ future is awaited outside the GIL instead of using
+                    // block_on, which would panic inside the actix runtime.
+                    let stream = Box::pin(futures::stream::unfold(description, move |description| {
+                        Box::pin(async move {
+                            // Sync iterators/generators: step them under the GIL
+                            let sync_step = Python::with_gil(|py| {
+                                let desc = description.as_ref(py);
+                                if desc.hasattr("__iter__").unwrap_or(false) {
+                                    if let Ok(mut iter) = desc.iter() {
+                                        if let Some(Ok(item)) = iter.next() {
+                                            return Some(to_chunk(item));
+                                        }
+                                    }
+                                    return Some(None); // exhausted or failed
+                                }
+                                None // not a sync iterator, try async below
+                            });
+                            match sync_step {
+                                Some(Some(chunk)) => return Some((Ok(Bytes::from(chunk)), description)),
+                                Some(None) => return None,
+                                None => {}
+                            }
+
+                            // Async generators: build the __anext__ future under the
+                            // GIL, then await it on the runtime
+                            let next_fut = Python::with_gil(|py| {
+                                let desc = description.as_ref(py);
+                                desc.call_method0("__aiter__")
+                                    .and_then(|agen| agen.call_method0("__anext__"))
+                                    .and_then(pyo3_asyncio::tokio::into_future)
+                                    .ok()
+                            });
+                            if let Some(fut) = next_fut {
+                                // StopAsyncIteration surfaces as Err and ends the stream
+                                if let Ok(item) = fut.await {
+                                    if let Some(chunk) = Python::with_gil(|py| to_chunk(item.as_ref(py))) {
+                                        return Some((Ok(Bytes::from(chunk)), description));
+                                    }
+                                }
+                            }
+                            None
+                        })
+                    })) as Pin<Box<dyn Stream<Item = Result<Bytes, Error>>>>;
+
+                    response_builder.streaming(stream)
+                }
+                _ => panic!("Streaming response without streaming body type"),
+            }
+        } else {
+            match self.body {
+                ResponseBody::Text(text) => response_builder.body(text),
+                ResponseBody::Binary(data) => response_builder.body(data),
+                ResponseBody::Streaming(_) => {
+                    panic!("Streaming response without streaming=True")
+                }
+            }
+        }
     }
 }
 
@@ -253,6 +221,7 @@ impl Response {
             headers,
             body: ResponseBody::Text("Not found".to_string()),
             file_path: None,
+            streaming: false,
         }
     }
 
@@ -268,6 +237,7 @@ impl Response {
             headers,
             body: ResponseBody::Text("Internal server error".to_string()),
             file_path: None,
+            streaming: false,
         }
     }
 }
@@ -279,9 +249,7 @@ impl ToPyObject for Response {
         let description = match &self.body {
             ResponseBody::Text(text) => text.clone().into_py(py),
             ResponseBody::Binary(data) => PyBytes::new(py, data).into(),
-            ResponseBody::Streaming(_) => {
-                panic!("Use StreamingResponse for streaming data")
-            }
+            ResponseBody::Streaming(desc) => desc.clone_ref(py),
         };
 
         let response = PyResponse {
@@ -290,21 +258,7 @@ impl ToPyObject for Response {
             headers,
             description,
             file_path: self.file_path.clone(),
-        };
-        Py::new(py, response).unwrap().as_ref(py).into()
-    }
-}
-
-impl ToPyObject for StreamingResponse {
-    fn to_object(&self, py: Python) -> PyObject {
-        let headers = self.headers.clone().into_py(py).extract(py).unwrap();
-        
-        let response = PyStreamingResponse {
-            status_code: self.status_code,
-            headers,
-            description: self.description.clone_ref(py),
-            response_type: self.response_type.clone(),
-            file_path: self.file_path.clone(),
+            streaming: self.streaming,
         };
         Py::new(py, response).unwrap().as_ref(py).into()
     }
@@ -323,57 +277,8 @@ pub struct PyResponse {
     pub description: Py<PyAny>,
     #[pyo3(get)]
     pub file_path: Option<String>,
-}
-
-#[pyclass(name = "StreamingResponse")]
-#[derive(Debug, Clone)]
-pub struct PyStreamingResponse {
-    #[pyo3(get)]
-    pub status_code: u16,
-    #[pyo3(get)]
-    pub headers: Py<Headers>,
-    #[pyo3(get)]
-    pub description: Py<PyAny>,
-    #[pyo3(get)]
-    pub response_type: String,
-    #[pyo3(get)]
-    pub file_path: Option<String>,
-}
-
-#[pymethods]
-impl PyStreamingResponse {
-    #[new]
-    #[pyo3(signature = (status_code=200, description=None, headers=None))]
-    pub fn new(py: Python, status_code: u16, description: Option<Py<PyAny>>, headers: Option<&PyAny>) -> PyResult<Self> {
-        let headers_output: Py<Headers> = if let Some(headers) = headers {
-            if let Ok(headers_dict) = headers.downcast::<PyDict>() {
-                let headers = Headers::new(Some(headers_dict));
-                Py::new(py, headers)?
-            } else if let Ok(headers) = headers.extract::<Py<Headers>>() {
-                headers
-            } else {
-                return Err(PyErr::new::<pyo3::exceptions::PyTypeError, _>(
-                    "headers must be a Headers instance or a dict",
-                ));
-            }
-        } else {
-            let headers = Headers::new(None);
-            Py::new(py, headers)?
-        };
-
-        let description = match description {
-            Some(d) => d,
-            None => PyList::empty(py).into(),
-        };
-
-        Ok(Self {
-            status_code,
-            headers: headers_output,
-            description,
-            response_type: "stream".to_string(),
-            file_path: None,
-        })
-    }
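+    // Exposed to Python; set automatically when description is an iterator/generator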
+    #[pyo3(get, set)]
+    pub streaming: bool,
 }
 
 #[pymethods]
@@ -384,6 +289,7 @@ impl PyResponse {
         status_code: u16,
         headers: &PyAny,
         description: Py<PyAny>,
+        streaming: Option<bool>,
     ) -> PyResult<Self> {
         // Validate description type first
         let desc = description.as_ref(py);
@@ -396,9 +302,11 @@ impl PyResponse {
         // Only allow string or bytes
         if !desc.is_instance_of::<pyo3::types::PyBytes>()
             && !desc.is_instance_of::<pyo3::types::PyString>()
+            && !desc.hasattr("__iter__")?
+            && !desc.hasattr("__aiter__")?
         {
             return Err(PyErr::new::<pyo3::exceptions::PyValueError, _>(
-                "Description must be bytes or str"
+                "Description must be bytes, str, or an iterator"
             ));
         }
 
@@ -413,11 +321,13 @@ impl PyResponse {
             ));
         };
 
-        // Default to text response type
-        let response_type = if desc.is_instance_of::<pyo3::types::PyBytes>() {
-            "binary".to_string()
+        // Default to text response type and determine if streaming
+        let (response_type, is_streaming) = if desc.is_instance_of::<pyo3::types::PyBytes>() {
+            ("binary".to_string(), false)
+        } else if desc.hasattr("__iter__")? || desc.hasattr("__aiter__")? {
+            ("stream".to_string(), streaming.unwrap_or(true))
         } else {
-            "text".to_string()
+            ("text".to_string(), false)
         };
 
         Ok(Self {
@@ -426,6 +336,7 @@ impl PyResponse {
             headers: headers_output,
             description,
             file_path: None,
+            streaming: is_streaming,
         })
     }
 
@@ -439,22 +350,28 @@ impl PyResponse {
             ));
         }
         
-        // Only allow string or bytes
+        // Allow string, bytes, or iterators
         if !desc.is_instance_of::<pyo3::types::PyBytes>()
             && !desc.is_instance_of::<pyo3::types::PyString>()
+            && !desc.hasattr("__iter__")?
+            && !desc.hasattr("__aiter__")?
         {
             return Err(PyErr::new::<pyo3::exceptions::PyValueError, _>(
-                "Description must be bytes or str"
+                "Description must be bytes, str, or an iterator"
             ));
         }
 
-        // Update response type based on new description
-        self.response_type = if desc.is_instance_of::<pyo3::types::PyBytes>() {
-            "binary".to_string()
+        // Update response type and streaming based on new description
+        let (response_type, streaming) = if desc.is_instance_of::<pyo3::types::PyBytes>() {
+            ("binary".to_string(), false)
+        } else if desc.hasattr("__iter__")? || desc.hasattr("__aiter__")? {
+            ("stream".to_string(), true)
         } else {
-            "text".to_string()
+            ("text".to_string(), false)
         };
 
+        self.response_type = response_type;
+        self.streaming = streaming;
         self.description = description;
         Ok(())
     }
@@ -463,6 +380,7 @@ impl PyResponse {
     pub fn set_file_path(&mut self, py: Python, file_path: &str) -> PyResult<()> {
         self.file_path = Some(file_path.to_string());
         self.response_type = "binary".to_string();
+        self.streaming = false;
 
         match read_file(file_path) {
             Ok(content) => {

From 59dcff2d034a8323d80b45c6f04a6c00c6696e28 Mon Sep 17 00:00:00 2001
From: Sanskar Jethi <sansyrox@gmail.com>
Date: Wed, 18 Dec 2024 00:38:44 +0000
Subject: [PATCH 10/14] update docs

---
 .../documentation/api_reference/streaming.mdx | 107 +++++++++----
 docs_src/src/pages/documentation/streaming.md | 144 ++++++++++++++----
 2 files changed, 188 insertions(+), 63 deletions(-)

diff --git a/docs_src/src/pages/documentation/api_reference/streaming.mdx b/docs_src/src/pages/documentation/api_reference/streaming.mdx
index 9a52849a2..d208baa11 100644
--- a/docs_src/src/pages/documentation/api_reference/streaming.mdx
+++ b/docs_src/src/pages/documentation/api_reference/streaming.mdx
@@ -1,28 +1,20 @@
 export const description =
-  'On this page, we’ll dive into the different conversation endpoints you can use to manage conversations programmatically.'
+  'Learn how to use streaming responses in Robyn for real-time data, large files, and server-sent events.'
 
-
-
-## Coming From
-
-If you're coming from [File Handling](/documentation/api_reference/file_handling), you'll find streaming provides a more efficient way to handle large files.
-
-## Streaming Responses
+## Overview
 
 Like Batman's gadgets streaming from the Batcave to his utility belt, Robyn provides built-in support for streaming responses. This allows you to send data in chunks, perfect for large files, real-time updates, and server-sent events.
 
-Streaming responses are perfect for handling large datasets or real-time updates without consuming excessive memory.
+## Creating Streaming Responses
 
-## Response
+There are two ways to create streaming responses in Robyn:
 
-When the Bat-Signal needs to stream continuously through the night sky, you'll want to use a generator or iterator as the `description` parameter:
+### 1. Using the streaming parameter
 
 <Row>
   <CodeGroup title="Server">
     ```python
-    from robyn import Response
-
-    @app.get("/bat-signal")
+    @app.get("/bat-signal", streaming=True)
     async def stream_signal():
         async def signal_generator():
             while True:
@@ -44,13 +36,46 @@ When the Bat-Signal needs to stream continuously through the night sky, you'll w
   </CodeGroup>
 </Row>
 
+### 2. Returning an Iterator/Generator
+
+Robyn automatically detects iterators and generators and treats them as streaming responses:
+
+<Row>
+  <CodeGroup title="Server">
+    ```python
+    @app.get("/bat-signal")
+    async def stream_signal():
+        async def signal_generator():
+            while True:
+                yield b"Bat-Signal Active\n"
+                await asyncio.sleep(1)
+        
+        return signal_generator()  # Automatically detected as streaming
+    ```
+  </CodeGroup>
+</Row>
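+
+To consume a stream on the client side, any HTTP client that supports chunked reading works. A minimal sketch using `aiohttp` (assuming the server above is running locally on port 8080):
+
+```python
+import asyncio
+
+import aiohttp
+
+async def read_signal():
+    async with aiohttp.ClientSession() as session:
+        async with session.get("http://localhost:8080/bat-signal") as resp:
+            # Chunks arrive as the server yields them
+            async for chunk in resp.content:
+                print(chunk.decode(), end="")
+
+asyncio.run(read_signal())
+```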
+
+## Response Object
+
+The Response class supports streaming through its constructor parameters:
+
+```python
+Response(
+    status_code=200,
+    headers={"Content-Type": "text/plain"},
+    description=generator(),  # Can be str, bytes, or iterator/generator
+    streaming=True  # Optional, automatically set for iterators/generators
+)
+```
+
 ### Parameters
 
 | Name | Type | Description | Default |
 |------|------|-------------|---------|
 | status_code | int | Response status code | 200 |
 | headers | Dict[str, str] | Response headers | None |
-| description | Union[str, bytes, Generator, AsyncGenerator] | Content to stream | None |
+| description | Union[str, bytes, Iterator, AsyncIterator] | Response body; pass an iterator or generator to stream | None |
+| streaming | bool | Whether to treat as streaming response | False |
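+
+Plain sync generators stream just as well; here is a minimal sketch using a hypothetical route (assuming the usual `app` and `Response` imports):
+
+```python
+@app.get("/batcomputer/logs")
+def stream_logs():
+    def log_generator():
+        for i in range(5):
+            yield f"Log entry {i}\n".encode()
+
+    return Response(
+        status_code=200,
+        headers={"Content-Type": "text/plain"},
+        description=log_generator(),  # Sync generators are detected too
+    )
+```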
 
 ### Supported Types
 
@@ -60,25 +85,29 @@ Like Batman's versatile arsenal, the streaming response system supports multiple
   <CodeGroup title="Binary">
     ```python
     # Raw binary data (like Batcomputer logs)
-    yield b"Batcomputer Log Entry\n"
+    async def generator():
+        yield b"Batcomputer Log Entry\n"
     ```
   </CodeGroup>
   <CodeGroup title="Text">
     ```python
     # Text messages (like Alfred's updates)
-    yield "Master Wayne, your tea is ready\n".encode()
+    async def generator():
+        yield "Master Wayne, your tea is ready\n".encode()
     ```
   </CodeGroup>
   <CodeGroup title="Numbers">
     ```python
     # Numbers (like Batmobile telemetry)
-    yield str(speed).encode()
+    async def generator():
+        yield str(speed).encode()
     ```
   </CodeGroup>
   <CodeGroup title="JSON">
     ```python
     # JSON data (like Gotham City surveillance)
-    yield json.dumps({"location": "Crime Alley"}).encode()
+    async def generator():
+        yield json.dumps({"location": "Crime Alley"}).encode()
     ```
   </CodeGroup>
 </Row>
@@ -90,7 +119,7 @@ For real-time updates from the Batcomputer:
 <Row>
   <CodeGroup title="Server">
     ```python
-    @app.get("/batcomputer/events")
+    @app.get("/batcomputer/events", streaming=True)
     async def batcomputer_feed():
         async def event_generator():
             while True:
@@ -129,7 +158,7 @@ For streaming large files from the Batcomputer archives:
 <Row>
   <CodeGroup title="Server">
     ```python
-    @app.get("/batcomputer/files")
+    @app.get("/batcomputer/files", streaming=True)
     async def download_files():
         async def file_generator():
             chunk_size = 8192  # Size of a Batarang
@@ -154,6 +183,25 @@ For streaming large files from the Batcomputer archives:
   </CodeGroup>
 </Row>
 
+## Helper Functions
+
+Robyn provides helper functions for common streaming scenarios:
+
+```python
+import asyncio
+
+from robyn import html
+
+@app.get("/bat-report", streaming=True)
+async def stream_html():
+    async def generator():
+        yield "<html><body>"
+        for i in range(5):
+            yield f"<p>Bat-Signal sighting {i}</p>"
+            await asyncio.sleep(0.1)
+        yield "</body></html>"
+    
+    return html(generator(), streaming=True)
+```
+
 ## Common Headers
 
 ### Plain Text
@@ -182,8 +230,6 @@ headers = {
 
 Even Batman needs contingency plans:
 
-  Always handle errors gracefully in your streaming responses to prevent connection hangs.
-
 ```python
 async def generator():
     try:
@@ -201,12 +247,12 @@ Test your streaming responses like Batman testing his equipment:
 ```python
+import aiohttp
+import pytest
+
 @pytest.mark.asyncio
 async def test_bat_signal():
-    async with app.test_client() as client:
-        response = await client.get("/bat-signal")
-        signals = []
-        async for signal in response.content:
-            signals.append(signal)
-        assert len(signals) > 0
+    async with aiohttp.ClientSession() as client:
+        async with client.get("http://localhost:8080/bat-signal") as response:
+            chunks = []
+            async for chunk in response.content:
+                chunks.append(chunk.decode())
+            assert len(chunks) > 0
 ```
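+
+This test assumes a Robyn server is already running on port 8080; it exercises the real HTTP stream rather than an in-process client.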
 
 ## Best Practices
@@ -231,5 +277,6 @@ Implement timeouts (even Batman needs sleep)
 Now that you've mastered streaming, you might want to explore:
 
 - [WebSockets](/documentation/api_reference/websockets) - For real-time bidirectional communication
-- [Scaling](/documentation/api_reference/scaling) - Scale your streaming applications across multiple cores
+- [File Handling](/documentation/api_reference/file_handling) - For more file operations
+- [Middleware](/documentation/api_reference/middleware) - For request/response processing
 
diff --git a/docs_src/src/pages/documentation/streaming.md b/docs_src/src/pages/documentation/streaming.md
index e55d34b11..2e4f065fc 100644
--- a/docs_src/src/pages/documentation/streaming.md
+++ b/docs_src/src/pages/documentation/streaming.md
@@ -4,10 +4,11 @@ Robyn supports streaming responses for various use cases including real-time dat
 
 ## Basic Usage
 
-### Simple Streaming Response
+There are two ways to create streaming responses in Robyn:
 
+1. Using the `streaming` parameter in route decorators:
 ```python
-@app.get("/stream")
+@app.get("/stream", streaming=True)
 async def stream():
     async def generator():
         for i in range(5):
@@ -20,30 +21,45 @@ async def stream():
     )
 ```
 
+2. Returning an iterator/generator directly:
+```python
+@app.get("/stream")
+async def stream():
+    async def generator():
+        for i in range(5):
+            yield f"Chunk {i}\n".encode()
+    
+    return generator()  # Robyn will automatically detect this as a streaming response
+```
+
 ## Supported Types
 
 Robyn's streaming response system supports multiple data types:
 
 1. **Binary Data** (`bytes`)
 ```python
-yield b"Binary data"
+async def generator():
+    yield b"Binary data"
 ```
 
 2. **Text Data** (`str`)
 ```python
-yield "String data".encode()
+async def generator():
+    yield "String data".encode()  # Must be encoded
 ```
 
 3. **Numbers** (`int`, `float`)
 ```python
-yield str(42).encode()
+async def generator():
+    yield str(42).encode()  # Must be converted to string and encoded
 ```
 
 4. **JSON Data**
 ```python
-import json
-data = {"key": "value"}
-yield json.dumps(data).encode()
+async def generator():
+    import json
+    data = {"key": "value"}
+    yield json.dumps(data).encode()
 ```
 
 ## Use Cases
@@ -53,11 +69,14 @@ yield json.dumps(data).encode()
 SSE allows real-time updates from server to client:
 
 ```python
+import asyncio
+import json
+import time
+
-@app.get("/events")
+@app.get("/events", streaming=True)
 async def sse():
     async def event_generator():
-        yield f"event: message\ndata: {json.dumps(data)}\n\n".encode()
-        
+        while True:
+            data = {"time": time.time(), "event": "update"}
+            yield f"data: {json.dumps(data)}\n\n".encode()
+            await asyncio.sleep(1)
+    
     return Response(
         status_code=200,
         headers={
@@ -82,7 +101,7 @@ evtSource.onmessage = (event) => {
 Stream large files in chunks to manage memory usage:
 
 ```python
-@app.get("/download")
+@app.get("/download", streaming=True)
 async def download():
     async def file_generator():
         chunk_size = 8192  # 8KB chunks
@@ -105,12 +124,17 @@ async def download():
 Stream CSV data as it's generated:
 
 ```python
+import asyncio
+import random
+
-@app.get("/csv")
+@app.get("/csv", streaming=True)
 async def csv():
     async def csv_generator():
-        yield "header1,header2\n".encode()
-        for item in data:
-            yield f"{item.field1},{item.field2}\n".encode()
+        # Write headers
+        yield "id,name,value\n".encode()
+        
+        # Stream data rows
+        for i in range(1000):
+            row = f"{i},item-{i},{random.randint(1,100)}\n"
+            yield row.encode()
+            await asyncio.sleep(0.01)  # Simulate processing time
     
     return Response(
         status_code=200,
@@ -125,13 +149,28 @@ async def csv():
 ## Best Practices
 
 1. **Always encode your data**
-   - Convert strings to bytes using `.encode()`
-   - Use `json.dumps().encode()` for JSON data
+   ```python
+   # Wrong
+   yield "data"  # Will fail
+   # Correct
+   yield "data".encode()
+   ```
 
 2. **Set appropriate headers**
-   - Use correct Content-Type
-   - Add Content-Disposition for downloads
-   - Set Cache-Control for SSE
+   ```python
+   # For SSE
+   headers = {
+       "Content-Type": "text/event-stream",
+       "Cache-Control": "no-cache",
+       "Connection": "keep-alive"
+   }
+   
+   # For file downloads
+   headers = {
+       "Content-Type": "application/octet-stream",
+       "Content-Disposition": "attachment; filename=file.dat"
+   }
+   ```
 
 3. **Handle errors gracefully**
    ```python
@@ -141,31 +180,42 @@ async def csv():
                yield process(item)
        except Exception as e:
            yield f"Error: {str(e)}".encode()
+           return
    ```
 
 4. **Memory management**
-   - Use appropriate chunk sizes
-   - Don't hold entire dataset in memory
-   - Clean up resources after streaming
+   ```python
+   # Wrong - accumulates all data in memory
+   data = []
+   async def generator():
+       for i in range(1000000):
+           data.append(i)  # Grows without bound
+           yield str(i).encode()
+   
+   # Correct - streams data directly
+   async def generator():
+       for i in range(1000000):
+           yield str(i).encode()
+   ```
 
 ## Testing
 
-Test streaming responses using the test client:
+Test streaming responses using aiohttp:
 
 ```python
+import aiohttp
+import pytest
+
 @pytest.mark.asyncio
 async def test_stream():
-    async with app.test_client() as client:
-        response = await client.get("/stream")
-        chunks = []
-        async for chunk in response.content:
-            chunks.append(chunk)
-        # Assert on chunks
+    async with aiohttp.ClientSession() as client:
+        async with client.get("http://localhost:8080/stream") as response:
+            chunks = []
+            async for chunk in response.content:
+                chunks.append(chunk.decode())
+            assert len(chunks) > 0
 ```
 
 ## Common Issues
 
-1. **Forgetting to encode data**
+1. **Not encoding data**
    ```python
    # Wrong
    yield "data"  # Will fail
@@ -204,4 +254,32 @@ async def test_stream():
 2. Implement backpressure handling
 3. Consider using async file I/O for large files
 4. Monitor memory usage during streaming
-5. Implement timeouts for long-running streams 
\ No newline at end of file
+5. Implement timeouts for long-running streams (see the sketch below)
+
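+For example, tip 5 can be a simple deadline check inside the generator. A minimal sketch, assuming a hypothetical `slow_source()` producer rather than any Robyn API:
+
+```python
+import asyncio
+import time
+
+# Hypothetical producer standing in for a real data source
+async def slow_source():
+    while True:
+        await asyncio.sleep(0.5)
+        yield b"chunk\n"
+
+async def timed_generator(timeout: float = 5.0):
+    # Stop yielding once the deadline passes so the connection can close
+    deadline = time.monotonic() + timeout
+    async for chunk in slow_source():
+        if time.monotonic() > deadline:
+            yield b"stream timed out\n"
+            return
+        yield chunk
+```
+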
+## Helper Functions
+
+Robyn provides helper functions for common streaming scenarios:
+
+```python
+import asyncio
+
+from robyn import html
+
+# Stream HTML content
+@app.get("/stream-html", streaming=True)
+async def stream_html():
+    async def generator():
+        yield "<html><body>"
+        for i in range(5):
+            yield f"<p>Chunk {i}</p>"
+            await asyncio.sleep(0.1)
+        yield "</body></html>"
+    
+    return html(generator(), streaming=True)
+```
+
+## What's Next?
+
+Now that you've mastered streaming responses, you might want to explore:
+
+- [WebSockets](/documentation/api_reference/websockets) for real-time bidirectional communication
+- [File Handling](/documentation/api_reference/file_handling) for more file operations
+- [Middleware](/documentation/api_reference/middleware) for request/response processing
\ No newline at end of file

From 4c8d9f284611065415fefddba4e7722ab7653813 Mon Sep 17 00:00:00 2001
From: Sanskar Jethi <sansyrox@gmail.com>
Date: Wed, 18 Dec 2024 00:44:23 +0000
Subject: [PATCH 11/14] update docs

---
 integration_tests/base_routes.py |  1 +
 robyn/robyn.pyi                  | 13 ++++++++++---
 2 files changed, 11 insertions(+), 3 deletions(-)

diff --git a/integration_tests/base_routes.py b/integration_tests/base_routes.py
index d9000cee4..3000c4b01 100644
--- a/integration_tests/base_routes.py
+++ b/integration_tests/base_routes.py
@@ -2,6 +2,7 @@
 import pathlib
 from collections import defaultdict
 from typing import Optional
+import json
 
 from integration_tests.subroutes import di_subrouter, sub_router
 from integration_tests.views import AsyncView, SyncView
diff --git a/robyn/robyn.pyi b/robyn/robyn.pyi
index 03527ec4c..fa8da141d 100644
--- a/robyn/robyn.pyi
+++ b/robyn/robyn.pyi
@@ -2,7 +2,7 @@ from __future__ import annotations
 
 from dataclasses import dataclass
 from enum import Enum
-from typing import Callable, Optional, Union
+from typing import Callable, Optional, Union, Iterator, AsyncIterator, Generator, AsyncGenerator
 
 def get_version() -> str:
     pass
@@ -287,14 +287,21 @@ class Response:
         status_code (int): The status code of the response. e.g. 200, 404, 500 etc.
         response_type (Optional[str]): The response type of the response. e.g. text, json, html, file etc.
         headers (Union[Headers, dict]): The headers of the response or Headers directly. e.g. {"Content-Type": "application/json"}
-        description (Union[str, bytes, Callable]): The body of the response. Can be a string, bytes, or an iterator/generator for streaming.
+        description (Union[str, bytes, Iterator[bytes], AsyncIterator[bytes], Generator[bytes, None, None], AsyncGenerator[bytes, None]]): 
+            The body of the response. Can be:
+            - str: Plain text response
+            - bytes: Binary response
+            - Iterator[bytes]: Sync iterator yielding bytes
+            - AsyncIterator[bytes]: Async iterator yielding bytes
+            - Generator[bytes, None, None]: Sync generator yielding bytes
+            - AsyncGenerator[bytes, None]: Async generator yielding bytes
         file_path (Optional[str]): The file path of the response. e.g. /home/user/file.txt
         streaming (bool): Whether the response is a streaming response. If True, description should be an iterator/generator.
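+
+        Example (illustrative sketch only, using the documented parameters):
+
+            async def chunks():
+                yield b"part 1\n"
+                yield b"part 2\n"
+
+            response = Response(
+                status_code=200,
+                headers={"Content-Type": "text/plain"},
+                description=chunks(),
+                streaming=True,
+            )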
     """
 
     status_code: int
     headers: Union[Headers, dict]
-    description: Union[str, bytes, Callable]
+    description: Union[str, bytes, Iterator[bytes], AsyncIterator[bytes], Generator[bytes, None, None], AsyncGenerator[bytes, None]]
     streaming: bool = False
     response_type: Optional[str] = None
     file_path: Optional[str] = None

From 7d1a40b2eca4977635186d8136f3f4a4d1eee2cf Mon Sep 17 00:00:00 2001
From: Sanskar Jethi <sansyrox@gmail.com>
Date: Wed, 18 Dec 2024 00:57:23 +0000
Subject: [PATCH 12/14] update dependencies

---
 poetry.lock    | 528 ++++++++++++++++++++++++++++++++++++++++++++++++-
 pyproject.toml |   1 +
 2 files changed, 528 insertions(+), 1 deletion(-)

diff --git a/poetry.lock b/poetry.lock
index 3b34ae21a..abf55fbab 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1,5 +1,128 @@
 # This file is automatically @generated by Poetry 1.8.4 and should not be changed by hand.
 
+[[package]]
+name = "aiohappyeyeballs"
+version = "2.4.4"
+description = "Happy Eyeballs for asyncio"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "aiohappyeyeballs-2.4.4-py3-none-any.whl", hash = "sha256:a980909d50efcd44795c4afeca523296716d50cd756ddca6af8c65b996e27de8"},
+    {file = "aiohappyeyeballs-2.4.4.tar.gz", hash = "sha256:5fdd7d87889c63183afc18ce9271f9b0a7d32c2303e394468dd45d514a757745"},
+]
+
+[[package]]
+name = "aiohttp"
+version = "3.11.0"
+description = "Async http client/server framework (asyncio)"
+optional = false
+python-versions = ">=3.9"
+files = [
+    {file = "aiohttp-3.11.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:024409c1b1d6076d0ed933dcebd7e4fc6f3320a227bfa0c1b6b93a8b5a146f04"},
+    {file = "aiohttp-3.11.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:62502b8ffee8c6a4b5c6bf99d1de277d42bf51b2fb713975d9b63b560150b7ac"},
+    {file = "aiohttp-3.11.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c54c635d1f52490cde7ef3a423645167a8284e452a35405d5c7dc1242a8e75c9"},
+    {file = "aiohttp-3.11.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:104ea21994b1403e4c1b398866f1187c1694fa291314ad7216ec1d8ec6b49f38"},
+    {file = "aiohttp-3.11.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:04b24497b3baf15035730de5f207ade88a67d4483a5f16ced7ece348933a5b47"},
+    {file = "aiohttp-3.11.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:08474e71772a516ba2e2167b4707af8361d2c452b3d8a5364c984f4867869499"},
+    {file = "aiohttp-3.11.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f40380c96dd407dfa84eb2d264e68aa47717b53bdbe210a59cc3c35a4635f195"},
+    {file = "aiohttp-3.11.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e1668ef2f3a7ec9881f4b6a917e5f97c87a343fa6b0d5fc826b7b0297ddd0887"},
+    {file = "aiohttp-3.11.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f3bf5c132eb48002bcc3825702d241d35b4e9585009e65e9dcf9c4635d0b7424"},
+    {file = "aiohttp-3.11.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:c0315978b2a4569e03fb59100f6a7e7d23f718a4521491f5c13d946d37549f3d"},
+    {file = "aiohttp-3.11.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:d5cae4cd271e20b7ab757e966cc919186b9f02535418ab36c471a5377ef4deaa"},
+    {file = "aiohttp-3.11.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:31b91ff3a1fcb206a1fa76e0de1f08c9ffb1dc0deb7296fa2618adfe380fc676"},
+    {file = "aiohttp-3.11.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ebf610c37df4f09c71c9bbf8309b4b459107e6fe889ac0d7e16f6e4ebd975f86"},
+    {file = "aiohttp-3.11.0-cp310-cp310-win32.whl", hash = "sha256:b40c304ab01e89ad0aeeecf91bbaa6ae3b00e27b796c9e8d50b71a4a7e885cc8"},
+    {file = "aiohttp-3.11.0-cp310-cp310-win_amd64.whl", hash = "sha256:cd0834e4260eab78671b81d34f110fbaac449563e48d419cec0030d9a8e58693"},
+    {file = "aiohttp-3.11.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:89a96a0696dc67d548f69cb518c581a7a33cc1f26ab42229dea1709217c9d926"},
+    {file = "aiohttp-3.11.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f6b925c7775ab857bdc1e52e1f5abcae7d18751c09b751aeb641a5276d9b990e"},
+    {file = "aiohttp-3.11.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7867d0808614f04e78e0a8d5a2c1f8ac6bc626a0c0e2f62be48be6b749e2f8b2"},
+    {file = "aiohttp-3.11.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:229ae13959a5f499d90ffbb4b9eac2255d8599315027d6f7c22fa9803a94d5b1"},
+    {file = "aiohttp-3.11.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:62a2f5268b672087c45b33479ba1bb1d5a48c6d76c133cfce3a4f77410c200d1"},
+    {file = "aiohttp-3.11.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a896059b6937d1a22d8ee8377cdcd097bd26cd8c653b8f972051488b9baadee9"},
+    {file = "aiohttp-3.11.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:104deb7873681273c5daa13c41924693df394043a118dae90387d35bc5531788"},
+    {file = "aiohttp-3.11.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae36ae52b0c22fb69fb8b744eff82a20db512a29eafc6e3a4ab43b17215b219d"},
+    {file = "aiohttp-3.11.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b7349205bb163318dcc102329d30be59a647a3d24c82c3d91ed35b7e7301ea7e"},
+    {file = "aiohttp-3.11.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:9095580806d9ed07c0c29b23364a0b1fb78258ef9f4bddf7e55bac0e475d4edf"},
+    {file = "aiohttp-3.11.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:4d218d3eca40196384ad3b481309c56fd60e664128885d1734da0a8aa530d433"},
+    {file = "aiohttp-3.11.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:6533dd06df3d17d1756829b68b365b1583929b54082db8f65083a4184bf68322"},
+    {file = "aiohttp-3.11.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:72cd984f7f14e8c01b3e38f18f39ea85dba84e52ea05e37116ba5e2a72eef396"},
+    {file = "aiohttp-3.11.0-cp311-cp311-win32.whl", hash = "sha256:c1828e10c3a49e2b234b87600ecb68a92b8a8dcf8b99bca9447f16c4baaa1630"},
+    {file = "aiohttp-3.11.0-cp311-cp311-win_amd64.whl", hash = "sha256:900ff74d78eb580ae4aa5883242893b123a0c442a46570902500f08d6a7e6696"},
+    {file = "aiohttp-3.11.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:f8f0d79b923070f25674e4ea8f3d61c9d89d24d9598d50ff32c5b9b23c79a25b"},
+    {file = "aiohttp-3.11.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:113bf06b029143e94a47c4f36e11a8b7e396e9d1f1fc8cea58e6b7e370cfed38"},
+    {file = "aiohttp-3.11.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3e1ed8d152cccceffb1ee7a2ac227c16372e453fb11b3aeaa56783049b85d3f6"},
+    {file = "aiohttp-3.11.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2e82e515e268b965424ecabebd91834a41b36260b6ef5db015ee12ddb28ef3"},
+    {file = "aiohttp-3.11.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c1c49bc393d854d4421ebc174a0a41f9261f50d3694d8ca277146cbbcfd24ee7"},
+    {file = "aiohttp-3.11.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57e17c6d71f2dc857a8a1d09be1be7802e35d90fb4ba4b06cf1aab6414a57894"},
+    {file = "aiohttp-3.11.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:12071dd2cc95ba81e0f2737bebcb98b2a8656015e87772e84e8fb9e635b5da6e"},
+    {file = "aiohttp-3.11.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:97056d3422594e0787733ac4c45bef58722d452f4dc6615fee42f59fe51707dd"},
+    {file = "aiohttp-3.11.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2ec5efbc872b00ddd85e3904059d274f284cff314e13f48776050ca2c58f451d"},
+    {file = "aiohttp-3.11.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:dd505a1121ad5b666191840b7bd1d8cb917df2647deeca6f3474331b72452362"},
+    {file = "aiohttp-3.11.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:600b1d9f86a130131915e2f2127664311b33902c486b21a747d626f5144b4471"},
+    {file = "aiohttp-3.11.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:8c47a0ba6c2b3d3e5715f8338d657badd21f778c6be16701922c65521c5ecfc9"},
+    {file = "aiohttp-3.11.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8b323b5d3aef7dd811424c269322eec58a977c0c8152e650159e47210d900504"},
+    {file = "aiohttp-3.11.0-cp312-cp312-win32.whl", hash = "sha256:aabc4e92cb153636d6be54e84dad1b252ddb9aebe077942b6dcffe5e468d476a"},
+    {file = "aiohttp-3.11.0-cp312-cp312-win_amd64.whl", hash = "sha256:508cfcc99534b1282595357592d8367b44392b21f6eb5d4dc021f8d0d809e94d"},
+    {file = "aiohttp-3.11.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:c98a596ac20e8980cc6f34c0c92a113e98eb08f3997c150064d26d2aeb043e5a"},
+    {file = "aiohttp-3.11.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ad14cdc0fba4df31c0f6e06c21928c5b924725cbf60d0ccc5f6e7132636250e9"},
+    {file = "aiohttp-3.11.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:170fb2324826bb9f08055a8291f42192ae5ee2f25b2966c8f0f4537c61d73a7b"},
+    {file = "aiohttp-3.11.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cdad66685fcf2ad14ce522cf849d4a025f4fd206d6cfc3f403d9873e4c243b03"},
+    {file = "aiohttp-3.11.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8b95a63a8e8b5f0464bd8b1b0d59d2bec98a59b6aacc71e9be23df6989b3dfb"},
+    {file = "aiohttp-3.11.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e7bcfcede95531589295f56e924702cef7f9685c9e4e5407592e04ded6a65bf3"},
+    {file = "aiohttp-3.11.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ecc2fb1a0a9d48cf773add34196cddf7e488e48e9596e090849751bf43098f4"},
+    {file = "aiohttp-3.11.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8fef105113d56e817cb9bcc609667ee461321413a7b972b03f5b4939f40f307c"},
+    {file = "aiohttp-3.11.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d33b4490026968bdc7f0729b9d87a3a6b1e09043557d2fc1c605c6072deb2f11"},
+    {file = "aiohttp-3.11.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:6362f50a6f0e5482c4330d2151cb682779230683da0e155c15ec9fc58cb50b6a"},
+    {file = "aiohttp-3.11.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4f698aa61879df64425191d41213dfd99efdc1627e6398e6d7aa5c312fac9702"},
+    {file = "aiohttp-3.11.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:0e7a0762cc29cd3acd01a4d2b547b3af7956ad230ebb80b529a8e4f3e4740fe8"},
+    {file = "aiohttp-3.11.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b3e4fb7f5354d39490d8209aefdf5830b208d01c7293a2164e404312c3d8bc55"},
+    {file = "aiohttp-3.11.0-cp313-cp313-win32.whl", hash = "sha256:6c5a6958f4366496004cf503d847093d464814543f157ef3b738bbf604232415"},
+    {file = "aiohttp-3.11.0-cp313-cp313-win_amd64.whl", hash = "sha256:3ed360d6672a9423aad39902a4e9fe305464d20ed7931dbdba30a4625782d875"},
+    {file = "aiohttp-3.11.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d1ea006426edf7e1299c52a58b0443158012f7a56fed3515164b60bfcb1503a9"},
+    {file = "aiohttp-3.11.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c5e6a1f8b0268ffa1c84d7c3558724956002ba8361176e76406233e704bbcffb"},
+    {file = "aiohttp-3.11.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:40dc9446cff326672fcbf93efdb8ef7e949824de1097624efe4f61ac7f0d2c43"},
+    {file = "aiohttp-3.11.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21b4545e8d96870da9652930c5198366605ff8f982757030e2148cf341e5746b"},
+    {file = "aiohttp-3.11.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:37f8cf3c43f292d9bb3e6760476c2b55b9663a581fad682a586a410c43a7683e"},
+    {file = "aiohttp-3.11.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:329f5059e0bf6983dceebac8e6ed20e75eaff6163b3414f4a4cb59e0d7037672"},
+    {file = "aiohttp-3.11.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85ae6f182be72c3531915e90625cc65afce4df8a0fc4988bd52d8a5d5faaeb68"},
+    {file = "aiohttp-3.11.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d664e5f937c08adb7908ea9f391fbf2928a9b09cb412ac0aba602bde9e499e4"},
+    {file = "aiohttp-3.11.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:feca9fafa4385aea6759c171cd25ea82f7375312fca04178dae35331be45e538"},
+    {file = "aiohttp-3.11.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:c415b9601ff50709d6050c8a9281733a9b042b9e589265ac40305b875cf9c463"},
+    {file = "aiohttp-3.11.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:91d3991fad8b65e5dbc13cd95669ea689fe0a96ff63e4e64ac24ed724e4f8103"},
+    {file = "aiohttp-3.11.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:9231d610754724273a6ac05a1f177979490bfa6f84d49646df3928af2e88cfd5"},
+    {file = "aiohttp-3.11.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:4e4e155968040e32c124a89852a1a5426d0e920a35f4331e1b3949037bfe93a3"},
+    {file = "aiohttp-3.11.0-cp39-cp39-win32.whl", hash = "sha256:76d6ee8bb132f8ee0fcb0e205b4708ddb6fba524eb515ee168113063d825131b"},
+    {file = "aiohttp-3.11.0-cp39-cp39-win_amd64.whl", hash = "sha256:577c7429f8869fa30186fc2c9eee64d75a30b51b61f26aac9725866ae5985cfd"},
+    {file = "aiohttp-3.11.0.tar.gz", hash = "sha256:f57a0de48dda792629e7952d34a0c7b81ea336bb9b721391c7c58145b237fe55"},
+]
+
+[package.dependencies]
+aiohappyeyeballs = ">=2.3.0"
+aiosignal = ">=1.1.2"
+async-timeout = {version = ">=4.0,<6.0", markers = "python_version < \"3.11\""}
+attrs = ">=17.3.0"
+frozenlist = ">=1.1.1"
+multidict = ">=4.5,<7.0"
+propcache = ">=0.2.0"
+yarl = ">=1.17.0,<2.0"
+
+[package.extras]
+speedups = ["Brotli", "aiodns (>=3.2.0)", "brotlicffi"]
+
+[[package]]
+name = "aiosignal"
+version = "1.3.2"
+description = "aiosignal: a list of registered asynchronous callbacks"
+optional = false
+python-versions = ">=3.9"
+files = [
+    {file = "aiosignal-1.3.2-py2.py3-none-any.whl", hash = "sha256:45cde58e409a301715980c2b01d0c28bdde3770d8290b5eb2173759d9acb31a5"},
+    {file = "aiosignal-1.3.2.tar.gz", hash = "sha256:a8c255c66fafb1e499c9351d0bf32ff2d8a0321595ebac3b93713656d2436f54"},
+]
+
+[package.dependencies]
+frozenlist = ">=1.1.0"
+
 [[package]]
 name = "argcomplete"
 version = "2.0.6"
@@ -15,6 +138,17 @@ files = [
 lint = ["flake8", "mypy"]
 test = ["coverage", "flake8", "mypy", "pexpect", "wheel"]
 
+[[package]]
+name = "async-timeout"
+version = "5.0.1"
+description = "Timeout context manager for asyncio programs"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "async_timeout-5.0.1-py3-none-any.whl", hash = "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c"},
+    {file = "async_timeout-5.0.1.tar.gz", hash = "sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3"},
+]
+
 [[package]]
 name = "attrs"
 version = "24.2.0"
@@ -419,6 +553,107 @@ docs = ["furo (>=2024.8.6)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.
 testing = ["covdefaults (>=2.3)", "coverage (>=7.6.1)", "diff-cover (>=9.2)", "pytest (>=8.3.3)", "pytest-asyncio (>=0.24)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.26.4)"]
 typing = ["typing-extensions (>=4.12.2)"]
 
+[[package]]
+name = "frozenlist"
+version = "1.5.0"
+description = "A list-like structure which implements collections.abc.MutableSequence"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "frozenlist-1.5.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:5b6a66c18b5b9dd261ca98dffcb826a525334b2f29e7caa54e182255c5f6a65a"},
+    {file = "frozenlist-1.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d1b3eb7b05ea246510b43a7e53ed1653e55c2121019a97e60cad7efb881a97bb"},
+    {file = "frozenlist-1.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:15538c0cbf0e4fa11d1e3a71f823524b0c46299aed6e10ebb4c2089abd8c3bec"},
+    {file = "frozenlist-1.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e79225373c317ff1e35f210dd5f1344ff31066ba8067c307ab60254cd3a78ad5"},
+    {file = "frozenlist-1.5.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9272fa73ca71266702c4c3e2d4a28553ea03418e591e377a03b8e3659d94fa76"},
+    {file = "frozenlist-1.5.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:498524025a5b8ba81695761d78c8dd7382ac0b052f34e66939c42df860b8ff17"},
+    {file = "frozenlist-1.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:92b5278ed9d50fe610185ecd23c55d8b307d75ca18e94c0e7de328089ac5dcba"},
+    {file = "frozenlist-1.5.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f3c8c1dacd037df16e85227bac13cca58c30da836c6f936ba1df0c05d046d8d"},
+    {file = "frozenlist-1.5.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f2ac49a9bedb996086057b75bf93538240538c6d9b38e57c82d51f75a73409d2"},
+    {file = "frozenlist-1.5.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e66cc454f97053b79c2ab09c17fbe3c825ea6b4de20baf1be28919460dd7877f"},
+    {file = "frozenlist-1.5.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:5a3ba5f9a0dfed20337d3e966dc359784c9f96503674c2faf015f7fe8e96798c"},
+    {file = "frozenlist-1.5.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:6321899477db90bdeb9299ac3627a6a53c7399c8cd58d25da094007402b039ab"},
+    {file = "frozenlist-1.5.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:76e4753701248476e6286f2ef492af900ea67d9706a0155335a40ea21bf3b2f5"},
+    {file = "frozenlist-1.5.0-cp310-cp310-win32.whl", hash = "sha256:977701c081c0241d0955c9586ffdd9ce44f7a7795df39b9151cd9a6fd0ce4cfb"},
+    {file = "frozenlist-1.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:189f03b53e64144f90990d29a27ec4f7997d91ed3d01b51fa39d2dbe77540fd4"},
+    {file = "frozenlist-1.5.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:fd74520371c3c4175142d02a976aee0b4cb4a7cc912a60586ffd8d5929979b30"},
+    {file = "frozenlist-1.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2f3f7a0fbc219fb4455264cae4d9f01ad41ae6ee8524500f381de64ffaa077d5"},
+    {file = "frozenlist-1.5.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f47c9c9028f55a04ac254346e92977bf0f166c483c74b4232bee19a6697e4778"},
+    {file = "frozenlist-1.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0996c66760924da6e88922756d99b47512a71cfd45215f3570bf1e0b694c206a"},
+    {file = "frozenlist-1.5.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a2fe128eb4edeabe11896cb6af88fca5346059f6c8d807e3b910069f39157869"},
+    {file = "frozenlist-1.5.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1a8ea951bbb6cacd492e3948b8da8c502a3f814f5d20935aae74b5df2b19cf3d"},
+    {file = "frozenlist-1.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:de537c11e4aa01d37db0d403b57bd6f0546e71a82347a97c6a9f0dcc532b3a45"},
+    {file = "frozenlist-1.5.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c2623347b933fcb9095841f1cc5d4ff0b278addd743e0e966cb3d460278840d"},
+    {file = "frozenlist-1.5.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:cee6798eaf8b1416ef6909b06f7dc04b60755206bddc599f52232606e18179d3"},
+    {file = "frozenlist-1.5.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:f5f9da7f5dbc00a604fe74aa02ae7c98bcede8a3b8b9666f9f86fc13993bc71a"},
+    {file = "frozenlist-1.5.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:90646abbc7a5d5c7c19461d2e3eeb76eb0b204919e6ece342feb6032c9325ae9"},
+    {file = "frozenlist-1.5.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:bdac3c7d9b705d253b2ce370fde941836a5f8b3c5c2b8fd70940a3ea3af7f4f2"},
+    {file = "frozenlist-1.5.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:03d33c2ddbc1816237a67f66336616416e2bbb6beb306e5f890f2eb22b959cdf"},
+    {file = "frozenlist-1.5.0-cp311-cp311-win32.whl", hash = "sha256:237f6b23ee0f44066219dae14c70ae38a63f0440ce6750f868ee08775073f942"},
+    {file = "frozenlist-1.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:0cc974cc93d32c42e7b0f6cf242a6bd941c57c61b618e78b6c0a96cb72788c1d"},
+    {file = "frozenlist-1.5.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:31115ba75889723431aa9a4e77d5f398f5cf976eea3bdf61749731f62d4a4a21"},
+    {file = "frozenlist-1.5.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7437601c4d89d070eac8323f121fcf25f88674627505334654fd027b091db09d"},
+    {file = "frozenlist-1.5.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7948140d9f8ece1745be806f2bfdf390127cf1a763b925c4a805c603df5e697e"},
+    {file = "frozenlist-1.5.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:feeb64bc9bcc6b45c6311c9e9b99406660a9c05ca8a5b30d14a78555088b0b3a"},
+    {file = "frozenlist-1.5.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:683173d371daad49cffb8309779e886e59c2f369430ad28fe715f66d08d4ab1a"},
+    {file = "frozenlist-1.5.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7d57d8f702221405a9d9b40f9da8ac2e4a1a8b5285aac6100f3393675f0a85ee"},
+    {file = "frozenlist-1.5.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:30c72000fbcc35b129cb09956836c7d7abf78ab5416595e4857d1cae8d6251a6"},
+    {file = "frozenlist-1.5.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:000a77d6034fbad9b6bb880f7ec073027908f1b40254b5d6f26210d2dab1240e"},
+    {file = "frozenlist-1.5.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5d7f5a50342475962eb18b740f3beecc685a15b52c91f7d975257e13e029eca9"},
+    {file = "frozenlist-1.5.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:87f724d055eb4785d9be84e9ebf0f24e392ddfad00b3fe036e43f489fafc9039"},
+    {file = "frozenlist-1.5.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:6e9080bb2fb195a046e5177f10d9d82b8a204c0736a97a153c2466127de87784"},
+    {file = "frozenlist-1.5.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9b93d7aaa36c966fa42efcaf716e6b3900438632a626fb09c049f6a2f09fc631"},
+    {file = "frozenlist-1.5.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:52ef692a4bc60a6dd57f507429636c2af8b6046db8b31b18dac02cbc8f507f7f"},
+    {file = "frozenlist-1.5.0-cp312-cp312-win32.whl", hash = "sha256:29d94c256679247b33a3dc96cce0f93cbc69c23bf75ff715919332fdbb6a32b8"},
+    {file = "frozenlist-1.5.0-cp312-cp312-win_amd64.whl", hash = "sha256:8969190d709e7c48ea386db202d708eb94bdb29207a1f269bab1196ce0dcca1f"},
+    {file = "frozenlist-1.5.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:7a1a048f9215c90973402e26c01d1cff8a209e1f1b53f72b95c13db61b00f953"},
+    {file = "frozenlist-1.5.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:dd47a5181ce5fcb463b5d9e17ecfdb02b678cca31280639255ce9d0e5aa67af0"},
+    {file = "frozenlist-1.5.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1431d60b36d15cda188ea222033eec8e0eab488f39a272461f2e6d9e1a8e63c2"},
+    {file = "frozenlist-1.5.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6482a5851f5d72767fbd0e507e80737f9c8646ae7fd303def99bfe813f76cf7f"},
+    {file = "frozenlist-1.5.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:44c49271a937625619e862baacbd037a7ef86dd1ee215afc298a417ff3270608"},
+    {file = "frozenlist-1.5.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:12f78f98c2f1c2429d42e6a485f433722b0061d5c0b0139efa64f396efb5886b"},
+    {file = "frozenlist-1.5.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce3aa154c452d2467487765e3adc730a8c153af77ad84096bc19ce19a2400840"},
+    {file = "frozenlist-1.5.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9b7dc0c4338e6b8b091e8faf0db3168a37101943e687f373dce00959583f7439"},
+    {file = "frozenlist-1.5.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:45e0896250900b5aa25180f9aec243e84e92ac84bd4a74d9ad4138ef3f5c97de"},
+    {file = "frozenlist-1.5.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:561eb1c9579d495fddb6da8959fd2a1fca2c6d060d4113f5844b433fc02f2641"},
+    {file = "frozenlist-1.5.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:df6e2f325bfee1f49f81aaac97d2aa757c7646534a06f8f577ce184afe2f0a9e"},
+    {file = "frozenlist-1.5.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:140228863501b44b809fb39ec56b5d4071f4d0aa6d216c19cbb08b8c5a7eadb9"},
+    {file = "frozenlist-1.5.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7707a25d6a77f5d27ea7dc7d1fc608aa0a478193823f88511ef5e6b8a48f9d03"},
+    {file = "frozenlist-1.5.0-cp313-cp313-win32.whl", hash = "sha256:31a9ac2b38ab9b5a8933b693db4939764ad3f299fcaa931a3e605bc3460e693c"},
+    {file = "frozenlist-1.5.0-cp313-cp313-win_amd64.whl", hash = "sha256:11aabdd62b8b9c4b84081a3c246506d1cddd2dd93ff0ad53ede5defec7886b28"},
+    {file = "frozenlist-1.5.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:dd94994fc91a6177bfaafd7d9fd951bc8689b0a98168aa26b5f543868548d3ca"},
+    {file = "frozenlist-1.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2d0da8bbec082bf6bf18345b180958775363588678f64998c2b7609e34719b10"},
+    {file = "frozenlist-1.5.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:73f2e31ea8dd7df61a359b731716018c2be196e5bb3b74ddba107f694fbd7604"},
+    {file = "frozenlist-1.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:828afae9f17e6de596825cf4228ff28fbdf6065974e5ac1410cecc22f699d2b3"},
+    {file = "frozenlist-1.5.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f1577515d35ed5649d52ab4319db757bb881ce3b2b796d7283e6634d99ace307"},
+    {file = "frozenlist-1.5.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2150cc6305a2c2ab33299453e2968611dacb970d2283a14955923062c8d00b10"},
+    {file = "frozenlist-1.5.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a72b7a6e3cd2725eff67cd64c8f13335ee18fc3c7befc05aed043d24c7b9ccb9"},
+    {file = "frozenlist-1.5.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c16d2fa63e0800723139137d667e1056bee1a1cf7965153d2d104b62855e9b99"},
+    {file = "frozenlist-1.5.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:17dcc32fc7bda7ce5875435003220a457bcfa34ab7924a49a1c19f55b6ee185c"},
+    {file = "frozenlist-1.5.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:97160e245ea33d8609cd2b8fd997c850b56db147a304a262abc2b3be021a9171"},
+    {file = "frozenlist-1.5.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:f1e6540b7fa044eee0bb5111ada694cf3dc15f2b0347ca125ee9ca984d5e9e6e"},
+    {file = "frozenlist-1.5.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:91d6c171862df0a6c61479d9724f22efb6109111017c87567cfeb7b5d1449fdf"},
+    {file = "frozenlist-1.5.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c1fac3e2ace2eb1052e9f7c7db480818371134410e1f5c55d65e8f3ac6d1407e"},
+    {file = "frozenlist-1.5.0-cp38-cp38-win32.whl", hash = "sha256:b97f7b575ab4a8af9b7bc1d2ef7f29d3afee2226bd03ca3875c16451ad5a7723"},
+    {file = "frozenlist-1.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:374ca2dabdccad8e2a76d40b1d037f5bd16824933bf7bcea3e59c891fd4a0923"},
+    {file = "frozenlist-1.5.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:9bbcdfaf4af7ce002694a4e10a0159d5a8d20056a12b05b45cea944a4953f972"},
+    {file = "frozenlist-1.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1893f948bf6681733aaccf36c5232c231e3b5166d607c5fa77773611df6dc336"},
+    {file = "frozenlist-1.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2b5e23253bb709ef57a8e95e6ae48daa9ac5f265637529e4ce6b003a37b2621f"},
+    {file = "frozenlist-1.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f253985bb515ecd89629db13cb58d702035ecd8cfbca7d7a7e29a0e6d39af5f"},
+    {file = "frozenlist-1.5.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:04a5c6babd5e8fb7d3c871dc8b321166b80e41b637c31a995ed844a6139942b6"},
+    {file = "frozenlist-1.5.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9fe0f1c29ba24ba6ff6abf688cb0b7cf1efab6b6aa6adc55441773c252f7411"},
+    {file = "frozenlist-1.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:226d72559fa19babe2ccd920273e767c96a49b9d3d38badd7c91a0fdeda8ea08"},
+    {file = "frozenlist-1.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15b731db116ab3aedec558573c1a5eec78822b32292fe4f2f0345b7f697745c2"},
+    {file = "frozenlist-1.5.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:366d8f93e3edfe5a918c874702f78faac300209a4d5bf38352b2c1bdc07a766d"},
+    {file = "frozenlist-1.5.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:1b96af8c582b94d381a1c1f51ffaedeb77c821c690ea5f01da3d70a487dd0a9b"},
+    {file = "frozenlist-1.5.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:c03eff4a41bd4e38415cbed054bbaff4a075b093e2394b6915dca34a40d1e38b"},
+    {file = "frozenlist-1.5.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:50cf5e7ee9b98f22bdecbabf3800ae78ddcc26e4a435515fc72d97903e8488e0"},
+    {file = "frozenlist-1.5.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1e76bfbc72353269c44e0bc2cfe171900fbf7f722ad74c9a7b638052afe6a00c"},
+    {file = "frozenlist-1.5.0-cp39-cp39-win32.whl", hash = "sha256:666534d15ba8f0fda3f53969117383d5dc021266b3c1a42c9ec4855e4b58b9d3"},
+    {file = "frozenlist-1.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:5c28f4b5dbef8a0d8aad0d4de24d1e9e981728628afaf4ea0792f5d0939372f0"},
+    {file = "frozenlist-1.5.0-py3-none-any.whl", hash = "sha256:d994863bba198a4a518b467bb971c56e1db3f180a25c6cf7bb1949c267f748c3"},
+    {file = "frozenlist-1.5.0.tar.gz", hash = "sha256:81d5af29e61b9c8348e876d442253723928dce6433e0e76cd925cd83f1b4b817"},
+]
+
 [[package]]
 name = "identify"
 version = "2.6.3"
@@ -609,6 +844,110 @@ tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
 patchelf = ["patchelf"]
 zig = ["ziglang (>=0.10.0,<0.11.0)"]
 
+[[package]]
+name = "multidict"
+version = "6.1.0"
+description = "multidict implementation"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "multidict-6.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3380252550e372e8511d49481bd836264c009adb826b23fefcc5dd3c69692f60"},
+    {file = "multidict-6.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:99f826cbf970077383d7de805c0681799491cb939c25450b9b5b3ced03ca99f1"},
+    {file = "multidict-6.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a114d03b938376557927ab23f1e950827c3b893ccb94b62fd95d430fd0e5cf53"},
+    {file = "multidict-6.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1c416351ee6271b2f49b56ad7f308072f6f44b37118d69c2cad94f3fa8a40d5"},
+    {file = "multidict-6.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6b5d83030255983181005e6cfbac1617ce9746b219bc2aad52201ad121226581"},
+    {file = "multidict-6.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3e97b5e938051226dc025ec80980c285b053ffb1e25a3db2a3aa3bc046bf7f56"},
+    {file = "multidict-6.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d618649d4e70ac6efcbba75be98b26ef5078faad23592f9b51ca492953012429"},
+    {file = "multidict-6.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:10524ebd769727ac77ef2278390fb0068d83f3acb7773792a5080f2b0abf7748"},
+    {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ff3827aef427c89a25cc96ded1759271a93603aba9fb977a6d264648ebf989db"},
+    {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:06809f4f0f7ab7ea2cabf9caca7d79c22c0758b58a71f9d32943ae13c7ace056"},
+    {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:f179dee3b863ab1c59580ff60f9d99f632f34ccb38bf67a33ec6b3ecadd0fd76"},
+    {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:aaed8b0562be4a0876ee3b6946f6869b7bcdb571a5d1496683505944e268b160"},
+    {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3c8b88a2ccf5493b6c8da9076fb151ba106960a2df90c2633f342f120751a9e7"},
+    {file = "multidict-6.1.0-cp310-cp310-win32.whl", hash = "sha256:4a9cb68166a34117d6646c0023c7b759bf197bee5ad4272f420a0141d7eb03a0"},
+    {file = "multidict-6.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:20b9b5fbe0b88d0bdef2012ef7dee867f874b72528cf1d08f1d59b0e3850129d"},
+    {file = "multidict-6.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3efe2c2cb5763f2f1b275ad2bf7a287d3f7ebbef35648a9726e3b69284a4f3d6"},
+    {file = "multidict-6.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c7053d3b0353a8b9de430a4f4b4268ac9a4fb3481af37dfe49825bf45ca24156"},
+    {file = "multidict-6.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:27e5fc84ccef8dfaabb09d82b7d179c7cf1a3fbc8a966f8274fcb4ab2eb4cadb"},
+    {file = "multidict-6.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e2b90b43e696f25c62656389d32236e049568b39320e2735d51f08fd362761b"},
+    {file = "multidict-6.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d83a047959d38a7ff552ff94be767b7fd79b831ad1cd9920662db05fec24fe72"},
+    {file = "multidict-6.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d1a9dd711d0877a1ece3d2e4fea11a8e75741ca21954c919406b44e7cf971304"},
+    {file = "multidict-6.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec2abea24d98246b94913b76a125e855eb5c434f7c46546046372fe60f666351"},
+    {file = "multidict-6.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4867cafcbc6585e4b678876c489b9273b13e9fff9f6d6d66add5e15d11d926cb"},
+    {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5b48204e8d955c47c55b72779802b219a39acc3ee3d0116d5080c388970b76e3"},
+    {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:d8fff389528cad1618fb4b26b95550327495462cd745d879a8c7c2115248e399"},
+    {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:a7a9541cd308eed5e30318430a9c74d2132e9a8cb46b901326272d780bf2d423"},
+    {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:da1758c76f50c39a2efd5e9859ce7d776317eb1dd34317c8152ac9251fc574a3"},
+    {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c943a53e9186688b45b323602298ab727d8865d8c9ee0b17f8d62d14b56f0753"},
+    {file = "multidict-6.1.0-cp311-cp311-win32.whl", hash = "sha256:90f8717cb649eea3504091e640a1b8568faad18bd4b9fcd692853a04475a4b80"},
+    {file = "multidict-6.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:82176036e65644a6cc5bd619f65f6f19781e8ec2e5330f51aa9ada7504cc1926"},
+    {file = "multidict-6.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:b04772ed465fa3cc947db808fa306d79b43e896beb677a56fb2347ca1a49c1fa"},
+    {file = "multidict-6.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6180c0ae073bddeb5a97a38c03f30c233e0a4d39cd86166251617d1bbd0af436"},
+    {file = "multidict-6.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:071120490b47aa997cca00666923a83f02c7fbb44f71cf7f136df753f7fa8761"},
+    {file = "multidict-6.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50b3a2710631848991d0bf7de077502e8994c804bb805aeb2925a981de58ec2e"},
+    {file = "multidict-6.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b58c621844d55e71c1b7f7c498ce5aa6985d743a1a59034c57a905b3f153c1ef"},
+    {file = "multidict-6.1.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55b6d90641869892caa9ca42ff913f7ff1c5ece06474fbd32fb2cf6834726c95"},
+    {file = "multidict-6.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b820514bfc0b98a30e3d85462084779900347e4d49267f747ff54060cc33925"},
+    {file = "multidict-6.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:10a9b09aba0c5b48c53761b7c720aaaf7cf236d5fe394cd399c7ba662d5f9966"},
+    {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1e16bf3e5fc9f44632affb159d30a437bfe286ce9e02754759be5536b169b305"},
+    {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:76f364861c3bfc98cbbcbd402d83454ed9e01a5224bb3a28bf70002a230f73e2"},
+    {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:820c661588bd01a0aa62a1283f20d2be4281b086f80dad9e955e690c75fb54a2"},
+    {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:0e5f362e895bc5b9e67fe6e4ded2492d8124bdf817827f33c5b46c2fe3ffaca6"},
+    {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3ec660d19bbc671e3a6443325f07263be452c453ac9e512f5eb935e7d4ac28b3"},
+    {file = "multidict-6.1.0-cp312-cp312-win32.whl", hash = "sha256:58130ecf8f7b8112cdb841486404f1282b9c86ccb30d3519faf301b2e5659133"},
+    {file = "multidict-6.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:188215fc0aafb8e03341995e7c4797860181562380f81ed0a87ff455b70bf1f1"},
+    {file = "multidict-6.1.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:d569388c381b24671589335a3be6e1d45546c2988c2ebe30fdcada8457a31008"},
+    {file = "multidict-6.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:052e10d2d37810b99cc170b785945421141bf7bb7d2f8799d431e7db229c385f"},
+    {file = "multidict-6.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f90c822a402cb865e396a504f9fc8173ef34212a342d92e362ca498cad308e28"},
+    {file = "multidict-6.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b225d95519a5bf73860323e633a664b0d85ad3d5bede6d30d95b35d4dfe8805b"},
+    {file = "multidict-6.1.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:23bfd518810af7de1116313ebd9092cb9aa629beb12f6ed631ad53356ed6b86c"},
+    {file = "multidict-6.1.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c09fcfdccdd0b57867577b719c69e347a436b86cd83747f179dbf0cc0d4c1f3"},
+    {file = "multidict-6.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf6bea52ec97e95560af5ae576bdac3aa3aae0b6758c6efa115236d9e07dae44"},
+    {file = "multidict-6.1.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57feec87371dbb3520da6192213c7d6fc892d5589a93db548331954de8248fd2"},
+    {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0c3f390dc53279cbc8ba976e5f8035eab997829066756d811616b652b00a23a3"},
+    {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:59bfeae4b25ec05b34f1956eaa1cb38032282cd4dfabc5056d0a1ec4d696d3aa"},
+    {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:b2f59caeaf7632cc633b5cf6fc449372b83bbdf0da4ae04d5be36118e46cc0aa"},
+    {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:37bb93b2178e02b7b618893990941900fd25b6b9ac0fa49931a40aecdf083fe4"},
+    {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4e9f48f58c2c523d5a06faea47866cd35b32655c46b443f163d08c6d0ddb17d6"},
+    {file = "multidict-6.1.0-cp313-cp313-win32.whl", hash = "sha256:3a37ffb35399029b45c6cc33640a92bef403c9fd388acce75cdc88f58bd19a81"},
+    {file = "multidict-6.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:e9aa71e15d9d9beaad2c6b9319edcdc0a49a43ef5c0a4c8265ca9ee7d6c67774"},
+    {file = "multidict-6.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:db7457bac39421addd0c8449933ac32d8042aae84a14911a757ae6ca3eef1392"},
+    {file = "multidict-6.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d094ddec350a2fb899fec68d8353c78233debde9b7d8b4beeafa70825f1c281a"},
+    {file = "multidict-6.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5845c1fd4866bb5dd3125d89b90e57ed3138241540897de748cdf19de8a2fca2"},
+    {file = "multidict-6.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9079dfc6a70abe341f521f78405b8949f96db48da98aeb43f9907f342f627cdc"},
+    {file = "multidict-6.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3914f5aaa0f36d5d60e8ece6a308ee1c9784cd75ec8151062614657a114c4478"},
+    {file = "multidict-6.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c08be4f460903e5a9d0f76818db3250f12e9c344e79314d1d570fc69d7f4eae4"},
+    {file = "multidict-6.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d093be959277cb7dee84b801eb1af388b6ad3ca6a6b6bf1ed7585895789d027d"},
+    {file = "multidict-6.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3702ea6872c5a2a4eeefa6ffd36b042e9773f05b1f37ae3ef7264b1163c2dcf6"},
+    {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:2090f6a85cafc5b2db085124d752757c9d251548cedabe9bd31afe6363e0aff2"},
+    {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:f67f217af4b1ff66c68a87318012de788dd95fcfeb24cc889011f4e1c7454dfd"},
+    {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:189f652a87e876098bbc67b4da1049afb5f5dfbaa310dd67c594b01c10388db6"},
+    {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:6bb5992037f7a9eff7991ebe4273ea7f51f1c1c511e6a2ce511d0e7bdb754492"},
+    {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:ac10f4c2b9e770c4e393876e35a7046879d195cd123b4f116d299d442b335bcd"},
+    {file = "multidict-6.1.0-cp38-cp38-win32.whl", hash = "sha256:e27bbb6d14416713a8bd7aaa1313c0fc8d44ee48d74497a0ff4c3a1b6ccb5167"},
+    {file = "multidict-6.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:22f3105d4fb15c8f57ff3959a58fcab6ce36814486500cd7485651230ad4d4ef"},
+    {file = "multidict-6.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:4e18b656c5e844539d506a0a06432274d7bd52a7487e6828c63a63d69185626c"},
+    {file = "multidict-6.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a185f876e69897a6f3325c3f19f26a297fa058c5e456bfcff8015e9a27e83ae1"},
+    {file = "multidict-6.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ab7c4ceb38d91570a650dba194e1ca87c2b543488fe9309b4212694174fd539c"},
+    {file = "multidict-6.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e617fb6b0b6953fffd762669610c1c4ffd05632c138d61ac7e14ad187870669c"},
+    {file = "multidict-6.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:16e5f4bf4e603eb1fdd5d8180f1a25f30056f22e55ce51fb3d6ad4ab29f7d96f"},
+    {file = "multidict-6.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f4c035da3f544b1882bac24115f3e2e8760f10a0107614fc9839fd232200b875"},
+    {file = "multidict-6.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:957cf8e4b6e123a9eea554fa7ebc85674674b713551de587eb318a2df3e00255"},
+    {file = "multidict-6.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:483a6aea59cb89904e1ceabd2b47368b5600fb7de78a6e4a2c2987b2d256cf30"},
+    {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:87701f25a2352e5bf7454caa64757642734da9f6b11384c1f9d1a8e699758057"},
+    {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:682b987361e5fd7a139ed565e30d81fd81e9629acc7d925a205366877d8c8657"},
+    {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ce2186a7df133a9c895dea3331ddc5ddad42cdd0d1ea2f0a51e5d161e4762f28"},
+    {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:9f636b730f7e8cb19feb87094949ba54ee5357440b9658b2a32a5ce4bce53972"},
+    {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:73eae06aa53af2ea5270cc066dcaf02cc60d2994bbb2c4ef5764949257d10f43"},
+    {file = "multidict-6.1.0-cp39-cp39-win32.whl", hash = "sha256:1ca0083e80e791cffc6efce7660ad24af66c8d4079d2a750b29001b53ff59ada"},
+    {file = "multidict-6.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:aa466da5b15ccea564bdab9c89175c762bc12825f4659c11227f515cee76fa4a"},
+    {file = "multidict-6.1.0-py3-none-any.whl", hash = "sha256:48e171e52d1c4d33888e529b999e5900356b9ae588c2f09a52dcefb158b27506"},
+    {file = "multidict-6.1.0.tar.gz", hash = "sha256:22ae2ebf9b0c69d206c003e2f6a914ea33f0a932d4aa16f236afc049d9958f4a"},
+]
+
+[package.dependencies]
+typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.11\""}
+
 [[package]]
 name = "multiprocess"
 version = "0.70.14"
@@ -871,6 +1210,97 @@ files = [
 [package.dependencies]
 wcwidth = "*"
 
+[[package]]
+name = "propcache"
+version = "0.2.1"
+description = "Accelerated property cache"
+optional = false
+python-versions = ">=3.9"
+files = [
+    {file = "propcache-0.2.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:6b3f39a85d671436ee3d12c017f8fdea38509e4f25b28eb25877293c98c243f6"},
+    {file = "propcache-0.2.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d51fbe4285d5db5d92a929e3e21536ea3dd43732c5b177c7ef03f918dff9f2"},
+    {file = "propcache-0.2.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6445804cf4ec763dc70de65a3b0d9954e868609e83850a47ca4f0cb64bd79fea"},
+    {file = "propcache-0.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9479aa06a793c5aeba49ce5c5692ffb51fcd9a7016e017d555d5e2b0045d212"},
+    {file = "propcache-0.2.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d9631c5e8b5b3a0fda99cb0d29c18133bca1e18aea9effe55adb3da1adef80d3"},
+    {file = "propcache-0.2.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3156628250f46a0895f1f36e1d4fbe062a1af8718ec3ebeb746f1d23f0c5dc4d"},
+    {file = "propcache-0.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b6fb63ae352e13748289f04f37868099e69dba4c2b3e271c46061e82c745634"},
+    {file = "propcache-0.2.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:887d9b0a65404929641a9fabb6452b07fe4572b269d901d622d8a34a4e9043b2"},
+    {file = "propcache-0.2.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a96dc1fa45bd8c407a0af03b2d5218392729e1822b0c32e62c5bf7eeb5fb3958"},
+    {file = "propcache-0.2.1-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:a7e65eb5c003a303b94aa2c3852ef130230ec79e349632d030e9571b87c4698c"},
+    {file = "propcache-0.2.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:999779addc413181912e984b942fbcc951be1f5b3663cd80b2687758f434c583"},
+    {file = "propcache-0.2.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:19a0f89a7bb9d8048d9c4370c9c543c396e894c76be5525f5e1ad287f1750ddf"},
+    {file = "propcache-0.2.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:1ac2f5fe02fa75f56e1ad473f1175e11f475606ec9bd0be2e78e4734ad575034"},
+    {file = "propcache-0.2.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:574faa3b79e8ebac7cb1d7930f51184ba1ccf69adfdec53a12f319a06030a68b"},
+    {file = "propcache-0.2.1-cp310-cp310-win32.whl", hash = "sha256:03ff9d3f665769b2a85e6157ac8b439644f2d7fd17615a82fa55739bc97863f4"},
+    {file = "propcache-0.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:2d3af2e79991102678f53e0dbf4c35de99b6b8b58f29a27ca0325816364caaba"},
+    {file = "propcache-0.2.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:1ffc3cca89bb438fb9c95c13fc874012f7b9466b89328c3c8b1aa93cdcfadd16"},
+    {file = "propcache-0.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f174bbd484294ed9fdf09437f889f95807e5f229d5d93588d34e92106fbf6717"},
+    {file = "propcache-0.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:70693319e0b8fd35dd863e3e29513875eb15c51945bf32519ef52927ca883bc3"},
+    {file = "propcache-0.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b480c6a4e1138e1aa137c0079b9b6305ec6dcc1098a8ca5196283e8a49df95a9"},
+    {file = "propcache-0.2.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d27b84d5880f6d8aa9ae3edb253c59d9f6642ffbb2c889b78b60361eed449787"},
+    {file = "propcache-0.2.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:857112b22acd417c40fa4595db2fe28ab900c8c5fe4670c7989b1c0230955465"},
+    {file = "propcache-0.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cf6c4150f8c0e32d241436526f3c3f9cbd34429492abddbada2ffcff506c51af"},
+    {file = "propcache-0.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:66d4cfda1d8ed687daa4bc0274fcfd5267873db9a5bc0418c2da19273040eeb7"},
+    {file = "propcache-0.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c2f992c07c0fca81655066705beae35fc95a2fa7366467366db627d9f2ee097f"},
+    {file = "propcache-0.2.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:4a571d97dbe66ef38e472703067021b1467025ec85707d57e78711c085984e54"},
+    {file = "propcache-0.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:bb6178c241278d5fe853b3de743087be7f5f4c6f7d6d22a3b524d323eecec505"},
+    {file = "propcache-0.2.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:ad1af54a62ffe39cf34db1aa6ed1a1873bd548f6401db39d8e7cd060b9211f82"},
+    {file = "propcache-0.2.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:e7048abd75fe40712005bcfc06bb44b9dfcd8e101dda2ecf2f5aa46115ad07ca"},
+    {file = "propcache-0.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:160291c60081f23ee43d44b08a7e5fb76681221a8e10b3139618c5a9a291b84e"},
+    {file = "propcache-0.2.1-cp311-cp311-win32.whl", hash = "sha256:819ce3b883b7576ca28da3861c7e1a88afd08cc8c96908e08a3f4dd64a228034"},
+    {file = "propcache-0.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:edc9fc7051e3350643ad929df55c451899bb9ae6d24998a949d2e4c87fb596d3"},
+    {file = "propcache-0.2.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:081a430aa8d5e8876c6909b67bd2d937bfd531b0382d3fdedb82612c618bc41a"},
+    {file = "propcache-0.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d2ccec9ac47cf4e04897619c0e0c1a48c54a71bdf045117d3a26f80d38ab1fb0"},
+    {file = "propcache-0.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:14d86fe14b7e04fa306e0c43cdbeebe6b2c2156a0c9ce56b815faacc193e320d"},
+    {file = "propcache-0.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:049324ee97bb67285b49632132db351b41e77833678432be52bdd0289c0e05e4"},
+    {file = "propcache-0.2.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1cd9a1d071158de1cc1c71a26014dcdfa7dd3d5f4f88c298c7f90ad6f27bb46d"},
+    {file = "propcache-0.2.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98110aa363f1bb4c073e8dcfaefd3a5cea0f0834c2aab23dda657e4dab2f53b5"},
+    {file = "propcache-0.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:647894f5ae99c4cf6bb82a1bb3a796f6e06af3caa3d32e26d2350d0e3e3faf24"},
+    {file = "propcache-0.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bfd3223c15bebe26518d58ccf9a39b93948d3dcb3e57a20480dfdd315356baff"},
+    {file = "propcache-0.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d71264a80f3fcf512eb4f18f59423fe82d6e346ee97b90625f283df56aee103f"},
+    {file = "propcache-0.2.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:e73091191e4280403bde6c9a52a6999d69cdfde498f1fdf629105247599b57ec"},
+    {file = "propcache-0.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3935bfa5fede35fb202c4b569bb9c042f337ca4ff7bd540a0aa5e37131659348"},
+    {file = "propcache-0.2.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:f508b0491767bb1f2b87fdfacaba5f7eddc2f867740ec69ece6d1946d29029a6"},
+    {file = "propcache-0.2.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:1672137af7c46662a1c2be1e8dc78cb6d224319aaa40271c9257d886be4363a6"},
+    {file = "propcache-0.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b74c261802d3d2b85c9df2dfb2fa81b6f90deeef63c2db9f0e029a3cac50b518"},
+    {file = "propcache-0.2.1-cp312-cp312-win32.whl", hash = "sha256:d09c333d36c1409d56a9d29b3a1b800a42c76a57a5a8907eacdbce3f18768246"},
+    {file = "propcache-0.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:c214999039d4f2a5b2073ac506bba279945233da8c786e490d411dfc30f855c1"},
+    {file = "propcache-0.2.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:aca405706e0b0a44cc6bfd41fbe89919a6a56999157f6de7e182a990c36e37bc"},
+    {file = "propcache-0.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:12d1083f001ace206fe34b6bdc2cb94be66d57a850866f0b908972f90996b3e9"},
+    {file = "propcache-0.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d93f3307ad32a27bda2e88ec81134b823c240aa3abb55821a8da553eed8d9439"},
+    {file = "propcache-0.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba278acf14471d36316159c94a802933d10b6a1e117b8554fe0d0d9b75c9d536"},
+    {file = "propcache-0.2.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4e6281aedfca15301c41f74d7005e6e3f4ca143584ba696ac69df4f02f40d629"},
+    {file = "propcache-0.2.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5b750a8e5a1262434fb1517ddf64b5de58327f1adc3524a5e44c2ca43305eb0b"},
+    {file = "propcache-0.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf72af5e0fb40e9babf594308911436c8efde3cb5e75b6f206c34ad18be5c052"},
+    {file = "propcache-0.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b2d0a12018b04f4cb820781ec0dffb5f7c7c1d2a5cd22bff7fb055a2cb19ebce"},
+    {file = "propcache-0.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e800776a79a5aabdb17dcc2346a7d66d0777e942e4cd251defeb084762ecd17d"},
+    {file = "propcache-0.2.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:4160d9283bd382fa6c0c2b5e017acc95bc183570cd70968b9202ad6d8fc48dce"},
+    {file = "propcache-0.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:30b43e74f1359353341a7adb783c8f1b1c676367b011709f466f42fda2045e95"},
+    {file = "propcache-0.2.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:58791550b27d5488b1bb52bc96328456095d96206a250d28d874fafe11b3dfaf"},
+    {file = "propcache-0.2.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:0f022d381747f0dfe27e99d928e31bc51a18b65bb9e481ae0af1380a6725dd1f"},
+    {file = "propcache-0.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:297878dc9d0a334358f9b608b56d02e72899f3b8499fc6044133f0d319e2ec30"},
+    {file = "propcache-0.2.1-cp313-cp313-win32.whl", hash = "sha256:ddfab44e4489bd79bda09d84c430677fc7f0a4939a73d2bba3073036f487a0a6"},
+    {file = "propcache-0.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:556fc6c10989f19a179e4321e5d678db8eb2924131e64652a51fe83e4c3db0e1"},
+    {file = "propcache-0.2.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:6a9a8c34fb7bb609419a211e59da8887eeca40d300b5ea8e56af98f6fbbb1541"},
+    {file = "propcache-0.2.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ae1aa1cd222c6d205853b3013c69cd04515f9d6ab6de4b0603e2e1c33221303e"},
+    {file = "propcache-0.2.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:accb6150ce61c9c4b7738d45550806aa2b71c7668c6942f17b0ac182b6142fd4"},
+    {file = "propcache-0.2.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5eee736daafa7af6d0a2dc15cc75e05c64f37fc37bafef2e00d77c14171c2097"},
+    {file = "propcache-0.2.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7a31fc1e1bd362874863fdeed71aed92d348f5336fd84f2197ba40c59f061bd"},
+    {file = "propcache-0.2.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cba4cfa1052819d16699e1d55d18c92b6e094d4517c41dd231a8b9f87b6fa681"},
+    {file = "propcache-0.2.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f089118d584e859c62b3da0892b88a83d611c2033ac410e929cb6754eec0ed16"},
+    {file = "propcache-0.2.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:781e65134efaf88feb447e8c97a51772aa75e48b794352f94cb7ea717dedda0d"},
+    {file = "propcache-0.2.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:31f5af773530fd3c658b32b6bdc2d0838543de70eb9a2156c03e410f7b0d3aae"},
+    {file = "propcache-0.2.1-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:a7a078f5d37bee6690959c813977da5291b24286e7b962e62a94cec31aa5188b"},
+    {file = "propcache-0.2.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:cea7daf9fc7ae6687cf1e2c049752f19f146fdc37c2cc376e7d0032cf4f25347"},
+    {file = "propcache-0.2.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:8b3489ff1ed1e8315674d0775dc7d2195fb13ca17b3808721b54dbe9fd020faf"},
+    {file = "propcache-0.2.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:9403db39be1393618dd80c746cb22ccda168efce239c73af13c3763ef56ffc04"},
+    {file = "propcache-0.2.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:5d97151bc92d2b2578ff7ce779cdb9174337390a535953cbb9452fb65164c587"},
+    {file = "propcache-0.2.1-cp39-cp39-win32.whl", hash = "sha256:9caac6b54914bdf41bcc91e7eb9147d331d29235a7c967c150ef5df6464fd1bb"},
+    {file = "propcache-0.2.1-cp39-cp39-win_amd64.whl", hash = "sha256:92fc4500fcb33899b05ba73276dfb684a20d31caa567b7cb5252d48f896a91b1"},
+    {file = "propcache-0.2.1-py3-none-any.whl", hash = "sha256:52277518d6aae65536e9cea52d4e7fd2f7a66f4aa2d30ed3f2fcea620ace3c54"},
+    {file = "propcache-0.2.1.tar.gz", hash = "sha256:3f77ce728b19cb537714499928fe800c3dda29e8d9428778fc7c186da4c09a64"},
+]
+
 [[package]]
 name = "pycparser"
 version = "2.22"
@@ -1373,10 +1803,106 @@ docs = ["Sphinx (>=3.4)", "sphinx-rtd-theme (>=0.5)"]
 optional = ["python-socks", "wsaccel"]
 test = ["websockets"]
 
+[[package]]
+name = "yarl"
+version = "1.18.3"
+description = "Yet another URL library"
+optional = false
+python-versions = ">=3.9"
+files = [
+    {file = "yarl-1.18.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7df647e8edd71f000a5208fe6ff8c382a1de8edfbccdbbfe649d263de07d8c34"},
+    {file = "yarl-1.18.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c69697d3adff5aa4f874b19c0e4ed65180ceed6318ec856ebc423aa5850d84f7"},
+    {file = "yarl-1.18.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:602d98f2c2d929f8e697ed274fbadc09902c4025c5a9963bf4e9edfc3ab6f7ed"},
+    {file = "yarl-1.18.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c654d5207c78e0bd6d749f6dae1dcbbfde3403ad3a4b11f3c5544d9906969dde"},
+    {file = "yarl-1.18.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5094d9206c64181d0f6e76ebd8fb2f8fe274950a63890ee9e0ebfd58bf9d787b"},
+    {file = "yarl-1.18.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35098b24e0327fc4ebdc8ffe336cee0a87a700c24ffed13161af80124b7dc8e5"},
+    {file = "yarl-1.18.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3236da9272872443f81fedc389bace88408f64f89f75d1bdb2256069a8730ccc"},
+    {file = "yarl-1.18.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e2c08cc9b16f4f4bc522771d96734c7901e7ebef70c6c5c35dd0f10845270bcd"},
+    {file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:80316a8bd5109320d38eef8833ccf5f89608c9107d02d2a7f985f98ed6876990"},
+    {file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:c1e1cc06da1491e6734f0ea1e6294ce00792193c463350626571c287c9a704db"},
+    {file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:fea09ca13323376a2fdfb353a5fa2e59f90cd18d7ca4eaa1fd31f0a8b4f91e62"},
+    {file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:e3b9fd71836999aad54084906f8663dffcd2a7fb5cdafd6c37713b2e72be1760"},
+    {file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:757e81cae69244257d125ff31663249b3013b5dc0a8520d73694aed497fb195b"},
+    {file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b1771de9944d875f1b98a745bc547e684b863abf8f8287da8466cf470ef52690"},
+    {file = "yarl-1.18.3-cp310-cp310-win32.whl", hash = "sha256:8874027a53e3aea659a6d62751800cf6e63314c160fd607489ba5c2edd753cf6"},
+    {file = "yarl-1.18.3-cp310-cp310-win_amd64.whl", hash = "sha256:93b2e109287f93db79210f86deb6b9bbb81ac32fc97236b16f7433db7fc437d8"},
+    {file = "yarl-1.18.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8503ad47387b8ebd39cbbbdf0bf113e17330ffd339ba1144074da24c545f0069"},
+    {file = "yarl-1.18.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:02ddb6756f8f4517a2d5e99d8b2f272488e18dd0bfbc802f31c16c6c20f22193"},
+    {file = "yarl-1.18.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:67a283dd2882ac98cc6318384f565bffc751ab564605959df4752d42483ad889"},
+    {file = "yarl-1.18.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d980e0325b6eddc81331d3f4551e2a333999fb176fd153e075c6d1c2530aa8a8"},
+    {file = "yarl-1.18.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b643562c12680b01e17239be267bc306bbc6aac1f34f6444d1bded0c5ce438ca"},
+    {file = "yarl-1.18.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c017a3b6df3a1bd45b9fa49a0f54005e53fbcad16633870104b66fa1a30a29d8"},
+    {file = "yarl-1.18.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75674776d96d7b851b6498f17824ba17849d790a44d282929c42dbb77d4f17ae"},
+    {file = "yarl-1.18.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ccaa3a4b521b780a7e771cc336a2dba389a0861592bbce09a476190bb0c8b4b3"},
+    {file = "yarl-1.18.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2d06d3005e668744e11ed80812e61efd77d70bb7f03e33c1598c301eea20efbb"},
+    {file = "yarl-1.18.3-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:9d41beda9dc97ca9ab0b9888cb71f7539124bc05df02c0cff6e5acc5a19dcc6e"},
+    {file = "yarl-1.18.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ba23302c0c61a9999784e73809427c9dbedd79f66a13d84ad1b1943802eaaf59"},
+    {file = "yarl-1.18.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:6748dbf9bfa5ba1afcc7556b71cda0d7ce5f24768043a02a58846e4a443d808d"},
+    {file = "yarl-1.18.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:0b0cad37311123211dc91eadcb322ef4d4a66008d3e1bdc404808992260e1a0e"},
+    {file = "yarl-1.18.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0fb2171a4486bb075316ee754c6d8382ea6eb8b399d4ec62fde2b591f879778a"},
+    {file = "yarl-1.18.3-cp311-cp311-win32.whl", hash = "sha256:61b1a825a13bef4a5f10b1885245377d3cd0bf87cba068e1d9a88c2ae36880e1"},
+    {file = "yarl-1.18.3-cp311-cp311-win_amd64.whl", hash = "sha256:b9d60031cf568c627d028239693fd718025719c02c9f55df0a53e587aab951b5"},
+    {file = "yarl-1.18.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:1dd4bdd05407ced96fed3d7f25dbbf88d2ffb045a0db60dbc247f5b3c5c25d50"},
+    {file = "yarl-1.18.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7c33dd1931a95e5d9a772d0ac5e44cac8957eaf58e3c8da8c1414de7dd27c576"},
+    {file = "yarl-1.18.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:25b411eddcfd56a2f0cd6a384e9f4f7aa3efee14b188de13048c25b5e91f1640"},
+    {file = "yarl-1.18.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:436c4fc0a4d66b2badc6c5fc5ef4e47bb10e4fd9bf0c79524ac719a01f3607c2"},
+    {file = "yarl-1.18.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e35ef8683211db69ffe129a25d5634319a677570ab6b2eba4afa860f54eeaf75"},
+    {file = "yarl-1.18.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:84b2deecba4a3f1a398df819151eb72d29bfeb3b69abb145a00ddc8d30094512"},
+    {file = "yarl-1.18.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00e5a1fea0fd4f5bfa7440a47eff01d9822a65b4488f7cff83155a0f31a2ecba"},
+    {file = "yarl-1.18.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d0e883008013c0e4aef84dcfe2a0b172c4d23c2669412cf5b3371003941f72bb"},
+    {file = "yarl-1.18.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5a3f356548e34a70b0172d8890006c37be92995f62d95a07b4a42e90fba54272"},
+    {file = "yarl-1.18.3-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:ccd17349166b1bee6e529b4add61727d3f55edb7babbe4069b5764c9587a8cc6"},
+    {file = "yarl-1.18.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b958ddd075ddba5b09bb0be8a6d9906d2ce933aee81100db289badbeb966f54e"},
+    {file = "yarl-1.18.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c7d79f7d9aabd6011004e33b22bc13056a3e3fb54794d138af57f5ee9d9032cb"},
+    {file = "yarl-1.18.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:4891ed92157e5430874dad17b15eb1fda57627710756c27422200c52d8a4e393"},
+    {file = "yarl-1.18.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ce1af883b94304f493698b00d0f006d56aea98aeb49d75ec7d98cd4a777e9285"},
+    {file = "yarl-1.18.3-cp312-cp312-win32.whl", hash = "sha256:f91c4803173928a25e1a55b943c81f55b8872f0018be83e3ad4938adffb77dd2"},
+    {file = "yarl-1.18.3-cp312-cp312-win_amd64.whl", hash = "sha256:7e2ee16578af3b52ac2f334c3b1f92262f47e02cc6193c598502bd46f5cd1477"},
+    {file = "yarl-1.18.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:90adb47ad432332d4f0bc28f83a5963f426ce9a1a8809f5e584e704b82685dcb"},
+    {file = "yarl-1.18.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:913829534200eb0f789d45349e55203a091f45c37a2674678744ae52fae23efa"},
+    {file = "yarl-1.18.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:ef9f7768395923c3039055c14334ba4d926f3baf7b776c923c93d80195624782"},
+    {file = "yarl-1.18.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88a19f62ff30117e706ebc9090b8ecc79aeb77d0b1f5ec10d2d27a12bc9f66d0"},
+    {file = "yarl-1.18.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e17c9361d46a4d5addf777c6dd5eab0715a7684c2f11b88c67ac37edfba6c482"},
+    {file = "yarl-1.18.3-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1a74a13a4c857a84a845505fd2d68e54826a2cd01935a96efb1e9d86c728e186"},
+    {file = "yarl-1.18.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:41f7ce59d6ee7741af71d82020346af364949314ed3d87553763a2df1829cc58"},
+    {file = "yarl-1.18.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f52a265001d830bc425f82ca9eabda94a64a4d753b07d623a9f2863fde532b53"},
+    {file = "yarl-1.18.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:82123d0c954dc58db301f5021a01854a85bf1f3bb7d12ae0c01afc414a882ca2"},
+    {file = "yarl-1.18.3-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:2ec9bbba33b2d00999af4631a3397d1fd78290c48e2a3e52d8dd72db3a067ac8"},
+    {file = "yarl-1.18.3-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:fbd6748e8ab9b41171bb95c6142faf068f5ef1511935a0aa07025438dd9a9bc1"},
+    {file = "yarl-1.18.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:877d209b6aebeb5b16c42cbb377f5f94d9e556626b1bfff66d7b0d115be88d0a"},
+    {file = "yarl-1.18.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:b464c4ab4bfcb41e3bfd3f1c26600d038376c2de3297760dfe064d2cb7ea8e10"},
+    {file = "yarl-1.18.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8d39d351e7faf01483cc7ff7c0213c412e38e5a340238826be7e0e4da450fdc8"},
+    {file = "yarl-1.18.3-cp313-cp313-win32.whl", hash = "sha256:61ee62ead9b68b9123ec24bc866cbef297dd266175d53296e2db5e7f797f902d"},
+    {file = "yarl-1.18.3-cp313-cp313-win_amd64.whl", hash = "sha256:578e281c393af575879990861823ef19d66e2b1d0098414855dd367e234f5b3c"},
+    {file = "yarl-1.18.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:61e5e68cb65ac8f547f6b5ef933f510134a6bf31bb178be428994b0cb46c2a04"},
+    {file = "yarl-1.18.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fe57328fbc1bfd0bd0514470ac692630f3901c0ee39052ae47acd1d90a436719"},
+    {file = "yarl-1.18.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a440a2a624683108a1b454705ecd7afc1c3438a08e890a1513d468671d90a04e"},
+    {file = "yarl-1.18.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09c7907c8548bcd6ab860e5f513e727c53b4a714f459b084f6580b49fa1b9cee"},
+    {file = "yarl-1.18.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b4f6450109834af88cb4cc5ecddfc5380ebb9c228695afc11915a0bf82116789"},
+    {file = "yarl-1.18.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9ca04806f3be0ac6d558fffc2fdf8fcef767e0489d2684a21912cc4ed0cd1b8"},
+    {file = "yarl-1.18.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77a6e85b90a7641d2e07184df5557132a337f136250caafc9ccaa4a2a998ca2c"},
+    {file = "yarl-1.18.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6333c5a377c8e2f5fae35e7b8f145c617b02c939d04110c76f29ee3676b5f9a5"},
+    {file = "yarl-1.18.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0b3c92fa08759dbf12b3a59579a4096ba9af8dd344d9a813fc7f5070d86bbab1"},
+    {file = "yarl-1.18.3-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:4ac515b860c36becb81bb84b667466885096b5fc85596948548b667da3bf9f24"},
+    {file = "yarl-1.18.3-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:045b8482ce9483ada4f3f23b3774f4e1bf4f23a2d5c912ed5170f68efb053318"},
+    {file = "yarl-1.18.3-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:a4bb030cf46a434ec0225bddbebd4b89e6471814ca851abb8696170adb163985"},
+    {file = "yarl-1.18.3-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:54d6921f07555713b9300bee9c50fb46e57e2e639027089b1d795ecd9f7fa910"},
+    {file = "yarl-1.18.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1d407181cfa6e70077df3377938c08012d18893f9f20e92f7d2f314a437c30b1"},
+    {file = "yarl-1.18.3-cp39-cp39-win32.whl", hash = "sha256:ac36703a585e0929b032fbaab0707b75dc12703766d0b53486eabd5139ebadd5"},
+    {file = "yarl-1.18.3-cp39-cp39-win_amd64.whl", hash = "sha256:ba87babd629f8af77f557b61e49e7c7cac36f22f871156b91e10a6e9d4f829e9"},
+    {file = "yarl-1.18.3-py3-none-any.whl", hash = "sha256:b57f4f58099328dfb26c6a771d09fb20dbbae81d20cfb66141251ea063bd101b"},
+    {file = "yarl-1.18.3.tar.gz", hash = "sha256:ac1801c45cbf77b6c99242eeff4fffb5e4e73a800b5c4ad4fc0be5def634d2e1"},
+]
+
+[package.dependencies]
+idna = ">=2.0"
+multidict = ">=4.0"
+propcache = ">=0.2.0"
+
 [extras]
 templating = ["jinja2"]
 
 [metadata]
 lock-version = "2.0"
 python-versions = "^3.9"
-content-hash = "bac2cff41c35ede85e4a7dddb0481022bb2fb8ab465f7087cdfc9bbfdfe53038"
+content-hash = "278307adcc86b7b6ecb6384cae64437573f00cd3c22002cf1ebea5e1848ff64f"
diff --git a/pyproject.toml b/pyproject.toml
index 3c502c49a..256a3e820 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -82,6 +82,7 @@ optional = true
 
 [tool.poetry.group.test.dependencies]
 pytest = "7.2.1"
+aiohttp = "3.11.0"
 pytest-asyncio = "0.21.0"
 pytest-cov = "4.0.0"
 pytest-xdist = "3.6.1"

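aiohttp enters the test dependencies because the streaming integration tests added in this series read response bodies incrementally instead of buffering them. A minimal sketch of that consumption pattern, assuming a Robyn app serving the /stream/sync route on the default local port (the URL is illustrative; the real tests live in integration_tests/test_streaming_responses.py):

    import asyncio

    import aiohttp


    async def read_stream() -> None:
        async with aiohttp.ClientSession() as client:
            async with client.get("http://127.0.0.1:8080/stream/sync") as response:
                assert response.status == 200
                # response.content yields data as the server flushes it, so each
                # "Chunk {i}\n" line arrives without waiting for the full body.
                async for chunk in response.content:
                    print(chunk.decode(), end="")


    asyncio.run(read_stream())
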
From bd83bf05a00c86c3460eff175ffe8df4a40c02cb Mon Sep 17 00:00:00 2001
From: Sanskar Jethi <sansyrox@gmail.com>
Date: Wed, 18 Dec 2024 02:52:38 +0000
Subject: [PATCH 13/14] remove unused files

---
 src/base_routes.rs | 105 ---------------------------------------------
 1 file changed, 105 deletions(-)
 delete mode 100644 src/base_routes.rs

diff --git a/src/base_routes.rs b/src/base_routes.rs
deleted file mode 100644
index 73a3018c8..000000000
--- a/src/base_routes.rs
+++ /dev/null
@@ -1,105 +0,0 @@
-use actix_web::{web, HttpRequest, HttpResponse};
-use pyo3::prelude::*;
-use pyo3::types::PyDict;
-
-use crate::types::{Headers, Response};
-
-pub async fn handle_request(
-    req: HttpRequest,
-    path: web::Path<String>,
-    query: web::Query<std::collections::HashMap<String, String>>,
-    payload: web::Payload,
-    app_state: web::Data<PyObject>,
-) -> HttpResponse {
-    let path = path.into_inner();
-    let query = query.into_inner();
-
-    Python::with_gil(|py| {
-        let app = app_state.as_ref();
-        let args = PyDict::new(py);
-
-        // Convert query params to Python dict
-        let query_dict = PyDict::new(py);
-        for (key, value) in query {
-            query_dict.set_item(key, value).unwrap();
-        }
-
-        // Create headers dict
-        let headers = Headers::new(None);
-
-        // Call the route handler
-        let result = app.call_method1(
-            py,
-            "handle_request",
-            (path, req.method().as_str(), query_dict, headers),
-        );
-
-        match result {
-            Ok(response) => {
-                match response.extract::<Response>(py) {
-                    Ok(response) => response.respond_to(&req),
-                    Err(e) => {
-                        // If extraction fails, return 500 error
-                        let headers = Headers::new(None);
-                        Response::internal_server_error(Some(&headers)).respond_to(&req)
-                    }
-                }
-            }
-            Err(e) => {
-                // Handle Python error by returning 500
-                let headers = Headers::new(None);
-                Response::internal_server_error(Some(&headers)).respond_to(&req)
-            }
-        }
-    })
-}
-
-pub async fn handle_request_with_body(
-    req: HttpRequest,
-    path: web::Path<String>,
-    query: web::Query<std::collections::HashMap<String, String>>,
-    payload: web::Payload,
-    app_state: web::Data<PyObject>,
-) -> HttpResponse {
-    let path = path.into_inner();
-    let query = query.into_inner();
-
-    Python::with_gil(|py| {
-        let app = app_state.as_ref();
-        let args = PyDict::new(py);
-
-        // Convert query params to Python dict
-        let query_dict = PyDict::new(py);
-        for (key, value) in query {
-            query_dict.set_item(key, value).unwrap();
-        }
-
-        // Create headers dict
-        let headers = Headers::new(None);
-
-        // Call the route handler
-        let result = app.call_method1(
-            py,
-            "handle_request_with_body",
-            (path, req.method().as_str(), query_dict, headers, payload),
-        );
-
-        match result {
-            Ok(response) => {
-                match response.extract::<Response>(py) {
-                    Ok(response) => response.respond_to(&req),
-                    Err(e) => {
-                        // If extraction fails, return 500 error
-                        let headers = Headers::new(None);
-                        Response::internal_server_error(Some(&headers)).respond_to(&req)
-                    }
-                }
-            }
-            Err(e) => {
-                // Handle Python error by returning 500
-                let headers = Headers::new(None);
-                Response::internal_server_error(Some(&headers)).respond_to(&req)
-            }
-        }
-    })
-} 
\ No newline at end of file

From 23ded4bb1a5ba1a515f602d157892fa604203720 Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]"
 <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Wed, 25 Dec 2024 02:35:29 +0000
Subject: [PATCH 14/14] [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci
---
 integration_tests/base_routes.py              |  75 ++++-------
 integration_tests/conftest.py                 |   5 +-
 integration_tests/test_streaming_responses.py |  19 +--
 robyn/__init__.py                             | 119 +++++++-----------
 robyn/responses.py                            |   2 +-
 robyn/robyn.pyi                               |   2 +-
 6 files changed, 88 insertions(+), 134 deletions(-)

diff --git a/integration_tests/base_routes.py b/integration_tests/base_routes.py
index 3000c4b01..e3b901742 100644
--- a/integration_tests/base_routes.py
+++ b/integration_tests/base_routes.py
@@ -1091,30 +1091,25 @@ def create_item(request, body: CreateItemBody, query: CreateItemQueryParamsParam
 
 # --- Streaming responses ---
 
+
 @app.get("/stream/sync", streaming=True)
 async def sync_stream():
     def generator():
         for i in range(5):
             yield f"Chunk {i}\n".encode()
-    
+
     headers = Headers({"Content-Type": "text/plain"})
-    return Response(
-        status_code=200,
-        description=generator(),
-        headers=headers
-    )
+    return Response(status_code=200, description=generator(), headers=headers)
+
 
 @app.get("/stream/async", streaming=True)
 async def async_stream():
     async def generator():
         for i in range(5):
             yield f"Async Chunk {i}\n".encode()
-    
-    return Response(
-        status_code=200,
-        headers={"Content-Type": "text/plain"},
-        description=generator()
-    )
+
+    return Response(status_code=200, headers={"Content-Type": "text/plain"}, description=generator())
+
 
 @app.get("/stream/mixed", streaming=True)
 async def mixed_stream():
@@ -1123,12 +1118,9 @@ async def generator():
         yield "String chunk\n".encode()
         yield str(42).encode() + b"\n"
         yield json.dumps({"message": "JSON chunk", "number": 123}).encode() + b"\n"
-    
-    return Response(
-        status_code=200,
-        headers={"Content-Type": "text/plain"},
-        description=generator()
-    )
+
+    return Response(status_code=200, headers={"Content-Type": "text/plain"}, description=generator())
+
 
 @app.get("/stream/events", streaming=True)
 async def server_sent_events():
@@ -1136,73 +1128,60 @@ async def event_generator():
         import asyncio
         import json
         import time
-        
+
         # Regular event
         yield f"event: message\ndata: {json.dumps({'time': time.time(), 'type': 'start'})}\n\n".encode()
         await asyncio.sleep(1)
-        
+
         # Event with ID
         yield f"id: 1\nevent: update\ndata: {json.dumps({'progress': 50})}\n\n".encode()
         await asyncio.sleep(1)
-        
+
         # Multiple data lines
-        data = json.dumps({'status': 'complete', 'results': [1, 2, 3]}, indent=2)
+        data = json.dumps({"status": "complete", "results": [1, 2, 3]}, indent=2)
         yield f"event: complete\ndata: {data}\n\n".encode()
-    
+
     return Response(
-        status_code=200,
-        headers={
-            "Content-Type": "text/event-stream",
-            "Cache-Control": "no-cache",
-            "Connection": "keep-alive"
-        },
-        description=event_generator()
+        status_code=200, headers={"Content-Type": "text/event-stream", "Cache-Control": "no-cache", "Connection": "keep-alive"}, description=event_generator()
     )
 
+
 @app.get("/stream/large-file", streaming=True)
 async def stream_large_file():
     async def file_generator():
         # Simulate streaming a large file in chunks
         chunk_size = 1024  # 1KB chunks
         total_size = 10 * chunk_size  # 10KB total
-        
+
         for offset in range(0, total_size, chunk_size):
             # Simulate reading file chunk
             chunk = b"X" * min(chunk_size, total_size - offset)
             yield chunk
-    
+
     return Response(
         status_code=200,
-        headers={
-            "Content-Type": "application/octet-stream",
-            "Content-Disposition": "attachment; filename=large-file.bin"
-        },
-        description=file_generator()
+        headers={"Content-Type": "application/octet-stream", "Content-Disposition": "attachment; filename=large-file.bin"},
+        description=file_generator(),
     )
 
+
 @app.get("/stream/csv", streaming=True)
 async def stream_csv():
     async def csv_generator():
         # CSV header
         yield "id,name,value\n".encode()
-        
+
         import asyncio
         import random
-        
+
         # Generate rows
         for i in range(5):
             await asyncio.sleep(0.5)  # Simulate data processing
             row = f"{i},item-{i},{random.randint(1, 100)}\n"
             yield row.encode()
-    
-    return Response(
-        status_code=200,
-        headers={
-            "Content-Type": "text/csv",
-            "Content-Disposition": "attachment; filename=data.csv"
-        },
-        description=csv_generator()
-    )
+
+    return Response(status_code=200, headers={"Content-Type": "text/csv", "Content-Disposition": "attachment; filename=data.csv"}, description=csv_generator())
+
 
 def main():
     app.set_response_header("server", "robyn")
diff --git a/integration_tests/conftest.py b/integration_tests/conftest.py
index c450919df..77c661e0b 100644
--- a/integration_tests/conftest.py
+++ b/integration_tests/conftest.py
@@ -8,12 +8,10 @@
 from typing import List
 
 import pytest
-import pytest_asyncio
-from robyn import Robyn
-from integration_tests.base_routes import app
 
 from integration_tests.helpers.network_helpers import get_network_host
 
+
 def spawn_process(command: List[str]) -> subprocess.Popen:
     if platform.system() == "Windows":
         command[0] = "python"
@@ -129,4 +127,3 @@ def env_file():
     env_path.unlink()
     del os.environ["ROBYN_PORT"]
     del os.environ["ROBYN_HOST"]
-
diff --git a/integration_tests/test_streaming_responses.py b/integration_tests/test_streaming_responses.py
index 4e16f9504..be5d8d0a3 100644
--- a/integration_tests/test_streaming_responses.py
+++ b/integration_tests/test_streaming_responses.py
@@ -19,6 +19,7 @@
 # Mark all tests in this module as async
 pytestmark = pytest.mark.asyncio
 
+
 async def test_sync_stream():
     """Test basic synchronous streaming response."""
     async with aiohttp.ClientSession() as client:
@@ -34,6 +35,7 @@ async def test_sync_stream():
             for i, chunk in enumerate(chunks):
                 assert chunk == f"Chunk {i}\n"
 
+
 async def test_async_stream():
     """Test asynchronous streaming response."""
     async with aiohttp.ClientSession() as client:
@@ -49,6 +51,7 @@ async def test_async_stream():
             for i, chunk in enumerate(chunks):
                 assert chunk == f"Async Chunk {i}\n"
 
+
 async def test_mixed_stream():
     """Test streaming of mixed content types."""
     async with aiohttp.ClientSession() as client:
@@ -56,12 +59,7 @@ async def test_mixed_stream():
             assert response.status == 200
             assert response.headers["Content-Type"] == "text/plain"
 
-            expected = [
-                b"Binary chunk\n",
-                b"String chunk\n",
-                b"42\n",
-                json.dumps({"message": "JSON chunk", "number": 123}).encode() + b"\n"
-            ]
+            expected = [b"Binary chunk\n", b"String chunk\n", b"42\n", json.dumps({"message": "JSON chunk", "number": 123}).encode() + b"\n"]
 
             chunks = []
             async for chunk in response.content:
@@ -71,6 +69,7 @@ async def test_mixed_stream():
             for chunk, expected_chunk in zip(chunks, expected):
                 assert chunk == expected_chunk
 
+
 async def test_server_sent_events():
     """Test Server-Sent Events (SSE) streaming."""
     async with aiohttp.ClientSession() as client:
@@ -103,6 +102,7 @@ async def test_server_sent_events():
             assert event_data["status"] == "complete"
             assert event_data["results"] == [1, 2, 3]
 
+
 async def test_large_file_stream():
     """Test streaming of large files in chunks."""
     async with aiohttp.ClientSession() as client:
@@ -118,6 +118,7 @@ async def test_large_file_stream():
 
             assert total_size == 10 * 1024  # 10KB total
 
+
 async def test_csv_stream():
     """Test streaming of CSV data."""
     async with aiohttp.ClientSession() as client:
@@ -132,11 +133,11 @@ async def test_csv_stream():
 
             # Verify header
             assert lines[0] == "id,name,value"
-            
+
             # Verify data rows
             assert len(lines) == 6  # Header + 5 data rows
             for i, line in enumerate(lines[1:], 0):
-                id_, name, value = line.split(',')
+                id_, name, value = line.split(",")
                 assert int(id_) == i
                 assert name == f"item-{i}"
-                assert 1 <= int(value) <= 100
\ No newline at end of file
+                assert 1 <= int(value) <= 100
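The SSE assertions above operate on the raw text/event-stream framing, in which each event is a block of field: value lines terminated by a blank line. A hedged sketch of that parsing step, separate from the test's actual helper code (parse_sse below is illustrative and only handles single-line fields):

    import json


    def parse_sse(payload: str) -> list[dict]:
        """Split a text/event-stream payload into {field: value} events."""
        events = []
        for block in payload.split("\n\n"):
            if not block.strip():
                continue
            event = {}
            for line in block.splitlines():
                # "event: complete" -> field "event", value "complete"
                field, _, value = line.partition(": ")
                event[field] = value
            events.append(event)
        return events


    # Mirrors the shape of the endpoint's final frame: "event: complete\ndata: {...}\n\n"
    frames = parse_sse('event: complete\ndata: {"status": "complete", "results": [1, 2, 3]}\n\n')
    assert frames[0]["event"] == "complete"
    assert json.loads(frames[0]["data"])["results"] == [1, 2, 3]
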
diff --git a/robyn/__init__.py b/robyn/__init__.py
index f2c5369b2..b694a44f7 100644
--- a/robyn/__init__.py
+++ b/robyn/__init__.py
@@ -338,14 +338,7 @@ def get_functions(view) -> List[Tuple[HttpMethod, Callable]]:
 
         handlers = get_functions(view)
         for route_type, handler in handlers:
-            self.add_route(
-                route_type=route_type,
-                endpoint=endpoint,
-                handler=handler,
-                is_const=const,
-                streaming=False,
-                auth_required=False
-            )
+            self.add_route(route_type=route_type, endpoint=endpoint, handler=handler, is_const=const, streaming=False, auth_required=False)
 
     def view(self, endpoint: str, const: bool = False):
         """
@@ -383,12 +376,7 @@ def inner(handler):
             self.openapi.add_openapi_path_obj("get", endpoint, openapi_name, openapi_tags, handler)
 
             return self.add_route(
-                route_type=HttpMethod.GET,
-                endpoint=endpoint,
-                handler=handler,
-                is_const=const,
-                streaming=streaming,
-                auth_required=auth_required
+                route_type=HttpMethod.GET, endpoint=endpoint, handler=handler, is_const=const, streaming=streaming, auth_required=auth_required
             )
 
         return inner
@@ -415,12 +403,7 @@ def inner(handler):
             self.openapi.add_openapi_path_obj("post", endpoint, openapi_name, openapi_tags, handler)
 
             return self.add_route(
-                route_type=HttpMethod.POST,
-                endpoint=endpoint,
-                handler=handler,
-                is_const=False,
-                streaming=streaming,
-                auth_required=auth_required
+                route_type=HttpMethod.POST, endpoint=endpoint, handler=handler, is_const=False, streaming=streaming, auth_required=auth_required
             )
 
         return inner
@@ -447,12 +430,7 @@ def inner(handler):
             self.openapi.add_openapi_path_obj("put", endpoint, openapi_name, openapi_tags, handler)
 
             return self.add_route(
-                route_type=HttpMethod.PUT,
-                endpoint=endpoint,
-                handler=handler,
-                is_const=False,
-                streaming=streaming,
-                auth_required=auth_required
+                route_type=HttpMethod.PUT, endpoint=endpoint, handler=handler, is_const=False, streaming=streaming, auth_required=auth_required
             )
 
         return inner
@@ -479,12 +457,7 @@ def inner(handler):
             self.openapi.add_openapi_path_obj("delete", endpoint, openapi_name, openapi_tags, handler)
 
             return self.add_route(
-                route_type=HttpMethod.DELETE,
-                endpoint=endpoint,
-                handler=handler,
-                is_const=False,
-                streaming=streaming,
-                auth_required=auth_required
+                route_type=HttpMethod.DELETE, endpoint=endpoint, handler=handler, is_const=False, streaming=streaming, auth_required=auth_required
             )
 
         return inner
@@ -511,12 +484,7 @@ def inner(handler):
             self.openapi.add_openapi_path_obj("patch", endpoint, openapi_name, openapi_tags, handler)
 
             return self.add_route(
-                route_type=HttpMethod.PATCH,
-                endpoint=endpoint,
-                handler=handler,
-                is_const=False,
-                streaming=streaming,
-                auth_required=auth_required
+                route_type=HttpMethod.PATCH, endpoint=endpoint, handler=handler, is_const=False, streaming=streaming, auth_required=auth_required
             )
 
         return inner
@@ -543,12 +511,7 @@ def inner(handler):
             self.openapi.add_openapi_path_obj("head", endpoint, openapi_name, openapi_tags, handler)
 
             return self.add_route(
-                route_type=HttpMethod.HEAD,
-                endpoint=endpoint,
-                handler=handler,
-                is_const=False,
-                streaming=streaming,
-                auth_required=auth_required
+                route_type=HttpMethod.HEAD, endpoint=endpoint, handler=handler, is_const=False, streaming=streaming, auth_required=auth_required
             )
 
         return inner
@@ -575,12 +538,7 @@ def inner(handler):
             self.openapi.add_openapi_path_obj("options", endpoint, openapi_name, openapi_tags, handler)
 
             return self.add_route(
-                route_type=HttpMethod.OPTIONS,
-                endpoint=endpoint,
-                handler=handler,
-                is_const=False,
-                streaming=streaming,
-                auth_required=auth_required
+                route_type=HttpMethod.OPTIONS, endpoint=endpoint, handler=handler, is_const=False, streaming=streaming, auth_required=auth_required
             )
 
         return inner
@@ -606,12 +564,7 @@ def connect(
         def inner(handler):
             self.openapi.add_openapi_path_obj("connect", endpoint, openapi_name, openapi_tags, handler)
             return self.add_route(
-                route_type=HttpMethod.CONNECT,
-                endpoint=endpoint,
-                handler=handler,
-                is_const=False,
-                streaming=streaming,
-                auth_required=auth_required
+                route_type=HttpMethod.CONNECT, endpoint=endpoint, handler=handler, is_const=False, streaming=streaming, auth_required=auth_required
             )
 
         return inner
@@ -638,12 +591,7 @@ def inner(handler):
             self.openapi.add_openapi_path_obj("trace", endpoint, openapi_name, openapi_tags, handler)
 
             return self.add_route(
-                route_type=HttpMethod.TRACE,
-                endpoint=endpoint,
-                handler=handler,
-                is_const=False,
-                streaming=streaming,
-                auth_required=auth_required
+                route_type=HttpMethod.TRACE, endpoint=endpoint, handler=handler, is_const=False, streaming=streaming, auth_required=auth_required
             )
 
         return inner
@@ -686,29 +634,58 @@ def __init__(self, file_object: str, prefix: str = "", config: Config = Config()
     def __add_prefix(self, endpoint: str):
         return f"{self.prefix}{endpoint}"
 
-    def get(self, endpoint: str, const: bool = False, streaming: bool = False, auth_required: bool = False, openapi_name: str = "", openapi_tags: List[str] = ["get"]):
-        return super().get(endpoint=self.__add_prefix(endpoint), const=const, streaming=streaming, auth_required=auth_required, openapi_name=openapi_name, openapi_tags=openapi_tags)
+    def get(
+        self,
+        endpoint: str,
+        const: bool = False,
+        streaming: bool = False,
+        auth_required: bool = False,
+        openapi_name: str = "",
+        openapi_tags: List[str] = ["get"],
+    ):
+        return super().get(
+            endpoint=self.__add_prefix(endpoint),
+            const=const,
+            streaming=streaming,
+            auth_required=auth_required,
+            openapi_name=openapi_name,
+            openapi_tags=openapi_tags,
+        )
 
     def post(self, endpoint: str, streaming: bool = False, auth_required: bool = False, openapi_name: str = "", openapi_tags: List[str] = ["post"]):
-        return super().post(endpoint=self.__add_prefix(endpoint), streaming=streaming, auth_required=auth_required, openapi_name=openapi_name, openapi_tags=openapi_tags)
+        return super().post(
+            endpoint=self.__add_prefix(endpoint), streaming=streaming, auth_required=auth_required, openapi_name=openapi_name, openapi_tags=openapi_tags
+        )
 
     def put(self, endpoint: str, streaming: bool = False, auth_required: bool = False, openapi_name: str = "", openapi_tags: List[str] = ["put"]):
-        return super().put(endpoint=self.__add_prefix(endpoint), streaming=streaming, auth_required=auth_required, openapi_name=openapi_name, openapi_tags=openapi_tags)
+        return super().put(
+            endpoint=self.__add_prefix(endpoint), streaming=streaming, auth_required=auth_required, openapi_name=openapi_name, openapi_tags=openapi_tags
+        )
 
     def delete(self, endpoint: str, streaming: bool = False, auth_required: bool = False, openapi_name: str = "", openapi_tags: List[str] = ["delete"]):
-        return super().delete(endpoint=self.__add_prefix(endpoint), streaming=streaming, auth_required=auth_required, openapi_name=openapi_name, openapi_tags=openapi_tags)
+        return super().delete(
+            endpoint=self.__add_prefix(endpoint), streaming=streaming, auth_required=auth_required, openapi_name=openapi_name, openapi_tags=openapi_tags
+        )
 
     def patch(self, endpoint: str, streaming: bool = False, auth_required: bool = False, openapi_name: str = "", openapi_tags: List[str] = ["patch"]):
-        return super().patch(endpoint=self.__add_prefix(endpoint), streaming=streaming, auth_required=auth_required, openapi_name=openapi_name, openapi_tags=openapi_tags)
+        return super().patch(
+            endpoint=self.__add_prefix(endpoint), streaming=streaming, auth_required=auth_required, openapi_name=openapi_name, openapi_tags=openapi_tags
+        )
 
     def head(self, endpoint: str, streaming: bool = False, auth_required: bool = False, openapi_name: str = "", openapi_tags: List[str] = ["head"]):
-        return super().head(endpoint=self.__add_prefix(endpoint), streaming=streaming, auth_required=auth_required, openapi_name=openapi_name, openapi_tags=openapi_tags)
+        return super().head(
+            endpoint=self.__add_prefix(endpoint), streaming=streaming, auth_required=auth_required, openapi_name=openapi_name, openapi_tags=openapi_tags
+        )
 
     def trace(self, endpoint: str, streaming: bool = False, auth_required: bool = False, openapi_name: str = "", openapi_tags: List[str] = ["trace"]):
-        return super().trace(endpoint=self.__add_prefix(endpoint), streaming=streaming, auth_required=auth_required, openapi_name=openapi_name, openapi_tags=openapi_tags)
+        return super().trace(
+            endpoint=self.__add_prefix(endpoint), streaming=streaming, auth_required=auth_required, openapi_name=openapi_name, openapi_tags=openapi_tags
+        )
 
     def options(self, endpoint: str, streaming: bool = False, auth_required: bool = False, openapi_name: str = "", openapi_tags: List[str] = ["options"]):
-        return super().options(endpoint=self.__add_prefix(endpoint), streaming=streaming, auth_required=auth_required, openapi_name=openapi_name, openapi_tags=openapi_tags)
+        return super().options(
+            endpoint=self.__add_prefix(endpoint), streaming=streaming, auth_required=auth_required, openapi_name=openapi_name, openapi_tags=openapi_tags
+        )
 
 
 def ALLOW_CORS(app: Robyn, origins: Union[List[str], str]):
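The reformatted SubRouter methods make it easier to see that every HTTP verb now forwards the streaming and auth_required flags to the parent router, with the sub-router's prefix prepended to the endpoint. A hedged usage sketch (the /api prefix and route body are illustrative, and registering the router via include_router is assumed to work as elsewhere in Robyn):

    from robyn import Robyn, SubRouter
    from robyn.robyn import Headers, Response

    app = Robyn(__file__)
    router = SubRouter(__file__, prefix="/api")


    @router.get("/numbers", streaming=True)
    async def numbers():
        def generator():
            for i in range(3):
                yield f"{i}\n".encode()

        # Served as GET /api/numbers: SubRouter prepends its prefix before
        # delegating to the parent router with streaming=True.
        return Response(status_code=200, headers=Headers({"Content-Type": "text/plain"}), description=generator())


    app.include_router(router)
    app.start(port=8080)
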
diff --git a/robyn/responses.py b/robyn/responses.py
index dd081d0d3..2958c1b71 100644
--- a/robyn/responses.py
+++ b/robyn/responses.py
@@ -1,6 +1,6 @@
 import mimetypes
 import os
-from typing import Optional, Any, Union, Callable, Iterator, AsyncIterator
+from typing import Optional
 
 from robyn.robyn import Headers, Response
 
diff --git a/robyn/robyn.pyi b/robyn/robyn.pyi
index fa8da141d..dcb035fc9 100644
--- a/robyn/robyn.pyi
+++ b/robyn/robyn.pyi
@@ -287,7 +287,7 @@ class Response:
         status_code (int): The status code of the response. e.g. 200, 404, 500 etc.
         response_type (Optional[str]): The response type of the response. e.g. text, json, html, file etc.
         headers (Union[Headers, dict]): The headers of the response or Headers directly. e.g. {"Content-Type": "application/json"}
-        description (Union[str, bytes, Iterator[bytes], AsyncIterator[bytes], Generator[bytes, None, None], AsyncGenerator[bytes, None]]): 
+        description (Union[str, bytes, Iterator[bytes], AsyncIterator[bytes], Generator[bytes, None, None], AsyncGenerator[bytes, None]]):
             The body of the response. Can be:
             - str: Plain text response
             - bytes: Binary response