From 47c934c74b083fa650a0b5246b601f2beccc7ebf Mon Sep 17 00:00:00 2001
From: Michael Yang
Date: Thu, 21 Dec 2023 14:21:02 -0800
Subject: [PATCH] format

---
 examples/simple-fill-in-middle/main.py |   4 +-
 ollama/_client.py                      | 327 +++++++------
 ollama/_types.py                       |   1 +
 pyproject.toml                         |   1 +
 tests/test_client.py                   | 635 +++++++++++++++----------
 5 files changed, 576 insertions(+), 392 deletions(-)

diff --git a/examples/simple-fill-in-middle/main.py b/examples/simple-fill-in-middle/main.py
index ca14d05..67d7a74 100644
--- a/examples/simple-fill-in-middle/main.py
+++ b/examples/simple-fill-in-middle/main.py
@@ -3,9 +3,9 @@
 prefix = '''def remove_non_ascii(s: str) -> str:
     """
 '''

-suffix = '''
+suffix = """
     return result
-'''
+"""

 response = generate(
diff --git a/ollama/_client.py b/ollama/_client.py
index 9e5ee32..d0fa30f 100644
--- a/ollama/_client.py
+++ b/ollama/_client.py
@@ -9,6 +9,7 @@
 from typing import Any, AnyStr, Union, Optional, List, Mapping

 import sys
+
 if sys.version_info < (3, 9):
   from typing import Iterator, AsyncIterator
 else:
@@ -18,13 +19,11 @@


 class BaseClient:
-
   def __init__(self, client, base_url='http://127.0.0.1:11434') -> None:
     self._client = client(base_url=base_url, follow_redirects=True, timeout=None)


 class Client(BaseClient):
-
   def __init__(self, base='http://localhost:11434') -> None:
     super().__init__(httpx.Client, base)

@@ -45,43 +44,47 @@ def _stream(self, method: str, url: str, **kwargs) -> Iterator[Mapping[str, Any]
         yield part

   def generate(
-      self,
-      model: str = '',
-      prompt: str = '',
-      system: str = '',
-      template: str = '',
-      context: Optional[List[int]] = None,
-      stream: bool = False,
-      raw: bool = False,
-      format: str = '',
-      images: Optional[List[AnyStr]] = None,
-      options: Optional[Options] = None,
-      ) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
+    self,
+    model: str = '',
+    prompt: str = '',
+    system: str = '',
+    template: str = '',
+    context: Optional[List[int]] = None,
+    stream: bool = False,
+    raw: bool = False,
+    format: str = '',
+    images: Optional[List[AnyStr]] = None,
+    options: Optional[Options] = None,
+  ) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
     if not model:
       raise Exception('must provide a model')

     fn = self._stream if stream else self._request_json
-    return fn('POST', '/api/generate', json={
-      'model': model,
-      'prompt': prompt,
-      'system': system,
-      'template': template,
-      'context': context or [],
-      'stream': stream,
-      'raw': raw,
-      'images': [_encode_image(image) for image in images or []],
-      'format': format,
-      'options': options or {},
-    })
+    return fn(
+      'POST',
+      '/api/generate',
+      json={
+        'model': model,
+        'prompt': prompt,
+        'system': system,
+        'template': template,
+        'context': context or [],
+        'stream': stream,
+        'raw': raw,
+        'images': [_encode_image(image) for image in images or []],
+        'format': format,
+        'options': options or {},
+      },
+    )

   def chat(
-      self,
-      model: str = '',
-      messages: Optional[List[Message]] = None,
-      stream: bool = False,
-      format: str = '',
-      options: Optional[Options] = None,
-      ) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
+    self,
+    model: str = '',
+    messages: Optional[List[Message]] = None,
+    stream: bool = False,
+    format: str = '',
+    options: Optional[Options] = None,
+  ) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
     if not model:
       raise Exception('must provide a model')

@@ -96,47 +99,59 @@ def chat(
         message['images'] = [_encode_image(image) for image in images]

     fn = self._stream if stream else self._request_json
-    return fn('POST', '/api/chat', json={
-      'model': model,
-      'messages': messages,
-      'stream': stream,
-      'format': format,
-      'options': options or {},
-    })
+    return fn(
+      'POST',
+      '/api/chat',
+      json={
+        'model': model,
+        'messages': messages,
+        'stream': stream,
+        'format': format,
+        'options': options or {},
+      },
+    )

   def pull(
-      self,
-      model: str,
-      insecure: bool = False,
-      stream: bool = False,
-      ) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
+    self,
+    model: str,
+    insecure: bool = False,
+    stream: bool = False,
+  ) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
     fn = self._stream if stream else self._request_json
-    return fn('POST', '/api/pull', json={
-      'model': model,
-      'insecure': insecure,
-      'stream': stream,
-    })
+    return fn(
+      'POST',
+      '/api/pull',
+      json={
+        'model': model,
+        'insecure': insecure,
+        'stream': stream,
+      },
+    )

   def push(
-      self,
-      model: str,
-      insecure: bool = False,
-      stream: bool = False,
-      ) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
+    self,
+    model: str,
+    insecure: bool = False,
+    stream: bool = False,
+  ) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
     fn = self._stream if stream else self._request_json
-    return fn('POST', '/api/push', json={
-      'model': model,
-      'insecure': insecure,
-      'stream': stream,
-    })
+    return fn(
+      'POST',
+      '/api/push',
+      json={
+        'model': model,
+        'insecure': insecure,
+        'stream': stream,
+      },
+    )

   def create(
-      self,
-      model: str,
-      path: Optional[Union[str, PathLike]] = None,
-      modelfile: Optional[str] = None,
-      stream: bool = False,
-      ) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
+    self,
+    model: str,
+    path: Optional[Union[str, PathLike]] = None,
+    modelfile: Optional[str] = None,
+    stream: bool = False,
+  ) -> Union[Mapping[str, Any], Iterator[Mapping[str, Any]]]:
     if (realpath := _as_path(path)) and realpath.exists():
       modelfile = self._parse_modelfile(realpath.read_text(), base=realpath.parent)
     elif modelfile:
@@ -145,11 +160,15 @@ def create(
       raise Exception('must provide either path or modelfile')

     fn = self._stream if stream else self._request_json
-    return fn('POST', '/api/create', json={
-      'model': model,
-      'modelfile': modelfile,
-      'stream': stream,
-    })
+    return fn(
+      'POST',
+      '/api/create',
+      json={
+        'model': model,
+        'modelfile': modelfile,
+        'stream': stream,
+      },
+    )

   def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
     base = Path.cwd() if base is None else base
@@ -170,7 +189,7 @@ def _create_blob(self, path: Union[str, Path]) -> str:
     sha256sum = sha256()
     with open(path, 'rb') as r:
       while True:
-        chunk = r.read(32*1024)
+        chunk = r.read(32 * 1024)
         if not chunk:
           break
         sha256sum.update(chunk)
@@ -204,7 +223,6 @@ def show(self, model: str) -> Mapping[str, Any]:


 class AsyncClient(BaseClient):
-
   def __init__(self, base='http://localhost:11434') -> None:
     super().__init__(httpx.AsyncClient, base)

@@ -225,46 +243,51 @@ async def inner():
           if e := part.get('error'):
             raise Exception(e)
           yield part
+
     return inner()

   async def generate(
-      self,
-      model: str = '',
-      prompt: str = '',
-      system: str = '',
-      template: str = '',
-      context: Optional[List[int]] = None,
-      stream: bool = False,
-      raw: bool = False,
-      format: str = '',
-      images: Optional[List[AnyStr]] = None,
-      options: Optional[Options] = None,
-      ) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
+    self,
+    model: str = '',
+    prompt: str = '',
+    system: str = '',
+    template: str = '',
+    context: Optional[List[int]] = None,
+    stream: bool = False,
+    raw: bool = False,
+    format: str = '',
+    images: Optional[List[AnyStr]] = None,
+    options: Optional[Options] = None,
+  ) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
     if not model:
       raise Exception('must provide a model')

     fn = self._stream if stream else self._request_json
-    return await fn('POST', '/api/generate', json={
-      'model': model,
-      'prompt': prompt,
-      'system': system,
-      'template': template,
-      'context': context or [],
-      'stream': stream,
-      'raw': raw,
-      'images': [_encode_image(image) for image in images or []],
-      'format': format,
-      'options': options or {},
-    })
+    return await fn(
+      'POST',
+      '/api/generate',
+      json={
+        'model': model,
+        'prompt': prompt,
+        'system': system,
+        'template': template,
+        'context': context or [],
+        'stream': stream,
+        'raw': raw,
+        'images': [_encode_image(image) for image in images or []],
+        'format': format,
+        'options': options or {},
+      },
+    )

   async def chat(
-      self,
-      model: str = '',
-      messages: Optional[List[Message]] = None,
-      stream: bool = False,
-      format: str = '',
-      options: Optional[Options] = None,
-      ) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
+    self,
+    model: str = '',
+    messages: Optional[List[Message]] = None,
+    stream: bool = False,
+    format: str = '',
+    options: Optional[Options] = None,
+  ) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
     if not model:
       raise Exception('must provide a model')

@@ -279,47 +302,59 @@ async def chat(
         message['images'] = [_encode_image(image) for image in images]

     fn = self._stream if stream else self._request_json
-    return await fn('POST', '/api/chat', json={
-      'model': model,
-      'messages': messages,
-      'stream': stream,
-      'format': format,
-      'options': options or {},
-    })
+    return await fn(
+      'POST',
+      '/api/chat',
+      json={
+        'model': model,
+        'messages': messages,
+        'stream': stream,
+        'format': format,
+        'options': options or {},
+      },
+    )

   async def pull(
-      self,
-      model: str,
-      insecure: bool = False,
-      stream: bool = False,
-      ) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
+    self,
+    model: str,
+    insecure: bool = False,
+    stream: bool = False,
+  ) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
     fn = self._stream if stream else self._request_json
-    return await fn('POST', '/api/pull', json={
-      'model': model,
-      'insecure': insecure,
-      'stream': stream,
-    })
+    return await fn(
+      'POST',
+      '/api/pull',
+      json={
+        'model': model,
+        'insecure': insecure,
+        'stream': stream,
+      },
+    )

   async def push(
-      self,
-      model: str,
-      insecure: bool = False,
-      stream: bool = False,
-      ) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
+    self,
+    model: str,
+    insecure: bool = False,
+    stream: bool = False,
+  ) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
     fn = self._stream if stream else self._request_json
-    return await fn('POST', '/api/push', json={
-      'model': model,
-      'insecure': insecure,
-      'stream': stream,
-    })
+    return await fn(
+      'POST',
+      '/api/push',
+      json={
+        'model': model,
+        'insecure': insecure,
+        'stream': stream,
+      },
+    )

   async def create(
-      self,
-      model: str,
-      path: Optional[Union[str, PathLike]] = None,
-      modelfile: Optional[str] = None,
-      stream: bool = False,
-      ) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
+    self,
+    model: str,
+    path: Optional[Union[str, PathLike]] = None,
+    modelfile: Optional[str] = None,
+    stream: bool = False,
+  ) -> Union[Mapping[str, Any], AsyncIterator[Mapping[str, Any]]]:
     if (realpath := _as_path(path)) and realpath.exists():
       modelfile = await self._parse_modelfile(realpath.read_text(), base=realpath.parent)
     elif modelfile:
@@ -328,11 +363,15 @@ async def create(
       raise Exception('must provide either path or modelfile')

     fn = self._stream if stream else self._request_json
-    return await fn('POST', '/api/create', json={
-      'model': model,
-      'modelfile': modelfile,
-      'stream': stream,
-    })
+    return await fn(
+      'POST',
+      '/api/create',
+      json={
+        'model': model,
+        'modelfile': modelfile,
+        'stream': stream,
+      },
+    )

   async def _parse_modelfile(self, modelfile: str, base: Optional[Path] = None) -> str:
     base = Path.cwd() if base is None else base
@@ -353,7 +392,7 @@ async def _create_blob(self, path: Union[str, Path]) -> str:
     sha256sum = sha256()
     with open(path, 'rb') as r:
       while True:
-        chunk = r.read(32*1024)
+        chunk = r.read(32 * 1024)
         if not chunk:
           break
         sha256sum.update(chunk)
@@ -369,7 +408,7 @@ async def _create_blob(self, path: Union[str, Path]) -> str:
     async def upload_bytes():
       with open(path, 'rb') as r:
         while True:
-          chunk = r.read(32*1024)
+          chunk = r.read(32 * 1024)
           if not chunk:
             break
           yield chunk
diff --git a/ollama/_types.py b/ollama/_types.py
index 7fe3bf0..d263269 100644
--- a/ollama/_types.py
+++ b/ollama/_types.py
@@ -1,6 +1,7 @@
 from typing import Any, TypedDict, List

 import sys
+
 if sys.version_info < (3, 11):
   from typing_extensions import NotRequired
 else:
diff --git a/pyproject.toml b/pyproject.toml
index be5ddac..76b5e15 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -25,6 +25,7 @@ requires = ["poetry-core"]
 build-backend = "poetry.core.masonry.api"

 [tool.ruff]
+line-length = 999
 indent-width = 2

 [tool.ruff.format]
diff --git a/tests/test_client.py b/tests/test_client.py
index 31b3160..fe151dc 100644
--- a/tests/test_client.py
+++ b/tests/test_client.py
@@ -21,19 +25,25 @@ def match(self, uri):


 def test_client_chat(httpserver: HTTPServer):
-  httpserver.expect_ordered_request('/api/chat', method='POST', json={
-    'model': 'dummy',
-    'messages': [{'role': 'user', 'content': 'Why is the sky blue?'}],
-    'stream': False,
-    'format': '',
-    'options': {},
-  }).respond_with_json({
-    'model': 'dummy',
-    'message': {
-      'role': 'assistant',
-      'content': "I don't know.",
-    },
-  })
+  httpserver.expect_ordered_request(
+    '/api/chat',
+    method='POST',
+    json={
+      'model': 'dummy',
+      'messages': [{'role': 'user', 'content': 'Why is the sky blue?'}],
+      'stream': False,
+      'format': '',
+      'options': {},
+    },
+  ).respond_with_json(
+    {
+      'model': 'dummy',
+      'message': {
+        'role': 'assistant',
+        'content': "I don't know.",
+      },
+    }
+  )

   client = Client(httpserver.url_for('/'))
   response = client.chat('dummy', messages=[{'role': 'user', 'content': 'Why is the sky blue?'}])
@@ -46,22 +52,32 @@ def test_client_chat_stream(httpserver: HTTPServer):
   def stream_handler(_: Request):
     def generate():
       for message in ['I ', "don't ", 'know.']:
-        yield json.dumps({
-          'model': 'dummy',
-          'message': {
-            'role': 'assistant',
-            'content': message,
-          },
-        }) + '\n'
+        yield (
+          json.dumps(
+            {
+              'model': 'dummy',
+              'message': {
+                'role': 'assistant',
+                'content': message,
+              },
+            }
+          )
+          + '\n'
+        )
+
     return Response(generate())

-  httpserver.expect_ordered_request('/api/chat', method='POST', json={
-    'model': 'dummy',
-    'messages': [{'role': 'user', 'content': 'Why is the sky blue?'}],
-    'stream': True,
-    'format': '',
-    'options': {},
-  }).respond_with_handler(stream_handler)
+  httpserver.expect_ordered_request(
+    '/api/chat',
+    method='POST',
+    json={
+      'model': 'dummy',
+      'messages': [{'role': 'user', 'content': 'Why is the sky blue?'}],
+      'stream': True,
+      'format': '',
+      'options': {},
+    },
+  ).respond_with_handler(stream_handler)

   client = Client(httpserver.url_for('/'))
   response = client.chat('dummy', messages=[{'role': 'user', 'content': 'Why is the sky blue?'}], stream=True)
@@ -71,25 +87,31 @@ def generate():


 def test_client_chat_images(httpserver: HTTPServer):
-  httpserver.expect_ordered_request('/api/chat', method='POST', json={
-    'model': 'dummy',
-    'messages': [
-      {
-        'role': 'user',
-        'content': 'Why is the sky blue?',
-        'images': ['iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAIAAACQd1PeAAAADElEQVR4nGNgYGAAAAAEAAH2FzhVAAAAAElFTkSuQmCC'],
-      },
-    ],
-    'stream': False,
-    'format': '',
-    'options': {},
-  }).respond_with_json({
-    'model': 'dummy',
-    'message': {
-      'role': 'assistant',
-      'content': "I don't know.",
-    },
-  })
+  httpserver.expect_ordered_request(
+    '/api/chat',
+    method='POST',
+    json={
+      'model': 'dummy',
+      'messages': [
+        {
+          'role': 'user',
+          'content': 'Why is the sky blue?',
+          'images': ['iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAIAAACQd1PeAAAADElEQVR4nGNgYGAAAAAEAAH2FzhVAAAAAElFTkSuQmCC'],
+        },
+      ],
+      'stream': False,
+      'format': '',
+      'options': {},
+    },
+  ).respond_with_json(
+    {
+      'model': 'dummy',
+      'message': {
+        'role': 'assistant',
+        'content': "I don't know.",
+      },
+    }
+  )

   client = Client(httpserver.url_for('/'))
@@ -102,21 +124,27 @@


 def test_client_generate(httpserver: HTTPServer):
-  httpserver.expect_ordered_request('/api/generate', method='POST', json={
-    'model': 'dummy',
-    'prompt': 'Why is the sky blue?',
-    'system': '',
-    'template': '',
-    'context': [],
-    'stream': False,
-    'raw': False,
-    'images': [],
-    'format': '',
-    'options': {},
-  }).respond_with_json({
-    'model': 'dummy',
-    'response': 'Because it is.',
-  })
+  httpserver.expect_ordered_request(
+    '/api/generate',
+    method='POST',
+    json={
+      'model': 'dummy',
+      'prompt': 'Why is the sky blue?',
+      'system': '',
+      'template': '',
+      'context': [],
+      'stream': False,
+      'raw': False,
+      'images': [],
+      'format': '',
+      'options': {},
+    },
+  ).respond_with_json(
+    {
+      'model': 'dummy',
+      'response': 'Because it is.',
+    }
+  )

   client = Client(httpserver.url_for('/'))
   response = client.generate('dummy', 'Why is the sky blue?')
@@ -128,24 +156,34 @@ def test_client_generate_stream(httpserver: HTTPServer):
   def stream_handler(_: Request):
     def generate():
       for message in ['Because ', 'it ', 'is.']:
-        yield json.dumps({
-          'model': 'dummy',
-          'response': message,
-        }) + '\n'
+        yield (
+          json.dumps(
+            {
+              'model': 'dummy',
+              'response': message,
+            }
+          )
+          + '\n'
+        )
+
     return Response(generate())

-  httpserver.expect_ordered_request('/api/generate', method='POST', json={
-    'model': 'dummy',
-    'prompt': 'Why is the sky blue?',
-    'system': '',
-    'template': '',
-    'context': [],
-    'stream': True,
-    'raw': False,
-    'images': [],
-    'format': '',
-    'options': {},
-  }).respond_with_handler(stream_handler)
+  httpserver.expect_ordered_request(
+    '/api/generate',
+    method='POST',
+    json={
+      'model': 'dummy',
+      'prompt': 'Why is the sky blue?',
+      'system': '',
+      'template': '',
+      'context': [],
+      'stream': True,
+      'raw': False,
+      'images': [],
+      'format': '',
+      'options': {},
+    },
+  ).respond_with_handler(stream_handler)

   client = Client(httpserver.url_for('/'))
   response = client.generate('dummy', 'Why is the sky blue?', stream=True)
@@ -155,21 +193,27 @@ def generate():


 def test_client_generate_images(httpserver: HTTPServer):
-  httpserver.expect_ordered_request('/api/generate', method='POST', json={
-    'model': 'dummy',
-    'prompt': 'Why is the sky blue?',
-    'system': '',
-    'template': '',
-    'context': [],
-    'stream': False,
-    'raw': False,
-    'images': ['iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAIAAACQd1PeAAAADElEQVR4nGNgYGAAAAAEAAH2FzhVAAAAAElFTkSuQmCC'],
-    'format': '',
-    'options': {},
-  }).respond_with_json({
-    'model': 'dummy',
-    'response': 'Because it is.',
-  })
+  httpserver.expect_ordered_request(
+    '/api/generate',
+    method='POST',
+    json={
+      'model': 'dummy',
+      'prompt': 'Why is the sky blue?',
+      'system': '',
+      'template': '',
+      'context': [],
+      'stream': False,
+      'raw': False,
+      'images': ['iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAIAAACQd1PeAAAADElEQVR4nGNgYGAAAAAEAAH2FzhVAAAAAElFTkSuQmCC'],
+      'format': '',
+      'options': {},
+    },
+  ).respond_with_json(
+    {
+      'model': 'dummy',
+      'response': 'Because it is.',
+    }
+  )

   client = Client(httpserver.url_for('/'))
@@ -181,13 +225,19 @@


 def test_client_pull(httpserver: HTTPServer):
-  httpserver.expect_ordered_request('/api/pull', method='POST', json={
-    'model': 'dummy',
-    'insecure': False,
-    'stream': False,
-  }).respond_with_json({
-    'status': 'success',
-  })
+  httpserver.expect_ordered_request(
+    '/api/pull',
+    method='POST',
+    json={
+      'model': 'dummy',
+      'insecure': False,
+      'stream': False,
+    },
+  ).respond_with_json(
+    {
+      'status': 'success',
+    }
+  )

   client = Client(httpserver.url_for('/'))
   response = client.pull('dummy')
@@ -202,13 +252,18 @@ def generate():
       yield json.dumps({'status': 'writing manifest'}) + '\n'
       yield json.dumps({'status': 'removing any unused layers'}) + '\n'
       yield json.dumps({'status': 'success'}) + '\n'
+
    return Response(generate())

-  httpserver.expect_ordered_request('/api/pull', method='POST', json={
-    'model': 'dummy',
-    'insecure': False,
-    'stream': True,
-  }).respond_with_json({})
+  httpserver.expect_ordered_request(
+    '/api/pull',
+    method='POST',
+    json={
+      'model': 'dummy',
+      'insecure': False,
+      'stream': True,
+    },
+  ).respond_with_json({})

   client = Client(httpserver.url_for('/'))
   response = client.pull('dummy', stream=True)
@@ -216,11 +271,15 @@


 def test_client_push(httpserver: HTTPServer):
-  httpserver.expect_ordered_request('/api/push', method='POST', json={
-    'model': 'dummy',
-    'insecure': False,
-    'stream': False,
-  }).respond_with_json({})
+  httpserver.expect_ordered_request(
+    '/api/push',
+    method='POST',
+    json={
+      'model': 'dummy',
+      'insecure': False,
+      'stream': False,
+    },
+  ).respond_with_json({})

   client = Client(httpserver.url_for('/'))
   response = client.push('dummy')
@@ -228,11 +287,15 @@


 def test_client_push_stream(httpserver: HTTPServer):
-  httpserver.expect_ordered_request('/api/push', method='POST', json={
-    'model': 'dummy',
-    'insecure': False,
-    'stream': True,
-  }).respond_with_json({})
+  httpserver.expect_ordered_request(
+    '/api/push',
+    method='POST',
+    json={
+      'model': 'dummy',
+      'insecure': False,
+      'stream': True,
+    },
+  ).respond_with_json({})

   client = Client(httpserver.url_for('/'))
   response = client.push('dummy', stream=True)
@@ -241,11 +304,15 @@


 def test_client_create_path(httpserver: HTTPServer):
   httpserver.expect_ordered_request(PrefixPattern('/api/blobs/'), method='HEAD').respond_with_response(Response(status=200))
-  httpserver.expect_ordered_request('/api/create', method='POST', json={
-    'model': 'dummy',
-    'modelfile': 'FROM @sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855\n',
-    'stream': False,
-  }).respond_with_json({})
+  httpserver.expect_ordered_request(
+    '/api/create',
+    method='POST',
+    json={
+      'model': 'dummy',
+      'modelfile': 'FROM @sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855\n',
+      'stream': False,
+    },
+  ).respond_with_json({})

   client = Client(httpserver.url_for('/'))
@@ -260,11 +327,15 @@


 def test_client_create_path_relative(httpserver: HTTPServer):
   httpserver.expect_ordered_request(PrefixPattern('/api/blobs/'), method='HEAD').respond_with_response(Response(status=200))
-  httpserver.expect_ordered_request('/api/create', method='POST', json={
-    'model': 'dummy',
-    'modelfile': 'FROM @sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855\n',
-    'stream': False,
-  }).respond_with_json({})
+  httpserver.expect_ordered_request(
+    '/api/create',
+    method='POST',
+    json={
+      'model': 'dummy',
+      'modelfile': 'FROM @sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855\n',
+      'stream': False,
+    },
+  ).respond_with_json({})

   client = Client(httpserver.url_for('/'))
@@ -288,11 +359,15 @@ def userhomedir():


 def test_client_create_path_user_home(httpserver: HTTPServer, userhomedir):
   httpserver.expect_ordered_request(PrefixPattern('/api/blobs/'), method='HEAD').respond_with_response(Response(status=200))
-  httpserver.expect_ordered_request('/api/create', method='POST', json={
-    'model': 'dummy',
-    'modelfile': 'FROM @sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855\n',
-    'stream': False,
-  }).respond_with_json({})
+  httpserver.expect_ordered_request(
+    '/api/create',
+    method='POST',
+    json={
+      'model': 'dummy',
+      'modelfile': 'FROM @sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855\n',
+      'stream': False,
+    },
+  ).respond_with_json({})

   client = Client(httpserver.url_for('/'))
@@ -307,11 +382,15 @@ def test_client_create_path_user_home(httpserver: HTTPServer, userhomedir):


 def test_client_create_modelfile(httpserver: HTTPServer):
   httpserver.expect_ordered_request(PrefixPattern('/api/blobs/'), method='HEAD').respond_with_response(Response(status=200))
-  httpserver.expect_ordered_request('/api/create', method='POST', json={
-    'model': 'dummy',
-    'modelfile': 'FROM @sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855\n',
-    'stream': False,
-  }).respond_with_json({})
+  httpserver.expect_ordered_request(
+    '/api/create',
+    method='POST',
+    json={
+      'model': 'dummy',
+      'modelfile': 'FROM @sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855\n',
+      'stream': False,
+    },
+  ).respond_with_json({})

   client = Client(httpserver.url_for('/'))
@@ -321,11 +400,15 @@


 def test_client_create_from_library(httpserver: HTTPServer):
-  httpserver.expect_ordered_request('/api/create', method='POST', json={
-    'model': 'dummy',
-    'modelfile': 'FROM llama2\n',
-    'stream': False,
-  }).respond_with_json({})
+  httpserver.expect_ordered_request(
+    '/api/create',
+    method='POST',
+    json={
+      'model': 'dummy',
+      'modelfile': 'FROM llama2\n',
+      'stream': False,
+    },
+  ).respond_with_json({})

   client = Client(httpserver.url_for('/'))
@@ -356,13 +439,17 @@ def test_client_create_blob_exists(httpserver: HTTPServer):


 @pytest.mark.asyncio
 async def test_async_client_chat(httpserver: HTTPServer):
-  httpserver.expect_ordered_request('/api/chat', method='POST', json={
-    'model': 'dummy',
-    'messages': [{'role': 'user', 'content': 'Why is the sky blue?'}],
-    'stream': False,
-    'format': '',
-    'options': {},
-  }).respond_with_json({})
+  httpserver.expect_ordered_request(
+    '/api/chat',
+    method='POST',
+    json={
+      'model': 'dummy',
+      'messages': [{'role': 'user', 'content': 'Why is the sky blue?'}],
+      'stream': False,
+      'format': '',
+      'options': {},
+    },
+  ).respond_with_json({})

   client = AsyncClient(httpserver.url_for('/'))
   response = await client.chat('dummy', messages=[{'role': 'user', 'content': 'Why is the sky blue?'}])
@@ -371,13 +458,17 @@


 @pytest.mark.asyncio
 async def test_async_client_chat_stream(httpserver: HTTPServer):
-  httpserver.expect_ordered_request('/api/chat', method='POST', json={
-    'model': 'dummy',
-    'messages': [{'role': 'user', 'content': 'Why is the sky blue?'}],
-    'stream': True,
-    'format': '',
-    'options': {},
-  }).respond_with_json({})
+  httpserver.expect_ordered_request(
+    '/api/chat',
+    method='POST',
+    json={
+      'model': 'dummy',
+      'messages': [{'role': 'user', 'content': 'Why is the sky blue?'}],
+      'stream': True,
+      'format': '',
+      'options': {},
+    },
+  ).respond_with_json({})

   client = AsyncClient(httpserver.url_for('/'))
   response = await client.chat('dummy', messages=[{'role': 'user', 'content': 'Why is the sky blue?'}], stream=True)
@@ -386,19 +477,23 @@


 @pytest.mark.asyncio
 async def test_async_client_chat_images(httpserver: HTTPServer):
-  httpserver.expect_ordered_request('/api/chat', method='POST', json={
-    'model': 'dummy',
-    'messages': [
-      {
-        'role': 'user',
-        'content': 'Why is the sky blue?',
-        'images': ['iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAIAAACQd1PeAAAADElEQVR4nGNgYGAAAAAEAAH2FzhVAAAAAElFTkSuQmCC'],
-      },
-    ],
-    'stream': False,
-    'format': '',
-    'options': {},
-  }).respond_with_json({})
+  httpserver.expect_ordered_request(
+    '/api/chat',
+    method='POST',
+    json={
+      'model': 'dummy',
+      'messages': [
+        {
+          'role': 'user',
+          'content': 'Why is the sky blue?',
+          'images': ['iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAIAAACQd1PeAAAADElEQVR4nGNgYGAAAAAEAAH2FzhVAAAAAElFTkSuQmCC'],
+        },
+      ],
+      'stream': False,
+      'format': '',
+      'options': {},
+    },
+  ).respond_with_json({})

   client = AsyncClient(httpserver.url_for('/'))
@@ -410,18 +505,22 @@


 @pytest.mark.asyncio
 async def test_async_client_generate(httpserver: HTTPServer):
-  httpserver.expect_ordered_request('/api/generate', method='POST', json={
-    'model': 'dummy',
-    'prompt': 'Why is the sky blue?',
-    'system': '',
-    'template': '',
-    'context': [],
-    'stream': False,
-    'raw': False,
-    'images': [],
-    'format': '',
-    'options': {},
-  }).respond_with_json({})
+  httpserver.expect_ordered_request(
+    '/api/generate',
+    method='POST',
+    json={
+      'model': 'dummy',
+      'prompt': 'Why is the sky blue?',
+      'system': '',
+      'template': '',
+      'context': [],
+      'stream': False,
+      'raw': False,
+      'images': [],
+      'format': '',
+      'options': {},
+    },
+  ).respond_with_json({})

   client = AsyncClient(httpserver.url_for('/'))
   response = await client.generate('dummy', 'Why is the sky blue?')
@@ -430,18 +529,22 @@


 @pytest.mark.asyncio
 async def test_async_client_generate_stream(httpserver: HTTPServer):
-  httpserver.expect_ordered_request('/api/generate', method='POST', json={
-    'model': 'dummy',
-    'prompt': 'Why is the sky blue?',
-    'system': '',
-    'template': '',
-    'context': [],
-    'stream': True,
-    'raw': False,
-    'images': [],
-    'format': '',
-    'options': {},
-  }).respond_with_json({})
+  httpserver.expect_ordered_request(
+    '/api/generate',
+    method='POST',
+    json={
+      'model': 'dummy',
+      'prompt': 'Why is the sky blue?',
+      'system': '',
+      'template': '',
+      'context': [],
+      'stream': True,
+      'raw': False,
+      'images': [],
+      'format': '',
+      'options': {},
+    },
+  ).respond_with_json({})

   client = AsyncClient(httpserver.url_for('/'))
   response = await client.generate('dummy', 'Why is the sky blue?', stream=True)
@@ -450,18 +553,22 @@


 @pytest.mark.asyncio
 async def test_async_client_generate_images(httpserver: HTTPServer):
-  httpserver.expect_ordered_request('/api/generate', method='POST', json={
-    'model': 'dummy',
-    'prompt': 'Why is the sky blue?',
-    'system': '',
-    'template': '',
-    'context': [],
-    'stream': False,
-    'raw': False,
-    'images': ['iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAIAAACQd1PeAAAADElEQVR4nGNgYGAAAAAEAAH2FzhVAAAAAElFTkSuQmCC'],
-    'format': '',
-    'options': {},
-  }).respond_with_json({})
+  httpserver.expect_ordered_request(
+    '/api/generate',
+    method='POST',
+    json={
+      'model': 'dummy',
+      'prompt': 'Why is the sky blue?',
+      'system': '',
+      'template': '',
+      'context': [],
+      'stream': False,
+      'raw': False,
+      'images': ['iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAIAAACQd1PeAAAADElEQVR4nGNgYGAAAAAEAAH2FzhVAAAAAElFTkSuQmCC'],
+      'format': '',
+      'options': {},
+    },
+  ).respond_with_json({})

   client = AsyncClient(httpserver.url_for('/'))
@@ -473,11 +580,15 @@


 @pytest.mark.asyncio
 async def test_async_client_pull(httpserver: HTTPServer):
-  httpserver.expect_ordered_request('/api/pull', method='POST', json={
-    'model': 'dummy',
-    'insecure': False,
-    'stream': False,
-  }).respond_with_json({})
+  httpserver.expect_ordered_request(
+    '/api/pull',
+    method='POST',
+    json={
+      'model': 'dummy',
+      'insecure': False,
+      'stream': False,
+    },
+  ).respond_with_json({})

   client = AsyncClient(httpserver.url_for('/'))
   response = await client.pull('dummy')
@@ -486,11 +597,15 @@


 @pytest.mark.asyncio
 async def test_async_client_pull_stream(httpserver: HTTPServer):
-  httpserver.expect_ordered_request('/api/pull', method='POST', json={
-    'model': 'dummy',
-    'insecure': False,
-    'stream': True,
-  }).respond_with_json({})
+  httpserver.expect_ordered_request(
+    '/api/pull',
+    method='POST',
+    json={
+      'model': 'dummy',
+      'insecure': False,
+      'stream': True,
+    },
+  ).respond_with_json({})

   client = AsyncClient(httpserver.url_for('/'))
   response = await client.pull('dummy', stream=True)
@@ -499,11 +614,15 @@


 @pytest.mark.asyncio
 async def test_async_client_push(httpserver: HTTPServer):
-  httpserver.expect_ordered_request('/api/push', method='POST', json={
-    'model': 'dummy',
-    'insecure': False,
-    'stream': False,
-  }).respond_with_json({})
+  httpserver.expect_ordered_request(
+    '/api/push',
+    method='POST',
+    json={
+      'model': 'dummy',
+      'insecure': False,
+      'stream': False,
+    },
+  ).respond_with_json({})

   client = AsyncClient(httpserver.url_for('/'))
   response = await client.push('dummy')
@@ -512,11 +631,15 @@


 @pytest.mark.asyncio
 async def test_async_client_push_stream(httpserver: HTTPServer):
-  httpserver.expect_ordered_request('/api/push', method='POST', json={
-    'model': 'dummy',
-    'insecure': False,
-    'stream': True,
-  }).respond_with_json({})
+  httpserver.expect_ordered_request(
+    '/api/push',
+    method='POST',
+    json={
+      'model': 'dummy',
+      'insecure': False,
+      'stream': True,
+    },
+  ).respond_with_json({})

   client = AsyncClient(httpserver.url_for('/'))
   response = await client.push('dummy', stream=True)
@@ -526,11 +649,15 @@


 @pytest.mark.asyncio
 async def test_async_client_create_path(httpserver: HTTPServer):
   httpserver.expect_ordered_request(PrefixPattern('/api/blobs/'), method='HEAD').respond_with_response(Response(status=200))
-  httpserver.expect_ordered_request('/api/create', method='POST', json={
-    'model': 'dummy',
-    'modelfile': 'FROM @sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855\n',
-    'stream': False,
-  }).respond_with_json({})
+  httpserver.expect_ordered_request(
+    '/api/create',
+    method='POST',
+    json={
+      'model': 'dummy',
+      'modelfile': 'FROM @sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855\n',
+      'stream': False,
+    },
+  ).respond_with_json({})

   client = AsyncClient(httpserver.url_for('/'))
@@ -546,11 +673,15 @@


 @pytest.mark.asyncio
 async def test_async_client_create_path_relative(httpserver: HTTPServer):
   httpserver.expect_ordered_request(PrefixPattern('/api/blobs/'), method='HEAD').respond_with_response(Response(status=200))
-  httpserver.expect_ordered_request('/api/create', method='POST', json={
-    'model': 'dummy',
-    'modelfile': 'FROM @sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855\n',
-    'stream': False,
-  }).respond_with_json({})
+  httpserver.expect_ordered_request(
+    '/api/create',
+    method='POST',
+    json={
+      'model': 'dummy',
+      'modelfile': 'FROM @sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855\n',
+      'stream': False,
+    },
+  ).respond_with_json({})

   client = AsyncClient(httpserver.url_for('/'))
@@ -566,11 +697,15 @@


 @pytest.mark.asyncio
 async def test_async_client_create_path_user_home(httpserver: HTTPServer, userhomedir):
   httpserver.expect_ordered_request(PrefixPattern('/api/blobs/'), method='HEAD').respond_with_response(Response(status=200))
-  httpserver.expect_ordered_request('/api/create', method='POST', json={
-    'model': 'dummy',
-    'modelfile': 'FROM @sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855\n',
-    'stream': False,
-  }).respond_with_json({})
+  httpserver.expect_ordered_request(
+    '/api/create',
+    method='POST',
+    json={
+      'model': 'dummy',
+      'modelfile': 'FROM @sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855\n',
+      'stream': False,
+    },
+  ).respond_with_json({})

   client = AsyncClient(httpserver.url_for('/'))
@@ -586,11 +721,15 @@ async def test_async_client_create_path_user_home(httpserver: HTTPServer, userho


 @pytest.mark.asyncio
 async def test_async_client_create_modelfile(httpserver: HTTPServer):
   httpserver.expect_ordered_request(PrefixPattern('/api/blobs/'), method='HEAD').respond_with_response(Response(status=200))
-  httpserver.expect_ordered_request('/api/create', method='POST', json={
-    'model': 'dummy',
-    'modelfile': 'FROM @sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855\n',
-    'stream': False,
-  }).respond_with_json({})
+  httpserver.expect_ordered_request(
+    '/api/create',
+    method='POST',
+    json={
+      'model': 'dummy',
+      'modelfile': 'FROM @sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855\n',
+      'stream': False,
+    },
+  ).respond_with_json({})

   client = AsyncClient(httpserver.url_for('/'))
@@ -601,11 +740,15 @@ async def test_async_client_create_modelfile(httpserver: HTTPServer):


 @pytest.mark.asyncio
 async def test_async_client_create_from_library(httpserver: HTTPServer):
-  httpserver.expect_ordered_request('/api/create', method='POST', json={
-    'model': 'dummy',
-    'modelfile': 'FROM llama2\n',
-    'stream': False,
-  }).respond_with_json({})
+  httpserver.expect_ordered_request(
+    '/api/create',
+    method='POST',
+    json={
+      'model': 'dummy',
+      'modelfile': 'FROM llama2\n',
+      'stream': False,
+    },
+  ).respond_with_json({})

   client = AsyncClient(httpserver.url_for('/'))
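
Note: the diff above is mechanical formatter output; there are no behavior changes. It is driven by the pyproject.toml hunk (line-length = 999 added under [tool.ruff], alongside the existing indent-width = 2): with an effectively unlimited line length nothing is re-wrapped for width, and it is the formatter's magic-trailing-comma rule that explodes each call whose dict literal ends in a trailing comma, putting every argument on its own line. A minimal sketch for reproducing or checking the result locally, assuming a ruff release that includes the formatter (0.1.2 or later):

    # rewrite files in place using the settings in pyproject.toml
    ruff format .

    # check only: exits non-zero if any file would be reformatted
    ruff format --check .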