Michael Yang
2023-12-21 14:21:02 -08:00
parent 9d93f70806
commit 47c934c74b
5 changed files with 576 additions and 392 deletions
@@ -21,19 +21,25 @@ class PrefixPattern(URIPattern):
def test_client_chat(httpserver: HTTPServer):
httpserver.expect_ordered_request('/api/chat', method='POST', json={
'model': 'dummy',
'messages': [{'role': 'user', 'content': 'Why is the sky blue?'}],
'stream': False,
'format': '',
'options': {},
}).respond_with_json({
'model': 'dummy',
'message': {
'role': 'assistant',
'content': "I don't know.",
},
})
httpserver.expect_ordered_request(
'/api/chat',
method='POST',
json={
'model': 'dummy',
'messages': [{'role': 'user', 'content': 'Why is the sky blue?'}],
'stream': False,
'format': '',
'options': {},
},
).respond_with_json(
{
'model': 'dummy',
'message': {
'role': 'assistant',
'content': "I don't know.",
},
}
)
client = Client(httpserver.url_for('/'))
response = client.chat('dummy', messages=[{'role': 'user', 'content': 'Why is the sky blue?'}])
@@ -46,22 +52,32 @@ def test_client_chat_stream(httpserver: HTTPServer):
def stream_handler(_: Request):
def generate():
for message in ['I ', "don't ", 'know.']:
yield json.dumps({
'model': 'dummy',
'message': {
'role': 'assistant',
'content': message,
},
}) + '\n'
yield (
json.dumps(
{
'model': 'dummy',
'message': {
'role': 'assistant',
'content': message,
},
}
)
+ '\n'
)
return Response(generate())
httpserver.expect_ordered_request('/api/chat', method='POST', json={
'model': 'dummy',
'messages': [{'role': 'user', 'content': 'Why is the sky blue?'}],
'stream': True,
'format': '',
'options': {},
}).respond_with_handler(stream_handler)
httpserver.expect_ordered_request(
'/api/chat',
method='POST',
json={
'model': 'dummy',
'messages': [{'role': 'user', 'content': 'Why is the sky blue?'}],
'stream': True,
'format': '',
'options': {},
},
).respond_with_handler(stream_handler)
client = Client(httpserver.url_for('/'))
response = client.chat('dummy', messages=[{'role': 'user', 'content': 'Why is the sky blue?'}], stream=True)
@@ -71,25 +87,31 @@ def test_client_chat_stream(httpserver: HTTPServer):
def test_client_chat_images(httpserver: HTTPServer):
httpserver.expect_ordered_request('/api/chat', method='POST', json={
'model': 'dummy',
'messages': [
{
'role': 'user',
'content': 'Why is the sky blue?',
'images': ['iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAIAAACQd1PeAAAADElEQVR4nGNgYGAAAAAEAAH2FzhVAAAAAElFTkSuQmCC'],
},
],
'stream': False,
'format': '',
'options': {},
}).respond_with_json({
'model': 'dummy',
'message': {
'role': 'assistant',
'content': "I don't know.",
},
})
httpserver.expect_ordered_request(
'/api/chat',
method='POST',
json={
'model': 'dummy',
'messages': [
{
'role': 'user',
'content': 'Why is the sky blue?',
'images': ['iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAIAAACQd1PeAAAADElEQVR4nGNgYGAAAAAEAAH2FzhVAAAAAElFTkSuQmCC'],
},
],
'stream': False,
'format': '',
'options': {},
},
).respond_with_json(
{
'model': 'dummy',
'message': {
'role': 'assistant',
'content': "I don't know.",
},
}
)
client = Client(httpserver.url_for('/'))
@@ -102,21 +124,27 @@ def test_client_chat_images(httpserver: HTTPServer):
def test_client_generate(httpserver: HTTPServer):
httpserver.expect_ordered_request('/api/generate', method='POST', json={
'model': 'dummy',
'prompt': 'Why is the sky blue?',
'system': '',
'template': '',
'context': [],
'stream': False,
'raw': False,
'images': [],
'format': '',
'options': {},
}).respond_with_json({
'model': 'dummy',
'response': 'Because it is.',
})
httpserver.expect_ordered_request(
'/api/generate',
method='POST',
json={
'model': 'dummy',
'prompt': 'Why is the sky blue?',
'system': '',
'template': '',
'context': [],
'stream': False,
'raw': False,
'images': [],
'format': '',
'options': {},
},
).respond_with_json(
{
'model': 'dummy',
'response': 'Because it is.',
}
)
client = Client(httpserver.url_for('/'))
response = client.generate('dummy', 'Why is the sky blue?')
@@ -128,24 +156,34 @@ def test_client_generate_stream(httpserver: HTTPServer):
def stream_handler(_: Request):
def generate():
for message in ['Because ', 'it ', 'is.']:
yield json.dumps({
'model': 'dummy',
'response': message,
}) + '\n'
yield (
json.dumps(
{
'model': 'dummy',
'response': message,
}
)
+ '\n'
)
return Response(generate())
httpserver.expect_ordered_request('/api/generate', method='POST', json={
'model': 'dummy',
'prompt': 'Why is the sky blue?',
'system': '',
'template': '',
'context': [],
'stream': True,
'raw': False,
'images': [],
'format': '',
'options': {},
}).respond_with_handler(stream_handler)
httpserver.expect_ordered_request(
'/api/generate',
method='POST',
json={
'model': 'dummy',
'prompt': 'Why is the sky blue?',
'system': '',
'template': '',
'context': [],
'stream': True,
'raw': False,
'images': [],
'format': '',
'options': {},
},
).respond_with_handler(stream_handler)
client = Client(httpserver.url_for('/'))
response = client.generate('dummy', 'Why is the sky blue?', stream=True)
@@ -155,21 +193,27 @@ def test_client_generate_stream(httpserver: HTTPServer):
def test_client_generate_images(httpserver: HTTPServer):
httpserver.expect_ordered_request('/api/generate', method='POST', json={
'model': 'dummy',
'prompt': 'Why is the sky blue?',
'system': '',
'template': '',
'context': [],
'stream': False,
'raw': False,
'images': ['iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAIAAACQd1PeAAAADElEQVR4nGNgYGAAAAAEAAH2FzhVAAAAAElFTkSuQmCC'],
'format': '',
'options': {},
}).respond_with_json({
'model': 'dummy',
'response': 'Because it is.',
})
httpserver.expect_ordered_request(
'/api/generate',
method='POST',
json={
'model': 'dummy',
'prompt': 'Why is the sky blue?',
'system': '',
'template': '',
'context': [],
'stream': False,
'raw': False,
'images': ['iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAIAAACQd1PeAAAADElEQVR4nGNgYGAAAAAEAAH2FzhVAAAAAElFTkSuQmCC'],
'format': '',
'options': {},
},
).respond_with_json(
{
'model': 'dummy',
'response': 'Because it is.',
}
)
client = Client(httpserver.url_for('/'))
@@ -181,13 +225,19 @@ def test_client_generate_images(httpserver: HTTPServer):
def test_client_pull(httpserver: HTTPServer):
httpserver.expect_ordered_request('/api/pull', method='POST', json={
'model': 'dummy',
'insecure': False,
'stream': False,
}).respond_with_json({
'status': 'success',
})
httpserver.expect_ordered_request(
'/api/pull',
method='POST',
json={
'model': 'dummy',
'insecure': False,
'stream': False,
},
).respond_with_json(
{
'status': 'success',
}
)
client = Client(httpserver.url_for('/'))
response = client.pull('dummy')
@@ -202,13 +252,18 @@ def test_client_pull_stream(httpserver: HTTPServer):
yield json.dumps({'status': 'writing manifest'}) + '\n'
yield json.dumps({'status': 'removing any unused layers'}) + '\n'
yield json.dumps({'status': 'success'}) + '\n'
return Response(generate())
httpserver.expect_ordered_request('/api/pull', method='POST', json={
'model': 'dummy',
'insecure': False,
'stream': True,
}).respond_with_json({})
httpserver.expect_ordered_request(
'/api/pull',
method='POST',
json={
'model': 'dummy',
'insecure': False,
'stream': True,
},
).respond_with_json({})
client = Client(httpserver.url_for('/'))
response = client.pull('dummy', stream=True)
@@ -216,11 +271,15 @@ def test_client_pull_stream(httpserver: HTTPServer):
def test_client_push(httpserver: HTTPServer):
httpserver.expect_ordered_request('/api/push', method='POST', json={
'model': 'dummy',
'insecure': False,
'stream': False,
}).respond_with_json({})
httpserver.expect_ordered_request(
'/api/push',
method='POST',
json={
'model': 'dummy',
'insecure': False,
'stream': False,
},
).respond_with_json({})
client = Client(httpserver.url_for('/'))
response = client.push('dummy')
@@ -228,11 +287,15 @@ def test_client_push(httpserver: HTTPServer):
def test_client_push_stream(httpserver: HTTPServer):
httpserver.expect_ordered_request('/api/push', method='POST', json={
'model': 'dummy',
'insecure': False,
'stream': True,
}).respond_with_json({})
httpserver.expect_ordered_request(
'/api/push',
method='POST',
json={
'model': 'dummy',
'insecure': False,
'stream': True,
},
).respond_with_json({})
client = Client(httpserver.url_for('/'))
response = client.push('dummy', stream=True)
@@ -241,11 +304,15 @@ def test_client_push_stream(httpserver: HTTPServer):
def test_client_create_path(httpserver: HTTPServer):
httpserver.expect_ordered_request(PrefixPattern('/api/blobs/'), method='HEAD').respond_with_response(Response(status=200))
httpserver.expect_ordered_request('/api/create', method='POST', json={
'model': 'dummy',
'modelfile': 'FROM @sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855\n',
'stream': False,
}).respond_with_json({})
httpserver.expect_ordered_request(
'/api/create',
method='POST',
json={
'model': 'dummy',
'modelfile': 'FROM @sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855\n',
'stream': False,
},
).respond_with_json({})
client = Client(httpserver.url_for('/'))
@@ -260,11 +327,15 @@ def test_client_create_path(httpserver: HTTPServer):
def test_client_create_path_relative(httpserver: HTTPServer):
httpserver.expect_ordered_request(PrefixPattern('/api/blobs/'), method='HEAD').respond_with_response(Response(status=200))
httpserver.expect_ordered_request('/api/create', method='POST', json={
'model': 'dummy',
'modelfile': 'FROM @sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855\n',
'stream': False,
}).respond_with_json({})
httpserver.expect_ordered_request(
'/api/create',
method='POST',
json={
'model': 'dummy',
'modelfile': 'FROM @sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855\n',
'stream': False,
},
).respond_with_json({})
client = Client(httpserver.url_for('/'))
@@ -288,11 +359,15 @@ def userhomedir():
def test_client_create_path_user_home(httpserver: HTTPServer, userhomedir):
httpserver.expect_ordered_request(PrefixPattern('/api/blobs/'), method='HEAD').respond_with_response(Response(status=200))
httpserver.expect_ordered_request('/api/create', method='POST', json={
'model': 'dummy',
'modelfile': 'FROM @sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855\n',
'stream': False,
}).respond_with_json({})
httpserver.expect_ordered_request(
'/api/create',
method='POST',
json={
'model': 'dummy',
'modelfile': 'FROM @sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855\n',
'stream': False,
},
).respond_with_json({})
client = Client(httpserver.url_for('/'))
@@ -307,11 +382,15 @@ def test_client_create_path_user_home(httpserver: HTTPServer, userhomedir):
def test_client_create_modelfile(httpserver: HTTPServer):
httpserver.expect_ordered_request(PrefixPattern('/api/blobs/'), method='HEAD').respond_with_response(Response(status=200))
httpserver.expect_ordered_request('/api/create', method='POST', json={
'model': 'dummy',
'modelfile': 'FROM @sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855\n',
'stream': False,
}).respond_with_json({})
httpserver.expect_ordered_request(
'/api/create',
method='POST',
json={
'model': 'dummy',
'modelfile': 'FROM @sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855\n',
'stream': False,
},
).respond_with_json({})
client = Client(httpserver.url_for('/'))
@@ -321,11 +400,15 @@ def test_client_create_modelfile(httpserver: HTTPServer):
def test_client_create_from_library(httpserver: HTTPServer):
httpserver.expect_ordered_request('/api/create', method='POST', json={
'model': 'dummy',
'modelfile': 'FROM llama2\n',
'stream': False,
}).respond_with_json({})
httpserver.expect_ordered_request(
'/api/create',
method='POST',
json={
'model': 'dummy',
'modelfile': 'FROM llama2\n',
'stream': False,
},
).respond_with_json({})
client = Client(httpserver.url_for('/'))
@@ -356,13 +439,17 @@ def test_client_create_blob_exists(httpserver: HTTPServer):
@pytest.mark.asyncio
async def test_async_client_chat(httpserver: HTTPServer):
httpserver.expect_ordered_request('/api/chat', method='POST', json={
'model': 'dummy',
'messages': [{'role': 'user', 'content': 'Why is the sky blue?'}],
'stream': False,
'format': '',
'options': {},
}).respond_with_json({})
httpserver.expect_ordered_request(
'/api/chat',
method='POST',
json={
'model': 'dummy',
'messages': [{'role': 'user', 'content': 'Why is the sky blue?'}],
'stream': False,
'format': '',
'options': {},
},
).respond_with_json({})
client = AsyncClient(httpserver.url_for('/'))
response = await client.chat('dummy', messages=[{'role': 'user', 'content': 'Why is the sky blue?'}])
@@ -371,13 +458,17 @@ async def test_async_client_chat(httpserver: HTTPServer):
@pytest.mark.asyncio
async def test_async_client_chat_stream(httpserver: HTTPServer):
httpserver.expect_ordered_request('/api/chat', method='POST', json={
'model': 'dummy',
'messages': [{'role': 'user', 'content': 'Why is the sky blue?'}],
'stream': True,
'format': '',
'options': {},
}).respond_with_json({})
httpserver.expect_ordered_request(
'/api/chat',
method='POST',
json={
'model': 'dummy',
'messages': [{'role': 'user', 'content': 'Why is the sky blue?'}],
'stream': True,
'format': '',
'options': {},
},
).respond_with_json({})
client = AsyncClient(httpserver.url_for('/'))
response = await client.chat('dummy', messages=[{'role': 'user', 'content': 'Why is the sky blue?'}], stream=True)
@@ -386,19 +477,23 @@ async def test_async_client_chat_stream(httpserver: HTTPServer):
@pytest.mark.asyncio
async def test_async_client_chat_images(httpserver: HTTPServer):
httpserver.expect_ordered_request('/api/chat', method='POST', json={
'model': 'dummy',
'messages': [
{
'role': 'user',
'content': 'Why is the sky blue?',
'images': ['iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAIAAACQd1PeAAAADElEQVR4nGNgYGAAAAAEAAH2FzhVAAAAAElFTkSuQmCC'],
},
],
'stream': False,
'format': '',
'options': {},
}).respond_with_json({})
httpserver.expect_ordered_request(
'/api/chat',
method='POST',
json={
'model': 'dummy',
'messages': [
{
'role': 'user',
'content': 'Why is the sky blue?',
'images': ['iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAIAAACQd1PeAAAADElEQVR4nGNgYGAAAAAEAAH2FzhVAAAAAElFTkSuQmCC'],
},
],
'stream': False,
'format': '',
'options': {},
},
).respond_with_json({})
client = AsyncClient(httpserver.url_for('/'))
@@ -410,18 +505,22 @@ async def test_async_client_chat_images(httpserver: HTTPServer):
@pytest.mark.asyncio
async def test_async_client_generate(httpserver: HTTPServer):
httpserver.expect_ordered_request('/api/generate', method='POST', json={
'model': 'dummy',
'prompt': 'Why is the sky blue?',
'system': '',
'template': '',
'context': [],
'stream': False,
'raw': False,
'images': [],
'format': '',
'options': {},
}).respond_with_json({})
httpserver.expect_ordered_request(
'/api/generate',
method='POST',
json={
'model': 'dummy',
'prompt': 'Why is the sky blue?',
'system': '',
'template': '',
'context': [],
'stream': False,
'raw': False,
'images': [],
'format': '',
'options': {},
},
).respond_with_json({})
client = AsyncClient(httpserver.url_for('/'))
response = await client.generate('dummy', 'Why is the sky blue?')
@@ -430,18 +529,22 @@ async def test_async_client_generate(httpserver: HTTPServer):
@pytest.mark.asyncio
async def test_async_client_generate_stream(httpserver: HTTPServer):
httpserver.expect_ordered_request('/api/generate', method='POST', json={
'model': 'dummy',
'prompt': 'Why is the sky blue?',
'system': '',
'template': '',
'context': [],
'stream': True,
'raw': False,
'images': [],
'format': '',
'options': {},
}).respond_with_json({})
httpserver.expect_ordered_request(
'/api/generate',
method='POST',
json={
'model': 'dummy',
'prompt': 'Why is the sky blue?',
'system': '',
'template': '',
'context': [],
'stream': True,
'raw': False,
'images': [],
'format': '',
'options': {},
},
).respond_with_json({})
client = AsyncClient(httpserver.url_for('/'))
response = await client.generate('dummy', 'Why is the sky blue?', stream=True)
@@ -450,18 +553,22 @@ async def test_async_client_generate_stream(httpserver: HTTPServer):
@pytest.mark.asyncio
async def test_async_client_generate_images(httpserver: HTTPServer):
httpserver.expect_ordered_request('/api/generate', method='POST', json={
'model': 'dummy',
'prompt': 'Why is the sky blue?',
'system': '',
'template': '',
'context': [],
'stream': False,
'raw': False,
'images': ['iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAIAAACQd1PeAAAADElEQVR4nGNgYGAAAAAEAAH2FzhVAAAAAElFTkSuQmCC'],
'format': '',
'options': {},
}).respond_with_json({})
httpserver.expect_ordered_request(
'/api/generate',
method='POST',
json={
'model': 'dummy',
'prompt': 'Why is the sky blue?',
'system': '',
'template': '',
'context': [],
'stream': False,
'raw': False,
'images': ['iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAIAAACQd1PeAAAADElEQVR4nGNgYGAAAAAEAAH2FzhVAAAAAElFTkSuQmCC'],
'format': '',
'options': {},
},
).respond_with_json({})
client = AsyncClient(httpserver.url_for('/'))
@@ -473,11 +580,15 @@ async def test_async_client_generate_images(httpserver: HTTPServer):
@pytest.mark.asyncio
async def test_async_client_pull(httpserver: HTTPServer):
httpserver.expect_ordered_request('/api/pull', method='POST', json={
'model': 'dummy',
'insecure': False,
'stream': False,
}).respond_with_json({})
httpserver.expect_ordered_request(
'/api/pull',
method='POST',
json={
'model': 'dummy',
'insecure': False,
'stream': False,
},
).respond_with_json({})
client = AsyncClient(httpserver.url_for('/'))
response = await client.pull('dummy')
@@ -486,11 +597,15 @@ async def test_async_client_pull(httpserver: HTTPServer):
@pytest.mark.asyncio
async def test_async_client_pull_stream(httpserver: HTTPServer):
httpserver.expect_ordered_request('/api/pull', method='POST', json={
'model': 'dummy',
'insecure': False,
'stream': True,
}).respond_with_json({})
httpserver.expect_ordered_request(
'/api/pull',
method='POST',
json={
'model': 'dummy',
'insecure': False,
'stream': True,
},
).respond_with_json({})
client = AsyncClient(httpserver.url_for('/'))
response = await client.pull('dummy', stream=True)
@@ -499,11 +614,15 @@ async def test_async_client_pull_stream(httpserver: HTTPServer):
@pytest.mark.asyncio
async def test_async_client_push(httpserver: HTTPServer):
httpserver.expect_ordered_request('/api/push', method='POST', json={
'model': 'dummy',
'insecure': False,
'stream': False,
}).respond_with_json({})
httpserver.expect_ordered_request(
'/api/push',
method='POST',
json={
'model': 'dummy',
'insecure': False,
'stream': False,
},
).respond_with_json({})
client = AsyncClient(httpserver.url_for('/'))
response = await client.push('dummy')
@@ -512,11 +631,15 @@ async def test_async_client_push(httpserver: HTTPServer):
@pytest.mark.asyncio
async def test_async_client_push_stream(httpserver: HTTPServer):
httpserver.expect_ordered_request('/api/push', method='POST', json={
'model': 'dummy',
'insecure': False,
'stream': True,
}).respond_with_json({})
httpserver.expect_ordered_request(
'/api/push',
method='POST',
json={
'model': 'dummy',
'insecure': False,
'stream': True,
},
).respond_with_json({})
client = AsyncClient(httpserver.url_for('/'))
response = await client.push('dummy', stream=True)
@@ -526,11 +649,15 @@ async def test_async_client_push_stream(httpserver: HTTPServer):
@pytest.mark.asyncio
async def test_async_client_create_path(httpserver: HTTPServer):
httpserver.expect_ordered_request(PrefixPattern('/api/blobs/'), method='HEAD').respond_with_response(Response(status=200))
httpserver.expect_ordered_request('/api/create', method='POST', json={
'model': 'dummy',
'modelfile': 'FROM @sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855\n',
'stream': False,
}).respond_with_json({})
httpserver.expect_ordered_request(
'/api/create',
method='POST',
json={
'model': 'dummy',
'modelfile': 'FROM @sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855\n',
'stream': False,
},
).respond_with_json({})
client = AsyncClient(httpserver.url_for('/'))
@@ -546,11 +673,15 @@ async def test_async_client_create_path(httpserver: HTTPServer):
@pytest.mark.asyncio
async def test_async_client_create_path_relative(httpserver: HTTPServer):
httpserver.expect_ordered_request(PrefixPattern('/api/blobs/'), method='HEAD').respond_with_response(Response(status=200))
httpserver.expect_ordered_request('/api/create', method='POST', json={
'model': 'dummy',
'modelfile': 'FROM @sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855\n',
'stream': False,
}).respond_with_json({})
httpserver.expect_ordered_request(
'/api/create',
method='POST',
json={
'model': 'dummy',
'modelfile': 'FROM @sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855\n',
'stream': False,
},
).respond_with_json({})
client = AsyncClient(httpserver.url_for('/'))
@@ -566,11 +697,15 @@ async def test_async_client_create_path_relative(httpserver: HTTPServer):
@pytest.mark.asyncio
async def test_async_client_create_path_user_home(httpserver: HTTPServer, userhomedir):
httpserver.expect_ordered_request(PrefixPattern('/api/blobs/'), method='HEAD').respond_with_response(Response(status=200))
httpserver.expect_ordered_request('/api/create', method='POST', json={
'model': 'dummy',
'modelfile': 'FROM @sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855\n',
'stream': False,
}).respond_with_json({})
httpserver.expect_ordered_request(
'/api/create',
method='POST',
json={
'model': 'dummy',
'modelfile': 'FROM @sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855\n',
'stream': False,
},
).respond_with_json({})
client = AsyncClient(httpserver.url_for('/'))
@@ -586,11 +721,15 @@ async def test_async_client_create_path_user_home(httpserver: HTTPServer, userho
@pytest.mark.asyncio
async def test_async_client_create_modelfile(httpserver: HTTPServer):
httpserver.expect_ordered_request(PrefixPattern('/api/blobs/'), method='HEAD').respond_with_response(Response(status=200))
httpserver.expect_ordered_request('/api/create', method='POST', json={
'model': 'dummy',
'modelfile': 'FROM @sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855\n',
'stream': False,
}).respond_with_json({})
httpserver.expect_ordered_request(
'/api/create',
method='POST',
json={
'model': 'dummy',
'modelfile': 'FROM @sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855\n',
'stream': False,
},
).respond_with_json({})
client = AsyncClient(httpserver.url_for('/'))
@@ -601,11 +740,15 @@ async def test_async_client_create_modelfile(httpserver: HTTPServer):
@pytest.mark.asyncio
async def test_async_client_create_from_library(httpserver: HTTPServer):
httpserver.expect_ordered_request('/api/create', method='POST', json={
'model': 'dummy',
'modelfile': 'FROM llama2\n',
'stream': False,
}).respond_with_json({})
httpserver.expect_ordered_request(
'/api/create',
method='POST',
json={
'model': 'dummy',
'modelfile': 'FROM llama2\n',
'stream': False,
},
).respond_with_json({})
client = AsyncClient(httpserver.url_for('/'))