From 7eae50377d22674390fb3f228668808ba57d3590 Mon Sep 17 00:00:00 2001 From: Aaron Pham <29749331+aarnphm@users.noreply.github.com> Date: Wed, 22 Nov 2023 11:50:50 +0000 Subject: [PATCH] infra: prepare for release 0.4.26 [generated] [skip ci] Signed-off-by: Aaron Pham <29749331+aarnphm@users.noreply.github.com> --- CHANGELOG.md | 11 +++++++++++ changelog.d/725.feature.md | 5 ----- openllm-node/package.json | 2 +- openllm-python/CHANGELOG.md | 11 +++++++++++ openllm-python/pyproject.toml | 4 ++-- package.json | 2 +- 6 files changed, 26 insertions(+), 9 deletions(-) delete mode 100644 changelog.d/725.feature.md diff --git a/CHANGELOG.md b/CHANGELOG.md index 8df6d30f0..d337344fe 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -18,6 +18,17 @@ This changelog is managed by towncrier and is compiled at release time. +## [0.4.26](https://github.com/bentoml/openllm/tree/v0.4.26) + +### Features + +- `/v1/chat/completions` now accepts two additional parameters + + - `chat_templates`: this is a string of [Jinja templates](https://huggingface.co/docs/transformers/main/chat_templating#templates-for-chat-models) + to use with this model. By default, it will just use the default chat template provided by the model, based on config.json. + - `add_generation_prompt`: See [here](https://huggingface.co/docs/transformers/main/chat_templating#how-do-i-use-chat-templates) + [#725](https://github.com/bentoml/openllm/issues/725) + ## [0.4.25](https://github.com/bentoml/openllm/tree/v0.4.25) No significant changes. diff --git a/changelog.d/725.feature.md b/changelog.d/725.feature.md deleted file mode 100644 index a99987c09..000000000 --- a/changelog.d/725.feature.md +++ /dev/null @@ -1,5 +0,0 @@ -`/v1/chat/completions` now accepts two additional parameters -
By default, it will just use the default models provided chat templates based on config.json. -- `add_generation_prompt`: See [here](https://huggingface.co/docs/transformers/main/chat_templating#how-do-i-use-chat-templates) diff --git a/openllm-node/package.json b/openllm-node/package.json index 8f7f06473..fa737f58b 100644 --- a/openllm-node/package.json +++ b/openllm-node/package.json @@ -1,6 +1,6 @@ { "name": "@bentoml/openllm-node", - "version": "0.4.26.dev0", + "version": "0.4.26", "description": "NodeJS library for OpenLLM", "type": "module", "repository": { diff --git a/openllm-python/CHANGELOG.md b/openllm-python/CHANGELOG.md index 8df6d30f0..d337344fe 100644 --- a/openllm-python/CHANGELOG.md +++ b/openllm-python/CHANGELOG.md @@ -18,6 +18,17 @@ This changelog is managed by towncrier and is compiled at release time. +## [0.4.26](https://github.com/bentoml/openllm/tree/v0.4.26) + +### Features + +- `/v1/chat/completions` now accepts two additional parameters + + - `chat_templates`: this is a string of [Jinja templates](https://huggingface.co/docs/transformers/main/chat_templating#templates-for-chat-models) + to use with this model. By default, it will just use the default chat template provided by the model, based on config.json. + - `add_generation_prompt`: See [here](https://huggingface.co/docs/transformers/main/chat_templating#how-do-i-use-chat-templates) + [#725](https://github.com/bentoml/openllm/issues/725) + ## [0.4.25](https://github.com/bentoml/openllm/tree/v0.4.25) No significant changes. 
diff --git a/openllm-python/pyproject.toml b/openllm-python/pyproject.toml index 4599c32c2..660344e33 100644 --- a/openllm-python/pyproject.toml +++ b/openllm-python/pyproject.toml @@ -40,8 +40,8 @@ classifiers = [ dependencies = [ "bentoml[io]>=1.1.10", "transformers[torch,tokenizers]>=4.35.0", - "openllm-client>=0.4.25", - "openllm-core>=0.4.25", + "openllm-client>=0.4.26", + "openllm-core>=0.4.26", "safetensors", "optimum>=1.12.0", "accelerate", diff --git a/package.json b/package.json index 8fe3785d7..8599b861f 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@bentoml/openllm-monorepo", - "version": "0.4.26.dev0", + "version": "0.4.26", "description": "OpenLLM: Operating LLMs in production", "author": "Aaron Pham <29749331+aarnphm@users.noreply.github.com>", "license": "Apache-2.0",