diff --git a/README.md b/README.md
index df6a3028d..815ed4c5e 100644
--- a/README.md
+++ b/README.md
@@ -98,7 +98,7 @@ npm i @xenova/transformers
 Alternatively, you can use it in vanilla JS, without any bundler, by using a CDN or static hosting. For example, using [ES Modules](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Modules), you can import the library with:
 ```html
 <script type="module">
-    import { pipeline } from 'https://cdn.jsdelivr.net/npm/@xenova/transformers@2.5.4';
+    import { pipeline } from 'https://cdn.jsdelivr.net/npm/@xenova/transformers@2.6.0';
 </script>
 ```
@@ -125,7 +125,7 @@ Want to jump straight in? Get started with one of our sample applications/templa
 
-By default, Transformers.js uses [hosted pretrained models](https://huggingface.co/models) and [precompiled WASM binaries](https://cdn.jsdelivr.net/npm/@xenova/transformers@2.5.4/dist/), which should work out-of-the-box. You can customize this as follows:
+By default, Transformers.js uses [hosted pretrained models](https://huggingface.co/models) and [precompiled WASM binaries](https://cdn.jsdelivr.net/npm/@xenova/transformers@2.6.0/dist/), which should work out-of-the-box. You can customize this as follows:
 
 ### Settings
diff --git a/docs/snippets/2_installation.snippet b/docs/snippets/2_installation.snippet
index 78e90e90b..05f69c170 100644
--- a/docs/snippets/2_installation.snippet
+++ b/docs/snippets/2_installation.snippet
@@ -7,6 +7,6 @@ npm i @xenova/transformers
 Alternatively, you can use it in vanilla JS, without any bundler, by using a CDN or static hosting. For example, using [ES Modules](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Modules), you can import the library with:
 ```html
 <script type="module">
-    import { pipeline } from 'https://cdn.jsdelivr.net/npm/@xenova/transformers@2.5.4';
+    import { pipeline } from 'https://cdn.jsdelivr.net/npm/@xenova/transformers@2.6.0';
 </script>
 ```
diff --git a/docs/snippets/4_custom-usage.snippet b/docs/snippets/4_custom-usage.snippet
index 2e7aef730..a656a7b1c 100644
--- a/docs/snippets/4_custom-usage.snippet
+++ b/docs/snippets/4_custom-usage.snippet
@@ -1,6 +1,6 @@
-By default, Transformers.js uses [hosted pretrained models](https://huggingface.co/models) and [precompiled WASM binaries](https://cdn.jsdelivr.net/npm/@xenova/transformers@2.5.4/dist/), which should work out-of-the-box. You can customize this as follows:
+By default, Transformers.js uses [hosted pretrained models](https://huggingface.co/models) and [precompiled WASM binaries](https://cdn.jsdelivr.net/npm/@xenova/transformers@2.6.0/dist/), which should work out-of-the-box. You can customize this as follows:
 
 ### Settings
diff --git a/package-lock.json b/package-lock.json
index 28b4d3bc6..b09db9463 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -1,12 +1,12 @@
 {
   "name": "@xenova/transformers",
-  "version": "2.5.4",
+  "version": "2.6.0",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
       "name": "@xenova/transformers",
-      "version": "2.5.4",
+      "version": "2.6.0",
       "license": "Apache-2.0",
       "dependencies": {
         "onnxruntime-web": "1.14.0",
diff --git a/package.json b/package.json
index b12787470..ed3216411 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@xenova/transformers",
-  "version": "2.5.4",
+  "version": "2.6.0",
   "description": "State-of-the-art Machine Learning for the web. Run 🤗 Transformers directly in your browser, with no need for a server!",
   "main": "./src/transformers.js",
   "types": "./types/transformers.d.ts",
diff --git a/src/env.js b/src/env.js
index 0af70f12b..fb7f24130 100644
--- a/src/env.js
+++ b/src/env.js
@@ -29,7 +29,7 @@ import url from 'url';
 import { ONNX } from './backends/onnx.js';
 
 const { env: onnx_env } = ONNX;
-const VERSION = '2.5.4';
+const VERSION = '2.6.0';
 
 // Check if various APIs are available (depends on environment)
 const WEB_CACHE_AVAILABLE = typeof self !== 'undefined' && 'caches' in self;
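
For context on the "Custom usage → Settings" section that the README and `4_custom-usage.snippet` hunks point to: this bump also moves the default precompiled-WASM CDN path to `.../@xenova/transformers@2.6.0/dist/`. Below is a minimal sketch (not part of the diff) of how a consumer might override those defaults via the library's exported `env` object; it assumes the `env.backends.onnx.wasm.wasmPaths`, `env.allowRemoteModels`, and `env.localModelPath` settings documented for v2.x, and the `/wasm/` and `/models/` paths are hypothetical placeholders.

```js
// Sketch only: overriding the defaults referenced by the "Settings" section.
// The paths '/wasm/' and '/models/' are hypothetical placeholders.
import { env, pipeline } from '@xenova/transformers';

// Serve the precompiled WASM binaries yourself instead of the default
// jsDelivr location (which this release points at @2.6.0/dist/).
env.backends.onnx.wasm.wasmPaths = '/wasm/';

// Load converted models from your own server rather than the Hugging Face Hub.
env.allowRemoteModels = false;
env.localModelPath = '/models/';

// Pipelines created after this point pick up the overridden settings.
const classifier = await pipeline('sentiment-analysis');
console.log(await classifier('Transformers.js 2.6.0 works out of the box.'));
```

These overrides should be applied before the first `pipeline()` call, since the ONNX Runtime backend resolves the WASM paths when the first session is created.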