diff --git a/README.md b/README.md
index 6089b44eb..efb6f6dcc 100644
--- a/README.md
+++ b/README.md
@@ -98,7 +98,7 @@ npm i @xenova/transformers
Alternatively, you can use it in vanilla JS, without any bundler, by using a CDN or static hosting. For example, using [ES Modules](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Modules), you can import the library with:
```html
<script type="module">
-    import { pipeline } from 'https://cdn.jsdelivr.net/npm/@xenova/transformers@2.6.0';
+    import { pipeline } from 'https://cdn.jsdelivr.net/npm/@xenova/transformers@2.6.1';
</script>
```
@@ -126,7 +126,7 @@ Want to jump straight in? Get started with one of our sample applications/templa
-By default, Transformers.js uses [hosted pretrained models](https://huggingface.co/models) and [precompiled WASM binaries](https://cdn.jsdelivr.net/npm/@xenova/transformers@2.6.0/dist/), which should work out-of-the-box. You can customize this as follows:
+By default, Transformers.js uses [hosted pretrained models](https://huggingface.co/models) and [precompiled WASM binaries](https://cdn.jsdelivr.net/npm/@xenova/transformers@2.6.1/dist/), which should work out-of-the-box. You can customize this as follows:
### Settings
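
> For readers skimming this diff: the Settings section referenced above amounts to overriding fields on the library's exported `env` object before creating a pipeline. A minimal sketch, assuming the documented `env` export of `@xenova/transformers`; the `/models/` and `/wasm/` paths are placeholders for wherever you host the files yourself:

```js
import { env, pipeline } from '@xenova/transformers';

// Point the library at self-hosted resources instead of the defaults above.
env.allowRemoteModels = false;                 // don't fetch models from the Hugging Face Hub
env.localModelPath = '/models/';               // placeholder: directory serving your converted ONNX models
env.backends.onnx.wasm.wasmPaths = '/wasm/';   // placeholder: self-hosted onnxruntime-web WASM binaries

const classifier = await pipeline('sentiment-analysis');
```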
diff --git a/docs/snippets/2_installation.snippet b/docs/snippets/2_installation.snippet
index 05f69c170..b255f5407 100644
--- a/docs/snippets/2_installation.snippet
+++ b/docs/snippets/2_installation.snippet
@@ -7,6 +7,6 @@ npm i @xenova/transformers
Alternatively, you can use it in vanilla JS, without any bundler, by using a CDN or static hosting. For example, using [ES Modules](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Modules), you can import the library with:
```html
<script type="module">
-    import { pipeline } from 'https://cdn.jsdelivr.net/npm/@xenova/transformers@2.6.0';
+    import { pipeline } from 'https://cdn.jsdelivr.net/npm/@xenova/transformers@2.6.1';
</script>
```
diff --git a/docs/snippets/4_custom-usage.snippet b/docs/snippets/4_custom-usage.snippet
index a656a7b1c..0a758d581 100644
--- a/docs/snippets/4_custom-usage.snippet
+++ b/docs/snippets/4_custom-usage.snippet
@@ -1,6 +1,6 @@
-By default, Transformers.js uses [hosted pretrained models](https://huggingface.co/models) and [precompiled WASM binaries](https://cdn.jsdelivr.net/npm/@xenova/transformers@2.6.0/dist/), which should work out-of-the-box. You can customize this as follows:
+By default, Transformers.js uses [hosted pretrained models](https://huggingface.co/models) and [precompiled WASM binaries](https://cdn.jsdelivr.net/npm/@xenova/transformers@2.6.1/dist/), which should work out-of-the-box. You can customize this as follows:
### Settings
diff --git a/package-lock.json b/package-lock.json
index b09db9463..6a3847db3 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -1,12 +1,12 @@
{
"name": "@xenova/transformers",
- "version": "2.6.0",
+ "version": "2.6.1",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "@xenova/transformers",
- "version": "2.6.0",
+ "version": "2.6.1",
"license": "Apache-2.0",
"dependencies": {
"onnxruntime-web": "1.14.0",
diff --git a/package.json b/package.json
index ed3216411..ffeb8852e 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
{
"name": "@xenova/transformers",
- "version": "2.6.0",
+ "version": "2.6.1",
"description": "State-of-the-art Machine Learning for the web. Run 🤗 Transformers directly in your browser, with no need for a server!",
"main": "./src/transformers.js",
"types": "./types/transformers.d.ts",
diff --git a/src/env.js b/src/env.js
index fb7f24130..5e5ecb876 100644
--- a/src/env.js
+++ b/src/env.js
@@ -29,7 +29,7 @@ import url from 'url';
import { ONNX } from './backends/onnx.js';
const { env: onnx_env } = ONNX;
-const VERSION = '2.6.0';
+const VERSION = '2.6.1';
// Check if various APIs are available (depends on environment)
const WEB_CACHE_AVAILABLE = typeof self !== 'undefined' && 'caches' in self;