diff --git a/docs/basic-usage/integration/py-library.md b/docs/basic-usage/integration/py-library.md index 3e126d0..8091ed8 100644 --- a/docs/basic-usage/integration/py-library.md +++ b/docs/basic-usage/integration/py-library.md @@ -36,7 +36,7 @@ pip install @janhq/cortex-python ```py -from @janhq/cortex-python import Cortex +from cortex import Cortex -client = OpenAI(base_url="http://localhost:3928", api_key="cortex") +client = Cortex(base_url="http://localhost:3928", api_key="cortex") model = "TheBloke/TinyLlama-1.1B-Chat-v1.0-GGUF" client.models.start(model=model) diff --git a/docs/installation/mac.mdx b/docs/installation/mac.mdx index a198c2f..813384f 100644 --- a/docs/installation/mac.mdx +++ b/docs/installation/mac.mdx @@ -22,7 +22,7 @@ Before installation, make sure that you have met the required [dependencies](#de - Stable: https://github.com/janhq/cortex.cpp/releases - Beta: https://github.com/janhq/cortex.cpp/releases - Nightly: https://github.com/janhq/cortex.cpp/releases -2. Ensure that Cortex.cpp is sucessfulyy installed: +2. Ensure that Cortex.cpp is successfully installed: ```bash # Stable cortex @@ -100,4 +100,4 @@ cortex -h - 16GB for running up to 7B models. - 32GB for running up to 13B models. #### Disk -- At least 10GB for app storage and model download. \ No newline at end of file +- At least 10GB for app storage and model download.