Commit
Merge branch 'main' into dependabot/pip/orjson-3.9.15
rchan26 committed Apr 5, 2024
2 parents 662cdbf + 34b0c67 commit 3fbba75
Showing 7 changed files with 3,345 additions and 1,666 deletions.
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
@@ -7,7 +7,7 @@ repos:
- id: trailing-whitespace

- repo: https://github.com/psf/black
rev: 23.3.0
rev: 24.3.0
hooks:
- id: black-jupyter
- id: black
10 changes: 0 additions & 10 deletions azure/production/__main__.py
@@ -132,16 +132,6 @@
resource_group_name=resource_group.name,
restart_policy=containerinstance.ContainerGroupRestartPolicy.ALWAYS,
sku=containerinstance.ContainerGroupSku.STANDARD,
volumes=[
containerinstance.VolumeArgs(
azure_file=containerinstance.AzureFileVolumeArgs(
share_name=file_share.name,
storage_account_key=storage_account_key,
storage_account_name=storage_account.name,
),
name="llama-data",
),
],
)

# Define the container group for the data creation
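Note: the deletion above removes the Azure File share volume from the production container group. For orientation only, a minimal pulumi-azure-native sketch of a container group defined without any volumes argument is shown below; the resource names, image, and CPU/memory figures are illustrative placeholders, not the repository's actual configuration.

```python
# Hypothetical sketch (not the repository's __main__.py): a container group
# created with pulumi-azure-native and no Azure File volumes attached.
import pulumi
from pulumi_azure_native import containerinstance, resources

resource_group = resources.ResourceGroup("reginald-rg")  # illustrative name

container_group = containerinstance.ContainerGroup(
    "reginald-production",  # illustrative resource name
    resource_group_name=resource_group.name,
    os_type=containerinstance.OperatingSystemTypes.LINUX,
    restart_policy=containerinstance.ContainerGroupRestartPolicy.ALWAYS,
    sku=containerinstance.ContainerGroupSku.STANDARD,
    containers=[
        containerinstance.ContainerArgs(
            name="reginald-app",  # illustrative container name
            image="ghcr.io/example/reginald:latest",  # placeholder image
            resources=containerinstance.ResourceRequirementsArgs(
                requests=containerinstance.ResourceRequestsArgs(
                    cpu=1.0, memory_in_gb=4.0  # placeholder sizing
                ),
            ),
        )
    ],
    # Note: no volumes=[...] argument, matching the state after this change.
)
```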
4,704 changes: 3,128 additions & 1,576 deletions poetry.lock

Large diffs are not rendered by default.

54 changes: 31 additions & 23 deletions pyproject.toml
@@ -1,6 +1,6 @@
[tool.poetry]
name = "reginald"
version = "0.1.0"
version= " 0.1.0"
description = "A Slack bot for REG Hack Week 2023"
authors = ["Evelina Gabasova <[email protected]>",
"James Robinson <[email protected]>",
@@ -17,36 +17,44 @@ readme = "README.md"

[tool.poetry.dependencies]
python = ">=3.11,<3.12"
accelerate = "^0.25.0"
accelerate = "^0.28.0"
bitsandbytes = { version="^0.41.1", optional=true }
datasets = { version="^2.16.1", optional=true }
faiss-cpu = { version="^1.7.4", optional=true }
fastapi = { version="^0.103.1", optional=true }
gitpython = "^3.1.41"
gradio = { version = "^4.12.0", optional=true }
httpx = "^0.26.0"
fastapi = { version="^0.110.1", optional=true }
gitpython = "^3.1.43"
gradio = { version="^4.25.0", optional=true }
httpx = "^0.27.0"
ipykernel = { version="^6.23.2", optional=true }
langchain = "^0.0.354"
llama-cpp-python = "^0.2.27"
llama-index = "^0.9.29"
llama-hub = "^0.0.74"
nbconvert = { version = "^7.8.0", optional = true }
nest_asyncio = "^1.5.8"
openai = "^1.6.1"
pandas = "^2.0.2"
pulumi = { version = "^3.100.0", optional=true }
pulumi-azure-native = { version = "^2.24.0", optional=true }
pydantic = { version = "^2.4.1", optional=true }
requests = { version = "^2.31.0", optional=true }
safetensors = "^0.3.3"
langchain = "^0.1.14"
llama-cpp-python = "^0.2.58"
llama-index = "^0.10.26"
nbconvert = { version="^7.8.0", optional=true }
nest_asyncio = "^1.6.0"
openai = "^1.16.0"
pandas = "^2.2.1"
pulumi = { version="^3.100.0", optional=true }
pulumi-azure-native = { version="^2.24.0", optional=true }
pydantic = { version="^2.4.1", optional=true }
requests = { version="^2.31.0", optional=true }
safetensors = "^0.4.2"
slack-sdk = "^3.26.1"
sentence-transformers = "^2.2.2"
torch = "^2.1.2"
transformers = "^4.36.2"
sentence-transformers = "^2.6.1"
torch = "^2.2.2"
transformers = "^4.39.2"
uvicorn = { version="^0.23.2", optional=true }
llama-index-readers-github = "^0.1.7"
llama-index-llms-azure-openai = "^0.1.5"
llama-index-llms-openai = "^0.1.14"
llama-index-llms-huggingface = "^0.1.4"
llama-index-llms-llama-cpp = "^0.1.3"
llama-index-readers-file = "^0.1.13"
llama-index-embeddings-langchain = "^0.1.2"
langchain-community = "^0.0.31"
tiktoken = "^0.6.0"

[tool.poetry.group.dev.dependencies]
black = "^23.3.0"
black = "^24.3.0"
isort = "^5.12.0"
pre-commit = "^3.3.2"

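Note: the dependency changes above track the llama-index 0.9 → 0.10 migration, in which readers, LLM backends, and embeddings moved out of the monolithic package into per-integration plugins (the new llama-index-* entries). A hedged sketch of the import paths those plugin packages are expected to expose is given below; paths are believed correct for llama-index 0.10 but should be verified against the pinned versions.

```python
# Hedged sketch: import paths provided by the newly added llama-index 0.10
# plugin packages (verify against the versions pinned in pyproject.toml).
from llama_index.core import VectorStoreIndex, SimpleDirectoryReader  # llama-index core
from llama_index.readers.github import GithubRepositoryReader         # llama-index-readers-github
from llama_index.llms.azure_openai import AzureOpenAI                 # llama-index-llms-azure-openai
from llama_index.llms.openai import OpenAI                            # llama-index-llms-openai
from llama_index.llms.huggingface import HuggingFaceLLM               # llama-index-llms-huggingface
from llama_index.llms.llama_cpp import LlamaCPP                       # llama-index-llms-llama-cpp
from llama_index.embeddings.langchain import LangchainEmbedding       # llama-index-embeddings-langchain
```

SimpleDirectoryReader additionally relies on llama-index-readers-file, which is also among the added dependencies.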
17 changes: 8 additions & 9 deletions reginald/models/create_index.py
@@ -2,23 +2,22 @@
import pathlib
from typing import Any

from llama_index.llms import (
from llama_index.core.base.llms.types import (
CompletionResponse,
CompletionResponseGen,
CustomLLM,
LLMMetadata,
)
from llama_index.llms.base import llm_completion_callback
from llama_index.llms.custom import CustomLLM
from llama_index.core.llms.callbacks import llm_completion_callback
from llama_index.core.llms.custom import CustomLLM

from reginald.models.models.llama_index import DataIndexCreator, setup_service_context
from reginald.models.models.llama_index import DataIndexCreator, setup_settings
from reginald.models.setup_llm import DEFAULT_ARGS
from reginald.parser_utils import Parser, get_args


class DummyLLM(CustomLLM):
"""
Dummy LLM for passing into the ServiceContext below to create the index.
Dummy LLM for passing into the Settings below to create the index.
The minimum required attributes are set here, but this LLM is not used anywhere else.
"""

@@ -65,8 +64,8 @@ def main():
args = get_args(parser)

# pass args to create data index
logging.info("Setting up service context...")
service_context = setup_service_context(
logging.info("Setting up settings...")
settings = setup_settings(
llm=DummyLLM(),
max_input_size=args.max_input_size or DEFAULT_ARGS["max_input_size"],
num_output=args.num_output or DEFAULT_ARGS["num_output"],
@@ -81,7 +80,7 @@
data_creator = DataIndexCreator(
data_dir=pathlib.Path(args.data_dir or DEFAULT_ARGS["data_dir"]).resolve(),
which_index=args.which_index or DEFAULT_ARGS["which_index"],
service_context=service_context,
settings=settings,
)
data_creator.create_index()
data_creator.save_index()
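Note: the rename from setup_service_context to setup_settings follows llama-index 0.10's replacement of ServiceContext with the global Settings object. A hedged sketch of that general pattern is below; setup_settings is the repository's own helper whose body is not shown here, and MockLLM (an import path assumed for llama-index 0.10) merely stands in for the repository's DummyLLM.

```python
# Hedged sketch of the llama-index 0.10 configuration pattern that replaces
# ServiceContext; the repository's setup_settings helper may differ in detail.
from llama_index.core import Settings
from llama_index.core.llms import MockLLM  # stand-in for the repo's DummyLLM

Settings.llm = MockLLM()    # create_index.py passes its own dummy LLM here
Settings.chunk_size = 1024  # values the repo derives from CLI args / DEFAULT_ARGS
```

Indexes built afterwards pick this configuration up implicitly, which is why DataIndexCreator now receives settings rather than a service_context.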