Skip to content

Commit

Permalink
adding extra requires in setup.py, fixing ci
Browse files Browse the repository at this point in the history
  • Loading branch information
deepanker13 committed Jan 10, 2024
1 parent 2b72670 commit a449ece
Show file tree
Hide file tree
Showing 5 changed files with 29 additions and 16 deletions.
1 change: 0 additions & 1 deletion .github/workflows/integration-tests.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -109,7 +109,6 @@ jobs:
- name: Run tests
run: |
pip install pytest
pip install -r sdk/python/kubeflow/storage_init_container/requirements.txt
python3 -m pip install -e sdk/python; pytest sdk/python/test --log-cli-level=info --namespace=default
env:
GANG_SCHEDULER_NAME: ${{ matrix.gang-scheduler-name }}
Expand Down
4 changes: 2 additions & 2 deletions .github/workflows/test-python.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ jobs:
- name: Install dependencies
run: |
pip install pytest python-dateutil urllib3 kubernetes
pip install -r ./sdk/python/kubeflow/storage_init_container/requirements.txt
pip install -U './sdk/python[huggingface]'
- name: Run unit test for training sdk
run: pytest ./sdk/python/kubeflow/training/api/training_client_test.py
3 changes: 1 addition & 2 deletions sdk/python/Dockerfile.conformance
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@
# limitations under the License.

# Dockerfile for building the source code of conformance tests
FROM python:3.10
FROM python:3.10-alpine

WORKDIR /kubeflow/training-operator/sdk/python

Expand All @@ -25,7 +25,6 @@ COPY sdk/python/conformance/run.sh .
RUN chmod +x run.sh

RUN pip install pytest
RUN pip install -r ./kubeflow/storage_init_container/requirements.txt
RUN python -m pip install -e .

ENTRYPOINT [ "./run.sh" ]
32 changes: 22 additions & 10 deletions sdk/python/kubeflow/training/api/training_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,13 +25,8 @@
from kubeflow.training.constants import constants
from kubeflow.training.utils import utils

from kubeflow.storage_init_container.s3 import S3DatasetParams
from kubeflow.storage_init_container.hugging_face import (
HuggingFaceModelParams,
HuggingFaceTrainParams,
HfDatasetParams,
INIT_CONTAINER_MOUNT_PATH,
)
from kubeflow.storage_init_container.constants import INIT_CONTAINER_MOUNT_PATH


logger = logging.getLogger(__name__)

Expand Down Expand Up @@ -103,15 +98,32 @@ def train(
num_workers: int = 1,
num_procs_per_worker: int = 1,
storage_config: Dict[str, str] = {"size": "10Gi", "storage_class": None},
model_provider_parameters: HuggingFaceModelParams = None,
dataset_provider_parameters: Union[HfDatasetParams, S3DatasetParams] = None,
train_parameters: HuggingFaceTrainParams = None,
model_provider_parameters=None,
dataset_provider_parameters=None,
train_parameters=None,
resources_per_worker: Union[dict, client.V1ResourceRequirements, None] = None,
# Dict[Literal["gpu", "cpu", "memory"], any] = None,
):
"""
Higher level train api
model_provider_parameters: It can be of type HuggingFaceModelParams
dataset_provider_parameters: It can be of type HfDatasetParams or S3DatasetParams
train_parameters: It can be of type HuggingFaceTrainParams
"""
try:
import peft
import transformers
except ImportError:
print(
"train api dependencies not installed. Run pip install -U 'kubeflow-training[huggingface]' "
)
from kubeflow.storage_init_container.s3 import S3DatasetParams
from kubeflow.storage_init_container.hugging_face import (
HuggingFaceModelParams,
HuggingFaceTrainParams,
HfDatasetParams,
)

if (
not name
or not model_provider_parameters
Expand Down
5 changes: 4 additions & 1 deletion sdk/python/setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -64,5 +64,8 @@
],
install_requires=REQUIRES,
tests_require=TESTS_REQUIRES,
extras_require={"test": TESTS_REQUIRES},
extras_require={
"test": TESTS_REQUIRES,
"huggingface": ["transformers>=4.20.0", "peft>=0.3.0"],
},
)

0 comments on commit a449ece

Please sign in to comment.