diff --git a/docs/source/_toctree.yml b/docs/source/_toctree.yml
index 3e74d4086b..e51f2509cf 100644
--- a/docs/source/_toctree.yml
+++ b/docs/source/_toctree.yml
@@ -11,6 +11,8 @@
 - sections:
   - local: quickstart_spaces
     title: Train on Spaces
+  - local: quickstart_py
+    title: Python SDK
   - local: quickstart
     title: Train Locally
   - local: config
diff --git a/docs/source/quickstart.mdx b/docs/source/quickstart.mdx
index 51028cc54c..725ea67ff5 100644
--- a/docs/source/quickstart.mdx
+++ b/docs/source/quickstart.mdx
@@ -1,4 +1,4 @@
-# Quickstart
+# Quickstart Guide for Local Training
 
 This quickstart is for local installation and usage. If you want to use AutoTrain on Hugging Face Spaces, please refer to the *AutoTrain on Hugging Face Spaces* section.
 
diff --git a/docs/source/quickstart_py.mdx b/docs/source/quickstart_py.mdx
new file mode 100644
index 0000000000..9d48ec39f8
--- /dev/null
+++ b/docs/source/quickstart_py.mdx
@@ -0,0 +1,44 @@
+# Quickstart with Python
+
+Example code:
+
+```python
+import os
+
+from autotrain.params import LLMTrainingParams
+from autotrain.project import AutoTrainProject
+
+
+params = LLMTrainingParams(
+    model="meta-llama/Llama-3.2-1B-Instruct",
+    data_path="HuggingFaceH4/no_robots",
+    chat_template="tokenizer",
+    text_column="messages",
+    train_split="train",
+    trainer="sft",
+    epochs=3,
+    batch_size=1,
+    lr=1e-5,
+    peft=True,
+    quantization="int4",
+    target_modules="all-linear",
+    padding="right",
+    optimizer="paged_adamw_8bit",
+    scheduler="cosine",
+    gradient_accumulation=8,
+    mixed_precision="bf16",
+    merge_adapter=True,
+    project_name="autotrain-llama32-1b-finetune",
+    log="tensorboard",
+    push_to_hub=False,
+    username=os.environ.get("HF_USERNAME"),
+    token=os.environ.get("HF_TOKEN"),
+)
+
+
+backend = "local"
+project = AutoTrainProject(params=params, backend=backend, process=True)
+project.create()
+```
+
+[[autodoc]] project.AutoTrainProject
\ No newline at end of file