From 77f2ac87a1aa658431122d6f449c8225b6ebe212 Mon Sep 17 00:00:00 2001
From: Lance Martin
Date: Wed, 18 Sep 2024 11:24:56 -0700
Subject: [PATCH] Update ntbk

---
 ntbk/testing.ipynb | 157 ++++++++++++++++++++++++++++++++-------------
 1 file changed, 114 insertions(+), 43 deletions(-)

diff --git a/ntbk/testing.ipynb b/ntbk/testing.ipynb
index 7a34cf6..7a3cb80 100644
--- a/ntbk/testing.ipynb
+++ b/ntbk/testing.ipynb
@@ -38,7 +38,7 @@
    "source": [
     "from langgraph_sdk import get_client\n",
     "\n",
-    "client = get_client(url=\"http://localhost:53394\")"
+    "client = get_client(url=\"http://localhost:63252\")"
    ]
   },
   {
@@ -56,27 +56,41 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "schema = {\n",
+    "schema = {\n",
     "    \"type\": \"object\",\n",
     "    \"properties\": {\n",
     "        \"companies\": {\n",
-    "            \"type\": \"string\",\n",
-    "            \"description\": \"Names of top chip providers for LLM training\",\n",
-    "        },\n",
-    "        \"technologies\": {\n",
-    "            \"type\": \"string\",\n",
-    "            \"description\": \"Brief summary of key chip technologies used for LLM training\",\n",
-    "        },\n",
-    "        \"market_share\": {\n",
-    "            \"type\": \"string\",\n",
-    "            \"description\": \"Overview of market share distribution among top providers\",\n",
-    "        },\n",
-    "        \"future_outlook\": {\n",
-    "            \"type\": \"string\",\n",
-    "            \"description\": \"Brief summary of future prospects and developments in the field\",\n",
-    "        },\n",
+    "            \"type\": \"array\",\n",
+    "            \"items\": {\n",
+    "                \"type\": \"object\",\n",
+    "                \"properties\": {\n",
+    "                    \"name\": {\n",
+    "                        \"type\": \"string\",\n",
+    "                        \"description\": \"Company name\"\n",
+    "                    },\n",
+    "                    \"technologies\": {\n",
+    "                        \"type\": \"string\",\n",
+    "                        \"description\": \"Brief summary of key technologies used by the company\"\n",
+    "                    },\n",
+    "                    \"market_share\": {\n",
+    "                        \"type\": \"string\",\n",
+    "                        \"description\": \"Overview of market share for this company\"\n",
+    "                    },\n",
+    "                    \"future_outlook\": {\n",
+    "                        \"type\": \"string\",\n",
+    "                        \"description\": \"Brief summary of future prospects and developments in the field for this company\"\n",
+    "                    },\n",
+    "                    \"key_powers\": {\n",
+    "                        \"type\": \"string\",\n",
+    "                        \"description\": \"Which of the 7 Powers (Scale Economies, Network Economies, Counter Positioning, Switching Costs, Branding, Cornered Resource, Process Power) best describe this company's competitive advantage\"\n",
+    "                    }\n",
+    "                },\n",
+    "                \"required\": [\"name\", \"technologies\", \"market_share\", \"future_outlook\"]\n",
+    "            },\n",
+    "            \"description\": \"List of companies\"\n",
+    "        }\n",
+    "    },\n",
-    "    \"required\": [\"companies\", \"technologies\", \"market_share\", \"future_outlook\"],\n",
+    "    \"required\": [\"companies\"]\n",
     "}"
    ]
   },
   {
@@ -105,7 +119,7 @@
     "# Stream\n",
     "async for event in client.runs.stream(\n",
     "    thread[\"thread_id\"],\n",
-    "    assistant_id=\"agent\",\n",
+    "    assistant_id= \"agent\",\n",
     "    input={\n",
     "        \"topic\": topic,\n",
     "        \"extraction_schema\": schema,\n",
@@ -121,7 +135,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 11,
+   "execution_count": 5,
    "metadata": {},
    "outputs": [],
    "source": [
@@ -130,26 +144,74 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 12,
+   "execution_count": 10,
    "metadata": {},
    "outputs": [
     {
      "data": {
       "text/markdown": [
-       "\n",
        "# Top 5 Chip Providers for LLM Training\n",
        "\n",
-       "## Companies\n",
-       "Nvidia, AMD, Google, OpenAI, Microsoft, Meta, Amazon, Baidu, Alibaba, Huawei\n",
        "\n",
-       "## Key Technologies\n",
-       "GPUs (particularly Nvidia A100 and H100), TPUs (Google's custom AI chips), Large Language Models (LLMs)\n",
+       "## NVIDIA\n",
+       "\n",
+       "**Key Technologies:** A100 and H100 GPUs\n",
+       "\n",
+       "**Market Share:** Dominates the market with over 80% share in AI chips\n",
+       "\n",
+       "**Key Powers:** Scale Economies, Network Economies\n",
+       "\n",
+       "**Future Outlook:** Likely to maintain dominance but facing increasing competition from custom chips by cloud providers and other manufacturers\n",
+       "\n",
+       "---\n",
+       "\n",
+       "## AMD\n",
+       "\n",
+       "**Key Technologies:** MI250, MI300X, and Ryzen AI NPUs\n",
+       "\n",
+       "**Market Share:** Growing presence, estimated to be around 10-15% of the AI chip market\n",
+       "\n",
+       "**Key Powers:** Counter Positioning, Process Power\n",
+       "\n",
+       "**Future Outlook:** Expanding its GPU offerings to compete with NVIDIA, with potential for increased market share. Launching new chips like MI300X to target LLM training and inference\n",
+       "\n",
+       "---\n",
+       "\n",
+       "## Intel\n",
+       "\n",
+       "**Key Technologies:** Gaudi 2 and upcoming Gaudi 3 AI accelerators\n",
+       "\n",
+       "**Market Share:** Emerging player in the AI chip market, with competitive performance-per-dollar metrics\n",
+       "\n",
+       "**Key Powers:** Process Power, Counter Positioning\n",
+       "\n",
+       "**Future Outlook:** Promising outlook with Gaudi 2 showing strong performance and cost-effectiveness, and Gaudi 3 in development. Targeting both training and inference markets for LLMs\n",
+       "\n",
+       "---\n",
+       "\n",
+       "## Google\n",
+       "\n",
+       "**Key Technologies:** Custom Tensor Processing Units (TPUs), including TPU V5P\n",
+       "\n",
+       "**Market Share:** Significant in-house usage for Google's AI services, including Gemini LLM\n",
        "\n",
-       "## Market Share\n",
-       "Nvidia dominates the market, particularly in data center GPUs. In 2023, the top five LLM developers acquired around 88.22% of the market revenue. The global LLM market is projected to grow from $1,590 million in 2023 to $259,800 million in 2030, with a CAGR of 79.80%.\n",
        "\n",
+       "**Key Powers:** Cornered Resource, Process Power\n",
        "\n",
-       "## Future Outlook\n",
-       "The LLM market is expected to experience explosive growth, with projections reaching $35.43 billion by 2030 at a CAGR of 35.9%. Key developments include advanced pre-training techniques, multimodal models like Google's VideoPoet, and a focus on efficiency and specialized models. The industry is also seeing increased investment in AI infrastructure, prompt engineering, and MLOps (Machine Learning Operations) to support the growing demand for AI technologies.\n"
+       "**Future Outlook:** Continuing development of TPUs for internal use and potential expansion to cloud customers. TPU V5P offers improved performance for LLM training and inference\n",
+       "\n",
+       "---\n",
+       "\n",
+       "## Amazon Web Services (AWS)\n",
+       "\n",
+       "**Key Technologies:** Trainium2 and Graviton4 custom chips, Amazon SageMaker platform\n",
+       "\n",
+       "**Market Share:** Major cloud provider for AI services, exact market share in AI chips not specified\n",
+       "\n",
+       "**Key Powers:** Scale Economies, Network Economies\n",
+       "\n",
+       "**Future Outlook:** Expanding development and usage of in-house AI chips to reduce dependence on external providers. Focusing on custom silicon for AI fabric and specialized chips for LLM training and inference\n",
+       "\n",
+       "---\n"
       ],
       "text/plain": [
        "<IPython.core.display.Markdown object>"
@@ -162,26 +224,26 @@
    "source": [
     "from IPython.display import Markdown, display\n",
     "\n",
-    "\n",
     "def format_llm_chip_info(data):\n",
-    "    markdown_text = f\"\"\"\n",
-    "# Top 5 Chip Providers for LLM Training\n",
+    "    markdown_text = \"# Top 5 Chip Providers for LLM Training\\n\\n\"\n",
+    "    \n",
+    "    for company in data['companies']:\n",
+    "        markdown_text += f\"\"\"\n",
+    "## {company['name']}\n",
+    "\n",
+    "**Key Technologies:** {company['technologies']}\n",
     "\n",
-    "## Companies\n",
-    "{', '.join(data['companies'].split(', '))}\n",
+    "**Market Share:** {company['market_share']}\n",
     "\n",
-    "## Key Technologies\n",
-    "{data['technologies']}\n",
+    "**Key Powers:** {company.get('key_powers', 'Not specified')}\n",
     "\n",
-    "## Market Share\n",
-    "{data['market_share']}\n",
+    "**Future Outlook:** {company['future_outlook']}\n",
     "\n",
-    "## Future Outlook\n",
-    "{data['future_outlook']}\n",
+    "---\n",
     "\"\"\"\n",
+    "    \n",
     "    return Markdown(markdown_text)\n",
     "\n",
-    "\n",
     "# Display the formatted markdown\n",
     "display(format_llm_chip_info(current_state[\"values\"][\"info\"]))"
    ]
   },
@@ -198,8 +260,17 @@
    ]
   },
   {
-   "cell_type": "markdown",
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": []
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
    "metadata": {},
+   "outputs": [],
    "source": []
   }
  ],
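Note on the schema change above: the extraction schema now nests per-company fields ("name", "technologies", "market_share", "future_outlook", and the optional "key_powers") inside a top-level "companies" array instead of four flat string fields. The sketch below is a minimal, illustrative way to sanity-check an extraction result against that new shape using the third-party jsonschema package; the sample payload and the validation step are assumptions for illustration, not code from the notebook or from this patch.

# Illustrative check (not part of the patch): validate a sample result
# against the updated nested extraction schema using the `jsonschema` package.
from jsonschema import validate

schema = {
    "type": "object",
    "properties": {
        "companies": {
            "type": "array",
            "items": {
                "type": "object",
                "properties": {
                    "name": {"type": "string"},
                    "technologies": {"type": "string"},
                    "market_share": {"type": "string"},
                    "future_outlook": {"type": "string"},
                    "key_powers": {"type": "string"},
                },
                # "key_powers" stays optional, matching the patch
                "required": ["name", "technologies", "market_share", "future_outlook"],
            },
        }
    },
    "required": ["companies"],
}

# Hypothetical payload shaped like the notebook's current_state["values"]["info"]
sample_info = {
    "companies": [
        {
            "name": "NVIDIA",
            "technologies": "A100 and H100 GPUs",
            "market_share": "Over 80% of the AI chip market",
            "future_outlook": "Likely to maintain dominance",
            "key_powers": "Scale Economies, Network Economies",
        }
    ]
}

# Raises jsonschema.ValidationError if the payload does not match the schema
validate(instance=sample_info, schema=schema)
print("sample_info conforms to the updated extraction schema")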