From 18f5770de6b3df25196e29064a5370fe889da06b Mon Sep 17 00:00:00 2001 From: Dmitrii Cherkasov Date: Mon, 7 Jul 2025 17:25:10 -0700 Subject: [PATCH] Updates readme files. --- README.md | 150 ++++++++++++++++++++++++++++++++++++++++++--- libs/oci/README.md | 107 +++++++++++++++++--------------- 2 files changed, 198 insertions(+), 59 deletions(-) diff --git a/README.md b/README.md index d9c0351..d0bf491 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ -# 🦜️🔗 LangChain 🤝 Oracle Cloud Infrastructure (OCI) +# 🦜️🔗 LangChain 🤝 Oracle -This repository provides LangChain components for various OCI services. It aims to replace and expand upon the existing LangChain OCI components found in the `langchain-community` package in the LangChain repository. +Welcome to the official repository for LangChain integration with [Oracle Cloud Infrastructure (OCI)](https://cloud.oracle.com/). This library provides native LangChain components for interacting with Oracle's AI services—combining support for **OCI Generative AI** and **OCI Data Science**. ## Features @@ -8,11 +8,147 @@ This repository provides LangChain components for various OCI services. It aims - **Agents**: Includes Runnables to support [Oracle Generative AI Agents](https://www.oracle.com/artificial-intelligence/generative-ai/agents/), allowing you to leverage Generative AI Agents within LangChain and LangGraph. - **More to come**: This repository will continue to expand and offer additional components for various OCI services as development progresses. -**Note**: This repository will replace all OCI integrations currently present in the `langchain-community` package. Users are encouraged to migrate to this repository as soon as possible. +> This project merges and replaces earlier OCI integrations from the `langchain-community` repository and unifies contributions from Oracle's GenAI and Data Science teams. +> All integrations in this package assume that you have the credentials set up to connect with OCI services.
+ +--- ## Installation -You can install the `langchain-oracle` package from PyPI. + +```bash +pip install -U langchain-oci +``` + +--- + +## Quick Start + +This repository includes two main integration categories: + +- [OCI Generative AI](#oci-generative-ai-examples) +- [OCI Data Science (Model Deployment)](#oci-data-science-model-deployment-examples) + + +--- + +## OCI Generative AI Examples + +### 1. Use a Chat Model + +`ChatOCIGenAI` class exposes chat models from OCI Generative AI. + +```python +from langchain_oci import ChatOCIGenAI + +llm = ChatOCIGenAI() +llm.invoke("Sing a ballad of LangChain.") +``` + +### 2. Use a Completion Model +`OCIGenAI` class exposes LLMs from OCI Generative AI. + +```python +from langchain_oci import OCIGenAI + +llm = OCIGenAI() +llm.invoke("The meaning of life is") +``` + +### 3. Use an Embedding Model +`OCIGenAIEmbeddings` class exposes embeddings from OCI Generative AI. + +```python +from langchain_oci import OCIGenAIEmbeddings + +embeddings = OCIGenAIEmbeddings() +embeddings.embed_query("What is the meaning of life?") +``` + + +## OCI Data Science Model Deployment Examples + +### 1. Use a Chat Model + +You may instantiate the OCI Data Science model with the generic `ChatOCIModelDeployment` or framework specific class like `ChatOCIModelDeploymentVLLM`. + +```python +from langchain_oci.chat_models import ChatOCIModelDeployment, ChatOCIModelDeploymentVLLM + +# Create an instance of OCI Model Deployment Endpoint +# Replace the endpoint uri with your own +endpoint = "https://modeldeployment..oci.customer-oci.com//predict" + +messages = [ + ( + "system", + "You are a helpful assistant that translates English to French. Translate the user sentence.", + ), + ("human", "I love programming."), +] + +chat = ChatOCIModelDeployment( + endpoint=endpoint, + streaming=True, + max_retries=1, + model_kwargs={ + "temperature": 0.2, + "max_tokens": 512, + }, # other model params... 
+ default_headers={ + "route": "/v1/chat/completions", + # other request headers ... + }, +) +chat.invoke(messages) + +chat_vllm = ChatOCIModelDeploymentVLLM(endpoint=endpoint) +chat_vllm.invoke(messages) +``` + +### 2. Use a Completion Model +You may instantiate the OCI Data Science model with `OCIModelDeploymentLLM` or `OCIModelDeploymentVLLM`. + +```python +from langchain_oci.llms import OCIModelDeploymentLLM, OCIModelDeploymentVLLM + +# Create an instance of OCI Model Deployment Endpoint +# Replace the endpoint uri and model name with your own +endpoint = "https://modeldeployment.<region>.oci.customer-oci.com/<md_ocid>/predict" + +llm = OCIModelDeploymentLLM( + endpoint=endpoint, + model="odsc-llm", +) +llm.invoke("Who is the first president of United States?") + +vllm = OCIModelDeploymentVLLM( + endpoint=endpoint, +) +vllm.invoke("Who is the first president of United States?") +``` + +### 3. Use an Embedding Model +You may instantiate the OCI Data Science model with the `OCIModelDeploymentEndpointEmbeddings`. + +```python +from langchain_oci.embeddings import OCIModelDeploymentEndpointEmbeddings + +# Create an instance of OCI Model Deployment Endpoint +# Replace the endpoint uri with your own
endpoint = "https://modeldeployment.<region>.oci.customer-oci.com/<md_ocid>/predict" + +embeddings = OCIModelDeploymentEndpointEmbeddings( + endpoint=endpoint, +) + +query = "Hello World!" +embeddings.embed_query(query) + +documents = ["This is a sample document", "and here is another one"] +embeddings.embed_documents(documents) +``` + ## Contributing @@ -27,8 +163,4 @@ Please consult the [security guide](./SECURITY.md) for our responsible security Copyright (c) 2025 Oracle and/or its affiliates. Released under the Universal Permissive License v1.0 as shown at -<https://oss.oracle.com/licenses/upl/>.
- -release: -/github subscribe langchain-ai/langchain-oci releases workflows:{name:"release"} -/github unsubscribe langchain-ai/langchain-oci issues pulls commits deployments + diff --git a/libs/oci/README.md b/libs/oci/README.md index d06cb7e..2266d10 100644 --- a/libs/oci/README.md +++ b/libs/oci/README.md @@ -9,9 +9,21 @@ pip install -U langchain-oci ``` All integrations in this package assume that you have the credentials setup to connect with oci services. -## Chat Models +--- -### OCI Generative AI +## Quick Start + +This repository includes two main integration categories: + +- [OCI Generative AI](#oci-generative-ai-examples) +- [OCI Data Science (Model Deployment)](#oci-data-science-model-deployment-examples) + + +--- + +## OCI Generative AI Examples + +### 1. Use a Chat Model `ChatOCIGenAI` class exposes chat models from OCI Generative AI. @@ -22,9 +34,32 @@ llm = ChatOCIGenAI() llm.invoke("Sing a ballad of LangChain.") ``` -### OCI Data Science +### 2. Use a Completion Model +`OCIGenAI` class exposes LLMs from OCI Generative AI. + +```python +from langchain_oci import OCIGenAI + +llm = OCIGenAI() +llm.invoke("The meaning of life is") +``` -You may also instantiate the OCI Data Science model with the generic `ChatOCIModelDeployment` or framework specific class like `ChatOCIModelDeploymentVLLM`. +### 3. Use an Embedding Model +`OCIGenAIEmbeddings` class exposes embeddings from OCI Generative AI. + +```python +from langchain_oci import OCIGenAIEmbeddings + +embeddings = OCIGenAIEmbeddings() +embeddings.embed_query("What is the meaning of life?") +``` + + +## OCI Data Science Model Deployment Examples + +### 1. Use a Chat Model + +You may instantiate the OCI Data Science model with the generic `ChatOCIModelDeployment` or framework specific class like `ChatOCIModelDeploymentVLLM`. 
```python from langchain_oci.chat_models import ChatOCIModelDeployment, ChatOCIModelDeploymentVLLM @@ -60,22 +95,30 @@ chat_vllm = ChatOCIModelDeploymentVLLM(endpoint=endpoint) chat_vllm.invoke(messages) ``` -## Embeddings +### 2. Use a Completion Model +You may instantiate the OCI Data Science model with `OCIModelDeploymentLLM` or `OCIModelDeploymentVLLM`. -### OCI Generative AI +```python +from langchain_oci.llms import OCIModelDeploymentLLM, OCIModelDeploymentVLLM -`OCIGenAIEmbeddings` class exposes embeddings from OCI Generative AI. +# Create an instance of OCI Model Deployment Endpoint +# Replace the endpoint uri and model name with your own +endpoint = "https://modeldeployment..oci.customer-oci.com//predict" -```python -from langchain_oci import OCIGenAIEmbeddings +llm = OCIModelDeploymentLLM( + endpoint=endpoint, + model="odsc-llm", +) +llm.invoke("Who is the first president of United States?") -embeddings = OCIGenAIEmbeddings() -embeddings.embed_query("What is the meaning of life?") +vllm = OCIModelDeploymentVLLM( + endpoint=endpoint, +) +vllm.invoke("Who is the first president of United States?") ``` -### OCI Data Science - -You may also instantiate the OCI Data Science model with the `OCIModelDeploymentEndpointEmbeddings`. +### 3. Use an Embedding Model +You may instantiate the OCI Data Science model with the `OCIModelDeploymentEndpointEmbeddings`. ```python from langchain_oci.embeddings import OCIModelDeploymentEndpointEmbeddings @@ -94,39 +137,3 @@ embeddings.embed_query(query) documents = ["This is a sample document", "and here is another one"] embeddings.embed_documents(documents) ``` - -## LLMs - -### OCI Generative AI - -`OCIGenAI` class exposes LLMs from OCI Generative AI. - -```python -from langchain_oci import OCIGenAI - -llm = OCIGenAI() -llm.invoke("The meaning of life is") -``` - -### OCI Data Science - -You may also instantiate the OCI Data Science model with `OCIModelDeploymentLLM` or `OCIModelDeploymentVLLM`. 
- -```python -from langchain_oci.llms import OCIModelDeploymentLLM, OCIModelDeploymentVLLM - -# Create an instance of OCI Model Deployment Endpoint -# Replace the endpoint uri and model name with your own -endpoint = "https://modeldeployment..oci.customer-oci.com//predict" - -llm = OCIModelDeploymentLLM( - endpoint=endpoint, - model="odsc-llm", -) -llm.invoke("Who is the first president of United States?") - -vllm = OCIModelDeploymentVLLM( - endpoint=endpoint, -) -vllm.invoke("Who is the first president of United States?") -```