Commit
Merge branch 'master' into master
rbrugaro authored Jun 26, 2024
2 parents e6694f1 + 1325471 commit 630c703
Showing 662 changed files with 22,877 additions and 242,816 deletions.
53 changes: 46 additions & 7 deletions .github/scripts/check_diff.py
@@ -1,7 +1,11 @@
import json
import sys
import os
from typing import Dict
from typing import Dict, List, Set

import tomllib
from collections import defaultdict
import glob

LANGCHAIN_DIRS = [
"libs/core",
@@ -11,6 +15,38 @@
"libs/experimental",
]

def all_package_dirs() -> Set[str]:
    return {"/".join(path.split("/")[:-1]) for path in glob.glob("./libs/**/pyproject.toml", recursive=True)}


def dependents_graph() -> dict:
    dependents = defaultdict(set)

    for path in glob.glob("./libs/**/pyproject.toml", recursive=True):
        if "template" in path:
            continue
        with open(path, "rb") as f:
            pyproject = tomllib.load(f)['tool']['poetry']
        pkg_dir = "libs" + "/".join(path.split("libs")[1].split("/")[:-1])
        for dep in pyproject['dependencies']:
            if "langchain" in dep:
                dependents[dep].add(pkg_dir)
    return dependents


def add_dependents(dirs_to_eval: Set[str], dependents: dict) -> List[str]:
    updated = set()
    for dir_ in dirs_to_eval:
        # handle core manually because it has so many dependents
        if "core" in dir_:
            updated.add(dir_)
            continue
        pkg = "langchain-" + dir_.split("/")[-1]
        updated.update(dependents[pkg])
        updated.add(dir_)
    return list(updated)


if __name__ == "__main__":
    files = sys.argv[1:]

@@ -21,10 +57,11 @@
    }
    docs_edited = False

    if len(files) == 300:
    if len(files) >= 300:
        # max diff length is 300 files - there are likely files missing
        raise ValueError("Max diff reached. Please manually run CI on changed libs.")

        dirs_to_run["lint"] = all_package_dirs()
        dirs_to_run["test"] = all_package_dirs()
        dirs_to_run["extended-test"] = set(LANGCHAIN_DIRS)
    for file in files:
        if any(
            file.startswith(dir_)
@@ -81,11 +118,13 @@
            docs_edited = True
            dirs_to_run["lint"].add(".")

    dependents = dependents_graph()

    outputs = {
        "dirs-to-lint": list(
            dirs_to_run["lint"] | dirs_to_run["test"] | dirs_to_run["extended-test"]
        "dirs-to-lint": add_dependents(
            dirs_to_run["lint"] | dirs_to_run["test"] | dirs_to_run["extended-test"], dependents
        ),
        "dirs-to-test": list(dirs_to_run["test"] | dirs_to_run["extended-test"]),
        "dirs-to-test": add_dependents(dirs_to_run["test"] | dirs_to_run["extended-test"], dependents),
        "dirs-to-extended-test": list(dirs_to_run["extended-test"]),
        "docs-edited": "true" if docs_edited else "",
    }
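
To see how the new helpers in check_diff.py fit together, here is a minimal, self-contained sketch (not part of the diff): add_dependents is copied from the change above, while the dependents mapping is a hypothetical stand-in for the graph that dependents_graph() builds from the pyproject.toml files.

from collections import defaultdict
from typing import List, Set


def add_dependents(dirs_to_eval: Set[str], dependents: dict) -> List[str]:
    updated = set()
    for dir_ in dirs_to_eval:
        # handle core manually because it has so many dependents
        if "core" in dir_:
            updated.add(dir_)
            continue
        pkg = "langchain-" + dir_.split("/")[-1]
        updated.update(dependents[pkg])
        updated.add(dir_)
    return list(updated)


# Hypothetical dependents graph: package name -> directories that depend on it.
dependents = defaultdict(set)
dependents["langchain-community"].update({"libs/langchain", "libs/experimental"})

# A change in libs/community now also queues CI for its dependents.
print(sorted(add_dependents({"libs/community"}, dependents)))
# ['libs/community', 'libs/experimental', 'libs/langchain']
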
8 changes: 7 additions & 1 deletion .github/workflows/_integration_test.yml
@@ -12,7 +12,6 @@ env:

jobs:
build:
environment: Scheduled testing
defaults:
run:
working-directory: ${{ inputs.working-directory }}
Expand Down Expand Up @@ -53,8 +52,15 @@ jobs:
shell: bash
env:
AI21_API_KEY: ${{ secrets.AI21_API_KEY }}
FIREWORKS_API_KEY: ${{ secrets.FIREWORKS_API_KEY }}
GOOGLE_API_KEY: ${{ secrets.GOOGLE_API_KEY }}
ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
AZURE_OPENAI_API_VERSION: ${{ secrets.AZURE_OPENAI_API_VERSION }}
AZURE_OPENAI_API_BASE: ${{ secrets.AZURE_OPENAI_API_BASE }}
AZURE_OPENAI_API_KEY: ${{ secrets.AZURE_OPENAI_API_KEY }}
AZURE_OPENAI_CHAT_DEPLOYMENT_NAME: ${{ secrets.AZURE_OPENAI_CHAT_DEPLOYMENT_NAME }}
AZURE_OPENAI_LLM_DEPLOYMENT_NAME: ${{ secrets.AZURE_OPENAI_LLM_DEPLOYMENT_NAME }}
AZURE_OPENAI_EMBEDDINGS_DEPLOYMENT_NAME: ${{ secrets.AZURE_OPENAI_EMBEDDINGS_DEPLOYMENT_NAME }}
MISTRAL_API_KEY: ${{ secrets.MISTRAL_API_KEY }}
TOGETHER_API_KEY: ${{ secrets.TOGETHER_API_KEY }}
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
6 changes: 5 additions & 1 deletion .github/workflows/_release.yml
@@ -202,7 +202,7 @@ jobs:
poetry run python -c "import $IMPORT_NAME; print(dir($IMPORT_NAME))"
- name: Import test dependencies
run: poetry install --with test,test_integration
run: poetry install --with test
working-directory: ${{ inputs.working-directory }}

# Overwrite the local version of the package with the test PyPI version.
Expand Down Expand Up @@ -245,6 +245,10 @@ jobs:
with:
credentials_json: '${{ secrets.GOOGLE_CREDENTIALS }}'

- name: Import integration test dependencies
run: poetry install --with test,test_integration
working-directory: ${{ inputs.working-directory }}

- name: Run integration tests
if: ${{ startsWith(inputs.working-directory, 'libs/partners/') }}
env:
2 changes: 1 addition & 1 deletion .github/workflows/check_diffs.yml
@@ -26,7 +26,7 @@ jobs:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
with:
python-version: '3.10'
python-version: '3.11'
- id: files
uses: Ana06/[email protected]
- id: set-matrix
8 changes: 0 additions & 8 deletions .github/workflows/scheduled_test.yml
@@ -31,7 +31,6 @@ jobs:
- "libs/partners/google-vertexai"
- "libs/partners/google-genai"
- "libs/partners/aws"
- "libs/partners/nvidia-ai-endpoints"

steps:
- uses: actions/checkout@v4
@@ -41,10 +40,6 @@
with:
repository: langchain-ai/langchain-google
path: langchain-google
- uses: actions/checkout@v4
with:
repository: langchain-ai/langchain-nvidia
path: langchain-nvidia
- uses: actions/checkout@v4
with:
repository: langchain-ai/langchain-cohere
@@ -59,11 +54,9 @@
rm -rf \
langchain/libs/partners/google-genai \
langchain/libs/partners/google-vertexai \
langchain/libs/partners/nvidia-ai-endpoints \
langchain/libs/partners/cohere
mv langchain-google/libs/genai langchain/libs/partners/google-genai
mv langchain-google/libs/vertexai langchain/libs/partners/google-vertexai
mv langchain-nvidia/libs/ai-endpoints langchain/libs/partners/nvidia-ai-endpoints
mv langchain-cohere/libs/cohere langchain/libs/partners/cohere
mv langchain-aws/libs/aws langchain/libs/partners/aws
@@ -123,7 +116,6 @@ jobs:
rm -rf \
langchain/libs/partners/google-genai \
langchain/libs/partners/google-vertexai \
langchain/libs/partners/nvidia-ai-endpoints \
langchain/libs/partners/cohere \
langchain/libs/partners/aws
2 changes: 1 addition & 1 deletion README.md
@@ -123,7 +123,7 @@ Please see [here](https://python.langchain.com) for full documentation, which in
- [🦜🛠️ LangSmith](https://docs.smith.langchain.com/): Tracing and evaluating your language model applications and intelligent agents to help you move from prototype to production.
- [🦜🕸️ LangGraph](https://langchain-ai.github.io/langgraph/): Creating stateful, multi-actor applications with LLMs, built on top of (and intended to be used with) LangChain primitives.
- [🦜🏓 LangServe](https://python.langchain.com/docs/langserve): Deploying LangChain runnables and chains as REST APIs.
- [LangChain Templates](https://python.langchain.com/v0.2/docs/templates/): Example applications hosted with LangServe.
- [LangChain Templates](https://python.langchain.com/v0.2/docs/templates/): Example applications hosted with LangServe.


## 💁 Contributing

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion docs/Makefile
@@ -61,7 +61,7 @@ render:
$(PYTHON) scripts/notebook_convert.py $(INTERMEDIATE_DIR) $(OUTPUT_NEW_DOCS_DIR)

md-sync:
rsync -avm --include="*/" --include="*.mdx" --include="*.md" --include="*.png" --exclude="*" $(INTERMEDIATE_DIR)/ $(OUTPUT_NEW_DOCS_DIR)
rsync -avm --include="*/" --include="*.mdx" --include="*.md" --include="*.png" --include="*/_category_.yml" --exclude="*" $(INTERMEDIATE_DIR)/ $(OUTPUT_NEW_DOCS_DIR)

generate-references:
$(PYTHON) scripts/generate_api_reference_links.py --docs_dir $(OUTPUT_NEW_DOCS_DIR)
45 changes: 43 additions & 2 deletions docs/api_reference/create_api_rst.py
@@ -10,12 +10,21 @@
from typing import Dict, List, Literal, Optional, Sequence, TypedDict, Union

import toml
import typing_extensions
from langchain_core.runnables import Runnable, RunnableSerializable
from pydantic import BaseModel

ROOT_DIR = Path(__file__).parents[2].absolute()
HERE = Path(__file__).parent

ClassKind = Literal["TypedDict", "Regular", "Pydantic", "enum"]
ClassKind = Literal[
    "TypedDict",
    "Regular",
    "Pydantic",
    "enum",
    "RunnablePydantic",
    "RunnableNonPydantic",
]


class ClassInfo(TypedDict):
@@ -69,8 +78,36 @@ def _load_module_members(module_path: str, namespace: str) -> ModuleMembers:
            continue

        if inspect.isclass(type_):
            if type(type_) == typing._TypedDictMeta: # type: ignore
            # The classification of the class is used to select a template
            # for the object when rendering the documentation.
            # See `templates` directory for defined templates.
            # This is a hacky solution to distinguish between different
            # kinds of things that we want to render.
            if type(type_) is typing_extensions._TypedDictMeta: # type: ignore
                kind: ClassKind = "TypedDict"
            elif type(type_) is typing._TypedDictMeta: # type: ignore
                kind: ClassKind = "TypedDict"
            elif (
                issubclass(type_, Runnable)
                and issubclass(type_, BaseModel)
                and type_ is not Runnable
            ):
                # RunnableSerializable subclasses from Pydantic,
                # for which we use autodoc_pydantic for rendering.
                # We need to distinguish these from regular Pydantic
                # classes so we can hide inherited Runnable methods
                # and provide a link to the Runnable interface from
                # the template.
                kind = "RunnablePydantic"
            elif (
                issubclass(type_, Runnable)
                and not issubclass(type_, BaseModel)
                and type_ is not Runnable
            ):
                # These are not pydantic classes but are Runnable.
                # We'll hide all the inherited methods from Runnable
                # but use a regular class template to render.
                kind = "RunnableNonPydantic"
            elif issubclass(type_, Enum):
                kind = "enum"
            elif issubclass(type_, BaseModel):
@@ -251,6 +288,10 @@ def _construct_doc(
template = "enum.rst"
elif class_["kind"] == "Pydantic":
template = "pydantic.rst"
elif class_["kind"] == "RunnablePydantic":
template = "runnable_pydantic.rst"
elif class_["kind"] == "RunnableNonPydantic":
template = "runnable_non_pydantic.rst"
else:
template = "class.rst"

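
The classification added to create_api_rst.py boils down to checking whether a class is a Runnable, a Pydantic model, or both, and picking a documentation template accordingly. A rough standalone sketch of that rule follows (illustrative only: pick_template is a made-up helper, and the TypedDict and Enum branches handled above are omitted for brevity).

from langchain_core.runnables import Runnable
from pydantic import BaseModel


def pick_template(cls: type) -> str:
    # Runnable + BaseModel: rendered with autodoc_pydantic, inherited Runnable methods hidden.
    if issubclass(cls, Runnable) and issubclass(cls, BaseModel) and cls is not Runnable:
        return "runnable_pydantic.rst"
    # Runnable but not a Pydantic model: regular class template, Runnable methods hidden.
    if issubclass(cls, Runnable) and cls is not Runnable:
        return "runnable_non_pydantic.rst"
    # Plain Pydantic model.
    if issubclass(cls, BaseModel):
        return "pydantic.rst"
    return "class.rst"
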
2 changes: 1 addition & 1 deletion docs/api_reference/guide_imports.json

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion docs/api_reference/templates/class.rst
@@ -33,4 +33,4 @@
{% endblock %}


.. example_links:: {{ objname }}
.. example_links:: {{ objname }}
2 changes: 2 additions & 0 deletions docs/api_reference/templates/pydantic.rst
@@ -15,6 +15,8 @@
:member-order: groupwise
:show-inheritance: True
:special-members: __call__
:exclude-members: construct, copy, dict, from_orm, parse_file, parse_obj, parse_raw, schema, schema_json, update_forward_refs, validate, json, is_lc_serializable, to_json, to_json_not_implemented, lc_secrets, lc_attributes, lc_id, get_lc_namespace


{% block attributes %}
{% endblock %}
39 changes: 39 additions & 0 deletions docs/api_reference/templates/runnable_non_pydantic.rst
@@ -0,0 +1,39 @@
:mod:`{{module}}`.{{objname}}
{{ underline }}==============

.. NOTE:: {{objname}} implements the standard :py:class:`Runnable Interface <langchain_core.runnables.base.Runnable>`. 🏃


.. currentmodule:: {{ module }}

.. autoclass:: {{ objname }}

{% block attributes %}
{% if attributes %}
.. rubric:: {{ _('Attributes') }}

.. autosummary::
{% for item in attributes %}
~{{ name }}.{{ item }}
{%- endfor %}
{% endif %}
{% endblock %}

{% block methods %}
{% if methods %}
.. rubric:: {{ _('Methods') }}

.. autosummary::
{% for item in methods %}
~{{ name }}.{{ item }}
{%- endfor %}

{% for item in methods %}
.. automethod:: {{ name }}.{{ item }}
{%- endfor %}

{% endif %}
{% endblock %}


.. example_links:: {{ objname }}
22 changes: 22 additions & 0 deletions docs/api_reference/templates/runnable_pydantic.rst
@@ -0,0 +1,22 @@
:mod:`{{module}}`.{{objname}}
{{ underline }}==============

.. NOTE:: {{objname}} implements the standard :py:class:`Runnable Interface <langchain_core.runnables.base.Runnable>`. 🏃

.. currentmodule:: {{ module }}

.. autopydantic_model:: {{ objname }}
:model-show-json: False
:model-show-config-summary: False
:model-show-validator-members: False
:model-show-field-summary: False
:field-signature-prefix: param
:members:
:undoc-members:
:inherited-members:
:member-order: groupwise
:show-inheritance: True
:special-members: __call__
:exclude-members: construct, copy, dict, from_orm, parse_file, parse_obj, parse_raw, schema, schema_json, update_forward_refs, validate, json, is_lc_serializable, to_json, to_json_not_implemented, lc_secrets, lc_attributes, lc_id, get_lc_namespace, invoke, ainvoke, batch, abatch, batch_as_completed, abatch_as_completed, astream_log, stream, astream, astream_events, transform, atransform, get_output_schema, get_prompts, configurable_fields, configurable_alternatives, config_schema, map, pick, pipe, with_listeners, with_alisteners, with_config, with_fallbacks, with_types, with_retry, InputType, OutputType, config_specs, output_schema, get_input_schema, get_graph, get_name, input_schema, name, bind, assign

.. example_links:: {{ objname }}
