Commit fe7d0f6

chore(internal): codegen related update (#396)

stainless-app[bot] authored and committed
1 parent adbf12e · commit fe7d0f6

File tree: 9 files changed, +63 −66 lines
examples/tracing/groq/groq_tracing.ipynb

Lines changed: 2 additions & 5 deletions
@@ -95,14 +95,11 @@
   "source": [
    "chat_completion = groq_client.chat.completions.create(\n",
    "    messages=[\n",
-   "        {\n",
-   "            \"role\": \"system\",\n",
-   "            \"content\": \"You are a helpful assistant.\"\n",
-   "        },\n",
+   "        {\"role\": \"system\", \"content\": \"You are a helpful assistant.\"},\n",
    "        {\n",
    "            \"role\": \"user\",\n",
    "            \"content\": \"Explain the importance of fast language models\",\n",
-   "        }\n",
+   "        },\n",
    "    ],\n",
    "    model=\"llama3-8b-8192\",\n",
    ")"

examples/tracing/mistral/mistral_tracing.ipynb

Lines changed: 6 additions & 6 deletions
@@ -92,12 +92,12 @@
   "source": [
    "response = mistral_client.chat.complete(\n",
    "    model=\"mistral-large-latest\",\n",
-   "    messages = [\n",
+   "    messages=[\n",
    "        {\n",
    "            \"role\": \"user\",\n",
    "            \"content\": \"What is the best French cheese?\",\n",
    "        },\n",
-   "    ]\n",
+   "    ],\n",
    ")"
   ]
  },
@@ -109,14 +109,14 @@
   "outputs": [],
   "source": [
    "stream_response = mistral_client.chat.stream(\n",
-   "    model = \"mistral-large-latest\",\n",
-   "    messages = [\n",
+   "    model=\"mistral-large-latest\",\n",
+   "    messages=[\n",
    "        {\n",
    "            \"role\": \"user\",\n",
    "            \"content\": \"What's the meaning of life?\",\n",
    "        },\n",
-   "    ]\n",
-   ") "
+   "    ],\n",
+   ")"
   ]
  },
  {
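
For readability, the first Mistral cell now reads as below; a sketch assuming the v1 `mistralai` SDK and a `MISTRAL_API_KEY` environment variable (the `chat.stream` cell in the second hunk follows the same shape):

import os

from mistralai import Mistral  # assumes the v1 mistralai SDK

mistral_client = Mistral(api_key=os.environ["MISTRAL_API_KEY"])

response = mistral_client.chat.complete(
    model="mistral-large-latest",
    messages=[
        {
            "role": "user",
            "content": "What is the best French cheese?",
        },
    ],
)
print(response.choices[0].message.content)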

examples/tracing/ollama/ollama_tracing.ipynb

Lines changed: 1 addition & 4 deletions
@@ -94,10 +94,7 @@
   "metadata": {},
   "outputs": [],
   "source": [
-   "chat = ChatOllama(\n",
-   "    model=\"llama3.1\",\n",
-   "    callbacks=[openlayer_handler]\n",
-   ")"
+   "chat = ChatOllama(model=\"llama3.1\", callbacks=[openlayer_handler])"
   ]
  },
  {

examples/tracing/vertex-ai/vertex_ai_tracing.ipynb

Lines changed: 1 addition & 4 deletions
@@ -97,10 +97,7 @@
   "metadata": {},
   "outputs": [],
   "source": [
-   "chat = ChatVertexAI(\n",
-   "    model=\"gemini-1.5-flash-001\",\n",
-   "    callbacks=[openlayer_handler]\n",
-   ")"
+   "chat = ChatVertexAI(model=\"gemini-1.5-flash-001\", callbacks=[openlayer_handler])"
   ]
  },
  {
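
This cell and the Ollama one above follow the same LangChain pattern: the Openlayer callback handler is attached at construction time, so every invocation is traced. A minimal sketch of the Ollama variant; the `OpenlayerHandler` import path is an assumption, and the notebook may import `ChatOllama` from `langchain_community` rather than `langchain_ollama`:

from langchain_ollama import ChatOllama

# Assumed import path for the Openlayer LangChain callback handler,
# which the notebooks instantiate in an earlier cell.
from openlayer.lib.integrations.langchain_callback import OpenlayerHandler

openlayer_handler = OpenlayerHandler()
chat = ChatOllama(model="llama3.1", callbacks=[openlayer_handler])
chat.invoke("What's the meaning of life?")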

requirements-dev.lock

Lines changed: 20 additions & 1 deletion
@@ -19,6 +19,9 @@ argcomplete==3.1.2
 certifi==2023.7.22
     # via httpcore
     # via httpx
+    # via requests
+charset-normalizer==3.4.0
+    # via requests
 colorlog==6.7.0
     # via nox
 dirty-equals==0.6.0
@@ -41,6 +44,7 @@ httpx==0.25.2
 idna==3.4
     # via anyio
     # via httpx
+    # via requests
 importlib-metadata==7.0.0
 iniconfig==2.0.0
     # via pytest
@@ -56,7 +60,9 @@ nodeenv==1.8.0
     # via pyright
 nox==2023.4.22
 numpy==1.26.4
+    # via openlayer
     # via pandas
+    # via pyarrow
 packaging==23.2
     # via nox
     # via pytest
@@ -66,13 +72,15 @@ platformdirs==3.11.0
     # via virtualenv
 pluggy==1.5.0
     # via pytest
+pyarrow==14.0.1
+    # via openlayer
 pydantic==2.9.2
     # via openlayer
 pydantic-core==2.23.4
     # via pydantic
 pygments==2.18.0
     # via rich
-pyright==1.1.380
+pyright==1.1.389
 pytest==8.3.3
     # via pytest-asyncio
 pytest-asyncio==0.24.0
@@ -82,6 +90,12 @@ python-dateutil==2.8.2
 pytz==2023.3.post1
     # via dirty-equals
     # via pandas
+pyyaml==6.0.2
+    # via openlayer
+requests==2.32.3
+    # via requests-toolbelt
+requests-toolbelt==1.0.0
+    # via openlayer
 respx==0.20.2
 rich==13.7.1
 ruff==0.6.9
@@ -97,14 +111,19 @@ time-machine==2.9.0
 tomli==2.0.2
     # via mypy
     # via pytest
+tqdm==4.67.1
+    # via openlayer
 typing-extensions==4.12.2
     # via anyio
     # via mypy
     # via openlayer
     # via pydantic
     # via pydantic-core
+    # via pyright
 tzdata==2024.1
     # via pandas
+urllib3==2.2.3
+    # via requests
 virtualenv==20.24.5
     # via nox
 zipp==3.17.0

requirements.lock

Lines changed: 18 additions & 0 deletions
@@ -17,6 +17,9 @@ anyio==4.4.0
 certifi==2023.7.22
     # via httpcore
     # via httpx
+    # via requests
+charset-normalizer==3.4.0
+    # via requests
 distro==1.8.0
     # via openlayer
 exceptiongroup==1.2.2
@@ -30,10 +33,15 @@ httpx==0.25.2
 idna==3.4
     # via anyio
     # via httpx
+    # via requests
 numpy==1.26.4
+    # via openlayer
     # via pandas
+    # via pyarrow
 pandas==2.2.2
     # via openlayer
+pyarrow==14.0.1
+    # via openlayer
 pydantic==2.9.2
     # via openlayer
 pydantic-core==2.23.4
@@ -42,16 +50,26 @@ python-dateutil==2.9.0.post0
     # via pandas
 pytz==2024.1
     # via pandas
+pyyaml==6.0.2
+    # via openlayer
+requests==2.32.3
+    # via requests-toolbelt
+requests-toolbelt==1.0.0
+    # via openlayer
 six==1.16.0
     # via python-dateutil
 sniffio==1.3.0
     # via anyio
     # via httpx
     # via openlayer
+tqdm==4.67.1
+    # via openlayer
 typing-extensions==4.12.2
     # via anyio
     # via openlayer
     # via pydantic
     # via pydantic-core
 tzdata==2024.1
     # via pandas
+urllib3==2.2.3
+    # via requests

src/openlayer/lib/__init__.py

Lines changed: 1 addition & 2 deletions
@@ -1,5 +1,4 @@
-"""Openlayer lib.
-"""
+"""Openlayer lib."""
 
 __all__ = [
     "trace",

src/openlayer/lib/core/metrics.py

Lines changed: 10 additions & 32 deletions
@@ -100,8 +100,7 @@ def _parse_args(self) -> None:
             type=str,
             required=False,
             default="",
-            help="The name of the dataset to compute the metric on. Runs on all "
-            "datasets if not provided.",
+            help="The name of the dataset to compute the metric on. Runs on all " "datasets if not provided.",
         )
 
         # Parse the arguments
@@ -133,9 +132,7 @@ def _load_datasets(self) -> None:
         dataset_names = [dataset["name"] for dataset in datasets_list]
         if self.dataset_name:
             if self.dataset_name not in dataset_names:
-                raise ValueError(
-                    f"Dataset {self.dataset_name} not found in the openlayer.json."
-                )
+                raise ValueError(f"Dataset {self.dataset_name} not found in the openlayer.json.")
             dataset_names = [self.dataset_name]
         output_directory = model["outputDirectory"]
         # Read the outputs directory for dataset folders. For each, load
@@ -152,11 +149,7 @@ def _load_datasets(self) -> None:
                     dataset_config = json.load(f)
                 # Merge with the dataset fields from the openlayer.json
                 dataset_dict = next(
-                    (
-                        item
-                        for item in datasets_list
-                        if item["name"] == dataset_folder
-                    ),
+                    (item for item in datasets_list if item["name"] == dataset_folder),
                     None,
                 )
                 dataset_config = {**dataset_dict, **dataset_config}
@@ -166,9 +159,7 @@ def _load_datasets(self) -> None:
                     dataset_df = pd.read_csv(os.path.join(dataset_path, "dataset.csv"))
                     data_format = "csv"
                 elif os.path.exists(os.path.join(dataset_path, "dataset.json")):
-                    dataset_df = pd.read_json(
-                        os.path.join(dataset_path, "dataset.json"), orient="records"
-                    )
+                    dataset_df = pd.read_json(os.path.join(dataset_path, "dataset.json"), orient="records")
                     data_format = "json"
                 else:
                     raise ValueError(f"No dataset found in {dataset_folder}.")
@@ -183,14 +174,10 @@ def _load_datasets(self) -> None:
                     )
                 )
         else:
-            raise ValueError(
-                "No model found in the openlayer.json file. Cannot compute metric."
-            )
+            raise ValueError("No model found in the openlayer.json file. Cannot compute metric.")
 
         if not datasets:
-            raise ValueError(
-                "No datasets found in the openlayer.json file. Cannot compute metric."
-            )
+            raise ValueError("No datasets found in the openlayer.json file. Cannot compute metric.")
 
         self.datasets = datasets
 
@@ -243,13 +230,8 @@ def compute(self, datasets: List[Dataset]) -> None:
         """Compute the metric on the model outputs."""
         for dataset in datasets:
             # Check if the metric has already been computed
-            if os.path.exists(
-                os.path.join(dataset.output_path, "metrics", f"{self.key}.json")
-            ):
-                print(
-                    f"Metric ({self.key}) already computed on {dataset.name}. "
-                    "Skipping."
-                )
+            if os.path.exists(os.path.join(dataset.output_path, "metrics", f"{self.key}.json")):
+                print(f"Metric ({self.key}) already computed on {dataset.name}. " "Skipping.")
                 continue
 
             try:
@@ -276,9 +258,7 @@ def compute_on_dataset(self, dataset: Dataset) -> MetricReturn:
         """Compute the metric on a specific dataset."""
         pass
 
-    def _write_metric_return_to_file(
-        self, metric_return: MetricReturn, output_dir: str
-    ) -> None:
+    def _write_metric_return_to_file(self, metric_return: MetricReturn, output_dir: str) -> None:
         """Write the metric return to a file."""
 
         # Create the directory if it doesn't exist
@@ -289,9 +269,7 @@ def _write_metric_return_to_file(
         # Convert the set to a list
         metric_return_dict["added_cols"] = list(metric_return.added_cols)
 
-        with open(
-            os.path.join(output_dir, f"{self.key}.json"), "w", encoding="utf-8"
-        ) as f:
+        with open(os.path.join(output_dir, f"{self.key}.json"), "w", encoding="utf-8") as f:
             json.dump(metric_return_dict, f, indent=4)
         print(f"Metric ({self.key}) value written to {output_dir}/{self.key}.json")
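
The reformatting also makes the metric API easier to read: a subclass supplies a `key` and implements `compute_on_dataset`, and the machinery above loads each dataset and writes the result to `metrics/{key}.json` under the dataset's output path. A hypothetical subclass; the base-class name (`BaseMetric`), the `dataset.df` attribute, and the `MetricReturn(value=..., added_cols=...)` constructor shape are assumptions, not confirmed by this diff:

# Hypothetical custom metric sketched against the API visible in this diff.
from openlayer.lib.core.metrics import BaseMetric, Dataset, MetricReturn


class RowCount(BaseMetric):
    """Toy metric: counts the rows in each dataset's output dataframe."""

    key = "row_count"  # result is written to metrics/row_count.json

    def compute_on_dataset(self, dataset: Dataset) -> MetricReturn:
        # dataset.df is assumed to hold the dataframe that _load_datasets
        # read from dataset.csv / dataset.json.
        return MetricReturn(value=len(dataset.df), added_cols=set())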

src/openlayer/lib/data/_upload.py

Lines changed: 4 additions & 12 deletions
@@ -105,9 +105,7 @@ def upload_blob_s3(
             fields = presigned_url_response.fields
             fields["file"] = (object_name, f, "application/x-tar")
             e = MultipartEncoder(fields=fields)
-            m = MultipartEncoderMonitor(
-                e, lambda monitor: t.update(min(t.total, monitor.bytes_read) - t.n)
-            )
+            m = MultipartEncoderMonitor(e, lambda monitor: t.update(min(t.total, monitor.bytes_read) - t.n))
             headers = {"Content-Type": m.content_type}
             res = requests.post(
                 presigned_url_response.url,
@@ -118,9 +116,7 @@
         )
         return res
 
-    def upload_blob_gcs(
-        self, file_path: str, presigned_url_response: PresignedURLCreateResponse
-    ):
+    def upload_blob_gcs(self, file_path: str, presigned_url_response: PresignedURLCreateResponse):
         """Generic method to upload data to Google Cloud Storage and create the
         appropriate resource in the backend.
         """
@@ -141,9 +137,7 @@
         )
         return res
 
-    def upload_blob_azure(
-        self, file_path: str, presigned_url_response: PresignedURLCreateResponse
-    ):
+    def upload_blob_azure(self, file_path: str, presigned_url_response: PresignedURLCreateResponse):
         """Generic method to upload data to Azure Blob Storage and create the
         appropriate resource in the backend.
         """
@@ -186,9 +180,7 @@ def upload_blob_local(
         with open(file_path, "rb") as f:
             fields = {"file": (object_name, f, "application/x-tar")}
             e = MultipartEncoder(fields=fields)
-            m = MultipartEncoderMonitor(
-                e, lambda monitor: t.update(min(t.total, monitor.bytes_read) - t.n)
-            )
+            m = MultipartEncoderMonitor(e, lambda monitor: t.update(min(t.total, monitor.bytes_read) - t.n))
             headers = {"Content-Type": m.content_type}
             res = requests.post(
                 presigned_url_response.url,
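
The two `MultipartEncoderMonitor` one-liners reformatted above drive a tqdm progress bar during a multipart upload. A self-contained sketch of the same pattern, with the file path and URL as placeholders; since `update()` takes a delta, subtracting `t.n` adds only the bytes not yet counted, and clamping to `t.total` keeps the bar from overshooting when the final multipart boundary is read:

import os

import requests
from requests_toolbelt import MultipartEncoder, MultipartEncoderMonitor
from tqdm import tqdm

file_path = "model.tar"  # placeholder
upload_url = "https://example.com/upload"  # placeholder presigned URL

size = os.path.getsize(file_path)
with tqdm(total=size, unit="B", unit_scale=True) as t, open(file_path, "rb") as f:
    encoder = MultipartEncoder(fields={"file": ("model.tar", f, "application/x-tar")})
    # The callback receives the monitor; bytes_read is cumulative.
    monitor = MultipartEncoderMonitor(encoder, lambda m: t.update(min(t.total, m.bytes_read) - t.n))
    requests.post(upload_url, data=monitor, headers={"Content-Type": monitor.content_type})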
