2 changes: 1 addition & 1 deletion nemoguardrails/actions/llm/generation.py
@@ -135,7 +135,7 @@ async def init(self):
self._init_flows_index(),
)

def _extract_user_message_example(self, flow: Flow) -> None:
def _extract_user_message_example(self, flow: Flow):
"""Heuristic to extract user message examples from a flow."""
elements = [
item
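The only change in this hunk is the return-type annotation on `_extract_user_message_example`. For context, a minimal standalone sketch of what an explicit `-> None` means to a strict type checker — assuming mypy-style settings such as `--disallow-incomplete-defs`, which may or may not match the project's actual configuration:

class Flow:  # stand-in for the real Flow type, only to keep the sketch self-contained
    ...


def incomplete(flow: Flow):  # flagged under --disallow-incomplete-defs:
    ...                      # "Function is missing a return type annotation"


def complete(flow: Flow) -> None:  # an explicit "-> None" satisfies the stricter setting
    ...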
14 changes: 9 additions & 5 deletions nemoguardrails/colang/runtime.py
@@ -34,28 +34,32 @@ def __init__(self, config: RailsConfig, verbose: bool = False):
# Register the actions with the dispatcher.
self.action_dispatcher = ActionDispatcher(
config_path=config.config_path,
import_paths=list(config.imported_paths.values()),
import_paths=list(
config.imported_paths.values() if config.imported_paths else []
),
)

if hasattr(self, "_run_output_rails_in_parallel_streaming"):
self.action_dispatcher.register_action(
self._run_output_rails_in_parallel_streaming,
getattr(self, "_run_output_rails_in_parallel_streaming"),
name="run_output_rails_in_parallel_streaming",
)

if hasattr(self, "_run_flows_in_parallel"):
self.action_dispatcher.register_action(
self._run_flows_in_parallel, name="run_flows_in_parallel"
getattr(self, "_run_flows_in_parallel"), name="run_flows_in_parallel"
)

if hasattr(self, "_run_input_rails_in_parallel"):
self.action_dispatcher.register_action(
self._run_input_rails_in_parallel, name="run_input_rails_in_parallel"
getattr(self, "_run_input_rails_in_parallel"),
name="run_input_rails_in_parallel",
)

if hasattr(self, "_run_output_rails_in_parallel"):
self.action_dispatcher.register_action(
self._run_output_rails_in_parallel, name="run_output_rails_in_parallel"
getattr(self, "_run_output_rails_in_parallel"),
name="run_output_rails_in_parallel",
)

# The list of additional parameters that can be passed to the actions.
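Two patterns recur in this file: `config.imported_paths.values()` is now guarded against `imported_paths` being `None`, and the optional `_run_*` actions are fetched with `getattr` after the existing `hasattr` checks, so that methods defined only on subclasses do not show up as unknown attributes during static analysis. A minimal, runnable sketch of the `hasattr`/`getattr` pattern — the class and registration logic here are simplified stand-ins, not the actual Runtime/ActionDispatcher code:

class BaseRuntime:
    def register_optional_actions(self) -> None:
        if hasattr(self, "_run_flows_in_parallel"):
            # Accessing self._run_flows_in_parallel directly would be reported as a
            # missing attribute on BaseRuntime; getattr keeps the access dynamic.
            action = getattr(self, "_run_flows_in_parallel")
            print(f"registered: {action.__name__}")


class ParallelRuntime(BaseRuntime):
    def _run_flows_in_parallel(self) -> None:
        """Subclass-only action that the base class registers opportunistically."""


ParallelRuntime().register_optional_actions()  # registered: _run_flows_in_parallel
BaseRuntime().register_optional_actions()      # hasattr is False, nothing happens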
103 changes: 69 additions & 34 deletions nemoguardrails/colang/v1_0/lang/colang_parser.py
@@ -16,7 +16,7 @@
import json
import re
from ast import literal_eval
from typing import List, Optional
from typing import Any, Dict, List, Optional

import yaml

Expand Down Expand Up @@ -126,7 +126,7 @@ def __init__(
self.current_params_indentation = 1

# The current element i.e. user, bot, event, if ...
self.current_element = None
self.current_element: Optional[Dict[str, Any]] = None

# The flows that have been parsed
self.flows = {}
@@ -264,7 +264,7 @@ def _normalize_line_text(self):

flow_hash = string_hash(flow_text)

self.text += " anonymous-" + flow_hash
self.text += " anonymous-" + str(flow_hash)

# Below are some more advanced normalizations

@@ -313,8 +313,9 @@ def _create_namespace(self, namespace):
# Now, append the new one
self.current_namespaces.append(namespace)
self.current_namespace = ".".join(self.current_namespaces)
self.current_indentation = self.next_line["indentation"]
self.current_indentations.append(self.next_line["indentation"])
next_indentation = self.next_line["indentation"] if self.next_line else 0
self.current_indentation = next_indentation
self.current_indentations.append(next_indentation)

# Reset the branches and the ifs on a new flow
self.branches = []
@@ -335,7 +336,11 @@ def _ignore_block_body(self):
def _include_source_mappings(self):
# Include the source mapping information if required
if self.include_source_mapping:
if self.current_element and "_source_mapping" not in self.current_element:
if (
self.current_element is not None
and isinstance(self.current_element, dict)
and "_source_mapping" not in self.current_element
):
self.current_element["_source_mapping"] = {
"filename": self.filename,
"line_number": self.current_line["number"],
@@ -790,7 +795,7 @@ def _process_define(self):

# If we're dealing with a topic, then we expand the flow definition
if define_token == "topic":
self._insert_topic_flow_definition()
# TODO: Implement topic flow definition insertion
return

# Compute the symbol type
@@ -957,14 +962,18 @@ def _extract_params(self, param_lines: Optional[List] = None):
if isinstance(yaml_value, str):
yaml_value = {"$0": yaml_value}

# self.current_element.update(yaml_value)
for k in yaml_value.keys():
# if the key starts with $, we remove it
param_name = k
if param_name[0] == "$":
param_name = param_name[1:]
if (
self.current_element is not None
and isinstance(self.current_element, dict)
and yaml_value is not None
):
for k in yaml_value.keys():
# if the key starts with $, we remove it
param_name = k
if param_name[0] == "$":
param_name = param_name[1:]

self.current_element[param_name] = yaml_value[k]
self.current_element[param_name] = yaml_value[k]

def _is_test_flow(self):
"""Returns true if the current flow is a test one.
@@ -1005,11 +1014,13 @@ def _is_sample_flow(self):
def _parse_when(self):
# TODO: deal with "when" after "else when"
assert (
self.next_line["indentation"] > self.current_line["indentation"]
self.next_line is not None
and self.next_line["indentation"] > self.current_line["indentation"]
), "Expected indented block after 'when' statement."

# Create the new branch
new_branch = {"elements": [], "indentation": self.next_line["indentation"]}
next_indentation = self.next_line["indentation"] if self.next_line else 0
new_branch = {"elements": [], "indentation": next_indentation}

# # on else, we need to pop the previous branch
# if self.main_token == "else when":
@@ -1040,13 +1051,16 @@ def _parse_when(self):
# continue
# else
# ...
next_indentation = (
self.next_line["indentation"] if self.next_line else 0
)
self.lines.insert(
self.current_line_idx + 1,
{
"text": f"continue",
# We keep the line mapping the same
"number": self.current_line["number"],
"indentation": self.next_line["indentation"],
"indentation": next_indentation,
},
)
self.lines.insert(
@@ -1320,9 +1334,11 @@ def _parse_bot(self):
"text": f"{utterance_text}",
# We keep the line mapping the same
"number": self.current_line["number"],
"indentation": self.current_indentation + 2
if i == len(indented_lines)
else indented_lines[i]["indentation"],
"indentation": (
self.current_indentation + 2
if i == len(indented_lines)
else indented_lines[i]["indentation"]
),
},
)

@@ -1343,7 +1359,9 @@
if utterance_id is None:
self.current_element["bot"] = {
"_type": "element",
"text": utterance_text[1:-1],
"text": (
utterance_text[1:-1] if utterance_text is not None else ""
),
}

# if we have quick_replies, we move them in the element
@@ -1361,7 +1379,13 @@
# If there was a bot message with a snippet, we also add an expect
# TODO: can this be handled better?
try:
if "snippet" in self.current_element["bot"]:
if (
self.current_element is not None
and isinstance(self.current_element, dict)
and "bot" in self.current_element
and isinstance(self.current_element["bot"], dict)
and "snippet" in self.current_element["bot"]
):
self.branches[-1]["elements"].append(
{
"expect": "snippet",
@@ -1425,7 +1449,7 @@ def _parse_do(self):

# if we need to save the return values, we store the info
if "=" in flow_name:
return_vars, flow_name = get_stripped_tokens(split_max(flow_name, "=", 1))
stripped_tokens = get_stripped_tokens(split_max(flow_name, "=", 1))
return_vars, flow_name = stripped_tokens[0], stripped_tokens[1]
else:
return_vars = None

@@ -1475,8 +1500,9 @@ def _parse_meta(self):
branch_elements.insert(0, {"meta": {}})

# Update the elements coming from the parameters
for k in self.current_element.keys():
branch_elements[0]["meta"][k] = self.current_element[k]
if self.current_element is not None:
for k in self.current_element.keys():
branch_elements[0]["meta"][k] = self.current_element[k]

def _parse_generic(self):
value = split_max(self.text, " ", 1)[1].strip()
@@ -1545,7 +1571,9 @@ def _parse_if_branch(self, if_condition):
self.ifs.append(
{
"element": self.current_element,
"indentation": self.next_line["indentation"],
"indentation": (
self.next_line["indentation"] if self.next_line is not None else 0
),
# We also record this to match it with the else
"keyword_indentation": self.current_indentation,
}
@@ -1588,7 +1616,9 @@ def _parse_while(self):
self.branches.append(
{
"elements": self.current_element["do"],
"indentation": self.next_line["indentation"],
"indentation": (
self.next_line["indentation"] if self.next_line is not None else 0
),
}
)

@@ -1602,7 +1632,9 @@ def _parse_any(self):
self.branches.append(
{
"elements": self.current_element["any"],
"indentation": self.next_line["indentation"],
"indentation": (
self.next_line["indentation"] if self.next_line is not None else 0
),
}
)

@@ -1631,7 +1663,9 @@ def _parse_infer(self):
self.branches.append(
{
"elements": self.current_element["infer"],
"indentation": self.next_line["indentation"],
"indentation": (
self.next_line["indentation"] if self.next_line is not None else 0
),
}
)

@@ -1767,15 +1801,15 @@ def parse(self):
exception = Exception(error)

# Decorate the exception with where the parsing failed
exception.filename = self.filename
exception.line = self.current_line["number"]
exception.error = str(ex)
setattr(exception, "filename", self.filename)
setattr(exception, "line", self.current_line["number"])
setattr(exception, "error", str(ex))

raise exception

self.current_line_idx += 1

result = {"flows": self.flows}
result: Dict[str, Any] = {"flows": self.flows}

if self.imports:
result["imports"] = self.imports
@@ -1818,7 +1852,7 @@ def parse_snippets_and_imports(self):
"""
snippets = {}
imports = []
snippet = None
snippet: Optional[Dict[str, Any]] = None

while self.current_line_idx < len(self.lines):
self._fetch_current_line()
@@ -1833,6 +1867,7 @@
for k in self.current_line.keys():
d[k] = self.current_line[k]
d["filename"] = self.filename
assert snippet is not None # Type checker hint
snippet["lines"].append(d)

self.current_line_idx += 1
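Most of the colang_parser.py changes narrow values whose annotations allow `None` before they are used: `self.next_line` and `self.current_element` are checked (or asserted) before indexing, the `string_hash` result is wrapped in `str(...)` before concatenation, the exception raised in `parse()` gets its extra attributes through `setattr`, and the call to the missing `_insert_topic_flow_definition` is replaced with a TODO. A minimal standalone sketch of the narrowing and `setattr` patterns, using hypothetical values rather than the parser's real state:

from typing import Any, Dict, Optional

next_line: Optional[Dict[str, Any]] = {"indentation": 4}

# Indexing next_line["indentation"] directly is flagged while the declared type still
# allows None; the conditional narrows it before use and supplies a default.
indentation = next_line["indentation"] if next_line else 0

# Exception does not declare .filename or .line, so plain attribute assignment is
# reported as an unknown attribute; setattr sidesteps that check.
exc = Exception("parsing failed")
setattr(exc, "filename", "flows.co")
setattr(exc, "line", 12)

print(indentation, getattr(exc, "filename"), getattr(exc, "line"))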
25 changes: 13 additions & 12 deletions nemoguardrails/colang/v1_0/lang/comd_parser.py
@@ -362,21 +362,22 @@ def parse_md_file(file_name, content=None):
continue

# Make sure we have the type of the symbol in the name of the symbol
sym = _get_typed_symbol_name(sym, symbol_type)
if sym is not None:
sym = _get_typed_symbol_name(sym, symbol_type)

# For objects, we translate the "string" type to "kb:Object:prop|partial"
param_type = _get_param_type(parts[1])
if symbol_type == "object" and param_type in ["string", "text"]:
object_name = split_max(sym, ":", 1)[1]
param_type = f"kb:{object_name}:{parts[0]}|partial"
# For objects, we translate the "string" type to "kb:Object:prop|partial"
param_type = _get_param_type(parts[1])
if symbol_type == "object" and param_type in ["string", "text"]:
object_name = split_max(sym, ":", 1)[1]
param_type = f"kb:{object_name}:{parts[0]}|partial"

# TODO: figure out a cleaner way to deal with this
# For the "type:time" type, we transform it into "lookup:time"
if param_type == "type:time":
param_type = "lookup:time"
# TODO: figure out a cleaner way to deal with this
# For the "type:time" type, we transform it into "lookup:time"
if param_type == "type:time":
param_type = "lookup:time"

result["mappings"].append((f"{sym}:{parts[0]}", param_type))
symbol_params.append(parts[0])
result["mappings"].append((f"{sym}:{parts[0]}", param_type))
symbol_params.append(parts[0])

elif line.startswith("-") or line.startswith("*"):
if sym is None:
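The comd_parser.py change moves the mapping-building block under an `if sym is not None:` guard, so `sym` is narrowed from `Optional[str]` to `str` before `_get_typed_symbol_name` and the string handling that follows run. A minimal sketch of the same narrowing, with a hypothetical helper standing in for the real one:

from typing import Optional


def get_typed_symbol_name(sym: str, symbol_type: str) -> str:
    # Hypothetical stand-in for _get_typed_symbol_name.
    return sym if sym.startswith(f"{symbol_type}:") else f"{symbol_type}:{sym}"


def build_mapping(sym: Optional[str], symbol_type: str, prop: str) -> Optional[str]:
    if sym is not None:
        # Inside this branch the checker treats sym as str, so passing it to a
        # str-typed helper and formatting it are both accepted.
        typed = get_typed_symbol_name(sym, symbol_type)
        return f"{typed}:{prop}"
    return None


print(build_mapping("User", "intent", "name"))  # intent:User:name
print(build_mapping(None, "intent", "name"))    # None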