
Merge pull request #355 from PrefectHQ/system-messages
Add compilation flag for removing all system messages
jlowin authored Oct 9, 2024
2 parents af51a90 + 1cc4986 commit 80fca97
Showing 3 changed files with 23 additions and 12 deletions.
src/controlflow/events/message_compiler.py (18 additions, 10 deletions)
@@ -113,19 +113,25 @@ def convert_system_messages(
     messages: list[BaseMessage], rules: LLMRules
 ) -> list[BaseMessage]:
     """
-    Converts system messages to human messages if the LLM doesnt support system messages.
+    Converts system messages to human messages if the LLM doesnt support system
+    messages, either at all or in the first position.
     """
-    if not messages or not rules.require_system_message_first:
-        return messages
-
     new_messages = []
-    for message in messages:
+    for i, message in enumerate(messages):
         if isinstance(message, SystemMessage):
-            new_messages.append(
-                HumanMessage(
-                    content=f"ORCHESTRATOR: {message.content}", name=message.name
+            # If system messages are not supported OR if they must be first and
+            # this is not the first message, THEN convert the message to a human message
+            if not rules.allow_system_messages or (
+                i > 0 and rules.require_system_message_first
+            ):
+                new_messages.append(
+                    HumanMessage(
+                        content=f"ORCHESTRATOR: {message.content}", name=message.name
+                    )
                 )
-            )
+            else:
+                # If the system message is allowed, add it as-is
+                new_messages.append(message)
         else:
             new_messages.append(message)
     return new_messages
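
For illustration, a minimal sketch (not part of the commit) of how the reworked branching behaves. FakeRules is a hypothetical stand-in that exposes only the two attributes the function reads; real callers pass an LLMRules instance.

from dataclasses import dataclass

from langchain_core.messages import HumanMessage, SystemMessage

from controlflow.events.message_compiler import convert_system_messages


@dataclass
class FakeRules:
    # hypothetical stand-in exposing only the attributes convert_system_messages reads
    allow_system_messages: bool = True
    require_system_message_first: bool = False


msgs = [
    SystemMessage(content="You are the orchestrator."),
    HumanMessage(content="hi"),
    SystemMessage(content="Remember the rules."),
]

# No system role at all: every SystemMessage becomes an "ORCHESTRATOR:" HumanMessage.
converted_all = convert_system_messages(msgs, rules=FakeRules(allow_system_messages=False))

# System messages allowed only in the first position: index 0 is kept, index 2 is converted.
converted_tail = convert_system_messages(msgs, rules=FakeRules(require_system_message_first=True))
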
@@ -249,7 +255,9 @@ def compile_to_messages(self, agent: "Agent") -> list[BaseMessage]:
         messages = break_up_consecutive_ai_messages(messages, rules=context.llm_rules)
         messages = format_message_name(messages, rules=context.llm_rules)
 
+        messages = system_prompt + messages
+
+        # this should go last
         messages = convert_system_messages(messages, rules=context.llm_rules)
 
-        return system_prompt + messages
+        return messages
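
A hedged sketch of why the reordering matters (again not part of the commit; NoSystemRules is a hypothetical stand-in for an LLMRules instance): with the system prompt prepended before convert_system_messages runs, the conversion pass also covers the prompt, whereas the old order could hand a raw SystemMessage to a model that does not accept one.

from langchain_core.messages import HumanMessage, SystemMessage

from controlflow.events.message_compiler import convert_system_messages


class NoSystemRules:
    # hypothetical stand-in: a model with no system role
    allow_system_messages = False
    require_system_message_first = False


system_prompt = [SystemMessage(content="Agent instructions ...")]
history = [HumanMessage(content="hello")]

# New order: prepend the system prompt first, convert last -> the prompt is converted too.
new_order = convert_system_messages(system_prompt + history, rules=NoSystemRules())

# Old order, shown for contrast: convert first, prepend after -> the raw SystemMessage
# would still reach the model.
old_order = system_prompt + convert_system_messages(history, rules=NoSystemRules())
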
src/controlflow/llm/rules.py (3 additions, 0 deletions)
@@ -22,6 +22,9 @@ class LLMRules(ControlFlowModel):
     # require at least one non-system message
     require_at_least_one_message: bool = False
 
+    # system messages are supported as a role
+    allow_system_messages: bool = True
+
     # system messages can only be provided as the very first message in a thread
     require_system_message_first: bool = False

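As a usage sketch (an assumption, not something this commit adds), a provider-specific rules class whose chat API has no system role could simply override the new default, and the compiler would then rewrite every SystemMessage as an ORCHESTRATOR-prefixed HumanMessage; a provider that accepts a system message only at the start would instead keep allow_system_messages=True and set require_system_message_first=True.

from controlflow.llm.rules import LLMRules


class NoSystemRoleRules(LLMRules):
    # hypothetical rules for a provider whose chat API has no system role
    allow_system_messages: bool = False


class SystemFirstOnlyRules(LLMRules):
    # hypothetical rules for a provider that accepts a system message only as the first message
    require_system_message_first: bool = True
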
tests/test_run.py (2 additions, 2 deletions)
Expand Up @@ -93,7 +93,7 @@ def test_min_failed(self):
task2 = Task("Task 2")
task3 = Task("Task 3")

with instructions("fail tasks 1 and 3"):
with instructions("fail tasks 1 and 3. Don't work on task 2."):
run_tasks(
[task1, task2, task3],
run_until=AnyFailed(min_failed=2),
@@ -157,7 +157,7 @@ async def test_min_failed(self):
         task2 = Task("Task 2")
         task3 = Task("Task 3")
 
-        with instructions("fail tasks 1 and 3"):
+        with instructions("fail tasks 1 and 3. Don't work on task 2."):
             await run_tasks_async(
                 [task1, task2, task3],
                 run_until=AnyFailed(min_failed=2),
