Message Types

Maticlib provides convenient message classes for structured conversation management across all LLM clients. These classes help maintain clean and type-safe multi-turn conversations.

Overview

The message system includes three main classes: SystemMessage (instructions and context for the model), HumanMessage (user input), and AIMessage (assistant responses). All three take a content string and expose a message_type identifier.

Quick Start

from maticlib.messages import SystemMessage, HumanMessage, AIMessage

# Create messages
system = SystemMessage("You are a helpful assistant")
human = HumanMessage("Hello!")
ai = AIMessage("Hi there! How can I help you?")

SystemMessage

System messages provide instructions or context to the AI model. They set the behavior, tone, and constraints for the conversation.

Constructor

SystemMessage(content: str)

Attributes

- content (str): the instruction text passed to the constructor
- message_type (MessageType): always MessageType.SYSTEM (value "system")

Examples

from maticlib.messages import SystemMessage

# Basic system message
system_msg = SystemMessage("You are a helpful Python tutor")

# Access properties
print(system_msg.content)           # "You are a helpful Python tutor"
print(system_msg.message_type)      # MessageType.SYSTEM
print(system_msg.message_type.name) # "SYSTEM"
print(system_msg.message_type.value)# "system"

Common Use Cases

# Setting personality
SystemMessage("You are a friendly and patient teacher")

# Setting constraints
SystemMessage("You must respond in JSON format only")

# Setting context
SystemMessage("You are an expert in machine learning and data science")

# Setting tone
SystemMessage("You are a professional business consultant. Be formal and concise")

# Setting behavior
SystemMessage("""
You are a code reviewer. For each code submission:
1. Check for bugs and errors
2. Suggest improvements
3. Provide examples of better practices
""")

HumanMessage

Human messages represent input from the user or human participant in the conversation.

Constructor

HumanMessage(content: str)

Attributes

- content (str): the user-provided text passed to the constructor
- message_type (MessageType): always MessageType.HUMAN (value "user")

Examples

from maticlib.messages import HumanMessage

# Create human message
human_msg = HumanMessage("What is Python?")

# Access properties
print(human_msg.content)           # "What is Python?"
print(human_msg.message_type)      # MessageType.HUMAN
print(human_msg.message_type.name) # "HUMAN"
print(human_msg.message_type.value)# "user"

Common Patterns

# Questions
HumanMessage("How does recursion work?")

# Commands
HumanMessage("Generate a list of prime numbers")

# Clarifications
HumanMessage("Can you explain that in simpler terms?")

# Follow-ups
HumanMessage("What about edge cases?")

# Multi-line input
HumanMessage("""
Please review this code:

def factorial(n):
    return n * factorial(n-1)
""")

AIMessage

AI messages represent responses from the AI assistant. These are typically used when building conversation history.

Constructor

AIMessage(content: str)

Attributes

- content (str): the assistant-response text passed to the constructor
- message_type (MessageType): always MessageType.AI (value "assistant")

Examples

from maticlib.messages import AIMessage

# Create AI message
ai_msg = AIMessage("Python is a high-level programming language...")

# Access properties
print(ai_msg.content)           # "Python is a high-level..."
print(ai_msg.message_type)      # MessageType.AI
print(ai_msg.message_type.name) # "AI"
print(ai_msg.message_type.value)# "assistant"

Multi-turn Conversations

The primary use case for message classes is building multi-turn conversations:

from maticlib.messages import SystemMessage, HumanMessage, AIMessage
from maticlib.llm.google_genai import GoogleGenAIClient

client = GoogleGenAIClient(api_key="YOUR_KEY")

# Build conversation history
conversation = [
    SystemMessage("You are a Python expert"),
    HumanMessage("What are decorators?"),
    AIMessage("Decorators are functions that modify other functions..."),
    HumanMessage("Can you show me an example?"),
    AIMessage("Sure! Here's a simple example: @timer"),
    HumanMessage("What about class decorators?")
]

# Continue conversation
response = client.complete(conversation)
print(response.content)

Conversation Builder Pattern

from maticlib.messages import SystemMessage, HumanMessage, AIMessage

class Conversation:
    """Fluent builder for a message history.

    Accumulates SystemMessage/HumanMessage/AIMessage objects in order; the
    optional system instruction, when given, is always the first entry.
    """

    # The string annotation fixes the original implicit-Optional
    # `str = None` (PEP 484) while staying valid on every Python 3 version.
    def __init__(self, system_instruction: "str | None" = None):
        self.messages = []
        if system_instruction:
            self.messages.append(SystemMessage(system_instruction))

    def add_human(self, content: str):
        """Append a user turn; returns self so calls can be chained."""
        self.messages.append(HumanMessage(content))
        return self

    def add_ai(self, content: str):
        """Append an assistant turn; returns self so calls can be chained."""
        self.messages.append(AIMessage(content))
        return self

    def get_messages(self):
        """Return the underlying (mutable) message list."""
        return self.messages

# Usage
convo = Conversation("You are a helpful assistant")
convo.add_human("Hello!").add_ai("Hi there!")
convo.add_human("How are you?")

messages = convo.get_messages()

MessageType Enum

All message classes use the MessageType enum for type identification:

from maticlib.messages import MessageType

# Enum values
MessageType.SYSTEM    # name="SYSTEM", value="system"
MessageType.HUMAN     # name="HUMAN", value="user"
MessageType.AI        # name="AI", value="assistant"

Using MessageType for Routing

from maticlib.messages import MessageType, HumanMessage, AIMessage

def process_message(msg):
    """Print a message with a prefix chosen by its message_type.

    Messages that are neither HUMAN nor AI are silently ignored.
    """
    templates = {
        MessageType.HUMAN: "User says: {}",
        MessageType.AI: "AI responds: {}",
    }
    template = templates.get(msg.message_type)
    if template is not None:
        print(template.format(msg.content))

messages = [
    HumanMessage("Hello"),
    AIMessage("Hi there!")
]

for msg in messages:
    process_message(msg)

Integration with LLM Clients

Google GenAI Client

from maticlib.llm.google_genai import GoogleGenAIClient
from maticlib.messages import SystemMessage, HumanMessage, AIMessage

client = GoogleGenAIClient(
    system_instruct=SystemMessage("You are a helpful assistant"),
    api_key="YOUR_KEY"
)

conversation = [
    HumanMessage("Hello!"),
    AIMessage("Hi! How can I help?"),
    HumanMessage("Tell me about Python")
]

response = client.complete(conversation)
print(response.content)

Mistral Client

from maticlib.llm.mistral import MistralClient
from maticlib.messages import HumanMessage, AIMessage

client = MistralClient(api_key="YOUR_KEY")

conversation = [
    HumanMessage("Bonjour!"),
    AIMessage("Hello! How can I assist you?"),
    HumanMessage("Tell me about France")
]

response = client.complete(conversation)
print(response.content)

Best Practices

- Begin every conversation with exactly one SystemMessage, and keep it first when trimming history.
- Append the client's reply as an AIMessage to the history before the next HumanMessage so context is preserved.
- Bound conversation length (see truncate_conversation below) to keep token usage under control.

Advanced: Custom Message Processing

from typing import List, Union

from maticlib.messages import AIMessage, HumanMessage, MessageType, SystemMessage

MessageList = List[Union[SystemMessage, HumanMessage, AIMessage]]

def filter_by_type(messages: MessageList, msg_type) -> MessageList:
    """Return the messages whose message_type equals msg_type, order preserved."""
    return [msg for msg in messages if msg.message_type == msg_type]


def count_tokens_estimate(messages: MessageList) -> int:
    """Rough token estimate: total whitespace-separated words across messages."""
    return sum(len(msg.content.split()) for msg in messages)


def truncate_conversation(messages: MessageList, max_messages: int) -> MessageList:
    """Keep all system messages plus the last max_messages non-system ones.

    Bug fix: the original slice `other_msgs[-max_messages:]` with
    max_messages == 0 evaluates as `[-0:]` and silently keeps the WHOLE
    tail; now max_messages <= 0 drops every non-system message.
    """
    system_msgs = filter_by_type(messages, MessageType.SYSTEM)
    other_msgs = [m for m in messages if m.message_type != MessageType.SYSTEM]

    recent = other_msgs[-max_messages:] if max_messages > 0 else []
    # Keep system messages + recent conversation
    return system_msgs + recent

# Usage
conversation = [
    SystemMessage("You are helpful"),
    HumanMessage("Hi"),
    AIMessage("Hello!"),
    HumanMessage("How are you?"),
    AIMessage("I'm great!"),
    HumanMessage("Tell me more")
]

# Get only human messages
human_msgs = filter_by_type(conversation, MessageType.HUMAN)

# Estimate tokens
tokens = count_tokens_estimate(conversation)

# Keep only last 2 exchanges
recent = truncate_conversation(conversation, max_messages=4)

Common Patterns

Chat Loop

from maticlib.messages import SystemMessage, HumanMessage, AIMessage
from maticlib.llm.google_genai import GoogleGenAIClient

# Minimal REPL-style chat loop: the history list grows each turn so the
# model sees the full conversation on every request.
client = GoogleGenAIClient(api_key="YOUR_KEY")

conversation = [
    SystemMessage("You are a helpful assistant")
]

while True:
    user_input = input("You: ")
    # Typing 'exit' or 'quit' (any case) ends the session.
    if user_input.lower() in ['exit', 'quit']:
        break
    
    conversation.append(HumanMessage(user_input))
    response = client.complete(conversation)
    
    print(f"AI: {response.content}")
    # Record the assistant reply so the next turn has full context.
    conversation.append(AIMessage(response.content))

Context Management

class ConversationManager:
    """Wrap an LLM client with a rolling, auto-trimmed conversation history.

    The system instruction always stays at index 0; at most max_history
    subsequent messages are retained after each exchange.
    """

    def __init__(self, client, system_instruction: str, max_history: int = 10):
        self.client = client
        self.max_history = max_history
        self.messages = [SystemMessage(system_instruction)]

    def send(self, user_message: str) -> str:
        """Send one user turn, record both sides, and return the reply text."""
        self.messages.append(HumanMessage(user_message))

        reply = self.client.complete(self.messages)
        self.messages.append(AIMessage(reply.content))

        # Trim overflow while always preserving the system message at index 0.
        overflow = len(self.messages) - (self.max_history + 1)
        if overflow > 0:
            self.messages = self.messages[:1] + self.messages[-self.max_history:]

        return reply.content

# Usage
manager = ConversationManager(client, "You are a Python tutor")
response = manager.send("What are decorators?")
print(response)

See Also