Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
18 changes: 18 additions & 0 deletions tiny_chat/creators/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
"""
Profile creators for TinyChat.

This module provides creators for generating agent, environment, and relationship profiles
with LLM enhancement capabilities.
"""

from .agent_creator import AgentCreator
from .base_creator import BaseCreator
from .environment_creator import EnvironmentCreator
from .relationship_creator import RelationshipCreator

__all__ = [
'BaseCreator',
'AgentCreator',
'EnvironmentCreator',
'RelationshipCreator',
]
137 changes: 137 additions & 0 deletions tiny_chat/creators/agent_creator.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,137 @@
from typing import Any

from pydantic import validate_call

from tiny_chat.generator.generate import agenerate
from tiny_chat.generator.output_parsers import PydanticOutputParser
from tiny_chat.profiles.agent_profile import BaseAgentProfile
from tiny_chat.utils.template import TemplateManager

from .base_creator import BaseCreator


class AgentCreator(BaseCreator):
    """Creator for agent profiles with LLM enhancement.

    Builds a textual prompt from user-provided partial information, asks the
    LLM to fill in the rest, then overlays the user's values back on top of
    the generated profile so explicit input always wins.
    """

    def __init__(self, model_name: str = 'gpt-4o-mini', output_dir: str = 'data'):
        super().__init__(model_name, output_dir)
        self.template_manager = TemplateManager()

    def get_default_filename(self) -> str:
        """Get default output filename"""
        return 'agent_profiles.jsonl'

    @staticmethod
    def _has_content(value: Any) -> bool:
        """Return True if *value* was meaningfully provided.

        None, empty string, and empty list are treated as "not provided";
        falsy-but-meaningful values such as 0 or False are kept.
        """
        return value is not None and value != '' and value != []

    def _format_provided_info(self, partial_info: dict[str, Any]) -> str:
        """Render user-supplied fields as a bullet list for the LLM prompt."""
        parts: list[str] = []
        if partial_info:
            parts.append('Provided information:')
            for key, value in partial_info.items():
                if self._has_content(value):
                    parts.append(f'- {key}: {value}')
        return '\n'.join(parts) if parts else 'No specific information provided.'

    def _format_current_profile(self, profile: BaseAgentProfile) -> str:
        """Render an existing profile as a bullet list (pk omitted)."""
        text = 'Current agent profile:\n'
        for key, value in profile.model_dump().items():
            if key != 'pk' and self._has_content(value):
                if isinstance(value, list):
                    value = ', '.join(str(v) for v in value)
                text += f'- {key}: {value}\n'
        return text

    @validate_call
    async def create_profile(
        self,
        partial_info: dict[str, Any],
        output_file: str | None = None,
        temperature: float = 0.7,
        **kwargs,
    ) -> BaseAgentProfile:
        """
        Create an agent profile with LLM enhancement

        Args:
            partial_info: Partial information provided by user
            output_file: Output file name (defaults to agent_profiles.jsonl)
            temperature: LLM temperature for generation
            **kwargs: Additional parameters

        Returns:
            Created BaseAgentProfile object
        """
        unique_id = self.generate_unique_id('agent')

        generated_profile = await agenerate(
            model_name=self.model_name,
            template=self.template_manager.get_template('AGENT_PROFILE'),
            input_values={
                'provided_info': self._format_provided_info(partial_info),
            },
            output_parser=PydanticOutputParser(pydantic_object=BaseAgentProfile),
            temperature=temperature,
            **kwargs,
        )

        # User-provided values override whatever the LLM generated.
        generated_data = generated_profile.model_dump()
        for key, value in partial_info.items():
            if self._has_content(value):
                generated_data[key] = value

        generated_data['pk'] = unique_id

        final_profile = BaseAgentProfile(**generated_data)

        output_filename = output_file or self.get_default_filename()
        self.save_to_jsonl(final_profile, output_filename)

        return final_profile

    @validate_call
    async def polish_profile(
        self,
        existing_profile: BaseAgentProfile,
        improvement_request: str,
        output_file: str | None = None,
        temperature: float = 0.7,
        **kwargs,
    ) -> BaseAgentProfile:
        """
        Polish/improve an existing agent profile based on user request

        Args:
            existing_profile: The agent profile to be improved
            improvement_request: User's specific improvement requirements
            output_file: Output file name (optional)
            temperature: LLM temperature for generation
            **kwargs: Additional parameters

        Returns:
            Improved BaseAgentProfile object
        """
        polish_input = (
            f'{self._format_current_profile(existing_profile)}\n'
            f'Improvement request: {improvement_request}'
        )

        polished_profile = await agenerate(
            model_name=self.model_name,
            template=self.template_manager.get_template('AGENT_PROFILE'),
            input_values={
                'provided_info': polish_input,
            },
            output_parser=PydanticOutputParser(pydantic_object=BaseAgentProfile),
            temperature=temperature,
            **kwargs,
        )

        # Preserve the original primary key across the polish cycle.
        polished_data = polished_profile.model_dump()
        polished_data['pk'] = existing_profile.pk

        final_profile = BaseAgentProfile(**polished_data)

        # Polished profiles are only persisted when explicitly requested.
        if output_file:
            self.save_to_jsonl(final_profile, output_file)

        return final_profile
88 changes: 88 additions & 0 deletions tiny_chat/creators/base_creator.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,88 @@
import json
import uuid
from abc import ABC, abstractmethod
from datetime import datetime
from pathlib import Path
from typing import Any, TypeVar

from pydantic import BaseModel, validate_call

ProfileType = TypeVar('ProfileType', bound=BaseModel)


class BaseCreator(ABC):
    """Abstract base for profile creators that enrich partial input via an LLM.

    Concrete subclasses implement profile generation and polishing; this base
    supplies unique-ID generation and JSONL persistence.
    """

    def __init__(self, model_name: str = 'gpt-4o-mini', output_dir: str = 'data'):
        """
        Initialize the creator

        Args:
            model_name: Model name for LLM generation
            output_dir: Directory to save created profiles
        """
        self.model_name = model_name
        self.output_dir = Path(output_dir)
        # Ensure the target directory exists before any save is attempted.
        self.output_dir.mkdir(parents=True, exist_ok=True)

    def generate_unique_id(self, profile_type: str) -> str:
        """Generate a unique ID for the profile.

        Format: ``<profile_type>_<8 hex chars>_<unix timestamp>``.
        """
        # The first group of a canonical UUID string is 8 hex characters,
        # so .hex[:8] is equivalent to str(uuid4())[:8].
        short_uuid = uuid.uuid4().hex[:8]
        epoch_seconds = int(datetime.now().timestamp())
        return f'{profile_type}_{short_uuid}_{epoch_seconds}'

    @validate_call
    def save_to_jsonl(
        self, profile: BaseModel, filename: str, append: bool = True
    ) -> None:
        """Serialize *profile* as one JSON line in ``output_dir/filename``."""
        target = self.output_dir / filename
        with target.open('a' if append else 'w', encoding='utf-8') as fh:
            fh.write(json.dumps(profile.model_dump(), ensure_ascii=False) + '\n')

    @abstractmethod
    async def create_profile(
        self, partial_info: dict[str, Any], output_file: str | None = None, **kwargs
    ) -> ProfileType:
        """
        Create a profile with LLM enhancement

        Args:
            partial_info: Partial information provided by user
            output_file: Output file name (optional)
            **kwargs: Additional parameters

        Returns:
            Created profile object
        """
        raise NotImplementedError

    @abstractmethod
    def get_default_filename(self) -> str:
        """Get default output filename for this creator type"""
        raise NotImplementedError

    @abstractmethod
    async def polish_profile(
        self,
        existing_profile: ProfileType,
        improvement_request: str,
        temperature: float = 0.7,
        **kwargs,
    ) -> ProfileType:
        """
        Polish/improve an existing profile based on user request

        Args:
            existing_profile: The profile to be improved
            improvement_request: User's specific improvement requirements
            temperature: LLM temperature for generation
            **kwargs: Additional parameters

        Returns:
            Improved profile object
        """
        raise NotImplementedError
137 changes: 137 additions & 0 deletions tiny_chat/creators/environment_creator.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,137 @@
from typing import Any

from pydantic import validate_call

from tiny_chat.generator.generate import agenerate
from tiny_chat.generator.output_parsers import PydanticOutputParser
from tiny_chat.profiles.enviroment_profile import BaseEnvironmentProfile
from tiny_chat.utils.template import TemplateManager

from .base_creator import BaseCreator


class EnvironmentCreator(BaseCreator):
    """Creator for environment profiles with LLM enhancement"""

    def __init__(self, model_name: str = 'gpt-4o-mini', output_dir: str = 'data'):
        super().__init__(model_name, output_dir)
        self.template_manager = TemplateManager()

    def get_default_filename(self) -> str:
        """Get default output filename"""
        return 'environment_profiles.jsonl'

    @validate_call
    async def create_profile(
        self,
        partial_info: dict[str, Any],
        output_file: str | None = None,
        temperature: float = 0.7,
        **kwargs,
    ) -> BaseEnvironmentProfile:
        """
        Create an environment profile with LLM enhancement

        Args:
            partial_info: Partial information provided by user
            output_file: Output file name (defaults to environment_profiles.jsonl)
            temperature: LLM temperature for generation
            **kwargs: Additional parameters

        Returns:
            Created BaseEnvironmentProfile object
        """
        unique_id = self.generate_unique_id('env')

        # Build the prompt section describing what the user already supplied.
        # None, '' and [] count as "not supplied".
        prompt_lines: list[str] = []
        if partial_info:
            prompt_lines.append('Provided information:')
            prompt_lines.extend(
                f'- {key}: {value}'
                for key, value in partial_info.items()
                if value is not None and value != '' and value != []
            )

        provided_info_text = (
            '\n'.join(prompt_lines)
            if prompt_lines
            else 'No specific information provided.'
        )

        generated_profile = await agenerate(
            model_name=self.model_name,
            template=self.template_manager.get_template('ENV_PROFILE_CREATOR'),
            input_values={
                'provided_info': provided_info_text,
            },
            output_parser=PydanticOutputParser(pydantic_object=BaseEnvironmentProfile),
            temperature=temperature,
            **kwargs,
        )

        # User input takes precedence over generated values.
        merged = generated_profile.model_dump()
        merged.update(
            {
                key: value
                for key, value in partial_info.items()
                if value is not None and value != '' and value != []
            }
        )
        merged['pk'] = unique_id

        final_profile = BaseEnvironmentProfile(**merged)
        self.save_to_jsonl(final_profile, output_file or self.get_default_filename())
        return final_profile

    @validate_call
    async def polish_profile(
        self,
        existing_profile: BaseEnvironmentProfile,
        improvement_request: str,
        output_file: str | None = None,
        temperature: float = 0.7,
        **kwargs,
    ) -> BaseEnvironmentProfile:
        """
        Polish/improve an existing environment profile based on user request

        Args:
            existing_profile: The environment profile to be improved
            improvement_request: User's specific improvement requirements
            output_file: Output file name (optional)
            temperature: LLM temperature for generation
            **kwargs: Additional parameters

        Returns:
            Improved BaseEnvironmentProfile object
        """
        # Describe the current profile (pk excluded, lists comma-joined).
        described = ['Current environment profile:']
        for field, field_value in existing_profile.model_dump().items():
            if field == 'pk' or field_value is None or field_value == '' or field_value == []:
                continue
            rendered = (
                ', '.join(str(item) for item in field_value)
                if isinstance(field_value, list)
                else field_value
            )
            described.append(f'- {field}: {rendered}')

        polish_input = (
            '\n'.join(described) + f'\n\nImprovement request: {improvement_request}'
        )

        polished_profile = await agenerate(
            model_name=self.model_name,
            template=self.template_manager.get_template('ENV_PROFILE_CREATOR'),
            input_values={
                'provided_info': polish_input,
            },
            output_parser=PydanticOutputParser(pydantic_object=BaseEnvironmentProfile),
            temperature=temperature,
            **kwargs,
        )

        # Keep the original primary key on the polished result.
        polished_data = polished_profile.model_dump()
        polished_data['pk'] = existing_profile.pk

        final_profile = BaseEnvironmentProfile(**polished_data)

        # Only persist when an explicit output file is given.
        if output_file:
            self.save_to_jsonl(final_profile, output_file)

        return final_profile
Loading
Loading