Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 8 additions & 0 deletions codeflow_engine/actions/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -186,6 +186,13 @@
except ImportError:
pass

# Backward-compatibility alias: expose the relocated subpackage under its old
# import path ``codeflow_engine.actions.llm`` (it now lives at
# ``codeflow_engine.actions.ai_actions.llm``).
llm = None  # rebound to the subpackage below if the import succeeds
try:
    # Binds the ``ai_actions.llm`` subpackage into this module's namespace.
    from codeflow_engine.actions.ai_actions import llm
except ImportError:
    # Best-effort: the alias stays ``None`` when the subpackage (or one of its
    # dependencies, presumably an optional LLM SDK) is unavailable — TODO confirm.
    pass

# Platform
PlatformDetector: type[Any] | None = None
try:
Expand Down Expand Up @@ -223,6 +230,7 @@
"generation",
"git",
"issues",
"llm", # Backward compatibility alias for ai_actions.llm
"maintenance",
"platform",
"quality",
Expand Down
51 changes: 35 additions & 16 deletions codeflow_engine/actions/ai_actions/llm/__init__.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
"""
"""
CODEFLOW LLM Package - Modular LLM provider system.

This package provides a unified interface for multiple LLM providers including:
Expand All @@ -12,7 +12,7 @@

Usage::

from codeflow_engine.actions.llm import get_llm_provider_manager, complete_chat
from codeflow_engine.actions.ai_actions.llm import get_llm_provider_manager, complete_chat

# Get a manager instance
manager = get_llm_provider_manager()
Expand All @@ -28,31 +28,41 @@
import os
from typing import Any

# Export base classes
from codeflow_engine.actions.llm.base import BaseLLMProvider
# Export base classes from core
from codeflow_engine.core.llm import (
BaseLLMProvider,
LLMProviderRegistry,
LLMResponse,
OpenAICompatibleProvider,
)

# Export manager
from codeflow_engine.actions.llm.manager import ActionLLMProviderManager
from codeflow_engine.actions.ai_actions.llm.manager import ActionLLMProviderManager

# Export providers
from codeflow_engine.actions.llm.providers import (
from codeflow_engine.actions.ai_actions.llm.providers import (
AnthropicProvider,
AzureOpenAIProvider,
GroqProvider,
MistralProvider,
OpenAIProvider,
PerplexityProvider,
TogetherAIProvider,
MISTRAL_AVAILABLE,
)

# Export types
from codeflow_engine.actions.llm.types import (
from codeflow_engine.actions.ai_actions.llm.types import (
LLMConfig,
LLMProviderType,
LLMResponse,
Message,
MessageRole,
)

# Conditionally import MistralProvider
MistralProvider = None
if MISTRAL_AVAILABLE:
from codeflow_engine.actions.ai_actions.llm.providers import MistralProvider


# Global provider manager instance
_provider_manager: ActionLLMProviderManager | None = None
Expand All @@ -63,7 +73,7 @@ def get_llm_provider_manager() -> ActionLLMProviderManager:
Get or create the global LLM provider manager with configuration from environment variables.

Returns:
LLMProviderManager: A configured instance of LLMProviderManager
ActionLLMProviderManager: A configured instance of LLMProviderManager
"""
global _provider_manager

Expand Down Expand Up @@ -166,26 +176,35 @@ def complete_chat(
return manager.complete(request)


# Backward compatibility alias
LLMProviderManager = ActionLLMProviderManager


# Export all public components
__all__ = [
"AnthropicProvider",
# Base classes
"BaseLLMProvider",
"GroqProvider",
"LLMConfig",
"OpenAICompatibleProvider",
# Manager
"ActionLLMProviderManager",
"LLMProviderManager", # Backward compatibility
# Registry
"LLMProviderRegistry",
# Types
"LLMConfig",
"LLMProviderType",
"LLMResponse",
"Message",
# Types
"MessageRole",
"MistralProvider",
# Providers
"AnthropicProvider",
"AzureOpenAIProvider",
"GroqProvider",
"MistralProvider",
"OpenAIProvider",
"PerplexityProvider",
"TogetherAIProvider",
"complete_chat",
# Factory functions
"complete_chat",
"get_llm_provider_manager",
]
29 changes: 6 additions & 23 deletions codeflow_engine/actions/ai_actions/llm/base.py
Original file line number Diff line number Diff line change
@@ -1,28 +1,11 @@
"""
Abstract base class for LLM providers.

This module re-exports from codeflow_engine.core.llm for backwards compatibility.
New code should import directly from codeflow_engine.core.llm.
"""

# Re-export from core for backwards compatibility: the concrete implementation
# of BaseLLMProvider moved to codeflow_engine.core.llm.base, and this shim keeps
# the legacy import path ``codeflow_engine.actions.ai_actions.llm.base`` working.
from codeflow_engine.core.llm.base import BaseLLMProvider

# Limit the public surface of this shim to the single legacy name.
__all__ = ["BaseLLMProvider"]
Loading