"""
Troviku-1.1 Python Client Library
A comprehensive client for interacting with the Troviku-1.1 API.
Supports code generation, code completion, and various programming tasks.
"""
import requests
from typing import Optional, List, Dict, Any, Union
from dataclasses import dataclass, asdict
from enum import Enum
class Language(Enum):
"""Supported programming languages."""
PYTHON = "python"
JAVASCRIPT = "javascript"
TYPESCRIPT = "typescript"
JAVA = "java"
CPP = "cpp"
RUST = "rust"
GO = "go"
RUBY = "ruby"
PHP = "php"
SWIFT = "swift"
KOTLIN = "kotlin"
CSHARP = "csharp"
class TaskType(Enum):
"""Supported task types."""
GENERATE = "generate"
COMPLETE = "complete"
EXPLAIN = "explain"
REVIEW = "review"
DEBUG = "debug"
TRANSLATE = "translate"
DOCUMENT = "document"
TEST = "test"
@dataclass
class GenerationConfig:
"""Configuration for code generation."""
temperature: float = 0.7
top_p: float = 0.95
top_k: int = 50
max_tokens: int = 2048
stop_sequences: Optional[List[str]] = None
frequency_penalty: float = 0.0
presence_penalty: float = 0.0
repetition_penalty: float = 1.1
def to_dict(self) -> Dict[str, Any]:
"""Convert to dictionary, excluding None values."""
return {k: v for k, v in asdict(self).items() if v is not None}
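    # For reference (illustrative): with the defaults above, to_dict() yields
    #   {"temperature": 0.7, "top_p": 0.95, "top_k": 50, "max_tokens": 2048,
    #    "frequency_penalty": 0.0, "presence_penalty": 0.0, "repetition_penalty": 1.1}
    # stop_sequences is omitted because None values are filtered out.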
@dataclass
class CodeResponse:
"""Response from the Troviku API."""
code: str
language: str
explanation: Optional[str] = None
confidence: Optional[float] = None
tokens_used: Optional[int] = None
model_version: str = "1.1.0"
@classmethod
def from_api_response(cls, response: Dict[str, Any]) -> 'CodeResponse':
"""Create CodeResponse from API response."""
content = response['choices'][0]['message']['content']
usage = response.get('usage', {})
return cls(
code=content,
language="auto-detected",
tokens_used=usage.get('total_tokens'),
model_version=response.get('model', '1.1.0')
)
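    # For reference (illustrative): from_api_response expects a /chat/completions-style
    # payload shaped roughly like:
    #   {"choices": [{"message": {"content": "<generated code>"}}],
    #    "usage": {"total_tokens": 123},
    #    "model": "OpenTrouter/Troviku-1.1"}
    # "usage" and "model" are optional; any other keys are ignored.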
class TrovikuClient:
"""
Client for interacting with the Troviku-1.1 API.
Attributes:
api_key: Your OpenTrouter API key
base_url: Base URL for the API endpoint
model: Model identifier
"""
DEFAULT_BASE_URL = "https://api.opentrouter.ai/v1"
DEFAULT_MODEL = "OpenTrouter/Troviku-1.1"
def __init__(
self,
api_key: str,
base_url: str = DEFAULT_BASE_URL,
model: str = DEFAULT_MODEL,
timeout: int = 60
):
"""
Initialize the Troviku client.
Args:
api_key: Your OpenTrouter API key
base_url: Base URL for API requests
model: Model identifier to use
timeout: Request timeout in seconds
"""
self.api_key = api_key
self.base_url = base_url.rstrip('/')
self.model = model
self.timeout = timeout
self.session = requests.Session()
self.session.headers.update({
"Authorization": f"Bearer {api_key}",
"Content-Type": "application/json"
})
def generate(
self,
prompt: str,
language: Optional[Union[str, Language]] = None,
config: Optional[GenerationConfig] = None,
system_prompt: Optional[str] = None
) -> CodeResponse:
"""
Generate code based on a prompt.
Args:
prompt: Description of the code to generate
language: Target programming language
config: Generation configuration
system_prompt: Optional system prompt for context
Returns:
CodeResponse containing generated code
Raises:
requests.exceptions.RequestException: On API errors
"""
if config is None:
config = GenerationConfig()
if isinstance(language, Language):
language = language.value
messages = []
if system_prompt:
messages.append({"role": "system", "content": system_prompt})
elif language:
messages.append({
"role": "system",
"content": f"You are a code generation assistant. Generate {language} code."
})
messages.append({"role": "user", "content": prompt})
payload = {
"model": self.model,
"messages": messages,
**config.to_dict()
}
response = self._make_request("/chat/completions", payload)
return CodeResponse.from_api_response(response)
def complete(
self,
code: str,
language: Optional[Union[str, Language]] = None,
config: Optional[GenerationConfig] = None
) -> CodeResponse:
"""
Complete partial code.
Args:
code: Partial code to complete
language: Programming language of the code
config: Generation configuration
Returns:
CodeResponse with completed code
"""
if isinstance(language, Language):
language = language.value
prompt = f"Complete the following {language or ''} code:\n\n```\n{code}\n```"
return self.generate(prompt, language, config)
def explain(
self,
code: str,
language: Optional[Union[str, Language]] = None,
detail_level: str = "medium"
) -> str:
"""
Generate an explanation for given code.
Args:
code: Code to explain
language: Programming language
detail_level: Level of detail (low, medium, high)
Returns:
Explanation text
"""
if isinstance(language, Language):
language = language.value
prompt = f"""Explain the following {language or ''} code in {detail_level} detail:
```
{code}
```"""
config = GenerationConfig(temperature=0.3)
response = self.generate(prompt, language, config)
return response.code
def review(
self,
code: str,
language: Optional[Union[str, Language]] = None,
focus_areas: Optional[List[str]] = None
) -> str:
"""
Review code for quality, bugs, and improvements.
Args:
code: Code to review
language: Programming language
focus_areas: Specific areas to focus on (e.g., security, performance)
Returns:
Code review feedback
"""
if isinstance(language, Language):
language = language.value
focus = f" Focus on: {', '.join(focus_areas)}." if focus_areas else ""
prompt = f"""Review the following {language or ''} code for quality, potential bugs,
and improvements.{focus}
```
{code}
```"""
config = GenerationConfig(temperature=0.5, max_tokens=3000)
response = self.generate(prompt, language, config)
return response.code
def debug(
self,
code: str,
error_message: str,
language: Optional[Union[str, Language]] = None
) -> CodeResponse:
"""
Debug code with error message.
Args:
code: Code with the error
error_message: Error message or description
language: Programming language
Returns:
CodeResponse with fixed code
"""
if isinstance(language, Language):
language = language.value
prompt = f"""Debug the following {language or ''} code that produces this error:
Error: {error_message}
Code:
```
{code}
```
Provide the corrected code."""
config = GenerationConfig(temperature=0.3)
return self.generate(prompt, language, config)
def translate(
self,
code: str,
source_language: Union[str, Language],
target_language: Union[str, Language],
config: Optional[GenerationConfig] = None
) -> CodeResponse:
"""
Translate code from one language to another.
Args:
code: Source code
source_language: Source programming language
target_language: Target programming language
config: Generation configuration
Returns:
CodeResponse with translated code
"""
if isinstance(source_language, Language):
source_language = source_language.value
if isinstance(target_language, Language):
target_language = target_language.value
prompt = f"""Translate the following {source_language} code to {target_language}:
```
{code}
```"""
return self.generate(prompt, target_language, config)
def generate_tests(
self,
code: str,
language: Optional[Union[str, Language]] = None,
test_framework: Optional[str] = None
) -> CodeResponse:
"""
Generate unit tests for given code.
Args:
code: Code to generate tests for
language: Programming language
test_framework: Testing framework to use (e.g., pytest, jest)
Returns:
CodeResponse with test code
"""
if isinstance(language, Language):
language = language.value
framework_text = f" using {test_framework}" if test_framework else ""
prompt = f"""Generate comprehensive unit tests{framework_text} for the following {language or ''} code:
```
{code}
```"""
config = GenerationConfig(temperature=0.5, max_tokens=3000)
return self.generate(prompt, language, config)
def document(
self,
code: str,
language: Optional[Union[str, Language]] = None,
style: str = "google"
) -> CodeResponse:
"""
Generate documentation for code.
Args:
code: Code to document
language: Programming language
style: Documentation style (google, numpy, sphinx)
Returns:
CodeResponse with documented code
"""
if isinstance(language, Language):
language = language.value
prompt = f"""Add comprehensive {style}-style documentation to the following {language or ''} code:
```
{code}
```"""
config = GenerationConfig(temperature=0.3)
return self.generate(prompt, language, config)
def _make_request(self, endpoint: str, payload: Dict[str, Any]) -> Dict[str, Any]:
"""
Make a request to the API.
Args:
endpoint: API endpoint
payload: Request payload
Returns:
API response as dictionary
Raises:
requests.exceptions.RequestException: On API errors
"""
url = f"{self.base_url}{endpoint}"
try:
response = self.session.post(
url,
json=payload,
timeout=self.timeout
)
response.raise_for_status()
return response.json()
        except requests.exceptions.HTTPError as e:
            error_detail = e.response.text if e.response is not None else ""
            raise requests.exceptions.RequestException(
                f"API request failed: {e}. Details: {error_detail}"
            ) from e
def close(self):
"""Close the session."""
self.session.close()
def __enter__(self):
"""Context manager entry."""
return self
def __exit__(self, exc_type, exc_val, exc_tb):
"""Context manager exit."""
self.close()
# Example usage
if __name__ == "__main__":
# Initialize client
client = TrovikuClient(api_key="your_api_key_here")
# Generate code
response = client.generate(
prompt="Create a function to calculate factorial recursively",
language=Language.PYTHON
)
print("Generated Code:")
print(response.code)
print(f"\nTokens used: {response.tokens_used}")
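    # Generate with a custom configuration (illustrative values)
    custom_config = GenerationConfig(temperature=0.2, max_tokens=1024)
    configured = client.generate(
        prompt="Create a function that validates email addresses",
        language=Language.PYTHON,
        config=custom_config
    )
    print("\nGenerated with custom config:")
    print(configured.code)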
# Complete code
partial_code = """def fibonacci(n):
if n <= 1:
return n
# Complete this function"""
completion = client.complete(partial_code, Language.PYTHON)
print("\nCompleted Code:")
print(completion.code)
# Explain code
explanation = client.explain(
code="lambda x: x**2 + 2*x + 1",
language=Language.PYTHON
)
print("\nExplanation:")
print(explanation)
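    # Further illustrative calls: review, debug, and translate.
    # These assume a valid API key and network access, as above.
    review_feedback = client.review(
        code="def add(a, b): return a + b",
        language=Language.PYTHON,
        focus_areas=["readability", "edge cases"]
    )
    print("\nReview:")
    print(review_feedback)

    try:
        fixed = client.debug(
            code="print(undefined_variable)",
            error_message="NameError: name 'undefined_variable' is not defined",
            language=Language.PYTHON
        )
        print("\nDebugged Code:")
        print(fixed.code)
    except requests.exceptions.RequestException as exc:
        # _make_request surfaces API failures as RequestException
        print(f"Debug request failed: {exc}")

    translated = client.translate(
        code="def greet(name): return f'Hello, {name}!'",
        source_language=Language.PYTHON,
        target_language=Language.JAVASCRIPT
    )
    print("\nTranslated Code:")
    print(translated.code)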
client.close()
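
    # The client can also be used as a context manager (illustrative);
    # the underlying session is closed automatically on exit.
    with TrovikuClient(api_key="your_api_key_here") as managed_client:
        tests = managed_client.generate_tests(
            code="def add(a, b):\n    return a + b",
            language=Language.PYTHON,
            test_framework="pytest"
        )
        print("\nGenerated Tests:")
        print(tests.code)

        documented = managed_client.document(
            code="def add(a, b):\n    return a + b",
            language=Language.PYTHON,
            style="google"
        )
        print("\nDocumented Code:")
        print(documented.code)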