Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[project]
name = "late-sdk"
version = "1.0.1"
version = "1.1.0"
description = "Python SDK for Late API - Social Media Scheduling"
readme = "README.md"
requires-python = ">=3.10"
Expand Down
35 changes: 35 additions & 0 deletions scripts/generate_mcp_docs.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
#!/usr/bin/env python3
"""
Generate MCP documentation from tool definitions.

Usage:
python scripts/generate_mcp_docs.py

This script generates MDX documentation from the centralized tool definitions
in src/late/mcp/tool_definitions.py
"""

import sys
from pathlib import Path

# Add src to path for imports so the script runs from a source checkout
# without the package being installed.
sys.path.insert(0, str(Path(__file__).parent.parent / "src"))

# NOTE: this import must stay below the sys.path tweak (deliberate E402).
# NOTE(review): TOOL_DEFINITIONS is imported but not referenced anywhere in
# this script — presumably kept for interactive/debug use; confirm or drop.
from late.mcp.tool_definitions import generate_mdx_docs, TOOL_DEFINITIONS


def main() -> None:
    """Print the generated MDX tool documentation, framed by banners.

    Output is meant to be copied by hand into ``claude-mcp.mdx`` under the
    ``## Tool Reference`` section; the banners delimit the copyable region.
    """
    banner = "=" * 60
    sections = [
        banner,
        "MCP Tool Documentation (generated from tool_definitions.py)",
        banner,
        "",
        generate_mdx_docs(),
        "",
        banner,
        "Copy the above into claude-mcp.mdx under '## Tool Reference'",
        banner,
    ]
    # One print of the joined sections emits exactly the same lines as
    # printing each section individually.
    print("\n".join(sections))


if __name__ == "__main__":
main()
5 changes: 2 additions & 3 deletions scripts/generate_models.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,7 @@ def main() -> int:

# Create output directory
output_dir.mkdir(parents=True, exist_ok=True)
output_file = output_dir / "models.py"

# Run datamodel-code-generator
cmd = [
Expand All @@ -43,7 +44,7 @@ def main() -> int:
"--input",
str(openapi_spec),
"--output",
str(output_dir),
str(output_file),
"--output-model-type",
"pydantic_v2.BaseModel",
"--input-file-type",
Expand All @@ -55,8 +56,6 @@ def main() -> int:
"--field-constraints",
"--use-field-description",
"--capitalise-enum-members",
"--enum-field-as-literal",
"all",
"--use-default-kwarg",
"--collapse-root-models",
"--use-union-operator",
Expand Down
33 changes: 32 additions & 1 deletion src/late/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,11 +16,42 @@
LateValidationError,
)
from .client.late_client import Late
from .enums import (
CaptionTone,
DayOfWeek,
FacebookContentType,
GoogleBusinessCTAType,
InstagramContentType,
MediaType,
Platform,
PostStatus,
TikTokCommercialContentType,
TikTokMediaType,
TikTokPrivacyLevel,
Visibility,
)

__version__ = "1.0.0"
__version__ = "1.1.0"

__all__ = [
# Client
"Late",
# Enums - Core
"Platform",
"PostStatus",
"MediaType",
"Visibility",
# Enums - Platform-specific
"InstagramContentType",
"FacebookContentType",
"TikTokPrivacyLevel",
"TikTokCommercialContentType",
"TikTokMediaType",
"GoogleBusinessCTAType",
# Enums - Tools & Queue
"CaptionTone",
"DayOfWeek",
# Exceptions
"LateAPIError",
"LateAuthenticationError",
"LateConnectionError",
Expand Down
19 changes: 10 additions & 9 deletions src/late/ai/content_generator.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,8 @@

from typing import TYPE_CHECKING, Any

from late.enums import CaptionTone, Platform

from .protocols import (
AIProvider,
GenerateRequest,
Expand All @@ -25,15 +27,16 @@ class ContentGenerator:

Example:
>>> from late.ai import ContentGenerator, GenerateRequest
>>> from late import Platform, CaptionTone
>>>
>>> # Using OpenAI
>>> generator = ContentGenerator(provider="openai", api_key="sk-...")
>>>
>>> response = generator.generate(
... GenerateRequest(
... prompt="Write a tweet about Python",
... platform="twitter",
... tone="professional",
... platform=Platform.TWITTER,
... tone=CaptionTone.PROFESSIONAL,
... )
... )
>>> print(response.text)
Expand Down Expand Up @@ -94,9 +97,7 @@ async def agenerate(self, request: GenerateRequest) -> GenerateResponse:
"""Generate content asynchronously."""
return await self._provider.agenerate(request)

async def agenerate_stream(
self, request: GenerateRequest
) -> AsyncIterator[str]:
async def agenerate_stream(self, request: GenerateRequest) -> AsyncIterator[str]:
"""Generate content as a stream."""
if not isinstance(self._provider, StreamingAIProvider):
raise NotImplementedError(
Expand All @@ -109,9 +110,9 @@ async def agenerate_stream(
def generate_post(
self,
topic: str,
platform: str,
platform: Platform | str,
*,
tone: str = "professional",
tone: CaptionTone | str = CaptionTone.PROFESSIONAL,
language: str = "en",
**kwargs: Any,
) -> str:
Expand Down Expand Up @@ -139,9 +140,9 @@ def generate_post(
async def agenerate_post(
self,
topic: str,
platform: str,
platform: Platform | str,
*,
tone: str = "professional",
tone: CaptionTone | str = CaptionTone.PROFESSIONAL,
language: str = "en",
**kwargs: Any,
) -> str:
Expand Down
10 changes: 5 additions & 5 deletions src/late/ai/protocols.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,8 @@
if TYPE_CHECKING:
from collections.abc import AsyncIterator

from late.enums import CaptionTone, Platform


@dataclass
class GenerateRequest:
Expand All @@ -21,8 +23,8 @@ class GenerateRequest:
system: str | None = None
max_tokens: int = 500
temperature: float = 0.7
platform: str | None = None # e.g., "twitter", "linkedin"
tone: str | None = None # e.g., "professional", "casual"
platform: Platform | str | None = None
tone: CaptionTone | str | None = None
language: str = "en"
context: dict[str, Any] = field(default_factory=dict)

Expand Down Expand Up @@ -68,8 +70,6 @@ class StreamingAIProvider(Protocol):
"""Protocol for streaming content generation."""

@abstractmethod
async def agenerate_stream(
self, request: GenerateRequest
) -> AsyncIterator[str]:
async def agenerate_stream(self, request: GenerateRequest) -> AsyncIterator[str]:
"""Generate content as a stream."""
...
24 changes: 15 additions & 9 deletions src/late/ai/providers/openai.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,8 @@
import os
from typing import TYPE_CHECKING, Any

from late.enums import Platform

from ..protocols import GenerateRequest, GenerateResponse

if TYPE_CHECKING:
Expand Down Expand Up @@ -55,8 +57,14 @@ def __init__(
def name(self) -> str:
return "openai"

@property
def model(self) -> str:
"""Current model being used."""
return self._model

@property
def default_model(self) -> str:
"""Default model if none specified."""
return "gpt-4o-mini"

def _build_messages(self, request: GenerateRequest) -> list[dict[str, str]]:
Expand All @@ -76,12 +84,12 @@ def _build_system_prompt(self, request: GenerateRequest) -> str:
parts = ["You are an expert social media content creator."]

if request.platform:
platform_guides = {
"twitter": "Keep it under 280 characters. Be concise and engaging.",
"linkedin": "Be professional and insightful. Use paragraphs.",
"instagram": "Be visual and use emojis. Include hashtag suggestions.",
"tiktok": "Be trendy and use Gen-Z language. Keep it fun.",
"facebook": "Be conversational and engaging.",
platform_guides: dict[Platform | str, str] = {
Platform.TWITTER: "Keep it under 280 characters. Be concise and engaging.",
Platform.LINKEDIN: "Be professional and insightful. Use paragraphs.",
Platform.INSTAGRAM: "Be visual and use emojis. Include hashtag suggestions.",
Platform.TIKTOK: "Be trendy and use Gen-Z language. Keep it fun.",
Platform.FACEBOOK: "Be conversational and engaging.",
}
guide = platform_guides.get(request.platform, "")
parts.append(f"Writing for {request.platform}. {guide}")
Expand Down Expand Up @@ -140,9 +148,7 @@ async def agenerate(self, request: GenerateRequest) -> GenerateResponse:
finish_reason=choice.finish_reason,
)

async def agenerate_stream(
self, request: GenerateRequest
) -> AsyncIterator[str]:
async def agenerate_stream(self, request: GenerateRequest) -> AsyncIterator[str]:
"""Generate content as a stream."""
stream = await self._async_client.chat.completions.create(
model=self._model,
Expand Down
20 changes: 15 additions & 5 deletions src/late/client/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -206,10 +206,14 @@ def _post(
headers=headers,
timeout=self.timeout,
) as client:
return self._request_with_retry(client, "POST", path, files=files, params=params)
return self._request_with_retry(
client, "POST", path, files=files, params=params
)

with self._sync_client() as client:
return self._request_with_retry(client, "POST", path, json=data, params=params)
return self._request_with_retry(
client, "POST", path, json=data, params=params
)

def _put(
self,
Expand Down Expand Up @@ -312,10 +316,14 @@ async def _apost(
headers=headers,
timeout=self.timeout,
) as client:
return await self._arequest_with_retry(client, "POST", path, files=files, params=params)
return await self._arequest_with_retry(
client, "POST", path, files=files, params=params
)

async with self._async_client() as client:
return await self._arequest_with_retry(client, "POST", path, json=data, params=params)
return await self._arequest_with_retry(
client, "POST", path, json=data, params=params
)

async def _aput(
self,
Expand All @@ -333,4 +341,6 @@ async def _adelete(
) -> dict[str, Any]:
"""Make an async DELETE request."""
async with self._async_client() as client:
return await self._arequest_with_retry(client, "DELETE", path, params=params)
return await self._arequest_with_retry(
client, "DELETE", path, params=params
)
8 changes: 5 additions & 3 deletions src/late/client/late_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ class Late(BaseClient):
Late API client for scheduling social media posts.

Example:
>>> from late import Late
>>> from late import Late, Platform
>>>
>>> # Initialize client
>>> client = Late(api_key="your_api_key")
Expand All @@ -33,7 +33,7 @@ class Late(BaseClient):
>>> # Create a post
>>> post = client.posts.create(
... content="Hello world!",
... platforms=[{"platform": "twitter", "accountId": "..."}],
... platforms=[{"platform": Platform.TWITTER, "accountId": "..."}],
... scheduled_for="2024-12-25T10:00:00Z",
... )
>>>
Expand All @@ -59,7 +59,9 @@ def __init__(
timeout: Request timeout in seconds
max_retries: Maximum retries for failed requests
"""
super().__init__(api_key, base_url=base_url, timeout=timeout, max_retries=max_retries)
super().__init__(
api_key, base_url=base_url, timeout=timeout, max_retries=max_retries
)

# Initialize resources
self.posts = PostsResource(self)
Expand Down
Loading