Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
32 changes: 31 additions & 1 deletion src/celeste/modalities/text/providers/deepseek/models.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
"""DeepSeek models for text modality."""

from celeste.constraints import Range, Schema, ToolChoiceSupport, ToolSupport
from celeste.constraints import Choice, Range, Schema, ToolChoiceSupport, ToolSupport
from celeste.core import Modality, Operation, Parameter, Provider
from celeste.models import Model

Expand Down Expand Up @@ -33,4 +33,34 @@
TextParameter.OUTPUT_SCHEMA: Schema(),
},
),
# DeepSeek V4 Flash: adds thinking-level control and tool calling on top of
# the earlier text-generation entries in this registry.
Model(
    id="deepseek-v4-flash",
    provider=Provider.DEEPSEEK,
    display_name="DeepSeek V4 Flash",
    # Text generation only; streamed responses are supported.
    operations={Modality.TEXT: {Operation.GENERATE}},
    streaming=True,
    parameter_constraints={
        Parameter.TEMPERATURE: Range(min=0.0, max=2.0, step=0.01),
        Parameter.MAX_TOKENS: Range(min=1, max=384_000, step=1),
        # NOTE(review): option set is "disabled"/"high"/"max" with no
        # intermediate levels — confirm against the DeepSeek V4 API docs.
        TextParameter.THINKING_LEVEL: Choice(options=["disabled", "high", "max"]),
        TextParameter.OUTPUT_SCHEMA: Schema(),
        # Empty tools list here means "tool calling supported, none preset".
        TextParameter.TOOLS: ToolSupport(tools=[]),
        TextParameter.TOOL_CHOICE: ToolChoiceSupport(),
    },
),
# DeepSeek V4 Pro: same capability surface and constraints as V4 Flash;
# the two entries differ only in model id and display name.
Model(
    id="deepseek-v4-pro",
    provider=Provider.DEEPSEEK,
    display_name="DeepSeek V4 Pro",
    # Text generation only; streamed responses are supported.
    operations={Modality.TEXT: {Operation.GENERATE}},
    streaming=True,
    parameter_constraints={
        Parameter.TEMPERATURE: Range(min=0.0, max=2.0, step=0.01),
        Parameter.MAX_TOKENS: Range(min=1, max=384_000, step=1),
        # NOTE(review): option set is "disabled"/"high"/"max" with no
        # intermediate levels — confirm against the DeepSeek V4 API docs.
        TextParameter.THINKING_LEVEL: Choice(options=["disabled", "high", "max"]),
        TextParameter.OUTPUT_SCHEMA: Schema(),
        # Empty tools list here means "tool calling supported, none preset".
        TextParameter.TOOLS: ToolSupport(tools=[]),
        TextParameter.TOOL_CHOICE: ToolChoiceSupport(),
    },
),
]
19 changes: 18 additions & 1 deletion src/celeste/modalities/text/providers/deepseek/parameters.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,24 @@
"""DeepSeek parameter mappers for text."""

from celeste.parameters import ParameterMapper
from celeste.providers.deepseek.chat.parameters import (
ThinkingLevelMapper as _ThinkingLevelMapper,
)
from celeste.types import TextContent

from ...parameters import TextParameter
from ...protocols.chatcompletions.parameters import CHATCOMPLETIONS_PARAMETER_MAPPERS

DEEPSEEK_PARAMETER_MAPPERS = CHATCOMPLETIONS_PARAMETER_MAPPERS

class ThinkingLevelMapper(_ThinkingLevelMapper):
    """Re-export the shared DeepSeek chat thinking-level mapper, keyed by the
    text-modality parameter name so the text pipeline can look it up."""

    name = TextParameter.THINKING_LEVEL


# Text-modality mapper set for DeepSeek: everything from the generic
# chat-completions protocol, extended with the thinking-level mapper above.
DEEPSEEK_PARAMETER_MAPPERS: list[ParameterMapper[TextContent]] = list(
    CHATCOMPLETIONS_PARAMETER_MAPPERS
) + [ThinkingLevelMapper()]

__all__ = ["DEEPSEEK_PARAMETER_MAPPERS"]
35 changes: 35 additions & 0 deletions src/celeste/providers/deepseek/chat/parameters.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
"""DeepSeek Chat API parameter mappers."""

from typing import Any

from celeste.models import Model
from celeste.parameters import ParameterMapper
from celeste.types import TextContent


class ThinkingLevelMapper(ParameterMapper[TextContent]):
    """Translate the generic thinking_level parameter into the DeepSeek Chat
    API's ``thinking`` and ``reasoning_effort`` request fields.

    Mapping:
      * ``"disabled"`` -> ``thinking={"type": "disabled"}``
      * ``"high"``     -> ``thinking={"type": "enabled"}``, ``reasoning_effort="high"``
      * ``"max"``      -> ``thinking={"type": "enabled"}``, ``reasoning_effort="max"``
    """

    def map(
        self,
        request: dict[str, Any],
        value: object,
        model: Model,
    ) -> dict[str, Any]:
        """Apply the thinking-level setting to *request* and return it."""
        level = self._validate_value(value, model)
        # No validated value -> leave the request untouched.
        if level is None:
            return request
        if level == "disabled":
            request["thinking"] = {"type": "disabled"}
            return request
        # Any enabled level is forwarded verbatim as the reasoning effort.
        request["thinking"] = {"type": "enabled"}
        request["reasoning_effort"] = level
        return request


__all__ = ["ThinkingLevelMapper"]
Loading