diff --git a/src/celeste/modalities/text/providers/deepseek/models.py b/src/celeste/modalities/text/providers/deepseek/models.py index 39693086..fb4a9eff 100644 --- a/src/celeste/modalities/text/providers/deepseek/models.py +++ b/src/celeste/modalities/text/providers/deepseek/models.py @@ -1,6 +1,6 @@ """DeepSeek models for text modality.""" -from celeste.constraints import Range, Schema, ToolChoiceSupport, ToolSupport +from celeste.constraints import Choice, Range, Schema, ToolChoiceSupport, ToolSupport from celeste.core import Modality, Operation, Parameter, Provider from celeste.models import Model @@ -33,4 +33,34 @@ TextParameter.OUTPUT_SCHEMA: Schema(), }, ), + Model( + id="deepseek-v4-flash", + provider=Provider.DEEPSEEK, + display_name="DeepSeek V4 Flash", + operations={Modality.TEXT: {Operation.GENERATE}}, + streaming=True, + parameter_constraints={ + Parameter.TEMPERATURE: Range(min=0.0, max=2.0, step=0.01), + Parameter.MAX_TOKENS: Range(min=1, max=384_000, step=1), + TextParameter.THINKING_LEVEL: Choice(options=["disabled", "high", "max"]), + TextParameter.OUTPUT_SCHEMA: Schema(), + TextParameter.TOOLS: ToolSupport(tools=[]), + TextParameter.TOOL_CHOICE: ToolChoiceSupport(), + }, + ), + Model( + id="deepseek-v4-pro", + provider=Provider.DEEPSEEK, + display_name="DeepSeek V4 Pro", + operations={Modality.TEXT: {Operation.GENERATE}}, + streaming=True, + parameter_constraints={ + Parameter.TEMPERATURE: Range(min=0.0, max=2.0, step=0.01), + Parameter.MAX_TOKENS: Range(min=1, max=384_000, step=1), + TextParameter.THINKING_LEVEL: Choice(options=["disabled", "high", "max"]), + TextParameter.OUTPUT_SCHEMA: Schema(), + TextParameter.TOOLS: ToolSupport(tools=[]), + TextParameter.TOOL_CHOICE: ToolChoiceSupport(), + }, + ), ] diff --git a/src/celeste/modalities/text/providers/deepseek/parameters.py b/src/celeste/modalities/text/providers/deepseek/parameters.py index 2dcb360b..8587f656 100644 --- a/src/celeste/modalities/text/providers/deepseek/parameters.py +++ 
b/src/celeste/modalities/text/providers/deepseek/parameters.py @@ -1,7 +1,24 @@ """DeepSeek parameter mappers for text.""" +from celeste.parameters import ParameterMapper +from celeste.providers.deepseek.chat.parameters import ( + ThinkingLevelMapper as _ThinkingLevelMapper, +) +from celeste.types import TextContent + +from ...parameters import TextParameter from ...protocols.chatcompletions.parameters import CHATCOMPLETIONS_PARAMETER_MAPPERS -DEEPSEEK_PARAMETER_MAPPERS = CHATCOMPLETIONS_PARAMETER_MAPPERS + +class ThinkingLevelMapper(_ThinkingLevelMapper): + """Re-key the shared DeepSeek thinking_level mapper to the text modality's parameter.""" + + name = TextParameter.THINKING_LEVEL + + +DEEPSEEK_PARAMETER_MAPPERS: list[ParameterMapper[TextContent]] = [ + *CHATCOMPLETIONS_PARAMETER_MAPPERS, + ThinkingLevelMapper(), +] __all__ = ["DEEPSEEK_PARAMETER_MAPPERS"] diff --git a/src/celeste/providers/deepseek/chat/parameters.py b/src/celeste/providers/deepseek/chat/parameters.py new file mode 100644 index 00000000..b47ce4c6 --- /dev/null +++ b/src/celeste/providers/deepseek/chat/parameters.py @@ -0,0 +1,35 @@ +"""DeepSeek Chat API parameter mappers.""" + +from typing import Any + +from celeste.models import Model +from celeste.parameters import ParameterMapper +from celeste.types import TextContent + + +class ThinkingLevelMapper(ParameterMapper[TextContent]): + """Map thinking_level to DeepSeek's thinking + reasoning_effort fields. 
+ + - "disabled" → thinking={"type": "disabled"} + - "high" → thinking={"type": "enabled"}, reasoning_effort="high" + - "max" → thinking={"type": "enabled"}, reasoning_effort="max" + """ + + def map( + self, + request: dict[str, Any], + value: object, + model: Model, + ) -> dict[str, Any]: + validated_value = self._validate_value(value, model) + if validated_value is None: + return request + if validated_value == "disabled": + request["thinking"] = {"type": "disabled"} + else: + request["thinking"] = {"type": "enabled"} + request["reasoning_effort"] = validated_value + return request + + +__all__ = ["ThinkingLevelMapper"]