@@ -75,6 +75,25 @@ export class DeepSeekApi implements LLMApi {
75
75
}
76
76
}
77
77
78
+ // Detect and fix message ordering: ensure the first non-system message is a user message
79
+ const filteredMessages : ChatOptions [ "messages" ] = [ ] ;
80
+ let hasFoundFirstUser = false ;
81
+
82
+ for ( const msg of messages ) {
83
+ if ( msg . role === "system" ) {
84
+ // Keep all system messages
85
+ filteredMessages . push ( msg ) ;
86
+ } else if ( msg . role === "user" ) {
87
+ // User message directly added
88
+ filteredMessages . push ( msg ) ;
89
+ hasFoundFirstUser = true ;
90
+ } else if ( hasFoundFirstUser ) {
91
+ // After finding the first user message, all subsequent non-system messages are retained.
92
+ filteredMessages . push ( msg ) ;
93
+ }
94
+ // If hasFoundFirstUser is false and it is not a system message, it will be skipped.
95
+ }
96
+
78
97
const modelConfig = {
79
98
...useAppConfig . getState ( ) . modelConfig ,
80
99
...useChatStore . getState ( ) . currentSession ( ) . mask . modelConfig ,
@@ -85,7 +104,7 @@ export class DeepSeekApi implements LLMApi {
85
104
} ;
86
105
87
106
const requestPayload : RequestPayload = {
88
- messages,
107
+ messages : filteredMessages ,
89
108
stream : options . config . stream ,
90
109
model : modelConfig . model ,
91
110
temperature : modelConfig . temperature ,
0 commit comments