update: use stream mode all the time

This commit is contained in:
duanfuxiang
2025-04-30 19:23:20 +08:00
parent 61b42a8a07
commit db34038acc
4 changed files with 54 additions and 16 deletions

View File

@@ -128,7 +128,7 @@ export default function ApplyViewRoot({
</div>
<div className="view-header-title-container mod-at-start">
<div className="view-header-title">
Applying: {state?.file?.name ?? ''}
Applying: {state?.file ?? ''}
</div>
<div className="view-actions">
<button
@@ -157,8 +157,8 @@ export default function ApplyViewRoot({
<div className="cm-scroller">
<div className="cm-sizer">
<div className="infio-inline-title">
{state?.file?.name
? state.file.name.replace(/\.[^/.]+$/, '')
{state?.file
? state.file.replace(/\.[^/.]+$/, '')
: ''}
</div>

View File

@@ -217,21 +217,34 @@ export const InlineEdit: React.FC<InlineEditProps> = ({
endLine: defaultEndLine,
});
const response = await llmManager.generateResponse(chatModel, {
model: chatModel.modelId,
messages: requestMessages,
stream: false,
});
const stream = await llmManager.streamResponse(
chatModel,
{
messages: requestMessages,
model: chatModel.modelId,
max_tokens: settings.modelOptions.max_tokens,
temperature: settings.modelOptions.temperature,
// top_p: settings.modelOptions.top_p,
// frequency_penalty: settings.modelOptions.frequency_penalty,
// presence_penalty: settings.modelOptions.presence_penalty,
stream: true,
}
)
if (!response.choices[0].message.content) {
let response_content = ""
for await (const chunk of stream) {
const content = chunk.choices[0]?.delta?.content ?? ''
response_content += content
}
if (!response_content) {
setIsSubmitting(false);
throw new Error("Empty response from LLM");
}
const parsedBlock = parseSmartComposeBlock(
response.choices[0].message.content
response_content
);
const finalContent = parsedBlock?.content || response.choices[0].message.content;
const finalContent = parsedBlock?.content || response_content;
if (!activeFile || !(activeFile.path && typeof activeFile.path === 'string')) {
setIsSubmitting(false);