Compare commits

...

13 Commits

Author SHA1 Message Date
Dallin Romney
425ca9c64d fix: main release trigger 2025-09-10 14:33:41 -07:00
Dallin Romney
de53721357 vscode 1.2.3 2025-09-10 14:16:38 -07:00
Dallin Romney
d43f2290db fix: response actions disappearing and causing jumping while streaming 2025-09-10 14:15:16 -07:00
Dallin Romney
62ddbf2081 fix: show added/removed diff chars 2025-09-10 14:14:41 -07:00
Dallin Romney
be15de4d4c fix: clean up diff viewer 2025-09-10 14:14:35 -07:00
Dallin Romney
810d39fe60 fix: don't show unchanged diff sections in extension UI 2025-09-10 14:14:29 -07:00
uinstinct
eef181fcc1 fix: abort2error during stream fim 2025-09-10 14:08:01 -07:00
Fred Bricon
b786e08b04 fix: prevent AUTODETECT model from being queried on Ollama
Signed-off-by: Fred Bricon <fbricon@gmail.com>
2025-09-10 14:07:02 -07:00
uinstinct
5abcdfdb90 fix: context items dialog not visible on conversation compaction 2025-09-10 14:06:20 -07:00
Dallin Romney
c80b445391 fix: diff line color overflow 2025-09-10 14:04:43 -07:00
Dallin Romney
26284e24e5 fix: z.string fallback for model capability 2025-09-10 14:04:23 -07:00
chezsmithy
aaa06b7d3c fix: 🎨 2025-09-10 14:03:43 -07:00
chezsmithy
662687fe94 fix: 🚑 Fix bedrock conversation prompt caching 2025-09-10 14:03:38 -07:00
11 changed files with 216 additions and 106 deletions

View File

@@ -20,7 +20,7 @@ jobs:
- id: check
working-directory: .
run: |
if [[ "${{ github.event.release.tag_name }}" == v1.0.*-vscode ]]; then
if [[ "${{ github.event.release.tag_name }}" == v1.2.*-vscode ]]; then
echo "should_run=true" >> $GITHUB_OUTPUT
else
echo "should_run=false" >> $GITHUB_OUTPUT

View File

@@ -43,7 +43,8 @@
"extensions/vscode/e2e/storage": true,
"extensions/vscode/e2e/vsix": true,
"extensions/.continue-debug": true,
"extensions/cli/dist/**": true
"extensions/cli/dist/**": true,
"packages/config-yaml/dist/**": true
// "sync/**": true
},
"editor.formatOnSave": true,

View File

@@ -370,8 +370,17 @@ export abstract class BaseLLM implements ILLM {
},
});
if (error !== undefined) {
if (error === "cancel" || error.name === "AbortError") {
if (typeof error === "undefined") {
interaction?.logItem({
kind: "success",
promptTokens,
generatedTokens,
thinkingTokens,
usage,
});
return "success";
} else {
if (error === "cancel" || error?.name?.includes("AbortError")) {
interaction?.logItem({
kind: "cancel",
promptTokens,
@@ -393,15 +402,6 @@ export abstract class BaseLLM implements ILLM {
});
return "error";
}
} else {
interaction?.logItem({
kind: "success",
promptTokens,
generatedTokens,
thinkingTokens,
usage,
});
return "success";
}
}
@@ -615,6 +615,7 @@ export abstract class BaseLLM implements ILLM {
kind: "chunk",
chunk: formattedContent,
});
completion += formattedContent;
yield content;
}
@@ -630,6 +631,7 @@ export abstract class BaseLLM implements ILLM {
kind: "chunk",
chunk,
});
completion += chunk;
yield chunk;
}

View File

@@ -532,7 +532,7 @@ describe("Bedrock", () => {
);
});
it("should add cache points for prompt caching when needed", () => {
it("should add cache points when cacheBehavior.cacheConversation is enabled", () => {
// Create a test instance with caching behavior enabled
const bedrockWithCaching = new TestBedrock({
apiKey: "test-key",
@@ -576,5 +576,95 @@ describe("Bedrock", () => {
const userMessageText = converted[0].content?.[0].text;
expect(userMessageText).toMatch(/First message.+/);
});
it("should add cache points when completionOptions.promptCaching is enabled", () => {
// Create a test instance with prompt caching enabled via completionOptions
const bedrockWithPromptCaching = new TestBedrock({
apiKey: "test-key",
model: "anthropic.claude-3-sonnet-20240229-v1:0",
region: "us-east-1",
});
// Set completionOptions.promptCaching (without cacheBehavior)
bedrockWithPromptCaching.completionOptions = {
...bedrockWithPromptCaching.completionOptions,
promptCaching: true,
};
const messages: ChatMessage[] = [
{ role: "user", content: "First message" } as UserChatMessage,
{ role: "assistant", content: "First response" },
{ role: "user", content: "Second message" } as UserChatMessage,
];
const availableTools = new Set<string>();
const converted = bedrockWithPromptCaching["_convertMessages"](
messages,
availableTools,
);
// The last two user messages should have cache points
expect(converted.length).toBe(3);
expect(converted[0].role).toBe("user");
expect(converted[0].content?.some((block) => block.cachePoint)).toBe(
true,
);
expect(converted[1].role).toBe("assistant");
expect(converted[2].role).toBe("user");
expect(converted[2].content?.some((block) => block.cachePoint)).toBe(
true,
);
// Verify that text content includes a cache ID suffix
const userMessageText = converted[0].content?.[0].text;
expect(userMessageText).toMatch(/First message.+/);
});
it("should not add cache points when both caching options are disabled", () => {
// Create a test instance with no caching enabled
const bedrockNoCaching = new TestBedrock({
apiKey: "test-key",
model: "anthropic.claude-3-sonnet-20240229-v1:0",
region: "us-east-1",
});
// Explicitly set no caching
bedrockNoCaching.cacheBehavior = {
cacheConversation: false,
cacheSystemMessage: false,
};
bedrockNoCaching.completionOptions = {
...bedrockNoCaching.completionOptions,
promptCaching: false,
};
const messages: ChatMessage[] = [
{ role: "user", content: "First message" } as UserChatMessage,
{ role: "assistant", content: "First response" },
{ role: "user", content: "Second message" } as UserChatMessage,
];
const availableTools = new Set<string>();
const converted = bedrockNoCaching["_convertMessages"](
messages,
availableTools,
);
// No cache points should be added
expect(converted.length).toBe(3);
expect(converted[0].role).toBe("user");
expect(converted[0].content?.some((block) => block.cachePoint)).toBe(
false,
);
expect(converted[1].role).toBe("assistant");
expect(converted[2].role).toBe("user");
expect(converted[2].content?.some((block) => block.cachePoint)).toBe(
false,
);
// Verify that text content doesn't include a cache ID suffix
const userMessageText = converted[0].content?.[0].text;
expect(userMessageText).toBe("First message");
});
});
});

View File

@@ -513,7 +513,10 @@ class Bedrock extends BaseLLM {
// The second-to-last because it retrieves potentially already cached contents,
// The last one because we want it cached for later retrieval.
// See: https://docs.aws.amazon.com/bedrock/latest/userguide/prompt-caching.html
if (this.cacheBehavior?.cacheConversation) {
if (
this.cacheBehavior?.cacheConversation ||
this.completionOptions.promptCaching
) {
this._addCachingToLastTwoUserMessages(converted);
}

View File

@@ -150,7 +150,7 @@ class Ollama extends BaseLLM implements ModelInstaller {
constructor(options: LLMOptions) {
super(options);
if (options.isFromAutoDetect) {
if (options.model === "AUTODETECT") {
return;
}
const headers: Record<string, string> = {

View File

@@ -2,7 +2,7 @@
"name": "continue",
"icon": "media/icon.png",
"author": "Continue Dev, Inc",
"version": "1.3.3",
"version": "1.2.3",
"repository": {
"type": "git",
"url": "https://github.com/continuedev/continue"

View File

@@ -22,9 +22,6 @@ export default function StepContainer(props: StepContainerProps) {
const dispatch = useDispatch();
const [isTruncated, setIsTruncated] = useState(false);
const isStreaming = useAppSelector((state) => state.session.isStreaming);
const historyItemAfterThis = useAppSelector(
(state) => state.session.history[props.index + 1],
);
const uiConfig = useAppSelector(selectUIConfig);
// Calculate dimming and indicator state based on latest summary index
@@ -34,22 +31,12 @@ export default function StepContainer(props: StepContainerProps) {
const isLatestSummary =
latestSummaryIndex !== -1 && props.index === latestSummaryIndex;
const isNextMsgAssistantOrThinking =
historyItemAfterThis?.message.role === "assistant" ||
historyItemAfterThis?.message.role === "thinking" ||
historyItemAfterThis?.message.role === "tool";
const shouldRenderResponseAction = () => {
if (isNextMsgAssistantOrThinking) {
return false;
}
if (!historyItemAfterThis) {
return !props.item.toolCallStates;
}
return true;
};
const historyItemAfterThis = useAppSelector(
(state) => state.session.history[props.index + 1],
);
const showResponseActions =
(props.isLast || historyItemAfterThis?.message.role === "user") &&
!(props.isLast && (isStreaming || props.item.toolCallStates));
useEffect(() => {
if (!isStreaming) {
@@ -110,9 +97,9 @@ export default function StepContainer(props: StepContainerProps) {
{props.isLast && <ThinkingIndicator historyItem={props.item} />}
</div>
{shouldRenderResponseAction() && !isStreaming && (
{showResponseActions && (
<div
className={`mt-2 h-7 transition-opacity duration-300 ease-in-out ${isBeforeLatestSummary ? "opacity-35" : ""}`}
className={`mt-2 h-7 transition-opacity duration-300 ease-in-out ${isBeforeLatestSummary || isStreaming ? "opacity-35" : ""} ${isStreaming && "pointer-events-none cursor-not-allowed"}`}
>
<ResponseActions
isTruncated={isTruncated}

View File

@@ -288,19 +288,17 @@ export function Chat() {
if (message.role === "user") {
return (
<div className={isBeforeLatestSummary ? "opacity-50" : ""}>
<ContinueInputBox
onEnter={(editorState, modifiers) =>
sendInput(editorState, modifiers, index)
}
isLastUserInput={isLastUserInput(index)}
isMainInput={false}
editorState={editorState}
contextItems={contextItems}
appliedRules={appliedRules}
inputId={message.id}
/>
</div>
<ContinueInputBox
onEnter={(editorState, modifiers) =>
sendInput(editorState, modifiers, index)
}
isLastUserInput={isLastUserInput(index)}
isMainInput={false}
editorState={editorState}
contextItems={contextItems}
appliedRules={appliedRules}
inputId={message.id}
/>
);
}

View File

@@ -14,6 +14,7 @@ import {
selectToolCallById,
} from "../../../redux/selectors/selectToolCalls";
import { performFindAndReplace } from "../../../util/clientTools/findAndReplaceUtils";
import { cn } from "../../../util/cn";
import { getStatusIcon } from "./utils";
interface FindAndReplaceDisplayProps {
@@ -25,6 +26,48 @@ interface FindAndReplaceDisplayProps {
historyIndex: number;
}
const MAX_SAME_LINES = 2;
function EllipsisLine() {
return (
<div className="text-description-muted px-3 py-1 text-center font-mono">
  ⋮
</div>
);
}
function DiffLines({
lines,
className = "",
diffChar = " ",
diffCharClass = "text-description-muted",
}: {
lines: string[];
diffChar?: string;
diffCharClass?: string;
className?: string;
}) {
return (
<>
{lines.map((line, lineIndex) => {
const isLastPartLine = lineIndex === lines.length - 1;
if (line === "" && isLastPartLine) return null;
return (
<div
key={lineIndex}
className={cn("text-foreground px-3 py-px font-mono", className)}
>
<span className={cn("mr-2 select-none", diffCharClass)}>
{diffChar}
</span>
{line}
</div>
);
})}
</>
);
}
export function FindAndReplaceDisplay({
fileUri,
relativeFilePath,
@@ -198,72 +241,54 @@ export function FindAndReplaceDisplay({
className={`${config?.ui?.showChatScrollbar ? "thin-scrollbar" : "no-scrollbar"} max-h-72 overflow-auto`}
>
<pre
className={`bg-editor m-0 w-full text-xs leading-tight ${config?.ui?.codeWrap ? "whitespace-pre-wrap" : "whitespace-pre"}`}
className={`bg-editor m-0 w-fit min-w-full text-xs leading-tight ${config?.ui?.codeWrap ? "whitespace-pre-wrap" : "whitespace-pre"}`}
>
{diffResult.diff.map((part, index) => {
{diffResult.diff?.map((part, index) => {
if (part.removed) {
return (
<div
<DiffLines
key={index}
className="text-foreground border-l-4 border-red-900 bg-red-900/30"
>
{part.value.split("\n").map((line, lineIndex) => {
if (
line === "" &&
lineIndex === part.value.split("\n").length - 1
)
return null;
return (
<div key={lineIndex} className="px-3 py-px font-mono">
<span className="mr-2 select-none text-red-600">-</span>
{line}
</div>
);
})}
</div>
lines={part.value.split("\n")}
className="border-l-4 border-red-900 bg-red-900/30"
diffCharClass="text-red-600"
diffChar="-"
/>
);
} else if (part.added) {
return (
<div
<DiffLines
key={index}
className="text-foreground border-l-4 border-green-600 bg-green-600/20"
>
{part.value.split("\n").map((line, lineIndex) => {
if (
line === "" &&
lineIndex === part.value.split("\n").length - 1
)
return null;
return (
<div key={lineIndex} className="px-3 py-px font-mono">
<span className="mr-2 select-none text-green-600">+</span>
{line}
</div>
);
})}
</div>
lines={part.value.split("\n")}
diffCharClass="text-green-600"
className="border-l-4 border-green-600 bg-green-600/20"
diffChar="+"
/>
);
} else {
const isFirst = index === 0;
const isLast = index === diffResult.diff.length - 1;
const lines = part.value.split("\n");
const showStartEllipsis = isFirst && lines.length > MAX_SAME_LINES;
const showEndEllipsis = isLast && lines.length > MAX_SAME_LINES;
const showMiddleEllipses =
!isFirst && !isLast && lines.length > MAX_SAME_LINES * 2 + 1;
const startLines = showStartEllipsis
? lines.slice(-MAX_SAME_LINES)
: showMiddleEllipses || showEndEllipsis
? lines.slice(0, MAX_SAME_LINES)
: lines;
const endLines = showMiddleEllipses
? lines.slice(-MAX_SAME_LINES)
: [];
return (
<div key={index}>
{part.value.split("\n").map((line, lineIndex) => {
if (
line === "" &&
lineIndex === part.value.split("\n").length - 1
)
return null;
return (
<div
key={lineIndex}
className="text-foreground px-3 py-px font-mono"
>
<span className="text-description-muted mr-2 select-none">
{" "}
</span>
{line}
</div>
);
})}
{showStartEllipsis && <EllipsisLine />}
<DiffLines lines={startLines} />
{showMiddleEllipses && <EllipsisLine />}
<DiffLines lines={endLines} />
{showEndEllipsis && <EllipsisLine />}
</div>
);
}

View File

@@ -37,8 +37,12 @@ export const modelCapabilitySchema = z.union([
z.literal("tool_use"),
z.literal("image_input"),
z.literal("next_edit"),
z.string(),
]);
export type ModelCapability = z.infer<typeof modelCapabilitySchema>;
// not ideal but lose type suggestions if use z.infer because of the string fallback
export type ModelCapability = "tool_use" | "image_input" | "next_edit";
export const completionOptionsSchema = z.object({
contextLength: z.number().optional(),