Squash bugs

This commit is contained in:
Ian Arawjo 2024-03-30 20:09:03 -04:00
parent 8b996eacfc
commit 5ee42f660e
10 changed files with 63 additions and 15 deletions

View File

@ -449,7 +449,9 @@ The Python interpreter in the browser is Pyodide. You may not be able to run some
setLastResponses(stripLLMDetailsFromResponses(resps));
setStatus(Status.READY);
})
.catch(handleError);
.catch(() => {
// soft fail
});
}, []);
// On upstream changes
@ -701,7 +703,7 @@ The Python interpreter in the browser is Pyodide. You may not be able to run some
// Custom buttons for the node label
const customButtons = useMemo(() => {
const btns = [];
const btns: React.ReactNode[] = [];
// If this is Python and we are running locally, the user has
// two options — whether to run code in a sandbox with Pyodide, or from Flask (unsafe):

View File

@ -333,7 +333,7 @@ const GlobalSettingsModal = forwardRef<GlobalSettingsModalRef, object>(
<Tabs.Tab value="api-keys">API Keys</Tabs.Tab>
<Tabs.Tab value="ai-support">AI Support (BETA)</Tabs.Tab>
<Tabs.Tab value="custom-providers">Custom Providers</Tabs.Tab>
<Tabs.Tab value="advanced">Advanced Settings</Tabs.Tab>
<Tabs.Tab value="advanced">Advanced</Tabs.Tab>
</Tabs.List>
<Tabs.Panel value="api-keys" pt="xs">

View File

@ -2045,7 +2045,9 @@ export function getSettingsSchemaForLLM(
): ModelSettingsDict | undefined {
const llm_provider = getProvider(llm_name);
const provider_to_settings_schema: { [K in LLMProvider]?: ModelSettingsDict } = {
const provider_to_settings_schema: {
[K in LLMProvider]?: ModelSettingsDict;
} = {
[LLMProvider.OpenAI]: GPT4Settings,
[LLMProvider.Anthropic]: ClaudeSettings,
[LLMProvider.Google]: PaLM2Settings,

View File

@ -21,7 +21,7 @@ export interface NodeLabelProps {
editable?: boolean;
status?: Status;
isRunning?: boolean;
customButtons?: React.ReactElement[];
customButtons?: React.ReactNode[];
handleRunClick?: () => void;
handleStopClick?: (nodeId: string) => void;
handleRunHover?: () => void;

View File

@ -20,7 +20,7 @@ import {
Tooltip,
} from "@mantine/core";
import { useDisclosure } from "@mantine/hooks";
import { IconList } from "@tabler/icons-react";
import { IconEraser, IconList } from "@tabler/icons-react";
import useStore from "./store";
import BaseNode from "./BaseNode";
import NodeLabel from "./NodeLabelComponent";
@ -56,6 +56,7 @@ import {
import { AlertModalContext } from "./AlertModal";
import { Status } from "./StatusIndicatorComponent";
import {
clearCachedResponses,
countQueries,
generatePrompts,
grabResponses,
@ -1073,8 +1074,28 @@ Soft failing by replacing undefined with empty strings.`,
[textAreaRef],
);
// Add custom context menu options on right-click.
// 1. Convert TextFields to Items Node, for convenience.
const customContextMenuItems = useMemo(
() => [
{
key: "clear_cache",
icon: <IconEraser size="11pt" />,
text: "Clear cached responses",
onClick: () => {
// Clear responses associated with this node
clearCachedResponses(id);
// Remove items and reset status
setStatus(Status.NONE);
setJSONResponses(null);
},
},
],
[id],
);
return (
<BaseNode classNames="prompt-node" nodeId={id}>
<BaseNode classNames="prompt-node" nodeId={id} contextMenuExts={customContextMenuItems}>
<NodeLabel
title={data.title || node_default_title}
nodeId={id}

View File

@ -244,7 +244,7 @@ export const genResponseTextsDisplay = (
</Flex>
{same_resp_text_counts[r].length > 1 ? (
<span className="num-same-responses">
{same_resp_text_counts[r]} times
{same_resp_text_counts[r].length} times
</span>
) : (
<></>

View File

@ -873,8 +873,8 @@ const VisNode: React.FC<VisNodeProps> = ({ data, id }) => {
.then(function (resps) {
if (resps && resps.length > 0) {
// Store responses and extract + store vars
// @ts-expect-error toReversed exists, but TypeScript does not see it.
setResponses(resps.toReversed());
// toReversed exists, but TypeScript does not see it.
setResponses((resps as any).toReversed());
// Find all vars in responses
let varnames: string[] | Set<string> = new Set<string>();

View File

@ -1369,6 +1369,23 @@ export async function grabResponses(
return grabbed_resps;
}
/**
 * Deletes cache data for the responses indexed by 'id'.
 * Clears every StorageCache entry associated with the id, not just the
 * main `${id}.json` entry.
 * @param id The id of the cached responses to clear.
 * @returns true if cache data existed for 'id' and was cleared;
 *   false (after logging an error) if no cache entry was found.
 */
export async function clearCachedResponses(id: string): Promise<boolean> {
if (!StorageCache.has(`${id}.json`)) {
console.error(`Did not find cache data for id ${id}`);
return false;
}
// Clear all cache items related to 'id'.
// NOTE(review): assumes get_cache_keys_related_to_id(id, true) includes
// the `${id}.json` key itself — confirm against its definition.
for (const k of get_cache_keys_related_to_id(id, true))
StorageCache.clear(k);
return true;
}
/**
* Exports the cached data relevant to the given node id(s).
*

View File

@ -61,12 +61,19 @@ export default class StorageCache {
StorageCache.getInstance().storeCacheData(key, data);
}
private clearCache(): void {
this.data = {};
private clearCache(key?: string): void {
if (key === undefined)
this.data = {};
else if (key in this.data)
delete this.data[key];
}
public static clear(): void {
StorageCache.getInstance().clearCache();
/**
* Clears data in the cache.
* @param key Optional. A specific key to clear in the storage dict. If undefined, clears all data.
*/
public static clear(key?: string): void {
StorageCache.getInstance().clearCache(key);
}
/**

View File

@ -1232,7 +1232,6 @@ export async function call_ollama_provider(
function to_bedrock_chat_history(
chat_history: ChatHistory,
): BedrockChatMessage[] {
const role_map: Dict<string> = {
assistant: "ai",
user: "human",