Performance improvements

Mantine React Table was rerendering cells even when hidden.
This commit is contained in:
Ian Arawjo 2025-03-01 10:02:12 -05:00
parent 4a226f79bb
commit 8d71b54a8f
8 changed files with 611 additions and 507 deletions

View File

@ -4,6 +4,7 @@ import React, {
useRef,
useEffect,
useContext,
useMemo,
} from "react";
import ReactFlow, { Controls, Background, ReactFlowInstance } from "reactflow";
import {
@ -196,6 +197,15 @@ const getSharedFlowURLParam = () => {
return undefined;
};
const getWindowSize = () => ({
width: window.innerWidth,
height: window.innerHeight,
});
const getWindowCenter = () => {
const { width, height } = getWindowSize();
return { centerX: width / 2.0, centerY: height / 2.0 };
};
const MenuTooltip = ({
label,
children,
@ -277,71 +287,35 @@ const App = () => {
const [isLoading, setIsLoading] = useState(true);
// Helper
const getWindowSize = () => ({
width: window.innerWidth,
height: window.innerHeight,
});
const getWindowCenter = () => {
const { width, height } = getWindowSize();
return { centerX: width / 2.0, centerY: height / 2.0 };
};
const getViewportCenter = () => {
const getViewportCenter = useCallback(() => {
const { centerX, centerY } = getWindowCenter();
if (rfInstance === null) return { x: centerX, y: centerY };
// Support Zoom
const { x, y, zoom } = rfInstance.getViewport();
return { x: -(x / zoom) + centerX / zoom, y: -(y / zoom) + centerY / zoom };
};
}, [rfInstance]);
const addNode = (
id: string,
type?: string,
data?: Dict,
offsetX?: number,
offsetY?: number,
) => {
const { x, y } = getViewportCenter();
addNodeToStore({
id: `${id}-` + Date.now(),
type: type ?? id,
data: data ?? {},
position: {
x: x - 200 + (offsetX || 0),
y: y - 100 + (offsetY || 0),
},
});
};
const addTextFieldsNode = () => addNode("textFieldsNode", "textfields");
const addPromptNode = () => addNode("promptNode", "prompt", { prompt: "" });
const addChatTurnNode = () => addNode("chatTurn", "chat", { prompt: "" });
const addSimpleEvalNode = () => addNode("simpleEval", "simpleval");
const addEvalNode = (progLang: string) => {
let code = "";
if (progLang === "python")
code = "def evaluate(response):\n return len(response.text)";
else if (progLang === "javascript")
code = "function evaluate(response) {\n return response.text.length;\n}";
addNode("evalNode", "evaluator", { language: progLang, code });
};
const addVisNode = () => addNode("visNode", "vis", {});
const addInspectNode = () => addNode("inspectNode", "inspect");
const addScriptNode = () => addNode("scriptNode", "script");
const addItemsNode = () => addNode("csvNode", "csv");
const addTabularDataNode = () => addNode("table");
const addCommentNode = () => addNode("comment");
const addLLMEvalNode = () => addNode("llmeval");
const addMultiEvalNode = () => addNode("multieval");
const addJoinNode = () => addNode("join");
const addSplitNode = () => addNode("split");
const addProcessorNode = (progLang: string) => {
let code = "";
if (progLang === "python")
code = "def process(response):\n return response.text;";
else if (progLang === "javascript")
code = "function process(response) {\n return response.text;\n}";
addNode("process", "processor", { language: progLang, code });
};
const addNode = useCallback(
(
id: string,
type?: string,
data?: Dict,
offsetX?: number,
offsetY?: number,
) => {
const { x, y } = getViewportCenter();
addNodeToStore({
id: `${id}-` + Date.now(),
type: type ?? id,
data: data ?? {},
position: {
x: x - 200 + (offsetX || 0),
y: y - 100 + (offsetY || 0),
},
});
},
[addNodeToStore],
);
const onClickExamples = () => {
if (examplesModal && examplesModal.current) examplesModal.current.trigger();
@ -350,13 +324,16 @@ const App = () => {
if (settingsModal && settingsModal.current) settingsModal.current.trigger();
};
const handleError = (err: Error | string) => {
const msg = typeof err === "string" ? err : err.message;
setIsLoading(false);
setWaitingForShare(false);
if (showAlert) showAlert(msg);
console.error(msg);
};
const handleError = useCallback(
(err: Error | string) => {
const msg = typeof err === "string" ? err : err.message;
setIsLoading(false);
setWaitingForShare(false);
if (showAlert) showAlert(msg);
console.error(msg);
},
[showAlert],
);
/**
* SAVING / LOADING, IMPORT / EXPORT (from JSON)
@ -405,6 +382,42 @@ const App = () => {
[rfInstance],
);
// Initialize auto-saving
const initAutosaving = useCallback(
(rf_inst: ReactFlowInstance) => {
if (autosavingInterval !== undefined) return; // autosaving interval already set
console.log("Init autosaving");
// Autosave the flow to localStorage every minute:
const interv = setInterval(() => {
// Check the visibility of the browser tab --if it's not visible, don't autosave
if (!browserTabIsActive()) return;
// Start a timer, in case the saving takes a long time
const startTime = Date.now();
// Save the flow to localStorage
saveFlow(rf_inst);
// Check how long the save took
const duration = Date.now() - startTime;
if (duration > 1500) {
// If the operation took longer than 1.5 seconds, that's not good.
// Although this function is called async inside setInterval,
// calls to localStorage block the UI in JavaScript, freezing the screen.
// We smart-disable autosaving here when we detect it's starting to freeze the UI:
console.warn(
"Autosaving disabled. The time required to save to localStorage exceeds 1 second. This can happen when there's a lot of data in your flow. Make sure to export frequently to save your work.",
);
clearInterval(interv);
setAutosavingInterval(undefined);
}
}, 60000); // 60000 milliseconds = 1 minute
setAutosavingInterval(interv);
},
[autosavingInterval, saveFlow],
);
// Triggered when user confirms 'New Flow' button
const resetFlow = useCallback(() => {
resetLLMColors();
@ -436,58 +449,64 @@ const App = () => {
if (rfInstance) rfInstance.setViewport({ x: 200, y: 80, zoom: 1 });
}, [setNodes, setEdges, resetLLMColors, rfInstance]);
const loadFlow = async (flow?: Dict, rf_inst?: ReactFlowInstance | null) => {
if (flow === undefined) return;
if (rf_inst) {
if (flow.viewport)
rf_inst.setViewport({
x: flow.viewport.x || 0,
y: flow.viewport.y || 0,
zoom: flow.viewport.zoom || 1,
});
else rf_inst.setViewport({ x: 0, y: 0, zoom: 1 });
}
resetLLMColors();
const loadFlow = useCallback(
async (flow?: Dict, rf_inst?: ReactFlowInstance | null) => {
if (flow === undefined) return;
if (rf_inst) {
if (flow.viewport)
rf_inst.setViewport({
x: flow.viewport.x || 0,
y: flow.viewport.y || 0,
zoom: flow.viewport.zoom || 1,
});
else rf_inst.setViewport({ x: 0, y: 0, zoom: 1 });
}
resetLLMColors();
// First, clear the ReactFlow state entirely
// NOTE: We need to do this so it forgets any node/edge ids, which might have cross-over in the loaded flow.
setNodes([]);
setEdges([]);
// First, clear the ReactFlow state entirely
// NOTE: We need to do this so it forgets any node/edge ids, which might have cross-over in the loaded flow.
setNodes([]);
setEdges([]);
// After a delay, load in the new state.
setTimeout(() => {
setNodes(flow.nodes || []);
setEdges(flow.edges || []);
// After a delay, load in the new state.
setTimeout(() => {
setNodes(flow.nodes || []);
setEdges(flow.edges || []);
// Save flow that user loaded to autosave cache, in case they refresh the browser
StorageCache.saveToLocalStorage("chainforge-flow", flow);
// Save flow that user loaded to autosave cache, in case they refresh the browser
StorageCache.saveToLocalStorage("chainforge-flow", flow);
// Cancel loading spinner
setIsLoading(false);
}, 10);
// Cancel loading spinner
setIsLoading(false);
}, 10);
// Start auto-saving, if it's not already enabled
if (rf_inst) initAutosaving(rf_inst);
};
// Start auto-saving, if it's not already enabled
if (rf_inst) initAutosaving(rf_inst);
},
[resetLLMColors, setNodes, setEdges, initAutosaving],
);
const importGlobalStateFromCache = useCallback(() => {
importState(StorageCache.getAllMatching((key) => key.startsWith("r.")));
}, [importState]);
const autosavedFlowExists = () => {
const autosavedFlowExists = useCallback(() => {
return window.localStorage.getItem("chainforge-flow") !== null;
};
const loadFlowFromAutosave = async (rf_inst: ReactFlowInstance) => {
const saved_flow = StorageCache.loadFromLocalStorage(
"chainforge-flow",
false,
) as Dict;
if (saved_flow) {
StorageCache.loadFromLocalStorage("chainforge-state", true);
importGlobalStateFromCache();
loadFlow(saved_flow, rf_inst);
}
};
}, []);
const loadFlowFromAutosave = useCallback(
async (rf_inst: ReactFlowInstance) => {
const saved_flow = StorageCache.loadFromLocalStorage(
"chainforge-flow",
false,
) as Dict;
if (saved_flow) {
StorageCache.loadFromLocalStorage("chainforge-state", true);
importGlobalStateFromCache();
loadFlow(saved_flow, rf_inst);
}
},
[importGlobalStateFromCache, loadFlow],
);
// Export / Import (from JSON)
const exportFlow = useCallback(() => {
@ -754,107 +773,83 @@ const App = () => {
waitingForShare,
]);
// Initialize auto-saving
const initAutosaving = (rf_inst: ReactFlowInstance) => {
if (autosavingInterval !== undefined) return; // autosaving interval already set
console.log("Init autosaving");
// Autosave the flow to localStorage every minute:
const interv = setInterval(() => {
// Check the visibility of the browser tab --if it's not visible, don't autosave
if (!browserTabIsActive()) return;
// Start a timer, in case the saving takes a long time
const startTime = Date.now();
// Save the flow to localStorage
saveFlow(rf_inst);
// Check how long the save took
const duration = Date.now() - startTime;
if (duration > 1500) {
// If the operation took longer than 1.5 seconds, that's not good.
// Although this function is called async inside setInterval,
// calls to localStorage block the UI in JavaScript, freezing the screen.
// We smart-disable autosaving here when we detect it's starting to freeze the UI:
console.warn(
"Autosaving disabled. The time required to save to localStorage exceeds 1 second. This can happen when there's a lot of data in your flow. Make sure to export frequently to save your work.",
);
clearInterval(interv);
setAutosavingInterval(undefined);
}
}, 60000); // 60000 milliseconds = 1 minute
setAutosavingInterval(interv);
};
// Run once upon ReactFlow initialization
const onInit = (rf_inst: ReactFlowInstance) => {
setRfInstance(rf_inst);
const onInit = useCallback(
(rf_inst: ReactFlowInstance) => {
setRfInstance(rf_inst);
if (IS_RUNNING_LOCALLY) {
// If we're running locally, try to fetch API keys from Python os.environ variables in the locally running Flask backend:
fetchEnvironAPIKeys()
.then((api_keys) => {
setAPIKeys(api_keys);
})
.catch((err) => {
// Soft fail
console.warn(
"Warning: Could not fetch API key environment variables from Flask server. Error:",
err.message,
);
});
} else {
// Check if there's a shared flow UID in the URL as a GET param
// If so, we need to look it up in the database and attempt to load it:
const shared_flow_uid = getSharedFlowURLParam();
if (shared_flow_uid !== undefined) {
try {
// The format passed a basic smell test;
// now let's query the server for a flow with that UID:
fetch("/db/get_sharedflow.php", {
method: "POST",
body: shared_flow_uid,
if (IS_RUNNING_LOCALLY) {
// If we're running locally, try to fetch API keys from Python os.environ variables in the locally running Flask backend:
fetchEnvironAPIKeys()
.then((api_keys) => {
setAPIKeys(api_keys);
})
.then((r) => r.text())
.then((response) => {
if (!response || response.startsWith("Error")) {
// Error encountered during the query; alert the user
// with the error message:
throw new Error(response || "Unknown error");
}
// Attempt to parse the response as a compressed flow + import it:
const cforge_json = JSON.parse(
LZString.decompressFromUTF16(response),
);
importFlowFromJSON(cforge_json, rf_inst);
.catch((err) => {
// Soft fail
console.warn(
"Warning: Could not fetch API key environment variables from Flask server. Error:",
err.message,
);
});
} else {
// Check if there's a shared flow UID in the URL as a GET param
// If so, we need to look it up in the database and attempt to load it:
const shared_flow_uid = getSharedFlowURLParam();
if (shared_flow_uid !== undefined) {
try {
// The format passed a basic smell test;
// now let's query the server for a flow with that UID:
fetch("/db/get_sharedflow.php", {
method: "POST",
body: shared_flow_uid,
})
.catch(handleError);
} catch (err) {
// Soft fail
setIsLoading(false);
console.error(err);
.then((r) => r.text())
.then((response) => {
if (!response || response.startsWith("Error")) {
// Error encountered during the query; alert the user
// with the error message:
throw new Error(response || "Unknown error");
}
// Attempt to parse the response as a compressed flow + import it:
const cforge_json = JSON.parse(
LZString.decompressFromUTF16(response),
);
importFlowFromJSON(cforge_json, rf_inst);
})
.catch(handleError);
} catch (err) {
// Soft fail
setIsLoading(false);
console.error(err);
}
// Since we tried to load from the shared flow ID, don't try to load from autosave
return;
}
// Since we tried to load from the shared flow ID, don't try to load from autosave
return;
}
}
// Attempt to load an autosaved flow, if one exists:
if (autosavedFlowExists()) loadFlowFromAutosave(rf_inst);
else {
// Load an interesting default starting flow for new users
importFlowFromJSON(EXAMPLEFLOW_1, rf_inst);
// Attempt to load an autosaved flow, if one exists:
if (autosavedFlowExists()) loadFlowFromAutosave(rf_inst);
else {
// Load an interesting default starting flow for new users
importFlowFromJSON(EXAMPLEFLOW_1, rf_inst);
// Open a welcome pop-up
// openWelcomeModal();
}
// Open a welcome pop-up
// openWelcomeModal();
}
// Turn off loading wheel
setIsLoading(false);
};
// Turn off loading wheel
setIsLoading(false);
},
[
setAPIKeys,
handleError,
importFlowFromJSON,
autosavedFlowExists,
loadFlowFromAutosave,
],
);
useEffect(() => {
// Cleanup the autosaving interval upon component unmount:
@ -863,6 +858,273 @@ const App = () => {
};
}, []);
const reactFlowUI = useMemo(() => {
return (
<div
id="cf-root-container"
style={{ display: "flex", height: "100vh" }}
onPointerDown={hideContextMenu}
>
<div style={{ height: "100%", backgroundColor: "#eee", flexGrow: "1" }}>
<ReactFlow
onNodesChange={onNodesChange}
onEdgesChange={onEdgesChange}
onConnect={onConnect}
nodes={nodes}
edges={edges}
// @ts-expect-error Node types won't perfectly fit unless we explicitly extend from RF's types; ignoring this for now.
nodeTypes={nodeTypes}
// @ts-expect-error Edge types won't perfectly fit unless we explicitly extend from RF's types; ignoring this for now.
edgeTypes={edgeTypes}
zoomOnPinch={false}
zoomOnScroll={false}
panOnScroll={true}
disableKeyboardA11y={true}
deleteKeyCode={[]}
// connectionLineComponent={AnimatedConnectionLine}
// connectionLineStyle={connectionLineStyle}
snapToGrid={true}
snapGrid={snapGrid}
onInit={onInit}
onError={() => {
// Suppress ReactFlow warnings spamming the console.
// console.log(err);
}}
>
<Background color="#999" gap={16} />
<Controls showZoom={true} />
</ReactFlow>
</div>
</div>
);
}, [
onNodesChange,
onEdgesChange,
onConnect,
nodes,
edges,
onInit,
hideContextMenu,
]);
const addNodeMenu = useMemo(
() => (
<Menu
transitionProps={{ transition: "pop-top-left" }}
position="top-start"
width={220}
closeOnClickOutside={true}
closeOnEscape
styles={{ item: { maxHeight: "28px" } }}
>
<Menu.Target>
<Button size="sm" variant="gradient" compact mr="sm">
Add Node +
</Button>
</Menu.Target>
<Menu.Dropdown>
<Menu.Label>Input Data</Menu.Label>
<MenuTooltip label="Specify input text to prompt or chat nodes. You can also declare variables in brackets {} to chain TextFields together.">
<Menu.Item
onClick={() => addNode("textFieldsNode", "textfields")}
icon={<IconTextPlus size="16px" />}
>
{" "}
TextFields Node{" "}
</Menu.Item>
</MenuTooltip>
<MenuTooltip label="Specify inputs as a comma-separated list of items. Good for specifying lots of short text values. An alternative to TextFields node.">
<Menu.Item
onClick={() => addNode("csvNode", "csv")}
icon={<IconForms size="16px" />}
>
{" "}
Items Node{" "}
</Menu.Item>
</MenuTooltip>
<MenuTooltip label="Import or create a spreadhseet of data to use as input to prompt or chat nodes. Import accepts xlsx, csv, and jsonl.">
<Menu.Item onClick={() => addNode("table")} icon={"🗂️"}>
{" "}
Tabular Data Node{" "}
</Menu.Item>
</MenuTooltip>
<Menu.Divider />
<Menu.Label>Prompters</Menu.Label>
<MenuTooltip label="Prompt one or multiple LLMs. Specify prompt variables in brackets {}.">
<Menu.Item
onClick={() => addNode("promptNode", "prompt", { prompt: "" })}
icon={"💬"}
>
{" "}
Prompt Node{" "}
</Menu.Item>
</MenuTooltip>
<MenuTooltip label="Start or continue a conversation with chat models. Attach Prompt Node output as past context to continue chatting past the first turn.">
<Menu.Item
onClick={() => addNode("chatTurn", "chat", { prompt: "" })}
icon={"🗣"}
>
{" "}
Chat Turn Node{" "}
</Menu.Item>
</MenuTooltip>
<Menu.Divider />
<Menu.Label>Evaluators</Menu.Label>
<MenuTooltip label="Evaluate responses with a simple check (no coding required).">
<Menu.Item
onClick={() => addNode("simpleEval", "simpleval")}
icon={<IconRuler2 size="16px" />}
>
{" "}
Simple Evaluator{" "}
</Menu.Item>
</MenuTooltip>
<MenuTooltip label="Evaluate responses by writing JavaScript code.">
<Menu.Item
onClick={() =>
addNode("evalNode", "evaluator", {
language: "javascript",
code: "function evaluate(response) {\n return response.text.length;\n}",
})
}
icon={<IconTerminal size="16px" />}
>
{" "}
JavaScript Evaluator{" "}
</Menu.Item>
</MenuTooltip>
<MenuTooltip label="Evaluate responses by writing Python code.">
<Menu.Item
onClick={() =>
addNode("evalNode", "evaluator", {
language: "python",
code: "def evaluate(response):\n return len(response.text)",
})
}
icon={<IconTerminal size="16px" />}
>
{" "}
Python Evaluator{" "}
</Menu.Item>
</MenuTooltip>
<MenuTooltip label="Evaluate responses with an LLM like GPT-4.">
<Menu.Item
onClick={() => addNode("llmeval")}
icon={<IconRobot size="16px" />}
>
{" "}
LLM Scorer{" "}
</Menu.Item>
</MenuTooltip>
<MenuTooltip label="Evaluate responses across multiple criteria (multiple code and/or LLM evaluators).">
<Menu.Item
onClick={() => addNode("multieval")}
icon={<IconAbacus size="16px" />}
>
{" "}
Multi-Evaluator{" "}
</Menu.Item>
</MenuTooltip>
<Menu.Divider />
<Menu.Label>Visualizers</Menu.Label>
<MenuTooltip label="Plot evaluation results. (Attach an evaluator or scorer node as input.)">
<Menu.Item
onClick={() => addNode("visNode", "vis", {})}
icon={"📊"}
>
{" "}
Vis Node{" "}
</Menu.Item>
</MenuTooltip>
<MenuTooltip label="Used to inspect responses from prompter or evaluation nodes, without opening up the pop-up view.">
<Menu.Item
onClick={() => addNode("inspectNode", "inspect")}
icon={"🔍"}
>
{" "}
Inspect Node{" "}
</Menu.Item>
</MenuTooltip>
<Menu.Divider />
<Menu.Label>Processors</Menu.Label>
<MenuTooltip label="Transform responses by mapping a JavaScript function over them.">
<Menu.Item
onClick={() =>
addNode("process", "processor", {
language: "javascript",
code: "function process(response) {\n return response.text;\n}",
})
}
icon={<IconTerminal size="14pt" />}
>
{" "}
JavaScript Processor{" "}
</Menu.Item>
</MenuTooltip>
{IS_RUNNING_LOCALLY ? (
<MenuTooltip label="Transform responses by mapping a Python function over them.">
<Menu.Item
onClick={() =>
addNode("process", "processor", {
language: "python",
code: "def process(response):\n return response.text;",
})
}
icon={<IconTerminal size="14pt" />}
>
{" "}
Python Processor{" "}
</Menu.Item>
</MenuTooltip>
) : (
<></>
)}
<MenuTooltip label="Concatenate responses or input data together before passing into later nodes, within or across variables and LLMs.">
<Menu.Item
onClick={() => addNode("join")}
icon={<IconArrowMerge size="14pt" />}
>
{" "}
Join Node{" "}
</Menu.Item>
</MenuTooltip>
<MenuTooltip label="Split responses or input data by some format. For instance, you can split a markdown list into separate items.">
<Menu.Item
onClick={() => addNode("split")}
icon={<IconArrowsSplit size="14pt" />}
>
{" "}
Split Node{" "}
</Menu.Item>
</MenuTooltip>
<Menu.Divider />
<Menu.Label>Misc</Menu.Label>
<MenuTooltip label="Make a comment about your flow.">
<Menu.Item onClick={() => addNode("comment")} icon={"✏️"}>
{" "}
Comment Node{" "}
</Menu.Item>
</MenuTooltip>
{IS_RUNNING_LOCALLY ? (
<MenuTooltip label="Specify directories to load as local packages, so they can be imported in your Python evaluator nodes (add to sys path).">
<Menu.Item
onClick={() => addNode("scriptNode", "script")}
icon={<IconSettingsAutomation size="16px" />}
>
{" "}
Global Python Scripts{" "}
</Menu.Item>
</MenuTooltip>
) : (
<></>
)}
</Menu.Dropdown>
</Menu>
),
[addNode],
);
if (!IS_ACCEPTED_BROWSER) {
return (
<Box maw={600} mx="auto" mt="40px">
@ -925,228 +1187,13 @@ const App = () => {
</Box>
</Modal> */}
<div
id="cf-root-container"
style={{ display: "flex", height: "100vh" }}
onPointerDown={hideContextMenu}
>
<div
style={{ height: "100%", backgroundColor: "#eee", flexGrow: "1" }}
>
<ReactFlow
onNodesChange={onNodesChange}
onEdgesChange={onEdgesChange}
onConnect={onConnect}
nodes={nodes}
edges={edges}
// @ts-expect-error Node types won't perfectly fit unless we explicitly extend from RF's types; ignoring this for now.
nodeTypes={nodeTypes}
// @ts-expect-error Edge types won't perfectly fit unless we explicitly extend from RF's types; ignoring this for now.
edgeTypes={edgeTypes}
zoomOnPinch={false}
zoomOnScroll={false}
panOnScroll={true}
disableKeyboardA11y={true}
deleteKeyCode={[]}
// connectionLineComponent={AnimatedConnectionLine}
// connectionLineStyle={connectionLineStyle}
snapToGrid={true}
snapGrid={snapGrid}
onInit={onInit}
onError={() => {
// Suppress ReactFlow warnings spamming the console.
// console.log(err);
}}
>
<Background color="#999" gap={16} />
<Controls showZoom={true} />
</ReactFlow>
</div>
</div>
{reactFlowUI}
<div
id="custom-controls"
style={{ position: "fixed", left: "10px", top: "10px", zIndex: 8 }}
>
<Menu
transitionProps={{ transition: "pop-top-left" }}
position="top-start"
width={220}
closeOnClickOutside={true}
closeOnEscape
styles={{ item: { maxHeight: "28px" } }}
>
<Menu.Target>
<Button size="sm" variant="gradient" compact mr="sm">
Add Node +
</Button>
</Menu.Target>
<Menu.Dropdown>
<Menu.Label>Input Data</Menu.Label>
<MenuTooltip label="Specify input text to prompt or chat nodes. You can also declare variables in brackets {} to chain TextFields together.">
<Menu.Item
onClick={addTextFieldsNode}
icon={<IconTextPlus size="16px" />}
>
{" "}
TextFields Node{" "}
</Menu.Item>
</MenuTooltip>
<MenuTooltip label="Specify inputs as a comma-separated list of items. Good for specifying lots of short text values. An alternative to TextFields node.">
<Menu.Item
onClick={addItemsNode}
icon={<IconForms size="16px" />}
>
{" "}
Items Node{" "}
</Menu.Item>
</MenuTooltip>
<MenuTooltip label="Import or create a spreadhseet of data to use as input to prompt or chat nodes. Import accepts xlsx, csv, and jsonl.">
<Menu.Item onClick={addTabularDataNode} icon={"🗂️"}>
{" "}
Tabular Data Node{" "}
</Menu.Item>
</MenuTooltip>
<Menu.Divider />
<Menu.Label>Prompters</Menu.Label>
<MenuTooltip label="Prompt one or multiple LLMs. Specify prompt variables in brackets {}.">
<Menu.Item onClick={addPromptNode} icon={"💬"}>
{" "}
Prompt Node{" "}
</Menu.Item>
</MenuTooltip>
<MenuTooltip label="Start or continue a conversation with chat models. Attach Prompt Node output as past context to continue chatting past the first turn.">
<Menu.Item onClick={addChatTurnNode} icon={"🗣"}>
{" "}
Chat Turn Node{" "}
</Menu.Item>
</MenuTooltip>
<Menu.Divider />
<Menu.Label>Evaluators</Menu.Label>
<MenuTooltip label="Evaluate responses with a simple check (no coding required).">
<Menu.Item
onClick={addSimpleEvalNode}
icon={<IconRuler2 size="16px" />}
>
{" "}
Simple Evaluator{" "}
</Menu.Item>
</MenuTooltip>
<MenuTooltip label="Evaluate responses by writing JavaScript code.">
<Menu.Item
onClick={() => addEvalNode("javascript")}
icon={<IconTerminal size="16px" />}
>
{" "}
JavaScript Evaluator{" "}
</Menu.Item>
</MenuTooltip>
<MenuTooltip label="Evaluate responses by writing Python code.">
<Menu.Item
onClick={() => addEvalNode("python")}
icon={<IconTerminal size="16px" />}
>
{" "}
Python Evaluator{" "}
</Menu.Item>
</MenuTooltip>
<MenuTooltip label="Evaluate responses with an LLM like GPT-4.">
<Menu.Item
onClick={addLLMEvalNode}
icon={<IconRobot size="16px" />}
>
{" "}
LLM Scorer{" "}
</Menu.Item>
</MenuTooltip>
<MenuTooltip label="Evaluate responses across multiple criteria (multiple code and/or LLM evaluators).">
<Menu.Item
onClick={addMultiEvalNode}
icon={<IconAbacus size="16px" />}
>
{" "}
Multi-Evaluator{" "}
</Menu.Item>
</MenuTooltip>
<Menu.Divider />
<Menu.Label>Visualizers</Menu.Label>
<MenuTooltip label="Plot evaluation results. (Attach an evaluator or scorer node as input.)">
<Menu.Item onClick={addVisNode} icon={"📊"}>
{" "}
Vis Node{" "}
</Menu.Item>
</MenuTooltip>
<MenuTooltip label="Used to inspect responses from prompter or evaluation nodes, without opening up the pop-up view.">
<Menu.Item onClick={addInspectNode} icon={"🔍"}>
{" "}
Inspect Node{" "}
</Menu.Item>
</MenuTooltip>
<Menu.Divider />
<Menu.Label>Processors</Menu.Label>
<MenuTooltip label="Transform responses by mapping a JavaScript function over them.">
<Menu.Item
onClick={() => addProcessorNode("javascript")}
icon={<IconTerminal size="14pt" />}
>
{" "}
JavaScript Processor{" "}
</Menu.Item>
</MenuTooltip>
{IS_RUNNING_LOCALLY ? (
<MenuTooltip label="Transform responses by mapping a Python function over them.">
<Menu.Item
onClick={() => addProcessorNode("python")}
icon={<IconTerminal size="14pt" />}
>
{" "}
Python Processor{" "}
</Menu.Item>
</MenuTooltip>
) : (
<></>
)}
<MenuTooltip label="Concatenate responses or input data together before passing into later nodes, within or across variables and LLMs.">
<Menu.Item
onClick={addJoinNode}
icon={<IconArrowMerge size="14pt" />}
>
{" "}
Join Node{" "}
</Menu.Item>
</MenuTooltip>
<MenuTooltip label="Split responses or input data by some format. For instance, you can split a markdown list into separate items.">
<Menu.Item
onClick={addSplitNode}
icon={<IconArrowsSplit size="14pt" />}
>
{" "}
Split Node{" "}
</Menu.Item>
</MenuTooltip>
<Menu.Divider />
<Menu.Label>Misc</Menu.Label>
<MenuTooltip label="Make a comment about your flow.">
<Menu.Item onClick={addCommentNode} icon={"✏️"}>
{" "}
Comment Node{" "}
</Menu.Item>
</MenuTooltip>
{IS_RUNNING_LOCALLY ? (
<MenuTooltip label="Specify directories to load as local packages, so they can be imported in your Python evaluator nodes (add to sys path).">
<Menu.Item
onClick={addScriptNode}
icon={<IconSettingsAutomation size="16px" />}
>
{" "}
Global Python Scripts{" "}
</Menu.Item>
</MenuTooltip>
) : (
<></>
)}
</Menu.Dropdown>
</Menu>
{addNodeMenu}
<Button
onClick={exportFlow}
size="sm"

View File

@ -58,11 +58,10 @@ export const BaseNode: React.FC<BaseNodeProps> = ({
// Remove the node, after user confirmation dialog
const handleRemoveNode = useCallback(() => {
// Open the 'are you sure' modal:
if (deleteConfirmModal && deleteConfirmModal.current)
deleteConfirmModal.current.trigger();
deleteConfirmModal?.current?.trigger();
}, [deleteConfirmModal]);
const handleOpenContextMenu = (e: Dict) => {
const handleOpenContextMenu = useCallback((e: Dict) => {
// Ignore all right-clicked elements that aren't children of the parent,
// and that aren't divs (for instance, textfields should still have normal right-click)
if (e.target?.localName !== "div") return;
@ -91,23 +90,22 @@ export const BaseNode: React.FC<BaseNodeProps> = ({
},
});
setContextMenuOpened(true);
};
}, []);
// A BaseNode is just a div with "cfnode" as a class, and optional other className(s) for the specific node.
// It adds a context menu to all nodes upon right-click of the node itself (the div), to duplicate or delete the node.
return (
<div
className={classes}
onPointerDown={() => setContextMenuOpened(false)}
onContextMenu={handleOpenContextMenu}
style={style}
>
const areYouSureModal = useMemo(
() => (
<AreYouSureModal
ref={deleteConfirmModal}
title="Delete node"
message="Are you sure you want to delete this node? This action is irreversible."
onConfirm={() => removeNode(nodeId)}
/>
),
[removeNode, nodeId, deleteConfirmModal],
);
const contextMenu = useMemo(
() => (
<Menu
opened={contextMenuOpened}
withinPortal={true}
@ -132,6 +130,29 @@ export const BaseNode: React.FC<BaseNodeProps> = ({
</Menu.Item>
</Menu.Dropdown>
</Menu>
),
[
handleDuplicateNode,
handleRemoveNode,
contextMenuExts,
children,
contextMenuStyle,
contextMenuOpened,
setContextMenuOpened,
],
);
// A BaseNode is just a div with "cfnode" as a class, and optional other className(s) for the specific node.
// It adds a context menu to all nodes upon right-click of the node itself (the div), to duplicate or delete the node.
return (
<div
className={classes}
onPointerDown={() => setContextMenuOpened(false)}
onContextMenu={handleOpenContextMenu}
style={style}
>
{areYouSureModal}
{contextMenu}
</div>
);
};

View File

@ -94,6 +94,7 @@ const InspectorNode: React.FC<InspectorNodeProps> = ({ data, id }) => {
>
<LLMResponseInspector
jsonResponses={jsonResponses ?? []}
isOpen={true}
wideFormat={false}
/>
</div>

View File

@ -10,6 +10,7 @@ import React, {
useRef,
useMemo,
useTransition,
Suspense,
} from "react";
import {
MultiSelect,
@ -22,9 +23,7 @@ import {
Tooltip,
TextInput,
Stack,
ScrollArea,
LoadingOverlay,
Button,
} from "@mantine/core";
import { useToggle } from "@mantine/hooks";
import {
@ -38,10 +37,6 @@ import {
useMantineReactTable,
type MRT_ColumnDef,
type MRT_Cell,
type MRT_ColumnFiltersState,
type MRT_SortingState,
type MRT_Virtualizer,
MRT_Row,
MRT_ShowHideColumnsButton,
MRT_ToggleFiltersButton,
MRT_ToggleDensePaddingButton,
@ -273,11 +268,13 @@ export const exportToExcel = (
export interface LLMResponseInspectorProps {
jsonResponses: LLMResponse[];
isOpen: boolean;
wideFormat?: boolean;
}
const LLMResponseInspector: React.FC<LLMResponseInspectorProps> = ({
jsonResponses,
isOpen,
wideFormat,
}) => {
// Responses
@ -773,33 +770,6 @@ const LLMResponseInspector: React.FC<LLMResponseInspectorProps> = ({
});
return row;
// return (
// <tr key={`r${idx}`} style={{ borderBottom: "2px solid #fff" }}>
// {var_cols_vals.map((c, i) => (
// <td key={`v${i}`} className="inspect-table-var">
// <ScrollArea.Autosize mt="sm" mah={500} maw={300}>
// {StringLookup.get(c)}
// </ScrollArea.Autosize>
// </td>
// ))}
// {metavar_cols_vals.map((c, i) => (
// <td key={`m${i}`} className="inspect-table-metavar">
// {StringLookup.get(c)}
// </td>
// ))}
// {sel_var_cols.map((c, i) => (
// <td key={`c${i}`} className="inspect-table-llm-resp">
// {StringLookup.get(c)}
// </td>
// ))}
// {eval_cols_vals.map((c, i) => (
// <td key={`e${i}`} className="inspect-table-score-col">
// <Stack spacing={0}>{c}</Stack>
// </td>
// ))}
// </tr>
// );
},
);
@ -863,7 +833,9 @@ const LLMResponseInspector: React.FC<LLMResponseInspectorProps> = ({
return (
<Stack spacing={0}>
{(val.data as [string | JSX.Element, string][]).map(
(e) => e[0],
(e, i) => (
<div key={i}>{e[0]}</div>
),
)}
</Stack>
);
@ -878,10 +850,12 @@ const LLMResponseInspector: React.FC<LLMResponseInspectorProps> = ({
)}
</Stack>
);
// return <div style={{backgroundColor: "red"}}>{cell.getValue() as string}</div>;
},
Header: ({ column }) => (
<div style={{ lineHeight: 1.0, overflowY: "auto", maxHeight: 100 }}>
<div
key={column.columnDef.id}
style={{ lineHeight: 1.0, overflowY: "auto", maxHeight: 100 }}
>
{column.columnDef.header}
</div>
),
@ -1228,15 +1202,21 @@ const LLMResponseInspector: React.FC<LLMResponseInspectorProps> = ({
</Tabs.Panel>
</Tabs>
<div className="nowheel nodrag">
{/* To get the overlay to operate just inside the div, use style={{position: "relative"}}. However it won't show the spinner in the right place. */}
<LoadingOverlay visible={showLoadingSpinner} overlayOpacity={0.5} />
{viewFormat === "table" ? (
<MantineReactTable table={table} />
) : (
responseDivs
)}
</div>
{isOpen ? (
<Suspense fallback={<></>}>
<div className="nowheel nodrag">
{/* To get the overlay to operate just inside the div, use style={{position: "relative"}}. However it won't show the spinner in the right place. */}
<LoadingOverlay visible={showLoadingSpinner} overlayOpacity={0.5} />
{viewFormat === "table" ? (
<MantineReactTable table={table} />
) : (
responseDivs
)}
</div>
</Suspense>
) : (
<></>
)}
</div>
);
};

View File

@ -22,6 +22,7 @@ export default function LLMResponseInspectorDrawer({
>
<LLMResponseInspector
jsonResponses={jsonResponses}
isOpen={showDrawer}
wideFormat={false}
/>
</div>

View File

@ -80,6 +80,7 @@ const LLMResponseInspectorModal = forwardRef<
<Suspense fallback={<LoadingOverlay visible={true} />}>
<LLMResponseInspector
jsonResponses={props.jsonResponses}
isOpen={opened}
wideFormat={true}
/>
</Suspense>

View File

@ -432,7 +432,7 @@ const PromptNode: React.FC<PromptNodeProps> = ({
// Debounce refreshing the template hooks so we don't annoy the user
// debounce((_value) => refreshTemplateHooks(_value), 500)(value);
},
[promptTextOnLastRun, status, refreshTemplateHooks],
[promptTextOnLastRun, status, refreshTemplateHooks, debounceTimeoutRef],
);
// On initialization
@ -543,33 +543,43 @@ const PromptNode: React.FC<PromptNodeProps> = ({
}, [id, pullInputData]);
// Ask the backend how many responses it needs to collect, given the input data:
const fetchResponseCounts = (
prompt: string,
vars: Dict,
llms: (StringOrHash | LLMSpec)[],
chat_histories?:
| (ChatHistoryInfo | undefined)[]
| Dict<(ChatHistoryInfo | undefined)[]>,
) => {
return countQueries(
prompt,
vars,
llms,
const fetchResponseCounts = useCallback(
(
prompt: string,
vars: Dict,
llms: (StringOrHash | LLMSpec)[],
chat_histories?:
| (ChatHistoryInfo | undefined)[]
| Dict<(ChatHistoryInfo | undefined)[]>,
) => {
return countQueries(
prompt,
vars,
llms,
numGenerations,
chat_histories,
id,
node_type !== "chat" ? showContToggle && contWithPriorLLMs : undefined,
).then(function (results) {
return [results.counts, results.total_num_responses] as [
Dict<Dict<number>>,
Dict<number>,
];
});
},
[
countQueries,
numGenerations,
chat_histories,
showContToggle,
contWithPriorLLMs,
id,
node_type !== "chat" ? showContToggle && contWithPriorLLMs : undefined,
).then(function (results) {
return [results.counts, results.total_num_responses] as [
Dict<Dict<number>>,
Dict<number>,
];
});
};
node_type,
],
);
// On hover over the 'info' button, to preview the prompts that will be sent out
const [promptPreviews, setPromptPreviews] = useState<PromptInfo[]>([]);
const handlePreviewHover = () => {
const handlePreviewHover = useCallback(() => {
// Pull input data and prompt
try {
const pulled_vars = pullInputData(templateVars, id);
@ -590,10 +600,18 @@ const PromptNode: React.FC<PromptNodeProps> = ({
console.error(err);
setPromptPreviews([]);
}
};
}, [
pullInputData,
templateVars,
id,
updateShowContToggle,
generatePrompts,
promptText,
pullInputChats,
]);
// On hover over the 'Run' button, request how many responses are required and update the tooltip. Soft fails.
const handleRunHover = () => {
const handleRunHover = useCallback(() => {
// Check if the PromptNode is not already waiting for a response...
if (status === "loading") {
setRunTooltip("Fetching responses...");
@ -724,9 +742,17 @@ const PromptNode: React.FC<PromptNodeProps> = ({
console.error(err); // soft fail
setRunTooltip("Could not reach backend server.");
});
};
}, [
status,
llmItemsCurrState,
pullInputChats,
contWithPriorLLMs,
pullInputData,
fetchResponseCounts,
promptText,
]);
const handleRunClick = () => {
const handleRunClick = useCallback(() => {
// Go through all template hooks (if any) and check they're connected:
const is_fully_connected = templateVars.every((varname) => {
// Check that some edge has, as its target, this node and its template hook:
@ -1063,7 +1089,31 @@ Soft failing by replacing undefined with empty strings.`,
.then(open_progress_listener)
.then(query_llms)
.catch(rejected);
};
}, [
templateVars,
triggerAlert,
pullInputChats,
pullInputData,
updateShowContToggle,
llmItemsCurrState,
contWithPriorLLMs,
showAlert,
fetchResponseCounts,
numGenerations,
promptText,
apiKeys,
showContToggle,
cancelId,
refreshCancelId,
node_type,
id,
setDataPropsForNode,
llmListContainer,
responsesWillChange,
showDrawer,
pingOutputNodes,
debounceTimeoutRef,
]);
const handleStopClick = useCallback(() => {
CancelTracker.add(cancelId);
@ -1081,7 +1131,7 @@ Soft failing by replacing undefined with empty strings.`,
setStatus(Status.NONE);
setContChatToggleDisabled(false);
llmListContainer?.current?.resetLLMItemsProgress();
}, [cancelId, refreshCancelId]);
}, [cancelId, refreshCancelId, debounceTimeoutRef]);
const handleNumGenChange = useCallback(
(event: React.ChangeEvent<HTMLInputElement>) => {

View File

@ -36,6 +36,7 @@ export const getEvalResultStr = (
} else if (typeof eval_item === "object") {
const strs: [JSX.Element | string, string][] = Object.keys(eval_item).map(
(key, j) => {
const innerKey = `${key}-${j}`;
let val = eval_item[key];
if (typeof val === "number" && val.toString().indexOf(".") > -1)
val = val.toFixed(4); // truncate floats to 4 decimal places
@ -43,9 +44,9 @@ export const getEvalResultStr = (
if (onlyString) return [`${key}: ${recurs_str}`, recurs_str];
else
return [
<div key={`${key}-${j}`}>
<span>{key}: </span>
<span>{recurs_res}</span>
<div key={innerKey}>
<span key={0}>{key}: </span>
<span key={1}>{recurs_res}</span>
</div>,
recurs_str,
];
@ -57,7 +58,9 @@ export const getEvalResultStr = (
} else
return [
<Stack key={1} spacing={0}>
{strs}
{strs.map((s, i) => (
<span key={i}>{s[0]}</span>
))}
</Stack>,
joined_strs,
];