Compare commits

...

4 Commits
v0.3.4 ... main

Author SHA1 Message Date
ianarawjo
2c7447d5f7
Saved flows sidebar and save button when running locally (#332)
* Add save to local disk/cache button

* Autosave to local filesystem but hide it

* Use imported filename when saving to local disk after import. Ensure no name clashes.

* Remove unneeded dependencies in pip and update package version

* Hide sidebar button and sidebar when hosted on web
2025-03-02 17:13:40 -05:00
Ian Arawjo
3b929880dc Update package version 2025-03-02 10:44:48 -05:00
Ian Arawjo
6ed42fe518 Bug fix: prompt node data saving 2025-03-02 10:43:48 -05:00
ianarawjo
98a8184a6a
Add copy button to response toolbars, and improve UI rendering performance (#331)
* Add copy button to toolbar.

* Debounce text update on prompt node

* Refactored App.tsx and BaseNode to use useMemo in many places, such as the toolbar

* Mantine React Table was rerendering cells even when hidden. This change ensures that the inner views of Response Inspectors do not display when the inspector is hidden.

* Add a buffer before displaying the inner response inspector view, so that the modal immediately pops up

* Update package version
2025-03-01 10:28:31 -05:00
14 changed files with 1356 additions and 625 deletions

View File

@@ -3,11 +3,13 @@ from dataclasses import dataclass
from enum import Enum
from typing import List
from statistics import mean, median, stdev
from datetime import datetime
from flask import Flask, request, jsonify, render_template
from flask_cors import CORS
from chainforge.providers.dalai import call_dalai
from chainforge.providers import ProviderRegistry
import requests as py_requests
from platformdirs import user_data_dir
""" =================
SETUP AND GLOBALS
@@ -26,6 +28,7 @@ app = Flask(__name__, static_folder=STATIC_DIR, template_folder=BUILD_DIR)
cors = CORS(app, resources={r"/*": {"origins": "*"}})
# The cache and examples files base directories
FLOWS_DIR = user_data_dir("chainforge") # platform-agnostic local storage that persists outside the package install location
CACHE_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'cache')
EXAMPLES_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'examples')
@@ -509,7 +512,7 @@ def makeFetchCall():
ret.headers.add('Access-Control-Allow-Origin', '*')
return ret
else:
err_msg = "API request to Anthropic failed"
err_msg = "API request failed"
ret = response.json()
if "error" in ret and "message" in ret["error"]:
err_msg += ": " + ret["error"]["message"]
@@ -721,6 +724,109 @@ async def callCustomProvider():
# Return the response
return jsonify({'response': response})
"""
LOCALLY SAVED FLOWS
"""
@app.route('/api/flows', methods=['GET'])
def get_flows():
"""Return a list of all saved flows. If the directory does not exist, try to create it."""
os.makedirs(FLOWS_DIR, exist_ok=True) # Creates the directory if it doesn't exist
flows = [
{
"name": f,
"last_modified": datetime.fromtimestamp(os.path.getmtime(os.path.join(FLOWS_DIR, f))).isoformat()
}
for f in os.listdir(FLOWS_DIR)
if f.endswith('.cforge') and f != "__autosave.cforge" # ignore the special autosave file
]
# Sort the flow files by last modified date in descending order (most recent first)
flows.sort(key=lambda x: x["last_modified"], reverse=True)
return jsonify({
"flow_dir": FLOWS_DIR,
"flows": flows
})
@app.route('/api/flows/<filename>', methods=['GET'])
def get_flow(filename):
"""Return the content of a specific flow"""
if not filename.endswith('.cforge'):
filename += '.cforge'
try:
with open(os.path.join(FLOWS_DIR, filename), 'r') as f:
return jsonify(json.load(f))
except FileNotFoundError:
return jsonify({"error": "Flow not found"}), 404
@app.route('/api/flows/<filename>', methods=['DELETE'])
def delete_flow(filename):
"""Delete a flow"""
if not filename.endswith('.cforge'):
filename += '.cforge'
try:
os.remove(os.path.join(FLOWS_DIR, filename))
return jsonify({"message": f"Flow {filename} deleted successfully"})
except FileNotFoundError:
return jsonify({"error": "Flow not found"}), 404
@app.route('/api/flows/<filename>', methods=['PUT'])
def save_or_rename_flow(filename):
"""Save or rename a flow"""
data = request.json
if not filename.endswith('.cforge'):
filename += '.cforge'
if data.get('flow'):
# Save flow (overwriting any existing flow file with the same name)
flow_data = data.get('flow')
try:
filepath = os.path.join(FLOWS_DIR, filename)
with open(filepath, 'w') as f:
json.dump(flow_data, f)
return jsonify({"message": f"Flow '{filename}' saved!"})
except FileNotFoundError:
return jsonify({"error": f"Could not save flow '{filename}' to local filesystem. See terminal for more details."}), 404
elif data.get('newName'):
# Rename flow
new_name = data.get('newName')
if not new_name.endswith('.cforge'):
new_name += '.cforge'
try:
# Check for name clashes (if a flow already exists with the new name)
if os.path.isfile(os.path.join(FLOWS_DIR, new_name)):
raise Exception("A flow with that name already exists.")
os.rename(os.path.join(FLOWS_DIR, filename), os.path.join(FLOWS_DIR, new_name))
return jsonify({"message": f"Flow renamed from {filename} to {new_name}"})
except Exception as error:
return jsonify({"error": str(error)}), 404
@app.route('/api/getUniqueFlowFilename', methods=['PUT'])
def get_unique_flow_name():
"""Return a non-name-clashing filename to store in the local disk."""
data = request.json
filename = data.get("name")
try:
base, ext = os.path.splitext(filename)
if ext is None or len(ext) == 0:
ext = ".cforge"
unique_filename = base + ext
i = 1
# Find the first non-clashing filename of the form <filename>(i).cforge where i=1,2,3 etc
while os.path.isfile(os.path.join(FLOWS_DIR, unique_filename)):
unique_filename = f"{base}({i}){ext}"
i += 1
return jsonify(unique_filename.replace(".cforge", ""))
except Exception as e:
return jsonify({"error": str(e)}), 404
def run_server(host="", port=8000, cmd_args=None):
global HOSTNAME, PORT
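
For orientation, here is a minimal sketch of how a client could exercise the new flow-storage endpoints added above. The routes and request/response shapes come from the diff itself; the BASE constant, the standalone demo() wrapper, and the { nodes, edges } flow body are assumptions for illustration only.

```ts
import axios from "axios";

// Assumed local Flask address; in the app this comes from FLASK_BASE_URL.
const BASE = "http://localhost:8000/";

async function demo(): Promise<void> {
  // List saved flows: the server responds with { flow_dir, flows: [{ name, last_modified }, ...] }
  const list = await axios.get(`${BASE}api/flows`);
  console.log(list.data.flow_dir, list.data.flows);

  // Ask for a non-clashing filename (returned without the .cforge extension),
  // then save a flow under that name. The flow body shape here is an assumption.
  const unique = await axios.put(`${BASE}api/getUniqueFlowFilename`, { name: "my-flow" });
  await axios.put(`${BASE}api/flows/${unique.data}`, { flow: { nodes: [], edges: [] } });

  // Rename the saved flow, then delete it.
  await axios.put(`${BASE}api/flows/${unique.data}`, { newName: "renamed-flow" });
  await axios.delete(`${BASE}api/flows/renamed-flow`);
}

demo().catch(console.error);
```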

File diff suppressed because it is too large

View File

@@ -58,11 +58,10 @@ export const BaseNode: React.FC<BaseNodeProps> = ({
// Remove the node, after user confirmation dialog
const handleRemoveNode = useCallback(() => {
// Open the 'are you sure' modal:
if (deleteConfirmModal && deleteConfirmModal.current)
deleteConfirmModal.current.trigger();
deleteConfirmModal?.current?.trigger();
}, [deleteConfirmModal]);
const handleOpenContextMenu = (e: Dict) => {
const handleOpenContextMenu = useCallback((e: Dict) => {
// Ignore all right-clicked elements that aren't children of the parent,
// and that aren't divs (for instance, textfields should still have normal right-click)
if (e.target?.localName !== "div") return;
@@ -91,23 +90,22 @@ export const BaseNode: React.FC<BaseNodeProps> = ({
},
});
setContextMenuOpened(true);
};
}, []);
// A BaseNode is just a div with "cfnode" as a class, and optional other className(s) for the specific node.
// It adds a context menu to all nodes upon right-click of the node itself (the div), to duplicate or delete the node.
return (
<div
className={classes}
onPointerDown={() => setContextMenuOpened(false)}
onContextMenu={handleOpenContextMenu}
style={style}
>
const areYouSureModal = useMemo(
() => (
<AreYouSureModal
ref={deleteConfirmModal}
title="Delete node"
message="Are you sure you want to delete this node? This action is irreversible."
onConfirm={() => removeNode(nodeId)}
/>
),
[removeNode, nodeId, deleteConfirmModal],
);
const contextMenu = useMemo(
() => (
<Menu
opened={contextMenuOpened}
withinPortal={true}
@@ -132,6 +130,29 @@ export const BaseNode: React.FC<BaseNodeProps> = ({
</Menu.Item>
</Menu.Dropdown>
</Menu>
),
[
handleDuplicateNode,
handleRemoveNode,
contextMenuExts,
children,
contextMenuStyle,
contextMenuOpened,
setContextMenuOpened,
],
);
// A BaseNode is just a div with "cfnode" as a class, and optional other className(s) for the specific node.
// It adds a context menu to all nodes upon right-click of the node itself (the div), to duplicate or delete the node.
return (
<div
className={classes}
onPointerDown={() => setContextMenuOpened(false)}
onContextMenu={handleOpenContextMenu}
style={style}
>
{areYouSureModal}
{contextMenu}
</div>
);
};
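
The BaseNode change above applies a standard React memoization pattern: handlers are wrapped in useCallback and heavy JSX subtrees in useMemo so they are rebuilt only when their dependencies change. A distilled sketch of that pattern, with hypothetical component and prop names (not taken from the codebase):

```tsx
import React, { useCallback, useMemo, useState } from "react";

// Hypothetical node wrapper showing the useCallback/useMemo pattern used in BaseNode.
const ExampleNode: React.FC<{ onDelete: () => void }> = ({ onDelete }) => {
  const [menuOpened, setMenuOpened] = useState(false);

  // Stable handler identity: only changes when `onDelete` changes.
  const handleDelete = useCallback(() => onDelete(), [onDelete]);

  // Heavy subtree is rebuilt only when its inputs change, not on every render of ExampleNode.
  const contextMenu = useMemo(
    () => (
      <ul style={{ display: menuOpened ? "block" : "none" }}>
        <li onClick={handleDelete}>Delete node</li>
      </ul>
    ),
    [menuOpened, handleDelete],
  );

  return (
    <div
      onContextMenu={(e) => {
        e.preventDefault();
        setMenuOpened(true);
      }}
    >
      {contextMenu}
    </div>
  );
};

export default ExampleNode;
```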

View File

@@ -0,0 +1,312 @@
import React, { useState, useEffect, useContext } from "react";
import {
IconEdit,
IconTrash,
IconMenu2,
IconX,
IconCheck,
} from "@tabler/icons-react";
import axios from "axios";
import { AlertModalContext } from "./AlertModal";
import { Dict } from "./backend/typing";
import {
ActionIcon,
Box,
Drawer,
Group,
Stack,
TextInput,
Text,
Flex,
Divider,
ScrollArea,
} from "@mantine/core";
import { FLASK_BASE_URL } from "./backend/utils";
interface FlowFile {
name: string;
last_modified: string;
}
interface FlowSidebarProps {
/** The name of the flow that's currently loaded in the front-end, if defined. */
currentFlow?: string;
onLoadFlow: (flowFile?: Dict<any>, flowName?: string) => void;
}
const FlowSidebar: React.FC<FlowSidebarProps> = ({
onLoadFlow,
currentFlow,
}) => {
const [isOpen, setIsOpen] = useState(false);
const [savedFlows, setSavedFlows] = useState<FlowFile[]>([]);
const [editName, setEditName] = useState<string | null>(null);
const [newEditName, setNewEditName] = useState<string>("newName");
// The name of the local directory where flows are stored
const [flowDir, setFlowDir] = useState<string | undefined>(undefined);
// For displaying alerts
const showAlert = useContext(AlertModalContext);
// Fetch saved flows from the Flask backend
const fetchSavedFlowList = async () => {
try {
const response = await axios.get(`${FLASK_BASE_URL}api/flows`);
const flows = response.data.flows as FlowFile[];
setFlowDir(response.data.flow_dir);
setSavedFlows(
flows.map((item) => ({
name: item.name.replace(".cforge", ""),
last_modified: new Date(item.last_modified).toLocaleString(),
})),
);
} catch (error) {
console.error("Error fetching saved flows:", error);
}
};
// Load a flow when clicked, and push it to the caller
const handleLoadFlow = async (filename: string) => {
try {
// Fetch the flow
const response = await axios.get(
`${FLASK_BASE_URL}api/flows/${filename}`,
);
// Push the flow to the ReactFlow UI. We also pass the filename
// so that the caller can use that info to save the right flow when the user presses save.
onLoadFlow(response.data, filename);
setIsOpen(false); // Close sidebar after loading
} catch (error) {
console.error(`Error loading flow ${filename}:`, error);
if (showAlert) showAlert(error as Error);
}
};
// Delete a flow
const handleDeleteFlow = async (
filename: string,
event: React.MouseEvent<HTMLButtonElement, MouseEvent>,
) => {
event.stopPropagation(); // Prevent triggering the parent click
if (window.confirm(`Are you sure you want to delete "${filename}"?`)) {
try {
await axios.delete(`${FLASK_BASE_URL}api/flows/${filename}`);
fetchSavedFlowList(); // Refresh the list
} catch (error) {
console.error(`Error deleting flow ${filename}:`, error);
if (showAlert) showAlert(error as Error);
}
}
};
// Start editing a flow name
const handleEditClick = (
flowFile: string,
event: React.MouseEvent<HTMLButtonElement, MouseEvent>,
) => {
event.stopPropagation(); // Prevent triggering the parent click
setEditName(flowFile);
setNewEditName(flowFile);
};
// Cancel editing
const handleCancelEdit = (
event: React.MouseEvent<HTMLButtonElement, MouseEvent>,
) => {
event.stopPropagation(); // Prevent triggering the parent click
setEditName(null);
};
// Save the edited flow name
const handleSaveEdit = async (
oldFilename: string,
newFilename: string,
event: React.MouseEvent<HTMLButtonElement, MouseEvent>,
) => {
event?.stopPropagation(); // Prevent triggering the parent click
if (newFilename && newFilename !== oldFilename) {
await axios
.put(`${FLASK_BASE_URL}api/flows/${oldFilename}`, {
newName: newFilename,
})
.then(() => {
onLoadFlow(undefined, newFilename); // Tell the parent that the filename has changed. This won't replace the flow.
fetchSavedFlowList(); // Refresh the list
})
.catch((error) => {
let msg: string;
if (error.response) {
msg = `404 Error: ${error.response.status === 404 ? error.response.data?.error ?? "Not Found" : error.response.data}`;
} else if (error.request) {
// Request was made but no response was received
msg = "No response received from server.";
} else {
// Something else happened in setting up the request
msg = `Unknown Error: ${error.message}`;
}
console.error(msg);
if (showAlert) showAlert(msg);
});
}
// No longer editing
setEditName(null);
setNewEditName("newName");
};
// Load flows when component mounts
useEffect(() => {
if (isOpen) {
fetchSavedFlowList();
}
}, [isOpen]);
return (
<div className="relative">
{/* <RenameValueModal title="Rename flow" label="Edit name" initialValue="" onSubmit={handleEditName} /> */}
{/* Toggle Button */}
<ActionIcon
variant="gradient"
size="1.625rem"
style={{
position: "absolute",
top: "10px",
left: "10px",
// left: isOpen ? "250px" : "10px",
// transition: "left 0.3s ease-in-out",
zIndex: 10,
}}
onClick={() => setIsOpen(!isOpen)}
>
{isOpen ? <IconX /> : <IconMenu2 />}
</ActionIcon>
{/* Sidebar */}
<Drawer
opened={isOpen}
onClose={() => setIsOpen(false)}
title="Saved Flows"
position="left"
size="250px" // Adjust sidebar width
padding="md"
withCloseButton={true}
scrollAreaComponent={ScrollArea.Autosize}
>
<Divider />
<Stack spacing="4px" mt="0px" mb="120px">
{savedFlows.length === 0 ? (
<Text color="dimmed">No saved flows found</Text>
) : (
savedFlows.map((flow) => (
<Box
key={flow.name}
p="6px"
sx={(theme) => ({
borderRadius: theme.radius.sm,
cursor: "pointer",
"&:hover": {
backgroundColor:
theme.colorScheme === "dark"
? theme.colors.dark[6]
: theme.colors.gray[0],
},
})}
onClick={() => {
if (editName !== flow.name) handleLoadFlow(flow.name);
}}
>
{editName === flow.name ? (
<Group spacing="xs">
<TextInput
value={newEditName}
onChange={(e) => setNewEditName(e.target.value)}
style={{ flex: 1 }}
autoFocus
/>
<ActionIcon
color="green"
onClick={(e) => handleSaveEdit(editName, newEditName, e)}
>
<IconCheck size={18} />
</ActionIcon>
<ActionIcon color="gray" onClick={handleCancelEdit}>
<IconX size={18} />
</ActionIcon>
</Group>
) : (
<>
<Flex
justify="space-between"
align="center"
gap="0px"
h="auto"
>
{currentFlow === flow.name ? (
<Box
ml="-15px"
mr="5px"
bg="green"
w="10px"
h="10px"
style={{ borderRadius: "50%" }}
></Box>
) : (
<></>
)}
<Text size="sm" mr="auto">
{flow.name}
</Text>
<Flex gap="0px">
<ActionIcon
color="blue"
onClick={(e) => handleEditClick(flow.name, e)}
>
<IconEdit size={18} />
</ActionIcon>
<ActionIcon
color="red"
onClick={(e) => handleDeleteFlow(flow.name, e)}
>
<IconTrash size={18} />
</ActionIcon>
</Flex>
</Flex>
<Text size="xs" color="gray">
{flow.last_modified}
</Text>
</>
)}
<Divider />
</Box>
))
)}
</Stack>
{/* Sticky footer */}
<div
style={{
position: "fixed",
bottom: 0,
background: "white",
padding: "10px",
borderTop: "1px solid #ddd",
}}
>
{flowDir ? (
<Text size="xs" color="gray">
Local flows are saved at: {flowDir}
</Text>
) : (
<></>
)}
</div>
</Drawer>
</div>
);
};
export default FlowSidebar;
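
Because the App.tsx diff is suppressed earlier in this compare view, the real integration of FlowSidebar is not shown. The following is only an assumed sketch of how a parent could mount it, based on the FlowSidebarProps interface in this file; the importFlowFromJSON call is hypothetical.

```tsx
import React, { useCallback, useState } from "react";
import FlowSidebar from "./FlowSidebar";
import { Dict } from "./backend/typing";

const App: React.FC = () => {
  // Remember which saved flow is loaded so the sidebar can mark it with the green dot.
  const [currentFlow, setCurrentFlow] = useState<string | undefined>(undefined);

  const onLoadFlow = useCallback((flowJSON?: Dict<any>, flowName?: string) => {
    if (flowName !== undefined) setCurrentFlow(flowName);
    // A rename passes only the new name; a load also passes the flow JSON.
    if (flowJSON !== undefined) {
      // importFlowFromJSON(flowJSON); // hypothetical: push the flow into the ReactFlow canvas
    }
  }, []);

  return <FlowSidebar currentFlow={currentFlow} onLoadFlow={onLoadFlow} />;
};

export default App;
```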

View File

@@ -94,6 +94,7 @@ const InspectorNode: React.FC<InspectorNodeProps> = ({ data, id }) => {
>
<LLMResponseInspector
jsonResponses={jsonResponses ?? []}
isOpen={true}
wideFormat={false}
/>
</div>

View File

@@ -13,7 +13,6 @@ import React, {
} from "react";
import {
MultiSelect,
Table,
NativeSelect,
Checkbox,
Flex,
@@ -22,9 +21,7 @@ import {
Tooltip,
TextInput,
Stack,
ScrollArea,
LoadingOverlay,
Button,
} from "@mantine/core";
import { useToggle } from "@mantine/hooks";
import {
@@ -38,10 +35,6 @@ import {
useMantineReactTable,
type MRT_ColumnDef,
type MRT_Cell,
type MRT_ColumnFiltersState,
type MRT_SortingState,
type MRT_Virtualizer,
MRT_Row,
MRT_ShowHideColumnsButton,
MRT_ToggleFiltersButton,
MRT_ToggleDensePaddingButton,
@@ -273,17 +266,27 @@ export const exportToExcel = (
export interface LLMResponseInspectorProps {
jsonResponses: LLMResponse[];
isOpen: boolean;
wideFormat?: boolean;
}
const LLMResponseInspector: React.FC<LLMResponseInspectorProps> = ({
jsonResponses,
isOpen,
wideFormat,
}) => {
// Responses
const [responseDivs, setResponseDivs] = useState<React.ReactNode>([]);
const [receivedResponsesOnce, setReceivedResponsesOnce] = useState(false);
// Debounce isOpen changes, to avoid blocking the UI
const [isOpenDelayed, setIsOpenDelayed] = useState(false);
useEffect(() => {
setTimeout(() => {
setIsOpenDelayed(isOpen);
}, 300);
}, [isOpen]);
// The type of view to use to display responses. Can be either hierarchy or table.
const [viewFormat, setViewFormat] = useState(
wideFormat ? "table" : "hierarchy",
@@ -302,7 +305,6 @@ const LLMResponseInspector: React.FC<LLMResponseInspectorProps> = ({
true,
false,
]);
const [numMatches, setNumMatches] = useState(-1);
// Count the number of response texts whenever jsonResponses changes
const numResponses = useMemo(() => {
@@ -773,33 +775,6 @@ const LLMResponseInspector: React.FC<LLMResponseInspectorProps> = ({
});
return row;
// return (
// <tr key={`r${idx}`} style={{ borderBottom: "2px solid #fff" }}>
// {var_cols_vals.map((c, i) => (
// <td key={`v${i}`} className="inspect-table-var">
// <ScrollArea.Autosize mt="sm" mah={500} maw={300}>
// {StringLookup.get(c)}
// </ScrollArea.Autosize>
// </td>
// ))}
// {metavar_cols_vals.map((c, i) => (
// <td key={`m${i}`} className="inspect-table-metavar">
// {StringLookup.get(c)}
// </td>
// ))}
// {sel_var_cols.map((c, i) => (
// <td key={`c${i}`} className="inspect-table-llm-resp">
// {StringLookup.get(c)}
// </td>
// ))}
// {eval_cols_vals.map((c, i) => (
// <td key={`e${i}`} className="inspect-table-score-col">
// <Stack spacing={0}>{c}</Stack>
// </td>
// ))}
// </tr>
// );
},
);
@@ -863,7 +838,9 @@ const LLMResponseInspector: React.FC<LLMResponseInspectorProps> = ({
return (
<Stack spacing={0}>
{(val.data as [string | JSX.Element, string][]).map(
(e) => e[0],
(e, i) => (
<div key={i}>{e[0]}</div>
),
)}
</Stack>
);
@@ -878,10 +855,12 @@ const LLMResponseInspector: React.FC<LLMResponseInspectorProps> = ({
)}
</Stack>
);
// return <div style={{backgroundColor: "red"}}>{cell.getValue() as string}</div>;
},
Header: ({ column }) => (
<div style={{ lineHeight: 1.0, overflowY: "auto", maxHeight: 100 }}>
<div
key={column.columnDef.id}
style={{ lineHeight: 1.0, overflowY: "auto", maxHeight: 100 }}
>
{column.columnDef.header}
</div>
),
@@ -1069,8 +1048,6 @@ const LLMResponseInspector: React.FC<LLMResponseInspectorProps> = ({
const divs = groupByVars(responses, selected_vars, [], null);
setResponseDivs(divs);
}
// setNumMatches(numResponsesDisplayed);
});
};
@@ -1111,10 +1088,7 @@ const LLMResponseInspector: React.FC<LLMResponseInspectorProps> = ({
<Flex gap="6px" align="end" w="100%">
<TextInput
id="search_bar"
label={
"Find"
// + (searchValue.length > 0 ? ` (${numMatches}/${numResponses})` : "")
}
label={"Find"}
autoComplete="off"
size={sz}
placeholder={"Search responses"}
@@ -1162,7 +1136,6 @@ const LLMResponseInspector: React.FC<LLMResponseInspectorProps> = ({
searchValue,
filterBySearchValue,
numResponses,
numMatches,
sz,
toggleCaseSensitivity,
toggleFilterBySearchValue,
@@ -1228,13 +1201,20 @@ const LLMResponseInspector: React.FC<LLMResponseInspectorProps> = ({
</Tabs.Panel>
</Tabs>
<div className="nowheel nodrag">
<div className="nowheel nodrag" style={{ height: "800px" }}>
{/* To get the overlay to operate just inside the div, use style={{position: "relative"}}. However it won't show the spinner in the right place. */}
<LoadingOverlay visible={showLoadingSpinner} overlayOpacity={0.5} />
{viewFormat === "table" ? (
<MantineReactTable table={table} />
<LoadingOverlay
visible={showLoadingSpinner || (isOpen && !isOpenDelayed)}
overlayOpacity={0.5}
/>
{isOpenDelayed ? (
viewFormat === "table" ? (
<MantineReactTable table={table} />
) : (
responseDivs
)
) : (
responseDivs
<></>
)}
</div>
</div>
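
The isOpen/isOpenDelayed change above defers mounting the expensive inspector contents until shortly after the container opens, so the modal or drawer appears immediately while a loading overlay covers the gap. A stripped-down sketch of the same idea; DeferredMount and the 300 ms delay are illustrative, not the actual component:

```tsx
import React, { useEffect, useState } from "react";

// Defer mounting an expensive child until shortly after the container opens,
// so opening the modal/drawer is not blocked by the child's first render.
const DeferredMount: React.FC<{ isOpen: boolean; children: React.ReactNode }> = ({
  isOpen,
  children,
}) => {
  const [isOpenDelayed, setIsOpenDelayed] = useState(false);

  useEffect(() => {
    const t = setTimeout(() => setIsOpenDelayed(isOpen), 300);
    return () => clearTimeout(t); // drop stale updates if isOpen flips quickly
  }, [isOpen]);

  // Stand-in for the LoadingOverlay shown in the real inspector.
  if (!isOpenDelayed) return <div>Loading…</div>;
  return <>{children}</>;
};

export default DeferredMount;
```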

View File

@@ -22,6 +22,7 @@ export default function LLMResponseInspectorDrawer({
>
<LLMResponseInspector
jsonResponses={jsonResponses}
isOpen={showDrawer}
wideFormat={false}
/>
</div>

View File

@@ -80,6 +80,7 @@ const LLMResponseInspectorModal = forwardRef<
<Suspense fallback={<LoadingOverlay visible={true} />}>
<LLMResponseInspector
jsonResponses={props.jsonResponses}
isOpen={opened}
wideFormat={true}
/>
</Suspense>

View File

@@ -411,24 +411,36 @@ const PromptNode: React.FC<PromptNodeProps> = ({
[setTemplateVars, templateVars, pullInputData, id],
);
const handleInputChange = (event: React.ChangeEvent<HTMLTextAreaElement>) => {
const value = event.target.value;
const handleInputChange = useCallback(
(event: React.ChangeEvent<HTMLTextAreaElement>) => {
const value = event.target.value;
const updateStatus =
promptTextOnLastRun !== null &&
status !== Status.WARNING &&
value !== promptTextOnLastRun;
// Store prompt text
setPromptText(value);
data.prompt = value;
// Store prompt text
data.prompt = value;
// Update status icon, if need be:
if (
promptTextOnLastRun !== null &&
status !== Status.WARNING &&
value !== promptTextOnLastRun
)
setStatus(Status.WARNING);
// Debounce the global state change to happen only after 500ms, as it forces a costly rerender:
debounce((_value, _updateStatus) => {
setPromptText(_value);
setDataPropsForNode(id, { prompt: _value });
refreshTemplateHooks(_value);
if (_updateStatus) setStatus(Status.WARNING);
}, 300)(value, updateStatus);
// Debounce refreshing the template hooks so we don't annoy the user
debounce((_value) => refreshTemplateHooks(_value), 500)(value);
};
// Debounce refreshing the template hooks so we don't annoy the user
// debounce((_value) => refreshTemplateHooks(_value), 500)(value);
},
[
promptTextOnLastRun,
status,
refreshTemplateHooks,
setDataPropsForNode,
debounceTimeoutRef,
],
);
// On initialization
useEffect(() => {
@@ -467,7 +479,7 @@ const PromptNode: React.FC<PromptNodeProps> = ({
// Chat nodes only. Pulls input data attached to the 'past conversations' handle.
// Returns a tuple (past_chat_llms, __past_chats), where both are undefined if nothing is connected.
const pullInputChats = () => {
const pullInputChats = useCallback(() => {
const pulled_data = pullInputData(["__past_chats"], id);
if (!("__past_chats" in pulled_data)) return [undefined, undefined];
@@ -535,36 +547,46 @@ const PromptNode: React.FC<PromptNodeProps> = ({
// Returns [list of LLM specs, list of ChatHistoryInfo]
return [past_chat_llms, past_chats];
};
}, [id, pullInputData]);
// Ask the backend how many responses it needs to collect, given the input data:
const fetchResponseCounts = (
prompt: string,
vars: Dict,
llms: (StringOrHash | LLMSpec)[],
chat_histories?:
| (ChatHistoryInfo | undefined)[]
| Dict<(ChatHistoryInfo | undefined)[]>,
) => {
return countQueries(
prompt,
vars,
llms,
const fetchResponseCounts = useCallback(
(
prompt: string,
vars: Dict,
llms: (StringOrHash | LLMSpec)[],
chat_histories?:
| (ChatHistoryInfo | undefined)[]
| Dict<(ChatHistoryInfo | undefined)[]>,
) => {
return countQueries(
prompt,
vars,
llms,
numGenerations,
chat_histories,
id,
node_type !== "chat" ? showContToggle && contWithPriorLLMs : undefined,
).then(function (results) {
return [results.counts, results.total_num_responses] as [
Dict<Dict<number>>,
Dict<number>,
];
});
},
[
countQueries,
numGenerations,
chat_histories,
showContToggle,
contWithPriorLLMs,
id,
node_type !== "chat" ? showContToggle && contWithPriorLLMs : undefined,
).then(function (results) {
return [results.counts, results.total_num_responses] as [
Dict<Dict<number>>,
Dict<number>,
];
});
};
node_type,
],
);
// On hover over the 'info' button, to preview the prompts that will be sent out
const [promptPreviews, setPromptPreviews] = useState<PromptInfo[]>([]);
const handlePreviewHover = () => {
const handlePreviewHover = useCallback(() => {
// Pull input data and prompt
try {
const pulled_vars = pullInputData(templateVars, id);
@@ -585,10 +607,18 @@ const PromptNode: React.FC<PromptNodeProps> = ({
console.error(err);
setPromptPreviews([]);
}
};
}, [
pullInputData,
templateVars,
id,
updateShowContToggle,
generatePrompts,
promptText,
pullInputChats,
]);
// On hover over the 'Run' button, request how many responses are required and update the tooltip. Soft fails.
const handleRunHover = () => {
const handleRunHover = useCallback(() => {
// Check if the PromptNode is not already waiting for a response...
if (status === "loading") {
setRunTooltip("Fetching responses...");
@@ -719,9 +749,17 @@ const PromptNode: React.FC<PromptNodeProps> = ({
console.error(err); // soft fail
setRunTooltip("Could not reach backend server.");
});
};
}, [
status,
llmItemsCurrState,
pullInputChats,
contWithPriorLLMs,
pullInputData,
fetchResponseCounts,
promptText,
]);
const handleRunClick = () => {
const handleRunClick = useCallback(() => {
// Go through all template hooks (if any) and check they're connected:
const is_fully_connected = templateVars.every((varname) => {
// Check that some edge has, as its target, this node and its template hook:
@@ -1058,7 +1096,31 @@ Soft failing by replacing undefined with empty strings.`,
.then(open_progress_listener)
.then(query_llms)
.catch(rejected);
};
}, [
templateVars,
triggerAlert,
pullInputChats,
pullInputData,
updateShowContToggle,
llmItemsCurrState,
contWithPriorLLMs,
showAlert,
fetchResponseCounts,
numGenerations,
promptText,
apiKeys,
showContToggle,
cancelId,
refreshCancelId,
node_type,
id,
setDataPropsForNode,
llmListContainer,
responsesWillChange,
showDrawer,
pingOutputNodes,
debounceTimeoutRef,
]);
const handleStopClick = useCallback(() => {
CancelTracker.add(cancelId);
@@ -1076,7 +1138,7 @@ Soft failing by replacing undefined with empty strings.`,
setStatus(Status.NONE);
setContChatToggleDisabled(false);
llmListContainer?.current?.resetLLMItemsProgress();
}, [cancelId, refreshCancelId]);
}, [cancelId, refreshCancelId, debounceTimeoutRef]);
const handleNumGenChange = useCallback(
(event: React.ChangeEvent<HTMLInputElement>) => {
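
The handleInputChange rewrite above keeps the cheap per-keystroke work immediate and debounces the costly global-state update. A generic sketch of that idea using a ref-held timeout; this is not ChainForge's own debounce helper, whose implementation is not shown in this diff:

```tsx
import { useCallback, useRef } from "react";

// Returns a debounced wrapper around `fn`: rapid calls collapse into one call
// that fires `delayMs` after the last keystroke.
function useDebouncedCallback<A extends unknown[]>(
  fn: (...args: A) => void,
  delayMs: number,
) {
  const timeoutRef = useRef<ReturnType<typeof setTimeout> | null>(null);
  return useCallback(
    (...args: A) => {
      if (timeoutRef.current) clearTimeout(timeoutRef.current);
      timeoutRef.current = setTimeout(() => fn(...args), delayMs);
    },
    [fn, delayMs],
  );
}

// Hypothetical usage: update cheap local state right away, debounce the expensive store write.
// const pushToStore = useDebouncedCallback((text: string) => setDataPropsForNode(id, { prompt: text }), 300);
// const onChange = (e: React.ChangeEvent<HTMLTextAreaElement>) => { data.prompt = e.target.value; pushToStore(e.target.value); };
```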

View File

@@ -36,6 +36,7 @@ export const getEvalResultStr = (
} else if (typeof eval_item === "object") {
const strs: [JSX.Element | string, string][] = Object.keys(eval_item).map(
(key, j) => {
const innerKey = `${key}-${j}`;
let val = eval_item[key];
if (typeof val === "number" && val.toString().indexOf(".") > -1)
val = val.toFixed(4); // truncate floats to 4 decimal places
@@ -43,9 +44,9 @@ if (onlyString) return [`${key}: ${recurs_str}`, recurs_str];
if (onlyString) return [`${key}: ${recurs_str}`, recurs_str];
else
return [
<div key={`${key}-${j}`}>
<span>{key}: </span>
<span>{recurs_res}</span>
<div key={innerKey}>
<span key={0}>{key}: </span>
<span key={1}>{recurs_res}</span>
</div>,
recurs_str,
];
@@ -57,7 +58,9 @@ } else
} else
return [
<Stack key={1} spacing={0}>
{strs}
{strs.map((s, i) => (
<span key={i}>{s[0]}</span>
))}
</Stack>,
joined_strs,
];
@@ -272,6 +275,7 @@ export const genResponseTextsDisplay = (
uid={res_obj.uid}
innerIdxs={origIdxs}
wideFormat={wideFormat}
responseData={r}
/>
</Suspense>
{llmName !== undefined &&

View File

@@ -5,11 +5,17 @@ import React, {
useMemo,
useState,
} from "react";
import { Button, Flex, Popover, Stack, Textarea } from "@mantine/core";
import { IconMessage2, IconThumbDown, IconThumbUp } from "@tabler/icons-react";
import { Button, Flex, Popover, Stack, Textarea, Tooltip } from "@mantine/core";
import {
IconCopy,
IconMessage2,
IconThumbDown,
IconThumbUp,
} from "@tabler/icons-react";
import StorageCache from "./backend/cache";
import useStore from "./store";
import { deepcopy } from "./backend/utils";
import { LLMResponseData } from "./backend/typing";
type RatingDict = Record<number, boolean | string | undefined>;
@@ -63,14 +69,14 @@ export interface ResponseRatingToolbarProps {
uid: string;
wideFormat?: boolean;
innerIdxs: number[];
onUpdateResponses?: () => void;
responseData?: string;
}
const ResponseRatingToolbar: React.FC<ResponseRatingToolbarProps> = ({
uid,
wideFormat,
innerIdxs,
onUpdateResponses,
responseData,
}) => {
// The cache keys storing the ratings for this response object
const gradeKey = getRatingKeyForResponse(uid, "grade");
@@ -108,6 +114,9 @@ const ResponseRatingToolbar: React.FC<ResponseRatingToolbarProps> = ({
const [noteText, setNoteText] = useState("");
const [notePopoverOpened, setNotePopoverOpened] = useState(false);
// Text state
const [copied, setCopied] = useState(false);
// Override the text in the internal textarea whenever upstream annotation changes.
useEffect(() => {
setNoteText(note !== undefined ? note.toString() : "");
@@ -133,7 +142,6 @@ const ResponseRatingToolbar: React.FC<ResponseRatingToolbarProps> = ({
new_grades[idx] = grade;
});
setRating(uid, "grade", new_grades);
if (onUpdateResponses) onUpdateResponses();
};
const onAnnotate = (label?: string) => {
@@ -145,7 +153,6 @@ const ResponseRatingToolbar: React.FC<ResponseRatingToolbarProps> = ({
new_notes[idx] = label;
});
setRating(uid, "note", new_notes);
if (onUpdateResponses) onUpdateResponses();
};
const handleSaveAnnotation = useCallback(() => {
@@ -175,6 +182,33 @@ const ResponseRatingToolbar: React.FC<ResponseRatingToolbarProps> = ({
>
<IconThumbDown size={size} />
</ToolbarButton>
<Tooltip
label={copied ? "Copied!" : "Copy"}
withArrow
arrowPosition="center"
>
<ToolbarButton
selected={copied}
onClick={() => {
if (responseData) {
navigator.clipboard
.writeText(responseData)
.then(() => {
console.log("Text copied to clipboard");
setCopied(() => true);
setTimeout(() => {
setCopied(() => false);
}, 1000);
})
.catch((err) => {
console.error("Failed to copy text: ", err);
});
}
}}
>
<IconCopy size={size} />
</ToolbarButton>
</Tooltip>
<Popover
opened={notePopoverOpened}
onChange={setNotePopoverOpened}

View File

@@ -1,4 +1,5 @@
import MarkdownIt from "markdown-it";
import axios from "axios";
import { v4 as uuid } from "uuid";
import {
Dict,
@@ -697,6 +698,40 @@ export async function fetchEnvironAPIKeys(): Promise<Dict<string>> {
}).then((res) => res.json());
}
export async function saveFlowToLocalFilesystem(
flowJSON: Dict,
filename: string,
): Promise<void> {
try {
await axios.put(`${FLASK_BASE_URL}api/flows/${filename}`, {
flow: flowJSON,
});
} catch (error) {
throw new Error(
`Error saving flow with name ${filename}: ${(error as Error).toString()}`,
);
}
}
export async function ensureUniqueFlowFilename(
filename: string,
): Promise<string> {
try {
const response = await axios.put(
`${FLASK_BASE_URL}api/getUniqueFlowFilename`,
{
name: filename,
},
);
return response.data as string;
} catch (error) {
console.error(
`Error contacting Flask to ensure a unique filename for the imported flow. Defaulting to the passed filename (warning: this risks overwriting an existing flow). Error: ${(error as Error).toString()}`,
);
return filename;
}
}
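
A plausible way a caller (for example App.tsx, whose diff is suppressed above) could compose these two helpers when importing a flow; the function name, import paths, and flow shape are assumptions:

```ts
import { Dict } from "./backend/typing";
import { ensureUniqueFlowFilename, saveFlowToLocalFilesystem } from "./backend/utils";

// Hypothetical save handler for an imported flow.
async function saveImportedFlow(flowJSON: Dict, importedName: string): Promise<string> {
  // Ask the backend for a name that won't clobber an existing .cforge file...
  const filename = await ensureUniqueFlowFilename(importedName);
  // ...then write the flow under that name.
  await saveFlowToLocalFilesystem(flowJSON, filename);
  return filename; // the caller can remember this name for subsequent saves
}
```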
/**
* Queries LLM(s) with root prompt template `prompt` and prompt input variables `vars`, `n` times per prompt.
* Soft-fails if API calls fail, and collects the errors in `errors` property of the return object.

View File

@@ -5,6 +5,5 @@ requests
openai
dalaipy==2.0.2
urllib3==1.26.6
anthropic
google-generativeai
mistune>=2.0
mistune>=2.0
platformdirs

View File

@@ -6,7 +6,7 @@ def readme():
setup(
name="chainforge",
version="0.3.4.0",
version="0.3.4.3",
packages=find_packages(),
author="Ian Arawjo",
description="A Visual Programming Environment for Prompt Engineering",
@@ -21,10 +21,9 @@ setup(
"flask[async]",
"flask_cors",
"requests",
"platformdirs",
"urllib3==1.26.6",
"openai",
"anthropic",
"google-generativeai",
"dalaipy>=2.0.2",
"mistune>=2.0", # for LLM response markdown parsing
],