Mirror of https://github.com/ianarawjo/ChainForge.git (synced 2025-03-14 08:16:37 +00:00)
Debounce re-renders in prompt node progress listener (#204)
* Debounce re-renders in prompt node progress listener
* Fix debounce lag in genAI for Items nodes
* Rebuild app and update version
parent 32c62225d2
commit 48f1314d23
asset-manifest.json
@@ -1,15 +1,15 @@
 {
   "files": {
     "main.css": "/static/css/main.15dfff17.css",
-    "main.js": "/static/js/main.b9d7b8bf.js",
+    "main.js": "/static/js/main.6451743d.js",
     "static/js/787.4c72bb55.chunk.js": "/static/js/787.4c72bb55.chunk.js",
     "index.html": "/index.html",
     "main.15dfff17.css.map": "/static/css/main.15dfff17.css.map",
-    "main.b9d7b8bf.js.map": "/static/js/main.b9d7b8bf.js.map",
+    "main.6451743d.js.map": "/static/js/main.6451743d.js.map",
     "787.4c72bb55.chunk.js.map": "/static/js/787.4c72bb55.chunk.js.map"
   },
   "entrypoints": [
     "static/css/main.15dfff17.css",
-    "static/js/main.b9d7b8bf.js"
+    "static/js/main.6451743d.js"
   ]
 }
index.html
@@ -1 +1 @@
-<!doctype html><html lang="en"><head><meta charset="utf-8"/><script async src="https://www.googletagmanager.com/gtag/js?id=G-RN3FDBLMCR"></script><script>function gtag(){dataLayer.push(arguments)}window.dataLayer=window.dataLayer||[],gtag("js",new Date),gtag("config","G-RN3FDBLMCR")</script><link rel="icon" href="/favicon.ico"/><meta name="viewport" content="width=device-width,initial-scale=1"/><meta name="theme-color" content="#000000"/><meta name="description" content="A visual programming environment for prompt engineering"/><link rel="apple-touch-icon" href="/logo192.png"/><link rel="manifest" href="/manifest.json"/><title>ChainForge</title><script defer="defer" src="/static/js/main.b9d7b8bf.js"></script><link href="/static/css/main.15dfff17.css" rel="stylesheet"></head><body><noscript>You need to enable JavaScript to run this app.</noscript><div id="root"></div></body></html>
+<!doctype html><html lang="en"><head><meta charset="utf-8"/><script async src="https://www.googletagmanager.com/gtag/js?id=G-RN3FDBLMCR"></script><script>function gtag(){dataLayer.push(arguments)}window.dataLayer=window.dataLayer||[],gtag("js",new Date),gtag("config","G-RN3FDBLMCR")</script><link rel="icon" href="/favicon.ico"/><meta name="viewport" content="width=device-width,initial-scale=1"/><meta name="theme-color" content="#000000"/><meta name="description" content="A visual programming environment for prompt engineering"/><link rel="apple-touch-icon" href="/logo192.png"/><link rel="manifest" href="/manifest.json"/><title>ChainForge</title><script defer="defer" src="/static/js/main.6451743d.js"></script><link href="/static/css/main.15dfff17.css" rel="stylesheet"></head><body><noscript>You need to enable JavaScript to run this app.</noscript><div id="root"></div></body></html>
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
chainforge/react-server/src/ItemsNode.js (22 changed lines, vendored)
@@ -56,14 +56,19 @@ const ItemsNode = ({ data, id }) => {
   }, []);
 
   // Handle a change in a text fields' input.
-  const setFieldsFromText = useCallback((text_val) => {
-    // Debounce the state change to only run 300 ms after the edit
-    debounce((_text_val) => {
+  const setFieldsFromText = useCallback((text_val, no_debounce) => {
+    const _update = (_text_val) => {
       // Update the data for this text fields' id.
       const new_data = { text: _text_val, fields: processCSV(_text_val).map(stripWrappingQuotes).map(escapeBraces) };
       setDataPropsForNode(id, new_data);
       pingOutputNodes(id);
-    }, 300)(text_val);
+    };
+
+    // Debounce the state change to only run 300 ms after the edit
+    if (no_debounce)
+      _update(text_val);
+    else
+      debounce(_update, 300)(text_val);
   }, [id, pingOutputNodes, setDataPropsForNode]);
 
   const handKeyDown = useCallback((event) => {

@@ -106,7 +111,7 @@ const ItemsNode = ({ data, id }) => {
     setCountText(
       <Text size="xs" style={{ marginTop: '5px' }} color='gray' align='right'>{elements.length} elements</Text>
     );
-  }, [data.text, handleDivOnClick]);
+  }, [data, handleDivOnClick]);
 
   // When isEditing changes, add input
   useEffect(() => {

@@ -139,8 +144,7 @@ const ItemsNode = ({ data, id }) => {
     if (isEditing || !data.text) return;
 
     renderCsvDiv();
-
-  }, [id, data.text]);
+  }, [id, data]);
 
   return (
     <BaseNode classNames="text-fields-node" nodeId={id}>

@@ -151,8 +155,8 @@ const ItemsNode = ({ data, id }) => {
         (flags["aiSupport"] ?
           [<AIPopover key='ai-popover'
             values={data.fields ?? []}
-            onAddValues={(vals) => setFieldsFromText(data.text + ", " + vals.map(makeSafeForCSLFormat).join(", "))}
-            onReplaceValues={(vals) => setFieldsFromText(vals.map(makeSafeForCSLFormat).join(", "))}
+            onAddValues={(vals) => setFieldsFromText(data.text + ", " + vals.map(makeSafeForCSLFormat).join(", "), true)}
+            onReplaceValues={(vals) => setFieldsFromText(vals.map(makeSafeForCSLFormat).join(", "), true)}
             areValuesLoading={isLoading}
             setValuesLoading={setIsLoading}
             apiKeys={apiKeys} />]
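The ItemsNode change above funnels every edit through a single _update function and only wraps it in debounce() when no_debounce is falsy, which is why the AIPopover callbacks now pass true: values inserted by generative AI show up immediately, while typed edits still wait 300 ms after the last keystroke. A minimal standalone sketch of that flow; applyUpdate stands in for the node's real state writes (setDataPropsForNode + pingOutputNodes), and the debounce helper is assumed to share one timer across calls, like the one this commit adds to PromptNode.js:

// Sketch only, not ChainForge's exact code.
let timer = null;
const debounce = (fn, ms) => (...args) => {
  if (timer) clearTimeout(timer);              // cancel any pending update
  timer = setTimeout(() => fn(...args), ms);   // reschedule with the latest value
};
const applyUpdate = (val) => console.log('state updated:', val); // illustrative stand-in

const setFieldsFromText = (text_val, no_debounce) => {
  const _update = (_text_val) => applyUpdate(_text_val);
  if (no_debounce) _update(text_val);          // e.g. values inserted by the AI popover
  else debounce(_update, 300)(text_val);       // typed edits collapse into one update
};

setFieldsFromText("a, b, c");        // debounced: logs once, ~300 ms later
setFieldsFromText("x, y, z", true);  // immediate: logs right away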
chainforge/react-server/src/PromptNode.js (54 changed lines, vendored)
@@ -125,6 +125,19 @@ const PromptNode = ({ data, id, type: node_type }) => {
   const [progressAnimated, setProgressAnimated] = useState(true);
   const [runTooltip, setRunTooltip] = useState(null);
 
+  // Debounce helpers
+  const debounceTimeoutRef = useRef(null);
+  const debounce = (func, delay) => {
+    return (...args) => {
+      if (debounceTimeoutRef.current) {
+        clearTimeout(debounceTimeoutRef.current);
+      }
+      debounceTimeoutRef.current = setTimeout(() => {
+        func(...args);
+      }, delay);
+    };
+  };
+
   const triggerAlert = useCallback((msg) => {
     setProgress(undefined);
     llmListContainer?.current?.resetLLMItemsProgress();

@@ -151,8 +164,11 @@ const PromptNode = ({ data, id, type: node_type }) => {
   }, [signalDirty]);
 
   const onLLMListItemsChange = useCallback((new_items, old_items) => {
-    setLLMItemsCurrState(new_items);
-    setDataPropsForNode(id, { llms: new_items });
+    // Update the local and global state, with some debounce to limit re-rendering:
+    debounce((_id, _new_items) => {
+      setLLMItemsCurrState(_new_items);
+      setDataPropsForNode(_id, { llms: _new_items });
+    }, 300)(id, new_items);
 
     // If there's been any change to the item list, signal dirty:
     if (new_items.length !== old_items.length || !new_items.every(i => old_items.some(s => s.key === i.key))) {

@@ -538,16 +554,7 @@ const PromptNode = ({ data, id, type: node_type }) => {
 
       // Update individual progress bars
       const num_llms = _llmItemsCurrState.length;
-      const num_resp_per_llm = (max_responses / num_llms);
-      llmListContainer?.current?.updateProgress(item => {
-        if (item.key in progress_by_llm_key) {
-          item.progress = {
-            success: progress_by_llm_key[item.key]['success'] / num_resp_per_llm * 100,
-            error: progress_by_llm_key[item.key]['error'] / num_resp_per_llm * 100,
-          }
-        }
-        return item;
-      });
+      const num_resp_per_llm = (max_responses / num_llms);
 
       // Update total progress bar
       const total_num_success = Object.keys(progress_by_llm_key).reduce((acc, llm_key) => {

@@ -557,13 +564,27 @@ const PromptNode = ({ data, id, type: node_type }) => {
         return acc + progress_by_llm_key[llm_key]['error'];
       }, 0);
 
-      setProgress({
-        success: Math.max(5, total_num_success / max_responses * 100),
-        error: total_num_error / max_responses * 100 }
-      );
+      // Debounce the progress bars UI update to ensure we don't re-render too often:
+      debounce(() => {
+        llmListContainer?.current?.updateProgress(item => {
+          if (item.key in progress_by_llm_key) {
+            item.progress = {
+              success: progress_by_llm_key[item.key]['success'] / num_resp_per_llm * 100,
+              error: progress_by_llm_key[item.key]['error'] / num_resp_per_llm * 100,
+            }
+          }
+          return item;
+        });
+
+        setProgress({
+          success: Math.max(5, total_num_success / max_responses * 100),
+          error: total_num_error / max_responses * 100 }
+        );
+      }, 30)();
     };
   };
 
 
   // Run all prompt permutations through the LLM to generate + cache responses:
   const query_llms = () => {
     return fetch_from_backend('queryllm', {

@@ -638,6 +659,7 @@ const PromptNode = ({ data, id, type: node_type }) => {
       // We also need to store a unique metavar for the LLM *set* (set of LLM nicknames) that produced these responses,
       // so we can keep track of 'upstream' LLMs (and plot against them) later on:
       const llm_metavar_key = getUniqueLLMMetavarKey(json.responses);
+
       setDataPropsForNode(id, {fields: json.responses.map(
         resp_obj => resp_obj['responses'].map(
           r => {
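The debounce helper added at the top of PromptNode.js is a trailing-edge debounce built on a single useRef'd timeout: each call cancels the pending timeout and reschedules with the latest arguments, so only the last call inside the delay window actually fires. A minimal sketch of the same shape outside React, where timeoutRef stands in for the useRef and onProgressTick is an illustrative callback:

// Sketch only: mirrors the shape of the in-component helper.
const timeoutRef = { current: null };
const debounce = (func, delay) => (...args) => {
  if (timeoutRef.current) clearTimeout(timeoutRef.current);     // drop the pending call
  timeoutRef.current = setTimeout(() => func(...args), delay);  // reschedule with the latest args
};

const onProgressTick = (pct) => console.log(`progress: ${pct}%`);
const debouncedTick = debounce(onProgressTick, 30);
for (let pct = 0; pct <= 100; pct++) debouncedTick(pct);  // logs only "progress: 100%", ~30 ms later

Because only the trailing call fires, the progress listener renders just the most recent snapshot instead of re-rendering on every response that streams back.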
chainforge/react-server/src/TextFieldsNode.js
@@ -273,7 +273,6 @@ const TextFieldsNode = ({ data, id }) => {
           value={textfieldsValues[i]}
           placeholder={flags["aiAutocomplete"] ? placeholder : undefined}
           disabled={fieldVisibility[i] === false}
           onBlur={(event) => handleTextFieldChange(i, event.currentTarget.value, false)}
           onChange={(event) => handleTextFieldChange(i, event.currentTarget.value, true)}
           onKeyDown={(event) => handleTextAreaKeyDown(event, placeholder, i)} />
         {Object.keys(textfieldsValues).length > 1 ? (
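The TextFields hunk shows onChange and onBlur calling the same handleTextFieldChange with opposite boolean flags. The handler body is outside this diff, so the following is only a hypothetical sketch of the debounce-while-typing / write-immediately split such a flag usually selects between; saveField and the flag's exact meaning are assumptions, not ChainForge's code:

// Hypothetical sketch; handleTextFieldChange's real body is not shown in this commit.
let fieldTimer = null;
const debounce = (fn, ms) => (...args) => {
  if (fieldTimer) clearTimeout(fieldTimer);
  fieldTimer = setTimeout(() => fn(...args), ms);
};
const saveField = (idx, val) => console.log(`field ${idx} saved:`, val); // illustrative sink

const handleTextFieldChange = (idx, val, shouldDebounce) => {
  if (shouldDebounce) debounce(saveField, 300)(idx, val);  // while typing (onChange)
  else saveField(idx, val);                                // write straight away (onBlur)
};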
chainforge/react-server/src/backend/backend.ts
@@ -732,21 +732,20 @@ export async function queryLLM(id: string,
       vars_lookup[varname][v] = i;
     });
   });
+  const vars_entries = Object.entries(vars_lookup);
   res.sort((a, b) => {
     if (!a.vars || !b.vars) return 0;
-    for (const [varname, vals] of Object.entries(vars_lookup)) {
+    for (const [varname, vals] of vars_entries) {
       if (varname in a.vars && varname in b.vars) {
-        const a_val = a.vars[varname];
-        const b_val = b.vars[varname];
-        const a_idx = vals[a_val];
-        const b_idx = vals[b_val];
+        const a_idx = vals[a.vars[varname]];
+        const b_idx = vals[b.vars[varname]];
         if (a_idx > -1 && b_idx > -1 && a_idx !== b_idx)
           return a_idx - b_idx;
       }
     }
     return 0;
   });
-
+
   // Save the responses *of this run* to the storage cache, for further recall:
   let cache_filenames = past_cache_files;
   llms.forEach((llm_spec: string | Dict) => {

@@ -759,7 +758,6 @@ export async function queryLLM(id: string,
     cache_files: cache_filenames,
     responses_last_run: res,
   });
-
   // Return all responses for all LLMs
   return {
     responses: res,
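In queryLLM, the sorting change hoists Object.entries(vars_lookup) out of the comparator (and drops the a_val/b_val temporaries), so the entries array is built once instead of on every one of the O(n log n) comparisons a sort makes. A small self-contained sketch of the same pattern with illustrative data:

// Sketch of the hoisting pattern with made-up data, not ChainForge's actual lookup.
const varOrder = { season: { spring: 0, summer: 1, fall: 2 } };
const items = [{ vars: { season: 'fall' } }, { vars: { season: 'spring' } }, { vars: { season: 'summer' } }];

const varEntries = Object.entries(varOrder);  // computed once, before the sort
items.sort((a, b) => {
  for (const [varname, order] of varEntries) {
    const a_idx = order[a.vars[varname]];
    const b_idx = order[b.vars[varname]];
    if (a_idx > -1 && b_idx > -1 && a_idx !== b_idx) return a_idx - b_idx;
  }
  return 0;
});
console.log(items.map(i => i.vars.season));   // [ 'spring', 'summer', 'fall' ]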
chainforge/react-server/src/backend/query.ts (whitespace-only change)
@@ -231,7 +231,7 @@ export class PromptPipeline {
     for await (const result of yield_as_completed(tasks)) {
      yield this.collect_LLM_response(result, llm, responses);
     }
-
+
     return true;
   }
 
chainforge/react-server/src/store.js (13 changed lines, vendored)
@@ -5,7 +5,7 @@ import {
   applyEdgeChanges,
 } from 'reactflow';
 import { escapeBraces } from './backend/template';
-import { filterDict } from './backend/utils';
+import { deepcopy, filterDict } from './backend/utils';
 import { APP_IS_RUNNING_LOCALLY } from './backend/utils';
 import { DuplicateVariableNameError } from './backend/errors';
 

@@ -292,14 +292,19 @@ const useStore = create((set, get) => ({
     return pulled_data;
   },
 
+  /**
+   * Sets select 'data' properties for node 'id'. This updates global state, and forces re-renders. Use sparingly.
+   * @param {*} id The id of the node to set 'data' properties for.
+   * @param {*} data_props The properties to set on the node's 'data'.
+   */
   setDataPropsForNode: (id, data_props) => {
     set({
       nodes: (nds =>
         nds.map(n => {
           if (n.id === id) {
-            for (const key of Object.keys(data_props))
-              n.data[key] = data_props[key];
-            n.data = JSON.parse(JSON.stringify(n.data)); // deep copy json
+            for (const key of Object.keys(data_props))
+              n.data[key] = data_props[key];
+            n.data = deepcopy(n.data);
           }
           return n;
         })
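store.js now refreshes n.data with a deepcopy helper imported from './backend/utils' rather than the inline JSON.parse(JSON.stringify(...)) round-trip. That helper's implementation is not part of this diff; the sketch below is only a guess at what such a utility might look like, with the same observable effect of handing back a fresh object:

// Hypothetical sketch -- ChainForge's actual deepcopy in ./backend/utils is not shown here.
export function deepcopy(obj) {
  // structuredClone (where available) avoids serializing to a JSON string and back;
  // otherwise fall back to the JSON round-trip the old inline code used.
  if (typeof structuredClone === 'function') return structuredClone(obj);
  return JSON.parse(JSON.stringify(obj));
}

Either way, the point of the call in setDataPropsForNode is that n.data gets a new object identity, which is what forces dependent components to re-render.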