Mirror of https://github.com/ianarawjo/ChainForge.git (synced 2025-03-14 08:16:37 +00:00)
Table view and persistent response inspector (#103)
* WIP Tabular data view in Inspect modal
* Keep response inspector mounted
* Rebuild react app and update package version
parent 973970b8ab
commit 038832910f
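The core of the inspector change, condensed from the diff below: the inspector keeps a viewFormat state ("hierarchy" or "table") and, in the wide (modal) layout, shows a Mantine Radio.Group to switch between the two renderings. A minimal sketch of that toggle (ViewFormatToggle is a made-up wrapper name for illustration; Radio, Group, and the state names come from the diff itself):

  import React, { useState } from 'react';
  import { Radio, Group } from '@mantine/core';

  // Simplified from LLMResponseInspector: the same viewFormat state decides
  // whether the effect builds nested hierarchy divs or a <Table> of rows.
  function ViewFormatToggle({ wideFormat }) {
    const [viewFormat, setViewFormat] = useState('hierarchy');
    if (!wideFormat) return null; // the toggle only appears in the wide (modal) layout
    return (
      <Radio.Group name="viewFormat" value={viewFormat} onChange={setViewFormat}>
        <Group>
          <Radio value="hierarchy" label="Hierarchy" />
          <Radio value="table" label="Table" />
        </Group>
      </Radio.Group>
    );
  }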
@@ -1,15 +1,15 @@
{
"files": {
"main.css": "/static/css/main.d97bf957.css",
"main.js": "/static/js/main.b9a02cd0.js",
"main.js": "/static/js/main.dd928385.js",
"static/js/787.4c72bb55.chunk.js": "/static/js/787.4c72bb55.chunk.js",
"index.html": "/index.html",
"main.d97bf957.css.map": "/static/css/main.d97bf957.css.map",
"main.b9a02cd0.js.map": "/static/js/main.b9a02cd0.js.map",
"main.dd928385.js.map": "/static/js/main.dd928385.js.map",
"787.4c72bb55.chunk.js.map": "/static/js/787.4c72bb55.chunk.js.map"
},
"entrypoints": [
"static/css/main.d97bf957.css",
"static/js/main.b9a02cd0.js"
"static/js/main.dd928385.js"
]
}
@@ -1 +1 @@
<!doctype html><html lang="en"><head><meta charset="utf-8"/><script async src="https://www.googletagmanager.com/gtag/js?id=G-RN3FDBLMCR"></script><script>function gtag(){dataLayer.push(arguments)}window.dataLayer=window.dataLayer||[],gtag("js",new Date),gtag("config","G-RN3FDBLMCR")</script><link rel="icon" href="/favicon.ico"/><meta name="viewport" content="width=device-width,initial-scale=1"/><meta name="theme-color" content="#000000"/><meta name="description" content="A visual programming environment for prompt engineering"/><link rel="apple-touch-icon" href="/logo192.png"/><link rel="manifest" href="/manifest.json"/><title>ChainForge</title><script defer="defer" src="/static/js/main.b9a02cd0.js"></script><link href="/static/css/main.d97bf957.css" rel="stylesheet"></head><body><noscript>You need to enable JavaScript to run this app.</noscript><div id="root"></div></body></html>
<!doctype html><html lang="en"><head><meta charset="utf-8"/><script async src="https://www.googletagmanager.com/gtag/js?id=G-RN3FDBLMCR"></script><script>function gtag(){dataLayer.push(arguments)}window.dataLayer=window.dataLayer||[],gtag("js",new Date),gtag("config","G-RN3FDBLMCR")</script><link rel="icon" href="/favicon.ico"/><meta name="viewport" content="width=device-width,initial-scale=1"/><meta name="theme-color" content="#000000"/><meta name="description" content="A visual programming environment for prompt engineering"/><link rel="apple-touch-icon" href="/logo192.png"/><link rel="manifest" href="/manifest.json"/><title>ChainForge</title><script defer="defer" src="/static/js/main.dd928385.js"></script><link href="/static/css/main.d97bf957.css" rel="stylesheet"></head><body><noscript>You need to enable JavaScript to run this app.</noscript><div id="root"></div></body></html>
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
chainforge/react-server/src/LLMResponseInspector.js (vendored, 349 changed lines)
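For readers skimming the diff below: the new table view builds one row per unique prompt and one column per LLM by leaning on a groupResponsesBy helper. That helper is not defined in this diff; the sketch below is a hypothetical reconstruction inferred from its call sites, shown only to make the table-building code easier to follow.

  // Hypothetical sketch (not the project's actual implementation): group
  // responses by a key function, collecting responses whose key is null or
  // undefined separately as "leftovers".
  function groupResponsesBy(responses, keyFn) {
    const groups = {};
    const leftovers = [];
    responses.forEach((r) => {
      const key = keyFn(r);
      if (key === null || key === undefined) leftovers.push(r);
      else if (key in groups) groups[key].push(r);
      else groups[key] = [r];
    });
    return [groups, leftovers];
  }

  // Usage mirroring the diff: one table row per unique prompt,
  // then per-LLM columns within each row.
  // const responses_by_prompt = groupResponsesBy(responses, (r) => r.prompt)[0];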
@@ -5,8 +5,9 @@
* be deployed in multiple locations.
*/
import React, { useState, useEffect, useRef } from 'react';
import { Collapse, Flex, MultiSelect, NativeSelect } from '@mantine/core';
import { Collapse, Radio, MultiSelect, Group, Table } from '@mantine/core';
import { useDisclosure } from '@mantine/hooks';
import { IconTable, IconSitemap } from '@tabler/icons-react';
import * as XLSX from 'xlsx';
import useStore from './store';
import { filterDict } from './backend/utils';
@@ -126,6 +127,9 @@ const LLMResponseInspector = ({ jsonResponses, wideFormat }) => {
const [responses, setResponses] = useState([]);
const [receivedResponsesOnce, setReceivedResponsesOnce] = useState(false);

// The type of view to use to display responses. Can be either hierarchy or table.
const [viewFormat, setViewFormat] = useState("hierarchy");

// The MultiSelect so people can dynamically set what vars they care about
const [multiSelectVars, setMultiSelectVars] = useState([]);
const [multiSelectValue, setMultiSelectValue] = useState([]);
@@ -140,14 +144,18 @@ const LLMResponseInspector = ({ jsonResponses, wideFormat }) => {

// Find all vars in responses
let found_vars = new Set();
let found_llms = new Set();
jsonResponses.forEach(res_obj => {
Object.keys(res_obj.vars).forEach(v => {
found_vars.add(v);
});
found_llms.add(res_obj.llm);
});
found_vars = Array.from(found_vars);
found_llms = Array.from(found_llms);

// Set the variables accessible in the MultiSelect for 'group by'
let msvars = Array.from(found_vars).map(name => (
let msvars = found_vars.map(name => (
// We add a $ prefix to mark this as a prompt parameter, and so
// in the future we can add special types of variables without name collisions
{value: `${name}`, label: name}
@@ -180,135 +188,185 @@ const LLMResponseInspector = ({ jsonResponses, wideFormat }) => {
}
};

// Now we need to perform groupings by each var in the selected vars list,
// nesting the groupings (preferrably with custom divs) and sorting within
// each group by value of that group's var (so all same values are clumped together).
// :: For instance, for varnames = ['LLM', '$var1', '$var2'] we should get back
// :: nested divs first grouped by LLM (first level), then by var1, then var2 (deepest level).
let leaf_id = 0;
let first_opened = false;
const groupByVars = (resps, varnames, eatenvars, header) => {
if (resps.length === 0) return [];
if (varnames.length === 0) {
// Base case. Display n response(s) to each single prompt, back-to-back:
let fixed_width = 100;
if (wideFormat && eatenvars.length > 0) {
const num_llms = Array.from(new Set(resps.map(res_obj => res_obj.llm))).length;
fixed_width = Math.max(20, Math.trunc(100 / num_llms)) - 1; // 20% width is lowest we will go (5 LLM response boxes max)
}
const resp_boxes = resps.map((res_obj, res_idx) => {
const generateResponseBoxes = (resps, eatenvars, fixed_width) => {
return resps.map((res_obj, res_idx) => {

const eval_res_items = res_obj.eval_res ? res_obj.eval_res.items : null;
const eval_res_items = res_obj.eval_res ? res_obj.eval_res.items : null;

// Bucket responses that have the same text, and sort by the
// number of same responses so that the top div is the most prevalent response.
// We first need to keep track of the original evaluation result per response str:
let resp_str_to_eval_res = {};
if (eval_res_items)
res_obj.responses.forEach((r, idx) => {
resp_str_to_eval_res[r] = eval_res_items[idx]
});
const same_resp_text_counts = countResponsesBy(res_obj.responses, (r) => r)[0];
const same_resp_keys = Object.keys(same_resp_text_counts).sort((key1, key2) => (same_resp_text_counts[key2] - same_resp_text_counts[key1]));
// Bucket responses that have the same text, and sort by the
// number of same responses so that the top div is the most prevalent response.
// We first need to keep track of the original evaluation result per response str:
let resp_str_to_eval_res = {};
if (eval_res_items)
res_obj.responses.forEach((r, idx) => {
resp_str_to_eval_res[r] = eval_res_items[idx]
});
const same_resp_text_counts = countResponsesBy(res_obj.responses, (r) => r)[0];
const same_resp_keys = Object.keys(same_resp_text_counts).sort((key1, key2) => (same_resp_text_counts[key2] - same_resp_text_counts[key1]));

// Spans for actual individual response texts
const ps = same_resp_keys.map((r, idx) => (
<div key={idx}>
{same_resp_text_counts[r] > 1 ?
(<span className="num-same-responses">{same_resp_text_counts[r]} times</span>)
: <></>}
{eval_res_items ? (
<p className="small-response-metrics">{getEvalResultStr(resp_str_to_eval_res[r])}</p>
) : <></>}
<pre className="small-response">{r}</pre>
</div>
));
// Spans for actual individual response texts
const ps = same_resp_keys.map((r, idx) => (
<div key={idx}>
{same_resp_text_counts[r] > 1 ?
(<span className="num-same-responses">{same_resp_text_counts[r]} times</span>)
: <></>}
{eval_res_items ? (
<p className="small-response-metrics">{getEvalResultStr(resp_str_to_eval_res[r])}</p>
) : <></>}
<pre className="small-response">{r}</pre>
</div>
));

// At the deepest level, there may still be some vars left over. We want to display these
// as tags, too, so we need to display only the ones that weren't 'eaten' during the recursive call:
// (e.g., the vars that weren't part of the initial 'varnames' list that form the groupings)
const unused_vars = filterDict(res_obj.vars, v => !eatenvars.includes(v));
const var_tags = Object.keys(unused_vars).map((varname) => {
const v = truncStr(unused_vars[varname].trim(), wideFormat ? 72 : 18);
return (<div key={varname} className="response-var-inline" >
<span className="response-var-name">{varname} = </span><span className="response-var-value">{v}</span>
</div>);
});
return (
<div key={"r"+res_idx} className="response-box" style={{ backgroundColor: color_for_llm(res_obj.llm), width: `${fixed_width}%` }}>
<div className="response-var-inline-container">
{var_tags}
</div>
{eatenvars.includes('LLM') ?
ps
: (<div className="response-item-llm-name-wrapper">
<h1>{res_obj.llm}</h1>
{ps}

</div>)
}
</div>
);
});
const className = eatenvars.length > 0 ? "response-group" : "";
const boxesClassName = eatenvars.length > 0 ? "response-boxes-wrapper" : "";
const flexbox = (wideFormat && fixed_width < 100) ? 'flex' : 'block';
const defaultOpened = !first_opened || eatenvars.length === 0 || eatenvars[eatenvars.length-1] === 'LLM';
first_opened = true;
leaf_id += 1;
return (
<div key={'l'+leaf_id} className={className} style={{ backgroundColor: rgroup_color(eatenvars.length) }}>
<ResponseGroup header={header}
responseBoxes={resp_boxes}
responseBoxesWrapperClass={boxesClassName}
displayStyle={flexbox}
defaultState={defaultOpened} />
// At the deepest level, there may still be some vars left over. We want to display these
// as tags, too, so we need to display only the ones that weren't 'eaten' during the recursive call:
// (e.g., the vars that weren't part of the initial 'varnames' list that form the groupings)
const unused_vars = filterDict(res_obj.vars, v => !eatenvars.includes(v));
const var_tags = Object.keys(unused_vars).map((varname) => {
const v = truncStr(unused_vars[varname].trim(), wideFormat ? 72 : 18);
return (<div key={varname} className="response-var-inline" >
<span className="response-var-name">{varname} = </span><span className="response-var-value">{v}</span>
</div>);
});
return (
<div key={"r"+res_idx} className="response-box" style={{ backgroundColor: color_for_llm(res_obj.llm), width: `${fixed_width}%` }}>
<div className="response-var-inline-container">
{var_tags}
</div>
);
}

// Bucket responses by the first var in the list, where
// we also bucket any 'leftover' responses that didn't have the requested variable (a kind of 'soft fail')
const group_name = varnames[0];
const [grouped_resps, leftover_resps] = (group_name === 'LLM')
? groupResponsesBy(resps, (r => r.llm))
: groupResponsesBy(resps, (r => ((group_name in r.vars) ? r.vars[group_name] : null)));
const get_header = (group_name === 'LLM')
? ((key, val) => (<div key={val} style={{backgroundColor: color_for_llm(val)}} className='response-llm-header'>{val}</div>))
: ((key, val) => getHeaderBadge(key, val, eatenvars.length));

// Now produce nested divs corresponding to the groups
const remaining_vars = varnames.slice(1);
const updated_eatenvars = eatenvars.concat([group_name]);
const defaultOpened = !first_opened || eatenvars.length === 0 || eatenvars[eatenvars.length-1] === 'LLM';
const grouped_resps_divs = Object.keys(grouped_resps).map(g => groupByVars(grouped_resps[g], remaining_vars, updated_eatenvars, get_header(group_name, g)));
const leftover_resps_divs = leftover_resps.length > 0 ? groupByVars(leftover_resps, remaining_vars, updated_eatenvars, get_header(group_name, undefined)) : [];

leaf_id += 1;

return (<div key={'h'+ group_name + '_' + leaf_id}>
{header ?
(<div key={group_name} className="response-group" style={{ backgroundColor: rgroup_color(eatenvars.length) }}>
<ResponseGroup header={header}
responseBoxes={grouped_resps_divs}
responseBoxesWrapperClass="response-boxes-wrapper"
displayStyle="block"
defaultState={defaultOpened} />
</div>)
: <div key={group_name}>{grouped_resps_divs}</div>}
{leftover_resps_divs.length === 0 ? (<></>) : (
<div key={'__unspecified_group'} className="response-group">
{leftover_resps_divs}
</div>
)}
</div>);
{eatenvars.includes('LLM') ?
ps
: (<div className="response-item-llm-name-wrapper">
<h1>{res_obj.llm}</h1>
{ps}
</div>)
}
</div>
);
});
};
// Produce DIV elements grouped by selected vars
const divs = groupByVars(responses, selected_vars, [], null);
setResponses(divs);

}, [multiSelectValue, jsonResponses, wideFormat]);
// Generate a view of the responses based on the view format set by the user
if (viewFormat === "table") {

// Generate a table, with default columns for: input vars, LLMs queried
// First get column names as input vars + LLMs:
const colnames = found_vars.concat(found_llms);

// Then group responses by prompts. Each prompt will become a separate row of the table (will be treated as unique)
const responses_by_prompt = groupResponsesBy(responses, (r => r.prompt))[0];

const rows = Object.entries(responses_by_prompt).map(([prompt, resp_objs], idx) => {
// We assume here that prompt input vars will be the same across all responses in this bundle,
// so we just take the value of the first one per each varname:
const vars_cols = found_vars.map(v => v in resp_objs[0].vars ? resp_objs[0].vars[v] : '(unspecified)');
const resp_objs_by_llm = groupResponsesBy(resp_objs, r => r.llm)[0];
const llm_cols = found_llms.map(llm => {
if (llm in resp_objs_by_llm) {
const rs = resp_objs_by_llm[llm];
if (rs.length > 1)
console.warn(`Found more than one response object for LLM ${llm} for the same prompt. Only displaying first...`);
// Return response divs as response box here:
return generateResponseBoxes(rs, found_vars, 100)[0];
} else {
console.warn(`Could not find response object for LLM: ${llm}`);
return (<span>(not queried)</span>);
}
});

return (
<tr key={idx} style={{borderBottom: '8px solid #eee'}}>
{vars_cols.map(c => (<td style={{backgroundColor: 'rgb(224, 244, 250)', paddingTop: '10px', borderRight: '1px solid #cde', fontWeight: '500'}}>{c}</td>))}
{llm_cols.map((c, i) => (<td style={{paddingTop: '8px', paddingBottom: '20px', borderRight: '1px solid #eee'}}>{c}</td>))}
</tr>
);
});

setResponses([(<Table>
<thead>
<tr>{colnames.map(c => (<th>{c}</th>))}</tr>
</thead>
<tbody style={{verticalAlign: 'top'}}>{rows}</tbody>
</Table>)]);
}
else if (viewFormat === "hierarchy") {

// Now we need to perform groupings by each var in the selected vars list,
// nesting the groupings (preferrably with custom divs) and sorting within
// each group by value of that group's var (so all same values are clumped together).
// :: For instance, for varnames = ['LLM', '$var1', '$var2'] we should get back
// :: nested divs first grouped by LLM (first level), then by var1, then var2 (deepest level).
let leaf_id = 0;
let first_opened = false;
const groupByVars = (resps, varnames, eatenvars, header) => {
if (resps.length === 0) return [];
if (varnames.length === 0) {
// Base case. Display n response(s) to each single prompt, back-to-back:
let fixed_width = 100;
if (wideFormat && eatenvars.length > 0) {
const num_llms = Array.from(new Set(resps.map(res_obj => res_obj.llm))).length;
fixed_width = Math.max(20, Math.trunc(100 / num_llms)) - 1; // 20% width is lowest we will go (5 LLM response boxes max)
}
const resp_boxes = generateResponseBoxes(resps, eatenvars, fixed_width);
const className = eatenvars.length > 0 ? "response-group" : "";
const boxesClassName = eatenvars.length > 0 ? "response-boxes-wrapper" : "";
const flexbox = (wideFormat && fixed_width < 100) ? 'flex' : 'block';
const defaultOpened = !first_opened || eatenvars.length === 0 || eatenvars[eatenvars.length-1] === 'LLM';
first_opened = true;
leaf_id += 1;
return (
<div key={'l'+leaf_id} className={className} style={{ backgroundColor: rgroup_color(eatenvars.length) }}>
<ResponseGroup header={header}
responseBoxes={resp_boxes}
responseBoxesWrapperClass={boxesClassName}
displayStyle={flexbox}
defaultState={defaultOpened} />
</div>
);
}

// Bucket responses by the first var in the list, where
// we also bucket any 'leftover' responses that didn't have the requested variable (a kind of 'soft fail')
const group_name = varnames[0];
const [grouped_resps, leftover_resps] = (group_name === 'LLM')
? groupResponsesBy(resps, (r => r.llm))
: groupResponsesBy(resps, (r => ((group_name in r.vars) ? r.vars[group_name] : null)));
const get_header = (group_name === 'LLM')
? ((key, val) => (<div key={val} style={{backgroundColor: color_for_llm(val)}} className='response-llm-header'>{val}</div>))
: ((key, val) => getHeaderBadge(key, val, eatenvars.length));

// Now produce nested divs corresponding to the groups
const remaining_vars = varnames.slice(1);
const updated_eatenvars = eatenvars.concat([group_name]);
const defaultOpened = !first_opened || eatenvars.length === 0 || eatenvars[eatenvars.length-1] === 'LLM';
const grouped_resps_divs = Object.keys(grouped_resps).map(g => groupByVars(grouped_resps[g], remaining_vars, updated_eatenvars, get_header(group_name, g)));
const leftover_resps_divs = leftover_resps.length > 0 ? groupByVars(leftover_resps, remaining_vars, updated_eatenvars, get_header(group_name, undefined)) : [];

leaf_id += 1;

return (<div key={'h'+ group_name + '_' + leaf_id}>
{header ?
(<div key={group_name} className="response-group" style={{ backgroundColor: rgroup_color(eatenvars.length) }}>
<ResponseGroup header={header}
responseBoxes={grouped_resps_divs}
responseBoxesWrapperClass="response-boxes-wrapper"
displayStyle="block"
defaultState={defaultOpened} />
</div>)
: <div key={group_name}>{grouped_resps_divs}</div>}
{leftover_resps_divs.length === 0 ? (<></>) : (
<div key={'__unspecified_group'} className="response-group">
{leftover_resps_divs}
</div>
)}
</div>);
};

// Produce DIV elements grouped by selected vars
const divs = groupByVars(responses, selected_vars, [], null);
setResponses(divs);
}

}, [multiSelectValue, jsonResponses, wideFormat, viewFormat]);

// When the user clicks an item in the drop-down,
// we want to autoclose the multiselect drop-down:
@@ -321,20 +379,37 @@ const LLMResponseInspector = ({ jsonResponses, wideFormat }) => {
};

return (<div style={{height: '100%'}}>
{/* <Flex> */}
{/* <NativeSelect label='View as' data={['Hierarchy', 'Table']} mr='8px' w='15%' /> */}
<MultiSelect ref={multiSelectRef}
onChange={handleMultiSelectValueChange}
className='nodrag nowheel inspect-multiselect'
label={<span style={{marginTop: '0px', fontWeight: 'normal'}}>Group responses by (order matters):</span>}
data={multiSelectVars}
placeholder="Pick vars to group responses, in order of importance"
size={wideFormat ? 'sm' : 'xs'}
value={multiSelectValue}
clearSearchOnChange={true}
clearSearchOnBlur={true}
w='100%' />
{/* </Flex> */}

{wideFormat ?
<Radio.Group
name="viewFormat"
defaultValue="hierarchy"
value={viewFormat}
onChange={setViewFormat}
>
<Group mt="0px" mb='xs'>
<Radio value="hierarchy" label={<span><IconSitemap size='10pt' style={{marginBottom: '-1px'}}/> Hierarchy</span>} />
<Radio value="table" label={<span><IconTable size='10pt' style={{marginBottom: '-1px'}}/> Table</span>} />
</Group>
</Radio.Group>
: <></>}

{wideFormat === false || viewFormat === "hierarchy" ?
<div>
<MultiSelect ref={multiSelectRef}
onChange={handleMultiSelectValueChange}
className='nodrag nowheel inspect-multiselect'
label={<span style={{marginTop: '0px', fontWeight: 'normal'}}>Group responses by (order matters):</span>}
data={multiSelectVars}
placeholder="Pick vars to group responses, in order of importance"
size={wideFormat ? 'sm' : 'xs'}
value={multiSelectValue}
clearSearchOnChange={true}
clearSearchOnBlur={true}
w='100%' />
</div>
: <></>}

<div className="nowheel nodrag">
{responses}
</div>
@@ -19,7 +19,7 @@ const LLMResponseInspectorModal = forwardRef((props, ref) => {
}));

return (
<Modal size='80%' opened={opened} onClose={close} closeOnClickOutside={true} style={{position: 'relative', 'left': '-100px'}} title={
<Modal size='80%' keepMounted opened={opened} onClose={close} closeOnClickOutside={true} style={{position: 'relative', 'left': '-100px'}} title={
<div><span>Response Inspector</span><button className="custom-button" style={{marginTop: 'auto', marginRight: '14px', float: 'right'}} onClick={() => exportToExcel(props.jsonResponses)}>Export data to Excel</button></div>
} styles={{ title: {justifyContent: 'space-between', width: '100%'} }} >
{ props.prompt !== undefined ?
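The keepMounted prop added above is what makes the inspector "persistent": Mantine keeps the modal's children in the DOM while it is closed, so the inspector's grouping and view-format state survives reopening instead of being rebuilt from scratch. A minimal illustration of the prop in isolation (InspectorModal is a placeholder name; keepMounted, opened, onClose, and size are the same Mantine Modal props used in the diff):

  import { Modal } from '@mantine/core';

  // With keepMounted, the children stay mounted (just hidden) while the modal
  // is closed, so their React state persists between openings.
  function InspectorModal({ opened, onClose, children }) {
    return (
      <Modal keepMounted opened={opened} onClose={onClose} size="80%">
        {children}
      </Modal>
    );
  }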