Add better example flow upon load
This commit is contained in:
parent 7cd5b6797f
commit 7e60c50338

72 chainforge/react-server/src/App.js (vendored)
@@ -24,6 +24,9 @@ import CommentNode from './CommentNode';
import GlobalSettingsModal from './GlobalSettingsModal';
import ExampleFlowsModal from './ExampleFlowsModal';
import AreYouSureModal from './AreYouSureModal';
import { getDefaultModelFormData, getDefaultModelSettings } from './ModelSettingSchemas';
import { v4 as uuid } from 'uuid';
import { EXAMPLEFLOW_1 } from './example_flows';
import './text-fields-node.css';

// State management (from https://reactflow.dev/docs/guides/state-management/)
@@ -49,6 +52,23 @@ const selector = (state) => ({
  resetLLMColors: state.resetLLMColors,
});

// The initial LLM to use when new flows are created, or upon first load
const INITIAL_LLM = () => {
  let falcon7b = {
    key: uuid(),
    name: "Falcon.7B.Instruct",
    emoji: "🤗",
    model: "tiiuae/falcon-7b-instruct",
    base_model: "hf",
    temp: 1.0,
    settings: getDefaultModelSettings('hf'),
    formData: getDefaultModelFormData('hf'),
  };
  falcon7b.formData.shortname = falcon7b.name;
  falcon7b.formData.model = falcon7b.model;
  return falcon7b;
};

// import AnimatedConnectionLine from './AnimatedConnectionLine';

const nodeTypes = {
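For orientation: the object INITIAL_LLM returns is the per-model spec that the prompt node consumes (see the starting_nodes change further down). A second spec could be registered the same way; the following sketch is illustrative only and is not part of this commit, with a placeholder name and model id.

// Sketch only (not in this commit): a second default LLM spec built the same way.
// "My.Other.Model" and "some-org/some-model" are hypothetical placeholders.
const ANOTHER_LLM = () => {
  let spec = {
    key: uuid(),                  // unique key so list components can track the entry
    name: "My.Other.Model",
    emoji: "🔮",
    model: "some-org/some-model",
    base_model: "hf",             // reuse the HuggingFace settings schema
    temp: 1.0,
    settings: getDefaultModelSettings('hf'),
    formData: getDefaultModelFormData('hf'),
  };
  // Keep the form data in sync with the display name and model id, as INITIAL_LLM does.
  spec.formData.shortname = spec.name;
  spec.formData.model = spec.model;
  return spec;
};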
@@ -86,6 +106,9 @@ const App = () => {
  // For modal popup of example flows
  const examplesModal = useRef(null);

  // For an info pop-up that welcomes new users
  // const [welcomeModalOpened, { open: openWelcomeModal, close: closeWelcomeModal }] = useDisclosure(false);

  // For confirmation popup
  const confirmationModal = useRef(null);
  const [confirmationDialogProps, setConfirmationDialogProps] = useState({
@@ -217,7 +240,11 @@ const App = () => {

  const uid = (id) => `${id}-${Date.now()}`;
  const starting_nodes = [
    { id: uid('prompt'), type: 'prompt', data: { prompt: 'Why is the sky blue?', n: 1 }, position: { x: 450, y: 200 } },
    { id: uid('prompt'), type: 'prompt', data: {
        prompt: 'Why is the sky blue?',
        n: 1,
        llms: [INITIAL_LLM()] },
      position: { x: 450, y: 200 } },
    { id: uid('textfields'), type: 'textfields', data: {}, position: { x: 80, y: 270 } },
  ];

@@ -419,6 +446,9 @@ const App = () => {

  // Run once upon ReactFlow initialization
  const onInit = (rf_inst) => {
    localStorage.removeItem('chainforge-flow');
    localStorage.removeItem('chainforge-state');

    setRfInstance(rf_inst);

    // Autosave the flow to localStorage every minute:
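The autosave body falls outside this hunk. A minimal sketch of the pattern the comment describes, assuming ReactFlow's rf_inst.toObject() export and the same 'chainforge-flow' localStorage key cleared above (the interval variable name here is made up):

// Sketch only: serialize the flow to localStorage once a minute.
// rf_inst.toObject() returns ReactFlow's { nodes, edges, viewport } export.
const autosaveInterval = setInterval(() => {
  const flow = rf_inst.toObject();
  localStorage.setItem('chainforge-flow', JSON.stringify(flow));
}, 60 * 1000);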
@@ -430,17 +460,26 @@ const App = () => {
    if (autosavedFlowExists())
      loadFlowFromAutosave(rf_inst);
    else {
      // Create a default starting flow for new users
      // Load an interesting default starting flow for new users
      importFlowFromJSON(EXAMPLEFLOW_1);
      rf_inst.setViewport(EXAMPLEFLOW_1.flow.viewport);

      // Open a welcome pop-up
      // openWelcomeModal();

      // NOTE: We need to create a unique ID using the current date,
      // because of the way ReactFlow saves and restores states.
      const uid = (id) => `${id}-${Date.now()}`;
      setNodes([
        { id: uid('prompt'), type: 'prompt', data: { prompt: 'What is the opening sentence of Pride and Prejudice?', n: 1 }, position: { x: 450, y: 200 } },
        { id: uid('eval'), type: 'evaluator', data: { language: "javascript", code: "function evaluate(response) {\n return response.text.length;\n}" }, position: { x: 820, y: 150 } },
        { id: uid('textfields'), type: 'textfields', data: {}, position: { x: 80, y: 270 } },
        { id: uid('vis'), type: 'vis', data: {}, position: { x: 1200, y: 250 } },
        { id: uid('inspect'), type: 'inspect', data: {}, position: { x:820, y:400 } },
      ]);
      // const uid = (id) => `${id}-${Date.now()}`;
      // setNodes([
      //   { id: uid('prompt'), type: 'prompt', data: {
      //     llms: [ INITIAL_LLM() ],
      //     prompt: 'What is the opening sentence of Pride and Prejudice?',
      //     n: 1 }, position: { x: 450, y: 200 } },
      //   { id: uid('eval'), type: 'evaluator', data: { language: "javascript", code: "function evaluate(response) {\n return response.text.length;\n}" }, position: { x: 820, y: 150 } },
      //   { id: uid('textfields'), type: 'textfields', data: {}, position: { x: 80, y: 270 } },
      //   { id: uid('vis'), type: 'vis', data: {}, position: { x: 1200, y: 250 } },
      //   { id: uid('inspect'), type: 'inspect', data: {}, position: { x:820, y:400 } },
      // ]);
    }

    // Turn off loading wheel
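importFlowFromJSON is defined elsewhere in App.js and does not appear in this diff. A rough sketch of an importer consistent with how it is called here, assuming the example object wraps a ReactFlow export under a flow key; the setNodes/setEdges store setters are assumed from the zustand selector pattern referenced at the top of the file:

// Sketch only (not the commit's actual implementation): load nodes and edges from a saved flow object.
// The caller restores the viewport separately via rf_inst.setViewport(saved.flow.viewport).
const importFlowFromJSON_sketch = (saved) => {
  const flow = saved.flow;
  setNodes(flow.nodes || []);
  setEdges(flow.edges || []);
};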
@@ -477,6 +516,13 @@ const App = () => {
      <LoadingOverlay visible={isLoading} overlayBlur={1} />
      <ExampleFlowsModal ref={examplesModal} onSelect={onSelectExampleFlow} />
      <AreYouSureModal ref={confirmationModal} title={confirmationDialogProps.title} message={confirmationDialogProps.message} onConfirm={confirmationDialogProps.onConfirm} />

      {/* <Modal title={'Welcome to ChainForge'} size='400px' opened={welcomeModalOpened} onClose={closeWelcomeModal} yOffset={'6vh'} styles={{header: {backgroundColor: '#FFD700'}, root: {position: 'relative', left: '-80px'}}}>
        <Box m='lg' mt='xl'>
          <Text>To get started, click the Settings icon in the top-right corner.</Text>
        </Box>
      </Modal> */}

      <div style={{ height: '100vh', width: '100%', backgroundColor: '#eee' }}>
        <ReactFlow
          onNodesChange={onNodesChange}
@@ -529,11 +575,11 @@ const App = () => {
      </div>
      <div style={{position: 'fixed', right: '10px', top: '10px', zIndex: 8}}>
        <Button onClick={onClickNewFlow} size="sm" variant="outline" compact mr='xs' style={{float: 'left'}}> New Flow </Button>
        <Button onClick={onClickExamples} size="sm" variant="outline" compact mr='xs' style={{float: 'left'}}> Example Flows </Button>
        <Button onClick={onClickSettings} size="sm" variant="outline" compact><IconSettings size={"90%"} /></Button>
        <Button onClick={onClickExamples} size="sm" variant="filled" compact mr='xs' style={{float: 'left'}}> Example Flows </Button>
        <Button onClick={onClickSettings} size="sm" variant="gradient" compact><IconSettings size={"90%"} /></Button>
      </div>
    </div>
  );
};

export default App;

chainforge/react-server/src/ModelSettingSchemas.js (vendored)
@@ -670,6 +670,14 @@ const HuggingFaceTextInferenceSettings = {
      "maximum": 5.0,
      "multipleOf": 0.01,
    },
    "num_continuations": {
      "type": "integer",
      "title": "Number of times to continue generation (ChainForge-specific)",
      "description": "The number of times to feed the model response back into the model, to continue generating text past the 250 token limit per API call. Only useful for text completions models like gpt2. Set to 0 to ignore.",
      "default": 0,
      "minimum": 0,
      "maximum": 6,
    },
    "top_k": {
      "type": "integer",
      "title": "top_k",
@@ -699,18 +707,10 @@ const HuggingFaceTextInferenceSettings = {
      "type": "integer",
      "title": "max_new_tokens",
      "description": "The amount of new tokens to be generated. Free HF models only support up to 250 tokens. Set to -1 to remain unspecified.",
      "default": -1,
      "default": 250,
      "minimum": -1,
      "maximum": 250,
    },
    "num_continuations": {
      "type": "integer",
      "title": "Number of times to continue generation (ChainForge-specific)",
      "description": "The number of times to feed the model response back into the model, to continue generating text past the 250 token limit per API call. Only useful for text completions models like gpt2. Set to 0 to ignore.",
      "default": 0,
      "minimum": 0,
      "maximum": 6,
    },
    "do_sample": {
      "type": "boolean",
      "title": "do_sample",
@@ -762,7 +762,7 @@ const HuggingFaceTextInferenceSettings = {
      "ui:widget": "range"
    },
    "max_new_tokens": {
      "ui:help": "Defaults to unspecified (-1)",
      "ui:help": "Defaults to 250 (max)",
    },
    "num_continuations": {
      "ui:widget": "range"
@@ -819,6 +819,8 @@ export const postProcessFormData = (settingsSpec, formData) => {
};

export const getDefaultModelFormData = (settingsSpec) => {
  if (typeof settingsSpec === 'string')
    settingsSpec = ModelSettings[settingsSpec];
  let default_formdata = {};
  const schema = settingsSpec.schema;
  Object.keys(schema.properties).forEach(key => {
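The hunk ends inside the forEach. A hedged sketch of how a default-form-data extractor like this typically completes, reading each property's JSON Schema default and then normalizing via the postProcessFormData export above; this is a plausible completion, not necessarily the file's exact code:

// Sketch only: collect each schema property's `default` into the form data.
Object.keys(schema.properties).forEach(key => {
  const prop = schema.properties[key];
  if ('default' in prop)
    default_formdata[key] = prop.default;
});
return postProcessFormData(settingsSpec, default_formdata);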

9 chainforge/react-server/src/PromptNode.js (vendored)
@@ -5,10 +5,9 @@ import { v4 as uuid } from 'uuid';
import { IconSearch } from '@tabler/icons-react';
import useStore from './store';
import NodeLabel from './NodeLabelComponent'
import TemplateHooks, { extractBracketedSubstrings, toPyTemplateFormat } from './TemplateHooksComponent'
import TemplateHooks, { extractBracketedSubstrings } from './TemplateHooksComponent'
import LLMList from './LLMListComponent'
import LLMResponseInspectorModal from './LLMResponseInspectorModal';
import io from 'socket.io-client';
import { getDefaultModelSettings, AvailableLLMs } from './ModelSettingSchemas'
import fetch_from_backend from './fetch_from_backend';

@@ -17,12 +16,6 @@ import fetch_from_backend from './fetch_from_backend';
const initLLMs = [AvailableLLMs[0]];

// Helper funcs
const truncStr = (s, maxLen) => {
  if (s.length > maxLen) // Cut the name short if it's long
    return s.substring(0, maxLen) + '...'
  else
    return s;
}
// Ensure that a name is 'unique'; if not, return an amended version with a count tacked on (e.g. "GPT-4 (2)")
const ensureUniqueName = (_name, _prev_names) => {
  // Strip whitespace around names

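The body of ensureUniqueName is cut off by the hunk. A minimal sketch of the behavior its comment describes, appending a count such as "GPT-4 (2)" until the name no longer collides; not necessarily the actual implementation:

// Sketch only: return _name, or _name with a count appended, so it is unique among _prev_names.
const ensureUniqueName_sketch = (_name, _prev_names) => {
  const name = _name.trim();
  const prev_names = _prev_names.map(n => n.trim());
  if (!prev_names.includes(name)) return name;
  let i = 2;
  while (prev_names.includes(`${name} (${i})`))
    i += 1;
  return `${name} (${i})`;  // e.g. "GPT-4 (2)"
};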
@@ -497,12 +497,18 @@ export async function call_huggingface(prompt: string, model: LLM, n: number = 1

    // Merge responses
    const resp_text: string = result[0].generated_text;

    console.log(curr_cont, 'curr_text', curr_text);
    console.log(curr_cont, 'resp_text', resp_text);

    continued_response.generated_text += resp_text;
    curr_text += resp_text;

    curr_cont += 1;
  }

  console.log(continued_response);

  // Continue querying
  responses.push(continued_response);
}

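The hunk above shows only the merge step inside the continuation loop. A rough sketch of the surrounding loop implied by the num_continuations setting (feed the accumulated text back in as the next prompt); queryHF is a hypothetical stand-in for the file's actual HuggingFace inference call, and the sketch drops the TypeScript annotations:

// Sketch only: keep querying, feeding the model its own output, num_continuations extra times.
let curr_text = prompt;
let curr_cont = 0;
const continued_response = { generated_text: "" };
while (curr_cont <= num_continuations) {
  const result = await queryHF(curr_text, model);  // hypothetical call; returns [{ generated_text }]
  const resp_text = result[0].generated_text;
  continued_response.generated_text += resp_text;  // merge into the running response
  curr_text += resp_text;                          // next call continues from the full text so far
  curr_cont += 1;
}
responses.push(continued_response);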

1162 chainforge/react-server/src/example_flows.js (vendored, new file)
File diff suppressed because it is too large
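Since the diff of the new file is suppressed, its structure can only be inferred from how App.js consumes it (importFlowFromJSON(EXAMPLEFLOW_1) and EXAMPLEFLOW_1.flow.viewport above). A hypothetical outline, not the actual 1162-line contents:

// Hypothetical outline only: the real file serializes a full example flow.
export const EXAMPLEFLOW_1 = {
  flow: {
    nodes: [ /* textfields, prompt, evaluator, vis, inspect nodes with positions and data */ ],
    edges: [ /* connections wiring those nodes together */ ],
    viewport: { x: 0, y: 0, zoom: 1 },  // restored via rf_inst.setViewport(...)
  },
};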