diff --git a/chainforge/react-server/src/App.js b/chainforge/react-server/src/App.js
index 949b74c..cabaaa5 100644
--- a/chainforge/react-server/src/App.js
+++ b/chainforge/react-server/src/App.js
@@ -106,9 +106,14 @@ const App = () => {
const { x, y } = getViewportCenter();
addNode({ id: 'promptNode-'+Date.now(), type: 'prompt', data: { prompt: '' }, position: {x: x-200, y:y-100} });
};
- const addEvalNode = (event) => {
+ const addEvalNode = (progLang) => {
const { x, y } = getViewportCenter();
- addNode({ id: 'evalNode-'+Date.now(), type: 'evaluator', data: { code: "def evaluate(response):\n return len(response.text)" }, position: {x: x-200, y:y-100} });
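+    // Seed the editor with a starter 'evaluate' function in the chosen language.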
+ let code = "";
+ if (progLang === 'python')
+ code = "def evaluate(response):\n return len(response.text)";
+ else if (progLang === 'javascript')
+ code = "function evaluate(resp) {\n return resp.text.length;\n}";
+ addNode({ id: 'evalNode-'+Date.now(), type: 'evaluator', data: { language: progLang, code: code }, position: {x: x-200, y:y-100} });
};
const addVisNode = (event) => {
const { x, y } = getViewportCenter();
@@ -465,7 +470,8 @@ const App = () => {
           <Menu.Item onClick={addTextFieldsNode} icon={<IconTextPlus size="16px" />}> TextFields </Menu.Item>
           <Menu.Item onClick={addPromptNode} icon={'💬'}> Prompt Node </Menu.Item>
-          <Menu.Item onClick={addEvalNode} icon={<IconTerminal size="16px" />}> Evaluator Node </Menu.Item>
+          <Menu.Item onClick={() => addEvalNode('javascript')} icon={<IconTerminal size="16px" />}> JavaScript Evaluator Node </Menu.Item>
+          <Menu.Item onClick={() => addEvalNode('python')} icon={<IconTerminal size="16px" />}> Python Evaluator Node </Menu.Item>
           <Menu.Item onClick={addVisNode} icon={'📊'}> Vis Node </Menu.Item>
           <Menu.Item onClick={addInspectNode} icon={'🔍'}> Inspect Node </Menu.Item>
           <Menu.Item onClick={addCsvNode} icon={<IconCsv size="16px" />}> CSV Node </Menu.Item>
diff --git a/chainforge/react-server/src/EvaluatorNode.js b/chainforge/react-server/src/EvaluatorNode.js
index 43cac77..6d714db 100644
--- a/chainforge/react-server/src/EvaluatorNode.js
+++ b/chainforge/react-server/src/EvaluatorNode.js
@@ -9,6 +9,7 @@ import LLMResponseInspectorModal from './LLMResponseInspectorModal';
// Ace code editor
import AceEditor from "react-ace";
import "ace-builds/src-noconflict/mode-python";
+import "ace-builds/src-noconflict/mode-javascript";
import "ace-builds/src-noconflict/theme-xcode";
import "ace-builds/src-noconflict/ext-language_tools";
import fetch_from_backend from './fetch_from_backend';
@@ -28,8 +29,14 @@ const EvaluatorNode = ({ data, id }) => {
// For a way to inspect responses without having to attach a dedicated node
const inspectModal = useRef(null);
+  // The programming language for the editor. Also determines which 'execute'
+  // function will ultimately be called.
+ const [progLang, setProgLang] = useState(data.language || 'python');
+
+ // The text in the code editor.
const [codeText, setCodeText] = useState(data.code);
const [codeTextOnLastRun, setCodeTextOnLastRun] = useState(false);
+
const [lastRunLogs, setLastRunLogs] = useState("");
const [lastResponses, setLastResponses] = useState([]);
const [lastRunSuccess, setLastRunSuccess] = useState(true);
@@ -85,7 +92,8 @@ const EvaluatorNode = ({ data, id }) => {
}
// Double-check that the code includes an 'evaluate' function:
- if (codeText.search(/def\s+evaluate\s*(.*):/) === -1) {
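+    // Note: the JavaScript check only matches 'function evaluate(...)' declarations; arrow functions assigned to a variable won't be detected.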
+ const find_evalfunc_regex = progLang === 'python' ? /def\s+evaluate\s*(.*):/ : /function\s+evaluate\s*(.*)/;
+ if (codeText.search(find_evalfunc_regex) === -1) {
const err_msg = `Could not find required function 'evaluate'. Make sure you have defined an 'evaluate' function.`;
setStatus('error');
alertModal.current.trigger(err_msg);
@@ -107,11 +115,12 @@ const EvaluatorNode = ({ data, id }) => {
// Run evaluator in backend
const codeTextOnRun = codeText + '';
- fetch_from_backend('execute', {
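+    // Python code must run on the Flask backend; JavaScript code is executed client-side via the 'executejs' route.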
+ const execute_route = (progLang === 'python') ? 'execute' : 'executejs';
+ fetch_from_backend(execute_route, {
id: id,
code: codeTextOnRun,
- scope: mapScope,
responses: input_node_ids,
+ scope: mapScope,
reduce_vars: [],
script_paths: script_paths,
}, rejected).then(function(json) {
@@ -163,7 +172,10 @@ const EvaluatorNode = ({ data, id }) => {
return (
     <div className="evaluator-node cfnode">
-      <NodeLabel title={data.title || 'Evaluator Node'}
+      <NodeLabel title={data.title ||
+                        (progLang === 'python' ? 'Python Evaluator Node'
+                                               : 'JavaScript Evaluator Node')}
@@ -197,10 +209,11 @@ const EvaluatorNode = ({ data, id }) => {
       {/* <span className="code-style">response</span>: */}
       <AceEditor
-        mode="python"
+        mode={progLang}
         theme="xcode"
         width='100%'
         height='100px'
         style={{minWidth:'310px'}}
+        setOptions={{useWorker: false}}
         tabSize={2}
         onLoad={editorInstance => { // Make Ace Editor div resizeable.
           editorInstance.container.style.resize = "both";
diff --git a/chainforge/react-server/src/backend/__test__/backend.test.ts b/chainforge/react-server/src/backend/__test__/backend.test.ts
index d1058bd..028fb29 100644
--- a/chainforge/react-server/src/backend/__test__/backend.test.ts
+++ b/chainforge/react-server/src/backend/__test__/backend.test.ts
@@ -3,7 +3,7 @@
*/
import { LLM } from '../models';
import { expect, test } from '@jest/globals';
-import { queryLLM, execute, ResponseInfo } from '../backend';
+import { queryLLM, executejs, ResponseInfo } from '../backend';
import { StandardizedLLMResponse } from '../typing';
import StorageCache from '../cache';
@@ -48,7 +48,7 @@ test('run evaluate func over responses', async () => {
// `;
// Execute the code, and map the evaluate function over all responses
- const {responses, logs, error} = await execute('evalid', code, ['dummy_response_id'], 'response');
+ const {responses, logs, error} = await executejs('evalid', code, ['dummy_response_id'], 'response');
// There should be no errors
if (error)
diff --git a/chainforge/react-server/src/backend/backend.ts b/chainforge/react-server/src/backend/backend.ts
index 4dcd1c2..8a7ef2b 100644
--- a/chainforge/react-server/src/backend/backend.ts
+++ b/chainforge/react-server/src/backend/backend.ts
@@ -642,20 +642,22 @@ export async function queryLLM(id: string,
 * @param response_ids the cached response(s) to run on; must be a unique ID or a list of unique IDs of cached data
 * @param scope the scope of responses to run on: a single response, or all across each batch. (If 'batch', the evaluate() function has access to 'responses'.)
*/
-export async function execute(id: string,
-                              code: string | ((rinfo: ResponseInfo) => any),
-                              response_ids: string | string[],
-                              scope: 'response' | 'batch'): Promise<Dict> {
+export async function executejs(id: string,
+                                code: string | ((rinfo: ResponseInfo) => any),
+                                response_ids: string | string[],
+                                scope: 'response' | 'batch'): Promise<Dict> {
// Check format of response_ids
if (!Array.isArray(response_ids))
response_ids = [ response_ids ];
   response_ids = response_ids as Array<string>;
+ console.log('executing js');
+
// const iframe = document.createElement('iframe');
// Instantiate the evaluator function by eval'ing the passed code
// DANGER DANGER!!
- let iframe: HTMLIFrameElement | undefined;
+ let iframe: HTMLElement | undefined;
if (typeof code === 'string') {
try {
/*
@@ -667,7 +669,7 @@ export async function execute(id: string,
The Evaluate node in the front-end has a hidden iframe with the following id.
We need to get this iframe element.
*/
- let iframe = document.getElementById(`${id}-iframe`);
+ iframe = document.getElementById(`${id}-iframe`);
if (!iframe)
throw new Error("Could not find iframe sandbox for evaluator node.");
diff --git a/chainforge/react-server/src/fetch_from_backend.js b/chainforge/react-server/src/fetch_from_backend.js
index 8d64f12..6a21303 100644
--- a/chainforge/react-server/src/fetch_from_backend.js
+++ b/chainforge/react-server/src/fetch_from_backend.js
@@ -1,5 +1,4 @@
-// import { queryLLM, execute } from "./backend/backend.ts";
-import { queryLLM, execute } from "./backend/backend";
+import { queryLLM, executejs } from "./backend/backend";
const BACKEND_TYPES = {
FLASK: 'flask',
@@ -15,7 +14,7 @@ async function _route_to_js_backend(route, params) {
case 'queryllm':
return queryLLM(...Object.values(params));
case 'executejs':
- return execute(...Object.values(params));
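+      // Pass params explicitly by name; the params object carries extra fields (e.g. script_paths) that executejs does not take.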
+ return executejs(params.id, params.code, params.responses, params.scope);
default:
throw new Error(`Could not find backend function for route named ${route}`);
}
@@ -30,6 +29,11 @@ async function _route_to_js_backend(route, params) {
*/
export default function fetch_from_backend(route, params, rejected) {
rejected = rejected || ((err) => {throw new Error(err)});
+
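+  // JavaScript evaluators always run in the browser's sandboxed iframe, regardless of the backend type.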
+ if (route === 'executejs') {
+ return _route_to_js_backend(route, params);
+ }
+
switch (BACKEND_TYPE) {
case BACKEND_TYPES.FLASK: // Fetch from Flask (python) backend
return fetch(`${FLASK_BASE_URL}app/${route}`, {
@@ -49,4 +53,4 @@ export default function fetch_from_backend(route, params, rejected) {
export function set_backend_type(t) {
BACKEND_TYPE = t;
-}
\ No newline at end of file
+}