Merge branch 'feat/plugins' into dev/plugin-deploy
commit
6d8b54f1e5
@ -0,0 +1,43 @@
|
||||
import { useUpdateModelProviders } from '@/app/components/header/account-setting/model-provider-page/hooks'
|
||||
import { useProviderContext } from '@/context/provider-context'
|
||||
import { useInvalidateInstalledPluginList } from '@/service/use-plugins'
|
||||
import { useInvalidateAllBuiltInTools, useInvalidateAllToolProviders } from '@/service/use-tools'
|
||||
import { useInvalidateStrategyProviders } from '@/service/use-strategy'
|
||||
import type { Plugin, PluginManifestInMarket } from '../../types'
|
||||
import { PluginType } from '../../types'
|
||||
|
||||
const useRefreshPluginList = () => {
|
||||
const invalidateInstalledPluginList = useInvalidateInstalledPluginList()
|
||||
const updateModelProviders = useUpdateModelProviders()
|
||||
const { refreshModelProviders } = useProviderContext()
|
||||
|
||||
const invalidateAllToolProviders = useInvalidateAllToolProviders()
|
||||
const invalidateAllBuiltInTools = useInvalidateAllBuiltInTools()
|
||||
|
||||
const invalidateStrategyProviders = useInvalidateStrategyProviders()
|
||||
return {
|
||||
refreshPluginList: (manifest: PluginManifestInMarket | Plugin) => {
|
||||
// installed list
|
||||
invalidateInstalledPluginList()
|
||||
|
||||
// tool page, tool select
|
||||
if (PluginType.tool.includes(manifest.category)) {
|
||||
invalidateAllToolProviders()
|
||||
invalidateAllBuiltInTools()
|
||||
// TODO: update suggested tools. It's a function in hook useMarketplacePlugins,handleUpdatePlugins
|
||||
}
|
||||
|
||||
// model select
|
||||
if (PluginType.model.includes(manifest.category)) {
|
||||
updateModelProviders()
|
||||
refreshModelProviders()
|
||||
}
|
||||
|
||||
// agent select
|
||||
if (PluginType.agent.includes(manifest.category))
|
||||
invalidateStrategyProviders()
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
export default useRefreshPluginList
|
||||
@ -1,16 +1,43 @@
|
||||
import Button from '@/app/components/base/button'
|
||||
import { RiInstallLine, RiLoader2Line } from '@remixicon/react'
|
||||
import type { ComponentProps } from 'react'
|
||||
import type { ComponentProps, MouseEventHandler } from 'react'
|
||||
import classNames from '@/utils/classnames'
|
||||
import { useTranslation } from 'react-i18next'
|
||||
import { useCheckInstalled, useInstallPackageFromMarketPlace } from '@/service/use-plugins'
|
||||
|
||||
type InstallPluginButtonProps = Omit<ComponentProps<typeof Button>, 'children'>
|
||||
type InstallPluginButtonProps = Omit<ComponentProps<typeof Button>, 'children' | 'loading'> & {
|
||||
uniqueIdentifier: string
|
||||
onSuccess?: () => void
|
||||
}
|
||||
|
||||
export const InstallPluginButton = (props: InstallPluginButtonProps) => {
|
||||
const { loading, className, ...rest } = props
|
||||
const { className, uniqueIdentifier, onSuccess, ...rest } = props
|
||||
const { t } = useTranslation()
|
||||
return <Button variant={'secondary'} disabled={loading} className={classNames('flex items-center', className)} {...rest}>
|
||||
{loading ? t('workflow.nodes.agent.pluginInstaller.install') : t('workflow.nodes.agent.pluginInstaller.installing')}
|
||||
{!loading ? <RiInstallLine className='size-4 ml-1' /> : <RiLoader2Line className='size-4 ml-1 animate-spin' />}
|
||||
const manifest = useCheckInstalled({
|
||||
pluginIds: [uniqueIdentifier],
|
||||
enabled: !!uniqueIdentifier,
|
||||
})
|
||||
const install = useInstallPackageFromMarketPlace({
|
||||
onSuccess() {
|
||||
manifest.refetch()
|
||||
onSuccess?.()
|
||||
},
|
||||
})
|
||||
const handleInstall: MouseEventHandler = (e) => {
|
||||
e.stopPropagation()
|
||||
install.mutate(uniqueIdentifier)
|
||||
}
|
||||
const isLoading = manifest.isLoading || install.isPending
|
||||
if (!manifest.data) return null
|
||||
if (manifest.data.plugins.some(plugin => plugin.id === uniqueIdentifier)) return null
|
||||
return <Button
|
||||
variant={'secondary'}
|
||||
disabled={isLoading}
|
||||
{...rest}
|
||||
onClick={handleInstall}
|
||||
className={classNames('flex items-center', className)}
|
||||
>
|
||||
{!isLoading ? t('workflow.nodes.agent.pluginInstaller.install') : t('workflow.nodes.agent.pluginInstaller.installing')}
|
||||
{!isLoading ? <RiInstallLine className='size-4 ml-1' /> : <RiLoader2Line className='size-4 ml-1 animate-spin' />}
|
||||
</Button>
|
||||
}
|
||||
|
||||
@ -0,0 +1,80 @@
|
||||
'use client'

import Badge from '@/app/components/base/badge'
import Tooltip from '@/app/components/base/tooltip'
import PluginVersionPicker from '@/app/components/plugins/update-plugin/plugin-version-picker'
import { RiArrowLeftRightLine } from '@remixicon/react'
import { type FC, useCallback, useState } from 'react'
import cn from '@/utils/classnames'
import UpdateFromMarketplace from '@/app/components/plugins/update-plugin/from-market-place'
import { useBoolean } from 'ahooks'
import { useCheckInstalled } from '@/service/use-plugins'

export type SwitchPluginVersionProps = {
  // Plugin identifier; assumed to be "pluginId:version" — TODO confirm with callers.
  uniqueIdentifier: string
  // Tooltip shown on hover while neither the picker nor the update modal is open.
  tooltip?: string
  // Called with the chosen version string after an update is saved.
  onChange?: (version: string) => void
}

/**
 * Badge showing a plugin's installed version. Clicking it opens a version
 * picker; picking a version opens the marketplace update modal, and saving
 * there refetches the installed details and notifies `onChange`.
 */
export const SwitchPluginVersion: FC<SwitchPluginVersionProps> = (props) => {
  const { uniqueIdentifier, tooltip, onChange } = props
  // Strip the version suffix to get the bare plugin id for the installed-check.
  const [pluginId] = uniqueIdentifier.split(':')
  // Version-picker popover visibility.
  const [isShow, setIsShow] = useState(false)
  const [isShowUpdateModal, { setTrue: showUpdateModal, setFalse: hideUpdateModal }] = useBoolean(false)
  // Version selected in the picker; set just before the update modal opens.
  const [targetVersion, setTargetVersion] = useState<string>()
  const pluginDetails = useCheckInstalled({
    pluginIds: [pluginId],
    enabled: true,
  })
  const pluginDetail = pluginDetails.data?.plugins.at(0)

  const handleUpdatedFromMarketplace = useCallback(() => {
    hideUpdateModal()
    // Refetch so the badge reflects the new version, then notify the caller.
    // NOTE(review): targetVersion! assumes onSave only fires after onSelect set it.
    pluginDetails.refetch()
    onChange?.(targetVersion!)
  }, [hideUpdateModal, onChange, pluginDetails, targetVersion])
  // Tooltip is suppressed while either overlay is open.
  return <Tooltip popupContent={!isShow && !isShowUpdateModal && tooltip} triggerMethod='hover'>
    <div className='w-fit'>
      {isShowUpdateModal && pluginDetail && <UpdateFromMarketplace
        payload={{
          originalPackageInfo: {
            id: uniqueIdentifier,
            payload: pluginDetail.declaration,
          },
          targetPackageInfo: {
            id: uniqueIdentifier,
            version: targetVersion!,
          },
        }}
        onCancel={hideUpdateModal}
        onSave={handleUpdatedFromMarketplace}
      />}
      {pluginDetail && <PluginVersionPicker
        isShow={isShow}
        onShowChange={setIsShow}
        pluginID={pluginId}
        currentVersion={pluginDetail.version}
        onSelect={(state) => {
          setTargetVersion(state.version)
          showUpdateModal()
        }}
        trigger={
          <Badge
            className={cn(
              'mx-1 hover:bg-state-base-hover flex',
              isShow && 'bg-state-base-hover',
            )}
            uppercase={true}
            text={
              <>
                <div>{pluginDetail.version}</div>
                <RiArrowLeftRightLine className='ml-1 w-3 h-3 text-text-tertiary' />
              </>
            }
            hasRedCornerMark={true}
          />
        }
      />}
    </div>
  </Tooltip>
}
|
||||
@ -0,0 +1,128 @@
|
||||
import { parseDSL } from './graph-to-log-struct-2'
|
||||
|
||||
// Unit tests for the DSL -> run-log parser in graph-to-log-struct-2.
// DSL shape: "a -> b" is a flow; "(type, id, ...params)" is a complex node.
describe('parseDSL', () => {
  it('should parse plain nodes correctly', () => {
    const dsl = 'plainNode1 -> plainNode2'
    const result = parseDSL(dsl)
    expect(result).toEqual([
      { id: 'plainNode1', node_id: 'plainNode1', title: 'plainNode1', execution_metadata: {}, status: 'succeeded' },
      { id: 'plainNode2', node_id: 'plainNode2', title: 'plainNode2', execution_metadata: {}, status: 'succeeded' },
    ])
  })

  it('should parse retry nodes correctly', () => {
    // (retry, node, n) expands to one succeeded record plus n retry records.
    const dsl = '(retry, retryNode, 3)'
    const result = parseDSL(dsl)
    expect(result).toEqual([
      { id: 'retryNode', node_id: 'retryNode', title: 'retryNode', execution_metadata: {}, status: 'succeeded' },
      { id: 'retryNode', node_id: 'retryNode', title: 'retryNode', execution_metadata: {}, status: 'retry' },
      { id: 'retryNode', node_id: 'retryNode', title: 'retryNode', execution_metadata: {}, status: 'retry' },
      { id: 'retryNode', node_id: 'retryNode', title: 'retryNode', execution_metadata: {}, status: 'retry' },
    ])
  })

  it('should parse iteration nodes correctly', () => {
    // Children inherit iteration_id from the container; index is fixed at 0.
    const dsl = '(iteration, iterationNode, plainNode1 -> plainNode2)'
    const result = parseDSL(dsl)
    expect(result).toEqual([
      { id: 'iterationNode', node_id: 'iterationNode', title: 'iterationNode', node_type: 'iteration', execution_metadata: {}, status: 'succeeded' },
      { id: 'plainNode1', node_id: 'plainNode1', title: 'plainNode1', execution_metadata: { iteration_id: 'iterationNode', iteration_index: 0 }, status: 'succeeded' },
      { id: 'plainNode2', node_id: 'plainNode2', title: 'plainNode2', execution_metadata: { iteration_id: 'iterationNode', iteration_index: 0 }, status: 'succeeded' },
    ])
  })

  it('should parse parallel nodes correctly', () => {
    // Each branch records the first node of its flow as parallel_start_node_id.
    const dsl = '(parallel, parallelNode, nodeA, nodeB -> nodeC)'
    const result = parseDSL(dsl)
    expect(result).toEqual([
      { id: 'parallelNode', node_id: 'parallelNode', title: 'parallelNode', execution_metadata: { parallel_id: 'parallelNode' }, status: 'succeeded' },
      { id: 'nodeA', node_id: 'nodeA', title: 'nodeA', execution_metadata: { parallel_id: 'parallelNode', parallel_start_node_id: 'nodeA' }, status: 'succeeded' },
      { id: 'nodeB', node_id: 'nodeB', title: 'nodeB', execution_metadata: { parallel_id: 'parallelNode', parallel_start_node_id: 'nodeB' }, status: 'succeeded' },
      { id: 'nodeC', node_id: 'nodeC', title: 'nodeC', execution_metadata: { parallel_id: 'parallelNode', parallel_start_node_id: 'nodeB' }, status: 'succeeded' },
    ])
  })

  // TODO
  it('should handle nested parallel nodes', () => {
    const dsl = '(parallel, outerParallel, (parallel, innerParallel, plainNode1 -> plainNode2) -> plainNode3)'
    const result = parseDSL(dsl)
    expect(result).toEqual([
      {
        id: 'outerParallel',
        node_id: 'outerParallel',
        title: 'outerParallel',
        execution_metadata: { parallel_id: 'outerParallel' },
        status: 'succeeded',
      },
      {
        id: 'innerParallel',
        node_id: 'innerParallel',
        title: 'innerParallel',
        execution_metadata: { parallel_id: 'outerParallel', parallel_start_node_id: 'innerParallel' },
        status: 'succeeded',
      },
      {
        id: 'plainNode1',
        node_id: 'plainNode1',
        title: 'plainNode1',
        execution_metadata: {
          parallel_id: 'innerParallel',
          parallel_start_node_id: 'plainNode1',
          parent_parallel_id: 'outerParallel',
          parent_parallel_start_node_id: 'innerParallel',
        },
        status: 'succeeded',
      },
      {
        id: 'plainNode2',
        node_id: 'plainNode2',
        title: 'plainNode2',
        execution_metadata: {
          parallel_id: 'innerParallel',
          parallel_start_node_id: 'plainNode1',
          parent_parallel_id: 'outerParallel',
          parent_parallel_start_node_id: 'innerParallel',
        },
        status: 'succeeded',
      },
      {
        id: 'plainNode3',
        node_id: 'plainNode3',
        title: 'plainNode3',
        execution_metadata: {
          parallel_id: 'outerParallel',
          parallel_start_node_id: 'plainNode3',
        },
        status: 'succeeded',
      },
    ])
  })

  // iterations not support nested iterations
  // it('should handle nested iterations', () => {
  //   const dsl = '(iteration, outerIteration, (iteration, innerIteration -> plainNode1 -> plainNode2))'
  //   const result = parseDSL(dsl)
  //   expect(result).toEqual([
  //     { id: 'outerIteration', node_id: 'outerIteration', title: 'outerIteration', node_type: 'iteration', execution_metadata: {}, status: 'succeeded' },
  //     { id: 'innerIteration', node_id: 'innerIteration', title: 'innerIteration', node_type: 'iteration', execution_metadata: { iteration_id: 'outerIteration', iteration_index: 0 }, status: 'succeeded' },
  //     { id: 'plainNode1', node_id: 'plainNode1', title: 'plainNode1', execution_metadata: { iteration_id: 'innerIteration', iteration_index: 0 }, status: 'succeeded' },
  //     { id: 'plainNode2', node_id: 'plainNode2', title: 'plainNode2', execution_metadata: { iteration_id: 'innerIteration', iteration_index: 0 }, status: 'succeeded' },
  //   ])
  // })

  it('should handle nested iterations within parallel nodes', () => {
    const dsl = '(parallel, parallelNode, (iteration, iterationNode, plainNode1, plainNode2))'
    const result = parseDSL(dsl)
    expect(result).toEqual([
      { id: 'parallelNode', node_id: 'parallelNode', title: 'parallelNode', execution_metadata: { parallel_id: 'parallelNode' }, status: 'succeeded' },
      { id: 'iterationNode', node_id: 'iterationNode', title: 'iterationNode', node_type: 'iteration', execution_metadata: { parallel_id: 'parallelNode', parallel_start_node_id: 'iterationNode' }, status: 'succeeded' },
      { id: 'plainNode1', node_id: 'plainNode1', title: 'plainNode1', execution_metadata: { iteration_id: 'iterationNode', iteration_index: 0, parallel_id: 'parallelNode', parallel_start_node_id: 'iterationNode' }, status: 'succeeded' },
      { id: 'plainNode2', node_id: 'plainNode2', title: 'plainNode2', execution_metadata: { iteration_id: 'iterationNode', iteration_index: 0, parallel_id: 'parallelNode', parallel_start_node_id: 'iterationNode' }, status: 'succeeded' },
    ])
  })

  it('should throw an error for unknown node types', () => {
    const dsl = '(unknown, nodeId)'
    expect(() => parseDSL(dsl)).toThrowError('Unknown nodeType: unknown')
  })
})
|
||||
@ -0,0 +1,304 @@
|
||||
// Placement info for a node living inside an iteration container.
type IterationInfo = { iterationId: string; iterationIndex: number }
// A leaf node with no nested params; may carry iteration placement info.
type NodePlain = { nodeType: 'plain'; nodeId: string; } & Partial<IterationInfo>
// A container node (retry / iteration / parallel). Its params may hold plain
// nodes, nested complex nodes, whole flows (Node[]), or numeric arguments
// such as a retry count.
type NodeComplex = { nodeType: string; nodeId: string; params: (NodePlain | (NodeComplex & Partial<IterationInfo>) | Node[] | number)[] } & Partial<IterationInfo>
type Node = NodePlain | NodeComplex

/**
 * Parses a DSL string into an array of node objects.
 * @param dsl - The input DSL string.
 * @returns An array of parsed nodes.
 */
function parseDSL(dsl: string): NodeData[] {
  // Split the top-level flow on "->", parse each segment into a Node tree,
  // then flatten into the NodeData records used by the workflow run log.
  return convertToNodeData(parseTopLevelFlow(dsl).map(nodeStr => parseNode(nodeStr)))
}
|
||||
|
||||
/**
|
||||
* Splits a top-level flow string by "->", respecting nested structures.
|
||||
* @param dsl - The DSL string to split.
|
||||
* @returns An array of top-level segments.
|
||||
*/
|
||||
function parseTopLevelFlow(dsl: string): string[] {
|
||||
const segments: string[] = []
|
||||
let buffer = ''
|
||||
let nested = 0
|
||||
|
||||
for (let i = 0; i < dsl.length; i++) {
|
||||
const char = dsl[i]
|
||||
if (char === '(') nested++
|
||||
if (char === ')') nested--
|
||||
if (char === '-' && dsl[i + 1] === '>' && nested === 0) {
|
||||
segments.push(buffer.trim())
|
||||
buffer = ''
|
||||
i++ // Skip the ">" character
|
||||
}
|
||||
else {
|
||||
buffer += char
|
||||
}
|
||||
}
|
||||
if (buffer.trim())
|
||||
segments.push(buffer.trim())
|
||||
|
||||
return segments
|
||||
}
|
||||
|
||||
/**
 * Parses a single node string.
 * If the node is complex (e.g., has parentheses), it extracts the node type, node ID, and parameters.
 * @param nodeStr - The node string to parse.
 * @param parentIterationId - The ID of the parent iteration node (if applicable).
 * @returns A parsed node object.
 */
function parseNode(nodeStr: string, parentIterationId?: string): Node {
  // Check if the node is a complex node
  if (nodeStr.startsWith('(') && nodeStr.endsWith(')')) {
    const innerContent = nodeStr.slice(1, -1).trim() // Remove outer parentheses
    let nested = 0
    let buffer = ''
    const parts: string[] = []

    // Split the inner content by commas, respecting nested parentheses
    for (let i = 0; i < innerContent.length; i++) {
      const char = innerContent[i]
      if (char === '(') nested++
      if (char === ')') nested--

      if (char === ',' && nested === 0) {
        parts.push(buffer.trim())
        buffer = ''
      }
      else {
        buffer += char
      }
    }
    // Flush the final segment (never empty for well-formed input).
    parts.push(buffer.trim())

    // Extract nodeType, nodeId, and params
    const [nodeType, nodeId, ...paramsRaw] = parts
    // An iteration node becomes the iteration parent for its own params;
    // otherwise the current parent (if any) is propagated unchanged.
    const params = parseParams(paramsRaw, nodeType === 'iteration' ? nodeId.trim() : parentIterationId)
    const complexNode = {
      nodeType: nodeType.trim(),
      nodeId: nodeId.trim(),
      params,
    }
    // Stamp iteration placement onto the node itself when it sits inside an
    // iteration. (Semicolons required: the next line starts with a paren.)
    if (parentIterationId) {
      (complexNode as any).iterationId = parentIterationId;
      (complexNode as any).iterationIndex = 0 // Fixed as 0
    }
    return complexNode
  }

  // If it's not a complex node, treat it as a plain node
  const plainNode: NodePlain = { nodeType: 'plain', nodeId: nodeStr.trim() }
  if (parentIterationId) {
    plainNode.iterationId = parentIterationId
    plainNode.iterationIndex = 0 // Fixed as 0
  }
  return plainNode
}
|
||||
|
||||
/**
|
||||
* Parses parameters of a complex node.
|
||||
* Supports nested flows and complex sub-nodes.
|
||||
* Adds iteration-specific metadata recursively.
|
||||
* @param paramParts - The parameters string split by commas.
|
||||
* @param iterationId - The ID of the iteration node, if applicable.
|
||||
* @returns An array of parsed parameters (plain nodes, nested nodes, or flows).
|
||||
*/
|
||||
function parseParams(paramParts: string[], iterationId?: string): (Node | Node[] | number)[] {
|
||||
return paramParts.map((part) => {
|
||||
if (part.includes('->')) {
|
||||
// Parse as a flow and return an array of nodes
|
||||
return parseTopLevelFlow(part).map(node => parseNode(node, iterationId))
|
||||
}
|
||||
else if (part.startsWith('(')) {
|
||||
// Parse as a nested complex node
|
||||
return parseNode(part, iterationId)
|
||||
}
|
||||
else if (!Number.isNaN(Number(part.trim()))) {
|
||||
// Parse as a numeric parameter
|
||||
return Number(part.trim())
|
||||
}
|
||||
else {
|
||||
// Parse as a plain node
|
||||
return parseNode(part, iterationId)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
type NodeData = {
|
||||
id: string;
|
||||
node_id: string;
|
||||
title: string;
|
||||
node_type?: string;
|
||||
execution_metadata: Record<string, any>;
|
||||
status: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts a plain node to node data.
|
||||
*/
|
||||
function convertPlainNode(node: Node): NodeData[] {
|
||||
return [
|
||||
{
|
||||
id: node.nodeId,
|
||||
node_id: node.nodeId,
|
||||
title: node.nodeId,
|
||||
execution_metadata: {},
|
||||
status: 'succeeded',
|
||||
},
|
||||
]
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts a retry node to node data.
|
||||
*/
|
||||
function convertRetryNode(node: Node): NodeData[] {
|
||||
const { nodeId, iterationId, iterationIndex, params } = node as NodeComplex
|
||||
const retryCount = params ? Number.parseInt(params[0] as unknown as string, 10) : 0
|
||||
const result: NodeData[] = [
|
||||
{
|
||||
id: nodeId,
|
||||
node_id: nodeId,
|
||||
title: nodeId,
|
||||
execution_metadata: {},
|
||||
status: 'succeeded',
|
||||
},
|
||||
]
|
||||
|
||||
for (let i = 0; i < retryCount; i++) {
|
||||
result.push({
|
||||
id: nodeId,
|
||||
node_id: nodeId,
|
||||
title: nodeId,
|
||||
execution_metadata: iterationId ? {
|
||||
iteration_id: iterationId,
|
||||
iteration_index: iterationIndex || 0,
|
||||
} : {},
|
||||
status: 'retry',
|
||||
})
|
||||
}
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts an iteration node to node data.
|
||||
*/
|
||||
function convertIterationNode(node: Node): NodeData[] {
|
||||
const { nodeId, params } = node as NodeComplex
|
||||
const result: NodeData[] = [
|
||||
{
|
||||
id: nodeId,
|
||||
node_id: nodeId,
|
||||
title: nodeId,
|
||||
node_type: 'iteration',
|
||||
status: 'succeeded',
|
||||
execution_metadata: {},
|
||||
},
|
||||
]
|
||||
|
||||
params?.forEach((param: any) => {
|
||||
if (Array.isArray(param)) {
|
||||
param.forEach((childNode: Node) => {
|
||||
const childData = convertToNodeData([childNode])
|
||||
childData.forEach((data) => {
|
||||
data.execution_metadata = {
|
||||
...data.execution_metadata,
|
||||
iteration_id: nodeId,
|
||||
iteration_index: 0,
|
||||
}
|
||||
})
|
||||
result.push(...childData)
|
||||
})
|
||||
}
|
||||
})
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts a parallel node to node data.
|
||||
*/
|
||||
function convertParallelNode(node: Node, parentParallelId?: string, parentStartNodeId?: string): NodeData[] {
|
||||
const { nodeId, params } = node as NodeComplex
|
||||
const result: NodeData[] = [
|
||||
{
|
||||
id: nodeId,
|
||||
node_id: nodeId,
|
||||
title: nodeId,
|
||||
execution_metadata: {
|
||||
parallel_id: nodeId,
|
||||
},
|
||||
status: 'succeeded',
|
||||
},
|
||||
]
|
||||
|
||||
params?.forEach((param) => {
|
||||
if (Array.isArray(param)) {
|
||||
const startNodeId = param[0]?.nodeId
|
||||
param.forEach((childNode: Node) => {
|
||||
const childData = convertToNodeData([childNode])
|
||||
childData.forEach((data) => {
|
||||
data.execution_metadata = {
|
||||
...data.execution_metadata,
|
||||
parallel_id: nodeId,
|
||||
parallel_start_node_id: startNodeId,
|
||||
...(parentParallelId && {
|
||||
parent_parallel_id: parentParallelId,
|
||||
parent_parallel_start_node_id: parentStartNodeId,
|
||||
}),
|
||||
}
|
||||
})
|
||||
result.push(...childData)
|
||||
})
|
||||
}
|
||||
else if (param && typeof param === 'object') {
|
||||
const startNodeId = param.nodeId
|
||||
const childData = convertToNodeData([param])
|
||||
childData.forEach((data) => {
|
||||
data.execution_metadata = {
|
||||
...data.execution_metadata,
|
||||
parallel_id: nodeId,
|
||||
parallel_start_node_id: startNodeId,
|
||||
...(parentParallelId && {
|
||||
parent_parallel_id: parentParallelId,
|
||||
parent_parallel_start_node_id: parentStartNodeId,
|
||||
}),
|
||||
}
|
||||
})
|
||||
result.push(...childData)
|
||||
}
|
||||
})
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
/**
|
||||
* Main function to convert nodes to node data.
|
||||
*/
|
||||
function convertToNodeData(nodes: Node[], parentParallelId?: string, parentStartNodeId?: string): NodeData[] {
|
||||
const result: NodeData[] = []
|
||||
|
||||
nodes.forEach((node) => {
|
||||
switch (node.nodeType) {
|
||||
case 'plain':
|
||||
result.push(...convertPlainNode(node))
|
||||
break
|
||||
case 'retry':
|
||||
result.push(...convertRetryNode(node))
|
||||
break
|
||||
case 'iteration':
|
||||
result.push(...convertIterationNode(node))
|
||||
break
|
||||
case 'parallel':
|
||||
result.push(...convertParallelNode(node, parentParallelId, parentStartNodeId))
|
||||
break
|
||||
default:
|
||||
throw new Error(`Unknown nodeType: ${node.nodeType}`)
|
||||
}
|
||||
})
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
export { parseDSL }
|
||||
@ -0,0 +1,97 @@
|
||||
import graphToLogStruct, { parseNodeString } from './graph-to-log-struct'
|
||||
|
||||
// Unit tests for the simpler graph-to-log-struct DSL helper.
describe('graphToLogStruct', () => {
  test('parseNodeString', () => {
    // First segment is the node name; nested "(...)" segments recurse.
    expect(parseNodeString('(node1, param1, (node2, param2, (node3, param1)), param4)')).toEqual({
      node: 'node1',
      params: [
        'param1',
        {
          node: 'node2',
          params: [
            'param2',
            {
              node: 'node3',
              params: [
                'param1',
              ],
            },
          ],
        },
        'param4',
      ],
    })
  })
  test('iteration nodes', () => {
    // (iteration, 1, [2, 3]): node 1 is the container, 2 and 3 its children.
    expect(graphToLogStruct('start -> (iteration, 1, [2, 3])')).toEqual([
      {
        id: 'start',
        node_id: 'start',
        title: 'start',
        execution_metadata: {},
        status: 'succeeded',
      },
      {
        id: '1',
        node_id: '1',
        title: '1',
        execution_metadata: {},
        status: 'succeeded',
        node_type: 'iteration',
      },
      {
        id: '2',
        node_id: '2',
        title: '2',
        execution_metadata: { iteration_id: '1', iteration_index: 0 },
        status: 'succeeded',
      },
      {
        id: '3',
        node_id: '3',
        title: '3',
        execution_metadata: { iteration_id: '1', iteration_index: 1 },
        status: 'succeeded',
      },
    ])
  })
  test('retry nodes', () => {
    // (retry, 1, 3): one succeeded record plus three retry records.
    expect(graphToLogStruct('start -> (retry, 1, 3)')).toEqual([
      {
        id: 'start',
        node_id: 'start',
        title: 'start',
        execution_metadata: {},
        status: 'succeeded',
      },
      {
        id: '1',
        node_id: '1',
        title: '1',
        execution_metadata: {},
        status: 'succeeded',
      },
      {
        id: '1',
        node_id: '1',
        title: '1',
        execution_metadata: {},
        status: 'retry',
      },
      {
        id: '1',
        node_id: '1',
        title: '1',
        execution_metadata: {},
        status: 'retry',
      },
      {
        id: '1',
        node_id: '1',
        title: '1',
        execution_metadata: {},
        status: 'retry',
      },
    ])
  })
})
|
||||
@ -0,0 +1,174 @@
|
||||
const STEP_SPLIT = '->'
|
||||
|
||||
const toNodeData = (step: string, info: Record<string, any> = {}): any => {
|
||||
const [nodeId, title] = step.split('@')
|
||||
|
||||
const data: Record<string, any> = {
|
||||
id: nodeId,
|
||||
node_id: nodeId,
|
||||
title: title || nodeId,
|
||||
execution_metadata: {},
|
||||
status: 'succeeded',
|
||||
}
|
||||
|
||||
const executionMetadata = data.execution_metadata
|
||||
const { isRetry, isIteration, inIterationInfo } = info
|
||||
if (isRetry)
|
||||
data.status = 'retry'
|
||||
|
||||
if (isIteration)
|
||||
data.node_type = 'iteration'
|
||||
|
||||
if (inIterationInfo) {
|
||||
executionMetadata.iteration_id = inIterationInfo.iterationId
|
||||
executionMetadata.iteration_index = inIterationInfo.iterationIndex
|
||||
}
|
||||
|
||||
return data
|
||||
}
|
||||
|
||||
const toRetryNodeData = ({
|
||||
nodeId,
|
||||
repeatTimes,
|
||||
}: {
|
||||
nodeId: string,
|
||||
repeatTimes: number,
|
||||
}): any => {
|
||||
const res = [toNodeData(nodeId)]
|
||||
for (let i = 0; i < repeatTimes; i++)
|
||||
res.push(toNodeData(nodeId, { isRetry: true }))
|
||||
return res
|
||||
}
|
||||
|
||||
const toIterationNodeData = ({
|
||||
nodeId,
|
||||
children,
|
||||
}: {
|
||||
nodeId: string,
|
||||
children: number[],
|
||||
}) => {
|
||||
const res = [toNodeData(nodeId, { isIteration: true })]
|
||||
// TODO: handle inner node structure
|
||||
for (let i = 0; i < children.length; i++) {
|
||||
const step = `${children[i]}`
|
||||
res.push(toNodeData(step, { inIterationInfo: { iterationId: nodeId, iterationIndex: i } }))
|
||||
}
|
||||
|
||||
return res
|
||||
}
|
||||
|
||||
type NodeStructure = {
|
||||
node: string;
|
||||
params: Array<string | NodeStructure>;
|
||||
}
|
||||
|
||||
export function parseNodeString(input: string): NodeStructure {
|
||||
input = input.trim()
|
||||
if (input.startsWith('(') && input.endsWith(')'))
|
||||
input = input.slice(1, -1)
|
||||
|
||||
const parts: Array<string | NodeStructure> = []
|
||||
let current = ''
|
||||
let depth = 0
|
||||
let inArrayDepth = 0
|
||||
|
||||
for (let i = 0; i < input.length; i++) {
|
||||
const char = input[i]
|
||||
|
||||
if (char === '(')
|
||||
depth++
|
||||
else if (char === ')')
|
||||
depth--
|
||||
|
||||
if (char === '[')
|
||||
inArrayDepth++
|
||||
else if (char === ']')
|
||||
inArrayDepth--
|
||||
|
||||
const isInArray = inArrayDepth > 0
|
||||
|
||||
if (char === ',' && depth === 0 && !isInArray) {
|
||||
parts.push(current.trim())
|
||||
current = ''
|
||||
}
|
||||
else {
|
||||
current += char
|
||||
}
|
||||
}
|
||||
|
||||
if (current)
|
||||
parts.push(current.trim())
|
||||
|
||||
const result: NodeStructure = {
|
||||
node: '',
|
||||
params: [],
|
||||
}
|
||||
|
||||
for (let i = 0; i < parts.length; i++) {
|
||||
const part = parts[i]
|
||||
|
||||
if (typeof part === 'string') {
|
||||
if (part.startsWith('('))
|
||||
result.params.push(parseNodeString(part))
|
||||
|
||||
if (part.startsWith('[')) {
|
||||
const content = part.slice(1, -1)
|
||||
result.params.push(parseNodeString(content))
|
||||
}
|
||||
}
|
||||
else if (i === 0) {
|
||||
result.node = part as unknown as string
|
||||
}
|
||||
else {
|
||||
result.params.push(part as unknown as string)
|
||||
}
|
||||
}
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
const toNodes = (input: string): any[] => {
|
||||
const list = input.split(STEP_SPLIT)
|
||||
.map(step => step.trim())
|
||||
|
||||
const res: any[] = []
|
||||
list.forEach((step) => {
|
||||
const isPlainStep = !step.includes('(')
|
||||
if (isPlainStep) {
|
||||
res.push(toNodeData(step))
|
||||
return
|
||||
}
|
||||
|
||||
const { node, params } = parseNodeString(step)
|
||||
switch (node) {
|
||||
case 'iteration':
|
||||
console.log(params)
|
||||
break
|
||||
res.push(...toIterationNodeData({
|
||||
nodeId: params[0] as string,
|
||||
children: JSON.parse(params[1] as string) as number[],
|
||||
}))
|
||||
break
|
||||
case 'retry':
|
||||
res.push(...toRetryNodeData({
|
||||
nodeId: params[0] as string,
|
||||
repeatTimes: Number.parseInt(params[1] as string),
|
||||
}))
|
||||
break
|
||||
}
|
||||
})
|
||||
return res
|
||||
}
|
||||
|
||||
/*
|
||||
* : 1 -> 2 -> 3
|
||||
* iteration: (iteration, 1, [2, 3]) -> 4. (1, [2, 3]) means 1 is parent, [2, 3] is children
|
||||
* parallel: 1 -> (parallel, [1,2,3], [4, (parallel: (6,7))]).
|
||||
* retry: (retry, 1, 3). 1 is parent, 3 is retry times
|
||||
*/
|
||||
const graphToLogStruct = (input: string): any[] => {
|
||||
const list = toNodes(input)
|
||||
return list
|
||||
}
|
||||
|
||||
export default graphToLogStruct
|
||||
@ -1,105 +0,0 @@
|
||||
import type { NodeTracing } from '@/types/workflow'
|
||||
import { BlockEnum } from '../../../types'
|
||||
|
||||
type IterationNodeId = string
|
||||
type RunIndex = string
|
||||
type IterationGroupMap = Map<IterationNodeId, Map<RunIndex, NodeTracing[]>>
|
||||
|
||||
const processIterationNode = (item: NodeTracing) => {
|
||||
return {
|
||||
...item,
|
||||
details: [], // to add the sub nodes in the iteration
|
||||
}
|
||||
}
|
||||
|
||||
const updateParallelModeGroup = (nodeGroupMap: IterationGroupMap, runIndex: string, item: NodeTracing, iterationNode: NodeTracing) => {
|
||||
if (!nodeGroupMap.has(iterationNode.node_id))
|
||||
nodeGroupMap.set(iterationNode.node_id, new Map())
|
||||
|
||||
const groupMap = nodeGroupMap.get(iterationNode.node_id)!
|
||||
|
||||
if (!groupMap.has(runIndex))
|
||||
groupMap.set(runIndex, [item])
|
||||
|
||||
else
|
||||
groupMap.get(runIndex)!.push(item)
|
||||
|
||||
if (item.status === 'failed') {
|
||||
iterationNode.status = 'failed'
|
||||
iterationNode.error = item.error
|
||||
}
|
||||
|
||||
iterationNode.details = Array.from(groupMap.values())
|
||||
}
|
||||
|
||||
const updateSequentialModeGroup = (runIndex: number, item: NodeTracing, iterationNode: NodeTracing) => {
|
||||
const { details } = iterationNode
|
||||
if (details) {
|
||||
if (!details[runIndex])
|
||||
details[runIndex] = [item]
|
||||
else
|
||||
details[runIndex].push(item)
|
||||
}
|
||||
|
||||
if (item.status === 'failed') {
|
||||
iterationNode.status = 'failed'
|
||||
iterationNode.error = item.error
|
||||
}
|
||||
}
|
||||
|
||||
const addRetryDetail = (result: NodeTracing[], item: NodeTracing) => {
|
||||
const retryNode = result.find(node => node.node_id === item.node_id)
|
||||
|
||||
if (retryNode) {
|
||||
if (retryNode?.retryDetail)
|
||||
retryNode.retryDetail.push(item)
|
||||
else
|
||||
retryNode.retryDetail = [item]
|
||||
}
|
||||
}
|
||||
|
||||
const processNonIterationNode = (result: NodeTracing[], nodeGroupMap: IterationGroupMap, item: NodeTracing) => {
|
||||
const { execution_metadata } = item
|
||||
if (!execution_metadata?.iteration_id) {
|
||||
if (item.status === 'retry') {
|
||||
addRetryDetail(result, item)
|
||||
return
|
||||
}
|
||||
result.push(item)
|
||||
return
|
||||
}
|
||||
|
||||
const parentIterationNode = result.find(node => node.node_id === execution_metadata.iteration_id)
|
||||
const isInIteration = !!parentIterationNode && Array.isArray(parentIterationNode.details)
|
||||
if (!isInIteration)
|
||||
return
|
||||
|
||||
// the parallel in the iteration in mode.
|
||||
const { parallel_mode_run_id, iteration_index = 0 } = execution_metadata
|
||||
const isInParallel = !!parallel_mode_run_id
|
||||
|
||||
if (isInParallel)
|
||||
updateParallelModeGroup(nodeGroupMap, parallel_mode_run_id, item, parentIterationNode)
|
||||
else
|
||||
updateSequentialModeGroup(iteration_index, item, parentIterationNode)
|
||||
}
|
||||
|
||||
// list => tree. Put the iteration node's children into the details field.
|
||||
const formatToTracingNodeList = (list: NodeTracing[]) => {
|
||||
const allItems = [...list].reverse()
|
||||
const result: NodeTracing[] = []
|
||||
const iterationGroupMap = new Map<string, Map<string, NodeTracing[]>>()
|
||||
|
||||
allItems.forEach((item) => {
|
||||
item.node_type === BlockEnum.Iteration
|
||||
? result.push(processIterationNode(item))
|
||||
: processNonIterationNode(result, iterationGroupMap, item)
|
||||
})
|
||||
|
||||
// console.log(allItems)
|
||||
// console.log(result)
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
export default formatToTracingNodeList
|
||||
@ -1,190 +0,0 @@
|
||||
export const simpleIterationData = (() => {
|
||||
// start -> code(output: [1, 2, 3]) -> iteration(output: ['aaa', 'aaa', 'aaa']) -> end(output: ['aaa', 'aaa', 'aaa'])
|
||||
const startNode = {
|
||||
id: '36c9860a-39e6-4107-b750-655b07895f47',
|
||||
index: 1,
|
||||
predecessor_node_id: null,
|
||||
node_id: '1735023354069',
|
||||
node_type: 'start',
|
||||
title: 'Start',
|
||||
inputs: {
|
||||
'sys.files': [],
|
||||
'sys.user_id': '5ee03762-1d1a-46e8-ba0b-5f419a77da96',
|
||||
'sys.app_id': '8a5e87f8-6433-40f4-a67a-4be78a558dc7',
|
||||
'sys.workflow_id': 'bb5e2b89-40ac-45c9-9ccb-4f2cd926e080',
|
||||
'sys.workflow_run_id': '76adf675-a7d3-4cc1-9282-ed7ecfe4f65d',
|
||||
},
|
||||
process_data: null,
|
||||
outputs: {
|
||||
'sys.files': [],
|
||||
'sys.user_id': '5ee03762-1d1a-46e8-ba0b-5f419a77da96',
|
||||
'sys.app_id': '8a5e87f8-6433-40f4-a67a-4be78a558dc7',
|
||||
'sys.workflow_id': 'bb5e2b89-40ac-45c9-9ccb-4f2cd926e080',
|
||||
'sys.workflow_run_id': '76adf675-a7d3-4cc1-9282-ed7ecfe4f65d',
|
||||
},
|
||||
status: 'succeeded',
|
||||
error: null,
|
||||
elapsed_time: 0.011458,
|
||||
execution_metadata: null,
|
||||
extras: {},
|
||||
created_by_end_user: null,
|
||||
finished_at: 1735023510,
|
||||
}
|
||||
|
||||
const outputArrayNode = {
|
||||
id: 'a3105c5d-ff9e-44ea-9f4c-ab428958af20',
|
||||
index: 2,
|
||||
predecessor_node_id: '1735023354069',
|
||||
node_id: '1735023361224',
|
||||
node_type: 'code',
|
||||
title: 'Code',
|
||||
inputs: null,
|
||||
process_data: null,
|
||||
outputs: {
|
||||
result: [
|
||||
1,
|
||||
2,
|
||||
3,
|
||||
],
|
||||
},
|
||||
status: 'succeeded',
|
||||
error: null,
|
||||
elapsed_time: 0.103333,
|
||||
execution_metadata: null,
|
||||
extras: {},
|
||||
finished_at: 1735023511,
|
||||
}
|
||||
|
||||
const iterationNode = {
|
||||
id: 'a823134d-9f1a-45a4-8977-db838d076316',
|
||||
index: 3,
|
||||
predecessor_node_id: '1735023361224',
|
||||
node_id: '1735023391914',
|
||||
node_type: 'iteration',
|
||||
title: 'Iteration',
|
||||
inputs: null,
|
||||
process_data: null,
|
||||
outputs: {
|
||||
output: [
|
||||
'aaa',
|
||||
'aaa',
|
||||
'aaa',
|
||||
],
|
||||
},
|
||||
|
||||
}
|
||||
|
||||
const iterations = [
|
||||
{
|
||||
id: 'a84a22d8-0f08-4006-bee2-fa7a7aef0420',
|
||||
index: 4,
|
||||
predecessor_node_id: '1735023391914start',
|
||||
node_id: '1735023409906',
|
||||
node_type: 'code',
|
||||
title: 'Code 2',
|
||||
inputs: null,
|
||||
process_data: null,
|
||||
outputs: {
|
||||
result: 'aaa',
|
||||
},
|
||||
status: 'succeeded',
|
||||
error: null,
|
||||
elapsed_time: 0.112688,
|
||||
execution_metadata: {
|
||||
iteration_id: '1735023391914',
|
||||
iteration_index: 0,
|
||||
},
|
||||
extras: {},
|
||||
created_at: 1735023511,
|
||||
finished_at: 1735023511,
|
||||
},
|
||||
{
|
||||
id: 'ff71d773-a916-4513-960f-d7dcc4fadd86',
|
||||
index: 5,
|
||||
predecessor_node_id: '1735023391914start',
|
||||
node_id: '1735023409906',
|
||||
node_type: 'code',
|
||||
title: 'Code 2',
|
||||
inputs: null,
|
||||
process_data: null,
|
||||
outputs: {
|
||||
result: 'aaa',
|
||||
},
|
||||
status: 'succeeded',
|
||||
error: null,
|
||||
elapsed_time: 0.126034,
|
||||
execution_metadata: {
|
||||
iteration_id: '1735023391914',
|
||||
iteration_index: 1,
|
||||
},
|
||||
extras: {},
|
||||
created_at: 1735023511,
|
||||
finished_at: 1735023511,
|
||||
},
|
||||
{
|
||||
id: 'd91c3ef9-0162-4013-9272-d4cc7fb1f188',
|
||||
index: 6,
|
||||
predecessor_node_id: '1735023391914start',
|
||||
node_id: '1735023409906',
|
||||
node_type: 'code',
|
||||
title: 'Code 2',
|
||||
inputs: null,
|
||||
process_data: null,
|
||||
outputs: {
|
||||
result: 'aaa',
|
||||
},
|
||||
status: 'succeeded',
|
||||
error: null,
|
||||
elapsed_time: 0.122716,
|
||||
execution_metadata: {
|
||||
iteration_id: '1735023391914',
|
||||
iteration_index: 2,
|
||||
},
|
||||
extras: {},
|
||||
created_at: 1735023511,
|
||||
finished_at: 1735023511,
|
||||
},
|
||||
]
|
||||
|
||||
const endNode = {
|
||||
id: 'e6ad6560-1aa3-43f3-89e3-e5287c9ea272',
|
||||
index: 7,
|
||||
predecessor_node_id: '1735023391914',
|
||||
node_id: '1735023417757',
|
||||
node_type: 'end',
|
||||
title: 'End',
|
||||
inputs: {
|
||||
output: [
|
||||
'aaa',
|
||||
'aaa',
|
||||
'aaa',
|
||||
],
|
||||
},
|
||||
process_data: null,
|
||||
outputs: {
|
||||
output: [
|
||||
'aaa',
|
||||
'aaa',
|
||||
'aaa',
|
||||
],
|
||||
},
|
||||
status: 'succeeded',
|
||||
error: null,
|
||||
elapsed_time: 0.017552,
|
||||
execution_metadata: null,
|
||||
extras: {},
|
||||
finished_at: 1735023511,
|
||||
}
|
||||
|
||||
return {
|
||||
in: [startNode, outputArrayNode, iterationNode, ...iterations, endNode],
|
||||
expect: [startNode, outputArrayNode, {
|
||||
...iterationNode,
|
||||
details: [
|
||||
[iterations[0]],
|
||||
[iterations[1]],
|
||||
[iterations[2]],
|
||||
],
|
||||
}, endNode],
|
||||
}
|
||||
})()
|
||||
@ -1,11 +1,23 @@
|
||||
import format from '.'
|
||||
import { simpleIterationData } from './data'
|
||||
import graphToLogStruct from '../graph-to-log-struct'
|
||||
|
||||
describe('iteration', () => {
|
||||
const list = graphToLogStruct('start -> (iteration, 1, [2, 3])')
|
||||
const [startNode, iterationNode, ...iterations] = graphToLogStruct('start -> (iteration, 1, [2, 3])')
|
||||
const result = format(list as any, () => { })
|
||||
test('result should have no nodes in iteration node', () => {
|
||||
expect(format(simpleIterationData.in as any).find(item => !!(item as any).execution_metadata?.iteration_id)).toBeUndefined()
|
||||
expect((result as any).find((item: any) => !!item.execution_metadata?.iteration_id)).toBeUndefined()
|
||||
})
|
||||
test('iteration should put nodes in details', () => {
|
||||
expect(format(simpleIterationData.in as any)).toEqual(simpleIterationData.expect)
|
||||
expect(result as any).toEqual([
|
||||
startNode,
|
||||
{
|
||||
...iterationNode,
|
||||
details: [
|
||||
[iterations[0]],
|
||||
[iterations[1]],
|
||||
],
|
||||
},
|
||||
])
|
||||
})
|
||||
})
|
||||
|
||||
@ -1,133 +0,0 @@
|
||||
export const simpleRetryData = (() => {
|
||||
const startNode = {
|
||||
id: 'f7938b2b-77cd-43f0-814c-2f0ade7cbc60',
|
||||
index: 1,
|
||||
predecessor_node_id: null,
|
||||
node_id: '1735112903395',
|
||||
node_type: 'start',
|
||||
title: 'Start',
|
||||
inputs: {
|
||||
'sys.files': [],
|
||||
'sys.user_id': '6d8ad01f-edf9-43a6-b863-a034b1828ac7',
|
||||
'sys.app_id': '6180ead7-2190-4a61-975c-ec3bf29653da',
|
||||
'sys.workflow_id': 'eef6da45-244b-4c79-958e-f3573f7c12bb',
|
||||
'sys.workflow_run_id': 'fc8970ef-1406-484e-afde-8567dc22f34c',
|
||||
},
|
||||
process_data: null,
|
||||
outputs: {
|
||||
'sys.files': [],
|
||||
'sys.user_id': '6d8ad01f-edf9-43a6-b863-a034b1828ac7',
|
||||
'sys.app_id': '6180ead7-2190-4a61-975c-ec3bf29653da',
|
||||
'sys.workflow_id': 'eef6da45-244b-4c79-958e-f3573f7c12bb',
|
||||
'sys.workflow_run_id': 'fc8970ef-1406-484e-afde-8567dc22f34c',
|
||||
},
|
||||
status: 'succeeded',
|
||||
error: null,
|
||||
elapsed_time: 0.008715,
|
||||
execution_metadata: null,
|
||||
extras: {},
|
||||
created_at: 1735112940,
|
||||
created_by_role: 'account',
|
||||
created_by_account: {
|
||||
id: '6d8ad01f-edf9-43a6-b863-a034b1828ac7',
|
||||
name: '九彩拼盘',
|
||||
email: 'iamjoel007@gmail.com',
|
||||
},
|
||||
created_by_end_user: null,
|
||||
finished_at: 1735112940,
|
||||
}
|
||||
|
||||
const httpNode = {
|
||||
id: '50220407-3420-4ad4-89da-c6959710d1aa',
|
||||
index: 2,
|
||||
predecessor_node_id: '1735112903395',
|
||||
node_id: '1735112908006',
|
||||
node_type: 'http-request',
|
||||
title: 'HTTP Request',
|
||||
inputs: null,
|
||||
process_data: {
|
||||
request: 'GET / HTTP/1.1\r\nHost: 404\r\n\r\n',
|
||||
},
|
||||
outputs: null,
|
||||
status: 'failed',
|
||||
error: 'timed out',
|
||||
elapsed_time: 30.247757,
|
||||
execution_metadata: null,
|
||||
extras: {},
|
||||
created_at: 1735112940,
|
||||
created_by_role: 'account',
|
||||
created_by_account: {
|
||||
id: '6d8ad01f-edf9-43a6-b863-a034b1828ac7',
|
||||
name: '九彩拼盘',
|
||||
email: 'iamjoel007@gmail.com',
|
||||
},
|
||||
created_by_end_user: null,
|
||||
finished_at: 1735112970,
|
||||
}
|
||||
|
||||
const retry1 = {
|
||||
id: 'ed352b36-27fb-49c6-9e8f-cc755bfc25fc',
|
||||
index: 3,
|
||||
predecessor_node_id: '1735112903395',
|
||||
node_id: '1735112908006',
|
||||
node_type: 'http-request',
|
||||
title: 'HTTP Request',
|
||||
inputs: null,
|
||||
process_data: null,
|
||||
outputs: null,
|
||||
status: 'retry',
|
||||
error: 'timed out',
|
||||
elapsed_time: 10.011833,
|
||||
execution_metadata: {
|
||||
iteration_id: null,
|
||||
parallel_mode_run_id: null,
|
||||
},
|
||||
extras: {},
|
||||
created_at: 1735112940,
|
||||
created_by_role: 'account',
|
||||
created_by_account: {
|
||||
id: '6d8ad01f-edf9-43a6-b863-a034b1828ac7',
|
||||
name: '九彩拼盘',
|
||||
email: 'iamjoel007@gmail.com',
|
||||
},
|
||||
created_by_end_user: null,
|
||||
finished_at: 1735112950,
|
||||
}
|
||||
|
||||
const retry2 = {
|
||||
id: '74dfb3d3-dacf-44f2-8784-e36bfa2d6c4e',
|
||||
index: 4,
|
||||
predecessor_node_id: '1735112903395',
|
||||
node_id: '1735112908006',
|
||||
node_type: 'http-request',
|
||||
title: 'HTTP Request',
|
||||
inputs: null,
|
||||
process_data: null,
|
||||
outputs: null,
|
||||
status: 'retry',
|
||||
error: 'timed out',
|
||||
elapsed_time: 10.010368,
|
||||
execution_metadata: {
|
||||
iteration_id: null,
|
||||
parallel_mode_run_id: null,
|
||||
},
|
||||
extras: {},
|
||||
created_at: 1735112950,
|
||||
created_by_role: 'account',
|
||||
created_by_account: {
|
||||
id: '6d8ad01f-edf9-43a6-b863-a034b1828ac7',
|
||||
name: '九彩拼盘',
|
||||
email: 'iamjoel007@gmail.com',
|
||||
},
|
||||
created_by_end_user: null,
|
||||
finished_at: 1735112960,
|
||||
}
|
||||
|
||||
return {
|
||||
in: [startNode, httpNode, retry1, retry2],
|
||||
expect: [startNode, {
|
||||
...httpNode,
|
||||
retryDetail: [retry1, retry2],
|
||||
}],
|
||||
}
|
||||
})()
|
||||
@ -1,11 +1,21 @@
|
||||
import format from '.'
|
||||
import { simpleRetryData } from './data'
|
||||
import graphToLogStruct from '../graph-to-log-struct'
|
||||
|
||||
describe('retry', () => {
|
||||
// retry nodeId:1 3 times.
|
||||
const steps = graphToLogStruct('start -> (retry, 1, 3)')
|
||||
const [startNode, retryNode, ...retryDetail] = steps
|
||||
const result = format(steps)
|
||||
test('should have no retry status nodes', () => {
|
||||
expect(format(simpleRetryData.in as any).find(item => (item as any).status === 'retry')).toBeUndefined()
|
||||
expect(result.find(item => (item as any).status === 'retry')).toBeUndefined()
|
||||
})
|
||||
test('should put retry nodes in retryDetail', () => {
|
||||
expect(format(simpleRetryData.in as any)).toEqual(simpleRetryData.expect)
|
||||
expect(result).toEqual([
|
||||
startNode,
|
||||
{
|
||||
...retryNode,
|
||||
retryDetail,
|
||||
},
|
||||
])
|
||||
})
|
||||
})
|
||||
|
||||
@ -1,14 +0,0 @@
|
||||
const STEP_SPLIT = '->'
|
||||
|
||||
/*
|
||||
* : 1 -> 2 -> 3
|
||||
* iteration: (iteration, 1, [2, 3]) -> 4. (1, [2, 3]) means 1 is parent, [2, 3] is children
|
||||
* parallel: 1 -> (parallel, [1,2,3], [4, (parallel: (6,7))]).
|
||||
* retry: (retry, 1, [2,3]). 1 is parent, [2, 3] is retry nodes
|
||||
*/
|
||||
const simpleGraphToLogStruct = (input: string): any[] => {
|
||||
const list = input.split(STEP_SPLIT)
|
||||
return list
|
||||
}
|
||||
|
||||
export default simpleGraphToLogStruct
|
||||
@ -1,76 +1,14 @@
|
||||
'use client'
|
||||
|
||||
import { useState } from 'react'
|
||||
import { FormattedText } from '../components/datasets/formatted-text/formatted'
|
||||
import { PreviewSlice } from '../components/datasets/formatted-text/flavours/preview-slice'
|
||||
import { PreviewContainer } from '../components/datasets/preview/container'
|
||||
import { PreviewHeader } from '../components/datasets/preview/header'
|
||||
import FileIcon from '../components/base/file-icon'
|
||||
import { ChevronDown } from '../components/base/icons/src/vender/solid/arrows'
|
||||
import Badge from '../components/base/badge'
|
||||
import { DividerWithLabel } from '../components/base/divider/with-label'
|
||||
import Button from '../components/base/button'
|
||||
import { ChunkContainer, QAPreview } from '../components/datasets/chunk'
|
||||
import classNames from '@/utils/classnames'
|
||||
import { SwitchPluginVersion } from '../components/workflow/nodes/_base/components/switch-plugin-version'
|
||||
import { useTranslation } from 'react-i18next'
|
||||
|
||||
export default function Page() {
|
||||
const [parentChild, setParentChild] = useState(false)
|
||||
const [vertical, setVertical] = useState(false)
|
||||
const [qa, setQa] = useState(false)
|
||||
return <div className='p-4'>
|
||||
<div className='flex gap-2 my-4'>
|
||||
<Button onClick={() => setParentChild(!parentChild)}>
|
||||
Parent-Child
|
||||
</Button>
|
||||
<Button onClick={() => setVertical(!vertical)}>Vertical</Button>
|
||||
<Button onClick={() => setQa(!qa)}>QA</Button>
|
||||
</div>
|
||||
<PreviewContainer header={
|
||||
<PreviewHeader title='Preview'>
|
||||
<div className='flex items-center'>
|
||||
<FileIcon type='pdf' className='size-4' />
|
||||
<p
|
||||
className='text-text-primary text-sm font-semibold mx-1'
|
||||
>EOS R3 Tech Sheet.pdf</p>
|
||||
<ChevronDown className='size-[18px]' />
|
||||
<Badge text='276 Estimated chunks' className='ml-1' />
|
||||
</div>
|
||||
</PreviewHeader>
|
||||
}>
|
||||
<div className='space-y-6'>{parentChild
|
||||
? Array.from({ length: 4 }, (_, i) => {
|
||||
return <ChunkContainer
|
||||
label='Parent-Chunk-01'
|
||||
characterCount={521}
|
||||
key={i}
|
||||
>
|
||||
<FormattedText className={classNames(
|
||||
'w-full',
|
||||
vertical && 'flex flex-col gap-2',
|
||||
)}>
|
||||
{Array.from({ length: 4 }, (_, i) => {
|
||||
return <PreviewSlice
|
||||
key={i}
|
||||
label='C-1'
|
||||
text='lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.' tooltip={'Child-chunk-2 · 268 Characters'} />
|
||||
})}
|
||||
</FormattedText>
|
||||
</ChunkContainer>
|
||||
})
|
||||
: Array.from({ length: 2 }, (_, i) => {
|
||||
return <ChunkContainer label='Chunk-01' characterCount={521} key={i}>
|
||||
{
|
||||
qa
|
||||
? <QAPreview qa={{
|
||||
question: 'What is the author\'s unconventional approach to writing this book, and how does it challenge the traditional academic mindset of \'publish or perish\'?',
|
||||
answer: 'It is quite natural for academics who are continuously told to “publish or perish” to want to always create something from scratch that is their own fresh creation. This book is an experiment in not starting from scratch, but instead “re-mixing” the book titled Think Python: How to Think Like a Computer Scientist written by Allen B. Downey, Jeff Elkner and others.',
|
||||
}} />
|
||||
: 'In December of 2009, I was preparing to teach SI502 - Networked Programming at the University of Michigan for the fifth semester in a row and decided it was time to write a Python textbook that focused on exploring data instead of understanding algorithms and abstractions. My goal in SI502 is to teach people life-long data handling skills using Python. Few of my students were planning to be professional computer programmers. Instead, they planned be librarians, managers, lawyers, biologists, economists, etc. who happened to want to skillfully use technology in their chosen field.'
|
||||
}
|
||||
</ChunkContainer>
|
||||
})
|
||||
}</div>
|
||||
<DividerWithLabel label='Display previews of up to 10 paragraphs' />
|
||||
</PreviewContainer>
|
||||
const { t } = useTranslation()
|
||||
return <div className="p-20">
|
||||
<SwitchPluginVersion
|
||||
uniqueIdentifier={'langgenius/openai:12'}
|
||||
tooltip={t('workflow.nodes.agent.switchToNewVersion')}
|
||||
/>
|
||||
</div>
|
||||
}
|
||||
|
||||
Loading…
Reference in New Issue