Skip to content

Commit

Permalink
Merge pull request FlowiseAI#956 from BitVoyagerMan/new_feature
Browse files Browse the repository at this point in the history
add bittensor chatmodel
  • Loading branch information
HenryHengZJ authored Sep 21, 2023
2 parents d60b39d + f7c8a3d commit 4f249ce
Show file tree
Hide file tree
Showing 4 changed files with 103 additions and 0 deletions.
46 changes: 46 additions & 0 deletions packages/components/nodes/chatmodels/Bittensor/Bittensor.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,46 @@
import { INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses } from '../../../src/utils'
import { NIBittensorChatModel, BittensorInput } from 'langchain/experimental/chat_models/bittensor'

/**
 * Flowise chat-model node wrapping langchain's experimental
 * NIBittensorChatModel (Bittensor subnet 1 large language models).
 */
class Bittensor_ChatModels implements INode {
    label: string
    name: string
    version: number
    type: string
    icon: string
    category: string
    description: string
    baseClasses: string[]
    inputs: INodeParams[]

    constructor() {
        this.label = 'NIBittensorChat'
        this.name = 'NIBittensorChatModel'
        this.version = 1.0
        this.type = 'BittensorChat'
        this.icon = 'logo.png'
        this.category = 'Chat Models'
        this.description = 'Wrapper around Bittensor subnet 1 large language models'
        this.baseClasses = [this.type, ...getBaseClasses(NIBittensorChatModel)]
        // Single optional input: a system prompt forwarded to the model.
        this.inputs = [
            {
                label: 'System prompt',
                name: 'system_prompt',
                type: 'string',
                additionalParams: true,
                optional: true
            }
        ]
    }

    /**
     * Instantiates the NIBittensorChatModel from the node's configured inputs.
     *
     * @param nodeData - Flowise node data carrying the user-supplied inputs
     * @returns the constructed chat model instance
     */
    async init(nodeData: INodeData, _: string): Promise<any> {
        const prompt = nodeData.inputs?.system_prompt as string
        const options: Partial<BittensorInput> = {
            systemPrompt: prompt
        }
        return new NIBittensorChatModel(options)
    }
}

module.exports = { nodeClass: Bittensor_ChatModels }
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
57 changes: 57 additions & 0 deletions packages/components/nodes/llms/Bittensor/Bittensor.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,57 @@
import { INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses } from '../../../src/utils'
import { NIBittensorLLM, BittensorInput } from 'langchain/experimental/llms/bittensor'

/**
 * Flowise LLM node wrapping langchain's experimental NIBittensorLLM
 * (Bittensor subnet 1 large language models).
 */
class Bittensor_LLMs implements INode {
    label: string
    name: string
    version: number
    type: string
    icon: string
    category: string
    description: string
    baseClasses: string[]
    inputs: INodeParams[]

    constructor() {
        this.label = 'NIBittensorLLM'
        this.name = 'NIBittensorLLM'
        this.version = 1.0
        this.type = 'Bittensor'
        this.icon = 'logo.png'
        this.category = 'LLMs'
        this.description = 'Wrapper around Bittensor subnet 1 large language models'
        this.baseClasses = [this.type, ...getBaseClasses(NIBittensorLLM)]
        // Both inputs are optional; init() must therefore tolerate their absence.
        this.inputs = [
            {
                label: 'System prompt',
                name: 'system_prompt',
                type: 'string',
                additionalParams: true,
                optional: true
            },
            {
                label: 'Top Responses',
                name: 'topResponses',
                type: 'number',
                step: 1,
                optional: true,
                additionalParams: true
            }
        ]
    }

    /**
     * Instantiates the NIBittensorLLM from the node's configured inputs.
     *
     * Fix: the previous implementation always computed
     * `Number(nodeData.inputs?.topResponses)`, which yields NaN when the
     * optional input is omitted, and then unconditionally passed that NaN
     * into the model options. We now only forward `topResponses` when the
     * user actually supplied a usable numeric value.
     *
     * @param nodeData - Flowise node data carrying the user-supplied inputs
     * @returns the constructed LLM instance
     */
    async init(nodeData: INodeData, _: string): Promise<any> {
        const system_prompt = nodeData.inputs?.system_prompt as string
        const topResponses = Number(nodeData.inputs?.topResponses as number)

        const obj: Partial<BittensorInput> = {
            systemPrompt: system_prompt
        }
        // Skip NaN (input omitted or non-numeric) so the model's own
        // default for topResponses is used instead.
        if (Number.isFinite(topResponses)) {
            obj.topResponses = topResponses
        }

        const model = new NIBittensorLLM(obj)
        return model
    }
}

module.exports = { nodeClass: Bittensor_LLMs }
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.

0 comments on commit 4f249ce

Please sign in to comment.