Commit

init
tomatyss committed Mar 27, 2024
0 parents commit 4759f1e
Showing 15 changed files with 2,431 additions and 0 deletions.
3 changes: 3 additions & 0 deletions .eslintignore
@@ -0,0 +1,3 @@
node_modules/

main.js
26 changes: 26 additions & 0 deletions .eslintrc
@@ -0,0 +1,26 @@
{
"root": true,
"parser": "@typescript-eslint/parser",
"env": { "node": true },
"plugins": [
"@typescript-eslint",
"prettier"
],
"extends": [
"eslint:recommended",
"plugin:@typescript-eslint/eslint-recommended",
"plugin:@typescript-eslint/recommended",
"prettier"
],
"parserOptions": {
"sourceType": "module"
},
"rules": {
"no-unused-vars": "off",
"@typescript-eslint/no-unused-vars": ["error", { "args": "none" }],
"@typescript-eslint/ban-ts-comment": "off",
"no-prototype-builtins": "off",
"@typescript-eslint/no-empty-function": "off",
"prettier/prettier": "error"
}
}
6 changes: 6 additions & 0 deletions .github/dependabot.yml
@@ -0,0 +1,6 @@
version: 2
updates:
- package-ecosystem: "npm"
directory: "/"
schedule:
interval: "weekly"
40 changes: 40 additions & 0 deletions .github/workflows/build-and-release.yml
@@ -0,0 +1,40 @@
name: Build and Release

on:
push:
tags:
- 'v*'
workflow_dispatch:
inputs:
releaseVersion:
description: 'Release Version (e.g., v1.0.0)'
required: true

jobs:
release:
runs-on: ubuntu-latest

steps:
- uses: actions/checkout@v3

- name: Setup Node.js
uses: actions/setup-node@v3
with:
node-version: '21'

- name: Install dependencies
run: npm ci

- name: Build
run: npm run build

- name: Create Release
id: create_release
uses: ncipollo/release-action@v1
with:
token: ${{ secrets.GH_TOKEN }}
tag: ${{ github.event.inputs.releaseVersion || github.ref_name }}
name: Release ${{ github.event.inputs.releaseVersion || github.ref_name }}
draft: false
prerelease: false
artifacts: 'build/*'
24 changes: 24 additions & 0 deletions .gitignore
@@ -0,0 +1,24 @@
# vscode
.vscode

# Intellij
*.iml
.idea

# npm
node_modules

# Don't include the compiled main.js file in the repo.
# It should be uploaded to GitHub releases instead.
main.js

# Exclude sourcemaps
*.map

# obsidian
data.json

# Exclude macOS Finder (System Explorer) View States
.DS_Store

build
7 changes: 7 additions & 0 deletions .prettierrc
@@ -0,0 +1,7 @@
{
"semi": true,
"singleQuote": true,
"trailingComma": "all",
"tabWidth": 2,
"printWidth": 80
}
49 changes: 49 additions & 0 deletions README.md
@@ -0,0 +1,49 @@
# AI/ML Connector

This is a Prompt Mixer connector for the AI/ML API, a service that provides a unified API for accessing a variety of open-source AI/ML models.

## Features

- Access a wide range of AI/ML models through a single API
- Easily query models for tasks like text generation, image generation, classification, and more
- Customizable settings and parameters for fine-tuning model behavior
- Seamless integration with your application or platform

## Installation

To install the AI/ML Connector, follow these steps:

1. Sign up for an account on the [AI/ML service platform](https://www.aimlapi.com/).
2. Obtain an API key from the platform.
3. In Prompt Mixer, navigate to the connector section.
4. Install the AI/ML Connector and enter your API key.

## Usage

Once the connector is installed, you can start using the AI/ML models through your application or platform's interface. Typical usage might include:

- Generating text based on a given prompt
- Classifying text or images into predefined categories
- Generating images from textual descriptions
- Performing sentiment analysis on text
- Extracting key information or entities from text

To get started, refer to the documentation provided by the AI/ML service platform. This will outline the available models, their capabilities, and the required input/output formats.
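
Below is a minimal sketch of the kind of request such a service handles, assuming an OpenAI-compatible chat-completions endpoint; the base URL, endpoint path, and response shape are illustrative, so check the platform documentation for the exact contract.

```typescript
// Hypothetical example: ask a chat model for a completion over HTTP.
// Assumes an OpenAI-compatible /chat/completions endpoint and Node 18+ (global fetch).
const API_KEY = process.env.AIML_API_KEY ?? ''; // obtained in the Installation steps above

async function generateText(prompt: string): Promise<string> {
  const response = await fetch('https://api.aimlapi.com/chat/completions', {
    method: 'POST',
    headers: {
      Authorization: `Bearer ${API_KEY}`,
      'Content-Type': 'application/json',
    },
    body: JSON.stringify({
      model: 'mistralai/Mistral-7B-Instruct-v0.2', // any model from the connector's list
      messages: [
        { role: 'system', content: 'You are a helpful assistant.' },
        { role: 'user', content: prompt },
      ],
      max_tokens: 256,
    }),
  });
  if (!response.ok) {
    throw new Error(`Request failed with status ${response.status}`);
  }
  const data = await response.json();
  // Chat-completion APIs conventionally return the text at choices[0].message.content.
  return data.choices[0].message.content;
}

generateText('Summarize the plot of Hamlet in two sentences.').then(console.log);
```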

## Configuration

The AI/ML Connector allows you to customize the behavior of the models by adjusting various settings and parameters. These may include:

- Model selection
- Temperature, top-p, and other sampling parameters
- Input length, output length, and other constraints

Consult the connector's documentation for details on the available configuration options.
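
As an illustration of how these knobs compose, the sketch below starts from the connector's defaults (see `config.js` later in this commit) and overrides only the parameters that matter for a given run; the option names mirror the property IDs, but the exact way the connector merges them is an assumption.

```typescript
// Hypothetical sketch: start from the defaults defined in config.js and
// override individual sampling parameters per run.
interface CompletionOptions {
  model: string;
  max_tokens: number;
  temperature: number;
  top_p: number;
  frequency_penalty: number;
  presence_penalty: number;
  stop: string[];
}

const defaults: CompletionOptions = {
  model: 'meta-llama/Llama-2-7b-chat-hf',
  max_tokens: 4096,
  temperature: 0.7,
  top_p: 1,
  frequency_penalty: 0.5,
  presence_penalty: 0.5,
  stop: ['\n'],
};

// A more exploratory run: higher temperature, shorter output, everything else unchanged.
const creativeRun: CompletionOptions = { ...defaults, temperature: 1.0, max_tokens: 1024 };
console.log(creativeRun);
```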

## Contributing

Contributions to the AI/ML Connector are welcome! If you encounter any issues or have ideas for improvements, please feel free to open a GitHub issue or submit a pull request.

## License

The AI/ML Connector is licensed under the [MIT License](LICENSE).
25 changes: 25 additions & 0 deletions config.d.ts
@@ -0,0 +1,25 @@
export interface ModelConfig {
connectorName: string;
models: string[];
properties: Property[];
settings: Setting[];
iconBase64: string;
description?: string;
author?: string;
}

export interface Property {
id: string;
name: string;
value: string | number | boolean | string[];
type: 'string' | 'number' | 'boolean' | 'array';
}

export interface Setting {
id: string;
name: string;
value: string;
type: 'string';
}

export declare const config: ModelConfig;
135 changes: 135 additions & 0 deletions config.js
@@ -0,0 +1,135 @@
export const config = {
connectorName: 'AI/ML API Connector',
models: [
'zero-one-ai/Yi-34B-Chat',
'Austism/chronos-hermes-13b',
'deepseek-ai/deepseek-coder-33b-instruct',
'garage-bAInd/Platypus2-70B-instruct',
'google/gemma-2b-it',
'google/gemma-7b-it',
'Gryphe/MythoMax-L2-13b',
'lmsys/vicuna-13b-v1.5',
'lmsys/vicuna-7b-v1.5',
'codellama/CodeLlama-13b-Instruct-hf',
'codellama/CodeLlama-34b-Instruct-hf',
'codellama/CodeLlama-70b-Instruct-hf',
'codellama/CodeLlama-7b-Instruct-hf',
'meta-llama/Llama-2-70b-chat-hf',
'meta-llama/Llama-2-13b-chat-hf',
'meta-llama/Llama-2-7b-chat-hf',
'mistralai/Mistral-7B-Instruct-v0.1',
'mistralai/Mistral-7B-Instruct-v0.2',
'mistralai/Mixtral-8x7B-Instruct-v0.1',
'NousResearch/Nous-Capybara-7B-V1p9',
'NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO',
'NousResearch/Nous-Hermes-2-Mixtral-8x7B-SFT',
'NousResearch/Nous-Hermes-llama-2-7b',
'NousResearch/Nous-Hermes-Llama2-13b',
'NousResearch/Nous-Hermes-2-Yi-34B',
'openchat/openchat-3.5-1210',
'Open-Orca/Mistral-7B-OpenOrca',
'togethercomputer/Qwen-7B-Chat',
'Qwen/Qwen1.5-0.5B-Chat',
'Qwen/Qwen1.5-1.8B-Chat',
'Qwen/Qwen1.5-4B-Chat',
'Qwen/Qwen1.5-7B-Chat',
'Qwen/Qwen1.5-14B-Chat',
'Qwen/Qwen1.5-72B-Chat',
'snorkelai/Snorkel-Mistral-PairRM-DPO',
'togethercomputer/alpaca-7b',
'teknium/OpenHermes-2-Mistral-7B',
'teknium/OpenHermes-2p5-Mistral-7B',
'togethercomputer/falcon-40b-instruct',
'togethercomputer/falcon-7b-instruct',
'togethercomputer/Llama-2-7B-32K-Instruct',
'togethercomputer/RedPajama-INCITE-Chat-3B-v1',
'togethercomputer/RedPajama-INCITE-7B-Chat',
'togethercomputer/StripedHyena-Nous-7B',
'Undi95/ReMM-SLERP-L2-13B',
'Undi95/Toppy-M-7B',
'WizardLM/WizardLM-13B-V1.2',
'upstage/SOLAR-10.7B-Instruct-v1.0',
'codellama/CodeLlama-70b-Python-hf',
'codellama/CodeLlama-34b-Python-hf',
'codellama/CodeLlama-13b-Python-hf',
'codellama/CodeLlama-7b-Python-hf',
'Phind/Phind-CodeLlama-34B-v2',
'WizardLM/WizardCoder-Python-34B-V1.0',
'WizardLM/WizardCoder-15B-V1.0'
],
description:
'This plugin connects to AI/ML LLM models via the AI/ML API.',
author: 'Prompt Mixer',
properties: [
{
id: 'prompt',
name: 'System Prompt',
value: 'You are a helpful assistant.',
type: 'string',
},
{
id: 'max_tokens',
name: 'Max Tokens',
value: 4096,
type: 'number',
},
{
id: 'temperature',
name: 'Temperature',
value: 0.7,
type: 'number',
},
{
id: 'top_p',
name: 'Top P',
value: 1,
type: 'number',
},
{
id: 'frequency_penalty',
name: 'Frequency Penalty',
value: 0.5,
type: 'number',
},
{
id: 'presence_penalty',
name: 'Presence Penalty',
value: 0.5,
type: 'number',
},
{
id: 'stop',
name: 'Stop Sequences',
value: ['\n'],
type: 'array',
},
{
id: 'echo',
name: 'Echo',
value: false,
type: 'boolean',
},
{
id: 'best_of',
name: 'Best Of',
value: 1,
type: 'number',
},
{
id: 'logprobs',
name: 'LogProbs',
value: false,
type: 'boolean',
},
],
settings: [
{
id: 'API_KEY',
name: 'API Key',
value: '',
type: 'string',
},
],
iconBase64:
'data:image/svg+xml;base64,PHN2ZyB3aWR0aD0iMTYiIGhlaWdodD0iMTYiIHZpZXdCb3g9IjAgMCAxNiAxNiIgZmlsbD0ibm9uZSIgeG1sbnM9Imh0dHA6Ly93d3cudzMub3JnLzIwMDAvc3ZnIj4KPHBhdGggZD0iTTQuNjY2NjcgMy4zMzMzNUM0LjY2NjY3IDEuODYwNTkgNS44NjA1NyAwLjY2NjY4NyA3LjMzMzMzIDAuNjY2Njg3QzguODA2MDcgMC42NjY2ODcgMTAgMS44NjA1OSAxMCAzLjMzMzM1SDEyQzEyLjM2ODIgMy4zMzMzNSAxMi42NjY3IDMuNjMxODMgMTIuNjY2NyA0LjAwMDAyVjYuMDAwMDJDMTQuMTM5NCA2LjAwMDAyIDE1LjMzMzMgNy4xOTM5NSAxNS4zMzMzIDguNjY2NjlDMTUuMzMzMyAxMC4xMzk0IDE0LjEzOTQgMTEuMzMzNCAxMi42NjY3IDExLjMzMzRWMTMuMzMzNEMxMi42NjY3IDEzLjcwMTYgMTIuMzY4MiAxNCAxMiAxNEgyLjY2NjY3QzIuMjk4NDggMTQgMiAxMy43MDE2IDIgMTMuMzMzNFY0LjAwMDAyQzIgMy42MzE4MyAyLjI5ODQ4IDMuMzMzMzUgMi42NjY2NyAzLjMzMzM1SDQuNjY2NjdaTTcuMzMzMzMgMi4wMDAwMkM2LjU5Njk1IDIuMDAwMDIgNiAyLjU5Njk3IDYgMy4zMzMzNUM2IDMuNDkwMzggNi4wMjY4NyAzLjYzOTcgNi4wNzU3IDMuNzc3ODVDNi4xNDc4MSAzLjk4MTkgNi4xMTY0MSA0LjIwODI1IDUuOTkxNDUgNC4zODQ5NUM1Ljg2NjQ5IDQuNTYxNjQgNS42NjM1NSA0LjY2NjY5IDUuNDQ3MTQgNC42NjY2OUgzLjMzMzMzVjEyLjY2NjdIMTEuMzMzM1YxMC41NTI5QzExLjMzMzMgMTAuMzM2NSAxMS40Mzg0IDEwLjEzMzYgMTEuNjE1MSAxMC4wMDg2QzExLjc5MTggOS44ODM2MiAxMi4wMTgxIDkuODUyMjIgMTIuMjIyMSA5LjkyNDM1QzEyLjM2MDMgOS45NzMxNSAxMi41MDk3IDEwIDEyLjY2NjcgMTBDMTMuNDAzMSAxMCAxNCA5LjQwMzA5IDE0IDguNjY2NjlDMTQgNy45MzAyOSAxMy40MDMxIDcuMzMzMzUgMTIuNjY2NyA3LjMzMzM1QzEyLjUwOTcgNy4zMzMzNSAxMi4zNjAzIDcuMzYwMjIgMTIuMjIyMSA3LjQwOTAyQzEyLjAxODEgNy40ODExNSAxMS43OTE4IDcuNDQ5NzUgMTEuNjE1MSA3LjMyNDgyQzExLjQzODQgNy4xOTk4MiAxMS4zMzMzIDYuOTk2ODkgMTEuMzMzMyA2Ljc4MDQ5VjQuNjY2NjlIOS4yMTk1M0M5LjAwMzEzIDQuNjY2NjkgOC44MDAyIDQuNTYxNjQgOC42NzUyIDQuMzg0OTVDOC41NTAyNyA0LjIwODI1IDguNTE4ODcgMy45ODE5IDguNTkxIDMuNzc3ODVDOC42Mzk4IDMuNjM5NyA4LjY2NjY3IDMuNDkwMzkgOC42NjY2NyAzLjMzMzM1QzguNjY2NjcgMi41OTY5NyA4LjA2OTczIDIuMDAwMDIgNy4zMzMzMyAyLjAwMDAyWiIgZmlsbD0iIzZGNzM3QSIvPgo8L3N2Zz4K',
};
19 changes: 19 additions & 0 deletions esbuild.config.mjs
@@ -0,0 +1,19 @@
import esbuild from 'esbuild';
import process from 'process';

const prod = process.argv[2] === 'production';

const context = await esbuild.context({
entryPoints: ['main.ts'],
bundle: true,
platform: 'node',
target: 'es2022',
outfile: './build/main.js',
});

if (prod) {
await context.rebuild();
process.exit(0);
} else {
await context.watch();
}