Commit

code tidy up; 0.1.6->0.1.7
shenjinti committed Apr 22, 2024
1 parent 9b7e4f6 commit 97658f2
Showing 8 changed files with 58 additions and 50 deletions.
2 changes: 1 addition & 1 deletion Cargo.lock

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "fgpt"
version = "0.1.6"
version = "0.1.7"
edition = "2021"
description = "A free reverse proxy and cli tool for OpenAI GPT-3.5-turbo."
authors = ["jinti <shenjindi@fourz.cn>"]
2 changes: 1 addition & 1 deletion Dockerfile
@@ -7,7 +7,7 @@ RUN cargo build --release
FROM debian:bookworm
ENV DEBIAN_FRONTEND noninteractive
ENV LANG C.UTF-8
LABEL maintainer="shenjindi@ruzhila.cn"
LABEL maintainer="shenjindi@fourz.cn"

COPY --from=builder /build/target/release/fgpt /bin/

30 changes: 20 additions & 10 deletions README.md
@@ -1,24 +1,23 @@
# A free reverse proxy and CLI tool for OpenAI GPT-3.5-turbo

It allows you to use the GPT-3.5 API without needing to sign up for an API key or pay for usage.
> OpenAI GPT-3.5-turbo is free to use, without any account or API key
> DON'T USE IN PRODUCTION, ONLY FOR PERSONAL USE/TESTING
> 😄 OpenAI GPT-3.5-turbo is free to use, without any account or API key
> 🔔 DON'T USE IN PRODUCTION, ONLY FOR PERSONAL USE/TESTING
## Features

- [x] **REPL** mode, you can ask questions and get answers interactively
- [x] **Reverse proxy mode**, you can use the OpenAI-compatible API via a local server
- [x] **CLI mode**, with shell pipe, file input, code output, etc.
- [x] Support https proxy and socks5 proxy
- [x] 🔏 Support https proxy and socks5 proxy

## Download precompiled binary

- [Linux](https://github.com/shenjinti/fgpt/releases/download/v0.1.5/fgpt-linux-v0.1.5.tar.gz) executable binary
- [Mac M1/M2](https://github.com/shenjinti/fgpt/releases/download/v0.1.5/fgpt-mac_aarch64-v0.1.5.tar.gz) executable binary
- [Linux x64](https://github.com/shenjinti/fgpt/releases/download/v0.1.7/fgpt-linux_x64.tar.gz) executable binary
- [Mac M1/M2](https://github.com/shenjinti/fgpt/releases/download/v0.1.7/fgpt-mac_aarch64.tar.gz) executable binary
- Windows (Coming soon)
- Or via [Docker](https://hub.docker.com/r/shenjinti/fgpt) (see the sketch below)
- Or build from source (see below, cargo is required)

```bash
cargo install fgpt
```
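
For the Docker route, a minimal invocation might look like the following (a sketch, assuming the published image uses the `fgpt` binary as its entrypoint):

```bash
# Assumes the image's entrypoint is the fgpt binary; runs a one-shot prompt
docker run -it --rm shenjinti/fgpt "How to get a domain's MX record on linux shell?"
```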
@@ -35,7 +34,7 @@ fgpt "How to get a domain's MX record on linux shell?"
fgpt -c "Write python code to reverse a string"
# With pipe
git diff | fgpt "Write a git commit brief for the following diff"
cat README.md | fgpt "summarize for reddit post"
# With stdin
fgpt "Convert the following csv data to json, without any description" < contacts.csv
@@ -94,6 +93,7 @@ Your local server will now be running and accessible at: `http://127.0.0.1:4090/`

```python
import openai
import sys
openai.api_key = 'nothing'
openai.base_url = "http://127.0.0.1:4090/v1/"
@@ -102,17 +102,27 @@ completion = openai.chat.completions.create(
    messages=[
        {"role": "user", "content": "Write simple JavaScript code"},
    ],
    stream=True,
)
print(completion.choices[0].message.content)
for chunk in completion:
    print(chunk.choices[0].delta.content, end='')
    sys.stdout.flush()
print()
```
or test with curl:
```bash
curl -X POST -H "Content-Type: application/json" -d '{"model":"gpt-3.5-turbo","messages":[{"role":"user","content":"Write simple JavaScript code"}], "stream":true}' http://127.0.0.1:4090/v1/chat/completions
curl -X POST -H "Content-Type: application/json" -d '{"model":"gpt-3.5-turbo",
  "messages":[{"role":"user","content":"Write simple JavaScript code"}],
  "stream":true}' \
  http://127.0.0.1:4090/v1/chat/completions
```
```bash
curl -X POST -H "Content-Type: application/json" -d '{"model":"gpt-3.5-turbo","messages":[{"role":"user","content":"Write simple JavaScript code"}]}' http://127.0.0.1:4090/v1/chat/completions
curl -X POST -H "Content-Type: application/json" -d '{"model":"gpt-3.5-turbo",
  "messages":[{"role":"user","content":"Write simple JavaScript code"}]}' \
  http://127.0.0.1:4090/v1/chat/completions
```
14 changes: 12 additions & 2 deletions src/cli.rs
@@ -7,6 +7,15 @@ use std::borrow::Cow;
use std::io::Write;
use std::io::{IsTerminal, Read};

impl From<ReadlineError> for fgpt::Error {
    fn from(e: ReadlineError) -> Self {
        match e {
            ReadlineError::Eof => fgpt::Error::Io("EOF".to_string()),
            _ => fgpt::Error::Io(e.to_string()),
        }
    }
}

#[derive(Default)]
struct PromptHighlighter {}

@@ -26,7 +35,7 @@ struct PromptHelper {
highlighter: PromptHighlighter,
}

pub async fn run_repl(state: fgpt::StateRef) -> Result<(), fgpt::Error> {
pub async fn run_repl(state: fgpt::AppStateRef) -> Result<(), fgpt::Error> {
println!("free GPT-3.5 cli tools | 🪐 https://github.com/shenjinti/fgpt");
println!("💖 To star the repository if you like \x1b[1;32mfgpt\x1b[0m!");

@@ -58,6 +67,7 @@ pub async fn run_repl(state: fgpt::StateRef) -> Result<(), fgpt::Error> {
let line = line.trim();
match line {
"/exit" => break,
"/bye" => break,
"/help" => {
help_texts.iter().for_each(|text| println!("{}", text));
continue;
@@ -143,7 +153,7 @@ pub async fn run_repl(state: fgpt::StateRef) -> Result<(), fgpt::Error> {
Ok(())
}

pub async fn run(state: fgpt::StateRef) -> Result<(), fgpt::Error> {
pub async fn run(state: fgpt::AppStateRef) -> Result<(), fgpt::Error> {
if state.repl || (state.question.is_none() && state.input_file.is_none()) {
return run_repl(state).await;
}
32 changes: 10 additions & 22 deletions src/fgpt.rs
@@ -10,7 +10,6 @@ use reqwest::{
},
Client, Proxy,
};
use rustyline::error::ReadlineError;
use serde::{ser::SerializeStruct, Deserialize, Serialize, Serializer};
use sha3::Digest;
use std::cell::RefCell;
@@ -26,7 +25,9 @@ use std::{
const OPENAI_ENDPOINT: &str = "https://chat.openai.com";
const OPENAI_API_URL: &str = "https://chat.openai.com/backend-anon/conversation";
const OPENAI_SENTINEL_URL: &str = "https://chat.openai.com/backend-anon/sentinel/chat-requirements";
const UA: &str = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/123.0.0.0 Safari/537.36";
const UA: &str = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/121.0.0.0 Safari/537.36 Edg/121.0.0.0";
const CH_UA: &str = r#""Not A(Brand";v="99", "Microsoft Edge";v="121", "Chromium";v="121""#;
const CH_PLATFORM: &str = r#""macOS""#;

#[derive(Clone)]
pub struct AppState {
@@ -46,7 +47,7 @@ pub struct AppState {
pub serve_addr: String,
}

pub type StateRef = Arc<AppState>;
pub type AppStateRef = Arc<AppState>;

#[derive(Debug)]
pub enum Error {
@@ -73,16 +74,6 @@ impl From<serde_json::Error> for Error {
}
}

#[cfg(feature = "cli")]
impl From<ReadlineError> for Error {
    fn from(e: ReadlineError) -> Self {
        match e {
            ReadlineError::Eof => Error::Io("EOF".to_string()),
            _ => Error::Io(e.to_string()),
        }
    }
}

impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
@@ -162,7 +153,7 @@ pub struct CompletionRequest {

impl CompletionRequest {
pub fn new(
state: StateRef,
state: AppStateRef,
messages: Vec<Message>,
conversation_id: Option<String>,
parent_message_id: Option<String>,
@@ -188,7 +179,7 @@ }
}
}

pub async fn stream(&self, state: StateRef) -> Result<CompletionStream, Error> {
pub async fn stream(&self, state: AppStateRef) -> Result<CompletionStream, Error> {
let start_at = std::time::Instant::now();
let session = alloc_session(state.clone()).await?;
let builder = build_req(
@@ -457,7 +448,7 @@ fn build_req(
token: Option<&str>,
seed: Option<&str>,
difficulty: Option<&str>,
state: StateRef,
state: AppStateRef,
) -> Result<reqwest::RequestBuilder, reqwest::Error> {
let client = match state.proxy.as_ref() {
Some(proxy) => match Proxy::all(proxy) {
@@ -486,12 +477,9 @@ fn build_req(
.header(REFERER, OPENAI_ENDPOINT)
.header(ORIGIN, OPENAI_ENDPOINT)
.header(CONTENT_TYPE, "application/json")
.header(
"sec-ch-ua",
"\"Google Chrome\";v=\"123\", \"Not:A-Brand\";v=\"8\", \"Chromium\";v=\"123\"",
)
.header("sec-ch-ua", CH_UA)
.header("sec-ch-ua-mobile", "?0")
.header("sec-ch-ua-platform", "\"Windows\"")
.header("sec-ch-ua-platform", CH_PLATFORM)
.header("sec-fetch-dest", "empty")
.header("sec-fetch-mode", "cors")
.header("sec-fetch-site", "same-origin")
@@ -509,7 +497,7 @@ }
}
}

pub async fn alloc_session(state: StateRef) -> Result<Session, Error> {
pub async fn alloc_session(state: AppStateRef) -> Result<Session, Error> {
let start_at = SystemTime::now();
let resp = build_req(
OPENAI_SENTINEL_URL,
2 changes: 1 addition & 1 deletion src/main.rs
@@ -150,7 +150,7 @@ pub async fn main() -> Result<(), crate::fgpt::Error> {
init_log(&args.log_level, false, &args.log_file);
}

let state: fgpt::StateRef = Arc::new(args.into());
let state: fgpt::AppStateRef = Arc::new(args.into());

#[cfg(feature = "proxy")]
if state.serve_addr != "" {
24 changes: 12 additions & 12 deletions src/proxy.rs
@@ -1,4 +1,4 @@
use crate::fgpt::{self, CompletionRequest, StateRef};
use crate::fgpt::{self, AppStateRef, CompletionEvent, CompletionRequest, Message};
use axum::{
extract::State,
response::{sse::Event, IntoResponse, Response, Sse},
@@ -16,12 +16,12 @@ use std::{

#[derive(Deserialize, Debug, Serialize, Default)]
struct OpenAPIClientRequest {
messages: Vec<crate::fgpt::Message>,
messages: Vec<Message>,
stream: Option<bool>,
}

async fn proxy_completions(
State(state): State<StateRef>,
State(state): State<AppStateRef>,
Json(params): Json<OpenAPIClientRequest>,
) -> Response {
log::info!(
@@ -43,7 +43,7 @@ }
}

async fn handle_proxy_completions(
State(state): State<StateRef>,
State(state): State<AppStateRef>,
Json(params): Json<OpenAPIClientRequest>,
) -> Result<Response, fgpt::Error> {
let stream_mode = params.stream.unwrap_or(false);
@@ -58,10 +58,10 @@ async fn handle_proxy_completions(
if !stream_mode {
while let Some(Ok(event)) = stream.next().await {
match event {
crate::fgpt::CompletionEvent::Done => {
CompletionEvent::Done => {
break;
}
crate::fgpt::CompletionEvent::Error(reason) => {
CompletionEvent::Error(reason) => {
return Err(fgpt::Error::Io(reason));
}
_ => {}
@@ -104,7 +104,7 @@ async fn handle_proxy_completions(
);

log::info!(
"sync exec request_id:{} elapsed:{:.2}s throughput:{} tokens:{:.2}",
"sync exec request_id:{} elapsed:{:.2}s throughput:{:.2} tokens:{}",
stream.request_id,
stream.start_at.elapsed().unwrap().as_secs_f64(),
*stream.completion_tokens.borrow() as f64
@@ -128,7 +128,7 @@ impl Stream for CompletionToSSEStream {
let poll = stream.poll_next_unpin(cx);
match poll {
Poll::Ready(Some(Ok(event))) => match event {
crate::fgpt::CompletionEvent::Data(data) => {
CompletionEvent::Data(data) => {
let body = json!(
{
"id": stream.request_id,
@@ -154,7 +154,7 @@ impl Stream for CompletionToSSEStream {
let event = Event::default().data(body.to_string());
Poll::Ready(Some(Ok(event)))
}
crate::fgpt::CompletionEvent::Done => {
CompletionEvent::Done => {
let completion_tokens = *stream.completion_tokens.borrow();
let total_tokens = completion_tokens + stream.prompt_tokens;
log::info!(
@@ -166,11 +166,11 @@ );
);
Poll::Ready(None)
}
crate::fgpt::CompletionEvent::Error(reason) => {
CompletionEvent::Error(reason) => {
let body = json!(
{
"id": stream.request_id,
"created": stream.start_at.duration_since(UNIX_EPOCH).unwrap(),
"created": stream.start_at.duration_since(UNIX_EPOCH).unwrap().as_secs_f64(),
"model": "gpt-3.5-turbo",
"object": "chat.completion.chunk",
"choices": [
@@ -196,7 +196,7 @@ }
}
}

pub async fn serve(state: StateRef) -> Result<(), crate::fgpt::Error> {
pub async fn serve(state: AppStateRef) -> Result<(), fgpt::Error> {
let app = Router::new()
.nest(
&state.prefix,
