openai_dart_example.dart
// ignore_for_file: avoid_print

// Example usage of the openai_dart client, covering chat completions,
// completions, embeddings, fine-tuning, images, models and moderations.
import 'dart:async';
import 'dart:io';

import 'package:openai_dart/openai_dart.dart';

Future<void> main() async {
  // Read the API key from the environment instead of hard-coding it.
  final client = OpenAIClient(apiKey: Platform.environment['OPENAI_API_KEY']);

  await _chatCompletions(client);
  await _completions(client);
  await _embeddings(client);
  await _fineTuning(client);
  await _images(client);
  await _models(client);
  await _moderations(client);

  // Close the underlying HTTP client when finished.
  client.endSession();
}

/// Demonstrates the Chat Completions API with a system and a user message.
Future<void> _chatCompletions(final OpenAIClient client) async {
  final res = await client.createChatCompletion(
    request: const CreateChatCompletionRequest(
      model: ChatCompletionModel.modelId('gpt-4'),
      messages: [
        ChatCompletionMessage.system(
          content: 'You are a helpful assistant.',
        ),
        ChatCompletionMessage.user(
          content: ChatCompletionUserMessageContent.string('Hello!'),
        ),
      ],
      temperature: 0,
    ),
  );
  print(res.choices.first.message.content);
}
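
/// Minimal streaming sketch (not wired into main). It assumes the package's
/// createChatCompletionStream method and the delta shape shown in its README:
/// each stream event carries only the newly generated tokens.
Future<void> _chatCompletionsStream(final OpenAIClient client) async {
  final stream = client.createChatCompletionStream(
    request: const CreateChatCompletionRequest(
      model: ChatCompletionModel.modelId('gpt-4'),
      messages: [
        ChatCompletionMessage.user(
          content: ChatCompletionUserMessageContent.string('Hello!'),
        ),
      ],
    ),
  );
  await for (final res in stream) {
    // Print each partial chunk as it arrives.
    print(res.choices.first.delta.content);
  }
}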

/// Demonstrates the (legacy) Completions API.
Future<void> _completions(final OpenAIClient client) async {
  final res = await client.createCompletion(
    request: const CreateCompletionRequest(
      model: CompletionModel.modelId('gpt-3.5-turbo-instruct'),
      prompt: CompletionPrompt.string('Say this is a test'),
      maxTokens: 7,
      temperature: 0,
    ),
  );
  print(res.choices.first.text);
}

/// Demonstrates the Embeddings API.
Future<void> _embeddings(final OpenAIClient client) async {
  final res = await client.createEmbedding(
    request: const CreateEmbeddingRequest(
      model: EmbeddingModel.modelId('text-embedding-3-small'),
      input: EmbeddingInput.string('The food was delicious and the waiter...'),
    ),
  );
  print(res.data.first.embeddingVector);
}
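
/// Illustrative helper (not part of the package): compares two embedding
/// vectors by dot product. OpenAI embeddings are normalized to unit length,
/// so this is equivalent to cosine similarity. Pass in the List<double>
/// values returned by embeddingVector above.
double _dotProduct(final List<double> a, final List<double> b) {
  var sum = 0.0;
  for (var i = 0; i < a.length; i++) {
    sum += a[i] * b[i];
  }
  return sum;
}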

/// Demonstrates the Fine-tuning API by listing existing fine-tuning jobs.
Future<void> _fineTuning(final OpenAIClient client) async {
  final res = await client.listPaginatedFineTuningJobs();
  print(res.data.first.id);
}

/// Demonstrates the Images API using DALL·E 3.
Future<void> _images(final OpenAIClient client) async {
  final res = await client.createImage(
    request: const CreateImageRequest(
      model: CreateImageRequestModel.model(ImageModels.dallE3),
      prompt: 'A cute baby sea otter',
      quality: ImageQuality.hd,
      size: ImageSize.v1024x1792,
      style: ImageStyle.natural,
    ),
  );
  print(res.data.first.url);
}
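
/// Illustrative helper (not part of the package, not wired into main):
/// downloads the image URL returned by createImage to a local file using
/// only dart:io. The url and path parameters are hypothetical inputs.
Future<void> _downloadImage(final String url, final String path) async {
  final httpClient = HttpClient();
  try {
    final request = await httpClient.getUrl(Uri.parse(url));
    final response = await request.close();
    // Collect the response body chunks into a single byte list.
    final bytes = await response.fold<List<int>>(
      <int>[],
      (acc, chunk) => acc..addAll(chunk),
    );
    await File(path).writeAsBytes(bytes);
  } finally {
    httpClient.close();
  }
}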

/// Demonstrates the Models API: list all models, then retrieve one by id.
Future<void> _models(final OpenAIClient client) async {
  final res1 = await client.listModels();
  print(res1.data.first.id);

  final res2 = await client.retrieveModel(model: 'gpt-4');
  print(res2.ownedBy);
}

/// Demonstrates the Moderations API and inspects the violence category.
Future<void> _moderations(final OpenAIClient client) async {
  final res = await client.createModeration(
    request: const CreateModerationRequest(
      input: ModerationInput.string('I want to kill them.'),
    ),
  );
  print(res.results.first.categories.violence);
  print(res.results.first.categoryScores.violence);
}