1
- use std:: time:: Duration ;
2
1
use std:: io;
3
2
4
3
use async_openai:: types:: {
5
- AssistantObject , AssistantTools , AssistantToolsCode , CreateAssistantRequestArgs , CreateMessageRequestArgs , CreateRunRequestArgs , CreateThreadRequestArgs , MessageContent , RunStatus
4
+ ChatCompletionRequestSystemMessageArgs , ChatCompletionRequestUserMessageArgs , CreateChatCompletionRequestArgs
6
5
} ;
7
6
use async_openai:: config:: OpenAIConfig ;
8
7
use async_openai:: error:: OpenAIError ;
9
- use indicatif:: ProgressBar ;
10
8
use async_openai:: Client ;
11
- use tokio:: time:: sleep;
12
- use git2:: Repository ;
13
9
use thiserror:: Error ;
14
10
use anyhow:: Context ;
15
11
@@ -36,249 +32,57 @@ pub enum ChatError {
36
32
}
37
33
38
34
fn instruction ( ) -> String {
39
- include_str ! ( "../resources/prompt.md" ) . to_string ( )
40
- }
41
-
42
- #[ derive( Debug , Clone , PartialEq ) ]
43
- pub struct Session {
44
- pub thread_id : String ,
45
- pub assistant_id : String
46
- }
35
+ format ! ( "You are an AI assistant that generates concise and meaningful git commit messages based on provided diffs. Please adhere to the following guidelines:
47
36
48
- impl Session {
49
- pub async fn new_from_client ( client : & Client < OpenAIConfig > ) -> Result < Self , ChatError > {
50
- log :: debug! ( "Creating new session from client" ) ;
51
- let assistant = create_assistant ( client ) . await ? ;
52
- let thread_request = CreateThreadRequestArgs :: default ( ) . build ( ) ? ;
53
- let thread = client . threads ( ) . create ( thread_request ) . await ? ;
37
+ - Structure: Begin with a clear, present-tense summary.
38
+ - Content: Emphasize the changes and their rationale, excluding irrelevant details.
39
+ - Consistency: Maintain uniformity in tense, punctuation, and capitalization.
40
+ - Accuracy: Ensure the message accurately reflects the changes and their purpose.
41
+ - Present tense, imperative mood. (e.g., 'Add x to y' instead of 'Added x to y')
42
+ - Max {} chars in the output
54
43
55
- Ok ( Session {
56
- thread_id : thread. id ,
57
- assistant_id : assistant. id
58
- } )
59
- }
44
+ ## Output:
60
45
61
- // Load the session from the repository
62
- pub async fn load_from_repo ( repo : & Repository ) -> anyhow:: Result < Option < Self > > {
63
- log:: debug!( "Loading session from repo" ) ;
64
- let mut config = repo. config ( ) . context ( "Failed to load config" ) ?;
65
- let thread_id = config. get_string ( "ai.thread-id" ) . ok ( ) ;
46
+ Your output should be a commit message generated from the input diff and nothing else.
66
47
67
- let global_config = config
68
- . open_global ( )
69
- . context ( "Failed to open global config" ) ?;
70
- let assistant_id = global_config. get_string ( "ai.assistant-id" ) . ok ( ) ;
71
- log:: debug!( "Loaded session from repo: thread_id: {:?}, assistant_id: {:?}" , thread_id, assistant_id) ;
48
+ ## Input:
72
49
73
- match ( thread_id, assistant_id) {
74
- ( Some ( thread_id) , Some ( assistant_id) ) => {
75
- Ok ( Some ( Session {
76
- thread_id,
77
- assistant_id
78
- } ) )
79
- }
80
- _ => Ok ( None )
81
- }
82
- }
83
-
84
- // Save the session to the repository
85
- pub async fn save_to_repo ( & self , repo : & Repository ) -> anyhow:: Result < ( ) > {
86
- log:: debug!( "Saving session to repo" ) ;
87
- let mut config = repo. config ( ) . context ( "Failed to load config" ) ?;
88
- config. set_str ( "ai.thread-id" , self . thread_id . as_str ( ) ) ?;
89
- config. snapshot ( ) . context ( "Failed to save config" ) ?;
90
-
91
- let mut global_config = config
92
- . open_global ( )
93
- . context ( "Failed to open global config" ) ?;
94
- global_config. set_str ( "ai.assistant-id" , self . assistant_id . as_str ( ) ) ?;
95
- global_config
96
- . snapshot ( )
97
- . context ( "Failed to save global config" ) ?;
98
- Ok ( ( ) )
99
- }
50
+ INPUT:" , config:: APP . max_commit_length)
100
51
}
101
52
102
53
/// Result of a successful chat-completion call: the commit message text
/// produced by the model for the supplied diff.
#[derive(Debug, Clone, PartialEq)]
pub struct OpenAIResponse {
  /// The generated commit message, exactly as returned by the model.
  pub response: String
}
107
57
108
- // Create a new assistant
109
- async fn create_assistant ( client : & Client < OpenAIConfig > ) -> Result < AssistantObject , ChatError > {
110
- let model = config:: APP . model . clone ( ) ;
111
- let instruction = instruction ( ) ;
112
- // let example_jsonl_id = "file-a8ghhy1FbWtBKEadAj5OHJWz";
113
-
114
- let tools = vec ! [ AssistantTools :: Code ( AssistantToolsCode {
115
- r#type: "code_interpreter" . to_string( )
116
- } ) ] ;
117
-
118
- let assistant_request = CreateAssistantRequestArgs :: default ( )
119
- . name ( "Git Commit Assistant" )
120
- . instructions ( & instruction)
121
- . tools ( tools)
122
- . model ( model)
123
- . build ( ) ?;
124
-
125
- Ok ( client. assistants ( ) . create ( assistant_request) . await ?)
126
- }
127
-
128
- #[ derive( Debug , Clone ) ]
129
- struct Connection {
130
- client : Client < OpenAIConfig > ,
131
- session : Session
132
- }
133
-
134
- impl Connection {
135
- pub async fn new ( session : Option < Session > ) -> Result < Self , ChatError > {
136
- let api_key = config:: APP
137
- . openai_api_key
138
- . clone ( )
139
- . context ( "Failed to get OpenAI API key, please run `git-ai config set openai-api" ) ?;
140
- let config = OpenAIConfig :: new ( ) . with_api_key ( api_key) ;
141
- let client = Client :: with_config ( config) ;
142
-
143
- let session = match session {
144
- Some ( session) => session,
145
- None => Session :: new_from_client ( & client) . await ?
146
- } ;
147
-
148
- Ok ( Connection {
149
- client,
150
- session
151
- } )
152
- }
153
-
154
- // Create a new run
155
- async fn create_run ( & self ) -> Result < Run , ChatError > {
156
- let request = CreateRunRequestArgs :: default ( )
157
- . assistant_id ( self . session . clone ( ) . assistant_id )
158
- . build ( ) ?;
159
- let run = self
160
- . client
161
- . threads ( )
162
- . runs ( & self . session . thread_id )
163
- . create ( request)
164
- . await ?;
165
- Ok ( Run {
166
- id : run. id ,
167
- connection : self . clone ( )
168
- } )
169
- }
170
-
171
- // Get the last message from the thread
172
- async fn last_message ( & self ) -> Result < String , ChatError > {
173
- let query = [ ( "limit" , "1" ) ] ;
174
- let response = self
175
- . client
176
- . threads ( )
177
- . messages ( & self . session . thread_id )
178
- . list ( & query)
179
- . await ?;
180
- let message_id = response. data . get ( 0 ) . unwrap ( ) . id . clone ( ) ;
181
- let message = self
182
- . client
183
- . threads ( )
184
- . messages ( & self . session . thread_id )
185
- . retrieve ( & message_id)
186
- . await ?;
187
- let content = message. content . get ( 0 ) . unwrap ( ) ;
188
- let MessageContent :: Text ( text) = & content else {
189
- return Err ( ChatError :: OpenAIError ( "Message content is not text" . to_string ( ) ) ) ;
190
- } ;
191
-
192
- Ok ( text. text . value . clone ( ) )
193
- }
194
-
195
- async fn create_message ( & self , message : & str ) -> Result < ( ) , ChatError > {
196
- let message = CreateMessageRequestArgs :: default ( )
197
- . role ( "user" )
198
- . content ( message)
199
- . build ( ) ?;
200
- self
201
- . client
202
- . threads ( )
203
- . messages ( & self . session . thread_id )
204
- . create ( message)
205
- . await ?;
206
- Ok ( ( ) )
207
- }
208
-
209
- async fn into_response ( & self ) -> Result < OpenAIResponse , ChatError > {
210
- let message = self . last_message ( ) . await ?;
211
- let response = OpenAIResponse {
212
- response : message,
213
- session : self . session . clone ( )
214
- } ;
215
- Ok ( response)
216
- }
217
- }
218
-
219
- #[ derive( Debug , Clone ) ]
220
- struct Run {
221
- id : String ,
222
- connection : Connection
223
- }
224
-
225
- impl Run {
226
- pub async fn pull_status ( & self ) -> Result < RunStatus , ChatError > {
227
- Ok (
228
- self
229
- . connection
230
- . client
231
- . threads ( )
232
- . runs ( & self . connection . session . thread_id )
233
- . retrieve ( self . id . as_str ( ) )
234
- . await ?
235
- . status
236
- )
237
- }
238
- }
239
-
240
- pub async fn generate (
241
- diff : String , session : Option < Session > , progressbar : Option < ProgressBar >
242
- ) -> Result < OpenAIResponse , ChatError > {
243
- progressbar
58
+ pub async fn generate ( diff : String ) -> Result < OpenAIResponse , ChatError > {
59
+ let api_key = config:: APP
60
+ . openai_api_key
244
61
. clone ( )
245
- . map ( |pb| pb. set_message ( "Generating commit message..." ) ) ;
246
-
247
- let connection = Connection :: new ( session) . await ?;
248
- connection. create_message ( & diff) . await ?;
249
- let run = connection. create_run ( ) . await ?;
62
+ . context ( "Failed to get OpenAI API key, please run `git-ai config set openai-api" ) ?;
63
+
64
+ let config = OpenAIConfig :: new ( ) . with_api_key ( api_key) ;
65
+ let client = Client :: with_config ( config) ;
66
+ let request = CreateChatCompletionRequestArgs :: default ( )
67
+ . max_tokens ( config:: APP . max_tokens as u16 )
68
+ . model ( config:: APP . model . clone ( ) )
69
+ . messages ( [
70
+ ChatCompletionRequestSystemMessageArgs :: default ( )
71
+ . content ( instruction ( ) )
72
+ . build ( ) ?
73
+ . into ( ) ,
74
+ ChatCompletionRequestUserMessageArgs :: default ( )
75
+ . content ( diff)
76
+ . build ( ) ?
77
+ . into ( )
78
+ ] )
79
+ . build ( ) ?;
250
80
251
- return loop {
252
- match run. pull_status ( ) . await ? {
253
- RunStatus :: Completed => {
254
- break connection. into_response ( ) . await ;
255
- }
256
- RunStatus :: Failed => {
257
- break Err ( ChatError :: OpenAIError ( "Run failed" . to_string ( ) ) ) ;
258
- }
259
- RunStatus :: Cancelled => {
260
- break Err ( ChatError :: OpenAIError ( "Run cancelled" . to_string ( ) ) ) ;
261
- }
262
- RunStatus :: Expired => {
263
- break Err ( ChatError :: OpenAIError ( "Run expired" . to_string ( ) ) ) ;
264
- }
265
- RunStatus :: RequiresAction => {
266
- break Err ( ChatError :: OpenAIError ( "Run requires action" . to_string ( ) ) ) ;
267
- }
268
- RunStatus :: InProgress => {
269
- log:: debug!( "Run is in progress" ) ;
270
- // progressbar.clone().map(|pb| pb.set_message("In progress..."));
271
- }
272
- RunStatus :: Queued => {
273
- log:: debug!( "Run is queued" ) ;
274
- // progressbar.clone().map(|pb| pb.set_message("Queued..."));
275
- }
276
- RunStatus :: Cancelling => {
277
- log:: debug!( "Run is cancelling" ) ;
278
- // progressbar.clone().map(|pb| pb.set_message("Cancelling..."));
279
- }
280
- }
81
+ let response = client. chat ( ) . create ( request) . await ?;
82
+ let choise = response. choices . get ( 0 ) . unwrap ( ) ;
83
+ let text = choise. message . content . clone ( ) ;
281
84
282
- sleep ( Duration :: from_millis ( 300 ) ) . await ;
283
- } ;
85
+ Ok ( OpenAIResponse {
86
+ response : text. unwrap ( )
87
+ } )
284
88
}
0 commit comments