Skip to content

Commit dcc15df

Browse files
committed
chore(release): v0.2.9
1 parent 3110641 commit dcc15df

8 files changed

Lines changed: 186 additions & 35 deletions

File tree

.gitignore

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -39,3 +39,6 @@ node_modules/
3939
npm-debug.log*
4040
yarn-error.log*
4141
pnpm-debug.log*
42+
43+
# Local contributor guide
44+
AGENTS.md

Cargo.lock

Lines changed: 2 additions & 2 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

RELEASING.md

Lines changed: 38 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,38 @@
1+
# Releasing Good Commit
2+
3+
Use this checklist to ship a new version with working Homebrew + npm updates.
4+
5+
## 1) Bump versions
6+
7+
- `crates/core/Cargo.toml`
8+
- `crates/cli/Cargo.toml`
9+
- `npm/package.json`
10+
- `homebrew/goodcommit.rb` (version + URLs; checksums are filled by CI)
11+
12+
## 2) Commit and tag
13+
14+
```bash
15+
git commit -am "chore(release): vX.Y.Z"
16+
git tag -a vX.Y.Z -m "vX.Y.Z"
17+
git push origin main vX.Y.Z
18+
```
19+
20+
## 3) CI publishes artifacts
21+
22+
- The `release` workflow builds binaries and creates the GitHub release.
23+
- `publish-npm` runs after `release` and publishes `npm/package.json` (its version must match the tag).
24+
- `publish-brew` runs on the GitHub release and updates the tap with checksums.
25+
26+
If a workflow didn't run, trigger it manually:
27+
28+
- `release`: push the tag again or re-run the workflow in GitHub Actions.
29+
- `publish-npm`: run workflow with the tag.
30+
- `publish-brew`: run workflow with the tag.
31+
32+
## 4) Verify installs
33+
34+
```bash
35+
brew upgrade goodcommit
36+
goodcommit --version
37+
npm view goodcommit version
38+
```

crates/cli/Cargo.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
[package]
22
name = "goodcommit"
3-
version = "0.2.7"
3+
version = "0.2.9"
44
edition = "2021"
55
license = "MIT"
66
description = "Good Commit: fast, reliable AI commit messages"

crates/core/Cargo.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
[package]
22
name = "goodcommit-core"
3-
version = "0.2.7"
3+
version = "0.2.9"
44
edition = "2021"
55
license = "MIT"
66
description = "Core library for Good Commit"

crates/core/src/providers/openai.rs

Lines changed: 136 additions & 26 deletions
Original file line numberDiff line numberDiff line change
@@ -98,21 +98,18 @@ impl OpenAiProvider {
9898
user_prompt: &str,
9999
request: ProviderRequest,
100100
) -> CoreResult<String> {
101-
let base = self.responses_base_payload(system_prompt, user_prompt, request.temperature);
101+
let base = self.responses_base_payload(system_prompt, user_prompt, Some(request.temperature));
102102

103103
match self
104-
.complete_responses_with_param(&base, "max_output_tokens", request.max_output_tokens)
104+
.complete_responses_with_fallbacks(&base, request.max_output_tokens)
105105
.await
106106
{
107107
Ok(message) => Ok(message),
108108
Err(err) => {
109-
if is_unsupported_param(&err, "max_output_tokens") {
109+
if is_unsupported_param(&err, "temperature") {
110+
let base = self.responses_base_payload(system_prompt, user_prompt, None);
110111
return self
111-
.complete_responses_with_param(
112-
&base,
113-
"max_completion_tokens",
114-
request.max_output_tokens,
115-
)
112+
.complete_responses_with_fallbacks(&base, request.max_output_tokens)
116113
.await;
117114
}
118115
Err(err)
@@ -126,33 +123,51 @@ impl OpenAiProvider {
126123
user_prompt: &str,
127124
request: ProviderRequest,
128125
) -> CoreResult<String> {
129-
let body = serde_json::json!({
130-
"model": self.model,
131-
"messages": [
132-
{ "role": "system", "content": system_prompt },
133-
{ "role": "user", "content": user_prompt }
134-
],
135-
"max_tokens": request.max_output_tokens,
136-
"temperature": request.temperature
137-
});
126+
let body = self.chat_payload(
127+
system_prompt,
128+
user_prompt,
129+
request.max_output_tokens,
130+
Some(request.temperature),
131+
);
138132

139-
let request = self
133+
let http_request = self
140134
.client
141135
.post(self.chat_url())
142136
.bearer_auth(&self.api_key)
143137
.json(&body);
144138

145-
let json = self.send_with_retries(request).await?;
139+
let json = match self.send_with_retries(http_request).await {
140+
Ok(json) => json,
141+
Err(err) => {
142+
if is_unsupported_param(&err, "temperature") {
143+
let body = self.chat_payload(
144+
system_prompt,
145+
user_prompt,
146+
request.max_output_tokens,
147+
None,
148+
);
149+
let http_request = self
150+
.client
151+
.post(self.chat_url())
152+
.bearer_auth(&self.api_key)
153+
.json(&body);
154+
let json = self.send_with_retries(http_request).await?;
155+
return parse_chat_output(&json);
156+
}
157+
return Err(err);
158+
}
159+
};
160+
146161
parse_chat_output(&json)
147162
}
148163

149164
fn responses_base_payload(
150165
&self,
151166
system_prompt: &str,
152167
user_prompt: &str,
153-
temperature: f32,
168+
temperature: Option<f32>,
154169
) -> Value {
155-
serde_json::json!({
170+
let mut payload = serde_json::json!({
156171
"model": self.model,
157172
"input": [
158173
{
@@ -163,9 +178,62 @@ impl OpenAiProvider {
163178
"role": "user",
164179
"content": [{ "type": "input_text", "text": user_prompt }]
165180
}
181+
]
182+
});
183+
184+
if let Some(obj) = payload.as_object_mut() {
185+
if let Some(value) = temperature {
186+
obj.insert("temperature".to_string(), serde_json::json!(value));
187+
}
188+
}
189+
190+
payload
191+
}
192+
193+
fn chat_payload(
194+
&self,
195+
system_prompt: &str,
196+
user_prompt: &str,
197+
max_tokens: u32,
198+
temperature: Option<f32>,
199+
) -> Value {
200+
let mut payload = serde_json::json!({
201+
"model": self.model,
202+
"messages": [
203+
{ "role": "system", "content": system_prompt },
204+
{ "role": "user", "content": user_prompt }
166205
],
167-
"temperature": temperature
168-
})
206+
"max_tokens": max_tokens
207+
});
208+
209+
if let Some(obj) = payload.as_object_mut() {
210+
if let Some(value) = temperature {
211+
obj.insert("temperature".to_string(), serde_json::json!(value));
212+
}
213+
}
214+
215+
payload
216+
}
217+
218+
async fn complete_responses_with_fallbacks(
219+
&self,
220+
base: &Value,
221+
max_tokens: u32,
222+
) -> CoreResult<String> {
223+
match self
224+
.complete_responses_with_param(base, "max_output_tokens", max_tokens)
225+
.await
226+
{
227+
Ok(message) => Ok(message),
228+
Err(err) => {
229+
if is_unsupported_param(&err, "max_output_tokens") {
230+
return self
231+
.complete_responses_with_param(base, "max_completion_tokens", max_tokens)
232+
.await;
233+
}
234+
Err(err)
235+
}
236+
}
169237
}
170238

171239
async fn complete_responses_with_param(
@@ -274,7 +342,7 @@ mod tests {
274342
)
275343
.expect("provider");
276344

277-
let payload = provider.responses_base_payload("system", "user", 0.2);
345+
let payload = provider.responses_base_payload("system", "user", Some(0.2));
278346
let input = payload
279347
.get("input")
280348
.and_then(|value| value.as_array())
@@ -306,6 +374,46 @@ mod tests {
306374
Some("user")
307375
);
308376
}
377+
378+
#[test]
379+
fn responses_payload_omits_temperature_when_none() {
380+
let provider = OpenAiProvider::new(
381+
"gpt-5-nano-2025-08-07".to_string(),
382+
"https://api.openai.com/v1".to_string(),
383+
OpenAiMode::Responses,
384+
5,
385+
Some("test-key".to_string()),
386+
)
387+
.expect("provider");
388+
389+
let payload = provider.responses_base_payload("system", "user", None);
390+
assert!(payload.get("temperature").is_none());
391+
}
392+
393+
#[test]
394+
fn chat_payload_omits_temperature_when_none() {
395+
let provider = OpenAiProvider::new(
396+
"gpt-5-nano-2025-08-07".to_string(),
397+
"https://api.openai.com/v1".to_string(),
398+
OpenAiMode::Chat,
399+
5,
400+
Some("test-key".to_string()),
401+
)
402+
.expect("provider");
403+
404+
let payload = provider.chat_payload("system", "user", 100, None);
405+
assert!(payload.get("temperature").is_none());
406+
}
407+
408+
#[test]
409+
fn unsupported_param_matches_openai_message() {
410+
let err = CoreError::Provider(
411+
"openai error 400 Bad Request: {\"error\": {\"message\": \"Unsupported parameter: 'temperature' is not supported with this model.\", \"type\": \"invalid_request_error\", \"param\": \"temperature\", \"code\": null}}"
412+
.to_string(),
413+
);
414+
415+
assert!(is_unsupported_param(&err, "temperature"));
416+
}
309417
}
310418

311419
fn should_retry(status: StatusCode) -> bool {
@@ -315,6 +423,8 @@ fn should_retry(status: StatusCode) -> bool {
315423
}
316424

317425
fn is_unsupported_param(err: &CoreError, param: &str) -> bool {
318-
let message = err.to_string();
319-
message.contains("unsupported_parameter") && message.contains(param)
426+
let message = err.to_string().to_lowercase();
427+
let param = param.to_lowercase();
428+
(message.contains("unsupported_parameter") || message.contains("unsupported parameter"))
429+
&& message.contains(&param)
320430
}

homebrew/goodcommit.rb

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,14 +1,14 @@
11
class Goodcommit < Formula
22
desc "Good Commit: fast, reliable AI commit messages"
33
homepage "https://github.com/Bikz/goodcommit"
4-
version "0.2.6"
4+
version "0.2.9"
55

66
on_macos do
77
if Hardware::CPU.arm?
8-
url "https://github.com/Bikz/goodcommit/releases/download/v0.2.6/goodcommit-aarch64-apple-darwin.tar.gz"
8+
url "https://github.com/Bikz/goodcommit/releases/download/v0.2.9/goodcommit-aarch64-apple-darwin.tar.gz"
99
sha256 "REPLACE_ME"
1010
else
11-
url "https://github.com/Bikz/goodcommit/releases/download/v0.2.6/goodcommit-x86_64-apple-darwin.tar.gz"
11+
url "https://github.com/Bikz/goodcommit/releases/download/v0.2.9/goodcommit-x86_64-apple-darwin.tar.gz"
1212
sha256 "REPLACE_ME"
1313
end
1414
end
@@ -18,7 +18,7 @@ class Goodcommit < Formula
1818
odie "linux arm64 builds are not yet available"
1919
end
2020

21-
url "https://github.com/Bikz/goodcommit/releases/download/v0.2.6/goodcommit-x86_64-unknown-linux-gnu.tar.gz"
21+
url "https://github.com/Bikz/goodcommit/releases/download/v0.2.9/goodcommit-x86_64-unknown-linux-gnu.tar.gz"
2222
sha256 "REPLACE_ME"
2323
end
2424

npm/package.json

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
{
22
"name": "goodcommit",
3-
"version": "0.2.7",
3+
"version": "0.2.9",
44
"description": "Good Commit: fast, reliable AI commit messages",
55
"license": "MIT",
66
"repository": {

0 commit comments

Comments
 (0)