Skip to content

Commit 61a523d

Browse files
committed
fixes #442 - add auto-compose support for custom prompt and max token limit
1 parent 7cf6dee commit 61a523d

File tree

4 files changed

+24
-9
lines changed

4 files changed

+24
-9
lines changed

doc/AUTOCOMPOSE.md

Lines changed: 6 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -53,6 +53,10 @@ Configuring Custom Service / Model
5353
----------------------------------
5454
Edit `ui.conf` to match the desired compose path and usage.
5555

56+
Determine the path of `compose` based on nchat install path:
57+
58+
realpath $(dirname $(which nchat))/../libexec/nchat/compose
59+
5660
Example usage with Google Gemini and custom model:
5761

5862
auto_compose_command=/usr/local/libexec/nchat/compose -s gemini -m gemini-2.0-flash -c '%1'
@@ -61,8 +65,7 @@ Example usage with OpenAI and custom model and longer timeout of 60 secs:
6165

6266
auto_compose_command=/usr/local/libexec/nchat/compose -s openai -m gpt-5-nano -T 60 -c '%1'
6367

68+
Example usage with custom prompt and max token limit of 100:
6469

65-
Generally one can determine the path of compose based on nchat install path:
66-
67-
realpath $(dirname $(which nchat))/../libexec/nchat/compose
70+
auto_compose_command=/usr/local/libexec/nchat/compose -p "Suggest {your_name}'s next reply in a joking manner." -M 100 -c '%1'
6871

lib/common/src/version.h

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -7,4 +7,4 @@
77

88
#pragma once
99

10-
#define NCHAT_VERSION "5.12.12"
10+
#define NCHAT_VERSION "5.12.13"

src/compose

Lines changed: 16 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -66,21 +66,29 @@ def parse_chat(lines: list[str]) -> tuple[str, list[dict]]:
6666

6767
return your_name, messages
6868

69-
def build_payload(model: str, your_name: str, chat_messages: list[dict], temperature: float|None) -> dict:
69+
def build_payload(model: str, your_name: str, chat_messages: list[dict], temperature: float|None, prompt: str | None, max_tokens: int | None) -> dict:
70+
prompt = prompt if prompt else "Suggest {your_name}'s next reply."
71+
prompt = prompt.replace("{your_name}", your_name)
72+
7073
system_msg = {
7174
"role": "system",
72-
"content": f"You are {your_name} in a chat. Suggest {your_name}'s next reply."
75+
"content": f"You are {your_name} in a chat. " + prompt
7376
}
7477
final_instruction = {
7578
"role": "user",
76-
"content": f"Suggest {your_name}'s next reply."
79+
"content": prompt
7780
}
7881
payload = {
7982
"model": model,
8083
"messages": [system_msg] + chat_messages + [final_instruction]
8184
}
85+
8286
if temperature is not None:
8387
payload["temperature"] = temperature
88+
89+
if max_tokens is not None:
90+
payload["max_tokens"] = max_tokens
91+
8492
return payload
8593

8694
def send_request(payload: dict, api_url: str, api_key: str, verbose: bool, timeout: int) -> str:
@@ -136,6 +144,10 @@ def main():
136144
help="Service: openai (default), gemini, or host[:port]/URL for OpenAI-compatible server.")
137145
parser.add_argument("-m", "--model", default=None,
138146
help="Model name. Defaults depend on --service (openai: gpt-4o-mini, gemini: gemini-2.0-flash).")
147+
parser.add_argument("-M", "--max-tokens", type=int,
148+
help="Maximum number of output tokens to generate.")
149+
parser.add_argument("-p", "--prompt", default=None,
150+
help="Instruction prompt (default: \"Suggest {your_name}'s next reply.\")")
139151
parser.add_argument("-t", "--temperature", type=float,
140152
help="Sampling temperature (e.g., 0.2).")
141153
parser.add_argument("-v", "--verbose", action="store_true",
@@ -169,7 +181,7 @@ def main():
169181

170182
lines = read_chat_file(args.chat_completion)
171183
your_name, chat_messages = parse_chat(lines)
172-
payload = build_payload(model, your_name, chat_messages, args.temperature)
184+
payload = build_payload(model, your_name, chat_messages, args.temperature, args.prompt, args.max_tokens)
173185
reply = send_request(payload, api_url, api_key, verbose=args.verbose, timeout=args.timeout)
174186
reply_without_name = reply.split(":", 1)[1].strip() if ":" in reply else reply
175187
print(reply_without_name)

src/nchat.1

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
.\" DO NOT MODIFY THIS FILE! It was generated by help2man.
2-
.TH NCHAT "1" "November 2025" "nchat 5.12.12" "User Commands"
2+
.TH NCHAT "1" "November 2025" "nchat 5.12.13" "User Commands"
33
.SH NAME
44
nchat \- ncurses chat
55
.SH SYNOPSIS

0 commit comments

Comments (0)