#!/usr/bin/env python3

# Copyright (c) 2025 Kristofer Berggren
# All rights reserved.
#
# nchat is distributed under the MIT license, see LICENSE for details.

import os
import sys
import json
import argparse
import urllib.request
import urllib.error

def get_api_key(api_key_env: str|None) -> str|None:
    """Return API key from `api_key_env`. If None, return None (no auth)."""
    if api_key_env is None:
        return None
    api_key = os.getenv(api_key_env)
    if not api_key:
        print(f"Error: Please set the {api_key_env} environment variable.", file=sys.stderr)
        sys.exit(1)
    return api_key

def read_chat_file(path: str) -> list[str]:
    """Read *path* as UTF-8 and return its lines without trailing newlines.

    Exits with an error message if the file does not exist.
    """
    try:
        handle = open(path, "r", encoding="utf-8")
    except FileNotFoundError:
        print(f"Error: File '{path}' not found.", file=sys.stderr)
        sys.exit(1)
    lines: list[str] = []
    with handle:
        for raw_line in handle:
            lines.append(raw_line.rstrip("\n"))
    return lines

def parse_chat(lines: list[str]) -> tuple[str, list[dict]]:
    """Parse chat-log lines into ``(your_name, messages)``.

    The last non-blank line must be ``'YourName:'`` — a name, a colon, and no
    message — identifying the speaker whose reply is being suggested. Earlier
    ``'Name: message'`` lines become chat messages: lines from *your_name* get
    role ``'assistant'`` (they are examples of the voice to imitate), all
    others role ``'user'``. Blank lines and lines without a colon are skipped.
    Exits with an error message on malformed input.

    The caller's *lines* list is left unmodified.
    """
    # Fix: work on a copy — the original popped trailing blanks off the
    # caller's list, mutating an input argument.
    trimmed = list(lines)
    while trimmed and not trimmed[-1].strip():
        trimmed.pop()

    if not trimmed or ":" not in trimmed[-1]:
        print("Error: Last line must be 'YourName:'", file=sys.stderr)
        sys.exit(1)

    last_sender, last_rest = trimmed[-1].split(":", 1)
    if last_rest.strip():
        print("Error: Last line must end with a colon and no message (e.g., 'Stanley:')", file=sys.stderr)
        sys.exit(1)
    your_name = last_sender.strip()
    if not your_name:
        print("Error: Empty name on last line.", file=sys.stderr)
        sys.exit(1)

    messages = []
    for ln in trimmed[:-1]:
        if not ln.strip():
            continue
        if ":" not in ln:
            continue
        sender, content = ln.split(":", 1)
        sender = sender.strip()
        content = content.lstrip()
        role = "assistant" if sender == your_name else "user"
        messages.append({"role": role, "content": f"{sender}: {content}"})

    if not messages:
        print("Error: No valid 'Name: message' lines before the final 'YourName:' marker.", file=sys.stderr)
        sys.exit(1)

    return your_name, messages

def build_payload(model: str, your_name: str, chat_messages: list[dict], temperature: float|None) -> dict:
    system_msg = {
        "role": "system",
        "content": f"You are {your_name} in a chat. Suggest {your_name}'s next reply."
    }
    final_instruction = {
        "role": "user",
        "content": f"Suggest {your_name}'s next reply."
    }
    payload = {
        "model": model,
        "messages": [system_msg] + chat_messages + [final_instruction]
    }
    if temperature is not None:
        payload["temperature"] = temperature
    return payload

def send_request(payload: dict, api_url: str, api_key: str | None, verbose: bool, timeout: int) -> str:
    """POST *payload* to *api_url* and return the first choice's message content.

    *api_key* may be None (custom OpenAI-compatible hosts resolve no key);
    in that case no Authorization header is sent. Previously a literal
    'Bearer None' header was sent, which some servers reject.
    Exits with an error message on HTTP, network, or JSON failures, or when
    the response does not have the expected 'choices' structure.
    """
    headers = {"Content-Type": "application/json"}
    # Only authenticate when a key was actually resolved.
    if api_key is not None:
        headers["Authorization"] = f"Bearer {api_key}"

    if verbose:
        print("=== Request ===", file=sys.stderr)
        print(json.dumps(payload, ensure_ascii=False, indent=2), file=sys.stderr)

    req = urllib.request.Request(
        api_url,
        data=json.dumps(payload).encode("utf-8"),
        headers=headers
    )

    try:
        with urllib.request.urlopen(req, timeout=timeout) as resp:
            body = resp.read().decode("utf-8")
            result = json.loads(body)
    except urllib.error.HTTPError as e:
        # Prefer the server's structured error message when the body is JSON.
        try:
            err_json = json.loads(e.read().decode("utf-8"))
            msg = err_json.get("error", {}).get("message") or str(err_json)
        except Exception:
            msg = f"{e.code} {e.reason}"
        print(f"HTTP Error: {msg}", file=sys.stderr)
        sys.exit(1)
    except urllib.error.URLError as e:
        print(f"URL Error: {e.reason}", file=sys.stderr)
        sys.exit(1)
    except json.JSONDecodeError:
        print("Error: Response was not valid JSON.", file=sys.stderr)
        sys.exit(1)

    try:
        return result["choices"][0]["message"]["content"]
    except (KeyError, IndexError):
        if verbose:
            print("=== Raw Response ===", file=sys.stderr)
            print(json.dumps(result, ensure_ascii=False, indent=2), file=sys.stderr)
        print("Error: Unexpected response structure.", file=sys.stderr)
        sys.exit(1)

def main():
    parser = argparse.ArgumentParser(
        description="Suggest your next reply from a simple chat log using OpenAI/Gemini or an OpenAI-compatible server."
    )
    parser.add_argument("-c", "--chat-completion", required=True,
                        help="Path to input chat file. Last line must be 'YourName:'.")
    parser.add_argument("-s", "--service", default="openai",
                        help="Service: openai (default), gemini, or host[:port]/URL for OpenAI-compatible server.")
    parser.add_argument("-m", "--model", default=None,
                        help="Model name. Defaults depend on --service (openai: gpt-4o-mini, gemini: gemini-2.0-flash).")
    parser.add_argument("-t", "--temperature", type=float,
                        help="Sampling temperature (e.g., 0.2).")
    parser.add_argument("-v", "--verbose", action="store_true",
                        help="Print request payload to stderr.")
    parser.add_argument("-T", "--timeout", type=int, default=10,
                        help="Network timeout in seconds (default: 10).")
    args = parser.parse_args()

    if args.timeout <= 0:
        print("Error: -T / --timeout must be > 0 seconds.", file=sys.stderr)
        sys.exit(1)

    # Resolve service -> api_url, api_key_env, and default model
    service = args.service.strip()
    if service == "openai":
        api_url = "https://api.openai.com/v1/chat/completions"
        api_key_env = "OPENAI_API_KEY"
        service_default_model = "gpt-4o-mini"
    elif service == "gemini":
        api_url = "https://generativelanguage.googleapis.com/v1beta/openai/chat/completions"
        api_key_env = "GEMINI_API_KEY"
        service_default_model = "gemini-2.0-flash"
    else:
        base = service if service.startswith(("http://", "https://")) else f"http://{service}"
        api_url = base.rstrip("/") + "/v1/chat/completions"
        api_key_env = None  # no auth for custom hosts
        service_default_model = "gpt-4o-mini"

    api_key = get_api_key(api_key_env)
    model = args.model if args.model else service_default_model

    lines = read_chat_file(args.chat_completion)
    your_name, chat_messages = parse_chat(lines)
    payload = build_payload(model, your_name, chat_messages, args.temperature)
    reply = send_request(payload, api_url, api_key, verbose=args.verbose, timeout=args.timeout)
    reply_without_name = reply.split(":", 1)[1].strip() if ":" in reply else reply
    print(reply_without_name)

# Run the CLI only when executed as a script, not when imported as a module.
if __name__ == "__main__":
    main()
