From 3f6d3cacec26d36a6591c3950d9ca9a32dd052b5 Mon Sep 17 00:00:00 2001 From: Valent Turkovic Date: Sun, 29 Mar 2026 20:54:47 +0200 Subject: [PATCH] Add OpenRouter support (--tool openrouter) Adds OpenRouter as a third tool option alongside amp and claude. This enables running Ralph with cloud LLMs via OpenRouter API, no local AI tools needed. Usage: OPENROUTER_API_KEY=sk-... ./ralph.sh --tool openrouter 10 OPENROUTER_MODEL=qwen/qwen3-235b-a22b:free ./ralph.sh --tool openrouter New files: - openrouter_call.py: OpenRouter API caller (reads prompt.md, calls API) Supports any OpenRouter model including free tier models like nvidia/llama-3.1-nemotron-ultra-253b:free. Co-Authored-By: Claude --- openrouter_call.py | 103 +++++++++++++++++++++++++++++++++++++++++++++ ralph.sh | 11 +++-- 2 files changed, 111 insertions(+), 3 deletions(-) create mode 100644 openrouter_call.py diff --git a/openrouter_call.py b/openrouter_call.py new file mode 100644 index 000000000..04277aea2 --- /dev/null +++ b/openrouter_call.py @@ -0,0 +1,103 @@ +#!/usr/bin/env python3 +""" +OpenRouter API caller for Ralph loop. + +Sends the prompt to an OpenRouter model and prints the response. +Requires OPENROUTER_API_KEY environment variable or .env file.
+ +Usage: + python openrouter_call.py [--model MODEL] [--temperature T] + echo "prompt" | python openrouter_call.py - [--model MODEL] +""" +import io +import json +import os +import sys +import urllib.request + +sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding='utf-8', errors='replace') +sys.stderr = io.TextIOWrapper(sys.stderr.buffer, encoding='utf-8', errors='replace') + + +def load_api_key(): + """Load OpenRouter API key from environment or .env file.""" + key = os.environ.get('OPENROUTER_API_KEY', '') + if key: + return key + # Try .env file + for p in ['.env', os.path.join(os.path.dirname(__file__), '.env')]: + try: + with open(p, 'r') as f: + for line in f: + line = line.strip() + if line.startswith('OPENROUTER_API_KEY='): + return line.split('=', 1)[1].strip().strip('"').strip("'") + except FileNotFoundError: + continue + return '' + + +def main(): + import argparse + parser = argparse.ArgumentParser(description='Call OpenRouter API with a prompt file') + parser.add_argument('prompt_file', help='Path to prompt file, or - for stdin') + parser.add_argument('--model', default='nvidia/llama-3.1-nemotron-ultra-253b:free', + help='OpenRouter model ID (default: nvidia/llama-3.1-nemotron-ultra-253b:free)') + parser.add_argument('--temperature', type=float, default=0.7, help='Temperature (default: 0.7)') + parser.add_argument('--max-tokens', type=int, default=16384, help='Max tokens (default: 16384)') + parser.add_argument('--system-prompt', default=None, help='Optional system prompt file') + args = parser.parse_args() + + api_key = load_api_key() + if not api_key: + print("ERROR: No OPENROUTER_API_KEY found. 
Set it in environment or .env file.", file=sys.stderr) + sys.exit(1) + + # Read prompt + if args.prompt_file == '-': + prompt = sys.stdin.read() + else: + with open(args.prompt_file, 'r', encoding='utf-8') as f: + prompt = f.read() + + messages = [] + + # Optional system prompt + if args.system_prompt: + with open(args.system_prompt, 'r', encoding='utf-8') as f: + messages.append({'role': 'system', 'content': f.read()}) + + messages.append({'role': 'user', 'content': prompt}) + + payload = json.dumps({ + 'model': args.model, + 'messages': messages, + 'temperature': args.temperature, + 'max_tokens': args.max_tokens, + }).encode() + + req = urllib.request.Request( + 'https://openrouter.ai/api/v1/chat/completions', + data=payload, + headers={ + 'Content-Type': 'application/json', + 'Authorization': f'Bearer {api_key}', + } + ) + + try: + resp = urllib.request.urlopen(req, timeout=600) + data = json.loads(resp.read()) + content = data.get('choices', [{}])[0].get('message', {}).get('content', '') + print(content) + except urllib.error.HTTPError as e: + body = e.read().decode('utf-8', errors='replace') + print(f'ERROR: HTTP {e.code}: {body}', file=sys.stderr) + sys.exit(1) + except Exception as e: + print(f'ERROR: {e}', file=sys.stderr) + sys.exit(1) + + +if __name__ == '__main__': + main() diff --git a/ralph.sh b/ralph.sh index baff052ac..152d6773a 100755 --- a/ralph.sh +++ b/ralph.sh @@ -29,8 +29,8 @@ while [[ $# -gt 0 ]]; do done # Validate tool choice -if [[ "$TOOL" != "amp" && "$TOOL" != "claude" ]]; then - echo "Error: Invalid tool '$TOOL'. Must be 'amp' or 'claude'." +if [[ "$TOOL" != "amp" && "$TOOL" != "claude" && "$TOOL" != "openrouter" ]]; then + echo "Error: Invalid tool '$TOOL'. Must be 'amp', 'claude', or 'openrouter'." 
exit 1 fi SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" @@ -90,9 +90,14 @@ for i in $(seq 1 $MAX_ITERATIONS); do # Run the selected tool with the ralph prompt if [[ "$TOOL" == "amp" ]]; then OUTPUT=$(cat "$SCRIPT_DIR/prompt.md" | amp --dangerously-allow-all 2>&1 | tee /dev/stderr) || true - else + elif [[ "$TOOL" == "claude" ]]; then # Claude Code: use --dangerously-skip-permissions for autonomous operation, --print for output OUTPUT=$(claude --dangerously-skip-permissions --print < "$SCRIPT_DIR/CLAUDE.md" 2>&1 | tee /dev/stderr) || true + elif [[ "$TOOL" == "openrouter" ]]; then + # OpenRouter: call cloud LLM directly via API (no local tools needed) + OUTPUT=$(python3 "$SCRIPT_DIR/openrouter_call.py" "$SCRIPT_DIR/prompt.md" \ + --model "${OPENROUTER_MODEL:-nvidia/llama-3.1-nemotron-ultra-253b:free}" \ + 2>&1 | tee /dev/stderr) || true fi # Check for completion signal