Skip to content

Commit 4654200

Browse files
committed
fix: LM Studio/Ollama count as real providers; add swarm command; improve help
1 parent d4c4ad3 commit 4654200

4 files changed

Lines changed: 123 additions & 7 deletions

File tree

crates/devsper-bin/src/main.rs

Lines changed: 8 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -361,7 +361,7 @@ async fn run_command(
361361
router.add_provider(Arc::new(LiteLlmProvider::new(base_url, api_key)));
362362
has_real_provider = true;
363363
}
364-
// LM Studio
364+
// LM Studio — counts as real if URL is explicitly set
365365
{
366366
let base_url = std::env::var("LMSTUDIO_BASE_URL")
367367
.unwrap_or_else(|_| "http://localhost:1234".into());
@@ -371,9 +371,16 @@ async fn run_command(
371371
provider = provider.with_api_key(api_key);
372372
}
373373
router.add_provider(Arc::new(provider));
374+
if std::env::var("LMSTUDIO_BASE_URL").is_ok() {
375+
has_real_provider = true;
376+
}
374377
}
378+
// Ollama — counts as real if host is explicitly set
375379
let ollama_host = std::env::var("OLLAMA_HOST")
376380
.unwrap_or_else(|_| "http://localhost:11434".into());
381+
if std::env::var("OLLAMA_HOST").is_ok() {
382+
has_real_provider = true;
383+
}
377384
router.add_provider(Arc::new(OllamaProvider::new().with_base_url(ollama_host)));
378385
router.add_provider(Arc::new(MockProvider::new("[Task completed by agent]")));
379386
if !has_real_provider {

crates/devsper-py/src/lib.rs

Lines changed: 9 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -68,19 +68,26 @@ fn build_router() -> (Arc<ModelRouter>, bool) {
6868
router.add_provider(Arc::new(LiteLlmProvider::new(base_url, api_key)));
6969
has_real = true;
7070
}
71-
// LM Studio
71+
// LM Studio — counts as real if URL is explicitly set
7272
{
7373
let base_url = std::env::var("LMSTUDIO_BASE_URL")
7474
.unwrap_or_else(|_| "http://localhost:1234".into());
7575
let api_key = std::env::var("LMSTUDIO_API_KEY").unwrap_or_default();
76-
let mut provider = LmStudioProvider::new().with_base_url(base_url);
76+
let mut provider = LmStudioProvider::new().with_base_url(base_url.clone());
7777
if !api_key.is_empty() {
7878
provider = provider.with_api_key(api_key);
7979
}
8080
router.add_provider(Arc::new(provider));
81+
if std::env::var("LMSTUDIO_BASE_URL").is_ok() {
82+
has_real = true;
83+
}
8184
}
85+
// Ollama — counts as real if host is explicitly set
8286
let ollama_host = std::env::var("OLLAMA_HOST")
8387
.unwrap_or_else(|_| "http://localhost:11434".into());
88+
if std::env::var("OLLAMA_HOST").is_ok() {
89+
has_real = true;
90+
}
8491
router.add_provider(Arc::new(OllamaProvider::new().with_base_url(ollama_host)));
8592
router.add_provider(Arc::new(MockProvider::new("[Task completed by agent]")));
8693

python/devsper/_cli.py

Lines changed: 105 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -78,11 +78,22 @@ def _kdel(provider: str, field: str) -> bool:
7878
@click.option("-v", "--verbose", is_flag=True, help="Enable verbose logging.")
7979
@click.pass_context
8080
def cli(ctx: click.Context, verbose: bool) -> None:
81-
"""Devsper — self-evolving AI workflow engine.
81+
"""AI swarm runtime built in Rust.
8282
8383
\b
84-
Run AI workflows, manage provider credentials, and orchestrate
85-
distributed agent clusters.
84+
Give it a task — it breaks it into a graph of steps, runs them in
85+
parallel across your LLM provider, and returns the result.
86+
87+
\b
88+
Quick start:
89+
devsper swarm "write a blog post about Rust"
90+
devsper run workflow.devsper --input topic="climate change"
91+
92+
\b
93+
Providers (set via env or keychain):
94+
ANTHROPIC_API_KEY OPENAI_API_KEY GITHUB_TOKEN
95+
LMSTUDIO_BASE_URL OLLAMA_HOST LITELLM_BASE_URL
96+
ZAI_API_KEY AZURE_OPENAI_* AZURE_FOUNDRY_*
8697
8798
\b
8899
Shell completions:
@@ -95,6 +106,97 @@ def cli(ctx: click.Context, verbose: bool) -> None:
95106

96107
# ── run ──────────────────────────────────────────────────────────────────────
97108

109+
@cli.command()
110+
@click.argument("task")
111+
@click.option("-o", "--output", default=None, type=click.Path(),
112+
help="Output directory (default: temp dir).")
113+
@click.option("--model", default=None, metavar="MODEL",
114+
help="LLM model (e.g. google/gemma-4-e4b, claude-sonnet-4-6).")
115+
@click.option("--workers", default=4, show_default=True, type=int,
116+
help="Parallel worker count.")
117+
@click.option("--no-plan", is_flag=True, default=False,
118+
help="Skip planning step, run as single task.")
119+
def swarm(task: str, output: Optional[str], model: Optional[str], workers: int, no_plan: bool) -> None:
120+
"""Run any task through the AI swarm.
121+
122+
\b
123+
The swarm plans the task into parallel subtasks, executes them
124+
concurrently, and writes all results to an output directory.
125+
126+
\b
127+
Examples:
128+
devsper swarm "write a research paper on quantum computing"
129+
devsper swarm "build a todo app with Redis" --model google/gemma-4-e4b
130+
devsper swarm "analyze this codebase" -o ./results --no-plan
131+
"""
132+
import json as _json
133+
import os
134+
import re
135+
import tempfile
136+
137+
from devsper._core import NodeSpec, run_specs
138+
139+
click.echo(f"Task: {task}")
140+
141+
if not no_plan:
142+
click.echo("Planning subtasks...")
143+
plan_prompt = (
144+
"Break this task into 3-5 independent subtasks for parallel AI execution.\n"
145+
f"Task: {task}\n\n"
146+
"Return ONLY a valid JSON array, no explanation:\n"
147+
'[{"id":"step1","name":"short name","prompt":"full prompt","depends_on":[]}]'
148+
)
149+
plan_spec = NodeSpec(plan_prompt, model=model)
150+
plan_result = run_specs([plan_spec])
151+
plan_text = next(iter(plan_result.values()), "")
152+
153+
match = re.search(r"\[.*?\]", plan_text, re.DOTALL)
154+
steps: list[dict] = []
155+
if match:
156+
try:
157+
steps = _json.loads(match.group())
158+
except Exception:
159+
pass
160+
161+
if steps:
162+
click.echo(f"Plan: {len(steps)} steps")
163+
id_to_spec: dict[str, NodeSpec] = {}
164+
for step in steps:
165+
deps = [id_to_spec[d] for d in step.get("depends_on", []) if d in id_to_spec]
166+
spec = NodeSpec(step["prompt"], model=model, depends_on=deps or None)
167+
id_to_spec[step["id"]] = spec
168+
specs = list(id_to_spec.values())
169+
names = [s.get("name", s["id"]) for s in steps]
170+
else:
171+
click.echo("Plan parse failed — running as single task.")
172+
specs = [NodeSpec(task, model=model)]
173+
names = ["result"]
174+
else:
175+
specs = [NodeSpec(task, model=model)]
176+
names = ["result"]
177+
178+
click.echo(f"Executing {len(specs)} task(s) in parallel...")
179+
results = run_specs(specs)
180+
181+
out_dir = output or tempfile.mkdtemp(prefix="devsper-swarm-")
182+
os.makedirs(out_dir, exist_ok=True)
183+
184+
for (node_id, content), name in zip(results.items(), names):
185+
safe_name = re.sub(r"[^\w\-]", "_", name)
186+
with open(os.path.join(out_dir, f"{safe_name}.md"), "w") as f:
187+
f.write(f"# {name}\n\n{content}\n")
188+
189+
combined = "\n\n---\n\n".join(
190+
f"# {n}\n\n{c}" for n, c in zip(names, results.values())
191+
)
192+
combined_path = os.path.join(out_dir, "combined.md")
193+
with open(combined_path, "w") as f:
194+
f.write(f"# {task}\n\n{combined}\n")
195+
196+
click.echo(f"\nDone → {out_dir}/")
197+
click.echo(f"combined: {combined_path}")
198+
199+
98200
@cli.command()
99201
@click.argument("workflow", type=click.Path(exists=True, dir_okay=False))
100202
@click.option("--input", "-i", "inputs", multiple=True, metavar="KEY=VALUE",

python/pyproject.toml

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -4,7 +4,7 @@ build-backend = "maturin"
44

55
[project]
66
name = "devsper"
7-
version = "3.1.4"
7+
version = "3.1.5"
88
description = "Devsper — self-evolving AI workflow engine"
99
readme = "README.md"
1010
requires-python = ">=3.11"

0 commit comments

Comments (0)