Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
51 changes: 46 additions & 5 deletions .github/workflows/build.yml → .github/workflows/publish_pip.yml
Original file line number Diff line number Diff line change
@@ -1,14 +1,21 @@
name: Build and Release
name: Build and Publish

on:
push:
branches: [main]
pull_request:
branches: [main]

permissions:
contents: write
id-token: write

jobs:
build:
runs-on: ubuntu-latest
defaults:
run:
working-directory: .
steps:
- uses: actions/checkout@v4
with:
Expand All @@ -22,9 +29,16 @@ jobs:
- name: Install build dependencies
run: pip install build

- name: Set version from run number
if: github.event_name == 'push' && github.ref == 'refs/heads/main'
run: echo "SETUPTOOLS_SCM_PRETEND_VERSION=0.1.dev${{ github.run_number }}" >> $GITHUB_ENV

- name: Build wheel
run: python -m build

- name: Show dist contents
run: ls -lah dist

- name: Test wheel installs and imports
run: |
pip install dist/*.whl
Expand All @@ -43,10 +57,37 @@ jobs:
name: truffile-sdist
path: dist/*.tar.gz

release:
publish-pypi:
name: Publish to PyPI
needs: build
runs-on: ubuntu-latest
if: github.event_name == 'push' && github.ref == 'refs/heads/main'
environment:
name: pypi
url: https://pypi.org/p/truffile
permissions:
id-token: write
steps:
- name: Download wheel
uses: actions/download-artifact@v4
with:
name: truffile-wheel
path: dist/

- name: Download sdist
uses: actions/download-artifact@v4
with:
name: truffile-sdist
path: dist/

- name: Publish to PyPI
uses: pypa/gh-action-pypi-publish@release/v1

github-release:
name: Create GitHub Release
needs: [build, publish-pypi]
runs-on: ubuntu-latest
if: github.event_name == 'push' && github.ref == 'refs/heads/main'
permissions:
contents: write
steps:
Expand All @@ -59,8 +100,8 @@ jobs:
- name: Create Release
uses: softprops/action-gh-release@v2
with:
tag_name: build-${{ github.run_number }}
name: truffile build ${{ github.run_number }}
tag_name: v0.1.dev${{ github.run_number }}
name: truffile v0.1.dev${{ github.run_number }}
files: dist/*.whl
generate_release_notes: true
make_latest: true
make_latest: true
2 changes: 2 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -3,3 +3,5 @@
__pycache__/
.DS_Store
dist
docs
whatsapp/
9 changes: 0 additions & 9 deletions APP.md
Original file line number Diff line number Diff line change
Expand Up @@ -281,15 +281,6 @@ default_schedule:
allowed_days: [mon, wed, fri] # optional
```

### 3. Always

Run continuously (never stops).

```yaml
default_schedule:
type: always
```

---

## Duration Format
Expand Down
8 changes: 1 addition & 7 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -63,7 +63,7 @@ metadata:
MY_VAR: value
# schedule for background apps only:
default_schedule:
type: interval # interval | times | always
type: interval # interval | times
interval:
duration: "1h" # 15m, 2h, 1d, etc.
schedule:
Expand Down Expand Up @@ -100,12 +100,6 @@ default_schedule:
allowed_days: [mon, tue, wed, thu, fri]
```

**always** - run continuously:
```yaml
default_schedule:
type: always
```

## example apps

see `example-apps/` for working examples:
Expand Down
29 changes: 5 additions & 24 deletions example-apps/focus/research/truffile.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -11,29 +11,10 @@ metadata:
environment:
PYTHONUNBUFFERED: "1"
icon_file: ./icon.png
steps:
- name: Welcome
type: welcome
content: |
This app provides tools to help your Truffle research and gather information from the web.
It includes web search, scraping and data extraction capabilities to assist in your research tasks.
Installation will set up the necessary dependencies and files, and may take a moment.
- name: Copy application files
type: files
files:
- source: ./research.py
destination: ./research.py
- name: Install dependencies
type: bash
run: |
pip install --no-cache-dir -i https://test.pypi.org/simple/ --extra-index-url https://pypi.org/simple/ 'gourmet[abrasive]==0.1.dev45'
pip install --no-cache-dir --force-reinstall requests trafilatura==2.0.0 tld==0.13.1
pip install --no-cache-dir mcp ddgs


files:
- source: ./research.py
destination: ./research.py






run: |
pip install --no-cache-dir mcp requests ddgs
175 changes: 175 additions & 0 deletions truffile/cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@
import asyncio
import ast
import signal
import socket
import sys
import threading
import time
Expand All @@ -12,6 +13,10 @@
from truffile.storage import StorageService
from truffile.client import TruffleClient, resolve_mdns, NewSessionStatus

import grpc
from truffle.infer.infer_pb2_grpc import InferenceServiceStub
from truffle.infer.model_pb2 import GetModelListRequest, Model


# ANSI colors
class C:
Expand Down Expand Up @@ -699,6 +704,159 @@ def cmd_list(args, storage: StorageService) -> int:
return 0


async def cmd_models(storage: StorageService) -> int:
    """List models on the connected device.

    Resolves the last-used device over mDNS, queries its gRPC inference
    service for the model list, and prints loaded and available models
    plus reported memory usage.

    Args:
        storage: Storage service holding persisted CLI state (used for
            the last connected device name).

    Returns:
        Process exit code: 0 on success, 1 when no device is configured,
        name resolution fails, or the RPC fails.
    """
    device = storage.state.last_used_device
    if not device:
        error("No device connected")
        print(f" {C.DIM}Run: truffile connect <device>{C.RESET}")
        return 1

    spinner = Spinner(f"Connecting to {device}")
    spinner.start()

    try:
        ip = await resolve_mdns(f"{device}.local")
    except RuntimeError:
        spinner.fail(f"Could not resolve {device}.local")
        return 1

    channel = None
    try:
        # NOTE: the stub call is synchronous; the service is assumed to
        # listen on port 80 of the resolved device.
        channel = grpc.insecure_channel(f"{ip}:80")
        stub = InferenceServiceStub(channel)
        model_list = stub.GetModelList(GetModelListRequest(use_filter=False))
        spinner.stop(success=True)
    except Exception as e:
        spinner.fail(f"Failed to get models: {e}")
        return 1
    finally:
        # Close the channel once the one-shot RPC completes (or fails) so
        # the underlying connection is not leaked.
        if channel is not None:
            channel.close()

    loaded = [m for m in model_list.models if m.state == Model.MODEL_STATE_LOADED]
    available = [m for m in model_list.models if m.state == Model.MODEL_STATE_AVAILABLE]

    print()
    print(f"{MUSHROOM} {C.BOLD}Models on {device}{C.RESET}")
    print()

    if loaded:
        for m in loaded:
            reasoner = f" {C.MAGENTA}reasoner{C.RESET}" if m.config.info.has_chain_of_thought else ""
            print(f" {C.GREEN}{CHECK}{C.RESET} {m.name}{reasoner}")
            print(f" {C.DIM}id: {m.uuid}{C.RESET}")

    if available:
        for m in available:
            print(f" {C.DIM}○ {m.name} (not loaded){C.RESET}")

    if not loaded and not available:
        print(f" {C.DIM}No models found{C.RESET}")

    print()
    # Proto integer fields default to 0 when unset; guard before dividing.
    total_mb = model_list.total_memory // (1024 * 1024) if model_list.total_memory else 0
    used_mb = model_list.used_memory // (1024 * 1024) if model_list.used_memory else 0
    print(f"{C.DIM}Memory: {used_mb}MB / {total_mb}MB{C.RESET}")

    return 0


def cmd_proxy(args, storage: StorageService) -> int:
    """Start the OpenAI-compatible proxy.

    Resolves the target device name, verifies the inference service is
    reachable (and reports how many models are loaded), then serves an
    OpenAI-style HTTP API on ``host:port`` until interrupted.

    Args:
        args: Parsed CLI arguments; may carry ``device``, ``port``,
            ``host`` and ``debug`` attributes.
        storage: Storage service holding persisted CLI state (used as
            the fallback for the device name).

    Returns:
        Process exit code: 0 on clean exit, 1 on setup failure,
        130 when interrupted with Ctrl+C.
    """
    # Prefer an explicit --device; fall back to the last connected one.
    device = args.device if hasattr(args, 'device') and args.device else storage.state.last_used_device
    if not device:
        error("No device specified or connected")
        print(f" {C.DIM}Run: truffile connect <device>{C.RESET}")
        print(f" {C.DIM}Or: truffile proxy --device <device>{C.RESET}")
        return 1

    port = args.port if hasattr(args, 'port') else 8080
    host = args.host if hasattr(args, 'host') else "127.0.0.1"
    debug = args.debug if hasattr(args, 'debug') else False

    spinner = None

    try:
        print(f"{MUSHROOM} {C.BOLD}Starting OpenAI proxy{C.RESET}")
        print()

        spinner = Spinner(f"Resolving {device}.local")
        spinner.start()

        hostname = f"{device}.local"
        ip = socket.gethostbyname(hostname)
        spinner.stop(success=True)
        # Clear the reference so later error paths never call .fail() on
        # an already-finished spinner.
        spinner = None

        grpc_address = f"{ip}:80"

        spinner = Spinner("Connecting to inference service")
        spinner.start()

        from truffile.infer.proxy import OpenAIProxy, OpenAIProxyHandler
        from http.server import ThreadingHTTPServer

        proxy = OpenAIProxy(grpc_address, include_debug=debug)

        # One-shot query purely to report the loaded-model count; close
        # the channel immediately so it does not leak for the proxy's
        # lifetime (the proxy manages its own connection).
        channel = grpc.insecure_channel(grpc_address)
        try:
            stub = InferenceServiceStub(channel)
            model_list = stub.GetModelList(GetModelListRequest(use_filter=False))
        finally:
            channel.close()
        loaded = [m for m in model_list.models if m.state == Model.MODEL_STATE_LOADED]
        spinner.stop(success=True)
        spinner = None

        print(f" {C.DIM}Device: {device} ({ip}){C.RESET}")
        print(f" {C.DIM}Models: {len(loaded)} loaded{C.RESET}")

        print()
        print(f"{C.GREEN}{CHECK}{C.RESET} Proxy running at {C.BOLD}http://{host}:{port}/v1{C.RESET}")
        print()
        print(f" {C.DIM}Use with OpenAI SDK:{C.RESET}")
        print(f" {C.CYAN}from openai import OpenAI{C.RESET}")
        print(f" {C.CYAN}client = OpenAI(base_url=\"http://{host}:{port}/v1\", api_key=\"x\"){C.RESET}")
        print()
        print(f" {C.DIM}Or set environment variables:{C.RESET}")
        print(f" {C.CYAN}export OPENAI_BASE_URL=http://{host}:{port}/v1{C.RESET}")
        print(f" {C.CYAN}export OPENAI_API_KEY=anything{C.RESET}")
        print()
        print(f" {C.DIM}Press Ctrl+C to stop{C.RESET}")
        print()

        class _Server(ThreadingHTTPServer):
            # Attach the proxy so request handlers can reach it via
            # self.server.proxy.
            def __init__(self, server_address, handler_cls):
                super().__init__(server_address, handler_cls)
                self.proxy = proxy

        server = _Server((host, port), OpenAIProxyHandler)
        try:
            server.serve_forever()
        finally:
            # Release the listening socket on Ctrl+C or any error.
            server.server_close()

    except KeyboardInterrupt:
        if spinner:
            spinner.running = False
            sys.stdout.write("\r\033[K")
            sys.stdout.flush()
        print(f"{C.RED}{CROSS} Cancelled{C.RESET}")
        return 130
    except socket.gaierror:
        if spinner:
            spinner.fail(f"Could not resolve {device}.local")
        else:
            error(f"Could not resolve {device}.local")
        print(f" {C.DIM}Try: ping {device}.local{C.RESET}")
        return 1
    except OSError as e:
        if spinner:
            spinner.fail(str(e))
        else:
            error(f"Could not start server: {e}")
        print(f" {C.DIM}Port {port} may already be in use{C.RESET}")
        return 1
    except Exception as e:
        if spinner:
            spinner.fail(str(e))
        else:
            error(str(e))
        return 1

    return 0


async def cmd_scan(args, storage: StorageService) -> int:
try:
from zeroconf import ServiceBrowser, ServiceListener, Zeroconf, IPVersion
Expand Down Expand Up @@ -819,13 +977,18 @@ def print_help():
print(f" {C.BLUE}disconnect{C.RESET} <device|all> Disconnect and clear credentials")
print(f" {C.BLUE}deploy{C.RESET} [path] Deploy an app (reads type from truffile.yaml)")
print(f" {C.BLUE}list{C.RESET} <apps|devices> List installed apps or devices")
print(f" {C.BLUE}models{C.RESET} List AI models on connected device")
print(f" {C.BLUE}proxy{C.RESET} Start OpenAI-compatible inference proxy")
print()
print(f"{C.BOLD}Examples:{C.RESET}")
print(f" {C.DIM}truffile scan{C.RESET} {C.DIM}# find devices on network{C.RESET}")
print(f" {C.DIM}truffile connect truffle-6272{C.RESET}")
print(f" {C.DIM}truffile deploy ./my-app{C.RESET}")
print(f" {C.DIM}truffile deploy{C.RESET} {C.DIM}# uses current directory{C.RESET}")
print(f" {C.DIM}truffile list apps{C.RESET}")
print(f" {C.DIM}truffile models{C.RESET} {C.DIM}# show loaded models{C.RESET}")
print(f" {C.DIM}truffile proxy{C.RESET} {C.DIM}# start proxy on :8080{C.RESET}")
print(f" {C.DIM}truffile proxy --port 9000{C.RESET}")
print()


Expand Down Expand Up @@ -857,6 +1020,14 @@ def main() -> int:
p_list = subparsers.add_parser("list", add_help=False)
p_list.add_argument("what", choices=["apps", "devices"], nargs="?")

p_models = subparsers.add_parser("models", add_help=False)

p_proxy = subparsers.add_parser("proxy", add_help=False)
p_proxy.add_argument("--device", "-d", help="Device name (defaults to last connected)")
p_proxy.add_argument("--port", "-p", type=int, default=8080, help="Port to listen on")
p_proxy.add_argument("--host", default="127.0.0.1", help="Host to bind to")
p_proxy.add_argument("--debug", action="store_true", help="Include reasoning in responses")

args = parser.parse_args()

if args.command is None:
Expand Down Expand Up @@ -891,6 +1062,10 @@ def main() -> int:
return run_async(cmd_deploy(args, storage))
elif args.command == "list":
return cmd_list(args, storage)
elif args.command == "models":
return run_async(cmd_models(storage))
elif args.command == "proxy":
return cmd_proxy(args, storage)

return 0

Expand Down