Skip to content

Commit

Permalink
Merge pull request #518 from rhatdan/docs
Browse files Browse the repository at this point in the history
Add --version and fix sort order of commands
  • Loading branch information
ericcurtin authored Dec 17, 2024
2 parents ec8ba6f + 2ffeac5 commit 307628e
Show file tree
Hide file tree
Showing 3 changed files with 22 additions and 16 deletions.
3 changes: 3 additions & 0 deletions docs/ramalama.1.md
Original file line number Diff line number Diff line change
Expand Up @@ -124,6 +124,9 @@ The default can be overridden in the ramalama.conf file.
store AI Models in the specified directory (default rootless: `$HOME/.local/share/ramalama`, default rootful: `/var/lib/ramalama`)
The default can be overridden in the ramalama.conf file.

#### **--version**, **-v**
show RamaLama version

## COMMANDS

| Command | Description |
Expand Down
31 changes: 15 additions & 16 deletions ramalama/cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -182,6 +182,13 @@ def configure_arguments(parser):
help="""run RamaLama using the specified container engine.
The RAMALAMA_CONTAINER_ENGINE environment variable modifies default behaviour.""",
)
parser.add_argument(
"--gpu",
dest="gpu",
default=False,
action="store_true",
help="offload the workload to the GPU",
)
parser.add_argument(
"--image",
default=config.get("image"),
Expand All @@ -195,13 +202,6 @@ def configure_arguments(parser):
help="""do not run RamaLama in the default container.
The RAMALAMA_IN_CONTAINER environment variable modifies default behaviour.""",
)
parser.add_argument(
"--gpu",
dest="gpu",
default=False,
action="store_true",
help="offload the workload to the GPU",
)
parser.add_argument(
"--runtime",
default=config.get("runtime"),
Expand All @@ -213,16 +213,15 @@ def configure_arguments(parser):
default=config.get("store"),
help="store AI Models in the specified directory",
)
parser.add_argument("-v", dest="version", action="store_true", help="show RamaLama version")

parser.add_argument("-v", "--version", dest="version", action="store_true", help="show RamaLama version")

def configure_subcommands(parser):
"""Add subcommand parsers to the main argument parser."""
subparsers = parser.add_subparsers(dest="subcommand")
subparsers.required = False
help_parser(subparsers)
convert_parser(subparsers)
containers_parser(subparsers)
convert_parser(subparsers)
info_parser(subparsers)
list_parser(subparsers)
login_parser(subparsers)
Expand Down Expand Up @@ -372,10 +371,10 @@ def list_files_by_modification():

def containers_parser(subparsers):
    """Register the 'containers' subcommand (alias 'ps').

    Lists all RamaLama containers; execution is delegated to
    list_containers via set_defaults.
    """
    sub = subparsers.add_parser("containers", aliases=["ps"], help="list all RamaLama containers")
    # (flag names, add_argument keyword options) in the order they appear in --help
    flag_specs = [
        # hidden flag: forces container mode off for this command
        (("--container",), dict(default=False, action="store_false", help=argparse.SUPPRESS)),
        (("--format",), dict(help="pretty-print containers to JSON or using a Go template")),
        (("-n", "--noheading"), dict(dest="noheading", action="store_true", help="do not display heading")),
        (("--no-trunc",), dict(dest="notrunc", action="store_true", help="display the extended information")),
    ]
    for names, options in flag_specs:
        sub.add_argument(*names, **options)
    sub.set_defaults(func=list_containers)


Expand Down Expand Up @@ -418,8 +417,8 @@ def info_parser(subparsers):
def list_parser(subparsers):
    """Register the 'list' subcommand (alias 'ls').

    Lists all downloaded AI Models; execution is delegated to
    list_cli via set_defaults.
    """
    sub = subparsers.add_parser("list", aliases=["ls"], help="list all downloaded AI Models")
    # (flag names, add_argument keyword options) in the order they appear in --help
    flag_specs = [
        # hidden flag: forces container mode off for this command
        (("--container",), dict(default=False, action="store_false", help=argparse.SUPPRESS)),
        (("-n", "--noheading"), dict(dest="noheading", action="store_true", help="do not display heading")),
        (("--json",), dict(dest="json", action="store_true", help="print using json")),
        (("-q", "--quiet"), dict(dest="quiet", action="store_true", help="print only Model names")),
    ]
    for names, options in flag_specs:
        sub.add_argument(*names, **options)
    sub.set_defaults(func=list_cli)

Expand Down Expand Up @@ -699,14 +698,14 @@ def serve_parser(subparsers):
_run(parser)
parser.add_argument("-d", "--detach", action="store_true", dest="detach", help="run the container in detached mode")
parser.add_argument("--host", default=config.get('host', "0.0.0.0"), help="IP address to listen")
parser.add_argument(
"-p", "--port", default=config.get('port', "8080"), help="port for AI Model server to listen on"
)
parser.add_argument(
"--generate",
choices=["quadlet", "kube", "quadlet/kube"],
help="generate specified configuration format for running the AI Model as a service",
)
parser.add_argument(
"-p", "--port", default=config.get('port', "8080"), help="port for AI Model server to listen on"
)
parser.add_argument("MODEL") # positional argument
parser.set_defaults(func=serve_cli)

Expand All @@ -720,8 +719,8 @@ def serve_cli(args):

def stop_parser(subparsers):
parser = subparsers.add_parser("stop", help="stop named container that is running AI Model")
parser.add_argument("--container", default=False, action="store_false", help=argparse.SUPPRESS)
parser.add_argument("-a", "--all", action="store_true", help="stop all RamaLama containers")
parser.add_argument("--container", default=False, action="store_false", help=argparse.SUPPRESS)
parser.add_argument(
"--ignore", action="store_true", help="ignore errors when specified RamaLama container is missing"
)
Expand Down
4 changes: 4 additions & 0 deletions test/system/060-info.bats
Original file line number Diff line number Diff line change
Expand Up @@ -9,9 +9,13 @@ load helpers
run_ramalama 2 info bogus
is "$output" ".*ramalama: error: unrecognized arguments: bogus"

run_ramalama --version
run_ramalama -v
version=$(cut -f3 -d " " <<<"$output")

run_ramalama version
is "$output" "ramalama version $version"

run_ramalama info

# FIXME Engine (podman|docker|'')
Expand Down

0 comments on commit 307628e

Please sign in to comment.