Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat: support exporting notebook models #143

Merged
merged 5 commits into from
Sep 12, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions .github/workflows/pr.yml
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,11 @@ jobs:
uses: opvious/[email protected]
with:
license-key: ${{ secrets.OPVIOUS_LICENSE_KEY }}
log-level: debug
- name: Register specifications
run: ./scripts/specifications.sh register
- name: Test
run: poetry run pytest
- name: Show API logs
if: failure()
run: opvious api logs
395 changes: 202 additions & 193 deletions poetry.lock

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ build-backend = 'poetry.core.masonry.api'

[tool.poetry]
name = 'opvious'
version = '0.21.0rc1'
version = '0.22.0rc1'
description = 'Opvious Python SDK'
authors = ['Opvious Engineering <[email protected]>']
readme = 'README.md'
Expand Down
91 changes: 66 additions & 25 deletions src/opvious/__main__.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,9 +6,8 @@
import sys
from typing import Any, Mapping, Optional

from .common import __version__
from .client import Client
from .specifications import load_notebook_models, LocalSpecification
from . import __version__, Client, LocalSpecification, load_notebook_models
from .modeling import Model

_COMMAND = "python -m opvious"

Expand All @@ -23,20 +22,25 @@
{_COMMAND} register-notebook PATH [MODEL]
[-dn NAME] [-t TAGS] [--allow-empty]
{_COMMAND} register-sources GLOB [-dn NAME] [-t TAGS]
{_COMMAND} export-notebook-model PATH [MODEL] [-a PATH]
{_COMMAND} (-h | --help)
{_COMMAND} --version

Options:
--allow-empty Do not throw an error if no models were found in a
notebook
-d, --dry-run Validate the specification but do not store it on the
server
-n, --name NAME Formulation name. By default this name is inferred
from the file's name, omitting the extension
-t, --tags TAGS Comma-separated list of tags. By default only the
`latest` tag is added
--version Show SDK version
-h, --help Show this message
--allow-empty Do not throw an error if no models were found in
a notebook. Requires `--dry-run` to be set.
-a, --assembly-path PATH Path where to store the exported model. Defaults
to the model's name with a `.proto` extension.
-d, --dry-run Validate the specification but do not store it on
the server. When this option is enabled,
notebooks can have more than one model.
-h, --help Show this message.
-n, --name NAME Formulation name. By default this name is
inferred from the file's name, omitting the
extension.
-t, --tags TAGS Comma-separated list of tags. By default only the
`latest` tag is added.
--version Show SDK version.
"""


Expand Down Expand Up @@ -80,18 +84,11 @@ async def handle_notebook(
name: Optional[str],
allow_empty: bool,
) -> None:
sn = load_notebook_models(path, allow_empty=allow_empty)
if model_name is None:
model_names = list(sn.__dict__.keys())
if not self._dry_run and len(model_names) != 1:
raise Exception(f"Notebook has 0 or 2+ models ({model_names})")
else:
model_names = [model_name]
if name is None:
name = _default_name(path)
for model_name in model_names:
model = getattr(sn, model_name)
await self._handle(model.specification(), name)
models = _load_notebook_models(path, model_name)
if self._dry_run:
return
_name, model = _singleton_model(models)
await self._handle(model.specification(), name or _default_name(path))

async def handle_sources(self, glob: str, name: Optional[str]) -> None:
if name is None:
Expand All @@ -104,10 +101,54 @@ def _default_name(path: str) -> str:
return os.path.splitext(os.path.basename(path))[0]


def _load_notebook_models(
    path: str,
    model_name: Optional[str],
) -> dict[str, Model]:
    """Loads models declared inside a notebook.

    Args:
        path: Notebook location.
        model_name: When set, only the model bound to this attribute is
            returned; otherwise every `Model` instance found in the
            notebook's namespace is included.
    """
    namespace = load_notebook_models(path, allow_empty=True)
    if model_name is not None:
        return {model_name: getattr(namespace, model_name)}
    found: dict[str, Model] = {}
    for attr, value in namespace.__dict__.items():
        if isinstance(value, Model):
            found[attr] = value
    return found


def _singleton_model(models: dict[str, Model]) -> tuple[str, Model]:
    """Returns the unique model in `models`, raising otherwise.

    Args:
        models: Models keyed by their notebook attribute name.

    Raises:
        Exception: If `models` does not hold exactly one entry. The message
            lists the names that were found so the caller knows what to pick.
    """
    if len(models) != 1:
        # Include the discovered names in the error: without them the user
        # has no way to know which `MODEL` argument to pass.
        names = sorted(models)
        raise Exception(
            f"Notebook has {len(models)} models ({names}), please "
            "specify a model name to select one"
        )
    return next(iter(models.items()))


async def _export_notebook_model(
    client: Client,
    notebook_path: str,
    model_name: Optional[str] = None,
    export_path: Optional[str] = None,
) -> None:
    """Exports a notebook model's assembled representation to a file.

    Args:
        client: API client used to assemble the specification.
        notebook_path: Location of the notebook containing the model.
        model_name: Name of the model to export. May be omitted when the
            notebook contains exactly one model.
        export_path: Destination file. Defaults to the model's name with a
            `.proto` extension.
    """
    # TODO: Support transformations by accepting an additional variable name.
    models = _load_notebook_models(notebook_path, model_name)
    name, model = _singleton_model(models)
    if not export_path:
        export_path = f"{name}.proto"
    # Plain write-only binary mode: chunks are only ever appended, nothing
    # is read back (the original `bw+` update mode was unnecessary).
    with open(export_path, "wb") as writer:
        await client.export_specification(model.specification(), writer)


async def _run(args: Mapping[str, Any]) -> None:
client = Client.from_environment()
if not client:
raise Exception("Missing OPVIOUS_ENDPOINT environment variable")

if args["export-notebook-model"]:
await _export_notebook_model(
client,
notebook_path=args["PATH"],
model_name=args["MODEL"],
export_path=args["--assembly-path"],
)
return

handler = _SpecificationHandler(client, args["--tags"], args["--dry-run"])
if args["register-notebook"]:
await handler.handle_notebook(
Expand Down
39 changes: 39 additions & 0 deletions src/opvious/client/handlers.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
import logging
from typing import (
AsyncIterator,
BinaryIO,
Iterable,
Mapping,
Optional,
Expand Down Expand Up @@ -48,6 +49,7 @@
solve_strategy_to_json,
)
from ..executors import (
BinaryExecutorResult,
Executor,
JsonExecutorResult,
JsonSeqExecutorResult,
Expand All @@ -69,6 +71,7 @@
Problem,
ProblemOutlineCache,
ProblemOutlineGenerator,
ProblemTransformation,
SolveInputsBuilder,
feasible_outcome_details,
log_progress,
Expand Down Expand Up @@ -193,6 +196,42 @@ async def annotate_specification(
]
return specification.annotated(issues)

async def export_specification(
    self,
    specification: LocalSpecification,
    writer: BinaryIO,
    transformations: Optional[list[ProblemTransformation]] = None,
) -> None:
    """Exports a specification to its canonical representation

    Args:
        specification: The specification to export
        writer: Binary sink the assembled representation is written to
        transformations: Transformations to apply to the specification
    """
    sources = [s.text for s in specification.sources]

    if transformations:
        # An outline is only needed to resolve transformations; skip the
        # extra round-trip when there are none.
        outline_generator = await ProblemOutlineGenerator.sources(
            executor=self._executor, sources=sources
        )
        for tf in transformations:
            outline_generator.add_transformation(tf)
        _outline, transformation_data = await outline_generator.generate()
    else:
        transformation_data = []

    async with self._executor.execute(
        result_type=BinaryExecutorResult,
        url="/sources/assemble",
        method="POST",
        json_data=json_dict(
            sources=sources,
            transformations=transformation_data,
        ),
    ) as res:
        # Stream the assembled payload straight into the caller's sink.
        async for chunk in res.bytes():
            writer.write(chunk)

async def register_specification(
self,
specification: LocalSpecification,
Expand Down
2 changes: 2 additions & 0 deletions src/opvious/executors/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
from typing import Optional

from .common import (
BinaryExecutorResult,
Executor,
ExecutorError,
ExecutorResult,
Expand All @@ -24,6 +25,7 @@
"Executor",
"ExecutorError",
"ExecutorResult",
"BinaryExecutorResult",
"JsonExecutorResult",
"JsonSeqExecutorResult",
"PlainTextExecutorResult",
Expand Down
7 changes: 7 additions & 0 deletions src/opvious/executors/aiohttp.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
from typing import AsyncIterator, Optional

from .common import (
BinaryExecutorResult,
CONTENT_TYPE_HEADER,
Executor,
ExecutorError,
Expand Down Expand Up @@ -89,6 +90,12 @@ async def _send(
trace=trace,
reader=res.content,
)
elif BinaryExecutorResult.is_eligible(ctype):
yield BinaryExecutorResult(
status=status,
trace=trace,
reader=res.content,
)
else:
text = await res.text()
raise ExecutorError(
Expand Down
30 changes: 30 additions & 0 deletions src/opvious/executors/common.py
Original file line number Diff line number Diff line change
Expand Up @@ -153,6 +153,34 @@ def flush(self) -> str:
return buf


@dataclasses.dataclass
class BinaryExecutorResult(ExecutorResult):
    """Binary execution result"""

    content_type = "application/octet-stream"
    # Either the full payload (`bytes`) or a sync/async chunk iterable.
    reader: Any = dataclasses.field(repr=False)

    async def bytes(
        self, assert_status: Optional[int] = 200
    ) -> AsyncIterator[bytes]:
        """Yields the response's contents as binary chunks.

        Args:
            assert_status: Expected response status, or a falsy value to
                skip the check.
        """
        if assert_status:
            self._assert_status(assert_status)

        # Non-streaming (e.g. the pyodide executor hands us the whole
        # payload up front).
        if isinstance(self.reader, bytes):
            yield self.reader
            # Early return is required: `bytes` objects are themselves
            # iterable, so falling through to the `__iter__` branch below
            # would re-emit the payload one integer at a time.
            return

        # Streaming
        if hasattr(self.reader, "__aiter__"):
            async for chunk in self.reader:
                yield chunk
        elif hasattr(self.reader, "__iter__"):
            for chunk in self.reader:
                yield chunk
        else:
            raise Exception(f"Non-iterable reader: {self.reader}")


@dataclasses.dataclass
class JsonExecutorResult(ExecutorResult):
"""Unary JSON execution result"""
Expand Down Expand Up @@ -256,6 +284,8 @@ async def execute(
accept = "application/json-seq;q=1, text/plain;q=0.1"
elif result_type == PlainTextExecutorResult:
accept = "text/plain"
elif result_type == BinaryExecutorResult:
accept = "application/octet-stream;q=1, text/plain;q=0.1"
else:
raise Exception(f"Unsupported result type: {result_type}")
all_headers["accept"] = accept
Expand Down
4 changes: 4 additions & 0 deletions src/opvious/executors/pyodide.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
from typing import AsyncIterator, Optional

from .common import (
BinaryExecutorResult,
CONTENT_TYPE_HEADER,
Executor,
ExecutorError,
Expand Down Expand Up @@ -48,6 +49,9 @@ async def _send(
yield PlainTextExecutorResult(
status=status, trace=trace, reader=text
)
elif BinaryExecutorResult.is_eligible(ctype):
data = await res.js_response.bytes()
yield BinaryExecutorResult(status=status, trace=trace, reader=data)
else:
text = await res.js_response.text()
raise ExecutorError(status=status, trace=trace, reason=text)
3 changes: 3 additions & 0 deletions src/opvious/executors/urllib.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
from typing import AsyncIterator, Optional

from .common import (
BinaryExecutorResult,
CONTENT_TYPE_HEADER,
Headers,
Executor,
Expand Down Expand Up @@ -54,6 +55,8 @@ async def _send(
yield PlainTextExecutorResult(
status=status, trace=trace, reader=res
)
elif BinaryExecutorResult.is_eligible(ctype):
yield BinaryExecutorResult(status=status, trace=trace, reader=res)
else:
raise ExecutorError(
status=status,
Expand Down
15 changes: 15 additions & 0 deletions tests/opvious/client_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -346,6 +346,21 @@ async def test_solve_relaxed_sudoku(self):
deficit = solution.outputs.variable("hintsObserved_deficit")
assert len(deficit) == 1

@pytest.mark.asyncio
async def test_format_inline_problem(self):
    # Checks that formatting a problem built from an inline specification
    # yields instructions which reference the specification's constraint
    # names (here `greaterThanBound`).
    spec = opvious.LocalSpecification.inline(
        r"""
$\S^{v}_{target}: \alpha \in \{0,1\}$
$\S^{p}_{bound}: b \in \mathbb{R}_+$
$\S^{c}_{greaterThanBound}: \alpha \geq b$
$\S^o_{maximize}: \max 2 \alpha$
"""
    )
    instructions = await client.format_problem(
        opvious.Problem(specification=spec, parameters={"bound": 30})
    )
    assert "greaterThanBound" in instructions

@pytest.mark.asyncio
async def test_format_problem(self):
instructions = await client.format_problem(
Expand Down
Loading