Commit
Better API response typing + other misc changes
JBWilkie committed Sep 17, 2024
1 parent d8f6de2 commit 09c623e
Showing 8 changed files with 66 additions and 57 deletions.
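In summary, slot and source-file information in API responses is now modelled with the Slot and SourceFile dataclasses from darwin.datatypes rather than raw dictionaries, so downstream code reads attributes instead of dictionary keys. A minimal sketch of the new access pattern (the helper function below is hypothetical, written only to illustrate the change):

    from typing import List, Optional

    from darwin.dataset.upload_manager import ItemPayload

    def blocked_reasons(item: ItemPayload) -> List[Optional[str]]:
        # After this commit: attribute access (slot.reason) replaces dict access (slot["reason"])
        return [slot.reason for slot in item.slots]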
10 changes: 5 additions & 5 deletions darwin/cli_functions.py
@@ -800,8 +800,8 @@ def file_upload_callback(
other_skipped_items = []
for item in upload_manager.blocked_items:
for slot in item.slots:
if (slot["reason"] is not None) and (
slot["reason"].upper() == BLOCKED_UPLOAD_ERROR_ALREADY_EXISTS
if (slot.reason is not None) and (
slot.reason.upper() == BLOCKED_UPLOAD_ERROR_ALREADY_EXISTS
):
already_existing_items.append(item)
else:
@@ -835,15 +835,15 @@ def file_upload_callback(
)
for item in upload_manager.blocked_items:
for slot in item.slots:
if (slot["reason"] is not None) and (
slot["reason"].upper() != BLOCKED_UPLOAD_ERROR_ALREADY_EXISTS
if (slot.reason is not None) and (
slot.reason.upper() != BLOCKED_UPLOAD_ERROR_ALREADY_EXISTS
):
error_table.add_row(
str(item.dataset_item_id),
item.filename,
item.path,
"UPLOAD_REQUEST",
slot["reason"],
slot.reason,
)
for error in upload_manager.errors:
for local_file in upload_manager.local_files:
2 changes: 1 addition & 1 deletion darwin/dataset/download_manager.py
@@ -672,7 +672,7 @@ def _get_planned_image_paths(
for slot in annotation.slots:
slot_name = Path(slot.name)
for source_file in slot.source_files:
- file_name = source_file["file_name"]
+ file_name = source_file.file_name
if use_folders and annotation.remote_path != "/":
file_paths.append(
images_path
52 changes: 23 additions & 29 deletions darwin/dataset/upload_manager.py
@@ -20,7 +20,7 @@

import requests

- from darwin.datatypes import PathLike
+ from darwin.datatypes import PathLike, Slot, SourceFile
from darwin.doc_enum import DocEnum
from darwin.path_utils import construct_full_path
from darwin.utils import chunk
@@ -34,22 +34,6 @@
from abc import ABC, abstractmethod


- LAYOUT_SHAPE_MAP = {
- 1: [1, 1],
- 2: [2, 1],
- 3: [3, 1],
- 4: [2, 2],
- 5: [3, 2],
- 6: [3, 2],
- 7: [4, 2],
- 8: [4, 2],
- 9: [3, 3],
- 10: [4, 3],
- 11: [4, 3],
- 12: [4, 3],
- }


class ItemMergeMode(Enum):
SLOTS = "slots"
SERIES = "series"
@@ -79,8 +63,6 @@ class ItemPayload:
The filename of where this ``ItemPayload``'s data is.
path : str
The path to ``filename``.
- reasons : Optional[List[str]], default: None
- A per-slot reason to upload this ``ItemPayload``.
"""

def __init__(
@@ -90,13 +72,21 @@ def __init__(
filename: str,
path: str,
reasons: Optional[List[str]] = None,
- slots: Optional[any] = None,
+ slots: List[Dict[str, str]],
):
self.dataset_item_id = dataset_item_id
self.filename = filename
self.path = PurePosixPath(path).as_posix()
self.reasons = reasons
- self.slots = slots
+ self.slots = [
+ Slot(
+ type=slot["type"],
+ source_files=[SourceFile(file_name=slot["file_name"])],
+ name=slot["slot_name"],
+ upload_id=slot["upload_id"] if "upload_id" in slot else None,
+ reason=slot["reason"] if "reason" in slot else None,
+ )
+ for slot in slots
+ ]

@staticmethod
def parse_v2(payload):
@@ -193,7 +183,7 @@ def serialize(self):
"name": self.data["filename"],
}

- def serialize_v2(self):
+ def serialize_darwin_json_v2(self):
optional_properties = ["tags", "fps", "as_frames", "extract_views"]
slot = {"file_name": self.data["filename"], "slot_name": "0"}
for optional_property in optional_properties:
@@ -235,7 +225,7 @@ def _create_layout(self):
self.slot_names = []
if self.merge_mode == ItemMergeMode.SLOTS:
num_viewports = min(len(self.files), 16)
- slots_grid = SLOTS_GRID_MAP.get(num_viewports)
+ slots_grid = SLOTS_GRID_MAP[num_viewports]
self.layout = {
"version": 3,
"slots_grid": slots_grid,
@@ -273,7 +263,7 @@ def _create_layout(self):
}
self.slot_names = self.layout["slots_grid"][0][0]

- def serialize_v2(self):
+ def serialize_darwin_json_v2(self):
optional_properties = ["fps"]
slots = []
for idx, local_file in enumerate(self.files):
@@ -534,7 +524,9 @@ def _request_upload(self) -> Tuple[List[ItemPayload], List[ItemPayload]]:
upload_payloads.extend(
[
{
"items": [file.serialize_v2() for file in file_chunk],
"items": [
file.serialize_darwin_json_v2() for file in file_chunk
],
"options": {"ignore_dicom_layout": True},
}
for file_chunk in chunk(self.multi_file_items, chunk_size)
@@ -553,7 +545,7 @@ def _request_upload(self) -> Tuple[List[ItemPayload], List[ItemPayload]]:

upload_payloads.extend(
[
{"items": [file.serialize_v2() for file in file_chunk]}
{"items": [file.serialize_darwin_json_v2() for file in file_chunk]}
for file_chunk in chunk(single_file_items, chunk_size)
]
)
@@ -581,8 +573,10 @@ def upload_function(
file_lookup = {file.full_path: file for file in self.local_files}
for item in self.pending_items:
for slot in item.slots:
upload_id = slot["upload_id"]
slot_path = (Path(item.path) / Path((slot["file_name"]))).as_posix()
upload_id = slot.upload_id
slot_path = (
Path(item.path) / Path((slot.source_files[0].file_name))
).as_posix()
file = file_lookup.get(str(slot_path))
if not file:
raise ValueError(
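For illustration, a minimal sketch of how the reworked ItemPayload wraps the slot dictionaries from an upload-registration response into typed Slot/SourceFile objects; the raw_item dict below is made up for the example and is not an actual API payload:

    from darwin.dataset.upload_manager import ItemPayload

    # Hypothetical registration-response entry (illustrative values only)
    raw_item = {
        "id": "3b241101-e2bb-4255-8caf-4136c566a964",
        "name": "test.jpg",
        "path": "/",
        "slots": [{"type": "image", "file_name": "test.jpg", "slot_name": "0"}],
    }

    item = ItemPayload(
        dataset_item_id=raw_item["id"],
        filename=raw_item["name"],
        path=raw_item["path"],
        slots=raw_item["slots"],
    )
    assert item.slots[0].reason is None  # "reason" absent from the dict, so it defaults to None
    assert item.slots[0].source_files[0].file_name == "test.jpg"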
17 changes: 16 additions & 1 deletion darwin/datatypes.py
@@ -361,7 +361,7 @@ class Slot:
type: str

#: Original upload information for the slot
- source_files: List[Dict[str, str]]
+ source_files: List[SourceFile]

#: Thumbnail url to the file
thumbnail_url: Optional[str] = None
@@ -390,6 +390,21 @@ class Slot:
#: Segments for video slots
segments: Optional[List[Dict[str, UnknownType]]] = None

+ #: Upload ID
+ upload_id: Optional[str] = None

+ #: The reason for blocking upload of this slot, if it was blocked
+ reason: Optional[str] = None


+ @dataclass
+ class SourceFile:
+ #: File name of source file
+ file_name: str

+ #: URL of file
+ url: Optional[str] = None

@dataclass
class AnnotationFileVersion:
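Correspondingly, code that previously built source_files as a list of dicts (as in the importer, parser, and test changes in this commit) now constructs SourceFile objects directly. A small sketch based on the field set shown in this diff (values illustrative):

    from darwin.datatypes import Slot, SourceFile

    slot = Slot(
        name="slot1",
        type="image",
        source_files=[SourceFile(file_name="source_name.jpg", url=None)],
    )
    print(slot.source_files[0].file_name)  # -> source_name.jpg
    print(slot.reason)                     # -> None (default)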
2 changes: 1 addition & 1 deletion darwin/importer/formats/nifti.py
@@ -170,7 +170,7 @@ def _parse_nifti(
dt.Slot(
name=slot_name,
type="dicom",
source_files=[{"url": None, "file_name": str(filename)}],
source_files=[dt.SourceFile(file_name=str(filename), url=None)],
)
for slot_name in slot_names
],
16 changes: 8 additions & 8 deletions darwin/utils/utils.py
@@ -658,10 +658,10 @@ def _parse_darwin_image(
name=None,
type="image",
source_files=[
- {
- "url": data["image"].get("url"),
- "file_name": _get_local_filename(data["image"]),
- }
+ dt.SourceFile(
+ file_name=_get_local_filename(data["image"]),
+ url=data["image"].get("url"),
+ )
],
thumbnail_url=data["image"].get("thumbnail_url"),
width=data["image"].get("width"),
@@ -708,10 +708,10 @@ def _parse_darwin_video(
name=None,
type="video",
source_files=[
- {
- "url": data["image"].get("url"),
- "file_name": _get_local_filename(data["image"]),
- }
+ dt.SourceFile(
+ file_name=_get_local_filename(data["image"]),
+ url=data["image"].get("url"),
+ )
],
thumbnail_url=data["image"].get("thumbnail_url"),
width=data["image"].get("width"),
20 changes: 10 additions & 10 deletions tests/darwin/dataset/download_manager_test.py
@@ -6,7 +6,7 @@
import responses

from darwin.dataset import download_manager as dm
- from darwin.datatypes import AnnotationClass, AnnotationFile, Slot
+ from darwin.datatypes import AnnotationClass, AnnotationFile, Slot, SourceFile
from tests.fixtures import *


@@ -89,7 +89,7 @@ def test_single_slot_without_folders_planned_image_paths():
Slot(
name="slot1",
type="image",
source_files=[{"file_name": "source_name.jpg"}],
source_files=[SourceFile(file_name="source_name.jpg")],
)
],
remote_path="/",
@@ -112,7 +112,7 @@ def test_single_slot_with_folders_planned_image_paths():
Slot(
name="slot1",
type="image",
source_files=[{"file_name": "source_name.jpg"}],
source_files=[SourceFile(file_name="source_name.jpg")],
)
],
remote_path="/remote/path",
@@ -135,12 +135,12 @@ def test_multi_slot_without_folders_planned_image_paths():
Slot(
name="slot1",
type="image",
source_files=[{"file_name": "source_name_1.jpg"}],
source_files=[SourceFile(file_name="source_name_1.jpg")],
),
Slot(
name="slot2",
type="image",
source_files=[{"file_name": "source_name_2.jpg"}],
source_files=[SourceFile(file_name="source_name_2.jpg")],
),
],
remote_path="/",
@@ -166,12 +166,12 @@ def test_multi_slot_with_folders_planned_image_path():
Slot(
name="slot1",
type="image",
source_files=[{"file_name": "source_name_1.jpg"}],
source_files=[SourceFile(file_name="source_name_1.jpg")],
),
Slot(
name="slot2",
type="image",
source_files=[{"file_name": "source_name_2.jpg"}],
source_files=[SourceFile(file_name="source_name_2.jpg")],
),
],
remote_path="/remote/path",
@@ -197,7 +197,7 @@ def test_single_slot_root_path_with_folders_planned_image_paths():
Slot(
name="slot1",
type="image",
source_files=[{"file_name": "source_name.jpg"}],
source_files=[SourceFile(file_name="source_name.jpg")],
)
],
remote_path="/",
@@ -221,8 +221,8 @@ def test_multiple_source_files_planned_image_paths():
name="slot1",
type="image",
source_files=[
{"file_name": "source_name_1.jpg"},
{"file_name": "source_name_2.jpg"},
SourceFile(file_name="source_name_1.jpg"),
SourceFile(file_name="source_name_2.jpg"),
],
)
],
4 changes: 2 additions & 2 deletions tests/darwin/dataset/upload_manager_test.py
@@ -108,7 +108,7 @@ def test_pending_count_is_correct(dataset: RemoteDataset, request_upload_endpoin
assert pending_item.dataset_item_id == "3b241101-e2bb-4255-8caf-4136c566a964"
assert pending_item.filename == "test.jpg"
assert pending_item.path == "/"
- assert pending_item.reasons[0] is None
+ assert pending_item.slots[0].reason is None


@pytest.mark.usefixtures("file_read_write_test")
@@ -150,7 +150,7 @@ def test_blocked_count_is_correct(dataset: RemoteDataset, request_upload_endpoin
assert blocked_item.dataset_item_id == "3b241101-e2bb-4255-8caf-4136c566a964"
assert blocked_item.filename == "test.jpg"
assert blocked_item.path == "/"
- assert blocked_item.reasons[0] == BLOCKED_UPLOAD_ERROR_ALREADY_EXISTS
+ assert blocked_item.slots[0].reason == BLOCKED_UPLOAD_ERROR_ALREADY_EXISTS


@pytest.mark.usefixtures("file_read_write_test")
