Auto-formatting #202

Open · wants to merge 1 commit into base: main
32 changes: 21 additions & 11 deletions benchmarks/BM_resnet50/model_repository/dali/pipeline.py
@@ -24,24 +24,34 @@

def parse_args():
import argparse
parser = argparse.ArgumentParser(description="Serialize the pipeline and save it to a file")
parser.add_argument('file_path', type=str, help='The path where to save the serialized pipeline')
parser = argparse.ArgumentParser(
description="Serialize the pipeline and save it to a file")
parser.add_argument('file_path',
type=str,
help='The path where to save the serialized pipeline')
return parser.parse_args()


def preprocessing(images, device='gpu'):
images = dali.fn.decoders.image(images, device="mixed" if device == 'gpu' else 'cpu', output_type=types.RGB)
images = dali.fn.resize(images, resize_x=224, resize_y=224)
return dali.fn.crop_mirror_normalize(images,
dtype=types.FLOAT,
output_layout="HWC",
crop=(224, 224),
mean=[0.485 * 255, 0.456 * 255, 0.406 * 255],
std=[0.229 * 255, 0.224 * 255, 0.225 * 255])
images = dali.fn.decoders.image(
images,
device="mixed" if device == 'gpu' else 'cpu',
output_type=types.RGB)
images = dali.fn.resize(images, resize_x=224, resize_y=224)
return dali.fn.crop_mirror_normalize(
images,
dtype=types.FLOAT,
output_layout="HWC",
crop=(224, 224),
mean=[0.485 * 255, 0.456 * 255, 0.406 * 255],
std=[0.229 * 255, 0.224 * 255, 0.225 * 255])


@dali.pipeline_def(batch_size=1, num_threads=1, device_id=0)
def pipe():
images = dali.fn.external_source(device="cpu", name="DALI_INPUT_0", no_copy=True)
images = dali.fn.external_source(device="cpu",
name="DALI_INPUT_0",
no_copy=True)
return preprocessing(images)


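Note, for context rather than as part of this diff: the script's __main__ block sits below the shown hunk. A minimal sketch of how a @dali.pipeline_def-decorated pipeline is typically serialized to the parsed path, assuming the file's collapsed header imports nvidia.dali as dali (as the dali.fn calls above suggest):

# Minimal sketch, not the PR's exact code; assumes `import nvidia.dali as dali`
# and the types module are imported in the collapsed part of the file.
if __name__ == '__main__':
    args = parse_args()
    # Calling the decorated pipe() builds a Pipeline object; serialize() writes
    # the serialized pipeline that the Triton DALI backend then loads.
    pipe().serialize(filename=args.file_path)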
63 changes: 36 additions & 27 deletions benchmarks/BM_resnet50/scripts/dataset_preprocess.py
@@ -46,7 +46,11 @@ def parse_meta_mat(metafile) -> Dict[int, str]:

meta = scipy.io.loadmat(metafile, squeeze_me=True)["synsets"]
nums_children = list(zip(*meta))[4]
meta = [meta[idx] for idx, num_children in enumerate(nums_children) if num_children == 0]
meta = [
meta[idx]
for idx, num_children in enumerate(nums_children)
if num_children == 0
]
idcs, wnids = list(zip(*meta))[:2]
idx_to_wnid = {idx: wnid for idx, wnid in zip(idcs, wnids)}
return idx_to_wnid
@@ -57,13 +61,17 @@ def _process_image(image_file, target_size):
original_size = image.size

# scale image to size where minimal size is _RESIZE_MIN
scale_factor = max(_RESIZE_MIN / original_size[0], _RESIZE_MIN / original_size[1])
resize_to = int(original_size[0] * scale_factor), int(original_size[1] * scale_factor)
scale_factor = max(_RESIZE_MIN / original_size[0],
_RESIZE_MIN / original_size[1])
resize_to = int(original_size[0] * scale_factor), int(original_size[1] *
scale_factor)
resized_image = image.resize(resize_to)

# central crop of image to target_size
left, upper = (resize_to[0] - target_size[0]) // 2, (resize_to[1] - target_size[1]) // 2
cropped_image = resized_image.crop((left, upper, left + target_size[0], upper + target_size[1]))
left, upper = (resize_to[0] - target_size[0]) // 2, (resize_to[1] -
target_size[1]) // 2
cropped_image = resized_image.crop(
(left, upper, left + target_size[0], upper + target_size[1]))
return cropped_image


@@ -73,27 +81,25 @@ def main():
parser = argparse.ArgumentParser(description="short_description")
parser.add_argument(
"--dataset-dir",
help="Path to dataset directory where imagenet archives are stored and processed files will be saved.",
help=
"Path to dataset directory where imagenet archives are stored and processed files will be saved.",
required=False,
default=DATASETS_DIR,
)
parser.add_argument(
'--save',
help='Save processed images.',
required=False, default=False
)
parser.add_argument('--save',
help='Save processed images.',
required=False,
default=False)
parser.add_argument(
"--target-size",
help="Size of target image. Format it as <width>,<height>.",
required=False,
default=",".join(map(str, TARGET_SIZE)),
)
parser.add_argument(
'--perf-file',
required=False,
default=None,
help='Path to save a file with time measurements.'
)
parser.add_argument('--perf-file',
required=False,
default=None,
help='Path to save a file with time measurements.')
args = parser.parse_args()

if args.dataset_dir is None:
@@ -130,32 +136,35 @@ def main():

# remap WNID into index in sorted list of all WNIDs - this is how network outputs class
available_wnids = sorted(set(labels_wnid))
wnid_to_newidx = {wnid: new_cls for new_cls, wnid in enumerate(available_wnids)}
wnid_to_newidx = {
wnid: new_cls for new_cls, wnid in enumerate(available_wnids)
}
labels = [wnid_to_newidx[wnid] for wnid in labels_wnid]
if args.perf_file is None:
perf = False
perf = False
else:
times = []
perf = True
times = []
perf = True
output_dir = datasets_dir / IMAGENET_DIRNAME
with tarfile.open(image_archive_path, mode="r") as image_archive_file:
image_rel_paths = sorted(image_archive_file.getnames())
for cls, image_rel_path in tqdm(zip(labels, image_rel_paths), total=len(image_rel_paths)):
for cls, image_rel_path in tqdm(zip(labels, image_rel_paths),
total=len(image_rel_paths)):
output_path = output_dir / str(cls) / image_rel_path
original_image_file = image_archive_file.extractfile(image_rel_path)
file_data = original_image_file.read()
start = time.perf_counter()
processed_image = _process_image(io.BytesIO(file_data), target_size)
end = time.perf_counter()
if perf:
times.append(end-start)
times.append(end - start)
if args.save:
output_path.parent.mkdir(parents=True, exist_ok=True)
processed_image.save(output_path.as_posix())
output_path.parent.mkdir(parents=True, exist_ok=True)
processed_image.save(output_path.as_posix())

if perf:
with open(args.perf_file, 'w') as perf_file:
print(times, file=perf_file)
with open(args.perf_file, 'w') as perf_file:
print(times, file=perf_file)


if __name__ == "__main__":
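The code that consumes --target-size is in the collapsed portion of this diff. A minimal sketch of the conventional parsing step, assuming the <width>,<height> format stated in the argument's help text:

# Hedged sketch, not the PR's exact code: turn a value such as "224,224" into
# the integer (width, height) tuple that _process_image() expects.
target_size = tuple(int(dim) for dim in args.target_size.split(","))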
41 changes: 27 additions & 14 deletions benchmarks/BM_resnet50/scripts/model-loader.py
@@ -23,25 +23,38 @@
import argparse
import sys


def get_args():
parser = argparse.ArgumentParser(description='Load or unload a model in Triton server.')
parser.add_argument('action', action='store', choices=['load', 'unload', 'reload'])
parser.add_argument('-u', '--url', required=False, action='store', default='localhost:8001', help='Server url.')
parser.add_argument('-m', '--model', required=True, action='store', help='Model name.')
return parser.parse_args()
parser = argparse.ArgumentParser(
description='Load or unload a model in Triton server.')
parser.add_argument('action',
action='store',
choices=['load', 'unload', 'reload'])
parser.add_argument('-u',
'--url',
required=False,
action='store',
default='localhost:8001',
help='Server url.')
parser.add_argument('-m',
'--model',
required=True,
action='store',
help='Model name.')
return parser.parse_args()


def main(args):
client = t_client.InferenceServerClient(url=args.url)
if args.action in ['reload', 'unload']:
client.unload_model(args.model)
print('Successfully unloaded model', args.model)
client = t_client.InferenceServerClient(url=args.url)
if args.action in ['reload', 'unload']:
client.unload_model(args.model)
print('Successfully unloaded model', args.model)

if args.action in ['reload', 'load']:
client.load_model(args.model)
print('Successfully loaded model', args.model)
if args.action in ['reload', 'load']:
client.load_model(args.model)
print('Successfully loaded model', args.model)


if __name__ == '__main__':
args = get_args()
main(args)
args = get_args()
main(args)
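For reference, the reformatted script is a thin wrapper over the Triton gRPC client; a minimal sketch of the equivalent direct calls, assuming tritonclient[grpc] is installed, a server listening on localhost:8001, and an illustrative model name:

# Minimal sketch of what model-loader.py drives under the hood. The alias
# t_client matches the script's usage; the model name "dali" is illustrative.
import tritonclient.grpc as t_client

client = t_client.InferenceServerClient(url='localhost:8001')
client.unload_model('dali')  # python model-loader.py -m dali unload
client.load_model('dali')    # python model-loader.py -m dali load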
76 changes: 47 additions & 29 deletions benchmarks/BM_resnet50/scripts/prepare-input-data.py
@@ -27,7 +27,6 @@
import base64 as b64
import json


dali_extra_path = os.getenv('DALI_EXTRA_PATH', None)
assert dali_extra_path is not None, "Please set DALI_EXTRA_PATH env variable."

@@ -39,44 +38,63 @@
# choose 16 smallest samples
chosen_set = [p for (_, p) in sized_images[:16]]

# choose medium sized image
# choose medium sized image
chosen_sample = sized_images[8][1]


def save_sample_input(sample, dir_name, input_name):
Path(dir_name).mkdir(exist_ok=True)
shutil.copy(sample, Path(dir_name) / Path(input_name))
Path(dir_name).mkdir(exist_ok=True)
shutil.copy(sample, Path(dir_name) / Path(input_name))


def get_content(fpath):
with fpath.open("rb") as f:
content = f.read()
return {
'content' : {
'b64': b64.b64encode(content).decode('utf-8')
},
'shape': [len(content)]
}
with fpath.open("rb") as f:
content = f.read()
return {
'content': {
'b64': b64.b64encode(content).decode('utf-8')
},
'shape': [len(content)]
}


def save_json_dataset(files, dataset_filename, input_name):
contents = [get_content(fpath) for fpath in files]
inputs = [{input_name: content} for content in contents]
result_dict = {'data': inputs}
with open(dataset_filename, 'w') as dataset_file:
json.dump(result_dict, dataset_file)
contents = [get_content(fpath) for fpath in files]
inputs = [{input_name: content} for content in contents]
result_dict = {'data': inputs}
with open(dataset_filename, 'w') as dataset_file:
json.dump(result_dict, dataset_file)


def get_args():
parser = argparse.ArgumentParser(description='Prepare perf_analyzer input data.')
parser.add_argument('-d', '--directory-name', required=False, action='store', default='inputs-data',
help='Directory name to store a single sample data.')
parser.add_argument('-i', '--input-name', required=False, action='store', default='input',
help='Input name.')
parser.add_argument('-f', '--dataset-filename', required=False, action='store', default='dataset.json',
help='Name of the created JSON dataset.')
return parser.parse_args()
parser = argparse.ArgumentParser(
description='Prepare perf_analyzer input data.')
parser.add_argument('-d',
'--directory-name',
required=False,
action='store',
default='inputs-data',
help='Directory name to store a single sample data.')
parser.add_argument('-i',
'--input-name',
required=False,
action='store',
default='input',
help='Input name.')
parser.add_argument('-f',
'--dataset-filename',
required=False,
action='store',
default='dataset.json',
help='Name of the created JSON dataset.')
return parser.parse_args()


def main(args):
save_sample_input(chosen_sample, args.directory_name, args.input_name)
save_json_dataset(chosen_set, args.dataset_filename, args.input_name)
save_sample_input(chosen_sample, args.directory_name, args.input_name)
save_json_dataset(chosen_set, args.dataset_filename, args.input_name)


if __name__ == '__main__':
args = get_args()
main(args)
args = get_args()
main(args)
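Note on the collapsed part above the shown hunk: sized_images is consumed as (size, path) pairs sorted smallest-first. A minimal sketch of how such a list is plausibly built from DALI_EXTRA_PATH; the exact subdirectory and file extension are assumptions, not taken from this diff:

# Hedged sketch, not the PR's exact code: (file size, path) pairs over sample
# JPEGs under DALI_EXTRA_PATH, sorted ascending so [:16] picks the smallest.
from pathlib import Path

image_dir = Path(dali_extra_path) / 'db' / 'single' / 'jpeg'  # assumed location
sized_images = sorted((p.stat().st_size, p) for p in image_dir.rglob('*.jpg'))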