[tuner] Clean up candidate generation code #508

Merged
merged 4 commits on Nov 14, 2024
24 changes: 24 additions & 0 deletions tuner/pyproject.toml
@@ -0,0 +1,24 @@
[project]
name = "SHARK Tuner"
authors = [
    {name = "SHARK Authors"},
]
description = "IREE Dispatch Tuner"
readme = "README.md"
license = {text = "Apache-2.0"}
classifiers = [
"Development Status :: 3 - Alpha",
"License :: OSI Approved :: Apache Software License",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"Programming Language :: Python :: 3.12",
"Programming Language :: Python :: 3.13",
]
requires-python = ">= 3.10"

# Version is set via the `setup.py`.
dynamic = ["version"]

[project.urls]
Repository = "https://github.com/nod-ai/SHARK-Platform"
35 changes: 35 additions & 0 deletions tuner/setup.py
@@ -0,0 +1,35 @@
# Copyright 2024 Advanced Micro Devices, Inc.
#
# Licensed under the Apache License v2.0 with LLVM Exceptions.
# See https://llvm.org/LICENSE.txt for license information.
# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception

import json
import os

from setuptools import setup

SETUPPY_DIR = os.path.realpath(os.path.dirname(__file__))

# Setup and get version information.
VERSION_FILE = os.path.join(SETUPPY_DIR, "version.json")
VERSION_FILE_LOCAL = os.path.join(SETUPPY_DIR, "version_local.json")


def load_version_info(version_file):
    with open(version_file, "rt") as f:
        return json.load(f)


try:
    version_info = load_version_info(VERSION_FILE_LOCAL)
except FileNotFoundError:
    print("version_local.json not found. Default to dev build")
    version_info = load_version_info(VERSION_FILE)

PACKAGE_VERSION = version_info.get("package-version")
print(f"Using PACKAGE_VERSION: '{PACKAGE_VERSION}'")

setup(
    version=f"{PACKAGE_VERSION}",
)
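For context, and not part of this diff: the setup.py above reads a single package-version key from version.json or version_local.json. A minimal sketch of producing a local override follows; the version string is an illustrative assumption, not anything taken from this PR.

# Writes a version_local.json next to setup.py so the dev-build fallback
# above is skipped. The version string is illustrative only.
import json

with open("version_local.json", "wt") as f:
    json.dump({"package-version": "0.1.0.dev0"}, f)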
108 changes: 53 additions & 55 deletions tuner/tuner/candidate_gen.py
@@ -23,18 +23,15 @@
 import math
 import pickle
 import re
-import z3
+import z3  # type: ignore
 from dataclasses import astuple, dataclass
 from enum import Enum
-from os import mkdir, path, makedirs
+from os import path, makedirs
 from typing import Optional
 from textwrap import indent
 from abc import ABC, abstractmethod
 
-import iree.compiler as ireec
-from iree.compiler import ir
-from iree.compiler.dialects import _linalg_ops_gen, _util_ops_gen
-
+from iree.compiler import ir  # type: ignore
 
 tune_logger = logging.getLogger("tune")
 
@@ -520,15 +517,14 @@ def get_default_output_dir() -> str:
     return "tuning_" + datetime.now().strftime("%Y_%m_%d_%H_%M")
 
 
-def parse_mlir(mlir_text: str) -> ir.Module:
+def parse_mlir(mlir_text: str, ctx: ir.Context) -> ir.Module:
     mlir_module = None
-    with ireec.ir.Context() as context:
-        try:
-            mlir_module = ireec.ir.Module.parse(mlir_text)
-            tune_logger.info("MLIR parsing successful!")
-        except ireec.ir.MLIRError as e:
-            tune_logger.error(f"Error parsing MLIR: {e}")
-            raise RuntimeError(f"Error parsing MLIR: {e}")
+    try:
+        mlir_module = ir.Module.parse(mlir_text)
+        tune_logger.info("MLIR parsing successful!")
+    except ir.MLIRError as e:
+        tune_logger.error(f"Error parsing MLIR: {e}")
+        raise RuntimeError(f"Error parsing MLIR: {e}")
 
     return mlir_module
 
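The parse_mlir change above moves MLIR context ownership to the caller: the function no longer creates its own context, so the returned module stays valid for as long as the caller keeps the context alive. A minimal usage sketch, assuming candidate_gen is importable as tuner.candidate_gen (the import path is an assumption):

from iree.compiler import ir  # type: ignore
from tuner.candidate_gen import parse_mlir  # assumed import path

with ir.Context() as ctx:
    # The parsed module is only valid while ctx is alive, which is why
    # the caller now owns the context instead of parse_mlir creating one.
    module = parse_mlir("module {}", ctx)
    print(module)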
@@ -537,7 +533,7 @@ def parse_mlir(mlir_text: str) -> ir.Module:
 class MLIRTransformation:
     """Transformation of MLIR context"""
 
-    template: str
+    template: list[str]
     modified: str
     embeddable: str
 
@@ -550,7 +546,7 @@ def supports(self, op_name: str) -> bool:
 
     @abstractmethod
     def get_shapes(self, template: list[str]) -> ProblemSize:
-        """Extract problem size of thge operation."""
+        """Extract problem size of the operation."""
         pass
 
     @abstractmethod
@@ -645,7 +641,7 @@ def get_shapes(self, template: list[str]) -> ProblemSize:
                 dispatch_kind=DispatchKind.mmt,
             )
         assert mmt_re
-        assert dps, f"'{mmt_re}' not found in given context"
+        assert False, f"'{mmt_re}' not found in given context"
 
     def get_transform_function_mmt(
         self, problem_size: ProblemSize, functionName: str, configuration: Configuration
@@ -1353,45 +1349,47 @@ def tune(
     mlir_template = read_input_mlir(input_file)
     mlir_text = "".join(mlir_template)
 
-    mlir_module = parse_mlir(mlir_text)
-    # Save the input file as the first candidate.
-    with open(path.join(output, f"0.mlir"), "w") as f:
-        f.write(mlir_text)
-
-    dispatch_tuner_registry = DispatchTunerRegistry()
-    dispatch_tuner_registry.register(
-        [
-            MmtTuner(),
-            ConvTuner(),
-            ContractionTuner(lhs_dims, rhs_dims, tile_dims),
-            BatchMmtTuner(),
-            BatchMatmulTuner(lhs_dims, rhs_dims, tile_dims),
-        ]
-    )
-
-    walk_result = walk_mlir_op(mlir_module, dispatch_tuner_registry)
-
-    dispatch_tuner = walk_result.dispatch_tuner
-    problem_size = dispatch_tuner.get_shapes(mlir_template)
-    tune_logger.debug(str(problem_size))
-    configs = []
-    for i, config in enumerate(generate_solutions(problem_size, num_subgroups)):
-        if i >= limit:
-            break
-        tune_logger.info(f"Solution #{i+1}: {config}")
-        configs.append(config)
-        tf_mlir = dispatch_tuner.apply_params(problem_size, mlir_template, config)
-
-        with open(path.join(output, f"{i+1}.mlir"), "w") as f:
-            f.write(tf_mlir.modified)
-        with open(path.join(output, f"{i+1}_config.mlir"), "w") as f:
-            f.write(tf_mlir.embeddable)
-
-    with open(path.join(output, "configs.pkl"), "wb") as file:
-        pickle.dump(configs, file)
+    with ir.Context() as ctx:
+        mlir_module: ir.Module = parse_mlir(mlir_text, ctx)
+        # Save the input file as the first candidate.
+        with open(path.join(output, f"0.mlir"), "w") as f:
+            f.write(mlir_text)
+
+        dispatch_tuner_registry = DispatchTunerRegistry()
+        dispatch_tuner_registry.register(
+            [
+                MmtTuner(),
+                ConvTuner(),
+                ContractionTuner(lhs_dims, rhs_dims, tile_dims),
+                BatchMmtTuner(),
+                BatchMatmulTuner(lhs_dims, rhs_dims, tile_dims),
+            ]
+        )
+
+        walk_result: OpWalkResult = walk_mlir_op(mlir_module, dispatch_tuner_registry)
+
+        dispatch_tuner = walk_result.dispatch_tuner
+        assert dispatch_tuner, "No suitable dispatch tuner found"
+        problem_size: ProblemSize = dispatch_tuner.get_shapes(mlir_template)
+        tune_logger.debug(str(problem_size))
+        configs = []
+        for i, config in enumerate(generate_solutions(problem_size, num_subgroups)):
+            if i >= limit:
+                break
+            tune_logger.info(f"Solution #{i+1}: {config}")
+            configs.append(config)
+            tf_mlir = dispatch_tuner.apply_params(problem_size, mlir_template, config)
+
+            with open(path.join(output, f"{i+1}.mlir"), "w") as f:
+                f.write(tf_mlir.modified)
+            with open(path.join(output, f"{i+1}_config.mlir"), "w") as f:
+                f.write(tf_mlir.embeddable)
+
+        with open(path.join(output, "configs.pkl"), "wb") as file:
+            pickle.dump(configs, file)
 
     tune_logger.info(f"Generated {len(configs)} candidates")
     tune_logger.info(f"Configurations .pkl is stored in {output}/configs.pkl")
 
 
 def main():
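Downstream of this change, the numbered candidate files and configs.pkl land in the output directory. A sketch of reloading the pickled configurations, assuming they can be unpickled wherever candidate_gen's classes are importable and using a hypothetical output directory name:

import pickle
from os import path

output = "tuning_2024_11_14_00_00"  # assumed output directory name
with open(path.join(output, "configs.pkl"), "rb") as f:
    configs = pickle.load(f)
print(f"Loaded {len(configs)} candidate configurations")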