Commit

Merge remote-tracking branch 'refs/remotes/origin/nomicbert' into nomicbert
bhavika committed Jun 27, 2024
2 parents e10b3e1 + 9a6cd59 commit 773c567
Showing 35 changed files with 925 additions and 684 deletions.
42 changes: 23 additions & 19 deletions .github/workflows/test_offline.yml
@@ -2,9 +2,9 @@ name: Offline usage / Python - Test

on:
  push:
-    branches: [ main ]
+    branches: [main]
  pull_request:
-    branches: [ main ]
+    branches: [main]

concurrency:
  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
@@ -15,29 +15,33 @@ jobs:
    strategy:
      fail-fast: false
      matrix:
-       python-version: [3.9]
+       python-version: [3.8, 3.9]
        os: [ubuntu-20.04]

    runs-on: ${{ matrix.os }}
    steps:
- uses: actions/checkout@v2
- name: Setup Python ${{ matrix.python-version }}
uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
- name: Install dependencies for pytorch export
run: |
pip install .[tests,exporters,onnxruntime]
- name: Test with unittest
run: |
HF_HOME=/tmp/ huggingface-cli download hf-internal-testing/tiny-random-gpt2
- name: Checkout code
uses: actions/checkout@v4

HF_HOME=/tmp/ HF_HUB_OFFLINE=1 optimum-cli export onnx --model hf-internal-testing/tiny-random-gpt2 gpt2_onnx --task text-generation
- name: Setup Python ${{ matrix.python-version }}
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}

huggingface-cli download hf-internal-testing/tiny-random-gpt2
- name: Install dependencies for pytorch export
run: |
pip install .[tests,exporters,onnxruntime]
HF_HUB_OFFLINE=1 optimum-cli export onnx --model hf-internal-testing/tiny-random-gpt2 gpt2_onnx --task text-generation
- name: Test with pytest
run: |
HF_HOME=/tmp/ huggingface-cli download hf-internal-testing/tiny-random-gpt2
pytest tests/onnxruntime/test_modeling.py -k "test_load_model_from_hub and not from_hub_onnx" -s -vvvvv
HF_HOME=/tmp/ HF_HUB_OFFLINE=1 optimum-cli export onnx --model hf-internal-testing/tiny-random-gpt2 gpt2_onnx --task text-generation
HF_HUB_OFFLINE=1 pytest tests/onnxruntime/test_modeling.py -k "test_load_model_from_hub and not from_hub_onnx" -s -vvvvv
huggingface-cli download hf-internal-testing/tiny-random-gpt2
HF_HUB_OFFLINE=1 optimum-cli export onnx --model hf-internal-testing/tiny-random-gpt2 gpt2_onnx --task text-generation
pytest tests/onnxruntime/test_modeling.py -k "test_load_model_from_hub and not from_hub_onnx" -s -vvvvv
HF_HUB_OFFLINE=1 pytest tests/onnxruntime/test_modeling.py -k "test_load_model_from_hub and not from_hub_onnx" -s -vvvvv
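Both before and after this change, the steps follow the same warm-then-verify pattern: download `hf-internal-testing/tiny-random-gpt2` while the network is available, then rerun the export and the model-loading tests under `HF_HUB_OFFLINE=1`, so any attempt to reach the Hub fails the job instead of silently re-downloading; the `HF_HOME=/tmp/` variants repeat the check against a non-default cache location.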
2 changes: 2 additions & 0 deletions .github/workflows/test_onnxruntime.yml
@@ -50,6 +50,8 @@ jobs:
          pytest onnxruntime -m "run_in_series" --durations=0 -vvvv -s
      - name: Test with pytest (in parallel)
+       env:
+         FXMARTYCLONE_READ_TOKEN: ${{ secrets.HF_HUB_READ_TOKEN }}
        working-directory: tests
        run: |
          pytest onnxruntime -m "not run_in_series" --durations=0 -vvvv -s -n auto
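The new `env:` block hands the parallel test run a Hub read token through the environment rather than inlining it in the `run:` script; tests that need to pull protected checkpoints can read `FXMARTYCLONE_READ_TOKEN` back from `os.environ`.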
17 changes: 17 additions & 0 deletions .github/workflows/trufflehog.yml
@@ -0,0 +1,17 @@
on:
  push:

name: Secret Leaks

jobs:
  trufflehog:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - name: Secret Scanning
        uses: trufflesecurity/trufflehog@main
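Note that `fetch-depth: 0` requests a full clone: TruffleHog scans the commit history for leaked credentials, and the default shallow checkout of depth 1 would leave it almost nothing to inspect.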
18 changes: 12 additions & 6 deletions docs/source/_redirects.yml
@@ -19,12 +19,18 @@ habana/tutorials/pretraining: habana/usage_guides/pretraining

# Optimum Intel
intel_index: intel/index
-intel_quickstart: intel/optimization_inc
-intel_configuration: intel/reference_inc
-intel_optimization: intel/optimization_inc
-intel_quantization: intel/optimization_inc
-intel_pruning: intel/optimization_inc
-intel_trainer: intel/reference_inc
+intel_quickstart: intel/index
+intel_configuration: intel/neural_compressor/reference
+intel_optimization: intel/neural_compressor/optimization
+intel_quantization: intel/neural_compressor/optimization
+intel_pruning: intel/neural_compressor/optimization
+intel_trainer: intel/neural_compressor/reference
+intel/inference: intel/openvino/inference
+intel/optimization_ov: intel/openvino/optimization
+intel/reference_ov: intel/openvino/reference
+intel/optimization_inc: intel/neural_compressor/optimization
+intel/distributed_training: intel/neural_compressor/distributed_training
+intel/reference_inc: intel/neural_compressor/reference

# Optimum Neuron
docs/optimum-neuron/index: /docs/optimum-neuron/index
3 changes: 1 addition & 2 deletions optimum/commands/__init__.py
@@ -15,5 +15,4 @@
from .base import BaseOptimumCLICommand, CommandInfo, RootOptimumCLICommand
from .env import EnvironmentCommand
from .export import ExportCommand, ONNXExportCommand, TFLiteExportCommand
-from .onnxruntime import ONNXRuntimeCommand, ONNXRuntimeOptimizeCommand, ONNXRuntimeQuantizeCommand
-from .optimum_cli import register_optimum_cli_subcommand
+from .optimum_cli import optimum_cli_subcommand
57 changes: 51 additions & 6 deletions optimum/commands/optimum_cli.py
@@ -17,16 +17,57 @@
from pathlib import Path
from typing import Dict, List, Optional, Tuple, Type, Union

+from ..subpackages import load_subpackages
from ..utils import logging
from .base import BaseOptimumCLICommand, CommandInfo, RootOptimumCLICommand
from .env import EnvironmentCommand
from .export import ExportCommand
-from .onnxruntime import ONNXRuntimeCommand


logger = logging.get_logger()

-OPTIMUM_CLI_SUBCOMMANDS = [ExportCommand, EnvironmentCommand, ONNXRuntimeCommand]
+# The table below contains the optimum-cli root subcommands provided by the optimum package
+OPTIMUM_CLI_ROOT_SUBCOMMANDS = [ExportCommand, EnvironmentCommand]
+
+# The table below is dynamically populated when loading subpackages
+_OPTIMUM_CLI_SUBCOMMANDS = []


def optimum_cli_subcommand(parent_command: Optional[Type[BaseOptimumCLICommand]] = None):
    """
    A decorator to declare optimum-cli subcommands.

    The declaration of an optimum-cli subcommand looks like this:

    ```
    @optimum_cli_subcommand()
    class MySubcommand(BaseOptimumCLICommand):
        <implementation>
    ```

    or

    ```
    @optimum_cli_subcommand(ExportCommand)
    class MySubcommand(BaseOptimumCLICommand):
        <implementation>
    ```

    Args:
        parent_command: (`Optional[Type[BaseOptimumCLICommand]]`):
            The class of the parent command or None if this is a top-level command. Defaults to None.
    """
    if parent_command is not None and not issubclass(parent_command, BaseOptimumCLICommand):
        raise ValueError(f"The parent command {parent_command} must be a subclass of BaseOptimumCLICommand")

    def wrapper(subcommand):
        if not issubclass(subcommand, BaseOptimumCLICommand):
            raise ValueError(f"The subcommand {subcommand} must be a subclass of BaseOptimumCLICommand")
        _OPTIMUM_CLI_SUBCOMMANDS.append((subcommand, parent_command))

    return wrapper


def resolve_command_to_command_instance(
@@ -137,15 +178,19 @@ def main():
    root = RootOptimumCLICommand("Optimum CLI tool", usage="optimum-cli")
    parser = root.parser

-   for subcommand_cls in OPTIMUM_CLI_SUBCOMMANDS:
+   for subcommand_cls in OPTIMUM_CLI_ROOT_SUBCOMMANDS:
        register_optimum_cli_subcommand(subcommand_cls, parent_command=root)

-   commands_in_register = dynamic_load_commands_in_register()
+   # Load subpackages to give them a chance to declare their own subcommands
+   load_subpackages()

+   # Register subcommands declared by the subpackages or found in the register files under commands/register
+   commands_to_register = _OPTIMUM_CLI_SUBCOMMANDS + dynamic_load_commands_in_register()
    command2command_instance = resolve_command_to_command_instance(
-       root, [parent_command_cls for _, parent_command_cls in commands_in_register if parent_command_cls is not None]
+       root, [parent_command_cls for _, parent_command_cls in commands_to_register if parent_command_cls is not None]
    )

-   for command_or_command_info, parent_command in commands_in_register:
+   for command_or_command_info, parent_command in commands_to_register:
        if parent_command is None:
            parent_command_instance = root
        else:
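Taken together, the new decorator and the `load_subpackages()` call let a subpackage register CLI commands simply by being imported. A minimal sketch of such a declaration, assuming the usual `BaseOptimumCLICommand` interface; `MyBackendCommand` and its `CommandInfo` fields are invented for illustration:

```
from optimum.commands import BaseOptimumCLICommand, CommandInfo, optimum_cli_subcommand


@optimum_cli_subcommand()  # no parent command: registers at the root of optimum-cli
class MyBackendCommand(BaseOptimumCLICommand):
    COMMAND = CommandInfo(name="my-backend", help="Hypothetical backend-specific commands.")

    def run(self):
        # Runs when a user invokes `optimum-cli my-backend`.
        print("my-backend subcommand invoked")
```

Importing the module that contains this class appends `(MyBackendCommand, None)` to `_OPTIMUM_CLI_SUBCOMMANDS`, and `main()` then registers it alongside the built-in root subcommands.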
30 changes: 26 additions & 4 deletions optimum/configuration_utils.py
@@ -18,9 +18,9 @@
import json
import os
import re
+import warnings
from typing import Any, Dict, List, Tuple, Union

-from huggingface_hub import HfFolder
from packaging import version
from transformers import PretrainedConfig
from transformers import __version__ as transformers_version_str
@@ -93,7 +93,19 @@ def save_pretrained(self, save_directory: Union[str, os.PathLike], push_to_hub:
            repo_id = self._create_repo(repo_id, **kwargs)

            use_auth_token = kwargs.get("use_auth_token", None)
-           token = HfFolder.get_token() if use_auth_token is True else use_auth_token
+           token = kwargs.get("token", None)
+
+           if use_auth_token is not None:
+               warnings.warn(
+                   "The `use_auth_token` argument is deprecated and will be removed soon. Please use the `token` argument instead.",
+                   FutureWarning,
+               )
+               if token is not None:
+                   raise ValueError(
+                       "You cannot use both `use_auth_token` and `token` arguments at the same time."
+                   )
+               kwargs["token"] = use_auth_token
+               token = use_auth_token

            files_timestamps = self._get_files_timestamps(save_directory)
@@ -197,6 +209,7 @@ def _get_config_dict(
        resume_download = kwargs.pop("resume_download", False)
        proxies = kwargs.pop("proxies", None)
        use_auth_token = kwargs.pop("use_auth_token", None)
+       token = kwargs.pop("token", None)
        local_files_only = kwargs.pop("local_files_only", False)
        revision = kwargs.pop("revision", None)
        trust_remote_code = kwargs.pop("trust_remote_code", None)
@@ -205,6 +218,15 @@
        from_auto_class = kwargs.pop("_from_auto", False)
        commit_hash = kwargs.pop("_commit_hash", None)

+       if use_auth_token is not None:
+           warnings.warn(
+               "The `use_auth_token` argument is deprecated and will be removed soon. Please use the `token` argument instead.",
+               FutureWarning,
+           )
+           if token is not None:
+               raise ValueError("You cannot use both `use_auth_token` and `token` arguments at the same time.")
+           token = use_auth_token

        if trust_remote_code is True:
            logger.warning(
                "The argument `trust_remote_code` is to be used with Auto classes. It has no effect here and is"
@@ -255,7 +277,7 @@ def _get_config_dict(
                proxies=proxies,
                resume_download=resume_download,
                local_files_only=local_files_only,
-               use_auth_token=use_auth_token,
+               token=token,
                user_agent=user_agent,
            )
        else:
@@ -268,7 +290,7 @@ def _get_config_dict(
                proxies=proxies,
                resume_download=resume_download,
                local_files_only=local_files_only,
-               use_auth_token=use_auth_token,
+               token=token,
                user_agent=user_agent,
                revision=revision,
                subfolder=subfolder,
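This `use_auth_token` → `token` shim has the same shape everywhere it appears in this commit; as a standalone sketch (the function here is hypothetical, not part of optimum's API):

```
import warnings
from typing import Optional, Union


def load_config(repo_id: str, token: Optional[Union[bool, str]] = None,
                use_auth_token: Optional[Union[bool, str]] = None):
    # Accept the deprecated keyword for a transition period, but funnel
    # everything into `token` so downstream code sees a single argument.
    if use_auth_token is not None:
        warnings.warn(
            "The `use_auth_token` argument is deprecated and will be removed soon. "
            "Please use the `token` argument instead.",
            FutureWarning,
        )
        if token is not None:
            raise ValueError("You cannot use both `use_auth_token` and `token` arguments at the same time.")
        token = use_auth_token
    ...  # proceed using `token` only
```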
21 changes: 17 additions & 4 deletions optimum/exporters/onnx/__main__.py
@@ -15,6 +15,7 @@
"""Entry point to the optimum.exporters.onnx command line."""

import argparse
import warnings
from pathlib import Path

from huggingface_hub.constants import HUGGINGFACE_HUB_CACHE
@@ -66,6 +67,7 @@ def main_export(
    force_download: bool = False,
    local_files_only: bool = False,
    use_auth_token: Optional[Union[bool, str]] = None,
+   token: Optional[Union[bool, str]] = None,
    for_ort: bool = False,
    do_validation: bool = True,
    model_kwargs: Optional[Dict[str, Any]] = None,
@@ -135,9 +137,11 @@
            cached versions if they exist.
        local_files_only (`Optional[bool]`, defaults to `False`):
            Whether or not to only look at local files (i.e., do not try to download the model).
-       use_auth_token (`Optional[str]`, defaults to `None`):
+       use_auth_token (`Optional[Union[bool,str]]`, defaults to `None`):
+           Deprecated. Please use the `token` argument instead.
+       token (`Optional[Union[bool,str]]`, defaults to `None`):
            The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated
-           when running `transformers-cli login` (stored in `~/.huggingface`).
+           when running `huggingface-cli login` (stored in `huggingface_hub.constants.HF_TOKEN_PATH`).
        model_kwargs (`Optional[Dict[str, Any]]`, defaults to `None`):
            Experimental usage: keyword arguments to pass to the model during
            the export. This argument should be used along the `custom_onnx_configs` argument
@@ -174,6 +178,15 @@
    ```
    """

+   if use_auth_token is not None:
+       warnings.warn(
+           "The `use_auth_token` argument is deprecated and will be removed soon. Please use the `token` argument instead.",
+           FutureWarning,
+       )
+       if token is not None:
+           raise ValueError("You cannot use both `use_auth_token` and `token` arguments at the same time.")
+       token = use_auth_token

    if fp16:
        if dtype is not None:
            raise ValueError(
@@ -250,7 +263,7 @@
        subfolder=subfolder,
        revision=revision,
        cache_dir=cache_dir,
-       use_auth_token=use_auth_token,
+       token=token,
        local_files_only=local_files_only,
        force_download=force_download,
        trust_remote_code=trust_remote_code,
@@ -283,7 +296,7 @@
        subfolder=subfolder,
        revision=revision,
        cache_dir=cache_dir,
-       use_auth_token=use_auth_token,
+       token=token,
        local_files_only=local_files_only,
        force_download=force_download,
        trust_remote_code=trust_remote_code,
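With the new parameter in place, programmatic exports can authenticate via `token`; a minimal sketch, assuming `main_export`'s usual `model_name_or_path`/`output`/`task` parameters (only `token` itself appears in the hunks above):

```
from optimum.exporters.onnx import main_export

# token=True reuses the token stored by `huggingface-cli login`;
# a string passes an explicit token, e.g. for gated or private models.
main_export(
    model_name_or_path="hf-internal-testing/tiny-random-gpt2",
    output="gpt2_onnx",
    task="text-generation",
    token=True,
)
```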