Fix torch._inductor.aoti_compile_and_package input (#7400)
Summary:

The inputs to `torch._inductor.aoti_compile_and_package` changed in pytorch/pytorch#140991.

The `args` and `kwargs` no longer need to be passed in; the API now gets them from `exported_program.example_inputs`.
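
For context, a minimal sketch of the new calling convention (the `Add` module, tensor shapes, and output path below are illustrative, not part of this commit):

import os
import tempfile
import torch

class Add(torch.nn.Module):
    def forward(self, x, y):
        return x + y

example_args = (torch.randn(2), torch.randn(2))
ep = torch.export.export(Add(), example_args)

# Before pytorch/pytorch#140991, example inputs were passed explicitly:
#   torch._inductor.aoti_compile_and_package(ep, example_args, package_path=...)
# After the change, they are read from ep.example_inputs:
path = torch._inductor.aoti_compile_and_package(
    ep,
    package_path=os.path.join(tempfile.mkdtemp(), "add.pt2"),
)
compiled = torch._inductor.aoti_load_package(path)
print(compiled(*example_args))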

Differential Revision: D67436429
yushangdi authored and facebook-github-bot committed Dec 19, 2024
1 parent f341da8 commit c14f801
Showing 6 changed files with 6 additions and 10 deletions.
2 changes: 1 addition & 1 deletion .ci/docker/ci_commit_pins/pytorch.txt
@@ -1 +1 @@
-19eff28ff3f19b50da46f5a9ff5f4d4d213806fe
+288aa873831057b1eb7d747914ec4fdc76c23a80
4 changes: 2 additions & 2 deletions examples/models/llama3_2_vision/install_requirements.sh
@@ -5,10 +5,10 @@
 # This source code is licensed under the BSD-style license found in the
 # LICENSE file in the root directory of this source tree.
 
-NIGHTLY_VERSION="dev20241112"
+NIGHTLY_VERSION="dev20241218"
 
 # Install torchtune nightly for model definitions.
-pip install --pre torchtune==0.4.0.${NIGHTLY_VERSION} --extra-index-url https://download.pytorch.org/whl/nightly/cpu --no-cache-dir
+pip install --pre torchtune==0.5.0.${NIGHTLY_VERSION} --extra-index-url https://download.pytorch.org/whl/nightly/cpu --no-cache-dir
 
 # Install torchao.
 pip install "$(dirname "$0")/../../../third-party/ao"
@@ -74,8 +74,6 @@ def test_llama3_2_text_decoder_aoti(self) -> None:
         with tempfile.TemporaryDirectory() as tmpdir:
             path = torch._inductor.aoti_compile_and_package(
                 ep,
-                model.get_example_inputs(),
-                kwargs=model.get_example_kwarg_inputs(),
                 package_path=os.path.join(tmpdir, "text_decoder.pt2"),
             )
             encoder_aoti = torch._inductor.aoti_load_package(path)
@@ -36,7 +36,6 @@ def test_flamingo_vision_encoder(self) -> None:
         with tempfile.TemporaryDirectory() as tmpdir:
             path = torch._inductor.aoti_compile_and_package(
                 ep,
-                model.get_example_inputs(),
                 package_path=os.path.join(tmpdir, "vision_encoder.pt2"),
             )
             print(path)
1 change: 0 additions & 1 deletion extension/llm/modules/test/test_position_embeddings.py
@@ -177,7 +177,6 @@ def test_tiled_token_positional_embedding_aoti(self):
         with tempfile.TemporaryDirectory() as tmpdir:
             path = torch._inductor.aoti_compile_and_package(
                 tpe_ep,
-                (self.x, self.aspect_ratio),
                 package_path=os.path.join(tmpdir, "tpe.pt2"),
             )
             tpe_aoti = load_package(path)
6 changes: 3 additions & 3 deletions install_requirements.py
@@ -112,7 +112,7 @@ def python_is_compatible():
 # NOTE: If a newly-fetched version of the executorch repo changes the value of
 # NIGHTLY_VERSION, you should re-run this script to install the necessary
 # package versions.
-NIGHTLY_VERSION = "dev20241112"
+NIGHTLY_VERSION = "dev20241218"
 
 # The pip repository that hosts nightly torch packages.
 TORCH_NIGHTLY_URL = "https://download.pytorch.org/whl/nightly/cpu"
@@ -124,7 +124,7 @@ def python_is_compatible():
     # been installed on CI before this step, so pip won't reinstall them
     f"torch==2.6.0.{NIGHTLY_VERSION}" if USE_PYTORCH_NIGHTLY else "torch",
     (
-        f"torchvision==0.20.0.{NIGHTLY_VERSION}"
+        f"torchvision==0.22.0.{NIGHTLY_VERSION}"
         if USE_PYTORCH_NIGHTLY
         else "torchvision"
     ),  # For testing.
@@ -135,7 +135,7 @@ def python_is_compatible():
 # TODO: Make each example publish its own requirements.txt
 EXAMPLES_REQUIREMENTS = [
     "timm==1.0.7",
-    f"torchaudio==2.5.0.{NIGHTLY_VERSION}" if USE_PYTORCH_NIGHTLY else "torchaudio",
+    f"torchaudio==2.6.0.{NIGHTLY_VERSION}" if USE_PYTORCH_NIGHTLY else "torchaudio",
     "torchsr==1.0.4",
     "transformers==4.46.1",
 ]
