forked from dusty-nv/jetson-containers
-
Notifications
You must be signed in to change notification settings - Fork 0
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
- Loading branch information
Showing 4 changed files with 145 additions and 0 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,52 @@ | ||
# CI workflow: build the `tvm` container package on a self-hosted Jetson
# runner labeled for JetPack 5.1, then push the image to the dustynv registry.
name: "tvm_jp51"
run-name: "Build tvm (JetPack 5.1)"
on:
  # allow manual runs from the Actions tab
  workflow_dispatch: {}
  push:
    branches:
      - 'dev'
    # Rebuild whenever the tvm package or any package in its dependency
    # chain (build-essential -> python -> numpy -> cmake -> onnx -> pytorch,
    # plus rust) changes; '!' patterns exclude documentation-only edits so
    # README/docs commits don't trigger a multi-hour build.
    paths:
      - '.github/workflows/tvm_jp51.yml'
      - 'packages/tvm/*'
      - '!packages/tvm/README.md'
      - '!packages/tvm/docs.md'
      - 'packages/build-essential/*'
      - '!packages/build-essential/README.md'
      - '!packages/build-essential/docs.md'
      - 'packages/python/*'
      - '!packages/python/README.md'
      - '!packages/python/docs.md'
      - 'packages/numpy/*'
      - '!packages/numpy/README.md'
      - '!packages/numpy/docs.md'
      - 'packages/cmake/cmake_pip/*'
      - '!packages/cmake/cmake_pip/README.md'
      - '!packages/cmake/cmake_pip/docs.md'
      - 'packages/onnx/*'
      - '!packages/onnx/README.md'
      - '!packages/onnx/docs.md'
      - 'packages/pytorch/*'
      - '!packages/pytorch/README.md'
      - '!packages/pytorch/docs.md'
      - 'packages/rust/*'
      - '!packages/rust/README.md'
      - '!packages/rust/docs.md'
jobs:
  tvm_jp51:
    # requires a self-hosted Jetson device flashed with JetPack 5.1
    runs-on: [self-hosted, jetson, jp51]
    steps:
      # log the L4T release of the runner for traceability of the build
      - run: |
          cat /etc/nv_tegra_release
      # Manual clone/checkout instead of actions/checkout: the build runs
      # directly in the runner workspace and reuses an existing clone if
      # one is already present (`|| echo …` tolerates a re-clone failure).
      - name: "Checkout ${{ github.repository }} SHA=${{ github.sha }}"
        run: |
          echo "$RUNNER_WORKSPACE"
          cd $RUNNER_WORKSPACE
          git config --global user.email "[email protected]"
          git config --global user.name "Dustin Franklin"
          git clone $GITHUB_SERVER_URL/$GITHUB_REPOSITORY || echo 'repo already cloned or another error encountered'
          cd jetson-containers
          git fetch origin
          git checkout $GITHUB_SHA
          git status
          ls -a
      # build the tvm package and push the result to the dustynv registry
      - run: ./build.sh --name=runner/ --push=dustynv tvm
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,76 @@ | ||
#---
# name: tvm
# group: ml
# depends: [pytorch, rust]
# requires: '>=34.1.0'
# test: test.py
#---
ARG BASE_IMAGE
FROM ${BASE_IMAGE}

WORKDIR /opt

# CUDA architecture lists injected by jetson-containers (see config.py):
# CUDAARCHS is CMake-style ("72;87"), TORCH_CUDA_ARCH_LIST is torch-style ("7.2;8.7")
ARG CUDAARCHS
ARG TORCH_CUDA_ARCH_LIST

# source repo/branch are overridable so forks and release branches can be built
ARG TVM_REPO=apache/tvm
ARG TVM_BRANCH=main

# MLC/TVM recommends to use LLVM
RUN apt-get update && \
    apt-get install -y --no-install-recommends \
        llvm \
        llvm-dev \
    && rm -rf /var/lib/apt/lists/* \
    && apt-get clean

# uncomment to bust the build cache whenever the upstream branch head moves
#ADD https://api.github.com/repos/${TVM_REPO}/git/refs/heads/${TVM_BRANCH} /tmp/tvm_version.json

# shallow clone with submodules (3rdparty/ is required for the build)
RUN git clone --branch=${TVM_BRANCH} --depth=1 --recursive https://github.com/${TVM_REPO}

# Patch the bundled libflash_attn so it builds for the architectures we pass
# via CUDAARCHS instead of its hard-coded SM list / -gencode flags:
# https://github.com/Dao-AILab/flash-attention/pull/343
RUN wget https://raw.githubusercontent.com/Dao-AILab/flash-attention/main/csrc/flash_attn/src/static_switch.h -O tvm/3rdparty/libflash_attn/src/static_switch.h && \
    sed 's|^set_target_properties(flash_attn PROPERTIES CUDA_ARCHITECTURES.*||' -i tvm/3rdparty/libflash_attn/src/CMakeLists.txt && \
    sed 's|^.*-gencode.*|\\|' -i tvm/3rdparty/libflash_attn/src/CMakeLists.txt && \
    cat tvm/3rdparty/libflash_attn/src/CMakeLists.txt

# Configure and build TVM with the CUDA/TensorRT/LLVM backends enabled.
# USE_LIBTORCH points at the torch install discovered via pip so the
# libtorch integration links against the container's existing PyTorch.
# CMakeFiles/ is removed in the same layer to keep the image smaller.
RUN mkdir tvm/build && \
    cd tvm/build && \
    cmake -G Ninja \
        -DCMAKE_CXX_STANDARD=17 \
        -DCMAKE_CUDA_STANDARD=17 \
        -DCMAKE_CUDA_ARCHITECTURES=${CUDAARCHS} \
        -DUSE_CUDA=ON \
        -DUSE_CUDNN=ON \
        -DUSE_CUBLAS=ON \
        -DUSE_CURAND=ON \
        -DUSE_CUTLASS=ON \
        -DUSE_THRUST=ON \
        -DUSE_TENSORRT_CODEGEN=ON \
        -DUSE_TENSORRT_RUNTIME=ON \
        -DUSE_GRAPH_EXECUTOR_CUDA_GRAPH=ON \
        -DUSE_TARGET_ONNX=ON \
        -DUSE_LLVM=ON \
        -DUSE_STACKVM_RUNTIME=ON \
        -DUSE_LIBTORCH=$(pip3 show torch | grep Location: | cut -d' ' -f2)/torch \
        -DCAFFE2_USE_CUDNN=1 \
        -DSUMMARIZE=ON \
        ../ && \
    ninja && \
    rm -rf CMakeFiles

# package the Python bindings as a wheel and keep only the wheel in /opt
RUN cd tvm/python && \
    python3 setup.py --verbose bdist_wheel && \
    cp dist/tvm*.whl /opt && \
    rm -rf dist && \
    rm -rf build

RUN pip3 install --no-cache-dir --verbose tvm*.whl

WORKDIR /

# sanity-check that the installed wheel imports cleanly
RUN pip3 show tvm && python3 -c 'import tvm'

ENV TVM_HOME=/opt/tvm

# The wheel doesn't bundle the 3rdparty/ sources that some TVM features
# expect to find next to the installed package, so link the source tree's
# 3rdparty into the pip install location (queried from pip rather than
# hard-coding a python3.X dist-packages path).
# FIX: this line previously had no RUN instruction (invalid Dockerfile) and
# linked from /opt/mlc-llm, which does not exist in this image.
RUN ln -s /opt/tvm/3rdparty $(pip3 show tvm | grep Location: | cut -d' ' -f2)/tvm/3rdparty
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,7 @@ | ||
|
||
from jetson_containers import CUDA_ARCHITECTURES

# Expose the CUDA architectures to the Dockerfile in the two formats it
# needs: CMake-style integers ("72;87") and torch-style majors.minors ("7.2;8.7").
cmake_archs = ';'.join(str(arch) for arch in CUDA_ARCHITECTURES)
torch_archs = ';'.join(f'{arch/10:.1f}' for arch in CUDA_ARCHITECTURES)

package['build_args'] = {
    'CUDAARCHS': cmake_archs,
    'TORCH_CUDA_ARCH_LIST': torch_archs,
}
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,10 @@ | ||
#!/usr/bin/env python3
# Container self-test: verify the tvm wheel imports and sees the CUDA device.
import tvm
import tvm.runtime

print('tvm version:', tvm.__version__)
print('tvm cuda:', tvm.cuda().exist)

# dump the build configuration (one "key: value" line per entry)
for key, value in tvm.support.libinfo().items():
    print(f'{key}: {value}')

# fail the package test if TVM cannot access the GPU
assert tvm.cuda().exist