File tree: 8 files changed, +169 −0
#!/bin/bash
# Run a llama.cpp build script inside a podman container, with the
# host $HOME bind-mounted and the current directory as workdir.
#
# Usage: ./<script> [what]    # runs ./build.<what>.sh (default: remoting)
# Env:   PERF_MODE, BENCH_MODE are forwarded into the container.

set -o pipefail
set -o errexit
set -o nounset
set -o errtrace

# Extra podman options; /dev/dri exposes the GPU render node.
opts=""
opts="$opts --device /dev/dri"

# IMAGE=quay.io/ramalama/remoting:latest
IMAGE=localhost/mesa:compile

what=${1:-}
if [[ -z "$what" ]]; then
    what=remoting
fi

cmd="bash ./build.$what.sh"

POD_NAME=mac_ai_compiling
# Remove a leftover container from a previous run.
# '|| true' keeps errexit from aborting when the container does not exist.
podman machine ssh podman rm $POD_NAME --force || true

set -x
# $opts and $cmd are intentionally unquoted: they must word-split
# into multiple arguments.
podman run \
    --name $POD_NAME \
    --user root:root \
    --cgroupns host \
    --security-opt label=disable \
    --env HOME="$HOME" \
    --env PERF_MODE="${PERF_MODE:-}" \
    --env BENCH_MODE="${BENCH_MODE:-}" \
    -v "$HOME":"$HOME":Z \
    -w "$PWD" \
    -it --rm \
    $opts \
    $IMAGE \
    $cmd
# Configure the llama.cpp build with the API-remoting backend (Metal on).
# A non-empty PERF_MODE selects the "-prod" build-directory suffix.
FLAVOR=${PERF_MODE:+-prod}

cmake -S . -B ../build.remoting-backend$FLAVOR \
    -DGGML_REMOTINGBACKEND=ON \
    -DGGML_NATIVE=OFF \
    -DGGML_METAL=ON \
    -DGGML_BACKEND_DL=OFF \
    -DLLAMA_CURL=OFF \
    -DGGML_VULKAN=OFF -DVulkan_INCLUDE_DIR=/opt/homebrew/include/ -DVulkan_LIBRARY=/opt/homebrew/lib/libMoltenVK.dylib \
    "$@"

# -DCMAKE_BUILD_TYPE=Debug \
#
# Configure the llama.cpp build with the API-remoting frontend.
# Extra command-line arguments are forwarded to cmake.
flags=(
    -DGGML_REMOTINGFRONTEND=ON
    -DGGML_CPU_ARM_ARCH=native
    -DGGML_NATIVE=OFF
    -DGGML_OPENMP=OFF
    -DLLAMA_CURL=OFF
    -DCMAKE_BUILD_TYPE=Debug
)
cmake -S . -B ../build.remoting-frontend "${flags[@]}" "$@"
# Configure an in-tree Vulkan build of llama.cpp (Metal disabled).
cmake -S . -B ./build \
    -DGGML_VULKAN=ON \
    -DGGML_NATIVE=OFF \
    -DGGML_METAL=OFF # -DCMAKE_BUILD_TYPE=Debug #-DGGML_VULKAN_DEBUG=1
# Configure the out-of-tree Vulkan build of llama.cpp.
# Forward extra arguments to cmake ("$@"), consistent with the other
# build.*.sh helpers in this set (previously they were dropped).
cmake -S . \
    -B ../build.vulkan \
    -DGGML_VULKAN=ON \
    -DGGML_NATIVE=OFF \
    -DGGML_METAL=OFF \
    -DLLAMA_CURL=OFF \
    -DCMAKE_BUILD_TYPE=Debug \
    "$@"
# Run llama-run against the containerized ("cont") virtio Vulkan ICD.
# Default to the current user's ICD directory instead of a hard-coded
# absolute path (identical for the original author); override with ICD_DIR.
ICD_DIR=${ICD_DIR:-$HOME/.local/share/vulkan/icd.d}
export VK_ICD_FILENAMES=$ICD_DIR/virtio_icd.cont.aarch64.json

llama-run ~/models/llama3.2 "say nothing" --ngl 99
#!/bin/bash
# Run the remoting-frontend build of llama.cpp in one of several modes:
#   BENCH_MODE=server -> llama-server
#   BENCH_MODE=bench  -> llama-bench
#   BENCH_MODE=perf   -> test-backend-ops perf
#   (default)         -> llama-run with a short prompt
# $1 may be "strace" or "gdb" to wrap the binary.
# Env: MODEL (default llama3.2), FLAVOR (build-dir suffix, e.g. "-prod").

# clear
if [[ ${1:-} == "strace" ]]; then
    prefix="strace"
elif [[ ${1:-} == "gdb" ]]; then
    prefix="gdb --args"
else
    prefix=""
fi

MODEL=${MODEL:-llama3.2}

# FLAVOR was read but never defaulted; make the empty default explicit.
FLAVOR=${FLAVOR:-}
LLAMA_BUILD_DIR=../build.remoting-frontend$FLAVOR

MODEL_HOME="$HOME/models"

set -x
# BUGFIX: this branch previously tested BENCH_MODE == "bench", the same
# condition as the llama-bench branch below, which was therefore
# unreachable. It launches llama-server, so test for "server".
if [[ "${BENCH_MODE:-}" == "server" ]]; then
    cat <<EOF
###
### Running llama-server
###

EOF
    $prefix \
        $LLAMA_BUILD_DIR/bin/llama-server \
        --host 0.0.0.0 \
        --port 8080 \
        --model "$MODEL_HOME/$MODEL" \
        --n-gpu-layers 99 \
        --threads 1
elif [[ "${BENCH_MODE:-}" == "bench" ]]; then
    cat <<EOF
###
### Running llama-bench
###

EOF
    $prefix \
        $LLAMA_BUILD_DIR/bin/llama-bench \
        --model "$MODEL_HOME/$MODEL" \
        --n-gpu-layers 99
elif [[ "${BENCH_MODE:-}" == "perf" ]]; then
    cat <<EOF
###
### Running test-backend-ops perf
###

EOF
    $prefix \
        $LLAMA_BUILD_DIR/bin/test-backend-ops perf

else
    PROMPT="say nothing"
    # PROMPT="tell what's Apple metal API"
    $prefix \
        $LLAMA_BUILD_DIR/bin/llama-run \
        --ngl 99 \
        --verbose \
        --context-size 4096 \
        "$MODEL_HOME/$MODEL" \
        "$PROMPT"
fi
#!/bin/bash
# Run llama-run (vulkan build) against one of several Mesa virtio ICDs.
# $1: optional wrapper: strace | gdb | gdbr (gdb that runs then quits).
# Env: MESA_FLAVOR = work | good | cont (default: good), ICD_DIR.
if [[ ${1:-} == "strace" ]]; then
    prefix="strace"
elif [[ ${1:-} == "gdb" ]]; then
    prefix="gdb --args"
elif [[ ${1:-} == "gdbr" ]]; then
    # NOTE(review): 'set confirm on' makes the trailing 'quit' prompt for
    # confirmation; 'set confirm off' may have been intended — confirm.
    prefix="gdb -ex='set confirm on' -ex=run -ex=quit --args"
else
    prefix=""
fi

# rm -f /usr/lib64/libvulkan_virtio.so

# Default to the current user's ICD directory instead of a hard-coded
# absolute path (identical for the original author); override with ICD_DIR.
ICD_DIR=${ICD_DIR:-$HOME/.local/share/vulkan/icd.d}

# BUGFIX: MESA_FLAVOR was unconditionally hard-coded to "good", clobbering
# any environment setting even though the error branch below implies it is
# meant to be configurable. Keep "good" as the default only.
MESA_FLAVOR=${MESA_FLAVOR:-good}
if [[ "$MESA_FLAVOR" == "work" ]]; then
    export VK_ICD_FILENAMES=$ICD_DIR/virtio_icd.aarch64.json
elif [[ "$MESA_FLAVOR" == "good" ]]; then
    export VK_ICD_FILENAMES=$ICD_DIR/virtio_icd.good.aarch64.json
elif [[ "$MESA_FLAVOR" == "cont" ]]; then
    export VK_ICD_FILENAMES=$ICD_DIR/virtio_icd.cont.aarch64.json
else
    echo "ERROR: invalid MESA_FLAVOR=$MESA_FLAVOR"
    exit 1
fi

# init result vtest wsi no_abort log_ctx_info cache no_sparse no_gpl
export VN_DEBUG=vtest
$prefix ../build.vulkan/bin/llama-run --verbose ~/models/llama3.2 "say nothing" --ngl 99
0 comments