diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 308bc55ead71..ab704aa80a2b 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -19,8 +19,8 @@ jobs:
fail-fast: false
matrix:
libgccjit_version:
- - { gcc: "libgccjit.so", artifacts_branch: "master" }
- - { gcc: "libgccjit_without_int128.so", artifacts_branch: "master-without-128bit-integers" }
+ - { gcc: "gcc-13.deb" }
+ - { gcc: "gcc-13-without-int128.deb" }
commands: [
"--mini-tests",
"--std-tests",
@@ -32,60 +32,36 @@ jobs:
"--extended-regex-tests",
"--test-successful-rustc --nb-parts 2 --current-part 0",
"--test-successful-rustc --nb-parts 2 --current-part 1",
+ "--projects",
]
steps:
- uses: actions/checkout@v3
+ # `rustup show` installs from rust-toolchain.toml
+ - name: Setup rust toolchain
+ run: rustup show
+
+ - name: Setup rust cache
+ uses: Swatinem/rust-cache@v2
+
- name: Install packages
# `llvm-14-tools` is needed to install the `FileCheck` binary which is used for asm tests.
run: sudo apt-get install ninja-build ripgrep llvm-14-tools
- name: Download artifact
- uses: dawidd6/action-download-artifact@v2
- with:
- workflow: main.yml
- name: gcc-13
- path: gcc-13
- repo: antoyo/gcc
- branch: ${{ matrix.libgccjit_version.artifacts_branch }}
- event: push
- search_artifacts: true # Because, instead, the action only check the last job ran and that won't work since we want multiple artifacts.
+ run: curl -LO https://github.com/antoyo/gcc/releases/latest/download/${{ matrix.libgccjit_version.gcc }}
- name: Setup path to libgccjit
run: |
- sudo dpkg --force-overwrite -i gcc-13/gcc-13.deb
- echo /usr/lib/ > gcc_path
+ sudo dpkg --force-overwrite -i ${{ matrix.libgccjit_version.gcc }}
+ echo 'gcc-path = "/usr/lib/"' > config.toml
- name: Set env
run: |
- echo "LIBRARY_PATH=$(cat gcc_path)" >> $GITHUB_ENV
- echo "LD_LIBRARY_PATH=$(cat gcc_path)" >> $GITHUB_ENV
echo "workspace="$GITHUB_WORKSPACE >> $GITHUB_ENV
-
- - name: Cache cargo installed crates
- uses: actions/cache@v3
- with:
- path: ~/.cargo/bin
- key: cargo-installed-crates2-ubuntu-latest
-
- - name: Cache cargo registry
- uses: actions/cache@v3
- with:
- path: ~/.cargo/registry
- key: ${{ runner.os }}-cargo-registry2-${{ hashFiles('**/Cargo.lock') }}
-
- - name: Cache cargo index
- uses: actions/cache@v3
- with:
- path: ~/.cargo/git
- key: ${{ runner.os }}-cargo-index-${{ hashFiles('**/Cargo.lock') }}
-
- - name: Cache cargo target dir
- uses: actions/cache@v3
- with:
- path: target
- key: ${{ runner.os }}-cargo-build-target-${{ hashFiles('rust-toolchain') }}
+ echo "LIBRARY_PATH=/usr/lib" >> $GITHUB_ENV
+ echo "LD_LIBRARY_PATH=/usr/lib" >> $GITHUB_ENV
#- name: Cache rust repository
## We only clone the rust repository for rustc tests
@@ -99,11 +75,9 @@ jobs:
- name: Build
run: |
./y.sh prepare --only-libcore
- # TODO: remove --features master when it is back to the default.
- ./y.sh build --features master
- # TODO: remove --features master when it is back to the default.
- cargo test --features master
- ./clean_all.sh
+ ./y.sh build
+ cargo test
+ ./y.sh clean all
- name: Prepare dependencies
run: |
@@ -111,23 +85,24 @@ jobs:
git config --global user.name "User"
./y.sh prepare
- # Compile is a separate step, as the actions-rs/cargo action supports error annotations
- - name: Compile
- uses: actions-rs/cargo@v1.0.3
- with:
- command: build
- args: --release
-
- name: Add more failing tests because the sysroot is not compiled with LTO
- run: cat failing-non-lto-tests.txt >> failing-ui-tests.txt
+ run: cat tests/failing-non-lto-tests.txt >> tests/failing-ui-tests.txt
- name: Run tests
run: |
- # TODO: remove --features master when it is back to the default.
- ./test.sh --features master --release --clean --build-sysroot ${{ matrix.commands }}
+ ./y.sh test --release --clean --build-sysroot ${{ matrix.commands }}
duplicates:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- run: python tools/check_intrinsics_duplicates.py
+
+ build_system:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v3
+ - name: Test build system
+ run: |
+ cd build_system
+ cargo test
diff --git a/.github/workflows/failures.yml b/.github/workflows/failures.yml
index ae8de79b773d..2bca694e8328 100644
--- a/.github/workflows/failures.yml
+++ b/.github/workflows/failures.yml
@@ -21,14 +21,11 @@ jobs:
libgccjit_version:
- gcc: "libgccjit.so"
artifacts_branch: "master"
- # TODO: switch back to --no-default-features in the case of libgccjit 12 when the default is to enable
- # master again.
- extra: "--features master"
- gcc: "libgccjit_without_int128.so"
artifacts_branch: "master-without-128bit-integers"
- extra: "--features master"
- gcc: "libgccjit12.so"
artifacts_branch: "gcc12"
+ extra: "--no-default-features"
# FIXME(antoyo): we need to set GCC_EXEC_PREFIX so that the linker can find the linker plugin.
# Not sure why it's not found otherwise.
env_extra: "TEST_FLAGS='-Cpanic=abort -Zpanic-abort-tests' GCC_EXEC_PREFIX=/usr/lib/gcc/"
@@ -36,6 +33,13 @@ jobs:
steps:
- uses: actions/checkout@v3
+ # `rustup show` installs from rust-toolchain.toml
+ - name: Setup rust toolchain
+ run: rustup show
+
+ - name: Setup rust cache
+ uses: Swatinem/rust-cache@v2
+
- name: Install packages
run: sudo apt-get install ninja-build ripgrep
@@ -45,56 +49,27 @@ jobs:
- name: Setup path to libgccjit
if: matrix.libgccjit_version.gcc == 'libgccjit12.so'
- run: echo /usr/lib/gcc/x86_64-linux-gnu/12 > gcc_path
+ run: |
+ echo 'gcc-path = "/usr/lib/gcc/x86_64-linux-gnu/12"' > config.toml
+ echo "LIBRARY_PATH=/usr/lib/gcc/x86_64-linux-gnu/12" >> $GITHUB_ENV
+ echo "LD_LIBRARY_PATH=/usr/lib/gcc/x86_64-linux-gnu/12" >> $GITHUB_ENV
- name: Download artifact
if: matrix.libgccjit_version.gcc != 'libgccjit12.so'
- uses: dawidd6/action-download-artifact@v2
- with:
- workflow: main.yml
- name: gcc-13
- path: gcc-13
- repo: antoyo/gcc
- branch: ${{ matrix.libgccjit_version.artifacts_branch }}
- event: push
- search_artifacts: true # Because, instead, the action only check the last job ran and that won't work since we want multiple artifacts.
+ run: curl -LO https://github.com/antoyo/gcc/releases/latest/download/gcc-13.deb
- name: Setup path to libgccjit
if: matrix.libgccjit_version.gcc != 'libgccjit12.so'
run: |
- sudo dpkg --force-overwrite -i gcc-13/gcc-13.deb
- echo /usr/lib/ > gcc_path
+ sudo dpkg --force-overwrite -i gcc-13.deb
+ echo 'gcc-path = "/usr/lib"' > config.toml
+ echo "LIBRARY_PATH=/usr/lib" >> $GITHUB_ENV
+ echo "LD_LIBRARY_PATH=/usr/lib" >> $GITHUB_ENV
- name: Set env
run: |
- echo "LIBRARY_PATH=$(cat gcc_path)" >> $GITHUB_ENV
- echo "LD_LIBRARY_PATH=$(cat gcc_path)" >> $GITHUB_ENV
echo "workspace="$GITHUB_WORKSPACE >> $GITHUB_ENV
- - name: Cache cargo installed crates
- uses: actions/cache@v3
- with:
- path: ~/.cargo/bin
- key: cargo-installed-crates2-ubuntu-latest
-
- - name: Cache cargo registry
- uses: actions/cache@v3
- with:
- path: ~/.cargo/registry
- key: ${{ runner.os }}-cargo-registry2-${{ hashFiles('**/Cargo.lock') }}
-
- - name: Cache cargo index
- uses: actions/cache@v3
- with:
- path: ~/.cargo/git
- key: ${{ runner.os }}-cargo-index-${{ hashFiles('**/Cargo.lock') }}
-
- - name: Cache cargo target dir
- uses: actions/cache@v3
- with:
- path: target
- key: ${{ runner.os }}-cargo-build-target-${{ hashFiles('rust-toolchain') }}
-
#- name: Cache rust repository
#uses: actions/cache@v3
#id: cache-rust-repository
@@ -115,18 +90,11 @@ jobs:
if: matrix.libgccjit_version.gcc != 'libgccjit12.so'
run: ./y.sh prepare
- # Compile is a separate step, as the actions-rs/cargo action supports error annotations
- - name: Compile
- uses: actions-rs/cargo@v1.0.3
- with:
- command: build
- args: --release
-
- name: Add more failing tests because the sysroot is not compiled with LTO
- run: cat failing-non-lto-tests.txt >> failing-ui-tests.txt
+ run: cat tests/failing-non-lto-tests.txt >> tests/failing-ui-tests.txt
- name: Run tests
id: tests
run: |
- ${{ matrix.libgccjit_version.env_extra }} ./test.sh --release --clean --build-sysroot --test-failing-rustc ${{ matrix.libgccjit_version.extra }} | tee output_log
+ ${{ matrix.libgccjit_version.env_extra }} ./y.sh test --release --clean --build-sysroot --test-failing-rustc ${{ matrix.libgccjit_version.extra }} | tee output_log
rg --text "test result" output_log >> $GITHUB_STEP_SUMMARY
diff --git a/.github/workflows/gcc12.yml b/.github/workflows/gcc12.yml
index a0d363cf1fbd..f7bb15604923 100644
--- a/.github/workflows/gcc12.yml
+++ b/.github/workflows/gcc12.yml
@@ -28,9 +28,6 @@ jobs:
# FIXME: re-enable asm tests when GCC can emit in the right syntax.
# "--asm-tests",
"--test-libcore",
- "--extended-rand-tests",
- "--extended-regex-example-tests",
- "--extended-regex-tests",
"--test-successful-rustc --nb-parts 2 --current-part 0",
"--test-successful-rustc --nb-parts 2 --current-part 1",
]
@@ -38,42 +35,25 @@ jobs:
steps:
- uses: actions/checkout@v3
+ # `rustup show` installs from rust-toolchain.toml
+ - name: Setup rust toolchain
+ run: rustup show
+
+ - name: Setup rust cache
+ uses: Swatinem/rust-cache@v2
+
- name: Install packages
# `llvm-14-tools` is needed to install the `FileCheck` binary which is used for asm tests.
run: sudo apt-get install ninja-build ripgrep llvm-14-tools libgccjit-12-dev
- name: Setup path to libgccjit
- run: echo /usr/lib/gcc/x86_64-linux-gnu/12 > gcc_path
+ run: echo 'gcc-path = "/usr/lib/gcc/x86_64-linux-gnu/12"' > config.toml
- name: Set env
run: |
- echo "LIBRARY_PATH=$(cat gcc_path)" >> $GITHUB_ENV
- echo "LD_LIBRARY_PATH=$(cat gcc_path)" >> $GITHUB_ENV
echo "workspace="$GITHUB_WORKSPACE >> $GITHUB_ENV
-
- - name: Cache cargo installed crates
- uses: actions/cache@v3
- with:
- path: ~/.cargo/bin
- key: cargo-installed-crates2-ubuntu-latest
-
- - name: Cache cargo registry
- uses: actions/cache@v3
- with:
- path: ~/.cargo/registry
- key: ${{ runner.os }}-cargo-registry2-${{ hashFiles('**/Cargo.lock') }}
-
- - name: Cache cargo index
- uses: actions/cache@v3
- with:
- path: ~/.cargo/git
- key: ${{ runner.os }}-cargo-index-${{ hashFiles('**/Cargo.lock') }}
-
- - name: Cache cargo target dir
- uses: actions/cache@v3
- with:
- path: target
- key: ${{ runner.os }}-cargo-build-target-${{ hashFiles('rust-toolchain') }}
+ echo "LIBRARY_PATH=/usr/lib/gcc/x86_64-linux-gnu/12" >> $GITHUB_ENV
+ echo "LD_LIBRARY_PATH=/usr/lib/gcc/x86_64-linux-gnu/12" >> $GITHUB_ENV
#- name: Cache rust repository
## We only clone the rust repository for rustc tests
@@ -89,7 +69,7 @@ jobs:
./y.sh prepare --only-libcore --libgccjit12-patches
./y.sh build --no-default-features --sysroot-panic-abort
cargo test --no-default-features
- ./clean_all.sh
+ ./y.sh clean all
- name: Prepare dependencies
run: |
@@ -97,19 +77,12 @@ jobs:
git config --global user.name "User"
./y.sh prepare --libgccjit12-patches
- # Compile is a separate step, as the actions-rs/cargo action supports error annotations
- - name: Compile
- uses: actions-rs/cargo@v1.0.3
- with:
- command: build
- args: --release
-
- name: Add more failing tests for GCC 12
- run: cat failing-ui-tests12.txt >> failing-ui-tests.txt
+ run: cat tests/failing-ui-tests12.txt >> tests/failing-ui-tests.txt
- name: Add more failing tests because the sysroot is not compiled with LTO
- run: cat failing-non-lto-tests.txt >> failing-ui-tests.txt
+ run: cat tests/failing-non-lto-tests.txt >> tests/failing-ui-tests.txt
- name: Run tests
run: |
- ./test.sh --release --clean --build-sysroot ${{ matrix.commands }} --no-default-features
+ ./y.sh test --release --clean --build-sysroot ${{ matrix.commands }} --no-default-features
diff --git a/.github/workflows/m68k.yml b/.github/workflows/m68k.yml
index 4d9d7e23dc2b..a8c6b614ce81 100644
--- a/.github/workflows/m68k.yml
+++ b/.github/workflows/m68k.yml
@@ -36,21 +36,22 @@ jobs:
]
steps:
+ - uses: actions/checkout@v3
+
+ # `rustup show` installs from rust-toolchain.toml
+ - name: Setup rust toolchain
+ run: rustup show
+
+ - name: Setup rust cache
+ uses: Swatinem/rust-cache@v2
+
- name: Install packages
run: |
sudo apt-get update
sudo apt-get install qemu qemu-user-static
- - uses: actions/checkout@v3
-
- - name: Download GCC artifact
- uses: dawidd6/action-download-artifact@v2
- with:
- workflow: m68k.yml
- name: gcc-m68k-13
- repo: cross-cg-gcc-tools/cross-gcc
- branch: master
- event: push
+ - name: Download artifact
+ run: curl -LO https://github.com/cross-cg-gcc-tools/cross-gcc/releases/latest/download/gcc-m68k-13.deb
- name: Download VM artifact
uses: dawidd6/action-download-artifact@v2
@@ -64,37 +65,13 @@ jobs:
- name: Setup path to libgccjit
run: |
sudo dpkg -i gcc-m68k-13.deb
- echo /usr/lib/ > gcc_path
+ echo 'gcc-path = "/usr/lib/"' > config.toml
- name: Set env
run: |
- echo "LIBRARY_PATH=$(cat gcc_path)" >> $GITHUB_ENV
- echo "LD_LIBRARY_PATH=$(cat gcc_path)" >> $GITHUB_ENV
echo "workspace="$GITHUB_WORKSPACE >> $GITHUB_ENV
-
- - name: Cache cargo installed crates
- uses: actions/cache@v3
- with:
- path: ~/.cargo/bin
- key: cargo-installed-crates2-ubuntu-latest
-
- #- name: Cache cargo registry
- #uses: actions/cache@v3
- #with:
- #path: ~/.cargo/registry
- #key: ${{ runner.os }}-cargo-registry2-${{ hashFiles('**/Cargo.lock') }}
-
- #- name: Cache cargo index
- #uses: actions/cache@v3
- #with:
- #path: ~/.cargo/git
- #key: ${{ runner.os }}-cargo-index-${{ hashFiles('**/Cargo.lock') }}
-
- - name: Cache cargo target dir
- uses: actions/cache@v3
- with:
- path: target
- key: ${{ runner.os }}-cargo-build-target-${{ hashFiles('rust-toolchain') }}
+ echo "LIBRARY_PATH=/usr/lib" >> $GITHUB_ENV
+ echo "LD_LIBRARY_PATH=/usr/lib" >> $GITHUB_ENV
#- name: Cache rust repository
## We only clone the rust repository for rustc tests
@@ -114,11 +91,9 @@ jobs:
- name: Build
run: |
./y.sh prepare --only-libcore --cross
- # TODO: remove --features master when it is back to the default.
- ./y.sh build --target-triple m68k-unknown-linux-gnu --features master
- # TODO: remove --features master when it is back to the default.
- CG_GCC_TEST_TARGET=m68k-unknown-linux-gnu cargo test --features master
- ./clean_all.sh
+ ./y.sh build --target-triple m68k-unknown-linux-gnu
+ CG_GCC_TEST_TARGET=m68k-unknown-linux-gnu cargo test
+ ./y.sh clean all
- name: Prepare dependencies
run: |
@@ -126,17 +101,9 @@ jobs:
git config --global user.name "User"
./y.sh prepare --cross
- # Compile is a separate step, as the actions-rs/cargo action supports error annotations
- - name: Compile
- uses: actions-rs/cargo@v1.0.3
- with:
- command: build
- args: --release
-
- name: Add more failing tests because the sysroot is not compiled with LTO
- run: cat failing-non-lto-tests.txt >> failing-ui-tests.txt
+ run: cat tests/failing-non-lto-tests.txt >> tests/failing-ui-tests.txt
- name: Run tests
run: |
- # TODO: remove --features master when it is back to the default.
- ./test.sh --release --features master --clean --build-sysroot ${{ matrix.commands }}
+ ./y.sh test --release --clean --build-sysroot ${{ matrix.commands }}
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index 43b90fcec933..28336998ffcd 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -26,63 +26,36 @@ jobs:
steps:
- uses: actions/checkout@v3
+ # `rustup show` installs from rust-toolchain.toml
+ - name: Setup rust toolchain
+ run: rustup show
+
+ - name: Setup rust cache
+ uses: Swatinem/rust-cache@v2
+
- name: Install packages
run: sudo apt-get install ninja-build ripgrep
- name: Download artifact
- uses: dawidd6/action-download-artifact@v2
- with:
- workflow: main.yml
- name: gcc-13
- path: gcc-13
- repo: antoyo/gcc
- branch: "master"
- event: push
- search_artifacts: true # Because, instead, the action only check the last job ran and that won't work since we want multiple artifacts.
+ run: curl -LO https://github.com/antoyo/gcc/releases/latest/download/gcc-13.deb
- name: Setup path to libgccjit
run: |
- sudo dpkg --force-overwrite -i gcc-13/gcc-13.deb
- echo /usr/lib/ > gcc_path
+ sudo dpkg --force-overwrite -i gcc-13.deb
+ echo 'gcc-path = "/usr/lib/"' > config.toml
- name: Set env
run: |
- echo "LIBRARY_PATH=$(cat gcc_path)" >> $GITHUB_ENV
- echo "LD_LIBRARY_PATH=$(cat gcc_path)" >> $GITHUB_ENV
echo "workspace="$GITHUB_WORKSPACE >> $GITHUB_ENV
-
- - name: Cache cargo installed crates
- uses: actions/cache@v3
- with:
- path: ~/.cargo/bin
- key: cargo-installed-crates2-ubuntu-latest
-
- - name: Cache cargo registry
- uses: actions/cache@v3
- with:
- path: ~/.cargo/registry
- key: ${{ runner.os }}-cargo-registry2-${{ hashFiles('**/Cargo.lock') }}
-
- - name: Cache cargo index
- uses: actions/cache@v3
- with:
- path: ~/.cargo/git
- key: ${{ runner.os }}-cargo-index-${{ hashFiles('**/Cargo.lock') }}
-
- - name: Cache cargo target dir
- uses: actions/cache@v3
- with:
- path: target
- key: ${{ runner.os }}-cargo-build-target-${{ hashFiles('rust-toolchain') }}
+ echo "LIBRARY_PATH=/usr/lib" >> $GITHUB_ENV
+ echo "LD_LIBRARY_PATH=/usr/lib" >> $GITHUB_ENV
- name: Build
run: |
./y.sh prepare --only-libcore
- # TODO: remove --features master when it is back to the default.
- EMBED_LTO_BITCODE=1 ./y.sh build --release --release-sysroot --features master
- # TODO: remove --features master when it is back to the default.
- cargo test --features master
- ./clean_all.sh
+ EMBED_LTO_BITCODE=1 ./y.sh build --release --release-sysroot
+ cargo test
+ ./y.sh clean all
- name: Prepare dependencies
run: |
@@ -92,17 +65,9 @@ jobs:
# FIXME(antoyo): we cannot enable LTO for stdarch tests currently because of some failing LTO tests using proc-macros.
echo -n 'lto = "fat"' >> build_sysroot/Cargo.toml
- # Compile is a separate step, as the actions-rs/cargo action supports error annotations
- - name: Compile
- uses: actions-rs/cargo@v1.0.3
- with:
- command: build
- args: --release
-
- name: Add more failing tests because of undefined symbol errors (FIXME)
- run: cat failing-lto-tests.txt >> failing-ui-tests.txt
+ run: cat tests/failing-lto-tests.txt >> tests/failing-ui-tests.txt
- name: Run tests
run: |
- # TODO: remove --features master when it is back to the default.
- EMBED_LTO_BITCODE=1 ./test.sh --release --clean --release-sysroot --build-sysroot ${{ matrix.commands }} --features master
+ EMBED_LTO_BITCODE=1 ./y.sh test --release --clean --release-sysroot --build-sysroot ${{ matrix.commands }}
diff --git a/.github/workflows/stdarch.yml b/.github/workflows/stdarch.yml
index 42109ba3e024..41a9318007f1 100644
--- a/.github/workflows/stdarch.yml
+++ b/.github/workflows/stdarch.yml
@@ -26,6 +26,13 @@ jobs:
steps:
- uses: actions/checkout@v3
+ # `rustup show` installs from rust-toolchain.toml
+ - name: Setup rust toolchain
+ run: rustup show
+
+ - name: Setup rust cache
+ uses: Swatinem/rust-cache@v2
+
- name: Install packages
run: sudo apt-get install ninja-build ripgrep
@@ -34,73 +41,39 @@ jobs:
run: |
mkdir intel-sde
cd intel-sde
- dir=sde-external-9.14.0-2022-10-25-lin
+ dir=sde-external-9.33.0-2024-01-07-lin
file=$dir.tar.xz
- wget https://downloadmirror.intel.com/751535/$file
+ wget https://downloadmirror.intel.com/813591/$file
tar xvf $file
sudo mkdir /usr/share/intel-sde
sudo cp -r $dir/* /usr/share/intel-sde
sudo ln -s /usr/share/intel-sde/sde /usr/bin/sde
sudo ln -s /usr/share/intel-sde/sde64 /usr/bin/sde64
- - name: Download artifact
- uses: dawidd6/action-download-artifact@v2
- with:
- workflow: main.yml
- name: gcc-13
- path: gcc-13
- repo: antoyo/gcc
- branch: "master"
- event: push
- search_artifacts: true # Because, instead, the action only check the last job ran and that won't work since we want multiple artifacts.
-
- - name: Setup path to libgccjit
- run: |
- sudo dpkg --force-overwrite -i gcc-13/gcc-13.deb
- echo /usr/lib/ > gcc_path
-
- name: Set env
run: |
- echo "LIBRARY_PATH=$(cat gcc_path)" >> $GITHUB_ENV
- echo "LD_LIBRARY_PATH=$(cat gcc_path)" >> $GITHUB_ENV
echo "workspace="$GITHUB_WORKSPACE >> $GITHUB_ENV
-
- - name: Cache cargo installed crates
- uses: actions/cache@v3
- with:
- path: ~/.cargo/bin
- key: cargo-installed-crates2-ubuntu-latest
-
- - name: Cache cargo registry
- uses: actions/cache@v3
- with:
- path: ~/.cargo/registry
- key: ${{ runner.os }}-cargo-registry2-${{ hashFiles('**/Cargo.lock') }}
-
- - name: Cache cargo index
- uses: actions/cache@v3
- with:
- path: ~/.cargo/git
- key: ${{ runner.os }}-cargo-index-${{ hashFiles('**/Cargo.lock') }}
-
- - name: Cache cargo target dir
- uses: actions/cache@v3
- with:
- path: target
- key: ${{ runner.os }}-cargo-build-target-${{ hashFiles('rust-toolchain') }}
+ echo 'download-gccjit = true' > config.toml
- name: Build
run: |
./y.sh prepare --only-libcore
- # TODO: remove `--features master` when it is back to the default.
- ./y.sh build --release --release-sysroot --features master
- # TODO: remove --features master when it is back to the default.
- cargo test --features master
+ ./y.sh build --release --release-sysroot
+
+ - name: Set env (part 2)
+ run: |
+ # Set the `LD_LIBRARY_PATH` and `LIBRARY_PATH` env variables...
+ echo "LD_LIBRARY_PATH="$(./y.sh info | grep -v Using) >> $GITHUB_ENV
+ echo "LIBRARY_PATH="$(./y.sh info | grep -v Using) >> $GITHUB_ENV
+
+ - name: Build (part 2)
+ run: |
+ cargo test
- name: Clean
if: ${{ !matrix.cargo_runner }}
run: |
- ./clean_all.sh
+ ./y.sh clean all
- name: Prepare dependencies
run: |
@@ -108,29 +81,20 @@ jobs:
git config --global user.name "User"
./y.sh prepare
- # Compile is a separate step, as the actions-rs/cargo action supports error annotations
- - name: Compile
- uses: actions-rs/cargo@v1.0.3
- with:
- command: build
- # TODO: remove `--features master` when it is back to the default.
- args: --release --features master
-
- name: Run tests
if: ${{ !matrix.cargo_runner }}
run: |
- # TODO: remove `--features master` when it is back to the default.
- ./test.sh --release --clean --release-sysroot --build-sysroot --mini-tests --std-tests --test-libcore --features master
+ ./y.sh test --release --clean --release-sysroot --build-sysroot --mini-tests --std-tests --test-libcore
- name: Run stdarch tests
if: ${{ !matrix.cargo_runner }}
run: |
cd build_sysroot/sysroot_src/library/stdarch/
- CHANNEL=release TARGET=x86_64-unknown-linux-gnu CG_RUSTFLAGS="-Ainternal_features" ../../../../cargo.sh test
+ CHANNEL=release TARGET=x86_64-unknown-linux-gnu CG_RUSTFLAGS="-Ainternal_features" ../../../../y.sh cargo test
- name: Run stdarch tests
if: ${{ matrix.cargo_runner }}
run: |
cd build_sysroot/sysroot_src/library/stdarch/
# FIXME: these tests fail when the sysroot is compiled with LTO because of a missing symbol in proc-macro.
- STDARCH_TEST_EVERYTHING=1 CHANNEL=release CARGO_TARGET_X86_64_UNKNOWN_LINUX_GNU_RUNNER="${{ matrix.cargo_runner }}" TARGET=x86_64-unknown-linux-gnu CG_RUSTFLAGS="-Ainternal_features" ../../../../cargo.sh test -- --skip rtm --skip tbm --skip sse4a
+ STDARCH_TEST_EVERYTHING=1 CHANNEL=release CARGO_TARGET_X86_64_UNKNOWN_LINUX_GNU_RUNNER="${{ matrix.cargo_runner }}" TARGET=x86_64-unknown-linux-gnu CG_RUSTFLAGS="-Ainternal_features" ../../../../y.sh cargo test -- --skip rtm --skip tbm --skip sse4a
diff --git a/.gitignore b/.gitignore
index b44d1aa78c2e..bf975f92014d 100644
--- a/.gitignore
+++ b/.gitignore
@@ -10,15 +10,11 @@ perf.data.old
/build_sysroot/sysroot_src
/build_sysroot/Cargo.lock
/build_sysroot/test_target/Cargo.lock
-/rust
-/simple-raytracer
-/regex
-/rand
gimple*
*asm
res
test-backend
-gcc_path
+projects
benchmarks
tools/llvm-project
tools/llvmint
@@ -26,3 +22,5 @@ tools/llvmint-2
# The `llvm` folder is generated by the `tools/generate_intrinsics.py` script to update intrinsics.
llvm
build_system/target
+config.toml
+build
\ No newline at end of file
diff --git a/.ignore b/.ignore
index d8d189e5c7c6..702dd9e2a23f 100644
--- a/.ignore
+++ b/.ignore
@@ -8,3 +8,4 @@
!*gimple*
!*asm*
!.github
+!config.toml
diff --git a/.rustfmt.toml b/.rustfmt.toml
index c7ad93bafe36..87f034950e3b 100644
--- a/.rustfmt.toml
+++ b/.rustfmt.toml
@@ -1 +1 @@
-disable_all_formatting = true
+ignore = ["/src", "/tests"]
diff --git a/Cargo.lock b/Cargo.lock
index ddfce5d59bde..d73101f97d6a 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -23,6 +23,12 @@ version = "2.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b4682ae6287fcf752ecaabbfcc7b6f9b72aa33933dc23a554d853aea8eea8635"
+[[package]]
+name = "boml"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "85fdb93f04c73bff54305fa437ffea5449c41edcaadfe882f35836206b166ac5"
+
[[package]]
name = "cc"
version = "1.0.79"
@@ -74,7 +80,7 @@ dependencies = [
[[package]]
name = "gccjit"
version = "1.0.0"
-source = "git+https://github.com/antoyo/gccjit.rs#6e290f25b1d1edab5ae9ace486fd2dc8c08d6421"
+source = "git+https://github.com/antoyo/gccjit.rs#4b7aba76891e6436984f7f098fe92824d95194d5"
dependencies = [
"gccjit_sys",
]
@@ -82,7 +88,7 @@ dependencies = [
[[package]]
name = "gccjit_sys"
version = "0.0.1"
-source = "git+https://github.com/antoyo/gccjit.rs#6e290f25b1d1edab5ae9ace486fd2dc8c08d6421"
+source = "git+https://github.com/antoyo/gccjit.rs#4b7aba76891e6436984f7f098fe92824d95194d5"
dependencies = [
"libc",
]
@@ -185,6 +191,7 @@ checksum = "436b050e76ed2903236f032a59761c1eb99e1b0aead2c257922771dab1fc8c78"
name = "rustc_codegen_gcc"
version = "0.1.0"
dependencies = [
+ "boml",
"gccjit",
"lang_tester",
"object",
diff --git a/Cargo.toml b/Cargo.toml
index b0b3aeecdbdf..85ad69e00fde 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -19,6 +19,7 @@ harness = false
[features]
master = ["gccjit/master"]
+default = ["master"]
[dependencies]
gccjit = { git = "https://github.com/antoyo/gccjit.rs" }
@@ -37,6 +38,7 @@ tempfile = "3.7.1"
[dev-dependencies]
lang_tester = "0.3.9"
tempfile = "3.1.0"
+boml = "0.3.1"
[profile.dev]
# By compiling dependencies with optimizations, performing tests gets much faster.
@@ -55,3 +57,6 @@ debug = false
[profile.release.build-override]
opt-level = 0
debug = false
+
+[package.metadata.rust-analyzer]
+rustc_private = true
\ No newline at end of file
diff --git a/Readme.md b/Readme.md
index 95fc6374c09a..da6e91587fda 100644
--- a/Readme.md
+++ b/Readme.md
@@ -17,6 +17,18 @@ A secondary goal is to check if using the gcc backend will provide any run-time
**This requires a patched libgccjit in order to work.
You need to use my [fork of gcc](https://github.com/antoyo/gcc) which already includes these patches.**
+```bash
+$ cp config.example.toml config.toml
+```
+
+If you don't need to test GCC patches you wrote in our GCC fork, then the default configuration should
+be all you need. You can update `rustc_codegen_gcc` without worrying about GCC.
+
+### Building with your own GCC version
+
+If you wrote a patch for GCC and want to test it without this backend, you will need
+to do a few more things.
+
To build it (most of these instructions come from [here](https://gcc.gnu.org/onlinedocs/jit/internals/index.html), so don't hesitate to take a look there if you encounter an issue):
```bash
@@ -49,23 +61,32 @@ $ make check-jit
$ make check-jit RUNTESTFLAGS="-v -v -v jit.exp=jit.dg/test-asm.cc"
```
-**Put the path to your custom build of libgccjit in the file `gcc_path`.**
+**Put the path to your custom build of libgccjit in the file `config.toml`.**
+
+You now need to set the `gcc-path` value in `config.toml` with the result of this command:
```bash
-$ dirname $(readlink -f `find . -name libgccjit.so`) > gcc_path
+$ dirname $(readlink -f `find . -name libgccjit.so`)
+```
+
+and to comment out the `download-gccjit` setting:
+
+```toml
+gcc-path = "[MY PATH]"
+# download-gccjit = true
```
Then you can run commands like this:
```bash
$ ./y.sh prepare # download and patch sysroot src and install hyperfine for benchmarking
-$ LIBRARY_PATH=$(cat gcc_path) LD_LIBRARY_PATH=$(cat gcc_path) ./y.sh build --release
+$ ./y.sh build --release
```
To run the tests:
```bash
-$ ./test.sh --release
+$ ./y.sh test --release
```
## Usage
@@ -79,10 +100,10 @@ export CG_GCCJIT_DIR=[the full path to rustc_codegen_gcc]
### Cargo
```bash
-$ CHANNEL="release" $CG_GCCJIT_DIR/cargo.sh run
+$ CHANNEL="release" $CG_GCCJIT_DIR/y.sh cargo run
```
-If you compiled cg_gccjit in debug mode (aka you didn't pass `--release` to `./test.sh`) you should use `CHANNEL="debug"` instead or omit `CHANNEL="release"` completely.
+If you compiled cg_gccjit in debug mode (aka you didn't pass `--release` to `./y.sh test`) you should use `CHANNEL="debug"` instead or omit `CHANNEL="release"` completely.
### LTO
@@ -100,7 +121,7 @@ error: failed to copy bitcode to object file: No such file or directory (os erro
> You should prefer using the Cargo method.
```bash
-$ LIBRARY_PATH=$(cat gcc_path) LD_LIBRARY_PATH=$(cat gcc_path) rustc +$(cat $CG_GCCJIT_DIR/rust-toolchain | grep 'channel' | cut -d '=' -f 2 | sed 's/"//g' | sed 's/ //g') -Cpanic=abort -Zcodegen-backend=$CG_GCCJIT_DIR/target/release/librustc_codegen_gcc.so --sysroot $CG_GCCJIT_DIR/build_sysroot/sysroot my_crate.rs
+$ LIBRARY_PATH="[gcc-path value]" LD_LIBRARY_PATH="[gcc-path value]" rustc +$(cat $CG_GCCJIT_DIR/rust-toolchain | grep 'channel' | cut -d '=' -f 2 | sed 's/"//g' | sed 's/ //g') -Cpanic=abort -Zcodegen-backend=$CG_GCCJIT_DIR/target/release/librustc_codegen_gcc.so --sysroot $CG_GCCJIT_DIR/build_sysroot/sysroot my_crate.rs
```
## Env vars
@@ -118,221 +139,19 @@ $ LIBRARY_PATH=$(cat gcc_path) LD_LIBRARY_PATH=$(cat gcc_path) rustc +$(cat $CG_
Dump a C-like representation to /tmp/gccjit_dumps and enable debug info in order to debug this C-like representation.
+## Extra documentation
+
+More specific documentation is available in the [`doc`](./doc) folder:
+
+ * [Common errors](./doc/errors.md)
+ * [Debugging GCC LTO](./doc/debugging-gcc-lto.md)
+ * [Debugging libgccjit](./doc/debugging-libgccjit.md)
+ * [Git subtree sync](./doc/subtree.md)
+ * [List of useful commands](./doc/tips.md)
+ * [Send a patch to GCC](./doc/sending-gcc-patch.md)
+
## Licensing
While this crate is licensed under a dual Apache/MIT license, it links to `libgccjit` which is under the GPLv3+ and thus, the resulting toolchain (rustc + GCC codegen) will need to be released under the GPL license.
However, programs compiled with `rustc_codegen_gcc` do not need to be released under a GPL license.
-
-## Debugging
-
-Sometimes, libgccjit will crash and output an error like this:
-
-```
-during RTL pass: expand
-libgccjit.so: error: in expmed_mode_index, at expmed.h:249
-0x7f0da2e61a35 expmed_mode_index
- ../../../gcc/gcc/expmed.h:249
-0x7f0da2e61aa4 expmed_op_cost_ptr
- ../../../gcc/gcc/expmed.h:271
-0x7f0da2e620dc sdiv_cost_ptr
- ../../../gcc/gcc/expmed.h:540
-0x7f0da2e62129 sdiv_cost
- ../../../gcc/gcc/expmed.h:558
-0x7f0da2e73c12 expand_divmod(int, tree_code, machine_mode, rtx_def*, rtx_def*, rtx_def*, int)
- ../../../gcc/gcc/expmed.c:4335
-0x7f0da2ea1423 expand_expr_real_2(separate_ops*, rtx_def*, machine_mode, expand_modifier)
- ../../../gcc/gcc/expr.c:9240
-0x7f0da2cd1a1e expand_gimple_stmt_1
- ../../../gcc/gcc/cfgexpand.c:3796
-0x7f0da2cd1c30 expand_gimple_stmt
- ../../../gcc/gcc/cfgexpand.c:3857
-0x7f0da2cd90a9 expand_gimple_basic_block
- ../../../gcc/gcc/cfgexpand.c:5898
-0x7f0da2cdade8 execute
- ../../../gcc/gcc/cfgexpand.c:6582
-```
-
-To see the code which causes this error, call the following function:
-
-```c
-gcc_jit_context_dump_to_file(ctxt, "/tmp/output.c", 1 /* update_locations */)
-```
-
-This will create a C-like file and add the locations into the IR pointing to this C file.
-Then, rerun the program and it will output the location in the second line:
-
-```
-libgccjit.so: /tmp/something.c:61322:0: error: in expmed_mode_index, at expmed.h:249
-```
-
-Or add a breakpoint to `add_error` in gdb and print the line number using:
-
-```
-p loc->m_line
-p loc->m_filename->m_buffer
-```
-
-To print a debug representation of a tree:
-
-```c
-debug_tree(expr);
-```
-
-(defined in print-tree.h)
-
-To print a debug reprensentation of a gimple struct:
-
-```c
-debug_gimple_stmt(gimple_struct)
-```
-
-To get the `rustc` command to run in `gdb`, add the `--verbose` flag to `cargo build`.
-
-To have the correct file paths in `gdb` instead of `/usr/src/debug/gcc/libstdc++-v3/libsupc++/eh_personality.cc`:
-
-Maybe by calling the following at the beginning of gdb:
-
-```
-set substitute-path /usr/src/debug/gcc /path/to/gcc-repo/gcc
-```
-
-TODO(antoyo): but that's not what I remember I was doing.
-
-### `failed to build archive` error
-
-When you get this error:
-
-```
-error: failed to build archive: failed to open object file: No such file or directory (os error 2)
-```
-
-That can be caused by the fact that you try to compile with `lto = "fat"`, but you didn't compile the sysroot with LTO.
-(Not sure if that's the reason since I cannot reproduce anymore. Maybe it happened when forgetting setting `FAT_LTO`.)
-
-### ld: cannot find crtbegin.o
-
-When compiling an executable with libgccijt, if setting the `*LIBRARY_PATH` variables to the install directory, you will get the following errors:
-
-```
-ld: cannot find crtbegin.o: No such file or directory
-ld: cannot find -lgcc: No such file or directory
-ld: cannot find -lgcc: No such file or directory
-libgccjit.so: error: error invoking gcc driver
-```
-
-To fix this, set the variables to `gcc-build/build/gcc`.
-
-### How to debug GCC LTO
-
-Run do the command with `-v -save-temps` and then extract the `lto1` line from the output and run that under the debugger.
-
-### How to send arguments to the GCC linker
-
-```
-CG_RUSTFLAGS="-Clink-args=-save-temps -v" ../cargo.sh build
-```
-
-### How to see the personality functions in the asm dump
-
-```
-CG_RUSTFLAGS="-Clink-arg=-save-temps -v -Clink-arg=-dA" ../cargo.sh build
-```
-
-### How to see the LLVM IR for a sysroot crate
-
-```
-cargo build -v --target x86_64-unknown-linux-gnu -Zbuild-std
-# Take the command from the output and add --emit=llvm-ir
-```
-
-### To prevent the linker from unmangling symbols
-
-Run with:
-
-```
-COLLECT_NO_DEMANGLE=1
-```
-
-### How to use a custom-build rustc
-
- * Build the stage2 compiler (`rustup toolchain link debug-current build/x86_64-unknown-linux-gnu/stage2`).
- * Clean and rebuild the codegen with `debug-current` in the file `rust-toolchain`.
-
-### How to install a forked git-subtree
-
-Using git-subtree with `rustc` requires a patched git to make it work.
-The PR that is needed is [here](https://github.com/gitgitgadget/git/pull/493).
-Use the following instructions to install it:
-
-```bash
-git clone git@github.com:tqc/git.git
-cd git
-git checkout tqc/subtree
-make
-make install
-cd contrib/subtree
-make
-cp git-subtree ~/bin
-```
-
-Then, do a sync with this command:
-
-```bash
-PATH="$HOME/bin:$PATH" ~/bin/git-subtree push -P compiler/rustc_codegen_gcc/ ../rustc_codegen_gcc/ sync_branch_name
-cd ../rustc_codegen_gcc
-git checkout master
-git pull
-git checkout sync_branch_name
-git merge master
-```
-
-To send the changes to the rust repo:
-
-```bash
-cd ../rust
-git pull origin master
-git checkout -b subtree-update_cg_gcc_YYYY-MM-DD
-PATH="$HOME/bin:$PATH" ~/bin/git-subtree pull --prefix=compiler/rustc_codegen_gcc/ https://github.com/rust-lang/rustc_codegen_gcc.git master
-git push
-```
-
-TODO: write a script that does the above.
-
-https://rust-lang.zulipchat.com/#narrow/stream/301329-t-devtools/topic/subtree.20madness/near/258877725
-
-### How to use [mem-trace](https://github.com/antoyo/mem-trace)
-
-`rustc` needs to be built without `jemalloc` so that `mem-trace` can overload `malloc` since `jemalloc` is linked statically, so a `LD_PRELOAD`-ed library won't a chance to intercept the calls to `malloc`.
-
-### How to generate GIMPLE
-
-If you need to check what gccjit is generating (GIMPLE), then take a look at how to
-generate it in [gimple.md](./doc/gimple.md).
-
-### How to build a cross-compiling libgccjit
-
-#### Building libgccjit
-
- * Follow the instructions on [this repo](https://github.com/cross-cg-gcc-tools/cross-gcc).
-
-#### Configuring rustc_codegen_gcc
-
- * Run `./y.sh prepare --cross` so that the sysroot is patched for the cross-compiling case.
- * Set the path to the cross-compiling libgccjit in `gcc_path`.
- * Make sure you have the linker for your target (for instance `m68k-unknown-linux-gnu-gcc`) in your `$PATH`. Currently, the linker name is hardcoded as being `$TARGET-gcc`. Specify the target when building the sysroot: `./y.sh build --target-triple m68k-unknown-linux-gnu`.
- * Build your project by specifying the target: `OVERWRITE_TARGET_TRIPLE=m68k-unknown-linux-gnu ../cargo.sh build --target m68k-unknown-linux-gnu`.
-
-If the target is not yet supported by the Rust compiler, create a [target specification file](https://docs.rust-embedded.org/embedonomicon/custom-target.html) (note that the `arch` specified in this file must be supported by the rust compiler).
-Then, you can use it the following way:
-
- * Add the target specification file using `--target` as an **absolute** path to build the sysroot: `./y.sh build --target-triple m68k-unknown-linux-gnu --target $(pwd)/m68k-unknown-linux-gnu.json`
- * Build your project by specifying the target specification file: `OVERWRITE_TARGET_TRIPLE=m68k-unknown-linux-gnu ../cargo.sh build --target path/to/m68k-unknown-linux-gnu.json`.
-
-If you get the following error:
-
-```
-/usr/bin/ld: unrecognised emulation mode: m68kelf
-```
-
-Make sure you set `gcc_path` to the install directory.
diff --git a/build_sysroot/build_sysroot.sh b/build_sysroot/build_sysroot.sh
deleted file mode 100755
index ebc7dc375b12..000000000000
--- a/build_sysroot/build_sysroot.sh
+++ /dev/null
@@ -1,34 +0,0 @@
-#!/usr/bin/env bash
-
-# Requires the CHANNEL env var to be set to `debug` or `release.`
-
-set -e
-cd $(dirname "$0")
-
-pushd ../
-source ./config.sh
-popd
-
-# Cleanup for previous run
-# v Clean target dir except for build scripts and incremental cache
-rm -r target/*/{debug,release}/{build,deps,examples,libsysroot*,native} 2>/dev/null || true
-rm Cargo.lock test_target/Cargo.lock 2>/dev/null || true
-rm -r sysroot/ 2>/dev/null || true
-
-# Build libs
-export RUSTFLAGS="$RUSTFLAGS -Z force-unstable-if-unmarked"
-if [[ "$1" == "--release" ]]; then
- sysroot_channel='release'
- RUSTFLAGS="$RUSTFLAGS -Zmir-opt-level=3" cargo build --target $TARGET_TRIPLE --release
-else
- sysroot_channel='debug'
- cargo build --target $TARGET_TRIPLE
-fi
-
-# Copy files to sysroot
-mkdir -p sysroot/lib/rustlib/$TARGET_TRIPLE/lib/
-cp -r target/$TARGET_TRIPLE/$sysroot_channel/deps/* sysroot/lib/rustlib/$TARGET_TRIPLE/lib/
-# Copy the source files to the sysroot (Rust for Linux needs this).
-source_dir=sysroot/lib/rustlib/src/rust
-mkdir -p $source_dir
-cp -r sysroot_src/library/ $source_dir
diff --git a/build_system/Cargo.lock b/build_system/Cargo.lock
index 86268e191603..e727561a2bfb 100644
--- a/build_system/Cargo.lock
+++ b/build_system/Cargo.lock
@@ -2,6 +2,15 @@
# It is not intended for manual editing.
version = 3
+[[package]]
+name = "boml"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "85fdb93f04c73bff54305fa437ffea5449c41edcaadfe882f35836206b166ac5"
+
[[package]]
name = "y"
version = "0.1.0"
+dependencies = [
+ "boml",
+]
diff --git a/build_system/Cargo.toml b/build_system/Cargo.toml
index f36709ea0360..d2600ed5a031 100644
--- a/build_system/Cargo.toml
+++ b/build_system/Cargo.toml
@@ -3,6 +3,9 @@ name = "y"
version = "0.1.0"
edition = "2021"
+[dependencies]
+boml = "0.3.1"
+
[[bin]]
name = "y"
path = "src/main.rs"
diff --git a/build_system/src/build.rs b/build_system/src/build.rs
index f1c3701a946e..308ad3465494 100644
--- a/build_system/src/build.rs
+++ b/build_system/src/build.rs
@@ -1,7 +1,5 @@
-use crate::config::{set_config, ConfigInfo};
-use crate::utils::{
- get_gcc_path, run_command, run_command_with_output_and_env, walk_dir,
-};
+use crate::config::{Channel, ConfigInfo};
+use crate::utils::{run_command, run_command_with_output_and_env, walk_dir};
use std::collections::HashMap;
use std::ffi::OsStr;
use std::fs;
@@ -9,33 +7,21 @@ use std::path::Path;
#[derive(Default)]
struct BuildArg {
- codegen_release_channel: bool,
- sysroot_release_channel: bool,
- sysroot_panic_abort: bool,
flags: Vec,
- gcc_path: String,
+ config_info: ConfigInfo,
}
impl BuildArg {
fn new() -> Result, String> {
- let gcc_path = get_gcc_path()?;
- let mut build_arg = Self {
- gcc_path,
- ..Default::default()
- };
+ let mut build_arg = Self::default();
// We skip binary name and the `build` command.
let mut args = std::env::args().skip(2);
while let Some(arg) = args.next() {
match arg.as_str() {
- "--release" => build_arg.codegen_release_channel = true,
- "--release-sysroot" => build_arg.sysroot_release_channel = true,
"--no-default-features" => {
build_arg.flags.push("--no-default-features".to_string());
}
- "--sysroot-panic-abort" => {
- build_arg.sysroot_panic_abort = true;
- },
"--features" => {
if let Some(arg) = args.next() {
build_arg.flags.push("--features".to_string());
@@ -50,25 +36,11 @@ impl BuildArg {
Self::usage();
return Ok(None);
}
- "--target-triple" => {
- if args.next().is_some() {
- // Handled in config.rs.
- } else {
- return Err(
- "Expected a value after `--target-triple`, found nothing".to_string()
- );
+ arg => {
+ if !build_arg.config_info.parse_argument(arg, &mut args)? {
+ return Err(format!("Unknown argument `{}`", arg));
}
}
- "--target" => {
- if args.next().is_some() {
- // Handled in config.rs.
- } else {
- return Err(
- "Expected a value after `--target`, found nothing".to_string()
- );
- }
- }
- arg => return Err(format!("Unknown argument `{}`", arg)),
}
}
Ok(Some(build_arg))
@@ -79,29 +51,20 @@ impl BuildArg {
r#"
`build` command help:
- --release : Build codegen in release mode
- --release-sysroot : Build sysroot in release mode
- --sysroot-panic-abort : Build the sysroot without unwinding support.
--no-default-features : Add `--no-default-features` flag
- --features [arg] : Add a new feature [arg]
- --target-triple [arg] : Set the target triple to [arg]
- --help : Show this help
-"#
- )
+ --features [arg] : Add a new feature [arg]"#
+ );
+ ConfigInfo::show_usage();
+ println!(" --help : Show this help");
}
}
-fn build_sysroot(
- env: &mut HashMap,
- args: &BuildArg,
- config: &ConfigInfo,
-) -> Result<(), String> {
- std::env::set_current_dir("build_sysroot")
- .map_err(|error| format!("Failed to go to `build_sysroot` directory: {:?}", error))?;
+pub fn build_sysroot(env: &HashMap, config: &ConfigInfo) -> Result<(), String> {
+ let start_dir = Path::new("build_sysroot");
// Cleanup for previous run
// Clean target dir except for build scripts and incremental cache
let _ = walk_dir(
- "target",
+ start_dir.join("target"),
|dir: &Path| {
for top in &["debug", "release"] {
let _ = fs::remove_dir_all(dir.join(top).join("build"));
@@ -138,79 +101,100 @@ fn build_sysroot(
|_| Ok(()),
);
- let _ = fs::remove_file("Cargo.lock");
- let _ = fs::remove_file("test_target/Cargo.lock");
- let _ = fs::remove_dir_all("sysroot");
+ let _ = fs::remove_file(start_dir.join("Cargo.lock"));
+ let _ = fs::remove_file(start_dir.join("test_target/Cargo.lock"));
+ let _ = fs::remove_dir_all(start_dir.join("sysroot"));
// Builds libs
- let mut rustflags = env
- .get("RUSTFLAGS")
- .cloned()
- .unwrap_or_default();
- if args.sysroot_panic_abort {
+ let mut rustflags = env.get("RUSTFLAGS").cloned().unwrap_or_default();
+ if config.sysroot_panic_abort {
rustflags.push_str(" -Cpanic=abort -Zpanic-abort-tests");
}
- env.insert(
- "RUSTFLAGS".to_string(),
- format!("{} -Zmir-opt-level=3", rustflags),
- );
- let channel = if args.sysroot_release_channel {
+ rustflags.push_str(" -Z force-unstable-if-unmarked");
+ let mut env = env.clone();
+ let channel = if config.sysroot_release_channel {
+ env.insert(
+ "RUSTFLAGS".to_string(),
+ format!("{} -Zmir-opt-level=3", rustflags),
+ );
run_command_with_output_and_env(
&[
&"cargo",
&"build",
+ &"--release",
&"--target",
&config.target,
- &"--release",
],
- None,
+ Some(start_dir),
Some(&env),
)?;
"release"
} else {
+ env.insert("RUSTFLAGS".to_string(), rustflags);
+
run_command_with_output_and_env(
- &[
- &"cargo",
- &"build",
- &"--target",
- &config.target,
- ],
- None,
- Some(env),
+ &[&"cargo", &"build", &"--target", &config.target],
+ Some(start_dir),
+ Some(&env),
)?;
"debug"
};
// Copy files to sysroot
- let sysroot_path = format!("sysroot/lib/rustlib/{}/lib/", config.target_triple);
- fs::create_dir_all(&sysroot_path)
- .map_err(|error| format!("Failed to create directory `{}`: {:?}", sysroot_path, error))?;
+ let sysroot_path = start_dir.join(format!("sysroot/lib/rustlib/{}/lib/", config.target_triple));
+ fs::create_dir_all(&sysroot_path).map_err(|error| {
+ format!(
+ "Failed to create directory `{}`: {:?}",
+ sysroot_path.display(),
+ error
+ )
+ })?;
let copier = |dir_to_copy: &Path| {
+ // FIXME: should not use shell command!
run_command(&[&"cp", &"-r", &dir_to_copy, &sysroot_path], None).map(|_| ())
};
walk_dir(
- &format!("target/{}/{}/deps", config.target_triple, channel),
+ start_dir.join(&format!("target/{}/{}/deps", config.target_triple, channel)),
copier,
copier,
)?;
// Copy the source files to the sysroot (Rust for Linux needs this).
- let sysroot_src_path = "sysroot/lib/rustlib/src/rust";
- fs::create_dir_all(&sysroot_src_path)
- .map_err(|error| format!("Failed to create directory `{}`: {:?}", sysroot_src_path, error))?;
- run_command(&[&"cp", &"-r", &"sysroot_src/library/", &sysroot_src_path], None)?;
+ let sysroot_src_path = start_dir.join("sysroot/lib/rustlib/src/rust");
+ fs::create_dir_all(&sysroot_src_path).map_err(|error| {
+ format!(
+ "Failed to create directory `{}`: {:?}",
+ sysroot_src_path.display(),
+ error
+ )
+ })?;
+ run_command(
+ &[
+ &"cp",
+ &"-r",
+ &start_dir.join("sysroot_src/library/"),
+ &sysroot_src_path,
+ ],
+ None,
+ )?;
Ok(())
}
-fn build_codegen(args: &BuildArg) -> Result<(), String> {
+fn build_codegen(args: &mut BuildArg) -> Result<(), String> {
let mut env = HashMap::new();
- env.insert("LD_LIBRARY_PATH".to_string(), args.gcc_path.clone());
- env.insert("LIBRARY_PATH".to_string(), args.gcc_path.clone());
+ env.insert(
+ "LD_LIBRARY_PATH".to_string(),
+ args.config_info.gcc_path.clone(),
+ );
+ env.insert(
+ "LIBRARY_PATH".to_string(),
+ args.config_info.gcc_path.clone(),
+ );
let mut command: Vec<&dyn AsRef> = vec![&"cargo", &"rustc"];
- if args.codegen_release_channel {
+ if args.config_info.channel == Channel::Release {
command.push(&"--release");
env.insert("CHANNEL".to_string(), "release".to_string());
env.insert("CARGO_INCREMENTAL".to_string(), "1".to_string());
@@ -223,7 +207,7 @@ fn build_codegen(args: &BuildArg) -> Result<(), String> {
}
run_command_with_output_and_env(&command, None, Some(&env))?;
- let config = set_config(&mut env, &[], Some(&args.gcc_path))?;
+ args.config_info.setup(&mut env, false)?;
// We voluntarily ignore the error.
let _ = fs::remove_dir_all("target/out");
@@ -236,19 +220,16 @@ fn build_codegen(args: &BuildArg) -> Result<(), String> {
})?;
println!("[BUILD] sysroot");
- build_sysroot(
- &mut env,
- args,
- &config,
- )?;
+ build_sysroot(&env, &args.config_info)?;
Ok(())
}
pub fn run() -> Result<(), String> {
- let args = match BuildArg::new()? {
+ let mut args = match BuildArg::new()? {
Some(args) => args,
None => return Ok(()),
};
- build_codegen(&args)?;
+ args.config_info.setup_gcc_path()?;
+ build_codegen(&mut args)?;
Ok(())
}
diff --git a/build_system/src/cargo.rs b/build_system/src/cargo.rs
new file mode 100644
index 000000000000..1cfcdba6b1cd
--- /dev/null
+++ b/build_system/src/cargo.rs
@@ -0,0 +1,114 @@
+use crate::config::ConfigInfo;
+use crate::utils::{
+ get_toolchain, run_command_with_output_and_env_no_err, rustc_toolchain_version_info,
+ rustc_version_info,
+};
+
+use std::collections::HashMap;
+use std::ffi::OsStr;
+use std::path::PathBuf;
+
+fn args() -> Result>, String> {
+ // We skip the binary and the "cargo" option.
+ if let Some("--help") = std::env::args().skip(2).next().as_deref() {
+ usage();
+ return Ok(None);
+ }
+ let args = std::env::args().skip(2).collect::>();
+ if args.is_empty() {
+ return Err(
+ "Expected at least one argument for `cargo` subcommand, found none".to_string(),
+ );
+ }
+ Ok(Some(args))
+}
+
+fn usage() {
+ println!(
+ r#"
+`cargo` command help:
+
+ [args] : Arguments to be passed to the cargo command
+ --help : Show this help
+"#
+ )
+}
+
+pub fn run() -> Result<(), String> {
+ let args = match args()? {
+ Some(a) => a,
+ None => return Ok(()),
+ };
+
+ // We first need to go to the original location to ensure that the config setup will go as
+ // expected.
+ let current_dir = std::env::current_dir()
+ .and_then(|path| path.canonicalize())
+ .map_err(|error| format!("Failed to get current directory path: {:?}", error))?;
+ let current_exe = std::env::current_exe()
+ .and_then(|path| path.canonicalize())
+ .map_err(|error| format!("Failed to get current exe path: {:?}", error))?;
+ let mut parent_dir = current_exe
+ .components()
+ .map(|comp| comp.as_os_str())
+ .collect::>();
+ // We run this script from "build_system/target/release/y", so we need to remove these elements.
+ for to_remove in &["y", "release", "target", "build_system"] {
+ if parent_dir
+ .last()
+ .map(|part| part == to_remove)
+ .unwrap_or(false)
+ {
+ parent_dir.pop();
+ } else {
+ return Err(format!(
+ "Build script not executed from `build_system/target/release/y` (in path {})",
+ current_exe.display(),
+ ));
+ }
+ }
+ let parent_dir = PathBuf::from(parent_dir.join(&OsStr::new("/")));
+ std::env::set_current_dir(&parent_dir).map_err(|error| {
+ format!(
+ "Failed to go to `{}` folder: {:?}",
+ parent_dir.display(),
+ error
+ )
+ })?;
+
+ let mut env: HashMap = std::env::vars().collect();
+ ConfigInfo::default().setup(&mut env, false)?;
+ let toolchain = get_toolchain()?;
+
+ let toolchain_version = rustc_toolchain_version_info(&toolchain)?;
+ let default_version = rustc_version_info(None)?;
+ if toolchain_version != default_version {
+ println!(
+ "rustc_codegen_gcc is built for {} but the default rustc version is {}.",
+ toolchain_version.short, default_version.short,
+ );
+ println!("Using {}.", toolchain_version.short);
+ }
+
+ // We go back to the original folder since we now have set up everything we needed.
+ std::env::set_current_dir(¤t_dir).map_err(|error| {
+ format!(
+ "Failed to go back to `{}` folder: {:?}",
+ current_dir.display(),
+ error
+ )
+ })?;
+
+ let rustflags = env.get("RUSTFLAGS").cloned().unwrap_or_default();
+ env.insert("RUSTDOCFLAGS".to_string(), rustflags);
+ let toolchain = format!("+{}", toolchain);
+ let mut command: Vec<&dyn AsRef> = vec![&"cargo", &toolchain];
+ for arg in &args {
+ command.push(arg);
+ }
+ if run_command_with_output_and_env_no_err(&command, None, Some(&env)).is_err() {
+ std::process::exit(1);
+ }
+
+ Ok(())
+}
diff --git a/build_system/src/clean.rs b/build_system/src/clean.rs
new file mode 100644
index 000000000000..cd8e691a0ed9
--- /dev/null
+++ b/build_system/src/clean.rs
@@ -0,0 +1,82 @@
+use crate::utils::{remove_file, run_command};
+
+use std::fs::remove_dir_all;
+use std::path::Path;
+
+#[derive(Default)]
+enum CleanArg {
+ /// `clean all`
+ All,
+ /// `clean ui-tests`
+ UiTests,
+ /// `clean --help`
+ #[default]
+ Help,
+}
+
+impl CleanArg {
+ fn new() -> Result {
+ // We skip the binary and the "clean" option.
+ for arg in std::env::args().skip(2) {
+ return match arg.as_str() {
+ "all" => Ok(Self::All),
+ "ui-tests" => Ok(Self::UiTests),
+ "--help" => Ok(Self::Help),
+ a => Err(format!("Unknown argument `{}`", a)),
+ };
+ }
+ Ok(Self::default())
+ }
+}
+
+fn usage() {
+ println!(
+ r#"
+`clean` command help:
+
+ all : Clean all data
+ ui-tests : Clean ui tests
+ --help : Show this help
+"#
+ )
+}
+
+fn clean_all() -> Result<(), String> {
+ let dirs_to_remove = [
+ "target",
+ "build_sysroot/sysroot",
+ "build_sysroot/sysroot_src",
+ "build_sysroot/target",
+ ];
+ for dir in dirs_to_remove {
+ let _ = remove_dir_all(dir);
+ }
+ let dirs_to_remove = ["regex", "rand", "simple-raytracer"];
+ for dir in dirs_to_remove {
+ let _ = remove_dir_all(Path::new(crate::BUILD_DIR).join(dir));
+ }
+
+ let files_to_remove = ["build_sysroot/Cargo.lock", "perf.data", "perf.data.old"];
+
+ for file in files_to_remove {
+ let _ = remove_file(file);
+ }
+
+ println!("Successfully ran `clean all`");
+ Ok(())
+}
+
+fn clean_ui_tests() -> Result<(), String> {
+ let path = Path::new(crate::BUILD_DIR).join("rust/build/x86_64-unknown-linux-gnu/test/ui/");
+ run_command(&[&"find", &path, &"-name", &"stamp", &"-delete"], None)?;
+ Ok(())
+}
+
+pub fn run() -> Result<(), String> {
+ match CleanArg::new()? {
+ CleanArg::All => clean_all()?,
+ CleanArg::UiTests => clean_ui_tests()?,
+ CleanArg::Help => usage(),
+ }
+ Ok(())
+}
diff --git a/build_system/src/config.rs b/build_system/src/config.rs
index 64d9bd73e01a..ddfc0e4a925d 100644
--- a/build_system/src/config.rs
+++ b/build_system/src/config.rs
@@ -1,149 +1,551 @@
-use crate::utils::{get_gcc_path, get_os_name, get_rustc_host_triple};
+use crate::utils::{
+ create_symlink, get_os_name, run_command_with_output, rustc_version_info, split_args,
+};
use std::collections::HashMap;
use std::env as std_env;
+use std::ffi::OsStr;
+use std::fs;
+use std::path::{Path, PathBuf};
+use boml::{types::TomlValue, Toml};
+
+#[derive(Default, PartialEq, Eq, Clone, Copy, Debug)]
+pub enum Channel {
+ #[default]
+ Debug,
+ Release,
+}
+
+impl Channel {
+ pub fn as_str(self) -> &'static str {
+ match self {
+ Self::Debug => "debug",
+ Self::Release => "release",
+ }
+ }
+}
+
+fn failed_config_parsing(config_file: &Path, err: &str) -> Result {
+ Err(format!(
+ "Failed to parse `{}`: {}",
+ config_file.display(),
+ err
+ ))
+}
+
+#[derive(Default)]
+pub struct ConfigFile {
+ gcc_path: Option,
+ download_gccjit: Option,
+}
+
+impl ConfigFile {
+ pub fn new(config_file: &Path) -> Result {
+ let content = fs::read_to_string(config_file).map_err(|_| {
+ format!(
+ "Failed to read `{}`. Take a look at `Readme.md` to see how to set up the project",
+ config_file.display(),
+ )
+ })?;
+ let toml = Toml::parse(&content).map_err(|err| {
+ format!(
+ "Error occurred around `{}`: {:?}",
+ &content[err.start..=err.end],
+ err.kind
+ )
+ })?;
+ let mut config = Self::default();
+ for (key, value) in toml.iter() {
+ match (key, value) {
+ ("gcc-path", TomlValue::String(value)) => {
+ config.gcc_path = Some(value.as_str().to_string())
+ }
+ ("gcc-path", _) => {
+ return failed_config_parsing(config_file, "Expected a string for `gcc-path`")
+ }
+ ("download-gccjit", TomlValue::Boolean(value)) => {
+ config.download_gccjit = Some(*value)
+ }
+ ("download-gccjit", _) => {
+ return failed_config_parsing(
+ config_file,
+ "Expected a boolean for `download-gccjit`",
+ )
+ }
+ _ => return failed_config_parsing(config_file, &format!("Unknown key `{}`", key)),
+ }
+ }
+ match (config.gcc_path.as_mut(), config.download_gccjit) {
+ (None, None | Some(false)) => {
+ return failed_config_parsing(
+ config_file,
+ "At least one of `gcc-path` or `download-gccjit` value must be set",
+ )
+ }
+ (Some(_), Some(true)) => {
+ println!(
+ "WARNING: both `gcc-path` and `download-gccjit` arguments are used, \
+ ignoring `gcc-path`"
+ );
+ }
+ (Some(gcc_path), _) => {
+ let path = Path::new(gcc_path);
+ *gcc_path = path
+ .canonicalize()
+ .map_err(|err| {
+ format!("Failed to get absolute path of `{}`: {:?}", gcc_path, err)
+ })?
+ .display()
+ .to_string();
+ }
+ _ => {}
+ }
+ Ok(config)
+ }
+}
+
+#[derive(Default, Debug)]
pub struct ConfigInfo {
pub target: String,
pub target_triple: String,
+ pub host_triple: String,
pub rustc_command: Vec,
+ pub run_in_vm: bool,
+ pub cargo_target_dir: String,
+ pub dylib_ext: String,
+ pub sysroot_release_channel: bool,
+ pub channel: Channel,
+ pub sysroot_panic_abort: bool,
+ pub cg_backend_path: String,
+ pub sysroot_path: String,
+ pub gcc_path: String,
+ config_file: Option,
+ // This is used in particular in rust compiler bootstrap because it doesn't run at the root
+ // of the `cg_gcc` folder, making it complicated for us to get access to local files we need
+ // like `libgccjit.version` or `config.toml`.
+ cg_gcc_path: Option,
+ // Needed for the `info` command which doesn't want to actually download the lib if needed,
+ // just to set the `gcc_path` field to display it.
+ pub no_download: bool,
}
-// Returns the beginning for the command line of rustc.
-pub fn set_config(
- env: &mut HashMap,
- test_flags: &[String],
- gcc_path: Option<&str>,
-) -> Result {
- env.insert("CARGO_INCREMENTAL".to_string(), "0".to_string());
-
- let gcc_path = match gcc_path {
- Some(path) => path.to_string(),
- None => get_gcc_path()?,
- };
- env.insert("GCC_PATH".to_string(), gcc_path.clone());
-
- let os_name = get_os_name()?;
- let dylib_ext = match os_name.as_str() {
- "Linux" => "so",
- "Darwin" => "dylib",
- os => return Err(format!("unsupported OS `{}`", os)),
- };
- let host_triple = get_rustc_host_triple()?;
- let mut linker = None;
- let mut target_triple = host_triple.clone();
- let mut target = target_triple.clone();
-
- // We skip binary name and the command.
- let mut args = std::env::args().skip(2);
-
- let mut set_target_triple = false;
- let mut set_target = false;
- while let Some(arg) = args.next() {
- match arg.as_str() {
- "--target-triple" => {
- if let Some(arg) = args.next() {
- target_triple = arg;
- set_target_triple = true;
- } else {
- return Err(
- "Expected a value after `--target-triple`, found nothing".to_string()
- );
- }
- },
+impl ConfigInfo {
+ /// Returns `true` if the argument was taken into account.
+ pub fn parse_argument(
+ &mut self,
+ arg: &str,
+ args: &mut impl Iterator- ,
+ ) -> Result
{
+ match arg {
"--target" => {
if let Some(arg) = args.next() {
- target = arg;
- set_target = true;
+ self.target = arg;
} else {
+ return Err("Expected a value after `--target`, found nothing".to_string());
+ }
+ }
+ "--target-triple" => match args.next() {
+ Some(arg) if !arg.is_empty() => self.target_triple = arg.to_string(),
+ _ => {
return Err(
- "Expected a value after `--target`, found nothing".to_string()
- );
+ "Expected a value after `--target-triple`, found nothing".to_string()
+ )
}
},
- _ => (),
+ "--out-dir" => match args.next() {
+ Some(arg) if !arg.is_empty() => {
+ self.cargo_target_dir = arg.to_string();
+ }
+ _ => return Err("Expected a value after `--out-dir`, found nothing".to_string()),
+ },
+ "--config-file" => match args.next() {
+ Some(arg) if !arg.is_empty() => {
+ self.config_file = Some(arg.to_string());
+ }
+ _ => {
+ return Err("Expected a value after `--config-file`, found nothing".to_string())
+ }
+ },
+ "--release-sysroot" => self.sysroot_release_channel = true,
+ "--release" => self.channel = Channel::Release,
+ "--sysroot-panic-abort" => self.sysroot_panic_abort = true,
+ "--cg_gcc-path" => match args.next() {
+ Some(arg) if !arg.is_empty() => {
+ self.cg_gcc_path = Some(arg.into());
+ }
+ _ => {
+ return Err("Expected a value after `--cg_gcc-path`, found nothing".to_string())
+ }
+ },
+ _ => return Ok(false),
+ }
+ Ok(true)
+ }
+
+ pub fn rustc_command_vec(&self) -> Vec<&dyn AsRef> {
+ let mut command: Vec<&dyn AsRef> = Vec::with_capacity(self.rustc_command.len());
+ for arg in self.rustc_command.iter() {
+ command.push(arg);
+ }
+ command
+ }
+
+ fn download_gccjit_if_needed(&mut self) -> Result<(), String> {
+ let output_dir = Path::new(crate::BUILD_DIR).join("libgccjit");
+
+ let commit_hash_file = self.compute_path("libgccjit.version");
+ let content = fs::read_to_string(&commit_hash_file).map_err(|_| {
+ format!(
+ "Failed to read `{}`. Take a look at `Readme.md` to see how to set up the project",
+ commit_hash_file.display(),
+ )
+ })?;
+ let commit = content.trim();
+ // This is a very simple check to ensure this is not a path. For the rest, it'll just fail
+ // when trying to download the file so we should be fine.
+ if commit.contains('/') || commit.contains('\\') {
+ return Err(format!(
+ "{}: invalid commit hash `{}`",
+ commit_hash_file.display(),
+ commit,
+ ));
+ }
+ let output_dir = output_dir.join(commit);
+ if !output_dir.is_dir() {
+ std::fs::create_dir_all(&output_dir).map_err(|err| {
+ format!(
+ "failed to create folder `{}`: {:?}",
+ output_dir.display(),
+ err,
+ )
+ })?;
+ }
+ let output_dir = output_dir.canonicalize().map_err(|err| {
+ format!(
+ "Failed to get absolute path of `{}`: {:?}",
+ output_dir.display(),
+ err
+ )
+ })?;
+
+ let libgccjit_so_name = "libgccjit.so";
+ let libgccjit_so = output_dir.join(libgccjit_so_name);
+ if !libgccjit_so.is_file() && !self.no_download {
+ // Download time!
+ let tempfile_name = format!("{}.download", libgccjit_so_name);
+ let tempfile = output_dir.join(&tempfile_name);
+ let is_in_ci = std::env::var("GITHUB_ACTIONS").is_ok();
+
+ let url = format!(
+ "https://github.com/antoyo/gcc/releases/download/master-{}/libgccjit.so",
+ commit,
+ );
+
+ println!("Downloading `{}`...", url);
+ download_gccjit(url, &output_dir, tempfile_name, !is_in_ci)?;
+
+ let libgccjit_so = output_dir.join(libgccjit_so_name);
+ // If we reach this point, it means the file was correctly downloaded, so let's
+ // rename it!
+ std::fs::rename(&tempfile, &libgccjit_so).map_err(|err| {
+ format!(
+ "Failed to rename `{}` into `{}`: {:?}",
+ tempfile.display(),
+ libgccjit_so.display(),
+ err,
+ )
+ })?;
+
+ println!("Downloaded libgccjit.so version {} successfully!", commit);
+ // We need to create a link named `libgccjit.so.0` because that's what the linker is
+ // looking for.
+ create_symlink(
+ &libgccjit_so,
+ output_dir.join(&format!("{}.0", libgccjit_so_name)),
+ )?;
+ }
+
+ self.gcc_path = output_dir.display().to_string();
+ println!("Using `{}` as path for libgccjit", self.gcc_path);
+ Ok(())
+ }
+
+ pub fn compute_path<P: AsRef<Path>>(&self, other: P) -> PathBuf {
+ match self.cg_gcc_path {
+ Some(ref path) => path.join(other),
+ None => PathBuf::new().join(other),
}
}
- if set_target_triple && !set_target {
- target = target_triple.clone();
+ pub fn setup_gcc_path(&mut self) -> Result<(), String> {
+ let config_file = match self.config_file.as_deref() {
+ Some(config_file) => config_file.into(),
+ None => self.compute_path("config.toml"),
+ };
+ let ConfigFile {
+ gcc_path,
+ download_gccjit,
+ } = ConfigFile::new(&config_file)?;
+
+ if let Some(true) = download_gccjit {
+ self.download_gccjit_if_needed()?;
+ return Ok(());
+ }
+ self.gcc_path = match gcc_path {
+ Some(path) => path,
+ None => {
+ return Err(format!(
+ "missing `gcc-path` value from `{}`",
+ config_file.display(),
+ ))
+ }
+ };
+ Ok(())
}
- if host_triple != target_triple {
- linker = Some(format!("-Clinker={}-gcc", target_triple));
- }
- let current_dir =
- std_env::current_dir().map_err(|error| format!("`current_dir` failed: {:?}", error))?;
- let channel = if let Some(channel) = env.get("CHANNEL") {
- channel.as_str()
- } else {
- "debug"
- };
- let cg_backend_path = current_dir
- .join("target")
- .join(channel)
- .join(&format!("librustc_codegen_gcc.{}", dylib_ext));
- let sysroot_path = current_dir.join("build_sysroot/sysroot");
- let mut rustflags = Vec::new();
- if let Some(cg_rustflags) = env.get("CG_RUSTFLAGS") {
- rustflags.push(cg_rustflags.clone());
- }
- if let Some(linker) = linker {
- rustflags.push(linker.to_string());
- }
- rustflags.extend_from_slice(&[
- "-Csymbol-mangling-version=v0".to_string(),
- "-Cdebuginfo=2".to_string(),
- format!("-Zcodegen-backend={}", cg_backend_path.display()),
- "--sysroot".to_string(),
- sysroot_path.display().to_string(),
- ]);
+ pub fn setup(
+ &mut self,
+ env: &mut HashMap<String, String>,
+ use_system_gcc: bool,
+ ) -> Result<(), String> {
+ env.insert("CARGO_INCREMENTAL".to_string(), "0".to_string());
+
+ if self.gcc_path.is_empty() && !use_system_gcc {
+ self.setup_gcc_path()?;
+ }
+ env.insert("GCC_PATH".to_string(), self.gcc_path.clone());
+
+ if self.cargo_target_dir.is_empty() {
+ match env.get("CARGO_TARGET_DIR").filter(|dir| !dir.is_empty()) {
+ Some(cargo_target_dir) => self.cargo_target_dir = cargo_target_dir.clone(),
+ None => self.cargo_target_dir = "target/out".to_string(),
+ }
+ }
+
+ let os_name = get_os_name()?;
+ self.dylib_ext = match os_name.as_str() {
+ "Linux" => "so",
+ "Darwin" => "dylib",
+ os => return Err(format!("unsupported OS `{}`", os)),
+ }
+ .to_string();
+ let rustc = match env.get("RUSTC") {
+ Some(r) if !r.is_empty() => r.to_string(),
+ _ => "rustc".to_string(),
+ };
+ self.host_triple = match rustc_version_info(Some(&rustc))?.host {
+ Some(host) => host,
+ None => return Err("no host found".to_string()),
+ };
+
+ if self.target_triple.is_empty() {
+ if let Some(overwrite) = env.get("OVERWRITE_TARGET_TRIPLE") {
+ self.target_triple = overwrite.clone();
+ }
+ }
+ if self.target_triple.is_empty() {
+ self.target_triple = self.host_triple.clone();
+ }
+ if self.target.is_empty() && !self.target_triple.is_empty() {
+ self.target = self.target_triple.clone();
+ }
+
+ let mut linker = None;
+
+ if self.host_triple != self.target_triple {
+ if self.target_triple.is_empty() {
+ return Err("Unknown non-native platform".to_string());
+ }
+ linker = Some(format!("-Clinker={}-gcc", self.target_triple));
+ self.run_in_vm = true;
+ }
+
+ let current_dir =
+ std_env::current_dir().map_err(|error| format!("`current_dir` failed: {:?}", error))?;
+ let channel = if self.channel == Channel::Release {
+ "release"
+ } else if let Some(channel) = env.get("CHANNEL") {
+ channel.as_str()
+ } else {
+ "debug"
+ };
+
+ let has_builtin_backend = env
+ .get("BUILTIN_BACKEND")
+ .map(|backend| !backend.is_empty())
+ .unwrap_or(false);
+
+ let mut rustflags = Vec::new();
+ if has_builtin_backend {
+ // It means we're building inside the rustc testsuite, so some options need to be handled
+ // a bit differently.
+ self.cg_backend_path = "gcc".to_string();
+
+ match env.get("RUSTC_SYSROOT") {
+ Some(rustc_sysroot) if !rustc_sysroot.is_empty() => {
+ rustflags.extend_from_slice(&["--sysroot".to_string(), rustc_sysroot.clone()]);
+ }
+ _ => {}
+ }
+ // This should not be needed, but is necessary for the CI in the rust repository.
+ // FIXME: Remove when the rust CI switches to the master version of libgccjit.
+ rustflags.push("-Cpanic=abort".to_string());
+ } else {
+ self.cg_backend_path = current_dir
+ .join("target")
+ .join(channel)
+ .join(&format!("librustc_codegen_gcc.{}", self.dylib_ext))
+ .display()
+ .to_string();
+ self.sysroot_path = current_dir
+ .join("build_sysroot/sysroot")
+ .display()
+ .to_string();
+ rustflags.extend_from_slice(&["--sysroot".to_string(), self.sysroot_path.clone()]);
+ };
+
+ // This environment variable is useful in case we want to change options of rustc commands.
+ if let Some(cg_rustflags) = env.get("CG_RUSTFLAGS") {
+ rustflags.extend_from_slice(&split_args(&cg_rustflags)?);
+ }
+ if let Some(test_flags) = env.get("TEST_FLAGS") {
+ rustflags.extend_from_slice(&split_args(&test_flags)?);
+ }
+
+ if let Some(linker) = linker {
+ rustflags.push(linker.to_string());
+ }
+
+ #[cfg(not(feature="master"))]
+ rustflags.push("-Csymbol-mangling-version=v0".to_string());
- // Since we don't support ThinLTO, disable LTO completely when not trying to do LTO.
- // TODO(antoyo): remove when we can handle ThinLTO.
- if !env.contains_key(&"FAT_LTO".to_string()) {
- rustflags.push("-Clto=off".to_string());
- }
- rustflags.extend_from_slice(test_flags);
- // FIXME(antoyo): remove once the atomic shim is gone
- if os_name == "Darwin" {
rustflags.extend_from_slice(&[
- "-Clink-arg=-undefined".to_string(),
- "-Clink-arg=dynamic_lookup".to_string(),
+ "-Cdebuginfo=2".to_string(),
+ format!("-Zcodegen-backend={}", self.cg_backend_path),
]);
+
+ // Since we don't support ThinLTO, disable LTO completely when not trying to do LTO.
+ // TODO(antoyo): remove when we can handle ThinLTO.
+ if !env.contains_key(&"FAT_LTO".to_string()) {
+ rustflags.push("-Clto=off".to_string());
+ }
+ // FIXME(antoyo): remove once the atomic shim is gone
+ if os_name == "Darwin" {
+ rustflags.extend_from_slice(&[
+ "-Clink-arg=-undefined".to_string(),
+ "-Clink-arg=dynamic_lookup".to_string(),
+ ]);
+ }
+ env.insert("RUSTFLAGS".to_string(), rustflags.join(" "));
+ // display metadata load errors
+ env.insert("RUSTC_LOG".to_string(), "warn".to_string());
+
+ let sysroot = current_dir.join(&format!(
+ "build_sysroot/sysroot/lib/rustlib/{}/lib",
+ self.target_triple,
+ ));
+ let ld_library_path = format!(
+ "{target}:{sysroot}:{gcc_path}",
+ // FIXME: It's possible to pick another out directory. Would be nice to have a command
+ // line option to change it.
+ target = current_dir.join("target/out").display(),
+ sysroot = sysroot.display(),
+ gcc_path = self.gcc_path,
+ );
+ env.insert("LIBRARY_PATH".to_string(), ld_library_path.clone());
+ env.insert("LD_LIBRARY_PATH".to_string(), ld_library_path.clone());
+ env.insert("DYLD_LIBRARY_PATH".to_string(), ld_library_path);
+
+ // NOTE: To avoid the -fno-inline errors, use /opt/gcc/bin/gcc instead of cc.
+ // To do so, add a symlink for cc to /opt/gcc/bin/gcc in our PATH.
+ // Another option would be to add the following Rust flag: -Clinker=/opt/gcc/bin/gcc
+ let path = std::env::var("PATH").unwrap_or_default();
+ env.insert(
+ "PATH".to_string(),
+ format!(
+ "/opt/gcc/bin:/opt/m68k-unknown-linux-gnu/bin{}{}",
+ if path.is_empty() { "" } else { ":" },
+ path
+ ),
+ );
+
+ self.rustc_command = vec![rustc];
+ self.rustc_command.extend_from_slice(&rustflags);
+ self.rustc_command.extend_from_slice(&[
+ "-L".to_string(),
+ "crate=target/out".to_string(),
+ "--out-dir".to_string(),
+ self.cargo_target_dir.clone(),
+ ]);
+
+ if !env.contains_key("RUSTC_LOG") {
+ env.insert("RUSTC_LOG".to_string(), "warn".to_string());
+ }
+ Ok(())
}
- env.insert("RUSTFLAGS".to_string(), rustflags.join(" "));
- // display metadata load errors
- env.insert("RUSTC_LOG".to_string(), "warn".to_string());
- let sysroot = current_dir.join(&format!(
- "build_sysroot/sysroot/lib/rustlib/{}/lib",
- target_triple
- ));
- let ld_library_path = format!(
- "{target}:{sysroot}:{gcc_path}",
- target = current_dir.join("target/out").display(),
- sysroot = sysroot.display(),
- );
- env.insert("LD_LIBRARY_PATH".to_string(), ld_library_path.clone());
- env.insert("DYLD_LIBRARY_PATH".to_string(), ld_library_path);
-
- // NOTE: To avoid the -fno-inline errors, use /opt/gcc/bin/gcc instead of cc.
- // To do so, add a symlink for cc to /opt/gcc/bin/gcc in our PATH.
- // Another option would be to add the following Rust flag: -Clinker=/opt/gcc/bin/gcc
- let path = std::env::var("PATH").unwrap_or_default();
- env.insert("PATH".to_string(), format!("/opt/gcc/bin:{}", path));
-
- let mut rustc_command = vec!["rustc".to_string()];
- rustc_command.extend_from_slice(&rustflags);
- rustc_command.extend_from_slice(&[
- "-L".to_string(),
- "crate=target/out".to_string(),
- "--out-dir".to_string(),
- "target/out".to_string(),
- ]);
- Ok(ConfigInfo {
- target,
- target_triple,
- rustc_command,
- })
+ pub fn show_usage() {
+ println!(
+ "\
+ --target-triple [arg] : Set the target triple to [arg]
+ --target [arg] : Set the target to [arg]
+ --out-dir : Location where the files will be generated
+ --release : Build in release mode
+ --release-sysroot : Build sysroot in release mode
+ --sysroot-panic-abort : Build the sysroot without unwinding support
+ --config-file : Location of the config file to be used
+ --cg_gcc-path : Location of the rustc_codegen_gcc root folder (used
+ when ran from another directory)"
+ );
+ }
+}
+
+fn download_gccjit(
+ url: String,
+ output_dir: &Path,
+ tempfile_name: String,
+ with_progress_bar: bool,
+) -> Result<(), String> {
+ // Try curl. If that fails and we are on windows, fallback to PowerShell.
+ let mut ret = run_command_with_output(
+ &[
+ &"curl",
+ &"--speed-time",
+ &"30",
+ &"--speed-limit",
+ &"10", // timeout if speed is < 10 bytes/sec for > 30 seconds
+ &"--connect-timeout",
+ &"30", // timeout if cannot connect within 30 seconds
+ &"-o",
+ &tempfile_name,
+ &"--retry",
+ &"3",
+ &"-SRfL",
+ if with_progress_bar {
+ &"--progress-bar"
+ } else {
+ &"-s"
+ },
+ &url.as_str(),
+ ],
+ Some(&output_dir),
+ );
+ if ret.is_err() && cfg!(windows) {
+ eprintln!("Fallback to PowerShell");
+ ret = run_command_with_output(
+ &[
+ &"PowerShell.exe",
+ &"/nologo",
+ &"-Command",
+ &"[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12;",
+ &format!(
+ "(New-Object System.Net.WebClient).DownloadFile('{}', '{}')",
+ url, tempfile_name,
+ )
+ .as_str(),
+ ],
+ Some(&output_dir),
+ );
+ }
+ ret
}
diff --git a/build_system/src/info.rs b/build_system/src/info.rs
new file mode 100644
index 000000000000..ea38791d38c9
--- /dev/null
+++ b/build_system/src/info.rs
@@ -0,0 +1,19 @@
+use crate::config::ConfigInfo;
+
+pub fn run() -> Result<(), String> {
+ let mut config = ConfigInfo::default();
+
+ // We skip binary name and the `info` command.
+ let mut args = std::env::args().skip(2);
+ while let Some(arg) = args.next() {
+ if arg == "--help" {
+ println!("Display the path where the libgccjit will be located");
+ return Ok(());
+ }
+ config.parse_argument(&arg, &mut args)?;
+ }
+ config.no_download = true;
+ config.setup_gcc_path()?;
+ println!("{}", config.gcc_path);
+ Ok(())
+}
diff --git a/build_system/src/main.rs b/build_system/src/main.rs
index bff82b6e3e57..18dc4b21a962 100644
--- a/build_system/src/main.rs
+++ b/build_system/src/main.rs
@@ -2,12 +2,17 @@ use std::env;
use std::process;
mod build;
+mod cargo;
+mod clean;
mod config;
+mod info;
mod prepare;
mod rustc_info;
mod test;
mod utils;
+const BUILD_DIR: &str = "build";
+
macro_rules! arg_error {
($($err:tt)*) => {{
eprintln!($($err)*);
@@ -22,17 +27,23 @@ fn usage() {
"\
Available commands for build_system:
+ cargo : Run cargo command
+ clean : Run clean command
prepare : Run prepare command
build : Run build command
test : Run test command
+ info : Run info command
--help : Show this message"
);
}
pub enum Command {
+ Cargo,
+ Clean,
Prepare,
Build,
Test,
+ Info,
}
fn main() {
@@ -41,9 +52,12 @@ fn main() {
}
let command = match env::args().nth(1).as_deref() {
+ Some("cargo") => Command::Cargo,
+ Some("clean") => Command::Clean,
Some("prepare") => Command::Prepare,
Some("build") => Command::Build,
Some("test") => Command::Test,
+ Some("info") => Command::Info,
Some("--help") => {
usage();
process::exit(0);
@@ -57,11 +71,14 @@ fn main() {
};
if let Err(e) = match command {
+ Command::Cargo => cargo::run(),
+ Command::Clean => clean::run(),
Command::Prepare => prepare::run(),
Command::Build => build::run(),
Command::Test => test::run(),
+ Command::Info => info::run(),
} {
- eprintln!("Command failed to run: {e:?}");
+ eprintln!("Command failed to run: {e}");
process::exit(1);
}
}
diff --git a/build_system/src/prepare.rs b/build_system/src/prepare.rs
index 6c7c85868345..1a3eb7d2e57b 100644
--- a/build_system/src/prepare.rs
+++ b/build_system/src/prepare.rs
@@ -1,10 +1,16 @@
use crate::rustc_info::get_rustc_path;
-use crate::utils::{cargo_install, git_clone, run_command, run_command_with_output, walk_dir};
+use crate::utils::{
+ cargo_install, git_clone, remove_file, run_command, run_command_with_output, walk_dir,
+};
use std::fs;
use std::path::Path;
-fn prepare_libcore(sysroot_path: &Path, libgccjit12_patches: bool, cross_compile: bool) -> Result<(), String> {
+fn prepare_libcore(
+ sysroot_path: &Path,
+ libgccjit12_patches: bool,
+ cross_compile: bool,
+) -> Result<(), String> {
let rustc_path = match get_rustc_path() {
Some(path) => path,
None => return Err("`rustc` path not found".to_string()),
@@ -88,10 +94,14 @@ fn prepare_libcore(sysroot_path: &Path, libgccjit12_patches: bool, cross_compile
},
)?;
if cross_compile {
- walk_dir("cross_patches", |_| Ok(()), |file_path: &Path| {
- patches.push(file_path.to_path_buf());
- Ok(())
- })?;
+ walk_dir(
+ "patches/cross_patches",
+ |_| Ok(()),
+ |file_path: &Path| {
+ patches.push(file_path.to_path_buf());
+ Ok(())
+ },
+ )?;
}
if libgccjit12_patches {
walk_dir(
@@ -129,8 +139,7 @@ fn build_raytracer(repo_dir: &Path) -> Result<(), String> {
run_command(&[&"cargo", &"build"], Some(repo_dir))?;
let mv_target = repo_dir.join("raytracer_cg_llvm");
if mv_target.is_file() {
- std::fs::remove_file(&mv_target)
- .map_err(|e| format!("Failed to remove file `{}`: {e:?}", mv_target.display()))?;
+ remove_file(&mv_target)?;
}
run_command(
&[&"mv", &"target/debug/main", &"raytracer_cg_llvm"],
@@ -143,16 +152,16 @@ fn clone_and_setup(repo_url: &str, checkout_commit: &str, extra: Option) -
where
F: Fn(&Path) -> Result<(), String>,
{
- let clone_result = git_clone(repo_url, None)?;
+ let clone_result = git_clone(repo_url, Some(&Path::new(crate::BUILD_DIR)), false)?;
if !clone_result.ran_clone {
println!("`{}` has already been cloned", clone_result.repo_name);
}
- let repo_path = Path::new(&clone_result.repo_name);
+ let repo_path = Path::new(crate::BUILD_DIR).join(&clone_result.repo_name);
run_command(&[&"git", &"checkout", &"--", &"."], Some(&repo_path))?;
run_command(&[&"git", &"checkout", &checkout_commit], Some(&repo_path))?;
let filter = format!("-{}-", clone_result.repo_name);
walk_dir(
- "crate_patches",
+ "patches/crate_patches",
|_| Ok(()),
|file_path| {
let patch = file_path.as_os_str().to_str().unwrap();
@@ -210,8 +219,7 @@ impl PrepareArg {
--only-libcore : Only setup libcore and don't clone other repositories
--cross : Apply the patches needed to do cross-compilation
--libgccjit12-patches : Apply patches needed for libgccjit12
- --help : Show this help
-"#
+ --help : Show this help"#
)
}
}
diff --git a/build_system/src/test.rs b/build_system/src/test.rs
index 4c8c63e59ab7..ab65fed0f75a 100644
--- a/build_system/src/test.rs
+++ b/build_system/src/test.rs
@@ -1,15 +1,1215 @@
-use crate::utils::run_command_with_output;
+use crate::build;
+use crate::config::{Channel, ConfigInfo};
+use crate::utils::{
+ get_toolchain, git_clone, remove_file, run_command, run_command_with_env,
+ run_command_with_output_and_env, rustc_version_info, split_args, walk_dir,
+};
-fn get_args<'a>(args: &mut Vec<&'a dyn AsRef<OsStr>>, extra_args: &'a Vec<String>) {
- for extra_arg in extra_args {
- args.push(extra_arg);
+use std::collections::{BTreeSet, HashMap};
+use std::ffi::OsStr;
+use std::fs::{create_dir_all, remove_dir_all, File};
+use std::io::{BufRead, BufReader};
+use std::path::{Path, PathBuf};
+use std::str::FromStr;
+
+type Env = HashMap<String, String>;
+type Runner = fn(&Env, &TestArg) -> Result<(), String>;
+type Runners = HashMap<&'static str, (&'static str, Runner)>;
+
+fn get_runners() -> Runners {
+ let mut runners = HashMap::new();
+
+ runners.insert(
+ "--test-rustc",
+ ("Run all rustc tests", test_rustc as Runner),
+ );
+ runners.insert(
+ "--test-successful-rustc",
+ ("Run successful rustc tests", test_successful_rustc),
+ );
+ runners.insert(
+ "--test-failing-rustc",
+ ("Run failing rustc tests", test_failing_rustc),
+ );
+ runners.insert(
+ "--projects",
+ ("Run the tests of popular crates", test_projects),
+ );
+ runners.insert("--test-libcore", ("Run libcore tests", test_libcore));
+ runners.insert("--clean", ("Empty cargo target directory", clean));
+ runners.insert("--build-sysroot", ("Build sysroot", build_sysroot));
+ runners.insert("--std-tests", ("Run std tests", std_tests));
+ runners.insert("--asm-tests", ("Run asm tests", asm_tests));
+ runners.insert(
+ "--extended-tests",
+ ("Run extended sysroot tests", extended_sysroot_tests),
+ );
+ runners.insert(
+ "--extended-rand-tests",
+ ("Run extended rand tests", extended_rand_tests),
+ );
+ runners.insert(
+ "--extended-regex-example-tests",
+ (
+ "Run extended regex example tests",
+ extended_regex_example_tests,
+ ),
+ );
+ runners.insert(
+ "--extended-regex-tests",
+ ("Run extended regex tests", extended_regex_tests),
+ );
+ runners.insert("--mini-tests", ("Run mini tests", mini_tests));
+
+ runners
+}
+
+fn get_number_after_arg(
+ args: &mut impl Iterator<Item = String>,
+ option: &str,
+) -> Result<usize, String> {
+ match args.next() {
+ Some(nb) if !nb.is_empty() => match usize::from_str(&nb) {
+ Ok(nb) => Ok(nb),
+ Err(_) => Err(format!(
+ "Expected a number after `{}`, found `{}`",
+ option, nb
+ )),
+ },
+ _ => Err(format!(
+ "Expected a number after `{}`, found nothing",
+ option
+ )),
}
}
-pub fn run() -> Result<(), String> {
- let mut args: Vec<&dyn AsRef<OsStr>> = vec![&"bash", &"test.sh"];
- let extra_args = std::env::args().skip(2).collect::<Vec<String>>();
- get_args(&mut args, &extra_args);
- let current_dir = std::env::current_dir().map_err(|error| format!("`current_dir` failed: {:?}", error))?;
- run_command_with_output(args.as_slice(), Some(&current_dir))
+fn show_usage() {
+ println!(
+ r#"
+`test` command help:
+
+ --release : Build codegen in release mode
+ --sysroot-panic-abort : Build the sysroot without unwinding support.
+ --no-default-features : Add `--no-default-features` flag
+ --features [arg] : Add a new feature [arg]
+ --use-system-gcc : Use system installed libgccjit
+ --build-only : Only build rustc_codegen_gcc then exits
+ --use-backend : Useful only for rustc testsuite
+ --nb-parts : Used to split rustc_tests (for CI needs)
+ --current-part : Used with `--nb-parts`, allows you to specify which parts to test"#
+ );
+ ConfigInfo::show_usage();
+ for (option, (doc, _)) in get_runners() {
+ // FIXME: Instead of using the hard-coded `23` value, better to compute it instead.
+ let needed_spaces = 23_usize.saturating_sub(option.len());
+ let spaces: String = std::iter::repeat(' ').take(needed_spaces).collect();
+ println!(" {}{}: {}", option, spaces, doc);
+ }
+ println!(" --help : Show this help");
+}
+
+#[derive(Default, Debug)]
+struct TestArg {
+ no_default_features: bool,
+ build_only: bool,
+ use_system_gcc: bool,
+ runners: BTreeSet<String>,
+ flags: Vec<String>,
+ backend: Option<String>,
+ nb_parts: Option<usize>,
+ current_part: Option<usize>,
+ sysroot_panic_abort: bool,
+ config_info: ConfigInfo,
+}
+
+impl TestArg {
+ fn new() -> Result<Option<Self>, String> {
+ let mut test_arg = Self::default();
+
+ // We skip binary name and the `test` command.
+ let mut args = std::env::args().skip(2);
+ let runners = get_runners();
+
+ while let Some(arg) = args.next() {
+ match arg.as_str() {
+ "--no-default-features" => {
+ // To prevent adding it more than once.
+ if !test_arg.no_default_features {
+ test_arg.flags.push("--no-default-features".into());
+ }
+ test_arg.no_default_features = true;
+ }
+ "--features" => match args.next() {
+ Some(feature) if !feature.is_empty() => {
+ test_arg
+ .flags
+ .extend_from_slice(&["--features".into(), feature]);
+ }
+ _ => {
+ return Err("Expected an argument after `--features`, found nothing".into())
+ }
+ },
+ "--use-system-gcc" => {
+ println!("Using system GCC");
+ test_arg.use_system_gcc = true;
+ }
+ "--build-only" => test_arg.build_only = true,
+ "--use-backend" => match args.next() {
+ Some(backend) if !backend.is_empty() => test_arg.backend = Some(backend),
+ _ => {
+ return Err(
+ "Expected an argument after `--use-backend`, found nothing".into()
+ )
+ }
+ },
+ "--nb-parts" => {
+ test_arg.nb_parts = Some(get_number_after_arg(&mut args, "--nb-parts")?);
+ }
+ "--current-part" => {
+ test_arg.current_part =
+ Some(get_number_after_arg(&mut args, "--current-part")?);
+ }
+ "--sysroot-panic-abort" => {
+ test_arg.sysroot_panic_abort = true;
+ }
+ "--help" => {
+ show_usage();
+ return Ok(None);
+ }
+ x if runners.contains_key(x) => {
+ test_arg.runners.insert(x.into());
+ }
+ arg => {
+ if !test_arg.config_info.parse_argument(arg, &mut args)? {
+ return Err(format!("Unknown option {}", arg));
+ }
+ }
+ }
+ }
+ match (test_arg.current_part, test_arg.nb_parts) {
+ (Some(_), Some(_)) | (None, None) => {}
+ _ => {
+ return Err(
+ "If either `--current-part` or `--nb-parts` is specified, the other one \
+ needs to be specified as well!"
+ .to_string(),
+ );
+ }
+ }
+ Ok(Some(test_arg))
+ }
+
+ pub fn is_using_gcc_master_branch(&self) -> bool {
+ !self.no_default_features
+ }
+}
+
+fn build_if_no_backend(env: &Env, args: &TestArg) -> Result<(), String> {
+ if args.backend.is_some() {
+ return Ok(());
+ }
+ let mut command: Vec<&dyn AsRef<OsStr>> = vec![&"cargo", &"rustc"];
+ let mut tmp_env;
+ let env = if args.config_info.channel == Channel::Release {
+ tmp_env = env.clone();
+ tmp_env.insert("CARGO_INCREMENTAL".to_string(), "1".to_string());
+ command.push(&"--release");
+ &tmp_env
+ } else {
+ &env
+ };
+ for flag in args.flags.iter() {
+ command.push(flag);
+ }
+ run_command_with_output_and_env(&command, None, Some(env))
+}
+
+fn clean(_env: &Env, args: &TestArg) -> Result<(), String> {
+ let _ = std::fs::remove_dir_all(&args.config_info.cargo_target_dir);
+ let path = Path::new(&args.config_info.cargo_target_dir).join("gccjit");
+ std::fs::create_dir_all(&path)
+ .map_err(|error| format!("failed to create folder `{}`: {:?}", path.display(), error))
+}
+
+fn mini_tests(env: &Env, args: &TestArg) -> Result<(), String> {
+ // FIXME: create a function "display_if_not_quiet" or something along the line.
+ println!("[BUILD] mini_core");
+ let crate_types = if args.config_info.host_triple != args.config_info.target_triple {
+ "lib"
+ } else {
+ "lib,dylib"
+ }
+ .to_string();
+ let mut command = args.config_info.rustc_command_vec();
+ command.extend_from_slice(&[
+ &"example/mini_core.rs",
+ &"--crate-name",
+ &"mini_core",
+ &"--crate-type",
+ &crate_types,
+ &"--target",
+ &args.config_info.target_triple,
+ ]);
+ run_command_with_output_and_env(&command, None, Some(&env))?;
+
+ // FIXME: create a function "display_if_not_quiet" or something along the line.
+ println!("[BUILD] example");
+ let mut command = args.config_info.rustc_command_vec();
+ command.extend_from_slice(&[
+ &"example/example.rs",
+ &"--crate-type",
+ &"lib",
+ &"--target",
+ &args.config_info.target_triple,
+ ]);
+ run_command_with_output_and_env(&command, None, Some(&env))?;
+
+ // FIXME: create a function "display_if_not_quiet" or something along the line.
+ println!("[AOT] mini_core_hello_world");
+ let mut command = args.config_info.rustc_command_vec();
+ command.extend_from_slice(&[
+ &"example/mini_core_hello_world.rs",
+ &"--crate-name",
+ &"mini_core_hello_world",
+ &"--crate-type",
+ &"bin",
+ &"-g",
+ &"--target",
+ &args.config_info.target_triple,
+ ]);
+ run_command_with_output_and_env(&command, None, Some(&env))?;
+
+ let command: &[&dyn AsRef<OsStr>] = &[
+ &Path::new(&args.config_info.cargo_target_dir).join("mini_core_hello_world"),
+ &"abc",
+ &"bcd",
+ ];
+ maybe_run_command_in_vm(&command, env, args)?;
+ Ok(())
+}
+
+fn build_sysroot(env: &Env, args: &TestArg) -> Result<(), String> {
+ // FIXME: create a function "display_if_not_quiet" or something along the line.
+ println!("[BUILD] sysroot");
+ build::build_sysroot(env, &args.config_info)?;
+ Ok(())
+}
+
+// TODO(GuillaumeGomez): when rewriting in Rust, refactor with the code in tests/lang_tests_common.rs if possible.
+fn maybe_run_command_in_vm(
+ command: &[&dyn AsRef<OsStr>],
+ env: &Env,
+ args: &TestArg,
+) -> Result<(), String> {
+ if !args.config_info.run_in_vm {
+ run_command_with_output_and_env(command, None, Some(env))?;
+ return Ok(());
+ }
+ let vm_parent_dir = match env.get("CG_GCC_VM_DIR") {
+ Some(dir) if !dir.is_empty() => PathBuf::from(dir.clone()),
+ _ => std::env::current_dir().unwrap(),
+ };
+ let vm_dir = "vm";
+ let exe_to_run = command.first().unwrap();
+ let exe = Path::new(&exe_to_run);
+ let exe_filename = exe.file_name().unwrap();
+ let vm_home_dir = vm_parent_dir.join(vm_dir).join("home");
+ let vm_exe_path = vm_home_dir.join(exe_filename);
+ let inside_vm_exe_path = Path::new("/home").join(exe_filename);
+
+ let sudo_command: &[&dyn AsRef<OsStr>] = &[&"sudo", &"cp", &exe, &vm_exe_path];
+ run_command_with_env(sudo_command, None, Some(env))?;
+
+ let mut vm_command: Vec<&dyn AsRef<OsStr>> = vec![
+ &"sudo",
+ &"chroot",
+ &vm_dir,
+ &"qemu-m68k-static",
+ &inside_vm_exe_path,
+ ];
+ vm_command.extend_from_slice(command);
+ run_command_with_output_and_env(&vm_command, Some(&vm_parent_dir), Some(env))?;
+ Ok(())
+}
+
+fn std_tests(env: &Env, args: &TestArg) -> Result<(), String> {
+ let cargo_target_dir = Path::new(&args.config_info.cargo_target_dir);
+ // FIXME: create a function "display_if_not_quiet" or something along the line.
+ println!("[AOT] arbitrary_self_types_pointers_and_wrappers");
+ let mut command = args.config_info.rustc_command_vec();
+ command.extend_from_slice(&[
+ &"example/arbitrary_self_types_pointers_and_wrappers.rs",
+ &"--crate-name",
+ &"arbitrary_self_types_pointers_and_wrappers",
+ &"--crate-type",
+ &"bin",
+ &"--target",
+ &args.config_info.target_triple,
+ ]);
+ run_command_with_env(&command, None, Some(env))?;
+ maybe_run_command_in_vm(
+ &[&cargo_target_dir.join("arbitrary_self_types_pointers_and_wrappers")],
+ env,
+ args,
+ )?;
+
+ // FIXME: create a function "display_if_not_quiet" or something along the line.
+ println!("[AOT] alloc_system");
+ let mut command = args.config_info.rustc_command_vec();
+ command.extend_from_slice(&[
+ &"example/alloc_system.rs",
+ &"--crate-type",
+ &"lib",
+ &"--target",
+ &args.config_info.target_triple,
+ ]);
+ if args.is_using_gcc_master_branch() {
+ command.extend_from_slice(&[&"--cfg", &"feature=\"master\""]);
+ }
+ run_command_with_env(&command, None, Some(env))?;
+
+ // FIXME: doesn't work on m68k.
+ if args.config_info.host_triple == args.config_info.target_triple {
+ // FIXME: create a function "display_if_not_quiet" or something along the line.
+ println!("[AOT] alloc_example");
+ let mut command = args.config_info.rustc_command_vec();
+ command.extend_from_slice(&[
+ &"example/alloc_example.rs",
+ &"--crate-type",
+ &"bin",
+ &"--target",
+ &args.config_info.target_triple,
+ ]);
+ run_command_with_env(&command, None, Some(env))?;
+ maybe_run_command_in_vm(&[&cargo_target_dir.join("alloc_example")], env, args)?;
+ }
+
+ // FIXME: create a function "display_if_not_quiet" or something along the line.
+ println!("[AOT] dst_field_align");
+ // FIXME(antoyo): Re-add -Zmir-opt-level=2 once rust-lang/rust#67529 is fixed.
+ let mut command = args.config_info.rustc_command_vec();
+ command.extend_from_slice(&[
+ &"example/dst-field-align.rs",
+ &"--crate-name",
+ &"dst_field_align",
+ &"--crate-type",
+ &"bin",
+ &"--target",
+ &args.config_info.target_triple,
+ ]);
+ run_command_with_env(&command, None, Some(env))?;
+ maybe_run_command_in_vm(&[&cargo_target_dir.join("dst_field_align")], env, args)?;
+
+ // FIXME: create a function "display_if_not_quiet" or something along the line.
+ println!("[AOT] std_example");
+ let mut command = args.config_info.rustc_command_vec();
+ command.extend_from_slice(&[
+ &"example/std_example.rs",
+ &"--crate-type",
+ &"bin",
+ &"--target",
+ &args.config_info.target_triple,
+ ]);
+ if args.is_using_gcc_master_branch() {
+ command.extend_from_slice(&[&"--cfg", &"feature=\"master\""]);
+ }
+ run_command_with_env(&command, None, Some(env))?;
+ maybe_run_command_in_vm(
+ &[
+ &cargo_target_dir.join("std_example"),
+ &"--target",
+ &args.config_info.target_triple,
+ ],
+ env,
+ args,
+ )?;
+
+ let test_flags = if let Some(test_flags) = env.get("TEST_FLAGS") {
+ split_args(test_flags)?
+ } else {
+ Vec::new()
+ };
+ // FIXME: create a function "display_if_not_quiet" or something along the line.
+ println!("[AOT] subslice-patterns-const-eval");
+ let mut command = args.config_info.rustc_command_vec();
+ command.extend_from_slice(&[
+ &"example/subslice-patterns-const-eval.rs",
+ &"--crate-type",
+ &"bin",
+ &"--target",
+ &args.config_info.target_triple,
+ ]);
+ for test_flag in &test_flags {
+ command.push(test_flag);
+ }
+ run_command_with_env(&command, None, Some(env))?;
+ maybe_run_command_in_vm(
+ &[&cargo_target_dir.join("subslice-patterns-const-eval")],
+ env,
+ args,
+ )?;
+
+ // FIXME: create a function "display_if_not_quiet" or something along the line.
+ println!("[AOT] track-caller-attribute");
+ let mut command = args.config_info.rustc_command_vec();
+ command.extend_from_slice(&[
+ &"example/track-caller-attribute.rs",
+ &"--crate-type",
+ &"bin",
+ &"--target",
+ &args.config_info.target_triple,
+ ]);
+ for test_flag in &test_flags {
+ command.push(test_flag);
+ }
+ run_command_with_env(&command, None, Some(env))?;
+ maybe_run_command_in_vm(
+ &[&cargo_target_dir.join("track-caller-attribute")],
+ env,
+ args,
+ )?;
+
+ // FIXME: create a function "display_if_not_quiet" or something along the line.
+ println!("[AOT] mod_bench");
+ let mut command = args.config_info.rustc_command_vec();
+ command.extend_from_slice(&[
+ &"example/mod_bench.rs",
+ &"--crate-type",
+ &"bin",
+ &"--target",
+ &args.config_info.target_triple,
+ ]);
+ run_command_with_env(&command, None, Some(env))?;
+ // FIXME: the compiled binary is not run.
+
+ Ok(())
+}
+
+fn setup_rustc(env: &mut Env, args: &TestArg) -> Result<PathBuf, String> {
+ let toolchain = format!(
+ "+{channel}-{host}",
+ channel = get_toolchain()?, // May also include date
+ host = args.config_info.host_triple
+ );
+ let rust_dir_path = Path::new(crate::BUILD_DIR).join("rust");
+ // If the repository was already cloned, command will fail, so doesn't matter.
+ let _ = run_command_with_output_and_env(
+ &[
+ &"git",
+ &"clone",
+ &"https://github.com/rust-lang/rust.git",
+ &rust_dir_path,
+ ],
+ None,
+ Some(env),
+ );
+ let rust_dir: Option<&Path> = Some(&rust_dir_path);
+ run_command(&[&"git", &"checkout", &"--", &"tests/"], rust_dir)?;
+ run_command_with_output_and_env(&[&"git", &"fetch"], rust_dir, Some(env))?;
+ let rustc_commit = match rustc_version_info(env.get("RUSTC").map(|s| s.as_str()))?.commit_hash {
+ Some(commit_hash) => commit_hash,
+ None => return Err("Couldn't retrieve rustc commit hash".to_string()),
+ };
+ if rustc_commit != "unknown" {
+ run_command_with_output_and_env(
+ &[&"git", &"checkout", &rustc_commit],
+ rust_dir,
+ Some(env),
+ )?;
+ } else {
+ run_command_with_output_and_env(&[&"git", &"checkout"], rust_dir, Some(env))?;
+ }
+ let cargo = String::from_utf8(
+ run_command_with_env(&[&"rustup", &"which", &"cargo"], rust_dir, Some(env))?.stdout,
+ )
+ .map_err(|error| format!("Failed to retrieve cargo path: {:?}", error))
+ .and_then(|cargo| {
+ let cargo = cargo.trim().to_owned();
+ if cargo.is_empty() {
+ Err(format!("`cargo` path is empty"))
+ } else {
+ Ok(cargo)
+ }
+ })?;
+ let rustc = String::from_utf8(
+ run_command_with_env(
+ &[&"rustup", &toolchain, &"which", &"rustc"],
+ rust_dir,
+ Some(env),
+ )?
+ .stdout,
+ )
+ .map_err(|error| format!("Failed to retrieve rustc path: {:?}", error))
+ .and_then(|rustc| {
+ let rustc = rustc.trim().to_owned();
+ if rustc.is_empty() {
+ Err(format!("`rustc` path is empty"))
+ } else {
+ Ok(rustc)
+ }
+ })?;
+ let llvm_filecheck = match run_command_with_env(
+ &[
+ &"bash",
+ &"-c",
+ &"which FileCheck-10 || \
+ which FileCheck-11 || \
+ which FileCheck-12 || \
+ which FileCheck-13 || \
+ which FileCheck-14",
+ ],
+ rust_dir,
+ Some(env),
+ ) {
+ Ok(cmd) => String::from_utf8_lossy(&cmd.stdout).to_string(),
+ Err(_) => {
+ eprintln!("Failed to retrieve LLVM FileCheck, ignoring...");
+ String::new()
+ }
+ };
+ let file_path = rust_dir_path.join("config.toml");
+ std::fs::write(
+ &file_path,
+ &format!(
+ r#"change-id = 115898
+
+[rust]
+codegen-backends = []
+deny-warnings = false
+verbose-tests = true
+
+[build]
+cargo = "{cargo}"
+local-rebuild = true
+rustc = "{rustc}"
+
+[target.x86_64-unknown-linux-gnu]
+llvm-filecheck = "{llvm_filecheck}"
+
+[llvm]
+download-ci-llvm = false
+"#,
+ cargo = cargo,
+ rustc = rustc,
+ llvm_filecheck = llvm_filecheck.trim(),
+ ),
+ )
+ .map_err(|error| {
+ format!(
+ "Failed to write into `{}`: {:?}",
+ file_path.display(),
+ error
+ )
+ })?;
+ Ok(rust_dir_path)
+}
+
+fn asm_tests(env: &Env, args: &TestArg) -> Result<(), String> {
+ let mut env = env.clone();
+ let rust_dir = setup_rustc(&mut env, args)?;
+ // FIXME: create a function "display_if_not_quiet" or something along the line.
+ println!("[TEST] rustc asm test suite");
+
+ env.insert("COMPILETEST_FORCE_STAGE0".to_string(), "1".to_string());
+
+ let rustc_args =
+ &format!(
+ r#"-Zpanic-abort-tests \
+ -Zcodegen-backend="{pwd}/target/{channel}/librustc_codegen_gcc.{dylib_ext}" \
+ --sysroot "{pwd}/build_sysroot/sysroot" -Cpanic=abort"#,
+ pwd = std::env::current_dir()
+ .map_err(|error| format!("`current_dir` failed: {:?}", error))?
+ .display(),
+ channel = args.config_info.channel.as_str(),
+ dylib_ext = args.config_info.dylib_ext,
+ );
+
+ #[cfg(not(feature="master"))]
+ let rustc_args = format!("{} -Csymbol-mangling-version=v0", rustc_args);
+
+ run_command_with_env(
+ &[
+ &"./x.py",
+ &"test",
+ &"--run",
+ &"always",
+ &"--stage",
+ &"0",
+ &"tests/assembly/asm",
+ &"--rustc-args",
+ &rustc_args,
+ ],
+ Some(&rust_dir),
+ Some(&env),
+ )?;
+ Ok(())
+}
+
+fn run_cargo_command(
+    command: &[&dyn AsRef<OsStr>],
+ cwd: Option<&Path>,
+ env: &Env,
+ args: &TestArg,
+) -> Result<(), String> {
+ run_cargo_command_with_callback(command, cwd, env, args, |cargo_command, cwd, env| {
+ run_command_with_output_and_env(cargo_command, cwd, Some(env))?;
+ Ok(())
+ })
+}
+
+fn run_cargo_command_with_callback<F>(
+    command: &[&dyn AsRef<OsStr>],
+ cwd: Option<&Path>,
+ env: &Env,
+ args: &TestArg,
+ callback: F,
+) -> Result<(), String>
+where
+    F: Fn(&[&dyn AsRef<OsStr>], Option<&Path>, &Env) -> Result<(), String>,
+{
+ let toolchain = get_toolchain()?;
+ let toolchain_arg = format!("+{}", toolchain);
+ let rustc_version = String::from_utf8(
+ run_command_with_env(&[&args.config_info.rustc_command[0], &"-V"], cwd, Some(env))?.stdout,
+ )
+ .map_err(|error| format!("Failed to retrieve rustc version: {:?}", error))?;
+ let rustc_toolchain_version = String::from_utf8(
+ run_command_with_env(
+ &[&args.config_info.rustc_command[0], &toolchain_arg, &"-V"],
+ cwd,
+ Some(env),
+ )?
+ .stdout,
+ )
+ .map_err(|error| format!("Failed to retrieve rustc +toolchain version: {:?}", error))?;
+
+ if rustc_version != rustc_toolchain_version {
+ eprintln!(
+ "rustc_codegen_gcc is built for `{}` but the default rustc version is `{}`.",
+ rustc_toolchain_version, rustc_version,
+ );
+ eprintln!("Using `{}`.", rustc_toolchain_version);
+ }
+ let mut env = env.clone();
+ let rustflags = env.get("RUSTFLAGS").cloned().unwrap_or_default();
+ env.insert("RUSTDOCFLAGS".to_string(), rustflags);
+    let mut cargo_command: Vec<&dyn AsRef<OsStr>> = vec![&"cargo", &toolchain_arg];
+ cargo_command.extend_from_slice(&command);
+ callback(&cargo_command, cwd, &env)
+}
+
+// FIXME(antoyo): linker gives multiple definitions error on Linux
+// echo "[BUILD] sysroot in release mode"
+// ./build_sysroot/build_sysroot.sh --release
+
+fn test_projects(env: &Env, args: &TestArg) -> Result<(), String> {
+ let projects = [
+ //"https://gitlab.gnome.org/GNOME/librsvg", // FIXME: doesn't compile in the CI since the
+ // version of cairo and other libraries is too old.
+ "https://github.com/rust-random/getrandom",
+ "https://github.com/BurntSushi/memchr",
+ "https://github.com/dtolnay/itoa",
+ "https://github.com/rust-lang/cfg-if",
+ "https://github.com/rust-lang-nursery/lazy-static.rs",
+ //"https://github.com/marshallpierce/rust-base64", // FIXME: one test is OOM-killed.
+ // TODO: ignore the base64 test that is OOM-killed.
+ "https://github.com/time-rs/time",
+ "https://github.com/rust-lang/log",
+ "https://github.com/bitflags/bitflags",
+ //"https://github.com/serde-rs/serde", // FIXME: one test fails.
+ //"https://github.com/rayon-rs/rayon", // TODO: very slow, only run on master?
+ //"https://github.com/rust-lang/cargo", // TODO: very slow, only run on master?
+ ];
+
+    let run_tests = |projects_path, iter: &mut dyn Iterator<Item = &&str>| -> Result<(), String> {
+ for project in iter {
+ let clone_result = git_clone(project, Some(projects_path), true)?;
+ let repo_path = Path::new(&clone_result.repo_dir);
+ run_cargo_command(&[&"build", &"--release"], Some(repo_path), env, args)?;
+ run_cargo_command(&[&"test"], Some(repo_path), env, args)?;
+ }
+
+ Ok(())
+ };
+
+ let projects_path = Path::new("projects");
+ create_dir_all(projects_path)
+ .map_err(|err| format!("Failed to create directory `projects`: {}", err))?;
+
+ let nb_parts = args.nb_parts.unwrap_or(0);
+ if nb_parts > 0 {
+ // We increment the number of tests by one because if this is an odd number, we would skip
+ // one test.
+ let count = projects.len() / nb_parts + 1;
+ let current_part = args.current_part.unwrap();
+ let start = current_part * count;
+ // We remove the projects we don't want to test.
+ run_tests(projects_path, &mut projects.iter().skip(start).take(count))?;
+ } else {
+ run_tests(projects_path, &mut projects.iter())?;
+ }
+
+ Ok(())
+}
+
+fn test_libcore(env: &Env, args: &TestArg) -> Result<(), String> {
+ // FIXME: create a function "display_if_not_quiet" or something along the line.
+ println!("[TEST] libcore");
+ let path = Path::new("build_sysroot/sysroot_src/library/core/tests");
+ let _ = remove_dir_all(path.join("target"));
+ run_cargo_command(&[&"test"], Some(path), env, args)?;
+ Ok(())
+}
+
+// echo "[BENCH COMPILE] mod_bench"
+//
+// COMPILE_MOD_BENCH_INLINE="$RUSTC example/mod_bench.rs --crate-type bin -Zmir-opt-level=3 -O --crate-name mod_bench_inline"
+// COMPILE_MOD_BENCH_LLVM_0="rustc example/mod_bench.rs --crate-type bin -Copt-level=0 -o $cargo_target_dir/mod_bench_llvm_0 -Cpanic=abort"
+// COMPILE_MOD_BENCH_LLVM_1="rustc example/mod_bench.rs --crate-type bin -Copt-level=1 -o $cargo_target_dir/mod_bench_llvm_1 -Cpanic=abort"
+// COMPILE_MOD_BENCH_LLVM_2="rustc example/mod_bench.rs --crate-type bin -Copt-level=2 -o $cargo_target_dir/mod_bench_llvm_2 -Cpanic=abort"
+// COMPILE_MOD_BENCH_LLVM_3="rustc example/mod_bench.rs --crate-type bin -Copt-level=3 -o $cargo_target_dir/mod_bench_llvm_3 -Cpanic=abort"
+//
+// Use 100 runs, because a single compilation doesn't take more than ~150ms, so it isn't very slow
+// hyperfine --runs ${COMPILE_RUNS:-100} "$COMPILE_MOD_BENCH_INLINE" "$COMPILE_MOD_BENCH_LLVM_0" "$COMPILE_MOD_BENCH_LLVM_1" "$COMPILE_MOD_BENCH_LLVM_2" "$COMPILE_MOD_BENCH_LLVM_3"
+// echo "[BENCH RUN] mod_bench"
+// hyperfine --runs ${RUN_RUNS:-10} $cargo_target_dir/mod_bench{,_inline} $cargo_target_dir/mod_bench_llvm_*
+
+fn extended_rand_tests(env: &Env, args: &TestArg) -> Result<(), String> {
+ if !args.is_using_gcc_master_branch() {
+ println!("Not using GCC master branch. Skipping `extended_rand_tests`.");
+ return Ok(());
+ }
+ let path = Path::new(crate::BUILD_DIR).join("rand");
+ run_cargo_command(&[&"clean"], Some(&path), env, args)?;
+ // FIXME: create a function "display_if_not_quiet" or something along the line.
+ println!("[TEST] rust-random/rand");
+ run_cargo_command(&[&"test", &"--workspace"], Some(&path), env, args)?;
+ Ok(())
+}
+
+fn extended_regex_example_tests(env: &Env, args: &TestArg) -> Result<(), String> {
+ if !args.is_using_gcc_master_branch() {
+ println!("Not using GCC master branch. Skipping `extended_regex_example_tests`.");
+ return Ok(());
+ }
+ let path = Path::new(crate::BUILD_DIR).join("regex");
+ run_cargo_command(&[&"clean"], Some(&path), env, args)?;
+ // FIXME: create a function "display_if_not_quiet" or something along the line.
+ println!("[TEST] rust-lang/regex example shootout-regex-dna");
+ let mut env = env.clone();
+ // newer aho_corasick versions throw a deprecation warning
+ let rustflags = format!(
+ "{} --cap-lints warn",
+ env.get("RUSTFLAGS").cloned().unwrap_or_default()
+ );
+ env.insert("RUSTFLAGS".to_string(), rustflags);
+ // Make sure `[codegen mono items] start` doesn't poison the diff
+ run_cargo_command(
+ &[&"build", &"--example", &"shootout-regex-dna"],
+ Some(&path),
+ &env,
+ args,
+ )?;
+
+ run_cargo_command_with_callback(
+ &[&"run", &"--example", &"shootout-regex-dna"],
+ Some(&path),
+ &env,
+ args,
+ |cargo_command, cwd, env| {
+ // FIXME: rewrite this with `child.stdin.write_all()` because
+ // `examples/regexdna-input.txt` is very small.
+            let mut command: Vec<&dyn AsRef<OsStr>> =
+                vec![&"bash", &"-c"];
+ let cargo_args = cargo_command
+ .iter()
+ .map(|s| s.as_ref().to_str().unwrap())
+                .collect::<Vec<_>>();
+ let bash_command = format!(
+ "cat examples/regexdna-input.txt | {} | grep -v 'Spawned thread' > res.txt",
+ cargo_args.join(" "),
+ );
+ command.push(&bash_command);
+ run_command_with_output_and_env(&command, cwd, Some(env))?;
+ run_command_with_output_and_env(
+ &[&"diff", &"-u", &"res.txt", &"examples/regexdna-output.txt"],
+ cwd,
+ Some(env),
+ )?;
+ Ok(())
+ },
+ )?;
+
+ Ok(())
+}
+
+fn extended_regex_tests(env: &Env, args: &TestArg) -> Result<(), String> {
+ if !args.is_using_gcc_master_branch() {
+ println!("Not using GCC master branch. Skipping `extended_regex_tests`.");
+ return Ok(());
+ }
+ // FIXME: create a function "display_if_not_quiet" or something along the line.
+ println!("[TEST] rust-lang/regex tests");
+ let mut env = env.clone();
+ // newer aho_corasick versions throw a deprecation warning
+ let rustflags = format!(
+ "{} --cap-lints warn",
+ env.get("RUSTFLAGS").cloned().unwrap_or_default()
+ );
+ env.insert("RUSTFLAGS".to_string(), rustflags);
+ let path = Path::new(crate::BUILD_DIR).join("regex");
+ run_cargo_command(
+ &[
+ &"test",
+ &"--tests",
+ &"--",
+ // FIXME: try removing `--exclude-should-panic` argument
+ &"--exclude-should-panic",
+ &"--test-threads",
+ &"1",
+ &"-Zunstable-options",
+ &"-q",
+ ],
+ Some(&path),
+ &env,
+ args,
+ )?;
+ Ok(())
+}
+
+fn extended_sysroot_tests(env: &Env, args: &TestArg) -> Result<(), String> {
+ // pushd simple-raytracer
+ // echo "[BENCH COMPILE] ebobby/simple-raytracer"
+ // hyperfine --runs "${RUN_RUNS:-10}" --warmup 1 --prepare "cargo clean" \
+ // "RUSTC=rustc RUSTFLAGS='' cargo build" \
+ // "../y.sh cargo build"
+
+ // echo "[BENCH RUN] ebobby/simple-raytracer"
+ // cp ./target/debug/main ./raytracer_cg_gcc
+ // hyperfine --runs "${RUN_RUNS:-10}" ./raytracer_cg_llvm ./raytracer_cg_gcc
+ // popd
+ extended_rand_tests(env, args)?;
+ extended_regex_example_tests(env, args)?;
+ extended_regex_tests(env, args)?;
+
+ Ok(())
+}
+
+fn should_not_remove_test(file: &str) -> bool {
+ // contains //~ERROR, but shouldn't be removed
+ [
+ "issues/auxiliary/issue-3136-a.rs",
+ "type-alias-impl-trait/auxiliary/cross_crate_ice.rs",
+ "type-alias-impl-trait/auxiliary/cross_crate_ice2.rs",
+ "macros/rfc-2011-nicer-assert-messages/auxiliary/common.rs",
+ "imports/ambiguous-1.rs",
+ "imports/ambiguous-4-extern.rs",
+ "entry-point/auxiliary/bad_main_functions.rs",
+ ]
+ .iter()
+ .any(|to_ignore| file.ends_with(to_ignore))
+}
+
+fn should_remove_test(file_path: &Path) -> Result<bool, String> {
+ // Tests generating errors.
+ let file = File::open(file_path)
+ .map_err(|error| format!("Failed to read `{}`: {:?}", file_path.display(), error))?;
+ for line in BufReader::new(file).lines().filter_map(|line| line.ok()) {
+ let line = line.trim();
+ if line.is_empty() {
+ continue;
+ }
+ if [
+ "// error-pattern:",
+ "// build-fail",
+ "// run-fail",
+ "-Cllvm-args",
+ "//~",
+ "thread",
+ ]
+ .iter()
+ .any(|check| line.contains(check))
+ {
+ return Ok(true);
+ }
+ if line.contains("//[") && line.contains("]~") {
+ return Ok(true);
+ }
+ }
+ if file_path
+ .display()
+ .to_string()
+ .contains("ambiguous-4-extern.rs")
+ {
+ eprintln!("nothing found for {file_path:?}");
+ }
+ Ok(false)
+}
+
+fn test_rustc_inner<F>(env: &Env, args: &TestArg, prepare_files_callback: F) -> Result<(), String>
+where
+    F: Fn(&Path) -> Result<bool, String>,
+{
+ // FIXME: create a function "display_if_not_quiet" or something along the line.
+ println!("[TEST] rust-lang/rust");
+ let mut env = env.clone();
+ let rust_path = setup_rustc(&mut env, args)?;
+
+ walk_dir(
+ rust_path.join("tests/ui"),
+ |dir| {
+ let dir_name = dir.file_name().and_then(|name| name.to_str()).unwrap_or("");
+ if [
+ "abi",
+ "extern",
+ "unsized-locals",
+ "proc-macro",
+ "threads-sendsync",
+ "borrowck",
+ "test-attrs",
+ ]
+ .iter()
+ .any(|name| *name == dir_name)
+ {
+ std::fs::remove_dir_all(dir).map_err(|error| {
+ format!("Failed to remove folder `{}`: {:?}", dir.display(), error)
+ })?;
+ }
+ Ok(())
+ },
+ |_| Ok(()),
+ )?;
+
+ // These two functions are used to remove files that are known to not be working currently
+ // with the GCC backend to reduce noise.
+ fn dir_handling(dir: &Path) -> Result<(), String> {
+ if dir
+ .file_name()
+ .map(|name| name == "auxiliary")
+ .unwrap_or(true)
+ {
+ return Ok(());
+ }
+ walk_dir(dir, dir_handling, file_handling)
+ }
+ fn file_handling(file_path: &Path) -> Result<(), String> {
+ if !file_path
+ .extension()
+ .map(|extension| extension == "rs")
+ .unwrap_or(false)
+ {
+ return Ok(());
+ }
+ let path_str = file_path.display().to_string().replace("\\", "/");
+ if should_not_remove_test(&path_str) {
+ return Ok(());
+ } else if should_remove_test(file_path)? {
+ return remove_file(&file_path);
+ }
+ Ok(())
+ }
+
+ remove_file(&rust_path.join("tests/ui/consts/const_cmp_type_id.rs"))?;
+ remove_file(&rust_path.join("tests/ui/consts/issue-73976-monomorphic.rs"))?;
+ // this test is oom-killed in the CI.
+ remove_file(&rust_path.join("tests/ui/consts/issue-miri-1910.rs"))?;
+ // Tests generating errors.
+ remove_file(&rust_path.join("tests/ui/consts/issue-94675.rs"))?;
+ remove_file(&rust_path.join("tests/ui/mir/mir_heavy_promoted.rs"))?;
+
+ walk_dir(rust_path.join("tests/ui"), dir_handling, file_handling)?;
+
+ if !prepare_files_callback(&rust_path)? {
+ // FIXME: create a function "display_if_not_quiet" or something along the line.
+ println!("Keeping all UI tests");
+ }
+
+ let nb_parts = args.nb_parts.unwrap_or(0);
+ if nb_parts > 0 {
+ let current_part = args.current_part.unwrap();
+ // FIXME: create a function "display_if_not_quiet" or something along the line.
+ println!(
+ "Splitting ui_test into {} parts (and running part {})",
+ nb_parts, current_part
+ );
+ let out = String::from_utf8(
+ run_command(
+ &[
+ &"find",
+ &"tests/ui",
+ &"-type",
+ &"f",
+ &"-name",
+ &"*.rs",
+ &"-not",
+ &"-path",
+ &"*/auxiliary/*",
+ ],
+ Some(&rust_path),
+ )?
+ .stdout,
+ )
+ .map_err(|error| format!("Failed to retrieve output of find command: {:?}", error))?;
+ let mut files = out
+ .split('\n')
+ .map(|line| line.trim())
+ .filter(|line| !line.is_empty())
+            .collect::<Vec<_>>();
+ // To ensure it'll be always the same sub files, we sort the content.
+ files.sort();
+ // We increment the number of tests by one because if this is an odd number, we would skip
+ // one test.
+ let count = files.len() / nb_parts + 1;
+ let start = current_part * count;
+ // We remove the files we don't want to test.
+ for path in files.iter().skip(start).take(count) {
+ remove_file(&rust_path.join(path))?;
+ }
+ }
+
+ // FIXME: create a function "display_if_not_quiet" or something along the line.
+ println!("[TEST] rustc test suite");
+ env.insert("COMPILETEST_FORCE_STAGE0".to_string(), "1".to_string());
+ let rustc_args = format!(
+ "{} -Zcodegen-backend={} --sysroot {}",
+ env.get("TEST_FLAGS").unwrap_or(&String::new()),
+ args.config_info.cg_backend_path,
+ args.config_info.sysroot_path,
+ );
+
+ #[cfg(not(feature="master"))]
+ let rustc_args = format!("{} -Csymbol-mangling-version=v0", rustc_args);
+
+ env.get_mut("RUSTFLAGS").unwrap().clear();
+ run_command_with_output_and_env(
+ &[
+ &"./x.py",
+ &"test",
+ &"--run",
+ &"always",
+ &"--stage",
+ &"0",
+ &"tests/ui",
+ &"--rustc-args",
+ &rustc_args,
+ ],
+ Some(&rust_path),
+ Some(&env),
+ )?;
+ Ok(())
+}
+
+fn test_rustc(env: &Env, args: &TestArg) -> Result<(), String> {
+ test_rustc_inner(env, args, |_| Ok(false))
+}
+
+fn test_failing_rustc(env: &Env, args: &TestArg) -> Result<(), String> {
+ test_rustc_inner(env, args, |rust_path| {
+ // Removing all tests.
+ run_command(
+ &[
+ &"find",
+ &"tests/ui",
+ &"-type",
+ &"f",
+ &"-name",
+ &"*.rs",
+ &"-not",
+ &"-path",
+ &"*/auxiliary/*",
+ &"-delete",
+ ],
+ Some(rust_path),
+ )?;
+ // Putting back only the failing ones.
+ let path = "tests/failing-ui-tests.txt";
+ if let Ok(files) = std::fs::read_to_string(path) {
+ for file in files
+ .split('\n')
+ .map(|line| line.trim())
+ .filter(|line| !line.is_empty())
+ {
+ run_command(&[&"git", &"checkout", &"--", &file], Some(&rust_path))?;
+ }
+ } else {
+ println!(
+ "Failed to read `{}`, not putting back failing ui tests",
+ path
+ );
+ }
+ Ok(true)
+ })
+}
+
+fn test_successful_rustc(env: &Env, args: &TestArg) -> Result<(), String> {
+ test_rustc_inner(env, args, |rust_path| {
+ // Removing the failing tests.
+ let path = "tests/failing-ui-tests.txt";
+ if let Ok(files) = std::fs::read_to_string(path) {
+ for file in files
+ .split('\n')
+ .map(|line| line.trim())
+ .filter(|line| !line.is_empty())
+ {
+ let path = rust_path.join(file);
+ remove_file(&path)?;
+ }
+ } else {
+ println!(
+ "Failed to read `{}`, not putting back failing ui tests",
+ path
+ );
+ }
+ Ok(true)
+ })
+}
+
+fn run_all(env: &Env, args: &TestArg) -> Result<(), String> {
+ clean(env, args)?;
+ mini_tests(env, args)?;
+ build_sysroot(env, args)?;
+ std_tests(env, args)?;
+ // asm_tests(env, args)?;
+ test_libcore(env, args)?;
+ extended_sysroot_tests(env, args)?;
+ test_rustc(env, args)?;
+ Ok(())
+}
+
+pub fn run() -> Result<(), String> {
+ let mut args = match TestArg::new()? {
+ Some(args) => args,
+ None => return Ok(()),
+ };
+    let mut env: HashMap<String, String> = std::env::vars().collect();
+
+ if !args.use_system_gcc {
+ args.config_info.setup_gcc_path()?;
+ env.insert(
+ "LIBRARY_PATH".to_string(),
+ args.config_info.gcc_path.clone(),
+ );
+ env.insert(
+ "LD_LIBRARY_PATH".to_string(),
+ args.config_info.gcc_path.clone(),
+ );
+ }
+
+ build_if_no_backend(&env, &args)?;
+ if args.build_only {
+ println!("Since it's build only, exiting...");
+ return Ok(());
+ }
+
+ args.config_info.setup(&mut env, args.use_system_gcc)?;
+
+ if args.runners.is_empty() {
+ run_all(&env, &args)?;
+ } else {
+ let runners = get_runners();
+ for runner in args.runners.iter() {
+ runners.get(runner.as_str()).unwrap().1(&env, &args)?;
+ }
+ }
+
+ Ok(())
}
diff --git a/build_system/src/utils.rs b/build_system/src/utils.rs
index 536f33a80293..33dcd9ef7005 100644
--- a/build_system/src/utils.rs
+++ b/build_system/src/utils.rs
@@ -29,22 +29,40 @@ fn check_exit_status(
input: &[&dyn AsRef],
cwd: Option<&Path>,
exit_status: ExitStatus,
+ output: Option<&Output>,
+ show_err: bool,
) -> Result<(), String> {
if exit_status.success() {
- Ok(())
- } else {
- Err(format!(
- "Command `{}`{} exited with status {:?}",
- input
- .iter()
- .map(|s| s.as_ref().to_str().unwrap())
- .collect::>()
- .join(" "),
- cwd.map(|cwd| format!(" (running in folder `{}`)", cwd.display()))
- .unwrap_or_default(),
- exit_status.code(),
- ))
+ return Ok(());
}
+ let mut error = format!(
+ "Command `{}`{} exited with status {:?}",
+ input
+ .iter()
+ .map(|s| s.as_ref().to_str().unwrap())
+            .collect::<Vec<_>>()
+ .join(" "),
+ cwd.map(|cwd| format!(" (running in folder `{}`)", cwd.display()))
+ .unwrap_or_default(),
+ exit_status.code()
+ );
+    let input = input.iter().map(|i| i.as_ref()).collect::<Vec<&OsStr>>();
+ if show_err {
+ eprintln!("Command `{:?}` failed", input);
+ }
+ if let Some(output) = output {
+ let stdout = String::from_utf8_lossy(&output.stdout);
+ if !stdout.is_empty() {
+ error.push_str("\n==== STDOUT ====\n");
+ error.push_str(&*stdout);
+ }
+ let stderr = String::from_utf8_lossy(&output.stderr);
+ if !stderr.is_empty() {
+ error.push_str("\n==== STDERR ====\n");
+ error.push_str(&*stderr);
+ }
+ }
+ Err(error)
}
fn command_error(input: &[&dyn AsRef], cwd: &Option<&Path>, error: D) -> String {
@@ -73,7 +91,7 @@ pub fn run_command_with_env(
let output = get_command_inner(input, cwd, env)
.output()
.map_err(|e| command_error(input, &cwd, e))?;
- check_exit_status(input, cwd, output.status)?;
+ check_exit_status(input, cwd, output.status, Some(&output), true)?;
Ok(output)
}
@@ -86,7 +104,7 @@ pub fn run_command_with_output(
.map_err(|e| command_error(input, &cwd, e))?
.wait()
.map_err(|e| command_error(input, &cwd, e))?;
- check_exit_status(input, cwd, exit_status)?;
+ check_exit_status(input, cwd, exit_status, None, true)?;
Ok(())
}
@@ -100,7 +118,21 @@ pub fn run_command_with_output_and_env(
.map_err(|e| command_error(input, &cwd, e))?
.wait()
.map_err(|e| command_error(input, &cwd, e))?;
- check_exit_status(input, cwd, exit_status)?;
+ check_exit_status(input, cwd, exit_status, None, true)?;
+ Ok(())
+}
+
+pub fn run_command_with_output_and_env_no_err(
+    input: &[&dyn AsRef<OsStr>],
+    cwd: Option<&Path>,
+    env: Option<&HashMap<String, String>>,
+) -> Result<(), String> {
+ let exit_status = get_command_inner(input, cwd, env)
+ .spawn()
+ .map_err(|e| command_error(input, &cwd, e))?
+ .wait()
+ .map_err(|e| command_error(input, &cwd, e))?;
+ check_exit_status(input, cwd, exit_status, None, false)?;
Ok(())
}
@@ -143,57 +175,90 @@ pub fn get_os_name() -> Result {
}
}
-pub fn get_rustc_host_triple() -> Result {
- let output = run_command(&[&"rustc", &"-vV"], None)?;
- let content = std::str::from_utf8(&output.stdout).unwrap_or("");
-
- for line in content.split('\n').map(|line| line.trim()) {
- if !line.starts_with("host:") {
- continue;
- }
- return Ok(line.split(':').nth(1).unwrap().trim().to_string());
- }
- Err("Cannot find host triple".to_string())
+#[derive(Default, PartialEq)]
+pub struct RustcVersionInfo {
+ pub short: String,
+ pub version: String,
+    pub host: Option<String>,
+    pub commit_hash: Option<String>,
+    pub commit_date: Option<String>,
}
-pub fn get_gcc_path() -> Result {
- let content = match fs::read_to_string("gcc_path") {
- Ok(content) => content,
- Err(_) => {
- return Err(
- "Please put the path to your custom build of libgccjit in the file \
- `gcc_path`, see Readme.md for details"
- .into(),
- )
+pub fn rustc_toolchain_version_info(toolchain: &str) -> Result<RustcVersionInfo, String> {
+ rustc_version_info_inner(None, Some(toolchain))
+}
+
+pub fn rustc_version_info(rustc: Option<&str>) -> Result<RustcVersionInfo, String> {
+ rustc_version_info_inner(rustc, None)
+}
+
+fn rustc_version_info_inner(
+ rustc: Option<&str>,
+ toolchain: Option<&str>,
+) -> Result<RustcVersionInfo, String> {
+ let output = if let Some(toolchain) = toolchain {
+ run_command(&[&rustc.unwrap_or("rustc"), &toolchain, &"-vV"], None)
+ } else {
+ run_command(&[&rustc.unwrap_or("rustc"), &"-vV"], None)
+ }?;
+ let content = std::str::from_utf8(&output.stdout).unwrap_or("");
+
+ let mut info = RustcVersionInfo::default();
+ let mut lines = content.split('\n');
+ info.short = match lines.next() {
+ Some(s) => s.to_string(),
+ None => return Err("failed to retrieve rustc version".to_string()),
+ };
+
+ for line in lines.map(|line| line.trim()) {
+ match line.split_once(':') {
+ Some(("host", data)) => info.host = Some(data.trim().to_string()),
+ Some(("release", data)) => info.version = data.trim().to_string(),
+ Some(("commit-hash", data)) => info.commit_hash = Some(data.trim().to_string()),
+ Some(("commit-date", data)) => info.commit_date = Some(data.trim().to_string()),
+ _ => {}
}
+ }
+ if info.version.is_empty() {
+ Err("failed to retrieve rustc version".to_string())
+ } else {
+ Ok(info)
+ }
+}
+
+pub fn get_toolchain() -> Result<String, String> {
+ let content = match fs::read_to_string("rust-toolchain") {
+ Ok(content) => content,
+ Err(_) => return Err("No `rust-toolchain` file found".to_string()),
};
match content
.split('\n')
.map(|line| line.trim())
.filter(|line| !line.is_empty())
+ .filter_map(|line| {
+ if !line.starts_with("channel") {
+ return None;
+ }
+ line.split('"').skip(1).next()
+ })
.next()
{
- Some(gcc_path) => {
- let path = Path::new(gcc_path);
- if !path.exists() {
- Err(format!(
- "Path `{}` contained in the `gcc_path` file doesn't exist",
- gcc_path,
- ))
- } else {
- Ok(gcc_path.into())
- }
- }
- None => Err("No path found in `gcc_path` file".into()),
+ Some(toolchain) => Ok(toolchain.to_string()),
+ None => Err("Couldn't find `channel` in `rust-toolchain` file".to_string()),
}
}
pub struct CloneResult {
pub ran_clone: bool,
pub repo_name: String,
+ pub repo_dir: String,
}
-pub fn git_clone(to_clone: &str, dest: Option<&Path>) -> Result {
+pub fn git_clone(
+ to_clone: &str,
+ dest: Option<&Path>,
+ shallow_clone: bool,
+) -> Result {
let repo_name = to_clone.split('/').last().unwrap();
let repo_name = match repo_name.strip_suffix(".git") {
Some(n) => n.to_string(),
@@ -207,13 +272,20 @@ pub fn git_clone(to_clone: &str, dest: Option<&Path>) -> Result<CloneResult, String> {
+    let mut command: Vec<&dyn AsRef<OsStr>> = vec![&"git", &"clone", &to_clone, &dest];
+ if shallow_clone {
+ command.push(&"--depth");
+ command.push(&"1");
+ }
+ run_command_with_output(&command, None)?;
Ok(CloneResult {
ran_clone: true,
repo_name,
+ repo_dir: dest.display().to_string(),
})
}
@@ -238,3 +310,105 @@ where
}
Ok(())
}
+
+pub fn split_args(args: &str) -> Result<Vec<String>, String> {
+ let mut out = Vec::new();
+ let mut start = 0;
+ let args = args.trim();
+ let mut iter = args.char_indices().peekable();
+
+ while let Some((pos, c)) = iter.next() {
+ if c == ' ' {
+ out.push(args[start..pos].to_string());
+ let mut found_start = false;
+ while let Some((pos, c)) = iter.peek() {
+ if *c != ' ' {
+ start = *pos;
+ found_start = true;
+ break;
+ } else {
+ iter.next();
+ }
+ }
+ if !found_start {
+ return Ok(out);
+ }
+ } else if c == '"' || c == '\'' {
+ let end = c;
+ let mut found_end = false;
+ while let Some((_, c)) = iter.next() {
+ if c == end {
+ found_end = true;
+ break;
+ } else if c == '\\' {
+ // We skip the escaped character.
+ iter.next();
+ }
+ }
+ if !found_end {
+ return Err(format!(
+ "Didn't find `{}` at the end of `{}`",
+ end,
+ &args[start..]
+ ));
+ }
+ } else if c == '\\' {
+ // We skip the escaped character.
+ iter.next();
+ }
+ }
+ let s = args[start..].trim();
+ if !s.is_empty() {
+ out.push(s.to_string());
+ }
+ Ok(out)
+}
+
+pub fn remove_file<P: AsRef<Path> + ?Sized>(file_path: &P) -> Result<(), String> {
+ std::fs::remove_file(file_path).map_err(|error| {
+ format!(
+ "Failed to remove `{}`: {:?}",
+ file_path.as_ref().display(),
+ error
+ )
+ })
+}
+
+pub fn create_symlink<P: AsRef<Path>, Q: AsRef<Path>>(original: P, link: Q) -> Result<(), String> {
+ #[cfg(windows)]
+ let symlink = std::os::windows::fs::symlink_file;
+ #[cfg(not(windows))]
+ let symlink = std::os::unix::fs::symlink;
+
+ symlink(&original, &link).map_err(|err| {
+ format!(
+ "failed to create a symlink `{}` to `{}`: {:?}",
+ original.as_ref().display(),
+ link.as_ref().display(),
+ err,
+ )
+ })
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn test_split_args() {
+ // Missing `"` at the end.
+ assert!(split_args("\"tada").is_err());
+ // Missing `'` at the end.
+ assert!(split_args("\'tada").is_err());
+
+ assert_eq!(
+ split_args("a \"b\" c"),
+ Ok(vec!["a".to_string(), "\"b\"".to_string(), "c".to_string()])
+ );
+ // Trailing whitespace characters.
+ assert_eq!(
+ split_args(" a \"b\" c "),
+ Ok(vec!["a".to_string(), "\"b\"".to_string(), "c".to_string()])
+ );
+ }
+}
diff --git a/cargo.sh b/cargo.sh
deleted file mode 100755
index b68a08ee88f8..000000000000
--- a/cargo.sh
+++ /dev/null
@@ -1,23 +0,0 @@
-#!/usr/bin/env bash
-
-if [ -z $CHANNEL ]; then
-export CHANNEL='debug'
-fi
-
-pushd $(dirname "$0") >/dev/null
-source config.sh
-
-# read nightly compiler from rust-toolchain file
-TOOLCHAIN=$(cat rust-toolchain | grep channel | sed 's/channel = "\(.*\)"/\1/')
-
-popd >/dev/null
-
-if [[ $(${RUSTC} -V) != $(${RUSTC} +${TOOLCHAIN} -V) ]]; then
- echo "rustc_codegen_gcc is build for $(rustc +${TOOLCHAIN} -V) but the default rustc version is $(rustc -V)."
- echo "Using $(rustc +${TOOLCHAIN} -V)."
-fi
-
-cmd=$1
-shift
-
-RUSTDOCFLAGS="$RUSTFLAGS" cargo +${TOOLCHAIN} $cmd $@
diff --git a/clean_all.sh b/clean_all.sh
deleted file mode 100755
index 782bd3e5058c..000000000000
--- a/clean_all.sh
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/usr/bin/env bash
-set -e
-set -v
-
-rm -rf target/ build_sysroot/{sysroot/,sysroot_src/,target/,Cargo.lock} perf.data{,.old}
-rm -rf regex/ simple-raytracer/
diff --git a/config.example.toml b/config.example.toml
new file mode 100644
index 000000000000..dcc414b73100
--- /dev/null
+++ b/config.example.toml
@@ -0,0 +1,2 @@
+gcc-path = "gcc-build/gcc"
+# download-gccjit = true
diff --git a/config.sh b/config.sh
deleted file mode 100644
index 7ae2175d41de..000000000000
--- a/config.sh
+++ /dev/null
@@ -1,85 +0,0 @@
-set -e
-
-export CARGO_INCREMENTAL=0
-
-if [ -f ./gcc_path ]; then
- export GCC_PATH=$(cat gcc_path)
-elif (( $use_system_gcc == 1 )); then
- echo 'Using system GCC'
-else
- echo 'Please put the path to your custom build of libgccjit in the file `gcc_path`, see Readme.md for details'
- exit 1
-fi
-
-if [[ -z "$RUSTC" ]]; then
- export RUSTC="rustc"
-fi
-
-unamestr=`uname`
-if [[ "$unamestr" == 'Linux' ]]; then
- dylib_ext='so'
-elif [[ "$unamestr" == 'Darwin' ]]; then
- dylib_ext='dylib'
-else
- echo "Unsupported os"
- exit 1
-fi
-
-HOST_TRIPLE=$($RUSTC -vV | grep host | cut -d: -f2 | tr -d " ")
-# TODO: remove $OVERWRITE_TARGET_TRIPLE when config.sh is removed.
-TARGET_TRIPLE="${OVERWRITE_TARGET_TRIPLE:-$HOST_TRIPLE}"
-
-linker=''
-RUN_WRAPPER=''
-if [[ "$HOST_TRIPLE" != "$TARGET_TRIPLE" ]]; then
- RUN_WRAPPER=run_in_vm
- if [[ "$TARGET_TRIPLE" == "m68k-unknown-linux-gnu" ]]; then
- linker='-Clinker=m68k-unknown-linux-gnu-gcc'
- elif [[ "$TARGET_TRIPLE" == "aarch64-unknown-linux-gnu" ]]; then
- # We are cross-compiling for aarch64. Use the correct linker and run tests in qemu.
- linker='-Clinker=aarch64-linux-gnu-gcc'
- else
- echo "Unknown non-native platform"
- fi
-fi
-
-# Since we don't support ThinLTO, disable LTO completely when not trying to do LTO.
-# TODO(antoyo): remove when we can handle ThinLTO.
-disable_lto_flags=''
-if [[ ! -v FAT_LTO ]]; then
- disable_lto_flags='-Clto=off'
-fi
-
-if [[ -z "$BUILTIN_BACKEND" ]]; then
- export RUSTFLAGS="$CG_RUSTFLAGS $linker -Csymbol-mangling-version=v0 -Cdebuginfo=2 $disable_lto_flags -Zcodegen-backend=$(pwd)/target/${CHANNEL:-debug}/librustc_codegen_gcc.$dylib_ext --sysroot $(pwd)/build_sysroot/sysroot $TEST_FLAGS"
-else
- export RUSTFLAGS="$CG_RUSTFLAGS $linker -Csymbol-mangling-version=v0 -Cdebuginfo=2 $disable_lto_flags -Zcodegen-backend=gcc $TEST_FLAGS -Cpanic=abort"
-
- if [[ ! -z "$RUSTC_SYSROOT" ]]; then
- export RUSTFLAGS="$RUSTFLAGS --sysroot $RUSTC_SYSROOT"
- fi
-fi
-
-# FIXME(antoyo): remove once the atomic shim is gone
-if [[ unamestr == 'Darwin' ]]; then
- export RUSTFLAGS="$RUSTFLAGS -Clink-arg=-undefined -Clink-arg=dynamic_lookup"
-fi
-
-if [[ -z "$cargo_target_dir" ]]; then
- RUST_CMD="$RUSTC $RUSTFLAGS -L crate=target/out --out-dir target/out"
- cargo_target_dir="target/out"
-else
- RUST_CMD="$RUSTC $RUSTFLAGS -L crate=$cargo_target_dir --out-dir $cargo_target_dir"
-fi
-export RUSTC_LOG=warn # display metadata load errors
-
-export LD_LIBRARY_PATH="$(pwd)/target/out:$(pwd)/build_sysroot/sysroot/lib/rustlib/$TARGET_TRIPLE/lib"
-if [[ ! -z "$:$GCC_PATH" ]]; then
- export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$GCC_PATH"
-fi
-
-export DYLD_LIBRARY_PATH=$LD_LIBRARY_PATH
-# NOTE: To avoid the -fno-inline errors, use /opt/gcc/bin/gcc instead of cc.
-# To do so, add a symlink for cc to /opt/gcc/bin/gcc in our PATH.
-# Another option would be to add the following Rust flag: -Clinker=/opt/gcc/bin/gcc
-export PATH="/opt/gcc/bin:/opt/m68k-unknown-linux-gnu/bin:$PATH"
diff --git a/doc/debugging-gcc-lto.md b/doc/debugging-gcc-lto.md
new file mode 100644
index 000000000000..93b150d76865
--- /dev/null
+++ b/doc/debugging-gcc-lto.md
@@ -0,0 +1,3 @@
+# How to debug GCC LTO
+
+Run the command with `-v -save-temps` and then extract the `lto1` line from the output and run that under the debugger.
diff --git a/doc/debugging-libgccjit.md b/doc/debugging-libgccjit.md
new file mode 100644
index 000000000000..be0ec83f7cdc
--- /dev/null
+++ b/doc/debugging-libgccjit.md
@@ -0,0 +1,74 @@
+# Debugging libgccjit
+
+Sometimes, libgccjit will crash and output an error like this:
+
+```
+during RTL pass: expand
+libgccjit.so: error: in expmed_mode_index, at expmed.h:249
+0x7f0da2e61a35 expmed_mode_index
+ ../../../gcc/gcc/expmed.h:249
+0x7f0da2e61aa4 expmed_op_cost_ptr
+ ../../../gcc/gcc/expmed.h:271
+0x7f0da2e620dc sdiv_cost_ptr
+ ../../../gcc/gcc/expmed.h:540
+0x7f0da2e62129 sdiv_cost
+ ../../../gcc/gcc/expmed.h:558
+0x7f0da2e73c12 expand_divmod(int, tree_code, machine_mode, rtx_def*, rtx_def*, rtx_def*, int)
+ ../../../gcc/gcc/expmed.c:4335
+0x7f0da2ea1423 expand_expr_real_2(separate_ops*, rtx_def*, machine_mode, expand_modifier)
+ ../../../gcc/gcc/expr.c:9240
+0x7f0da2cd1a1e expand_gimple_stmt_1
+ ../../../gcc/gcc/cfgexpand.c:3796
+0x7f0da2cd1c30 expand_gimple_stmt
+ ../../../gcc/gcc/cfgexpand.c:3857
+0x7f0da2cd90a9 expand_gimple_basic_block
+ ../../../gcc/gcc/cfgexpand.c:5898
+0x7f0da2cdade8 execute
+ ../../../gcc/gcc/cfgexpand.c:6582
+```
+
+To see the code which causes this error, call the following function:
+
+```c
+gcc_jit_context_dump_to_file(ctxt, "/tmp/output.c", 1 /* update_locations */)
+```
+
+This will create a C-like file and add the locations into the IR pointing to this C file.
+Then, rerun the program and it will output the location in the second line:
+
+```
+libgccjit.so: /tmp/something.c:61322:0: error: in expmed_mode_index, at expmed.h:249
+```
+
+Or add a breakpoint to `add_error` in gdb and print the line number using:
+
+```
+p loc->m_line
+p loc->m_filename->m_buffer
+```
+
+To print a debug representation of a tree:
+
+```c
+debug_tree(expr);
+```
+
+(defined in print-tree.h)
+
+To print a debug representation of a gimple struct:
+
+```c
+debug_gimple_stmt(gimple_struct)
+```
+
+To get the `rustc` command to run in `gdb`, add the `--verbose` flag to `cargo build`.
+
+To have the correct file paths in `gdb` instead of `/usr/src/debug/gcc/libstdc++-v3/libsupc++/eh_personality.cc`:
+
+Maybe by calling the following at the beginning of gdb:
+
+```
+set substitute-path /usr/src/debug/gcc /path/to/gcc-repo/gcc
+```
+
+TODO(antoyo): but that's not what I remember I was doing.
diff --git a/doc/errors.md b/doc/errors.md
new file mode 100644
index 000000000000..5727b0ff7c86
--- /dev/null
+++ b/doc/errors.md
@@ -0,0 +1,27 @@
+# Common errors
+
+This file lists errors that were encountered and how to fix them.
+
+### `failed to build archive` error
+
+When you get this error:
+
+```
+error: failed to build archive: failed to open object file: No such file or directory (os error 2)
+```
+
+That can be caused by the fact that you try to compile with `lto = "fat"`, but you didn't compile the sysroot with LTO.
+(Not sure if that's the reason since I cannot reproduce anymore. Maybe it happened when forgetting setting `FAT_LTO`.)
+
+### ld: cannot find crtbegin.o
+
+When compiling an executable with libgccjit, if setting the `*LIBRARY_PATH` variables to the install directory, you will get the following errors:
+
+```
+ld: cannot find crtbegin.o: No such file or directory
+ld: cannot find -lgcc: No such file or directory
+ld: cannot find -lgcc: No such file or directory
+libgccjit.so: error: error invoking gcc driver
+```
+
+To fix this, set the variables to `gcc-build/build/gcc`.
diff --git a/doc/subtree.md b/doc/subtree.md
new file mode 100644
index 000000000000..5d7af2a066bd
--- /dev/null
+++ b/doc/subtree.md
@@ -0,0 +1,52 @@
+# git subtree sync
+
+`rustc_codegen_gcc` is a subtree of the rust compiler. As such, it needs to be
+synced from time to time to ensure changes that happened on their side are also
+included on our side.
+
+### How to install a forked git-subtree
+
+Using git-subtree with `rustc` requires a patched git to make it work.
+The PR that is needed is [here](https://github.com/gitgitgadget/git/pull/493).
+Use the following instructions to install it:
+
+```bash
+git clone git@github.com:tqc/git.git
+cd git
+git checkout tqc/subtree
+make
+make install
+cd contrib/subtree
+make
+cp git-subtree ~/bin
+```
+
+### Syncing with rust compiler
+
+Do a sync with this command:
+
+```bash
+PATH="$HOME/bin:$PATH" ~/bin/git-subtree push -P compiler/rustc_codegen_gcc/ ../rustc_codegen_gcc/ sync_branch_name
+cd ../rustc_codegen_gcc
+git checkout master
+git pull
+git checkout sync_branch_name
+git merge master
+```
+
+To send the changes to the rust repo:
+
+```bash
+cd ../rust
+git pull origin master
+git checkout -b subtree-update_cg_gcc_YYYY-MM-DD
+PATH="$HOME/bin:$PATH" ~/bin/git-subtree pull --prefix=compiler/rustc_codegen_gcc/ https://github.com/rust-lang/rustc_codegen_gcc.git master
+git push
+
+# Immediately merge the merge commit into cg_gcc to prevent merge conflicts when syncing from rust-lang/rust later.
+PATH="$HOME/bin:$PATH" ~/bin/git-subtree push -P compiler/rustc_codegen_gcc/ ../rustc_codegen_gcc/ sync_branch_name
+```
+
+TODO: write a script that does the above.
+
+https://rust-lang.zulipchat.com/#narrow/stream/301329-t-devtools/topic/subtree.20madness/near/258877725
diff --git a/doc/tips.md b/doc/tips.md
new file mode 100644
index 000000000000..1379f5130be0
--- /dev/null
+++ b/doc/tips.md
@@ -0,0 +1,72 @@
+# Tips
+
+The following shows how to do different random small things we encountered and thought could
+be useful.
+
+### How to send arguments to the GCC linker
+
+```
+CG_RUSTFLAGS="-Clink-args=-save-temps -v" ../y.sh cargo build
+```
+
+### How to see the personality functions in the asm dump
+
+```
+CG_RUSTFLAGS="-Clink-arg=-save-temps -v -Clink-arg=-dA" ../y.sh cargo build
+```
+
+### How to see the LLVM IR for a sysroot crate
+
+```
+cargo build -v --target x86_64-unknown-linux-gnu -Zbuild-std
+# Take the command from the output and add --emit=llvm-ir
+```
+
+### To prevent the linker from unmangling symbols
+
+Run with:
+
+```
+COLLECT_NO_DEMANGLE=1
+```
+
+### How to use a custom-build rustc
+
+ * Build the stage2 compiler (`rustup toolchain link debug-current build/x86_64-unknown-linux-gnu/stage2`).
+ * Clean and rebuild the codegen with `debug-current` in the file `rust-toolchain`.
+
+### How to use [mem-trace](https://github.com/antoyo/mem-trace)
+
+`rustc` needs to be built without `jemalloc` so that `mem-trace` can overload `malloc` since `jemalloc` is linked statically, so a `LD_PRELOAD`-ed library won't have a chance to intercept the calls to `malloc`.
+
+### How to generate GIMPLE
+
+If you need to check what gccjit is generating (GIMPLE), then take a look at how to
+generate it in [gimple.md](./gimple.md).
+
+### How to build a cross-compiling libgccjit
+
+#### Building libgccjit
+
+ * Follow the instructions on [this repo](https://github.com/cross-cg-gcc-tools/cross-gcc).
+
+#### Configuring rustc_codegen_gcc
+
+ * Run `./y.sh prepare --cross` so that the sysroot is patched for the cross-compiling case.
+ * Set the path to the cross-compiling libgccjit in `gcc-path` (in `config.toml`).
+ * Make sure you have the linker for your target (for instance `m68k-unknown-linux-gnu-gcc`) in your `$PATH`. Currently, the linker name is hardcoded as being `$TARGET-gcc`. Specify the target when building the sysroot: `./y.sh build --target-triple m68k-unknown-linux-gnu`.
+ * Build your project by specifying the target: `OVERWRITE_TARGET_TRIPLE=m68k-unknown-linux-gnu ../y.sh cargo build --target m68k-unknown-linux-gnu`.
+
+If the target is not yet supported by the Rust compiler, create a [target specification file](https://docs.rust-embedded.org/embedonomicon/custom-target.html) (note that the `arch` specified in this file must be supported by the rust compiler).
+Then, you can use it the following way:
+
+ * Add the target specification file using `--target` as an **absolute** path to build the sysroot: `./y.sh build --target-triple m68k-unknown-linux-gnu --target $(pwd)/m68k-unknown-linux-gnu.json`
+ * Build your project by specifying the target specification file: `OVERWRITE_TARGET_TRIPLE=m68k-unknown-linux-gnu ../y.sh cargo build --target path/to/m68k-unknown-linux-gnu.json`.
+
+If you get the following error:
+
+```
+/usr/bin/ld: unrecognised emulation mode: m68kelf
+```
+
+Make sure you set `gcc-path` (in `config.toml`) to the install directory.
diff --git a/libgccjit.version b/libgccjit.version
new file mode 100644
index 000000000000..12dafeb9edee
--- /dev/null
+++ b/libgccjit.version
@@ -0,0 +1 @@
+cdd897840
diff --git a/crate_patches/0002-rand-Disable-failing-test.patch b/patches/crate_patches/0002-rand-Disable-failing-test.patch
similarity index 100%
rename from crate_patches/0002-rand-Disable-failing-test.patch
rename to patches/crate_patches/0002-rand-Disable-failing-test.patch
diff --git a/cross_patches/0001-Disable-libstd-and-libtest-dylib.patch b/patches/cross_patches/0001-Disable-libstd-and-libtest-dylib.patch
similarity index 100%
rename from cross_patches/0001-Disable-libstd-and-libtest-dylib.patch
rename to patches/cross_patches/0001-Disable-libstd-and-libtest-dylib.patch
diff --git a/rustup.sh b/rustup.sh
deleted file mode 100755
index a4f938e4b5b7..000000000000
--- a/rustup.sh
+++ /dev/null
@@ -1,29 +0,0 @@
-#!/usr/bin/env bash
-
-set -e
-
-case $1 in
- "prepare")
- TOOLCHAIN=$(date +%Y-%m-%d)
-
- echo "=> Installing new nightly"
- rustup toolchain install --profile minimal nightly-${TOOLCHAIN} # Sanity check to see if the nightly exists
- echo nightly-${TOOLCHAIN} > rust-toolchain
-
- echo "=> Uninstalling all old nightlies"
- for nightly in $(rustup toolchain list | grep nightly | grep -v $TOOLCHAIN | grep -v nightly-x86_64); do
- rustup toolchain uninstall $nightly
- done
-
- ./clean_all.sh
- ./y.sh prepare
- ;;
- "commit")
- git add rust-toolchain
- git commit -m "Rustup to $(rustc -V)"
- ;;
- *)
- echo "Unknown command '$1'"
- echo "Usage: ./rustup.sh prepare|commit"
- ;;
-esac
diff --git a/src/back/write.rs b/src/back/write.rs
index 2f8a54f529cc..eea62adca07e 100644
--- a/src/back/write.rs
+++ b/src/back/write.rs
@@ -70,7 +70,8 @@ pub(crate) unsafe fn codegen(cgcx: &CodegenContext, dcx: &Dia
}
if config.emit_ir {
- unimplemented!();
+ let out = cgcx.output_filenames.temp_path(OutputType::LlvmAssembly, module_name);
+ std::fs::write(out, "").expect("write file");
}
if config.emit_asm {
diff --git a/src/base.rs b/src/base.rs
index b0788718da4d..773e234150d1 100644
--- a/src/base.rs
+++ b/src/base.rs
@@ -164,7 +164,7 @@ pub fn compile_codegen_unit(tcx: TyCtxt<'_>, cgu_name: Symbol, target_info: Lock
context.add_driver_option("-v");
}
- // NOTE: The codegen generates unrechable blocks.
+ // NOTE: The codegen generates unreachable blocks.
context.set_allow_unreachable_blocks(true);
{
diff --git a/src/builder.rs b/src/builder.rs
index 42e61b3ccb5a..56d9fd30bf67 100644
--- a/src/builder.rs
+++ b/src/builder.rs
@@ -606,12 +606,29 @@ impl<'a, 'gcc, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'gcc, 'tcx> {
// ../../../gcc/gcc/cfgexpand.cc:6069
// 0x7f0101bf9194 execute
// ../../../gcc/gcc/cfgexpand.cc:6795
- if a.get_type().is_compatible_with(self.cx.float_type) {
+ let a_type = a.get_type();
+ let a_type_unqualified = a_type.unqualified();
+ if a_type.is_compatible_with(self.cx.float_type) {
let fmodf = self.context.get_builtin_function("fmodf");
// FIXME(antoyo): this seems to produce the wrong result.
return self.context.new_call(None, fmodf, &[a, b]);
}
- assert_eq!(a.get_type().unqualified(), self.cx.double_type);
+ else if let Some(vector_type) = a_type_unqualified.dyncast_vector() {
+ assert_eq!(a_type_unqualified, b.get_type().unqualified());
+
+ let num_units = vector_type.get_num_units();
+ let new_elements: Vec<_> = (0..num_units)
+ .map(|i| {
+ let index = self.context.new_rvalue_from_long(self.cx.type_u32(), i as _);
+ let x = self.extract_element(a, index).to_rvalue();
+ let y = self.extract_element(b, index).to_rvalue();
+ self.frem(x, y)
+ })
+ .collect();
+
+ return self.context.new_rvalue_from_vector(None, a_type, &new_elements)
+ }
+ assert_eq!(a_type_unqualified, self.cx.double_type);
let fmod = self.context.get_builtin_function("fmod");
return self.context.new_call(None, fmod, &[a, b]);
diff --git a/src/consts.rs b/src/consts.rs
index 70d8db02247c..054741e16423 100644
--- a/src/consts.rs
+++ b/src/consts.rs
@@ -235,7 +235,7 @@ impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> {
if !self.tcx.is_reachable_non_generic(def_id) {
#[cfg(feature = "master")]
- global.add_attribute(VarAttribute::Visibility(Visibility::Hidden));
+ global.add_string_attribute(VarAttribute::Visibility(Visibility::Hidden));
}
global
diff --git a/src/declare.rs b/src/declare.rs
index 247454fa58e1..72cba9fbba95 100644
--- a/src/declare.rs
+++ b/src/declare.rs
@@ -125,7 +125,9 @@ fn declare_raw_fn<'gcc>(cx: &CodegenCx<'gcc, '_>, name: &str, _callconv: () /*ll
let params: Vec<_> = param_types.into_iter().enumerate()
.map(|(index, param)| cx.context.new_parameter(None, *param, &format!("param{}", index))) // TODO(antoyo): set name.
.collect();
- let func = cx.context.new_function(None, cx.linkage.get(), return_type, ¶ms, mangle_name(name), variadic);
+ #[cfg(not(feature="master"))]
+ let name = mangle_name(name);
+ let func = cx.context.new_function(None, cx.linkage.get(), return_type, ¶ms, &name, variadic);
cx.functions.borrow_mut().insert(name.to_string(), func);
#[cfg(feature="master")]
@@ -179,8 +181,10 @@ fn declare_raw_fn<'gcc>(cx: &CodegenCx<'gcc, '_>, name: &str, _callconv: () /*ll
}
// FIXME(antoyo): this is a hack because libgccjit currently only supports alpha, num and _.
-// Unsupported characters: `$` and `.`.
-pub fn mangle_name(name: &str) -> String {
+// Unsupported characters: `$`, `.` and `*`.
+// FIXME(antoyo): `*` might not be expected: https://github.com/rust-lang/rust/issues/116979#issuecomment-1840926865
+#[cfg(not(feature="master"))]
+fn mangle_name(name: &str) -> String {
name.replace(|char: char| {
if !char.is_alphanumeric() && char != '_' {
debug_assert!("$.*".contains(char), "Unsupported char in function name {}: {}", name, char);
diff --git a/src/int.rs b/src/int.rs
index 9b9b3ea4f870..b69e073c4d94 100644
--- a/src/int.rs
+++ b/src/int.rs
@@ -48,7 +48,7 @@ impl<'a, 'gcc, 'tcx> Builder<'a, 'gcc, 'tcx> {
pub fn gcc_neg(&self, a: RValue<'gcc>) -> RValue<'gcc> {
let a_type = a.get_type();
- if self.is_native_int_type(a_type) {
+ if self.is_native_int_type(a_type) || a_type.is_vector() {
self.cx.context.new_unary_op(None, UnaryOp::Minus, a.get_type(), a)
}
else {
diff --git a/src/intrinsic/archs.rs b/src/intrinsic/archs.rs
index 15d67385c3e9..c4ae1751fa05 100644
--- a/src/intrinsic/archs.rs
+++ b/src/intrinsic/archs.rs
@@ -151,8 +151,10 @@ match name {
"llvm.amdgcn.msad.u8" => "__builtin_amdgcn_msad_u8",
"llvm.amdgcn.perm" => "__builtin_amdgcn_perm",
"llvm.amdgcn.permlane16" => "__builtin_amdgcn_permlane16",
+ "llvm.amdgcn.permlane16.var" => "__builtin_amdgcn_permlane16_var",
"llvm.amdgcn.permlane64" => "__builtin_amdgcn_permlane64",
"llvm.amdgcn.permlanex16" => "__builtin_amdgcn_permlanex16",
+ "llvm.amdgcn.permlanex16.var" => "__builtin_amdgcn_permlanex16_var",
"llvm.amdgcn.qsad.pk.u16.u8" => "__builtin_amdgcn_qsad_pk_u16_u8",
"llvm.amdgcn.queue.ptr" => "__builtin_amdgcn_queue_ptr",
"llvm.amdgcn.rcp.legacy" => "__builtin_amdgcn_rcp_legacy",
@@ -160,11 +162,20 @@ match name {
"llvm.amdgcn.readlane" => "__builtin_amdgcn_readlane",
"llvm.amdgcn.rsq.legacy" => "__builtin_amdgcn_rsq_legacy",
"llvm.amdgcn.s.barrier" => "__builtin_amdgcn_s_barrier",
+ "llvm.amdgcn.s.barrier.init" => "__builtin_amdgcn_s_barrier_init",
+ "llvm.amdgcn.s.barrier.join" => "__builtin_amdgcn_s_barrier_join",
+ "llvm.amdgcn.s.barrier.leave" => "__builtin_amdgcn_s_barrier_leave",
+ "llvm.amdgcn.s.barrier.signal" => "__builtin_amdgcn_s_barrier_signal",
+ "llvm.amdgcn.s.barrier.signal.isfirst" => "__builtin_amdgcn_s_barrier_signal_isfirst",
+ "llvm.amdgcn.s.barrier.signal.isfirst.var" => "__builtin_amdgcn_s_barrier_signal_isfirst_var",
+ "llvm.amdgcn.s.barrier.signal.var" => "__builtin_amdgcn_s_barrier_signal_var",
+ "llvm.amdgcn.s.barrier.wait" => "__builtin_amdgcn_s_barrier_wait",
"llvm.amdgcn.s.dcache.inv" => "__builtin_amdgcn_s_dcache_inv",
"llvm.amdgcn.s.dcache.inv.vol" => "__builtin_amdgcn_s_dcache_inv_vol",
"llvm.amdgcn.s.dcache.wb" => "__builtin_amdgcn_s_dcache_wb",
"llvm.amdgcn.s.dcache.wb.vol" => "__builtin_amdgcn_s_dcache_wb_vol",
"llvm.amdgcn.s.decperflevel" => "__builtin_amdgcn_s_decperflevel",
+ "llvm.amdgcn.s.get.barrier.state" => "__builtin_amdgcn_s_get_barrier_state",
"llvm.amdgcn.s.get.waveid.in.workgroup" => "__builtin_amdgcn_s_get_waveid_in_workgroup",
"llvm.amdgcn.s.getpc" => "__builtin_amdgcn_s_getpc",
"llvm.amdgcn.s.getreg" => "__builtin_amdgcn_s_getreg",
@@ -176,8 +187,10 @@ match name {
"llvm.amdgcn.s.setprio" => "__builtin_amdgcn_s_setprio",
"llvm.amdgcn.s.setreg" => "__builtin_amdgcn_s_setreg",
"llvm.amdgcn.s.sleep" => "__builtin_amdgcn_s_sleep",
+ "llvm.amdgcn.s.sleep.var" => "__builtin_amdgcn_s_sleep_var",
"llvm.amdgcn.s.wait.event.export.ready" => "__builtin_amdgcn_s_wait_event_export_ready",
"llvm.amdgcn.s.waitcnt" => "__builtin_amdgcn_s_waitcnt",
+ "llvm.amdgcn.s.wakeup.barrier" => "__builtin_amdgcn_s_wakeup_barrier",
"llvm.amdgcn.sad.hi.u8" => "__builtin_amdgcn_sad_hi_u8",
"llvm.amdgcn.sad.u16" => "__builtin_amdgcn_sad_u16",
"llvm.amdgcn.sad.u8" => "__builtin_amdgcn_sad_u8",
@@ -314,6 +327,8 @@ match name {
// bpf
"llvm.bpf.btf.type.id" => "__builtin_bpf_btf_type_id",
"llvm.bpf.compare" => "__builtin_bpf_compare",
+ "llvm.bpf.getelementptr.and.load" => "__builtin_bpf_getelementptr_and_load",
+ "llvm.bpf.getelementptr.and.store" => "__builtin_bpf_getelementptr_and_store",
"llvm.bpf.load.byte" => "__builtin_bpf_load_byte",
"llvm.bpf.load.half" => "__builtin_bpf_load_half",
"llvm.bpf.load.word" => "__builtin_bpf_load_word",
@@ -5776,14 +5791,6 @@ match name {
"llvm.s390.verimf" => "__builtin_s390_verimf",
"llvm.s390.verimg" => "__builtin_s390_verimg",
"llvm.s390.verimh" => "__builtin_s390_verimh",
- "llvm.s390.verllb" => "__builtin_s390_verllb",
- "llvm.s390.verllf" => "__builtin_s390_verllf",
- "llvm.s390.verllg" => "__builtin_s390_verllg",
- "llvm.s390.verllh" => "__builtin_s390_verllh",
- "llvm.s390.verllvb" => "__builtin_s390_verllvb",
- "llvm.s390.verllvf" => "__builtin_s390_verllvf",
- "llvm.s390.verllvg" => "__builtin_s390_verllvg",
- "llvm.s390.verllvh" => "__builtin_s390_verllvh",
"llvm.s390.vfaeb" => "__builtin_s390_vfaeb",
"llvm.s390.vfaef" => "__builtin_s390_vfaef",
"llvm.s390.vfaeh" => "__builtin_s390_vfaeh",
@@ -5815,7 +5822,7 @@ match name {
"llvm.s390.vistrh" => "__builtin_s390_vistrh",
"llvm.s390.vlbb" => "__builtin_s390_vlbb",
"llvm.s390.vll" => "__builtin_s390_vll",
- "llvm.s390.vlrl" => "__builtin_s390_vlrl",
+ "llvm.s390.vlrl" => "__builtin_s390_vlrlr",
"llvm.s390.vmaeb" => "__builtin_s390_vmaeb",
"llvm.s390.vmaef" => "__builtin_s390_vmaef",
"llvm.s390.vmaeh" => "__builtin_s390_vmaeh",
@@ -5885,7 +5892,7 @@ match name {
"llvm.s390.vstrczb" => "__builtin_s390_vstrczb",
"llvm.s390.vstrczf" => "__builtin_s390_vstrczf",
"llvm.s390.vstrczh" => "__builtin_s390_vstrczh",
- "llvm.s390.vstrl" => "__builtin_s390_vstrl",
+ "llvm.s390.vstrl" => "__builtin_s390_vstrlr",
"llvm.s390.vsumb" => "__builtin_s390_vsumb",
"llvm.s390.vsumgf" => "__builtin_s390_vsumgf",
"llvm.s390.vsumgh" => "__builtin_s390_vsumgh",
diff --git a/src/intrinsic/llvm.rs b/src/intrinsic/llvm.rs
index 35eb4a11005b..0d2ce20c654c 100644
--- a/src/intrinsic/llvm.rs
+++ b/src/intrinsic/llvm.rs
@@ -262,7 +262,7 @@ pub fn adjust_intrinsic_arguments<'a, 'b, 'gcc, 'tcx>(builder: &Builder<'a, 'gcc
},
// NOTE: the LLVM intrinsic receives 3 floats, but the GCC builtin requires 3 vectors.
// FIXME: the intrinsics like _mm_mask_fmadd_sd should probably directly call the GCC
- // instrinsic to avoid this.
+ // intrinsic to avoid this.
"__builtin_ia32_vfmaddss3_round" => {
let new_args = args.to_vec();
let arg1_type = gcc_func.get_param_type(0);
diff --git a/src/intrinsic/simd.rs b/src/intrinsic/simd.rs
index 9fa978cd2ef7..cecc982bb1f8 100644
--- a/src/intrinsic/simd.rs
+++ b/src/intrinsic/simd.rs
@@ -1,3 +1,5 @@
+use std::iter::FromIterator;
+
use gccjit::ToRValue;
use gccjit::{BinaryOp, RValue, Type};
#[cfg(feature = "master")]
@@ -21,6 +23,8 @@ use rustc_target::abi::Align;
use crate::builder::Builder;
#[cfg(feature = "master")]
use crate::context::CodegenCx;
+#[cfg(not(feature = "master"))]
+use crate::common::SignType;
pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>(
bx: &mut Builder<'a, 'gcc, 'tcx>,
@@ -156,6 +160,195 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>(
return Ok(compare_simd_types(bx, arg1, arg2, in_elem, llret_ty, cmp_op));
}
+ let simd_bswap = |bx: &mut Builder<'a, 'gcc, 'tcx>, vector: RValue<'gcc>| -> RValue<'gcc> {
+ let v_type = vector.get_type();
+ let vector_type = v_type.unqualified().dyncast_vector().expect("vector type");
+ let elem_type = vector_type.get_element_type();
+ let elem_size_bytes = elem_type.get_size();
+ if elem_size_bytes == 1 {
+ return vector;
+ }
+
+ let type_size_bytes = elem_size_bytes as u64 * in_len;
+ let shuffle_indices = Vec::from_iter(0..type_size_bytes);
+ let byte_vector_type = bx.context.new_vector_type(bx.type_u8(), type_size_bytes);
+ let byte_vector = bx.context.new_bitcast(None, args[0].immediate(), byte_vector_type);
+
+ #[cfg(not(feature = "master"))]
+ let shuffled = {
+ let new_elements: Vec<_> = shuffle_indices.chunks_exact(elem_size_bytes as _)
+ .flat_map(|x| x.iter().rev())
+ .map(|&i| {
+ let index = bx.context.new_rvalue_from_long(bx.u64_type, i as _);
+ bx.extract_element(byte_vector, index)
+ })
+ .collect();
+
+ bx.context.new_rvalue_from_vector(None, byte_vector_type, &new_elements)
+ };
+ #[cfg(feature = "master")]
+ let shuffled = {
+ let indices: Vec<_> = shuffle_indices.chunks_exact(elem_size_bytes as _)
+ .flat_map(|x| x.iter().rev())
+ .map(|&i| bx.context.new_rvalue_from_int(bx.u8_type, i as _))
+ .collect();
+
+ let mask = bx.context.new_rvalue_from_vector(None, byte_vector_type, &indices);
+ bx.context.new_rvalue_vector_perm(None, byte_vector, byte_vector, mask)
+ };
+ bx.context.new_bitcast(None, shuffled, v_type)
+ };
+
+ if name == sym::simd_bswap || name == sym::simd_bitreverse {
+ require!(
+ bx.type_kind(bx.element_type(llret_ty)) == TypeKind::Integer,
+ InvalidMonomorphization::UnsupportedOperation {
+ span,
+ name,
+ in_ty,
+ in_elem,
+ }
+ );
+ }
+
+ if name == sym::simd_bswap {
+ return Ok(simd_bswap(bx, args[0].immediate()));
+ }
+
+ // We use a different algorithm from non-vector bitreverse to take advantage of most
+ // processors' vector shuffle units. It works like this:
+ // 1. Generate pre-reversed low and high nibbles as a vector.
+ // 2. Byte-swap the input.
+ // 3. Mask off the low and high nibbles of each byte in the byte-swapped input.
+ // 4. Shuffle the pre-reversed low and high-nibbles using the masked nibbles as a shuffle mask.
+ // 5. Combine the results of the shuffle back together and cast back to the original type.
+ #[cfg(feature = "master")]
+ if name == sym::simd_bitreverse {
+ let vector = args[0].immediate();
+ let v_type = vector.get_type();
+ let vector_type = v_type.unqualified().dyncast_vector().expect("vector type");
+ let elem_type = vector_type.get_element_type();
+ let elem_size_bytes = elem_type.get_size();
+
+ let type_size_bytes = elem_size_bytes as u64 * in_len;
+ // We need to ensure at least 16 entries in our vector type, since the pre-reversed vectors
+ // we generate below have 16 entries in them. `new_rvalue_vector_perm` requires the mask
+ // vector to be of the same length as the source vectors.
+ let byte_vector_type_size = type_size_bytes.max(16);
+
+ let byte_vector_type = bx.context.new_vector_type(bx.u8_type, type_size_bytes);
+ let long_byte_vector_type = bx.context.new_vector_type(bx.u8_type, byte_vector_type_size);
+
+ // Step 1: Generate pre-reversed low and high nibbles as a vector.
+ let zero_byte = bx.context.new_rvalue_zero(bx.u8_type);
+ let hi_nibble_elements: Vec<_> = (0u8..16)
+ .map(|x| bx.context.new_rvalue_from_int(bx.u8_type, x.reverse_bits() as _))
+ .chain((16..byte_vector_type_size).map(|_| zero_byte))
+ .collect();
+ let hi_nibble = bx.context.new_rvalue_from_vector(None, long_byte_vector_type, &hi_nibble_elements);
+
+ let lo_nibble_elements: Vec<_> = (0u8..16)
+ .map(|x| bx.context.new_rvalue_from_int(bx.u8_type, (x.reverse_bits() >> 4) as _))
+ .chain((16..byte_vector_type_size).map(|_| zero_byte))
+ .collect();
+ let lo_nibble = bx.context.new_rvalue_from_vector(None, long_byte_vector_type, &lo_nibble_elements);
+
+ let mask = bx.context.new_rvalue_from_vector(
+ None,
+ long_byte_vector_type,
+ &vec![bx.context.new_rvalue_from_int(bx.u8_type, 0x0f); byte_vector_type_size as _]);
+
+ let four_vec = bx.context.new_rvalue_from_vector(
+ None,
+ long_byte_vector_type,
+ &vec![bx.context.new_rvalue_from_int(bx.u8_type, 4); byte_vector_type_size as _]);
+
+ // Step 2: Byte-swap the input.
+ let swapped = simd_bswap(bx, args[0].immediate());
+ let byte_vector = bx.context.new_bitcast(None, swapped, byte_vector_type);
+
+ // We're going to need to extend the vector with zeros to make sure that the types are the
+ // same, since that's what new_rvalue_vector_perm expects.
+ let byte_vector = if byte_vector_type_size > type_size_bytes {
+ let mut byte_vector_elements = Vec::with_capacity(byte_vector_type_size as _);
+ for i in 0..type_size_bytes {
+ let idx = bx.context.new_rvalue_from_int(bx.u32_type, i as _);
+ let val = bx.extract_element(byte_vector, idx);
+ byte_vector_elements.push(val);
+ }
+ for _ in type_size_bytes..byte_vector_type_size {
+ byte_vector_elements.push(zero_byte);
+ }
+ bx.context.new_rvalue_from_vector(None, long_byte_vector_type, &byte_vector_elements)
+ } else {
+ bx.context.new_bitcast(None, byte_vector, long_byte_vector_type)
+ };
+
+ // Step 3: Mask off the low and high nibbles of each byte in the byte-swapped input.
+ let masked_hi = (byte_vector >> four_vec) & mask;
+ let masked_lo = byte_vector & mask;
+
+ // Step 4: Shuffle the pre-reversed low and high-nibbles using the masked nibbles as a shuffle mask.
+ let hi = bx.context.new_rvalue_vector_perm(None, hi_nibble, hi_nibble, masked_lo);
+ let lo = bx.context.new_rvalue_vector_perm(None, lo_nibble, lo_nibble, masked_hi);
+
+ // Step 5: Combine the results of the shuffle back together and cast back to the original type.
+ let result = hi | lo;
+ let cast_ty = bx.context.new_vector_type(elem_type, byte_vector_type_size / (elem_size_bytes as u64));
+
+ // we might need to truncate if sizeof(v_type) < sizeof(cast_type)
+ if type_size_bytes < byte_vector_type_size {
+ let cast_result = bx.context.new_bitcast(None, result, cast_ty);
+ let elems: Vec<_> = (0..in_len)
+ .map(|i| {
+ let idx = bx.context.new_rvalue_from_int(bx.u32_type, i as _);
+ bx.extract_element(cast_result, idx)
+ })
+ .collect();
+ return Ok(bx.context.new_rvalue_from_vector(None, v_type, &elems))
+ } else {
+ // avoid the unnecessary truncation as an optimization.
+ return Ok(bx.context.new_bitcast(None, result, v_type));
+ }
+ }
+ // since gcc doesn't have vector shuffle methods available in non-patched builds, fallback to
+ // component-wise bitreverses if they're not available.
+ #[cfg(not(feature = "master"))]
+ if name == sym::simd_bitreverse {
+ let vector = args[0].immediate();
+ let vector_ty = vector.get_type();
+ let vector_type = vector_ty.unqualified().dyncast_vector().expect("vector type");
+ let num_elements = vector_type.get_num_units();
+
+ let elem_type = vector_type.get_element_type();
+ let elem_size_bytes = elem_type.get_size();
+ let num_type = elem_type.to_unsigned(bx.cx);
+ let new_elements: Vec<_> = (0..num_elements)
+ .map(|idx| {
+ let index = bx.context.new_rvalue_from_long(num_type, idx as _);
+ let extracted_value = bx.extract_element(vector, index).to_rvalue();
+ bx.bit_reverse(elem_size_bytes as u64 * 8, extracted_value)
+ })
+ .collect();
+ return Ok(bx.context.new_rvalue_from_vector(None, vector_ty, &new_elements));
+ }
+
+ if name == sym::simd_ctlz || name == sym::simd_cttz {
+ let vector = args[0].immediate();
+ let elements: Vec<_> = (0..in_len)
+ .map(|i| {
+ let index = bx.context.new_rvalue_from_long(bx.i32_type, i as i64);
+ let value = bx.extract_element(vector, index).to_rvalue();
+ if name == sym::simd_ctlz {
+ bx.count_leading_zeroes(value.get_type().get_size() as u64 * 8, value)
+ } else {
+ bx.count_trailing_zeroes(value.get_type().get_size() as u64 * 8, value)
+ }
+ })
+ .collect();
+ return Ok(bx.context.new_rvalue_from_vector(None, vector.get_type(), &elements));
+ }
+
if name == sym::simd_shuffle {
// Make sure this is actually an array, since typeck only checks the length-suffixed
// version of this intrinsic.
@@ -504,20 +697,14 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>(
default: RValue<'gcc>,
pointers: RValue<'gcc>,
mask: RValue<'gcc>,
- pointer_count: usize,
bx: &mut Builder<'a, 'gcc, 'tcx>,
in_len: u64,
- underlying_ty: Ty<'tcx>,
invert: bool,
) -> RValue<'gcc> {
- let vector_type = if pointer_count > 1 {
- bx.context.new_vector_type(bx.usize_type, in_len)
- } else {
- vector_ty(bx, underlying_ty, in_len)
- };
- let elem_type = vector_type.dyncast_vector().expect("vector type").get_element_type();
+ let vector_type = default.get_type();
+ let elem_type = vector_type.unqualified().dyncast_vector().expect("vector type").get_element_type();
- let mut values = vec![];
+ let mut values = Vec::with_capacity(in_len as usize);
for i in 0..in_len {
let index = bx.context.new_rvalue_from_long(bx.i32_type, i as i64);
let int = bx.context.new_vector_access(None, pointers, index).to_rvalue();
@@ -530,13 +717,14 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>(
let vector = bx.context.new_rvalue_from_vector(None, vector_type, &values);
- let mut mask_types = vec![];
- let mut mask_values = vec![];
+ let mut mask_types = Vec::with_capacity(in_len as usize);
+ let mut mask_values = Vec::with_capacity(in_len as usize);
for i in 0..in_len {
let index = bx.context.new_rvalue_from_long(bx.i32_type, i as i64);
mask_types.push(bx.context.new_field(None, bx.i32_type, "m"));
let mask_value = bx.context.new_vector_access(None, mask, index).to_rvalue();
- let masked = bx.context.new_rvalue_from_int(bx.i32_type, in_len as i32) & mask_value;
+ let mask_value_cast = bx.context.new_cast(None, mask_value, bx.i32_type);
+ let masked = bx.context.new_rvalue_from_int(bx.i32_type, in_len as i32) & mask_value_cast;
let value = index + masked;
mask_values.push(value);
}
@@ -665,10 +853,8 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>(
args[0].immediate(),
args[1].immediate(),
args[2].immediate(),
- pointer_count,
bx,
in_len,
- underlying_ty,
false,
));
}
@@ -783,10 +969,8 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>(
args[0].immediate(),
args[1].immediate(),
args[2].immediate(),
- pointer_count,
bx,
in_len,
- underlying_ty,
true,
);
diff --git a/src/lib.rs b/src/lib.rs
index 09ce059476ec..7f0696740b37 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -4,6 +4,7 @@
* TODO(antoyo): support LTO (gcc's equivalent to Full LTO is -flto -flto-partition=one — https://documentation.suse.com/sbp/all/html/SBP-GCC-10/index.html).
* For Thin LTO, this might be helpful:
* In gcc 4.6 -fwhopr was removed and became default with -flto. The non-whopr path can still be executed via -flto-partition=none.
+ * Or the new incremental LTO?
*
* Maybe some missing optizations enabled by rustc's LTO is in there: https://gcc.gnu.org/onlinedocs/gcc/Optimize-Options.html
* Like -fipa-icf (should be already enabled) and maybe -fdevirtualize-at-ltrans.
@@ -27,6 +28,7 @@
#![recursion_limit="256"]
#![warn(rust_2018_idioms)]
#![warn(unused_lifetimes)]
+#![deny(clippy::pattern_type_mismatch)]
extern crate rustc_apfloat;
extern crate rustc_ast;
@@ -247,6 +249,7 @@ fn new_context<'gcc, 'tcx>(tcx: TyCtxt<'tcx>) -> Context<'gcc> {
}
#[cfg(feature="master")]
{
+ context.set_allow_special_chars_in_func_names(true);
let version = Version::get();
let version = format!("{}.{}.{}", version.major, version.minor, version.patch);
context.set_output_ident(&format!("rustc version {} with libgccjit {}",
diff --git a/src/mono_item.rs b/src/mono_item.rs
index 3322d56513bb..fdeb2f96fe2c 100644
--- a/src/mono_item.rs
+++ b/src/mono_item.rs
@@ -23,7 +23,7 @@ impl<'gcc, 'tcx> PreDefineMethods<'tcx> for CodegenCx<'gcc, 'tcx> {
let is_tls = attrs.flags.contains(CodegenFnAttrFlags::THREAD_LOCAL);
let global = self.define_global(symbol_name, gcc_type, is_tls, attrs.link_section);
#[cfg(feature="master")]
- global.add_attribute(VarAttribute::Visibility(base::visibility_to_gcc(visibility)));
+ global.add_string_attribute(VarAttribute::Visibility(base::visibility_to_gcc(visibility)));
// TODO(antoyo): set linkage.
self.instances.borrow_mut().insert(instance, global);
diff --git a/test.sh b/test.sh
deleted file mode 100755
index e896237a1ea4..000000000000
--- a/test.sh
+++ /dev/null
@@ -1,479 +0,0 @@
-#!/usr/bin/env bash
-
-# TODO(antoyo): rewrite to cargo-make (or just) or something like that to only rebuild the sysroot when needed?
-
-set -e
-#set -x
-
-flags=
-gcc_master_branch=1
-channel="debug"
-funcs=()
-build_only=0
-nb_parts=0
-current_part=0
-use_system_gcc=0
-use_backend=0
-cargo_target_dir=""
-
-export CHANNEL='debug'
-
-while [[ $# -gt 0 ]]; do
- case $1 in
- --release)
- codegen_channel=release
- channel="release"
- export CHANNEL='release'
- shift
- ;;
- --release-sysroot)
- sysroot_channel="--release"
- shift
- ;;
- --no-default-features)
- gcc_master_branch=0
- flags="$flags --no-default-features"
- shift
- ;;
- --features)
- shift
- flags="$flags --features $1"
- shift
- ;;
- "--test-rustc")
- funcs+=(test_rustc)
- shift
- ;;
- "--test-successful-rustc")
- funcs+=(test_successful_rustc)
- shift
- ;;
- "--test-failing-rustc")
- funcs+=(test_failing_rustc)
- shift
- ;;
-
- "--test-libcore")
- funcs+=(test_libcore)
- shift
- ;;
-
- "--clean-ui-tests")
- funcs+=(clean_ui_tests)
- shift
- ;;
- "--clean")
- funcs+=(clean)
- shift
- ;;
-
- "--std-tests")
- funcs+=(std_tests)
- shift
- ;;
-
- "--asm-tests")
- funcs+=(asm_tests)
- shift
- ;;
-
- "--extended-tests")
- funcs+=(extended_sysroot_tests)
- shift
- ;;
- "--extended-rand-tests")
- funcs+=(extended_rand_tests)
- shift
- ;;
- "--extended-regex-example-tests")
- funcs+=(extended_regex_example_tests)
- shift
- ;;
- "--extended-regex-tests")
- funcs+=(extended_regex_tests)
- shift
- ;;
-
- "--mini-tests")
- funcs+=(mini_tests)
- shift
- ;;
-
- "--build-sysroot")
- funcs+=(build_sysroot)
- shift
- ;;
- "--build")
- build_only=1
- shift
- ;;
- "--use-system-gcc")
- use_system_gcc=1
- shift
- ;;
- "--use-backend")
- use_backend=1
- shift
- export BUILTIN_BACKEND=$1
- shift
- ;;
- "--out-dir")
- shift
- export CARGO_TARGET_DIR=$1
- cargo_target_dir=$1
- shift
- ;;
- "--nb-parts")
- shift
- nb_parts=$1
- shift
- ;;
- "--current-part")
- shift
- current_part=$1
- shift
- ;;
- *)
- echo "Unknown option $1"
- exit 1
- ;;
- esac
-done
-
-if [ -f ./gcc_path ]; then
- export GCC_PATH=$(cat gcc_path)
-elif (( $use_system_gcc == 1 )); then
- echo 'Using system GCC'
-else
- echo 'Please put the path to your custom build of libgccjit in the file `gcc_path`, see Readme.md for details'
- exit 1
-fi
-
-export LD_LIBRARY_PATH="$GCC_PATH"
-export LIBRARY_PATH="$GCC_PATH"
-
-if [[ $use_backend == 0 ]]; then
- if [[ $channel == "release" ]]; then
- CARGO_INCREMENTAL=1 cargo rustc --release $flags
- else
- echo $LD_LIBRARY_PATH
- cargo rustc $flags
- fi
-fi
-
-if (( $build_only == 1 )); then
- echo "Since it's 'build-only', exiting..."
- exit
-fi
-
-source config.sh
-
-function clean() {
- rm -r $cargo_target_dir || true
- mkdir -p $cargo_target_dir/gccjit
-}
-
-function mini_tests() {
- echo "[BUILD] mini_core"
- crate_types="lib,dylib"
-
- if [[ "$HOST_TRIPLE" != "$TARGET_TRIPLE" ]]; then
- crate_types="lib"
- fi
-
- $RUST_CMD example/mini_core.rs --crate-name mini_core --crate-type $crate_types --target $TARGET_TRIPLE
-
- echo "[BUILD] example"
- $RUST_CMD example/example.rs --crate-type lib --target $TARGET_TRIPLE
-
- echo "[AOT] mini_core_hello_world"
- $RUST_CMD example/mini_core_hello_world.rs --crate-name mini_core_hello_world --crate-type bin -g --target $TARGET_TRIPLE
- $RUN_WRAPPER $cargo_target_dir/mini_core_hello_world abc bcd
-}
-
-function build_sysroot() {
- echo "[BUILD] sysroot"
- time ./build_sysroot/build_sysroot.sh $sysroot_channel
-}
-
-# TODO(GuillaumeGomez): when rewriting in Rust, refactor with the code in tests/lang_tests_common.rs if possible.
-function run_in_vm() {
- vm_parent_dir=${CG_GCC_VM_DIR:-$(pwd)}
- vm_dir=vm
- exe=$1
- exe_filename=$(basename $exe)
- vm_home_dir=$vm_parent_dir/$vm_dir/home
- vm_exe_path=$vm_home_dir/$exe_filename
- inside_vm_exe_path=/home/$exe_filename
- sudo cp $exe $vm_exe_path
-
- shift
- pushd $vm_parent_dir
- sudo chroot $vm_dir qemu-m68k-static $inside_vm_exe_path $@
- popd
-}
-
-function std_tests() {
- echo "[AOT] arbitrary_self_types_pointers_and_wrappers"
- $RUST_CMD example/arbitrary_self_types_pointers_and_wrappers.rs --crate-name arbitrary_self_types_pointers_and_wrappers --crate-type bin --target $TARGET_TRIPLE
- $RUN_WRAPPER $cargo_target_dir/arbitrary_self_types_pointers_and_wrappers
-
- echo "[AOT] alloc_system"
- $RUST_CMD example/alloc_system.rs --crate-type lib --target "$TARGET_TRIPLE"
-
- # FIXME: doesn't work on m68k.
- if [[ "$HOST_TRIPLE" == "$TARGET_TRIPLE" ]]; then
- echo "[AOT] alloc_example"
- $RUST_CMD example/alloc_example.rs --crate-type bin --target $TARGET_TRIPLE
- $RUN_WRAPPER $cargo_target_dir/alloc_example
- fi
-
- echo "[AOT] dst_field_align"
- # FIXME(antoyo): Re-add -Zmir-opt-level=2 once rust-lang/rust#67529 is fixed.
- $RUST_CMD example/dst-field-align.rs --crate-name dst_field_align --crate-type bin --target $TARGET_TRIPLE
- $RUN_WRAPPER $cargo_target_dir/dst_field_align || (echo $?; false)
-
- echo "[AOT] std_example"
- std_flags="--cfg feature=\"master\""
- if (( $gcc_master_branch == 0 )); then
- std_flags=""
- fi
- $RUST_CMD example/std_example.rs --crate-type bin --target $TARGET_TRIPLE $std_flags
- $RUN_WRAPPER $cargo_target_dir/std_example --target $TARGET_TRIPLE
-
- echo "[AOT] subslice-patterns-const-eval"
- $RUST_CMD example/subslice-patterns-const-eval.rs --crate-type bin $TEST_FLAGS --target $TARGET_TRIPLE
- $RUN_WRAPPER $cargo_target_dir/subslice-patterns-const-eval
-
- echo "[AOT] track-caller-attribute"
- $RUST_CMD example/track-caller-attribute.rs --crate-type bin $TEST_FLAGS --target $TARGET_TRIPLE
- $RUN_WRAPPER $cargo_target_dir/track-caller-attribute
-
- echo "[BUILD] mod_bench"
- $RUST_CMD example/mod_bench.rs --crate-type bin --target $TARGET_TRIPLE
-}
-
-function setup_rustc() {
- rust_toolchain=$(cat rust-toolchain | grep channel | sed 's/channel = "\(.*\)"/\1/')
-
- git clone https://github.com/rust-lang/rust.git || true
- cd rust
- git fetch
- git checkout $($RUSTC -V | cut -d' ' -f3 | tr -d '(')
- export RUSTFLAGS=
-
- rm config.toml || true
-
- cat > config.toml < res.txt
- diff -u res.txt examples/regexdna-output.txt
- popd
-}
-
-function extended_regex_tests() {
- if (( $gcc_master_branch == 0 )); then
- return
- fi
-
- pushd regex
- echo "[TEST] rust-lang/regex tests"
- export CG_RUSTFLAGS="--cap-lints warn" # newer aho_corasick versions throw a deprecation warning
- ../cargo.sh test --tests -- --exclude-should-panic --test-threads 1 -Zunstable-options -q
- popd
-}
-
-function extended_sysroot_tests() {
- #pushd simple-raytracer
- #echo "[BENCH COMPILE] ebobby/simple-raytracer"
- #hyperfine --runs "${RUN_RUNS:-10}" --warmup 1 --prepare "cargo clean" \
- #"RUSTC=rustc RUSTFLAGS='' cargo build" \
- #"../cargo.sh build"
-
- #echo "[BENCH RUN] ebobby/simple-raytracer"
- #cp ./target/debug/main ./raytracer_cg_gcc
- #hyperfine --runs "${RUN_RUNS:-10}" ./raytracer_cg_llvm ./raytracer_cg_gcc
- #popd
-
- extended_rand_tests
- extended_regex_example_tests
- extended_regex_tests
-}
-
-function test_rustc() {
- echo
- echo "[TEST] rust-lang/rust"
-
- setup_rustc
-
- for test in $(rg -i --files-with-matches "//(\[\w+\])?~|// error-pattern:|// build-fail|// run-fail|-Cllvm-args" tests/ui); do
- rm $test
- done
- rm tests/ui/consts/const_cmp_type_id.rs
- rm tests/ui/consts/issue-73976-monomorphic.rs
-
- git checkout -- tests/ui/issues/auxiliary/issue-3136-a.rs # contains //~ERROR, but shouldn't be removed
-
- rm -r tests/ui/{abi*,extern/,unsized-locals/,proc-macro/,threads-sendsync/,borrowck/,test*,consts/issue-miri-1910.rs} || true
- rm tests/ui/mir/mir_heavy_promoted.rs # this test is oom-killed in the CI.
- # Tests generating errors.
- rm tests/ui/consts/issue-94675.rs
- for test in $(rg --files-with-matches "thread" tests/ui); do
- rm $test
- done
- git checkout tests/ui/type-alias-impl-trait/auxiliary/cross_crate_ice.rs
- git checkout tests/ui/type-alias-impl-trait/auxiliary/cross_crate_ice2.rs
- git checkout tests/ui/macros/rfc-2011-nicer-assert-messages/auxiliary/common.rs
- git checkout tests/ui/imports/ambiguous-1.rs
- git checkout tests/ui/imports/ambiguous-4-extern.rs
- git checkout tests/ui/entry-point/auxiliary/bad_main_functions.rs
-
- RUSTC_ARGS="$TEST_FLAGS -Csymbol-mangling-version=v0 -Zcodegen-backend="$(pwd)"/../target/"$CHANNEL"/librustc_codegen_gcc."$dylib_ext" --sysroot "$(pwd)"/../build_sysroot/sysroot"
-
- if [ $# -eq 0 ]; then
- # No argument supplied to the function. Doing nothing.
- echo "No argument provided. Keeping all UI tests"
- elif [ $1 = "0" ]; then
- # Removing the failing tests.
- xargs -a ../failing-ui-tests.txt -d'\n' rm
- else
- # Removing all tests.
- find tests/ui -type f -name '*.rs' -not -path '*/auxiliary/*' -delete
- # Putting back only the failing ones.
- xargs -a ../failing-ui-tests.txt -d'\n' git checkout --
- fi
-
- if [ $nb_parts -gt 0 ]; then
- echo "Splitting ui_test into $nb_parts parts (and running part $current_part)"
- find tests/ui -type f -name '*.rs' -not -path "*/auxiliary/*" > ui_tests
- # To ensure it'll be always the same sub files, we sort the content.
- sort ui_tests -o ui_tests
- count=$((`wc -l < ui_tests` / $nb_parts))
- # We increment the number of tests by one because if this is an odd number, we would skip
- # one test.
- count=$((count + 1))
- split -d -l $count -a 1 ui_tests ui_tests.split
- # Removing all tests.
- find tests/ui -type f -name '*.rs' -not -path "*/auxiliary/*" -delete
- # Putting back only the ones we want to test.
- xargs -a "ui_tests.split$current_part" -d'\n' git checkout --
- fi
-
- echo "[TEST] rustc test suite"
- COMPILETEST_FORCE_STAGE0=1 ./x.py test --run always --stage 0 tests/ui/ --rustc-args "$RUSTC_ARGS" # --target $TARGET_TRIPLE
-}
-
-function test_failing_rustc() {
- test_rustc "1"
-}
-
-function test_successful_rustc() {
- test_rustc "0"
-}
-
-function clean_ui_tests() {
- find rust/build/x86_64-unknown-linux-gnu/test/ui/ -name stamp -delete
-}
-
-function all() {
- clean
- mini_tests
- build_sysroot
- std_tests
- #asm_tests
- test_libcore
- extended_sysroot_tests
- test_rustc
-}
-
-if [ ${#funcs[@]} -eq 0 ]; then
- echo "No command passed, running '--all'..."
- all
-else
- for t in ${funcs[@]}; do
- $t
- done
-fi
diff --git a/failing-lto-tests.txt b/tests/failing-lto-tests.txt
similarity index 100%
rename from failing-lto-tests.txt
rename to tests/failing-lto-tests.txt
diff --git a/failing-non-lto-tests.txt b/tests/failing-non-lto-tests.txt
similarity index 100%
rename from failing-non-lto-tests.txt
rename to tests/failing-non-lto-tests.txt
diff --git a/failing-ui-tests.txt b/tests/failing-ui-tests.txt
similarity index 95%
rename from failing-ui-tests.txt
rename to tests/failing-ui-tests.txt
index 22044eabe969..6e020e9b354d 100644
--- a/failing-ui-tests.txt
+++ b/tests/failing-ui-tests.txt
@@ -13,7 +13,6 @@ tests/ui/sepcomp/sepcomp-extern.rs
tests/ui/sepcomp/sepcomp-fns-backwards.rs
tests/ui/sepcomp/sepcomp-fns.rs
tests/ui/sepcomp/sepcomp-statics.rs
-tests/ui/simd/intrinsic/generic-arithmetic-pass.rs
tests/ui/asm/x86_64/may_unwind.rs
tests/ui/backtrace.rs
tests/ui/catch-unwind-bang.rs
@@ -49,7 +48,6 @@ tests/ui/rfcs/rfc-1857-stabilize-drop-order/drop-order.rs
tests/ui/rfcs/rfc-2091-track-caller/std-panic-locations.rs
tests/ui/simd/issue-17170.rs
tests/ui/simd/issue-39720.rs
-tests/ui/simd/issue-89193.rs
tests/ui/statics/issue-91050-1.rs
tests/ui/statics/issue-91050-2.rs
tests/ui/alloc-error/default-alloc-error-hook.rs
@@ -57,7 +55,6 @@ tests/ui/coroutine/panic-safe.rs
tests/ui/issues/issue-14875.rs
tests/ui/issues/issue-29948.rs
tests/ui/panics/nested_panic_caught.rs
-tests/ui/simd/intrinsic/generic-bswap-byte.rs
tests/ui/const_prop/ice-issue-111353.rs
tests/ui/process/println-with-broken-pipe.rs
tests/ui/panic-runtime/lto-abort.rs
diff --git a/failing-ui-tests12.txt b/tests/failing-ui-tests12.txt
similarity index 95%
rename from failing-ui-tests12.txt
rename to tests/failing-ui-tests12.txt
index 4af93939b064..64f89b03eecc 100644
--- a/failing-ui-tests12.txt
+++ b/tests/failing-ui-tests12.txt
@@ -9,6 +9,7 @@ tests/ui/packed/packed-struct-vec.rs
tests/ui/packed/packed-tuple-struct-layout.rs
tests/ui/simd/array-type.rs
tests/ui/simd/intrinsic/float-minmax-pass.rs
+tests/ui/simd/intrinsic/generic-arithmetic-pass.rs
tests/ui/simd/intrinsic/generic-arithmetic-saturating-pass.rs
tests/ui/simd/intrinsic/generic-as.rs
tests/ui/simd/intrinsic/generic-cast-pass.rs
@@ -32,11 +33,12 @@ tests/ui/coroutine/size-moved-locals.rs
tests/ui/macros/rfc-2011-nicer-assert-messages/all-not-available-cases.rs
tests/ui/simd/intrinsic/generic-gather-pass.rs
tests/ui/simd/issue-85915-simd-ptrs.rs
+tests/ui/simd/issue-89193.rs
tests/ui/issues/issue-68010-large-zst-consts.rs
tests/ui/rust-2018/proc-macro-crate-in-paths.rs
tests/ui/target-feature/missing-plusminus.rs
tests/ui/sse2.rs
tests/ui/codegen/issue-79865-llvm-miscompile.rs
-tests/ui/intrinsics/intrinsics-integer.rs
tests/ui/std-backtrace.rs
tests/ui/mir/alignment/packed.rs
+tests/ui/intrinsics/intrinsics-integer.rs
diff --git a/tests/lang_tests_common.rs b/tests/lang_tests_common.rs
index af0133aad461..4cc429cfa456 100644
--- a/tests/lang_tests_common.rs
+++ b/tests/lang_tests_common.rs
@@ -7,6 +7,7 @@ use std::{
use lang_tester::LangTester;
use tempfile::TempDir;
+use boml::Toml;
/// Controls the compile options (e.g., optimization level) used to compile
/// test code.
@@ -20,8 +21,21 @@ pub fn main_inner(profile: Profile) {
let tempdir = TempDir::new().expect("temp dir");
let current_dir = current_dir().expect("current dir");
let current_dir = current_dir.to_str().expect("current dir").to_string();
- let gcc_path = include_str!("../gcc_path");
- let gcc_path = gcc_path.trim();
+ let toml = Toml::parse(include_str!("../config.toml"))
+ .expect("Failed to parse `config.toml`");
+ let gcc_path = if let Ok(gcc_path) = toml.get_string("gcc-path") {
+ PathBuf::from(gcc_path.to_string())
+ } else {
+ // then we try to retrieve it from the `target` folder.
+ let commit = include_str!("../libgccjit.version").trim();
+ Path::new("build/libgccjit").join(commit)
+ };
+
+ let gcc_path = Path::new(&gcc_path)
+ .canonicalize()
+ .expect("failed to get absolute path of `gcc-path`")
+ .display()
+ .to_string();
env::set_var("LD_LIBRARY_PATH", gcc_path);
fn rust_filter(filename: &Path) -> bool {
diff --git a/y.sh b/y.sh
index 188109743e3d..69d7917dd777 100755
--- a/y.sh
+++ b/y.sh
@@ -2,7 +2,7 @@
set -e
echo "[BUILD] build system" 1>&2
-cd build_system
+pushd $(dirname "$0")/build_system > /dev/null
cargo build --release
-cd ..
-./build_system/target/release/y $@
+popd > /dev/null
+$(dirname "$0")/build_system/target/release/y $@