| // NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py |
| // REQUIRES: riscv-registered-target |
| // RUN: %clang_cc1 -triple riscv64 -target-feature +experimental-v -emit-llvm -o - %s \ |
| // RUN: | FileCheck --check-prefix=CHECK-RV64 %s |
| |
| #include <riscv_vector.h> |
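
// This file checks that each vsetvl_e<SEW><LMUL> intrinsic lowers to a call
// to @llvm.riscv.vsetvli.i64(i64 %avl, i64 %sew, i64 %lmul), where the two
// immediate operands encode the requested element width and register group
// multiplier. As exercised by the CHECK lines below, SEW is encoded as
// e8=0, e16=1, e32=2, e64=3, and LMUL as m1=0, m2=1, m4=2, m8=3, mf8=5,
// mf4=6, mf2=7. The alloca/store/load of the AVL argument reflects
// unoptimized cc1 output, which the autogenerated assertions match verbatim.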
| |
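// SEW=8: the SEW operand is 0 in every call below, and all seven LMUL
// settings from mf8 through m8 are covered at this element width.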
| // |
| // CHECK-RV64-LABEL: @test_vsetvl_e8m1( |
| // CHECK-RV64-NEXT: entry: |
| // CHECK-RV64-NEXT: [[AVL_ADDR:%.*]] = alloca i64, align 8 |
| // CHECK-RV64-NEXT: store i64 [[AVL:%.*]], i64* [[AVL_ADDR]], align 8 |
| // CHECK-RV64-NEXT: [[TMP0:%.*]] = load i64, i64* [[AVL_ADDR]], align 8 |
| // CHECK-RV64-NEXT: [[TMP1:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[TMP0]], i64 0, i64 0) |
| // CHECK-RV64-NEXT: ret i64 [[TMP1]] |
| // |
| size_t test_vsetvl_e8m1(size_t avl) { |
| return vsetvl_e8m1(avl); |
| } |
| |
| // |
| // CHECK-RV64-LABEL: @test_vsetvl_e8m2( |
| // CHECK-RV64-NEXT: entry: |
| // CHECK-RV64-NEXT: [[AVL_ADDR:%.*]] = alloca i64, align 8 |
| // CHECK-RV64-NEXT: store i64 [[AVL:%.*]], i64* [[AVL_ADDR]], align 8 |
| // CHECK-RV64-NEXT: [[TMP0:%.*]] = load i64, i64* [[AVL_ADDR]], align 8 |
| // CHECK-RV64-NEXT: [[TMP1:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[TMP0]], i64 0, i64 1) |
| // CHECK-RV64-NEXT: ret i64 [[TMP1]] |
| // |
| size_t test_vsetvl_e8m2(size_t avl) { |
| return vsetvl_e8m2(avl); |
| } |
| |
| // |
| // CHECK-RV64-LABEL: @test_vsetvl_e8m4( |
| // CHECK-RV64-NEXT: entry: |
| // CHECK-RV64-NEXT: [[AVL_ADDR:%.*]] = alloca i64, align 8 |
| // CHECK-RV64-NEXT: store i64 [[AVL:%.*]], i64* [[AVL_ADDR]], align 8 |
| // CHECK-RV64-NEXT: [[TMP0:%.*]] = load i64, i64* [[AVL_ADDR]], align 8 |
| // CHECK-RV64-NEXT: [[TMP1:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[TMP0]], i64 0, i64 2) |
| // CHECK-RV64-NEXT: ret i64 [[TMP1]] |
| // |
| size_t test_vsetvl_e8m4(size_t avl) { |
| return vsetvl_e8m4(avl); |
| } |
| |
| // |
| // CHECK-RV64-LABEL: @test_vsetvl_e8m8( |
| // CHECK-RV64-NEXT: entry: |
| // CHECK-RV64-NEXT: [[AVL_ADDR:%.*]] = alloca i64, align 8 |
| // CHECK-RV64-NEXT: store i64 [[AVL:%.*]], i64* [[AVL_ADDR]], align 8 |
| // CHECK-RV64-NEXT: [[TMP0:%.*]] = load i64, i64* [[AVL_ADDR]], align 8 |
| // CHECK-RV64-NEXT: [[TMP1:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[TMP0]], i64 0, i64 3) |
| // CHECK-RV64-NEXT: ret i64 [[TMP1]] |
| // |
| size_t test_vsetvl_e8m8(size_t avl) { |
| return vsetvl_e8m8(avl); |
| } |
| |
| // |
| // CHECK-RV64-LABEL: @test_vsetvl_e8mf2( |
| // CHECK-RV64-NEXT: entry: |
| // CHECK-RV64-NEXT: [[AVL_ADDR:%.*]] = alloca i64, align 8 |
| // CHECK-RV64-NEXT: store i64 [[AVL:%.*]], i64* [[AVL_ADDR]], align 8 |
| // CHECK-RV64-NEXT: [[TMP0:%.*]] = load i64, i64* [[AVL_ADDR]], align 8 |
| // CHECK-RV64-NEXT: [[TMP1:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[TMP0]], i64 0, i64 7) |
| // CHECK-RV64-NEXT: ret i64 [[TMP1]] |
| // |
| size_t test_vsetvl_e8mf2(size_t avl) { |
| return vsetvl_e8mf2(avl); |
| } |
| |
| // |
| // CHECK-RV64-LABEL: @test_vsetvl_e8mf4( |
| // CHECK-RV64-NEXT: entry: |
| // CHECK-RV64-NEXT: [[AVL_ADDR:%.*]] = alloca i64, align 8 |
| // CHECK-RV64-NEXT: store i64 [[AVL:%.*]], i64* [[AVL_ADDR]], align 8 |
| // CHECK-RV64-NEXT: [[TMP0:%.*]] = load i64, i64* [[AVL_ADDR]], align 8 |
| // CHECK-RV64-NEXT: [[TMP1:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[TMP0]], i64 0, i64 6) |
| // CHECK-RV64-NEXT: ret i64 [[TMP1]] |
| // |
| size_t test_vsetvl_e8mf4(size_t avl) { |
| return vsetvl_e8mf4(avl); |
| } |
| |
| // |
| // CHECK-RV64-LABEL: @test_vsetvl_e8mf8( |
| // CHECK-RV64-NEXT: entry: |
| // CHECK-RV64-NEXT: [[AVL_ADDR:%.*]] = alloca i64, align 8 |
| // CHECK-RV64-NEXT: store i64 [[AVL:%.*]], i64* [[AVL_ADDR]], align 8 |
| // CHECK-RV64-NEXT: [[TMP0:%.*]] = load i64, i64* [[AVL_ADDR]], align 8 |
| // CHECK-RV64-NEXT: [[TMP1:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[TMP0]], i64 0, i64 5) |
| // CHECK-RV64-NEXT: ret i64 [[TMP1]] |
| // |
| size_t test_vsetvl_e8mf8(size_t avl) { |
| return vsetvl_e8mf8(avl); |
| } |
| |
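// SEW=16: the SEW operand is 1. mf8 is omitted, presumably because
// SEW/LMUL = 128 would exceed ELEN=64 (an assumption based on the
// standard SEW/LMUL <= ELEN constraint, not stated in this test).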
| // |
| // CHECK-RV64-LABEL: @test_vsetvl_e16m1( |
| // CHECK-RV64-NEXT: entry: |
| // CHECK-RV64-NEXT: [[AVL_ADDR:%.*]] = alloca i64, align 8 |
| // CHECK-RV64-NEXT: store i64 [[AVL:%.*]], i64* [[AVL_ADDR]], align 8 |
| // CHECK-RV64-NEXT: [[TMP0:%.*]] = load i64, i64* [[AVL_ADDR]], align 8 |
| // CHECK-RV64-NEXT: [[TMP1:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[TMP0]], i64 1, i64 0) |
| // CHECK-RV64-NEXT: ret i64 [[TMP1]] |
| // |
| size_t test_vsetvl_e16m1(size_t avl) { |
| return vsetvl_e16m1(avl); |
| } |
| |
| // |
| // CHECK-RV64-LABEL: @test_vsetvl_e16m2( |
| // CHECK-RV64-NEXT: entry: |
| // CHECK-RV64-NEXT: [[AVL_ADDR:%.*]] = alloca i64, align 8 |
| // CHECK-RV64-NEXT: store i64 [[AVL:%.*]], i64* [[AVL_ADDR]], align 8 |
| // CHECK-RV64-NEXT: [[TMP0:%.*]] = load i64, i64* [[AVL_ADDR]], align 8 |
| // CHECK-RV64-NEXT: [[TMP1:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[TMP0]], i64 1, i64 1) |
| // CHECK-RV64-NEXT: ret i64 [[TMP1]] |
| // |
| size_t test_vsetvl_e16m2(size_t avl) { |
| return vsetvl_e16m2(avl); |
| } |
| |
| // |
| // CHECK-RV64-LABEL: @test_vsetvl_e16m4( |
| // CHECK-RV64-NEXT: entry: |
| // CHECK-RV64-NEXT: [[AVL_ADDR:%.*]] = alloca i64, align 8 |
| // CHECK-RV64-NEXT: store i64 [[AVL:%.*]], i64* [[AVL_ADDR]], align 8 |
| // CHECK-RV64-NEXT: [[TMP0:%.*]] = load i64, i64* [[AVL_ADDR]], align 8 |
| // CHECK-RV64-NEXT: [[TMP1:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[TMP0]], i64 1, i64 2) |
| // CHECK-RV64-NEXT: ret i64 [[TMP1]] |
| // |
| size_t test_vsetvl_e16m4(size_t avl) { |
| return vsetvl_e16m4(avl); |
| } |
| |
| // |
| // CHECK-RV64-LABEL: @test_vsetvl_e16m8( |
| // CHECK-RV64-NEXT: entry: |
| // CHECK-RV64-NEXT: [[AVL_ADDR:%.*]] = alloca i64, align 8 |
| // CHECK-RV64-NEXT: store i64 [[AVL:%.*]], i64* [[AVL_ADDR]], align 8 |
| // CHECK-RV64-NEXT: [[TMP0:%.*]] = load i64, i64* [[AVL_ADDR]], align 8 |
| // CHECK-RV64-NEXT: [[TMP1:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[TMP0]], i64 1, i64 3) |
| // CHECK-RV64-NEXT: ret i64 [[TMP1]] |
| // |
| size_t test_vsetvl_e16m8(size_t avl) { |
| return vsetvl_e16m8(avl); |
| } |
| |
| // |
| // CHECK-RV64-LABEL: @test_vsetvl_e16mf2( |
| // CHECK-RV64-NEXT: entry: |
| // CHECK-RV64-NEXT: [[AVL_ADDR:%.*]] = alloca i64, align 8 |
| // CHECK-RV64-NEXT: store i64 [[AVL:%.*]], i64* [[AVL_ADDR]], align 8 |
| // CHECK-RV64-NEXT: [[TMP0:%.*]] = load i64, i64* [[AVL_ADDR]], align 8 |
| // CHECK-RV64-NEXT: [[TMP1:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[TMP0]], i64 1, i64 7) |
| // CHECK-RV64-NEXT: ret i64 [[TMP1]] |
| // |
| size_t test_vsetvl_e16mf2(size_t avl) { |
| return vsetvl_e16mf2(avl); |
| } |
| |
| // |
| // CHECK-RV64-LABEL: @test_vsetvl_e16mf4( |
| // CHECK-RV64-NEXT: entry: |
| // CHECK-RV64-NEXT: [[AVL_ADDR:%.*]] = alloca i64, align 8 |
| // CHECK-RV64-NEXT: store i64 [[AVL:%.*]], i64* [[AVL_ADDR]], align 8 |
| // CHECK-RV64-NEXT: [[TMP0:%.*]] = load i64, i64* [[AVL_ADDR]], align 8 |
| // CHECK-RV64-NEXT: [[TMP1:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[TMP0]], i64 1, i64 6) |
| // CHECK-RV64-NEXT: ret i64 [[TMP1]] |
| // |
| size_t test_vsetvl_e16mf4(size_t avl) { |
| return vsetvl_e16mf4(avl); |
| } |
| |
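// SEW=32: the SEW operand is 2. Only mf2 of the fractional LMULs appears,
// presumably for the same SEW/LMUL <= ELEN reason as above.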
| // |
| // CHECK-RV64-LABEL: @test_vsetvl_e32m1( |
| // CHECK-RV64-NEXT: entry: |
| // CHECK-RV64-NEXT: [[AVL_ADDR:%.*]] = alloca i64, align 8 |
| // CHECK-RV64-NEXT: store i64 [[AVL:%.*]], i64* [[AVL_ADDR]], align 8 |
| // CHECK-RV64-NEXT: [[TMP0:%.*]] = load i64, i64* [[AVL_ADDR]], align 8 |
| // CHECK-RV64-NEXT: [[TMP1:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[TMP0]], i64 2, i64 0) |
| // CHECK-RV64-NEXT: ret i64 [[TMP1]] |
| // |
| size_t test_vsetvl_e32m1(size_t avl) { |
| return vsetvl_e32m1(avl); |
| } |
| |
| // |
| // CHECK-RV64-LABEL: @test_vsetvl_e32m2( |
| // CHECK-RV64-NEXT: entry: |
| // CHECK-RV64-NEXT: [[AVL_ADDR:%.*]] = alloca i64, align 8 |
| // CHECK-RV64-NEXT: store i64 [[AVL:%.*]], i64* [[AVL_ADDR]], align 8 |
| // CHECK-RV64-NEXT: [[TMP0:%.*]] = load i64, i64* [[AVL_ADDR]], align 8 |
| // CHECK-RV64-NEXT: [[TMP1:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[TMP0]], i64 2, i64 1) |
| // CHECK-RV64-NEXT: ret i64 [[TMP1]] |
| // |
| size_t test_vsetvl_e32m2(size_t avl) { |
| return vsetvl_e32m2(avl); |
| } |
| |
| // |
| // CHECK-RV64-LABEL: @test_vsetvl_e32m4( |
| // CHECK-RV64-NEXT: entry: |
| // CHECK-RV64-NEXT: [[AVL_ADDR:%.*]] = alloca i64, align 8 |
| // CHECK-RV64-NEXT: store i64 [[AVL:%.*]], i64* [[AVL_ADDR]], align 8 |
| // CHECK-RV64-NEXT: [[TMP0:%.*]] = load i64, i64* [[AVL_ADDR]], align 8 |
| // CHECK-RV64-NEXT: [[TMP1:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[TMP0]], i64 2, i64 2) |
| // CHECK-RV64-NEXT: ret i64 [[TMP1]] |
| // |
| size_t test_vsetvl_e32m4(size_t avl) { |
| return vsetvl_e32m4(avl); |
| } |
| |
| // |
| // CHECK-RV64-LABEL: @test_vsetvl_e32m8( |
| // CHECK-RV64-NEXT: entry: |
| // CHECK-RV64-NEXT: [[AVL_ADDR:%.*]] = alloca i64, align 8 |
| // CHECK-RV64-NEXT: store i64 [[AVL:%.*]], i64* [[AVL_ADDR]], align 8 |
| // CHECK-RV64-NEXT: [[TMP0:%.*]] = load i64, i64* [[AVL_ADDR]], align 8 |
| // CHECK-RV64-NEXT: [[TMP1:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[TMP0]], i64 2, i64 3) |
| // CHECK-RV64-NEXT: ret i64 [[TMP1]] |
| // |
| size_t test_vsetvl_e32m8(size_t avl) { |
| return vsetvl_e32m8(avl); |
| } |
| |
| // |
| // CHECK-RV64-LABEL: @test_vsetvl_e32mf2( |
| // CHECK-RV64-NEXT: entry: |
| // CHECK-RV64-NEXT: [[AVL_ADDR:%.*]] = alloca i64, align 8 |
| // CHECK-RV64-NEXT: store i64 [[AVL:%.*]], i64* [[AVL_ADDR]], align 8 |
| // CHECK-RV64-NEXT: [[TMP0:%.*]] = load i64, i64* [[AVL_ADDR]], align 8 |
| // CHECK-RV64-NEXT: [[TMP1:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[TMP0]], i64 2, i64 7) |
| // CHECK-RV64-NEXT: ret i64 [[TMP1]] |
| // |
| size_t test_vsetvl_e32mf2(size_t avl) { |
| return vsetvl_e32mf2(avl); |
| } |
| |
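// SEW=64: the SEW operand is 3. No fractional LMULs are tested; assuming
// ELEN=64, every fractional setting would put SEW/LMUL above ELEN, so only
// m1 through m8 remain.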
| // |
| // CHECK-RV64-LABEL: @test_vsetvl_e64m1( |
| // CHECK-RV64-NEXT: entry: |
| // CHECK-RV64-NEXT: [[AVL_ADDR:%.*]] = alloca i64, align 8 |
| // CHECK-RV64-NEXT: store i64 [[AVL:%.*]], i64* [[AVL_ADDR]], align 8 |
| // CHECK-RV64-NEXT: [[TMP0:%.*]] = load i64, i64* [[AVL_ADDR]], align 8 |
| // CHECK-RV64-NEXT: [[TMP1:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[TMP0]], i64 3, i64 0) |
| // CHECK-RV64-NEXT: ret i64 [[TMP1]] |
| // |
| size_t test_vsetvl_e64m1(size_t avl) { |
| return vsetvl_e64m1(avl); |
| } |
| |
| // |
| // CHECK-RV64-LABEL: @test_vsetvl_e64m2( |
| // CHECK-RV64-NEXT: entry: |
| // CHECK-RV64-NEXT: [[AVL_ADDR:%.*]] = alloca i64, align 8 |
| // CHECK-RV64-NEXT: store i64 [[AVL:%.*]], i64* [[AVL_ADDR]], align 8 |
| // CHECK-RV64-NEXT: [[TMP0:%.*]] = load i64, i64* [[AVL_ADDR]], align 8 |
| // CHECK-RV64-NEXT: [[TMP1:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[TMP0]], i64 3, i64 1) |
| // CHECK-RV64-NEXT: ret i64 [[TMP1]] |
| // |
| size_t test_vsetvl_e64m2(size_t avl) { |
| return vsetvl_e64m2(avl); |
| } |
| |
| // |
| // CHECK-RV64-LABEL: @test_vsetvl_e64m4( |
| // CHECK-RV64-NEXT: entry: |
| // CHECK-RV64-NEXT: [[AVL_ADDR:%.*]] = alloca i64, align 8 |
| // CHECK-RV64-NEXT: store i64 [[AVL:%.*]], i64* [[AVL_ADDR]], align 8 |
| // CHECK-RV64-NEXT: [[TMP0:%.*]] = load i64, i64* [[AVL_ADDR]], align 8 |
| // CHECK-RV64-NEXT: [[TMP1:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[TMP0]], i64 3, i64 2) |
| // CHECK-RV64-NEXT: ret i64 [[TMP1]] |
| // |
| size_t test_vsetvl_e64m4(size_t avl) { |
| return vsetvl_e64m4(avl); |
| } |
| |
| // |
| // CHECK-RV64-LABEL: @test_vsetvl_e64m8( |
| // CHECK-RV64-NEXT: entry: |
| // CHECK-RV64-NEXT: [[AVL_ADDR:%.*]] = alloca i64, align 8 |
| // CHECK-RV64-NEXT: store i64 [[AVL:%.*]], i64* [[AVL_ADDR]], align 8 |
| // CHECK-RV64-NEXT: [[TMP0:%.*]] = load i64, i64* [[AVL_ADDR]], align 8 |
| // CHECK-RV64-NEXT: [[TMP1:%.*]] = call i64 @llvm.riscv.vsetvli.i64(i64 [[TMP0]], i64 3, i64 3) |
| // CHECK-RV64-NEXT: ret i64 [[TMP1]] |
| // |
| size_t test_vsetvl_e64m8(size_t avl) { |
| return vsetvl_e64m8(avl); |
| } |