target datalayout = "e-m:e-i64:64-i128:128-n32:64-S128"
target triple = "aarch64-linux-android"

%struct.rs_allocation = type { i64*, i64*, i64*, i64* }

declare i8* @rsOffset(%struct.rs_allocation* nocapture readonly %a, i32 %sizeOf, i32 %x, i32 %y, i32 %z)
declare i8* @rsOffsetNs(%struct.rs_allocation* nocapture readonly %a, i32 %x, i32 %y, i32 %z)
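; rsOffset returns a pointer to the element at coordinates (x, y, z) of the
; allocation, taking the element size in bytes as %sizeOf; rsOffsetNs appears,
; from its signature, to be the variant that looks the element size up from the
; allocation itself. Both are only declared here and are assumed to be provided
; by the RenderScript driver runtime.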

; The loads and stores in this file are annotated with RenderScript-specific
; information for the type based alias analysis, such that the TBAA analysis
; understands that loads and stores from two allocations with different types
; can never access the same memory element. This is different from C, where
; a char or uchar load/store is special in that it can alias with almost
; anything.
;
; The TBAA tree in this file has the node "RenderScript Distinct TBAA" as
; its root.
; This means all loads/stores that share this common root can be proven not to
; alias. However, the alias analysis still has to assume MayAlias between
; memory accesses in this file and memory accesses annotated with the C/C++
; TBAA metadata.
; A node named "RenderScript TBAA" wraps our distinct TBAA root node.
; If we can ensure that all accesses to elements loaded from RenderScript
; allocations are either annotated with the RenderScript TBAA information or
; not annotated at all, but never annotated with the C/C++ metadata, we
; can add the "RenderScript TBAA" tree under the C/C++ TBAA tree. This enables
; TBAA to prove that an access to data from a RenderScript allocation
; does not alias with a load/store accessing something that is not part of a
; RenderScript allocation.
; We do this by replacing the second operand of "RenderScript TBAA" with the
; node for "Simple C/C++ TBAA", thus connecting the two TBAA trees. The
; "RenderScript Distinct TBAA" root, which is then no longer referenced, can
; safely be dropped from the analysis.
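;
; As an illustrative sketch only (the node numbers for the C/C++ tree are
; hypothetical, not necessarily the ones the driver emits), the metadata after
; that rewrite would look roughly like:
;
;   !0  = !{!"Simple C/C++ TBAA"}          ; C/C++ TBAA root
;   !13 = !{!"RenderScript Distinct TBAA"} ; no longer referenced, can be dropped
;   !14 = !{!"RenderScript TBAA", !0}      ; second operand now points at the C/C++ root
;   !15 = !{!"allocation", !14}            ; the per-type nodes below keep hanging off !15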

!13 = !{!"RenderScript Distinct TBAA"}
!14 = !{!"RenderScript TBAA", !13}
!15 = !{!"allocation", !14}

!21 = !{!"char", !15}
define void @rsSetElementAtImpl_char(%struct.rs_allocation* nocapture readonly %a, i8 signext %val, i32 %x, i32 %y, i32 %z) #1 {
  %1 = tail call i8* @rsOffset(%struct.rs_allocation* %a, i32 1, i32 %x, i32 %y, i32 %z) #2
  store i8 %val, i8* %1, align 1, !tbaa !21
  ret void
}

define signext i8 @rsGetElementAtImpl_char(%struct.rs_allocation* nocapture readonly %a, i32 %x, i32 %y, i32 %z) #0 {
  %1 = tail call i8* @rsOffset(%struct.rs_allocation* %a, i32 1, i32 %x, i32 %y, i32 %z) #2
  %2 = load i8, i8* %1, align 1, !tbaa !21
  ret i8 %2
}

!22 = !{!"char2", !15}
define void @rsSetElementAtImpl_char2(%struct.rs_allocation* nocapture readonly %a, i16 %val, i32 %x, i32 %y, i32 %z) #1 {
  %1 = tail call i8* @rsOffset(%struct.rs_allocation* %a, i32 2, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <2 x i8>*
  %3 = bitcast i16 %val to <2 x i8>
  store <2 x i8> %3, <2 x i8>* %2, align 2, !tbaa !22
  ret void
}

define <2 x i8> @rsGetElementAtImpl_char2(%struct.rs_allocation* nocapture readonly %a, i32 %x, i32 %y, i32 %z) #0 {
  %1 = tail call i8* @rsOffset(%struct.rs_allocation* %a, i32 2, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <2 x i8>*
  %3 = load <2 x i8>, <2 x i8>* %2, align 2, !tbaa !22
  ret <2 x i8> %3
}

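; Three-element vectors occupy a four-element slot in the allocation (note the
; sizeOf values passed to rsOffset): the setters widen the value to a 4-wide
; vector with shufflevector, leaving the fourth lane undef, and the getters
; load a 4-wide vector and shuffle the first three lanes back out. The same
; pattern is used for every *3 type below.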
!23 = !{!"char3", !15}
define void @rsSetElementAtImpl_char3(%struct.rs_allocation* nocapture readonly %a, i32 %val, i32 %x, i32 %y, i32 %z) #1 {
  %1 = tail call i8* @rsOffset(%struct.rs_allocation* %a, i32 4, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i32 %val to <4 x i8>
  %3 = shufflevector <4 x i8> %2, <4 x i8> undef, <4 x i32> <i32 0, i32 1, i32 2, i32 undef>
  %4 = bitcast i8* %1 to <4 x i8>*
  store <4 x i8> %3, <4 x i8>* %4, align 4, !tbaa !23
  ret void
}

define <3 x i8> @rsGetElementAtImpl_char3(%struct.rs_allocation* nocapture readonly %a, i32 %x, i32 %y, i32 %z) #0 {
  %1 = tail call i8* @rsOffset(%struct.rs_allocation* %a, i32 4, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <4 x i8>*
  %3 = load <4 x i8>, <4 x i8>* %2, align 4, !tbaa !23
  %4 = shufflevector <4 x i8> %3, <4 x i8> undef, <3 x i32> <i32 0, i32 1, i32 2>
  ret <3 x i8> %4
}

!24 = !{!"char4", !15}
define void @rsSetElementAtImpl_char4(%struct.rs_allocation* nocapture readonly %a, i32 %val, i32 %x, i32 %y, i32 %z) #1 {
  %1 = tail call i8* @rsOffset(%struct.rs_allocation* %a, i32 4, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <4 x i8>*
  %3 = bitcast i32 %val to <4 x i8>
  store <4 x i8> %3, <4 x i8>* %2, align 4, !tbaa !24
  ret void
}

define <4 x i8> @rsGetElementAtImpl_char4(%struct.rs_allocation* nocapture readonly %a, i32 %x, i32 %y, i32 %z) #0 {
  %1 = tail call i8* @rsOffset(%struct.rs_allocation* %a, i32 4, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <4 x i8>*
  %3 = load <4 x i8>, <4 x i8>* %2, align 4, !tbaa !24
  ret <4 x i8> %3
}

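; The unsigned accessors below are structurally identical to the signed ones;
; they differ only in the zeroext/signext attribute on the scalar value and in
; the TBAA type node used to tag the access.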
!25 = !{!"uchar", !15}
define void @rsSetElementAtImpl_uchar(%struct.rs_allocation* nocapture readonly %a, i8 zeroext %val, i32 %x, i32 %y, i32 %z) #1 {
  %1 = tail call i8* @rsOffset(%struct.rs_allocation* %a, i32 1, i32 %x, i32 %y, i32 %z) #2
  store i8 %val, i8* %1, align 1, !tbaa !25
  ret void
}

define zeroext i8 @rsGetElementAtImpl_uchar(%struct.rs_allocation* nocapture readonly %a, i32 %x, i32 %y, i32 %z) #0 {
  %1 = tail call i8* @rsOffset(%struct.rs_allocation* %a, i32 1, i32 %x, i32 %y, i32 %z) #2
  %2 = load i8, i8* %1, align 1, !tbaa !25
  ret i8 %2
}

!26 = !{!"uchar2", !15}
define void @rsSetElementAtImpl_uchar2(%struct.rs_allocation* nocapture readonly %a, i16 %val, i32 %x, i32 %y, i32 %z) #1 {
  %1 = tail call i8* @rsOffset(%struct.rs_allocation* %a, i32 2, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <2 x i8>*
  %3 = bitcast i16 %val to <2 x i8>
  store <2 x i8> %3, <2 x i8>* %2, align 2, !tbaa !26
  ret void
}

define <2 x i8> @rsGetElementAtImpl_uchar2(%struct.rs_allocation* nocapture readonly %a, i32 %x, i32 %y, i32 %z) #0 {
  %1 = tail call i8* @rsOffset(%struct.rs_allocation* %a, i32 2, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <2 x i8>*
  %3 = load <2 x i8>, <2 x i8>* %2, align 2, !tbaa !26
  ret <2 x i8> %3
}

!27 = !{!"uchar3", !15}
define void @rsSetElementAtImpl_uchar3(%struct.rs_allocation* nocapture readonly %a, i32 %val, i32 %x, i32 %y, i32 %z) #1 {
  %1 = tail call i8* @rsOffset(%struct.rs_allocation* %a, i32 4, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i32 %val to <4 x i8>
  %3 = shufflevector <4 x i8> %2, <4 x i8> undef, <4 x i32> <i32 0, i32 1, i32 2, i32 undef>
  %4 = bitcast i8* %1 to <4 x i8>*
  store <4 x i8> %3, <4 x i8>* %4, align 4, !tbaa !27
  ret void
}

define <3 x i8> @rsGetElementAtImpl_uchar3(%struct.rs_allocation* nocapture readonly %a, i32 %x, i32 %y, i32 %z) #0 {
  %1 = tail call i8* @rsOffset(%struct.rs_allocation* %a, i32 4, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <4 x i8>*
  %3 = load <4 x i8>, <4 x i8>* %2, align 4, !tbaa !27
  %4 = shufflevector <4 x i8> %3, <4 x i8> undef, <3 x i32> <i32 0, i32 1, i32 2>
  ret <3 x i8> %4
}

!28 = !{!"uchar4", !15}
define void @rsSetElementAtImpl_uchar4(%struct.rs_allocation* nocapture readonly %a, i32 %val, i32 %x, i32 %y, i32 %z) #1 {
  %1 = tail call i8* @rsOffset(%struct.rs_allocation* %a, i32 4, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <4 x i8>*
  %3 = bitcast i32 %val to <4 x i8>
  store <4 x i8> %3, <4 x i8>* %2, align 4, !tbaa !28
  ret void
}

define <4 x i8> @rsGetElementAtImpl_uchar4(%struct.rs_allocation* nocapture readonly %a, i32 %x, i32 %y, i32 %z) #0 {
  %1 = tail call i8* @rsOffset(%struct.rs_allocation* %a, i32 4, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <4 x i8>*
  %3 = load <4 x i8>, <4 x i8>* %2, align 4, !tbaa !28
  ret <4 x i8> %3
}

!29 = !{!"short", !15}
define void @rsSetElementAtImpl_short(%struct.rs_allocation* nocapture readonly %a, i16 signext %val, i32 %x, i32 %y, i32 %z) #1 {
  %1 = tail call i8* @rsOffset(%struct.rs_allocation* %a, i32 2, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to i16*
  store i16 %val, i16* %2, align 2, !tbaa !29
  ret void
}

define signext i16 @rsGetElementAtImpl_short(%struct.rs_allocation* nocapture readonly %a, i32 %x, i32 %y, i32 %z) #0 {
  %1 = tail call i8* @rsOffset(%struct.rs_allocation* %a, i32 2, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to i16*
  %3 = load i16, i16* %2, align 2, !tbaa !29
  ret i16 %3
}

!30 = !{!"short2", !15}
define void @rsSetElementAtImpl_short2(%struct.rs_allocation* nocapture readonly %a, i32 %val, i32 %x, i32 %y, i32 %z) #1 {
  %1 = tail call i8* @rsOffset(%struct.rs_allocation* %a, i32 4, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <2 x i16>*
  %3 = bitcast i32 %val to <2 x i16>
  store <2 x i16> %3, <2 x i16>* %2, align 4, !tbaa !30
  ret void
}

define <2 x i16> @rsGetElementAtImpl_short2(%struct.rs_allocation* nocapture readonly %a, i32 %x, i32 %y, i32 %z) #0 {
  %1 = tail call i8* @rsOffset(%struct.rs_allocation* %a, i32 4, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <2 x i16>*
  %3 = load <2 x i16>, <2 x i16>* %2, align 4, !tbaa !30
  ret <2 x i16> %3
}

!31 = !{!"short3", !15}
define void @rsSetElementAtImpl_short3(%struct.rs_allocation* nocapture readonly %a, <2 x i32> %val, i32 %x, i32 %y, i32 %z) #1 {
  %1 = tail call i8* @rsOffset(%struct.rs_allocation* %a, i32 8, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast <2 x i32> %val to <4 x i16>
  %3 = shufflevector <4 x i16> %2, <4 x i16> undef, <4 x i32> <i32 0, i32 1, i32 2, i32 undef>
  %4 = bitcast i8* %1 to <4 x i16>*
  store <4 x i16> %3, <4 x i16>* %4, align 8, !tbaa !31
  ret void
}

define <3 x i16> @rsGetElementAtImpl_short3(%struct.rs_allocation* nocapture readonly %a, i32 %x, i32 %y, i32 %z) #0 {
  %1 = tail call i8* @rsOffset(%struct.rs_allocation* %a, i32 8, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <4 x i16>*
  %3 = load <4 x i16>, <4 x i16>* %2, align 8, !tbaa !31
  %4 = shufflevector <4 x i16> %3, <4 x i16> undef, <3 x i32> <i32 0, i32 1, i32 2>
  ret <3 x i16> %4
}

!32 = !{!"short4", !15}
define void @rsSetElementAtImpl_short4(%struct.rs_allocation* nocapture readonly %a, <4 x i16> %val, i32 %x, i32 %y, i32 %z) #1 {
  %1 = tail call i8* @rsOffset(%struct.rs_allocation* %a, i32 8, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <4 x i16>*
  store <4 x i16> %val, <4 x i16>* %2, align 8, !tbaa !32
  ret void
}

define <4 x i16> @rsGetElementAtImpl_short4(%struct.rs_allocation* nocapture readonly %a, i32 %x, i32 %y, i32 %z) #0 {
  %1 = tail call i8* @rsOffset(%struct.rs_allocation* %a, i32 8, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <4 x i16>*
  %3 = load <4 x i16>, <4 x i16>* %2, align 8, !tbaa !32
  ret <4 x i16> %3
}

!33 = !{!"ushort", !15}
define void @rsSetElementAtImpl_ushort(%struct.rs_allocation* nocapture readonly %a, i16 zeroext %val, i32 %x, i32 %y, i32 %z) #1 {
  %1 = tail call i8* @rsOffset(%struct.rs_allocation* %a, i32 2, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to i16*
  store i16 %val, i16* %2, align 2, !tbaa !33
  ret void
}

define zeroext i16 @rsGetElementAtImpl_ushort(%struct.rs_allocation* nocapture readonly %a, i32 %x, i32 %y, i32 %z) #0 {
  %1 = tail call i8* @rsOffset(%struct.rs_allocation* %a, i32 2, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to i16*
  %3 = load i16, i16* %2, align 2, !tbaa !33
  ret i16 %3
}

!34 = !{!"ushort2", !15}
define void @rsSetElementAtImpl_ushort2(%struct.rs_allocation* nocapture readonly %a, i32 %val, i32 %x, i32 %y, i32 %z) #1 {
  %1 = tail call i8* @rsOffset(%struct.rs_allocation* %a, i32 4, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <2 x i16>*
  %3 = bitcast i32 %val to <2 x i16>
  store <2 x i16> %3, <2 x i16>* %2, align 4, !tbaa !34
  ret void
}

define <2 x i16> @rsGetElementAtImpl_ushort2(%struct.rs_allocation* nocapture readonly %a, i32 %x, i32 %y, i32 %z) #0 {
  %1 = tail call i8* @rsOffset(%struct.rs_allocation* %a, i32 4, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <2 x i16>*
  %3 = load <2 x i16>, <2 x i16>* %2, align 4, !tbaa !34
  ret <2 x i16> %3
}

!35 = !{!"ushort3", !15}
define void @rsSetElementAtImpl_ushort3(%struct.rs_allocation* nocapture readonly %a, <2 x i32> %val, i32 %x, i32 %y, i32 %z) #1 {
  %1 = tail call i8* @rsOffset(%struct.rs_allocation* %a, i32 8, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast <2 x i32> %val to <4 x i16>
  %3 = shufflevector <4 x i16> %2, <4 x i16> undef, <4 x i32> <i32 0, i32 1, i32 2, i32 undef>
  %4 = bitcast i8* %1 to <4 x i16>*
  store <4 x i16> %3, <4 x i16>* %4, align 8, !tbaa !35
  ret void
}

define <3 x i16> @rsGetElementAtImpl_ushort3(%struct.rs_allocation* nocapture readonly %a, i32 %x, i32 %y, i32 %z) #0 {
  %1 = tail call i8* @rsOffset(%struct.rs_allocation* %a, i32 8, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <4 x i16>*
  %3 = load <4 x i16>, <4 x i16>* %2, align 8, !tbaa !35
  %4 = shufflevector <4 x i16> %3, <4 x i16> undef, <3 x i32> <i32 0, i32 1, i32 2>
  ret <3 x i16> %4
}

!36 = !{!"ushort4", !15}
define void @rsSetElementAtImpl_ushort4(%struct.rs_allocation* nocapture readonly %a, <4 x i16> %val, i32 %x, i32 %y, i32 %z) #1 {
  %1 = tail call i8* @rsOffset(%struct.rs_allocation* %a, i32 8, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <4 x i16>*
  store <4 x i16> %val, <4 x i16>* %2, align 8, !tbaa !36
  ret void
}

define <4 x i16> @rsGetElementAtImpl_ushort4(%struct.rs_allocation* nocapture readonly %a, i32 %x, i32 %y, i32 %z) #0 {
  %1 = tail call i8* @rsOffset(%struct.rs_allocation* %a, i32 8, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <4 x i16>*
  %3 = load <4 x i16>, <4 x i16>* %2, align 8, !tbaa !36
  ret <4 x i16> %3
}

!37 = !{!"int", !15}
define void @rsSetElementAtImpl_int(%struct.rs_allocation* nocapture readonly %a, i32 %val, i32 %x, i32 %y, i32 %z) #1 {
  %1 = tail call i8* @rsOffset(%struct.rs_allocation* %a, i32 4, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to i32*
  store i32 %val, i32* %2, align 4, !tbaa !37
  ret void
}

define i32 @rsGetElementAtImpl_int(%struct.rs_allocation* nocapture readonly %a, i32 %x, i32 %y, i32 %z) #0 {
  %1 = tail call i8* @rsOffset(%struct.rs_allocation* %a, i32 4, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to i32*
  %3 = load i32, i32* %2, align 4, !tbaa !37
  ret i32 %3
}

!38 = !{!"int2", !15}
define void @rsSetElementAtImpl_int2(%struct.rs_allocation* nocapture readonly %a, <2 x i32> %val, i32 %x, i32 %y, i32 %z) #1 {
  %1 = tail call i8* @rsOffset(%struct.rs_allocation* %a, i32 8, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <2 x i32>*
  store <2 x i32> %val, <2 x i32>* %2, align 8, !tbaa !38
  ret void
}

define <2 x i32> @rsGetElementAtImpl_int2(%struct.rs_allocation* nocapture readonly %a, i32 %x, i32 %y, i32 %z) #0 {
  %1 = tail call i8* @rsOffset(%struct.rs_allocation* %a, i32 8, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <2 x i32>*
  %3 = load <2 x i32>, <2 x i32>* %2, align 8, !tbaa !38
  ret <2 x i32> %3
}

!39 = !{!"int3", !15}
define void @rsSetElementAtImpl_int3(%struct.rs_allocation* nocapture readonly %a, <4 x i32> %val, i32 %x, i32 %y, i32 %z) #1 {
  %1 = tail call i8* @rsOffset(%struct.rs_allocation* %a, i32 16, i32 %x, i32 %y, i32 %z) #2
  %2 = shufflevector <4 x i32> %val, <4 x i32> undef, <4 x i32> <i32 0, i32 1, i32 2, i32 undef>
  %3 = bitcast i8* %1 to <4 x i32>*
  store <4 x i32> %2, <4 x i32>* %3, align 16, !tbaa !39
  ret void
}

define <3 x i32> @rsGetElementAtImpl_int3(%struct.rs_allocation* nocapture readonly %a, i32 %x, i32 %y, i32 %z) #0 {
  %1 = tail call i8* @rsOffset(%struct.rs_allocation* %a, i32 16, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <4 x i32>*
  %3 = load <4 x i32>, <4 x i32>* %2, align 8, !tbaa !39
  %4 = shufflevector <4 x i32> %3, <4 x i32> undef, <3 x i32> <i32 0, i32 1, i32 2>
  ret <3 x i32> %4
}

!40 = !{!"int4", !15}
define void @rsSetElementAtImpl_int4(%struct.rs_allocation* nocapture readonly %a, <4 x i32> %val, i32 %x, i32 %y, i32 %z) #1 {
  %1 = tail call i8* @rsOffset(%struct.rs_allocation* %a, i32 16, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <4 x i32>*
  store <4 x i32> %val, <4 x i32>* %2, align 16, !tbaa !40
  ret void
}

define <4 x i32> @rsGetElementAtImpl_int4(%struct.rs_allocation* nocapture readonly %a, i32 %x, i32 %y, i32 %z) #0 {
  %1 = tail call i8* @rsOffset(%struct.rs_allocation* %a, i32 16, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <4 x i32>*
  %3 = load <4 x i32>, <4 x i32>* %2, align 16, !tbaa !40
  ret <4 x i32> %3
}

!41 = !{!"uint", !15}
define void @rsSetElementAtImpl_uint(%struct.rs_allocation* nocapture readonly %a, i32 %val, i32 %x, i32 %y, i32 %z) #1 {
  %1 = tail call i8* @rsOffset(%struct.rs_allocation* %a, i32 4, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to i32*
  store i32 %val, i32* %2, align 4, !tbaa !41
  ret void
}

define i32 @rsGetElementAtImpl_uint(%struct.rs_allocation* nocapture readonly %a, i32 %x, i32 %y, i32 %z) #0 {
  %1 = tail call i8* @rsOffset(%struct.rs_allocation* %a, i32 4, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to i32*
  %3 = load i32, i32* %2, align 4, !tbaa !41
  ret i32 %3
}

!42 = !{!"uint2", !15}
define void @rsSetElementAtImpl_uint2(%struct.rs_allocation* nocapture readonly %a, <2 x i32> %val, i32 %x, i32 %y, i32 %z) #1 {
  %1 = tail call i8* @rsOffset(%struct.rs_allocation* %a, i32 8, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <2 x i32>*
  store <2 x i32> %val, <2 x i32>* %2, align 8, !tbaa !42
  ret void
}

define <2 x i32> @rsGetElementAtImpl_uint2(%struct.rs_allocation* nocapture readonly %a, i32 %x, i32 %y, i32 %z) #0 {
  %1 = tail call i8* @rsOffset(%struct.rs_allocation* %a, i32 8, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <2 x i32>*
  %3 = load <2 x i32>, <2 x i32>* %2, align 8, !tbaa !42
  ret <2 x i32> %3
}

!43 = !{!"uint3", !15}
define void @rsSetElementAtImpl_uint3(%struct.rs_allocation* nocapture readonly %a, <4 x i32> %val, i32 %x, i32 %y, i32 %z) #1 {
  %1 = tail call i8* @rsOffset(%struct.rs_allocation* %a, i32 16, i32 %x, i32 %y, i32 %z) #2
  %2 = shufflevector <4 x i32> %val, <4 x i32> undef, <4 x i32> <i32 0, i32 1, i32 2, i32 undef>
  %3 = bitcast i8* %1 to <4 x i32>*
  store <4 x i32> %2, <4 x i32>* %3, align 16, !tbaa !43
  ret void
}

define <3 x i32> @rsGetElementAtImpl_uint3(%struct.rs_allocation* nocapture readonly %a, i32 %x, i32 %y, i32 %z) #0 {
  %1 = tail call i8* @rsOffset(%struct.rs_allocation* %a, i32 16, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <4 x i32>*
  %3 = load <4 x i32>, <4 x i32>* %2, align 8, !tbaa !43
  %4 = shufflevector <4 x i32> %3, <4 x i32> undef, <3 x i32> <i32 0, i32 1, i32 2>
  ret <3 x i32> %4
}

!44 = !{!"uint4", !15}
define void @rsSetElementAtImpl_uint4(%struct.rs_allocation* nocapture readonly %a, <4 x i32> %val, i32 %x, i32 %y, i32 %z) #1 {
  %1 = tail call i8* @rsOffset(%struct.rs_allocation* %a, i32 16, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <4 x i32>*
  store <4 x i32> %val, <4 x i32>* %2, align 16, !tbaa !44
  ret void
}

define <4 x i32> @rsGetElementAtImpl_uint4(%struct.rs_allocation* nocapture readonly %a, i32 %x, i32 %y, i32 %z) #0 {
  %1 = tail call i8* @rsOffset(%struct.rs_allocation* %a, i32 16, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <4 x i32>*
  %3 = load <4 x i32>, <4 x i32>* %2, align 16, !tbaa !44
  ret <4 x i32> %3
}

!45 = !{!"long", !15}
define void @rsSetElementAtImpl_long(%struct.rs_allocation* nocapture readonly %a, i64 %val, i32 %x, i32 %y, i32 %z) #1 {
  %1 = tail call i8* @rsOffset(%struct.rs_allocation* %a, i32 8, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to i64*
  store i64 %val, i64* %2, align 8, !tbaa !45
  ret void
}

define i64 @rsGetElementAtImpl_long(%struct.rs_allocation* nocapture readonly %a, i32 %x, i32 %y, i32 %z) #0 {
  %1 = tail call i8* @rsOffset(%struct.rs_allocation* %a, i32 8, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to i64*
  %3 = load i64, i64* %2, align 8, !tbaa !45
  ret i64 %3
}

!46 = !{!"long2", !15}
define void @rsSetElementAtImpl_long2(%struct.rs_allocation* nocapture readonly %a, <2 x i64> %val, i32 %x, i32 %y, i32 %z) #1 {
  %1 = tail call i8* @rsOffset(%struct.rs_allocation* %a, i32 16, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <2 x i64>*
  store <2 x i64> %val, <2 x i64>* %2, align 16, !tbaa !46
  ret void
}

define <2 x i64> @rsGetElementAtImpl_long2(%struct.rs_allocation* nocapture readonly %a, i32 %x, i32 %y, i32 %z) #0 {
  %1 = tail call i8* @rsOffset(%struct.rs_allocation* %a, i32 16, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <2 x i64>*
  %3 = load <2 x i64>, <2 x i64>* %2, align 16, !tbaa !46
  ret <2 x i64> %3
}

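; The 3- and 4-element long, ulong, and double accessors below handle 32-byte
; elements: the setters take %val through a pointer and the getters return the
; result through an sret output argument, presumably because vectors this wide
; are not passed or returned directly under the target ABI. The 3-element
; variants again read and write full 4-element, 32-byte-aligned slots.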
!47 = !{!"long3", !15}
define void @rsSetElementAtImpl_long3(%struct.rs_allocation* nocapture readonly %a, <3 x i64>* %val, i32 %x, i32 %y, i32 %z) #1 {
  %1 = tail call i8* @rsOffset(%struct.rs_allocation* %a, i32 32, i32 %x, i32 %y, i32 %z) #2
  %2 = load <3 x i64>, <3 x i64>* %val
  %3 = shufflevector <3 x i64> %2, <3 x i64> undef, <4 x i32> <i32 0, i32 1, i32 2, i32 undef>
  %4 = bitcast i8* %1 to <4 x i64>*
  store <4 x i64> %3, <4 x i64>* %4, align 32, !tbaa !47
  ret void
}

define void @rsGetElementAtImpl_long3(<3 x i64>* noalias nocapture sret %agg.result, %struct.rs_allocation* nocapture readonly %a, i32 %x, i32 %y, i32 %z) #1 {
  %1 = tail call i8* @rsOffset(%struct.rs_allocation* %a, i32 32, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <4 x i64>*
  %3 = load <4 x i64>, <4 x i64>* %2, align 32
  %4 = bitcast <3 x i64>* %agg.result to <4 x i64>*
  store <4 x i64> %3, <4 x i64>* %4, align 32, !tbaa !47
  ret void
}

!48 = !{!"long4", !15}
define void @rsSetElementAtImpl_long4(%struct.rs_allocation* nocapture readonly %a, <4 x i64>* %val, i32 %x, i32 %y, i32 %z) #1 {
  %1 = tail call i8* @rsOffset(%struct.rs_allocation* %a, i32 32, i32 %x, i32 %y, i32 %z) #2
  %2 = load <4 x i64>, <4 x i64>* %val
  %3 = bitcast i8* %1 to <4 x i64>*
  store <4 x i64> %2, <4 x i64>* %3, align 32, !tbaa !48
  ret void
}

define void @rsGetElementAtImpl_long4(<4 x i64>* noalias nocapture sret %agg.result, %struct.rs_allocation* nocapture readonly %a, i32 %x, i32 %y, i32 %z) #1 {
  %1 = tail call i8* @rsOffset(%struct.rs_allocation* %a, i32 32, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <4 x i64>*
  %3 = load <4 x i64>, <4 x i64>* %2, align 32, !tbaa !15
  store <4 x i64> %3, <4 x i64>* %agg.result, align 32, !tbaa !48
  ret void
}

!49 = !{!"ulong", !15}
define void @rsSetElementAtImpl_ulong(%struct.rs_allocation* nocapture readonly %a, i64 %val, i32 %x, i32 %y, i32 %z) #1 {
  %1 = tail call i8* @rsOffset(%struct.rs_allocation* %a, i32 8, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to i64*
  store i64 %val, i64* %2, align 8, !tbaa !49
  ret void
}

define i64 @rsGetElementAtImpl_ulong(%struct.rs_allocation* nocapture readonly %a, i32 %x, i32 %y, i32 %z) #0 {
  %1 = tail call i8* @rsOffset(%struct.rs_allocation* %a, i32 8, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to i64*
  %3 = load i64, i64* %2, align 8, !tbaa !49
  ret i64 %3
}

!50 = !{!"ulong2", !15}
define void @rsSetElementAtImpl_ulong2(%struct.rs_allocation* nocapture readonly %a, <2 x i64> %val, i32 %x, i32 %y, i32 %z) #1 {
  %1 = tail call i8* @rsOffset(%struct.rs_allocation* %a, i32 16, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <2 x i64>*
  store <2 x i64> %val, <2 x i64>* %2, align 16, !tbaa !50
  ret void
}

define <2 x i64> @rsGetElementAtImpl_ulong2(%struct.rs_allocation* nocapture readonly %a, i32 %x, i32 %y, i32 %z) #0 {
  %1 = tail call i8* @rsOffset(%struct.rs_allocation* %a, i32 16, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <2 x i64>*
  %3 = load <2 x i64>, <2 x i64>* %2, align 16, !tbaa !50
  ret <2 x i64> %3
}

!51 = !{!"ulong3", !15}
define void @rsSetElementAtImpl_ulong3(%struct.rs_allocation* nocapture readonly %a, <3 x i64>* %val, i32 %x, i32 %y, i32 %z) #1 {
  %1 = tail call i8* @rsOffset(%struct.rs_allocation* %a, i32 32, i32 %x, i32 %y, i32 %z) #2
  %2 = load <3 x i64>, <3 x i64>* %val
  %3 = shufflevector <3 x i64> %2, <3 x i64> undef, <4 x i32> <i32 0, i32 1, i32 2, i32 undef>
  %4 = bitcast i8* %1 to <4 x i64>*
  store <4 x i64> %3, <4 x i64>* %4, align 32, !tbaa !51
  ret void
}

define void @rsGetElementAtImpl_ulong3(<3 x i64>* noalias nocapture sret %agg.result, %struct.rs_allocation* nocapture readonly %a, i32 %x, i32 %y, i32 %z) #1 {
  %1 = tail call i8* @rsOffset(%struct.rs_allocation* %a, i32 32, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <4 x i64>*
  %3 = load <4 x i64>, <4 x i64>* %2, align 32
  %4 = bitcast <3 x i64>* %agg.result to <4 x i64>*
  store <4 x i64> %3, <4 x i64>* %4, align 32, !tbaa !51
  ret void
}

!52 = !{!"ulong4", !15}
define void @rsSetElementAtImpl_ulong4(%struct.rs_allocation* nocapture readonly %a, <4 x i64>* %val, i32 %x, i32 %y, i32 %z) #1 {
  %1 = tail call i8* @rsOffset(%struct.rs_allocation* %a, i32 32, i32 %x, i32 %y, i32 %z) #2
  %2 = load <4 x i64>, <4 x i64>* %val
  %3 = bitcast i8* %1 to <4 x i64>*
  store <4 x i64> %2, <4 x i64>* %3, align 32, !tbaa !52
  ret void
}

define void @rsGetElementAtImpl_ulong4(<4 x i64>* noalias nocapture sret %agg.result, %struct.rs_allocation* nocapture readonly %a, i32 %x, i32 %y, i32 %z) #1 {
  %1 = tail call i8* @rsOffset(%struct.rs_allocation* %a, i32 32, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <4 x i64>*
  %3 = load <4 x i64>, <4 x i64>* %2, align 32, !tbaa !15
  store <4 x i64> %3, <4 x i64>* %agg.result, align 32, !tbaa !52
  ret void
}

!53 = !{!"float", !15}
define void @rsSetElementAtImpl_float(%struct.rs_allocation* nocapture readonly %a, float %val, i32 %x, i32 %y, i32 %z) #1 {
  %1 = tail call i8* @rsOffset(%struct.rs_allocation* %a, i32 4, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to float*
  store float %val, float* %2, align 4, !tbaa !53
  ret void
}

define float @rsGetElementAtImpl_float(%struct.rs_allocation* nocapture readonly %a, i32 %x, i32 %y, i32 %z) #0 {
  %1 = tail call i8* @rsOffset(%struct.rs_allocation* %a, i32 4, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to float*
  %3 = load float, float* %2, align 4, !tbaa !53
  ret float %3
}

!54 = !{!"float2", !15}
define void @rsSetElementAtImpl_float2(%struct.rs_allocation* nocapture readonly %a, <2 x float> %val, i32 %x, i32 %y, i32 %z) #1 {
  %1 = tail call i8* @rsOffset(%struct.rs_allocation* %a, i32 8, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <2 x float>*
  store <2 x float> %val, <2 x float>* %2, align 8, !tbaa !54
  ret void
}

define <2 x float> @rsGetElementAtImpl_float2(%struct.rs_allocation* nocapture readonly %a, i32 %x, i32 %y, i32 %z) #0 {
  %1 = tail call i8* @rsOffset(%struct.rs_allocation* %a, i32 8, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <2 x float>*
  %3 = load <2 x float>, <2 x float>* %2, align 8, !tbaa !54
  ret <2 x float> %3
}

!55 = !{!"float3", !15}
define void @rsSetElementAtImpl_float3(%struct.rs_allocation* nocapture readonly %a, <4 x i32> %val, i32 %x, i32 %y, i32 %z) #1 {
  %1 = tail call i8* @rsOffset(%struct.rs_allocation* %a, i32 16, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast <4 x i32> %val to <4 x float>
  %3 = shufflevector <4 x float> %2, <4 x float> undef, <4 x i32> <i32 0, i32 1, i32 2, i32 undef>
  %4 = bitcast i8* %1 to <4 x float>*
  store <4 x float> %3, <4 x float>* %4, align 16, !tbaa !55
  ret void
}

define <3 x float> @rsGetElementAtImpl_float3(%struct.rs_allocation* nocapture readonly %a, i32 %x, i32 %y, i32 %z) #0 {
  %1 = tail call i8* @rsOffset(%struct.rs_allocation* %a, i32 16, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <4 x float>*
  %3 = load <4 x float>, <4 x float>* %2, align 8, !tbaa !55
  %4 = shufflevector <4 x float> %3, <4 x float> undef, <3 x i32> <i32 0, i32 1, i32 2>
  ret <3 x float> %4
}

!56 = !{!"float4", !15}
define void @rsSetElementAtImpl_float4(%struct.rs_allocation* nocapture readonly %a, <4 x float> %val, i32 %x, i32 %y, i32 %z) #1 {
  %1 = tail call i8* @rsOffset(%struct.rs_allocation* %a, i32 16, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <4 x float>*
  store <4 x float> %val, <4 x float>* %2, align 16, !tbaa !56
  ret void
}

define <4 x float> @rsGetElementAtImpl_float4(%struct.rs_allocation* nocapture readonly %a, i32 %x, i32 %y, i32 %z) #0 {
  %1 = tail call i8* @rsOffset(%struct.rs_allocation* %a, i32 16, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <4 x float>*
  %3 = load <4 x float>, <4 x float>* %2, align 16, !tbaa !56
  ret <4 x float> %3
}

Stephen Hines | 4d0de13 | 2015-03-18 14:53:03 -0700 | [diff] [blame] | 610 | !57 = !{!"double", !15} |
Tim Murray | 01ca8a4 | 2014-09-24 10:08:25 -0700 | [diff] [blame] | 611 | define void @rsSetElementAtImpl_double(%struct.rs_allocation* nocapture readonly %a, double %val, i32 %x, i32 %y, i32 %z) #1 { |
| 612 | %1 = tail call i8* @rsOffset(%struct.rs_allocation* %a, i32 8, i32 %x, i32 %y, i32 %z) #2 |
Tobias Grosser | 1ed5ef9 | 2013-07-29 11:39:38 -0700 | [diff] [blame] | 613 | %2 = bitcast i8* %1 to double* |
| 614 | store double %val, double* %2, align 8, !tbaa !57 |
| 615 | ret void |
| 616 | } |
| 617 | |
Tim Murray | 01ca8a4 | 2014-09-24 10:08:25 -0700 | [diff] [blame] | 618 | define double @rsGetElementAtImpl_double(%struct.rs_allocation* nocapture readonly %a, i32 %x, i32 %y, i32 %z) #0 { |
| 619 | %1 = tail call i8* @rsOffset(%struct.rs_allocation* %a, i32 8, i32 %x, i32 %y, i32 %z) #2 |
Tobias Grosser | 1ed5ef9 | 2013-07-29 11:39:38 -0700 | [diff] [blame] | 620 | %2 = bitcast i8* %1 to double* |
Pirama Arumuga Nainar | 8184739 | 2015-04-08 12:35:45 -0700 | [diff] [blame] | 621 | %3 = load double, double* %2, align 8, !tbaa !57 |
Tobias Grosser | 1ed5ef9 | 2013-07-29 11:39:38 -0700 | [diff] [blame] | 622 | ret double %3 |
| 623 | } |
| 624 | |
Stephen Hines | 4d0de13 | 2015-03-18 14:53:03 -0700 | [diff] [blame] | 625 | !58 = !{!"double2", !15} |
Tim Murray | 01ca8a4 | 2014-09-24 10:08:25 -0700 | [diff] [blame] | 626 | define void @rsSetElementAtImpl_double2(%struct.rs_allocation* nocapture readonly %a, <2 x double> %val, i32 %x, i32 %y, i32 %z) #1 { |
| 627 | %1 = tail call i8* @rsOffset(%struct.rs_allocation* %a, i32 16, i32 %x, i32 %y, i32 %z) #2 |
Tobias Grosser | 1ed5ef9 | 2013-07-29 11:39:38 -0700 | [diff] [blame] | 628 | %2 = bitcast i8* %1 to <2 x double>* |
| 629 | store <2 x double> %val, <2 x double>* %2, align 16, !tbaa !58 |
| 630 | ret void |
| 631 | } |
| 632 | |
Tim Murray | 01ca8a4 | 2014-09-24 10:08:25 -0700 | [diff] [blame] | 633 | define <2 x double> @rsGetElementAtImpl_double2(%struct.rs_allocation* nocapture readonly %a, i32 %x, i32 %y, i32 %z) #0 { |
| 634 | %1 = tail call i8* @rsOffset(%struct.rs_allocation* %a, i32 16, i32 %x, i32 %y, i32 %z) #2 |
Tobias Grosser | 1ed5ef9 | 2013-07-29 11:39:38 -0700 | [diff] [blame] | 635 | %2 = bitcast i8* %1 to <2 x double>* |
Pirama Arumuga Nainar | 8184739 | 2015-04-08 12:35:45 -0700 | [diff] [blame] | 636 | %3 = load <2 x double>, <2 x double>* %2, align 16, !tbaa !58 |
Tobias Grosser | 1ed5ef9 | 2013-07-29 11:39:38 -0700 | [diff] [blame] | 637 | ret <2 x double> %3 |
| 638 | } |
| 639 | |
!59 = !{!"double3", !15}
define void @rsSetElementAtImpl_double3(%struct.rs_allocation* nocapture readonly %a, <3 x double>* %val, i32 %x, i32 %y, i32 %z) #1 {
  %1 = tail call i8* @rsOffset(%struct.rs_allocation* %a, i32 32, i32 %x, i32 %y, i32 %z) #2
  %2 = load <3 x double>, <3 x double>* %val
  %3 = shufflevector <3 x double> %2, <3 x double> undef, <4 x i32> <i32 0, i32 1, i32 2, i32 undef>
  %4 = bitcast i8* %1 to <4 x double>*
  store <4 x double> %3, <4 x double>* %4, align 32, !tbaa !59
  ret void
}

define void @rsGetElementAtImpl_double3(<3 x double>* noalias nocapture sret %agg.result, %struct.rs_allocation* nocapture readonly %a, i32 %x, i32 %y, i32 %z) #1 {
  %1 = tail call i8* @rsOffset(%struct.rs_allocation* %a, i32 32, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <4 x double>*
  %3 = load <4 x double>, <4 x double>* %2, align 32
  %4 = bitcast <3 x double>* %agg.result to <4 x double>*
  store <4 x double> %3, <4 x double>* %4, align 32, !tbaa !59
  ret void
}

!60 = !{!"double4", !15}
define void @rsSetElementAtImpl_double4(%struct.rs_allocation* nocapture readonly %a, <4 x double>* %val, i32 %x, i32 %y, i32 %z) #1 {
  %1 = tail call i8* @rsOffset(%struct.rs_allocation* %a, i32 32, i32 %x, i32 %y, i32 %z) #2
  %2 = load <4 x double>, <4 x double>* %val
  %3 = bitcast i8* %1 to <4 x double>*
  store <4 x double> %2, <4 x double>* %3, align 32, !tbaa !60
  ret void
}

define void @rsGetElementAtImpl_double4(<4 x double>* noalias nocapture sret %agg.result, %struct.rs_allocation* nocapture readonly %a, i32 %x, i32 %y, i32 %z) #1 {
  %1 = tail call i8* @rsOffset(%struct.rs_allocation* %a, i32 32, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <4 x double>*
  %3 = load <4 x double>, <4 x double>* %2, align 32, !tbaa !15
  store <4 x double> %3, <4 x double>* %agg.result, align 32, !tbaa !60
  ret void
}

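; Accessors for half (fp16) elements. The scalar and half4 variants take and
; return half values directly, while the half2 and half3 setters receive their
; value pre-packed in integer form (i32 and <2 x i32> respectively) and simply
; bitcast it back to a half vector before storing; this presumably mirrors how
; the front end coerces small fp16 vectors when it calls these implementations.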
!61 = !{!"half", !15}
define void @rsSetElementAtImpl_half(%struct.rs_allocation* nocapture readonly %a.coerce, half %val, i32 %x, i32 %y, i32 %z) #1 {
  %1 = tail call i8* @rsOffset(%struct.rs_allocation* %a.coerce, i32 2, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to half*
  store half %val, half* %2, align 2, !tbaa !61
  ret void
}

define half @rsGetElementAtImpl_half(%struct.rs_allocation* nocapture readonly %a.coerce, i32 %x, i32 %y, i32 %z) #0 {
  %1 = tail call i8* @rsOffset(%struct.rs_allocation* %a.coerce, i32 2, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to half*
  %3 = load half, half* %2, align 2, !tbaa !61
  ret half %3
}

!62 = !{!"half2", !15}
define void @rsSetElementAtImpl_half2(%struct.rs_allocation* nocapture readonly %a.coerce, i32 %val, i32 %x, i32 %y, i32 %z) #1 {
  %1 = tail call i8* @rsOffset(%struct.rs_allocation* %a.coerce, i32 4, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <2 x half>*
  %3 = bitcast i32 %val to <2 x half>
  store <2 x half> %3, <2 x half>* %2, align 4, !tbaa !62
  ret void
}

define <2 x half> @rsGetElementAtImpl_half2(%struct.rs_allocation* nocapture readonly %a.coerce, i32 %x, i32 %y, i32 %z) #0 {
  %1 = tail call i8* @rsOffset(%struct.rs_allocation* %a.coerce, i32 4, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <2 x half>*
  %3 = load <2 x half>, <2 x half>* %2, align 4, !tbaa !62
  ret <2 x half> %3
}

!63 = !{!"half3", !15}
define void @rsSetElementAtImpl_half3(%struct.rs_allocation* nocapture readonly %a.coerce, <2 x i32> %val, i32 %x, i32 %y, i32 %z) #1 {
  %1 = tail call i8* @rsOffset(%struct.rs_allocation* %a.coerce, i32 8, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast <2 x i32> %val to <4 x half>
  %3 = shufflevector <4 x half> %2, <4 x half> undef, <4 x i32> <i32 0, i32 1, i32 2, i32 undef>
  %4 = bitcast i8* %1 to <4 x half>*
  store <4 x half> %3, <4 x half>* %4, align 8, !tbaa !63
  ret void
}

define <3 x half> @rsGetElementAtImpl_half3(%struct.rs_allocation* nocapture readonly %a.coerce, i32 %x, i32 %y, i32 %z) #1 {
  %1 = tail call i8* @rsOffset(%struct.rs_allocation* %a.coerce, i32 8, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <4 x half>*
  %3 = load <4 x half>, <4 x half>* %2, align 8, !tbaa !63
  %4 = shufflevector <4 x half> %3, <4 x half> undef, <3 x i32> <i32 0, i32 1, i32 2>
  ret <3 x half> %4
}

!64 = !{!"half4", !15}
define void @rsSetElementAtImpl_half4(%struct.rs_allocation* nocapture readonly %a.coerce, <4 x half> %val, i32 %x, i32 %y, i32 %z) #1 {
  %1 = tail call i8* @rsOffset(%struct.rs_allocation* %a.coerce, i32 8, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <4 x half>*
  store <4 x half> %val, <4 x half>* %2, align 8, !tbaa !64
  ret void
}

define <4 x half> @rsGetElementAtImpl_half4(%struct.rs_allocation* nocapture readonly %a.coerce, i32 %x, i32 %y, i32 %z) #0 {
  %1 = tail call i8* @rsOffset(%struct.rs_allocation* %a.coerce, i32 8, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <4 x half>*
  %3 = load <4 x half>, <4 x half>* %2, align 8, !tbaa !64
  ret <4 x half> %3
}

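; __rsAllocationVLoadXImpl_<type> implement the rsAllocationVLoadX_* family:
; they read N consecutive scalar elements starting at cell (x, y, z) of a
; scalar allocation. The offset is computed with rsOffsetNs (no element size),
; and each load only claims the alignment of the scalar element type, since
; the starting index need not be vector-aligned. Results wider than 16 bytes
; are returned indirectly through an sret pointer.
;
; Illustration only (not part of this file): a script-side call such as
;   long4 v = rsAllocationVLoadX_long4(a, x, y, z);
; is expected to be routed by the RenderScript headers to
; @__rsAllocationVLoadXImpl_long4 below.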
define void @__rsAllocationVLoadXImpl_long4(<4 x i64>* noalias nocapture sret %agg.result, %struct.rs_allocation* nocapture readonly %a, i32 %x, i32 %y, i32 %z) #1 {
  %1 = tail call i8* @rsOffsetNs(%struct.rs_allocation* %a, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <4 x i64>*
  %3 = load <4 x i64>, <4 x i64>* %2, align 8
  store <4 x i64> %3, <4 x i64>* %agg.result
  ret void
}
define void @__rsAllocationVLoadXImpl_long3(<3 x i64>* noalias nocapture sret %agg.result, %struct.rs_allocation* nocapture readonly %a, i32 %x, i32 %y, i32 %z) #1 {
  %1 = tail call i8* @rsOffsetNs(%struct.rs_allocation* %a, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <3 x i64>*
  %3 = load <3 x i64>, <3 x i64>* %2, align 8
  store <3 x i64> %3, <3 x i64>* %agg.result
  ret void
}
define <2 x i64> @__rsAllocationVLoadXImpl_long2(%struct.rs_allocation* nocapture readonly %a, i32 %x, i32 %y, i32 %z) #0 {
  %1 = tail call i8* @rsOffsetNs(%struct.rs_allocation* %a, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <2 x i64>*
  %3 = load <2 x i64>, <2 x i64>* %2, align 8
  ret <2 x i64> %3
}

define void @__rsAllocationVLoadXImpl_ulong4(<4 x i64>* noalias nocapture sret %agg.result, %struct.rs_allocation* nocapture readonly %a, i32 %x, i32 %y, i32 %z) #1 {
  %1 = tail call i8* @rsOffsetNs(%struct.rs_allocation* %a, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <4 x i64>*
  %3 = load <4 x i64>, <4 x i64>* %2, align 8
  store <4 x i64> %3, <4 x i64>* %agg.result
  ret void
}
define void @__rsAllocationVLoadXImpl_ulong3(<3 x i64>* noalias nocapture sret %agg.result, %struct.rs_allocation* nocapture readonly %a, i32 %x, i32 %y, i32 %z) #1 {
  %1 = tail call i8* @rsOffsetNs(%struct.rs_allocation* %a, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <3 x i64>*
  %3 = load <3 x i64>, <3 x i64>* %2, align 8
  store <3 x i64> %3, <3 x i64>* %agg.result
  ret void
}
define <2 x i64> @__rsAllocationVLoadXImpl_ulong2(%struct.rs_allocation* nocapture readonly %a, i32 %x, i32 %y, i32 %z) #0 {
  %1 = tail call i8* @rsOffsetNs(%struct.rs_allocation* %a, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <2 x i64>*
  %3 = load <2 x i64>, <2 x i64>* %2, align 8
  ret <2 x i64> %3
}

define <4 x i32> @__rsAllocationVLoadXImpl_int4(%struct.rs_allocation* nocapture readonly %a, i32 %x, i32 %y, i32 %z) #0 {
  %1 = tail call i8* @rsOffsetNs(%struct.rs_allocation* %a, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <4 x i32>*
  %3 = load <4 x i32>, <4 x i32>* %2, align 4
  ret <4 x i32> %3
}
define <3 x i32> @__rsAllocationVLoadXImpl_int3(%struct.rs_allocation* nocapture readonly %a, i32 %x, i32 %y, i32 %z) #0 {
  %1 = tail call i8* @rsOffsetNs(%struct.rs_allocation* %a, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <3 x i32>*
  %3 = load <3 x i32>, <3 x i32>* %2, align 4
  ret <3 x i32> %3
}
define <2 x i32> @__rsAllocationVLoadXImpl_int2(%struct.rs_allocation* nocapture readonly %a, i32 %x, i32 %y, i32 %z) #0 {
  %1 = tail call i8* @rsOffsetNs(%struct.rs_allocation* %a, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <2 x i32>*
  %3 = load <2 x i32>, <2 x i32>* %2, align 4
  ret <2 x i32> %3
}

define <4 x i32> @__rsAllocationVLoadXImpl_uint4(%struct.rs_allocation* nocapture readonly %a, i32 %x, i32 %y, i32 %z) #0 {
  %1 = tail call i8* @rsOffsetNs(%struct.rs_allocation* %a, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <4 x i32>*
  %3 = load <4 x i32>, <4 x i32>* %2, align 4
  ret <4 x i32> %3
}
define <3 x i32> @__rsAllocationVLoadXImpl_uint3(%struct.rs_allocation* nocapture readonly %a, i32 %x, i32 %y, i32 %z) #0 {
  %1 = tail call i8* @rsOffsetNs(%struct.rs_allocation* %a, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <3 x i32>*
  %3 = load <3 x i32>, <3 x i32>* %2, align 4
  ret <3 x i32> %3
}
define <2 x i32> @__rsAllocationVLoadXImpl_uint2(%struct.rs_allocation* nocapture readonly %a, i32 %x, i32 %y, i32 %z) #0 {
  %1 = tail call i8* @rsOffsetNs(%struct.rs_allocation* %a, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <2 x i32>*
  %3 = load <2 x i32>, <2 x i32>* %2, align 4
  ret <2 x i32> %3
}

define <4 x i16> @__rsAllocationVLoadXImpl_short4(%struct.rs_allocation* nocapture readonly %a, i32 %x, i32 %y, i32 %z) #0 {
  %1 = tail call i8* @rsOffsetNs(%struct.rs_allocation* %a, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <4 x i16>*
  %3 = load <4 x i16>, <4 x i16>* %2, align 2
  ret <4 x i16> %3
}
define <3 x i16> @__rsAllocationVLoadXImpl_short3(%struct.rs_allocation* nocapture readonly %a, i32 %x, i32 %y, i32 %z) #0 {
  %1 = tail call i8* @rsOffsetNs(%struct.rs_allocation* %a, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <3 x i16>*
  %3 = load <3 x i16>, <3 x i16>* %2, align 2
  ret <3 x i16> %3
}
define <2 x i16> @__rsAllocationVLoadXImpl_short2(%struct.rs_allocation* nocapture readonly %a, i32 %x, i32 %y, i32 %z) #0 {
  %1 = tail call i8* @rsOffsetNs(%struct.rs_allocation* %a, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <2 x i16>*
  %3 = load <2 x i16>, <2 x i16>* %2, align 2
  ret <2 x i16> %3
}

define <4 x i16> @__rsAllocationVLoadXImpl_ushort4(%struct.rs_allocation* nocapture readonly %a, i32 %x, i32 %y, i32 %z) #0 {
  %1 = tail call i8* @rsOffsetNs(%struct.rs_allocation* %a, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <4 x i16>*
  %3 = load <4 x i16>, <4 x i16>* %2, align 2
  ret <4 x i16> %3
}
define <3 x i16> @__rsAllocationVLoadXImpl_ushort3(%struct.rs_allocation* nocapture readonly %a, i32 %x, i32 %y, i32 %z) #0 {
  %1 = tail call i8* @rsOffsetNs(%struct.rs_allocation* %a, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <3 x i16>*
  %3 = load <3 x i16>, <3 x i16>* %2, align 2
  ret <3 x i16> %3
}
define <2 x i16> @__rsAllocationVLoadXImpl_ushort2(%struct.rs_allocation* nocapture readonly %a, i32 %x, i32 %y, i32 %z) #0 {
  %1 = tail call i8* @rsOffsetNs(%struct.rs_allocation* %a, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <2 x i16>*
  %3 = load <2 x i16>, <2 x i16>* %2, align 2
  ret <2 x i16> %3
}

define <4 x i8> @__rsAllocationVLoadXImpl_char4(%struct.rs_allocation* nocapture readonly %a, i32 %x, i32 %y, i32 %z) #0 {
  %1 = tail call i8* @rsOffsetNs(%struct.rs_allocation* %a, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <4 x i8>*
  %3 = load <4 x i8>, <4 x i8>* %2, align 1
  ret <4 x i8> %3
}
define <3 x i8> @__rsAllocationVLoadXImpl_char3(%struct.rs_allocation* nocapture readonly %a, i32 %x, i32 %y, i32 %z) #0 {
  %1 = tail call i8* @rsOffsetNs(%struct.rs_allocation* %a, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <3 x i8>*
  %3 = load <3 x i8>, <3 x i8>* %2, align 1
  ret <3 x i8> %3
}
define <2 x i8> @__rsAllocationVLoadXImpl_char2(%struct.rs_allocation* nocapture readonly %a, i32 %x, i32 %y, i32 %z) #0 {
  %1 = tail call i8* @rsOffsetNs(%struct.rs_allocation* %a, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <2 x i8>*
  %3 = load <2 x i8>, <2 x i8>* %2, align 1
  ret <2 x i8> %3
}

define <4 x i8> @__rsAllocationVLoadXImpl_uchar4(%struct.rs_allocation* nocapture readonly %a, i32 %x, i32 %y, i32 %z) #0 {
  %1 = tail call i8* @rsOffsetNs(%struct.rs_allocation* %a, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <4 x i8>*
  %3 = load <4 x i8>, <4 x i8>* %2, align 1
  ret <4 x i8> %3
}
define <3 x i8> @__rsAllocationVLoadXImpl_uchar3(%struct.rs_allocation* nocapture readonly %a, i32 %x, i32 %y, i32 %z) #0 {
  %1 = tail call i8* @rsOffsetNs(%struct.rs_allocation* %a, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <3 x i8>*
  %3 = load <3 x i8>, <3 x i8>* %2, align 1
  ret <3 x i8> %3
}
define <2 x i8> @__rsAllocationVLoadXImpl_uchar2(%struct.rs_allocation* nocapture readonly %a, i32 %x, i32 %y, i32 %z) #0 {
  %1 = tail call i8* @rsOffsetNs(%struct.rs_allocation* %a, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <2 x i8>*
  %3 = load <2 x i8>, <2 x i8>* %2, align 1
  ret <2 x i8> %3
}

define <4 x float> @__rsAllocationVLoadXImpl_float4(%struct.rs_allocation* nocapture readonly %a, i32 %x, i32 %y, i32 %z) #0 {
  %1 = tail call i8* @rsOffsetNs(%struct.rs_allocation* %a, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <4 x float>*
  %3 = load <4 x float>, <4 x float>* %2, align 4
  ret <4 x float> %3
}
define <3 x float> @__rsAllocationVLoadXImpl_float3(%struct.rs_allocation* nocapture readonly %a, i32 %x, i32 %y, i32 %z) #0 {
  %1 = tail call i8* @rsOffsetNs(%struct.rs_allocation* %a, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <3 x float>*
  %3 = load <3 x float>, <3 x float>* %2, align 4
  ret <3 x float> %3
}
define <2 x float> @__rsAllocationVLoadXImpl_float2(%struct.rs_allocation* nocapture readonly %a, i32 %x, i32 %y, i32 %z) #0 {
  %1 = tail call i8* @rsOffsetNs(%struct.rs_allocation* %a, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <2 x float>*
  %3 = load <2 x float>, <2 x float>* %2, align 4
  ret <2 x float> %3
}

define void @__rsAllocationVLoadXImpl_double4(<4 x double>* noalias nocapture sret %agg.result, %struct.rs_allocation* nocapture readonly %a, i32 %x, i32 %y, i32 %z) #1 {
  %1 = tail call i8* @rsOffsetNs(%struct.rs_allocation* %a, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <4 x double>*
  %3 = load <4 x double>, <4 x double>* %2, align 8
  store <4 x double> %3, <4 x double>* %agg.result
  ret void
}
define void @__rsAllocationVLoadXImpl_double3(<3 x double>* noalias nocapture sret %agg.result, %struct.rs_allocation* nocapture readonly %a, i32 %x, i32 %y, i32 %z) #1 {
  %1 = tail call i8* @rsOffsetNs(%struct.rs_allocation* %a, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <3 x double>*
  %3 = load <3 x double>, <3 x double>* %2, align 8
  store <3 x double> %3, <3 x double>* %agg.result
  ret void
}
define <2 x double> @__rsAllocationVLoadXImpl_double2(%struct.rs_allocation* nocapture readonly %a, i32 %x, i32 %y, i32 %z) #0 {
  %1 = tail call i8* @rsOffsetNs(%struct.rs_allocation* %a, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <2 x double>*
  %3 = load <2 x double>, <2 x double>* %2, align 8
  ret <2 x double> %3
}

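; __rsAllocationVStoreXImpl_<type> are the matching rsAllocationVStoreX_*
; stores: they write N consecutive scalar elements starting at cell (x, y, z).
; Values wider than 16 bytes arrive by pointer, and several of the narrower
; vector types arrive coerced into plain integers or integer vectors (for
; example short2 as i32, char2 as i16, short3 as <2 x i32>) and are bitcast,
; and for the 3-component variants shuffled, back to the stored vector type.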
define void @__rsAllocationVStoreXImpl_long4(%struct.rs_allocation* nocapture readonly %a, <4 x i64>* %val, i32 %x, i32 %y, i32 %z) #1 {
  %1 = tail call i8* @rsOffsetNs(%struct.rs_allocation* %a, i32 %x, i32 %y, i32 %z) #2
  %2 = load <4 x i64>, <4 x i64>* %val
  %3 = bitcast i8* %1 to <4 x i64>*
  store <4 x i64> %2, <4 x i64>* %3, align 8
  ret void
}
define void @__rsAllocationVStoreXImpl_long3(%struct.rs_allocation* nocapture readonly %a, <3 x i64>* %val, i32 %x, i32 %y, i32 %z) #1 {
  %1 = tail call i8* @rsOffsetNs(%struct.rs_allocation* %a, i32 %x, i32 %y, i32 %z) #2
  %2 = load <3 x i64>, <3 x i64>* %val
  %3 = bitcast i8* %1 to <3 x i64>*
  store <3 x i64> %2, <3 x i64>* %3, align 8
  ret void
}
define void @__rsAllocationVStoreXImpl_long2(%struct.rs_allocation* nocapture readonly %a, <2 x i64> %val, i32 %x, i32 %y, i32 %z) #1 {
  %1 = tail call i8* @rsOffsetNs(%struct.rs_allocation* %a, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <2 x i64>*
  store <2 x i64> %val, <2 x i64>* %2, align 8
  ret void
}

define void @__rsAllocationVStoreXImpl_ulong4(%struct.rs_allocation* nocapture readonly %a, <4 x i64>* %val, i32 %x, i32 %y, i32 %z) #1 {
  %1 = tail call i8* @rsOffsetNs(%struct.rs_allocation* %a, i32 %x, i32 %y, i32 %z) #2
  %2 = load <4 x i64>, <4 x i64>* %val
  %3 = bitcast i8* %1 to <4 x i64>*
  store <4 x i64> %2, <4 x i64>* %3, align 8
  ret void
}
define void @__rsAllocationVStoreXImpl_ulong3(%struct.rs_allocation* nocapture readonly %a, <3 x i64>* %val, i32 %x, i32 %y, i32 %z) #1 {
  %1 = tail call i8* @rsOffsetNs(%struct.rs_allocation* %a, i32 %x, i32 %y, i32 %z) #2
  %2 = load <3 x i64>, <3 x i64>* %val
  %3 = bitcast i8* %1 to <3 x i64>*
  store <3 x i64> %2, <3 x i64>* %3, align 8
  ret void
}
define void @__rsAllocationVStoreXImpl_ulong2(%struct.rs_allocation* nocapture readonly %a, <2 x i64> %val, i32 %x, i32 %y, i32 %z) #1 {
  %1 = tail call i8* @rsOffsetNs(%struct.rs_allocation* %a, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <2 x i64>*
  store <2 x i64> %val, <2 x i64>* %2, align 8
  ret void
}

define void @__rsAllocationVStoreXImpl_int4(%struct.rs_allocation* nocapture readonly %a, <4 x i32> %val, i32 %x, i32 %y, i32 %z) #1 {
  %1 = tail call i8* @rsOffsetNs(%struct.rs_allocation* %a, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <4 x i32>*
  store <4 x i32> %val, <4 x i32>* %2, align 4
  ret void
}
define void @__rsAllocationVStoreXImpl_int3(%struct.rs_allocation* nocapture readonly %a, <4 x i32> %val, i32 %x, i32 %y, i32 %z) #1 {
  %1 = tail call i8* @rsOffsetNs(%struct.rs_allocation* %a, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <3 x i32>*
  %3 = shufflevector <4 x i32> %val, <4 x i32> undef, <3 x i32> <i32 0, i32 1, i32 2>
  store <3 x i32> %3, <3 x i32>* %2, align 4
  ret void
}
define void @__rsAllocationVStoreXImpl_int2(%struct.rs_allocation* nocapture readonly %a, <2 x i32> %val, i32 %x, i32 %y, i32 %z) #1 {
  %1 = tail call i8* @rsOffsetNs(%struct.rs_allocation* %a, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <2 x i32>*
  store <2 x i32> %val, <2 x i32>* %2, align 4
  ret void
}

define void @__rsAllocationVStoreXImpl_uint4(%struct.rs_allocation* nocapture readonly %a, <4 x i32> %val, i32 %x, i32 %y, i32 %z) #1 {
  %1 = tail call i8* @rsOffsetNs(%struct.rs_allocation* %a, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <4 x i32>*
  store <4 x i32> %val, <4 x i32>* %2, align 4
  ret void
}
define void @__rsAllocationVStoreXImpl_uint3(%struct.rs_allocation* nocapture readonly %a, <4 x i32> %val, i32 %x, i32 %y, i32 %z) #1 {
  %1 = tail call i8* @rsOffsetNs(%struct.rs_allocation* %a, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <3 x i32>*
  %3 = shufflevector <4 x i32> %val, <4 x i32> undef, <3 x i32> <i32 0, i32 1, i32 2>
  store <3 x i32> %3, <3 x i32>* %2, align 4
  ret void
}
define void @__rsAllocationVStoreXImpl_uint2(%struct.rs_allocation* nocapture readonly %a, <2 x i32> %val, i32 %x, i32 %y, i32 %z) #1 {
  %1 = tail call i8* @rsOffsetNs(%struct.rs_allocation* %a, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <2 x i32>*
  store <2 x i32> %val, <2 x i32>* %2, align 4
  ret void
}

define void @__rsAllocationVStoreXImpl_short4(%struct.rs_allocation* nocapture readonly %a, <4 x i16> %val, i32 %x, i32 %y, i32 %z) #1 {
  %1 = tail call i8* @rsOffsetNs(%struct.rs_allocation* %a, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <4 x i16>*
  store <4 x i16> %val, <4 x i16>* %2, align 2
  ret void
}
define void @__rsAllocationVStoreXImpl_short3(%struct.rs_allocation* nocapture readonly %a, <2 x i32> %val, i32 %x, i32 %y, i32 %z) #1 {
  %1 = tail call i8* @rsOffsetNs(%struct.rs_allocation* %a, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <3 x i16>*
  %3 = bitcast <2 x i32> %val to <4 x i16>
  %4 = shufflevector <4 x i16> %3, <4 x i16> undef, <3 x i32> <i32 0, i32 1, i32 2>
  store <3 x i16> %4, <3 x i16>* %2, align 2
  ret void
}
define void @__rsAllocationVStoreXImpl_short2(%struct.rs_allocation* nocapture readonly %a, i32 %val, i32 %x, i32 %y, i32 %z) #1 {
  %1 = tail call i8* @rsOffsetNs(%struct.rs_allocation* %a, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <2 x i16>*
  %3 = bitcast i32 %val to <2 x i16>
  store <2 x i16> %3, <2 x i16>* %2, align 2
  ret void
}

define void @__rsAllocationVStoreXImpl_ushort4(%struct.rs_allocation* nocapture readonly %a, <4 x i16> %val, i32 %x, i32 %y, i32 %z) #1 {
  %1 = tail call i8* @rsOffsetNs(%struct.rs_allocation* %a, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <4 x i16>*
  store <4 x i16> %val, <4 x i16>* %2, align 2
  ret void
}
define void @__rsAllocationVStoreXImpl_ushort3(%struct.rs_allocation* nocapture readonly %a, <2 x i32> %val, i32 %x, i32 %y, i32 %z) #1 {
  %1 = tail call i8* @rsOffsetNs(%struct.rs_allocation* %a, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <3 x i16>*
  %3 = bitcast <2 x i32> %val to <4 x i16>
  %4 = shufflevector <4 x i16> %3, <4 x i16> undef, <3 x i32> <i32 0, i32 1, i32 2>
  store <3 x i16> %4, <3 x i16>* %2, align 2
  ret void
}
define void @__rsAllocationVStoreXImpl_ushort2(%struct.rs_allocation* nocapture readonly %a, i32 %val, i32 %x, i32 %y, i32 %z) #1 {
  %1 = tail call i8* @rsOffsetNs(%struct.rs_allocation* %a, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <2 x i16>*
  %3 = bitcast i32 %val to <2 x i16>
  store <2 x i16> %3, <2 x i16>* %2, align 2
  ret void
}

define void @__rsAllocationVStoreXImpl_char4(%struct.rs_allocation* nocapture readonly %a, i32 %val, i32 %x, i32 %y, i32 %z) #1 {
  %1 = tail call i8* @rsOffsetNs(%struct.rs_allocation* %a, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <4 x i8>*
  %3 = bitcast i32 %val to <4 x i8>
  store <4 x i8> %3, <4 x i8>* %2, align 1
  ret void
}
define void @__rsAllocationVStoreXImpl_char3(%struct.rs_allocation* nocapture readonly %a, i32 %val, i32 %x, i32 %y, i32 %z) #1 {
  %1 = tail call i8* @rsOffsetNs(%struct.rs_allocation* %a, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <3 x i8>*
  %3 = bitcast i32 %val to <4 x i8>
  %4 = shufflevector <4 x i8> %3, <4 x i8> undef, <3 x i32> <i32 0, i32 1, i32 2>
  store <3 x i8> %4, <3 x i8>* %2, align 1
  ret void
}
define void @__rsAllocationVStoreXImpl_char2(%struct.rs_allocation* nocapture readonly %a, i16 %val, i32 %x, i32 %y, i32 %z) #1 {
  %1 = tail call i8* @rsOffsetNs(%struct.rs_allocation* %a, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <2 x i8>*
  %3 = bitcast i16 %val to <2 x i8>
  store <2 x i8> %3, <2 x i8>* %2, align 1
  ret void
}

define void @__rsAllocationVStoreXImpl_uchar4(%struct.rs_allocation* nocapture readonly %a, i32 %val, i32 %x, i32 %y, i32 %z) #1 {
  %1 = tail call i8* @rsOffsetNs(%struct.rs_allocation* %a, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <4 x i8>*
  %3 = bitcast i32 %val to <4 x i8>
  store <4 x i8> %3, <4 x i8>* %2, align 1
  ret void
}
define void @__rsAllocationVStoreXImpl_uchar3(%struct.rs_allocation* nocapture readonly %a, i32 %val, i32 %x, i32 %y, i32 %z) #1 {
  %1 = tail call i8* @rsOffsetNs(%struct.rs_allocation* %a, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <3 x i8>*
  %3 = bitcast i32 %val to <4 x i8>
  %4 = shufflevector <4 x i8> %3, <4 x i8> undef, <3 x i32> <i32 0, i32 1, i32 2>
  store <3 x i8> %4, <3 x i8>* %2, align 1
  ret void
}
define void @__rsAllocationVStoreXImpl_uchar2(%struct.rs_allocation* nocapture readonly %a, i16 %val, i32 %x, i32 %y, i32 %z) #1 {
  %1 = tail call i8* @rsOffsetNs(%struct.rs_allocation* %a, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <2 x i8>*
  %3 = bitcast i16 %val to <2 x i8>
  store <2 x i8> %3, <2 x i8>* %2, align 1
  ret void
}

define void @__rsAllocationVStoreXImpl_float4(%struct.rs_allocation* nocapture readonly %a, <4 x float> %val, i32 %x, i32 %y, i32 %z) #1 {
  %1 = tail call i8* @rsOffsetNs(%struct.rs_allocation* %a, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <4 x float>*
  store <4 x float> %val, <4 x float>* %2, align 4
  ret void
}
define void @__rsAllocationVStoreXImpl_float3(%struct.rs_allocation* nocapture readonly %a, <4 x i32> %val, i32 %x, i32 %y, i32 %z) #1 {
  %1 = tail call i8* @rsOffsetNs(%struct.rs_allocation* %a, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <3 x float>*
  %3 = bitcast <4 x i32> %val to <4 x float>
  %4 = shufflevector <4 x float> %3, <4 x float> undef, <3 x i32> <i32 0, i32 1, i32 2>
  store <3 x float> %4, <3 x float>* %2, align 4
  ret void
}
define void @__rsAllocationVStoreXImpl_float2(%struct.rs_allocation* nocapture readonly %a, <2 x float> %val, i32 %x, i32 %y, i32 %z) #1 {
  %1 = tail call i8* @rsOffsetNs(%struct.rs_allocation* %a, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <2 x float>*
  store <2 x float> %val, <2 x float>* %2, align 4
  ret void
}

define void @__rsAllocationVStoreXImpl_double4(%struct.rs_allocation* nocapture readonly %a, <4 x double>* %val, i32 %x, i32 %y, i32 %z) #1 {
  %1 = tail call i8* @rsOffsetNs(%struct.rs_allocation* %a, i32 %x, i32 %y, i32 %z) #2
  %2 = load <4 x double>, <4 x double>* %val
  %3 = bitcast i8* %1 to <4 x double>*
  store <4 x double> %2, <4 x double>* %3, align 8
  ret void
}
define void @__rsAllocationVStoreXImpl_double3(%struct.rs_allocation* nocapture readonly %a, <3 x double>* %val, i32 %x, i32 %y, i32 %z) #1 {
  %1 = tail call i8* @rsOffsetNs(%struct.rs_allocation* %a, i32 %x, i32 %y, i32 %z) #2
  %2 = load <3 x double>, <3 x double>* %val
  %3 = bitcast i8* %1 to <3 x double>*
  store <3 x double> %2, <3 x double>* %3, align 8
  ret void
}
define void @__rsAllocationVStoreXImpl_double2(%struct.rs_allocation* nocapture readonly %a, <2 x double> %val, i32 %x, i32 %y, i32 %z) #1 {
  %1 = tail call i8* @rsOffsetNs(%struct.rs_allocation* %a, i32 %x, i32 %y, i32 %z) #2
  %2 = bitcast i8* %1 to <2 x double>*
  store <2 x double> %val, <2 x double>* %2, align 8
  ret void
}

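; Attribute group #0 (nounwind readonly) is used for most of the by-value
; getters, #1 (nounwind, may write memory) for the setters and for the getters
; that return through an sret pointer, and #2 (nobuiltin) is applied to every
; rsOffset / rsOffsetNs call site.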
attributes #0 = { nounwind readonly "less-precise-fpmad"="false" "no-frame-pointer-elim"="true" "no-frame-pointer-elim-non-leaf"="true" "no-infs-fp-math"="false" "no-nans-fp-math"="false" "unsafe-fp-math"="false" "use-soft-float"="false" }
attributes #1 = { nounwind "less-precise-fpmad"="false" "no-frame-pointer-elim"="true" "no-frame-pointer-elim-non-leaf"="true" "no-infs-fp-math"="false" "no-nans-fp-math"="false" "unsafe-fp-math"="false" "use-soft-float"="false" }
attributes #2 = { nobuiltin }