 // The .NET Foundation licenses this file to you under the MIT license.
 // See the LICENSE file in the project root for more information.

-#if AMD64 || ARM64 || (BIT32 && !ARM)
-#define HAS_CUSTOM_BLOCKS
-#endif

 using System.Diagnostics;
 using System.Runtime;
 using System.Runtime.CompilerServices;
-using System.Runtime.InteropServices;

 using Internal.Runtime.CompilerServices;

 #if BIT64
 using nint = System.Int64;
 using nuint = System.UInt64;
 #else
 using nint = System.Int32;
 using nuint = System.UInt32;
 #endif
+using Block16 = System.Runtime.Intrinsics.Vector128<byte>;
+using Block32 = System.Runtime.Intrinsics.Vector256<byte>;

 namespace System
 {
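A note on the new aliases: `Block16`/`Block32` now map directly onto `Vector128<byte>`/`Vector256<byte>`, so each block copy below JITs to a single 16- or 32-byte vector load/store pair. A minimal standalone sketch of that idea, using the public `System.Runtime.CompilerServices.Unsafe` package rather than the internal `Internal.Runtime.CompilerServices` one this file imports:

```csharp
// Standalone sketch: reinterpreting a byte ref as Vector128<byte> turns a
// 16-byte copy into one vector load/store. Not part of this change.
using System;
using System.Runtime.CompilerServices;
using System.Runtime.Intrinsics;

class Block16Demo
{
    // Copies exactly 16 bytes from src to dest in a single vector-sized move.
    static void Copy16(ref byte dest, ref byte src) =>
        Unsafe.As<byte, Vector128<byte>>(ref dest) = Unsafe.As<byte, Vector128<byte>>(ref src);

    static void Main()
    {
        byte[] a = new byte[16];
        byte[] b = new byte[16];
        for (int i = 0; i < a.Length; i++) a[i] = (byte)i;

        Copy16(ref b[0], ref a[0]);
        Console.WriteLine(string.Join(",", b)); // 0,1,2,...,15
    }
}
```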
@@ -87,7 +85,7 @@ internal static unsafe void ZeroMemory(byte* dest, nuint len)

         // The attributes on this method are chosen for best JIT performance.
         // Please do not edit unless intentional.
-        [MethodImplAttribute(MethodImplOptions.AggressiveInlining)]
+        [MethodImpl(MethodImplOptions.AggressiveInlining)]
         [CLSCompliant(false)]
         public static unsafe void MemoryCopy(void* source, void* destination, long destinationSizeInBytes, long sourceBytesToCopy)
         {
@@ -100,7 +98,7 @@ public static unsafe void MemoryCopy(void* source, void* destination, long desti

         // The attributes on this method are chosen for best JIT performance.
         // Please do not edit unless intentional.
-        [MethodImplAttribute(MethodImplOptions.AggressiveInlining)]
+        [MethodImpl(MethodImplOptions.AggressiveInlining)]
         [CLSCompliant(false)]
         public static unsafe void MemoryCopy(void* source, void* destination, ulong destinationSizeInBytes, ulong sourceBytesToCopy)
         {
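Both `MemoryCopy` overloads touched above are public API. A brief usage sketch (compile with `/unsafe`); `destinationSizeInBytes` is a guard, and a copy larger than it throws `ArgumentOutOfRangeException` instead of overrunning the buffer:

```csharp
// Usage sketch for the public Buffer.MemoryCopy API.
using System;

class MemoryCopyDemo
{
    static unsafe void Main()
    {
        byte[] src = { 1, 2, 3, 4 };
        byte[] dst = new byte[4];

        fixed (byte* pSrc = src)
        fixed (byte* pDst = dst)
        {
            // dst.Length widens to long, selecting the first overload.
            Buffer.MemoryCopy(pSrc, pDst, dst.Length, src.Length);
        }

        Console.WriteLine(string.Join(",", dst)); // 1,2,3,4
    }
}
```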
@@ -141,171 +139,7 @@ internal static unsafe void Memcpy(byte* pDest, int destIndex, byte[] src, int s

         // This method has a different signature for x64 and other platforms and is done for performance reasons.
         internal static unsafe void Memmove(byte* dest, byte* src, nuint len)
-        {
-            // P/Invoke into the native version when the buffers are overlapping.
-            if (((nuint)dest - (nuint)src < len) || ((nuint)src - (nuint)dest < len))
-            {
-                goto PInvoke;
-            }
-
-            byte* srcEnd = src + len;
-            byte* destEnd = dest + len;
-
-            if (len <= 16) goto MCPY02;
-            if (len > 64) goto MCPY05;
-
-        MCPY00:
-            // Copy bytes which are multiples of 16 and leave the remainder for MCPY01 to handle.
-            Debug.Assert(len > 16 && len <= 64);
-#if HAS_CUSTOM_BLOCKS
-            *(Block16*)dest = *(Block16*)src; // [0,16]
-#elif BIT64
-            *(long*)dest = *(long*)src;
-            *(long*)(dest + 8) = *(long*)(src + 8); // [0,16]
-#else
-            *(int*)dest = *(int*)src;
-            *(int*)(dest + 4) = *(int*)(src + 4);
-            *(int*)(dest + 8) = *(int*)(src + 8);
-            *(int*)(dest + 12) = *(int*)(src + 12); // [0,16]
-#endif
-            if (len <= 32) goto MCPY01;
-#if HAS_CUSTOM_BLOCKS
-            *(Block16*)(dest + 16) = *(Block16*)(src + 16); // [0,32]
-#elif BIT64
-            *(long*)(dest + 16) = *(long*)(src + 16);
-            *(long*)(dest + 24) = *(long*)(src + 24); // [0,32]
-#else
-            *(int*)(dest + 16) = *(int*)(src + 16);
-            *(int*)(dest + 20) = *(int*)(src + 20);
-            *(int*)(dest + 24) = *(int*)(src + 24);
-            *(int*)(dest + 28) = *(int*)(src + 28); // [0,32]
-#endif
-            if (len <= 48) goto MCPY01;
-#if HAS_CUSTOM_BLOCKS
-            *(Block16*)(dest + 32) = *(Block16*)(src + 32); // [0,48]
-#elif BIT64
-            *(long*)(dest + 32) = *(long*)(src + 32);
-            *(long*)(dest + 40) = *(long*)(src + 40); // [0,48]
-#else
-            *(int*)(dest + 32) = *(int*)(src + 32);
-            *(int*)(dest + 36) = *(int*)(src + 36);
-            *(int*)(dest + 40) = *(int*)(src + 40);
-            *(int*)(dest + 44) = *(int*)(src + 44); // [0,48]
-#endif
-
-        MCPY01:
-            // Unconditionally copy the last 16 bytes using destEnd and srcEnd and return.
-            Debug.Assert(len > 16 && len <= 64);
-#if HAS_CUSTOM_BLOCKS
-            *(Block16*)(destEnd - 16) = *(Block16*)(srcEnd - 16);
-#elif BIT64
-            *(long*)(destEnd - 16) = *(long*)(srcEnd - 16);
-            *(long*)(destEnd - 8) = *(long*)(srcEnd - 8);
-#else
-            *(int*)(destEnd - 16) = *(int*)(srcEnd - 16);
-            *(int*)(destEnd - 12) = *(int*)(srcEnd - 12);
-            *(int*)(destEnd - 8) = *(int*)(srcEnd - 8);
-            *(int*)(destEnd - 4) = *(int*)(srcEnd - 4);
-#endif
-            return;
-
-        MCPY02:
-            // Copy the first 8 bytes and then unconditionally copy the last 8 bytes and return.
-            if ((len & 24) == 0) goto MCPY03;
-            Debug.Assert(len >= 8 && len <= 16);
-#if BIT64
-            *(long*)dest = *(long*)src;
-            *(long*)(destEnd - 8) = *(long*)(srcEnd - 8);
-#else
-            *(int*)dest = *(int*)src;
-            *(int*)(dest + 4) = *(int*)(src + 4);
-            *(int*)(destEnd - 8) = *(int*)(srcEnd - 8);
-            *(int*)(destEnd - 4) = *(int*)(srcEnd - 4);
-#endif
-            return;
-
-        MCPY03:
-            // Copy the first 4 bytes and then unconditionally copy the last 4 bytes and return.
-            if ((len & 4) == 0) goto MCPY04;
-            Debug.Assert(len >= 4 && len < 8);
-            *(int*)dest = *(int*)src;
-            *(int*)(destEnd - 4) = *(int*)(srcEnd - 4);
-            return;
-
-        MCPY04:
-            // Copy the first byte. For pending bytes, do an unconditional copy of the last 2 bytes and return.
-            Debug.Assert(len < 4);
-            if (len == 0) return;
-            *dest = *src;
-            if ((len & 2) == 0) return;
-            *(short*)(destEnd - 2) = *(short*)(srcEnd - 2);
-            return;
-
-        MCPY05:
-            // PInvoke to the native version when the copy length exceeds the threshold.
-            if (len > MemmoveNativeThreshold)
-            {
-                goto PInvoke;
-            }
-
-            // Copy 64 bytes at a time until the remainder is less than 64.
-            // If the remainder is greater than 16 bytes, jump to MCPY00. Otherwise, unconditionally copy the last 16 bytes and return.
-            Debug.Assert(len > 64 && len <= MemmoveNativeThreshold);
-            nuint n = len >> 6;
-
-        MCPY06:
-#if HAS_CUSTOM_BLOCKS
-            *(Block64*)dest = *(Block64*)src;
-#elif BIT64
-            *(long*)dest = *(long*)src;
-            *(long*)(dest + 8) = *(long*)(src + 8);
-            *(long*)(dest + 16) = *(long*)(src + 16);
-            *(long*)(dest + 24) = *(long*)(src + 24);
-            *(long*)(dest + 32) = *(long*)(src + 32);
-            *(long*)(dest + 40) = *(long*)(src + 40);
-            *(long*)(dest + 48) = *(long*)(src + 48);
-            *(long*)(dest + 56) = *(long*)(src + 56);
-#else
-            *(int*)dest = *(int*)src;
-            *(int*)(dest + 4) = *(int*)(src + 4);
-            *(int*)(dest + 8) = *(int*)(src + 8);
-            *(int*)(dest + 12) = *(int*)(src + 12);
-            *(int*)(dest + 16) = *(int*)(src + 16);
-            *(int*)(dest + 20) = *(int*)(src + 20);
-            *(int*)(dest + 24) = *(int*)(src + 24);
-            *(int*)(dest + 28) = *(int*)(src + 28);
-            *(int*)(dest + 32) = *(int*)(src + 32);
-            *(int*)(dest + 36) = *(int*)(src + 36);
-            *(int*)(dest + 40) = *(int*)(src + 40);
-            *(int*)(dest + 44) = *(int*)(src + 44);
-            *(int*)(dest + 48) = *(int*)(src + 48);
-            *(int*)(dest + 52) = *(int*)(src + 52);
-            *(int*)(dest + 56) = *(int*)(src + 56);
-            *(int*)(dest + 60) = *(int*)(src + 60);
-#endif
-            dest += 64;
-            src += 64;
-            n--;
-            if (n != 0) goto MCPY06;
-
-            len %= 64;
-            if (len > 16) goto MCPY00;
-#if HAS_CUSTOM_BLOCKS
-            *(Block16*)(destEnd - 16) = *(Block16*)(srcEnd - 16);
-#elif BIT64
-            *(long*)(destEnd - 16) = *(long*)(srcEnd - 16);
-            *(long*)(destEnd - 8) = *(long*)(srcEnd - 8);
-#else
-            *(int*)(destEnd - 16) = *(int*)(srcEnd - 16);
-            *(int*)(destEnd - 12) = *(int*)(srcEnd - 12);
-            *(int*)(destEnd - 8) = *(int*)(srcEnd - 8);
-            *(int*)(destEnd - 4) = *(int*)(srcEnd - 4);
-#endif
-            return;
-
-        PInvoke:
-            _Memmove(dest, src, len);
-        }
+            => Memmove(ref Unsafe.AsRef<byte>(dest), ref Unsafe.AsRef<byte>(src), len);

         // This method has a different signature for x64 and other platforms and is done for performance reasons.
         [MethodImpl(MethodImplOptions.AggressiveInlining)]
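The pointer overload above is now just a forwarder into the ref-based core via `Unsafe.AsRef<byte>`. A self-contained sketch of that pattern; `MemmoveCore` is a hypothetical stand-in for the real ref-based implementation, and its naive forward loop, unlike real memmove, does not handle overlap:

```csharp
// Sketch of the pointer-to-ref forwarding pattern.
using System.Runtime.CompilerServices;

static class ForwardingDemo
{
    // The unsafe entry point reinterprets its pointers as managed byte refs.
    public static unsafe void Memmove(byte* dest, byte* src, nuint len)
        => MemmoveCore(ref Unsafe.AsRef<byte>(dest), ref Unsafe.AsRef<byte>(src), len);

    // Illustrative stand-in for the ref-based core (forward copy only).
    private static void MemmoveCore(ref byte dest, ref byte src, nuint len)
    {
        for (nuint i = 0; i < len; i++)
            Unsafe.Add(ref dest, (nint)i) = Unsafe.Add(ref src, (nint)i);
    }
}
```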
@@ -358,58 +192,25 @@ private static void Memmove(ref byte dest, ref byte src, nuint len)
         MCPY00:
             // Copy bytes which are multiples of 16 and leave the remainder for MCPY01 to handle.
             Debug.Assert(len > 16 && len <= 64);
-#if HAS_CUSTOM_BLOCKS
+
             Unsafe.As<byte, Block16>(ref dest) = Unsafe.As<byte, Block16>(ref src); // [0,16]
-#elif BIT64
-            Unsafe.As<byte, long>(ref dest) = Unsafe.As<byte, long>(ref src);
-            Unsafe.As<byte, long>(ref Unsafe.Add(ref dest, 8)) = Unsafe.As<byte, long>(ref Unsafe.Add(ref src, 8)); // [0,16]
-#else
-            Unsafe.As<byte, int>(ref dest) = Unsafe.As<byte, int>(ref src);
-            Unsafe.As<byte, int>(ref Unsafe.Add(ref dest, 4)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref src, 4));
-            Unsafe.As<byte, int>(ref Unsafe.Add(ref dest, 8)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref src, 8));
-            Unsafe.As<byte, int>(ref Unsafe.Add(ref dest, 12)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref src, 12)); // [0,16]
-#endif
+
             if (len <= 32)
                 goto MCPY01;
-#if HAS_CUSTOM_BLOCKS
+
             Unsafe.As<byte, Block16>(ref Unsafe.Add(ref dest, 16)) = Unsafe.As<byte, Block16>(ref Unsafe.Add(ref src, 16)); // [0,32]
-#elif BIT64
-            Unsafe.As<byte, long>(ref Unsafe.Add(ref dest, 16)) = Unsafe.As<byte, long>(ref Unsafe.Add(ref src, 16));
-            Unsafe.As<byte, long>(ref Unsafe.Add(ref dest, 24)) = Unsafe.As<byte, long>(ref Unsafe.Add(ref src, 24)); // [0,32]
-#else
-            Unsafe.As<byte, int>(ref Unsafe.Add(ref dest, 16)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref src, 16));
-            Unsafe.As<byte, int>(ref Unsafe.Add(ref dest, 20)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref src, 20));
-            Unsafe.As<byte, int>(ref Unsafe.Add(ref dest, 24)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref src, 24));
-            Unsafe.As<byte, int>(ref Unsafe.Add(ref dest, 28)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref src, 28)); // [0,32]
-#endif
+
             if (len <= 48)
                 goto MCPY01;
-#if HAS_CUSTOM_BLOCKS
+
             Unsafe.As<byte, Block16>(ref Unsafe.Add(ref dest, 32)) = Unsafe.As<byte, Block16>(ref Unsafe.Add(ref src, 32)); // [0,48]
-#elif BIT64
-            Unsafe.As<byte, long>(ref Unsafe.Add(ref dest, 32)) = Unsafe.As<byte, long>(ref Unsafe.Add(ref src, 32));
-            Unsafe.As<byte, long>(ref Unsafe.Add(ref dest, 40)) = Unsafe.As<byte, long>(ref Unsafe.Add(ref src, 40)); // [0,48]
-#else
-            Unsafe.As<byte, int>(ref Unsafe.Add(ref dest, 32)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref src, 32));
-            Unsafe.As<byte, int>(ref Unsafe.Add(ref dest, 36)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref src, 36));
-            Unsafe.As<byte, int>(ref Unsafe.Add(ref dest, 40)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref src, 40));
-            Unsafe.As<byte, int>(ref Unsafe.Add(ref dest, 44)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref src, 44)); // [0,48]
-#endif

         MCPY01:
             // Unconditionally copy the last 16 bytes using destEnd and srcEnd and return.
             Debug.Assert(len > 16 && len <= 64);
-#if HAS_CUSTOM_BLOCKS
+
             Unsafe.As<byte, Block16>(ref Unsafe.Add(ref destEnd, -16)) = Unsafe.As<byte, Block16>(ref Unsafe.Add(ref srcEnd, -16));
-#elif BIT64
-            Unsafe.As<byte, long>(ref Unsafe.Add(ref destEnd, -16)) = Unsafe.As<byte, long>(ref Unsafe.Add(ref srcEnd, -16));
-            Unsafe.As<byte, long>(ref Unsafe.Add(ref destEnd, -8)) = Unsafe.As<byte, long>(ref Unsafe.Add(ref srcEnd, -8));
-#else
-            Unsafe.As<byte, int>(ref Unsafe.Add(ref destEnd, -16)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref srcEnd, -16));
-            Unsafe.As<byte, int>(ref Unsafe.Add(ref destEnd, -12)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref srcEnd, -12));
-            Unsafe.As<byte, int>(ref Unsafe.Add(ref destEnd, -8)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref srcEnd, -8));
-            Unsafe.As<byte, int>(ref Unsafe.Add(ref destEnd, -4)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref srcEnd, -4));
-#endif
+
             return;

         MCPY02:
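The MCPY00/MCPY01 pair above relies on overlapping stores: for any length in (16, 64], copy 16-byte blocks from the front, then unconditionally store the last 16 bytes anchored at the end, so no per-byte remainder loop is needed. A standalone sketch of that trick; `CopyMid` is a hypothetical helper and the public `Unsafe` package is assumed:

```csharp
// Sketch of the "unconditional tail" trick for lengths in (16, 64].
using System;
using System.Runtime.CompilerServices;
using System.Runtime.Intrinsics;

static class TailCopyDemo
{
    static void CopyMid(ref byte dest, ref byte src, nuint len)
    {
        if (len <= 16 || len > 64)
            throw new ArgumentOutOfRangeException(nameof(len));

        Unsafe.As<byte, Vector128<byte>>(ref dest) =
            Unsafe.As<byte, Vector128<byte>>(ref src);                          // bytes [0,16)
        if (len > 32)
            Unsafe.As<byte, Vector128<byte>>(ref Unsafe.Add(ref dest, 16)) =
                Unsafe.As<byte, Vector128<byte>>(ref Unsafe.Add(ref src, 16));  // bytes [16,32)
        if (len > 48)
            Unsafe.As<byte, Vector128<byte>>(ref Unsafe.Add(ref dest, 32)) =
                Unsafe.As<byte, Vector128<byte>>(ref Unsafe.Add(ref src, 32));  // bytes [32,48)

        // Unconditional tail anchored at the end; it overlaps the head stores.
        Unsafe.As<byte, Vector128<byte>>(ref Unsafe.Add(ref dest, (int)(len - 16))) =
            Unsafe.As<byte, Vector128<byte>>(ref Unsafe.Add(ref src, (int)(len - 16)));
    }

    static void Main()
    {
        byte[] a = new byte[50];
        byte[] b = new byte[50];
        for (int i = 0; i < a.Length; i++) a[i] = (byte)i;

        CopyMid(ref b[0], ref a[0], (nuint)a.Length);
        Console.WriteLine(b[49]); // 49
    }
}
```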
@@ -461,35 +262,9 @@ private static void Memmove(ref byte dest, ref byte src, nuint len)
             nuint n = len >> 6;

         MCPY06:
-#if HAS_CUSTOM_BLOCKS
-            Unsafe.As<byte, Block64>(ref dest) = Unsafe.As<byte, Block64>(ref src);
-#elif BIT64
-            Unsafe.As<byte, long>(ref dest) = Unsafe.As<byte, long>(ref src);
-            Unsafe.As<byte, long>(ref Unsafe.Add(ref dest, 8)) = Unsafe.As<byte, long>(ref Unsafe.Add(ref src, 8));
-            Unsafe.As<byte, long>(ref Unsafe.Add(ref dest, 16)) = Unsafe.As<byte, long>(ref Unsafe.Add(ref src, 16));
-            Unsafe.As<byte, long>(ref Unsafe.Add(ref dest, 24)) = Unsafe.As<byte, long>(ref Unsafe.Add(ref src, 24));
-            Unsafe.As<byte, long>(ref Unsafe.Add(ref dest, 32)) = Unsafe.As<byte, long>(ref Unsafe.Add(ref src, 32));
-            Unsafe.As<byte, long>(ref Unsafe.Add(ref dest, 40)) = Unsafe.As<byte, long>(ref Unsafe.Add(ref src, 40));
-            Unsafe.As<byte, long>(ref Unsafe.Add(ref dest, 48)) = Unsafe.As<byte, long>(ref Unsafe.Add(ref src, 48));
-            Unsafe.As<byte, long>(ref Unsafe.Add(ref dest, 56)) = Unsafe.As<byte, long>(ref Unsafe.Add(ref src, 56));
-#else
-            Unsafe.As<byte, int>(ref dest) = Unsafe.As<byte, int>(ref src);
-            Unsafe.As<byte, int>(ref Unsafe.Add(ref dest, 4)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref src, 4));
-            Unsafe.As<byte, int>(ref Unsafe.Add(ref dest, 8)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref src, 8));
-            Unsafe.As<byte, int>(ref Unsafe.Add(ref dest, 12)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref src, 12));
-            Unsafe.As<byte, int>(ref Unsafe.Add(ref dest, 16)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref src, 16));
-            Unsafe.As<byte, int>(ref Unsafe.Add(ref dest, 20)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref src, 20));
-            Unsafe.As<byte, int>(ref Unsafe.Add(ref dest, 24)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref src, 24));
-            Unsafe.As<byte, int>(ref Unsafe.Add(ref dest, 28)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref src, 28));
-            Unsafe.As<byte, int>(ref Unsafe.Add(ref dest, 32)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref src, 32));
-            Unsafe.As<byte, int>(ref Unsafe.Add(ref dest, 36)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref src, 36));
-            Unsafe.As<byte, int>(ref Unsafe.Add(ref dest, 40)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref src, 40));
-            Unsafe.As<byte, int>(ref Unsafe.Add(ref dest, 44)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref src, 44));
-            Unsafe.As<byte, int>(ref Unsafe.Add(ref dest, 48)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref src, 48));
-            Unsafe.As<byte, int>(ref Unsafe.Add(ref dest, 52)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref src, 52));
-            Unsafe.As<byte, int>(ref Unsafe.Add(ref dest, 56)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref src, 56));
-            Unsafe.As<byte, int>(ref Unsafe.Add(ref dest, 60)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref src, 60));
-#endif
+            Unsafe.As<byte, Block32>(ref dest) = Unsafe.As<byte, Block32>(ref src);
+            Unsafe.As<byte, Block32>(ref Unsafe.Add(ref dest, 32)) = Unsafe.As<byte, Block32>(ref Unsafe.Add(ref src, 32));
+
             dest = ref Unsafe.Add(ref dest, 64);
             src = ref Unsafe.Add(ref src, 64);
             n--;
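The MCPY06 loop body is now two `Vector256<byte>` stores per 64-byte block instead of a custom `Block64` struct copy. A sketch of the same loop shape in isolation; the remainder (`len % 64`) is left to the tail logic, as in the code above:

```csharp
// Sketch of the 64-bytes-per-iteration loop using two 32-byte vector stores.
using System.Runtime.CompilerServices;
using System.Runtime.Intrinsics;

static class Loop64Demo
{
    static void CopyBlocksOf64(ref byte dest, ref byte src, nuint len)
    {
        nuint n = len >> 6; // number of complete 64-byte blocks
        while (n != 0)
        {
            Unsafe.As<byte, Vector256<byte>>(ref dest) =
                Unsafe.As<byte, Vector256<byte>>(ref src);
            Unsafe.As<byte, Vector256<byte>>(ref Unsafe.Add(ref dest, 32)) =
                Unsafe.As<byte, Vector256<byte>>(ref Unsafe.Add(ref src, 32));

            // Ref reassignment (C# 7.3+) advances both cursors by one block.
            dest = ref Unsafe.Add(ref dest, 64);
            src = ref Unsafe.Add(ref src, 64);
            n--;
        }
    }
}
```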
@@ -499,17 +274,9 @@ private static void Memmove(ref byte dest, ref byte src, nuint len)
             len %= 64;
             if (len > 16)
                 goto MCPY00;
-#if HAS_CUSTOM_BLOCKS
+
             Unsafe.As<byte, Block16>(ref Unsafe.Add(ref destEnd, -16)) = Unsafe.As<byte, Block16>(ref Unsafe.Add(ref srcEnd, -16));
-#elif BIT64
-            Unsafe.As<byte, long>(ref Unsafe.Add(ref destEnd, -16)) = Unsafe.As<byte, long>(ref Unsafe.Add(ref srcEnd, -16));
-            Unsafe.As<byte, long>(ref Unsafe.Add(ref destEnd, -8)) = Unsafe.As<byte, long>(ref Unsafe.Add(ref srcEnd, -8));
-#else
-            Unsafe.As<byte, int>(ref Unsafe.Add(ref destEnd, -16)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref srcEnd, -16));
-            Unsafe.As<byte, int>(ref Unsafe.Add(ref destEnd, -12)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref srcEnd, -12));
-            Unsafe.As<byte, int>(ref Unsafe.Add(ref destEnd, -8)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref srcEnd, -8));
-            Unsafe.As<byte, int>(ref Unsafe.Add(ref destEnd, -4)) = Unsafe.As<byte, int>(ref Unsafe.Add(ref srcEnd, -4));
-#endif
+
             return;

         BuffersOverlap:
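The `BuffersOverlap` test this method branches on is the unsigned-wraparound trick also visible in the removed pointer version: with `nuint` arithmetic, `(nuint)dest - (nuint)src < len` is true exactly when `dest` lies in `[src, src + len)`, so checking both directions detects any intersection of the two ranges. A small self-contained illustration:

```csharp
// Sketch of the unsigned-wraparound overlap test (hypothetical helper).
using System;

static class OverlapDemo
{
    // True when [dest, dest+len) and [src, src+len) intersect (len > 0).
    // Unsigned subtraction wraps, so "(a - b) < len" means "a is in [b, b+len)".
    static unsafe bool Overlaps(byte* dest, byte* src, nuint len)
        => (nuint)dest - (nuint)src < len || (nuint)src - (nuint)dest < len;

    static unsafe void Main()
    {
        byte[] buf = new byte[32];
        fixed (byte* p = buf)
        {
            Console.WriteLine(Overlaps(p, p + 16, 16)); // False: ranges only touch
            Console.WriteLine(Overlaps(p, p + 8, 16));  // True: 8 bytes shared
        }
    }
}
```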
@@ -525,28 +292,20 @@ private static void Memmove(ref byte dest, ref byte src, nuint len)

         // Non-inlinable wrapper around the QCall that avoids polluting the fast path
         // with P/Invoke prolog/epilog.
-        [MethodImplAttribute(MethodImplOptions.NoInlining)]
+        [MethodImpl(MethodImplOptions.NoInlining)]
         private static unsafe void _Memmove(byte* dest, byte* src, nuint len)
         {
             __Memmove(dest, src, len);
         }

         // Non-inlinable wrapper around the QCall that avoids polluting the fast path
         // with P/Invoke prolog/epilog.
-        [MethodImplAttribute(MethodImplOptions.NoInlining)]
+        [MethodImpl(MethodImplOptions.NoInlining)]
         private static unsafe void _Memmove(ref byte dest, ref byte src, nuint len)
         {
             fixed (byte* pDest = &dest)
             fixed (byte* pSrc = &src)
                 __Memmove(pDest, pSrc, len);
         }
-
-#if HAS_CUSTOM_BLOCKS
-        [StructLayout(LayoutKind.Sequential, Size = 16)]
-        private struct Block16 { }
-
-        [StructLayout(LayoutKind.Sequential, Size = 64)]
-        private struct Block64 { }
-#endif // HAS_CUSTOM_BLOCKS
     }
 }
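On the `_Memmove` wrappers kept above: marking the slow path `[MethodImpl(MethodImplOptions.NoInlining)]` keeps the P/Invoke prolog/epilog out of callers that inline the fast path, and the ref-based overload pins its arguments with `fixed` before their addresses escape to native code. A sketch of the pattern; `NativeMemmove` is a hypothetical stand-in for the `__Memmove` QCall, and its simple forward loop does not handle overlapping buffers:

```csharp
// Sketch of the non-inlined slow-path wrapper with ref pinning.
using System.Runtime.CompilerServices;

static class SlowPathDemo
{
    [MethodImpl(MethodImplOptions.NoInlining)]
    public static unsafe void MemmoveSlow(ref byte dest, ref byte src, nuint len)
    {
        // `fixed` on a managed ref pins it for the duration of the call.
        fixed (byte* pDest = &dest)
        fixed (byte* pSrc = &src)
            NativeMemmove(pDest, pSrc, len);
    }

    // Illustrative stand-in for the native call (forward copy only).
    private static unsafe void NativeMemmove(byte* dest, byte* src, nuint len)
    {
        for (nuint i = 0; i < len; i++)
            dest[i] = src[i];
    }
}
```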