+// Copies count bytes from source to dest using unaligned SSE loads/stores\r
+// (movdqu), moving 128 bytes (eight 16-byte xmm transfers) per loop\r
+// iteration. MSVC x86-32 inline assembly; requires SSE2 at runtime.\r
+// Returns dest, matching the memcpy convention.\r
+//\r
+// NOTE(review): the loop executes (count >> 7) times, so the trailing\r
+// count % 128 bytes are silently NOT copied, and a nonzero count < 128\r
+// makes the dec/jnz counter wrap (ebx underflows from 0), copying far past\r
+// both buffers. This is only safe if every caller passes a count that is a\r
+// non-zero multiple of 128 -- TODO confirm at the call sites.\r
+static void* fast_memcpy_unaligned_impl(void* dest, const void* source, size_t count)\r
+{\r
+ // Null buffers are a caller bug, not a recoverable runtime condition.\r
+ CASPAR_ASSERT(dest != nullptr);\r
+ CASPAR_ASSERT(source != nullptr);\r
+\r
+ // Early-out: also keeps the dec/jnz loop below from running on a\r
+ // zero chunk count.\r
+ if(count == 0)\r
+ return dest;\r
+\r
+ __asm \r
+ { \r
+ // Per MSVC inline-asm rules, esi/edi/ebx may be used freely here;\r
+ // the compiler saves and restores them around the __asm block.\r
+ mov esi, source; \r
+ mov edi, dest; \r
+ mov ebx, count; \r
+ shr ebx, 7; // ebx = number of 128-byte chunks (count / 128)\r
+\r
+ cpy: \r
+ // Load the first 64 bytes of the chunk with unaligned 16-byte reads...\r
+ movdqu xmm0, [esi+00h]; \r
+ movdqu xmm1, [esi+10h]; \r
+ movdqu xmm2, [esi+20h]; \r
+ movdqu xmm3, [esi+30h]; \r
+\r
+ // ...then store them before touching the second half, so only four\r
+ // xmm registers are live per 64-byte half.\r
+ movdqu [edi+00h], xmm0;\r
+ movdqu [edi+10h], xmm1;\r
+ movdqu [edi+20h], xmm2; \r
+ movdqu [edi+30h], xmm3;\r
+\r
+ // Second 64 bytes of the 128-byte chunk.\r
+ movdqu xmm4, [esi+40h];\r
+ movdqu xmm5, [esi+50h];\r
+ movdqu xmm6, [esi+60h];\r
+ movdqu xmm7, [esi+70h]; \r
+\r
+ movdqu [edi+40h], xmm4; \r
+ movdqu [edi+50h], xmm5; \r
+ movdqu [edi+60h], xmm6; \r
+ movdqu [edi+70h], xmm7; \r
+\r
+ // Advance both pointers by one 128-byte chunk (lea avoids touching\r
+ // the flags consumed by dec/jnz below).\r
+ lea edi, [edi+80h]; \r
+ lea esi, [esi+80h]; \r
+\r
+ // NOTE(review): if ebx was already 0 here (count < 128), dec wraps\r
+ // to 0xFFFFFFFF and this loops ~4 billion times -- see header note.\r
+ dec ebx; \r
+ jnz cpy; \r
+ } \r
+ return dest;\r