From e512af2e4fd6e45c162b5f90e26a41988f065b79 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=E7=88=B1=E7=BC=96=E7=A8=8B=E7=9A=84=E5=8F=B6=E4=B8=80?= =?UTF-8?q?=E7=AC=91?= <92030377+love-code-yeyixiao@users.noreply.github.com>
Date: Fri, 20 Jun 2025 18:44:31 +0800
Subject: [PATCH 1/4] Enclave Support Structure

---
 ImBox/framework.h                         |   1 +
 ImBoxEnclave/EnclaveEntry.cpp             |  36 +++
 ImBoxEnclave/ImBoxEnclave.vcxproj         | 262 ++++++++++++++++++++++
 ImBoxEnclave/ImBoxEnclave.vcxproj.filters | 137 +++++++++++
 ImBoxEnclave/framework.h                  |   5 +
 ImBoxEnclave/pch.cpp                      |   5 +
 ImBoxEnclave/pch.h                        |  13 ++
 7 files changed, 459 insertions(+)
 create mode 100644 ImBoxEnclave/EnclaveEntry.cpp
 create mode 100644 ImBoxEnclave/ImBoxEnclave.vcxproj
 create mode 100644 ImBoxEnclave/ImBoxEnclave.vcxproj.filters
 create mode 100644 ImBoxEnclave/framework.h
 create mode 100644 ImBoxEnclave/pch.cpp
 create mode 100644 ImBoxEnclave/pch.h

diff --git a/ImBox/framework.h b/ImBox/framework.h
index ff67fa1..995103f 100644
--- a/ImBox/framework.h
+++ b/ImBox/framework.h
@@ -9,6 +9,7 @@
 #include
 
 #define WIN32_NO_STATUS
+#define ENCLAVE_ENABLED
 typedef long NTSTATUS;
 
 #define WIN32_LEAN_AND_MEAN             // Exclude rarely-used stuff from Windows headers

diff --git a/ImBoxEnclave/EnclaveEntry.cpp b/ImBoxEnclave/EnclaveEntry.cpp
new file mode 100644
index 0000000..7e5b76b
--- /dev/null
+++ b/ImBoxEnclave/EnclaveEntry.cpp
@@ -0,0 +1,36 @@
+// dllmain.cpp : Defines the entry point for the DLL application.
+#include "pch.h"
+
+const IMAGE_ENCLAVE_CONFIG __enclave_config = {
+	sizeof(IMAGE_ENCLAVE_CONFIG),
+	IMAGE_ENCLAVE_MINIMUM_CONFIG_SIZE,
+	//IMAGE_ENCLAVE_POLICY_DEBUGGABLE,	// DO NOT SHIP DEBUGGABLE ENCLAVES TO PRODUCTION
+	0,
+	0,
+	0,
+	0,
+	{ 0xFE, 0xFE },	// family id
+	{ 0x01, 0x01 },	// image id
+	0,	// version
+	0,	// SVN
+	0x10000000,	// size
+	16,	// number of threads
+	IMAGE_ENCLAVE_FLAG_PRIMARY_IMAGE
+};
+
+BOOL APIENTRY DllMain( HMODULE hModule,
+                       DWORD  ul_reason_for_call,
+                       LPVOID lpReserved
+                     )
+{
+    switch (ul_reason_for_call)
+    {
+    case DLL_PROCESS_ATTACH:
+    case DLL_THREAD_ATTACH:
+    case DLL_THREAD_DETACH:
+    case DLL_PROCESS_DETACH:
+        break;
+    }
+    return TRUE;
+}
+

diff --git a/ImBoxEnclave/ImBoxEnclave.vcxproj b/ImBoxEnclave/ImBoxEnclave.vcxproj
new file mode 100644
index 0000000..22ec8fc
--- /dev/null
+++ b/ImBoxEnclave/ImBoxEnclave.vcxproj
@@ -0,0 +1,262 @@
[XML markup stripped in extraction. Recoverable settings: Debug/Release configurations for Win32 and x64; Visual Studio 17.0 project, GUID {1b958da3-e81a-4cfa-9951-6ff4e23b4ad9}, root namespace ImBoxEnclave, Windows SDK 10.0, PlatformToolset v143; ConfigurationType DynamicLibrary, Unicode character set; Level3 warnings; preprocessor defines WIN32/_DEBUG/NDEBUG, IMBOXENCLAVE_EXPORTS, _WINDOWS, _USRDLL; precompiled header pch.h (pch.cpp set to Create); .asm sources built as CustomBuild/Document items via "$(ProjectDir)dc\tools\yasm\yasm.exe" -Xvc -f win32 -o "$(OutDir)obj\$(ProjectName)\%(Filename).obj" "%(FullPath)".]
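
[Editorial note, not part of the patch: the EnclaveSize field of __enclave_config above (0x10000000) must equal the dwSize that the host later passes to CreateEnclave, and a VBS enclave can only be created on a machine where the platform reports support. A minimal host-side probe as a sketch — IsEnclaveTypeSupported and ENCLAVE_TYPE_VBS are documented enclaveapi.h APIs; the helper name and constant are illustrative:]

#include <windows.h>
#include <enclaveapi.h>

// Hypothetical constant; must stay in sync with __enclave_config.EnclaveSize
// in EnclaveEntry.cpp.
constexpr SIZE_T kImBoxEnclaveSize = 0x10000000;

// Returns true when a VBS enclave can be created on this machine
// (virtualization-based security enabled and supported by the hypervisor).
inline bool CanUseVbsEnclave()
{
    return IsEnclaveTypeSupported(ENCLAVE_TYPE_VBS) != FALSE;
}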
diff --git a/ImBoxEnclave/ImBoxEnclave.vcxproj.filters b/ImBoxEnclave/ImBoxEnclave.vcxproj.filters
new file mode 100644
index 0000000..b6304f5
--- /dev/null
+++ b/ImBoxEnclave/ImBoxEnclave.vcxproj.filters
@@ -0,0 +1,137 @@
[XML markup stripped in extraction. Recoverable structure: the standard "Source Files" (源文件), "Header Files" (头文件) and "Resource Files" filters with their usual extension lists and GUIDs ({4FC737F1-...}, {93995380-...}, {67DA6AB6-...}), plus custom filters crypto_fast, crypto_fast\amd64 and crypto_fast\i386; the crypto_fast headers and sources are assigned to crypto_fast, and the amd64/i386 .asm files to their respective subfilters.]

diff --git a/ImBoxEnclave/framework.h b/ImBoxEnclave/framework.h
new file mode 100644
index 0000000..039da1e
--- /dev/null
+++ b/ImBoxEnclave/framework.h
@@ -0,0 +1,5 @@
+#pragma once
+
+// Windows Header Files
+#include <windows.h>
+#include <winenclave.h>
\ No newline at end of file

diff --git a/ImBoxEnclave/pch.cpp b/ImBoxEnclave/pch.cpp
new file mode 100644
index 0000000..b6fb8f4
--- /dev/null
+++ b/ImBoxEnclave/pch.cpp
@@ -0,0 +1,5 @@
+// pch.cpp: source file corresponding to the pre-compiled header
+
+#include "pch.h"
+
+// When you are using pre-compiled headers, this source file is necessary for compilation to succeed.

diff --git a/ImBoxEnclave/pch.h b/ImBoxEnclave/pch.h
new file mode 100644
index 0000000..9660927
--- /dev/null
+++ b/ImBoxEnclave/pch.h
@@ -0,0 +1,13 @@
+// pch.h: This is a precompiled header file.
+// Files listed below are compiled only once, improving build performance for future builds.
+// This also affects IntelliSense performance, including code completion and many code browsing features.
+// However, files listed here are ALL re-compiled if any one of them is updated between builds.
+// Do not add files here that you will be updating frequently as this negates the performance advantage.
+
+#ifndef PCH_H
+#define PCH_H
+
+// add headers that you want to pre-compile here
+#include "framework.h"
+
+#endif //PCH_H

From 29c62a2b7ddb45efcb46d2453777ed3bc50d19cd Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=E7=88=B1=E7=BC=96=E7=A8=8B=E7=9A=84=E5=8F=B6=E4=B8=80?= =?UTF-8?q?=E7=AC=91?= <92030377+love-code-yeyixiao@users.noreply.github.com>
Date: Fri, 20 Jun 2025 22:18:31 +0800
Subject: [PATCH 2/4] Update VBS Enclave
---
 ImBox/CryptoIO.cpp                            |   59 +-
 ImBox/CryptoIO.h                              |    4 +-
 ImBox/dc/crypto_fast/xts_fast.c               |    7 +
 ImBox/dc/crypto_fast/xts_fast.h               |    5 +-
 ImBox/framework.h                             |    6 +-
 ImBoxEnclave/EnclaveEntry.cpp                 |   74 +
 ImBoxEnclave/EnclaveEntry.h                   |    9 +
 ImBoxEnclave/ImBoxEnclave.vcxproj             |  140 +-
 ImBoxEnclave/crypto_fast/aes_asm.h            |   14 +
 ImBoxEnclave/crypto_fast/aes_key.c            |  642 +++
 ImBoxEnclave/crypto_fast/aes_key.h            |   19 +
 ImBoxEnclave/crypto_fast/aes_padlock.h        |   21 +
 ImBoxEnclave/crypto_fast/amd64/aes_amd64.asm  |  886 ++++
 .../crypto_fast/amd64/aes_padlock_amd64.asm   |   91 +
 .../crypto_fast/amd64/twofish_amd64.asm       |  320 ++
 .../crypto_fast/amd64/xts_aes_ni_amd64.asm    |  242 +
 .../amd64/xts_serpent_avx_amd64.asm           | 3966 ++++++++++++++
 .../amd64/xts_serpent_sse2_amd64.asm          | 4567 ++++++++++++++++
 ImBoxEnclave/crypto_fast/crc32.c              |   86 +
 ImBoxEnclave/crypto_fast/crc32.h              |    6 +
 ImBoxEnclave/crypto_fast/i386/aes_i386.asm    |  368 ++
 .../crypto_fast/i386/aes_padlock_i386.asm     |   93 +
 .../crypto_fast/i386/twofish_i386.asm         |  321 ++
 .../crypto_fast/i386/xts_aes_ni_i386.asm      |  206 +
 .../crypto_fast/i386/xts_serpent_avx_i386.asm | 4022 ++++++++++++++
 .../i386/xts_serpent_sse2_i386.asm            | 4609 +++++++++++++++++
 ImBoxEnclave/crypto_fast/serpent.c            |  424 ++
 ImBoxEnclave/crypto_fast/serpent.h            |   16 +
 ImBoxEnclave/crypto_fast/sha512.c             |  188 +
 ImBoxEnclave/crypto_fast/sha512.h             |   21 +
 ImBoxEnclave/crypto_fast/sha512_hmac.c        |   84 +
 ImBoxEnclave/crypto_fast/sha512_hmac.h        |   17 +
 ImBoxEnclave/crypto_fast/sha512_hmac_drbg.c   |  175 +
 ImBoxEnclave/crypto_fast/sha512_hmac_drbg.h   |   59 +
 ImBoxEnclave/crypto_fast/sha512_pkcs5_2.c     |   67 +
 ImBoxEnclave/crypto_fast/sha512_pkcs5_2.h     |    6 +
 ImBoxEnclave/crypto_fast/twofish.c            |  779 +++
 ImBoxEnclave/crypto_fast/twofish.h            |   20 +
 ImBoxEnclave/crypto_fast/xts_aes_ni.h         |    7 +
 ImBoxEnclave/crypto_fast/xts_fast.c           |  437 ++
 ImBoxEnclave/crypto_fast/xts_fast.h           |   66 +
 ImBoxEnclave/crypto_fast/xts_serpent_avx.h    |    8 +
 ImBoxEnclave/crypto_fast/xts_serpent_sse2.c   |  703 +++
 ImBoxEnclave/crypto_fast/xts_serpent_sse2.h   |    8 +
 44 files changed, 23760 insertions(+), 108 deletions(-)
 create mode 100644 ImBoxEnclave/EnclaveEntry.h
 create mode 100644 ImBoxEnclave/crypto_fast/aes_asm.h
 create mode 100644 ImBoxEnclave/crypto_fast/aes_key.c
 create mode 100644 ImBoxEnclave/crypto_fast/aes_key.h
 create mode 100644 ImBoxEnclave/crypto_fast/aes_padlock.h
 create mode 100644 ImBoxEnclave/crypto_fast/amd64/aes_amd64.asm
 create mode 100644 ImBoxEnclave/crypto_fast/amd64/aes_padlock_amd64.asm
 create mode 100644 ImBoxEnclave/crypto_fast/amd64/twofish_amd64.asm
 create mode 100644 ImBoxEnclave/crypto_fast/amd64/xts_aes_ni_amd64.asm
 create mode 100644 ImBoxEnclave/crypto_fast/amd64/xts_serpent_avx_amd64.asm
 create mode 100644 ImBoxEnclave/crypto_fast/amd64/xts_serpent_sse2_amd64.asm
 create mode 100644 ImBoxEnclave/crypto_fast/crc32.c
 create mode 100644 ImBoxEnclave/crypto_fast/crc32.h
 create mode 100644 ImBoxEnclave/crypto_fast/i386/aes_i386.asm
 create mode 100644 ImBoxEnclave/crypto_fast/i386/aes_padlock_i386.asm
 create mode 100644 ImBoxEnclave/crypto_fast/i386/twofish_i386.asm
 create mode 100644 ImBoxEnclave/crypto_fast/i386/xts_aes_ni_i386.asm
 create mode 100644 ImBoxEnclave/crypto_fast/i386/xts_serpent_avx_i386.asm
 create mode 100644 ImBoxEnclave/crypto_fast/i386/xts_serpent_sse2_i386.asm
 create mode 100644 ImBoxEnclave/crypto_fast/serpent.c
 create mode 100644 ImBoxEnclave/crypto_fast/serpent.h
 create mode 100644 ImBoxEnclave/crypto_fast/sha512.c
 create mode 100644 ImBoxEnclave/crypto_fast/sha512.h
 create mode 100644 ImBoxEnclave/crypto_fast/sha512_hmac.c
 create mode 100644 ImBoxEnclave/crypto_fast/sha512_hmac.h
 create mode 100644 ImBoxEnclave/crypto_fast/sha512_hmac_drbg.c
 create mode 100644 ImBoxEnclave/crypto_fast/sha512_hmac_drbg.h
 create mode 100644 ImBoxEnclave/crypto_fast/sha512_pkcs5_2.c
 create mode 100644 ImBoxEnclave/crypto_fast/sha512_pkcs5_2.h
 create mode 100644 ImBoxEnclave/crypto_fast/twofish.c
 create mode 100644 ImBoxEnclave/crypto_fast/twofish.h
 create mode 100644 ImBoxEnclave/crypto_fast/xts_aes_ni.h
 create mode 100644 ImBoxEnclave/crypto_fast/xts_fast.c
 create mode 100644 ImBoxEnclave/crypto_fast/xts_fast.h
 create mode 100644 ImBoxEnclave/crypto_fast/xts_serpent_avx.h
 create mode 100644 ImBoxEnclave/crypto_fast/xts_serpent_sse2.c
 create mode 100644 ImBoxEnclave/crypto_fast/xts_serpent_sse2.h

diff --git a/ImBox/CryptoIO.cpp b/ImBox/CryptoIO.cpp
index 5c522ef..38e4a36 100644
--- a/ImBox/CryptoIO.cpp
+++ b/ImBox/CryptoIO.cpp
@@ -75,13 +75,22 @@ CCryptoIO::CCryptoIO(CAbstractIO* pIO, const WCHAR* pKey, const std::wstring& Ci
 	m->Cipher = Cipher;
 	m->AllowFormat = false;
 
+#ifdef ENCLAVE_ENABLED
+	if (!IsEnclaveTypeSupported(ENCLAVE_TYPE_VBS))
+	{
+		OutputDebugString(L"Enclave not supported!\n");
+		ExitProcess(STATUS_NOT_SUPPORTED);
+	}
+#else
 	if (m->password) {
 		m->password->size = wcslen(pKey) * sizeof(wchar_t);
 		if (m->password->size > MAX_PASSWORD * sizeof(wchar_t)) m->password->size = MAX_PASSWORD * sizeof(wchar_t);
 		memcpy(m->password->pass, pKey, m->password->size);
 	}
-
+#endif // ENCLAVE_ENABLED
 	m->section = NULL;
 
 	m_pIO = pIO;
@@ -177,6 +186,48 @@ int CCryptoIO::InitCrypto()
 
 	int ret = dc_decrypt_header(header.ptr, m->password.ptr) ? ERR_OK : (m->AllowFormat ? ERR_INTERNAL : ERR_WRONG_PASSWORD);
 
+#ifdef ENCLAVE_ENABLED
+	// Create the enclave
+	if (ret == ERR_OK) {
+		constexpr ENCLAVE_CREATE_INFO_VBS CreateInfo
+		{
+			//ENCLAVE_VBS_FLAG_DEBUG,	// Flags
+			0,
+			{ 0x10, 0x22, 0x30, 0x45, 0x41, 0x37, 0x21, 0x13 },	// OwnerID
+		};
+		Enclave = CreateEnclave(GetCurrentProcess(),
+			nullptr,	// Preferred base address
+			0x10000000,	// size, must match __enclave_config.EnclaveSize
+			0,
+			ENCLAVE_TYPE_VBS,
+			&CreateInfo,
+			sizeof(ENCLAVE_CREATE_INFO_VBS),
+			nullptr);
+		if (Enclave == NULL) {
+			DbgPrint(L"CreateEnclave failed\n");
+			ret = ERR_INTERNAL;
+		}
+	}
+	if (ret == ERR_OK)
+		if (LoadEnclaveImageW(Enclave, L"ImBoxEnclave.dll") == FALSE)
+			ret = ERR_INTERNAL;
+	if (ret == ERR_OK) {
+		ENCLAVE_INIT_INFO_VBS InitInfo{};
+
+		InitInfo.Length = sizeof(ENCLAVE_INIT_INFO_VBS);
+		InitInfo.ThreadCount = 1;
+		if (InitializeEnclave(GetCurrentProcess(),
+			Enclave,
+			&InitInfo,
+			InitInfo.Length,
+			nullptr) == 0) {
+			ret = ERR_INTERNAL;
+		}
+	}
+#endif // ENCLAVE_ENABLED
+
 	if (ret == ERR_OK)
 	{
 		xts_set_key(header->key_1, header->alg_1, &m->benc_k);
@@ -202,6 +253,12 @@ int CCryptoIO::Init()
 		ret = InitCrypto();
 
 	m->password.free();
+#ifdef ENCLAVE_ENABLED
+	// Clear the key copies held on the host (external) side;
+	// the enclave keeps its own copy of the key material
+	memset(&m->benc_k, 0, sizeof(m->benc_k));
+	m->Cipher.clear();
+#endif
 
 	return ret;
 }

diff --git a/ImBox/CryptoIO.h b/ImBox/CryptoIO.h
index 4b3a9e8..ec63086 100644
--- a/ImBox/CryptoIO.h
+++ b/ImBox/CryptoIO.h
@@ -30,7 +30,9 @@ class CCryptoIO : public CAbstractIO
 protected:
 	virtual int InitCrypto();
 	virtual int WriteHeader(struct _dc_header* header);
-
+#ifdef ENCLAVE_ENABLED
+	PVOID Enclave = NULL;
+#endif
 	struct SCryptoIO* m;
 
 public:
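
[Editorial note, not part of the patch: the hunks above add the create → load → initialize steps but no call into the enclave and no teardown yet. The documented host-side sequence is CreateEnclave → LoadEnclaveImageW → InitializeEnclave → GetProcAddress/CallEnclave → TerminateEnclave → DeleteEnclave; exported enclave routines are resolved like ordinary DLL exports. A condensed sketch of the full round trip (error paths trimmed; "EnclaveSetKey" is the routine this patch adds to EnclaveEntry.cpp below):]

#include <windows.h>
#include <enclaveapi.h>

// Sketch: one round trip through a VBS enclave that exports EnclaveSetKey.
void* EnclaveRoundTrip(void* param)
{
    ENCLAVE_CREATE_INFO_VBS CreateInfo{ 0, { 0x10, 0x22, 0x30, 0x45, 0x41, 0x37, 0x21, 0x13 } };
    PVOID Enclave = CreateEnclave(GetCurrentProcess(), nullptr, 0x10000000, 0,
                                  ENCLAVE_TYPE_VBS, &CreateInfo, sizeof(CreateInfo), nullptr);
    if (Enclave == nullptr)
        return nullptr;

    ENCLAVE_INIT_INFO_VBS InitInfo{ sizeof(ENCLAVE_INIT_INFO_VBS), 1 };  // Length, ThreadCount
    void* Result = nullptr;
    if (LoadEnclaveImageW(Enclave, L"ImBoxEnclave.dll") &&
        InitializeEnclave(GetCurrentProcess(), Enclave, &InitInfo, InitInfo.Length, nullptr))
    {
        // The enclave base address doubles as a module handle for exports.
        auto Routine = (LPENCLAVE_ROUTINE)GetProcAddress((HMODULE)Enclave, "EnclaveSetKey");
        if (Routine)
            CallEnclave(Routine, param, TRUE, &Result);  // TRUE: wait for a free enclave thread
    }

    TerminateEnclave(Enclave, TRUE);  // stop all enclave threads
    DeleteEnclave(Enclave);           // release the enclave's address range
    return Result;
}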
diff --git a/ImBox/dc/crypto_fast/xts_fast.c b/ImBox/dc/crypto_fast/xts_fast.c
index c294607..1cdd449 100644
--- a/ImBox/dc/crypto_fast/xts_fast.c
+++ b/ImBox/dc/crypto_fast/xts_fast.c
@@ -16,6 +16,13 @@
     You should have received a copy of the GNU General Public License
     along with this program. If not, see <http://www.gnu.org/licenses/>.
 */
+
+//!!IMPORTANT!! Adjust this before publishing a non-enclave build
+#ifndef ENCLAVE_ENABLED
+#define ENCLAVE_ENABLED
+#endif // !ENCLAVE_ENABLED
+
+
 #include
 #include
 //#include

diff --git a/ImBox/dc/crypto_fast/xts_fast.h b/ImBox/dc/crypto_fast/xts_fast.h
index 39897a0..03c45f1 100644
--- a/ImBox/dc/crypto_fast/xts_fast.h
+++ b/ImBox/dc/crypto_fast/xts_fast.h
@@ -1,6 +1,9 @@
 #ifndef _XTS_FAST_H_
 #define _XTS_FAST_H_
 
+
+
+
 #include
 #include "aes_key.h"
 #include "twofish.h"
@@ -52,4 +55,4 @@ int _stdcall xts_aes_ni_available();
 extern void load_fpu_state(unsigned char state[32]);
 #endif
 
-#endif
\ No newline at end of file
+#endif

diff --git a/ImBox/framework.h b/ImBox/framework.h
index 995103f..e7ce30e 100644
--- a/ImBox/framework.h
+++ b/ImBox/framework.h
@@ -9,7 +9,6 @@
 #include
 
 #define WIN32_NO_STATUS
-#define ENCLAVE_ENABLED
 typedef long NTSTATUS;
 
 #define WIN32_LEAN_AND_MEAN             // Exclude rarely-used stuff from Windows headers
@@ -44,4 +43,7 @@ extern "C" {
 #include
 #include
 #include
-#include
\ No newline at end of file
+#include
+
+// Adjust this before publishing a non-enclave build
+#define ENCLAVE_ENABLED
\ No newline at end of file

diff --git a/ImBoxEnclave/EnclaveEntry.cpp b/ImBoxEnclave/EnclaveEntry.cpp
index 7e5b76b..bfeda4d 100644
--- a/ImBoxEnclave/EnclaveEntry.cpp
+++ b/ImBoxEnclave/EnclaveEntry.cpp
@@ -1,5 +1,15 @@
 // dllmain.cpp : Defines the entry point for the DLL application.
 #include "pch.h"
+#include "EnclaveEntry.h"
+
+void assert(VOID** obj) {
+	if (!obj) {
+		OutputDebugString(L"assert failed\n");
+		// __fastfail cannot be used inside an enclave
+		//__fastfail(1);
+		TerminateThread(GetCurrentThread(), 0);
+	}
+}
 
 const IMAGE_ENCLAVE_CONFIG __enclave_config = {
 	sizeof(IMAGE_ENCLAVE_CONFIG),
@@ -18,6 +28,70 @@ const IMAGE_ENCLAVE_CONFIG __enclave_config = {
 	IMAGE_ENCLAVE_FLAG_PRIMARY_IMAGE
 };
 
+
+
+template <class T>
+struct SSecureBuffer
+{
+	SSecureBuffer() { alloc(sizeof(T)); }
+	SSecureBuffer(ULONG length) { alloc(length); }
+	~SSecureBuffer() { free(); }
+
+	void alloc(ULONG length)
+	{
+		// on 32 bit systems the xts_key must be located in executable memory,
+		// x64 does not require this
+#ifdef _M_IX86
+		ptr = (T*)VirtualAlloc(NULL, length, MEM_COMMIT | MEM_RESERVE, PAGE_EXECUTE_READWRITE);
+#else
+		ptr = (T*)VirtualAlloc(NULL, length, MEM_COMMIT | MEM_RESERVE, PAGE_READWRITE);
+#endif
+		// We assume memory in the enclave will not be swapped anyway
+		//if (ptr)
+		//	VirtualLock(ptr, length);
+	}
+
+	void free()
+	{
+		if (!ptr)
+			return;
+
+		MEMORY_BASIC_INFORMATION mbi;
+		if ((VirtualQuery(ptr, &mbi, sizeof(mbi)) == sizeof(mbi) && mbi.BaseAddress == ptr && mbi.AllocationBase == ptr))
+		{
+			//RtlSecureZeroMemory(ptr, mbi.RegionSize);
+			// Only a restricted API set is available inside an enclave
+			memset(ptr, 0, mbi.RegionSize);
+
+			//VirtualUnlock(ptr, mbi.RegionSize);
+		}
+		VirtualFree(ptr, 0, MEM_RELEASE);
+
+		ptr = NULL;
+	}
+
+	T* operator ->() { return ptr; }
+	explicit operator bool() { return ptr != NULL; }
+
+	T* ptr;
+};
+
+
+
+void*
+CALLBACK
+EnclaveSetKey(
+	_In_ void* Context
+)
+{
+	WCHAR String[32];
+	swprintf_s(String, ARRAYSIZE(String), L"%s\n", L"EnclaveSetKey started");
+	OutputDebugStringW(String);
+
+	return (void*)((ULONG_PTR)(Context) ^ InitialCookie);
+}
+
 BOOL APIENTRY DllMain( HMODULE hModule,
                        DWORD ul_reason_for_call,
                        LPVOID lpReserved

diff --git a/ImBoxEnclave/EnclaveEntry.h b/ImBoxEnclave/EnclaveEntry.h
new file mode 100644
index 0000000..f68c238
--- /dev/null
+++ b/ImBoxEnclave/EnclaveEntry.h
@@ -0,0 +1,9 @@
+#pragma once
+#include ".\crypto_fast\xts_fast.h"
+#include ".\crypto_fast\aes_asm.h"
+
+typedef struct KeySetArgs {
+	const unsigned char* key;
+	int alg;
+	xts_key* skey;
+} KeySetArgs;
\ No newline at end of file
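
[Editorial note, not part of the patch: KeySetArgs above is the payload the host would hand to EnclaveSetKey. CallEnclave carries exactly one void* across the trust boundary; since a VBS enclave may read the host's address space, a pointer to a host-side struct works, and the enclave should copy the key into enclave-private memory (e.g. an SSecureBuffer) before returning. A hypothetical host-side call, reusing the Enclave handle and export-resolution pattern sketched earlier — the field semantics here are an assumption, not defined by this patch:]

// Host side (hypothetical): push the volume-header key into the enclave.
KeySetArgs args{};
args.key  = header->key_1;   // key material from the decrypted dc_header
args.alg  = header->alg_1;   // cipher chain id
args.skey = nullptr;         // enclave-owned xts_key; the host never sees it

void* Result = nullptr;
auto SetKey = (LPENCLAVE_ROUTINE)GetProcAddress((HMODULE)Enclave, "EnclaveSetKey");
if (SetKey == nullptr || !CallEnclave(SetKey, &args, TRUE, &Result))
    ret = ERR_INTERNAL;      // the key never reached the enclave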
+#include ".\crypto_fast\xts_fast.h" +#include ".\crypto_fast\aes_asm.h" + +typedef struct KeySetArgs { + const unsigned char* key; + int alg; + xts_key* skey; +} KeySetArgs; \ No newline at end of file diff --git a/ImBoxEnclave/ImBoxEnclave.vcxproj b/ImBoxEnclave/ImBoxEnclave.vcxproj index 22ec8fc..74bb52b 100644 --- a/ImBoxEnclave/ImBoxEnclave.vcxproj +++ b/ImBoxEnclave/ImBoxEnclave.vcxproj @@ -139,44 +139,35 @@ - - - - - - - - - - - - - - - true - true - true - true - + + + + + + + + + + + + + + + - - - - - - - - - - - true - true - true - true - + + + + + + + + + + Create @@ -186,75 +177,18 @@ - - true - true - true - true - Document - - - - - true - true - true - true - Document - - - - - true - true - true - true - Document - - - - - true - true - true - true - Document - - - - - true - true - true - true - Document - - - - - true - true - true - true - Document - - - - - false - false - false - false - Document - "$(ProjectDir)dc\tools\yasm\yasm.exe" -Xvc -f win32 -o "$(OutDir)obj\$(ProjectName)\%(Filename).obj" "%(FullPath)" - "$(ProjectDir)dc\tools\yasm\yasm.exe" -Xvc -f win32 -o "$(OutDir)obj\$(ProjectName)\%(Filename).obj" "%(FullPath)" - "$(ProjectDir)dc\tools\yasm\yasm.exe" -Xvc -f win32 -o "$(OutDir)obj\$(ProjectName)\%(Filename).obj" "%(FullPath)" - "$(ProjectDir)dc\tools\yasm\yasm.exe" -Xvc -f win32 -o "$(OutDir)obj\$(ProjectName)\%(Filename).obj" "%(FullPath)" - $(OutDir)obj\$(ProjectName)\%(Filename).obj - $(OutDir)obj\$(ProjectName)\%(Filename).obj - $(OutDir)obj\$(ProjectName)\%(Filename).obj - $(OutDir)obj\$(ProjectName)\%(Filename).obj - + + + + + + + + + + + + diff --git a/ImBoxEnclave/crypto_fast/aes_asm.h b/ImBoxEnclave/crypto_fast/aes_asm.h new file mode 100644 index 0000000..ed92e2e --- /dev/null +++ b/ImBoxEnclave/crypto_fast/aes_asm.h @@ -0,0 +1,14 @@ +#ifndef _AES_ASM_H_ +#define _AES_ASM_H_ + +#include "aes_key.h" + +#ifdef _M_IX86 + void _stdcall aes256_asm_set_key(const unsigned char *key, aes256_key *skey); +#else + #define aes256_asm_set_key aes256_set_key +#endif +void _stdcall aes256_asm_encrypt(const unsigned char *in, unsigned char *out, aes256_key *key); +void _stdcall aes256_asm_decrypt(const unsigned char *in, unsigned char *out, aes256_key *key); + +#endif \ No newline at end of file diff --git a/ImBoxEnclave/crypto_fast/aes_key.c b/ImBoxEnclave/crypto_fast/aes_key.c new file mode 100644 index 0000000..9cd709a --- /dev/null +++ b/ImBoxEnclave/crypto_fast/aes_key.c @@ -0,0 +1,642 @@ +/* + * + * DiskCryptor - open source partition encryption tool + * Copyright (c) 2007-2012 + * ntldr PGP key ID - 0x1B6A24550F33E44A + * based on rijndael-alg-fst.c + * @author Vincent Rijmen + * @author Antoon Bosselaers + * @author Paulo Barreto + * + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License version 3 as + published by the Free Software Foundation. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program. If not, see . 
+*/ +#include +#include +#include "aes_key.h" + +__declspec(align(16)) const unsigned long Te0[256] = { + 0xa56363c6, 0x847c7cf8, 0x997777ee, 0x8d7b7bf6, 0x0df2f2ff, 0xbd6b6bd6, 0xb16f6fde, 0x54c5c591, + 0x50303060, 0x03010102, 0xa96767ce, 0x7d2b2b56, 0x19fefee7, 0x62d7d7b5, 0xe6abab4d, 0x9a7676ec, + 0x45caca8f, 0x9d82821f, 0x40c9c989, 0x877d7dfa, 0x15fafaef, 0xeb5959b2, 0xc947478e, 0x0bf0f0fb, + 0xecadad41, 0x67d4d4b3, 0xfda2a25f, 0xeaafaf45, 0xbf9c9c23, 0xf7a4a453, 0x967272e4, 0x5bc0c09b, + 0xc2b7b775, 0x1cfdfde1, 0xae93933d, 0x6a26264c, 0x5a36366c, 0x413f3f7e, 0x02f7f7f5, 0x4fcccc83, + 0x5c343468, 0xf4a5a551, 0x34e5e5d1, 0x08f1f1f9, 0x937171e2, 0x73d8d8ab, 0x53313162, 0x3f15152a, + 0x0c040408, 0x52c7c795, 0x65232346, 0x5ec3c39d, 0x28181830, 0xa1969637, 0x0f05050a, 0xb59a9a2f, + 0x0907070e, 0x36121224, 0x9b80801b, 0x3de2e2df, 0x26ebebcd, 0x6927274e, 0xcdb2b27f, 0x9f7575ea, + 0x1b090912, 0x9e83831d, 0x742c2c58, 0x2e1a1a34, 0x2d1b1b36, 0xb26e6edc, 0xee5a5ab4, 0xfba0a05b, + 0xf65252a4, 0x4d3b3b76, 0x61d6d6b7, 0xceb3b37d, 0x7b292952, 0x3ee3e3dd, 0x712f2f5e, 0x97848413, + 0xf55353a6, 0x68d1d1b9, 0x00000000, 0x2cededc1, 0x60202040, 0x1ffcfce3, 0xc8b1b179, 0xed5b5bb6, + 0xbe6a6ad4, 0x46cbcb8d, 0xd9bebe67, 0x4b393972, 0xde4a4a94, 0xd44c4c98, 0xe85858b0, 0x4acfcf85, + 0x6bd0d0bb, 0x2aefefc5, 0xe5aaaa4f, 0x16fbfbed, 0xc5434386, 0xd74d4d9a, 0x55333366, 0x94858511, + 0xcf45458a, 0x10f9f9e9, 0x06020204, 0x817f7ffe, 0xf05050a0, 0x443c3c78, 0xba9f9f25, 0xe3a8a84b, + 0xf35151a2, 0xfea3a35d, 0xc0404080, 0x8a8f8f05, 0xad92923f, 0xbc9d9d21, 0x48383870, 0x04f5f5f1, + 0xdfbcbc63, 0xc1b6b677, 0x75dadaaf, 0x63212142, 0x30101020, 0x1affffe5, 0x0ef3f3fd, 0x6dd2d2bf, + 0x4ccdcd81, 0x140c0c18, 0x35131326, 0x2fececc3, 0xe15f5fbe, 0xa2979735, 0xcc444488, 0x3917172e, + 0x57c4c493, 0xf2a7a755, 0x827e7efc, 0x473d3d7a, 0xac6464c8, 0xe75d5dba, 0x2b191932, 0x957373e6, + 0xa06060c0, 0x98818119, 0xd14f4f9e, 0x7fdcdca3, 0x66222244, 0x7e2a2a54, 0xab90903b, 0x8388880b, + 0xca46468c, 0x29eeeec7, 0xd3b8b86b, 0x3c141428, 0x79dedea7, 0xe25e5ebc, 0x1d0b0b16, 0x76dbdbad, + 0x3be0e0db, 0x56323264, 0x4e3a3a74, 0x1e0a0a14, 0xdb494992, 0x0a06060c, 0x6c242448, 0xe45c5cb8, + 0x5dc2c29f, 0x6ed3d3bd, 0xefacac43, 0xa66262c4, 0xa8919139, 0xa4959531, 0x37e4e4d3, 0x8b7979f2, + 0x32e7e7d5, 0x43c8c88b, 0x5937376e, 0xb76d6dda, 0x8c8d8d01, 0x64d5d5b1, 0xd24e4e9c, 0xe0a9a949, + 0xb46c6cd8, 0xfa5656ac, 0x07f4f4f3, 0x25eaeacf, 0xaf6565ca, 0x8e7a7af4, 0xe9aeae47, 0x18080810, + 0xd5baba6f, 0x887878f0, 0x6f25254a, 0x722e2e5c, 0x241c1c38, 0xf1a6a657, 0xc7b4b473, 0x51c6c697, + 0x23e8e8cb, 0x7cdddda1, 0x9c7474e8, 0x211f1f3e, 0xdd4b4b96, 0xdcbdbd61, 0x868b8b0d, 0x858a8a0f, + 0x907070e0, 0x423e3e7c, 0xc4b5b571, 0xaa6666cc, 0xd8484890, 0x05030306, 0x01f6f6f7, 0x120e0e1c, + 0xa36161c2, 0x5f35356a, 0xf95757ae, 0xd0b9b969, 0x91868617, 0x58c1c199, 0x271d1d3a, 0xb99e9e27, + 0x38e1e1d9, 0x13f8f8eb, 0xb398982b, 0x33111122, 0xbb6969d2, 0x70d9d9a9, 0x898e8e07, 0xa7949433, + 0xb69b9b2d, 0x221e1e3c, 0x92878715, 0x20e9e9c9, 0x49cece87, 0xff5555aa, 0x78282850, 0x7adfdfa5, + 0x8f8c8c03, 0xf8a1a159, 0x80898909, 0x170d0d1a, 0xdabfbf65, 0x31e6e6d7, 0xc6424284, 0xb86868d0, + 0xc3414182, 0xb0999929, 0x772d2d5a, 0x110f0f1e, 0xcbb0b07b, 0xfc5454a8, 0xd6bbbb6d, 0x3a16162c +}; + +__declspec(align(16)) const unsigned long Te1[256] = { + 0x6363c6a5, 0x7c7cf884, 0x7777ee99, 0x7b7bf68d, 0xf2f2ff0d, 0x6b6bd6bd, 0x6f6fdeb1, 0xc5c59154, + 0x30306050, 0x01010203, 0x6767cea9, 0x2b2b567d, 0xfefee719, 0xd7d7b562, 0xabab4de6, 0x7676ec9a, + 0xcaca8f45, 0x82821f9d, 0xc9c98940, 0x7d7dfa87, 
0xfafaef15, 0x5959b2eb, 0x47478ec9, 0xf0f0fb0b, + 0xadad41ec, 0xd4d4b367, 0xa2a25ffd, 0xafaf45ea, 0x9c9c23bf, 0xa4a453f7, 0x7272e496, 0xc0c09b5b, + 0xb7b775c2, 0xfdfde11c, 0x93933dae, 0x26264c6a, 0x36366c5a, 0x3f3f7e41, 0xf7f7f502, 0xcccc834f, + 0x3434685c, 0xa5a551f4, 0xe5e5d134, 0xf1f1f908, 0x7171e293, 0xd8d8ab73, 0x31316253, 0x15152a3f, + 0x0404080c, 0xc7c79552, 0x23234665, 0xc3c39d5e, 0x18183028, 0x969637a1, 0x05050a0f, 0x9a9a2fb5, + 0x07070e09, 0x12122436, 0x80801b9b, 0xe2e2df3d, 0xebebcd26, 0x27274e69, 0xb2b27fcd, 0x7575ea9f, + 0x0909121b, 0x83831d9e, 0x2c2c5874, 0x1a1a342e, 0x1b1b362d, 0x6e6edcb2, 0x5a5ab4ee, 0xa0a05bfb, + 0x5252a4f6, 0x3b3b764d, 0xd6d6b761, 0xb3b37dce, 0x2929527b, 0xe3e3dd3e, 0x2f2f5e71, 0x84841397, + 0x5353a6f5, 0xd1d1b968, 0x00000000, 0xededc12c, 0x20204060, 0xfcfce31f, 0xb1b179c8, 0x5b5bb6ed, + 0x6a6ad4be, 0xcbcb8d46, 0xbebe67d9, 0x3939724b, 0x4a4a94de, 0x4c4c98d4, 0x5858b0e8, 0xcfcf854a, + 0xd0d0bb6b, 0xefefc52a, 0xaaaa4fe5, 0xfbfbed16, 0x434386c5, 0x4d4d9ad7, 0x33336655, 0x85851194, + 0x45458acf, 0xf9f9e910, 0x02020406, 0x7f7ffe81, 0x5050a0f0, 0x3c3c7844, 0x9f9f25ba, 0xa8a84be3, + 0x5151a2f3, 0xa3a35dfe, 0x404080c0, 0x8f8f058a, 0x92923fad, 0x9d9d21bc, 0x38387048, 0xf5f5f104, + 0xbcbc63df, 0xb6b677c1, 0xdadaaf75, 0x21214263, 0x10102030, 0xffffe51a, 0xf3f3fd0e, 0xd2d2bf6d, + 0xcdcd814c, 0x0c0c1814, 0x13132635, 0xececc32f, 0x5f5fbee1, 0x979735a2, 0x444488cc, 0x17172e39, + 0xc4c49357, 0xa7a755f2, 0x7e7efc82, 0x3d3d7a47, 0x6464c8ac, 0x5d5dbae7, 0x1919322b, 0x7373e695, + 0x6060c0a0, 0x81811998, 0x4f4f9ed1, 0xdcdca37f, 0x22224466, 0x2a2a547e, 0x90903bab, 0x88880b83, + 0x46468cca, 0xeeeec729, 0xb8b86bd3, 0x1414283c, 0xdedea779, 0x5e5ebce2, 0x0b0b161d, 0xdbdbad76, + 0xe0e0db3b, 0x32326456, 0x3a3a744e, 0x0a0a141e, 0x494992db, 0x06060c0a, 0x2424486c, 0x5c5cb8e4, + 0xc2c29f5d, 0xd3d3bd6e, 0xacac43ef, 0x6262c4a6, 0x919139a8, 0x959531a4, 0xe4e4d337, 0x7979f28b, + 0xe7e7d532, 0xc8c88b43, 0x37376e59, 0x6d6ddab7, 0x8d8d018c, 0xd5d5b164, 0x4e4e9cd2, 0xa9a949e0, + 0x6c6cd8b4, 0x5656acfa, 0xf4f4f307, 0xeaeacf25, 0x6565caaf, 0x7a7af48e, 0xaeae47e9, 0x08081018, + 0xbaba6fd5, 0x7878f088, 0x25254a6f, 0x2e2e5c72, 0x1c1c3824, 0xa6a657f1, 0xb4b473c7, 0xc6c69751, + 0xe8e8cb23, 0xdddda17c, 0x7474e89c, 0x1f1f3e21, 0x4b4b96dd, 0xbdbd61dc, 0x8b8b0d86, 0x8a8a0f85, + 0x7070e090, 0x3e3e7c42, 0xb5b571c4, 0x6666ccaa, 0x484890d8, 0x03030605, 0xf6f6f701, 0x0e0e1c12, + 0x6161c2a3, 0x35356a5f, 0x5757aef9, 0xb9b969d0, 0x86861791, 0xc1c19958, 0x1d1d3a27, 0x9e9e27b9, + 0xe1e1d938, 0xf8f8eb13, 0x98982bb3, 0x11112233, 0x6969d2bb, 0xd9d9a970, 0x8e8e0789, 0x949433a7, + 0x9b9b2db6, 0x1e1e3c22, 0x87871592, 0xe9e9c920, 0xcece8749, 0x5555aaff, 0x28285078, 0xdfdfa57a, + 0x8c8c038f, 0xa1a159f8, 0x89890980, 0x0d0d1a17, 0xbfbf65da, 0xe6e6d731, 0x424284c6, 0x6868d0b8, + 0x414182c3, 0x999929b0, 0x2d2d5a77, 0x0f0f1e11, 0xb0b07bcb, 0x5454a8fc, 0xbbbb6dd6, 0x16162c3a +}; + +__declspec(align(16)) const unsigned long Te2[256] = { + 0x63c6a563, 0x7cf8847c, 0x77ee9977, 0x7bf68d7b, 0xf2ff0df2, 0x6bd6bd6b, 0x6fdeb16f, 0xc59154c5, + 0x30605030, 0x01020301, 0x67cea967, 0x2b567d2b, 0xfee719fe, 0xd7b562d7, 0xab4de6ab, 0x76ec9a76, + 0xca8f45ca, 0x821f9d82, 0xc98940c9, 0x7dfa877d, 0xfaef15fa, 0x59b2eb59, 0x478ec947, 0xf0fb0bf0, + 0xad41ecad, 0xd4b367d4, 0xa25ffda2, 0xaf45eaaf, 0x9c23bf9c, 0xa453f7a4, 0x72e49672, 0xc09b5bc0, + 0xb775c2b7, 0xfde11cfd, 0x933dae93, 0x264c6a26, 0x366c5a36, 0x3f7e413f, 0xf7f502f7, 0xcc834fcc, + 0x34685c34, 0xa551f4a5, 0xe5d134e5, 0xf1f908f1, 0x71e29371, 0xd8ab73d8, 0x31625331, 0x152a3f15, + 0x04080c04, 
0xc79552c7, 0x23466523, 0xc39d5ec3, 0x18302818, 0x9637a196, 0x050a0f05, 0x9a2fb59a, + 0x070e0907, 0x12243612, 0x801b9b80, 0xe2df3de2, 0xebcd26eb, 0x274e6927, 0xb27fcdb2, 0x75ea9f75, + 0x09121b09, 0x831d9e83, 0x2c58742c, 0x1a342e1a, 0x1b362d1b, 0x6edcb26e, 0x5ab4ee5a, 0xa05bfba0, + 0x52a4f652, 0x3b764d3b, 0xd6b761d6, 0xb37dceb3, 0x29527b29, 0xe3dd3ee3, 0x2f5e712f, 0x84139784, + 0x53a6f553, 0xd1b968d1, 0x00000000, 0xedc12ced, 0x20406020, 0xfce31ffc, 0xb179c8b1, 0x5bb6ed5b, + 0x6ad4be6a, 0xcb8d46cb, 0xbe67d9be, 0x39724b39, 0x4a94de4a, 0x4c98d44c, 0x58b0e858, 0xcf854acf, + 0xd0bb6bd0, 0xefc52aef, 0xaa4fe5aa, 0xfbed16fb, 0x4386c543, 0x4d9ad74d, 0x33665533, 0x85119485, + 0x458acf45, 0xf9e910f9, 0x02040602, 0x7ffe817f, 0x50a0f050, 0x3c78443c, 0x9f25ba9f, 0xa84be3a8, + 0x51a2f351, 0xa35dfea3, 0x4080c040, 0x8f058a8f, 0x923fad92, 0x9d21bc9d, 0x38704838, 0xf5f104f5, + 0xbc63dfbc, 0xb677c1b6, 0xdaaf75da, 0x21426321, 0x10203010, 0xffe51aff, 0xf3fd0ef3, 0xd2bf6dd2, + 0xcd814ccd, 0x0c18140c, 0x13263513, 0xecc32fec, 0x5fbee15f, 0x9735a297, 0x4488cc44, 0x172e3917, + 0xc49357c4, 0xa755f2a7, 0x7efc827e, 0x3d7a473d, 0x64c8ac64, 0x5dbae75d, 0x19322b19, 0x73e69573, + 0x60c0a060, 0x81199881, 0x4f9ed14f, 0xdca37fdc, 0x22446622, 0x2a547e2a, 0x903bab90, 0x880b8388, + 0x468cca46, 0xeec729ee, 0xb86bd3b8, 0x14283c14, 0xdea779de, 0x5ebce25e, 0x0b161d0b, 0xdbad76db, + 0xe0db3be0, 0x32645632, 0x3a744e3a, 0x0a141e0a, 0x4992db49, 0x060c0a06, 0x24486c24, 0x5cb8e45c, + 0xc29f5dc2, 0xd3bd6ed3, 0xac43efac, 0x62c4a662, 0x9139a891, 0x9531a495, 0xe4d337e4, 0x79f28b79, + 0xe7d532e7, 0xc88b43c8, 0x376e5937, 0x6ddab76d, 0x8d018c8d, 0xd5b164d5, 0x4e9cd24e, 0xa949e0a9, + 0x6cd8b46c, 0x56acfa56, 0xf4f307f4, 0xeacf25ea, 0x65caaf65, 0x7af48e7a, 0xae47e9ae, 0x08101808, + 0xba6fd5ba, 0x78f08878, 0x254a6f25, 0x2e5c722e, 0x1c38241c, 0xa657f1a6, 0xb473c7b4, 0xc69751c6, + 0xe8cb23e8, 0xdda17cdd, 0x74e89c74, 0x1f3e211f, 0x4b96dd4b, 0xbd61dcbd, 0x8b0d868b, 0x8a0f858a, + 0x70e09070, 0x3e7c423e, 0xb571c4b5, 0x66ccaa66, 0x4890d848, 0x03060503, 0xf6f701f6, 0x0e1c120e, + 0x61c2a361, 0x356a5f35, 0x57aef957, 0xb969d0b9, 0x86179186, 0xc19958c1, 0x1d3a271d, 0x9e27b99e, + 0xe1d938e1, 0xf8eb13f8, 0x982bb398, 0x11223311, 0x69d2bb69, 0xd9a970d9, 0x8e07898e, 0x9433a794, + 0x9b2db69b, 0x1e3c221e, 0x87159287, 0xe9c920e9, 0xce8749ce, 0x55aaff55, 0x28507828, 0xdfa57adf, + 0x8c038f8c, 0xa159f8a1, 0x89098089, 0x0d1a170d, 0xbf65dabf, 0xe6d731e6, 0x4284c642, 0x68d0b868, + 0x4182c341, 0x9929b099, 0x2d5a772d, 0x0f1e110f, 0xb07bcbb0, 0x54a8fc54, 0xbb6dd6bb, 0x162c3a16 +}; + +__declspec(align(16)) const unsigned long Te3[256] = { + 0xc6a56363, 0xf8847c7c, 0xee997777, 0xf68d7b7b, 0xff0df2f2, 0xd6bd6b6b, 0xdeb16f6f, 0x9154c5c5, + 0x60503030, 0x02030101, 0xcea96767, 0x567d2b2b, 0xe719fefe, 0xb562d7d7, 0x4de6abab, 0xec9a7676, + 0x8f45caca, 0x1f9d8282, 0x8940c9c9, 0xfa877d7d, 0xef15fafa, 0xb2eb5959, 0x8ec94747, 0xfb0bf0f0, + 0x41ecadad, 0xb367d4d4, 0x5ffda2a2, 0x45eaafaf, 0x23bf9c9c, 0x53f7a4a4, 0xe4967272, 0x9b5bc0c0, + 0x75c2b7b7, 0xe11cfdfd, 0x3dae9393, 0x4c6a2626, 0x6c5a3636, 0x7e413f3f, 0xf502f7f7, 0x834fcccc, + 0x685c3434, 0x51f4a5a5, 0xd134e5e5, 0xf908f1f1, 0xe2937171, 0xab73d8d8, 0x62533131, 0x2a3f1515, + 0x080c0404, 0x9552c7c7, 0x46652323, 0x9d5ec3c3, 0x30281818, 0x37a19696, 0x0a0f0505, 0x2fb59a9a, + 0x0e090707, 0x24361212, 0x1b9b8080, 0xdf3de2e2, 0xcd26ebeb, 0x4e692727, 0x7fcdb2b2, 0xea9f7575, + 0x121b0909, 0x1d9e8383, 0x58742c2c, 0x342e1a1a, 0x362d1b1b, 0xdcb26e6e, 0xb4ee5a5a, 0x5bfba0a0, + 0xa4f65252, 0x764d3b3b, 0xb761d6d6, 0x7dceb3b3, 0x527b2929, 0xdd3ee3e3, 
0x5e712f2f, 0x13978484, + 0xa6f55353, 0xb968d1d1, 0x00000000, 0xc12ceded, 0x40602020, 0xe31ffcfc, 0x79c8b1b1, 0xb6ed5b5b, + 0xd4be6a6a, 0x8d46cbcb, 0x67d9bebe, 0x724b3939, 0x94de4a4a, 0x98d44c4c, 0xb0e85858, 0x854acfcf, + 0xbb6bd0d0, 0xc52aefef, 0x4fe5aaaa, 0xed16fbfb, 0x86c54343, 0x9ad74d4d, 0x66553333, 0x11948585, + 0x8acf4545, 0xe910f9f9, 0x04060202, 0xfe817f7f, 0xa0f05050, 0x78443c3c, 0x25ba9f9f, 0x4be3a8a8, + 0xa2f35151, 0x5dfea3a3, 0x80c04040, 0x058a8f8f, 0x3fad9292, 0x21bc9d9d, 0x70483838, 0xf104f5f5, + 0x63dfbcbc, 0x77c1b6b6, 0xaf75dada, 0x42632121, 0x20301010, 0xe51affff, 0xfd0ef3f3, 0xbf6dd2d2, + 0x814ccdcd, 0x18140c0c, 0x26351313, 0xc32fecec, 0xbee15f5f, 0x35a29797, 0x88cc4444, 0x2e391717, + 0x9357c4c4, 0x55f2a7a7, 0xfc827e7e, 0x7a473d3d, 0xc8ac6464, 0xbae75d5d, 0x322b1919, 0xe6957373, + 0xc0a06060, 0x19988181, 0x9ed14f4f, 0xa37fdcdc, 0x44662222, 0x547e2a2a, 0x3bab9090, 0x0b838888, + 0x8cca4646, 0xc729eeee, 0x6bd3b8b8, 0x283c1414, 0xa779dede, 0xbce25e5e, 0x161d0b0b, 0xad76dbdb, + 0xdb3be0e0, 0x64563232, 0x744e3a3a, 0x141e0a0a, 0x92db4949, 0x0c0a0606, 0x486c2424, 0xb8e45c5c, + 0x9f5dc2c2, 0xbd6ed3d3, 0x43efacac, 0xc4a66262, 0x39a89191, 0x31a49595, 0xd337e4e4, 0xf28b7979, + 0xd532e7e7, 0x8b43c8c8, 0x6e593737, 0xdab76d6d, 0x018c8d8d, 0xb164d5d5, 0x9cd24e4e, 0x49e0a9a9, + 0xd8b46c6c, 0xacfa5656, 0xf307f4f4, 0xcf25eaea, 0xcaaf6565, 0xf48e7a7a, 0x47e9aeae, 0x10180808, + 0x6fd5baba, 0xf0887878, 0x4a6f2525, 0x5c722e2e, 0x38241c1c, 0x57f1a6a6, 0x73c7b4b4, 0x9751c6c6, + 0xcb23e8e8, 0xa17cdddd, 0xe89c7474, 0x3e211f1f, 0x96dd4b4b, 0x61dcbdbd, 0x0d868b8b, 0x0f858a8a, + 0xe0907070, 0x7c423e3e, 0x71c4b5b5, 0xccaa6666, 0x90d84848, 0x06050303, 0xf701f6f6, 0x1c120e0e, + 0xc2a36161, 0x6a5f3535, 0xaef95757, 0x69d0b9b9, 0x17918686, 0x9958c1c1, 0x3a271d1d, 0x27b99e9e, + 0xd938e1e1, 0xeb13f8f8, 0x2bb39898, 0x22331111, 0xd2bb6969, 0xa970d9d9, 0x07898e8e, 0x33a79494, + 0x2db69b9b, 0x3c221e1e, 0x15928787, 0xc920e9e9, 0x8749cece, 0xaaff5555, 0x50782828, 0xa57adfdf, + 0x038f8c8c, 0x59f8a1a1, 0x09808989, 0x1a170d0d, 0x65dabfbf, 0xd731e6e6, 0x84c64242, 0xd0b86868, + 0x82c34141, 0x29b09999, 0x5a772d2d, 0x1e110f0f, 0x7bcbb0b0, 0xa8fc5454, 0x6dd6bbbb, 0x2c3a1616 +}; + +__declspec(align(16)) const unsigned long Te4_0[256] = { + 0x00000063, 0x0000007c, 0x00000077, 0x0000007b, 0x000000f2, 0x0000006b, 0x0000006f, 0x000000c5, + 0x00000030, 0x00000001, 0x00000067, 0x0000002b, 0x000000fe, 0x000000d7, 0x000000ab, 0x00000076, + 0x000000ca, 0x00000082, 0x000000c9, 0x0000007d, 0x000000fa, 0x00000059, 0x00000047, 0x000000f0, + 0x000000ad, 0x000000d4, 0x000000a2, 0x000000af, 0x0000009c, 0x000000a4, 0x00000072, 0x000000c0, + 0x000000b7, 0x000000fd, 0x00000093, 0x00000026, 0x00000036, 0x0000003f, 0x000000f7, 0x000000cc, + 0x00000034, 0x000000a5, 0x000000e5, 0x000000f1, 0x00000071, 0x000000d8, 0x00000031, 0x00000015, + 0x00000004, 0x000000c7, 0x00000023, 0x000000c3, 0x00000018, 0x00000096, 0x00000005, 0x0000009a, + 0x00000007, 0x00000012, 0x00000080, 0x000000e2, 0x000000eb, 0x00000027, 0x000000b2, 0x00000075, + 0x00000009, 0x00000083, 0x0000002c, 0x0000001a, 0x0000001b, 0x0000006e, 0x0000005a, 0x000000a0, + 0x00000052, 0x0000003b, 0x000000d6, 0x000000b3, 0x00000029, 0x000000e3, 0x0000002f, 0x00000084, + 0x00000053, 0x000000d1, 0x00000000, 0x000000ed, 0x00000020, 0x000000fc, 0x000000b1, 0x0000005b, + 0x0000006a, 0x000000cb, 0x000000be, 0x00000039, 0x0000004a, 0x0000004c, 0x00000058, 0x000000cf, + 0x000000d0, 0x000000ef, 0x000000aa, 0x000000fb, 0x00000043, 0x0000004d, 0x00000033, 0x00000085, + 0x00000045, 0x000000f9, 0x00000002, 
0x0000007f, 0x00000050, 0x0000003c, 0x0000009f, 0x000000a8, + 0x00000051, 0x000000a3, 0x00000040, 0x0000008f, 0x00000092, 0x0000009d, 0x00000038, 0x000000f5, + 0x000000bc, 0x000000b6, 0x000000da, 0x00000021, 0x00000010, 0x000000ff, 0x000000f3, 0x000000d2, + 0x000000cd, 0x0000000c, 0x00000013, 0x000000ec, 0x0000005f, 0x00000097, 0x00000044, 0x00000017, + 0x000000c4, 0x000000a7, 0x0000007e, 0x0000003d, 0x00000064, 0x0000005d, 0x00000019, 0x00000073, + 0x00000060, 0x00000081, 0x0000004f, 0x000000dc, 0x00000022, 0x0000002a, 0x00000090, 0x00000088, + 0x00000046, 0x000000ee, 0x000000b8, 0x00000014, 0x000000de, 0x0000005e, 0x0000000b, 0x000000db, + 0x000000e0, 0x00000032, 0x0000003a, 0x0000000a, 0x00000049, 0x00000006, 0x00000024, 0x0000005c, + 0x000000c2, 0x000000d3, 0x000000ac, 0x00000062, 0x00000091, 0x00000095, 0x000000e4, 0x00000079, + 0x000000e7, 0x000000c8, 0x00000037, 0x0000006d, 0x0000008d, 0x000000d5, 0x0000004e, 0x000000a9, + 0x0000006c, 0x00000056, 0x000000f4, 0x000000ea, 0x00000065, 0x0000007a, 0x000000ae, 0x00000008, + 0x000000ba, 0x00000078, 0x00000025, 0x0000002e, 0x0000001c, 0x000000a6, 0x000000b4, 0x000000c6, + 0x000000e8, 0x000000dd, 0x00000074, 0x0000001f, 0x0000004b, 0x000000bd, 0x0000008b, 0x0000008a, + 0x00000070, 0x0000003e, 0x000000b5, 0x00000066, 0x00000048, 0x00000003, 0x000000f6, 0x0000000e, + 0x00000061, 0x00000035, 0x00000057, 0x000000b9, 0x00000086, 0x000000c1, 0x0000001d, 0x0000009e, + 0x000000e1, 0x000000f8, 0x00000098, 0x00000011, 0x00000069, 0x000000d9, 0x0000008e, 0x00000094, + 0x0000009b, 0x0000001e, 0x00000087, 0x000000e9, 0x000000ce, 0x00000055, 0x00000028, 0x000000df, + 0x0000008c, 0x000000a1, 0x00000089, 0x0000000d, 0x000000bf, 0x000000e6, 0x00000042, 0x00000068, + 0x00000041, 0x00000099, 0x0000002d, 0x0000000f, 0x000000b0, 0x00000054, 0x000000bb, 0x00000016 +}; + +__declspec(align(16)) const unsigned long Te4_1[256] = { + 0x00006300, 0x00007c00, 0x00007700, 0x00007b00, 0x0000f200, 0x00006b00, 0x00006f00, 0x0000c500, + 0x00003000, 0x00000100, 0x00006700, 0x00002b00, 0x0000fe00, 0x0000d700, 0x0000ab00, 0x00007600, + 0x0000ca00, 0x00008200, 0x0000c900, 0x00007d00, 0x0000fa00, 0x00005900, 0x00004700, 0x0000f000, + 0x0000ad00, 0x0000d400, 0x0000a200, 0x0000af00, 0x00009c00, 0x0000a400, 0x00007200, 0x0000c000, + 0x0000b700, 0x0000fd00, 0x00009300, 0x00002600, 0x00003600, 0x00003f00, 0x0000f700, 0x0000cc00, + 0x00003400, 0x0000a500, 0x0000e500, 0x0000f100, 0x00007100, 0x0000d800, 0x00003100, 0x00001500, + 0x00000400, 0x0000c700, 0x00002300, 0x0000c300, 0x00001800, 0x00009600, 0x00000500, 0x00009a00, + 0x00000700, 0x00001200, 0x00008000, 0x0000e200, 0x0000eb00, 0x00002700, 0x0000b200, 0x00007500, + 0x00000900, 0x00008300, 0x00002c00, 0x00001a00, 0x00001b00, 0x00006e00, 0x00005a00, 0x0000a000, + 0x00005200, 0x00003b00, 0x0000d600, 0x0000b300, 0x00002900, 0x0000e300, 0x00002f00, 0x00008400, + 0x00005300, 0x0000d100, 0x00000000, 0x0000ed00, 0x00002000, 0x0000fc00, 0x0000b100, 0x00005b00, + 0x00006a00, 0x0000cb00, 0x0000be00, 0x00003900, 0x00004a00, 0x00004c00, 0x00005800, 0x0000cf00, + 0x0000d000, 0x0000ef00, 0x0000aa00, 0x0000fb00, 0x00004300, 0x00004d00, 0x00003300, 0x00008500, + 0x00004500, 0x0000f900, 0x00000200, 0x00007f00, 0x00005000, 0x00003c00, 0x00009f00, 0x0000a800, + 0x00005100, 0x0000a300, 0x00004000, 0x00008f00, 0x00009200, 0x00009d00, 0x00003800, 0x0000f500, + 0x0000bc00, 0x0000b600, 0x0000da00, 0x00002100, 0x00001000, 0x0000ff00, 0x0000f300, 0x0000d200, + 0x0000cd00, 0x00000c00, 0x00001300, 0x0000ec00, 0x00005f00, 0x00009700, 0x00004400, 0x00001700, + 
0x0000c400, 0x0000a700, 0x00007e00, 0x00003d00, 0x00006400, 0x00005d00, 0x00001900, 0x00007300, + 0x00006000, 0x00008100, 0x00004f00, 0x0000dc00, 0x00002200, 0x00002a00, 0x00009000, 0x00008800, + 0x00004600, 0x0000ee00, 0x0000b800, 0x00001400, 0x0000de00, 0x00005e00, 0x00000b00, 0x0000db00, + 0x0000e000, 0x00003200, 0x00003a00, 0x00000a00, 0x00004900, 0x00000600, 0x00002400, 0x00005c00, + 0x0000c200, 0x0000d300, 0x0000ac00, 0x00006200, 0x00009100, 0x00009500, 0x0000e400, 0x00007900, + 0x0000e700, 0x0000c800, 0x00003700, 0x00006d00, 0x00008d00, 0x0000d500, 0x00004e00, 0x0000a900, + 0x00006c00, 0x00005600, 0x0000f400, 0x0000ea00, 0x00006500, 0x00007a00, 0x0000ae00, 0x00000800, + 0x0000ba00, 0x00007800, 0x00002500, 0x00002e00, 0x00001c00, 0x0000a600, 0x0000b400, 0x0000c600, + 0x0000e800, 0x0000dd00, 0x00007400, 0x00001f00, 0x00004b00, 0x0000bd00, 0x00008b00, 0x00008a00, + 0x00007000, 0x00003e00, 0x0000b500, 0x00006600, 0x00004800, 0x00000300, 0x0000f600, 0x00000e00, + 0x00006100, 0x00003500, 0x00005700, 0x0000b900, 0x00008600, 0x0000c100, 0x00001d00, 0x00009e00, + 0x0000e100, 0x0000f800, 0x00009800, 0x00001100, 0x00006900, 0x0000d900, 0x00008e00, 0x00009400, + 0x00009b00, 0x00001e00, 0x00008700, 0x0000e900, 0x0000ce00, 0x00005500, 0x00002800, 0x0000df00, + 0x00008c00, 0x0000a100, 0x00008900, 0x00000d00, 0x0000bf00, 0x0000e600, 0x00004200, 0x00006800, + 0x00004100, 0x00009900, 0x00002d00, 0x00000f00, 0x0000b000, 0x00005400, 0x0000bb00, 0x00001600 +}; + +__declspec(align(16)) const unsigned long Te4_2[256] = { + 0x00630000, 0x007c0000, 0x00770000, 0x007b0000, 0x00f20000, 0x006b0000, 0x006f0000, 0x00c50000, + 0x00300000, 0x00010000, 0x00670000, 0x002b0000, 0x00fe0000, 0x00d70000, 0x00ab0000, 0x00760000, + 0x00ca0000, 0x00820000, 0x00c90000, 0x007d0000, 0x00fa0000, 0x00590000, 0x00470000, 0x00f00000, + 0x00ad0000, 0x00d40000, 0x00a20000, 0x00af0000, 0x009c0000, 0x00a40000, 0x00720000, 0x00c00000, + 0x00b70000, 0x00fd0000, 0x00930000, 0x00260000, 0x00360000, 0x003f0000, 0x00f70000, 0x00cc0000, + 0x00340000, 0x00a50000, 0x00e50000, 0x00f10000, 0x00710000, 0x00d80000, 0x00310000, 0x00150000, + 0x00040000, 0x00c70000, 0x00230000, 0x00c30000, 0x00180000, 0x00960000, 0x00050000, 0x009a0000, + 0x00070000, 0x00120000, 0x00800000, 0x00e20000, 0x00eb0000, 0x00270000, 0x00b20000, 0x00750000, + 0x00090000, 0x00830000, 0x002c0000, 0x001a0000, 0x001b0000, 0x006e0000, 0x005a0000, 0x00a00000, + 0x00520000, 0x003b0000, 0x00d60000, 0x00b30000, 0x00290000, 0x00e30000, 0x002f0000, 0x00840000, + 0x00530000, 0x00d10000, 0x00000000, 0x00ed0000, 0x00200000, 0x00fc0000, 0x00b10000, 0x005b0000, + 0x006a0000, 0x00cb0000, 0x00be0000, 0x00390000, 0x004a0000, 0x004c0000, 0x00580000, 0x00cf0000, + 0x00d00000, 0x00ef0000, 0x00aa0000, 0x00fb0000, 0x00430000, 0x004d0000, 0x00330000, 0x00850000, + 0x00450000, 0x00f90000, 0x00020000, 0x007f0000, 0x00500000, 0x003c0000, 0x009f0000, 0x00a80000, + 0x00510000, 0x00a30000, 0x00400000, 0x008f0000, 0x00920000, 0x009d0000, 0x00380000, 0x00f50000, + 0x00bc0000, 0x00b60000, 0x00da0000, 0x00210000, 0x00100000, 0x00ff0000, 0x00f30000, 0x00d20000, + 0x00cd0000, 0x000c0000, 0x00130000, 0x00ec0000, 0x005f0000, 0x00970000, 0x00440000, 0x00170000, + 0x00c40000, 0x00a70000, 0x007e0000, 0x003d0000, 0x00640000, 0x005d0000, 0x00190000, 0x00730000, + 0x00600000, 0x00810000, 0x004f0000, 0x00dc0000, 0x00220000, 0x002a0000, 0x00900000, 0x00880000, + 0x00460000, 0x00ee0000, 0x00b80000, 0x00140000, 0x00de0000, 0x005e0000, 0x000b0000, 0x00db0000, + 0x00e00000, 0x00320000, 0x003a0000, 0x000a0000, 0x00490000, 
0x00060000, 0x00240000, 0x005c0000, + 0x00c20000, 0x00d30000, 0x00ac0000, 0x00620000, 0x00910000, 0x00950000, 0x00e40000, 0x00790000, + 0x00e70000, 0x00c80000, 0x00370000, 0x006d0000, 0x008d0000, 0x00d50000, 0x004e0000, 0x00a90000, + 0x006c0000, 0x00560000, 0x00f40000, 0x00ea0000, 0x00650000, 0x007a0000, 0x00ae0000, 0x00080000, + 0x00ba0000, 0x00780000, 0x00250000, 0x002e0000, 0x001c0000, 0x00a60000, 0x00b40000, 0x00c60000, + 0x00e80000, 0x00dd0000, 0x00740000, 0x001f0000, 0x004b0000, 0x00bd0000, 0x008b0000, 0x008a0000, + 0x00700000, 0x003e0000, 0x00b50000, 0x00660000, 0x00480000, 0x00030000, 0x00f60000, 0x000e0000, + 0x00610000, 0x00350000, 0x00570000, 0x00b90000, 0x00860000, 0x00c10000, 0x001d0000, 0x009e0000, + 0x00e10000, 0x00f80000, 0x00980000, 0x00110000, 0x00690000, 0x00d90000, 0x008e0000, 0x00940000, + 0x009b0000, 0x001e0000, 0x00870000, 0x00e90000, 0x00ce0000, 0x00550000, 0x00280000, 0x00df0000, + 0x008c0000, 0x00a10000, 0x00890000, 0x000d0000, 0x00bf0000, 0x00e60000, 0x00420000, 0x00680000, + 0x00410000, 0x00990000, 0x002d0000, 0x000f0000, 0x00b00000, 0x00540000, 0x00bb0000, 0x00160000 +}; + +__declspec(align(16)) const unsigned long Te4_3[256] = { + 0x63000000, 0x7c000000, 0x77000000, 0x7b000000, 0xf2000000, 0x6b000000, 0x6f000000, 0xc5000000, + 0x30000000, 0x01000000, 0x67000000, 0x2b000000, 0xfe000000, 0xd7000000, 0xab000000, 0x76000000, + 0xca000000, 0x82000000, 0xc9000000, 0x7d000000, 0xfa000000, 0x59000000, 0x47000000, 0xf0000000, + 0xad000000, 0xd4000000, 0xa2000000, 0xaf000000, 0x9c000000, 0xa4000000, 0x72000000, 0xc0000000, + 0xb7000000, 0xfd000000, 0x93000000, 0x26000000, 0x36000000, 0x3f000000, 0xf7000000, 0xcc000000, + 0x34000000, 0xa5000000, 0xe5000000, 0xf1000000, 0x71000000, 0xd8000000, 0x31000000, 0x15000000, + 0x04000000, 0xc7000000, 0x23000000, 0xc3000000, 0x18000000, 0x96000000, 0x05000000, 0x9a000000, + 0x07000000, 0x12000000, 0x80000000, 0xe2000000, 0xeb000000, 0x27000000, 0xb2000000, 0x75000000, + 0x09000000, 0x83000000, 0x2c000000, 0x1a000000, 0x1b000000, 0x6e000000, 0x5a000000, 0xa0000000, + 0x52000000, 0x3b000000, 0xd6000000, 0xb3000000, 0x29000000, 0xe3000000, 0x2f000000, 0x84000000, + 0x53000000, 0xd1000000, 0x00000000, 0xed000000, 0x20000000, 0xfc000000, 0xb1000000, 0x5b000000, + 0x6a000000, 0xcb000000, 0xbe000000, 0x39000000, 0x4a000000, 0x4c000000, 0x58000000, 0xcf000000, + 0xd0000000, 0xef000000, 0xaa000000, 0xfb000000, 0x43000000, 0x4d000000, 0x33000000, 0x85000000, + 0x45000000, 0xf9000000, 0x02000000, 0x7f000000, 0x50000000, 0x3c000000, 0x9f000000, 0xa8000000, + 0x51000000, 0xa3000000, 0x40000000, 0x8f000000, 0x92000000, 0x9d000000, 0x38000000, 0xf5000000, + 0xbc000000, 0xb6000000, 0xda000000, 0x21000000, 0x10000000, 0xff000000, 0xf3000000, 0xd2000000, + 0xcd000000, 0x0c000000, 0x13000000, 0xec000000, 0x5f000000, 0x97000000, 0x44000000, 0x17000000, + 0xc4000000, 0xa7000000, 0x7e000000, 0x3d000000, 0x64000000, 0x5d000000, 0x19000000, 0x73000000, + 0x60000000, 0x81000000, 0x4f000000, 0xdc000000, 0x22000000, 0x2a000000, 0x90000000, 0x88000000, + 0x46000000, 0xee000000, 0xb8000000, 0x14000000, 0xde000000, 0x5e000000, 0x0b000000, 0xdb000000, + 0xe0000000, 0x32000000, 0x3a000000, 0x0a000000, 0x49000000, 0x06000000, 0x24000000, 0x5c000000, + 0xc2000000, 0xd3000000, 0xac000000, 0x62000000, 0x91000000, 0x95000000, 0xe4000000, 0x79000000, + 0xe7000000, 0xc8000000, 0x37000000, 0x6d000000, 0x8d000000, 0xd5000000, 0x4e000000, 0xa9000000, + 0x6c000000, 0x56000000, 0xf4000000, 0xea000000, 0x65000000, 0x7a000000, 0xae000000, 0x08000000, + 0xba000000, 0x78000000, 
0x25000000, 0x2e000000, 0x1c000000, 0xa6000000, 0xb4000000, 0xc6000000, + 0xe8000000, 0xdd000000, 0x74000000, 0x1f000000, 0x4b000000, 0xbd000000, 0x8b000000, 0x8a000000, + 0x70000000, 0x3e000000, 0xb5000000, 0x66000000, 0x48000000, 0x03000000, 0xf6000000, 0x0e000000, + 0x61000000, 0x35000000, 0x57000000, 0xb9000000, 0x86000000, 0xc1000000, 0x1d000000, 0x9e000000, + 0xe1000000, 0xf8000000, 0x98000000, 0x11000000, 0x69000000, 0xd9000000, 0x8e000000, 0x94000000, + 0x9b000000, 0x1e000000, 0x87000000, 0xe9000000, 0xce000000, 0x55000000, 0x28000000, 0xdf000000, + 0x8c000000, 0xa1000000, 0x89000000, 0x0d000000, 0xbf000000, 0xe6000000, 0x42000000, 0x68000000, + 0x41000000, 0x99000000, 0x2d000000, 0x0f000000, 0xb0000000, 0x54000000, 0xbb000000, 0x16000000 +}; + +__declspec(align(16)) const unsigned long Td0[256] = { + 0x50a7f451, 0x5365417e, 0xc3a4171a, 0x965e273a, 0xcb6bab3b, 0xf1459d1f, 0xab58faac, 0x9303e34b, + 0x55fa3020, 0xf66d76ad, 0x9176cc88, 0x254c02f5, 0xfcd7e54f, 0xd7cb2ac5, 0x80443526, 0x8fa362b5, + 0x495ab1de, 0x671bba25, 0x980eea45, 0xe1c0fe5d, 0x02752fc3, 0x12f04c81, 0xa397468d, 0xc6f9d36b, + 0xe75f8f03, 0x959c9215, 0xeb7a6dbf, 0xda595295, 0x2d83bed4, 0xd3217458, 0x2969e049, 0x44c8c98e, + 0x6a89c275, 0x78798ef4, 0x6b3e5899, 0xdd71b927, 0xb64fe1be, 0x17ad88f0, 0x66ac20c9, 0xb43ace7d, + 0x184adf63, 0x82311ae5, 0x60335197, 0x457f5362, 0xe07764b1, 0x84ae6bbb, 0x1ca081fe, 0x942b08f9, + 0x58684870, 0x19fd458f, 0x876cde94, 0xb7f87b52, 0x23d373ab, 0xe2024b72, 0x578f1fe3, 0x2aab5566, + 0x0728ebb2, 0x03c2b52f, 0x9a7bc586, 0xa50837d3, 0xf2872830, 0xb2a5bf23, 0xba6a0302, 0x5c8216ed, + 0x2b1ccf8a, 0x92b479a7, 0xf0f207f3, 0xa1e2694e, 0xcdf4da65, 0xd5be0506, 0x1f6234d1, 0x8afea6c4, + 0x9d532e34, 0xa055f3a2, 0x32e18a05, 0x75ebf6a4, 0x39ec830b, 0xaaef6040, 0x069f715e, 0x51106ebd, + 0xf98a213e, 0x3d06dd96, 0xae053edd, 0x46bde64d, 0xb58d5491, 0x055dc471, 0x6fd40604, 0xff155060, + 0x24fb9819, 0x97e9bdd6, 0xcc434089, 0x779ed967, 0xbd42e8b0, 0x888b8907, 0x385b19e7, 0xdbeec879, + 0x470a7ca1, 0xe90f427c, 0xc91e84f8, 0x00000000, 0x83868009, 0x48ed2b32, 0xac70111e, 0x4e725a6c, + 0xfbff0efd, 0x5638850f, 0x1ed5ae3d, 0x27392d36, 0x64d90f0a, 0x21a65c68, 0xd1545b9b, 0x3a2e3624, + 0xb1670a0c, 0x0fe75793, 0xd296eeb4, 0x9e919b1b, 0x4fc5c080, 0xa220dc61, 0x694b775a, 0x161a121c, + 0x0aba93e2, 0xe52aa0c0, 0x43e0223c, 0x1d171b12, 0x0b0d090e, 0xadc78bf2, 0xb9a8b62d, 0xc8a91e14, + 0x8519f157, 0x4c0775af, 0xbbdd99ee, 0xfd607fa3, 0x9f2601f7, 0xbcf5725c, 0xc53b6644, 0x347efb5b, + 0x7629438b, 0xdcc623cb, 0x68fcedb6, 0x63f1e4b8, 0xcadc31d7, 0x10856342, 0x40229713, 0x2011c684, + 0x7d244a85, 0xf83dbbd2, 0x1132f9ae, 0x6da129c7, 0x4b2f9e1d, 0xf330b2dc, 0xec52860d, 0xd0e3c177, + 0x6c16b32b, 0x99b970a9, 0xfa489411, 0x2264e947, 0xc48cfca8, 0x1a3ff0a0, 0xd82c7d56, 0xef903322, + 0xc74e4987, 0xc1d138d9, 0xfea2ca8c, 0x360bd498, 0xcf81f5a6, 0x28de7aa5, 0x268eb7da, 0xa4bfad3f, + 0xe49d3a2c, 0x0d927850, 0x9bcc5f6a, 0x62467e54, 0xc2138df6, 0xe8b8d890, 0x5ef7392e, 0xf5afc382, + 0xbe805d9f, 0x7c93d069, 0xa92dd56f, 0xb31225cf, 0x3b99acc8, 0xa77d1810, 0x6e639ce8, 0x7bbb3bdb, + 0x097826cd, 0xf418596e, 0x01b79aec, 0xa89a4f83, 0x656e95e6, 0x7ee6ffaa, 0x08cfbc21, 0xe6e815ef, + 0xd99be7ba, 0xce366f4a, 0xd4099fea, 0xd67cb029, 0xafb2a431, 0x31233f2a, 0x3094a5c6, 0xc066a235, + 0x37bc4e74, 0xa6ca82fc, 0xb0d090e0, 0x15d8a733, 0x4a9804f1, 0xf7daec41, 0x0e50cd7f, 0x2ff69117, + 0x8dd64d76, 0x4db0ef43, 0x544daacc, 0xdf0496e4, 0xe3b5d19e, 0x1b886a4c, 0xb81f2cc1, 0x7f516546, + 0x04ea5e9d, 0x5d358c01, 0x737487fa, 0x2e410bfb, 0x5a1d67b3, 0x52d2db92, 0x335610e9, 
0x1347d66d, + 0x8c61d79a, 0x7a0ca137, 0x8e14f859, 0x893c13eb, 0xee27a9ce, 0x35c961b7, 0xede51ce1, 0x3cb1477a, + 0x59dfd29c, 0x3f73f255, 0x79ce1418, 0xbf37c773, 0xeacdf753, 0x5baafd5f, 0x146f3ddf, 0x86db4478, + 0x81f3afca, 0x3ec468b9, 0x2c342438, 0x5f40a3c2, 0x72c31d16, 0x0c25e2bc, 0x8b493c28, 0x41950dff, + 0x7101a839, 0xdeb30c08, 0x9ce4b4d8, 0x90c15664, 0x6184cb7b, 0x70b632d5, 0x745c6c48, 0x4257b8d0 +}; + +__declspec(align(16)) const unsigned long Td1[256] = { + 0xa7f45150, 0x65417e53, 0xa4171ac3, 0x5e273a96, 0x6bab3bcb, 0x459d1ff1, 0x58faacab, 0x03e34b93, + 0xfa302055, 0x6d76adf6, 0x76cc8891, 0x4c02f525, 0xd7e54ffc, 0xcb2ac5d7, 0x44352680, 0xa362b58f, + 0x5ab1de49, 0x1bba2567, 0x0eea4598, 0xc0fe5de1, 0x752fc302, 0xf04c8112, 0x97468da3, 0xf9d36bc6, + 0x5f8f03e7, 0x9c921595, 0x7a6dbfeb, 0x595295da, 0x83bed42d, 0x217458d3, 0x69e04929, 0xc8c98e44, + 0x89c2756a, 0x798ef478, 0x3e58996b, 0x71b927dd, 0x4fe1beb6, 0xad88f017, 0xac20c966, 0x3ace7db4, + 0x4adf6318, 0x311ae582, 0x33519760, 0x7f536245, 0x7764b1e0, 0xae6bbb84, 0xa081fe1c, 0x2b08f994, + 0x68487058, 0xfd458f19, 0x6cde9487, 0xf87b52b7, 0xd373ab23, 0x024b72e2, 0x8f1fe357, 0xab55662a, + 0x28ebb207, 0xc2b52f03, 0x7bc5869a, 0x0837d3a5, 0x872830f2, 0xa5bf23b2, 0x6a0302ba, 0x8216ed5c, + 0x1ccf8a2b, 0xb479a792, 0xf207f3f0, 0xe2694ea1, 0xf4da65cd, 0xbe0506d5, 0x6234d11f, 0xfea6c48a, + 0x532e349d, 0x55f3a2a0, 0xe18a0532, 0xebf6a475, 0xec830b39, 0xef6040aa, 0x9f715e06, 0x106ebd51, + 0x8a213ef9, 0x06dd963d, 0x053eddae, 0xbde64d46, 0x8d5491b5, 0x5dc47105, 0xd406046f, 0x155060ff, + 0xfb981924, 0xe9bdd697, 0x434089cc, 0x9ed96777, 0x42e8b0bd, 0x8b890788, 0x5b19e738, 0xeec879db, + 0x0a7ca147, 0x0f427ce9, 0x1e84f8c9, 0x00000000, 0x86800983, 0xed2b3248, 0x70111eac, 0x725a6c4e, + 0xff0efdfb, 0x38850f56, 0xd5ae3d1e, 0x392d3627, 0xd90f0a64, 0xa65c6821, 0x545b9bd1, 0x2e36243a, + 0x670a0cb1, 0xe757930f, 0x96eeb4d2, 0x919b1b9e, 0xc5c0804f, 0x20dc61a2, 0x4b775a69, 0x1a121c16, + 0xba93e20a, 0x2aa0c0e5, 0xe0223c43, 0x171b121d, 0x0d090e0b, 0xc78bf2ad, 0xa8b62db9, 0xa91e14c8, + 0x19f15785, 0x0775af4c, 0xdd99eebb, 0x607fa3fd, 0x2601f79f, 0xf5725cbc, 0x3b6644c5, 0x7efb5b34, + 0x29438b76, 0xc623cbdc, 0xfcedb668, 0xf1e4b863, 0xdc31d7ca, 0x85634210, 0x22971340, 0x11c68420, + 0x244a857d, 0x3dbbd2f8, 0x32f9ae11, 0xa129c76d, 0x2f9e1d4b, 0x30b2dcf3, 0x52860dec, 0xe3c177d0, + 0x16b32b6c, 0xb970a999, 0x489411fa, 0x64e94722, 0x8cfca8c4, 0x3ff0a01a, 0x2c7d56d8, 0x903322ef, + 0x4e4987c7, 0xd138d9c1, 0xa2ca8cfe, 0x0bd49836, 0x81f5a6cf, 0xde7aa528, 0x8eb7da26, 0xbfad3fa4, + 0x9d3a2ce4, 0x9278500d, 0xcc5f6a9b, 0x467e5462, 0x138df6c2, 0xb8d890e8, 0xf7392e5e, 0xafc382f5, + 0x805d9fbe, 0x93d0697c, 0x2dd56fa9, 0x1225cfb3, 0x99acc83b, 0x7d1810a7, 0x639ce86e, 0xbb3bdb7b, + 0x7826cd09, 0x18596ef4, 0xb79aec01, 0x9a4f83a8, 0x6e95e665, 0xe6ffaa7e, 0xcfbc2108, 0xe815efe6, + 0x9be7bad9, 0x366f4ace, 0x099fead4, 0x7cb029d6, 0xb2a431af, 0x233f2a31, 0x94a5c630, 0x66a235c0, + 0xbc4e7437, 0xca82fca6, 0xd090e0b0, 0xd8a73315, 0x9804f14a, 0xdaec41f7, 0x50cd7f0e, 0xf691172f, + 0xd64d768d, 0xb0ef434d, 0x4daacc54, 0x0496e4df, 0xb5d19ee3, 0x886a4c1b, 0x1f2cc1b8, 0x5165467f, + 0xea5e9d04, 0x358c015d, 0x7487fa73, 0x410bfb2e, 0x1d67b35a, 0xd2db9252, 0x5610e933, 0x47d66d13, + 0x61d79a8c, 0x0ca1377a, 0x14f8598e, 0x3c13eb89, 0x27a9ceee, 0xc961b735, 0xe51ce1ed, 0xb1477a3c, + 0xdfd29c59, 0x73f2553f, 0xce141879, 0x37c773bf, 0xcdf753ea, 0xaafd5f5b, 0x6f3ddf14, 0xdb447886, + 0xf3afca81, 0xc468b93e, 0x3424382c, 0x40a3c25f, 0xc31d1672, 0x25e2bc0c, 0x493c288b, 0x950dff41, + 0x01a83971, 0xb30c08de, 0xe4b4d89c, 0xc1566490, 
0x84cb7b61, 0xb632d570, 0x5c6c4874, 0x57b8d042 +}; + +__declspec(align(16)) const unsigned long Td2[256] = { + 0xf45150a7, 0x417e5365, 0x171ac3a4, 0x273a965e, 0xab3bcb6b, 0x9d1ff145, 0xfaacab58, 0xe34b9303, + 0x302055fa, 0x76adf66d, 0xcc889176, 0x02f5254c, 0xe54ffcd7, 0x2ac5d7cb, 0x35268044, 0x62b58fa3, + 0xb1de495a, 0xba25671b, 0xea45980e, 0xfe5de1c0, 0x2fc30275, 0x4c8112f0, 0x468da397, 0xd36bc6f9, + 0x8f03e75f, 0x9215959c, 0x6dbfeb7a, 0x5295da59, 0xbed42d83, 0x7458d321, 0xe0492969, 0xc98e44c8, + 0xc2756a89, 0x8ef47879, 0x58996b3e, 0xb927dd71, 0xe1beb64f, 0x88f017ad, 0x20c966ac, 0xce7db43a, + 0xdf63184a, 0x1ae58231, 0x51976033, 0x5362457f, 0x64b1e077, 0x6bbb84ae, 0x81fe1ca0, 0x08f9942b, + 0x48705868, 0x458f19fd, 0xde94876c, 0x7b52b7f8, 0x73ab23d3, 0x4b72e202, 0x1fe3578f, 0x55662aab, + 0xebb20728, 0xb52f03c2, 0xc5869a7b, 0x37d3a508, 0x2830f287, 0xbf23b2a5, 0x0302ba6a, 0x16ed5c82, + 0xcf8a2b1c, 0x79a792b4, 0x07f3f0f2, 0x694ea1e2, 0xda65cdf4, 0x0506d5be, 0x34d11f62, 0xa6c48afe, + 0x2e349d53, 0xf3a2a055, 0x8a0532e1, 0xf6a475eb, 0x830b39ec, 0x6040aaef, 0x715e069f, 0x6ebd5110, + 0x213ef98a, 0xdd963d06, 0x3eddae05, 0xe64d46bd, 0x5491b58d, 0xc471055d, 0x06046fd4, 0x5060ff15, + 0x981924fb, 0xbdd697e9, 0x4089cc43, 0xd967779e, 0xe8b0bd42, 0x8907888b, 0x19e7385b, 0xc879dbee, + 0x7ca1470a, 0x427ce90f, 0x84f8c91e, 0x00000000, 0x80098386, 0x2b3248ed, 0x111eac70, 0x5a6c4e72, + 0x0efdfbff, 0x850f5638, 0xae3d1ed5, 0x2d362739, 0x0f0a64d9, 0x5c6821a6, 0x5b9bd154, 0x36243a2e, + 0x0a0cb167, 0x57930fe7, 0xeeb4d296, 0x9b1b9e91, 0xc0804fc5, 0xdc61a220, 0x775a694b, 0x121c161a, + 0x93e20aba, 0xa0c0e52a, 0x223c43e0, 0x1b121d17, 0x090e0b0d, 0x8bf2adc7, 0xb62db9a8, 0x1e14c8a9, + 0xf1578519, 0x75af4c07, 0x99eebbdd, 0x7fa3fd60, 0x01f79f26, 0x725cbcf5, 0x6644c53b, 0xfb5b347e, + 0x438b7629, 0x23cbdcc6, 0xedb668fc, 0xe4b863f1, 0x31d7cadc, 0x63421085, 0x97134022, 0xc6842011, + 0x4a857d24, 0xbbd2f83d, 0xf9ae1132, 0x29c76da1, 0x9e1d4b2f, 0xb2dcf330, 0x860dec52, 0xc177d0e3, + 0xb32b6c16, 0x70a999b9, 0x9411fa48, 0xe9472264, 0xfca8c48c, 0xf0a01a3f, 0x7d56d82c, 0x3322ef90, + 0x4987c74e, 0x38d9c1d1, 0xca8cfea2, 0xd498360b, 0xf5a6cf81, 0x7aa528de, 0xb7da268e, 0xad3fa4bf, + 0x3a2ce49d, 0x78500d92, 0x5f6a9bcc, 0x7e546246, 0x8df6c213, 0xd890e8b8, 0x392e5ef7, 0xc382f5af, + 0x5d9fbe80, 0xd0697c93, 0xd56fa92d, 0x25cfb312, 0xacc83b99, 0x1810a77d, 0x9ce86e63, 0x3bdb7bbb, + 0x26cd0978, 0x596ef418, 0x9aec01b7, 0x4f83a89a, 0x95e6656e, 0xffaa7ee6, 0xbc2108cf, 0x15efe6e8, + 0xe7bad99b, 0x6f4ace36, 0x9fead409, 0xb029d67c, 0xa431afb2, 0x3f2a3123, 0xa5c63094, 0xa235c066, + 0x4e7437bc, 0x82fca6ca, 0x90e0b0d0, 0xa73315d8, 0x04f14a98, 0xec41f7da, 0xcd7f0e50, 0x91172ff6, + 0x4d768dd6, 0xef434db0, 0xaacc544d, 0x96e4df04, 0xd19ee3b5, 0x6a4c1b88, 0x2cc1b81f, 0x65467f51, + 0x5e9d04ea, 0x8c015d35, 0x87fa7374, 0x0bfb2e41, 0x67b35a1d, 0xdb9252d2, 0x10e93356, 0xd66d1347, + 0xd79a8c61, 0xa1377a0c, 0xf8598e14, 0x13eb893c, 0xa9ceee27, 0x61b735c9, 0x1ce1ede5, 0x477a3cb1, + 0xd29c59df, 0xf2553f73, 0x141879ce, 0xc773bf37, 0xf753eacd, 0xfd5f5baa, 0x3ddf146f, 0x447886db, + 0xafca81f3, 0x68b93ec4, 0x24382c34, 0xa3c25f40, 0x1d1672c3, 0xe2bc0c25, 0x3c288b49, 0x0dff4195, + 0xa8397101, 0x0c08deb3, 0xb4d89ce4, 0x566490c1, 0xcb7b6184, 0x32d570b6, 0x6c48745c, 0xb8d04257 +}; + +__declspec(align(16)) const unsigned long Td3[256] = { + 0x5150a7f4, 0x7e536541, 0x1ac3a417, 0x3a965e27, 0x3bcb6bab, 0x1ff1459d, 0xacab58fa, 0x4b9303e3, + 0x2055fa30, 0xadf66d76, 0x889176cc, 0xf5254c02, 0x4ffcd7e5, 0xc5d7cb2a, 0x26804435, 0xb58fa362, + 0xde495ab1, 0x25671bba, 0x45980eea, 0x5de1c0fe, 
0xc302752f, 0x8112f04c, 0x8da39746, 0x6bc6f9d3, + 0x03e75f8f, 0x15959c92, 0xbfeb7a6d, 0x95da5952, 0xd42d83be, 0x58d32174, 0x492969e0, 0x8e44c8c9, + 0x756a89c2, 0xf478798e, 0x996b3e58, 0x27dd71b9, 0xbeb64fe1, 0xf017ad88, 0xc966ac20, 0x7db43ace, + 0x63184adf, 0xe582311a, 0x97603351, 0x62457f53, 0xb1e07764, 0xbb84ae6b, 0xfe1ca081, 0xf9942b08, + 0x70586848, 0x8f19fd45, 0x94876cde, 0x52b7f87b, 0xab23d373, 0x72e2024b, 0xe3578f1f, 0x662aab55, + 0xb20728eb, 0x2f03c2b5, 0x869a7bc5, 0xd3a50837, 0x30f28728, 0x23b2a5bf, 0x02ba6a03, 0xed5c8216, + 0x8a2b1ccf, 0xa792b479, 0xf3f0f207, 0x4ea1e269, 0x65cdf4da, 0x06d5be05, 0xd11f6234, 0xc48afea6, + 0x349d532e, 0xa2a055f3, 0x0532e18a, 0xa475ebf6, 0x0b39ec83, 0x40aaef60, 0x5e069f71, 0xbd51106e, + 0x3ef98a21, 0x963d06dd, 0xddae053e, 0x4d46bde6, 0x91b58d54, 0x71055dc4, 0x046fd406, 0x60ff1550, + 0x1924fb98, 0xd697e9bd, 0x89cc4340, 0x67779ed9, 0xb0bd42e8, 0x07888b89, 0xe7385b19, 0x79dbeec8, + 0xa1470a7c, 0x7ce90f42, 0xf8c91e84, 0x00000000, 0x09838680, 0x3248ed2b, 0x1eac7011, 0x6c4e725a, + 0xfdfbff0e, 0x0f563885, 0x3d1ed5ae, 0x3627392d, 0x0a64d90f, 0x6821a65c, 0x9bd1545b, 0x243a2e36, + 0x0cb1670a, 0x930fe757, 0xb4d296ee, 0x1b9e919b, 0x804fc5c0, 0x61a220dc, 0x5a694b77, 0x1c161a12, + 0xe20aba93, 0xc0e52aa0, 0x3c43e022, 0x121d171b, 0x0e0b0d09, 0xf2adc78b, 0x2db9a8b6, 0x14c8a91e, + 0x578519f1, 0xaf4c0775, 0xeebbdd99, 0xa3fd607f, 0xf79f2601, 0x5cbcf572, 0x44c53b66, 0x5b347efb, + 0x8b762943, 0xcbdcc623, 0xb668fced, 0xb863f1e4, 0xd7cadc31, 0x42108563, 0x13402297, 0x842011c6, + 0x857d244a, 0xd2f83dbb, 0xae1132f9, 0xc76da129, 0x1d4b2f9e, 0xdcf330b2, 0x0dec5286, 0x77d0e3c1, + 0x2b6c16b3, 0xa999b970, 0x11fa4894, 0x472264e9, 0xa8c48cfc, 0xa01a3ff0, 0x56d82c7d, 0x22ef9033, + 0x87c74e49, 0xd9c1d138, 0x8cfea2ca, 0x98360bd4, 0xa6cf81f5, 0xa528de7a, 0xda268eb7, 0x3fa4bfad, + 0x2ce49d3a, 0x500d9278, 0x6a9bcc5f, 0x5462467e, 0xf6c2138d, 0x90e8b8d8, 0x2e5ef739, 0x82f5afc3, + 0x9fbe805d, 0x697c93d0, 0x6fa92dd5, 0xcfb31225, 0xc83b99ac, 0x10a77d18, 0xe86e639c, 0xdb7bbb3b, + 0xcd097826, 0x6ef41859, 0xec01b79a, 0x83a89a4f, 0xe6656e95, 0xaa7ee6ff, 0x2108cfbc, 0xefe6e815, + 0xbad99be7, 0x4ace366f, 0xead4099f, 0x29d67cb0, 0x31afb2a4, 0x2a31233f, 0xc63094a5, 0x35c066a2, + 0x7437bc4e, 0xfca6ca82, 0xe0b0d090, 0x3315d8a7, 0xf14a9804, 0x41f7daec, 0x7f0e50cd, 0x172ff691, + 0x768dd64d, 0x434db0ef, 0xcc544daa, 0xe4df0496, 0x9ee3b5d1, 0x4c1b886a, 0xc1b81f2c, 0x467f5165, + 0x9d04ea5e, 0x015d358c, 0xfa737487, 0xfb2e410b, 0xb35a1d67, 0x9252d2db, 0xe9335610, 0x6d1347d6, + 0x9a8c61d7, 0x377a0ca1, 0x598e14f8, 0xeb893c13, 0xceee27a9, 0xb735c961, 0xe1ede51c, 0x7a3cb147, + 0x9c59dfd2, 0x553f73f2, 0x1879ce14, 0x73bf37c7, 0x53eacdf7, 0x5f5baafd, 0xdf146f3d, 0x7886db44, + 0xca81f3af, 0xb93ec468, 0x382c3424, 0xc25f40a3, 0x1672c31d, 0xbc0c25e2, 0x288b493c, 0xff41950d, + 0x397101a8, 0x08deb30c, 0xd89ce4b4, 0x6490c156, 0x7b6184cb, 0xd570b632, 0x48745c6c, 0xd04257b8 +}; + +__declspec(align(16)) const unsigned long Td4_0[256] = { + 0x00000052, 0x00000009, 0x0000006a, 0x000000d5, 0x00000030, 0x00000036, 0x000000a5, 0x00000038, + 0x000000bf, 0x00000040, 0x000000a3, 0x0000009e, 0x00000081, 0x000000f3, 0x000000d7, 0x000000fb, + 0x0000007c, 0x000000e3, 0x00000039, 0x00000082, 0x0000009b, 0x0000002f, 0x000000ff, 0x00000087, + 0x00000034, 0x0000008e, 0x00000043, 0x00000044, 0x000000c4, 0x000000de, 0x000000e9, 0x000000cb, + 0x00000054, 0x0000007b, 0x00000094, 0x00000032, 0x000000a6, 0x000000c2, 0x00000023, 0x0000003d, + 0x000000ee, 0x0000004c, 0x00000095, 0x0000000b, 0x00000042, 0x000000fa, 0x000000c3, 0x0000004e, + 0x00000008, 
0x0000002e, 0x000000a1, 0x00000066, 0x00000028, 0x000000d9, 0x00000024, 0x000000b2, + 0x00000076, 0x0000005b, 0x000000a2, 0x00000049, 0x0000006d, 0x0000008b, 0x000000d1, 0x00000025, + 0x00000072, 0x000000f8, 0x000000f6, 0x00000064, 0x00000086, 0x00000068, 0x00000098, 0x00000016, + 0x000000d4, 0x000000a4, 0x0000005c, 0x000000cc, 0x0000005d, 0x00000065, 0x000000b6, 0x00000092, + 0x0000006c, 0x00000070, 0x00000048, 0x00000050, 0x000000fd, 0x000000ed, 0x000000b9, 0x000000da, + 0x0000005e, 0x00000015, 0x00000046, 0x00000057, 0x000000a7, 0x0000008d, 0x0000009d, 0x00000084, + 0x00000090, 0x000000d8, 0x000000ab, 0x00000000, 0x0000008c, 0x000000bc, 0x000000d3, 0x0000000a, + 0x000000f7, 0x000000e4, 0x00000058, 0x00000005, 0x000000b8, 0x000000b3, 0x00000045, 0x00000006, + 0x000000d0, 0x0000002c, 0x0000001e, 0x0000008f, 0x000000ca, 0x0000003f, 0x0000000f, 0x00000002, + 0x000000c1, 0x000000af, 0x000000bd, 0x00000003, 0x00000001, 0x00000013, 0x0000008a, 0x0000006b, + 0x0000003a, 0x00000091, 0x00000011, 0x00000041, 0x0000004f, 0x00000067, 0x000000dc, 0x000000ea, + 0x00000097, 0x000000f2, 0x000000cf, 0x000000ce, 0x000000f0, 0x000000b4, 0x000000e6, 0x00000073, + 0x00000096, 0x000000ac, 0x00000074, 0x00000022, 0x000000e7, 0x000000ad, 0x00000035, 0x00000085, + 0x000000e2, 0x000000f9, 0x00000037, 0x000000e8, 0x0000001c, 0x00000075, 0x000000df, 0x0000006e, + 0x00000047, 0x000000f1, 0x0000001a, 0x00000071, 0x0000001d, 0x00000029, 0x000000c5, 0x00000089, + 0x0000006f, 0x000000b7, 0x00000062, 0x0000000e, 0x000000aa, 0x00000018, 0x000000be, 0x0000001b, + 0x000000fc, 0x00000056, 0x0000003e, 0x0000004b, 0x000000c6, 0x000000d2, 0x00000079, 0x00000020, + 0x0000009a, 0x000000db, 0x000000c0, 0x000000fe, 0x00000078, 0x000000cd, 0x0000005a, 0x000000f4, + 0x0000001f, 0x000000dd, 0x000000a8, 0x00000033, 0x00000088, 0x00000007, 0x000000c7, 0x00000031, + 0x000000b1, 0x00000012, 0x00000010, 0x00000059, 0x00000027, 0x00000080, 0x000000ec, 0x0000005f, + 0x00000060, 0x00000051, 0x0000007f, 0x000000a9, 0x00000019, 0x000000b5, 0x0000004a, 0x0000000d, + 0x0000002d, 0x000000e5, 0x0000007a, 0x0000009f, 0x00000093, 0x000000c9, 0x0000009c, 0x000000ef, + 0x000000a0, 0x000000e0, 0x0000003b, 0x0000004d, 0x000000ae, 0x0000002a, 0x000000f5, 0x000000b0, + 0x000000c8, 0x000000eb, 0x000000bb, 0x0000003c, 0x00000083, 0x00000053, 0x00000099, 0x00000061, + 0x00000017, 0x0000002b, 0x00000004, 0x0000007e, 0x000000ba, 0x00000077, 0x000000d6, 0x00000026, + 0x000000e1, 0x00000069, 0x00000014, 0x00000063, 0x00000055, 0x00000021, 0x0000000c, 0x0000007d +}; + +__declspec(align(16)) const unsigned long Td4_1[256] = { + 0x00005200, 0x00000900, 0x00006a00, 0x0000d500, 0x00003000, 0x00003600, 0x0000a500, 0x00003800, + 0x0000bf00, 0x00004000, 0x0000a300, 0x00009e00, 0x00008100, 0x0000f300, 0x0000d700, 0x0000fb00, + 0x00007c00, 0x0000e300, 0x00003900, 0x00008200, 0x00009b00, 0x00002f00, 0x0000ff00, 0x00008700, + 0x00003400, 0x00008e00, 0x00004300, 0x00004400, 0x0000c400, 0x0000de00, 0x0000e900, 0x0000cb00, + 0x00005400, 0x00007b00, 0x00009400, 0x00003200, 0x0000a600, 0x0000c200, 0x00002300, 0x00003d00, + 0x0000ee00, 0x00004c00, 0x00009500, 0x00000b00, 0x00004200, 0x0000fa00, 0x0000c300, 0x00004e00, + 0x00000800, 0x00002e00, 0x0000a100, 0x00006600, 0x00002800, 0x0000d900, 0x00002400, 0x0000b200, + 0x00007600, 0x00005b00, 0x0000a200, 0x00004900, 0x00006d00, 0x00008b00, 0x0000d100, 0x00002500, + 0x00007200, 0x0000f800, 0x0000f600, 0x00006400, 0x00008600, 0x00006800, 0x00009800, 0x00001600, + 0x0000d400, 0x0000a400, 0x00005c00, 0x0000cc00, 0x00005d00, 0x00006500, 
0x0000b600, 0x00009200, + 0x00006c00, 0x00007000, 0x00004800, 0x00005000, 0x0000fd00, 0x0000ed00, 0x0000b900, 0x0000da00, + 0x00005e00, 0x00001500, 0x00004600, 0x00005700, 0x0000a700, 0x00008d00, 0x00009d00, 0x00008400, + 0x00009000, 0x0000d800, 0x0000ab00, 0x00000000, 0x00008c00, 0x0000bc00, 0x0000d300, 0x00000a00, + 0x0000f700, 0x0000e400, 0x00005800, 0x00000500, 0x0000b800, 0x0000b300, 0x00004500, 0x00000600, + 0x0000d000, 0x00002c00, 0x00001e00, 0x00008f00, 0x0000ca00, 0x00003f00, 0x00000f00, 0x00000200, + 0x0000c100, 0x0000af00, 0x0000bd00, 0x00000300, 0x00000100, 0x00001300, 0x00008a00, 0x00006b00, + 0x00003a00, 0x00009100, 0x00001100, 0x00004100, 0x00004f00, 0x00006700, 0x0000dc00, 0x0000ea00, + 0x00009700, 0x0000f200, 0x0000cf00, 0x0000ce00, 0x0000f000, 0x0000b400, 0x0000e600, 0x00007300, + 0x00009600, 0x0000ac00, 0x00007400, 0x00002200, 0x0000e700, 0x0000ad00, 0x00003500, 0x00008500, + 0x0000e200, 0x0000f900, 0x00003700, 0x0000e800, 0x00001c00, 0x00007500, 0x0000df00, 0x00006e00, + 0x00004700, 0x0000f100, 0x00001a00, 0x00007100, 0x00001d00, 0x00002900, 0x0000c500, 0x00008900, + 0x00006f00, 0x0000b700, 0x00006200, 0x00000e00, 0x0000aa00, 0x00001800, 0x0000be00, 0x00001b00, + 0x0000fc00, 0x00005600, 0x00003e00, 0x00004b00, 0x0000c600, 0x0000d200, 0x00007900, 0x00002000, + 0x00009a00, 0x0000db00, 0x0000c000, 0x0000fe00, 0x00007800, 0x0000cd00, 0x00005a00, 0x0000f400, + 0x00001f00, 0x0000dd00, 0x0000a800, 0x00003300, 0x00008800, 0x00000700, 0x0000c700, 0x00003100, + 0x0000b100, 0x00001200, 0x00001000, 0x00005900, 0x00002700, 0x00008000, 0x0000ec00, 0x00005f00, + 0x00006000, 0x00005100, 0x00007f00, 0x0000a900, 0x00001900, 0x0000b500, 0x00004a00, 0x00000d00, + 0x00002d00, 0x0000e500, 0x00007a00, 0x00009f00, 0x00009300, 0x0000c900, 0x00009c00, 0x0000ef00, + 0x0000a000, 0x0000e000, 0x00003b00, 0x00004d00, 0x0000ae00, 0x00002a00, 0x0000f500, 0x0000b000, + 0x0000c800, 0x0000eb00, 0x0000bb00, 0x00003c00, 0x00008300, 0x00005300, 0x00009900, 0x00006100, + 0x00001700, 0x00002b00, 0x00000400, 0x00007e00, 0x0000ba00, 0x00007700, 0x0000d600, 0x00002600, + 0x0000e100, 0x00006900, 0x00001400, 0x00006300, 0x00005500, 0x00002100, 0x00000c00, 0x00007d00 +}; + +__declspec(align(16)) const unsigned long Td4_2[256] = { + 0x00520000, 0x00090000, 0x006a0000, 0x00d50000, 0x00300000, 0x00360000, 0x00a50000, 0x00380000, + 0x00bf0000, 0x00400000, 0x00a30000, 0x009e0000, 0x00810000, 0x00f30000, 0x00d70000, 0x00fb0000, + 0x007c0000, 0x00e30000, 0x00390000, 0x00820000, 0x009b0000, 0x002f0000, 0x00ff0000, 0x00870000, + 0x00340000, 0x008e0000, 0x00430000, 0x00440000, 0x00c40000, 0x00de0000, 0x00e90000, 0x00cb0000, + 0x00540000, 0x007b0000, 0x00940000, 0x00320000, 0x00a60000, 0x00c20000, 0x00230000, 0x003d0000, + 0x00ee0000, 0x004c0000, 0x00950000, 0x000b0000, 0x00420000, 0x00fa0000, 0x00c30000, 0x004e0000, + 0x00080000, 0x002e0000, 0x00a10000, 0x00660000, 0x00280000, 0x00d90000, 0x00240000, 0x00b20000, + 0x00760000, 0x005b0000, 0x00a20000, 0x00490000, 0x006d0000, 0x008b0000, 0x00d10000, 0x00250000, + 0x00720000, 0x00f80000, 0x00f60000, 0x00640000, 0x00860000, 0x00680000, 0x00980000, 0x00160000, + 0x00d40000, 0x00a40000, 0x005c0000, 0x00cc0000, 0x005d0000, 0x00650000, 0x00b60000, 0x00920000, + 0x006c0000, 0x00700000, 0x00480000, 0x00500000, 0x00fd0000, 0x00ed0000, 0x00b90000, 0x00da0000, + 0x005e0000, 0x00150000, 0x00460000, 0x00570000, 0x00a70000, 0x008d0000, 0x009d0000, 0x00840000, + 0x00900000, 0x00d80000, 0x00ab0000, 0x00000000, 0x008c0000, 0x00bc0000, 0x00d30000, 0x000a0000, + 0x00f70000, 0x00e40000, 0x00580000, 
0x00050000, 0x00b80000, 0x00b30000, 0x00450000, 0x00060000, + 0x00d00000, 0x002c0000, 0x001e0000, 0x008f0000, 0x00ca0000, 0x003f0000, 0x000f0000, 0x00020000, + 0x00c10000, 0x00af0000, 0x00bd0000, 0x00030000, 0x00010000, 0x00130000, 0x008a0000, 0x006b0000, + 0x003a0000, 0x00910000, 0x00110000, 0x00410000, 0x004f0000, 0x00670000, 0x00dc0000, 0x00ea0000, + 0x00970000, 0x00f20000, 0x00cf0000, 0x00ce0000, 0x00f00000, 0x00b40000, 0x00e60000, 0x00730000, + 0x00960000, 0x00ac0000, 0x00740000, 0x00220000, 0x00e70000, 0x00ad0000, 0x00350000, 0x00850000, + 0x00e20000, 0x00f90000, 0x00370000, 0x00e80000, 0x001c0000, 0x00750000, 0x00df0000, 0x006e0000, + 0x00470000, 0x00f10000, 0x001a0000, 0x00710000, 0x001d0000, 0x00290000, 0x00c50000, 0x00890000, + 0x006f0000, 0x00b70000, 0x00620000, 0x000e0000, 0x00aa0000, 0x00180000, 0x00be0000, 0x001b0000, + 0x00fc0000, 0x00560000, 0x003e0000, 0x004b0000, 0x00c60000, 0x00d20000, 0x00790000, 0x00200000, + 0x009a0000, 0x00db0000, 0x00c00000, 0x00fe0000, 0x00780000, 0x00cd0000, 0x005a0000, 0x00f40000, + 0x001f0000, 0x00dd0000, 0x00a80000, 0x00330000, 0x00880000, 0x00070000, 0x00c70000, 0x00310000, + 0x00b10000, 0x00120000, 0x00100000, 0x00590000, 0x00270000, 0x00800000, 0x00ec0000, 0x005f0000, + 0x00600000, 0x00510000, 0x007f0000, 0x00a90000, 0x00190000, 0x00b50000, 0x004a0000, 0x000d0000, + 0x002d0000, 0x00e50000, 0x007a0000, 0x009f0000, 0x00930000, 0x00c90000, 0x009c0000, 0x00ef0000, + 0x00a00000, 0x00e00000, 0x003b0000, 0x004d0000, 0x00ae0000, 0x002a0000, 0x00f50000, 0x00b00000, + 0x00c80000, 0x00eb0000, 0x00bb0000, 0x003c0000, 0x00830000, 0x00530000, 0x00990000, 0x00610000, + 0x00170000, 0x002b0000, 0x00040000, 0x007e0000, 0x00ba0000, 0x00770000, 0x00d60000, 0x00260000, + 0x00e10000, 0x00690000, 0x00140000, 0x00630000, 0x00550000, 0x00210000, 0x000c0000, 0x007d0000 +}; + +__declspec(align(16)) const unsigned long Td4_3[256] = { + 0x52000000, 0x09000000, 0x6a000000, 0xd5000000, 0x30000000, 0x36000000, 0xa5000000, 0x38000000, + 0xbf000000, 0x40000000, 0xa3000000, 0x9e000000, 0x81000000, 0xf3000000, 0xd7000000, 0xfb000000, + 0x7c000000, 0xe3000000, 0x39000000, 0x82000000, 0x9b000000, 0x2f000000, 0xff000000, 0x87000000, + 0x34000000, 0x8e000000, 0x43000000, 0x44000000, 0xc4000000, 0xde000000, 0xe9000000, 0xcb000000, + 0x54000000, 0x7b000000, 0x94000000, 0x32000000, 0xa6000000, 0xc2000000, 0x23000000, 0x3d000000, + 0xee000000, 0x4c000000, 0x95000000, 0x0b000000, 0x42000000, 0xfa000000, 0xc3000000, 0x4e000000, + 0x08000000, 0x2e000000, 0xa1000000, 0x66000000, 0x28000000, 0xd9000000, 0x24000000, 0xb2000000, + 0x76000000, 0x5b000000, 0xa2000000, 0x49000000, 0x6d000000, 0x8b000000, 0xd1000000, 0x25000000, + 0x72000000, 0xf8000000, 0xf6000000, 0x64000000, 0x86000000, 0x68000000, 0x98000000, 0x16000000, + 0xd4000000, 0xa4000000, 0x5c000000, 0xcc000000, 0x5d000000, 0x65000000, 0xb6000000, 0x92000000, + 0x6c000000, 0x70000000, 0x48000000, 0x50000000, 0xfd000000, 0xed000000, 0xb9000000, 0xda000000, + 0x5e000000, 0x15000000, 0x46000000, 0x57000000, 0xa7000000, 0x8d000000, 0x9d000000, 0x84000000, + 0x90000000, 0xd8000000, 0xab000000, 0x00000000, 0x8c000000, 0xbc000000, 0xd3000000, 0x0a000000, + 0xf7000000, 0xe4000000, 0x58000000, 0x05000000, 0xb8000000, 0xb3000000, 0x45000000, 0x06000000, + 0xd0000000, 0x2c000000, 0x1e000000, 0x8f000000, 0xca000000, 0x3f000000, 0x0f000000, 0x02000000, + 0xc1000000, 0xaf000000, 0xbd000000, 0x03000000, 0x01000000, 0x13000000, 0x8a000000, 0x6b000000, + 0x3a000000, 0x91000000, 0x11000000, 0x41000000, 0x4f000000, 0x67000000, 0xdc000000, 0xea000000, + 
0x97000000, 0xf2000000, 0xcf000000, 0xce000000, 0xf0000000, 0xb4000000, 0xe6000000, 0x73000000, + 0x96000000, 0xac000000, 0x74000000, 0x22000000, 0xe7000000, 0xad000000, 0x35000000, 0x85000000, + 0xe2000000, 0xf9000000, 0x37000000, 0xe8000000, 0x1c000000, 0x75000000, 0xdf000000, 0x6e000000, + 0x47000000, 0xf1000000, 0x1a000000, 0x71000000, 0x1d000000, 0x29000000, 0xc5000000, 0x89000000, + 0x6f000000, 0xb7000000, 0x62000000, 0x0e000000, 0xaa000000, 0x18000000, 0xbe000000, 0x1b000000, + 0xfc000000, 0x56000000, 0x3e000000, 0x4b000000, 0xc6000000, 0xd2000000, 0x79000000, 0x20000000, + 0x9a000000, 0xdb000000, 0xc0000000, 0xfe000000, 0x78000000, 0xcd000000, 0x5a000000, 0xf4000000, + 0x1f000000, 0xdd000000, 0xa8000000, 0x33000000, 0x88000000, 0x07000000, 0xc7000000, 0x31000000, + 0xb1000000, 0x12000000, 0x10000000, 0x59000000, 0x27000000, 0x80000000, 0xec000000, 0x5f000000, + 0x60000000, 0x51000000, 0x7f000000, 0xa9000000, 0x19000000, 0xb5000000, 0x4a000000, 0x0d000000, + 0x2d000000, 0xe5000000, 0x7a000000, 0x9f000000, 0x93000000, 0xc9000000, 0x9c000000, 0xef000000, + 0xa0000000, 0xe0000000, 0x3b000000, 0x4d000000, 0xae000000, 0x2a000000, 0xf5000000, 0xb0000000, + 0xc8000000, 0xeb000000, 0xbb000000, 0x3c000000, 0x83000000, 0x53000000, 0x99000000, 0x61000000, + 0x17000000, 0x2b000000, 0x04000000, 0x7e000000, 0xba000000, 0x77000000, 0xd6000000, 0x26000000, + 0xe1000000, 0x69000000, 0x14000000, 0x63000000, 0x55000000, 0x21000000, 0x0c000000, 0x7d000000, +}; + +static unsigned long key_mix(unsigned long t) +{ + return Te4_0[(unsigned char)(t >> 8)] ^ Te4_1[(unsigned char)(t >> 16)] ^ + Te4_2[(unsigned char)(t >> 24)] ^ Te4_3[(unsigned char)(t >> 0)]; +} + +static unsigned long key_mix2(unsigned long t) +{ + return Td0[Te4_0[(unsigned char)(t >> 0)]] ^ Td1[Te4_0[(unsigned char)(t >> 8)]] ^ + Td2[Te4_0[(unsigned char)(t >> 16)]] ^ Td3[Te4_0[(unsigned char)(t >> 24)]]; +} + +void _stdcall aes256_set_key(const unsigned char *key, aes256_key *skey) +{ + unsigned long *ek, *dk; + int j, i; + unsigned long t, rcon; + + ek = skey->enc_key; + i = 7; rcon = 1; + + memcpy(ek, key, AES_KEY_SIZE); + do + { + ek[ 8] = ek[0] ^ key_mix(ek[7]) ^ rcon; + ek[ 9] = ek[1] ^ ek[ 8]; + ek[10] = ek[2] ^ ek[ 9]; + ek[11] = ek[3] ^ ek[10]; + + if (--i == 0) { + break; + }; + + ek[12] = ek[4] ^ key_mix(_rotr(ek[11], 24)); + ek[13] = ek[5] ^ ek[12]; + ek[14] = ek[6] ^ ek[13]; + ek[15] = ek[7] ^ ek[14]; + ek += 8; rcon <<= 1; + } while (1); + + ek = skey->enc_key; + dk = skey->dec_key; + + for (i = 0, j = 4*ROUNDS; i <= j; i += 4, j -= 4) { + t = ek[i ]; dk[i ] = ek[j ]; dk[j ] = t; + t = ek[i + 1]; dk[i + 1] = ek[j + 1]; dk[j + 1] = t; + t = ek[i + 2]; dk[i + 2] = ek[j + 2]; dk[j + 2] = t; + t = ek[i + 3]; dk[i + 3] = ek[j + 3]; dk[j + 3] = t; + } + i = (ROUNDS-1) * 4; + + do { + dk[4] = key_mix2(dk[4]); dk++; + } while (--i); +} diff --git a/ImBoxEnclave/crypto_fast/aes_key.h b/ImBoxEnclave/crypto_fast/aes_key.h new file mode 100644 index 0000000..bb2fdbf --- /dev/null +++ b/ImBoxEnclave/crypto_fast/aes_key.h @@ -0,0 +1,19 @@ +#ifndef _AES_KEY_H_ +#define _AES_KEY_H_ + +#define ROUNDS 14 +#define AES_KEY_SIZE 32 +#define AES_BLOCK_SIZE 16 + +typedef __declspec(align(16)) struct _aes256_key { + __declspec(align(16)) unsigned long enc_key[4 *(ROUNDS + 1)]; + __declspec(align(16)) unsigned long dec_key[4 *(ROUNDS + 1)]; +#ifdef _M_IX86 + __declspec(align(16)) unsigned char ek_code[3072]; + __declspec(align(16)) unsigned char dk_code[3072]; +#endif +} aes256_key; + +void _stdcall aes256_set_key(const unsigned char *key, aes256_key 
*skey); + +#endif \ No newline at end of file diff --git a/ImBoxEnclave/crypto_fast/aes_padlock.h b/ImBoxEnclave/crypto_fast/aes_padlock.h new file mode 100644 index 0000000..59a64a4 --- /dev/null +++ b/ImBoxEnclave/crypto_fast/aes_padlock.h @@ -0,0 +1,21 @@ +#ifndef _AES_PADLOCK_H_ +#define _AES_PADLOCK_H_ + +#include "aes_key.h" + +#ifdef _M_IX86 +static void __forceinline aes256_padlock_rekey() { + __asm { + pushfd + popfd + } +} +#else +#define aes256_padlock_rekey() __writeeflags(__readeflags()) +#endif + +int _stdcall aes256_padlock_available(); +void _stdcall aes256_padlock_encrypt(const unsigned char *in, unsigned char *out, int n_blocks, aes256_key *key); +void _stdcall aes256_padlock_decrypt(const unsigned char *in, unsigned char *out, int n_blocks, aes256_key *key); + +#endif \ No newline at end of file diff --git a/ImBoxEnclave/crypto_fast/amd64/aes_amd64.asm b/ImBoxEnclave/crypto_fast/amd64/aes_amd64.asm new file mode 100644 index 0000000..4251a81 --- /dev/null +++ b/ImBoxEnclave/crypto_fast/amd64/aes_amd64.asm @@ -0,0 +1,886 @@ + +; --------------------------------------------------------------------------- +; Copyright (c) 1998-2007, Brian Gladman, Worcester, UK. All rights reserved. +; +; LICENSE TERMS +; +; The free distribution and use of this software is allowed (with or without +; changes) provided that: +; +; 1. source code distributions include the above copyright notice, this +; list of conditions and the following disclaimer; +; +; 2. binary distributions include the above copyright notice, this list +; of conditions and the following disclaimer in their documentation; +; +; 3. the name of the copyright holder is not used to endorse products +; built using this software without specific written permission. +; +; DISCLAIMER +; +; This software is provided 'as is' with no explicit or implied warranties +; in respect of its properties, including, but not limited to, correctness +; and/or fitness for purpose. +; --------------------------------------------------------------------------- +; Issue 20/12/2007 +; +; I am grateful to Dag Arne Osvik for many discussions of the techniques that +; can be used to optimise AES assembler code on AMD64/EM64T architectures. +; Some of the techniques used in this implementation are the result of +; suggestions made by him for which I am most grateful. + +; An AES implementation for AMD64 processors using the YASM assembler. This +; implementation provides only encryption, decryption and hence requires key +; scheduling support in C. It uses 8k bytes of tables but its encryption and +; decryption performance is very close to that obtained using large tables. +; It can use either Windows or Gnu/Linux calling conventions, which are as +; follows: +; windows gnu/linux +; +; in_blk rcx rdi +; out_blk rdx rsi +; context (cx) r8 rdx +; +; preserved rsi - + rbx, rbp, rsp, r12, r13, r14 & r15 +; registers rdi - on both +; +; destroyed - rsi + rax, rcx, rdx, r8, r9, r10 & r11 +; registers - rdi on both +; +; The default convention is that for windows, the gnu/linux convention being +; used if __GNUC__ is defined. +; +; Define _SEH_ to include support for Win64 structured exception handling +; (this requires YASM version 0.6 or later). +; +; This code provides the standard AES block size (128 bits, 16 bytes) and the +; three standard AES key sizes (128, 192 and 256 bits). It has the same call +; interface as my C implementation. 
It uses the Microsoft C AMD64 calling
+; conventions in which the three parameters are placed in rcx, rdx and r8
+; respectively. The rbx, rsi, rdi, rbp and r12..r15 registers are preserved.
+;
+; AES_RETURN aes_encrypt(const unsigned char in_blk[],
+; unsigned char out_blk[], const aes_encrypt_ctx cx[1]);
+;
+; AES_RETURN aes_decrypt(const unsigned char in_blk[],
+; unsigned char out_blk[], const aes_decrypt_ctx cx[1]);
+;
+; AES_RETURN aes_encrypt_key<NNN>(const unsigned char key[],
+; const aes_encrypt_ctx cx[1]);
+;
+; AES_RETURN aes_decrypt_key<NNN>(const unsigned char key[],
+; const aes_decrypt_ctx cx[1]);
+;
+; AES_RETURN aes_encrypt_key(const unsigned char key[],
+; unsigned int len, const aes_encrypt_ctx cx[1]);
+;
+; AES_RETURN aes_decrypt_key(const unsigned char key[],
+; unsigned int len, const aes_decrypt_ctx cx[1]);
+;
+; where <NNN> is 128, 192 or 256. In the last two calls the length can be in
+; either bits or bytes.
+;
+; Comment in/out the following lines to obtain the desired subroutines. These
+; selections MUST match those in the C header file aes.h
+
+;%define AES_128 ; define if AES with 128 bit keys is needed
+;%define AES_192 ; define if AES with 192 bit keys is needed
+%define AES_256 ; define if AES with 256 bit keys is needed
+;%define AES_VAR ; define if a variable key size is needed
+%define ENCRYPTION ; define if encryption is needed
+%define DECRYPTION ; define if decryption is needed
+%define AES_REV_DKS ; define if key decryption schedule is reversed
+
+%define LAST_ROUND_TABLES ; define for the faster version using extra tables
+
+; The encryption key schedule has the following in memory layout where N is the
+; number of rounds (10, 12 or 14):
+;
+; lo: | input key (round 0) | ; each round is four 32-bit words
+; | encryption round 1 |
+; | encryption round 2 |
+; ....
+; | encryption round N-1 |
+; hi: | encryption round N |
+;
+; The decryption key schedule is normally set up so that it has the same
+; layout as above by actually reversing the order of the encryption key
+; schedule in memory (this happens when AES_REV_DKS is set):
+;
+; lo: | decryption round 0 | = | encryption round N |
+; | decryption round 1 | = INV_MIX_COL[ | encryption round N-1 | ]
+; | decryption round 2 | = INV_MIX_COL[ | encryption round N-2 | ]
+; .... ....
+; | decryption round N-1 | = INV_MIX_COL[ | encryption round 1 | ]
+; hi: | decryption round N | = | input key (round 0) |
+;
+; with rounds except the first and last modified using inv_mix_column().
+; But if AES_REV_DKS is NOT set the order of keys is left as it is for
+; encryption so that it has to be accessed in reverse when used for
+; decryption (although the inverse mix column modifications are done).
+;
+; lo: | decryption round 0 | = | input key (round 0) |
+; | decryption round 1 | = INV_MIX_COL[ | encryption round 1 | ]
+; | decryption round 2 | = INV_MIX_COL[ | encryption round 2 | ]
+; .... ....
+; | decryption round N-1 | = INV_MIX_COL[ | encryption round N-1 | ]
+; hi: | decryption round N | = | encryption round N |
+;
+; This layout is faster when the assembler key scheduling provided here
+; is used.
+;
+; The DLL interface must use the _stdcall convention in which the number
+; of bytes of parameter space is added after an @ to the subroutine's name.
+; We must also remove our parameters from the stack before return (see
+; the do_exit macro). Define DLL_EXPORT for the Dynamic Link Library version.
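+;
+; As a quick orientation, a minimal C-side usage sketch for the two globals
+; this file exports (aes256_asm_encrypt / aes256_asm_decrypt). The prototypes
+; below are an assumption inferred from the three-parameter convention
+; described above and from crypto_fast/aes_key.h in this patch set; verify
+; them against the project's own headers before relying on them.
+;
+;     #include "aes_key.h"
+;
+;     void _stdcall aes256_asm_encrypt(const unsigned char *in,
+;                                      unsigned char *out, aes256_key *key);
+;     void _stdcall aes256_asm_decrypt(const unsigned char *in,
+;                                      unsigned char *out, aes256_key *key);
+;
+;     /* hypothetical helper: encrypt one 16-byte block in place */
+;     static void encrypt_block(unsigned char blk[16],
+;                               const unsigned char raw_key[32])
+;     {
+;         aes256_key skey;
+;         aes256_set_key(raw_key, &skey);      /* schedule is built in C */
+;         aes256_asm_encrypt(blk, blk, &skey); /* one AES block */
+;     }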
+ +;%define DLL_EXPORT + +; End of user defines + +%ifdef AES_VAR +%ifndef AES_128 +%define AES_128 +%endif +%ifndef AES_192 +%define AES_192 +%endif +%ifndef AES_256 +%define AES_256 +%endif +%endif + +%ifdef AES_VAR +%define KS_LENGTH 60 +%elifdef AES_256 +%define KS_LENGTH 60 +%elifdef AES_192 +%define KS_LENGTH 52 +%else +%define KS_LENGTH 44 +%endif + +%define r0 rax +%define r1 rdx +%define r2 rcx +%define r3 rbx +%define r4 rsi +%define r5 rdi +%define r6 rbp +%define r7 rsp + +%define raxd eax +%define rdxd edx +%define rcxd ecx +%define rbxd ebx +%define rsid esi +%define rdid edi +%define rbpd ebp +%define rspd esp + +%define raxb al +%define rdxb dl +%define rcxb cl +%define rbxb bl +%define rsib sil +%define rdib dil +%define rbpb bpl +%define rspb spl + +%define r0h ah +%define r1h dh +%define r2h ch +%define r3h bh + +%define r0d eax +%define r1d edx +%define r2d ecx +%define r3d ebx + +; finite field multiplies by {02}, {04} and {08} + +%define f2(x) ((x<<1)^(((x>>7)&1)*0x11b)) +%define f4(x) ((x<<2)^(((x>>6)&1)*0x11b)^(((x>>6)&2)*0x11b)) +%define f8(x) ((x<<3)^(((x>>5)&1)*0x11b)^(((x>>5)&2)*0x11b)^(((x>>5)&4)*0x11b)) + +; finite field multiplies required in table generation + +%define f3(x) (f2(x) ^ x) +%define f9(x) (f8(x) ^ x) +%define fb(x) (f8(x) ^ f2(x) ^ x) +%define fd(x) (f8(x) ^ f4(x) ^ x) +%define fe(x) (f8(x) ^ f4(x) ^ f2(x)) + +; macro for expanding S-box data + +%macro enc_vals 1 + db %1(0x63),%1(0x7c),%1(0x77),%1(0x7b),%1(0xf2),%1(0x6b),%1(0x6f),%1(0xc5) + db %1(0x30),%1(0x01),%1(0x67),%1(0x2b),%1(0xfe),%1(0xd7),%1(0xab),%1(0x76) + db %1(0xca),%1(0x82),%1(0xc9),%1(0x7d),%1(0xfa),%1(0x59),%1(0x47),%1(0xf0) + db %1(0xad),%1(0xd4),%1(0xa2),%1(0xaf),%1(0x9c),%1(0xa4),%1(0x72),%1(0xc0) + db %1(0xb7),%1(0xfd),%1(0x93),%1(0x26),%1(0x36),%1(0x3f),%1(0xf7),%1(0xcc) + db %1(0x34),%1(0xa5),%1(0xe5),%1(0xf1),%1(0x71),%1(0xd8),%1(0x31),%1(0x15) + db %1(0x04),%1(0xc7),%1(0x23),%1(0xc3),%1(0x18),%1(0x96),%1(0x05),%1(0x9a) + db %1(0x07),%1(0x12),%1(0x80),%1(0xe2),%1(0xeb),%1(0x27),%1(0xb2),%1(0x75) + db %1(0x09),%1(0x83),%1(0x2c),%1(0x1a),%1(0x1b),%1(0x6e),%1(0x5a),%1(0xa0) + db %1(0x52),%1(0x3b),%1(0xd6),%1(0xb3),%1(0x29),%1(0xe3),%1(0x2f),%1(0x84) + db %1(0x53),%1(0xd1),%1(0x00),%1(0xed),%1(0x20),%1(0xfc),%1(0xb1),%1(0x5b) + db %1(0x6a),%1(0xcb),%1(0xbe),%1(0x39),%1(0x4a),%1(0x4c),%1(0x58),%1(0xcf) + db %1(0xd0),%1(0xef),%1(0xaa),%1(0xfb),%1(0x43),%1(0x4d),%1(0x33),%1(0x85) + db %1(0x45),%1(0xf9),%1(0x02),%1(0x7f),%1(0x50),%1(0x3c),%1(0x9f),%1(0xa8) + db %1(0x51),%1(0xa3),%1(0x40),%1(0x8f),%1(0x92),%1(0x9d),%1(0x38),%1(0xf5) + db %1(0xbc),%1(0xb6),%1(0xda),%1(0x21),%1(0x10),%1(0xff),%1(0xf3),%1(0xd2) + db %1(0xcd),%1(0x0c),%1(0x13),%1(0xec),%1(0x5f),%1(0x97),%1(0x44),%1(0x17) + db %1(0xc4),%1(0xa7),%1(0x7e),%1(0x3d),%1(0x64),%1(0x5d),%1(0x19),%1(0x73) + db %1(0x60),%1(0x81),%1(0x4f),%1(0xdc),%1(0x22),%1(0x2a),%1(0x90),%1(0x88) + db %1(0x46),%1(0xee),%1(0xb8),%1(0x14),%1(0xde),%1(0x5e),%1(0x0b),%1(0xdb) + db %1(0xe0),%1(0x32),%1(0x3a),%1(0x0a),%1(0x49),%1(0x06),%1(0x24),%1(0x5c) + db %1(0xc2),%1(0xd3),%1(0xac),%1(0x62),%1(0x91),%1(0x95),%1(0xe4),%1(0x79) + db %1(0xe7),%1(0xc8),%1(0x37),%1(0x6d),%1(0x8d),%1(0xd5),%1(0x4e),%1(0xa9) + db %1(0x6c),%1(0x56),%1(0xf4),%1(0xea),%1(0x65),%1(0x7a),%1(0xae),%1(0x08) + db %1(0xba),%1(0x78),%1(0x25),%1(0x2e),%1(0x1c),%1(0xa6),%1(0xb4),%1(0xc6) + db %1(0xe8),%1(0xdd),%1(0x74),%1(0x1f),%1(0x4b),%1(0xbd),%1(0x8b),%1(0x8a) + db %1(0x70),%1(0x3e),%1(0xb5),%1(0x66),%1(0x48),%1(0x03),%1(0xf6),%1(0x0e) + db 
%1(0x61),%1(0x35),%1(0x57),%1(0xb9),%1(0x86),%1(0xc1),%1(0x1d),%1(0x9e) + db %1(0xe1),%1(0xf8),%1(0x98),%1(0x11),%1(0x69),%1(0xd9),%1(0x8e),%1(0x94) + db %1(0x9b),%1(0x1e),%1(0x87),%1(0xe9),%1(0xce),%1(0x55),%1(0x28),%1(0xdf) + db %1(0x8c),%1(0xa1),%1(0x89),%1(0x0d),%1(0xbf),%1(0xe6),%1(0x42),%1(0x68) + db %1(0x41),%1(0x99),%1(0x2d),%1(0x0f),%1(0xb0),%1(0x54),%1(0xbb),%1(0x16) +%endmacro + +%macro dec_vals 1 + db %1(0x52),%1(0x09),%1(0x6a),%1(0xd5),%1(0x30),%1(0x36),%1(0xa5),%1(0x38) + db %1(0xbf),%1(0x40),%1(0xa3),%1(0x9e),%1(0x81),%1(0xf3),%1(0xd7),%1(0xfb) + db %1(0x7c),%1(0xe3),%1(0x39),%1(0x82),%1(0x9b),%1(0x2f),%1(0xff),%1(0x87) + db %1(0x34),%1(0x8e),%1(0x43),%1(0x44),%1(0xc4),%1(0xde),%1(0xe9),%1(0xcb) + db %1(0x54),%1(0x7b),%1(0x94),%1(0x32),%1(0xa6),%1(0xc2),%1(0x23),%1(0x3d) + db %1(0xee),%1(0x4c),%1(0x95),%1(0x0b),%1(0x42),%1(0xfa),%1(0xc3),%1(0x4e) + db %1(0x08),%1(0x2e),%1(0xa1),%1(0x66),%1(0x28),%1(0xd9),%1(0x24),%1(0xb2) + db %1(0x76),%1(0x5b),%1(0xa2),%1(0x49),%1(0x6d),%1(0x8b),%1(0xd1),%1(0x25) + db %1(0x72),%1(0xf8),%1(0xf6),%1(0x64),%1(0x86),%1(0x68),%1(0x98),%1(0x16) + db %1(0xd4),%1(0xa4),%1(0x5c),%1(0xcc),%1(0x5d),%1(0x65),%1(0xb6),%1(0x92) + db %1(0x6c),%1(0x70),%1(0x48),%1(0x50),%1(0xfd),%1(0xed),%1(0xb9),%1(0xda) + db %1(0x5e),%1(0x15),%1(0x46),%1(0x57),%1(0xa7),%1(0x8d),%1(0x9d),%1(0x84) + db %1(0x90),%1(0xd8),%1(0xab),%1(0x00),%1(0x8c),%1(0xbc),%1(0xd3),%1(0x0a) + db %1(0xf7),%1(0xe4),%1(0x58),%1(0x05),%1(0xb8),%1(0xb3),%1(0x45),%1(0x06) + db %1(0xd0),%1(0x2c),%1(0x1e),%1(0x8f),%1(0xca),%1(0x3f),%1(0x0f),%1(0x02) + db %1(0xc1),%1(0xaf),%1(0xbd),%1(0x03),%1(0x01),%1(0x13),%1(0x8a),%1(0x6b) + db %1(0x3a),%1(0x91),%1(0x11),%1(0x41),%1(0x4f),%1(0x67),%1(0xdc),%1(0xea) + db %1(0x97),%1(0xf2),%1(0xcf),%1(0xce),%1(0xf0),%1(0xb4),%1(0xe6),%1(0x73) + db %1(0x96),%1(0xac),%1(0x74),%1(0x22),%1(0xe7),%1(0xad),%1(0x35),%1(0x85) + db %1(0xe2),%1(0xf9),%1(0x37),%1(0xe8),%1(0x1c),%1(0x75),%1(0xdf),%1(0x6e) + db %1(0x47),%1(0xf1),%1(0x1a),%1(0x71),%1(0x1d),%1(0x29),%1(0xc5),%1(0x89) + db %1(0x6f),%1(0xb7),%1(0x62),%1(0x0e),%1(0xaa),%1(0x18),%1(0xbe),%1(0x1b) + db %1(0xfc),%1(0x56),%1(0x3e),%1(0x4b),%1(0xc6),%1(0xd2),%1(0x79),%1(0x20) + db %1(0x9a),%1(0xdb),%1(0xc0),%1(0xfe),%1(0x78),%1(0xcd),%1(0x5a),%1(0xf4) + db %1(0x1f),%1(0xdd),%1(0xa8),%1(0x33),%1(0x88),%1(0x07),%1(0xc7),%1(0x31) + db %1(0xb1),%1(0x12),%1(0x10),%1(0x59),%1(0x27),%1(0x80),%1(0xec),%1(0x5f) + db %1(0x60),%1(0x51),%1(0x7f),%1(0xa9),%1(0x19),%1(0xb5),%1(0x4a),%1(0x0d) + db %1(0x2d),%1(0xe5),%1(0x7a),%1(0x9f),%1(0x93),%1(0xc9),%1(0x9c),%1(0xef) + db %1(0xa0),%1(0xe0),%1(0x3b),%1(0x4d),%1(0xae),%1(0x2a),%1(0xf5),%1(0xb0) + db %1(0xc8),%1(0xeb),%1(0xbb),%1(0x3c),%1(0x83),%1(0x53),%1(0x99),%1(0x61) + db %1(0x17),%1(0x2b),%1(0x04),%1(0x7e),%1(0xba),%1(0x77),%1(0xd6),%1(0x26) + db %1(0xe1),%1(0x69),%1(0x14),%1(0x63),%1(0x55),%1(0x21),%1(0x0c),%1(0x7d) +%endmacro + +%define u8(x) f2(x), x, x, f3(x), f2(x), x, x, f3(x) +%define v8(x) fe(x), f9(x), fd(x), fb(x), fe(x), f9(x), fd(x), x +%define w8(x) x, 0, 0, 0, x, 0, 0, 0 + +%define tptr rbp ; table pointer +%define kptr r8 ; key schedule pointer +%define fofs 128 ; adjust offset in key schedule to keep |disp| < 128 +%define fk_ref(x,y) [kptr-16*x+fofs+4*y] +%ifdef AES_REV_DKS +%define rofs 128 +%define ik_ref(x,y) [kptr-16*x+rofs+4*y] +%else +%define rofs -128 +%define ik_ref(x,y) [kptr+16*x+rofs+4*y] +%endif + +%define tab_0(x) [tptr+8*x] +%define tab_1(x) [tptr+8*x+3] +%define tab_2(x) [tptr+8*x+2] +%define tab_3(x) [tptr+8*x+1] +%define tab_f(x) byte [tptr+8*x+1] 
+%define tab_i(x) byte [tptr+8*x+7] +%define t_ref(x,r) tab_ %+ x(r) + +%macro ff_rnd 5 ; normal forward round + mov %1d, fk_ref(%5,0) + mov %2d, fk_ref(%5,1) + mov %3d, fk_ref(%5,2) + mov %4d, fk_ref(%5,3) + + movzx esi, al + movzx edi, ah + shr eax, 16 + xor %1d, t_ref(0,rsi) + xor %4d, t_ref(1,rdi) + movzx esi, al + movzx edi, ah + xor %3d, t_ref(2,rsi) + xor %2d, t_ref(3,rdi) + + movzx esi, bl + movzx edi, bh + shr ebx, 16 + xor %2d, t_ref(0,rsi) + xor %1d, t_ref(1,rdi) + movzx esi, bl + movzx edi, bh + xor %4d, t_ref(2,rsi) + xor %3d, t_ref(3,rdi) + + movzx esi, cl + movzx edi, ch + shr ecx, 16 + xor %3d, t_ref(0,rsi) + xor %2d, t_ref(1,rdi) + movzx esi, cl + movzx edi, ch + xor %1d, t_ref(2,rsi) + xor %4d, t_ref(3,rdi) + + movzx esi, dl + movzx edi, dh + shr edx, 16 + xor %4d, t_ref(0,rsi) + xor %3d, t_ref(1,rdi) + movzx esi, dl + movzx edi, dh + xor %2d, t_ref(2,rsi) + xor %1d, t_ref(3,rdi) + + mov eax,%1d + mov ebx,%2d + mov ecx,%3d + mov edx,%4d +%endmacro + +%ifdef LAST_ROUND_TABLES + +%macro fl_rnd 5 ; last forward round + add tptr, 2048 + mov %1d, fk_ref(%5,0) + mov %2d, fk_ref(%5,1) + mov %3d, fk_ref(%5,2) + mov %4d, fk_ref(%5,3) + + movzx esi, al + movzx edi, ah + shr eax, 16 + xor %1d, t_ref(0,rsi) + xor %4d, t_ref(1,rdi) + movzx esi, al + movzx edi, ah + xor %3d, t_ref(2,rsi) + xor %2d, t_ref(3,rdi) + + movzx esi, bl + movzx edi, bh + shr ebx, 16 + xor %2d, t_ref(0,rsi) + xor %1d, t_ref(1,rdi) + movzx esi, bl + movzx edi, bh + xor %4d, t_ref(2,rsi) + xor %3d, t_ref(3,rdi) + + movzx esi, cl + movzx edi, ch + shr ecx, 16 + xor %3d, t_ref(0,rsi) + xor %2d, t_ref(1,rdi) + movzx esi, cl + movzx edi, ch + xor %1d, t_ref(2,rsi) + xor %4d, t_ref(3,rdi) + + movzx esi, dl + movzx edi, dh + shr edx, 16 + xor %4d, t_ref(0,rsi) + xor %3d, t_ref(1,rdi) + movzx esi, dl + movzx edi, dh + xor %2d, t_ref(2,rsi) + xor %1d, t_ref(3,rdi) +%endmacro + +%else + +%macro fl_rnd 5 ; last forward round + mov %1d, fk_ref(%5,0) + mov %2d, fk_ref(%5,1) + mov %3d, fk_ref(%5,2) + mov %4d, fk_ref(%5,3) + + movzx esi, al + movzx edi, ah + shr eax, 16 + movzx esi, t_ref(f,rsi) + movzx edi, t_ref(f,rdi) + xor %1d, esi + rol edi, 8 + xor %4d, edi + movzx esi, al + movzx edi, ah + movzx esi, t_ref(f,rsi) + movzx edi, t_ref(f,rdi) + rol esi, 16 + rol edi, 24 + xor %3d, esi + xor %2d, edi + + movzx esi, bl + movzx edi, bh + shr ebx, 16 + movzx esi, t_ref(f,rsi) + movzx edi, t_ref(f,rdi) + xor %2d, esi + rol edi, 8 + xor %1d, edi + movzx esi, bl + movzx edi, bh + movzx esi, t_ref(f,rsi) + movzx edi, t_ref(f,rdi) + rol esi, 16 + rol edi, 24 + xor %4d, esi + xor %3d, edi + + movzx esi, cl + movzx edi, ch + movzx esi, t_ref(f,rsi) + movzx edi, t_ref(f,rdi) + shr ecx, 16 + xor %3d, esi + rol edi, 8 + xor %2d, edi + movzx esi, cl + movzx edi, ch + movzx esi, t_ref(f,rsi) + movzx edi, t_ref(f,rdi) + rol esi, 16 + rol edi, 24 + xor %1d, esi + xor %4d, edi + + movzx esi, dl + movzx edi, dh + movzx esi, t_ref(f,rsi) + movzx edi, t_ref(f,rdi) + shr edx, 16 + xor %4d, esi + rol edi, 8 + xor %3d, edi + movzx esi, dl + movzx edi, dh + movzx esi, t_ref(f,rsi) + movzx edi, t_ref(f,rdi) + rol esi, 16 + rol edi, 24 + xor %2d, esi + xor %1d, edi +%endmacro + +%endif + +%macro ii_rnd 5 ; normal inverse round + mov %1d, ik_ref(%5,0) + mov %2d, ik_ref(%5,1) + mov %3d, ik_ref(%5,2) + mov %4d, ik_ref(%5,3) + + movzx esi, al + movzx edi, ah + shr eax, 16 + xor %1d, t_ref(0,rsi) + xor %2d, t_ref(1,rdi) + movzx esi, al + movzx edi, ah + xor %3d, t_ref(2,rsi) + xor %4d, t_ref(3,rdi) + + movzx esi, bl + movzx edi, bh + shr ebx, 16 + xor %2d, 
t_ref(0,rsi) + xor %3d, t_ref(1,rdi) + movzx esi, bl + movzx edi, bh + xor %4d, t_ref(2,rsi) + xor %1d, t_ref(3,rdi) + + movzx esi, cl + movzx edi, ch + shr ecx, 16 + xor %3d, t_ref(0,rsi) + xor %4d, t_ref(1,rdi) + movzx esi, cl + movzx edi, ch + xor %1d, t_ref(2,rsi) + xor %2d, t_ref(3,rdi) + + movzx esi, dl + movzx edi, dh + shr edx, 16 + xor %4d, t_ref(0,rsi) + xor %1d, t_ref(1,rdi) + movzx esi, dl + movzx edi, dh + xor %2d, t_ref(2,rsi) + xor %3d, t_ref(3,rdi) + + mov eax,%1d + mov ebx,%2d + mov ecx,%3d + mov edx,%4d +%endmacro + +%ifdef LAST_ROUND_TABLES + +%macro il_rnd 5 ; last inverse round + add tptr, 2048 + mov %1d, ik_ref(%5,0) + mov %2d, ik_ref(%5,1) + mov %3d, ik_ref(%5,2) + mov %4d, ik_ref(%5,3) + + movzx esi, al + movzx edi, ah + shr eax, 16 + xor %1d, t_ref(0,rsi) + xor %2d, t_ref(1,rdi) + movzx esi, al + movzx edi, ah + xor %3d, t_ref(2,rsi) + xor %4d, t_ref(3,rdi) + + movzx esi, bl + movzx edi, bh + shr ebx, 16 + xor %2d, t_ref(0,rsi) + xor %3d, t_ref(1,rdi) + movzx esi, bl + movzx edi, bh + xor %4d, t_ref(2,rsi) + xor %1d, t_ref(3,rdi) + + movzx esi, cl + movzx edi, ch + shr ecx, 16 + xor %3d, t_ref(0,rsi) + xor %4d, t_ref(1,rdi) + movzx esi, cl + movzx edi, ch + xor %1d, t_ref(2,rsi) + xor %2d, t_ref(3,rdi) + + movzx esi, dl + movzx edi, dh + shr edx, 16 + xor %4d, t_ref(0,rsi) + xor %1d, t_ref(1,rdi) + movzx esi, dl + movzx edi, dh + xor %2d, t_ref(2,rsi) + xor %3d, t_ref(3,rdi) +%endmacro + +%else + +%macro il_rnd 5 ; last inverse round + mov %1d, ik_ref(%5,0) + mov %2d, ik_ref(%5,1) + mov %3d, ik_ref(%5,2) + mov %4d, ik_ref(%5,3) + + movzx esi, al + movzx edi, ah + movzx esi, t_ref(i,rsi) + movzx edi, t_ref(i,rdi) + shr eax, 16 + xor %1d, esi + rol edi, 8 + xor %2d, edi + movzx esi, al + movzx edi, ah + movzx esi, t_ref(i,rsi) + movzx edi, t_ref(i,rdi) + rol esi, 16 + rol edi, 24 + xor %3d, esi + xor %4d, edi + + movzx esi, bl + movzx edi, bh + movzx esi, t_ref(i,rsi) + movzx edi, t_ref(i,rdi) + shr ebx, 16 + xor %2d, esi + rol edi, 8 + xor %3d, edi + movzx esi, bl + movzx edi, bh + movzx esi, t_ref(i,rsi) + movzx edi, t_ref(i,rdi) + rol esi, 16 + rol edi, 24 + xor %4d, esi + xor %1d, edi + + movzx esi, cl + movzx edi, ch + movzx esi, t_ref(i,rsi) + movzx edi, t_ref(i,rdi) + shr ecx, 16 + xor %3d, esi + rol edi, 8 + xor %4d, edi + movzx esi, cl + movzx edi, ch + movzx esi, t_ref(i,rsi) + movzx edi, t_ref(i,rdi) + rol esi, 16 + rol edi, 24 + xor %1d, esi + xor %2d, edi + + movzx esi, dl + movzx edi, dh + movzx esi, t_ref(i,rsi) + movzx edi, t_ref(i,rdi) + shr edx, 16 + xor %4d, esi + rol edi, 8 + xor %1d, edi + movzx esi, dl + movzx edi, dh + movzx esi, t_ref(i,rsi) + movzx edi, t_ref(i,rdi) + rol esi, 16 + rol edi, 24 + xor %2d, esi + xor %3d, edi +%endmacro + +%endif + +%ifdef ENCRYPTION + + global aes256_asm_encrypt +%ifdef DLL_EXPORT + export aes256_asm_encrypt +%endif + + section .data align=64 + align 64 +enc_tab: + enc_vals u8 +%ifdef LAST_ROUND_TABLES + enc_vals w8 +%endif + + section .text align=16 + align 16 + +%ifdef _SEH_ +proc_frame aes256_asm_encrypt + alloc_stack 7*8 ; 7 to align stack to 16 bytes + save_reg rsi,4*8 + save_reg rdi,5*8 + save_reg rbx,1*8 + save_reg rbp,2*8 + save_reg r12,3*8 +end_prologue + mov rdi, rcx ; input pointer + mov [rsp+0*8], rdx ; output pointer +%else + aes256_asm_encrypt: + %ifdef __GNUC__ + sub rsp, 4*8 ; gnu/linux binary interface + mov [rsp+0*8], rsi ; output pointer + mov r8, rdx ; context + %else + sub rsp, 6*8 ; windows binary interface + mov [rsp+4*8], rsi + mov [rsp+5*8], rdi + mov rdi, rcx ; input pointer + mov 
[rsp+0*8], rdx ; output pointer + %endif + mov [rsp+1*8], rbx ; input pointer in rdi + mov [rsp+2*8], rbp ; output pointer in [rsp] + mov [rsp+3*8], r12 ; context in r8 +%endif + lea tptr,[enc_tab wrt rip] + sub kptr, fofs + + mov eax, [rdi+0*4] + mov ebx, [rdi+1*4] + mov ecx, [rdi+2*4] + mov edx, [rdi+3*4] + + xor eax, [kptr+fofs] + xor ebx, [kptr+fofs+4] + xor ecx, [kptr+fofs+8] + xor edx, [kptr+fofs+12] + + add kptr, 14*16 + + ff_rnd r9, r10, r11, r12, 13 + ff_rnd r9, r10, r11, r12, 12 + ff_rnd r9, r10, r11, r12, 11 + ff_rnd r9, r10, r11, r12, 10 + ff_rnd r9, r10, r11, r12, 9 + ff_rnd r9, r10, r11, r12, 8 + ff_rnd r9, r10, r11, r12, 7 + ff_rnd r9, r10, r11, r12, 6 + ff_rnd r9, r10, r11, r12, 5 + ff_rnd r9, r10, r11, r12, 4 + ff_rnd r9, r10, r11, r12, 3 + ff_rnd r9, r10, r11, r12, 2 + ff_rnd r9, r10, r11, r12, 1 + fl_rnd r9, r10, r11, r12, 0 + + mov rbx, [rsp] + mov [rbx], r9d + mov [rbx+4], r10d + mov [rbx+8], r11d + mov [rbx+12], r12d + xor rax, rax + + mov rbx, [rsp+1*8] + mov rbp, [rsp+2*8] + mov r12, [rsp+3*8] +%ifdef __GNUC__ + add rsp, 4*8 + ret +%else + mov rsi, [rsp+4*8] + mov rdi, [rsp+5*8] + %ifdef _SEH_ + add rsp, 7*8 + ret + endproc_frame + %else + add rsp, 6*8 + ret + %endif +%endif + +%endif + +%ifdef DECRYPTION + + global aes256_asm_decrypt +%ifdef DLL_EXPORT + export aes256_asm_decrypt +%endif + + section .data + align 64 +dec_tab: + dec_vals v8 +%ifdef LAST_ROUND_TABLES + dec_vals w8 +%endif + + section .text + align 16 + +%ifdef _SEH_ +proc_frame aes256_asm_decrypt + alloc_stack 7*8 ; 7 to align stack to 16 bytes + save_reg rsi,4*8 + save_reg rdi,5*8 + save_reg rbx,1*8 + save_reg rbp,2*8 + save_reg r12,3*8 +end_prologue + mov rdi, rcx ; input pointer + mov [rsp+0*8], rdx ; output pointer +%else + aes256_asm_decrypt: + %ifdef __GNUC__ + sub rsp, 4*8 ; gnu/linux binary interface + mov [rsp+0*8], rsi ; output pointer + mov r8, rdx ; context + %else + sub rsp, 6*8 ; windows binary interface + mov [rsp+4*8], rsi + mov [rsp+5*8], rdi + mov rdi, rcx ; input pointer + mov [rsp+0*8], rdx ; output pointer + %endif + mov [rsp+1*8], rbx ; input pointer in rdi + mov [rsp+2*8], rbp ; output pointer in [rsp] + mov [rsp+3*8], r12 ; context in r8 +%endif + add kptr, 4*KS_LENGTH + lea tptr,[dec_tab wrt rip] + sub kptr, rofs + + mov eax, [rdi+0*4] + mov ebx, [rdi+1*4] + mov ecx, [rdi+2*4] + mov edx, [rdi+3*4] + +%ifdef AES_REV_DKS + mov rdi, kptr + add kptr, 14*16 +%else + add rdi, 14*16 +%endif + + xor eax, [rdi+rofs] + xor ebx, [rdi+rofs+4] + xor ecx, [rdi+rofs+8] + xor edx, [rdi+rofs+12] + + + ii_rnd r9, r10, r11, r12, 13 + ii_rnd r9, r10, r11, r12, 12 + ii_rnd r9, r10, r11, r12, 11 + ii_rnd r9, r10, r11, r12, 10 + ii_rnd r9, r10, r11, r12, 9 + ii_rnd r9, r10, r11, r12, 8 + ii_rnd r9, r10, r11, r12, 7 + ii_rnd r9, r10, r11, r12, 6 + ii_rnd r9, r10, r11, r12, 5 + ii_rnd r9, r10, r11, r12, 4 + ii_rnd r9, r10, r11, r12, 3 + ii_rnd r9, r10, r11, r12, 2 + ii_rnd r9, r10, r11, r12, 1 + il_rnd r9, r10, r11, r12, 0 + + mov rbx, [rsp] + mov [rbx], r9d + mov [rbx+4], r10d + mov [rbx+8], r11d + mov [rbx+12], r12d + xor rax, rax + mov rbx, [rsp+1*8] + mov rbp, [rsp+2*8] + mov r12, [rsp+3*8] +%ifdef __GNUC__ + add rsp, 4*8 + ret +%else + mov rsi, [rsp+4*8] + mov rdi, [rsp+5*8] + %ifdef _SEH_ + add rsp, 7*8 + ret + endproc_frame + %else + add rsp, 6*8 + ret + %endif +%endif + +%endif + + end diff --git a/ImBoxEnclave/crypto_fast/amd64/aes_padlock_amd64.asm b/ImBoxEnclave/crypto_fast/amd64/aes_padlock_amd64.asm new file mode 100644 index 0000000..293462c --- /dev/null +++ 
b/ImBoxEnclave/crypto_fast/amd64/aes_padlock_amd64.asm
@@ -0,0 +1,91 @@
+;
+; *
+; * Copyright (c) 2009-2010
+; * ntldr PGP key ID - 0x1B6A24550F33E44A
+; *
+; This program is free software: you can redistribute it and/or modify
+; it under the terms of the GNU General Public License version 3 as
+; published by the Free Software Foundation.
+;
+; This program is distributed in the hope that it will be useful,
+; but WITHOUT ANY WARRANTY; without even the implied warranty of
+; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+; GNU General Public License for more details.
+;
+; You should have received a copy of the GNU General Public License
+; along with this program. If not, see <http://www.gnu.org/licenses/>.
+;
+
+%define NEH_LOAD_KEY 00000080h ; load schedule from memory
+%define NEH_ENCRYPT 00000000h ; encryption
+%define NEH_DECRYPT 00000200h ; decryption
+%define NEH_KEY128 00000000h+0ah ; 128 bit key
+%define NEH_KEY192 00000400h+0ch ; 192 bit key
+%define NEH_KEY256 00000800h+0eh ; 256 bit key
+%define NEH_ENC_LOAD (NEH_ENCRYPT | NEH_LOAD_KEY)
+%define NEH_DEC_LOAD (NEH_DECRYPT | NEH_LOAD_KEY)
+
+align 16
+enc_cwd dd (NEH_ENC_LOAD | NEH_KEY256), 0, 0
+align 16
+dec_cwd dd (NEH_DEC_LOAD | NEH_KEY256), 0, 0
+
+global aes256_padlock_available
+global aes256_padlock_encrypt
+global aes256_padlock_decrypt
+
+aes256_padlock_available:
+ push rbx
+ ; test for VIA CPU
+ mov eax, 0C0000000h
+ cpuid
+ cmp eax, 0C0000001h
+ jb no_ace
+ ; read VIA flags
+ mov eax, 0C0000001h
+ cpuid
+ and edx, 0C0h ; ACE_MASK,CPUID EDX code for ACE
+ cmp edx, 0C0h ; ACE_MASK,CPUID EDX code for ACE
+ jnz no_ace
+ ; ACE present
+ xor rax, rax
+ inc eax
+ jmp end_ace
+no_ace:
+ xor rax, rax
+end_ace:
+ pop rbx
+ ret
+
+align 16
+aes256_padlock_encrypt:
+ push rbx
+ push rsi
+ push rdi
+ mov rsi, rcx ; in
+ mov rdi, rdx ; out
+ mov rcx, r8 ; n_blocks
+ mov rbx, r9 ; key
+ lea rdx, [rel enc_cwd]
+ xcryptecb
+ pop rdi
+ pop rsi
+ pop rbx
+ ret
+
+align 16
+aes256_padlock_decrypt:
+ push rbx
+ push rsi
+ push rdi
+ mov rsi, rcx ; in
+ mov rdi, rdx ; out
+ mov rcx, r8 ; n_blocks
+ lea rbx, [r9+4*15*4] ; key
+ lea rdx, [rel dec_cwd]
+ xcryptecb
+ pop rdi
+ pop rsi
+ pop rbx
+ ret
+
diff --git a/ImBoxEnclave/crypto_fast/amd64/twofish_amd64.asm b/ImBoxEnclave/crypto_fast/amd64/twofish_amd64.asm
new file mode 100644
index 0000000..0f1f822
--- /dev/null
+++ b/ImBoxEnclave/crypto_fast/amd64/twofish_amd64.asm
@@ -0,0 +1,320 @@
+;***************************************************************************
+;* Copyright (C) 2006 by Joachim Fritschi, *
+;* adapted for DiskCryptor by ntldr *
+;* PGP key ID - 0x1B6A24550F33E44A *
+;* *
+;* This program is free software; you can redistribute it and/or modify *
+;* it under the terms of the GNU General Public License as published by *
+;* the Free Software Foundation; either version 2 of the License, or *
+;* (at your option) any later version. *
+;* *
+;* This program is distributed in the hope that it will be useful, *
+;* but WITHOUT ANY WARRANTY; without even the implied warranty of *
+;* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
+;* GNU General Public License for more details. *
+;* *
+;* You should have received a copy of the GNU General Public License *
+;* along with this program; if not, write to the *
+;* Free Software Foundation, Inc., *
+;* 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
* +;*************************************************************************** + +%define a_offset 0 +%define b_offset 4 +%define c_offset 8 +%define d_offset 12 + +; Structure of the crypto context struct +%define s0 0 ; S0 Array 256 Words each +%define s1 1024 ; S1 Array +%define s2 2048 ; S2 Array +%define s3 3072 ; S3 Array +%define w 4096 ; 8 whitening keys (word) +%define k 4128 ; key 1-32 ( word ) + +; define a few register aliases to allow macro substitution +%define R0Q rax +%define R0D eax +%define R0B al +%define R0H ah + +%define R1Q rbx +%define R1D ebx +%define R1B bl +%define R1H bh + +%define R2Q rcx +%define R2D ecx +%define R2B cl +%define R2H ch + +%define R3Q rdx +%define R3D edx +%define R3B dl +%define R3H dh + + +; performs input whitening +%macro input_whitening 3 + xor %1, [w+(%2)+%3] +%endmacro + +; performs input whitening +%macro output_whitening 3 + xor %1, [w+16+(%2)+%3] +%endmacro + + +; * a input register containing a (rotated 16) +; * b input register containing b +; * c input register containing c +; * d input register containing d (already rol $1) +; * operations on a and b are interleaved to increase performance +%macro encrypt_round 5 + movzx edi, %2B + mov r11d, [r8+rdi*4+s1] + movzx edi, %1B + mov r9d, [r8+rdi*4+s2] + movzx edi, %2H + ror %2D, 16 + xor r11d, [r8+rdi*4+s2] + movzx edi, %1H + ror %1D, 16 + xor r9d, [r8+rdi*4+s3] + movzx edi, %2B + xor r11d, [r8+rdi*4+s3] + movzx edi, %1B + xor r9d, [r8+rdi*4] + movzx edi, %2H + ror %2D, 15 + xor r11d, [r8+rdi*4] + movzx edi, %1H + xor r9d, [r8+rdi*4+s1] + add r9d, r11d + add r11d, r9d + add r9d, [r8+k+%5] + xor %3D, r9d + rol %3D, 15 + add r11d, [r8+k+4+%5] + xor %4D, r11d +%endmacro + +; * a input register containing a(rotated 16) +; * b input register containing b +; * c input register containing c +; * d input register containing d (already rol $1) +; * operations on a and b are interleaved to increase performance +; * during the %5 a and b are prepared for the output whitening +%macro encrypt_last_round 5 + mov r10d, %2D + shl r10, 32 + movzx edi, %2B + mov r11d, [r8+rdi*4+s1] + movzx edi, %1B + mov r9d, [r8+rdi*4+s2] + movzx edi, %2H + ror %2D, 16 + xor r11d, [r8+rdi*4+s2] + movzx edi, %1H + ror %1D, 16 + xor r9d, [r8+rdi*4+s3] + movzx edi, %2B + xor r11d, [r8+rdi*4+s3] + movzx edi, %1B + xor r9d, [r8+rdi*4] + xor r10, %1Q + movzx edi, %2H + xor r11d, [r8+rdi*4] + movzx edi, %1H + xor r9d, [r8+rdi*4+s1] + add r9d, r11d + add r11d, r9d + add r9d, [r8+k+%5] + xor %3D, r9d + ror %3D, 1 + add r11d, [r8+k+4+%5] + xor %4D, r11d +%endmacro + +; * a input register containing a +; * b input register containing b (rotated 16) +; * c input register containing c (already rol $1) +; * d input register containing d +; * operations on a and b are interleaved to increase performance +%macro decrypt_round 5 + movzx edi, %1B + mov r9d, [r8+rdi*4] + movzx edi, %2B + mov r11d, [r8+rdi*4+s3] + movzx edi, %1H + ror %1D, 16 + xor r9d, [r8+rdi*4+s1] + movzx edi, %2H + ror %2D, 16 + xor r11d, [r8+rdi*4] + movzx edi, %1B + xor r9d, [r8+rdi*4+s2] + movzx edi, %2B + xor r11d, [r8+rdi*4+s1] + movzx edi, %1H + ror %1D, 15 + xor r9d, [r8+rdi*4+s3] + movzx edi, %2H + xor r11d, [r8+rdi*4+s2] + add r9d, r11d + add r11d, r9d + add r9d, [r8+k+%5] + xor %3D, r9d + add r11d, [r8+k+4+%5] + xor %4D, r11d + rol %4D, 15 +%endmacro + +; * a input register containing a +; * b input register containing b +; * c input register containing c (already rol $1) +; * d input register containing d +; * operations on a and b are interleaved to 
increase performance +; * during the %5 a and b are prepared for the output whitening +%macro decrypt_last_round 5 + movzx edi, %1B + mov r9d, [r8+rdi*4] + movzx edi, %2B + mov r11d, [r8+rdi*4+s3] + movzx edi, %2H + ror %2D, 16 + xor r11d, [r8+rdi*4] + movzx edi, %1H + mov r10d, %2D + shl r10, 32 + xor r10, %1Q + ror %1D, 16 + xor r9d, [r8+rdi*4+s1] + movzx edi, %2B + xor r11d, [r8+rdi*4+s1] + movzx edi, %1B + xor r9d, [r8+rdi*4+s2] + movzx edi, %2H + xor r11d, [r8+rdi*4+s2] + movzx edi, %1H + xor r9d, [r8+rdi*4+s3] + add r9d, r11d + add r11d, r9d + add r9d, [r8+k+%5] + xor %3D, r9d + add r11d, [r8+k+4+%5] + xor %4D, r11d + ror %4D, 1 +%endmacro + +global twofish256_encrypt +global twofish256_decrypt + +align 16 +twofish256_encrypt: + push R1Q + push rsi + push rdi + ; r8 contains the crypto tfm address + ; rdx contains the output address + ; rcx contains the input address + mov rsi, rdx + + mov R1Q, [rcx] + mov R3Q, [rcx+8] + + input_whitening R1Q, r8, a_offset + input_whitening R3Q, r8, c_offset + mov R0D, R1D + rol R0D, 16 + shr R1Q, 32 + mov R2D, R3D + shr R3Q, 32 + rol R3D, 1 + + encrypt_round R0,R1,R2,R3,0 + encrypt_round R2,R3,R0,R1,8 + encrypt_round R0,R1,R2,R3,2*8 + encrypt_round R2,R3,R0,R1,3*8 + encrypt_round R0,R1,R2,R3,4*8 + encrypt_round R2,R3,R0,R1,5*8 + encrypt_round R0,R1,R2,R3,6*8 + encrypt_round R2,R3,R0,R1,7*8 + + encrypt_round R0,R1,R2,R3,8*8 + encrypt_round R2,R3,R0,R1,9*8 + encrypt_round R0,R1,R2,R3,10*8 + encrypt_round R2,R3,R0,R1,11*8 + encrypt_round R0,R1,R2,R3,12*8 + encrypt_round R2,R3,R0,R1,13*8 + encrypt_round R0,R1,R2,R3,14*8 + encrypt_last_round R2,R3,R0,R1,15*8 + + output_whitening r10, r8, a_offset + mov [rsi], r10 + + shl R1Q, 32 + xor R1Q, R0Q + + output_whitening R1Q, r8, c_offset + mov [rsi+8], R1Q + + pop rdi + pop rsi + + pop R1Q + ret + +align 16 +twofish256_decrypt: + push R1Q + push rsi + push rdi + ; r8 contains the crypto tfm address + ; rdx contains the output address + ; rcx contains the input address + mov rsi, rdx + + mov R1Q, [rcx] + mov R3Q, [rcx+8] + + output_whitening R1Q, r8, a_offset + output_whitening R3Q, r8, c_offset + mov R0D, R1D + shr R1Q, 32 + rol R1D, 16 + mov R2D, R3D + shr R3Q, 32 + rol R2D, 1 + + decrypt_round R0,R1,R2,R3,15*8 + decrypt_round R2,R3,R0,R1,14*8 + decrypt_round R0,R1,R2,R3,13*8 + decrypt_round R2,R3,R0,R1,12*8 + decrypt_round R0,R1,R2,R3,11*8 + decrypt_round R2,R3,R0,R1,10*8 + decrypt_round R0,R1,R2,R3,9*8 + decrypt_round R2,R3,R0,R1,8*8 + decrypt_round R0,R1,R2,R3,7*8 + decrypt_round R2,R3,R0,R1,6*8 + decrypt_round R0,R1,R2,R3,5*8 + decrypt_round R2,R3,R0,R1,4*8 + decrypt_round R0,R1,R2,R3,3*8 + decrypt_round R2,R3,R0,R1,2*8 + decrypt_round R0,R1,R2,R3,1*8 + decrypt_last_round R2,R3,R0,R1,0 + + input_whitening r10, r8, a_offset + mov [rsi], r10 + + shl R1Q, 32 + xor R1Q, R0Q + + input_whitening R1Q, r8, c_offset + mov [rsi+8], R1Q + + pop rdi + pop rsi + + pop R1Q + ret diff --git a/ImBoxEnclave/crypto_fast/amd64/xts_aes_ni_amd64.asm b/ImBoxEnclave/crypto_fast/amd64/xts_aes_ni_amd64.asm new file mode 100644 index 0000000..4b90e4e --- /dev/null +++ b/ImBoxEnclave/crypto_fast/amd64/xts_aes_ni_amd64.asm @@ -0,0 +1,242 @@ +; +; * +; * Copyright (c) 2010 +; * ntldr PGP key ID - 0x1B6A24550F33E44A +; * +; This program is free software: you can redistribute it and/or modify +; it under the terms of the GNU General Public License version 3 as +; published by the Free Software Foundation. 
+;
+; This program is distributed in the hope that it will be useful,
+; but WITHOUT ANY WARRANTY; without even the implied warranty of
+; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+; GNU General Public License for more details.
+;
+; You should have received a copy of the GNU General Public License
+; along with this program. If not, see <http://www.gnu.org/licenses/>.
+;
+
+%macro aesxor_4 6 ; B0, B1, B2, B3, key, round
+ movdqa tt, [%5+(%6*10h)]
+ pxor %1, tt
+ pxor %2, tt
+ pxor %3, tt
+ pxor %4, tt
+%endmacro
+
+%macro aesenc_4 6 ; B0, B1, B2, B3, key, round
+ movdqa tt, [%5+(%6*10h)]
+ aesenc %1, tt
+ aesenc %2, tt
+ aesenc %3, tt
+ aesenc %4, tt
+%endmacro
+
+%macro aesdec_4 6 ; B0, B1, B2, B3, key, round
+ movdqa tt, [%5+(%6*10h)]
+ aesdec %1, tt
+ aesdec %2, tt
+ aesdec %3, tt
+ aesdec %4, tt
+%endmacro
+
+%macro aesenclast_4 6 ; B0, B1, B2, B3, key, round
+ movdqa tt, [%5+(%6*10h)]
+ aesenclast %1, tt
+ aesenclast %2, tt
+ aesenclast %3, tt
+ aesenclast %4, tt
+%endmacro
+
+%macro aesdeclast_4 6 ; B0, B1, B2, B3, key, round
+ movdqa tt, [%5+(%6*10h)]
+ aesdeclast %1, tt
+ aesdeclast %2, tt
+ aesdeclast %3, tt
+ aesdeclast %4, tt
+%endmacro
+
+%macro aes_encrypt_1 2 ; XMMn, key
+ pxor %1, [%2]
+ aesenc %1, [%2+010h]
+ aesenc %1, [%2+020h]
+ aesenc %1, [%2+030h]
+ aesenc %1, [%2+040h]
+ aesenc %1, [%2+050h]
+ aesenc %1, [%2+060h]
+ aesenc %1, [%2+070h]
+ aesenc %1, [%2+080h]
+ aesenc %1, [%2+090h]
+ aesenc %1, [%2+0A0h]
+ aesenc %1, [%2+0B0h]
+ aesenc %1, [%2+0C0h]
+ aesenc %1, [%2+0D0h]
+ aesenclast %1, [%2+0E0h]
+%endmacro
+
+%macro aes_encrypt_4 5 ; B0, B1, B2, B3, key
+ aesxor_4 %1, %2, %3, %4, %5, 0
+ aesenc_4 %1, %2, %3, %4, %5, 1
+ aesenc_4 %1, %2, %3, %4, %5, 2
+ aesenc_4 %1, %2, %3, %4, %5, 3
+ aesenc_4 %1, %2, %3, %4, %5, 4
+ aesenc_4 %1, %2, %3, %4, %5, 5
+ aesenc_4 %1, %2, %3, %4, %5, 6
+ aesenc_4 %1, %2, %3, %4, %5, 7
+ aesenc_4 %1, %2, %3, %4, %5, 8
+ aesenc_4 %1, %2, %3, %4, %5, 9
+ aesenc_4 %1, %2, %3, %4, %5, 10
+ aesenc_4 %1, %2, %3, %4, %5, 11
+ aesenc_4 %1, %2, %3, %4, %5, 12
+ aesenc_4 %1, %2, %3, %4, %5, 13
+ aesenclast_4 %1, %2, %3, %4, %5, 14
+%endmacro
+
+%macro aes_decrypt_4 5 ; B0, B1, B2, B3, key
+ aesxor_4 %1, %2, %3, %4, %5, 0
+ aesdec_4 %1, %2, %3, %4, %5, 1
+ aesdec_4 %1, %2, %3, %4, %5, 2
+ aesdec_4 %1, %2, %3, %4, %5, 3
+ aesdec_4 %1, %2, %3, %4, %5, 4
+ aesdec_4 %1, %2, %3, %4, %5, 5
+ aesdec_4 %1, %2, %3, %4, %5, 6
+ aesdec_4 %1, %2, %3, %4, %5, 7
+ aesdec_4 %1, %2, %3, %4, %5, 8
+ aesdec_4 %1, %2, %3, %4, %5, 9
+ aesdec_4 %1, %2, %3, %4, %5, 10
+ aesdec_4 %1, %2, %3, %4, %5, 11
+ aesdec_4 %1, %2, %3, %4, %5, 12
+ aesdec_4 %1, %2, %3, %4, %5, 13
+ aesdeclast_4 %1, %2, %3, %4, %5, 14
+%endmacro
+
+%macro next_tweak 2 ; new, old
+ movdqa tt, %2
+ psraw tt, 8
+ psrldq tt, 15
+ pand tt, POLY
+ movdqa t2, %2
+ pslldq t2, 8
+ psrldq t2, 7
+ psrlq t2, 7
+ movdqa %1, %2
+ psllq %1, 1
+ por %1, t2
+ pxor %1, tt
+%endmacro
+
+%macro tweak_block_4 0
+ pxor B0, T0
+ pxor B1, T1
+ pxor B2, T2
+ pxor B3, T3
+%endmacro
+
+%macro load_block_4 1
+ movdqu B0, [%1+00h]
+ movdqu B1, [%1+10h]
+ movdqu B2, [%1+20h]
+ movdqu B3, [%1+30h]
+%endmacro
+
+%macro save_block_4 1
+ movdqu [%1+00h], B0
+ movdqu [%1+10h], B1
+ movdqu [%1+20h], B2
+ movdqu [%1+30h], B3
+%endmacro
+
+%macro aes_xts_process 2
+ ; rcx = in, rdx = out, r8 = len, r9 = offset, [rsp+28h] = key
+ sub rsp, 58h
+ ; save nonvolatile XMM registers
+ movaps [rsp+40h], xmm6
+ movaps [rsp+30h], xmm7
+ movaps [rsp+20h], xmm8
+ movaps [rsp+10h], xmm9
+ movaps [rsp+00h], xmm10
+ ; load XTS tweak polynomial
+ mov eax, 135
+
movd POLY, eax + ; load pointers of keys + mov rax, [rsp+28h+58h] + lea r11, [rax+tweak_k] ; r11 - tweak key +%if %2 != 0 + add rax, %2 ; rax - encryption key +%endif + shr r9, 9 ; idx = offset / XTS_SECTOR_SIZE + shr r8, 9 ; len /= XTS_SECTOR_SIZE +%%xts_loop: + inc r9 ; idx++ + movq T0, r9 + aes_encrypt_1 T0, r11 + mov r10d, 8 ; XTS_BLOCKS_IN_SECTOR / 4 +%%blocks_loop: + ; calc tweaks + next_tweak T1, T0 + next_tweak T2, T1 + next_tweak T3, T2 + ; load blocks + load_block_4 rcx + add rcx, 64 ; in += XTS_BLOCK_SIZE*4 +align 16 + ; input tweak + tweak_block_4 + ; encrypt / decrypt + %1 B0, B1, B2, B3, rax + ; output tweak + tweak_block_4 + ; save blocks + save_block_4 rdx + dec r10d + jz %%block_done + next_tweak T0, T3 + add rdx, 64 ; out += XTS_BLOCK_SIZE*4 + jmp %%blocks_loop +%%block_done: + add rdx, 64 ; out += XTS_BLOCK_SIZE*4 + dec r8 + jnz %%xts_loop + ; restore nonvolatile XMM registers + movaps xmm6, [rsp+40h] + movaps xmm7, [rsp+30h] + movaps xmm8, [rsp+20h] + movaps xmm9, [rsp+10h] + movaps xmm10, [rsp+00h] + add rsp, 58h + ret +%endmacro + +%define B0 xmm0 +%define B1 xmm1 +%define B2 xmm2 +%define B3 xmm3 + +%define T0 xmm4 +%define T1 xmm5 +%define T2 xmm6 +%define T3 xmm7 + +%define tt xmm8 +%define t2 xmm9 +%define POLY xmm10 + +%define tweak_k 5264 +%define enc_key 0 +%define dec_key 4*15*4 + +global xts_aes_ni_encrypt +global xts_aes_ni_decrypt + +xts_aes_ni_encrypt: + aes_xts_process aes_encrypt_4, enc_key + +align 16 +xts_aes_ni_decrypt: + aes_xts_process aes_decrypt_4, dec_key + + + + + + diff --git a/ImBoxEnclave/crypto_fast/amd64/xts_serpent_avx_amd64.asm b/ImBoxEnclave/crypto_fast/amd64/xts_serpent_avx_amd64.asm new file mode 100644 index 0000000..7f6f130 --- /dev/null +++ b/ImBoxEnclave/crypto_fast/amd64/xts_serpent_avx_amd64.asm @@ -0,0 +1,3966 @@ +; this code compiled with Intel C++ Compiler Version 12.0.0.063 +; +; Disassembly of file: xts_serpent_sse2.obj +; Thu May 19 19:05:15 2011 +; Mode: 64 bits +; Syntax: YASM/NASM +; Instruction set: AVX, x64 + +default rel + +global xts_serpent_avx_encrypt +global xts_serpent_avx_decrypt +global xts_serpent_avx_available + +extern serpent256_encrypt ; near + +SECTION .text align=16 execute ; section number 2, code +; Communal section not supported by YASM + +xts_serpent_avx_encrypt:; Function begin + push r12 ; 0000 _ 41: 54 + push r13 ; 0002 _ 41: 55 + push r14 ; 0004 _ 41: 56 + push r15 ; 0006 _ 41: 57 + push rbp ; 0008 _ 55 + sub rsp, 208 ; 0009 _ 48: 81. EC, 000000D0 + mov rbp, rdx ; 0010 _ 48: 89. D5 + mov r10d, 135 ; 0013 _ 41: BA, 00000087 + vpcmpeqd xmm0, xmm0, xmm0 ; 0019 _ C5 F9: 76. C0 + mov rax, qword [rsp+120H] ; 001D _ 48: 8B. 84 24, 00000120 + mov r12, rax ; 0025 _ 49: 89. C4 + shr r9, 9 ; 0028 _ 49: C1. E9, 09 + mov r13, rcx ; 002C _ 49: 89. CD + mov qword [rsp+0B0H], r9 ; 002F _ 4C: 89. 8C 24, 000000B0 + mov r14, r8 ; 0037 _ 4D: 89. C6 + vmovd xmm1, r10d ; 003A _ C4 C1 79: 6E. CA + lea r9, [rax+2710H] ; 003F _ 4C: 8D. 88, 00002710 + mov qword [rsp+0B8H], 0 ; 0046 _ 48: C7. 84 24, 000000B8, 00000000 + mov r15, r9 ; 0052 _ 4D: 89. CF + vmovups oword [rsp+0A0H], xmm6 ; 0055 _ C5 F8: 11. B4 24, 000000A0 + vmovdqa xmm6, xmm0 ; 005E _ C5 F9: 6F. F0 + vmovups oword [rsp+90H], xmm7 ; 0062 _ C5 F8: 11. BC 24, 00000090 + vmovdqa xmm7, xmm1 ; 006B _ C5 F9: 6F. F9 + vmovups oword [rsp+80H], xmm8 ; 006F _ C5 78: 11. 84 24, 00000080 + vmovups oword [rsp+70H], xmm9 ; 0078 _ C5 78: 11. 4C 24, 70 + vmovups oword [rsp+60H], xmm10 ; 007E _ C5 78: 11. 54 24, 60 + vmovups oword [rsp+50H], xmm11 ; 0084 _ C5 78: 11. 
5C 24, 50 + vmovups oword [rsp+40H], xmm12 ; 008A _ C5 78: 11. 64 24, 40 + vmovups oword [rsp+30H], xmm13 ; 0090 _ C5 78: 11. 6C 24, 30 + vmovups oword [rsp+20H], xmm14 ; 0096 _ C5 78: 11. 74 24, 20 + jmp ?_002 ; 009C _ EB, 09 + +?_001: vmovdqu oword [rsp+0C0H], xmm3 ; 009E _ C5 FA: 7F. 9C 24, 000000C0 +?_002: mov r8, r15 ; 00A7 _ 4D: 89. F8 + lea rcx, [rsp+0B0H] ; 00AA _ 48: 8D. 8C 24, 000000B0 + inc qword [rsp+0B0H] ; 00B2 _ 48: FF. 84 24, 000000B0 + lea rdx, [rsp+0C0H] ; 00BA _ 48: 8D. 94 24, 000000C0 + call serpent256_encrypt ; 00C2 _ E8, 00000000(rel) + vmovdqu xmm3, oword [rsp+0C0H] ; 00C7 _ C5 FA: 6F. 9C 24, 000000C0 + xor r10d, r10d ; 00D0 _ 45: 33. D2 +?_003: vpslldq xmm2, xmm3, 8 ; 00D3 _ C5 E9: 73. FB, 08 + vpsllq xmm4, xmm3, 1 ; 00D8 _ C5 D9: 73. F3, 01 + vpsrldq xmm2, xmm2, 7 ; 00DD _ C5 E9: 73. DA, 07 + vpsrlq xmm2, xmm2, 7 ; 00E2 _ C5 E9: 73. D2, 07 + vpor xmm4, xmm4, xmm2 ; 00E7 _ C5 D9: EB. E2 + vpsraw xmm2, xmm3, 8 ; 00EB _ C5 E9: 71. E3, 08 + vpsrldq xmm2, xmm2, 15 ; 00F0 _ C5 E9: 73. DA, 0F + vpand xmm2, xmm2, xmm7 ; 00F5 _ C5 E9: DB. D7 + vpxor xmm2, xmm4, xmm2 ; 00F9 _ C5 D9: EF. D2 + vpslldq xmm4, xmm2, 8 ; 00FD _ C5 D9: 73. FA, 08 + vpsllq xmm5, xmm2, 1 ; 0102 _ C5 D1: 73. F2, 01 + vpsrldq xmm4, xmm4, 7 ; 0107 _ C5 D9: 73. DC, 07 + vpsrlq xmm4, xmm4, 7 ; 010C _ C5 D9: 73. D4, 07 + vpor xmm5, xmm5, xmm4 ; 0111 _ C5 D1: EB. EC + vpsraw xmm4, xmm2, 8 ; 0115 _ C5 D9: 71. E2, 08 + vpsrldq xmm4, xmm4, 15 ; 011A _ C5 D9: 73. DC, 0F + vpand xmm4, xmm4, xmm7 ; 011F _ C5 D9: DB. E7 + vpxor xmm1, xmm5, xmm4 ; 0123 _ C5 D1: EF. CC + vpslldq xmm4, xmm1, 8 ; 0127 _ C5 D9: 73. F9, 08 + vpsllq xmm5, xmm1, 1 ; 012C _ C5 D1: 73. F1, 01 + vpsrldq xmm4, xmm4, 7 ; 0131 _ C5 D9: 73. DC, 07 + vpsrlq xmm4, xmm4, 7 ; 0136 _ C5 D9: 73. D4, 07 + vpor xmm5, xmm5, xmm4 ; 013B _ C5 D1: EB. EC + vpsraw xmm4, xmm1, 8 ; 013F _ C5 D9: 71. E1, 08 + vpsrldq xmm4, xmm4, 15 ; 0144 _ C5 D9: 73. DC, 0F + vpand xmm4, xmm4, xmm7 ; 0149 _ C5 D9: DB. E7 + vpxor xmm0, xmm5, xmm4 ; 014D _ C5 D1: EF. C4 + vpxor xmm9, xmm3, oword [r13] ; 0151 _ C4 41 61: EF. 4D, 00 + vpxor xmm8, xmm2, oword [r13+10H] ; 0157 _ C4 41 69: EF. 45, 10 + vpxor xmm14, xmm1, oword [r13+20H] ; 015D _ C4 41 71: EF. 75, 20 + vpxor xmm13, xmm0, oword [r13+30H] ; 0163 _ C4 41 79: EF. 6D, 30 + vpunpckldq xmm5, xmm9, xmm8 ; 0169 _ C4 C1 31: 62. E8 + vpunpckldq xmm4, xmm14, xmm13 ; 016E _ C4 C1 09: 62. E5 + vpunpckhdq xmm10, xmm9, xmm8 ; 0173 _ C4 41 31: 6A. D0 + vpunpcklqdq xmm9, xmm5, xmm4 ; 0178 _ C5 51: 6C. CC + vpunpckhqdq xmm5, xmm5, xmm4 ; 017C _ C5 D1: 6D. EC + vmovd xmm4, dword [r12+1284H] ; 0180 _ C4 C1 79: 6E. A4 24, 00001284 + vpunpckhdq xmm11, xmm14, xmm13 ; 018A _ C4 41 09: 6A. DD + vpshufd xmm14, xmm4, 0 ; 018F _ C5 79: 70. F4, 00 + vmovd xmm12, dword [r12+1280H] ; 0194 _ C4 41 79: 6E. A4 24, 00001280 + vpxor xmm4, xmm5, xmm14 ; 019E _ C4 C1 51: EF. E6 + vpunpcklqdq xmm5, xmm10, xmm11 ; 01A3 _ C4 C1 29: 6C. EB + vpunpckhqdq xmm10, xmm10, xmm11 ; 01A8 _ C4 41 29: 6D. D3 + vmovd xmm11, dword [r12+128CH] ; 01AD _ C4 41 79: 6E. 9C 24, 0000128C + vmovd xmm13, dword [r12+1288H] ; 01B7 _ C4 41 79: 6E. AC 24, 00001288 + vpshufd xmm8, xmm12, 0 ; 01C1 _ C4 41 79: 70. C4, 00 + vpshufd xmm11, xmm11, 0 ; 01C7 _ C4 41 79: 70. DB, 00 + vpxor xmm12, xmm9, xmm8 ; 01CD _ C4 41 31: EF. E0 + vpshufd xmm9, xmm13, 0 ; 01D2 _ C4 41 79: 70. CD, 00 + vpxor xmm10, xmm10, xmm11 ; 01D8 _ C4 41 29: EF. D3 + vpxor xmm13, xmm5, xmm9 ; 01DD _ C4 41 51: EF. E9 + vpxor xmm8, xmm10, xmm12 ; 01E2 _ C4 41 29: EF. C4 + vpxor xmm5, xmm4, xmm13 ; 01E7 _ C4 C1 59: EF. 
ED + vpand xmm4, xmm4, xmm8 ; 01EC _ C4 C1 59: DB. E0 + vpxor xmm14, xmm4, xmm12 ; 01F1 _ C4 41 59: EF. F4 + vpor xmm12, xmm12, xmm8 ; 01F6 _ C4 41 19: EB. E0 + vpxor xmm10, xmm5, xmm8 ; 01FB _ C4 41 51: EF. D0 + vpxor xmm4, xmm12, xmm5 ; 0200 _ C5 99: EF. E5 + vpxor xmm9, xmm8, xmm13 ; 0204 _ C4 41 39: EF. CD + vpor xmm11, xmm13, xmm14 ; 0209 _ C4 41 11: EB. DE + vpxor xmm8, xmm10, xmm6 ; 020E _ C5 29: EF. C6 + vpxor xmm5, xmm11, xmm10 ; 0212 _ C4 C1 21: EF. EA + vpor xmm10, xmm8, xmm14 ; 0217 _ C4 41 39: EB. D6 + vpor xmm11, xmm9, xmm4 ; 021C _ C5 31: EB. DC + vpxor xmm9, xmm14, xmm9 ; 0220 _ C4 41 09: EF. C9 + vpslld xmm8, xmm5, 3 ; 0225 _ C5 B9: 72. F5, 03 + vpxor xmm14, xmm9, xmm10 ; 022A _ C4 41 31: EF. F2 + vpsrld xmm5, xmm5, 29 ; 022F _ C5 D1: 72. D5, 1D + vpxor xmm13, xmm14, xmm11 ; 0234 _ C4 41 09: EF. EB + vpxor xmm11, xmm10, xmm11 ; 0239 _ C4 41 29: EF. DB + vpslld xmm12, xmm13, 13 ; 023E _ C4 C1 19: 72. F5, 0D + vpsrld xmm9, xmm13, 19 ; 0244 _ C4 C1 31: 72. D5, 13 + vpor xmm12, xmm12, xmm9 ; 024A _ C4 41 19: EB. E1 + vpor xmm5, xmm8, xmm5 ; 024F _ C5 B9: EB. ED + vpxor xmm10, xmm11, xmm12 ; 0253 _ C4 41 21: EF. D4 + vpxor xmm4, xmm4, xmm5 ; 0258 _ C5 D9: EF. E5 + vpxor xmm13, xmm10, xmm5 ; 025C _ C5 29: EF. ED + vpslld xmm14, xmm12, 3 ; 0260 _ C4 C1 09: 72. F4, 03 + vpxor xmm10, xmm4, xmm14 ; 0266 _ C4 41 59: EF. D6 + vpslld xmm4, xmm13, 1 ; 026B _ C4 C1 59: 72. F5, 01 + vpsrld xmm11, xmm13, 31 ; 0271 _ C4 C1 21: 72. D5, 1F + vpslld xmm9, xmm10, 7 ; 0277 _ C4 C1 31: 72. F2, 07 + vpor xmm8, xmm4, xmm11 ; 027D _ C4 41 59: EB. C3 + vpsrld xmm14, xmm10, 25 ; 0282 _ C4 C1 09: 72. D2, 19 + vpor xmm13, xmm9, xmm14 ; 0288 _ C4 41 31: EB. EE + vpxor xmm12, xmm12, xmm8 ; 028D _ C4 41 19: EF. E0 + vpxor xmm14, xmm12, xmm13 ; 0292 _ C4 41 19: EF. F5 + vpxor xmm5, xmm5, xmm13 ; 0297 _ C4 C1 51: EF. ED + vmovd xmm12, dword [r12+129CH] ; 029C _ C4 41 79: 6E. A4 24, 0000129C + vpslld xmm4, xmm8, 7 ; 02A6 _ C4 C1 59: 72. F0, 07 + vmovd xmm11, dword [r12+1294H] ; 02AC _ C4 41 79: 6E. 9C 24, 00001294 + vpxor xmm5, xmm5, xmm4 ; 02B6 _ C5 D1: EF. EC + vpshufd xmm4, xmm12, 0 ; 02BA _ C4 C1 79: 70. E4, 00 + vmovd xmm9, dword [r12+1290H] ; 02C0 _ C4 41 79: 6E. 8C 24, 00001290 + vpshufd xmm10, xmm11, 0 ; 02CA _ C4 41 79: 70. D3, 00 + vpxor xmm11, xmm13, xmm4 ; 02D0 _ C5 11: EF. DC + vpslld xmm13, xmm14, 5 ; 02D4 _ C4 C1 11: 72. F6, 05 + vpsrld xmm14, xmm14, 27 ; 02DA _ C4 C1 09: 72. D6, 1B + vpshufd xmm9, xmm9, 0 ; 02E0 _ C4 41 79: 70. C9, 00 + vpxor xmm8, xmm8, xmm10 ; 02E6 _ C4 41 39: EF. C2 + vmovd xmm10, dword [r12+1298H] ; 02EB _ C4 41 79: 6E. 94 24, 00001298 + vpor xmm4, xmm13, xmm14 ; 02F5 _ C4 C1 11: EB. E6 + vpslld xmm13, xmm5, 22 ; 02FA _ C5 91: 72. F5, 16 + vpsrld xmm5, xmm5, 10 ; 02FF _ C5 D1: 72. D5, 0A + vpshufd xmm10, xmm10, 0 ; 0304 _ C4 41 79: 70. D2, 00 + vpxor xmm4, xmm4, xmm9 ; 030A _ C4 C1 59: EF. E1 + vpor xmm5, xmm13, xmm5 ; 030F _ C5 91: EB. ED + vpxor xmm4, xmm4, xmm6 ; 0313 _ C5 D9: EF. E6 + vpxor xmm10, xmm5, xmm10 ; 0317 _ C4 41 51: EF. D2 + vpand xmm12, xmm4, xmm8 ; 031C _ C4 41 59: DB. E0 + vpxor xmm5, xmm10, xmm6 ; 0321 _ C5 A9: EF. EE + vpor xmm14, xmm12, xmm11 ; 0325 _ C4 41 19: EB. F3 + vpxor xmm9, xmm5, xmm12 ; 032A _ C4 41 51: EF. CC + vpxor xmm12, xmm14, xmm4 ; 032F _ C5 09: EF. E4 + vpxor xmm13, xmm11, xmm9 ; 0333 _ C4 41 21: EF. E9 + vpxor xmm11, xmm8, xmm14 ; 0338 _ C4 41 39: EF. DE + vpor xmm8, xmm4, xmm11 ; 033D _ C4 41 59: EB. C3 + vpxor xmm14, xmm11, xmm13 ; 0342 _ C4 41 21: EF. F5 + vpor xmm4, xmm9, xmm12 ; 0347 _ C4 C1 31: EB. 
E4 + vpxor xmm10, xmm12, xmm14 ; 034C _ C4 41 19: EF. D6 + vpand xmm5, xmm4, xmm8 ; 0351 _ C4 C1 59: DB. E8 + vpslld xmm11, xmm13, 3 ; 0356 _ C4 C1 21: 72. F5, 03 + vpslld xmm9, xmm5, 13 ; 035C _ C5 B1: 72. F5, 0D + vpsrld xmm4, xmm5, 19 ; 0361 _ C5 D9: 72. D5, 13 + vpand xmm12, xmm10, xmm5 ; 0366 _ C5 29: DB. E5 + vpor xmm9, xmm9, xmm4 ; 036A _ C5 31: EB. CC + vpsrld xmm13, xmm13, 29 ; 036E _ C4 C1 11: 72. D5, 1D + vpxor xmm8, xmm8, xmm12 ; 0374 _ C4 41 39: EF. C4 + vpand xmm5, xmm14, xmm5 ; 0379 _ C5 89: DB. ED + vpor xmm11, xmm11, xmm13 ; 037D _ C4 41 21: EB. DD + vpxor xmm4, xmm8, xmm9 ; 0382 _ C4 C1 39: EF. E1 + vpxor xmm10, xmm5, xmm10 ; 0387 _ C4 41 51: EF. D2 + vpxor xmm4, xmm4, xmm11 ; 038C _ C4 C1 59: EF. E3 + vpxor xmm8, xmm10, xmm11 ; 0391 _ C4 41 29: EF. C3 + vpslld xmm14, xmm9, 3 ; 0396 _ C4 C1 09: 72. F1, 03 + vpslld xmm13, xmm4, 1 ; 039C _ C5 91: 72. F4, 01 + vpxor xmm10, xmm8, xmm14 ; 03A1 _ C4 41 39: EF. D6 + vpsrld xmm12, xmm4, 31 ; 03A6 _ C5 99: 72. D4, 1F + vpor xmm12, xmm13, xmm12 ; 03AB _ C4 41 11: EB. E4 + vpslld xmm4, xmm10, 7 ; 03B0 _ C4 C1 59: 72. F2, 07 + vpsrld xmm5, xmm10, 25 ; 03B6 _ C4 C1 51: 72. D2, 19 + vpxor xmm9, xmm9, xmm12 ; 03BC _ C4 41 31: EF. CC + vpor xmm13, xmm4, xmm5 ; 03C1 _ C5 59: EB. ED + vpslld xmm4, xmm12, 7 ; 03C5 _ C4 C1 59: 72. F4, 07 + vpxor xmm10, xmm9, xmm13 ; 03CB _ C4 41 31: EF. D5 + vpxor xmm11, xmm11, xmm13 ; 03D0 _ C4 41 21: EF. DD + vpxor xmm14, xmm11, xmm4 ; 03D5 _ C5 21: EF. F4 + vpslld xmm11, xmm10, 5 ; 03D9 _ C4 C1 21: 72. F2, 05 + vpsrld xmm5, xmm10, 27 ; 03DF _ C4 C1 51: 72. D2, 1B + vmovd xmm9, dword [r12+12A0H] ; 03E5 _ C4 41 79: 6E. 8C 24, 000012A0 + vpor xmm8, xmm11, xmm5 ; 03EF _ C5 21: EB. C5 + vmovd xmm11, dword [r12+12A4H] ; 03F3 _ C4 41 79: 6E. 9C 24, 000012A4 + vpshufd xmm4, xmm9, 0 ; 03FD _ C4 C1 79: 70. E1, 00 + vpshufd xmm10, xmm11, 0 ; 0403 _ C4 41 79: 70. D3, 00 + vpxor xmm8, xmm8, xmm4 ; 0409 _ C5 39: EF. C4 + vmovd xmm4, dword [r12+12A8H] ; 040D _ C4 C1 79: 6E. A4 24, 000012A8 + vpxor xmm9, xmm12, xmm10 ; 0417 _ C4 41 19: EF. CA + vpslld xmm12, xmm14, 22 ; 041C _ C4 C1 19: 72. F6, 16 + vpsrld xmm14, xmm14, 10 ; 0422 _ C4 C1 09: 72. D6, 0A + vmovd xmm5, dword [r12+12ACH] ; 0428 _ C4 C1 79: 6E. AC 24, 000012AC + vpor xmm11, xmm12, xmm14 ; 0432 _ C4 41 19: EB. DE + vpshufd xmm10, xmm4, 0 ; 0437 _ C5 79: 70. D4, 00 + vpshufd xmm14, xmm5, 0 ; 043C _ C5 79: 70. F5, 00 + vpxor xmm12, xmm11, xmm10 ; 0441 _ C4 41 21: EF. E2 + vpxor xmm11, xmm13, xmm14 ; 0446 _ C4 41 11: EF. DE + vpand xmm13, xmm8, xmm12 ; 044B _ C4 41 39: DB. EC + vpxor xmm5, xmm13, xmm11 ; 0450 _ C4 C1 11: EF. EB + vpxor xmm4, xmm12, xmm9 ; 0455 _ C4 C1 19: EF. E1 + vpxor xmm10, xmm4, xmm5 ; 045A _ C5 59: EF. D5 + vpor xmm14, xmm11, xmm8 ; 045E _ C4 41 21: EB. F0 + vpxor xmm13, xmm14, xmm9 ; 0463 _ C4 41 09: EF. E9 + vpxor xmm8, xmm8, xmm10 ; 0468 _ C4 41 39: EF. C2 + vpor xmm9, xmm13, xmm8 ; 046D _ C4 41 11: EB. C8 + vpxor xmm11, xmm9, xmm5 ; 0472 _ C5 31: EF. DD + vpand xmm5, xmm5, xmm13 ; 0476 _ C4 C1 51: DB. ED + vpxor xmm4, xmm8, xmm5 ; 047B _ C5 B9: EF. E5 + vpxor xmm5, xmm13, xmm11 ; 047F _ C4 C1 11: EF. EB + vpxor xmm8, xmm5, xmm4 ; 0484 _ C5 51: EF. C4 + vpslld xmm9, xmm10, 13 ; 0488 _ C4 C1 31: 72. F2, 0D + vpsrld xmm10, xmm10, 19 ; 048E _ C4 C1 29: 72. D2, 13 + vpsrld xmm5, xmm8, 29 ; 0494 _ C4 C1 51: 72. D0, 1D + vpor xmm12, xmm9, xmm10 ; 049A _ C4 41 31: EB. E2 + vpslld xmm10, xmm8, 3 ; 049F _ C4 C1 29: 72. F0, 03 + vpor xmm13, xmm10, xmm5 ; 04A5 _ C5 29: EB. ED + vpxor xmm11, xmm11, xmm12 ; 04A9 _ C4 41 21: EF. 
DC + vpxor xmm4, xmm4, xmm6 ; 04AE _ C5 D9: EF. E6 + vpxor xmm11, xmm11, xmm13 ; 04B2 _ C4 41 21: EF. DD + vpxor xmm14, xmm4, xmm13 ; 04B7 _ C4 41 59: EF. F5 + vpslld xmm4, xmm12, 3 ; 04BC _ C4 C1 59: 72. F4, 03 + vpxor xmm9, xmm14, xmm4 ; 04C2 _ C5 09: EF. CC + vpslld xmm10, xmm11, 1 ; 04C6 _ C4 C1 29: 72. F3, 01 + vpsrld xmm5, xmm11, 31 ; 04CC _ C4 C1 51: 72. D3, 1F + vpslld xmm8, xmm9, 7 ; 04D2 _ C4 C1 39: 72. F1, 07 + vpor xmm14, xmm10, xmm5 ; 04D8 _ C5 29: EB. F5 + vpsrld xmm4, xmm9, 25 ; 04DC _ C4 C1 59: 72. D1, 19 + vpor xmm8, xmm8, xmm4 ; 04E2 _ C5 39: EB. C4 + vpxor xmm12, xmm12, xmm14 ; 04E6 _ C4 41 19: EF. E6 + vpxor xmm11, xmm12, xmm8 ; 04EB _ C4 41 19: EF. D8 + vpxor xmm13, xmm13, xmm8 ; 04F0 _ C4 41 11: EF. E8 + vpslld xmm4, xmm14, 7 ; 04F5 _ C4 C1 59: 72. F6, 07 + vpslld xmm5, xmm11, 5 ; 04FB _ C4 C1 51: 72. F3, 05 + vpsrld xmm9, xmm11, 27 ; 0501 _ C4 C1 31: 72. D3, 1B + vpxor xmm10, xmm13, xmm4 ; 0507 _ C5 11: EF. D4 + vmovd xmm11, dword [r12+12B4H] ; 050B _ C4 41 79: 6E. 9C 24, 000012B4 + vpor xmm12, xmm5, xmm9 ; 0515 _ C4 41 51: EB. E1 + vpshufd xmm5, xmm11, 0 ; 051A _ C4 C1 79: 70. EB, 00 + vmovd xmm13, dword [r12+12B0H] ; 0520 _ C4 41 79: 6E. AC 24, 000012B0 + vpxor xmm11, xmm14, xmm5 ; 052A _ C5 09: EF. DD + vpslld xmm14, xmm10, 22 ; 052E _ C4 C1 09: 72. F2, 16 + vpsrld xmm10, xmm10, 10 ; 0534 _ C4 C1 29: 72. D2, 0A + vpshufd xmm4, xmm13, 0 ; 053A _ C4 C1 79: 70. E5, 00 + vpor xmm10, xmm14, xmm10 ; 0540 _ C4 41 09: EB. D2 + vmovd xmm14, dword [r12+12BCH] ; 0545 _ C4 41 79: 6E. B4 24, 000012BC + vpxor xmm9, xmm12, xmm4 ; 054F _ C5 19: EF. CC + vmovd xmm4, dword [r12+12B8H] ; 0553 _ C4 C1 79: 6E. A4 24, 000012B8 + vpand xmm12, xmm11, xmm9 ; 055D _ C4 41 21: DB. E1 + vpshufd xmm13, xmm14, 0 ; 0562 _ C4 41 79: 70. EE, 00 + vpshufd xmm5, xmm4, 0 ; 0568 _ C5 F9: 70. EC, 00 + vpxor xmm8, xmm8, xmm13 ; 056D _ C4 41 39: EF. C5 + vpxor xmm4, xmm10, xmm5 ; 0572 _ C5 A9: EF. E5 + vpor xmm5, xmm9, xmm8 ; 0576 _ C4 C1 31: EB. E8 + vpxor xmm8, xmm8, xmm11 ; 057B _ C4 41 39: EF. C3 + vpxor xmm10, xmm4, xmm8 ; 0580 _ C4 41 59: EF. D0 + vpxor xmm4, xmm9, xmm4 ; 0585 _ C5 B1: EF. E4 + vpor xmm11, xmm4, xmm12 ; 0589 _ C4 41 59: EB. DC + vpand xmm9, xmm8, xmm5 ; 058E _ C5 39: DB. CD + vpxor xmm4, xmm9, xmm11 ; 0592 _ C4 C1 31: EF. E3 + vpxor xmm5, xmm5, xmm12 ; 0597 _ C4 C1 51: EF. EC + vpxor xmm14, xmm12, xmm4 ; 059C _ C5 19: EF. F4 + vpxor xmm12, xmm5, xmm4 ; 05A0 _ C5 51: EF. E4 + vpor xmm13, xmm14, xmm5 ; 05A4 _ C5 09: EB. ED + vpand xmm11, xmm11, xmm5 ; 05A8 _ C5 21: DB. DD + vpxor xmm13, xmm13, xmm10 ; 05AC _ C4 41 11: EF. EA + vpxor xmm10, xmm11, xmm10 ; 05B1 _ C4 41 21: EF. D2 + vpor xmm9, xmm13, xmm4 ; 05B6 _ C5 11: EB. CC + vpxor xmm8, xmm12, xmm9 ; 05BA _ C4 41 19: EF. C1 + vpslld xmm9, xmm4, 3 ; 05BF _ C5 B1: 72. F4, 03 + vpslld xmm14, xmm8, 13 ; 05C4 _ C4 C1 09: 72. F0, 0D + vpsrld xmm12, xmm8, 19 ; 05CA _ C4 C1 19: 72. D0, 13 + vpor xmm12, xmm14, xmm12 ; 05D0 _ C4 41 09: EB. E4 + vpsrld xmm4, xmm4, 29 ; 05D5 _ C5 D9: 72. D4, 1D + vpor xmm14, xmm9, xmm4 ; 05DA _ C5 31: EB. F4 + vpxor xmm13, xmm13, xmm12 ; 05DE _ C4 41 11: EF. EC + vpxor xmm9, xmm13, xmm14 ; 05E3 _ C4 41 11: EF. CE + vpxor xmm5, xmm10, xmm14 ; 05E8 _ C4 C1 29: EF. EE + vpslld xmm4, xmm12, 3 ; 05ED _ C4 C1 59: 72. F4, 03 + vpslld xmm11, xmm9, 1 ; 05F3 _ C4 C1 21: 72. F1, 01 + vpxor xmm8, xmm5, xmm4 ; 05F9 _ C5 51: EF. C4 + vpsrld xmm10, xmm9, 31 ; 05FD _ C4 C1 29: 72. D1, 1F + vpor xmm5, xmm11, xmm10 ; 0603 _ C4 C1 21: EB. EA + vpslld xmm9, xmm8, 7 ; 0608 _ C4 C1 31: 72. 
F0, 07 + vpsrld xmm13, xmm8, 25 ; 060E _ C4 C1 11: 72. D0, 19 + vpxor xmm12, xmm12, xmm5 ; 0614 _ C5 19: EF. E5 + vpor xmm10, xmm9, xmm13 ; 0618 _ C4 41 31: EB. D5 + vpslld xmm4, xmm5, 7 ; 061D _ C5 D9: 72. F5, 07 + vpxor xmm9, xmm12, xmm10 ; 0622 _ C4 41 19: EF. CA + vpxor xmm14, xmm14, xmm10 ; 0627 _ C4 41 09: EF. F2 + vpxor xmm11, xmm14, xmm4 ; 062C _ C5 09: EF. DC + vpslld xmm8, xmm9, 5 ; 0630 _ C4 C1 39: 72. F1, 05 + vpsrld xmm14, xmm9, 27 ; 0636 _ C4 C1 09: 72. D1, 1B + vmovd xmm9, dword [r12+12CCH] ; 063C _ C4 41 79: 6E. 8C 24, 000012CC + vpor xmm12, xmm8, xmm14 ; 0646 _ C4 41 39: EB. E6 + vmovd xmm13, dword [r12+12C0H] ; 064B _ C4 41 79: 6E. AC 24, 000012C0 + vmovd xmm14, dword [r12+12C4H] ; 0655 _ C4 41 79: 6E. B4 24, 000012C4 + vpshufd xmm8, xmm9, 0 ; 065F _ C4 41 79: 70. C1, 00 + vpshufd xmm4, xmm13, 0 ; 0665 _ C4 C1 79: 70. E5, 00 + vpxor xmm9, xmm10, xmm8 ; 066B _ C4 41 29: EF. C8 + vpshufd xmm10, xmm14, 0 ; 0670 _ C4 41 79: 70. D6, 00 + vpxor xmm13, xmm12, xmm4 ; 0676 _ C5 19: EF. EC + vmovd xmm4, dword [r12+12C8H] ; 067A _ C4 C1 79: 6E. A4 24, 000012C8 + vpxor xmm5, xmm5, xmm10 ; 0684 _ C4 C1 51: EF. EA + vpslld xmm10, xmm11, 22 ; 0689 _ C4 C1 29: 72. F3, 16 + vpsrld xmm11, xmm11, 10 ; 068F _ C4 C1 21: 72. D3, 0A + vpshufd xmm4, xmm4, 0 ; 0695 _ C5 F9: 70. E4, 00 + vpor xmm11, xmm10, xmm11 ; 069A _ C4 41 29: EB. DB + vpxor xmm14, xmm9, xmm6 ; 069F _ C5 31: EF. F6 + vpxor xmm10, xmm11, xmm4 ; 06A3 _ C5 21: EF. D4 + vpxor xmm8, xmm5, xmm9 ; 06A7 _ C4 41 51: EF. C1 + vpxor xmm4, xmm10, xmm14 ; 06AC _ C4 C1 29: EF. E6 + vpxor xmm10, xmm14, xmm13 ; 06B1 _ C4 41 09: EF. D5 + vpand xmm5, xmm8, xmm10 ; 06B6 _ C4 C1 39: DB. EA + vpxor xmm12, xmm8, xmm10 ; 06BB _ C4 41 39: EF. E2 + vpxor xmm14, xmm5, xmm4 ; 06C0 _ C5 51: EF. F4 + vpxor xmm13, xmm13, xmm12 ; 06C4 _ C4 41 11: EF. EC + vpand xmm5, xmm13, xmm14 ; 06C9 _ C4 C1 11: DB. EE + vpand xmm8, xmm4, xmm12 ; 06CE _ C4 41 59: DB. C4 + vpxor xmm11, xmm10, xmm5 ; 06D3 _ C5 29: EF. DD + vpxor xmm9, xmm8, xmm13 ; 06D7 _ C4 41 39: EF. CD + vpor xmm4, xmm5, xmm11 ; 06DC _ C4 C1 51: EB. E3 + vpor xmm12, xmm12, xmm14 ; 06E1 _ C4 41 19: EB. E6 + vpxor xmm10, xmm4, xmm9 ; 06E6 _ C4 41 59: EF. D1 + vpslld xmm8, xmm14, 13 ; 06EB _ C4 C1 39: 72. F6, 0D + vpxor xmm4, xmm10, xmm6 ; 06F1 _ C5 A9: EF. E6 + vpsrld xmm13, xmm14, 19 ; 06F5 _ C4 C1 11: 72. D6, 13 + vpslld xmm10, xmm4, 3 ; 06FB _ C5 A9: 72. F4, 03 + vpsrld xmm4, xmm4, 29 ; 0700 _ C5 D9: 72. D4, 1D + vpor xmm10, xmm10, xmm4 ; 0705 _ C5 29: EB. D4 + vpxor xmm14, xmm12, xmm5 ; 0709 _ C5 19: EF. F5 + vpand xmm4, xmm9, xmm11 ; 070D _ C4 C1 31: DB. E3 + vpor xmm13, xmm8, xmm13 ; 0712 _ C4 41 39: EB. ED + vpxor xmm5, xmm14, xmm4 ; 0717 _ C5 89: EF. EC + vpxor xmm11, xmm11, xmm10 ; 071B _ C4 41 21: EF. DA + vpxor xmm9, xmm5, xmm13 ; 0720 _ C4 41 51: EF. CD + vpslld xmm8, xmm13, 3 ; 0725 _ C4 C1 39: 72. F5, 03 + vpxor xmm14, xmm9, xmm10 ; 072B _ C4 41 31: EF. F2 + vpxor xmm5, xmm11, xmm8 ; 0730 _ C4 C1 21: EF. E8 + vpslld xmm12, xmm14, 1 ; 0735 _ C4 C1 19: 72. F6, 01 + vpsrld xmm4, xmm14, 31 ; 073B _ C4 C1 59: 72. D6, 1F + vpor xmm11, xmm12, xmm4 ; 0741 _ C5 19: EB. DC + vpslld xmm9, xmm5, 7 ; 0745 _ C5 B1: 72. F5, 07 + vpsrld xmm8, xmm5, 25 ; 074A _ C5 B9: 72. D5, 19 + vpxor xmm13, xmm13, xmm11 ; 074F _ C4 41 11: EF. EB + vpor xmm12, xmm9, xmm8 ; 0754 _ C4 41 31: EB. E0 + vpslld xmm4, xmm11, 7 ; 0759 _ C4 C1 59: 72. F3, 07 + vpxor xmm8, xmm13, xmm12 ; 075F _ C4 41 11: EF. C4 + vpxor xmm10, xmm10, xmm12 ; 0764 _ C4 41 29: EF. D4 + vmovd xmm13, dword [r12+12DCH] ; 0769 _ C4 41 79: 6E. 
AC 24, 000012DC + vpxor xmm14, xmm10, xmm4 ; 0773 _ C5 29: EF. F4 + vpshufd xmm4, xmm13, 0 ; 0777 _ C4 C1 79: 70. E5, 00 + vmovd xmm5, dword [r12+12D0H] ; 077D _ C4 C1 79: 6E. AC 24, 000012D0 + vpxor xmm4, xmm12, xmm4 ; 0787 _ C5 99: EF. E4 + vmovd xmm10, dword [r12+12D4H] ; 078B _ C4 41 79: 6E. 94 24, 000012D4 + vpslld xmm12, xmm8, 5 ; 0795 _ C4 C1 19: 72. F0, 05 + vpsrld xmm8, xmm8, 27 ; 079B _ C4 C1 39: 72. D0, 1B + vpshufd xmm9, xmm10, 0 ; 07A1 _ C4 41 79: 70. CA, 00 + vpor xmm10, xmm12, xmm8 ; 07A7 _ C4 41 19: EB. D0 + vpshufd xmm5, xmm5, 0 ; 07AC _ C5 F9: 70. ED, 00 + vpxor xmm9, xmm11, xmm9 ; 07B1 _ C4 41 21: EF. C9 + vpxor xmm10, xmm10, xmm5 ; 07B6 _ C5 29: EF. D5 + vpxor xmm13, xmm9, xmm4 ; 07BA _ C5 31: EF. EC + vmovd xmm11, dword [r12+12D8H] ; 07BE _ C4 41 79: 6E. 9C 24, 000012D8 + vpxor xmm5, xmm10, xmm9 ; 07C8 _ C4 C1 29: EF. E9 + vpxor xmm10, xmm4, xmm6 ; 07CD _ C5 59: EF. D6 + vpslld xmm4, xmm14, 22 ; 07D1 _ C4 C1 59: 72. F6, 16 + vpsrld xmm14, xmm14, 10 ; 07D7 _ C4 C1 09: 72. D6, 0A + vpshufd xmm11, xmm11, 0 ; 07DD _ C4 41 79: 70. DB, 00 + vpor xmm4, xmm4, xmm14 ; 07E3 _ C4 C1 59: EB. E6 + vpxor xmm4, xmm4, xmm11 ; 07E8 _ C4 C1 59: EF. E3 + vpand xmm11, xmm13, xmm5 ; 07ED _ C5 11: DB. DD + vpxor xmm9, xmm4, xmm10 ; 07F1 _ C4 41 59: EF. CA + vpxor xmm4, xmm11, xmm9 ; 07F6 _ C4 C1 21: EF. E1 + vpor xmm9, xmm9, xmm13 ; 07FB _ C4 41 31: EB. CD + vpand xmm8, xmm10, xmm4 ; 0800 _ C5 29: DB. C4 + vpxor xmm10, xmm13, xmm10 ; 0804 _ C4 41 11: EF. D2 + vpxor xmm8, xmm8, xmm5 ; 0809 _ C5 39: EF. C5 + vpxor xmm14, xmm10, xmm4 ; 080D _ C5 29: EF. F4 + vpxor xmm14, xmm14, xmm9 ; 0811 _ C4 41 09: EF. F1 + vpand xmm12, xmm5, xmm8 ; 0816 _ C4 41 51: DB. E0 + vpxor xmm11, xmm12, xmm14 ; 081B _ C4 41 19: EF. DE + vpslld xmm13, xmm4, 13 ; 0820 _ C5 91: 72. F4, 0D + vpsrld xmm4, xmm4, 19 ; 0825 _ C5 D9: 72. D4, 13 + vpslld xmm10, xmm11, 3 ; 082A _ C4 C1 29: 72. F3, 03 + vpor xmm4, xmm13, xmm4 ; 0830 _ C5 91: EB. E4 + vpsrld xmm11, xmm11, 29 ; 0834 _ C4 C1 21: 72. D3, 1D + vpxor xmm5, xmm9, xmm5 ; 083A _ C5 B1: EF. ED + vpor xmm11, xmm10, xmm11 ; 083E _ C4 41 29: EB. DB + vpxor xmm12, xmm8, xmm4 ; 0843 _ C5 39: EF. E4 + vpor xmm8, xmm14, xmm8 ; 0847 _ C4 41 09: EB. C0 + vpxor xmm5, xmm5, xmm6 ; 084C _ C5 D1: EF. EE + vpxor xmm10, xmm12, xmm11 ; 0850 _ C4 41 19: EF. D3 + vpxor xmm9, xmm8, xmm5 ; 0855 _ C5 39: EF. CD + vpslld xmm13, xmm4, 3 ; 0859 _ C5 91: 72. F4, 03 + vpxor xmm12, xmm9, xmm11 ; 085E _ C4 41 31: EF. E3 + vpslld xmm14, xmm10, 1 ; 0863 _ C4 C1 09: 72. F2, 01 + vpsrld xmm10, xmm10, 31 ; 0869 _ C4 C1 29: 72. D2, 1F + vpxor xmm8, xmm12, xmm13 ; 086F _ C4 41 19: EF. C5 + vpor xmm5, xmm14, xmm10 ; 0874 _ C4 C1 09: EB. EA + vpslld xmm13, xmm8, 7 ; 0879 _ C4 C1 11: 72. F0, 07 + vpsrld xmm10, xmm8, 25 ; 087F _ C4 C1 29: 72. D0, 19 + vpor xmm8, xmm13, xmm10 ; 0885 _ C4 41 11: EB. C2 + vpxor xmm4, xmm4, xmm5 ; 088A _ C5 D9: EF. E5 + vpxor xmm4, xmm4, xmm8 ; 088E _ C4 C1 59: EF. E0 + vpxor xmm11, xmm11, xmm8 ; 0893 _ C4 41 21: EF. D8 + vmovd xmm10, dword [r12+12E0H] ; 0898 _ C4 41 79: 6E. 94 24, 000012E0 + vpslld xmm9, xmm5, 7 ; 08A2 _ C5 B1: 72. F5, 07 + vpslld xmm14, xmm4, 5 ; 08A7 _ C5 89: 72. F4, 05 + vpsrld xmm13, xmm4, 27 ; 08AC _ C5 91: 72. D4, 1B + vpxor xmm12, xmm11, xmm9 ; 08B1 _ C4 41 21: EF. E1 + vpor xmm11, xmm14, xmm13 ; 08B6 _ C4 41 09: EB. DD + vpshufd xmm9, xmm10, 0 ; 08BB _ C4 41 79: 70. CA, 00 + vmovd xmm10, dword [r12+12E8H] ; 08C1 _ C4 41 79: 6E. 94 24, 000012E8 + vpxor xmm14, xmm11, xmm9 ; 08CB _ C4 41 21: EF. F1 + vmovd xmm13, dword [r12+12ECH] ; 08D0 _ C4 41 79: 6E. 
AC 24, 000012EC + vpslld xmm9, xmm12, 22 ; 08DA _ C4 C1 31: 72. F4, 16 + vpsrld xmm12, xmm12, 10 ; 08E0 _ C4 C1 19: 72. D4, 0A + vpshufd xmm11, xmm13, 0 ; 08E6 _ C4 41 79: 70. DD, 00 + vpor xmm12, xmm9, xmm12 ; 08EC _ C4 41 31: EB. E4 + vpshufd xmm13, xmm10, 0 ; 08F1 _ C4 41 79: 70. EA, 00 + vpxor xmm8, xmm8, xmm11 ; 08F7 _ C4 41 39: EF. C3 + vmovd xmm4, dword [r12+12E4H] ; 08FC _ C4 C1 79: 6E. A4 24, 000012E4 + vpxor xmm10, xmm12, xmm13 ; 0906 _ C4 41 19: EF. D5 + vpshufd xmm4, xmm4, 0 ; 090B _ C5 F9: 70. E4, 00 + vpxor xmm11, xmm10, xmm6 ; 0910 _ C5 29: EF. DE + vpxor xmm9, xmm14, xmm8 ; 0914 _ C4 41 09: EF. C8 + vpand xmm14, xmm8, xmm14 ; 0919 _ C4 41 39: DB. F6 + vpxor xmm10, xmm14, xmm11 ; 091E _ C4 41 09: EF. D3 + vpxor xmm5, xmm5, xmm4 ; 0923 _ C5 D1: EF. EC + vpxor xmm5, xmm5, xmm10 ; 0927 _ C4 C1 51: EF. EA + vpor xmm11, xmm11, xmm8 ; 092C _ C4 41 21: EB. D8 + vpor xmm13, xmm9, xmm5 ; 0931 _ C5 31: EB. ED + vpxor xmm9, xmm11, xmm9 ; 0935 _ C4 41 21: EF. C9 + vpxor xmm9, xmm9, xmm5 ; 093A _ C5 31: EF. CD + vpor xmm4, xmm13, xmm10 ; 093E _ C4 C1 11: EB. E2 + vpxor xmm8, xmm8, xmm13 ; 0943 _ C4 41 39: EF. C5 + vpxor xmm12, xmm4, xmm9 ; 0948 _ C4 41 59: EF. E1 + vpxor xmm4, xmm8, xmm10 ; 094D _ C4 C1 39: EF. E2 + vpslld xmm8, xmm12, 13 ; 0952 _ C4 C1 39: 72. F4, 0D + vpxor xmm4, xmm4, xmm12 ; 0958 _ C4 C1 59: EF. E4 + vpsrld xmm14, xmm12, 19 ; 095D _ C4 C1 09: 72. D4, 13 + vpor xmm14, xmm8, xmm14 ; 0963 _ C4 41 39: EB. F6 + vpslld xmm12, xmm4, 3 ; 0968 _ C5 99: 72. F4, 03 + vpsrld xmm13, xmm4, 29 ; 096D _ C5 91: 72. D4, 1D + vpxor xmm5, xmm5, xmm14 ; 0972 _ C4 C1 51: EF. EE + vpor xmm8, xmm12, xmm13 ; 0977 _ C4 41 19: EB. C5 + vpxor xmm10, xmm10, xmm6 ; 097C _ C5 29: EF. D6 + vpxor xmm12, xmm5, xmm8 ; 0980 _ C4 41 51: EF. E0 + vpand xmm5, xmm9, xmm4 ; 0985 _ C5 B1: DB. EC + vpxor xmm4, xmm10, xmm5 ; 0989 _ C5 A9: EF. E5 + vpslld xmm10, xmm14, 3 ; 098D _ C4 C1 29: 72. F6, 03 + vpxor xmm13, xmm4, xmm8 ; 0993 _ C4 41 59: EF. E8 + vpslld xmm11, xmm12, 1 ; 0998 _ C4 C1 21: 72. F4, 01 + vpxor xmm9, xmm13, xmm10 ; 099E _ C4 41 11: EF. CA + vpsrld xmm12, xmm12, 31 ; 09A3 _ C4 C1 19: 72. D4, 1F + vpor xmm4, xmm11, xmm12 ; 09A9 _ C4 C1 21: EB. E4 + vpslld xmm5, xmm9, 7 ; 09AE _ C4 C1 51: 72. F1, 07 + vpsrld xmm12, xmm9, 25 ; 09B4 _ C4 C1 19: 72. D1, 19 + vpxor xmm14, xmm14, xmm4 ; 09BA _ C5 09: EF. F4 + vpor xmm5, xmm5, xmm12 ; 09BE _ C4 C1 51: EB. EC + vpslld xmm13, xmm4, 7 ; 09C3 _ C5 91: 72. F4, 07 + vpxor xmm10, xmm14, xmm5 ; 09C8 _ C5 09: EF. D5 + vpxor xmm8, xmm8, xmm5 ; 09CC _ C5 39: EF. C5 + vpslld xmm11, xmm10, 5 ; 09D0 _ C4 C1 21: 72. F2, 05 + vpsrld xmm9, xmm10, 27 ; 09D6 _ C4 C1 31: 72. D2, 1B + vmovd xmm10, dword [r12+12F4H] ; 09DC _ C4 41 79: 6E. 94 24, 000012F4 + vpxor xmm13, xmm8, xmm13 ; 09E6 _ C4 41 39: EF. ED + vmovd xmm8, dword [r12+12F0H] ; 09EB _ C4 41 79: 6E. 84 24, 000012F0 + vpor xmm14, xmm11, xmm9 ; 09F5 _ C4 41 21: EB. F1 + vpshufd xmm11, xmm10, 0 ; 09FA _ C4 41 79: 70. DA, 00 + vpshufd xmm12, xmm8, 0 ; 0A00 _ C4 41 79: 70. E0, 00 + vpxor xmm9, xmm4, xmm11 ; 0A06 _ C4 41 59: EF. CB + vpslld xmm4, xmm13, 22 ; 0A0B _ C4 C1 59: 72. F5, 16 + vpsrld xmm8, xmm13, 10 ; 0A11 _ C4 C1 39: 72. D5, 0A + vpxor xmm12, xmm14, xmm12 ; 0A17 _ C4 41 09: EF. E4 + vpor xmm14, xmm4, xmm8 ; 0A1C _ C4 41 59: EB. F0 + vmovd xmm4, dword [r12+12F8H] ; 0A21 _ C4 C1 79: 6E. A4 24, 000012F8 + vmovd xmm11, dword [r12+12FCH] ; 0A2B _ C4 41 79: 6E. 9C 24, 000012FC + vpshufd xmm13, xmm4, 0 ; 0A35 _ C5 79: 70. EC, 00 + vpshufd xmm4, xmm11, 0 ; 0A3A _ C4 C1 79: 70. 
E3, 00 + vpxor xmm10, xmm14, xmm13 ; 0A40 _ C4 41 09: EF. D5 + vpxor xmm14, xmm5, xmm4 ; 0A45 _ C5 51: EF. F4 + vpor xmm5, xmm9, xmm10 ; 0A49 _ C4 C1 31: EB. EA + vpxor xmm13, xmm5, xmm14 ; 0A4E _ C4 41 51: EF. EE + vpxor xmm9, xmm9, xmm10 ; 0A53 _ C4 41 31: EF. CA + vpxor xmm4, xmm10, xmm13 ; 0A58 _ C4 C1 29: EF. E5 + vpxor xmm8, xmm9, xmm4 ; 0A5D _ C5 31: EF. C4 + vpor xmm9, xmm14, xmm9 ; 0A61 _ C4 41 09: EB. C9 + vpor xmm5, xmm12, xmm8 ; 0A66 _ C4 C1 19: EB. E8 + vpor xmm11, xmm13, xmm8 ; 0A6B _ C4 41 11: EB. D8 + vpxor xmm10, xmm5, xmm4 ; 0A70 _ C5 51: EF. D4 + vpxor xmm5, xmm11, xmm12 ; 0A74 _ C4 C1 21: EF. EC + vpxor xmm5, xmm5, xmm8 ; 0A79 _ C4 C1 51: EF. E8 + vpand xmm12, xmm9, xmm12 ; 0A7E _ C4 41 31: DB. E4 + vpxor xmm4, xmm4, xmm5 ; 0A83 _ C5 D9: EF. E5 + vpand xmm11, xmm5, xmm10 ; 0A87 _ C4 41 51: DB. DA + vpxor xmm5, xmm4, xmm6 ; 0A8C _ C5 D9: EF. EE + vpxor xmm11, xmm11, xmm8 ; 0A90 _ C4 41 21: EF. D8 + vpor xmm4, xmm5, xmm10 ; 0A95 _ C4 C1 51: EB. E2 + vpxor xmm8, xmm8, xmm4 ; 0A9A _ C5 39: EF. C4 + vpslld xmm5, xmm8, 13 ; 0A9E _ C4 C1 51: 72. F0, 0D + vpsrld xmm4, xmm8, 19 ; 0AA4 _ C4 C1 59: 72. D0, 13 + vpor xmm5, xmm5, xmm4 ; 0AAA _ C5 D1: EB. EC + vpslld xmm8, xmm11, 3 ; 0AAE _ C4 C1 39: 72. F3, 03 + vpsrld xmm11, xmm11, 29 ; 0AB4 _ C4 C1 21: 72. D3, 1D + vpxor xmm4, xmm12, xmm13 ; 0ABA _ C4 C1 19: EF. E5 + vpor xmm11, xmm8, xmm11 ; 0ABF _ C4 41 39: EB. DB + vpxor xmm8, xmm4, xmm5 ; 0AC4 _ C5 59: EF. C5 + vpxor xmm14, xmm8, xmm11 ; 0AC8 _ C4 41 39: EF. F3 + vpxor xmm10, xmm10, xmm11 ; 0ACD _ C4 41 29: EF. D3 + vpslld xmm4, xmm5, 3 ; 0AD2 _ C5 D9: 72. F5, 03 + vpslld xmm8, xmm14, 1 ; 0AD7 _ C4 C1 39: 72. F6, 01 + vpxor xmm13, xmm10, xmm4 ; 0ADD _ C5 29: EF. EC + vpsrld xmm12, xmm14, 31 ; 0AE1 _ C4 C1 19: 72. D6, 1F + vpor xmm14, xmm8, xmm12 ; 0AE7 _ C4 41 39: EB. F4 + vpslld xmm10, xmm13, 7 ; 0AEC _ C4 C1 29: 72. F5, 07 + vpsrld xmm9, xmm13, 25 ; 0AF2 _ C4 C1 31: 72. D5, 19 + vpxor xmm5, xmm5, xmm14 ; 0AF8 _ C4 C1 51: EF. EE + vpor xmm8, xmm10, xmm9 ; 0AFD _ C4 41 29: EB. C1 + vpxor xmm12, xmm5, xmm8 ; 0B02 _ C4 41 51: EF. E0 + vpxor xmm11, xmm11, xmm8 ; 0B07 _ C4 41 21: EF. D8 + vpslld xmm5, xmm14, 7 ; 0B0C _ C4 C1 51: 72. F6, 07 + vpslld xmm13, xmm12, 5 ; 0B12 _ C4 C1 11: 72. F4, 05 + vpsrld xmm10, xmm12, 27 ; 0B18 _ C4 C1 29: 72. D4, 1B + vpxor xmm4, xmm11, xmm5 ; 0B1E _ C5 A1: EF. E5 + vmovd xmm12, dword [r12+1304H] ; 0B22 _ C4 41 79: 6E. A4 24, 00001304 + vpor xmm9, xmm13, xmm10 ; 0B2C _ C4 41 11: EB. CA + vmovd xmm11, dword [r12+1300H] ; 0B31 _ C4 41 79: 6E. 9C 24, 00001300 + vmovd xmm10, dword [r12+130CH] ; 0B3B _ C4 41 79: 6E. 94 24, 0000130C + vpshufd xmm13, xmm12, 0 ; 0B45 _ C4 41 79: 70. EC, 00 + vpshufd xmm5, xmm11, 0 ; 0B4B _ C4 C1 79: 70. EB, 00 + vpxor xmm13, xmm14, xmm13 ; 0B51 _ C4 41 09: EF. ED + vpshufd xmm11, xmm10, 0 ; 0B56 _ C4 41 79: 70. DA, 00 + vpslld xmm14, xmm4, 22 ; 0B5C _ C5 89: 72. F4, 16 + vpsrld xmm4, xmm4, 10 ; 0B61 _ C5 D9: 72. D4, 0A + vpxor xmm5, xmm9, xmm5 ; 0B66 _ C5 B1: EF. ED + vpor xmm14, xmm14, xmm4 ; 0B6A _ C5 09: EB. F4 + vpxor xmm8, xmm8, xmm11 ; 0B6E _ C4 41 39: EF. C3 + vmovd xmm4, dword [r12+1308H] ; 0B73 _ C4 C1 79: 6E. A4 24, 00001308 + vpxor xmm10, xmm8, xmm5 ; 0B7D _ C5 39: EF. D5 + vpshufd xmm12, xmm4, 0 ; 0B81 _ C5 79: 70. E4, 00 + vpand xmm4, xmm13, xmm10 ; 0B86 _ C4 C1 11: DB. E2 + vpxor xmm9, xmm14, xmm12 ; 0B8B _ C4 41 09: EF. CC + vpxor xmm11, xmm4, xmm5 ; 0B90 _ C5 59: EF. DD + vpxor xmm8, xmm13, xmm9 ; 0B94 _ C4 41 11: EF. C1 + vpor xmm5, xmm5, xmm10 ; 0B99 _ C4 C1 51: EB. 
EA + vpxor xmm13, xmm5, xmm8 ; 0B9E _ C4 41 51: EF. E8 + vpxor xmm5, xmm8, xmm10 ; 0BA3 _ C4 C1 39: EF. EA + vpxor xmm12, xmm10, xmm9 ; 0BA8 _ C4 41 29: EF. E1 + vpxor xmm10, xmm5, xmm6 ; 0BAD _ C5 51: EF. D6 + vpor xmm9, xmm9, xmm11 ; 0BB1 _ C4 41 31: EB. CB + vpor xmm8, xmm10, xmm11 ; 0BB6 _ C4 41 29: EB. C3 + vpxor xmm4, xmm11, xmm12 ; 0BBB _ C4 C1 21: EF. E4 + vpxor xmm14, xmm9, xmm5 ; 0BC0 _ C5 31: EF. F5 + vpor xmm5, xmm12, xmm13 ; 0BC4 _ C4 C1 19: EB. ED + vpxor xmm12, xmm4, xmm8 ; 0BC9 _ C4 41 59: EF. E0 + vpxor xmm11, xmm12, xmm5 ; 0BCE _ C5 19: EF. DD + vpslld xmm12, xmm14, 3 ; 0BD2 _ C4 C1 19: 72. F6, 03 + vpslld xmm9, xmm11, 13 ; 0BD8 _ C4 C1 31: 72. F3, 0D + vpsrld xmm4, xmm11, 19 ; 0BDE _ C4 C1 59: 72. D3, 13 + vpor xmm4, xmm9, xmm4 ; 0BE4 _ C5 B1: EB. E4 + vpsrld xmm14, xmm14, 29 ; 0BE8 _ C4 C1 09: 72. D6, 1D + vpxor xmm8, xmm8, xmm5 ; 0BEE _ C5 39: EF. C5 + vpor xmm12, xmm12, xmm14 ; 0BF2 _ C4 41 19: EB. E6 + vpxor xmm5, xmm8, xmm4 ; 0BF7 _ C5 B9: EF. EC + vpxor xmm13, xmm13, xmm12 ; 0BFB _ C4 41 11: EF. EC + vpxor xmm8, xmm5, xmm12 ; 0C00 _ C4 41 51: EF. C4 + vpslld xmm5, xmm4, 3 ; 0C05 _ C5 D1: 72. F4, 03 + vpxor xmm10, xmm13, xmm5 ; 0C0A _ C5 11: EF. D5 + vpslld xmm14, xmm8, 1 ; 0C0E _ C4 C1 09: 72. F0, 01 + vpsrld xmm13, xmm8, 31 ; 0C14 _ C4 C1 11: 72. D0, 1F + vpslld xmm11, xmm10, 7 ; 0C1A _ C4 C1 21: 72. F2, 07 + vpor xmm14, xmm14, xmm13 ; 0C20 _ C4 41 09: EB. F5 + vpsrld xmm9, xmm10, 25 ; 0C25 _ C4 C1 31: 72. D2, 19 + vpor xmm5, xmm11, xmm9 ; 0C2B _ C4 C1 21: EB. E9 + vpxor xmm4, xmm4, xmm14 ; 0C30 _ C4 C1 59: EF. E6 + vpxor xmm9, xmm4, xmm5 ; 0C35 _ C5 59: EF. CD + vpxor xmm12, xmm12, xmm5 ; 0C39 _ C5 19: EF. E5 + vpslld xmm4, xmm14, 7 ; 0C3D _ C4 C1 59: 72. F6, 07 + vmovd xmm8, dword [r12+1314H] ; 0C43 _ C4 41 79: 6E. 84 24, 00001314 + vpxor xmm12, xmm12, xmm4 ; 0C4D _ C5 19: EF. E4 + vmovd xmm4, dword [r12+131CH] ; 0C51 _ C4 C1 79: 6E. A4 24, 0000131C + vpshufd xmm13, xmm8, 0 ; 0C5B _ C4 41 79: 70. E8, 00 + vpshufd xmm8, xmm4, 0 ; 0C61 _ C5 79: 70. C4, 00 + vpxor xmm14, xmm14, xmm13 ; 0C66 _ C4 41 09: EF. F5 + vmovd xmm11, dword [r12+1310H] ; 0C6B _ C4 41 79: 6E. 9C 24, 00001310 + vpxor xmm13, xmm5, xmm8 ; 0C75 _ C4 41 51: EF. E8 + vpslld xmm5, xmm9, 5 ; 0C7A _ C4 C1 51: 72. F1, 05 + vpsrld xmm9, xmm9, 27 ; 0C80 _ C4 C1 31: 72. D1, 1B + vmovd xmm10, dword [r12+1318H] ; 0C86 _ C4 41 79: 6E. 94 24, 00001318 + vpor xmm5, xmm5, xmm9 ; 0C90 _ C4 C1 51: EB. E9 + vpshufd xmm11, xmm11, 0 ; 0C95 _ C4 41 79: 70. DB, 00 + vpslld xmm4, xmm12, 22 ; 0C9B _ C4 C1 59: 72. F4, 16 + vpsrld xmm12, xmm12, 10 ; 0CA1 _ C4 C1 19: 72. D4, 0A + vpxor xmm5, xmm5, xmm11 ; 0CA7 _ C4 C1 51: EF. EB + vpshufd xmm10, xmm10, 0 ; 0CAC _ C4 41 79: 70. D2, 00 + vpor xmm11, xmm4, xmm12 ; 0CB2 _ C4 41 59: EB. DC + vpxor xmm8, xmm5, xmm6 ; 0CB7 _ C5 51: EF. C6 + vpxor xmm4, xmm11, xmm10 ; 0CBB _ C4 C1 21: EF. E2 + vpand xmm5, xmm8, xmm14 ; 0CC0 _ C4 C1 39: DB. EE + vpxor xmm12, xmm4, xmm6 ; 0CC5 _ C5 59: EF. E6 + vpxor xmm11, xmm12, xmm5 ; 0CC9 _ C5 19: EF. DD + vpor xmm5, xmm5, xmm13 ; 0CCD _ C4 C1 51: EB. ED + vpxor xmm4, xmm13, xmm11 ; 0CD2 _ C4 C1 11: EF. E3 + vpxor xmm14, xmm14, xmm5 ; 0CD7 _ C5 09: EF. F5 + vpxor xmm12, xmm5, xmm8 ; 0CDB _ C4 41 51: EF. E0 + vpor xmm9, xmm8, xmm14 ; 0CE0 _ C4 41 39: EB. CE + vpxor xmm5, xmm14, xmm4 ; 0CE5 _ C5 89: EF. EC + vpor xmm8, xmm11, xmm12 ; 0CE9 _ C4 41 21: EB. C4 + vpand xmm10, xmm8, xmm9 ; 0CEE _ C4 41 39: DB. D1 + vpxor xmm13, xmm12, xmm5 ; 0CF3 _ C5 19: EF. ED + vpslld xmm8, xmm4, 3 ; 0CF7 _ C5 B9: 72. 
F4, 03 + vpsrld xmm4, xmm4, 29 ; 0CFC _ C5 D9: 72. D4, 1D + vpslld xmm14, xmm10, 13 ; 0D01 _ C4 C1 09: 72. F2, 0D + vpsrld xmm11, xmm10, 19 ; 0D07 _ C4 C1 21: 72. D2, 13 + vpor xmm12, xmm8, xmm4 ; 0D0D _ C5 39: EB. E4 + vpand xmm4, xmm13, xmm10 ; 0D11 _ C4 C1 11: DB. E2 + vpor xmm11, xmm14, xmm11 ; 0D16 _ C4 41 09: EB. DB + vpxor xmm9, xmm9, xmm4 ; 0D1B _ C5 31: EF. CC + vpand xmm10, xmm5, xmm10 ; 0D1F _ C4 41 51: DB. D2 + vpxor xmm4, xmm9, xmm11 ; 0D24 _ C4 C1 31: EF. E3 + vpxor xmm13, xmm10, xmm13 ; 0D29 _ C4 41 29: EF. ED + vpxor xmm14, xmm4, xmm12 ; 0D2E _ C4 41 59: EF. F4 + vpxor xmm5, xmm13, xmm12 ; 0D33 _ C4 C1 11: EF. EC + vpslld xmm4, xmm11, 3 ; 0D38 _ C4 C1 59: 72. F3, 03 + vpxor xmm13, xmm5, xmm4 ; 0D3E _ C5 51: EF. EC + vpslld xmm8, xmm14, 1 ; 0D42 _ C4 C1 39: 72. F6, 01 + vpsrld xmm5, xmm14, 31 ; 0D48 _ C4 C1 51: 72. D6, 1F + vpslld xmm14, xmm13, 7 ; 0D4E _ C4 C1 09: 72. F5, 07 + vpor xmm8, xmm8, xmm5 ; 0D54 _ C5 39: EB. C5 + vpsrld xmm10, xmm13, 25 ; 0D58 _ C4 C1 29: 72. D5, 19 + vpor xmm4, xmm14, xmm10 ; 0D5E _ C4 C1 09: EB. E2 + vpxor xmm11, xmm11, xmm8 ; 0D63 _ C4 41 21: EF. D8 + vpxor xmm14, xmm11, xmm4 ; 0D68 _ C5 21: EF. F4 + vpxor xmm12, xmm12, xmm4 ; 0D6C _ C5 19: EF. E4 + vpslld xmm9, xmm8, 7 ; 0D70 _ C4 C1 31: 72. F0, 07 + vpsrld xmm13, xmm14, 27 ; 0D76 _ C4 C1 11: 72. D6, 1B + vmovd xmm10, dword [r12+1320H] ; 0D7C _ C4 41 79: 6E. 94 24, 00001320 + vpxor xmm5, xmm12, xmm9 ; 0D86 _ C4 C1 19: EF. E9 + vpslld xmm12, xmm14, 5 ; 0D8B _ C4 C1 19: 72. F6, 05 + vmovd xmm14, dword [r12+1324H] ; 0D91 _ C4 41 79: 6E. B4 24, 00001324 + vpor xmm11, xmm12, xmm13 ; 0D9B _ C4 41 19: EB. DD + vpshufd xmm9, xmm10, 0 ; 0DA0 _ C4 41 79: 70. CA, 00 + vpshufd xmm12, xmm14, 0 ; 0DA6 _ C4 41 79: 70. E6, 00 + vpxor xmm9, xmm11, xmm9 ; 0DAC _ C4 41 21: EF. C9 + vpxor xmm11, xmm8, xmm12 ; 0DB1 _ C4 41 39: EF. DC + vpslld xmm8, xmm5, 22 ; 0DB6 _ C5 B9: 72. F5, 16 + vpsrld xmm5, xmm5, 10 ; 0DBB _ C5 D1: 72. D5, 0A + vpor xmm8, xmm8, xmm5 ; 0DC0 _ C5 39: EB. C5 + vmovd xmm5, dword [r12+1328H] ; 0DC4 _ C4 C1 79: 6E. AC 24, 00001328 + vmovd xmm12, dword [r12+132CH] ; 0DCE _ C4 41 79: 6E. A4 24, 0000132C + vpshufd xmm14, xmm5, 0 ; 0DD8 _ C5 79: 70. F5, 00 + vpshufd xmm13, xmm12, 0 ; 0DDD _ C4 41 79: 70. EC, 00 + vpxor xmm5, xmm8, xmm14 ; 0DE3 _ C4 C1 39: EF. EE + vpxor xmm8, xmm4, xmm13 ; 0DE8 _ C4 41 59: EF. C5 + vpand xmm4, xmm9, xmm5 ; 0DED _ C5 B1: DB. E5 + vpxor xmm10, xmm4, xmm8 ; 0DF1 _ C4 41 59: EF. D0 + vpxor xmm4, xmm5, xmm11 ; 0DF6 _ C4 C1 51: EF. E3 + vpxor xmm13, xmm4, xmm10 ; 0DFB _ C4 41 59: EF. EA + vpor xmm14, xmm8, xmm9 ; 0E00 _ C4 41 39: EB. F1 + vpxor xmm5, xmm14, xmm11 ; 0E05 _ C4 C1 09: EF. EB + vpxor xmm11, xmm9, xmm13 ; 0E0A _ C4 41 31: EF. DD + vpor xmm12, xmm5, xmm11 ; 0E0F _ C4 41 51: EB. E3 + vpxor xmm12, xmm12, xmm10 ; 0E14 _ C4 41 19: EF. E2 + vpand xmm10, xmm10, xmm5 ; 0E19 _ C5 29: DB. D5 + vpxor xmm14, xmm11, xmm10 ; 0E1D _ C4 41 21: EF. F2 + vpxor xmm9, xmm5, xmm12 ; 0E22 _ C4 41 51: EF. CC + vpxor xmm4, xmm9, xmm14 ; 0E27 _ C4 C1 31: EF. E6 + vpslld xmm5, xmm13, 13 ; 0E2C _ C4 C1 51: 72. F5, 0D + vpsrld xmm13, xmm13, 19 ; 0E32 _ C4 C1 11: 72. D5, 13 + vpxor xmm14, xmm14, xmm6 ; 0E38 _ C5 09: EF. F6 + vpor xmm8, xmm5, xmm13 ; 0E3C _ C4 41 51: EB. C5 + vpslld xmm5, xmm4, 3 ; 0E41 _ C5 D1: 72. F4, 03 + vpsrld xmm13, xmm4, 29 ; 0E46 _ C5 91: 72. D4, 1D + vpxor xmm12, xmm12, xmm8 ; 0E4B _ C4 41 19: EF. E0 + vpor xmm4, xmm5, xmm13 ; 0E50 _ C4 C1 51: EB. E5 + vpslld xmm11, xmm8, 3 ; 0E55 _ C4 C1 21: 72. F0, 03 + vpxor xmm9, xmm12, xmm4 ; 0E5B _ C5 19: EF. 
CC + vpxor xmm10, xmm14, xmm4 ; 0E5F _ C5 09: EF. D4 + vpxor xmm12, xmm10, xmm11 ; 0E63 _ C4 41 29: EF. E3 + vpslld xmm5, xmm9, 1 ; 0E68 _ C4 C1 51: 72. F1, 01 + vpsrld xmm14, xmm9, 31 ; 0E6E _ C4 C1 09: 72. D1, 1F + vpslld xmm13, xmm12, 7 ; 0E74 _ C4 C1 11: 72. F4, 07 + vpor xmm5, xmm5, xmm14 ; 0E7A _ C4 C1 51: EB. EE + vpsrld xmm10, xmm12, 25 ; 0E7F _ C4 C1 29: 72. D4, 19 + vpor xmm9, xmm13, xmm10 ; 0E85 _ C4 41 11: EB. CA + vpxor xmm8, xmm8, xmm5 ; 0E8A _ C5 39: EF. C5 + vpxor xmm14, xmm8, xmm9 ; 0E8E _ C4 41 39: EF. F1 + vpxor xmm4, xmm4, xmm9 ; 0E93 _ C4 C1 59: EF. E1 + vmovd xmm10, dword [r12+1330H] ; 0E98 _ C4 41 79: 6E. 94 24, 00001330 + vpslld xmm8, xmm5, 7 ; 0EA2 _ C5 B9: 72. F5, 07 + vpslld xmm12, xmm14, 5 ; 0EA7 _ C4 C1 19: 72. F6, 05 + vpsrld xmm13, xmm14, 27 ; 0EAD _ C4 C1 11: 72. D6, 1B + vmovd xmm14, dword [r12+1334H] ; 0EB3 _ C4 41 79: 6E. B4 24, 00001334 + vpxor xmm11, xmm4, xmm8 ; 0EBD _ C4 41 59: EF. D8 + vpshufd xmm8, xmm10, 0 ; 0EC2 _ C4 41 79: 70. C2, 00 + vpor xmm4, xmm12, xmm13 ; 0EC8 _ C4 C1 19: EB. E5 + vpshufd xmm12, xmm14, 0 ; 0ECD _ C4 41 79: 70. E6, 00 + vpxor xmm10, xmm4, xmm8 ; 0ED3 _ C4 41 59: EF. D0 + vpxor xmm8, xmm5, xmm12 ; 0ED8 _ C4 41 51: EF. C4 + vpslld xmm5, xmm11, 22 ; 0EDD _ C4 C1 51: 72. F3, 16 + vpsrld xmm11, xmm11, 10 ; 0EE3 _ C4 C1 21: 72. D3, 0A + vmovd xmm12, dword [r12+133CH] ; 0EE9 _ C4 41 79: 6E. A4 24, 0000133C + vpor xmm4, xmm5, xmm11 ; 0EF3 _ C4 C1 51: EB. E3 + vmovd xmm5, dword [r12+1338H] ; 0EF8 _ C4 C1 79: 6E. AC 24, 00001338 + vpshufd xmm14, xmm5, 0 ; 0F02 _ C5 79: 70. F5, 00 + vpshufd xmm13, xmm12, 0 ; 0F07 _ C4 41 79: 70. EC, 00 + vpxor xmm4, xmm4, xmm14 ; 0F0D _ C4 C1 59: EF. E6 + vpxor xmm9, xmm9, xmm13 ; 0F12 _ C4 41 31: EF. CD + vpand xmm12, xmm8, xmm10 ; 0F17 _ C4 41 39: DB. E2 + vpor xmm13, xmm10, xmm9 ; 0F1C _ C4 41 29: EB. E9 + vpxor xmm14, xmm9, xmm8 ; 0F21 _ C4 41 31: EF. F0 + vpxor xmm5, xmm10, xmm4 ; 0F26 _ C5 A9: EF. EC + vpxor xmm8, xmm4, xmm14 ; 0F2A _ C4 41 59: EF. C6 + vpor xmm4, xmm5, xmm12 ; 0F2F _ C4 C1 51: EB. E4 + vpand xmm10, xmm14, xmm13 ; 0F34 _ C4 41 09: DB. D5 + vpxor xmm5, xmm10, xmm4 ; 0F39 _ C5 A9: EF. EC + vpxor xmm14, xmm13, xmm12 ; 0F3D _ C4 41 11: EF. F4 + vpxor xmm12, xmm12, xmm5 ; 0F42 _ C5 19: EF. E5 + vpxor xmm11, xmm14, xmm5 ; 0F46 _ C5 09: EF. DD + vpor xmm13, xmm12, xmm14 ; 0F4A _ C4 41 19: EB. EE + vpand xmm4, xmm4, xmm14 ; 0F4F _ C4 C1 59: DB. E6 + vpxor xmm9, xmm13, xmm8 ; 0F54 _ C4 41 11: EF. C8 + vpxor xmm8, xmm4, xmm8 ; 0F59 _ C4 41 59: EF. C0 + vpor xmm12, xmm9, xmm5 ; 0F5E _ C5 31: EB. E5 + vpxor xmm13, xmm11, xmm12 ; 0F62 _ C4 41 21: EF. EC + vpslld xmm12, xmm5, 3 ; 0F67 _ C5 99: 72. F5, 03 + vpslld xmm10, xmm13, 13 ; 0F6C _ C4 C1 29: 72. F5, 0D + vpsrld xmm11, xmm13, 19 ; 0F72 _ C4 C1 21: 72. D5, 13 + vpor xmm11, xmm10, xmm11 ; 0F78 _ C4 41 29: EB. DB + vpsrld xmm5, xmm5, 29 ; 0F7D _ C5 D1: 72. D5, 1D + vpor xmm10, xmm12, xmm5 ; 0F82 _ C5 19: EB. D5 + vpxor xmm9, xmm9, xmm11 ; 0F86 _ C4 41 31: EF. CB + vpxor xmm5, xmm9, xmm10 ; 0F8B _ C4 C1 31: EF. EA + vpxor xmm4, xmm8, xmm10 ; 0F90 _ C4 C1 39: EF. E2 + vpslld xmm8, xmm11, 3 ; 0F95 _ C4 C1 39: 72. F3, 03 + vpslld xmm14, xmm5, 1 ; 0F9B _ C5 89: 72. F5, 01 + vpxor xmm12, xmm4, xmm8 ; 0FA0 _ C4 41 59: EF. E0 + vpsrld xmm5, xmm5, 31 ; 0FA5 _ C5 D1: 72. D5, 1F + vpor xmm13, xmm14, xmm5 ; 0FAA _ C5 09: EB. ED + vpslld xmm9, xmm12, 7 ; 0FAE _ C4 C1 31: 72. F4, 07 + vpsrld xmm5, xmm12, 25 ; 0FB4 _ C4 C1 51: 72. D4, 19 + vpxor xmm11, xmm11, xmm13 ; 0FBA _ C4 41 21: EF. DD + vpor xmm4, xmm9, xmm5 ; 0FBF _ C5 B1: EB. 
E5 + vpslld xmm5, xmm13, 7 ; 0FC3 _ C4 C1 51: 72. F5, 07 + vpxor xmm8, xmm11, xmm4 ; 0FC9 _ C5 21: EF. C4 + vpxor xmm10, xmm10, xmm4 ; 0FCD _ C5 29: EF. D4 + vpxor xmm12, xmm10, xmm5 ; 0FD1 _ C5 29: EF. E5 + vpslld xmm14, xmm8, 5 ; 0FD5 _ C4 C1 09: 72. F0, 05 + vmovd xmm10, dword [r12+1340H] ; 0FDB _ C4 41 79: 6E. 94 24, 00001340 + vpsrld xmm9, xmm8, 27 ; 0FE5 _ C4 C1 31: 72. D0, 1B + vmovd xmm8, dword [r12+134CH] ; 0FEB _ C4 41 79: 6E. 84 24, 0000134C + vpor xmm11, xmm14, xmm9 ; 0FF5 _ C4 41 09: EB. D9 + vpshufd xmm5, xmm10, 0 ; 0FFA _ C4 C1 79: 70. EA, 00 + vmovd xmm10, dword [r12+1344H] ; 1000 _ C4 41 79: 6E. 94 24, 00001344 + vpxor xmm5, xmm11, xmm5 ; 100A _ C5 A1: EF. ED + vpshufd xmm9, xmm8, 0 ; 100E _ C4 41 79: 70. C8, 00 + vpxor xmm11, xmm4, xmm9 ; 1014 _ C4 41 59: EF. D9 + vpslld xmm9, xmm12, 22 ; 1019 _ C4 C1 31: 72. F4, 16 + vpshufd xmm4, xmm10, 0 ; 101F _ C4 C1 79: 70. E2, 00 + vpsrld xmm10, xmm12, 10 ; 1025 _ C4 C1 29: 72. D4, 0A + vmovd xmm14, dword [r12+1348H] ; 102B _ C4 41 79: 6E. B4 24, 00001348 + vpxor xmm13, xmm13, xmm4 ; 1035 _ C5 11: EF. EC + vpxor xmm4, xmm13, xmm11 ; 1039 _ C4 C1 11: EF. E3 + vpor xmm12, xmm9, xmm10 ; 103E _ C4 41 31: EB. E2 + vpshufd xmm13, xmm14, 0 ; 1043 _ C4 41 79: 70. EE, 00 + vpxor xmm8, xmm11, xmm6 ; 1049 _ C5 21: EF. C6 + vpxor xmm14, xmm12, xmm13 ; 104D _ C4 41 19: EF. F5 + vpxor xmm11, xmm14, xmm8 ; 1052 _ C4 41 09: EF. D8 + vpxor xmm8, xmm8, xmm5 ; 1057 _ C5 39: EF. C5 + vpand xmm9, xmm4, xmm8 ; 105B _ C4 41 59: DB. C8 + vpxor xmm4, xmm4, xmm8 ; 1060 _ C4 C1 59: EF. E0 + vpxor xmm9, xmm9, xmm11 ; 1065 _ C4 41 31: EF. CB + vpxor xmm10, xmm5, xmm4 ; 106A _ C5 51: EF. D4 + vpand xmm5, xmm11, xmm4 ; 106E _ C5 A1: DB. EC + vpand xmm12, xmm10, xmm9 ; 1072 _ C4 41 29: DB. E1 + vpxor xmm13, xmm5, xmm10 ; 1077 _ C4 41 51: EF. EA + vpxor xmm14, xmm8, xmm12 ; 107C _ C4 41 39: EF. F4 + vpslld xmm5, xmm9, 13 ; 1081 _ C4 C1 51: 72. F1, 0D + vpor xmm8, xmm12, xmm14 ; 1087 _ C4 41 19: EB. C6 + vpor xmm4, xmm4, xmm9 ; 108C _ C4 C1 59: EB. E1 + vpxor xmm11, xmm8, xmm13 ; 1091 _ C4 41 39: EF. DD + vpsrld xmm8, xmm9, 19 ; 1096 _ C4 C1 39: 72. D1, 13 + vpxor xmm11, xmm11, xmm6 ; 109C _ C5 21: EF. DE + vpor xmm5, xmm5, xmm8 ; 10A0 _ C4 C1 51: EB. E8 + vpslld xmm10, xmm11, 3 ; 10A5 _ C4 C1 29: 72. F3, 03 + vpsrld xmm11, xmm11, 29 ; 10AB _ C4 C1 21: 72. D3, 1D + vpor xmm8, xmm10, xmm11 ; 10B1 _ C4 41 29: EB. C3 + vpxor xmm9, xmm4, xmm12 ; 10B6 _ C4 41 59: EF. CC + vpand xmm11, xmm13, xmm14 ; 10BB _ C4 41 11: DB. DE + vpxor xmm14, xmm14, xmm8 ; 10C0 _ C4 41 09: EF. F0 + vpxor xmm4, xmm9, xmm11 ; 10C5 _ C4 C1 31: EF. E3 + vpslld xmm12, xmm5, 3 ; 10CA _ C5 99: 72. F5, 03 + vpxor xmm9, xmm4, xmm5 ; 10CF _ C5 59: EF. CD + vpxor xmm4, xmm14, xmm12 ; 10D3 _ C4 C1 09: EF. E4 + vpxor xmm13, xmm9, xmm8 ; 10D8 _ C4 41 31: EF. E8 + vpslld xmm9, xmm4, 7 ; 10DD _ C5 B1: 72. F4, 07 + vpslld xmm10, xmm13, 1 ; 10E2 _ C4 C1 29: 72. F5, 01 + vpsrld xmm11, xmm13, 31 ; 10E8 _ C4 C1 21: 72. D5, 1F + vpor xmm11, xmm10, xmm11 ; 10EE _ C4 41 29: EB. DB + vpsrld xmm14, xmm4, 25 ; 10F3 _ C5 89: 72. D4, 19 + vpor xmm9, xmm9, xmm14 ; 10F8 _ C4 41 31: EB. CE + vpxor xmm5, xmm5, xmm11 ; 10FD _ C4 C1 51: EF. EB + vpxor xmm4, xmm5, xmm9 ; 1102 _ C4 C1 51: EF. E1 + vpxor xmm12, xmm8, xmm9 ; 1107 _ C4 41 39: EF. E1 + vmovd xmm8, dword [r12+1354H] ; 110C _ C4 41 79: 6E. 84 24, 00001354 + vpslld xmm13, xmm11, 7 ; 1116 _ C4 C1 11: 72. F3, 07 + vmovd xmm5, dword [r12+135CH] ; 111C _ C4 C1 79: 6E. AC 24, 0000135C + vpxor xmm10, xmm12, xmm13 ; 1126 _ C4 41 19: EF. 
D5 + vpshufd xmm12, xmm8, 0 ; 112B _ C4 41 79: 70. E0, 00 + vpshufd xmm8, xmm5, 0 ; 1131 _ C5 79: 70. C5, 00 + vpxor xmm13, xmm11, xmm12 ; 1136 _ C4 41 21: EF. EC + vmovd xmm14, dword [r12+1350H] ; 113B _ C4 41 79: 6E. B4 24, 00001350 + vpxor xmm12, xmm9, xmm8 ; 1145 _ C4 41 31: EF. E0 + vpslld xmm9, xmm4, 5 ; 114A _ C5 B1: 72. F4, 05 + vpsrld xmm4, xmm4, 27 ; 114F _ C5 D9: 72. D4, 1B + vpshufd xmm14, xmm14, 0 ; 1154 _ C4 41 79: 70. F6, 00 + vpor xmm4, xmm9, xmm4 ; 115A _ C5 B1: EB. E4 + vpxor xmm4, xmm4, xmm14 ; 115E _ C4 C1 59: EF. E6 + vpxor xmm9, xmm13, xmm12 ; 1163 _ C4 41 11: EF. CC + vmovd xmm11, dword [r12+1358H] ; 1168 _ C4 41 79: 6E. 9C 24, 00001358 + vpxor xmm14, xmm4, xmm13 ; 1172 _ C4 41 59: EF. F5 + vpslld xmm13, xmm10, 22 ; 1177 _ C4 C1 11: 72. F2, 16 + vpsrld xmm10, xmm10, 10 ; 117D _ C4 C1 29: 72. D2, 0A + vpshufd xmm11, xmm11, 0 ; 1183 _ C4 41 79: 70. DB, 00 + vpxor xmm4, xmm12, xmm6 ; 1189 _ C5 99: EF. E6 + vpor xmm12, xmm13, xmm10 ; 118D _ C4 41 11: EB. E2 + vpxor xmm11, xmm12, xmm11 ; 1192 _ C4 41 19: EF. DB + vpand xmm12, xmm9, xmm14 ; 1197 _ C4 41 31: DB. E6 + vpxor xmm13, xmm11, xmm4 ; 119C _ C5 21: EF. EC + vpxor xmm10, xmm12, xmm13 ; 11A0 _ C4 41 19: EF. D5 + vpor xmm12, xmm13, xmm9 ; 11A5 _ C4 41 11: EB. E1 + vpand xmm5, xmm4, xmm10 ; 11AA _ C4 C1 59: DB. EA + vpxor xmm4, xmm9, xmm4 ; 11AF _ C5 B1: EF. E4 + vpxor xmm13, xmm5, xmm14 ; 11B3 _ C4 41 51: EF. EE + vpxor xmm9, xmm4, xmm10 ; 11B8 _ C4 41 59: EF. CA + vpxor xmm8, xmm9, xmm12 ; 11BD _ C4 41 31: EF. C4 + vpand xmm11, xmm14, xmm13 ; 11C2 _ C4 41 09: DB. DD + vpxor xmm5, xmm11, xmm8 ; 11C7 _ C4 C1 21: EF. E8 + vpslld xmm4, xmm10, 13 ; 11CC _ C4 C1 59: 72. F2, 0D + vpsrld xmm10, xmm10, 19 ; 11D2 _ C4 C1 29: 72. D2, 13 + vpxor xmm14, xmm12, xmm14 ; 11D8 _ C4 41 19: EF. F6 + vpor xmm11, xmm4, xmm10 ; 11DD _ C4 41 59: EB. DA + vpslld xmm4, xmm5, 3 ; 11E2 _ C5 D9: 72. F5, 03 + vpsrld xmm9, xmm5, 29 ; 11E7 _ C5 B1: 72. D5, 1D + vpor xmm8, xmm8, xmm13 ; 11EC _ C4 41 39: EB. C5 + vpxor xmm14, xmm14, xmm6 ; 11F1 _ C5 09: EF. F6 + vpor xmm9, xmm4, xmm9 ; 11F5 _ C4 41 59: EB. C9 + vpxor xmm5, xmm13, xmm11 ; 11FA _ C4 C1 11: EF. EB + vpxor xmm12, xmm8, xmm14 ; 11FF _ C4 41 39: EF. E6 + vpxor xmm4, xmm5, xmm9 ; 1204 _ C4 C1 51: EF. E1 + vpxor xmm13, xmm12, xmm9 ; 1209 _ C4 41 19: EF. E9 + vpslld xmm5, xmm11, 3 ; 120E _ C4 C1 51: 72. F3, 03 + vpslld xmm8, xmm4, 1 ; 1214 _ C5 B9: 72. F4, 01 + vpxor xmm10, xmm13, xmm5 ; 1219 _ C5 11: EF. D5 + vpsrld xmm4, xmm4, 31 ; 121D _ C5 D9: 72. D4, 1F + vpor xmm12, xmm8, xmm4 ; 1222 _ C5 39: EB. E4 + vpslld xmm4, xmm10, 7 ; 1226 _ C4 C1 59: 72. F2, 07 + vpsrld xmm14, xmm10, 25 ; 122C _ C4 C1 09: 72. D2, 19 + vpxor xmm11, xmm11, xmm12 ; 1232 _ C4 41 21: EF. DC + vpor xmm13, xmm4, xmm14 ; 1237 _ C4 41 59: EB. EE + vpxor xmm5, xmm11, xmm13 ; 123C _ C4 C1 21: EF. ED + vpxor xmm9, xmm9, xmm13 ; 1241 _ C4 41 31: EF. CD + vpslld xmm11, xmm12, 7 ; 1246 _ C4 C1 21: 72. F4, 07 + vpslld xmm4, xmm5, 5 ; 124C _ C5 D9: 72. F5, 05 + vmovd xmm14, dword [r12+1360H] ; 1251 _ C4 41 79: 6E. B4 24, 00001360 + vpxor xmm11, xmm9, xmm11 ; 125B _ C4 41 31: EF. DB + vpsrld xmm9, xmm5, 27 ; 1260 _ C5 B1: 72. D5, 1B + vpshufd xmm10, xmm14, 0 ; 1265 _ C4 41 79: 70. D6, 00 + vpor xmm8, xmm4, xmm9 ; 126B _ C4 41 59: EB. C1 + vmovd xmm5, dword [r12+1368H] ; 1270 _ C4 C1 79: 6E. AC 24, 00001368 + vpxor xmm10, xmm8, xmm10 ; 127A _ C4 41 39: EF. D2 + vmovd xmm4, dword [r12+136CH] ; 127F _ C4 C1 79: 6E. A4 24, 0000136C + vpslld xmm8, xmm11, 22 ; 1289 _ C4 C1 39: 72. 
F3, 16 + vpsrld xmm11, xmm11, 10 ; 128F _ C4 C1 21: 72. D3, 0A + vpshufd xmm9, xmm4, 0 ; 1295 _ C5 79: 70. CC, 00 + vpor xmm11, xmm8, xmm11 ; 129A _ C4 41 39: EB. DB + vpshufd xmm5, xmm5, 0 ; 129F _ C5 F9: 70. ED, 00 + vpxor xmm13, xmm13, xmm9 ; 12A4 _ C4 41 11: EF. E9 + vmovd xmm14, dword [r12+1364H] ; 12A9 _ C4 41 79: 6E. B4 24, 00001364 + vpxor xmm11, xmm11, xmm5 ; 12B3 _ C5 21: EF. DD + vpshufd xmm14, xmm14, 0 ; 12B7 _ C4 41 79: 70. F6, 00 + vpxor xmm8, xmm11, xmm6 ; 12BD _ C5 21: EF. C6 + vpxor xmm11, xmm10, xmm13 ; 12C1 _ C4 41 29: EF. DD + vpand xmm10, xmm13, xmm10 ; 12C6 _ C4 41 11: DB. D2 + vpxor xmm5, xmm10, xmm8 ; 12CB _ C4 C1 29: EF. E8 + vpxor xmm12, xmm12, xmm14 ; 12D0 _ C4 41 19: EF. E6 + vpxor xmm10, xmm12, xmm5 ; 12D5 _ C5 19: EF. D5 + vpor xmm4, xmm8, xmm13 ; 12D9 _ C4 C1 39: EB. E5 + vpor xmm14, xmm11, xmm10 ; 12DE _ C4 41 21: EB. F2 + vpxor xmm11, xmm4, xmm11 ; 12E3 _ C4 41 59: EF. DB + vpxor xmm8, xmm11, xmm10 ; 12E8 _ C4 41 21: EF. C2 + vpor xmm9, xmm14, xmm5 ; 12ED _ C5 09: EB. CD + vpxor xmm13, xmm13, xmm14 ; 12F1 _ C4 41 11: EF. EE + vpxor xmm11, xmm9, xmm8 ; 12F6 _ C4 41 31: EF. D8 + vpxor xmm4, xmm13, xmm5 ; 12FB _ C5 91: EF. E5 + vpslld xmm9, xmm11, 13 ; 12FF _ C4 C1 31: 72. F3, 0D + vpxor xmm12, xmm4, xmm11 ; 1305 _ C4 41 59: EF. E3 + vpsrld xmm11, xmm11, 19 ; 130A _ C4 C1 21: 72. D3, 13 + vpor xmm9, xmm9, xmm11 ; 1310 _ C4 41 31: EB. CB + vpslld xmm14, xmm12, 3 ; 1315 _ C4 C1 09: 72. F4, 03 + vpsrld xmm13, xmm12, 29 ; 131B _ C4 C1 11: 72. D4, 1D + vpxor xmm5, xmm5, xmm6 ; 1321 _ C5 D1: EF. EE + vpand xmm12, xmm8, xmm12 ; 1325 _ C4 41 39: DB. E4 + vpor xmm4, xmm14, xmm13 ; 132A _ C4 C1 09: EB. E5 + vpxor xmm10, xmm10, xmm9 ; 132F _ C4 41 29: EF. D1 + vpxor xmm8, xmm5, xmm12 ; 1334 _ C4 41 51: EF. C4 + vpxor xmm14, xmm10, xmm4 ; 1339 _ C5 29: EF. F4 + vpxor xmm11, xmm8, xmm4 ; 133D _ C5 39: EF. DC + vpslld xmm12, xmm9, 3 ; 1341 _ C4 C1 19: 72. F1, 03 + vpslld xmm13, xmm14, 1 ; 1347 _ C4 C1 11: 72. F6, 01 + vpxor xmm5, xmm11, xmm12 ; 134D _ C4 C1 21: EF. EC + vpsrld xmm14, xmm14, 31 ; 1352 _ C4 C1 09: 72. D6, 1F + vpor xmm11, xmm13, xmm14 ; 1358 _ C4 41 11: EB. DE + vpslld xmm8, xmm5, 7 ; 135D _ C5 B9: 72. F5, 07 + vpsrld xmm10, xmm5, 25 ; 1362 _ C5 A9: 72. D5, 19 + vpxor xmm9, xmm9, xmm11 ; 1367 _ C4 41 31: EF. CB + vpor xmm8, xmm8, xmm10 ; 136C _ C4 41 39: EB. C2 + vpxor xmm14, xmm9, xmm8 ; 1371 _ C4 41 31: EF. F0 + vpxor xmm4, xmm4, xmm8 ; 1376 _ C4 C1 59: EF. E0 + vpslld xmm9, xmm11, 7 ; 137B _ C4 C1 31: 72. F3, 07 + vpsrld xmm12, xmm14, 27 ; 1381 _ C4 C1 19: 72. D6, 1B + vmovd xmm13, dword [r12+1370H] ; 1387 _ C4 41 79: 6E. AC 24, 00001370 + vpxor xmm5, xmm4, xmm9 ; 1391 _ C4 C1 59: EF. E9 + vpslld xmm4, xmm14, 5 ; 1396 _ C4 C1 59: 72. F6, 05 + vmovd xmm14, dword [r12+1374H] ; 139C _ C4 41 79: 6E. B4 24, 00001374 + vpor xmm10, xmm4, xmm12 ; 13A6 _ C4 41 59: EB. D4 + vpshufd xmm4, xmm13, 0 ; 13AB _ C4 C1 79: 70. E5, 00 + vpshufd xmm12, xmm14, 0 ; 13B1 _ C4 41 79: 70. E6, 00 + vpxor xmm9, xmm10, xmm4 ; 13B7 _ C5 29: EF. CC + vpxor xmm10, xmm11, xmm12 ; 13BB _ C4 41 21: EF. D4 + vpslld xmm11, xmm5, 22 ; 13C0 _ C5 A1: 72. F5, 16 + vpsrld xmm5, xmm5, 10 ; 13C5 _ C5 D1: 72. D5, 0A + vpor xmm4, xmm11, xmm5 ; 13CA _ C5 A1: EB. E5 + vmovd xmm11, dword [r12+1378H] ; 13CE _ C4 41 79: 6E. 9C 24, 00001378 + vmovd xmm12, dword [r12+137CH] ; 13D8 _ C4 41 79: 6E. A4 24, 0000137C + vpshufd xmm14, xmm11, 0 ; 13E2 _ C4 41 79: 70. F3, 00 + vpshufd xmm5, xmm12, 0 ; 13E8 _ C4 C1 79: 70. EC, 00 + vpxor xmm13, xmm4, xmm14 ; 13EE _ C4 41 59: EF. 
EE + vpxor xmm5, xmm8, xmm5 ; 13F3 _ C5 B9: EF. ED + vpor xmm8, xmm10, xmm13 ; 13F7 _ C4 41 29: EB. C5 + vpxor xmm8, xmm8, xmm5 ; 13FC _ C5 39: EF. C5 + vpxor xmm10, xmm10, xmm13 ; 1400 _ C4 41 29: EF. D5 + vpxor xmm11, xmm13, xmm8 ; 1405 _ C4 41 11: EF. D8 + vpxor xmm14, xmm10, xmm11 ; 140A _ C4 41 29: EF. F3 + vpor xmm10, xmm5, xmm10 ; 140F _ C4 41 51: EB. D2 + vpor xmm4, xmm8, xmm14 ; 1414 _ C4 C1 39: EB. E6 + vpor xmm13, xmm9, xmm14 ; 1419 _ C4 41 31: EB. EE + vpxor xmm12, xmm4, xmm9 ; 141E _ C4 41 59: EF. E1 + vpxor xmm13, xmm13, xmm11 ; 1423 _ C4 41 11: EF. EB + vpxor xmm12, xmm12, xmm14 ; 1428 _ C4 41 19: EF. E6 + vpand xmm9, xmm10, xmm9 ; 142D _ C4 41 29: DB. C9 + vpxor xmm11, xmm11, xmm12 ; 1432 _ C4 41 21: EF. DC + vpand xmm4, xmm12, xmm13 ; 1437 _ C4 C1 19: DB. E5 + vpxor xmm11, xmm11, xmm6 ; 143C _ C5 21: EF. DE + vpxor xmm4, xmm4, xmm14 ; 1440 _ C4 C1 59: EF. E6 + vpor xmm12, xmm11, xmm13 ; 1445 _ C4 41 21: EB. E5 + vpxor xmm5, xmm9, xmm8 ; 144A _ C4 C1 31: EF. E8 + vpxor xmm14, xmm14, xmm12 ; 144F _ C4 41 09: EF. F4 + vpslld xmm11, xmm14, 13 ; 1454 _ C4 C1 21: 72. F6, 0D + vpsrld xmm14, xmm14, 19 ; 145A _ C4 C1 09: 72. D6, 13 + vpor xmm12, xmm11, xmm14 ; 1460 _ C4 41 21: EB. E6 + vpslld xmm11, xmm4, 3 ; 1465 _ C5 A1: 72. F4, 03 + vpsrld xmm4, xmm4, 29 ; 146A _ C5 D9: 72. D4, 1D + vpxor xmm8, xmm5, xmm12 ; 146F _ C4 41 51: EF. C4 + vpor xmm11, xmm11, xmm4 ; 1474 _ C5 21: EB. DC + vpslld xmm9, xmm12, 3 ; 1478 _ C4 C1 31: 72. F4, 03 + vpxor xmm14, xmm8, xmm11 ; 147E _ C4 41 39: EF. F3 + vpxor xmm4, xmm13, xmm11 ; 1483 _ C4 C1 11: EF. E3 + vpxor xmm8, xmm4, xmm9 ; 1488 _ C4 41 59: EF. C1 + vpslld xmm13, xmm14, 1 ; 148D _ C4 C1 11: 72. F6, 01 + vpsrld xmm5, xmm14, 31 ; 1493 _ C4 C1 51: 72. D6, 1F + vpslld xmm10, xmm8, 7 ; 1499 _ C4 C1 29: 72. F0, 07 + vpor xmm9, xmm13, xmm5 ; 149F _ C5 11: EB. CD + vpsrld xmm4, xmm8, 25 ; 14A3 _ C4 C1 59: 72. D0, 19 + vpor xmm4, xmm10, xmm4 ; 14A9 _ C5 A9: EB. E4 + vpxor xmm12, xmm12, xmm9 ; 14AD _ C4 41 19: EF. E1 + vpxor xmm12, xmm12, xmm4 ; 14B2 _ C5 19: EF. E4 + vpxor xmm11, xmm11, xmm4 ; 14B6 _ C5 21: EF. DC + vpslld xmm14, xmm9, 7 ; 14BA _ C4 C1 09: 72. F1, 07 + vpslld xmm13, xmm12, 5 ; 14C0 _ C4 C1 11: 72. F4, 05 + vmovd xmm8, dword [r12+1380H] ; 14C6 _ C4 41 79: 6E. 84 24, 00001380 + vpxor xmm11, xmm11, xmm14 ; 14D0 _ C4 41 21: EF. DE + vpsrld xmm5, xmm12, 27 ; 14D5 _ C4 C1 51: 72. D4, 1B + vmovd xmm12, dword [r12+1384H] ; 14DB _ C4 41 79: 6E. A4 24, 00001384 + vpor xmm10, xmm13, xmm5 ; 14E5 _ C5 11: EB. D5 + vpshufd xmm14, xmm8, 0 ; 14E9 _ C4 41 79: 70. F0, 00 + vpslld xmm5, xmm11, 22 ; 14EF _ C4 C1 51: 72. F3, 16 + vmovd xmm8, dword [r12+138CH] ; 14F5 _ C4 41 79: 6E. 84 24, 0000138C + vpsrld xmm11, xmm11, 10 ; 14FF _ C4 C1 21: 72. D3, 0A + vpshufd xmm13, xmm12, 0 ; 1505 _ C4 41 79: 70. EC, 00 + vpxor xmm14, xmm10, xmm14 ; 150B _ C4 41 29: EF. F6 + vpshufd xmm10, xmm8, 0 ; 1510 _ C4 41 79: 70. D0, 00 + vpor xmm12, xmm5, xmm11 ; 1516 _ C4 41 51: EB. E3 + vmovd xmm11, dword [r12+1388H] ; 151B _ C4 41 79: 6E. 9C 24, 00001388 + vpxor xmm9, xmm9, xmm13 ; 1525 _ C4 41 31: EF. CD + vpshufd xmm13, xmm11, 0 ; 152A _ C4 41 79: 70. EB, 00 + vpxor xmm4, xmm4, xmm10 ; 1530 _ C4 C1 59: EF. E2 + vpxor xmm11, xmm12, xmm13 ; 1535 _ C4 41 19: EF. DD + vpxor xmm8, xmm4, xmm14 ; 153A _ C4 41 59: EF. C6 + vpxor xmm12, xmm9, xmm11 ; 153F _ C4 41 31: EF. E3 + vpand xmm9, xmm9, xmm8 ; 1544 _ C4 41 31: DB. C8 + vpxor xmm10, xmm9, xmm14 ; 1549 _ C4 41 31: EF. D6 + vpor xmm4, xmm14, xmm8 ; 154E _ C4 C1 09: EB. E0 + vpxor xmm14, xmm12, xmm8 ; 1553 _ C4 41 19: EF. 
F0 + vpxor xmm9, xmm4, xmm12 ; 1558 _ C4 41 59: EF. CC + vpxor xmm5, xmm8, xmm11 ; 155D _ C4 C1 39: EF. EB + vpor xmm11, xmm11, xmm10 ; 1562 _ C4 41 21: EB. DA + vpxor xmm8, xmm14, xmm6 ; 1567 _ C5 09: EF. C6 + vpxor xmm13, xmm11, xmm14 ; 156B _ C4 41 21: EF. EE + vpor xmm12, xmm8, xmm10 ; 1570 _ C4 41 39: EB. E2 + vpor xmm14, xmm5, xmm9 ; 1575 _ C4 41 51: EB. F1 + vpxor xmm5, xmm10, xmm5 ; 157A _ C5 A9: EF. ED + vpslld xmm8, xmm13, 3 ; 157E _ C4 C1 39: 72. F5, 03 + vpxor xmm10, xmm5, xmm12 ; 1584 _ C4 41 51: EF. D4 + vpsrld xmm13, xmm13, 29 ; 1589 _ C4 C1 11: 72. D5, 1D + vpxor xmm11, xmm10, xmm14 ; 158F _ C4 41 29: EF. DE + vpxor xmm14, xmm12, xmm14 ; 1594 _ C4 41 19: EF. F6 + vpslld xmm4, xmm11, 13 ; 1599 _ C4 C1 59: 72. F3, 0D + vpsrld xmm5, xmm11, 19 ; 159F _ C4 C1 51: 72. D3, 13 + vpor xmm4, xmm4, xmm5 ; 15A5 _ C5 D9: EB. E5 + vpor xmm11, xmm8, xmm13 ; 15A9 _ C4 41 39: EB. DD + vpxor xmm12, xmm14, xmm4 ; 15AE _ C5 09: EF. E4 + vpxor xmm9, xmm9, xmm11 ; 15B2 _ C4 41 31: EF. CB + vpxor xmm13, xmm12, xmm11 ; 15B7 _ C4 41 19: EF. EB + vpslld xmm14, xmm4, 3 ; 15BC _ C5 89: 72. F4, 03 + vpxor xmm5, xmm9, xmm14 ; 15C1 _ C4 C1 31: EF. EE + vpslld xmm9, xmm13, 1 ; 15C6 _ C4 C1 31: 72. F5, 01 + vpsrld xmm12, xmm13, 31 ; 15CC _ C4 C1 19: 72. D5, 1F + vpslld xmm8, xmm5, 7 ; 15D2 _ C5 B9: 72. F5, 07 + vpor xmm14, xmm9, xmm12 ; 15D7 _ C4 41 31: EB. F4 + vpsrld xmm10, xmm5, 25 ; 15DC _ C5 A9: 72. D5, 19 + vpor xmm10, xmm8, xmm10 ; 15E1 _ C4 41 39: EB. D2 + vpxor xmm4, xmm4, xmm14 ; 15E6 _ C4 C1 59: EF. E6 + vmovd xmm12, dword [r12+139CH] ; 15EB _ C4 41 79: 6E. A4 24, 0000139C + vpxor xmm8, xmm4, xmm10 ; 15F5 _ C4 41 59: EF. C2 + vpxor xmm11, xmm11, xmm10 ; 15FA _ C4 41 21: EF. DA + vpslld xmm4, xmm14, 7 ; 15FF _ C4 C1 59: 72. F6, 07 + vpshufd xmm13, xmm12, 0 ; 1605 _ C4 41 79: 70. EC, 00 + vpxor xmm4, xmm11, xmm4 ; 160B _ C5 A1: EF. E4 + vmovd xmm11, dword [r12+1394H] ; 160F _ C4 41 79: 6E. 9C 24, 00001394 + vpxor xmm13, xmm10, xmm13 ; 1619 _ C4 41 29: EF. ED + vmovd xmm5, dword [r12+1390H] ; 161E _ C4 C1 79: 6E. AC 24, 00001390 + vpslld xmm10, xmm8, 5 ; 1628 _ C4 C1 29: 72. F0, 05 + vpshufd xmm9, xmm11, 0 ; 162E _ C4 41 79: 70. CB, 00 + vpsrld xmm8, xmm8, 27 ; 1634 _ C4 C1 39: 72. D0, 1B + vpshufd xmm5, xmm5, 0 ; 163A _ C5 F9: 70. ED, 00 + vpxor xmm14, xmm14, xmm9 ; 163F _ C4 41 09: EF. F1 + vpor xmm9, xmm10, xmm8 ; 1644 _ C4 41 29: EB. C8 + vmovd xmm11, dword [r12+1398H] ; 1649 _ C4 41 79: 6E. 9C 24, 00001398 + vpxor xmm9, xmm9, xmm5 ; 1653 _ C5 31: EF. CD + vpslld xmm5, xmm4, 22 ; 1657 _ C5 D1: 72. F4, 16 + vpsrld xmm4, xmm4, 10 ; 165C _ C5 D9: 72. D4, 0A + vpshufd xmm11, xmm11, 0 ; 1661 _ C4 41 79: 70. DB, 00 + vpor xmm8, xmm5, xmm4 ; 1667 _ C5 51: EB. C4 + vpxor xmm12, xmm9, xmm6 ; 166B _ C5 31: EF. E6 + vpxor xmm10, xmm8, xmm11 ; 166F _ C4 41 39: EF. D3 + vpand xmm9, xmm12, xmm14 ; 1674 _ C4 41 19: DB. CE + vpxor xmm11, xmm10, xmm6 ; 1679 _ C5 29: EF. DE + vpxor xmm11, xmm11, xmm9 ; 167D _ C4 41 21: EF. D9 + vpor xmm9, xmm9, xmm13 ; 1682 _ C4 41 31: EB. CD + vpxor xmm14, xmm14, xmm9 ; 1687 _ C4 41 09: EF. F1 + vpxor xmm9, xmm9, xmm12 ; 168C _ C4 41 31: EF. CC + vpxor xmm4, xmm13, xmm11 ; 1691 _ C4 C1 11: EF. E3 + vpor xmm8, xmm12, xmm14 ; 1696 _ C4 41 19: EB. C6 + vpor xmm11, xmm11, xmm9 ; 169B _ C4 41 21: EB. D9 + vpxor xmm10, xmm14, xmm4 ; 16A0 _ C5 09: EF. D4 + vpand xmm5, xmm11, xmm8 ; 16A4 _ C4 C1 21: DB. E8 + vpxor xmm13, xmm9, xmm10 ; 16A9 _ C4 41 31: EF. EA + vpslld xmm14, xmm5, 13 ; 16AE _ C5 89: 72. F5, 0D + vpsrld xmm12, xmm5, 19 ; 16B3 _ C5 99: 72. 
D5, 13 + vpslld xmm9, xmm4, 3 ; 16B8 _ C5 B1: 72. F4, 03 + vpsrld xmm4, xmm4, 29 ; 16BD _ C5 D9: 72. D4, 1D + vpor xmm11, xmm14, xmm12 ; 16C2 _ C4 41 09: EB. DC + vpor xmm12, xmm9, xmm4 ; 16C7 _ C5 31: EB. E4 + vpand xmm4, xmm13, xmm5 ; 16CB _ C5 91: DB. E5 + vpand xmm5, xmm10, xmm5 ; 16CF _ C5 A9: DB. ED + vpxor xmm8, xmm8, xmm4 ; 16D3 _ C5 39: EF. C4 + vpxor xmm13, xmm5, xmm13 ; 16D7 _ C4 41 51: EF. ED + vpxor xmm4, xmm8, xmm11 ; 16DC _ C4 C1 39: EF. E3 + vpxor xmm10, xmm13, xmm12 ; 16E1 _ C4 41 11: EF. D4 + vpxor xmm9, xmm4, xmm12 ; 16E6 _ C4 41 59: EF. CC + vpslld xmm4, xmm11, 3 ; 16EB _ C4 C1 59: 72. F3, 03 + vpxor xmm13, xmm10, xmm4 ; 16F1 _ C5 29: EF. EC + vpslld xmm14, xmm9, 1 ; 16F5 _ C4 C1 09: 72. F1, 01 + vpsrld xmm9, xmm9, 31 ; 16FB _ C4 C1 31: 72. D1, 1F + vpslld xmm5, xmm13, 7 ; 1701 _ C4 C1 51: 72. F5, 07 + vpor xmm4, xmm14, xmm9 ; 1707 _ C4 C1 09: EB. E1 + vpsrld xmm8, xmm13, 25 ; 170C _ C4 C1 39: 72. D5, 19 + vpor xmm14, xmm5, xmm8 ; 1712 _ C4 41 51: EB. F0 + vpxor xmm11, xmm11, xmm4 ; 1717 _ C5 21: EF. DC + vpxor xmm9, xmm11, xmm14 ; 171B _ C4 41 21: EF. CE + vpxor xmm12, xmm12, xmm14 ; 1720 _ C4 41 19: EF. E6 + vpslld xmm11, xmm4, 7 ; 1725 _ C5 A1: 72. F4, 07 + vpsrld xmm13, xmm9, 27 ; 172A _ C4 C1 11: 72. D1, 1B + vmovd xmm5, dword [r12+13A0H] ; 1730 _ C4 C1 79: 6E. AC 24, 000013A0 + vpxor xmm10, xmm12, xmm11 ; 173A _ C4 41 19: EF. D3 + vpslld xmm12, xmm9, 5 ; 173F _ C4 C1 19: 72. F1, 05 + vmovd xmm9, dword [r12+13A4H] ; 1745 _ C4 41 79: 6E. 8C 24, 000013A4 + vpor xmm8, xmm12, xmm13 ; 174F _ C4 41 19: EB. C5 + vpshufd xmm11, xmm5, 0 ; 1754 _ C5 79: 70. DD, 00 + vpshufd xmm12, xmm9, 0 ; 1759 _ C4 41 79: 70. E1, 00 + vpxor xmm5, xmm8, xmm11 ; 175F _ C4 C1 39: EF. EB + vmovd xmm11, dword [r12+13A8H] ; 1764 _ C4 41 79: 6E. 9C 24, 000013A8 + vpxor xmm8, xmm4, xmm12 ; 176E _ C4 41 59: EF. C4 + vpslld xmm4, xmm10, 22 ; 1773 _ C4 C1 59: 72. F2, 16 + vpsrld xmm10, xmm10, 10 ; 1779 _ C4 C1 29: 72. D2, 0A + vmovd xmm12, dword [r12+13ACH] ; 177F _ C4 41 79: 6E. A4 24, 000013AC + vpor xmm4, xmm4, xmm10 ; 1789 _ C4 C1 59: EB. E2 + vpshufd xmm9, xmm11, 0 ; 178E _ C4 41 79: 70. CB, 00 + vpshufd xmm13, xmm12, 0 ; 1794 _ C4 41 79: 70. EC, 00 + vpxor xmm10, xmm4, xmm9 ; 179A _ C4 41 59: EF. D1 + vpxor xmm4, xmm14, xmm13 ; 179F _ C4 C1 09: EF. E5 + vpand xmm14, xmm5, xmm10 ; 17A4 _ C4 41 51: DB. F2 + vpxor xmm13, xmm14, xmm4 ; 17A9 _ C5 09: EF. EC + vpxor xmm11, xmm10, xmm8 ; 17AD _ C4 41 29: EF. D8 + vpxor xmm12, xmm11, xmm13 ; 17B2 _ C4 41 21: EF. E5 + vpor xmm9, xmm4, xmm5 ; 17B7 _ C5 59: EB. CD + vpxor xmm14, xmm9, xmm8 ; 17BB _ C4 41 31: EF. F0 + vpxor xmm8, xmm5, xmm12 ; 17C0 _ C4 41 51: EF. C4 + vpor xmm5, xmm14, xmm8 ; 17C5 _ C4 C1 09: EB. E8 + vpand xmm11, xmm13, xmm14 ; 17CA _ C4 41 11: DB. DE + vpxor xmm9, xmm5, xmm13 ; 17CF _ C4 41 51: EF. CD + vpxor xmm4, xmm8, xmm11 ; 17D4 _ C4 C1 39: EF. E3 + vpxor xmm5, xmm14, xmm9 ; 17D9 _ C4 C1 09: EF. E9 + vpslld xmm8, xmm12, 13 ; 17DE _ C4 C1 39: 72. F4, 0D + vpxor xmm10, xmm5, xmm4 ; 17E4 _ C5 51: EF. D4 + vpsrld xmm12, xmm12, 19 ; 17E8 _ C4 C1 19: 72. D4, 13 + vpor xmm14, xmm8, xmm12 ; 17EE _ C4 41 39: EB. F4 + vpslld xmm13, xmm10, 3 ; 17F3 _ C4 C1 11: 72. F2, 03 + vpsrld xmm11, xmm10, 29 ; 17F9 _ C4 C1 21: 72. D2, 1D + vpxor xmm9, xmm9, xmm14 ; 17FF _ C4 41 31: EF. CE + vpor xmm11, xmm13, xmm11 ; 1804 _ C4 41 11: EB. DB + vpxor xmm4, xmm4, xmm6 ; 1809 _ C5 D9: EF. E6 + vpxor xmm5, xmm9, xmm11 ; 180D _ C4 C1 31: EF. EB + vpxor xmm4, xmm4, xmm11 ; 1812 _ C4 C1 59: EF. E3 + vpslld xmm9, xmm14, 3 ; 1817 _ C4 C1 31: 72. 
F6, 03 + vpslld xmm8, xmm5, 1 ; 181D _ C5 B9: 72. F5, 01 + vpxor xmm12, xmm4, xmm9 ; 1822 _ C4 41 59: EF. E1 + vpsrld xmm10, xmm5, 31 ; 1827 _ C5 A9: 72. D5, 1F + vpor xmm4, xmm8, xmm10 ; 182C _ C4 C1 39: EB. E2 + vpslld xmm13, xmm12, 7 ; 1831 _ C4 C1 11: 72. F4, 07 + vpsrld xmm9, xmm12, 25 ; 1837 _ C4 C1 31: 72. D4, 19 + vpxor xmm14, xmm14, xmm4 ; 183D _ C5 09: EF. F4 + vpor xmm9, xmm13, xmm9 ; 1841 _ C4 41 11: EB. C9 + vpxor xmm8, xmm14, xmm9 ; 1846 _ C4 41 09: EF. C1 + vpxor xmm5, xmm11, xmm9 ; 184B _ C4 C1 21: EF. E9 + vpslld xmm10, xmm4, 7 ; 1850 _ C5 A9: 72. F4, 07 + vpslld xmm14, xmm8, 5 ; 1855 _ C4 C1 09: 72. F0, 05 + vpxor xmm5, xmm5, xmm10 ; 185B _ C4 C1 51: EF. EA + vpsrld xmm12, xmm8, 27 ; 1860 _ C4 C1 19: 72. D0, 1B + vmovd xmm8, dword [r12+13B0H] ; 1866 _ C4 41 79: 6E. 84 24, 000013B0 + vpor xmm13, xmm14, xmm12 ; 1870 _ C4 41 09: EB. EC + vmovd xmm10, dword [r12+13B4H] ; 1875 _ C4 41 79: 6E. 94 24, 000013B4 + vpslld xmm12, xmm5, 22 ; 187F _ C5 99: 72. F5, 16 + vpshufd xmm11, xmm8, 0 ; 1884 _ C4 41 79: 70. D8, 00 + vpsrld xmm5, xmm5, 10 ; 188A _ C5 D1: 72. D5, 0A + vmovd xmm8, dword [r12+13B8H] ; 188F _ C4 41 79: 6E. 84 24, 000013B8 + vpxor xmm11, xmm13, xmm11 ; 1899 _ C4 41 11: EF. DB + vpshufd xmm14, xmm10, 0 ; 189E _ C4 41 79: 70. F2, 00 + vpor xmm13, xmm12, xmm5 ; 18A4 _ C5 19: EB. ED + vmovd xmm10, dword [r12+13BCH] ; 18A8 _ C4 41 79: 6E. 94 24, 000013BC + vpxor xmm4, xmm4, xmm14 ; 18B2 _ C4 C1 59: EF. E6 + vpshufd xmm5, xmm8, 0 ; 18B7 _ C4 C1 79: 70. E8, 00 + vpshufd xmm14, xmm10, 0 ; 18BD _ C4 41 79: 70. F2, 00 + vpxor xmm8, xmm13, xmm5 ; 18C3 _ C5 11: EF. C5 + vpxor xmm9, xmm9, xmm14 ; 18C7 _ C4 41 31: EF. CE + vpor xmm13, xmm11, xmm9 ; 18CC _ C4 41 21: EB. E9 + vpxor xmm12, xmm9, xmm4 ; 18D1 _ C5 31: EF. E4 + vpand xmm9, xmm4, xmm11 ; 18D5 _ C4 41 59: DB. CB + vpxor xmm11, xmm11, xmm8 ; 18DA _ C4 41 21: EF. D8 + vpxor xmm4, xmm8, xmm12 ; 18DF _ C4 C1 39: EF. E4 + vpor xmm8, xmm11, xmm9 ; 18E4 _ C4 41 21: EB. C1 + vpand xmm11, xmm12, xmm13 ; 18E9 _ C4 41 19: DB. DD + vpxor xmm12, xmm13, xmm9 ; 18EE _ C4 41 11: EF. E1 + vpxor xmm14, xmm11, xmm8 ; 18F3 _ C4 41 21: EF. F0 + vpand xmm8, xmm8, xmm12 ; 18F8 _ C4 41 39: DB. C4 + vpxor xmm9, xmm9, xmm14 ; 18FD _ C4 41 31: EF. CE + vpxor xmm5, xmm12, xmm14 ; 1902 _ C4 C1 19: EF. EE + vpor xmm13, xmm9, xmm12 ; 1907 _ C4 41 31: EB. EC + vpxor xmm10, xmm13, xmm4 ; 190C _ C5 11: EF. D4 + vpxor xmm4, xmm8, xmm4 ; 1910 _ C5 B9: EF. E4 + vpor xmm9, xmm10, xmm14 ; 1914 _ C4 41 29: EB. CE + vpxor xmm13, xmm5, xmm9 ; 1919 _ C4 41 51: EF. E9 + vpslld xmm11, xmm13, 13 ; 191E _ C4 C1 21: 72. F5, 0D + vpsrld xmm5, xmm13, 19 ; 1924 _ C4 C1 51: 72. D5, 13 + vpor xmm9, xmm11, xmm5 ; 192A _ C5 21: EB. CD + vpslld xmm13, xmm14, 3 ; 192E _ C4 C1 11: 72. F6, 03 + vpsrld xmm14, xmm14, 29 ; 1934 _ C4 C1 09: 72. D6, 1D + vpxor xmm10, xmm10, xmm9 ; 193A _ C4 41 29: EF. D1 + vpor xmm5, xmm13, xmm14 ; 193F _ C4 C1 11: EB. EE + vpxor xmm11, xmm10, xmm5 ; 1944 _ C5 29: EF. DD + vpxor xmm8, xmm4, xmm5 ; 1948 _ C5 59: EF. C5 + vpslld xmm4, xmm9, 3 ; 194C _ C4 C1 59: 72. F1, 03 + vpslld xmm13, xmm11, 1 ; 1952 _ C4 C1 11: 72. F3, 01 + vpxor xmm10, xmm8, xmm4 ; 1958 _ C5 39: EF. D4 + vpsrld xmm11, xmm11, 31 ; 195C _ C4 C1 21: 72. D3, 1F + vpor xmm13, xmm13, xmm11 ; 1962 _ C4 41 11: EB. EB + vpslld xmm14, xmm10, 7 ; 1967 _ C4 C1 09: 72. F2, 07 + vpsrld xmm12, xmm10, 25 ; 196D _ C4 C1 19: 72. D2, 19 + vpxor xmm9, xmm9, xmm13 ; 1973 _ C4 41 31: EF. CD + vpor xmm4, xmm14, xmm12 ; 1978 _ C4 C1 09: EB. E4 + vpslld xmm8, xmm13, 7 ; 197D _ C4 C1 39: 72. 
F5, 07 + vpxor xmm9, xmm9, xmm4 ; 1983 _ C5 31: EF. CC + vpxor xmm5, xmm5, xmm4 ; 1987 _ C5 D1: EF. EC + vpxor xmm8, xmm5, xmm8 ; 198B _ C4 41 51: EF. C0 + vpslld xmm11, xmm9, 5 ; 1990 _ C4 C1 21: 72. F1, 05 + vpsrld xmm5, xmm9, 27 ; 1996 _ C4 C1 51: 72. D1, 1B + vmovd xmm9, dword [r12+13CCH] ; 199C _ C4 41 79: 6E. 8C 24, 000013CC + vpor xmm14, xmm11, xmm5 ; 19A6 _ C5 21: EB. F5 + vmovd xmm10, dword [r12+13C0H] ; 19AA _ C4 41 79: 6E. 94 24, 000013C0 + vmovd xmm5, dword [r12+13C4H] ; 19B4 _ C4 C1 79: 6E. AC 24, 000013C4 + vpshufd xmm11, xmm9, 0 ; 19BE _ C4 41 79: 70. D9, 00 + vpshufd xmm12, xmm10, 0 ; 19C4 _ C4 41 79: 70. E2, 00 + vpxor xmm9, xmm4, xmm11 ; 19CA _ C4 41 59: EF. CB + vpshufd xmm4, xmm5, 0 ; 19CF _ C5 F9: 70. E5, 00 + vpxor xmm10, xmm14, xmm12 ; 19D4 _ C4 41 09: EF. D4 + vmovd xmm12, dword [r12+13C8H] ; 19D9 _ C4 41 79: 6E. A4 24, 000013C8 + vpxor xmm13, xmm13, xmm4 ; 19E3 _ C5 11: EF. EC + vpslld xmm4, xmm8, 22 ; 19E7 _ C4 C1 59: 72. F0, 16 + vpsrld xmm8, xmm8, 10 ; 19ED _ C4 C1 39: 72. D0, 0A + vpshufd xmm5, xmm12, 0 ; 19F3 _ C4 C1 79: 70. EC, 00 + vpxor xmm14, xmm13, xmm9 ; 19F9 _ C4 41 11: EF. F1 + vpxor xmm11, xmm9, xmm6 ; 19FE _ C5 31: EF. DE + vpor xmm13, xmm4, xmm8 ; 1A02 _ C4 41 59: EB. E8 + vpxor xmm12, xmm13, xmm5 ; 1A07 _ C5 11: EF. E5 + vpxor xmm13, xmm11, xmm10 ; 1A0B _ C4 41 21: EF. EA + vpxor xmm8, xmm12, xmm11 ; 1A10 _ C4 41 19: EF. C3 + vpand xmm11, xmm14, xmm13 ; 1A15 _ C4 41 09: DB. DD + vpxor xmm12, xmm14, xmm13 ; 1A1A _ C4 41 09: EF. E5 + vpxor xmm11, xmm11, xmm8 ; 1A1F _ C4 41 21: EF. D8 + vpxor xmm14, xmm10, xmm12 ; 1A24 _ C4 41 29: EF. F4 + vpand xmm10, xmm8, xmm12 ; 1A29 _ C4 41 39: DB. D4 + vpand xmm8, xmm14, xmm11 ; 1A2E _ C4 41 09: DB. C3 + vpxor xmm5, xmm10, xmm14 ; 1A33 _ C4 C1 29: EF. EE + vpxor xmm9, xmm13, xmm8 ; 1A38 _ C4 41 11: EF. C8 + vpslld xmm10, xmm11, 13 ; 1A3D _ C4 C1 29: 72. F3, 0D + vpor xmm4, xmm8, xmm9 ; 1A43 _ C4 C1 39: EB. E1 + vpsrld xmm14, xmm11, 19 ; 1A48 _ C4 C1 09: 72. D3, 13 + vpxor xmm13, xmm4, xmm5 ; 1A4E _ C5 59: EF. ED + vpor xmm11, xmm12, xmm11 ; 1A52 _ C4 41 19: EB. DB + vpxor xmm4, xmm13, xmm6 ; 1A57 _ C5 91: EF. E6 + vpxor xmm8, xmm11, xmm8 ; 1A5B _ C4 41 21: EF. C0 + vpand xmm5, xmm5, xmm9 ; 1A60 _ C4 C1 51: DB. E9 + vpor xmm13, xmm10, xmm14 ; 1A65 _ C4 41 29: EB. EE + vpslld xmm10, xmm4, 3 ; 1A6A _ C5 A9: 72. F4, 03 + vpsrld xmm4, xmm4, 29 ; 1A6F _ C5 D9: 72. D4, 1D + vpxor xmm12, xmm8, xmm5 ; 1A74 _ C5 39: EF. E5 + vpor xmm14, xmm10, xmm4 ; 1A78 _ C5 29: EB. F4 + vpxor xmm8, xmm12, xmm13 ; 1A7C _ C4 41 19: EF. C5 + vpxor xmm9, xmm9, xmm14 ; 1A81 _ C4 41 31: EF. CE + vpxor xmm4, xmm8, xmm14 ; 1A86 _ C4 C1 39: EF. E6 + vpslld xmm8, xmm13, 3 ; 1A8B _ C4 C1 39: 72. F5, 03 + vpxor xmm10, xmm9, xmm8 ; 1A91 _ C4 41 31: EF. D0 + vpslld xmm9, xmm4, 1 ; 1A96 _ C5 B1: 72. F4, 01 + vpsrld xmm11, xmm4, 31 ; 1A9B _ C5 A1: 72. D4, 1F + vpslld xmm12, xmm10, 7 ; 1AA0 _ C4 C1 19: 72. F2, 07 + vpor xmm5, xmm9, xmm11 ; 1AA6 _ C4 C1 31: EB. EB + vpsrld xmm8, xmm10, 25 ; 1AAB _ C4 C1 39: 72. D2, 19 + vmovd xmm9, dword [r12+13D4H] ; 1AB1 _ C4 41 79: 6E. 8C 24, 000013D4 + vpor xmm10, xmm12, xmm8 ; 1ABB _ C4 41 19: EB. D0 + vmovd xmm12, dword [r12+13DCH] ; 1AC0 _ C4 41 79: 6E. A4 24, 000013DC + vpxor xmm13, xmm13, xmm5 ; 1ACA _ C5 11: EF. ED + vpxor xmm8, xmm13, xmm10 ; 1ACE _ C4 41 11: EF. C2 + vpxor xmm14, xmm14, xmm10 ; 1AD3 _ C4 41 09: EF. F2 + vpshufd xmm13, xmm9, 0 ; 1AD8 _ C4 41 79: 70. E9, 00 + vpslld xmm4, xmm5, 7 ; 1ADE _ C5 D9: 72. F5, 07 + vpshufd xmm9, xmm12, 0 ; 1AE3 _ C4 41 79: 70. 
CC, 00 + vpxor xmm4, xmm14, xmm4 ; 1AE9 _ C5 89: EF. E4 + vmovd xmm11, dword [r12+13D0H] ; 1AED _ C4 41 79: 6E. 9C 24, 000013D0 + vpxor xmm14, xmm5, xmm13 ; 1AF7 _ C4 41 51: EF. F5 + vpxor xmm13, xmm10, xmm9 ; 1AFC _ C4 41 29: EF. E9 + vpslld xmm10, xmm8, 5 ; 1B01 _ C4 C1 29: 72. F0, 05 + vpsrld xmm8, xmm8, 27 ; 1B07 _ C4 C1 39: 72. D0, 1B + vpxor xmm12, xmm14, xmm13 ; 1B0D _ C4 41 09: EF. E5 + vpshufd xmm11, xmm11, 0 ; 1B12 _ C4 41 79: 70. DB, 00 + vpor xmm9, xmm10, xmm8 ; 1B18 _ C4 41 29: EB. C8 + vpxor xmm8, xmm9, xmm11 ; 1B1D _ C4 41 31: EF. C3 + vpxor xmm9, xmm13, xmm6 ; 1B22 _ C5 11: EF. CE + vmovd xmm5, dword [r12+13D8H] ; 1B26 _ C4 C1 79: 6E. AC 24, 000013D8 + vpxor xmm11, xmm8, xmm14 ; 1B30 _ C4 41 39: EF. DE + vpslld xmm14, xmm4, 22 ; 1B35 _ C5 89: 72. F4, 16 + vpsrld xmm8, xmm4, 10 ; 1B3A _ C5 B9: 72. D4, 0A + vpshufd xmm5, xmm5, 0 ; 1B3F _ C5 F9: 70. ED, 00 + vpor xmm4, xmm14, xmm8 ; 1B44 _ C4 C1 09: EB. E0 + vpxor xmm8, xmm4, xmm5 ; 1B49 _ C5 59: EF. C5 + vpand xmm4, xmm12, xmm11 ; 1B4D _ C4 C1 19: DB. E3 + vpxor xmm13, xmm8, xmm9 ; 1B52 _ C4 41 39: EF. E9 + vpxor xmm10, xmm4, xmm13 ; 1B57 _ C4 41 59: EF. D5 + vpor xmm14, xmm13, xmm12 ; 1B5C _ C4 41 11: EB. F4 + vpand xmm5, xmm9, xmm10 ; 1B61 _ C4 C1 31: DB. EA + vpxor xmm9, xmm12, xmm9 ; 1B66 _ C4 41 19: EF. C9 + vpxor xmm13, xmm5, xmm11 ; 1B6B _ C4 41 51: EF. EB + vpxor xmm12, xmm9, xmm10 ; 1B70 _ C4 41 31: EF. E2 + vpxor xmm5, xmm12, xmm14 ; 1B75 _ C4 C1 19: EF. EE + vpand xmm8, xmm11, xmm13 ; 1B7A _ C4 41 21: DB. C5 + vpslld xmm4, xmm10, 13 ; 1B7F _ C4 C1 59: 72. F2, 0D + vpsrld xmm10, xmm10, 19 ; 1B85 _ C4 C1 29: 72. D2, 13 + vpxor xmm8, xmm8, xmm5 ; 1B8B _ C5 39: EF. C5 + vpor xmm12, xmm4, xmm10 ; 1B8F _ C4 41 59: EB. E2 + vpxor xmm11, xmm14, xmm11 ; 1B94 _ C4 41 09: EF. DB + vpslld xmm4, xmm8, 3 ; 1B99 _ C4 C1 59: 72. F0, 03 + vpsrld xmm8, xmm8, 29 ; 1B9F _ C4 C1 39: 72. D0, 1D + vpxor xmm9, xmm13, xmm12 ; 1BA5 _ C4 41 11: EF. CC + vpor xmm13, xmm5, xmm13 ; 1BAA _ C4 41 51: EB. ED + vpxor xmm14, xmm11, xmm6 ; 1BAF _ C5 21: EF. F6 + vpor xmm4, xmm4, xmm8 ; 1BB3 _ C4 C1 59: EB. E0 + vpxor xmm8, xmm13, xmm14 ; 1BB8 _ C4 41 11: EF. C6 + vpxor xmm9, xmm9, xmm4 ; 1BBD _ C5 31: EF. CC + vpxor xmm13, xmm8, xmm4 ; 1BC1 _ C5 39: EF. EC + vpslld xmm11, xmm12, 3 ; 1BC5 _ C4 C1 21: 72. F4, 03 + vpslld xmm5, xmm9, 1 ; 1BCB _ C4 C1 51: 72. F1, 01 + vpxor xmm10, xmm13, xmm11 ; 1BD1 _ C4 41 11: EF. D3 + vpsrld xmm9, xmm9, 31 ; 1BD6 _ C4 C1 31: 72. D1, 1F + vpor xmm9, xmm5, xmm9 ; 1BDC _ C4 41 51: EB. C9 + vpslld xmm14, xmm10, 7 ; 1BE1 _ C4 C1 09: 72. F2, 07 + vpsrld xmm8, xmm10, 25 ; 1BE7 _ C4 C1 39: 72. D2, 19 + vpxor xmm12, xmm12, xmm9 ; 1BED _ C4 41 19: EF. E1 + vpor xmm8, xmm14, xmm8 ; 1BF2 _ C4 41 09: EB. C0 + vpslld xmm13, xmm9, 7 ; 1BF7 _ C4 C1 11: 72. F1, 07 + vpxor xmm11, xmm12, xmm8 ; 1BFD _ C4 41 19: EF. D8 + vpxor xmm4, xmm4, xmm8 ; 1C02 _ C4 C1 59: EF. E0 + vmovd xmm10, dword [r12+13E0H] ; 1C07 _ C4 41 79: 6E. 94 24, 000013E0 + vpxor xmm13, xmm4, xmm13 ; 1C11 _ C4 41 59: EF. ED + vpslld xmm4, xmm11, 5 ; 1C16 _ C4 C1 59: 72. F3, 05 + vpsrld xmm5, xmm11, 27 ; 1C1C _ C4 C1 51: 72. D3, 1B + vpshufd xmm12, xmm10, 0 ; 1C22 _ C4 41 79: 70. E2, 00 + vpor xmm14, xmm4, xmm5 ; 1C28 _ C5 59: EB. F5 + vmovd xmm11, dword [r12+13ECH] ; 1C2C _ C4 41 79: 6E. 9C 24, 000013EC + vpxor xmm4, xmm14, xmm12 ; 1C36 _ C4 C1 09: EF. E4 + vmovd xmm12, dword [r12+13E8H] ; 1C3B _ C4 41 79: 6E. A4 24, 000013E8 + vpslld xmm14, xmm13, 22 ; 1C45 _ C4 C1 09: 72. F5, 16 + vpsrld xmm13, xmm13, 10 ; 1C4B _ C4 C1 11: 72. 
D5, 0A + vpshufd xmm5, xmm11, 0 ; 1C51 _ C4 C1 79: 70. EB, 00 + vpor xmm13, xmm14, xmm13 ; 1C57 _ C4 41 09: EB. ED + vpshufd xmm12, xmm12, 0 ; 1C5C _ C4 41 79: 70. E4, 00 + vpxor xmm8, xmm8, xmm5 ; 1C62 _ C5 39: EF. C5 + vmovd xmm10, dword [r12+13E4H] ; 1C66 _ C4 41 79: 6E. 94 24, 000013E4 + vpxor xmm13, xmm13, xmm12 ; 1C70 _ C4 41 11: EF. EC + vpshufd xmm10, xmm10, 0 ; 1C75 _ C4 41 79: 70. D2, 00 + vpxor xmm5, xmm13, xmm6 ; 1C7B _ C5 91: EF. EE + vpxor xmm13, xmm4, xmm8 ; 1C7F _ C4 41 59: EF. E8 + vpand xmm4, xmm8, xmm4 ; 1C84 _ C5 B9: DB. E4 + vpxor xmm12, xmm4, xmm5 ; 1C88 _ C5 59: EF. E5 + vpxor xmm9, xmm9, xmm10 ; 1C8C _ C4 41 31: EF. CA + vpxor xmm4, xmm9, xmm12 ; 1C91 _ C4 C1 31: EF. E4 + vpor xmm9, xmm5, xmm8 ; 1C96 _ C4 41 51: EB. C8 + vpor xmm14, xmm13, xmm4 ; 1C9B _ C5 11: EB. F4 + vpxor xmm13, xmm9, xmm13 ; 1C9F _ C4 41 31: EF. ED + vpxor xmm5, xmm13, xmm4 ; 1CA4 _ C5 91: EF. EC + vpor xmm11, xmm14, xmm12 ; 1CA8 _ C4 41 09: EB. DC + vpxor xmm8, xmm8, xmm14 ; 1CAD _ C4 41 39: EF. C6 + vpxor xmm10, xmm11, xmm5 ; 1CB2 _ C5 21: EF. D5 + vpxor xmm14, xmm8, xmm12 ; 1CB6 _ C4 41 39: EF. F4 + vpslld xmm9, xmm10, 13 ; 1CBB _ C4 C1 31: 72. F2, 0D + vpxor xmm8, xmm14, xmm10 ; 1CC1 _ C4 41 09: EF. C2 + vpsrld xmm10, xmm10, 19 ; 1CC6 _ C4 C1 29: 72. D2, 13 + vpor xmm11, xmm9, xmm10 ; 1CCC _ C4 41 31: EB. DA + vpslld xmm9, xmm8, 3 ; 1CD1 _ C4 C1 31: 72. F0, 03 + vpsrld xmm13, xmm8, 29 ; 1CD7 _ C4 C1 11: 72. D0, 1D + vpxor xmm12, xmm12, xmm6 ; 1CDD _ C5 19: EF. E6 + vpand xmm8, xmm5, xmm8 ; 1CE1 _ C4 41 51: DB. C0 + vpor xmm13, xmm9, xmm13 ; 1CE6 _ C4 41 31: EB. ED + vpxor xmm4, xmm4, xmm11 ; 1CEB _ C4 C1 59: EF. E3 + vpxor xmm5, xmm12, xmm8 ; 1CF0 _ C4 C1 19: EF. E8 + vpxor xmm10, xmm4, xmm13 ; 1CF5 _ C4 41 59: EF. D5 + vpxor xmm8, xmm5, xmm13 ; 1CFA _ C4 41 51: EF. C5 + vpslld xmm4, xmm11, 3 ; 1CFF _ C4 C1 59: 72. F3, 03 + vpslld xmm9, xmm10, 1 ; 1D05 _ C4 C1 31: 72. F2, 01 + vpxor xmm14, xmm8, xmm4 ; 1D0B _ C5 39: EF. F4 + vpsrld xmm5, xmm10, 31 ; 1D0F _ C4 C1 51: 72. D2, 1F + vpor xmm8, xmm9, xmm5 ; 1D15 _ C5 31: EB. C5 + vpslld xmm10, xmm14, 7 ; 1D19 _ C4 C1 29: 72. F6, 07 + vpsrld xmm12, xmm14, 25 ; 1D1F _ C4 C1 19: 72. D6, 19 + vpxor xmm11, xmm11, xmm8 ; 1D25 _ C4 41 21: EF. D8 + vpor xmm4, xmm10, xmm12 ; 1D2A _ C4 C1 29: EB. E4 + vpslld xmm9, xmm8, 7 ; 1D2F _ C4 C1 31: 72. F0, 07 + vpxor xmm11, xmm11, xmm4 ; 1D35 _ C5 21: EF. DC + vpxor xmm13, xmm13, xmm4 ; 1D39 _ C5 11: EF. EC + vpxor xmm14, xmm13, xmm9 ; 1D3D _ C4 41 11: EF. F1 + vpslld xmm13, xmm11, 5 ; 1D42 _ C4 C1 11: 72. F3, 05 + vpsrld xmm5, xmm11, 27 ; 1D48 _ C4 C1 51: 72. D3, 1B + vmovd xmm10, dword [r12+13F0H] ; 1D4E _ C4 41 79: 6E. 94 24, 000013F0 + vpor xmm12, xmm13, xmm5 ; 1D58 _ C5 11: EB. E5 + vmovd xmm13, dword [r12+13F4H] ; 1D5C _ C4 41 79: 6E. AC 24, 000013F4 + vpshufd xmm9, xmm10, 0 ; 1D66 _ C4 41 79: 70. CA, 00 + vpshufd xmm11, xmm13, 0 ; 1D6C _ C4 41 79: 70. DD, 00 + vpxor xmm10, xmm12, xmm9 ; 1D72 _ C4 41 19: EF. D1 + vpxor xmm9, xmm8, xmm11 ; 1D77 _ C4 41 39: EF. CB + vpslld xmm8, xmm14, 22 ; 1D7C _ C4 C1 39: 72. F6, 16 + vpsrld xmm14, xmm14, 10 ; 1D82 _ C4 C1 09: 72. D6, 0A + vpor xmm13, xmm8, xmm14 ; 1D88 _ C4 41 39: EB. EE + vmovd xmm8, dword [r12+13F8H] ; 1D8D _ C4 41 79: 6E. 84 24, 000013F8 + vmovd xmm5, dword [r12+13FCH] ; 1D97 _ C4 C1 79: 6E. AC 24, 000013FC + vpshufd xmm11, xmm8, 0 ; 1DA1 _ C4 41 79: 70. D8, 00 + vpshufd xmm14, xmm5, 0 ; 1DA7 _ C5 79: 70. F5, 00 + vpxor xmm8, xmm13, xmm11 ; 1DAC _ C4 41 11: EF. C3 + vpxor xmm14, xmm4, xmm14 ; 1DB1 _ C4 41 59: EF. 
F6 + vpor xmm4, xmm9, xmm8 ; 1DB6 _ C4 C1 31: EB. E0 + vpxor xmm11, xmm4, xmm14 ; 1DBB _ C4 41 59: EF. DE + vpxor xmm9, xmm9, xmm8 ; 1DC0 _ C4 41 31: EF. C8 + vpxor xmm5, xmm8, xmm11 ; 1DC5 _ C4 C1 39: EF. EB + vpxor xmm4, xmm9, xmm5 ; 1DCA _ C5 B1: EF. E5 + vpor xmm9, xmm14, xmm9 ; 1DCE _ C4 41 09: EB. C9 + vpor xmm12, xmm10, xmm4 ; 1DD3 _ C5 29: EB. E4 + vpor xmm8, xmm11, xmm4 ; 1DD7 _ C5 21: EB. C4 + vpxor xmm13, xmm12, xmm5 ; 1DDB _ C5 19: EF. ED + vpxor xmm12, xmm8, xmm10 ; 1DDF _ C4 41 39: EF. E2 + vpxor xmm12, xmm12, xmm4 ; 1DE4 _ C5 19: EF. E4 + vpand xmm10, xmm9, xmm10 ; 1DE8 _ C4 41 31: DB. D2 + vpxor xmm5, xmm5, xmm12 ; 1DED _ C4 C1 51: EF. EC + vpand xmm8, xmm12, xmm13 ; 1DF2 _ C4 41 19: DB. C5 + vpxor xmm5, xmm5, xmm6 ; 1DF7 _ C5 D1: EF. EE + vpxor xmm8, xmm8, xmm4 ; 1DFB _ C5 39: EF. C4 + vpor xmm12, xmm5, xmm13 ; 1DFF _ C4 41 51: EB. E5 + vpxor xmm4, xmm4, xmm12 ; 1E04 _ C4 C1 59: EF. E4 + vpslld xmm12, xmm8, 3 ; 1E09 _ C4 C1 19: 72. F0, 03 + vpslld xmm5, xmm4, 13 ; 1E0F _ C5 D1: 72. F4, 0D + vpsrld xmm4, xmm4, 19 ; 1E14 _ C5 D9: 72. D4, 13 + vpsrld xmm8, xmm8, 29 ; 1E19 _ C4 C1 39: 72. D0, 1D + vpor xmm4, xmm5, xmm4 ; 1E1F _ C5 D1: EB. E4 + vpor xmm5, xmm12, xmm8 ; 1E23 _ C4 C1 19: EB. E8 + vpxor xmm8, xmm10, xmm11 ; 1E28 _ C4 41 29: EF. C3 + vpxor xmm9, xmm8, xmm4 ; 1E2D _ C5 39: EF. CC + vpxor xmm13, xmm13, xmm5 ; 1E31 _ C5 11: EF. ED + vpxor xmm10, xmm9, xmm5 ; 1E35 _ C5 31: EF. D5 + vpslld xmm11, xmm4, 3 ; 1E39 _ C5 A1: 72. F4, 03 + vpxor xmm8, xmm13, xmm11 ; 1E3E _ C4 41 11: EF. C3 + vpslld xmm14, xmm10, 1 ; 1E43 _ C4 C1 09: 72. F2, 01 + vpsrld xmm12, xmm10, 31 ; 1E49 _ C4 C1 19: 72. D2, 1F + vpslld xmm9, xmm8, 7 ; 1E4F _ C4 C1 31: 72. F0, 07 + vpor xmm11, xmm14, xmm12 ; 1E55 _ C4 41 09: EB. DC + vpsrld xmm13, xmm8, 25 ; 1E5A _ C4 C1 11: 72. D0, 19 + vpor xmm13, xmm9, xmm13 ; 1E60 _ C4 41 31: EB. ED + vpxor xmm4, xmm4, xmm11 ; 1E65 _ C4 C1 59: EF. E3 + vpxor xmm14, xmm4, xmm13 ; 1E6A _ C4 41 59: EF. F5 + vpxor xmm5, xmm5, xmm13 ; 1E6F _ C4 C1 51: EF. ED + vpslld xmm10, xmm11, 7 ; 1E74 _ C4 C1 29: 72. F3, 07 + vpslld xmm8, xmm14, 5 ; 1E7A _ C4 C1 39: 72. F6, 05 + vpsrld xmm9, xmm14, 27 ; 1E80 _ C4 C1 31: 72. D6, 1B + vpxor xmm4, xmm5, xmm10 ; 1E86 _ C4 C1 51: EF. E2 + vmovd xmm5, dword [r12+1400H] ; 1E8B _ C4 C1 79: 6E. AC 24, 00001400 + vpor xmm12, xmm8, xmm9 ; 1E95 _ C4 41 39: EB. E1 + vmovd xmm9, dword [r12+1404H] ; 1E9A _ C4 41 79: 6E. 8C 24, 00001404 + vpshufd xmm8, xmm5, 0 ; 1EA4 _ C5 79: 70. C5, 00 + vpshufd xmm5, xmm9, 0 ; 1EA9 _ C4 C1 79: 70. E9, 00 + vpxor xmm8, xmm12, xmm8 ; 1EAF _ C4 41 19: EF. C0 + vmovd xmm14, dword [r12+140CH] ; 1EB4 _ C4 41 79: 6E. B4 24, 0000140C + vpxor xmm5, xmm11, xmm5 ; 1EBE _ C5 A1: EF. ED + vpslld xmm11, xmm4, 22 ; 1EC2 _ C5 A1: 72. F4, 16 + vpsrld xmm4, xmm4, 10 ; 1EC7 _ C5 D9: 72. D4, 0A + vmovd xmm9, dword [r12+1408H] ; 1ECC _ C4 41 79: 6E. 8C 24, 00001408 + vpor xmm11, xmm11, xmm4 ; 1ED6 _ C5 21: EB. DC + vpshufd xmm4, xmm14, 0 ; 1EDA _ C4 C1 79: 70. E6, 00 + vpshufd xmm10, xmm9, 0 ; 1EE0 _ C4 41 79: 70. D1, 00 + vpxor xmm13, xmm13, xmm4 ; 1EE6 _ C5 11: EF. EC + vpxor xmm12, xmm11, xmm10 ; 1EEA _ C4 41 21: EF. E2 + vpxor xmm14, xmm13, xmm8 ; 1EEF _ C4 41 11: EF. F0 + vpxor xmm4, xmm5, xmm12 ; 1EF4 _ C4 C1 51: EF. E4 + vpand xmm5, xmm5, xmm14 ; 1EF9 _ C4 C1 51: DB. EE + vpxor xmm9, xmm5, xmm8 ; 1EFE _ C4 41 51: EF. C8 + vpxor xmm13, xmm4, xmm14 ; 1F03 _ C4 41 59: EF. EE + vpor xmm8, xmm8, xmm14 ; 1F08 _ C4 41 39: EB. C6 + vpxor xmm10, xmm14, xmm12 ; 1F0D _ C4 41 09: EF. D4 + vpor xmm12, xmm12, xmm9 ; 1F12 _ C4 41 19: EB. 
E1 + vpxor xmm14, xmm13, xmm6 ; 1F17 _ C5 11: EF. F6 + vpxor xmm5, xmm8, xmm4 ; 1F1B _ C5 B9: EF. EC + vpxor xmm11, xmm12, xmm13 ; 1F1F _ C4 41 19: EF. DD + vpor xmm13, xmm14, xmm9 ; 1F24 _ C4 41 09: EB. E9 + vpxor xmm9, xmm9, xmm10 ; 1F29 _ C4 41 31: EF. CA + vpor xmm8, xmm10, xmm5 ; 1F2E _ C5 29: EB. C5 + vpxor xmm10, xmm9, xmm13 ; 1F32 _ C4 41 31: EF. D5 + vpxor xmm4, xmm10, xmm8 ; 1F37 _ C4 C1 29: EF. E0 + vpslld xmm12, xmm11, 3 ; 1F3C _ C4 C1 19: 72. F3, 03 + vpslld xmm9, xmm4, 13 ; 1F42 _ C5 B1: 72. F4, 0D + vpsrld xmm10, xmm4, 19 ; 1F47 _ C5 A9: 72. D4, 13 + vpsrld xmm11, xmm11, 29 ; 1F4C _ C4 C1 21: 72. D3, 1D + vpor xmm4, xmm9, xmm10 ; 1F52 _ C4 C1 31: EB. E2 + vpor xmm12, xmm12, xmm11 ; 1F57 _ C4 41 19: EB. E3 + vpxor xmm8, xmm13, xmm8 ; 1F5C _ C4 41 11: EF. C0 + vpxor xmm8, xmm8, xmm4 ; 1F61 _ C5 39: EF. C4 + vpxor xmm9, xmm5, xmm12 ; 1F65 _ C4 41 51: EF. CC + vpslld xmm13, xmm4, 3 ; 1F6A _ C5 91: 72. F4, 03 + vpxor xmm11, xmm8, xmm12 ; 1F6F _ C4 41 39: EF. DC + vpxor xmm14, xmm9, xmm13 ; 1F74 _ C4 41 31: EF. F5 + vpslld xmm5, xmm11, 1 ; 1F79 _ C4 C1 51: 72. F3, 01 + vpsrld xmm10, xmm11, 31 ; 1F7F _ C4 C1 29: 72. D3, 1F + vpslld xmm8, xmm14, 7 ; 1F85 _ C4 C1 39: 72. F6, 07 + vpsrld xmm13, xmm14, 25 ; 1F8B _ C4 C1 11: 72. D6, 19 + vpor xmm9, xmm5, xmm10 ; 1F91 _ C4 41 51: EB. CA + vpor xmm8, xmm8, xmm13 ; 1F96 _ C4 41 39: EB. C5 + vpxor xmm4, xmm4, xmm9 ; 1F9B _ C4 C1 59: EF. E1 + vpxor xmm11, xmm12, xmm8 ; 1FA0 _ C4 41 19: EF. D8 + vpxor xmm14, xmm4, xmm8 ; 1FA5 _ C4 41 59: EF. F0 + vmovd xmm12, dword [r12+1414H] ; 1FAA _ C4 41 79: 6E. A4 24, 00001414 + vpslld xmm5, xmm9, 7 ; 1FB4 _ C4 C1 51: 72. F1, 07 + vpshufd xmm4, xmm12, 0 ; 1FBA _ C4 C1 79: 70. E4, 00 + vpslld xmm13, xmm14, 5 ; 1FC0 _ C4 C1 11: 72. F6, 05 + vmovd xmm10, dword [r12+1410H] ; 1FC6 _ C4 41 79: 6E. 94 24, 00001410 + vpxor xmm4, xmm9, xmm4 ; 1FD0 _ C5 B1: EF. E4 + vmovd xmm9, dword [r12+141CH] ; 1FD4 _ C4 41 79: 6E. 8C 24, 0000141C + vpsrld xmm12, xmm14, 27 ; 1FDE _ C4 C1 19: 72. D6, 1B + vpshufd xmm9, xmm9, 0 ; 1FE4 _ C4 41 79: 70. C9, 00 + vpxor xmm5, xmm11, xmm5 ; 1FEA _ C5 A1: EF. ED + vpshufd xmm10, xmm10, 0 ; 1FEE _ C4 41 79: 70. D2, 00 + vpor xmm14, xmm13, xmm12 ; 1FF4 _ C4 41 11: EB. F4 + vmovd xmm11, dword [r12+1418H] ; 1FF9 _ C4 41 79: 6E. 9C 24, 00001418 + vpxor xmm8, xmm8, xmm9 ; 2003 _ C4 41 39: EF. C1 + vpxor xmm9, xmm14, xmm10 ; 2008 _ C4 41 09: EF. CA + vpslld xmm10, xmm5, 22 ; 200D _ C5 A9: 72. F5, 16 + vpsrld xmm5, xmm5, 10 ; 2012 _ C5 D1: 72. D5, 0A + vpxor xmm9, xmm9, xmm6 ; 2017 _ C5 31: EF. CE + vpshufd xmm11, xmm11, 0 ; 201B _ C4 41 79: 70. DB, 00 + vpor xmm5, xmm10, xmm5 ; 2021 _ C5 A9: EB. ED + vpxor xmm5, xmm5, xmm11 ; 2025 _ C4 C1 51: EF. EB + vpand xmm13, xmm9, xmm4 ; 202A _ C5 31: DB. EC + vpxor xmm10, xmm5, xmm6 ; 202E _ C5 51: EF. D6 + vpxor xmm5, xmm10, xmm13 ; 2032 _ C4 C1 29: EF. ED + vpor xmm10, xmm13, xmm8 ; 2037 _ C4 41 11: EB. D0 + vpxor xmm14, xmm8, xmm5 ; 203C _ C5 39: EF. F5 + vpxor xmm4, xmm4, xmm10 ; 2040 _ C4 C1 59: EF. E2 + vpxor xmm11, xmm10, xmm9 ; 2045 _ C4 41 29: EF. D9 + vpor xmm13, xmm9, xmm4 ; 204A _ C5 31: EB. EC + vpxor xmm10, xmm4, xmm14 ; 204E _ C4 41 59: EF. D6 + vpor xmm12, xmm5, xmm11 ; 2053 _ C4 41 51: EB. E3 + vpand xmm9, xmm12, xmm13 ; 2058 _ C4 41 19: DB. CD + vpxor xmm5, xmm11, xmm10 ; 205D _ C4 C1 21: EF. EA + vpslld xmm8, xmm9, 13 ; 2062 _ C4 C1 39: 72. F1, 0D + vpsrld xmm4, xmm9, 19 ; 2068 _ C4 C1 59: 72. D1, 13 + vpand xmm11, xmm5, xmm9 ; 206E _ C4 41 51: DB. D9 + vpor xmm4, xmm8, xmm4 ; 2073 _ C5 B9: EB. 
E4 + vpslld xmm12, xmm14, 3 ; 2077 _ C4 C1 19: 72. F6, 03 + vpsrld xmm14, xmm14, 29 ; 207D _ C4 C1 09: 72. D6, 1D + vpxor xmm13, xmm13, xmm11 ; 2083 _ C4 41 11: EF. EB + vpand xmm9, xmm10, xmm9 ; 2088 _ C4 41 29: DB. C9 + vpor xmm8, xmm12, xmm14 ; 208D _ C4 41 19: EB. C6 + vpxor xmm13, xmm13, xmm4 ; 2092 _ C5 11: EF. EC + vpxor xmm5, xmm9, xmm5 ; 2096 _ C5 B1: EF. ED + vpxor xmm11, xmm13, xmm8 ; 209A _ C4 41 11: EF. D8 + vpxor xmm10, xmm5, xmm8 ; 209F _ C4 41 51: EF. D0 + vpslld xmm5, xmm4, 3 ; 20A4 _ C5 D1: 72. F4, 03 + vpxor xmm13, xmm10, xmm5 ; 20A9 _ C5 29: EF. ED + vpslld xmm9, xmm11, 1 ; 20AD _ C4 C1 31: 72. F3, 01 + vpsrld xmm10, xmm11, 31 ; 20B3 _ C4 C1 29: 72. D3, 1F + vpslld xmm14, xmm13, 7 ; 20B9 _ C4 C1 09: 72. F5, 07 + vpor xmm12, xmm9, xmm10 ; 20BF _ C4 41 31: EB. E2 + vpsrld xmm11, xmm13, 25 ; 20C4 _ C4 C1 21: 72. D5, 19 + vpor xmm14, xmm14, xmm11 ; 20CA _ C4 41 09: EB. F3 + vpxor xmm4, xmm4, xmm12 ; 20CF _ C4 C1 59: EF. E4 + vpxor xmm9, xmm4, xmm14 ; 20D4 _ C4 41 59: EF. CE + vpxor xmm8, xmm8, xmm14 ; 20D9 _ C4 41 39: EF. C6 + vmovd xmm11, dword [r12+1420H] ; 20DE _ C4 41 79: 6E. 9C 24, 00001420 + vpslld xmm5, xmm12, 7 ; 20E8 _ C4 C1 51: 72. F4, 07 + vpslld xmm10, xmm9, 5 ; 20EE _ C4 C1 29: 72. F1, 05 + vpsrld xmm13, xmm9, 27 ; 20F4 _ C4 C1 11: 72. D1, 1B + vmovd xmm9, dword [r12+1424H] ; 20FA _ C4 41 79: 6E. 8C 24, 00001424 + vpxor xmm4, xmm8, xmm5 ; 2104 _ C5 B9: EF. E5 + vpshufd xmm5, xmm11, 0 ; 2108 _ C4 C1 79: 70. EB, 00 + vpor xmm8, xmm10, xmm13 ; 210E _ C4 41 29: EB. C5 + vpshufd xmm10, xmm9, 0 ; 2113 _ C4 41 79: 70. D1, 00 + vpxor xmm13, xmm8, xmm5 ; 2119 _ C5 39: EF. ED + vmovd xmm5, dword [r12+1428H] ; 211D _ C4 C1 79: 6E. AC 24, 00001428 + vpxor xmm10, xmm12, xmm10 ; 2127 _ C4 41 19: EF. D2 + vpslld xmm12, xmm4, 22 ; 212C _ C5 99: 72. F4, 16 + vpsrld xmm4, xmm4, 10 ; 2131 _ C5 D9: 72. D4, 0A + vpor xmm9, xmm12, xmm4 ; 2136 _ C5 19: EB. CC + vmovd xmm12, dword [r12+142CH] ; 213A _ C4 41 79: 6E. A4 24, 0000142C + vpshufd xmm4, xmm5, 0 ; 2144 _ C5 F9: 70. E5, 00 + vpshufd xmm11, xmm12, 0 ; 2149 _ C4 41 79: 70. DC, 00 + vpxor xmm8, xmm9, xmm4 ; 214F _ C5 31: EF. C4 + vpxor xmm4, xmm14, xmm11 ; 2153 _ C4 C1 09: EF. E3 + vpand xmm14, xmm13, xmm8 ; 2158 _ C4 41 11: DB. F0 + vpxor xmm9, xmm14, xmm4 ; 215D _ C5 09: EF. CC + vpxor xmm5, xmm8, xmm10 ; 2161 _ C4 C1 39: EF. EA + vpxor xmm5, xmm5, xmm9 ; 2166 _ C4 C1 51: EF. E9 + vpor xmm14, xmm4, xmm13 ; 216B _ C4 41 59: EB. F5 + vpxor xmm4, xmm14, xmm10 ; 2170 _ C4 C1 09: EF. E2 + vpxor xmm13, xmm13, xmm5 ; 2175 _ C5 11: EF. ED + vpor xmm10, xmm4, xmm13 ; 2179 _ C4 41 59: EB. D5 + vpxor xmm8, xmm10, xmm9 ; 217E _ C4 41 29: EF. C1 + vpand xmm9, xmm9, xmm4 ; 2183 _ C5 31: DB. CC + vpxor xmm11, xmm13, xmm9 ; 2187 _ C4 41 11: EF. D9 + vpxor xmm10, xmm4, xmm8 ; 218C _ C4 41 59: EF. D0 + vpxor xmm9, xmm10, xmm11 ; 2191 _ C4 41 29: EF. CB + vpslld xmm13, xmm5, 13 ; 2196 _ C5 91: 72. F5, 0D + vpsrld xmm5, xmm5, 19 ; 219B _ C5 D1: 72. D5, 13 + vpslld xmm14, xmm9, 3 ; 21A0 _ C4 C1 09: 72. F1, 03 + vpor xmm12, xmm13, xmm5 ; 21A6 _ C5 11: EB. E5 + vpsrld xmm5, xmm9, 29 ; 21AA _ C4 C1 51: 72. D1, 1D + vpor xmm14, xmm14, xmm5 ; 21B0 _ C5 09: EB. F5 + vpxor xmm8, xmm8, xmm12 ; 21B4 _ C4 41 39: EF. C4 + vpxor xmm11, xmm11, xmm6 ; 21B9 _ C5 21: EF. DE + vpxor xmm10, xmm8, xmm14 ; 21BD _ C4 41 39: EF. D6 + vpxor xmm5, xmm11, xmm14 ; 21C2 _ C4 C1 21: EF. EE + vpslld xmm9, xmm12, 3 ; 21C7 _ C4 C1 31: 72. F4, 03 + vpxor xmm11, xmm5, xmm9 ; 21CD _ C4 41 51: EF. D9 + vpslld xmm13, xmm10, 1 ; 21D2 _ C4 C1 11: 72. 
F2, 01 + vpsrld xmm4, xmm10, 31 ; 21D8 _ C4 C1 59: 72. D2, 1F + vpslld xmm8, xmm11, 7 ; 21DE _ C4 C1 39: 72. F3, 07 + vpor xmm4, xmm13, xmm4 ; 21E4 _ C5 91: EB. E4 + vpsrld xmm5, xmm11, 25 ; 21E8 _ C4 C1 51: 72. D3, 19 + vpor xmm13, xmm8, xmm5 ; 21EE _ C5 39: EB. ED + vpxor xmm12, xmm12, xmm4 ; 21F2 _ C5 19: EF. E4 + vpxor xmm10, xmm12, xmm13 ; 21F6 _ C4 41 19: EF. D5 + vpxor xmm5, xmm14, xmm13 ; 21FB _ C4 C1 09: EF. ED + vpslld xmm14, xmm10, 5 ; 2200 _ C4 C1 09: 72. F2, 05 + vpsrld xmm12, xmm10, 27 ; 2206 _ C4 C1 19: 72. D2, 1B + vmovd xmm11, dword [r12+1430H] ; 220C _ C4 41 79: 6E. 9C 24, 00001430 + vpslld xmm9, xmm4, 7 ; 2216 _ C5 B1: 72. F4, 07 + vpor xmm8, xmm14, xmm12 ; 221B _ C4 41 09: EB. C4 + vpxor xmm9, xmm5, xmm9 ; 2220 _ C4 41 51: EF. C9 + vmovd xmm14, dword [r12+1434H] ; 2225 _ C4 41 79: 6E. B4 24, 00001434 + vpshufd xmm5, xmm11, 0 ; 222F _ C4 C1 79: 70. EB, 00 + vpshufd xmm12, xmm14, 0 ; 2235 _ C4 41 79: 70. E6, 00 + vpxor xmm10, xmm8, xmm5 ; 223B _ C5 39: EF. D5 + vpxor xmm5, xmm4, xmm12 ; 223F _ C4 C1 59: EF. EC + vpslld xmm4, xmm9, 22 ; 2244 _ C4 C1 59: 72. F1, 16 + vpsrld xmm9, xmm9, 10 ; 224A _ C4 C1 31: 72. D1, 0A + vmovd xmm11, dword [r12+143CH] ; 2250 _ C4 41 79: 6E. 9C 24, 0000143C + vpor xmm14, xmm4, xmm9 ; 225A _ C4 41 59: EB. F1 + vmovd xmm4, dword [r12+1438H] ; 225F _ C4 C1 79: 6E. A4 24, 00001438 + vpshufd xmm12, xmm4, 0 ; 2269 _ C5 79: 70. E4, 00 + vpshufd xmm9, xmm11, 0 ; 226E _ C4 41 79: 70. CB, 00 + vpxor xmm8, xmm14, xmm12 ; 2274 _ C4 41 09: EF. C4 + vpxor xmm13, xmm13, xmm9 ; 2279 _ C4 41 11: EF. E9 + vpand xmm9, xmm5, xmm10 ; 227E _ C4 41 51: DB. CA + vpor xmm11, xmm10, xmm13 ; 2283 _ C4 41 29: EB. DD + vpxor xmm13, xmm13, xmm5 ; 2288 _ C5 11: EF. ED + vpxor xmm10, xmm10, xmm8 ; 228C _ C4 41 29: EF. D0 + vpxor xmm5, xmm8, xmm13 ; 2291 _ C4 C1 39: EF. ED + vpor xmm8, xmm10, xmm9 ; 2296 _ C4 41 29: EB. C1 + vpand xmm4, xmm13, xmm11 ; 229B _ C4 C1 11: DB. E3 + vpxor xmm14, xmm4, xmm8 ; 22A0 _ C4 41 59: EF. F0 + vpxor xmm12, xmm11, xmm9 ; 22A5 _ C4 41 21: EF. E1 + vpxor xmm9, xmm9, xmm14 ; 22AA _ C4 41 31: EF. CE + vpand xmm8, xmm8, xmm12 ; 22AF _ C4 41 39: DB. C4 + vpor xmm11, xmm9, xmm12 ; 22B4 _ C4 41 31: EB. DC + vpxor xmm9, xmm12, xmm14 ; 22B9 _ C4 41 19: EF. CE + vpxor xmm4, xmm11, xmm5 ; 22BE _ C5 A1: EF. E5 + vpxor xmm5, xmm8, xmm5 ; 22C2 _ C5 B9: EF. ED + vpor xmm10, xmm4, xmm14 ; 22C6 _ C4 41 59: EB. D6 + vpxor xmm13, xmm9, xmm10 ; 22CB _ C4 41 31: EF. EA + vpslld xmm10, xmm14, 3 ; 22D0 _ C4 C1 29: 72. F6, 03 + vpslld xmm11, xmm13, 13 ; 22D6 _ C4 C1 21: 72. F5, 0D + vpsrld xmm9, xmm13, 19 ; 22DC _ C4 C1 31: 72. D5, 13 + vpor xmm9, xmm11, xmm9 ; 22E2 _ C4 41 21: EB. C9 + vpsrld xmm14, xmm14, 29 ; 22E7 _ C4 C1 09: 72. D6, 1D + vpor xmm13, xmm10, xmm14 ; 22ED _ C4 41 29: EB. EE + vpxor xmm10, xmm4, xmm9 ; 22F2 _ C4 41 59: EF. D1 + vpxor xmm10, xmm10, xmm13 ; 22F7 _ C4 41 29: EF. D5 + vpxor xmm5, xmm5, xmm13 ; 22FC _ C4 C1 51: EF. ED + vpslld xmm4, xmm9, 3 ; 2301 _ C4 C1 59: 72. F1, 03 + vpslld xmm14, xmm10, 1 ; 2307 _ C4 C1 09: 72. F2, 01 + vpxor xmm12, xmm5, xmm4 ; 230D _ C5 51: EF. E4 + vpsrld xmm10, xmm10, 31 ; 2311 _ C4 C1 29: 72. D2, 1F + vpor xmm5, xmm14, xmm10 ; 2317 _ C4 C1 09: EB. EA + vpslld xmm11, xmm12, 7 ; 231C _ C4 C1 21: 72. F4, 07 + vpsrld xmm8, xmm12, 25 ; 2322 _ C4 C1 39: 72. D4, 19 + vpxor xmm9, xmm9, xmm5 ; 2328 _ C5 31: EF. CD + vpor xmm11, xmm11, xmm8 ; 232C _ C4 41 21: EB. D8 + vpxor xmm9, xmm9, xmm11 ; 2331 _ C4 41 31: EF. CB + vpxor xmm10, xmm13, xmm11 ; 2336 _ C4 41 11: EF. D3 + vpslld xmm13, xmm5, 7 ; 233B _ C5 91: 72. 
F5, 07 + vmovd xmm4, dword [r12+1440H] ; 2340 _ C4 C1 79: 6E. A4 24, 00001440 + vpxor xmm12, xmm10, xmm13 ; 234A _ C4 41 29: EF. E5 + vpslld xmm10, xmm9, 5 ; 234F _ C4 C1 29: 72. F1, 05 + vpsrld xmm13, xmm9, 27 ; 2355 _ C4 C1 11: 72. D1, 1B + vmovd xmm9, dword [r12+144CH] ; 235B _ C4 41 79: 6E. 8C 24, 0000144C + vpor xmm14, xmm10, xmm13 ; 2365 _ C4 41 29: EB. F5 + vpshufd xmm8, xmm4, 0 ; 236A _ C5 79: 70. C4, 00 + vmovd xmm4, dword [r12+1444H] ; 236F _ C4 C1 79: 6E. A4 24, 00001444 + vpxor xmm10, xmm14, xmm8 ; 2379 _ C4 41 09: EF. D0 + vpshufd xmm13, xmm9, 0 ; 237E _ C4 41 79: 70. E9, 00 + vpxor xmm9, xmm11, xmm13 ; 2384 _ C4 41 21: EF. CD + vpshufd xmm11, xmm4, 0 ; 2389 _ C5 79: 70. DC, 00 + vpxor xmm13, xmm9, xmm6 ; 238E _ C5 31: EF. EE + vpxor xmm5, xmm5, xmm11 ; 2392 _ C4 C1 51: EF. EB + vmovd xmm14, dword [r12+1448H] ; 2397 _ C4 41 79: 6E. B4 24, 00001448 + vpxor xmm4, xmm5, xmm9 ; 23A1 _ C4 C1 51: EF. E1 + vpslld xmm5, xmm12, 22 ; 23A6 _ C4 C1 51: 72. F4, 16 + vpsrld xmm12, xmm12, 10 ; 23AC _ C4 C1 19: 72. D4, 0A + vpshufd xmm9, xmm14, 0 ; 23B2 _ C4 41 79: 70. CE, 00 + vpor xmm5, xmm5, xmm12 ; 23B8 _ C4 C1 51: EB. EC + vpxor xmm14, xmm5, xmm9 ; 23BD _ C4 41 51: EF. F1 + vpxor xmm9, xmm13, xmm10 ; 23C2 _ C4 41 11: EF. CA + vpxor xmm8, xmm14, xmm13 ; 23C7 _ C4 41 09: EF. C5 + vpand xmm13, xmm4, xmm9 ; 23CC _ C4 41 59: DB. E9 + vpxor xmm12, xmm4, xmm9 ; 23D1 _ C4 41 59: EF. E1 + vpxor xmm11, xmm13, xmm8 ; 23D6 _ C4 41 11: EF. D8 + vpxor xmm13, xmm10, xmm12 ; 23DB _ C4 41 29: EF. EC + vpand xmm10, xmm8, xmm12 ; 23E0 _ C4 41 39: DB. D4 + vpand xmm8, xmm13, xmm11 ; 23E5 _ C4 41 11: DB. C3 + vpxor xmm5, xmm10, xmm13 ; 23EA _ C4 C1 29: EF. ED + vpxor xmm10, xmm9, xmm8 ; 23EF _ C4 41 31: EF. D0 + vpor xmm12, xmm12, xmm11 ; 23F4 _ C4 41 19: EB. E3 + vpor xmm9, xmm8, xmm10 ; 23F9 _ C4 41 39: EB. CA + vpsrld xmm13, xmm11, 19 ; 23FE _ C4 C1 11: 72. D3, 13 + vpxor xmm4, xmm9, xmm5 ; 2404 _ C5 B1: EF. E5 + vpslld xmm9, xmm11, 13 ; 2408 _ C4 C1 31: 72. F3, 0D + vpxor xmm14, xmm4, xmm6 ; 240E _ C5 59: EF. F6 + vpxor xmm11, xmm12, xmm8 ; 2412 _ C4 41 19: EF. D8 + vpand xmm5, xmm5, xmm10 ; 2417 _ C4 C1 51: DB. EA + vpor xmm13, xmm9, xmm13 ; 241C _ C4 41 31: EB. ED + vpslld xmm4, xmm14, 3 ; 2421 _ C4 C1 59: 72. F6, 03 + vpsrld xmm14, xmm14, 29 ; 2427 _ C4 C1 09: 72. D6, 1D + vpxor xmm9, xmm11, xmm5 ; 242D _ C5 21: EF. CD + vpor xmm14, xmm4, xmm14 ; 2431 _ C4 41 59: EB. F6 + vpxor xmm4, xmm9, xmm13 ; 2436 _ C4 C1 31: EF. E5 + vpxor xmm10, xmm10, xmm14 ; 243B _ C4 41 29: EF. D6 + vpxor xmm11, xmm4, xmm14 ; 2440 _ C4 41 59: EF. DE + vpslld xmm12, xmm13, 3 ; 2445 _ C4 C1 19: 72. F5, 03 + vpxor xmm9, xmm10, xmm12 ; 244B _ C4 41 29: EF. CC + vpslld xmm8, xmm11, 1 ; 2450 _ C4 C1 39: 72. F3, 01 + vpsrld xmm5, xmm11, 31 ; 2456 _ C4 C1 51: 72. D3, 1F + vpslld xmm10, xmm9, 7 ; 245C _ C4 C1 29: 72. F1, 07 + vpor xmm4, xmm8, xmm5 ; 2462 _ C5 B9: EB. E5 + vpsrld xmm12, xmm9, 25 ; 2466 _ C4 C1 19: 72. D1, 19 + vpor xmm9, xmm10, xmm12 ; 246C _ C4 41 29: EB. CC + vpxor xmm13, xmm13, xmm4 ; 2471 _ C5 11: EF. EC + vpxor xmm5, xmm13, xmm9 ; 2475 _ C4 C1 11: EF. E9 + vpslld xmm10, xmm4, 7 ; 247A _ C5 A9: 72. F4, 07 + vmovd xmm13, dword [r12+1454H] ; 247F _ C4 41 79: 6E. AC 24, 00001454 + vpxor xmm14, xmm14, xmm9 ; 2489 _ C4 41 09: EF. F1 + vpshufd xmm12, xmm13, 0 ; 248E _ C4 41 79: 70. E5, 00 + vpxor xmm14, xmm14, xmm10 ; 2494 _ C4 41 09: EF. F2 + vpxor xmm11, xmm4, xmm12 ; 2499 _ C4 41 59: EF. DC + vmovd xmm4, dword [r12+145CH] ; 249E _ C4 C1 79: 6E. A4 24, 0000145C + vpshufd xmm8, xmm4, 0 ; 24A8 _ C5 79: 70. 
C4, 00 + vmovd xmm10, dword [r12+1450H] ; 24AD _ C4 41 79: 6E. 94 24, 00001450 + vpxor xmm12, xmm9, xmm8 ; 24B7 _ C4 41 31: EF. E0 + vpslld xmm9, xmm5, 5 ; 24BC _ C5 B1: 72. F5, 05 + vpsrld xmm5, xmm5, 27 ; 24C1 _ C5 D1: 72. D5, 1B + vpshufd xmm10, xmm10, 0 ; 24C6 _ C4 41 79: 70. D2, 00 + vpor xmm4, xmm9, xmm5 ; 24CC _ C5 B1: EB. E5 + vpxor xmm8, xmm4, xmm10 ; 24D0 _ C4 41 59: EF. C2 + vpxor xmm5, xmm11, xmm12 ; 24D5 _ C4 C1 21: EF. EC + vmovd xmm13, dword [r12+1458H] ; 24DA _ C4 41 79: 6E. AC 24, 00001458 + vpxor xmm10, xmm8, xmm11 ; 24E4 _ C4 41 39: EF. D3 + vpslld xmm11, xmm14, 22 ; 24E9 _ C4 C1 21: 72. F6, 16 + vpsrld xmm14, xmm14, 10 ; 24EF _ C4 C1 09: 72. D6, 0A + vpshufd xmm13, xmm13, 0 ; 24F5 _ C4 41 79: 70. ED, 00 + vpor xmm9, xmm11, xmm14 ; 24FB _ C4 41 21: EB. CE + vpxor xmm8, xmm12, xmm6 ; 2500 _ C5 19: EF. C6 + vpxor xmm4, xmm9, xmm13 ; 2504 _ C4 C1 31: EF. E5 + vpxor xmm12, xmm4, xmm8 ; 2509 _ C4 41 59: EF. E0 + vpand xmm14, xmm5, xmm10 ; 250E _ C4 41 51: DB. F2 + vpxor xmm9, xmm14, xmm12 ; 2513 _ C4 41 09: EF. CC + vpor xmm11, xmm12, xmm5 ; 2518 _ C5 19: EB. DD + vpand xmm13, xmm8, xmm9 ; 251C _ C4 41 39: DB. E9 + vpxor xmm5, xmm5, xmm8 ; 2521 _ C4 C1 51: EF. E8 + vpxor xmm12, xmm13, xmm10 ; 2526 _ C4 41 11: EF. E2 + vpxor xmm8, xmm5, xmm9 ; 252B _ C4 41 51: EF. C1 + vpxor xmm4, xmm8, xmm11 ; 2530 _ C4 C1 39: EF. E3 + vpand xmm5, xmm10, xmm12 ; 2535 _ C4 C1 29: DB. EC + vpxor xmm13, xmm5, xmm4 ; 253A _ C5 51: EF. EC + vpslld xmm14, xmm9, 13 ; 253E _ C4 C1 09: 72. F1, 0D + vpsrld xmm9, xmm9, 19 ; 2544 _ C4 C1 31: 72. D1, 13 + vpxor xmm10, xmm11, xmm10 ; 254A _ C4 41 21: EF. D2 + vpor xmm9, xmm14, xmm9 ; 254F _ C4 41 09: EB. C9 + vpslld xmm8, xmm13, 3 ; 2554 _ C4 C1 39: 72. F5, 03 + vpsrld xmm13, xmm13, 29 ; 255A _ C4 C1 11: 72. D5, 1D + vpor xmm4, xmm4, xmm12 ; 2560 _ C4 C1 59: EB. E4 + vpxor xmm11, xmm10, xmm6 ; 2565 _ C5 29: EF. DE + vpor xmm5, xmm8, xmm13 ; 2569 _ C4 C1 39: EB. ED + vpxor xmm13, xmm12, xmm9 ; 256E _ C4 41 19: EF. E9 + vpxor xmm10, xmm4, xmm11 ; 2573 _ C4 41 59: EF. D3 + vpxor xmm8, xmm13, xmm5 ; 2578 _ C5 11: EF. C5 + vpxor xmm13, xmm10, xmm5 ; 257C _ C5 29: EF. ED + vpslld xmm4, xmm9, 3 ; 2580 _ C4 C1 59: 72. F1, 03 + vpslld xmm14, xmm8, 1 ; 2586 _ C4 C1 09: 72. F0, 01 + vpxor xmm11, xmm13, xmm4 ; 258C _ C5 11: EF. DC + vpsrld xmm12, xmm8, 31 ; 2590 _ C4 C1 19: 72. D0, 1F + vpor xmm8, xmm14, xmm12 ; 2596 _ C4 41 09: EB. C4 + vpslld xmm10, xmm11, 7 ; 259B _ C4 C1 29: 72. F3, 07 + vpsrld xmm13, xmm11, 25 ; 25A1 _ C4 C1 11: 72. D3, 19 + vpxor xmm9, xmm9, xmm8 ; 25A7 _ C4 41 31: EF. C8 + vpor xmm4, xmm10, xmm13 ; 25AC _ C4 C1 29: EB. E5 + vpxor xmm13, xmm9, xmm4 ; 25B1 _ C5 31: EF. EC + vpxor xmm5, xmm5, xmm4 ; 25B5 _ C5 D1: EF. EC + vpslld xmm9, xmm8, 7 ; 25B9 _ C4 C1 31: 72. F0, 07 + vpsrld xmm14, xmm13, 27 ; 25BF _ C4 C1 09: 72. D5, 1B + vmovd xmm12, dword [r12+1460H] ; 25C5 _ C4 41 79: 6E. A4 24, 00001460 + vpxor xmm10, xmm5, xmm9 ; 25CF _ C4 41 51: EF. D1 + vpslld xmm5, xmm13, 5 ; 25D4 _ C4 C1 51: 72. F5, 05 + vpor xmm11, xmm5, xmm14 ; 25DA _ C4 41 51: EB. DE + vpshufd xmm5, xmm12, 0 ; 25DF _ C4 C1 79: 70. EC, 00 + vmovd xmm9, dword [r12+1468H] ; 25E5 _ C4 41 79: 6E. 8C 24, 00001468 + vpxor xmm14, xmm11, xmm5 ; 25EF _ C5 21: EF. F5 + vmovd xmm13, dword [r12+146CH] ; 25F3 _ C4 41 79: 6E. AC 24, 0000146C + vpslld xmm11, xmm10, 22 ; 25FD _ C4 C1 21: 72. F2, 16 + vpsrld xmm10, xmm10, 10 ; 2603 _ C4 C1 29: 72. D2, 0A + vpshufd xmm12, xmm13, 0 ; 2609 _ C4 41 79: 70. E5, 00 + vpor xmm10, xmm11, xmm10 ; 260F _ C4 41 21: EB. 
D2 + vpshufd xmm9, xmm9, 0 ; 2614 _ C4 41 79: 70. C9, 00 + vpxor xmm4, xmm4, xmm12 ; 261A _ C4 C1 59: EF. E4 + vmovd xmm5, dword [r12+1464H] ; 261F _ C4 C1 79: 6E. AC 24, 00001464 + vpxor xmm13, xmm10, xmm9 ; 2629 _ C4 41 29: EF. E9 + vpshufd xmm5, xmm5, 0 ; 262E _ C5 F9: 70. ED, 00 + vpxor xmm10, xmm13, xmm6 ; 2633 _ C5 11: EF. D6 + vpxor xmm13, xmm14, xmm4 ; 2637 _ C5 09: EF. EC + vpand xmm14, xmm4, xmm14 ; 263B _ C4 41 59: DB. F6 + vpxor xmm9, xmm14, xmm10 ; 2640 _ C4 41 09: EF. CA + vpxor xmm8, xmm8, xmm5 ; 2645 _ C5 39: EF. C5 + vpxor xmm8, xmm8, xmm9 ; 2649 _ C4 41 39: EF. C1 + vpor xmm12, xmm10, xmm4 ; 264E _ C5 29: EB. E4 + vpor xmm11, xmm13, xmm8 ; 2652 _ C4 41 11: EB. D8 + vpxor xmm5, xmm12, xmm13 ; 2657 _ C4 C1 19: EF. ED + vpxor xmm5, xmm5, xmm8 ; 265C _ C4 C1 51: EF. E8 + vpor xmm10, xmm11, xmm9 ; 2661 _ C4 41 21: EB. D1 + vpxor xmm4, xmm4, xmm11 ; 2666 _ C4 C1 59: EF. E3 + vpxor xmm12, xmm10, xmm5 ; 266B _ C5 29: EF. E5 + vpxor xmm10, xmm4, xmm9 ; 266F _ C4 41 59: EF. D1 + vpslld xmm13, xmm12, 13 ; 2674 _ C4 C1 11: 72. F4, 0D + vpxor xmm14, xmm10, xmm12 ; 267A _ C4 41 29: EF. F4 + vpsrld xmm4, xmm12, 19 ; 267F _ C4 C1 59: 72. D4, 13 + vpor xmm4, xmm13, xmm4 ; 2685 _ C5 91: EB. E4 + vpslld xmm11, xmm14, 3 ; 2689 _ C4 C1 21: 72. F6, 03 + vpsrld xmm10, xmm14, 29 ; 268F _ C4 C1 29: 72. D6, 1D + vpxor xmm9, xmm9, xmm6 ; 2695 _ C5 31: EF. CE + vpand xmm14, xmm5, xmm14 ; 2699 _ C4 41 51: DB. F6 + vpor xmm13, xmm11, xmm10 ; 269E _ C4 41 21: EB. EA + vpxor xmm8, xmm8, xmm4 ; 26A3 _ C5 39: EF. C4 + vpxor xmm5, xmm9, xmm14 ; 26A7 _ C4 C1 31: EF. EE + vpxor xmm10, xmm8, xmm13 ; 26AC _ C4 41 39: EF. D5 + vpxor xmm9, xmm5, xmm13 ; 26B1 _ C4 41 51: EF. CD + vpslld xmm14, xmm4, 3 ; 26B6 _ C5 89: 72. F4, 03 + vpslld xmm12, xmm10, 1 ; 26BB _ C4 C1 19: 72. F2, 01 + vpxor xmm11, xmm9, xmm14 ; 26C1 _ C4 41 31: EF. DE + vpsrld xmm10, xmm10, 31 ; 26C6 _ C4 C1 29: 72. D2, 1F + vpor xmm5, xmm12, xmm10 ; 26CC _ C4 C1 19: EB. EA + vpslld xmm8, xmm11, 7 ; 26D1 _ C4 C1 39: 72. F3, 07 + vpsrld xmm9, xmm11, 25 ; 26D7 _ C4 C1 31: 72. D3, 19 + vpxor xmm4, xmm4, xmm5 ; 26DD _ C5 D9: EF. E5 + vpor xmm12, xmm8, xmm9 ; 26E1 _ C4 41 39: EB. E1 + vpslld xmm9, xmm5, 7 ; 26E6 _ C5 B1: 72. F5, 07 + vpxor xmm10, xmm4, xmm12 ; 26EB _ C4 41 59: EF. D4 + vpxor xmm13, xmm13, xmm12 ; 26F0 _ C4 41 11: EF. EC + vpxor xmm4, xmm13, xmm9 ; 26F5 _ C4 C1 11: EF. E1 + vpslld xmm13, xmm10, 5 ; 26FA _ C4 C1 11: 72. F2, 05 + vpsrld xmm14, xmm10, 27 ; 2700 _ C4 C1 09: 72. D2, 1B + vmovd xmm11, dword [r12+1470H] ; 2706 _ C4 41 79: 6E. 9C 24, 00001470 + vpor xmm8, xmm13, xmm14 ; 2710 _ C4 41 11: EB. C6 + vmovd xmm13, dword [r12+1474H] ; 2715 _ C4 41 79: 6E. AC 24, 00001474 + vpshufd xmm9, xmm11, 0 ; 271F _ C4 41 79: 70. CB, 00 + vpshufd xmm14, xmm13, 0 ; 2725 _ C4 41 79: 70. F5, 00 + vpxor xmm10, xmm8, xmm9 ; 272B _ C4 41 39: EF. D1 + vpxor xmm11, xmm5, xmm14 ; 2730 _ C4 41 51: EF. DE + vpslld xmm5, xmm4, 22 ; 2735 _ C5 D1: 72. F4, 16 + vpsrld xmm9, xmm4, 10 ; 273A _ C5 B1: 72. D4, 0A + vpor xmm13, xmm5, xmm9 ; 273F _ C4 41 51: EB. E9 + vmovd xmm5, dword [r12+1478H] ; 2744 _ C4 C1 79: 6E. AC 24, 00001478 + vmovd xmm8, dword [r12+147CH] ; 274E _ C4 41 79: 6E. 84 24, 0000147C + vpshufd xmm4, xmm5, 0 ; 2758 _ C5 F9: 70. E5, 00 + vpshufd xmm5, xmm8, 0 ; 275D _ C4 C1 79: 70. E8, 00 + vpxor xmm14, xmm13, xmm4 ; 2763 _ C5 11: EF. F4 + vpxor xmm12, xmm12, xmm5 ; 2767 _ C5 19: EF. E5 + vpor xmm9, xmm11, xmm14 ; 276B _ C4 41 21: EB. CE + vpxor xmm4, xmm9, xmm12 ; 2770 _ C4 C1 31: EF. E4 + vpxor xmm11, xmm11, xmm14 ; 2775 _ C4 41 21: EF. 
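+; Below: the last key words of this pass ([r12+1480H..148CH]) are mixed
+; in, the four state registers are transposed back to one 128-bit block
+; each (vpunpckldq/vpunpckhdq, then vpunpcklqdq/vpunpckhqdq), every block
+; is XORed with its XTS tweak (xmm3/xmm2/xmm1/xmm0) and stored at [rbp],
+; and the tweak for the next group of four blocks is derived from xmm0.
+; The inner loop runs 8 times over 4 blocks of 16 bytes, one 512-byte
+; chunk per outer iteration (add r14, -512).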
DE + vpxor xmm9, xmm14, xmm4 ; 277A _ C5 09: EF. CC + vpor xmm12, xmm12, xmm11 ; 277E _ C4 41 19: EB. E3 + vpxor xmm13, xmm11, xmm9 ; 2783 _ C4 41 21: EF. E9 + vpor xmm14, xmm10, xmm13 ; 2788 _ C4 41 29: EB. F5 + vpor xmm8, xmm4, xmm13 ; 278D _ C4 41 59: EB. C5 + vpxor xmm5, xmm14, xmm9 ; 2792 _ C4 C1 09: EF. E9 + vpxor xmm14, xmm8, xmm10 ; 2797 _ C4 41 39: EF. F2 + vpxor xmm14, xmm14, xmm13 ; 279C _ C4 41 09: EF. F5 + vpand xmm10, xmm12, xmm10 ; 27A1 _ C4 41 19: DB. D2 + vpxor xmm9, xmm9, xmm14 ; 27A6 _ C4 41 31: EF. CE + vpand xmm12, xmm14, xmm5 ; 27AB _ C5 09: DB. E5 + vpxor xmm8, xmm9, xmm6 ; 27AF _ C5 31: EF. C6 + vpxor xmm14, xmm12, xmm13 ; 27B3 _ C4 41 19: EF. F5 + vpor xmm9, xmm8, xmm5 ; 27B8 _ C5 39: EB. CD + vpxor xmm8, xmm13, xmm9 ; 27BC _ C4 41 11: EF. C1 + vmovd xmm9, dword [r12+1480H] ; 27C1 _ C4 41 79: 6E. 8C 24, 00001480 + vpshufd xmm9, xmm9, 0 ; 27CB _ C4 41 79: 70. C9, 00 + vpxor xmm9, xmm8, xmm9 ; 27D1 _ C4 41 39: EF. C9 + vpxor xmm8, xmm10, xmm4 ; 27D6 _ C5 29: EF. C4 + vmovd xmm4, dword [r12+1484H] ; 27DA _ C4 C1 79: 6E. A4 24, 00001484 + vmovd xmm13, dword [r12+1488H] ; 27E4 _ C4 41 79: 6E. AC 24, 00001488 + vpshufd xmm11, xmm4, 0 ; 27EE _ C5 79: 70. DC, 00 + vmovd xmm4, dword [r12+148CH] ; 27F3 _ C4 C1 79: 6E. A4 24, 0000148C + vpxor xmm10, xmm8, xmm11 ; 27FD _ C4 41 39: EF. D3 + vpshufd xmm13, xmm13, 0 ; 2802 _ C4 41 79: 70. ED, 00 + vpshufd xmm8, xmm4, 0 ; 2808 _ C5 79: 70. C4, 00 + vpxor xmm13, xmm14, xmm13 ; 280D _ C4 41 09: EF. ED + vpxor xmm11, xmm5, xmm8 ; 2812 _ C4 41 51: EF. D8 + vpunpckldq xmm4, xmm9, xmm10 ; 2817 _ C4 C1 31: 62. E2 + vpunpckldq xmm5, xmm13, xmm11 ; 281C _ C4 C1 11: 62. EB + vpunpckhdq xmm9, xmm9, xmm10 ; 2821 _ C4 41 31: 6A. CA + vpunpckhdq xmm8, xmm13, xmm11 ; 2826 _ C4 41 11: 6A. C3 + vpunpcklqdq xmm10, xmm4, xmm5 ; 282B _ C5 59: 6C. D5 + vpsllq xmm14, xmm0, 1 ; 282F _ C5 89: 73. F0, 01 + vpunpckhqdq xmm4, xmm4, xmm5 ; 2834 _ C5 D9: 6D. E5 + vpxor xmm3, xmm10, xmm3 ; 2838 _ C5 A9: EF. DB + vpslldq xmm12, xmm0, 8 ; 283C _ C5 99: 73. F8, 08 + vpxor xmm2, xmm4, xmm2 ; 2841 _ C5 D9: EF. D2 + vpsrldq xmm13, xmm12, 7 ; 2845 _ C4 C1 11: 73. DC, 07 + inc r10d ; 284B _ 41: FF. C2 + vmovdqu oword [rbp+10H], xmm2 ; 284E _ C5 FA: 7F. 55, 10 + vmovdqu oword [rbp], xmm3 ; 2853 _ C5 FA: 7F. 5D, 00 + vpsrlq xmm2, xmm13, 7 ; 2858 _ C4 C1 69: 73. D5, 07 + add r13, 64 ; 285E _ 49: 83. C5, 40 + vpor xmm3, xmm14, xmm2 ; 2862 _ C5 89: EB. DA + vpsraw xmm2, xmm0, 8 ; 2866 _ C5 E9: 71. E0, 08 + vpunpcklqdq xmm5, xmm9, xmm8 ; 286B _ C4 C1 31: 6C. E8 + vpunpckhqdq xmm8, xmm9, xmm8 ; 2870 _ C4 41 31: 6D. C0 + vpxor xmm11, xmm5, xmm1 ; 2875 _ C5 51: EF. D9 + vpsrldq xmm2, xmm2, 15 ; 2879 _ C5 E9: 73. DA, 0F + vpxor xmm9, xmm8, xmm0 ; 287E _ C5 39: EF. C8 + vpand xmm2, xmm2, xmm7 ; 2882 _ C5 E9: DB. D7 + vmovdqu oword [rbp+20H], xmm11 ; 2886 _ C5 7A: 7F. 5D, 20 + vmovdqu oword [rbp+30H], xmm9 ; 288B _ C5 7A: 7F. 4D, 30 + add rbp, 64 ; 2890 _ 48: 83. C5, 40 + vpxor xmm3, xmm3, xmm2 ; 2894 _ C5 E1: EF. DA + cmp r10d, 8 ; 2898 _ 41: 83. FA, 08 + jl ?_003 ; 289C _ 0F 8C, FFFFD831 + add r14, -512 ; 28A2 _ 49: 81. C6, FFFFFE00 + jne ?_001 ; 28A9 _ 0F 85, FFFFD7EF + vmovups xmm6, oword [rsp+0A0H] ; 28AF _ C5 F8: 10. B4 24, 000000A0 + vmovups xmm7, oword [rsp+90H] ; 28B8 _ C5 F8: 10. BC 24, 00000090 + vmovups xmm8, oword [rsp+80H] ; 28C1 _ C5 78: 10. 84 24, 00000080 + vmovups xmm9, oword [rsp+70H] ; 28CA _ C5 78: 10. 4C 24, 70 + vmovups xmm10, oword [rsp+60H] ; 28D0 _ C5 78: 10. 54 24, 60 + vmovups xmm11, oword [rsp+50H] ; 28D6 _ C5 78: 10. 
5C 24, 50
+ vmovups xmm12, oword [rsp+40H] ; 28DC _ C5 78: 10. 64 24, 40
+ vmovups xmm13, oword [rsp+30H] ; 28E2 _ C5 78: 10. 6C 24, 30
+ vmovups xmm14, oword [rsp+20H] ; 28E8 _ C5 78: 10. 74 24, 20
+ add rsp, 208 ; 28EE _ 48: 81. C4, 000000D0
+ pop rbp ; 28F5 _ 5D
+ pop r15 ; 28F6 _ 41: 5F
+ pop r14 ; 28F8 _ 41: 5E
+ pop r13 ; 28FA _ 41: 5D
+ pop r12 ; 28FC _ 41: 5C
+ ret ; 28FE _ C3
+; xts_serpent_avx_encrypt End of function
+
+ALIGN 16
+
+xts_serpent_avx_decrypt:; Function begin
+ push r12 ; 0000 _ 41: 54
+ push r13 ; 0002 _ 41: 55
+ push r14 ; 0004 _ 41: 56
+ push r15 ; 0006 _ 41: 57
+ push rbp ; 0008 _ 55
+ sub rsp, 192 ; 0009 _ 48: 81. EC, 000000C0
+ mov rbp, rdx ; 0010 _ 48: 89. D5
+ mov r10d, 135 ; 0013 _ 41: BA, 00000087
+ vpcmpeqd xmm0, xmm0, xmm0 ; 0019 _ C5 F9: 76. C0
+ mov rax, qword [rsp+110H] ; 001D _ 48: 8B. 84 24, 00000110
+ mov r12, rax ; 0025 _ 49: 89. C4
+ shr r9, 9 ; 0028 _ 49: C1. E9, 09
+ mov r13, rcx ; 002C _ 49: 89. CD
+ mov qword [rsp+0A0H], r9 ; 002F _ 4C: 89. 8C 24, 000000A0
+ mov r14, r8 ; 0037 _ 4D: 89. C6
+ vmovd xmm1, r10d ; 003A _ C4 C1 79: 6E. CA
+ lea r9, [rax+2710H] ; 003F _ 4C: 8D. 88, 00002710
+ mov qword [rsp+0A8H], 0 ; 0046 _ 48: C7. 84 24, 000000A8, 00000000
+ mov r15, r9 ; 0052 _ 4D: 89. CF
+ vmovups oword [rsp+90H], xmm6 ; 0055 _ C5 F8: 11. B4 24, 00000090
+ vmovdqa xmm6, xmm0 ; 005E _ C5 F9: 6F. F0
+ vmovups oword [rsp+80H], xmm7 ; 0062 _ C5 F8: 11. BC 24, 00000080
+ vmovdqa xmm7, xmm1 ; 006B _ C5 F9: 6F. F9
+ vmovups oword [rsp+70H], xmm8 ; 006F _ C5 78: 11. 44 24, 70
+ vmovups oword [rsp+60H], xmm9 ; 0075 _ C5 78: 11. 4C 24, 60
+ vmovups oword [rsp+50H], xmm10 ; 007B _ C5 78: 11. 54 24, 50
+ vmovups oword [rsp+40H], xmm11 ; 0081 _ C5 78: 11. 5C 24, 40
+ vmovups oword [rsp+30H], xmm12 ; 0087 _ C5 78: 11. 64 24, 30
+ vmovups oword [rsp+20H], xmm13 ; 008D _ C5 78: 11. 6C 24, 20
+ jmp ?_005 ; 0093 _ EB, 09
+
+?_004: vmovdqu oword [rsp+0B0H], xmm3 ; 0095 _ C5 FA: 7F. 9C 24, 000000B0
+?_005: mov r8, r15 ; 009E _ 4D: 89. F8
+ lea rcx, [rsp+0A0H] ; 00A1 _ 48: 8D. 8C 24, 000000A0
+ inc qword [rsp+0A0H] ; 00A9 _ 48: FF. 84 24, 000000A0
+ lea rdx, [rsp+0B0H] ; 00B1 _ 48: 8D. 94 24, 000000B0
+ call serpent256_encrypt ; 00B9 _ E8, 00000000(rel)
+ vmovdqu xmm3, oword [rsp+0B0H] ; 00BE _ C5 FA: 6F. 9C 24, 000000B0
+ xor r10d, r10d ; 00C7 _ 45: 33. D2
+?_006: vpslldq xmm2, xmm3, 8 ; 00CA _ C5 E9: 73. FB, 08
+ vpsllq xmm4, xmm3, 1 ; 00CF _ C5 D9: 73. F3, 01
+ vpsrldq xmm2, xmm2, 7 ; 00D4 _ C5 E9: 73. DA, 07
+ vpsrlq xmm2, xmm2, 7 ; 00D9 _ C5 E9: 73. D2, 07
+ vpor xmm4, xmm4, xmm2 ; 00DE _ C5 D9: EB. E2
+ vpsraw xmm2, xmm3, 8 ; 00E2 _ C5 E9: 71. E3, 08
+ vpsrldq xmm2, xmm2, 15 ; 00E7 _ C5 E9: 73. DA, 0F
+ vpand xmm2, xmm2, xmm7 ; 00EC _ C5 E9: DB. D7
+ vpxor xmm0, xmm4, xmm2 ; 00F0 _ C5 D9: EF. C2
+ vpslldq xmm2, xmm0, 8 ; 00F4 _ C5 E9: 73. F8, 08
+ vpsllq xmm4, xmm0, 1 ; 00F9 _ C5 D9: 73. F0, 01
+ vpsrldq xmm2, xmm2, 7 ; 00FE _ C5 E9: 73. DA, 07
+ vpsrlq xmm2, xmm2, 7 ; 0103 _ C5 E9: 73. D2, 07
+ vpor xmm4, xmm4, xmm2 ; 0108 _ C5 D9: EB. E2
+ vpsraw xmm2, xmm0, 8 ; 010C _ C5 E9: 71. E0, 08
+ vpsrldq xmm2, xmm2, 15 ; 0111 _ C5 E9: 73. DA, 0F
+ vpand xmm2, xmm2, xmm7 ; 0116 _ C5 E9: DB. D7
+ vpxor xmm2, xmm4, xmm2 ; 011A _ C5 D9: EF. D2
+ vpslldq xmm4, xmm2, 8 ; 011E _ C5 D9: 73. FA, 08
+ vpsllq xmm5, xmm2, 1 ; 0123 _ C5 D1: 73. F2, 01
+ vpsrldq xmm4, xmm4, 7 ; 0128 _ C5 D9: 73. DC, 07
+ vpsrlq xmm4, xmm4, 7 ; 012D _ C5 D9: 73. D4, 07
+ vpor xmm5, xmm5, xmm4 ; 0132 _ C5 D1: EB. EC
+ vpsraw xmm4, xmm2, 8 ; 0136 _ C5 D9: 71.
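+; serpent256_encrypt is called above on the 128-bit data-unit counter at
+; [rsp+0A0H] (initialised from r9 >> 9, i.e. 512-byte units, and
+; incremented each chunk), with the key material at [rax+2710H], to
+; produce the first tweak of the chunk; the ?_006 block then derives the
+; following tweaks by doubling in GF(2^128). Each vpsllq/vpslldq/vpsrldq/
+; vpsrlq/vpor group is a 128-bit left shift by one (the byte shuffles
+; carry bit 63 into bit 64), and the vpsraw/vpsrldq/vpand group yields
+; 0x87 (the 135 loaded into r10d and kept in xmm7) exactly when bit 127
+; was set, so the closing vpxor reduces modulo x^128 + x^7 + x^2 + x + 1.
+; A scalar C sketch of one doubling (gf128_mul2 is an illustrative name,
+; not a symbol in this source):
+;     void gf128_mul2(uint64_t t[2])               /* t[0] low, t[1] high */
+;     {
+;         uint64_t carry = t[1] >> 63;             /* old bit 127 */
+;         t[1] = (t[1] << 1) | (t[0] >> 63);       /* 128-bit shift by 1 */
+;         t[0] = (t[0] << 1) ^ (carry ? 0x87 : 0); /* conditional reduce */
+;     }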
E2, 08 + vpsrldq xmm4, xmm4, 15 ; 013B _ C5 D9: 73. DC, 0F + vpand xmm4, xmm4, xmm7 ; 0140 _ C5 D9: DB. E7 + vpxor xmm1, xmm5, xmm4 ; 0144 _ C5 D1: EF. CC + vpxor xmm13, xmm3, oword [r13] ; 0148 _ C4 41 61: EF. 6D, 00 + vpxor xmm9, xmm0, oword [r13+10H] ; 014E _ C4 41 79: EF. 4D, 10 + vpxor xmm12, xmm2, oword [r13+20H] ; 0154 _ C4 41 69: EF. 65, 20 + vpxor xmm4, xmm1, oword [r13+30H] ; 015A _ C4 C1 71: EF. 65, 30 + vpunpckldq xmm10, xmm13, xmm9 ; 0160 _ C4 41 11: 62. D1 + vpunpckldq xmm8, xmm12, xmm4 ; 0165 _ C5 19: 62. C4 + vpunpckhdq xmm11, xmm13, xmm9 ; 0169 _ C4 41 11: 6A. D9 + vpunpcklqdq xmm9, xmm10, xmm8 ; 016E _ C4 41 29: 6C. C8 + vmovd xmm13, dword [r12+1480H] ; 0173 _ C4 41 79: 6E. AC 24, 00001480 + vpunpckhqdq xmm8, xmm10, xmm8 ; 017D _ C4 41 29: 6D. C0 + vmovd xmm10, dword [r12+1484H] ; 0182 _ C4 41 79: 6E. 94 24, 00001484 + vpunpckhdq xmm12, xmm12, xmm4 ; 018C _ C5 19: 6A. E4 + vpshufd xmm5, xmm13, 0 ; 0190 _ C4 C1 79: 70. ED, 00 + vpshufd xmm13, xmm10, 0 ; 0196 _ C4 41 79: 70. EA, 00 + vpxor xmm9, xmm9, xmm5 ; 019C _ C5 31: EF. CD + vpunpcklqdq xmm4, xmm11, xmm12 ; 01A0 _ C4 C1 21: 6C. E4 + vpxor xmm10, xmm8, xmm13 ; 01A5 _ C4 41 39: EF. D5 + vmovd xmm8, dword [r12+1488H] ; 01AA _ C4 41 79: 6E. 84 24, 00001488 + vpunpckhqdq xmm12, xmm11, xmm12 ; 01B4 _ C4 41 21: 6D. E4 + vmovd xmm11, dword [r12+148CH] ; 01B9 _ C4 41 79: 6E. 9C 24, 0000148C + vpshufd xmm5, xmm8, 0 ; 01C3 _ C4 C1 79: 70. E8, 00 + vpshufd xmm11, xmm11, 0 ; 01C9 _ C4 41 79: 70. DB, 00 + vpxor xmm8, xmm4, xmm5 ; 01CF _ C5 59: EF. C5 + vpxor xmm5, xmm12, xmm11 ; 01D3 _ C4 C1 19: EF. EB + vpand xmm11, xmm9, xmm5 ; 01D8 _ C5 31: DB. DD + vpxor xmm9, xmm8, xmm9 ; 01DC _ C4 41 39: EF. C9 + vpor xmm12, xmm8, xmm5 ; 01E1 _ C5 39: EB. E5 + vpxor xmm8, xmm9, xmm6 ; 01E5 _ C5 31: EF. C6 + vpxor xmm13, xmm11, xmm8 ; 01E9 _ C4 41 21: EF. E8 + vpand xmm8, xmm8, xmm12 ; 01EE _ C4 41 39: DB. C4 + vpxor xmm9, xmm5, xmm10 ; 01F3 _ C4 41 51: EF. CA + vpor xmm10, xmm10, xmm11 ; 01F8 _ C4 41 29: EB. D3 + vpxor xmm5, xmm10, xmm8 ; 01FD _ C4 C1 29: EF. E8 + vpxor xmm10, xmm8, xmm13 ; 0202 _ C4 41 39: EF. D5 + vpand xmm4, xmm9, xmm12 ; 0207 _ C4 C1 31: DB. E4 + vpxor xmm8, xmm12, xmm5 ; 020C _ C5 19: EF. C5 + vpor xmm13, xmm13, xmm10 ; 0210 _ C4 41 11: EB. EA + vpxor xmm12, xmm4, xmm8 ; 0215 _ C4 41 59: EF. E0 + vpxor xmm11, xmm13, xmm4 ; 021A _ C5 11: EF. DC + vpxor xmm13, xmm12, xmm10 ; 021E _ C4 41 19: EF. EA + vmovd xmm4, dword [r12+1470H] ; 0223 _ C4 C1 79: 6E. A4 24, 00001470 + vpshufd xmm9, xmm4, 0 ; 022D _ C5 79: 70. CC, 00 + vmovd xmm4, dword [r12+1474H] ; 0232 _ C4 C1 79: 6E. A4 24, 00001474 + vpxor xmm12, xmm13, xmm9 ; 023C _ C4 41 11: EF. E1 + vmovd xmm9, dword [r12+1478H] ; 0241 _ C4 41 79: 6E. 8C 24, 00001478 + vpshufd xmm13, xmm4, 0 ; 024B _ C5 79: 70. EC, 00 + vpshufd xmm4, xmm9, 0 ; 0250 _ C4 C1 79: 70. E1, 00 + vpxor xmm13, xmm11, xmm13 ; 0256 _ C4 41 21: EF. ED + vpxor xmm4, xmm5, xmm4 ; 025B _ C5 D1: EF. E4 + vpor xmm5, xmm8, xmm11 ; 025F _ C4 C1 39: EB. EB + vmovd xmm8, dword [r12+147CH] ; 0264 _ C4 41 79: 6E. 84 24, 0000147C + vpxor xmm11, xmm5, xmm10 ; 026E _ C4 41 51: EF. DA + vpshufd xmm10, xmm8, 0 ; 0273 _ C4 41 79: 70. D0, 00 + vpslld xmm8, xmm4, 10 ; 0279 _ C5 B9: 72. F4, 0A + vpsrld xmm9, xmm4, 22 ; 027E _ C5 B1: 72. D4, 16 + vpxor xmm11, xmm11, xmm10 ; 0283 _ C4 41 21: EF. DA + vpor xmm4, xmm8, xmm9 ; 0288 _ C4 C1 39: EB. E1 + vpslld xmm8, xmm12, 27 ; 028D _ C4 C1 39: 72. F4, 1B + vpsrld xmm12, xmm12, 5 ; 0293 _ C4 C1 19: 72. D4, 05 + vpxor xmm5, xmm4, xmm11 ; 0299 _ C4 C1 59: EF. 
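+; Above: four ciphertext blocks are loaded from [r13], XORed with their
+; XTS tweaks, and transposed (vpunpck*) into the bitsliced layout; the
+; inverse rounds then walk the key schedule from the top down: the first
+; round-key loads read [r12+1480H..148CH], with the later groups in this
+; stretch descending toward [r12+1400H].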
EB + vpor xmm8, xmm8, xmm12 ; 029E _ C4 41 39: EB. C4 + vpslld xmm10, xmm13, 7 ; 02A3 _ C4 C1 29: 72. F5, 07 + vpxor xmm12, xmm8, xmm13 ; 02A9 _ C4 41 39: EF. E5 + vpslld xmm4, xmm11, 25 ; 02AE _ C4 C1 59: 72. F3, 19 + vpxor xmm8, xmm12, xmm11 ; 02B4 _ C4 41 19: EF. C3 + vpsrld xmm11, xmm11, 7 ; 02B9 _ C4 C1 21: 72. D3, 07 + vpxor xmm9, xmm5, xmm10 ; 02BF _ C4 41 51: EF. CA + vpor xmm11, xmm4, xmm11 ; 02C4 _ C4 41 59: EB. DB + vpslld xmm5, xmm13, 31 ; 02C9 _ C4 C1 51: 72. F5, 1F + vpsrld xmm13, xmm13, 1 ; 02CF _ C4 C1 11: 72. D5, 01 + vpxor xmm12, xmm11, xmm9 ; 02D5 _ C4 41 21: EF. E1 + vpslld xmm4, xmm8, 3 ; 02DA _ C4 C1 59: 72. F0, 03 + vpor xmm13, xmm5, xmm13 ; 02E0 _ C4 41 51: EB. ED + vpxor xmm11, xmm12, xmm4 ; 02E5 _ C5 19: EF. DC + vpxor xmm12, xmm13, xmm8 ; 02E9 _ C4 41 11: EF. E0 + vpslld xmm4, xmm9, 29 ; 02EE _ C4 C1 59: 72. F1, 1D + vpxor xmm5, xmm12, xmm9 ; 02F4 _ C4 C1 19: EF. E9 + vpsrld xmm9, xmm9, 3 ; 02F9 _ C4 C1 31: 72. D1, 03 + vpslld xmm10, xmm8, 19 ; 02FF _ C4 C1 29: 72. F0, 13 + vpsrld xmm8, xmm8, 13 ; 0305 _ C4 C1 39: 72. D0, 0D + vpor xmm13, xmm4, xmm9 ; 030B _ C4 41 59: EB. E9 + vpor xmm9, xmm10, xmm8 ; 0310 _ C4 41 29: EB. C8 + vpxor xmm4, xmm9, xmm13 ; 0315 _ C4 C1 31: EF. E5 + vpxor xmm10, xmm11, xmm5 ; 031A _ C5 21: EF. D5 + vpand xmm12, xmm13, xmm4 ; 031E _ C5 11: DB. E4 + vpxor xmm11, xmm13, xmm11 ; 0322 _ C4 41 11: EF. DB + vpxor xmm8, xmm12, xmm6 ; 0327 _ C5 19: EF. C6 + vpor xmm9, xmm11, xmm4 ; 032B _ C5 21: EB. CC + vpxor xmm8, xmm8, xmm10 ; 032F _ C4 41 39: EF. C2 + vpxor xmm13, xmm10, xmm9 ; 0334 _ C4 41 29: EF. E9 + vmovd xmm10, dword [r12+1460H] ; 0339 _ C4 41 79: 6E. 94 24, 00001460 + vpxor xmm11, xmm4, xmm8 ; 0343 _ C4 41 59: EF. D8 + vpand xmm4, xmm5, xmm13 ; 0348 _ C4 C1 51: DB. E5 + vpxor xmm9, xmm9, xmm5 ; 034D _ C5 31: EF. CD + vpxor xmm12, xmm4, xmm11 ; 0351 _ C4 41 59: EF. E3 + vpxor xmm5, xmm11, xmm13 ; 0356 _ C4 C1 21: EF. ED + vpshufd xmm4, xmm10, 0 ; 035B _ C4 C1 79: 70. E2, 00 + vpxor xmm10, xmm12, xmm4 ; 0361 _ C5 19: EF. D4 + vpxor xmm12, xmm13, xmm12 ; 0365 _ C4 41 11: EF. E4 + vmovd xmm4, dword [r12+1464H] ; 036A _ C4 C1 79: 6E. A4 24, 00001464 + vpshufd xmm4, xmm4, 0 ; 0374 _ C5 F9: 70. E4, 00 + vmovd xmm11, dword [r12+1468H] ; 0379 _ C4 41 79: 6E. 9C 24, 00001468 + vpxor xmm4, xmm8, xmm4 ; 0383 _ C5 B9: EF. E4 + vpor xmm8, xmm5, xmm8 ; 0387 _ C4 41 51: EB. C0 + vpshufd xmm5, xmm11, 0 ; 038C _ C4 C1 79: 70. EB, 00 + vpxor xmm9, xmm9, xmm8 ; 0392 _ C4 41 31: EF. C8 + vmovd xmm13, dword [r12+146CH] ; 0397 _ C4 41 79: 6E. AC 24, 0000146C + vpxor xmm8, xmm9, xmm5 ; 03A1 _ C5 31: EF. C5 + vpshufd xmm11, xmm13, 0 ; 03A5 _ C4 41 79: 70. DD, 00 + vpslld xmm13, xmm8, 10 ; 03AB _ C4 C1 11: 72. F0, 0A + vpsrld xmm8, xmm8, 22 ; 03B1 _ C4 C1 39: 72. D0, 16 + vpxor xmm11, xmm12, xmm11 ; 03B7 _ C4 41 19: EF. DB + vpor xmm9, xmm13, xmm8 ; 03BC _ C4 41 11: EB. C8 + vpslld xmm8, xmm10, 27 ; 03C1 _ C4 C1 39: 72. F2, 1B + vpsrld xmm10, xmm10, 5 ; 03C7 _ C4 C1 29: 72. D2, 05 + vpxor xmm12, xmm9, xmm11 ; 03CD _ C4 41 31: EF. E3 + vpor xmm8, xmm8, xmm10 ; 03D2 _ C4 41 39: EB. C2 + vpslld xmm5, xmm4, 7 ; 03D7 _ C5 D1: 72. F4, 07 + vpxor xmm9, xmm8, xmm4 ; 03DC _ C5 39: EF. CC + vpxor xmm13, xmm12, xmm5 ; 03E0 _ C5 19: EF. ED + vpxor xmm8, xmm9, xmm11 ; 03E4 _ C4 41 31: EF. C3 + vpslld xmm12, xmm11, 25 ; 03E9 _ C4 C1 19: 72. F3, 19 + vpsrld xmm11, xmm11, 7 ; 03EF _ C4 C1 21: 72. D3, 07 + vpslld xmm9, xmm8, 19 ; 03F5 _ C4 C1 31: 72. F0, 13 + vpor xmm5, xmm12, xmm11 ; 03FB _ C4 C1 19: EB. EB + vpslld xmm11, xmm8, 3 ; 0400 _ C4 C1 21: 72. 
F0, 03 + vpxor xmm10, xmm5, xmm13 ; 0406 _ C4 41 51: EF. D5 + vpsrld xmm12, xmm8, 13 ; 040B _ C4 C1 19: 72. D0, 0D + vpslld xmm5, xmm4, 31 ; 0411 _ C5 D1: 72. F4, 1F + vpsrld xmm4, xmm4, 1 ; 0416 _ C5 D9: 72. D4, 01 + vpxor xmm11, xmm10, xmm11 ; 041B _ C4 41 29: EF. DB + vpor xmm10, xmm9, xmm12 ; 0420 _ C4 41 31: EB. D4 + vpor xmm9, xmm5, xmm4 ; 0425 _ C5 51: EB. CC + vpslld xmm4, xmm13, 29 ; 0429 _ C4 C1 59: 72. F5, 1D + vpxor xmm8, xmm9, xmm8 ; 042F _ C4 41 31: EF. C0 + vpxor xmm12, xmm8, xmm13 ; 0434 _ C4 41 39: EF. E5 + vpsrld xmm13, xmm13, 3 ; 0439 _ C4 C1 11: 72. D5, 03 + vpxor xmm5, xmm12, xmm6 ; 043F _ C5 99: EF. EE + vpor xmm8, xmm4, xmm13 ; 0443 _ C4 41 59: EB. C5 + vpxor xmm12, xmm8, xmm5 ; 0448 _ C5 39: EF. E5 + vpor xmm13, xmm11, xmm10 ; 044C _ C4 41 21: EB. EA + vpxor xmm8, xmm13, xmm12 ; 0451 _ C4 41 11: EF. C4 + vpxor xmm9, xmm11, xmm8 ; 0456 _ C4 41 21: EF. C8 + vpor xmm11, xmm12, xmm5 ; 045B _ C5 19: EB. DD + vpand xmm11, xmm11, xmm10 ; 045F _ C4 41 21: DB. DA + vpxor xmm4, xmm11, xmm9 ; 0464 _ C4 C1 21: EF. E1 + vpor xmm9, xmm9, xmm10 ; 0469 _ C4 41 31: EB. CA + vpand xmm13, xmm5, xmm4 ; 046E _ C5 51: DB. EC + vpxor xmm5, xmm9, xmm5 ; 0472 _ C5 B1: EF. ED + vmovd xmm12, dword [r12+1450H] ; 0476 _ C4 41 79: 6E. A4 24, 00001450 + vpxor xmm13, xmm13, xmm8 ; 0480 _ C4 41 11: EF. E8 + vpxor xmm5, xmm5, xmm4 ; 0485 _ C5 D1: EF. EC + vpshufd xmm11, xmm12, 0 ; 0489 _ C4 41 79: 70. DC, 00 + vpxor xmm9, xmm5, xmm13 ; 048F _ C4 41 51: EF. CD + vpand xmm8, xmm8, xmm5 ; 0494 _ C5 39: DB. C5 + vpxor xmm13, xmm13, xmm11 ; 0498 _ C4 41 11: EF. EB + vmovd xmm11, dword [r12+1454H] ; 049D _ C4 41 79: 6E. 9C 24, 00001454 + vpxor xmm12, xmm9, xmm6 ; 04A7 _ C5 31: EF. E6 + vpxor xmm9, xmm8, xmm9 ; 04AB _ C4 41 39: EF. C9 + vpshufd xmm11, xmm11, 0 ; 04B0 _ C4 41 79: 70. DB, 00 + vpxor xmm8, xmm9, xmm10 ; 04B6 _ C4 41 31: EF. C2 + vmovd xmm10, dword [r12+1458H] ; 04BB _ C4 41 79: 6E. 94 24, 00001458 + vpxor xmm12, xmm12, xmm11 ; 04C5 _ C4 41 19: EF. E3 + vpshufd xmm11, xmm10, 0 ; 04CA _ C4 41 79: 70. DA, 00 + vpxor xmm10, xmm8, xmm11 ; 04D0 _ C4 41 39: EF. D3 + vmovd xmm9, dword [r12+145CH] ; 04D5 _ C4 41 79: 6E. 8C 24, 0000145C + vpslld xmm8, xmm10, 10 ; 04DF _ C4 C1 39: 72. F2, 0A + vpsrld xmm11, xmm10, 22 ; 04E5 _ C4 C1 21: 72. D2, 16 + vpslld xmm10, xmm12, 7 ; 04EB _ C4 C1 29: 72. F4, 07 + vpshufd xmm5, xmm9, 0 ; 04F1 _ C4 C1 79: 70. E9, 00 + vpor xmm9, xmm8, xmm11 ; 04F7 _ C4 41 39: EB. CB + vpslld xmm8, xmm13, 27 ; 04FC _ C4 C1 39: 72. F5, 1B + vpsrld xmm13, xmm13, 5 ; 0502 _ C4 C1 11: 72. D5, 05 + vpxor xmm4, xmm4, xmm5 ; 0508 _ C5 D9: EF. E5 + vpor xmm8, xmm8, xmm13 ; 050C _ C4 41 39: EB. C5 + vpxor xmm5, xmm9, xmm4 ; 0511 _ C5 B1: EF. EC + vpxor xmm13, xmm8, xmm12 ; 0515 _ C4 41 39: EF. EC + vpxor xmm11, xmm5, xmm10 ; 051A _ C4 41 51: EF. DA + vpxor xmm5, xmm13, xmm4 ; 051F _ C5 91: EF. EC + vpslld xmm9, xmm4, 25 ; 0523 _ C5 B1: 72. F4, 19 + vpsrld xmm4, xmm4, 7 ; 0528 _ C5 D9: 72. D4, 07 + vpor xmm8, xmm9, xmm4 ; 052D _ C5 31: EB. C4 + vpslld xmm10, xmm12, 31 ; 0531 _ C4 C1 29: 72. F4, 1F + vpsrld xmm12, xmm12, 1 ; 0537 _ C4 C1 19: 72. D4, 01 + vpxor xmm13, xmm8, xmm11 ; 053D _ C4 41 39: EF. EB + vpslld xmm9, xmm5, 3 ; 0542 _ C5 B1: 72. F5, 03 + vpor xmm8, xmm10, xmm12 ; 0547 _ C4 41 29: EB. C4 + vpxor xmm4, xmm13, xmm9 ; 054C _ C4 C1 11: EF. E1 + vpxor xmm13, xmm8, xmm5 ; 0551 _ C5 39: EF. ED + vpxor xmm8, xmm13, xmm11 ; 0555 _ C4 41 11: EF. C3 + vpslld xmm9, xmm11, 29 ; 055A _ C4 C1 31: 72. F3, 1D + vpsrld xmm11, xmm11, 3 ; 0560 _ C4 C1 21: 72. 
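+; In these inverse rounds the rotate pairs run with complementary counts
+; (10/22, 27/5, 25/7, 31/1, 19/13, 29/3), i.e. right rotations by 22, 5,
+; 7, 1, 13 and 3: the exact inverse of the encrypt-side linear transform,
+; undone before each inverse S-box. A scalar C sketch, with ROTR32(x,n)
+; defined as ((x >> n) | (x << (32 - n))) (names illustrative, not from
+; this source):
+;     x2 = ROTR32(x2, 22);   x0 = ROTR32(x0, 5);
+;     x2 ^= x3 ^ (x1 << 7);  x0 ^= x1 ^ x3;
+;     x3 = ROTR32(x3, 7);    x1 = ROTR32(x1, 1);
+;     x3 ^= x2 ^ (x0 << 3);  x1 ^= x0 ^ x2;
+;     x2 = ROTR32(x2, 3);    x0 = ROTR32(x0, 13);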
[Continuation of the machine-generated YASM hunk: roughly 1,400 more VEX-encoded xmm instructions, one per source line, each carrying a disassembler byte-encoding comment of the form "; <offset> _ <opcode bytes>". The code is a four-way parallel Serpent decryption round loop (one 32-bit state word per xmm lane); the rotate constants match Serpent's inverse linear transformation. Per round it applies that inverse transformation as paired vpslld/vpsrld rotates (right-rotations by 22, 5, 7, 1, 13 and 3, plus left shifts by 7 and 3), evaluates an inverse S-box as a branch-free vpand/vpor/vpxor network, and XORs in four subkey words broadcast with vmovd + vpshufd from descending key-schedule offsets, [r12+1440H] down through [r12+12D0H] in this stretch.]
EB + vpslld xmm8, xmm9, 29 ; 2297 _ C4 C1 39: 72. F1, 1D + vpxor xmm10, xmm13, xmm9 ; 229D _ C4 41 11: EF. D1 + vpsrld xmm9, xmm9, 3 ; 22A2 _ C4 C1 31: 72. D1, 03 + vpor xmm12, xmm8, xmm9 ; 22A8 _ C4 41 39: EB. E1 + vpslld xmm5, xmm11, 19 ; 22AD _ C4 C1 51: 72. F3, 13 + vpsrld xmm11, xmm11, 13 ; 22B3 _ C4 C1 21: 72. D3, 0D + vpor xmm13, xmm5, xmm11 ; 22B9 _ C4 41 51: EB. EB + vpand xmm5, xmm12, xmm4 ; 22BE _ C5 99: DB. EC + vpxor xmm11, xmm5, xmm10 ; 22C2 _ C4 41 51: EF. DA + vpor xmm10, xmm10, xmm4 ; 22C7 _ C5 29: EB. D4 + vpand xmm9, xmm10, xmm13 ; 22CB _ C4 41 29: DB. CD + vpxor xmm12, xmm12, xmm11 ; 22D0 _ C4 41 19: EF. E3 + vpxor xmm12, xmm12, xmm9 ; 22D5 _ C4 41 19: EF. E1 + vpxor xmm5, xmm13, xmm6 ; 22DA _ C5 91: EF. EE + vpxor xmm4, xmm4, xmm12 ; 22DE _ C4 C1 59: EF. E4 + vpand xmm13, xmm9, xmm11 ; 22E3 _ C4 41 31: DB. EB + vpxor xmm10, xmm13, xmm4 ; 22E8 _ C5 11: EF. D4 + vpand xmm9, xmm4, xmm5 ; 22EC _ C5 59: DB. CD + vmovd xmm4, dword [r12+12C0H] ; 22F0 _ C4 C1 79: 6E. A4 24, 000012C0 + vpxor xmm8, xmm5, xmm10 ; 22FA _ C4 41 51: EF. C2 + vpxor xmm5, xmm9, xmm11 ; 22FF _ C4 C1 31: EF. EB + vpshufd xmm9, xmm4, 0 ; 2304 _ C5 79: 70. CC, 00 + vpxor xmm13, xmm5, xmm8 ; 2309 _ C4 41 51: EF. E8 + vpxor xmm9, xmm8, xmm9 ; 230E _ C4 41 39: EF. C9 + vpxor xmm5, xmm13, xmm8 ; 2313 _ C4 C1 11: EF. E8 + vpand xmm8, xmm11, xmm8 ; 2318 _ C4 41 21: DB. C0 + vpxor xmm11, xmm8, xmm12 ; 231D _ C4 41 39: EF. DC + vmovd xmm4, dword [r12+12C4H] ; 2322 _ C4 C1 79: 6E. A4 24, 000012C4 + vpor xmm13, xmm11, xmm13 ; 232C _ C4 41 21: EB. ED + vpshufd xmm4, xmm4, 0 ; 2331 _ C5 F9: 70. E4, 00 + vpxor xmm13, xmm13, xmm10 ; 2336 _ C4 41 11: EF. EA + vmovd xmm10, dword [r12+12C8H] ; 233B _ C4 41 79: 6E. 94 24, 000012C8 + vpxor xmm5, xmm5, xmm4 ; 2345 _ C5 D1: EF. EC + vmovd xmm4, dword [r12+12CCH] ; 2349 _ C4 C1 79: 6E. A4 24, 000012CC + vpshufd xmm8, xmm10, 0 ; 2353 _ C4 41 79: 70. C2, 00 + vpshufd xmm11, xmm4, 0 ; 2359 _ C5 79: 70. DC, 00 + vpxor xmm10, xmm13, xmm8 ; 235E _ C4 41 11: EF. D0 + vpxor xmm13, xmm12, xmm11 ; 2363 _ C4 41 19: EF. EB + vpslld xmm12, xmm10, 10 ; 2368 _ C4 C1 19: 72. F2, 0A + vpsrld xmm8, xmm10, 22 ; 236E _ C4 C1 39: 72. D2, 16 + vpslld xmm10, xmm9, 27 ; 2374 _ C4 C1 29: 72. F1, 1B + vpsrld xmm9, xmm9, 5 ; 237A _ C4 C1 31: 72. D1, 05 + vpor xmm4, xmm12, xmm8 ; 2380 _ C4 C1 19: EB. E0 + vpor xmm8, xmm10, xmm9 ; 2385 _ C4 41 29: EB. C1 + vpxor xmm11, xmm4, xmm13 ; 238A _ C4 41 59: EF. DD + vpslld xmm12, xmm5, 7 ; 238F _ C5 99: 72. F5, 07 + vpxor xmm4, xmm8, xmm5 ; 2394 _ C5 B9: EF. E5 + vpxor xmm11, xmm11, xmm12 ; 2398 _ C4 41 21: EF. DC + vpxor xmm4, xmm4, xmm13 ; 239D _ C4 C1 59: EF. E5 + vpslld xmm12, xmm13, 25 ; 23A2 _ C4 C1 19: 72. F5, 19 + vpsrld xmm13, xmm13, 7 ; 23A8 _ C4 C1 11: 72. D5, 07 + vpor xmm10, xmm12, xmm13 ; 23AE _ C4 41 19: EB. D5 + vpslld xmm8, xmm5, 31 ; 23B3 _ C5 B9: 72. F5, 1F + vpsrld xmm5, xmm5, 1 ; 23B8 _ C5 D1: 72. D5, 01 + vpxor xmm9, xmm10, xmm11 ; 23BD _ C4 41 29: EF. CB + vpslld xmm13, xmm4, 3 ; 23C2 _ C5 91: 72. F4, 03 + vpor xmm5, xmm8, xmm5 ; 23C7 _ C5 B9: EB. ED + vpxor xmm9, xmm9, xmm13 ; 23CB _ C4 41 31: EF. CD + vpxor xmm13, xmm5, xmm4 ; 23D0 _ C5 51: EF. EC + vpxor xmm13, xmm13, xmm11 ; 23D4 _ C4 41 11: EF. EB + vpslld xmm8, xmm11, 29 ; 23D9 _ C4 C1 39: 72. F3, 1D + vpsrld xmm11, xmm11, 3 ; 23DF _ C4 C1 21: 72. D3, 03 + vpor xmm8, xmm8, xmm11 ; 23E5 _ C4 41 39: EB. C3 + vpslld xmm11, xmm4, 19 ; 23EA _ C5 A1: 72. F4, 13 + vpsrld xmm4, xmm4, 13 ; 23EF _ C5 D9: 72. D4, 0D + vpxor xmm5, xmm8, xmm13 ; 23F4 _ C4 C1 39: EF. 
ED + vpor xmm12, xmm11, xmm4 ; 23F9 _ C5 21: EB. E4 + vpand xmm8, xmm8, xmm5 ; 23FD _ C5 39: DB. C5 + vpxor xmm12, xmm12, xmm5 ; 2401 _ C5 19: EF. E5 + vpxor xmm4, xmm8, xmm12 ; 2405 _ C4 C1 39: EF. E4 + vpand xmm8, xmm12, xmm13 ; 240A _ C4 41 19: DB. C5 + vpor xmm10, xmm9, xmm4 ; 240F _ C5 31: EB. D4 + vpxor xmm13, xmm13, xmm9 ; 2413 _ C4 41 11: EF. E9 + vpxor xmm5, xmm5, xmm10 ; 2418 _ C4 C1 51: EF. EA + vpxor xmm12, xmm8, xmm10 ; 241D _ C4 41 39: EF. E2 + vpxor xmm9, xmm13, xmm4 ; 2422 _ C5 11: EF. CC + vpand xmm10, xmm10, xmm5 ; 2426 _ C5 29: DB. D5 + vpxor xmm11, xmm10, xmm9 ; 242A _ C4 41 29: EF. D9 + vpxor xmm13, xmm9, xmm12 ; 242F _ C4 41 31: EF. EC + vmovd xmm10, dword [r12+12B0H] ; 2434 _ C4 41 79: 6E. 94 24, 000012B0 + vpor xmm8, xmm13, xmm5 ; 243E _ C5 11: EB. C5 + vpshufd xmm9, xmm10, 0 ; 2442 _ C4 41 79: 70. CA, 00 + vpxor xmm4, xmm8, xmm4 ; 2448 _ C5 B9: EF. E4 + vmovd xmm10, dword [r12+12B8H] ; 244C _ C4 41 79: 6E. 94 24, 000012B8 + vpxor xmm5, xmm5, xmm9 ; 2456 _ C4 C1 51: EF. E9 + vmovd xmm13, dword [r12+12B4H] ; 245B _ C4 41 79: 6E. AC 24, 000012B4 + vpshufd xmm9, xmm10, 0 ; 2465 _ C4 41 79: 70. CA, 00 + vpshufd xmm8, xmm13, 0 ; 246B _ C4 41 79: 70. C5, 00 + vpxor xmm9, xmm11, xmm9 ; 2471 _ C4 41 21: EF. C9 + vpxor xmm11, xmm12, xmm11 ; 2476 _ C4 41 19: EF. DB + vpxor xmm13, xmm4, xmm8 ; 247B _ C4 41 59: EF. E8 + vpxor xmm12, xmm11, xmm4 ; 2480 _ C5 21: EF. E4 + vpsrld xmm11, xmm9, 22 ; 2484 _ C4 C1 21: 72. D1, 16 + vmovd xmm4, dword [r12+12BCH] ; 248A _ C4 C1 79: 6E. A4 24, 000012BC + vpshufd xmm8, xmm4, 0 ; 2494 _ C5 79: 70. C4, 00 + vpslld xmm4, xmm9, 10 ; 2499 _ C4 C1 59: 72. F1, 0A + vpxor xmm8, xmm12, xmm8 ; 249F _ C4 41 19: EF. C0 + vpor xmm12, xmm4, xmm11 ; 24A4 _ C4 41 59: EB. E3 + vpslld xmm4, xmm5, 27 ; 24A9 _ C5 D9: 72. F5, 1B + vpsrld xmm5, xmm5, 5 ; 24AE _ C5 D1: 72. D5, 05 + vpor xmm5, xmm4, xmm5 ; 24B3 _ C5 D9: EB. ED + vpxor xmm10, xmm12, xmm8 ; 24B7 _ C4 41 19: EF. D0 + vpxor xmm4, xmm5, xmm13 ; 24BC _ C4 C1 51: EF. E5 + vpslld xmm9, xmm13, 7 ; 24C1 _ C4 C1 31: 72. F5, 07 + vpxor xmm4, xmm4, xmm8 ; 24C7 _ C4 C1 59: EF. E0 + vpslld xmm11, xmm8, 25 ; 24CC _ C4 C1 21: 72. F0, 19 + vpsrld xmm8, xmm8, 7 ; 24D2 _ C4 C1 39: 72. D0, 07 + vpxor xmm10, xmm10, xmm9 ; 24D8 _ C4 41 29: EF. D1 + vpor xmm12, xmm11, xmm8 ; 24DD _ C4 41 21: EB. E0 + vpslld xmm5, xmm4, 3 ; 24E2 _ C5 D1: 72. F4, 03 + vpxor xmm9, xmm12, xmm10 ; 24E7 _ C4 41 19: EF. CA + vpslld xmm8, xmm13, 31 ; 24EC _ C4 C1 39: 72. F5, 1F + vpsrld xmm13, xmm13, 1 ; 24F2 _ C4 C1 11: 72. D5, 01 + vpxor xmm12, xmm9, xmm5 ; 24F8 _ C5 31: EF. E5 + vpor xmm5, xmm8, xmm13 ; 24FC _ C4 C1 39: EB. ED + vpslld xmm8, xmm4, 19 ; 2501 _ C5 B9: 72. F4, 13 + vpxor xmm13, xmm5, xmm4 ; 2506 _ C5 51: EF. EC + vpsrld xmm4, xmm4, 13 ; 250A _ C5 D9: 72. D4, 0D + vpxor xmm11, xmm13, xmm10 ; 250F _ C4 41 11: EF. DA + vpslld xmm9, xmm10, 29 ; 2514 _ C4 C1 31: 72. F2, 1D + vpsrld xmm10, xmm10, 3 ; 251A _ C4 C1 29: 72. D2, 03 + vpor xmm8, xmm8, xmm4 ; 2520 _ C5 39: EB. C4 + vpor xmm5, xmm9, xmm10 ; 2524 _ C4 C1 31: EB. EA + vpxor xmm4, xmm5, xmm12 ; 2529 _ C4 C1 51: EF. E4 + vpxor xmm5, xmm12, xmm8 ; 252E _ C4 C1 19: EF. E8 + vpand xmm12, xmm5, xmm4 ; 2533 _ C5 51: DB. E4 + vpxor xmm13, xmm12, xmm11 ; 2537 _ C4 41 19: EF. EB + vpor xmm11, xmm11, xmm4 ; 253C _ C5 21: EB. DC + vpxor xmm12, xmm11, xmm5 ; 2540 _ C5 21: EF. E5 + vpand xmm5, xmm5, xmm13 ; 2544 _ C4 C1 51: DB. ED + vpxor xmm10, xmm4, xmm13 ; 2549 _ C4 41 59: EF. D5 + vpand xmm4, xmm5, xmm8 ; 254E _ C4 C1 51: DB. E0 + vmovd xmm9, dword [r12+12A0H] ; 2553 _ C4 41 79: 6E. 
8C 24, 000012A0 + vpxor xmm11, xmm4, xmm10 ; 255D _ C4 41 59: EF. DA + vmovd xmm4, dword [r12+12A4H] ; 2562 _ C4 C1 79: 6E. A4 24, 000012A4 + vpand xmm10, xmm10, xmm12 ; 256C _ C4 41 29: DB. D4 + vpshufd xmm5, xmm9, 0 ; 2571 _ C4 C1 79: 70. E9, 00 + vpxor xmm13, xmm13, xmm6 ; 2577 _ C5 11: EF. EE + vpshufd xmm9, xmm4, 0 ; 257B _ C5 79: 70. CC, 00 + vpor xmm10, xmm10, xmm8 ; 2580 _ C4 41 29: EB. D0 + vpxor xmm4, xmm11, xmm9 ; 2585 _ C4 C1 21: EF. E1 + vpxor xmm10, xmm10, xmm13 ; 258A _ C4 41 29: EF. D5 + vpxor xmm11, xmm13, xmm11 ; 258F _ C4 41 11: EF. DB + vpxor xmm13, xmm8, xmm13 ; 2594 _ C4 41 39: EF. ED + vmovd xmm9, dword [r12+12A8H] ; 2599 _ C4 41 79: 6E. 8C 24, 000012A8 + vpand xmm8, xmm13, xmm12 ; 25A3 _ C4 41 11: DB. C4 + vpshufd xmm9, xmm9, 0 ; 25A8 _ C4 41 79: 70. C9, 00 + vpxor xmm5, xmm12, xmm5 ; 25AE _ C5 99: EF. ED + vpxor xmm12, xmm11, xmm8 ; 25B2 _ C4 41 21: EF. E0 + vpxor xmm10, xmm10, xmm9 ; 25B7 _ C4 41 29: EF. D1 + vmovd xmm11, dword [r12+12ACH] ; 25BC _ C4 41 79: 6E. 9C 24, 000012AC + vpshufd xmm13, xmm11, 0 ; 25C6 _ C4 41 79: 70. EB, 00 + vpslld xmm11, xmm10, 10 ; 25CC _ C4 C1 21: 72. F2, 0A + vpxor xmm8, xmm12, xmm13 ; 25D2 _ C4 41 19: EF. C5 + vpsrld xmm12, xmm10, 22 ; 25D7 _ C4 C1 19: 72. D2, 16 + vpor xmm10, xmm11, xmm12 ; 25DD _ C4 41 21: EB. D4 + vpslld xmm11, xmm5, 27 ; 25E2 _ C5 A1: 72. F5, 1B + vpsrld xmm5, xmm5, 5 ; 25E7 _ C5 D1: 72. D5, 05 + vpxor xmm9, xmm10, xmm8 ; 25EC _ C4 41 29: EF. C8 + vpor xmm5, xmm11, xmm5 ; 25F1 _ C5 A1: EB. ED + vpslld xmm13, xmm4, 7 ; 25F5 _ C5 91: 72. F4, 07 + vpxor xmm11, xmm5, xmm4 ; 25FA _ C5 51: EF. DC + vpslld xmm12, xmm8, 25 ; 25FE _ C4 C1 19: 72. F0, 19 + vpxor xmm11, xmm11, xmm8 ; 2604 _ C4 41 21: EF. D8 + vpsrld xmm8, xmm8, 7 ; 2609 _ C4 C1 39: 72. D0, 07 + vpxor xmm13, xmm9, xmm13 ; 260F _ C4 41 31: EF. ED + vpor xmm5, xmm12, xmm8 ; 2614 _ C4 C1 19: EB. E8 + vpslld xmm10, xmm4, 31 ; 2619 _ C5 A9: 72. F4, 1F + vpsrld xmm4, xmm4, 1 ; 261E _ C5 D9: 72. D4, 01 + vpxor xmm8, xmm5, xmm13 ; 2623 _ C4 41 51: EF. C5 + vpor xmm5, xmm10, xmm4 ; 2628 _ C5 A9: EB. EC + vpslld xmm12, xmm11, 3 ; 262C _ C4 C1 19: 72. F3, 03 + vpxor xmm4, xmm5, xmm11 ; 2632 _ C4 C1 51: EF. E3 + vpxor xmm8, xmm8, xmm12 ; 2637 _ C4 41 39: EF. C4 + vpxor xmm9, xmm4, xmm13 ; 263C _ C4 41 59: EF. CD + vpslld xmm10, xmm11, 19 ; 2641 _ C4 C1 29: 72. F3, 13 + vpsrld xmm11, xmm11, 13 ; 2647 _ C4 C1 21: 72. D3, 0D + vpslld xmm12, xmm13, 29 ; 264D _ C4 C1 19: 72. F5, 1D + vpsrld xmm13, xmm13, 3 ; 2653 _ C4 C1 11: 72. D5, 03 + vpor xmm4, xmm10, xmm11 ; 2659 _ C4 C1 29: EB. E3 + vpxor xmm11, xmm9, xmm8 ; 265E _ C4 41 31: EF. D8 + vpor xmm5, xmm12, xmm13 ; 2663 _ C4 C1 19: EB. ED + vpand xmm13, xmm8, xmm11 ; 2668 _ C4 41 39: DB. EB + vpxor xmm9, xmm9, xmm5 ; 266D _ C5 31: EF. CD + vpxor xmm8, xmm13, xmm4 ; 2671 _ C5 11: EF. C4 + vpor xmm4, xmm4, xmm11 ; 2675 _ C4 C1 59: EB. E3 + vpxor xmm5, xmm5, xmm8 ; 267A _ C4 C1 51: EF. E8 + vpxor xmm10, xmm11, xmm8 ; 267F _ C4 41 21: EF. D0 + vpxor xmm11, xmm4, xmm9 ; 2684 _ C4 41 59: EF. D9 + vpor xmm12, xmm11, xmm5 ; 2689 _ C5 21: EB. E5 + vpxor xmm9, xmm9, xmm6 ; 268D _ C5 31: EF. CE + vpxor xmm4, xmm12, xmm10 ; 2691 _ C4 C1 19: EF. E2 + vpor xmm10, xmm10, xmm8 ; 2696 _ C4 41 29: EB. D0 + vmovd xmm11, dword [r12+1290H] ; 269B _ C4 41 79: 6E. 9C 24, 00001290 + vpxor xmm12, xmm10, xmm4 ; 26A5 _ C5 29: EF. E4 + vpshufd xmm10, xmm11, 0 ; 26A9 _ C4 41 79: 70. D3, 00 + vpxor xmm13, xmm9, xmm12 ; 26AF _ C4 41 31: EF. EC + vmovd xmm11, dword [r12+1294H] ; 26B4 _ C4 41 79: 6E. 
9C 24, 00001294 + vpxor xmm9, xmm13, xmm10 ; 26BE _ C4 41 11: EF. CA + vpshufd xmm10, xmm11, 0 ; 26C3 _ C4 41 79: 70. D3, 00 + vpor xmm12, xmm12, xmm4 ; 26C9 _ C5 19: EB. E4 + vpxor xmm11, xmm4, xmm10 ; 26CD _ C4 41 59: EF. DA + vpxor xmm4, xmm12, xmm4 ; 26D2 _ C5 99: EF. E4 + vpor xmm13, xmm4, xmm13 ; 26D6 _ C4 41 59: EB. ED + vpxor xmm13, xmm8, xmm13 ; 26DB _ C4 41 39: EF. ED + vmovd xmm8, dword [r12+1298H] ; 26E0 _ C4 41 79: 6E. 84 24, 00001298 + vmovd xmm4, dword [r12+129CH] ; 26EA _ C4 C1 79: 6E. A4 24, 0000129C + vpshufd xmm8, xmm8, 0 ; 26F4 _ C4 41 79: 70. C0, 00 + vpshufd xmm12, xmm4, 0 ; 26FA _ C5 79: 70. E4, 00 + vpxor xmm13, xmm13, xmm8 ; 26FF _ C4 41 11: EF. E8 + vpxor xmm10, xmm5, xmm12 ; 2704 _ C4 41 51: EF. D4 + vpslld xmm5, xmm13, 10 ; 2709 _ C4 C1 51: 72. F5, 0A + vpsrld xmm13, xmm13, 22 ; 270F _ C4 C1 11: 72. D5, 16 + vpslld xmm12, xmm9, 27 ; 2715 _ C4 C1 19: 72. F1, 1B + vpor xmm5, xmm5, xmm13 ; 271B _ C4 C1 51: EB. ED + vpsrld xmm9, xmm9, 5 ; 2720 _ C4 C1 31: 72. D1, 05 + vpxor xmm8, xmm5, xmm10 ; 2726 _ C4 41 51: EF. C2 + vpor xmm5, xmm12, xmm9 ; 272B _ C4 C1 19: EB. E9 + vpslld xmm4, xmm11, 7 ; 2730 _ C4 C1 59: 72. F3, 07 + vpxor xmm13, xmm5, xmm11 ; 2736 _ C4 41 51: EF. EB + vpxor xmm8, xmm8, xmm4 ; 273B _ C5 39: EF. C4 + vpxor xmm9, xmm13, xmm10 ; 273F _ C4 41 11: EF. CA + vpslld xmm4, xmm10, 25 ; 2744 _ C4 C1 59: 72. F2, 19 + vpsrld xmm10, xmm10, 7 ; 274A _ C4 C1 29: 72. D2, 07 + vpor xmm5, xmm4, xmm10 ; 2750 _ C4 C1 59: EB. EA + vpslld xmm12, xmm11, 31 ; 2755 _ C4 C1 19: 72. F3, 1F + vpsrld xmm11, xmm11, 1 ; 275B _ C4 C1 21: 72. D3, 01 + vpxor xmm13, xmm5, xmm8 ; 2761 _ C4 41 51: EF. E8 + vpslld xmm4, xmm9, 3 ; 2766 _ C4 C1 59: 72. F1, 03 + vpor xmm5, xmm12, xmm11 ; 276C _ C4 C1 19: EB. EB + vpxor xmm10, xmm13, xmm4 ; 2771 _ C5 11: EF. D4 + vpxor xmm13, xmm5, xmm9 ; 2775 _ C4 41 51: EF. E9 + vpxor xmm5, xmm13, xmm8 ; 277A _ C4 C1 11: EF. E8 + vpslld xmm4, xmm9, 19 ; 277F _ C4 C1 59: 72. F1, 13 + vpsrld xmm9, xmm9, 13 ; 2785 _ C4 C1 31: 72. D1, 0D + vpslld xmm13, xmm8, 29 ; 278B _ C4 C1 11: 72. F0, 1D + vpsrld xmm8, xmm8, 3 ; 2791 _ C4 C1 39: 72. D0, 03 + vpor xmm12, xmm4, xmm9 ; 2797 _ C4 41 59: EB. E1 + vpor xmm11, xmm13, xmm8 ; 279C _ C4 41 11: EB. D8 + vpxor xmm4, xmm5, xmm6 ; 27A1 _ C5 D1: EF. E6 + vpxor xmm13, xmm11, xmm6 ; 27A5 _ C5 21: EF. EE + vpor xmm5, xmm5, xmm12 ; 27A9 _ C4 C1 51: EB. EC + vpxor xmm8, xmm5, xmm13 ; 27AE _ C4 41 51: EF. C5 + vpxor xmm12, xmm12, xmm4 ; 27B3 _ C5 19: EF. E4 + vpxor xmm5, xmm8, xmm10 ; 27B7 _ C4 C1 39: EF. EA + vpor xmm13, xmm13, xmm4 ; 27BC _ C5 11: EB. EC + vpand xmm9, xmm12, xmm10 ; 27C0 _ C4 41 19: DB. CA + vpxor xmm11, xmm13, xmm12 ; 27C5 _ C4 41 11: EF. DC + vpxor xmm13, xmm4, xmm9 ; 27CA _ C4 41 59: EF. E9 + vpor xmm4, xmm9, xmm5 ; 27CF _ C5 B1: EB. E5 + vpxor xmm12, xmm4, xmm11 ; 27D3 _ C4 41 59: EF. E3 + vpxor xmm10, xmm10, xmm13 ; 27D8 _ C4 41 29: EF. D5 + vpxor xmm8, xmm10, xmm12 ; 27DD _ C4 41 29: EF. C4 + vpxor xmm10, xmm11, xmm5 ; 27E2 _ C5 21: EF. D5 + vmovd xmm4, dword [r12+1280H] ; 27E6 _ C4 C1 79: 6E. A4 24, 00001280 + vpxor xmm8, xmm8, xmm5 ; 27F0 _ C5 39: EF. C5 + vpshufd xmm9, xmm4, 0 ; 27F4 _ C5 79: 70. CC, 00 + vpand xmm11, xmm10, xmm8 ; 27F9 _ C4 41 29: DB. D8 + vpxor xmm4, xmm12, xmm9 ; 27FE _ C4 C1 19: EF. E1 + vpxor xmm9, xmm13, xmm11 ; 2803 _ C4 41 11: EF. CB + vmovd xmm12, dword [r12+1284H] ; 2808 _ C4 41 79: 6E. A4 24, 00001284 + inc r10d ; 2812 _ 41: FF. C2 + vpshufd xmm13, xmm12, 0 ; 2815 _ C4 41 79: 70. EC, 00 + add r13, 64 ; 281B _ 49: 83. 
C5, 40 + vpxor xmm11, xmm9, xmm13 ; 281F _ C4 41 31: EF. DD + vmovd xmm9, dword [r12+1288H] ; 2824 _ C4 41 79: 6E. 8C 24, 00001288 + vpshufd xmm10, xmm9, 0 ; 282E _ C4 41 79: 70. D1, 00 + vpxor xmm10, xmm5, xmm10 ; 2834 _ C4 41 51: EF. D2 + vmovd xmm5, dword [r12+128CH] ; 2839 _ C4 C1 79: 6E. AC 24, 0000128C + vpshufd xmm12, xmm5, 0 ; 2843 _ C5 79: 70. E5, 00 + vpxor xmm13, xmm8, xmm12 ; 2848 _ C4 41 39: EF. EC + vpunpckldq xmm8, xmm4, xmm11 ; 284D _ C4 41 59: 62. C3 + vpunpckldq xmm9, xmm10, xmm13 ; 2852 _ C4 41 29: 62. CD + vpunpckhdq xmm5, xmm4, xmm11 ; 2857 _ C4 C1 59: 6A. EB + vpunpcklqdq xmm11, xmm8, xmm9 ; 285C _ C4 41 39: 6C. D9 + vpxor xmm3, xmm11, xmm3 ; 2861 _ C5 A1: EF. DB + vmovdqu oword [rbp], xmm3 ; 2865 _ C5 FA: 7F. 5D, 00 + vpunpckhqdq xmm3, xmm8, xmm9 ; 286A _ C4 C1 39: 6D. D9 + vpunpckhdq xmm4, xmm10, xmm13 ; 286F _ C4 C1 29: 6A. E5 + vpxor xmm3, xmm3, xmm0 ; 2874 _ C5 E1: EF. D8 + vmovdqu oword [rbp+10H], xmm3 ; 2878 _ C5 FA: 7F. 5D, 10 + vpunpcklqdq xmm3, xmm5, xmm4 ; 287D _ C5 D1: 6C. DC + vpxor xmm2, xmm3, xmm2 ; 2881 _ C5 E1: EF. D2 +; compute the next XTS tweak (interleaved with the output stores below): +; shift the tweak in xmm1 left one bit and, when its top bit was set, xor +; in the GF(2^128) reduction constant 87H kept in xmm7 + vpsllq xmm3, xmm1, 1 ; 2885 _ C5 E1: 73. F1, 01 + vmovdqu oword [rbp+20H], xmm2 ; 288A _ C5 FA: 7F. 55, 20 + vpunpckhqdq xmm2, xmm5, xmm4 ; 288F _ C5 D1: 6D. D4 + vpxor xmm2, xmm2, xmm1 ; 2893 _ C5 E9: EF. D1 + vmovdqu oword [rbp+30H], xmm2 ; 2897 _ C5 FA: 7F. 55, 30 + vpslldq xmm2, xmm1, 8 ; 289C _ C5 E9: 73. F9, 08 + add rbp, 64 ; 28A1 _ 48: 83. C5, 40 + vpsrldq xmm2, xmm2, 7 ; 28A5 _ C5 E9: 73. DA, 07 + cmp r10d, 8 ; 28AA _ 41: 83. FA, 08 + vpsrlq xmm2, xmm2, 7 ; 28AE _ C5 E9: 73. D2, 07 + vpor xmm3, xmm3, xmm2 ; 28B3 _ C5 E1: EB. DA + vpsraw xmm2, xmm1, 8 ; 28B7 _ C5 E9: 71. E1, 08 + vpsrldq xmm2, xmm2, 15 ; 28BC _ C5 E9: 73. DA, 0F + vpand xmm2, xmm2, xmm7 ; 28C1 _ C5 E9: DB. D7 + vpxor xmm3, xmm3, xmm2 ; 28C5 _ C5 E1: EF. DA + jl ?_006 ; 28C9 _ 0F 8C, FFFFD7FB + add r14, -512 ; 28CF _ 49: 81. C6, FFFFFE00 + jne ?_004 ; 28D6 _ 0F 85, FFFFD7B9 + vmovups xmm6, oword [rsp+90H] ; 28DC _ C5 F8: 10. B4 24, 00000090 + vmovups xmm7, oword [rsp+80H] ; 28E5 _ C5 F8: 10. BC 24, 00000080 + vmovups xmm8, oword [rsp+70H] ; 28EE _ C5 78: 10. 44 24, 70 + vmovups xmm9, oword [rsp+60H] ; 28F4 _ C5 78: 10. 4C 24, 60 + vmovups xmm10, oword [rsp+50H] ; 28FA _ C5 78: 10. 54 24, 50 + vmovups xmm11, oword [rsp+40H] ; 2900 _ C5 78: 10. 5C 24, 40 + vmovups xmm12, oword [rsp+30H] ; 2906 _ C5 78: 10. 64 24, 30 + vmovups xmm13, oword [rsp+20H] ; 290C _ C5 78: 10. 6C 24, 20 + add rsp, 192 ; 2912 _ 48: 81. C4, 000000C0 + pop rbp ; 2919 _ 5D + pop r15 ; 291A _ 41: 5F + pop r14 ; 291C _ 41: 5E + pop r13 ; 291E _ 41: 5D + pop r12 ; 2920 _ 41: 5C + ret ; 2922 _ C3 +; xts_serpent_avx_decrypt End of function + +; Filling space: 0DH +; Filler type: Multi-byte NOP +; db 0FH, 1FH, 44H, 00H, 00H, 0FH, 1FH, 84H +; db 00H, 00H, 00H, 00H, 00H + +ALIGN 16 + +xts_serpent_avx_available:; Function begin +; Returns 1 in eax when AVX is usable, 0 otherwise: CPUID.1:ECX must report +; OSXSAVE and AVX (bits 27 and 28), and XCR0, read via XGETBV below, must +; show XMM and YMM state enabled by the OS (bits 1 and 2). + push rbx ; 0000 _ 53 + sub rsp, 48 ; 0001 _ 48: 83. EC, 30 + mov dword [rsp+20H], 0 ; 0005 _ C7. 44 24, 20, 00000000 + mov eax, 1 ; 000D _ B8, 00000001 + cpuid ; 0012 _ 0F A2 + and ecx, 18000000H ; 0014 _ 81. E1, 18000000 + cmp ecx, 18000000H ; 001A _ 81. F9, 18000000 + jnz ?_008 ; 0020 _ 75, 15 + xor ecx, ecx ; 0022 _ 33. C9 +?_007: +; The disassembler could not decode XGETBV (0F 01 D0) and emitted its raw +; bytes as data; the commented-out instructions show the intended decoding. +; xgetbv ; 0024 _ 0F 01. D0 + db 0FH, 01H, 0D0H +; and eax, 06H ; 0027 _ 83. E0, 06 + db 83H, 0E0H, 06H +; cmp eax, 6 ; 002A _ 83. F8, 06 + db 83H, 0F8H, 06H +; jnz ?_008 ; 002D _ 75, 08 + db 75H, 08H +; mov dword [rsp+20H], 1 ; 002F _ C7.
44 24, 20, 00000001 + db 0C7H, 44H, 24H, 20H, 01H, 00H, 00H, 00H +?_008: mov eax, dword [rsp+20H] ; 0037 _ 8B. 44 24, 20 + add rsp, 48 ; 003B _ 48: 83. C4, 30 + pop rbx ; 003F _ 5B + ret ; 0040 _ C3 +; xts_serpent_avx_available End of function + + + + diff --git a/ImBoxEnclave/crypto_fast/amd64/xts_serpent_sse2_amd64.asm b/ImBoxEnclave/crypto_fast/amd64/xts_serpent_sse2_amd64.asm new file mode 100644 index 0000000..7af55e2 --- /dev/null +++ b/ImBoxEnclave/crypto_fast/amd64/xts_serpent_sse2_amd64.asm @@ -0,0 +1,4567 @@ +; this code compiled with Intel C++ Compiler Version 11.1.835.200 +; +; Disassembly of file: xts_serpent_sse2.obj +; Mon May 10 08:16:59 2010 +; Mode: 64 bits +; Syntax: YASM/NASM +; Instruction set: SSE2, x64 + +default rel + +global xts_serpent_sse2_encrypt +global xts_serpent_sse2_decrypt +global xts_serpent_sse2_available + +extern serpent256_encrypt ; near + + +SECTION .text align=16 execute ; section number 2, code + +xts_serpent_sse2_encrypt:; Function begin + push r15 ; 0000 _ 41: 57 + push r14 ; 0002 _ 41: 56 + push r13 ; 0004 _ 41: 55 + push r12 ; 0006 _ 41: 54 + push rbp ; 0008 _ 55 + sub rsp, 160 ; 0009 _ 48: 81. EC, 000000A0 + mov rax, qword [rsp+0F0H] ; 0010 _ 48: 8B. 84 24, 000000F0 + movaps oword [rsp+70H], xmm6 ; 0018 _ 0F 29. 74 24, 70 + movaps oword [rsp+60H], xmm7 ; 001D _ 0F 29. 7C 24, 60 + movaps oword [rsp+50H], xmm8 ; 0022 _ 44: 0F 29. 44 24, 50 + movaps oword [rsp+40H], xmm9 ; 0028 _ 44: 0F 29. 4C 24, 40 + movaps oword [rsp+30H], xmm10 ; 002E _ 44: 0F 29. 54 24, 30 + movaps oword [rsp+20H], xmm11 ; 0034 _ 44: 0F 29. 5C 24, 20 + shr r9, 9 ; 003A _ 49: C1. E9, 09 + mov qword [rsp+80H], r9 ; 003E _ 4C: 89. 8C 24, 00000080 + lea r9, [rax+2710H] ; 0046 _ 4C: 8D. 88, 00002710 + mov qword [rsp+88H], 0 ; 004D _ 48: C7. 84 24, 00000088, 00000000 + mov r10d, 135 ; 0059 _ 41: BA, 00000087 + mov r12, rax ; 005F _ 49: 89. C4 + movd xmm1, r10d ; 0062 _ 66 41: 0F 6E. CA + movdqa xmm7, xmm1 ; 0067 _ 66: 0F 6F. F9 + mov rbp, rdx ; 006B _ 48: 89. D5 + mov r13, rcx ; 006E _ 49: 89. CD + mov r14, r8 ; 0071 _ 4D: 89. C6 + mov r15, r9 ; 0074 _ 4D: 89. CF + pcmpeqd xmm0, xmm0 ; 0077 _ 66: 0F 76. C0 + movdqa xmm6, xmm0 ; 007B _ 66: 0F 6F. F0 + jmp ?_002 ; 007F _ EB, 0A + +?_001: movdqa oword [rsp+90H], xmm9 ; 0081 _ 66 44: 0F 7F. 8C 24, 00000090 +?_002: inc qword [rsp+80H] ; 008B _ 48: FF. 84 24, 00000080 + lea rcx, [rsp+80H] ; 0093 _ 48: 8D. 8C 24, 00000080 + mov r8, r15 ; 009B _ 4D: 89. F8 + lea rdx, [rsp+90H] ; 009E _ 48: 8D. 94 24, 00000090 + call serpent256_encrypt ; 00A6 _ E8, 00000000(rel) + movdqa xmm9, oword [rsp+90H] ; 00AB _ 66 44: 0F 6F. 8C 24, 00000090 + xor r10d, r10d ; 00B5 _ 45: 33. D2 +?_003: movdqa xmm5, xmm9 ; 00B8 _ 66 41: 0F 6F. E9 + movdqa xmm4, xmm9 ; 00BD _ 66 41: 0F 6F. E1 + movdqa xmm3, xmm9 ; 00C2 _ 66 41: 0F 6F. D9 + movdqu xmm11, oword [r13] ; 00C7 _ F3 45: 0F 6F. 5D, 00 + movdqu xmm1, oword [r13+20H] ; 00CD _ F3 41: 0F 6F. 4D, 20 + psllq xmm5, 1 ; 00D3 _ 66: 0F 73. F5, 01 + pslldq xmm4, 8 ; 00D8 _ 66: 0F 73. FC, 08 + psrldq xmm4, 7 ; 00DD _ 66: 0F 73. DC, 07 + psrlq xmm4, 7 ; 00E2 _ 66: 0F 73. D4, 07 + por xmm5, xmm4 ; 00E7 _ 66: 0F EB. EC + psraw xmm3, 8 ; 00EB _ 66: 0F 71. E3, 08 + psrldq xmm3, 15 ; 00F0 _ 66: 0F 73. DB, 0F + pand xmm3, xmm7 ; 00F5 _ 66: 0F DB. DF + pxor xmm5, xmm3 ; 00F9 _ 66: 0F EF. EB + movdqa xmm4, xmm5 ; 00FD _ 66: 0F 6F. E5 + movdqa xmm8, xmm5 ; 0101 _ 66 44: 0F 6F. C5 + movdqa xmm10, xmm5 ; 0106 _ 66 44: 0F 6F. D5 + psllq xmm4, 1 ; 010B _ 66: 0F 73. F4, 01 + pslldq xmm8, 8 ; 0110 _ 66 41: 0F 73. 
F8, 08 + psrldq xmm8, 7 ; 0116 _ 66 41: 0F 73. D8, 07 + psrlq xmm8, 7 ; 011C _ 66 41: 0F 73. D0, 07 + por xmm4, xmm8 ; 0122 _ 66 41: 0F EB. E0 + psraw xmm10, 8 ; 0127 _ 66 41: 0F 71. E2, 08 + psrldq xmm10, 15 ; 012D _ 66 41: 0F 73. DA, 0F + pand xmm10, xmm7 ; 0133 _ 66 44: 0F DB. D7 + pxor xmm4, xmm10 ; 0138 _ 66 41: 0F EF. E2 + movdqa xmm3, xmm4 ; 013D _ 66: 0F 6F. DC + movdqa xmm8, xmm4 ; 0141 _ 66 44: 0F 6F. C4 + movdqa xmm2, xmm4 ; 0146 _ 66: 0F 6F. D4 + psllq xmm3, 1 ; 014A _ 66: 0F 73. F3, 01 + pslldq xmm8, 8 ; 014F _ 66 41: 0F 73. F8, 08 + psrldq xmm8, 7 ; 0155 _ 66 41: 0F 73. D8, 07 + psrlq xmm8, 7 ; 015B _ 66 41: 0F 73. D0, 07 + por xmm3, xmm8 ; 0161 _ 66 41: 0F EB. D8 + movdqu xmm8, oword [r13+10H] ; 0166 _ F3 45: 0F 6F. 45, 10 + psraw xmm2, 8 ; 016C _ 66: 0F 71. E2, 08 + psrldq xmm2, 15 ; 0171 _ 66: 0F 73. DA, 0F + pand xmm2, xmm7 ; 0176 _ 66: 0F DB. D7 + pxor xmm3, xmm2 ; 017A _ 66: 0F EF. DA + movdqu xmm2, oword [r13+30H] ; 017E _ F3 41: 0F 6F. 55, 30 + pxor xmm11, xmm9 ; 0184 _ 66 45: 0F EF. D9 + movdqa xmm10, xmm11 ; 0189 _ 66 45: 0F 6F. D3 + pxor xmm8, xmm5 ; 018E _ 66 44: 0F EF. C5 + pxor xmm1, xmm4 ; 0193 _ 66: 0F EF. CC + movdqa xmm0, xmm1 ; 0197 _ 66: 0F 6F. C1 + pxor xmm2, xmm3 ; 019B _ 66: 0F EF. D3 + punpckldq xmm10, xmm8 ; 019F _ 66 45: 0F 62. D0 + punpckldq xmm0, xmm2 ; 01A4 _ 66: 0F 62. C2 + punpckhdq xmm11, xmm8 ; 01A8 _ 66 45: 0F 6A. D8 + movdqa xmm8, xmm11 ; 01AD _ 66 45: 0F 6F. C3 + punpckhdq xmm1, xmm2 ; 01B2 _ 66: 0F 6A. CA + movdqa xmm2, xmm10 ; 01B6 _ 66 41: 0F 6F. D2 + punpckhqdq xmm10, xmm0 ; 01BB _ 66 44: 0F 6D. D0 + punpcklqdq xmm8, xmm1 ; 01C0 _ 66 44: 0F 6C. C1 + punpcklqdq xmm2, xmm0 ; 01C5 _ 66: 0F 6C. D0 + punpckhqdq xmm11, xmm1 ; 01C9 _ 66 44: 0F 6D. D9 + movd xmm0, dword [r12+1280H] ; 01CE _ 66 41: 0F 6E. 84 24, 00001280 + pshufd xmm0, xmm0, 0 ; 01D8 _ 66: 0F 70. C0, 00 + pxor xmm2, xmm0 ; 01DD _ 66: 0F EF. D0 + movd xmm0, dword [r12+1284H] ; 01E1 _ 66 41: 0F 6E. 84 24, 00001284 + pshufd xmm0, xmm0, 0 ; 01EB _ 66: 0F 70. C0, 00 + pxor xmm10, xmm0 ; 01F0 _ 66 44: 0F EF. D0 + movdqa xmm1, xmm10 ; 01F5 _ 66 41: 0F 6F. CA + movd xmm0, dword [r12+1288H] ; 01FA _ 66 41: 0F 6E. 84 24, 00001288 + pshufd xmm0, xmm0, 0 ; 0204 _ 66: 0F 70. C0, 00 + pxor xmm8, xmm0 ; 0209 _ 66 44: 0F EF. C0 + movd xmm0, dword [r12+128CH] ; 020E _ 66 41: 0F 6E. 84 24, 0000128C + pshufd xmm0, xmm0, 0 ; 0218 _ 66: 0F 70. C0, 00 + pxor xmm11, xmm0 ; 021D _ 66 44: 0F EF. D8 + pxor xmm11, xmm2 ; 0222 _ 66 44: 0F EF. DA + pand xmm1, xmm11 ; 0227 _ 66 41: 0F DB. CB + pxor xmm10, xmm8 ; 022C _ 66 45: 0F EF. D0 + pxor xmm1, xmm2 ; 0231 _ 66: 0F EF. CA + por xmm2, xmm11 ; 0235 _ 66 41: 0F EB. D3 + pxor xmm2, xmm10 ; 023A _ 66 41: 0F EF. D2 + pxor xmm10, xmm11 ; 023F _ 66 45: 0F EF. D3 + pxor xmm11, xmm8 ; 0244 _ 66 45: 0F EF. D8 + por xmm8, xmm1 ; 0249 _ 66 44: 0F EB. C1 + pxor xmm8, xmm10 ; 024E _ 66 45: 0F EF. C2 + pxor xmm10, xmm6 ; 0253 _ 66 44: 0F EF. D6 + por xmm10, xmm1 ; 0258 _ 66 44: 0F EB. D1 + pxor xmm1, xmm11 ; 025D _ 66 41: 0F EF. CB + pxor xmm1, xmm10 ; 0262 _ 66 41: 0F EF. CA + por xmm11, xmm2 ; 0267 _ 66 44: 0F EB. DA + pxor xmm1, xmm11 ; 026C _ 66 41: 0F EF. CB + movdqa xmm0, xmm1 ; 0271 _ 66: 0F 6F. C1 + pxor xmm10, xmm11 ; 0275 _ 66 45: 0F EF. D3 + movdqa xmm11, xmm8 ; 027A _ 66 45: 0F 6F. D8 + pslld xmm0, 13 ; 027F _ 66: 0F 72. F0, 0D + psrld xmm1, 19 ; 0284 _ 66: 0F 72. D1, 13 + por xmm0, xmm1 ; 0289 _ 66: 0F EB. C1 + pslld xmm11, 3 ; 028D _ 66 41: 0F 72. F3, 03 + psrld xmm8, 29 ; 0293 _ 66 41: 0F 72. D0, 1D + por xmm11, xmm8 ; 0299 _ 66 45: 0F EB. 
D8 + movdqa xmm8, xmm0 ; 029E _ 66 44: 0F 6F. C0 + pxor xmm10, xmm0 ; 02A3 _ 66 44: 0F EF. D0 + pxor xmm10, xmm11 ; 02A8 _ 66 45: 0F EF. D3 + movdqa xmm1, xmm10 ; 02AD _ 66 41: 0F 6F. CA + pxor xmm2, xmm11 ; 02B2 _ 66 41: 0F EF. D3 + pslld xmm8, 3 ; 02B7 _ 66 41: 0F 72. F0, 03 + pxor xmm2, xmm8 ; 02BD _ 66 41: 0F EF. D0 + movdqa xmm8, xmm2 ; 02C2 _ 66 44: 0F 6F. C2 + pslld xmm1, 1 ; 02C7 _ 66: 0F 72. F1, 01 + psrld xmm10, 31 ; 02CC _ 66 41: 0F 72. D2, 1F + por xmm1, xmm10 ; 02D2 _ 66 41: 0F EB. CA + movdqa xmm10, xmm1 ; 02D7 _ 66 44: 0F 6F. D1 + pslld xmm8, 7 ; 02DC _ 66 41: 0F 72. F0, 07 + psrld xmm2, 25 ; 02E2 _ 66: 0F 72. D2, 19 + por xmm8, xmm2 ; 02E7 _ 66 44: 0F EB. C2 + pxor xmm0, xmm1 ; 02EC _ 66: 0F EF. C1 + pxor xmm0, xmm8 ; 02F0 _ 66 41: 0F EF. C0 + pxor xmm11, xmm8 ; 02F5 _ 66 45: 0F EF. D8 + pslld xmm10, 7 ; 02FA _ 66 41: 0F 72. F2, 07 + pxor xmm11, xmm10 ; 0300 _ 66 45: 0F EF. DA + movdqa xmm10, xmm0 ; 0305 _ 66 44: 0F 6F. D0 + movdqa xmm2, xmm11 ; 030A _ 66 41: 0F 6F. D3 + psrld xmm0, 27 ; 030F _ 66: 0F 72. D0, 1B + pslld xmm10, 5 ; 0314 _ 66 41: 0F 72. F2, 05 + por xmm10, xmm0 ; 031A _ 66 44: 0F EB. D0 + pslld xmm2, 22 ; 031F _ 66: 0F 72. F2, 16 + psrld xmm11, 10 ; 0324 _ 66 41: 0F 72. D3, 0A + por xmm2, xmm11 ; 032A _ 66 41: 0F EB. D3 + movd xmm11, dword [r12+1290H] ; 032F _ 66 45: 0F 6E. 9C 24, 00001290 + pshufd xmm11, xmm11, 0 ; 0339 _ 66 45: 0F 70. DB, 00 + pxor xmm10, xmm11 ; 033F _ 66 45: 0F EF. D3 + pxor xmm10, xmm6 ; 0344 _ 66 44: 0F EF. D6 + movd xmm11, dword [r12+1294H] ; 0349 _ 66 45: 0F 6E. 9C 24, 00001294 + pshufd xmm11, xmm11, 0 ; 0353 _ 66 45: 0F 70. DB, 00 + pxor xmm1, xmm11 ; 0359 _ 66 41: 0F EF. CB + movd xmm11, dword [r12+1298H] ; 035E _ 66 45: 0F 6E. 9C 24, 00001298 + pshufd xmm11, xmm11, 0 ; 0368 _ 66 45: 0F 70. DB, 00 + pxor xmm2, xmm11 ; 036E _ 66 41: 0F EF. D3 + pxor xmm2, xmm6 ; 0373 _ 66: 0F EF. D6 + movd xmm11, dword [r12+129CH] ; 0377 _ 66 45: 0F 6E. 9C 24, 0000129C + pshufd xmm11, xmm11, 0 ; 0381 _ 66 45: 0F 70. DB, 00 + pxor xmm8, xmm11 ; 0387 _ 66 45: 0F EF. C3 + movdqa xmm11, xmm10 ; 038C _ 66 45: 0F 6F. DA + pand xmm11, xmm1 ; 0391 _ 66 44: 0F DB. D9 + pxor xmm2, xmm11 ; 0396 _ 66 41: 0F EF. D3 + por xmm11, xmm8 ; 039B _ 66 45: 0F EB. D8 + pxor xmm8, xmm2 ; 03A0 _ 66 44: 0F EF. C2 + pxor xmm1, xmm11 ; 03A5 _ 66 41: 0F EF. CB + pxor xmm11, xmm10 ; 03AA _ 66 45: 0F EF. DA + por xmm10, xmm1 ; 03AF _ 66 44: 0F EB. D1 + pxor xmm1, xmm8 ; 03B4 _ 66 41: 0F EF. C8 + por xmm2, xmm11 ; 03B9 _ 66 41: 0F EB. D3 + pand xmm2, xmm10 ; 03BE _ 66 41: 0F DB. D2 + movdqa xmm0, xmm2 ; 03C3 _ 66: 0F 6F. C2 + pxor xmm11, xmm1 ; 03C7 _ 66 44: 0F EF. D9 + pand xmm1, xmm2 ; 03CC _ 66: 0F DB. CA + pxor xmm1, xmm11 ; 03D0 _ 66 41: 0F EF. CB + pand xmm11, xmm2 ; 03D5 _ 66 44: 0F DB. DA + pxor xmm10, xmm11 ; 03DA _ 66 45: 0F EF. D3 + pslld xmm0, 13 ; 03DF _ 66: 0F 72. F0, 0D + psrld xmm2, 19 ; 03E4 _ 66: 0F 72. D2, 13 + por xmm0, xmm2 ; 03E9 _ 66: 0F EB. C2 + movdqa xmm2, xmm8 ; 03ED _ 66 41: 0F 6F. D0 + psrld xmm8, 29 ; 03F2 _ 66 41: 0F 72. D0, 1D + pxor xmm10, xmm0 ; 03F8 _ 66 44: 0F EF. D0 + pslld xmm2, 3 ; 03FD _ 66: 0F 72. F2, 03 + por xmm2, xmm8 ; 0402 _ 66 41: 0F EB. D0 + movdqa xmm8, xmm0 ; 0407 _ 66 44: 0F 6F. C0 + pxor xmm10, xmm2 ; 040C _ 66 44: 0F EF. D2 + pxor xmm1, xmm2 ; 0411 _ 66: 0F EF. CA + pslld xmm8, 3 ; 0415 _ 66 41: 0F 72. F0, 03 + pxor xmm1, xmm8 ; 041B _ 66 41: 0F EF. C8 + movdqa xmm8, xmm10 ; 0420 _ 66 45: 0F 6F. C2 + psrld xmm10, 31 ; 0425 _ 66 41: 0F 72. D2, 1F + pslld xmm8, 1 ; 042B _ 66 41: 0F 72. 
F0, 01 + por xmm8, xmm10 ; 0431 _ 66 45: 0F EB. C2 + movdqa xmm10, xmm1 ; 0436 _ 66 44: 0F 6F. D1 + movdqa xmm11, xmm8 ; 043B _ 66 45: 0F 6F. D8 + psrld xmm1, 25 ; 0440 _ 66: 0F 72. D1, 19 + pslld xmm10, 7 ; 0445 _ 66 41: 0F 72. F2, 07 + por xmm10, xmm1 ; 044B _ 66 44: 0F EB. D1 + pxor xmm0, xmm8 ; 0450 _ 66 41: 0F EF. C0 + pxor xmm0, xmm10 ; 0455 _ 66 41: 0F EF. C2 + pxor xmm2, xmm10 ; 045A _ 66 41: 0F EF. D2 + pslld xmm11, 7 ; 045F _ 66 41: 0F 72. F3, 07 + pxor xmm2, xmm11 ; 0465 _ 66 41: 0F EF. D3 + movdqa xmm11, xmm0 ; 046A _ 66 44: 0F 6F. D8 + psrld xmm0, 27 ; 046F _ 66: 0F 72. D0, 1B + pslld xmm11, 5 ; 0474 _ 66 41: 0F 72. F3, 05 + por xmm11, xmm0 ; 047A _ 66 44: 0F EB. D8 + movdqa xmm0, xmm2 ; 047F _ 66: 0F 6F. C2 + psrld xmm2, 10 ; 0483 _ 66: 0F 72. D2, 0A + pslld xmm0, 22 ; 0488 _ 66: 0F 72. F0, 16 + por xmm0, xmm2 ; 048D _ 66: 0F EB. C2 + movd xmm2, dword [r12+12A0H] ; 0491 _ 66 41: 0F 6E. 94 24, 000012A0 + pshufd xmm2, xmm2, 0 ; 049B _ 66: 0F 70. D2, 00 + pxor xmm11, xmm2 ; 04A0 _ 66 44: 0F EF. DA + movdqa xmm1, xmm11 ; 04A5 _ 66 41: 0F 6F. CB + movd xmm2, dword [r12+12A4H] ; 04AA _ 66 41: 0F 6E. 94 24, 000012A4 + pshufd xmm2, xmm2, 0 ; 04B4 _ 66: 0F 70. D2, 00 + pxor xmm8, xmm2 ; 04B9 _ 66 44: 0F EF. C2 + movd xmm2, dword [r12+12A8H] ; 04BE _ 66 41: 0F 6E. 94 24, 000012A8 + pshufd xmm2, xmm2, 0 ; 04C8 _ 66: 0F 70. D2, 00 + pxor xmm0, xmm2 ; 04CD _ 66: 0F EF. C2 + pand xmm1, xmm0 ; 04D1 _ 66: 0F DB. C8 + pxor xmm0, xmm8 ; 04D5 _ 66 41: 0F EF. C0 + movd xmm2, dword [r12+12ACH] ; 04DA _ 66 41: 0F 6E. 94 24, 000012AC + pshufd xmm2, xmm2, 0 ; 04E4 _ 66: 0F 70. D2, 00 + pxor xmm10, xmm2 ; 04E9 _ 66 44: 0F EF. D2 + pxor xmm1, xmm10 ; 04EE _ 66 41: 0F EF. CA + pxor xmm0, xmm1 ; 04F3 _ 66: 0F EF. C1 + por xmm10, xmm11 ; 04F7 _ 66 45: 0F EB. D3 + pxor xmm10, xmm8 ; 04FC _ 66 45: 0F EF. D0 + movdqa xmm2, xmm10 ; 0501 _ 66 41: 0F 6F. D2 + movdqa xmm8, xmm0 ; 0506 _ 66 44: 0F 6F. C0 + pxor xmm11, xmm0 ; 050B _ 66 44: 0F EF. D8 + por xmm2, xmm11 ; 0510 _ 66 41: 0F EB. D3 + pxor xmm2, xmm1 ; 0515 _ 66: 0F EF. D1 + pand xmm1, xmm10 ; 0519 _ 66 41: 0F DB. CA + pxor xmm11, xmm1 ; 051E _ 66 44: 0F EF. D9 + pxor xmm10, xmm2 ; 0523 _ 66 44: 0F EF. D2 + pxor xmm10, xmm11 ; 0528 _ 66 45: 0F EF. D3 + movdqa xmm1, xmm10 ; 052D _ 66 41: 0F 6F. CA + pxor xmm11, xmm6 ; 0532 _ 66 44: 0F EF. DE + pslld xmm8, 13 ; 0537 _ 66 41: 0F 72. F0, 0D + psrld xmm0, 19 ; 053D _ 66: 0F 72. D0, 13 + por xmm8, xmm0 ; 0542 _ 66 44: 0F EB. C0 + pslld xmm1, 3 ; 0547 _ 66: 0F 72. F1, 03 + psrld xmm10, 29 ; 054C _ 66 41: 0F 72. D2, 1D + por xmm1, xmm10 ; 0552 _ 66 41: 0F EB. CA + movdqa xmm10, xmm8 ; 0557 _ 66 45: 0F 6F. D0 + pxor xmm2, xmm8 ; 055C _ 66 41: 0F EF. D0 + pxor xmm2, xmm1 ; 0561 _ 66: 0F EF. D1 + movdqa xmm0, xmm2 ; 0565 _ 66: 0F 6F. C2 + pxor xmm11, xmm1 ; 0569 _ 66 44: 0F EF. D9 + pslld xmm10, 3 ; 056E _ 66 41: 0F 72. F2, 03 + pxor xmm11, xmm10 ; 0574 _ 66 45: 0F EF. DA + movdqa xmm10, xmm11 ; 0579 _ 66 45: 0F 6F. D3 + pslld xmm0, 1 ; 057E _ 66: 0F 72. F0, 01 + psrld xmm2, 31 ; 0583 _ 66: 0F 72. D2, 1F + por xmm0, xmm2 ; 0588 _ 66: 0F EB. C2 + pslld xmm10, 7 ; 058C _ 66 41: 0F 72. F2, 07 + psrld xmm11, 25 ; 0592 _ 66 41: 0F 72. D3, 19 + por xmm10, xmm11 ; 0598 _ 66 45: 0F EB. D3 + movdqa xmm11, xmm0 ; 059D _ 66 44: 0F 6F. D8 + pxor xmm8, xmm0 ; 05A2 _ 66 44: 0F EF. C0 + pxor xmm8, xmm10 ; 05A7 _ 66 45: 0F EF. C2 + movdqa xmm2, xmm8 ; 05AC _ 66 41: 0F 6F. D0 + pxor xmm1, xmm10 ; 05B1 _ 66 41: 0F EF. CA + pslld xmm11, 7 ; 05B6 _ 66 41: 0F 72. F3, 07 + pxor xmm1, xmm11 ; 05BC _ 66 41: 0F EF. 
CB + pslld xmm2, 5 ; 05C1 _ 66: 0F 72. F2, 05 + psrld xmm8, 27 ; 05C6 _ 66 41: 0F 72. D0, 1B + por xmm2, xmm8 ; 05CC _ 66 41: 0F EB. D0 + movdqa xmm8, xmm1 ; 05D1 _ 66 44: 0F 6F. C1 + movd xmm11, dword [r12+12B0H] ; 05D6 _ 66 45: 0F 6E. 9C 24, 000012B0 + psrld xmm1, 10 ; 05E0 _ 66: 0F 72. D1, 0A + pshufd xmm11, xmm11, 0 ; 05E5 _ 66 45: 0F 70. DB, 00 + pslld xmm8, 22 ; 05EB _ 66 41: 0F 72. F0, 16 + por xmm8, xmm1 ; 05F1 _ 66 44: 0F EB. C1 + pxor xmm2, xmm11 ; 05F6 _ 66 41: 0F EF. D3 + movdqa xmm1, xmm2 ; 05FB _ 66: 0F 6F. CA + movd xmm11, dword [r12+12B4H] ; 05FF _ 66 45: 0F 6E. 9C 24, 000012B4 + pshufd xmm11, xmm11, 0 ; 0609 _ 66 45: 0F 70. DB, 00 + pxor xmm0, xmm11 ; 060F _ 66 41: 0F EF. C3 + movd xmm11, dword [r12+12B8H] ; 0614 _ 66 45: 0F 6E. 9C 24, 000012B8 + pshufd xmm11, xmm11, 0 ; 061E _ 66 45: 0F 70. DB, 00 + pxor xmm8, xmm11 ; 0624 _ 66 45: 0F EF. C3 + movd xmm11, dword [r12+12BCH] ; 0629 _ 66 45: 0F 6E. 9C 24, 000012BC + pshufd xmm11, xmm11, 0 ; 0633 _ 66 45: 0F 70. DB, 00 + pxor xmm10, xmm11 ; 0639 _ 66 45: 0F EF. D3 + por xmm1, xmm10 ; 063E _ 66 41: 0F EB. CA + pxor xmm10, xmm0 ; 0643 _ 66 44: 0F EF. D0 + pand xmm0, xmm2 ; 0648 _ 66: 0F DB. C2 + pxor xmm2, xmm8 ; 064C _ 66 41: 0F EF. D0 + pxor xmm8, xmm10 ; 0651 _ 66 45: 0F EF. C2 + pand xmm10, xmm1 ; 0656 _ 66 44: 0F DB. D1 + por xmm2, xmm0 ; 065B _ 66: 0F EB. D0 + pxor xmm10, xmm2 ; 065F _ 66 44: 0F EF. D2 + pxor xmm1, xmm0 ; 0664 _ 66: 0F EF. C8 + pand xmm2, xmm1 ; 0668 _ 66: 0F DB. D1 + pxor xmm0, xmm10 ; 066C _ 66 41: 0F EF. C2 + pxor xmm2, xmm8 ; 0671 _ 66 41: 0F EF. D0 + por xmm0, xmm1 ; 0676 _ 66: 0F EB. C1 + pxor xmm0, xmm8 ; 067A _ 66 41: 0F EF. C0 + movdqa xmm8, xmm0 ; 067F _ 66 44: 0F 6F. C0 + pxor xmm1, xmm10 ; 0684 _ 66 41: 0F EF. CA + por xmm8, xmm10 ; 0689 _ 66 45: 0F EB. C2 + pxor xmm1, xmm8 ; 068E _ 66 41: 0F EF. C8 + movdqa xmm11, xmm1 ; 0693 _ 66 44: 0F 6F. D9 + psrld xmm1, 19 ; 0698 _ 66: 0F 72. D1, 13 + pslld xmm11, 13 ; 069D _ 66 41: 0F 72. F3, 0D + por xmm11, xmm1 ; 06A3 _ 66 44: 0F EB. D9 + movdqa xmm1, xmm10 ; 06A8 _ 66 41: 0F 6F. CA + psrld xmm10, 29 ; 06AD _ 66 41: 0F 72. D2, 1D + pxor xmm0, xmm11 ; 06B3 _ 66 41: 0F EF. C3 + pslld xmm1, 3 ; 06B8 _ 66: 0F 72. F1, 03 + por xmm1, xmm10 ; 06BD _ 66 41: 0F EB. CA + movdqa xmm10, xmm11 ; 06C2 _ 66 45: 0F 6F. D3 + pxor xmm0, xmm1 ; 06C7 _ 66: 0F EF. C1 + movdqa xmm8, xmm0 ; 06CB _ 66 44: 0F 6F. C0 + pxor xmm2, xmm1 ; 06D0 _ 66: 0F EF. D1 + pslld xmm10, 3 ; 06D4 _ 66 41: 0F 72. F2, 03 + pxor xmm2, xmm10 ; 06DA _ 66 41: 0F EF. D2 + movdqa xmm10, xmm2 ; 06DF _ 66 44: 0F 6F. D2 + pslld xmm8, 1 ; 06E4 _ 66 41: 0F 72. F0, 01 + psrld xmm0, 31 ; 06EA _ 66: 0F 72. D0, 1F + por xmm8, xmm0 ; 06EF _ 66 44: 0F EB. C0 + pslld xmm10, 7 ; 06F4 _ 66 41: 0F 72. F2, 07 + psrld xmm2, 25 ; 06FA _ 66: 0F 72. D2, 19 + por xmm10, xmm2 ; 06FF _ 66 44: 0F EB. D2 + movdqa xmm2, xmm8 ; 0704 _ 66 41: 0F 6F. D0 + pxor xmm11, xmm8 ; 0709 _ 66 45: 0F EF. D8 + pxor xmm11, xmm10 ; 070E _ 66 45: 0F EF. DA + movdqa xmm0, xmm11 ; 0713 _ 66 41: 0F 6F. C3 + pxor xmm1, xmm10 ; 0718 _ 66 41: 0F EF. CA + pslld xmm2, 7 ; 071D _ 66: 0F 72. F2, 07 + pxor xmm1, xmm2 ; 0722 _ 66: 0F EF. CA + movdqa xmm2, xmm1 ; 0726 _ 66: 0F 6F. D1 + pslld xmm0, 5 ; 072A _ 66: 0F 72. F0, 05 + psrld xmm11, 27 ; 072F _ 66 41: 0F 72. D3, 1B + por xmm0, xmm11 ; 0735 _ 66 41: 0F EB. C3 + pslld xmm2, 22 ; 073A _ 66: 0F 72. F2, 16 + psrld xmm1, 10 ; 073F _ 66: 0F 72. D1, 0A + por xmm2, xmm1 ; 0744 _ 66: 0F EB. D1 + movd xmm11, dword [r12+12C0H] ; 0748 _ 66 45: 0F 6E. 
9C 24, 000012C0 + pshufd xmm11, xmm11, 0 ; 0752 _ 66 45: 0F 70. DB, 00 + pxor xmm0, xmm11 ; 0758 _ 66 41: 0F EF. C3 + movd xmm11, dword [r12+12C4H] ; 075D _ 66 45: 0F 6E. 9C 24, 000012C4 + pshufd xmm11, xmm11, 0 ; 0767 _ 66 45: 0F 70. DB, 00 + pxor xmm8, xmm11 ; 076D _ 66 45: 0F EF. C3 + movd xmm11, dword [r12+12C8H] ; 0772 _ 66 45: 0F 6E. 9C 24, 000012C8 + pshufd xmm11, xmm11, 0 ; 077C _ 66 45: 0F 70. DB, 00 + pxor xmm2, xmm11 ; 0782 _ 66 41: 0F EF. D3 + movd xmm11, dword [r12+12CCH] ; 0787 _ 66 45: 0F 6E. 9C 24, 000012CC + pshufd xmm11, xmm11, 0 ; 0791 _ 66 45: 0F 70. DB, 00 + pxor xmm10, xmm11 ; 0797 _ 66 45: 0F EF. D3 + pxor xmm8, xmm10 ; 079C _ 66 45: 0F EF. C2 + movdqa xmm1, xmm8 ; 07A1 _ 66 41: 0F 6F. C8 + pxor xmm10, xmm6 ; 07A6 _ 66 44: 0F EF. D6 + pxor xmm2, xmm10 ; 07AB _ 66 41: 0F EF. D2 + pxor xmm10, xmm0 ; 07B0 _ 66 44: 0F EF. D0 + pand xmm1, xmm10 ; 07B5 _ 66 41: 0F DB. CA + pxor xmm1, xmm2 ; 07BA _ 66: 0F EF. CA + movdqa xmm11, xmm1 ; 07BE _ 66 44: 0F 6F. D9 + pxor xmm8, xmm10 ; 07C3 _ 66 45: 0F EF. C2 + pxor xmm0, xmm8 ; 07C8 _ 66 41: 0F EF. C0 + pand xmm2, xmm8 ; 07CD _ 66 41: 0F DB. D0 + pxor xmm2, xmm0 ; 07D2 _ 66: 0F EF. D0 + pand xmm0, xmm1 ; 07D6 _ 66: 0F DB. C1 + pxor xmm10, xmm0 ; 07DA _ 66 44: 0F EF. D0 + por xmm8, xmm1 ; 07DF _ 66 44: 0F EB. C1 + pxor xmm8, xmm0 ; 07E4 _ 66 44: 0F EF. C0 + por xmm0, xmm10 ; 07E9 _ 66 41: 0F EB. C2 + pxor xmm0, xmm2 ; 07EE _ 66: 0F EF. C2 + pand xmm2, xmm10 ; 07F2 _ 66 41: 0F DB. D2 + pxor xmm0, xmm6 ; 07F7 _ 66: 0F EF. C6 + pxor xmm8, xmm2 ; 07FB _ 66 44: 0F EF. C2 + movdqa xmm2, xmm0 ; 0800 _ 66: 0F 6F. D0 + pslld xmm11, 13 ; 0804 _ 66 41: 0F 72. F3, 0D + psrld xmm1, 19 ; 080A _ 66: 0F 72. D1, 13 + por xmm11, xmm1 ; 080F _ 66 44: 0F EB. D9 + pslld xmm2, 3 ; 0814 _ 66: 0F 72. F2, 03 + psrld xmm0, 29 ; 0819 _ 66: 0F 72. D0, 1D + por xmm2, xmm0 ; 081E _ 66: 0F EB. D0 + movdqa xmm0, xmm11 ; 0822 _ 66 41: 0F 6F. C3 + pxor xmm8, xmm11 ; 0827 _ 66 45: 0F EF. C3 + pxor xmm8, xmm2 ; 082C _ 66 44: 0F EF. C2 + pxor xmm10, xmm2 ; 0831 _ 66 44: 0F EF. D2 + pslld xmm0, 3 ; 0836 _ 66: 0F 72. F0, 03 + pxor xmm10, xmm0 ; 083B _ 66 44: 0F EF. D0 + movdqa xmm0, xmm8 ; 0840 _ 66 41: 0F 6F. C0 + psrld xmm8, 31 ; 0845 _ 66 41: 0F 72. D0, 1F + pslld xmm0, 1 ; 084B _ 66: 0F 72. F0, 01 + por xmm0, xmm8 ; 0850 _ 66 41: 0F EB. C0 + movdqa xmm8, xmm10 ; 0855 _ 66 45: 0F 6F. C2 + psrld xmm10, 25 ; 085A _ 66 41: 0F 72. D2, 19 + pxor xmm11, xmm0 ; 0860 _ 66 44: 0F EF. D8 + pslld xmm8, 7 ; 0865 _ 66 41: 0F 72. F0, 07 + por xmm8, xmm10 ; 086B _ 66 45: 0F EB. C2 + movdqa xmm10, xmm0 ; 0870 _ 66 44: 0F 6F. D0 + pxor xmm11, xmm8 ; 0875 _ 66 45: 0F EF. D8 + movdqa xmm1, xmm11 ; 087A _ 66 41: 0F 6F. CB + pxor xmm2, xmm8 ; 087F _ 66 41: 0F EF. D0 + pslld xmm10, 7 ; 0884 _ 66 41: 0F 72. F2, 07 + pxor xmm2, xmm10 ; 088A _ 66 41: 0F EF. D2 + pslld xmm1, 5 ; 088F _ 66: 0F 72. F1, 05 + psrld xmm11, 27 ; 0894 _ 66 41: 0F 72. D3, 1B + por xmm1, xmm11 ; 089A _ 66 41: 0F EB. CB + movdqa xmm11, xmm2 ; 089F _ 66 44: 0F 6F. DA + psrld xmm2, 10 ; 08A4 _ 66: 0F 72. D2, 0A + pslld xmm11, 22 ; 08A9 _ 66 41: 0F 72. F3, 16 + por xmm11, xmm2 ; 08AF _ 66 44: 0F EB. DA + movd xmm2, dword [r12+12D0H] ; 08B4 _ 66 41: 0F 6E. 94 24, 000012D0 + pshufd xmm10, xmm2, 0 ; 08BE _ 66 44: 0F 70. D2, 00 + pxor xmm1, xmm10 ; 08C4 _ 66 41: 0F EF. CA + movd xmm2, dword [r12+12D4H] ; 08C9 _ 66 41: 0F 6E. 94 24, 000012D4 + pshufd xmm10, xmm2, 0 ; 08D3 _ 66 44: 0F 70. D2, 00 + pxor xmm0, xmm10 ; 08D9 _ 66 41: 0F EF. C2 + pxor xmm1, xmm0 ; 08DE _ 66: 0F EF. 
C8 + movd xmm2, dword [r12+12D8H] ; 08E2 _ 66 41: 0F 6E. 94 24, 000012D8 + pshufd xmm10, xmm2, 0 ; 08EC _ 66 44: 0F 70. D2, 00 + pxor xmm11, xmm10 ; 08F2 _ 66 45: 0F EF. DA + movd xmm2, dword [r12+12DCH] ; 08F7 _ 66 41: 0F 6E. 94 24, 000012DC + pshufd xmm10, xmm2, 0 ; 0901 _ 66 44: 0F 70. D2, 00 + pxor xmm8, xmm10 ; 0907 _ 66 45: 0F EF. C2 + pxor xmm0, xmm8 ; 090C _ 66 41: 0F EF. C0 + movdqa xmm2, xmm0 ; 0911 _ 66: 0F 6F. D0 + pxor xmm8, xmm6 ; 0915 _ 66 44: 0F EF. C6 + pxor xmm11, xmm8 ; 091A _ 66 45: 0F EF. D8 + pand xmm2, xmm1 ; 091F _ 66: 0F DB. D1 + pxor xmm2, xmm11 ; 0923 _ 66 41: 0F EF. D3 + movdqa xmm10, xmm2 ; 0928 _ 66 44: 0F 6F. D2 + por xmm11, xmm0 ; 092D _ 66 44: 0F EB. D8 + pxor xmm0, xmm8 ; 0932 _ 66 41: 0F EF. C0 + pand xmm8, xmm2 ; 0937 _ 66 44: 0F DB. C2 + pxor xmm8, xmm1 ; 093C _ 66 44: 0F EF. C1 + pxor xmm0, xmm2 ; 0941 _ 66: 0F EF. C2 + pxor xmm0, xmm11 ; 0945 _ 66 41: 0F EF. C3 + pxor xmm11, xmm1 ; 094A _ 66 44: 0F EF. D9 + pand xmm1, xmm8 ; 094F _ 66 41: 0F DB. C8 + pxor xmm11, xmm6 ; 0954 _ 66 44: 0F EF. DE + pxor xmm1, xmm0 ; 0959 _ 66: 0F EF. C8 + por xmm0, xmm8 ; 095D _ 66 41: 0F EB. C0 + pxor xmm0, xmm11 ; 0962 _ 66 41: 0F EF. C3 + pslld xmm10, 13 ; 0967 _ 66 41: 0F 72. F2, 0D + psrld xmm2, 19 ; 096D _ 66: 0F 72. D2, 13 + por xmm10, xmm2 ; 0972 _ 66 44: 0F EB. D2 + movdqa xmm2, xmm1 ; 0977 _ 66: 0F 6F. D1 + movdqa xmm11, xmm10 ; 097B _ 66 45: 0F 6F. DA + psrld xmm1, 29 ; 0980 _ 66: 0F 72. D1, 1D + pslld xmm2, 3 ; 0985 _ 66: 0F 72. F2, 03 + por xmm2, xmm1 ; 098A _ 66: 0F EB. D1 + pxor xmm8, xmm10 ; 098E _ 66 45: 0F EF. C2 + pxor xmm8, xmm2 ; 0993 _ 66 44: 0F EF. C2 + movdqa xmm1, xmm8 ; 0998 _ 66 41: 0F 6F. C8 + pxor xmm0, xmm2 ; 099D _ 66: 0F EF. C2 + pslld xmm11, 3 ; 09A1 _ 66 41: 0F 72. F3, 03 + pxor xmm0, xmm11 ; 09A7 _ 66 41: 0F EF. C3 + movdqa xmm11, xmm0 ; 09AC _ 66 44: 0F 6F. D8 + pslld xmm1, 1 ; 09B1 _ 66: 0F 72. F1, 01 + psrld xmm8, 31 ; 09B6 _ 66 41: 0F 72. D0, 1F + por xmm1, xmm8 ; 09BC _ 66 41: 0F EB. C8 + pslld xmm11, 7 ; 09C1 _ 66 41: 0F 72. F3, 07 + psrld xmm0, 25 ; 09C7 _ 66: 0F 72. D0, 19 + por xmm11, xmm0 ; 09CC _ 66 44: 0F EB. D8 + pxor xmm10, xmm1 ; 09D1 _ 66 44: 0F EF. D1 + movdqa xmm8, xmm1 ; 09D6 _ 66 44: 0F 6F. C1 + pxor xmm10, xmm11 ; 09DB _ 66 45: 0F EF. D3 + pxor xmm2, xmm11 ; 09E0 _ 66 41: 0F EF. D3 + pslld xmm8, 7 ; 09E5 _ 66 41: 0F 72. F0, 07 + pxor xmm2, xmm8 ; 09EB _ 66 41: 0F EF. D0 + movdqa xmm8, xmm10 ; 09F0 _ 66 45: 0F 6F. C2 + psrld xmm10, 27 ; 09F5 _ 66 41: 0F 72. D2, 1B + pslld xmm8, 5 ; 09FB _ 66 41: 0F 72. F0, 05 + por xmm8, xmm10 ; 0A01 _ 66 45: 0F EB. C2 + movdqa xmm10, xmm2 ; 0A06 _ 66 44: 0F 6F. D2 + psrld xmm2, 10 ; 0A0B _ 66: 0F 72. D2, 0A + pslld xmm10, 22 ; 0A10 _ 66 41: 0F 72. F2, 16 + por xmm10, xmm2 ; 0A16 _ 66 44: 0F EB. D2 + movd xmm2, dword [r12+12E0H] ; 0A1B _ 66 41: 0F 6E. 94 24, 000012E0 + pshufd xmm2, xmm2, 0 ; 0A25 _ 66: 0F 70. D2, 00 + pxor xmm8, xmm2 ; 0A2A _ 66 44: 0F EF. C2 + movd xmm2, dword [r12+12E4H] ; 0A2F _ 66 41: 0F 6E. 94 24, 000012E4 + pshufd xmm2, xmm2, 0 ; 0A39 _ 66: 0F 70. D2, 00 + pxor xmm1, xmm2 ; 0A3E _ 66: 0F EF. CA + movd xmm2, dword [r12+12E8H] ; 0A42 _ 66 41: 0F 6E. 94 24, 000012E8 + pshufd xmm2, xmm2, 0 ; 0A4C _ 66: 0F 70. D2, 00 + pxor xmm10, xmm2 ; 0A51 _ 66 44: 0F EF. D2 + pxor xmm10, xmm6 ; 0A56 _ 66 44: 0F EF. D6 + movd xmm2, dword [r12+12ECH] ; 0A5B _ 66 41: 0F 6E. 94 24, 000012EC + pshufd xmm2, xmm2, 0 ; 0A65 _ 66: 0F 70. D2, 00 + pxor xmm11, xmm2 ; 0A6A _ 66 44: 0F EF. DA + movdqa xmm2, xmm11 ; 0A6F _ 66 41: 0F 6F. D3 + pand xmm2, xmm8 ; 0A74 _ 66 41: 0F DB. 
D0 + pxor xmm8, xmm11 ; 0A79 _ 66 45: 0F EF. C3 + pxor xmm2, xmm10 ; 0A7E _ 66 41: 0F EF. D2 + por xmm10, xmm11 ; 0A83 _ 66 45: 0F EB. D3 + pxor xmm1, xmm2 ; 0A88 _ 66: 0F EF. CA + pxor xmm10, xmm8 ; 0A8C _ 66 45: 0F EF. D0 + por xmm8, xmm1 ; 0A91 _ 66 44: 0F EB. C1 + pxor xmm10, xmm1 ; 0A96 _ 66 44: 0F EF. D1 + pxor xmm11, xmm8 ; 0A9B _ 66 45: 0F EF. D8 + por xmm8, xmm2 ; 0AA0 _ 66 44: 0F EB. C2 + pxor xmm8, xmm10 ; 0AA5 _ 66 45: 0F EF. C2 + movdqa xmm0, xmm8 ; 0AAA _ 66 41: 0F 6F. C0 + pxor xmm11, xmm2 ; 0AAF _ 66 44: 0F EF. DA + pxor xmm11, xmm8 ; 0AB4 _ 66 45: 0F EF. D8 + pxor xmm2, xmm6 ; 0AB9 _ 66: 0F EF. D6 + pand xmm10, xmm11 ; 0ABD _ 66 45: 0F DB. D3 + pxor xmm2, xmm10 ; 0AC2 _ 66 41: 0F EF. D2 + movdqa xmm10, xmm11 ; 0AC7 _ 66 45: 0F 6F. D3 + pslld xmm0, 13 ; 0ACC _ 66: 0F 72. F0, 0D + psrld xmm8, 19 ; 0AD1 _ 66 41: 0F 72. D0, 13 + por xmm0, xmm8 ; 0AD7 _ 66 41: 0F EB. C0 + pslld xmm10, 3 ; 0ADC _ 66 41: 0F 72. F2, 03 + psrld xmm11, 29 ; 0AE2 _ 66 41: 0F 72. D3, 1D + por xmm10, xmm11 ; 0AE8 _ 66 45: 0F EB. D3 + movdqa xmm11, xmm0 ; 0AED _ 66 44: 0F 6F. D8 + pxor xmm1, xmm0 ; 0AF2 _ 66: 0F EF. C8 + pxor xmm1, xmm10 ; 0AF6 _ 66 41: 0F EF. CA + pxor xmm2, xmm10 ; 0AFB _ 66 41: 0F EF. D2 + pslld xmm11, 3 ; 0B00 _ 66 41: 0F 72. F3, 03 + pxor xmm2, xmm11 ; 0B06 _ 66 41: 0F EF. D3 + movdqa xmm11, xmm1 ; 0B0B _ 66 44: 0F 6F. D9 + movdqa xmm8, xmm2 ; 0B10 _ 66 44: 0F 6F. C2 + psrld xmm1, 31 ; 0B15 _ 66: 0F 72. D1, 1F + pslld xmm11, 1 ; 0B1A _ 66 41: 0F 72. F3, 01 + por xmm11, xmm1 ; 0B20 _ 66 44: 0F EB. D9 + pslld xmm8, 7 ; 0B25 _ 66 41: 0F 72. F0, 07 + psrld xmm2, 25 ; 0B2B _ 66: 0F 72. D2, 19 + por xmm8, xmm2 ; 0B30 _ 66 44: 0F EB. C2 + movdqa xmm2, xmm11 ; 0B35 _ 66 41: 0F 6F. D3 + pxor xmm0, xmm11 ; 0B3A _ 66 41: 0F EF. C3 + pxor xmm0, xmm8 ; 0B3F _ 66 41: 0F EF. C0 + movdqa xmm1, xmm0 ; 0B44 _ 66: 0F 6F. C8 + pxor xmm10, xmm8 ; 0B48 _ 66 45: 0F EF. D0 + pslld xmm2, 7 ; 0B4D _ 66: 0F 72. F2, 07 + pxor xmm10, xmm2 ; 0B52 _ 66 44: 0F EF. D2 + movdqa xmm2, xmm10 ; 0B57 _ 66 41: 0F 6F. D2 + pslld xmm1, 5 ; 0B5C _ 66: 0F 72. F1, 05 + psrld xmm0, 27 ; 0B61 _ 66: 0F 72. D0, 1B + por xmm1, xmm0 ; 0B66 _ 66: 0F EB. C8 + pslld xmm2, 22 ; 0B6A _ 66: 0F 72. F2, 16 + psrld xmm10, 10 ; 0B6F _ 66 41: 0F 72. D2, 0A + por xmm2, xmm10 ; 0B75 _ 66 41: 0F EB. D2 + movd xmm10, dword [r12+12F0H] ; 0B7A _ 66 45: 0F 6E. 94 24, 000012F0 + pshufd xmm10, xmm10, 0 ; 0B84 _ 66 45: 0F 70. D2, 00 + pxor xmm1, xmm10 ; 0B8A _ 66 41: 0F EF. CA + movd xmm10, dword [r12+12F4H] ; 0B8F _ 66 45: 0F 6E. 94 24, 000012F4 + pshufd xmm10, xmm10, 0 ; 0B99 _ 66 45: 0F 70. D2, 00 + pxor xmm11, xmm10 ; 0B9F _ 66 45: 0F EF. DA + movdqa xmm0, xmm11 ; 0BA4 _ 66 41: 0F 6F. C3 + movd xmm10, dword [r12+12F8H] ; 0BA9 _ 66 45: 0F 6E. 94 24, 000012F8 + pshufd xmm10, xmm10, 0 ; 0BB3 _ 66 45: 0F 70. D2, 00 + pxor xmm2, xmm10 ; 0BB9 _ 66 41: 0F EF. D2 + por xmm0, xmm2 ; 0BBE _ 66: 0F EB. C2 + movd xmm10, dword [r12+12FCH] ; 0BC2 _ 66 45: 0F 6E. 94 24, 000012FC + pshufd xmm10, xmm10, 0 ; 0BCC _ 66 45: 0F 70. D2, 00 + pxor xmm8, xmm10 ; 0BD2 _ 66 45: 0F EF. C2 + pxor xmm0, xmm8 ; 0BD7 _ 66 41: 0F EF. C0 + pxor xmm11, xmm2 ; 0BDC _ 66 44: 0F EF. DA + pxor xmm2, xmm0 ; 0BE1 _ 66: 0F EF. D0 + por xmm8, xmm11 ; 0BE5 _ 66 45: 0F EB. C3 + pand xmm8, xmm1 ; 0BEA _ 66 44: 0F DB. C1 + pxor xmm11, xmm2 ; 0BEF _ 66 44: 0F EF. DA + pxor xmm8, xmm0 ; 0BF4 _ 66 44: 0F EF. C0 + por xmm0, xmm11 ; 0BF9 _ 66 41: 0F EB. C3 + pxor xmm0, xmm1 ; 0BFE _ 66: 0F EF. C1 + por xmm1, xmm11 ; 0C02 _ 66 41: 0F EB. CB + pxor xmm1, xmm2 ; 0C07 _ 66: 0F EF. 
CA + pxor xmm0, xmm11 ; 0C0B _ 66 41: 0F EF. C3 + pxor xmm2, xmm0 ; 0C10 _ 66: 0F EF. D0 + pand xmm0, xmm1 ; 0C14 _ 66: 0F DB. C1 + pxor xmm0, xmm11 ; 0C18 _ 66 41: 0F EF. C3 + pxor xmm2, xmm6 ; 0C1D _ 66: 0F EF. D6 + por xmm2, xmm1 ; 0C21 _ 66: 0F EB. D1 + pxor xmm11, xmm2 ; 0C25 _ 66 44: 0F EF. DA + movdqa xmm2, xmm11 ; 0C2A _ 66 41: 0F 6F. D3 + psrld xmm11, 19 ; 0C2F _ 66 41: 0F 72. D3, 13 + pslld xmm2, 13 ; 0C35 _ 66: 0F 72. F2, 0D + por xmm2, xmm11 ; 0C3A _ 66 41: 0F EB. D3 + movdqa xmm11, xmm0 ; 0C3F _ 66 44: 0F 6F. D8 + movdqa xmm10, xmm2 ; 0C44 _ 66 44: 0F 6F. D2 + psrld xmm0, 29 ; 0C49 _ 66: 0F 72. D0, 1D + pslld xmm11, 3 ; 0C4E _ 66 41: 0F 72. F3, 03 + por xmm11, xmm0 ; 0C54 _ 66 44: 0F EB. D8 + pxor xmm8, xmm2 ; 0C59 _ 66 44: 0F EF. C2 + pxor xmm8, xmm11 ; 0C5E _ 66 45: 0F EF. C3 + movdqa xmm0, xmm8 ; 0C63 _ 66 41: 0F 6F. C0 + pxor xmm1, xmm11 ; 0C68 _ 66 41: 0F EF. CB + pslld xmm10, 3 ; 0C6D _ 66 41: 0F 72. F2, 03 + pxor xmm1, xmm10 ; 0C73 _ 66 41: 0F EF. CA + movdqa xmm10, xmm1 ; 0C78 _ 66 44: 0F 6F. D1 + pslld xmm0, 1 ; 0C7D _ 66: 0F 72. F0, 01 + psrld xmm8, 31 ; 0C82 _ 66 41: 0F 72. D0, 1F + por xmm0, xmm8 ; 0C88 _ 66 41: 0F EB. C0 + movdqa xmm8, xmm0 ; 0C8D _ 66 44: 0F 6F. C0 + pslld xmm10, 7 ; 0C92 _ 66 41: 0F 72. F2, 07 + psrld xmm1, 25 ; 0C98 _ 66: 0F 72. D1, 19 + por xmm10, xmm1 ; 0C9D _ 66 44: 0F EB. D1 + pxor xmm2, xmm0 ; 0CA2 _ 66: 0F EF. D0 + pxor xmm2, xmm10 ; 0CA6 _ 66 41: 0F EF. D2 + pxor xmm11, xmm10 ; 0CAB _ 66 45: 0F EF. DA + pslld xmm8, 7 ; 0CB0 _ 66 41: 0F 72. F0, 07 + pxor xmm11, xmm8 ; 0CB6 _ 66 45: 0F EF. D8 + movdqa xmm8, xmm2 ; 0CBB _ 66 44: 0F 6F. C2 + movdqa xmm1, xmm11 ; 0CC0 _ 66 41: 0F 6F. CB + psrld xmm2, 27 ; 0CC5 _ 66: 0F 72. D2, 1B + pslld xmm8, 5 ; 0CCA _ 66 41: 0F 72. F0, 05 + por xmm8, xmm2 ; 0CD0 _ 66 44: 0F EB. C2 + pslld xmm1, 22 ; 0CD5 _ 66: 0F 72. F1, 16 + psrld xmm11, 10 ; 0CDA _ 66 41: 0F 72. D3, 0A + por xmm1, xmm11 ; 0CE0 _ 66 41: 0F EB. CB + movd xmm11, dword [r12+1300H] ; 0CE5 _ 66 45: 0F 6E. 9C 24, 00001300 + pshufd xmm2, xmm11, 0 ; 0CEF _ 66 41: 0F 70. D3, 00 + pxor xmm8, xmm2 ; 0CF5 _ 66 44: 0F EF. C2 + movd xmm11, dword [r12+1304H] ; 0CFA _ 66 45: 0F 6E. 9C 24, 00001304 + pshufd xmm2, xmm11, 0 ; 0D04 _ 66 41: 0F 70. D3, 00 + pxor xmm0, xmm2 ; 0D0A _ 66: 0F EF. C2 + movd xmm11, dword [r12+1308H] ; 0D0E _ 66 45: 0F 6E. 9C 24, 00001308 + pshufd xmm2, xmm11, 0 ; 0D18 _ 66 41: 0F 70. D3, 00 + pxor xmm1, xmm2 ; 0D1E _ 66: 0F EF. CA + movd xmm11, dword [r12+130CH] ; 0D22 _ 66 45: 0F 6E. 9C 24, 0000130C + pshufd xmm2, xmm11, 0 ; 0D2C _ 66 41: 0F 70. D3, 00 + pxor xmm10, xmm2 ; 0D32 _ 66 44: 0F EF. D2 + movdqa xmm2, xmm0 ; 0D37 _ 66: 0F 6F. D0 + pxor xmm10, xmm8 ; 0D3B _ 66 45: 0F EF. D0 + pxor xmm0, xmm1 ; 0D40 _ 66: 0F EF. C1 + pand xmm2, xmm10 ; 0D44 _ 66 41: 0F DB. D2 + pxor xmm2, xmm8 ; 0D49 _ 66 41: 0F EF. D0 + por xmm8, xmm10 ; 0D4E _ 66 45: 0F EB. C2 + pxor xmm8, xmm0 ; 0D53 _ 66 44: 0F EF. C0 + pxor xmm0, xmm10 ; 0D58 _ 66 41: 0F EF. C2 + pxor xmm10, xmm1 ; 0D5D _ 66 44: 0F EF. D1 + por xmm1, xmm2 ; 0D62 _ 66: 0F EB. CA + pxor xmm1, xmm0 ; 0D66 _ 66: 0F EF. C8 + pxor xmm0, xmm6 ; 0D6A _ 66: 0F EF. C6 + por xmm0, xmm2 ; 0D6E _ 66: 0F EB. C2 + pxor xmm2, xmm10 ; 0D72 _ 66 41: 0F EF. D2 + pxor xmm2, xmm0 ; 0D77 _ 66: 0F EF. D0 + por xmm10, xmm8 ; 0D7B _ 66 45: 0F EB. D0 + pxor xmm2, xmm10 ; 0D80 _ 66 41: 0F EF. D2 + movdqa xmm11, xmm2 ; 0D85 _ 66 44: 0F 6F. DA + pxor xmm0, xmm10 ; 0D8A _ 66 41: 0F EF. C2 + psrld xmm2, 19 ; 0D8F _ 66: 0F 72. D2, 13 + pslld xmm11, 13 ; 0D94 _ 66 41: 0F 72. 
F3, 0D + por xmm11, xmm2 ; 0D9A _ 66 44: 0F EB. DA + movdqa xmm2, xmm1 ; 0D9F _ 66: 0F 6F. D1 + movdqa xmm10, xmm11 ; 0DA3 _ 66 45: 0F 6F. D3 + psrld xmm1, 29 ; 0DA8 _ 66: 0F 72. D1, 1D + pslld xmm2, 3 ; 0DAD _ 66: 0F 72. F2, 03 + por xmm2, xmm1 ; 0DB2 _ 66: 0F EB. D1 + pxor xmm0, xmm11 ; 0DB6 _ 66 41: 0F EF. C3 + pxor xmm0, xmm2 ; 0DBB _ 66: 0F EF. C2 + movdqa xmm1, xmm0 ; 0DBF _ 66: 0F 6F. C8 + pxor xmm8, xmm2 ; 0DC3 _ 66 44: 0F EF. C2 + pslld xmm10, 3 ; 0DC8 _ 66 41: 0F 72. F2, 03 + pxor xmm8, xmm10 ; 0DCE _ 66 45: 0F EF. C2 + movdqa xmm10, xmm8 ; 0DD3 _ 66 45: 0F 6F. D0 + pslld xmm1, 1 ; 0DD8 _ 66: 0F 72. F1, 01 + psrld xmm0, 31 ; 0DDD _ 66: 0F 72. D0, 1F + por xmm1, xmm0 ; 0DE2 _ 66: 0F EB. C8 + pslld xmm10, 7 ; 0DE6 _ 66 41: 0F 72. F2, 07 + psrld xmm8, 25 ; 0DEC _ 66 41: 0F 72. D0, 19 + por xmm10, xmm8 ; 0DF2 _ 66 45: 0F EB. D0 + movdqa xmm8, xmm1 ; 0DF7 _ 66 44: 0F 6F. C1 + pxor xmm11, xmm1 ; 0DFC _ 66 44: 0F EF. D9 + pxor xmm11, xmm10 ; 0E01 _ 66 45: 0F EF. DA + movdqa xmm0, xmm11 ; 0E06 _ 66 41: 0F 6F. C3 + pxor xmm2, xmm10 ; 0E0B _ 66 41: 0F EF. D2 + pslld xmm8, 7 ; 0E10 _ 66 41: 0F 72. F0, 07 + pxor xmm2, xmm8 ; 0E16 _ 66 41: 0F EF. D0 + movdqa xmm8, xmm2 ; 0E1B _ 66 44: 0F 6F. C2 + pslld xmm0, 5 ; 0E20 _ 66: 0F 72. F0, 05 + psrld xmm11, 27 ; 0E25 _ 66 41: 0F 72. D3, 1B + por xmm0, xmm11 ; 0E2B _ 66 41: 0F EB. C3 + pslld xmm8, 22 ; 0E30 _ 66 41: 0F 72. F0, 16 + psrld xmm2, 10 ; 0E36 _ 66: 0F 72. D2, 0A + por xmm8, xmm2 ; 0E3B _ 66 44: 0F EB. C2 + movd xmm11, dword [r12+1310H] ; 0E40 _ 66 45: 0F 6E. 9C 24, 00001310 + pshufd xmm2, xmm11, 0 ; 0E4A _ 66 41: 0F 70. D3, 00 + pxor xmm0, xmm2 ; 0E50 _ 66: 0F EF. C2 + movd xmm11, dword [r12+1314H] ; 0E54 _ 66 45: 0F 6E. 9C 24, 00001314 + pshufd xmm2, xmm11, 0 ; 0E5E _ 66 41: 0F 70. D3, 00 + pxor xmm1, xmm2 ; 0E64 _ 66: 0F EF. CA + pxor xmm0, xmm6 ; 0E68 _ 66: 0F EF. C6 + movd xmm11, dword [r12+1318H] ; 0E6C _ 66 45: 0F 6E. 9C 24, 00001318 + pshufd xmm2, xmm11, 0 ; 0E76 _ 66 41: 0F 70. D3, 00 + pxor xmm8, xmm2 ; 0E7C _ 66 44: 0F EF. C2 + pxor xmm8, xmm6 ; 0E81 _ 66 44: 0F EF. C6 + movd xmm11, dword [r12+131CH] ; 0E86 _ 66 45: 0F 6E. 9C 24, 0000131C + pshufd xmm2, xmm11, 0 ; 0E90 _ 66 41: 0F 70. D3, 00 + movdqa xmm11, xmm0 ; 0E96 _ 66 44: 0F 6F. D8 + pxor xmm10, xmm2 ; 0E9B _ 66 44: 0F EF. D2 + pand xmm11, xmm1 ; 0EA0 _ 66 44: 0F DB. D9 + pxor xmm8, xmm11 ; 0EA5 _ 66 45: 0F EF. C3 + por xmm11, xmm10 ; 0EAA _ 66 45: 0F EB. DA + pxor xmm10, xmm8 ; 0EAF _ 66 45: 0F EF. D0 + pxor xmm1, xmm11 ; 0EB4 _ 66 41: 0F EF. CB + pxor xmm11, xmm0 ; 0EB9 _ 66 44: 0F EF. D8 + por xmm0, xmm1 ; 0EBE _ 66: 0F EB. C1 + pxor xmm1, xmm10 ; 0EC2 _ 66 41: 0F EF. CA + por xmm8, xmm11 ; 0EC7 _ 66 45: 0F EB. C3 + pand xmm8, xmm0 ; 0ECC _ 66 44: 0F DB. C0 + pxor xmm11, xmm1 ; 0ED1 _ 66 44: 0F EF. D9 + pand xmm1, xmm8 ; 0ED6 _ 66 41: 0F DB. C8 + pxor xmm1, xmm11 ; 0EDB _ 66 41: 0F EF. CB + pand xmm11, xmm8 ; 0EE0 _ 66 45: 0F DB. D8 + pxor xmm0, xmm11 ; 0EE5 _ 66 41: 0F EF. C3 + movdqa xmm11, xmm8 ; 0EEA _ 66 45: 0F 6F. D8 + psrld xmm8, 19 ; 0EEF _ 66 41: 0F 72. D0, 13 + pslld xmm11, 13 ; 0EF5 _ 66 41: 0F 72. F3, 0D + por xmm11, xmm8 ; 0EFB _ 66 45: 0F EB. D8 + movdqa xmm8, xmm10 ; 0F00 _ 66 45: 0F 6F. C2 + psrld xmm10, 29 ; 0F05 _ 66 41: 0F 72. D2, 1D + pxor xmm0, xmm11 ; 0F0B _ 66 41: 0F EF. C3 + pslld xmm8, 3 ; 0F10 _ 66 41: 0F 72. F0, 03 + por xmm8, xmm10 ; 0F16 _ 66 45: 0F EB. C2 + movdqa xmm10, xmm11 ; 0F1B _ 66 45: 0F 6F. D3 + pxor xmm0, xmm8 ; 0F20 _ 66 41: 0F EF. C0 + pxor xmm1, xmm8 ; 0F25 _ 66 41: 0F EF. C8 + pslld xmm10, 3 ; 0F2A _ 66 41: 0F 72. 
+ pxor xmm1, xmm10 ; 0F30 _ 66 41: 0F EF. CA
+ movdqa xmm10, xmm0 ; 0F35 _ 66 44: 0F 6F. D0
+ movdqa xmm2, xmm1 ; 0F3A _ 66: 0F 6F. D1
+ psrld xmm0, 31 ; 0F3E _ 66: 0F 72. D0, 1F
+ pslld xmm10, 1 ; 0F43 _ 66 41: 0F 72. F2, 01
+ por xmm10, xmm0 ; 0F49 _ 66 44: 0F EB. D0
+ movdqa xmm0, xmm10 ; 0F4E _ 66 41: 0F 6F. C2
+ pslld xmm2, 7 ; 0F53 _ 66: 0F 72. F2, 07
+ psrld xmm1, 25 ; 0F58 _ 66: 0F 72. D1, 19
+ por xmm2, xmm1 ; 0F5D _ 66: 0F EB. D1
+ pxor xmm11, xmm10 ; 0F61 _ 66 45: 0F EF. DA
+ pxor xmm11, xmm2 ; 0F66 _ 66 44: 0F EF. DA
+ pxor xmm8, xmm2 ; 0F6B _ 66 44: 0F EF. C2
+ pslld xmm0, 7 ; 0F70 _ 66: 0F 72. F0, 07
+ pxor xmm8, xmm0 ; 0F75 _ 66 44: 0F EF. C0
+ movdqa xmm0, xmm11 ; 0F7A _ 66 41: 0F 6F. C3
+ movdqa xmm1, xmm8 ; 0F7F _ 66 41: 0F 6F. C8
+ psrld xmm11, 27 ; 0F84 _ 66 41: 0F 72. D3, 1B
+ pslld xmm0, 5 ; 0F8A _ 66: 0F 72. F0, 05
+ por xmm0, xmm11 ; 0F8F _ 66 41: 0F EB. C3
+ pslld xmm1, 22 ; 0F94 _ 66: 0F 72. F1, 16
+ psrld xmm8, 10 ; 0F99 _ 66 41: 0F 72. D0, 0A
+ por xmm1, xmm8 ; 0F9F _ 66 41: 0F EB. C8
+ movd xmm8, dword [r12+1320H] ; 0FA4 _ 66 45: 0F 6E. 84 24, 00001320
+ movd xmm11, dword [r12+1324H] ; 0FAE _ 66 45: 0F 6E. 9C 24, 00001324
+ pshufd xmm8, xmm8, 0 ; 0FB8 _ 66 45: 0F 70. C0, 00
+ pxor xmm0, xmm8 ; 0FBE _ 66 41: 0F EF. C0
+ pshufd xmm8, xmm11, 0 ; 0FC3 _ 66 45: 0F 70. C3, 00
+ pxor xmm10, xmm8 ; 0FC9 _ 66 45: 0F EF. D0
+ movd xmm11, dword [r12+1328H] ; 0FCE _ 66 45: 0F 6E. 9C 24, 00001328
+ pshufd xmm8, xmm11, 0 ; 0FD8 _ 66 45: 0F 70. C3, 00
+ pxor xmm1, xmm8 ; 0FDE _ 66 41: 0F EF. C8
+ movd xmm11, dword [r12+132CH] ; 0FE3 _ 66 45: 0F 6E. 9C 24, 0000132C
+ pshufd xmm8, xmm11, 0 ; 0FED _ 66 45: 0F 70. C3, 00
+ movdqa xmm11, xmm0 ; 0FF3 _ 66 44: 0F 6F. D8
+ pxor xmm2, xmm8 ; 0FF8 _ 66 41: 0F EF. D0
+ pand xmm11, xmm1 ; 0FFD _ 66 44: 0F DB. D9
+ pxor xmm11, xmm2 ; 1002 _ 66 44: 0F EF. DA
+ pxor xmm1, xmm10 ; 1007 _ 66 41: 0F EF. CA
+ pxor xmm1, xmm11 ; 100C _ 66 41: 0F EF. CB
+ por xmm2, xmm0 ; 1011 _ 66: 0F EB. D0
+ pxor xmm2, xmm10 ; 1015 _ 66 41: 0F EF. D2
+ movdqa xmm8, xmm2 ; 101A _ 66 44: 0F 6F. C2
+ pxor xmm0, xmm1 ; 101F _ 66: 0F EF. C1
+ por xmm8, xmm0 ; 1023 _ 66 44: 0F EB. C0
+ pxor xmm8, xmm11 ; 1028 _ 66 45: 0F EF. C3
+ pand xmm11, xmm2 ; 102D _ 66 44: 0F DB. DA
+ pxor xmm0, xmm11 ; 1032 _ 66 41: 0F EF. C3
+ movdqa xmm11, xmm1 ; 1037 _ 66 44: 0F 6F. D9
+ pxor xmm2, xmm8 ; 103C _ 66 41: 0F EF. D0
+ pxor xmm2, xmm0 ; 1041 _ 66: 0F EF. D0
+ movdqa xmm10, xmm2 ; 1045 _ 66 44: 0F 6F. D2
+ pxor xmm0, xmm6 ; 104A _ 66: 0F EF. C6
+ pslld xmm11, 13 ; 104E _ 66 41: 0F 72. F3, 0D
+ psrld xmm1, 19 ; 1054 _ 66: 0F 72. D1, 13
+ por xmm11, xmm1 ; 1059 _ 66 44: 0F EB. D9
+ pslld xmm10, 3 ; 105E _ 66 41: 0F 72. F2, 03
+ psrld xmm2, 29 ; 1064 _ 66: 0F 72. D2, 1D
+ por xmm10, xmm2 ; 1069 _ 66 44: 0F EB. D2
+ movdqa xmm2, xmm11 ; 106E _ 66 41: 0F 6F. D3
+ pxor xmm8, xmm11 ; 1073 _ 66 45: 0F EF. C3
+ pxor xmm8, xmm10 ; 1078 _ 66 45: 0F EF. C2
+ movdqa xmm1, xmm8 ; 107D _ 66 41: 0F 6F. C8
+ pxor xmm0, xmm10 ; 1082 _ 66 41: 0F EF. C2
+ pslld xmm2, 3 ; 1087 _ 66: 0F 72. F2, 03
+ pxor xmm0, xmm2 ; 108C _ 66: 0F EF. C2
+ pslld xmm1, 1 ; 1090 _ 66: 0F 72. F1, 01
+ psrld xmm8, 31 ; 1095 _ 66 41: 0F 72. D0, 1F
+ por xmm1, xmm8 ; 109B _ 66 41: 0F EB. C8
+ movdqa xmm8, xmm0 ; 10A0 _ 66 44: 0F 6F. C0
+ movdqa xmm2, xmm1 ; 10A5 _ 66: 0F 6F. D1
+ psrld xmm0, 25 ; 10A9 _ 66: 0F 72. D0, 19
+ pslld xmm8, 7 ; 10AE _ 66 41: 0F 72. F0, 07
+ por xmm8, xmm0 ; 10B4 _ 66 44: 0F EB. C0
+ pxor xmm11, xmm1 ; 10B9 _ 66 44: 0F EF. D9
+ pxor xmm11, xmm8 ; 10BE _ 66 45: 0F EF. D8
+ movdqa xmm0, xmm11 ; 10C3 _ 66 41: 0F 6F. C3
+ pxor xmm10, xmm8 ; 10C8 _ 66 45: 0F EF. D0
+ pslld xmm2, 7 ; 10CD _ 66: 0F 72. F2, 07
+ pxor xmm10, xmm2 ; 10D2 _ 66 44: 0F EF. D2
+ movdqa xmm2, xmm10 ; 10D7 _ 66 41: 0F 6F. D2
+ pslld xmm0, 5 ; 10DC _ 66: 0F 72. F0, 05
+ psrld xmm11, 27 ; 10E1 _ 66 41: 0F 72. D3, 1B
+ por xmm0, xmm11 ; 10E7 _ 66 41: 0F EB. C3
+ pslld xmm2, 22 ; 10EC _ 66: 0F 72. F2, 16
+ psrld xmm10, 10 ; 10F1 _ 66 41: 0F 72. D2, 0A
+ por xmm2, xmm10 ; 10F7 _ 66 41: 0F EB. D2
+ movd xmm10, dword [r12+1330H] ; 10FC _ 66 45: 0F 6E. 94 24, 00001330
+ movd xmm11, dword [r12+1334H] ; 1106 _ 66 45: 0F 6E. 9C 24, 00001334
+ pshufd xmm10, xmm10, 0 ; 1110 _ 66 45: 0F 70. D2, 00
+ pxor xmm0, xmm10 ; 1116 _ 66 41: 0F EF. C2
+ pshufd xmm10, xmm11, 0 ; 111B _ 66 45: 0F 70. D3, 00
+ pxor xmm1, xmm10 ; 1121 _ 66 41: 0F EF. CA
+ movd xmm11, dword [r12+1338H] ; 1126 _ 66 45: 0F 6E. 9C 24, 00001338
+ pshufd xmm10, xmm11, 0 ; 1130 _ 66 45: 0F 70. D3, 00
+ pxor xmm2, xmm10 ; 1136 _ 66 41: 0F EF. D2
+ movd xmm11, dword [r12+133CH] ; 113B _ 66 45: 0F 6E. 9C 24, 0000133C
+ pshufd xmm10, xmm11, 0 ; 1145 _ 66 45: 0F 70. D3, 00
+ movdqa xmm11, xmm0 ; 114B _ 66 44: 0F 6F. D8
+ pxor xmm8, xmm10 ; 1150 _ 66 45: 0F EF. C2
+ por xmm11, xmm8 ; 1155 _ 66 45: 0F EB. D8
+ pxor xmm8, xmm1 ; 115A _ 66 44: 0F EF. C1
+ pand xmm1, xmm0 ; 115F _ 66: 0F DB. C8
+ pxor xmm0, xmm2 ; 1163 _ 66: 0F EF. C2
+ pxor xmm2, xmm8 ; 1167 _ 66 41: 0F EF. D0
+ pand xmm8, xmm11 ; 116C _ 66 45: 0F DB. C3
+ por xmm0, xmm1 ; 1171 _ 66: 0F EB. C1
+ pxor xmm8, xmm0 ; 1175 _ 66 44: 0F EF. C0
+ pxor xmm11, xmm1 ; 117A _ 66 44: 0F EF. D9
+ pand xmm0, xmm11 ; 117F _ 66 41: 0F DB. C3
+ pxor xmm1, xmm8 ; 1184 _ 66 41: 0F EF. C8
+ pxor xmm0, xmm2 ; 1189 _ 66: 0F EF. C2
+ por xmm1, xmm11 ; 118D _ 66 41: 0F EB. CB
+ pxor xmm1, xmm2 ; 1192 _ 66: 0F EF. CA
+ movdqa xmm2, xmm1 ; 1196 _ 66: 0F 6F. D1
+ pxor xmm11, xmm8 ; 119A _ 66 45: 0F EF. D8
+ por xmm2, xmm8 ; 119F _ 66 41: 0F EB. D0
+ pxor xmm11, xmm2 ; 11A4 _ 66 44: 0F EF. DA
+ movdqa xmm10, xmm11 ; 11A9 _ 66 45: 0F 6F. D3
+ movdqa xmm2, xmm8 ; 11AE _ 66 41: 0F 6F. D0
+ psrld xmm11, 19 ; 11B3 _ 66 41: 0F 72. D3, 13
+ pslld xmm10, 13 ; 11B9 _ 66 41: 0F 72. F2, 0D
+ por xmm10, xmm11 ; 11BF _ 66 45: 0F EB. D3
+ pslld xmm2, 3 ; 11C4 _ 66: 0F 72. F2, 03
+ psrld xmm8, 29 ; 11C9 _ 66 41: 0F 72. D0, 1D
+ por xmm2, xmm8 ; 11CF _ 66 41: 0F EB. D0
+ movdqa xmm8, xmm10 ; 11D4 _ 66 45: 0F 6F. C2
+ pxor xmm1, xmm10 ; 11D9 _ 66 41: 0F EF. CA
+ pxor xmm1, xmm2 ; 11DE _ 66: 0F EF. CA
+ pxor xmm0, xmm2 ; 11E2 _ 66: 0F EF. C2
+ pslld xmm8, 3 ; 11E6 _ 66 41: 0F 72. F0, 03
+ pxor xmm0, xmm8 ; 11EC _ 66 41: 0F EF. C0
+ movdqa xmm8, xmm1 ; 11F1 _ 66 44: 0F 6F. C1
+ psrld xmm1, 31 ; 11F6 _ 66: 0F 72. D1, 1F
+ pslld xmm8, 1 ; 11FB _ 66 41: 0F 72. F0, 01
+ por xmm8, xmm1 ; 1201 _ 66 44: 0F EB. C1
+ movdqa xmm1, xmm0 ; 1206 _ 66: 0F 6F. C8
+ movdqa xmm11, xmm8 ; 120A _ 66 45: 0F 6F. D8
+ psrld xmm0, 25 ; 120F _ 66: 0F 72. D0, 19
+ pslld xmm1, 7 ; 1214 _ 66: 0F 72. F1, 07
+ por xmm1, xmm0 ; 1219 _ 66: 0F EB. C8
+ pxor xmm10, xmm8 ; 121D _ 66 45: 0F EF. D0
+ pxor xmm10, xmm1 ; 1222 _ 66 44: 0F EF. D1
+ movdqa xmm0, xmm10 ; 1227 _ 66 41: 0F 6F. C2
+ pxor xmm2, xmm1 ; 122C _ 66: 0F EF. D1
+ pslld xmm11, 7 ; 1230 _ 66 41: 0F 72. F3, 07
+ pxor xmm2, xmm11 ; 1236 _ 66 41: 0F EF. D3
+ pslld xmm0, 5 ; 123B _ 66: 0F 72. F0, 05
+ psrld xmm10, 27 ; 1240 _ 66 41: 0F 72. D2, 1B
+ por xmm0, xmm10 ; 1246 _ 66 41: 0F EB. C2
+ movdqa xmm10, xmm2 ; 124B _ 66 44: 0F 6F. D2
+ movd xmm11, dword [r12+1344H] ; 1250 _ 66 45: 0F 6E. 9C 24, 00001344
+ psrld xmm2, 10 ; 125A _ 66: 0F 72. D2, 0A
+ pslld xmm10, 22 ; 125F _ 66 41: 0F 72. F2, 16
+ por xmm10, xmm2 ; 1265 _ 66 44: 0F EB. D2
+ movd xmm2, dword [r12+1340H] ; 126A _ 66 41: 0F 6E. 94 24, 00001340
+ pshufd xmm2, xmm2, 0 ; 1274 _ 66: 0F 70. D2, 00
+ pxor xmm0, xmm2 ; 1279 _ 66: 0F EF. C2
+ pshufd xmm2, xmm11, 0 ; 127D _ 66 41: 0F 70. D3, 00
+ movd xmm11, dword [r12+1348H] ; 1283 _ 66 45: 0F 6E. 9C 24, 00001348
+ pxor xmm8, xmm2 ; 128D _ 66 44: 0F EF. C2
+ pshufd xmm2, xmm11, 0 ; 1292 _ 66 41: 0F 70. D3, 00
+ pxor xmm10, xmm2 ; 1298 _ 66 44: 0F EF. D2
+ movd xmm11, dword [r12+134CH] ; 129D _ 66 45: 0F 6E. 9C 24, 0000134C
+ pshufd xmm2, xmm11, 0 ; 12A7 _ 66 41: 0F 70. D3, 00
+ pxor xmm1, xmm2 ; 12AD _ 66: 0F EF. CA
+ pxor xmm8, xmm1 ; 12B1 _ 66 44: 0F EF. C1
+ movdqa xmm11, xmm8 ; 12B6 _ 66 45: 0F 6F. D8
+ pxor xmm1, xmm6 ; 12BB _ 66: 0F EF. CE
+ pxor xmm10, xmm1 ; 12BF _ 66 44: 0F EF. D1
+ pxor xmm1, xmm0 ; 12C4 _ 66: 0F EF. C8
+ pand xmm11, xmm1 ; 12C8 _ 66 44: 0F DB. D9
+ pxor xmm11, xmm10 ; 12CD _ 66 45: 0F EF. DA
+ pxor xmm8, xmm1 ; 12D2 _ 66 44: 0F EF. C1
+ pxor xmm0, xmm8 ; 12D7 _ 66 41: 0F EF. C0
+ movdqa xmm2, xmm11 ; 12DC _ 66 41: 0F 6F. D3
+ pand xmm10, xmm8 ; 12E1 _ 66 45: 0F DB. D0
+ pxor xmm10, xmm0 ; 12E6 _ 66 44: 0F EF. D0
+ pand xmm0, xmm11 ; 12EB _ 66 41: 0F DB. C3
+ pxor xmm1, xmm0 ; 12F0 _ 66: 0F EF. C8
+ por xmm8, xmm11 ; 12F4 _ 66 45: 0F EB. C3
+ pxor xmm8, xmm0 ; 12F9 _ 66 44: 0F EF. C0
+ por xmm0, xmm1 ; 12FE _ 66: 0F EB. C1
+ pxor xmm0, xmm10 ; 1302 _ 66 41: 0F EF. C2
+ pand xmm10, xmm1 ; 1307 _ 66 44: 0F DB. D1
+ pxor xmm0, xmm6 ; 130C _ 66: 0F EF. C6
+ pxor xmm8, xmm10 ; 1310 _ 66 45: 0F EF. C2
+ pslld xmm2, 13 ; 1315 _ 66: 0F 72. F2, 0D
+ psrld xmm11, 19 ; 131A _ 66 41: 0F 72. D3, 13
+ por xmm2, xmm11 ; 1320 _ 66 41: 0F EB. D3
+ movdqa xmm11, xmm0 ; 1325 _ 66 44: 0F 6F. D8
+ movdqa xmm10, xmm2 ; 132A _ 66 44: 0F 6F. D2
+ psrld xmm0, 29 ; 132F _ 66: 0F 72. D0, 1D
+ pslld xmm11, 3 ; 1334 _ 66 41: 0F 72. F3, 03
+ por xmm11, xmm0 ; 133A _ 66 44: 0F EB. D8
+ pxor xmm8, xmm2 ; 133F _ 66 44: 0F EF. C2
+ pxor xmm8, xmm11 ; 1344 _ 66 45: 0F EF. C3
+ movdqa xmm0, xmm8 ; 1349 _ 66 41: 0F 6F. C0
+ pxor xmm1, xmm11 ; 134E _ 66 41: 0F EF. CB
+ pslld xmm10, 3 ; 1353 _ 66 41: 0F 72. F2, 03
+ pxor xmm1, xmm10 ; 1359 _ 66 41: 0F EF. CA
+ movdqa xmm10, xmm1 ; 135E _ 66 44: 0F 6F. D1
+ pslld xmm0, 1 ; 1363 _ 66: 0F 72. F0, 01
+ psrld xmm8, 31 ; 1368 _ 66 41: 0F 72. D0, 1F
+ por xmm0, xmm8 ; 136E _ 66 41: 0F EB. C0
+ movdqa xmm8, xmm0 ; 1373 _ 66 44: 0F 6F. C0
+ pslld xmm10, 7 ; 1378 _ 66 41: 0F 72. F2, 07
+ psrld xmm1, 25 ; 137E _ 66: 0F 72. D1, 19
+ por xmm10, xmm1 ; 1383 _ 66 44: 0F EB. D1
+ pxor xmm2, xmm0 ; 1388 _ 66: 0F EF. D0
+ pxor xmm2, xmm10 ; 138C _ 66 41: 0F EF. D2
+ pxor xmm11, xmm10 ; 1391 _ 66 45: 0F EF. DA
+ pslld xmm8, 7 ; 1396 _ 66 41: 0F 72. F0, 07
+ pxor xmm11, xmm8 ; 139C _ 66 45: 0F EF. D8
+ movdqa xmm8, xmm2 ; 13A1 _ 66 44: 0F 6F. C2
+ movdqa xmm1, xmm11 ; 13A6 _ 66 41: 0F 6F. CB
+ psrld xmm2, 27 ; 13AB _ 66: 0F 72. D2, 1B
+ pslld xmm8, 5 ; 13B0 _ 66 41: 0F 72. F0, 05
+ por xmm8, xmm2 ; 13B6 _ 66 44: 0F EB. C2
+ pslld xmm1, 22 ; 13BB _ 66: 0F 72. F1, 16
+ psrld xmm11, 10 ; 13C0 _ 66 41: 0F 72. D3, 0A
+ por xmm1, xmm11 ; 13C6 _ 66 41: 0F EB. CB
+ movd xmm11, dword [r12+1350H] ; 13CB _ 66 45: 0F 6E. 9C 24, 00001350
+ movd xmm2, dword [r12+1354H] ; 13D5 _ 66 41: 0F 6E. 94 24, 00001354
+ pshufd xmm11, xmm11, 0 ; 13DF _ 66 45: 0F 70. DB, 00
+ pxor xmm8, xmm11 ; 13E5 _ 66 45: 0F EF. C3
+ pshufd xmm11, xmm2, 0 ; 13EA _ 66 44: 0F 70. DA, 00
+ pxor xmm0, xmm11 ; 13F0 _ 66 41: 0F EF. C3
+ pxor xmm8, xmm0 ; 13F5 _ 66 44: 0F EF. C0
+ movd xmm2, dword [r12+1358H] ; 13FA _ 66 41: 0F 6E. 94 24, 00001358
+ pshufd xmm11, xmm2, 0 ; 1404 _ 66 44: 0F 70. DA, 00
+ movd xmm2, dword [r12+135CH] ; 140A _ 66 41: 0F 6E. 94 24, 0000135C
+ pxor xmm1, xmm11 ; 1414 _ 66 41: 0F EF. CB
+ pshufd xmm11, xmm2, 0 ; 1419 _ 66 44: 0F 70. DA, 00
+ pxor xmm10, xmm11 ; 141F _ 66 45: 0F EF. D3
+ pxor xmm0, xmm10 ; 1424 _ 66 41: 0F EF. C2
+ movdqa xmm11, xmm0 ; 1429 _ 66 44: 0F 6F. D8
+ pxor xmm10, xmm6 ; 142E _ 66 44: 0F EF. D6
+ pxor xmm1, xmm10 ; 1433 _ 66 41: 0F EF. CA
+ pand xmm11, xmm8 ; 1438 _ 66 45: 0F DB. D8
+ pxor xmm11, xmm1 ; 143D _ 66 44: 0F EF. D9
+ movdqa xmm2, xmm11 ; 1442 _ 66 41: 0F 6F. D3
+ por xmm1, xmm0 ; 1447 _ 66: 0F EB. C8
+ pxor xmm0, xmm10 ; 144B _ 66 41: 0F EF. C2
+ pand xmm10, xmm11 ; 1450 _ 66 45: 0F DB. D3
+ pxor xmm10, xmm8 ; 1455 _ 66 45: 0F EF. D0
+ pxor xmm0, xmm11 ; 145A _ 66 41: 0F EF. C3
+ pxor xmm0, xmm1 ; 145F _ 66: 0F EF. C1
+ pxor xmm1, xmm8 ; 1463 _ 66 41: 0F EF. C8
+ pand xmm8, xmm10 ; 1468 _ 66 45: 0F DB. C2
+ pxor xmm1, xmm6 ; 146D _ 66: 0F EF. CE
+ pxor xmm8, xmm0 ; 1471 _ 66 44: 0F EF. C0
+ por xmm0, xmm10 ; 1476 _ 66 41: 0F EB. C2
+ pxor xmm0, xmm1 ; 147B _ 66: 0F EF. C1
+ movdqa xmm1, xmm8 ; 147F _ 66 41: 0F 6F. C8
+ pslld xmm2, 13 ; 1484 _ 66: 0F 72. F2, 0D
+ psrld xmm11, 19 ; 1489 _ 66 41: 0F 72. D3, 13
+ por xmm2, xmm11 ; 148F _ 66 41: 0F EB. D3
+ pslld xmm1, 3 ; 1494 _ 66: 0F 72. F1, 03
+ psrld xmm8, 29 ; 1499 _ 66 41: 0F 72. D0, 1D
+ por xmm1, xmm8 ; 149F _ 66 41: 0F EB. C8
+ movdqa xmm8, xmm2 ; 14A4 _ 66 44: 0F 6F. C2
+ pxor xmm10, xmm2 ; 14A9 _ 66 44: 0F EF. D2
+ pxor xmm10, xmm1 ; 14AE _ 66 44: 0F EF. D1
+ movdqa xmm11, xmm10 ; 14B3 _ 66 45: 0F 6F. DA
+ pxor xmm0, xmm1 ; 14B8 _ 66: 0F EF. C1
+ pslld xmm8, 3 ; 14BC _ 66 41: 0F 72. F0, 03
+ pxor xmm0, xmm8 ; 14C2 _ 66 41: 0F EF. C0
+ movdqa xmm8, xmm0 ; 14C7 _ 66 44: 0F 6F. C0
+ pslld xmm11, 1 ; 14CC _ 66 41: 0F 72. F3, 01
+ psrld xmm10, 31 ; 14D2 _ 66 41: 0F 72. D2, 1F
+ por xmm11, xmm10 ; 14D8 _ 66 45: 0F EB. DA
+ movdqa xmm10, xmm11 ; 14DD _ 66 45: 0F 6F. D3
+ pslld xmm8, 7 ; 14E2 _ 66 41: 0F 72. F0, 07
+ psrld xmm0, 25 ; 14E8 _ 66: 0F 72. D0, 19
+ por xmm8, xmm0 ; 14ED _ 66 44: 0F EB. C0
+ pxor xmm2, xmm11 ; 14F2 _ 66 41: 0F EF. D3
+ pxor xmm2, xmm8 ; 14F7 _ 66 41: 0F EF. D0
+ pxor xmm1, xmm8 ; 14FC _ 66 41: 0F EF. C8
+ pslld xmm10, 7 ; 1501 _ 66 41: 0F 72. F2, 07
+ movd xmm0, dword [r12+1360H] ; 1507 _ 66 41: 0F 6E. 84 24, 00001360
+ pxor xmm1, xmm10 ; 1511 _ 66 41: 0F EF. CA
+ movdqa xmm10, xmm2 ; 1516 _ 66 44: 0F 6F. D2
+ psrld xmm2, 27 ; 151B _ 66: 0F 72. D2, 1B
+ pshufd xmm0, xmm0, 0 ; 1520 _ 66: 0F 70. C0, 00
+ pslld xmm10, 5 ; 1525 _ 66 41: 0F 72. F2, 05
+ por xmm10, xmm2 ; 152B _ 66 44: 0F EB. D2
+ movdqa xmm2, xmm1 ; 1530 _ 66: 0F 6F. D1
+ psrld xmm1, 10 ; 1534 _ 66: 0F 72. D1, 0A
+ pxor xmm10, xmm0 ; 1539 _ 66 44: 0F EF. D0
+ pslld xmm2, 22 ; 153E _ 66: 0F 72. F2, 16
+ por xmm2, xmm1 ; 1543 _ 66: 0F EB. D1
+ movd xmm0, dword [r12+1364H] ; 1547 _ 66 41: 0F 6E. 84 24, 00001364
+ pshufd xmm0, xmm0, 0 ; 1551 _ 66: 0F 70. C0, 00
+ pxor xmm11, xmm0 ; 1556 _ 66 44: 0F EF. D8
+ movd xmm0, dword [r12+1368H] ; 155B _ 66 41: 0F 6E. 84 24, 00001368
+ pshufd xmm0, xmm0, 0 ; 1565 _ 66: 0F 70. C0, 00
+ pxor xmm2, xmm0 ; 156A _ 66: 0F EF. D0
+ pxor xmm2, xmm6 ; 156E _ 66: 0F EF. D6
+ movd xmm0, dword [r12+136CH] ; 1572 _ 66 41: 0F 6E. 84 24, 0000136C
+ pshufd xmm0, xmm0, 0 ; 157C _ 66: 0F 70. C0, 00
+ pxor xmm8, xmm0 ; 1581 _ 66 44: 0F EF. C0
+ movdqa xmm1, xmm8 ; 1586 _ 66 41: 0F 6F. C8
+ pand xmm1, xmm10 ; 158B _ 66 41: 0F DB. CA
+ pxor xmm10, xmm8 ; 1590 _ 66 45: 0F EF. D0
+ pxor xmm1, xmm2 ; 1595 _ 66: 0F EF. CA
+ por xmm2, xmm8 ; 1599 _ 66 41: 0F EB. D0
+ pxor xmm11, xmm1 ; 159E _ 66 44: 0F EF. D9
+ pxor xmm2, xmm10 ; 15A3 _ 66 41: 0F EF. D2
+ por xmm10, xmm11 ; 15A8 _ 66 45: 0F EB. D3
+ pxor xmm2, xmm11 ; 15AD _ 66 41: 0F EF. D3
+ pxor xmm8, xmm10 ; 15B2 _ 66 45: 0F EF. C2
+ por xmm10, xmm1 ; 15B7 _ 66 44: 0F EB. D1
+ pxor xmm10, xmm2 ; 15BC _ 66 44: 0F EF. D2
+ movdqa xmm0, xmm10 ; 15C1 _ 66 41: 0F 6F. C2
+ pxor xmm8, xmm1 ; 15C6 _ 66 44: 0F EF. C1
+ pxor xmm8, xmm10 ; 15CB _ 66 45: 0F EF. C2
+ pxor xmm1, xmm6 ; 15D0 _ 66: 0F EF. CE
+ pand xmm2, xmm8 ; 15D4 _ 66 41: 0F DB. D0
+ pxor xmm1, xmm2 ; 15D9 _ 66: 0F EF. CA
+ movdqa xmm2, xmm8 ; 15DD _ 66 41: 0F 6F. D0
+ pslld xmm0, 13 ; 15E2 _ 66: 0F 72. F0, 0D
+ psrld xmm10, 19 ; 15E7 _ 66 41: 0F 72. D2, 13
+ por xmm0, xmm10 ; 15ED _ 66 41: 0F EB. C2
+ pslld xmm2, 3 ; 15F2 _ 66: 0F 72. F2, 03
+ psrld xmm8, 29 ; 15F7 _ 66 41: 0F 72. D0, 1D
+ por xmm2, xmm8 ; 15FD _ 66 41: 0F EB. D0
+ movdqa xmm8, xmm0 ; 1602 _ 66 44: 0F 6F. C0
+ pxor xmm11, xmm0 ; 1607 _ 66 44: 0F EF. D8
+ pxor xmm11, xmm2 ; 160C _ 66 44: 0F EF. DA
+ pxor xmm1, xmm2 ; 1611 _ 66: 0F EF. CA
+ pslld xmm8, 3 ; 1615 _ 66 41: 0F 72. F0, 03
+ pxor xmm1, xmm8 ; 161B _ 66 41: 0F EF. C8
+ movdqa xmm8, xmm11 ; 1620 _ 66 45: 0F 6F. C3
+ movdqa xmm10, xmm1 ; 1625 _ 66 44: 0F 6F. D1
+ psrld xmm11, 31 ; 162A _ 66 41: 0F 72. D3, 1F
+ pslld xmm8, 1 ; 1630 _ 66 41: 0F 72. F0, 01
+ por xmm8, xmm11 ; 1636 _ 66 45: 0F EB. C3
+ movdqa xmm11, xmm8 ; 163B _ 66 45: 0F 6F. D8
+ pslld xmm10, 7 ; 1640 _ 66 41: 0F 72. F2, 07
+ psrld xmm1, 25 ; 1646 _ 66: 0F 72. D1, 19
+ por xmm10, xmm1 ; 164B _ 66 44: 0F EB. D1
+ pxor xmm0, xmm8 ; 1650 _ 66 41: 0F EF. C0
+ pxor xmm0, xmm10 ; 1655 _ 66 41: 0F EF. C2
+ pxor xmm2, xmm10 ; 165A _ 66 41: 0F EF. D2
+ pslld xmm11, 7 ; 165F _ 66 41: 0F 72. F3, 07
+ pxor xmm2, xmm11 ; 1665 _ 66 41: 0F EF. D3
+ movdqa xmm11, xmm0 ; 166A _ 66 44: 0F 6F. D8
+ movdqa xmm1, xmm2 ; 166F _ 66: 0F 6F. CA
+ psrld xmm0, 27 ; 1673 _ 66: 0F 72. D0, 1B
+ pslld xmm11, 5 ; 1678 _ 66 41: 0F 72. F3, 05
+ por xmm11, xmm0 ; 167E _ 66 44: 0F EB. D8
+ pslld xmm1, 22 ; 1683 _ 66: 0F 72. F1, 16
+ psrld xmm2, 10 ; 1688 _ 66: 0F 72. D2, 0A
+ por xmm1, xmm2 ; 168D _ 66: 0F EB. CA
+ movd xmm2, dword [r12+1370H] ; 1691 _ 66 41: 0F 6E. 94 24, 00001370
+ pshufd xmm2, xmm2, 0 ; 169B _ 66: 0F 70. D2, 00
+ pxor xmm11, xmm2 ; 16A0 _ 66 44: 0F EF. DA
+ movd xmm2, dword [r12+1374H] ; 16A5 _ 66 41: 0F 6E. 94 24, 00001374
+ pshufd xmm2, xmm2, 0 ; 16AF _ 66: 0F 70. D2, 00
+ pxor xmm8, xmm2 ; 16B4 _ 66 44: 0F EF. C2
+ movdqa xmm0, xmm8 ; 16B9 _ 66 41: 0F 6F. C0
+ movd xmm2, dword [r12+1378H] ; 16BE _ 66 41: 0F 6E. 94 24, 00001378
+ pshufd xmm2, xmm2, 0 ; 16C8 _ 66: 0F 70. D2, 00
+ pxor xmm1, xmm2 ; 16CD _ 66: 0F EF. CA
+ movd xmm2, dword [r12+137CH] ; 16D1 _ 66 41: 0F 6E. 94 24, 0000137C
+ pshufd xmm2, xmm2, 0 ; 16DB _ 66: 0F 70. D2, 00
+ pxor xmm10, xmm2 ; 16E0 _ 66 44: 0F EF. D2
+ por xmm0, xmm1 ; 16E5 _ 66: 0F EB. C1
+ pxor xmm0, xmm10 ; 16E9 _ 66 41: 0F EF. C2
+ pxor xmm8, xmm1 ; 16EE _ 66 44: 0F EF. C1
+ pxor xmm1, xmm0 ; 16F3 _ 66: 0F EF. C8
+ por xmm10, xmm8 ; 16F7 _ 66 45: 0F EB. D0
+ pand xmm10, xmm11 ; 16FC _ 66 45: 0F DB. D3
+ pxor xmm8, xmm1 ; 1701 _ 66 44: 0F EF. C1
+ pxor xmm10, xmm0 ; 1706 _ 66 44: 0F EF. D0
+ por xmm0, xmm8 ; 170B _ 66 41: 0F EB. C0
+ pxor xmm0, xmm11 ; 1710 _ 66 41: 0F EF. C3
+ por xmm11, xmm8 ; 1715 _ 66 45: 0F EB. D8
+ pxor xmm11, xmm1 ; 171A _ 66 44: 0F EF. D9
+ pxor xmm0, xmm8 ; 171F _ 66 41: 0F EF. C0
+ pxor xmm1, xmm0 ; 1724 _ 66: 0F EF. C8
+ pand xmm0, xmm11 ; 1728 _ 66 41: 0F DB. C3
+ pxor xmm0, xmm8 ; 172D _ 66 41: 0F EF. C0
+ pxor xmm1, xmm6 ; 1732 _ 66: 0F EF. CE
+ por xmm1, xmm11 ; 1736 _ 66 41: 0F EB. CB
+ pxor xmm8, xmm1 ; 173B _ 66 44: 0F EF. C1
+ movdqa xmm1, xmm8 ; 1740 _ 66 41: 0F 6F. C8
+ psrld xmm8, 19 ; 1745 _ 66 41: 0F 72. D0, 13
+ pslld xmm1, 13 ; 174B _ 66: 0F 72. F1, 0D
+ por xmm1, xmm8 ; 1750 _ 66 41: 0F EB. C8
+ movdqa xmm8, xmm0 ; 1755 _ 66 44: 0F 6F. C0
+ movdqa xmm2, xmm1 ; 175A _ 66: 0F 6F. D1
+ psrld xmm0, 29 ; 175E _ 66: 0F 72. D0, 1D
+ pslld xmm8, 3 ; 1763 _ 66 41: 0F 72. F0, 03
+ por xmm8, xmm0 ; 1769 _ 66 44: 0F EB. C0
+ pxor xmm10, xmm1 ; 176E _ 66 44: 0F EF. D1
+ pxor xmm10, xmm8 ; 1773 _ 66 45: 0F EF. D0
+ pxor xmm11, xmm8 ; 1778 _ 66 45: 0F EF. D8
+ pslld xmm2, 3 ; 177D _ 66: 0F 72. F2, 03
+ pxor xmm11, xmm2 ; 1782 _ 66 44: 0F EF. DA
+ movdqa xmm2, xmm10 ; 1787 _ 66 41: 0F 6F. D2
+ movdqa xmm0, xmm11 ; 178C _ 66 41: 0F 6F. C3
+ psrld xmm10, 31 ; 1791 _ 66 41: 0F 72. D2, 1F
+ pslld xmm2, 1 ; 1797 _ 66: 0F 72. F2, 01
+ por xmm2, xmm10 ; 179C _ 66 41: 0F EB. D2
+ movdqa xmm10, xmm2 ; 17A1 _ 66 44: 0F 6F. D2
+ pslld xmm0, 7 ; 17A6 _ 66: 0F 72. F0, 07
+ psrld xmm11, 25 ; 17AB _ 66 41: 0F 72. D3, 19
+ por xmm0, xmm11 ; 17B1 _ 66 41: 0F EB. C3
+ pxor xmm1, xmm2 ; 17B6 _ 66: 0F EF. CA
+ pxor xmm1, xmm0 ; 17BA _ 66: 0F EF. C8
+ pxor xmm8, xmm0 ; 17BE _ 66 44: 0F EF. C0
+ pslld xmm10, 7 ; 17C3 _ 66 41: 0F 72. F2, 07
+ movd xmm11, dword [r12+1380H] ; 17C9 _ 66 45: 0F 6E. 9C 24, 00001380
+ pxor xmm8, xmm10 ; 17D3 _ 66 45: 0F EF. C2
+ movdqa xmm10, xmm1 ; 17D8 _ 66 44: 0F 6F. D1
+ psrld xmm1, 27 ; 17DD _ 66: 0F 72. D1, 1B
+ pslld xmm10, 5 ; 17E2 _ 66 41: 0F 72. F2, 05
+ por xmm10, xmm1 ; 17E8 _ 66 44: 0F EB. D1
+ movdqa xmm1, xmm8 ; 17ED _ 66 41: 0F 6F. C8
+ psrld xmm8, 10 ; 17F2 _ 66 41: 0F 72. D0, 0A
+ pslld xmm1, 22 ; 17F8 _ 66: 0F 72. F1, 16
+ por xmm1, xmm8 ; 17FD _ 66 41: 0F EB. C8
+ pshufd xmm8, xmm11, 0 ; 1802 _ 66 45: 0F 70. C3, 00
+ pxor xmm10, xmm8 ; 1808 _ 66 45: 0F EF. D0
+ movd xmm11, dword [r12+1384H] ; 180D _ 66 45: 0F 6E. 9C 24, 00001384
+ pshufd xmm8, xmm11, 0 ; 1817 _ 66 45: 0F 70. C3, 00
+ movd xmm11, dword [r12+1388H] ; 181D _ 66 45: 0F 6E. 9C 24, 00001388
+ pxor xmm2, xmm8 ; 1827 _ 66 41: 0F EF. D0
+ pshufd xmm8, xmm11, 0 ; 182C _ 66 45: 0F 70. C3, 00
+ pxor xmm1, xmm8 ; 1832 _ 66 41: 0F EF. C8
+ movd xmm11, dword [r12+138CH] ; 1837 _ 66 45: 0F 6E. 9C 24, 0000138C
+ pshufd xmm8, xmm11, 0 ; 1841 _ 66 45: 0F 70. C3, 00
+ movdqa xmm11, xmm2 ; 1847 _ 66 44: 0F 6F. DA
+ pxor xmm0, xmm8 ; 184C _ 66 41: 0F EF. C0
+ pxor xmm0, xmm10 ; 1851 _ 66 41: 0F EF. C2
+ pand xmm11, xmm0 ; 1856 _ 66 44: 0F DB. D8
+ pxor xmm2, xmm1 ; 185B _ 66: 0F EF. D1
+ pxor xmm11, xmm10 ; 185F _ 66 45: 0F EF. DA
+ por xmm10, xmm0 ; 1864 _ 66 44: 0F EB. D0
+ pxor xmm10, xmm2 ; 1869 _ 66 44: 0F EF. D2
+ pxor xmm2, xmm0 ; 186E _ 66: 0F EF. D0
+ pxor xmm0, xmm1 ; 1872 _ 66: 0F EF. C1
+ por xmm1, xmm11 ; 1876 _ 66 41: 0F EB. CB
+ pxor xmm1, xmm2 ; 187B _ 66: 0F EF. CA
+ pxor xmm2, xmm6 ; 187F _ 66: 0F EF. D6
+ por xmm2, xmm11 ; 1883 _ 66 41: 0F EB. D3
+ pxor xmm11, xmm0 ; 1888 _ 66 44: 0F EF. D8
+ pxor xmm11, xmm2 ; 188D _ 66 44: 0F EF. DA
+ por xmm0, xmm10 ; 1892 _ 66 41: 0F EB. C2
+ pxor xmm11, xmm0 ; 1897 _ 66 44: 0F EF. D8
+ movdqa xmm8, xmm11 ; 189C _ 66 45: 0F 6F. C3
+ pxor xmm2, xmm0 ; 18A1 _ 66: 0F EF. D0
+ psrld xmm11, 19 ; 18A5 _ 66 41: 0F 72. D3, 13
+ pslld xmm8, 13 ; 18AB _ 66 41: 0F 72. F0, 0D
+ por xmm8, xmm11 ; 18B1 _ 66 45: 0F EB. C3
+ movdqa xmm11, xmm1 ; 18B6 _ 66 44: 0F 6F. D9
+ movdqa xmm0, xmm8 ; 18BB _ 66 41: 0F 6F. C0
+ psrld xmm1, 29 ; 18C0 _ 66: 0F 72. D1, 1D
+ pslld xmm11, 3 ; 18C5 _ 66 41: 0F 72. F3, 03
+ por xmm11, xmm1 ; 18CB _ 66 44: 0F EB. D9
+ pxor xmm2, xmm8 ; 18D0 _ 66 41: 0F EF. D0
+ pxor xmm2, xmm11 ; 18D5 _ 66 41: 0F EF. D3
+ pxor xmm10, xmm11 ; 18DA _ 66 45: 0F EF. D3
+ pslld xmm0, 3 ; 18DF _ 66: 0F 72. F0, 03
+ pxor xmm10, xmm0 ; 18E4 _ 66 44: 0F EF. D0
+ movdqa xmm0, xmm2 ; 18E9 _ 66: 0F 6F. C2
+ psrld xmm2, 31 ; 18ED _ 66: 0F 72. D2, 1F
+ pslld xmm0, 1 ; 18F2 _ 66: 0F 72. F0, 01
+ por xmm0, xmm2 ; 18F7 _ 66: 0F EB. C2
+ movdqa xmm2, xmm10 ; 18FB _ 66 41: 0F 6F. D2
+ psrld xmm10, 25 ; 1900 _ 66 41: 0F 72. D2, 19
+ pxor xmm8, xmm0 ; 1906 _ 66 44: 0F EF. C0
+ pslld xmm2, 7 ; 190B _ 66: 0F 72. F2, 07
+ por xmm2, xmm10 ; 1910 _ 66 41: 0F EB. D2
+ movdqa xmm10, xmm0 ; 1915 _ 66 44: 0F 6F. D0
+ pxor xmm8, xmm2 ; 191A _ 66 44: 0F EF. C2
+ pxor xmm11, xmm2 ; 191F _ 66 44: 0F EF. DA
+ pslld xmm10, 7 ; 1924 _ 66 41: 0F 72. F2, 07
+ pxor xmm11, xmm10 ; 192A _ 66 45: 0F EF. DA
+ movdqa xmm10, xmm8 ; 192F _ 66 45: 0F 6F. D0
+ psrld xmm8, 27 ; 1934 _ 66 41: 0F 72. D0, 1B
+ pslld xmm10, 5 ; 193A _ 66 41: 0F 72. F2, 05
+ por xmm10, xmm8 ; 1940 _ 66 45: 0F EB. D0
+ movdqa xmm8, xmm11 ; 1945 _ 66 45: 0F 6F. C3
+ psrld xmm11, 10 ; 194A _ 66 41: 0F 72. D3, 0A
+ pslld xmm8, 22 ; 1950 _ 66 41: 0F 72. F0, 16
+ por xmm8, xmm11 ; 1956 _ 66 45: 0F EB. C3
+ movd xmm11, dword [r12+1390H] ; 195B _ 66 45: 0F 6E. 9C 24, 00001390
+ pshufd xmm11, xmm11, 0 ; 1965 _ 66 45: 0F 70. DB, 00
+ pxor xmm10, xmm11 ; 196B _ 66 45: 0F EF. D3
+ pxor xmm10, xmm6 ; 1970 _ 66 44: 0F EF. D6
+ movd xmm11, dword [r12+1394H] ; 1975 _ 66 45: 0F 6E. 9C 24, 00001394
+ pshufd xmm11, xmm11, 0 ; 197F _ 66 45: 0F 70. DB, 00
+ pxor xmm0, xmm11 ; 1985 _ 66 41: 0F EF. C3
+ movd xmm11, dword [r12+1398H] ; 198A _ 66 45: 0F 6E. 9C 24, 00001398
+ pshufd xmm11, xmm11, 0 ; 1994 _ 66 45: 0F 70. DB, 00
+ pxor xmm8, xmm11 ; 199A _ 66 45: 0F EF. C3
+ movd xmm11, dword [r12+139CH] ; 199F _ 66 45: 0F 6E. 9C 24, 0000139C
+ pshufd xmm11, xmm11, 0 ; 19A9 _ 66 45: 0F 70. DB, 00
+ pxor xmm2, xmm11 ; 19AF _ 66 41: 0F EF. D3
+ movdqa xmm11, xmm10 ; 19B4 _ 66 45: 0F 6F. DA
+ pxor xmm8, xmm6 ; 19B9 _ 66 44: 0F EF. C6
+ pand xmm11, xmm0 ; 19BE _ 66 44: 0F DB. D8
+ pxor xmm8, xmm11 ; 19C3 _ 66 45: 0F EF. C3
+ por xmm11, xmm2 ; 19C8 _ 66 44: 0F EB. DA
+ pxor xmm2, xmm8 ; 19CD _ 66 41: 0F EF. D0
+ pxor xmm0, xmm11 ; 19D2 _ 66 41: 0F EF. C3
+ pxor xmm11, xmm10 ; 19D7 _ 66 45: 0F EF. DA
+ por xmm10, xmm0 ; 19DC _ 66 44: 0F EB. D0
+ pxor xmm0, xmm2 ; 19E1 _ 66: 0F EF. C2
+ por xmm8, xmm11 ; 19E5 _ 66 45: 0F EB. C3
+ pand xmm8, xmm10 ; 19EA _ 66 45: 0F DB. C2
+ pxor xmm11, xmm0 ; 19EF _ 66 44: 0F EF. D8
+ pand xmm0, xmm8 ; 19F4 _ 66 41: 0F DB. C0
+ pxor xmm0, xmm11 ; 19F9 _ 66 41: 0F EF. C3
+ pand xmm11, xmm8 ; 19FE _ 66 45: 0F DB. D8
+ pxor xmm10, xmm11 ; 1A03 _ 66 45: 0F EF. D3
+ movdqa xmm11, xmm8 ; 1A08 _ 66 45: 0F 6F. D8
+ psrld xmm8, 19 ; 1A0D _ 66 41: 0F 72. D0, 13
+ pslld xmm11, 13 ; 1A13 _ 66 41: 0F 72. F3, 0D
+ por xmm11, xmm8 ; 1A19 _ 66 45: 0F EB. D8
+ movdqa xmm8, xmm2 ; 1A1E _ 66 44: 0F 6F. C2
+ psrld xmm2, 29 ; 1A23 _ 66: 0F 72. D2, 1D
+ pxor xmm10, xmm11 ; 1A28 _ 66 45: 0F EF. D3
+ pslld xmm8, 3 ; 1A2D _ 66 41: 0F 72. F0, 03
+ por xmm8, xmm2 ; 1A33 _ 66 44: 0F EB. C2
+ movdqa xmm2, xmm11 ; 1A38 _ 66 41: 0F 6F. D3
+ pxor xmm10, xmm8 ; 1A3D _ 66 45: 0F EF. D0
+ movdqa xmm1, xmm10 ; 1A42 _ 66 41: 0F 6F. CA
+ pxor xmm0, xmm8 ; 1A47 _ 66 41: 0F EF. C0
+ pslld xmm2, 3 ; 1A4C _ 66: 0F 72. F2, 03
+ pxor xmm0, xmm2 ; 1A51 _ 66: 0F EF. C2
+ pslld xmm1, 1 ; 1A55 _ 66: 0F 72. F1, 01
+ psrld xmm10, 31 ; 1A5A _ 66 41: 0F 72. D2, 1F
+ por xmm1, xmm10 ; 1A60 _ 66 41: 0F EB. CA
+ movdqa xmm10, xmm0 ; 1A65 _ 66 44: 0F 6F. D0
+ movdqa xmm2, xmm1 ; 1A6A _ 66: 0F 6F. D1
+ psrld xmm0, 25 ; 1A6E _ 66: 0F 72. D0, 19
+ pslld xmm10, 7 ; 1A73 _ 66 41: 0F 72. F2, 07
+ por xmm10, xmm0 ; 1A79 _ 66 44: 0F EB. D0
+ pxor xmm11, xmm1 ; 1A7E _ 66 44: 0F EF. D9
+ pxor xmm11, xmm10 ; 1A83 _ 66 45: 0F EF. DA
+ movdqa xmm0, xmm11 ; 1A88 _ 66 41: 0F 6F. C3
+ pxor xmm8, xmm10 ; 1A8D _ 66 45: 0F EF. C2
+ pslld xmm2, 7 ; 1A92 _ 66: 0F 72. F2, 07
+ pxor xmm8, xmm2 ; 1A97 _ 66 44: 0F EF. C2
+ pslld xmm0, 5 ; 1A9C _ 66: 0F 72. F0, 05
+ psrld xmm11, 27 ; 1AA1 _ 66 41: 0F 72. D3, 1B
+ por xmm0, xmm11 ; 1AA7 _ 66 41: 0F EB. C3
+ movdqa xmm11, xmm8 ; 1AAC _ 66 45: 0F 6F. D8
+ psrld xmm8, 10 ; 1AB1 _ 66 41: 0F 72. D0, 0A
+ pslld xmm11, 22 ; 1AB7 _ 66 41: 0F 72. F3, 16
+ por xmm11, xmm8 ; 1ABD _ 66 45: 0F EB. D8
+ movd xmm8, dword [r12+13A0H] ; 1AC2 _ 66 45: 0F 6E. 84 24, 000013A0
+ pshufd xmm2, xmm8, 0 ; 1ACC _ 66 41: 0F 70. D0, 00
+ pxor xmm0, xmm2 ; 1AD2 _ 66: 0F EF. C2
+ movd xmm8, dword [r12+13A4H] ; 1AD6 _ 66 45: 0F 6E. 84 24, 000013A4
+ pshufd xmm2, xmm8, 0 ; 1AE0 _ 66 41: 0F 70. D0, 00
+ pxor xmm1, xmm2 ; 1AE6 _ 66: 0F EF. CA
+ movd xmm8, dword [r12+13A8H] ; 1AEA _ 66 45: 0F 6E. 84 24, 000013A8
+ pshufd xmm2, xmm8, 0 ; 1AF4 _ 66 41: 0F 70. D0, 00
+ movd xmm8, dword [r12+13ACH] ; 1AFA _ 66 45: 0F 6E. 84 24, 000013AC
+ pxor xmm11, xmm2 ; 1B04 _ 66 44: 0F EF. DA
+ pshufd xmm2, xmm8, 0 ; 1B09 _ 66 41: 0F 70. D0, 00
+ movdqa xmm8, xmm0 ; 1B0F _ 66 44: 0F 6F. C0
+ pxor xmm10, xmm2 ; 1B14 _ 66 44: 0F EF. D2
+ pand xmm8, xmm11 ; 1B19 _ 66 45: 0F DB. C3
+ pxor xmm8, xmm10 ; 1B1E _ 66 45: 0F EF. C2
+ pxor xmm11, xmm1 ; 1B23 _ 66 44: 0F EF. D9
+ pxor xmm11, xmm8 ; 1B28 _ 66 45: 0F EF. D8
+ movdqa xmm2, xmm11 ; 1B2D _ 66 41: 0F 6F. D3
+ por xmm10, xmm0 ; 1B32 _ 66 44: 0F EB. D0
+ pxor xmm10, xmm1 ; 1B37 _ 66 44: 0F EF. D1
+ movdqa xmm1, xmm10 ; 1B3C _ 66 41: 0F 6F. CA
+ pxor xmm0, xmm11 ; 1B41 _ 66 41: 0F EF. C3
+ pslld xmm2, 13 ; 1B46 _ 66: 0F 72. F2, 0D
+ por xmm1, xmm0 ; 1B4B _ 66: 0F EB. C8
+ pxor xmm1, xmm8 ; 1B4F _ 66 41: 0F EF. C8
+ pand xmm8, xmm10 ; 1B54 _ 66 45: 0F DB. C2
+ pxor xmm0, xmm8 ; 1B59 _ 66 41: 0F EF. C0
+ pxor xmm10, xmm1 ; 1B5E _ 66 44: 0F EF. D1
+ pxor xmm10, xmm0 ; 1B63 _ 66 44: 0F EF. D0
+ pxor xmm0, xmm6 ; 1B68 _ 66: 0F EF. C6
+ psrld xmm11, 19 ; 1B6C _ 66 41: 0F 72. D3, 13
+ por xmm2, xmm11 ; 1B72 _ 66 41: 0F EB. D3
+ movdqa xmm11, xmm10 ; 1B77 _ 66 45: 0F 6F. DA
+ psrld xmm10, 29 ; 1B7C _ 66 41: 0F 72. D2, 1D
+ pxor xmm1, xmm2 ; 1B82 _ 66: 0F EF. CA
+ pslld xmm11, 3 ; 1B86 _ 66 41: 0F 72. F3, 03
+ por xmm11, xmm10 ; 1B8C _ 66 45: 0F EB. DA
+ movdqa xmm10, xmm2 ; 1B91 _ 66 44: 0F 6F. D2
+ pxor xmm1, xmm11 ; 1B96 _ 66 41: 0F EF. CB
+ movdqa xmm8, xmm1 ; 1B9B _ 66 44: 0F 6F. C1
+ pxor xmm0, xmm11 ; 1BA0 _ 66 41: 0F EF. C3
+ pslld xmm10, 3 ; 1BA5 _ 66 41: 0F 72. F2, 03
+ pxor xmm0, xmm10 ; 1BAB _ 66 41: 0F EF. C2
+ pslld xmm8, 1 ; 1BB0 _ 66 41: 0F 72. F0, 01
+ psrld xmm1, 31 ; 1BB6 _ 66: 0F 72. D1, 1F
+ por xmm8, xmm1 ; 1BBB _ 66 44: 0F EB. C1
+ movdqa xmm1, xmm0 ; 1BC0 _ 66: 0F 6F. C8
+ movdqa xmm10, xmm8 ; 1BC4 _ 66 45: 0F 6F. D0
+ psrld xmm0, 25 ; 1BC9 _ 66: 0F 72. D0, 19
+ pslld xmm1, 7 ; 1BCE _ 66: 0F 72. F1, 07
+ por xmm1, xmm0 ; 1BD3 _ 66: 0F EB. C8
+ pxor xmm2, xmm8 ; 1BD7 _ 66 41: 0F EF. D0
+ pxor xmm2, xmm1 ; 1BDC _ 66: 0F EF. D1
+ pxor xmm11, xmm1 ; 1BE0 _ 66 44: 0F EF. D9
+ pslld xmm10, 7 ; 1BE5 _ 66 41: 0F 72. F2, 07
+ pxor xmm11, xmm10 ; 1BEB _ 66 45: 0F EF. DA
+ movdqa xmm0, xmm2 ; 1BF0 _ 66: 0F 6F. C2
+ psrld xmm2, 27 ; 1BF4 _ 66: 0F 72. D2, 1B
+ pslld xmm0, 5 ; 1BF9 _ 66: 0F 72. F0, 05
+ por xmm0, xmm2 ; 1BFE _ 66: 0F EB. C2
+ movdqa xmm2, xmm11 ; 1C02 _ 66 41: 0F 6F. D3
+ movd xmm10, dword [r12+13B4H] ; 1C07 _ 66 45: 0F 6E. 94 24, 000013B4
+ psrld xmm11, 10 ; 1C11 _ 66 41: 0F 72. D3, 0A
+ pslld xmm2, 22 ; 1C17 _ 66: 0F 72. F2, 16
+ por xmm2, xmm11 ; 1C1C _ 66 41: 0F EB. D3
+ movd xmm11, dword [r12+13B0H] ; 1C21 _ 66 45: 0F 6E. 9C 24, 000013B0
+ pshufd xmm11, xmm11, 0 ; 1C2B _ 66 45: 0F 70. DB, 00
+ pxor xmm0, xmm11 ; 1C31 _ 66 41: 0F EF. C3
+ pshufd xmm11, xmm10, 0 ; 1C36 _ 66 45: 0F 70. DA, 00
+ movd xmm10, dword [r12+13B8H] ; 1C3C _ 66 45: 0F 6E. 94 24, 000013B8
+ pxor xmm8, xmm11 ; 1C46 _ 66 45: 0F EF. C3
+ pshufd xmm11, xmm10, 0 ; 1C4B _ 66 45: 0F 70. DA, 00
+ pxor xmm2, xmm11 ; 1C51 _ 66 41: 0F EF. D3
+ movd xmm10, dword [r12+13BCH] ; 1C56 _ 66 45: 0F 6E. 94 24, 000013BC
+ pshufd xmm11, xmm10, 0 ; 1C60 _ 66 45: 0F 70. DA, 00
+ movdqa xmm10, xmm0 ; 1C66 _ 66 44: 0F 6F. D0
+ pxor xmm1, xmm11 ; 1C6B _ 66 41: 0F EF. CB
+ por xmm10, xmm1 ; 1C70 _ 66 44: 0F EB. D1
+ pxor xmm1, xmm8 ; 1C75 _ 66 41: 0F EF. C8
+ pand xmm8, xmm0 ; 1C7A _ 66 44: 0F DB. C0
+ pxor xmm0, xmm2 ; 1C7F _ 66: 0F EF. C2
+ pxor xmm2, xmm1 ; 1C83 _ 66: 0F EF. D1
+ pand xmm1, xmm10 ; 1C87 _ 66 41: 0F DB. CA
+ por xmm0, xmm8 ; 1C8C _ 66 41: 0F EB. C0
+ pxor xmm1, xmm0 ; 1C91 _ 66: 0F EF. C8
+ pxor xmm10, xmm8 ; 1C95 _ 66 45: 0F EF. D0
+ pand xmm0, xmm10 ; 1C9A _ 66 41: 0F DB. C2
+ pxor xmm8, xmm1 ; 1C9F _ 66 44: 0F EF. C1
+ pxor xmm0, xmm2 ; 1CA4 _ 66: 0F EF. C2
+ por xmm8, xmm10 ; 1CA8 _ 66 45: 0F EB. C2
+ pxor xmm8, xmm2 ; 1CAD _ 66 44: 0F EF. C2
+ movdqa xmm2, xmm8 ; 1CB2 _ 66 41: 0F 6F. D0
+ pxor xmm10, xmm1 ; 1CB7 _ 66 44: 0F EF. D1
+ por xmm2, xmm1 ; 1CBC _ 66: 0F EB. D1
+ pxor xmm10, xmm2 ; 1CC0 _ 66 44: 0F EF. D2
+ movdqa xmm11, xmm10 ; 1CC5 _ 66 45: 0F 6F. DA
+ movdqa xmm2, xmm1 ; 1CCA _ 66: 0F 6F. D1
+ psrld xmm10, 19 ; 1CCE _ 66 41: 0F 72. D2, 13
+ pslld xmm11, 13 ; 1CD4 _ 66 41: 0F 72. F3, 0D
+ por xmm11, xmm10 ; 1CDA _ 66 45: 0F EB. DA
+ movdqa xmm10, xmm11 ; 1CDF _ 66 45: 0F 6F. D3
+ pslld xmm2, 3 ; 1CE4 _ 66: 0F 72. F2, 03
+ psrld xmm1, 29 ; 1CE9 _ 66: 0F 72. D1, 1D
+ por xmm2, xmm1 ; 1CEE _ 66: 0F EB. D1
+ pxor xmm8, xmm11 ; 1CF2 _ 66 45: 0F EF. C3
+ pxor xmm8, xmm2 ; 1CF7 _ 66 44: 0F EF. C2
+ movdqa xmm1, xmm8 ; 1CFC _ 66 41: 0F 6F. C8
+ pxor xmm0, xmm2 ; 1D01 _ 66: 0F EF. C2
+ pslld xmm10, 3 ; 1D05 _ 66 41: 0F 72. F2, 03
+ pxor xmm0, xmm10 ; 1D0B _ 66 41: 0F EF. C2
+ movdqa xmm10, xmm0 ; 1D10 _ 66 44: 0F 6F. D0
+ pslld xmm1, 1 ; 1D15 _ 66: 0F 72. F1, 01
+ psrld xmm8, 31 ; 1D1A _ 66 41: 0F 72. D0, 1F
+ por xmm1, xmm8 ; 1D20 _ 66 41: 0F EB. C8
+ movdqa xmm8, xmm1 ; 1D25 _ 66 44: 0F 6F. C1
+ pslld xmm10, 7 ; 1D2A _ 66 41: 0F 72. F2, 07
+ psrld xmm0, 25 ; 1D30 _ 66: 0F 72. D0, 19
+ por xmm10, xmm0 ; 1D35 _ 66 44: 0F EB. D0
+ pxor xmm11, xmm1 ; 1D3A _ 66 44: 0F EF. D9
+ pxor xmm11, xmm10 ; 1D3F _ 66 45: 0F EF. DA
+ pxor xmm2, xmm10 ; 1D44 _ 66 41: 0F EF. D2
+ pslld xmm8, 7 ; 1D49 _ 66 41: 0F 72. F0, 07
+ pxor xmm2, xmm8 ; 1D4F _ 66 41: 0F EF. D0
+ movdqa xmm8, xmm11 ; 1D54 _ 66 45: 0F 6F. C3
+ psrld xmm11, 27 ; 1D59 _ 66 41: 0F 72. D3, 1B
+ pslld xmm8, 5 ; 1D5F _ 66 41: 0F 72. F0, 05
+ por xmm8, xmm11 ; 1D65 _ 66 45: 0F EB. C3
+ movdqa xmm11, xmm2 ; 1D6A _ 66 44: 0F 6F. DA
+ psrld xmm2, 10 ; 1D6F _ 66: 0F 72. D2, 0A
+ pslld xmm11, 22 ; 1D74 _ 66 41: 0F 72. F3, 16
+ por xmm11, xmm2 ; 1D7A _ 66 44: 0F EB. DA
+ movd xmm2, dword [r12+13C0H] ; 1D7F _ 66 41: 0F 6E. 94 24, 000013C0
+ pshufd xmm2, xmm2, 0 ; 1D89 _ 66: 0F 70. D2, 00
+ pxor xmm8, xmm2 ; 1D8E _ 66 44: 0F EF. C2
+ movd xmm2, dword [r12+13C4H] ; 1D93 _ 66 41: 0F 6E. 94 24, 000013C4
+ pshufd xmm2, xmm2, 0 ; 1D9D _ 66: 0F 70. D2, 00
+ pxor xmm1, xmm2 ; 1DA2 _ 66: 0F EF. CA
+ movd xmm2, dword [r12+13C8H] ; 1DA6 _ 66 41: 0F 6E. 94 24, 000013C8
+ pshufd xmm2, xmm2, 0 ; 1DB0 _ 66: 0F 70. D2, 00
+ pxor xmm11, xmm2 ; 1DB5 _ 66 44: 0F EF. DA
+ movd xmm2, dword [r12+13CCH] ; 1DBA _ 66 41: 0F 6E. 94 24, 000013CC
+ pshufd xmm2, xmm2, 0 ; 1DC4 _ 66: 0F 70. D2, 00
+ pxor xmm10, xmm2 ; 1DC9 _ 66 44: 0F EF. D2
+ pxor xmm1, xmm10 ; 1DCE _ 66 41: 0F EF. CA
+ movdqa xmm0, xmm1 ; 1DD3 _ 66: 0F 6F. C1
+ pxor xmm10, xmm6 ; 1DD7 _ 66 44: 0F EF. D6
+ pxor xmm11, xmm10 ; 1DDC _ 66 45: 0F EF. DA
+ pxor xmm10, xmm8 ; 1DE1 _ 66 45: 0F EF. D0
+ pand xmm0, xmm10 ; 1DE6 _ 66 41: 0F DB. C2
+ pxor xmm0, xmm11 ; 1DEB _ 66 41: 0F EF. C3
+ movdqa xmm2, xmm0 ; 1DF0 _ 66: 0F 6F. D0
+ pxor xmm1, xmm10 ; 1DF4 _ 66 41: 0F EF. CA
+ pxor xmm8, xmm1 ; 1DF9 _ 66 44: 0F EF. C1
+ pand xmm11, xmm1 ; 1DFE _ 66 44: 0F DB. D9
+ pxor xmm11, xmm8 ; 1E03 _ 66 45: 0F EF. D8
+ pand xmm8, xmm0 ; 1E08 _ 66 44: 0F DB. C0
+ pxor xmm10, xmm8 ; 1E0D _ 66 45: 0F EF. D0
+ por xmm1, xmm0 ; 1E12 _ 66: 0F EB. C8
+ pxor xmm1, xmm8 ; 1E16 _ 66 41: 0F EF. C8
+ por xmm8, xmm10 ; 1E1B _ 66 45: 0F EB. C2
+ pxor xmm8, xmm11 ; 1E20 _ 66 45: 0F EF. C3
+ pand xmm11, xmm10 ; 1E25 _ 66 45: 0F DB. DA
+ pxor xmm8, xmm6 ; 1E2A _ 66 44: 0F EF. C6
+ pxor xmm1, xmm11 ; 1E2F _ 66 41: 0F EF. CB
+ movdqa xmm11, xmm8 ; 1E34 _ 66 45: 0F 6F. D8
+ pslld xmm2, 13 ; 1E39 _ 66: 0F 72. F2, 0D
+ psrld xmm0, 19 ; 1E3E _ 66: 0F 72. D0, 13
+ por xmm2, xmm0 ; 1E43 _ 66: 0F EB. D0
+ pslld xmm11, 3 ; 1E47 _ 66 41: 0F 72. F3, 03
+ psrld xmm8, 29 ; 1E4D _ 66 41: 0F 72. D0, 1D
+ por xmm11, xmm8 ; 1E53 _ 66 45: 0F EB. D8
+ movdqa xmm8, xmm2 ; 1E58 _ 66 44: 0F 6F. C2
+ pxor xmm1, xmm2 ; 1E5D _ 66: 0F EF. CA
+ pxor xmm1, xmm11 ; 1E61 _ 66 41: 0F EF. CB
+ pxor xmm10, xmm11 ; 1E66 _ 66 45: 0F EF. D3
+ pslld xmm8, 3 ; 1E6B _ 66 41: 0F 72. F0, 03
+ pxor xmm10, xmm8 ; 1E71 _ 66 45: 0F EF. D0
+ movdqa xmm8, xmm1 ; 1E76 _ 66 44: 0F 6F. C1
+ movdqa xmm0, xmm10 ; 1E7B _ 66 41: 0F 6F. C2
+ psrld xmm1, 31 ; 1E80 _ 66: 0F 72. D1, 1F
+ pslld xmm8, 1 ; 1E85 _ 66 41: 0F 72. F0, 01
+ por xmm8, xmm1 ; 1E8B _ 66 44: 0F EB. C1
+ pslld xmm0, 7 ; 1E90 _ 66: 0F 72. F0, 07
+ psrld xmm10, 25 ; 1E95 _ 66 41: 0F 72. D2, 19
+ por xmm0, xmm10 ; 1E9B _ 66 41: 0F EB. C2
+ movdqa xmm10, xmm8 ; 1EA0 _ 66 45: 0F 6F. D0
+ pxor xmm2, xmm8 ; 1EA5 _ 66 41: 0F EF. D0
+ pxor xmm2, xmm0 ; 1EAA _ 66: 0F EF. D0
+ movdqa xmm1, xmm2 ; 1EAE _ 66: 0F 6F. CA
+ pxor xmm11, xmm0 ; 1EB2 _ 66 44: 0F EF. D8
+ pslld xmm10, 7 ; 1EB7 _ 66 41: 0F 72. F2, 07
+ pxor xmm11, xmm10 ; 1EBD _ 66 45: 0F EF. DA
+ pslld xmm1, 5 ; 1EC2 _ 66: 0F 72. F1, 05
+ psrld xmm2, 27 ; 1EC7 _ 66: 0F 72. D2, 1B
+ por xmm1, xmm2 ; 1ECC _ 66: 0F EB. CA
+ movdqa xmm2, xmm11 ; 1ED0 _ 66 41: 0F 6F. D3
+ psrld xmm11, 10 ; 1ED5 _ 66 41: 0F 72. D3, 0A
+ pslld xmm2, 22 ; 1EDB _ 66: 0F 72. F2, 16
+ por xmm2, xmm11 ; 1EE0 _ 66 41: 0F EB. D3
+ movd xmm11, dword [r12+13D0H] ; 1EE5 _ 66 45: 0F 6E. 9C 24, 000013D0
+ pshufd xmm11, xmm11, 0 ; 1EEF _ 66 45: 0F 70. DB, 00
+ pxor xmm1, xmm11 ; 1EF5 _ 66 41: 0F EF. CB
+ movd xmm10, dword [r12+13D4H] ; 1EFA _ 66 45: 0F 6E. 94 24, 000013D4
+ pshufd xmm11, xmm10, 0 ; 1F04 _ 66 45: 0F 70. DA, 00
+ pxor xmm8, xmm11 ; 1F0A _ 66 45: 0F EF. C3
+ pxor xmm1, xmm8 ; 1F0F _ 66 41: 0F EF. C8
+ movd xmm10, dword [r12+13D8H] ; 1F14 _ 66 45: 0F 6E. 94 24, 000013D8
+ pshufd xmm11, xmm10, 0 ; 1F1E _ 66 45: 0F 70. DA, 00
+ pxor xmm2, xmm11 ; 1F24 _ 66 41: 0F EF. D3
+ movd xmm10, dword [r12+13DCH] ; 1F29 _ 66 45: 0F 6E. 94 24, 000013DC
+ pshufd xmm11, xmm10, 0 ; 1F33 _ 66 45: 0F 70. DA, 00
+ pxor xmm0, xmm11 ; 1F39 _ 66 41: 0F EF. C3
+ pxor xmm8, xmm0 ; 1F3E _ 66 44: 0F EF. C0
+ movdqa xmm10, xmm8 ; 1F43 _ 66 45: 0F 6F. D0
+ pxor xmm0, xmm6 ; 1F48 _ 66: 0F EF. C6
+ pxor xmm2, xmm0 ; 1F4C _ 66: 0F EF. D0
+ pand xmm10, xmm1 ; 1F50 _ 66 44: 0F DB. D1
+ pxor xmm10, xmm2 ; 1F55 _ 66 44: 0F EF. D2
+ movdqa xmm11, xmm10 ; 1F5A _ 66 45: 0F 6F. DA
+ por xmm2, xmm8 ; 1F5F _ 66 41: 0F EB. D0
+ pxor xmm8, xmm0 ; 1F64 _ 66 44: 0F EF. C0
+ pand xmm0, xmm10 ; 1F69 _ 66 41: 0F DB. C2
+ pxor xmm0, xmm1 ; 1F6E _ 66: 0F EF. C1
+ pxor xmm8, xmm10 ; 1F72 _ 66 45: 0F EF. C2
+ pxor xmm8, xmm2 ; 1F77 _ 66 44: 0F EF. C2
+ pxor xmm2, xmm1 ; 1F7C _ 66: 0F EF. D1
+ pand xmm1, xmm0 ; 1F80 _ 66: 0F DB. C8
+ pxor xmm2, xmm6 ; 1F84 _ 66: 0F EF. D6
+ pxor xmm1, xmm8 ; 1F88 _ 66 41: 0F EF. C8
+ por xmm8, xmm0 ; 1F8D _ 66 44: 0F EB. C0
+ pxor xmm8, xmm2 ; 1F92 _ 66 44: 0F EF. C2
+ movdqa xmm2, xmm1 ; 1F97 _ 66: 0F 6F. D1
+ pslld xmm11, 13 ; 1F9B _ 66 41: 0F 72. F3, 0D
+ psrld xmm10, 19 ; 1FA1 _ 66 41: 0F 72. D2, 13
+ por xmm11, xmm10 ; 1FA7 _ 66 45: 0F EB. DA
+ movdqa xmm10, xmm11 ; 1FAC _ 66 45: 0F 6F. D3
+ pslld xmm2, 3 ; 1FB1 _ 66: 0F 72. F2, 03
+ psrld xmm1, 29 ; 1FB6 _ 66: 0F 72. D1, 1D
+ por xmm2, xmm1 ; 1FBB _ 66: 0F EB. D1
+ pxor xmm0, xmm11 ; 1FBF _ 66 41: 0F EF. C3
+ pxor xmm0, xmm2 ; 1FC4 _ 66: 0F EF. C2
+ movdqa xmm1, xmm0 ; 1FC8 _ 66: 0F 6F. C8
+ pxor xmm8, xmm2 ; 1FCC _ 66 44: 0F EF. C2
+ pslld xmm10, 3 ; 1FD1 _ 66 41: 0F 72. F2, 03
+ pxor xmm8, xmm10 ; 1FD7 _ 66 45: 0F EF. C2
+ movdqa xmm10, xmm8 ; 1FDC _ 66 45: 0F 6F. D0
+ pslld xmm1, 1 ; 1FE1 _ 66: 0F 72. F1, 01
+ psrld xmm0, 31 ; 1FE6 _ 66: 0F 72. D0, 1F
+ por xmm1, xmm0 ; 1FEB _ 66: 0F EB. C8
+ pslld xmm10, 7 ; 1FEF _ 66 41: 0F 72. F2, 07
+ psrld xmm8, 25 ; 1FF5 _ 66 41: 0F 72. D0, 19
+ por xmm10, xmm8 ; 1FFB _ 66 45: 0F EB. D0
+ movdqa xmm8, xmm1 ; 2000 _ 66 44: 0F 6F. C1
+ pxor xmm11, xmm1 ; 2005 _ 66 44: 0F EF. D9
+ pxor xmm11, xmm10 ; 200A _ 66 45: 0F EF. DA
+ pxor xmm2, xmm10 ; 200F _ 66 41: 0F EF. D2
+ pslld xmm8, 7 ; 2014 _ 66 41: 0F 72. F0, 07
+ pxor xmm2, xmm8 ; 201A _ 66 41: 0F EF. D0
+ movdqa xmm8, xmm11 ; 201F _ 66 45: 0F 6F. C3
+ movdqa xmm0, xmm2 ; 2024 _ 66: 0F 6F. C2
+ psrld xmm11, 27 ; 2028 _ 66 41: 0F 72. D3, 1B
+ pslld xmm8, 5 ; 202E _ 66 41: 0F 72. F0, 05
+ por xmm8, xmm11 ; 2034 _ 66 45: 0F EB. C3
+ pslld xmm0, 22 ; 2039 _ 66: 0F 72. F0, 16
+ psrld xmm2, 10 ; 203E _ 66: 0F 72. D2, 0A
+ por xmm0, xmm2 ; 2043 _ 66: 0F EB. C2
+ movd xmm2, dword [r12+13E0H] ; 2047 _ 66 41: 0F 6E. 94 24, 000013E0
+ pshufd xmm11, xmm2, 0 ; 2051 _ 66 44: 0F 70. DA, 00
+ pxor xmm8, xmm11 ; 2057 _ 66 45: 0F EF. C3
+ movd xmm2, dword [r12+13E4H] ; 205C _ 66 41: 0F 6E. 94 24, 000013E4
+ pshufd xmm11, xmm2, 0 ; 2066 _ 66 44: 0F 70. DA, 00
+ pxor xmm1, xmm11 ; 206C _ 66 41: 0F EF. CB
+ movd xmm2, dword [r12+13E8H] ; 2071 _ 66 41: 0F 6E. 94 24, 000013E8
+ pshufd xmm11, xmm2, 0 ; 207B _ 66 44: 0F 70. DA, 00
+ movd xmm2, dword [r12+13ECH] ; 2081 _ 66 41: 0F 6E. 94 24, 000013EC
+ pxor xmm0, xmm11 ; 208B _ 66 41: 0F EF. C3
+ pshufd xmm11, xmm2, 0 ; 2090 _ 66 44: 0F 70. DA, 00
+ pxor xmm10, xmm11 ; 2096 _ 66 45: 0F EF. D3
+ movdqa xmm2, xmm10 ; 209B _ 66 41: 0F 6F. D2
+ pxor xmm0, xmm6 ; 20A0 _ 66: 0F EF. C6
+ pand xmm2, xmm8 ; 20A4 _ 66 41: 0F DB. D0
+ pxor xmm8, xmm10 ; 20A9 _ 66 45: 0F EF. C2
+ pxor xmm2, xmm0 ; 20AE _ 66: 0F EF. D0
+ por xmm0, xmm10 ; 20B2 _ 66 41: 0F EB. C2
+ pxor xmm1, xmm2 ; 20B7 _ 66: 0F EF. CA
+ pxor xmm0, xmm8 ; 20BB _ 66 41: 0F EF. C0
+ por xmm8, xmm1 ; 20C0 _ 66 44: 0F EB. C1
+ pxor xmm0, xmm1 ; 20C5 _ 66: 0F EF. C1
+ pxor xmm10, xmm8 ; 20C9 _ 66 45: 0F EF. D0
+ por xmm8, xmm2 ; 20CE _ 66 44: 0F EB. C2
+ pxor xmm8, xmm0 ; 20D3 _ 66 44: 0F EF. C0
+ movdqa xmm11, xmm8 ; 20D8 _ 66 45: 0F 6F. D8
+ pxor xmm10, xmm2 ; 20DD _ 66 44: 0F EF. D2
+ pxor xmm10, xmm8 ; 20E2 _ 66 45: 0F EF. D0
+ pxor xmm2, xmm6 ; 20E7 _ 66: 0F EF. D6
+ pand xmm0, xmm10 ; 20EB _ 66 41: 0F DB. C2
+ pxor xmm2, xmm0 ; 20F0 _ 66: 0F EF. D0
+ pslld xmm11, 13 ; 20F4 _ 66 41: 0F 72. F3, 0D
+ psrld xmm8, 19 ; 20FA _ 66 41: 0F 72. D0, 13
+ por xmm11, xmm8 ; 2100 _ 66 45: 0F EB. D8
+ movdqa xmm8, xmm10 ; 2105 _ 66 45: 0F 6F. C2
+ psrld xmm10, 29 ; 210A _ 66 41: 0F 72. D2, 1D
+ pxor xmm1, xmm11 ; 2110 _ 66 41: 0F EF. CB
+ pslld xmm8, 3 ; 2115 _ 66 41: 0F 72. F0, 03
+ por xmm8, xmm10 ; 211B _ 66 45: 0F EB. C2
+ movdqa xmm10, xmm11 ; 2120 _ 66 45: 0F 6F. D3
+ pxor xmm1, xmm8 ; 2125 _ 66 41: 0F EF. C8
+ movdqa xmm0, xmm1 ; 212A _ 66: 0F 6F. C1
+ pxor xmm2, xmm8 ; 212E _ 66 41: 0F EF. D0
+ pslld xmm10, 3 ; 2133 _ 66 41: 0F 72. F2, 03
+ pxor xmm2, xmm10 ; 2139 _ 66 41: 0F EF. D2
+ pslld xmm0, 1 ; 213E _ 66: 0F 72. F0, 01
+ psrld xmm1, 31 ; 2143 _ 66: 0F 72. D1, 1F
+ por xmm0, xmm1 ; 2148 _ 66: 0F EB. C1
+ movdqa xmm1, xmm2 ; 214C _ 66: 0F 6F. CA
+ psrld xmm2, 25 ; 2150 _ 66: 0F 72. D2, 19
+ pxor xmm11, xmm0 ; 2155 _ 66 44: 0F EF. D8
+ pslld xmm1, 7 ; 215A _ 66: 0F 72. F1, 07
+ por xmm1, xmm2 ; 215F _ 66: 0F EB. CA
+ movdqa xmm2, xmm0 ; 2163 _ 66: 0F 6F. D0
+ pxor xmm11, xmm1 ; 2167 _ 66 44: 0F EF. D9
+ pxor xmm8, xmm1 ; 216C _ 66 44: 0F EF. C1
+ pslld xmm2, 7 ; 2171 _ 66: 0F 72. F2, 07
+ pxor xmm8, xmm2 ; 2176 _ 66 44: 0F EF. C2
+ movdqa xmm2, xmm11 ; 217B _ 66 41: 0F 6F. D3
+ movdqa xmm10, xmm8 ; 2180 _ 66 45: 0F 6F. D0
+ psrld xmm11, 27 ; 2185 _ 66 41: 0F 72. D3, 1B
+ pslld xmm2, 5 ; 218B _ 66: 0F 72. F2, 05
+ por xmm2, xmm11 ; 2190 _ 66 41: 0F EB. D3
+ movd xmm11, dword [r12+13F4H] ; 2195 _ 66 45: 0F 6E. 9C 24, 000013F4
+ pslld xmm10, 22 ; 219F _ 66 41: 0F 72. F2, 16
+ psrld xmm8, 10 ; 21A5 _ 66 41: 0F 72. D0, 0A
+ por xmm10, xmm8 ; 21AB _ 66 45: 0F EB. D0
+ movd xmm8, dword [r12+13F0H] ; 21B0 _ 66 45: 0F 6E. 84 24, 000013F0
+ pshufd xmm8, xmm8, 0 ; 21BA _ 66 45: 0F 70. C0, 00
+ pxor xmm2, xmm8 ; 21C0 _ 66 41: 0F EF. D0
+ pshufd xmm8, xmm11, 0 ; 21C5 _ 66 45: 0F 70. C3, 00
+ movd xmm11, dword [r12+13F8H] ; 21CB _ 66 45: 0F 6E. 9C 24, 000013F8
+ pxor xmm0, xmm8 ; 21D5 _ 66 41: 0F EF. C0
+ pshufd xmm8, xmm11, 0 ; 21DA _ 66 45: 0F 70. C3, 00
+ pxor xmm10, xmm8 ; 21E0 _ 66 45: 0F EF. D0
+ movd xmm11, dword [r12+13FCH] ; 21E5 _ 66 45: 0F 6E. 9C 24, 000013FC
+ pshufd xmm8, xmm11, 0 ; 21EF _ 66 45: 0F 70. C3, 00
+ movdqa xmm11, xmm0 ; 21F5 _ 66 44: 0F 6F. D8
+ pxor xmm1, xmm8 ; 21FA _ 66 41: 0F EF. C8
+ pxor xmm0, xmm10 ; 21FF _ 66 41: 0F EF. C2
+ por xmm11, xmm10 ; 2204 _ 66 45: 0F EB. DA
+ pxor xmm11, xmm1 ; 2209 _ 66 44: 0F EF. D9
+ pxor xmm10, xmm11 ; 220E _ 66 45: 0F EF. D3
+ por xmm1, xmm0 ; 2213 _ 66: 0F EB. C8
+ pand xmm1, xmm2 ; 2217 _ 66: 0F DB. CA
+ pxor xmm0, xmm10 ; 221B _ 66 41: 0F EF. C2
+ pxor xmm1, xmm11 ; 2220 _ 66 41: 0F EF. CB
+ por xmm11, xmm0 ; 2225 _ 66 44: 0F EB. D8
+ pxor xmm11, xmm2 ; 222A _ 66 44: 0F EF. DA
+ por xmm2, xmm0 ; 222F _ 66: 0F EB. D0
+ pxor xmm2, xmm10 ; 2233 _ 66 41: 0F EF. D2
+ pxor xmm11, xmm0 ; 2238 _ 66 44: 0F EF. D8
+ pxor xmm10, xmm11 ; 223D _ 66 45: 0F EF. D3
+ pand xmm11, xmm2 ; 2242 _ 66 44: 0F DB. DA
+ pxor xmm11, xmm0 ; 2247 _ 66 44: 0F EF. D8
+ pxor xmm10, xmm6 ; 224C _ 66 44: 0F EF. D6
+ por xmm10, xmm2 ; 2251 _ 66 44: 0F EB. D2
+ pxor xmm0, xmm10 ; 2256 _ 66 41: 0F EF. C2
+ movdqa xmm8, xmm0 ; 225B _ 66 44: 0F 6F. C0
+ movdqa xmm10, xmm11 ; 2260 _ 66 45: 0F 6F. D3
+ psrld xmm0, 19 ; 2265 _ 66: 0F 72. D0, 13
+ pslld xmm8, 13 ; 226A _ 66 41: 0F 72. F0, 0D
+ por xmm8, xmm0 ; 2270 _ 66 44: 0F EB. C0
+ pslld xmm10, 3 ; 2275 _ 66 41: 0F 72. F2, 03
+ psrld xmm11, 29 ; 227B _ 66 41: 0F 72. D3, 1D
+ por xmm10, xmm11 ; 2281 _ 66 45: 0F EB. D3
+ movdqa xmm11, xmm8 ; 2286 _ 66 45: 0F 6F. D8
+ pxor xmm1, xmm8 ; 228B _ 66 41: 0F EF. C8
+ pxor xmm1, xmm10 ; 2290 _ 66 41: 0F EF. CA
+ pxor xmm2, xmm10 ; 2295 _ 66 41: 0F EF. D2
+ pslld xmm11, 3 ; 229A _ 66 41: 0F 72. F3, 03
+ pxor xmm2, xmm11 ; 22A0 _ 66 41: 0F EF. D3
+ movdqa xmm11, xmm1 ; 22A5 _ 66 44: 0F 6F. D9
+ psrld xmm1, 31 ; 22AA _ 66: 0F 72. D1, 1F
+ pslld xmm11, 1 ; 22AF _ 66 41: 0F 72. F3, 01
+ por xmm11, xmm1 ; 22B5 _ 66 44: 0F EB. D9
+ movdqa xmm1, xmm2 ; 22BA _ 66: 0F 6F. CA
+ psrld xmm2, 25 ; 22BE _ 66: 0F 72. D2, 19
+ pxor xmm8, xmm11 ; 22C3 _ 66 45: 0F EF. C3
+ pslld xmm1, 7 ; 22C8 _ 66: 0F 72. F1, 07
+ por xmm1, xmm2 ; 22CD _ 66: 0F EB. CA
+ movdqa xmm2, xmm11 ; 22D1 _ 66 41: 0F 6F. D3
+ pxor xmm8, xmm1 ; 22D6 _ 66 44: 0F EF. C1
+ pxor xmm10, xmm1 ; 22DB _ 66 44: 0F EF. D1
+ pslld xmm2, 7 ; 22E0 _ 66: 0F 72. F2, 07
+ pxor xmm10, xmm2 ; 22E5 _ 66 44: 0F EF. D2
+ movdqa xmm2, xmm8 ; 22EA _ 66 41: 0F 6F. D0
+ psrld xmm8, 27 ; 22EF _ 66 41: 0F 72. D0, 1B
+ pslld xmm2, 5 ; 22F5 _ 66: 0F 72. F2, 05
+ por xmm2, xmm8 ; 22FA _ 66 41: 0F EB. D0
+ movdqa xmm8, xmm10 ; 22FF _ 66 45: 0F 6F. C2
+ psrld xmm10, 10 ; 2304 _ 66 41: 0F 72. D2, 0A
+ pslld xmm8, 22 ; 230A _ 66 41: 0F 72. F0, 16
+ por xmm8, xmm10 ; 2310 _ 66 45: 0F EB. C2
+ movd xmm10, dword [r12+1400H] ; 2315 _ 66 45: 0F 6E. 94 24, 00001400
+ pshufd xmm10, xmm10, 0 ; 231F _ 66 45: 0F 70. D2, 00
+ pxor xmm2, xmm10 ; 2325 _ 66 41: 0F EF. D2
+ movd xmm10, dword [r12+1404H] ; 232A _ 66 45: 0F 6E. 94 24, 00001404
+ pshufd xmm10, xmm10, 0 ; 2334 _ 66 45: 0F 70. D2, 00
+ pxor xmm11, xmm10 ; 233A _ 66 45: 0F EF. DA
+ movd xmm10, dword [r12+1408H] ; 233F _ 66 45: 0F 6E. 94 24, 00001408
+ pshufd xmm10, xmm10, 0 ; 2349 _ 66 45: 0F 70. D2, 00
+ pxor xmm8, xmm10 ; 234F _ 66 45: 0F EF. C2
+ movd xmm10, dword [r12+140CH] ; 2354 _ 66 45: 0F 6E. 94 24, 0000140C
+ pshufd xmm10, xmm10, 0 ; 235E _ 66 45: 0F 70. D2, 00
+ pxor xmm1, xmm10 ; 2364 _ 66 41: 0F EF. CA
+ movdqa xmm10, xmm11 ; 2369 _ 66 45: 0F 6F. D3
+ pxor xmm1, xmm2 ; 236E _ 66: 0F EF. CA
+ pxor xmm11, xmm8 ; 2372 _ 66 45: 0F EF. D8
+ pand xmm10, xmm1 ; 2377 _ 66 44: 0F DB. D1
+ pxor xmm10, xmm2 ; 237C _ 66 44: 0F EF. D2
+ por xmm2, xmm1 ; 2381 _ 66: 0F EB. D1
+ pxor xmm2, xmm11 ; 2385 _ 66 41: 0F EF. D3
+ pxor xmm11, xmm1 ; 238A _ 66 44: 0F EF. D9
+ pxor xmm1, xmm8 ; 238F _ 66 41: 0F EF. C8
+ por xmm8, xmm10 ; 2394 _ 66 45: 0F EB. C2
+ pxor xmm8, xmm11 ; 2399 _ 66 45: 0F EF. C3
+ pxor xmm11, xmm6 ; 239E _ 66 44: 0F EF. DE
+ por xmm11, xmm10 ; 23A3 _ 66 45: 0F EB. DA
+ pxor xmm10, xmm1 ; 23A8 _ 66 44: 0F EF. D1
+ pxor xmm10, xmm11 ; 23AD _ 66 45: 0F EF. D3
+ por xmm1, xmm2 ; 23B2 _ 66: 0F EB. CA
+ pxor xmm10, xmm1 ; 23B6 _ 66 44: 0F EF. D1
+ pxor xmm11, xmm1 ; 23BB _ 66 44: 0F EF. D9
+ movdqa xmm1, xmm10 ; 23C0 _ 66 41: 0F 6F. CA
+ psrld xmm10, 19 ; 23C5 _ 66 41: 0F 72. D2, 13
+ pslld xmm1, 13 ; 23CB _ 66: 0F 72. F1, 0D
+ por xmm1, xmm10 ; 23D0 _ 66 41: 0F EB. CA
+ movdqa xmm10, xmm8 ; 23D5 _ 66 45: 0F 6F. D0
+ psrld xmm8, 29 ; 23DA _ 66 41: 0F 72. D0, 1D
+ pxor xmm11, xmm1 ; 23E0 _ 66 44: 0F EF. D9
+ pslld xmm10, 3 ; 23E5 _ 66 41: 0F 72. F2, 03
+ por xmm10, xmm8 ; 23EB _ 66 45: 0F EB. D0
+ movdqa xmm8, xmm1 ; 23F0 _ 66 44: 0F 6F. C1
+ pxor xmm11, xmm10 ; 23F5 _ 66 45: 0F EF. DA
+ movdqa xmm0, xmm11 ; 23FA _ 66 41: 0F 6F. C3
+ pxor xmm2, xmm10 ; 23FF _ 66 41: 0F EF. D2
+ pslld xmm8, 3 ; 2404 _ 66 41: 0F 72. F0, 03
+ pxor xmm2, xmm8 ; 240A _ 66 41: 0F EF. D0
+ movdqa xmm8, xmm2 ; 240F _ 66 44: 0F 6F. C2
+ pslld xmm0, 1 ; 2414 _ 66: 0F 72. F0, 01
+ psrld xmm11, 31 ; 2419 _ 66 41: 0F 72. D3, 1F
+ por xmm0, xmm11 ; 241F _ 66 41: 0F EB. C3
+ movdqa xmm11, xmm0 ; 2424 _ 66 44: 0F 6F. D8
+ pslld xmm8, 7 ; 2429 _ 66 41: 0F 72. F0, 07
+ psrld xmm2, 25 ; 242F _ 66: 0F 72. D2, 19
+ por xmm8, xmm2 ; 2434 _ 66 44: 0F EB. C2
+ pxor xmm1, xmm0 ; 2439 _ 66: 0F EF. C8
+ pxor xmm1, xmm8 ; 243D _ 66 41: 0F EF. C8
+ movdqa xmm2, xmm1 ; 2442 _ 66: 0F 6F. D1
+ pxor xmm10, xmm8 ; 2446 _ 66 45: 0F EF. D0
+ pslld xmm11, 7 ; 244B _ 66 41: 0F 72. F3, 07
+ pxor xmm10, xmm11 ; 2451 _ 66 45: 0F EF. D3
+ movdqa xmm11, xmm10 ; 2456 _ 66 45: 0F 6F. DA
+ pslld xmm2, 5 ; 245B _ 66: 0F 72. F2, 05
+ psrld xmm1, 27 ; 2460 _ 66: 0F 72. D1, 1B
+ por xmm2, xmm1 ; 2465 _ 66: 0F EB. D1
+ pslld xmm11, 22 ; 2469 _ 66 41: 0F 72. F3, 16
+ psrld xmm10, 10 ; 246F _ 66 41: 0F 72. D2, 0A
+ por xmm11, xmm10 ; 2475 _ 66 45: 0F EB. DA
+ movd xmm10, dword [r12+1410H] ; 247A _ 66 45: 0F 6E. 94 24, 00001410
+ pshufd xmm10, xmm10, 0 ; 2484 _ 66 45: 0F 70. D2, 00
+ pxor xmm2, xmm10 ; 248A _ 66 41: 0F EF. D2
+ pxor xmm2, xmm6 ; 248F _ 66: 0F EF. D6
+ movd xmm10, dword [r12+1414H] ; 2493 _ 66 45: 0F 6E. 94 24, 00001414
+ pshufd xmm10, xmm10, 0 ; 249D _ 66 45: 0F 70. D2, 00
+ pxor xmm0, xmm10 ; 24A3 _ 66 41: 0F EF. C2
+ movd xmm10, dword [r12+1418H] ; 24A8 _ 66 45: 0F 6E. 94 24, 00001418
+ pshufd xmm10, xmm10, 0 ; 24B2 _ 66 45: 0F 70. D2, 00
+ pxor xmm11, xmm10 ; 24B8 _ 66 45: 0F EF. DA
+ pxor xmm11, xmm6 ; 24BD _ 66 44: 0F EF. DE
+ movd xmm10, dword [r12+141CH] ; 24C2 _ 66 45: 0F 6E. 94 24, 0000141C
+ pshufd xmm10, xmm10, 0 ; 24CC _ 66 45: 0F 70. D2, 00
+ pxor xmm8, xmm10 ; 24D2 _ 66 45: 0F EF. C2
+ movdqa xmm10, xmm2 ; 24D7 _ 66 44: 0F 6F. D2
+ pand xmm10, xmm0 ; 24DC _ 66 44: 0F DB. D0
+ pxor xmm11, xmm10 ; 24E1 _ 66 45: 0F EF. DA
+ por xmm10, xmm8 ; 24E6 _ 66 45: 0F EB. D0
+ pxor xmm8, xmm11 ; 24EB _ 66 45: 0F EF. C3
+ pxor xmm0, xmm10 ; 24F0 _ 66 41: 0F EF. C2
+ pxor xmm10, xmm2 ; 24F5 _ 66 44: 0F EF. D2
+ por xmm2, xmm0 ; 24FA _ 66: 0F EB. D0
+ pxor xmm0, xmm8 ; 24FE _ 66 41: 0F EF. C0
+ por xmm11, xmm10 ; 2503 _ 66 45: 0F EB. DA
+ pand xmm11, xmm2 ; 2508 _ 66 44: 0F DB. DA
+ movdqa xmm1, xmm11 ; 250D _ 66 41: 0F 6F. CB
+ pxor xmm10, xmm0 ; 2512 _ 66 44: 0F EF. D0
+ pand xmm0, xmm11 ; 2517 _ 66 41: 0F DB. C3
+ pxor xmm0, xmm10 ; 251C _ 66 41: 0F EF. C2
+ pand xmm10, xmm11 ; 2521 _ 66 45: 0F DB. D3
+ pxor xmm2, xmm10 ; 2526 _ 66 41: 0F EF. D2
+ movdqa xmm10, xmm8 ; 252B _ 66 45: 0F 6F. D0
+ pslld xmm1, 13 ; 2530 _ 66: 0F 72. F1, 0D
+ psrld xmm11, 19 ; 2535 _ 66 41: 0F 72. D3, 13
+ por xmm1, xmm11 ; 253B _ 66 41: 0F EB. CB
+ pslld xmm10, 3 ; 2540 _ 66 41: 0F 72. F2, 03
+ psrld xmm8, 29 ; 2546 _ 66 41: 0F 72. D0, 1D
+ por xmm10, xmm8 ; 254C _ 66 45: 0F EB. D0
+ movdqa xmm8, xmm1 ; 2551 _ 66 44: 0F 6F. C1
+ pxor xmm2, xmm1 ; 2556 _ 66: 0F EF. D1
+ pxor xmm2, xmm10 ; 255A _ 66 41: 0F EF. D2
+ pxor xmm0, xmm10 ; 255F _ 66 41: 0F EF. C2
+ pslld xmm8, 3 ; 2564 _ 66 41: 0F 72. F0, 03
+ pxor xmm0, xmm8 ; 256A _ 66 41: 0F EF. C0
+ movdqa xmm8, xmm2 ; 256F _ 66 44: 0F 6F. C2
+ psrld xmm2, 31 ; 2574 _ 66: 0F 72. D2, 1F
+ pslld xmm8, 1 ; 2579 _ 66 41: 0F 72. F0, 01
+ por xmm8, xmm2 ; 257F _ 66 44: 0F EB. C2
+ movdqa xmm2, xmm0 ; 2584 _ 66: 0F 6F. D0
+ movdqa xmm11, xmm8 ; 2588 _ 66 45: 0F 6F. D8
+ psrld xmm0, 25 ; 258D _ 66: 0F 72. D0, 19
+ pslld xmm2, 7 ; 2592 _ 66: 0F 72. F2, 07
+ por xmm2, xmm0 ; 2597 _ 66: 0F EB. D0
+ pxor xmm1, xmm8 ; 259B _ 66 41: 0F EF. C8
+ pxor xmm1, xmm2 ; 25A0 _ 66: 0F EF. CA
+ pxor xmm10, xmm2 ; 25A4 _ 66 44: 0F EF. D2
+ pslld xmm11, 7 ; 25A9 _ 66 41: 0F 72. F3, 07
+ pxor xmm10, xmm11 ; 25AF _ 66 45: 0F EF. D3
+ movdqa xmm11, xmm1 ; 25B4 _ 66 44: 0F 6F. D9
+ movdqa xmm0, xmm10 ; 25B9 _ 66 41: 0F 6F. C2
+ psrld xmm1, 27 ; 25BE _ 66: 0F 72. D1, 1B
+ pslld xmm11, 5 ; 25C3 _ 66 41: 0F 72. F3, 05
+ por xmm11, xmm1 ; 25C9 _ 66 44: 0F EB. D9
+ pslld xmm0, 22 ; 25CE _ 66: 0F 72. F0, 16
+ psrld xmm10, 10 ; 25D3 _ 66 41: 0F 72. D2, 0A
+ por xmm0, xmm10 ; 25D9 _ 66 41: 0F EB. C2
+ movd xmm10, dword [r12+1420H] ; 25DE _ 66 45: 0F 6E. 94 24, 00001420
+ pshufd xmm10, xmm10, 0 ; 25E8 _ 66 45: 0F 70. D2, 00
+ pxor xmm11, xmm10 ; 25EE _ 66 45: 0F EF. DA
+ movd xmm10, dword [r12+1424H] ; 25F3 _ 66 45: 0F 6E. 94 24, 00001424
+ pshufd xmm10, xmm10, 0 ; 25FD _ 66 45: 0F 70. D2, 00
+ pxor xmm8, xmm10 ; 2603 _ 66 45: 0F EF. C2
+ movd xmm10, dword [r12+1428H] ; 2608 _ 66 45: 0F 6E. 94 24, 00001428
+ pshufd xmm10, xmm10, 0 ; 2612 _ 66 45: 0F 70. D2, 00
+ pxor xmm0, xmm10 ; 2618 _ 66 41: 0F EF. C2
+ movd xmm10, dword [r12+142CH] ; 261D _ 66 45: 0F 6E. 94 24, 0000142C
+ pshufd xmm10, xmm10, 0 ; 2627 _ 66 45: 0F 70. D2, 00
+ pxor xmm2, xmm10 ; 262D _ 66 41: 0F EF. D2
+ movdqa xmm10, xmm11 ; 2632 _ 66 45: 0F 6F. D3
+ pand xmm10, xmm0 ; 2637 _ 66 44: 0F DB. D0
+ pxor xmm10, xmm2 ; 263C _ 66 44: 0F EF. D2
+ pxor xmm0, xmm8 ; 2641 _ 66 41: 0F EF. C0
+ pxor xmm0, xmm10 ; 2646 _ 66 41: 0F EF. C2
+ movdqa xmm1, xmm0 ; 264B _ 66: 0F 6F. C8
+ por xmm2, xmm11 ; 264F _ 66 41: 0F EB. D3
+ pxor xmm2, xmm8 ; 2654 _ 66 41: 0F EF. D0
+ movdqa xmm8, xmm2 ; 2659 _ 66 44: 0F 6F. C2
+ pxor xmm11, xmm0 ; 265E _ 66 44: 0F EF. D8
+ pslld xmm1, 13 ; 2663 _ 66: 0F 72. F1, 0D
+ por xmm8, xmm11 ; 2668 _ 66 45: 0F EB. C3
+ pxor xmm8, xmm10 ; 266D _ 66 45: 0F EF. C2
+ pand xmm10, xmm2 ; 2672 _ 66 44: 0F DB. D2
+ pxor xmm11, xmm10 ; 2677 _ 66 45: 0F EF. DA
+ pxor xmm2, xmm8 ; 267C _ 66 41: 0F EF. D0
+ pxor xmm2, xmm11 ; 2681 _ 66 41: 0F EF. D3
+ pxor xmm11, xmm6 ; 2686 _ 66 44: 0F EF. DE
+ psrld xmm0, 19 ; 268B _ 66: 0F 72. D0, 13
+ por xmm1, xmm0 ; 2690 _ 66: 0F EB. C8
+ movdqa xmm0, xmm2 ; 2694 _ 66: 0F 6F. C2
+ psrld xmm2, 29 ; 2698 _ 66: 0F 72. D2, 1D
+ pxor xmm8, xmm1 ; 269D _ 66 44: 0F EF. C1
+ pslld xmm0, 3 ; 26A2 _ 66: 0F 72. F0, 03
+ por xmm0, xmm2 ; 26A7 _ 66: 0F EB. C2
+ movdqa xmm2, xmm1 ; 26AB _ 66: 0F 6F. D1
+ pxor xmm8, xmm0 ; 26AF _ 66 44: 0F EF. C0
+ pxor xmm11, xmm0 ; 26B4 _ 66 44: 0F EF. D8
+ pslld xmm2, 3 ; 26B9 _ 66: 0F 72. F2, 03
+ pxor xmm11, xmm2 ; 26BE _ 66 44: 0F EF. DA
+ movdqa xmm2, xmm8 ; 26C3 _ 66 41: 0F 6F. D0
+ movdqa xmm10, xmm11 ; 26C8 _ 66 45: 0F 6F. D3
+ psrld xmm8, 31 ; 26CD _ 66 41: 0F 72. D0, 1F
+ pslld xmm2, 1 ; 26D3 _ 66: 0F 72. F2, 01
+ por xmm2, xmm8 ; 26D8 _ 66 41: 0F EB. D0
+ movdqa xmm8, xmm2 ; 26DD _ 66 44: 0F 6F. C2
+ pslld xmm10, 7 ; 26E2 _ 66 41: 0F 72. F2, 07
+ psrld xmm11, 25 ; 26E8 _ 66 41: 0F 72. D3, 19
+ por xmm10, xmm11 ; 26EE _ 66 45: 0F EB. D3
+ pxor xmm1, xmm2 ; 26F3 _ 66: 0F EF. CA
+ movd xmm11, dword [r12+1430H] ; 26F7 _ 66 45: 0F 6E. 9C 24, 00001430
+ pxor xmm1, xmm10 ; 2701 _ 66 41: 0F EF. CA
+ pxor xmm0, xmm10 ; 2706 _ 66 41: 0F EF. C2
+ pslld xmm8, 7 ; 270B _ 66 41: 0F 72. F0, 07
+ pxor xmm0, xmm8 ; 2711 _ 66 41: 0F EF. C0
+ movdqa xmm8, xmm1 ; 2716 _ 66 44: 0F 6F. C1
+ psrld xmm1, 27 ; 271B _ 66: 0F 72. D1, 1B
+ pshufd xmm11, xmm11, 0 ; 2720 _ 66 45: 0F 70. DB, 00
+ pslld xmm8, 5 ; 2726 _ 66 41: 0F 72. F0, 05
+ por xmm8, xmm1 ; 272C _ 66 44: 0F EB. C1
+ movdqa xmm1, xmm0 ; 2731 _ 66: 0F 6F. C8
+ psrld xmm0, 10 ; 2735 _ 66: 0F 72. D0, 0A
+ pxor xmm8, xmm11 ; 273A _ 66 45: 0F EF. C3
+ movd xmm11, dword [r12+1434H] ; 273F _ 66 45: 0F 6E. 9C 24, 00001434
+ pslld xmm1, 22 ; 2749 _ 66: 0F 72. F1, 16
+ por xmm1, xmm0 ; 274E _ 66: 0F EB. C8
+ pshufd xmm11, xmm11, 0 ; 2752 _ 66 45: 0F 70. DB, 00
+ pxor xmm2, xmm11 ; 2758 _ 66 41: 0F EF. D3
+ movd xmm11, dword [r12+1438H] ; 275D _ 66 45: 0F 6E. 9C 24, 00001438
+ pshufd xmm11, xmm11, 0 ; 2767 _ 66 45: 0F 70. DB, 00
+ pxor xmm1, xmm11 ; 276D _ 66 41: 0F EF. CB
+ movd xmm11, dword [r12+143CH] ; 2772 _ 66 45: 0F 6E. 9C 24, 0000143C
+ pshufd xmm11, xmm11, 0 ; 277C _ 66 45: 0F 70. DB, 00
+ pxor xmm10, xmm11 ; 2782 _ 66 45: 0F EF. D3
+ movdqa xmm11, xmm8 ; 2787 _ 66 45: 0F 6F. D8
+ por xmm11, xmm10 ; 278C _ 66 45: 0F EB. DA
+ pxor xmm10, xmm2 ; 2791 _ 66 44: 0F EF. D2
+ pand xmm2, xmm8 ; 2796 _ 66 41: 0F DB. D0
+ pxor xmm8, xmm1 ; 279B _ 66 44: 0F EF. C1
+ pxor xmm1, xmm10 ; 27A0 _ 66 41: 0F EF. CA
+ pand xmm10, xmm11 ; 27A5 _ 66 45: 0F DB. D3
+ por xmm8, xmm2 ; 27AA _ 66 44: 0F EB. C2
+ pxor xmm10, xmm8 ; 27AF _ 66 45: 0F EF. D0
+ pxor xmm11, xmm2 ; 27B4 _ 66 44: 0F EF. DA
+ pand xmm8, xmm11 ; 27B9 _ 66 45: 0F DB. C3
+ pxor xmm2, xmm10 ; 27BE _ 66 41: 0F EF. D2
+ pxor xmm8, xmm1 ; 27C3 _ 66 44: 0F EF. C1
+ por xmm2, xmm11 ; 27C8 _ 66 41: 0F EB. D3
+ pxor xmm2, xmm1 ; 27CD _ 66: 0F EF. D1
+ movdqa xmm0, xmm2 ; 27D1 _ 66: 0F 6F. C2
+ pxor xmm11, xmm10 ; 27D5 _ 66 45: 0F EF. DA
+ por xmm0, xmm10 ; 27DA _ 66 41: 0F EB. C2
+ pxor xmm11, xmm0 ; 27DF _ 66 44: 0F EF. D8
+ movdqa xmm1, xmm11 ; 27E4 _ 66 41: 0F 6F. CB
+ psrld xmm11, 19 ; 27E9 _ 66 41: 0F 72. D3, 13
+ pslld xmm1, 13 ; 27EF _ 66: 0F 72. F1, 0D
+ por xmm1, xmm11 ; 27F4 _ 66 41: 0F EB. CB
+ movdqa xmm11, xmm10 ; 27F9 _ 66 45: 0F 6F. DA
+ psrld xmm10, 29 ; 27FE _ 66 41: 0F 72. D2, 1D
+ pxor xmm2, xmm1 ; 2804 _ 66: 0F EF. D1
+ pslld xmm11, 3 ; 2808 _ 66 41: 0F 72. F3, 03
+ por xmm11, xmm10 ; 280E _ 66 45: 0F EB. DA
+ movdqa xmm10, xmm1 ; 2813 _ 66 44: 0F 6F. D1
+ pxor xmm2, xmm11 ; 2818 _ 66 41: 0F EF. D3
+ movdqa xmm0, xmm2 ; 281D _ 66: 0F 6F. C2
+ pxor xmm8, xmm11 ; 2821 _ 66 45: 0F EF. C3
+ pslld xmm10, 3 ; 2826 _ 66 41: 0F 72. F2, 03
+ pxor xmm8, xmm10 ; 282C _ 66 45: 0F EF. C2
+ movdqa xmm10, xmm8 ; 2831 _ 66 45: 0F 6F. D0
+ pslld xmm0, 1 ; 2836 _ 66: 0F 72. F0, 01
+ psrld xmm2, 31 ; 283B _ 66: 0F 72. D2, 1F
+ por xmm0, xmm2 ; 2840 _ 66: 0F EB. C2
+ movdqa xmm2, xmm0 ; 2844 _ 66: 0F 6F. D0
+ pslld xmm10, 7 ; 2848 _ 66 41: 0F 72. F2, 07
+ psrld xmm8, 25 ; 284E _ 66 41: 0F 72. D0, 19
+ por xmm10, xmm8 ; 2854 _ 66 45: 0F EB. D0
+ pxor xmm1, xmm0 ; 2859 _ 66: 0F EF. C8
+ pxor xmm1, xmm10 ; 285D _ 66 41: 0F EF. CA
+ pxor xmm11, xmm10 ; 2862 _ 66 45: 0F EF. DA
+ pslld xmm2, 7 ; 2867 _ 66: 0F 72. F2, 07
+ pxor xmm11, xmm2 ; 286C _ 66 44: 0F EF. DA
+ movdqa xmm2, xmm1 ; 2871 _ 66: 0F 6F. D1
+ movdqa xmm8, xmm11 ; 2875 _ 66 45: 0F 6F. C3
+ psrld xmm1, 27 ; 287A _ 66: 0F 72. D1, 1B
+ pslld xmm2, 5 ; 287F _ 66: 0F 72. F2, 05
+ por xmm2, xmm1 ; 2884 _ 66: 0F EB. D1
+ pslld xmm8, 22 ; 2888 _ 66 41: 0F 72. F0, 16
+ psrld xmm11, 10 ; 288E _ 66 41: 0F 72. D3, 0A
+ por xmm8, xmm11 ; 2894 _ 66 45: 0F EB. C3
+ movd xmm11, dword [r12+1440H] ; 2899 _ 66 45: 0F 6E. 9C 24, 00001440
+ pshufd xmm11, xmm11, 0 ; 28A3 _ 66 45: 0F 70. DB, 00
+ pxor xmm2, xmm11 ; 28A9 _ 66 41: 0F EF. D3
+ movd xmm11, dword [r12+1444H] ; 28AE _ 66 45: 0F 6E. 9C 24, 00001444
+ pshufd xmm11, xmm11, 0 ; 28B8 _ 66 45: 0F 70. DB, 00
+ pxor xmm0, xmm11 ; 28BE _ 66 41: 0F EF. C3
+ movd xmm11, dword [r12+1448H] ; 28C3 _ 66 45: 0F 6E. 9C 24, 00001448
+ pshufd xmm11, xmm11, 0 ; 28CD _ 66 45: 0F 70. DB, 00
+ pxor xmm8, xmm11 ; 28D3 _ 66 45: 0F EF. C3
+ movd xmm11, dword [r12+144CH] ; 28D8 _ 66 45: 0F 6E. 9C 24, 0000144C
+ pshufd xmm11, xmm11, 0 ; 28E2 _ 66 45: 0F 70. DB, 00
+ pxor xmm10, xmm11 ; 28E8 _ 66 45: 0F EF. D3
+ pxor xmm0, xmm10 ; 28ED _ 66 41: 0F EF. C2
+ movdqa xmm11, xmm0 ; 28F2 _ 66 44: 0F 6F. D8
+ pxor xmm10, xmm6 ; 28F7 _ 66 44: 0F EF. D6
+ pxor xmm8, xmm10 ; 28FC _ 66 45: 0F EF. C2
+ pxor xmm10, xmm2 ; 2901 _ 66 44: 0F EF. D2
+ pand xmm11, xmm10 ; 2906 _ 66 45: 0F DB. DA
+ pxor xmm11, xmm8 ; 290B _ 66 45: 0F EF. D8
+ movdqa xmm1, xmm11 ; 2910 _ 66 41: 0F 6F. CB
+ pxor xmm0, xmm10 ; 2915 _ 66 41: 0F EF. C2
+ pxor xmm2, xmm0 ; 291A _ 66: 0F EF. D0
+ pand xmm8, xmm0 ; 291E _ 66 44: 0F DB. C0
+ pxor xmm8, xmm2 ; 2923 _ 66 44: 0F EF. C2
+ pand xmm2, xmm11 ; 2928 _ 66 41: 0F DB. D3
+ pxor xmm10, xmm2 ; 292D _ 66 44: 0F EF. D2
+ por xmm0, xmm11 ; 2932 _ 66 41: 0F EB. C3
+ pxor xmm0, xmm2 ; 2937 _ 66: 0F EF. C2
+ por xmm2, xmm10 ; 293B _ 66 41: 0F EB. D2
+ pxor xmm2, xmm8 ; 2940 _ 66 41: 0F EF. D0
+ pand xmm8, xmm10 ; 2945 _ 66 45: 0F DB. C2
+ pxor xmm2, xmm6 ; 294A _ 66: 0F EF. D6
+ pxor xmm0, xmm8 ; 294E _ 66 41: 0F EF. C0
+ movdqa xmm8, xmm2 ; 2953 _ 66 44: 0F 6F. C2
+ pslld xmm1, 13 ; 2958 _ 66: 0F 72. F1, 0D
+ psrld xmm11, 19 ; 295D _ 66 41: 0F 72. D3, 13
+ por xmm1, xmm11 ; 2963 _ 66 41: 0F EB. CB
+ pslld xmm8, 3 ; 2968 _ 66 41: 0F 72. F0, 03
+ psrld xmm2, 29 ; 296E _ 66: 0F 72. D2, 1D
+ por xmm8, xmm2 ; 2973 _ 66 44: 0F EB. C2
+ movdqa xmm2, xmm1 ; 2978 _ 66: 0F 6F. D1
+ pxor xmm0, xmm1 ; 297C _ 66: 0F EF. C1
+ pxor xmm0, xmm8 ; 2980 _ 66 41: 0F EF. C0
+ movdqa xmm11, xmm0 ; 2985 _ 66 44: 0F 6F. D8
+ pxor xmm10, xmm8 ; 298A _ 66 45: 0F EF. D0
+ pslld xmm2, 3 ; 298F _ 66: 0F 72. F2, 03
+ pxor xmm10, xmm2 ; 2994 _ 66 44: 0F EF. D2
+ movdqa xmm2, xmm10 ; 2999 _ 66 41: 0F 6F. D2
+ pslld xmm11, 1 ; 299E _ 66 41: 0F 72. F3, 01
+ psrld xmm0, 31 ; 29A4 _ 66: 0F 72. D0, 1F
+ por xmm11, xmm0 ; 29A9 _ 66 44: 0F EB. D8
+ pslld xmm2, 7 ; 29AE _ 66: 0F 72. F2, 07
+ psrld xmm10, 25 ; 29B3 _ 66 41: 0F 72. D2, 19
+ por xmm2, xmm10 ; 29B9 _ 66 41: 0F EB. D2
+ movdqa xmm10, xmm11 ; 29BE _ 66 45: 0F 6F. D3
+ pxor xmm1, xmm11 ; 29C3 _ 66 41: 0F EF. CB
+ pxor xmm1, xmm2 ; 29C8 _ 66: 0F EF. CA
+ movdqa xmm0, xmm1 ; 29CC _ 66: 0F 6F. C1
+ pxor xmm8, xmm2 ; 29D0 _ 66 44: 0F EF. C2
+ pslld xmm10, 7 ; 29D5 _ 66 41: 0F 72. F2, 07
+ pxor xmm8, xmm10 ; 29DB _ 66 45: 0F EF. C2
+ movdqa xmm10, xmm8 ; 29E0 _ 66 45: 0F 6F. D0
+ pslld xmm0, 5 ; 29E5 _ 66: 0F 72. F0, 05
+ psrld xmm1, 27 ; 29EA _ 66: 0F 72. D1, 1B
+ por xmm0, xmm1 ; 29EF _ 66: 0F EB. C1
+ pslld xmm10, 22 ; 29F3 _ 66 41: 0F 72. F2, 16
+ psrld xmm8, 10 ; 29F9 _ 66 41: 0F 72. D0, 0A
+ por xmm10, xmm8 ; 29FF _ 66 45: 0F EB. D0
D0 + movd xmm8, dword [r12+1450H] ; 2A04 _ 66 45: 0F 6E. 84 24, 00001450 + pshufd xmm8, xmm8, 0 ; 2A0E _ 66 45: 0F 70. C0, 00 + pxor xmm0, xmm8 ; 2A14 _ 66 41: 0F EF. C0 + movd xmm8, dword [r12+1454H] ; 2A19 _ 66 45: 0F 6E. 84 24, 00001454 + pshufd xmm8, xmm8, 0 ; 2A23 _ 66 45: 0F 70. C0, 00 + pxor xmm11, xmm8 ; 2A29 _ 66 45: 0F EF. D8 + pxor xmm0, xmm11 ; 2A2E _ 66 41: 0F EF. C3 + movd xmm8, dword [r12+1458H] ; 2A33 _ 66 45: 0F 6E. 84 24, 00001458 + pshufd xmm8, xmm8, 0 ; 2A3D _ 66 45: 0F 70. C0, 00 + pxor xmm10, xmm8 ; 2A43 _ 66 45: 0F EF. D0 + movd xmm8, dword [r12+145CH] ; 2A48 _ 66 45: 0F 6E. 84 24, 0000145C + pshufd xmm8, xmm8, 0 ; 2A52 _ 66 45: 0F 70. C0, 00 + pxor xmm2, xmm8 ; 2A58 _ 66 41: 0F EF. D0 + pxor xmm11, xmm2 ; 2A5D _ 66 44: 0F EF. DA + movdqa xmm8, xmm11 ; 2A62 _ 66 45: 0F 6F. C3 + pxor xmm2, xmm6 ; 2A67 _ 66: 0F EF. D6 + pxor xmm10, xmm2 ; 2A6B _ 66 44: 0F EF. D2 + pand xmm8, xmm0 ; 2A70 _ 66 44: 0F DB. C0 + pxor xmm8, xmm10 ; 2A75 _ 66 45: 0F EF. C2 + movdqa xmm1, xmm8 ; 2A7A _ 66 41: 0F 6F. C8 + por xmm10, xmm11 ; 2A7F _ 66 45: 0F EB. D3 + pxor xmm11, xmm2 ; 2A84 _ 66 44: 0F EF. DA + pand xmm2, xmm8 ; 2A89 _ 66 41: 0F DB. D0 + pxor xmm2, xmm0 ; 2A8E _ 66: 0F EF. D0 + pxor xmm11, xmm8 ; 2A92 _ 66 45: 0F EF. D8 + pxor xmm11, xmm10 ; 2A97 _ 66 45: 0F EF. DA + pxor xmm10, xmm0 ; 2A9C _ 66 44: 0F EF. D0 + pand xmm0, xmm2 ; 2AA1 _ 66: 0F DB. C2 + pxor xmm10, xmm6 ; 2AA5 _ 66 44: 0F EF. D6 + pxor xmm0, xmm11 ; 2AAA _ 66 41: 0F EF. C3 + por xmm11, xmm2 ; 2AAF _ 66 44: 0F EB. DA + pxor xmm11, xmm10 ; 2AB4 _ 66 45: 0F EF. DA + movdqa xmm10, xmm0 ; 2AB9 _ 66 44: 0F 6F. D0 + pslld xmm1, 13 ; 2ABE _ 66: 0F 72. F1, 0D + psrld xmm8, 19 ; 2AC3 _ 66 41: 0F 72. D0, 13 + por xmm1, xmm8 ; 2AC9 _ 66 41: 0F EB. C8 + movdqa xmm8, xmm1 ; 2ACE _ 66 44: 0F 6F. C1 + pslld xmm10, 3 ; 2AD3 _ 66 41: 0F 72. F2, 03 + psrld xmm0, 29 ; 2AD9 _ 66: 0F 72. D0, 1D + por xmm10, xmm0 ; 2ADE _ 66 44: 0F EB. D0 + pxor xmm2, xmm1 ; 2AE3 _ 66: 0F EF. D1 + pxor xmm2, xmm10 ; 2AE7 _ 66 41: 0F EF. D2 + pxor xmm11, xmm10 ; 2AEC _ 66 45: 0F EF. DA + pslld xmm8, 3 ; 2AF1 _ 66 41: 0F 72. F0, 03 + pxor xmm11, xmm8 ; 2AF7 _ 66 45: 0F EF. D8 + movdqa xmm8, xmm2 ; 2AFC _ 66 44: 0F 6F. C2 + psrld xmm2, 31 ; 2B01 _ 66: 0F 72. D2, 1F + pslld xmm8, 1 ; 2B06 _ 66 41: 0F 72. F0, 01 + por xmm8, xmm2 ; 2B0C _ 66 44: 0F EB. C2 + movdqa xmm2, xmm11 ; 2B11 _ 66 41: 0F 6F. D3 + psrld xmm11, 25 ; 2B16 _ 66 41: 0F 72. D3, 19 + pxor xmm1, xmm8 ; 2B1C _ 66 41: 0F EF. C8 + pslld xmm2, 7 ; 2B21 _ 66: 0F 72. F2, 07 + por xmm2, xmm11 ; 2B26 _ 66 41: 0F EB. D3 + movdqa xmm11, xmm8 ; 2B2B _ 66 45: 0F 6F. D8 + pxor xmm1, xmm2 ; 2B30 _ 66: 0F EF. CA + pxor xmm10, xmm2 ; 2B34 _ 66 44: 0F EF. D2 + pslld xmm11, 7 ; 2B39 _ 66 41: 0F 72. F3, 07 + pxor xmm10, xmm11 ; 2B3F _ 66 45: 0F EF. D3 + movdqa xmm11, xmm1 ; 2B44 _ 66 44: 0F 6F. D9 + movdqa xmm0, xmm10 ; 2B49 _ 66 41: 0F 6F. C2 + psrld xmm1, 27 ; 2B4E _ 66: 0F 72. D1, 1B + pslld xmm11, 5 ; 2B53 _ 66 41: 0F 72. F3, 05 + por xmm11, xmm1 ; 2B59 _ 66 44: 0F EB. D9 + pslld xmm0, 22 ; 2B5E _ 66: 0F 72. F0, 16 + psrld xmm10, 10 ; 2B63 _ 66 41: 0F 72. D2, 0A + por xmm0, xmm10 ; 2B69 _ 66 41: 0F EB. C2 + movd xmm10, dword [r12+1460H] ; 2B6E _ 66 45: 0F 6E. 94 24, 00001460 + pshufd xmm10, xmm10, 0 ; 2B78 _ 66 45: 0F 70. D2, 00 + pxor xmm11, xmm10 ; 2B7E _ 66 45: 0F EF. DA + movd xmm10, dword [r12+1464H] ; 2B83 _ 66 45: 0F 6E. 94 24, 00001464 + pshufd xmm10, xmm10, 0 ; 2B8D _ 66 45: 0F 70. D2, 00 + pxor xmm8, xmm10 ; 2B93 _ 66 45: 0F EF. C2 + movd xmm10, dword [r12+1468H] ; 2B98 _ 66 45: 0F 6E. 
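Strip away the register shuffling and each encrypt round above ends with Serpent's linear transformation; the rotation and shift counts visible in the listing (13, 3, a left shift by 3, 1, 7, a left shift by 7, 5, 22) are its defining constants. A scalar reference form, as a sketch to read the vectorized code against:

#include <stdint.h>

static inline uint32_t rotl32(uint32_t v, int n)
{
    return (v << n) | (v >> (32 - n));
}

/* Serpent forward linear transformation; the SSE2 rounds apply exactly
   these steps to four blocks at once via pslld/psrld/por and pxor. */
static void serpent_lt(uint32_t x[4])
{
    x[0] = rotl32(x[0], 13);
    x[2] = rotl32(x[2], 3);
    x[1] ^= x[0] ^ x[2];
    x[3] ^= x[2] ^ (x[0] << 3);
    x[1] = rotl32(x[1], 1);
    x[3] = rotl32(x[3], 7);
    x[0] ^= x[1] ^ x[3];
    x[2] ^= x[3] ^ (x[1] << 7);
    x[0] = rotl32(x[0], 5);
    x[2] = rotl32(x[2], 22);
}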
94 24, 00001468 + pshufd xmm10, xmm10, 0 ; 2BA2 _ 66 45: 0F 70. D2, 00 + pxor xmm0, xmm10 ; 2BA8 _ 66 41: 0F EF. C2 + pxor xmm0, xmm6 ; 2BAD _ 66: 0F EF. C6 + movd xmm10, dword [r12+146CH] ; 2BB1 _ 66 45: 0F 6E. 94 24, 0000146C + pshufd xmm10, xmm10, 0 ; 2BBB _ 66 45: 0F 70. D2, 00 + pxor xmm2, xmm10 ; 2BC1 _ 66 41: 0F EF. D2 + movdqa xmm1, xmm2 ; 2BC6 _ 66: 0F 6F. CA + pand xmm1, xmm11 ; 2BCA _ 66 41: 0F DB. CB + pxor xmm11, xmm2 ; 2BCF _ 66 44: 0F EF. DA + pxor xmm1, xmm0 ; 2BD4 _ 66: 0F EF. C8 + por xmm0, xmm2 ; 2BD8 _ 66: 0F EB. C2 + pxor xmm8, xmm1 ; 2BDC _ 66 44: 0F EF. C1 + pxor xmm0, xmm11 ; 2BE1 _ 66 41: 0F EF. C3 + por xmm11, xmm8 ; 2BE6 _ 66 45: 0F EB. D8 + pxor xmm0, xmm8 ; 2BEB _ 66 41: 0F EF. C0 + pxor xmm2, xmm11 ; 2BF0 _ 66 41: 0F EF. D3 + por xmm11, xmm1 ; 2BF5 _ 66 44: 0F EB. D9 + pxor xmm11, xmm0 ; 2BFA _ 66 44: 0F EF. D8 + pxor xmm2, xmm1 ; 2BFF _ 66: 0F EF. D1 + pxor xmm2, xmm11 ; 2C03 _ 66 41: 0F EF. D3 + movdqa xmm10, xmm2 ; 2C08 _ 66 44: 0F 6F. D2 + pxor xmm1, xmm6 ; 2C0D _ 66: 0F EF. CE + pand xmm0, xmm2 ; 2C11 _ 66: 0F DB. C2 + pxor xmm1, xmm0 ; 2C15 _ 66: 0F EF. C8 + movdqa xmm0, xmm11 ; 2C19 _ 66 41: 0F 6F. C3 + psrld xmm11, 19 ; 2C1E _ 66 41: 0F 72. D3, 13 + pslld xmm10, 3 ; 2C24 _ 66 41: 0F 72. F2, 03 + pslld xmm0, 13 ; 2C2A _ 66: 0F 72. F0, 0D + por xmm0, xmm11 ; 2C2F _ 66 41: 0F EB. C3 + psrld xmm2, 29 ; 2C34 _ 66: 0F 72. D2, 1D + por xmm10, xmm2 ; 2C39 _ 66 44: 0F EB. D2 + movdqa xmm2, xmm0 ; 2C3E _ 66: 0F 6F. D0 + pxor xmm8, xmm0 ; 2C42 _ 66 44: 0F EF. C0 + pxor xmm8, xmm10 ; 2C47 _ 66 45: 0F EF. C2 + pxor xmm1, xmm10 ; 2C4C _ 66 41: 0F EF. CA + pslld xmm2, 3 ; 2C51 _ 66: 0F 72. F2, 03 + pxor xmm1, xmm2 ; 2C56 _ 66: 0F EF. CA + movdqa xmm2, xmm8 ; 2C5A _ 66 41: 0F 6F. D0 + psrld xmm8, 31 ; 2C5F _ 66 41: 0F 72. D0, 1F + pslld xmm2, 1 ; 2C65 _ 66: 0F 72. F2, 01 + por xmm2, xmm8 ; 2C6A _ 66 41: 0F EB. D0 + movdqa xmm8, xmm1 ; 2C6F _ 66 44: 0F 6F. C1 + movdqa xmm11, xmm2 ; 2C74 _ 66 44: 0F 6F. DA + psrld xmm1, 25 ; 2C79 _ 66: 0F 72. D1, 19 + pslld xmm8, 7 ; 2C7E _ 66 41: 0F 72. F0, 07 + por xmm8, xmm1 ; 2C84 _ 66 44: 0F EB. C1 + pxor xmm0, xmm2 ; 2C89 _ 66: 0F EF. C2 + pxor xmm0, xmm8 ; 2C8D _ 66 41: 0F EF. C0 + movdqa xmm1, xmm0 ; 2C92 _ 66: 0F 6F. C8 + pxor xmm10, xmm8 ; 2C96 _ 66 45: 0F EF. D0 + pslld xmm11, 7 ; 2C9B _ 66 41: 0F 72. F3, 07 + pxor xmm10, xmm11 ; 2CA1 _ 66 45: 0F EF. D3 + movdqa xmm11, xmm10 ; 2CA6 _ 66 45: 0F 6F. DA + pslld xmm1, 5 ; 2CAB _ 66: 0F 72. F1, 05 + psrld xmm0, 27 ; 2CB0 _ 66: 0F 72. D0, 1B + por xmm1, xmm0 ; 2CB5 _ 66: 0F EB. C8 + pslld xmm11, 22 ; 2CB9 _ 66 41: 0F 72. F3, 16 + psrld xmm10, 10 ; 2CBF _ 66 41: 0F 72. D2, 0A + por xmm11, xmm10 ; 2CC5 _ 66 45: 0F EB. DA + movd xmm10, dword [r12+1470H] ; 2CCA _ 66 45: 0F 6E. 94 24, 00001470 + pshufd xmm10, xmm10, 0 ; 2CD4 _ 66 45: 0F 70. D2, 00 + pxor xmm1, xmm10 ; 2CDA _ 66 41: 0F EF. CA + movd xmm10, dword [r12+1474H] ; 2CDF _ 66 45: 0F 6E. 94 24, 00001474 + pshufd xmm10, xmm10, 0 ; 2CE9 _ 66 45: 0F 70. D2, 00 + pxor xmm2, xmm10 ; 2CEF _ 66 41: 0F EF. D2 + movdqa xmm0, xmm2 ; 2CF4 _ 66: 0F 6F. C2 + movd xmm10, dword [r12+1478H] ; 2CF8 _ 66 45: 0F 6E. 94 24, 00001478 + pshufd xmm10, xmm10, 0 ; 2D02 _ 66 45: 0F 70. D2, 00 + pxor xmm11, xmm10 ; 2D08 _ 66 45: 0F EF. DA + por xmm0, xmm11 ; 2D0D _ 66 41: 0F EB. C3 + pxor xmm2, xmm11 ; 2D12 _ 66 41: 0F EF. D3 + movd xmm10, dword [r12+147CH] ; 2D17 _ 66 45: 0F 6E. 94 24, 0000147C + pshufd xmm10, xmm10, 0 ; 2D21 _ 66 45: 0F 70. D2, 00 + pxor xmm8, xmm10 ; 2D27 _ 66 45: 0F EF. C2 + pxor xmm0, xmm8 ; 2D2C _ 66 41: 0F EF. 
C0 + pxor xmm11, xmm0 ; 2D31 _ 66 44: 0F EF. D8 + por xmm8, xmm2 ; 2D36 _ 66 44: 0F EB. C2 + pand xmm8, xmm1 ; 2D3B _ 66 44: 0F DB. C1 + pxor xmm2, xmm11 ; 2D40 _ 66 41: 0F EF. D3 + pxor xmm8, xmm0 ; 2D45 _ 66 44: 0F EF. C0 + por xmm0, xmm2 ; 2D4A _ 66: 0F EB. C2 + pxor xmm0, xmm1 ; 2D4E _ 66: 0F EF. C1 + por xmm1, xmm2 ; 2D52 _ 66: 0F EB. CA + pxor xmm1, xmm11 ; 2D56 _ 66 41: 0F EF. CB + pxor xmm0, xmm2 ; 2D5B _ 66: 0F EF. C2 + pxor xmm11, xmm0 ; 2D5F _ 66 44: 0F EF. D8 + pand xmm0, xmm1 ; 2D64 _ 66: 0F DB. C1 + pxor xmm0, xmm2 ; 2D68 _ 66: 0F EF. C2 + pxor xmm11, xmm6 ; 2D6C _ 66 44: 0F EF. DE + por xmm11, xmm1 ; 2D71 _ 66 44: 0F EB. D9 + pxor xmm2, xmm11 ; 2D76 _ 66 41: 0F EF. D3 + movd xmm11, dword [r12+1480H] ; 2D7B _ 66 45: 0F 6E. 9C 24, 00001480 + pshufd xmm10, xmm11, 0 ; 2D85 _ 66 45: 0F 70. D3, 00 + pxor xmm2, xmm10 ; 2D8B _ 66 41: 0F EF. D2 + movd xmm11, dword [r12+1484H] ; 2D90 _ 66 45: 0F 6E. 9C 24, 00001484 + pshufd xmm10, xmm11, 0 ; 2D9A _ 66 45: 0F 70. D3, 00 + pxor xmm8, xmm10 ; 2DA0 _ 66 45: 0F EF. C2 + movd xmm11, dword [r12+1488H] ; 2DA5 _ 66 45: 0F 6E. 9C 24, 00001488 + pshufd xmm10, xmm11, 0 ; 2DAF _ 66 45: 0F 70. D3, 00 + pxor xmm0, xmm10 ; 2DB5 _ 66 41: 0F EF. C2 + movd xmm11, dword [r12+148CH] ; 2DBA _ 66 45: 0F 6E. 9C 24, 0000148C + pshufd xmm10, xmm11, 0 ; 2DC4 _ 66 45: 0F 70. D3, 00 + movdqa xmm11, xmm2 ; 2DCA _ 66 44: 0F 6F. DA + pxor xmm1, xmm10 ; 2DCF _ 66 41: 0F EF. CA + movdqa xmm10, xmm0 ; 2DD4 _ 66 44: 0F 6F. D0 + punpckldq xmm11, xmm8 ; 2DD9 _ 66 45: 0F 62. D8 + punpckhdq xmm2, xmm8 ; 2DDE _ 66 41: 0F 6A. D0 + punpckldq xmm10, xmm1 ; 2DE3 _ 66 44: 0F 62. D1 + punpckhdq xmm0, xmm1 ; 2DE8 _ 66: 0F 6A. C1 + movdqa xmm8, xmm11 ; 2DEC _ 66 45: 0F 6F. C3 + punpckhqdq xmm11, xmm10 ; 2DF1 _ 66 45: 0F 6D. DA + pxor xmm11, xmm5 ; 2DF6 _ 66 44: 0F EF. DD + movdqu oword [rbp+10H], xmm11 ; 2DFB _ F3 44: 0F 7F. 5D, 10 + punpcklqdq xmm8, xmm10 ; 2E01 _ 66 45: 0F 6C. C2 + movdqa xmm10, xmm2 ; 2E06 _ 66 44: 0F 6F. D2 + punpckhqdq xmm2, xmm0 ; 2E0B _ 66: 0F 6D. D0 + pxor xmm8, xmm9 ; 2E0F _ 66 45: 0F EF. C1 + movdqu oword [rbp], xmm8 ; 2E14 _ F3 44: 0F 7F. 45, 00 + movdqa xmm9, xmm3 ; 2E1A _ 66 44: 0F 6F. CB + punpcklqdq xmm10, xmm0 ; 2E1F _ 66 44: 0F 6C. D0 + pxor xmm10, xmm4 ; 2E24 _ 66 44: 0F EF. D4 + movdqu oword [rbp+20H], xmm10 ; 2E29 _ F3 44: 0F 7F. 55, 20 + pxor xmm2, xmm3 ; 2E2F _ 66: 0F EF. D3 + movdqu oword [rbp+30H], xmm2 ; 2E33 _ F3: 0F 7F. 55, 30 + movdqa xmm2, xmm3 ; 2E38 _ 66: 0F 6F. D3 + psllq xmm9, 1 ; 2E3C _ 66 41: 0F 73. F1, 01 + psraw xmm3, 8 ; 2E42 _ 66: 0F 71. E3, 08 + pslldq xmm2, 8 ; 2E47 _ 66: 0F 73. FA, 08 + psrldq xmm2, 7 ; 2E4C _ 66: 0F 73. DA, 07 + psrlq xmm2, 7 ; 2E51 _ 66: 0F 73. D2, 07 + por xmm9, xmm2 ; 2E56 _ 66 44: 0F EB. CA + psrldq xmm3, 15 ; 2E5B _ 66: 0F 73. DB, 0F + pand xmm3, xmm7 ; 2E60 _ 66: 0F DB. DF + pxor xmm9, xmm3 ; 2E64 _ 66 44: 0F EF. CB + add r13, 64 ; 2E69 _ 49: 83. C5, 40 + add rbp, 64 ; 2E6D _ 48: 83. C5, 40 + inc r10d ; 2E71 _ 41: FF. C2 + cmp r10d, 8 ; 2E74 _ 41: 83. FA, 08 + jl ?_003 ; 2E78 _ 0F 8C, FFFFD23A + add r14, -512 ; 2E7E _ 49: 81. C6, FFFFFE00 + jne ?_001 ; 2E85 _ 0F 85, FFFFD1F6 + movaps xmm6, oword [rsp+70H] ; 2E8B _ 0F 28. 74 24, 70 + movaps xmm7, oword [rsp+60H] ; 2E90 _ 0F 28. 7C 24, 60 + movaps xmm8, oword [rsp+50H] ; 2E95 _ 44: 0F 28. 44 24, 50 + movaps xmm9, oword [rsp+40H] ; 2E9B _ 44: 0F 28. 4C 24, 40 + movaps xmm10, oword [rsp+30H] ; 2EA1 _ 44: 0F 28. 54 24, 30 + movaps xmm11, oword [rsp+20H] ; 2EA7 _ 44: 0F 28. 5C 24, 20 + add rsp, 160 ; 2EAD _ 48: 81. 
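The punpckldq/punpckhdq followed by punpcklqdq/punpckhqdq ladder just before the stores to [rbp] ... [rbp+30H] is a 4x4 transpose of 32-bit words: it converts the four bitsliced state registers back into four consecutive 128-bit output blocks (the same ladder, with roles reversed, loads them). A C intrinsics sketch of that transpose:

#include <emmintrin.h>

/* 4x4 transpose of 32-bit lanes across four XMM registers: the
   punpck{l,h}dq / punpck{l,h}qdq ladder from the listing above. */
static void transpose4x4(__m128i r[4])
{
    __m128i t0 = _mm_unpacklo_epi32(r[0], r[1]); /* a0 b0 a1 b1 */
    __m128i t1 = _mm_unpackhi_epi32(r[0], r[1]); /* a2 b2 a3 b3 */
    __m128i t2 = _mm_unpacklo_epi32(r[2], r[3]); /* c0 d0 c1 d1 */
    __m128i t3 = _mm_unpackhi_epi32(r[2], r[3]); /* c2 d2 c3 d3 */

    r[0] = _mm_unpacklo_epi64(t0, t2);           /* a0 b0 c0 d0 */
    r[1] = _mm_unpackhi_epi64(t0, t2);           /* a1 b1 c1 d1 */
    r[2] = _mm_unpacklo_epi64(t1, t3);           /* a2 b2 c2 d2 */
    r[3] = _mm_unpackhi_epi64(t1, t3);           /* a3 b3 c3 d3 */
}

Processing four blocks per pass through this transpose is what makes the bitsliced S-box sequences pay off on SSE2.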
C4, 000000A0 + pop rbp ; 2EB4 _ 5D + pop r12 ; 2EB5 _ 41: 5C + pop r13 ; 2EB7 _ 41: 5D + pop r14 ; 2EB9 _ 41: 5E + pop r15 ; 2EBB _ 41: 5F + ret ; 2EBD _ C3 +; xts_serpent_sse2_encrypt End of function + + nop ; 2EBE _ 90 + nop ; 2EBF _ 90 + +ALIGN 16 +xts_serpent_sse2_decrypt:; Function begin + push r15 ; 2EC0 _ 41: 57 + push r14 ; 2EC2 _ 41: 56 + push r13 ; 2EC4 _ 41: 55 + push r12 ; 2EC6 _ 41: 54 + push rbp ; 2EC8 _ 55 + sub rsp, 160 ; 2EC9 _ 48: 81. EC, 000000A0 + mov rax, qword [rsp+0F0H] ; 2ED0 _ 48: 8B. 84 24, 000000F0 + movaps oword [rsp+70H], xmm6 ; 2ED8 _ 0F 29. 74 24, 70 + movaps oword [rsp+60H], xmm7 ; 2EDD _ 0F 29. 7C 24, 60 + movaps oword [rsp+50H], xmm8 ; 2EE2 _ 44: 0F 29. 44 24, 50 + movaps oword [rsp+40H], xmm9 ; 2EE8 _ 44: 0F 29. 4C 24, 40 + movaps oword [rsp+30H], xmm10 ; 2EEE _ 44: 0F 29. 54 24, 30 + movaps oword [rsp+20H], xmm11 ; 2EF4 _ 44: 0F 29. 5C 24, 20 + shr r9, 9 ; 2EFA _ 49: C1. E9, 09 + mov qword [rsp+80H], r9 ; 2EFE _ 4C: 89. 8C 24, 00000080 + lea r9, [rax+2710H] ; 2F06 _ 4C: 8D. 88, 00002710 + mov qword [rsp+88H], 0 ; 2F0D _ 48: C7. 84 24, 00000088, 00000000 + mov r10d, 135 ; 2F19 _ 41: BA, 00000087 + mov r12, rax ; 2F1F _ 49: 89. C4 + movd xmm1, r10d ; 2F22 _ 66 41: 0F 6E. CA + movdqa xmm7, xmm1 ; 2F27 _ 66: 0F 6F. F9 + mov rbp, rdx ; 2F2B _ 48: 89. D5 + mov r13, rcx ; 2F2E _ 49: 89. CD + mov r14, r8 ; 2F31 _ 4D: 89. C6 + mov r15, r9 ; 2F34 _ 4D: 89. CF + pcmpeqd xmm0, xmm0 ; 2F37 _ 66: 0F 76. C0 + movdqa xmm6, xmm0 ; 2F3B _ 66: 0F 6F. F0 + jmp ?_005 ; 2F3F _ EB, 0A + +?_004: movdqa oword [rsp+90H], xmm8 ; 2F41 _ 66 44: 0F 7F. 84 24, 00000090 +?_005: inc qword [rsp+80H] ; 2F4B _ 48: FF. 84 24, 00000080 + lea rcx, [rsp+80H] ; 2F53 _ 48: 8D. 8C 24, 00000080 + mov r8, r15 ; 2F5B _ 4D: 89. F8 + lea rdx, [rsp+90H] ; 2F5E _ 48: 8D. 94 24, 00000090 + call serpent256_encrypt ; 2F66 _ E8, 00000000(rel) + movdqa xmm8, oword [rsp+90H] ; 2F6B _ 66 44: 0F 6F. 84 24, 00000090 + xor r10d, r10d ; 2F75 _ 45: 33. D2 +?_006: movdqa xmm5, xmm8 ; 2F78 _ 66 41: 0F 6F. E8 + movdqa xmm4, xmm8 ; 2F7D _ 66 41: 0F 6F. E0 + movdqa xmm3, xmm8 ; 2F82 _ 66 41: 0F 6F. D8 + movdqu xmm10, oword [r13+10H] ; 2F87 _ F3 45: 0F 6F. 55, 10 + psllq xmm5, 1 ; 2F8D _ 66: 0F 73. F5, 01 + pslldq xmm4, 8 ; 2F92 _ 66: 0F 73. FC, 08 + psrldq xmm4, 7 ; 2F97 _ 66: 0F 73. DC, 07 + psrlq xmm4, 7 ; 2F9C _ 66: 0F 73. D4, 07 + por xmm5, xmm4 ; 2FA1 _ 66: 0F EB. EC + psraw xmm3, 8 ; 2FA5 _ 66: 0F 71. E3, 08 + psrldq xmm3, 15 ; 2FAA _ 66: 0F 73. DB, 0F + pand xmm3, xmm7 ; 2FAF _ 66: 0F DB. DF + pxor xmm5, xmm3 ; 2FB3 _ 66: 0F EF. EB + movdqa xmm4, xmm5 ; 2FB7 _ 66: 0F 6F. E5 + movdqa xmm11, xmm5 ; 2FBB _ 66 44: 0F 6F. DD + movdqa xmm9, xmm5 ; 2FC0 _ 66 44: 0F 6F. CD + psllq xmm4, 1 ; 2FC5 _ 66: 0F 73. F4, 01 + pslldq xmm11, 8 ; 2FCA _ 66 41: 0F 73. FB, 08 + psrldq xmm11, 7 ; 2FD0 _ 66 41: 0F 73. DB, 07 + psrlq xmm11, 7 ; 2FD6 _ 66 41: 0F 73. D3, 07 + por xmm4, xmm11 ; 2FDC _ 66 41: 0F EB. E3 + psraw xmm9, 8 ; 2FE1 _ 66 41: 0F 71. E1, 08 + psrldq xmm9, 15 ; 2FE7 _ 66 41: 0F 73. D9, 0F + pand xmm9, xmm7 ; 2FED _ 66 44: 0F DB. CF + pxor xmm4, xmm9 ; 2FF2 _ 66 41: 0F EF. E1 + movdqa xmm3, xmm4 ; 2FF7 _ 66: 0F 6F. DC + movdqa xmm2, xmm4 ; 2FFB _ 66: 0F 6F. D4 + movdqa xmm11, xmm4 ; 2FFF _ 66 44: 0F 6F. DC + movdqu xmm9, oword [r13+20H] ; 3004 _ F3 45: 0F 6F. 4D, 20 + psllq xmm3, 1 ; 300A _ 66: 0F 73. F3, 01 + pslldq xmm2, 8 ; 300F _ 66: 0F 73. FA, 08 + psrldq xmm2, 7 ; 3014 _ 66: 0F 73. DA, 07 + psrlq xmm2, 7 ; 3019 _ 66: 0F 73. D2, 07 + por xmm3, xmm2 ; 301E _ 66: 0F EB. 
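The decrypt prologue mirrors the encrypt one: shr r9, 9 turns the byte offset into a 512-byte sector index, the ?_004/?_005 block encrypts that index through serpent256_encrypt with the secondary key schedule (the one at offset 2710H) to derive the sector's initial tweak, and the ?_006 chain of psllq/pslldq/psrlq/psraw/pand/pxor then multiplies the tweak by x in GF(2^128) for each successive block, with r10d = 135 (0x87) as the reduction constant. A plain-C sketch of that multiply, assuming the tweak is held as two little-endian 64-bit words:

#include <stdint.h>

/* Multiply the 128-bit XTS tweak by x in GF(2^128), reduction polynomial
   x^128 + x^7 + x^2 + x + 1 (low-byte constant 0x87 = 135, the value
   loaded into r10d above). t[0] is the low 64 bits, t[1] the high. */
static void xts_tweak_mul_x(uint64_t t[2])
{
    uint64_t carry_low  = t[0] >> 63;         /* bit 63 carries into bit 64 */
    uint64_t carry_high = t[1] >> 63;         /* bit 127 triggers reduction */

    t[1] = (t[1] << 1) | carry_low;
    t[0] = (t[0] << 1) ^ (carry_high * 0x87); /* branch-free conditional xor */
}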
DA + movdqu xmm2, oword [r13+30H] ; 3022 _ F3 41: 0F 6F. 55, 30 + psraw xmm11, 8 ; 3028 _ 66 41: 0F 71. E3, 08 + psrldq xmm11, 15 ; 302E _ 66 41: 0F 73. DB, 0F + pand xmm11, xmm7 ; 3034 _ 66 44: 0F DB. DF + pxor xmm3, xmm11 ; 3039 _ 66 41: 0F EF. DB + movdqu xmm11, oword [r13] ; 303E _ F3 45: 0F 6F. 5D, 00 + pxor xmm10, xmm5 ; 3044 _ 66 44: 0F EF. D5 + pxor xmm9, xmm4 ; 3049 _ 66 44: 0F EF. CC + movdqa xmm0, xmm9 ; 304E _ 66 41: 0F 6F. C1 + pxor xmm11, xmm8 ; 3053 _ 66 45: 0F EF. D8 + movdqa xmm1, xmm11 ; 3058 _ 66 41: 0F 6F. CB + pxor xmm2, xmm3 ; 305D _ 66: 0F EF. D3 + punpckldq xmm0, xmm2 ; 3061 _ 66: 0F 62. C2 + punpckldq xmm1, xmm10 ; 3065 _ 66 41: 0F 62. CA + punpckhdq xmm11, xmm10 ; 306A _ 66 45: 0F 6A. DA + movdqa xmm10, xmm1 ; 306F _ 66 44: 0F 6F. D1 + punpckhdq xmm9, xmm2 ; 3074 _ 66 44: 0F 6A. CA + movdqa xmm2, xmm11 ; 3079 _ 66 41: 0F 6F. D3 + punpcklqdq xmm10, xmm0 ; 307E _ 66 44: 0F 6C. D0 + punpckhqdq xmm1, xmm0 ; 3083 _ 66: 0F 6D. C8 + punpcklqdq xmm2, xmm9 ; 3087 _ 66 41: 0F 6C. D1 + punpckhqdq xmm11, xmm9 ; 308C _ 66 45: 0F 6D. D9 + movd xmm9, dword [r12+1480H] ; 3091 _ 66 45: 0F 6E. 8C 24, 00001480 + pshufd xmm9, xmm9, 0 ; 309B _ 66 45: 0F 70. C9, 00 + pxor xmm10, xmm9 ; 30A1 _ 66 45: 0F EF. D1 + movd xmm9, dword [r12+1484H] ; 30A6 _ 66 45: 0F 6E. 8C 24, 00001484 + pshufd xmm9, xmm9, 0 ; 30B0 _ 66 45: 0F 70. C9, 00 + pxor xmm1, xmm9 ; 30B6 _ 66 41: 0F EF. C9 + movd xmm9, dword [r12+1488H] ; 30BB _ 66 45: 0F 6E. 8C 24, 00001488 + pshufd xmm9, xmm9, 0 ; 30C5 _ 66 45: 0F 70. C9, 00 + pxor xmm2, xmm9 ; 30CB _ 66 41: 0F EF. D1 + movd xmm9, dword [r12+148CH] ; 30D0 _ 66 45: 0F 6E. 8C 24, 0000148C + pshufd xmm9, xmm9, 0 ; 30DA _ 66 45: 0F 70. C9, 00 + pxor xmm11, xmm9 ; 30E0 _ 66 45: 0F EF. D9 + movdqa xmm9, xmm2 ; 30E5 _ 66 44: 0F 6F. CA + por xmm2, xmm11 ; 30EA _ 66 41: 0F EB. D3 + pxor xmm9, xmm10 ; 30EF _ 66 45: 0F EF. CA + pand xmm10, xmm11 ; 30F4 _ 66 45: 0F DB. D3 + pxor xmm9, xmm6 ; 30F9 _ 66 44: 0F EF. CE + pxor xmm11, xmm1 ; 30FE _ 66 44: 0F EF. D9 + por xmm1, xmm10 ; 3103 _ 66 41: 0F EB. CA + pxor xmm10, xmm9 ; 3108 _ 66 45: 0F EF. D1 + pand xmm9, xmm2 ; 310D _ 66 44: 0F DB. CA + pand xmm11, xmm2 ; 3112 _ 66 44: 0F DB. DA + pxor xmm1, xmm9 ; 3117 _ 66 41: 0F EF. C9 + pxor xmm9, xmm10 ; 311C _ 66 45: 0F EF. CA + por xmm10, xmm9 ; 3121 _ 66 45: 0F EB. D1 + pxor xmm2, xmm1 ; 3126 _ 66: 0F EF. D1 + pxor xmm10, xmm11 ; 312A _ 66 45: 0F EF. D3 + pxor xmm11, xmm2 ; 312F _ 66 44: 0F EF. DA + por xmm2, xmm10 ; 3134 _ 66 41: 0F EB. D2 + pxor xmm11, xmm9 ; 3139 _ 66 45: 0F EF. D9 + pxor xmm2, xmm9 ; 313E _ 66 41: 0F EF. D1 + movd xmm9, dword [r12+1470H] ; 3143 _ 66 45: 0F 6E. 8C 24, 00001470 + pshufd xmm9, xmm9, 0 ; 314D _ 66 45: 0F 70. C9, 00 + pxor xmm11, xmm9 ; 3153 _ 66 45: 0F EF. D9 + movdqa xmm0, xmm11 ; 3158 _ 66 41: 0F 6F. C3 + psrld xmm11, 5 ; 315D _ 66 41: 0F 72. D3, 05 + movd xmm9, dword [r12+1474H] ; 3163 _ 66 45: 0F 6E. 8C 24, 00001474 + pshufd xmm9, xmm9, 0 ; 316D _ 66 45: 0F 70. C9, 00 + pxor xmm10, xmm9 ; 3173 _ 66 45: 0F EF. D1 + pslld xmm0, 27 ; 3178 _ 66: 0F 72. F0, 1B + por xmm0, xmm11 ; 317D _ 66 41: 0F EB. C3 + movdqa xmm11, xmm10 ; 3182 _ 66 45: 0F 6F. DA + pxor xmm0, xmm10 ; 3187 _ 66 41: 0F EF. C2 + movd xmm9, dword [r12+1478H] ; 318C _ 66 45: 0F 6E. 8C 24, 00001478 + pshufd xmm9, xmm9, 0 ; 3196 _ 66 45: 0F 70. C9, 00 + pxor xmm1, xmm9 ; 319C _ 66 41: 0F EF. C9 + pslld xmm11, 7 ; 31A1 _ 66 41: 0F 72. F3, 07 + movd xmm9, dword [r12+147CH] ; 31A7 _ 66 45: 0F 6E. 8C 24, 0000147C + pshufd xmm9, xmm9, 0 ; 31B1 _ 66 45: 0F 70. 
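Note the pxor of each loaded ciphertext block with its tweak before the transpose; together with the matching pxor after the cipher rounds this is the XTS whitening, P = D_K1(C xor T) xor T. A minimal per-block skeleton under that reading (the callback and types are illustrative stand-ins, not symbols from this patch):

#include <stdint.h>

typedef struct { uint64_t w[2]; } block128;

/* One XTS block: whiten with the tweak, run the block cipher's decrypt
   primitive, whiten again. The listing does this four blocks at a time. */
static void xts_decrypt_block(block128 *x, const block128 *tweak,
                              void (*dec_k1)(block128 *))
{
    x->w[0] ^= tweak->w[0];   /* CC = C ^ T  */
    x->w[1] ^= tweak->w[1];
    dec_k1(x);                /* PP = D_K1(CC) */
    x->w[0] ^= tweak->w[0];   /* P  = PP ^ T */
    x->w[1] ^= tweak->w[1];
}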
C9, 00 + pxor xmm2, xmm9 ; 31B7 _ 66 41: 0F EF. D1 + movdqa xmm9, xmm1 ; 31BC _ 66 44: 0F 6F. C9 + psrld xmm1, 22 ; 31C1 _ 66: 0F 72. D1, 16 + pxor xmm0, xmm2 ; 31C6 _ 66: 0F EF. C2 + pslld xmm9, 10 ; 31CA _ 66 41: 0F 72. F1, 0A + por xmm9, xmm1 ; 31D0 _ 66 44: 0F EB. C9 + movdqa xmm1, xmm2 ; 31D5 _ 66: 0F 6F. CA + pxor xmm9, xmm2 ; 31D9 _ 66 44: 0F EF. CA + pxor xmm9, xmm11 ; 31DE _ 66 45: 0F EF. CB + movdqa xmm11, xmm10 ; 31E3 _ 66 45: 0F 6F. DA + pslld xmm1, 25 ; 31E8 _ 66: 0F 72. F1, 19 + psrld xmm2, 7 ; 31ED _ 66: 0F 72. D2, 07 + por xmm1, xmm2 ; 31F2 _ 66: 0F EB. CA + movdqa xmm2, xmm0 ; 31F6 _ 66: 0F 6F. D0 + pslld xmm11, 31 ; 31FA _ 66 41: 0F 72. F3, 1F + psrld xmm10, 1 ; 3200 _ 66 41: 0F 72. D2, 01 + por xmm11, xmm10 ; 3206 _ 66 45: 0F EB. DA + movdqa xmm10, xmm0 ; 320B _ 66 44: 0F 6F. D0 + pxor xmm1, xmm9 ; 3210 _ 66 41: 0F EF. C9 + pxor xmm11, xmm0 ; 3215 _ 66 44: 0F EF. D8 + pslld xmm10, 3 ; 321A _ 66 41: 0F 72. F2, 03 + pxor xmm1, xmm10 ; 3220 _ 66 41: 0F EF. CA + movdqa xmm10, xmm9 ; 3225 _ 66 45: 0F 6F. D1 + pxor xmm11, xmm9 ; 322A _ 66 45: 0F EF. D9 + psrld xmm9, 3 ; 322F _ 66 41: 0F 72. D1, 03 + pslld xmm10, 29 ; 3235 _ 66 41: 0F 72. F2, 1D + por xmm10, xmm9 ; 323B _ 66 45: 0F EB. D1 + pslld xmm2, 19 ; 3240 _ 66: 0F 72. F2, 13 + psrld xmm0, 13 ; 3245 _ 66: 0F 72. D0, 0D + por xmm2, xmm0 ; 324A _ 66: 0F EB. D0 + movdqa xmm0, xmm10 ; 324E _ 66 41: 0F 6F. C2 + pxor xmm2, xmm10 ; 3253 _ 66 41: 0F EF. D2 + movd xmm9, dword [r12+1460H] ; 3258 _ 66 45: 0F 6E. 8C 24, 00001460 + pxor xmm10, xmm1 ; 3262 _ 66 44: 0F EF. D1 + pand xmm0, xmm2 ; 3267 _ 66: 0F DB. C2 + pxor xmm0, xmm6 ; 326B _ 66: 0F EF. C6 + pxor xmm1, xmm11 ; 326F _ 66 41: 0F EF. CB + pxor xmm0, xmm1 ; 3274 _ 66: 0F EF. C1 + por xmm10, xmm2 ; 3278 _ 66 44: 0F EB. D2 + pxor xmm2, xmm0 ; 327D _ 66: 0F EF. D0 + pxor xmm1, xmm10 ; 3281 _ 66 41: 0F EF. CA + pxor xmm10, xmm11 ; 3286 _ 66 45: 0F EF. D3 + pand xmm11, xmm1 ; 328B _ 66 44: 0F DB. D9 + pxor xmm11, xmm2 ; 3290 _ 66 44: 0F EF. DA + pxor xmm2, xmm1 ; 3295 _ 66: 0F EF. D1 + por xmm2, xmm0 ; 3299 _ 66: 0F EB. D0 + pxor xmm1, xmm11 ; 329D _ 66 41: 0F EF. CB + pxor xmm10, xmm2 ; 32A2 _ 66 44: 0F EF. D2 + pshufd xmm2, xmm9, 0 ; 32A7 _ 66 41: 0F 70. D1, 00 + pxor xmm11, xmm2 ; 32AD _ 66 44: 0F EF. DA + movd xmm9, dword [r12+1464H] ; 32B2 _ 66 45: 0F 6E. 8C 24, 00001464 + pshufd xmm2, xmm9, 0 ; 32BC _ 66 41: 0F 70. D1, 00 + movd xmm9, dword [r12+1468H] ; 32C2 _ 66 45: 0F 6E. 8C 24, 00001468 + pxor xmm0, xmm2 ; 32CC _ 66: 0F EF. C2 + pshufd xmm2, xmm9, 0 ; 32D0 _ 66 41: 0F 70. D1, 00 + pxor xmm10, xmm2 ; 32D6 _ 66 44: 0F EF. D2 + movd xmm9, dword [r12+146CH] ; 32DB _ 66 45: 0F 6E. 8C 24, 0000146C + pshufd xmm2, xmm9, 0 ; 32E5 _ 66 41: 0F 70. D1, 00 + movdqa xmm9, xmm10 ; 32EB _ 66 45: 0F 6F. CA + pxor xmm1, xmm2 ; 32F0 _ 66: 0F EF. CA + movdqa xmm2, xmm11 ; 32F4 _ 66 41: 0F 6F. D3 + pslld xmm9, 10 ; 32F9 _ 66 41: 0F 72. F1, 0A + psrld xmm10, 22 ; 32FF _ 66 41: 0F 72. D2, 16 + por xmm9, xmm10 ; 3305 _ 66 45: 0F EB. CA + movdqa xmm10, xmm1 ; 330A _ 66 44: 0F 6F. D1 + pslld xmm2, 27 ; 330F _ 66: 0F 72. F2, 1B + psrld xmm11, 5 ; 3314 _ 66 41: 0F 72. D3, 05 + por xmm2, xmm11 ; 331A _ 66 41: 0F EB. D3 + movdqa xmm11, xmm0 ; 331F _ 66 44: 0F 6F. D8 + pxor xmm9, xmm1 ; 3324 _ 66 44: 0F EF. C9 + pxor xmm2, xmm0 ; 3329 _ 66: 0F EF. D0 + pslld xmm11, 7 ; 332D _ 66 41: 0F 72. F3, 07 + pxor xmm9, xmm11 ; 3333 _ 66 45: 0F EF. CB + movdqa xmm11, xmm0 ; 3338 _ 66 44: 0F 6F. D8 + pxor xmm2, xmm1 ; 333D _ 66: 0F EF. D1 + pslld xmm10, 25 ; 3341 _ 66 41: 0F 72. 
F2, 19 + psrld xmm1, 7 ; 3347 _ 66: 0F 72. D1, 07 + por xmm10, xmm1 ; 334C _ 66 44: 0F EB. D1 + movdqa xmm1, xmm2 ; 3351 _ 66: 0F 6F. CA + pslld xmm11, 31 ; 3355 _ 66 41: 0F 72. F3, 1F + psrld xmm0, 1 ; 335B _ 66: 0F 72. D0, 01 + por xmm11, xmm0 ; 3360 _ 66 44: 0F EB. D8 + movdqa xmm0, xmm2 ; 3365 _ 66: 0F 6F. C2 + pxor xmm10, xmm9 ; 3369 _ 66 45: 0F EF. D1 + pxor xmm11, xmm2 ; 336E _ 66 44: 0F EF. DA + pslld xmm0, 3 ; 3373 _ 66: 0F 72. F0, 03 + pxor xmm10, xmm0 ; 3378 _ 66 44: 0F EF. D0 + movdqa xmm0, xmm9 ; 337D _ 66 41: 0F 6F. C1 + pxor xmm11, xmm9 ; 3382 _ 66 45: 0F EF. D9 + psrld xmm9, 3 ; 3387 _ 66 41: 0F 72. D1, 03 + pslld xmm0, 29 ; 338D _ 66: 0F 72. F0, 1D + por xmm0, xmm9 ; 3392 _ 66 41: 0F EB. C1 + movdqa xmm9, xmm10 ; 3397 _ 66 45: 0F 6F. CA + pslld xmm1, 19 ; 339C _ 66: 0F 72. F1, 13 + psrld xmm2, 13 ; 33A1 _ 66: 0F 72. D2, 0D + por xmm1, xmm2 ; 33A6 _ 66: 0F EB. CA + movd xmm2, dword [r12+1450H] ; 33AA _ 66 41: 0F 6E. 94 24, 00001450 + pxor xmm11, xmm6 ; 33B4 _ 66 44: 0F EF. DE + pxor xmm0, xmm11 ; 33B9 _ 66 41: 0F EF. C3 + por xmm9, xmm1 ; 33BE _ 66 44: 0F EB. C9 + pxor xmm9, xmm0 ; 33C3 _ 66 44: 0F EF. C8 + por xmm0, xmm11 ; 33C8 _ 66 41: 0F EB. C3 + pand xmm0, xmm1 ; 33CD _ 66: 0F DB. C1 + pxor xmm10, xmm9 ; 33D1 _ 66 45: 0F EF. D1 + pxor xmm0, xmm10 ; 33D6 _ 66 41: 0F EF. C2 + por xmm10, xmm1 ; 33DB _ 66 44: 0F EB. D1 + pxor xmm10, xmm11 ; 33E0 _ 66 45: 0F EF. D3 + pand xmm11, xmm0 ; 33E5 _ 66 44: 0F DB. D8 + pxor xmm11, xmm9 ; 33EA _ 66 45: 0F EF. D9 + pxor xmm10, xmm0 ; 33EF _ 66 44: 0F EF. D0 + pand xmm9, xmm10 ; 33F4 _ 66 45: 0F DB. CA + pxor xmm10, xmm11 ; 33F9 _ 66 45: 0F EF. D3 + pxor xmm9, xmm10 ; 33FE _ 66 45: 0F EF. CA + pxor xmm10, xmm6 ; 3403 _ 66 44: 0F EF. D6 + pxor xmm9, xmm1 ; 3408 _ 66 44: 0F EF. C9 + pshufd xmm2, xmm2, 0 ; 340D _ 66: 0F 70. D2, 00 + pxor xmm11, xmm2 ; 3412 _ 66 44: 0F EF. DA + movd xmm2, dword [r12+1454H] ; 3417 _ 66 41: 0F 6E. 94 24, 00001454 + pshufd xmm2, xmm2, 0 ; 3421 _ 66: 0F 70. D2, 00 + pxor xmm10, xmm2 ; 3426 _ 66 44: 0F EF. D2 + movd xmm2, dword [r12+1458H] ; 342B _ 66 41: 0F 6E. 94 24, 00001458 + pshufd xmm2, xmm2, 0 ; 3435 _ 66: 0F 70. D2, 00 + pxor xmm9, xmm2 ; 343A _ 66 44: 0F EF. CA + movd xmm2, dword [r12+145CH] ; 343F _ 66 41: 0F 6E. 94 24, 0000145C + pshufd xmm2, xmm2, 0 ; 3449 _ 66: 0F 70. D2, 00 + pxor xmm0, xmm2 ; 344E _ 66: 0F EF. C2 + movdqa xmm2, xmm9 ; 3452 _ 66 41: 0F 6F. D1 + psrld xmm9, 22 ; 3457 _ 66 41: 0F 72. D1, 16 + pslld xmm2, 10 ; 345D _ 66: 0F 72. F2, 0A + por xmm2, xmm9 ; 3462 _ 66 41: 0F EB. D1 + movdqa xmm9, xmm11 ; 3467 _ 66 45: 0F 6F. CB + psrld xmm11, 5 ; 346C _ 66 41: 0F 72. D3, 05 + pxor xmm2, xmm0 ; 3472 _ 66: 0F EF. D0 + pslld xmm9, 27 ; 3476 _ 66 41: 0F 72. F1, 1B + por xmm9, xmm11 ; 347C _ 66 45: 0F EB. CB + movdqa xmm11, xmm10 ; 3481 _ 66 45: 0F 6F. DA + pxor xmm9, xmm10 ; 3486 _ 66 45: 0F EF. CA + pxor xmm9, xmm0 ; 348B _ 66 44: 0F EF. C8 + pslld xmm11, 7 ; 3490 _ 66 41: 0F 72. F3, 07 + pxor xmm2, xmm11 ; 3496 _ 66 41: 0F EF. D3 + movdqa xmm11, xmm0 ; 349B _ 66 44: 0F 6F. D8 + movdqa xmm1, xmm2 ; 34A0 _ 66: 0F 6F. CA + psrld xmm0, 7 ; 34A4 _ 66: 0F 72. D0, 07 + pslld xmm11, 25 ; 34A9 _ 66 41: 0F 72. F3, 19 + por xmm11, xmm0 ; 34AF _ 66 44: 0F EB. D8 + movdqa xmm0, xmm10 ; 34B4 _ 66 41: 0F 6F. C2 + psrld xmm10, 1 ; 34B9 _ 66 41: 0F 72. D2, 01 + pxor xmm11, xmm2 ; 34BF _ 66 44: 0F EF. DA + pslld xmm0, 31 ; 34C4 _ 66: 0F 72. F0, 1F + por xmm0, xmm10 ; 34C9 _ 66 41: 0F EB. C2 + movdqa xmm10, xmm9 ; 34CE _ 66 45: 0F 6F. D1 + pxor xmm0, xmm9 ; 34D3 _ 66 41: 0F EF. 
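The decrypt rounds run the linear transformation in reverse, which is why the rotation pairs above (pslld 10/psrld 22, 27/5, 25/7, 31/1, 29/3, 19/13) are the complements of the encrypt-side counts. The scalar reference form of the inverse, for comparison:

#include <stdint.h>

static inline uint32_t rotr32(uint32_t v, int n)
{
    return (v >> n) | (v << (32 - n));
}

/* Serpent inverse linear transformation: the forward steps undone in
   reverse order, matching the complemented rotation counts above. */
static void serpent_inv_lt(uint32_t x[4])
{
    x[2] = rotr32(x[2], 22);
    x[0] = rotr32(x[0], 5);
    x[2] ^= x[3] ^ (x[1] << 7);
    x[0] ^= x[1] ^ x[3];
    x[3] = rotr32(x[3], 7);
    x[1] = rotr32(x[1], 1);
    x[3] ^= x[2] ^ (x[0] << 3);
    x[1] ^= x[0] ^ x[2];
    x[2] = rotr32(x[2], 3);
    x[0] = rotr32(x[0], 13);
}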
C1 + pxor xmm0, xmm2 ; 34D8 _ 66: 0F EF. C2 + pslld xmm10, 3 ; 34DC _ 66 41: 0F 72. F2, 03 + pxor xmm11, xmm10 ; 34E2 _ 66 45: 0F EF. DA + pslld xmm1, 29 ; 34E7 _ 66: 0F 72. F1, 1D + psrld xmm2, 3 ; 34EC _ 66: 0F 72. D2, 03 + por xmm1, xmm2 ; 34F1 _ 66: 0F EB. CA + movdqa xmm2, xmm9 ; 34F5 _ 66 41: 0F 6F. D1 + movdqa xmm10, xmm1 ; 34FA _ 66 44: 0F 6F. D1 + psrld xmm9, 13 ; 34FF _ 66 41: 0F 72. D1, 0D + pslld xmm2, 19 ; 3505 _ 66: 0F 72. F2, 13 + por xmm2, xmm9 ; 350A _ 66 41: 0F EB. D1 + pand xmm10, xmm11 ; 350F _ 66 45: 0F DB. D3 + pxor xmm10, xmm0 ; 3514 _ 66 44: 0F EF. D0 + por xmm0, xmm11 ; 3519 _ 66 41: 0F EB. C3 + movd xmm9, dword [r12+1440H] ; 351E _ 66 45: 0F 6E. 8C 24, 00001440 + pand xmm0, xmm2 ; 3528 _ 66: 0F DB. C2 + pxor xmm1, xmm10 ; 352C _ 66 41: 0F EF. CA + pxor xmm1, xmm0 ; 3531 _ 66: 0F EF. C8 + pand xmm0, xmm10 ; 3535 _ 66 41: 0F DB. C2 + pxor xmm2, xmm6 ; 353A _ 66: 0F EF. D6 + pxor xmm11, xmm1 ; 353E _ 66 44: 0F EF. D9 + pxor xmm0, xmm11 ; 3543 _ 66 41: 0F EF. C3 + pand xmm11, xmm2 ; 3548 _ 66 44: 0F DB. DA + pxor xmm11, xmm10 ; 354D _ 66 45: 0F EF. DA + pxor xmm2, xmm0 ; 3552 _ 66: 0F EF. D0 + pand xmm10, xmm2 ; 3556 _ 66 44: 0F DB. D2 + pxor xmm11, xmm2 ; 355B _ 66 44: 0F EF. DA + pxor xmm10, xmm1 ; 3560 _ 66 44: 0F EF. D1 + por xmm10, xmm11 ; 3565 _ 66 45: 0F EB. D3 + pxor xmm11, xmm2 ; 356A _ 66 44: 0F EF. DA + pxor xmm10, xmm0 ; 356F _ 66 44: 0F EF. D0 + pshufd xmm9, xmm9, 0 ; 3574 _ 66 45: 0F 70. C9, 00 + pxor xmm2, xmm9 ; 357A _ 66 41: 0F EF. D1 + movdqa xmm0, xmm2 ; 357F _ 66: 0F 6F. C2 + psrld xmm2, 5 ; 3583 _ 66: 0F 72. D2, 05 + pslld xmm0, 27 ; 3588 _ 66: 0F 72. F0, 1B + por xmm0, xmm2 ; 358D _ 66: 0F EB. C2 + movd xmm9, dword [r12+1444H] ; 3591 _ 66 45: 0F 6E. 8C 24, 00001444 + pshufd xmm9, xmm9, 0 ; 359B _ 66 45: 0F 70. C9, 00 + pxor xmm11, xmm9 ; 35A1 _ 66 45: 0F EF. D9 + movdqa xmm2, xmm11 ; 35A6 _ 66 41: 0F 6F. D3 + pxor xmm0, xmm11 ; 35AB _ 66 41: 0F EF. C3 + pslld xmm2, 7 ; 35B0 _ 66: 0F 72. F2, 07 + movd xmm9, dword [r12+1448H] ; 35B5 _ 66 45: 0F 6E. 8C 24, 00001448 + pshufd xmm9, xmm9, 0 ; 35BF _ 66 45: 0F 70. C9, 00 + pxor xmm10, xmm9 ; 35C5 _ 66 45: 0F EF. D1 + movd xmm9, dword [r12+144CH] ; 35CA _ 66 45: 0F 6E. 8C 24, 0000144C + pshufd xmm9, xmm9, 0 ; 35D4 _ 66 45: 0F 70. C9, 00 + pxor xmm1, xmm9 ; 35DA _ 66 41: 0F EF. C9 + movdqa xmm9, xmm10 ; 35DF _ 66 45: 0F 6F. CA + psrld xmm10, 22 ; 35E4 _ 66 41: 0F 72. D2, 16 + pxor xmm0, xmm1 ; 35EA _ 66: 0F EF. C1 + pslld xmm9, 10 ; 35EE _ 66 41: 0F 72. F1, 0A + por xmm9, xmm10 ; 35F4 _ 66 45: 0F EB. CA + movdqa xmm10, xmm1 ; 35F9 _ 66 44: 0F 6F. D1 + pxor xmm9, xmm1 ; 35FE _ 66 44: 0F EF. C9 + pxor xmm9, xmm2 ; 3603 _ 66 44: 0F EF. CA + movdqa xmm2, xmm11 ; 3608 _ 66 41: 0F 6F. D3 + pslld xmm10, 25 ; 360D _ 66 41: 0F 72. F2, 19 + psrld xmm1, 7 ; 3613 _ 66: 0F 72. D1, 07 + por xmm10, xmm1 ; 3618 _ 66 44: 0F EB. D1 + movdqa xmm1, xmm9 ; 361D _ 66 41: 0F 6F. C9 + pslld xmm2, 31 ; 3622 _ 66: 0F 72. F2, 1F + psrld xmm11, 1 ; 3627 _ 66 41: 0F 72. D3, 01 + por xmm2, xmm11 ; 362D _ 66 41: 0F EB. D3 + movdqa xmm11, xmm0 ; 3632 _ 66 44: 0F 6F. D8 + pxor xmm10, xmm9 ; 3637 _ 66 45: 0F EF. D1 + pxor xmm2, xmm0 ; 363C _ 66: 0F EF. D0 + pslld xmm11, 3 ; 3640 _ 66 41: 0F 72. F3, 03 + pxor xmm10, xmm11 ; 3646 _ 66 45: 0F EF. D3 + movdqa xmm11, xmm0 ; 364B _ 66 44: 0F 6F. D8 + pxor xmm2, xmm9 ; 3650 _ 66 41: 0F EF. D1 + pslld xmm1, 29 ; 3655 _ 66: 0F 72. F1, 1D + psrld xmm9, 3 ; 365A _ 66 41: 0F 72. D1, 03 + por xmm1, xmm9 ; 3660 _ 66 41: 0F EB. C9 + movdqa xmm9, xmm1 ; 3665 _ 66 44: 0F 6F. 
C9 + pslld xmm11, 19 ; 366A _ 66 41: 0F 72. F3, 13 + psrld xmm0, 13 ; 3670 _ 66: 0F 72. D0, 0D + por xmm11, xmm0 ; 3675 _ 66 44: 0F EB. D8 + pxor xmm9, xmm2 ; 367A _ 66 44: 0F EF. CA + pxor xmm11, xmm9 ; 367F _ 66 45: 0F EF. D9 + pand xmm1, xmm9 ; 3684 _ 66 41: 0F DB. C9 + pxor xmm1, xmm11 ; 3689 _ 66 41: 0F EF. CB + pand xmm11, xmm2 ; 368E _ 66 44: 0F DB. DA + movd xmm0, dword [r12+1430H] ; 3693 _ 66 41: 0F 6E. 84 24, 00001430 + pxor xmm2, xmm10 ; 369D _ 66 41: 0F EF. D2 + por xmm10, xmm1 ; 36A2 _ 66 44: 0F EB. D1 + pxor xmm9, xmm10 ; 36A7 _ 66 45: 0F EF. CA + pxor xmm11, xmm10 ; 36AC _ 66 45: 0F EF. DA + pxor xmm2, xmm1 ; 36B1 _ 66: 0F EF. D1 + pand xmm10, xmm9 ; 36B5 _ 66 45: 0F DB. D1 + pxor xmm10, xmm2 ; 36BA _ 66 44: 0F EF. D2 + pxor xmm2, xmm11 ; 36BF _ 66 41: 0F EF. D3 + por xmm2, xmm9 ; 36C4 _ 66 41: 0F EB. D1 + pxor xmm11, xmm10 ; 36C9 _ 66 45: 0F EF. DA + pxor xmm2, xmm1 ; 36CE _ 66: 0F EF. D1 + pxor xmm11, xmm2 ; 36D2 _ 66 44: 0F EF. DA + pshufd xmm0, xmm0, 0 ; 36D7 _ 66: 0F 70. C0, 00 + pxor xmm9, xmm0 ; 36DC _ 66 44: 0F EF. C8 + movdqa xmm1, xmm9 ; 36E1 _ 66 41: 0F 6F. C9 + psrld xmm9, 5 ; 36E6 _ 66 41: 0F 72. D1, 05 + pslld xmm1, 27 ; 36EC _ 66: 0F 72. F1, 1B + por xmm1, xmm9 ; 36F1 _ 66 41: 0F EB. C9 + movd xmm0, dword [r12+1434H] ; 36F6 _ 66 41: 0F 6E. 84 24, 00001434 + pshufd xmm0, xmm0, 0 ; 3700 _ 66: 0F 70. C0, 00 + pxor xmm2, xmm0 ; 3705 _ 66: 0F EF. D0 + pxor xmm1, xmm2 ; 3709 _ 66: 0F EF. CA + movd xmm0, dword [r12+1438H] ; 370D _ 66 41: 0F 6E. 84 24, 00001438 + pshufd xmm0, xmm0, 0 ; 3717 _ 66: 0F 70. C0, 00 + pxor xmm10, xmm0 ; 371C _ 66 44: 0F EF. D0 + movd xmm0, dword [r12+143CH] ; 3721 _ 66 41: 0F 6E. 84 24, 0000143C + pshufd xmm0, xmm0, 0 ; 372B _ 66: 0F 70. C0, 00 + pxor xmm11, xmm0 ; 3730 _ 66 44: 0F EF. D8 + movdqa xmm0, xmm10 ; 3735 _ 66 41: 0F 6F. C2 + movdqa xmm9, xmm11 ; 373A _ 66 45: 0F 6F. CB + psrld xmm10, 22 ; 373F _ 66 41: 0F 72. D2, 16 + pslld xmm0, 10 ; 3745 _ 66: 0F 72. F0, 0A + por xmm0, xmm10 ; 374A _ 66 41: 0F EB. C2 + movdqa xmm10, xmm2 ; 374F _ 66 44: 0F 6F. D2 + pxor xmm0, xmm11 ; 3754 _ 66 41: 0F EF. C3 + pxor xmm1, xmm11 ; 3759 _ 66 41: 0F EF. CB + pslld xmm10, 7 ; 375E _ 66 41: 0F 72. F2, 07 + pxor xmm0, xmm10 ; 3764 _ 66 41: 0F EF. C2 + movdqa xmm10, xmm1 ; 3769 _ 66 44: 0F 6F. D1 + pslld xmm9, 25 ; 376E _ 66 41: 0F 72. F1, 19 + psrld xmm11, 7 ; 3774 _ 66 41: 0F 72. D3, 07 + por xmm9, xmm11 ; 377A _ 66 45: 0F EB. CB + movdqa xmm11, xmm2 ; 377F _ 66 44: 0F 6F. DA + psrld xmm2, 1 ; 3784 _ 66: 0F 72. D2, 01 + pxor xmm9, xmm0 ; 3789 _ 66 44: 0F EF. C8 + pslld xmm11, 31 ; 378E _ 66 41: 0F 72. F3, 1F + por xmm11, xmm2 ; 3794 _ 66 44: 0F EB. DA + movdqa xmm2, xmm1 ; 3799 _ 66: 0F 6F. D1 + pxor xmm11, xmm1 ; 379D _ 66 44: 0F EF. D9 + pxor xmm11, xmm0 ; 37A2 _ 66 44: 0F EF. D8 + pslld xmm2, 3 ; 37A7 _ 66: 0F 72. F2, 03 + pxor xmm9, xmm2 ; 37AC _ 66 44: 0F EF. CA + movdqa xmm2, xmm0 ; 37B1 _ 66: 0F 6F. D0 + psrld xmm0, 3 ; 37B5 _ 66: 0F 72. D0, 03 + pslld xmm10, 19 ; 37BA _ 66 41: 0F 72. F2, 13 + pslld xmm2, 29 ; 37C0 _ 66: 0F 72. F2, 1D + por xmm2, xmm0 ; 37C5 _ 66: 0F EB. D0 + psrld xmm1, 13 ; 37C9 _ 66: 0F 72. D1, 0D + por xmm10, xmm1 ; 37CE _ 66 44: 0F EB. D1 + pxor xmm2, xmm9 ; 37D3 _ 66 41: 0F EF. D1 + pxor xmm9, xmm10 ; 37D8 _ 66 45: 0F EF. CA + movdqa xmm1, xmm9 ; 37DD _ 66 41: 0F 6F. C9 + pand xmm1, xmm2 ; 37E2 _ 66: 0F DB. CA + pxor xmm1, xmm11 ; 37E6 _ 66 41: 0F EF. CB + por xmm11, xmm2 ; 37EB _ 66 44: 0F EB. DA + pxor xmm11, xmm9 ; 37F0 _ 66 45: 0F EF. D9 + pand xmm9, xmm1 ; 37F5 _ 66 44: 0F DB. 
C9 + pxor xmm2, xmm1 ; 37FA _ 66: 0F EF. D1 + pand xmm9, xmm10 ; 37FE _ 66 45: 0F DB. CA + pxor xmm9, xmm2 ; 3803 _ 66 44: 0F EF. CA + pand xmm2, xmm11 ; 3808 _ 66 41: 0F DB. D3 + por xmm2, xmm10 ; 380D _ 66 41: 0F EB. D2 + pxor xmm1, xmm6 ; 3812 _ 66: 0F EF. CE + movdqa xmm0, xmm1 ; 3816 _ 66: 0F 6F. C1 + pxor xmm2, xmm1 ; 381A _ 66: 0F EF. D1 + pxor xmm10, xmm1 ; 381E _ 66 44: 0F EF. D1 + pxor xmm0, xmm9 ; 3823 _ 66 41: 0F EF. C1 + pand xmm10, xmm11 ; 3828 _ 66 45: 0F DB. D3 + pxor xmm0, xmm10 ; 382D _ 66 41: 0F EF. C2 + movd xmm10, dword [r12+1420H] ; 3832 _ 66 45: 0F 6E. 94 24, 00001420 + pshufd xmm10, xmm10, 0 ; 383C _ 66 45: 0F 70. D2, 00 + pxor xmm11, xmm10 ; 3842 _ 66 45: 0F EF. DA + movd xmm10, dword [r12+1424H] ; 3847 _ 66 45: 0F 6E. 94 24, 00001424 + pshufd xmm10, xmm10, 0 ; 3851 _ 66 45: 0F 70. D2, 00 + pxor xmm9, xmm10 ; 3857 _ 66 45: 0F EF. CA + movd xmm10, dword [r12+1428H] ; 385C _ 66 45: 0F 6E. 94 24, 00001428 + pshufd xmm10, xmm10, 0 ; 3866 _ 66 45: 0F 70. D2, 00 + pxor xmm2, xmm10 ; 386C _ 66 41: 0F EF. D2 + movd xmm10, dword [r12+142CH] ; 3871 _ 66 45: 0F 6E. 94 24, 0000142C + movdqa xmm1, xmm2 ; 387B _ 66: 0F 6F. CA + pshufd xmm10, xmm10, 0 ; 387F _ 66 45: 0F 70. D2, 00 + pxor xmm0, xmm10 ; 3885 _ 66 41: 0F EF. C2 + movdqa xmm10, xmm11 ; 388A _ 66 45: 0F 6F. D3 + pslld xmm1, 10 ; 388F _ 66: 0F 72. F1, 0A + psrld xmm2, 22 ; 3894 _ 66: 0F 72. D2, 16 + por xmm1, xmm2 ; 3899 _ 66: 0F EB. CA + movdqa xmm2, xmm9 ; 389D _ 66 41: 0F 6F. D1 + pslld xmm10, 27 ; 38A2 _ 66 41: 0F 72. F2, 1B + psrld xmm11, 5 ; 38A8 _ 66 41: 0F 72. D3, 05 + por xmm10, xmm11 ; 38AE _ 66 45: 0F EB. D3 + movdqa xmm11, xmm9 ; 38B3 _ 66 45: 0F 6F. D9 + pxor xmm1, xmm0 ; 38B8 _ 66: 0F EF. C8 + pslld xmm2, 7 ; 38BC _ 66: 0F 72. F2, 07 + pxor xmm1, xmm2 ; 38C1 _ 66: 0F EF. CA + movdqa xmm2, xmm0 ; 38C5 _ 66: 0F 6F. D0 + pxor xmm10, xmm9 ; 38C9 _ 66 45: 0F EF. D1 + pxor xmm10, xmm0 ; 38CE _ 66 44: 0F EF. D0 + pslld xmm2, 25 ; 38D3 _ 66: 0F 72. F2, 19 + psrld xmm0, 7 ; 38D8 _ 66: 0F 72. D0, 07 + por xmm2, xmm0 ; 38DD _ 66: 0F EB. D0 + movdqa xmm0, xmm1 ; 38E1 _ 66: 0F 6F. C1 + pslld xmm11, 31 ; 38E5 _ 66 41: 0F 72. F3, 1F + psrld xmm9, 1 ; 38EB _ 66 41: 0F 72. D1, 01 + por xmm11, xmm9 ; 38F1 _ 66 45: 0F EB. D9 + movdqa xmm9, xmm10 ; 38F6 _ 66 45: 0F 6F. CA + pxor xmm2, xmm1 ; 38FB _ 66: 0F EF. D1 + pxor xmm11, xmm10 ; 38FF _ 66 45: 0F EF. DA + pslld xmm9, 3 ; 3904 _ 66 41: 0F 72. F1, 03 + pxor xmm2, xmm9 ; 390A _ 66 41: 0F EF. D1 + movdqa xmm9, xmm10 ; 390F _ 66 45: 0F 6F. CA + pxor xmm11, xmm1 ; 3914 _ 66 44: 0F EF. D9 + pslld xmm0, 29 ; 3919 _ 66: 0F 72. F0, 1D + psrld xmm1, 3 ; 391E _ 66: 0F 72. D1, 03 + por xmm0, xmm1 ; 3923 _ 66: 0F EB. C1 + pslld xmm9, 19 ; 3927 _ 66 41: 0F 72. F1, 13 + psrld xmm10, 13 ; 392D _ 66 41: 0F 72. D2, 0D + por xmm9, xmm10 ; 3933 _ 66 45: 0F EB. CA + movdqa xmm10, xmm11 ; 3938 _ 66 45: 0F 6F. D3 + pxor xmm11, xmm0 ; 393D _ 66 44: 0F EF. D8 + pxor xmm10, xmm2 ; 3942 _ 66 44: 0F EF. D2 + pand xmm2, xmm10 ; 3947 _ 66 41: 0F DB. D2 + pxor xmm2, xmm9 ; 394C _ 66 41: 0F EF. D1 + por xmm9, xmm10 ; 3951 _ 66 45: 0F EB. CA + pxor xmm0, xmm2 ; 3956 _ 66: 0F EF. C2 + pxor xmm9, xmm11 ; 395A _ 66 45: 0F EF. CB + por xmm9, xmm0 ; 395F _ 66 44: 0F EB. C8 + pxor xmm10, xmm2 ; 3964 _ 66 44: 0F EF. D2 + pxor xmm9, xmm10 ; 3969 _ 66 45: 0F EF. CA + por xmm10, xmm2 ; 396E _ 66 44: 0F EB. D2 + pxor xmm10, xmm9 ; 3973 _ 66 45: 0F EF. D1 + pxor xmm11, xmm6 ; 3978 _ 66 44: 0F EF. DE + pxor xmm11, xmm10 ; 397D _ 66 45: 0F EF. DA + por xmm10, xmm9 ; 3982 _ 66 45: 0F EB. 
D1 + pxor xmm10, xmm9 ; 3987 _ 66 45: 0F EF. D1 + por xmm10, xmm11 ; 398C _ 66 45: 0F EB. D3 + pxor xmm2, xmm10 ; 3991 _ 66 41: 0F EF. D2 + movd xmm10, dword [r12+1410H] ; 3996 _ 66 45: 0F 6E. 94 24, 00001410 + pshufd xmm10, xmm10, 0 ; 39A0 _ 66 45: 0F 70. D2, 00 + pxor xmm11, xmm10 ; 39A6 _ 66 45: 0F EF. DA + movdqa xmm1, xmm11 ; 39AB _ 66 41: 0F 6F. CB + psrld xmm11, 5 ; 39B0 _ 66 41: 0F 72. D3, 05 + pslld xmm1, 27 ; 39B6 _ 66: 0F 72. F1, 1B + por xmm1, xmm11 ; 39BB _ 66 41: 0F EB. CB + movd xmm10, dword [r12+1414H] ; 39C0 _ 66 45: 0F 6E. 94 24, 00001414 + pshufd xmm10, xmm10, 0 ; 39CA _ 66 45: 0F 70. D2, 00 + pxor xmm9, xmm10 ; 39D0 _ 66 45: 0F EF. CA + movdqa xmm11, xmm9 ; 39D5 _ 66 45: 0F 6F. D9 + pxor xmm1, xmm9 ; 39DA _ 66 41: 0F EF. C9 + pslld xmm11, 7 ; 39DF _ 66 41: 0F 72. F3, 07 + movd xmm10, dword [r12+1418H] ; 39E5 _ 66 45: 0F 6E. 94 24, 00001418 + pshufd xmm10, xmm10, 0 ; 39EF _ 66 45: 0F 70. D2, 00 + pxor xmm2, xmm10 ; 39F5 _ 66 41: 0F EF. D2 + movd xmm10, dword [r12+141CH] ; 39FA _ 66 45: 0F 6E. 94 24, 0000141C + pshufd xmm10, xmm10, 0 ; 3A04 _ 66 45: 0F 70. D2, 00 + pxor xmm0, xmm10 ; 3A0A _ 66 41: 0F EF. C2 + movdqa xmm10, xmm2 ; 3A0F _ 66 44: 0F 6F. D2 + psrld xmm2, 22 ; 3A14 _ 66: 0F 72. D2, 16 + pxor xmm1, xmm0 ; 3A19 _ 66: 0F EF. C8 + pslld xmm10, 10 ; 3A1D _ 66 41: 0F 72. F2, 0A + por xmm10, xmm2 ; 3A23 _ 66 44: 0F EB. D2 + movdqa xmm2, xmm0 ; 3A28 _ 66: 0F 6F. D0 + pxor xmm10, xmm0 ; 3A2C _ 66 44: 0F EF. D0 + pxor xmm10, xmm11 ; 3A31 _ 66 45: 0F EF. D3 + movdqa xmm11, xmm9 ; 3A36 _ 66 45: 0F 6F. D9 + pslld xmm2, 25 ; 3A3B _ 66: 0F 72. F2, 19 + psrld xmm0, 7 ; 3A40 _ 66: 0F 72. D0, 07 + por xmm2, xmm0 ; 3A45 _ 66: 0F EB. D0 + movdqa xmm0, xmm1 ; 3A49 _ 66: 0F 6F. C1 + pslld xmm11, 31 ; 3A4D _ 66 41: 0F 72. F3, 1F + psrld xmm9, 1 ; 3A53 _ 66 41: 0F 72. D1, 01 + por xmm11, xmm9 ; 3A59 _ 66 45: 0F EB. D9 + movdqa xmm9, xmm1 ; 3A5E _ 66 44: 0F 6F. C9 + pxor xmm2, xmm10 ; 3A63 _ 66 41: 0F EF. D2 + pxor xmm11, xmm1 ; 3A68 _ 66 44: 0F EF. D9 + pslld xmm9, 3 ; 3A6D _ 66 41: 0F 72. F1, 03 + pxor xmm2, xmm9 ; 3A73 _ 66 41: 0F EF. D1 + movdqa xmm9, xmm10 ; 3A78 _ 66 45: 0F 6F. CA + pxor xmm11, xmm10 ; 3A7D _ 66 45: 0F EF. DA + psrld xmm10, 3 ; 3A82 _ 66 41: 0F 72. D2, 03 + pslld xmm9, 29 ; 3A88 _ 66 41: 0F 72. F1, 1D + por xmm9, xmm10 ; 3A8E _ 66 45: 0F EB. CA + movdqa xmm10, xmm11 ; 3A93 _ 66 45: 0F 6F. D3 + pslld xmm0, 19 ; 3A98 _ 66: 0F 72. F0, 13 + psrld xmm1, 13 ; 3A9D _ 66: 0F 72. D1, 0D + por xmm0, xmm1 ; 3AA2 _ 66: 0F EB. C1 + pxor xmm9, xmm6 ; 3AA6 _ 66 44: 0F EF. CE + por xmm10, xmm0 ; 3AAB _ 66 44: 0F EB. D0 + pxor xmm11, xmm6 ; 3AB0 _ 66 44: 0F EF. DE + pxor xmm10, xmm9 ; 3AB5 _ 66 45: 0F EF. D1 + por xmm9, xmm11 ; 3ABA _ 66 45: 0F EB. CB + pxor xmm10, xmm2 ; 3ABF _ 66 44: 0F EF. D2 + pxor xmm0, xmm11 ; 3AC4 _ 66 41: 0F EF. C3 + pxor xmm9, xmm0 ; 3AC9 _ 66 44: 0F EF. C8 + pand xmm0, xmm2 ; 3ACE _ 66: 0F DB. C2 + pxor xmm11, xmm0 ; 3AD2 _ 66 44: 0F EF. D8 + por xmm0, xmm10 ; 3AD7 _ 66 41: 0F EB. C2 + pxor xmm0, xmm9 ; 3ADC _ 66 41: 0F EF. C1 + pxor xmm2, xmm11 ; 3AE1 _ 66 41: 0F EF. D3 + pxor xmm9, xmm10 ; 3AE6 _ 66 45: 0F EF. CA + pxor xmm2, xmm0 ; 3AEB _ 66: 0F EF. D0 + pxor xmm2, xmm10 ; 3AEF _ 66 41: 0F EF. D2 + pand xmm9, xmm2 ; 3AF4 _ 66 44: 0F DB. CA + pxor xmm11, xmm9 ; 3AF9 _ 66 45: 0F EF. D9 + movd xmm9, dword [r12+1400H] ; 3AFE _ 66 45: 0F 6E. 8C 24, 00001400 + pshufd xmm9, xmm9, 0 ; 3B08 _ 66 45: 0F 70. C9, 00 + pxor xmm0, xmm9 ; 3B0E _ 66 41: 0F EF. C1 + movd xmm9, dword [r12+1404H] ; 3B13 _ 66 45: 0F 6E. 
8C 24, 00001404 + pshufd xmm9, xmm9, 0 ; 3B1D _ 66 45: 0F 70. C9, 00 + pxor xmm11, xmm9 ; 3B23 _ 66 45: 0F EF. D9 + movd xmm9, dword [r12+1408H] ; 3B28 _ 66 45: 0F 6E. 8C 24, 00001408 + pshufd xmm9, xmm9, 0 ; 3B32 _ 66 45: 0F 70. C9, 00 + pxor xmm10, xmm9 ; 3B38 _ 66 45: 0F EF. D1 + movdqa xmm1, xmm10 ; 3B3D _ 66 41: 0F 6F. CA + psrld xmm10, 22 ; 3B42 _ 66 41: 0F 72. D2, 16 + pslld xmm1, 10 ; 3B48 _ 66: 0F 72. F1, 0A + por xmm1, xmm10 ; 3B4D _ 66 41: 0F EB. CA + movdqa xmm10, xmm0 ; 3B52 _ 66 44: 0F 6F. D0 + movd xmm9, dword [r12+140CH] ; 3B57 _ 66 45: 0F 6E. 8C 24, 0000140C + pshufd xmm9, xmm9, 0 ; 3B61 _ 66 45: 0F 70. C9, 00 + pxor xmm2, xmm9 ; 3B67 _ 66 41: 0F EF. D1 + movdqa xmm9, xmm11 ; 3B6C _ 66 45: 0F 6F. CB + pslld xmm10, 27 ; 3B71 _ 66 41: 0F 72. F2, 1B + psrld xmm0, 5 ; 3B77 _ 66: 0F 72. D0, 05 + por xmm10, xmm0 ; 3B7C _ 66 44: 0F EB. D0 + movdqa xmm0, xmm11 ; 3B81 _ 66 41: 0F 6F. C3 + pxor xmm1, xmm2 ; 3B86 _ 66: 0F EF. CA + pslld xmm9, 7 ; 3B8A _ 66 41: 0F 72. F1, 07 + pxor xmm1, xmm9 ; 3B90 _ 66 41: 0F EF. C9 + movdqa xmm9, xmm2 ; 3B95 _ 66 44: 0F 6F. CA + pxor xmm10, xmm11 ; 3B9A _ 66 45: 0F EF. D3 + pxor xmm10, xmm2 ; 3B9F _ 66 44: 0F EF. D2 + pslld xmm9, 25 ; 3BA4 _ 66 41: 0F 72. F1, 19 + psrld xmm2, 7 ; 3BAA _ 66: 0F 72. D2, 07 + por xmm9, xmm2 ; 3BAF _ 66 44: 0F EB. CA + movdqa xmm2, xmm10 ; 3BB4 _ 66 41: 0F 6F. D2 + pslld xmm0, 31 ; 3BB9 _ 66: 0F 72. F0, 1F + psrld xmm11, 1 ; 3BBE _ 66 41: 0F 72. D3, 01 + por xmm0, xmm11 ; 3BC4 _ 66 41: 0F EB. C3 + movdqa xmm11, xmm10 ; 3BC9 _ 66 45: 0F 6F. DA + pxor xmm9, xmm1 ; 3BCE _ 66 44: 0F EF. C9 + pslld xmm2, 3 ; 3BD3 _ 66: 0F 72. F2, 03 + pxor xmm9, xmm2 ; 3BD8 _ 66 44: 0F EF. CA + movdqa xmm2, xmm1 ; 3BDD _ 66: 0F 6F. D1 + pxor xmm0, xmm10 ; 3BE1 _ 66 41: 0F EF. C2 + pxor xmm0, xmm1 ; 3BE6 _ 66: 0F EF. C1 + pslld xmm2, 29 ; 3BEA _ 66: 0F 72. F2, 1D + psrld xmm1, 3 ; 3BEF _ 66: 0F 72. D1, 03 + por xmm2, xmm1 ; 3BF4 _ 66: 0F EB. D1 + pslld xmm11, 19 ; 3BF8 _ 66 41: 0F 72. F3, 13 + psrld xmm10, 13 ; 3BFE _ 66 41: 0F 72. D2, 0D + por xmm11, xmm10 ; 3C04 _ 66 45: 0F EB. DA + movdqa xmm10, xmm2 ; 3C09 _ 66 44: 0F 6F. D2 + por xmm2, xmm9 ; 3C0E _ 66 41: 0F EB. D1 + pxor xmm10, xmm11 ; 3C13 _ 66 45: 0F EF. D3 + pand xmm11, xmm9 ; 3C18 _ 66 45: 0F DB. D9 + pxor xmm10, xmm6 ; 3C1D _ 66 44: 0F EF. D6 + pxor xmm9, xmm0 ; 3C22 _ 66 44: 0F EF. C8 + por xmm0, xmm11 ; 3C27 _ 66 41: 0F EB. C3 + pxor xmm11, xmm10 ; 3C2C _ 66 45: 0F EF. DA + pand xmm10, xmm2 ; 3C31 _ 66 44: 0F DB. D2 + pand xmm9, xmm2 ; 3C36 _ 66 44: 0F DB. CA + pxor xmm0, xmm10 ; 3C3B _ 66 41: 0F EF. C2 + pxor xmm10, xmm11 ; 3C40 _ 66 45: 0F EF. D3 + por xmm11, xmm10 ; 3C45 _ 66 45: 0F EB. DA + pxor xmm2, xmm0 ; 3C4A _ 66: 0F EF. D0 + pxor xmm11, xmm9 ; 3C4E _ 66 45: 0F EF. D9 + pxor xmm9, xmm2 ; 3C53 _ 66 44: 0F EF. CA + por xmm2, xmm11 ; 3C58 _ 66 41: 0F EB. D3 + pxor xmm9, xmm10 ; 3C5D _ 66 45: 0F EF. CA + pxor xmm2, xmm10 ; 3C62 _ 66 41: 0F EF. D2 + movd xmm10, dword [r12+13F0H] ; 3C67 _ 66 45: 0F 6E. 94 24, 000013F0 + pshufd xmm10, xmm10, 0 ; 3C71 _ 66 45: 0F 70. D2, 00 + pxor xmm9, xmm10 ; 3C77 _ 66 45: 0F EF. CA + movd xmm10, dword [r12+13F4H] ; 3C7C _ 66 45: 0F 6E. 94 24, 000013F4 + pshufd xmm10, xmm10, 0 ; 3C86 _ 66 45: 0F 70. D2, 00 + pxor xmm11, xmm10 ; 3C8C _ 66 45: 0F EF. DA + movd xmm10, dword [r12+13F8H] ; 3C91 _ 66 45: 0F 6E. 94 24, 000013F8 + pshufd xmm10, xmm10, 0 ; 3C9B _ 66 45: 0F 70. D2, 00 + pxor xmm0, xmm10 ; 3CA1 _ 66 41: 0F EF. C2 + movdqa xmm1, xmm0 ; 3CA6 _ 66: 0F 6F. C8 + psrld xmm0, 22 ; 3CAA _ 66: 0F 72. 
D0, 16 + movd xmm10, dword [r12+13FCH] ; 3CAF _ 66 45: 0F 6E. 94 24, 000013FC + pshufd xmm10, xmm10, 0 ; 3CB9 _ 66 45: 0F 70. D2, 00 + pxor xmm2, xmm10 ; 3CBF _ 66 41: 0F EF. D2 + movdqa xmm10, xmm9 ; 3CC4 _ 66 45: 0F 6F. D1 + pslld xmm1, 10 ; 3CC9 _ 66: 0F 72. F1, 0A + por xmm1, xmm0 ; 3CCE _ 66: 0F EB. C8 + movdqa xmm0, xmm2 ; 3CD2 _ 66: 0F 6F. C2 + pslld xmm10, 27 ; 3CD6 _ 66 41: 0F 72. F2, 1B + psrld xmm9, 5 ; 3CDC _ 66 41: 0F 72. D1, 05 + por xmm10, xmm9 ; 3CE2 _ 66 45: 0F EB. D1 + movdqa xmm9, xmm11 ; 3CE7 _ 66 45: 0F 6F. CB + pxor xmm1, xmm2 ; 3CEC _ 66: 0F EF. CA + pxor xmm10, xmm11 ; 3CF0 _ 66 45: 0F EF. D3 + pslld xmm9, 7 ; 3CF5 _ 66 41: 0F 72. F1, 07 + pxor xmm1, xmm9 ; 3CFB _ 66 41: 0F EF. C9 + movdqa xmm9, xmm11 ; 3D00 _ 66 45: 0F 6F. CB + pxor xmm10, xmm2 ; 3D05 _ 66 44: 0F EF. D2 + pslld xmm0, 25 ; 3D0A _ 66: 0F 72. F0, 19 + psrld xmm2, 7 ; 3D0F _ 66: 0F 72. D2, 07 + por xmm0, xmm2 ; 3D14 _ 66: 0F EB. C2 + movdqa xmm2, xmm1 ; 3D18 _ 66: 0F 6F. D1 + pslld xmm9, 31 ; 3D1C _ 66 41: 0F 72. F1, 1F + psrld xmm11, 1 ; 3D22 _ 66 41: 0F 72. D3, 01 + por xmm9, xmm11 ; 3D28 _ 66 45: 0F EB. CB + movdqa xmm11, xmm10 ; 3D2D _ 66 45: 0F 6F. DA + pxor xmm0, xmm1 ; 3D32 _ 66: 0F EF. C1 + pxor xmm9, xmm10 ; 3D36 _ 66 45: 0F EF. CA + pslld xmm11, 3 ; 3D3B _ 66 41: 0F 72. F3, 03 + pxor xmm0, xmm11 ; 3D41 _ 66 41: 0F EF. C3 + movdqa xmm11, xmm10 ; 3D46 _ 66 45: 0F 6F. DA + pxor xmm9, xmm1 ; 3D4B _ 66 44: 0F EF. C9 + pslld xmm2, 29 ; 3D50 _ 66: 0F 72. F2, 1D + psrld xmm1, 3 ; 3D55 _ 66: 0F 72. D1, 03 + por xmm2, xmm1 ; 3D5A _ 66: 0F EB. D1 + pslld xmm11, 19 ; 3D5E _ 66 41: 0F 72. F3, 13 + psrld xmm10, 13 ; 3D64 _ 66 41: 0F 72. D2, 0D + por xmm11, xmm10 ; 3D6A _ 66 45: 0F EB. DA + movdqa xmm10, xmm2 ; 3D6F _ 66 44: 0F 6F. D2 + pxor xmm11, xmm2 ; 3D74 _ 66 44: 0F EF. DA + pxor xmm2, xmm0 ; 3D79 _ 66: 0F EF. D0 + pand xmm10, xmm11 ; 3D7D _ 66 45: 0F DB. D3 + pxor xmm10, xmm6 ; 3D82 _ 66 44: 0F EF. D6 + pxor xmm0, xmm9 ; 3D87 _ 66 41: 0F EF. C1 + pxor xmm10, xmm0 ; 3D8C _ 66 44: 0F EF. D0 + por xmm2, xmm11 ; 3D91 _ 66 41: 0F EB. D3 + pxor xmm11, xmm10 ; 3D96 _ 66 45: 0F EF. DA + pxor xmm0, xmm2 ; 3D9B _ 66: 0F EF. C2 + pxor xmm2, xmm9 ; 3D9F _ 66 41: 0F EF. D1 + pand xmm9, xmm0 ; 3DA4 _ 66 44: 0F DB. C8 + pxor xmm9, xmm11 ; 3DA9 _ 66 45: 0F EF. CB + pxor xmm11, xmm0 ; 3DAE _ 66 44: 0F EF. D8 + por xmm11, xmm10 ; 3DB3 _ 66 45: 0F EB. DA + pxor xmm0, xmm9 ; 3DB8 _ 66 41: 0F EF. C1 + pxor xmm2, xmm11 ; 3DBD _ 66 41: 0F EF. D3 + movd xmm11, dword [r12+13E0H] ; 3DC2 _ 66 45: 0F 6E. 9C 24, 000013E0 + pshufd xmm11, xmm11, 0 ; 3DCC _ 66 45: 0F 70. DB, 00 + pxor xmm9, xmm11 ; 3DD2 _ 66 45: 0F EF. CB + movd xmm11, dword [r12+13E4H] ; 3DD7 _ 66 45: 0F 6E. 9C 24, 000013E4 + pshufd xmm11, xmm11, 0 ; 3DE1 _ 66 45: 0F 70. DB, 00 + pxor xmm10, xmm11 ; 3DE7 _ 66 45: 0F EF. D3 + movd xmm11, dword [r12+13E8H] ; 3DEC _ 66 45: 0F 6E. 9C 24, 000013E8 + pshufd xmm11, xmm11, 0 ; 3DF6 _ 66 45: 0F 70. DB, 00 + pxor xmm2, xmm11 ; 3DFC _ 66 41: 0F EF. D3 + movdqa xmm1, xmm2 ; 3E01 _ 66: 0F 6F. CA + movd xmm11, dword [r12+13ECH] ; 3E05 _ 66 45: 0F 6E. 9C 24, 000013EC + pshufd xmm11, xmm11, 0 ; 3E0F _ 66 45: 0F 70. DB, 00 + pxor xmm0, xmm11 ; 3E15 _ 66 41: 0F EF. C3 + movdqa xmm11, xmm9 ; 3E1A _ 66 45: 0F 6F. D9 + pslld xmm1, 10 ; 3E1F _ 66: 0F 72. F1, 0A + psrld xmm2, 22 ; 3E24 _ 66: 0F 72. D2, 16 + por xmm1, xmm2 ; 3E29 _ 66: 0F EB. CA + movdqa xmm2, xmm10 ; 3E2D _ 66 41: 0F 6F. D2 + pslld xmm11, 27 ; 3E32 _ 66 41: 0F 72. F3, 1B + psrld xmm9, 5 ; 3E38 _ 66 41: 0F 72. D1, 05 + por xmm11, xmm9 ; 3E3E _ 66 45: 0F EB. 
D9 + pxor xmm1, xmm0 ; 3E43 _ 66: 0F EF. C8 + pslld xmm2, 7 ; 3E47 _ 66: 0F 72. F2, 07 + pxor xmm1, xmm2 ; 3E4C _ 66: 0F EF. CA + movdqa xmm2, xmm0 ; 3E50 _ 66: 0F 6F. D0 + pxor xmm11, xmm10 ; 3E54 _ 66 45: 0F EF. DA + pxor xmm11, xmm0 ; 3E59 _ 66 44: 0F EF. D8 + movdqa xmm9, xmm11 ; 3E5E _ 66 45: 0F 6F. CB + pslld xmm2, 25 ; 3E63 _ 66: 0F 72. F2, 19 + psrld xmm0, 7 ; 3E68 _ 66: 0F 72. D0, 07 + por xmm2, xmm0 ; 3E6D _ 66: 0F EB. D0 + movdqa xmm0, xmm10 ; 3E71 _ 66 41: 0F 6F. C2 + psrld xmm10, 1 ; 3E76 _ 66 41: 0F 72. D2, 01 + pxor xmm2, xmm1 ; 3E7C _ 66: 0F EF. D1 + pslld xmm0, 31 ; 3E80 _ 66: 0F 72. F0, 1F + por xmm0, xmm10 ; 3E85 _ 66 41: 0F EB. C2 + movdqa xmm10, xmm11 ; 3E8A _ 66 45: 0F 6F. D3 + pxor xmm0, xmm11 ; 3E8F _ 66 41: 0F EF. C3 + pxor xmm0, xmm1 ; 3E94 _ 66: 0F EF. C1 + pslld xmm10, 3 ; 3E98 _ 66 41: 0F 72. F2, 03 + pxor xmm2, xmm10 ; 3E9E _ 66 41: 0F EF. D2 + movdqa xmm10, xmm1 ; 3EA3 _ 66 44: 0F 6F. D1 + psrld xmm1, 3 ; 3EA8 _ 66: 0F 72. D1, 03 + pslld xmm9, 19 ; 3EAD _ 66 41: 0F 72. F1, 13 + pslld xmm10, 29 ; 3EB3 _ 66 41: 0F 72. F2, 1D + por xmm10, xmm1 ; 3EB9 _ 66 44: 0F EB. D1 + psrld xmm11, 13 ; 3EBE _ 66 41: 0F 72. D3, 0D + por xmm9, xmm11 ; 3EC4 _ 66 45: 0F EB. CB + movdqa xmm11, xmm2 ; 3EC9 _ 66 44: 0F 6F. DA + pxor xmm0, xmm6 ; 3ECE _ 66: 0F EF. C6 + pxor xmm10, xmm0 ; 3ED2 _ 66 44: 0F EF. D0 + por xmm11, xmm9 ; 3ED7 _ 66 45: 0F EB. D9 + pxor xmm11, xmm10 ; 3EDC _ 66 45: 0F EF. DA + por xmm10, xmm0 ; 3EE1 _ 66 44: 0F EB. D0 + pand xmm10, xmm9 ; 3EE6 _ 66 45: 0F DB. D1 + pxor xmm2, xmm11 ; 3EEB _ 66 41: 0F EF. D3 + pxor xmm10, xmm2 ; 3EF0 _ 66 44: 0F EF. D2 + por xmm2, xmm9 ; 3EF5 _ 66 41: 0F EB. D1 + pxor xmm2, xmm0 ; 3EFA _ 66: 0F EF. D0 + pand xmm0, xmm10 ; 3EFE _ 66 41: 0F DB. C2 + pxor xmm0, xmm11 ; 3F03 _ 66 41: 0F EF. C3 + pxor xmm2, xmm10 ; 3F08 _ 66 41: 0F EF. D2 + pand xmm11, xmm2 ; 3F0D _ 66 44: 0F DB. DA + pxor xmm2, xmm0 ; 3F12 _ 66: 0F EF. D0 + pxor xmm11, xmm2 ; 3F16 _ 66 44: 0F EF. DA + pxor xmm2, xmm6 ; 3F1B _ 66: 0F EF. D6 + pxor xmm11, xmm9 ; 3F1F _ 66 45: 0F EF. D9 + movd xmm9, dword [r12+13D0H] ; 3F24 _ 66 45: 0F 6E. 8C 24, 000013D0 + pshufd xmm9, xmm9, 0 ; 3F2E _ 66 45: 0F 70. C9, 00 + pxor xmm0, xmm9 ; 3F34 _ 66 41: 0F EF. C1 + movd xmm9, dword [r12+13D4H] ; 3F39 _ 66 45: 0F 6E. 8C 24, 000013D4 + pshufd xmm9, xmm9, 0 ; 3F43 _ 66 45: 0F 70. C9, 00 + pxor xmm2, xmm9 ; 3F49 _ 66 41: 0F EF. D1 + movd xmm9, dword [r12+13D8H] ; 3F4E _ 66 45: 0F 6E. 8C 24, 000013D8 + pshufd xmm9, xmm9, 0 ; 3F58 _ 66 45: 0F 70. C9, 00 + pxor xmm11, xmm9 ; 3F5E _ 66 45: 0F EF. D9 + movdqa xmm1, xmm11 ; 3F63 _ 66 41: 0F 6F. CB + psrld xmm11, 22 ; 3F68 _ 66 41: 0F 72. D3, 16 + pslld xmm1, 10 ; 3F6E _ 66: 0F 72. F1, 0A + por xmm1, xmm11 ; 3F73 _ 66 41: 0F EB. CB + movdqa xmm11, xmm2 ; 3F78 _ 66 44: 0F 6F. DA + pslld xmm11, 7 ; 3F7D _ 66 41: 0F 72. F3, 07 + movd xmm9, dword [r12+13DCH] ; 3F83 _ 66 45: 0F 6E. 8C 24, 000013DC + pshufd xmm9, xmm9, 0 ; 3F8D _ 66 45: 0F 70. C9, 00 + pxor xmm10, xmm9 ; 3F93 _ 66 45: 0F EF. D1 + movdqa xmm9, xmm0 ; 3F98 _ 66 44: 0F 6F. C8 + psrld xmm0, 5 ; 3F9D _ 66: 0F 72. D0, 05 + pxor xmm1, xmm10 ; 3FA2 _ 66 41: 0F EF. CA + pslld xmm9, 27 ; 3FA7 _ 66 41: 0F 72. F1, 1B + por xmm9, xmm0 ; 3FAD _ 66 44: 0F EB. C8 + movdqa xmm0, xmm2 ; 3FB2 _ 66: 0F 6F. C2 + pxor xmm1, xmm11 ; 3FB6 _ 66 41: 0F EF. CB + movdqa xmm11, xmm10 ; 3FBB _ 66 45: 0F 6F. DA + pxor xmm9, xmm2 ; 3FC0 _ 66 44: 0F EF. CA + pxor xmm9, xmm10 ; 3FC5 _ 66 45: 0F EF. CA + pslld xmm11, 25 ; 3FCA _ 66 41: 0F 72. F3, 19 + psrld xmm10, 7 ; 3FD0 _ 66 41: 0F 72. 
D2, 07 + por xmm11, xmm10 ; 3FD6 _ 66 45: 0F EB. DA + movdqa xmm10, xmm9 ; 3FDB _ 66 45: 0F 6F. D1 + pslld xmm0, 31 ; 3FE0 _ 66: 0F 72. F0, 1F + psrld xmm2, 1 ; 3FE5 _ 66: 0F 72. D2, 01 + por xmm0, xmm2 ; 3FEA _ 66: 0F EB. C2 + movdqa xmm2, xmm9 ; 3FEE _ 66 41: 0F 6F. D1 + pxor xmm11, xmm1 ; 3FF3 _ 66 44: 0F EF. D9 + pxor xmm0, xmm9 ; 3FF8 _ 66 41: 0F EF. C1 + pslld xmm2, 3 ; 3FFD _ 66: 0F 72. F2, 03 + pxor xmm11, xmm2 ; 4002 _ 66 44: 0F EF. DA + movdqa xmm2, xmm1 ; 4007 _ 66: 0F 6F. D1 + pxor xmm0, xmm1 ; 400B _ 66: 0F EF. C1 + psrld xmm1, 3 ; 400F _ 66: 0F 72. D1, 03 + pslld xmm2, 29 ; 4014 _ 66: 0F 72. F2, 1D + por xmm2, xmm1 ; 4019 _ 66: 0F EB. D1 + pslld xmm10, 19 ; 401D _ 66 41: 0F 72. F2, 13 + psrld xmm9, 13 ; 4023 _ 66 41: 0F 72. D1, 0D + por xmm10, xmm9 ; 4029 _ 66 45: 0F EB. D1 + movdqa xmm9, xmm2 ; 402E _ 66 44: 0F 6F. CA + pand xmm9, xmm11 ; 4033 _ 66 45: 0F DB. CB + pxor xmm9, xmm0 ; 4038 _ 66 44: 0F EF. C8 + por xmm0, xmm11 ; 403D _ 66 41: 0F EB. C3 + pand xmm0, xmm10 ; 4042 _ 66 41: 0F DB. C2 + pxor xmm2, xmm9 ; 4047 _ 66 41: 0F EF. D1 + pxor xmm2, xmm0 ; 404C _ 66: 0F EF. D0 + pand xmm0, xmm9 ; 4050 _ 66 41: 0F DB. C1 + pxor xmm10, xmm6 ; 4055 _ 66 44: 0F EF. D6 + pxor xmm11, xmm2 ; 405A _ 66 44: 0F EF. DA + pxor xmm0, xmm11 ; 405F _ 66 41: 0F EF. C3 + pand xmm11, xmm10 ; 4064 _ 66 45: 0F DB. DA + pxor xmm11, xmm9 ; 4069 _ 66 45: 0F EF. D9 + pxor xmm10, xmm0 ; 406E _ 66 44: 0F EF. D0 + pand xmm9, xmm10 ; 4073 _ 66 45: 0F DB. CA + pxor xmm11, xmm10 ; 4078 _ 66 45: 0F EF. DA + pxor xmm9, xmm2 ; 407D _ 66 44: 0F EF. CA + por xmm9, xmm11 ; 4082 _ 66 45: 0F EB. CB + pxor xmm11, xmm10 ; 4087 _ 66 45: 0F EF. DA + pxor xmm9, xmm0 ; 408C _ 66 44: 0F EF. C8 + movd xmm0, dword [r12+13C0H] ; 4091 _ 66 41: 0F 6E. 84 24, 000013C0 + pshufd xmm0, xmm0, 0 ; 409B _ 66: 0F 70. C0, 00 + pxor xmm10, xmm0 ; 40A0 _ 66 44: 0F EF. D0 + movd xmm0, dword [r12+13C4H] ; 40A5 _ 66 41: 0F 6E. 84 24, 000013C4 + pshufd xmm0, xmm0, 0 ; 40AF _ 66: 0F 70. C0, 00 + pxor xmm11, xmm0 ; 40B4 _ 66 44: 0F EF. D8 + movd xmm0, dword [r12+13C8H] ; 40B9 _ 66 41: 0F 6E. 84 24, 000013C8 + pshufd xmm0, xmm0, 0 ; 40C3 _ 66: 0F 70. C0, 00 + pxor xmm9, xmm0 ; 40C8 _ 66 44: 0F EF. C8 + movdqa xmm1, xmm9 ; 40CD _ 66 41: 0F 6F. C9 + psrld xmm9, 22 ; 40D2 _ 66 41: 0F 72. D1, 16 + pslld xmm1, 10 ; 40D8 _ 66: 0F 72. F1, 0A + por xmm1, xmm9 ; 40DD _ 66 41: 0F EB. C9 + movd xmm0, dword [r12+13CCH] ; 40E2 _ 66 41: 0F 6E. 84 24, 000013CC + pshufd xmm0, xmm0, 0 ; 40EC _ 66: 0F 70. C0, 00 + pxor xmm2, xmm0 ; 40F1 _ 66: 0F EF. D0 + movdqa xmm0, xmm10 ; 40F5 _ 66 41: 0F 6F. C2 + psrld xmm10, 5 ; 40FA _ 66 41: 0F 72. D2, 05 + pxor xmm1, xmm2 ; 4100 _ 66: 0F EF. CA + pslld xmm0, 27 ; 4104 _ 66: 0F 72. F0, 1B + por xmm0, xmm10 ; 4109 _ 66 41: 0F EB. C2 + movdqa xmm10, xmm11 ; 410E _ 66 45: 0F 6F. D3 + pxor xmm0, xmm11 ; 4113 _ 66 41: 0F EF. C3 + pxor xmm0, xmm2 ; 4118 _ 66: 0F EF. C2 + movdqa xmm9, xmm0 ; 411C _ 66 44: 0F 6F. C8 + pslld xmm10, 7 ; 4121 _ 66 41: 0F 72. F2, 07 + pxor xmm1, xmm10 ; 4127 _ 66 41: 0F EF. CA + movdqa xmm10, xmm2 ; 412C _ 66 44: 0F 6F. D2 + psrld xmm2, 7 ; 4131 _ 66: 0F 72. D2, 07 + pslld xmm9, 3 ; 4136 _ 66 41: 0F 72. F1, 03 + pslld xmm10, 25 ; 413C _ 66 41: 0F 72. F2, 19 + por xmm10, xmm2 ; 4142 _ 66 44: 0F EB. D2 + movdqa xmm2, xmm11 ; 4147 _ 66 41: 0F 6F. D3 + psrld xmm11, 1 ; 414C _ 66 41: 0F 72. D3, 01 + pxor xmm10, xmm1 ; 4152 _ 66 44: 0F EF. D1 + pslld xmm2, 31 ; 4157 _ 66: 0F 72. F2, 1F + por xmm2, xmm11 ; 415C _ 66 41: 0F EB. D3 + movdqa xmm11, xmm1 ; 4161 _ 66 44: 0F 6F. 
D9 + pxor xmm10, xmm9 ; 4166 _ 66 45: 0F EF. D1 + pxor xmm2, xmm0 ; 416B _ 66: 0F EF. D0 + pxor xmm2, xmm1 ; 416F _ 66: 0F EF. D1 + pslld xmm11, 29 ; 4173 _ 66 41: 0F 72. F3, 1D + psrld xmm1, 3 ; 4179 _ 66: 0F 72. D1, 03 + por xmm11, xmm1 ; 417E _ 66 44: 0F EB. D9 + movdqa xmm1, xmm0 ; 4183 _ 66: 0F 6F. C8 + pslld xmm1, 19 ; 4187 _ 66: 0F 72. F1, 13 + movdqa xmm9, xmm11 ; 418C _ 66 45: 0F 6F. CB + psrld xmm0, 13 ; 4191 _ 66: 0F 72. D0, 0D + por xmm1, xmm0 ; 4196 _ 66: 0F EB. C8 + pxor xmm9, xmm2 ; 419A _ 66 44: 0F EF. CA + pxor xmm1, xmm9 ; 419F _ 66 41: 0F EF. C9 + pand xmm11, xmm9 ; 41A4 _ 66 45: 0F DB. D9 + pxor xmm11, xmm1 ; 41A9 _ 66 44: 0F EF. D9 + pand xmm1, xmm2 ; 41AE _ 66: 0F DB. CA + pxor xmm2, xmm10 ; 41B2 _ 66 41: 0F EF. D2 + por xmm10, xmm11 ; 41B7 _ 66 45: 0F EB. D3 + pxor xmm9, xmm10 ; 41BC _ 66 45: 0F EF. CA + pxor xmm1, xmm10 ; 41C1 _ 66 41: 0F EF. CA + pxor xmm2, xmm11 ; 41C6 _ 66 41: 0F EF. D3 + pand xmm10, xmm9 ; 41CB _ 66 45: 0F DB. D1 + pxor xmm10, xmm2 ; 41D0 _ 66 44: 0F EF. D2 + pxor xmm2, xmm1 ; 41D5 _ 66: 0F EF. D1 + por xmm2, xmm9 ; 41D9 _ 66 41: 0F EB. D1 + pxor xmm1, xmm10 ; 41DE _ 66 41: 0F EF. CA + pxor xmm2, xmm11 ; 41E3 _ 66 41: 0F EF. D3 + pxor xmm1, xmm2 ; 41E8 _ 66: 0F EF. CA + movd xmm11, dword [r12+13B0H] ; 41EC _ 66 45: 0F 6E. 9C 24, 000013B0 + pshufd xmm11, xmm11, 0 ; 41F6 _ 66 45: 0F 70. DB, 00 + pxor xmm9, xmm11 ; 41FC _ 66 45: 0F EF. CB + movd xmm11, dword [r12+13B4H] ; 4201 _ 66 45: 0F 6E. 9C 24, 000013B4 + pshufd xmm11, xmm11, 0 ; 420B _ 66 45: 0F 70. DB, 00 + pxor xmm2, xmm11 ; 4211 _ 66 41: 0F EF. D3 + movd xmm11, dword [r12+13B8H] ; 4216 _ 66 45: 0F 6E. 9C 24, 000013B8 + pshufd xmm11, xmm11, 0 ; 4220 _ 66 45: 0F 70. DB, 00 + pxor xmm10, xmm11 ; 4226 _ 66 45: 0F EF. D3 + movdqa xmm0, xmm10 ; 422B _ 66 41: 0F 6F. C2 + psrld xmm10, 22 ; 4230 _ 66 41: 0F 72. D2, 16 + pslld xmm0, 10 ; 4236 _ 66: 0F 72. F0, 0A + movd xmm11, dword [r12+13BCH] ; 423B _ 66 45: 0F 6E. 9C 24, 000013BC + pshufd xmm11, xmm11, 0 ; 4245 _ 66 45: 0F 70. DB, 00 + pxor xmm1, xmm11 ; 424B _ 66 41: 0F EF. CB + movdqa xmm11, xmm9 ; 4250 _ 66 45: 0F 6F. D9 + por xmm0, xmm10 ; 4255 _ 66 41: 0F EB. C2 + movdqa xmm10, xmm2 ; 425A _ 66 44: 0F 6F. D2 + pslld xmm11, 27 ; 425F _ 66 41: 0F 72. F3, 1B + psrld xmm9, 5 ; 4265 _ 66 41: 0F 72. D1, 05 + por xmm11, xmm9 ; 426B _ 66 45: 0F EB. D9 + movdqa xmm9, xmm1 ; 4270 _ 66 44: 0F 6F. C9 + pxor xmm0, xmm1 ; 4275 _ 66: 0F EF. C1 + pslld xmm10, 7 ; 4279 _ 66 41: 0F 72. F2, 07 + pxor xmm0, xmm10 ; 427F _ 66 41: 0F EF. C2 + movdqa xmm10, xmm2 ; 4284 _ 66 44: 0F 6F. D2 + pxor xmm11, xmm2 ; 4289 _ 66 44: 0F EF. DA + pxor xmm11, xmm1 ; 428E _ 66 44: 0F EF. D9 + pslld xmm9, 25 ; 4293 _ 66 41: 0F 72. F1, 19 + psrld xmm1, 7 ; 4299 _ 66: 0F 72. D1, 07 + por xmm9, xmm1 ; 429E _ 66 44: 0F EB. C9 + movdqa xmm1, xmm0 ; 42A3 _ 66: 0F 6F. C8 + pslld xmm10, 31 ; 42A7 _ 66 41: 0F 72. F2, 1F + psrld xmm2, 1 ; 42AD _ 66: 0F 72. D2, 01 + por xmm10, xmm2 ; 42B2 _ 66 44: 0F EB. D2 + movdqa xmm2, xmm11 ; 42B7 _ 66 41: 0F 6F. D3 + pxor xmm9, xmm0 ; 42BC _ 66 44: 0F EF. C8 + pxor xmm10, xmm11 ; 42C1 _ 66 45: 0F EF. D3 + pslld xmm2, 3 ; 42C6 _ 66: 0F 72. F2, 03 + pxor xmm9, xmm2 ; 42CB _ 66 44: 0F EF. CA + movdqa xmm2, xmm11 ; 42D0 _ 66 41: 0F 6F. D3 + pxor xmm10, xmm0 ; 42D5 _ 66 44: 0F EF. D0 + pslld xmm1, 29 ; 42DA _ 66: 0F 72. F1, 1D + psrld xmm0, 3 ; 42DF _ 66: 0F 72. D0, 03 + por xmm1, xmm0 ; 42E4 _ 66: 0F EB. C8 + pslld xmm2, 19 ; 42E8 _ 66: 0F 72. F2, 13 + psrld xmm11, 13 ; 42ED _ 66 41: 0F 72. D3, 0D + por xmm2, xmm11 ; 42F3 _ 66 41: 0F EB. 
D3 + pxor xmm1, xmm9 ; 42F8 _ 66 41: 0F EF. C9 + pxor xmm9, xmm2 ; 42FD _ 66 44: 0F EF. CA + movdqa xmm11, xmm9 ; 4302 _ 66 45: 0F 6F. D9 + pand xmm11, xmm1 ; 4307 _ 66 44: 0F DB. D9 + pxor xmm11, xmm10 ; 430C _ 66 45: 0F EF. DA + por xmm10, xmm1 ; 4311 _ 66 44: 0F EB. D1 + pxor xmm10, xmm9 ; 4316 _ 66 45: 0F EF. D1 + pand xmm9, xmm11 ; 431B _ 66 45: 0F DB. CB + pxor xmm1, xmm11 ; 4320 _ 66 41: 0F EF. CB + pand xmm9, xmm2 ; 4325 _ 66 44: 0F DB. CA + pxor xmm9, xmm1 ; 432A _ 66 44: 0F EF. C9 + pand xmm1, xmm10 ; 432F _ 66 41: 0F DB. CA + por xmm1, xmm2 ; 4334 _ 66: 0F EB. CA + pxor xmm11, xmm6 ; 4338 _ 66 44: 0F EF. DE + movdqa xmm0, xmm11 ; 433D _ 66 41: 0F 6F. C3 + pxor xmm1, xmm11 ; 4342 _ 66 41: 0F EF. CB + pxor xmm2, xmm11 ; 4347 _ 66 41: 0F EF. D3 + pxor xmm0, xmm9 ; 434C _ 66 41: 0F EF. C1 + pand xmm2, xmm10 ; 4351 _ 66 41: 0F DB. D2 + pxor xmm0, xmm2 ; 4356 _ 66: 0F EF. C2 + movd xmm2, dword [r12+13A0H] ; 435A _ 66 41: 0F 6E. 94 24, 000013A0 + pshufd xmm11, xmm2, 0 ; 4364 _ 66 44: 0F 70. DA, 00 + pxor xmm10, xmm11 ; 436A _ 66 45: 0F EF. D3 + movd xmm2, dword [r12+13A4H] ; 436F _ 66 41: 0F 6E. 94 24, 000013A4 + pshufd xmm11, xmm2, 0 ; 4379 _ 66 44: 0F 70. DA, 00 + pxor xmm9, xmm11 ; 437F _ 66 45: 0F EF. CB + movd xmm2, dword [r12+13A8H] ; 4384 _ 66 41: 0F 6E. 94 24, 000013A8 + pshufd xmm11, xmm2, 0 ; 438E _ 66 44: 0F 70. DA, 00 + movd xmm2, dword [r12+13ACH] ; 4394 _ 66 41: 0F 6E. 94 24, 000013AC + pxor xmm1, xmm11 ; 439E _ 66 41: 0F EF. CB + pshufd xmm11, xmm2, 0 ; 43A3 _ 66 44: 0F 70. DA, 00 + movdqa xmm2, xmm1 ; 43A9 _ 66: 0F 6F. D1 + pxor xmm0, xmm11 ; 43AD _ 66 41: 0F EF. C3 + movdqa xmm11, xmm10 ; 43B2 _ 66 45: 0F 6F. DA + pslld xmm2, 10 ; 43B7 _ 66: 0F 72. F2, 0A + psrld xmm1, 22 ; 43BC _ 66: 0F 72. D1, 16 + por xmm2, xmm1 ; 43C1 _ 66: 0F EB. D1 + movdqa xmm1, xmm0 ; 43C5 _ 66: 0F 6F. C8 + pslld xmm11, 27 ; 43C9 _ 66 41: 0F 72. F3, 1B + psrld xmm10, 5 ; 43CF _ 66 41: 0F 72. D2, 05 + por xmm11, xmm10 ; 43D5 _ 66 45: 0F EB. DA + movdqa xmm10, xmm9 ; 43DA _ 66 45: 0F 6F. D1 + pxor xmm2, xmm0 ; 43DF _ 66: 0F EF. D0 + pxor xmm11, xmm9 ; 43E3 _ 66 45: 0F EF. D9 + pslld xmm10, 7 ; 43E8 _ 66 41: 0F 72. F2, 07 + pxor xmm2, xmm10 ; 43EE _ 66 41: 0F EF. D2 + pxor xmm11, xmm0 ; 43F3 _ 66 44: 0F EF. D8 + movd xmm10, dword [r12+1390H] ; 43F8 _ 66 45: 0F 6E. 94 24, 00001390 + pslld xmm1, 25 ; 4402 _ 66: 0F 72. F1, 19 + psrld xmm0, 7 ; 4407 _ 66: 0F 72. D0, 07 + por xmm1, xmm0 ; 440C _ 66: 0F EB. C8 + movdqa xmm0, xmm9 ; 4410 _ 66 41: 0F 6F. C1 + psrld xmm9, 1 ; 4415 _ 66 41: 0F 72. D1, 01 + pxor xmm1, xmm2 ; 441B _ 66: 0F EF. CA + pslld xmm0, 31 ; 441F _ 66: 0F 72. F0, 1F + por xmm0, xmm9 ; 4424 _ 66 41: 0F EB. C1 + movdqa xmm9, xmm11 ; 4429 _ 66 45: 0F 6F. CB + pxor xmm0, xmm11 ; 442E _ 66 41: 0F EF. C3 + pxor xmm0, xmm2 ; 4433 _ 66: 0F EF. C2 + pslld xmm9, 3 ; 4437 _ 66 41: 0F 72. F1, 03 + pxor xmm1, xmm9 ; 443D _ 66 41: 0F EF. C9 + movdqa xmm9, xmm2 ; 4442 _ 66 44: 0F 6F. CA + psrld xmm2, 3 ; 4447 _ 66: 0F 72. D2, 03 + pslld xmm9, 29 ; 444C _ 66 41: 0F 72. F1, 1D + por xmm9, xmm2 ; 4452 _ 66 44: 0F EB. CA + movdqa xmm2, xmm11 ; 4457 _ 66 41: 0F 6F. D3 + psrld xmm11, 13 ; 445C _ 66 41: 0F 72. D3, 0D + pslld xmm2, 19 ; 4462 _ 66: 0F 72. F2, 13 + por xmm2, xmm11 ; 4467 _ 66 41: 0F EB. D3 + movdqa xmm11, xmm0 ; 446C _ 66 44: 0F 6F. D8 + pxor xmm0, xmm9 ; 4471 _ 66 41: 0F EF. C1 + pxor xmm11, xmm1 ; 4476 _ 66 44: 0F EF. D9 + pand xmm1, xmm11 ; 447B _ 66 41: 0F DB. CB + pxor xmm1, xmm2 ; 4480 _ 66: 0F EF. CA + por xmm2, xmm11 ; 4484 _ 66 41: 0F EB. D3 + pxor xmm9, xmm1 ; 4489 _ 66 44: 0F EF. 
C9 + pxor xmm2, xmm0 ; 448E _ 66: 0F EF. D0 + por xmm2, xmm9 ; 4492 _ 66 41: 0F EB. D1 + pxor xmm11, xmm1 ; 4497 _ 66 44: 0F EF. D9 + pxor xmm2, xmm11 ; 449C _ 66 41: 0F EF. D3 + por xmm11, xmm1 ; 44A1 _ 66 44: 0F EB. D9 + pxor xmm11, xmm2 ; 44A6 _ 66 44: 0F EF. DA + pxor xmm0, xmm6 ; 44AB _ 66: 0F EF. C6 + pxor xmm0, xmm11 ; 44AF _ 66 41: 0F EF. C3 + por xmm11, xmm2 ; 44B4 _ 66 44: 0F EB. DA + pxor xmm11, xmm2 ; 44B9 _ 66 44: 0F EF. DA + por xmm11, xmm0 ; 44BE _ 66 44: 0F EB. D8 + pxor xmm1, xmm11 ; 44C3 _ 66 41: 0F EF. CB + pshufd xmm11, xmm10, 0 ; 44C8 _ 66 45: 0F 70. DA, 00 + pxor xmm0, xmm11 ; 44CE _ 66 41: 0F EF. C3 + movd xmm10, dword [r12+1394H] ; 44D3 _ 66 45: 0F 6E. 94 24, 00001394 + pshufd xmm11, xmm10, 0 ; 44DD _ 66 45: 0F 70. DA, 00 + pxor xmm2, xmm11 ; 44E3 _ 66 41: 0F EF. D3 + movd xmm10, dword [r12+1398H] ; 44E8 _ 66 45: 0F 6E. 94 24, 00001398 + pshufd xmm11, xmm10, 0 ; 44F2 _ 66 45: 0F 70. DA, 00 + pxor xmm1, xmm11 ; 44F8 _ 66 41: 0F EF. CB + movd xmm10, dword [r12+139CH] ; 44FD _ 66 45: 0F 6E. 94 24, 0000139C + pshufd xmm11, xmm10, 0 ; 4507 _ 66 45: 0F 70. DA, 00 + movdqa xmm10, xmm2 ; 450D _ 66 44: 0F 6F. D2 + pxor xmm9, xmm11 ; 4512 _ 66 45: 0F EF. CB + movdqa xmm11, xmm1 ; 4517 _ 66 44: 0F 6F. D9 + psrld xmm1, 22 ; 451C _ 66: 0F 72. D1, 16 + pslld xmm10, 7 ; 4521 _ 66 41: 0F 72. F2, 07 + pslld xmm11, 10 ; 4527 _ 66 41: 0F 72. F3, 0A + por xmm11, xmm1 ; 452D _ 66 44: 0F EB. D9 + movdqa xmm1, xmm0 ; 4532 _ 66: 0F 6F. C8 + psrld xmm0, 5 ; 4536 _ 66: 0F 72. D0, 05 + pxor xmm11, xmm9 ; 453B _ 66 45: 0F EF. D9 + pslld xmm1, 27 ; 4540 _ 66: 0F 72. F1, 1B + por xmm1, xmm0 ; 4545 _ 66: 0F EB. C8 + movdqa xmm0, xmm2 ; 4549 _ 66: 0F 6F. C2 + pxor xmm11, xmm10 ; 454D _ 66 45: 0F EF. DA + movdqa xmm10, xmm9 ; 4552 _ 66 45: 0F 6F. D1 + pxor xmm1, xmm2 ; 4557 _ 66: 0F EF. CA + pxor xmm1, xmm9 ; 455B _ 66 41: 0F EF. C9 + pslld xmm10, 25 ; 4560 _ 66 41: 0F 72. F2, 19 + psrld xmm9, 7 ; 4566 _ 66 41: 0F 72. D1, 07 + por xmm10, xmm9 ; 456C _ 66 45: 0F EB. D1 + movdqa xmm9, xmm11 ; 4571 _ 66 45: 0F 6F. CB + pslld xmm0, 31 ; 4576 _ 66: 0F 72. F0, 1F + psrld xmm2, 1 ; 457B _ 66: 0F 72. D2, 01 + por xmm0, xmm2 ; 4580 _ 66: 0F EB. C2 + movdqa xmm2, xmm1 ; 4584 _ 66: 0F 6F. D1 + pxor xmm10, xmm11 ; 4588 _ 66 45: 0F EF. D3 + pxor xmm0, xmm1 ; 458D _ 66: 0F EF. C1 + pslld xmm2, 3 ; 4591 _ 66: 0F 72. F2, 03 + pxor xmm10, xmm2 ; 4596 _ 66 44: 0F EF. D2 + movdqa xmm2, xmm1 ; 459B _ 66: 0F 6F. D1 + pxor xmm0, xmm11 ; 459F _ 66 41: 0F EF. C3 + pslld xmm9, 29 ; 45A4 _ 66 41: 0F 72. F1, 1D + psrld xmm11, 3 ; 45AA _ 66 41: 0F 72. D3, 03 + por xmm9, xmm11 ; 45B0 _ 66 45: 0F EB. CB + pslld xmm2, 19 ; 45B5 _ 66: 0F 72. F2, 13 + psrld xmm1, 13 ; 45BA _ 66: 0F 72. D1, 0D + por xmm2, xmm1 ; 45BF _ 66: 0F EB. D1 + movdqa xmm1, xmm0 ; 45C3 _ 66: 0F 6F. C8 + pxor xmm9, xmm6 ; 45C7 _ 66 44: 0F EF. CE + pxor xmm0, xmm6 ; 45CC _ 66: 0F EF. C6 + por xmm1, xmm2 ; 45D0 _ 66: 0F EB. CA + pxor xmm1, xmm9 ; 45D4 _ 66 41: 0F EF. C9 + por xmm9, xmm0 ; 45D9 _ 66 44: 0F EB. C8 + pxor xmm1, xmm10 ; 45DE _ 66 41: 0F EF. CA + pxor xmm2, xmm0 ; 45E3 _ 66: 0F EF. D0 + pxor xmm9, xmm2 ; 45E7 _ 66 44: 0F EF. CA + pand xmm2, xmm10 ; 45EC _ 66 41: 0F DB. D2 + pxor xmm0, xmm2 ; 45F1 _ 66: 0F EF. C2 + por xmm2, xmm1 ; 45F5 _ 66: 0F EB. D1 + pxor xmm2, xmm9 ; 45F9 _ 66 41: 0F EF. D1 + pxor xmm10, xmm0 ; 45FE _ 66 44: 0F EF. D0 + pxor xmm9, xmm1 ; 4603 _ 66 44: 0F EF. C9 + pxor xmm10, xmm2 ; 4608 _ 66 44: 0F EF. D2 + pxor xmm10, xmm1 ; 460D _ 66 44: 0F EF. D1 + pand xmm9, xmm10 ; 4612 _ 66 45: 0F DB. 
CA + pxor xmm0, xmm9 ; 4617 _ 66 41: 0F EF. C1 + movd xmm9, dword [r12+1380H] ; 461C _ 66 45: 0F 6E. 8C 24, 00001380 + pshufd xmm11, xmm9, 0 ; 4626 _ 66 45: 0F 70. D9, 00 + pxor xmm2, xmm11 ; 462C _ 66 41: 0F EF. D3 + movd xmm9, dword [r12+1384H] ; 4631 _ 66 45: 0F 6E. 8C 24, 00001384 + pshufd xmm11, xmm9, 0 ; 463B _ 66 45: 0F 70. D9, 00 + pxor xmm0, xmm11 ; 4641 _ 66 41: 0F EF. C3 + movd xmm9, dword [r12+1388H] ; 4646 _ 66 45: 0F 6E. 8C 24, 00001388 + pshufd xmm11, xmm9, 0 ; 4650 _ 66 45: 0F 70. D9, 00 + movd xmm9, dword [r12+138CH] ; 4656 _ 66 45: 0F 6E. 8C 24, 0000138C + pxor xmm1, xmm11 ; 4660 _ 66 41: 0F EF. CB + pshufd xmm11, xmm9, 0 ; 4665 _ 66 45: 0F 70. D9, 00 + movdqa xmm9, xmm2 ; 466B _ 66 44: 0F 6F. CA + pxor xmm10, xmm11 ; 4670 _ 66 45: 0F EF. D3 + movdqa xmm11, xmm1 ; 4675 _ 66 44: 0F 6F. D9 + psrld xmm1, 22 ; 467A _ 66: 0F 72. D1, 16 + pslld xmm9, 27 ; 467F _ 66 41: 0F 72. F1, 1B + pslld xmm11, 10 ; 4685 _ 66 41: 0F 72. F3, 0A + por xmm11, xmm1 ; 468B _ 66 44: 0F EB. D9 + movdqa xmm1, xmm10 ; 4690 _ 66 41: 0F 6F. CA + psrld xmm2, 5 ; 4695 _ 66: 0F 72. D2, 05 + por xmm9, xmm2 ; 469A _ 66 44: 0F EB. CA + movdqa xmm2, xmm0 ; 469F _ 66: 0F 6F. D0 + pxor xmm11, xmm10 ; 46A3 _ 66 45: 0F EF. DA + pxor xmm9, xmm0 ; 46A8 _ 66 44: 0F EF. C8 + pslld xmm2, 7 ; 46AD _ 66: 0F 72. F2, 07 + pxor xmm11, xmm2 ; 46B2 _ 66 44: 0F EF. DA + movdqa xmm2, xmm0 ; 46B7 _ 66: 0F 6F. D0 + pxor xmm9, xmm10 ; 46BB _ 66 45: 0F EF. CA + pslld xmm1, 25 ; 46C0 _ 66: 0F 72. F1, 19 + psrld xmm10, 7 ; 46C5 _ 66 41: 0F 72. D2, 07 + por xmm1, xmm10 ; 46CB _ 66 41: 0F EB. CA + movdqa xmm10, xmm9 ; 46D0 _ 66 45: 0F 6F. D1 + pslld xmm2, 31 ; 46D5 _ 66: 0F 72. F2, 1F + psrld xmm0, 1 ; 46DA _ 66: 0F 72. D0, 01 + por xmm2, xmm0 ; 46DF _ 66: 0F EB. D0 + movdqa xmm0, xmm11 ; 46E3 _ 66 41: 0F 6F. C3 + pxor xmm1, xmm11 ; 46E8 _ 66 41: 0F EF. CB + pslld xmm10, 3 ; 46ED _ 66 41: 0F 72. F2, 03 + pxor xmm1, xmm10 ; 46F3 _ 66 41: 0F EF. CA + movdqa xmm10, xmm9 ; 46F8 _ 66 45: 0F 6F. D1 + pxor xmm2, xmm9 ; 46FD _ 66 41: 0F EF. D1 + pxor xmm2, xmm11 ; 4702 _ 66 41: 0F EF. D3 + pslld xmm0, 29 ; 4707 _ 66: 0F 72. F0, 1D + psrld xmm11, 3 ; 470C _ 66 41: 0F 72. D3, 03 + por xmm0, xmm11 ; 4712 _ 66 41: 0F EB. C3 + movd xmm11, dword [r12+1370H] ; 4717 _ 66 45: 0F 6E. 9C 24, 00001370 + pslld xmm10, 19 ; 4721 _ 66 41: 0F 72. F2, 13 + psrld xmm9, 13 ; 4727 _ 66 41: 0F 72. D1, 0D + por xmm10, xmm9 ; 472D _ 66 45: 0F EB. D1 + movdqa xmm9, xmm0 ; 4732 _ 66 44: 0F 6F. C8 + por xmm0, xmm1 ; 4737 _ 66: 0F EB. C1 + pxor xmm9, xmm10 ; 473B _ 66 45: 0F EF. CA + pand xmm10, xmm1 ; 4740 _ 66 44: 0F DB. D1 + pxor xmm9, xmm6 ; 4745 _ 66 44: 0F EF. CE + pxor xmm1, xmm2 ; 474A _ 66: 0F EF. CA + por xmm2, xmm10 ; 474E _ 66 41: 0F EB. D2 + pxor xmm10, xmm9 ; 4753 _ 66 45: 0F EF. D1 + pand xmm9, xmm0 ; 4758 _ 66 44: 0F DB. C8 + pand xmm1, xmm0 ; 475D _ 66: 0F DB. C8 + pxor xmm2, xmm9 ; 4761 _ 66 41: 0F EF. D1 + pxor xmm9, xmm10 ; 4766 _ 66 45: 0F EF. CA + por xmm10, xmm9 ; 476B _ 66 45: 0F EB. D1 + pxor xmm0, xmm2 ; 4770 _ 66: 0F EF. C2 + pxor xmm10, xmm1 ; 4774 _ 66 44: 0F EF. D1 + pxor xmm1, xmm0 ; 4779 _ 66: 0F EF. C8 + por xmm0, xmm10 ; 477D _ 66 41: 0F EB. C2 + pxor xmm1, xmm9 ; 4782 _ 66 41: 0F EF. C9 + pxor xmm0, xmm9 ; 4787 _ 66 41: 0F EF. C1 + pshufd xmm9, xmm11, 0 ; 478C _ 66 45: 0F 70. CB, 00 + pxor xmm1, xmm9 ; 4792 _ 66 41: 0F EF. C9 + movd xmm11, dword [r12+1374H] ; 4797 _ 66 45: 0F 6E. 9C 24, 00001374 + pshufd xmm9, xmm11, 0 ; 47A1 _ 66 45: 0F 70. CB, 00 + pxor xmm10, xmm9 ; 47A7 _ 66 45: 0F EF. 
D1 + movd xmm11, dword [r12+1378H] ; 47AC _ 66 45: 0F 6E. 9C 24, 00001378 + pshufd xmm9, xmm11, 0 ; 47B6 _ 66 45: 0F 70. CB, 00 + pxor xmm2, xmm9 ; 47BC _ 66 41: 0F EF. D1 + movd xmm11, dword [r12+137CH] ; 47C1 _ 66 45: 0F 6E. 9C 24, 0000137C + pshufd xmm9, xmm11, 0 ; 47CB _ 66 45: 0F 70. CB, 00 + movdqa xmm11, xmm10 ; 47D1 _ 66 45: 0F 6F. DA + pxor xmm0, xmm9 ; 47D6 _ 66 41: 0F EF. C1 + movdqa xmm9, xmm2 ; 47DB _ 66 44: 0F 6F. CA + psrld xmm2, 22 ; 47E0 _ 66: 0F 72. D2, 16 + pslld xmm11, 7 ; 47E5 _ 66 41: 0F 72. F3, 07 + pslld xmm9, 10 ; 47EB _ 66 41: 0F 72. F1, 0A + por xmm9, xmm2 ; 47F1 _ 66 44: 0F EB. CA + movdqa xmm2, xmm1 ; 47F6 _ 66: 0F 6F. D1 + psrld xmm1, 5 ; 47FA _ 66: 0F 72. D1, 05 + pxor xmm9, xmm0 ; 47FF _ 66 44: 0F EF. C8 + pslld xmm2, 27 ; 4804 _ 66: 0F 72. F2, 1B + por xmm2, xmm1 ; 4809 _ 66: 0F EB. D1 + movdqa xmm1, xmm0 ; 480D _ 66: 0F 6F. C8 + pxor xmm9, xmm11 ; 4811 _ 66 45: 0F EF. CB + pxor xmm2, xmm10 ; 4816 _ 66 41: 0F EF. D2 + pxor xmm2, xmm0 ; 481B _ 66: 0F EF. D0 + movdqa xmm11, xmm2 ; 481F _ 66 44: 0F 6F. DA + pslld xmm1, 25 ; 4824 _ 66: 0F 72. F1, 19 + psrld xmm0, 7 ; 4829 _ 66: 0F 72. D0, 07 + por xmm1, xmm0 ; 482E _ 66: 0F EB. C8 + movdqa xmm0, xmm10 ; 4832 _ 66 41: 0F 6F. C2 + psrld xmm10, 1 ; 4837 _ 66 41: 0F 72. D2, 01 + pxor xmm1, xmm9 ; 483D _ 66 41: 0F EF. C9 + pslld xmm0, 31 ; 4842 _ 66: 0F 72. F0, 1F + por xmm0, xmm10 ; 4847 _ 66 41: 0F EB. C2 + movdqa xmm10, xmm2 ; 484C _ 66 44: 0F 6F. D2 + pxor xmm0, xmm2 ; 4851 _ 66: 0F EF. C2 + pxor xmm0, xmm9 ; 4855 _ 66 41: 0F EF. C1 + pslld xmm10, 3 ; 485A _ 66 41: 0F 72. F2, 03 + pxor xmm1, xmm10 ; 4860 _ 66 41: 0F EF. CA + movdqa xmm10, xmm9 ; 4865 _ 66 45: 0F 6F. D1 + psrld xmm9, 3 ; 486A _ 66 41: 0F 72. D1, 03 + pslld xmm11, 19 ; 4870 _ 66 41: 0F 72. F3, 13 + pslld xmm10, 29 ; 4876 _ 66 41: 0F 72. F2, 1D + por xmm10, xmm9 ; 487C _ 66 45: 0F EB. D1 + movdqa xmm9, xmm10 ; 4881 _ 66 45: 0F 6F. CA + psrld xmm2, 13 ; 4886 _ 66: 0F 72. D2, 0D + por xmm11, xmm2 ; 488B _ 66 44: 0F EB. DA + pxor xmm11, xmm10 ; 4890 _ 66 45: 0F EF. DA + pand xmm9, xmm11 ; 4895 _ 66 45: 0F DB. CB + pxor xmm10, xmm1 ; 489A _ 66 44: 0F EF. D1 + pxor xmm9, xmm6 ; 489F _ 66 44: 0F EF. CE + pxor xmm1, xmm0 ; 48A4 _ 66: 0F EF. C8 + movd xmm2, dword [r12+1360H] ; 48A8 _ 66 41: 0F 6E. 94 24, 00001360 + pxor xmm9, xmm1 ; 48B2 _ 66 44: 0F EF. C9 + por xmm10, xmm11 ; 48B7 _ 66 45: 0F EB. D3 + pxor xmm11, xmm9 ; 48BC _ 66 45: 0F EF. D9 + pxor xmm1, xmm10 ; 48C1 _ 66 41: 0F EF. CA + pxor xmm10, xmm0 ; 48C6 _ 66 44: 0F EF. D0 + pand xmm0, xmm1 ; 48CB _ 66: 0F DB. C1 + pxor xmm0, xmm11 ; 48CF _ 66 41: 0F EF. C3 + pxor xmm11, xmm1 ; 48D4 _ 66 44: 0F EF. D9 + por xmm11, xmm9 ; 48D9 _ 66 45: 0F EB. D9 + pxor xmm1, xmm0 ; 48DE _ 66: 0F EF. C8 + pxor xmm10, xmm11 ; 48E2 _ 66 45: 0F EF. D3 + pshufd xmm11, xmm2, 0 ; 48E7 _ 66 44: 0F 70. DA, 00 + movd xmm2, dword [r12+1364H] ; 48ED _ 66 41: 0F 6E. 94 24, 00001364 + pxor xmm0, xmm11 ; 48F7 _ 66 41: 0F EF. C3 + pshufd xmm11, xmm2, 0 ; 48FC _ 66 44: 0F 70. DA, 00 + pxor xmm9, xmm11 ; 4902 _ 66 45: 0F EF. CB + movd xmm2, dword [r12+1368H] ; 4907 _ 66 41: 0F 6E. 94 24, 00001368 + pshufd xmm11, xmm2, 0 ; 4911 _ 66 44: 0F 70. DA, 00 + pxor xmm10, xmm11 ; 4917 _ 66 45: 0F EF. D3 + movd xmm2, dword [r12+136CH] ; 491C _ 66 41: 0F 6E. 94 24, 0000136C + pshufd xmm11, xmm2, 0 ; 4926 _ 66 44: 0F 70. DA, 00 + movdqa xmm2, xmm10 ; 492C _ 66 41: 0F 6F. D2 + pxor xmm1, xmm11 ; 4931 _ 66 41: 0F EF. CB + movdqa xmm11, xmm9 ; 4936 _ 66 45: 0F 6F. D9 + pslld xmm2, 10 ; 493B _ 66: 0F 72. 
F2, 0A + psrld xmm10, 22 ; 4940 _ 66 41: 0F 72. D2, 16 + por xmm2, xmm10 ; 4946 _ 66 41: 0F EB. D2 + movdqa xmm10, xmm0 ; 494B _ 66 44: 0F 6F. D0 + psrld xmm0, 5 ; 4950 _ 66: 0F 72. D0, 05 + pxor xmm2, xmm1 ; 4955 _ 66: 0F EF. D1 + pslld xmm10, 27 ; 4959 _ 66 41: 0F 72. F2, 1B + por xmm10, xmm0 ; 495F _ 66 44: 0F EB. D0 + movdqa xmm0, xmm1 ; 4964 _ 66: 0F 6F. C1 + pslld xmm11, 7 ; 4968 _ 66 41: 0F 72. F3, 07 + pxor xmm2, xmm11 ; 496E _ 66 41: 0F EF. D3 + movdqa xmm11, xmm9 ; 4973 _ 66 45: 0F 6F. D9 + pxor xmm10, xmm9 ; 4978 _ 66 45: 0F EF. D1 + pxor xmm10, xmm1 ; 497D _ 66 44: 0F EF. D1 + pslld xmm0, 25 ; 4982 _ 66: 0F 72. F0, 19 + psrld xmm1, 7 ; 4987 _ 66: 0F 72. D1, 07 + por xmm0, xmm1 ; 498C _ 66: 0F EB. C1 + movdqa xmm1, xmm2 ; 4990 _ 66: 0F 6F. CA + pslld xmm11, 31 ; 4994 _ 66 41: 0F 72. F3, 1F + psrld xmm9, 1 ; 499A _ 66 41: 0F 72. D1, 01 + por xmm11, xmm9 ; 49A0 _ 66 45: 0F EB. D9 + movdqa xmm9, xmm10 ; 49A5 _ 66 45: 0F 6F. CA + pxor xmm0, xmm2 ; 49AA _ 66: 0F EF. C2 + pxor xmm11, xmm10 ; 49AE _ 66 45: 0F EF. DA + pslld xmm9, 3 ; 49B3 _ 66 41: 0F 72. F1, 03 + pxor xmm0, xmm9 ; 49B9 _ 66 41: 0F EF. C1 + movdqa xmm9, xmm0 ; 49BE _ 66 44: 0F 6F. C8 + pxor xmm11, xmm2 ; 49C3 _ 66 44: 0F EF. DA + pslld xmm1, 29 ; 49C8 _ 66: 0F 72. F1, 1D + psrld xmm2, 3 ; 49CD _ 66: 0F 72. D2, 03 + por xmm1, xmm2 ; 49D2 _ 66: 0F EB. CA + movdqa xmm2, xmm10 ; 49D6 _ 66 41: 0F 6F. D2 + psrld xmm10, 13 ; 49DB _ 66 41: 0F 72. D2, 0D + pxor xmm11, xmm6 ; 49E1 _ 66 44: 0F EF. DE + pslld xmm2, 19 ; 49E6 _ 66: 0F 72. F2, 13 + por xmm2, xmm10 ; 49EB _ 66 41: 0F EB. D2 + pxor xmm1, xmm11 ; 49F0 _ 66 41: 0F EF. CB + por xmm9, xmm2 ; 49F5 _ 66 44: 0F EB. CA + pxor xmm9, xmm1 ; 49FA _ 66 44: 0F EF. C9 + movd xmm10, dword [r12+1350H] ; 49FF _ 66 45: 0F 6E. 94 24, 00001350 + por xmm1, xmm11 ; 4A09 _ 66 41: 0F EB. CB + pand xmm1, xmm2 ; 4A0E _ 66: 0F DB. CA + pxor xmm0, xmm9 ; 4A12 _ 66 41: 0F EF. C1 + pxor xmm1, xmm0 ; 4A17 _ 66: 0F EF. C8 + por xmm0, xmm2 ; 4A1B _ 66: 0F EB. C2 + pxor xmm0, xmm11 ; 4A1F _ 66 41: 0F EF. C3 + pand xmm11, xmm1 ; 4A24 _ 66 44: 0F DB. D9 + pxor xmm11, xmm9 ; 4A29 _ 66 45: 0F EF. D9 + pxor xmm0, xmm1 ; 4A2E _ 66: 0F EF. C1 + pand xmm9, xmm0 ; 4A32 _ 66 44: 0F DB. C8 + pxor xmm0, xmm11 ; 4A37 _ 66 41: 0F EF. C3 + pxor xmm9, xmm0 ; 4A3C _ 66 44: 0F EF. C8 + pxor xmm0, xmm6 ; 4A41 _ 66: 0F EF. C6 + pxor xmm9, xmm2 ; 4A45 _ 66 44: 0F EF. CA + pshufd xmm2, xmm10, 0 ; 4A4A _ 66 41: 0F 70. D2, 00 + pxor xmm11, xmm2 ; 4A50 _ 66 44: 0F EF. DA + movd xmm10, dword [r12+1354H] ; 4A55 _ 66 45: 0F 6E. 94 24, 00001354 + pshufd xmm2, xmm10, 0 ; 4A5F _ 66 41: 0F 70. D2, 00 + pxor xmm0, xmm2 ; 4A65 _ 66: 0F EF. C2 + movd xmm2, dword [r12+1358H] ; 4A69 _ 66 41: 0F 6E. 94 24, 00001358 + pshufd xmm10, xmm2, 0 ; 4A73 _ 66 44: 0F 70. D2, 00 + pxor xmm9, xmm10 ; 4A79 _ 66 45: 0F EF. CA + movd xmm10, dword [r12+135CH] ; 4A7E _ 66 45: 0F 6E. 94 24, 0000135C + pshufd xmm2, xmm10, 0 ; 4A88 _ 66 41: 0F 70. D2, 00 + pxor xmm1, xmm2 ; 4A8E _ 66: 0F EF. CA + movdqa xmm2, xmm9 ; 4A92 _ 66 41: 0F 6F. D1 + psrld xmm9, 22 ; 4A97 _ 66 41: 0F 72. D1, 16 + pslld xmm2, 10 ; 4A9D _ 66: 0F 72. F2, 0A + por xmm2, xmm9 ; 4AA2 _ 66 41: 0F EB. D1 + movdqa xmm9, xmm11 ; 4AA7 _ 66 45: 0F 6F. CB + psrld xmm11, 5 ; 4AAC _ 66 41: 0F 72. D3, 05 + pxor xmm2, xmm1 ; 4AB2 _ 66: 0F EF. D1 + pslld xmm9, 27 ; 4AB6 _ 66 41: 0F 72. F1, 1B + por xmm9, xmm11 ; 4ABC _ 66 45: 0F EB. CB + movdqa xmm11, xmm0 ; 4AC1 _ 66 44: 0F 6F. D8 + pxor xmm9, xmm0 ; 4AC6 _ 66 44: 0F EF. C8 + pxor xmm9, xmm1 ; 4ACB _ 66 44: 0F EF. 
C9 + movdqa xmm10, xmm9 ; 4AD0 _ 66 45: 0F 6F. D1 + pslld xmm11, 7 ; 4AD5 _ 66 41: 0F 72. F3, 07 + pxor xmm2, xmm11 ; 4ADB _ 66 41: 0F EF. D3 + movdqa xmm11, xmm1 ; 4AE0 _ 66 44: 0F 6F. D9 + psrld xmm1, 7 ; 4AE5 _ 66: 0F 72. D1, 07 + pslld xmm10, 3 ; 4AEA _ 66 41: 0F 72. F2, 03 + pslld xmm11, 25 ; 4AF0 _ 66 41: 0F 72. F3, 19 + por xmm11, xmm1 ; 4AF6 _ 66 44: 0F EB. D9 + movdqa xmm1, xmm0 ; 4AFB _ 66: 0F 6F. C8 + psrld xmm0, 1 ; 4AFF _ 66: 0F 72. D0, 01 + pxor xmm11, xmm2 ; 4B04 _ 66 44: 0F EF. DA + pslld xmm1, 31 ; 4B09 _ 66: 0F 72. F1, 1F + por xmm1, xmm0 ; 4B0E _ 66: 0F EB. C8 + movdqa xmm0, xmm9 ; 4B12 _ 66 41: 0F 6F. C1 + pxor xmm11, xmm10 ; 4B17 _ 66 45: 0F EF. DA + movdqa xmm10, xmm2 ; 4B1C _ 66 44: 0F 6F. D2 + pxor xmm1, xmm9 ; 4B21 _ 66 41: 0F EF. C9 + pxor xmm1, xmm2 ; 4B26 _ 66: 0F EF. CA + pslld xmm10, 29 ; 4B2A _ 66 41: 0F 72. F2, 1D + psrld xmm2, 3 ; 4B30 _ 66: 0F 72. D2, 03 + por xmm10, xmm2 ; 4B35 _ 66 44: 0F EB. D2 + movdqa xmm2, xmm10 ; 4B3A _ 66 41: 0F 6F. D2 + pslld xmm0, 19 ; 4B3F _ 66: 0F 72. F0, 13 + psrld xmm9, 13 ; 4B44 _ 66 41: 0F 72. D1, 0D + por xmm0, xmm9 ; 4B4A _ 66 41: 0F EB. C1 + movd xmm9, dword [r12+1340H] ; 4B4F _ 66 45: 0F 6E. 8C 24, 00001340 + pand xmm2, xmm11 ; 4B59 _ 66 41: 0F DB. D3 + pxor xmm2, xmm1 ; 4B5E _ 66: 0F EF. D1 + por xmm1, xmm11 ; 4B62 _ 66 41: 0F EB. CB + pand xmm1, xmm0 ; 4B67 _ 66: 0F DB. C8 + pxor xmm10, xmm2 ; 4B6B _ 66 44: 0F EF. D2 + pxor xmm10, xmm1 ; 4B70 _ 66 44: 0F EF. D1 + pand xmm1, xmm2 ; 4B75 _ 66: 0F DB. CA + pxor xmm0, xmm6 ; 4B79 _ 66: 0F EF. C6 + pxor xmm11, xmm10 ; 4B7D _ 66 45: 0F EF. DA + pxor xmm1, xmm11 ; 4B82 _ 66 41: 0F EF. CB + pand xmm11, xmm0 ; 4B87 _ 66 44: 0F DB. D8 + pxor xmm11, xmm2 ; 4B8C _ 66 44: 0F EF. DA + pxor xmm0, xmm1 ; 4B91 _ 66: 0F EF. C1 + pand xmm2, xmm0 ; 4B95 _ 66: 0F DB. D0 + pxor xmm11, xmm0 ; 4B99 _ 66 44: 0F EF. D8 + pxor xmm2, xmm10 ; 4B9E _ 66 41: 0F EF. D2 + por xmm2, xmm11 ; 4BA3 _ 66 41: 0F EB. D3 + pxor xmm11, xmm0 ; 4BA8 _ 66 44: 0F EF. D8 + pxor xmm2, xmm1 ; 4BAD _ 66: 0F EF. D1 + pshufd xmm9, xmm9, 0 ; 4BB1 _ 66 45: 0F 70. C9, 00 + pxor xmm0, xmm9 ; 4BB7 _ 66 41: 0F EF. C1 + movd xmm9, dword [r12+1344H] ; 4BBC _ 66 45: 0F 6E. 8C 24, 00001344 + pshufd xmm9, xmm9, 0 ; 4BC6 _ 66 45: 0F 70. C9, 00 + pxor xmm11, xmm9 ; 4BCC _ 66 45: 0F EF. D9 + movd xmm9, dword [r12+1348H] ; 4BD1 _ 66 45: 0F 6E. 8C 24, 00001348 + pshufd xmm9, xmm9, 0 ; 4BDB _ 66 45: 0F 70. C9, 00 + pxor xmm2, xmm9 ; 4BE1 _ 66 41: 0F EF. D1 + movdqa xmm1, xmm2 ; 4BE6 _ 66: 0F 6F. CA + psrld xmm2, 22 ; 4BEA _ 66: 0F 72. D2, 16 + pslld xmm1, 10 ; 4BEF _ 66: 0F 72. F1, 0A + por xmm1, xmm2 ; 4BF4 _ 66: 0F EB. CA + movdqa xmm2, xmm0 ; 4BF8 _ 66: 0F 6F. D0 + movd xmm9, dword [r12+134CH] ; 4BFC _ 66 45: 0F 6E. 8C 24, 0000134C + pshufd xmm9, xmm9, 0 ; 4C06 _ 66 45: 0F 70. C9, 00 + pxor xmm10, xmm9 ; 4C0C _ 66 45: 0F EF. D1 + movdqa xmm9, xmm11 ; 4C11 _ 66 45: 0F 6F. CB + pslld xmm2, 27 ; 4C16 _ 66: 0F 72. F2, 1B + psrld xmm0, 5 ; 4C1B _ 66: 0F 72. D0, 05 + por xmm2, xmm0 ; 4C20 _ 66: 0F EB. D0 + movdqa xmm0, xmm11 ; 4C24 _ 66 41: 0F 6F. C3 + pxor xmm1, xmm10 ; 4C29 _ 66 41: 0F EF. CA + pslld xmm9, 7 ; 4C2E _ 66 41: 0F 72. F1, 07 + pxor xmm1, xmm9 ; 4C34 _ 66 41: 0F EF. C9 + movdqa xmm9, xmm10 ; 4C39 _ 66 45: 0F 6F. CA + pxor xmm2, xmm11 ; 4C3E _ 66 41: 0F EF. D3 + pxor xmm2, xmm10 ; 4C43 _ 66 41: 0F EF. D2 + pslld xmm9, 25 ; 4C48 _ 66 41: 0F 72. F1, 19 + psrld xmm10, 7 ; 4C4E _ 66 41: 0F 72. D2, 07 + por xmm9, xmm10 ; 4C54 _ 66 45: 0F EB. CA + movdqa xmm10, xmm1 ; 4C59 _ 66 44: 0F 6F. D1 + pslld xmm0, 31 ; 4C5E _ 66: 0F 72. 
F0, 1F + psrld xmm11, 1 ; 4C63 _ 66 41: 0F 72. D3, 01 + por xmm0, xmm11 ; 4C69 _ 66 41: 0F EB. C3 + movdqa xmm11, xmm2 ; 4C6E _ 66 44: 0F 6F. DA + pxor xmm9, xmm1 ; 4C73 _ 66 44: 0F EF. C9 + pxor xmm0, xmm2 ; 4C78 _ 66: 0F EF. C2 + pslld xmm11, 3 ; 4C7C _ 66 41: 0F 72. F3, 03 + pxor xmm9, xmm11 ; 4C82 _ 66 45: 0F EF. CB + movdqa xmm11, xmm2 ; 4C87 _ 66 44: 0F 6F. DA + pxor xmm0, xmm1 ; 4C8C _ 66: 0F EF. C1 + pslld xmm10, 29 ; 4C90 _ 66 41: 0F 72. F2, 1D + psrld xmm1, 3 ; 4C96 _ 66: 0F 72. D1, 03 + por xmm10, xmm1 ; 4C9B _ 66 44: 0F EB. D1 + movdqa xmm1, xmm10 ; 4CA0 _ 66 41: 0F 6F. CA + pslld xmm11, 19 ; 4CA5 _ 66 41: 0F 72. F3, 13 + psrld xmm2, 13 ; 4CAB _ 66: 0F 72. D2, 0D + por xmm11, xmm2 ; 4CB0 _ 66 44: 0F EB. DA + pxor xmm1, xmm0 ; 4CB5 _ 66: 0F EF. C8 + pxor xmm11, xmm1 ; 4CB9 _ 66 44: 0F EF. D9 + movd xmm2, dword [r12+1330H] ; 4CBE _ 66 41: 0F 6E. 94 24, 00001330 + pand xmm10, xmm1 ; 4CC8 _ 66 44: 0F DB. D1 + pxor xmm10, xmm11 ; 4CCD _ 66 45: 0F EF. D3 + pand xmm11, xmm0 ; 4CD2 _ 66 44: 0F DB. D8 + pxor xmm0, xmm9 ; 4CD7 _ 66 41: 0F EF. C1 + por xmm9, xmm10 ; 4CDC _ 66 45: 0F EB. CA + pxor xmm1, xmm9 ; 4CE1 _ 66 41: 0F EF. C9 + pxor xmm11, xmm9 ; 4CE6 _ 66 45: 0F EF. D9 + pxor xmm0, xmm10 ; 4CEB _ 66 41: 0F EF. C2 + pand xmm9, xmm1 ; 4CF0 _ 66 44: 0F DB. C9 + pxor xmm9, xmm0 ; 4CF5 _ 66 44: 0F EF. C8 + pxor xmm0, xmm11 ; 4CFA _ 66 41: 0F EF. C3 + por xmm0, xmm1 ; 4CFF _ 66: 0F EB. C1 + pxor xmm11, xmm9 ; 4D03 _ 66 45: 0F EF. D9 + pxor xmm0, xmm10 ; 4D08 _ 66 41: 0F EF. C2 + pxor xmm11, xmm0 ; 4D0D _ 66 44: 0F EF. D8 + pshufd xmm10, xmm2, 0 ; 4D12 _ 66 44: 0F 70. D2, 00 + pxor xmm1, xmm10 ; 4D18 _ 66 41: 0F EF. CA + movd xmm2, dword [r12+1334H] ; 4D1D _ 66 41: 0F 6E. 94 24, 00001334 + pshufd xmm10, xmm2, 0 ; 4D27 _ 66 44: 0F 70. D2, 00 + movd xmm2, dword [r12+1338H] ; 4D2D _ 66 41: 0F 6E. 94 24, 00001338 + pxor xmm0, xmm10 ; 4D37 _ 66 41: 0F EF. C2 + pshufd xmm10, xmm2, 0 ; 4D3C _ 66 44: 0F 70. D2, 00 + pxor xmm9, xmm10 ; 4D42 _ 66 45: 0F EF. CA + movd xmm2, dword [r12+133CH] ; 4D47 _ 66 41: 0F 6E. 94 24, 0000133C + pshufd xmm10, xmm2, 0 ; 4D51 _ 66 44: 0F 70. D2, 00 + movdqa xmm2, xmm9 ; 4D57 _ 66 41: 0F 6F. D1 + pxor xmm11, xmm10 ; 4D5C _ 66 45: 0F EF. DA + movdqa xmm10, xmm1 ; 4D61 _ 66 44: 0F 6F. D1 + pslld xmm2, 10 ; 4D66 _ 66: 0F 72. F2, 0A + psrld xmm9, 22 ; 4D6B _ 66 41: 0F 72. D1, 16 + por xmm2, xmm9 ; 4D71 _ 66 41: 0F EB. D1 + movdqa xmm9, xmm0 ; 4D76 _ 66 44: 0F 6F. C8 + pslld xmm10, 27 ; 4D7B _ 66 41: 0F 72. F2, 1B + psrld xmm1, 5 ; 4D81 _ 66: 0F 72. D1, 05 + por xmm10, xmm1 ; 4D86 _ 66 44: 0F EB. D1 + pxor xmm2, xmm11 ; 4D8B _ 66 41: 0F EF. D3 + pslld xmm9, 7 ; 4D90 _ 66 41: 0F 72. F1, 07 + pxor xmm2, xmm9 ; 4D96 _ 66 41: 0F EF. D1 + movdqa xmm9, xmm11 ; 4D9B _ 66 45: 0F 6F. CB + pxor xmm10, xmm0 ; 4DA0 _ 66 44: 0F EF. D0 + pxor xmm10, xmm11 ; 4DA5 _ 66 45: 0F EF. D3 + movdqa xmm1, xmm10 ; 4DAA _ 66 41: 0F 6F. CA + pslld xmm9, 25 ; 4DAF _ 66 41: 0F 72. F1, 19 + psrld xmm11, 7 ; 4DB5 _ 66 41: 0F 72. D3, 07 + por xmm9, xmm11 ; 4DBB _ 66 45: 0F EB. CB + movdqa xmm11, xmm0 ; 4DC0 _ 66 44: 0F 6F. D8 + psrld xmm0, 1 ; 4DC5 _ 66: 0F 72. D0, 01 + pxor xmm9, xmm2 ; 4DCA _ 66 44: 0F EF. CA + pslld xmm11, 31 ; 4DCF _ 66 41: 0F 72. F3, 1F + por xmm11, xmm0 ; 4DD5 _ 66 44: 0F EB. D8 + movdqa xmm0, xmm10 ; 4DDA _ 66 41: 0F 6F. C2 + pxor xmm11, xmm10 ; 4DDF _ 66 45: 0F EF. DA + pxor xmm11, xmm2 ; 4DE4 _ 66 44: 0F EF. DA + pslld xmm0, 3 ; 4DE9 _ 66: 0F 72. F0, 03 + pxor xmm9, xmm0 ; 4DEE _ 66 44: 0F EF. C8 + movdqa xmm0, xmm2 ; 4DF3 _ 66: 0F 6F. C2 + psrld xmm2, 3 ; 4DF7 _ 66: 0F 72. 
D2, 03 + pslld xmm1, 19 ; 4DFC _ 66: 0F 72. F1, 13 + pslld xmm0, 29 ; 4E01 _ 66: 0F 72. F0, 1D + por xmm0, xmm2 ; 4E06 _ 66: 0F EB. C2 + psrld xmm10, 13 ; 4E0A _ 66 41: 0F 72. D2, 0D + por xmm1, xmm10 ; 4E10 _ 66 41: 0F EB. CA + pxor xmm0, xmm9 ; 4E15 _ 66 41: 0F EF. C1 + pxor xmm9, xmm1 ; 4E1A _ 66 44: 0F EF. C9 + movdqa xmm2, xmm9 ; 4E1F _ 66 41: 0F 6F. D1 + pand xmm2, xmm0 ; 4E24 _ 66: 0F DB. D0 + pxor xmm2, xmm11 ; 4E28 _ 66 41: 0F EF. D3 + por xmm11, xmm0 ; 4E2D _ 66 44: 0F EB. D8 + pxor xmm11, xmm9 ; 4E32 _ 66 45: 0F EF. D9 + pand xmm9, xmm2 ; 4E37 _ 66 44: 0F DB. CA + pxor xmm0, xmm2 ; 4E3C _ 66: 0F EF. C2 + pand xmm9, xmm1 ; 4E40 _ 66 44: 0F DB. C9 + pxor xmm9, xmm0 ; 4E45 _ 66 44: 0F EF. C8 + pand xmm0, xmm11 ; 4E4A _ 66 41: 0F DB. C3 + por xmm0, xmm1 ; 4E4F _ 66: 0F EB. C1 + pxor xmm2, xmm6 ; 4E53 _ 66: 0F EF. D6 + movdqa xmm10, xmm2 ; 4E57 _ 66 44: 0F 6F. D2 + pxor xmm0, xmm2 ; 4E5C _ 66: 0F EF. C2 + pxor xmm1, xmm2 ; 4E60 _ 66: 0F EF. CA + pxor xmm10, xmm9 ; 4E64 _ 66 45: 0F EF. D1 + pand xmm1, xmm11 ; 4E69 _ 66 41: 0F DB. CB + movd xmm2, dword [r12+1320H] ; 4E6E _ 66 41: 0F 6E. 94 24, 00001320 + pxor xmm10, xmm1 ; 4E78 _ 66 44: 0F EF. D1 + pshufd xmm2, xmm2, 0 ; 4E7D _ 66: 0F 70. D2, 00 + pxor xmm11, xmm2 ; 4E82 _ 66 44: 0F EF. DA + movd xmm2, dword [r12+1324H] ; 4E87 _ 66 41: 0F 6E. 94 24, 00001324 + pshufd xmm2, xmm2, 0 ; 4E91 _ 66: 0F 70. D2, 00 + pxor xmm9, xmm2 ; 4E96 _ 66 44: 0F EF. CA + movd xmm2, dword [r12+1328H] ; 4E9B _ 66 41: 0F 6E. 94 24, 00001328 + pshufd xmm2, xmm2, 0 ; 4EA5 _ 66: 0F 70. D2, 00 + pxor xmm0, xmm2 ; 4EAA _ 66: 0F EF. C2 + movdqa xmm1, xmm0 ; 4EAE _ 66: 0F 6F. C8 + psrld xmm0, 22 ; 4EB2 _ 66: 0F 72. D0, 16 + pslld xmm1, 10 ; 4EB7 _ 66: 0F 72. F1, 0A + por xmm1, xmm0 ; 4EBC _ 66: 0F EB. C8 + movd xmm2, dword [r12+132CH] ; 4EC0 _ 66 41: 0F 6E. 94 24, 0000132C + pshufd xmm2, xmm2, 0 ; 4ECA _ 66: 0F 70. D2, 00 + pxor xmm10, xmm2 ; 4ECF _ 66 44: 0F EF. D2 + movdqa xmm2, xmm11 ; 4ED4 _ 66 41: 0F 6F. D3 + psrld xmm11, 5 ; 4ED9 _ 66 41: 0F 72. D3, 05 + pxor xmm1, xmm10 ; 4EDF _ 66 41: 0F EF. CA + pslld xmm2, 27 ; 4EE4 _ 66: 0F 72. F2, 1B + por xmm2, xmm11 ; 4EE9 _ 66 41: 0F EB. D3 + movdqa xmm11, xmm9 ; 4EEE _ 66 45: 0F 6F. D9 + pxor xmm2, xmm9 ; 4EF3 _ 66 41: 0F EF. D1 + pxor xmm2, xmm10 ; 4EF8 _ 66 41: 0F EF. D2 + pslld xmm11, 7 ; 4EFD _ 66 41: 0F 72. F3, 07 + pxor xmm1, xmm11 ; 4F03 _ 66 41: 0F EF. CB + movdqa xmm11, xmm10 ; 4F08 _ 66 45: 0F 6F. DA + movdqa xmm0, xmm1 ; 4F0D _ 66: 0F 6F. C1 + psrld xmm10, 7 ; 4F11 _ 66 41: 0F 72. D2, 07 + pslld xmm11, 25 ; 4F17 _ 66 41: 0F 72. F3, 19 + por xmm11, xmm10 ; 4F1D _ 66 45: 0F EB. DA + movdqa xmm10, xmm9 ; 4F22 _ 66 45: 0F 6F. D1 + psrld xmm9, 1 ; 4F27 _ 66 41: 0F 72. D1, 01 + pxor xmm11, xmm1 ; 4F2D _ 66 44: 0F EF. D9 + pslld xmm10, 31 ; 4F32 _ 66 41: 0F 72. F2, 1F + por xmm10, xmm9 ; 4F38 _ 66 45: 0F EB. D1 + movdqa xmm9, xmm2 ; 4F3D _ 66 44: 0F 6F. CA + pxor xmm10, xmm2 ; 4F42 _ 66 44: 0F EF. D2 + pxor xmm10, xmm1 ; 4F47 _ 66 44: 0F EF. D1 + pslld xmm9, 3 ; 4F4C _ 66 41: 0F 72. F1, 03 + pxor xmm11, xmm9 ; 4F52 _ 66 45: 0F EF. D9 + movdqa xmm9, xmm2 ; 4F57 _ 66 44: 0F 6F. CA + pslld xmm0, 29 ; 4F5C _ 66: 0F 72. F0, 1D + psrld xmm1, 3 ; 4F61 _ 66: 0F 72. D1, 03 + por xmm0, xmm1 ; 4F66 _ 66: 0F EB. C1 + pslld xmm9, 19 ; 4F6A _ 66 41: 0F 72. F1, 13 + psrld xmm2, 13 ; 4F70 _ 66: 0F 72. D2, 0D + por xmm9, xmm2 ; 4F75 _ 66 44: 0F EB. CA + movdqa xmm2, xmm10 ; 4F7A _ 66 41: 0F 6F. D2 + pxor xmm10, xmm0 ; 4F7F _ 66 44: 0F EF. D0 + pxor xmm2, xmm11 ; 4F84 _ 66 41: 0F EF. D3 + pand xmm11, xmm2 ; 4F89 _ 66 44: 0F DB. 
DA + pxor xmm11, xmm9 ; 4F8E _ 66 45: 0F EF. D9 + por xmm9, xmm2 ; 4F93 _ 66 44: 0F EB. CA + pxor xmm0, xmm11 ; 4F98 _ 66 41: 0F EF. C3 + pxor xmm9, xmm10 ; 4F9D _ 66 45: 0F EF. CA + por xmm9, xmm0 ; 4FA2 _ 66 44: 0F EB. C8 + pxor xmm2, xmm11 ; 4FA7 _ 66 41: 0F EF. D3 + pxor xmm9, xmm2 ; 4FAC _ 66 44: 0F EF. CA + por xmm2, xmm11 ; 4FB1 _ 66 41: 0F EB. D3 + pxor xmm2, xmm9 ; 4FB6 _ 66 41: 0F EF. D1 + pxor xmm10, xmm6 ; 4FBB _ 66 44: 0F EF. D6 + pxor xmm10, xmm2 ; 4FC0 _ 66 44: 0F EF. D2 + por xmm2, xmm9 ; 4FC5 _ 66 41: 0F EB. D1 + pxor xmm2, xmm9 ; 4FCA _ 66 41: 0F EF. D1 + por xmm2, xmm10 ; 4FCF _ 66 41: 0F EB. D2 + pxor xmm11, xmm2 ; 4FD4 _ 66 44: 0F EF. DA + movd xmm2, dword [r12+1310H] ; 4FD9 _ 66 41: 0F 6E. 94 24, 00001310 + pshufd xmm2, xmm2, 0 ; 4FE3 _ 66: 0F 70. D2, 00 + pxor xmm10, xmm2 ; 4FE8 _ 66 44: 0F EF. D2 + movd xmm2, dword [r12+1314H] ; 4FED _ 66 41: 0F 6E. 94 24, 00001314 + pshufd xmm2, xmm2, 0 ; 4FF7 _ 66: 0F 70. D2, 00 + pxor xmm9, xmm2 ; 4FFC _ 66 44: 0F EF. CA + movd xmm2, dword [r12+1318H] ; 5001 _ 66 41: 0F 6E. 94 24, 00001318 + pshufd xmm2, xmm2, 0 ; 500B _ 66: 0F 70. D2, 00 + pxor xmm11, xmm2 ; 5010 _ 66 44: 0F EF. DA + movd xmm2, dword [r12+131CH] ; 5015 _ 66 41: 0F 6E. 94 24, 0000131C + pshufd xmm2, xmm2, 0 ; 501F _ 66: 0F 70. D2, 00 + pxor xmm0, xmm2 ; 5024 _ 66: 0F EF. C2 + movdqa xmm2, xmm11 ; 5028 _ 66 41: 0F 6F. D3 + movdqa xmm1, xmm0 ; 502D _ 66: 0F 6F. C8 + psrld xmm11, 22 ; 5031 _ 66 41: 0F 72. D3, 16 + pslld xmm2, 10 ; 5037 _ 66: 0F 72. F2, 0A + por xmm2, xmm11 ; 503C _ 66 41: 0F EB. D3 + movdqa xmm11, xmm10 ; 5041 _ 66 45: 0F 6F. DA + psrld xmm10, 5 ; 5046 _ 66 41: 0F 72. D2, 05 + pxor xmm2, xmm0 ; 504C _ 66: 0F EF. D0 + pslld xmm11, 27 ; 5050 _ 66 41: 0F 72. F3, 1B + por xmm11, xmm10 ; 5056 _ 66 45: 0F EB. DA + movdqa xmm10, xmm9 ; 505B _ 66 45: 0F 6F. D1 + pxor xmm11, xmm9 ; 5060 _ 66 45: 0F EF. D9 + pxor xmm11, xmm0 ; 5065 _ 66 44: 0F EF. D8 + pslld xmm10, 7 ; 506A _ 66 41: 0F 72. F2, 07 + pxor xmm2, xmm10 ; 5070 _ 66 41: 0F EF. D2 + movdqa xmm10, xmm9 ; 5075 _ 66 45: 0F 6F. D1 + pslld xmm1, 25 ; 507A _ 66: 0F 72. F1, 19 + psrld xmm0, 7 ; 507F _ 66: 0F 72. D0, 07 + por xmm1, xmm0 ; 5084 _ 66: 0F EB. C8 + movdqa xmm0, xmm11 ; 5088 _ 66 41: 0F 6F. C3 + pslld xmm10, 31 ; 508D _ 66 41: 0F 72. F2, 1F + psrld xmm9, 1 ; 5093 _ 66 41: 0F 72. D1, 01 + por xmm10, xmm9 ; 5099 _ 66 45: 0F EB. D1 + movdqa xmm9, xmm11 ; 509E _ 66 45: 0F 6F. CB + pxor xmm1, xmm2 ; 50A3 _ 66: 0F EF. CA + pxor xmm10, xmm11 ; 50A7 _ 66 45: 0F EF. D3 + pslld xmm9, 3 ; 50AC _ 66 41: 0F 72. F1, 03 + pxor xmm1, xmm9 ; 50B2 _ 66 41: 0F EF. C9 + movdqa xmm9, xmm2 ; 50B7 _ 66 44: 0F 6F. CA + pxor xmm10, xmm2 ; 50BC _ 66 44: 0F EF. D2 + psrld xmm2, 3 ; 50C1 _ 66: 0F 72. D2, 03 + pslld xmm9, 29 ; 50C6 _ 66 41: 0F 72. F1, 1D + por xmm9, xmm2 ; 50CC _ 66 44: 0F EB. CA + pslld xmm0, 19 ; 50D1 _ 66: 0F 72. F0, 13 + psrld xmm11, 13 ; 50D6 _ 66 41: 0F 72. D3, 0D + por xmm0, xmm11 ; 50DC _ 66 41: 0F EB. C3 + movdqa xmm11, xmm10 ; 50E1 _ 66 45: 0F 6F. DA + pxor xmm9, xmm6 ; 50E6 _ 66 44: 0F EF. CE + pxor xmm10, xmm6 ; 50EB _ 66 44: 0F EF. D6 + por xmm11, xmm0 ; 50F0 _ 66 44: 0F EB. D8 + pxor xmm11, xmm9 ; 50F5 _ 66 45: 0F EF. D9 + por xmm9, xmm10 ; 50FA _ 66 45: 0F EB. CA + pxor xmm11, xmm1 ; 50FF _ 66 44: 0F EF. D9 + pxor xmm0, xmm10 ; 5104 _ 66 41: 0F EF. C2 + pxor xmm9, xmm0 ; 5109 _ 66 44: 0F EF. C8 + pand xmm0, xmm1 ; 510E _ 66: 0F DB. C1 + pxor xmm10, xmm0 ; 5112 _ 66 44: 0F EF. D0 + por xmm0, xmm11 ; 5117 _ 66 41: 0F EB. C3 + pxor xmm0, xmm9 ; 511C _ 66 41: 0F EF. 
C1 + pxor xmm1, xmm10 ; 5121 _ 66 41: 0F EF. CA + pxor xmm9, xmm11 ; 5126 _ 66 45: 0F EF. CB + pxor xmm1, xmm0 ; 512B _ 66: 0F EF. C8 + pxor xmm1, xmm11 ; 512F _ 66 41: 0F EF. CB + pand xmm9, xmm1 ; 5134 _ 66 44: 0F DB. C9 + pxor xmm10, xmm9 ; 5139 _ 66 45: 0F EF. D1 + movd xmm9, dword [r12+1300H] ; 513E _ 66 45: 0F 6E. 8C 24, 00001300 + pshufd xmm2, xmm9, 0 ; 5148 _ 66 41: 0F 70. D1, 00 + pxor xmm0, xmm2 ; 514E _ 66: 0F EF. C2 + movd xmm9, dword [r12+1304H] ; 5152 _ 66 45: 0F 6E. 8C 24, 00001304 + pshufd xmm2, xmm9, 0 ; 515C _ 66 41: 0F 70. D1, 00 + pxor xmm10, xmm2 ; 5162 _ 66 44: 0F EF. D2 + movd xmm9, dword [r12+1308H] ; 5167 _ 66 45: 0F 6E. 8C 24, 00001308 + pshufd xmm2, xmm9, 0 ; 5171 _ 66 41: 0F 70. D1, 00 + movd xmm9, dword [r12+130CH] ; 5177 _ 66 45: 0F 6E. 8C 24, 0000130C + pxor xmm11, xmm2 ; 5181 _ 66 44: 0F EF. DA + pshufd xmm2, xmm9, 0 ; 5186 _ 66 41: 0F 70. D1, 00 + movdqa xmm9, xmm0 ; 518C _ 66 44: 0F 6F. C8 + pxor xmm1, xmm2 ; 5191 _ 66: 0F EF. CA + movdqa xmm2, xmm11 ; 5195 _ 66 41: 0F 6F. D3 + psrld xmm11, 22 ; 519A _ 66 41: 0F 72. D3, 16 + pslld xmm9, 27 ; 51A0 _ 66 41: 0F 72. F1, 1B + pslld xmm2, 10 ; 51A6 _ 66: 0F 72. F2, 0A + por xmm2, xmm11 ; 51AB _ 66 41: 0F EB. D3 + movdqa xmm11, xmm10 ; 51B0 _ 66 45: 0F 6F. DA + psrld xmm0, 5 ; 51B5 _ 66: 0F 72. D0, 05 + por xmm9, xmm0 ; 51BA _ 66 44: 0F EB. C8 + movdqa xmm0, xmm10 ; 51BF _ 66 41: 0F 6F. C2 + pxor xmm2, xmm1 ; 51C4 _ 66: 0F EF. D1 + pslld xmm11, 7 ; 51C8 _ 66 41: 0F 72. F3, 07 + pxor xmm2, xmm11 ; 51CE _ 66 41: 0F EF. D3 + movdqa xmm11, xmm1 ; 51D3 _ 66 44: 0F 6F. D9 + pxor xmm9, xmm10 ; 51D8 _ 66 45: 0F EF. CA + pxor xmm9, xmm1 ; 51DD _ 66 44: 0F EF. C9 + pslld xmm11, 25 ; 51E2 _ 66 41: 0F 72. F3, 19 + psrld xmm1, 7 ; 51E8 _ 66: 0F 72. D1, 07 + por xmm11, xmm1 ; 51ED _ 66 44: 0F EB. D9 + pslld xmm0, 31 ; 51F2 _ 66: 0F 72. F0, 1F + psrld xmm10, 1 ; 51F7 _ 66 41: 0F 72. D2, 01 + por xmm0, xmm10 ; 51FD _ 66 41: 0F EB. C2 + movdqa xmm10, xmm9 ; 5202 _ 66 45: 0F 6F. D1 + pxor xmm11, xmm2 ; 5207 _ 66 44: 0F EF. DA + pxor xmm0, xmm9 ; 520C _ 66 41: 0F EF. C1 + pslld xmm10, 3 ; 5211 _ 66 41: 0F 72. F2, 03 + pxor xmm11, xmm10 ; 5217 _ 66 45: 0F EF. DA + movdqa xmm10, xmm2 ; 521C _ 66 44: 0F 6F. D2 + pxor xmm0, xmm2 ; 5221 _ 66: 0F EF. C2 + psrld xmm2, 3 ; 5225 _ 66: 0F 72. D2, 03 + pslld xmm10, 29 ; 522A _ 66 41: 0F 72. F2, 1D + por xmm10, xmm2 ; 5230 _ 66 44: 0F EB. D2 + movdqa xmm2, xmm9 ; 5235 _ 66 41: 0F 6F. D1 + psrld xmm9, 13 ; 523A _ 66 41: 0F 72. D1, 0D + pslld xmm2, 19 ; 5240 _ 66: 0F 72. F2, 13 + por xmm2, xmm9 ; 5245 _ 66 41: 0F EB. D1 + movdqa xmm9, xmm10 ; 524A _ 66 45: 0F 6F. CA + por xmm10, xmm11 ; 524F _ 66 45: 0F EB. D3 + pxor xmm9, xmm2 ; 5254 _ 66 44: 0F EF. CA + pand xmm2, xmm11 ; 5259 _ 66 41: 0F DB. D3 + pxor xmm9, xmm6 ; 525E _ 66 44: 0F EF. CE + pxor xmm11, xmm0 ; 5263 _ 66 44: 0F EF. D8 + por xmm0, xmm2 ; 5268 _ 66: 0F EB. C2 + pxor xmm2, xmm9 ; 526C _ 66 41: 0F EF. D1 + pand xmm9, xmm10 ; 5271 _ 66 45: 0F DB. CA + pand xmm11, xmm10 ; 5276 _ 66 45: 0F DB. DA + pxor xmm0, xmm9 ; 527B _ 66 41: 0F EF. C1 + pxor xmm9, xmm2 ; 5280 _ 66 44: 0F EF. CA + por xmm2, xmm9 ; 5285 _ 66 41: 0F EB. D1 + pxor xmm10, xmm0 ; 528A _ 66 44: 0F EF. D0 + pxor xmm2, xmm11 ; 528F _ 66 41: 0F EF. D3 + pxor xmm11, xmm10 ; 5294 _ 66 45: 0F EF. DA + por xmm10, xmm2 ; 5299 _ 66 44: 0F EB. D2 + pxor xmm11, xmm9 ; 529E _ 66 45: 0F EF. D9 + pxor xmm10, xmm9 ; 52A3 _ 66 45: 0F EF. D1 + movd xmm9, dword [r12+12F0H] ; 52A8 _ 66 45: 0F 6E. 8C 24, 000012F0 + pshufd xmm9, xmm9, 0 ; 52B2 _ 66 45: 0F 70. 
C9, 00 + pxor xmm11, xmm9 ; 52B8 _ 66 45: 0F EF. D9 + movd xmm9, dword [r12+12F4H] ; 52BD _ 66 45: 0F 6E. 8C 24, 000012F4 + pshufd xmm9, xmm9, 0 ; 52C7 _ 66 45: 0F 70. C9, 00 + pxor xmm2, xmm9 ; 52CD _ 66 41: 0F EF. D1 + movd xmm9, dword [r12+12F8H] ; 52D2 _ 66 45: 0F 6E. 8C 24, 000012F8 + pshufd xmm9, xmm9, 0 ; 52DC _ 66 45: 0F 70. C9, 00 + pxor xmm0, xmm9 ; 52E2 _ 66 41: 0F EF. C1 + movdqa xmm1, xmm0 ; 52E7 _ 66: 0F 6F. C8 + psrld xmm0, 22 ; 52EB _ 66: 0F 72. D0, 16 + movd xmm9, dword [r12+12FCH] ; 52F0 _ 66 45: 0F 6E. 8C 24, 000012FC + pshufd xmm9, xmm9, 0 ; 52FA _ 66 45: 0F 70. C9, 00 + pxor xmm10, xmm9 ; 5300 _ 66 45: 0F EF. D1 + movdqa xmm9, xmm11 ; 5305 _ 66 45: 0F 6F. CB + pslld xmm1, 10 ; 530A _ 66: 0F 72. F1, 0A + por xmm1, xmm0 ; 530F _ 66: 0F EB. C8 + movdqa xmm0, xmm10 ; 5313 _ 66 41: 0F 6F. C2 + pslld xmm9, 27 ; 5318 _ 66 41: 0F 72. F1, 1B + psrld xmm11, 5 ; 531E _ 66 41: 0F 72. D3, 05 + por xmm9, xmm11 ; 5324 _ 66 45: 0F EB. CB + movdqa xmm11, xmm2 ; 5329 _ 66 44: 0F 6F. DA + pxor xmm1, xmm10 ; 532E _ 66 41: 0F EF. CA + pxor xmm9, xmm2 ; 5333 _ 66 44: 0F EF. CA + pslld xmm11, 7 ; 5338 _ 66 41: 0F 72. F3, 07 + pxor xmm1, xmm11 ; 533E _ 66 41: 0F EF. CB + pxor xmm9, xmm10 ; 5343 _ 66 45: 0F EF. CA + pslld xmm0, 25 ; 5348 _ 66: 0F 72. F0, 19 + psrld xmm10, 7 ; 534D _ 66 41: 0F 72. D2, 07 + por xmm0, xmm10 ; 5353 _ 66 41: 0F EB. C2 + movdqa xmm10, xmm2 ; 5358 _ 66 44: 0F 6F. D2 + psrld xmm2, 1 ; 535D _ 66: 0F 72. D2, 01 + pxor xmm0, xmm1 ; 5362 _ 66: 0F EF. C1 + pslld xmm10, 31 ; 5366 _ 66 41: 0F 72. F2, 1F + por xmm10, xmm2 ; 536C _ 66 44: 0F EB. D2 + movdqa xmm2, xmm9 ; 5371 _ 66 41: 0F 6F. D1 + pxor xmm10, xmm9 ; 5376 _ 66 45: 0F EF. D1 + pslld xmm2, 3 ; 537B _ 66: 0F 72. F2, 03 + pxor xmm0, xmm2 ; 5380 _ 66: 0F EF. C2 + movdqa xmm2, xmm1 ; 5384 _ 66: 0F 6F. D1 + movdqa xmm11, xmm9 ; 5388 _ 66 45: 0F 6F. D9 + pxor xmm10, xmm1 ; 538D _ 66 44: 0F EF. D1 + pslld xmm2, 29 ; 5392 _ 66: 0F 72. F2, 1D + psrld xmm1, 3 ; 5397 _ 66: 0F 72. D1, 03 + por xmm2, xmm1 ; 539C _ 66: 0F EB. D1 + pslld xmm11, 19 ; 53A0 _ 66 41: 0F 72. F3, 13 + psrld xmm9, 13 ; 53A6 _ 66 41: 0F 72. D1, 0D + por xmm11, xmm9 ; 53AC _ 66 45: 0F EB. D9 + movdqa xmm9, xmm2 ; 53B1 _ 66 44: 0F 6F. CA + pxor xmm11, xmm2 ; 53B6 _ 66 44: 0F EF. DA + pxor xmm2, xmm0 ; 53BB _ 66: 0F EF. D0 + pand xmm9, xmm11 ; 53BF _ 66 45: 0F DB. CB + pxor xmm9, xmm6 ; 53C4 _ 66 44: 0F EF. CE + pxor xmm0, xmm10 ; 53C9 _ 66 41: 0F EF. C2 + pxor xmm9, xmm0 ; 53CE _ 66 44: 0F EF. C8 + por xmm2, xmm11 ; 53D3 _ 66 41: 0F EB. D3 + pxor xmm11, xmm9 ; 53D8 _ 66 45: 0F EF. D9 + pxor xmm0, xmm2 ; 53DD _ 66: 0F EF. C2 + pxor xmm2, xmm10 ; 53E1 _ 66 41: 0F EF. D2 + pand xmm10, xmm0 ; 53E6 _ 66 44: 0F DB. D0 + pxor xmm10, xmm11 ; 53EB _ 66 45: 0F EF. D3 + pxor xmm11, xmm0 ; 53F0 _ 66 44: 0F EF. D8 + por xmm11, xmm9 ; 53F5 _ 66 45: 0F EB. D9 + pxor xmm0, xmm10 ; 53FA _ 66 41: 0F EF. C2 + pxor xmm2, xmm11 ; 53FF _ 66 41: 0F EF. D3 + movd xmm11, dword [r12+12E0H] ; 5404 _ 66 45: 0F 6E. 9C 24, 000012E0 + pshufd xmm11, xmm11, 0 ; 540E _ 66 45: 0F 70. DB, 00 + pxor xmm10, xmm11 ; 5414 _ 66 45: 0F EF. D3 + movd xmm11, dword [r12+12E4H] ; 5419 _ 66 45: 0F 6E. 9C 24, 000012E4 + pshufd xmm11, xmm11, 0 ; 5423 _ 66 45: 0F 70. DB, 00 + pxor xmm9, xmm11 ; 5429 _ 66 45: 0F EF. CB + movd xmm11, dword [r12+12E8H] ; 542E _ 66 45: 0F 6E. 9C 24, 000012E8 + pshufd xmm11, xmm11, 0 ; 5438 _ 66 45: 0F 70. DB, 00 + pxor xmm2, xmm11 ; 543E _ 66 41: 0F EF. D3 + movdqa xmm1, xmm2 ; 5443 _ 66: 0F 6F. CA + psrld xmm2, 22 ; 5447 _ 66: 0F 72. 
D2, 16 + pslld xmm1, 10 ; 544C _ 66: 0F 72. F1, 0A + movd xmm11, dword [r12+12ECH] ; 5451 _ 66 45: 0F 6E. 9C 24, 000012EC + pshufd xmm11, xmm11, 0 ; 545B _ 66 45: 0F 70. DB, 00 + pxor xmm0, xmm11 ; 5461 _ 66 41: 0F EF. C3 + movdqa xmm11, xmm10 ; 5466 _ 66 45: 0F 6F. DA + por xmm1, xmm2 ; 546B _ 66: 0F EB. CA + movdqa xmm2, xmm9 ; 546F _ 66 41: 0F 6F. D1 + pslld xmm11, 27 ; 5474 _ 66 41: 0F 72. F3, 1B + psrld xmm10, 5 ; 547A _ 66 41: 0F 72. D2, 05 + por xmm11, xmm10 ; 5480 _ 66 45: 0F EB. DA + movdqa xmm10, xmm9 ; 5485 _ 66 45: 0F 6F. D1 + pxor xmm1, xmm0 ; 548A _ 66: 0F EF. C8 + pxor xmm11, xmm9 ; 548E _ 66 45: 0F EF. D9 + pslld xmm10, 7 ; 5493 _ 66 41: 0F 72. F2, 07 + pxor xmm1, xmm10 ; 5499 _ 66 41: 0F EF. CA + movdqa xmm10, xmm0 ; 549E _ 66 44: 0F 6F. D0 + pxor xmm11, xmm0 ; 54A3 _ 66 44: 0F EF. D8 + psrld xmm0, 7 ; 54A8 _ 66: 0F 72. D0, 07 + pslld xmm10, 25 ; 54AD _ 66 41: 0F 72. F2, 19 + por xmm10, xmm0 ; 54B3 _ 66 44: 0F EB. D0 + movdqa xmm0, xmm1 ; 54B8 _ 66: 0F 6F. C1 + pslld xmm2, 31 ; 54BC _ 66: 0F 72. F2, 1F + psrld xmm9, 1 ; 54C1 _ 66 41: 0F 72. D1, 01 + por xmm2, xmm9 ; 54C7 _ 66 41: 0F EB. D1 + movdqa xmm9, xmm11 ; 54CC _ 66 45: 0F 6F. CB + pxor xmm10, xmm1 ; 54D1 _ 66 44: 0F EF. D1 + pxor xmm2, xmm11 ; 54D6 _ 66 41: 0F EF. D3 + pslld xmm9, 3 ; 54DB _ 66 41: 0F 72. F1, 03 + pxor xmm10, xmm9 ; 54E1 _ 66 45: 0F EF. D1 + movdqa xmm9, xmm10 ; 54E6 _ 66 45: 0F 6F. CA + pxor xmm2, xmm1 ; 54EB _ 66: 0F EF. D1 + pslld xmm0, 29 ; 54EF _ 66: 0F 72. F0, 1D + psrld xmm1, 3 ; 54F4 _ 66: 0F 72. D1, 03 + por xmm0, xmm1 ; 54F9 _ 66: 0F EB. C1 + movdqa xmm1, xmm11 ; 54FD _ 66 41: 0F 6F. CB + psrld xmm11, 13 ; 5502 _ 66 41: 0F 72. D3, 0D + pxor xmm2, xmm6 ; 5508 _ 66: 0F EF. D6 + pslld xmm1, 19 ; 550C _ 66: 0F 72. F1, 13 + por xmm1, xmm11 ; 5511 _ 66 41: 0F EB. CB + pxor xmm0, xmm2 ; 5516 _ 66: 0F EF. C2 + por xmm9, xmm1 ; 551A _ 66 44: 0F EB. C9 + pxor xmm9, xmm0 ; 551F _ 66 44: 0F EF. C8 + por xmm0, xmm2 ; 5524 _ 66: 0F EB. C2 + pand xmm0, xmm1 ; 5528 _ 66: 0F DB. C1 + movd xmm11, dword [r12+12D0H] ; 552C _ 66 45: 0F 6E. 9C 24, 000012D0 + pxor xmm10, xmm9 ; 5536 _ 66 45: 0F EF. D1 + pxor xmm0, xmm10 ; 553B _ 66 41: 0F EF. C2 + por xmm10, xmm1 ; 5540 _ 66 44: 0F EB. D1 + pxor xmm10, xmm2 ; 5545 _ 66 44: 0F EF. D2 + pand xmm2, xmm0 ; 554A _ 66: 0F DB. D0 + pxor xmm2, xmm9 ; 554E _ 66 41: 0F EF. D1 + pxor xmm10, xmm0 ; 5553 _ 66 44: 0F EF. D0 + pand xmm9, xmm10 ; 5558 _ 66 45: 0F DB. CA + pxor xmm10, xmm2 ; 555D _ 66 44: 0F EF. D2 + pxor xmm9, xmm10 ; 5562 _ 66 45: 0F EF. CA + pxor xmm10, xmm6 ; 5567 _ 66 44: 0F EF. D6 + pxor xmm9, xmm1 ; 556C _ 66 44: 0F EF. C9 + pshufd xmm11, xmm11, 0 ; 5571 _ 66 45: 0F 70. DB, 00 + pxor xmm2, xmm11 ; 5577 _ 66 41: 0F EF. D3 + movdqa xmm1, xmm2 ; 557C _ 66: 0F 6F. CA + psrld xmm2, 5 ; 5580 _ 66: 0F 72. D2, 05 + pslld xmm1, 27 ; 5585 _ 66: 0F 72. F1, 1B + por xmm1, xmm2 ; 558A _ 66: 0F EB. CA + movd xmm11, dword [r12+12D4H] ; 558E _ 66 45: 0F 6E. 9C 24, 000012D4 + pshufd xmm11, xmm11, 0 ; 5598 _ 66 45: 0F 70. DB, 00 + pxor xmm10, xmm11 ; 559E _ 66 45: 0F EF. D3 + movdqa xmm2, xmm10 ; 55A3 _ 66 41: 0F 6F. D2 + pxor xmm1, xmm10 ; 55A8 _ 66 41: 0F EF. CA + pslld xmm2, 7 ; 55AD _ 66: 0F 72. F2, 07 + movd xmm11, dword [r12+12D8H] ; 55B2 _ 66 45: 0F 6E. 9C 24, 000012D8 + pshufd xmm11, xmm11, 0 ; 55BC _ 66 45: 0F 70. DB, 00 + pxor xmm9, xmm11 ; 55C2 _ 66 45: 0F EF. CB + movd xmm11, dword [r12+12DCH] ; 55C7 _ 66 45: 0F 6E. 9C 24, 000012DC + pshufd xmm11, xmm11, 0 ; 55D1 _ 66 45: 0F 70. DB, 00 + pxor xmm0, xmm11 ; 55D7 _ 66 41: 0F EF. 
C3 + movdqa xmm11, xmm9 ; 55DC _ 66 45: 0F 6F. D9 + psrld xmm9, 22 ; 55E1 _ 66 41: 0F 72. D1, 16 + pxor xmm1, xmm0 ; 55E7 _ 66: 0F EF. C8 + pslld xmm11, 10 ; 55EB _ 66 41: 0F 72. F3, 0A + por xmm11, xmm9 ; 55F1 _ 66 45: 0F EB. D9 + movdqa xmm9, xmm10 ; 55F6 _ 66 45: 0F 6F. CA + pxor xmm11, xmm0 ; 55FB _ 66 44: 0F EF. D8 + pxor xmm11, xmm2 ; 5600 _ 66 44: 0F EF. DA + movdqa xmm2, xmm0 ; 5605 _ 66: 0F 6F. D0 + psrld xmm0, 7 ; 5609 _ 66: 0F 72. D0, 07 + pslld xmm9, 31 ; 560E _ 66 41: 0F 72. F1, 1F + pslld xmm2, 25 ; 5614 _ 66: 0F 72. F2, 19 + por xmm2, xmm0 ; 5619 _ 66: 0F EB. D0 + movdqa xmm0, xmm11 ; 561D _ 66 41: 0F 6F. C3 + psrld xmm10, 1 ; 5622 _ 66 41: 0F 72. D2, 01 + por xmm9, xmm10 ; 5628 _ 66 45: 0F EB. CA + movdqa xmm10, xmm1 ; 562D _ 66 44: 0F 6F. D1 + pxor xmm2, xmm11 ; 5632 _ 66 41: 0F EF. D3 + pxor xmm9, xmm1 ; 5637 _ 66 44: 0F EF. C9 + pslld xmm10, 3 ; 563C _ 66 41: 0F 72. F2, 03 + pxor xmm2, xmm10 ; 5642 _ 66 41: 0F EF. D2 + movdqa xmm10, xmm1 ; 5647 _ 66 44: 0F 6F. D1 + pxor xmm9, xmm11 ; 564C _ 66 45: 0F EF. CB + pslld xmm0, 29 ; 5651 _ 66: 0F 72. F0, 1D + psrld xmm11, 3 ; 5656 _ 66 41: 0F 72. D3, 03 + por xmm0, xmm11 ; 565C _ 66 41: 0F EB. C3 + pslld xmm10, 19 ; 5661 _ 66 41: 0F 72. F2, 13 + psrld xmm1, 13 ; 5667 _ 66: 0F 72. D1, 0D + por xmm10, xmm1 ; 566C _ 66 44: 0F EB. D1 + movdqa xmm1, xmm0 ; 5671 _ 66: 0F 6F. C8 + pand xmm1, xmm2 ; 5675 _ 66: 0F DB. CA + pxor xmm1, xmm9 ; 5679 _ 66 41: 0F EF. C9 + por xmm9, xmm2 ; 567E _ 66 44: 0F EB. CA + pand xmm9, xmm10 ; 5683 _ 66 45: 0F DB. CA + pxor xmm0, xmm1 ; 5688 _ 66: 0F EF. C1 + pxor xmm0, xmm9 ; 568C _ 66 41: 0F EF. C1 + pand xmm9, xmm1 ; 5691 _ 66 44: 0F DB. C9 + pxor xmm10, xmm6 ; 5696 _ 66 44: 0F EF. D6 + pxor xmm2, xmm0 ; 569B _ 66: 0F EF. D0 + pxor xmm9, xmm2 ; 569F _ 66 44: 0F EF. CA + pand xmm2, xmm10 ; 56A4 _ 66 41: 0F DB. D2 + pxor xmm2, xmm1 ; 56A9 _ 66: 0F EF. D1 + pxor xmm10, xmm9 ; 56AD _ 66 45: 0F EF. D1 + pand xmm1, xmm10 ; 56B2 _ 66 41: 0F DB. CA + pxor xmm2, xmm10 ; 56B7 _ 66 41: 0F EF. D2 + pxor xmm1, xmm0 ; 56BC _ 66: 0F EF. C8 + por xmm1, xmm2 ; 56C0 _ 66: 0F EB. CA + pxor xmm2, xmm10 ; 56C4 _ 66 41: 0F EF. D2 + pxor xmm1, xmm9 ; 56C9 _ 66 41: 0F EF. C9 + movd xmm9, dword [r12+12C0H] ; 56CE _ 66 45: 0F 6E. 8C 24, 000012C0 + pshufd xmm11, xmm9, 0 ; 56D8 _ 66 45: 0F 70. D9, 00 + pxor xmm10, xmm11 ; 56DE _ 66 45: 0F EF. D3 + movd xmm9, dword [r12+12C4H] ; 56E3 _ 66 45: 0F 6E. 8C 24, 000012C4 + pshufd xmm11, xmm9, 0 ; 56ED _ 66 45: 0F 70. D9, 00 + pxor xmm2, xmm11 ; 56F3 _ 66 41: 0F EF. D3 + movd xmm9, dword [r12+12C8H] ; 56F8 _ 66 45: 0F 6E. 8C 24, 000012C8 + pshufd xmm11, xmm9, 0 ; 5702 _ 66 45: 0F 70. D9, 00 + pxor xmm1, xmm11 ; 5708 _ 66 41: 0F EF. CB + movd xmm9, dword [r12+12CCH] ; 570D _ 66 45: 0F 6E. 8C 24, 000012CC + pshufd xmm11, xmm9, 0 ; 5717 _ 66 45: 0F 70. D9, 00 + movdqa xmm9, xmm1 ; 571D _ 66 44: 0F 6F. C9 + pxor xmm0, xmm11 ; 5722 _ 66 41: 0F EF. C3 + movdqa xmm11, xmm10 ; 5727 _ 66 45: 0F 6F. DA + pslld xmm9, 10 ; 572C _ 66 41: 0F 72. F1, 0A + psrld xmm1, 22 ; 5732 _ 66: 0F 72. D1, 16 + por xmm9, xmm1 ; 5737 _ 66 44: 0F EB. C9 + movdqa xmm1, xmm2 ; 573C _ 66: 0F 6F. CA + pslld xmm11, 27 ; 5740 _ 66 41: 0F 72. F3, 1B + psrld xmm10, 5 ; 5746 _ 66 41: 0F 72. D2, 05 + por xmm11, xmm10 ; 574C _ 66 45: 0F EB. DA + movdqa xmm10, xmm2 ; 5751 _ 66 44: 0F 6F. D2 + pxor xmm9, xmm0 ; 5756 _ 66 44: 0F EF. C8 + pxor xmm11, xmm2 ; 575B _ 66 44: 0F EF. DA + pslld xmm10, 7 ; 5760 _ 66 41: 0F 72. F2, 07 + pxor xmm9, xmm10 ; 5766 _ 66 45: 0F EF. CA + movdqa xmm10, xmm0 ; 576B _ 66 44: 0F 6F. 
D0 + pxor xmm11, xmm0 ; 5770 _ 66 44: 0F EF. D8 + psrld xmm0, 7 ; 5775 _ 66: 0F 72. D0, 07 + pslld xmm10, 25 ; 577A _ 66 41: 0F 72. F2, 19 + por xmm10, xmm0 ; 5780 _ 66 44: 0F EB. D0 + movdqa xmm0, xmm9 ; 5785 _ 66 41: 0F 6F. C1 + pslld xmm1, 31 ; 578A _ 66: 0F 72. F1, 1F + psrld xmm2, 1 ; 578F _ 66: 0F 72. D2, 01 + por xmm1, xmm2 ; 5794 _ 66: 0F EB. CA + movdqa xmm2, xmm11 ; 5798 _ 66 41: 0F 6F. D3 + pxor xmm10, xmm9 ; 579D _ 66 45: 0F EF. D1 + pxor xmm1, xmm11 ; 57A2 _ 66 41: 0F EF. CB + pslld xmm2, 3 ; 57A7 _ 66: 0F 72. F2, 03 + pxor xmm10, xmm2 ; 57AC _ 66 44: 0F EF. D2 + movdqa xmm2, xmm11 ; 57B1 _ 66 41: 0F 6F. D3 + pxor xmm1, xmm9 ; 57B6 _ 66 41: 0F EF. C9 + pslld xmm0, 29 ; 57BB _ 66: 0F 72. F0, 1D + psrld xmm9, 3 ; 57C0 _ 66 41: 0F 72. D1, 03 + por xmm0, xmm9 ; 57C6 _ 66 41: 0F EB. C1 + movdqa xmm9, xmm0 ; 57CB _ 66 44: 0F 6F. C8 + pslld xmm2, 19 ; 57D0 _ 66: 0F 72. F2, 13 + psrld xmm11, 13 ; 57D5 _ 66 41: 0F 72. D3, 0D + por xmm2, xmm11 ; 57DB _ 66 41: 0F EB. D3 + pxor xmm9, xmm1 ; 57E0 _ 66 44: 0F EF. C9 + pxor xmm2, xmm9 ; 57E5 _ 66 41: 0F EF. D1 + pand xmm0, xmm9 ; 57EA _ 66 41: 0F DB. C1 + pxor xmm0, xmm2 ; 57EF _ 66: 0F EF. C2 + pand xmm2, xmm1 ; 57F3 _ 66: 0F DB. D1 + movd xmm11, dword [r12+12B0H] ; 57F7 _ 66 45: 0F 6E. 9C 24, 000012B0 + pxor xmm1, xmm10 ; 5801 _ 66 41: 0F EF. CA + por xmm10, xmm0 ; 5806 _ 66 44: 0F EB. D0 + pxor xmm9, xmm10 ; 580B _ 66 45: 0F EF. CA + pxor xmm2, xmm10 ; 5810 _ 66 41: 0F EF. D2 + pxor xmm1, xmm0 ; 5815 _ 66: 0F EF. C8 + pand xmm10, xmm9 ; 5819 _ 66 45: 0F DB. D1 + pxor xmm10, xmm1 ; 581E _ 66 44: 0F EF. D1 + pxor xmm1, xmm2 ; 5823 _ 66: 0F EF. CA + por xmm1, xmm9 ; 5827 _ 66 41: 0F EB. C9 + pxor xmm2, xmm10 ; 582C _ 66 41: 0F EF. D2 + pxor xmm1, xmm0 ; 5831 _ 66: 0F EF. C8 + pxor xmm2, xmm1 ; 5835 _ 66: 0F EF. D1 + pshufd xmm11, xmm11, 0 ; 5839 _ 66 45: 0F 70. DB, 00 + pxor xmm9, xmm11 ; 583F _ 66 45: 0F EF. CB + movdqa xmm0, xmm9 ; 5844 _ 66 41: 0F 6F. C1 + psrld xmm9, 5 ; 5849 _ 66 41: 0F 72. D1, 05 + pslld xmm0, 27 ; 584F _ 66: 0F 72. F0, 1B + por xmm0, xmm9 ; 5854 _ 66 41: 0F EB. C1 + movd xmm11, dword [r12+12B4H] ; 5859 _ 66 45: 0F 6E. 9C 24, 000012B4 + pshufd xmm11, xmm11, 0 ; 5863 _ 66 45: 0F 70. DB, 00 + pxor xmm1, xmm11 ; 5869 _ 66 41: 0F EF. CB + movdqa xmm9, xmm1 ; 586E _ 66 44: 0F 6F. C9 + pxor xmm0, xmm1 ; 5873 _ 66: 0F EF. C1 + pslld xmm9, 7 ; 5877 _ 66 41: 0F 72. F1, 07 + movd xmm11, dword [r12+12B8H] ; 587D _ 66 45: 0F 6E. 9C 24, 000012B8 + pshufd xmm11, xmm11, 0 ; 5887 _ 66 45: 0F 70. DB, 00 + pxor xmm10, xmm11 ; 588D _ 66 45: 0F EF. D3 + movd xmm11, dword [r12+12BCH] ; 5892 _ 66 45: 0F 6E. 9C 24, 000012BC + pshufd xmm11, xmm11, 0 ; 589C _ 66 45: 0F 70. DB, 00 + pxor xmm2, xmm11 ; 58A2 _ 66 41: 0F EF. D3 + movdqa xmm11, xmm10 ; 58A7 _ 66 45: 0F 6F. DA + psrld xmm10, 22 ; 58AC _ 66 41: 0F 72. D2, 16 + pxor xmm0, xmm2 ; 58B2 _ 66: 0F EF. C2 + pslld xmm11, 10 ; 58B6 _ 66 41: 0F 72. F3, 0A + por xmm11, xmm10 ; 58BC _ 66 45: 0F EB. DA + movdqa xmm10, xmm2 ; 58C1 _ 66 44: 0F 6F. D2 + pxor xmm11, xmm2 ; 58C6 _ 66 44: 0F EF. DA + pxor xmm11, xmm9 ; 58CB _ 66 45: 0F EF. D9 + movdqa xmm9, xmm0 ; 58D0 _ 66 44: 0F 6F. C8 + pslld xmm10, 25 ; 58D5 _ 66 41: 0F 72. F2, 19 + psrld xmm2, 7 ; 58DB _ 66: 0F 72. D2, 07 + por xmm10, xmm2 ; 58E0 _ 66 44: 0F EB. D2 + movdqa xmm2, xmm1 ; 58E5 _ 66: 0F 6F. D1 + psrld xmm1, 1 ; 58E9 _ 66: 0F 72. D1, 01 + pxor xmm10, xmm11 ; 58EE _ 66 45: 0F EF. D3 + pslld xmm2, 31 ; 58F3 _ 66: 0F 72. F2, 1F + por xmm2, xmm1 ; 58F8 _ 66: 0F EB. D1 + movdqa xmm1, xmm11 ; 58FC _ 66 41: 0F 6F. 
CB + pslld xmm9, 3 ; 5901 _ 66 41: 0F 72. F1, 03 + pxor xmm10, xmm9 ; 5907 _ 66 45: 0F EF. D1 + pxor xmm2, xmm0 ; 590C _ 66: 0F EF. D0 + pxor xmm2, xmm11 ; 5910 _ 66 41: 0F EF. D3 + pslld xmm1, 29 ; 5915 _ 66: 0F 72. F1, 1D + psrld xmm11, 3 ; 591A _ 66 41: 0F 72. D3, 03 + por xmm1, xmm11 ; 5920 _ 66 41: 0F EB. CB + movdqa xmm11, xmm0 ; 5925 _ 66 44: 0F 6F. D8 + psrld xmm0, 13 ; 592A _ 66: 0F 72. D0, 0D + pxor xmm1, xmm10 ; 592F _ 66 41: 0F EF. CA + pslld xmm11, 19 ; 5934 _ 66 41: 0F 72. F3, 13 + por xmm11, xmm0 ; 593A _ 66 44: 0F EB. D8 + pxor xmm10, xmm11 ; 593F _ 66 45: 0F EF. D3 + movdqa xmm9, xmm10 ; 5944 _ 66 45: 0F 6F. CA + pand xmm9, xmm1 ; 5949 _ 66 44: 0F DB. C9 + pxor xmm9, xmm2 ; 594E _ 66 44: 0F EF. CA + por xmm2, xmm1 ; 5953 _ 66: 0F EB. D1 + pxor xmm2, xmm10 ; 5957 _ 66 41: 0F EF. D2 + pand xmm10, xmm9 ; 595C _ 66 45: 0F DB. D1 + pxor xmm1, xmm9 ; 5961 _ 66 41: 0F EF. C9 + pand xmm10, xmm11 ; 5966 _ 66 45: 0F DB. D3 + pxor xmm10, xmm1 ; 596B _ 66 44: 0F EF. D1 + pand xmm1, xmm2 ; 5970 _ 66: 0F DB. CA + por xmm1, xmm11 ; 5974 _ 66 41: 0F EB. CB + pxor xmm9, xmm6 ; 5979 _ 66 44: 0F EF. CE + movdqa xmm0, xmm9 ; 597E _ 66 41: 0F 6F. C1 + pxor xmm1, xmm9 ; 5983 _ 66 41: 0F EF. C9 + pxor xmm11, xmm9 ; 5988 _ 66 45: 0F EF. D9 + pxor xmm0, xmm10 ; 598D _ 66 41: 0F EF. C2 + pand xmm11, xmm2 ; 5992 _ 66 44: 0F DB. DA + pxor xmm0, xmm11 ; 5997 _ 66 41: 0F EF. C3 + movd xmm11, dword [r12+12A0H] ; 599C _ 66 45: 0F 6E. 9C 24, 000012A0 + pshufd xmm11, xmm11, 0 ; 59A6 _ 66 45: 0F 70. DB, 00 + pxor xmm2, xmm11 ; 59AC _ 66 41: 0F EF. D3 + movd xmm9, dword [r12+12A4H] ; 59B1 _ 66 45: 0F 6E. 8C 24, 000012A4 + pshufd xmm11, xmm9, 0 ; 59BB _ 66 45: 0F 70. D9, 00 + pxor xmm10, xmm11 ; 59C1 _ 66 45: 0F EF. D3 + movd xmm9, dword [r12+12A8H] ; 59C6 _ 66 45: 0F 6E. 8C 24, 000012A8 + pshufd xmm11, xmm9, 0 ; 59D0 _ 66 45: 0F 70. D9, 00 + pxor xmm1, xmm11 ; 59D6 _ 66 41: 0F EF. CB + movd xmm9, dword [r12+12ACH] ; 59DB _ 66 45: 0F 6E. 8C 24, 000012AC + pshufd xmm11, xmm9, 0 ; 59E5 _ 66 45: 0F 70. D9, 00 + pxor xmm0, xmm11 ; 59EB _ 66 41: 0F EF. C3 + movdqa xmm11, xmm1 ; 59F0 _ 66 44: 0F 6F. D9 + movdqa xmm9, xmm0 ; 59F5 _ 66 44: 0F 6F. C8 + psrld xmm1, 22 ; 59FA _ 66: 0F 72. D1, 16 + pslld xmm11, 10 ; 59FF _ 66 41: 0F 72. F3, 0A + por xmm11, xmm1 ; 5A05 _ 66 44: 0F EB. D9 + movdqa xmm1, xmm2 ; 5A0A _ 66: 0F 6F. CA + psrld xmm2, 5 ; 5A0E _ 66: 0F 72. D2, 05 + pxor xmm11, xmm0 ; 5A13 _ 66 44: 0F EF. D8 + pslld xmm1, 27 ; 5A18 _ 66: 0F 72. F1, 1B + por xmm1, xmm2 ; 5A1D _ 66: 0F EB. CA + movdqa xmm2, xmm10 ; 5A21 _ 66 41: 0F 6F. D2 + pxor xmm1, xmm10 ; 5A26 _ 66 41: 0F EF. CA + pxor xmm1, xmm0 ; 5A2B _ 66: 0F EF. C8 + pslld xmm2, 7 ; 5A2F _ 66: 0F 72. F2, 07 + pxor xmm11, xmm2 ; 5A34 _ 66 44: 0F EF. DA + pslld xmm9, 25 ; 5A39 _ 66 41: 0F 72. F1, 19 + psrld xmm0, 7 ; 5A3F _ 66: 0F 72. D0, 07 + por xmm9, xmm0 ; 5A44 _ 66 44: 0F EB. C8 + movdqa xmm0, xmm10 ; 5A49 _ 66 41: 0F 6F. C2 + psrld xmm10, 1 ; 5A4E _ 66 41: 0F 72. D2, 01 + pxor xmm9, xmm11 ; 5A54 _ 66 45: 0F EF. CB + pslld xmm0, 31 ; 5A59 _ 66: 0F 72. F0, 1F + por xmm0, xmm10 ; 5A5E _ 66 41: 0F EB. C2 + movdqa xmm10, xmm1 ; 5A63 _ 66 44: 0F 6F. D1 + pxor xmm0, xmm1 ; 5A68 _ 66: 0F EF. C1 + pxor xmm0, xmm11 ; 5A6C _ 66 41: 0F EF. C3 + movdqa xmm2, xmm0 ; 5A71 _ 66: 0F 6F. D0 + pslld xmm10, 3 ; 5A75 _ 66 41: 0F 72. F2, 03 + pxor xmm9, xmm10 ; 5A7B _ 66 45: 0F EF. CA + movdqa xmm10, xmm11 ; 5A80 _ 66 45: 0F 6F. D3 + psrld xmm11, 3 ; 5A85 _ 66 41: 0F 72. D3, 03 + pxor xmm2, xmm9 ; 5A8B _ 66 41: 0F EF. D1 + pslld xmm10, 29 ; 5A90 _ 66 41: 0F 72. 
F2, 1D + por xmm10, xmm11 ; 5A96 _ 66 45: 0F EB. D3 + movdqa xmm11, xmm1 ; 5A9B _ 66 44: 0F 6F. D9 + psrld xmm1, 13 ; 5AA0 _ 66: 0F 72. D1, 0D + pand xmm9, xmm2 ; 5AA5 _ 66 44: 0F DB. CA + pslld xmm11, 19 ; 5AAA _ 66 41: 0F 72. F3, 13 + por xmm11, xmm1 ; 5AB0 _ 66 44: 0F EB. D9 + pxor xmm0, xmm10 ; 5AB5 _ 66 41: 0F EF. C2 + pxor xmm9, xmm11 ; 5ABA _ 66 45: 0F EF. CB + por xmm11, xmm2 ; 5ABF _ 66 44: 0F EB. DA + pxor xmm10, xmm9 ; 5AC4 _ 66 45: 0F EF. D1 + pxor xmm11, xmm0 ; 5AC9 _ 66 44: 0F EF. D8 + por xmm11, xmm10 ; 5ACE _ 66 45: 0F EB. DA + pxor xmm2, xmm9 ; 5AD3 _ 66 41: 0F EF. D1 + pxor xmm11, xmm2 ; 5AD8 _ 66 44: 0F EF. DA + por xmm2, xmm9 ; 5ADD _ 66 41: 0F EB. D1 + pxor xmm2, xmm11 ; 5AE2 _ 66 41: 0F EF. D3 + pxor xmm0, xmm6 ; 5AE7 _ 66: 0F EF. C6 + pxor xmm0, xmm2 ; 5AEB _ 66: 0F EF. C2 + por xmm2, xmm11 ; 5AEF _ 66 41: 0F EB. D3 + pxor xmm2, xmm11 ; 5AF4 _ 66 41: 0F EF. D3 + por xmm2, xmm0 ; 5AF9 _ 66: 0F EB. D0 + pxor xmm9, xmm2 ; 5AFD _ 66 44: 0F EF. CA + movd xmm2, dword [r12+1290H] ; 5B02 _ 66 41: 0F 6E. 94 24, 00001290 + pshufd xmm2, xmm2, 0 ; 5B0C _ 66: 0F 70. D2, 00 + pxor xmm0, xmm2 ; 5B11 _ 66: 0F EF. C2 + movd xmm2, dword [r12+1294H] ; 5B15 _ 66 41: 0F 6E. 94 24, 00001294 + pshufd xmm2, xmm2, 0 ; 5B1F _ 66: 0F 70. D2, 00 + pxor xmm11, xmm2 ; 5B24 _ 66 44: 0F EF. DA + movd xmm2, dword [r12+1298H] ; 5B29 _ 66 41: 0F 6E. 94 24, 00001298 + pshufd xmm2, xmm2, 0 ; 5B33 _ 66: 0F 70. D2, 00 + pxor xmm9, xmm2 ; 5B38 _ 66 44: 0F EF. CA + movdqa xmm1, xmm9 ; 5B3D _ 66 41: 0F 6F. C9 + psrld xmm9, 22 ; 5B42 _ 66 41: 0F 72. D1, 16 + pslld xmm1, 10 ; 5B48 _ 66: 0F 72. F1, 0A + por xmm1, xmm9 ; 5B4D _ 66 41: 0F EB. C9 + movdqa xmm9, xmm0 ; 5B52 _ 66 44: 0F 6F. C8 + movd xmm2, dword [r12+129CH] ; 5B57 _ 66 41: 0F 6E. 94 24, 0000129C + pshufd xmm2, xmm2, 0 ; 5B61 _ 66: 0F 70. D2, 00 + pxor xmm10, xmm2 ; 5B66 _ 66 44: 0F EF. D2 + movdqa xmm2, xmm11 ; 5B6B _ 66 41: 0F 6F. D3 + pslld xmm9, 27 ; 5B70 _ 66 41: 0F 72. F1, 1B + psrld xmm0, 5 ; 5B76 _ 66: 0F 72. D0, 05 + por xmm9, xmm0 ; 5B7B _ 66 44: 0F EB. C8 + movdqa xmm0, xmm10 ; 5B80 _ 66 41: 0F 6F. C2 + pxor xmm1, xmm10 ; 5B85 _ 66 41: 0F EF. CA + pslld xmm2, 7 ; 5B8A _ 66: 0F 72. F2, 07 + pxor xmm1, xmm2 ; 5B8F _ 66: 0F EF. CA + movdqa xmm2, xmm1 ; 5B93 _ 66: 0F 6F. D1 + pxor xmm9, xmm11 ; 5B97 _ 66 45: 0F EF. CB + pxor xmm9, xmm10 ; 5B9C _ 66 45: 0F EF. CA + pslld xmm0, 25 ; 5BA1 _ 66: 0F 72. F0, 19 + psrld xmm10, 7 ; 5BA6 _ 66 41: 0F 72. D2, 07 + por xmm0, xmm10 ; 5BAC _ 66 41: 0F EB. C2 + movdqa xmm10, xmm11 ; 5BB1 _ 66 45: 0F 6F. D3 + psrld xmm11, 1 ; 5BB6 _ 66 41: 0F 72. D3, 01 + pxor xmm0, xmm1 ; 5BBC _ 66: 0F EF. C1 + pslld xmm10, 31 ; 5BC0 _ 66 41: 0F 72. F2, 1F + por xmm10, xmm11 ; 5BC6 _ 66 45: 0F EB. D3 + movdqa xmm11, xmm9 ; 5BCB _ 66 45: 0F 6F. D9 + pxor xmm10, xmm9 ; 5BD0 _ 66 45: 0F EF. D1 + pxor xmm10, xmm1 ; 5BD5 _ 66 44: 0F EF. D1 + pslld xmm11, 3 ; 5BDA _ 66 41: 0F 72. F3, 03 + pxor xmm0, xmm11 ; 5BE0 _ 66 41: 0F EF. C3 + movdqa xmm11, xmm9 ; 5BE5 _ 66 45: 0F 6F. D9 + pslld xmm2, 29 ; 5BEA _ 66: 0F 72. F2, 1D + psrld xmm1, 3 ; 5BEF _ 66: 0F 72. D1, 03 + por xmm2, xmm1 ; 5BF4 _ 66: 0F EB. D1 + movdqa xmm1, xmm10 ; 5BF8 _ 66 41: 0F 6F. CA + pslld xmm11, 19 ; 5BFD _ 66 41: 0F 72. F3, 13 + psrld xmm9, 13 ; 5C03 _ 66 41: 0F 72. D1, 0D + por xmm11, xmm9 ; 5C09 _ 66 45: 0F EB. D9 + pxor xmm2, xmm6 ; 5C0E _ 66: 0F EF. D6 + por xmm1, xmm11 ; 5C12 _ 66 41: 0F EB. CB + movd xmm9, dword [r12+1280H] ; 5C17 _ 66 45: 0F 6E. 8C 24, 00001280 + pxor xmm10, xmm6 ; 5C21 _ 66 44: 0F EF. D6 + pxor xmm1, xmm2 ; 5C26 _ 66: 0F EF. 
CA + por xmm2, xmm10 ; 5C2A _ 66 41: 0F EB. D2 + pxor xmm1, xmm0 ; 5C2F _ 66: 0F EF. C8 + pxor xmm11, xmm10 ; 5C33 _ 66 45: 0F EF. DA + pxor xmm2, xmm11 ; 5C38 _ 66 41: 0F EF. D3 + pand xmm11, xmm0 ; 5C3D _ 66 44: 0F DB. D8 + pxor xmm10, xmm11 ; 5C42 _ 66 45: 0F EF. D3 + por xmm11, xmm1 ; 5C47 _ 66 44: 0F EB. D9 + pxor xmm11, xmm2 ; 5C4C _ 66 44: 0F EF. DA + pxor xmm0, xmm10 ; 5C51 _ 66 41: 0F EF. C2 + pxor xmm2, xmm1 ; 5C56 _ 66: 0F EF. D1 + pxor xmm0, xmm11 ; 5C5A _ 66 41: 0F EF. C3 + pxor xmm0, xmm1 ; 5C5F _ 66: 0F EF. C1 + pand xmm2, xmm0 ; 5C63 _ 66: 0F DB. D0 + pxor xmm10, xmm2 ; 5C67 _ 66 44: 0F EF. D2 + pshufd xmm2, xmm9, 0 ; 5C6C _ 66 41: 0F 70. D1, 00 + pxor xmm11, xmm2 ; 5C72 _ 66 44: 0F EF. DA + add r13, 64 ; 5C77 _ 49: 83. C5, 40 + inc r10d ; 5C7B _ 41: FF. C2 + movd xmm9, dword [r12+1284H] ; 5C7E _ 66 45: 0F 6E. 8C 24, 00001284 + pshufd xmm2, xmm9, 0 ; 5C88 _ 66 41: 0F 70. D1, 00 + pxor xmm10, xmm2 ; 5C8E _ 66 44: 0F EF. D2 + movd xmm9, dword [r12+1288H] ; 5C93 _ 66 45: 0F 6E. 8C 24, 00001288 + pshufd xmm2, xmm9, 0 ; 5C9D _ 66 41: 0F 70. D1, 00 + movd xmm9, dword [r12+128CH] ; 5CA3 _ 66 45: 0F 6E. 8C 24, 0000128C + pxor xmm1, xmm2 ; 5CAD _ 66: 0F EF. CA + pshufd xmm2, xmm9, 0 ; 5CB1 _ 66 41: 0F 70. D1, 00 + movdqa xmm9, xmm11 ; 5CB7 _ 66 45: 0F 6F. CB + pxor xmm0, xmm2 ; 5CBC _ 66: 0F EF. C2 + movdqa xmm2, xmm1 ; 5CC0 _ 66: 0F 6F. D1 + punpckldq xmm9, xmm10 ; 5CC4 _ 66 45: 0F 62. CA + punpckhdq xmm11, xmm10 ; 5CC9 _ 66 45: 0F 6A. DA + movdqa xmm10, xmm9 ; 5CCE _ 66 45: 0F 6F. D1 + punpckldq xmm2, xmm0 ; 5CD3 _ 66: 0F 62. D0 + punpckhdq xmm1, xmm0 ; 5CD7 _ 66: 0F 6A. C8 + punpcklqdq xmm10, xmm2 ; 5CDB _ 66 44: 0F 6C. D2 + punpckhqdq xmm9, xmm2 ; 5CE0 _ 66 44: 0F 6D. CA + movdqa xmm2, xmm11 ; 5CE5 _ 66 41: 0F 6F. D3 + punpckhqdq xmm11, xmm1 ; 5CEA _ 66 44: 0F 6D. D9 + pxor xmm10, xmm8 ; 5CEF _ 66 45: 0F EF. D0 + movdqu oword [rbp], xmm10 ; 5CF4 _ F3 44: 0F 7F. 55, 00 + movdqa xmm8, xmm3 ; 5CFA _ 66 44: 0F 6F. C3 + punpcklqdq xmm2, xmm1 ; 5CFF _ 66: 0F 6C. D1 + pxor xmm9, xmm5 ; 5D03 _ 66 44: 0F EF. CD + movdqu oword [rbp+10H], xmm9 ; 5D08 _ F3 44: 0F 7F. 4D, 10 + pxor xmm2, xmm4 ; 5D0E _ 66: 0F EF. D4 + movdqu oword [rbp+20H], xmm2 ; 5D12 _ F3: 0F 7F. 55, 20 + movdqa xmm4, xmm3 ; 5D17 _ 66: 0F 6F. E3 + pxor xmm11, xmm3 ; 5D1B _ 66 44: 0F EF. DB + movdqu oword [rbp+30H], xmm11 ; 5D20 _ F3 44: 0F 7F. 5D, 30 + psllq xmm8, 1 ; 5D26 _ 66 41: 0F 73. F0, 01 + pslldq xmm4, 8 ; 5D2C _ 66: 0F 73. FC, 08 + psrldq xmm4, 7 ; 5D31 _ 66: 0F 73. DC, 07 + psrlq xmm4, 7 ; 5D36 _ 66: 0F 73. D4, 07 + por xmm8, xmm4 ; 5D3B _ 66 44: 0F EB. C4 + psraw xmm3, 8 ; 5D40 _ 66: 0F 71. E3, 08 + psrldq xmm3, 15 ; 5D45 _ 66: 0F 73. DB, 0F + pand xmm3, xmm7 ; 5D4A _ 66: 0F DB. DF + pxor xmm8, xmm3 ; 5D4E _ 66 44: 0F EF. C3 + add rbp, 64 ; 5D53 _ 48: 83. C5, 40 + cmp r10d, 8 ; 5D57 _ 41: 83. FA, 08 + jl ?_006 ; 5D5B _ 0F 8C, FFFFD217 + add r14, -512 ; 5D61 _ 49: 81. C6, FFFFFE00 + jne ?_004 ; 5D68 _ 0F 85, FFFFD1D3 + movaps xmm6, oword [rsp+70H] ; 5D6E _ 0F 28. 74 24, 70 + movaps xmm7, oword [rsp+60H] ; 5D73 _ 0F 28. 7C 24, 60 + movaps xmm8, oword [rsp+50H] ; 5D78 _ 44: 0F 28. 44 24, 50 + movaps xmm9, oword [rsp+40H] ; 5D7E _ 44: 0F 28. 4C 24, 40 + movaps xmm10, oword [rsp+30H] ; 5D84 _ 44: 0F 28. 54 24, 30 + movaps xmm11, oword [rsp+20H] ; 5D8A _ 44: 0F 28. 5C 24, 20 + add rsp, 160 ; 5D90 _ 48: 81. 
C4, 000000A0
+        pop     rbp                                     ; 5D97 _ 5D
+        pop     r12                                     ; 5D98 _ 41: 5C
+        pop     r13                                     ; 5D9A _ 41: 5D
+        pop     r14                                     ; 5D9C _ 41: 5E
+        pop     r15                                     ; 5D9E _ 41: 5F
+        ret                                             ; 5DA0 _ C3
+; xts_serpent_sse2_decrypt End of function

+; Filling space: 0FH
+; Filler type: lea with same source and destination
+;       db 48H, 8DH, 0B4H, 26H, 00H, 00H, 00H, 00H
+;       db 48H, 8DH, 0BFH, 00H, 00H, 00H, 00H
+
+ALIGN   16
+
+xts_serpent_sse2_available:; Function begin
+        push    rsi                                     ; 5DB0 _ 56
+        push    rbx                                     ; 5DB1 _ 53
+        sub     rsp, 56                                 ; 5DB2 _ 48: 83. EC, 38
+        mov     eax, 1                                  ; 5DB6 _ B8, 00000001
+        lea     rsi, [rsp+20H]                          ; 5DBB _ 48: 8D. 74 24, 20
+        cpuid                                           ; 5DC0 _ 0F A2
+        mov     dword [rsi], eax                        ; 5DC2 _ 89. 06
+        mov     dword [rsi+4H], ebx                     ; 5DC4 _ 89. 5E, 04
+        mov     dword [rsi+8H], ecx                     ; 5DC7 _ 89. 4E, 08
+        mov     dword [rsi+0CH], edx                    ; 5DCA _ 89. 56, 0C
+        mov     eax, dword [rsp+2CH]                    ; 5DCD _ 8B. 44 24, 2C
+        and     eax, 4000000H                           ; 5DD1 _ 25, 04000000
+        shr     eax, 26                                 ; 5DD6 _ C1. E8, 1A
+        add     rsp, 56                                 ; 5DD9 _ 48: 83. C4, 38
+        pop     rbx                                     ; 5DDD _ 5B
+        pop     rsi                                     ; 5DDE _ 5E
+        ret                                             ; 5DDF _ C3
+; xts_serpent_sse2_available End of function
+
+
+
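xts_serpent_sse2_available above is a plain CPUID probe: it issues leaf 1, spills EAX..EDX to a scratch buffer, and returns bit 26 of EDX, the SSE2 feature flag (the and eax, 4000000H / shr eax, 26 pair isolates exactly that bit). A minimal C sketch of the same check, assuming MSVC's __cpuid intrinsic; the helper name sse2_available_c is illustrative, not part of this patch:

    #include <intrin.h>

    /* Sketch: mirrors the assembly above. CPUID leaf 1 reports SSE2
       support in bit 26 of EDX, which __cpuid returns in regs[3]. */
    static int sse2_available_c(void)
    {
        int regs[4]; /* EAX, EBX, ECX, EDX */
        __cpuid(regs, 1);
        return (regs[3] >> 26) & 1;
    }
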
+*/ +#include "crc32.h" + +static const unsigned long crc32_tab[] = { + 0x00000000L, 0x77073096L, 0xee0e612cL, 0x990951baL, 0x076dc419L, + 0x706af48fL, 0xe963a535L, 0x9e6495a3L, 0x0edb8832L, 0x79dcb8a4L, + 0xe0d5e91eL, 0x97d2d988L, 0x09b64c2bL, 0x7eb17cbdL, 0xe7b82d07L, + 0x90bf1d91L, 0x1db71064L, 0x6ab020f2L, 0xf3b97148L, 0x84be41deL, + 0x1adad47dL, 0x6ddde4ebL, 0xf4d4b551L, 0x83d385c7L, 0x136c9856L, + 0x646ba8c0L, 0xfd62f97aL, 0x8a65c9ecL, 0x14015c4fL, 0x63066cd9L, + 0xfa0f3d63L, 0x8d080df5L, 0x3b6e20c8L, 0x4c69105eL, 0xd56041e4L, + 0xa2677172L, 0x3c03e4d1L, 0x4b04d447L, 0xd20d85fdL, 0xa50ab56bL, + 0x35b5a8faL, 0x42b2986cL, 0xdbbbc9d6L, 0xacbcf940L, 0x32d86ce3L, + 0x45df5c75L, 0xdcd60dcfL, 0xabd13d59L, 0x26d930acL, 0x51de003aL, + 0xc8d75180L, 0xbfd06116L, 0x21b4f4b5L, 0x56b3c423L, 0xcfba9599L, + 0xb8bda50fL, 0x2802b89eL, 0x5f058808L, 0xc60cd9b2L, 0xb10be924L, + 0x2f6f7c87L, 0x58684c11L, 0xc1611dabL, 0xb6662d3dL, 0x76dc4190L, + 0x01db7106L, 0x98d220bcL, 0xefd5102aL, 0x71b18589L, 0x06b6b51fL, + 0x9fbfe4a5L, 0xe8b8d433L, 0x7807c9a2L, 0x0f00f934L, 0x9609a88eL, + 0xe10e9818L, 0x7f6a0dbbL, 0x086d3d2dL, 0x91646c97L, 0xe6635c01L, + 0x6b6b51f4L, 0x1c6c6162L, 0x856530d8L, 0xf262004eL, 0x6c0695edL, + 0x1b01a57bL, 0x8208f4c1L, 0xf50fc457L, 0x65b0d9c6L, 0x12b7e950L, + 0x8bbeb8eaL, 0xfcb9887cL, 0x62dd1ddfL, 0x15da2d49L, 0x8cd37cf3L, + 0xfbd44c65L, 0x4db26158L, 0x3ab551ceL, 0xa3bc0074L, 0xd4bb30e2L, + 0x4adfa541L, 0x3dd895d7L, 0xa4d1c46dL, 0xd3d6f4fbL, 0x4369e96aL, + 0x346ed9fcL, 0xad678846L, 0xda60b8d0L, 0x44042d73L, 0x33031de5L, + 0xaa0a4c5fL, 0xdd0d7cc9L, 0x5005713cL, 0x270241aaL, 0xbe0b1010L, + 0xc90c2086L, 0x5768b525L, 0x206f85b3L, 0xb966d409L, 0xce61e49fL, + 0x5edef90eL, 0x29d9c998L, 0xb0d09822L, 0xc7d7a8b4L, 0x59b33d17L, + 0x2eb40d81L, 0xb7bd5c3bL, 0xc0ba6cadL, 0xedb88320L, 0x9abfb3b6L, + 0x03b6e20cL, 0x74b1d29aL, 0xead54739L, 0x9dd277afL, 0x04db2615L, + 0x73dc1683L, 0xe3630b12L, 0x94643b84L, 0x0d6d6a3eL, 0x7a6a5aa8L, + 0xe40ecf0bL, 0x9309ff9dL, 0x0a00ae27L, 0x7d079eb1L, 0xf00f9344L, + 0x8708a3d2L, 0x1e01f268L, 0x6906c2feL, 0xf762575dL, 0x806567cbL, + 0x196c3671L, 0x6e6b06e7L, 0xfed41b76L, 0x89d32be0L, 0x10da7a5aL, + 0x67dd4accL, 0xf9b9df6fL, 0x8ebeeff9L, 0x17b7be43L, 0x60b08ed5L, + 0xd6d6a3e8L, 0xa1d1937eL, 0x38d8c2c4L, 0x4fdff252L, 0xd1bb67f1L, + 0xa6bc5767L, 0x3fb506ddL, 0x48b2364bL, 0xd80d2bdaL, 0xaf0a1b4cL, + 0x36034af6L, 0x41047a60L, 0xdf60efc3L, 0xa867df55L, 0x316e8eefL, + 0x4669be79L, 0xcb61b38cL, 0xbc66831aL, 0x256fd2a0L, 0x5268e236L, + 0xcc0c7795L, 0xbb0b4703L, 0x220216b9L, 0x5505262fL, 0xc5ba3bbeL, + 0xb2bd0b28L, 0x2bb45a92L, 0x5cb36a04L, 0xc2d7ffa7L, 0xb5d0cf31L, + 0x2cd99e8bL, 0x5bdeae1dL, 0x9b64c2b0L, 0xec63f226L, 0x756aa39cL, + 0x026d930aL, 0x9c0906a9L, 0xeb0e363fL, 0x72076785L, 0x05005713L, + 0x95bf4a82L, 0xe2b87a14L, 0x7bb12baeL, 0x0cb61b38L, 0x92d28e9bL, + 0xe5d5be0dL, 0x7cdcefb7L, 0x0bdbdf21L, 0x86d3d2d4L, 0xf1d4e242L, + 0x68ddb3f8L, 0x1fda836eL, 0x81be16cdL, 0xf6b9265bL, 0x6fb077e1L, + 0x18b74777L, 0x88085ae6L, 0xff0f6a70L, 0x66063bcaL, 0x11010b5cL, + 0x8f659effL, 0xf862ae69L, 0x616bffd3L, 0x166ccf45L, 0xa00ae278L, + 0xd70dd2eeL, 0x4e048354L, 0x3903b3c2L, 0xa7672661L, 0xd06016f7L, + 0x4969474dL, 0x3e6e77dbL, 0xaed16a4aL, 0xd9d65adcL, 0x40df0b66L, + 0x37d83bf0L, 0xa9bcae53L, 0xdebb9ec5L, 0x47b2cf7fL, 0x30b5ffe9L, + 0xbdbdf21cL, 0xcabac28aL, 0x53b39330L, 0x24b4a3a6L, 0xbad03605L, + 0xcdd70693L, 0x54de5729L, 0x23d967bfL, 0xb3667a2eL, 0xc4614ab8L, + 0x5d681b02L, 0x2a6f2b94L, 0xb40bbe37L, 0xc30c8ea1L, 0x5a05df1bL, + 0x2d02ef8dL +}; + +unsigned long _stdcall crc32(const unsigned 
char *p, unsigned long len) +{ + unsigned long i; + unsigned long crc = 0xFFFFFFFF; + + for (i = 0; i < len; i++) { + crc = crc32_tab[(unsigned char)(crc ^ p[i])] ^ (crc >> 8); + } + return ~crc; +} diff --git a/ImBoxEnclave/crypto_fast/crc32.h b/ImBoxEnclave/crypto_fast/crc32.h new file mode 100644 index 0000000..40d99a1 --- /dev/null +++ b/ImBoxEnclave/crypto_fast/crc32.h @@ -0,0 +1,6 @@ +#ifndef _CRC32_H_ +#define _CRC32_H_ + +unsigned long _stdcall crc32(const unsigned char *p, unsigned long len); + +#endif \ No newline at end of file diff --git a/ImBoxEnclave/crypto_fast/i386/aes_i386.asm b/ImBoxEnclave/crypto_fast/i386/aes_i386.asm new file mode 100644 index 0000000..0c4c641 --- /dev/null +++ b/ImBoxEnclave/crypto_fast/i386/aes_i386.asm @@ -0,0 +1,368 @@ + +; --------------------------------------------------------------------------- +; Copyright (c) 1998-2008, Brian Gladman, Worcester, UK. All rights reserved. +; Copyright (c) 2010, ntldr PGP key ID - 0x1B6A24550F33E44A +; +; LICENSE TERMS +; +; The redistribution and use of this software (with or without changes) +; is allowed without the payment of fees or royalties provided that: +; +; 1. source code distributions include the above copyright notice, this +; list of conditions and the following disclaimer; +; +; 2. binary distributions include the above copyright notice, this list +; of conditions and the following disclaimer in their documentation; +; +; 3. the name of the copyright holder is not used to endorse products +; built using this software without specific written permission. +; +; DISCLAIMER +; +; This software is provided 'as is' with no explicit or implied warranties +; in respect of its properties, including, but not limited to, correctness +; and/or fitness for purpose. +; --------------------------------------------------------------------------- +; offsets to parameters +in_blk equ 4 ; input byte array address parameter +out_blk equ 8 ; output byte array address parameter +stk_spc equ 16 ; stack space +parms equ 12 ; parameter space on stack + +extern _Te0, _Te1, _Te2, _Te3, _Te4_0, _Te4_1, _Te4_2, _Te4_3 +extern _Td0, _Td1, _Td2, _Td3, _Td4_0, _Td4_1, _Td4_2, _Td4_3 +extern _aes256_set_key@8 + +global _aes256_asm_set_key@8 +global _aes256_asm_encrypt@12 +global _aes256_asm_decrypt@12 + +; ROUND FUNCTION. Build column[2] on ESI and column[3] on EDI that have the +; round keys pre-loaded. Build column[0] in EBP and column[1] in EBX. 
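+; A note on the lookup tables referenced by the macros below: the normal
+; rounds index the _Te0.._Te3 tables through nr_xor/nr_mov, the last round
+; indexes the _Te4_0.._Te4_3 tables through lr_xor/lr_mov, and the
+; _Td0.._Td3/_Td4_0.._Td4_3 tables fill the same roles for decryption.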
+; +; Input: +; +; EAX column[0] +; EBX column[1] +; ECX column[2] +; EDX column[3] +; ESI column key[round][2] +; EDI column key[round][3] +; EBP scratch +; +; Output: +; +; EBP column[0] unkeyed +; EBX column[1] unkeyed +; ESI column[2] keyed +; EDI column[3] keyed +; EAX scratch +; ECX scratch +; EDX scratch +%macro rnd_fun 2 + rol ebx,16 + %1 esi, cl, 0, ebp + %1 esi, dh, 1, ebp + %1 esi, bh, 3, ebp + %1 edi, dl, 0, ebp + %1 edi, ah, 1, ebp + %1 edi, bl, 2, ebp + %2 ebp, al, 0, ebp + shr ebx,16 + and eax,0xffff0000 + or eax,ebx + shr edx,16 + %1 ebp, ah, 1, ebx + %1 ebp, dh, 3, ebx + %2 ebx, dl, 2, ebx + %1 ebx, ch, 1, edx + %1 ebx, al, 0, edx + shr eax,16 + shr ecx,16 + %1 ebp, cl, 2, edx + %1 edi, ch, 3, edx + %1 esi, al, 2, edx + %1 ebx, ah, 3, edx +%endmacro + +; Basic MOV and XOR Operations for normal rounds +%macro nr_xor 4 + movzx %4, %2 + xor %1, [_Te%3+4*%4] +%endmacro + +%macro nr_mov 4 + movzx %4, %2 + mov %1, [_Te%3+4*%4] +%endmacro + +; Basic MOV and XOR Operations for last round +%macro lr_xor 4 + movzx %4, %2 + xor %1, [_Te4_%3+4*%4] +%endmacro + +%macro lr_mov 4 + movzx %4, %2 + mov %1, [_Te4_%3+4*%4] +%endmacro + +%macro enc_round 4 + mov esi, %3 + mov edi, %4 + + rnd_fun nr_xor, nr_mov + + mov eax, ebp + mov ecx, esi + mov edx, edi + xor eax, %1 + xor ebx, %2 +%endmacro + +%macro enc_last_round 4 + mov esi, %3 + mov edi, %4 + + rnd_fun lr_xor, lr_mov + + mov eax, ebp + xor eax, %1 + xor ebx, %2 +%endmacro + +%macro irn_fun 2 + rol eax,16 + %1 esi, cl, 0, ebp + %1 esi, bh, 1, ebp + %1 esi, al, 2, ebp + %1 edi, dl, 0, ebp + %1 edi, ch, 1, ebp + %1 edi, ah, 3, ebp + %2 ebp, bl, 0, ebp + shr eax,16 + and ebx,0xffff0000 + or ebx,eax + shr ecx,16 + %1 ebp, bh, 1, eax + %1 ebp, ch, 3, eax + %2 eax, cl, 2, ecx + %1 eax, bl, 0, ecx + %1 eax, dh, 1, ecx + shr ebx,16 + shr edx,16 + %1 esi, dh, 3, ecx + %1 ebp, dl, 2, ecx + %1 eax, bh, 3, ecx + %1 edi, bl, 2, ecx +%endmacro + +; Basic MOV and XOR Operations for normal rounds +%macro ni_xor 4 + movzx %4, %2 + xor %1, [_Td%3+4*%4] +%endmacro + +%macro ni_mov 4 + movzx %4, %2 + mov %1, [_Td%3+4*%4] +%endmacro + +; Basic MOV and XOR Operations for last round +%macro li_xor 4 + movzx %4, %2 + xor %1, [_Td4_%3+4*%4] +%endmacro + +%macro li_mov 4 + movzx %4, %2 + mov %1, [_Td4_%3+4*%4] +%endmacro + +%macro dec_round 4 + mov esi, %3 + mov edi, %4 + + irn_fun ni_xor, ni_mov + + mov ebx, ebp + mov ecx, esi + mov edx, edi + xor eax, %1 + xor ebx, %2 +%endmacro + +%macro dec_last_round 4 + mov esi, %3 + mov edi, %4 + + irn_fun li_xor, li_mov + + mov ebx, ebp + xor eax, %1 + xor ebx, %2 +%endmacro + +%assign i 0 +%rep 60 + RK_ %+ i equ (12340000h | i) +%assign i i+1 +%endrep + + + section .text align=32 + +; AES Encryption Subroutine +aes256_encrypt_code: + sub esp, stk_spc + mov [esp+12], ebp + mov [esp+ 8], ebx + mov [esp+ 4], esi + mov [esp+ 0], edi + + mov esi, [esp+in_blk+stk_spc] ; input pointer + mov eax, [esi+ 0] + mov ebx, [esi+ 4] + mov ecx, [esi+ 8] + mov edx, [esi+12] + + xor eax, RK_0 + xor ebx, RK_1 + xor ecx, RK_2 + xor edx, RK_3 + + enc_round RK_4, RK_5, RK_6, RK_7 + enc_round RK_8, RK_9, RK_10, RK_11 + enc_round RK_12, RK_13, RK_14, RK_15 + enc_round RK_16, RK_17, RK_18, RK_19 + enc_round RK_20, RK_21, RK_22, RK_23 + enc_round RK_24, RK_25, RK_26, RK_27 + enc_round RK_28, RK_29, RK_30, RK_31 + enc_round RK_32, RK_33, RK_34, RK_35 + enc_round RK_36, RK_37, RK_38, RK_39 + enc_round RK_40, RK_41, RK_42, RK_43 + enc_round RK_44, RK_45, RK_46, RK_47 + enc_round RK_48, RK_49, RK_50, RK_51 + enc_round RK_52, RK_53, RK_54, RK_55 + 
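+    ; the RK_n operands above and below are 1234xxxxh placeholders (see the
+    ; %rep block earlier); _aes256_asm_set_key@8 copies this routine into
+    ; the key schedule buffer and aes256_patch_code then rewrites every
+    ; placeholder with the matching round key dword before first use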
enc_last_round RK_56, RK_57, RK_58, RK_59 + + mov edx, [esp+out_blk+stk_spc] + mov [edx+ 0], eax + mov [edx+ 4], ebx + mov [edx+ 8], esi + mov [edx+12], edi + + mov ebp, [esp+12] + mov ebx, [esp+ 8] + mov esi, [esp+ 4] + mov edi, [esp+ 0] + add esp, stk_spc + retn 0Ch +aes256_encrypt_size equ $-aes256_encrypt_code + +aes256_decrypt_code: + sub esp, stk_spc + mov [esp+12],ebp + mov [esp+ 8],ebx + mov [esp+ 4],esi + mov [esp+ 0],edi + + ; input four columns and xor in first round key + mov esi,[esp+in_blk+stk_spc] ; input pointer + mov eax,[esi ] + mov ebx,[esi+ 4] + mov ecx,[esi+ 8] + mov edx,[esi+12] + + xor eax, RK_0 + xor ebx, RK_1 + xor ecx, RK_2 + xor edx, RK_3 + + dec_round RK_4, RK_5, RK_6, RK_7 + dec_round RK_8, RK_9, RK_10, RK_11 + dec_round RK_12, RK_13, RK_14, RK_15 + dec_round RK_16, RK_17, RK_18, RK_19 + dec_round RK_20, RK_21, RK_22, RK_23 + dec_round RK_24, RK_25, RK_26, RK_27 + dec_round RK_28, RK_29, RK_30, RK_31 + dec_round RK_32, RK_33, RK_34, RK_35 + dec_round RK_36, RK_37, RK_38, RK_39 + dec_round RK_40, RK_41, RK_42, RK_43 + dec_round RK_44, RK_45, RK_46, RK_47 + dec_round RK_48, RK_49, RK_50, RK_51 + dec_round RK_52, RK_53, RK_54, RK_55 + dec_last_round RK_56, RK_57, RK_58, RK_59 + + ; move final values to the output array. + mov ebp,[esp+out_blk+stk_spc] + mov [ebp],eax + mov [ebp+4],ebx + mov [ebp+8],esi + mov [ebp+12],edi + + mov ebp,[esp+12] + mov ebx,[esp+ 8] + mov esi,[esp+ 4] + mov edi,[esp+ 0] + add esp,stk_spc + retn 0Ch +aes256_decrypt_size equ $-aes256_decrypt_code + +align 32 +_aes256_asm_encrypt@12: + mov eax, [esp+12] ; key + add eax, 480 ; ek_code + jmp eax + +align 32 +_aes256_asm_decrypt@12: + mov eax, [esp+12] ; key + add eax, 3552 ; dk_code + jmp eax + +aes256_patch_code: ; ebp - round keys, ebx - code buff, ecx - code size + pushad + sub ecx, 4 +patch_loop: + mov eax, [ebx] + mov edx, eax + shr edx, 16 + cmp edx, 1234h + jnz no_patch + movzx edx, ax + mov eax, [ebp+edx*4] + mov [ebx], eax +no_patch: + inc ebx + loop patch_loop + popad + retn + +_aes256_asm_set_key@8: + pushad + mov ebp, [esp+28h] ; skey + mov eax, [esp+24h] ; key + push ebp + push eax + call _aes256_set_key@8 + lea ebx, [ebp+480] ; ek_code + mov esi, aes256_encrypt_code + mov edi, ebx + mov ecx, aes256_encrypt_size + push ecx + rep movsb + pop ecx + call aes256_patch_code + lea ebx, [ebp+3552] ; dk_code + add ebp, 240 ; dec_key + mov esi, aes256_decrypt_code + mov edi, ebx + mov ecx, aes256_decrypt_size + push ecx + rep movsb + pop ecx + call aes256_patch_code + popad + retn 08h + diff --git a/ImBoxEnclave/crypto_fast/i386/aes_padlock_i386.asm b/ImBoxEnclave/crypto_fast/i386/aes_padlock_i386.asm new file mode 100644 index 0000000..c99e69b --- /dev/null +++ b/ImBoxEnclave/crypto_fast/i386/aes_padlock_i386.asm @@ -0,0 +1,93 @@ +; +; * +; * Copyright (c) 2009-2010 +; * ntldr PGP key ID - 0x1B6A24550F33E44A +; * +; This program is free software: you can redistribute it and/or modify +; it under the terms of the GNU General Public License version 3 as +; published by the Free Software Foundation. +; +; This program is distributed in the hope that it will be useful, +; but WITHOUT ANY WARRANTY; without even the implied warranty of +; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +; GNU General Public License for more details. +; +; You should have received a copy of the GNU General Public License +; along with this program. If not, see . 
+;
+
+%define NEH_LOAD_KEY 00000080h ; load schedule from memory
+%define NEH_ENCRYPT 00000000h ; encryption
+%define NEH_DECRYPT 00000200h ; decryption
+%define NEH_KEY128 00000000h+0ah ; 128 bit key
+%define NEH_KEY192 00000400h+0ch ; 192 bit key
+%define NEH_KEY256 00000800h+0eh ; 256 bit key
+%define NEH_ENC_LOAD (NEH_ENCRYPT | NEH_LOAD_KEY)
+%define NEH_DEC_LOAD (NEH_DECRYPT | NEH_LOAD_KEY)
+
+align 16
+enc_cwd dd (NEH_ENC_LOAD | NEH_KEY256), 0, 0
+align 16
+dec_cwd dd (NEH_DEC_LOAD | NEH_KEY256), 0, 0
+
+global _aes256_padlock_available@0
+global _aes256_padlock_encrypt@16
+global _aes256_padlock_decrypt@16
+
+_aes256_padlock_available@0:
+    push ebx
+    ; test for VIA CPU
+    mov eax, 0C0000000h
+    cpuid
+    cmp eax, 0C0000001h
+    jb no_ace
+    ; read VIA flags
+    mov eax, 0C0000001h
+    cpuid
+    and edx, 0C0h ; ACE_MASK,CPUID EDX code for ACE
+    cmp edx, 0C0h ; ACE_MASK,CPUID EDX code for ACE
+    jnz no_ace
+    ; ACE present
+    xor eax, eax
+    inc eax
+    jmp end_ace
+no_ace:
+    xor eax, eax
+end_ace:
+    pop ebx
+    ret
+
+align 16
+_aes256_padlock_encrypt@16:
+    push ebx
+    push esi
+    push edi
+    mov esi, [esp+10h] ; in
+    mov edi, [esp+14h] ; out
+    mov ecx, [esp+18h] ; n_blocks
+    mov ebx, [esp+1Ch] ; key
+    mov edx, enc_cwd
+    xcryptecb
+    pop edi
+    pop esi
+    pop ebx
+    retn 10h
+
+align 16
+_aes256_padlock_decrypt@16:
+    push ebx
+    push esi
+    push edi
+    mov esi, [esp+10h] ; in
+    mov edi, [esp+14h] ; out
+    mov ecx, [esp+18h] ; n_blocks
+    mov ebx, [esp+1Ch] ; key
+    add ebx, 4*15*4
+    mov edx, dec_cwd
+    xcryptecb
+    pop edi
+    pop esi
+    pop ebx
+    retn 10h
+
+
diff --git a/ImBoxEnclave/crypto_fast/i386/twofish_i386.asm b/ImBoxEnclave/crypto_fast/i386/twofish_i386.asm
new file mode 100644
index 0000000..afb0558
--- /dev/null
+++ b/ImBoxEnclave/crypto_fast/i386/twofish_i386.asm
@@ -0,0 +1,321 @@
+;***************************************************************************
+;* Copyright (C) 2006 by Joachim Fritschi, *
+;* adapted for DiskCryptor by ntldr *
+;* PGP key ID - 0x1B6A24550F33E44A *
+;* *
+;* This program is free software; you can redistribute it and/or modify *
+;* it under the terms of the GNU General Public License as published by *
+;* the Free Software Foundation; either version 2 of the License, or *
+;* (at your option) any later version. *
+;* *
+;* This program is distributed in the hope that it will be useful, *
+;* but WITHOUT ANY WARRANTY; without even the implied warranty of *
+;* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
+;* GNU General Public License for more details. *
+;* *
+;* You should have received a copy of the GNU General Public License *
+;* along with this program; if not, write to the *
+;* Free Software Foundation, Inc., *
+;* 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *
+;***************************************************************************
+
+%define in_blk 4 ; input byte array address parameter
+%define out_blk 8 ; output byte array address parameter
+%define tfm 12 ; Twofish context structure
+
+%define a_offset 0
+%define b_offset 4
+%define c_offset 8
+%define d_offset 12
+
+; Structure of the crypto context struct
+%define s0 0 ; S0 Array 256 Words each
+%define s1 1024 ; S1 Array
+%define s2 2048 ; S2 Array
+%define s3 3072 ; S3 Array
+%define w 4096 ; 8 whitening keys (word)
+%define k 4128 ; key 1-32 ( word )
+
+; define a few register aliases to allow macro substitution
+%define R0D eax
+%define R0B al
+%define R0H ah
+
+%define R1D ebx
+%define R1B bl
+%define R1H bh
+
+%define R2D ecx
+%define R2B cl
+%define R2H ch
+
+%define R3D edx
+%define R3B dl
+%define R3H dh
+
+; performs input whitening
+%macro input_whitening 3
+    xor %1, [w+(%2)+%3]
+%endmacro
+
+; performs output whitening
+%macro output_whitening 3
+    xor %1, [w+16+(%2)+%3]
+%endmacro
+
+;
+; * a input register containing a (rotated 16)
+; * b input register containing b
+; * c input register containing c
+; * d input register containing d (already rol $1)
+; * operations on a and b are interleaved to increase performance
+
+%macro encrypt_round 5
+    push %4D
+    movzx edi, %2B
+    mov %4D, [ebp+edi*4+s1]
+    movzx edi, %1B
+    mov esi, [ebp+edi*4+s2]
+    movzx edi, %2H
+    ror %2D, 16
+    xor %4D, [ebp+edi*4+s2]
+    movzx edi, %1H
+    ror %1D, 16
+    xor esi, [ebp+edi*4+s3]
+    movzx edi, %2B
+    xor %4D, [ebp+edi*4+s3]
+    movzx edi, %1B
+    xor esi, [ebp+edi*4]
+    movzx edi, %2H
+    ror %2D, 15
+    xor %4D, [ebp+edi*4]
+    movzx edi, %1H
+    xor esi, [ebp+edi*4+s1]
+    pop edi
+    add esi, %4D
+    add %4D, esi
+    add esi, [ebp+k+%5]
+    xor %3D, esi
+    rol %3D, 15
+    add %4D, [ebp+k+4+%5]
+    xor %4D, edi
+%endmacro
+
+; * a input register containing a (rotated 16)
+; * b input register containing b
+; * c input register containing c
+; * d input register containing d (already rol $1)
+; * operations on a and b are interleaved to increase performance
+; * last round has different rotations for the output preparation
+%macro encrypt_last_round 5
+    push %4D
+    movzx edi, %2B
+    mov %4D, [ebp+edi*4+s1]
+    movzx edi, %1B
+    mov esi, [ebp+edi*4+s2]
+    movzx edi, %2H
+    ror %2D, 16
+    xor %4D, [ebp+edi*4+s2]
+    movzx edi, %1H
+    ror %1D, 16
+    xor esi, [ebp+edi*4+s3]
+    movzx edi, %2B
+    xor %4D, [ebp+edi*4+s3]
+    movzx edi, %1B
+    xor esi, [ebp+edi*4]
+    movzx edi, %2H
+    ror %2D, 16
+    xor %4D, [ebp+edi*4]
+    movzx edi, %1H
+    xor esi, [ebp+edi*4+s1]
+    pop edi
+    add esi, %4D
+    add %4D, esi
+    add esi, [ebp+k+%5]
+    xor %3D, esi
+    ror %3D, 1
+    add %4D, [ebp+k+4+%5]
+    xor %4D, edi
+%endmacro
+
+; * a input register containing a
+; * b input register containing b (rotated 16)
+; * c input register containing c
+; * d input register containing d (already rol $1)
+; * operations on a and b are interleaved to increase performance
+%macro decrypt_round 5
+    push %3D
+    movzx edi, %1B
+    mov %3D, [ebp+edi*4]
+    movzx edi, %2B
+    mov esi, [ebp+edi*4+s3]
+    movzx edi, %1H
+    ror %1D, 16
+    xor %3D, [ebp+edi*4+s1]
+    movzx edi, %2H
+    ror %2D, 16
+    xor esi, [ebp+edi*4]
+    movzx edi, %1B
+    xor %3D, [ebp+edi*4+s2]
+    movzx edi, %2B
+    xor esi, [ebp+edi*4+s1]
+    movzx edi, %1H
+    ror %1D, 15
+    xor %3D, [ebp+edi*4+s3]
+    movzx edi, %2H
+    xor esi, [ebp+edi*4+s2]
+    pop edi
+    add %3D, esi
+    add esi, %3D
+    add %3D, [ebp+k+%5]
+    xor %3D, edi
+    add esi, [ebp+k+4+%5]
+    xor %4D, esi
+    rol %4D, 15
+%endmacro
+
+; * a input register containing a
+; * b input register containing b
(rotated 16) +; * c input register containing c +; * d input register containing d (already rol $1) +; * operations on a and b are interleaved to increase performance +; * last round has different rotations for the output preparation +%macro decrypt_last_round 5 + push %3D + movzx edi, %1B + mov %3D, [ebp+edi*4] + movzx edi, %2B + mov esi, [ebp+edi*4+s3] + movzx edi, %1H + ror %1D, 16 + xor %3D, [ebp+edi*4+s1] + movzx edi, %2H + ror %2D, 16 + xor esi, [ebp+edi*4] + movzx edi, %1B + xor %3D, [ebp+edi*4+s2] + movzx edi, %2B + xor esi, [ebp+edi*4+s1] + movzx edi, %1H + ror %1D, 16 + xor %3D, [ebp+edi*4+s3] + movzx edi, %2H + xor esi, [ebp+edi*4+s2] + pop edi + add %3D, esi + add esi, %3D + add %3D, [ebp+k+%5] + xor %3D, edi + add esi, [ebp+k+4+%5] + xor %4D, esi + ror %4D, 1 +%endmacro + +global _twofish256_encrypt@12 +global _twofish256_decrypt@12 + +_twofish256_encrypt@12: + push ebp ; save registers according to calling convention + push ebx + push esi + push edi + mov ebp, [tfm + 16+esp] ; abuse the base pointer: set new base pointer to the crypto tfm + mov edi, [in_blk+16+esp] ; input address in edi + + mov eax, [edi] + mov ebx, [b_offset+edi] + mov ecx, [c_offset+edi] + mov edx, [d_offset+edi] + input_whitening eax, ebp, a_offset + ror eax, 16 + input_whitening ebx, ebp, b_offset + input_whitening ecx, ebp, c_offset + input_whitening edx, ebp, d_offset + rol edx, 1 + + encrypt_round R0,R1,R2,R3,0 + encrypt_round R2,R3,R0,R1,8 + encrypt_round R0,R1,R2,R3,2*8 + encrypt_round R2,R3,R0,R1,3*8 + encrypt_round R0,R1,R2,R3,4*8 + encrypt_round R2,R3,R0,R1,5*8 + encrypt_round R0,R1,R2,R3,6*8 + encrypt_round R2,R3,R0,R1,7*8 + encrypt_round R0,R1,R2,R3,8*8 + encrypt_round R2,R3,R0,R1,9*8 + encrypt_round R0,R1,R2,R3,10*8 + encrypt_round R2,R3,R0,R1,11*8 + encrypt_round R0,R1,R2,R3,12*8 + encrypt_round R2,R3,R0,R1,13*8 + encrypt_round R0,R1,R2,R3,14*8 + encrypt_last_round R2,R3,R0,R1,15*8 + + output_whitening eax, ebp, c_offset + output_whitening ebx, ebp, d_offset + output_whitening ecx, ebp, a_offset + output_whitening edx, ebp, b_offset + mov edi, [out_blk+16+esp] + mov [c_offset+edi], eax + mov [d_offset+edi], ebx + mov [edi], ecx + mov [b_offset+edi], edx + pop edi + pop esi + pop ebx + pop ebp + retn 0Ch + + +_twofish256_decrypt@12: + push ebp ; save registers according to calling convention*/ + push ebx + push esi + push edi + + + mov ebp, [tfm + 16+esp] ; abuse the base pointer: set new base pointer to the crypto tfm + mov edi, [in_blk + 16+esp] ; input address in edi + + mov eax, [edi] + mov ebx, [b_offset+edi] + mov ecx, [c_offset+edi] + mov edx, [d_offset+edi] + output_whitening eax, ebp, a_offset + output_whitening ebx, ebp, b_offset + ror ebx, 16 + output_whitening ecx, ebp, c_offset + output_whitening edx, ebp, d_offset + rol ecx, 1 + + decrypt_round R0,R1,R2,R3,15*8 + decrypt_round R2,R3,R0,R1,14*8 + decrypt_round R0,R1,R2,R3,13*8 + decrypt_round R2,R3,R0,R1,12*8 + decrypt_round R0,R1,R2,R3,11*8 + decrypt_round R2,R3,R0,R1,10*8 + decrypt_round R0,R1,R2,R3,9*8 + decrypt_round R2,R3,R0,R1,8*8 + decrypt_round R0,R1,R2,R3,7*8 + decrypt_round R2,R3,R0,R1,6*8 + decrypt_round R0,R1,R2,R3,5*8 + decrypt_round R2,R3,R0,R1,4*8 + decrypt_round R0,R1,R2,R3,3*8 + decrypt_round R2,R3,R0,R1,2*8 + decrypt_round R0,R1,R2,R3,1*8 + decrypt_last_round R2,R3,R0,R1,0 + + input_whitening eax, ebp, c_offset + input_whitening ebx, ebp, d_offset + input_whitening ecx, ebp, a_offset + input_whitening edx, ebp, b_offset + mov edi, [out_blk + 16+esp] + mov [c_offset+edi], eax + mov [d_offset+edi], ebx + mov [edi], 
ecx + mov [b_offset+edi], edx + + pop edi + pop esi + pop ebx + pop ebp + retn 0Ch diff --git a/ImBoxEnclave/crypto_fast/i386/xts_aes_ni_i386.asm b/ImBoxEnclave/crypto_fast/i386/xts_aes_ni_i386.asm new file mode 100644 index 0000000..6020dba --- /dev/null +++ b/ImBoxEnclave/crypto_fast/i386/xts_aes_ni_i386.asm @@ -0,0 +1,206 @@ +; +; * +; * Copyright (c) 2010 +; * ntldr PGP key ID - 0x1B6A24550F33E44A +; * +; This program is free software: you can redistribute it and/or modify +; it under the terms of the GNU General Public License version 3 as +; published by the Free Software Foundation. +; +; This program is distributed in the hope that it will be useful, +; but WITHOUT ANY WARRANTY; without even the implied warranty of +; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +; GNU General Public License for more details. +; +; You should have received a copy of the GNU General Public License +; along with this program. If not, see . +; + +%macro aesxor_2 4 ; B0, B1, key, round + movdqa tt, [%3+(%4*10h)] + pxor %1, tt + pxor %2, tt +%endmacro + +%macro aesenc_2 4 ; B0, B1, key, round + movdqa tt, [%3+(%4*10h)] + aesenc %1, tt + aesenc %2, tt +%endmacro + +%macro aesdec_2 4 ; B0, B1, key, round + movdqa tt, [%3+(%4*10h)] + aesdec %1, tt + aesdec %2, tt +%endmacro + +%macro aesenclast_2 4 ; B0, B1, key, round + movdqa tt, [%3+(%4*10h)] + aesenclast %1, tt + aesenclast %2, tt +%endmacro + +%macro aesdeclast_2 4 ; B0, B1, key, round + movdqa tt, [%3+(%4*10h)] + aesdeclast %1, tt + aesdeclast %2, tt +%endmacro + +%macro aes_encrypt_1 2 ; XMMn, key + pxor %1, [%2] + aesenc %1, [%2+010h] + aesenc %1, [%2+020h] + aesenc %1, [%2+030h] + aesenc %1, [%2+040h] + aesenc %1, [%2+050h] + aesenc %1, [%2+060h] + aesenc %1, [%2+070h] + aesenc %1, [%2+080h] + aesenc %1, [%2+090h] + aesenc %1, [%2+0A0h] + aesenc %1, [%2+0B0h] + aesenc %1, [%2+0C0h] + aesenc %1, [%2+0D0h] + aesenclast %1, [%2+0E0h] +%endmacro + +%macro aes_encrypt_2 3 ; B0, B1, key + aesxor_2 %1, %2, %3, 0 + aesenc_2 %1, %2, %3, 1 + aesenc_2 %1, %2, %3, 2 + aesenc_2 %1, %2, %3, 3 + aesenc_2 %1, %2, %3, 4 + aesenc_2 %1, %2, %3, 5 + aesenc_2 %1, %2, %3, 6 + aesenc_2 %1, %2, %3, 7 + aesenc_2 %1, %2, %3, 8 + aesenc_2 %1, %2, %3, 9 + aesenc_2 %1, %2, %3, 10 + aesenc_2 %1, %2, %3, 11 + aesenc_2 %1, %2, %3, 12 + aesenc_2 %1, %2, %3, 13 + aesenclast_2 %1, %2, %3, 14 +%endmacro + +%macro aes_decrypt_2 3 ; B0, B1, key + aesxor_2 %1, %2, %3, 0 + aesdec_2 %1, %2, %3, 1 + aesdec_2 %1, %2, %3, 2 + aesdec_2 %1, %2, %3, 3 + aesdec_2 %1, %2, %3, 4 + aesdec_2 %1, %2, %3, 5 + aesdec_2 %1, %2, %3, 6 + aesdec_2 %1, %2, %3, 7 + aesdec_2 %1, %2, %3, 8 + aesdec_2 %1, %2, %3, 9 + aesdec_2 %1, %2, %3, 10 + aesdec_2 %1, %2, %3, 11 + aesdec_2 %1, %2, %3, 12 + aesdec_2 %1, %2, %3, 13 + aesdeclast_2 %1, %2, %3, 14 +%endmacro + +%macro next_tweak 2 ; new, old + movdqa tt, %2 + psraw tt, 8 + psrldq tt, 15 + pand tt, POLY + movdqa t2, %2 + pslldq t2, 8 + psrldq t2, 7 + psrlq t2, 7 + movdqa %1, %2 + psllq %1, 1 + por %1, t2 + pxor %1, tt +%endmacro + +%macro aes_xts_process 2 + push esi + push edi + ; load XTS tweak polynomial + mov eax, 135 + movd POLY, eax + mov eax, [esp+1Ch] ; + shrd [esp+18h], eax, 9 ; idx.a = offset / XTS_SECTOR_SIZE + shr eax, 9 ; + mov [esp+1Ch], eax ; + mov esi, [esp+0Ch] ; esi = in + mov edi, [esp+10h] ; edi = out + mov eax, [esp+20h] ; eax = crypt key + lea edx, [eax+tweak_k] ; edx = tweak key +%if %2 != 0 + add eax, %2 ; eax = decryption key +%endif +%%xts_loop: + add dword [esp+18h], 1 ; idx.a++ + adc dword [esp+1Ch], 0 ; + movq T0, 
[esp+18h] + aes_encrypt_1 T0, edx + mov ecx, 16 ; ecx = XTS_BLOCKS_IN_SECTOR +%%blocks_loop: + next_tweak T1, T0 + ; load two blocks + movdqu B0, [esi+00h] + movdqu B1, [esi+10h] + ; input tweak + pxor B0, T0 + pxor B1, T1 + ; encrypt / decrypt + %1 B0, B1, eax + ; output tweak + pxor B0, T0 + pxor B1, T1 + ; save two blocks + movdqu [edi+00h], B0 + movdqu [edi+10h], B1 + add esi, 32 ; in += XTS_BLOCK_SIZE*2 + add edi, 32 ; out += XTS_BLOCK_SIZE*2 + dec ecx + jz %%block_done + next_tweak T0, T1 + jmp %%blocks_loop +%%block_done: + sub dword [esp+14h], 512 ; len -= XTS_SECTOR_SIZE + jnz %%xts_loop + pop edi + pop esi + retn 18h +%endmacro + +; ========================================= + +%define B0 xmm0 +%define B1 xmm1 + +%define T0 xmm2 +%define T1 xmm3 + +%define tt xmm4 +%define t2 xmm5 +%define POLY xmm6 + +%define tweak_k 11408 +%define enc_key 0 +%define dec_key 4*15*4 + +; ========================================= + +global _xts_aes_ni_encrypt@24 +global _xts_aes_ni_decrypt@24 + + +align 16 +_xts_aes_ni_encrypt@24: + aes_xts_process aes_encrypt_2, enc_key + +align 16 +_xts_aes_ni_decrypt@24: + aes_xts_process aes_decrypt_2, dec_key + + + + + + + diff --git a/ImBoxEnclave/crypto_fast/i386/xts_serpent_avx_i386.asm b/ImBoxEnclave/crypto_fast/i386/xts_serpent_avx_i386.asm new file mode 100644 index 0000000..9c479e1 --- /dev/null +++ b/ImBoxEnclave/crypto_fast/i386/xts_serpent_avx_i386.asm @@ -0,0 +1,4022 @@ +; this code compiled with Intel C++ Compiler Version 12.0.0.063 +; +; Disassembly of file: xts_serpent_sse2.obj +; Thu May 19 19:04:44 2011 +; Mode: 32 bits +; Syntax: YASM/NASM +; Instruction set: AVX + + +global _xts_serpent_avx_encrypt@24 +global _xts_serpent_avx_decrypt@24 +global _xts_serpent_avx_available@0 + +extern _serpent256_encrypt@12 ; near + +SECTION .text align=16 execute ; section number 2, code +; Communal section not supported by YASM + +_xts_serpent_avx_encrypt@24:; Function begin + push ebp ; 0000 _ 55 + mov ebp, esp ; 0001 _ 8B. EC + and esp, 0FFFFFFF0H ; 0003 _ 83. E4, F0 + push esi ; 0006 _ 56 + push edi ; 0007 _ 57 + push ebx ; 0008 _ 53 + sub esp, 244 ; 0009 _ 81. EC, 000000F4 + mov esi, dword [ebp+18H] ; 000F _ 8B. 75, 18 + mov ecx, esi ; 0012 _ 8B. CE + mov edi, dword [ebp+14H] ; 0014 _ 8B. 7D, 14 + shl ecx, 23 ; 0017 _ C1. E1, 17 + shr edi, 9 ; 001A _ C1. EF, 09 + or ecx, edi ; 001D _ 0B. CF + mov edi, 135 ; 001F _ BF, 00000087 + mov ebx, dword [ebp+1CH] ; 0024 _ 8B. 5D, 1C + mov edx, dword [ebp+8H] ; 0027 _ 8B. 55, 08 + mov eax, dword [ebp+0CH] ; 002A _ 8B. 45, 0C + vmovd xmm1, edi ; 002D _ C5 F9: 6E. CF + shr esi, 9 ; 0031 _ C1. EE, 09 + lea ebx, [ebx+5710H] ; 0034 _ 8D. 9B, 00005710 + mov dword [esp], ecx ; 003A _ 89. 0C 24 + xor ecx, ecx ; 003D _ 33. C9 + mov dword [esp+4H], esi ; 003F _ 89. 74 24, 04 + lea edi, [esp+10H] ; 0043 _ 8D. 7C 24, 10 + mov dword [esp+8H], ecx ; 0047 _ 89. 4C 24, 08 + mov dword [esp+0CH], ecx ; 004B _ 89. 4C 24, 0C + mov esi, dword [ebp+10H] ; 004F _ 8B. 75, 10 + vmovdqu oword [esp+30H], xmm1 ; 0052 _ C5 FA: 7F. 4C 24, 30 + mov dword [esp+24H], eax ; 0058 _ 89. 44 24, 24 + mov dword [esp+28H], edx ; 005C _ 89. 54 24, 28 + jmp ?_002 ; 0060 _ EB, 06 + +?_001: vmovdqu oword [esp+10H], xmm0 ; 0062 _ C5 FA: 7F. 44 24, 10 +?_002: add dword [esp], 1 ; 0068 _ 83. 04 24, 01 + adc dword [esp+4H], 0 ; 006C _ 83. 54 24, 04, 00 + push ebx ; 0071 _ 53 + push edi ; 0072 _ 57 + lea eax, [esp+8H] ; 0073 _ 8D. 
44 24, 08 + push eax ; 0077 _ 50 + call _serpent256_encrypt@12 ; 0078 _ E8, 00000000(rel) + vmovdqu xmm0, oword [esp+10H] ; 007D _ C5 FA: 6F. 44 24, 10 + xor eax, eax ; 0083 _ 33. C0 + mov dword [esp+20H], esi ; 0085 _ 89. 74 24, 20 + vmovdqu oword [esp+70H], xmm0 ; 0089 _ C5 FA: 7F. 44 24, 70 + mov edx, dword [esp+24H] ; 008F _ 8B. 54 24, 24 + mov esi, dword [esp+28H] ; 0093 _ 8B. 74 24, 28 + mov ecx, dword [ebp+1CH] ; 0097 _ 8B. 4D, 1C +?_003: vmovdqu xmm3, oword [esp+70H] ; 009A _ C5 FA: 6F. 5C 24, 70 + vpslldq xmm2, xmm3, 8 ; 00A0 _ C5 E9: 73. FB, 08 + vpsllq xmm0, xmm3, 1 ; 00A5 _ C5 F9: 73. F3, 01 + vpsrldq xmm6, xmm2, 7 ; 00AA _ C5 C9: 73. DA, 07 + vpsraw xmm7, xmm3, 8 ; 00AF _ C5 C1: 71. E3, 08 + vpsrlq xmm1, xmm6, 7 ; 00B4 _ C5 F1: 73. D6, 07 + vpsrldq xmm4, xmm7, 15 ; 00B9 _ C5 D9: 73. DF, 0F + vpor xmm5, xmm0, xmm1 ; 00BE _ C5 F9: EB. E9 + vmovdqu xmm0, oword [esp+30H] ; 00C2 _ C5 FA: 6F. 44 24, 30 + vpand xmm2, xmm4, xmm0 ; 00C8 _ C5 D9: DB. D0 + vpxor xmm4, xmm5, xmm2 ; 00CC _ C5 D1: EF. E2 + vpslldq xmm6, xmm4, 8 ; 00D0 _ C5 C9: 73. FC, 08 + vpsraw xmm2, xmm4, 8 ; 00D5 _ C5 E9: 71. E4, 08 + vpsrldq xmm1, xmm6, 7 ; 00DA _ C5 F1: 73. DE, 07 + vpsllq xmm7, xmm4, 1 ; 00DF _ C5 C1: 73. F4, 01 + vpsrldq xmm6, xmm2, 15 ; 00E4 _ C5 C9: 73. DA, 0F + vpsrlq xmm5, xmm1, 7 ; 00E9 _ C5 D1: 73. D1, 07 + vpor xmm1, xmm7, xmm5 ; 00EE _ C5 C1: EB. CD + vpand xmm7, xmm6, xmm0 ; 00F2 _ C5 C9: DB. F8 + vpxor xmm1, xmm1, xmm7 ; 00F6 _ C5 F1: EF. CF + vpslldq xmm5, xmm1, 8 ; 00FA _ C5 D1: 73. F9, 08 + vpsllq xmm6, xmm1, 1 ; 00FF _ C5 C9: 73. F1, 01 + vpsrldq xmm2, xmm5, 7 ; 0104 _ C5 E9: 73. DD, 07 + vpsraw xmm5, xmm1, 8 ; 0109 _ C5 D1: 71. E1, 08 + vpsrlq xmm7, xmm2, 7 ; 010E _ C5 C1: 73. D2, 07 + vpsrldq xmm2, xmm5, 15 ; 0113 _ C5 E9: 73. DD, 0F + vpor xmm6, xmm6, xmm7 ; 0118 _ C5 C9: EB. F7 + vpand xmm0, xmm2, xmm0 ; 011C _ C5 E9: DB. C0 + vpxor xmm7, xmm6, xmm0 ; 0120 _ C5 C9: EF. F8 + vpxor xmm0, xmm3, oword [esi] ; 0124 _ C5 E1: EF. 06 + vpxor xmm3, xmm4, oword [esi+10H] ; 0128 _ C5 D9: EF. 5E, 10 + vmovdqu oword [esp+50H], xmm4 ; 012D _ C5 FA: 7F. 64 24, 50 + vmovdqu oword [esp+40H], xmm1 ; 0133 _ C5 FA: 7F. 4C 24, 40 + vpxor xmm1, xmm1, oword [esi+20H] ; 0139 _ C5 F1: EF. 4E, 20 + vpxor xmm4, xmm7, oword [esi+30H] ; 013E _ C5 C1: EF. 66, 30 + vmovdqu oword [esp+60H], xmm7 ; 0143 _ C5 FA: 7F. 7C 24, 60 + vpunpckldq xmm6, xmm0, xmm3 ; 0149 _ C5 F9: 62. F3 + vpunpckldq xmm7, xmm1, xmm4 ; 014D _ C5 F1: 62. FC + vpunpckhdq xmm5, xmm0, xmm3 ; 0151 _ C5 F9: 6A. EB + vpunpcklqdq xmm3, xmm6, xmm7 ; 0155 _ C5 C9: 6C. DF + vmovd xmm0, dword [ecx+2A80H] ; 0159 _ C5 F9: 6E. 81, 00002A80 + vpunpckhqdq xmm7, xmm6, xmm7 ; 0161 _ C5 C9: 6D. FF + vmovd xmm6, dword [ecx+2A84H] ; 0165 _ C5 F9: 6E. B1, 00002A84 + vpunpckhdq xmm1, xmm1, xmm4 ; 016D _ C5 F1: 6A. CC + vpshufd xmm2, xmm0, 0 ; 0171 _ C5 F9: 70. D0, 00 + vpshufd xmm6, xmm6, 0 ; 0176 _ C5 F9: 70. F6, 00 + vpxor xmm0, xmm3, xmm2 ; 017B _ C5 E1: EF. C2 + vpunpcklqdq xmm3, xmm5, xmm1 ; 017F _ C5 D1: 6C. D9 + vpxor xmm4, xmm7, xmm6 ; 0183 _ C5 C1: EF. E6 + vmovd xmm7, dword [ecx+2A88H] ; 0187 _ C5 F9: 6E. B9, 00002A88 + vpunpckhqdq xmm1, xmm5, xmm1 ; 018F _ C5 D1: 6D. C9 + vmovd xmm5, dword [ecx+2A8CH] ; 0193 _ C5 F9: 6E. A9, 00002A8C + vpshufd xmm2, xmm7, 0 ; 019B _ C5 F9: 70. D7, 00 + vpshufd xmm7, xmm5, 0 ; 01A0 _ C5 F9: 70. FD, 00 + vpxor xmm6, xmm3, xmm2 ; 01A5 _ C5 E1: EF. F2 + vpxor xmm5, xmm1, xmm7 ; 01A9 _ C5 F1: EF. EF + vpxor xmm7, xmm4, xmm6 ; 01AD _ C5 D9: EF. FE + vpxor xmm5, xmm5, xmm0 ; 01B1 _ C5 D1: EF. 
E8 + vpand xmm4, xmm4, xmm5 ; 01B5 _ C5 D9: DB. E5 + vpxor xmm1, xmm7, xmm5 ; 01B9 _ C5 C1: EF. CD + vpxor xmm3, xmm4, xmm0 ; 01BD _ C5 D9: EF. D8 + vpor xmm0, xmm0, xmm5 ; 01C1 _ C5 F9: EB. C5 + vpxor xmm4, xmm0, xmm7 ; 01C5 _ C5 F9: EF. E7 + vpxor xmm5, xmm5, xmm6 ; 01C9 _ C5 D1: EF. EE + vpor xmm6, xmm6, xmm3 ; 01CD _ C5 C9: EB. F3 + vpcmpeqd xmm0, xmm0, xmm0 ; 01D1 _ C5 F9: 76. C0 + vpxor xmm7, xmm6, xmm1 ; 01D5 _ C5 C9: EF. F9 + vpxor xmm1, xmm1, xmm0 ; 01D9 _ C5 F1: EF. C8 + vpor xmm2, xmm1, xmm3 ; 01DD _ C5 F1: EB. D3 + vpxor xmm3, xmm3, xmm5 ; 01E1 _ C5 E1: EF. DD + vpor xmm1, xmm5, xmm4 ; 01E5 _ C5 D1: EB. CC + vpxor xmm6, xmm3, xmm2 ; 01E9 _ C5 E1: EF. F2 + vpxor xmm5, xmm6, xmm1 ; 01ED _ C5 C9: EF. E9 + vpxor xmm2, xmm2, xmm1 ; 01F1 _ C5 E9: EF. D1 + vpslld xmm3, xmm5, 13 ; 01F5 _ C5 E1: 72. F5, 0D + vpsrld xmm6, xmm5, 19 ; 01FA _ C5 C9: 72. D5, 13 + vpor xmm5, xmm3, xmm6 ; 01FF _ C5 E1: EB. EE + vpslld xmm3, xmm7, 3 ; 0203 _ C5 E1: 72. F7, 03 + vpsrld xmm7, xmm7, 29 ; 0208 _ C5 C1: 72. D7, 1D + vpxor xmm1, xmm2, xmm5 ; 020D _ C5 E9: EF. CD + vpor xmm6, xmm3, xmm7 ; 0211 _ C5 E1: EB. F7 + vpxor xmm2, xmm1, xmm6 ; 0215 _ C5 F1: EF. D6 + vpxor xmm4, xmm4, xmm6 ; 0219 _ C5 D9: EF. E6 + vpslld xmm1, xmm5, 3 ; 021D _ C5 F1: 72. F5, 03 + vpslld xmm7, xmm2, 1 ; 0222 _ C5 C1: 72. F2, 01 + vpxor xmm3, xmm4, xmm1 ; 0227 _ C5 D9: EF. D9 + vpsrld xmm4, xmm2, 31 ; 022B _ C5 D9: 72. D2, 1F + vpor xmm4, xmm7, xmm4 ; 0230 _ C5 C1: EB. E4 + vpslld xmm2, xmm3, 7 ; 0234 _ C5 E9: 72. F3, 07 + vpsrld xmm3, xmm3, 25 ; 0239 _ C5 E1: 72. D3, 19 + vpxor xmm5, xmm5, xmm4 ; 023E _ C5 D1: EF. EC + vpor xmm1, xmm2, xmm3 ; 0242 _ C5 E9: EB. CB + vpslld xmm7, xmm4, 7 ; 0246 _ C5 C1: 72. F4, 07 + vpxor xmm2, xmm5, xmm1 ; 024B _ C5 D1: EF. D1 + vpxor xmm6, xmm6, xmm1 ; 024F _ C5 C9: EF. F1 + vmovd xmm5, dword [ecx+2A94H] ; 0253 _ C5 F9: 6E. A9, 00002A94 + vpxor xmm3, xmm6, xmm7 ; 025B _ C5 C9: EF. DF + vpshufd xmm6, xmm5, 0 ; 025F _ C5 F9: 70. F5, 00 + vmovd xmm5, dword [ecx+2A9CH] ; 0264 _ C5 F9: 6E. A9, 00002A9C + vpxor xmm6, xmm4, xmm6 ; 026C _ C5 D9: EF. F6 + vpshufd xmm5, xmm5, 0 ; 0270 _ C5 F9: 70. ED, 00 + vmovd xmm7, dword [ecx+2A90H] ; 0275 _ C5 F9: 6E. B9, 00002A90 + vpxor xmm5, xmm1, xmm5 ; 027D _ C5 F1: EF. ED + vpslld xmm1, xmm2, 5 ; 0281 _ C5 F1: 72. F2, 05 + vpsrld xmm2, xmm2, 27 ; 0286 _ C5 E9: 72. D2, 1B + vpshufd xmm7, xmm7, 0 ; 028B _ C5 F9: 70. FF, 00 + vpor xmm1, xmm1, xmm2 ; 0290 _ C5 F1: EB. CA + vmovd xmm4, dword [ecx+2A98H] ; 0294 _ C5 F9: 6E. A1, 00002A98 + vpxor xmm1, xmm1, xmm7 ; 029C _ C5 F1: EF. CF + vpslld xmm7, xmm3, 22 ; 02A0 _ C5 C1: 72. F3, 16 + vpsrld xmm3, xmm3, 10 ; 02A5 _ C5 E1: 72. D3, 0A + vpshufd xmm4, xmm4, 0 ; 02AA _ C5 F9: 70. E4, 00 + vpor xmm7, xmm7, xmm3 ; 02AF _ C5 C1: EB. FB + vpxor xmm1, xmm1, xmm0 ; 02B3 _ C5 F1: EF. C8 + vpxor xmm7, xmm7, xmm4 ; 02B7 _ C5 C1: EF. FC + vpand xmm2, xmm1, xmm6 ; 02BB _ C5 F1: DB. D6 + vpxor xmm4, xmm7, xmm0 ; 02BF _ C5 C1: EF. E0 + vpxor xmm4, xmm4, xmm2 ; 02C3 _ C5 D9: EF. E2 + vpor xmm3, xmm2, xmm5 ; 02C7 _ C5 E9: EB. DD + vpxor xmm7, xmm5, xmm4 ; 02CB _ C5 D1: EF. FC + vpxor xmm6, xmm6, xmm3 ; 02CF _ C5 C9: EF. F3 + vpxor xmm5, xmm3, xmm1 ; 02D3 _ C5 E1: EF. E9 + vpor xmm3, xmm1, xmm6 ; 02D7 _ C5 F1: EB. DE + vpor xmm1, xmm4, xmm5 ; 02DB _ C5 D9: EB. CD + vpxor xmm2, xmm6, xmm7 ; 02DF _ C5 C9: EF. D7 + vpand xmm1, xmm1, xmm3 ; 02E3 _ C5 F1: DB. CB + vpxor xmm4, xmm5, xmm2 ; 02E7 _ C5 D1: EF. E2 + vpslld xmm6, xmm1, 13 ; 02EB _ C5 C9: 72. F1, 0D + vpsrld xmm5, xmm1, 19 ; 02F0 _ C5 D1: 72. 
D1, 13 + vpor xmm5, xmm6, xmm5 ; 02F5 _ C5 C9: EB. ED + vpslld xmm6, xmm7, 3 ; 02F9 _ C5 C9: 72. F7, 03 + vpsrld xmm7, xmm7, 29 ; 02FE _ C5 C1: 72. D7, 1D + vpand xmm2, xmm2, xmm1 ; 0303 _ C5 E9: DB. D1 + vpor xmm6, xmm6, xmm7 ; 0307 _ C5 C9: EB. F7 + vpand xmm7, xmm4, xmm1 ; 030B _ C5 D9: DB. F9 + vpxor xmm3, xmm3, xmm7 ; 030F _ C5 E1: EF. DF + vpxor xmm1, xmm2, xmm4 ; 0313 _ C5 E9: EF. CC + vpxor xmm7, xmm3, xmm5 ; 0317 _ C5 E1: EF. FD + vpslld xmm4, xmm5, 3 ; 031B _ C5 D9: 72. F5, 03 + vpxor xmm3, xmm7, xmm6 ; 0320 _ C5 C1: EF. DE + vpxor xmm7, xmm1, xmm6 ; 0324 _ C5 F1: EF. FE + vpxor xmm2, xmm7, xmm4 ; 0328 _ C5 C1: EF. D4 + vpslld xmm1, xmm3, 1 ; 032C _ C5 F1: 72. F3, 01 + vpsrld xmm7, xmm3, 31 ; 0331 _ C5 C1: 72. D3, 1F + vpslld xmm4, xmm2, 7 ; 0336 _ C5 D9: 72. F2, 07 + vpor xmm3, xmm1, xmm7 ; 033B _ C5 F1: EB. DF + vpsrld xmm2, xmm2, 25 ; 033F _ C5 E9: 72. D2, 19 + vpor xmm4, xmm4, xmm2 ; 0344 _ C5 D9: EB. E2 + vpxor xmm5, xmm5, xmm3 ; 0348 _ C5 D1: EF. EB + vpxor xmm5, xmm5, xmm4 ; 034C _ C5 D1: EF. EC + vpxor xmm6, xmm6, xmm4 ; 0350 _ C5 C9: EF. F4 + vpslld xmm1, xmm3, 7 ; 0354 _ C5 F1: 72. F3, 07 + vpsrld xmm7, xmm5, 27 ; 0359 _ C5 C1: 72. D5, 1B + vpxor xmm1, xmm6, xmm1 ; 035E _ C5 C9: EF. C9 + vpslld xmm6, xmm5, 5 ; 0362 _ C5 C9: 72. F5, 05 + vmovd xmm5, dword [ecx+2AA0H] ; 0367 _ C5 F9: 6E. A9, 00002AA0 + vpor xmm2, xmm6, xmm7 ; 036F _ C5 C9: EB. D7 + vmovd xmm7, dword [ecx+2AA4H] ; 0373 _ C5 F9: 6E. B9, 00002AA4 + vpshufd xmm6, xmm5, 0 ; 037B _ C5 F9: 70. F5, 00 + vpshufd xmm5, xmm7, 0 ; 0380 _ C5 F9: 70. EF, 00 + vpxor xmm2, xmm2, xmm6 ; 0385 _ C5 E9: EF. D6 + vmovd xmm7, dword [ecx+2AA8H] ; 0389 _ C5 F9: 6E. B9, 00002AA8 + vpxor xmm6, xmm3, xmm5 ; 0391 _ C5 E1: EF. F5 + vpslld xmm3, xmm1, 22 ; 0395 _ C5 E1: 72. F1, 16 + vpsrld xmm1, xmm1, 10 ; 039A _ C5 F1: 72. D1, 0A + vpor xmm5, xmm3, xmm1 ; 039F _ C5 E1: EB. E9 + vmovd xmm1, dword [ecx+2AACH] ; 03A3 _ C5 F9: 6E. 89, 00002AAC + vpshufd xmm3, xmm7, 0 ; 03AB _ C5 F9: 70. DF, 00 + vpxor xmm7, xmm5, xmm3 ; 03B0 _ C5 D1: EF. FB + vpshufd xmm5, xmm1, 0 ; 03B4 _ C5 F9: 70. E9, 00 + vpxor xmm1, xmm4, xmm5 ; 03B9 _ C5 D9: EF. CD + vpand xmm4, xmm2, xmm7 ; 03BD _ C5 E9: DB. E7 + vpxor xmm5, xmm4, xmm1 ; 03C1 _ C5 D9: EF. E9 + vpxor xmm7, xmm7, xmm6 ; 03C5 _ C5 C1: EF. FE + vpxor xmm3, xmm7, xmm5 ; 03C9 _ C5 C1: EF. DD + vpor xmm1, xmm1, xmm2 ; 03CD _ C5 F1: EB. CA + vpxor xmm6, xmm1, xmm6 ; 03D1 _ C5 F1: EF. F6 + vpxor xmm2, xmm2, xmm3 ; 03D5 _ C5 E9: EF. D3 + vpor xmm4, xmm6, xmm2 ; 03D9 _ C5 C9: EB. E2 + vpxor xmm7, xmm4, xmm5 ; 03DD _ C5 D9: EF. FD + vpand xmm5, xmm5, xmm6 ; 03E1 _ C5 D1: DB. EE + vpxor xmm1, xmm2, xmm5 ; 03E5 _ C5 E9: EF. CD + vpxor xmm6, xmm6, xmm7 ; 03E9 _ C5 C9: EF. F7 + vpxor xmm4, xmm6, xmm1 ; 03ED _ C5 C9: EF. E1 + vpslld xmm2, xmm3, 13 ; 03F1 _ C5 E9: 72. F3, 0D + vpsrld xmm6, xmm3, 19 ; 03F6 _ C5 C9: 72. D3, 13 + vpslld xmm3, xmm4, 3 ; 03FB _ C5 E1: 72. F4, 03 + vpor xmm6, xmm2, xmm6 ; 0400 _ C5 E9: EB. F6 + vpsrld xmm4, xmm4, 29 ; 0404 _ C5 D9: 72. D4, 1D + vpor xmm4, xmm3, xmm4 ; 0409 _ C5 E1: EB. E4 + vpxor xmm7, xmm7, xmm6 ; 040D _ C5 C1: EF. FE + vpxor xmm1, xmm1, xmm0 ; 0411 _ C5 F1: EF. C8 + vpxor xmm5, xmm7, xmm4 ; 0415 _ C5 C1: EF. EC + vpxor xmm1, xmm1, xmm4 ; 0419 _ C5 F1: EF. CC + vpslld xmm7, xmm6, 3 ; 041D _ C5 C1: 72. F6, 03 + vpxor xmm3, xmm1, xmm7 ; 0422 _ C5 F1: EF. DF + vpslld xmm2, xmm5, 1 ; 0426 _ C5 E9: 72. F5, 01 + vpsrld xmm5, xmm5, 31 ; 042B _ C5 D1: 72. D5, 1F + vpslld xmm1, xmm3, 7 ; 0430 _ C5 F1: 72. F3, 07 + vpor xmm5, xmm2, xmm5 ; 0435 _ C5 E9: EB. 
ED + vpsrld xmm3, xmm3, 25 ; 0439 _ C5 E1: 72. D3, 19 + vpor xmm3, xmm1, xmm3 ; 043E _ C5 F1: EB. DB + vpxor xmm6, xmm6, xmm5 ; 0442 _ C5 C9: EF. F5 + vpxor xmm7, xmm6, xmm3 ; 0446 _ C5 C9: EF. FB + vpxor xmm4, xmm4, xmm3 ; 044A _ C5 D9: EF. E3 + vpslld xmm6, xmm5, 7 ; 044E _ C5 C9: 72. F5, 07 + vpslld xmm1, xmm7, 5 ; 0453 _ C5 F1: 72. F7, 05 + vpxor xmm2, xmm4, xmm6 ; 0458 _ C5 D9: EF. D6 + vpsrld xmm7, xmm7, 27 ; 045C _ C5 C1: 72. D7, 1B + vmovd xmm4, dword [ecx+2AB0H] ; 0461 _ C5 F9: 6E. A1, 00002AB0 + vpor xmm6, xmm1, xmm7 ; 0469 _ C5 F1: EB. F7 + vpshufd xmm1, xmm4, 0 ; 046D _ C5 F9: 70. CC, 00 + vmovd xmm4, dword [ecx+2AB4H] ; 0472 _ C5 F9: 6E. A1, 00002AB4 + vpxor xmm7, xmm6, xmm1 ; 047A _ C5 C9: EF. F9 + vpshufd xmm6, xmm4, 0 ; 047E _ C5 F9: 70. F4, 00 + vmovd xmm1, dword [ecx+2AB8H] ; 0483 _ C5 F9: 6E. 89, 00002AB8 + vpxor xmm6, xmm5, xmm6 ; 048B _ C5 D1: EF. F6 + vpslld xmm5, xmm2, 22 ; 048F _ C5 D1: 72. F2, 16 + vpsrld xmm2, xmm2, 10 ; 0494 _ C5 E9: 72. D2, 0A + vpor xmm4, xmm5, xmm2 ; 0499 _ C5 D1: EB. E2 + vpshufd xmm5, xmm1, 0 ; 049D _ C5 F9: 70. E9, 00 + vmovd xmm1, dword [ecx+2ABCH] ; 04A2 _ C5 F9: 6E. 89, 00002ABC + vpxor xmm2, xmm4, xmm5 ; 04AA _ C5 D9: EF. D5 + vpshufd xmm4, xmm1, 0 ; 04AE _ C5 F9: 70. E1, 00 + vpxor xmm3, xmm3, xmm4 ; 04B3 _ C5 E1: EF. DC + vpand xmm4, xmm6, xmm7 ; 04B7 _ C5 C9: DB. E7 + vpor xmm1, xmm7, xmm3 ; 04BB _ C5 C1: EB. CB + vpxor xmm5, xmm3, xmm6 ; 04BF _ C5 E1: EF. EE + vpxor xmm7, xmm7, xmm2 ; 04C3 _ C5 C1: EF. FA + vpxor xmm6, xmm2, xmm5 ; 04C7 _ C5 E9: EF. F5 + vpor xmm7, xmm7, xmm4 ; 04CB _ C5 C1: EB. FC + vpand xmm5, xmm5, xmm1 ; 04CF _ C5 D1: DB. E9 + vpxor xmm2, xmm5, xmm7 ; 04D3 _ C5 D1: EF. D7 + vpxor xmm3, xmm1, xmm4 ; 04D7 _ C5 F1: EF. DC + vpxor xmm1, xmm4, xmm2 ; 04DB _ C5 D9: EF. CA + vpxor xmm5, xmm3, xmm2 ; 04DF _ C5 E1: EF. EA + vpor xmm4, xmm1, xmm3 ; 04E3 _ C5 F1: EB. E3 + vpand xmm3, xmm7, xmm3 ; 04E7 _ C5 C1: DB. DB + vpxor xmm1, xmm4, xmm6 ; 04EB _ C5 D9: EF. CE + vpxor xmm6, xmm3, xmm6 ; 04EF _ C5 E1: EF. F6 + vpor xmm4, xmm1, xmm2 ; 04F3 _ C5 F1: EB. E2 + vpxor xmm4, xmm5, xmm4 ; 04F7 _ C5 D1: EF. E4 + vpslld xmm5, xmm4, 13 ; 04FB _ C5 D1: 72. F4, 0D + vpsrld xmm4, xmm4, 19 ; 0500 _ C5 D9: 72. D4, 13 + vpor xmm5, xmm5, xmm4 ; 0505 _ C5 D1: EB. EC + vpslld xmm4, xmm2, 3 ; 0509 _ C5 D9: 72. F2, 03 + vpsrld xmm2, xmm2, 29 ; 050E _ C5 E9: 72. D2, 1D + vpxor xmm1, xmm1, xmm5 ; 0513 _ C5 F1: EF. CD + vpor xmm4, xmm4, xmm2 ; 0517 _ C5 D9: EB. E2 + vpslld xmm7, xmm5, 3 ; 051B _ C5 C1: 72. F5, 03 + vpxor xmm1, xmm1, xmm4 ; 0520 _ C5 F1: EF. CC + vpxor xmm6, xmm6, xmm4 ; 0524 _ C5 C9: EF. F4 + vpxor xmm6, xmm6, xmm7 ; 0528 _ C5 C9: EF. F7 + vpslld xmm3, xmm1, 1 ; 052C _ C5 E1: 72. F1, 01 + vpsrld xmm1, xmm1, 31 ; 0531 _ C5 F1: 72. D1, 1F + vpslld xmm2, xmm6, 7 ; 0536 _ C5 E9: 72. F6, 07 + vpor xmm3, xmm3, xmm1 ; 053B _ C5 E1: EB. D9 + vpsrld xmm6, xmm6, 25 ; 053F _ C5 C9: 72. D6, 19 + vpor xmm7, xmm2, xmm6 ; 0544 _ C5 E9: EB. FE + vpxor xmm5, xmm5, xmm3 ; 0548 _ C5 D1: EF. EB + vpxor xmm6, xmm5, xmm7 ; 054C _ C5 D1: EF. F7 + vpxor xmm4, xmm4, xmm7 ; 0550 _ C5 D9: EF. E7 + vpslld xmm1, xmm3, 7 ; 0554 _ C5 F1: 72. F3, 07 + vmovd xmm2, dword [ecx+2AC0H] ; 0559 _ C5 F9: 6E. 91, 00002AC0 + vpxor xmm5, xmm4, xmm1 ; 0561 _ C5 D9: EF. E9 + vpslld xmm4, xmm6, 5 ; 0565 _ C5 D9: 72. F6, 05 + vpsrld xmm6, xmm6, 27 ; 056A _ C5 C9: 72. D6, 1B + vpshufd xmm1, xmm2, 0 ; 056F _ C5 F9: 70. CA, 00 + vpor xmm6, xmm4, xmm6 ; 0574 _ C5 D9: EB. F6 + vpxor xmm6, xmm6, xmm1 ; 0578 _ C5 C9: EF. F1 + vmovd xmm1, dword [ecx+2ACCH] ; 057C _ C5 F9: 6E. 
89, 00002ACC + vmovd xmm2, dword [ecx+2AC4H] ; 0584 _ C5 F9: 6E. 91, 00002AC4 + vpshufd xmm1, xmm1, 0 ; 058C _ C5 F9: 70. C9, 00 + vpxor xmm1, xmm7, xmm1 ; 0591 _ C5 C1: EF. C9 + vpshufd xmm7, xmm2, 0 ; 0595 _ C5 F9: 70. FA, 00 + vpxor xmm3, xmm3, xmm7 ; 059A _ C5 E1: EF. DF + vpsrld xmm7, xmm5, 10 ; 059E _ C5 C1: 72. D5, 0A + vmovd xmm4, dword [ecx+2AC8H] ; 05A3 _ C5 F9: 6E. A1, 00002AC8 + vpxor xmm2, xmm3, xmm1 ; 05AB _ C5 E1: EF. D1 + vpxor xmm3, xmm1, xmm0 ; 05AF _ C5 F1: EF. D8 + vpslld xmm1, xmm5, 22 ; 05B3 _ C5 F1: 72. F5, 16 + vpshufd xmm4, xmm4, 0 ; 05B8 _ C5 F9: 70. E4, 00 + vpor xmm5, xmm1, xmm7 ; 05BD _ C5 F1: EB. EF + vpxor xmm1, xmm5, xmm4 ; 05C1 _ C5 D1: EF. CC + vpxor xmm7, xmm3, xmm6 ; 05C5 _ C5 E1: EF. FE + vpxor xmm4, xmm1, xmm3 ; 05C9 _ C5 F1: EF. E3 + vpand xmm5, xmm2, xmm7 ; 05CD _ C5 E9: DB. EF + vpxor xmm2, xmm2, xmm7 ; 05D1 _ C5 E9: EF. D7 + vpxor xmm1, xmm5, xmm4 ; 05D5 _ C5 D1: EF. CC + vpxor xmm3, xmm6, xmm2 ; 05D9 _ C5 C9: EF. DA + vpand xmm6, xmm4, xmm2 ; 05DD _ C5 D9: DB. F2 + vpand xmm4, xmm3, xmm1 ; 05E1 _ C5 E1: DB. E1 + vpxor xmm5, xmm6, xmm3 ; 05E5 _ C5 C9: EF. EB + vpxor xmm6, xmm7, xmm4 ; 05E9 _ C5 C1: EF. F4 + vpor xmm2, xmm2, xmm1 ; 05ED _ C5 E9: EB. D1 + vpor xmm7, xmm4, xmm6 ; 05F1 _ C5 D9: EB. FE + vpxor xmm3, xmm7, xmm5 ; 05F5 _ C5 C1: EF. DD + vpxor xmm7, xmm3, xmm0 ; 05F9 _ C5 E1: EF. F8 + vpslld xmm0, xmm1, 13 ; 05FD _ C5 F9: 72. F1, 0D + vpsrld xmm3, xmm1, 19 ; 0602 _ C5 E1: 72. D1, 13 + vpxor xmm1, xmm2, xmm4 ; 0607 _ C5 E9: EF. CC + vpor xmm3, xmm0, xmm3 ; 060B _ C5 F9: EB. DB + vpslld xmm0, xmm7, 3 ; 060F _ C5 F9: 72. F7, 03 + vpsrld xmm7, xmm7, 29 ; 0614 _ C5 C1: 72. D7, 1D + vpslld xmm2, xmm3, 3 ; 0619 _ C5 E9: 72. F3, 03 + vpor xmm0, xmm0, xmm7 ; 061E _ C5 F9: EB. C7 + vpand xmm7, xmm5, xmm6 ; 0622 _ C5 D1: DB. FE + vpxor xmm4, xmm1, xmm7 ; 0626 _ C5 F1: EF. E7 + vpxor xmm6, xmm6, xmm0 ; 062A _ C5 C9: EF. F0 + vpxor xmm5, xmm4, xmm3 ; 062E _ C5 D9: EF. EB + vpxor xmm4, xmm6, xmm2 ; 0632 _ C5 C9: EF. E2 + vpxor xmm1, xmm5, xmm0 ; 0636 _ C5 D1: EF. C8 + vpslld xmm5, xmm4, 7 ; 063A _ C5 D1: 72. F4, 07 + vpslld xmm6, xmm1, 1 ; 063F _ C5 C9: 72. F1, 01 + vpsrld xmm7, xmm1, 31 ; 0644 _ C5 C1: 72. D1, 1F + vpsrld xmm2, xmm4, 25 ; 0649 _ C5 E9: 72. D4, 19 + vpor xmm7, xmm6, xmm7 ; 064E _ C5 C9: EB. FF + vpor xmm1, xmm5, xmm2 ; 0652 _ C5 D1: EB. CA + vpslld xmm6, xmm7, 7 ; 0656 _ C5 C9: 72. F7, 07 + vpxor xmm0, xmm0, xmm1 ; 065B _ C5 F9: EF. C1 + vpxor xmm3, xmm3, xmm7 ; 065F _ C5 E1: EF. DF + vpxor xmm6, xmm0, xmm6 ; 0663 _ C5 F9: EF. F6 + vpxor xmm4, xmm3, xmm1 ; 0667 _ C5 E1: EF. E1 + vmovd xmm0, dword [ecx+2AD4H] ; 066B _ C5 F9: 6E. 81, 00002AD4 + vpshufd xmm3, xmm0, 0 ; 0673 _ C5 F9: 70. D8, 00 + vpxor xmm0, xmm7, xmm3 ; 0678 _ C5 C1: EF. C3 + vmovd xmm7, dword [ecx+2ADCH] ; 067C _ C5 F9: 6E. B9, 00002ADC + vpshufd xmm2, xmm7, 0 ; 0684 _ C5 F9: 70. D7, 00 + vmovd xmm5, dword [ecx+2AD0H] ; 0689 _ C5 F9: 6E. A9, 00002AD0 + vpxor xmm2, xmm1, xmm2 ; 0691 _ C5 F1: EF. D2 + vpslld xmm1, xmm4, 5 ; 0695 _ C5 F1: 72. F4, 05 + vpsrld xmm4, xmm4, 27 ; 069A _ C5 D9: 72. D4, 1B + vpshufd xmm7, xmm5, 0 ; 069F _ C5 F9: 70. FD, 00 + vpor xmm1, xmm1, xmm4 ; 06A4 _ C5 F1: EB. CC + vpxor xmm4, xmm1, xmm7 ; 06A8 _ C5 F1: EF. E7 + vpxor xmm5, xmm4, xmm0 ; 06AC _ C5 D9: EF. E8 + vpxor xmm0, xmm0, xmm2 ; 06B0 _ C5 F9: EF. C2 + vpcmpeqd xmm4, xmm4, xmm4 ; 06B4 _ C5 D9: 76. E4 + vpand xmm1, xmm0, xmm5 ; 06B8 _ C5 F9: DB. CD + vmovd xmm3, dword [ecx+2AD8H] ; 06BC _ C5 F9: 6E. 99, 00002AD8 + vpxor xmm7, xmm2, xmm4 ; 06C4 _ C5 E9: EF. 
FC + vpslld xmm2, xmm6, 22 ; 06C8 _ C5 E9: 72. F6, 16 + vpsrld xmm6, xmm6, 10 ; 06CD _ C5 C9: 72. D6, 0A + vpshufd xmm3, xmm3, 0 ; 06D2 _ C5 F9: 70. DB, 00 + vpor xmm6, xmm2, xmm6 ; 06D7 _ C5 E9: EB. F6 + vpxor xmm6, xmm6, xmm3 ; 06DB _ C5 C9: EF. F3 + vpxor xmm3, xmm6, xmm7 ; 06DF _ C5 C9: EF. DF + vpxor xmm1, xmm1, xmm3 ; 06E3 _ C5 F1: EF. CB + vpor xmm3, xmm3, xmm0 ; 06E7 _ C5 E1: EB. D8 + vpand xmm2, xmm7, xmm1 ; 06EB _ C5 C1: DB. D1 + vpxor xmm0, xmm0, xmm7 ; 06EF _ C5 F9: EF. C7 + vpxor xmm2, xmm2, xmm5 ; 06F3 _ C5 E9: EF. D5 + vpxor xmm6, xmm0, xmm1 ; 06F7 _ C5 F9: EF. F1 + vpxor xmm7, xmm6, xmm3 ; 06FB _ C5 C9: EF. FB + vpand xmm0, xmm5, xmm2 ; 06FF _ C5 D1: DB. C2 + vpxor xmm0, xmm0, xmm7 ; 0703 _ C5 F9: EF. C7 + vpslld xmm6, xmm1, 13 ; 0707 _ C5 C9: 72. F1, 0D + vpsrld xmm1, xmm1, 19 ; 070C _ C5 F1: 72. D1, 13 + vpxor xmm5, xmm3, xmm5 ; 0711 _ C5 E1: EF. ED + vpor xmm6, xmm6, xmm1 ; 0715 _ C5 C9: EB. F1 + vpslld xmm1, xmm0, 3 ; 0719 _ C5 F1: 72. F0, 03 + vpsrld xmm0, xmm0, 29 ; 071E _ C5 F9: 72. D0, 1D + vpor xmm7, xmm7, xmm2 ; 0723 _ C5 C1: EB. FA + vpor xmm0, xmm1, xmm0 ; 0727 _ C5 F1: EB. C0 + vpxor xmm1, xmm2, xmm6 ; 072B _ C5 E9: EF. CE + vpxor xmm2, xmm5, xmm4 ; 072F _ C5 D1: EF. D4 + vpxor xmm1, xmm1, xmm0 ; 0733 _ C5 F1: EF. C8 + vpxor xmm3, xmm7, xmm2 ; 0737 _ C5 C1: EF. DA + vpslld xmm5, xmm6, 3 ; 073B _ C5 D1: 72. F6, 03 + vpxor xmm4, xmm3, xmm0 ; 0740 _ C5 E1: EF. E0 + vpslld xmm7, xmm1, 1 ; 0744 _ C5 C1: 72. F1, 01 + vpsrld xmm1, xmm1, 31 ; 0749 _ C5 F1: 72. D1, 1F + vpxor xmm2, xmm4, xmm5 ; 074E _ C5 D9: EF. D5 + vpor xmm1, xmm7, xmm1 ; 0752 _ C5 C1: EB. C9 + vpslld xmm7, xmm2, 7 ; 0756 _ C5 C1: 72. F2, 07 + vpsrld xmm2, xmm2, 25 ; 075B _ C5 E9: 72. D2, 19 + vpor xmm4, xmm7, xmm2 ; 0760 _ C5 C1: EB. E2 + vpxor xmm6, xmm6, xmm1 ; 0764 _ C5 C9: EF. F1 + vpxor xmm2, xmm6, xmm4 ; 0768 _ C5 C9: EF. D4 + vpxor xmm0, xmm0, xmm4 ; 076C _ C5 F9: EF. C4 + vpslld xmm3, xmm1, 7 ; 0770 _ C5 E1: 72. F1, 07 + vmovd xmm7, dword [ecx+2AE0H] ; 0775 _ C5 F9: 6E. B9, 00002AE0 + vpxor xmm5, xmm0, xmm3 ; 077D _ C5 F9: EF. EB + vpslld xmm0, xmm2, 5 ; 0781 _ C5 F9: 72. F2, 05 + vpsrld xmm3, xmm2, 27 ; 0786 _ C5 E1: 72. D2, 1B + vpor xmm6, xmm0, xmm3 ; 078B _ C5 F9: EB. F3 + vmovd xmm3, dword [ecx+2AECH] ; 078F _ C5 F9: 6E. 99, 00002AEC + vpshufd xmm0, xmm7, 0 ; 0797 _ C5 F9: 70. C7, 00 + vpshufd xmm7, xmm3, 0 ; 079C _ C5 F9: 70. FB, 00 + vpxor xmm2, xmm6, xmm0 ; 07A1 _ C5 C9: EF. D0 + vmovd xmm0, dword [ecx+2AE8H] ; 07A5 _ C5 F9: 6E. 81, 00002AE8 + vpslld xmm3, xmm5, 22 ; 07AD _ C5 E1: 72. F5, 16 + vpsrld xmm5, xmm5, 10 ; 07B2 _ C5 D1: 72. D5, 0A + vpxor xmm4, xmm4, xmm7 ; 07B7 _ C5 D9: EF. E7 + vpshufd xmm0, xmm0, 0 ; 07BB _ C5 F9: 70. C0, 00 + vpor xmm5, xmm3, xmm5 ; 07C0 _ C5 E1: EB. ED + vpxor xmm3, xmm5, xmm0 ; 07C4 _ C5 D1: EF. D8 + vmovd xmm6, dword [ecx+2AE4H] ; 07C8 _ C5 F9: 6E. B1, 00002AE4 + vpcmpeqd xmm0, xmm0, xmm0 ; 07D0 _ C5 F9: 76. C0 + vpshufd xmm6, xmm6, 0 ; 07D4 _ C5 F9: 70. F6, 00 + vpxor xmm5, xmm3, xmm0 ; 07D9 _ C5 E1: EF. E8 + vpxor xmm3, xmm2, xmm4 ; 07DD _ C5 E9: EF. DC + vpand xmm2, xmm4, xmm2 ; 07E1 _ C5 D9: DB. D2 + vpxor xmm2, xmm2, xmm5 ; 07E5 _ C5 E9: EF. D5 + vpxor xmm1, xmm1, xmm6 ; 07E9 _ C5 F1: EF. CE + vpxor xmm1, xmm1, xmm2 ; 07ED _ C5 F1: EF. CA + vpor xmm7, xmm5, xmm4 ; 07F1 _ C5 D1: EB. FC + vpor xmm6, xmm3, xmm1 ; 07F5 _ C5 E1: EB. F1 + vpxor xmm5, xmm7, xmm3 ; 07F9 _ C5 C1: EF. EB + vpxor xmm7, xmm5, xmm1 ; 07FD _ C5 D1: EF. F9 + vpor xmm3, xmm6, xmm2 ; 0801 _ C5 C9: EB. DA + vpxor xmm4, xmm4, xmm6 ; 0805 _ C5 D9: EF. 
E6 + vpxor xmm3, xmm3, xmm7 ; 0809 _ C5 E1: EF. DF + vpxor xmm4, xmm4, xmm2 ; 080D _ C5 D9: EF. E2 + vpslld xmm6, xmm3, 13 ; 0811 _ C5 C9: 72. F3, 0D + vpxor xmm5, xmm4, xmm3 ; 0816 _ C5 D9: EF. EB + vpsrld xmm3, xmm3, 19 ; 081A _ C5 E1: 72. D3, 13 + vpor xmm4, xmm6, xmm3 ; 081F _ C5 C9: EB. E3 + vpslld xmm6, xmm5, 3 ; 0823 _ C5 C9: 72. F5, 03 + vpsrld xmm3, xmm5, 29 ; 0828 _ C5 E1: 72. D5, 1D + vpxor xmm1, xmm1, xmm4 ; 082D _ C5 F1: EF. CC + vpor xmm6, xmm6, xmm3 ; 0831 _ C5 C9: EB. F3 + vpxor xmm2, xmm2, xmm0 ; 0835 _ C5 E9: EF. D0 + vpxor xmm3, xmm1, xmm6 ; 0839 _ C5 F1: EF. DE + vpand xmm1, xmm7, xmm5 ; 083D _ C5 C1: DB. CD + vpxor xmm2, xmm2, xmm1 ; 0841 _ C5 E9: EF. D1 + vpslld xmm7, xmm4, 3 ; 0845 _ C5 C1: 72. F4, 03 + vpxor xmm5, xmm2, xmm6 ; 084A _ C5 E9: EF. EE + vpslld xmm1, xmm3, 1 ; 084E _ C5 F1: 72. F3, 01 + vpxor xmm7, xmm5, xmm7 ; 0853 _ C5 D1: EF. FF + vpsrld xmm3, xmm3, 31 ; 0857 _ C5 E1: 72. D3, 1F + vpor xmm1, xmm1, xmm3 ; 085C _ C5 F1: EB. CB + vpslld xmm2, xmm7, 7 ; 0860 _ C5 E9: 72. F7, 07 + vpsrld xmm5, xmm7, 25 ; 0865 _ C5 D1: 72. D7, 19 + vpxor xmm4, xmm4, xmm1 ; 086A _ C5 D9: EF. E1 + vpor xmm7, xmm2, xmm5 ; 086E _ C5 E9: EB. FD + vpslld xmm2, xmm1, 7 ; 0872 _ C5 E9: 72. F1, 07 + vpxor xmm4, xmm4, xmm7 ; 0877 _ C5 D9: EF. E7 + vpxor xmm6, xmm6, xmm7 ; 087B _ C5 C9: EF. F7 + vmovd xmm3, dword [ecx+2AF0H] ; 087F _ C5 F9: 6E. 99, 00002AF0 + vpslld xmm5, xmm4, 5 ; 0887 _ C5 D1: 72. F4, 05 + vpsrld xmm4, xmm4, 27 ; 088C _ C5 D9: 72. D4, 1B + vpxor xmm2, xmm6, xmm2 ; 0891 _ C5 C9: EF. D2 + vpor xmm6, xmm5, xmm4 ; 0895 _ C5 D1: EB. F4 + vpshufd xmm4, xmm3, 0 ; 0899 _ C5 F9: 70. E3, 00 + vmovd xmm5, dword [ecx+2AF4H] ; 089E _ C5 F9: 6E. A9, 00002AF4 + vpxor xmm4, xmm6, xmm4 ; 08A6 _ C5 C9: EF. E4 + vpslld xmm6, xmm2, 22 ; 08AA _ C5 C9: 72. F2, 16 + vpsrld xmm2, xmm2, 10 ; 08AF _ C5 E9: 72. D2, 0A + vpshufd xmm3, xmm5, 0 ; 08B4 _ C5 F9: 70. DD, 00 + vpor xmm5, xmm6, xmm2 ; 08B9 _ C5 C9: EB. EA + vmovd xmm2, dword [ecx+2AF8H] ; 08BD _ C5 F9: 6E. 91, 00002AF8 + vpxor xmm1, xmm1, xmm3 ; 08C5 _ C5 F1: EF. CB + vmovd xmm6, dword [ecx+2AFCH] ; 08C9 _ C5 F9: 6E. B1, 00002AFC + vpshufd xmm3, xmm2, 0 ; 08D1 _ C5 F9: 70. DA, 00 + vpshufd xmm2, xmm6, 0 ; 08D6 _ C5 F9: 70. D6, 00 + vpxor xmm5, xmm5, xmm3 ; 08DB _ C5 D1: EF. EB + vpxor xmm3, xmm7, xmm2 ; 08DF _ C5 C1: EF. DA + vpor xmm7, xmm1, xmm5 ; 08E3 _ C5 F1: EB. FD + vpxor xmm6, xmm7, xmm3 ; 08E7 _ C5 C1: EF. F3 + vpxor xmm2, xmm1, xmm5 ; 08EB _ C5 F1: EF. D5 + vpxor xmm7, xmm5, xmm6 ; 08EF _ C5 D1: EF. FE + vpxor xmm5, xmm2, xmm7 ; 08F3 _ C5 E9: EF. EF + vpor xmm2, xmm3, xmm2 ; 08F7 _ C5 E1: EB. D2 + vmovdqu oword [esp+90H], xmm6 ; 08FB _ C5 FA: 7F. B4 24, 00000090 + vpor xmm6, xmm6, xmm5 ; 0904 _ C5 C9: EB. F5 + vmovdqu oword [esp+80H], xmm4 ; 0908 _ C5 FA: 7F. A4 24, 00000080 + vpor xmm1, xmm4, xmm5 ; 0911 _ C5 D9: EB. CD + vpxor xmm4, xmm6, xmm4 ; 0915 _ C5 C9: EF. E4 + vpxor xmm1, xmm1, xmm7 ; 0919 _ C5 F1: EF. CF + vpxor xmm6, xmm4, xmm5 ; 091D _ C5 D9: EF. F5 + vpxor xmm7, xmm7, xmm6 ; 0921 _ C5 C1: EF. FE + vpand xmm4, xmm6, xmm1 ; 0925 _ C5 C9: DB. E1 + vpxor xmm7, xmm7, xmm0 ; 0929 _ C5 C1: EF. F8 + vpxor xmm4, xmm4, xmm5 ; 092D _ C5 D9: EF. E5 + vpor xmm6, xmm7, xmm1 ; 0931 _ C5 C1: EB. F1 + vpxor xmm5, xmm5, xmm6 ; 0935 _ C5 D1: EF. EE + vpand xmm3, xmm2, oword [esp+80H] ; 0939 _ C5 E9: DB. 9C 24, 00000080 + vpslld xmm7, xmm5, 13 ; 0942 _ C5 C1: 72. F5, 0D + vpsrld xmm5, xmm5, 19 ; 0947 _ C5 D1: 72. D5, 13 + vpxor xmm2, xmm3, oword [esp+90H] ; 094C _ C5 E1: EF. 
94 24, 00000090 + vpor xmm6, xmm7, xmm5 ; 0955 _ C5 C1: EB. F5 + vpslld xmm5, xmm4, 3 ; 0959 _ C5 D1: 72. F4, 03 + vpsrld xmm4, xmm4, 29 ; 095E _ C5 D9: 72. D4, 1D + vpor xmm7, xmm5, xmm4 ; 0963 _ C5 D1: EB. FC + vpxor xmm4, xmm2, xmm6 ; 0967 _ C5 E9: EF. E6 + vpxor xmm3, xmm4, xmm7 ; 096B _ C5 D9: EF. DF + vpxor xmm1, xmm1, xmm7 ; 096F _ C5 F1: EF. CF + vpslld xmm2, xmm6, 3 ; 0973 _ C5 E9: 72. F6, 03 + vpsrld xmm4, xmm3, 31 ; 0978 _ C5 D9: 72. D3, 1F + vpxor xmm2, xmm1, xmm2 ; 097D _ C5 F1: EF. D2 + vpslld xmm1, xmm3, 1 ; 0981 _ C5 F1: 72. F3, 01 + vpor xmm3, xmm1, xmm4 ; 0986 _ C5 F1: EB. DC + vpslld xmm5, xmm2, 7 ; 098A _ C5 D1: 72. F2, 07 + vpsrld xmm1, xmm2, 25 ; 098F _ C5 F1: 72. D2, 19 + vpxor xmm6, xmm6, xmm3 ; 0994 _ C5 C9: EF. F3 + vpor xmm2, xmm5, xmm1 ; 0998 _ C5 D1: EB. D1 + vpslld xmm1, xmm3, 7 ; 099C _ C5 F1: 72. F3, 07 + vpxor xmm4, xmm6, xmm2 ; 09A1 _ C5 C9: EF. E2 + vpxor xmm7, xmm7, xmm2 ; 09A5 _ C5 C1: EF. FA + vpslld xmm5, xmm4, 5 ; 09A9 _ C5 D1: 72. F4, 05 + vpsrld xmm4, xmm4, 27 ; 09AE _ C5 D9: 72. D4, 1B + vmovd xmm6, dword [ecx+2B00H] ; 09B3 _ C5 F9: 6E. B1, 00002B00 + vpxor xmm7, xmm7, xmm1 ; 09BB _ C5 C1: EF. F9 + vpor xmm1, xmm5, xmm4 ; 09BF _ C5 D1: EB. CC + vmovd xmm5, dword [ecx+2B04H] ; 09C3 _ C5 F9: 6E. A9, 00002B04 + vpshufd xmm4, xmm6, 0 ; 09CB _ C5 F9: 70. E6, 00 + vpshufd xmm6, xmm5, 0 ; 09D0 _ C5 F9: 70. F5, 00 + vpxor xmm4, xmm1, xmm4 ; 09D5 _ C5 F1: EF. E4 + vpxor xmm1, xmm3, xmm6 ; 09D9 _ C5 E1: EF. CE + vpslld xmm3, xmm7, 22 ; 09DD _ C5 E1: 72. F7, 16 + vpsrld xmm5, xmm7, 10 ; 09E2 _ C5 D1: 72. D7, 0A + vpor xmm7, xmm3, xmm5 ; 09E7 _ C5 E1: EB. FD + vmovd xmm3, dword [ecx+2B08H] ; 09EB _ C5 F9: 6E. 99, 00002B08 + vmovd xmm5, dword [ecx+2B0CH] ; 09F3 _ C5 F9: 6E. A9, 00002B0C + vpshufd xmm6, xmm3, 0 ; 09FB _ C5 F9: 70. F3, 00 + vpshufd xmm3, xmm5, 0 ; 0A00 _ C5 F9: 70. DD, 00 + vpxor xmm7, xmm7, xmm6 ; 0A05 _ C5 C1: EF. FE + vpxor xmm2, xmm2, xmm3 ; 0A09 _ C5 E9: EF. D3 + vpxor xmm5, xmm1, xmm7 ; 0A0D _ C5 F1: EF. EF + vpxor xmm6, xmm2, xmm4 ; 0A11 _ C5 E9: EF. F4 + vpand xmm1, xmm1, xmm6 ; 0A15 _ C5 F1: DB. CE + vpxor xmm2, xmm5, xmm6 ; 0A19 _ C5 D1: EF. D6 + vpxor xmm1, xmm1, xmm4 ; 0A1D _ C5 F1: EF. CC + vpor xmm4, xmm4, xmm6 ; 0A21 _ C5 D9: EB. E6 + vpxor xmm4, xmm4, xmm5 ; 0A25 _ C5 D9: EF. E5 + vpxor xmm5, xmm6, xmm7 ; 0A29 _ C5 C9: EF. EF + vpor xmm7, xmm7, xmm1 ; 0A2D _ C5 C1: EB. F9 + vpxor xmm7, xmm7, xmm2 ; 0A31 _ C5 C1: EF. FA + vpxor xmm2, xmm2, xmm0 ; 0A35 _ C5 E9: EF. D0 + vpor xmm3, xmm2, xmm1 ; 0A39 _ C5 E9: EB. D9 + vpxor xmm1, xmm1, xmm5 ; 0A3D _ C5 F1: EF. CD + vpor xmm2, xmm5, xmm4 ; 0A41 _ C5 D1: EB. D4 + vpxor xmm5, xmm1, xmm3 ; 0A45 _ C5 F1: EF. EB + vpxor xmm6, xmm5, xmm2 ; 0A49 _ C5 D1: EF. F2 + vpxor xmm3, xmm3, xmm2 ; 0A4D _ C5 E1: EF. DA + vpslld xmm1, xmm6, 13 ; 0A51 _ C5 F1: 72. F6, 0D + vpsrld xmm5, xmm6, 19 ; 0A56 _ C5 D1: 72. D6, 13 + vpor xmm6, xmm1, xmm5 ; 0A5B _ C5 F1: EB. F5 + vpslld xmm1, xmm7, 3 ; 0A5F _ C5 F1: 72. F7, 03 + vpsrld xmm7, xmm7, 29 ; 0A64 _ C5 C1: 72. D7, 1D + vpslld xmm2, xmm6, 3 ; 0A69 _ C5 E9: 72. F6, 03 + vpor xmm7, xmm1, xmm7 ; 0A6E _ C5 F1: EB. FF + vpxor xmm1, xmm3, xmm6 ; 0A72 _ C5 E1: EF. CE + vpxor xmm4, xmm4, xmm7 ; 0A76 _ C5 D9: EF. E7 + vpxor xmm1, xmm1, xmm7 ; 0A7A _ C5 F1: EF. CF + vpxor xmm4, xmm4, xmm2 ; 0A7E _ C5 D9: EF. E2 + vpslld xmm5, xmm1, 1 ; 0A82 _ C5 D1: 72. F1, 01 + vpsrld xmm3, xmm1, 31 ; 0A87 _ C5 E1: 72. D1, 1F + vpslld xmm1, xmm4, 7 ; 0A8C _ C5 F1: 72. F4, 07 + vpsrld xmm4, xmm4, 25 ; 0A91 _ C5 D9: 72. D4, 19 + vpor xmm2, xmm5, xmm3 ; 0A96 _ C5 D1: EB. 
D3 + vpor xmm3, xmm1, xmm4 ; 0A9A _ C5 F1: EB. DC + vpslld xmm1, xmm2, 7 ; 0A9E _ C5 F1: 72. F2, 07 + vpxor xmm7, xmm7, xmm3 ; 0AA3 _ C5 C1: EF. FB + vpxor xmm6, xmm6, xmm2 ; 0AA7 _ C5 C9: EF. F2 + vpxor xmm4, xmm7, xmm1 ; 0AAB _ C5 C1: EF. E1 + vpxor xmm5, xmm6, xmm3 ; 0AAF _ C5 C9: EF. EB + vmovd xmm7, dword [ecx+2B14H] ; 0AB3 _ C5 F9: 6E. B9, 00002B14 + vpshufd xmm6, xmm7, 0 ; 0ABB _ C5 F9: 70. F7, 00 + vmovd xmm7, dword [ecx+2B1CH] ; 0AC0 _ C5 F9: 6E. B9, 00002B1C + vpxor xmm2, xmm2, xmm6 ; 0AC8 _ C5 E9: EF. D6 + vpshufd xmm7, xmm7, 0 ; 0ACC _ C5 F9: 70. FF, 00 + vmovd xmm1, dword [ecx+2B10H] ; 0AD1 _ C5 F9: 6E. 89, 00002B10 + vpxor xmm7, xmm3, xmm7 ; 0AD9 _ C5 E1: EF. FF + vpslld xmm3, xmm5, 5 ; 0ADD _ C5 E1: 72. F5, 05 + vpsrld xmm5, xmm5, 27 ; 0AE2 _ C5 D1: 72. D5, 1B + vpshufd xmm1, xmm1, 0 ; 0AE7 _ C5 F9: 70. C9, 00 + vpor xmm3, xmm3, xmm5 ; 0AEC _ C5 E1: EB. DD + vmovd xmm6, dword [ecx+2B18H] ; 0AF0 _ C5 F9: 6E. B1, 00002B18 + vpxor xmm5, xmm3, xmm1 ; 0AF8 _ C5 E1: EF. E9 + vpslld xmm3, xmm4, 22 ; 0AFC _ C5 E1: 72. F4, 16 + vpsrld xmm4, xmm4, 10 ; 0B01 _ C5 D9: 72. D4, 0A + vpor xmm4, xmm3, xmm4 ; 0B06 _ C5 E1: EB. E4 + vpxor xmm5, xmm5, xmm0 ; 0B0A _ C5 D1: EF. E8 + vpshufd xmm3, xmm6, 0 ; 0B0E _ C5 F9: 70. DE, 00 + vpand xmm1, xmm5, xmm2 ; 0B13 _ C5 D1: DB. CA + vpxor xmm6, xmm4, xmm3 ; 0B17 _ C5 D9: EF. F3 + vpor xmm3, xmm1, xmm7 ; 0B1B _ C5 F1: EB. DF + vpxor xmm4, xmm6, xmm0 ; 0B1F _ C5 C9: EF. E0 + vpxor xmm4, xmm4, xmm1 ; 0B23 _ C5 D9: EF. E1 + vpxor xmm1, xmm7, xmm4 ; 0B27 _ C5 C1: EF. CC + vpxor xmm7, xmm2, xmm3 ; 0B2B _ C5 E9: EF. FB + vpxor xmm2, xmm3, xmm5 ; 0B2F _ C5 E1: EF. D5 + vpor xmm3, xmm5, xmm7 ; 0B33 _ C5 D1: EB. DF + vpor xmm4, xmm4, xmm2 ; 0B37 _ C5 D9: EB. E2 + vpxor xmm5, xmm7, xmm1 ; 0B3B _ C5 C1: EF. E9 + vpand xmm4, xmm4, xmm3 ; 0B3F _ C5 D9: DB. E3 + vpxor xmm2, xmm2, xmm5 ; 0B43 _ C5 E9: EF. D5 + vpslld xmm6, xmm4, 13 ; 0B47 _ C5 C9: 72. F4, 0D + vpsrld xmm7, xmm4, 19 ; 0B4C _ C5 C1: 72. D4, 13 + vpor xmm7, xmm6, xmm7 ; 0B51 _ C5 C9: EB. FF + vpslld xmm6, xmm1, 3 ; 0B55 _ C5 C9: 72. F1, 03 + vpsrld xmm1, xmm1, 29 ; 0B5A _ C5 F1: 72. D1, 1D + vpand xmm5, xmm5, xmm4 ; 0B5F _ C5 D1: DB. EC + vpor xmm6, xmm6, xmm1 ; 0B63 _ C5 C9: EB. F1 + vpand xmm1, xmm2, xmm4 ; 0B67 _ C5 E9: DB. CC + vpxor xmm3, xmm3, xmm1 ; 0B6B _ C5 E1: EF. D9 + vpxor xmm2, xmm5, xmm2 ; 0B6F _ C5 D1: EF. D2 + vpxor xmm1, xmm3, xmm7 ; 0B73 _ C5 E1: EF. CF + vpxor xmm2, xmm2, xmm6 ; 0B77 _ C5 E9: EF. D6 + vpxor xmm1, xmm1, xmm6 ; 0B7B _ C5 F1: EF. CE + vpslld xmm4, xmm7, 3 ; 0B7F _ C5 D9: 72. F7, 03 + vpxor xmm5, xmm2, xmm4 ; 0B84 _ C5 E9: EF. EC + vpslld xmm3, xmm1, 1 ; 0B88 _ C5 E1: 72. F1, 01 + vpsrld xmm1, xmm1, 31 ; 0B8D _ C5 F1: 72. D1, 1F + vpor xmm3, xmm3, xmm1 ; 0B92 _ C5 E1: EB. D9 + vpslld xmm1, xmm5, 7 ; 0B96 _ C5 F1: 72. F5, 07 + vpsrld xmm5, xmm5, 25 ; 0B9B _ C5 D1: 72. D5, 19 + vpxor xmm7, xmm7, xmm3 ; 0BA0 _ C5 C1: EF. FB + vpor xmm1, xmm1, xmm5 ; 0BA4 _ C5 F1: EB. CD + vpslld xmm2, xmm3, 7 ; 0BA8 _ C5 E9: 72. F3, 07 + vpxor xmm7, xmm7, xmm1 ; 0BAD _ C5 C1: EF. F9 + vpxor xmm6, xmm6, xmm1 ; 0BB1 _ C5 C9: EF. F1 + vpslld xmm4, xmm7, 5 ; 0BB5 _ C5 D9: 72. F7, 05 + vpsrld xmm5, xmm7, 27 ; 0BBA _ C5 D1: 72. D7, 1B + vmovd xmm7, dword [ecx+2B20H] ; 0BBF _ C5 F9: 6E. B9, 00002B20 + vpxor xmm6, xmm6, xmm2 ; 0BC7 _ C5 C9: EF. F2 + vpor xmm2, xmm4, xmm5 ; 0BCB _ C5 D9: EB. D5 + vmovd xmm5, dword [ecx+2B24H] ; 0BCF _ C5 F9: 6E. A9, 00002B24 + vpshufd xmm4, xmm7, 0 ; 0BD7 _ C5 F9: 70. E7, 00 + vpxor xmm7, xmm2, xmm4 ; 0BDC _ C5 E9: EF. 
FC + vpshufd xmm2, xmm5, 0 ; 0BE0 _ C5 F9: 70. D5, 00 + vpxor xmm4, xmm3, xmm2 ; 0BE5 _ C5 E1: EF. E2 + vpslld xmm3, xmm6, 22 ; 0BE9 _ C5 E1: 72. F6, 16 + vmovd xmm2, dword [ecx+2B28H] ; 0BEE _ C5 F9: 6E. 91, 00002B28 + vpsrld xmm6, xmm6, 10 ; 0BF6 _ C5 C9: 72. D6, 0A + vpor xmm5, xmm3, xmm6 ; 0BFB _ C5 E1: EB. EE + vmovd xmm6, dword [ecx+2B2CH] ; 0BFF _ C5 F9: 6E. B1, 00002B2C + vpshufd xmm3, xmm2, 0 ; 0C07 _ C5 F9: 70. DA, 00 + vpxor xmm2, xmm5, xmm3 ; 0C0C _ C5 D1: EF. D3 + vpshufd xmm5, xmm6, 0 ; 0C10 _ C5 F9: 70. EE, 00 + vpxor xmm3, xmm1, xmm5 ; 0C15 _ C5 F1: EF. DD + vpand xmm1, xmm7, xmm2 ; 0C19 _ C5 C1: DB. CA + vpxor xmm6, xmm1, xmm3 ; 0C1D _ C5 F1: EF. F3 + vpxor xmm1, xmm2, xmm4 ; 0C21 _ C5 E9: EF. CC + vpxor xmm5, xmm1, xmm6 ; 0C25 _ C5 F1: EF. EE + vpor xmm2, xmm3, xmm7 ; 0C29 _ C5 E1: EB. D7 + vpxor xmm1, xmm2, xmm4 ; 0C2D _ C5 E9: EF. CC + vpxor xmm7, xmm7, xmm5 ; 0C31 _ C5 C1: EF. FD + vpor xmm4, xmm1, xmm7 ; 0C35 _ C5 F1: EB. E7 + vpand xmm2, xmm6, xmm1 ; 0C39 _ C5 C9: DB. D1 + vpxor xmm3, xmm4, xmm6 ; 0C3D _ C5 D9: EF. DE + vpxor xmm2, xmm7, xmm2 ; 0C41 _ C5 C1: EF. D2 + vpxor xmm6, xmm1, xmm3 ; 0C45 _ C5 F1: EF. F3 + vpslld xmm1, xmm5, 13 ; 0C49 _ C5 F1: 72. F5, 0D + vpxor xmm4, xmm6, xmm2 ; 0C4E _ C5 C9: EF. E2 + vpsrld xmm5, xmm5, 19 ; 0C52 _ C5 D1: 72. D5, 13 + vpor xmm1, xmm1, xmm5 ; 0C57 _ C5 F1: EB. CD + vpslld xmm7, xmm4, 3 ; 0C5B _ C5 C1: 72. F4, 03 + vpsrld xmm4, xmm4, 29 ; 0C60 _ C5 D9: 72. D4, 1D + vpxor xmm3, xmm3, xmm1 ; 0C65 _ C5 E1: EF. D9 + vpor xmm4, xmm7, xmm4 ; 0C69 _ C5 C1: EB. E4 + vpxor xmm2, xmm2, xmm0 ; 0C6D _ C5 E9: EF. D0 + vpxor xmm5, xmm3, xmm4 ; 0C71 _ C5 E1: EF. EC + vpxor xmm3, xmm2, xmm4 ; 0C75 _ C5 E9: EF. DC + vpslld xmm7, xmm1, 3 ; 0C79 _ C5 C1: 72. F1, 03 + vpslld xmm6, xmm5, 1 ; 0C7E _ C5 C9: 72. F5, 01 + vpxor xmm7, xmm3, xmm7 ; 0C83 _ C5 E1: EF. FF + vpsrld xmm5, xmm5, 31 ; 0C87 _ C5 D1: 72. D5, 1F + vpor xmm6, xmm6, xmm5 ; 0C8C _ C5 C9: EB. F5 + vpslld xmm2, xmm7, 7 ; 0C90 _ C5 E9: 72. F7, 07 + vpsrld xmm5, xmm7, 25 ; 0C95 _ C5 D1: 72. D7, 19 + vpxor xmm1, xmm1, xmm6 ; 0C9A _ C5 F1: EF. CE + vpor xmm7, xmm2, xmm5 ; 0C9E _ C5 E9: EB. FD + vpxor xmm2, xmm1, xmm7 ; 0CA2 _ C5 F1: EF. D7 + vpxor xmm4, xmm4, xmm7 ; 0CA6 _ C5 D9: EF. E7 + vmovd xmm3, dword [ecx+2B30H] ; 0CAA _ C5 F9: 6E. 99, 00002B30 + vpslld xmm1, xmm6, 7 ; 0CB2 _ C5 F1: 72. F6, 07 + vpslld xmm5, xmm2, 5 ; 0CB7 _ C5 D1: 72. F2, 05 + vpsrld xmm2, xmm2, 27 ; 0CBC _ C5 E9: 72. D2, 1B + vpxor xmm4, xmm4, xmm1 ; 0CC1 _ C5 D9: EF. E1 + vpor xmm1, xmm5, xmm2 ; 0CC5 _ C5 D1: EB. CA + vmovd xmm5, dword [ecx+2B34H] ; 0CC9 _ C5 F9: 6E. A9, 00002B34 + vpshufd xmm2, xmm3, 0 ; 0CD1 _ C5 F9: 70. D3, 00 + vpshufd xmm3, xmm5, 0 ; 0CD6 _ C5 F9: 70. DD, 00 + vpxor xmm1, xmm1, xmm2 ; 0CDB _ C5 F1: EF. CA + vpslld xmm2, xmm4, 22 ; 0CDF _ C5 E9: 72. F4, 16 + vpsrld xmm4, xmm4, 10 ; 0CE4 _ C5 D9: 72. D4, 0A + vpxor xmm6, xmm6, xmm3 ; 0CE9 _ C5 C9: EF. F3 + vpor xmm4, xmm2, xmm4 ; 0CED _ C5 E9: EB. E4 + vmovd xmm2, dword [ecx+2B38H] ; 0CF1 _ C5 F9: 6E. 91, 00002B38 + vmovd xmm3, dword [ecx+2B3CH] ; 0CF9 _ C5 F9: 6E. 99, 00002B3C + vpshufd xmm5, xmm2, 0 ; 0D01 _ C5 F9: 70. EA, 00 + vpshufd xmm2, xmm3, 0 ; 0D06 _ C5 F9: 70. D3, 00 + vpxor xmm5, xmm4, xmm5 ; 0D0B _ C5 D9: EF. ED + vpxor xmm7, xmm7, xmm2 ; 0D0F _ C5 C1: EF. FA + vpand xmm2, xmm6, xmm1 ; 0D13 _ C5 C9: DB. D1 + vpor xmm4, xmm1, xmm7 ; 0D17 _ C5 F1: EB. E7 + vpxor xmm3, xmm7, xmm6 ; 0D1B _ C5 C1: EF. DE + vpxor xmm1, xmm1, xmm5 ; 0D1F _ C5 F1: EF. CD + vpxor xmm6, xmm5, xmm3 ; 0D23 _ C5 D1: EF. 
F3 + vpor xmm7, xmm1, xmm2 ; 0D27 _ C5 F1: EB. FA + vpand xmm1, xmm3, xmm4 ; 0D2B _ C5 E1: DB. CC + vpxor xmm3, xmm1, xmm7 ; 0D2F _ C5 F1: EF. DF + vpxor xmm5, xmm4, xmm2 ; 0D33 _ C5 D9: EF. EA + vpxor xmm2, xmm2, xmm3 ; 0D37 _ C5 E9: EF. D3 + vpxor xmm1, xmm5, xmm3 ; 0D3B _ C5 D1: EF. CB + vpor xmm4, xmm2, xmm5 ; 0D3F _ C5 E9: EB. E5 + vpand xmm5, xmm7, xmm5 ; 0D43 _ C5 C1: DB. ED + vpxor xmm2, xmm4, xmm6 ; 0D47 _ C5 D9: EF. D6 + vpxor xmm6, xmm5, xmm6 ; 0D4B _ C5 D1: EF. F6 + vpor xmm4, xmm2, xmm3 ; 0D4F _ C5 E9: EB. E3 + vpxor xmm4, xmm1, xmm4 ; 0D53 _ C5 F1: EF. E4 + vpslld xmm1, xmm4, 13 ; 0D57 _ C5 F1: 72. F4, 0D + vpsrld xmm4, xmm4, 19 ; 0D5C _ C5 D9: 72. D4, 13 + vpor xmm4, xmm1, xmm4 ; 0D61 _ C5 F1: EB. E4 + vpslld xmm1, xmm3, 3 ; 0D65 _ C5 F1: 72. F3, 03 + vpsrld xmm3, xmm3, 29 ; 0D6A _ C5 E1: 72. D3, 1D + vpxor xmm2, xmm2, xmm4 ; 0D6F _ C5 E9: EF. D4 + vpor xmm1, xmm1, xmm3 ; 0D73 _ C5 F1: EB. CB + vpslld xmm5, xmm4, 3 ; 0D77 _ C5 D1: 72. F4, 03 + vpxor xmm3, xmm2, xmm1 ; 0D7C _ C5 E9: EF. D9 + vpxor xmm2, xmm6, xmm1 ; 0D80 _ C5 C9: EF. D1 + vpxor xmm5, xmm2, xmm5 ; 0D84 _ C5 E9: EF. ED + vpslld xmm7, xmm3, 1 ; 0D88 _ C5 C1: 72. F3, 01 + vpsrld xmm3, xmm3, 31 ; 0D8D _ C5 E1: 72. D3, 1F + vpslld xmm6, xmm5, 7 ; 0D92 _ C5 C9: 72. F5, 07 + vpor xmm7, xmm7, xmm3 ; 0D97 _ C5 C1: EB. FB + vpsrld xmm2, xmm5, 25 ; 0D9B _ C5 E9: 72. D5, 19 + vpor xmm6, xmm6, xmm2 ; 0DA0 _ C5 C9: EB. F2 + vpxor xmm4, xmm4, xmm7 ; 0DA4 _ C5 D9: EF. E7 + vpxor xmm2, xmm4, xmm6 ; 0DA8 _ C5 D9: EF. D6 + vpxor xmm1, xmm1, xmm6 ; 0DAC _ C5 F1: EF. CE + vpslld xmm4, xmm7, 7 ; 0DB0 _ C5 D9: 72. F7, 07 + vmovd xmm3, dword [ecx+2B40H] ; 0DB5 _ C5 F9: 6E. 99, 00002B40 + vpxor xmm5, xmm1, xmm4 ; 0DBD _ C5 F1: EF. EC + vpslld xmm1, xmm2, 5 ; 0DC1 _ C5 F1: 72. F2, 05 + vpsrld xmm2, xmm2, 27 ; 0DC6 _ C5 E9: 72. D2, 1B + vpor xmm1, xmm1, xmm2 ; 0DCB _ C5 F1: EB. CA + vpshufd xmm2, xmm3, 0 ; 0DCF _ C5 F9: 70. D3, 00 + vmovd xmm3, dword [ecx+2B4CH] ; 0DD4 _ C5 F9: 6E. 99, 00002B4C + vpxor xmm1, xmm1, xmm2 ; 0DDC _ C5 F1: EF. CA + vmovd xmm2, dword [ecx+2B44H] ; 0DE0 _ C5 F9: 6E. 91, 00002B44 + vpshufd xmm3, xmm3, 0 ; 0DE8 _ C5 F9: 70. DB, 00 + vpxor xmm3, xmm6, xmm3 ; 0DED _ C5 C9: EF. DB + vpshufd xmm6, xmm2, 0 ; 0DF1 _ C5 F9: 70. F2, 00 + vpxor xmm0, xmm3, xmm0 ; 0DF6 _ C5 E1: EF. C0 + vpxor xmm7, xmm7, xmm6 ; 0DFA _ C5 C1: EF. FE + vmovd xmm4, dword [ecx+2B48H] ; 0DFE _ C5 F9: 6E. A1, 00002B48 + vpxor xmm2, xmm7, xmm3 ; 0E06 _ C5 C1: EF. D3 + vpslld xmm3, xmm5, 22 ; 0E0A _ C5 E1: 72. F5, 16 + vpsrld xmm5, xmm5, 10 ; 0E0F _ C5 D1: 72. D5, 0A + vpshufd xmm4, xmm4, 0 ; 0E14 _ C5 F9: 70. E4, 00 + vpor xmm3, xmm3, xmm5 ; 0E19 _ C5 E1: EB. DD + vpxor xmm5, xmm3, xmm4 ; 0E1D _ C5 E1: EF. EC + vpxor xmm4, xmm5, xmm0 ; 0E21 _ C5 D1: EF. E0 + vpxor xmm5, xmm0, xmm1 ; 0E25 _ C5 F9: EF. E9 + vpand xmm0, xmm2, xmm5 ; 0E29 _ C5 E9: DB. C5 + vpxor xmm3, xmm2, xmm5 ; 0E2D _ C5 E9: EF. DD + vpxor xmm0, xmm0, xmm4 ; 0E31 _ C5 F9: EF. C4 + vpxor xmm7, xmm1, xmm3 ; 0E35 _ C5 F1: EF. FB + vpand xmm4, xmm4, xmm3 ; 0E39 _ C5 D9: DB. E3 + vpand xmm6, xmm7, xmm0 ; 0E3D _ C5 C1: DB. F0 + vpxor xmm4, xmm4, xmm7 ; 0E41 _ C5 D9: EF. E7 + vpxor xmm2, xmm5, xmm6 ; 0E45 _ C5 D1: EF. D6 + vpor xmm1, xmm6, xmm2 ; 0E49 _ C5 C9: EB. CA + vpcmpeqd xmm7, xmm7, xmm7 ; 0E4D _ C5 C1: 76. FF + vpxor xmm5, xmm1, xmm4 ; 0E51 _ C5 F1: EF. EC + vpslld xmm1, xmm0, 13 ; 0E55 _ C5 F1: 72. F0, 0D + vpxor xmm5, xmm5, xmm7 ; 0E5A _ C5 D1: EF. EF + vpsrld xmm7, xmm0, 19 ; 0E5E _ C5 C1: 72. D0, 13 + vpor xmm0, xmm3, xmm0 ; 0E63 _ C5 E1: EB. 
C0 + vpor xmm7, xmm1, xmm7 ; 0E67 _ C5 F1: EB. FF + vpslld xmm1, xmm5, 3 ; 0E6B _ C5 F1: 72. F5, 03 + vpsrld xmm5, xmm5, 29 ; 0E70 _ C5 D1: 72. D5, 1D + vpxor xmm3, xmm0, xmm6 ; 0E75 _ C5 F9: EF. DE + vpand xmm4, xmm4, xmm2 ; 0E79 _ C5 D9: DB. E2 + vpor xmm1, xmm1, xmm5 ; 0E7D _ C5 F1: EB. CD + vpxor xmm0, xmm3, xmm4 ; 0E81 _ C5 E1: EF. C4 + vpxor xmm5, xmm0, xmm7 ; 0E85 _ C5 F9: EF. EF + vpxor xmm2, xmm2, xmm1 ; 0E89 _ C5 E9: EF. D1 + vpslld xmm6, xmm7, 3 ; 0E8D _ C5 C9: 72. F7, 03 + vpxor xmm4, xmm5, xmm1 ; 0E92 _ C5 D1: EF. E1 + vpxor xmm2, xmm2, xmm6 ; 0E96 _ C5 E9: EF. D6 + vpslld xmm3, xmm4, 1 ; 0E9A _ C5 E1: 72. F4, 01 + vpsrld xmm0, xmm4, 31 ; 0E9F _ C5 F9: 72. D4, 1F + vpslld xmm5, xmm2, 7 ; 0EA4 _ C5 D1: 72. F2, 07 + vpsrld xmm4, xmm2, 25 ; 0EA9 _ C5 D9: 72. D2, 19 + vpor xmm6, xmm3, xmm0 ; 0EAE _ C5 E1: EB. F0 + vpor xmm3, xmm5, xmm4 ; 0EB2 _ C5 D1: EB. DC + vpslld xmm4, xmm6, 7 ; 0EB6 _ C5 D9: 72. F6, 07 + vpxor xmm1, xmm1, xmm3 ; 0EBB _ C5 F1: EF. CB + vpxor xmm7, xmm7, xmm6 ; 0EBF _ C5 C1: EF. FE + vpxor xmm5, xmm1, xmm4 ; 0EC3 _ C5 F1: EF. EC + vpxor xmm0, xmm7, xmm3 ; 0EC7 _ C5 C1: EF. C3 + vmovd xmm1, dword [ecx+2B54H] ; 0ECB _ C5 F9: 6E. 89, 00002B54 + vpshufd xmm7, xmm1, 0 ; 0ED3 _ C5 F9: 70. F9, 00 + vpxor xmm1, xmm6, xmm7 ; 0ED8 _ C5 C9: EF. CF + vmovd xmm6, dword [ecx+2B5CH] ; 0EDC _ C5 F9: 6E. B1, 00002B5C + vpshufd xmm4, xmm6, 0 ; 0EE4 _ C5 F9: 70. E6, 00 + vmovd xmm2, dword [ecx+2B50H] ; 0EE9 _ C5 F9: 6E. 91, 00002B50 + vpxor xmm4, xmm3, xmm4 ; 0EF1 _ C5 E1: EF. E4 + vpslld xmm3, xmm0, 5 ; 0EF5 _ C5 E1: 72. F0, 05 + vpsrld xmm0, xmm0, 27 ; 0EFA _ C5 F9: 72. D0, 1B + vpshufd xmm2, xmm2, 0 ; 0EFF _ C5 F9: 70. D2, 00 + vpor xmm3, xmm3, xmm0 ; 0F04 _ C5 E1: EB. D8 + vpxor xmm6, xmm3, xmm2 ; 0F08 _ C5 E1: EF. F2 + vpxor xmm2, xmm1, xmm4 ; 0F0C _ C5 F1: EF. D4 + vpxor xmm6, xmm6, xmm1 ; 0F10 _ C5 C9: EF. F1 + vpcmpeqd xmm1, xmm1, xmm1 ; 0F14 _ C5 F1: 76. C9 + vmovd xmm7, dword [ecx+2B58H] ; 0F18 _ C5 F9: 6E. B9, 00002B58 + vpxor xmm3, xmm4, xmm1 ; 0F20 _ C5 D9: EF. D9 + vpslld xmm4, xmm5, 22 ; 0F24 _ C5 D9: 72. F5, 16 + vpsrld xmm5, xmm5, 10 ; 0F29 _ C5 D1: 72. D5, 0A + vpshufd xmm0, xmm7, 0 ; 0F2E _ C5 F9: 70. C7, 00 + vpor xmm4, xmm4, xmm5 ; 0F33 _ C5 D9: EB. E5 + vpxor xmm5, xmm4, xmm0 ; 0F37 _ C5 D9: EF. E8 + vpand xmm7, xmm2, xmm6 ; 0F3B _ C5 E9: DB. FE + vpxor xmm0, xmm5, xmm3 ; 0F3F _ C5 D1: EF. C3 + vpxor xmm4, xmm7, xmm0 ; 0F43 _ C5 C1: EF. E0 + vpor xmm7, xmm0, xmm2 ; 0F47 _ C5 F9: EB. FA + vpand xmm5, xmm3, xmm4 ; 0F4B _ C5 E1: DB. EC + vpxor xmm3, xmm2, xmm3 ; 0F4F _ C5 E9: EF. DB + vpxor xmm0, xmm5, xmm6 ; 0F53 _ C5 D1: EF. C6 + vpxor xmm2, xmm3, xmm4 ; 0F57 _ C5 E1: EF. D4 + vpxor xmm2, xmm2, xmm7 ; 0F5B _ C5 E9: EF. D7 + vpand xmm3, xmm6, xmm0 ; 0F5F _ C5 C9: DB. D8 + vpxor xmm5, xmm3, xmm2 ; 0F63 _ C5 E1: EF. EA + vpslld xmm3, xmm4, 13 ; 0F67 _ C5 E1: 72. F4, 0D + vpsrld xmm4, xmm4, 19 ; 0F6C _ C5 D9: 72. D4, 13 + vpxor xmm7, xmm7, xmm6 ; 0F71 _ C5 C1: EF. FE + vpor xmm4, xmm3, xmm4 ; 0F75 _ C5 E1: EB. E4 + vpslld xmm3, xmm5, 3 ; 0F79 _ C5 E1: 72. F5, 03 + vpsrld xmm5, xmm5, 29 ; 0F7E _ C5 D1: 72. D5, 1D + vpxor xmm6, xmm7, xmm1 ; 0F83 _ C5 C1: EF. F1 + vpor xmm5, xmm3, xmm5 ; 0F87 _ C5 E1: EB. ED + vpxor xmm3, xmm0, xmm4 ; 0F8B _ C5 F9: EF. DC + vpor xmm0, xmm2, xmm0 ; 0F8F _ C5 E9: EB. C0 + vpxor xmm3, xmm3, xmm5 ; 0F93 _ C5 E1: EF. DD + vpxor xmm0, xmm0, xmm6 ; 0F97 _ C5 F9: EF. C6 + vpslld xmm7, xmm4, 3 ; 0F9B _ C5 C1: 72. F4, 03 + vpxor xmm2, xmm0, xmm5 ; 0FA0 _ C5 F9: EF. D5 + vpslld xmm6, xmm3, 1 ; 0FA4 _ C5 C9: 72. 
F3, 01 + vpxor xmm7, xmm2, xmm7 ; 0FA9 _ C5 E9: EF. FF + vpsrld xmm3, xmm3, 31 ; 0FAD _ C5 E1: 72. D3, 1F + vpor xmm0, xmm6, xmm3 ; 0FB2 _ C5 C9: EB. C3 + vpslld xmm3, xmm7, 7 ; 0FB6 _ C5 E1: 72. F7, 07 + vpsrld xmm2, xmm7, 25 ; 0FBB _ C5 E9: 72. D7, 19 + vpxor xmm4, xmm4, xmm0 ; 0FC0 _ C5 D9: EF. E0 + vpor xmm2, xmm3, xmm2 ; 0FC4 _ C5 E1: EB. D2 + vpslld xmm3, xmm0, 7 ; 0FC8 _ C5 E1: 72. F0, 07 + vmovd xmm7, dword [ecx+2B60H] ; 0FCD _ C5 F9: 6E. B9, 00002B60 + vpxor xmm4, xmm4, xmm2 ; 0FD5 _ C5 D9: EF. E2 + vpxor xmm5, xmm5, xmm2 ; 0FD9 _ C5 D1: EF. EA + vpxor xmm6, xmm5, xmm3 ; 0FDD _ C5 D1: EF. F3 + vpslld xmm5, xmm4, 5 ; 0FE1 _ C5 D1: 72. F4, 05 + vpshufd xmm3, xmm7, 0 ; 0FE6 _ C5 F9: 70. DF, 00 + vpsrld xmm4, xmm4, 27 ; 0FEB _ C5 D9: 72. D4, 1B + vmovd xmm7, dword [ecx+2B6CH] ; 0FF0 _ C5 F9: 6E. B9, 00002B6C + vpor xmm4, xmm5, xmm4 ; 0FF8 _ C5 D1: EB. E4 + vpshufd xmm7, xmm7, 0 ; 0FFC _ C5 F9: 70. FF, 00 + vpxor xmm3, xmm4, xmm3 ; 1001 _ C5 D9: EF. DB + vmovd xmm4, dword [ecx+2B68H] ; 1005 _ C5 F9: 6E. A1, 00002B68 + vpxor xmm7, xmm2, xmm7 ; 100D _ C5 E9: EF. FF + vpslld xmm2, xmm6, 22 ; 1011 _ C5 E9: 72. F6, 16 + vpsrld xmm6, xmm6, 10 ; 1016 _ C5 C9: 72. D6, 0A + vpshufd xmm4, xmm4, 0 ; 101B _ C5 F9: 70. E4, 00 + vpor xmm2, xmm2, xmm6 ; 1020 _ C5 E9: EB. D6 + vmovd xmm5, dword [ecx+2B64H] ; 1024 _ C5 F9: 6E. A9, 00002B64 + vpxor xmm2, xmm2, xmm4 ; 102C _ C5 E9: EF. D4 + vpshufd xmm5, xmm5, 0 ; 1030 _ C5 F9: 70. ED, 00 + vpxor xmm4, xmm2, xmm1 ; 1035 _ C5 E9: EF. E1 + vpxor xmm6, xmm3, xmm7 ; 1039 _ C5 E1: EF. F7 + vpand xmm3, xmm7, xmm3 ; 103D _ C5 C1: DB. DB + vpxor xmm3, xmm3, xmm4 ; 1041 _ C5 E1: EF. DC + vpxor xmm0, xmm0, xmm5 ; 1045 _ C5 F9: EF. C5 + vpxor xmm2, xmm0, xmm3 ; 1049 _ C5 F9: EF. D3 + vpor xmm4, xmm4, xmm7 ; 104D _ C5 D9: EB. E7 + vpor xmm0, xmm6, xmm2 ; 1051 _ C5 C9: EB. C2 + vpxor xmm6, xmm4, xmm6 ; 1055 _ C5 D9: EF. F6 + vpxor xmm5, xmm6, xmm2 ; 1059 _ C5 C9: EF. EA + vpor xmm4, xmm0, xmm3 ; 105D _ C5 F9: EB. E3 + vpxor xmm0, xmm7, xmm0 ; 1061 _ C5 C1: EF. C0 + vpxor xmm4, xmm4, xmm5 ; 1065 _ C5 D9: EF. E5 + vpxor xmm7, xmm0, xmm3 ; 1069 _ C5 F9: EF. FB + vpslld xmm0, xmm4, 13 ; 106D _ C5 F9: 72. F4, 0D + vpxor xmm7, xmm7, xmm4 ; 1072 _ C5 C1: EF. FC + vpsrld xmm4, xmm4, 19 ; 1076 _ C5 D9: 72. D4, 13 + vpor xmm0, xmm0, xmm4 ; 107B _ C5 F9: EB. C4 + vpslld xmm6, xmm7, 3 ; 107F _ C5 C9: 72. F7, 03 + vpsrld xmm4, xmm7, 29 ; 1084 _ C5 D9: 72. D7, 1D + vpxor xmm3, xmm3, xmm1 ; 1089 _ C5 E1: EF. D9 + vpand xmm5, xmm5, xmm7 ; 108D _ C5 D1: DB. EF + vpor xmm4, xmm6, xmm4 ; 1091 _ C5 C9: EB. E4 + vpxor xmm2, xmm2, xmm0 ; 1095 _ C5 E9: EF. D0 + vpxor xmm7, xmm3, xmm5 ; 1099 _ C5 E1: EF. FD + vpxor xmm6, xmm2, xmm4 ; 109D _ C5 E9: EF. F4 + vpxor xmm3, xmm7, xmm4 ; 10A1 _ C5 C1: EF. DC + vpslld xmm2, xmm0, 3 ; 10A5 _ C5 E9: 72. F0, 03 + vpslld xmm7, xmm6, 1 ; 10AA _ C5 C1: 72. F6, 01 + vpxor xmm5, xmm3, xmm2 ; 10AF _ C5 E1: EF. EA + vpsrld xmm6, xmm6, 31 ; 10B3 _ C5 C9: 72. D6, 1F + vpor xmm2, xmm7, xmm6 ; 10B8 _ C5 C1: EB. D6 + vpslld xmm3, xmm5, 7 ; 10BC _ C5 E1: 72. F5, 07 + vpsrld xmm5, xmm5, 25 ; 10C1 _ C5 D1: 72. D5, 19 + vpxor xmm0, xmm0, xmm2 ; 10C6 _ C5 F9: EF. C2 + vpor xmm5, xmm3, xmm5 ; 10CA _ C5 E1: EB. ED + vpslld xmm3, xmm2, 7 ; 10CE _ C5 E1: 72. F2, 07 + vpxor xmm0, xmm0, xmm5 ; 10D3 _ C5 F9: EF. C5 + vpxor xmm4, xmm4, xmm5 ; 10D7 _ C5 D9: EF. E5 + vmovd xmm7, dword [ecx+2B70H] ; 10DB _ C5 F9: 6E. B9, 00002B70 + vpxor xmm3, xmm4, xmm3 ; 10E3 _ C5 D9: EF. DB + vpslld xmm4, xmm0, 5 ; 10E7 _ C5 D9: 72. F0, 05 + vpsrld xmm0, xmm0, 27 ; 10EC _ C5 F9: 72. 
D0, 1B + vpor xmm6, xmm4, xmm0 ; 10F1 _ C5 D9: EB. F0 + vpshufd xmm4, xmm7, 0 ; 10F5 _ C5 F9: 70. E7, 00 + vpxor xmm0, xmm6, xmm4 ; 10FA _ C5 C9: EF. C4 + vpslld xmm4, xmm3, 22 ; 10FE _ C5 D9: 72. F3, 16 + vpsrld xmm3, xmm3, 10 ; 1103 _ C5 E1: 72. D3, 0A + vmovd xmm7, dword [ecx+2B74H] ; 1108 _ C5 F9: 6E. B9, 00002B74 + vpor xmm3, xmm4, xmm3 ; 1110 _ C5 D9: EB. DB + vmovd xmm4, dword [ecx+2B78H] ; 1114 _ C5 F9: 6E. A1, 00002B78 + vpshufd xmm6, xmm7, 0 ; 111C _ C5 F9: 70. F7, 00 + vpshufd xmm7, xmm4, 0 ; 1121 _ C5 F9: 70. FC, 00 + vpxor xmm2, xmm2, xmm6 ; 1126 _ C5 E9: EF. D6 + vmovd xmm4, dword [ecx+2B7CH] ; 112A _ C5 F9: 6E. A1, 00002B7C + vpxor xmm6, xmm3, xmm7 ; 1132 _ C5 E1: EF. F7 + vpshufd xmm3, xmm4, 0 ; 1136 _ C5 F9: 70. DC, 00 + vpxor xmm3, xmm5, xmm3 ; 113B _ C5 D1: EF. DB + vpor xmm5, xmm2, xmm6 ; 113F _ C5 E9: EB. EE + vpxor xmm4, xmm5, xmm3 ; 1143 _ C5 D1: EF. E3 + vpxor xmm5, xmm2, xmm6 ; 1147 _ C5 E9: EF. EE + vpxor xmm2, xmm6, xmm4 ; 114B _ C5 C9: EF. D4 + vpor xmm3, xmm3, xmm5 ; 114F _ C5 E1: EB. DD + vpxor xmm7, xmm5, xmm2 ; 1153 _ C5 D1: EF. FA + vmovdqu oword [esp+0B0H], xmm4 ; 1157 _ C5 FA: 7F. A4 24, 000000B0 + vpor xmm4, xmm4, xmm7 ; 1160 _ C5 D9: EB. E7 + vmovdqu oword [esp+0A0H], xmm0 ; 1164 _ C5 FA: 7F. 84 24, 000000A0 + vpor xmm6, xmm0, xmm7 ; 116D _ C5 F9: EB. F7 + vpxor xmm0, xmm4, xmm0 ; 1171 _ C5 D9: EF. C0 + vpxor xmm6, xmm6, xmm2 ; 1175 _ C5 C9: EF. F2 + vpxor xmm0, xmm0, xmm7 ; 1179 _ C5 F9: EF. C7 + vpxor xmm2, xmm2, xmm0 ; 117D _ C5 E9: EF. D0 + vpand xmm4, xmm0, xmm6 ; 1181 _ C5 F9: DB. E6 + vpxor xmm0, xmm2, xmm1 ; 1185 _ C5 E9: EF. C1 + vpxor xmm4, xmm4, xmm7 ; 1189 _ C5 D9: EF. E7 + vpor xmm2, xmm0, xmm6 ; 118D _ C5 F9: EB. D6 + vpxor xmm7, xmm7, xmm2 ; 1191 _ C5 C1: EF. FA + vpand xmm5, xmm3, oword [esp+0A0H] ; 1195 _ C5 E1: DB. AC 24, 000000A0 + vpslld xmm0, xmm7, 13 ; 119E _ C5 F9: 72. F7, 0D + vpsrld xmm2, xmm7, 19 ; 11A3 _ C5 E9: 72. D7, 13 + vpslld xmm7, xmm4, 3 ; 11A8 _ C5 C1: 72. F4, 03 + vpsrld xmm4, xmm4, 29 ; 11AD _ C5 D9: 72. D4, 1D + vpor xmm0, xmm0, xmm2 ; 11B2 _ C5 F9: EB. C2 + vpor xmm2, xmm7, xmm4 ; 11B6 _ C5 C1: EB. D4 + vpxor xmm4, xmm5, oword [esp+0B0H] ; 11BA _ C5 D1: EF. A4 24, 000000B0 + vpxor xmm5, xmm6, xmm2 ; 11C3 _ C5 C9: EF. EA + vpxor xmm3, xmm4, xmm0 ; 11C7 _ C5 D9: EF. D8 + vpslld xmm6, xmm0, 3 ; 11CB _ C5 C9: 72. F0, 03 + vpxor xmm7, xmm3, xmm2 ; 11D0 _ C5 E1: EF. FA + vpxor xmm5, xmm5, xmm6 ; 11D4 _ C5 D1: EF. EE + vpslld xmm4, xmm7, 1 ; 11D8 _ C5 D9: 72. F7, 01 + vpsrld xmm3, xmm7, 31 ; 11DD _ C5 E1: 72. D7, 1F + vpor xmm6, xmm4, xmm3 ; 11E2 _ C5 D9: EB. F3 + vpslld xmm7, xmm5, 7 ; 11E6 _ C5 C1: 72. F5, 07 + vpsrld xmm4, xmm5, 25 ; 11EB _ C5 D9: 72. D5, 19 + vpxor xmm0, xmm0, xmm6 ; 11F0 _ C5 F9: EF. C6 + vpor xmm7, xmm7, xmm4 ; 11F4 _ C5 C1: EB. FC + vpslld xmm4, xmm6, 7 ; 11F8 _ C5 D9: 72. F6, 07 + vpxor xmm0, xmm0, xmm7 ; 11FD _ C5 F9: EF. C7 + vpxor xmm2, xmm2, xmm7 ; 1201 _ C5 E9: EF. D7 + vpxor xmm4, xmm2, xmm4 ; 1205 _ C5 E9: EF. E4 + vpslld xmm3, xmm0, 5 ; 1209 _ C5 E1: 72. F0, 05 + vmovd xmm2, dword [ecx+2B80H] ; 120E _ C5 F9: 6E. 91, 00002B80 + vpsrld xmm0, xmm0, 27 ; 1216 _ C5 F9: 72. D0, 1B + vpor xmm5, xmm3, xmm0 ; 121B _ C5 E1: EB. E8 + vpshufd xmm3, xmm2, 0 ; 121F _ C5 F9: 70. DA, 00 + vmovd xmm2, dword [ecx+2B84H] ; 1224 _ C5 F9: 6E. 91, 00002B84 + vpxor xmm0, xmm5, xmm3 ; 122C _ C5 D1: EF. C3 + vpshufd xmm5, xmm2, 0 ; 1230 _ C5 F9: 70. EA, 00 + vpxor xmm5, xmm6, xmm5 ; 1235 _ C5 C9: EF. ED + vpslld xmm6, xmm4, 22 ; 1239 _ C5 C9: 72. F4, 16 + vpsrld xmm4, xmm4, 10 ; 123E _ C5 D9: 72. 
D4, 0A + vpor xmm2, xmm6, xmm4 ; 1243 _ C5 C9: EB. D4 + vmovd xmm4, dword [ecx+2B8CH] ; 1247 _ C5 F9: 6E. A1, 00002B8C + vpshufd xmm4, xmm4, 0 ; 124F _ C5 F9: 70. E4, 00 + vmovd xmm3, dword [ecx+2B88H] ; 1254 _ C5 F9: 6E. 99, 00002B88 + vpxor xmm7, xmm7, xmm4 ; 125C _ C5 C1: EF. FC + vpshufd xmm6, xmm3, 0 ; 1260 _ C5 F9: 70. F3, 00 + vpxor xmm3, xmm7, xmm0 ; 1265 _ C5 C1: EF. D8 + vpxor xmm6, xmm2, xmm6 ; 1269 _ C5 E9: EF. F6 + vpand xmm4, xmm5, xmm3 ; 126D _ C5 D1: DB. E3 + vpxor xmm2, xmm5, xmm6 ; 1271 _ C5 D1: EF. D6 + vpxor xmm5, xmm4, xmm0 ; 1275 _ C5 D9: EF. E8 + vpor xmm0, xmm0, xmm3 ; 1279 _ C5 F9: EB. C3 + vpxor xmm7, xmm2, xmm3 ; 127D _ C5 E9: EF. FB + vpxor xmm4, xmm0, xmm2 ; 1281 _ C5 F9: EF. E2 + vpxor xmm0, xmm3, xmm6 ; 1285 _ C5 E1: EF. C6 + vpor xmm3, xmm6, xmm5 ; 1289 _ C5 C9: EB. DD + vpor xmm6, xmm0, xmm4 ; 128D _ C5 F9: EB. F4 + vpxor xmm2, xmm3, xmm7 ; 1291 _ C5 E1: EF. D7 + vpxor xmm7, xmm7, xmm1 ; 1295 _ C5 C1: EF. F9 + vpor xmm3, xmm7, xmm5 ; 1299 _ C5 C1: EB. DD + vpxor xmm5, xmm5, xmm0 ; 129D _ C5 D1: EF. E8 + vpxor xmm0, xmm5, xmm3 ; 12A1 _ C5 D1: EF. C3 + vpxor xmm5, xmm0, xmm6 ; 12A5 _ C5 F9: EF. EE + vpxor xmm6, xmm3, xmm6 ; 12A9 _ C5 E1: EF. F6 + vpslld xmm7, xmm5, 13 ; 12AD _ C5 C1: 72. F5, 0D + vpsrld xmm0, xmm5, 19 ; 12B2 _ C5 F9: 72. D5, 13 + vpor xmm0, xmm7, xmm0 ; 12B7 _ C5 C1: EB. C0 + vpslld xmm5, xmm2, 3 ; 12BB _ C5 D1: 72. F2, 03 + vpsrld xmm2, xmm2, 29 ; 12C0 _ C5 E9: 72. D2, 1D + vpxor xmm3, xmm6, xmm0 ; 12C5 _ C5 C9: EF. D8 + vpor xmm5, xmm5, xmm2 ; 12C9 _ C5 D1: EB. EA + vpslld xmm7, xmm0, 3 ; 12CD _ C5 C1: 72. F0, 03 + vpxor xmm2, xmm3, xmm5 ; 12D2 _ C5 E1: EF. D5 + vpxor xmm4, xmm4, xmm5 ; 12D6 _ C5 D9: EF. E5 + vpxor xmm6, xmm4, xmm7 ; 12DA _ C5 D9: EF. F7 + vpslld xmm4, xmm2, 1 ; 12DE _ C5 D9: 72. F2, 01 + vpsrld xmm2, xmm2, 31 ; 12E3 _ C5 E9: 72. D2, 1F + vpslld xmm3, xmm6, 7 ; 12E8 _ C5 E1: 72. F6, 07 + vpor xmm4, xmm4, xmm2 ; 12ED _ C5 D9: EB. E2 + vpsrld xmm6, xmm6, 25 ; 12F1 _ C5 C9: 72. D6, 19 + vpor xmm3, xmm3, xmm6 ; 12F6 _ C5 E1: EB. DE + vpxor xmm0, xmm0, xmm4 ; 12FA _ C5 F9: EF. C4 + vmovd xmm6, dword [ecx+2B94H] ; 12FE _ C5 F9: 6E. B1, 00002B94 + vpxor xmm2, xmm0, xmm3 ; 1306 _ C5 F9: EF. D3 + vpxor xmm5, xmm5, xmm3 ; 130A _ C5 D1: EF. EB + vpslld xmm0, xmm4, 7 ; 130E _ C5 F9: 72. F4, 07 + vpxor xmm7, xmm5, xmm0 ; 1313 _ C5 D1: EF. F8 + vpshufd xmm0, xmm6, 0 ; 1317 _ C5 F9: 70. C6, 00 + vpxor xmm0, xmm4, xmm0 ; 131C _ C5 D9: EF. C0 + vmovd xmm4, dword [ecx+2B9CH] ; 1320 _ C5 F9: 6E. A1, 00002B9C + vpshufd xmm4, xmm4, 0 ; 1328 _ C5 F9: 70. E4, 00 + vmovd xmm5, dword [ecx+2B90H] ; 132D _ C5 F9: 6E. A9, 00002B90 + vpxor xmm4, xmm3, xmm4 ; 1335 _ C5 E1: EF. E4 + vpslld xmm3, xmm2, 5 ; 1339 _ C5 E1: 72. F2, 05 + vpsrld xmm2, xmm2, 27 ; 133E _ C5 E9: 72. D2, 1B + vpshufd xmm5, xmm5, 0 ; 1343 _ C5 F9: 70. ED, 00 + vpor xmm3, xmm3, xmm2 ; 1348 _ C5 E1: EB. DA + vpxor xmm3, xmm3, xmm5 ; 134C _ C5 E1: EF. DD + vmovd xmm6, dword [ecx+2B98H] ; 1350 _ C5 F9: 6E. B1, 00002B98 + vpxor xmm2, xmm3, xmm1 ; 1358 _ C5 E1: EF. D1 + vpslld xmm3, xmm7, 22 ; 135C _ C5 E1: 72. F7, 16 + vpsrld xmm7, xmm7, 10 ; 1361 _ C5 C1: 72. D7, 0A + vpor xmm3, xmm3, xmm7 ; 1366 _ C5 E1: EB. DF + vpand xmm5, xmm2, xmm0 ; 136A _ C5 E9: DB. E8 + vpshufd xmm7, xmm6, 0 ; 136E _ C5 F9: 70. FE, 00 + vpxor xmm6, xmm3, xmm7 ; 1373 _ C5 E1: EF. F7 + vpxor xmm3, xmm6, xmm1 ; 1377 _ C5 C9: EF. D9 + vpor xmm6, xmm5, xmm4 ; 137B _ C5 D1: EB. F4 + vpxor xmm7, xmm3, xmm5 ; 137F _ C5 E1: EF. FD + vpxor xmm0, xmm0, xmm6 ; 1383 _ C5 F9: EF. 
C6 + vpxor xmm3, xmm6, xmm2 ; 1387 _ C5 C9: EF. DA + vpxor xmm5, xmm4, xmm7 ; 138B _ C5 D9: EF. EF + vpor xmm4, xmm2, xmm0 ; 138F _ C5 E9: EB. E0 + vpor xmm2, xmm7, xmm3 ; 1393 _ C5 C1: EB. D3 + vpand xmm7, xmm2, xmm4 ; 1397 _ C5 E9: DB. FC + vpxor xmm6, xmm0, xmm5 ; 139B _ C5 F9: EF. F5 + vpslld xmm0, xmm7, 13 ; 139F _ C5 F9: 72. F7, 0D + vpsrld xmm2, xmm7, 19 ; 13A4 _ C5 E9: 72. D7, 13 + vpxor xmm3, xmm3, xmm6 ; 13A9 _ C5 E1: EF. DE + vpor xmm0, xmm0, xmm2 ; 13AD _ C5 F9: EB. C2 + vpslld xmm2, xmm5, 3 ; 13B1 _ C5 E9: 72. F5, 03 + vpsrld xmm5, xmm5, 29 ; 13B6 _ C5 D1: 72. D5, 1D + vpor xmm5, xmm2, xmm5 ; 13BB _ C5 E9: EB. ED + vpand xmm2, xmm3, xmm7 ; 13BF _ C5 E1: DB. D7 + vpxor xmm4, xmm4, xmm2 ; 13C3 _ C5 D9: EF. E2 + vpand xmm7, xmm6, xmm7 ; 13C7 _ C5 C9: DB. FF + vpxor xmm4, xmm4, xmm0 ; 13CB _ C5 D9: EF. E0 + vpxor xmm3, xmm7, xmm3 ; 13CF _ C5 C1: EF. DB + vpxor xmm4, xmm4, xmm5 ; 13D3 _ C5 D9: EF. E5 + vpxor xmm6, xmm3, xmm5 ; 13D7 _ C5 E1: EF. F5 + vpslld xmm3, xmm0, 3 ; 13DB _ C5 E1: 72. F0, 03 + vpslld xmm2, xmm4, 1 ; 13E0 _ C5 E9: 72. F4, 01 + vpxor xmm7, xmm6, xmm3 ; 13E5 _ C5 C9: EF. FB + vpsrld xmm4, xmm4, 31 ; 13E9 _ C5 D9: 72. D4, 1F + vpor xmm3, xmm2, xmm4 ; 13EE _ C5 E9: EB. DC + vpslld xmm2, xmm7, 7 ; 13F2 _ C5 E9: 72. F7, 07 + vpsrld xmm7, xmm7, 25 ; 13F7 _ C5 C1: 72. D7, 19 + vpxor xmm0, xmm0, xmm3 ; 13FC _ C5 F9: EF. C3 + vpor xmm6, xmm2, xmm7 ; 1400 _ C5 E9: EB. F7 + vpslld xmm4, xmm3, 7 ; 1404 _ C5 D9: 72. F3, 07 + vpxor xmm0, xmm0, xmm6 ; 1409 _ C5 F9: EF. C6 + vpxor xmm5, xmm5, xmm6 ; 140D _ C5 D1: EF. EE + vmovd xmm7, dword [ecx+2BA0H] ; 1411 _ C5 F9: 6E. B9, 00002BA0 + vpxor xmm2, xmm5, xmm4 ; 1419 _ C5 D1: EF. D4 + vpslld xmm5, xmm0, 5 ; 141D _ C5 D1: 72. F0, 05 + vpsrld xmm0, xmm0, 27 ; 1422 _ C5 F9: 72. D0, 1B + vpor xmm4, xmm5, xmm0 ; 1427 _ C5 D1: EB. E0 + vmovd xmm5, dword [ecx+2BA4H] ; 142B _ C5 F9: 6E. A9, 00002BA4 + vpshufd xmm0, xmm7, 0 ; 1433 _ C5 F9: 70. C7, 00 + vpxor xmm7, xmm4, xmm0 ; 1438 _ C5 D9: EF. F8 + vpshufd xmm4, xmm5, 0 ; 143C _ C5 F9: 70. E5, 00 + vpxor xmm0, xmm3, xmm4 ; 1441 _ C5 E1: EF. C4 + vpslld xmm3, xmm2, 22 ; 1445 _ C5 E1: 72. F2, 16 + vmovd xmm4, dword [ecx+2BA8H] ; 144A _ C5 F9: 6E. A1, 00002BA8 + vpsrld xmm2, xmm2, 10 ; 1452 _ C5 E9: 72. D2, 0A + vmovd xmm5, dword [ecx+2BACH] ; 1457 _ C5 F9: 6E. A9, 00002BAC + vpor xmm3, xmm3, xmm2 ; 145F _ C5 E1: EB. DA + vpshufd xmm2, xmm4, 0 ; 1463 _ C5 F9: 70. D4, 00 + vpxor xmm4, xmm3, xmm2 ; 1468 _ C5 E1: EF. E2 + vpshufd xmm3, xmm5, 0 ; 146C _ C5 F9: 70. DD, 00 + vpand xmm2, xmm7, xmm4 ; 1471 _ C5 C1: DB. D4 + vpxor xmm6, xmm6, xmm3 ; 1475 _ C5 C9: EF. F3 + vpxor xmm4, xmm4, xmm0 ; 1479 _ C5 D9: EF. E0 + vpxor xmm5, xmm2, xmm6 ; 147D _ C5 E9: EF. EE + vpor xmm6, xmm6, xmm7 ; 1481 _ C5 C9: EB. F7 + vpxor xmm4, xmm4, xmm5 ; 1485 _ C5 D9: EF. E5 + vpxor xmm0, xmm6, xmm0 ; 1489 _ C5 C9: EF. C0 + vpxor xmm7, xmm7, xmm4 ; 148D _ C5 C1: EF. FC + vpslld xmm6, xmm4, 13 ; 1491 _ C5 C9: 72. F4, 0D + vpor xmm3, xmm0, xmm7 ; 1496 _ C5 F9: EB. DF + vpsrld xmm4, xmm4, 19 ; 149A _ C5 D9: 72. D4, 13 + vpxor xmm2, xmm3, xmm5 ; 149F _ C5 E1: EF. D5 + vpand xmm5, xmm5, xmm0 ; 14A3 _ C5 D1: DB. E8 + vpxor xmm5, xmm7, xmm5 ; 14A7 _ C5 C1: EF. ED + vpxor xmm0, xmm0, xmm2 ; 14AB _ C5 F9: EF. C2 + vpxor xmm7, xmm0, xmm5 ; 14AF _ C5 F9: EF. FD + vpor xmm0, xmm6, xmm4 ; 14B3 _ C5 C9: EB. C4 + vpslld xmm4, xmm7, 3 ; 14B7 _ C5 D9: 72. F7, 03 + vpsrld xmm3, xmm7, 29 ; 14BC _ C5 E1: 72. D7, 1D + vpor xmm4, xmm4, xmm3 ; 14C1 _ C5 D9: EB. E3 + vpxor xmm2, xmm2, xmm0 ; 14C5 _ C5 E9: EF. 
D0 + vpxor xmm1, xmm5, xmm1 ; 14C9 _ C5 D1: EF. C9 + vpxor xmm2, xmm2, xmm4 ; 14CD _ C5 E9: EF. D4 + vpxor xmm1, xmm1, xmm4 ; 14D1 _ C5 F1: EF. CC + vpslld xmm3, xmm0, 3 ; 14D5 _ C5 E1: 72. F0, 03 + vpxor xmm6, xmm1, xmm3 ; 14DA _ C5 F1: EF. F3 + vpslld xmm5, xmm2, 1 ; 14DE _ C5 D1: 72. F2, 01 + vpsrld xmm2, xmm2, 31 ; 14E3 _ C5 E9: 72. D2, 1F + vpslld xmm7, xmm6, 7 ; 14E8 _ C5 C1: 72. F6, 07 + vpor xmm1, xmm5, xmm2 ; 14ED _ C5 D1: EB. CA + vpsrld xmm3, xmm6, 25 ; 14F1 _ C5 E1: 72. D6, 19 + vpor xmm3, xmm7, xmm3 ; 14F6 _ C5 C1: EB. DB + vpxor xmm0, xmm0, xmm1 ; 14FA _ C5 F9: EF. C1 + vpxor xmm2, xmm0, xmm3 ; 14FE _ C5 F9: EF. D3 + vpxor xmm0, xmm4, xmm3 ; 1502 _ C5 D9: EF. C3 + vpslld xmm5, xmm1, 7 ; 1506 _ C5 D1: 72. F1, 07 + vpxor xmm6, xmm0, xmm5 ; 150B _ C5 F9: EF. F5 + vpslld xmm0, xmm2, 5 ; 150F _ C5 F9: 72. F2, 05 + vpsrld xmm5, xmm2, 27 ; 1514 _ C5 D1: 72. D2, 1B + vmovd xmm7, dword [ecx+2BB0H] ; 1519 _ C5 F9: 6E. B9, 00002BB0 + vpor xmm2, xmm0, xmm5 ; 1521 _ C5 F9: EB. D5 + vmovd xmm0, dword [ecx+2BB4H] ; 1525 _ C5 F9: 6E. 81, 00002BB4 + vpshufd xmm4, xmm7, 0 ; 152D _ C5 F9: 70. E7, 00 + vpshufd xmm7, xmm0, 0 ; 1532 _ C5 F9: 70. F8, 00 + vpxor xmm5, xmm2, xmm4 ; 1537 _ C5 E9: EF. EC + vpxor xmm4, xmm1, xmm7 ; 153B _ C5 F1: EF. E7 + vpslld xmm1, xmm6, 22 ; 153F _ C5 F1: 72. F6, 16 + vpsrld xmm6, xmm6, 10 ; 1544 _ C5 C9: 72. D6, 0A + vmovd xmm0, dword [ecx+2BB8H] ; 1549 _ C5 F9: 6E. 81, 00002BB8 + vpor xmm7, xmm1, xmm6 ; 1551 _ C5 F1: EB. FE + vmovd xmm1, dword [ecx+2BBCH] ; 1555 _ C5 F9: 6E. 89, 00002BBC + vpshufd xmm2, xmm0, 0 ; 155D _ C5 F9: 70. D0, 00 + vpand xmm0, xmm4, xmm5 ; 1562 _ C5 D9: DB. C5 + vpshufd xmm6, xmm1, 0 ; 1566 _ C5 F9: 70. F1, 00 + vpxor xmm2, xmm7, xmm2 ; 156B _ C5 C1: EF. D2 + vpxor xmm3, xmm3, xmm6 ; 156F _ C5 E1: EF. DE + vpor xmm6, xmm5, xmm3 ; 1573 _ C5 D1: EB. F3 + vpxor xmm1, xmm3, xmm4 ; 1577 _ C5 E1: EF. CC + vpxor xmm5, xmm5, xmm2 ; 157B _ C5 D1: EF. EA + vpxor xmm3, xmm2, xmm1 ; 157F _ C5 E9: EF. D9 + vpor xmm2, xmm5, xmm0 ; 1583 _ C5 D1: EB. D0 + vpand xmm4, xmm1, xmm6 ; 1587 _ C5 F1: DB. E6 + vpxor xmm7, xmm4, xmm2 ; 158B _ C5 D9: EF. FA + vpxor xmm5, xmm6, xmm0 ; 158F _ C5 C9: EF. E8 + vpxor xmm0, xmm0, xmm7 ; 1593 _ C5 F9: EF. C7 + vpxor xmm4, xmm5, xmm7 ; 1597 _ C5 D1: EF. E7 + vpor xmm1, xmm0, xmm5 ; 159B _ C5 F9: EB. CD + vpand xmm5, xmm2, xmm5 ; 159F _ C5 E9: DB. ED + vpxor xmm0, xmm1, xmm3 ; 15A3 _ C5 F1: EF. C3 + vpxor xmm2, xmm5, xmm3 ; 15A7 _ C5 D1: EF. D3 + vpor xmm6, xmm0, xmm7 ; 15AB _ C5 F9: EB. F7 + vpxor xmm1, xmm4, xmm6 ; 15AF _ C5 D9: EF. CE + vpslld xmm4, xmm1, 13 ; 15B3 _ C5 D9: 72. F1, 0D + vpsrld xmm6, xmm1, 19 ; 15B8 _ C5 C9: 72. D1, 13 + vpor xmm1, xmm4, xmm6 ; 15BD _ C5 D9: EB. CE + vpslld xmm4, xmm7, 3 ; 15C1 _ C5 D9: 72. F7, 03 + vpsrld xmm7, xmm7, 29 ; 15C6 _ C5 C1: 72. D7, 1D + vpxor xmm0, xmm0, xmm1 ; 15CB _ C5 F9: EF. C1 + vpor xmm4, xmm4, xmm7 ; 15CF _ C5 D9: EB. E7 + vpxor xmm7, xmm0, xmm4 ; 15D3 _ C5 F9: EF. FC + vpxor xmm3, xmm2, xmm4 ; 15D7 _ C5 E9: EF. DC + vpslld xmm0, xmm1, 3 ; 15DB _ C5 F9: 72. F1, 03 + vpslld xmm5, xmm7, 1 ; 15E0 _ C5 D1: 72. F7, 01 + vpxor xmm2, xmm3, xmm0 ; 15E5 _ C5 E1: EF. D0 + vpsrld xmm7, xmm7, 31 ; 15E9 _ C5 C1: 72. D7, 1F + vpor xmm7, xmm5, xmm7 ; 15EE _ C5 D1: EB. FF + vpslld xmm6, xmm2, 7 ; 15F2 _ C5 C9: 72. F2, 07 + vpsrld xmm3, xmm2, 25 ; 15F7 _ C5 E1: 72. D2, 19 + vpxor xmm1, xmm1, xmm7 ; 15FC _ C5 F1: EF. CF + vpor xmm5, xmm6, xmm3 ; 1600 _ C5 C9: EB. EB + vpslld xmm0, xmm7, 7 ; 1604 _ C5 F9: 72. F7, 07 + vpxor xmm2, xmm1, xmm5 ; 1609 _ C5 F1: EF. 
D5 + vpxor xmm4, xmm4, xmm5 ; 160D _ C5 D9: EF. E5 + vmovd xmm3, dword [ecx+2BC0H] ; 1611 _ C5 F9: 6E. 99, 00002BC0 + vpxor xmm6, xmm4, xmm0 ; 1619 _ C5 D9: EF. F0 + vpslld xmm1, xmm2, 5 ; 161D _ C5 F1: 72. F2, 05 + vpsrld xmm4, xmm2, 27 ; 1622 _ C5 D9: 72. D2, 1B + vpshufd xmm2, xmm3, 0 ; 1627 _ C5 F9: 70. D3, 00 + vpor xmm0, xmm1, xmm4 ; 162C _ C5 F1: EB. C4 + vmovd xmm4, dword [ecx+2BCCH] ; 1630 _ C5 F9: 6E. A1, 00002BCC + vpxor xmm1, xmm0, xmm2 ; 1638 _ C5 F9: EF. CA + vmovd xmm3, dword [ecx+2BC4H] ; 163C _ C5 F9: 6E. 99, 00002BC4 + vpshufd xmm2, xmm4, 0 ; 1644 _ C5 F9: 70. D4, 00 + vpxor xmm4, xmm5, xmm2 ; 1649 _ C5 D1: EF. E2 + vpshufd xmm5, xmm3, 0 ; 164D _ C5 F9: 70. EB, 00 + vpslld xmm3, xmm6, 22 ; 1652 _ C5 E1: 72. F6, 16 + vpxor xmm7, xmm7, xmm5 ; 1657 _ C5 C1: EF. FD + vpsrld xmm6, xmm6, 10 ; 165B _ C5 C9: 72. D6, 0A + vpxor xmm5, xmm7, xmm4 ; 1660 _ C5 C1: EF. EC + vmovd xmm0, dword [ecx+2BC8H] ; 1664 _ C5 F9: 6E. 81, 00002BC8 + vpcmpeqd xmm7, xmm7, xmm7 ; 166C _ C5 C1: 76. FF + vpshufd xmm0, xmm0, 0 ; 1670 _ C5 F9: 70. C0, 00 + vpxor xmm2, xmm4, xmm7 ; 1675 _ C5 D9: EF. D7 + vpor xmm4, xmm3, xmm6 ; 1679 _ C5 E1: EB. E6 + vpxor xmm6, xmm4, xmm0 ; 167D _ C5 D9: EF. F0 + vpxor xmm0, xmm6, xmm2 ; 1681 _ C5 C9: EF. C2 + vpxor xmm6, xmm2, xmm1 ; 1685 _ C5 E9: EF. F1 + vpand xmm2, xmm5, xmm6 ; 1689 _ C5 D1: DB. D6 + vpxor xmm4, xmm5, xmm6 ; 168D _ C5 D1: EF. E6 + vpxor xmm3, xmm2, xmm0 ; 1691 _ C5 E9: EF. D8 + vpxor xmm1, xmm1, xmm4 ; 1695 _ C5 F1: EF. CC + vpand xmm5, xmm0, xmm4 ; 1699 _ C5 F9: DB. EC + vpand xmm0, xmm1, xmm3 ; 169D _ C5 F1: DB. C3 + vpxor xmm2, xmm5, xmm1 ; 16A1 _ C5 D1: EF. D1 + vpxor xmm5, xmm6, xmm0 ; 16A5 _ C5 C9: EF. E8 + vpor xmm1, xmm0, xmm5 ; 16A9 _ C5 F9: EB. CD + vpxor xmm6, xmm1, xmm2 ; 16AD _ C5 F1: EF. F2 + vpsrld xmm1, xmm3, 19 ; 16B1 _ C5 F1: 72. D3, 13 + vpxor xmm6, xmm6, xmm7 ; 16B6 _ C5 C9: EF. F7 + vpslld xmm7, xmm3, 13 ; 16BA _ C5 C1: 72. F3, 0D + vpor xmm7, xmm7, xmm1 ; 16BF _ C5 C1: EB. F9 + vpslld xmm1, xmm6, 3 ; 16C3 _ C5 F1: 72. F6, 03 + vpsrld xmm6, xmm6, 29 ; 16C8 _ C5 C9: 72. D6, 1D + vpor xmm3, xmm4, xmm3 ; 16CD _ C5 D9: EB. DB + vpor xmm1, xmm1, xmm6 ; 16D1 _ C5 F1: EB. CE + vpxor xmm0, xmm3, xmm0 ; 16D5 _ C5 E1: EF. C0 + vpand xmm2, xmm2, xmm5 ; 16D9 _ C5 E9: DB. D5 + vpxor xmm5, xmm5, xmm1 ; 16DD _ C5 D1: EF. E9 + vpxor xmm4, xmm0, xmm2 ; 16E1 _ C5 F9: EF. E2 + vpslld xmm3, xmm7, 3 ; 16E5 _ C5 E1: 72. F7, 03 + vpxor xmm6, xmm4, xmm7 ; 16EA _ C5 D9: EF. F7 + vpxor xmm4, xmm5, xmm3 ; 16EE _ C5 D1: EF. E3 + vpxor xmm0, xmm6, xmm1 ; 16F2 _ C5 C9: EF. C1 + vpslld xmm6, xmm4, 7 ; 16F6 _ C5 C9: 72. F4, 07 + vpsrld xmm3, xmm4, 25 ; 16FB _ C5 E1: 72. D4, 19 + vpslld xmm5, xmm0, 1 ; 1700 _ C5 D1: 72. F0, 01 + vpsrld xmm2, xmm0, 31 ; 1705 _ C5 E9: 72. D0, 1F + vpor xmm4, xmm6, xmm3 ; 170A _ C5 C9: EB. E3 + vpor xmm5, xmm5, xmm2 ; 170E _ C5 D1: EB. EA + vpxor xmm0, xmm1, xmm4 ; 1712 _ C5 F1: EF. C4 + vmovd xmm1, dword [ecx+2BD4H] ; 1716 _ C5 F9: 6E. 89, 00002BD4 + vpxor xmm7, xmm7, xmm5 ; 171E _ C5 C1: EF. FD + vpshufd xmm2, xmm1, 0 ; 1722 _ C5 F9: 70. D1, 00 + vpxor xmm3, xmm7, xmm4 ; 1727 _ C5 C1: EF. DC + vpslld xmm7, xmm5, 7 ; 172B _ C5 C1: 72. F5, 07 + vpxor xmm1, xmm5, xmm2 ; 1730 _ C5 D1: EF. CA + vmovd xmm5, dword [ecx+2BDCH] ; 1734 _ C5 F9: 6E. A9, 00002BDC + vpxor xmm7, xmm0, xmm7 ; 173C _ C5 F9: EF. FF + vmovd xmm0, dword [ecx+2BD0H] ; 1740 _ C5 F9: 6E. 81, 00002BD0 + vpshufd xmm6, xmm5, 0 ; 1748 _ C5 F9: 70. F5, 00 + vpslld xmm5, xmm3, 5 ; 174D _ C5 D1: 72. F3, 05 + vpsrld xmm3, xmm3, 27 ; 1752 _ C5 E1: 72. 
D3, 1B + vpxor xmm4, xmm4, xmm6 ; 1757 _ C5 D9: EF. E6 + vpshufd xmm0, xmm0, 0 ; 175B _ C5 F9: 70. C0, 00 + vpor xmm5, xmm5, xmm3 ; 1760 _ C5 D1: EB. EB + vpxor xmm6, xmm5, xmm0 ; 1764 _ C5 D1: EF. F0 + vpxor xmm5, xmm1, xmm4 ; 1768 _ C5 F1: EF. EC + vpxor xmm0, xmm6, xmm1 ; 176C _ C5 C9: EF. C1 + vpcmpeqd xmm1, xmm1, xmm1 ; 1770 _ C5 F1: 76. C9 + vmovd xmm2, dword [ecx+2BD8H] ; 1774 _ C5 F9: 6E. 91, 00002BD8 + vpxor xmm6, xmm4, xmm1 ; 177C _ C5 D9: EF. F1 + vpslld xmm4, xmm7, 22 ; 1780 _ C5 D9: 72. F7, 16 + vpsrld xmm7, xmm7, 10 ; 1785 _ C5 C1: 72. D7, 0A + vpshufd xmm2, xmm2, 0 ; 178A _ C5 F9: 70. D2, 00 + vpor xmm4, xmm4, xmm7 ; 178F _ C5 D9: EB. E7 + vpxor xmm3, xmm4, xmm2 ; 1793 _ C5 D9: EF. DA + vpand xmm7, xmm5, xmm0 ; 1797 _ C5 D1: DB. F8 + vpxor xmm2, xmm3, xmm6 ; 179B _ C5 E1: EF. D6 + vpxor xmm4, xmm7, xmm2 ; 179F _ C5 C1: EF. E2 + vpor xmm3, xmm2, xmm5 ; 17A3 _ C5 E9: EB. DD + vpand xmm7, xmm6, xmm4 ; 17A7 _ C5 C9: DB. FC + vpxor xmm5, xmm5, xmm6 ; 17AB _ C5 D1: EF. EE + vpxor xmm2, xmm7, xmm0 ; 17AF _ C5 C1: EF. D0 + vpxor xmm6, xmm5, xmm4 ; 17B3 _ C5 D1: EF. F4 + vpxor xmm6, xmm6, xmm3 ; 17B7 _ C5 C9: EF. F3 + vpand xmm5, xmm0, xmm2 ; 17BB _ C5 F9: DB. EA + vpxor xmm5, xmm5, xmm6 ; 17BF _ C5 D1: EF. EE + vpslld xmm7, xmm4, 13 ; 17C3 _ C5 C1: 72. F4, 0D + vpsrld xmm4, xmm4, 19 ; 17C8 _ C5 D9: 72. D4, 13 + vpxor xmm0, xmm3, xmm0 ; 17CD _ C5 E1: EF. C0 + vpor xmm7, xmm7, xmm4 ; 17D1 _ C5 C1: EB. FC + vpslld xmm4, xmm5, 3 ; 17D5 _ C5 D9: 72. F5, 03 + vpsrld xmm5, xmm5, 29 ; 17DA _ C5 D1: 72. D5, 1D + vpor xmm6, xmm6, xmm2 ; 17DF _ C5 C9: EB. F2 + vpxor xmm3, xmm0, xmm1 ; 17E3 _ C5 F9: EF. D9 + vpor xmm4, xmm4, xmm5 ; 17E7 _ C5 D9: EB. E5 + vpxor xmm5, xmm2, xmm7 ; 17EB _ C5 E9: EF. EF + vpxor xmm0, xmm6, xmm3 ; 17EF _ C5 C9: EF. C3 + vpxor xmm5, xmm5, xmm4 ; 17F3 _ C5 D1: EF. EC + vpxor xmm2, xmm0, xmm4 ; 17F7 _ C5 F9: EF. D4 + vpslld xmm6, xmm7, 3 ; 17FB _ C5 C9: 72. F7, 03 + vpslld xmm0, xmm5, 1 ; 1800 _ C5 F9: 72. F5, 01 + vpxor xmm3, xmm2, xmm6 ; 1805 _ C5 E9: EF. DE + vpsrld xmm5, xmm5, 31 ; 1809 _ C5 D1: 72. D5, 1F + vpor xmm0, xmm0, xmm5 ; 180E _ C5 F9: EB. C5 + vpslld xmm2, xmm3, 7 ; 1812 _ C5 E9: 72. F3, 07 + vpsrld xmm3, xmm3, 25 ; 1817 _ C5 E1: 72. D3, 19 + vpxor xmm7, xmm7, xmm0 ; 181C _ C5 C1: EF. F8 + vpor xmm2, xmm2, xmm3 ; 1820 _ C5 E9: EB. D3 + vpxor xmm5, xmm7, xmm2 ; 1824 _ C5 C1: EF. EA + vpxor xmm7, xmm4, xmm2 ; 1828 _ C5 D9: EF. FA + vpslld xmm4, xmm0, 7 ; 182C _ C5 D9: 72. F0, 07 + vmovd xmm3, dword [ecx+2BE0H] ; 1831 _ C5 F9: 6E. 99, 00002BE0 + vpxor xmm6, xmm7, xmm4 ; 1839 _ C5 C1: EF. F4 + vmovd xmm4, dword [ecx+2BECH] ; 183D _ C5 F9: 6E. A1, 00002BEC + vpslld xmm7, xmm5, 5 ; 1845 _ C5 C1: 72. F5, 05 + vpsrld xmm5, xmm5, 27 ; 184A _ C5 D1: 72. D5, 1B + vpshufd xmm4, xmm4, 0 ; 184F _ C5 F9: 70. E4, 00 + vpor xmm5, xmm7, xmm5 ; 1854 _ C5 C1: EB. ED + vpshufd xmm7, xmm3, 0 ; 1858 _ C5 F9: 70. FB, 00 + vpxor xmm4, xmm2, xmm4 ; 185D _ C5 E9: EF. E4 + vpxor xmm3, xmm5, xmm7 ; 1861 _ C5 D1: EF. DF + vpslld xmm2, xmm6, 22 ; 1865 _ C5 E9: 72. F6, 16 + vmovd xmm5, dword [ecx+2BE8H] ; 186A _ C5 F9: 6E. A9, 00002BE8 + vpsrld xmm6, xmm6, 10 ; 1872 _ C5 C9: 72. D6, 0A + vpshufd xmm5, xmm5, 0 ; 1877 _ C5 F9: 70. ED, 00 + vpor xmm2, xmm2, xmm6 ; 187C _ C5 E9: EB. D6 + vmovd xmm7, dword [ecx+2BE4H] ; 1880 _ C5 F9: 6E. B9, 00002BE4 + vpxor xmm6, xmm2, xmm5 ; 1888 _ C5 E9: EF. F5 + vpshufd xmm7, xmm7, 0 ; 188C _ C5 F9: 70. FF, 00 + vpxor xmm5, xmm6, xmm1 ; 1891 _ C5 C9: EF. E9 + vpand xmm2, xmm4, xmm3 ; 1895 _ C5 D9: DB. D3 + vpxor xmm6, xmm3, xmm4 ; 1899 _ C5 E1: EF. 
F4 + vpxor xmm3, xmm2, xmm5 ; 189D _ C5 E9: EF. DD + vpxor xmm0, xmm0, xmm7 ; 18A1 _ C5 F9: EF. C7 + vpxor xmm0, xmm0, xmm3 ; 18A5 _ C5 F9: EF. C3 + vpor xmm5, xmm5, xmm4 ; 18A9 _ C5 D1: EB. EC + vpor xmm2, xmm6, xmm0 ; 18AD _ C5 C9: EB. D0 + vpxor xmm7, xmm5, xmm6 ; 18B1 _ C5 D1: EF. FE + vpxor xmm5, xmm7, xmm0 ; 18B5 _ C5 C1: EF. E8 + vpor xmm6, xmm2, xmm3 ; 18B9 _ C5 E9: EB. F3 + vpxor xmm4, xmm4, xmm2 ; 18BD _ C5 D9: EF. E2 + vpxor xmm6, xmm6, xmm5 ; 18C1 _ C5 C9: EF. F5 + vpxor xmm7, xmm4, xmm3 ; 18C5 _ C5 D9: EF. FB + vpslld xmm4, xmm6, 13 ; 18C9 _ C5 D9: 72. F6, 0D + vpxor xmm2, xmm7, xmm6 ; 18CE _ C5 C1: EF. D6 + vpsrld xmm6, xmm6, 19 ; 18D2 _ C5 C9: 72. D6, 13 + vpor xmm7, xmm4, xmm6 ; 18D7 _ C5 D9: EB. FE + vpslld xmm4, xmm2, 3 ; 18DB _ C5 D9: 72. F2, 03 + vpsrld xmm6, xmm2, 29 ; 18E0 _ C5 C9: 72. D2, 1D + vpxor xmm3, xmm3, xmm1 ; 18E5 _ C5 E1: EF. D9 + vpand xmm5, xmm5, xmm2 ; 18E9 _ C5 D1: DB. EA + vpor xmm6, xmm4, xmm6 ; 18ED _ C5 D9: EB. F6 + vpxor xmm0, xmm0, xmm7 ; 18F1 _ C5 F9: EF. C7 + vpxor xmm2, xmm3, xmm5 ; 18F5 _ C5 E1: EF. D5 + vpxor xmm0, xmm0, xmm6 ; 18F9 _ C5 F9: EF. C6 + vpxor xmm4, xmm2, xmm6 ; 18FD _ C5 E9: EF. E6 + vpslld xmm3, xmm7, 3 ; 1901 _ C5 E1: 72. F7, 03 + vpslld xmm2, xmm0, 1 ; 1906 _ C5 E9: 72. F0, 01 + vpxor xmm5, xmm4, xmm3 ; 190B _ C5 D9: EF. EB + vpsrld xmm0, xmm0, 31 ; 190F _ C5 F9: 72. D0, 1F + vpor xmm0, xmm2, xmm0 ; 1914 _ C5 E9: EB. C0 + vpslld xmm4, xmm5, 7 ; 1918 _ C5 D9: 72. F5, 07 + vpsrld xmm5, xmm5, 25 ; 191D _ C5 D1: 72. D5, 19 + vpxor xmm7, xmm7, xmm0 ; 1922 _ C5 C1: EF. F8 + vpor xmm5, xmm4, xmm5 ; 1926 _ C5 D9: EB. ED + vpxor xmm3, xmm7, xmm5 ; 192A _ C5 C1: EF. DD + vpxor xmm6, xmm6, xmm5 ; 192E _ C5 C9: EF. F5 + vpslld xmm7, xmm0, 7 ; 1932 _ C5 C1: 72. F0, 07 + vpslld xmm2, xmm3, 5 ; 1937 _ C5 E9: 72. F3, 05 + vpsrld xmm4, xmm3, 27 ; 193C _ C5 D9: 72. D3, 1B + vpxor xmm7, xmm6, xmm7 ; 1941 _ C5 C9: EF. FF + vmovd xmm6, dword [ecx+2BF0H] ; 1945 _ C5 F9: 6E. B1, 00002BF0 + vpor xmm3, xmm2, xmm4 ; 194D _ C5 E9: EB. DC + vmovd xmm4, dword [ecx+2BF4H] ; 1951 _ C5 F9: 6E. A1, 00002BF4 + vpshufd xmm2, xmm6, 0 ; 1959 _ C5 F9: 70. D6, 00 + vpshufd xmm6, xmm4, 0 ; 195E _ C5 F9: 70. F4, 00 + vpxor xmm3, xmm3, xmm2 ; 1963 _ C5 E1: EF. DA + vpxor xmm2, xmm0, xmm6 ; 1967 _ C5 F9: EF. D6 + vpslld xmm0, xmm7, 22 ; 196B _ C5 F9: 72. F7, 16 + vpsrld xmm7, xmm7, 10 ; 1970 _ C5 C1: 72. D7, 0A + vpor xmm4, xmm0, xmm7 ; 1975 _ C5 F9: EB. E7 + vmovd xmm0, dword [ecx+2BF8H] ; 1979 _ C5 F9: 6E. 81, 00002BF8 + vpshufd xmm6, xmm0, 0 ; 1981 _ C5 F9: 70. F0, 00 + vmovd xmm0, dword [ecx+2BFCH] ; 1986 _ C5 F9: 6E. 81, 00002BFC + vpxor xmm4, xmm4, xmm6 ; 198E _ C5 D9: EF. E6 + vpshufd xmm7, xmm0, 0 ; 1992 _ C5 F9: 70. F8, 00 + vpxor xmm0, xmm5, xmm7 ; 1997 _ C5 D1: EF. C7 + vpor xmm5, xmm2, xmm4 ; 199B _ C5 E9: EB. EC + vpxor xmm7, xmm5, xmm0 ; 199F _ C5 D1: EF. F8 + vpxor xmm2, xmm2, xmm4 ; 19A3 _ C5 E9: EF. D4 + vpxor xmm6, xmm4, xmm7 ; 19A7 _ C5 D9: EF. F7 + vpxor xmm5, xmm2, xmm6 ; 19AB _ C5 E9: EF. EE + vpor xmm2, xmm0, xmm2 ; 19AF _ C5 F9: EB. D2 + vmovdqu oword [esp+0D0H], xmm7 ; 19B3 _ C5 FA: 7F. BC 24, 000000D0 + vpor xmm7, xmm7, xmm5 ; 19BC _ C5 C1: EB. FD + vmovdqu oword [esp+0C0H], xmm3 ; 19C0 _ C5 FA: 7F. 9C 24, 000000C0 + vpor xmm4, xmm3, xmm5 ; 19C9 _ C5 E1: EB. E5 + vpxor xmm3, xmm7, xmm3 ; 19CD _ C5 C1: EF. DB + vpxor xmm4, xmm4, xmm6 ; 19D1 _ C5 D9: EF. E6 + vpxor xmm7, xmm3, xmm5 ; 19D5 _ C5 E1: EF. FD + vpxor xmm6, xmm6, xmm7 ; 19D9 _ C5 C9: EF. F7 + vpand xmm3, xmm7, xmm4 ; 19DD _ C5 C1: DB. DC + vpxor xmm7, xmm6, xmm1 ; 19E1 _ C5 C9: EF. 
F9 + vpxor xmm3, xmm3, xmm5 ; 19E5 _ C5 E1: EF. DD + vpor xmm6, xmm7, xmm4 ; 19E9 _ C5 C1: EB. F4 + vpxor xmm5, xmm5, xmm6 ; 19ED _ C5 D1: EF. EE + vpslld xmm6, xmm3, 3 ; 19F1 _ C5 C9: 72. F3, 03 + vpand xmm0, xmm2, oword [esp+0C0H] ; 19F6 _ C5 E9: DB. 84 24, 000000C0 + vpslld xmm7, xmm5, 13 ; 19FF _ C5 C1: 72. F5, 0D + vpsrld xmm5, xmm5, 19 ; 1A04 _ C5 D1: 72. D5, 13 + vpsrld xmm3, xmm3, 29 ; 1A09 _ C5 E1: 72. D3, 1D + vpxor xmm2, xmm0, oword [esp+0D0H] ; 1A0E _ C5 F9: EF. 94 24, 000000D0 + vpor xmm7, xmm7, xmm5 ; 1A17 _ C5 C1: EB. FD + vpor xmm5, xmm6, xmm3 ; 1A1B _ C5 C9: EB. EB + vpxor xmm6, xmm2, xmm7 ; 1A1F _ C5 E9: EF. F7 + vpxor xmm6, xmm6, xmm5 ; 1A23 _ C5 C9: EF. F5 + vpxor xmm4, xmm4, xmm5 ; 1A27 _ C5 D9: EF. E5 + vpslld xmm3, xmm7, 3 ; 1A2B _ C5 E1: 72. F7, 03 + vpslld xmm0, xmm6, 1 ; 1A30 _ C5 F9: 72. F6, 01 + vpxor xmm2, xmm4, xmm3 ; 1A35 _ C5 D9: EF. D3 + vpsrld xmm4, xmm6, 31 ; 1A39 _ C5 D9: 72. D6, 1F + vpor xmm4, xmm0, xmm4 ; 1A3E _ C5 F9: EB. E4 + vpslld xmm0, xmm2, 7 ; 1A42 _ C5 F9: 72. F2, 07 + vpsrld xmm2, xmm2, 25 ; 1A47 _ C5 E9: 72. D2, 19 + vpxor xmm7, xmm7, xmm4 ; 1A4C _ C5 C1: EF. FC + vpor xmm6, xmm0, xmm2 ; 1A50 _ C5 F9: EB. F2 + vpxor xmm0, xmm7, xmm6 ; 1A54 _ C5 C1: EF. C6 + vpxor xmm5, xmm5, xmm6 ; 1A58 _ C5 D1: EF. EE + vpslld xmm7, xmm4, 7 ; 1A5C _ C5 C1: 72. F4, 07 + vmovd xmm2, dword [ecx+2C00H] ; 1A61 _ C5 F9: 6E. 91, 00002C00 + vpxor xmm3, xmm5, xmm7 ; 1A69 _ C5 D1: EF. DF + vmovd xmm7, dword [ecx+2C04H] ; 1A6D _ C5 F9: 6E. B9, 00002C04 + vpslld xmm5, xmm0, 5 ; 1A75 _ C5 D1: 72. F0, 05 + vpsrld xmm0, xmm0, 27 ; 1A7A _ C5 F9: 72. D0, 1B + vpor xmm0, xmm5, xmm0 ; 1A7F _ C5 D1: EB. C0 + vpshufd xmm5, xmm2, 0 ; 1A83 _ C5 F9: 70. EA, 00 + vpshufd xmm2, xmm7, 0 ; 1A88 _ C5 F9: 70. D7, 00 + vpxor xmm0, xmm0, xmm5 ; 1A8D _ C5 F9: EF. C5 + vmovd xmm5, dword [ecx+2C08H] ; 1A91 _ C5 F9: 6E. A9, 00002C08 + vpxor xmm2, xmm4, xmm2 ; 1A99 _ C5 D9: EF. D2 + vpslld xmm4, xmm3, 22 ; 1A9D _ C5 D9: 72. F3, 16 + vpsrld xmm3, xmm3, 10 ; 1AA2 _ C5 E1: 72. D3, 0A + vpor xmm7, xmm4, xmm3 ; 1AA7 _ C5 D9: EB. FB + vmovd xmm3, dword [ecx+2C0CH] ; 1AAB _ C5 F9: 6E. 99, 00002C0C + vpshufd xmm4, xmm5, 0 ; 1AB3 _ C5 F9: 70. E5, 00 + vpxor xmm5, xmm7, xmm4 ; 1AB8 _ C5 C1: EF. EC + vpshufd xmm7, xmm3, 0 ; 1ABC _ C5 F9: 70. FB, 00 + vpxor xmm6, xmm6, xmm7 ; 1AC1 _ C5 C9: EF. F7 + vpxor xmm7, xmm6, xmm0 ; 1AC5 _ C5 C9: EF. F8 + vpxor xmm6, xmm2, xmm5 ; 1AC9 _ C5 E9: EF. F5 + vpand xmm2, xmm2, xmm7 ; 1ACD _ C5 E9: DB. D7 + vpxor xmm4, xmm6, xmm7 ; 1AD1 _ C5 C9: EF. E7 + vpxor xmm2, xmm2, xmm0 ; 1AD5 _ C5 E9: EF. D0 + vpor xmm0, xmm0, xmm7 ; 1AD9 _ C5 F9: EB. C7 + vpxor xmm3, xmm0, xmm6 ; 1ADD _ C5 F9: EF. DE + vpxor xmm7, xmm7, xmm5 ; 1AE1 _ C5 C1: EF. FD + vpor xmm5, xmm5, xmm2 ; 1AE5 _ C5 D1: EB. EA + vpxor xmm0, xmm4, xmm1 ; 1AE9 _ C5 D9: EF. C1 + vpxor xmm6, xmm5, xmm4 ; 1AED _ C5 D1: EF. F4 + vpor xmm0, xmm0, xmm2 ; 1AF1 _ C5 F9: EB. C2 + vpor xmm4, xmm7, xmm3 ; 1AF5 _ C5 C1: EB. E3 + vpxor xmm7, xmm2, xmm7 ; 1AF9 _ C5 E9: EF. FF + vpxor xmm2, xmm7, xmm0 ; 1AFD _ C5 C1: EF. D0 + vpxor xmm5, xmm2, xmm4 ; 1B01 _ C5 E9: EF. EC + vpxor xmm4, xmm0, xmm4 ; 1B05 _ C5 F9: EF. E4 + vpslld xmm7, xmm5, 13 ; 1B09 _ C5 C1: 72. F5, 0D + vpsrld xmm2, xmm5, 19 ; 1B0E _ C5 E9: 72. D5, 13 + vpslld xmm5, xmm6, 3 ; 1B13 _ C5 D1: 72. F6, 03 + vpsrld xmm6, xmm6, 29 ; 1B18 _ C5 C9: 72. D6, 1D + vpor xmm2, xmm7, xmm2 ; 1B1D _ C5 C1: EB. D2 + vpor xmm5, xmm5, xmm6 ; 1B21 _ C5 D1: EB. EE + vpxor xmm0, xmm4, xmm2 ; 1B25 _ C5 D9: EF. C2 + vpxor xmm3, xmm3, xmm5 ; 1B29 _ C5 E1: EF. 
DD + vpslld xmm7, xmm2, 3 ; 1B2D _ C5 C1: 72. F2, 03 + vpxor xmm0, xmm0, xmm5 ; 1B32 _ C5 F9: EF. C5 + vpxor xmm4, xmm3, xmm7 ; 1B36 _ C5 E1: EF. E7 + vpslld xmm6, xmm0, 1 ; 1B3A _ C5 C9: 72. F0, 01 + vpsrld xmm0, xmm0, 31 ; 1B3F _ C5 F9: 72. D0, 1F + vpslld xmm3, xmm4, 7 ; 1B44 _ C5 E1: 72. F4, 07 + vpsrld xmm4, xmm4, 25 ; 1B49 _ C5 D9: 72. D4, 19 + vpor xmm0, xmm6, xmm0 ; 1B4E _ C5 C9: EB. C0 + vpor xmm6, xmm3, xmm4 ; 1B52 _ C5 E1: EB. F4 + vpslld xmm7, xmm0, 7 ; 1B56 _ C5 C1: 72. F0, 07 + vpxor xmm5, xmm5, xmm6 ; 1B5B _ C5 D1: EF. EE + vpxor xmm2, xmm2, xmm0 ; 1B5F _ C5 E9: EF. D0 + vpxor xmm3, xmm5, xmm7 ; 1B63 _ C5 D1: EF. DF + vpxor xmm2, xmm2, xmm6 ; 1B67 _ C5 E9: EF. D6 + vmovd xmm5, dword [ecx+2C14H] ; 1B6B _ C5 F9: 6E. A9, 00002C14 + vpshufd xmm4, xmm5, 0 ; 1B73 _ C5 F9: 70. E5, 00 + vmovd xmm5, dword [ecx+2C1CH] ; 1B78 _ C5 F9: 6E. A9, 00002C1C + vpxor xmm0, xmm0, xmm4 ; 1B80 _ C5 F9: EF. C4 + vpshufd xmm5, xmm5, 0 ; 1B84 _ C5 F9: 70. ED, 00 + vmovd xmm7, dword [ecx+2C10H] ; 1B89 _ C5 F9: 6E. B9, 00002C10 + vpxor xmm5, xmm6, xmm5 ; 1B91 _ C5 C9: EF. ED + vpslld xmm6, xmm2, 5 ; 1B95 _ C5 C9: 72. F2, 05 + vpsrld xmm2, xmm2, 27 ; 1B9A _ C5 E9: 72. D2, 1B + vpor xmm6, xmm6, xmm2 ; 1B9F _ C5 C9: EB. F2 + vpshufd xmm2, xmm7, 0 ; 1BA3 _ C5 F9: 70. D7, 00 + vmovd xmm4, dword [ecx+2C18H] ; 1BA8 _ C5 F9: 6E. A1, 00002C18 + vpxor xmm7, xmm6, xmm2 ; 1BB0 _ C5 C9: EF. FA + vpslld xmm6, xmm3, 22 ; 1BB4 _ C5 C9: 72. F3, 16 + vpsrld xmm3, xmm3, 10 ; 1BB9 _ C5 E1: 72. D3, 0A + vpshufd xmm4, xmm4, 0 ; 1BBE _ C5 F9: 70. E4, 00 + vpor xmm3, xmm6, xmm3 ; 1BC3 _ C5 C9: EB. DB + vpxor xmm2, xmm7, xmm1 ; 1BC7 _ C5 C1: EF. D1 + vpxor xmm6, xmm3, xmm4 ; 1BCB _ C5 E1: EF. F4 + vpand xmm7, xmm2, xmm0 ; 1BCF _ C5 E9: DB. F8 + vpxor xmm1, xmm6, xmm1 ; 1BD3 _ C5 C9: EF. C9 + vpxor xmm1, xmm1, xmm7 ; 1BD7 _ C5 F1: EF. CF + vpor xmm4, xmm7, xmm5 ; 1BDB _ C5 C1: EB. E5 + vpxor xmm5, xmm5, xmm1 ; 1BDF _ C5 D1: EF. E9 + vpxor xmm3, xmm0, xmm4 ; 1BE3 _ C5 F9: EF. DC + vpxor xmm6, xmm4, xmm2 ; 1BE7 _ C5 D9: EF. F2 + vpor xmm4, xmm2, xmm3 ; 1BEB _ C5 E9: EB. E3 + vpxor xmm0, xmm3, xmm5 ; 1BEF _ C5 E1: EF. C5 + vpor xmm2, xmm1, xmm6 ; 1BF3 _ C5 F1: EB. D6 + vpand xmm3, xmm2, xmm4 ; 1BF7 _ C5 E9: DB. DC + vpxor xmm1, xmm6, xmm0 ; 1BFB _ C5 C9: EF. C8 + vpslld xmm2, xmm5, 3 ; 1BFF _ C5 E9: 72. F5, 03 + vpsrld xmm5, xmm5, 29 ; 1C04 _ C5 D1: 72. D5, 1D + vpslld xmm7, xmm3, 13 ; 1C09 _ C5 C1: 72. F3, 0D + vpsrld xmm6, xmm3, 19 ; 1C0E _ C5 C9: 72. D3, 13 + vpor xmm2, xmm2, xmm5 ; 1C13 _ C5 E9: EB. D5 + vpand xmm5, xmm1, xmm3 ; 1C17 _ C5 F1: DB. EB + vpor xmm6, xmm7, xmm6 ; 1C1B _ C5 C1: EB. F6 + vpxor xmm4, xmm4, xmm5 ; 1C1F _ C5 D9: EF. E5 + vpand xmm0, xmm0, xmm3 ; 1C23 _ C5 F9: DB. C3 + vpxor xmm4, xmm4, xmm6 ; 1C27 _ C5 D9: EF. E6 + vpxor xmm1, xmm0, xmm1 ; 1C2B _ C5 F9: EF. C9 + vpxor xmm4, xmm4, xmm2 ; 1C2F _ C5 D9: EF. E2 + vpxor xmm3, xmm1, xmm2 ; 1C33 _ C5 F1: EF. DA + vpslld xmm0, xmm6, 3 ; 1C37 _ C5 F9: 72. F6, 03 + vpxor xmm7, xmm3, xmm0 ; 1C3C _ C5 E1: EF. F8 + vpslld xmm5, xmm4, 1 ; 1C40 _ C5 D1: 72. F4, 01 + vpsrld xmm4, xmm4, 31 ; 1C45 _ C5 D9: 72. D4, 1F + vpslld xmm1, xmm7, 7 ; 1C4A _ C5 F1: 72. F7, 07 + vpor xmm3, xmm5, xmm4 ; 1C4F _ C5 D1: EB. DC + vpsrld xmm4, xmm7, 25 ; 1C53 _ C5 D9: 72. D7, 19 + vpor xmm7, xmm1, xmm4 ; 1C58 _ C5 F1: EB. FC + vpxor xmm6, xmm6, xmm3 ; 1C5C _ C5 C9: EF. F3 + vpxor xmm6, xmm6, xmm7 ; 1C60 _ C5 C9: EF. F7 + vpxor xmm2, xmm2, xmm7 ; 1C64 _ C5 E9: EF. D7 + vpslld xmm1, xmm3, 7 ; 1C68 _ C5 F1: 72. F3, 07 + vpslld xmm4, xmm6, 5 ; 1C6D _ C5 D9: 72. 
F6, 05 + vpxor xmm5, xmm2, xmm1 ; 1C72 _ C5 E9: EF. E9 + vpsrld xmm2, xmm6, 27 ; 1C76 _ C5 E9: 72. D6, 1B + vmovd xmm0, dword [ecx+2C20H] ; 1C7B _ C5 F9: 6E. 81, 00002C20 + vpor xmm1, xmm4, xmm2 ; 1C83 _ C5 D9: EB. CA + vmovd xmm2, dword [ecx+2C24H] ; 1C87 _ C5 F9: 6E. 91, 00002C24 + vpshufd xmm6, xmm0, 0 ; 1C8F _ C5 F9: 70. F0, 00 + vpshufd xmm0, xmm2, 0 ; 1C94 _ C5 F9: 70. C2, 00 + vpxor xmm4, xmm1, xmm6 ; 1C99 _ C5 F1: EF. E6 + vmovd xmm1, dword [ecx+2C28H] ; 1C9D _ C5 F9: 6E. 89, 00002C28 + vpxor xmm2, xmm3, xmm0 ; 1CA5 _ C5 E1: EF. D0 + vpslld xmm3, xmm5, 22 ; 1CA9 _ C5 E1: 72. F5, 16 + vpsrld xmm5, xmm5, 10 ; 1CAE _ C5 D1: 72. D5, 0A + vmovd xmm0, dword [ecx+2C2CH] ; 1CB3 _ C5 F9: 6E. 81, 00002C2C + vpor xmm6, xmm3, xmm5 ; 1CBB _ C5 E1: EB. F5 + vpshufd xmm3, xmm1, 0 ; 1CBF _ C5 F9: 70. D9, 00 + vpshufd xmm5, xmm0, 0 ; 1CC4 _ C5 F9: 70. E8, 00 + vpxor xmm6, xmm6, xmm3 ; 1CC9 _ C5 C9: EF. F3 + vpxor xmm3, xmm7, xmm5 ; 1CCD _ C5 C1: EF. DD + vpand xmm7, xmm4, xmm6 ; 1CD1 _ C5 D9: DB. FE + vpxor xmm0, xmm7, xmm3 ; 1CD5 _ C5 C1: EF. C3 + vpxor xmm1, xmm6, xmm2 ; 1CD9 _ C5 C9: EF. CA + vpxor xmm1, xmm1, xmm0 ; 1CDD _ C5 F1: EF. C8 + vpor xmm6, xmm3, xmm4 ; 1CE1 _ C5 E1: EB. F4 + vpxor xmm7, xmm6, xmm2 ; 1CE5 _ C5 C9: EF. FA + vpxor xmm2, xmm4, xmm1 ; 1CE9 _ C5 D9: EF. D1 + vpor xmm4, xmm7, xmm2 ; 1CED _ C5 C1: EB. E2 + vpand xmm6, xmm0, xmm7 ; 1CF1 _ C5 F9: DB. F7 + vpxor xmm4, xmm4, xmm0 ; 1CF5 _ C5 D9: EF. E0 + vpxor xmm3, xmm2, xmm6 ; 1CF9 _ C5 E9: EF. DE + vpxor xmm0, xmm7, xmm4 ; 1CFD _ C5 C1: EF. C4 + vpslld xmm5, xmm1, 13 ; 1D01 _ C5 D1: 72. F1, 0D + vpxor xmm2, xmm0, xmm3 ; 1D06 _ C5 F9: EF. D3 + vpsrld xmm7, xmm1, 19 ; 1D0A _ C5 C1: 72. D1, 13 + vpor xmm7, xmm5, xmm7 ; 1D0F _ C5 D1: EB. FF + vpslld xmm1, xmm2, 3 ; 1D13 _ C5 F1: 72. F2, 03 + vpsrld xmm6, xmm2, 29 ; 1D18 _ C5 C9: 72. D2, 1D + vpxor xmm4, xmm4, xmm7 ; 1D1D _ C5 D9: EF. E7 + vpor xmm1, xmm1, xmm6 ; 1D21 _ C5 F1: EB. CE + vpslld xmm2, xmm7, 3 ; 1D25 _ C5 E9: 72. F7, 03 + vpxor xmm0, xmm4, xmm1 ; 1D2A _ C5 D9: EF. C1 + vpcmpeqd xmm4, xmm4, xmm4 ; 1D2E _ C5 D9: 76. E4 + vpxor xmm3, xmm3, xmm4 ; 1D32 _ C5 E1: EF. DC + vpxor xmm6, xmm3, xmm1 ; 1D36 _ C5 E1: EF. F1 + vpslld xmm3, xmm0, 1 ; 1D3A _ C5 E1: 72. F0, 01 + vpxor xmm6, xmm6, xmm2 ; 1D3F _ C5 C9: EF. F2 + vpsrld xmm0, xmm0, 31 ; 1D43 _ C5 F9: 72. D0, 1F + vpor xmm3, xmm3, xmm0 ; 1D48 _ C5 E1: EB. D8 + vpslld xmm5, xmm6, 7 ; 1D4C _ C5 D1: 72. F6, 07 + vpsrld xmm6, xmm6, 25 ; 1D51 _ C5 C9: 72. D6, 19 + vpxor xmm7, xmm7, xmm3 ; 1D56 _ C5 C1: EF. FB + vpor xmm0, xmm5, xmm6 ; 1D5A _ C5 D1: EB. C6 + vpslld xmm6, xmm3, 7 ; 1D5E _ C5 C9: 72. F3, 07 + vpxor xmm2, xmm7, xmm0 ; 1D63 _ C5 C1: EF. D0 + vpxor xmm1, xmm1, xmm0 ; 1D67 _ C5 F1: EF. C8 + vpslld xmm7, xmm2, 5 ; 1D6B _ C5 C1: 72. F2, 05 + vpsrld xmm2, xmm2, 27 ; 1D70 _ C5 E9: 72. D2, 1B + vpxor xmm5, xmm1, xmm6 ; 1D75 _ C5 F1: EF. EE + vpor xmm6, xmm7, xmm2 ; 1D79 _ C5 C1: EB. F2 + vmovd xmm1, dword [ecx+2C30H] ; 1D7D _ C5 F9: 6E. 89, 00002C30 + vmovd xmm7, dword [ecx+2C34H] ; 1D85 _ C5 F9: 6E. B9, 00002C34 + vpshufd xmm2, xmm1, 0 ; 1D8D _ C5 F9: 70. D1, 00 + vpshufd xmm1, xmm7, 0 ; 1D92 _ C5 F9: 70. CF, 00 + vpxor xmm2, xmm6, xmm2 ; 1D97 _ C5 C9: EF. D2 + vmovd xmm7, dword [ecx+2C3CH] ; 1D9B _ C5 F9: 6E. B9, 00002C3C + vpxor xmm3, xmm3, xmm1 ; 1DA3 _ C5 E1: EF. D9 + vmovd xmm1, dword [ecx+2C38H] ; 1DA7 _ C5 F9: 6E. 89, 00002C38 + vpslld xmm6, xmm5, 22 ; 1DAF _ C5 C9: 72. F5, 16 + vpsrld xmm5, xmm5, 10 ; 1DB4 _ C5 D1: 72. D5, 0A + vpor xmm6, xmm6, xmm5 ; 1DB9 _ C5 C9: EB. F5 + vpshufd xmm5, xmm1, 0 ; 1DBD _ C5 F9: 70. 
E9, 00 + vpshufd xmm1, xmm7, 0 ; 1DC2 _ C5 F9: 70. CF, 00 + vpxor xmm5, xmm6, xmm5 ; 1DC7 _ C5 C9: EF. ED + vpxor xmm0, xmm0, xmm1 ; 1DCB _ C5 F9: EF. C1 + vpand xmm1, xmm3, xmm2 ; 1DCF _ C5 E1: DB. CA + vpor xmm7, xmm2, xmm0 ; 1DD3 _ C5 E9: EB. F8 + vpxor xmm6, xmm0, xmm3 ; 1DD7 _ C5 F9: EF. F3 + vpxor xmm2, xmm2, xmm5 ; 1DDB _ C5 E9: EF. D5 + vpxor xmm3, xmm5, xmm6 ; 1DDF _ C5 D1: EF. DE + vpor xmm2, xmm2, xmm1 ; 1DE3 _ C5 E9: EB. D1 + vpand xmm6, xmm6, xmm7 ; 1DE7 _ C5 C9: DB. F7 + vpxor xmm6, xmm6, xmm2 ; 1DEB _ C5 C9: EF. F2 + vpxor xmm5, xmm7, xmm1 ; 1DEF _ C5 C1: EF. E9 + vpxor xmm1, xmm1, xmm6 ; 1DF3 _ C5 F1: EF. CE + vpand xmm2, xmm2, xmm5 ; 1DF7 _ C5 E9: DB. D5 + vpor xmm0, xmm1, xmm5 ; 1DFB _ C5 F1: EB. C5 + vpxor xmm1, xmm5, xmm6 ; 1DFF _ C5 D1: EF. CE + vpxor xmm7, xmm0, xmm3 ; 1E03 _ C5 F9: EF. FB + vpxor xmm3, xmm2, xmm3 ; 1E07 _ C5 E9: EF. DB + vpor xmm0, xmm7, xmm6 ; 1E0B _ C5 C1: EB. C6 + vpxor xmm1, xmm1, xmm0 ; 1E0F _ C5 F1: EF. C8 + vpslld xmm0, xmm1, 13 ; 1E13 _ C5 F9: 72. F1, 0D + vpsrld xmm1, xmm1, 19 ; 1E18 _ C5 F1: 72. D1, 13 + vpor xmm0, xmm0, xmm1 ; 1E1D _ C5 F9: EB. C1 + vpslld xmm1, xmm6, 3 ; 1E21 _ C5 F1: 72. F6, 03 + vpsrld xmm6, xmm6, 29 ; 1E26 _ C5 C9: 72. D6, 1D + vpxor xmm7, xmm7, xmm0 ; 1E2B _ C5 C1: EF. F8 + vpor xmm1, xmm1, xmm6 ; 1E2F _ C5 F1: EB. CE + vpxor xmm6, xmm7, xmm1 ; 1E33 _ C5 C1: EF. F1 + vpxor xmm2, xmm3, xmm1 ; 1E37 _ C5 E1: EF. D1 + vpslld xmm3, xmm0, 3 ; 1E3B _ C5 E1: 72. F0, 03 + vpslld xmm5, xmm6, 1 ; 1E40 _ C5 D1: 72. F6, 01 + vpxor xmm2, xmm2, xmm3 ; 1E45 _ C5 E9: EF. D3 + vpsrld xmm6, xmm6, 31 ; 1E49 _ C5 C9: 72. D6, 1F + vpor xmm3, xmm5, xmm6 ; 1E4E _ C5 D1: EB. DE + vpslld xmm5, xmm2, 7 ; 1E52 _ C5 D1: 72. F2, 07 + vpsrld xmm2, xmm2, 25 ; 1E57 _ C5 E9: 72. D2, 19 + vpxor xmm0, xmm0, xmm3 ; 1E5C _ C5 F9: EF. C3 + vpor xmm2, xmm5, xmm2 ; 1E60 _ C5 D1: EB. D2 + vpslld xmm6, xmm3, 7 ; 1E64 _ C5 C9: 72. F3, 07 + vpxor xmm7, xmm0, xmm2 ; 1E69 _ C5 F9: EF. FA + vpxor xmm1, xmm1, xmm2 ; 1E6D _ C5 F1: EF. CA + vpxor xmm0, xmm1, xmm6 ; 1E71 _ C5 F1: EF. C6 + vpslld xmm1, xmm7, 5 ; 1E75 _ C5 F1: 72. F7, 05 + vpsrld xmm7, xmm7, 27 ; 1E7A _ C5 C1: 72. D7, 1B + vpor xmm6, xmm1, xmm7 ; 1E7F _ C5 F1: EB. F7 + vmovd xmm1, dword [ecx+2C40H] ; 1E83 _ C5 F9: 6E. 89, 00002C40 + vpshufd xmm5, xmm1, 0 ; 1E8B _ C5 F9: 70. E9, 00 + vpxor xmm7, xmm6, xmm5 ; 1E90 _ C5 C9: EF. FD + vmovd xmm5, dword [ecx+2C4CH] ; 1E94 _ C5 F9: 6E. A9, 00002C4C + vmovd xmm6, dword [ecx+2C44H] ; 1E9C _ C5 F9: 6E. B1, 00002C44 + vpshufd xmm5, xmm5, 0 ; 1EA4 _ C5 F9: 70. ED, 00 + vpxor xmm5, xmm2, xmm5 ; 1EA9 _ C5 E9: EF. ED + vpshufd xmm2, xmm6, 0 ; 1EAD _ C5 F9: 70. D6, 00 + vpxor xmm6, xmm5, xmm4 ; 1EB2 _ C5 D1: EF. F4 + vpxor xmm3, xmm3, xmm2 ; 1EB6 _ C5 E1: EF. DA + vmovd xmm1, dword [ecx+2C48H] ; 1EBA _ C5 F9: 6E. 89, 00002C48 + vpxor xmm2, xmm3, xmm5 ; 1EC2 _ C5 E1: EF. D5 + vpslld xmm3, xmm0, 22 ; 1EC6 _ C5 E1: 72. F0, 16 + vpsrld xmm0, xmm0, 10 ; 1ECB _ C5 F9: 72. D0, 0A + vpshufd xmm1, xmm1, 0 ; 1ED0 _ C5 F9: 70. C9, 00 + vpor xmm3, xmm3, xmm0 ; 1ED5 _ C5 E1: EB. D8 + vpxor xmm0, xmm3, xmm1 ; 1ED9 _ C5 E1: EF. C1 + vpxor xmm1, xmm6, xmm7 ; 1EDD _ C5 C9: EF. CF + vpxor xmm0, xmm0, xmm6 ; 1EE1 _ C5 F9: EF. C6 + vpand xmm6, xmm2, xmm1 ; 1EE5 _ C5 E9: DB. F1 + vpxor xmm5, xmm2, xmm1 ; 1EE9 _ C5 E9: EF. E9 + vpxor xmm6, xmm6, xmm0 ; 1EED _ C5 C9: EF. F0 + vpxor xmm3, xmm7, xmm5 ; 1EF1 _ C5 C1: EF. DD + vpand xmm7, xmm0, xmm5 ; 1EF5 _ C5 F9: DB. FD + vpand xmm0, xmm3, xmm6 ; 1EF9 _ C5 E1: DB. C6 + vpxor xmm2, xmm7, xmm3 ; 1EFD _ C5 C1: EF. 
D3 + vpxor xmm7, xmm1, xmm0 ; 1F01 _ C5 F1: EF. F8 + vpor xmm5, xmm5, xmm6 ; 1F05 _ C5 D1: EB. EE + vpor xmm1, xmm0, xmm7 ; 1F09 _ C5 F9: EB. CF + vpxor xmm1, xmm1, xmm2 ; 1F0D _ C5 F1: EF. CA + vpand xmm2, xmm2, xmm7 ; 1F11 _ C5 E9: DB. D7 + vpxor xmm3, xmm1, xmm4 ; 1F15 _ C5 F1: EF. DC + vpslld xmm4, xmm6, 13 ; 1F19 _ C5 D9: 72. F6, 0D + vpsrld xmm1, xmm6, 19 ; 1F1E _ C5 F1: 72. D6, 13 + vpxor xmm6, xmm5, xmm0 ; 1F23 _ C5 D1: EF. F0 + vpor xmm4, xmm4, xmm1 ; 1F27 _ C5 D9: EB. E1 + vpslld xmm1, xmm3, 3 ; 1F2B _ C5 F1: 72. F3, 03 + vpsrld xmm3, xmm3, 29 ; 1F30 _ C5 E1: 72. D3, 1D + vpxor xmm6, xmm6, xmm2 ; 1F35 _ C5 C9: EF. F2 + vpor xmm1, xmm1, xmm3 ; 1F39 _ C5 F1: EB. CB + vpxor xmm2, xmm6, xmm4 ; 1F3D _ C5 C9: EF. D4 + vpxor xmm3, xmm2, xmm1 ; 1F41 _ C5 E9: EF. D9 + vpxor xmm0, xmm7, xmm1 ; 1F45 _ C5 C1: EF. C1 + vpslld xmm7, xmm4, 3 ; 1F49 _ C5 C1: 72. F4, 03 + vpslld xmm6, xmm3, 1 ; 1F4E _ C5 C9: 72. F3, 01 + vpxor xmm0, xmm0, xmm7 ; 1F53 _ C5 F9: EF. C7 + vpsrld xmm2, xmm3, 31 ; 1F57 _ C5 E9: 72. D3, 1F + vpor xmm5, xmm6, xmm2 ; 1F5C _ C5 C9: EB. EA + vpslld xmm7, xmm0, 7 ; 1F60 _ C5 C1: 72. F0, 07 + vpsrld xmm6, xmm0, 25 ; 1F65 _ C5 C9: 72. D0, 19 + vpxor xmm4, xmm4, xmm5 ; 1F6A _ C5 D9: EF. E5 + vpor xmm0, xmm7, xmm6 ; 1F6E _ C5 C1: EB. C6 + vpslld xmm6, xmm5, 7 ; 1F72 _ C5 C9: 72. F5, 07 + vpxor xmm1, xmm1, xmm0 ; 1F77 _ C5 F1: EF. C8 + vpxor xmm4, xmm4, xmm0 ; 1F7B _ C5 D9: EF. E0 + vpxor xmm7, xmm1, xmm6 ; 1F7F _ C5 F1: EF. FE + vmovd xmm1, dword [ecx+2C54H] ; 1F83 _ C5 F9: 6E. 89, 00002C54 + vpshufd xmm2, xmm1, 0 ; 1F8B _ C5 F9: 70. D1, 00 + vpxor xmm1, xmm5, xmm2 ; 1F90 _ C5 D1: EF. CA + vmovd xmm5, dword [ecx+2C5CH] ; 1F94 _ C5 F9: 6E. A9, 00002C5C + vpshufd xmm2, xmm5, 0 ; 1F9C _ C5 F9: 70. D5, 00 + vmovd xmm6, dword [ecx+2C50H] ; 1FA1 _ C5 F9: 6E. B1, 00002C50 + vpxor xmm2, xmm0, xmm2 ; 1FA9 _ C5 F9: EF. D2 + vpslld xmm0, xmm4, 5 ; 1FAD _ C5 F9: 72. F4, 05 + vpsrld xmm4, xmm4, 27 ; 1FB2 _ C5 D9: 72. D4, 1B + vpshufd xmm6, xmm6, 0 ; 1FB7 _ C5 F9: 70. F6, 00 + vpor xmm4, xmm0, xmm4 ; 1FBC _ C5 F9: EB. E4 + vpxor xmm0, xmm4, xmm6 ; 1FC0 _ C5 D9: EF. C6 + vpxor xmm5, xmm1, xmm2 ; 1FC4 _ C5 F1: EF. EA + vpxor xmm0, xmm0, xmm1 ; 1FC8 _ C5 F9: EF. C1 + vpcmpeqd xmm1, xmm1, xmm1 ; 1FCC _ C5 F1: 76. C9 + vmovd xmm3, dword [ecx+2C58H] ; 1FD0 _ C5 F9: 6E. 99, 00002C58 + vpxor xmm6, xmm2, xmm1 ; 1FD8 _ C5 E9: EF. F1 + vpslld xmm2, xmm7, 22 ; 1FDC _ C5 E9: 72. F7, 16 + vpsrld xmm7, xmm7, 10 ; 1FE1 _ C5 C1: 72. D7, 0A + vpshufd xmm3, xmm3, 0 ; 1FE6 _ C5 F9: 70. DB, 00 + vpor xmm4, xmm2, xmm7 ; 1FEB _ C5 E9: EB. E7 + vpxor xmm4, xmm4, xmm3 ; 1FEF _ C5 D9: EF. E3 + vpand xmm2, xmm5, xmm0 ; 1FF3 _ C5 D1: DB. D0 + vpxor xmm3, xmm4, xmm6 ; 1FF7 _ C5 D9: EF. DE + vpxor xmm2, xmm2, xmm3 ; 1FFB _ C5 E9: EF. D3 + vpor xmm4, xmm3, xmm5 ; 1FFF _ C5 E1: EB. E5 + vpand xmm7, xmm6, xmm2 ; 2003 _ C5 C9: DB. FA + vpxor xmm5, xmm5, xmm6 ; 2007 _ C5 D1: EF. EE + vpxor xmm3, xmm7, xmm0 ; 200B _ C5 C1: EF. D8 + vpxor xmm6, xmm5, xmm2 ; 200F _ C5 D1: EF. F2 + vpxor xmm6, xmm6, xmm4 ; 2013 _ C5 C9: EF. F4 + vpand xmm5, xmm0, xmm3 ; 2017 _ C5 F9: DB. EB + vpxor xmm5, xmm5, xmm6 ; 201B _ C5 D1: EF. EE + vpslld xmm7, xmm2, 13 ; 201F _ C5 C1: 72. F2, 0D + vpsrld xmm2, xmm2, 19 ; 2024 _ C5 E9: 72. D2, 13 + vpxor xmm0, xmm4, xmm0 ; 2029 _ C5 D9: EF. C0 + vpor xmm7, xmm7, xmm2 ; 202D _ C5 C1: EB. FA + vpslld xmm2, xmm5, 3 ; 2031 _ C5 E9: 72. F5, 03 + vpsrld xmm5, xmm5, 29 ; 2036 _ C5 D1: 72. D5, 1D + vpor xmm5, xmm2, xmm5 ; 203B _ C5 E9: EB. ED + vpxor xmm2, xmm3, xmm7 ; 203F _ C5 E1: EF. 
D7 + vpor xmm3, xmm6, xmm3 ; 2043 _ C5 C9: EB. DB + vpxor xmm6, xmm0, xmm1 ; 2047 _ C5 F9: EF. F1 + vpxor xmm4, xmm3, xmm6 ; 204B _ C5 E1: EF. E6 + vpxor xmm2, xmm2, xmm5 ; 204F _ C5 E9: EF. D5 + vpxor xmm3, xmm4, xmm5 ; 2053 _ C5 D9: EF. DD + vpslld xmm0, xmm7, 3 ; 2057 _ C5 F9: 72. F7, 03 + vpxor xmm6, xmm3, xmm0 ; 205C _ C5 E1: EF. F0 + vpslld xmm4, xmm2, 1 ; 2060 _ C5 D9: 72. F2, 01 + vpsrld xmm2, xmm2, 31 ; 2065 _ C5 E9: 72. D2, 1F + vpslld xmm3, xmm6, 7 ; 206A _ C5 E1: 72. F6, 07 + vpor xmm0, xmm4, xmm2 ; 206F _ C5 D9: EB. C2 + vpsrld xmm6, xmm6, 25 ; 2073 _ C5 C9: 72. D6, 19 + vpor xmm3, xmm3, xmm6 ; 2078 _ C5 E1: EB. DE + vpxor xmm7, xmm7, xmm0 ; 207C _ C5 C1: EF. F8 + vpxor xmm7, xmm7, xmm3 ; 2080 _ C5 C1: EF. FB + vpxor xmm5, xmm5, xmm3 ; 2084 _ C5 D1: EF. EB + vpslld xmm6, xmm0, 7 ; 2088 _ C5 C9: 72. F0, 07 + vpslld xmm4, xmm7, 5 ; 208D _ C5 D9: 72. F7, 05 + vpxor xmm6, xmm5, xmm6 ; 2092 _ C5 D1: EF. F6 + vpsrld xmm2, xmm7, 27 ; 2096 _ C5 E9: 72. D7, 1B + vmovd xmm7, dword [ecx+2C60H] ; 209B _ C5 F9: 6E. B9, 00002C60 + vpor xmm4, xmm4, xmm2 ; 20A3 _ C5 D9: EB. E2 + vmovd xmm5, dword [ecx+2C6CH] ; 20A7 _ C5 F9: 6E. A9, 00002C6C + vpshufd xmm2, xmm7, 0 ; 20AF _ C5 F9: 70. D7, 00 + vpshufd xmm5, xmm5, 0 ; 20B4 _ C5 F9: 70. ED, 00 + vpxor xmm4, xmm4, xmm2 ; 20B9 _ C5 D9: EF. E2 + vmovd xmm2, dword [ecx+2C68H] ; 20BD _ C5 F9: 6E. 91, 00002C68 + vpxor xmm5, xmm3, xmm5 ; 20C5 _ C5 E1: EF. ED + vpslld xmm3, xmm6, 22 ; 20C9 _ C5 E1: 72. F6, 16 + vpsrld xmm6, xmm6, 10 ; 20CE _ C5 C9: 72. D6, 0A + vpor xmm3, xmm3, xmm6 ; 20D3 _ C5 E1: EB. DE + vpshufd xmm6, xmm2, 0 ; 20D7 _ C5 F9: 70. F2, 00 + vmovd xmm7, dword [ecx+2C64H] ; 20DC _ C5 F9: 6E. B9, 00002C64 + vpxor xmm2, xmm3, xmm6 ; 20E4 _ C5 E1: EF. D6 + vpshufd xmm3, xmm7, 0 ; 20E8 _ C5 F9: 70. DF, 00 + vpxor xmm6, xmm2, xmm1 ; 20ED _ C5 E9: EF. F1 + vpxor xmm2, xmm4, xmm5 ; 20F1 _ C5 D9: EF. D5 + vpand xmm4, xmm5, xmm4 ; 20F5 _ C5 D1: DB. E4 + vpxor xmm4, xmm4, xmm6 ; 20F9 _ C5 D9: EF. E6 + vpxor xmm0, xmm0, xmm3 ; 20FD _ C5 F9: EF. C3 + vpxor xmm0, xmm0, xmm4 ; 2101 _ C5 F9: EF. C4 + vpor xmm7, xmm6, xmm5 ; 2105 _ C5 C9: EB. FD + vpor xmm3, xmm2, xmm0 ; 2109 _ C5 E9: EB. D8 + vpxor xmm6, xmm7, xmm2 ; 210D _ C5 C1: EF. F2 + vpxor xmm7, xmm6, xmm0 ; 2111 _ C5 C9: EF. F8 + vpor xmm2, xmm3, xmm4 ; 2115 _ C5 E1: EB. D4 + vpxor xmm5, xmm5, xmm3 ; 2119 _ C5 D1: EF. EB + vpxor xmm2, xmm2, xmm7 ; 211D _ C5 E9: EF. D7 + vpxor xmm6, xmm5, xmm4 ; 2121 _ C5 D1: EF. F4 + vpslld xmm3, xmm2, 13 ; 2125 _ C5 E1: 72. F2, 0D + vpxor xmm5, xmm6, xmm2 ; 212A _ C5 C9: EF. EA + vpsrld xmm6, xmm2, 19 ; 212E _ C5 C9: 72. D2, 13 + vpor xmm2, xmm3, xmm6 ; 2133 _ C5 E1: EB. D6 + vpslld xmm3, xmm5, 3 ; 2137 _ C5 E1: 72. F5, 03 + vpsrld xmm6, xmm5, 29 ; 213C _ C5 C9: 72. D5, 1D + vpxor xmm4, xmm4, xmm1 ; 2141 _ C5 D9: EF. E1 + vpand xmm5, xmm7, xmm5 ; 2145 _ C5 C1: DB. ED + vpor xmm6, xmm3, xmm6 ; 2149 _ C5 E1: EB. F6 + vpxor xmm0, xmm0, xmm2 ; 214D _ C5 F9: EF. C2 + vpxor xmm7, xmm4, xmm5 ; 2151 _ C5 D9: EF. FD + vpxor xmm0, xmm0, xmm6 ; 2155 _ C5 F9: EF. C6 + vpxor xmm4, xmm7, xmm6 ; 2159 _ C5 C1: EF. E6 + vpslld xmm3, xmm2, 3 ; 215D _ C5 E1: 72. F2, 03 + vpslld xmm5, xmm0, 1 ; 2162 _ C5 D1: 72. F0, 01 + vpxor xmm4, xmm4, xmm3 ; 2167 _ C5 D9: EF. E3 + vpsrld xmm0, xmm0, 31 ; 216B _ C5 F9: 72. D0, 1F + vpor xmm7, xmm5, xmm0 ; 2170 _ C5 D1: EB. F8 + vpslld xmm3, xmm4, 7 ; 2174 _ C5 E1: 72. F4, 07 + vpsrld xmm4, xmm4, 25 ; 2179 _ C5 D9: 72. D4, 19 + vpxor xmm2, xmm2, xmm7 ; 217E _ C5 E9: EF. D7 + vpor xmm0, xmm3, xmm4 ; 2182 _ C5 E1: EB. 
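+; Annotation (added, not part of the generated listing): each vmovd from
+; [ecx+2Cxx] followed by vpshufd ..., 0 broadcasts one 32-bit word of the
+; round-key schedule into all four lanes, so a single vpxor mixes that
+; key word into all four blocks at once; in scalar terms, for i = 0..3:
+;
+;     x[i] ^= broadcast32(rk[4*round + i]);   /* illustrative names */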
C4 + vpslld xmm4, xmm7, 7 ; 2186 _ C5 D9: 72. F7, 07 + vpxor xmm5, xmm2, xmm0 ; 218B _ C5 E9: EF. E8 + vpxor xmm6, xmm6, xmm0 ; 218F _ C5 C9: EF. F0 + vpslld xmm2, xmm5, 5 ; 2193 _ C5 E9: 72. F5, 05 + vpsrld xmm3, xmm5, 27 ; 2198 _ C5 E1: 72. D5, 1B + vmovd xmm5, dword [ecx+2C70H] ; 219D _ C5 F9: 6E. A9, 00002C70 + vpxor xmm6, xmm6, xmm4 ; 21A5 _ C5 C9: EF. F4 + vpor xmm4, xmm2, xmm3 ; 21A9 _ C5 E9: EB. E3 + vmovd xmm3, dword [ecx+2C74H] ; 21AD _ C5 F9: 6E. 99, 00002C74 + vpshufd xmm2, xmm5, 0 ; 21B5 _ C5 F9: 70. D5, 00 + vpxor xmm5, xmm4, xmm2 ; 21BA _ C5 D9: EF. EA + vpslld xmm2, xmm6, 22 ; 21BE _ C5 E9: 72. F6, 16 + vpshufd xmm4, xmm3, 0 ; 21C3 _ C5 F9: 70. E3, 00 + vpsrld xmm6, xmm6, 10 ; 21C8 _ C5 C9: 72. D6, 0A + vpxor xmm7, xmm7, xmm4 ; 21CD _ C5 C1: EF. FC + vpor xmm4, xmm2, xmm6 ; 21D1 _ C5 E9: EB. E6 + vmovd xmm6, dword [ecx+2C78H] ; 21D5 _ C5 F9: 6E. B1, 00002C78 + vmovd xmm3, dword [ecx+2C7CH] ; 21DD _ C5 F9: 6E. 99, 00002C7C + vpshufd xmm2, xmm6, 0 ; 21E5 _ C5 F9: 70. D6, 00 + vpshufd xmm6, xmm3, 0 ; 21EA _ C5 F9: 70. F3, 00 + vpxor xmm4, xmm4, xmm2 ; 21EF _ C5 D9: EF. E2 + vpxor xmm6, xmm0, xmm6 ; 21F3 _ C5 F9: EF. F6 + vpor xmm0, xmm7, xmm4 ; 21F7 _ C5 C1: EB. C4 + vpxor xmm2, xmm0, xmm6 ; 21FB _ C5 F9: EF. D6 + vpxor xmm7, xmm7, xmm4 ; 21FF _ C5 C1: EF. FC + vpxor xmm3, xmm4, xmm2 ; 2203 _ C5 D9: EF. DA + vpxor xmm4, xmm7, xmm3 ; 2207 _ C5 C1: EF. E3 + vpor xmm7, xmm6, xmm7 ; 220B _ C5 C9: EB. FF + vmovdqu oword [esp+0E0H], xmm2 ; 220F _ C5 FA: 7F. 94 24, 000000E0 + vpor xmm2, xmm2, xmm4 ; 2218 _ C5 E9: EB. D4 + vpxor xmm2, xmm2, xmm5 ; 221C _ C5 E9: EF. D5 + vpor xmm0, xmm5, xmm4 ; 2220 _ C5 D1: EB. C4 + vpxor xmm2, xmm2, xmm4 ; 2224 _ C5 E9: EF. D4 + vpxor xmm0, xmm0, xmm3 ; 2228 _ C5 F9: EF. C3 + vpxor xmm3, xmm3, xmm2 ; 222C _ C5 E1: EF. DA + vpand xmm6, xmm7, xmm5 ; 2230 _ C5 C1: DB. F5 + vpxor xmm1, xmm3, xmm1 ; 2234 _ C5 E1: EF. C9 + vpand xmm2, xmm2, xmm0 ; 2238 _ C5 E9: DB. D0 + vpor xmm1, xmm1, xmm0 ; 223C _ C5 F1: EB. C8 + vpxor xmm2, xmm2, xmm4 ; 2240 _ C5 E9: EF. D4 + vpxor xmm3, xmm4, xmm1 ; 2244 _ C5 D9: EF. D9 + vmovd xmm1, dword [ecx+2C80H] ; 2248 _ C5 F9: 6E. 89, 00002C80 + vpshufd xmm1, xmm1, 0 ; 2250 _ C5 F9: 70. C9, 00 + vpxor xmm1, xmm3, xmm1 ; 2255 _ C5 E1: EF. C9 + vmovd xmm3, dword [ecx+2C84H] ; 2259 _ C5 F9: 6E. 99, 00002C84 + vmovd xmm4, dword [ecx+2C88H] ; 2261 _ C5 F9: 6E. A1, 00002C88 + vpxor xmm5, xmm6, oword [esp+0E0H] ; 2269 _ C5 C9: EF. AC 24, 000000E0 + vpshufd xmm7, xmm3, 0 ; 2272 _ C5 F9: 70. FB, 00 + vmovd xmm3, dword [ecx+2C8CH] ; 2277 _ C5 F9: 6E. 99, 00002C8C + vpxor xmm6, xmm5, xmm7 ; 227F _ C5 D1: EF. F7 + vpshufd xmm4, xmm4, 0 ; 2283 _ C5 F9: 70. E4, 00 + vpshufd xmm5, xmm3, 0 ; 2288 _ C5 F9: 70. EB, 00 + vpxor xmm4, xmm2, xmm4 ; 228D _ C5 E9: EF. E4 + vpxor xmm0, xmm0, xmm5 ; 2291 _ C5 F9: EF. C5 + vpunpckldq xmm2, xmm1, xmm6 ; 2295 _ C5 F1: 62. D6 + vpunpckldq xmm3, xmm4, xmm0 ; 2299 _ C5 D9: 62. D8 + vpunpckhdq xmm1, xmm1, xmm6 ; 229D _ C5 F1: 6A. CE + vpunpckhdq xmm0, xmm4, xmm0 ; 22A1 _ C5 D9: 6A. C0 + vpunpcklqdq xmm4, xmm2, xmm3 ; 22A5 _ C5 E9: 6C. E3 + inc eax ; 22A9 _ 40 + vpxor xmm5, xmm4, oword [esp+70H] ; 22AA _ C5 D9: EF. 6C 24, 70 + add esi, 64 ; 22B0 _ 83. C6, 40 + vmovdqu xmm4, oword [esp+60H] ; 22B3 _ C5 FA: 6F. 64 24, 60 + vpunpcklqdq xmm6, xmm1, xmm0 ; 22B9 _ C5 F1: 6C. F0 + vpunpckhqdq xmm0, xmm1, xmm0 ; 22BD _ C5 F1: 6D. C0 + vpunpckhqdq xmm2, xmm2, xmm3 ; 22C1 _ C5 E9: 6D. D3 + vpxor xmm1, xmm0, xmm4 ; 22C5 _ C5 F9: EF. CC + vpslldq xmm0, xmm4, 8 ; 22C9 _ C5 F9: 73. 
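+; Annotation (added, not part of the generated listing): the
+; vpunpckldq/vpunpckhdq plus vpunpcklqdq/vpunpckhqdq sequence is a 4x4
+; dword transpose.  It moves the state out of the bitsliced layout
+; (register i holding word i of all four blocks) back into four
+; contiguous 16-byte blocks, which are then XORed with their saved
+; tweaks and stored through edx.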
FC, 08 + vpxor xmm3, xmm2, oword [esp+50H] ; 22CE _ C5 E9: EF. 5C 24, 50 + vpsllq xmm2, xmm4, 1 ; 22D4 _ C5 E9: 73. F4, 01 + vmovdqu oword [edx], xmm5 ; 22D9 _ C5 FA: 7F. 2A + vmovdqu oword [edx+30H], xmm1 ; 22DD _ C5 FA: 7F. 4A, 30 + vmovdqu oword [edx+10H], xmm3 ; 22E2 _ C5 FA: 7F. 5A, 10 + vpsrldq xmm1, xmm0, 7 ; 22E7 _ C5 F1: 73. D8, 07 + vpsraw xmm5, xmm4, 8 ; 22EC _ C5 D1: 71. E4, 08 + vpxor xmm7, xmm6, oword [esp+40H] ; 22F1 _ C5 C9: EF. 7C 24, 40 + vpsrlq xmm3, xmm1, 7 ; 22F7 _ C5 E1: 73. D1, 07 + vpsrldq xmm6, xmm5, 15 ; 22FC _ C5 C9: 73. DD, 0F + vpand xmm0, xmm6, oword [esp+30H] ; 2301 _ C5 C9: DB. 44 24, 30 + vmovdqu oword [edx+20H], xmm7 ; 2307 _ C5 FA: 7F. 7A, 20 + vpor xmm7, xmm2, xmm3 ; 230C _ C5 E9: EB. FB + add edx, 64 ; 2310 _ 83. C2, 40 + vpxor xmm1, xmm7, xmm0 ; 2313 _ C5 C1: EF. C8 + cmp eax, 8 ; 2317 _ 83. F8, 08 + vmovdqu oword [esp+70H], xmm1 ; 231A _ C5 FA: 7F. 4C 24, 70 + jl ?_003 ; 2320 _ 0F 8C, FFFFDD74 + mov dword [esp+28H], esi ; 2326 _ 89. 74 24, 28 + mov esi, dword [esp+20H] ; 232A _ 8B. 74 24, 20 + vmovdqu xmm0, oword [esp+70H] ; 232E _ C5 FA: 6F. 44 24, 70 + add esi, -512 ; 2334 _ 81. C6, FFFFFE00 + mov dword [esp+24H], edx ; 233A _ 89. 54 24, 24 + jne ?_001 ; 233E _ 0F 85, FFFFDD1E + add esp, 244 ; 2344 _ 81. C4, 000000F4 + pop ebx ; 234A _ 5B + pop edi ; 234B _ 5F + pop esi ; 234C _ 5E + mov esp, ebp ; 234D _ 8B. E5 + pop ebp ; 234F _ 5D + ret 24 ; 2350 _ C2, 0018 +; _xts_serpent_avx_encrypt@24 End of function + +; Filling space: 0DH +; Filler type: Multi-byte NOP +; db 0FH, 1FH, 44H, 00H, 00H, 0FH, 1FH, 84H +; db 00H, 00H, 00H, 00H, 00H + +ALIGN 16 + +_xts_serpent_avx_decrypt@24:; Function begin + push ebp ; 0000 _ 55 + mov ebp, esp ; 0001 _ 8B. EC + and esp, 0FFFFFFF0H ; 0003 _ 83. E4, F0 + push esi ; 0006 _ 56 + push edi ; 0007 _ 57 + push ebx ; 0008 _ 53 + sub esp, 132 ; 0009 _ 81. EC, 00000084 + mov esi, dword [ebp+18H] ; 000F _ 8B. 75, 18 + mov ecx, esi ; 0012 _ 8B. CE + mov edi, dword [ebp+14H] ; 0014 _ 8B. 7D, 14 + shl ecx, 23 ; 0017 _ C1. E1, 17 + shr edi, 9 ; 001A _ C1. EF, 09 + or ecx, edi ; 001D _ 0B. CF + mov edi, 135 ; 001F _ BF, 00000087 + mov ebx, dword [ebp+1CH] ; 0024 _ 8B. 5D, 1C + mov edx, dword [ebp+8H] ; 0027 _ 8B. 55, 08 + mov eax, dword [ebp+0CH] ; 002A _ 8B. 45, 0C + vmovd xmm1, edi ; 002D _ C5 F9: 6E. CF + shr esi, 9 ; 0031 _ C1. EE, 09 + lea ebx, [ebx+5710H] ; 0034 _ 8D. 9B, 00005710 + mov dword [esp], ecx ; 003A _ 89. 0C 24 + xor ecx, ecx ; 003D _ 33. C9 + mov dword [esp+4H], esi ; 003F _ 89. 74 24, 04 + lea edi, [esp+10H] ; 0043 _ 8D. 7C 24, 10 + mov dword [esp+8H], ecx ; 0047 _ 89. 4C 24, 08 + mov dword [esp+0CH], ecx ; 004B _ 89. 4C 24, 0C + mov esi, dword [ebp+10H] ; 004F _ 8B. 75, 10 + vmovdqu oword [esp+30H], xmm1 ; 0052 _ C5 FA: 7F. 4C 24, 30 + mov dword [esp+24H], eax ; 0058 _ 89. 44 24, 24 + mov dword [esp+28H], edx ; 005C _ 89. 54 24, 28 + jmp ?_005 ; 0060 _ EB, 06 + +?_004: vmovdqu oword [esp+10H], xmm0 ; 0062 _ C5 FA: 7F. 44 24, 10 +?_005: add dword [esp], 1 ; 0068 _ 83. 04 24, 01 + adc dword [esp+4H], 0 ; 006C _ 83. 54 24, 04, 00 + push ebx ; 0071 _ 53 + push edi ; 0072 _ 57 + lea eax, [esp+8H] ; 0073 _ 8D. 44 24, 08 + push eax ; 0077 _ 50 + call _serpent256_encrypt@12 ; 0078 _ E8, 00000000(rel) + vmovdqu xmm0, oword [esp+10H] ; 007D _ C5 FA: 6F. 44 24, 10 + xor eax, eax ; 0083 _ 33. C0 + mov dword [esp+20H], esi ; 0085 _ 89. 74 24, 20 + vmovdqu oword [esp+70H], xmm0 ; 0089 _ C5 FA: 7F. 44 24, 70 + mov edx, dword [esp+24H] ; 008F _ 8B. 54 24, 24 + mov esi, dword [esp+28H] ; 0093 _ 8B. 
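+; Annotation (added, not part of the generated listing): the prologue
+; above forms the 64-bit data-unit number as (byte offset >> 9), i.e.
+; 512-byte XTS units, stages the GF(2^128) reduction constant 135
+; (0x87) at [esp+30H], and points ebx at what appears to be the
+; secondary (tweak) key schedule at +5710H.  The ?_004/?_005 loop then
+; increments the unit counter and runs it through
+; _serpent256_encrypt@12, which matches the standard XTS tweak
+; derivation T = E_K2(unit_number).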
74 24, 28 + mov ecx, dword [ebp+1CH] ; 0097 _ 8B. 4D, 1C +?_006: vmovdqu xmm1, oword [esp+70H] ; 009A _ C5 FA: 6F. 4C 24, 70 + vpslldq xmm4, xmm1, 8 ; 00A0 _ C5 D9: 73. F9, 08 + vpsraw xmm7, xmm1, 8 ; 00A5 _ C5 C1: 71. E1, 08 + vpsrldq xmm5, xmm4, 7 ; 00AA _ C5 D1: 73. DC, 07 + vpsllq xmm2, xmm1, 1 ; 00AF _ C5 E9: 73. F1, 01 + vmovdqu xmm3, oword [esp+30H] ; 00B4 _ C5 FA: 6F. 5C 24, 30 + vpsrlq xmm6, xmm5, 7 ; 00BA _ C5 C9: 73. D5, 07 + vpsrldq xmm0, xmm7, 15 ; 00BF _ C5 F9: 73. DF, 0F + vpor xmm4, xmm2, xmm6 ; 00C4 _ C5 E9: EB. E6 + vpand xmm5, xmm0, xmm3 ; 00C8 _ C5 F9: DB. EB + vpxor xmm2, xmm4, xmm5 ; 00CC _ C5 D9: EF. D5 + vpslldq xmm6, xmm2, 8 ; 00D0 _ C5 C9: 73. FA, 08 + vpsraw xmm5, xmm2, 8 ; 00D5 _ C5 D1: 71. E2, 08 + vpsrldq xmm7, xmm6, 7 ; 00DA _ C5 C1: 73. DE, 07 + vpsllq xmm0, xmm2, 1 ; 00DF _ C5 F9: 73. F2, 01 + vpsrldq xmm6, xmm5, 15 ; 00E4 _ C5 C9: 73. DD, 0F + vpsrlq xmm4, xmm7, 7 ; 00E9 _ C5 D9: 73. D7, 07 + vpor xmm7, xmm0, xmm4 ; 00EE _ C5 F9: EB. FC + vpand xmm0, xmm6, xmm3 ; 00F2 _ C5 C9: DB. C3 + vpxor xmm5, xmm7, xmm0 ; 00F6 _ C5 C1: EF. E8 + vpslldq xmm4, xmm5, 8 ; 00FA _ C5 D9: 73. FD, 08 + vpsllq xmm7, xmm5, 1 ; 00FF _ C5 C1: 73. F5, 01 + vpsrldq xmm6, xmm4, 7 ; 0104 _ C5 C9: 73. DC, 07 + vpsraw xmm4, xmm5, 8 ; 0109 _ C5 D9: 71. E5, 08 + vpsrlq xmm0, xmm6, 7 ; 010E _ C5 F9: 73. D6, 07 + vpsrldq xmm6, xmm4, 15 ; 0113 _ C5 C9: 73. DC, 0F + vpor xmm7, xmm7, xmm0 ; 0118 _ C5 C1: EB. F8 + vpand xmm3, xmm6, xmm3 ; 011C _ C5 C9: DB. DB + vpxor xmm0, xmm7, xmm3 ; 0120 _ C5 C1: EF. C3 + vpxor xmm3, xmm1, oword [esi] ; 0124 _ C5 F1: EF. 1E + vpxor xmm1, xmm2, oword [esi+10H] ; 0128 _ C5 E9: EF. 4E, 10 + vmovdqu oword [esp+50H], xmm2 ; 012D _ C5 FA: 7F. 54 24, 50 + vmovdqu oword [esp+40H], xmm5 ; 0133 _ C5 FA: 7F. 6C 24, 40 + vpxor xmm5, xmm5, oword [esi+20H] ; 0139 _ C5 D1: EF. 6E, 20 + vpxor xmm2, xmm0, oword [esi+30H] ; 013E _ C5 F9: EF. 56, 30 + vmovdqu oword [esp+60H], xmm0 ; 0143 _ C5 FA: 7F. 44 24, 60 + vpunpckldq xmm6, xmm3, xmm1 ; 0149 _ C5 E1: 62. F1 + vpunpckldq xmm7, xmm5, xmm2 ; 014D _ C5 D1: 62. FA + vpunpckhdq xmm0, xmm3, xmm1 ; 0151 _ C5 E1: 6A. C1 + vmovd xmm3, dword [ecx+2C80H] ; 0155 _ C5 F9: 6E. 99, 00002C80 + vpunpckhdq xmm5, xmm5, xmm2 ; 015D _ C5 D1: 6A. EA + vpunpcklqdq xmm1, xmm6, xmm7 ; 0161 _ C5 C9: 6C. CF + vpshufd xmm4, xmm3, 0 ; 0165 _ C5 F9: 70. E3, 00 + vpunpckhqdq xmm7, xmm6, xmm7 ; 016A _ C5 C9: 6D. FF + vpxor xmm3, xmm1, xmm4 ; 016E _ C5 F1: EF. DC + vmovd xmm6, dword [ecx+2C84H] ; 0172 _ C5 F9: 6E. B1, 00002C84 + vpshufd xmm2, xmm6, 0 ; 017A _ C5 F9: 70. D6, 00 + vmovd xmm6, dword [ecx+2C88H] ; 017F _ C5 F9: 6E. B1, 00002C88 + vpxor xmm7, xmm7, xmm2 ; 0187 _ C5 C1: EF. FA + vpunpcklqdq xmm1, xmm0, xmm5 ; 018B _ C5 F9: 6C. CD + vpunpckhqdq xmm0, xmm0, xmm5 ; 018F _ C5 F9: 6D. C5 + vmovd xmm5, dword [ecx+2C8CH] ; 0193 _ C5 F9: 6E. A9, 00002C8C + vpshufd xmm4, xmm6, 0 ; 019B _ C5 F9: 70. E6, 00 + vpshufd xmm5, xmm5, 0 ; 01A0 _ C5 F9: 70. ED, 00 + vpxor xmm1, xmm1, xmm4 ; 01A5 _ C5 F1: EF. CC + vpxor xmm2, xmm0, xmm5 ; 01A9 _ C5 F9: EF. D5 + vpxor xmm6, xmm1, xmm3 ; 01AD _ C5 F1: EF. F3 + vpand xmm4, xmm3, xmm2 ; 01B1 _ C5 E1: DB. E2 + vpor xmm0, xmm1, xmm2 ; 01B5 _ C5 F1: EB. C2 + vpcmpeqd xmm3, xmm3, xmm3 ; 01B9 _ C5 E1: 76. DB + vpxor xmm2, xmm2, xmm7 ; 01BD _ C5 E9: EF. D7 + vpxor xmm5, xmm6, xmm3 ; 01C1 _ C5 C9: EF. EB + vpor xmm7, xmm7, xmm4 ; 01C5 _ C5 C1: EB. FC + vpxor xmm1, xmm4, xmm5 ; 01C9 _ C5 D9: EF. CD + vpand xmm6, xmm5, xmm0 ; 01CD _ C5 D1: DB. F0 + vpxor xmm5, xmm7, xmm6 ; 01D1 _ C5 C1: EF. 
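+; Annotation (added, not part of the generated listing): each
+; vpsllq/vpslldq/vpsrldq/vpsrlq/vpsraw/vpand/vpxor group above doubles
+; the 128-bit tweak in GF(2^128): shift left one bit, carry bit 63 into
+; bit 64, and XOR 0x87 into the low byte when bit 127 was set.  A scalar
+; C sketch of one doubling (variable names are illustrative only):
+;
+;     carry = t_hi >> 63;
+;     t_hi  = (t_hi << 1) | (t_lo >> 63);
+;     t_lo  = (t_lo << 1) ^ (carry ? 0x87 : 0);
+;
+; The three doublings yield the tweaks for the 2nd..4th blocks of the
+; four-block batch handled per loop iteration.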
EE + vpxor xmm6, xmm6, xmm1 ; 01D5 _ C5 C9: EF. F1 + vpand xmm2, xmm2, xmm0 ; 01D9 _ C5 E9: DB. D0 + vpxor xmm0, xmm0, xmm5 ; 01DD _ C5 F9: EF. C5 + vpor xmm7, xmm1, xmm6 ; 01E1 _ C5 F1: EB. FE + vmovd xmm4, dword [ecx+2C70H] ; 01E5 _ C5 F9: 6E. A1, 00002C70 + vpxor xmm1, xmm7, xmm2 ; 01ED _ C5 C1: EF. CA + vpxor xmm2, xmm2, xmm0 ; 01F1 _ C5 E9: EF. D0 + vpor xmm0, xmm0, xmm1 ; 01F5 _ C5 F9: EB. C1 + vpshufd xmm7, xmm4, 0 ; 01F9 _ C5 F9: 70. FC, 00 + vpxor xmm2, xmm2, xmm6 ; 01FE _ C5 E9: EF. D6 + vmovd xmm4, dword [ecx+2C74H] ; 0202 _ C5 F9: 6E. A1, 00002C74 + vpxor xmm7, xmm2, xmm7 ; 020A _ C5 E9: EF. FF + vpshufd xmm2, xmm4, 0 ; 020E _ C5 F9: 70. D4, 00 + vpxor xmm4, xmm1, xmm2 ; 0213 _ C5 F1: EF. E2 + vmovd xmm2, dword [ecx+2C78H] ; 0217 _ C5 F9: 6E. 91, 00002C78 + vpshufd xmm2, xmm2, 0 ; 021F _ C5 F9: 70. D2, 00 + vmovd xmm1, dword [ecx+2C7CH] ; 0224 _ C5 F9: 6E. 89, 00002C7C + vpxor xmm2, xmm5, xmm2 ; 022C _ C5 D1: EF. D2 + vpxor xmm5, xmm0, xmm6 ; 0230 _ C5 F9: EF. EE + vpshufd xmm6, xmm1, 0 ; 0234 _ C5 F9: 70. F1, 00 + vpslld xmm1, xmm2, 10 ; 0239 _ C5 F1: 72. F2, 0A + vpsrld xmm2, xmm2, 22 ; 023E _ C5 E9: 72. D2, 16 + vpxor xmm0, xmm5, xmm6 ; 0243 _ C5 D1: EF. C6 + vpor xmm5, xmm1, xmm2 ; 0247 _ C5 F1: EB. EA + vpslld xmm6, xmm4, 7 ; 024B _ C5 C9: 72. F4, 07 + vpxor xmm2, xmm5, xmm0 ; 0250 _ C5 D1: EF. D0 + vpslld xmm1, xmm4, 31 ; 0254 _ C5 F1: 72. F4, 1F + vpxor xmm5, xmm2, xmm6 ; 0259 _ C5 E9: EF. EE + vpslld xmm2, xmm7, 27 ; 025D _ C5 E9: 72. F7, 1B + vpsrld xmm7, xmm7, 5 ; 0262 _ C5 C1: 72. D7, 05 + vpor xmm2, xmm2, xmm7 ; 0267 _ C5 E9: EB. D7 + vpslld xmm7, xmm0, 25 ; 026B _ C5 C1: 72. F0, 19 + vpxor xmm6, xmm2, xmm4 ; 0270 _ C5 E9: EF. F4 + vpsrld xmm4, xmm4, 1 ; 0274 _ C5 D9: 72. D4, 01 + vpxor xmm6, xmm6, xmm0 ; 0279 _ C5 C9: EF. F0 + vpsrld xmm0, xmm0, 7 ; 027D _ C5 F9: 72. D0, 07 + vpor xmm2, xmm7, xmm0 ; 0282 _ C5 C1: EB. D0 + vpslld xmm0, xmm6, 3 ; 0286 _ C5 F9: 72. F6, 03 + vpxor xmm7, xmm2, xmm5 ; 028B _ C5 E9: EF. FD + vpor xmm2, xmm1, xmm4 ; 028F _ C5 F1: EB. D4 + vpxor xmm0, xmm7, xmm0 ; 0293 _ C5 C1: EF. C0 + vpxor xmm7, xmm2, xmm6 ; 0297 _ C5 E9: EF. FE + vpxor xmm1, xmm7, xmm5 ; 029B _ C5 C1: EF. CD + vpslld xmm4, xmm5, 29 ; 029F _ C5 D9: 72. F5, 1D + vpsrld xmm5, xmm5, 3 ; 02A4 _ C5 D1: 72. D5, 03 + vpslld xmm2, xmm6, 19 ; 02A9 _ C5 E9: 72. F6, 13 + vpsrld xmm6, xmm6, 13 ; 02AE _ C5 C9: 72. D6, 0D + vpor xmm5, xmm4, xmm5 ; 02B3 _ C5 D9: EB. ED + vpor xmm2, xmm2, xmm6 ; 02B7 _ C5 E9: EB. D6 + vpxor xmm6, xmm0, xmm1 ; 02BB _ C5 F9: EF. F1 + vpxor xmm4, xmm2, xmm5 ; 02BF _ C5 E9: EF. E5 + vpand xmm7, xmm5, xmm4 ; 02C3 _ C5 D1: DB. FC + vpxor xmm5, xmm5, xmm0 ; 02C7 _ C5 D1: EF. E8 + vpxor xmm3, xmm7, xmm3 ; 02CB _ C5 C1: EF. DB + vpor xmm7, xmm5, xmm4 ; 02CF _ C5 D1: EB. FC + vpxor xmm2, xmm3, xmm6 ; 02D3 _ C5 E1: EF. D6 + vpxor xmm3, xmm6, xmm7 ; 02D7 _ C5 C9: EF. DF + vmovd xmm6, dword [ecx+2C60H] ; 02DB _ C5 F9: 6E. B1, 00002C60 + vpxor xmm5, xmm4, xmm2 ; 02E3 _ C5 D9: EF. EA + vpand xmm0, xmm1, xmm3 ; 02E7 _ C5 F1: DB. C3 + vpxor xmm7, xmm7, xmm1 ; 02EB _ C5 C1: EF. F9 + vpshufd xmm4, xmm6, 0 ; 02EF _ C5 F9: 70. E6, 00 + vpxor xmm0, xmm0, xmm5 ; 02F4 _ C5 F9: EF. C5 + vpxor xmm6, xmm0, xmm4 ; 02F8 _ C5 F9: EF. F4 + vpxor xmm1, xmm5, xmm3 ; 02FC _ C5 D1: EF. CB + vmovd xmm4, dword [ecx+2C64H] ; 0300 _ C5 F9: 6E. A1, 00002C64 + vpor xmm5, xmm1, xmm2 ; 0308 _ C5 F1: EB. EA + vpshufd xmm4, xmm4, 0 ; 030C _ C5 F9: 70. E4, 00 + vpxor xmm7, xmm7, xmm5 ; 0311 _ C5 C1: EF. FD + vpxor xmm4, xmm2, xmm4 ; 0315 _ C5 E9: EF. E4 + vpxor xmm5, xmm3, xmm0 ; 0319 _ C5 E1: EF. 
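+; Annotation (added, not part of the generated listing): decryption
+; undoes the rounds in reverse order, so the rotate-left counts 10, 27,
+; 25, 31, 29 and 19 above are rotate-rights by 22, 5, 7, 1, 3 and 13,
+; the exact inverses of the encrypt-side linear transform.  The
+; recurring vpcmpeqd xmm,xmm,xmm produces an all-ones register, used
+; with vpxor as the bitwise NOT inside the inverse S-boxes.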
E8 + vmovd xmm2, dword [ecx+2C68H] ; 031D _ C5 F9: 6E. 91, 00002C68 + vpshufd xmm1, xmm2, 0 ; 0325 _ C5 F9: 70. CA, 00 + vmovd xmm3, dword [ecx+2C6CH] ; 032A _ C5 F9: 6E. 99, 00002C6C + vpxor xmm1, xmm7, xmm1 ; 0332 _ C5 C1: EF. C9 + vpshufd xmm2, xmm3, 0 ; 0336 _ C5 F9: 70. D3, 00 + vpslld xmm3, xmm1, 10 ; 033B _ C5 E1: 72. F1, 0A + vpsrld xmm0, xmm1, 22 ; 0340 _ C5 F9: 72. D1, 16 + vpxor xmm7, xmm5, xmm2 ; 0345 _ C5 D1: EF. FA + vpor xmm1, xmm3, xmm0 ; 0349 _ C5 E1: EB. C8 + vpslld xmm2, xmm4, 7 ; 034D _ C5 E9: 72. F4, 07 + vpxor xmm5, xmm1, xmm7 ; 0352 _ C5 F1: EF. EF + vpslld xmm3, xmm6, 27 ; 0356 _ C5 E1: 72. F6, 1B + vpsrld xmm6, xmm6, 5 ; 035B _ C5 C9: 72. D6, 05 + vpxor xmm5, xmm5, xmm2 ; 0360 _ C5 D1: EF. EA + vpor xmm2, xmm3, xmm6 ; 0364 _ C5 E1: EB. D6 + vpslld xmm3, xmm7, 25 ; 0368 _ C5 E1: 72. F7, 19 + vpxor xmm6, xmm2, xmm4 ; 036D _ C5 E9: EF. F4 + vpxor xmm2, xmm6, xmm7 ; 0371 _ C5 C9: EF. D7 + vpsrld xmm7, xmm7, 7 ; 0375 _ C5 C1: 72. D7, 07 + vpor xmm6, xmm3, xmm7 ; 037A _ C5 E1: EB. F7 + vpslld xmm3, xmm2, 3 ; 037E _ C5 E1: 72. F2, 03 + vpxor xmm7, xmm6, xmm5 ; 0383 _ C5 C9: EF. FD + vpslld xmm0, xmm2, 19 ; 0387 _ C5 F9: 72. F2, 13 + vpxor xmm6, xmm7, xmm3 ; 038C _ C5 C1: EF. F3 + vpslld xmm7, xmm4, 31 ; 0390 _ C5 C1: 72. F4, 1F + vpsrld xmm4, xmm4, 1 ; 0395 _ C5 D9: 72. D4, 01 + vpsrld xmm1, xmm2, 13 ; 039A _ C5 F1: 72. D2, 0D + vpor xmm7, xmm7, xmm4 ; 039F _ C5 C1: EB. FC + vpor xmm3, xmm0, xmm1 ; 03A3 _ C5 F9: EB. D9 + vpxor xmm2, xmm7, xmm2 ; 03A7 _ C5 C1: EF. D2 + vpslld xmm4, xmm5, 29 ; 03AB _ C5 D9: 72. F5, 1D + vpxor xmm1, xmm2, xmm5 ; 03B0 _ C5 E9: EF. CD + vpcmpeqd xmm0, xmm0, xmm0 ; 03B4 _ C5 F9: 76. C0 + vpsrld xmm5, xmm5, 3 ; 03B8 _ C5 D1: 72. D5, 03 + vpxor xmm2, xmm1, xmm0 ; 03BD _ C5 F1: EF. D0 + vpor xmm5, xmm4, xmm5 ; 03C1 _ C5 D9: EB. ED + vpor xmm7, xmm6, xmm3 ; 03C5 _ C5 C9: EB. FB + vpxor xmm1, xmm5, xmm2 ; 03C9 _ C5 D1: EF. CA + vpxor xmm7, xmm7, xmm1 ; 03CD _ C5 C1: EF. F9 + vpxor xmm5, xmm6, xmm7 ; 03D1 _ C5 C9: EF. EF + vpor xmm6, xmm1, xmm2 ; 03D5 _ C5 F1: EB. F2 + vpand xmm6, xmm6, xmm3 ; 03D9 _ C5 C9: DB. F3 + vpxor xmm6, xmm6, xmm5 ; 03DD _ C5 C9: EF. F5 + vpor xmm5, xmm5, xmm3 ; 03E1 _ C5 D1: EB. EB + vpand xmm4, xmm2, xmm6 ; 03E5 _ C5 E9: DB. E6 + vpxor xmm2, xmm5, xmm2 ; 03E9 _ C5 D1: EF. D2 + vpxor xmm1, xmm4, xmm7 ; 03ED _ C5 D9: EF. CF + vpxor xmm4, xmm2, xmm6 ; 03F1 _ C5 E9: EF. E6 + vmovd xmm2, dword [ecx+2C50H] ; 03F5 _ C5 F9: 6E. 91, 00002C50 + vpxor xmm5, xmm4, xmm1 ; 03FD _ C5 D9: EF. E9 + vpshufd xmm2, xmm2, 0 ; 0401 _ C5 F9: 70. D2, 00 + vpand xmm7, xmm7, xmm4 ; 0406 _ C5 C1: DB. FC + vpxor xmm1, xmm1, xmm2 ; 040A _ C5 F1: EF. CA + vpxor xmm0, xmm5, xmm0 ; 040E _ C5 D1: EF. C0 + vmovd xmm2, dword [ecx+2C54H] ; 0412 _ C5 F9: 6E. 91, 00002C54 + vpxor xmm5, xmm7, xmm5 ; 041A _ C5 C1: EF. ED + vpshufd xmm2, xmm2, 0 ; 041E _ C5 F9: 70. D2, 00 + vpxor xmm5, xmm5, xmm3 ; 0423 _ C5 D1: EF. EB + vmovd xmm3, dword [ecx+2C58H] ; 0427 _ C5 F9: 6E. 99, 00002C58 + vpxor xmm0, xmm0, xmm2 ; 042F _ C5 F9: EF. C2 + vmovd xmm7, dword [ecx+2C5CH] ; 0433 _ C5 F9: 6E. B9, 00002C5C + vpshufd xmm2, xmm3, 0 ; 043B _ C5 F9: 70. D3, 00 + vpshufd xmm3, xmm7, 0 ; 0440 _ C5 F9: 70. DF, 00 + vpxor xmm4, xmm5, xmm2 ; 0445 _ C5 D1: EF. E2 + vpxor xmm2, xmm6, xmm3 ; 0449 _ C5 C9: EF. D3 + vpslld xmm6, xmm4, 10 ; 044D _ C5 C9: 72. F4, 0A + vpsrld xmm5, xmm4, 22 ; 0452 _ C5 D1: 72. D4, 16 + vpslld xmm3, xmm0, 7 ; 0457 _ C5 E1: 72. F0, 07 + vpor xmm6, xmm6, xmm5 ; 045C _ C5 C9: EB. F5 + vpslld xmm5, xmm1, 27 ; 0460 _ C5 D1: 72. 
F1, 1B + vpsrld xmm1, xmm1, 5 ; 0465 _ C5 F1: 72. D1, 05 + vpxor xmm7, xmm6, xmm2 ; 046A _ C5 C9: EF. FA + vpor xmm5, xmm5, xmm1 ; 046E _ C5 D1: EB. E9 + vpxor xmm4, xmm7, xmm3 ; 0472 _ C5 C1: EF. E3 + vpxor xmm6, xmm5, xmm0 ; 0476 _ C5 D1: EF. F0 + vpslld xmm3, xmm2, 25 ; 047A _ C5 E1: 72. F2, 19 + vpxor xmm7, xmm6, xmm2 ; 047F _ C5 C9: EF. FA + vpsrld xmm2, xmm2, 7 ; 0483 _ C5 E9: 72. D2, 07 + vpor xmm1, xmm3, xmm2 ; 0488 _ C5 E1: EB. CA + vpslld xmm2, xmm7, 3 ; 048C _ C5 E9: 72. F7, 03 + vpxor xmm5, xmm1, xmm4 ; 0491 _ C5 F1: EF. EC + vpslld xmm3, xmm0, 31 ; 0495 _ C5 E1: 72. F0, 1F + vpsrld xmm0, xmm0, 1 ; 049A _ C5 F9: 72. D0, 01 + vpxor xmm6, xmm5, xmm2 ; 049F _ C5 D1: EF. F2 + vpor xmm5, xmm3, xmm0 ; 04A3 _ C5 E1: EB. E8 + vpslld xmm3, xmm4, 29 ; 04A7 _ C5 E1: 72. F4, 1D + vpsrld xmm0, xmm4, 3 ; 04AC _ C5 F9: 72. D4, 03 + vpxor xmm2, xmm5, xmm7 ; 04B1 _ C5 D1: EF. D7 + vpor xmm0, xmm3, xmm0 ; 04B5 _ C5 E1: EB. C0 + vpxor xmm1, xmm2, xmm4 ; 04B9 _ C5 E9: EF. CC + vpslld xmm4, xmm7, 19 ; 04BD _ C5 D9: 72. F7, 13 + vpsrld xmm5, xmm7, 13 ; 04C2 _ C5 D1: 72. D7, 0D + vpand xmm2, xmm0, xmm6 ; 04C7 _ C5 F9: DB. D6 + vpor xmm3, xmm4, xmm5 ; 04CB _ C5 D9: EB. DD + vpxor xmm7, xmm2, xmm1 ; 04CF _ C5 E9: EF. F9 + vpor xmm1, xmm1, xmm6 ; 04D3 _ C5 F1: EB. CE + vpand xmm5, xmm1, xmm3 ; 04D7 _ C5 F1: DB. EB + vpxor xmm2, xmm0, xmm7 ; 04DB _ C5 F9: EF. D7 + vpxor xmm4, xmm2, xmm5 ; 04DF _ C5 E9: EF. E5 + vpcmpeqd xmm0, xmm0, xmm0 ; 04E3 _ C5 F9: 76. C0 + vpxor xmm2, xmm3, xmm0 ; 04E7 _ C5 E1: EF. D0 + vpxor xmm0, xmm6, xmm4 ; 04EB _ C5 C9: EF. C4 + vpand xmm6, xmm5, xmm7 ; 04EF _ C5 D1: DB. F7 + vpxor xmm3, xmm6, xmm0 ; 04F3 _ C5 C9: EF. D8 + vpxor xmm5, xmm2, xmm3 ; 04F7 _ C5 E9: EF. EB + vpand xmm2, xmm0, xmm2 ; 04FB _ C5 F9: DB. D2 + vmovd xmm1, dword [ecx+2C40H] ; 04FF _ C5 F9: 6E. 89, 00002C40 + vpxor xmm6, xmm2, xmm7 ; 0507 _ C5 E9: EF. F7 + vpshufd xmm2, xmm1, 0 ; 050B _ C5 F9: 70. D1, 00 + vpxor xmm6, xmm6, xmm5 ; 0510 _ C5 C9: EF. F5 + vpand xmm7, xmm7, xmm5 ; 0514 _ C5 C1: DB. FD + vpxor xmm2, xmm5, xmm2 ; 0518 _ C5 D1: EF. D2 + vmovd xmm1, dword [ecx+2C44H] ; 051C _ C5 F9: 6E. 89, 00002C44 + vpxor xmm0, xmm6, xmm5 ; 0524 _ C5 C9: EF. C5 + vpxor xmm5, xmm7, xmm4 ; 0528 _ C5 C1: EF. EC + vmovd xmm7, dword [ecx+2C48H] ; 052C _ C5 F9: 6E. B9, 00002C48 + vpor xmm6, xmm5, xmm6 ; 0534 _ C5 D1: EB. F6 + vpshufd xmm1, xmm1, 0 ; 0538 _ C5 F9: 70. C9, 00 + vpxor xmm3, xmm6, xmm3 ; 053D _ C5 C9: EF. DB + vmovd xmm5, dword [ecx+2C4CH] ; 0541 _ C5 F9: 6E. A9, 00002C4C + vpxor xmm0, xmm0, xmm1 ; 0549 _ C5 F9: EF. C1 + vpshufd xmm1, xmm7, 0 ; 054D _ C5 F9: 70. CF, 00 + vpshufd xmm6, xmm5, 0 ; 0552 _ C5 F9: 70. F5, 00 + vpxor xmm7, xmm3, xmm1 ; 0557 _ C5 E1: EF. F9 + vpxor xmm3, xmm4, xmm6 ; 055B _ C5 D9: EF. DE + vpslld xmm4, xmm7, 10 ; 055F _ C5 D9: 72. F7, 0A + vpsrld xmm5, xmm7, 22 ; 0564 _ C5 D1: 72. D7, 16 + vpslld xmm1, xmm0, 7 ; 0569 _ C5 F1: 72. F0, 07 + vpor xmm6, xmm4, xmm5 ; 056E _ C5 D9: EB. F5 + vpslld xmm5, xmm2, 27 ; 0572 _ C5 D1: 72. F2, 1B + vpsrld xmm2, xmm2, 5 ; 0577 _ C5 E9: 72. D2, 05 + vpxor xmm7, xmm6, xmm3 ; 057C _ C5 C9: EF. FB + vpor xmm5, xmm5, xmm2 ; 0580 _ C5 D1: EB. EA + vpxor xmm4, xmm7, xmm1 ; 0584 _ C5 C1: EF. E1 + vpxor xmm2, xmm5, xmm0 ; 0588 _ C5 D1: EF. D0 + vpslld xmm6, xmm3, 25 ; 058C _ C5 C9: 72. F3, 19 + vpsrld xmm7, xmm3, 7 ; 0591 _ C5 C1: 72. D3, 07 + vpxor xmm1, xmm2, xmm3 ; 0596 _ C5 E9: EF. CB + vpor xmm3, xmm6, xmm7 ; 059A _ C5 C9: EB. DF + vpslld xmm2, xmm1, 3 ; 059E _ C5 E9: 72. F1, 03 + vpxor xmm5, xmm3, xmm4 ; 05A3 _ C5 E1: EF. 
EC + vpslld xmm6, xmm0, 31 ; 05A7 _ C5 C9: 72. F0, 1F + vpsrld xmm0, xmm0, 1 ; 05AC _ C5 F9: 72. D0, 01 + vpxor xmm3, xmm5, xmm2 ; 05B1 _ C5 D1: EF. DA + vpor xmm5, xmm6, xmm0 ; 05B5 _ C5 C9: EB. E8 + vpslld xmm6, xmm4, 29 ; 05B9 _ C5 C9: 72. F4, 1D + vpxor xmm2, xmm5, xmm1 ; 05BE _ C5 D1: EF. D1 + vpsrld xmm7, xmm4, 3 ; 05C2 _ C5 C1: 72. D4, 03 + vpxor xmm2, xmm2, xmm4 ; 05C7 _ C5 E9: EF. D4 + vpor xmm6, xmm6, xmm7 ; 05CB _ C5 C9: EB. F7 + vpslld xmm4, xmm1, 19 ; 05CF _ C5 D9: 72. F1, 13 + vpsrld xmm5, xmm1, 13 ; 05D4 _ C5 D1: 72. D1, 0D + vpxor xmm7, xmm6, xmm2 ; 05D9 _ C5 C9: EF. FA + vpor xmm0, xmm4, xmm5 ; 05DD _ C5 D9: EB. C5 + vpxor xmm5, xmm0, xmm7 ; 05E1 _ C5 F9: EF. EF + vpand xmm6, xmm6, xmm7 ; 05E5 _ C5 C9: DB. F7 + vpxor xmm4, xmm6, xmm5 ; 05E9 _ C5 C9: EF. E5 + vpand xmm1, xmm5, xmm2 ; 05ED _ C5 D1: DB. CA + vpor xmm6, xmm3, xmm4 ; 05F1 _ C5 E1: EB. F4 + vpxor xmm3, xmm2, xmm3 ; 05F5 _ C5 E9: EF. DB + vpxor xmm7, xmm7, xmm6 ; 05F9 _ C5 C1: EF. FE + vpxor xmm5, xmm1, xmm6 ; 05FD _ C5 F1: EF. EE + vpxor xmm3, xmm3, xmm4 ; 0601 _ C5 E1: EF. DC + vpand xmm2, xmm6, xmm7 ; 0605 _ C5 C9: DB. D7 + vmovd xmm1, dword [ecx+2C30H] ; 0609 _ C5 F9: 6E. 89, 00002C30 + vpxor xmm6, xmm3, xmm5 ; 0611 _ C5 E1: EF. F5 + vpor xmm0, xmm6, xmm7 ; 0615 _ C5 C9: EB. C7 + vpxor xmm2, xmm2, xmm3 ; 0619 _ C5 E9: EF. D3 + vpshufd xmm6, xmm1, 0 ; 061D _ C5 F9: 70. F1, 00 + vpxor xmm4, xmm0, xmm4 ; 0622 _ C5 F9: EF. E4 + vpxor xmm1, xmm7, xmm6 ; 0626 _ C5 C1: EF. CE + vpxor xmm5, xmm5, xmm2 ; 062A _ C5 D1: EF. EA + vmovd xmm7, dword [ecx+2C34H] ; 062E _ C5 F9: 6E. B9, 00002C34 + vpxor xmm5, xmm5, xmm4 ; 0636 _ C5 D1: EF. EC + vpshufd xmm6, xmm7, 0 ; 063A _ C5 F9: 70. F7, 00 + vmovd xmm7, dword [ecx+2C38H] ; 063F _ C5 F9: 6E. B9, 00002C38 + vpxor xmm6, xmm4, xmm6 ; 0647 _ C5 D9: EF. F6 + vpshufd xmm3, xmm7, 0 ; 064B _ C5 F9: 70. DF, 00 + vmovd xmm4, dword [ecx+2C3CH] ; 0650 _ C5 F9: 6E. A1, 00002C3C + vpxor xmm0, xmm2, xmm3 ; 0658 _ C5 E9: EF. C3 + vpshufd xmm2, xmm4, 0 ; 065C _ C5 F9: 70. D4, 00 + vpslld xmm7, xmm0, 10 ; 0661 _ C5 C1: 72. F0, 0A + vpsrld xmm3, xmm0, 22 ; 0666 _ C5 E1: 72. D0, 16 + vpxor xmm2, xmm5, xmm2 ; 066B _ C5 D1: EF. D2 + vpor xmm0, xmm7, xmm3 ; 066F _ C5 C1: EB. C3 + vpslld xmm5, xmm6, 7 ; 0673 _ C5 D1: 72. F6, 07 + vpxor xmm4, xmm0, xmm2 ; 0678 _ C5 F9: EF. E2 + vpslld xmm7, xmm1, 27 ; 067C _ C5 C1: 72. F1, 1B + vpsrld xmm1, xmm1, 5 ; 0681 _ C5 F1: 72. D1, 05 + vpxor xmm3, xmm4, xmm5 ; 0686 _ C5 D9: EF. DD + vpor xmm5, xmm7, xmm1 ; 068A _ C5 C1: EB. E9 + vpslld xmm0, xmm2, 25 ; 068E _ C5 F9: 72. F2, 19 + vpxor xmm7, xmm5, xmm6 ; 0693 _ C5 D1: EF. FE + vpxor xmm5, xmm7, xmm2 ; 0697 _ C5 C1: EF. EA + vpsrld xmm2, xmm2, 7 ; 069B _ C5 E9: 72. D2, 07 + vpor xmm1, xmm0, xmm2 ; 06A0 _ C5 F9: EB. CA + vpslld xmm2, xmm5, 3 ; 06A4 _ C5 E9: 72. F5, 03 + vpxor xmm4, xmm1, xmm3 ; 06A9 _ C5 F1: EF. E3 + vpslld xmm0, xmm6, 31 ; 06AD _ C5 F9: 72. F6, 1F + vpsrld xmm6, xmm6, 1 ; 06B2 _ C5 C9: 72. D6, 01 + vpxor xmm7, xmm4, xmm2 ; 06B7 _ C5 D9: EF. FA + vpor xmm2, xmm0, xmm6 ; 06BB _ C5 F9: EB. D6 + vpslld xmm0, xmm5, 19 ; 06BF _ C5 F9: 72. F5, 13 + vpxor xmm6, xmm2, xmm5 ; 06C4 _ C5 E9: EF. F5 + vpsrld xmm5, xmm5, 13 ; 06C8 _ C5 D1: 72. D5, 0D + vpxor xmm2, xmm6, xmm3 ; 06CD _ C5 C9: EF. D3 + vpslld xmm1, xmm3, 29 ; 06D1 _ C5 F1: 72. F3, 1D + vpsrld xmm3, xmm3, 3 ; 06D6 _ C5 E1: 72. D3, 03 + vpor xmm5, xmm0, xmm5 ; 06DB _ C5 F9: EB. ED + vpor xmm4, xmm1, xmm3 ; 06DF _ C5 F1: EB. E3 + vpxor xmm0, xmm7, xmm5 ; 06E3 _ C5 C1: EF. C5 + vpxor xmm4, xmm4, xmm7 ; 06E7 _ C5 D9: EF. 
E7 + vpand xmm6, xmm0, xmm4 ; 06EB _ C5 F9: DB. F4 + vpor xmm7, xmm2, xmm4 ; 06EF _ C5 E9: EB. FC + vpxor xmm3, xmm6, xmm2 ; 06F3 _ C5 C9: EF. DA + vpxor xmm2, xmm7, xmm0 ; 06F7 _ C5 C1: EF. D0 + vpand xmm6, xmm0, xmm3 ; 06FB _ C5 F9: DB. F3 + vpxor xmm1, xmm4, xmm3 ; 06FF _ C5 D9: EF. CB + vpand xmm7, xmm6, xmm5 ; 0703 _ C5 C9: DB. FD + vmovd xmm4, dword [ecx+2C24H] ; 0707 _ C5 F9: 6E. A1, 00002C24 + vpxor xmm6, xmm7, xmm1 ; 070F _ C5 C1: EF. F1 + vpcmpeqd xmm0, xmm0, xmm0 ; 0713 _ C5 F9: 76. C0 + vpand xmm1, xmm1, xmm2 ; 0717 _ C5 F1: DB. CA + vpxor xmm7, xmm3, xmm0 ; 071B _ C5 E1: EF. F8 + vpor xmm1, xmm1, xmm5 ; 071F _ C5 F1: EB. CD + vmovd xmm3, dword [ecx+2C20H] ; 0723 _ C5 F9: 6E. 99, 00002C20 + vpxor xmm1, xmm1, xmm7 ; 072B _ C5 F1: EF. CF + vpshufd xmm0, xmm4, 0 ; 072F _ C5 F9: 70. C4, 00 + vpxor xmm5, xmm5, xmm7 ; 0734 _ C5 D1: EF. EF + vmovd xmm4, dword [ecx+2C28H] ; 0738 _ C5 F9: 6E. A1, 00002C28 + vpxor xmm0, xmm6, xmm0 ; 0740 _ C5 C9: EF. C0 + vpshufd xmm3, xmm3, 0 ; 0744 _ C5 F9: 70. DB, 00 + vpshufd xmm4, xmm4, 0 ; 0749 _ C5 F9: 70. E4, 00 + vpxor xmm3, xmm2, xmm3 ; 074E _ C5 E9: EF. DB + vpxor xmm1, xmm1, xmm4 ; 0752 _ C5 F1: EF. CC + vpxor xmm4, xmm7, xmm6 ; 0756 _ C5 C1: EF. E6 + vpand xmm5, xmm5, xmm2 ; 075A _ C5 D1: DB. EA + vpxor xmm7, xmm4, xmm5 ; 075E _ C5 D9: EF. FD + vpslld xmm5, xmm3, 27 ; 0762 _ C5 D1: 72. F3, 1B + vmovd xmm4, dword [ecx+2C2CH] ; 0767 _ C5 F9: 6E. A1, 00002C2C + vpsrld xmm3, xmm3, 5 ; 076F _ C5 E1: 72. D3, 05 + vpshufd xmm2, xmm4, 0 ; 0774 _ C5 F9: 70. D4, 00 + vpslld xmm4, xmm1, 10 ; 0779 _ C5 D9: 72. F1, 0A + vpsrld xmm1, xmm1, 22 ; 077E _ C5 F1: 72. D1, 16 + vpxor xmm6, xmm7, xmm2 ; 0783 _ C5 C1: EF. F2 + vpor xmm2, xmm4, xmm1 ; 0787 _ C5 D9: EB. D1 + vpslld xmm4, xmm0, 7 ; 078B _ C5 D9: 72. F0, 07 + vpxor xmm1, xmm2, xmm6 ; 0790 _ C5 E9: EF. CE + vpor xmm3, xmm5, xmm3 ; 0794 _ C5 D1: EB. DB + vpxor xmm2, xmm1, xmm4 ; 0798 _ C5 F1: EF. D4 + vpxor xmm1, xmm3, xmm0 ; 079C _ C5 E1: EF. C8 + vpxor xmm7, xmm1, xmm6 ; 07A0 _ C5 F1: EF. FE + vpslld xmm4, xmm6, 25 ; 07A4 _ C5 D9: 72. F6, 19 + vpsrld xmm6, xmm6, 7 ; 07A9 _ C5 C9: 72. D6, 07 + vpslld xmm1, xmm7, 3 ; 07AE _ C5 F1: 72. F7, 03 + vpor xmm5, xmm4, xmm6 ; 07B3 _ C5 D9: EB. EE + vpslld xmm6, xmm0, 31 ; 07B7 _ C5 C9: 72. F0, 1F + vpxor xmm3, xmm5, xmm2 ; 07BC _ C5 D1: EF. DA + vpsrld xmm0, xmm0, 1 ; 07C0 _ C5 F9: 72. D0, 01 + vpxor xmm1, xmm3, xmm1 ; 07C5 _ C5 E1: EF. C9 + vpor xmm3, xmm6, xmm0 ; 07C9 _ C5 C9: EB. D8 + vpxor xmm0, xmm3, xmm7 ; 07CD _ C5 E1: EF. C7 + vpslld xmm6, xmm2, 29 ; 07D1 _ C5 C9: 72. F2, 1D + vpxor xmm4, xmm0, xmm2 ; 07D6 _ C5 F9: EF. E2 + vpsrld xmm2, xmm2, 3 ; 07DA _ C5 E9: 72. D2, 03 + vpor xmm6, xmm6, xmm2 ; 07DF _ C5 C9: EB. F2 + vpslld xmm5, xmm7, 19 ; 07E3 _ C5 D1: 72. F7, 13 + vpsrld xmm7, xmm7, 13 ; 07E8 _ C5 C1: 72. D7, 0D + vpxor xmm2, xmm4, xmm1 ; 07ED _ C5 D9: EF. D1 + vpor xmm0, xmm5, xmm7 ; 07F1 _ C5 D1: EB. C7 + vpand xmm3, xmm1, xmm2 ; 07F5 _ C5 F1: DB. DA + vpxor xmm4, xmm4, xmm6 ; 07F9 _ C5 D9: EF. E6 + vpxor xmm7, xmm3, xmm0 ; 07FD _ C5 E1: EF. F8 + vpor xmm1, xmm0, xmm2 ; 0801 _ C5 F9: EB. CA + vpxor xmm6, xmm6, xmm7 ; 0805 _ C5 C9: EF. F7 + vpxor xmm5, xmm2, xmm7 ; 0809 _ C5 E9: EF. EF + vpxor xmm2, xmm1, xmm4 ; 080D _ C5 F1: EF. D4 + vpor xmm3, xmm2, xmm6 ; 0811 _ C5 E9: EB. DE + vpor xmm0, xmm5, xmm7 ; 0815 _ C5 D1: EB. C7 + vpxor xmm1, xmm3, xmm5 ; 0819 _ C5 E1: EF. CD + vpxor xmm3, xmm0, xmm1 ; 081D _ C5 F9: EF. D9 + vpcmpeqd xmm0, xmm0, xmm0 ; 0821 _ C5 F9: 76. C0 + vpxor xmm4, xmm4, xmm0 ; 0825 _ C5 D9: EF. 
E0 + vpxor xmm2, xmm4, xmm3 ; 0829 _ C5 D9: EF. D3 + vpor xmm3, xmm3, xmm1 ; 082D _ C5 E1: EB. D9 + vmovd xmm4, dword [ecx+2C10H] ; 0831 _ C5 F9: 6E. A1, 00002C10 + vpshufd xmm5, xmm4, 0 ; 0839 _ C5 F9: 70. EC, 00 + vpxor xmm4, xmm2, xmm5 ; 083E _ C5 E9: EF. E5 + vmovd xmm5, dword [ecx+2C14H] ; 0842 _ C5 F9: 6E. A9, 00002C14 + vpshufd xmm5, xmm5, 0 ; 084A _ C5 F9: 70. ED, 00 + vpxor xmm5, xmm1, xmm5 ; 084F _ C5 F1: EF. ED + vpxor xmm1, xmm3, xmm1 ; 0853 _ C5 E1: EF. C9 + vpor xmm2, xmm1, xmm2 ; 0857 _ C5 F1: EB. D2 + vpxor xmm2, xmm7, xmm2 ; 085B _ C5 C1: EF. D2 + vmovd xmm7, dword [ecx+2C18H] ; 085F _ C5 F9: 6E. B9, 00002C18 + vmovd xmm1, dword [ecx+2C1CH] ; 0867 _ C5 F9: 6E. 89, 00002C1C + vpshufd xmm3, xmm7, 0 ; 086F _ C5 F9: 70. DF, 00 + vpshufd xmm7, xmm1, 0 ; 0874 _ C5 F9: 70. F9, 00 + vpxor xmm2, xmm2, xmm3 ; 0879 _ C5 E9: EF. D3 + vpxor xmm1, xmm6, xmm7 ; 087D _ C5 C9: EF. CF + vpslld xmm6, xmm2, 10 ; 0881 _ C5 C9: 72. F2, 0A + vpsrld xmm2, xmm2, 22 ; 0886 _ C5 E9: 72. D2, 16 + vpslld xmm7, xmm5, 7 ; 088B _ C5 C1: 72. F5, 07 + vpor xmm3, xmm6, xmm2 ; 0890 _ C5 C9: EB. DA + vpslld xmm2, xmm4, 27 ; 0894 _ C5 E9: 72. F4, 1B + vpsrld xmm4, xmm4, 5 ; 0899 _ C5 D9: 72. D4, 05 + vpxor xmm6, xmm3, xmm1 ; 089E _ C5 E1: EF. F1 + vpor xmm2, xmm2, xmm4 ; 08A2 _ C5 E9: EB. D4 + vpxor xmm7, xmm6, xmm7 ; 08A6 _ C5 C9: EF. FF + vpxor xmm3, xmm2, xmm5 ; 08AA _ C5 E9: EF. DD + vpslld xmm6, xmm1, 25 ; 08AE _ C5 C9: 72. F1, 19 + vpxor xmm3, xmm3, xmm1 ; 08B3 _ C5 E1: EF. D9 + vpsrld xmm1, xmm1, 7 ; 08B7 _ C5 F1: 72. D1, 07 + vpor xmm2, xmm6, xmm1 ; 08BC _ C5 C9: EB. D1 + vpslld xmm4, xmm5, 31 ; 08C0 _ C5 D9: 72. F5, 1F + vpsrld xmm5, xmm5, 1 ; 08C5 _ C5 D1: 72. D5, 01 + vpxor xmm1, xmm2, xmm7 ; 08CA _ C5 E9: EF. CF + vpslld xmm6, xmm3, 3 ; 08CE _ C5 C9: 72. F3, 03 + vpor xmm2, xmm4, xmm5 ; 08D3 _ C5 D9: EB. D5 + vpxor xmm1, xmm1, xmm6 ; 08D7 _ C5 F1: EF. CE + vpxor xmm6, xmm2, xmm3 ; 08DB _ C5 E9: EF. F3 + vpxor xmm4, xmm6, xmm7 ; 08DF _ C5 C9: EF. E7 + vpslld xmm5, xmm3, 19 ; 08E3 _ C5 D1: 72. F3, 13 + vpsrld xmm3, xmm3, 13 ; 08E8 _ C5 E1: 72. D3, 0D + vpslld xmm2, xmm7, 29 ; 08ED _ C5 E9: 72. F7, 1D + vpsrld xmm7, xmm7, 3 ; 08F2 _ C5 C1: 72. D7, 03 + vpor xmm5, xmm5, xmm3 ; 08F7 _ C5 D1: EB. EB + vpor xmm2, xmm2, xmm7 ; 08FB _ C5 E9: EB. D7 + vpxor xmm6, xmm4, xmm0 ; 08FF _ C5 D9: EF. F0 + vpxor xmm2, xmm2, xmm0 ; 0903 _ C5 E9: EF. D0 + vpor xmm3, xmm4, xmm5 ; 0907 _ C5 D9: EB. DD + vpxor xmm4, xmm3, xmm2 ; 090B _ C5 E1: EF. E2 + vpxor xmm7, xmm5, xmm6 ; 090F _ C5 D1: EF. FE + vpxor xmm4, xmm4, xmm1 ; 0913 _ C5 D9: EF. E1 + vpor xmm5, xmm2, xmm6 ; 0917 _ C5 E9: EB. EE + vpand xmm2, xmm7, xmm1 ; 091B _ C5 C1: DB. D1 + vpxor xmm5, xmm5, xmm7 ; 091F _ C5 D1: EF. EF + vpxor xmm7, xmm6, xmm2 ; 0923 _ C5 C9: EF. FA + vpor xmm6, xmm2, xmm4 ; 0927 _ C5 E9: EB. F4 + vpxor xmm3, xmm6, xmm5 ; 092B _ C5 C9: EF. DD + vpxor xmm1, xmm1, xmm7 ; 092F _ C5 F1: EF. CF + vpxor xmm2, xmm1, xmm3 ; 0933 _ C5 F1: EF. D3 + vmovd xmm1, dword [ecx+2C00H] ; 0937 _ C5 F9: 6E. 89, 00002C00 + vpxor xmm6, xmm2, xmm4 ; 093F _ C5 E9: EF. F4 + vpshufd xmm2, xmm1, 0 ; 0943 _ C5 F9: 70. D1, 00 + vpxor xmm2, xmm3, xmm2 ; 0948 _ C5 E1: EF. D2 + vpxor xmm3, xmm5, xmm4 ; 094C _ C5 D1: EF. DC + vpand xmm5, xmm3, xmm6 ; 0950 _ C5 E1: DB. EE + vpxor xmm3, xmm7, xmm5 ; 0954 _ C5 C1: EF. DD + vmovd xmm7, dword [ecx+2C04H] ; 0958 _ C5 F9: 6E. B9, 00002C04 + vmovd xmm5, dword [ecx+2C08H] ; 0960 _ C5 F9: 6E. A9, 00002C08 + vpshufd xmm1, xmm7, 0 ; 0968 _ C5 F9: 70. CF, 00 + vpshufd xmm7, xmm5, 0 ; 096D _ C5 F9: 70. 
FD, 00 + vpxor xmm3, xmm3, xmm1 ; 0972 _ C5 E1: EF. D9 + vpxor xmm7, xmm4, xmm7 ; 0976 _ C5 D9: EF. FF + vmovd xmm4, dword [ecx+2C0CH] ; 097A _ C5 F9: 6E. A1, 00002C0C + vpshufd xmm1, xmm4, 0 ; 0982 _ C5 F9: 70. CC, 00 + vpxor xmm5, xmm6, xmm1 ; 0987 _ C5 C9: EF. E9 + vpslld xmm6, xmm7, 10 ; 098B _ C5 C9: 72. F7, 0A + vpsrld xmm1, xmm7, 22 ; 0990 _ C5 F1: 72. D7, 16 + vpslld xmm7, xmm3, 7 ; 0995 _ C5 C1: 72. F3, 07 + vpor xmm6, xmm6, xmm1 ; 099A _ C5 C9: EB. F1 + vpslld xmm1, xmm2, 27 ; 099E _ C5 F1: 72. F2, 1B + vpsrld xmm2, xmm2, 5 ; 09A3 _ C5 E9: 72. D2, 05 + vpxor xmm4, xmm6, xmm5 ; 09A8 _ C5 C9: EF. E5 + vpor xmm2, xmm1, xmm2 ; 09AC _ C5 F1: EB. D2 + vpxor xmm6, xmm4, xmm7 ; 09B0 _ C5 D9: EF. F7 + vpxor xmm1, xmm2, xmm3 ; 09B4 _ C5 E9: EF. CB + vpslld xmm4, xmm5, 25 ; 09B8 _ C5 D9: 72. F5, 19 + vpxor xmm2, xmm1, xmm5 ; 09BD _ C5 F1: EF. D5 + vpsrld xmm5, xmm5, 7 ; 09C1 _ C5 D1: 72. D5, 07 + vpslld xmm7, xmm3, 31 ; 09C6 _ C5 C1: 72. F3, 1F + vpsrld xmm3, xmm3, 1 ; 09CB _ C5 E1: 72. D3, 01 + vpor xmm1, xmm4, xmm5 ; 09D0 _ C5 D9: EB. CD + vpor xmm3, xmm7, xmm3 ; 09D4 _ C5 C1: EB. DB + vpxor xmm4, xmm1, xmm6 ; 09D8 _ C5 F1: EF. E6 + vpslld xmm5, xmm2, 3 ; 09DC _ C5 D1: 72. F2, 03 + vpxor xmm1, xmm3, xmm2 ; 09E1 _ C5 E1: EF. CA + vpxor xmm5, xmm4, xmm5 ; 09E5 _ C5 D9: EF. ED + vpxor xmm7, xmm1, xmm6 ; 09E9 _ C5 F1: EF. FE + vpslld xmm4, xmm6, 29 ; 09ED _ C5 D9: 72. F6, 1D + vpsrld xmm6, xmm6, 3 ; 09F2 _ C5 C9: 72. D6, 03 + vpslld xmm3, xmm2, 19 ; 09F7 _ C5 E1: 72. F2, 13 + vpsrld xmm2, xmm2, 13 ; 09FC _ C5 E9: 72. D2, 0D + vpor xmm1, xmm4, xmm6 ; 0A01 _ C5 D9: EB. CE + vpor xmm6, xmm3, xmm2 ; 0A05 _ C5 E1: EB. F2 + vpor xmm2, xmm1, xmm5 ; 0A09 _ C5 F1: EB. D5 + vpxor xmm1, xmm1, xmm6 ; 0A0D _ C5 F1: EF. CE + vpand xmm4, xmm6, xmm5 ; 0A11 _ C5 C9: DB. E5 + vpxor xmm3, xmm1, xmm0 ; 0A15 _ C5 F1: EF. D8 + vpxor xmm5, xmm5, xmm7 ; 0A19 _ C5 D1: EF. EF + vpxor xmm1, xmm4, xmm3 ; 0A1D _ C5 D9: EF. CB + vpand xmm6, xmm3, xmm2 ; 0A21 _ C5 E1: DB. F2 + vpor xmm7, xmm7, xmm4 ; 0A25 _ C5 C1: EB. FC + vpand xmm3, xmm5, xmm2 ; 0A29 _ C5 D1: DB. DA + vpxor xmm4, xmm7, xmm6 ; 0A2D _ C5 C1: EF. E6 + vpxor xmm7, xmm6, xmm1 ; 0A31 _ C5 C9: EF. F9 + vpxor xmm5, xmm2, xmm4 ; 0A35 _ C5 E9: EF. EC + vpor xmm2, xmm1, xmm7 ; 0A39 _ C5 F1: EB. D7 + vpxor xmm1, xmm2, xmm3 ; 0A3D _ C5 E9: EF. CB + vpxor xmm2, xmm3, xmm5 ; 0A41 _ C5 E1: EF. D5 + vmovd xmm3, dword [ecx+2BF0H] ; 0A45 _ C5 F9: 6E. 99, 00002BF0 + vpxor xmm6, xmm2, xmm7 ; 0A4D _ C5 E9: EF. F7 + vpshufd xmm2, xmm3, 0 ; 0A51 _ C5 F9: 70. D3, 00 + vmovd xmm3, dword [ecx+2BF4H] ; 0A56 _ C5 F9: 6E. 99, 00002BF4 + vpxor xmm6, xmm6, xmm2 ; 0A5E _ C5 C9: EF. F2 + vpshufd xmm2, xmm3, 0 ; 0A62 _ C5 F9: 70. D3, 00 + vmovd xmm3, dword [ecx+2BF8H] ; 0A67 _ C5 F9: 6E. 99, 00002BF8 + vpxor xmm2, xmm1, xmm2 ; 0A6F _ C5 F1: EF. D2 + vpshufd xmm3, xmm3, 0 ; 0A73 _ C5 F9: 70. DB, 00 + vpor xmm1, xmm5, xmm1 ; 0A78 _ C5 D1: EB. C9 + vmovd xmm5, dword [ecx+2BFCH] ; 0A7C _ C5 F9: 6E. A9, 00002BFC + vpxor xmm4, xmm4, xmm3 ; 0A84 _ C5 D9: EF. E3 + vpshufd xmm3, xmm5, 0 ; 0A88 _ C5 F9: 70. DD, 00 + vpxor xmm7, xmm1, xmm7 ; 0A8D _ C5 F1: EF. FF + vpslld xmm1, xmm4, 10 ; 0A91 _ C5 F1: 72. F4, 0A + vpsrld xmm4, xmm4, 22 ; 0A96 _ C5 D9: 72. D4, 16 + vpxor xmm5, xmm7, xmm3 ; 0A9B _ C5 C1: EF. EB + vpor xmm3, xmm1, xmm4 ; 0A9F _ C5 F1: EB. DC + vpxor xmm1, xmm3, xmm5 ; 0AA3 _ C5 E1: EF. CD + vpslld xmm4, xmm2, 7 ; 0AA7 _ C5 D9: 72. F2, 07 + vpslld xmm7, xmm6, 27 ; 0AAC _ C5 C1: 72. F6, 1B + vpsrld xmm6, xmm6, 5 ; 0AB1 _ C5 C9: 72. D6, 05 + vpxor xmm3, xmm1, xmm4 ; 0AB6 _ C5 F1: EF. 
DC + vpor xmm1, xmm7, xmm6 ; 0ABA _ C5 C1: EB. CE + vpxor xmm6, xmm1, xmm2 ; 0ABE _ C5 F1: EF. F2 + vpslld xmm4, xmm5, 25 ; 0AC2 _ C5 D9: 72. F5, 19 + vpxor xmm7, xmm6, xmm5 ; 0AC7 _ C5 C9: EF. FD + vpsrld xmm5, xmm5, 7 ; 0ACB _ C5 D1: 72. D5, 07 + vpor xmm1, xmm4, xmm5 ; 0AD0 _ C5 D9: EB. CD + vpslld xmm4, xmm7, 3 ; 0AD4 _ C5 D9: 72. F7, 03 + vpxor xmm6, xmm1, xmm3 ; 0AD9 _ C5 F1: EF. F3 + vpslld xmm1, xmm2, 31 ; 0ADD _ C5 F1: 72. F2, 1F + vpsrld xmm2, xmm2, 1 ; 0AE2 _ C5 E9: 72. D2, 01 + vpxor xmm5, xmm6, xmm4 ; 0AE7 _ C5 C9: EF. EC + vpor xmm2, xmm1, xmm2 ; 0AEB _ C5 F1: EB. D2 + vpslld xmm6, xmm3, 29 ; 0AEF _ C5 C9: 72. F3, 1D + vpxor xmm1, xmm2, xmm7 ; 0AF4 _ C5 E9: EF. CF + vpslld xmm2, xmm7, 19 ; 0AF8 _ C5 E9: 72. F7, 13 + vpxor xmm1, xmm1, xmm3 ; 0AFD _ C5 F1: EF. CB + vpsrld xmm3, xmm3, 3 ; 0B01 _ C5 E1: 72. D3, 03 + vpsrld xmm7, xmm7, 13 ; 0B06 _ C5 C1: 72. D7, 0D + vpor xmm4, xmm6, xmm3 ; 0B0B _ C5 C9: EB. E3 + vpor xmm2, xmm2, xmm7 ; 0B0F _ C5 E9: EB. D7 + vpxor xmm7, xmm5, xmm1 ; 0B13 _ C5 D1: EF. F9 + vpxor xmm2, xmm2, xmm4 ; 0B17 _ C5 E9: EF. D4 + vpand xmm3, xmm4, xmm2 ; 0B1B _ C5 D9: DB. DA + vpxor xmm0, xmm3, xmm0 ; 0B1F _ C5 E1: EF. C0 + vpxor xmm3, xmm0, xmm7 ; 0B23 _ C5 F9: EF. DF + vpxor xmm0, xmm4, xmm5 ; 0B27 _ C5 D9: EF. C5 + vpor xmm6, xmm0, xmm2 ; 0B2B _ C5 F9: EB. F2 + vpxor xmm4, xmm2, xmm3 ; 0B2F _ C5 E9: EF. E3 + vpxor xmm0, xmm7, xmm6 ; 0B33 _ C5 C1: EF. C6 + vpxor xmm6, xmm6, xmm1 ; 0B37 _ C5 C9: EF. F1 + vmovd xmm7, dword [ecx+2BE0H] ; 0B3B _ C5 F9: 6E. B9, 00002BE0 + vpand xmm5, xmm1, xmm0 ; 0B43 _ C5 F1: DB. E8 + vpshufd xmm2, xmm7, 0 ; 0B47 _ C5 F9: 70. D7, 00 + vpxor xmm5, xmm5, xmm4 ; 0B4C _ C5 D1: EF. EC + vpxor xmm7, xmm5, xmm2 ; 0B50 _ C5 D1: EF. FA + vpxor xmm4, xmm4, xmm0 ; 0B54 _ C5 D9: EF. E0 + vmovd xmm2, dword [ecx+2BE4H] ; 0B58 _ C5 F9: 6E. 91, 00002BE4 + vpor xmm1, xmm4, xmm3 ; 0B60 _ C5 D9: EB. CB + vpshufd xmm2, xmm2, 0 ; 0B64 _ C5 F9: 70. D2, 00 + vpxor xmm0, xmm0, xmm5 ; 0B69 _ C5 F9: EF. C5 + vpxor xmm2, xmm3, xmm2 ; 0B6D _ C5 E1: EF. D2 + vpxor xmm3, xmm6, xmm1 ; 0B71 _ C5 C9: EF. D9 + vmovd xmm6, dword [ecx+2BE8H] ; 0B75 _ C5 F9: 6E. B1, 00002BE8 + vpslld xmm4, xmm2, 7 ; 0B7D _ C5 D9: 72. F2, 07 + vpshufd xmm1, xmm6, 0 ; 0B82 _ C5 F9: 70. CE, 00 + vpxor xmm1, xmm3, xmm1 ; 0B87 _ C5 E1: EF. C9 + vmovd xmm3, dword [ecx+2BECH] ; 0B8B _ C5 F9: 6E. 99, 00002BEC + vpslld xmm5, xmm1, 10 ; 0B93 _ C5 D1: 72. F1, 0A + vpshufd xmm6, xmm3, 0 ; 0B98 _ C5 F9: 70. F3, 00 + vpsrld xmm3, xmm1, 22 ; 0B9D _ C5 E1: 72. D1, 16 + vpxor xmm0, xmm0, xmm6 ; 0BA2 _ C5 F9: EF. C6 + vpor xmm1, xmm5, xmm3 ; 0BA6 _ C5 D1: EB. CB + vpslld xmm5, xmm7, 27 ; 0BAA _ C5 D1: 72. F7, 1B + vpsrld xmm7, xmm7, 5 ; 0BAF _ C5 C1: 72. D7, 05 + vpxor xmm6, xmm1, xmm0 ; 0BB4 _ C5 F1: EF. F0 + vpor xmm3, xmm5, xmm7 ; 0BB8 _ C5 D1: EB. DF + vpxor xmm1, xmm6, xmm4 ; 0BBC _ C5 C9: EF. CC + vpxor xmm6, xmm3, xmm2 ; 0BC0 _ C5 E1: EF. F2 + vpxor xmm3, xmm6, xmm0 ; 0BC4 _ C5 C9: EF. D8 + vpslld xmm4, xmm0, 25 ; 0BC8 _ C5 D9: 72. F0, 19 + vpsrld xmm0, xmm0, 7 ; 0BCD _ C5 F9: 72. D0, 07 + vpslld xmm6, xmm3, 19 ; 0BD2 _ C5 C9: 72. F3, 13 + vpor xmm5, xmm4, xmm0 ; 0BD7 _ C5 D9: EB. E8 + vpslld xmm0, xmm3, 3 ; 0BDB _ C5 F9: 72. F3, 03 + vpxor xmm7, xmm5, xmm1 ; 0BE0 _ C5 D1: EF. F9 + vpslld xmm5, xmm2, 31 ; 0BE4 _ C5 D1: 72. F2, 1F + vpsrld xmm2, xmm2, 1 ; 0BE9 _ C5 E9: 72. D2, 01 + vpsrld xmm4, xmm3, 13 ; 0BEE _ C5 D9: 72. D3, 0D + vpor xmm2, xmm5, xmm2 ; 0BF3 _ C5 D1: EB. D2 + vpxor xmm0, xmm7, xmm0 ; 0BF7 _ C5 C1: EF. C0 + vpxor xmm3, xmm2, xmm3 ; 0BFB _ C5 E9: EF. 
DB + vpor xmm6, xmm6, xmm4 ; 0BFF _ C5 C9: EB. F4 + vpxor xmm4, xmm3, xmm1 ; 0C03 _ C5 E1: EF. E1 + vpcmpeqd xmm5, xmm5, xmm5 ; 0C07 _ C5 D1: 76. ED + vpslld xmm7, xmm1, 29 ; 0C0B _ C5 C1: 72. F1, 1D + vpsrld xmm1, xmm1, 3 ; 0C10 _ C5 F1: 72. D1, 03 + vpxor xmm2, xmm4, xmm5 ; 0C15 _ C5 D9: EF. D5 + vpor xmm3, xmm7, xmm1 ; 0C19 _ C5 C1: EB. D9 + vpxor xmm4, xmm3, xmm2 ; 0C1D _ C5 E1: EF. E2 + vpor xmm1, xmm0, xmm6 ; 0C21 _ C5 F9: EB. CE + vpxor xmm3, xmm1, xmm4 ; 0C25 _ C5 F1: EF. DC + vpxor xmm1, xmm0, xmm3 ; 0C29 _ C5 F9: EF. CB + vpor xmm0, xmm4, xmm2 ; 0C2D _ C5 D9: EB. C2 + vpand xmm7, xmm0, xmm6 ; 0C31 _ C5 F9: DB. FE + vpxor xmm4, xmm7, xmm1 ; 0C35 _ C5 C1: EF. E1 + vpor xmm1, xmm1, xmm6 ; 0C39 _ C5 F1: EB. CE + vpand xmm0, xmm2, xmm4 ; 0C3D _ C5 E9: DB. C4 + vpxor xmm2, xmm1, xmm2 ; 0C41 _ C5 F1: EF. D2 + vpxor xmm7, xmm0, xmm3 ; 0C45 _ C5 F9: EF. FB + vpxor xmm1, xmm2, xmm4 ; 0C49 _ C5 E9: EF. CC + vmovd xmm0, dword [ecx+2BD0H] ; 0C4D _ C5 F9: 6E. 81, 00002BD0 + vpxor xmm2, xmm1, xmm7 ; 0C55 _ C5 F1: EF. D7 + vpand xmm3, xmm3, xmm1 ; 0C59 _ C5 E1: DB. D9 + vpxor xmm5, xmm2, xmm5 ; 0C5D _ C5 E9: EF. ED + vpshufd xmm0, xmm0, 0 ; 0C61 _ C5 F9: 70. C0, 00 + vpxor xmm2, xmm3, xmm2 ; 0C66 _ C5 E1: EF. D2 + vpxor xmm0, xmm7, xmm0 ; 0C6A _ C5 C1: EF. C0 + vpxor xmm2, xmm2, xmm6 ; 0C6E _ C5 E9: EF. D6 + vmovd xmm7, dword [ecx+2BD4H] ; 0C72 _ C5 F9: 6E. B9, 00002BD4 + vmovd xmm6, dword [ecx+2BD8H] ; 0C7A _ C5 F9: 6E. B1, 00002BD8 + vpshufd xmm7, xmm7, 0 ; 0C82 _ C5 F9: 70. FF, 00 + vpshufd xmm3, xmm6, 0 ; 0C87 _ C5 F9: 70. DE, 00 + vpxor xmm5, xmm5, xmm7 ; 0C8C _ C5 D1: EF. EF + vpxor xmm7, xmm2, xmm3 ; 0C90 _ C5 E9: EF. FB + vmovd xmm1, dword [ecx+2BDCH] ; 0C94 _ C5 F9: 6E. 89, 00002BDC + vpslld xmm2, xmm7, 10 ; 0C9C _ C5 E9: 72. F7, 0A + vpsrld xmm3, xmm7, 22 ; 0CA1 _ C5 E1: 72. D7, 16 + vpslld xmm7, xmm5, 7 ; 0CA6 _ C5 C1: 72. F5, 07 + vpshufd xmm6, xmm1, 0 ; 0CAB _ C5 F9: 70. F1, 00 + vpor xmm1, xmm2, xmm3 ; 0CB0 _ C5 E9: EB. CB + vpslld xmm2, xmm0, 27 ; 0CB4 _ C5 E9: 72. F0, 1B + vpsrld xmm0, xmm0, 5 ; 0CB9 _ C5 F9: 72. D0, 05 + vpor xmm2, xmm2, xmm0 ; 0CBE _ C5 E9: EB. D0 + vpxor xmm4, xmm4, xmm6 ; 0CC2 _ C5 D9: EF. E6 + vpxor xmm3, xmm2, xmm5 ; 0CC6 _ C5 E9: EF. DD + vpxor xmm6, xmm1, xmm4 ; 0CCA _ C5 F1: EF. F4 + vpxor xmm1, xmm3, xmm4 ; 0CCE _ C5 E1: EF. CC + vpslld xmm0, xmm4, 25 ; 0CD2 _ C5 F9: 72. F4, 19 + vpsrld xmm4, xmm4, 7 ; 0CD7 _ C5 D9: 72. D4, 07 + vpxor xmm7, xmm6, xmm7 ; 0CDC _ C5 C9: EF. FF + vpor xmm2, xmm0, xmm4 ; 0CE0 _ C5 F9: EB. D4 + vpslld xmm0, xmm1, 3 ; 0CE4 _ C5 F9: 72. F1, 03 + vpxor xmm3, xmm2, xmm7 ; 0CE9 _ C5 E9: EF. DF + vpslld xmm6, xmm5, 31 ; 0CED _ C5 C9: 72. F5, 1F + vpsrld xmm5, xmm5, 1 ; 0CF2 _ C5 D1: 72. D5, 01 + vpxor xmm4, xmm3, xmm0 ; 0CF7 _ C5 E1: EF. E0 + vpor xmm2, xmm6, xmm5 ; 0CFB _ C5 C9: EB. D5 + vpslld xmm0, xmm7, 29 ; 0CFF _ C5 F9: 72. F7, 1D + vpsrld xmm5, xmm7, 3 ; 0D04 _ C5 D1: 72. D7, 03 + vpxor xmm3, xmm2, xmm1 ; 0D09 _ C5 E9: EF. D9 + vpor xmm0, xmm0, xmm5 ; 0D0D _ C5 F9: EB. C5 + vpxor xmm6, xmm3, xmm7 ; 0D11 _ C5 E1: EF. F7 + vpslld xmm7, xmm1, 19 ; 0D15 _ C5 C1: 72. F1, 13 + vpsrld xmm1, xmm1, 13 ; 0D1A _ C5 F1: 72. D1, 0D + vpand xmm3, xmm0, xmm4 ; 0D1F _ C5 F9: DB. DC + vpor xmm2, xmm7, xmm1 ; 0D23 _ C5 C1: EB. D1 + vpxor xmm1, xmm3, xmm6 ; 0D27 _ C5 E1: EF. CE + vpor xmm6, xmm6, xmm4 ; 0D2B _ C5 C9: EB. F4 + vpand xmm3, xmm6, xmm2 ; 0D2F _ C5 C9: DB. DA + vpxor xmm0, xmm0, xmm1 ; 0D33 _ C5 F9: EF. C1 + vpxor xmm5, xmm0, xmm3 ; 0D37 _ C5 F9: EF. EB + vpcmpeqd xmm7, xmm7, xmm7 ; 0D3B _ C5 C1: 76. 
FF + vpxor xmm0, xmm4, xmm5 ; 0D3F _ C5 D9: EF. C5 + vpand xmm4, xmm3, xmm1 ; 0D43 _ C5 E1: DB. E1 + vpxor xmm2, xmm2, xmm7 ; 0D47 _ C5 E9: EF. D7 + vpxor xmm6, xmm4, xmm0 ; 0D4B _ C5 D9: EF. F0 + vpxor xmm3, xmm2, xmm6 ; 0D4F _ C5 E9: EF. DE + vpand xmm2, xmm0, xmm2 ; 0D53 _ C5 F9: DB. D2 + vpxor xmm4, xmm2, xmm1 ; 0D57 _ C5 E9: EF. E1 + vpand xmm1, xmm1, xmm3 ; 0D5B _ C5 F1: DB. CB + vmovd xmm2, dword [ecx+2BC0H] ; 0D5F _ C5 F9: 6E. 91, 00002BC0 + vpxor xmm7, xmm4, xmm3 ; 0D67 _ C5 D9: EF. FB + vpshufd xmm0, xmm2, 0 ; 0D6B _ C5 F9: 70. C2, 00 + vpxor xmm2, xmm7, xmm3 ; 0D70 _ C5 C1: EF. D3 + vpxor xmm4, xmm3, xmm0 ; 0D74 _ C5 E1: EF. E0 + vmovd xmm0, dword [ecx+2BC4H] ; 0D78 _ C5 F9: 6E. 81, 00002BC4 + vpshufd xmm0, xmm0, 0 ; 0D80 _ C5 F9: 70. C0, 00 + vpxor xmm0, xmm2, xmm0 ; 0D85 _ C5 E9: EF. C0 + vpxor xmm2, xmm1, xmm5 ; 0D89 _ C5 F1: EF. D5 + vmovd xmm1, dword [ecx+2BC8H] ; 0D8D _ C5 F9: 6E. 89, 00002BC8 + vpor xmm3, xmm2, xmm7 ; 0D95 _ C5 E9: EB. DF + vmovd xmm2, dword [ecx+2BCCH] ; 0D99 _ C5 F9: 6E. 91, 00002BCC + vpxor xmm6, xmm3, xmm6 ; 0DA1 _ C5 E1: EF. F6 + vpshufd xmm7, xmm1, 0 ; 0DA5 _ C5 F9: 70. F9, 00 + vpshufd xmm3, xmm2, 0 ; 0DAA _ C5 F9: 70. DA, 00 + vpxor xmm1, xmm6, xmm7 ; 0DAF _ C5 C9: EF. CF + vpxor xmm6, xmm5, xmm3 ; 0DB3 _ C5 D1: EF. F3 + vpslld xmm5, xmm1, 10 ; 0DB7 _ C5 D1: 72. F1, 0A + vpsrld xmm2, xmm1, 22 ; 0DBC _ C5 E9: 72. D1, 16 + vpslld xmm7, xmm4, 27 ; 0DC1 _ C5 C1: 72. F4, 1B + vpor xmm3, xmm5, xmm2 ; 0DC6 _ C5 D1: EB. DA + vpsrld xmm4, xmm4, 5 ; 0DCA _ C5 D9: 72. D4, 05 + vpxor xmm1, xmm3, xmm6 ; 0DCF _ C5 E1: EF. CE + vpslld xmm5, xmm0, 7 ; 0DD3 _ C5 D1: 72. F0, 07 + vpor xmm3, xmm7, xmm4 ; 0DD8 _ C5 C1: EB. DC + vpxor xmm2, xmm1, xmm5 ; 0DDC _ C5 F1: EF. D5 + vpxor xmm1, xmm3, xmm0 ; 0DE0 _ C5 E1: EF. C8 + vpslld xmm4, xmm6, 25 ; 0DE4 _ C5 D9: 72. F6, 19 + vpxor xmm3, xmm1, xmm6 ; 0DE9 _ C5 F1: EF. DE + vpsrld xmm6, xmm6, 7 ; 0DED _ C5 C9: 72. D6, 07 + vpor xmm1, xmm4, xmm6 ; 0DF2 _ C5 D9: EB. CE + vpslld xmm6, xmm0, 31 ; 0DF6 _ C5 C9: 72. F0, 1F + vpsrld xmm0, xmm0, 1 ; 0DFB _ C5 F9: 72. D0, 01 + vpxor xmm4, xmm1, xmm2 ; 0E00 _ C5 F1: EF. E2 + vpslld xmm5, xmm3, 3 ; 0E04 _ C5 D1: 72. F3, 03 + vpor xmm0, xmm6, xmm0 ; 0E09 _ C5 C9: EB. C0 + vpxor xmm1, xmm4, xmm5 ; 0E0D _ C5 D9: EF. CD + vpxor xmm4, xmm0, xmm3 ; 0E11 _ C5 F9: EF. E3 + vpxor xmm0, xmm4, xmm2 ; 0E15 _ C5 D9: EF. C2 + vpslld xmm4, xmm2, 29 ; 0E19 _ C5 D9: 72. F2, 1D + vpsrld xmm5, xmm2, 3 ; 0E1E _ C5 D1: 72. D2, 03 + vpslld xmm2, xmm3, 19 ; 0E23 _ C5 E9: 72. F3, 13 + vpor xmm6, xmm4, xmm5 ; 0E28 _ C5 D9: EB. F5 + vpsrld xmm3, xmm3, 13 ; 0E2C _ C5 E1: 72. D3, 0D + vpxor xmm4, xmm6, xmm0 ; 0E31 _ C5 C9: EF. E0 + vpor xmm7, xmm2, xmm3 ; 0E35 _ C5 E9: EB. FB + vpxor xmm2, xmm7, xmm4 ; 0E39 _ C5 C1: EF. D4 + vpand xmm6, xmm6, xmm4 ; 0E3D _ C5 C9: DB. F4 + vpxor xmm6, xmm6, xmm2 ; 0E41 _ C5 C9: EF. F2 + vpand xmm5, xmm2, xmm0 ; 0E45 _ C5 E9: DB. E8 + vpor xmm7, xmm1, xmm6 ; 0E49 _ C5 F1: EB. FE + vpxor xmm0, xmm0, xmm1 ; 0E4D _ C5 F9: EF. C1 + vpxor xmm2, xmm5, xmm7 ; 0E51 _ C5 D1: EF. D7 + vpxor xmm5, xmm0, xmm6 ; 0E55 _ C5 F9: EF. EE + vpxor xmm4, xmm4, xmm7 ; 0E59 _ C5 D9: EF. E7 + vpxor xmm3, xmm5, xmm2 ; 0E5D _ C5 D1: EF. DA + vpand xmm0, xmm7, xmm4 ; 0E61 _ C5 C1: DB. C4 + vpor xmm7, xmm3, xmm4 ; 0E65 _ C5 E1: EB. FC + vpxor xmm3, xmm7, xmm6 ; 0E69 _ C5 C1: EF. DE + vpxor xmm1, xmm0, xmm5 ; 0E6D _ C5 F9: EF. CD + vmovd xmm6, dword [ecx+2BB0H] ; 0E71 _ C5 F9: 6E. B1, 00002BB0 + vpshufd xmm6, xmm6, 0 ; 0E79 _ C5 F9: 70. F6, 00 + vpxor xmm5, xmm4, xmm6 ; 0E7E _ C5 D9: EF. 
EE + vmovd xmm4, dword [ecx+2BB4H] ; 0E82 _ C5 F9: 6E. A1, 00002BB4 + vpshufd xmm6, xmm4, 0 ; 0E8A _ C5 F9: 70. F4, 00 + vmovd xmm4, dword [ecx+2BB8H] ; 0E8F _ C5 F9: 6E. A1, 00002BB8 + vpxor xmm0, xmm3, xmm6 ; 0E97 _ C5 E1: EF. C6 + vpshufd xmm7, xmm4, 0 ; 0E9B _ C5 F9: 70. FC, 00 + vmovd xmm6, dword [ecx+2BBCH] ; 0EA0 _ C5 F9: 6E. B1, 00002BBC + vpxor xmm4, xmm1, xmm7 ; 0EA8 _ C5 F1: EF. E7 + vpxor xmm1, xmm2, xmm1 ; 0EAC _ C5 E9: EF. C9 + vpxor xmm2, xmm1, xmm3 ; 0EB0 _ C5 F1: EF. D3 + vpslld xmm1, xmm4, 10 ; 0EB4 _ C5 F1: 72. F4, 0A + vpshufd xmm3, xmm6, 0 ; 0EB9 _ C5 F9: 70. DE, 00 + vpsrld xmm4, xmm4, 22 ; 0EBE _ C5 D9: 72. D4, 16 + vpxor xmm6, xmm2, xmm3 ; 0EC3 _ C5 E9: EF. F3 + vpor xmm7, xmm1, xmm4 ; 0EC7 _ C5 F1: EB. FC + vpxor xmm4, xmm7, xmm6 ; 0ECB _ C5 C1: EF. E6 + vpslld xmm2, xmm0, 7 ; 0ECF _ C5 E9: 72. F0, 07 + vpslld xmm3, xmm5, 27 ; 0ED4 _ C5 E1: 72. F5, 1B + vpsrld xmm5, xmm5, 5 ; 0ED9 _ C5 D1: 72. D5, 05 + vpxor xmm7, xmm4, xmm2 ; 0EDE _ C5 D9: EF. FA + vpor xmm4, xmm3, xmm5 ; 0EE2 _ C5 E1: EB. E5 + vpxor xmm5, xmm4, xmm0 ; 0EE6 _ C5 D9: EF. E8 + vpslld xmm1, xmm6, 25 ; 0EEA _ C5 F1: 72. F6, 19 + vpxor xmm5, xmm5, xmm6 ; 0EEF _ C5 D1: EF. EE + vpsrld xmm6, xmm6, 7 ; 0EF3 _ C5 C9: 72. D6, 07 + vpor xmm6, xmm1, xmm6 ; 0EF8 _ C5 F1: EB. F6 + vpslld xmm3, xmm0, 31 ; 0EFC _ C5 E1: 72. F0, 1F + vpsrld xmm0, xmm0, 1 ; 0F01 _ C5 F9: 72. D0, 01 + vpxor xmm4, xmm6, xmm7 ; 0F06 _ C5 C9: EF. E7 + vpslld xmm2, xmm5, 3 ; 0F0A _ C5 E9: 72. F5, 03 + vpor xmm6, xmm3, xmm0 ; 0F0F _ C5 E1: EB. F0 + vpxor xmm1, xmm4, xmm2 ; 0F13 _ C5 D9: EF. CA + vpxor xmm4, xmm6, xmm5 ; 0F17 _ C5 C9: EF. E5 + vpslld xmm0, xmm5, 19 ; 0F1B _ C5 F9: 72. F5, 13 + vpsrld xmm5, xmm5, 13 ; 0F20 _ C5 D1: 72. D5, 0D + vpslld xmm2, xmm7, 29 ; 0F25 _ C5 E9: 72. F7, 1D + vpsrld xmm3, xmm7, 3 ; 0F2A _ C5 E1: 72. D7, 03 + vpxor xmm6, xmm4, xmm7 ; 0F2F _ C5 D9: EF. F7 + vpor xmm5, xmm0, xmm5 ; 0F33 _ C5 F9: EB. ED + vpor xmm7, xmm2, xmm3 ; 0F37 _ C5 E9: EB. FB + vpxor xmm0, xmm1, xmm5 ; 0F3B _ C5 F1: EF. C5 + vpxor xmm2, xmm7, xmm1 ; 0F3F _ C5 C1: EF. D1 + vpand xmm4, xmm0, xmm2 ; 0F43 _ C5 F9: DB. E2 + vpxor xmm4, xmm4, xmm6 ; 0F47 _ C5 D9: EF. E6 + vpor xmm6, xmm6, xmm2 ; 0F4B _ C5 C9: EB. F2 + vpxor xmm3, xmm2, xmm4 ; 0F4F _ C5 E9: EF. DC + vpand xmm1, xmm0, xmm4 ; 0F53 _ C5 F9: DB. CC + vpcmpeqd xmm2, xmm2, xmm2 ; 0F57 _ C5 E9: 76. D2 + vpxor xmm6, xmm6, xmm0 ; 0F5B _ C5 C9: EF. F0 + vpand xmm0, xmm1, xmm5 ; 0F5F _ C5 F1: DB. C5 + vpxor xmm7, xmm4, xmm2 ; 0F63 _ C5 D9: EF. FA + vmovd xmm4, dword [ecx+2BA0H] ; 0F67 _ C5 F9: 6E. A1, 00002BA0 + vpxor xmm1, xmm0, xmm3 ; 0F6F _ C5 F9: EF. CB + vpshufd xmm0, xmm4, 0 ; 0F73 _ C5 F9: 70. C4, 00 + vpand xmm3, xmm3, xmm6 ; 0F78 _ C5 E1: DB. DE + vmovd xmm4, dword [ecx+2BA4H] ; 0F7C _ C5 F9: 6E. A1, 00002BA4 + vpxor xmm2, xmm6, xmm0 ; 0F84 _ C5 C9: EF. D0 + vpshufd xmm0, xmm4, 0 ; 0F88 _ C5 F9: 70. C4, 00 + vpor xmm4, xmm3, xmm5 ; 0F8D _ C5 E1: EB. E5 + vmovd xmm3, dword [ecx+2BA8H] ; 0F91 _ C5 F9: 6E. 99, 00002BA8 + vpxor xmm4, xmm4, xmm7 ; 0F99 _ C5 D9: EF. E7 + vpshufd xmm3, xmm3, 0 ; 0F9D _ C5 F9: 70. DB, 00 + vpxor xmm5, xmm5, xmm7 ; 0FA2 _ C5 D1: EF. EF + vpxor xmm4, xmm4, xmm3 ; 0FA6 _ C5 D9: EF. E3 + vpxor xmm3, xmm7, xmm1 ; 0FAA _ C5 C1: EF. D9 + vpand xmm7, xmm5, xmm6 ; 0FAE _ C5 D1: DB. FE + vpxor xmm0, xmm1, xmm0 ; 0FB2 _ C5 F1: EF. C0 + vmovd xmm6, dword [ecx+2BACH] ; 0FB6 _ C5 F9: 6E. B1, 00002BAC + vpxor xmm5, xmm3, xmm7 ; 0FBE _ C5 E1: EF. EF + vpshufd xmm3, xmm6, 0 ; 0FC2 _ C5 F9: 70. DE, 00 + vpslld xmm1, xmm4, 10 ; 0FC7 _ C5 F1: 72. 
F4, 0A + vpsrld xmm4, xmm4, 22 ; 0FCC _ C5 D9: 72. D4, 16 + vpxor xmm6, xmm5, xmm3 ; 0FD1 _ C5 D1: EF. F3 + vpor xmm7, xmm1, xmm4 ; 0FD5 _ C5 F1: EB. FC + vpslld xmm5, xmm0, 7 ; 0FD9 _ C5 D1: 72. F0, 07 + vpxor xmm4, xmm7, xmm6 ; 0FDE _ C5 C1: EF. E6 + vpslld xmm3, xmm2, 27 ; 0FE2 _ C5 E1: 72. F2, 1B + vpsrld xmm2, xmm2, 5 ; 0FE7 _ C5 E9: 72. D2, 05 + vpxor xmm4, xmm4, xmm5 ; 0FEC _ C5 D9: EF. E5 + vpor xmm5, xmm3, xmm2 ; 0FF0 _ C5 E1: EB. EA + vpslld xmm3, xmm6, 25 ; 0FF4 _ C5 E1: 72. F6, 19 + vpxor xmm2, xmm5, xmm0 ; 0FF9 _ C5 D1: EF. D0 + vpxor xmm1, xmm2, xmm6 ; 0FFD _ C5 E9: EF. CE + vpsrld xmm6, xmm6, 7 ; 1001 _ C5 C9: 72. D6, 07 + vpor xmm7, xmm3, xmm6 ; 1006 _ C5 E1: EB. FE + vpslld xmm5, xmm1, 3 ; 100A _ C5 D1: 72. F1, 03 + vpxor xmm6, xmm7, xmm4 ; 100F _ C5 C1: EF. F4 + vpslld xmm2, xmm0, 31 ; 1013 _ C5 E9: 72. F0, 1F + vpsrld xmm0, xmm0, 1 ; 1018 _ C5 F9: 72. D0, 01 + vpxor xmm3, xmm6, xmm5 ; 101D _ C5 C9: EF. DD + vpor xmm6, xmm2, xmm0 ; 1021 _ C5 E9: EB. F0 + vpslld xmm5, xmm4, 29 ; 1025 _ C5 D1: 72. F4, 1D + vpxor xmm0, xmm6, xmm1 ; 102A _ C5 C9: EF. C1 + vpslld xmm2, xmm1, 19 ; 102E _ C5 E9: 72. F1, 13 + vpxor xmm6, xmm0, xmm4 ; 1033 _ C5 F9: EF. F4 + vpsrld xmm4, xmm4, 3 ; 1037 _ C5 D9: 72. D4, 03 + vpsrld xmm1, xmm1, 13 ; 103C _ C5 F1: 72. D1, 0D + vpor xmm7, xmm5, xmm4 ; 1041 _ C5 D1: EB. FC + vpor xmm0, xmm2, xmm1 ; 1045 _ C5 E9: EB. C1 + vpxor xmm1, xmm6, xmm3 ; 1049 _ C5 C9: EF. CB + vpxor xmm5, xmm6, xmm7 ; 104D _ C5 C9: EF. EF + vpand xmm6, xmm3, xmm1 ; 1051 _ C5 E1: DB. F1 + vpxor xmm4, xmm6, xmm0 ; 1055 _ C5 C9: EF. E0 + vpor xmm0, xmm0, xmm1 ; 1059 _ C5 F9: EB. C1 + vpxor xmm7, xmm7, xmm4 ; 105D _ C5 C1: EF. FC + vpxor xmm2, xmm0, xmm5 ; 1061 _ C5 F9: EF. D5 + vpxor xmm6, xmm1, xmm4 ; 1065 _ C5 F1: EF. F4 + vpor xmm3, xmm2, xmm7 ; 1069 _ C5 E9: EB. DF + vpxor xmm2, xmm3, xmm6 ; 106D _ C5 E1: EF. D6 + vpor xmm1, xmm6, xmm4 ; 1071 _ C5 C9: EB. CC + vpcmpeqd xmm0, xmm0, xmm0 ; 1075 _ C5 F9: 76. C0 + vpxor xmm6, xmm1, xmm2 ; 1079 _ C5 F1: EF. F2 + vpxor xmm5, xmm5, xmm0 ; 107D _ C5 D1: EF. E8 + vpxor xmm3, xmm5, xmm6 ; 1081 _ C5 D1: EF. DE + vpor xmm6, xmm6, xmm2 ; 1085 _ C5 C9: EB. F2 + vmovd xmm5, dword [ecx+2B90H] ; 1089 _ C5 F9: 6E. A9, 00002B90 + vpshufd xmm1, xmm5, 0 ; 1091 _ C5 F9: 70. CD, 00 + vpxor xmm5, xmm3, xmm1 ; 1096 _ C5 E1: EF. E9 + vmovd xmm1, dword [ecx+2B94H] ; 109A _ C5 F9: 6E. 89, 00002B94 + vpshufd xmm1, xmm1, 0 ; 10A2 _ C5 F9: 70. C9, 00 + vpxor xmm1, xmm2, xmm1 ; 10A7 _ C5 E9: EF. C9 + vpxor xmm2, xmm6, xmm2 ; 10AB _ C5 C9: EF. D2 + vpor xmm3, xmm2, xmm3 ; 10AF _ C5 E9: EB. DB + vpxor xmm6, xmm4, xmm3 ; 10B3 _ C5 D9: EF. F3 + vmovd xmm4, dword [ecx+2B98H] ; 10B7 _ C5 F9: 6E. A1, 00002B98 + vmovd xmm2, dword [ecx+2B9CH] ; 10BF _ C5 F9: 6E. 91, 00002B9C + vpshufd xmm4, xmm4, 0 ; 10C7 _ C5 F9: 70. E4, 00 + vpxor xmm3, xmm6, xmm4 ; 10CC _ C5 C9: EF. DC + vpshufd xmm6, xmm2, 0 ; 10D0 _ C5 F9: 70. F2, 00 + vpxor xmm4, xmm7, xmm6 ; 10D5 _ C5 C1: EF. E6 + vpslld xmm7, xmm3, 10 ; 10D9 _ C5 C1: 72. F3, 0A + vpsrld xmm6, xmm3, 22 ; 10DE _ C5 C9: 72. D3, 16 + vpor xmm2, xmm7, xmm6 ; 10E3 _ C5 C1: EB. D6 + vpslld xmm6, xmm5, 27 ; 10E7 _ C5 C9: 72. F5, 1B + vpsrld xmm5, xmm5, 5 ; 10EC _ C5 D1: 72. D5, 05 + vpxor xmm3, xmm2, xmm4 ; 10F1 _ C5 E9: EF. DC + vpor xmm6, xmm6, xmm5 ; 10F5 _ C5 C9: EB. F5 + vpslld xmm7, xmm1, 7 ; 10F9 _ C5 C1: 72. F1, 07 + vpxor xmm5, xmm6, xmm1 ; 10FE _ C5 C9: EF. E9 + vpxor xmm7, xmm3, xmm7 ; 1102 _ C5 E1: EF. FF + vpxor xmm2, xmm5, xmm4 ; 1106 _ C5 D1: EF. D4 + vpslld xmm3, xmm4, 25 ; 110A _ C5 E1: 72. 
F4, 19 + vpsrld xmm4, xmm4, 7 ; 110F _ C5 D9: 72. D4, 07 + vpslld xmm5, xmm2, 3 ; 1114 _ C5 D1: 72. F2, 03 + vpor xmm6, xmm3, xmm4 ; 1119 _ C5 E1: EB. F4 + vpslld xmm3, xmm1, 31 ; 111D _ C5 E1: 72. F1, 1F + vpsrld xmm1, xmm1, 1 ; 1122 _ C5 F1: 72. D1, 01 + vpxor xmm4, xmm6, xmm7 ; 1127 _ C5 C9: EF. E7 + vpor xmm6, xmm3, xmm1 ; 112B _ C5 E1: EB. F1 + vpxor xmm4, xmm4, xmm5 ; 112F _ C5 D9: EF. E5 + vpxor xmm5, xmm6, xmm2 ; 1133 _ C5 C9: EF. EA + vpslld xmm3, xmm2, 19 ; 1137 _ C5 E1: 72. F2, 13 + vpxor xmm5, xmm5, xmm7 ; 113C _ C5 D1: EF. EF + vpsrld xmm2, xmm2, 13 ; 1140 _ C5 E9: 72. D2, 0D + vpslld xmm1, xmm7, 29 ; 1145 _ C5 F1: 72. F7, 1D + vpsrld xmm7, xmm7, 3 ; 114A _ C5 C1: 72. D7, 03 + vpor xmm6, xmm3, xmm2 ; 114F _ C5 E1: EB. F2 + vpor xmm2, xmm1, xmm7 ; 1153 _ C5 F1: EB. D7 + vpxor xmm3, xmm2, xmm0 ; 1157 _ C5 E9: EF. D8 + vpxor xmm1, xmm5, xmm0 ; 115B _ C5 D1: EF. C8 + vpor xmm5, xmm5, xmm6 ; 115F _ C5 D1: EB. EE + vpxor xmm7, xmm5, xmm3 ; 1163 _ C5 D1: EF. FB + vpxor xmm5, xmm6, xmm1 ; 1167 _ C5 C9: EF. E9 + vpxor xmm2, xmm7, xmm4 ; 116B _ C5 C1: EF. D4 + vpor xmm6, xmm3, xmm1 ; 116F _ C5 E1: EB. F1 + vpand xmm3, xmm5, xmm4 ; 1173 _ C5 D1: DB. DC + vpxor xmm6, xmm6, xmm5 ; 1177 _ C5 C9: EF. F5 + vpxor xmm7, xmm1, xmm3 ; 117B _ C5 F1: EF. FB + vpor xmm1, xmm3, xmm2 ; 117F _ C5 E1: EB. CA + vpxor xmm5, xmm1, xmm6 ; 1183 _ C5 F1: EF. EE + vpxor xmm4, xmm4, xmm7 ; 1187 _ C5 D9: EF. E7 + vmovd xmm1, dword [ecx+2B80H] ; 118B _ C5 F9: 6E. 89, 00002B80 + vpxor xmm4, xmm4, xmm5 ; 1193 _ C5 D9: EF. E5 + vpxor xmm3, xmm4, xmm2 ; 1197 _ C5 D9: EF. DA + vpxor xmm6, xmm6, xmm2 ; 119B _ C5 C9: EF. F2 + vpshufd xmm4, xmm1, 0 ; 119F _ C5 F9: 70. E1, 00 + vpxor xmm1, xmm5, xmm4 ; 11A4 _ C5 D1: EF. CC + vpand xmm5, xmm6, xmm3 ; 11A8 _ C5 C9: DB. EB + vpxor xmm6, xmm7, xmm5 ; 11AC _ C5 C1: EF. F5 + vmovd xmm7, dword [ecx+2B84H] ; 11B0 _ C5 F9: 6E. B9, 00002B84 + vpshufd xmm4, xmm7, 0 ; 11B8 _ C5 F9: 70. E7, 00 + vmovd xmm7, dword [ecx+2B88H] ; 11BD _ C5 F9: 6E. B9, 00002B88 + vpxor xmm5, xmm6, xmm4 ; 11C5 _ C5 C9: EF. EC + vmovd xmm4, dword [ecx+2B8CH] ; 11C9 _ C5 F9: 6E. A1, 00002B8C + vpshufd xmm6, xmm7, 0 ; 11D1 _ C5 F9: 70. F7, 00 + vpshufd xmm7, xmm4, 0 ; 11D6 _ C5 F9: 70. FC, 00 + vpxor xmm2, xmm2, xmm6 ; 11DB _ C5 E9: EF. D6 + vpxor xmm4, xmm3, xmm7 ; 11DF _ C5 E1: EF. E7 + vpslld xmm3, xmm2, 10 ; 11E3 _ C5 E1: 72. F2, 0A + vpsrld xmm2, xmm2, 22 ; 11E8 _ C5 E9: 72. D2, 16 + vpslld xmm7, xmm1, 27 ; 11ED _ C5 C1: 72. F1, 1B + vpor xmm6, xmm3, xmm2 ; 11F2 _ C5 E1: EB. F2 + vpslld xmm3, xmm5, 7 ; 11F6 _ C5 E1: 72. F5, 07 + vpxor xmm2, xmm6, xmm4 ; 11FB _ C5 C9: EF. D4 + vpsrld xmm1, xmm1, 5 ; 11FF _ C5 F1: 72. D1, 05 + vpxor xmm6, xmm2, xmm3 ; 1204 _ C5 E9: EF. F3 + vpor xmm2, xmm7, xmm1 ; 1208 _ C5 C1: EB. D1 + vpxor xmm3, xmm2, xmm5 ; 120C _ C5 E9: EF. DD + vpslld xmm1, xmm4, 25 ; 1210 _ C5 F1: 72. F4, 19 + vpxor xmm7, xmm3, xmm4 ; 1215 _ C5 E1: EF. FC + vpsrld xmm4, xmm4, 7 ; 1219 _ C5 D9: 72. D4, 07 + vpor xmm4, xmm1, xmm4 ; 121E _ C5 F1: EB. E4 + vpslld xmm1, xmm5, 31 ; 1222 _ C5 F1: 72. F5, 1F + vpsrld xmm5, xmm5, 1 ; 1227 _ C5 D1: 72. D5, 01 + vpxor xmm2, xmm4, xmm6 ; 122C _ C5 D9: EF. D6 + vpslld xmm3, xmm7, 3 ; 1230 _ C5 E1: 72. F7, 03 + vpor xmm5, xmm1, xmm5 ; 1235 _ C5 F1: EB. ED + vpxor xmm4, xmm2, xmm3 ; 1239 _ C5 E9: EF. E3 + vpxor xmm2, xmm5, xmm7 ; 123D _ C5 D1: EF. D7 + vpxor xmm1, xmm2, xmm6 ; 1241 _ C5 E9: EF. CE + vpslld xmm3, xmm6, 29 ; 1245 _ C5 E1: 72. F6, 1D + vpsrld xmm6, xmm6, 3 ; 124A _ C5 C9: 72. D6, 03 + vpor xmm2, xmm3, xmm6 ; 124F _ C5 E1: EB. 
D6 + vpslld xmm6, xmm7, 19 ; 1253 _ C5 C9: 72. F7, 13 + vpsrld xmm7, xmm7, 13 ; 1258 _ C5 C1: 72. D7, 0D + vpor xmm3, xmm6, xmm7 ; 125D _ C5 C9: EB. DF + vpor xmm6, xmm2, xmm4 ; 1261 _ C5 E9: EB. F4 + vpxor xmm2, xmm2, xmm3 ; 1265 _ C5 E9: EF. D3 + vpand xmm5, xmm3, xmm4 ; 1269 _ C5 E1: DB. EC + vpxor xmm7, xmm2, xmm0 ; 126D _ C5 E9: EF. F8 + vpxor xmm4, xmm4, xmm1 ; 1271 _ C5 D9: EF. E1 + vpxor xmm3, xmm5, xmm7 ; 1275 _ C5 D1: EF. DF + vpand xmm7, xmm7, xmm6 ; 1279 _ C5 C1: DB. FE + vpor xmm1, xmm1, xmm5 ; 127D _ C5 F1: EB. CD + vpand xmm2, xmm4, xmm6 ; 1281 _ C5 D9: DB. D6 + vpxor xmm5, xmm1, xmm7 ; 1285 _ C5 F1: EF. EF + vpxor xmm4, xmm7, xmm3 ; 1289 _ C5 C1: EF. E3 + vpxor xmm1, xmm6, xmm5 ; 128D _ C5 C9: EF. CD + vpor xmm6, xmm3, xmm4 ; 1291 _ C5 E1: EB. F4 + vmovd xmm3, dword [ecx+2B70H] ; 1295 _ C5 F9: 6E. 99, 00002B70 + vpxor xmm7, xmm6, xmm2 ; 129D _ C5 C9: EF. FA + vpxor xmm2, xmm2, xmm1 ; 12A1 _ C5 E9: EF. D1 + vpxor xmm6, xmm2, xmm4 ; 12A5 _ C5 E9: EF. F4 + vpshufd xmm2, xmm3, 0 ; 12A9 _ C5 F9: 70. D3, 00 + vmovd xmm3, dword [ecx+2B74H] ; 12AE _ C5 F9: 6E. 99, 00002B74 + vpxor xmm6, xmm6, xmm2 ; 12B6 _ C5 C9: EF. F2 + vpshufd xmm2, xmm3, 0 ; 12BA _ C5 F9: 70. D3, 00 + vmovd xmm3, dword [ecx+2B78H] ; 12BF _ C5 F9: 6E. 99, 00002B78 + vpxor xmm2, xmm7, xmm2 ; 12C7 _ C5 C1: EF. D2 + vpshufd xmm3, xmm3, 0 ; 12CB _ C5 F9: 70. DB, 00 + vpxor xmm3, xmm5, xmm3 ; 12D0 _ C5 D1: EF. DB + vpor xmm5, xmm1, xmm7 ; 12D4 _ C5 F1: EB. EF + vpxor xmm1, xmm5, xmm4 ; 12D8 _ C5 D1: EF. CC + vpslld xmm5, xmm3, 10 ; 12DC _ C5 D1: 72. F3, 0A + vmovd xmm4, dword [ecx+2B7CH] ; 12E1 _ C5 F9: 6E. A1, 00002B7C + vpsrld xmm3, xmm3, 22 ; 12E9 _ C5 E1: 72. D3, 16 + vpshufd xmm7, xmm4, 0 ; 12EE _ C5 F9: 70. FC, 00 + vpxor xmm4, xmm1, xmm7 ; 12F3 _ C5 F1: EF. E7 + vpor xmm1, xmm5, xmm3 ; 12F7 _ C5 D1: EB. CB + vpslld xmm3, xmm6, 27 ; 12FB _ C5 E1: 72. F6, 1B + vpsrld xmm6, xmm6, 5 ; 1300 _ C5 C9: 72. D6, 05 + vpxor xmm7, xmm1, xmm4 ; 1305 _ C5 F1: EF. FC + vpslld xmm5, xmm2, 7 ; 1309 _ C5 D1: 72. F2, 07 + vpor xmm6, xmm3, xmm6 ; 130E _ C5 E1: EB. F6 + vpxor xmm1, xmm7, xmm5 ; 1312 _ C5 C1: EF. CD + vpxor xmm5, xmm6, xmm2 ; 1316 _ C5 C9: EF. EA + vpslld xmm3, xmm4, 25 ; 131A _ C5 E1: 72. F4, 19 + vpxor xmm5, xmm5, xmm4 ; 131F _ C5 D1: EF. EC + vpsrld xmm4, xmm4, 7 ; 1323 _ C5 D9: 72. D4, 07 + vpor xmm6, xmm3, xmm4 ; 1328 _ C5 E1: EB. F4 + vpslld xmm7, xmm2, 31 ; 132C _ C5 C1: 72. F2, 1F + vpsrld xmm2, xmm2, 1 ; 1331 _ C5 E9: 72. D2, 01 + vpxor xmm4, xmm6, xmm1 ; 1336 _ C5 C9: EF. E1 + vpor xmm6, xmm7, xmm2 ; 133A _ C5 C1: EB. F2 + vpslld xmm3, xmm5, 3 ; 133E _ C5 E1: 72. F5, 03 + vpxor xmm2, xmm6, xmm5 ; 1343 _ C5 C9: EF. D5 + vpxor xmm4, xmm4, xmm3 ; 1347 _ C5 D9: EF. E3 + vpxor xmm3, xmm2, xmm1 ; 134B _ C5 E9: EF. D9 + vpslld xmm7, xmm1, 29 ; 134F _ C5 C1: 72. F1, 1D + vpsrld xmm1, xmm1, 3 ; 1354 _ C5 F1: 72. D1, 03 + vpslld xmm2, xmm5, 19 ; 1359 _ C5 E9: 72. F5, 13 + vpsrld xmm5, xmm5, 13 ; 135E _ C5 D1: 72. D5, 0D + vpor xmm6, xmm7, xmm1 ; 1363 _ C5 C1: EB. F1 + vpor xmm5, xmm2, xmm5 ; 1367 _ C5 E9: EB. ED + vpxor xmm1, xmm4, xmm3 ; 136B _ C5 D9: EF. CB + vpxor xmm7, xmm5, xmm6 ; 136F _ C5 D1: EF. FE + vpand xmm2, xmm6, xmm7 ; 1373 _ C5 C9: DB. D7 + vpxor xmm6, xmm6, xmm4 ; 1377 _ C5 C9: EF. F4 + vpxor xmm0, xmm2, xmm0 ; 137B _ C5 E9: EF. C0 + vpor xmm6, xmm6, xmm7 ; 137F _ C5 C9: EB. F7 + vpxor xmm2, xmm0, xmm1 ; 1383 _ C5 F9: EF. D1 + vpxor xmm4, xmm1, xmm6 ; 1387 _ C5 F1: EF. E6 + vmovd xmm5, dword [ecx+2B60H] ; 138B _ C5 F9: 6E. A9, 00002B60 + vpxor xmm7, xmm7, xmm2 ; 1393 _ C5 C1: EF. 
FA + vpand xmm0, xmm3, xmm4 ; 1397 _ C5 E1: DB. C4 + vpxor xmm6, xmm6, xmm3 ; 139B _ C5 C9: EF. F3 + vpxor xmm1, xmm0, xmm7 ; 139F _ C5 F9: EF. CF + vpxor xmm3, xmm7, xmm4 ; 13A3 _ C5 C1: EF. DC + vpshufd xmm0, xmm5, 0 ; 13A7 _ C5 F9: 70. C5, 00 + vpxor xmm5, xmm1, xmm0 ; 13AC _ C5 F1: EF. E8 + vmovd xmm0, dword [ecx+2B64H] ; 13B0 _ C5 F9: 6E. 81, 00002B64 + vpshufd xmm0, xmm0, 0 ; 13B8 _ C5 F9: 70. C0, 00 + vpxor xmm0, xmm2, xmm0 ; 13BD _ C5 E9: EF. C0 + vpor xmm2, xmm3, xmm2 ; 13C1 _ C5 E1: EB. D2 + vpxor xmm3, xmm6, xmm2 ; 13C5 _ C5 C9: EF. DA + vmovd xmm6, dword [ecx+2B68H] ; 13C9 _ C5 F9: 6E. B1, 00002B68 + vpshufd xmm7, xmm6, 0 ; 13D1 _ C5 F9: 70. FE, 00 + vpxor xmm6, xmm4, xmm1 ; 13D6 _ C5 D9: EF. F1 + vmovd xmm4, dword [ecx+2B6CH] ; 13DA _ C5 F9: 6E. A1, 00002B6C + vpxor xmm3, xmm3, xmm7 ; 13E2 _ C5 E1: EF. DF + vpshufd xmm4, xmm4, 0 ; 13E6 _ C5 F9: 70. E4, 00 + vpslld xmm2, xmm3, 10 ; 13EB _ C5 E9: 72. F3, 0A + vpsrld xmm3, xmm3, 22 ; 13F0 _ C5 E1: 72. D3, 16 + vpxor xmm6, xmm6, xmm4 ; 13F5 _ C5 C9: EF. F4 + vpor xmm1, xmm2, xmm3 ; 13F9 _ C5 E9: EB. CB + vpslld xmm4, xmm0, 7 ; 13FD _ C5 D9: 72. F0, 07 + vpxor xmm7, xmm1, xmm6 ; 1402 _ C5 F1: EF. FE + vpslld xmm2, xmm5, 27 ; 1406 _ C5 E9: 72. F5, 1B + vpsrld xmm5, xmm5, 5 ; 140B _ C5 D1: 72. D5, 05 + vpxor xmm3, xmm7, xmm4 ; 1410 _ C5 C1: EF. DC + vpor xmm4, xmm2, xmm5 ; 1414 _ C5 E9: EB. E5 + vpslld xmm1, xmm6, 25 ; 1418 _ C5 F1: 72. F6, 19 + vpxor xmm5, xmm4, xmm0 ; 141D _ C5 D9: EF. E8 + vpxor xmm5, xmm5, xmm6 ; 1421 _ C5 D1: EF. EE + vpsrld xmm6, xmm6, 7 ; 1425 _ C5 C9: 72. D6, 07 + vpor xmm7, xmm1, xmm6 ; 142A _ C5 F1: EB. FE + vpslld xmm4, xmm5, 3 ; 142E _ C5 D9: 72. F5, 03 + vpxor xmm6, xmm7, xmm3 ; 1433 _ C5 C1: EF. F3 + vpslld xmm2, xmm5, 19 ; 1437 _ C5 E9: 72. F5, 13 + vpxor xmm1, xmm6, xmm4 ; 143C _ C5 C9: EF. CC + vpslld xmm4, xmm0, 31 ; 1440 _ C5 D9: 72. F0, 1F + vpsrld xmm0, xmm0, 1 ; 1445 _ C5 F9: 72. D0, 01 + vpsrld xmm7, xmm5, 13 ; 144A _ C5 C1: 72. D5, 0D + vpor xmm4, xmm4, xmm0 ; 144F _ C5 D9: EB. E0 + vpor xmm6, xmm2, xmm7 ; 1453 _ C5 E9: EB. F7 + vpxor xmm0, xmm4, xmm5 ; 1457 _ C5 D9: EF. C5 + vpslld xmm2, xmm3, 29 ; 145B _ C5 E9: 72. F3, 1D + vpxor xmm5, xmm0, xmm3 ; 1460 _ C5 F9: EF. EB + vpcmpeqd xmm4, xmm4, xmm4 ; 1464 _ C5 D9: 76. E4 + vpsrld xmm3, xmm3, 3 ; 1468 _ C5 E1: 72. D3, 03 + vpxor xmm5, xmm5, xmm4 ; 146D _ C5 D1: EF. EC + vpor xmm7, xmm2, xmm3 ; 1471 _ C5 E9: EB. FB + vpor xmm0, xmm1, xmm6 ; 1475 _ C5 F1: EB. C6 + vpxor xmm7, xmm7, xmm5 ; 1479 _ C5 C1: EF. FD + vpxor xmm3, xmm0, xmm7 ; 147D _ C5 F9: EF. DF + vpxor xmm2, xmm1, xmm3 ; 1481 _ C5 F1: EF. D3 + vpor xmm1, xmm7, xmm5 ; 1485 _ C5 C1: EB. CD + vpand xmm0, xmm1, xmm6 ; 1489 _ C5 F1: DB. C6 + vpxor xmm0, xmm0, xmm2 ; 148D _ C5 F9: EF. C2 + vpor xmm2, xmm2, xmm6 ; 1491 _ C5 E9: EB. D6 + vpand xmm1, xmm5, xmm0 ; 1495 _ C5 D1: DB. C8 + vpxor xmm5, xmm2, xmm5 ; 1499 _ C5 E9: EF. ED + vpxor xmm7, xmm1, xmm3 ; 149D _ C5 F1: EF. FB + vpxor xmm1, xmm5, xmm0 ; 14A1 _ C5 D1: EF. C8 + vmovd xmm5, dword [ecx+2B50H] ; 14A5 _ C5 F9: 6E. A9, 00002B50 + vpxor xmm2, xmm1, xmm7 ; 14AD _ C5 F1: EF. D7 + vpshufd xmm5, xmm5, 0 ; 14B1 _ C5 F9: 70. ED, 00 + vpand xmm3, xmm3, xmm1 ; 14B6 _ C5 E1: DB. D9 + vpxor xmm5, xmm7, xmm5 ; 14BA _ C5 C1: EF. ED + vpxor xmm4, xmm2, xmm4 ; 14BE _ C5 E9: EF. E4 + vmovd xmm7, dword [ecx+2B54H] ; 14C2 _ C5 F9: 6E. B9, 00002B54 + vpxor xmm1, xmm3, xmm2 ; 14CA _ C5 E1: EF. CA + vpshufd xmm7, xmm7, 0 ; 14CE _ C5 F9: 70. FF, 00 + vpxor xmm1, xmm1, xmm6 ; 14D3 _ C5 F1: EF. CE + vpxor xmm4, xmm4, xmm7 ; 14D7 _ C5 D9: EF. 
E7 + vmovd xmm2, dword [ecx+2B58H] ; 14DB _ C5 F9: 6E. 91, 00002B58 + vpshufd xmm3, xmm2, 0 ; 14E3 _ C5 F9: 70. DA, 00 + vpxor xmm7, xmm1, xmm3 ; 14E8 _ C5 F1: EF. FB + vmovd xmm1, dword [ecx+2B5CH] ; 14EC _ C5 F9: 6E. 89, 00002B5C + vpslld xmm2, xmm7, 10 ; 14F4 _ C5 E9: 72. F7, 0A + vpsrld xmm3, xmm7, 22 ; 14F9 _ C5 E1: 72. D7, 16 + vpslld xmm7, xmm4, 7 ; 14FE _ C5 C1: 72. F4, 07 + vpshufd xmm6, xmm1, 0 ; 1503 _ C5 F9: 70. F1, 00 + vpor xmm1, xmm2, xmm3 ; 1508 _ C5 E9: EB. CB + vpslld xmm2, xmm5, 27 ; 150C _ C5 E9: 72. F5, 1B + vpsrld xmm5, xmm5, 5 ; 1511 _ C5 D1: 72. D5, 05 + vpxor xmm0, xmm0, xmm6 ; 1516 _ C5 F9: EF. C6 + vpor xmm2, xmm2, xmm5 ; 151A _ C5 E9: EB. D5 + vpxor xmm6, xmm1, xmm0 ; 151E _ C5 F1: EF. F0 + vpxor xmm3, xmm2, xmm4 ; 1522 _ C5 E9: EF. DC + vpxor xmm7, xmm6, xmm7 ; 1526 _ C5 C9: EF. FF + vpxor xmm6, xmm3, xmm0 ; 152A _ C5 E1: EF. F0 + vpslld xmm1, xmm0, 25 ; 152E _ C5 F1: 72. F0, 19 + vpsrld xmm0, xmm0, 7 ; 1533 _ C5 F9: 72. D0, 07 + vpor xmm5, xmm1, xmm0 ; 1538 _ C5 F1: EB. E8 + vpslld xmm0, xmm6, 3 ; 153C _ C5 F9: 72. F6, 03 + vpxor xmm2, xmm5, xmm7 ; 1541 _ C5 D1: EF. D7 + vpslld xmm3, xmm4, 31 ; 1545 _ C5 E1: 72. F4, 1F + vpsrld xmm4, xmm4, 1 ; 154A _ C5 D9: 72. D4, 01 + vpxor xmm1, xmm2, xmm0 ; 154F _ C5 E9: EF. C8 + vpor xmm2, xmm3, xmm4 ; 1553 _ C5 E1: EB. D4 + vpslld xmm0, xmm7, 29 ; 1557 _ C5 F9: 72. F7, 1D + vpsrld xmm5, xmm7, 3 ; 155C _ C5 D1: 72. D7, 03 + vpxor xmm4, xmm2, xmm6 ; 1561 _ C5 E9: EF. E6 + vpor xmm0, xmm0, xmm5 ; 1565 _ C5 F9: EB. C5 + vpxor xmm3, xmm4, xmm7 ; 1569 _ C5 D9: EF. DF + vpslld xmm7, xmm6, 19 ; 156D _ C5 C1: 72. F6, 13 + vpsrld xmm6, xmm6, 13 ; 1572 _ C5 C9: 72. D6, 0D + vpand xmm4, xmm0, xmm1 ; 1577 _ C5 F9: DB. E1 + vpor xmm2, xmm7, xmm6 ; 157B _ C5 C1: EB. D6 + vpxor xmm6, xmm4, xmm3 ; 157F _ C5 D9: EF. F3 + vpor xmm3, xmm3, xmm1 ; 1583 _ C5 E1: EB. D9 + vpand xmm3, xmm3, xmm2 ; 1587 _ C5 E1: DB. DA + vpxor xmm0, xmm0, xmm6 ; 158B _ C5 F9: EF. C6 + vpxor xmm4, xmm0, xmm3 ; 158F _ C5 F9: EF. E3 + vpcmpeqd xmm5, xmm5, xmm5 ; 1593 _ C5 D1: 76. ED + vpxor xmm1, xmm1, xmm4 ; 1597 _ C5 F1: EF. CC + vpxor xmm0, xmm2, xmm5 ; 159B _ C5 E9: EF. C5 + vpand xmm2, xmm3, xmm6 ; 159F _ C5 E1: DB. D6 + vpxor xmm3, xmm2, xmm1 ; 15A3 _ C5 E9: EF. D9 + vpand xmm7, xmm1, xmm0 ; 15A7 _ C5 F1: DB. F8 + vmovd xmm5, dword [ecx+2B40H] ; 15AB _ C5 F9: 6E. A9, 00002B40 + vpxor xmm2, xmm0, xmm3 ; 15B3 _ C5 F9: EF. D3 + vpxor xmm0, xmm7, xmm6 ; 15B7 _ C5 C1: EF. C6 + vpand xmm6, xmm6, xmm2 ; 15BB _ C5 C9: DB. F2 + vpshufd xmm7, xmm5, 0 ; 15BF _ C5 F9: 70. FD, 00 + vpxor xmm1, xmm0, xmm2 ; 15C4 _ C5 F9: EF. CA + vpxor xmm5, xmm2, xmm7 ; 15C8 _ C5 E9: EF. EF + vpxor xmm7, xmm1, xmm2 ; 15CC _ C5 F1: EF. FA + vpxor xmm2, xmm6, xmm4 ; 15D0 _ C5 C9: EF. D4 + vmovd xmm0, dword [ecx+2B44H] ; 15D4 _ C5 F9: 6E. 81, 00002B44 + vpor xmm1, xmm2, xmm1 ; 15DC _ C5 E9: EB. C9 + vpshufd xmm0, xmm0, 0 ; 15E0 _ C5 F9: 70. C0, 00 + vpxor xmm2, xmm1, xmm3 ; 15E5 _ C5 F1: EF. D3 + vmovd xmm3, dword [ecx+2B48H] ; 15E9 _ C5 F9: 6E. 99, 00002B48 + vpxor xmm7, xmm7, xmm0 ; 15F1 _ C5 C1: EF. F8 + vpshufd xmm0, xmm3, 0 ; 15F5 _ C5 F9: 70. C3, 00 + vmovd xmm3, dword [ecx+2B4CH] ; 15FA _ C5 F9: 6E. 99, 00002B4C + vpxor xmm6, xmm2, xmm0 ; 1602 _ C5 E9: EF. F0 + vpshufd xmm1, xmm3, 0 ; 1606 _ C5 F9: 70. CB, 00 + vpsrld xmm2, xmm6, 22 ; 160B _ C5 E9: 72. D6, 16 + vpxor xmm0, xmm4, xmm1 ; 1610 _ C5 D9: EF. C1 + vpslld xmm4, xmm6, 10 ; 1614 _ C5 D9: 72. F6, 0A + vpor xmm4, xmm4, xmm2 ; 1619 _ C5 D9: EB. E2 + vpslld xmm6, xmm5, 27 ; 161D _ C5 C9: 72. 
F5, 1B + vpsrld xmm5, xmm5, 5 ; 1622 _ C5 D1: 72. D5, 05 + vpxor xmm3, xmm4, xmm0 ; 1627 _ C5 D9: EF. D8 + vpslld xmm1, xmm7, 7 ; 162B _ C5 F1: 72. F7, 07 + vpor xmm2, xmm6, xmm5 ; 1630 _ C5 C9: EB. D5 + vpxor xmm4, xmm3, xmm1 ; 1634 _ C5 E1: EF. E1 + vpxor xmm3, xmm2, xmm7 ; 1638 _ C5 E9: EF. DF + vpxor xmm5, xmm3, xmm0 ; 163C _ C5 E1: EF. E8 + vpslld xmm1, xmm0, 25 ; 1640 _ C5 F1: 72. F0, 19 + vpsrld xmm0, xmm0, 7 ; 1645 _ C5 F9: 72. D0, 07 + vpslld xmm3, xmm5, 3 ; 164A _ C5 E1: 72. F5, 03 + vpor xmm2, xmm1, xmm0 ; 164F _ C5 F1: EB. D0 + vpslld xmm1, xmm7, 31 ; 1653 _ C5 F1: 72. F7, 1F + vpsrld xmm7, xmm7, 1 ; 1658 _ C5 C1: 72. D7, 01 + vpxor xmm0, xmm2, xmm4 ; 165D _ C5 E9: EF. C4 + vpor xmm2, xmm1, xmm7 ; 1661 _ C5 F1: EB. D7 + vpxor xmm3, xmm0, xmm3 ; 1665 _ C5 F9: EF. DB + vpxor xmm0, xmm2, xmm5 ; 1669 _ C5 E9: EF. C5 + vpslld xmm1, xmm4, 29 ; 166D _ C5 F1: 72. F4, 1D + vpxor xmm0, xmm0, xmm4 ; 1672 _ C5 F9: EF. C4 + vpsrld xmm4, xmm4, 3 ; 1676 _ C5 D9: 72. D4, 03 + vpor xmm1, xmm1, xmm4 ; 167B _ C5 F1: EB. CC + vpslld xmm6, xmm5, 19 ; 167F _ C5 C9: 72. F5, 13 + vpsrld xmm5, xmm5, 13 ; 1684 _ C5 D1: 72. D5, 0D + vpxor xmm2, xmm1, xmm0 ; 1689 _ C5 F1: EF. D0 + vpor xmm7, xmm6, xmm5 ; 168D _ C5 C9: EB. FD + vpand xmm4, xmm1, xmm2 ; 1691 _ C5 F1: DB. E2 + vpxor xmm6, xmm7, xmm2 ; 1695 _ C5 C1: EF. F2 + vpxor xmm1, xmm4, xmm6 ; 1699 _ C5 D9: EF. CE + vpand xmm6, xmm6, xmm0 ; 169D _ C5 C9: DB. F0 + vpor xmm4, xmm3, xmm1 ; 16A1 _ C5 E1: EB. E1 + vpxor xmm3, xmm0, xmm3 ; 16A5 _ C5 F9: EF. DB + vpxor xmm6, xmm6, xmm4 ; 16A9 _ C5 C9: EF. F4 + vpxor xmm0, xmm3, xmm1 ; 16AD _ C5 E1: EF. C1 + vpxor xmm2, xmm2, xmm4 ; 16B1 _ C5 E9: EF. D4 + vpxor xmm3, xmm0, xmm6 ; 16B5 _ C5 F9: EF. DE + vpor xmm7, xmm3, xmm2 ; 16B9 _ C5 E1: EB. FA + vpand xmm4, xmm4, xmm2 ; 16BD _ C5 D9: DB. E2 + vpxor xmm3, xmm7, xmm1 ; 16C1 _ C5 C1: EF. D9 + vpxor xmm5, xmm4, xmm0 ; 16C5 _ C5 D9: EF. E8 + vmovd xmm1, dword [ecx+2B30H] ; 16C9 _ C5 F9: 6E. 89, 00002B30 + vpshufd xmm4, xmm1, 0 ; 16D1 _ C5 F9: 70. E1, 00 + vmovd xmm1, dword [ecx+2B38H] ; 16D6 _ C5 F9: 6E. 89, 00002B38 + vpxor xmm0, xmm2, xmm4 ; 16DE _ C5 E9: EF. C4 + vmovd xmm2, dword [ecx+2B34H] ; 16E2 _ C5 F9: 6E. 91, 00002B34 + vpshufd xmm2, xmm2, 0 ; 16EA _ C5 F9: 70. D2, 00 + vpshufd xmm7, xmm1, 0 ; 16EF _ C5 F9: 70. F9, 00 + vpxor xmm4, xmm3, xmm2 ; 16F4 _ C5 E1: EF. E2 + vmovd xmm2, dword [ecx+2B3CH] ; 16F8 _ C5 F9: 6E. 91, 00002B3C + vpxor xmm7, xmm5, xmm7 ; 1700 _ C5 D1: EF. FF + vpxor xmm5, xmm6, xmm5 ; 1704 _ C5 C9: EF. ED + vpslld xmm6, xmm7, 10 ; 1708 _ C5 C9: 72. F7, 0A + vpshufd xmm1, xmm2, 0 ; 170D _ C5 F9: 70. CA, 00 + vpxor xmm3, xmm5, xmm3 ; 1712 _ C5 D1: EF. DB + vpsrld xmm2, xmm7, 22 ; 1716 _ C5 E9: 72. D7, 16 + vpslld xmm7, xmm0, 27 ; 171B _ C5 C1: 72. F0, 1B + vpsrld xmm0, xmm0, 5 ; 1720 _ C5 F9: 72. D0, 05 + vpxor xmm5, xmm3, xmm1 ; 1725 _ C5 E1: EF. E9 + vpor xmm3, xmm6, xmm2 ; 1729 _ C5 C9: EB. DA + vpor xmm2, xmm7, xmm0 ; 172D _ C5 C1: EB. D0 + vpxor xmm1, xmm3, xmm5 ; 1731 _ C5 E1: EF. CD + vpslld xmm6, xmm4, 7 ; 1735 _ C5 C9: 72. F4, 07 + vpxor xmm0, xmm2, xmm4 ; 173A _ C5 E9: EF. C4 + vpxor xmm6, xmm1, xmm6 ; 173E _ C5 F1: EF. F6 + vpxor xmm1, xmm0, xmm5 ; 1742 _ C5 F9: EF. CD + vpslld xmm3, xmm5, 25 ; 1746 _ C5 E1: 72. F5, 19 + vpsrld xmm5, xmm5, 7 ; 174B _ C5 D1: 72. D5, 07 + vpslld xmm7, xmm6, 29 ; 1750 _ C5 C1: 72. F6, 1D + vpor xmm2, xmm3, xmm5 ; 1755 _ C5 E1: EB. D5 + vpslld xmm5, xmm4, 31 ; 1759 _ C5 D1: 72. F4, 1F + vpsrld xmm4, xmm4, 1 ; 175E _ C5 D9: 72. D4, 01 + vpxor xmm0, xmm2, xmm6 ; 1763 _ C5 E9: EF. 
C6 + vpslld xmm3, xmm1, 3 ; 1767 _ C5 E1: 72. F1, 03 + vpor xmm4, xmm5, xmm4 ; 176C _ C5 D1: EB. E4 + vpxor xmm2, xmm0, xmm3 ; 1770 _ C5 F9: EF. D3 + vpxor xmm0, xmm4, xmm1 ; 1774 _ C5 D9: EF. C1 + vpxor xmm3, xmm0, xmm6 ; 1778 _ C5 F9: EF. DE + vpslld xmm5, xmm1, 19 ; 177C _ C5 D1: 72. F1, 13 + vpsrld xmm1, xmm1, 13 ; 1781 _ C5 F1: 72. D1, 0D + vpsrld xmm6, xmm6, 3 ; 1786 _ C5 C9: 72. D6, 03 + vpor xmm1, xmm5, xmm1 ; 178B _ C5 D1: EB. C9 + vpor xmm4, xmm7, xmm6 ; 178F _ C5 C1: EB. E6 + vpxor xmm0, xmm4, xmm2 ; 1793 _ C5 D9: EF. C2 + vpxor xmm4, xmm2, xmm1 ; 1797 _ C5 E9: EF. E1 + vpand xmm2, xmm4, xmm0 ; 179B _ C5 D9: DB. D0 + vpxor xmm7, xmm2, xmm3 ; 179F _ C5 E9: EF. FB + vpor xmm3, xmm3, xmm0 ; 17A3 _ C5 E1: EB. D8 + vpxor xmm5, xmm0, xmm7 ; 17A7 _ C5 F9: EF. EF + vpand xmm0, xmm4, xmm7 ; 17AB _ C5 D9: DB. C7 + vpxor xmm2, xmm3, xmm4 ; 17AF _ C5 E1: EF. D4 + vpand xmm6, xmm0, xmm1 ; 17B3 _ C5 F9: DB. F1 + vpcmpeqd xmm4, xmm4, xmm4 ; 17B7 _ C5 D9: 76. E4 + vpxor xmm0, xmm6, xmm5 ; 17BB _ C5 C9: EF. C5 + vmovd xmm6, dword [ecx+2B24H] ; 17BF _ C5 F9: 6E. B1, 00002B24 + vpxor xmm3, xmm7, xmm4 ; 17C7 _ C5 C1: EF. DC + vmovd xmm7, dword [ecx+2B20H] ; 17CB _ C5 F9: 6E. B9, 00002B20 + vpand xmm5, xmm5, xmm2 ; 17D3 _ C5 D1: DB. EA + vpshufd xmm4, xmm7, 0 ; 17D7 _ C5 F9: 70. E7, 00 + vpshufd xmm7, xmm6, 0 ; 17DC _ C5 F9: 70. FE, 00 + vpor xmm6, xmm5, xmm1 ; 17E1 _ C5 D1: EB. F1 + vmovd xmm5, dword [ecx+2B28H] ; 17E5 _ C5 F9: 6E. A9, 00002B28 + vpxor xmm6, xmm6, xmm3 ; 17ED _ C5 C9: EF. F3 + vpshufd xmm5, xmm5, 0 ; 17F1 _ C5 F9: 70. ED, 00 + vpxor xmm1, xmm1, xmm3 ; 17F6 _ C5 F1: EF. CB + vpxor xmm4, xmm2, xmm4 ; 17FA _ C5 E9: EF. E4 + vpxor xmm7, xmm0, xmm7 ; 17FE _ C5 F9: EF. FF + vpxor xmm6, xmm6, xmm5 ; 1802 _ C5 C9: EF. F5 + vpxor xmm5, xmm3, xmm0 ; 1806 _ C5 E1: EF. E8 + vmovd xmm0, dword [ecx+2B2CH] ; 180A _ C5 F9: 6E. 81, 00002B2C + vpand xmm2, xmm1, xmm2 ; 1812 _ C5 F1: DB. D2 + vpshufd xmm1, xmm0, 0 ; 1816 _ C5 F9: 70. C8, 00 + vpxor xmm3, xmm5, xmm2 ; 181B _ C5 D1: EF. DA + vpslld xmm5, xmm6, 10 ; 181F _ C5 D1: 72. F6, 0A + vpsrld xmm6, xmm6, 22 ; 1824 _ C5 C9: 72. D6, 16 + vpxor xmm0, xmm3, xmm1 ; 1829 _ C5 E1: EF. C1 + vpor xmm2, xmm5, xmm6 ; 182D _ C5 D1: EB. D6 + vpslld xmm5, xmm4, 27 ; 1831 _ C5 D1: 72. F4, 1B + vpsrld xmm4, xmm4, 5 ; 1836 _ C5 D9: 72. D4, 05 + vpxor xmm3, xmm2, xmm0 ; 183B _ C5 E9: EF. D8 + vpor xmm2, xmm5, xmm4 ; 183F _ C5 D1: EB. D4 + vpslld xmm1, xmm7, 7 ; 1843 _ C5 F1: 72. F7, 07 + vpxor xmm4, xmm2, xmm7 ; 1848 _ C5 E9: EF. E7 + vpxor xmm6, xmm3, xmm1 ; 184C _ C5 E1: EF. F1 + vpxor xmm1, xmm4, xmm0 ; 1850 _ C5 D9: EF. C8 + vpslld xmm3, xmm0, 25 ; 1854 _ C5 E1: 72. F0, 19 + vpsrld xmm0, xmm0, 7 ; 1859 _ C5 F9: 72. D0, 07 + vpor xmm2, xmm3, xmm0 ; 185E _ C5 E1: EB. D0 + vpslld xmm5, xmm7, 31 ; 1862 _ C5 D1: 72. F7, 1F + vpsrld xmm7, xmm7, 1 ; 1867 _ C5 C1: 72. D7, 01 + vpxor xmm4, xmm2, xmm6 ; 186C _ C5 E9: EF. E6 + vpslld xmm0, xmm1, 3 ; 1870 _ C5 F9: 72. F1, 03 + vpor xmm2, xmm5, xmm7 ; 1875 _ C5 D1: EB. D7 + vpxor xmm0, xmm4, xmm0 ; 1879 _ C5 D9: EF. C0 + vpxor xmm4, xmm2, xmm1 ; 187D _ C5 E9: EF. E1 + vpxor xmm2, xmm4, xmm6 ; 1881 _ C5 D9: EF. D6 + vpslld xmm3, xmm6, 29 ; 1885 _ C5 E1: 72. F6, 1D + vpsrld xmm6, xmm6, 3 ; 188A _ C5 C9: 72. D6, 03 + vpslld xmm5, xmm1, 19 ; 188F _ C5 D1: 72. F1, 13 + vpsrld xmm1, xmm1, 13 ; 1894 _ C5 F1: 72. D1, 0D + vpor xmm3, xmm3, xmm6 ; 1899 _ C5 E1: EB. DE + vpor xmm7, xmm5, xmm1 ; 189D _ C5 D1: EB. F9 + vpxor xmm1, xmm2, xmm0 ; 18A1 _ C5 E9: EF. C8 + vpxor xmm5, xmm2, xmm3 ; 18A5 _ C5 E9: EF. 
EB + vpand xmm2, xmm0, xmm1 ; 18A9 _ C5 F9: DB. D1 + vpxor xmm4, xmm2, xmm7 ; 18AD _ C5 E9: EF. E7 + vpor xmm0, xmm7, xmm1 ; 18B1 _ C5 C1: EB. C1 + vpxor xmm6, xmm3, xmm4 ; 18B5 _ C5 E1: EF. F4 + vpxor xmm7, xmm0, xmm5 ; 18B9 _ C5 F9: EF. FD + vpxor xmm3, xmm1, xmm4 ; 18BD _ C5 F1: EF. DC + vpor xmm2, xmm7, xmm6 ; 18C1 _ C5 C1: EB. D6 + vpxor xmm7, xmm2, xmm3 ; 18C5 _ C5 E9: EF. FB + vpor xmm3, xmm3, xmm4 ; 18C9 _ C5 E1: EB. DC + vpxor xmm0, xmm3, xmm7 ; 18CD _ C5 E1: EF. C7 + vpcmpeqd xmm3, xmm3, xmm3 ; 18D1 _ C5 E1: 76. DB + vpxor xmm1, xmm5, xmm3 ; 18D5 _ C5 D1: EF. CB + vmovd xmm5, dword [ecx+2B10H] ; 18D9 _ C5 F9: 6E. A9, 00002B10 + vpxor xmm1, xmm1, xmm0 ; 18E1 _ C5 F1: EF. C8 + vpshufd xmm2, xmm5, 0 ; 18E5 _ C5 F9: 70. D5, 00 + vpor xmm0, xmm0, xmm7 ; 18EA _ C5 F9: EB. C7 + vpxor xmm5, xmm1, xmm2 ; 18EE _ C5 F1: EF. EA + vmovd xmm2, dword [ecx+2B14H] ; 18F2 _ C5 F9: 6E. 91, 00002B14 + vpshufd xmm2, xmm2, 0 ; 18FA _ C5 F9: 70. D2, 00 + vpxor xmm2, xmm7, xmm2 ; 18FF _ C5 C1: EF. D2 + vpxor xmm7, xmm0, xmm7 ; 1903 _ C5 F9: EF. FF + vpor xmm0, xmm7, xmm1 ; 1907 _ C5 C1: EB. C1 + vpxor xmm0, xmm4, xmm0 ; 190B _ C5 D9: EF. C0 + vmovd xmm4, dword [ecx+2B18H] ; 190F _ C5 F9: 6E. A1, 00002B18 + vmovd xmm1, dword [ecx+2B1CH] ; 1917 _ C5 F9: 6E. 89, 00002B1C + vpshufd xmm4, xmm4, 0 ; 191F _ C5 F9: 70. E4, 00 + vpshufd xmm7, xmm1, 0 ; 1924 _ C5 F9: 70. F9, 00 + vpxor xmm0, xmm0, xmm4 ; 1929 _ C5 F9: EF. C4 + vpxor xmm4, xmm6, xmm7 ; 192D _ C5 C9: EF. E7 + vpslld xmm6, xmm0, 10 ; 1931 _ C5 C9: 72. F0, 0A + vpsrld xmm0, xmm0, 22 ; 1936 _ C5 F9: 72. D0, 16 + vpslld xmm7, xmm2, 7 ; 193B _ C5 C1: 72. F2, 07 + vpor xmm1, xmm6, xmm0 ; 1940 _ C5 C9: EB. C8 + vpslld xmm0, xmm5, 27 ; 1944 _ C5 F9: 72. F5, 1B + vpsrld xmm5, xmm5, 5 ; 1949 _ C5 D1: 72. D5, 05 + vpxor xmm6, xmm1, xmm4 ; 194E _ C5 F1: EF. F4 + vpor xmm0, xmm0, xmm5 ; 1952 _ C5 F9: EB. C5 + vpxor xmm6, xmm6, xmm7 ; 1956 _ C5 C9: EF. F7 + vpxor xmm1, xmm0, xmm2 ; 195A _ C5 F9: EF. CA + vpslld xmm7, xmm4, 25 ; 195E _ C5 C1: 72. F4, 19 + vpxor xmm5, xmm1, xmm4 ; 1963 _ C5 F1: EF. EC + vpsrld xmm4, xmm4, 7 ; 1967 _ C5 D9: 72. D4, 07 + vpor xmm4, xmm7, xmm4 ; 196C _ C5 C1: EB. E4 + vpslld xmm7, xmm2, 31 ; 1970 _ C5 C1: 72. F2, 1F + vpsrld xmm2, xmm2, 1 ; 1975 _ C5 E9: 72. D2, 01 + vpxor xmm0, xmm4, xmm6 ; 197A _ C5 D9: EF. C6 + vpslld xmm1, xmm5, 3 ; 197E _ C5 F1: 72. F5, 03 + vpor xmm2, xmm7, xmm2 ; 1983 _ C5 C1: EB. D2 + vpxor xmm4, xmm0, xmm1 ; 1987 _ C5 F9: EF. E1 + vpxor xmm0, xmm2, xmm5 ; 198B _ C5 E9: EF. C5 + vpslld xmm1, xmm5, 19 ; 198F _ C5 F1: 72. F5, 13 + vpsrld xmm5, xmm5, 13 ; 1994 _ C5 D1: 72. D5, 0D + vpxor xmm0, xmm0, xmm6 ; 1999 _ C5 F9: EF. C6 + vpor xmm2, xmm1, xmm5 ; 199D _ C5 F1: EB. D5 + vpslld xmm1, xmm6, 29 ; 19A1 _ C5 F1: 72. F6, 1D + vpsrld xmm6, xmm6, 3 ; 19A6 _ C5 C9: 72. D6, 03 + vpor xmm1, xmm1, xmm6 ; 19AB _ C5 F1: EB. CE + vpxor xmm6, xmm0, xmm3 ; 19AF _ C5 F9: EF. F3 + vpxor xmm1, xmm1, xmm3 ; 19B3 _ C5 F1: EF. CB + vpor xmm0, xmm0, xmm2 ; 19B7 _ C5 F9: EB. C2 + vpxor xmm5, xmm0, xmm1 ; 19BB _ C5 F9: EF. E9 + vpor xmm7, xmm1, xmm6 ; 19BF _ C5 F1: EB. FE + vpxor xmm0, xmm5, xmm4 ; 19C3 _ C5 D1: EF. C4 + vpxor xmm5, xmm2, xmm6 ; 19C7 _ C5 E9: EF. EE + vpand xmm1, xmm5, xmm4 ; 19CB _ C5 D1: DB. CC + vpxor xmm2, xmm7, xmm5 ; 19CF _ C5 C1: EF. D5 + vpxor xmm7, xmm6, xmm1 ; 19D3 _ C5 C9: EF. F9 + vpor xmm6, xmm1, xmm0 ; 19D7 _ C5 F1: EB. F0 + vpxor xmm5, xmm6, xmm2 ; 19DB _ C5 C9: EF. EA + vpxor xmm4, xmm4, xmm7 ; 19DF _ C5 D9: EF. E7 + vpxor xmm4, xmm4, xmm5 ; 19E3 _ C5 D9: EF. 
E5 + vpxor xmm2, xmm2, xmm0 ; 19E7 _ C5 E9: EF. D0 + vpxor xmm1, xmm4, xmm0 ; 19EB _ C5 D9: EF. C8 + vmovd xmm6, dword [ecx+2B00H] ; 19EF _ C5 F9: 6E. B1, 00002B00 + vpand xmm2, xmm2, xmm1 ; 19F7 _ C5 E9: DB. D1 + vpshufd xmm4, xmm6, 0 ; 19FB _ C5 F9: 70. E6, 00 + vpxor xmm2, xmm7, xmm2 ; 1A00 _ C5 C1: EF. D2 + vmovd xmm7, dword [ecx+2B04H] ; 1A04 _ C5 F9: 6E. B9, 00002B04 + vpxor xmm4, xmm5, xmm4 ; 1A0C _ C5 D1: EF. E4 + vmovd xmm5, dword [ecx+2B08H] ; 1A10 _ C5 F9: 6E. A9, 00002B08 + vpshufd xmm6, xmm7, 0 ; 1A18 _ C5 F9: 70. F7, 00 + vpxor xmm2, xmm2, xmm6 ; 1A1D _ C5 E9: EF. D6 + vpshufd xmm6, xmm5, 0 ; 1A21 _ C5 F9: 70. F5, 00 + vmovd xmm5, dword [ecx+2B0CH] ; 1A26 _ C5 F9: 6E. A9, 00002B0C + vpxor xmm0, xmm0, xmm6 ; 1A2E _ C5 F9: EF. C6 + vpshufd xmm7, xmm5, 0 ; 1A32 _ C5 F9: 70. FD, 00 + vpslld xmm5, xmm2, 7 ; 1A37 _ C5 D1: 72. F2, 07 + vpxor xmm6, xmm1, xmm7 ; 1A3C _ C5 F1: EF. F7 + vpslld xmm1, xmm0, 10 ; 1A40 _ C5 F1: 72. F0, 0A + vpsrld xmm0, xmm0, 22 ; 1A45 _ C5 F9: 72. D0, 16 + vpslld xmm7, xmm4, 27 ; 1A4A _ C5 C1: 72. F4, 1B + vpsrld xmm4, xmm4, 5 ; 1A4F _ C5 D9: 72. D4, 05 + vpor xmm0, xmm1, xmm0 ; 1A54 _ C5 F1: EB. C0 + vpor xmm4, xmm7, xmm4 ; 1A58 _ C5 C1: EB. E4 + vpxor xmm1, xmm0, xmm6 ; 1A5C _ C5 F9: EF. CE + vpxor xmm0, xmm4, xmm2 ; 1A60 _ C5 D9: EF. C2 + vpxor xmm1, xmm1, xmm5 ; 1A64 _ C5 F1: EF. CD + vpxor xmm0, xmm0, xmm6 ; 1A68 _ C5 F9: EF. C6 + vpslld xmm5, xmm6, 25 ; 1A6C _ C5 D1: 72. F6, 19 + vpsrld xmm6, xmm6, 7 ; 1A71 _ C5 C9: 72. D6, 07 + vpor xmm7, xmm5, xmm6 ; 1A76 _ C5 D1: EB. FE + vpslld xmm5, xmm2, 31 ; 1A7A _ C5 D1: 72. F2, 1F + vpsrld xmm2, xmm2, 1 ; 1A7F _ C5 E9: 72. D2, 01 + vpxor xmm4, xmm7, xmm1 ; 1A84 _ C5 C1: EF. E1 + vpslld xmm6, xmm0, 3 ; 1A88 _ C5 C9: 72. F0, 03 + vpor xmm2, xmm5, xmm2 ; 1A8D _ C5 D1: EB. D2 + vpxor xmm6, xmm4, xmm6 ; 1A91 _ C5 D9: EF. F6 + vpxor xmm4, xmm2, xmm0 ; 1A95 _ C5 E9: EF. E0 + vpxor xmm5, xmm4, xmm1 ; 1A99 _ C5 D9: EF. E9 + vpslld xmm7, xmm1, 29 ; 1A9D _ C5 C1: 72. F1, 1D + vpsrld xmm1, xmm1, 3 ; 1AA2 _ C5 F1: 72. D1, 03 + vpslld xmm2, xmm0, 19 ; 1AA7 _ C5 E9: 72. F0, 13 + vpsrld xmm0, xmm0, 13 ; 1AAC _ C5 F9: 72. D0, 0D + vpor xmm1, xmm7, xmm1 ; 1AB1 _ C5 C1: EB. C9 + vpor xmm4, xmm2, xmm0 ; 1AB5 _ C5 E9: EB. E0 + vpor xmm0, xmm1, xmm6 ; 1AB9 _ C5 F1: EB. C6 + vpxor xmm1, xmm1, xmm4 ; 1ABD _ C5 F1: EF. CC + vpand xmm7, xmm4, xmm6 ; 1AC1 _ C5 D9: DB. FE + vpxor xmm3, xmm1, xmm3 ; 1AC5 _ C5 F1: EF. DB + vpxor xmm4, xmm6, xmm5 ; 1AC9 _ C5 C9: EF. E5 + vpxor xmm1, xmm7, xmm3 ; 1ACD _ C5 C1: EF. CB + vpand xmm2, xmm3, xmm0 ; 1AD1 _ C5 E1: DB. D0 + vpor xmm6, xmm5, xmm7 ; 1AD5 _ C5 D1: EB. F7 + vpand xmm3, xmm4, xmm0 ; 1AD9 _ C5 D9: DB. D8 + vpxor xmm5, xmm6, xmm2 ; 1ADD _ C5 C9: EF. EA + vpxor xmm6, xmm2, xmm1 ; 1AE1 _ C5 E9: EF. F1 + vpxor xmm7, xmm0, xmm5 ; 1AE5 _ C5 F9: EF. FD + vpor xmm2, xmm1, xmm6 ; 1AE9 _ C5 F1: EB. D6 + vpxor xmm4, xmm2, xmm3 ; 1AED _ C5 E9: EF. E3 + vpxor xmm0, xmm3, xmm7 ; 1AF1 _ C5 E1: EF. C7 + vmovd xmm3, dword [ecx+2AF0H] ; 1AF5 _ C5 F9: 6E. 99, 00002AF0 + vpxor xmm1, xmm0, xmm6 ; 1AFD _ C5 F9: EF. CE + vpshufd xmm2, xmm3, 0 ; 1B01 _ C5 F9: 70. D3, 00 + vmovd xmm3, dword [ecx+2AF4H] ; 1B06 _ C5 F9: 6E. 99, 00002AF4 + vpxor xmm0, xmm1, xmm2 ; 1B0E _ C5 F1: EF. C2 + vmovd xmm2, dword [ecx+2AF8H] ; 1B12 _ C5 F9: 6E. 91, 00002AF8 + vpshufd xmm1, xmm3, 0 ; 1B1A _ C5 F9: 70. CB, 00 + vpxor xmm3, xmm4, xmm1 ; 1B1F _ C5 D9: EF. D9 + vpshufd xmm1, xmm2, 0 ; 1B23 _ C5 F9: 70. CA, 00 + vpxor xmm2, xmm5, xmm1 ; 1B28 _ C5 D1: EF. D1 + vpor xmm5, xmm7, xmm4 ; 1B2C _ C5 C1: EB. 
EC + vmovd xmm7, dword [ecx+2AFCH] ; 1B30 _ C5 F9: 6E. B9, 00002AFC + vpxor xmm4, xmm5, xmm6 ; 1B38 _ C5 D1: EF. E6 + vpshufd xmm1, xmm7, 0 ; 1B3C _ C5 F9: 70. CF, 00 + vpslld xmm5, xmm2, 10 ; 1B41 _ C5 D1: 72. F2, 0A + vpsrld xmm2, xmm2, 22 ; 1B46 _ C5 E9: 72. D2, 16 + vpxor xmm1, xmm4, xmm1 ; 1B4B _ C5 D9: EF. C9 + vpor xmm6, xmm5, xmm2 ; 1B4F _ C5 D1: EB. F2 + vpslld xmm4, xmm3, 7 ; 1B53 _ C5 D9: 72. F3, 07 + vpxor xmm2, xmm6, xmm1 ; 1B58 _ C5 C9: EF. D1 + vpslld xmm7, xmm0, 27 ; 1B5C _ C5 C1: 72. F0, 1B + vpsrld xmm0, xmm0, 5 ; 1B61 _ C5 F9: 72. D0, 05 + vpxor xmm2, xmm2, xmm4 ; 1B66 _ C5 E9: EF. D4 + vpor xmm4, xmm7, xmm0 ; 1B6A _ C5 C1: EB. E0 + vpslld xmm5, xmm1, 25 ; 1B6E _ C5 D1: 72. F1, 19 + vpxor xmm0, xmm4, xmm3 ; 1B73 _ C5 D9: EF. C3 + vpxor xmm4, xmm0, xmm1 ; 1B77 _ C5 F9: EF. E1 + vpsrld xmm1, xmm1, 7 ; 1B7B _ C5 F1: 72. D1, 07 + vpor xmm6, xmm5, xmm1 ; 1B80 _ C5 D1: EB. F1 + vpslld xmm0, xmm4, 3 ; 1B84 _ C5 F9: 72. F4, 03 + vpxor xmm7, xmm6, xmm2 ; 1B89 _ C5 C9: EF. FA + vpslld xmm1, xmm3, 31 ; 1B8D _ C5 F1: 72. F3, 1F + vpsrld xmm3, xmm3, 1 ; 1B92 _ C5 E1: 72. D3, 01 + vpxor xmm0, xmm7, xmm0 ; 1B97 _ C5 C1: EF. C0 + vpor xmm1, xmm1, xmm3 ; 1B9B _ C5 F1: EB. CB + vpxor xmm5, xmm1, xmm4 ; 1B9F _ C5 F1: EF. EC + vpslld xmm1, xmm2, 29 ; 1BA3 _ C5 F1: 72. F2, 1D + vpxor xmm7, xmm5, xmm2 ; 1BA8 _ C5 D1: EF. FA + vpsrld xmm2, xmm2, 3 ; 1BAC _ C5 E9: 72. D2, 03 + vpslld xmm3, xmm4, 19 ; 1BB1 _ C5 E1: 72. F4, 13 + vpsrld xmm4, xmm4, 13 ; 1BB6 _ C5 D9: 72. D4, 0D + vpor xmm6, xmm1, xmm2 ; 1BBB _ C5 F1: EB. F2 + vpor xmm5, xmm3, xmm4 ; 1BBF _ C5 E1: EB. EC + vpxor xmm3, xmm5, xmm6 ; 1BC3 _ C5 D1: EF. DE + vpxor xmm4, xmm0, xmm7 ; 1BC7 _ C5 F9: EF. E7 + vpand xmm1, xmm6, xmm3 ; 1BCB _ C5 C9: DB. CB + vpcmpeqd xmm2, xmm2, xmm2 ; 1BCF _ C5 E9: 76. D2 + vpxor xmm5, xmm1, xmm2 ; 1BD3 _ C5 F1: EF. EA + vpxor xmm6, xmm6, xmm0 ; 1BD7 _ C5 C9: EF. F0 + vpxor xmm5, xmm5, xmm4 ; 1BDB _ C5 D1: EF. EC + vpor xmm0, xmm6, xmm3 ; 1BDF _ C5 C9: EB. C3 + vpxor xmm1, xmm3, xmm5 ; 1BE3 _ C5 E1: EF. CD + vpxor xmm2, xmm4, xmm0 ; 1BE7 _ C5 D9: EF. D0 + vpand xmm6, xmm7, xmm2 ; 1BEB _ C5 C1: DB. F2 + vpxor xmm0, xmm0, xmm7 ; 1BEF _ C5 F9: EF. C7 + vpxor xmm7, xmm1, xmm2 ; 1BF3 _ C5 F1: EF. FA + vpxor xmm4, xmm6, xmm1 ; 1BF7 _ C5 C9: EF. E1 + vmovd xmm3, dword [ecx+2AE0H] ; 1BFB _ C5 F9: 6E. 99, 00002AE0 + vpor xmm1, xmm7, xmm5 ; 1C03 _ C5 C1: EB. CD + vpshufd xmm6, xmm3, 0 ; 1C07 _ C5 F9: 70. F3, 00 + vpxor xmm1, xmm0, xmm1 ; 1C0C _ C5 F9: EF. C9 + vmovd xmm0, dword [ecx+2AE8H] ; 1C10 _ C5 F9: 6E. 81, 00002AE8 + vpxor xmm3, xmm4, xmm6 ; 1C18 _ C5 D9: EF. DE + vmovd xmm6, dword [ecx+2AE4H] ; 1C1C _ C5 F9: 6E. B1, 00002AE4 + vpxor xmm4, xmm2, xmm4 ; 1C24 _ C5 E9: EF. E4 + vpshufd xmm0, xmm0, 0 ; 1C28 _ C5 F9: 70. C0, 00 + vmovd xmm2, dword [ecx+2AECH] ; 1C2D _ C5 F9: 6E. 91, 00002AEC + vpxor xmm7, xmm1, xmm0 ; 1C35 _ C5 F1: EF. F8 + vpshufd xmm6, xmm6, 0 ; 1C39 _ C5 F9: 70. F6, 00 + vpslld xmm1, xmm7, 10 ; 1C3E _ C5 F1: 72. F7, 0A + vpxor xmm6, xmm5, xmm6 ; 1C43 _ C5 D1: EF. F6 + vpsrld xmm7, xmm7, 22 ; 1C47 _ C5 C1: 72. D7, 16 + vpshufd xmm5, xmm2, 0 ; 1C4C _ C5 F9: 70. EA, 00 + vpor xmm1, xmm1, xmm7 ; 1C51 _ C5 F1: EB. CF + vpxor xmm0, xmm4, xmm5 ; 1C55 _ C5 D9: EF. C5 + vpslld xmm4, xmm3, 27 ; 1C59 _ C5 D9: 72. F3, 1B + vpsrld xmm3, xmm3, 5 ; 1C5E _ C5 E1: 72. D3, 05 + vpxor xmm2, xmm1, xmm0 ; 1C63 _ C5 F1: EF. D0 + vpslld xmm7, xmm6, 7 ; 1C67 _ C5 C1: 72. F6, 07 + vpor xmm1, xmm4, xmm3 ; 1C6C _ C5 D9: EB. CB + vpxor xmm5, xmm2, xmm7 ; 1C70 _ C5 E9: EF. EF + vpxor xmm2, xmm1, xmm6 ; 1C74 _ C5 F1: EF. 
D6 + vpxor xmm2, xmm2, xmm0 ; 1C78 _ C5 E9: EF. D0 + vpslld xmm3, xmm0, 25 ; 1C7C _ C5 E1: 72. F0, 19 + vpsrld xmm0, xmm0, 7 ; 1C81 _ C5 F9: 72. D0, 07 + vpslld xmm1, xmm2, 3 ; 1C86 _ C5 F1: 72. F2, 03 + vpor xmm7, xmm3, xmm0 ; 1C8B _ C5 E1: EB. F8 + vpslld xmm0, xmm2, 19 ; 1C8F _ C5 F9: 72. F2, 13 + vpxor xmm4, xmm7, xmm5 ; 1C94 _ C5 C1: EF. E5 + vpslld xmm7, xmm6, 31 ; 1C98 _ C5 C1: 72. F6, 1F + vpsrld xmm6, xmm6, 1 ; 1C9D _ C5 C9: 72. D6, 01 + vpsrld xmm3, xmm2, 13 ; 1CA2 _ C5 E1: 72. D2, 0D + vpor xmm6, xmm7, xmm6 ; 1CA7 _ C5 C1: EB. F6 + vpxor xmm1, xmm4, xmm1 ; 1CAB _ C5 D9: EF. C9 + vpxor xmm2, xmm6, xmm2 ; 1CAF _ C5 C9: EF. D2 + vpor xmm4, xmm0, xmm3 ; 1CB3 _ C5 F9: EB. E3 + vpxor xmm0, xmm2, xmm5 ; 1CB7 _ C5 E9: EF. C5 + vpslld xmm7, xmm5, 29 ; 1CBB _ C5 C1: 72. F5, 1D + vpcmpeqd xmm2, xmm2, xmm2 ; 1CC0 _ C5 E9: 76. D2 + vpsrld xmm5, xmm5, 3 ; 1CC4 _ C5 D1: 72. D5, 03 + vpxor xmm3, xmm0, xmm2 ; 1CC9 _ C5 F9: EF. DA + vpor xmm6, xmm7, xmm5 ; 1CCD _ C5 C1: EB. F5 + vpxor xmm7, xmm6, xmm3 ; 1CD1 _ C5 C9: EF. FB + vpor xmm0, xmm1, xmm4 ; 1CD5 _ C5 F1: EB. C4 + vpxor xmm0, xmm0, xmm7 ; 1CD9 _ C5 F9: EF. C7 + vpxor xmm6, xmm1, xmm0 ; 1CDD _ C5 F1: EF. F0 + vpor xmm1, xmm7, xmm3 ; 1CE1 _ C5 C1: EB. CB + vpand xmm5, xmm1, xmm4 ; 1CE5 _ C5 F1: DB. EC + vpxor xmm1, xmm5, xmm6 ; 1CE9 _ C5 D1: EF. CE + vpor xmm6, xmm6, xmm4 ; 1CED _ C5 C9: EB. F4 + vpand xmm7, xmm3, xmm1 ; 1CF1 _ C5 E1: DB. F9 + vpxor xmm3, xmm6, xmm3 ; 1CF5 _ C5 C9: EF. DB + vpxor xmm5, xmm7, xmm0 ; 1CF9 _ C5 C1: EF. E8 + vpxor xmm6, xmm3, xmm1 ; 1CFD _ C5 E1: EF. F1 + vmovd xmm3, dword [ecx+2AD0H] ; 1D01 _ C5 F9: 6E. 99, 00002AD0 + vpxor xmm7, xmm6, xmm5 ; 1D09 _ C5 C9: EF. FD + vpshufd xmm3, xmm3, 0 ; 1D0D _ C5 F9: 70. DB, 00 + vpand xmm0, xmm0, xmm6 ; 1D12 _ C5 F9: DB. C6 + vpxor xmm3, xmm5, xmm3 ; 1D16 _ C5 D1: EF. DB + vpxor xmm5, xmm7, xmm2 ; 1D1A _ C5 C1: EF. EA + vmovd xmm2, dword [ecx+2AD4H] ; 1D1E _ C5 F9: 6E. 91, 00002AD4 + vpshufd xmm2, xmm2, 0 ; 1D26 _ C5 F9: 70. D2, 00 + vpxor xmm2, xmm5, xmm2 ; 1D2B _ C5 D1: EF. D2 + vpxor xmm5, xmm0, xmm7 ; 1D2F _ C5 F9: EF. EF + vpxor xmm6, xmm5, xmm4 ; 1D33 _ C5 D1: EF. F4 + vmovd xmm4, dword [ecx+2AD8H] ; 1D37 _ C5 F9: 6E. A1, 00002AD8 + vmovd xmm7, dword [ecx+2ADCH] ; 1D3F _ C5 F9: 6E. B9, 00002ADC + vpshufd xmm0, xmm4, 0 ; 1D47 _ C5 F9: 70. C4, 00 + vpshufd xmm4, xmm7, 0 ; 1D4C _ C5 F9: 70. E7, 00 + vpxor xmm5, xmm6, xmm0 ; 1D51 _ C5 C9: EF. E8 + vpxor xmm7, xmm1, xmm4 ; 1D55 _ C5 F1: EF. FC + vpslld xmm1, xmm5, 10 ; 1D59 _ C5 F1: 72. F5, 0A + vpsrld xmm6, xmm5, 22 ; 1D5E _ C5 C9: 72. D5, 16 + vpslld xmm5, xmm3, 27 ; 1D63 _ C5 D1: 72. F3, 1B + vpor xmm1, xmm1, xmm6 ; 1D68 _ C5 F1: EB. CE + vpsrld xmm3, xmm3, 5 ; 1D6C _ C5 E1: 72. D3, 05 + vpxor xmm0, xmm1, xmm7 ; 1D71 _ C5 F1: EF. C7 + vpor xmm1, xmm5, xmm3 ; 1D75 _ C5 D1: EB. CB + vpslld xmm4, xmm2, 7 ; 1D79 _ C5 D9: 72. F2, 07 + vpxor xmm6, xmm1, xmm2 ; 1D7E _ C5 F1: EF. F2 + vpxor xmm4, xmm0, xmm4 ; 1D82 _ C5 F9: EF. E4 + vpxor xmm1, xmm6, xmm7 ; 1D86 _ C5 C9: EF. CF + vpslld xmm0, xmm7, 25 ; 1D8A _ C5 F9: 72. F7, 19 + vpsrld xmm7, xmm7, 7 ; 1D8F _ C5 C1: 72. D7, 07 + vpslld xmm5, xmm2, 31 ; 1D94 _ C5 D1: 72. F2, 1F + vpsrld xmm2, xmm2, 1 ; 1D99 _ C5 E9: 72. D2, 01 + vpor xmm6, xmm0, xmm7 ; 1D9E _ C5 F9: EB. F7 + vpor xmm2, xmm5, xmm2 ; 1DA2 _ C5 D1: EB. D2 + vpxor xmm0, xmm6, xmm4 ; 1DA6 _ C5 C9: EF. C4 + vpslld xmm3, xmm1, 3 ; 1DAA _ C5 E1: 72. F1, 03 + vpxor xmm6, xmm2, xmm1 ; 1DAF _ C5 E9: EF. F1 + vpxor xmm7, xmm0, xmm3 ; 1DB3 _ C5 F9: EF. FB + vpxor xmm3, xmm6, xmm4 ; 1DB7 _ C5 C9: EF. 
DC + vpslld xmm0, xmm4, 29 ; 1DBB _ C5 F9: 72. F4, 1D + vpsrld xmm4, xmm4, 3 ; 1DC0 _ C5 D9: 72. D4, 03 + vpslld xmm6, xmm1, 19 ; 1DC5 _ C5 C9: 72. F1, 13 + vpor xmm2, xmm0, xmm4 ; 1DCA _ C5 F9: EB. D4 + vpsrld xmm1, xmm1, 13 ; 1DCE _ C5 F1: 72. D1, 0D + vpand xmm4, xmm2, xmm7 ; 1DD3 _ C5 E9: DB. E7 + vpor xmm0, xmm6, xmm1 ; 1DD7 _ C5 C9: EB. C1 + vpxor xmm1, xmm4, xmm3 ; 1DDB _ C5 D9: EF. CB + vpor xmm3, xmm3, xmm7 ; 1DDF _ C5 E1: EB. DF + vpand xmm6, xmm3, xmm0 ; 1DE3 _ C5 E1: DB. F0 + vpxor xmm5, xmm2, xmm1 ; 1DE7 _ C5 E9: EF. E9 + vpxor xmm2, xmm5, xmm6 ; 1DEB _ C5 D1: EF. D6 + vpcmpeqd xmm3, xmm3, xmm3 ; 1DEF _ C5 E1: 76. DB + vpxor xmm4, xmm0, xmm3 ; 1DF3 _ C5 F9: EF. E3 + vpxor xmm3, xmm7, xmm2 ; 1DF7 _ C5 C1: EF. DA + vpand xmm7, xmm6, xmm1 ; 1DFB _ C5 C9: DB. F9 + vpxor xmm6, xmm7, xmm3 ; 1DFF _ C5 C1: EF. F3 + vpand xmm7, xmm3, xmm4 ; 1E03 _ C5 E1: DB. FC + vmovd xmm3, dword [ecx+2AC0H] ; 1E07 _ C5 F9: 6E. 99, 00002AC0 + vpxor xmm0, xmm4, xmm6 ; 1E0F _ C5 D9: EF. C6 + vpxor xmm4, xmm7, xmm1 ; 1E13 _ C5 C1: EF. E1 + vpand xmm1, xmm1, xmm0 ; 1E17 _ C5 F1: DB. C8 + vpshufd xmm7, xmm3, 0 ; 1E1B _ C5 F9: 70. FB, 00 + vpxor xmm5, xmm4, xmm0 ; 1E20 _ C5 D9: EF. E8 + vpxor xmm4, xmm0, xmm7 ; 1E24 _ C5 F9: EF. E7 + vpxor xmm3, xmm5, xmm0 ; 1E28 _ C5 D1: EF. D8 + vpxor xmm0, xmm1, xmm2 ; 1E2C _ C5 F1: EF. C2 + vpor xmm5, xmm0, xmm5 ; 1E30 _ C5 F9: EB. ED + vmovd xmm7, dword [ecx+2AC4H] ; 1E34 _ C5 F9: 6E. B9, 00002AC4 + vpxor xmm1, xmm5, xmm6 ; 1E3C _ C5 D1: EF. CE + vmovd xmm6, dword [ecx+2AC8H] ; 1E40 _ C5 F9: 6E. B1, 00002AC8 + vmovd xmm0, dword [ecx+2ACCH] ; 1E48 _ C5 F9: 6E. 81, 00002ACC + vpshufd xmm7, xmm7, 0 ; 1E50 _ C5 F9: 70. FF, 00 + vpshufd xmm6, xmm6, 0 ; 1E55 _ C5 F9: 70. F6, 00 + vpxor xmm3, xmm3, xmm7 ; 1E5A _ C5 E1: EF. DF + vpshufd xmm7, xmm0, 0 ; 1E5E _ C5 F9: 70. F8, 00 + vpxor xmm5, xmm1, xmm6 ; 1E63 _ C5 F1: EF. EE + vpxor xmm7, xmm2, xmm7 ; 1E67 _ C5 E9: EF. FF + vpslld xmm2, xmm5, 10 ; 1E6B _ C5 E9: 72. F5, 0A + vpsrld xmm1, xmm5, 22 ; 1E70 _ C5 F1: 72. D5, 16 + vpslld xmm5, xmm4, 27 ; 1E75 _ C5 D1: 72. F4, 1B + vpor xmm2, xmm2, xmm1 ; 1E7A _ C5 E9: EB. D1 + vpsrld xmm4, xmm4, 5 ; 1E7E _ C5 D9: 72. D4, 05 + vpxor xmm6, xmm2, xmm7 ; 1E83 _ C5 E9: EF. F7 + vpslld xmm0, xmm3, 7 ; 1E87 _ C5 F9: 72. F3, 07 + vpor xmm1, xmm5, xmm4 ; 1E8C _ C5 D1: EB. CC + vpxor xmm2, xmm6, xmm0 ; 1E90 _ C5 C9: EF. D0 + vpxor xmm6, xmm1, xmm3 ; 1E94 _ C5 F1: EF. F3 + vpslld xmm0, xmm7, 25 ; 1E98 _ C5 F9: 72. F7, 19 + vpxor xmm1, xmm6, xmm7 ; 1E9D _ C5 C9: EF. CF + vpsrld xmm7, xmm7, 7 ; 1EA1 _ C5 C1: 72. D7, 07 + vpor xmm4, xmm0, xmm7 ; 1EA6 _ C5 F9: EB. E7 + vpslld xmm6, xmm1, 3 ; 1EAA _ C5 C9: 72. F1, 03 + vpxor xmm5, xmm4, xmm2 ; 1EAF _ C5 D9: EF. EA + vpslld xmm0, xmm3, 31 ; 1EB3 _ C5 F9: 72. F3, 1F + vpsrld xmm3, xmm3, 1 ; 1EB8 _ C5 E1: 72. D3, 01 + vpxor xmm7, xmm5, xmm6 ; 1EBD _ C5 D1: EF. FE + vpor xmm6, xmm0, xmm3 ; 1EC1 _ C5 F9: EB. F3 + vpslld xmm3, xmm2, 29 ; 1EC5 _ C5 E1: 72. F2, 1D + vpxor xmm0, xmm6, xmm1 ; 1ECA _ C5 C9: EF. C1 + vpslld xmm4, xmm1, 19 ; 1ECE _ C5 D9: 72. F1, 13 + vpxor xmm5, xmm0, xmm2 ; 1ED3 _ C5 F9: EF. EA + vpsrld xmm2, xmm2, 3 ; 1ED7 _ C5 E9: 72. D2, 03 + vpor xmm6, xmm3, xmm2 ; 1EDC _ C5 E1: EB. F2 + vpsrld xmm1, xmm1, 13 ; 1EE0 _ C5 F1: 72. D1, 0D + vpxor xmm0, xmm6, xmm5 ; 1EE5 _ C5 C9: EF. C5 + vpor xmm1, xmm4, xmm1 ; 1EE9 _ C5 D9: EB. C9 + vpxor xmm1, xmm1, xmm0 ; 1EED _ C5 F1: EF. C8 + vpand xmm2, xmm6, xmm0 ; 1EF1 _ C5 C9: DB. D0 + vpxor xmm6, xmm2, xmm1 ; 1EF5 _ C5 E9: EF. F1 + vpand xmm3, xmm1, xmm5 ; 1EF9 _ C5 F1: DB. 
DD + vpor xmm2, xmm7, xmm6 ; 1EFD _ C5 C1: EB. D6 + vpxor xmm7, xmm5, xmm7 ; 1F01 _ C5 D1: EF. FF + vpxor xmm0, xmm0, xmm2 ; 1F05 _ C5 F9: EF. C2 + vpxor xmm1, xmm3, xmm2 ; 1F09 _ C5 E1: EF. CA + vpxor xmm3, xmm7, xmm6 ; 1F0D _ C5 C1: EF. DE + vpand xmm5, xmm2, xmm0 ; 1F11 _ C5 E9: DB. E8 + vpxor xmm2, xmm3, xmm1 ; 1F15 _ C5 E1: EF. D1 + vpxor xmm5, xmm5, xmm3 ; 1F19 _ C5 D1: EF. EB + vpor xmm7, xmm2, xmm0 ; 1F1D _ C5 E9: EB. F8 + vpxor xmm1, xmm1, xmm5 ; 1F21 _ C5 F1: EF. CD + vpxor xmm4, xmm7, xmm6 ; 1F25 _ C5 C1: EF. E6 + vmovd xmm6, dword [ecx+2AB0H] ; 1F29 _ C5 F9: 6E. B1, 00002AB0 + vpshufd xmm2, xmm6, 0 ; 1F31 _ C5 F9: 70. D6, 00 + vmovd xmm6, dword [ecx+2AB8H] ; 1F36 _ C5 F9: 6E. B1, 00002AB8 + vpxor xmm7, xmm0, xmm2 ; 1F3E _ C5 F9: EF. FA + vmovd xmm0, dword [ecx+2AB4H] ; 1F42 _ C5 F9: 6E. 81, 00002AB4 + vpshufd xmm2, xmm0, 0 ; 1F4A _ C5 F9: 70. D0, 00 + vpshufd xmm0, xmm6, 0 ; 1F4F _ C5 F9: 70. C6, 00 + vpxor xmm2, xmm4, xmm2 ; 1F54 _ C5 D9: EF. D2 + vpxor xmm0, xmm5, xmm0 ; 1F58 _ C5 D1: EF. C0 + vpxor xmm5, xmm1, xmm4 ; 1F5C _ C5 F1: EF. EC + vpslld xmm6, xmm0, 10 ; 1F60 _ C5 C9: 72. F0, 0A + vpsrld xmm0, xmm0, 22 ; 1F65 _ C5 F9: 72. D0, 16 + vmovd xmm4, dword [ecx+2ABCH] ; 1F6A _ C5 F9: 6E. A1, 00002ABC + vpor xmm3, xmm6, xmm0 ; 1F72 _ C5 C9: EB. D8 + vpslld xmm6, xmm7, 27 ; 1F76 _ C5 C9: 72. F7, 1B + vpsrld xmm7, xmm7, 5 ; 1F7B _ C5 C1: 72. D7, 05 + vpshufd xmm1, xmm4, 0 ; 1F80 _ C5 F9: 70. CC, 00 + vpor xmm6, xmm6, xmm7 ; 1F85 _ C5 C9: EB. F7 + vpxor xmm1, xmm5, xmm1 ; 1F89 _ C5 D1: EF. C9 + vpxor xmm0, xmm6, xmm2 ; 1F8D _ C5 C9: EF. C2 + vpxor xmm4, xmm3, xmm1 ; 1F91 _ C5 E1: EF. E1 + vpslld xmm5, xmm2, 7 ; 1F95 _ C5 D1: 72. F2, 07 + vpxor xmm6, xmm0, xmm1 ; 1F9A _ C5 F9: EF. F1 + vpslld xmm7, xmm1, 25 ; 1F9E _ C5 C1: 72. F1, 19 + vpsrld xmm1, xmm1, 7 ; 1FA3 _ C5 F1: 72. D1, 07 + vpxor xmm3, xmm4, xmm5 ; 1FA8 _ C5 D9: EF. DD + vpor xmm4, xmm7, xmm1 ; 1FAC _ C5 C1: EB. E1 + vpslld xmm1, xmm6, 3 ; 1FB0 _ C5 F1: 72. F6, 03 + vpxor xmm5, xmm4, xmm3 ; 1FB5 _ C5 D9: EF. EB + vpslld xmm7, xmm2, 31 ; 1FB9 _ C5 C1: 72. F2, 1F + vpsrld xmm2, xmm2, 1 ; 1FBE _ C5 E9: 72. D2, 01 + vpxor xmm0, xmm5, xmm1 ; 1FC3 _ C5 D1: EF. C1 + vpor xmm1, xmm7, xmm2 ; 1FC7 _ C5 C1: EB. CA + vpslld xmm4, xmm6, 19 ; 1FCB _ C5 D9: 72. F6, 13 + vpxor xmm2, xmm1, xmm6 ; 1FD0 _ C5 F1: EF. D6 + vpsrld xmm6, xmm6, 13 ; 1FD4 _ C5 C9: 72. D6, 0D + vpxor xmm2, xmm2, xmm3 ; 1FD9 _ C5 E9: EF. D3 + vpslld xmm5, xmm3, 29 ; 1FDD _ C5 D1: 72. F3, 1D + vpsrld xmm3, xmm3, 3 ; 1FE2 _ C5 E1: 72. D3, 03 + vpor xmm6, xmm4, xmm6 ; 1FE7 _ C5 D9: EB. F6 + vpor xmm1, xmm5, xmm3 ; 1FEB _ C5 D1: EB. CB + vpxor xmm7, xmm0, xmm6 ; 1FEF _ C5 F9: EF. FE + vpxor xmm1, xmm1, xmm0 ; 1FF3 _ C5 F1: EF. C8 + vpand xmm0, xmm7, xmm1 ; 1FF7 _ C5 C1: DB. C1 + vpxor xmm5, xmm0, xmm2 ; 1FFB _ C5 F9: EF. EA + vpor xmm2, xmm2, xmm1 ; 1FFF _ C5 E9: EB. D1 + vpxor xmm0, xmm2, xmm7 ; 2003 _ C5 E9: EF. C7 + vpand xmm7, xmm7, xmm5 ; 2007 _ C5 C1: DB. FD + vpcmpeqd xmm2, xmm2, xmm2 ; 200B _ C5 E9: 76. D2 + vpxor xmm4, xmm1, xmm5 ; 200F _ C5 F1: EF. E5 + vpand xmm1, xmm7, xmm6 ; 2013 _ C5 C1: DB. CE + vpxor xmm7, xmm5, xmm2 ; 2017 _ C5 D1: EF. FA + vmovd xmm5, dword [ecx+2AA0H] ; 201B _ C5 F9: 6E. A9, 00002AA0 + vpxor xmm3, xmm1, xmm4 ; 2023 _ C5 F1: EF. DC + vpshufd xmm1, xmm5, 0 ; 2027 _ C5 F9: 70. CD, 00 + vpand xmm4, xmm4, xmm0 ; 202C _ C5 D9: DB. E0 + vmovd xmm5, dword [ecx+2AA4H] ; 2030 _ C5 F9: 6E. A9, 00002AA4 + vpxor xmm2, xmm0, xmm1 ; 2038 _ C5 F9: EF. D1 + vpshufd xmm1, xmm5, 0 ; 203C _ C5 F9: 70. 
CD, 00 + vpxor xmm5, xmm3, xmm1 ; 2041 _ C5 E1: EF. E9 + vpor xmm1, xmm4, xmm6 ; 2045 _ C5 D9: EB. CE + vmovd xmm4, dword [ecx+2AA8H] ; 2049 _ C5 F9: 6E. A1, 00002AA8 + vpxor xmm6, xmm6, xmm7 ; 2051 _ C5 C9: EF. F7 + vpshufd xmm4, xmm4, 0 ; 2055 _ C5 F9: 70. E4, 00 + vpxor xmm1, xmm1, xmm7 ; 205A _ C5 F1: EF. CF + vpxor xmm3, xmm7, xmm3 ; 205E _ C5 C1: EF. DB + vpand xmm0, xmm6, xmm0 ; 2062 _ C5 C9: DB. C0 + vpxor xmm1, xmm1, xmm4 ; 2066 _ C5 F1: EF. CC + vpxor xmm6, xmm3, xmm0 ; 206A _ C5 E1: EF. F0 + vmovd xmm3, dword [ecx+2AACH] ; 206E _ C5 F9: 6E. 99, 00002AAC + vpshufd xmm0, xmm3, 0 ; 2076 _ C5 F9: 70. C3, 00 + vpslld xmm3, xmm1, 10 ; 207B _ C5 E1: 72. F1, 0A + vpsrld xmm1, xmm1, 22 ; 2080 _ C5 F1: 72. D1, 16 + vpxor xmm4, xmm6, xmm0 ; 2085 _ C5 C9: EF. E0 + vpor xmm7, xmm3, xmm1 ; 2089 _ C5 E1: EB. F9 + vpslld xmm0, xmm2, 27 ; 208D _ C5 F9: 72. F2, 1B + vpsrld xmm2, xmm2, 5 ; 2092 _ C5 E9: 72. D2, 05 + vpxor xmm1, xmm7, xmm4 ; 2097 _ C5 C1: EF. CC + vpslld xmm6, xmm5, 7 ; 209B _ C5 C9: 72. F5, 07 + vpor xmm7, xmm0, xmm2 ; 20A0 _ C5 F9: EB. FA + vpxor xmm3, xmm1, xmm6 ; 20A4 _ C5 F1: EF. DE + vpxor xmm1, xmm7, xmm5 ; 20A8 _ C5 C1: EF. CD + vpxor xmm0, xmm1, xmm4 ; 20AC _ C5 F1: EF. C4 + vpslld xmm2, xmm4, 25 ; 20B0 _ C5 E9: 72. F4, 19 + vpsrld xmm4, xmm4, 7 ; 20B5 _ C5 D9: 72. D4, 07 + vpslld xmm7, xmm5, 31 ; 20BA _ C5 C1: 72. F5, 1F + vpor xmm1, xmm2, xmm4 ; 20BF _ C5 E9: EB. CC + vpsrld xmm5, xmm5, 1 ; 20C3 _ C5 D1: 72. D5, 01 + vpxor xmm2, xmm1, xmm3 ; 20C8 _ C5 F1: EF. D3 + vpslld xmm6, xmm0, 3 ; 20CC _ C5 C9: 72. F0, 03 + vpor xmm1, xmm7, xmm5 ; 20D1 _ C5 C1: EB. CD + vpxor xmm4, xmm2, xmm6 ; 20D5 _ C5 E9: EF. E6 + vpxor xmm2, xmm1, xmm0 ; 20D9 _ C5 F1: EF. D0 + vpslld xmm7, xmm0, 19 ; 20DD _ C5 C1: 72. F0, 13 + vpxor xmm2, xmm2, xmm3 ; 20E2 _ C5 E9: EF. D3 + vpsrld xmm0, xmm0, 13 ; 20E6 _ C5 F9: 72. D0, 0D + vpslld xmm6, xmm3, 29 ; 20EB _ C5 C9: 72. F3, 1D + vpsrld xmm3, xmm3, 3 ; 20F0 _ C5 E1: 72. D3, 03 + vpor xmm5, xmm7, xmm0 ; 20F5 _ C5 C1: EB. E8 + vpxor xmm0, xmm2, xmm4 ; 20F9 _ C5 E9: EF. C4 + vpor xmm1, xmm6, xmm3 ; 20FD _ C5 C9: EB. CB + vpand xmm4, xmm4, xmm0 ; 2101 _ C5 D9: DB. E0 + vpxor xmm3, xmm2, xmm1 ; 2105 _ C5 E9: EF. D9 + vpxor xmm4, xmm4, xmm5 ; 2109 _ C5 D9: EF. E5 + vpor xmm5, xmm5, xmm0 ; 210D _ C5 D1: EB. E8 + vpxor xmm2, xmm1, xmm4 ; 2111 _ C5 F1: EF. D4 + vpxor xmm6, xmm5, xmm3 ; 2115 _ C5 D1: EF. F3 + vpxor xmm1, xmm0, xmm4 ; 2119 _ C5 F9: EF. CC + vpor xmm0, xmm6, xmm2 ; 211D _ C5 C9: EB. C2 + vpxor xmm6, xmm0, xmm1 ; 2121 _ C5 F9: EF. F1 + vpor xmm1, xmm1, xmm4 ; 2125 _ C5 F1: EB. CC + vpxor xmm5, xmm1, xmm6 ; 2129 _ C5 F1: EF. EE + vpcmpeqd xmm1, xmm1, xmm1 ; 212D _ C5 F1: 76. C9 + vmovd xmm0, dword [ecx+2A90H] ; 2131 _ C5 F9: 6E. 81, 00002A90 + vpxor xmm3, xmm3, xmm1 ; 2139 _ C5 E1: EF. D9 + vpxor xmm7, xmm3, xmm5 ; 213D _ C5 E1: EF. FD + vpor xmm5, xmm5, xmm6 ; 2141 _ C5 D1: EB. EE + vpshufd xmm3, xmm0, 0 ; 2145 _ C5 F9: 70. D8, 00 + vmovd xmm0, dword [ecx+2A94H] ; 214A _ C5 F9: 6E. 81, 00002A94 + vpxor xmm3, xmm7, xmm3 ; 2152 _ C5 C1: EF. DB + vpshufd xmm0, xmm0, 0 ; 2156 _ C5 F9: 70. C0, 00 + vpxor xmm0, xmm6, xmm0 ; 215B _ C5 C9: EF. C0 + vpxor xmm6, xmm5, xmm6 ; 215F _ C5 D1: EF. F6 + vpor xmm7, xmm6, xmm7 ; 2163 _ C5 C9: EB. FF + vpxor xmm6, xmm4, xmm7 ; 2167 _ C5 D9: EF. F7 + vmovd xmm4, dword [ecx+2A98H] ; 216B _ C5 F9: 6E. A1, 00002A98 + vpshufd xmm7, xmm4, 0 ; 2173 _ C5 F9: 70. FC, 00 + vmovd xmm4, dword [ecx+2A9CH] ; 2178 _ C5 F9: 6E. A1, 00002A9C + vpxor xmm7, xmm6, xmm7 ; 2180 _ C5 C9: EF. FF + vpshufd xmm5, xmm4, 0 ; 2184 _ C5 F9: 70. 
EC, 00 + vpsrld xmm6, xmm7, 22 ; 2189 _ C5 C9: 72. D7, 16 + vpxor xmm4, xmm2, xmm5 ; 218E _ C5 E9: EF. E5 + vpslld xmm2, xmm7, 10 ; 2192 _ C5 E9: 72. F7, 0A + vpor xmm2, xmm2, xmm6 ; 2197 _ C5 E9: EB. D6 + vpslld xmm6, xmm3, 27 ; 219B _ C5 C9: 72. F3, 1B + vpsrld xmm3, xmm3, 5 ; 21A0 _ C5 E1: 72. D3, 05 + vpxor xmm7, xmm2, xmm4 ; 21A5 _ C5 E9: EF. FC + vpor xmm6, xmm6, xmm3 ; 21A9 _ C5 C9: EB. F3 + vpslld xmm5, xmm0, 7 ; 21AD _ C5 D1: 72. F0, 07 + vpxor xmm3, xmm6, xmm0 ; 21B2 _ C5 C9: EF. D8 + vpxor xmm2, xmm7, xmm5 ; 21B6 _ C5 C1: EF. D5 + vpxor xmm3, xmm3, xmm4 ; 21BA _ C5 E1: EF. DC + vpslld xmm7, xmm4, 25 ; 21BE _ C5 C1: 72. F4, 19 + vpsrld xmm4, xmm4, 7 ; 21C3 _ C5 D9: 72. D4, 07 + vpslld xmm5, xmm0, 31 ; 21C8 _ C5 D1: 72. F0, 1F + vpor xmm6, xmm7, xmm4 ; 21CD _ C5 C1: EB. F4 + vpsrld xmm0, xmm0, 1 ; 21D1 _ C5 F9: 72. D0, 01 + vpxor xmm7, xmm6, xmm2 ; 21D6 _ C5 C9: EF. FA + vpor xmm6, xmm5, xmm0 ; 21DA _ C5 D1: EB. F0 + vpslld xmm4, xmm3, 3 ; 21DE _ C5 D9: 72. F3, 03 + vpxor xmm0, xmm6, xmm3 ; 21E3 _ C5 C9: EF. C3 + vpxor xmm4, xmm7, xmm4 ; 21E7 _ C5 C1: EF. E4 + vpxor xmm5, xmm0, xmm2 ; 21EB _ C5 F9: EF. EA + vpslld xmm7, xmm3, 19 ; 21EF _ C5 C1: 72. F3, 13 + vpsrld xmm3, xmm3, 13 ; 21F4 _ C5 E1: 72. D3, 0D + vpslld xmm0, xmm2, 29 ; 21F9 _ C5 F9: 72. F2, 1D + vpsrld xmm2, xmm2, 3 ; 21FE _ C5 E9: 72. D2, 03 + vpor xmm6, xmm7, xmm3 ; 2203 _ C5 C1: EB. F3 + vpor xmm2, xmm0, xmm2 ; 2207 _ C5 F9: EB. D2 + vpxor xmm7, xmm2, xmm1 ; 220B _ C5 E9: EF. F9 + vpxor xmm0, xmm5, xmm1 ; 220F _ C5 D1: EF. C1 + vpor xmm1, xmm5, xmm6 ; 2213 _ C5 D1: EB. CE + vpxor xmm5, xmm6, xmm0 ; 2217 _ C5 C9: EF. E8 + vpxor xmm1, xmm1, xmm7 ; 221B _ C5 F1: EF. CF + vpor xmm6, xmm7, xmm0 ; 221F _ C5 C1: EB. F0 + vpxor xmm1, xmm1, xmm4 ; 2223 _ C5 F1: EF. CC + vpxor xmm3, xmm6, xmm5 ; 2227 _ C5 C9: EF. DD + vpand xmm6, xmm5, xmm4 ; 222B _ C5 D1: DB. F4 + vpxor xmm2, xmm0, xmm6 ; 222F _ C5 F9: EF. D6 + vpor xmm0, xmm6, xmm1 ; 2233 _ C5 C9: EB. C1 + vpxor xmm7, xmm0, xmm3 ; 2237 _ C5 F9: EF. FB + vpxor xmm4, xmm4, xmm2 ; 223B _ C5 D9: EF. E2 + vpxor xmm6, xmm4, xmm7 ; 223F _ C5 D9: EF. F7 + vpxor xmm3, xmm3, xmm1 ; 2243 _ C5 E1: EF. D9 + vmovd xmm0, dword [ecx+2A80H] ; 2247 _ C5 F9: 6E. 81, 00002A80 + vpxor xmm6, xmm6, xmm1 ; 224F _ C5 C9: EF. F1 + vpshufd xmm4, xmm0, 0 ; 2253 _ C5 F9: 70. E0, 00 + vpand xmm5, xmm3, xmm6 ; 2258 _ C5 E1: DB. EE + vpxor xmm0, xmm7, xmm4 ; 225C _ C5 C1: EF. C4 + vpxor xmm3, xmm2, xmm5 ; 2260 _ C5 E9: EF. DD + vmovd xmm7, dword [ecx+2A84H] ; 2264 _ C5 F9: 6E. B9, 00002A84 + inc eax ; 226C _ 40 + vpshufd xmm2, xmm7, 0 ; 226D _ C5 F9: 70. D7, 00 + add esi, 64 ; 2272 _ 83. C6, 40 + vpxor xmm5, xmm3, xmm2 ; 2275 _ C5 E1: EF. EA + vmovd xmm3, dword [ecx+2A88H] ; 2279 _ C5 F9: 6E. 99, 00002A88 + vpshufd xmm4, xmm3, 0 ; 2281 _ C5 F9: 70. E3, 00 + vpxor xmm4, xmm1, xmm4 ; 2286 _ C5 F1: EF. E4 + vmovd xmm1, dword [ecx+2A8CH] ; 228A _ C5 F9: 6E. 89, 00002A8C + vpshufd xmm7, xmm1, 0 ; 2292 _ C5 F9: 70. F9, 00 + vpxor xmm6, xmm6, xmm7 ; 2297 _ C5 C9: EF. F7 + vpunpckldq xmm2, xmm0, xmm5 ; 229B _ C5 F9: 62. D5 + vpunpckldq xmm3, xmm4, xmm6 ; 229F _ C5 D9: 62. DE + vpunpckhdq xmm1, xmm0, xmm5 ; 22A3 _ C5 F9: 6A. CD + vpunpckhdq xmm0, xmm4, xmm6 ; 22A7 _ C5 D9: 6A. C6 + vpunpcklqdq xmm4, xmm2, xmm3 ; 22AB _ C5 E9: 6C. E3 + vpxor xmm5, xmm4, oword [esp+70H] ; 22AF _ C5 D9: EF. 6C 24, 70 + vmovdqu xmm4, oword [esp+60H] ; 22B5 _ C5 FA: 6F. 64 24, 60 + vpunpcklqdq xmm6, xmm1, xmm0 ; 22BB _ C5 F1: 6C. F0 + vpunpckhqdq xmm0, xmm1, xmm0 ; 22BF _ C5 F1: 6D. C0 + vpunpckhqdq xmm2, xmm2, xmm3 ; 22C3 _ C5 E9: 6D. 
D3 + vpxor xmm1, xmm0, xmm4 ; 22C7 _ C5 F9: EF. CC + vpslldq xmm0, xmm4, 8 ; 22CB _ C5 F9: 73. FC, 08 + vpxor xmm3, xmm2, oword [esp+50H] ; 22D0 _ C5 E9: EF. 5C 24, 50 + vpsllq xmm2, xmm4, 1 ; 22D6 _ C5 E9: 73. F4, 01 + vmovdqu oword [edx], xmm5 ; 22DB _ C5 FA: 7F. 2A + vmovdqu oword [edx+30H], xmm1 ; 22DF _ C5 FA: 7F. 4A, 30 + vmovdqu oword [edx+10H], xmm3 ; 22E4 _ C5 FA: 7F. 5A, 10 + vpsrldq xmm1, xmm0, 7 ; 22E9 _ C5 F1: 73. D8, 07 + vpsraw xmm5, xmm4, 8 ; 22EE _ C5 D1: 71. E4, 08 + vpxor xmm7, xmm6, oword [esp+40H] ; 22F3 _ C5 C9: EF. 7C 24, 40 + vpsrlq xmm3, xmm1, 7 ; 22F9 _ C5 E1: 73. D1, 07 + vpsrldq xmm6, xmm5, 15 ; 22FE _ C5 C9: 73. DD, 0F + vpand xmm0, xmm6, oword [esp+30H] ; 2303 _ C5 C9: DB. 44 24, 30 + vmovdqu oword [edx+20H], xmm7 ; 2309 _ C5 FA: 7F. 7A, 20 + vpor xmm7, xmm2, xmm3 ; 230E _ C5 E9: EB. FB + add edx, 64 ; 2312 _ 83. C2, 40 + vpxor xmm1, xmm7, xmm0 ; 2315 _ C5 C1: EF. C8 + cmp eax, 8 ; 2319 _ 83. F8, 08 + vmovdqu oword [esp+70H], xmm1 ; 231C _ C5 FA: 7F. 4C 24, 70 + jl ?_006 ; 2322 _ 0F 8C, FFFFDD72 + mov dword [esp+28H], esi ; 2328 _ 89. 74 24, 28 + mov esi, dword [esp+20H] ; 232C _ 8B. 74 24, 20 + vmovdqu xmm0, oword [esp+70H] ; 2330 _ C5 FA: 6F. 44 24, 70 + add esi, -512 ; 2336 _ 81. C6, FFFFFE00 + mov dword [esp+24H], edx ; 233C _ 89. 54 24, 24 + jne ?_004 ; 2340 _ 0F 85, FFFFDD1C + add esp, 132 ; 2346 _ 81. C4, 00000084 + pop ebx ; 234C _ 5B + pop edi ; 234D _ 5F + pop esi ; 234E _ 5E + mov esp, ebp ; 234F _ 8B. E5 + pop ebp ; 2351 _ 5D + ret 24 ; 2352 _ C2, 0018 +; _xts_serpent_avx_decrypt@24 End of function + +; Filling space: 0BH +; Filler type: Multi-byte NOP +; db 0FH, 1FH, 40H, 00H, 0FH, 1FH, 80H, 00H +; db 00H, 00H, 00H + +ALIGN 16 + + +_xts_serpent_avx_available@0:; Function begin (CPUID OSXSAVE+AVX bits, then XCR0 XMM/YMM state) + push ebx ; 0000 _ 53 + push esi ; 0001 _ 56 + mov dword [esp], 0 ; 0002 _ C7. 04 24, 00000000 + mov eax, 1 ; 0009 _ B8, 00000001 + cpuid ; 000E _ 0F A2 + and ecx, 18000000H ; 0010 _ 81. E1, 18000000 + cmp ecx, 402653184 ; 0016 _ 81. F9, 18000000 + jnz ?_008 ; 001C _ 75, 14 + xor ecx, ecx ; 001E _ 33. C9 +?_007: +; xgetbv (reads XCR0 when ecx = 0); this opcode was not in the +; disassembler's table, so it and the following instructions are emitted as raw db bytes +; xgetbv ; 0020 _ 0F 01. D0 + db 0FH, 01H, 0D0H +; and eax, 06H ; 0023 _ 83. E0, 06 + db 83H, 0E0H, 06H +; cmp eax, 6 ; 0026 _ 83. F8, 06 + db 83H, 0F8H, 06H +; jnz ?_008 ; 0029 _ 75, 07 + db 75H, 07H +; mov dword [esp], 1 ; 002B _ C7. 04 24, 00000001 + db 0C7H, 04H, 24H, 01H, 00H, 00H, 00H +?_008: mov eax, dword [esp] ; 0032 _ 8B. 04 24 + pop ecx ; 0035 _ 59 + pop ebx ; 0036 _ 5B + ret ; 0037 _ C3 +; _xts_serpent_avx_available@0 End of function + + diff --git a/ImBoxEnclave/crypto_fast/i386/xts_serpent_sse2_i386.asm b/ImBoxEnclave/crypto_fast/i386/xts_serpent_sse2_i386.asm new file mode 100644 index 0000000..6d8be95 --- /dev/null +++ b/ImBoxEnclave/crypto_fast/i386/xts_serpent_sse2_i386.asm @@ -0,0 +1,4609 @@ +; this code compiled with Intel C++ Compiler Version 11.1.835.200 +; +; Disassembly of file: xts_serpent_sse2.obj +; Mon May 10 08:12:48 2010 +; Mode: 32 bits +; Syntax: YASM/NASM +; Instruction set: SSE2 + + +global _xts_serpent_sse2_encrypt@24 +global _xts_serpent_sse2_decrypt@24 +global _xts_serpent_sse2_available@0 + +extern _serpent256_encrypt@12 ; near + +SECTION .text align=16 execute ; section number 2, code +; Communal section not supported by YASM + +
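; XTS flow, noted here for orientation (annotation added in review, not +; part of the original tool output): C_i = E_K1(P_i xor T_i) xor T_i, +; with T_0 = E_K2(unit number) and T_(i+1) = T_i * x over GF(2^128) +; modulo x^128 + x^7 + x^2 + x + 1. In the prologue below the unit +; number appears to be byte offset / 512 (the shr 9 / shl 23 pair), +; _serpent256_encrypt@12 with the key schedule at ebx+5710H produces T_0, +; and the psllq/pslldq/psrlq/psraw/pand runs double the tweak, folding +; the carry back in with the constant 135 (= 87H, i.e. x^7 + x^2 + x + 1). +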
_xts_serpent_sse2_encrypt@24:; Function begin + push ebp ; 0000 _ 55 + mov ebp, esp ; 0001 _ 8B. EC + and esp, 0FFFFFFF0H ; 0003 _ 83. E4, F0 + push edi ; 0006 _ 57 + push esi ; 0007 _ 56 + push ebx ; 0008 _ 53 + sub esp, 116 ; 0009 _ 83. EC, 74 + mov edx, dword [ebp+8H] ; 000C _ 8B. 55, 08 + mov eax, dword [ebp+0CH] ; 000F _ 8B. 45, 0C + mov esi, dword [ebp+18H] ; 0012 _ 8B. 75, 18 + mov ebx, dword [ebp+1CH] ; 0015 _ 8B. 5D, 1C + mov edi, dword [ebp+14H] ; 0018 _ 8B. 7D, 14 + mov dword [esp+24H], eax ; 001B _ 89. 44 24, 24 + mov ecx, esi ; 001F _ 8B. CE + mov dword [esp+28H], edx ; 0021 _ 89. 54 24, 28 + shl ecx, 23 ; 0025 _ C1. E1, 17 + shr esi, 9 ; 0028 _ C1. EE, 09 + mov dword [esp+4H], esi ; 002B _ 89. 74 24, 04 + mov esi, dword [ebp+10H] ; 002F _ 8B. 75, 10 + shr edi, 9 ; 0032 _ C1. EF, 09 + or ecx, edi ; 0035 _ 0B. CF + lea ebx, [ebx+5710H] ; 0037 _ 8D. 9B, 00005710 + mov dword [esp], ecx ; 003D _ 89. 0C 24 + xor ecx, ecx ; 0040 _ 33. C9 + mov dword [esp+8H], ecx ; 0042 _ 89. 4C 24, 08 + mov dword [esp+0CH], ecx ; 0046 _ 89. 4C 24, 0C + mov edi, 135 ; 004A _ BF, 00000087 + movd xmm1, edi ; 004F _ 66: 0F 6E. CF + movdqa oword [esp+30H], xmm1 ; 0053 _ 66: 0F 7F. 4C 24, 30 + lea edi, [esp+10H] ; 0059 _ 8D. 7C 24, 10 + jmp ?_002 ; 005D _ EB, 06 + +?_001: movdqa oword [esp+10H], xmm0 ; 005F _ 66: 0F 7F. 44 24, 10 +?_002: add dword [esp], 1 ; 0065 _ 83. 04 24, 01 + adc dword [esp+4H], 0 ; 0069 _ 83. 54 24, 04, 00 + push ebx ; 006E _ 53 + push edi ; 006F _ 57 + lea eax, [esp+8H] ; 0070 _ 8D. 44 24, 08 + push eax ; 0074 _ 50 + call _serpent256_encrypt@12 ; 0075 _ E8, 00000000(rel) + movdqa xmm0, oword [esp+10H] ; 007A _ 66: 0F 6F. 44 24, 10 + mov edx, dword [esp+24H] ; 0080 _ 8B. 54 24, 24 + mov ecx, dword [ebp+1CH] ; 0084 _ 8B. 4D, 1C + movdqa oword [esp+60H], xmm0 ; 0087 _ 66: 0F 7F. 44 24, 60 + mov dword [esp+20H], esi ; 008D _ 89. 74 24, 20 + mov esi, dword [esp+28H] ; 0091 _ 8B. 74 24, 28 + xor eax, eax ; 0095 _ 33. C0 + pcmpeqd xmm4, xmm4 ; 0097 _ 66: 0F 76. E4 +?_003: movdqa xmm5, oword [esp+60H] ; 009B _ 66: 0F 6F. 6C 24, 60 + movdqa xmm7, xmm5 ; 00A1 _ 66: 0F 6F. FD + movdqa xmm3, xmm5 ; 00A5 _ 66: 0F 6F. DD + movdqa xmm6, xmm5 ; 00A9 _ 66: 0F 6F. F5 + psllq xmm7, 1 ; 00AD _ 66: 0F 73. F7, 01 + pslldq xmm3, 8 ; 00B2 _ 66: 0F 73. FB, 08 + movdqa xmm1, oword [esp+30H] ; 00B7 _ 66: 0F 6F. 4C 24, 30 + psrldq xmm3, 7 ; 00BD _ 66: 0F 73. DB, 07 + psrlq xmm3, 7 ; 00C2 _ 66: 0F 73. D3, 07 + por xmm7, xmm3 ; 00C7 _ 66: 0F EB. FB + psraw xmm6, 8 ; 00CB _ 66: 0F 71. E6, 08 + psrldq xmm6, 15 ; 00D0 _ 66: 0F 73. DE, 0F + pand xmm6, xmm1 ; 00D5 _ 66: 0F DB. F1 + pxor xmm7, xmm6 ; 00D9 _ 66: 0F EF. FE + movdqa oword [esp+40H], xmm7 ; 00DD _ 66: 0F 7F. 7C 24, 40 + movdqa xmm2, xmm7 ; 00E3 _ 66: 0F 6F. D7 + movdqa xmm0, xmm7 ; 00E7 _ 66: 0F 6F. C7 + movdqa xmm3, xmm7 ; 00EB _ 66: 0F 6F. DF + psllq xmm2, 1 ; 00EF _ 66: 0F 73. F2, 01 + pslldq xmm0, 8 ; 00F4 _ 66: 0F 73. F8, 08 + psrldq xmm0, 7 ; 00F9 _ 66: 0F 73. D8, 07 + psrlq xmm0, 7 ; 00FE _ 66: 0F 73. D0, 07 + por xmm2, xmm0 ; 0103 _ 66: 0F EB. D0 + psraw xmm3, 8 ; 0107 _ 66: 0F 71. E3, 08 + psrldq xmm3, 15 ; 010C _ 66: 0F 73. DB, 0F + pand xmm3, xmm1 ; 0111 _ 66: 0F DB. D9 + pxor xmm2, xmm3 ; 0115 _ 66: 0F EF. D3 + movdqa oword [esp+50H], xmm2 ; 0119 _ 66: 0F 7F. 54 24, 50 + movdqa xmm3, xmm2 ; 011F _ 66: 0F 6F. DA + movdqa xmm6, xmm2 ; 0123 _ 66: 0F 6F. F2 + movdqa xmm0, xmm2 ; 0127 _ 66: 0F 6F. C2 + psllq xmm3, 1 ; 012B _ 66: 0F 73. F3, 01 + pslldq xmm6, 8 ; 0130 _ 66: 0F 73. FE, 08 + psrldq xmm6, 7 ; 0135 _ 66: 0F 73. DE, 07 + psrlq xmm6, 7 ; 013A _ 66: 0F 73. D6, 07 + por xmm3, xmm6 ; 013F _ 66: 0F EB. DE + movdqu xmm6, oword [esi+10H] ; 0143 _ F3: 0F 6F.
76, 10 + psraw xmm0, 8 ; 0148 _ 66: 0F 71. E0, 08 + psrldq xmm0, 15 ; 014D _ 66: 0F 73. D8, 0F + pand xmm0, xmm1 ; 0152 _ 66: 0F DB. C1 + movdqu xmm1, oword [esi+20H] ; 0156 _ F3: 0F 6F. 4E, 20 + pxor xmm3, xmm0 ; 015B _ 66: 0F EF. D8 + movdqu xmm0, oword [esi] ; 015F _ F3: 0F 6F. 06 + pxor xmm6, xmm7 ; 0163 _ 66: 0F EF. F7 + pxor xmm1, xmm2 ; 0167 _ 66: 0F EF. CA + movdqa xmm7, xmm1 ; 016B _ 66: 0F 6F. F9 + movdqu xmm2, oword [esi+30H] ; 016F _ F3: 0F 6F. 56, 30 + pxor xmm0, xmm5 ; 0174 _ 66: 0F EF. C5 + movdqa xmm5, xmm0 ; 0178 _ 66: 0F 6F. E8 + pxor xmm2, xmm3 ; 017C _ 66: 0F EF. D3 + punpckldq xmm7, xmm2 ; 0180 _ 66: 0F 62. FA + punpckldq xmm5, xmm6 ; 0184 _ 66: 0F 62. EE + punpckhdq xmm0, xmm6 ; 0188 _ 66: 0F 6A. C6 + movdqa xmm6, xmm0 ; 018C _ 66: 0F 6F. F0 + punpckhdq xmm1, xmm2 ; 0190 _ 66: 0F 6A. CA + movdqa xmm2, xmm5 ; 0194 _ 66: 0F 6F. D5 + punpckhqdq xmm5, xmm7 ; 0198 _ 66: 0F 6D. EF + punpcklqdq xmm6, xmm1 ; 019C _ 66: 0F 6C. F1 + punpcklqdq xmm2, xmm7 ; 01A0 _ 66: 0F 6C. D7 + movd xmm7, dword [ecx+2A84H] ; 01A4 _ 66: 0F 6E. B9, 00002A84 + punpckhqdq xmm0, xmm1 ; 01AC _ 66: 0F 6D. C1 + movd xmm1, dword [ecx+2A80H] ; 01B0 _ 66: 0F 6E. 89, 00002A80 + pshufd xmm1, xmm1, 0 ; 01B8 _ 66: 0F 70. C9, 00 + pxor xmm2, xmm1 ; 01BD _ 66: 0F EF. D1 + pshufd xmm1, xmm7, 0 ; 01C1 _ 66: 0F 70. CF, 00 + pxor xmm5, xmm1 ; 01C6 _ 66: 0F EF. E9 + movd xmm7, dword [ecx+2A88H] ; 01CA _ 66: 0F 6E. B9, 00002A88 + pshufd xmm1, xmm7, 0 ; 01D2 _ 66: 0F 70. CF, 00 + movd xmm7, dword [ecx+2A8CH] ; 01D7 _ 66: 0F 6E. B9, 00002A8C + pxor xmm6, xmm1 ; 01DF _ 66: 0F EF. F1 + pshufd xmm1, xmm7, 0 ; 01E3 _ 66: 0F 70. CF, 00 + movdqa xmm7, xmm5 ; 01E8 _ 66: 0F 6F. FD + pxor xmm0, xmm1 ; 01EC _ 66: 0F EF. C1 + pxor xmm0, xmm2 ; 01F0 _ 66: 0F EF. C2 + pand xmm7, xmm0 ; 01F4 _ 66: 0F DB. F8 + pxor xmm5, xmm6 ; 01F8 _ 66: 0F EF. EE + pxor xmm7, xmm2 ; 01FC _ 66: 0F EF. FA + por xmm2, xmm0 ; 0200 _ 66: 0F EB. D0 + pxor xmm2, xmm5 ; 0204 _ 66: 0F EF. D5 + pxor xmm5, xmm0 ; 0208 _ 66: 0F EF. E8 + pxor xmm0, xmm6 ; 020C _ 66: 0F EF. C6 + por xmm6, xmm7 ; 0210 _ 66: 0F EB. F7 + pxor xmm6, xmm5 ; 0214 _ 66: 0F EF. F5 + pxor xmm5, xmm4 ; 0218 _ 66: 0F EF. EC + por xmm5, xmm7 ; 021C _ 66: 0F EB. EF + pxor xmm7, xmm0 ; 0220 _ 66: 0F EF. F8 + pxor xmm7, xmm5 ; 0224 _ 66: 0F EF. FD + por xmm0, xmm2 ; 0228 _ 66: 0F EB. C2 + pxor xmm7, xmm0 ; 022C _ 66: 0F EF. F8 + movdqa xmm1, xmm7 ; 0230 _ 66: 0F 6F. CF + pxor xmm5, xmm0 ; 0234 _ 66: 0F EF. E8 + psrld xmm7, 19 ; 0238 _ 66: 0F 72. D7, 13 + pslld xmm1, 13 ; 023D _ 66: 0F 72. F1, 0D + por xmm1, xmm7 ; 0242 _ 66: 0F EB. CF + movdqa xmm7, xmm6 ; 0246 _ 66: 0F 6F. FE + psrld xmm6, 29 ; 024A _ 66: 0F 72. D6, 1D + pxor xmm5, xmm1 ; 024F _ 66: 0F EF. E9 + pslld xmm7, 3 ; 0253 _ 66: 0F 72. F7, 03 + por xmm7, xmm6 ; 0258 _ 66: 0F EB. FE + movdqa xmm6, xmm1 ; 025C _ 66: 0F 6F. F1 + pxor xmm5, xmm7 ; 0260 _ 66: 0F EF. EF + movdqa xmm0, xmm5 ; 0264 _ 66: 0F 6F. C5 + pxor xmm2, xmm7 ; 0268 _ 66: 0F EF. D7 + pslld xmm6, 3 ; 026C _ 66: 0F 72. F6, 03 + pxor xmm2, xmm6 ; 0271 _ 66: 0F EF. D6 + movdqa xmm6, xmm2 ; 0275 _ 66: 0F 6F. F2 + pslld xmm0, 1 ; 0279 _ 66: 0F 72. F0, 01 + psrld xmm5, 31 ; 027E _ 66: 0F 72. D5, 1F + por xmm0, xmm5 ; 0283 _ 66: 0F EB. C5 + movdqa xmm5, xmm0 ; 0287 _ 66: 0F 6F. E8 + pslld xmm6, 7 ; 028B _ 66: 0F 72. F6, 07 + psrld xmm2, 25 ; 0290 _ 66: 0F 72. D2, 19 + por xmm6, xmm2 ; 0295 _ 66: 0F EB. F2 + pxor xmm1, xmm0 ; 0299 _ 66: 0F EF. C8 + pxor xmm1, xmm6 ; 029D _ 66: 0F EF. CE + pxor xmm7, xmm6 ; 02A1 _ 66: 0F EF. FE + pslld xmm5, 7 ; 02A5 _ 66: 0F 72. 
F5, 07 + pxor xmm7, xmm5 ; 02AA _ 66: 0F EF. FD + movdqa xmm5, xmm1 ; 02AE _ 66: 0F 6F. E9 + movdqa xmm2, xmm7 ; 02B2 _ 66: 0F 6F. D7 + psrld xmm1, 27 ; 02B6 _ 66: 0F 72. D1, 1B + pslld xmm5, 5 ; 02BB _ 66: 0F 72. F5, 05 + por xmm5, xmm1 ; 02C0 _ 66: 0F EB. E9 + movd xmm1, dword [ecx+2A90H] ; 02C4 _ 66: 0F 6E. 89, 00002A90 + pslld xmm2, 22 ; 02CC _ 66: 0F 72. F2, 16 + psrld xmm7, 10 ; 02D1 _ 66: 0F 72. D7, 0A + por xmm2, xmm7 ; 02D6 _ 66: 0F EB. D7 + pshufd xmm7, xmm1, 0 ; 02DA _ 66: 0F 70. F9, 00 + pxor xmm5, xmm7 ; 02DF _ 66: 0F EF. EF + pxor xmm5, xmm4 ; 02E3 _ 66: 0F EF. EC + movd xmm1, dword [ecx+2A94H] ; 02E7 _ 66: 0F 6E. 89, 00002A94 + pshufd xmm7, xmm1, 0 ; 02EF _ 66: 0F 70. F9, 00 + pxor xmm0, xmm7 ; 02F4 _ 66: 0F EF. C7 + movd xmm1, dword [ecx+2A98H] ; 02F8 _ 66: 0F 6E. 89, 00002A98 + pshufd xmm7, xmm1, 0 ; 0300 _ 66: 0F 70. F9, 00 + pxor xmm2, xmm7 ; 0305 _ 66: 0F EF. D7 + pxor xmm2, xmm4 ; 0309 _ 66: 0F EF. D4 + movd xmm1, dword [ecx+2A9CH] ; 030D _ 66: 0F 6E. 89, 00002A9C + pshufd xmm7, xmm1, 0 ; 0315 _ 66: 0F 70. F9, 00 + movdqa xmm1, xmm5 ; 031A _ 66: 0F 6F. CD + pxor xmm6, xmm7 ; 031E _ 66: 0F EF. F7 + pand xmm1, xmm0 ; 0322 _ 66: 0F DB. C8 + pxor xmm2, xmm1 ; 0326 _ 66: 0F EF. D1 + por xmm1, xmm6 ; 032A _ 66: 0F EB. CE + pxor xmm6, xmm2 ; 032E _ 66: 0F EF. F2 + pxor xmm0, xmm1 ; 0332 _ 66: 0F EF. C1 + pxor xmm1, xmm5 ; 0336 _ 66: 0F EF. CD + por xmm5, xmm0 ; 033A _ 66: 0F EB. E8 + pxor xmm0, xmm6 ; 033E _ 66: 0F EF. C6 + por xmm2, xmm1 ; 0342 _ 66: 0F EB. D1 + pand xmm2, xmm5 ; 0346 _ 66: 0F DB. D5 + pxor xmm1, xmm0 ; 034A _ 66: 0F EF. C8 + pand xmm0, xmm2 ; 034E _ 66: 0F DB. C2 + pxor xmm0, xmm1 ; 0352 _ 66: 0F EF. C1 + pand xmm1, xmm2 ; 0356 _ 66: 0F DB. CA + pxor xmm5, xmm1 ; 035A _ 66: 0F EF. E9 + movdqa xmm1, xmm2 ; 035E _ 66: 0F 6F. CA + psrld xmm2, 19 ; 0362 _ 66: 0F 72. D2, 13 + pslld xmm1, 13 ; 0367 _ 66: 0F 72. F1, 0D + por xmm1, xmm2 ; 036C _ 66: 0F EB. CA + movdqa xmm2, xmm6 ; 0370 _ 66: 0F 6F. D6 + psrld xmm6, 29 ; 0374 _ 66: 0F 72. D6, 1D + pxor xmm5, xmm1 ; 0379 _ 66: 0F EF. E9 + pslld xmm2, 3 ; 037D _ 66: 0F 72. F2, 03 + por xmm2, xmm6 ; 0382 _ 66: 0F EB. D6 + movdqa xmm6, xmm1 ; 0386 _ 66: 0F 6F. F1 + pxor xmm5, xmm2 ; 038A _ 66: 0F EF. EA + pxor xmm0, xmm2 ; 038E _ 66: 0F EF. C2 + pslld xmm6, 3 ; 0392 _ 66: 0F 72. F6, 03 + pxor xmm0, xmm6 ; 0397 _ 66: 0F EF. C6 + movdqa xmm6, xmm5 ; 039B _ 66: 0F 6F. F5 + psrld xmm5, 31 ; 039F _ 66: 0F 72. D5, 1F + pslld xmm6, 1 ; 03A4 _ 66: 0F 72. F6, 01 + por xmm6, xmm5 ; 03A9 _ 66: 0F EB. F5 + movdqa xmm5, xmm0 ; 03AD _ 66: 0F 6F. E8 + psrld xmm0, 25 ; 03B1 _ 66: 0F 72. D0, 19 + pxor xmm1, xmm6 ; 03B6 _ 66: 0F EF. CE + pslld xmm5, 7 ; 03BA _ 66: 0F 72. F5, 07 + por xmm5, xmm0 ; 03BF _ 66: 0F EB. E8 + movdqa xmm0, xmm6 ; 03C3 _ 66: 0F 6F. C6 + pxor xmm1, xmm5 ; 03C7 _ 66: 0F EF. CD + movdqa xmm7, xmm1 ; 03CB _ 66: 0F 6F. F9 + pxor xmm2, xmm5 ; 03CF _ 66: 0F EF. D5 + pslld xmm0, 7 ; 03D3 _ 66: 0F 72. F0, 07 + pxor xmm2, xmm0 ; 03D8 _ 66: 0F EF. D0 + pslld xmm7, 5 ; 03DC _ 66: 0F 72. F7, 05 + psrld xmm1, 27 ; 03E1 _ 66: 0F 72. D1, 1B + movd xmm0, dword [ecx+2AA4H] ; 03E6 _ 66: 0F 6E. 81, 00002AA4 + por xmm7, xmm1 ; 03EE _ 66: 0F EB. F9 + movdqa xmm1, xmm2 ; 03F2 _ 66: 0F 6F. CA + psrld xmm2, 10 ; 03F6 _ 66: 0F 72. D2, 0A + pslld xmm1, 22 ; 03FB _ 66: 0F 72. F1, 16 + por xmm1, xmm2 ; 0400 _ 66: 0F EB. CA + movd xmm2, dword [ecx+2AA0H] ; 0404 _ 66: 0F 6E. 91, 00002AA0 + pshufd xmm2, xmm2, 0 ; 040C _ 66: 0F 70. D2, 00 + pxor xmm7, xmm2 ; 0411 _ 66: 0F EF. FA + pshufd xmm2, xmm0, 0 ; 0415 _ 66: 0F 70. 
D0, 00 + pxor xmm6, xmm2 ; 041A _ 66: 0F EF. F2 + movd xmm0, dword [ecx+2AA8H] ; 041E _ 66: 0F 6E. 81, 00002AA8 + pshufd xmm2, xmm0, 0 ; 0426 _ 66: 0F 70. D0, 00 + pxor xmm1, xmm2 ; 042B _ 66: 0F EF. CA + movd xmm0, dword [ecx+2AACH] ; 042F _ 66: 0F 6E. 81, 00002AAC + pshufd xmm2, xmm0, 0 ; 0437 _ 66: 0F 70. D0, 00 + movdqa xmm0, xmm7 ; 043C _ 66: 0F 6F. C7 + pxor xmm5, xmm2 ; 0440 _ 66: 0F EF. EA + pand xmm0, xmm1 ; 0444 _ 66: 0F DB. C1 + pxor xmm0, xmm5 ; 0448 _ 66: 0F EF. C5 + pxor xmm1, xmm6 ; 044C _ 66: 0F EF. CE + pxor xmm1, xmm0 ; 0450 _ 66: 0F EF. C8 + por xmm5, xmm7 ; 0454 _ 66: 0F EB. EF + pxor xmm5, xmm6 ; 0458 _ 66: 0F EF. EE + movdqa xmm2, xmm5 ; 045C _ 66: 0F 6F. D5 + movdqa xmm6, xmm1 ; 0460 _ 66: 0F 6F. F1 + pxor xmm7, xmm1 ; 0464 _ 66: 0F EF. F9 + por xmm2, xmm7 ; 0468 _ 66: 0F EB. D7 + pxor xmm2, xmm0 ; 046C _ 66: 0F EF. D0 + pand xmm0, xmm5 ; 0470 _ 66: 0F DB. C5 + pxor xmm7, xmm0 ; 0474 _ 66: 0F EF. F8 + pxor xmm5, xmm2 ; 0478 _ 66: 0F EF. EA + pxor xmm5, xmm7 ; 047C _ 66: 0F EF. EF + movdqa xmm0, xmm5 ; 0480 _ 66: 0F 6F. C5 + pxor xmm7, xmm4 ; 0484 _ 66: 0F EF. FC + pslld xmm6, 13 ; 0488 _ 66: 0F 72. F6, 0D + psrld xmm1, 19 ; 048D _ 66: 0F 72. D1, 13 + por xmm6, xmm1 ; 0492 _ 66: 0F EB. F1 + pslld xmm0, 3 ; 0496 _ 66: 0F 72. F0, 03 + psrld xmm5, 29 ; 049B _ 66: 0F 72. D5, 1D + por xmm0, xmm5 ; 04A0 _ 66: 0F EB. C5 + movdqa xmm5, xmm6 ; 04A4 _ 66: 0F 6F. EE + pxor xmm2, xmm6 ; 04A8 _ 66: 0F EF. D6 + pxor xmm2, xmm0 ; 04AC _ 66: 0F EF. D0 + movdqa xmm1, xmm2 ; 04B0 _ 66: 0F 6F. CA + pxor xmm7, xmm0 ; 04B4 _ 66: 0F EF. F8 + pslld xmm5, 3 ; 04B8 _ 66: 0F 72. F5, 03 + pxor xmm7, xmm5 ; 04BD _ 66: 0F EF. FD + movdqa xmm5, xmm7 ; 04C1 _ 66: 0F 6F. EF + pslld xmm1, 1 ; 04C5 _ 66: 0F 72. F1, 01 + psrld xmm2, 31 ; 04CA _ 66: 0F 72. D2, 1F + por xmm1, xmm2 ; 04CF _ 66: 0F EB. CA + pslld xmm5, 7 ; 04D3 _ 66: 0F 72. F5, 07 + psrld xmm7, 25 ; 04D8 _ 66: 0F 72. D7, 19 + por xmm5, xmm7 ; 04DD _ 66: 0F EB. EF + movdqa xmm7, xmm1 ; 04E1 _ 66: 0F 6F. F9 + pxor xmm6, xmm1 ; 04E5 _ 66: 0F EF. F1 + pxor xmm6, xmm5 ; 04E9 _ 66: 0F EF. F5 + movdqa xmm2, xmm6 ; 04ED _ 66: 0F 6F. D6 + pxor xmm0, xmm5 ; 04F1 _ 66: 0F EF. C5 + pslld xmm7, 7 ; 04F5 _ 66: 0F 72. F7, 07 + pxor xmm0, xmm7 ; 04FA _ 66: 0F EF. C7 + pslld xmm2, 5 ; 04FE _ 66: 0F 72. F2, 05 + psrld xmm6, 27 ; 0503 _ 66: 0F 72. D6, 1B + movd xmm7, dword [ecx+2AB0H] ; 0508 _ 66: 0F 6E. B9, 00002AB0 + por xmm2, xmm6 ; 0510 _ 66: 0F EB. D6 + movdqa xmm6, xmm0 ; 0514 _ 66: 0F 6F. F0 + psrld xmm0, 10 ; 0518 _ 66: 0F 72. D0, 0A + pslld xmm6, 22 ; 051D _ 66: 0F 72. F6, 16 + por xmm6, xmm0 ; 0522 _ 66: 0F EB. F0 + pshufd xmm0, xmm7, 0 ; 0526 _ 66: 0F 70. C7, 00 + movd xmm7, dword [ecx+2AB4H] ; 052B _ 66: 0F 6E. B9, 00002AB4 + pxor xmm2, xmm0 ; 0533 _ 66: 0F EF. D0 + pshufd xmm0, xmm7, 0 ; 0537 _ 66: 0F 70. C7, 00 + pxor xmm1, xmm0 ; 053C _ 66: 0F EF. C8 + movd xmm7, dword [ecx+2AB8H] ; 0540 _ 66: 0F 6E. B9, 00002AB8 + pshufd xmm0, xmm7, 0 ; 0548 _ 66: 0F 70. C7, 00 + pxor xmm6, xmm0 ; 054D _ 66: 0F EF. F0 + movd xmm7, dword [ecx+2ABCH] ; 0551 _ 66: 0F 6E. B9, 00002ABC + pshufd xmm0, xmm7, 0 ; 0559 _ 66: 0F 70. C7, 00 + pxor xmm5, xmm0 ; 055E _ 66: 0F EF. E8 + movdqa xmm0, xmm2 ; 0562 _ 66: 0F 6F. C2 + por xmm0, xmm5 ; 0566 _ 66: 0F EB. C5 + pxor xmm5, xmm1 ; 056A _ 66: 0F EF. E9 + pand xmm1, xmm2 ; 056E _ 66: 0F DB. CA + pxor xmm2, xmm6 ; 0572 _ 66: 0F EF. D6 + pxor xmm6, xmm5 ; 0576 _ 66: 0F EF. F5 + pand xmm5, xmm0 ; 057A _ 66: 0F DB. E8 + por xmm2, xmm1 ; 057E _ 66: 0F EB. D1 + pxor xmm5, xmm2 ; 0582 _ 66: 0F EF. 
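+ ; xmm4 is read but never written in this stretch, and it is XORed in
+ ; exactly where a bitsliced S-box needs a NOT; it is presumably the
+ ; all-ones constant prepared before this chunk of the listing.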
EA + pxor xmm0, xmm1 ; 0586 _ 66: 0F EF. C1 + pand xmm2, xmm0 ; 058A _ 66: 0F DB. D0 + pxor xmm1, xmm5 ; 058E _ 66: 0F EF. CD + pxor xmm2, xmm6 ; 0592 _ 66: 0F EF. D6 + por xmm1, xmm0 ; 0596 _ 66: 0F EB. C8 + pxor xmm1, xmm6 ; 059A _ 66: 0F EF. CE + movdqa xmm6, xmm1 ; 059E _ 66: 0F 6F. F1 + pxor xmm0, xmm5 ; 05A2 _ 66: 0F EF. C5 + por xmm6, xmm5 ; 05A6 _ 66: 0F EB. F5 + pxor xmm0, xmm6 ; 05AA _ 66: 0F EF. C6 + movdqa xmm7, xmm0 ; 05AE _ 66: 0F 6F. F8 + psrld xmm0, 19 ; 05B2 _ 66: 0F 72. D0, 13 + pslld xmm7, 13 ; 05B7 _ 66: 0F 72. F7, 0D + por xmm7, xmm0 ; 05BC _ 66: 0F EB. F8 + movdqa xmm0, xmm5 ; 05C0 _ 66: 0F 6F. C5 + psrld xmm5, 29 ; 05C4 _ 66: 0F 72. D5, 1D + pxor xmm1, xmm7 ; 05C9 _ 66: 0F EF. CF + pslld xmm0, 3 ; 05CD _ 66: 0F 72. F0, 03 + por xmm0, xmm5 ; 05D2 _ 66: 0F EB. C5 + movdqa xmm5, xmm7 ; 05D6 _ 66: 0F 6F. EF + pxor xmm1, xmm0 ; 05DA _ 66: 0F EF. C8 + movdqa xmm6, xmm1 ; 05DE _ 66: 0F 6F. F1 + pxor xmm2, xmm0 ; 05E2 _ 66: 0F EF. D0 + pslld xmm5, 3 ; 05E6 _ 66: 0F 72. F5, 03 + pxor xmm2, xmm5 ; 05EB _ 66: 0F EF. D5 + movdqa xmm5, xmm2 ; 05EF _ 66: 0F 6F. EA + pslld xmm6, 1 ; 05F3 _ 66: 0F 72. F6, 01 + psrld xmm1, 31 ; 05F8 _ 66: 0F 72. D1, 1F + por xmm6, xmm1 ; 05FD _ 66: 0F EB. F1 + movdqa xmm1, xmm6 ; 0601 _ 66: 0F 6F. CE + pslld xmm5, 7 ; 0605 _ 66: 0F 72. F5, 07 + psrld xmm2, 25 ; 060A _ 66: 0F 72. D2, 19 + por xmm5, xmm2 ; 060F _ 66: 0F EB. EA + pxor xmm7, xmm6 ; 0613 _ 66: 0F EF. FE + pxor xmm7, xmm5 ; 0617 _ 66: 0F EF. FD + pxor xmm0, xmm5 ; 061B _ 66: 0F EF. C5 + pslld xmm1, 7 ; 061F _ 66: 0F 72. F1, 07 + pxor xmm0, xmm1 ; 0624 _ 66: 0F EF. C1 + movdqa xmm1, xmm7 ; 0628 _ 66: 0F 6F. CF + movdqa xmm2, xmm0 ; 062C _ 66: 0F 6F. D0 + psrld xmm7, 27 ; 0630 _ 66: 0F 72. D7, 1B + pslld xmm1, 5 ; 0635 _ 66: 0F 72. F1, 05 + por xmm1, xmm7 ; 063A _ 66: 0F EB. CF + pslld xmm2, 22 ; 063E _ 66: 0F 72. F2, 16 + movd xmm7, dword [ecx+2AC0H] ; 0643 _ 66: 0F 6E. B9, 00002AC0 + psrld xmm0, 10 ; 064B _ 66: 0F 72. D0, 0A + por xmm2, xmm0 ; 0650 _ 66: 0F EB. D0 + pshufd xmm0, xmm7, 0 ; 0654 _ 66: 0F 70. C7, 00 + pxor xmm1, xmm0 ; 0659 _ 66: 0F EF. C8 + movd xmm7, dword [ecx+2AC4H] ; 065D _ 66: 0F 6E. B9, 00002AC4 + pshufd xmm0, xmm7, 0 ; 0665 _ 66: 0F 70. C7, 00 + pxor xmm6, xmm0 ; 066A _ 66: 0F EF. F0 + movd xmm7, dword [ecx+2AC8H] ; 066E _ 66: 0F 6E. B9, 00002AC8 + pshufd xmm0, xmm7, 0 ; 0676 _ 66: 0F 70. C7, 00 + pxor xmm2, xmm0 ; 067B _ 66: 0F EF. D0 + movd xmm7, dword [ecx+2ACCH] ; 067F _ 66: 0F 6E. B9, 00002ACC + pshufd xmm0, xmm7, 0 ; 0687 _ 66: 0F 70. C7, 00 + pxor xmm5, xmm0 ; 068C _ 66: 0F EF. E8 + pxor xmm6, xmm5 ; 0690 _ 66: 0F EF. F5 + movdqa xmm0, xmm6 ; 0694 _ 66: 0F 6F. C6 + pxor xmm5, xmm4 ; 0698 _ 66: 0F EF. EC + pxor xmm2, xmm5 ; 069C _ 66: 0F EF. D5 + pxor xmm5, xmm1 ; 06A0 _ 66: 0F EF. E9 + pand xmm0, xmm5 ; 06A4 _ 66: 0F DB. C5 + pxor xmm0, xmm2 ; 06A8 _ 66: 0F EF. C2 + movdqa xmm7, xmm0 ; 06AC _ 66: 0F 6F. F8 + pxor xmm6, xmm5 ; 06B0 _ 66: 0F EF. F5 + pxor xmm1, xmm6 ; 06B4 _ 66: 0F EF. CE + pand xmm2, xmm6 ; 06B8 _ 66: 0F DB. D6 + pxor xmm2, xmm1 ; 06BC _ 66: 0F EF. D1 + pand xmm1, xmm0 ; 06C0 _ 66: 0F DB. C8 + pxor xmm5, xmm1 ; 06C4 _ 66: 0F EF. E9 + por xmm6, xmm0 ; 06C8 _ 66: 0F EB. F0 + pxor xmm6, xmm1 ; 06CC _ 66: 0F EF. F1 + por xmm1, xmm5 ; 06D0 _ 66: 0F EB. CD + pxor xmm1, xmm2 ; 06D4 _ 66: 0F EF. CA + pand xmm2, xmm5 ; 06D8 _ 66: 0F DB. D5 + pxor xmm1, xmm4 ; 06DC _ 66: 0F EF. CC + pxor xmm6, xmm2 ; 06E0 _ 66: 0F EF. F2 + movdqa xmm2, xmm1 ; 06E4 _ 66: 0F 6F. D1 + pslld xmm7, 13 ; 06E8 _ 66: 0F 72. F7, 0D + psrld xmm0, 19 ; 06ED _ 66: 0F 72. 
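+ ; The rotate/shift/xor run above is Serpent's linear transformation in
+ ; bitsliced form:
+ ;   x0 = rotl(x0,13); x2 = rotl(x2,3);  x1 ^= x0 ^ x2; x3 ^= x2 ^ (x0 << 3);
+ ;   x1 = rotl(x1,1);  x3 = rotl(x3,7);  x0 ^= x1 ^ x3; x2 ^= x3 ^ (x1 << 7);
+ ;   x0 = rotl(x0,5);  x2 = rotl(x2,22)
+ ; where rotl(x,n) is built from pslld n / psrld 32-n / por.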
D0, 13 + por xmm7, xmm0 ; 06F2 _ 66: 0F EB. F8 + pslld xmm2, 3 ; 06F6 _ 66: 0F 72. F2, 03 + psrld xmm1, 29 ; 06FB _ 66: 0F 72. D1, 1D + por xmm2, xmm1 ; 0700 _ 66: 0F EB. D1 + movdqa xmm1, xmm7 ; 0704 _ 66: 0F 6F. CF + pxor xmm6, xmm7 ; 0708 _ 66: 0F EF. F7 + pxor xmm6, xmm2 ; 070C _ 66: 0F EF. F2 + pxor xmm5, xmm2 ; 0710 _ 66: 0F EF. EA + pslld xmm1, 3 ; 0714 _ 66: 0F 72. F1, 03 + pxor xmm5, xmm1 ; 0719 _ 66: 0F EF. E9 + movdqa xmm1, xmm6 ; 071D _ 66: 0F 6F. CE + psrld xmm6, 31 ; 0721 _ 66: 0F 72. D6, 1F + pslld xmm1, 1 ; 0726 _ 66: 0F 72. F1, 01 + por xmm1, xmm6 ; 072B _ 66: 0F EB. CE + movdqa xmm6, xmm5 ; 072F _ 66: 0F 6F. F5 + psrld xmm5, 25 ; 0733 _ 66: 0F 72. D5, 19 + pxor xmm7, xmm1 ; 0738 _ 66: 0F EF. F9 + pslld xmm6, 7 ; 073C _ 66: 0F 72. F6, 07 + por xmm6, xmm5 ; 0741 _ 66: 0F EB. F5 + movdqa xmm5, xmm1 ; 0745 _ 66: 0F 6F. E9 + pxor xmm7, xmm6 ; 0749 _ 66: 0F EF. FE + movdqa xmm0, xmm7 ; 074D _ 66: 0F 6F. C7 + pxor xmm2, xmm6 ; 0751 _ 66: 0F EF. D6 + pslld xmm5, 7 ; 0755 _ 66: 0F 72. F5, 07 + pxor xmm2, xmm5 ; 075A _ 66: 0F EF. D5 + pslld xmm0, 5 ; 075E _ 66: 0F 72. F0, 05 + psrld xmm7, 27 ; 0763 _ 66: 0F 72. D7, 1B + por xmm0, xmm7 ; 0768 _ 66: 0F EB. C7 + movdqa xmm7, xmm2 ; 076C _ 66: 0F 6F. FA + psrld xmm2, 10 ; 0770 _ 66: 0F 72. D2, 0A + pslld xmm7, 22 ; 0775 _ 66: 0F 72. F7, 16 + por xmm7, xmm2 ; 077A _ 66: 0F EB. FA + movd xmm2, dword [ecx+2AD0H] ; 077E _ 66: 0F 6E. 91, 00002AD0 + pshufd xmm5, xmm2, 0 ; 0786 _ 66: 0F 70. EA, 00 + pxor xmm0, xmm5 ; 078B _ 66: 0F EF. C5 + movd xmm2, dword [ecx+2AD4H] ; 078F _ 66: 0F 6E. 91, 00002AD4 + pshufd xmm5, xmm2, 0 ; 0797 _ 66: 0F 70. EA, 00 + pxor xmm1, xmm5 ; 079C _ 66: 0F EF. CD + pxor xmm0, xmm1 ; 07A0 _ 66: 0F EF. C1 + movd xmm2, dword [ecx+2AD8H] ; 07A4 _ 66: 0F 6E. 91, 00002AD8 + pshufd xmm5, xmm2, 0 ; 07AC _ 66: 0F 70. EA, 00 + movd xmm2, dword [ecx+2ADCH] ; 07B1 _ 66: 0F 6E. 91, 00002ADC + pxor xmm7, xmm5 ; 07B9 _ 66: 0F EF. FD + pshufd xmm5, xmm2, 0 ; 07BD _ 66: 0F 70. EA, 00 + pxor xmm6, xmm5 ; 07C2 _ 66: 0F EF. F5 + pxor xmm1, xmm6 ; 07C6 _ 66: 0F EF. CE + movdqa xmm2, xmm1 ; 07CA _ 66: 0F 6F. D1 + pxor xmm6, xmm4 ; 07CE _ 66: 0F EF. F4 + pxor xmm7, xmm6 ; 07D2 _ 66: 0F EF. FE + pand xmm2, xmm0 ; 07D6 _ 66: 0F DB. D0 + pxor xmm2, xmm7 ; 07DA _ 66: 0F EF. D7 + movdqa xmm5, xmm2 ; 07DE _ 66: 0F 6F. EA + por xmm7, xmm1 ; 07E2 _ 66: 0F EB. F9 + pxor xmm1, xmm6 ; 07E6 _ 66: 0F EF. CE + pand xmm6, xmm2 ; 07EA _ 66: 0F DB. F2 + pxor xmm6, xmm0 ; 07EE _ 66: 0F EF. F0 + pxor xmm1, xmm2 ; 07F2 _ 66: 0F EF. CA + pxor xmm1, xmm7 ; 07F6 _ 66: 0F EF. CF + pxor xmm7, xmm0 ; 07FA _ 66: 0F EF. F8 + pand xmm0, xmm6 ; 07FE _ 66: 0F DB. C6 + pxor xmm7, xmm4 ; 0802 _ 66: 0F EF. FC + pxor xmm0, xmm1 ; 0806 _ 66: 0F EF. C1 + por xmm1, xmm6 ; 080A _ 66: 0F EB. CE + pxor xmm1, xmm7 ; 080E _ 66: 0F EF. CF + pslld xmm5, 13 ; 0812 _ 66: 0F 72. F5, 0D + psrld xmm2, 19 ; 0817 _ 66: 0F 72. D2, 13 + por xmm5, xmm2 ; 081C _ 66: 0F EB. EA + movdqa xmm2, xmm0 ; 0820 _ 66: 0F 6F. D0 + movdqa xmm7, xmm5 ; 0824 _ 66: 0F 6F. FD + psrld xmm0, 29 ; 0828 _ 66: 0F 72. D0, 1D + pslld xmm2, 3 ; 082D _ 66: 0F 72. F2, 03 + por xmm2, xmm0 ; 0832 _ 66: 0F EB. D0 + pxor xmm6, xmm5 ; 0836 _ 66: 0F EF. F5 + pxor xmm6, xmm2 ; 083A _ 66: 0F EF. F2 + movdqa xmm0, xmm6 ; 083E _ 66: 0F 6F. C6 + pxor xmm1, xmm2 ; 0842 _ 66: 0F EF. CA + pslld xmm7, 3 ; 0846 _ 66: 0F 72. F7, 03 + pxor xmm1, xmm7 ; 084B _ 66: 0F EF. CF + pslld xmm0, 1 ; 084F _ 66: 0F 72. F0, 01 + psrld xmm6, 31 ; 0854 _ 66: 0F 72. D6, 1F + por xmm0, xmm6 ; 0859 _ 66: 0F EB. C6 + movdqa xmm6, xmm1 ; 085D _ 66: 0F 6F. 
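+ ; Between key mixes, each pand/por/pxor/movdqa cluster of some 15-20
+ ; instructions is one of Serpent's eight bitsliced S-boxes (S0..S7,
+ ; cycling every eight rounds); the offset/opcode-byte comments show this
+ ; file is a generated disassembly listing, so register assignments vary
+ ; freely from round to round.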
F1 + psrld xmm1, 25 ; 0861 _ 66: 0F 72. D1, 19 + pxor xmm5, xmm0 ; 0866 _ 66: 0F EF. E8 + pslld xmm6, 7 ; 086A _ 66: 0F 72. F6, 07 + por xmm6, xmm1 ; 086F _ 66: 0F EB. F1 + movdqa xmm7, xmm0 ; 0873 _ 66: 0F 6F. F8 + pxor xmm5, xmm6 ; 0877 _ 66: 0F EF. EE + pxor xmm2, xmm6 ; 087B _ 66: 0F EF. D6 + pslld xmm7, 7 ; 087F _ 66: 0F 72. F7, 07 + pxor xmm2, xmm7 ; 0884 _ 66: 0F EF. D7 + movdqa xmm7, xmm5 ; 0888 _ 66: 0F 6F. FD + movdqa xmm1, xmm2 ; 088C _ 66: 0F 6F. CA + psrld xmm5, 27 ; 0890 _ 66: 0F 72. D5, 1B + pslld xmm7, 5 ; 0895 _ 66: 0F 72. F7, 05 + por xmm7, xmm5 ; 089A _ 66: 0F EB. FD + pslld xmm1, 22 ; 089E _ 66: 0F 72. F1, 16 + movd xmm5, dword [ecx+2AE0H] ; 08A3 _ 66: 0F 6E. A9, 00002AE0 + psrld xmm2, 10 ; 08AB _ 66: 0F 72. D2, 0A + por xmm1, xmm2 ; 08B0 _ 66: 0F EB. CA + pshufd xmm2, xmm5, 0 ; 08B4 _ 66: 0F 70. D5, 00 + pxor xmm7, xmm2 ; 08B9 _ 66: 0F EF. FA + movd xmm5, dword [ecx+2AE4H] ; 08BD _ 66: 0F 6E. A9, 00002AE4 + pshufd xmm2, xmm5, 0 ; 08C5 _ 66: 0F 70. D5, 00 + pxor xmm0, xmm2 ; 08CA _ 66: 0F EF. C2 + movd xmm5, dword [ecx+2AE8H] ; 08CE _ 66: 0F 6E. A9, 00002AE8 + pshufd xmm2, xmm5, 0 ; 08D6 _ 66: 0F 70. D5, 00 + pxor xmm1, xmm2 ; 08DB _ 66: 0F EF. CA + pxor xmm1, xmm4 ; 08DF _ 66: 0F EF. CC + movd xmm5, dword [ecx+2AECH] ; 08E3 _ 66: 0F 6E. A9, 00002AEC + pshufd xmm2, xmm5, 0 ; 08EB _ 66: 0F 70. D5, 00 + pxor xmm6, xmm2 ; 08F0 _ 66: 0F EF. F2 + movdqa xmm2, xmm6 ; 08F4 _ 66: 0F 6F. D6 + pand xmm2, xmm7 ; 08F8 _ 66: 0F DB. D7 + pxor xmm7, xmm6 ; 08FC _ 66: 0F EF. FE + pxor xmm2, xmm1 ; 0900 _ 66: 0F EF. D1 + por xmm1, xmm6 ; 0904 _ 66: 0F EB. CE + pxor xmm0, xmm2 ; 0908 _ 66: 0F EF. C2 + pxor xmm1, xmm7 ; 090C _ 66: 0F EF. CF + por xmm7, xmm0 ; 0910 _ 66: 0F EB. F8 + pxor xmm1, xmm0 ; 0914 _ 66: 0F EF. C8 + pxor xmm6, xmm7 ; 0918 _ 66: 0F EF. F7 + por xmm7, xmm2 ; 091C _ 66: 0F EB. FA + pxor xmm7, xmm1 ; 0920 _ 66: 0F EF. F9 + movdqa xmm5, xmm7 ; 0924 _ 66: 0F 6F. EF + pxor xmm6, xmm2 ; 0928 _ 66: 0F EF. F2 + pxor xmm6, xmm7 ; 092C _ 66: 0F EF. F7 + pxor xmm2, xmm4 ; 0930 _ 66: 0F EF. D4 + pand xmm1, xmm6 ; 0934 _ 66: 0F DB. CE + pxor xmm2, xmm1 ; 0938 _ 66: 0F EF. D1 + movdqa xmm1, xmm6 ; 093C _ 66: 0F 6F. CE + pslld xmm5, 13 ; 0940 _ 66: 0F 72. F5, 0D + psrld xmm7, 19 ; 0945 _ 66: 0F 72. D7, 13 + por xmm5, xmm7 ; 094A _ 66: 0F EB. EF + pslld xmm1, 3 ; 094E _ 66: 0F 72. F1, 03 + psrld xmm6, 29 ; 0953 _ 66: 0F 72. D6, 1D + por xmm1, xmm6 ; 0958 _ 66: 0F EB. CE + movdqa xmm6, xmm5 ; 095C _ 66: 0F 6F. F5 + pxor xmm0, xmm5 ; 0960 _ 66: 0F EF. C5 + pxor xmm0, xmm1 ; 0964 _ 66: 0F EF. C1 + pxor xmm2, xmm1 ; 0968 _ 66: 0F EF. D1 + pslld xmm6, 3 ; 096C _ 66: 0F 72. F6, 03 + pxor xmm2, xmm6 ; 0971 _ 66: 0F EF. D6 + movdqa xmm6, xmm0 ; 0975 _ 66: 0F 6F. F0 + movdqa xmm7, xmm2 ; 0979 _ 66: 0F 6F. FA + psrld xmm0, 31 ; 097D _ 66: 0F 72. D0, 1F + pslld xmm6, 1 ; 0982 _ 66: 0F 72. F6, 01 + por xmm6, xmm0 ; 0987 _ 66: 0F EB. F0 + movdqa xmm0, xmm6 ; 098B _ 66: 0F 6F. C6 + pslld xmm7, 7 ; 098F _ 66: 0F 72. F7, 07 + psrld xmm2, 25 ; 0994 _ 66: 0F 72. D2, 19 + por xmm7, xmm2 ; 0999 _ 66: 0F EB. FA + pxor xmm5, xmm6 ; 099D _ 66: 0F EF. EE + pxor xmm5, xmm7 ; 09A1 _ 66: 0F EF. EF + pxor xmm1, xmm7 ; 09A5 _ 66: 0F EF. CF + movd xmm2, dword [ecx+2AF4H] ; 09A9 _ 66: 0F 6E. 91, 00002AF4 + pslld xmm0, 7 ; 09B1 _ 66: 0F 72. F0, 07 + pxor xmm1, xmm0 ; 09B6 _ 66: 0F EF. C8 + movdqa xmm0, xmm5 ; 09BA _ 66: 0F 6F. C5 + psrld xmm5, 27 ; 09BE _ 66: 0F 72. D5, 1B + pslld xmm0, 5 ; 09C3 _ 66: 0F 72. F0, 05 + por xmm0, xmm5 ; 09C8 _ 66: 0F EB. C5 + movdqa xmm5, xmm1 ; 09CC _ 66: 0F 6F. 
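+ ; Only pslld n / psrld 32-n pairs joined by por are rotations; the bare
+ ; pslld 3 and pslld 7 on a copied register are the true left shifts of
+ ; the linear transformation's cross terms.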
E9 + psrld xmm1, 10 ; 09D0 _ 66: 0F 72. D1, 0A + pslld xmm5, 22 ; 09D5 _ 66: 0F 72. F5, 16 + por xmm5, xmm1 ; 09DA _ 66: 0F EB. E9 + movd xmm1, dword [ecx+2AF0H] ; 09DE _ 66: 0F 6E. 89, 00002AF0 + pshufd xmm1, xmm1, 0 ; 09E6 _ 66: 0F 70. C9, 00 + pxor xmm0, xmm1 ; 09EB _ 66: 0F EF. C1 + pshufd xmm1, xmm2, 0 ; 09EF _ 66: 0F 70. CA, 00 + pxor xmm6, xmm1 ; 09F4 _ 66: 0F EF. F1 + movd xmm2, dword [ecx+2AF8H] ; 09F8 _ 66: 0F 6E. 91, 00002AF8 + pshufd xmm1, xmm2, 0 ; 0A00 _ 66: 0F 70. CA, 00 + pxor xmm5, xmm1 ; 0A05 _ 66: 0F EF. E9 + movd xmm2, dword [ecx+2AFCH] ; 0A09 _ 66: 0F 6E. 91, 00002AFC + pshufd xmm1, xmm2, 0 ; 0A11 _ 66: 0F 70. CA, 00 + pxor xmm7, xmm1 ; 0A16 _ 66: 0F EF. F9 + movdqa xmm1, xmm6 ; 0A1A _ 66: 0F 6F. CE + pxor xmm6, xmm5 ; 0A1E _ 66: 0F EF. F5 + por xmm1, xmm5 ; 0A22 _ 66: 0F EB. CD + pxor xmm1, xmm7 ; 0A26 _ 66: 0F EF. CF + pxor xmm5, xmm1 ; 0A2A _ 66: 0F EF. E9 + por xmm7, xmm6 ; 0A2E _ 66: 0F EB. FE + pand xmm7, xmm0 ; 0A32 _ 66: 0F DB. F8 + pxor xmm6, xmm5 ; 0A36 _ 66: 0F EF. F5 + pxor xmm7, xmm1 ; 0A3A _ 66: 0F EF. F9 + por xmm1, xmm6 ; 0A3E _ 66: 0F EB. CE + pxor xmm1, xmm0 ; 0A42 _ 66: 0F EF. C8 + por xmm0, xmm6 ; 0A46 _ 66: 0F EB. C6 + pxor xmm0, xmm5 ; 0A4A _ 66: 0F EF. C5 + pxor xmm1, xmm6 ; 0A4E _ 66: 0F EF. CE + pxor xmm5, xmm1 ; 0A52 _ 66: 0F EF. E9 + pand xmm1, xmm0 ; 0A56 _ 66: 0F DB. C8 + pxor xmm1, xmm6 ; 0A5A _ 66: 0F EF. CE + pxor xmm5, xmm4 ; 0A5E _ 66: 0F EF. EC + por xmm5, xmm0 ; 0A62 _ 66: 0F EB. E8 + pxor xmm6, xmm5 ; 0A66 _ 66: 0F EF. F5 + movdqa xmm2, xmm6 ; 0A6A _ 66: 0F 6F. D6 + psrld xmm6, 19 ; 0A6E _ 66: 0F 72. D6, 13 + pslld xmm2, 13 ; 0A73 _ 66: 0F 72. F2, 0D + por xmm2, xmm6 ; 0A78 _ 66: 0F EB. D6 + movdqa xmm6, xmm1 ; 0A7C _ 66: 0F 6F. F1 + movdqa xmm5, xmm2 ; 0A80 _ 66: 0F 6F. EA + psrld xmm1, 29 ; 0A84 _ 66: 0F 72. D1, 1D + pslld xmm6, 3 ; 0A89 _ 66: 0F 72. F6, 03 + por xmm6, xmm1 ; 0A8E _ 66: 0F EB. F1 + pxor xmm7, xmm2 ; 0A92 _ 66: 0F EF. FA + pxor xmm7, xmm6 ; 0A96 _ 66: 0F EF. FE + movdqa xmm1, xmm7 ; 0A9A _ 66: 0F 6F. CF + pxor xmm0, xmm6 ; 0A9E _ 66: 0F EF. C6 + pslld xmm5, 3 ; 0AA2 _ 66: 0F 72. F5, 03 + pxor xmm0, xmm5 ; 0AA7 _ 66: 0F EF. C5 + movdqa xmm5, xmm0 ; 0AAB _ 66: 0F 6F. E8 + pslld xmm1, 1 ; 0AAF _ 66: 0F 72. F1, 01 + psrld xmm7, 31 ; 0AB4 _ 66: 0F 72. D7, 1F + por xmm1, xmm7 ; 0AB9 _ 66: 0F EB. CF + movdqa xmm7, xmm1 ; 0ABD _ 66: 0F 6F. F9 + pslld xmm5, 7 ; 0AC1 _ 66: 0F 72. F5, 07 + psrld xmm0, 25 ; 0AC6 _ 66: 0F 72. D0, 19 + por xmm5, xmm0 ; 0ACB _ 66: 0F EB. E8 + pxor xmm2, xmm1 ; 0ACF _ 66: 0F EF. D1 + pxor xmm2, xmm5 ; 0AD3 _ 66: 0F EF. D5 + pxor xmm6, xmm5 ; 0AD7 _ 66: 0F EF. F5 + pslld xmm7, 7 ; 0ADB _ 66: 0F 72. F7, 07 + pxor xmm6, xmm7 ; 0AE0 _ 66: 0F EF. F7 + movdqa xmm7, xmm2 ; 0AE4 _ 66: 0F 6F. FA + movdqa xmm0, xmm6 ; 0AE8 _ 66: 0F 6F. C6 + psrld xmm2, 27 ; 0AEC _ 66: 0F 72. D2, 1B + pslld xmm7, 5 ; 0AF1 _ 66: 0F 72. F7, 05 + por xmm7, xmm2 ; 0AF6 _ 66: 0F EB. FA + pslld xmm0, 22 ; 0AFA _ 66: 0F 72. F0, 16 + psrld xmm6, 10 ; 0AFF _ 66: 0F 72. D6, 0A + por xmm0, xmm6 ; 0B04 _ 66: 0F EB. C6 + movd xmm6, dword [ecx+2B00H] ; 0B08 _ 66: 0F 6E. B1, 00002B00 + pshufd xmm2, xmm6, 0 ; 0B10 _ 66: 0F 70. D6, 00 + pxor xmm7, xmm2 ; 0B15 _ 66: 0F EF. FA + movd xmm6, dword [ecx+2B04H] ; 0B19 _ 66: 0F 6E. B1, 00002B04 + pshufd xmm2, xmm6, 0 ; 0B21 _ 66: 0F 70. D6, 00 + pxor xmm1, xmm2 ; 0B26 _ 66: 0F EF. CA + movd xmm6, dword [ecx+2B08H] ; 0B2A _ 66: 0F 6E. B1, 00002B08 + pshufd xmm2, xmm6, 0 ; 0B32 _ 66: 0F 70. D6, 00 + pxor xmm0, xmm2 ; 0B37 _ 66: 0F EF. C2 + movd xmm6, dword [ecx+2B0CH] ; 0B3B _ 66: 0F 6E. 
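+ ; Subkey offsets advance by 10H per round (2A80H, 2A90H, 2AA0H, ...):
+ ; four 32-bit subkeys per round out of the schedule that ecx points into.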
B1, 00002B0C + pshufd xmm2, xmm6, 0 ; 0B43 _ 66: 0F 70. D6, 00 + pxor xmm5, xmm2 ; 0B48 _ 66: 0F EF. EA + movdqa xmm2, xmm1 ; 0B4C _ 66: 0F 6F. D1 + pxor xmm5, xmm7 ; 0B50 _ 66: 0F EF. EF + pxor xmm1, xmm0 ; 0B54 _ 66: 0F EF. C8 + pand xmm2, xmm5 ; 0B58 _ 66: 0F DB. D5 + pxor xmm2, xmm7 ; 0B5C _ 66: 0F EF. D7 + por xmm7, xmm5 ; 0B60 _ 66: 0F EB. FD + pxor xmm7, xmm1 ; 0B64 _ 66: 0F EF. F9 + pxor xmm1, xmm5 ; 0B68 _ 66: 0F EF. CD + pxor xmm5, xmm0 ; 0B6C _ 66: 0F EF. E8 + por xmm0, xmm2 ; 0B70 _ 66: 0F EB. C2 + pxor xmm0, xmm1 ; 0B74 _ 66: 0F EF. C1 + pxor xmm1, xmm4 ; 0B78 _ 66: 0F EF. CC + por xmm1, xmm2 ; 0B7C _ 66: 0F EB. CA + pxor xmm2, xmm5 ; 0B80 _ 66: 0F EF. D5 + pxor xmm2, xmm1 ; 0B84 _ 66: 0F EF. D1 + por xmm5, xmm7 ; 0B88 _ 66: 0F EB. EF + pxor xmm2, xmm5 ; 0B8C _ 66: 0F EF. D5 + movdqa xmm6, xmm2 ; 0B90 _ 66: 0F 6F. F2 + pxor xmm1, xmm5 ; 0B94 _ 66: 0F EF. CD + psrld xmm2, 19 ; 0B98 _ 66: 0F 72. D2, 13 + pslld xmm6, 13 ; 0B9D _ 66: 0F 72. F6, 0D + por xmm6, xmm2 ; 0BA2 _ 66: 0F EB. F2 + movdqa xmm2, xmm0 ; 0BA6 _ 66: 0F 6F. D0 + psrld xmm0, 29 ; 0BAA _ 66: 0F 72. D0, 1D + pxor xmm1, xmm6 ; 0BAF _ 66: 0F EF. CE + pslld xmm2, 3 ; 0BB3 _ 66: 0F 72. F2, 03 + por xmm2, xmm0 ; 0BB8 _ 66: 0F EB. D0 + movdqa xmm0, xmm6 ; 0BBC _ 66: 0F 6F. C6 + pxor xmm1, xmm2 ; 0BC0 _ 66: 0F EF. CA + pxor xmm7, xmm2 ; 0BC4 _ 66: 0F EF. FA + pslld xmm0, 3 ; 0BC8 _ 66: 0F 72. F0, 03 + pxor xmm7, xmm0 ; 0BCD _ 66: 0F EF. F8 + movdqa xmm0, xmm1 ; 0BD1 _ 66: 0F 6F. C1 + movdqa xmm5, xmm7 ; 0BD5 _ 66: 0F 6F. EF + psrld xmm1, 31 ; 0BD9 _ 66: 0F 72. D1, 1F + pslld xmm0, 1 ; 0BDE _ 66: 0F 72. F0, 01 + por xmm0, xmm1 ; 0BE3 _ 66: 0F EB. C1 + movdqa xmm1, xmm0 ; 0BE7 _ 66: 0F 6F. C8 + pslld xmm5, 7 ; 0BEB _ 66: 0F 72. F5, 07 + psrld xmm7, 25 ; 0BF0 _ 66: 0F 72. D7, 19 + por xmm5, xmm7 ; 0BF5 _ 66: 0F EB. EF + pxor xmm6, xmm0 ; 0BF9 _ 66: 0F EF. F0 + pxor xmm6, xmm5 ; 0BFD _ 66: 0F EF. F5 + pxor xmm2, xmm5 ; 0C01 _ 66: 0F EF. D5 + pslld xmm1, 7 ; 0C05 _ 66: 0F 72. F1, 07 + pxor xmm2, xmm1 ; 0C0A _ 66: 0F EF. D1 + movdqa xmm1, xmm6 ; 0C0E _ 66: 0F 6F. CE + movdqa xmm7, xmm2 ; 0C12 _ 66: 0F 6F. FA + psrld xmm6, 27 ; 0C16 _ 66: 0F 72. D6, 1B + pslld xmm1, 5 ; 0C1B _ 66: 0F 72. F1, 05 + por xmm1, xmm6 ; 0C20 _ 66: 0F EB. CE + pslld xmm7, 22 ; 0C24 _ 66: 0F 72. F7, 16 + psrld xmm2, 10 ; 0C29 _ 66: 0F 72. D2, 0A + por xmm7, xmm2 ; 0C2E _ 66: 0F EB. FA + movd xmm6, dword [ecx+2B10H] ; 0C32 _ 66: 0F 6E. B1, 00002B10 + pshufd xmm2, xmm6, 0 ; 0C3A _ 66: 0F 70. D6, 00 + pxor xmm1, xmm2 ; 0C3F _ 66: 0F EF. CA + movd xmm6, dword [ecx+2B14H] ; 0C43 _ 66: 0F 6E. B1, 00002B14 + pshufd xmm2, xmm6, 0 ; 0C4B _ 66: 0F 70. D6, 00 + pxor xmm0, xmm2 ; 0C50 _ 66: 0F EF. C2 + pxor xmm1, xmm4 ; 0C54 _ 66: 0F EF. CC + movd xmm6, dword [ecx+2B18H] ; 0C58 _ 66: 0F 6E. B1, 00002B18 + pshufd xmm2, xmm6, 0 ; 0C60 _ 66: 0F 70. D6, 00 + pxor xmm7, xmm2 ; 0C65 _ 66: 0F EF. FA + pxor xmm7, xmm4 ; 0C69 _ 66: 0F EF. FC + movd xmm6, dword [ecx+2B1CH] ; 0C6D _ 66: 0F 6E. B1, 00002B1C + pshufd xmm2, xmm6, 0 ; 0C75 _ 66: 0F 70. D6, 00 + movdqa xmm6, xmm1 ; 0C7A _ 66: 0F 6F. F1 + pxor xmm5, xmm2 ; 0C7E _ 66: 0F EF. EA + pand xmm6, xmm0 ; 0C82 _ 66: 0F DB. F0 + pxor xmm7, xmm6 ; 0C86 _ 66: 0F EF. FE + por xmm6, xmm5 ; 0C8A _ 66: 0F EB. F5 + pxor xmm5, xmm7 ; 0C8E _ 66: 0F EF. EF + pxor xmm0, xmm6 ; 0C92 _ 66: 0F EF. C6 + pxor xmm6, xmm1 ; 0C96 _ 66: 0F EF. F1 + por xmm1, xmm0 ; 0C9A _ 66: 0F EB. C8 + pxor xmm0, xmm5 ; 0C9E _ 66: 0F EF. C5 + por xmm7, xmm6 ; 0CA2 _ 66: 0F EB. FE + pand xmm7, xmm1 ; 0CA6 _ 66: 0F DB. F9 + pxor xmm6, xmm0 ; 0CAA _ 66: 0F EF. 
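+ ; The movdqa copies exist because SSE2 logic ops are two-operand and
+ ; destructive: any value needed twice must first be duplicated into a
+ ; spare register.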
F0 + pand xmm0, xmm7 ; 0CAE _ 66: 0F DB. C7 + pxor xmm0, xmm6 ; 0CB2 _ 66: 0F EF. C6 + pand xmm6, xmm7 ; 0CB6 _ 66: 0F DB. F7 + pxor xmm1, xmm6 ; 0CBA _ 66: 0F EF. CE + movdqa xmm6, xmm7 ; 0CBE _ 66: 0F 6F. F7 + psrld xmm7, 19 ; 0CC2 _ 66: 0F 72. D7, 13 + pslld xmm6, 13 ; 0CC7 _ 66: 0F 72. F6, 0D + por xmm6, xmm7 ; 0CCC _ 66: 0F EB. F7 + movdqa xmm7, xmm5 ; 0CD0 _ 66: 0F 6F. FD + psrld xmm5, 29 ; 0CD4 _ 66: 0F 72. D5, 1D + pxor xmm1, xmm6 ; 0CD9 _ 66: 0F EF. CE + pslld xmm7, 3 ; 0CDD _ 66: 0F 72. F7, 03 + por xmm7, xmm5 ; 0CE2 _ 66: 0F EB. FD + movdqa xmm5, xmm6 ; 0CE6 _ 66: 0F 6F. EE + pxor xmm1, xmm7 ; 0CEA _ 66: 0F EF. CF + pxor xmm0, xmm7 ; 0CEE _ 66: 0F EF. C7 + pslld xmm5, 3 ; 0CF2 _ 66: 0F 72. F5, 03 + pxor xmm0, xmm5 ; 0CF7 _ 66: 0F EF. C5 + movdqa xmm5, xmm1 ; 0CFB _ 66: 0F 6F. E9 + movdqa xmm2, xmm0 ; 0CFF _ 66: 0F 6F. D0 + psrld xmm1, 31 ; 0D03 _ 66: 0F 72. D1, 1F + pslld xmm5, 1 ; 0D08 _ 66: 0F 72. F5, 01 + por xmm5, xmm1 ; 0D0D _ 66: 0F EB. E9 + pslld xmm2, 7 ; 0D11 _ 66: 0F 72. F2, 07 + psrld xmm0, 25 ; 0D16 _ 66: 0F 72. D0, 19 + por xmm2, xmm0 ; 0D1B _ 66: 0F EB. D0 + movdqa xmm0, xmm5 ; 0D1F _ 66: 0F 6F. C5 + pxor xmm6, xmm5 ; 0D23 _ 66: 0F EF. F5 + pxor xmm6, xmm2 ; 0D27 _ 66: 0F EF. F2 + movdqa xmm1, xmm6 ; 0D2B _ 66: 0F 6F. CE + pxor xmm7, xmm2 ; 0D2F _ 66: 0F EF. FA + pslld xmm0, 7 ; 0D33 _ 66: 0F 72. F0, 07 + pxor xmm7, xmm0 ; 0D38 _ 66: 0F EF. F8 + movdqa xmm0, xmm7 ; 0D3C _ 66: 0F 6F. C7 + pslld xmm1, 5 ; 0D40 _ 66: 0F 72. F1, 05 + psrld xmm6, 27 ; 0D45 _ 66: 0F 72. D6, 1B + por xmm1, xmm6 ; 0D4A _ 66: 0F EB. CE + pslld xmm0, 22 ; 0D4E _ 66: 0F 72. F0, 16 + psrld xmm7, 10 ; 0D53 _ 66: 0F 72. D7, 0A + por xmm0, xmm7 ; 0D58 _ 66: 0F EB. C7 + movd xmm7, dword [ecx+2B20H] ; 0D5C _ 66: 0F 6E. B9, 00002B20 + movd xmm6, dword [ecx+2B24H] ; 0D64 _ 66: 0F 6E. B1, 00002B24 + pshufd xmm7, xmm7, 0 ; 0D6C _ 66: 0F 70. FF, 00 + pxor xmm1, xmm7 ; 0D71 _ 66: 0F EF. CF + pshufd xmm7, xmm6, 0 ; 0D75 _ 66: 0F 70. FE, 00 + pxor xmm5, xmm7 ; 0D7A _ 66: 0F EF. EF + movd xmm6, dword [ecx+2B28H] ; 0D7E _ 66: 0F 6E. B1, 00002B28 + pshufd xmm7, xmm6, 0 ; 0D86 _ 66: 0F 70. FE, 00 + pxor xmm0, xmm7 ; 0D8B _ 66: 0F EF. C7 + movd xmm6, dword [ecx+2B2CH] ; 0D8F _ 66: 0F 6E. B1, 00002B2C + pshufd xmm7, xmm6, 0 ; 0D97 _ 66: 0F 70. FE, 00 + movdqa xmm6, xmm1 ; 0D9C _ 66: 0F 6F. F1 + pxor xmm2, xmm7 ; 0DA0 _ 66: 0F EF. D7 + pand xmm6, xmm0 ; 0DA4 _ 66: 0F DB. F0 + pxor xmm6, xmm2 ; 0DA8 _ 66: 0F EF. F2 + pxor xmm0, xmm5 ; 0DAC _ 66: 0F EF. C5 + pxor xmm0, xmm6 ; 0DB0 _ 66: 0F EF. C6 + por xmm2, xmm1 ; 0DB4 _ 66: 0F EB. D1 + pxor xmm2, xmm5 ; 0DB8 _ 66: 0F EF. D5 + movdqa xmm7, xmm2 ; 0DBC _ 66: 0F 6F. FA + pxor xmm1, xmm0 ; 0DC0 _ 66: 0F EF. C8 + por xmm7, xmm1 ; 0DC4 _ 66: 0F EB. F9 + pxor xmm7, xmm6 ; 0DC8 _ 66: 0F EF. FE + pand xmm6, xmm2 ; 0DCC _ 66: 0F DB. F2 + pxor xmm1, xmm6 ; 0DD0 _ 66: 0F EF. CE + movdqa xmm6, xmm0 ; 0DD4 _ 66: 0F 6F. F0 + pxor xmm2, xmm7 ; 0DD8 _ 66: 0F EF. D7 + pxor xmm2, xmm1 ; 0DDC _ 66: 0F EF. D1 + movdqa xmm5, xmm2 ; 0DE0 _ 66: 0F 6F. EA + pxor xmm1, xmm4 ; 0DE4 _ 66: 0F EF. CC + pslld xmm6, 13 ; 0DE8 _ 66: 0F 72. F6, 0D + psrld xmm0, 19 ; 0DED _ 66: 0F 72. D0, 13 + por xmm6, xmm0 ; 0DF2 _ 66: 0F EB. F0 + pslld xmm5, 3 ; 0DF6 _ 66: 0F 72. F5, 03 + psrld xmm2, 29 ; 0DFB _ 66: 0F 72. D2, 1D + por xmm5, xmm2 ; 0E00 _ 66: 0F EB. EA + movdqa xmm2, xmm6 ; 0E04 _ 66: 0F 6F. D6 + pxor xmm7, xmm6 ; 0E08 _ 66: 0F EF. FE + pxor xmm7, xmm5 ; 0E0C _ 66: 0F EF. FD + movdqa xmm0, xmm7 ; 0E10 _ 66: 0F 6F. C7 + pxor xmm1, xmm5 ; 0E14 _ 66: 0F EF. CD + pslld xmm2, 3 ; 0E18 _ 66: 0F 72. 
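+ ; The S-boxes use only pand/por/pxor plus register moves, never table
+ ; lookups, so the rounds in this listing run in constant time with
+ ; respect to data and key.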
F2, 03 + pxor xmm1, xmm2 ; 0E1D _ 66: 0F EF. CA + pslld xmm0, 1 ; 0E21 _ 66: 0F 72. F0, 01 + psrld xmm7, 31 ; 0E26 _ 66: 0F 72. D7, 1F + por xmm0, xmm7 ; 0E2B _ 66: 0F EB. C7 + movdqa xmm7, xmm1 ; 0E2F _ 66: 0F 6F. F9 + psrld xmm1, 25 ; 0E33 _ 66: 0F 72. D1, 19 + pxor xmm6, xmm0 ; 0E38 _ 66: 0F EF. F0 + pslld xmm7, 7 ; 0E3C _ 66: 0F 72. F7, 07 + por xmm7, xmm1 ; 0E41 _ 66: 0F EB. F9 + movdqa xmm1, xmm0 ; 0E45 _ 66: 0F 6F. C8 + pxor xmm6, xmm7 ; 0E49 _ 66: 0F EF. F7 + pxor xmm5, xmm7 ; 0E4D _ 66: 0F EF. EF + pslld xmm1, 7 ; 0E51 _ 66: 0F 72. F1, 07 + pxor xmm5, xmm1 ; 0E56 _ 66: 0F EF. E9 + movdqa xmm1, xmm6 ; 0E5A _ 66: 0F 6F. CE + movdqa xmm2, xmm5 ; 0E5E _ 66: 0F 6F. D5 + psrld xmm6, 27 ; 0E62 _ 66: 0F 72. D6, 1B + pslld xmm1, 5 ; 0E67 _ 66: 0F 72. F1, 05 + por xmm1, xmm6 ; 0E6C _ 66: 0F EB. CE + pslld xmm2, 22 ; 0E70 _ 66: 0F 72. F2, 16 + psrld xmm5, 10 ; 0E75 _ 66: 0F 72. D5, 0A + por xmm2, xmm5 ; 0E7A _ 66: 0F EB. D5 + movd xmm5, dword [ecx+2B30H] ; 0E7E _ 66: 0F 6E. A9, 00002B30 + movd xmm6, dword [ecx+2B34H] ; 0E86 _ 66: 0F 6E. B1, 00002B34 + pshufd xmm5, xmm5, 0 ; 0E8E _ 66: 0F 70. ED, 00 + pxor xmm1, xmm5 ; 0E93 _ 66: 0F EF. CD + pshufd xmm5, xmm6, 0 ; 0E97 _ 66: 0F 70. EE, 00 + pxor xmm0, xmm5 ; 0E9C _ 66: 0F EF. C5 + movd xmm6, dword [ecx+2B38H] ; 0EA0 _ 66: 0F 6E. B1, 00002B38 + pshufd xmm5, xmm6, 0 ; 0EA8 _ 66: 0F 70. EE, 00 + pxor xmm2, xmm5 ; 0EAD _ 66: 0F EF. D5 + movd xmm6, dword [ecx+2B3CH] ; 0EB1 _ 66: 0F 6E. B1, 00002B3C + pshufd xmm5, xmm6, 0 ; 0EB9 _ 66: 0F 70. EE, 00 + movdqa xmm6, xmm1 ; 0EBE _ 66: 0F 6F. F1 + pxor xmm7, xmm5 ; 0EC2 _ 66: 0F EF. FD + por xmm6, xmm7 ; 0EC6 _ 66: 0F EB. F7 + pxor xmm7, xmm0 ; 0ECA _ 66: 0F EF. F8 + pand xmm0, xmm1 ; 0ECE _ 66: 0F DB. C1 + pxor xmm1, xmm2 ; 0ED2 _ 66: 0F EF. CA + pxor xmm2, xmm7 ; 0ED6 _ 66: 0F EF. D7 + pand xmm7, xmm6 ; 0EDA _ 66: 0F DB. FE + por xmm1, xmm0 ; 0EDE _ 66: 0F EB. C8 + pxor xmm7, xmm1 ; 0EE2 _ 66: 0F EF. F9 + pxor xmm6, xmm0 ; 0EE6 _ 66: 0F EF. F0 + pand xmm1, xmm6 ; 0EEA _ 66: 0F DB. CE + pxor xmm0, xmm7 ; 0EEE _ 66: 0F EF. C7 + pxor xmm1, xmm2 ; 0EF2 _ 66: 0F EF. CA + por xmm0, xmm6 ; 0EF6 _ 66: 0F EB. C6 + pxor xmm0, xmm2 ; 0EFA _ 66: 0F EF. C2 + movdqa xmm2, xmm0 ; 0EFE _ 66: 0F 6F. D0 + pxor xmm6, xmm7 ; 0F02 _ 66: 0F EF. F7 + por xmm2, xmm7 ; 0F06 _ 66: 0F EB. D7 + pxor xmm6, xmm2 ; 0F0A _ 66: 0F EF. F2 + movdqa xmm5, xmm6 ; 0F0E _ 66: 0F 6F. EE + movdqa xmm2, xmm7 ; 0F12 _ 66: 0F 6F. D7 + psrld xmm6, 19 ; 0F16 _ 66: 0F 72. D6, 13 + pslld xmm5, 13 ; 0F1B _ 66: 0F 72. F5, 0D + por xmm5, xmm6 ; 0F20 _ 66: 0F EB. EE + pslld xmm2, 3 ; 0F24 _ 66: 0F 72. F2, 03 + psrld xmm7, 29 ; 0F29 _ 66: 0F 72. D7, 1D + por xmm2, xmm7 ; 0F2E _ 66: 0F EB. D7 + movdqa xmm7, xmm5 ; 0F32 _ 66: 0F 6F. FD + pxor xmm0, xmm5 ; 0F36 _ 66: 0F EF. C5 + pxor xmm0, xmm2 ; 0F3A _ 66: 0F EF. C2 + pxor xmm1, xmm2 ; 0F3E _ 66: 0F EF. CA + pslld xmm7, 3 ; 0F42 _ 66: 0F 72. F7, 03 + pxor xmm1, xmm7 ; 0F47 _ 66: 0F EF. CF + movdqa xmm7, xmm0 ; 0F4B _ 66: 0F 6F. F8 + psrld xmm0, 31 ; 0F4F _ 66: 0F 72. D0, 1F + pslld xmm7, 1 ; 0F54 _ 66: 0F 72. F7, 01 + por xmm7, xmm0 ; 0F59 _ 66: 0F EB. F8 + movdqa xmm0, xmm1 ; 0F5D _ 66: 0F 6F. C1 + psrld xmm1, 25 ; 0F61 _ 66: 0F 72. D1, 19 + pxor xmm5, xmm7 ; 0F66 _ 66: 0F EF. EF + pslld xmm0, 7 ; 0F6A _ 66: 0F 72. F0, 07 + por xmm0, xmm1 ; 0F6F _ 66: 0F EB. C1 + movdqa xmm1, xmm7 ; 0F73 _ 66: 0F 6F. CF + pxor xmm5, xmm0 ; 0F77 _ 66: 0F EF. E8 + pxor xmm2, xmm0 ; 0F7B _ 66: 0F EF. D0 + pslld xmm1, 7 ; 0F7F _ 66: 0F 72. F1, 07 + pxor xmm2, xmm1 ; 0F84 _ 66: 0F EF. 
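+ ; Input was fetched with movdqu (no alignment assumed); from there on the
+ ; whole cipher state lives in registers, and these rounds never touch the
+ ; stack.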
D1 + movdqa xmm1, xmm5 ; 0F88 _ 66: 0F 6F. CD + psrld xmm5, 27 ; 0F8C _ 66: 0F 72. D5, 1B + pslld xmm1, 5 ; 0F91 _ 66: 0F 72. F1, 05 + por xmm1, xmm5 ; 0F96 _ 66: 0F EB. CD + movdqa xmm5, xmm2 ; 0F9A _ 66: 0F 6F. EA + psrld xmm2, 10 ; 0F9E _ 66: 0F 72. D2, 0A + pslld xmm5, 22 ; 0FA3 _ 66: 0F 72. F5, 16 + por xmm5, xmm2 ; 0FA8 _ 66: 0F EB. EA + movd xmm2, dword [ecx+2B40H] ; 0FAC _ 66: 0F 6E. 91, 00002B40 + pshufd xmm6, xmm2, 0 ; 0FB4 _ 66: 0F 70. F2, 00 + pxor xmm1, xmm6 ; 0FB9 _ 66: 0F EF. CE + movd xmm2, dword [ecx+2B44H] ; 0FBD _ 66: 0F 6E. 91, 00002B44 + pshufd xmm6, xmm2, 0 ; 0FC5 _ 66: 0F 70. F2, 00 + movd xmm2, dword [ecx+2B48H] ; 0FCA _ 66: 0F 6E. 91, 00002B48 + pxor xmm7, xmm6 ; 0FD2 _ 66: 0F EF. FE + pshufd xmm6, xmm2, 0 ; 0FD6 _ 66: 0F 70. F2, 00 + pxor xmm5, xmm6 ; 0FDB _ 66: 0F EF. EE + movd xmm2, dword [ecx+2B4CH] ; 0FDF _ 66: 0F 6E. 91, 00002B4C + pshufd xmm6, xmm2, 0 ; 0FE7 _ 66: 0F 70. F2, 00 + pxor xmm0, xmm6 ; 0FEC _ 66: 0F EF. C6 + pxor xmm7, xmm0 ; 0FF0 _ 66: 0F EF. F8 + movdqa xmm2, xmm7 ; 0FF4 _ 66: 0F 6F. D7 + pxor xmm0, xmm4 ; 0FF8 _ 66: 0F EF. C4 + pxor xmm5, xmm0 ; 0FFC _ 66: 0F EF. E8 + pxor xmm0, xmm1 ; 1000 _ 66: 0F EF. C1 + pand xmm2, xmm0 ; 1004 _ 66: 0F DB. D0 + pxor xmm2, xmm5 ; 1008 _ 66: 0F EF. D5 + pxor xmm7, xmm0 ; 100C _ 66: 0F EF. F8 + pxor xmm1, xmm7 ; 1010 _ 66: 0F EF. CF + movdqa xmm6, xmm2 ; 1014 _ 66: 0F 6F. F2 + pand xmm5, xmm7 ; 1018 _ 66: 0F DB. EF + pxor xmm5, xmm1 ; 101C _ 66: 0F EF. E9 + pand xmm1, xmm2 ; 1020 _ 66: 0F DB. CA + pxor xmm0, xmm1 ; 1024 _ 66: 0F EF. C1 + por xmm7, xmm2 ; 1028 _ 66: 0F EB. FA + pxor xmm7, xmm1 ; 102C _ 66: 0F EF. F9 + por xmm1, xmm0 ; 1030 _ 66: 0F EB. C8 + pxor xmm1, xmm5 ; 1034 _ 66: 0F EF. CD + pand xmm5, xmm0 ; 1038 _ 66: 0F DB. E8 + pxor xmm1, xmm4 ; 103C _ 66: 0F EF. CC + pxor xmm7, xmm5 ; 1040 _ 66: 0F EF. FD + pslld xmm6, 13 ; 1044 _ 66: 0F 72. F6, 0D + psrld xmm2, 19 ; 1049 _ 66: 0F 72. D2, 13 + por xmm6, xmm2 ; 104E _ 66: 0F EB. F2 + movdqa xmm2, xmm1 ; 1052 _ 66: 0F 6F. D1 + psrld xmm1, 29 ; 1056 _ 66: 0F 72. D1, 1D + pxor xmm7, xmm6 ; 105B _ 66: 0F EF. FE + pslld xmm2, 3 ; 105F _ 66: 0F 72. F2, 03 + por xmm2, xmm1 ; 1064 _ 66: 0F EB. D1 + movdqa xmm1, xmm6 ; 1068 _ 66: 0F 6F. CE + pxor xmm7, xmm2 ; 106C _ 66: 0F EF. FA + movdqa xmm5, xmm7 ; 1070 _ 66: 0F 6F. EF + pxor xmm0, xmm2 ; 1074 _ 66: 0F EF. C2 + pslld xmm1, 3 ; 1078 _ 66: 0F 72. F1, 03 + pxor xmm0, xmm1 ; 107D _ 66: 0F EF. C1 + movdqa xmm1, xmm0 ; 1081 _ 66: 0F 6F. C8 + pslld xmm5, 1 ; 1085 _ 66: 0F 72. F5, 01 + psrld xmm7, 31 ; 108A _ 66: 0F 72. D7, 1F + por xmm5, xmm7 ; 108F _ 66: 0F EB. EF + movdqa xmm7, xmm5 ; 1093 _ 66: 0F 6F. FD + pslld xmm1, 7 ; 1097 _ 66: 0F 72. F1, 07 + psrld xmm0, 25 ; 109C _ 66: 0F 72. D0, 19 + por xmm1, xmm0 ; 10A1 _ 66: 0F EB. C8 + pxor xmm6, xmm5 ; 10A5 _ 66: 0F EF. F5 + movd xmm0, dword [ecx+2B50H] ; 10A9 _ 66: 0F 6E. 81, 00002B50 + pxor xmm6, xmm1 ; 10B1 _ 66: 0F EF. F1 + pxor xmm2, xmm1 ; 10B5 _ 66: 0F EF. D1 + pslld xmm7, 7 ; 10B9 _ 66: 0F 72. F7, 07 + pxor xmm2, xmm7 ; 10BE _ 66: 0F EF. D7 + movdqa xmm7, xmm6 ; 10C2 _ 66: 0F 6F. FE + psrld xmm6, 27 ; 10C6 _ 66: 0F 72. D6, 1B + pslld xmm7, 5 ; 10CB _ 66: 0F 72. F7, 05 + por xmm7, xmm6 ; 10D0 _ 66: 0F EB. FE + movdqa xmm6, xmm2 ; 10D4 _ 66: 0F 6F. F2 + psrld xmm2, 10 ; 10D8 _ 66: 0F 72. D2, 0A + pslld xmm6, 22 ; 10DD _ 66: 0F 72. F6, 16 + por xmm6, xmm2 ; 10E2 _ 66: 0F EB. F2 + pshufd xmm2, xmm0, 0 ; 10E6 _ 66: 0F 70. D0, 00 + pxor xmm7, xmm2 ; 10EB _ 66: 0F EF. FA + movd xmm0, dword [ecx+2B54H] ; 10EF _ 66: 0F 6E. 
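+ ; The generator interleaves rounds: the movd of [ecx+2B50H] above is
+ ; issued in the middle of the preceding rotate sequence, presumably to
+ ; hide load latency.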
81, 00002B54 + pshufd xmm2, xmm0, 0 ; 10F7 _ 66: 0F 70. D0, 00 + pxor xmm5, xmm2 ; 10FC _ 66: 0F EF. EA + pxor xmm7, xmm5 ; 1100 _ 66: 0F EF. FD + movd xmm0, dword [ecx+2B58H] ; 1104 _ 66: 0F 6E. 81, 00002B58 + pshufd xmm2, xmm0, 0 ; 110C _ 66: 0F 70. D0, 00 + movd xmm0, dword [ecx+2B5CH] ; 1111 _ 66: 0F 6E. 81, 00002B5C + pxor xmm6, xmm2 ; 1119 _ 66: 0F EF. F2 + pshufd xmm2, xmm0, 0 ; 111D _ 66: 0F 70. D0, 00 + pxor xmm1, xmm2 ; 1122 _ 66: 0F EF. CA + pxor xmm5, xmm1 ; 1126 _ 66: 0F EF. E9 + movdqa xmm0, xmm5 ; 112A _ 66: 0F 6F. C5 + pxor xmm1, xmm4 ; 112E _ 66: 0F EF. CC + pxor xmm6, xmm1 ; 1132 _ 66: 0F EF. F1 + pand xmm0, xmm7 ; 1136 _ 66: 0F DB. C7 + pxor xmm0, xmm6 ; 113A _ 66: 0F EF. C6 + movdqa xmm2, xmm0 ; 113E _ 66: 0F 6F. D0 + por xmm6, xmm5 ; 1142 _ 66: 0F EB. F5 + pxor xmm5, xmm1 ; 1146 _ 66: 0F EF. E9 + pand xmm1, xmm0 ; 114A _ 66: 0F DB. C8 + pxor xmm1, xmm7 ; 114E _ 66: 0F EF. CF + pxor xmm5, xmm0 ; 1152 _ 66: 0F EF. E8 + pxor xmm5, xmm6 ; 1156 _ 66: 0F EF. EE + pxor xmm6, xmm7 ; 115A _ 66: 0F EF. F7 + pand xmm7, xmm1 ; 115E _ 66: 0F DB. F9 + pxor xmm6, xmm4 ; 1162 _ 66: 0F EF. F4 + pxor xmm7, xmm5 ; 1166 _ 66: 0F EF. FD + por xmm5, xmm1 ; 116A _ 66: 0F EB. E9 + pxor xmm5, xmm6 ; 116E _ 66: 0F EF. EE + movdqa xmm6, xmm7 ; 1172 _ 66: 0F 6F. F7 + pslld xmm2, 13 ; 1176 _ 66: 0F 72. F2, 0D + psrld xmm0, 19 ; 117B _ 66: 0F 72. D0, 13 + por xmm2, xmm0 ; 1180 _ 66: 0F EB. D0 + pslld xmm6, 3 ; 1184 _ 66: 0F 72. F6, 03 + psrld xmm7, 29 ; 1189 _ 66: 0F 72. D7, 1D + por xmm6, xmm7 ; 118E _ 66: 0F EB. F7 + movdqa xmm7, xmm2 ; 1192 _ 66: 0F 6F. FA + pxor xmm1, xmm2 ; 1196 _ 66: 0F EF. CA + pxor xmm1, xmm6 ; 119A _ 66: 0F EF. CE + movdqa xmm0, xmm1 ; 119E _ 66: 0F 6F. C1 + pxor xmm5, xmm6 ; 11A2 _ 66: 0F EF. EE + pslld xmm7, 3 ; 11A6 _ 66: 0F 72. F7, 03 + pxor xmm5, xmm7 ; 11AB _ 66: 0F EF. EF + pslld xmm0, 1 ; 11AF _ 66: 0F 72. F0, 01 + psrld xmm1, 31 ; 11B4 _ 66: 0F 72. D1, 1F + por xmm0, xmm1 ; 11B9 _ 66: 0F EB. C1 + movdqa xmm1, xmm5 ; 11BD _ 66: 0F 6F. CD + psrld xmm5, 25 ; 11C1 _ 66: 0F 72. D5, 19 + pxor xmm2, xmm0 ; 11C6 _ 66: 0F EF. D0 + pslld xmm1, 7 ; 11CA _ 66: 0F 72. F1, 07 + por xmm1, xmm5 ; 11CF _ 66: 0F EB. CD + movdqa xmm5, xmm0 ; 11D3 _ 66: 0F 6F. E8 + pxor xmm2, xmm1 ; 11D7 _ 66: 0F EF. D1 + movdqa xmm7, xmm2 ; 11DB _ 66: 0F 6F. FA + pxor xmm6, xmm1 ; 11DF _ 66: 0F EF. F1 + pslld xmm5, 7 ; 11E3 _ 66: 0F 72. F5, 07 + pxor xmm6, xmm5 ; 11E8 _ 66: 0F EF. F5 + movdqa xmm5, xmm6 ; 11EC _ 66: 0F 6F. EE + pslld xmm7, 5 ; 11F0 _ 66: 0F 72. F7, 05 + psrld xmm2, 27 ; 11F5 _ 66: 0F 72. D2, 1B + por xmm7, xmm2 ; 11FA _ 66: 0F EB. FA + movd xmm2, dword [ecx+2B64H] ; 11FE _ 66: 0F 6E. 91, 00002B64 + pslld xmm5, 22 ; 1206 _ 66: 0F 72. F5, 16 + psrld xmm6, 10 ; 120B _ 66: 0F 72. D6, 0A + por xmm5, xmm6 ; 1210 _ 66: 0F EB. EE + movd xmm6, dword [ecx+2B60H] ; 1214 _ 66: 0F 6E. B1, 00002B60 + pshufd xmm6, xmm6, 0 ; 121C _ 66: 0F 70. F6, 00 + pxor xmm7, xmm6 ; 1221 _ 66: 0F EF. FE + pshufd xmm6, xmm2, 0 ; 1225 _ 66: 0F 70. F2, 00 + movd xmm2, dword [ecx+2B68H] ; 122A _ 66: 0F 6E. 91, 00002B68 + pxor xmm0, xmm6 ; 1232 _ 66: 0F EF. C6 + pshufd xmm6, xmm2, 0 ; 1236 _ 66: 0F 70. F2, 00 + pxor xmm5, xmm6 ; 123B _ 66: 0F EF. EE + pxor xmm5, xmm4 ; 123F _ 66: 0F EF. EC + movd xmm2, dword [ecx+2B6CH] ; 1243 _ 66: 0F 6E. 91, 00002B6C + pshufd xmm6, xmm2, 0 ; 124B _ 66: 0F 70. F2, 00 + pxor xmm1, xmm6 ; 1250 _ 66: 0F EF. CE + movdqa xmm2, xmm1 ; 1254 _ 66: 0F 6F. D1 + pand xmm2, xmm7 ; 1258 _ 66: 0F DB. D7 + pxor xmm7, xmm1 ; 125C _ 66: 0F EF. F9 + pxor xmm2, xmm5 ; 1260 _ 66: 0F EF. 
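+ ; pshufd reg,reg,0 after movd stands in for a broadcast instruction;
+ ; SSE2 has no vpbroadcastd (that arrives with AVX2), so every subkey
+ ; costs a load plus a shuffle.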
D5 + por xmm5, xmm1 ; 1264 _ 66: 0F EB. E9 + pxor xmm0, xmm2 ; 1268 _ 66: 0F EF. C2 + pxor xmm5, xmm7 ; 126C _ 66: 0F EF. EF + por xmm7, xmm0 ; 1270 _ 66: 0F EB. F8 + pxor xmm5, xmm0 ; 1274 _ 66: 0F EF. E8 + pxor xmm1, xmm7 ; 1278 _ 66: 0F EF. CF + por xmm7, xmm2 ; 127C _ 66: 0F EB. FA + pxor xmm7, xmm5 ; 1280 _ 66: 0F EF. FD + movdqa xmm6, xmm7 ; 1284 _ 66: 0F 6F. F7 + pxor xmm1, xmm2 ; 1288 _ 66: 0F EF. CA + pxor xmm1, xmm7 ; 128C _ 66: 0F EF. CF + pxor xmm2, xmm4 ; 1290 _ 66: 0F EF. D4 + pand xmm5, xmm1 ; 1294 _ 66: 0F DB. E9 + pxor xmm2, xmm5 ; 1298 _ 66: 0F EF. D5 + movdqa xmm5, xmm1 ; 129C _ 66: 0F 6F. E9 + pslld xmm6, 13 ; 12A0 _ 66: 0F 72. F6, 0D + psrld xmm7, 19 ; 12A5 _ 66: 0F 72. D7, 13 + por xmm6, xmm7 ; 12AA _ 66: 0F EB. F7 + pslld xmm5, 3 ; 12AE _ 66: 0F 72. F5, 03 + psrld xmm1, 29 ; 12B3 _ 66: 0F 72. D1, 1D + por xmm5, xmm1 ; 12B8 _ 66: 0F EB. E9 + movdqa xmm1, xmm6 ; 12BC _ 66: 0F 6F. CE + pxor xmm0, xmm6 ; 12C0 _ 66: 0F EF. C6 + pxor xmm0, xmm5 ; 12C4 _ 66: 0F EF. C5 + movdqa xmm7, xmm0 ; 12C8 _ 66: 0F 6F. F8 + pxor xmm2, xmm5 ; 12CC _ 66: 0F EF. D5 + pslld xmm1, 3 ; 12D0 _ 66: 0F 72. F1, 03 + pxor xmm2, xmm1 ; 12D5 _ 66: 0F EF. D1 + movdqa xmm1, xmm2 ; 12D9 _ 66: 0F 6F. CA + pslld xmm7, 1 ; 12DD _ 66: 0F 72. F7, 01 + psrld xmm0, 31 ; 12E2 _ 66: 0F 72. D0, 1F + por xmm7, xmm0 ; 12E7 _ 66: 0F EB. F8 + movdqa xmm0, xmm7 ; 12EB _ 66: 0F 6F. C7 + pslld xmm1, 7 ; 12EF _ 66: 0F 72. F1, 07 + psrld xmm2, 25 ; 12F4 _ 66: 0F 72. D2, 19 + por xmm1, xmm2 ; 12F9 _ 66: 0F EB. CA + pxor xmm6, xmm7 ; 12FD _ 66: 0F EF. F7 + pxor xmm6, xmm1 ; 1301 _ 66: 0F EF. F1 + movdqa xmm2, xmm6 ; 1305 _ 66: 0F 6F. D6 + pxor xmm5, xmm1 ; 1309 _ 66: 0F EF. E9 + pslld xmm0, 7 ; 130D _ 66: 0F 72. F0, 07 + pxor xmm5, xmm0 ; 1312 _ 66: 0F EF. E8 + movdqa xmm0, xmm5 ; 1316 _ 66: 0F 6F. C5 + pslld xmm2, 5 ; 131A _ 66: 0F 72. F2, 05 + psrld xmm6, 27 ; 131F _ 66: 0F 72. D6, 1B + por xmm2, xmm6 ; 1324 _ 66: 0F EB. D6 + pslld xmm0, 22 ; 1328 _ 66: 0F 72. F0, 16 + psrld xmm5, 10 ; 132D _ 66: 0F 72. D5, 0A + por xmm0, xmm5 ; 1332 _ 66: 0F EB. C5 + movd xmm5, dword [ecx+2B70H] ; 1336 _ 66: 0F 6E. A9, 00002B70 + pshufd xmm6, xmm5, 0 ; 133E _ 66: 0F 70. F5, 00 + movd xmm5, dword [ecx+2B74H] ; 1343 _ 66: 0F 6E. A9, 00002B74 + pxor xmm2, xmm6 ; 134B _ 66: 0F EF. D6 + pshufd xmm6, xmm5, 0 ; 134F _ 66: 0F 70. F5, 00 + pxor xmm7, xmm6 ; 1354 _ 66: 0F EF. FE + movd xmm5, dword [ecx+2B78H] ; 1358 _ 66: 0F 6E. A9, 00002B78 + pshufd xmm6, xmm5, 0 ; 1360 _ 66: 0F 70. F5, 00 + pxor xmm0, xmm6 ; 1365 _ 66: 0F EF. C6 + movd xmm5, dword [ecx+2B7CH] ; 1369 _ 66: 0F 6E. A9, 00002B7C + pshufd xmm6, xmm5, 0 ; 1371 _ 66: 0F 70. F5, 00 + movdqa xmm5, xmm7 ; 1376 _ 66: 0F 6F. EF + pxor xmm1, xmm6 ; 137A _ 66: 0F EF. CE + pxor xmm7, xmm0 ; 137E _ 66: 0F EF. F8 + por xmm5, xmm0 ; 1382 _ 66: 0F EB. E8 + pxor xmm5, xmm1 ; 1386 _ 66: 0F EF. E9 + pxor xmm0, xmm5 ; 138A _ 66: 0F EF. C5 + por xmm1, xmm7 ; 138E _ 66: 0F EB. CF + pand xmm1, xmm2 ; 1392 _ 66: 0F DB. CA + pxor xmm7, xmm0 ; 1396 _ 66: 0F EF. F8 + pxor xmm1, xmm5 ; 139A _ 66: 0F EF. CD + por xmm5, xmm7 ; 139E _ 66: 0F EB. EF + pxor xmm5, xmm2 ; 13A2 _ 66: 0F EF. EA + por xmm2, xmm7 ; 13A6 _ 66: 0F EB. D7 + pxor xmm2, xmm0 ; 13AA _ 66: 0F EF. D0 + pxor xmm5, xmm7 ; 13AE _ 66: 0F EF. EF + pxor xmm0, xmm5 ; 13B2 _ 66: 0F EF. C5 + pand xmm5, xmm2 ; 13B6 _ 66: 0F DB. EA + pxor xmm5, xmm7 ; 13BA _ 66: 0F EF. EF + movdqa xmm6, xmm5 ; 13BE _ 66: 0F 6F. F5 + pxor xmm0, xmm4 ; 13C2 _ 66: 0F EF. C4 + por xmm0, xmm2 ; 13C6 _ 66: 0F EB. C2 + pxor xmm7, xmm0 ; 13CA _ 66: 0F EF. 
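+ ; ecx carries the cipher context (every subkey load is [ecx+disp]) and
+ ; esi the input pointer; no other memory is touched in this stretch.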
F8 + movdqa xmm0, xmm7 ; 13CE _ 66: 0F 6F. C7 + psrld xmm7, 19 ; 13D2 _ 66: 0F 72. D7, 13 + pslld xmm6, 3 ; 13D7 _ 66: 0F 72. F6, 03 + pslld xmm0, 13 ; 13DC _ 66: 0F 72. F0, 0D + por xmm0, xmm7 ; 13E1 _ 66: 0F EB. C7 + movdqa xmm7, xmm0 ; 13E5 _ 66: 0F 6F. F8 + psrld xmm5, 29 ; 13E9 _ 66: 0F 72. D5, 1D + por xmm6, xmm5 ; 13EE _ 66: 0F EB. F5 + pxor xmm1, xmm0 ; 13F2 _ 66: 0F EF. C8 + pxor xmm1, xmm6 ; 13F6 _ 66: 0F EF. CE + pxor xmm2, xmm6 ; 13FA _ 66: 0F EF. D6 + pslld xmm7, 3 ; 13FE _ 66: 0F 72. F7, 03 + pxor xmm2, xmm7 ; 1403 _ 66: 0F EF. D7 + movdqa xmm7, xmm1 ; 1407 _ 66: 0F 6F. F9 + movdqa xmm5, xmm2 ; 140B _ 66: 0F 6F. EA + psrld xmm1, 31 ; 140F _ 66: 0F 72. D1, 1F + pslld xmm7, 1 ; 1414 _ 66: 0F 72. F7, 01 + por xmm7, xmm1 ; 1419 _ 66: 0F EB. F9 + pslld xmm5, 7 ; 141D _ 66: 0F 72. F5, 07 + psrld xmm2, 25 ; 1422 _ 66: 0F 72. D2, 19 + por xmm5, xmm2 ; 1427 _ 66: 0F EB. EA + movdqa xmm2, xmm7 ; 142B _ 66: 0F 6F. D7 + pxor xmm0, xmm7 ; 142F _ 66: 0F EF. C7 + pxor xmm0, xmm5 ; 1433 _ 66: 0F EF. C5 + pxor xmm6, xmm5 ; 1437 _ 66: 0F EF. F5 + pslld xmm2, 7 ; 143B _ 66: 0F 72. F2, 07 + pxor xmm6, xmm2 ; 1440 _ 66: 0F EF. F2 + movdqa xmm2, xmm0 ; 1444 _ 66: 0F 6F. D0 + movdqa xmm1, xmm6 ; 1448 _ 66: 0F 6F. CE + psrld xmm0, 27 ; 144C _ 66: 0F 72. D0, 1B + pslld xmm2, 5 ; 1451 _ 66: 0F 72. F2, 05 + por xmm2, xmm0 ; 1456 _ 66: 0F EB. D0 + movd xmm0, dword [ecx+2B80H] ; 145A _ 66: 0F 6E. 81, 00002B80 + pslld xmm1, 22 ; 1462 _ 66: 0F 72. F1, 16 + psrld xmm6, 10 ; 1467 _ 66: 0F 72. D6, 0A + por xmm1, xmm6 ; 146C _ 66: 0F EB. CE + pshufd xmm6, xmm0, 0 ; 1470 _ 66: 0F 70. F0, 00 + pxor xmm2, xmm6 ; 1475 _ 66: 0F EF. D6 + movd xmm0, dword [ecx+2B84H] ; 1479 _ 66: 0F 6E. 81, 00002B84 + pshufd xmm6, xmm0, 0 ; 1481 _ 66: 0F 70. F0, 00 + movd xmm0, dword [ecx+2B88H] ; 1486 _ 66: 0F 6E. 81, 00002B88 + pxor xmm7, xmm6 ; 148E _ 66: 0F EF. FE + pshufd xmm6, xmm0, 0 ; 1492 _ 66: 0F 70. F0, 00 + pxor xmm1, xmm6 ; 1497 _ 66: 0F EF. CE + movd xmm0, dword [ecx+2B8CH] ; 149B _ 66: 0F 6E. 81, 00002B8C + pshufd xmm6, xmm0, 0 ; 14A3 _ 66: 0F 70. F0, 00 + movdqa xmm0, xmm7 ; 14A8 _ 66: 0F 6F. C7 + pxor xmm5, xmm6 ; 14AC _ 66: 0F EF. EE + pxor xmm5, xmm2 ; 14B0 _ 66: 0F EF. EA + pand xmm0, xmm5 ; 14B4 _ 66: 0F DB. C5 + pxor xmm7, xmm1 ; 14B8 _ 66: 0F EF. F9 + pxor xmm0, xmm2 ; 14BC _ 66: 0F EF. C2 + por xmm2, xmm5 ; 14C0 _ 66: 0F EB. D5 + pxor xmm2, xmm7 ; 14C4 _ 66: 0F EF. D7 + pxor xmm7, xmm5 ; 14C8 _ 66: 0F EF. FD + pxor xmm5, xmm1 ; 14CC _ 66: 0F EF. E9 + por xmm1, xmm0 ; 14D0 _ 66: 0F EB. C8 + pxor xmm1, xmm7 ; 14D4 _ 66: 0F EF. CF + pxor xmm7, xmm4 ; 14D8 _ 66: 0F EF. FC + por xmm7, xmm0 ; 14DC _ 66: 0F EB. F8 + pxor xmm0, xmm5 ; 14E0 _ 66: 0F EF. C5 + pxor xmm0, xmm7 ; 14E4 _ 66: 0F EF. C7 + por xmm5, xmm2 ; 14E8 _ 66: 0F EB. EA + pxor xmm0, xmm5 ; 14EC _ 66: 0F EF. C5 + movdqa xmm6, xmm0 ; 14F0 _ 66: 0F 6F. F0 + pxor xmm7, xmm5 ; 14F4 _ 66: 0F EF. FD + psrld xmm0, 19 ; 14F8 _ 66: 0F 72. D0, 13 + pslld xmm6, 13 ; 14FD _ 66: 0F 72. F6, 0D + por xmm6, xmm0 ; 1502 _ 66: 0F EB. F0 + movdqa xmm0, xmm1 ; 1506 _ 66: 0F 6F. C1 + psrld xmm1, 29 ; 150A _ 66: 0F 72. D1, 1D + pxor xmm7, xmm6 ; 150F _ 66: 0F EF. FE + pslld xmm0, 3 ; 1513 _ 66: 0F 72. F0, 03 + por xmm0, xmm1 ; 1518 _ 66: 0F EB. C1 + movdqa xmm1, xmm6 ; 151C _ 66: 0F 6F. CE + pxor xmm7, xmm0 ; 1520 _ 66: 0F EF. F8 + movdqa xmm5, xmm7 ; 1524 _ 66: 0F 6F. EF + pxor xmm2, xmm0 ; 1528 _ 66: 0F EF. D0 + pslld xmm1, 3 ; 152C _ 66: 0F 72. F1, 03 + pxor xmm2, xmm1 ; 1531 _ 66: 0F EF. D1 + movdqa xmm1, xmm2 ; 1535 _ 66: 0F 6F. CA + pslld xmm5, 1 ; 1539 _ 66: 0F 72. 
F5, 01 + psrld xmm7, 31 ; 153E _ 66: 0F 72. D7, 1F + por xmm5, xmm7 ; 1543 _ 66: 0F EB. EF + movdqa xmm7, xmm5 ; 1547 _ 66: 0F 6F. FD + pslld xmm1, 7 ; 154B _ 66: 0F 72. F1, 07 + psrld xmm2, 25 ; 1550 _ 66: 0F 72. D2, 19 + por xmm1, xmm2 ; 1555 _ 66: 0F EB. CA + pxor xmm6, xmm5 ; 1559 _ 66: 0F EF. F5 + pxor xmm6, xmm1 ; 155D _ 66: 0F EF. F1 + movd xmm2, dword [ecx+2B90H] ; 1561 _ 66: 0F 6E. 91, 00002B90 + pxor xmm0, xmm1 ; 1569 _ 66: 0F EF. C1 + pslld xmm7, 7 ; 156D _ 66: 0F 72. F7, 07 + pxor xmm0, xmm7 ; 1572 _ 66: 0F EF. C7 + movdqa xmm7, xmm6 ; 1576 _ 66: 0F 6F. FE + psrld xmm6, 27 ; 157A _ 66: 0F 72. D6, 1B + pslld xmm7, 5 ; 157F _ 66: 0F 72. F7, 05 + por xmm7, xmm6 ; 1584 _ 66: 0F EB. FE + movdqa xmm6, xmm0 ; 1588 _ 66: 0F 6F. F0 + psrld xmm0, 10 ; 158C _ 66: 0F 72. D0, 0A + pslld xmm6, 22 ; 1591 _ 66: 0F 72. F6, 16 + por xmm6, xmm0 ; 1596 _ 66: 0F EB. F0 + pshufd xmm0, xmm2, 0 ; 159A _ 66: 0F 70. C2, 00 + movd xmm2, dword [ecx+2B94H] ; 159F _ 66: 0F 6E. 91, 00002B94 + pxor xmm7, xmm0 ; 15A7 _ 66: 0F EF. F8 + pshufd xmm0, xmm2, 0 ; 15AB _ 66: 0F 70. C2, 00 + pxor xmm5, xmm0 ; 15B0 _ 66: 0F EF. E8 + pxor xmm7, xmm4 ; 15B4 _ 66: 0F EF. FC + movd xmm2, dword [ecx+2B98H] ; 15B8 _ 66: 0F 6E. 91, 00002B98 + pshufd xmm0, xmm2, 0 ; 15C0 _ 66: 0F 70. C2, 00 + pxor xmm6, xmm0 ; 15C5 _ 66: 0F EF. F0 + movd xmm2, dword [ecx+2B9CH] ; 15C9 _ 66: 0F 6E. 91, 00002B9C + pshufd xmm0, xmm2, 0 ; 15D1 _ 66: 0F 70. C2, 00 + movdqa xmm2, xmm7 ; 15D6 _ 66: 0F 6F. D7 + pxor xmm1, xmm0 ; 15DA _ 66: 0F EF. C8 + pxor xmm6, xmm4 ; 15DE _ 66: 0F EF. F4 + pand xmm2, xmm5 ; 15E2 _ 66: 0F DB. D5 + pxor xmm6, xmm2 ; 15E6 _ 66: 0F EF. F2 + por xmm2, xmm1 ; 15EA _ 66: 0F EB. D1 + pxor xmm1, xmm6 ; 15EE _ 66: 0F EF. CE + pxor xmm5, xmm2 ; 15F2 _ 66: 0F EF. EA + pxor xmm2, xmm7 ; 15F6 _ 66: 0F EF. D7 + por xmm7, xmm5 ; 15FA _ 66: 0F EB. FD + pxor xmm5, xmm1 ; 15FE _ 66: 0F EF. E9 + por xmm6, xmm2 ; 1602 _ 66: 0F EB. F2 + pand xmm6, xmm7 ; 1606 _ 66: 0F DB. F7 + movdqa xmm0, xmm6 ; 160A _ 66: 0F 6F. C6 + pxor xmm2, xmm5 ; 160E _ 66: 0F EF. D5 + pand xmm5, xmm6 ; 1612 _ 66: 0F DB. EE + pxor xmm5, xmm2 ; 1616 _ 66: 0F EF. EA + pand xmm2, xmm6 ; 161A _ 66: 0F DB. D6 + pxor xmm7, xmm2 ; 161E _ 66: 0F EF. FA + pslld xmm0, 13 ; 1622 _ 66: 0F 72. F0, 0D + psrld xmm6, 19 ; 1627 _ 66: 0F 72. D6, 13 + por xmm0, xmm6 ; 162C _ 66: 0F EB. C6 + movdqa xmm6, xmm1 ; 1630 _ 66: 0F 6F. F1 + psrld xmm1, 29 ; 1634 _ 66: 0F 72. D1, 1D + pxor xmm7, xmm0 ; 1639 _ 66: 0F EF. F8 + pslld xmm6, 3 ; 163D _ 66: 0F 72. F6, 03 + por xmm6, xmm1 ; 1642 _ 66: 0F EB. F1 + movdqa xmm1, xmm0 ; 1646 _ 66: 0F 6F. C8 + pxor xmm7, xmm6 ; 164A _ 66: 0F EF. FE + pxor xmm5, xmm6 ; 164E _ 66: 0F EF. EE + pslld xmm1, 3 ; 1652 _ 66: 0F 72. F1, 03 + pxor xmm5, xmm1 ; 1657 _ 66: 0F EF. E9 + movdqa xmm1, xmm7 ; 165B _ 66: 0F 6F. CF + psrld xmm7, 31 ; 165F _ 66: 0F 72. D7, 1F + pslld xmm1, 1 ; 1664 _ 66: 0F 72. F1, 01 + por xmm1, xmm7 ; 1669 _ 66: 0F EB. CF + movdqa xmm7, xmm5 ; 166D _ 66: 0F 6F. FD + psrld xmm5, 25 ; 1671 _ 66: 0F 72. D5, 19 + pxor xmm0, xmm1 ; 1676 _ 66: 0F EF. C1 + pslld xmm7, 7 ; 167A _ 66: 0F 72. F7, 07 + por xmm7, xmm5 ; 167F _ 66: 0F EB. FD + movdqa xmm5, xmm1 ; 1683 _ 66: 0F 6F. E9 + pxor xmm0, xmm7 ; 1687 _ 66: 0F EF. C7 + movdqa xmm2, xmm0 ; 168B _ 66: 0F 6F. D0 + pxor xmm6, xmm7 ; 168F _ 66: 0F EF. F7 + pslld xmm5, 7 ; 1693 _ 66: 0F 72. F5, 07 + pxor xmm6, xmm5 ; 1698 _ 66: 0F EF. F5 + movdqa xmm5, xmm6 ; 169C _ 66: 0F 6F. EE + pslld xmm2, 5 ; 16A0 _ 66: 0F 72. F2, 05 + psrld xmm0, 27 ; 16A5 _ 66: 0F 72. 
D0, 1B + por xmm2, xmm0 ; 16AA _ 66: 0F EB. D0 + pslld xmm5, 22 ; 16AE _ 66: 0F 72. F5, 16 + psrld xmm6, 10 ; 16B3 _ 66: 0F 72. D6, 0A + por xmm5, xmm6 ; 16B8 _ 66: 0F EB. EE + movd xmm6, dword [ecx+2BA0H] ; 16BC _ 66: 0F 6E. B1, 00002BA0 + pshufd xmm0, xmm6, 0 ; 16C4 _ 66: 0F 70. C6, 00 + pxor xmm2, xmm0 ; 16C9 _ 66: 0F EF. D0 + movd xmm6, dword [ecx+2BA4H] ; 16CD _ 66: 0F 6E. B1, 00002BA4 + pshufd xmm0, xmm6, 0 ; 16D5 _ 66: 0F 70. C6, 00 + pxor xmm1, xmm0 ; 16DA _ 66: 0F EF. C8 + movd xmm6, dword [ecx+2BA8H] ; 16DE _ 66: 0F 6E. B1, 00002BA8 + pshufd xmm0, xmm6, 0 ; 16E6 _ 66: 0F 70. C6, 00 + movd xmm6, dword [ecx+2BACH] ; 16EB _ 66: 0F 6E. B1, 00002BAC + pxor xmm5, xmm0 ; 16F3 _ 66: 0F EF. E8 + pshufd xmm0, xmm6, 0 ; 16F7 _ 66: 0F 70. C6, 00 + movdqa xmm6, xmm2 ; 16FC _ 66: 0F 6F. F2 + pxor xmm7, xmm0 ; 1700 _ 66: 0F EF. F8 + pand xmm6, xmm5 ; 1704 _ 66: 0F DB. F5 + pxor xmm6, xmm7 ; 1708 _ 66: 0F EF. F7 + pxor xmm5, xmm1 ; 170C _ 66: 0F EF. E9 + pxor xmm5, xmm6 ; 1710 _ 66: 0F EF. EE + movdqa xmm0, xmm5 ; 1714 _ 66: 0F 6F. C5 + por xmm7, xmm2 ; 1718 _ 66: 0F EB. FA + pxor xmm7, xmm1 ; 171C _ 66: 0F EF. F9 + movdqa xmm1, xmm7 ; 1720 _ 66: 0F 6F. CF + pxor xmm2, xmm5 ; 1724 _ 66: 0F EF. D5 + pslld xmm0, 13 ; 1728 _ 66: 0F 72. F0, 0D + por xmm1, xmm2 ; 172D _ 66: 0F EB. CA + pxor xmm1, xmm6 ; 1731 _ 66: 0F EF. CE + pand xmm6, xmm7 ; 1735 _ 66: 0F DB. F7 + pxor xmm2, xmm6 ; 1739 _ 66: 0F EF. D6 + pxor xmm7, xmm1 ; 173D _ 66: 0F EF. F9 + pxor xmm7, xmm2 ; 1741 _ 66: 0F EF. FA + movdqa xmm6, xmm7 ; 1745 _ 66: 0F 6F. F7 + pxor xmm2, xmm4 ; 1749 _ 66: 0F EF. D4 + psrld xmm5, 19 ; 174D _ 66: 0F 72. D5, 13 + por xmm0, xmm5 ; 1752 _ 66: 0F EB. C5 + pslld xmm6, 3 ; 1756 _ 66: 0F 72. F6, 03 + psrld xmm7, 29 ; 175B _ 66: 0F 72. D7, 1D + por xmm6, xmm7 ; 1760 _ 66: 0F EB. F7 + movdqa xmm7, xmm0 ; 1764 _ 66: 0F 6F. F8 + pxor xmm1, xmm0 ; 1768 _ 66: 0F EF. C8 + pxor xmm1, xmm6 ; 176C _ 66: 0F EF. CE + movdqa xmm5, xmm1 ; 1770 _ 66: 0F 6F. E9 + pxor xmm2, xmm6 ; 1774 _ 66: 0F EF. D6 + pslld xmm7, 3 ; 1778 _ 66: 0F 72. F7, 03 + pxor xmm2, xmm7 ; 177D _ 66: 0F EF. D7 + pslld xmm5, 1 ; 1781 _ 66: 0F 72. F5, 01 + psrld xmm1, 31 ; 1786 _ 66: 0F 72. D1, 1F + por xmm5, xmm1 ; 178B _ 66: 0F EB. E9 + movdqa xmm1, xmm2 ; 178F _ 66: 0F 6F. CA + psrld xmm2, 25 ; 1793 _ 66: 0F 72. D2, 19 + pxor xmm0, xmm5 ; 1798 _ 66: 0F EF. C5 + pslld xmm1, 7 ; 179C _ 66: 0F 72. F1, 07 + por xmm1, xmm2 ; 17A1 _ 66: 0F EB. CA + movdqa xmm2, xmm5 ; 17A5 _ 66: 0F 6F. D5 + pxor xmm0, xmm1 ; 17A9 _ 66: 0F EF. C1 + pxor xmm6, xmm1 ; 17AD _ 66: 0F EF. F1 + pslld xmm2, 7 ; 17B1 _ 66: 0F 72. F2, 07 + pxor xmm6, xmm2 ; 17B6 _ 66: 0F EF. F2 + movdqa xmm2, xmm0 ; 17BA _ 66: 0F 6F. D0 + psrld xmm0, 27 ; 17BE _ 66: 0F 72. D0, 1B + pslld xmm2, 5 ; 17C3 _ 66: 0F 72. F2, 05 + por xmm2, xmm0 ; 17C8 _ 66: 0F EB. D0 + movdqa xmm0, xmm6 ; 17CC _ 66: 0F 6F. C6 + psrld xmm6, 10 ; 17D0 _ 66: 0F 72. D6, 0A + pslld xmm0, 22 ; 17D5 _ 66: 0F 72. F0, 16 + por xmm0, xmm6 ; 17DA _ 66: 0F EB. C6 + movd xmm6, dword [ecx+2BB0H] ; 17DE _ 66: 0F 6E. B1, 00002BB0 + pshufd xmm7, xmm6, 0 ; 17E6 _ 66: 0F 70. FE, 00 + pxor xmm2, xmm7 ; 17EB _ 66: 0F EF. D7 + movd xmm6, dword [ecx+2BB4H] ; 17EF _ 66: 0F 6E. B1, 00002BB4 + pshufd xmm7, xmm6, 0 ; 17F7 _ 66: 0F 70. FE, 00 + movd xmm6, dword [ecx+2BB8H] ; 17FC _ 66: 0F 6E. B1, 00002BB8 + pxor xmm5, xmm7 ; 1804 _ 66: 0F EF. EF + pshufd xmm7, xmm6, 0 ; 1808 _ 66: 0F 70. FE, 00 + pxor xmm0, xmm7 ; 180D _ 66: 0F EF. C7 + movd xmm6, dword [ecx+2BBCH] ; 1811 _ 66: 0F 6E. B1, 00002BBC + pshufd xmm7, xmm6, 0 ; 1819 _ 66: 0F 70. 
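+ ; The code confines itself to xmm0..xmm7, all that 32-bit SSE2 offers:
+ ; four state words, the mask in xmm4, and three scratch registers, which
+ ; is why values are shuffled between registers so aggressively.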
FE, 00 + pxor xmm1, xmm7 ; 181E _ 66: 0F EF. CF + movdqa xmm7, xmm2 ; 1822 _ 66: 0F 6F. FA + por xmm7, xmm1 ; 1826 _ 66: 0F EB. F9 + pxor xmm1, xmm5 ; 182A _ 66: 0F EF. CD + pand xmm5, xmm2 ; 182E _ 66: 0F DB. EA + pxor xmm2, xmm0 ; 1832 _ 66: 0F EF. D0 + pxor xmm0, xmm1 ; 1836 _ 66: 0F EF. C1 + pand xmm1, xmm7 ; 183A _ 66: 0F DB. CF + por xmm2, xmm5 ; 183E _ 66: 0F EB. D5 + pxor xmm1, xmm2 ; 1842 _ 66: 0F EF. CA + pxor xmm7, xmm5 ; 1846 _ 66: 0F EF. FD + pand xmm2, xmm7 ; 184A _ 66: 0F DB. D7 + pxor xmm5, xmm1 ; 184E _ 66: 0F EF. E9 + pxor xmm2, xmm0 ; 1852 _ 66: 0F EF. D0 + por xmm5, xmm7 ; 1856 _ 66: 0F EB. EF + pxor xmm5, xmm0 ; 185A _ 66: 0F EF. E8 + movdqa xmm0, xmm5 ; 185E _ 66: 0F 6F. C5 + pxor xmm7, xmm1 ; 1862 _ 66: 0F EF. F9 + por xmm0, xmm1 ; 1866 _ 66: 0F EB. C1 + pxor xmm7, xmm0 ; 186A _ 66: 0F EF. F8 + movdqa xmm6, xmm7 ; 186E _ 66: 0F 6F. F7 + movdqa xmm0, xmm1 ; 1872 _ 66: 0F 6F. C1 + psrld xmm7, 19 ; 1876 _ 66: 0F 72. D7, 13 + pslld xmm6, 13 ; 187B _ 66: 0F 72. F6, 0D + por xmm6, xmm7 ; 1880 _ 66: 0F EB. F7 + pslld xmm0, 3 ; 1884 _ 66: 0F 72. F0, 03 + psrld xmm1, 29 ; 1889 _ 66: 0F 72. D1, 1D + por xmm0, xmm1 ; 188E _ 66: 0F EB. C1 + movdqa xmm1, xmm6 ; 1892 _ 66: 0F 6F. CE + pxor xmm5, xmm6 ; 1896 _ 66: 0F EF. EE + pxor xmm5, xmm0 ; 189A _ 66: 0F EF. E8 + pxor xmm2, xmm0 ; 189E _ 66: 0F EF. D0 + pslld xmm1, 3 ; 18A2 _ 66: 0F 72. F1, 03 + pxor xmm2, xmm1 ; 18A7 _ 66: 0F EF. D1 + movdqa xmm1, xmm5 ; 18AB _ 66: 0F 6F. CD + movdqa xmm7, xmm2 ; 18AF _ 66: 0F 6F. FA + psrld xmm5, 31 ; 18B3 _ 66: 0F 72. D5, 1F + pslld xmm1, 1 ; 18B8 _ 66: 0F 72. F1, 01 + por xmm1, xmm5 ; 18BD _ 66: 0F EB. CD + movdqa xmm5, xmm1 ; 18C1 _ 66: 0F 6F. E9 + pslld xmm7, 7 ; 18C5 _ 66: 0F 72. F7, 07 + psrld xmm2, 25 ; 18CA _ 66: 0F 72. D2, 19 + por xmm7, xmm2 ; 18CF _ 66: 0F EB. FA + pxor xmm6, xmm1 ; 18D3 _ 66: 0F EF. F1 + movd xmm2, dword [ecx+2BC0H] ; 18D7 _ 66: 0F 6E. 91, 00002BC0 + pxor xmm6, xmm7 ; 18DF _ 66: 0F EF. F7 + pxor xmm0, xmm7 ; 18E3 _ 66: 0F EF. C7 + pslld xmm5, 7 ; 18E7 _ 66: 0F 72. F5, 07 + pxor xmm0, xmm5 ; 18EC _ 66: 0F EF. C5 + movdqa xmm5, xmm6 ; 18F0 _ 66: 0F 6F. EE + psrld xmm6, 27 ; 18F4 _ 66: 0F 72. D6, 1B + pslld xmm5, 5 ; 18F9 _ 66: 0F 72. F5, 05 + por xmm5, xmm6 ; 18FE _ 66: 0F EB. EE + movdqa xmm6, xmm0 ; 1902 _ 66: 0F 6F. F0 + psrld xmm0, 10 ; 1906 _ 66: 0F 72. D0, 0A + pslld xmm6, 22 ; 190B _ 66: 0F 72. F6, 16 + por xmm6, xmm0 ; 1910 _ 66: 0F EB. F0 + pshufd xmm0, xmm2, 0 ; 1914 _ 66: 0F 70. C2, 00 + pxor xmm5, xmm0 ; 1919 _ 66: 0F EF. E8 + movd xmm2, dword [ecx+2BC4H] ; 191D _ 66: 0F 6E. 91, 00002BC4 + pshufd xmm0, xmm2, 0 ; 1925 _ 66: 0F 70. C2, 00 + pxor xmm1, xmm0 ; 192A _ 66: 0F EF. C8 + movd xmm2, dword [ecx+2BC8H] ; 192E _ 66: 0F 6E. 91, 00002BC8 + pshufd xmm0, xmm2, 0 ; 1936 _ 66: 0F 70. C2, 00 + pxor xmm6, xmm0 ; 193B _ 66: 0F EF. F0 + movd xmm2, dword [ecx+2BCCH] ; 193F _ 66: 0F 6E. 91, 00002BCC + pshufd xmm0, xmm2, 0 ; 1947 _ 66: 0F 70. C2, 00 + pxor xmm7, xmm0 ; 194C _ 66: 0F EF. F8 + pxor xmm1, xmm7 ; 1950 _ 66: 0F EF. CF + movdqa xmm2, xmm1 ; 1954 _ 66: 0F 6F. D1 + pxor xmm7, xmm4 ; 1958 _ 66: 0F EF. FC + pxor xmm6, xmm7 ; 195C _ 66: 0F EF. F7 + pxor xmm7, xmm5 ; 1960 _ 66: 0F EF. FD + pand xmm2, xmm7 ; 1964 _ 66: 0F DB. D7 + pxor xmm2, xmm6 ; 1968 _ 66: 0F EF. D6 + movdqa xmm0, xmm2 ; 196C _ 66: 0F 6F. C2 + pxor xmm1, xmm7 ; 1970 _ 66: 0F EF. CF + pxor xmm5, xmm1 ; 1974 _ 66: 0F EF. E9 + pand xmm6, xmm1 ; 1978 _ 66: 0F DB. F1 + pxor xmm6, xmm5 ; 197C _ 66: 0F EF. F5 + pand xmm5, xmm2 ; 1980 _ 66: 0F DB. EA + pxor xmm7, xmm5 ; 1984 _ 66: 0F EF. 
FD + por xmm1, xmm2 ; 1988 _ 66: 0F EB. CA + pxor xmm1, xmm5 ; 198C _ 66: 0F EF. CD + por xmm5, xmm7 ; 1990 _ 66: 0F EB. EF + pxor xmm5, xmm6 ; 1994 _ 66: 0F EF. EE + pand xmm6, xmm7 ; 1998 _ 66: 0F DB. F7 + pxor xmm5, xmm4 ; 199C _ 66: 0F EF. EC + pxor xmm1, xmm6 ; 19A0 _ 66: 0F EF. CE + movdqa xmm6, xmm5 ; 19A4 _ 66: 0F 6F. F5 + pslld xmm0, 13 ; 19A8 _ 66: 0F 72. F0, 0D + psrld xmm2, 19 ; 19AD _ 66: 0F 72. D2, 13 + por xmm0, xmm2 ; 19B2 _ 66: 0F EB. C2 + pslld xmm6, 3 ; 19B6 _ 66: 0F 72. F6, 03 + psrld xmm5, 29 ; 19BB _ 66: 0F 72. D5, 1D + por xmm6, xmm5 ; 19C0 _ 66: 0F EB. F5 + movdqa xmm5, xmm0 ; 19C4 _ 66: 0F 6F. E8 + pxor xmm1, xmm0 ; 19C8 _ 66: 0F EF. C8 + pxor xmm1, xmm6 ; 19CC _ 66: 0F EF. CE + pxor xmm7, xmm6 ; 19D0 _ 66: 0F EF. FE + pslld xmm5, 3 ; 19D4 _ 66: 0F 72. F5, 03 + pxor xmm7, xmm5 ; 19D9 _ 66: 0F EF. FD + movdqa xmm5, xmm1 ; 19DD _ 66: 0F 6F. E9 + movdqa xmm2, xmm7 ; 19E1 _ 66: 0F 6F. D7 + psrld xmm1, 31 ; 19E5 _ 66: 0F 72. D1, 1F + pslld xmm5, 1 ; 19EA _ 66: 0F 72. F5, 01 + por xmm5, xmm1 ; 19EF _ 66: 0F EB. E9 + pslld xmm2, 7 ; 19F3 _ 66: 0F 72. F2, 07 + psrld xmm7, 25 ; 19F8 _ 66: 0F 72. D7, 19 + por xmm2, xmm7 ; 19FD _ 66: 0F EB. D7 + movdqa xmm7, xmm5 ; 1A01 _ 66: 0F 6F. FD + pxor xmm0, xmm5 ; 1A05 _ 66: 0F EF. C5 + pxor xmm0, xmm2 ; 1A09 _ 66: 0F EF. C2 + movdqa xmm1, xmm0 ; 1A0D _ 66: 0F 6F. C8 + pxor xmm6, xmm2 ; 1A11 _ 66: 0F EF. F2 + pslld xmm7, 7 ; 1A15 _ 66: 0F 72. F7, 07 + pxor xmm6, xmm7 ; 1A1A _ 66: 0F EF. F7 + pslld xmm1, 5 ; 1A1E _ 66: 0F 72. F1, 05 + psrld xmm0, 27 ; 1A23 _ 66: 0F 72. D0, 1B + por xmm1, xmm0 ; 1A28 _ 66: 0F EB. C8 + movdqa xmm0, xmm6 ; 1A2C _ 66: 0F 6F. C6 + psrld xmm6, 10 ; 1A30 _ 66: 0F 72. D6, 0A + pslld xmm0, 22 ; 1A35 _ 66: 0F 72. F0, 16 + por xmm0, xmm6 ; 1A3A _ 66: 0F EB. C6 + movd xmm6, dword [ecx+2BD0H] ; 1A3E _ 66: 0F 6E. B1, 00002BD0 + pshufd xmm6, xmm6, 0 ; 1A46 _ 66: 0F 70. F6, 00 + pxor xmm1, xmm6 ; 1A4B _ 66: 0F EF. CE + movd xmm7, dword [ecx+2BD4H] ; 1A4F _ 66: 0F 6E. B9, 00002BD4 + pshufd xmm6, xmm7, 0 ; 1A57 _ 66: 0F 70. F7, 00 + pxor xmm5, xmm6 ; 1A5C _ 66: 0F EF. EE + pxor xmm1, xmm5 ; 1A60 _ 66: 0F EF. CD + movd xmm7, dword [ecx+2BD8H] ; 1A64 _ 66: 0F 6E. B9, 00002BD8 + pshufd xmm6, xmm7, 0 ; 1A6C _ 66: 0F 70. F7, 00 + pxor xmm0, xmm6 ; 1A71 _ 66: 0F EF. C6 + movd xmm7, dword [ecx+2BDCH] ; 1A75 _ 66: 0F 6E. B9, 00002BDC + pshufd xmm6, xmm7, 0 ; 1A7D _ 66: 0F 70. F7, 00 + pxor xmm2, xmm6 ; 1A82 _ 66: 0F EF. D6 + pxor xmm5, xmm2 ; 1A86 _ 66: 0F EF. EA + movdqa xmm7, xmm5 ; 1A8A _ 66: 0F 6F. FD + pxor xmm2, xmm4 ; 1A8E _ 66: 0F EF. D4 + pxor xmm0, xmm2 ; 1A92 _ 66: 0F EF. C2 + pand xmm7, xmm1 ; 1A96 _ 66: 0F DB. F9 + pxor xmm7, xmm0 ; 1A9A _ 66: 0F EF. F8 + movdqa xmm6, xmm7 ; 1A9E _ 66: 0F 6F. F7 + por xmm0, xmm5 ; 1AA2 _ 66: 0F EB. C5 + pxor xmm5, xmm2 ; 1AA6 _ 66: 0F EF. EA + pand xmm2, xmm7 ; 1AAA _ 66: 0F DB. D7 + pxor xmm2, xmm1 ; 1AAE _ 66: 0F EF. D1 + pxor xmm5, xmm7 ; 1AB2 _ 66: 0F EF. EF + pxor xmm5, xmm0 ; 1AB6 _ 66: 0F EF. E8 + pxor xmm0, xmm1 ; 1ABA _ 66: 0F EF. C1 + pand xmm1, xmm2 ; 1ABE _ 66: 0F DB. CA + pxor xmm0, xmm4 ; 1AC2 _ 66: 0F EF. C4 + pxor xmm1, xmm5 ; 1AC6 _ 66: 0F EF. CD + por xmm5, xmm2 ; 1ACA _ 66: 0F EB. EA + pxor xmm5, xmm0 ; 1ACE _ 66: 0F EF. E8 + movdqa xmm0, xmm1 ; 1AD2 _ 66: 0F 6F. C1 + pslld xmm6, 13 ; 1AD6 _ 66: 0F 72. F6, 0D + psrld xmm7, 19 ; 1ADB _ 66: 0F 72. D7, 13 + por xmm6, xmm7 ; 1AE0 _ 66: 0F EB. F7 + pslld xmm0, 3 ; 1AE4 _ 66: 0F 72. F0, 03 + psrld xmm1, 29 ; 1AE9 _ 66: 0F 72. D1, 1D + por xmm0, xmm1 ; 1AEE _ 66: 0F EB. C1 + movdqa xmm1, xmm6 ; 1AF2 _ 66: 0F 6F. 
CE + pxor xmm2, xmm6 ; 1AF6 _ 66: 0F EF. D6 + pxor xmm2, xmm0 ; 1AFA _ 66: 0F EF. D0 + pxor xmm5, xmm0 ; 1AFE _ 66: 0F EF. E8 + pslld xmm1, 3 ; 1B02 _ 66: 0F 72. F1, 03 + pxor xmm5, xmm1 ; 1B07 _ 66: 0F EF. E9 + movdqa xmm1, xmm2 ; 1B0B _ 66: 0F 6F. CA + movdqa xmm7, xmm5 ; 1B0F _ 66: 0F 6F. FD + psrld xmm2, 31 ; 1B13 _ 66: 0F 72. D2, 1F + pslld xmm1, 1 ; 1B18 _ 66: 0F 72. F1, 01 + por xmm1, xmm2 ; 1B1D _ 66: 0F EB. CA + movdqa xmm2, xmm1 ; 1B21 _ 66: 0F 6F. D1 + pslld xmm7, 7 ; 1B25 _ 66: 0F 72. F7, 07 + psrld xmm5, 25 ; 1B2A _ 66: 0F 72. D5, 19 + por xmm7, xmm5 ; 1B2F _ 66: 0F EB. FD + pxor xmm6, xmm1 ; 1B33 _ 66: 0F EF. F1 + pxor xmm6, xmm7 ; 1B37 _ 66: 0F EF. F7 + movdqa xmm5, xmm6 ; 1B3B _ 66: 0F 6F. EE + pxor xmm0, xmm7 ; 1B3F _ 66: 0F EF. C7 + pslld xmm2, 7 ; 1B43 _ 66: 0F 72. F2, 07 + pxor xmm0, xmm2 ; 1B48 _ 66: 0F EF. C2 + movdqa xmm2, xmm0 ; 1B4C _ 66: 0F 6F. D0 + pslld xmm5, 5 ; 1B50 _ 66: 0F 72. F5, 05 + psrld xmm6, 27 ; 1B55 _ 66: 0F 72. D6, 1B + por xmm5, xmm6 ; 1B5A _ 66: 0F EB. EE + pslld xmm2, 22 ; 1B5E _ 66: 0F 72. F2, 16 + psrld xmm0, 10 ; 1B63 _ 66: 0F 72. D0, 0A + por xmm2, xmm0 ; 1B68 _ 66: 0F EB. D0 + movd xmm0, dword [ecx+2BE0H] ; 1B6C _ 66: 0F 6E. 81, 00002BE0 + pshufd xmm6, xmm0, 0 ; 1B74 _ 66: 0F 70. F0, 00 + pxor xmm5, xmm6 ; 1B79 _ 66: 0F EF. EE + movd xmm0, dword [ecx+2BE4H] ; 1B7D _ 66: 0F 6E. 81, 00002BE4 + pshufd xmm6, xmm0, 0 ; 1B85 _ 66: 0F 70. F0, 00 + pxor xmm1, xmm6 ; 1B8A _ 66: 0F EF. CE + movd xmm0, dword [ecx+2BE8H] ; 1B8E _ 66: 0F 6E. 81, 00002BE8 + pshufd xmm6, xmm0, 0 ; 1B96 _ 66: 0F 70. F0, 00 + movd xmm0, dword [ecx+2BECH] ; 1B9B _ 66: 0F 6E. 81, 00002BEC + pxor xmm2, xmm6 ; 1BA3 _ 66: 0F EF. D6 + pshufd xmm6, xmm0, 0 ; 1BA7 _ 66: 0F 70. F0, 00 + pxor xmm7, xmm6 ; 1BAC _ 66: 0F EF. FE + movdqa xmm0, xmm7 ; 1BB0 _ 66: 0F 6F. C7 + pxor xmm2, xmm4 ; 1BB4 _ 66: 0F EF. D4 + pand xmm0, xmm5 ; 1BB8 _ 66: 0F DB. C5 + pxor xmm5, xmm7 ; 1BBC _ 66: 0F EF. EF + pxor xmm0, xmm2 ; 1BC0 _ 66: 0F EF. C2 + por xmm2, xmm7 ; 1BC4 _ 66: 0F EB. D7 + pxor xmm1, xmm0 ; 1BC8 _ 66: 0F EF. C8 + pxor xmm2, xmm5 ; 1BCC _ 66: 0F EF. D5 + por xmm5, xmm1 ; 1BD0 _ 66: 0F EB. E9 + pxor xmm2, xmm1 ; 1BD4 _ 66: 0F EF. D1 + pxor xmm7, xmm5 ; 1BD8 _ 66: 0F EF. FD + por xmm5, xmm0 ; 1BDC _ 66: 0F EB. E8 + pxor xmm5, xmm2 ; 1BE0 _ 66: 0F EF. EA + movdqa xmm6, xmm5 ; 1BE4 _ 66: 0F 6F. F5 + pxor xmm7, xmm0 ; 1BE8 _ 66: 0F EF. F8 + pxor xmm7, xmm5 ; 1BEC _ 66: 0F EF. FD + pxor xmm0, xmm4 ; 1BF0 _ 66: 0F EF. C4 + pand xmm2, xmm7 ; 1BF4 _ 66: 0F DB. D7 + pxor xmm0, xmm2 ; 1BF8 _ 66: 0F EF. C2 + pslld xmm6, 13 ; 1BFC _ 66: 0F 72. F6, 0D + psrld xmm5, 19 ; 1C01 _ 66: 0F 72. D5, 13 + por xmm6, xmm5 ; 1C06 _ 66: 0F EB. F5 + movdqa xmm5, xmm7 ; 1C0A _ 66: 0F 6F. EF + psrld xmm7, 29 ; 1C0E _ 66: 0F 72. D7, 1D + pxor xmm1, xmm6 ; 1C13 _ 66: 0F EF. CE + pslld xmm5, 3 ; 1C17 _ 66: 0F 72. F5, 03 + por xmm5, xmm7 ; 1C1C _ 66: 0F EB. EF + movdqa xmm7, xmm6 ; 1C20 _ 66: 0F 6F. FE + pxor xmm1, xmm5 ; 1C24 _ 66: 0F EF. CD + movdqa xmm2, xmm1 ; 1C28 _ 66: 0F 6F. D1 + pxor xmm0, xmm5 ; 1C2C _ 66: 0F EF. C5 + pslld xmm7, 3 ; 1C30 _ 66: 0F 72. F7, 03 + pxor xmm0, xmm7 ; 1C35 _ 66: 0F EF. C7 + movd xmm7, dword [ecx+2BF4H] ; 1C39 _ 66: 0F 6E. B9, 00002BF4 + pslld xmm2, 1 ; 1C41 _ 66: 0F 72. F2, 01 + psrld xmm1, 31 ; 1C46 _ 66: 0F 72. D1, 1F + por xmm2, xmm1 ; 1C4B _ 66: 0F EB. D1 + movdqa xmm1, xmm0 ; 1C4F _ 66: 0F 6F. C8 + psrld xmm0, 25 ; 1C53 _ 66: 0F 72. D0, 19 + pxor xmm6, xmm2 ; 1C58 _ 66: 0F EF. F2 + pslld xmm1, 7 ; 1C5C _ 66: 0F 72. F1, 07 + por xmm1, xmm0 ; 1C61 _ 66: 0F EB. 
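+ ; Subkey loads may appear out of program order ([ecx+2BF4H] just above is
+ ; fetched before [ecx+2BF0H] below); only the XOR destinations matter.
+ ; With the schedule consumed up to [ecx+2C00H] here, roughly 25 of the
+ ; 32 Serpent rounds are done; the rest, presumably with the final key mix
+ ; and output transpose, continue past this chunk.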
C8 + movdqa xmm0, xmm2 ; 1C65 _ 66: 0F 6F. C2 + pxor xmm6, xmm1 ; 1C69 _ 66: 0F EF. F1 + pxor xmm5, xmm1 ; 1C6D _ 66: 0F EF. E9 + pslld xmm0, 7 ; 1C71 _ 66: 0F 72. F0, 07 + pxor xmm5, xmm0 ; 1C76 _ 66: 0F EF. E8 + movdqa xmm0, xmm6 ; 1C7A _ 66: 0F 6F. C6 + psrld xmm6, 27 ; 1C7E _ 66: 0F 72. D6, 1B + pslld xmm0, 5 ; 1C83 _ 66: 0F 72. F0, 05 + por xmm0, xmm6 ; 1C88 _ 66: 0F EB. C6 + movdqa xmm6, xmm5 ; 1C8C _ 66: 0F 6F. F5 + psrld xmm5, 10 ; 1C90 _ 66: 0F 72. D5, 0A + pslld xmm6, 22 ; 1C95 _ 66: 0F 72. F6, 16 + por xmm6, xmm5 ; 1C9A _ 66: 0F EB. F5 + movd xmm5, dword [ecx+2BF0H] ; 1C9E _ 66: 0F 6E. A9, 00002BF0 + pshufd xmm5, xmm5, 0 ; 1CA6 _ 66: 0F 70. ED, 00 + pxor xmm0, xmm5 ; 1CAB _ 66: 0F EF. C5 + pshufd xmm5, xmm7, 0 ; 1CAF _ 66: 0F 70. EF, 00 + movd xmm7, dword [ecx+2BF8H] ; 1CB4 _ 66: 0F 6E. B9, 00002BF8 + pxor xmm2, xmm5 ; 1CBC _ 66: 0F EF. D5 + pshufd xmm5, xmm7, 0 ; 1CC0 _ 66: 0F 70. EF, 00 + pxor xmm6, xmm5 ; 1CC5 _ 66: 0F EF. F5 + movd xmm7, dword [ecx+2BFCH] ; 1CC9 _ 66: 0F 6E. B9, 00002BFC + pshufd xmm5, xmm7, 0 ; 1CD1 _ 66: 0F 70. EF, 00 + movdqa xmm7, xmm2 ; 1CD6 _ 66: 0F 6F. FA + pxor xmm1, xmm5 ; 1CDA _ 66: 0F EF. CD + pxor xmm2, xmm6 ; 1CDE _ 66: 0F EF. D6 + por xmm7, xmm6 ; 1CE2 _ 66: 0F EB. FE + pxor xmm7, xmm1 ; 1CE6 _ 66: 0F EF. F9 + pxor xmm6, xmm7 ; 1CEA _ 66: 0F EF. F7 + por xmm1, xmm2 ; 1CEE _ 66: 0F EB. CA + pand xmm1, xmm0 ; 1CF2 _ 66: 0F DB. C8 + pxor xmm2, xmm6 ; 1CF6 _ 66: 0F EF. D6 + pxor xmm1, xmm7 ; 1CFA _ 66: 0F EF. CF + por xmm7, xmm2 ; 1CFE _ 66: 0F EB. FA + pxor xmm7, xmm0 ; 1D02 _ 66: 0F EF. F8 + por xmm0, xmm2 ; 1D06 _ 66: 0F EB. C2 + pxor xmm0, xmm6 ; 1D0A _ 66: 0F EF. C6 + pxor xmm7, xmm2 ; 1D0E _ 66: 0F EF. FA + pxor xmm6, xmm7 ; 1D12 _ 66: 0F EF. F7 + pand xmm7, xmm0 ; 1D16 _ 66: 0F DB. F8 + pxor xmm7, xmm2 ; 1D1A _ 66: 0F EF. FA + pxor xmm6, xmm4 ; 1D1E _ 66: 0F EF. F4 + por xmm6, xmm0 ; 1D22 _ 66: 0F EB. F0 + pxor xmm2, xmm6 ; 1D26 _ 66: 0F EF. D6 + movdqa xmm5, xmm2 ; 1D2A _ 66: 0F 6F. EA + movdqa xmm6, xmm7 ; 1D2E _ 66: 0F 6F. F7 + psrld xmm2, 19 ; 1D32 _ 66: 0F 72. D2, 13 + pslld xmm5, 13 ; 1D37 _ 66: 0F 72. F5, 0D + por xmm5, xmm2 ; 1D3C _ 66: 0F EB. EA + movdqa xmm2, xmm5 ; 1D40 _ 66: 0F 6F. D5 + pslld xmm6, 3 ; 1D44 _ 66: 0F 72. F6, 03 + psrld xmm7, 29 ; 1D49 _ 66: 0F 72. D7, 1D + por xmm6, xmm7 ; 1D4E _ 66: 0F EB. F7 + pxor xmm1, xmm5 ; 1D52 _ 66: 0F EF. CD + pxor xmm1, xmm6 ; 1D56 _ 66: 0F EF. CE + movdqa xmm7, xmm1 ; 1D5A _ 66: 0F 6F. F9 + pxor xmm0, xmm6 ; 1D5E _ 66: 0F EF. C6 + pslld xmm2, 3 ; 1D62 _ 66: 0F 72. F2, 03 + pxor xmm0, xmm2 ; 1D67 _ 66: 0F EF. C2 + pslld xmm7, 1 ; 1D6B _ 66: 0F 72. F7, 01 + psrld xmm1, 31 ; 1D70 _ 66: 0F 72. D1, 1F + por xmm7, xmm1 ; 1D75 _ 66: 0F EB. F9 + movdqa xmm1, xmm0 ; 1D79 _ 66: 0F 6F. C8 + psrld xmm0, 25 ; 1D7D _ 66: 0F 72. D0, 19 + pxor xmm5, xmm7 ; 1D82 _ 66: 0F EF. EF + pslld xmm1, 7 ; 1D86 _ 66: 0F 72. F1, 07 + por xmm1, xmm0 ; 1D8B _ 66: 0F EB. C8 + movdqa xmm0, xmm7 ; 1D8F _ 66: 0F 6F. C7 + pxor xmm5, xmm1 ; 1D93 _ 66: 0F EF. E9 + pxor xmm6, xmm1 ; 1D97 _ 66: 0F EF. F1 + pslld xmm0, 7 ; 1D9B _ 66: 0F 72. F0, 07 + pxor xmm6, xmm0 ; 1DA0 _ 66: 0F EF. F0 + movdqa xmm0, xmm5 ; 1DA4 _ 66: 0F 6F. C5 + psrld xmm5, 27 ; 1DA8 _ 66: 0F 72. D5, 1B + pslld xmm0, 5 ; 1DAD _ 66: 0F 72. F0, 05 + por xmm0, xmm5 ; 1DB2 _ 66: 0F EB. C5 + movdqa xmm5, xmm6 ; 1DB6 _ 66: 0F 6F. EE + psrld xmm6, 10 ; 1DBA _ 66: 0F 72. D6, 0A + pslld xmm5, 22 ; 1DBF _ 66: 0F 72. F5, 16 + por xmm5, xmm6 ; 1DC4 _ 66: 0F EB. EE + movd xmm6, dword [ecx+2C00H] ; 1DC8 _ 66: 0F 6E. B1, 00002C00 + pshufd xmm2, xmm6, 0 ; 1DD0 _ 66: 0F 70. 
D6, 00 + pxor xmm0, xmm2 ; 1DD5 _ 66: 0F EF. C2 + movd xmm6, dword [ecx+2C04H] ; 1DD9 _ 66: 0F 6E. B1, 00002C04 + pshufd xmm2, xmm6, 0 ; 1DE1 _ 66: 0F 70. D6, 00 + pxor xmm7, xmm2 ; 1DE6 _ 66: 0F EF. FA + movd xmm6, dword [ecx+2C08H] ; 1DEA _ 66: 0F 6E. B1, 00002C08 + pshufd xmm2, xmm6, 0 ; 1DF2 _ 66: 0F 70. D6, 00 + movd xmm6, dword [ecx+2C0CH] ; 1DF7 _ 66: 0F 6E. B1, 00002C0C + pxor xmm5, xmm2 ; 1DFF _ 66: 0F EF. EA + pshufd xmm2, xmm6, 0 ; 1E03 _ 66: 0F 70. D6, 00 + pxor xmm1, xmm2 ; 1E08 _ 66: 0F EF. CA + movdqa xmm2, xmm7 ; 1E0C _ 66: 0F 6F. D7 + pxor xmm1, xmm0 ; 1E10 _ 66: 0F EF. C8 + pxor xmm7, xmm5 ; 1E14 _ 66: 0F EF. FD + pand xmm2, xmm1 ; 1E18 _ 66: 0F DB. D1 + pxor xmm2, xmm0 ; 1E1C _ 66: 0F EF. D0 + por xmm0, xmm1 ; 1E20 _ 66: 0F EB. C1 + pxor xmm0, xmm7 ; 1E24 _ 66: 0F EF. C7 + pxor xmm7, xmm1 ; 1E28 _ 66: 0F EF. F9 + pxor xmm1, xmm5 ; 1E2C _ 66: 0F EF. CD + por xmm5, xmm2 ; 1E30 _ 66: 0F EB. EA + pxor xmm5, xmm7 ; 1E34 _ 66: 0F EF. EF + pxor xmm7, xmm4 ; 1E38 _ 66: 0F EF. FC + por xmm7, xmm2 ; 1E3C _ 66: 0F EB. FA + pxor xmm2, xmm1 ; 1E40 _ 66: 0F EF. D1 + pxor xmm2, xmm7 ; 1E44 _ 66: 0F EF. D7 + por xmm1, xmm0 ; 1E48 _ 66: 0F EB. C8 + pxor xmm2, xmm1 ; 1E4C _ 66: 0F EF. D1 + movdqa xmm6, xmm2 ; 1E50 _ 66: 0F 6F. F2 + pxor xmm7, xmm1 ; 1E54 _ 66: 0F EF. F9 + psrld xmm2, 19 ; 1E58 _ 66: 0F 72. D2, 13 + pslld xmm6, 13 ; 1E5D _ 66: 0F 72. F6, 0D + por xmm6, xmm2 ; 1E62 _ 66: 0F EB. F2 + movdqa xmm2, xmm5 ; 1E66 _ 66: 0F 6F. D5 + movdqa xmm1, xmm6 ; 1E6A _ 66: 0F 6F. CE + psrld xmm5, 29 ; 1E6E _ 66: 0F 72. D5, 1D + pslld xmm2, 3 ; 1E73 _ 66: 0F 72. F2, 03 + por xmm2, xmm5 ; 1E78 _ 66: 0F EB. D5 + pxor xmm7, xmm6 ; 1E7C _ 66: 0F EF. FE + pxor xmm7, xmm2 ; 1E80 _ 66: 0F EF. FA + pxor xmm0, xmm2 ; 1E84 _ 66: 0F EF. C2 + pslld xmm1, 3 ; 1E88 _ 66: 0F 72. F1, 03 + pxor xmm0, xmm1 ; 1E8D _ 66: 0F EF. C1 + movdqa xmm1, xmm7 ; 1E91 _ 66: 0F 6F. CF + movdqa xmm5, xmm0 ; 1E95 _ 66: 0F 6F. E8 + psrld xmm7, 31 ; 1E99 _ 66: 0F 72. D7, 1F + pslld xmm1, 1 ; 1E9E _ 66: 0F 72. F1, 01 + por xmm1, xmm7 ; 1EA3 _ 66: 0F EB. CF + movdqa xmm7, xmm1 ; 1EA7 _ 66: 0F 6F. F9 + pslld xmm5, 7 ; 1EAB _ 66: 0F 72. F5, 07 + psrld xmm0, 25 ; 1EB0 _ 66: 0F 72. D0, 19 + por xmm5, xmm0 ; 1EB5 _ 66: 0F EB. E8 + pxor xmm6, xmm1 ; 1EB9 _ 66: 0F EF. F1 + pxor xmm6, xmm5 ; 1EBD _ 66: 0F EF. F5 + movdqa xmm0, xmm6 ; 1EC1 _ 66: 0F 6F. C6 + pxor xmm2, xmm5 ; 1EC5 _ 66: 0F EF. D5 + pslld xmm7, 7 ; 1EC9 _ 66: 0F 72. F7, 07 + pxor xmm2, xmm7 ; 1ECE _ 66: 0F EF. D7 + pslld xmm0, 5 ; 1ED2 _ 66: 0F 72. F0, 05 + psrld xmm6, 27 ; 1ED7 _ 66: 0F 72. D6, 1B + por xmm0, xmm6 ; 1EDC _ 66: 0F EB. C6 + movdqa xmm6, xmm2 ; 1EE0 _ 66: 0F 6F. F2 + psrld xmm2, 10 ; 1EE4 _ 66: 0F 72. D2, 0A + pslld xmm6, 22 ; 1EE9 _ 66: 0F 72. F6, 16 + por xmm6, xmm2 ; 1EEE _ 66: 0F EB. F2 + movd xmm2, dword [ecx+2C10H] ; 1EF2 _ 66: 0F 6E. 91, 00002C10 + pshufd xmm7, xmm2, 0 ; 1EFA _ 66: 0F 70. FA, 00 + pxor xmm0, xmm7 ; 1EFF _ 66: 0F EF. C7 + pxor xmm0, xmm4 ; 1F03 _ 66: 0F EF. C4 + movd xmm2, dword [ecx+2C14H] ; 1F07 _ 66: 0F 6E. 91, 00002C14 + pshufd xmm7, xmm2, 0 ; 1F0F _ 66: 0F 70. FA, 00 + pxor xmm1, xmm7 ; 1F14 _ 66: 0F EF. CF + movd xmm2, dword [ecx+2C18H] ; 1F18 _ 66: 0F 6E. 91, 00002C18 + pshufd xmm7, xmm2, 0 ; 1F20 _ 66: 0F 70. FA, 00 + pxor xmm6, xmm7 ; 1F25 _ 66: 0F EF. F7 + pxor xmm6, xmm4 ; 1F29 _ 66: 0F EF. F4 + movd xmm2, dword [ecx+2C1CH] ; 1F2D _ 66: 0F 6E. 91, 00002C1C + pshufd xmm7, xmm2, 0 ; 1F35 _ 66: 0F 70. FA, 00 + movdqa xmm2, xmm0 ; 1F3A _ 66: 0F 6F. D0 + pxor xmm5, xmm7 ; 1F3E _ 66: 0F EF. EF + pand xmm2, xmm1 ; 1F42 _ 66: 0F DB. 
D1 + pxor xmm6, xmm2 ; 1F46 _ 66: 0F EF. F2 + por xmm2, xmm5 ; 1F4A _ 66: 0F EB. D5 + pxor xmm5, xmm6 ; 1F4E _ 66: 0F EF. EE + pxor xmm1, xmm2 ; 1F52 _ 66: 0F EF. CA + pxor xmm2, xmm0 ; 1F56 _ 66: 0F EF. D0 + por xmm0, xmm1 ; 1F5A _ 66: 0F EB. C1 + pxor xmm1, xmm5 ; 1F5E _ 66: 0F EF. CD + por xmm6, xmm2 ; 1F62 _ 66: 0F EB. F2 + pand xmm6, xmm0 ; 1F66 _ 66: 0F DB. F0 + pxor xmm2, xmm1 ; 1F6A _ 66: 0F EF. D1 + pand xmm1, xmm6 ; 1F6E _ 66: 0F DB. CE + pxor xmm1, xmm2 ; 1F72 _ 66: 0F EF. CA + pand xmm2, xmm6 ; 1F76 _ 66: 0F DB. D6 + pxor xmm0, xmm2 ; 1F7A _ 66: 0F EF. C2 + movdqa xmm2, xmm6 ; 1F7E _ 66: 0F 6F. D6 + psrld xmm6, 19 ; 1F82 _ 66: 0F 72. D6, 13 + pslld xmm2, 13 ; 1F87 _ 66: 0F 72. F2, 0D + por xmm2, xmm6 ; 1F8C _ 66: 0F EB. D6 + movdqa xmm6, xmm5 ; 1F90 _ 66: 0F 6F. F5 + psrld xmm5, 29 ; 1F94 _ 66: 0F 72. D5, 1D + pxor xmm0, xmm2 ; 1F99 _ 66: 0F EF. C2 + pslld xmm6, 3 ; 1F9D _ 66: 0F 72. F6, 03 + por xmm6, xmm5 ; 1FA2 _ 66: 0F EB. F5 + movdqa xmm5, xmm2 ; 1FA6 _ 66: 0F 6F. EA + pxor xmm0, xmm6 ; 1FAA _ 66: 0F EF. C6 + pxor xmm1, xmm6 ; 1FAE _ 66: 0F EF. CE + pslld xmm5, 3 ; 1FB2 _ 66: 0F 72. F5, 03 + pxor xmm1, xmm5 ; 1FB7 _ 66: 0F EF. CD + movdqa xmm5, xmm0 ; 1FBB _ 66: 0F 6F. E8 + psrld xmm0, 31 ; 1FBF _ 66: 0F 72. D0, 1F + pslld xmm5, 1 ; 1FC4 _ 66: 0F 72. F5, 01 + por xmm5, xmm0 ; 1FC9 _ 66: 0F EB. E8 + movdqa xmm0, xmm1 ; 1FCD _ 66: 0F 6F. C1 + psrld xmm1, 25 ; 1FD1 _ 66: 0F 72. D1, 19 + pxor xmm2, xmm5 ; 1FD6 _ 66: 0F EF. D5 + pslld xmm0, 7 ; 1FDA _ 66: 0F 72. F0, 07 + por xmm0, xmm1 ; 1FDF _ 66: 0F EB. C1 + movdqa xmm1, xmm5 ; 1FE3 _ 66: 0F 6F. CD + pxor xmm2, xmm0 ; 1FE7 _ 66: 0F EF. D0 + movdqa xmm7, xmm2 ; 1FEB _ 66: 0F 6F. FA + pxor xmm6, xmm0 ; 1FEF _ 66: 0F EF. F0 + pslld xmm1, 7 ; 1FF3 _ 66: 0F 72. F1, 07 + pxor xmm6, xmm1 ; 1FF8 _ 66: 0F EF. F1 + movdqa xmm1, xmm6 ; 1FFC _ 66: 0F 6F. CE + pslld xmm7, 5 ; 2000 _ 66: 0F 72. F7, 05 + psrld xmm2, 27 ; 2005 _ 66: 0F 72. D2, 1B + por xmm7, xmm2 ; 200A _ 66: 0F EB. FA + pslld xmm1, 22 ; 200E _ 66: 0F 72. F1, 16 + psrld xmm6, 10 ; 2013 _ 66: 0F 72. D6, 0A + por xmm1, xmm6 ; 2018 _ 66: 0F EB. CE + movd xmm6, dword [ecx+2C20H] ; 201C _ 66: 0F 6E. B1, 00002C20 + pshufd xmm2, xmm6, 0 ; 2024 _ 66: 0F 70. D6, 00 + pxor xmm7, xmm2 ; 2029 _ 66: 0F EF. FA + movd xmm6, dword [ecx+2C24H] ; 202D _ 66: 0F 6E. B1, 00002C24 + pshufd xmm2, xmm6, 0 ; 2035 _ 66: 0F 70. D6, 00 + pxor xmm5, xmm2 ; 203A _ 66: 0F EF. EA + movd xmm6, dword [ecx+2C28H] ; 203E _ 66: 0F 6E. B1, 00002C28 + pshufd xmm2, xmm6, 0 ; 2046 _ 66: 0F 70. D6, 00 + pxor xmm1, xmm2 ; 204B _ 66: 0F EF. CA + movd xmm6, dword [ecx+2C2CH] ; 204F _ 66: 0F 6E. B1, 00002C2C + pshufd xmm2, xmm6, 0 ; 2057 _ 66: 0F 70. D6, 00 + movdqa xmm6, xmm7 ; 205C _ 66: 0F 6F. F7 + pxor xmm0, xmm2 ; 2060 _ 66: 0F EF. C2 + pand xmm6, xmm1 ; 2064 _ 66: 0F DB. F1 + pxor xmm6, xmm0 ; 2068 _ 66: 0F EF. F0 + pxor xmm1, xmm5 ; 206C _ 66: 0F EF. CD + pxor xmm1, xmm6 ; 2070 _ 66: 0F EF. CE + movdqa xmm2, xmm1 ; 2074 _ 66: 0F 6F. D1 + por xmm0, xmm7 ; 2078 _ 66: 0F EB. C7 + pxor xmm0, xmm5 ; 207C _ 66: 0F EF. C5 + movdqa xmm5, xmm0 ; 2080 _ 66: 0F 6F. E8 + pxor xmm7, xmm1 ; 2084 _ 66: 0F EF. F9 + pslld xmm2, 13 ; 2088 _ 66: 0F 72. F2, 0D + por xmm5, xmm7 ; 208D _ 66: 0F EB. EF + pxor xmm5, xmm6 ; 2091 _ 66: 0F EF. EE + pand xmm6, xmm0 ; 2095 _ 66: 0F DB. F0 + pxor xmm7, xmm6 ; 2099 _ 66: 0F EF. FE + pxor xmm0, xmm5 ; 209D _ 66: 0F EF. C5 + pxor xmm0, xmm7 ; 20A1 _ 66: 0F EF. C7 + pxor xmm7, xmm4 ; 20A5 _ 66: 0F EF. FC + psrld xmm1, 19 ; 20A9 _ 66: 0F 72. D1, 13 + por xmm2, xmm1 ; 20AE _ 66: 0F EB. 
D1 + movdqa xmm1, xmm0 ; 20B2 _ 66: 0F 6F. C8 + psrld xmm0, 29 ; 20B6 _ 66: 0F 72. D0, 1D + pxor xmm5, xmm2 ; 20BB _ 66: 0F EF. EA + pslld xmm1, 3 ; 20BF _ 66: 0F 72. F1, 03 + por xmm1, xmm0 ; 20C4 _ 66: 0F EB. C8 + movdqa xmm0, xmm2 ; 20C8 _ 66: 0F 6F. C2 + pxor xmm5, xmm1 ; 20CC _ 66: 0F EF. E9 + pxor xmm7, xmm1 ; 20D0 _ 66: 0F EF. F9 + pslld xmm0, 3 ; 20D4 _ 66: 0F 72. F0, 03 + pxor xmm7, xmm0 ; 20D9 _ 66: 0F EF. F8 + movdqa xmm0, xmm5 ; 20DD _ 66: 0F 6F. C5 + movdqa xmm6, xmm7 ; 20E1 _ 66: 0F 6F. F7 + psrld xmm5, 31 ; 20E5 _ 66: 0F 72. D5, 1F + pslld xmm0, 1 ; 20EA _ 66: 0F 72. F0, 01 + por xmm0, xmm5 ; 20EF _ 66: 0F EB. C5 + pslld xmm6, 7 ; 20F3 _ 66: 0F 72. F6, 07 + psrld xmm7, 25 ; 20F8 _ 66: 0F 72. D7, 19 + por xmm6, xmm7 ; 20FD _ 66: 0F EB. F7 + movdqa xmm7, xmm0 ; 2101 _ 66: 0F 6F. F8 + pxor xmm2, xmm0 ; 2105 _ 66: 0F EF. D0 + pxor xmm2, xmm6 ; 2109 _ 66: 0F EF. D6 + movdqa xmm5, xmm2 ; 210D _ 66: 0F 6F. EA + pxor xmm1, xmm6 ; 2111 _ 66: 0F EF. CE + pslld xmm7, 7 ; 2115 _ 66: 0F 72. F7, 07 + pxor xmm1, xmm7 ; 211A _ 66: 0F EF. CF + pslld xmm5, 5 ; 211E _ 66: 0F 72. F5, 05 + movd xmm7, dword [ecx+2C34H] ; 2123 _ 66: 0F 6E. B9, 00002C34 + psrld xmm2, 27 ; 212B _ 66: 0F 72. D2, 1B + por xmm5, xmm2 ; 2130 _ 66: 0F EB. EA + movdqa xmm2, xmm1 ; 2134 _ 66: 0F 6F. D1 + psrld xmm1, 10 ; 2138 _ 66: 0F 72. D1, 0A + pslld xmm2, 22 ; 213D _ 66: 0F 72. F2, 16 + por xmm2, xmm1 ; 2142 _ 66: 0F EB. D1 + movd xmm1, dword [ecx+2C30H] ; 2146 _ 66: 0F 6E. 89, 00002C30 + pshufd xmm1, xmm1, 0 ; 214E _ 66: 0F 70. C9, 00 + pxor xmm5, xmm1 ; 2153 _ 66: 0F EF. E9 + pshufd xmm1, xmm7, 0 ; 2157 _ 66: 0F 70. CF, 00 + pxor xmm0, xmm1 ; 215C _ 66: 0F EF. C1 + movd xmm7, dword [ecx+2C38H] ; 2160 _ 66: 0F 6E. B9, 00002C38 + pshufd xmm1, xmm7, 0 ; 2168 _ 66: 0F 70. CF, 00 + pxor xmm2, xmm1 ; 216D _ 66: 0F EF. D1 + movd xmm7, dword [ecx+2C3CH] ; 2171 _ 66: 0F 6E. B9, 00002C3C + pshufd xmm1, xmm7, 0 ; 2179 _ 66: 0F 70. CF, 00 + movdqa xmm7, xmm5 ; 217E _ 66: 0F 6F. FD + pxor xmm6, xmm1 ; 2182 _ 66: 0F EF. F1 + por xmm7, xmm6 ; 2186 _ 66: 0F EB. FE + pxor xmm6, xmm0 ; 218A _ 66: 0F EF. F0 + pand xmm0, xmm5 ; 218E _ 66: 0F DB. C5 + pxor xmm5, xmm2 ; 2192 _ 66: 0F EF. EA + pxor xmm2, xmm6 ; 2196 _ 66: 0F EF. D6 + pand xmm6, xmm7 ; 219A _ 66: 0F DB. F7 + por xmm5, xmm0 ; 219E _ 66: 0F EB. E8 + pxor xmm6, xmm5 ; 21A2 _ 66: 0F EF. F5 + pxor xmm7, xmm0 ; 21A6 _ 66: 0F EF. F8 + pand xmm5, xmm7 ; 21AA _ 66: 0F DB. EF + pxor xmm0, xmm6 ; 21AE _ 66: 0F EF. C6 + pxor xmm5, xmm2 ; 21B2 _ 66: 0F EF. EA + por xmm0, xmm7 ; 21B6 _ 66: 0F EB. C7 + pxor xmm0, xmm2 ; 21BA _ 66: 0F EF. C2 + movdqa xmm2, xmm0 ; 21BE _ 66: 0F 6F. D0 + pxor xmm7, xmm6 ; 21C2 _ 66: 0F EF. FE + por xmm2, xmm6 ; 21C6 _ 66: 0F EB. D6 + pxor xmm7, xmm2 ; 21CA _ 66: 0F EF. FA + movdqa xmm2, xmm7 ; 21CE _ 66: 0F 6F. D7 + psrld xmm7, 19 ; 21D2 _ 66: 0F 72. D7, 13 + pslld xmm2, 13 ; 21D7 _ 66: 0F 72. F2, 0D + por xmm2, xmm7 ; 21DC _ 66: 0F EB. D7 + movdqa xmm7, xmm6 ; 21E0 _ 66: 0F 6F. FE + psrld xmm6, 29 ; 21E4 _ 66: 0F 72. D6, 1D + pxor xmm0, xmm2 ; 21E9 _ 66: 0F EF. C2 + pslld xmm7, 3 ; 21ED _ 66: 0F 72. F7, 03 + por xmm7, xmm6 ; 21F2 _ 66: 0F EB. FE + movdqa xmm6, xmm2 ; 21F6 _ 66: 0F 6F. F2 + pxor xmm0, xmm7 ; 21FA _ 66: 0F EF. C7 + movdqa xmm1, xmm0 ; 21FE _ 66: 0F 6F. C8 + pxor xmm5, xmm7 ; 2202 _ 66: 0F EF. EF + pslld xmm6, 3 ; 2206 _ 66: 0F 72. F6, 03 + pxor xmm5, xmm6 ; 220B _ 66: 0F EF. EE + movdqa xmm6, xmm5 ; 220F _ 66: 0F 6F. F5 + pslld xmm1, 1 ; 2213 _ 66: 0F 72. F1, 01 + psrld xmm0, 31 ; 2218 _ 66: 0F 72. D0, 1F + por xmm1, xmm0 ; 221D _ 66: 0F EB. 
C8 + pslld xmm6, 7 ; 2221 _ 66: 0F 72. F6, 07 + psrld xmm5, 25 ; 2226 _ 66: 0F 72. D5, 19 + por xmm6, xmm5 ; 222B _ 66: 0F EB. F5 + movdqa xmm5, xmm1 ; 222F _ 66: 0F 6F. E9 + pxor xmm2, xmm1 ; 2233 _ 66: 0F EF. D1 + pxor xmm2, xmm6 ; 2237 _ 66: 0F EF. D6 + movdqa xmm0, xmm2 ; 223B _ 66: 0F 6F. C2 + pxor xmm7, xmm6 ; 223F _ 66: 0F EF. FE + pslld xmm5, 7 ; 2243 _ 66: 0F 72. F5, 07 + pxor xmm7, xmm5 ; 2248 _ 66: 0F EF. FD + movdqa xmm5, xmm7 ; 224C _ 66: 0F 6F. EF + pslld xmm0, 5 ; 2250 _ 66: 0F 72. F0, 05 + psrld xmm2, 27 ; 2255 _ 66: 0F 72. D2, 1B + por xmm0, xmm2 ; 225A _ 66: 0F EB. C2 + movd xmm2, dword [ecx+2C40H] ; 225E _ 66: 0F 6E. 91, 00002C40 + pslld xmm5, 22 ; 2266 _ 66: 0F 72. F5, 16 + psrld xmm7, 10 ; 226B _ 66: 0F 72. D7, 0A + por xmm5, xmm7 ; 2270 _ 66: 0F EB. EF + pshufd xmm7, xmm2, 0 ; 2274 _ 66: 0F 70. FA, 00 + pxor xmm0, xmm7 ; 2279 _ 66: 0F EF. C7 + movd xmm2, dword [ecx+2C44H] ; 227D _ 66: 0F 6E. 91, 00002C44 + pshufd xmm7, xmm2, 0 ; 2285 _ 66: 0F 70. FA, 00 + movd xmm2, dword [ecx+2C48H] ; 228A _ 66: 0F 6E. 91, 00002C48 + pxor xmm1, xmm7 ; 2292 _ 66: 0F EF. CF + pshufd xmm7, xmm2, 0 ; 2296 _ 66: 0F 70. FA, 00 + pxor xmm5, xmm7 ; 229B _ 66: 0F EF. EF + movd xmm2, dword [ecx+2C4CH] ; 229F _ 66: 0F 6E. 91, 00002C4C + pshufd xmm7, xmm2, 0 ; 22A7 _ 66: 0F 70. FA, 00 + pxor xmm6, xmm7 ; 22AC _ 66: 0F EF. F7 + pxor xmm1, xmm6 ; 22B0 _ 66: 0F EF. CE + movdqa xmm7, xmm1 ; 22B4 _ 66: 0F 6F. F9 + pxor xmm6, xmm4 ; 22B8 _ 66: 0F EF. F4 + pxor xmm5, xmm6 ; 22BC _ 66: 0F EF. EE + pxor xmm6, xmm0 ; 22C0 _ 66: 0F EF. F0 + pand xmm7, xmm6 ; 22C4 _ 66: 0F DB. FE + pxor xmm7, xmm5 ; 22C8 _ 66: 0F EF. FD + movdqa xmm2, xmm7 ; 22CC _ 66: 0F 6F. D7 + pxor xmm1, xmm6 ; 22D0 _ 66: 0F EF. CE + pxor xmm0, xmm1 ; 22D4 _ 66: 0F EF. C1 + pand xmm5, xmm1 ; 22D8 _ 66: 0F DB. E9 + pxor xmm5, xmm0 ; 22DC _ 66: 0F EF. E8 + pand xmm0, xmm7 ; 22E0 _ 66: 0F DB. C7 + pxor xmm6, xmm0 ; 22E4 _ 66: 0F EF. F0 + por xmm1, xmm7 ; 22E8 _ 66: 0F EB. CF + pxor xmm1, xmm0 ; 22EC _ 66: 0F EF. C8 + por xmm0, xmm6 ; 22F0 _ 66: 0F EB. C6 + pxor xmm0, xmm5 ; 22F4 _ 66: 0F EF. C5 + pand xmm5, xmm6 ; 22F8 _ 66: 0F DB. EE + pxor xmm0, xmm4 ; 22FC _ 66: 0F EF. C4 + pxor xmm1, xmm5 ; 2300 _ 66: 0F EF. CD + movdqa xmm5, xmm0 ; 2304 _ 66: 0F 6F. E8 + pslld xmm2, 13 ; 2308 _ 66: 0F 72. F2, 0D + psrld xmm7, 19 ; 230D _ 66: 0F 72. D7, 13 + por xmm2, xmm7 ; 2312 _ 66: 0F EB. D7 + pslld xmm5, 3 ; 2316 _ 66: 0F 72. F5, 03 + psrld xmm0, 29 ; 231B _ 66: 0F 72. D0, 1D + por xmm5, xmm0 ; 2320 _ 66: 0F EB. E8 + movdqa xmm0, xmm2 ; 2324 _ 66: 0F 6F. C2 + pxor xmm1, xmm2 ; 2328 _ 66: 0F EF. CA + pxor xmm1, xmm5 ; 232C _ 66: 0F EF. CD + movdqa xmm7, xmm1 ; 2330 _ 66: 0F 6F. F9 + pxor xmm6, xmm5 ; 2334 _ 66: 0F EF. F5 + pslld xmm0, 3 ; 2338 _ 66: 0F 72. F0, 03 + pxor xmm6, xmm0 ; 233D _ 66: 0F EF. F0 + movdqa xmm0, xmm6 ; 2341 _ 66: 0F 6F. C6 + pslld xmm7, 1 ; 2345 _ 66: 0F 72. F7, 01 + psrld xmm1, 31 ; 234A _ 66: 0F 72. D1, 1F + por xmm7, xmm1 ; 234F _ 66: 0F EB. F9 + movdqa xmm1, xmm7 ; 2353 _ 66: 0F 6F. CF + pslld xmm0, 7 ; 2357 _ 66: 0F 72. F0, 07 + psrld xmm6, 25 ; 235C _ 66: 0F 72. D6, 19 + por xmm0, xmm6 ; 2361 _ 66: 0F EB. C6 + pxor xmm2, xmm7 ; 2365 _ 66: 0F EF. D7 + pxor xmm2, xmm0 ; 2369 _ 66: 0F EF. D0 + pxor xmm5, xmm0 ; 236D _ 66: 0F EF. E8 + pslld xmm1, 7 ; 2371 _ 66: 0F 72. F1, 07 + pxor xmm5, xmm1 ; 2376 _ 66: 0F EF. E9 + movdqa xmm1, xmm2 ; 237A _ 66: 0F 6F. CA + movdqa xmm6, xmm5 ; 237E _ 66: 0F 6F. F5 + psrld xmm2, 27 ; 2382 _ 66: 0F 72. D2, 1B + pslld xmm1, 5 ; 2387 _ 66: 0F 72. F1, 05 + por xmm1, xmm2 ; 238C _ 66: 0F EB. 
CA + pslld xmm6, 22 ; 2390 _ 66: 0F 72. F6, 16 + psrld xmm5, 10 ; 2395 _ 66: 0F 72. D5, 0A + movd xmm2, dword [ecx+2C54H] ; 239A _ 66: 0F 6E. 91, 00002C54 + por xmm6, xmm5 ; 23A2 _ 66: 0F EB. F5 + movd xmm5, dword [ecx+2C50H] ; 23A6 _ 66: 0F 6E. A9, 00002C50 + pshufd xmm5, xmm5, 0 ; 23AE _ 66: 0F 70. ED, 00 + pxor xmm1, xmm5 ; 23B3 _ 66: 0F EF. CD + pshufd xmm5, xmm2, 0 ; 23B7 _ 66: 0F 70. EA, 00 + pxor xmm7, xmm5 ; 23BC _ 66: 0F EF. FD + pxor xmm1, xmm7 ; 23C0 _ 66: 0F EF. CF + movd xmm2, dword [ecx+2C58H] ; 23C4 _ 66: 0F 6E. 91, 00002C58 + pshufd xmm5, xmm2, 0 ; 23CC _ 66: 0F 70. EA, 00 + pxor xmm6, xmm5 ; 23D1 _ 66: 0F EF. F5 + movd xmm2, dword [ecx+2C5CH] ; 23D5 _ 66: 0F 6E. 91, 00002C5C + pshufd xmm5, xmm2, 0 ; 23DD _ 66: 0F 70. EA, 00 + pxor xmm0, xmm5 ; 23E2 _ 66: 0F EF. C5 + pxor xmm7, xmm0 ; 23E6 _ 66: 0F EF. F8 + movdqa xmm5, xmm7 ; 23EA _ 66: 0F 6F. EF + pxor xmm0, xmm4 ; 23EE _ 66: 0F EF. C4 + pxor xmm6, xmm0 ; 23F2 _ 66: 0F EF. F0 + pand xmm5, xmm1 ; 23F6 _ 66: 0F DB. E9 + pxor xmm5, xmm6 ; 23FA _ 66: 0F EF. EE + movdqa xmm2, xmm5 ; 23FE _ 66: 0F 6F. D5 + por xmm6, xmm7 ; 2402 _ 66: 0F EB. F7 + pxor xmm7, xmm0 ; 2406 _ 66: 0F EF. F8 + pand xmm0, xmm5 ; 240A _ 66: 0F DB. C5 + pxor xmm0, xmm1 ; 240E _ 66: 0F EF. C1 + pxor xmm7, xmm5 ; 2412 _ 66: 0F EF. FD + pxor xmm7, xmm6 ; 2416 _ 66: 0F EF. FE + pxor xmm6, xmm1 ; 241A _ 66: 0F EF. F1 + pand xmm1, xmm0 ; 241E _ 66: 0F DB. C8 + pxor xmm6, xmm4 ; 2422 _ 66: 0F EF. F4 + pxor xmm1, xmm7 ; 2426 _ 66: 0F EF. CF + por xmm7, xmm0 ; 242A _ 66: 0F EB. F8 + pxor xmm7, xmm6 ; 242E _ 66: 0F EF. FE + movdqa xmm6, xmm1 ; 2432 _ 66: 0F 6F. F1 + pslld xmm2, 13 ; 2436 _ 66: 0F 72. F2, 0D + psrld xmm5, 19 ; 243B _ 66: 0F 72. D5, 13 + por xmm2, xmm5 ; 2440 _ 66: 0F EB. D5 + pslld xmm6, 3 ; 2444 _ 66: 0F 72. F6, 03 + psrld xmm1, 29 ; 2449 _ 66: 0F 72. D1, 1D + por xmm6, xmm1 ; 244E _ 66: 0F EB. F1 + movdqa xmm1, xmm2 ; 2452 _ 66: 0F 6F. CA + pxor xmm0, xmm2 ; 2456 _ 66: 0F EF. C2 + pxor xmm0, xmm6 ; 245A _ 66: 0F EF. C6 + movdqa xmm5, xmm0 ; 245E _ 66: 0F 6F. E8 + pxor xmm7, xmm6 ; 2462 _ 66: 0F EF. FE + pslld xmm1, 3 ; 2466 _ 66: 0F 72. F1, 03 + pxor xmm7, xmm1 ; 246B _ 66: 0F EF. F9 + pslld xmm5, 1 ; 246F _ 66: 0F 72. F5, 01 + psrld xmm0, 31 ; 2474 _ 66: 0F 72. D0, 1F + por xmm5, xmm0 ; 2479 _ 66: 0F EB. E8 + movdqa xmm0, xmm7 ; 247D _ 66: 0F 6F. C7 + psrld xmm7, 25 ; 2481 _ 66: 0F 72. D7, 19 + pxor xmm2, xmm5 ; 2486 _ 66: 0F EF. D5 + pslld xmm0, 7 ; 248A _ 66: 0F 72. F0, 07 + por xmm0, xmm7 ; 248F _ 66: 0F EB. C7 + movdqa xmm7, xmm5 ; 2493 _ 66: 0F 6F. FD + pxor xmm2, xmm0 ; 2497 _ 66: 0F EF. D0 + pxor xmm6, xmm0 ; 249B _ 66: 0F EF. F0 + pslld xmm7, 7 ; 249F _ 66: 0F 72. F7, 07 + pxor xmm6, xmm7 ; 24A4 _ 66: 0F EF. F7 + movdqa xmm7, xmm2 ; 24A8 _ 66: 0F 6F. FA + movdqa xmm1, xmm6 ; 24AC _ 66: 0F 6F. CE + psrld xmm2, 27 ; 24B0 _ 66: 0F 72. D2, 1B + pslld xmm7, 5 ; 24B5 _ 66: 0F 72. F7, 05 + por xmm7, xmm2 ; 24BA _ 66: 0F EB. FA + pslld xmm1, 22 ; 24BE _ 66: 0F 72. F1, 16 + psrld xmm6, 10 ; 24C3 _ 66: 0F 72. D6, 0A + por xmm1, xmm6 ; 24C8 _ 66: 0F EB. CE + movd xmm6, dword [ecx+2C60H] ; 24CC _ 66: 0F 6E. B1, 00002C60 + pshufd xmm2, xmm6, 0 ; 24D4 _ 66: 0F 70. D6, 00 + pxor xmm7, xmm2 ; 24D9 _ 66: 0F EF. FA + movd xmm6, dword [ecx+2C64H] ; 24DD _ 66: 0F 6E. B1, 00002C64 + pshufd xmm2, xmm6, 0 ; 24E5 _ 66: 0F 70. D6, 00 + pxor xmm5, xmm2 ; 24EA _ 66: 0F EF. EA + movd xmm6, dword [ecx+2C68H] ; 24EE _ 66: 0F 6E. B1, 00002C68 + pshufd xmm2, xmm6, 0 ; 24F6 _ 66: 0F 70. D6, 00 + pxor xmm1, xmm2 ; 24FB _ 66: 0F EF. CA + pxor xmm1, xmm4 ; 24FF _ 66: 0F EF. 
CC + movd xmm6, dword [ecx+2C6CH] ; 2503 _ 66: 0F 6E. B1, 00002C6C + pshufd xmm2, xmm6, 0 ; 250B _ 66: 0F 70. D6, 00 + pxor xmm0, xmm2 ; 2510 _ 66: 0F EF. C2 + movdqa xmm2, xmm0 ; 2514 _ 66: 0F 6F. D0 + pand xmm2, xmm7 ; 2518 _ 66: 0F DB. D7 + pxor xmm7, xmm0 ; 251C _ 66: 0F EF. F8 + pxor xmm2, xmm1 ; 2520 _ 66: 0F EF. D1 + por xmm1, xmm0 ; 2524 _ 66: 0F EB. C8 + pxor xmm5, xmm2 ; 2528 _ 66: 0F EF. EA + pxor xmm1, xmm7 ; 252C _ 66: 0F EF. CF + por xmm7, xmm5 ; 2530 _ 66: 0F EB. FD + pxor xmm1, xmm5 ; 2534 _ 66: 0F EF. CD + pxor xmm0, xmm7 ; 2538 _ 66: 0F EF. C7 + por xmm7, xmm2 ; 253C _ 66: 0F EB. FA + pxor xmm7, xmm1 ; 2540 _ 66: 0F EF. F9 + pxor xmm0, xmm2 ; 2544 _ 66: 0F EF. C2 + pxor xmm0, xmm7 ; 2548 _ 66: 0F EF. C7 + movdqa xmm6, xmm0 ; 254C _ 66: 0F 6F. F0 + pxor xmm2, xmm4 ; 2550 _ 66: 0F EF. D4 + pand xmm1, xmm0 ; 2554 _ 66: 0F DB. C8 + pxor xmm2, xmm1 ; 2558 _ 66: 0F EF. D1 + movdqa xmm1, xmm7 ; 255C _ 66: 0F 6F. CF + psrld xmm7, 19 ; 2560 _ 66: 0F 72. D7, 13 + pslld xmm6, 3 ; 2565 _ 66: 0F 72. F6, 03 + pslld xmm1, 13 ; 256A _ 66: 0F 72. F1, 0D + por xmm1, xmm7 ; 256F _ 66: 0F EB. CF + psrld xmm0, 29 ; 2573 _ 66: 0F 72. D0, 1D + por xmm6, xmm0 ; 2578 _ 66: 0F EB. F0 + movdqa xmm0, xmm1 ; 257C _ 66: 0F 6F. C1 + pxor xmm5, xmm1 ; 2580 _ 66: 0F EF. E9 + pxor xmm5, xmm6 ; 2584 _ 66: 0F EF. EE + pxor xmm2, xmm6 ; 2588 _ 66: 0F EF. D6 + pslld xmm0, 3 ; 258C _ 66: 0F 72. F0, 03 + pxor xmm2, xmm0 ; 2591 _ 66: 0F EF. D0 + movdqa xmm0, xmm5 ; 2595 _ 66: 0F 6F. C5 + psrld xmm5, 31 ; 2599 _ 66: 0F 72. D5, 1F + pslld xmm0, 1 ; 259E _ 66: 0F 72. F0, 01 + por xmm0, xmm5 ; 25A3 _ 66: 0F EB. C5 + movdqa xmm5, xmm2 ; 25A7 _ 66: 0F 6F. EA + movdqa xmm7, xmm0 ; 25AB _ 66: 0F 6F. F8 + psrld xmm2, 25 ; 25AF _ 66: 0F 72. D2, 19 + pslld xmm5, 7 ; 25B4 _ 66: 0F 72. F5, 07 + por xmm5, xmm2 ; 25B9 _ 66: 0F EB. EA + pxor xmm1, xmm0 ; 25BD _ 66: 0F EF. C8 + pxor xmm1, xmm5 ; 25C1 _ 66: 0F EF. CD + movdqa xmm2, xmm1 ; 25C5 _ 66: 0F 6F. D1 + pxor xmm6, xmm5 ; 25C9 _ 66: 0F EF. F5 + pslld xmm7, 7 ; 25CD _ 66: 0F 72. F7, 07 + pxor xmm6, xmm7 ; 25D2 _ 66: 0F EF. F7 + movdqa xmm7, xmm6 ; 25D6 _ 66: 0F 6F. FE + pslld xmm2, 5 ; 25DA _ 66: 0F 72. F2, 05 + psrld xmm1, 27 ; 25DF _ 66: 0F 72. D1, 1B + por xmm2, xmm1 ; 25E4 _ 66: 0F EB. D1 + pslld xmm7, 22 ; 25E8 _ 66: 0F 72. F7, 16 + psrld xmm6, 10 ; 25ED _ 66: 0F 72. D6, 0A + por xmm7, xmm6 ; 25F2 _ 66: 0F EB. FE + movd xmm1, dword [ecx+2C70H] ; 25F6 _ 66: 0F 6E. 89, 00002C70 + movd xmm6, dword [ecx+2C74H] ; 25FE _ 66: 0F 6E. B1, 00002C74 + pshufd xmm1, xmm1, 0 ; 2606 _ 66: 0F 70. C9, 00 + pxor xmm2, xmm1 ; 260B _ 66: 0F EF. D1 + pshufd xmm1, xmm6, 0 ; 260F _ 66: 0F 70. CE, 00 + pxor xmm0, xmm1 ; 2614 _ 66: 0F EF. C1 + movd xmm6, dword [ecx+2C78H] ; 2618 _ 66: 0F 6E. B1, 00002C78 + pshufd xmm1, xmm6, 0 ; 2620 _ 66: 0F 70. CE, 00 + pxor xmm7, xmm1 ; 2625 _ 66: 0F EF. F9 + movd xmm6, dword [ecx+2C7CH] ; 2629 _ 66: 0F 6E. B1, 00002C7C + pshufd xmm1, xmm6, 0 ; 2631 _ 66: 0F 70. CE, 00 + pxor xmm5, xmm1 ; 2636 _ 66: 0F EF. E9 + movdqa xmm1, xmm0 ; 263A _ 66: 0F 6F. C8 + pxor xmm0, xmm7 ; 263E _ 66: 0F EF. C7 + por xmm1, xmm7 ; 2642 _ 66: 0F EB. CF + pxor xmm1, xmm5 ; 2646 _ 66: 0F EF. CD + pxor xmm7, xmm1 ; 264A _ 66: 0F EF. F9 + por xmm5, xmm0 ; 264E _ 66: 0F EB. E8 + pand xmm5, xmm2 ; 2652 _ 66: 0F DB. EA + pxor xmm0, xmm7 ; 2656 _ 66: 0F EF. C7 + pxor xmm5, xmm1 ; 265A _ 66: 0F EF. E9 + por xmm1, xmm0 ; 265E _ 66: 0F EB. C8 + pxor xmm1, xmm2 ; 2662 _ 66: 0F EF. CA + por xmm2, xmm0 ; 2666 _ 66: 0F EB. D0 + pxor xmm2, xmm7 ; 266A _ 66: 0F EF. 
D7 + pxor xmm1, xmm0 ; 266E _ 66: 0F EF. C8 + pxor xmm7, xmm1 ; 2672 _ 66: 0F EF. F9 + pand xmm1, xmm2 ; 2676 _ 66: 0F DB. CA + pxor xmm1, xmm0 ; 267A _ 66: 0F EF. C8 + pxor xmm7, xmm4 ; 267E _ 66: 0F EF. FC + por xmm7, xmm2 ; 2682 _ 66: 0F EB. FA + pxor xmm0, xmm7 ; 2686 _ 66: 0F EF. C7 + movd xmm7, dword [ecx+2C80H] ; 268A _ 66: 0F 6E. B9, 00002C80 + pshufd xmm6, xmm7, 0 ; 2692 _ 66: 0F 70. F7, 00 + pxor xmm0, xmm6 ; 2697 _ 66: 0F EF. C6 + movd xmm7, dword [ecx+2C84H] ; 269B _ 66: 0F 6E. B9, 00002C84 + pshufd xmm6, xmm7, 0 ; 26A3 _ 66: 0F 70. F7, 00 + pxor xmm5, xmm6 ; 26A8 _ 66: 0F EF. EE + movd xmm7, dword [ecx+2C88H] ; 26AC _ 66: 0F 6E. B9, 00002C88 + pshufd xmm6, xmm7, 0 ; 26B4 _ 66: 0F 70. F7, 00 + pxor xmm1, xmm6 ; 26B9 _ 66: 0F EF. CE + movd xmm7, dword [ecx+2C8CH] ; 26BD _ 66: 0F 6E. B9, 00002C8C + pshufd xmm6, xmm7, 0 ; 26C5 _ 66: 0F 70. F7, 00 + movdqa xmm7, xmm0 ; 26CA _ 66: 0F 6F. F8 + pxor xmm2, xmm6 ; 26CE _ 66: 0F EF. D6 + movdqa xmm6, xmm1 ; 26D2 _ 66: 0F 6F. F1 + punpckldq xmm7, xmm5 ; 26D6 _ 66: 0F 62. FD + punpckhdq xmm0, xmm5 ; 26DA _ 66: 0F 6A. C5 + punpckldq xmm6, xmm2 ; 26DE _ 66: 0F 62. F2 + punpckhdq xmm1, xmm2 ; 26E2 _ 66: 0F 6A. CA + movdqa xmm2, xmm7 ; 26E6 _ 66: 0F 6F. D7 + movdqa xmm5, xmm0 ; 26EA _ 66: 0F 6F. E8 + punpckhqdq xmm7, xmm6 ; 26EE _ 66: 0F 6D. FE + pxor xmm7, oword [esp+40H] ; 26F2 _ 66: 0F EF. 7C 24, 40 + movdqu oword [edx+10H], xmm7 ; 26F8 _ F3: 0F 7F. 7A, 10 + punpcklqdq xmm2, xmm6 ; 26FD _ 66: 0F 6C. D6 + pxor xmm2, oword [esp+60H] ; 2701 _ 66: 0F EF. 54 24, 60 + movdqu oword [edx], xmm2 ; 2707 _ F3: 0F 7F. 12 + punpcklqdq xmm5, xmm1 ; 270B _ 66: 0F 6C. E9 + pxor xmm5, oword [esp+50H] ; 270F _ 66: 0F EF. 6C 24, 50 + movdqu oword [edx+20H], xmm5 ; 2715 _ F3: 0F 7F. 6A, 20 + punpckhqdq xmm0, xmm1 ; 271A _ 66: 0F 6D. C1 + movdqa xmm1, xmm3 ; 271E _ 66: 0F 6F. CB + pxor xmm0, xmm3 ; 2722 _ 66: 0F EF. C3 + movdqu oword [edx+30H], xmm0 ; 2726 _ F3: 0F 7F. 42, 30 + movdqa xmm0, xmm3 ; 272B _ 66: 0F 6F. C3 + psllq xmm1, 1 ; 272F _ 66: 0F 73. F1, 01 + psraw xmm3, 8 ; 2734 _ 66: 0F 71. E3, 08 + pslldq xmm0, 8 ; 2739 _ 66: 0F 73. F8, 08 + psrldq xmm0, 7 ; 273E _ 66: 0F 73. D8, 07 + psrlq xmm0, 7 ; 2743 _ 66: 0F 73. D0, 07 + por xmm1, xmm0 ; 2748 _ 66: 0F EB. C8 + psrldq xmm3, 15 ; 274C _ 66: 0F 73. DB, 0F + pand xmm3, oword [esp+30H] ; 2751 _ 66: 0F DB. 5C 24, 30 + pxor xmm1, xmm3 ; 2757 _ 66: 0F EF. CB + movdqa oword [esp+60H], xmm1 ; 275B _ 66: 0F 7F. 4C 24, 60 + add esi, 64 ; 2761 _ 83. C6, 40 + add edx, 64 ; 2764 _ 83. C2, 40 + inc eax ; 2767 _ 40 + cmp eax, 8 ; 2768 _ 83. F8, 08 + jl ?_003 ; 276B _ 0F 8C, FFFFD92A + movdqa xmm0, oword [esp+60H] ; 2771 _ 66: 0F 6F. 44 24, 60 + mov dword [esp+24H], edx ; 2777 _ 89. 54 24, 24 + mov dword [esp+28H], esi ; 277B _ 89. 74 24, 28 + mov esi, dword [esp+20H] ; 277F _ 8B. 74 24, 20 + add esi, -512 ; 2783 _ 81. C6, FFFFFE00 + jne ?_001 ; 2789 _ 0F 85, FFFFD8D0 + add esp, 116 ; 278F _ 83. C4, 74 + pop ebx ; 2792 _ 5B + pop esi ; 2793 _ 5E + pop edi ; 2794 _ 5F + mov esp, ebp ; 2795 _ 8B. E5 + pop ebp ; 2797 _ 5D + ret 24 ; 2798 _ C2, 0018 +; _xts_serpent_sse2_encrypt@24 End of function + + nop ; 279B _ 90 +; Filling space: 4H +; Filler type: lea with same source and destination +; db 8DH, 74H, 26H, 00H + +ALIGN 8 + + +_xts_serpent_sse2_decrypt@24:; Function begin + push ebp ; 0000 _ 55 + mov ebp, esp ; 0001 _ 8B. EC + and esp, 0FFFFFFF0H ; 0003 _ 83. E4, F0 + push edi ; 0006 _ 57 + push esi ; 0007 _ 56 + push ebx ; 0008 _ 53 + sub esp, 116 ; 0009 _ 83. EC, 74 + mov edx, dword [ebp+8H] ; 000C _ 8B. 
55, 08 + mov eax, dword [ebp+0CH] ; 000F _ 8B. 45, 0C + mov esi, dword [ebp+18H] ; 0012 _ 8B. 75, 18 + mov ebx, dword [ebp+1CH] ; 0015 _ 8B. 5D, 1C + mov edi, dword [ebp+14H] ; 0018 _ 8B. 7D, 14 + mov dword [esp+24H], eax ; 001B _ 89. 44 24, 24 + mov ecx, esi ; 001F _ 8B. CE + mov dword [esp+28H], edx ; 0021 _ 89. 54 24, 28 + shl ecx, 23 ; 0025 _ C1. E1, 17 + shr esi, 9 ; 0028 _ C1. EE, 09 + mov dword [esp+4H], esi ; 002B _ 89. 74 24, 04 + mov esi, dword [ebp+10H] ; 002F _ 8B. 75, 10 + shr edi, 9 ; 0032 _ C1. EF, 09 + or ecx, edi ; 0035 _ 0B. CF + lea ebx, [ebx+5710H] ; 0037 _ 8D. 9B, 00005710 + mov dword [esp], ecx ; 003D _ 89. 0C 24 + xor ecx, ecx ; 0040 _ 33. C9 + mov dword [esp+8H], ecx ; 0042 _ 89. 4C 24, 08 + mov dword [esp+0CH], ecx ; 0046 _ 89. 4C 24, 0C + mov edi, 135 ; 004A _ BF, 00000087 + movd xmm1, edi ; 004F _ 66: 0F 6E. CF + movdqa oword [esp+30H], xmm1 ; 0053 _ 66: 0F 7F. 4C 24, 30 + lea edi, [esp+10H] ; 0059 _ 8D. 7C 24, 10 + jmp ?_005 ; 005D _ EB, 06 + +?_004: movdqa oword [esp+10H], xmm4 ; 005F _ 66: 0F 7F. 64 24, 10 +?_005: add dword [esp], 1 ; 0065 _ 83. 04 24, 01 + adc dword [esp+4H], 0 ; 0069 _ 83. 54 24, 04, 00 + push ebx ; 006E _ 53 + push edi ; 006F _ 57 + lea eax, [esp+8H] ; 0070 _ 8D. 44 24, 08 + push eax ; 0074 _ 50 + call _serpent256_encrypt@12 ; 0075 _ E8, 00000000(rel) + movdqa xmm4, oword [esp+10H] ; 007A _ 66: 0F 6F. 64 24, 10 + mov edx, dword [esp+24H] ; 0080 _ 8B. 54 24, 24 + mov ecx, dword [ebp+1CH] ; 0084 _ 8B. 4D, 1C + mov dword [esp+20H], esi ; 0087 _ 89. 74 24, 20 + xor eax, eax ; 008B _ 33. C0 + mov esi, dword [esp+28H] ; 008D _ 8B. 74 24, 28 +?_006: movdqa xmm1, xmm4 ; 0091 _ 66: 0F 6F. CC + movdqa xmm7, xmm4 ; 0095 _ 66: 0F 6F. FC + movdqa xmm5, xmm4 ; 0099 _ 66: 0F 6F. EC + movdqa xmm0, oword [esp+30H] ; 009D _ 66: 0F 6F. 44 24, 30 + psllq xmm1, 1 ; 00A3 _ 66: 0F 73. F1, 01 + pslldq xmm7, 8 ; 00A8 _ 66: 0F 73. FF, 08 + psrldq xmm7, 7 ; 00AD _ 66: 0F 73. DF, 07 + psrlq xmm7, 7 ; 00B2 _ 66: 0F 73. D7, 07 + por xmm1, xmm7 ; 00B7 _ 66: 0F EB. CF + psraw xmm5, 8 ; 00BB _ 66: 0F 71. E5, 08 + psrldq xmm5, 15 ; 00C0 _ 66: 0F 73. DD, 0F + pand xmm5, xmm0 ; 00C5 _ 66: 0F DB. E8 + pxor xmm1, xmm5 ; 00C9 _ 66: 0F EF. CD + movdqa oword [esp+40H], xmm1 ; 00CD _ 66: 0F 7F. 4C 24, 40 + movdqa xmm5, xmm1 ; 00D3 _ 66: 0F 6F. E9 + movdqa xmm2, xmm1 ; 00D7 _ 66: 0F 6F. D1 + movdqa xmm6, xmm1 ; 00DB _ 66: 0F 6F. F1 + psllq xmm5, 1 ; 00DF _ 66: 0F 73. F5, 01 + pslldq xmm2, 8 ; 00E4 _ 66: 0F 73. FA, 08 + psrldq xmm2, 7 ; 00E9 _ 66: 0F 73. DA, 07 + psrlq xmm2, 7 ; 00EE _ 66: 0F 73. D2, 07 + por xmm5, xmm2 ; 00F3 _ 66: 0F EB. EA + psraw xmm6, 8 ; 00F7 _ 66: 0F 71. E6, 08 + psrldq xmm6, 15 ; 00FC _ 66: 0F 73. DE, 0F + pand xmm6, xmm0 ; 0101 _ 66: 0F DB. F0 + pxor xmm5, xmm6 ; 0105 _ 66: 0F EF. EE + movdqu xmm6, oword [esi+20H] ; 0109 _ F3: 0F 6F. 76, 20 + movdqa oword [esp+50H], xmm5 ; 010E _ 66: 0F 7F. 6C 24, 50 + movdqa xmm7, xmm5 ; 0114 _ 66: 0F 6F. FD + movdqa xmm3, xmm5 ; 0118 _ 66: 0F 6F. DD + movdqa xmm2, xmm5 ; 011C _ 66: 0F 6F. D5 + psllq xmm7, 1 ; 0120 _ 66: 0F 73. F7, 01 + pslldq xmm3, 8 ; 0125 _ 66: 0F 73. FB, 08 + psrldq xmm3, 7 ; 012A _ 66: 0F 73. DB, 07 + psrlq xmm3, 7 ; 012F _ 66: 0F 73. D3, 07 + por xmm7, xmm3 ; 0134 _ 66: 0F EB. FB + movdqu xmm3, oword [esi] ; 0138 _ F3: 0F 6F. 1E + psraw xmm2, 8 ; 013C _ 66: 0F 71. E2, 08 + psrldq xmm2, 15 ; 0141 _ 66: 0F 73. DA, 0F + pand xmm2, xmm0 ; 0146 _ 66: 0F DB. D0 + movdqu xmm0, oword [esi+10H] ; 014A _ F3: 0F 6F. 46, 10 + pxor xmm7, xmm2 ; 014F _ 66: 0F EF. FA + pxor xmm3, xmm4 ; 0153 _ 66: 0F EF. 
DC + pxor xmm0, xmm1 ; 0157 _ 66: 0F EF. C1 + movdqu xmm1, oword [esi+30H] ; 015B _ F3: 0F 6F. 4E, 30 + pxor xmm6, xmm5 ; 0160 _ 66: 0F EF. F5 + movdqa xmm5, xmm3 ; 0164 _ 66: 0F 6F. EB + movdqa xmm2, xmm6 ; 0168 _ 66: 0F 6F. D6 + pxor xmm1, xmm7 ; 016C _ 66: 0F EF. CF + punpckldq xmm5, xmm0 ; 0170 _ 66: 0F 62. E8 + punpckldq xmm2, xmm1 ; 0174 _ 66: 0F 62. D1 + punpckhdq xmm3, xmm0 ; 0178 _ 66: 0F 6A. D8 + punpckhdq xmm6, xmm1 ; 017C _ 66: 0F 6A. F1 + movdqa xmm1, xmm5 ; 0180 _ 66: 0F 6F. CD + punpckhqdq xmm5, xmm2 ; 0184 _ 66: 0F 6D. EA + punpcklqdq xmm1, xmm2 ; 0188 _ 66: 0F 6C. CA + movdqa xmm2, xmm3 ; 018C _ 66: 0F 6F. D3 + punpckhqdq xmm3, xmm6 ; 0190 _ 66: 0F 6D. DE + punpcklqdq xmm2, xmm6 ; 0194 _ 66: 0F 6C. D6 + movd xmm0, dword [ecx+2C80H] ; 0198 _ 66: 0F 6E. 81, 00002C80 + pshufd xmm0, xmm0, 0 ; 01A0 _ 66: 0F 70. C0, 00 + pxor xmm1, xmm0 ; 01A5 _ 66: 0F EF. C8 + movd xmm6, dword [ecx+2C84H] ; 01A9 _ 66: 0F 6E. B1, 00002C84 + pshufd xmm0, xmm6, 0 ; 01B1 _ 66: 0F 70. C6, 00 + movd xmm6, dword [ecx+2C88H] ; 01B6 _ 66: 0F 6E. B1, 00002C88 + pxor xmm5, xmm0 ; 01BE _ 66: 0F EF. E8 + pshufd xmm0, xmm6, 0 ; 01C2 _ 66: 0F 70. C6, 00 + pxor xmm2, xmm0 ; 01C7 _ 66: 0F EF. D0 + movd xmm6, dword [ecx+2C8CH] ; 01CB _ 66: 0F 6E. B1, 00002C8C + pshufd xmm0, xmm6, 0 ; 01D3 _ 66: 0F 70. C6, 00 + movdqa xmm6, xmm2 ; 01D8 _ 66: 0F 6F. F2 + pxor xmm3, xmm0 ; 01DC _ 66: 0F EF. D8 + por xmm2, xmm3 ; 01E0 _ 66: 0F EB. D3 + pxor xmm6, xmm1 ; 01E4 _ 66: 0F EF. F1 + pand xmm1, xmm3 ; 01E8 _ 66: 0F DB. CB + pxor xmm3, xmm5 ; 01EC _ 66: 0F EF. DD + por xmm5, xmm1 ; 01F0 _ 66: 0F EB. E9 + pand xmm3, xmm2 ; 01F4 _ 66: 0F DB. DA + pcmpeqd xmm0, xmm0 ; 01F8 _ 66: 0F 76. C0 + pxor xmm6, xmm0 ; 01FC _ 66: 0F EF. F0 + pxor xmm1, xmm6 ; 0200 _ 66: 0F EF. CE + pand xmm6, xmm2 ; 0204 _ 66: 0F DB. F2 + pxor xmm5, xmm6 ; 0208 _ 66: 0F EF. EE + pxor xmm6, xmm1 ; 020C _ 66: 0F EF. F1 + por xmm1, xmm6 ; 0210 _ 66: 0F EB. CE + pxor xmm2, xmm5 ; 0214 _ 66: 0F EF. D5 + pxor xmm1, xmm3 ; 0218 _ 66: 0F EF. CB + pxor xmm3, xmm2 ; 021C _ 66: 0F EF. DA + por xmm2, xmm1 ; 0220 _ 66: 0F EB. D1 + pxor xmm3, xmm6 ; 0224 _ 66: 0F EF. DE + pxor xmm2, xmm6 ; 0228 _ 66: 0F EF. D6 + movd xmm6, dword [ecx+2C70H] ; 022C _ 66: 0F 6E. B1, 00002C70 + pshufd xmm6, xmm6, 0 ; 0234 _ 66: 0F 70. F6, 00 + pxor xmm3, xmm6 ; 0239 _ 66: 0F EF. DE + movd xmm6, dword [ecx+2C74H] ; 023D _ 66: 0F 6E. B1, 00002C74 + pshufd xmm6, xmm6, 0 ; 0245 _ 66: 0F 70. F6, 00 + pxor xmm1, xmm6 ; 024A _ 66: 0F EF. CE + movd xmm6, dword [ecx+2C78H] ; 024E _ 66: 0F 6E. B1, 00002C78 + pshufd xmm6, xmm6, 0 ; 0256 _ 66: 0F 70. F6, 00 + pxor xmm5, xmm6 ; 025B _ 66: 0F EF. EE + movd xmm6, dword [ecx+2C7CH] ; 025F _ 66: 0F 6E. B1, 00002C7C + pshufd xmm6, xmm6, 0 ; 0267 _ 66: 0F 70. F6, 00 + pxor xmm2, xmm6 ; 026C _ 66: 0F EF. D6 + movdqa xmm6, xmm5 ; 0270 _ 66: 0F 6F. F5 + psrld xmm5, 22 ; 0274 _ 66: 0F 72. D5, 16 + pslld xmm6, 10 ; 0279 _ 66: 0F 72. F6, 0A + por xmm6, xmm5 ; 027E _ 66: 0F EB. F5 + movdqa xmm5, xmm3 ; 0282 _ 66: 0F 6F. EB + psrld xmm3, 5 ; 0286 _ 66: 0F 72. D3, 05 + pxor xmm6, xmm2 ; 028B _ 66: 0F EF. F2 + pslld xmm5, 27 ; 028F _ 66: 0F 72. F5, 1B + por xmm5, xmm3 ; 0294 _ 66: 0F EB. EB + movdqa xmm3, xmm1 ; 0298 _ 66: 0F 6F. D9 + pxor xmm5, xmm1 ; 029C _ 66: 0F EF. E9 + pxor xmm5, xmm2 ; 02A0 _ 66: 0F EF. EA + pslld xmm3, 7 ; 02A4 _ 66: 0F 72. F3, 07 + pxor xmm6, xmm3 ; 02A9 _ 66: 0F EF. F3 + movdqa xmm3, xmm2 ; 02AD _ 66: 0F 6F. DA + psrld xmm2, 7 ; 02B1 _ 66: 0F 72. D2, 07 + pslld xmm3, 25 ; 02B6 _ 66: 0F 72. F3, 19 + por xmm3, xmm2 ; 02BB _ 66: 0F EB. 
DA + movdqa xmm2, xmm1 ; 02BF _ 66: 0F 6F. D1 + psrld xmm1, 1 ; 02C3 _ 66: 0F 72. D1, 01 + pxor xmm3, xmm6 ; 02C8 _ 66: 0F EF. DE + pslld xmm2, 31 ; 02CC _ 66: 0F 72. F2, 1F + por xmm2, xmm1 ; 02D1 _ 66: 0F EB. D1 + movdqa xmm1, xmm5 ; 02D5 _ 66: 0F 6F. CD + pxor xmm2, xmm5 ; 02D9 _ 66: 0F EF. D5 + pxor xmm2, xmm6 ; 02DD _ 66: 0F EF. D6 + pslld xmm1, 3 ; 02E1 _ 66: 0F 72. F1, 03 + pxor xmm3, xmm1 ; 02E6 _ 66: 0F EF. D9 + movdqa xmm1, xmm6 ; 02EA _ 66: 0F 6F. CE + psrld xmm6, 3 ; 02EE _ 66: 0F 72. D6, 03 + pslld xmm1, 29 ; 02F3 _ 66: 0F 72. F1, 1D + por xmm1, xmm6 ; 02F8 _ 66: 0F EB. CE + movdqa xmm6, xmm5 ; 02FC _ 66: 0F 6F. F5 + psrld xmm5, 13 ; 0300 _ 66: 0F 72. D5, 0D + pslld xmm6, 19 ; 0305 _ 66: 0F 72. F6, 13 + por xmm6, xmm5 ; 030A _ 66: 0F EB. F5 + movdqa xmm5, xmm1 ; 030E _ 66: 0F 6F. E9 + pxor xmm6, xmm1 ; 0312 _ 66: 0F EF. F1 + pxor xmm1, xmm3 ; 0316 _ 66: 0F EF. CB + pand xmm5, xmm6 ; 031A _ 66: 0F DB. EE + pxor xmm5, xmm0 ; 031E _ 66: 0F EF. E8 + pxor xmm3, xmm2 ; 0322 _ 66: 0F EF. DA + pxor xmm5, xmm3 ; 0326 _ 66: 0F EF. EB + por xmm1, xmm6 ; 032A _ 66: 0F EB. CE + pxor xmm6, xmm5 ; 032E _ 66: 0F EF. F5 + pxor xmm3, xmm1 ; 0332 _ 66: 0F EF. D9 + pxor xmm1, xmm2 ; 0336 _ 66: 0F EF. CA + pand xmm2, xmm3 ; 033A _ 66: 0F DB. D3 + pxor xmm2, xmm6 ; 033E _ 66: 0F EF. D6 + pxor xmm6, xmm3 ; 0342 _ 66: 0F EF. F3 + por xmm6, xmm5 ; 0346 _ 66: 0F EB. F5 + pxor xmm3, xmm2 ; 034A _ 66: 0F EF. DA + pxor xmm1, xmm6 ; 034E _ 66: 0F EF. CE + movd xmm6, dword [ecx+2C60H] ; 0352 _ 66: 0F 6E. B1, 00002C60 + pshufd xmm6, xmm6, 0 ; 035A _ 66: 0F 70. F6, 00 + pxor xmm2, xmm6 ; 035F _ 66: 0F EF. D6 + movd xmm6, dword [ecx+2C64H] ; 0363 _ 66: 0F 6E. B1, 00002C64 + pshufd xmm6, xmm6, 0 ; 036B _ 66: 0F 70. F6, 00 + pxor xmm5, xmm6 ; 0370 _ 66: 0F EF. EE + movd xmm6, dword [ecx+2C68H] ; 0374 _ 66: 0F 6E. B1, 00002C68 + pshufd xmm6, xmm6, 0 ; 037C _ 66: 0F 70. F6, 00 + pxor xmm1, xmm6 ; 0381 _ 66: 0F EF. CE + movd xmm6, dword [ecx+2C6CH] ; 0385 _ 66: 0F 6E. B1, 00002C6C + pshufd xmm6, xmm6, 0 ; 038D _ 66: 0F 70. F6, 00 + pxor xmm3, xmm6 ; 0392 _ 66: 0F EF. DE + movdqa xmm6, xmm1 ; 0396 _ 66: 0F 6F. F1 + psrld xmm1, 22 ; 039A _ 66: 0F 72. D1, 16 + pslld xmm6, 10 ; 039F _ 66: 0F 72. F6, 0A + por xmm6, xmm1 ; 03A4 _ 66: 0F EB. F1 + movdqa xmm1, xmm2 ; 03A8 _ 66: 0F 6F. CA + psrld xmm2, 5 ; 03AC _ 66: 0F 72. D2, 05 + pxor xmm6, xmm3 ; 03B1 _ 66: 0F EF. F3 + pslld xmm1, 27 ; 03B5 _ 66: 0F 72. F1, 1B + por xmm1, xmm2 ; 03BA _ 66: 0F EB. CA + movdqa xmm2, xmm5 ; 03BE _ 66: 0F 6F. D5 + pxor xmm1, xmm5 ; 03C2 _ 66: 0F EF. CD + pxor xmm1, xmm3 ; 03C6 _ 66: 0F EF. CB + pslld xmm2, 7 ; 03CA _ 66: 0F 72. F2, 07 + pxor xmm6, xmm2 ; 03CF _ 66: 0F EF. F2 + movdqa xmm2, xmm3 ; 03D3 _ 66: 0F 6F. D3 + psrld xmm3, 7 ; 03D7 _ 66: 0F 72. D3, 07 + pslld xmm2, 25 ; 03DC _ 66: 0F 72. F2, 19 + por xmm2, xmm3 ; 03E1 _ 66: 0F EB. D3 + movdqa xmm3, xmm5 ; 03E5 _ 66: 0F 6F. DD + psrld xmm5, 1 ; 03E9 _ 66: 0F 72. D5, 01 + pxor xmm2, xmm6 ; 03EE _ 66: 0F EF. D6 + pslld xmm3, 31 ; 03F2 _ 66: 0F 72. F3, 1F + por xmm3, xmm5 ; 03F7 _ 66: 0F EB. DD + movdqa xmm5, xmm1 ; 03FB _ 66: 0F 6F. E9 + pxor xmm3, xmm1 ; 03FF _ 66: 0F EF. D9 + pxor xmm3, xmm6 ; 0403 _ 66: 0F EF. DE + pslld xmm5, 3 ; 0407 _ 66: 0F 72. F5, 03 + pxor xmm2, xmm5 ; 040C _ 66: 0F EF. D5 + movdqa xmm5, xmm6 ; 0410 _ 66: 0F 6F. EE + psrld xmm6, 3 ; 0414 _ 66: 0F 72. D6, 03 + pxor xmm3, xmm0 ; 0419 _ 66: 0F EF. D8 + pslld xmm5, 29 ; 041D _ 66: 0F 72. F5, 1D + por xmm5, xmm6 ; 0422 _ 66: 0F EB. EE + movdqa xmm6, xmm1 ; 0426 _ 66: 0F 6F. F1 + psrld xmm1, 13 ; 042A _ 66: 0F 72. 
D1, 0D + pxor xmm5, xmm3 ; 042F _ 66: 0F EF. EB + pslld xmm6, 19 ; 0433 _ 66: 0F 72. F6, 13 + por xmm6, xmm1 ; 0438 _ 66: 0F EB. F1 + movdqa xmm1, xmm2 ; 043C _ 66: 0F 6F. CA + por xmm1, xmm6 ; 0440 _ 66: 0F EB. CE + pxor xmm1, xmm5 ; 0444 _ 66: 0F EF. CD + por xmm5, xmm3 ; 0448 _ 66: 0F EB. EB + pand xmm5, xmm6 ; 044C _ 66: 0F DB. EE + pxor xmm2, xmm1 ; 0450 _ 66: 0F EF. D1 + pxor xmm5, xmm2 ; 0454 _ 66: 0F EF. EA + por xmm2, xmm6 ; 0458 _ 66: 0F EB. D6 + pxor xmm2, xmm3 ; 045C _ 66: 0F EF. D3 + pand xmm3, xmm5 ; 0460 _ 66: 0F DB. DD + pxor xmm3, xmm1 ; 0464 _ 66: 0F EF. D9 + pxor xmm2, xmm5 ; 0468 _ 66: 0F EF. D5 + pand xmm1, xmm2 ; 046C _ 66: 0F DB. CA + pxor xmm2, xmm3 ; 0470 _ 66: 0F EF. D3 + pxor xmm1, xmm2 ; 0474 _ 66: 0F EF. CA + pxor xmm2, xmm0 ; 0478 _ 66: 0F EF. D0 + pxor xmm1, xmm6 ; 047C _ 66: 0F EF. CE + movd xmm6, dword [ecx+2C50H] ; 0480 _ 66: 0F 6E. B1, 00002C50 + pshufd xmm6, xmm6, 0 ; 0488 _ 66: 0F 70. F6, 00 + pxor xmm3, xmm6 ; 048D _ 66: 0F EF. DE + movd xmm6, dword [ecx+2C54H] ; 0491 _ 66: 0F 6E. B1, 00002C54 + pshufd xmm6, xmm6, 0 ; 0499 _ 66: 0F 70. F6, 00 + pxor xmm2, xmm6 ; 049E _ 66: 0F EF. D6 + movd xmm6, dword [ecx+2C58H] ; 04A2 _ 66: 0F 6E. B1, 00002C58 + pshufd xmm6, xmm6, 0 ; 04AA _ 66: 0F 70. F6, 00 + pxor xmm1, xmm6 ; 04AF _ 66: 0F EF. CE + movd xmm6, dword [ecx+2C5CH] ; 04B3 _ 66: 0F 6E. B1, 00002C5C + pshufd xmm6, xmm6, 0 ; 04BB _ 66: 0F 70. F6, 00 + pxor xmm5, xmm6 ; 04C0 _ 66: 0F EF. EE + movdqa xmm6, xmm1 ; 04C4 _ 66: 0F 6F. F1 + psrld xmm1, 22 ; 04C8 _ 66: 0F 72. D1, 16 + pslld xmm6, 10 ; 04CD _ 66: 0F 72. F6, 0A + por xmm6, xmm1 ; 04D2 _ 66: 0F EB. F1 + movdqa xmm1, xmm3 ; 04D6 _ 66: 0F 6F. CB + psrld xmm3, 5 ; 04DA _ 66: 0F 72. D3, 05 + pxor xmm6, xmm5 ; 04DF _ 66: 0F EF. F5 + pslld xmm1, 27 ; 04E3 _ 66: 0F 72. F1, 1B + por xmm1, xmm3 ; 04E8 _ 66: 0F EB. CB + movdqa xmm3, xmm2 ; 04EC _ 66: 0F 6F. DA + pxor xmm1, xmm2 ; 04F0 _ 66: 0F EF. CA + pxor xmm1, xmm5 ; 04F4 _ 66: 0F EF. CD + pslld xmm3, 7 ; 04F8 _ 66: 0F 72. F3, 07 + pxor xmm6, xmm3 ; 04FD _ 66: 0F EF. F3 + movdqa xmm3, xmm5 ; 0501 _ 66: 0F 6F. DD + psrld xmm5, 7 ; 0505 _ 66: 0F 72. D5, 07 + pslld xmm3, 25 ; 050A _ 66: 0F 72. F3, 19 + por xmm3, xmm5 ; 050F _ 66: 0F EB. DD + movdqa xmm5, xmm2 ; 0513 _ 66: 0F 6F. EA + psrld xmm2, 1 ; 0517 _ 66: 0F 72. D2, 01 + pxor xmm3, xmm6 ; 051C _ 66: 0F EF. DE + pslld xmm5, 31 ; 0520 _ 66: 0F 72. F5, 1F + por xmm5, xmm2 ; 0525 _ 66: 0F EB. EA + movdqa xmm2, xmm1 ; 0529 _ 66: 0F 6F. D1 + pxor xmm5, xmm1 ; 052D _ 66: 0F EF. E9 + pxor xmm5, xmm6 ; 0531 _ 66: 0F EF. EE + pslld xmm2, 3 ; 0535 _ 66: 0F 72. F2, 03 + pxor xmm3, xmm2 ; 053A _ 66: 0F EF. DA + movdqa xmm2, xmm6 ; 053E _ 66: 0F 6F. D6 + psrld xmm6, 3 ; 0542 _ 66: 0F 72. D6, 03 + pslld xmm2, 29 ; 0547 _ 66: 0F 72. F2, 1D + por xmm2, xmm6 ; 054C _ 66: 0F EB. D6 + movdqa xmm6, xmm1 ; 0550 _ 66: 0F 6F. F1 + psrld xmm1, 13 ; 0554 _ 66: 0F 72. D1, 0D + pslld xmm6, 19 ; 0559 _ 66: 0F 72. F6, 13 + por xmm6, xmm1 ; 055E _ 66: 0F EB. F1 + movdqa xmm1, xmm2 ; 0562 _ 66: 0F 6F. CA + pand xmm1, xmm3 ; 0566 _ 66: 0F DB. CB + pxor xmm1, xmm5 ; 056A _ 66: 0F EF. CD + por xmm5, xmm3 ; 056E _ 66: 0F EB. EB + pand xmm5, xmm6 ; 0572 _ 66: 0F DB. EE + pxor xmm2, xmm1 ; 0576 _ 66: 0F EF. D1 + pxor xmm2, xmm5 ; 057A _ 66: 0F EF. D5 + pand xmm5, xmm1 ; 057E _ 66: 0F DB. E9 + pxor xmm6, xmm0 ; 0582 _ 66: 0F EF. F0 + pxor xmm3, xmm2 ; 0586 _ 66: 0F EF. DA + pxor xmm5, xmm3 ; 058A _ 66: 0F EF. EB + pand xmm3, xmm6 ; 058E _ 66: 0F DB. DE + pxor xmm3, xmm1 ; 0592 _ 66: 0F EF. D9 + pxor xmm6, xmm5 ; 0596 _ 66: 0F EF. 
F5 + pand xmm1, xmm6 ; 059A _ 66: 0F DB. CE + pxor xmm3, xmm6 ; 059E _ 66: 0F EF. DE + pxor xmm1, xmm2 ; 05A2 _ 66: 0F EF. CA + por xmm1, xmm3 ; 05A6 _ 66: 0F EB. CB + pxor xmm3, xmm6 ; 05AA _ 66: 0F EF. DE + pxor xmm1, xmm5 ; 05AE _ 66: 0F EF. CD + movd xmm5, dword [ecx+2C40H] ; 05B2 _ 66: 0F 6E. A9, 00002C40 + pshufd xmm5, xmm5, 0 ; 05BA _ 66: 0F 70. ED, 00 + pxor xmm6, xmm5 ; 05BF _ 66: 0F EF. F5 + movd xmm5, dword [ecx+2C44H] ; 05C3 _ 66: 0F 6E. A9, 00002C44 + pshufd xmm5, xmm5, 0 ; 05CB _ 66: 0F 70. ED, 00 + pxor xmm3, xmm5 ; 05D0 _ 66: 0F EF. DD + movd xmm5, dword [ecx+2C48H] ; 05D4 _ 66: 0F 6E. A9, 00002C48 + pshufd xmm5, xmm5, 0 ; 05DC _ 66: 0F 70. ED, 00 + pxor xmm1, xmm5 ; 05E1 _ 66: 0F EF. CD + movd xmm5, dword [ecx+2C4CH] ; 05E5 _ 66: 0F 6E. A9, 00002C4C + pshufd xmm5, xmm5, 0 ; 05ED _ 66: 0F 70. ED, 00 + pxor xmm2, xmm5 ; 05F2 _ 66: 0F EF. D5 + movdqa xmm5, xmm1 ; 05F6 _ 66: 0F 6F. E9 + psrld xmm1, 22 ; 05FA _ 66: 0F 72. D1, 16 + pslld xmm5, 10 ; 05FF _ 66: 0F 72. F5, 0A + por xmm5, xmm1 ; 0604 _ 66: 0F EB. E9 + movdqa xmm1, xmm6 ; 0608 _ 66: 0F 6F. CE + psrld xmm6, 5 ; 060C _ 66: 0F 72. D6, 05 + pxor xmm5, xmm2 ; 0611 _ 66: 0F EF. EA + pslld xmm1, 27 ; 0615 _ 66: 0F 72. F1, 1B + por xmm1, xmm6 ; 061A _ 66: 0F EB. CE + movdqa xmm6, xmm3 ; 061E _ 66: 0F 6F. F3 + pxor xmm1, xmm3 ; 0622 _ 66: 0F EF. CB + pxor xmm1, xmm2 ; 0626 _ 66: 0F EF. CA + pslld xmm6, 7 ; 062A _ 66: 0F 72. F6, 07 + pxor xmm5, xmm6 ; 062F _ 66: 0F EF. EE + movdqa xmm6, xmm2 ; 0633 _ 66: 0F 6F. F2 + psrld xmm2, 7 ; 0637 _ 66: 0F 72. D2, 07 + pslld xmm6, 25 ; 063C _ 66: 0F 72. F6, 19 + por xmm6, xmm2 ; 0641 _ 66: 0F EB. F2 + movdqa xmm2, xmm3 ; 0645 _ 66: 0F 6F. D3 + psrld xmm3, 1 ; 0649 _ 66: 0F 72. D3, 01 + pxor xmm6, xmm5 ; 064E _ 66: 0F EF. F5 + pslld xmm2, 31 ; 0652 _ 66: 0F 72. F2, 1F + por xmm2, xmm3 ; 0657 _ 66: 0F EB. D3 + movdqa xmm3, xmm1 ; 065B _ 66: 0F 6F. D9 + pxor xmm2, xmm1 ; 065F _ 66: 0F EF. D1 + pxor xmm2, xmm5 ; 0663 _ 66: 0F EF. D5 + pslld xmm3, 3 ; 0667 _ 66: 0F 72. F3, 03 + pxor xmm6, xmm3 ; 066C _ 66: 0F EF. F3 + movdqa xmm3, xmm5 ; 0670 _ 66: 0F 6F. DD + psrld xmm5, 3 ; 0674 _ 66: 0F 72. D5, 03 + pslld xmm3, 29 ; 0679 _ 66: 0F 72. F3, 1D + por xmm3, xmm5 ; 067E _ 66: 0F EB. DD + movdqa xmm5, xmm1 ; 0682 _ 66: 0F 6F. E9 + psrld xmm1, 13 ; 0686 _ 66: 0F 72. D1, 0D + pslld xmm5, 19 ; 068B _ 66: 0F 72. F5, 13 + por xmm5, xmm1 ; 0690 _ 66: 0F EB. E9 + movdqa xmm1, xmm3 ; 0694 _ 66: 0F 6F. CB + pxor xmm1, xmm2 ; 0698 _ 66: 0F EF. CA + pxor xmm5, xmm1 ; 069C _ 66: 0F EF. E9 + pand xmm3, xmm1 ; 06A0 _ 66: 0F DB. D9 + pxor xmm3, xmm5 ; 06A4 _ 66: 0F EF. DD + pand xmm5, xmm2 ; 06A8 _ 66: 0F DB. EA + pxor xmm2, xmm6 ; 06AC _ 66: 0F EF. D6 + por xmm6, xmm3 ; 06B0 _ 66: 0F EB. F3 + pxor xmm1, xmm6 ; 06B4 _ 66: 0F EF. CE + pxor xmm5, xmm6 ; 06B8 _ 66: 0F EF. EE + pxor xmm2, xmm3 ; 06BC _ 66: 0F EF. D3 + pand xmm6, xmm1 ; 06C0 _ 66: 0F DB. F1 + pxor xmm6, xmm2 ; 06C4 _ 66: 0F EF. F2 + pxor xmm2, xmm5 ; 06C8 _ 66: 0F EF. D5 + por xmm2, xmm1 ; 06CC _ 66: 0F EB. D1 + pxor xmm5, xmm6 ; 06D0 _ 66: 0F EF. EE + pxor xmm2, xmm3 ; 06D4 _ 66: 0F EF. D3 + pxor xmm5, xmm2 ; 06D8 _ 66: 0F EF. EA + movd xmm3, dword [ecx+2C30H] ; 06DC _ 66: 0F 6E. 99, 00002C30 + pshufd xmm3, xmm3, 0 ; 06E4 _ 66: 0F 70. DB, 00 + pxor xmm1, xmm3 ; 06E9 _ 66: 0F EF. CB + movd xmm3, dword [ecx+2C34H] ; 06ED _ 66: 0F 6E. 99, 00002C34 + pshufd xmm3, xmm3, 0 ; 06F5 _ 66: 0F 70. DB, 00 + pxor xmm2, xmm3 ; 06FA _ 66: 0F EF. D3 + movd xmm3, dword [ecx+2C38H] ; 06FE _ 66: 0F 6E. 99, 00002C38 + pshufd xmm3, xmm3, 0 ; 0706 _ 66: 0F 70. 
DB, 00 + pxor xmm6, xmm3 ; 070B _ 66: 0F EF. F3 + movd xmm3, dword [ecx+2C3CH] ; 070F _ 66: 0F 6E. 99, 00002C3C + pshufd xmm3, xmm3, 0 ; 0717 _ 66: 0F 70. DB, 00 + pxor xmm5, xmm3 ; 071C _ 66: 0F EF. EB + movdqa xmm3, xmm6 ; 0720 _ 66: 0F 6F. DE + psrld xmm6, 22 ; 0724 _ 66: 0F 72. D6, 16 + pslld xmm3, 10 ; 0729 _ 66: 0F 72. F3, 0A + por xmm3, xmm6 ; 072E _ 66: 0F EB. DE + movdqa xmm6, xmm1 ; 0732 _ 66: 0F 6F. F1 + psrld xmm1, 5 ; 0736 _ 66: 0F 72. D1, 05 + pxor xmm3, xmm5 ; 073B _ 66: 0F EF. DD + pslld xmm6, 27 ; 073F _ 66: 0F 72. F6, 1B + por xmm6, xmm1 ; 0744 _ 66: 0F EB. F1 + movdqa xmm1, xmm2 ; 0748 _ 66: 0F 6F. CA + pxor xmm6, xmm2 ; 074C _ 66: 0F EF. F2 + pxor xmm6, xmm5 ; 0750 _ 66: 0F EF. F5 + pslld xmm1, 7 ; 0754 _ 66: 0F 72. F1, 07 + pxor xmm3, xmm1 ; 0759 _ 66: 0F EF. D9 + movdqa xmm1, xmm5 ; 075D _ 66: 0F 6F. CD + psrld xmm5, 7 ; 0761 _ 66: 0F 72. D5, 07 + pslld xmm1, 25 ; 0766 _ 66: 0F 72. F1, 19 + por xmm1, xmm5 ; 076B _ 66: 0F EB. CD + movdqa xmm5, xmm2 ; 076F _ 66: 0F 6F. EA + psrld xmm2, 1 ; 0773 _ 66: 0F 72. D2, 01 + pxor xmm1, xmm3 ; 0778 _ 66: 0F EF. CB + pslld xmm5, 31 ; 077C _ 66: 0F 72. F5, 1F + por xmm5, xmm2 ; 0781 _ 66: 0F EB. EA + movdqa xmm2, xmm6 ; 0785 _ 66: 0F 6F. D6 + pxor xmm5, xmm6 ; 0789 _ 66: 0F EF. EE + pxor xmm5, xmm3 ; 078D _ 66: 0F EF. EB + pslld xmm2, 3 ; 0791 _ 66: 0F 72. F2, 03 + pxor xmm1, xmm2 ; 0796 _ 66: 0F EF. CA + movdqa xmm2, xmm3 ; 079A _ 66: 0F 6F. D3 + psrld xmm3, 3 ; 079E _ 66: 0F 72. D3, 03 + pslld xmm2, 29 ; 07A3 _ 66: 0F 72. F2, 1D + por xmm2, xmm3 ; 07A8 _ 66: 0F EB. D3 + movdqa xmm3, xmm6 ; 07AC _ 66: 0F 6F. DE + psrld xmm6, 13 ; 07B0 _ 66: 0F 72. D6, 0D + pxor xmm2, xmm1 ; 07B5 _ 66: 0F EF. D1 + pslld xmm3, 19 ; 07B9 _ 66: 0F 72. F3, 13 + por xmm3, xmm6 ; 07BE _ 66: 0F EB. DE + pxor xmm1, xmm3 ; 07C2 _ 66: 0F EF. CB + movdqa xmm6, xmm1 ; 07C6 _ 66: 0F 6F. F1 + pand xmm6, xmm2 ; 07CA _ 66: 0F DB. F2 + pxor xmm6, xmm5 ; 07CE _ 66: 0F EF. F5 + por xmm5, xmm2 ; 07D2 _ 66: 0F EB. EA + pxor xmm5, xmm1 ; 07D6 _ 66: 0F EF. E9 + pand xmm1, xmm6 ; 07DA _ 66: 0F DB. CE + pxor xmm2, xmm6 ; 07DE _ 66: 0F EF. D6 + pand xmm1, xmm3 ; 07E2 _ 66: 0F DB. CB + pxor xmm1, xmm2 ; 07E6 _ 66: 0F EF. CA + pand xmm2, xmm5 ; 07EA _ 66: 0F DB. D5 + por xmm2, xmm3 ; 07EE _ 66: 0F EB. D3 + pxor xmm6, xmm0 ; 07F2 _ 66: 0F EF. F0 + movdqa xmm0, xmm6 ; 07F6 _ 66: 0F 6F. C6 + pxor xmm2, xmm6 ; 07FA _ 66: 0F EF. D6 + pxor xmm3, xmm6 ; 07FE _ 66: 0F EF. DE + movd xmm6, dword [ecx+2C20H] ; 0802 _ 66: 0F 6E. B1, 00002C20 + pxor xmm0, xmm1 ; 080A _ 66: 0F EF. C1 + pand xmm3, xmm5 ; 080E _ 66: 0F DB. DD + pxor xmm0, xmm3 ; 0812 _ 66: 0F EF. C3 + pshufd xmm3, xmm6, 0 ; 0816 _ 66: 0F 70. DE, 00 + pxor xmm5, xmm3 ; 081B _ 66: 0F EF. EB + movd xmm6, dword [ecx+2C24H] ; 081F _ 66: 0F 6E. B1, 00002C24 + pshufd xmm3, xmm6, 0 ; 0827 _ 66: 0F 70. DE, 00 + movd xmm6, dword [ecx+2C28H] ; 082C _ 66: 0F 6E. B1, 00002C28 + pxor xmm1, xmm3 ; 0834 _ 66: 0F EF. CB + pshufd xmm3, xmm6, 0 ; 0838 _ 66: 0F 70. DE, 00 + pxor xmm2, xmm3 ; 083D _ 66: 0F EF. D3 + movd xmm3, dword [ecx+2C2CH] ; 0841 _ 66: 0F 6E. 99, 00002C2C + pshufd xmm6, xmm3, 0 ; 0849 _ 66: 0F 70. F3, 00 + movdqa xmm3, xmm2 ; 084E _ 66: 0F 6F. DA + pxor xmm0, xmm6 ; 0852 _ 66: 0F EF. C6 + movdqa xmm6, xmm5 ; 0856 _ 66: 0F 6F. F5 + pslld xmm3, 10 ; 085A _ 66: 0F 72. F3, 0A + psrld xmm2, 22 ; 085F _ 66: 0F 72. D2, 16 + por xmm3, xmm2 ; 0864 _ 66: 0F EB. DA + movdqa xmm2, xmm1 ; 0868 _ 66: 0F 6F. D1 + pslld xmm6, 27 ; 086C _ 66: 0F 72. F6, 1B + psrld xmm5, 5 ; 0871 _ 66: 0F 72. D5, 05 + por xmm6, xmm5 ; 0876 _ 66: 0F EB. 
F5 + movdqa xmm5, xmm0 ; 087A _ 66: 0F 6F. E8 + pxor xmm3, xmm0 ; 087E _ 66: 0F EF. D8 + pslld xmm2, 7 ; 0882 _ 66: 0F 72. F2, 07 + pxor xmm3, xmm2 ; 0887 _ 66: 0F EF. DA + movdqa xmm2, xmm1 ; 088B _ 66: 0F 6F. D1 + pxor xmm6, xmm1 ; 088F _ 66: 0F EF. F1 + pxor xmm6, xmm0 ; 0893 _ 66: 0F EF. F0 + pslld xmm5, 25 ; 0897 _ 66: 0F 72. F5, 19 + psrld xmm0, 7 ; 089C _ 66: 0F 72. D0, 07 + por xmm5, xmm0 ; 08A1 _ 66: 0F EB. E8 + movdqa xmm0, xmm6 ; 08A5 _ 66: 0F 6F. C6 + pslld xmm2, 31 ; 08A9 _ 66: 0F 72. F2, 1F + psrld xmm1, 1 ; 08AE _ 66: 0F 72. D1, 01 + por xmm2, xmm1 ; 08B3 _ 66: 0F EB. D1 + movdqa xmm1, xmm6 ; 08B7 _ 66: 0F 6F. CE + pxor xmm5, xmm3 ; 08BB _ 66: 0F EF. EB + pxor xmm2, xmm6 ; 08BF _ 66: 0F EF. D6 + pslld xmm1, 3 ; 08C3 _ 66: 0F 72. F1, 03 + pxor xmm5, xmm1 ; 08C8 _ 66: 0F EF. E9 + movdqa xmm1, xmm3 ; 08CC _ 66: 0F 6F. CB + pxor xmm2, xmm3 ; 08D0 _ 66: 0F EF. D3 + psrld xmm3, 3 ; 08D4 _ 66: 0F 72. D3, 03 + pslld xmm1, 29 ; 08D9 _ 66: 0F 72. F1, 1D + por xmm1, xmm3 ; 08DE _ 66: 0F EB. CB + movdqa xmm3, xmm2 ; 08E2 _ 66: 0F 6F. DA + pslld xmm0, 19 ; 08E6 _ 66: 0F 72. F0, 13 + psrld xmm6, 13 ; 08EB _ 66: 0F 72. D6, 0D + por xmm0, xmm6 ; 08F0 _ 66: 0F EB. C6 + pxor xmm3, xmm5 ; 08F4 _ 66: 0F EF. DD + pand xmm5, xmm3 ; 08F8 _ 66: 0F DB. EB + pxor xmm2, xmm1 ; 08FC _ 66: 0F EF. D1 + pxor xmm5, xmm0 ; 0900 _ 66: 0F EF. E8 + por xmm0, xmm3 ; 0904 _ 66: 0F EB. C3 + pxor xmm1, xmm5 ; 0908 _ 66: 0F EF. CD + pxor xmm0, xmm2 ; 090C _ 66: 0F EF. C2 + por xmm0, xmm1 ; 0910 _ 66: 0F EB. C1 + pxor xmm3, xmm5 ; 0914 _ 66: 0F EF. DD + pxor xmm0, xmm3 ; 0918 _ 66: 0F EF. C3 + por xmm3, xmm5 ; 091C _ 66: 0F EB. DD + pxor xmm3, xmm0 ; 0920 _ 66: 0F EF. D8 + pcmpeqd xmm6, xmm6 ; 0924 _ 66: 0F 76. F6 + pxor xmm2, xmm6 ; 0928 _ 66: 0F EF. D6 + pxor xmm2, xmm3 ; 092C _ 66: 0F EF. D3 + por xmm3, xmm0 ; 0930 _ 66: 0F EB. D8 + pxor xmm3, xmm0 ; 0934 _ 66: 0F EF. D8 + por xmm3, xmm2 ; 0938 _ 66: 0F EB. DA + pxor xmm5, xmm3 ; 093C _ 66: 0F EF. EB + movd xmm3, dword [ecx+2C10H] ; 0940 _ 66: 0F 6E. 99, 00002C10 + pshufd xmm3, xmm3, 0 ; 0948 _ 66: 0F 70. DB, 00 + pxor xmm2, xmm3 ; 094D _ 66: 0F EF. D3 + movd xmm3, dword [ecx+2C14H] ; 0951 _ 66: 0F 6E. 99, 00002C14 + pshufd xmm3, xmm3, 0 ; 0959 _ 66: 0F 70. DB, 00 + pxor xmm0, xmm3 ; 095E _ 66: 0F EF. C3 + movd xmm3, dword [ecx+2C18H] ; 0962 _ 66: 0F 6E. 99, 00002C18 + pshufd xmm3, xmm3, 0 ; 096A _ 66: 0F 70. DB, 00 + pxor xmm5, xmm3 ; 096F _ 66: 0F EF. EB + movd xmm3, dword [ecx+2C1CH] ; 0973 _ 66: 0F 6E. 99, 00002C1C + pshufd xmm3, xmm3, 0 ; 097B _ 66: 0F 70. DB, 00 + pxor xmm1, xmm3 ; 0980 _ 66: 0F EF. CB + movdqa xmm3, xmm5 ; 0984 _ 66: 0F 6F. DD + psrld xmm5, 22 ; 0988 _ 66: 0F 72. D5, 16 + pslld xmm3, 10 ; 098D _ 66: 0F 72. F3, 0A + por xmm3, xmm5 ; 0992 _ 66: 0F EB. DD + movdqa xmm5, xmm2 ; 0996 _ 66: 0F 6F. EA + psrld xmm2, 5 ; 099A _ 66: 0F 72. D2, 05 + pxor xmm3, xmm1 ; 099F _ 66: 0F EF. D9 + pslld xmm5, 27 ; 09A3 _ 66: 0F 72. F5, 1B + por xmm5, xmm2 ; 09A8 _ 66: 0F EB. EA + movdqa xmm2, xmm0 ; 09AC _ 66: 0F 6F. D0 + pxor xmm5, xmm0 ; 09B0 _ 66: 0F EF. E8 + pxor xmm5, xmm1 ; 09B4 _ 66: 0F EF. E9 + pslld xmm2, 7 ; 09B8 _ 66: 0F 72. F2, 07 + pxor xmm3, xmm2 ; 09BD _ 66: 0F EF. DA + movdqa xmm2, xmm1 ; 09C1 _ 66: 0F 6F. D1 + psrld xmm1, 7 ; 09C5 _ 66: 0F 72. D1, 07 + pslld xmm2, 25 ; 09CA _ 66: 0F 72. F2, 19 + por xmm2, xmm1 ; 09CF _ 66: 0F EB. D1 + movdqa xmm1, xmm0 ; 09D3 _ 66: 0F 6F. C8 + psrld xmm0, 1 ; 09D7 _ 66: 0F 72. D0, 01 + pxor xmm2, xmm3 ; 09DC _ 66: 0F EF. D3 + pslld xmm1, 31 ; 09E0 _ 66: 0F 72. F1, 1F + por xmm1, xmm0 ; 09E5 _ 66: 0F EB. 
C8 + movdqa xmm0, xmm5 ; 09E9 _ 66: 0F 6F. C5 + pxor xmm1, xmm5 ; 09ED _ 66: 0F EF. CD + pxor xmm1, xmm3 ; 09F1 _ 66: 0F EF. CB + pslld xmm0, 3 ; 09F5 _ 66: 0F 72. F0, 03 + pxor xmm2, xmm0 ; 09FA _ 66: 0F EF. D0 + movdqa xmm0, xmm3 ; 09FE _ 66: 0F 6F. C3 + psrld xmm3, 3 ; 0A02 _ 66: 0F 72. D3, 03 + pslld xmm0, 29 ; 0A07 _ 66: 0F 72. F0, 1D + por xmm0, xmm3 ; 0A0C _ 66: 0F EB. C3 + movdqa xmm3, xmm5 ; 0A10 _ 66: 0F 6F. DD + psrld xmm5, 13 ; 0A14 _ 66: 0F 72. D5, 0D + pxor xmm0, xmm6 ; 0A19 _ 66: 0F EF. C6 + pslld xmm3, 19 ; 0A1D _ 66: 0F 72. F3, 13 + por xmm3, xmm5 ; 0A22 _ 66: 0F EB. DD + movdqa xmm5, xmm1 ; 0A26 _ 66: 0F 6F. E9 + pxor xmm1, xmm6 ; 0A2A _ 66: 0F EF. CE + por xmm5, xmm3 ; 0A2E _ 66: 0F EB. EB + pxor xmm5, xmm0 ; 0A32 _ 66: 0F EF. E8 + por xmm0, xmm1 ; 0A36 _ 66: 0F EB. C1 + pxor xmm5, xmm2 ; 0A3A _ 66: 0F EF. EA + pxor xmm3, xmm1 ; 0A3E _ 66: 0F EF. D9 + pxor xmm0, xmm3 ; 0A42 _ 66: 0F EF. C3 + pand xmm3, xmm2 ; 0A46 _ 66: 0F DB. DA + pxor xmm1, xmm3 ; 0A4A _ 66: 0F EF. CB + por xmm3, xmm5 ; 0A4E _ 66: 0F EB. DD + pxor xmm3, xmm0 ; 0A52 _ 66: 0F EF. D8 + pxor xmm2, xmm1 ; 0A56 _ 66: 0F EF. D1 + pxor xmm0, xmm5 ; 0A5A _ 66: 0F EF. C5 + pxor xmm2, xmm3 ; 0A5E _ 66: 0F EF. D3 + pxor xmm2, xmm5 ; 0A62 _ 66: 0F EF. D5 + pand xmm0, xmm2 ; 0A66 _ 66: 0F DB. C2 + pxor xmm1, xmm0 ; 0A6A _ 66: 0F EF. C8 + movd xmm0, dword [ecx+2C00H] ; 0A6E _ 66: 0F 6E. 81, 00002C00 + pshufd xmm0, xmm0, 0 ; 0A76 _ 66: 0F 70. C0, 00 + pxor xmm3, xmm0 ; 0A7B _ 66: 0F EF. D8 + movd xmm0, dword [ecx+2C04H] ; 0A7F _ 66: 0F 6E. 81, 00002C04 + pshufd xmm0, xmm0, 0 ; 0A87 _ 66: 0F 70. C0, 00 + pxor xmm1, xmm0 ; 0A8C _ 66: 0F EF. C8 + movd xmm0, dword [ecx+2C08H] ; 0A90 _ 66: 0F 6E. 81, 00002C08 + pshufd xmm0, xmm0, 0 ; 0A98 _ 66: 0F 70. C0, 00 + pxor xmm5, xmm0 ; 0A9D _ 66: 0F EF. E8 + movd xmm0, dword [ecx+2C0CH] ; 0AA1 _ 66: 0F 6E. 81, 00002C0C + pshufd xmm0, xmm0, 0 ; 0AA9 _ 66: 0F 70. C0, 00 + pxor xmm2, xmm0 ; 0AAE _ 66: 0F EF. D0 + movdqa xmm0, xmm5 ; 0AB2 _ 66: 0F 6F. C5 + psrld xmm5, 22 ; 0AB6 _ 66: 0F 72. D5, 16 + pslld xmm0, 10 ; 0ABB _ 66: 0F 72. F0, 0A + por xmm0, xmm5 ; 0AC0 _ 66: 0F EB. C5 + movdqa xmm5, xmm3 ; 0AC4 _ 66: 0F 6F. EB + psrld xmm3, 5 ; 0AC8 _ 66: 0F 72. D3, 05 + pxor xmm0, xmm2 ; 0ACD _ 66: 0F EF. C2 + pslld xmm5, 27 ; 0AD1 _ 66: 0F 72. F5, 1B + por xmm5, xmm3 ; 0AD6 _ 66: 0F EB. EB + movdqa xmm3, xmm1 ; 0ADA _ 66: 0F 6F. D9 + pxor xmm5, xmm1 ; 0ADE _ 66: 0F EF. E9 + pxor xmm5, xmm2 ; 0AE2 _ 66: 0F EF. EA + pslld xmm3, 7 ; 0AE6 _ 66: 0F 72. F3, 07 + pxor xmm0, xmm3 ; 0AEB _ 66: 0F EF. C3 + movdqa xmm3, xmm2 ; 0AEF _ 66: 0F 6F. DA + psrld xmm2, 7 ; 0AF3 _ 66: 0F 72. D2, 07 + pslld xmm3, 25 ; 0AF8 _ 66: 0F 72. F3, 19 + por xmm3, xmm2 ; 0AFD _ 66: 0F EB. DA + movdqa xmm2, xmm1 ; 0B01 _ 66: 0F 6F. D1 + psrld xmm1, 1 ; 0B05 _ 66: 0F 72. D1, 01 + pxor xmm3, xmm0 ; 0B0A _ 66: 0F EF. D8 + pslld xmm2, 31 ; 0B0E _ 66: 0F 72. F2, 1F + por xmm2, xmm1 ; 0B13 _ 66: 0F EB. D1 + movdqa xmm1, xmm5 ; 0B17 _ 66: 0F 6F. CD + pxor xmm2, xmm5 ; 0B1B _ 66: 0F EF. D5 + pxor xmm2, xmm0 ; 0B1F _ 66: 0F EF. D0 + pslld xmm1, 3 ; 0B23 _ 66: 0F 72. F1, 03 + pxor xmm3, xmm1 ; 0B28 _ 66: 0F EF. D9 + movdqa xmm1, xmm0 ; 0B2C _ 66: 0F 6F. C8 + psrld xmm0, 3 ; 0B30 _ 66: 0F 72. D0, 03 + pslld xmm1, 29 ; 0B35 _ 66: 0F 72. F1, 1D + por xmm1, xmm0 ; 0B3A _ 66: 0F EB. C8 + movdqa xmm0, xmm5 ; 0B3E _ 66: 0F 6F. C5 + psrld xmm5, 13 ; 0B42 _ 66: 0F 72. D5, 0D + pslld xmm0, 19 ; 0B47 _ 66: 0F 72. F0, 13 + por xmm0, xmm5 ; 0B4C _ 66: 0F EB. C5 + movdqa xmm5, xmm1 ; 0B50 _ 66: 0F 6F. E9 + por xmm1, xmm3 ; 0B54 _ 66: 0F EB. 
CB + pxor xmm5, xmm0 ; 0B58 _ 66: 0F EF. E8 + pand xmm0, xmm3 ; 0B5C _ 66: 0F DB. C3 + pxor xmm5, xmm6 ; 0B60 _ 66: 0F EF. EE + pxor xmm3, xmm2 ; 0B64 _ 66: 0F EF. DA + por xmm2, xmm0 ; 0B68 _ 66: 0F EB. D0 + pxor xmm0, xmm5 ; 0B6C _ 66: 0F EF. C5 + pand xmm5, xmm1 ; 0B70 _ 66: 0F DB. E9 + pand xmm3, xmm1 ; 0B74 _ 66: 0F DB. D9 + pxor xmm2, xmm5 ; 0B78 _ 66: 0F EF. D5 + pxor xmm5, xmm0 ; 0B7C _ 66: 0F EF. E8 + por xmm0, xmm5 ; 0B80 _ 66: 0F EB. C5 + pxor xmm1, xmm2 ; 0B84 _ 66: 0F EF. CA + pxor xmm0, xmm3 ; 0B88 _ 66: 0F EF. C3 + pxor xmm3, xmm1 ; 0B8C _ 66: 0F EF. D9 + por xmm1, xmm0 ; 0B90 _ 66: 0F EB. C8 + pxor xmm3, xmm5 ; 0B94 _ 66: 0F EF. DD + pxor xmm1, xmm5 ; 0B98 _ 66: 0F EF. CD + movd xmm5, dword [ecx+2BF0H] ; 0B9C _ 66: 0F 6E. A9, 00002BF0 + pshufd xmm5, xmm5, 0 ; 0BA4 _ 66: 0F 70. ED, 00 + pxor xmm3, xmm5 ; 0BA9 _ 66: 0F EF. DD + movd xmm5, dword [ecx+2BF4H] ; 0BAD _ 66: 0F 6E. A9, 00002BF4 + pshufd xmm5, xmm5, 0 ; 0BB5 _ 66: 0F 70. ED, 00 + pxor xmm0, xmm5 ; 0BBA _ 66: 0F EF. C5 + movd xmm5, dword [ecx+2BF8H] ; 0BBE _ 66: 0F 6E. A9, 00002BF8 + pshufd xmm5, xmm5, 0 ; 0BC6 _ 66: 0F 70. ED, 00 + pxor xmm2, xmm5 ; 0BCB _ 66: 0F EF. D5 + movd xmm5, dword [ecx+2BFCH] ; 0BCF _ 66: 0F 6E. A9, 00002BFC + pshufd xmm5, xmm5, 0 ; 0BD7 _ 66: 0F 70. ED, 00 + pxor xmm1, xmm5 ; 0BDC _ 66: 0F EF. CD + movdqa xmm5, xmm2 ; 0BE0 _ 66: 0F 6F. EA + psrld xmm2, 22 ; 0BE4 _ 66: 0F 72. D2, 16 + pslld xmm5, 10 ; 0BE9 _ 66: 0F 72. F5, 0A + por xmm5, xmm2 ; 0BEE _ 66: 0F EB. EA + movdqa xmm2, xmm3 ; 0BF2 _ 66: 0F 6F. D3 + psrld xmm3, 5 ; 0BF6 _ 66: 0F 72. D3, 05 + pxor xmm5, xmm1 ; 0BFB _ 66: 0F EF. E9 + pslld xmm2, 27 ; 0BFF _ 66: 0F 72. F2, 1B + por xmm2, xmm3 ; 0C04 _ 66: 0F EB. D3 + movdqa xmm3, xmm0 ; 0C08 _ 66: 0F 6F. D8 + pxor xmm2, xmm0 ; 0C0C _ 66: 0F EF. D0 + pxor xmm2, xmm1 ; 0C10 _ 66: 0F EF. D1 + pslld xmm3, 7 ; 0C14 _ 66: 0F 72. F3, 07 + pxor xmm5, xmm3 ; 0C19 _ 66: 0F EF. EB + movdqa xmm3, xmm1 ; 0C1D _ 66: 0F 6F. D9 + psrld xmm1, 7 ; 0C21 _ 66: 0F 72. D1, 07 + pslld xmm3, 25 ; 0C26 _ 66: 0F 72. F3, 19 + por xmm3, xmm1 ; 0C2B _ 66: 0F EB. D9 + movdqa xmm1, xmm0 ; 0C2F _ 66: 0F 6F. C8 + psrld xmm0, 1 ; 0C33 _ 66: 0F 72. D0, 01 + pxor xmm3, xmm5 ; 0C38 _ 66: 0F EF. DD + pslld xmm1, 31 ; 0C3C _ 66: 0F 72. F1, 1F + por xmm1, xmm0 ; 0C41 _ 66: 0F EB. C8 + movdqa xmm0, xmm2 ; 0C45 _ 66: 0F 6F. C2 + pxor xmm1, xmm2 ; 0C49 _ 66: 0F EF. CA + pxor xmm1, xmm5 ; 0C4D _ 66: 0F EF. CD + pslld xmm0, 3 ; 0C51 _ 66: 0F 72. F0, 03 + pxor xmm3, xmm0 ; 0C56 _ 66: 0F EF. D8 + movdqa xmm0, xmm5 ; 0C5A _ 66: 0F 6F. C5 + psrld xmm5, 3 ; 0C5E _ 66: 0F 72. D5, 03 + pslld xmm0, 29 ; 0C63 _ 66: 0F 72. F0, 1D + por xmm0, xmm5 ; 0C68 _ 66: 0F EB. C5 + movdqa xmm5, xmm2 ; 0C6C _ 66: 0F 6F. EA + psrld xmm2, 13 ; 0C70 _ 66: 0F 72. D2, 0D + pslld xmm5, 19 ; 0C75 _ 66: 0F 72. F5, 13 + por xmm5, xmm2 ; 0C7A _ 66: 0F EB. EA + movdqa xmm2, xmm0 ; 0C7E _ 66: 0F 6F. D0 + pxor xmm5, xmm0 ; 0C82 _ 66: 0F EF. E8 + pxor xmm0, xmm3 ; 0C86 _ 66: 0F EF. C3 + pand xmm2, xmm5 ; 0C8A _ 66: 0F DB. D5 + pxor xmm2, xmm6 ; 0C8E _ 66: 0F EF. D6 + pxor xmm3, xmm1 ; 0C92 _ 66: 0F EF. D9 + pxor xmm2, xmm3 ; 0C96 _ 66: 0F EF. D3 + por xmm0, xmm5 ; 0C9A _ 66: 0F EB. C5 + pxor xmm5, xmm2 ; 0C9E _ 66: 0F EF. EA + pxor xmm3, xmm0 ; 0CA2 _ 66: 0F EF. D8 + pxor xmm0, xmm1 ; 0CA6 _ 66: 0F EF. C1 + pand xmm1, xmm3 ; 0CAA _ 66: 0F DB. CB + pxor xmm1, xmm5 ; 0CAE _ 66: 0F EF. CD + pxor xmm5, xmm3 ; 0CB2 _ 66: 0F EF. EB + por xmm5, xmm2 ; 0CB6 _ 66: 0F EB. EA + pxor xmm3, xmm1 ; 0CBA _ 66: 0F EF. D9 + pxor xmm0, xmm5 ; 0CBE _ 66: 0F EF. 
C5 + movd xmm5, dword [ecx+2BE0H] ; 0CC2 _ 66: 0F 6E. A9, 00002BE0 + pshufd xmm5, xmm5, 0 ; 0CCA _ 66: 0F 70. ED, 00 + pxor xmm1, xmm5 ; 0CCF _ 66: 0F EF. CD + movd xmm5, dword [ecx+2BE4H] ; 0CD3 _ 66: 0F 6E. A9, 00002BE4 + pshufd xmm5, xmm5, 0 ; 0CDB _ 66: 0F 70. ED, 00 + pxor xmm2, xmm5 ; 0CE0 _ 66: 0F EF. D5 + movd xmm5, dword [ecx+2BE8H] ; 0CE4 _ 66: 0F 6E. A9, 00002BE8 + pshufd xmm5, xmm5, 0 ; 0CEC _ 66: 0F 70. ED, 00 + pxor xmm0, xmm5 ; 0CF1 _ 66: 0F EF. C5 + movd xmm5, dword [ecx+2BECH] ; 0CF5 _ 66: 0F 6E. A9, 00002BEC + pshufd xmm5, xmm5, 0 ; 0CFD _ 66: 0F 70. ED, 00 + pxor xmm3, xmm5 ; 0D02 _ 66: 0F EF. DD + movdqa xmm5, xmm0 ; 0D06 _ 66: 0F 6F. E8 + psrld xmm0, 22 ; 0D0A _ 66: 0F 72. D0, 16 + pslld xmm5, 10 ; 0D0F _ 66: 0F 72. F5, 0A + por xmm5, xmm0 ; 0D14 _ 66: 0F EB. E8 + movdqa xmm0, xmm1 ; 0D18 _ 66: 0F 6F. C1 + psrld xmm1, 5 ; 0D1C _ 66: 0F 72. D1, 05 + pxor xmm5, xmm3 ; 0D21 _ 66: 0F EF. EB + pslld xmm0, 27 ; 0D25 _ 66: 0F 72. F0, 1B + por xmm0, xmm1 ; 0D2A _ 66: 0F EB. C1 + movdqa xmm1, xmm2 ; 0D2E _ 66: 0F 6F. CA + pxor xmm0, xmm2 ; 0D32 _ 66: 0F EF. C2 + pxor xmm0, xmm3 ; 0D36 _ 66: 0F EF. C3 + pslld xmm1, 7 ; 0D3A _ 66: 0F 72. F1, 07 + pxor xmm5, xmm1 ; 0D3F _ 66: 0F EF. E9 + movdqa xmm1, xmm3 ; 0D43 _ 66: 0F 6F. CB + psrld xmm3, 7 ; 0D47 _ 66: 0F 72. D3, 07 + pslld xmm1, 25 ; 0D4C _ 66: 0F 72. F1, 19 + por xmm1, xmm3 ; 0D51 _ 66: 0F EB. CB + movdqa xmm3, xmm2 ; 0D55 _ 66: 0F 6F. DA + psrld xmm2, 1 ; 0D59 _ 66: 0F 72. D2, 01 + pxor xmm1, xmm5 ; 0D5E _ 66: 0F EF. CD + pslld xmm3, 31 ; 0D62 _ 66: 0F 72. F3, 1F + por xmm3, xmm2 ; 0D67 _ 66: 0F EB. DA + movdqa xmm2, xmm0 ; 0D6B _ 66: 0F 6F. D0 + pxor xmm3, xmm0 ; 0D6F _ 66: 0F EF. D8 + pxor xmm3, xmm5 ; 0D73 _ 66: 0F EF. DD + pslld xmm2, 3 ; 0D77 _ 66: 0F 72. F2, 03 + pxor xmm1, xmm2 ; 0D7C _ 66: 0F EF. CA + movdqa xmm2, xmm5 ; 0D80 _ 66: 0F 6F. D5 + psrld xmm5, 3 ; 0D84 _ 66: 0F 72. D5, 03 + pxor xmm3, xmm6 ; 0D89 _ 66: 0F EF. DE + pslld xmm2, 29 ; 0D8D _ 66: 0F 72. F2, 1D + por xmm2, xmm5 ; 0D92 _ 66: 0F EB. D5 + movdqa xmm5, xmm0 ; 0D96 _ 66: 0F 6F. E8 + psrld xmm0, 13 ; 0D9A _ 66: 0F 72. D0, 0D + pxor xmm2, xmm3 ; 0D9F _ 66: 0F EF. D3 + pslld xmm5, 19 ; 0DA3 _ 66: 0F 72. F5, 13 + por xmm5, xmm0 ; 0DA8 _ 66: 0F EB. E8 + movdqa xmm0, xmm1 ; 0DAC _ 66: 0F 6F. C1 + por xmm0, xmm5 ; 0DB0 _ 66: 0F EB. C5 + pxor xmm0, xmm2 ; 0DB4 _ 66: 0F EF. C2 + por xmm2, xmm3 ; 0DB8 _ 66: 0F EB. D3 + pand xmm2, xmm5 ; 0DBC _ 66: 0F DB. D5 + pxor xmm1, xmm0 ; 0DC0 _ 66: 0F EF. C8 + pxor xmm2, xmm1 ; 0DC4 _ 66: 0F EF. D1 + por xmm1, xmm5 ; 0DC8 _ 66: 0F EB. CD + pxor xmm1, xmm3 ; 0DCC _ 66: 0F EF. CB + pand xmm3, xmm2 ; 0DD0 _ 66: 0F DB. DA + pxor xmm3, xmm0 ; 0DD4 _ 66: 0F EF. D8 + pxor xmm1, xmm2 ; 0DD8 _ 66: 0F EF. CA + pand xmm0, xmm1 ; 0DDC _ 66: 0F DB. C1 + pxor xmm1, xmm3 ; 0DE0 _ 66: 0F EF. CB + pxor xmm0, xmm1 ; 0DE4 _ 66: 0F EF. C1 + pxor xmm1, xmm6 ; 0DE8 _ 66: 0F EF. CE + pxor xmm0, xmm5 ; 0DEC _ 66: 0F EF. C5 + movd xmm5, dword [ecx+2BD0H] ; 0DF0 _ 66: 0F 6E. A9, 00002BD0 + pshufd xmm5, xmm5, 0 ; 0DF8 _ 66: 0F 70. ED, 00 + pxor xmm3, xmm5 ; 0DFD _ 66: 0F EF. DD + movd xmm5, dword [ecx+2BD4H] ; 0E01 _ 66: 0F 6E. A9, 00002BD4 + pshufd xmm5, xmm5, 0 ; 0E09 _ 66: 0F 70. ED, 00 + pxor xmm1, xmm5 ; 0E0E _ 66: 0F EF. CD + movd xmm5, dword [ecx+2BD8H] ; 0E12 _ 66: 0F 6E. A9, 00002BD8 + pshufd xmm5, xmm5, 0 ; 0E1A _ 66: 0F 70. ED, 00 + pxor xmm0, xmm5 ; 0E1F _ 66: 0F EF. C5 + movd xmm5, dword [ecx+2BDCH] ; 0E23 _ 66: 0F 6E. A9, 00002BDC + pshufd xmm5, xmm5, 0 ; 0E2B _ 66: 0F 70. ED, 00 + pxor xmm2, xmm5 ; 0E30 _ 66: 0F EF. 
D5 + movdqa xmm5, xmm0 ; 0E34 _ 66: 0F 6F. E8 + psrld xmm0, 22 ; 0E38 _ 66: 0F 72. D0, 16 + pslld xmm5, 10 ; 0E3D _ 66: 0F 72. F5, 0A + por xmm5, xmm0 ; 0E42 _ 66: 0F EB. E8 + movdqa xmm0, xmm3 ; 0E46 _ 66: 0F 6F. C3 + psrld xmm3, 5 ; 0E4A _ 66: 0F 72. D3, 05 + pxor xmm5, xmm2 ; 0E4F _ 66: 0F EF. EA + pslld xmm0, 27 ; 0E53 _ 66: 0F 72. F0, 1B + por xmm0, xmm3 ; 0E58 _ 66: 0F EB. C3 + movdqa xmm3, xmm1 ; 0E5C _ 66: 0F 6F. D9 + pxor xmm0, xmm1 ; 0E60 _ 66: 0F EF. C1 + pxor xmm0, xmm2 ; 0E64 _ 66: 0F EF. C2 + pslld xmm3, 7 ; 0E68 _ 66: 0F 72. F3, 07 + pxor xmm5, xmm3 ; 0E6D _ 66: 0F EF. EB + movdqa xmm3, xmm2 ; 0E71 _ 66: 0F 6F. DA + psrld xmm2, 7 ; 0E75 _ 66: 0F 72. D2, 07 + pslld xmm3, 25 ; 0E7A _ 66: 0F 72. F3, 19 + por xmm3, xmm2 ; 0E7F _ 66: 0F EB. DA + movdqa xmm2, xmm1 ; 0E83 _ 66: 0F 6F. D1 + psrld xmm1, 1 ; 0E87 _ 66: 0F 72. D1, 01 + pxor xmm3, xmm5 ; 0E8C _ 66: 0F EF. DD + pslld xmm2, 31 ; 0E90 _ 66: 0F 72. F2, 1F + por xmm2, xmm1 ; 0E95 _ 66: 0F EB. D1 + movdqa xmm1, xmm0 ; 0E99 _ 66: 0F 6F. C8 + pxor xmm2, xmm0 ; 0E9D _ 66: 0F EF. D0 + pxor xmm2, xmm5 ; 0EA1 _ 66: 0F EF. D5 + pslld xmm1, 3 ; 0EA5 _ 66: 0F 72. F1, 03 + pxor xmm3, xmm1 ; 0EAA _ 66: 0F EF. D9 + movdqa xmm1, xmm5 ; 0EAE _ 66: 0F 6F. CD + psrld xmm5, 3 ; 0EB2 _ 66: 0F 72. D5, 03 + pslld xmm1, 29 ; 0EB7 _ 66: 0F 72. F1, 1D + por xmm1, xmm5 ; 0EBC _ 66: 0F EB. CD + movdqa xmm5, xmm0 ; 0EC0 _ 66: 0F 6F. E8 + psrld xmm0, 13 ; 0EC4 _ 66: 0F 72. D0, 0D + pslld xmm5, 19 ; 0EC9 _ 66: 0F 72. F5, 13 + por xmm5, xmm0 ; 0ECE _ 66: 0F EB. E8 + movdqa xmm0, xmm1 ; 0ED2 _ 66: 0F 6F. C1 + pand xmm0, xmm3 ; 0ED6 _ 66: 0F DB. C3 + pxor xmm0, xmm2 ; 0EDA _ 66: 0F EF. C2 + por xmm2, xmm3 ; 0EDE _ 66: 0F EB. D3 + pand xmm2, xmm5 ; 0EE2 _ 66: 0F DB. D5 + pxor xmm1, xmm0 ; 0EE6 _ 66: 0F EF. C8 + pxor xmm1, xmm2 ; 0EEA _ 66: 0F EF. CA + pand xmm2, xmm0 ; 0EEE _ 66: 0F DB. D0 + pxor xmm5, xmm6 ; 0EF2 _ 66: 0F EF. EE + pxor xmm3, xmm1 ; 0EF6 _ 66: 0F EF. D9 + pxor xmm2, xmm3 ; 0EFA _ 66: 0F EF. D3 + pand xmm3, xmm5 ; 0EFE _ 66: 0F DB. DD + pxor xmm3, xmm0 ; 0F02 _ 66: 0F EF. D8 + movd xmm6, dword [ecx+2BC0H] ; 0F06 _ 66: 0F 6E. B1, 00002BC0 + pxor xmm5, xmm2 ; 0F0E _ 66: 0F EF. EA + pand xmm0, xmm5 ; 0F12 _ 66: 0F DB. C5 + pxor xmm3, xmm5 ; 0F16 _ 66: 0F EF. DD + pxor xmm0, xmm1 ; 0F1A _ 66: 0F EF. C1 + por xmm0, xmm3 ; 0F1E _ 66: 0F EB. C3 + pxor xmm3, xmm5 ; 0F22 _ 66: 0F EF. DD + pxor xmm0, xmm2 ; 0F26 _ 66: 0F EF. C2 + pshufd xmm6, xmm6, 0 ; 0F2A _ 66: 0F 70. F6, 00 + pxor xmm5, xmm6 ; 0F2F _ 66: 0F EF. EE + movd xmm2, dword [ecx+2BC4H] ; 0F33 _ 66: 0F 6E. 91, 00002BC4 + pshufd xmm6, xmm2, 0 ; 0F3B _ 66: 0F 70. F2, 00 + pxor xmm3, xmm6 ; 0F40 _ 66: 0F EF. DE + movd xmm2, dword [ecx+2BC8H] ; 0F44 _ 66: 0F 6E. 91, 00002BC8 + pshufd xmm6, xmm2, 0 ; 0F4C _ 66: 0F 70. F2, 00 + movd xmm2, dword [ecx+2BCCH] ; 0F51 _ 66: 0F 6E. 91, 00002BCC + pxor xmm0, xmm6 ; 0F59 _ 66: 0F EF. C6 + pshufd xmm6, xmm2, 0 ; 0F5D _ 66: 0F 70. F2, 00 + pxor xmm1, xmm6 ; 0F62 _ 66: 0F EF. CE + movdqa xmm6, xmm0 ; 0F66 _ 66: 0F 6F. F0 + movdqa xmm2, xmm1 ; 0F6A _ 66: 0F 6F. D1 + psrld xmm0, 22 ; 0F6E _ 66: 0F 72. D0, 16 + pslld xmm6, 10 ; 0F73 _ 66: 0F 72. F6, 0A + por xmm6, xmm0 ; 0F78 _ 66: 0F EB. F0 + movdqa xmm0, xmm5 ; 0F7C _ 66: 0F 6F. C5 + psrld xmm5, 5 ; 0F80 _ 66: 0F 72. D5, 05 + pxor xmm6, xmm1 ; 0F85 _ 66: 0F EF. F1 + pslld xmm0, 27 ; 0F89 _ 66: 0F 72. F0, 1B + por xmm0, xmm5 ; 0F8E _ 66: 0F EB. C5 + movdqa xmm5, xmm3 ; 0F92 _ 66: 0F 6F. EB + pxor xmm0, xmm3 ; 0F96 _ 66: 0F EF. C3 + pxor xmm0, xmm1 ; 0F9A _ 66: 0F EF. C1 + pslld xmm5, 7 ; 0F9E _ 66: 0F 72. 
F5, 07 + pxor xmm6, xmm5 ; 0FA3 _ 66: 0F EF. F5 + movdqa xmm5, xmm6 ; 0FA7 _ 66: 0F 6F. EE + pslld xmm2, 25 ; 0FAB _ 66: 0F 72. F2, 19 + psrld xmm1, 7 ; 0FB0 _ 66: 0F 72. D1, 07 + por xmm2, xmm1 ; 0FB5 _ 66: 0F EB. D1 + movdqa xmm1, xmm3 ; 0FB9 _ 66: 0F 6F. CB + psrld xmm3, 1 ; 0FBD _ 66: 0F 72. D3, 01 + pxor xmm2, xmm6 ; 0FC2 _ 66: 0F EF. D6 + pslld xmm1, 31 ; 0FC6 _ 66: 0F 72. F1, 1F + por xmm1, xmm3 ; 0FCB _ 66: 0F EB. CB + movdqa xmm3, xmm0 ; 0FCF _ 66: 0F 6F. D8 + pxor xmm1, xmm0 ; 0FD3 _ 66: 0F EF. C8 + pxor xmm1, xmm6 ; 0FD7 _ 66: 0F EF. CE + pslld xmm3, 3 ; 0FDB _ 66: 0F 72. F3, 03 + pxor xmm2, xmm3 ; 0FE0 _ 66: 0F EF. D3 + movdqa xmm3, xmm0 ; 0FE4 _ 66: 0F 6F. D8 + pslld xmm5, 29 ; 0FE8 _ 66: 0F 72. F5, 1D + psrld xmm6, 3 ; 0FED _ 66: 0F 72. D6, 03 + por xmm5, xmm6 ; 0FF2 _ 66: 0F EB. EE + pslld xmm3, 19 ; 0FF6 _ 66: 0F 72. F3, 13 + movd xmm6, dword [ecx+2BB0H] ; 0FFB _ 66: 0F 6E. B1, 00002BB0 + movdqa oword [esp+60H], xmm7 ; 1003 _ 66: 0F 7F. 7C 24, 60 + movdqa xmm7, xmm5 ; 1009 _ 66: 0F 6F. FD + psrld xmm0, 13 ; 100D _ 66: 0F 72. D0, 0D + por xmm3, xmm0 ; 1012 _ 66: 0F EB. D8 + pxor xmm7, xmm1 ; 1016 _ 66: 0F EF. F9 + pxor xmm3, xmm7 ; 101A _ 66: 0F EF. DF + pand xmm5, xmm7 ; 101E _ 66: 0F DB. EF + pxor xmm5, xmm3 ; 1022 _ 66: 0F EF. EB + pand xmm3, xmm1 ; 1026 _ 66: 0F DB. D9 + pxor xmm1, xmm2 ; 102A _ 66: 0F EF. CA + por xmm2, xmm5 ; 102E _ 66: 0F EB. D5 + pxor xmm7, xmm2 ; 1032 _ 66: 0F EF. FA + pxor xmm3, xmm2 ; 1036 _ 66: 0F EF. DA + pxor xmm1, xmm5 ; 103A _ 66: 0F EF. CD + pand xmm2, xmm7 ; 103E _ 66: 0F DB. D7 + pxor xmm2, xmm1 ; 1042 _ 66: 0F EF. D1 + pxor xmm1, xmm3 ; 1046 _ 66: 0F EF. CB + por xmm1, xmm7 ; 104A _ 66: 0F EB. CF + pxor xmm3, xmm2 ; 104E _ 66: 0F EF. DA + pxor xmm1, xmm5 ; 1052 _ 66: 0F EF. CD + pxor xmm3, xmm1 ; 1056 _ 66: 0F EF. D9 + pshufd xmm0, xmm6, 0 ; 105A _ 66: 0F 70. C6, 00 + movd xmm6, dword [ecx+2BB4H] ; 105F _ 66: 0F 6E. B1, 00002BB4 + pxor xmm7, xmm0 ; 1067 _ 66: 0F EF. F8 + pshufd xmm5, xmm6, 0 ; 106B _ 66: 0F 70. EE, 00 + pxor xmm1, xmm5 ; 1070 _ 66: 0F EF. CD + movd xmm0, dword [ecx+2BB8H] ; 1074 _ 66: 0F 6E. 81, 00002BB8 + pshufd xmm6, xmm0, 0 ; 107C _ 66: 0F 70. F0, 00 + pxor xmm2, xmm6 ; 1081 _ 66: 0F EF. D6 + movdqa xmm6, xmm2 ; 1085 _ 66: 0F 6F. F2 + movd xmm5, dword [ecx+2BBCH] ; 1089 _ 66: 0F 6E. A9, 00002BBC + pshufd xmm0, xmm5, 0 ; 1091 _ 66: 0F 70. C5, 00 + pxor xmm3, xmm0 ; 1096 _ 66: 0F EF. D8 + movdqa xmm0, xmm7 ; 109A _ 66: 0F 6F. C7 + movdqa xmm5, xmm3 ; 109E _ 66: 0F 6F. EB + pslld xmm6, 10 ; 10A2 _ 66: 0F 72. F6, 0A + psrld xmm2, 22 ; 10A7 _ 66: 0F 72. D2, 16 + por xmm6, xmm2 ; 10AC _ 66: 0F EB. F2 + movdqa xmm2, xmm1 ; 10B0 _ 66: 0F 6F. D1 + pslld xmm0, 27 ; 10B4 _ 66: 0F 72. F0, 1B + psrld xmm7, 5 ; 10B9 _ 66: 0F 72. D7, 05 + por xmm0, xmm7 ; 10BE _ 66: 0F EB. C7 + pxor xmm6, xmm3 ; 10C2 _ 66: 0F EF. F3 + pslld xmm2, 7 ; 10C6 _ 66: 0F 72. F2, 07 + pxor xmm6, xmm2 ; 10CB _ 66: 0F EF. F2 + pxor xmm0, xmm1 ; 10CF _ 66: 0F EF. C1 + pxor xmm0, xmm3 ; 10D3 _ 66: 0F EF. C3 + movdqa xmm2, xmm0 ; 10D7 _ 66: 0F 6F. D0 + pslld xmm5, 25 ; 10DB _ 66: 0F 72. F5, 19 + psrld xmm3, 7 ; 10E0 _ 66: 0F 72. D3, 07 + por xmm5, xmm3 ; 10E5 _ 66: 0F EB. EB + movdqa xmm3, xmm1 ; 10E9 _ 66: 0F 6F. D9 + psrld xmm1, 1 ; 10ED _ 66: 0F 72. D1, 01 + pxor xmm5, xmm6 ; 10F2 _ 66: 0F EF. EE + pslld xmm3, 31 ; 10F6 _ 66: 0F 72. F3, 1F + por xmm3, xmm1 ; 10FB _ 66: 0F EB. D9 + movdqa xmm1, xmm0 ; 10FF _ 66: 0F 6F. C8 + pxor xmm3, xmm0 ; 1103 _ 66: 0F EF. D8 + pxor xmm3, xmm6 ; 1107 _ 66: 0F EF. DE + pslld xmm1, 3 ; 110B _ 66: 0F 72. 
F1, 03 + pxor xmm5, xmm1 ; 1110 _ 66: 0F EF. E9 + movdqa xmm1, xmm6 ; 1114 _ 66: 0F 6F. CE + psrld xmm6, 3 ; 1118 _ 66: 0F 72. D6, 03 + pslld xmm2, 19 ; 111D _ 66: 0F 72. F2, 13 + pslld xmm1, 29 ; 1122 _ 66: 0F 72. F1, 1D + por xmm1, xmm6 ; 1127 _ 66: 0F EB. CE + psrld xmm0, 13 ; 112B _ 66: 0F 72. D0, 0D + por xmm2, xmm0 ; 1130 _ 66: 0F EB. D0 + pxor xmm1, xmm5 ; 1134 _ 66: 0F EF. CD + pxor xmm5, xmm2 ; 1138 _ 66: 0F EF. EA + movdqa xmm7, xmm5 ; 113C _ 66: 0F 6F. FD + pand xmm7, xmm1 ; 1140 _ 66: 0F DB. F9 + pxor xmm7, xmm3 ; 1144 _ 66: 0F EF. FB + por xmm3, xmm1 ; 1148 _ 66: 0F EB. D9 + pxor xmm3, xmm5 ; 114C _ 66: 0F EF. DD + pand xmm5, xmm7 ; 1150 _ 66: 0F DB. EF + pxor xmm1, xmm7 ; 1154 _ 66: 0F EF. CF + pand xmm5, xmm2 ; 1158 _ 66: 0F DB. EA + pxor xmm5, xmm1 ; 115C _ 66: 0F EF. E9 + pand xmm1, xmm3 ; 1160 _ 66: 0F DB. CB + por xmm1, xmm2 ; 1164 _ 66: 0F EB. CA + pcmpeqd xmm6, xmm6 ; 1168 _ 66: 0F 76. F6 + pxor xmm7, xmm6 ; 116C _ 66: 0F EF. FE + movdqa xmm0, xmm7 ; 1170 _ 66: 0F 6F. C7 + pxor xmm1, xmm7 ; 1174 _ 66: 0F EF. CF + pxor xmm2, xmm7 ; 1178 _ 66: 0F EF. D7 + pxor xmm0, xmm5 ; 117C _ 66: 0F EF. C5 + pand xmm2, xmm3 ; 1180 _ 66: 0F DB. D3 + pxor xmm0, xmm2 ; 1184 _ 66: 0F EF. C2 + movd xmm2, dword [ecx+2BA0H] ; 1188 _ 66: 0F 6E. 91, 00002BA0 + pshufd xmm7, xmm2, 0 ; 1190 _ 66: 0F 70. FA, 00 + pxor xmm3, xmm7 ; 1195 _ 66: 0F EF. DF + movd xmm2, dword [ecx+2BA4H] ; 1199 _ 66: 0F 6E. 91, 00002BA4 + pshufd xmm7, xmm2, 0 ; 11A1 _ 66: 0F 70. FA, 00 + pxor xmm5, xmm7 ; 11A6 _ 66: 0F EF. EF + movd xmm2, dword [ecx+2BA8H] ; 11AA _ 66: 0F 6E. 91, 00002BA8 + pshufd xmm7, xmm2, 0 ; 11B2 _ 66: 0F 70. FA, 00 + pxor xmm1, xmm7 ; 11B7 _ 66: 0F EF. CF + movd xmm2, dword [ecx+2BACH] ; 11BB _ 66: 0F 6E. 91, 00002BAC + pshufd xmm7, xmm2, 0 ; 11C3 _ 66: 0F 70. FA, 00 + movdqa xmm2, xmm1 ; 11C8 _ 66: 0F 6F. D1 + pxor xmm0, xmm7 ; 11CC _ 66: 0F EF. C7 + psrld xmm1, 22 ; 11D0 _ 66: 0F 72. D1, 16 + pslld xmm2, 10 ; 11D5 _ 66: 0F 72. F2, 0A + por xmm2, xmm1 ; 11DA _ 66: 0F EB. D1 + movdqa xmm1, xmm3 ; 11DE _ 66: 0F 6F. CB + psrld xmm3, 5 ; 11E2 _ 66: 0F 72. D3, 05 + pxor xmm2, xmm0 ; 11E7 _ 66: 0F EF. D0 + pslld xmm1, 27 ; 11EB _ 66: 0F 72. F1, 1B + por xmm1, xmm3 ; 11F0 _ 66: 0F EB. CB + movdqa xmm3, xmm5 ; 11F4 _ 66: 0F 6F. DD + pxor xmm1, xmm5 ; 11F8 _ 66: 0F EF. CD + pxor xmm1, xmm0 ; 11FC _ 66: 0F EF. C8 + pslld xmm3, 7 ; 1200 _ 66: 0F 72. F3, 07 + pxor xmm2, xmm3 ; 1205 _ 66: 0F EF. D3 + movdqa xmm3, xmm0 ; 1209 _ 66: 0F 6F. D8 + movdqa xmm7, xmm2 ; 120D _ 66: 0F 6F. FA + psrld xmm0, 7 ; 1211 _ 66: 0F 72. D0, 07 + pslld xmm3, 25 ; 1216 _ 66: 0F 72. F3, 19 + por xmm3, xmm0 ; 121B _ 66: 0F EB. D8 + movdqa xmm0, xmm5 ; 121F _ 66: 0F 6F. C5 + psrld xmm5, 1 ; 1223 _ 66: 0F 72. D5, 01 + pxor xmm3, xmm2 ; 1228 _ 66: 0F EF. DA + pslld xmm0, 31 ; 122C _ 66: 0F 72. F0, 1F + por xmm0, xmm5 ; 1231 _ 66: 0F EB. C5 + movdqa xmm5, xmm1 ; 1235 _ 66: 0F 6F. E9 + pxor xmm0, xmm1 ; 1239 _ 66: 0F EF. C1 + pxor xmm0, xmm2 ; 123D _ 66: 0F EF. C2 + pslld xmm5, 3 ; 1241 _ 66: 0F 72. F5, 03 + pxor xmm3, xmm5 ; 1246 _ 66: 0F EF. DD + movdqa xmm5, xmm1 ; 124A _ 66: 0F 6F. E9 + pslld xmm7, 29 ; 124E _ 66: 0F 72. F7, 1D + psrld xmm2, 3 ; 1253 _ 66: 0F 72. D2, 03 + por xmm7, xmm2 ; 1258 _ 66: 0F EB. FA + pslld xmm5, 19 ; 125C _ 66: 0F 72. F5, 13 + psrld xmm1, 13 ; 1261 _ 66: 0F 72. D1, 0D + por xmm5, xmm1 ; 1266 _ 66: 0F EB. E9 + movdqa xmm1, xmm0 ; 126A _ 66: 0F 6F. C8 + pxor xmm0, xmm7 ; 126E _ 66: 0F EF. C7 + pxor xmm1, xmm3 ; 1272 _ 66: 0F EF. CB + pand xmm3, xmm1 ; 1276 _ 66: 0F DB. 
D9 + pxor xmm3, xmm5 ; 127A _ 66: 0F EF. DD + por xmm5, xmm1 ; 127E _ 66: 0F EB. E9 + pxor xmm7, xmm3 ; 1282 _ 66: 0F EF. FB + pxor xmm5, xmm0 ; 1286 _ 66: 0F EF. E8 + por xmm5, xmm7 ; 128A _ 66: 0F EB. EF + pxor xmm1, xmm3 ; 128E _ 66: 0F EF. CB + pxor xmm5, xmm1 ; 1292 _ 66: 0F EF. E9 + por xmm1, xmm3 ; 1296 _ 66: 0F EB. CB + pxor xmm1, xmm5 ; 129A _ 66: 0F EF. CD + pxor xmm0, xmm6 ; 129E _ 66: 0F EF. C6 + pxor xmm0, xmm1 ; 12A2 _ 66: 0F EF. C1 + por xmm1, xmm5 ; 12A6 _ 66: 0F EB. CD + pxor xmm1, xmm5 ; 12AA _ 66: 0F EF. CD + por xmm1, xmm0 ; 12AE _ 66: 0F EB. C8 + pxor xmm3, xmm1 ; 12B2 _ 66: 0F EF. D9 + movd xmm1, dword [ecx+2B90H] ; 12B6 _ 66: 0F 6E. 89, 00002B90 + pshufd xmm2, xmm1, 0 ; 12BE _ 66: 0F 70. D1, 00 + pxor xmm0, xmm2 ; 12C3 _ 66: 0F EF. C2 + movd xmm1, dword [ecx+2B94H] ; 12C7 _ 66: 0F 6E. 89, 00002B94 + pshufd xmm2, xmm1, 0 ; 12CF _ 66: 0F 70. D1, 00 + pxor xmm5, xmm2 ; 12D4 _ 66: 0F EF. EA + movd xmm1, dword [ecx+2B98H] ; 12D8 _ 66: 0F 6E. 89, 00002B98 + pshufd xmm2, xmm1, 0 ; 12E0 _ 66: 0F 70. D1, 00 + movd xmm1, dword [ecx+2B9CH] ; 12E5 _ 66: 0F 6E. 89, 00002B9C + pxor xmm3, xmm2 ; 12ED _ 66: 0F EF. DA + pshufd xmm2, xmm1, 0 ; 12F1 _ 66: 0F 70. D1, 00 + movdqa xmm1, xmm3 ; 12F6 _ 66: 0F 6F. CB + pxor xmm7, xmm2 ; 12FA _ 66: 0F EF. FA + movdqa xmm2, xmm5 ; 12FE _ 66: 0F 6F. D5 + pslld xmm1, 10 ; 1302 _ 66: 0F 72. F1, 0A + psrld xmm3, 22 ; 1307 _ 66: 0F 72. D3, 16 + por xmm1, xmm3 ; 130C _ 66: 0F EB. CB + movdqa xmm3, xmm0 ; 1310 _ 66: 0F 6F. D8 + psrld xmm0, 5 ; 1314 _ 66: 0F 72. D0, 05 + pxor xmm1, xmm7 ; 1319 _ 66: 0F EF. CF + pslld xmm3, 27 ; 131D _ 66: 0F 72. F3, 1B + por xmm3, xmm0 ; 1322 _ 66: 0F EB. D8 + movdqa xmm0, xmm5 ; 1326 _ 66: 0F 6F. C5 + pxor xmm3, xmm5 ; 132A _ 66: 0F EF. DD + pxor xmm3, xmm7 ; 132E _ 66: 0F EF. DF + pslld xmm0, 7 ; 1332 _ 66: 0F 72. F0, 07 + pxor xmm1, xmm0 ; 1337 _ 66: 0F EF. C8 + movdqa xmm0, xmm7 ; 133B _ 66: 0F 6F. C7 + psrld xmm7, 7 ; 133F _ 66: 0F 72. D7, 07 + pslld xmm2, 31 ; 1344 _ 66: 0F 72. F2, 1F + pslld xmm0, 25 ; 1349 _ 66: 0F 72. F0, 19 + por xmm0, xmm7 ; 134E _ 66: 0F EB. C7 + movdqa xmm7, xmm3 ; 1352 _ 66: 0F 6F. FB + psrld xmm5, 1 ; 1356 _ 66: 0F 72. D5, 01 + por xmm2, xmm5 ; 135B _ 66: 0F EB. D5 + movdqa xmm5, xmm1 ; 135F _ 66: 0F 6F. E9 + pxor xmm0, xmm1 ; 1363 _ 66: 0F EF. C1 + pslld xmm7, 3 ; 1367 _ 66: 0F 72. F7, 03 + pxor xmm0, xmm7 ; 136C _ 66: 0F EF. C7 + movdqa xmm7, xmm3 ; 1370 _ 66: 0F 6F. FB + pxor xmm2, xmm3 ; 1374 _ 66: 0F EF. D3 + pxor xmm2, xmm1 ; 1378 _ 66: 0F EF. D1 + pslld xmm5, 29 ; 137C _ 66: 0F 72. F5, 1D + psrld xmm1, 3 ; 1381 _ 66: 0F 72. D1, 03 + por xmm5, xmm1 ; 1386 _ 66: 0F EB. E9 + movdqa xmm1, xmm2 ; 138A _ 66: 0F 6F. CA + pslld xmm7, 19 ; 138E _ 66: 0F 72. F7, 13 + psrld xmm3, 13 ; 1393 _ 66: 0F 72. D3, 0D + por xmm7, xmm3 ; 1398 _ 66: 0F EB. FB + pxor xmm5, xmm6 ; 139C _ 66: 0F EF. EE + por xmm1, xmm7 ; 13A0 _ 66: 0F EB. CF + movd xmm3, dword [ecx+2B80H] ; 13A4 _ 66: 0F 6E. 99, 00002B80 + pxor xmm2, xmm6 ; 13AC _ 66: 0F EF. D6 + pxor xmm1, xmm5 ; 13B0 _ 66: 0F EF. CD + por xmm5, xmm2 ; 13B4 _ 66: 0F EB. EA + pxor xmm1, xmm0 ; 13B8 _ 66: 0F EF. C8 + pxor xmm7, xmm2 ; 13BC _ 66: 0F EF. FA + pxor xmm5, xmm7 ; 13C0 _ 66: 0F EF. EF + pand xmm7, xmm0 ; 13C4 _ 66: 0F DB. F8 + pxor xmm2, xmm7 ; 13C8 _ 66: 0F EF. D7 + por xmm7, xmm1 ; 13CC _ 66: 0F EB. F9 + pxor xmm7, xmm5 ; 13D0 _ 66: 0F EF. FD + pxor xmm0, xmm2 ; 13D4 _ 66: 0F EF. C2 + pxor xmm5, xmm1 ; 13D8 _ 66: 0F EF. E9 + pxor xmm0, xmm7 ; 13DC _ 66: 0F EF. C7 + pxor xmm0, xmm1 ; 13E0 _ 66: 0F EF. C1 + pand xmm5, xmm0 ; 13E4 _ 66: 0F DB. 
E8 + pxor xmm2, xmm5 ; 13E8 _ 66: 0F EF. D5 + pshufd xmm3, xmm3, 0 ; 13EC _ 66: 0F 70. DB, 00 + pxor xmm7, xmm3 ; 13F1 _ 66: 0F EF. FB + movd xmm5, dword [ecx+2B84H] ; 13F5 _ 66: 0F 6E. A9, 00002B84 + pshufd xmm3, xmm5, 0 ; 13FD _ 66: 0F 70. DD, 00 + pxor xmm2, xmm3 ; 1402 _ 66: 0F EF. D3 + movd xmm5, dword [ecx+2B88H] ; 1406 _ 66: 0F 6E. A9, 00002B88 + pshufd xmm3, xmm5, 0 ; 140E _ 66: 0F 70. DD, 00 + pxor xmm1, xmm3 ; 1413 _ 66: 0F EF. CB + movd xmm5, dword [ecx+2B8CH] ; 1417 _ 66: 0F 6E. A9, 00002B8C + pshufd xmm3, xmm5, 0 ; 141F _ 66: 0F 70. DD, 00 + movdqa xmm5, xmm7 ; 1424 _ 66: 0F 6F. EF + pxor xmm0, xmm3 ; 1428 _ 66: 0F EF. C3 + movdqa xmm3, xmm1 ; 142C _ 66: 0F 6F. D9 + psrld xmm1, 22 ; 1430 _ 66: 0F 72. D1, 16 + pslld xmm5, 27 ; 1435 _ 66: 0F 72. F5, 1B + pslld xmm3, 10 ; 143A _ 66: 0F 72. F3, 0A + por xmm3, xmm1 ; 143F _ 66: 0F EB. D9 + movdqa xmm1, xmm2 ; 1443 _ 66: 0F 6F. CA + psrld xmm7, 5 ; 1447 _ 66: 0F 72. D7, 05 + por xmm5, xmm7 ; 144C _ 66: 0F EB. EF + movdqa xmm7, xmm0 ; 1450 _ 66: 0F 6F. F8 + pxor xmm3, xmm0 ; 1454 _ 66: 0F EF. D8 + pslld xmm1, 7 ; 1458 _ 66: 0F 72. F1, 07 + pxor xmm3, xmm1 ; 145D _ 66: 0F EF. D9 + movdqa xmm1, xmm2 ; 1461 _ 66: 0F 6F. CA + pxor xmm5, xmm2 ; 1465 _ 66: 0F EF. EA + pxor xmm5, xmm0 ; 1469 _ 66: 0F EF. E8 + pslld xmm7, 25 ; 146D _ 66: 0F 72. F7, 19 + psrld xmm0, 7 ; 1472 _ 66: 0F 72. D0, 07 + por xmm7, xmm0 ; 1477 _ 66: 0F EB. F8 + movdqa xmm0, xmm3 ; 147B _ 66: 0F 6F. C3 + pslld xmm1, 31 ; 147F _ 66: 0F 72. F1, 1F + psrld xmm2, 1 ; 1484 _ 66: 0F 72. D2, 01 + por xmm1, xmm2 ; 1489 _ 66: 0F EB. CA + movdqa xmm2, xmm5 ; 148D _ 66: 0F 6F. D5 + pxor xmm7, xmm3 ; 1491 _ 66: 0F EF. FB + pxor xmm1, xmm5 ; 1495 _ 66: 0F EF. CD + pslld xmm2, 3 ; 1499 _ 66: 0F 72. F2, 03 + pxor xmm7, xmm2 ; 149E _ 66: 0F EF. FA + movdqa xmm2, xmm5 ; 14A2 _ 66: 0F 6F. D5 + pxor xmm1, xmm3 ; 14A6 _ 66: 0F EF. CB + pslld xmm0, 29 ; 14AA _ 66: 0F 72. F0, 1D + psrld xmm3, 3 ; 14AF _ 66: 0F 72. D3, 03 + por xmm0, xmm3 ; 14B4 _ 66: 0F EB. C3 + pslld xmm2, 19 ; 14B8 _ 66: 0F 72. F2, 13 + psrld xmm5, 13 ; 14BD _ 66: 0F 72. D5, 0D + por xmm2, xmm5 ; 14C2 _ 66: 0F EB. D5 + movdqa xmm5, xmm0 ; 14C6 _ 66: 0F 6F. E8 + por xmm0, xmm7 ; 14CA _ 66: 0F EB. C7 + pxor xmm5, xmm2 ; 14CE _ 66: 0F EF. EA + pand xmm2, xmm7 ; 14D2 _ 66: 0F DB. D7 + pxor xmm5, xmm6 ; 14D6 _ 66: 0F EF. EE + pxor xmm7, xmm1 ; 14DA _ 66: 0F EF. F9 + movd xmm3, dword [ecx+2B70H] ; 14DE _ 66: 0F 6E. 99, 00002B70 + por xmm1, xmm2 ; 14E6 _ 66: 0F EB. CA + pxor xmm2, xmm5 ; 14EA _ 66: 0F EF. D5 + pand xmm5, xmm0 ; 14EE _ 66: 0F DB. E8 + pand xmm7, xmm0 ; 14F2 _ 66: 0F DB. F8 + pxor xmm1, xmm5 ; 14F6 _ 66: 0F EF. CD + pxor xmm5, xmm2 ; 14FA _ 66: 0F EF. EA + por xmm2, xmm5 ; 14FE _ 66: 0F EB. D5 + pxor xmm0, xmm1 ; 1502 _ 66: 0F EF. C1 + pxor xmm2, xmm7 ; 1506 _ 66: 0F EF. D7 + pxor xmm7, xmm0 ; 150A _ 66: 0F EF. F8 + por xmm0, xmm2 ; 150E _ 66: 0F EB. C2 + pxor xmm7, xmm5 ; 1512 _ 66: 0F EF. FD + pxor xmm0, xmm5 ; 1516 _ 66: 0F EF. C5 + pshufd xmm5, xmm3, 0 ; 151A _ 66: 0F 70. EB, 00 + pxor xmm7, xmm5 ; 151F _ 66: 0F EF. FD + movd xmm3, dword [ecx+2B74H] ; 1523 _ 66: 0F 6E. 99, 00002B74 + pshufd xmm5, xmm3, 0 ; 152B _ 66: 0F 70. EB, 00 + pxor xmm2, xmm5 ; 1530 _ 66: 0F EF. D5 + movd xmm3, dword [ecx+2B78H] ; 1534 _ 66: 0F 6E. 99, 00002B78 + pshufd xmm5, xmm3, 0 ; 153C _ 66: 0F 70. EB, 00 + movd xmm3, dword [ecx+2B7CH] ; 1541 _ 66: 0F 6E. 99, 00002B7C + pxor xmm1, xmm5 ; 1549 _ 66: 0F EF. CD + pshufd xmm5, xmm3, 0 ; 154D _ 66: 0F 70. EB, 00 + movdqa xmm3, xmm7 ; 1552 _ 66: 0F 6F. 
DF + pxor xmm0, xmm5 ; 1556 _ 66: 0F EF. C5 + movdqa xmm5, xmm1 ; 155A _ 66: 0F 6F. E9 + psrld xmm1, 22 ; 155E _ 66: 0F 72. D1, 16 + pslld xmm3, 27 ; 1563 _ 66: 0F 72. F3, 1B + pslld xmm5, 10 ; 1568 _ 66: 0F 72. F5, 0A + por xmm5, xmm1 ; 156D _ 66: 0F EB. E9 + movdqa xmm1, xmm0 ; 1571 _ 66: 0F 6F. C8 + psrld xmm7, 5 ; 1575 _ 66: 0F 72. D7, 05 + por xmm3, xmm7 ; 157A _ 66: 0F EB. DF + movdqa xmm7, xmm2 ; 157E _ 66: 0F 6F. FA + pxor xmm5, xmm0 ; 1582 _ 66: 0F EF. E8 + pxor xmm3, xmm2 ; 1586 _ 66: 0F EF. DA + pslld xmm7, 7 ; 158A _ 66: 0F 72. F7, 07 + pxor xmm5, xmm7 ; 158F _ 66: 0F EF. EF + movdqa xmm7, xmm2 ; 1593 _ 66: 0F 6F. FA + pxor xmm3, xmm0 ; 1597 _ 66: 0F EF. D8 + pslld xmm1, 25 ; 159B _ 66: 0F 72. F1, 19 + psrld xmm0, 7 ; 15A0 _ 66: 0F 72. D0, 07 + por xmm1, xmm0 ; 15A5 _ 66: 0F EB. C8 + movdqa xmm0, xmm3 ; 15A9 _ 66: 0F 6F. C3 + pslld xmm7, 31 ; 15AD _ 66: 0F 72. F7, 1F + psrld xmm2, 1 ; 15B2 _ 66: 0F 72. D2, 01 + por xmm7, xmm2 ; 15B7 _ 66: 0F EB. FA + movdqa xmm2, xmm3 ; 15BB _ 66: 0F 6F. D3 + pxor xmm1, xmm5 ; 15BF _ 66: 0F EF. CD + pslld xmm0, 3 ; 15C3 _ 66: 0F 72. F0, 03 + pxor xmm1, xmm0 ; 15C8 _ 66: 0F EF. C8 + movdqa xmm0, xmm5 ; 15CC _ 66: 0F 6F. C5 + pxor xmm7, xmm3 ; 15D0 _ 66: 0F EF. FB + pxor xmm7, xmm5 ; 15D4 _ 66: 0F EF. FD + pslld xmm0, 29 ; 15D8 _ 66: 0F 72. F0, 1D + psrld xmm5, 3 ; 15DD _ 66: 0F 72. D5, 03 + por xmm0, xmm5 ; 15E2 _ 66: 0F EB. C5 + pslld xmm2, 19 ; 15E6 _ 66: 0F 72. F2, 13 + psrld xmm3, 13 ; 15EB _ 66: 0F 72. D3, 0D + por xmm2, xmm3 ; 15F0 _ 66: 0F EB. D3 + movdqa xmm3, xmm0 ; 15F4 _ 66: 0F 6F. D8 + pxor xmm2, xmm0 ; 15F8 _ 66: 0F EF. D0 + pxor xmm0, xmm1 ; 15FC _ 66: 0F EF. C1 + pand xmm3, xmm2 ; 1600 _ 66: 0F DB. DA + pxor xmm3, xmm6 ; 1604 _ 66: 0F EF. DE + pxor xmm1, xmm7 ; 1608 _ 66: 0F EF. CF + pxor xmm3, xmm1 ; 160C _ 66: 0F EF. D9 + por xmm0, xmm2 ; 1610 _ 66: 0F EB. C2 + pxor xmm2, xmm3 ; 1614 _ 66: 0F EF. D3 + pxor xmm1, xmm0 ; 1618 _ 66: 0F EF. C8 + pxor xmm0, xmm7 ; 161C _ 66: 0F EF. C7 + pand xmm7, xmm1 ; 1620 _ 66: 0F DB. F9 + pxor xmm7, xmm2 ; 1624 _ 66: 0F EF. FA + pxor xmm2, xmm1 ; 1628 _ 66: 0F EF. D1 + por xmm2, xmm3 ; 162C _ 66: 0F EB. D3 + pxor xmm1, xmm7 ; 1630 _ 66: 0F EF. CF + pxor xmm0, xmm2 ; 1634 _ 66: 0F EF. C2 + movd xmm2, dword [ecx+2B60H] ; 1638 _ 66: 0F 6E. 91, 00002B60 + pshufd xmm5, xmm2, 0 ; 1640 _ 66: 0F 70. EA, 00 + pxor xmm7, xmm5 ; 1645 _ 66: 0F EF. FD + movd xmm2, dword [ecx+2B64H] ; 1649 _ 66: 0F 6E. 91, 00002B64 + pshufd xmm5, xmm2, 0 ; 1651 _ 66: 0F 70. EA, 00 + pxor xmm3, xmm5 ; 1656 _ 66: 0F EF. DD + movd xmm2, dword [ecx+2B68H] ; 165A _ 66: 0F 6E. 91, 00002B68 + pshufd xmm5, xmm2, 0 ; 1662 _ 66: 0F 70. EA, 00 + pxor xmm0, xmm5 ; 1667 _ 66: 0F EF. C5 + movd xmm2, dword [ecx+2B6CH] ; 166B _ 66: 0F 6E. 91, 00002B6C + pshufd xmm5, xmm2, 0 ; 1673 _ 66: 0F 70. EA, 00 + movdqa xmm2, xmm0 ; 1678 _ 66: 0F 6F. D0 + pxor xmm1, xmm5 ; 167C _ 66: 0F EF. CD + movdqa xmm5, xmm7 ; 1680 _ 66: 0F 6F. EF + pslld xmm2, 10 ; 1684 _ 66: 0F 72. F2, 0A + psrld xmm0, 22 ; 1689 _ 66: 0F 72. D0, 16 + por xmm2, xmm0 ; 168E _ 66: 0F EB. D0 + movdqa xmm0, xmm1 ; 1692 _ 66: 0F 6F. C1 + pslld xmm5, 27 ; 1696 _ 66: 0F 72. F5, 1B + psrld xmm7, 5 ; 169B _ 66: 0F 72. D7, 05 + por xmm5, xmm7 ; 16A0 _ 66: 0F EB. EF + movdqa xmm7, xmm3 ; 16A4 _ 66: 0F 6F. FB + pxor xmm2, xmm1 ; 16A8 _ 66: 0F EF. D1 + pxor xmm5, xmm3 ; 16AC _ 66: 0F EF. EB + pslld xmm7, 7 ; 16B0 _ 66: 0F 72. F7, 07 + pxor xmm2, xmm7 ; 16B5 _ 66: 0F EF. D7 + pxor xmm5, xmm1 ; 16B9 _ 66: 0F EF. E9 + movdqa xmm7, xmm5 ; 16BD _ 66: 0F 6F. FD + pslld xmm0, 25 ; 16C1 _ 66: 0F 72. 
F0, 19 + psrld xmm1, 7 ; 16C6 _ 66: 0F 72. D1, 07 + por xmm0, xmm1 ; 16CB _ 66: 0F EB. C1 + movdqa xmm1, xmm3 ; 16CF _ 66: 0F 6F. CB + psrld xmm3, 1 ; 16D3 _ 66: 0F 72. D3, 01 + pxor xmm0, xmm2 ; 16D8 _ 66: 0F EF. C2 + pslld xmm1, 31 ; 16DC _ 66: 0F 72. F1, 1F + por xmm1, xmm3 ; 16E1 _ 66: 0F EB. CB + movdqa xmm3, xmm5 ; 16E5 _ 66: 0F 6F. DD + pxor xmm1, xmm5 ; 16E9 _ 66: 0F EF. CD + pxor xmm1, xmm2 ; 16ED _ 66: 0F EF. CA + pslld xmm3, 3 ; 16F1 _ 66: 0F 72. F3, 03 + pxor xmm0, xmm3 ; 16F6 _ 66: 0F EF. C3 + movdqa xmm3, xmm2 ; 16FA _ 66: 0F 6F. DA + psrld xmm2, 3 ; 16FE _ 66: 0F 72. D2, 03 + pslld xmm7, 19 ; 1703 _ 66: 0F 72. F7, 13 + pslld xmm3, 29 ; 1708 _ 66: 0F 72. F3, 1D + por xmm3, xmm2 ; 170D _ 66: 0F EB. DA + movdqa xmm2, xmm0 ; 1711 _ 66: 0F 6F. D0 + psrld xmm5, 13 ; 1715 _ 66: 0F 72. D5, 0D + por xmm7, xmm5 ; 171A _ 66: 0F EB. FD + pxor xmm1, xmm6 ; 171E _ 66: 0F EF. CE + pxor xmm3, xmm1 ; 1722 _ 66: 0F EF. D9 + por xmm2, xmm7 ; 1726 _ 66: 0F EB. D7 + pxor xmm2, xmm3 ; 172A _ 66: 0F EF. D3 + por xmm3, xmm1 ; 172E _ 66: 0F EB. D9 + pand xmm3, xmm7 ; 1732 _ 66: 0F DB. DF + pxor xmm0, xmm2 ; 1736 _ 66: 0F EF. C2 + pxor xmm3, xmm0 ; 173A _ 66: 0F EF. D8 + por xmm0, xmm7 ; 173E _ 66: 0F EB. C7 + pxor xmm0, xmm1 ; 1742 _ 66: 0F EF. C1 + pand xmm1, xmm3 ; 1746 _ 66: 0F DB. CB + pxor xmm1, xmm2 ; 174A _ 66: 0F EF. CA + pxor xmm0, xmm3 ; 174E _ 66: 0F EF. C3 + pand xmm2, xmm0 ; 1752 _ 66: 0F DB. D0 + pxor xmm0, xmm1 ; 1756 _ 66: 0F EF. C1 + pxor xmm2, xmm0 ; 175A _ 66: 0F EF. D0 + pxor xmm0, xmm6 ; 175E _ 66: 0F EF. C6 + pxor xmm2, xmm7 ; 1762 _ 66: 0F EF. D7 + movd xmm6, dword [ecx+2B50H] ; 1766 _ 66: 0F 6E. B1, 00002B50 + pshufd xmm5, xmm6, 0 ; 176E _ 66: 0F 70. EE, 00 + movd xmm6, dword [ecx+2B54H] ; 1773 _ 66: 0F 6E. B1, 00002B54 + pxor xmm1, xmm5 ; 177B _ 66: 0F EF. CD + pshufd xmm7, xmm6, 0 ; 177F _ 66: 0F 70. FE, 00 + pxor xmm0, xmm7 ; 1784 _ 66: 0F EF. C7 + movdqa xmm7, oword [esp+60H] ; 1788 _ 66: 0F 6F. 7C 24, 60 + movd xmm5, dword [ecx+2B58H] ; 178E _ 66: 0F 6E. A9, 00002B58 + movdqa oword [esp+60H], xmm7 ; 1796 _ 66: 0F 7F. 7C 24, 60 + movdqa xmm7, xmm1 ; 179C _ 66: 0F 6F. F9 + pshufd xmm6, xmm5, 0 ; 17A0 _ 66: 0F 70. F5, 00 + pxor xmm2, xmm6 ; 17A5 _ 66: 0F EF. D6 + movd xmm5, dword [ecx+2B5CH] ; 17A9 _ 66: 0F 6E. A9, 00002B5C + pshufd xmm6, xmm5, 0 ; 17B1 _ 66: 0F 70. F5, 00 + pxor xmm3, xmm6 ; 17B6 _ 66: 0F EF. DE + movdqa xmm6, xmm2 ; 17BA _ 66: 0F 6F. F2 + psrld xmm2, 22 ; 17BE _ 66: 0F 72. D2, 16 + pslld xmm7, 27 ; 17C3 _ 66: 0F 72. F7, 1B + pslld xmm6, 10 ; 17C8 _ 66: 0F 72. F6, 0A + por xmm6, xmm2 ; 17CD _ 66: 0F EB. F2 + movdqa xmm2, xmm0 ; 17D1 _ 66: 0F 6F. D0 + psrld xmm1, 5 ; 17D5 _ 66: 0F 72. D1, 05 + por xmm7, xmm1 ; 17DA _ 66: 0F EB. F9 + movdqa xmm1, xmm0 ; 17DE _ 66: 0F 6F. C8 + pxor xmm6, xmm3 ; 17E2 _ 66: 0F EF. F3 + pslld xmm2, 7 ; 17E6 _ 66: 0F 72. F2, 07 + pxor xmm6, xmm2 ; 17EB _ 66: 0F EF. F2 + movdqa xmm2, xmm3 ; 17EF _ 66: 0F 6F. D3 + movdqa xmm5, xmm6 ; 17F3 _ 66: 0F 6F. EE + pxor xmm7, xmm0 ; 17F7 _ 66: 0F EF. F8 + pxor xmm7, xmm3 ; 17FB _ 66: 0F EF. FB + pslld xmm2, 25 ; 17FF _ 66: 0F 72. F2, 19 + psrld xmm3, 7 ; 1804 _ 66: 0F 72. D3, 07 + por xmm2, xmm3 ; 1809 _ 66: 0F EB. D3 + movdqa xmm3, xmm7 ; 180D _ 66: 0F 6F. DF + pslld xmm1, 31 ; 1811 _ 66: 0F 72. F1, 1F + psrld xmm0, 1 ; 1816 _ 66: 0F 72. D0, 01 + por xmm1, xmm0 ; 181B _ 66: 0F EB. C8 + pxor xmm2, xmm6 ; 181F _ 66: 0F EF. D6 + pslld xmm3, 3 ; 1823 _ 66: 0F 72. F3, 03 + pxor xmm2, xmm3 ; 1828 _ 66: 0F EF. D3 + movdqa xmm3, xmm7 ; 182C _ 66: 0F 6F. DF + pxor xmm1, xmm7 ; 1830 _ 66: 0F EF. 
CF + pxor xmm1, xmm6 ; 1834 _ 66: 0F EF. CE + pslld xmm5, 29 ; 1838 _ 66: 0F 72. F5, 1D + psrld xmm6, 3 ; 183D _ 66: 0F 72. D6, 03 + por xmm5, xmm6 ; 1842 _ 66: 0F EB. EE + movdqa xmm0, xmm5 ; 1846 _ 66: 0F 6F. C5 + pslld xmm3, 19 ; 184A _ 66: 0F 72. F3, 13 + psrld xmm7, 13 ; 184F _ 66: 0F 72. D7, 0D + por xmm3, xmm7 ; 1854 _ 66: 0F EB. DF + pand xmm0, xmm2 ; 1858 _ 66: 0F DB. C2 + pxor xmm0, xmm1 ; 185C _ 66: 0F EF. C1 + por xmm1, xmm2 ; 1860 _ 66: 0F EB. CA + pand xmm1, xmm3 ; 1864 _ 66: 0F DB. CB + pxor xmm5, xmm0 ; 1868 _ 66: 0F EF. E8 + pxor xmm5, xmm1 ; 186C _ 66: 0F EF. E9 + pand xmm1, xmm0 ; 1870 _ 66: 0F DB. C8 + pxor xmm2, xmm5 ; 1874 _ 66: 0F EF. D5 + pxor xmm1, xmm2 ; 1878 _ 66: 0F EF. CA + pcmpeqd xmm6, xmm6 ; 187C _ 66: 0F 76. F6 + pxor xmm3, xmm6 ; 1880 _ 66: 0F EF. DE + pand xmm2, xmm3 ; 1884 _ 66: 0F DB. D3 + pxor xmm2, xmm0 ; 1888 _ 66: 0F EF. D0 + pxor xmm3, xmm1 ; 188C _ 66: 0F EF. D9 + pand xmm0, xmm3 ; 1890 _ 66: 0F DB. C3 + pxor xmm2, xmm3 ; 1894 _ 66: 0F EF. D3 + pxor xmm0, xmm5 ; 1898 _ 66: 0F EF. C5 + por xmm0, xmm2 ; 189C _ 66: 0F EB. C2 + pxor xmm2, xmm3 ; 18A0 _ 66: 0F EF. D3 + pxor xmm0, xmm1 ; 18A4 _ 66: 0F EF. C1 + movd xmm1, dword [ecx+2B40H] ; 18A8 _ 66: 0F 6E. 89, 00002B40 + pshufd xmm7, xmm1, 0 ; 18B0 _ 66: 0F 70. F9, 00 + pxor xmm3, xmm7 ; 18B5 _ 66: 0F EF. DF + movd xmm1, dword [ecx+2B44H] ; 18B9 _ 66: 0F 6E. 89, 00002B44 + pshufd xmm7, xmm1, 0 ; 18C1 _ 66: 0F 70. F9, 00 + pxor xmm2, xmm7 ; 18C6 _ 66: 0F EF. D7 + movd xmm1, dword [ecx+2B48H] ; 18CA _ 66: 0F 6E. 89, 00002B48 + pshufd xmm7, xmm1, 0 ; 18D2 _ 66: 0F 70. F9, 00 + pxor xmm0, xmm7 ; 18D7 _ 66: 0F EF. C7 + movd xmm1, dword [ecx+2B4CH] ; 18DB _ 66: 0F 6E. 89, 00002B4C + pshufd xmm7, xmm1, 0 ; 18E3 _ 66: 0F 70. F9, 00 + movdqa xmm1, xmm0 ; 18E8 _ 66: 0F 6F. C8 + pxor xmm5, xmm7 ; 18EC _ 66: 0F EF. EF + psrld xmm0, 22 ; 18F0 _ 66: 0F 72. D0, 16 + pslld xmm1, 10 ; 18F5 _ 66: 0F 72. F1, 0A + por xmm1, xmm0 ; 18FA _ 66: 0F EB. C8 + movdqa xmm0, xmm3 ; 18FE _ 66: 0F 6F. C3 + psrld xmm3, 5 ; 1902 _ 66: 0F 72. D3, 05 + pxor xmm1, xmm5 ; 1907 _ 66: 0F EF. CD + pslld xmm0, 27 ; 190B _ 66: 0F 72. F0, 1B + por xmm0, xmm3 ; 1910 _ 66: 0F EB. C3 + movdqa xmm3, xmm2 ; 1914 _ 66: 0F 6F. DA + pxor xmm0, xmm2 ; 1918 _ 66: 0F EF. C2 + pxor xmm0, xmm5 ; 191C _ 66: 0F EF. C5 + movdqa xmm7, xmm0 ; 1920 _ 66: 0F 6F. F8 + pslld xmm3, 7 ; 1924 _ 66: 0F 72. F3, 07 + pxor xmm1, xmm3 ; 1929 _ 66: 0F EF. CB + movdqa xmm3, xmm5 ; 192D _ 66: 0F 6F. DD + psrld xmm5, 7 ; 1931 _ 66: 0F 72. D5, 07 + pslld xmm7, 19 ; 1936 _ 66: 0F 72. F7, 13 + pslld xmm3, 25 ; 193B _ 66: 0F 72. F3, 19 + por xmm3, xmm5 ; 1940 _ 66: 0F EB. DD + movdqa xmm5, xmm2 ; 1944 _ 66: 0F 6F. EA + psrld xmm2, 1 ; 1948 _ 66: 0F 72. D2, 01 + pxor xmm3, xmm1 ; 194D _ 66: 0F EF. D9 + pslld xmm5, 31 ; 1951 _ 66: 0F 72. F5, 1F + por xmm5, xmm2 ; 1956 _ 66: 0F EB. EA + movdqa xmm2, xmm0 ; 195A _ 66: 0F 6F. D0 + pxor xmm5, xmm0 ; 195E _ 66: 0F EF. E8 + pxor xmm5, xmm1 ; 1962 _ 66: 0F EF. E9 + pslld xmm2, 3 ; 1966 _ 66: 0F 72. F2, 03 + pxor xmm3, xmm2 ; 196B _ 66: 0F EF. DA + movdqa xmm2, xmm1 ; 196F _ 66: 0F 6F. D1 + psrld xmm1, 3 ; 1973 _ 66: 0F 72. D1, 03 + psrld xmm0, 13 ; 1978 _ 66: 0F 72. D0, 0D + pslld xmm2, 29 ; 197D _ 66: 0F 72. F2, 1D + por xmm2, xmm1 ; 1982 _ 66: 0F EB. D1 + por xmm7, xmm0 ; 1986 _ 66: 0F EB. F8 + movdqa xmm0, xmm2 ; 198A _ 66: 0F 6F. C2 + pxor xmm0, xmm5 ; 198E _ 66: 0F EF. C5 + pxor xmm7, xmm0 ; 1992 _ 66: 0F EF. F8 + pand xmm2, xmm0 ; 1996 _ 66: 0F DB. D0 + pxor xmm2, xmm7 ; 199A _ 66: 0F EF. D7 + pand xmm7, xmm5 ; 199E _ 66: 0F DB. 
FD + pxor xmm5, xmm3 ; 19A2 _ 66: 0F EF. EB + por xmm3, xmm2 ; 19A6 _ 66: 0F EB. DA + pxor xmm0, xmm3 ; 19AA _ 66: 0F EF. C3 + pxor xmm7, xmm3 ; 19AE _ 66: 0F EF. FB + pxor xmm5, xmm2 ; 19B2 _ 66: 0F EF. EA + pand xmm3, xmm0 ; 19B6 _ 66: 0F DB. D8 + pxor xmm3, xmm5 ; 19BA _ 66: 0F EF. DD + pxor xmm5, xmm7 ; 19BE _ 66: 0F EF. EF + por xmm5, xmm0 ; 19C2 _ 66: 0F EB. E8 + pxor xmm7, xmm3 ; 19C6 _ 66: 0F EF. FB + pxor xmm5, xmm2 ; 19CA _ 66: 0F EF. EA + pxor xmm7, xmm5 ; 19CE _ 66: 0F EF. FD + movd xmm2, dword [ecx+2B30H] ; 19D2 _ 66: 0F 6E. 91, 00002B30 + pshufd xmm1, xmm2, 0 ; 19DA _ 66: 0F 70. CA, 00 + pxor xmm0, xmm1 ; 19DF _ 66: 0F EF. C1 + movd xmm2, dword [ecx+2B34H] ; 19E3 _ 66: 0F 6E. 91, 00002B34 + pshufd xmm1, xmm2, 0 ; 19EB _ 66: 0F 70. CA, 00 + pxor xmm5, xmm1 ; 19F0 _ 66: 0F EF. E9 + movd xmm2, dword [ecx+2B38H] ; 19F4 _ 66: 0F 6E. 91, 00002B38 + pshufd xmm1, xmm2, 0 ; 19FC _ 66: 0F 70. CA, 00 + pxor xmm3, xmm1 ; 1A01 _ 66: 0F EF. D9 + movd xmm2, dword [ecx+2B3CH] ; 1A05 _ 66: 0F 6E. 91, 00002B3C + pshufd xmm1, xmm2, 0 ; 1A0D _ 66: 0F 70. CA, 00 + movdqa xmm2, xmm0 ; 1A12 _ 66: 0F 6F. D0 + pxor xmm7, xmm1 ; 1A16 _ 66: 0F EF. F9 + movdqa xmm1, xmm3 ; 1A1A _ 66: 0F 6F. CB + psrld xmm3, 22 ; 1A1E _ 66: 0F 72. D3, 16 + pslld xmm2, 27 ; 1A23 _ 66: 0F 72. F2, 1B + pslld xmm1, 10 ; 1A28 _ 66: 0F 72. F1, 0A + por xmm1, xmm3 ; 1A2D _ 66: 0F EB. CB + movdqa xmm3, xmm7 ; 1A31 _ 66: 0F 6F. DF + psrld xmm0, 5 ; 1A35 _ 66: 0F 72. D0, 05 + por xmm2, xmm0 ; 1A3A _ 66: 0F EB. D0 + movdqa xmm0, xmm5 ; 1A3E _ 66: 0F 6F. C5 + pxor xmm1, xmm7 ; 1A42 _ 66: 0F EF. CF + pxor xmm2, xmm5 ; 1A46 _ 66: 0F EF. D5 + pslld xmm0, 7 ; 1A4A _ 66: 0F 72. F0, 07 + pxor xmm1, xmm0 ; 1A4F _ 66: 0F EF. C8 + movdqa xmm0, xmm5 ; 1A53 _ 66: 0F 6F. C5 + pxor xmm2, xmm7 ; 1A57 _ 66: 0F EF. D7 + pslld xmm3, 25 ; 1A5B _ 66: 0F 72. F3, 19 + psrld xmm7, 7 ; 1A60 _ 66: 0F 72. D7, 07 + por xmm3, xmm7 ; 1A65 _ 66: 0F EB. DF + movdqa xmm7, xmm1 ; 1A69 _ 66: 0F 6F. F9 + pslld xmm0, 31 ; 1A6D _ 66: 0F 72. F0, 1F + psrld xmm5, 1 ; 1A72 _ 66: 0F 72. D5, 01 + por xmm0, xmm5 ; 1A77 _ 66: 0F EB. C5 + movdqa xmm5, xmm2 ; 1A7B _ 66: 0F 6F. EA + pxor xmm3, xmm1 ; 1A7F _ 66: 0F EF. D9 + pxor xmm0, xmm2 ; 1A83 _ 66: 0F EF. C2 + pslld xmm5, 3 ; 1A87 _ 66: 0F 72. F5, 03 + pxor xmm3, xmm5 ; 1A8C _ 66: 0F EF. DD + movdqa xmm5, xmm2 ; 1A90 _ 66: 0F 6F. EA + pxor xmm0, xmm1 ; 1A94 _ 66: 0F EF. C1 + pslld xmm7, 29 ; 1A98 _ 66: 0F 72. F7, 1D + psrld xmm1, 3 ; 1A9D _ 66: 0F 72. D1, 03 + por xmm7, xmm1 ; 1AA2 _ 66: 0F EB. F9 + pslld xmm5, 19 ; 1AA6 _ 66: 0F 72. F5, 13 + psrld xmm2, 13 ; 1AAB _ 66: 0F 72. D2, 0D + por xmm5, xmm2 ; 1AB0 _ 66: 0F EB. EA + pxor xmm7, xmm3 ; 1AB4 _ 66: 0F EF. FB + pxor xmm3, xmm5 ; 1AB8 _ 66: 0F EF. DD + movdqa xmm2, xmm3 ; 1ABC _ 66: 0F 6F. D3 + pand xmm2, xmm7 ; 1AC0 _ 66: 0F DB. D7 + pxor xmm2, xmm0 ; 1AC4 _ 66: 0F EF. D0 + por xmm0, xmm7 ; 1AC8 _ 66: 0F EB. C7 + pxor xmm0, xmm3 ; 1ACC _ 66: 0F EF. C3 + pand xmm3, xmm2 ; 1AD0 _ 66: 0F DB. DA + pxor xmm7, xmm2 ; 1AD4 _ 66: 0F EF. FA + pand xmm3, xmm5 ; 1AD8 _ 66: 0F DB. DD + pxor xmm3, xmm7 ; 1ADC _ 66: 0F EF. DF + pand xmm7, xmm0 ; 1AE0 _ 66: 0F DB. F8 + por xmm7, xmm5 ; 1AE4 _ 66: 0F EB. FD + pxor xmm2, xmm6 ; 1AE8 _ 66: 0F EF. D6 + movdqa xmm1, xmm2 ; 1AEC _ 66: 0F 6F. CA + pxor xmm7, xmm2 ; 1AF0 _ 66: 0F EF. FA + pxor xmm5, xmm2 ; 1AF4 _ 66: 0F EF. EA + pxor xmm1, xmm3 ; 1AF8 _ 66: 0F EF. CB + pand xmm5, xmm0 ; 1AFC _ 66: 0F DB. E8 + pxor xmm1, xmm5 ; 1B00 _ 66: 0F EF. CD + movd xmm5, dword [ecx+2B20H] ; 1B04 _ 66: 0F 6E. 
A9, 00002B20 + pshufd xmm5, xmm5, 0 ; 1B0C _ 66: 0F 70. ED, 00 + pxor xmm0, xmm5 ; 1B11 _ 66: 0F EF. C5 + movd xmm2, dword [ecx+2B24H] ; 1B15 _ 66: 0F 6E. 91, 00002B24 + pshufd xmm5, xmm2, 0 ; 1B1D _ 66: 0F 70. EA, 00 + pxor xmm3, xmm5 ; 1B22 _ 66: 0F EF. DD + movd xmm2, dword [ecx+2B28H] ; 1B26 _ 66: 0F 6E. 91, 00002B28 + pshufd xmm5, xmm2, 0 ; 1B2E _ 66: 0F 70. EA, 00 + pxor xmm7, xmm5 ; 1B33 _ 66: 0F EF. FD + movd xmm2, dword [ecx+2B2CH] ; 1B37 _ 66: 0F 6E. 91, 00002B2C + pshufd xmm5, xmm2, 0 ; 1B3F _ 66: 0F 70. EA, 00 + movdqa xmm2, xmm7 ; 1B44 _ 66: 0F 6F. D7 + pxor xmm1, xmm5 ; 1B48 _ 66: 0F EF. CD + movdqa xmm5, xmm0 ; 1B4C _ 66: 0F 6F. E8 + pslld xmm2, 10 ; 1B50 _ 66: 0F 72. F2, 0A + psrld xmm7, 22 ; 1B55 _ 66: 0F 72. D7, 16 + por xmm2, xmm7 ; 1B5A _ 66: 0F EB. D7 + movdqa xmm7, xmm3 ; 1B5E _ 66: 0F 6F. FB + pslld xmm5, 27 ; 1B62 _ 66: 0F 72. F5, 1B + psrld xmm0, 5 ; 1B67 _ 66: 0F 72. D0, 05 + por xmm5, xmm0 ; 1B6C _ 66: 0F EB. E8 + movdqa xmm0, xmm1 ; 1B70 _ 66: 0F 6F. C1 + pxor xmm2, xmm1 ; 1B74 _ 66: 0F EF. D1 + pslld xmm7, 7 ; 1B78 _ 66: 0F 72. F7, 07 + pxor xmm2, xmm7 ; 1B7D _ 66: 0F EF. D7 + pxor xmm5, xmm3 ; 1B81 _ 66: 0F EF. EB + pxor xmm5, xmm1 ; 1B85 _ 66: 0F EF. E9 + movdqa xmm7, xmm5 ; 1B89 _ 66: 0F 6F. FD + pslld xmm0, 25 ; 1B8D _ 66: 0F 72. F0, 19 + psrld xmm1, 7 ; 1B92 _ 66: 0F 72. D1, 07 + por xmm0, xmm1 ; 1B97 _ 66: 0F EB. C1 + movdqa xmm1, xmm3 ; 1B9B _ 66: 0F 6F. CB + psrld xmm3, 1 ; 1B9F _ 66: 0F 72. D3, 01 + pxor xmm0, xmm2 ; 1BA4 _ 66: 0F EF. C2 + pslld xmm1, 31 ; 1BA8 _ 66: 0F 72. F1, 1F + por xmm1, xmm3 ; 1BAD _ 66: 0F EB. CB + movdqa xmm3, xmm5 ; 1BB1 _ 66: 0F 6F. DD + pxor xmm1, xmm5 ; 1BB5 _ 66: 0F EF. CD + pxor xmm1, xmm2 ; 1BB9 _ 66: 0F EF. CA + pslld xmm3, 3 ; 1BBD _ 66: 0F 72. F3, 03 + pxor xmm0, xmm3 ; 1BC2 _ 66: 0F EF. C3 + movdqa xmm3, xmm2 ; 1BC6 _ 66: 0F 6F. DA + psrld xmm2, 3 ; 1BCA _ 66: 0F 72. D2, 03 + pslld xmm7, 19 ; 1BCF _ 66: 0F 72. F7, 13 + pslld xmm3, 29 ; 1BD4 _ 66: 0F 72. F3, 1D + por xmm3, xmm2 ; 1BD9 _ 66: 0F EB. DA + movdqa xmm2, xmm1 ; 1BDD _ 66: 0F 6F. D1 + psrld xmm5, 13 ; 1BE1 _ 66: 0F 72. D5, 0D + por xmm7, xmm5 ; 1BE6 _ 66: 0F EB. FD + pxor xmm2, xmm0 ; 1BEA _ 66: 0F EF. D0 + pand xmm0, xmm2 ; 1BEE _ 66: 0F DB. C2 + pxor xmm1, xmm3 ; 1BF2 _ 66: 0F EF. CB + pxor xmm0, xmm7 ; 1BF6 _ 66: 0F EF. C7 + por xmm7, xmm2 ; 1BFA _ 66: 0F EB. FA + movd xmm5, dword [ecx+2B10H] ; 1BFE _ 66: 0F 6E. A9, 00002B10 + pxor xmm3, xmm0 ; 1C06 _ 66: 0F EF. D8 + pxor xmm7, xmm1 ; 1C0A _ 66: 0F EF. F9 + por xmm7, xmm3 ; 1C0E _ 66: 0F EB. FB + pxor xmm2, xmm0 ; 1C12 _ 66: 0F EF. D0 + pxor xmm7, xmm2 ; 1C16 _ 66: 0F EF. FA + por xmm2, xmm0 ; 1C1A _ 66: 0F EB. D0 + pxor xmm2, xmm7 ; 1C1E _ 66: 0F EF. D7 + pxor xmm1, xmm6 ; 1C22 _ 66: 0F EF. CE + pxor xmm1, xmm2 ; 1C26 _ 66: 0F EF. CA + por xmm2, xmm7 ; 1C2A _ 66: 0F EB. D7 + pxor xmm2, xmm7 ; 1C2E _ 66: 0F EF. D7 + por xmm2, xmm1 ; 1C32 _ 66: 0F EB. D1 + pxor xmm0, xmm2 ; 1C36 _ 66: 0F EF. C2 + pshufd xmm2, xmm5, 0 ; 1C3A _ 66: 0F 70. D5, 00 + pxor xmm1, xmm2 ; 1C3F _ 66: 0F EF. CA + movd xmm5, dword [ecx+2B14H] ; 1C43 _ 66: 0F 6E. A9, 00002B14 + pshufd xmm2, xmm5, 0 ; 1C4B _ 66: 0F 70. D5, 00 + pxor xmm7, xmm2 ; 1C50 _ 66: 0F EF. FA + movd xmm5, dword [ecx+2B18H] ; 1C54 _ 66: 0F 6E. A9, 00002B18 + pshufd xmm2, xmm5, 0 ; 1C5C _ 66: 0F 70. D5, 00 + movd xmm5, dword [ecx+2B1CH] ; 1C61 _ 66: 0F 6E. A9, 00002B1C + pxor xmm0, xmm2 ; 1C69 _ 66: 0F EF. C2 + pshufd xmm2, xmm5, 0 ; 1C6D _ 66: 0F 70. D5, 00 + movdqa xmm5, xmm0 ; 1C72 _ 66: 0F 6F. E8 + pxor xmm3, xmm2 ; 1C76 _ 66: 0F EF. 
DA + movdqa xmm2, xmm1 ; 1C7A _ 66: 0F 6F. D1 + pslld xmm5, 10 ; 1C7E _ 66: 0F 72. F5, 0A + psrld xmm0, 22 ; 1C83 _ 66: 0F 72. D0, 16 + por xmm5, xmm0 ; 1C88 _ 66: 0F EB. E8 + movdqa xmm0, xmm3 ; 1C8C _ 66: 0F 6F. C3 + pslld xmm2, 27 ; 1C90 _ 66: 0F 72. F2, 1B + psrld xmm1, 5 ; 1C95 _ 66: 0F 72. D1, 05 + por xmm2, xmm1 ; 1C9A _ 66: 0F EB. D1 + movdqa xmm1, xmm7 ; 1C9E _ 66: 0F 6F. CF + pxor xmm5, xmm3 ; 1CA2 _ 66: 0F EF. EB + pxor xmm2, xmm7 ; 1CA6 _ 66: 0F EF. D7 + pslld xmm1, 7 ; 1CAA _ 66: 0F 72. F1, 07 + pxor xmm5, xmm1 ; 1CAF _ 66: 0F EF. E9 + pxor xmm2, xmm3 ; 1CB3 _ 66: 0F EF. D3 + movdqa xmm1, xmm2 ; 1CB7 _ 66: 0F 6F. CA + pslld xmm0, 25 ; 1CBB _ 66: 0F 72. F0, 19 + psrld xmm3, 7 ; 1CC0 _ 66: 0F 72. D3, 07 + por xmm0, xmm3 ; 1CC5 _ 66: 0F EB. C3 + movdqa xmm3, xmm7 ; 1CC9 _ 66: 0F 6F. DF + psrld xmm7, 1 ; 1CCD _ 66: 0F 72. D7, 01 + pxor xmm0, xmm5 ; 1CD2 _ 66: 0F EF. C5 + pslld xmm3, 31 ; 1CD6 _ 66: 0F 72. F3, 1F + por xmm3, xmm7 ; 1CDB _ 66: 0F EB. DF + movdqa xmm7, xmm2 ; 1CDF _ 66: 0F 6F. FA + pxor xmm3, xmm2 ; 1CE3 _ 66: 0F EF. DA + pxor xmm3, xmm5 ; 1CE7 _ 66: 0F EF. DD + pslld xmm7, 3 ; 1CEB _ 66: 0F 72. F7, 03 + pxor xmm0, xmm7 ; 1CF0 _ 66: 0F EF. C7 + movdqa xmm7, xmm5 ; 1CF4 _ 66: 0F 6F. FD + psrld xmm5, 3 ; 1CF8 _ 66: 0F 72. D5, 03 + pslld xmm1, 19 ; 1CFD _ 66: 0F 72. F1, 13 + pslld xmm7, 29 ; 1D02 _ 66: 0F 72. F7, 1D + por xmm7, xmm5 ; 1D07 _ 66: 0F EB. FD + movdqa xmm5, xmm3 ; 1D0B _ 66: 0F 6F. EB + psrld xmm2, 13 ; 1D0F _ 66: 0F 72. D2, 0D + por xmm1, xmm2 ; 1D14 _ 66: 0F EB. CA + pxor xmm7, xmm6 ; 1D18 _ 66: 0F EF. FE + por xmm5, xmm1 ; 1D1C _ 66: 0F EB. E9 + movd xmm2, dword [ecx+2B00H] ; 1D20 _ 66: 0F 6E. 91, 00002B00 + pxor xmm3, xmm6 ; 1D28 _ 66: 0F EF. DE + pxor xmm5, xmm7 ; 1D2C _ 66: 0F EF. EF + por xmm7, xmm3 ; 1D30 _ 66: 0F EB. FB + pxor xmm5, xmm0 ; 1D34 _ 66: 0F EF. E8 + pxor xmm1, xmm3 ; 1D38 _ 66: 0F EF. CB + pxor xmm7, xmm1 ; 1D3C _ 66: 0F EF. F9 + pand xmm1, xmm0 ; 1D40 _ 66: 0F DB. C8 + pxor xmm3, xmm1 ; 1D44 _ 66: 0F EF. D9 + por xmm1, xmm5 ; 1D48 _ 66: 0F EB. CD + pxor xmm1, xmm7 ; 1D4C _ 66: 0F EF. CF + pxor xmm0, xmm3 ; 1D50 _ 66: 0F EF. C3 + pxor xmm7, xmm5 ; 1D54 _ 66: 0F EF. FD + pxor xmm0, xmm1 ; 1D58 _ 66: 0F EF. C1 + pxor xmm0, xmm5 ; 1D5C _ 66: 0F EF. C5 + pand xmm7, xmm0 ; 1D60 _ 66: 0F DB. F8 + pxor xmm3, xmm7 ; 1D64 _ 66: 0F EF. DF + pshufd xmm7, xmm2, 0 ; 1D68 _ 66: 0F 70. FA, 00 + pxor xmm1, xmm7 ; 1D6D _ 66: 0F EF. CF + movd xmm2, dword [ecx+2B04H] ; 1D71 _ 66: 0F 6E. 91, 00002B04 + pshufd xmm7, xmm2, 0 ; 1D79 _ 66: 0F 70. FA, 00 + pxor xmm3, xmm7 ; 1D7E _ 66: 0F EF. DF + movd xmm2, dword [ecx+2B08H] ; 1D82 _ 66: 0F 6E. 91, 00002B08 + pshufd xmm7, xmm2, 0 ; 1D8A _ 66: 0F 70. FA, 00 + pxor xmm5, xmm7 ; 1D8F _ 66: 0F EF. EF + movd xmm2, dword [ecx+2B0CH] ; 1D93 _ 66: 0F 6E. 91, 00002B0C + pshufd xmm7, xmm2, 0 ; 1D9B _ 66: 0F 70. FA, 00 + movdqa xmm2, xmm5 ; 1DA0 _ 66: 0F 6F. D5 + pxor xmm0, xmm7 ; 1DA4 _ 66: 0F EF. C7 + movdqa xmm7, xmm0 ; 1DA8 _ 66: 0F 6F. F8 + pslld xmm2, 10 ; 1DAC _ 66: 0F 72. F2, 0A + psrld xmm5, 22 ; 1DB1 _ 66: 0F 72. D5, 16 + por xmm2, xmm5 ; 1DB6 _ 66: 0F EB. D5 + movdqa xmm5, xmm1 ; 1DBA _ 66: 0F 6F. E9 + psrld xmm1, 5 ; 1DBE _ 66: 0F 72. D1, 05 + pxor xmm2, xmm0 ; 1DC3 _ 66: 0F EF. D0 + pslld xmm5, 27 ; 1DC7 _ 66: 0F 72. F5, 1B + por xmm5, xmm1 ; 1DCC _ 66: 0F EB. E9 + movdqa xmm1, xmm3 ; 1DD0 _ 66: 0F 6F. CB + pxor xmm5, xmm3 ; 1DD4 _ 66: 0F EF. EB + pxor xmm5, xmm0 ; 1DD8 _ 66: 0F EF. E8 + pslld xmm1, 7 ; 1DDC _ 66: 0F 72. F1, 07 + pxor xmm2, xmm1 ; 1DE1 _ 66: 0F EF. D1 + movdqa xmm1, xmm3 ; 1DE5 _ 66: 0F 6F. 
CB + pslld xmm7, 25 ; 1DE9 _ 66: 0F 72. F7, 19 + psrld xmm0, 7 ; 1DEE _ 66: 0F 72. D0, 07 + por xmm7, xmm0 ; 1DF3 _ 66: 0F EB. F8 + movdqa xmm0, xmm5 ; 1DF7 _ 66: 0F 6F. C5 + pslld xmm1, 31 ; 1DFB _ 66: 0F 72. F1, 1F + psrld xmm3, 1 ; 1E00 _ 66: 0F 72. D3, 01 + por xmm1, xmm3 ; 1E05 _ 66: 0F EB. CB + movdqa xmm3, xmm5 ; 1E09 _ 66: 0F 6F. DD + pxor xmm7, xmm2 ; 1E0D _ 66: 0F EF. FA + pslld xmm0, 3 ; 1E11 _ 66: 0F 72. F0, 03 + pxor xmm7, xmm0 ; 1E16 _ 66: 0F EF. F8 + movdqa xmm0, xmm2 ; 1E1A _ 66: 0F 6F. C2 + pxor xmm1, xmm5 ; 1E1E _ 66: 0F EF. CD + pxor xmm1, xmm2 ; 1E22 _ 66: 0F EF. CA + pslld xmm0, 29 ; 1E26 _ 66: 0F 72. F0, 1D + psrld xmm2, 3 ; 1E2B _ 66: 0F 72. D2, 03 + por xmm0, xmm2 ; 1E30 _ 66: 0F EB. C2 + pslld xmm3, 19 ; 1E34 _ 66: 0F 72. F3, 13 + psrld xmm5, 13 ; 1E39 _ 66: 0F 72. D5, 0D + por xmm3, xmm5 ; 1E3E _ 66: 0F EB. DD + movdqa xmm5, xmm0 ; 1E42 _ 66: 0F 6F. E8 + por xmm0, xmm7 ; 1E46 _ 66: 0F EB. C7 + pxor xmm5, xmm3 ; 1E4A _ 66: 0F EF. EB + pand xmm3, xmm7 ; 1E4E _ 66: 0F DB. DF + pxor xmm5, xmm6 ; 1E52 _ 66: 0F EF. EE + pxor xmm7, xmm1 ; 1E56 _ 66: 0F EF. F9 + movd xmm6, dword [ecx+2AF0H] ; 1E5A _ 66: 0F 6E. B1, 00002AF0 + por xmm1, xmm3 ; 1E62 _ 66: 0F EB. CB + pxor xmm3, xmm5 ; 1E66 _ 66: 0F EF. DD + pand xmm5, xmm0 ; 1E6A _ 66: 0F DB. E8 + pand xmm7, xmm0 ; 1E6E _ 66: 0F DB. F8 + pxor xmm1, xmm5 ; 1E72 _ 66: 0F EF. CD + pxor xmm5, xmm3 ; 1E76 _ 66: 0F EF. EB + por xmm3, xmm5 ; 1E7A _ 66: 0F EB. DD + pxor xmm0, xmm1 ; 1E7E _ 66: 0F EF. C1 + pxor xmm3, xmm7 ; 1E82 _ 66: 0F EF. DF + pxor xmm7, xmm0 ; 1E86 _ 66: 0F EF. F8 + por xmm0, xmm3 ; 1E8A _ 66: 0F EB. C3 + pxor xmm7, xmm5 ; 1E8E _ 66: 0F EF. FD + pxor xmm0, xmm5 ; 1E92 _ 66: 0F EF. C5 + pshufd xmm2, xmm6, 0 ; 1E96 _ 66: 0F 70. D6, 00 + pxor xmm7, xmm2 ; 1E9B _ 66: 0F EF. FA + movd xmm5, dword [ecx+2AF4H] ; 1E9F _ 66: 0F 6E. A9, 00002AF4 + pshufd xmm6, xmm5, 0 ; 1EA7 _ 66: 0F 70. F5, 00 + pxor xmm3, xmm6 ; 1EAC _ 66: 0F EF. DE + movd xmm2, dword [ecx+2AF8H] ; 1EB0 _ 66: 0F 6E. 91, 00002AF8 + pshufd xmm5, xmm2, 0 ; 1EB8 _ 66: 0F 70. EA, 00 + movd xmm6, dword [ecx+2AFCH] ; 1EBD _ 66: 0F 6E. B1, 00002AFC + pxor xmm1, xmm5 ; 1EC5 _ 66: 0F EF. CD + movdqa xmm5, xmm7 ; 1EC9 _ 66: 0F 6F. EF + pshufd xmm2, xmm6, 0 ; 1ECD _ 66: 0F 70. D6, 00 + movdqa xmm6, xmm3 ; 1ED2 _ 66: 0F 6F. F3 + pxor xmm0, xmm2 ; 1ED6 _ 66: 0F EF. C2 + movdqa xmm2, xmm1 ; 1EDA _ 66: 0F 6F. D1 + psrld xmm1, 22 ; 1EDE _ 66: 0F 72. D1, 16 + pslld xmm5, 27 ; 1EE3 _ 66: 0F 72. F5, 1B + pslld xmm2, 10 ; 1EE8 _ 66: 0F 72. F2, 0A + por xmm2, xmm1 ; 1EED _ 66: 0F EB. D1 + movdqa xmm1, xmm3 ; 1EF1 _ 66: 0F 6F. CB + psrld xmm7, 5 ; 1EF5 _ 66: 0F 72. D7, 05 + por xmm5, xmm7 ; 1EFA _ 66: 0F EB. EF + movdqa xmm7, oword [esp+60H] ; 1EFE _ 66: 0F 6F. 7C 24, 60 + pxor xmm2, xmm0 ; 1F04 _ 66: 0F EF. D0 + pslld xmm1, 7 ; 1F08 _ 66: 0F 72. F1, 07 + pxor xmm2, xmm1 ; 1F0D _ 66: 0F EF. D1 + movdqa xmm1, xmm0 ; 1F11 _ 66: 0F 6F. C8 + pxor xmm5, xmm3 ; 1F15 _ 66: 0F EF. EB + pxor xmm5, xmm0 ; 1F19 _ 66: 0F EF. E8 + pslld xmm1, 25 ; 1F1D _ 66: 0F 72. F1, 19 + psrld xmm0, 7 ; 1F22 _ 66: 0F 72. D0, 07 + por xmm1, xmm0 ; 1F27 _ 66: 0F EB. C8 + pslld xmm6, 31 ; 1F2B _ 66: 0F 72. F6, 1F + psrld xmm3, 1 ; 1F30 _ 66: 0F 72. D3, 01 + por xmm6, xmm3 ; 1F35 _ 66: 0F EB. F3 + movdqa xmm3, xmm5 ; 1F39 _ 66: 0F 6F. DD + pxor xmm1, xmm2 ; 1F3D _ 66: 0F EF. CA + pxor xmm6, xmm5 ; 1F41 _ 66: 0F EF. F5 + pslld xmm3, 3 ; 1F45 _ 66: 0F 72. F3, 03 + pxor xmm1, xmm3 ; 1F4A _ 66: 0F EF. CB + movdqa oword [esp+60H], xmm7 ; 1F4E _ 66: 0F 7F. 7C 24, 60 + movdqa xmm0, xmm2 ; 1F54 _ 66: 0F 6F. 
C2 + movdqa xmm7, xmm5 ; 1F58 _ 66: 0F 6F. FD + pxor xmm6, xmm2 ; 1F5C _ 66: 0F EF. F2 + pslld xmm0, 29 ; 1F60 _ 66: 0F 72. F0, 1D + psrld xmm2, 3 ; 1F65 _ 66: 0F 72. D2, 03 + por xmm0, xmm2 ; 1F6A _ 66: 0F EB. C2 + pslld xmm7, 19 ; 1F6E _ 66: 0F 72. F7, 13 + psrld xmm5, 13 ; 1F73 _ 66: 0F 72. D5, 0D + por xmm7, xmm5 ; 1F78 _ 66: 0F EB. FD + movdqa xmm5, xmm0 ; 1F7C _ 66: 0F 6F. E8 + pxor xmm7, xmm0 ; 1F80 _ 66: 0F EF. F8 + pxor xmm0, xmm1 ; 1F84 _ 66: 0F EF. C1 + pand xmm5, xmm7 ; 1F88 _ 66: 0F DB. EF + pxor xmm1, xmm6 ; 1F8C _ 66: 0F EF. CE + por xmm0, xmm7 ; 1F90 _ 66: 0F EB. C7 + movd xmm2, dword [ecx+2AE0H] ; 1F94 _ 66: 0F 6E. 91, 00002AE0 + pcmpeqd xmm3, xmm3 ; 1F9C _ 66: 0F 76. DB + pxor xmm5, xmm3 ; 1FA0 _ 66: 0F EF. EB + pxor xmm5, xmm1 ; 1FA4 _ 66: 0F EF. E9 + pxor xmm7, xmm5 ; 1FA8 _ 66: 0F EF. FD + pxor xmm1, xmm0 ; 1FAC _ 66: 0F EF. C8 + pxor xmm0, xmm6 ; 1FB0 _ 66: 0F EF. C6 + pand xmm6, xmm1 ; 1FB4 _ 66: 0F DB. F1 + pxor xmm6, xmm7 ; 1FB8 _ 66: 0F EF. F7 + pxor xmm7, xmm1 ; 1FBC _ 66: 0F EF. F9 + por xmm7, xmm5 ; 1FC0 _ 66: 0F EB. FD + pxor xmm1, xmm6 ; 1FC4 _ 66: 0F EF. CE + pxor xmm0, xmm7 ; 1FC8 _ 66: 0F EF. C7 + movd xmm7, dword [ecx+2AE4H] ; 1FCC _ 66: 0F 6E. B9, 00002AE4 + pshufd xmm2, xmm2, 0 ; 1FD4 _ 66: 0F 70. D2, 00 + pxor xmm6, xmm2 ; 1FD9 _ 66: 0F EF. F2 + pshufd xmm2, xmm7, 0 ; 1FDD _ 66: 0F 70. D7, 00 + pxor xmm5, xmm2 ; 1FE2 _ 66: 0F EF. EA + movd xmm7, dword [ecx+2AE8H] ; 1FE6 _ 66: 0F 6E. B9, 00002AE8 + pshufd xmm2, xmm7, 0 ; 1FEE _ 66: 0F 70. D7, 00 + pxor xmm0, xmm2 ; 1FF3 _ 66: 0F EF. C2 + movd xmm7, dword [ecx+2AECH] ; 1FF7 _ 66: 0F 6E. B9, 00002AEC + pshufd xmm2, xmm7, 0 ; 1FFF _ 66: 0F 70. D7, 00 + movdqa xmm7, xmm6 ; 2004 _ 66: 0F 6F. FE + pxor xmm1, xmm2 ; 2008 _ 66: 0F EF. CA + movdqa xmm2, xmm0 ; 200C _ 66: 0F 6F. D0 + psrld xmm0, 22 ; 2010 _ 66: 0F 72. D0, 16 + pslld xmm7, 27 ; 2015 _ 66: 0F 72. F7, 1B + pslld xmm2, 10 ; 201A _ 66: 0F 72. F2, 0A + por xmm2, xmm0 ; 201F _ 66: 0F EB. D0 + movdqa xmm0, xmm5 ; 2023 _ 66: 0F 6F. C5 + psrld xmm6, 5 ; 2027 _ 66: 0F 72. D6, 05 + por xmm7, xmm6 ; 202C _ 66: 0F EB. FE + movdqa xmm6, xmm1 ; 2030 _ 66: 0F 6F. F1 + pxor xmm2, xmm1 ; 2034 _ 66: 0F EF. D1 + pslld xmm0, 7 ; 2038 _ 66: 0F 72. F0, 07 + pxor xmm2, xmm0 ; 203D _ 66: 0F EF. D0 + movdqa xmm0, xmm5 ; 2041 _ 66: 0F 6F. C5 + pxor xmm7, xmm5 ; 2045 _ 66: 0F EF. FD + pxor xmm7, xmm1 ; 2049 _ 66: 0F EF. F9 + pslld xmm6, 25 ; 204D _ 66: 0F 72. F6, 19 + psrld xmm1, 7 ; 2052 _ 66: 0F 72. D1, 07 + por xmm6, xmm1 ; 2057 _ 66: 0F EB. F1 + movdqa xmm1, xmm7 ; 205B _ 66: 0F 6F. CF + pslld xmm0, 31 ; 205F _ 66: 0F 72. F0, 1F + psrld xmm5, 1 ; 2064 _ 66: 0F 72. D5, 01 + por xmm0, xmm5 ; 2069 _ 66: 0F EB. C5 + pxor xmm6, xmm2 ; 206D _ 66: 0F EF. F2 + pslld xmm1, 3 ; 2071 _ 66: 0F 72. F1, 03 + pxor xmm6, xmm1 ; 2076 _ 66: 0F EF. F1 + movdqa xmm1, xmm2 ; 207A _ 66: 0F 6F. CA + movdqa xmm5, xmm6 ; 207E _ 66: 0F 6F. EE + pxor xmm0, xmm7 ; 2082 _ 66: 0F EF. C7 + pxor xmm0, xmm2 ; 2086 _ 66: 0F EF. C2 + pslld xmm1, 29 ; 208A _ 66: 0F 72. F1, 1D + psrld xmm2, 3 ; 208F _ 66: 0F 72. D2, 03 + por xmm1, xmm2 ; 2094 _ 66: 0F EB. CA + movdqa xmm2, xmm7 ; 2098 _ 66: 0F 6F. D7 + psrld xmm7, 13 ; 209C _ 66: 0F 72. D7, 0D + pxor xmm0, xmm3 ; 20A1 _ 66: 0F EF. C3 + pslld xmm2, 19 ; 20A5 _ 66: 0F 72. F2, 13 + por xmm2, xmm7 ; 20AA _ 66: 0F EB. D7 + pxor xmm1, xmm0 ; 20AE _ 66: 0F EF. C8 + por xmm5, xmm2 ; 20B2 _ 66: 0F EB. EA + movd xmm7, dword [ecx+2AD0H] ; 20B6 _ 66: 0F 6E. B9, 00002AD0 + pxor xmm5, xmm1 ; 20BE _ 66: 0F EF. E9 + por xmm1, xmm0 ; 20C2 _ 66: 0F EB. 
C8 + pand xmm1, xmm2 ; 20C6 _ 66: 0F DB. CA + pxor xmm6, xmm5 ; 20CA _ 66: 0F EF. F5 + pxor xmm1, xmm6 ; 20CE _ 66: 0F EF. CE + por xmm6, xmm2 ; 20D2 _ 66: 0F EB. F2 + pxor xmm6, xmm0 ; 20D6 _ 66: 0F EF. F0 + pand xmm0, xmm1 ; 20DA _ 66: 0F DB. C1 + pxor xmm0, xmm5 ; 20DE _ 66: 0F EF. C5 + pxor xmm6, xmm1 ; 20E2 _ 66: 0F EF. F1 + pand xmm5, xmm6 ; 20E6 _ 66: 0F DB. EE + pxor xmm6, xmm0 ; 20EA _ 66: 0F EF. F0 + pxor xmm5, xmm6 ; 20EE _ 66: 0F EF. EE + pxor xmm6, xmm3 ; 20F2 _ 66: 0F EF. F3 + pxor xmm5, xmm2 ; 20F6 _ 66: 0F EF. EA + pshufd xmm2, xmm7, 0 ; 20FA _ 66: 0F 70. D7, 00 + pxor xmm0, xmm2 ; 20FF _ 66: 0F EF. C2 + movd xmm7, dword [ecx+2AD4H] ; 2103 _ 66: 0F 6E. B9, 00002AD4 + pshufd xmm2, xmm7, 0 ; 210B _ 66: 0F 70. D7, 00 + movd xmm7, dword [ecx+2AD8H] ; 2110 _ 66: 0F 6E. B9, 00002AD8 + pxor xmm6, xmm2 ; 2118 _ 66: 0F EF. F2 + pshufd xmm2, xmm7, 0 ; 211C _ 66: 0F 70. D7, 00 + pxor xmm5, xmm2 ; 2121 _ 66: 0F EF. EA + movd xmm7, dword [ecx+2ADCH] ; 2125 _ 66: 0F 6E. B9, 00002ADC + pshufd xmm2, xmm7, 0 ; 212D _ 66: 0F 70. D7, 00 + movdqa xmm7, xmm5 ; 2132 _ 66: 0F 6F. FD + pxor xmm1, xmm2 ; 2136 _ 66: 0F EF. CA + movdqa xmm2, xmm0 ; 213A _ 66: 0F 6F. D0 + pslld xmm7, 10 ; 213E _ 66: 0F 72. F7, 0A + psrld xmm5, 22 ; 2143 _ 66: 0F 72. D5, 16 + por xmm7, xmm5 ; 2148 _ 66: 0F EB. FD + movdqa xmm5, xmm6 ; 214C _ 66: 0F 6F. EE + pslld xmm2, 27 ; 2150 _ 66: 0F 72. F2, 1B + psrld xmm0, 5 ; 2155 _ 66: 0F 72. D0, 05 + por xmm2, xmm0 ; 215A _ 66: 0F EB. D0 + movdqa xmm0, xmm1 ; 215E _ 66: 0F 6F. C1 + pxor xmm7, xmm1 ; 2162 _ 66: 0F EF. F9 + pslld xmm5, 7 ; 2166 _ 66: 0F 72. F5, 07 + pxor xmm7, xmm5 ; 216B _ 66: 0F EF. FD + movdqa xmm5, xmm6 ; 216F _ 66: 0F 6F. EE + pxor xmm2, xmm6 ; 2173 _ 66: 0F EF. D6 + pxor xmm2, xmm1 ; 2177 _ 66: 0F EF. D1 + pslld xmm0, 25 ; 217B _ 66: 0F 72. F0, 19 + psrld xmm1, 7 ; 2180 _ 66: 0F 72. D1, 07 + por xmm0, xmm1 ; 2185 _ 66: 0F EB. C1 + movdqa xmm1, xmm7 ; 2189 _ 66: 0F 6F. CF + pslld xmm5, 31 ; 218D _ 66: 0F 72. F5, 1F + psrld xmm6, 1 ; 2192 _ 66: 0F 72. D6, 01 + por xmm5, xmm6 ; 2197 _ 66: 0F EB. EE + movdqa xmm6, xmm2 ; 219B _ 66: 0F 6F. F2 + pxor xmm0, xmm7 ; 219F _ 66: 0F EF. C7 + pxor xmm5, xmm2 ; 21A3 _ 66: 0F EF. EA + pslld xmm6, 3 ; 21A7 _ 66: 0F 72. F6, 03 + pxor xmm0, xmm6 ; 21AC _ 66: 0F EF. C6 + movdqa xmm6, xmm2 ; 21B0 _ 66: 0F 6F. F2 + pxor xmm5, xmm7 ; 21B4 _ 66: 0F EF. EF + pslld xmm1, 29 ; 21B8 _ 66: 0F 72. F1, 1D + psrld xmm7, 3 ; 21BD _ 66: 0F 72. D7, 03 + por xmm1, xmm7 ; 21C2 _ 66: 0F EB. CF + pslld xmm6, 19 ; 21C6 _ 66: 0F 72. F6, 13 + psrld xmm2, 13 ; 21CB _ 66: 0F 72. D2, 0D + por xmm6, xmm2 ; 21D0 _ 66: 0F EB. F2 + movdqa xmm2, xmm1 ; 21D4 _ 66: 0F 6F. D1 + movd xmm7, dword [ecx+2AC0H] ; 21D8 _ 66: 0F 6E. B9, 00002AC0 + pand xmm2, xmm0 ; 21E0 _ 66: 0F DB. D0 + pxor xmm2, xmm5 ; 21E4 _ 66: 0F EF. D5 + por xmm5, xmm0 ; 21E8 _ 66: 0F EB. E8 + pand xmm5, xmm6 ; 21EC _ 66: 0F DB. EE + pxor xmm1, xmm2 ; 21F0 _ 66: 0F EF. CA + pxor xmm1, xmm5 ; 21F4 _ 66: 0F EF. CD + pand xmm5, xmm2 ; 21F8 _ 66: 0F DB. EA + pxor xmm6, xmm3 ; 21FC _ 66: 0F EF. F3 + pxor xmm0, xmm1 ; 2200 _ 66: 0F EF. C1 + pxor xmm5, xmm0 ; 2204 _ 66: 0F EF. E8 + pand xmm0, xmm6 ; 2208 _ 66: 0F DB. C6 + pxor xmm0, xmm2 ; 220C _ 66: 0F EF. C2 + pxor xmm6, xmm5 ; 2210 _ 66: 0F EF. F5 + pand xmm2, xmm6 ; 2214 _ 66: 0F DB. D6 + pxor xmm0, xmm6 ; 2218 _ 66: 0F EF. C6 + pxor xmm2, xmm1 ; 221C _ 66: 0F EF. D1 + por xmm2, xmm0 ; 2220 _ 66: 0F EB. D0 + pxor xmm0, xmm6 ; 2224 _ 66: 0F EF. C6 + pxor xmm2, xmm5 ; 2228 _ 66: 0F EF. D5 + pshufd xmm5, xmm7, 0 ; 222C _ 66: 0F 70. 
EF, 00 + pxor xmm6, xmm5 ; 2231 _ 66: 0F EF. F5 + movd xmm7, dword [ecx+2AC4H] ; 2235 _ 66: 0F 6E. B9, 00002AC4 + pshufd xmm5, xmm7, 0 ; 223D _ 66: 0F 70. EF, 00 + pxor xmm0, xmm5 ; 2242 _ 66: 0F EF. C5 + movd xmm7, dword [ecx+2AC8H] ; 2246 _ 66: 0F 6E. B9, 00002AC8 + pshufd xmm5, xmm7, 0 ; 224E _ 66: 0F 70. EF, 00 + movd xmm7, dword [ecx+2ACCH] ; 2253 _ 66: 0F 6E. B9, 00002ACC + pxor xmm2, xmm5 ; 225B _ 66: 0F EF. D5 + pshufd xmm5, xmm7, 0 ; 225F _ 66: 0F 70. EF, 00 + movdqa xmm7, xmm6 ; 2264 _ 66: 0F 6F. FE + pxor xmm1, xmm5 ; 2268 _ 66: 0F EF. CD + movdqa xmm5, xmm2 ; 226C _ 66: 0F 6F. EA + psrld xmm2, 22 ; 2270 _ 66: 0F 72. D2, 16 + pslld xmm7, 27 ; 2275 _ 66: 0F 72. F7, 1B + pslld xmm5, 10 ; 227A _ 66: 0F 72. F5, 0A + por xmm5, xmm2 ; 227F _ 66: 0F EB. EA + movdqa xmm2, xmm0 ; 2283 _ 66: 0F 6F. D0 + psrld xmm6, 5 ; 2287 _ 66: 0F 72. D6, 05 + por xmm7, xmm6 ; 228C _ 66: 0F EB. FE + movdqa xmm6, xmm1 ; 2290 _ 66: 0F 6F. F1 + pxor xmm5, xmm1 ; 2294 _ 66: 0F EF. E9 + pslld xmm2, 7 ; 2298 _ 66: 0F 72. F2, 07 + pxor xmm5, xmm2 ; 229D _ 66: 0F EF. EA + movdqa xmm2, xmm0 ; 22A1 _ 66: 0F 6F. D0 + pxor xmm7, xmm0 ; 22A5 _ 66: 0F EF. F8 + pxor xmm7, xmm1 ; 22A9 _ 66: 0F EF. F9 + pslld xmm6, 25 ; 22AD _ 66: 0F 72. F6, 19 + psrld xmm1, 7 ; 22B2 _ 66: 0F 72. D1, 07 + por xmm6, xmm1 ; 22B7 _ 66: 0F EB. F1 + movdqa xmm1, xmm5 ; 22BB _ 66: 0F 6F. CD + pslld xmm2, 31 ; 22BF _ 66: 0F 72. F2, 1F + psrld xmm0, 1 ; 22C4 _ 66: 0F 72. D0, 01 + por xmm2, xmm0 ; 22C9 _ 66: 0F EB. D0 + movdqa xmm0, xmm7 ; 22CD _ 66: 0F 6F. C7 + pxor xmm6, xmm5 ; 22D1 _ 66: 0F EF. F5 + pxor xmm2, xmm7 ; 22D5 _ 66: 0F EF. D7 + pslld xmm0, 3 ; 22D9 _ 66: 0F 72. F0, 03 + pxor xmm6, xmm0 ; 22DE _ 66: 0F EF. F0 + movdqa xmm0, xmm7 ; 22E2 _ 66: 0F 6F. C7 + pxor xmm2, xmm5 ; 22E6 _ 66: 0F EF. D5 + pslld xmm1, 29 ; 22EA _ 66: 0F 72. F1, 1D + psrld xmm5, 3 ; 22EF _ 66: 0F 72. D5, 03 + por xmm1, xmm5 ; 22F4 _ 66: 0F EB. CD + movd xmm5, dword [ecx+2AB4H] ; 22F8 _ 66: 0F 6E. A9, 00002AB4 + pslld xmm0, 19 ; 2300 _ 66: 0F 72. F0, 13 + psrld xmm7, 13 ; 2305 _ 66: 0F 72. D7, 0D + por xmm0, xmm7 ; 230A _ 66: 0F EB. C7 + movdqa xmm7, xmm1 ; 230E _ 66: 0F 6F. F9 + pxor xmm7, xmm2 ; 2312 _ 66: 0F EF. FA + pxor xmm0, xmm7 ; 2316 _ 66: 0F EF. C7 + pand xmm1, xmm7 ; 231A _ 66: 0F DB. CF + pxor xmm1, xmm0 ; 231E _ 66: 0F EF. C8 + pand xmm0, xmm2 ; 2322 _ 66: 0F DB. C2 + pxor xmm2, xmm6 ; 2326 _ 66: 0F EF. D6 + por xmm6, xmm1 ; 232A _ 66: 0F EB. F1 + pxor xmm7, xmm6 ; 232E _ 66: 0F EF. FE + pxor xmm0, xmm6 ; 2332 _ 66: 0F EF. C6 + pxor xmm2, xmm1 ; 2336 _ 66: 0F EF. D1 + pand xmm6, xmm7 ; 233A _ 66: 0F DB. F7 + pxor xmm6, xmm2 ; 233E _ 66: 0F EF. F2 + pxor xmm2, xmm0 ; 2342 _ 66: 0F EF. D0 + por xmm2, xmm7 ; 2346 _ 66: 0F EB. D7 + pxor xmm0, xmm6 ; 234A _ 66: 0F EF. C6 + pxor xmm2, xmm1 ; 234E _ 66: 0F EF. D1 + pxor xmm0, xmm2 ; 2352 _ 66: 0F EF. C2 + movd xmm1, dword [ecx+2AB0H] ; 2356 _ 66: 0F 6E. 89, 00002AB0 + pshufd xmm1, xmm1, 0 ; 235E _ 66: 0F 70. C9, 00 + pxor xmm7, xmm1 ; 2363 _ 66: 0F EF. F9 + pshufd xmm1, xmm5, 0 ; 2367 _ 66: 0F 70. CD, 00 + movd xmm5, dword [ecx+2AB8H] ; 236C _ 66: 0F 6E. A9, 00002AB8 + pxor xmm2, xmm1 ; 2374 _ 66: 0F EF. D1 + pshufd xmm1, xmm5, 0 ; 2378 _ 66: 0F 70. CD, 00 + pxor xmm6, xmm1 ; 237D _ 66: 0F EF. F1 + movd xmm5, dword [ecx+2ABCH] ; 2381 _ 66: 0F 6E. A9, 00002ABC + pshufd xmm1, xmm5, 0 ; 2389 _ 66: 0F 70. CD, 00 + movdqa xmm5, xmm6 ; 238E _ 66: 0F 6F. EE + pxor xmm0, xmm1 ; 2392 _ 66: 0F EF. C1 + movdqa xmm1, xmm7 ; 2396 _ 66: 0F 6F. CF + pslld xmm5, 10 ; 239A _ 66: 0F 72. 
F5, 0A + psrld xmm6, 22 ; 239F _ 66: 0F 72. D6, 16 + por xmm5, xmm6 ; 23A4 _ 66: 0F EB. EE + movdqa xmm6, xmm2 ; 23A8 _ 66: 0F 6F. F2 + pslld xmm1, 27 ; 23AC _ 66: 0F 72. F1, 1B + psrld xmm7, 5 ; 23B1 _ 66: 0F 72. D7, 05 + por xmm1, xmm7 ; 23B6 _ 66: 0F EB. CF + pxor xmm5, xmm0 ; 23BA _ 66: 0F EF. E8 + pslld xmm6, 7 ; 23BE _ 66: 0F 72. F6, 07 + pxor xmm5, xmm6 ; 23C3 _ 66: 0F EF. EE + movdqa xmm6, xmm0 ; 23C7 _ 66: 0F 6F. F0 + pxor xmm1, xmm2 ; 23CB _ 66: 0F EF. CA + pxor xmm1, xmm0 ; 23CF _ 66: 0F EF. C8 + movdqa xmm7, xmm1 ; 23D3 _ 66: 0F 6F. F9 + pslld xmm6, 25 ; 23D7 _ 66: 0F 72. F6, 19 + psrld xmm0, 7 ; 23DC _ 66: 0F 72. D0, 07 + por xmm6, xmm0 ; 23E1 _ 66: 0F EB. F0 + movdqa xmm0, xmm2 ; 23E5 _ 66: 0F 6F. C2 + psrld xmm2, 1 ; 23E9 _ 66: 0F 72. D2, 01 + pxor xmm6, xmm5 ; 23EE _ 66: 0F EF. F5 + pslld xmm0, 31 ; 23F2 _ 66: 0F 72. F0, 1F + por xmm0, xmm2 ; 23F7 _ 66: 0F EB. C2 + movdqa xmm2, xmm5 ; 23FB _ 66: 0F 6F. D5 + pslld xmm7, 3 ; 23FF _ 66: 0F 72. F7, 03 + pxor xmm6, xmm7 ; 2404 _ 66: 0F EF. F7 + movdqa xmm7, xmm1 ; 2408 _ 66: 0F 6F. F9 + pxor xmm0, xmm1 ; 240C _ 66: 0F EF. C1 + pxor xmm0, xmm5 ; 2410 _ 66: 0F EF. C5 + pslld xmm2, 29 ; 2414 _ 66: 0F 72. F2, 1D + psrld xmm5, 3 ; 2419 _ 66: 0F 72. D5, 03 + por xmm2, xmm5 ; 241E _ 66: 0F EB. D5 + pslld xmm7, 19 ; 2422 _ 66: 0F 72. F7, 13 + psrld xmm1, 13 ; 2427 _ 66: 0F 72. D1, 0D + por xmm7, xmm1 ; 242C _ 66: 0F EB. F9 + pxor xmm2, xmm6 ; 2430 _ 66: 0F EF. D6 + pxor xmm6, xmm7 ; 2434 _ 66: 0F EF. F7 + movdqa xmm5, xmm6 ; 2438 _ 66: 0F 6F. EE + pand xmm5, xmm2 ; 243C _ 66: 0F DB. EA + pxor xmm5, xmm0 ; 2440 _ 66: 0F EF. E8 + por xmm0, xmm2 ; 2444 _ 66: 0F EB. C2 + pxor xmm0, xmm6 ; 2448 _ 66: 0F EF. C6 + pand xmm6, xmm5 ; 244C _ 66: 0F DB. F5 + pxor xmm2, xmm5 ; 2450 _ 66: 0F EF. D5 + pand xmm6, xmm7 ; 2454 _ 66: 0F DB. F7 + pxor xmm6, xmm2 ; 2458 _ 66: 0F EF. F2 + pand xmm2, xmm0 ; 245C _ 66: 0F DB. D0 + por xmm2, xmm7 ; 2460 _ 66: 0F EB. D7 + pxor xmm5, xmm3 ; 2464 _ 66: 0F EF. EB + movdqa xmm1, xmm5 ; 2468 _ 66: 0F 6F. CD + pxor xmm2, xmm5 ; 246C _ 66: 0F EF. D5 + pxor xmm7, xmm5 ; 2470 _ 66: 0F EF. FD + pxor xmm1, xmm6 ; 2474 _ 66: 0F EF. CE + pand xmm7, xmm0 ; 2478 _ 66: 0F DB. F8 + movd xmm5, dword [ecx+2AA4H] ; 247C _ 66: 0F 6E. A9, 00002AA4 + pxor xmm1, xmm7 ; 2484 _ 66: 0F EF. CF + movd xmm7, dword [ecx+2AA0H] ; 2488 _ 66: 0F 6E. B9, 00002AA0 + pshufd xmm7, xmm7, 0 ; 2490 _ 66: 0F 70. FF, 00 + pxor xmm0, xmm7 ; 2495 _ 66: 0F EF. C7 + pshufd xmm7, xmm5, 0 ; 2499 _ 66: 0F 70. FD, 00 + pxor xmm6, xmm7 ; 249E _ 66: 0F EF. F7 + movd xmm5, dword [ecx+2AA8H] ; 24A2 _ 66: 0F 6E. A9, 00002AA8 + pshufd xmm7, xmm5, 0 ; 24AA _ 66: 0F 70. FD, 00 + movd xmm5, dword [ecx+2AACH] ; 24AF _ 66: 0F 6E. A9, 00002AAC + pxor xmm2, xmm7 ; 24B7 _ 66: 0F EF. D7 + pshufd xmm7, xmm5, 0 ; 24BB _ 66: 0F 70. FD, 00 + movdqa xmm5, xmm2 ; 24C0 _ 66: 0F 6F. EA + pxor xmm1, xmm7 ; 24C4 _ 66: 0F EF. CF + movdqa xmm7, xmm1 ; 24C8 _ 66: 0F 6F. F9 + pslld xmm5, 10 ; 24CC _ 66: 0F 72. F5, 0A + psrld xmm2, 22 ; 24D1 _ 66: 0F 72. D2, 16 + por xmm5, xmm2 ; 24D6 _ 66: 0F EB. EA + movdqa xmm2, xmm0 ; 24DA _ 66: 0F 6F. D0 + psrld xmm0, 5 ; 24DE _ 66: 0F 72. D0, 05 + pxor xmm5, xmm1 ; 24E3 _ 66: 0F EF. E9 + pslld xmm2, 27 ; 24E7 _ 66: 0F 72. F2, 1B + por xmm2, xmm0 ; 24EC _ 66: 0F EB. D0 + movdqa xmm0, xmm6 ; 24F0 _ 66: 0F 6F. C6 + pxor xmm2, xmm6 ; 24F4 _ 66: 0F EF. D6 + pxor xmm2, xmm1 ; 24F8 _ 66: 0F EF. D1 + pslld xmm0, 7 ; 24FC _ 66: 0F 72. F0, 07 + pxor xmm5, xmm0 ; 2501 _ 66: 0F EF. E8 + pslld xmm7, 25 ; 2505 _ 66: 0F 72. F7, 19 + psrld xmm1, 7 ; 250A _ 66: 0F 72. 
D1, 07 + por xmm7, xmm1 ; 250F _ 66: 0F EB. F9 + movdqa xmm1, xmm6 ; 2513 _ 66: 0F 6F. CE + psrld xmm6, 1 ; 2517 _ 66: 0F 72. D6, 01 + pxor xmm7, xmm5 ; 251C _ 66: 0F EF. FD + pslld xmm1, 31 ; 2520 _ 66: 0F 72. F1, 1F + por xmm1, xmm6 ; 2525 _ 66: 0F EB. CE + movdqa xmm6, xmm2 ; 2529 _ 66: 0F 6F. F2 + pxor xmm1, xmm2 ; 252D _ 66: 0F EF. CA + pxor xmm1, xmm5 ; 2531 _ 66: 0F EF. CD + pslld xmm6, 3 ; 2535 _ 66: 0F 72. F6, 03 + pxor xmm7, xmm6 ; 253A _ 66: 0F EF. FE + movdqa xmm6, xmm5 ; 253E _ 66: 0F 6F. F5 + psrld xmm5, 3 ; 2542 _ 66: 0F 72. D5, 03 + pslld xmm6, 29 ; 2547 _ 66: 0F 72. F6, 1D + por xmm6, xmm5 ; 254C _ 66: 0F EB. F5 + movdqa xmm5, xmm2 ; 2550 _ 66: 0F 6F. EA + psrld xmm2, 13 ; 2554 _ 66: 0F 72. D2, 0D + pslld xmm5, 19 ; 2559 _ 66: 0F 72. F5, 13 + por xmm5, xmm2 ; 255E _ 66: 0F EB. EA + movdqa xmm2, xmm1 ; 2562 _ 66: 0F 6F. D1 + pxor xmm1, xmm6 ; 2566 _ 66: 0F EF. CE + pxor xmm2, xmm7 ; 256A _ 66: 0F EF. D7 + pand xmm7, xmm2 ; 256E _ 66: 0F DB. FA + pxor xmm7, xmm5 ; 2572 _ 66: 0F EF. FD + por xmm5, xmm2 ; 2576 _ 66: 0F EB. EA + pxor xmm6, xmm7 ; 257A _ 66: 0F EF. F7 + pxor xmm5, xmm1 ; 257E _ 66: 0F EF. E9 + por xmm5, xmm6 ; 2582 _ 66: 0F EB. EE + pxor xmm2, xmm7 ; 2586 _ 66: 0F EF. D7 + pxor xmm5, xmm2 ; 258A _ 66: 0F EF. EA + por xmm2, xmm7 ; 258E _ 66: 0F EB. D7 + pxor xmm2, xmm5 ; 2592 _ 66: 0F EF. D5 + pxor xmm1, xmm3 ; 2596 _ 66: 0F EF. CB + pxor xmm1, xmm2 ; 259A _ 66: 0F EF. CA + por xmm2, xmm5 ; 259E _ 66: 0F EB. D5 + pxor xmm2, xmm5 ; 25A2 _ 66: 0F EF. D5 + por xmm2, xmm1 ; 25A6 _ 66: 0F EB. D1 + pxor xmm7, xmm2 ; 25AA _ 66: 0F EF. FA + movd xmm2, dword [ecx+2A90H] ; 25AE _ 66: 0F 6E. 91, 00002A90 + pshufd xmm0, xmm2, 0 ; 25B6 _ 66: 0F 70. C2, 00 + pxor xmm1, xmm0 ; 25BB _ 66: 0F EF. C8 + movd xmm2, dword [ecx+2A94H] ; 25BF _ 66: 0F 6E. 91, 00002A94 + pshufd xmm0, xmm2, 0 ; 25C7 _ 66: 0F 70. C2, 00 + pxor xmm5, xmm0 ; 25CC _ 66: 0F EF. E8 + movd xmm2, dword [ecx+2A98H] ; 25D0 _ 66: 0F 6E. 91, 00002A98 + pshufd xmm0, xmm2, 0 ; 25D8 _ 66: 0F 70. C2, 00 + pxor xmm7, xmm0 ; 25DD _ 66: 0F EF. F8 + movd xmm2, dword [ecx+2A9CH] ; 25E1 _ 66: 0F 6E. 91, 00002A9C + pshufd xmm0, xmm2, 0 ; 25E9 _ 66: 0F 70. C2, 00 + movdqa xmm2, xmm7 ; 25EE _ 66: 0F 6F. D7 + pxor xmm6, xmm0 ; 25F2 _ 66: 0F EF. F0 + movdqa xmm0, xmm6 ; 25F6 _ 66: 0F 6F. C6 + pslld xmm2, 10 ; 25FA _ 66: 0F 72. F2, 0A + psrld xmm7, 22 ; 25FF _ 66: 0F 72. D7, 16 + por xmm2, xmm7 ; 2604 _ 66: 0F EB. D7 + movdqa xmm7, xmm1 ; 2608 _ 66: 0F 6F. F9 + psrld xmm1, 5 ; 260C _ 66: 0F 72. D1, 05 + pxor xmm2, xmm6 ; 2611 _ 66: 0F EF. D6 + pslld xmm7, 27 ; 2615 _ 66: 0F 72. F7, 1B + por xmm7, xmm1 ; 261A _ 66: 0F EB. F9 + movdqa xmm1, xmm5 ; 261E _ 66: 0F 6F. CD + pxor xmm7, xmm5 ; 2622 _ 66: 0F EF. FD + pxor xmm7, xmm6 ; 2626 _ 66: 0F EF. FE + pslld xmm1, 7 ; 262A _ 66: 0F 72. F1, 07 + pxor xmm2, xmm1 ; 262F _ 66: 0F EF. D1 + movdqa xmm1, xmm5 ; 2633 _ 66: 0F 6F. CD + pslld xmm0, 25 ; 2637 _ 66: 0F 72. F0, 19 + psrld xmm6, 7 ; 263C _ 66: 0F 72. D6, 07 + por xmm0, xmm6 ; 2641 _ 66: 0F EB. C6 + pslld xmm1, 31 ; 2645 _ 66: 0F 72. F1, 1F + psrld xmm5, 1 ; 264A _ 66: 0F 72. D5, 01 + por xmm1, xmm5 ; 264F _ 66: 0F EB. CD + movdqa xmm5, xmm7 ; 2653 _ 66: 0F 6F. EF + pxor xmm0, xmm2 ; 2657 _ 66: 0F EF. C2 + pxor xmm1, xmm7 ; 265B _ 66: 0F EF. CF + pslld xmm5, 3 ; 265F _ 66: 0F 72. F5, 03 + pxor xmm0, xmm5 ; 2664 _ 66: 0F EF. C5 + movdqa xmm5, xmm2 ; 2668 _ 66: 0F 6F. EA + pxor xmm1, xmm2 ; 266C _ 66: 0F EF. CA + movdqa xmm6, xmm1 ; 2670 _ 66: 0F 6F. F1 + pslld xmm5, 29 ; 2674 _ 66: 0F 72. F5, 1D + psrld xmm2, 3 ; 2679 _ 66: 0F 72. 
D2, 03 + por xmm5, xmm2 ; 267E _ 66: 0F EB. EA + movdqa xmm2, xmm7 ; 2682 _ 66: 0F 6F. D7 + psrld xmm7, 13 ; 2686 _ 66: 0F 72. D7, 0D + pxor xmm5, xmm3 ; 268B _ 66: 0F EF. EB + pslld xmm2, 19 ; 268F _ 66: 0F 72. F2, 13 + por xmm2, xmm7 ; 2694 _ 66: 0F EB. D7 + movdqa xmm7, oword [esp+60H] ; 2698 _ 66: 0F 6F. 7C 24, 60 + por xmm6, xmm2 ; 269E _ 66: 0F EB. F2 + pxor xmm1, xmm3 ; 26A2 _ 66: 0F EF. CB + pxor xmm6, xmm5 ; 26A6 _ 66: 0F EF. F5 + movd xmm3, dword [ecx+2A80H] ; 26AA _ 66: 0F 6E. 99, 00002A80 + por xmm5, xmm1 ; 26B2 _ 66: 0F EB. E9 + pxor xmm6, xmm0 ; 26B6 _ 66: 0F EF. F0 + pxor xmm2, xmm1 ; 26BA _ 66: 0F EF. D1 + pxor xmm5, xmm2 ; 26BE _ 66: 0F EF. EA + pand xmm2, xmm0 ; 26C2 _ 66: 0F DB. D0 + pxor xmm1, xmm2 ; 26C6 _ 66: 0F EF. CA + por xmm2, xmm6 ; 26CA _ 66: 0F EB. D6 + pxor xmm2, xmm5 ; 26CE _ 66: 0F EF. D5 + pxor xmm0, xmm1 ; 26D2 _ 66: 0F EF. C1 + pxor xmm5, xmm6 ; 26D6 _ 66: 0F EF. EE + pxor xmm0, xmm2 ; 26DA _ 66: 0F EF. C2 + pxor xmm0, xmm6 ; 26DE _ 66: 0F EF. C6 + pand xmm5, xmm0 ; 26E2 _ 66: 0F DB. E8 + pxor xmm1, xmm5 ; 26E6 _ 66: 0F EF. CD + pshufd xmm3, xmm3, 0 ; 26EA _ 66: 0F 70. DB, 00 + pxor xmm2, xmm3 ; 26EF _ 66: 0F EF. D3 + add esi, 64 ; 26F3 _ 83. C6, 40 + inc eax ; 26F6 _ 40 + movd xmm5, dword [ecx+2A84H] ; 26F7 _ 66: 0F 6E. A9, 00002A84 + pshufd xmm3, xmm5, 0 ; 26FF _ 66: 0F 70. DD, 00 + pxor xmm1, xmm3 ; 2704 _ 66: 0F EF. CB + movd xmm5, dword [ecx+2A88H] ; 2708 _ 66: 0F 6E. A9, 00002A88 + pshufd xmm3, xmm5, 0 ; 2710 _ 66: 0F 70. DD, 00 + movd xmm5, dword [ecx+2A8CH] ; 2715 _ 66: 0F 6E. A9, 00002A8C + pxor xmm6, xmm3 ; 271D _ 66: 0F EF. F3 + pshufd xmm3, xmm5, 0 ; 2721 _ 66: 0F 70. DD, 00 + movdqa xmm5, xmm2 ; 2726 _ 66: 0F 6F. EA + pxor xmm0, xmm3 ; 272A _ 66: 0F EF. C3 + movdqa xmm3, xmm6 ; 272E _ 66: 0F 6F. DE + punpckldq xmm5, xmm1 ; 2732 _ 66: 0F 62. E9 + punpckhdq xmm2, xmm1 ; 2736 _ 66: 0F 6A. D1 + movdqa xmm1, xmm2 ; 273A _ 66: 0F 6F. CA + punpckldq xmm3, xmm0 ; 273E _ 66: 0F 62. D8 + punpckhdq xmm6, xmm0 ; 2742 _ 66: 0F 6A. F0 + movdqa xmm0, xmm5 ; 2746 _ 66: 0F 6F. C5 + punpckhqdq xmm5, xmm3 ; 274A _ 66: 0F 6D. EB + punpcklqdq xmm1, xmm6 ; 274E _ 66: 0F 6C. CE + pxor xmm5, oword [esp+40H] ; 2752 _ 66: 0F EF. 6C 24, 40 + pxor xmm1, oword [esp+50H] ; 2758 _ 66: 0F EF. 4C 24, 50 + movdqu oword [edx+10H], xmm5 ; 275E _ F3: 0F 7F. 6A, 10 + movdqu oword [edx+20H], xmm1 ; 2763 _ F3: 0F 7F. 4A, 20 + punpcklqdq xmm0, xmm3 ; 2768 _ 66: 0F 6C. C3 + punpckhqdq xmm2, xmm6 ; 276C _ 66: 0F 6D. D6 + pxor xmm0, xmm4 ; 2770 _ 66: 0F EF. C4 + movdqu oword [edx], xmm0 ; 2774 _ F3: 0F 7F. 02 + movdqa xmm4, xmm7 ; 2778 _ 66: 0F 6F. E7 + pxor xmm2, xmm7 ; 277C _ 66: 0F EF. D7 + movdqu oword [edx+30H], xmm2 ; 2780 _ F3: 0F 7F. 52, 30 + movdqa xmm2, xmm7 ; 2785 _ 66: 0F 6F. D7 + psllq xmm4, 1 ; 2789 _ 66: 0F 73. F4, 01 + psraw xmm7, 8 ; 278E _ 66: 0F 71. E7, 08 + pslldq xmm2, 8 ; 2793 _ 66: 0F 73. FA, 08 + psrldq xmm2, 7 ; 2798 _ 66: 0F 73. DA, 07 + psrlq xmm2, 7 ; 279D _ 66: 0F 73. D2, 07 + por xmm4, xmm2 ; 27A2 _ 66: 0F EB. E2 + psrldq xmm7, 15 ; 27A6 _ 66: 0F 73. DF, 0F + pand xmm7, oword [esp+30H] ; 27AB _ 66: 0F DB. 7C 24, 30 + pxor xmm4, xmm7 ; 27B1 _ 66: 0F EF. E7 + add edx, 64 ; 27B5 _ 83. C2, 40 + cmp eax, 8 ; 27B8 _ 83. F8, 08 + jl ?_006 ; 27BB _ 0F 8C, FFFFD8D0 + mov dword [esp+24H], edx ; 27C1 _ 89. 54 24, 24 + mov dword [esp+28H], esi ; 27C5 _ 89. 74 24, 28 + mov esi, dword [esp+20H] ; 27C9 _ 8B. 74 24, 20 + add esi, -512 ; 27CD _ 81. C6, FFFFFE00 + jne ?_004 ; 27D3 _ 0F 85, FFFFD886 + add esp, 116 ; 27D9 _ 83. 
C4, 74
+ pop ebx ; 27DC _ 5B
+ pop esi ; 27DD _ 5E
+ pop edi ; 27DE _ 5F
+ mov esp, ebp ; 27DF _ 8B. E5
+ pop ebp ; 27E1 _ 5D
+ ret 24 ; 27E2 _ C2, 0018
+; _xts_serpent_sse2_decrypt@24 End of function
+
+; Filling space: 0BH
+; Filler type: lea with same source and destination
+; db 8DH, 74H, 26H, 00H, 8DH, 0BCH, 27H, 00H
+; db 00H, 00H, 00H
+
+ALIGN 16
+
+
+_xts_serpent_sse2_available@0:; Function begin
+ push esi ; 0000 _ 56
+ push ebx ; 0001 _ 53
+ sub esp, 16 ; 0002 _ 83. EC, 10
+ mov eax, 1 ; 0005 _ B8, 00000001
+ lea esi, [esp] ; 000A _ 8D. 34 24
+ cpuid ; 000D _ 0F A2
+ mov dword [esi], eax ; 000F _ 89. 06
+ mov dword [esi+4H], ebx ; 0011 _ 89. 5E, 04
+ mov dword [esi+8H], ecx ; 0014 _ 89. 4E, 08
+ mov dword [esi+0CH], edx ; 0017 _ 89. 56, 0C
+ mov eax, dword [esp+0CH] ; 001A _ 8B. 44 24, 0C
+ and eax, 4000000H ; 001E _ 25, 04000000
+ shr eax, 26 ; 0023 _ C1. E8, 1A
+ add esp, 16 ; 0026 _ 83. C4, 10
+ pop ebx ; 0029 _ 5B
+ pop esi ; 002A _ 5E
+ ret ; 002B _ C3
+; _xts_serpent_sse2_available@0 End of function
+
+; Filling space: 4H
+; Filler type: lea with same source and destination
+; db 8DH, 74H, 26H, 00H
+
+ALIGN 8
+
+
diff --git a/ImBoxEnclave/crypto_fast/serpent.c b/ImBoxEnclave/crypto_fast/serpent.c
new file mode 100644
index 0000000..0b120f8
--- /dev/null
+++ b/ImBoxEnclave/crypto_fast/serpent.c
@@ -0,0 +1,424 @@
+/*
+ * Cryptographic API.
+ *
+ * Serpent Cipher Algorithm.
+ *
+ * Copyright (C) 2002 Dag Arne Osvik
+ *               2003 Herbert Valerio Riedel
+ *                    Wei Dai
+ *
+ * Added tnepres support: Ruben Jesus Garcia Hernandez, 18.10.2004
+ * Based on code by hvr
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 2 of the License, or
+ * (at your option) any later version.
+ */
+#include <string.h> /* memcpy */
+#include <intrin.h> /* _rotl, _rotr */
+#include "serpent.h"
+
+/* serpent256_set_key always reads exactly SERPENT_KEY_SIZE (32) bytes:
+ * the algorithm itself allows any key length <= 256 bits, but the key
+ * must reach this function already padded to the full 256 bits.
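+ *
+ * Round keys come from the standard Serpent recurrence
+ *   w[i] = (w[i-8] ^ w[i-5] ^ w[i-3] ^ w[i-1] ^ PHI ^ i) <<< 11
+ * (the keyiter() macro below), after which each group of four words is
+ * passed through one of the eight S-boxes.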
+ */ + +#define PHI 0x9e3779b9UL + +#define keyiter(a,b,c,d,i,j) \ + b ^= d; b ^= c; b ^= a; b ^= PHI ^ i; b = _rotl(b,11); k[j] = b; + +#define loadkeys(x0,x1,x2,x3,i) \ + x0=k[i]; x1=k[i+1]; x2=k[i+2]; x3=k[i+3]; + +#define storekeys(x0,x1,x2,x3,i) \ + k[i]=x0; k[i+1]=x1; k[i+2]=x2; k[i+3]=x3; + +#define K(x0,x1,x2,x3,i) \ + x3 ^= k[4*(i)+3]; x2 ^= k[4*(i)+2]; \ + x1 ^= k[4*(i)+1]; x0 ^= k[4*(i)+0]; + +#define LK(x0,x1,x2,x3,x4,i) \ + x0=_rotl(x0,13);\ + x2=_rotl(x2,3); x1 ^= x0; x4 = x0 << 3; \ + x3 ^= x2; x1 ^= x2; \ + x1=_rotl(x1,1); x3 ^= x4; \ + x3=_rotl(x3,7); x4 = x1; \ + x0 ^= x1; x4 <<= 7; x2 ^= x3; \ + x0 ^= x3; x2 ^= x4; x3 ^= k[4*i+3]; \ + x1 ^= k[4*i+1]; x0=_rotl(x0,5); x2=_rotl(x2,22);\ + x0 ^= k[4*i+0]; x2 ^= k[4*i+2]; + +#define KL(x0,x1,x2,x3,x4,i) \ + x0 ^= k[4*i+0]; x1 ^= k[4*i+1]; x2 ^= k[4*i+2]; \ + x3 ^= k[4*i+3]; x0=_rotr(x0,5); x2=_rotr(x2,22);\ + x4 = x1; x2 ^= x3; x0 ^= x3; \ + x4 <<= 7; x0 ^= x1; x1=_rotr(x1,1); \ + x2 ^= x4; x3=_rotr(x3,7); x4 = x0 << 3; \ + x1 ^= x0; x3 ^= x4; x0=_rotr(x0,13);\ + x1 ^= x2; x3 ^= x2; x2=_rotr(x2,3); + +#define S0(x0,x1,x2,x3,x4) \ + x4 = x3; \ + x3 |= x0; x0 ^= x4; x4 ^= x2; \ + x4 =~ x4; x3 ^= x1; x1 &= x0; \ + x1 ^= x4; x2 ^= x0; x0 ^= x3; \ + x4 |= x0; x0 ^= x2; x2 &= x1; \ + x3 ^= x2; x1 =~ x1; x2 ^= x4; \ + x1 ^= x2; + +#define S1(x0,x1,x2,x3,x4) \ + x4 = x1; \ + x1 ^= x0; x0 ^= x3; x3 =~ x3; \ + x4 &= x1; x0 |= x1; x3 ^= x2; \ + x0 ^= x3; x1 ^= x3; x3 ^= x4; \ + x1 |= x4; x4 ^= x2; x2 &= x0; \ + x2 ^= x1; x1 |= x0; x0 =~ x0; \ + x0 ^= x2; x4 ^= x1; + +#define S2(x0,x1,x2,x3,x4) \ + x3 =~ x3; \ + x1 ^= x0; x4 = x0; x0 &= x2; \ + x0 ^= x3; x3 |= x4; x2 ^= x1; \ + x3 ^= x1; x1 &= x0; x0 ^= x2; \ + x2 &= x3; x3 |= x1; x0 =~ x0; \ + x3 ^= x0; x4 ^= x0; x0 ^= x2; \ + x1 |= x2; + +#define S3(x0,x1,x2,x3,x4) \ + x4 = x1; \ + x1 ^= x3; x3 |= x0; x4 &= x0; \ + x0 ^= x2; x2 ^= x1; x1 &= x3; \ + x2 ^= x3; x0 |= x4; x4 ^= x3; \ + x1 ^= x0; x0 &= x3; x3 &= x4; \ + x3 ^= x2; x4 |= x1; x2 &= x1; \ + x4 ^= x3; x0 ^= x3; x3 ^= x2; + +#define S4(x0,x1,x2,x3,x4) \ + x4 = x3; \ + x3 &= x0; x0 ^= x4; \ + x3 ^= x2; x2 |= x4; x0 ^= x1; \ + x4 ^= x3; x2 |= x0; \ + x2 ^= x1; x1 &= x0; \ + x1 ^= x4; x4 &= x2; x2 ^= x3; \ + x4 ^= x0; x3 |= x1; x1 =~ x1; \ + x3 ^= x0; + +#define S5(x0,x1,x2,x3,x4) \ + x4 = x1; x1 |= x0; \ + x2 ^= x1; x3 =~ x3; x4 ^= x0; \ + x0 ^= x2; x1 &= x4; x4 |= x3; \ + x4 ^= x0; x0 &= x3; x1 ^= x3; \ + x3 ^= x2; x0 ^= x1; x2 &= x4; \ + x1 ^= x2; x2 &= x0; \ + x3 ^= x2; + +#define S6(x0,x1,x2,x3,x4) \ + x4 = x1; \ + x3 ^= x0; x1 ^= x2; x2 ^= x0; \ + x0 &= x3; x1 |= x3; x4 =~ x4; \ + x0 ^= x1; x1 ^= x2; \ + x3 ^= x4; x4 ^= x0; x2 &= x0; \ + x4 ^= x1; x2 ^= x3; x3 &= x1; \ + x3 ^= x0; x1 ^= x2; + +#define S7(x0,x1,x2,x3,x4) \ + x1 =~ x1; \ + x4 = x1; x0 =~ x0; x1 &= x2; \ + x1 ^= x3; x3 |= x4; x4 ^= x2; \ + x2 ^= x3; x3 ^= x0; x0 |= x1; \ + x2 &= x0; x0 ^= x4; x4 ^= x3; \ + x3 &= x0; x4 ^= x1; \ + x2 ^= x4; x3 ^= x1; x4 |= x0; \ + x4 ^= x1; + +#define SI0(x0,x1,x2,x3,x4) \ + x4 = x3; x1 ^= x0; \ + x3 |= x1; x4 ^= x1; x0 =~ x0; \ + x2 ^= x3; x3 ^= x0; x0 &= x1; \ + x0 ^= x2; x2 &= x3; x3 ^= x4; \ + x2 ^= x3; x1 ^= x3; x3 &= x0; \ + x1 ^= x0; x0 ^= x2; x4 ^= x3; + +#define SI1(x0,x1,x2,x3,x4) \ + x1 ^= x3; x4 = x0; \ + x0 ^= x2; x2 =~ x2; x4 |= x1; \ + x4 ^= x3; x3 &= x1; x1 ^= x2; \ + x2 &= x4; x4 ^= x1; x1 |= x3; \ + x3 ^= x0; x2 ^= x0; x0 |= x4; \ + x2 ^= x4; x1 ^= x0; \ + x4 ^= x1; + +#define SI2(x0,x1,x2,x3,x4) \ + x2 ^= x1; x4 = x3; x3 =~ x3; \ + x3 |= x2; x2 ^= x4; x4 ^= x0; \ + x3 ^= x1; x1 |= x2; x2 ^= x0; \ + x1 ^= x4; x4 
|= x3; x2 ^= x3; \ + x4 ^= x2; x2 &= x1; \ + x2 ^= x3; x3 ^= x4; x4 ^= x0; + +#define SI3(x0,x1,x2,x3,x4) \ + x2 ^= x1; \ + x4 = x1; x1 &= x2; \ + x1 ^= x0; x0 |= x4; x4 ^= x3; \ + x0 ^= x3; x3 |= x1; x1 ^= x2; \ + x1 ^= x3; x0 ^= x2; x2 ^= x3; \ + x3 &= x1; x1 ^= x0; x0 &= x2; \ + x4 ^= x3; x3 ^= x0; x0 ^= x1; + +#define SI4(x0,x1,x2,x3,x4) \ + x2 ^= x3; x4 = x0; x0 &= x1; \ + x0 ^= x2; x2 |= x3; x4 =~ x4; \ + x1 ^= x0; x0 ^= x2; x2 &= x4; \ + x2 ^= x0; x0 |= x4; \ + x0 ^= x3; x3 &= x2; \ + x4 ^= x3; x3 ^= x1; x1 &= x0; \ + x4 ^= x1; x0 ^= x3; + +#define SI5(x0,x1,x2,x3,x4) \ + x4 = x1; x1 |= x2; \ + x2 ^= x4; x1 ^= x3; x3 &= x4; \ + x2 ^= x3; x3 |= x0; x0 =~ x0; \ + x3 ^= x2; x2 |= x0; x4 ^= x1; \ + x2 ^= x4; x4 &= x0; x0 ^= x1; \ + x1 ^= x3; x0 &= x2; x2 ^= x3; \ + x0 ^= x2; x2 ^= x4; x4 ^= x3; + +#define SI6(x0,x1,x2,x3,x4) \ + x0 ^= x2; \ + x4 = x0; x0 &= x3; x2 ^= x3; \ + x0 ^= x2; x3 ^= x1; x2 |= x4; \ + x2 ^= x3; x3 &= x0; x0 =~ x0; \ + x3 ^= x1; x1 &= x2; x4 ^= x0; \ + x3 ^= x4; x4 ^= x2; x0 ^= x1; \ + x2 ^= x0; + +#define SI7(x0,x1,x2,x3,x4) \ + x4 = x3; x3 &= x0; x0 ^= x2; \ + x2 |= x4; x4 ^= x1; x0 =~ x0; \ + x1 |= x3; x4 ^= x0; x0 &= x2; \ + x0 ^= x1; x1 &= x2; x3 ^= x2; \ + x4 ^= x3; x2 &= x3; x3 |= x0; \ + x1 ^= x4; x3 ^= x4; x4 &= x0; \ + x4 ^= x2; + +void _stdcall serpent256_set_key(const unsigned char *key, serpent256_key *skey) +{ + unsigned long *k = skey->expkey; + unsigned long r0,r1,r2,r3,r4; + + /* Copy key, add padding */ + memcpy(k, key, SERPENT_KEY_SIZE); + + /* Expand key using polynomial */ + r0 = k[3]; r1 = k[4]; r2 = k[5]; + r3 = k[6]; r4 = k[7]; + + keyiter(k[0],r0,r4,r2,0,0); + keyiter(k[1],r1,r0,r3,1,1); + keyiter(k[2],r2,r1,r4,2,2); + keyiter(k[3],r3,r2,r0,3,3); + keyiter(k[4],r4,r3,r1,4,4); + keyiter(k[5],r0,r4,r2,5,5); + keyiter(k[6],r1,r0,r3,6,6); + keyiter(k[7],r2,r1,r4,7,7); + + keyiter(k[ 0],r3,r2,r0, 8, 8); keyiter(k[ 1],r4,r3,r1, 9, 9); + keyiter(k[ 2],r0,r4,r2, 10, 10); keyiter(k[ 3],r1,r0,r3, 11, 11); + keyiter(k[ 4],r2,r1,r4, 12, 12); keyiter(k[ 5],r3,r2,r0, 13, 13); + keyiter(k[ 6],r4,r3,r1, 14, 14); keyiter(k[ 7],r0,r4,r2, 15, 15); + keyiter(k[ 8],r1,r0,r3, 16, 16); keyiter(k[ 9],r2,r1,r4, 17, 17); + keyiter(k[ 10],r3,r2,r0, 18, 18); keyiter(k[ 11],r4,r3,r1, 19, 19); + keyiter(k[ 12],r0,r4,r2, 20, 20); keyiter(k[ 13],r1,r0,r3, 21, 21); + keyiter(k[ 14],r2,r1,r4, 22, 22); keyiter(k[ 15],r3,r2,r0, 23, 23); + keyiter(k[ 16],r4,r3,r1, 24, 24); keyiter(k[ 17],r0,r4,r2, 25, 25); + keyiter(k[ 18],r1,r0,r3, 26, 26); keyiter(k[ 19],r2,r1,r4, 27, 27); + keyiter(k[ 20],r3,r2,r0, 28, 28); keyiter(k[ 21],r4,r3,r1, 29, 29); + keyiter(k[ 22],r0,r4,r2, 30, 30); keyiter(k[ 23],r1,r0,r3, 31, 31); + + k += 50; + + keyiter(k[-26],r2,r1,r4, 32,-18); keyiter(k[-25],r3,r2,r0, 33,-17); + keyiter(k[-24],r4,r3,r1, 34,-16); keyiter(k[-23],r0,r4,r2, 35,-15); + keyiter(k[-22],r1,r0,r3, 36,-14); keyiter(k[-21],r2,r1,r4, 37,-13); + keyiter(k[-20],r3,r2,r0, 38,-12); keyiter(k[-19],r4,r3,r1, 39,-11); + keyiter(k[-18],r0,r4,r2, 40,-10); keyiter(k[-17],r1,r0,r3, 41, -9); + keyiter(k[-16],r2,r1,r4, 42, -8); keyiter(k[-15],r3,r2,r0, 43, -7); + keyiter(k[-14],r4,r3,r1, 44, -6); keyiter(k[-13],r0,r4,r2, 45, -5); + keyiter(k[-12],r1,r0,r3, 46, -4); keyiter(k[-11],r2,r1,r4, 47, -3); + keyiter(k[-10],r3,r2,r0, 48, -2); keyiter(k[ -9],r4,r3,r1, 49, -1); + keyiter(k[ -8],r0,r4,r2, 50, 0); keyiter(k[ -7],r1,r0,r3, 51, 1); + keyiter(k[ -6],r2,r1,r4, 52, 2); keyiter(k[ -5],r3,r2,r0, 53, 3); + keyiter(k[ -4],r4,r3,r1, 54, 4); keyiter(k[ -3],r0,r4,r2, 55, 5); + keyiter(k[ -2],r1,r0,r3, 56, 
6); keyiter(k[ -1],r2,r1,r4, 57, 7); + keyiter(k[ 0],r3,r2,r0, 58, 8); keyiter(k[ 1],r4,r3,r1, 59, 9); + keyiter(k[ 2],r0,r4,r2, 60, 10); keyiter(k[ 3],r1,r0,r3, 61, 11); + keyiter(k[ 4],r2,r1,r4, 62, 12); keyiter(k[ 5],r3,r2,r0, 63, 13); + keyiter(k[ 6],r4,r3,r1, 64, 14); keyiter(k[ 7],r0,r4,r2, 65, 15); + keyiter(k[ 8],r1,r0,r3, 66, 16); keyiter(k[ 9],r2,r1,r4, 67, 17); + keyiter(k[ 10],r3,r2,r0, 68, 18); keyiter(k[ 11],r4,r3,r1, 69, 19); + keyiter(k[ 12],r0,r4,r2, 70, 20); keyiter(k[ 13],r1,r0,r3, 71, 21); + keyiter(k[ 14],r2,r1,r4, 72, 22); keyiter(k[ 15],r3,r2,r0, 73, 23); + keyiter(k[ 16],r4,r3,r1, 74, 24); keyiter(k[ 17],r0,r4,r2, 75, 25); + keyiter(k[ 18],r1,r0,r3, 76, 26); keyiter(k[ 19],r2,r1,r4, 77, 27); + keyiter(k[ 20],r3,r2,r0, 78, 28); keyiter(k[ 21],r4,r3,r1, 79, 29); + keyiter(k[ 22],r0,r4,r2, 80, 30); keyiter(k[ 23],r1,r0,r3, 81, 31); + + k += 50; + + keyiter(k[-26],r2,r1,r4, 82,-18); keyiter(k[-25],r3,r2,r0, 83,-17); + keyiter(k[-24],r4,r3,r1, 84,-16); keyiter(k[-23],r0,r4,r2, 85,-15); + keyiter(k[-22],r1,r0,r3, 86,-14); keyiter(k[-21],r2,r1,r4, 87,-13); + keyiter(k[-20],r3,r2,r0, 88,-12); keyiter(k[-19],r4,r3,r1, 89,-11); + keyiter(k[-18],r0,r4,r2, 90,-10); keyiter(k[-17],r1,r0,r3, 91, -9); + keyiter(k[-16],r2,r1,r4, 92, -8); keyiter(k[-15],r3,r2,r0, 93, -7); + keyiter(k[-14],r4,r3,r1, 94, -6); keyiter(k[-13],r0,r4,r2, 95, -5); + keyiter(k[-12],r1,r0,r3, 96, -4); keyiter(k[-11],r2,r1,r4, 97, -3); + keyiter(k[-10],r3,r2,r0, 98, -2); keyiter(k[ -9],r4,r3,r1, 99, -1); + keyiter(k[ -8],r0,r4,r2,100, 0); keyiter(k[ -7],r1,r0,r3,101, 1); + keyiter(k[ -6],r2,r1,r4,102, 2); keyiter(k[ -5],r3,r2,r0,103, 3); + keyiter(k[ -4],r4,r3,r1,104, 4); keyiter(k[ -3],r0,r4,r2,105, 5); + keyiter(k[ -2],r1,r0,r3,106, 6); keyiter(k[ -1],r2,r1,r4,107, 7); + keyiter(k[ 0],r3,r2,r0,108, 8); keyiter(k[ 1],r4,r3,r1,109, 9); + keyiter(k[ 2],r0,r4,r2,110, 10); keyiter(k[ 3],r1,r0,r3,111, 11); + keyiter(k[ 4],r2,r1,r4,112, 12); keyiter(k[ 5],r3,r2,r0,113, 13); + keyiter(k[ 6],r4,r3,r1,114, 14); keyiter(k[ 7],r0,r4,r2,115, 15); + keyiter(k[ 8],r1,r0,r3,116, 16); keyiter(k[ 9],r2,r1,r4,117, 17); + keyiter(k[ 10],r3,r2,r0,118, 18); keyiter(k[ 11],r4,r3,r1,119, 19); + keyiter(k[ 12],r0,r4,r2,120, 20); keyiter(k[ 13],r1,r0,r3,121, 21); + keyiter(k[ 14],r2,r1,r4,122, 22); keyiter(k[ 15],r3,r2,r0,123, 23); + keyiter(k[ 16],r4,r3,r1,124, 24); keyiter(k[ 17],r0,r4,r2,125, 25); + keyiter(k[ 18],r1,r0,r3,126, 26); keyiter(k[ 19],r2,r1,r4,127, 27); + keyiter(k[ 20],r3,r2,r0,128, 28); keyiter(k[ 21],r4,r3,r1,129, 29); + keyiter(k[ 22],r0,r4,r2,130, 30); keyiter(k[ 23],r1,r0,r3,131, 31); + + /* Apply S-boxes */ + + S3(r3,r4,r0,r1,r2); storekeys(r1,r2,r4,r3, 28); loadkeys(r1,r2,r4,r3, 24); + S4(r1,r2,r4,r3,r0); storekeys(r2,r4,r3,r0, 24); loadkeys(r2,r4,r3,r0, 20); + S5(r2,r4,r3,r0,r1); storekeys(r1,r2,r4,r0, 20); loadkeys(r1,r2,r4,r0, 16); + S6(r1,r2,r4,r0,r3); storekeys(r4,r3,r2,r0, 16); loadkeys(r4,r3,r2,r0, 12); + S7(r4,r3,r2,r0,r1); storekeys(r1,r2,r0,r4, 12); loadkeys(r1,r2,r0,r4, 8); + S0(r1,r2,r0,r4,r3); storekeys(r0,r2,r4,r1, 8); loadkeys(r0,r2,r4,r1, 4); + S1(r0,r2,r4,r1,r3); storekeys(r3,r4,r1,r0, 4); loadkeys(r3,r4,r1,r0, 0); + S2(r3,r4,r1,r0,r2); storekeys(r2,r4,r3,r0, 0); loadkeys(r2,r4,r3,r0, -4); + S3(r2,r4,r3,r0,r1); storekeys(r0,r1,r4,r2, -4); loadkeys(r0,r1,r4,r2, -8); + S4(r0,r1,r4,r2,r3); storekeys(r1,r4,r2,r3, -8); loadkeys(r1,r4,r2,r3,-12); + S5(r1,r4,r2,r3,r0); storekeys(r0,r1,r4,r3,-12); loadkeys(r0,r1,r4,r3,-16); + S6(r0,r1,r4,r3,r2); storekeys(r4,r2,r1,r3,-16); loadkeys(r4,r2,r1,r3,-20); 
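+ /* k still points 100 words into the expanded key here; the negative
+  * storekeys/loadkeys offsets reach the middle of the schedule, and the
+  * two k -= 50 steps below rebase the pass so it covers all 132 round-key
+  * words (33 groups of four) from the top down. */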
+ S7(r4,r2,r1,r3,r0); storekeys(r0,r1,r3,r4,-20); loadkeys(r0,r1,r3,r4,-24); + S0(r0,r1,r3,r4,r2); storekeys(r3,r1,r4,r0,-24); loadkeys(r3,r1,r4,r0,-28); + k -= 50; + S1(r3,r1,r4,r0,r2); storekeys(r2,r4,r0,r3, 22); loadkeys(r2,r4,r0,r3, 18); + S2(r2,r4,r0,r3,r1); storekeys(r1,r4,r2,r3, 18); loadkeys(r1,r4,r2,r3, 14); + S3(r1,r4,r2,r3,r0); storekeys(r3,r0,r4,r1, 14); loadkeys(r3,r0,r4,r1, 10); + S4(r3,r0,r4,r1,r2); storekeys(r0,r4,r1,r2, 10); loadkeys(r0,r4,r1,r2, 6); + S5(r0,r4,r1,r2,r3); storekeys(r3,r0,r4,r2, 6); loadkeys(r3,r0,r4,r2, 2); + S6(r3,r0,r4,r2,r1); storekeys(r4,r1,r0,r2, 2); loadkeys(r4,r1,r0,r2, -2); + S7(r4,r1,r0,r2,r3); storekeys(r3,r0,r2,r4, -2); loadkeys(r3,r0,r2,r4, -6); + S0(r3,r0,r2,r4,r1); storekeys(r2,r0,r4,r3, -6); loadkeys(r2,r0,r4,r3,-10); + S1(r2,r0,r4,r3,r1); storekeys(r1,r4,r3,r2,-10); loadkeys(r1,r4,r3,r2,-14); + S2(r1,r4,r3,r2,r0); storekeys(r0,r4,r1,r2,-14); loadkeys(r0,r4,r1,r2,-18); + S3(r0,r4,r1,r2,r3); storekeys(r2,r3,r4,r0,-18); loadkeys(r2,r3,r4,r0,-22); + k -= 50; + S4(r2,r3,r4,r0,r1); storekeys(r3,r4,r0,r1, 28); loadkeys(r3,r4,r0,r1, 24); + S5(r3,r4,r0,r1,r2); storekeys(r2,r3,r4,r1, 24); loadkeys(r2,r3,r4,r1, 20); + S6(r2,r3,r4,r1,r0); storekeys(r4,r0,r3,r1, 20); loadkeys(r4,r0,r3,r1, 16); + S7(r4,r0,r3,r1,r2); storekeys(r2,r3,r1,r4, 16); loadkeys(r2,r3,r1,r4, 12); + S0(r2,r3,r1,r4,r0); storekeys(r1,r3,r4,r2, 12); loadkeys(r1,r3,r4,r2, 8); + S1(r1,r3,r4,r2,r0); storekeys(r0,r4,r2,r1, 8); loadkeys(r0,r4,r2,r1, 4); + S2(r0,r4,r2,r1,r3); storekeys(r3,r4,r0,r1, 4); loadkeys(r3,r4,r0,r1, 0); + S3(r3,r4,r0,r1,r2); storekeys(r1,r2,r4,r3, 0); +} + +void _stdcall serpent256_encrypt(const unsigned char *in, unsigned char *out, serpent256_key *key) +{ + unsigned long *k = key->expkey; + unsigned long r0, r1, r2, r3, r4; + + r0 = ((unsigned long*)in)[0]; r1 = ((unsigned long*)in)[1]; + r2 = ((unsigned long*)in)[2]; r3 = ((unsigned long*)in)[3]; + + K(r0,r1,r2,r3,0); + S0(r0,r1,r2,r3,r4); LK(r2,r1,r3,r0,r4,1); + S1(r2,r1,r3,r0,r4); LK(r4,r3,r0,r2,r1,2); + S2(r4,r3,r0,r2,r1); LK(r1,r3,r4,r2,r0,3); + S3(r1,r3,r4,r2,r0); LK(r2,r0,r3,r1,r4,4); + S4(r2,r0,r3,r1,r4); LK(r0,r3,r1,r4,r2,5); + S5(r0,r3,r1,r4,r2); LK(r2,r0,r3,r4,r1,6); + S6(r2,r0,r3,r4,r1); LK(r3,r1,r0,r4,r2,7); + S7(r3,r1,r0,r4,r2); LK(r2,r0,r4,r3,r1,8); + S0(r2,r0,r4,r3,r1); LK(r4,r0,r3,r2,r1,9); + S1(r4,r0,r3,r2,r1); LK(r1,r3,r2,r4,r0,10); + S2(r1,r3,r2,r4,r0); LK(r0,r3,r1,r4,r2,11); + S3(r0,r3,r1,r4,r2); LK(r4,r2,r3,r0,r1,12); + S4(r4,r2,r3,r0,r1); LK(r2,r3,r0,r1,r4,13); + S5(r2,r3,r0,r1,r4); LK(r4,r2,r3,r1,r0,14); + S6(r4,r2,r3,r1,r0); LK(r3,r0,r2,r1,r4,15); + S7(r3,r0,r2,r1,r4); LK(r4,r2,r1,r3,r0,16); + S0(r4,r2,r1,r3,r0); LK(r1,r2,r3,r4,r0,17); + S1(r1,r2,r3,r4,r0); LK(r0,r3,r4,r1,r2,18); + S2(r0,r3,r4,r1,r2); LK(r2,r3,r0,r1,r4,19); + S3(r2,r3,r0,r1,r4); LK(r1,r4,r3,r2,r0,20); + S4(r1,r4,r3,r2,r0); LK(r4,r3,r2,r0,r1,21); + S5(r4,r3,r2,r0,r1); LK(r1,r4,r3,r0,r2,22); + S6(r1,r4,r3,r0,r2); LK(r3,r2,r4,r0,r1,23); + S7(r3,r2,r4,r0,r1); LK(r1,r4,r0,r3,r2,24); + S0(r1,r4,r0,r3,r2); LK(r0,r4,r3,r1,r2,25); + S1(r0,r4,r3,r1,r2); LK(r2,r3,r1,r0,r4,26); + S2(r2,r3,r1,r0,r4); LK(r4,r3,r2,r0,r1,27); + S3(r4,r3,r2,r0,r1); LK(r0,r1,r3,r4,r2,28); + S4(r0,r1,r3,r4,r2); LK(r1,r3,r4,r2,r0,29); + S5(r1,r3,r4,r2,r0); LK(r0,r1,r3,r2,r4,30); + S6(r0,r1,r3,r2,r4); LK(r3,r4,r1,r2,r0,31); + S7(r3,r4,r1,r2,r0); K(r0,r1,r2,r3,32); + + ((unsigned long*)out)[0] = r0; ((unsigned long*)out)[1] = r1; + ((unsigned long*)out)[2] = r2; ((unsigned long*)out)[3] = r3; +} + +void _stdcall serpent256_decrypt(const unsigned char *in, 
unsigned char *out, serpent256_key *key) +{ + unsigned long *k = key->expkey; + unsigned long r0, r1, r2, r3, r4; + + r0 = ((unsigned long*)in)[0]; r1 = ((unsigned long*)in)[1]; + r2 = ((unsigned long*)in)[2]; r3 = ((unsigned long*)in)[3]; + + K(r0,r1,r2,r3,32); + SI7(r0,r1,r2,r3,r4); KL(r1,r3,r0,r4,r2,31); + SI6(r1,r3,r0,r4,r2); KL(r0,r2,r4,r1,r3,30); + SI5(r0,r2,r4,r1,r3); KL(r2,r3,r0,r4,r1,29); + SI4(r2,r3,r0,r4,r1); KL(r2,r0,r1,r4,r3,28); + SI3(r2,r0,r1,r4,r3); KL(r1,r2,r3,r4,r0,27); + SI2(r1,r2,r3,r4,r0); KL(r2,r0,r4,r3,r1,26); + SI1(r2,r0,r4,r3,r1); KL(r1,r0,r4,r3,r2,25); + SI0(r1,r0,r4,r3,r2); KL(r4,r2,r0,r1,r3,24); + SI7(r4,r2,r0,r1,r3); KL(r2,r1,r4,r3,r0,23); + SI6(r2,r1,r4,r3,r0); KL(r4,r0,r3,r2,r1,22); + SI5(r4,r0,r3,r2,r1); KL(r0,r1,r4,r3,r2,21); + SI4(r0,r1,r4,r3,r2); KL(r0,r4,r2,r3,r1,20); + SI3(r0,r4,r2,r3,r1); KL(r2,r0,r1,r3,r4,19); + SI2(r2,r0,r1,r3,r4); KL(r0,r4,r3,r1,r2,18); + SI1(r0,r4,r3,r1,r2); KL(r2,r4,r3,r1,r0,17); + SI0(r2,r4,r3,r1,r0); KL(r3,r0,r4,r2,r1,16); + SI7(r3,r0,r4,r2,r1); KL(r0,r2,r3,r1,r4,15); + SI6(r0,r2,r3,r1,r4); KL(r3,r4,r1,r0,r2,14); + SI5(r3,r4,r1,r0,r2); KL(r4,r2,r3,r1,r0,13); + SI4(r4,r2,r3,r1,r0); KL(r4,r3,r0,r1,r2,12); + SI3(r4,r3,r0,r1,r2); KL(r0,r4,r2,r1,r3,11); + SI2(r0,r4,r2,r1,r3); KL(r4,r3,r1,r2,r0,10); + SI1(r4,r3,r1,r2,r0); KL(r0,r3,r1,r2,r4,9); + SI0(r0,r3,r1,r2,r4); KL(r1,r4,r3,r0,r2,8); + SI7(r1,r4,r3,r0,r2); KL(r4,r0,r1,r2,r3,7); + SI6(r4,r0,r1,r2,r3); KL(r1,r3,r2,r4,r0,6); + SI5(r1,r3,r2,r4,r0); KL(r3,r0,r1,r2,r4,5); + SI4(r3,r0,r1,r2,r4); KL(r3,r1,r4,r2,r0,4); + SI3(r3,r1,r4,r2,r0); KL(r4,r3,r0,r2,r1,3); + SI2(r4,r3,r0,r2,r1); KL(r3,r1,r2,r0,r4,2); + SI1(r3,r1,r2,r0,r4); KL(r4,r1,r2,r0,r3,1); + SI0(r4,r1,r2,r0,r3); K(r2,r3,r1,r4,0); + + ((unsigned long*)out)[0] = r2; ((unsigned long*)out)[1] = r3; + ((unsigned long*)out)[2] = r1; ((unsigned long*)out)[3] = r4; +} diff --git a/ImBoxEnclave/crypto_fast/serpent.h b/ImBoxEnclave/crypto_fast/serpent.h new file mode 100644 index 0000000..15d9223 --- /dev/null +++ b/ImBoxEnclave/crypto_fast/serpent.h @@ -0,0 +1,16 @@ +#ifndef _SERPENT_H_ +#define _SERPENT_H_ + +#define SERPENT_KEY_SIZE 32 +#define SERPENT_EXPKEY_WORDS 132 +#define SERPENT_BLOCK_SIZE 16 + +typedef struct _serpent256_key { + unsigned long expkey[SERPENT_EXPKEY_WORDS]; +} serpent256_key; + +void _stdcall serpent256_set_key(const unsigned char *key, serpent256_key *skey); +void _stdcall serpent256_encrypt(const unsigned char *in, unsigned char *out, serpent256_key *key); +void _stdcall serpent256_decrypt(const unsigned char *in, unsigned char *out, serpent256_key *key); + +#endif \ No newline at end of file diff --git a/ImBoxEnclave/crypto_fast/sha512.c b/ImBoxEnclave/crypto_fast/sha512.c new file mode 100644 index 0000000..13c2c14 --- /dev/null +++ b/ImBoxEnclave/crypto_fast/sha512.c @@ -0,0 +1,188 @@ +/* LibTomCrypt, modular cryptographic library -- Tom St Denis + * + * LibTomCrypt is a library that provides various cryptographic + * algorithms in a highly modular and flexible manner. + * + * The library is free for all purposes without any express + * guarantee it works. 
+ *
+ * Tom St Denis, tomstdenis@gmail.com, http://libtomcrypt.com
+ * modified by ntldr, http://diskcryptor.net/
+ */
+#include <string.h> /* memcpy */
+#include <intrin.h> /* _byteswap_uint64, __stosd */
+#include "sha512.h"
+
+// the K array
+static const unsigned __int64 K[80] = {
+    0x428a2f98d728ae22, 0x7137449123ef65cd, 0xb5c0fbcfec4d3b2f, 0xe9b5dba58189dbbc,
+    0x3956c25bf348b538, 0x59f111f1b605d019, 0x923f82a4af194f9b, 0xab1c5ed5da6d8118,
+    0xd807aa98a3030242, 0x12835b0145706fbe, 0x243185be4ee4b28c, 0x550c7dc3d5ffb4e2,
+    0x72be5d74f27b896f, 0x80deb1fe3b1696b1, 0x9bdc06a725c71235, 0xc19bf174cf692694,
+    0xe49b69c19ef14ad2, 0xefbe4786384f25e3, 0x0fc19dc68b8cd5b5, 0x240ca1cc77ac9c65,
+    0x2de92c6f592b0275, 0x4a7484aa6ea6e483, 0x5cb0a9dcbd41fbd4, 0x76f988da831153b5,
+    0x983e5152ee66dfab, 0xa831c66d2db43210, 0xb00327c898fb213f, 0xbf597fc7beef0ee4,
+    0xc6e00bf33da88fc2, 0xd5a79147930aa725, 0x06ca6351e003826f, 0x142929670a0e6e70,
+    0x27b70a8546d22ffc, 0x2e1b21385c26c926, 0x4d2c6dfc5ac42aed, 0x53380d139d95b3df,
+    0x650a73548baf63de, 0x766a0abb3c77b2a8, 0x81c2c92e47edaee6, 0x92722c851482353b,
+    0xa2bfe8a14cf10364, 0xa81a664bbc423001, 0xc24b8b70d0f89791, 0xc76c51a30654be30,
+    0xd192e819d6ef5218, 0xd69906245565a910, 0xf40e35855771202a, 0x106aa07032bbd1b8,
+    0x19a4c116b8d2d0c8, 0x1e376c085141ab53, 0x2748774cdf8eeb99, 0x34b0bcb5e19b48a8,
+    0x391c0cb3c5c95a63, 0x4ed8aa4ae3418acb, 0x5b9cca4f7763e373, 0x682e6ff3d6b2b8a3,
+    0x748f82ee5defb2fc, 0x78a5636f43172f60, 0x84c87814a1f0ab72, 0x8cc702081a6439ec,
+    0x90befffa23631e28, 0xa4506cebde82bde9, 0xbef9a3f7b2c67915, 0xc67178f2e372532b,
+    0xca273eceea26619c, 0xd186b8c721c0c207, 0xeada7dd6cde0eb1e, 0xf57d4f7fee6ed178,
+    0x06f067aa72176fba, 0x0a637dc5a2c898a6, 0x113f9804bef90dae, 0x1b710b35131c471b,
+    0x28db77f523047d84, 0x32caab7b40c72493, 0x3c9ebe0a15c9bebc, 0x431d67c49c100d4c,
+    0x4cc5d4becb3e42b6, 0x597f299cfc657e2a, 0x5fcb6fab3ad6faec, 0x6c44198c4a475817
+};
+
+// Various logical functions
+#define Ch(x,y,z)  (z ^ (x & (y ^ z)))
+#define Maj(x,y,z) (((x | y) & z) | (x & y))
+#define S(x, n)    (_rotr64(x, n))
+#define R(x, n)    ((unsigned __int64)(x) >> (unsigned __int64)(n))
+#define Sigma0(x)  (S(x, 28) ^ S(x, 34) ^ S(x, 39))
+#define Sigma1(x)  (S(x, 14) ^ S(x, 18) ^ S(x, 41))
+#define Gamma0(x)  (S(x, 1) ^ S(x, 8) ^ R(x, 7))
+#define Gamma1(x)  (S(x, 19) ^ S(x, 61) ^ R(x, 6))
+
+// compress 1024 bits
+static void sha512_compress(sha512_ctx *ctx, const unsigned char *buf)
+{
+    unsigned __int64 S[8], W[80], t0, t1;
+    int i;
+
+    // copy state into S
+    memcpy(S, ctx->hash, sizeof(S));
+
+    // copy 1024 bits of input into W[0..15], byte-swapped to big-endian
+    for (i = 0; i < 16; i++) {
+        W[i] = _byteswap_uint64(((unsigned __int64*)buf)[i]);
+    }
+
+    // fill W[16..79]
+    for (i = 16; i < 80; i++) {
+        W[i] = Gamma1(W[i - 2]) + W[i - 7] + Gamma0(W[i - 15]) + W[i - 16];
+    }
+
+    // Compress
+#define RND(a,b,c,d,e,f,g,h,i) \
+    t0 = h + Sigma1(e) + Ch(e, f, g) + K[i] + W[i]; \
+    t1 = Sigma0(a) + Maj(a, b, c); \
+    d += t0; \
+    h = t0 + t1;
+
+    for (i = 0; i < 80; i += 8) {
+        RND(S[0],S[1],S[2],S[3],S[4],S[5],S[6],S[7],i+0);
+        RND(S[7],S[0],S[1],S[2],S[3],S[4],S[5],S[6],i+1);
+        RND(S[6],S[7],S[0],S[1],S[2],S[3],S[4],S[5],i+2);
+        RND(S[5],S[6],S[7],S[0],S[1],S[2],S[3],S[4],i+3);
+        RND(S[4],S[5],S[6],S[7],S[0],S[1],S[2],S[3],i+4);
+        RND(S[3],S[4],S[5],S[6],S[7],S[0],S[1],S[2],i+5);
+        RND(S[2],S[3],S[4],S[5],S[6],S[7],S[0],S[1],i+6);
+        RND(S[1],S[2],S[3],S[4],S[5],S[6],S[7],S[0],i+7);
+    }
+    ctx->hash[0] += S[0]; ctx->hash[1] += S[1];
+    ctx->hash[2] += S[2]; ctx->hash[3] += S[3];
+    ctx->hash[4] += S[4]; ctx->hash[5] += S[5];
+    ctx->hash[6] += S[6]; ctx->hash[7] +=
S[7]; + + // test buffers size alignment at compile-time + static_assert( !(sizeof(S) % sizeof(unsigned long)), "sizeof must be 4 byte aligned"); + static_assert( !(sizeof(W) % sizeof(unsigned long)), "sizeof must be 4 byte aligned"); + + // prevent leaks + __stosd((unsigned long*)&S, 0, (sizeof(S) / sizeof(unsigned long))); + __stosd((unsigned long*)&W, 0, (sizeof(W) / sizeof(unsigned long))); +} + + +// Initialize the hash state +void _stdcall sha512_init(sha512_ctx *ctx) +{ + ctx->curlen = 0; + ctx->length = 0; + ctx->hash[0] = 0x6a09e667f3bcc908; + ctx->hash[1] = 0xbb67ae8584caa73b; + ctx->hash[2] = 0x3c6ef372fe94f82b; + ctx->hash[3] = 0xa54ff53a5f1d36f1; + ctx->hash[4] = 0x510e527fade682d1; + ctx->hash[5] = 0x9b05688c2b3e6c1f; + ctx->hash[6] = 0x1f83d9abfb41bd6b; + ctx->hash[7] = 0x5be0cd19137e2179; +} + +// Process a block of memory though the hash +void _stdcall sha512_hash(sha512_ctx *ctx, const unsigned char *in, size_t inlen) +{ + size_t n; + + while (inlen > 0) + { + if (ctx->curlen == 0 && inlen >= SHA512_BLOCK_SIZE) + { + sha512_compress(ctx, in); + + ctx->length += SHA512_BLOCK_SIZE * 8; + in += SHA512_BLOCK_SIZE; + inlen -= SHA512_BLOCK_SIZE; + } else + { + if ( (n = (size_t)(SHA512_BLOCK_SIZE - ctx->curlen)) > inlen ) n = inlen; + memcpy(ctx->buf + ctx->curlen, in, n); + ctx->curlen += (unsigned long)n; + in += n; + inlen -= n; + + if (ctx->curlen == SHA512_BLOCK_SIZE) + { + sha512_compress(ctx, ctx->buf); + ctx->length += 8 * SHA512_BLOCK_SIZE; + ctx->curlen = 0; + } + } + } +} + +// Terminate the hash to get the digest +void _stdcall sha512_done(sha512_ctx *ctx, unsigned char *out) +{ + int i; + + // increase the length of the message + ctx->length += ctx->curlen * 8; + + // append the '1' bit + ctx->buf[ctx->curlen++] = 0x80; + + /* if the length is currently above 112 bytes we append zeros + * then compress. Then we can fall back to padding zeros and length + * encoding like normal. + */ + if (ctx->curlen > 112) + { + while (ctx->curlen < SHA512_BLOCK_SIZE) { + ctx->buf[ctx->curlen++] = 0; + } + sha512_compress(ctx, ctx->buf); + ctx->curlen = 0; + } + + /* pad up to 120 bytes of zeroes + * note: that from 112 to 120 is the 64 MSB of the length. We assume that you won't hash + * > 2^64 bits of data... 
:-)
+     */
+    while (ctx->curlen < 120) {
+        ctx->buf[ctx->curlen++] = 0;
+    }
+
+    // store length
+    ((unsigned __int64*)ctx->buf)[15] = _byteswap_uint64(ctx->length);
+    sha512_compress(ctx, ctx->buf);
+
+    // copy output
+    for (i = 0; i < 8; i++) {
+        ((unsigned __int64*)out)[i] = _byteswap_uint64(ctx->hash[i]);
+    }
+}
\ No newline at end of file
diff --git a/ImBoxEnclave/crypto_fast/sha512.h b/ImBoxEnclave/crypto_fast/sha512.h
new file mode 100644
index 0000000..8adabba
--- /dev/null
+++ b/ImBoxEnclave/crypto_fast/sha512.h
@@ -0,0 +1,21 @@
+#ifndef _SHA512_H_
+#define _SHA512_H_
+
+#include <stddef.h> /* size_t */
+
+typedef struct _sha512_ctx {
+    unsigned __int64 hash[8];
+    unsigned __int64 length;
+    unsigned long    curlen;
+    unsigned char    buf[128];
+
+} sha512_ctx;
+
+#define SHA512_DIGEST_SIZE 64
+#define SHA512_BLOCK_SIZE  128
+
+void _stdcall sha512_init(sha512_ctx *ctx);
+void _stdcall sha512_hash(sha512_ctx *ctx, const unsigned char *in, size_t inlen);
+void _stdcall sha512_done(sha512_ctx *ctx, unsigned char *out);
+
+#endif
\ No newline at end of file
diff --git a/ImBoxEnclave/crypto_fast/sha512_hmac.c b/ImBoxEnclave/crypto_fast/sha512_hmac.c
new file mode 100644
index 0000000..32ef79f
--- /dev/null
+++ b/ImBoxEnclave/crypto_fast/sha512_hmac.c
@@ -0,0 +1,84 @@
+/*
+ *
+ * Copyright (c) 2013
+ * ntldr PGP key ID - 0x1B6A24550F33E44A
+ *
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License version 3 as
+ published by the Free Software Foundation.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
+*/
+#include <string.h> /* memset, memcpy */
+#include <intrin.h> /* __stosd */
+#include "sha512_hmac.h"
+
+void _stdcall sha512_hmac_init(sha512_hmac_ctx* ctx, const void* key, size_t keylen)
+{
+    unsigned long i;
+
+    // zero-fill initial key state
+    memset(ctx->padded_key, 0, sizeof(ctx->padded_key));
+
+    // compress HMAC key if needed, or copy source key
+    if (keylen > SHA512_BLOCK_SIZE) {
+        sha512_init(&ctx->hash);
+        sha512_hash(&ctx->hash, (const unsigned char*)key, keylen);
+        sha512_done(&ctx->hash, ctx->padded_key);
+    } else {
+        memcpy(ctx->padded_key, key, keylen);
+    }
+
+    // start "inner" hash
+    for (i = 0; i < (SHA512_BLOCK_SIZE / 4); i++) {
+        ((unsigned long*)ctx->padded_key)[i] ^= 0x36363636;
+    }
+    sha512_init(&ctx->hash);
+    sha512_hash(&ctx->hash, ctx->padded_key, SHA512_BLOCK_SIZE);
+}
+
+void _stdcall sha512_hmac_hash(sha512_hmac_ctx* ctx, const void* ptr, size_t length)
+{
+    sha512_hash(&ctx->hash, (const unsigned char*)ptr, length);
+}
+
+void _stdcall sha512_hmac_done(sha512_hmac_ctx* ctx, unsigned char* out)
+{
+    unsigned char inner_hash[SHA512_DIGEST_SIZE];
+    unsigned long i;
+
+    // finalize "inner" hash
+    sha512_done(&ctx->hash, inner_hash);
+
+    // calculate "outer" hash
+    for (i = 0; i < (SHA512_BLOCK_SIZE / 4); i++) {
+        ((unsigned long*)ctx->padded_key)[i] ^= 0x6A6A6A6A; // ipad ^ opad: 0x36 ^ 0x5C = 0x6A
+    }
+    sha512_init(&ctx->hash);
+    sha512_hash(&ctx->hash, ctx->padded_key, SHA512_BLOCK_SIZE);
+    sha512_hash(&ctx->hash, inner_hash, SHA512_DIGEST_SIZE);
+    sha512_done(&ctx->hash, out);
+
+    // prevent leaks
+    static_assert( !(sizeof(inner_hash) % sizeof(unsigned long)), "sizeof must be 4 byte aligned");
+    __stosd((unsigned long*)&inner_hash, 0, (sizeof(inner_hash) / sizeof(unsigned long)));
+}
+
+void _stdcall sha512_hmac(const void *k, size_t k_len, const void *d, size_t d_len, unsigned char *out)
+{
+    sha512_hmac_ctx ctx;
+
+    sha512_hmac_init(&ctx, k, k_len);
+    sha512_hmac_hash(&ctx, d, d_len);
+    sha512_hmac_done(&ctx, out);
+
+    // prevent leaks
+    static_assert( !(sizeof(ctx) % sizeof(unsigned long)), "sizeof must be 4 byte aligned");
+    __stosd((unsigned long*)&ctx, 0, (sizeof(ctx) / sizeof(unsigned long)));
+}
diff --git a/ImBoxEnclave/crypto_fast/sha512_hmac.h b/ImBoxEnclave/crypto_fast/sha512_hmac.h
new file mode 100644
index 0000000..fc2718b
--- /dev/null
+++ b/ImBoxEnclave/crypto_fast/sha512_hmac.h
@@ -0,0 +1,17 @@
+#ifndef _SHA512_HMAC_H_
+#define _SHA512_HMAAC_H_
+
+#include "sha512.h"
+
+typedef struct _sha512_hmac_ctx {
+    sha512_ctx    hash;
+    unsigned char padded_key[SHA512_BLOCK_SIZE];
+
+} sha512_hmac_ctx;
+
+void _stdcall sha512_hmac_init(sha512_hmac_ctx* ctx, const void* key, size_t keylen);
+void _stdcall sha512_hmac_hash(sha512_hmac_ctx* ctx, const void* ptr, size_t length);
+void _stdcall sha512_hmac_done(sha512_hmac_ctx* ctx, unsigned char* out);
+void _stdcall sha512_hmac(const void *k, size_t k_len, const void *d, size_t d_len, unsigned char *out);
+
+#endif
\ No newline at end of file
diff --git a/ImBoxEnclave/crypto_fast/sha512_hmac_drbg.c b/ImBoxEnclave/crypto_fast/sha512_hmac_drbg.c
new file mode 100644
index 0000000..2dff481
--- /dev/null
+++ b/ImBoxEnclave/crypto_fast/sha512_hmac_drbg.c
@@ -0,0 +1,175 @@
+/*
+ *
+ * Copyright (c) 2013
+ * ntldr PGP key ID - 0x1B6A24550F33E44A
+ *
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License version 3 as
+ published by the Free Software Foundation.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
+*/
+#include <string.h> /* memset */
+#include <intrin.h> /* __stosd */
+#include "sha512_hmac_drbg.h"
+
+/*
+   Internal function, implements HMAC_DRBG_Update according to the NIST SP 800-90 specification.
+*/
+static void sha512_hmac_drbg_update( sha512_hmac_drbg_ctx *ctx, const void *provided_1, size_t provided_1_len,
+                                     const void *provided_2, size_t provided_2_len )
+{
+    sha512_hmac_ctx hmac;
+    unsigned char   nullbyte = 0x00, onebyte = 0x01;
+
+    // Key = HMAC ( Key, Val || 0x00 || provided_data )
+    sha512_hmac_init(&hmac, ctx->key, sizeof(ctx->key));
+    sha512_hmac_hash(&hmac, ctx->val, sizeof(ctx->val));
+    sha512_hmac_hash(&hmac, &nullbyte, sizeof(nullbyte));
+    sha512_hmac_hash(&hmac, provided_1, provided_1_len);
+    sha512_hmac_hash(&hmac, provided_2, provided_2_len);
+    sha512_hmac_done(&hmac, ctx->key);
+
+    // Val = HMAC ( Key, Val )
+    sha512_hmac_init(&hmac, ctx->key, sizeof(ctx->key));
+    sha512_hmac_hash(&hmac, ctx->val, sizeof(ctx->val));
+    sha512_hmac_done(&hmac, ctx->val);
+
+    if (provided_1_len != 0 || provided_2_len != 0)
+    {
+        // Key = HMAC ( Key, Val || 0x01 || provided_data )
+        sha512_hmac_init(&hmac, ctx->key, sizeof(ctx->key));
+        sha512_hmac_hash(&hmac, ctx->val, sizeof(ctx->val));
+        sha512_hmac_hash(&hmac, &onebyte, sizeof(onebyte));
+        sha512_hmac_hash(&hmac, provided_1, provided_1_len);
+        sha512_hmac_hash(&hmac, provided_2, provided_2_len);
+        sha512_hmac_done(&hmac, ctx->key);
+
+        // Val = HMAC ( Key, Val )
+        sha512_hmac_init(&hmac, ctx->key, sizeof(ctx->key));
+        sha512_hmac_hash(&hmac, ctx->val, sizeof(ctx->val));
+        sha512_hmac_done(&hmac, ctx->val);
+    }
+
+    // prevent leaks
+    static_assert( !(sizeof(hmac) % sizeof(unsigned long)), "sizeof(hmac) must be 4 byte aligned");
+    __stosd((unsigned long*)&hmac, 0, (sizeof(hmac) / sizeof(unsigned long)));
+}
+
+/*
+   Instantiate SHA512_HMAC_DRBG:
+    ctx          - SHA512_HMAC_DRBG internal state to be initialised
+    entropy      - Entropy input (nonce must be included within the entropy input)
+    entropy_len  - Length of entropy input
+    personal     - Personalisation string
+    personal_len - Length of personalisation string
+*/
+int _stdcall sha512_hmac_drbg_instantiate( sha512_hmac_drbg_ctx *ctx, const void *entropy, size_t entropy_len,
+                                           const void *personal, size_t personal_len )
+{
+    // entropy input must contain at least 3/2 * security_strength bits of entropy
+    if ( (entropy_len < SHA512_HMAC_DRBG_MIN_ENTROPY_BYTES * 3 / 2) ||
+         (entropy_len > SHA512_HMAC_DRBG_MAX_ENTROPY_BYTES)         ||
+         (personal_len > SHA512_HMAC_DRBG_MAX_PERSONAL_BYTES) )
+    {
+        return -1;
+    }
+
+    // Key = 0x00, 00..00
+    memset(ctx->key, 0x00, sizeof(ctx->key));
+
+    // Val = 0x01, 01...01
+    memset(ctx->val, 0x01, sizeof(ctx->val));
+
+    // ( Key, Val ) = HMAC_DRBG_Update ( entropy || personal, Key, Val )
+    sha512_hmac_drbg_update(ctx, entropy, entropy_len, personal, personal_len);
+
+    // reseed_counter = 1
+    ctx->reseed_counter = 1;
+
+    // return SUCCESS
+    return 0;
+}
+
+/*
+   Reseed SHA512_HMAC_DRBG:
+    ctx            - SHA512_HMAC_DRBG internal state
+    entropy        - Entropy input
+    entropy_len    - Length of entropy input
+    additional     - Additional input
+    additional_len - Length of additional input
+*/
+int _stdcall sha512_hmac_drbg_reseed( sha512_hmac_drbg_ctx *ctx, const void *entropy, size_t entropy_len,
+                                      const void
*additional, size_t additional_len )
+{
+    if ( (entropy_len < SHA512_HMAC_DRBG_MIN_ENTROPY_BYTES)      ||
+         (entropy_len > SHA512_HMAC_DRBG_MAX_ENTROPY_BYTES)      ||
+         (additional_len > SHA512_HMAC_DRBG_MAX_ADDITIONAL_BYTES) )
+    {
+        return -1;
+    }
+
+    // ( Key, Val ) = HMAC_DRBG_Update ( entropy || additional, Key, Val )
+    sha512_hmac_drbg_update(ctx, entropy, entropy_len, additional, additional_len);
+
+    // reset reseed counter
+    ctx->reseed_counter = 1;
+
+    // return SUCCESS
+    return 0;
+}
+
+/*
+   Generate pseudorandom bits using SHA512_HMAC_DRBG:
+    ctx            - SHA512_HMAC_DRBG internal state
+    additional     - Additional input
+    additional_len - Length of additional input
+    output         - Output buffer
+    output_len     - Length of output buffer
+*/
+int _stdcall sha512_hmac_drbg_generate( sha512_hmac_drbg_ctx *ctx, const void *additional, size_t additional_len,
+                                        unsigned char *output, size_t output_len )
+{
+    if ( (ctx->reseed_counter > SHA512_HMAC_DRBG_RESEED_INTERVAL) || // if reseed_counter > reseed_interval, then return an indication that a reseed is required
+         (additional_len > SHA512_HMAC_DRBG_MAX_ADDITIONAL_BYTES) || // check max_additional_input_length (NIST SP 800-90A specification)
+         (output_len > SHA512_HMAC_DRBG_MAX_GENERATED_BYTES) )       // check max_number_of_bits_per_request (NIST SP 800-90A specification)
+    {
+        return -1;
+    }
+
+    // If additional_input != Null,
+    // then ( Key, Val ) = HMAC_DRBG_Update ( additional_input, Key, Val )
+    if (additional_len != 0) {
+        sha512_hmac_drbg_update(ctx, additional, additional_len, NULL, 0);
+    }
+
+    // While ( len ( temp ) < requested_number_of_bits ) do
+    while ( output_len )
+    {
+        size_t part_len = output_len < SHA512_DIGEST_SIZE ? output_len : SHA512_DIGEST_SIZE;
+
+        // Val = HMAC ( Key, Val )
+        sha512_hmac(ctx->key, sizeof(ctx->key), ctx->val, sizeof(ctx->val), ctx->val);
+
+        // temp = temp || V
+        // returned_bits = Leftmost requested_number_of_bits of temp
+        memcpy(output, ctx->val, part_len);
+        output     += part_len;
+        output_len -= part_len;
+    }
+
+    // ( Key, Val ) = HMAC_DRBG_Update ( additional_input, Key, Val )
+    sha512_hmac_drbg_update(ctx, additional, additional_len, NULL, 0);
+
+    // reseed_counter = reseed_counter + 1
+    ctx->reseed_counter++;
+
+    // return SUCCESS
+    return 0;
+}
diff --git a/ImBoxEnclave/crypto_fast/sha512_hmac_drbg.h b/ImBoxEnclave/crypto_fast/sha512_hmac_drbg.h
new file mode 100644
index 0000000..26e151d
--- /dev/null
+++ b/ImBoxEnclave/crypto_fast/sha512_hmac_drbg.h
@@ -0,0 +1,59 @@
+#ifndef _SHA512_HMAC_DRBG_H_
+#define _SHA512_HMAC_DRBG_H_
+
+#include "sha512.h"
+#include "sha512_hmac.h"
+
+/* The minimum required entropy input for instantiate and reseed is security_strength bits,
+   according to NIST SP 800-90 Section 10.1 Table 2
+*/
+#define SHA512_HMAC_DRBG_MIN_ENTROPY_BYTES ( 256 / 8 )
+
+/* The maximum entropy input length for HMAC_DRBG is 2^35 bits,
+   according to NIST SP 800-90 Section 10.1 Table 2
+*/
+#define SHA512_HMAC_DRBG_MAX_ENTROPY_BYTES ( (1ull << 35) / 8 )
+
+/* The maximum permitted personalisation string length for HMAC_DRBG is 2^35 bits,
+   according to NIST SP 800-90 Section 10.1 Table 2
+*/
+#define SHA512_HMAC_DRBG_MAX_PERSONAL_BYTES ( (1ull << 35) / 8 )
+
+/* The maximum permitted additional input length for HMAC_DRBG is 2^35 bits,
+   according to NIST SP 800-90 Section 10.1 Table 2
+*/
+#define SHA512_HMAC_DRBG_MAX_ADDITIONAL_BYTES ( (1ull << 35) / 8 )
+
+/* The maximum number of bits per request for HMAC_DRBG is 2^19 bits,
+   according to NIST SP 800-90 Section 10.1 Table 2
+*/
+#define SHA512_HMAC_DRBG_MAX_GENERATED_BYTES ( (1 << 19) / 8 )
+
+/* The maximum permitted reseed interval for HMAC_DRBG is 2^48;
+   we choose a very conservative reseed interval.
+*/
+#define SHA512_HMAC_DRBG_RESEED_INTERVAL 128
+
+typedef struct _sha512_hmac_drbg_ctx {
+    // The outlen-bit Key, which is updated at least once each time that the DRBG mechanism generates pseudorandom bits.
+    unsigned char key[SHA512_DIGEST_SIZE];
+
+    // The value V of outlen bits, which is updated each time another outlen bits of output are produced.
+    unsigned char val[SHA512_DIGEST_SIZE];
+
+    // A counter that indicates the number of generation requests since instantiation or reseeding.
+    unsigned long reseed_counter;
+
+} sha512_hmac_drbg_ctx;
+
+
+int _stdcall sha512_hmac_drbg_instantiate( sha512_hmac_drbg_ctx *ctx, const void *entropy, size_t entropy_len,
+                                           const void *personal, size_t personal_len );
+
+int _stdcall sha512_hmac_drbg_reseed( sha512_hmac_drbg_ctx *ctx, const void *entropy, size_t entropy_len,
+                                      const void *additional, size_t additional_len );
+
+int _stdcall sha512_hmac_drbg_generate( sha512_hmac_drbg_ctx *ctx, const void *additional, size_t additional_len,
+                                        unsigned char *output, size_t output_len );
+
+#endif
\ No newline at end of file
diff --git a/ImBoxEnclave/crypto_fast/sha512_pkcs5_2.c b/ImBoxEnclave/crypto_fast/sha512_pkcs5_2.c
new file mode 100644
index 0000000..66b043c
--- /dev/null
+++ b/ImBoxEnclave/crypto_fast/sha512_pkcs5_2.c
@@ -0,0 +1,67 @@
+/*
+ *
+ * Copyright (c) 2007-2013
+ * ntldr PGP key ID - 0x1B6A24550F33E44A
+ *
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License version 3 as
+ published by the Free Software Foundation.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
+*/
+#include <string.h> /* memcpy */
+#include <intrin.h> /* _byteswap_ulong, __stosd */
+#include "sha512_hmac.h"
+#include "sha512_pkcs5_2.h"
+
+void _stdcall sha512_pkcs5_2(int i_count, const void *pwd, size_t pwd_len, const void *salt, size_t salt_len, unsigned char *dk, size_t dklen)
+{
+    sha512_hmac_ctx ctx;
+    unsigned char   blk[SHA512_DIGEST_SIZE], hmac[SHA512_DIGEST_SIZE];
+    unsigned long   block = 0x01000000; // block index 1, big-endian: _byteswap_ulong(1)
+    size_t          c_len;
+    int             j, i;
+
+    while (dklen != 0)
+    {
+        // first iteration
+        sha512_hmac_init(&ctx, pwd, pwd_len);
+        sha512_hmac_hash(&ctx, salt, salt_len);
+        sha512_hmac_hash(&ctx, &block, sizeof(unsigned long));
+        sha512_hmac_done(&ctx, hmac);
+        memcpy(blk, hmac, SHA512_DIGEST_SIZE);
+
+        // next iterations
+        for (i = 1; i < i_count; i++)
+        {
+            sha512_hmac_init(&ctx, pwd, pwd_len);
+            sha512_hmac_hash(&ctx, hmac, SHA512_DIGEST_SIZE);
+            sha512_hmac_done(&ctx, hmac);
+
+            for (j = 0; j < (SHA512_DIGEST_SIZE / 4); j++) {
+                ((unsigned long*)blk)[j] ^= ((unsigned long*)hmac)[j];
+            }
+        }
+
+        block = _byteswap_ulong(_byteswap_ulong(block) + 1);
+        memcpy(dk, blk, (c_len = dklen < SHA512_DIGEST_SIZE ? dklen : SHA512_DIGEST_SIZE));
+        dk += c_len; dklen -= c_len;
+    }
+
+    // test buffers size alignment at compile-time
+    static_assert( !(sizeof(ctx) % sizeof(unsigned long)), "sizeof must be 4 byte aligned");
+    static_assert( !(sizeof(blk) % sizeof(unsigned long)), "sizeof must be 4 byte aligned");
+    static_assert( !(sizeof(hmac) % sizeof(unsigned long)), "sizeof must be 4 byte aligned");
+
+    // prevent leaks
+    __stosd((unsigned long*)&ctx, 0, (sizeof(ctx) / sizeof(unsigned long)));
+    __stosd((unsigned long*)&blk, 0, (sizeof(blk) / sizeof(unsigned long)));
+    __stosd((unsigned long*)&hmac, 0, (sizeof(hmac) / sizeof(unsigned long)));
+}
diff --git a/ImBoxEnclave/crypto_fast/sha512_pkcs5_2.h b/ImBoxEnclave/crypto_fast/sha512_pkcs5_2.h
new file mode 100644
index 0000000..e2fb530
--- /dev/null
+++ b/ImBoxEnclave/crypto_fast/sha512_pkcs5_2.h
@@ -0,0 +1,6 @@
+#ifndef _SHA512_PKCS5_2_H_
+#define _SHA512_PKCS5_2_H_
+
+void _stdcall sha512_pkcs5_2(int i_count, const void *pwd, size_t pwd_len, const void *salt, size_t salt_len, unsigned char *dk, size_t dklen);
+
+#endif
\ No newline at end of file
diff --git a/ImBoxEnclave/crypto_fast/twofish.c b/ImBoxEnclave/crypto_fast/twofish.c
new file mode 100644
index 0000000..bb68f79
--- /dev/null
+++ b/ImBoxEnclave/crypto_fast/twofish.c
@@ -0,0 +1,779 @@
+/*
+ * Twofish algorithm
+ *
+ * @author Matthew Skala, July 26, 1998
+ * @author Werner Koch, April, 1998
+ * @author Dr Brian Gladman, 1999
+ * @author ntldr, 2008-2010
+ *
+ * The original author has disclaimed all copyright interest in this
+ * code and thus put it in the public domain. The subsequent authors
+ * have put this under the GNU General Public License.
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 2 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 + * USA + * + * This code is a "clean room" implementation, written from the paper + * _Twofish: A 128-Bit Block Cipher_ by Bruce Schneier, John Kelsey, + * Doug Whiting, David Wagner, Chris Hall, and Niels Ferguson, available + * through http://www.counterpane.com/twofish.html + * + * For background information on multiplication in finite fields, used for + * the matrix operations in the key schedule, see the book _Contemporary + * Abstract Algebra_ by Joseph A. Gallian, especially chapter 22 in the + * Third Edition. + */ +#include +#include "twofish.h" + + +/* The large precomputed tables for the Twofish cipher (twofish.c) + * Taken from the same source as twofish.c + * Marc Mutz + */ + +/* These two tables are the q0 and q1 permutations, exactly as described in + * the Twofish paper. */ + +static const unsigned char q0[256] = { + 0xA9, 0x67, 0xB3, 0xE8, 0x04, 0xFD, 0xA3, 0x76, 0x9A, 0x92, 0x80, 0x78, + 0xE4, 0xDD, 0xD1, 0x38, 0x0D, 0xC6, 0x35, 0x98, 0x18, 0xF7, 0xEC, 0x6C, + 0x43, 0x75, 0x37, 0x26, 0xFA, 0x13, 0x94, 0x48, 0xF2, 0xD0, 0x8B, 0x30, + 0x84, 0x54, 0xDF, 0x23, 0x19, 0x5B, 0x3D, 0x59, 0xF3, 0xAE, 0xA2, 0x82, + 0x63, 0x01, 0x83, 0x2E, 0xD9, 0x51, 0x9B, 0x7C, 0xA6, 0xEB, 0xA5, 0xBE, + 0x16, 0x0C, 0xE3, 0x61, 0xC0, 0x8C, 0x3A, 0xF5, 0x73, 0x2C, 0x25, 0x0B, + 0xBB, 0x4E, 0x89, 0x6B, 0x53, 0x6A, 0xB4, 0xF1, 0xE1, 0xE6, 0xBD, 0x45, + 0xE2, 0xF4, 0xB6, 0x66, 0xCC, 0x95, 0x03, 0x56, 0xD4, 0x1C, 0x1E, 0xD7, + 0xFB, 0xC3, 0x8E, 0xB5, 0xE9, 0xCF, 0xBF, 0xBA, 0xEA, 0x77, 0x39, 0xAF, + 0x33, 0xC9, 0x62, 0x71, 0x81, 0x79, 0x09, 0xAD, 0x24, 0xCD, 0xF9, 0xD8, + 0xE5, 0xC5, 0xB9, 0x4D, 0x44, 0x08, 0x86, 0xE7, 0xA1, 0x1D, 0xAA, 0xED, + 0x06, 0x70, 0xB2, 0xD2, 0x41, 0x7B, 0xA0, 0x11, 0x31, 0xC2, 0x27, 0x90, + 0x20, 0xF6, 0x60, 0xFF, 0x96, 0x5C, 0xB1, 0xAB, 0x9E, 0x9C, 0x52, 0x1B, + 0x5F, 0x93, 0x0A, 0xEF, 0x91, 0x85, 0x49, 0xEE, 0x2D, 0x4F, 0x8F, 0x3B, + 0x47, 0x87, 0x6D, 0x46, 0xD6, 0x3E, 0x69, 0x64, 0x2A, 0xCE, 0xCB, 0x2F, + 0xFC, 0x97, 0x05, 0x7A, 0xAC, 0x7F, 0xD5, 0x1A, 0x4B, 0x0E, 0xA7, 0x5A, + 0x28, 0x14, 0x3F, 0x29, 0x88, 0x3C, 0x4C, 0x02, 0xB8, 0xDA, 0xB0, 0x17, + 0x55, 0x1F, 0x8A, 0x7D, 0x57, 0xC7, 0x8D, 0x74, 0xB7, 0xC4, 0x9F, 0x72, + 0x7E, 0x15, 0x22, 0x12, 0x58, 0x07, 0x99, 0x34, 0x6E, 0x50, 0xDE, 0x68, + 0x65, 0xBC, 0xDB, 0xF8, 0xC8, 0xA8, 0x2B, 0x40, 0xDC, 0xFE, 0x32, 0xA4, + 0xCA, 0x10, 0x21, 0xF0, 0xD3, 0x5D, 0x0F, 0x00, 0x6F, 0x9D, 0x36, 0x42, + 0x4A, 0x5E, 0xC1, 0xE0 +}; + +static const unsigned char q1[256] = { + 0x75, 0xF3, 0xC6, 0xF4, 0xDB, 0x7B, 0xFB, 0xC8, 0x4A, 0xD3, 0xE6, 0x6B, + 0x45, 0x7D, 0xE8, 0x4B, 0xD6, 0x32, 0xD8, 0xFD, 0x37, 0x71, 0xF1, 0xE1, + 0x30, 0x0F, 0xF8, 0x1B, 0x87, 0xFA, 0x06, 0x3F, 0x5E, 0xBA, 0xAE, 0x5B, + 0x8A, 0x00, 0xBC, 0x9D, 0x6D, 0xC1, 0xB1, 0x0E, 0x80, 0x5D, 0xD2, 0xD5, + 0xA0, 0x84, 0x07, 0x14, 0xB5, 0x90, 0x2C, 0xA3, 0xB2, 0x73, 0x4C, 0x54, + 0x92, 0x74, 0x36, 0x51, 0x38, 0xB0, 0xBD, 0x5A, 0xFC, 0x60, 0x62, 0x96, + 0x6C, 0x42, 0xF7, 0x10, 0x7C, 0x28, 0x27, 0x8C, 0x13, 0x95, 0x9C, 0xC7, + 0x24, 0x46, 0x3B, 0x70, 0xCA, 0xE3, 0x85, 0xCB, 0x11, 0xD0, 0x93, 0xB8, + 0xA6, 0x83, 0x20, 0xFF, 0x9F, 0x77, 0xC3, 0xCC, 0x03, 0x6F, 0x08, 0xBF, + 0x40, 0xE7, 0x2B, 0xE2, 0x79, 0x0C, 0xAA, 0x82, 0x41, 0x3A, 0xEA, 0xB9, + 0xE4, 0x9A, 0xA4, 0x97, 0x7E, 0xDA, 0x7A, 0x17, 0x66, 0x94, 0xA1, 0x1D, + 0x3D, 0xF0, 0xDE, 0xB3, 0x0B, 0x72, 0xA7, 0x1C, 0xEF, 0xD1, 
0x53, 0x3E, + 0x8F, 0x33, 0x26, 0x5F, 0xEC, 0x76, 0x2A, 0x49, 0x81, 0x88, 0xEE, 0x21, + 0xC4, 0x1A, 0xEB, 0xD9, 0xC5, 0x39, 0x99, 0xCD, 0xAD, 0x31, 0x8B, 0x01, + 0x18, 0x23, 0xDD, 0x1F, 0x4E, 0x2D, 0xF9, 0x48, 0x4F, 0xF2, 0x65, 0x8E, + 0x78, 0x5C, 0x58, 0x19, 0x8D, 0xE5, 0x98, 0x57, 0x67, 0x7F, 0x05, 0x64, + 0xAF, 0x63, 0xB6, 0xFE, 0xF5, 0xB7, 0x3C, 0xA5, 0xCE, 0xE9, 0x68, 0x44, + 0xE0, 0x4D, 0x43, 0x69, 0x29, 0x2E, 0xAC, 0x15, 0x59, 0xA8, 0x0A, 0x9E, + 0x6E, 0x47, 0xDF, 0x34, 0x35, 0x6A, 0xCF, 0xDC, 0x22, 0xC9, 0xC0, 0x9B, + 0x89, 0xD4, 0xED, 0xAB, 0x12, 0xA2, 0x0D, 0x52, 0xBB, 0x02, 0x2F, 0xA9, + 0xD7, 0x61, 0x1E, 0xB4, 0x50, 0x04, 0xF6, 0xC2, 0x16, 0x25, 0x86, 0x56, + 0x55, 0x09, 0xBE, 0x91 +}; + +/* These MDS tables are actually tables of MDS composed with q0 and q1, + * because it is only ever used that way and we can save some time by + * precomputing. Of course the main saving comes from precomputing the + * GF(2^8) multiplication involved in the MDS matrix multiply; by looking + * things up in these tables we reduce the matrix multiply to four lookups + * and three XORs. Semi-formally, the definition of these tables is: + * mds[0][i] = MDS (q1[i] 0 0 0)^T mds[1][i] = MDS (0 q0[i] 0 0)^T + * mds[2][i] = MDS (0 0 q1[i] 0)^T mds[3][i] = MDS (0 0 0 q0[i])^T + * where ^T means "transpose", the matrix multiply is performed in GF(2^8) + * represented as GF(2)[x]/v(x) where v(x)=x^8+x^6+x^5+x^3+1 as described + * by Schneier et al, and I'm casually glossing over the byte/word + * conversion issues. */ + +static const unsigned long mds[4][256] = { + { + 0xBCBC3275, 0xECEC21F3, 0x202043C6, 0xB3B3C9F4, 0xDADA03DB, 0x02028B7B, + 0xE2E22BFB, 0x9E9EFAC8, 0xC9C9EC4A, 0xD4D409D3, 0x18186BE6, 0x1E1E9F6B, + 0x98980E45, 0xB2B2387D, 0xA6A6D2E8, 0x2626B74B, 0x3C3C57D6, 0x93938A32, + 0x8282EED8, 0x525298FD, 0x7B7BD437, 0xBBBB3771, 0x5B5B97F1, 0x474783E1, + 0x24243C30, 0x5151E20F, 0xBABAC6F8, 0x4A4AF31B, 0xBFBF4887, 0x0D0D70FA, + 0xB0B0B306, 0x7575DE3F, 0xD2D2FD5E, 0x7D7D20BA, 0x666631AE, 0x3A3AA35B, + 0x59591C8A, 0x00000000, 0xCDCD93BC, 0x1A1AE09D, 0xAEAE2C6D, 0x7F7FABC1, + 0x2B2BC7B1, 0xBEBEB90E, 0xE0E0A080, 0x8A8A105D, 0x3B3B52D2, 0x6464BAD5, + 0xD8D888A0, 0xE7E7A584, 0x5F5FE807, 0x1B1B1114, 0x2C2CC2B5, 0xFCFCB490, + 0x3131272C, 0x808065A3, 0x73732AB2, 0x0C0C8173, 0x79795F4C, 0x6B6B4154, + 0x4B4B0292, 0x53536974, 0x94948F36, 0x83831F51, 0x2A2A3638, 0xC4C49CB0, + 0x2222C8BD, 0xD5D5F85A, 0xBDBDC3FC, 0x48487860, 0xFFFFCE62, 0x4C4C0796, + 0x4141776C, 0xC7C7E642, 0xEBEB24F7, 0x1C1C1410, 0x5D5D637C, 0x36362228, + 0x6767C027, 0xE9E9AF8C, 0x4444F913, 0x1414EA95, 0xF5F5BB9C, 0xCFCF18C7, + 0x3F3F2D24, 0xC0C0E346, 0x7272DB3B, 0x54546C70, 0x29294CCA, 0xF0F035E3, + 0x0808FE85, 0xC6C617CB, 0xF3F34F11, 0x8C8CE4D0, 0xA4A45993, 0xCACA96B8, + 0x68683BA6, 0xB8B84D83, 0x38382820, 0xE5E52EFF, 0xADAD569F, 0x0B0B8477, + 0xC8C81DC3, 0x9999FFCC, 0x5858ED03, 0x19199A6F, 0x0E0E0A08, 0x95957EBF, + 0x70705040, 0xF7F730E7, 0x6E6ECF2B, 0x1F1F6EE2, 0xB5B53D79, 0x09090F0C, + 0x616134AA, 0x57571682, 0x9F9F0B41, 0x9D9D803A, 0x111164EA, 0x2525CDB9, + 0xAFAFDDE4, 0x4545089A, 0xDFDF8DA4, 0xA3A35C97, 0xEAEAD57E, 0x353558DA, + 0xEDEDD07A, 0x4343FC17, 0xF8F8CB66, 0xFBFBB194, 0x3737D3A1, 0xFAFA401D, + 0xC2C2683D, 0xB4B4CCF0, 0x32325DDE, 0x9C9C71B3, 0x5656E70B, 0xE3E3DA72, + 0x878760A7, 0x15151B1C, 0xF9F93AEF, 0x6363BFD1, 0x3434A953, 0x9A9A853E, + 0xB1B1428F, 0x7C7CD133, 0x88889B26, 0x3D3DA65F, 0xA1A1D7EC, 0xE4E4DF76, + 0x8181942A, 0x91910149, 0x0F0FFB81, 0xEEEEAA88, 0x161661EE, 0xD7D77321, + 0x9797F5C4, 0xA5A5A81A, 0xFEFE3FEB, 0x6D6DB5D9, 
0x7878AEC5, 0xC5C56D39, + 0x1D1DE599, 0x7676A4CD, 0x3E3EDCAD, 0xCBCB6731, 0xB6B6478B, 0xEFEF5B01, + 0x12121E18, 0x6060C523, 0x6A6AB0DD, 0x4D4DF61F, 0xCECEE94E, 0xDEDE7C2D, + 0x55559DF9, 0x7E7E5A48, 0x2121B24F, 0x03037AF2, 0xA0A02665, 0x5E5E198E, + 0x5A5A6678, 0x65654B5C, 0x62624E58, 0xFDFD4519, 0x0606F48D, 0x404086E5, + 0xF2F2BE98, 0x3333AC57, 0x17179067, 0x05058E7F, 0xE8E85E05, 0x4F4F7D64, + 0x89896AAF, 0x10109563, 0x74742FB6, 0x0A0A75FE, 0x5C5C92F5, 0x9B9B74B7, + 0x2D2D333C, 0x3030D6A5, 0x2E2E49CE, 0x494989E9, 0x46467268, 0x77775544, + 0xA8A8D8E0, 0x9696044D, 0x2828BD43, 0xA9A92969, 0xD9D97929, 0x8686912E, + 0xD1D187AC, 0xF4F44A15, 0x8D8D1559, 0xD6D682A8, 0xB9B9BC0A, 0x42420D9E, + 0xF6F6C16E, 0x2F2FB847, 0xDDDD06DF, 0x23233934, 0xCCCC6235, 0xF1F1C46A, + 0xC1C112CF, 0x8585EBDC, 0x8F8F9E22, 0x7171A1C9, 0x9090F0C0, 0xAAAA539B, + 0x0101F189, 0x8B8BE1D4, 0x4E4E8CED, 0x8E8E6FAB, 0xABABA212, 0x6F6F3EA2, + 0xE6E6540D, 0xDBDBF252, 0x92927BBB, 0xB7B7B602, 0x6969CA2F, 0x3939D9A9, + 0xD3D30CD7, 0xA7A72361, 0xA2A2AD1E, 0xC3C399B4, 0x6C6C4450, 0x07070504, + 0x04047FF6, 0x272746C2, 0xACACA716, 0xD0D07625, 0x50501386, 0xDCDCF756, + 0x84841A55, 0xE1E15109, 0x7A7A25BE, 0x1313EF91}, + + { + 0xA9D93939, 0x67901717, 0xB3719C9C, 0xE8D2A6A6, 0x04050707, 0xFD985252, + 0xA3658080, 0x76DFE4E4, 0x9A084545, 0x92024B4B, 0x80A0E0E0, 0x78665A5A, + 0xE4DDAFAF, 0xDDB06A6A, 0xD1BF6363, 0x38362A2A, 0x0D54E6E6, 0xC6432020, + 0x3562CCCC, 0x98BEF2F2, 0x181E1212, 0xF724EBEB, 0xECD7A1A1, 0x6C774141, + 0x43BD2828, 0x7532BCBC, 0x37D47B7B, 0x269B8888, 0xFA700D0D, 0x13F94444, + 0x94B1FBFB, 0x485A7E7E, 0xF27A0303, 0xD0E48C8C, 0x8B47B6B6, 0x303C2424, + 0x84A5E7E7, 0x54416B6B, 0xDF06DDDD, 0x23C56060, 0x1945FDFD, 0x5BA33A3A, + 0x3D68C2C2, 0x59158D8D, 0xF321ECEC, 0xAE316666, 0xA23E6F6F, 0x82165757, + 0x63951010, 0x015BEFEF, 0x834DB8B8, 0x2E918686, 0xD9B56D6D, 0x511F8383, + 0x9B53AAAA, 0x7C635D5D, 0xA63B6868, 0xEB3FFEFE, 0xA5D63030, 0xBE257A7A, + 0x16A7ACAC, 0x0C0F0909, 0xE335F0F0, 0x6123A7A7, 0xC0F09090, 0x8CAFE9E9, + 0x3A809D9D, 0xF5925C5C, 0x73810C0C, 0x2C273131, 0x2576D0D0, 0x0BE75656, + 0xBB7B9292, 0x4EE9CECE, 0x89F10101, 0x6B9F1E1E, 0x53A93434, 0x6AC4F1F1, + 0xB499C3C3, 0xF1975B5B, 0xE1834747, 0xE66B1818, 0xBDC82222, 0x450E9898, + 0xE26E1F1F, 0xF4C9B3B3, 0xB62F7474, 0x66CBF8F8, 0xCCFF9999, 0x95EA1414, + 0x03ED5858, 0x56F7DCDC, 0xD4E18B8B, 0x1C1B1515, 0x1EADA2A2, 0xD70CD3D3, + 0xFB2BE2E2, 0xC31DC8C8, 0x8E195E5E, 0xB5C22C2C, 0xE9894949, 0xCF12C1C1, + 0xBF7E9595, 0xBA207D7D, 0xEA641111, 0x77840B0B, 0x396DC5C5, 0xAF6A8989, + 0x33D17C7C, 0xC9A17171, 0x62CEFFFF, 0x7137BBBB, 0x81FB0F0F, 0x793DB5B5, + 0x0951E1E1, 0xADDC3E3E, 0x242D3F3F, 0xCDA47676, 0xF99D5555, 0xD8EE8282, + 0xE5864040, 0xC5AE7878, 0xB9CD2525, 0x4D049696, 0x44557777, 0x080A0E0E, + 0x86135050, 0xE730F7F7, 0xA1D33737, 0x1D40FAFA, 0xAA346161, 0xED8C4E4E, + 0x06B3B0B0, 0x706C5454, 0xB22A7373, 0xD2523B3B, 0x410B9F9F, 0x7B8B0202, + 0xA088D8D8, 0x114FF3F3, 0x3167CBCB, 0xC2462727, 0x27C06767, 0x90B4FCFC, + 0x20283838, 0xF67F0404, 0x60784848, 0xFF2EE5E5, 0x96074C4C, 0x5C4B6565, + 0xB1C72B2B, 0xAB6F8E8E, 0x9E0D4242, 0x9CBBF5F5, 0x52F2DBDB, 0x1BF34A4A, + 0x5FA63D3D, 0x9359A4A4, 0x0ABCB9B9, 0xEF3AF9F9, 0x91EF1313, 0x85FE0808, + 0x49019191, 0xEE611616, 0x2D7CDEDE, 0x4FB22121, 0x8F42B1B1, 0x3BDB7272, + 0x47B82F2F, 0x8748BFBF, 0x6D2CAEAE, 0x46E3C0C0, 0xD6573C3C, 0x3E859A9A, + 0x6929A9A9, 0x647D4F4F, 0x2A948181, 0xCE492E2E, 0xCB17C6C6, 0x2FCA6969, + 0xFCC3BDBD, 0x975CA3A3, 0x055EE8E8, 0x7AD0EDED, 0xAC87D1D1, 0x7F8E0505, + 0xD5BA6464, 0x1AA8A5A5, 0x4BB72626, 0x0EB9BEBE, 0xA7608787, 
0x5AF8D5D5, + 0x28223636, 0x14111B1B, 0x3FDE7575, 0x2979D9D9, 0x88AAEEEE, 0x3C332D2D, + 0x4C5F7979, 0x02B6B7B7, 0xB896CACA, 0xDA583535, 0xB09CC4C4, 0x17FC4343, + 0x551A8484, 0x1FF64D4D, 0x8A1C5959, 0x7D38B2B2, 0x57AC3333, 0xC718CFCF, + 0x8DF40606, 0x74695353, 0xB7749B9B, 0xC4F59797, 0x9F56ADAD, 0x72DAE3E3, + 0x7ED5EAEA, 0x154AF4F4, 0x229E8F8F, 0x12A2ABAB, 0x584E6262, 0x07E85F5F, + 0x99E51D1D, 0x34392323, 0x6EC1F6F6, 0x50446C6C, 0xDE5D3232, 0x68724646, + 0x6526A0A0, 0xBC93CDCD, 0xDB03DADA, 0xF8C6BABA, 0xC8FA9E9E, 0xA882D6D6, + 0x2BCF6E6E, 0x40507070, 0xDCEB8585, 0xFE750A0A, 0x328A9393, 0xA48DDFDF, + 0xCA4C2929, 0x10141C1C, 0x2173D7D7, 0xF0CCB4B4, 0xD309D4D4, 0x5D108A8A, + 0x0FE25151, 0x00000000, 0x6F9A1919, 0x9DE01A1A, 0x368F9494, 0x42E6C7C7, + 0x4AECC9C9, 0x5EFDD2D2, 0xC1AB7F7F, 0xE0D8A8A8}, + + { + 0xBC75BC32, 0xECF3EC21, 0x20C62043, 0xB3F4B3C9, 0xDADBDA03, 0x027B028B, + 0xE2FBE22B, 0x9EC89EFA, 0xC94AC9EC, 0xD4D3D409, 0x18E6186B, 0x1E6B1E9F, + 0x9845980E, 0xB27DB238, 0xA6E8A6D2, 0x264B26B7, 0x3CD63C57, 0x9332938A, + 0x82D882EE, 0x52FD5298, 0x7B377BD4, 0xBB71BB37, 0x5BF15B97, 0x47E14783, + 0x2430243C, 0x510F51E2, 0xBAF8BAC6, 0x4A1B4AF3, 0xBF87BF48, 0x0DFA0D70, + 0xB006B0B3, 0x753F75DE, 0xD25ED2FD, 0x7DBA7D20, 0x66AE6631, 0x3A5B3AA3, + 0x598A591C, 0x00000000, 0xCDBCCD93, 0x1A9D1AE0, 0xAE6DAE2C, 0x7FC17FAB, + 0x2BB12BC7, 0xBE0EBEB9, 0xE080E0A0, 0x8A5D8A10, 0x3BD23B52, 0x64D564BA, + 0xD8A0D888, 0xE784E7A5, 0x5F075FE8, 0x1B141B11, 0x2CB52CC2, 0xFC90FCB4, + 0x312C3127, 0x80A38065, 0x73B2732A, 0x0C730C81, 0x794C795F, 0x6B546B41, + 0x4B924B02, 0x53745369, 0x9436948F, 0x8351831F, 0x2A382A36, 0xC4B0C49C, + 0x22BD22C8, 0xD55AD5F8, 0xBDFCBDC3, 0x48604878, 0xFF62FFCE, 0x4C964C07, + 0x416C4177, 0xC742C7E6, 0xEBF7EB24, 0x1C101C14, 0x5D7C5D63, 0x36283622, + 0x672767C0, 0xE98CE9AF, 0x441344F9, 0x149514EA, 0xF59CF5BB, 0xCFC7CF18, + 0x3F243F2D, 0xC046C0E3, 0x723B72DB, 0x5470546C, 0x29CA294C, 0xF0E3F035, + 0x088508FE, 0xC6CBC617, 0xF311F34F, 0x8CD08CE4, 0xA493A459, 0xCAB8CA96, + 0x68A6683B, 0xB883B84D, 0x38203828, 0xE5FFE52E, 0xAD9FAD56, 0x0B770B84, + 0xC8C3C81D, 0x99CC99FF, 0x580358ED, 0x196F199A, 0x0E080E0A, 0x95BF957E, + 0x70407050, 0xF7E7F730, 0x6E2B6ECF, 0x1FE21F6E, 0xB579B53D, 0x090C090F, + 0x61AA6134, 0x57825716, 0x9F419F0B, 0x9D3A9D80, 0x11EA1164, 0x25B925CD, + 0xAFE4AFDD, 0x459A4508, 0xDFA4DF8D, 0xA397A35C, 0xEA7EEAD5, 0x35DA3558, + 0xED7AEDD0, 0x431743FC, 0xF866F8CB, 0xFB94FBB1, 0x37A137D3, 0xFA1DFA40, + 0xC23DC268, 0xB4F0B4CC, 0x32DE325D, 0x9CB39C71, 0x560B56E7, 0xE372E3DA, + 0x87A78760, 0x151C151B, 0xF9EFF93A, 0x63D163BF, 0x345334A9, 0x9A3E9A85, + 0xB18FB142, 0x7C337CD1, 0x8826889B, 0x3D5F3DA6, 0xA1ECA1D7, 0xE476E4DF, + 0x812A8194, 0x91499101, 0x0F810FFB, 0xEE88EEAA, 0x16EE1661, 0xD721D773, + 0x97C497F5, 0xA51AA5A8, 0xFEEBFE3F, 0x6DD96DB5, 0x78C578AE, 0xC539C56D, + 0x1D991DE5, 0x76CD76A4, 0x3EAD3EDC, 0xCB31CB67, 0xB68BB647, 0xEF01EF5B, + 0x1218121E, 0x602360C5, 0x6ADD6AB0, 0x4D1F4DF6, 0xCE4ECEE9, 0xDE2DDE7C, + 0x55F9559D, 0x7E487E5A, 0x214F21B2, 0x03F2037A, 0xA065A026, 0x5E8E5E19, + 0x5A785A66, 0x655C654B, 0x6258624E, 0xFD19FD45, 0x068D06F4, 0x40E54086, + 0xF298F2BE, 0x335733AC, 0x17671790, 0x057F058E, 0xE805E85E, 0x4F644F7D, + 0x89AF896A, 0x10631095, 0x74B6742F, 0x0AFE0A75, 0x5CF55C92, 0x9BB79B74, + 0x2D3C2D33, 0x30A530D6, 0x2ECE2E49, 0x49E94989, 0x46684672, 0x77447755, + 0xA8E0A8D8, 0x964D9604, 0x284328BD, 0xA969A929, 0xD929D979, 0x862E8691, + 0xD1ACD187, 0xF415F44A, 0x8D598D15, 0xD6A8D682, 0xB90AB9BC, 0x429E420D, + 0xF66EF6C1, 0x2F472FB8, 0xDDDFDD06, 0x23342339, 0xCC35CC62, 0xF16AF1C4, + 
0xC1CFC112, 0x85DC85EB, 0x8F228F9E, 0x71C971A1, 0x90C090F0, 0xAA9BAA53, + 0x018901F1, 0x8BD48BE1, 0x4EED4E8C, 0x8EAB8E6F, 0xAB12ABA2, 0x6FA26F3E, + 0xE60DE654, 0xDB52DBF2, 0x92BB927B, 0xB702B7B6, 0x692F69CA, 0x39A939D9, + 0xD3D7D30C, 0xA761A723, 0xA21EA2AD, 0xC3B4C399, 0x6C506C44, 0x07040705, + 0x04F6047F, 0x27C22746, 0xAC16ACA7, 0xD025D076, 0x50865013, 0xDC56DCF7, + 0x8455841A, 0xE109E151, 0x7ABE7A25, 0x139113EF}, + + { + 0xD939A9D9, 0x90176790, 0x719CB371, 0xD2A6E8D2, 0x05070405, 0x9852FD98, + 0x6580A365, 0xDFE476DF, 0x08459A08, 0x024B9202, 0xA0E080A0, 0x665A7866, + 0xDDAFE4DD, 0xB06ADDB0, 0xBF63D1BF, 0x362A3836, 0x54E60D54, 0x4320C643, + 0x62CC3562, 0xBEF298BE, 0x1E12181E, 0x24EBF724, 0xD7A1ECD7, 0x77416C77, + 0xBD2843BD, 0x32BC7532, 0xD47B37D4, 0x9B88269B, 0x700DFA70, 0xF94413F9, + 0xB1FB94B1, 0x5A7E485A, 0x7A03F27A, 0xE48CD0E4, 0x47B68B47, 0x3C24303C, + 0xA5E784A5, 0x416B5441, 0x06DDDF06, 0xC56023C5, 0x45FD1945, 0xA33A5BA3, + 0x68C23D68, 0x158D5915, 0x21ECF321, 0x3166AE31, 0x3E6FA23E, 0x16578216, + 0x95106395, 0x5BEF015B, 0x4DB8834D, 0x91862E91, 0xB56DD9B5, 0x1F83511F, + 0x53AA9B53, 0x635D7C63, 0x3B68A63B, 0x3FFEEB3F, 0xD630A5D6, 0x257ABE25, + 0xA7AC16A7, 0x0F090C0F, 0x35F0E335, 0x23A76123, 0xF090C0F0, 0xAFE98CAF, + 0x809D3A80, 0x925CF592, 0x810C7381, 0x27312C27, 0x76D02576, 0xE7560BE7, + 0x7B92BB7B, 0xE9CE4EE9, 0xF10189F1, 0x9F1E6B9F, 0xA93453A9, 0xC4F16AC4, + 0x99C3B499, 0x975BF197, 0x8347E183, 0x6B18E66B, 0xC822BDC8, 0x0E98450E, + 0x6E1FE26E, 0xC9B3F4C9, 0x2F74B62F, 0xCBF866CB, 0xFF99CCFF, 0xEA1495EA, + 0xED5803ED, 0xF7DC56F7, 0xE18BD4E1, 0x1B151C1B, 0xADA21EAD, 0x0CD3D70C, + 0x2BE2FB2B, 0x1DC8C31D, 0x195E8E19, 0xC22CB5C2, 0x8949E989, 0x12C1CF12, + 0x7E95BF7E, 0x207DBA20, 0x6411EA64, 0x840B7784, 0x6DC5396D, 0x6A89AF6A, + 0xD17C33D1, 0xA171C9A1, 0xCEFF62CE, 0x37BB7137, 0xFB0F81FB, 0x3DB5793D, + 0x51E10951, 0xDC3EADDC, 0x2D3F242D, 0xA476CDA4, 0x9D55F99D, 0xEE82D8EE, + 0x8640E586, 0xAE78C5AE, 0xCD25B9CD, 0x04964D04, 0x55774455, 0x0A0E080A, + 0x13508613, 0x30F7E730, 0xD337A1D3, 0x40FA1D40, 0x3461AA34, 0x8C4EED8C, + 0xB3B006B3, 0x6C54706C, 0x2A73B22A, 0x523BD252, 0x0B9F410B, 0x8B027B8B, + 0x88D8A088, 0x4FF3114F, 0x67CB3167, 0x4627C246, 0xC06727C0, 0xB4FC90B4, + 0x28382028, 0x7F04F67F, 0x78486078, 0x2EE5FF2E, 0x074C9607, 0x4B655C4B, + 0xC72BB1C7, 0x6F8EAB6F, 0x0D429E0D, 0xBBF59CBB, 0xF2DB52F2, 0xF34A1BF3, + 0xA63D5FA6, 0x59A49359, 0xBCB90ABC, 0x3AF9EF3A, 0xEF1391EF, 0xFE0885FE, + 0x01914901, 0x6116EE61, 0x7CDE2D7C, 0xB2214FB2, 0x42B18F42, 0xDB723BDB, + 0xB82F47B8, 0x48BF8748, 0x2CAE6D2C, 0xE3C046E3, 0x573CD657, 0x859A3E85, + 0x29A96929, 0x7D4F647D, 0x94812A94, 0x492ECE49, 0x17C6CB17, 0xCA692FCA, + 0xC3BDFCC3, 0x5CA3975C, 0x5EE8055E, 0xD0ED7AD0, 0x87D1AC87, 0x8E057F8E, + 0xBA64D5BA, 0xA8A51AA8, 0xB7264BB7, 0xB9BE0EB9, 0x6087A760, 0xF8D55AF8, + 0x22362822, 0x111B1411, 0xDE753FDE, 0x79D92979, 0xAAEE88AA, 0x332D3C33, + 0x5F794C5F, 0xB6B702B6, 0x96CAB896, 0x5835DA58, 0x9CC4B09C, 0xFC4317FC, + 0x1A84551A, 0xF64D1FF6, 0x1C598A1C, 0x38B27D38, 0xAC3357AC, 0x18CFC718, + 0xF4068DF4, 0x69537469, 0x749BB774, 0xF597C4F5, 0x56AD9F56, 0xDAE372DA, + 0xD5EA7ED5, 0x4AF4154A, 0x9E8F229E, 0xA2AB12A2, 0x4E62584E, 0xE85F07E8, + 0xE51D99E5, 0x39233439, 0xC1F66EC1, 0x446C5044, 0x5D32DE5D, 0x72466872, + 0x26A06526, 0x93CDBC93, 0x03DADB03, 0xC6BAF8C6, 0xFA9EC8FA, 0x82D6A882, + 0xCF6E2BCF, 0x50704050, 0xEB85DCEB, 0x750AFE75, 0x8A93328A, 0x8DDFA48D, + 0x4C29CA4C, 0x141C1014, 0x73D72173, 0xCCB4F0CC, 0x09D4D309, 0x108A5D10, + 0xE2510FE2, 0x00000000, 0x9A196F9A, 0xE01A9DE0, 0x8F94368F, 0xE6C742E6, + 0xECC94AEC, 
0xFDD25EFD, 0xAB7FC1AB, 0xD8A8E0D8} +}; + +/* The exp_to_poly and poly_to_exp tables are used to perform efficient + * operations in GF(2^8) represented as GF(2)[x]/w(x) where + * w(x)=x^8+x^6+x^3+x^2+1. We care about doing that because it's part of the + * definition of the RS matrix in the key schedule. Elements of that field + * are polynomials of degree not greater than 7 and all coefficients 0 or 1, + * which can be represented naturally by bytes (just substitute x=2). In that + * form, GF(2^8) addition is the same as bitwise XOR, but GF(2^8) + * multiplication is inefficient without hardware support. To multiply + * faster, I make use of the fact x is a generator for the nonzero elements, + * so that every element p of GF(2)[x]/w(x) is either 0 or equal to (x)^n for + * some n in 0..254. Note that that caret is exponentiation in GF(2^8), + * *not* polynomial notation. So if I want to compute pq where p and q are + * in GF(2^8), I can just say: + * 1. if p=0 or q=0 then pq=0 + * 2. otherwise, find m and n such that p=x^m and q=x^n + * 3. pq=(x^m)(x^n)=x^(m+n), so add m and n and find pq + * The translations in steps 2 and 3 are looked up in the tables + * poly_to_exp (for step 2) and exp_to_poly (for step 3). To see this + * in action, look at the CALC_S macro. As additional wrinkles, note that + * one of my operands is always a constant, so the poly_to_exp lookup on it + * is done in advance; I included the original values in the comments so + * readers can have some chance of recognizing that this *is* the RS matrix + * from the Twofish paper. I've only included the table entries I actually + * need; I never do a lookup on a variable input of zero and the biggest + * exponents I'll ever see are 254 (variable) and 237 (constant), so they'll + * never sum to more than 491. I'm repeating part of the exp_to_poly table + * so that I don't have to do mod-255 reduction in the exponent arithmetic. + * Since I know my constant operands are never zero, I only have to worry + * about zero values in the variable operand, and I do it with a simple + * conditional branch. I know conditionals are expensive, but I couldn't + * see a non-horrible way of avoiding them, and I did manage to group the + * statements so that each if covers four group multiplications. 
*/ + +static const unsigned char poly_to_exp[255] = { + 0x00, 0x01, 0x17, 0x02, 0x2E, 0x18, 0x53, 0x03, 0x6A, 0x2F, 0x93, 0x19, + 0x34, 0x54, 0x45, 0x04, 0x5C, 0x6B, 0xB6, 0x30, 0xA6, 0x94, 0x4B, 0x1A, + 0x8C, 0x35, 0x81, 0x55, 0xAA, 0x46, 0x0D, 0x05, 0x24, 0x5D, 0x87, 0x6C, + 0x9B, 0xB7, 0xC1, 0x31, 0x2B, 0xA7, 0xA3, 0x95, 0x98, 0x4C, 0xCA, 0x1B, + 0xE6, 0x8D, 0x73, 0x36, 0xCD, 0x82, 0x12, 0x56, 0x62, 0xAB, 0xF0, 0x47, + 0x4F, 0x0E, 0xBD, 0x06, 0xD4, 0x25, 0xD2, 0x5E, 0x27, 0x88, 0x66, 0x6D, + 0xD6, 0x9C, 0x79, 0xB8, 0x08, 0xC2, 0xDF, 0x32, 0x68, 0x2C, 0xFD, 0xA8, + 0x8A, 0xA4, 0x5A, 0x96, 0x29, 0x99, 0x22, 0x4D, 0x60, 0xCB, 0xE4, 0x1C, + 0x7B, 0xE7, 0x3B, 0x8E, 0x9E, 0x74, 0xF4, 0x37, 0xD8, 0xCE, 0xF9, 0x83, + 0x6F, 0x13, 0xB2, 0x57, 0xE1, 0x63, 0xDC, 0xAC, 0xC4, 0xF1, 0xAF, 0x48, + 0x0A, 0x50, 0x42, 0x0F, 0xBA, 0xBE, 0xC7, 0x07, 0xDE, 0xD5, 0x78, 0x26, + 0x65, 0xD3, 0xD1, 0x5F, 0xE3, 0x28, 0x21, 0x89, 0x59, 0x67, 0xFC, 0x6E, + 0xB1, 0xD7, 0xF8, 0x9D, 0xF3, 0x7A, 0x3A, 0xB9, 0xC6, 0x09, 0x41, 0xC3, + 0xAE, 0xE0, 0xDB, 0x33, 0x44, 0x69, 0x92, 0x2D, 0x52, 0xFE, 0x16, 0xA9, + 0x0C, 0x8B, 0x80, 0xA5, 0x4A, 0x5B, 0xB5, 0x97, 0xC9, 0x2A, 0xA2, 0x9A, + 0xC0, 0x23, 0x86, 0x4E, 0xBC, 0x61, 0xEF, 0xCC, 0x11, 0xE5, 0x72, 0x1D, + 0x3D, 0x7C, 0xEB, 0xE8, 0xE9, 0x3C, 0xEA, 0x8F, 0x7D, 0x9F, 0xEC, 0x75, + 0x1E, 0xF5, 0x3E, 0x38, 0xF6, 0xD9, 0x3F, 0xCF, 0x76, 0xFA, 0x1F, 0x84, + 0xA0, 0x70, 0xED, 0x14, 0x90, 0xB3, 0x7E, 0x58, 0xFB, 0xE2, 0x20, 0x64, + 0xD0, 0xDD, 0x77, 0xAD, 0xDA, 0xC5, 0x40, 0xF2, 0x39, 0xB0, 0xF7, 0x49, + 0xB4, 0x0B, 0x7F, 0x51, 0x15, 0x43, 0x91, 0x10, 0x71, 0xBB, 0xEE, 0xBF, + 0x85, 0xC8, 0xA1 +}; + +static const unsigned char exp_to_poly[492] = { + 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x4D, 0x9A, 0x79, 0xF2, + 0xA9, 0x1F, 0x3E, 0x7C, 0xF8, 0xBD, 0x37, 0x6E, 0xDC, 0xF5, 0xA7, 0x03, + 0x06, 0x0C, 0x18, 0x30, 0x60, 0xC0, 0xCD, 0xD7, 0xE3, 0x8B, 0x5B, 0xB6, + 0x21, 0x42, 0x84, 0x45, 0x8A, 0x59, 0xB2, 0x29, 0x52, 0xA4, 0x05, 0x0A, + 0x14, 0x28, 0x50, 0xA0, 0x0D, 0x1A, 0x34, 0x68, 0xD0, 0xED, 0x97, 0x63, + 0xC6, 0xC1, 0xCF, 0xD3, 0xEB, 0x9B, 0x7B, 0xF6, 0xA1, 0x0F, 0x1E, 0x3C, + 0x78, 0xF0, 0xAD, 0x17, 0x2E, 0x5C, 0xB8, 0x3D, 0x7A, 0xF4, 0xA5, 0x07, + 0x0E, 0x1C, 0x38, 0x70, 0xE0, 0x8D, 0x57, 0xAE, 0x11, 0x22, 0x44, 0x88, + 0x5D, 0xBA, 0x39, 0x72, 0xE4, 0x85, 0x47, 0x8E, 0x51, 0xA2, 0x09, 0x12, + 0x24, 0x48, 0x90, 0x6D, 0xDA, 0xF9, 0xBF, 0x33, 0x66, 0xCC, 0xD5, 0xE7, + 0x83, 0x4B, 0x96, 0x61, 0xC2, 0xC9, 0xDF, 0xF3, 0xAB, 0x1B, 0x36, 0x6C, + 0xD8, 0xFD, 0xB7, 0x23, 0x46, 0x8C, 0x55, 0xAA, 0x19, 0x32, 0x64, 0xC8, + 0xDD, 0xF7, 0xA3, 0x0B, 0x16, 0x2C, 0x58, 0xB0, 0x2D, 0x5A, 0xB4, 0x25, + 0x4A, 0x94, 0x65, 0xCA, 0xD9, 0xFF, 0xB3, 0x2B, 0x56, 0xAC, 0x15, 0x2A, + 0x54, 0xA8, 0x1D, 0x3A, 0x74, 0xE8, 0x9D, 0x77, 0xEE, 0x91, 0x6F, 0xDE, + 0xF1, 0xAF, 0x13, 0x26, 0x4C, 0x98, 0x7D, 0xFA, 0xB9, 0x3F, 0x7E, 0xFC, + 0xB5, 0x27, 0x4E, 0x9C, 0x75, 0xEA, 0x99, 0x7F, 0xFE, 0xB1, 0x2F, 0x5E, + 0xBC, 0x35, 0x6A, 0xD4, 0xE5, 0x87, 0x43, 0x86, 0x41, 0x82, 0x49, 0x92, + 0x69, 0xD2, 0xE9, 0x9F, 0x73, 0xE6, 0x81, 0x4F, 0x9E, 0x71, 0xE2, 0x89, + 0x5F, 0xBE, 0x31, 0x62, 0xC4, 0xC5, 0xC7, 0xC3, 0xCB, 0xDB, 0xFB, 0xBB, + 0x3B, 0x76, 0xEC, 0x95, 0x67, 0xCE, 0xD1, 0xEF, 0x93, 0x6B, 0xD6, 0xE1, + 0x8F, 0x53, 0xA6, 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x4D, + 0x9A, 0x79, 0xF2, 0xA9, 0x1F, 0x3E, 0x7C, 0xF8, 0xBD, 0x37, 0x6E, 0xDC, + 0xF5, 0xA7, 0x03, 0x06, 0x0C, 0x18, 0x30, 0x60, 0xC0, 0xCD, 0xD7, 0xE3, + 0x8B, 0x5B, 0xB6, 0x21, 0x42, 0x84, 0x45, 0x8A, 0x59, 0xB2, 0x29, 0x52, + 0xA4, 0x05, 0x0A, 
0x14, 0x28, 0x50, 0xA0, 0x0D, 0x1A, 0x34, 0x68, 0xD0, + 0xED, 0x97, 0x63, 0xC6, 0xC1, 0xCF, 0xD3, 0xEB, 0x9B, 0x7B, 0xF6, 0xA1, + 0x0F, 0x1E, 0x3C, 0x78, 0xF0, 0xAD, 0x17, 0x2E, 0x5C, 0xB8, 0x3D, 0x7A, + 0xF4, 0xA5, 0x07, 0x0E, 0x1C, 0x38, 0x70, 0xE0, 0x8D, 0x57, 0xAE, 0x11, + 0x22, 0x44, 0x88, 0x5D, 0xBA, 0x39, 0x72, 0xE4, 0x85, 0x47, 0x8E, 0x51, + 0xA2, 0x09, 0x12, 0x24, 0x48, 0x90, 0x6D, 0xDA, 0xF9, 0xBF, 0x33, 0x66, + 0xCC, 0xD5, 0xE7, 0x83, 0x4B, 0x96, 0x61, 0xC2, 0xC9, 0xDF, 0xF3, 0xAB, + 0x1B, 0x36, 0x6C, 0xD8, 0xFD, 0xB7, 0x23, 0x46, 0x8C, 0x55, 0xAA, 0x19, + 0x32, 0x64, 0xC8, 0xDD, 0xF7, 0xA3, 0x0B, 0x16, 0x2C, 0x58, 0xB0, 0x2D, + 0x5A, 0xB4, 0x25, 0x4A, 0x94, 0x65, 0xCA, 0xD9, 0xFF, 0xB3, 0x2B, 0x56, + 0xAC, 0x15, 0x2A, 0x54, 0xA8, 0x1D, 0x3A, 0x74, 0xE8, 0x9D, 0x77, 0xEE, + 0x91, 0x6F, 0xDE, 0xF1, 0xAF, 0x13, 0x26, 0x4C, 0x98, 0x7D, 0xFA, 0xB9, + 0x3F, 0x7E, 0xFC, 0xB5, 0x27, 0x4E, 0x9C, 0x75, 0xEA, 0x99, 0x7F, 0xFE, + 0xB1, 0x2F, 0x5E, 0xBC, 0x35, 0x6A, 0xD4, 0xE5, 0x87, 0x43, 0x86, 0x41, + 0x82, 0x49, 0x92, 0x69, 0xD2, 0xE9, 0x9F, 0x73, 0xE6, 0x81, 0x4F, 0x9E, + 0x71, 0xE2, 0x89, 0x5F, 0xBE, 0x31, 0x62, 0xC4, 0xC5, 0xC7, 0xC3, 0xCB +}; + + +/* The table constants are indices of + * S-box entries, preprocessed through q0 and q1. */ +static const unsigned char calc_sb_tbl[512] = { + 0xA9, 0x75, 0x67, 0xF3, 0xB3, 0xC6, 0xE8, 0xF4, + 0x04, 0xDB, 0xFD, 0x7B, 0xA3, 0xFB, 0x76, 0xC8, + 0x9A, 0x4A, 0x92, 0xD3, 0x80, 0xE6, 0x78, 0x6B, + 0xE4, 0x45, 0xDD, 0x7D, 0xD1, 0xE8, 0x38, 0x4B, + 0x0D, 0xD6, 0xC6, 0x32, 0x35, 0xD8, 0x98, 0xFD, + 0x18, 0x37, 0xF7, 0x71, 0xEC, 0xF1, 0x6C, 0xE1, + 0x43, 0x30, 0x75, 0x0F, 0x37, 0xF8, 0x26, 0x1B, + 0xFA, 0x87, 0x13, 0xFA, 0x94, 0x06, 0x48, 0x3F, + 0xF2, 0x5E, 0xD0, 0xBA, 0x8B, 0xAE, 0x30, 0x5B, + 0x84, 0x8A, 0x54, 0x00, 0xDF, 0xBC, 0x23, 0x9D, + 0x19, 0x6D, 0x5B, 0xC1, 0x3D, 0xB1, 0x59, 0x0E, + 0xF3, 0x80, 0xAE, 0x5D, 0xA2, 0xD2, 0x82, 0xD5, + 0x63, 0xA0, 0x01, 0x84, 0x83, 0x07, 0x2E, 0x14, + 0xD9, 0xB5, 0x51, 0x90, 0x9B, 0x2C, 0x7C, 0xA3, + 0xA6, 0xB2, 0xEB, 0x73, 0xA5, 0x4C, 0xBE, 0x54, + 0x16, 0x92, 0x0C, 0x74, 0xE3, 0x36, 0x61, 0x51, + 0xC0, 0x38, 0x8C, 0xB0, 0x3A, 0xBD, 0xF5, 0x5A, + 0x73, 0xFC, 0x2C, 0x60, 0x25, 0x62, 0x0B, 0x96, + 0xBB, 0x6C, 0x4E, 0x42, 0x89, 0xF7, 0x6B, 0x10, + 0x53, 0x7C, 0x6A, 0x28, 0xB4, 0x27, 0xF1, 0x8C, + 0xE1, 0x13, 0xE6, 0x95, 0xBD, 0x9C, 0x45, 0xC7, + 0xE2, 0x24, 0xF4, 0x46, 0xB6, 0x3B, 0x66, 0x70, + 0xCC, 0xCA, 0x95, 0xE3, 0x03, 0x85, 0x56, 0xCB, + 0xD4, 0x11, 0x1C, 0xD0, 0x1E, 0x93, 0xD7, 0xB8, + 0xFB, 0xA6, 0xC3, 0x83, 0x8E, 0x20, 0xB5, 0xFF, + 0xE9, 0x9F, 0xCF, 0x77, 0xBF, 0xC3, 0xBA, 0xCC, + 0xEA, 0x03, 0x77, 0x6F, 0x39, 0x08, 0xAF, 0xBF, + 0x33, 0x40, 0xC9, 0xE7, 0x62, 0x2B, 0x71, 0xE2, + 0x81, 0x79, 0x79, 0x0C, 0x09, 0xAA, 0xAD, 0x82, + 0x24, 0x41, 0xCD, 0x3A, 0xF9, 0xEA, 0xD8, 0xB9, + 0xE5, 0xE4, 0xC5, 0x9A, 0xB9, 0xA4, 0x4D, 0x97, + 0x44, 0x7E, 0x08, 0xDA, 0x86, 0x7A, 0xE7, 0x17, + 0xA1, 0x66, 0x1D, 0x94, 0xAA, 0xA1, 0xED, 0x1D, + 0x06, 0x3D, 0x70, 0xF0, 0xB2, 0xDE, 0xD2, 0xB3, + 0x41, 0x0B, 0x7B, 0x72, 0xA0, 0xA7, 0x11, 0x1C, + 0x31, 0xEF, 0xC2, 0xD1, 0x27, 0x53, 0x90, 0x3E, + 0x20, 0x8F, 0xF6, 0x33, 0x60, 0x26, 0xFF, 0x5F, + 0x96, 0xEC, 0x5C, 0x76, 0xB1, 0x2A, 0xAB, 0x49, + 0x9E, 0x81, 0x9C, 0x88, 0x52, 0xEE, 0x1B, 0x21, + 0x5F, 0xC4, 0x93, 0x1A, 0x0A, 0xEB, 0xEF, 0xD9, + 0x91, 0xC5, 0x85, 0x39, 0x49, 0x99, 0xEE, 0xCD, + 0x2D, 0xAD, 0x4F, 0x31, 0x8F, 0x8B, 0x3B, 0x01, + 0x47, 0x18, 0x87, 0x23, 0x6D, 0xDD, 0x46, 0x1F, + 0xD6, 0x4E, 0x3E, 0x2D, 0x69, 0xF9, 0x64, 0x48, + 0x2A, 0x4F, 0xCE, 0xF2, 0xCB, 0x65, 
0x2F, 0x8E, + 0xFC, 0x78, 0x97, 0x5C, 0x05, 0x58, 0x7A, 0x19, + 0xAC, 0x8D, 0x7F, 0xE5, 0xD5, 0x98, 0x1A, 0x57, + 0x4B, 0x67, 0x0E, 0x7F, 0xA7, 0x05, 0x5A, 0x64, + 0x28, 0xAF, 0x14, 0x63, 0x3F, 0xB6, 0x29, 0xFE, + 0x88, 0xF5, 0x3C, 0xB7, 0x4C, 0x3C, 0x02, 0xA5, + 0xB8, 0xCE, 0xDA, 0xE9, 0xB0, 0x68, 0x17, 0x44, + 0x55, 0xE0, 0x1F, 0x4D, 0x8A, 0x43, 0x7D, 0x69, + 0x57, 0x29, 0xC7, 0x2E, 0x8D, 0xAC, 0x74, 0x15, + 0xB7, 0x59, 0xC4, 0xA8, 0x9F, 0x0A, 0x72, 0x9E, + 0x7E, 0x6E, 0x15, 0x47, 0x22, 0xDF, 0x12, 0x34, + 0x58, 0x35, 0x07, 0x6A, 0x99, 0xCF, 0x34, 0xDC, + 0x6E, 0x22, 0x50, 0xC9, 0xDE, 0xC0, 0x68, 0x9B, + 0x65, 0x89, 0xBC, 0xD4, 0xDB, 0xED, 0xF8, 0xAB, + 0xC8, 0x12, 0xA8, 0xA2, 0x2B, 0x0D, 0x40, 0x52, + 0xDC, 0xBB, 0xFE, 0x02, 0x32, 0x2F, 0xA4, 0xA9, + 0xCA, 0xD7, 0x10, 0x61, 0x21, 0x1E, 0xF0, 0xB4, + 0xD3, 0x50, 0x5D, 0x04, 0x0F, 0xF6, 0x00, 0xC2, + 0x6F, 0x16, 0x9D, 0x25, 0x36, 0x86, 0x42, 0x56, + 0x4A, 0x55, 0x5E, 0x09, 0xC1, 0xBE, 0xE0, 0x91 +}; + +/* Macro to perform one column of the RS matrix multiplication. The + * parameters a, b, c, and d are the four bytes of output; i is the index + * of the key bytes, and w, x, y, and z, are the column of constants from + * the RS matrix, preprocessed through the poly_to_exp table. */ + +#define CALC_S(a, b, c, d, i, w, x, y, z) \ + if (key[i]) { \ + tmp = poly_to_exp[key[i] - 1]; \ + (a) ^= exp_to_poly[tmp + (w)]; \ + (b) ^= exp_to_poly[tmp + (x)]; \ + (c) ^= exp_to_poly[tmp + (y)]; \ + (d) ^= exp_to_poly[tmp + (z)]; \ + } + +/* Macros to calculate the key-dependent S-boxes for a 128-bit key using + * the S vector from CALC_S. CALC_SB_2 computes a single entry in all + * four S-boxes, where i is the index of the entry to compute, and a and b + * are the index numbers preprocessed through the q0 and q1 tables + * respectively. */ + +#define CALC_SB_2(i, a, b) \ + ctx->s[0][i] = mds[0][q0[(a) ^ sa] ^ se]; \ + ctx->s[1][i] = mds[1][q0[(b) ^ sb] ^ sf]; \ + ctx->s[2][i] = mds[2][q1[(a) ^ sc] ^ sg]; \ + ctx->s[3][i] = mds[3][q1[(b) ^ sd] ^ sh] + +/* Macro exactly like CALC_SB_2, but for 192-bit keys. */ + +#define CALC_SB192_2(i, a, b) \ + ctx->s[0][i] = mds[0][q0[q0[(b) ^ sa] ^ se] ^ si]; \ + ctx->s[1][i] = mds[1][q0[q1[(b) ^ sb] ^ sf] ^ sj]; \ + ctx->s[2][i] = mds[2][q1[q0[(a) ^ sc] ^ sg] ^ sk]; \ + ctx->s[3][i] = mds[3][q1[q1[(a) ^ sd] ^ sh] ^ sl]; + +/* Macro exactly like CALC_SB_2, but for 256-bit keys. */ + +#define CALC_SB256_2(i, a, b) \ + skey->s[0][i] = mds[0][q0[q0[q1[(b) ^ sa] ^ se] ^ si] ^ sm]; \ + skey->s[1][i] = mds[1][q0[q1[q1[(a) ^ sb] ^ sf] ^ sj] ^ sn]; \ + skey->s[2][i] = mds[2][q1[q0[q0[(a) ^ sc] ^ sg] ^ sk] ^ so]; \ + skey->s[3][i] = mds[3][q1[q1[q0[(b) ^ sd] ^ sh] ^ sl] ^ sp]; + +/* Macros to calculate the whitening and round subkeys. CALC_K_2 computes the + * last two stages of the h() function for a given index (either 2i or 2i+1). + * a, b, c, and d are the four bytes going into the last two stages. For + * 128-bit keys, this is the entire h() function and a and c are the index + * preprocessed through q0 and q1 respectively; for longer keys they are the + * output of previous stages. j is the index of the first key byte to use. + * CALC_K computes a pair of subkeys for 128-bit Twofish, by calling CALC_K_2 + * twice, doing the Pseudo-Hadamard Transform, and doing the necessary + * rotations. Its parameters are: a, the array to write the results into, + * j, the index of the first output entry, k and l, the preprocessed indices + * for index 2i, and m and n, the preprocessed indices for index 2i+1. 
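+ * (Equivalently, writing A = h(2i) and B = ROL(h(2i+1), 8) in the paper's
+ * notation, CALC_K stores K[2i] = (A + B) mod 2^32 and
+ * K[2i+1] = ROL((A + 2B) mod 2^32, 9), that is, the PHT followed by the
+ * 9-bit subkey rotation; this matches the x/y arithmetic in the macro below.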
+ * CALC_K192_2 expands CALC_K_2 to handle 192-bit keys, by doing an + * additional lookup-and-XOR stage. The parameters a, b, c and d are the + * four bytes going into the last three stages. For 192-bit keys, c = d + * are the index preprocessed through q0, and a = b are the index + * preprocessed through q1; j is the index of the first key byte to use. + * CALC_K192 is identical to CALC_K but for using the CALC_K192_2 macro + * instead of CALC_K_2. + * CALC_K256_2 expands CALC_K192_2 to handle 256-bit keys, by doing an + * additional lookup-and-XOR stage. The parameters a and b are the index + * preprocessed through q0 and q1 respectively; j is the index of the first + * key byte to use. CALC_K256 is identical to CALC_K but for using the + * CALC_K256_2 macro instead of CALC_K_2. */ + +#define CALC_K_2(a, b, c, d, j) \ + mds[0][q0[a ^ key[(j) + 8]] ^ key[j]] \ + ^ mds[1][q0[b ^ key[(j) + 9]] ^ key[(j) + 1]] \ + ^ mds[2][q1[c ^ key[(j) + 10]] ^ key[(j) + 2]] \ + ^ mds[3][q1[d ^ key[(j) + 11]] ^ key[(j) + 3]] + +#define CALC_K(a, j, k, l, m, n) \ + x = CALC_K_2 (k, l, k, l, 0); \ + y = CALC_K_2 (m, n, m, n, 4); \ + y = _rotl(y, 8); \ + x += y; y += x; skey->a[j] = x; \ + skey->a[(j) + 1] = _rotl(y, 9) + +#define CALC_K192_2(a, b, c, d, j) \ + CALC_K_2 (q0[a ^ key[(j) + 16]], \ + q1[b ^ key[(j) + 17]], \ + q0[c ^ key[(j) + 18]], \ + q1[d ^ key[(j) + 19]], j) + +#define CALC_K192(a, j, k, l, m, n) \ + x = CALC_K192_2 (l, l, k, k, 0); \ + y = CALC_K192_2 (n, n, m, m, 4); \ + y = _rotl(y, 8); \ + x += y; y += x; skey->a[j] = x; \ + skey->a[(j) + 1] = _rotl(y, 9) + +#define CALC_K256_2(a, b, j) \ + CALC_K192_2 (q1[b ^ key[(j) + 24]], \ + q1[a ^ key[(j) + 25]], \ + q0[a ^ key[(j) + 26]], \ + q0[b ^ key[(j) + 27]], j) + +#define CALC_K256(a, j, k, l, m, n) \ + x = CALC_K256_2 (k, l, 0); \ + y = CALC_K256_2 (m, n, 4); \ + y = _rotl(y, 8); \ + x += y; y += x; skey->a[j] = x; \ + skey->a[(j) + 1] = _rotl(y, 9) + + +/* Perform the key setup. */ +void _stdcall twofish256_set_key(const unsigned char *key, twofish256_key *skey) +{ + int i, j, k; + + /* Temporaries for CALC_K. */ + unsigned long x, y; + + /* The S vector used to key the S-boxes, split up into individual bytes. + * 128-bit keys use only sa through sh; 256-bit use all of them. */ + unsigned char sa = 0, sb = 0, sc = 0, sd = 0, se = 0, sf = 0, sg = 0, sh = 0; + unsigned char si = 0, sj = 0, sk = 0, sl = 0, sm = 0, sn = 0, so = 0, sp = 0; + + /* Temporary for CALC_S. */ + unsigned char tmp; + + /* Compute the first two words of the S vector. The magic numbers are + * the entries of the RS matrix, preprocessed through poly_to_exp. The + * numbers in the comments are the original (polynomial form) matrix + * entries. 
*/ + CALC_S (sa, sb, sc, sd, 0, 0x00, 0x2D, 0x01, 0x2D); /* 01 A4 02 A4 */ + CALC_S (sa, sb, sc, sd, 1, 0x2D, 0xA4, 0x44, 0x8A); /* A4 56 A1 55 */ + CALC_S (sa, sb, sc, sd, 2, 0x8A, 0xD5, 0xBF, 0xD1); /* 55 82 FC 87 */ + CALC_S (sa, sb, sc, sd, 3, 0xD1, 0x7F, 0x3D, 0x99); /* 87 F3 C1 5A */ + CALC_S (sa, sb, sc, sd, 4, 0x99, 0x46, 0x66, 0x96); /* 5A 1E 47 58 */ + CALC_S (sa, sb, sc, sd, 5, 0x96, 0x3C, 0x5B, 0xED); /* 58 C6 AE DB */ + CALC_S (sa, sb, sc, sd, 6, 0xED, 0x37, 0x4F, 0xE0); /* DB 68 3D 9E */ + CALC_S (sa, sb, sc, sd, 7, 0xE0, 0xD0, 0x8C, 0x17); /* 9E E5 19 03 */ + CALC_S (se, sf, sg, sh, 8, 0x00, 0x2D, 0x01, 0x2D); /* 01 A4 02 A4 */ + CALC_S (se, sf, sg, sh, 9, 0x2D, 0xA4, 0x44, 0x8A); /* A4 56 A1 55 */ + CALC_S (se, sf, sg, sh, 10, 0x8A, 0xD5, 0xBF, 0xD1); /* 55 82 FC 87 */ + CALC_S (se, sf, sg, sh, 11, 0xD1, 0x7F, 0x3D, 0x99); /* 87 F3 C1 5A */ + CALC_S (se, sf, sg, sh, 12, 0x99, 0x46, 0x66, 0x96); /* 5A 1E 47 58 */ + CALC_S (se, sf, sg, sh, 13, 0x96, 0x3C, 0x5B, 0xED); /* 58 C6 AE DB */ + CALC_S (se, sf, sg, sh, 14, 0xED, 0x37, 0x4F, 0xE0); /* DB 68 3D 9E */ + CALC_S (se, sf, sg, sh, 15, 0xE0, 0xD0, 0x8C, 0x17); /* 9E E5 19 03 */ + + /* Calculate the third word of the S vector */ + CALC_S (si, sj, sk, sl, 16, 0x00, 0x2D, 0x01, 0x2D); /* 01 A4 02 A4 */ + CALC_S (si, sj, sk, sl, 17, 0x2D, 0xA4, 0x44, 0x8A); /* A4 56 A1 55 */ + CALC_S (si, sj, sk, sl, 18, 0x8A, 0xD5, 0xBF, 0xD1); /* 55 82 FC 87 */ + CALC_S (si, sj, sk, sl, 19, 0xD1, 0x7F, 0x3D, 0x99); /* 87 F3 C1 5A */ + CALC_S (si, sj, sk, sl, 20, 0x99, 0x46, 0x66, 0x96); /* 5A 1E 47 58 */ + CALC_S (si, sj, sk, sl, 21, 0x96, 0x3C, 0x5B, 0xED); /* 58 C6 AE DB */ + CALC_S (si, sj, sk, sl, 22, 0xED, 0x37, 0x4F, 0xE0); /* DB 68 3D 9E */ + CALC_S (si, sj, sk, sl, 23, 0xE0, 0xD0, 0x8C, 0x17); /* 9E E5 19 03 */ + + + /* Calculate the fourth word of the S vector */ + CALC_S (sm, sn, so, sp, 24, 0x00, 0x2D, 0x01, 0x2D); /* 01 A4 02 A4 */ + CALC_S (sm, sn, so, sp, 25, 0x2D, 0xA4, 0x44, 0x8A); /* A4 56 A1 55 */ + CALC_S (sm, sn, so, sp, 26, 0x8A, 0xD5, 0xBF, 0xD1); /* 55 82 FC 87 */ + CALC_S (sm, sn, so, sp, 27, 0xD1, 0x7F, 0x3D, 0x99); /* 87 F3 C1 5A */ + CALC_S (sm, sn, so, sp, 28, 0x99, 0x46, 0x66, 0x96); /* 5A 1E 47 58 */ + CALC_S (sm, sn, so, sp, 29, 0x96, 0x3C, 0x5B, 0xED); /* 58 C6 AE DB */ + CALC_S (sm, sn, so, sp, 30, 0xED, 0x37, 0x4F, 0xE0); /* DB 68 3D 9E */ + CALC_S (sm, sn, so, sp, 31, 0xE0, 0xD0, 0x8C, 0x17); /* 9E E5 19 03 */ + + /* Compute the S-boxes. */ + for ( i = j = 0, k = 1; i < 256; i++, j += 2, k += 2 ) { + CALC_SB256_2( i, calc_sb_tbl[j], calc_sb_tbl[k] ); + } + + /* Calculate whitening and round subkeys. The constants are + * indices of subkeys, preprocessed through q0 and q1. 
*/ + CALC_K256 (w, 0, 0xA9, 0x75, 0x67, 0xF3); + CALC_K256 (w, 2, 0xB3, 0xC6, 0xE8, 0xF4); + CALC_K256 (w, 4, 0x04, 0xDB, 0xFD, 0x7B); + CALC_K256 (w, 6, 0xA3, 0xFB, 0x76, 0xC8); + CALC_K256 (k, 0, 0x9A, 0x4A, 0x92, 0xD3); + CALC_K256 (k, 2, 0x80, 0xE6, 0x78, 0x6B); + CALC_K256 (k, 4, 0xE4, 0x45, 0xDD, 0x7D); + CALC_K256 (k, 6, 0xD1, 0xE8, 0x38, 0x4B); + CALC_K256 (k, 8, 0x0D, 0xD6, 0xC6, 0x32); + CALC_K256 (k, 10, 0x35, 0xD8, 0x98, 0xFD); + CALC_K256 (k, 12, 0x18, 0x37, 0xF7, 0x71); + CALC_K256 (k, 14, 0xEC, 0xF1, 0x6C, 0xE1); + CALC_K256 (k, 16, 0x43, 0x30, 0x75, 0x0F); + CALC_K256 (k, 18, 0x37, 0xF8, 0x26, 0x1B); + CALC_K256 (k, 20, 0xFA, 0x87, 0x13, 0xFA); + CALC_K256 (k, 22, 0x94, 0x06, 0x48, 0x3F); + CALC_K256 (k, 24, 0xF2, 0x5E, 0xD0, 0xBA); + CALC_K256 (k, 26, 0x8B, 0xAE, 0x30, 0x5B); + CALC_K256 (k, 28, 0x84, 0x8A, 0x54, 0x00); + CALC_K256 (k, 30, 0xDF, 0xBC, 0x23, 0x9D); +} + +#if 0 +/* Macros to compute the g() function in the encryption and decryption + * rounds. G1 is the straight g() function; G2 includes the 8-bit + * rotation for the high 32-bit word. */ + +#define G1(a) \ + (key->s[0][(a) & 0xFF]) ^ (key->s[1][((a) >> 8) & 0xFF]) \ + ^ (key->s[2][((a) >> 16) & 0xFF]) ^ (key->s[3][(a) >> 24]) + +#define G2(b) \ + (key->s[1][(b) & 0xFF]) ^ (key->s[2][((b) >> 8) & 0xFF]) \ + ^ (key->s[3][((b) >> 16) & 0xFF]) ^ (key->s[0][(b) >> 24]) + +/* Encryption and decryption Feistel rounds. Each one calls the two g() + * macros, does the PHT, and performs the XOR and the appropriate bit + * rotations. The parameters are the round number (used to select subkeys), + * and the four 32-bit chunks of the text. */ + +#define ENCROUND(n, a, b, c, d) \ + x = G1 (a); y = G2 (b); \ + x += y; y += x + key->k[2 * (n) + 1]; \ + (c) ^= x + key->k[2 * (n)]; \ + (c) = _rotr((c), 1); \ + (d) = _rotl((d), 1) ^ y + +#define DECROUND(n, a, b, c, d) \ + x = G1 (a); y = G2 (b); \ + x += y; y += x; \ + (d) ^= y + key->k[2 * (n) + 1]; \ + (d) = _rotr((d), 1); \ + (c) = _rotl((c), 1); \ + (c) ^= (x + key->k[2 * (n)]) + +/* Encryption and decryption cycles; each one is simply two Feistel rounds + * with the 32-bit chunks re-ordered to simulate the "swap" */ + +#define ENCCYCLE(n) \ + ENCROUND (2 * (n), a, b, c, d); \ + ENCROUND (2 * (n) + 1, c, d, a, b) + +#define DECCYCLE(n) \ + DECROUND (2 * (n) + 1, c, d, a, b); \ + DECROUND (2 * (n), a, b, c, d) + +/* Macros to convert the input and output bytes into 32-bit words, + * and simultaneously perform the whitening step. INPACK packs word + * number n into the variable named by x, using whitening subkey number m. + * OUTUNPACK unpacks word number n from the variable named by x, using + * whitening subkey number m. */ + +#define INPACK(n, x, m) \ + x = ((unsigned long*)in)[n] ^ key->w[m] + +#define OUTUNPACK(n, x, m) \ + ((unsigned long*)out)[n] = x ^ key->w[m]; + +/* Encrypt one block. in and out may be the same. */ +void _stdcall twofish256_encrypt(const unsigned char *in, unsigned char *out, twofish256_key *key) +{ + /* The four 32-bit chunks of the text. */ + unsigned long a, b, c, d; + + /* Temporaries used by the round function. */ + unsigned long x, y; + + /* Input whitening and packing. */ + INPACK (0, a, 0); + INPACK (1, b, 1); + INPACK (2, c, 2); + INPACK (3, d, 3); + + /* Encryption Feistel cycles. */ + ENCCYCLE (0); + ENCCYCLE (1); + ENCCYCLE (2); + ENCCYCLE (3); + ENCCYCLE (4); + ENCCYCLE (5); + ENCCYCLE (6); + ENCCYCLE (7); + + /* Output whitening and unpacking. 
*/
+	OUTUNPACK (0, c, 4);
+	OUTUNPACK (1, d, 5);
+	OUTUNPACK (2, a, 6);
+	OUTUNPACK (3, b, 7);
+}
+
+/* Decrypt one block.  in and out may be the same. */
+void _stdcall twofish256_decrypt(const unsigned char *in, unsigned char *out, twofish256_key *key)
+{
+	/* The four 32-bit chunks of the text. */
+	unsigned long a, b, c, d;
+
+	/* Temporaries used by the round function. */
+	unsigned long x, y;
+
+	/* Input whitening and packing. */
+	INPACK (0, c, 4);
+	INPACK (1, d, 5);
+	INPACK (2, a, 6);
+	INPACK (3, b, 7);
+
+	/* Decryption Feistel cycles. */
+	DECCYCLE (7);
+	DECCYCLE (6);
+	DECCYCLE (5);
+	DECCYCLE (4);
+	DECCYCLE (3);
+	DECCYCLE (2);
+	DECCYCLE (1);
+	DECCYCLE (0);
+
+	/* Output whitening and unpacking. */
+	OUTUNPACK (0, a, 0);
+	OUTUNPACK (1, b, 1);
+	OUTUNPACK (2, c, 2);
+	OUTUNPACK (3, d, 3);
+}
+#endif
\ No newline at end of file
diff --git a/ImBoxEnclave/crypto_fast/twofish.h b/ImBoxEnclave/crypto_fast/twofish.h
new file mode 100644
index 0000000..625ddf6
--- /dev/null
+++ b/ImBoxEnclave/crypto_fast/twofish.h
@@ -0,0 +1,20 @@
+#ifndef _TWOFISH_H_
+#define _TWOFISH_H_
+
+/* Structure for an expanded Twofish key.  s contains the key-dependent
+ * S-boxes composed with the MDS matrix; w contains the eight "whitening"
+ * subkeys, K[0] through K[7].  k holds the remaining, "round" subkeys.  Note
+ * that k[i] corresponds to what the Twofish paper calls K[i+8]. */
+
+typedef struct _twofish256_key {
+	unsigned long s[4][256], w[8], k[32];
+} twofish256_key;
+
+#define TWOFISH_KEY_SIZE   32
+#define TWOFISH_BLOCK_SIZE 16
+
+void _stdcall twofish256_set_key(const unsigned char *key, twofish256_key *skey);
+void _stdcall twofish256_encrypt(const unsigned char *in, unsigned char *out, twofish256_key *key);
+void _stdcall twofish256_decrypt(const unsigned char *in, unsigned char *out, twofish256_key *key);
+
+#endif
diff --git a/ImBoxEnclave/crypto_fast/xts_aes_ni.h b/ImBoxEnclave/crypto_fast/xts_aes_ni.h
new file mode 100644
index 0000000..a81d51e
--- /dev/null
+++ b/ImBoxEnclave/crypto_fast/xts_aes_ni.h
@@ -0,0 +1,7 @@
+#ifndef _XTS_AES_NI_H_
+#define _XTS_AES_NI_H_
+
+void _stdcall xts_aes_ni_encrypt(const unsigned char *in, unsigned char *out, size_t len, unsigned __int64 offset, xts_key *key);
+void _stdcall xts_aes_ni_decrypt(const unsigned char *in, unsigned char *out, size_t len, unsigned __int64 offset, xts_key *key);
+
+#endif
\ No newline at end of file
diff --git a/ImBoxEnclave/crypto_fast/xts_fast.c b/ImBoxEnclave/crypto_fast/xts_fast.c
new file mode 100644
index 0000000..c466e66
--- /dev/null
+++ b/ImBoxEnclave/crypto_fast/xts_fast.c
@@ -0,0 +1,437 @@
+/*
+    *
+    * Copyright (c) 2010-2012
+    * ntldr PGP key ID - 0x1B6A24550F33E44A
+    *
+
+   This program is free software: you can redistribute it and/or modify
+   it under the terms of the GNU General Public License version 3 as
+   published by the Free Software Foundation.
+
+   This program is distributed in the hope that it will be useful,
+   but WITHOUT ANY WARRANTY; without even the implied warranty of
+   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+   GNU General Public License for more details.
+
+   You should have received a copy of the GNU General Public License
+   along with this program. If not, see <http://www.gnu.org/licenses/>.
+*/ +#define ENCLAVE_ENABLED +#include +#include +//#include +#include "xts_fast.h" +#include "aes_asm.h" +#include "aes_padlock.h" +#include "xts_aes_ni.h" +#include "xts_serpent_sse2.h" +#include "xts_serpent_avx.h" + +typedef __declspec(align(1)) union _m128 { + unsigned long v32[4]; + unsigned __int64 v64[2]; +} m128; + +static xts_proc aes_selected_encrypt; +static xts_proc aes_selected_decrypt; +static xts_proc serpent_selected_encrypt; +static xts_proc serpent_selected_decrypt; + +#ifdef _M_X64 +#define def_tweak \ + unsigned __int64 t0, t1; m128 + +#define load_tweak() do { \ + t0 = t.v64[0]; t1 = t.v64[1]; \ +} while (0) + +#define tweak_xor(_in, _out) do { \ + ((unsigned __int64*)(_out))[0] = ((unsigned __int64*)(_in))[0] ^ t0; \ + ((unsigned __int64*)(_out))[1] = ((unsigned __int64*)(_in))[1] ^ t1; \ +} while (0) + +#define next_tweak() do { \ + cf = (t1 >> 63) * 135; \ + t1 = (t1 << 1) | (t0 >> 63); \ + t0 = (t0 << 1) ^ cf; \ +} while (0) + +#define copy_tweak(_buf) do { \ + ((unsigned __int64*)(_buf))[0] = t0; \ + ((unsigned __int64*)(_buf))[1] = t1; \ +} while (0) +#else +#define def_tweak m128 +#define load_tweak() + +#define tweak_xor(_in, _out) do { \ + ((unsigned __int64*)(_out))[0] = ((unsigned __int64*)(_in))[0] ^ t.v64[0]; \ + ((unsigned __int64*)(_out))[1] = ((unsigned __int64*)(_in))[1] ^ t.v64[1]; \ +} while (0) + +#define next_tweak() do { \ + cf = (t.v32[3] >> 31) * 135; \ + t.v64[1] <<= 1; \ + t.v32[2] |= t.v32[1] >> 31; \ + t.v64[0] <<= 1; \ + t.v32[0] ^= cf; \ +} while (0) + +#define copy_tweak(_buf) do { \ + memcpy(_buf, &t, sizeof(t)); \ +} while (0) +#endif + +#define DEF_XTS_PROC(func_name, tweak_name, crypt_name, key_field) \ + \ +static void _stdcall func_name(const unsigned char *in, unsigned char *out, size_t len, unsigned __int64 offset, xts_key *key) \ +{ \ + def_tweak t; \ + m128 idx; \ + size_t cf; \ + unsigned long i; \ + \ + idx.v64[0] = offset / XTS_SECTOR_SIZE; \ + idx.v64[1] = 0; \ + do \ + { \ + /* update tweak unit index */ \ + idx.v64[0]++; \ + /* derive first tweak value */ \ + tweak_name((unsigned char*)&idx, (unsigned char*)&t, &key->tweak_k.key_field); \ + load_tweak(); \ + \ + for (i = 0; i < XTS_BLOCKS_IN_SECTOR; i++) \ + { \ + tweak_xor(in, out); \ + crypt_name(out, out, &key->crypt_k.key_field); \ + tweak_xor(out, out); \ + \ + /* update pointers */ \ + in += XTS_BLOCK_SIZE; out += XTS_BLOCK_SIZE; \ + \ + /* derive next tweak value */ \ + next_tweak(); \ + } \ + } while (len -= XTS_SECTOR_SIZE); \ +} + +#define DEF_XTS_AES_PADLOCK(func_name, crypt_name) \ + \ +static void _stdcall func_name(const unsigned char *in, unsigned char *out, size_t len, unsigned __int64 offset, xts_key *key) \ +{ \ + def_tweak __declspec(align(16)) t; \ + m128 __declspec(align(16)) idx; \ + unsigned char __declspec(align(16)) buff[XTS_SECTOR_SIZE], tweak[XTS_SECTOR_SIZE]; \ + size_t cf, i; \ + \ + idx.v64[0] = offset / XTS_SECTOR_SIZE; \ + idx.v64[1] = 0; \ + do \ + { \ + /* update tweak unit index */ \ + idx.v64[0]++; \ + /* derive first tweak value */ \ + aes256_padlock_rekey(); \ + aes256_padlock_encrypt((unsigned char*)&idx, (unsigned char*)&t, 1, &key->tweak_k.aes); \ + load_tweak(); \ + \ + /* derive all tweak values for sector */ \ + for (i = 0; i < XTS_BLOCKS_IN_SECTOR; i++) { \ + copy_tweak(tweak + i*XTS_BLOCK_SIZE); \ + next_tweak(); \ + } \ + for (i = 0; i < XTS_SECTOR_SIZE / sizeof(unsigned __int64); i++) { \ + ((unsigned __int64*)buff)[i] = ((unsigned __int64*)in)[i] ^ ((unsigned __int64*)tweak)[i]; \ + } \ + aes256_padlock_rekey(); \ + 
crypt_name(buff, buff, XTS_BLOCKS_IN_SECTOR, &key->crypt_k.aes); \ + \ + for (i = 0; i < XTS_SECTOR_SIZE / sizeof(unsigned __int64); i++) { \ + ((unsigned __int64*)out)[i] = ((unsigned __int64*)buff)[i] ^ ((unsigned __int64*)tweak)[i]; \ + } \ + /* update pointers */ \ + in += XTS_SECTOR_SIZE; out += XTS_SECTOR_SIZE; \ + } while (len -= XTS_SECTOR_SIZE); \ +} + +DEF_XTS_PROC(xts_aes_basic_encrypt, aes256_asm_encrypt, aes256_asm_encrypt, aes); +DEF_XTS_PROC(xts_aes_basic_decrypt, aes256_asm_encrypt, aes256_asm_decrypt, aes); + +DEF_XTS_PROC(xts_twofish_encrypt, twofish256_encrypt, twofish256_encrypt, twofish); +DEF_XTS_PROC(xts_twofish_decrypt, twofish256_encrypt, twofish256_decrypt, twofish); + +#ifdef _M_IX86 + DEF_XTS_PROC(xts_serpent_basic_encrypt, serpent256_encrypt, serpent256_encrypt, serpent); + DEF_XTS_PROC(xts_serpent_basic_decrypt, serpent256_encrypt, serpent256_decrypt, serpent); +#endif + +DEF_XTS_AES_PADLOCK(xts_aes_padlock_encrypt, aes256_padlock_encrypt); +DEF_XTS_AES_PADLOCK(xts_aes_padlock_decrypt, aes256_padlock_decrypt); + +#ifdef _M_IX86 + +static void _stdcall xts_aes_encrypt(const unsigned char *in, unsigned char *out, size_t len, unsigned __int64 offset, xts_key *key) +{ + unsigned char fpustate[32]; + xts_proc selected; + + if ( (selected = aes_selected_encrypt) == xts_aes_ni_encrypt ) + { + if (save_fpu_state(fpustate) >= 0) { + xts_aes_ni_encrypt(in, out, len, offset, key); + load_fpu_state(fpustate); + } else { + xts_aes_basic_encrypt(in, out, len, offset, key); + } + } else { + selected(in, out, len, offset, key); + } +} + +static void _stdcall xts_aes_decrypt(const unsigned char *in, unsigned char *out, size_t len, unsigned __int64 offset, xts_key *key) +{ + unsigned char fpustate[32]; + xts_proc selected; + + if ( (selected = aes_selected_decrypt) == xts_aes_ni_decrypt ) + { + if (save_fpu_state(fpustate) >= 0) { + xts_aes_ni_decrypt(in, out, len, offset, key); + load_fpu_state(fpustate); + } else { + xts_aes_basic_decrypt(in, out, len, offset, key); + } + } else { + selected(in, out, len, offset, key); + } +} + +static void _stdcall xts_serpent_encrypt(const unsigned char *in, unsigned char *out, size_t len, unsigned __int64 offset, xts_key *key) +{ + unsigned char fpustate[32]; + xts_proc selected = serpent_selected_encrypt; + + if (selected != xts_serpent_basic_encrypt && save_fpu_state(fpustate) >= 0) { + selected(in, out, len, offset, key); + load_fpu_state(fpustate); + } else { + xts_serpent_basic_encrypt(in, out, len, offset, key); + } +} + +static void _stdcall xts_serpent_decrypt(const unsigned char *in, unsigned char *out, size_t len, unsigned __int64 offset, xts_key *key) +{ + unsigned char fpustate[32]; + xts_proc selected = serpent_selected_decrypt; + + if (selected != xts_serpent_basic_decrypt && save_fpu_state(fpustate) >= 0) { + selected(in, out, len, offset, key); + load_fpu_state(fpustate); + } else { + xts_serpent_basic_decrypt(in, out, len, offset, key); + } +} + +#else + #define xts_aes_encrypt aes_selected_encrypt + #define xts_aes_decrypt aes_selected_decrypt + #define xts_serpent_encrypt serpent_selected_encrypt + #define xts_serpent_decrypt serpent_selected_decrypt +#endif + +static void _stdcall xts_aes_twofish_encrypt(const unsigned char *in, unsigned char *out, size_t len, unsigned __int64 offset, xts_key *key) +{ + xts_twofish_encrypt(in, out, len, offset, key); + xts_aes_encrypt(out, out, len, offset, key); +} + +static void _stdcall xts_aes_twofish_decrypt(const unsigned char *in, unsigned char *out, size_t len, unsigned __int64 
offset, xts_key *key) +{ + xts_aes_decrypt(in, out, len, offset, key); + xts_twofish_decrypt(out, out, len, offset, key); +} + +static void _stdcall xts_twofish_serpent_encrypt(const unsigned char *in, unsigned char *out, size_t len, unsigned __int64 offset, xts_key *key) +{ + xts_serpent_encrypt(in, out, len, offset, key); + xts_twofish_encrypt(out, out, len, offset, key); +} + +static void _stdcall xts_twofish_serpent_decrypt(const unsigned char *in, unsigned char *out, size_t len, unsigned __int64 offset, xts_key *key) +{ + xts_twofish_decrypt(in, out, len, offset, key); + xts_serpent_decrypt(out, out, len, offset, key); +} + +static void _stdcall xts_serpent_aes_encrypt(const unsigned char *in, unsigned char *out, size_t len, unsigned __int64 offset, xts_key *key) +{ + xts_aes_encrypt(in, out, len, offset, key); + xts_serpent_encrypt(out, out, len, offset, key); +} + +static void _stdcall xts_serpent_aes_decrypt(const unsigned char *in, unsigned char *out, size_t len, unsigned __int64 offset, xts_key *key) +{ + xts_serpent_decrypt(in, out, len, offset, key); + xts_aes_decrypt(out, out, len, offset, key); +} + +static void _stdcall xts_aes_twofish_serpent_encrypt(const unsigned char *in, unsigned char *out, size_t len, unsigned __int64 offset, xts_key *key) +{ + xts_serpent_encrypt(in, out, len, offset, key); + xts_twofish_encrypt(out, out, len, offset, key); + xts_aes_encrypt(out, out, len, offset, key); +} + +static void _stdcall xts_aes_twofish_serpent_decrypt(const unsigned char *in, unsigned char *out, size_t len, unsigned __int64 offset, xts_key *key) +{ + xts_aes_decrypt(in, out, len, offset, key); + xts_twofish_decrypt(out, out, len, offset, key); + xts_serpent_decrypt(out, out, len, offset, key); +} + +void _stdcall xts_set_key(const unsigned char *key, int alg, xts_key *skey) +{ + switch (alg) + { + case CF_AES: + aes256_asm_set_key(key, &skey->crypt_k.aes); + aes256_asm_set_key(key + XTS_KEY_SIZE, &skey->tweak_k.aes); + + skey->encrypt = xts_aes_encrypt; + skey->decrypt = xts_aes_decrypt; + break; + case CF_TWOFISH: + twofish256_set_key(key, &skey->crypt_k.twofish); + twofish256_set_key(key + XTS_KEY_SIZE, &skey->tweak_k.twofish); + + skey->encrypt = xts_twofish_encrypt; + skey->decrypt = xts_twofish_decrypt; + break; + case CF_SERPENT: + serpent256_set_key(key, &skey->crypt_k.serpent); + serpent256_set_key(key + XTS_KEY_SIZE, &skey->tweak_k.serpent); + + skey->encrypt = xts_serpent_encrypt; + skey->decrypt = xts_serpent_decrypt; + break; + case CF_AES_TWOFISH: + twofish256_set_key(key, &skey->crypt_k.twofish); + aes256_asm_set_key(key + XTS_KEY_SIZE, &skey->crypt_k.aes); + twofish256_set_key(key + XTS_KEY_SIZE*2, &skey->tweak_k.twofish); + aes256_asm_set_key(key + XTS_KEY_SIZE*3, &skey->tweak_k.aes); + + skey->encrypt = xts_aes_twofish_encrypt; + skey->decrypt = xts_aes_twofish_decrypt; + break; + case CF_TWOFISH_SERPENT: + serpent256_set_key(key, &skey->crypt_k.serpent); + twofish256_set_key(key + XTS_KEY_SIZE, &skey->crypt_k.twofish); + serpent256_set_key(key + XTS_KEY_SIZE*2, &skey->tweak_k.serpent); + twofish256_set_key(key + XTS_KEY_SIZE*3, &skey->tweak_k.twofish); + + skey->encrypt = xts_twofish_serpent_encrypt; + skey->decrypt = xts_twofish_serpent_decrypt; + break; + case CF_SERPENT_AES: + aes256_asm_set_key(key, &skey->crypt_k.aes); + serpent256_set_key(key + XTS_KEY_SIZE, &skey->crypt_k.serpent); + aes256_asm_set_key(key + XTS_KEY_SIZE*2, &skey->tweak_k.aes); + serpent256_set_key(key + XTS_KEY_SIZE*3, &skey->tweak_k.serpent); + + skey->encrypt = 
xts_serpent_aes_encrypt; + skey->decrypt = xts_serpent_aes_decrypt; + break; + case CF_AES_TWOFISH_SERPENT: + serpent256_set_key(key, &skey->crypt_k.serpent); + twofish256_set_key(key + XTS_KEY_SIZE, &skey->crypt_k.twofish); + aes256_asm_set_key(key + XTS_KEY_SIZE*2, &skey->crypt_k.aes); + serpent256_set_key(key + XTS_KEY_SIZE*3, &skey->tweak_k.serpent); + twofish256_set_key(key + XTS_KEY_SIZE*4, &skey->tweak_k.twofish); + aes256_asm_set_key(key + XTS_KEY_SIZE*5, &skey->tweak_k.aes); + + skey->encrypt = xts_aes_twofish_serpent_encrypt; + skey->decrypt = xts_aes_twofish_serpent_decrypt; + break; + } +} + +#ifdef _M_IX86 +long save_fpu_state(unsigned char state[32]) { + //if (KeGetCurrentIrql() > DISPATCH_LEVEL) return STATUS_UNSUCCESSFUL; + //return KeSaveFloatingPointState((PKFLOATING_SAVE)state); + return 1; +} +void load_fpu_state(unsigned char state[32]) { + //KeRestoreFloatingPointState((PKFLOATING_SAVE)state); +} +#endif + +int _declspec(noinline) _stdcall xts_aes_ni_available() +{ + int CPUInfo[4], res = 0; + __m128i enc; +#ifdef _M_IX86 + unsigned char fpustate[32]; +#endif + + // check for AES-NI support via CPUID.01H:ECX.AES[bit 25] + __cpuid(CPUInfo, 1); + if ( CPUInfo[2] & 0x02000000 ) return 1; + + // Special workaround for AES-NI on Hyper-V server and virtual machines + if ( (CPUInfo[2] & 0x80000000) == 0 ) return 0; + __cpuid(CPUInfo, 0x40000000); + if ( CPUInfo[1] != 'rciM' || CPUInfo[2] != 'foso' || CPUInfo[3] != 'vH t' ) return 0; + +#ifdef _M_IX86 + if (save_fpu_state(fpustate) >= 0) + { +#endif + //__try { + enc = _mm_aesenc_si128(_mm_set_epi32(0,1,2,3), _mm_set_epi32(4,5,6,7)); + res = enc.m128i_u64[0] == 0x5f77774d4b7b7b54 && enc.m128i_u64[1] == 0x63636367427c7c58; + //} + //__except(/*EXCEPTION_EXECUTE_HANDLER*/ 1) { + // res = 0; + //} +#ifdef _M_IX86 + load_fpu_state(fpustate); + } +#endif + return res; +} + +int _stdcall xts_init(int hw_crypt) +{ +#ifdef _M_IX86 + if (xts_serpent_sse2_available() != 0) { + serpent_selected_encrypt = xts_serpent_sse2_encrypt; + serpent_selected_decrypt = xts_serpent_sse2_decrypt; + } else { + serpent_selected_encrypt = xts_serpent_basic_encrypt; + serpent_selected_decrypt = xts_serpent_basic_decrypt; + } +#else + serpent_selected_encrypt = xts_serpent_sse2_encrypt; + serpent_selected_decrypt = xts_serpent_sse2_decrypt; +#endif + if (xts_serpent_avx_available() != 0) { + serpent_selected_encrypt = xts_serpent_avx_encrypt; + serpent_selected_decrypt = xts_serpent_avx_decrypt; + } + if ( hw_crypt != 0 && xts_aes_ni_available() != 0 ) { + aes_selected_encrypt = xts_aes_ni_encrypt; + aes_selected_decrypt = xts_aes_ni_decrypt; + return 1; + } + if ( hw_crypt != 0 && aes256_padlock_available() != 0 ) + { + aes_selected_encrypt = xts_aes_padlock_encrypt; + aes_selected_decrypt = xts_aes_padlock_decrypt; + return 2; + } + aes_selected_encrypt = xts_aes_basic_encrypt; + aes_selected_decrypt = xts_aes_basic_decrypt; + return 0; +} \ No newline at end of file diff --git a/ImBoxEnclave/crypto_fast/xts_fast.h b/ImBoxEnclave/crypto_fast/xts_fast.h new file mode 100644 index 0000000..f9d88c3 --- /dev/null +++ b/ImBoxEnclave/crypto_fast/xts_fast.h @@ -0,0 +1,66 @@ +#ifndef _XTS_FAST_H_ +#define _XTS_FAST_H_ + + +#include +#include "aes_key.h" +#include "twofish.h" +#include "serpent.h" + +#define CF_AES 0 +#define CF_TWOFISH 1 +#define CF_SERPENT 2 +#define CF_AES_TWOFISH 3 +#define CF_TWOFISH_SERPENT 4 +#define CF_SERPENT_AES 5 +#define CF_AES_TWOFISH_SERPENT 6 +#define CF_CIPHERS_NUM 7 + +#define XTS_SECTOR_SIZE 512 +#define XTS_BLOCK_SIZE 16 
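+
+/* Every 512-byte sector is an independent XTS data unit: the tweak is derived
+ * from the 64-bit sector index (offset / XTS_SECTOR_SIZE), so any sector can
+ * be encrypted or decrypted in isolation.  A minimal usage sketch (the caller
+ * names key_material and sector_no are hypothetical; key_material must hold
+ * the full key for the chosen cipher, two XTS_KEY_SIZE halves per cipher):
+ *
+ *   unsigned char sector[XTS_SECTOR_SIZE];
+ *   xts_key k;
+ *
+ *   xts_init(1);                            // select AES-NI/PadLock if present
+ *   xts_set_key(key_material, CF_AES, &k);  // 2 * XTS_KEY_SIZE bytes for CF_AES
+ *   xts_encrypt(sector, sector, XTS_SECTOR_SIZE,
+ *               sector_no * XTS_SECTOR_SIZE, &k);  // offset is in bytes
+ */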
+#define XTS_BLOCKS_IN_SECTOR (XTS_SECTOR_SIZE / XTS_BLOCK_SIZE)
+
+#define XTS_KEY_SIZE 32
+#define XTS_FULL_KEY (XTS_KEY_SIZE*3*2)
+
+typedef void (_stdcall *xts_proc)(const unsigned char *in, unsigned char *out, size_t len, unsigned __int64 offset, void *key);
+
+typedef __declspec(align(16)) struct _xts_key {
+	struct {
+		aes256_key     aes;
+		twofish256_key twofish;
+		serpent256_key serpent;
+	} crypt_k;
+	struct {
+		aes256_key     aes;
+		twofish256_key twofish;
+		serpent256_key serpent;
+	} tweak_k;
+	xts_proc encrypt;
+	xts_proc decrypt;
+
+} xts_key;
+
+int  _stdcall xts_init(int hw_crypt);
+void _stdcall xts_set_key(const unsigned char *key, int alg, xts_key *skey);
+int  _stdcall xts_aes_ni_available();
+
+#define xts_encrypt(_in, _out, _len, _offset, _key) ( (_key)->encrypt(_in, _out, _len, _offset, _key) )
+#define xts_decrypt(_in, _out, _len, _offset, _key) ( (_key)->decrypt(_in, _out, _len, _offset, _key) )
+
+#ifdef _M_IX86
+ extern long save_fpu_state(unsigned char state[32]);
+ extern void load_fpu_state(unsigned char state[32]);
+#endif
+
+ static void _stdcall xts_aes_twofish_encrypt(const unsigned char* in, unsigned char* out, size_t len, unsigned __int64 offset, xts_key* key);
+ static void _stdcall xts_aes_twofish_decrypt(const unsigned char* in, unsigned char* out, size_t len, unsigned __int64 offset, xts_key* key);
+ static void _stdcall xts_twofish_serpent_encrypt(const unsigned char* in, unsigned char* out, size_t len, unsigned __int64 offset, xts_key* key);
+ static void _stdcall xts_twofish_serpent_decrypt(const unsigned char* in, unsigned char* out, size_t len, unsigned __int64 offset, xts_key* key);
+ static void _stdcall xts_serpent_aes_encrypt(const unsigned char* in, unsigned char* out, size_t len, unsigned __int64 offset, xts_key* key);
+ static void _stdcall xts_serpent_aes_decrypt(const unsigned char* in, unsigned char* out, size_t len, unsigned __int64 offset, xts_key* key);
+ static void _stdcall xts_aes_twofish_serpent_encrypt(const unsigned char* in, unsigned char* out, size_t len, unsigned __int64 offset, xts_key* key);
+ static void _stdcall xts_aes_twofish_serpent_decrypt(const unsigned char* in, unsigned char* out, size_t len, unsigned __int64 offset, xts_key* key);
+
+#endif
diff --git a/ImBoxEnclave/crypto_fast/xts_serpent_avx.h b/ImBoxEnclave/crypto_fast/xts_serpent_avx.h
new file mode 100644
index 0000000..45ada57
--- /dev/null
+++ b/ImBoxEnclave/crypto_fast/xts_serpent_avx.h
@@ -0,0 +1,8 @@
+#ifndef _XTS_SERPENT_AVX_H_
+#define _XTS_SERPENT_AVX_H_
+
+int  _stdcall xts_serpent_avx_available();
+void _stdcall xts_serpent_avx_encrypt(const unsigned char *in, unsigned char *out, size_t len, unsigned __int64 offset, xts_key *key);
+void _stdcall xts_serpent_avx_decrypt(const unsigned char *in, unsigned char *out, size_t len, unsigned __int64 offset, xts_key *key);
+
+#endif
\ No newline at end of file
diff --git a/ImBoxEnclave/crypto_fast/xts_serpent_sse2.c b/ImBoxEnclave/crypto_fast/xts_serpent_sse2.c
new file mode 100644
index 0000000..a4eb02b
--- /dev/null
+++ b/ImBoxEnclave/crypto_fast/xts_serpent_sse2.c
@@ -0,0 +1,703 @@
+/*
+    *
+    * Copyright (c) 2010-2011
+    * ntldr PGP key ID - 0x1B6A24550F33E44A
+    *
+
+   This program is free software: you can redistribute it and/or modify
+   it under the terms of the GNU General Public License version 3 as
+   published by the Free Software Foundation.
+
+   This program is distributed in the hope that it will be useful,
+   but WITHOUT ANY WARRANTY; without even the implied warranty of
+   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+   GNU General Public License for more details.
+
+   You should have received a copy of the GNU General Public License
+   along with this program. If not, see <http://www.gnu.org/licenses/>.
+*/
+#if defined(USE_AVX) && !defined(__INTEL_COMPILER)
+	#error Please use Intel C++ Compiler
+#endif
+#include 
+#include "serpent.h"
+#include "xts_fast.h"
+#ifdef USE_AVX
+	#include 
+	#include "xts_serpent_avx.h"
+#else
+	#include 
+	#include "xts_serpent_sse2.h"
+#endif
+
+#define transpose(_B0, _B1, _B2, _B3) { \
+	__m128i _T0 = _mm_unpacklo_epi32(_B0, _B1); \
+	__m128i _T1 = _mm_unpacklo_epi32(_B2, _B3); \
+	__m128i _T2 = _mm_unpackhi_epi32(_B0, _B1); \
+	__m128i _T3 = _mm_unpackhi_epi32(_B2, _B3); \
+	_B0 = _mm_unpacklo_epi64(_T0, _T1); \
+	_B1 = _mm_unpackhi_epi64(_T0, _T1); \
+	_B2 = _mm_unpacklo_epi64(_T2, _T3); \
+	_B3 = _mm_unpackhi_epi64(_T2, _T3); \
+}
+
+#define KXf(_B0, _B1, _B2, _B3, _ctx, round) \
+	_B0 = _mm_xor_si128(_B0, _mm_set1_epi32((_ctx)->expkey[4*round  ])); \
+	_B1 = _mm_xor_si128(_B1, _mm_set1_epi32((_ctx)->expkey[4*round+1])); \
+	_B2 = _mm_xor_si128(_B2, _mm_set1_epi32((_ctx)->expkey[4*round+2])); \
+	_B3 = _mm_xor_si128(_B3, _mm_set1_epi32((_ctx)->expkey[4*round+3]));
+
+#define NOT_SI128(_X) ( \
+	_mm_xor_si128(_X, _mm_set1_epi32(0xFFFFFFFF)) )
+
+#define ROL_SI128(_X, _rot) ( \
+	_mm_or_si128(_mm_slli_epi32(_X, _rot), _mm_srli_epi32(_X, 32-_rot)) )
+
+#define ROR_SI128(_X, _rot) ( ROL_SI128(_X, (32-_rot)) )
+
+#define LTf(_B0, _B1, _B2, _B3) \
+	_B0 = ROL_SI128(_B0, 13); \
+	_B2 = ROL_SI128(_B2, 3); \
+	_B1 = _mm_xor_si128(_B1, _B0); \
+	_B1 = _mm_xor_si128(_B1, _B2); \
+	_B3 = _mm_xor_si128(_B3, _B2); \
+	_B3 = _mm_xor_si128(_B3, _mm_slli_epi32(_B0, 3)); \
+	_B1 = ROL_SI128(_B1, 1); \
+	_B3 = ROL_SI128(_B3, 7); \
+	_B0 = _mm_xor_si128(_B0, _B1); \
+	_B0 = _mm_xor_si128(_B0, _B3); \
+	_B2 = _mm_xor_si128(_B2, _B3); \
+	_B2 = _mm_xor_si128(_B2, _mm_slli_epi32(_B1, 7)); \
+	_B0 = ROL_SI128(_B0, 5); \
+	_B2 = ROL_SI128(_B2, 22);
+
+#define ITf(_B0, _B1, _B2, _B3) \
+	_B2 = ROR_SI128(_B2, 22); \
+	_B0 = ROR_SI128(_B0, 5); \
+	_B2 = _mm_xor_si128(_B2, _B3); \
+	_B2 = _mm_xor_si128(_B2, _mm_slli_epi32(_B1, 7)); \
+	_B0 = _mm_xor_si128(_B0, _B1); \
+	_B0 = _mm_xor_si128(_B0, _B3); \
+	_B3 = ROR_SI128(_B3, 7); \
+	_B1 = ROR_SI128(_B1, 1); \
+	_B3 = _mm_xor_si128(_B3, _B2); \
+	_B3 = _mm_xor_si128(_B3, _mm_slli_epi32(_B0, 3)); \
+	_B1 = _mm_xor_si128(_B1, _B0); \
+	_B1 = _mm_xor_si128(_B1, _B2); \
+	_B2 = ROR_SI128(_B2, 3); \
+	_B0 = ROR_SI128(_B0, 13);
+
+#define sE1(_B0, _B1, _B2, _B3) { \
+	__m128i _tt = _B1; \
+	_B3 = _mm_xor_si128(_B3, _B0); \
+	_B1 = _mm_and_si128(_B1, _B3); \
+	_tt = _mm_xor_si128(_tt, _B2); \
+	_B1 = _mm_xor_si128(_B1, _B0); \
+	_B0 = _mm_or_si128(_B0, _B3); \
+	_B0 = _mm_xor_si128(_B0, _tt); \
+	_tt = _mm_xor_si128(_tt, _B3); \
+	_B3 = _mm_xor_si128(_B3, _B2); \
+	_B2 = _mm_or_si128(_B2, _B1); \
+	_B2 = _mm_xor_si128(_B2, _tt); \
+	_tt = NOT_SI128(_tt); \
+	_tt = _mm_or_si128(_tt, _B1); \
+	_B1 = _mm_xor_si128(_B1, _B3); \
+	_B1 = _mm_xor_si128(_B1, _tt); \
+	_B3 = _mm_or_si128(_B3, _B0); \
+	_B1 = _mm_xor_si128(_B1, _B3); \
+	_tt = _mm_xor_si128(_tt, _B3); \
+	_B3 = _B0; \
+	_B0 = _B1; \
+	_B1 = _tt; \
+}
+
+#define sE2(_B0, _B1, _B2, _B3) { \
+	__m128i _tt; \
+	_B0 = NOT_SI128(_B0); \
+	_B2 = NOT_SI128(_B2); \
+	_tt = _B0; \
+	_B0 = _mm_and_si128(_B0, _B1); \
+	_B2 = _mm_xor_si128(_B2, _B0);
\ + _B0 = _mm_or_si128(_B0, _B3); \ + _B3 = _mm_xor_si128(_B3, _B2); \ + _B1 = _mm_xor_si128(_B1, _B0); \ + _B0 = _mm_xor_si128(_B0, _tt); \ + _tt = _mm_or_si128(_tt, _B1); \ + _B1 = _mm_xor_si128(_B1, _B3); \ + _B2 = _mm_or_si128(_B2, _B0); \ + _B2 = _mm_and_si128(_B2, _tt); \ + _B0 = _mm_xor_si128(_B0, _B1); \ + _B1 = _mm_and_si128(_B1, _B2); \ + _B1 = _mm_xor_si128(_B1, _B0); \ + _B0 = _mm_and_si128(_B0, _B2); \ + _tt = _mm_xor_si128(_tt, _B0); \ + _B0 = _B2; \ + _B2 = _B3; \ + _B3 = _B1; \ + _B1 = _tt; \ +} + +#define sE3(_B0, _B1, _B2, _B3) { \ + __m128i _tt = _B0; \ + _B0 = _mm_and_si128(_B0, _B2); \ + _B0 = _mm_xor_si128(_B0, _B3); \ + _B2 = _mm_xor_si128(_B2, _B1); \ + _B2 = _mm_xor_si128(_B2, _B0); \ + _B3 = _mm_or_si128(_B3, _tt); \ + _B3 = _mm_xor_si128(_B3, _B1); \ + _tt = _mm_xor_si128(_tt, _B2); \ + _B1 = _B3; \ + _B3 = _mm_or_si128(_B3, _tt); \ + _B3 = _mm_xor_si128(_B3, _B0); \ + _B0 = _mm_and_si128(_B0, _B1); \ + _tt = _mm_xor_si128(_tt, _B0); \ + _B1 = _mm_xor_si128(_B1, _B3); \ + _B1 = _mm_xor_si128(_B1, _tt); \ + _B0 = _B2; \ + _B2 = _B1; \ + _B1 = _B3; \ + _B3 = NOT_SI128(_tt); \ +} + +#define sE4(_B0, _B1, _B2, _B3) { \ + __m128i _tt = _B0; \ + _B0 = _mm_or_si128(_B0, _B3); \ + _B3 = _mm_xor_si128(_B3, _B1); \ + _B1 = _mm_and_si128(_B1, _tt); \ + _tt = _mm_xor_si128(_tt, _B2); \ + _B2 = _mm_xor_si128(_B2, _B3); \ + _B3 = _mm_and_si128(_B3, _B0); \ + _tt = _mm_or_si128(_tt, _B1); \ + _B3 = _mm_xor_si128(_B3, _tt); \ + _B0 = _mm_xor_si128(_B0, _B1); \ + _tt = _mm_and_si128(_tt, _B0); \ + _B1 = _mm_xor_si128(_B1, _B3); \ + _tt = _mm_xor_si128(_tt, _B2); \ + _B1 = _mm_or_si128(_B1, _B0); \ + _B1 = _mm_xor_si128(_B1, _B2); \ + _B0 = _mm_xor_si128(_B0, _B3); \ + _B2 = _B1; \ + _B1 = _mm_or_si128(_B1, _B3); \ + _B0 = _mm_xor_si128(_B0, _B1); \ + _B1 = _B2; \ + _B2 = _B3; \ + _B3 = _tt; \ +} + +#define sE5(_B0, _B1, _B2, _B3) { \ + __m128i _tt; \ + _B1 = _mm_xor_si128(_B1, _B3); \ + _B3 = NOT_SI128(_B3); \ + _B2 = _mm_xor_si128(_B2, _B3); \ + _B3 = _mm_xor_si128(_B3, _B0); \ + _tt = _B1; \ + _B1 = _mm_and_si128(_B1, _B3); \ + _B1 = _mm_xor_si128(_B1, _B2); \ + _tt = _mm_xor_si128(_tt, _B3); \ + _B0 = _mm_xor_si128(_B0, _tt); \ + _B2 = _mm_and_si128(_B2, _tt); \ + _B2 = _mm_xor_si128(_B2, _B0); \ + _B0 = _mm_and_si128(_B0, _B1); \ + _B3 = _mm_xor_si128(_B3, _B0); \ + _tt = _mm_or_si128(_tt, _B1); \ + _tt = _mm_xor_si128(_tt, _B0); \ + _B0 = _mm_or_si128(_B0, _B3); \ + _B0 = _mm_xor_si128(_B0, _B2); \ + _B2 = _mm_and_si128(_B2, _B3); \ + _B0 = NOT_SI128(_B0); \ + _tt = _mm_xor_si128(_tt, _B2); \ + _B2 = _B0; \ + _B0 = _B1; \ + _B1 = _tt; \ +} + +#define sE6(_B0, _B1, _B2, _B3) { \ + __m128i _tt; \ + _B0 = _mm_xor_si128(_B0, _B1); \ + _B1 = _mm_xor_si128(_B1, _B3); \ + _B3 = NOT_SI128(_B3); \ + _tt = _B1; \ + _B1 = _mm_and_si128(_B1, _B0); \ + _B2 = _mm_xor_si128(_B2, _B3); \ + _B1 = _mm_xor_si128(_B1, _B2); \ + _B2 = _mm_or_si128(_B2, _tt); \ + _tt = _mm_xor_si128(_tt, _B3); \ + _B3 = _mm_and_si128(_B3, _B1); \ + _B3 = _mm_xor_si128(_B3, _B0); \ + _tt = _mm_xor_si128(_tt, _B1); \ + _tt = _mm_xor_si128(_tt, _B2); \ + _B2 = _mm_xor_si128(_B2, _B0); \ + _B0 = _mm_and_si128(_B0, _B3); \ + _B2 = NOT_SI128(_B2); \ + _B0 = _mm_xor_si128(_B0, _tt); \ + _tt = _mm_or_si128(_tt, _B3); \ + _tt = _mm_xor_si128(_tt, _B2); \ + _B2 = _B0; \ + _B0 = _B1; \ + _B1 = _B3; \ + _B3 = _tt; \ +} + +#define sE7(_B0, _B1, _B2, _B3) { \ + __m128i _tt; \ + _B2 = NOT_SI128(_B2); \ + _tt = _B3; \ + _B3 = _mm_and_si128(_B3, _B0); \ + _B0 = _mm_xor_si128(_B0, _tt); \ + _B3 = _mm_xor_si128(_B3, _B2); \ + _B2 = 
_mm_or_si128(_B2, _tt); \ + _B1 = _mm_xor_si128(_B1, _B3); \ + _B2 = _mm_xor_si128(_B2, _B0); \ + _B0 = _mm_or_si128(_B0, _B1); \ + _B2 = _mm_xor_si128(_B2, _B1); \ + _tt = _mm_xor_si128(_tt, _B0); \ + _B0 = _mm_or_si128(_B0, _B3); \ + _B0 = _mm_xor_si128(_B0, _B2); \ + _tt = _mm_xor_si128(_tt, _B3); \ + _tt = _mm_xor_si128(_tt, _B0); \ + _B3 = NOT_SI128(_B3); \ + _B2 = _mm_and_si128(_B2, _tt); \ + _B3 = _mm_xor_si128(_B3, _B2); \ + _B2 = _tt; \ +} + +#define sE8(_B0, _B1, _B2, _B3) { \ + __m128i _tt = _B1; \ + _B1 = _mm_or_si128(_B1, _B2); \ + _B1 = _mm_xor_si128(_B1, _B3); \ + _tt = _mm_xor_si128(_tt, _B2); \ + _B2 = _mm_xor_si128(_B2, _B1); \ + _B3 = _mm_or_si128(_B3, _tt); \ + _B3 = _mm_and_si128(_B3, _B0); \ + _tt = _mm_xor_si128(_tt, _B2); \ + _B3 = _mm_xor_si128(_B3, _B1); \ + _B1 = _mm_or_si128(_B1, _tt); \ + _B1 = _mm_xor_si128(_B1, _B0); \ + _B0 = _mm_or_si128(_B0, _tt); \ + _B0 = _mm_xor_si128(_B0, _B2); \ + _B1 = _mm_xor_si128(_B1, _tt); \ + _B2 = _mm_xor_si128(_B2, _B1); \ + _B1 = _mm_and_si128(_B1, _B0); \ + _B1 = _mm_xor_si128(_B1, _tt); \ + _B2 = NOT_SI128(_B2); \ + _B2 = _mm_or_si128(_B2, _B0); \ + _tt = _mm_xor_si128(_tt, _B2); \ + _B2 = _B1; \ + _B1 = _B3; \ + _B3 = _B0; \ + _B0 = _tt; \ +} + +#define sD1(_B0, _B1, _B2, _B3) { \ + __m128i _tt = _B1; \ + _B2 = NOT_SI128(_B2); \ + _B1 = _mm_or_si128(_B1, _B0); \ + _tt = NOT_SI128(_tt); \ + _B1 = _mm_xor_si128(_B1, _B2); \ + _B2 = _mm_or_si128(_B2, _tt); \ + _B1 = _mm_xor_si128(_B1, _B3); \ + _B0 = _mm_xor_si128(_B0, _tt); \ + _B2 = _mm_xor_si128(_B2, _B0); \ + _B0 = _mm_and_si128(_B0, _B3); \ + _tt = _mm_xor_si128(_tt, _B0); \ + _B0 = _mm_or_si128(_B0, _B1); \ + _B0 = _mm_xor_si128(_B0, _B2); \ + _B3 = _mm_xor_si128(_B3, _tt); \ + _B2 = _mm_xor_si128(_B2, _B1); \ + _B3 = _mm_xor_si128(_B3, _B0); \ + _B3 = _mm_xor_si128(_B3, _B1); \ + _B2 = _mm_and_si128(_B2, _B3); \ + _tt = _mm_xor_si128(_tt, _B2); \ + _B2 = _B1; \ + _B1 = _tt; \ +} + +#define sD2(_B0, _B1, _B2, _B3) { \ + __m128i _tt = _B1; \ + _B1 = _mm_xor_si128(_B1, _B3); \ + _B3 = _mm_and_si128(_B3, _B1); \ + _tt = _mm_xor_si128(_tt, _B2); \ + _B3 = _mm_xor_si128(_B3, _B0); \ + _B0 = _mm_or_si128(_B0, _B1); \ + _B2 = _mm_xor_si128(_B2, _B3); \ + _B0 = _mm_xor_si128(_B0, _tt); \ + _B0 = _mm_or_si128(_B0, _B2); \ + _B1 = _mm_xor_si128(_B1, _B3); \ + _B0 = _mm_xor_si128(_B0, _B1); \ + _B1 = _mm_or_si128(_B1, _B3); \ + _B1 = _mm_xor_si128(_B1, _B0); \ + _tt = NOT_SI128(_tt); \ + _tt = _mm_xor_si128(_tt, _B1); \ + _B1 = _mm_or_si128(_B1, _B0); \ + _B1 = _mm_xor_si128(_B1, _B0); \ + _B1 = _mm_or_si128(_B1, _tt); \ + _B3 = _mm_xor_si128(_B3, _B1); \ + _B1 = _B0; \ + _B0 = _tt; \ + _tt = _B2; \ + _B2 = _B3; \ + _B3 = _tt; \ +} + +#define sD3(_B0, _B1, _B2, _B3) { \ + __m128i _tt; \ + _B2 = _mm_xor_si128(_B2, _B3); \ + _B3 = _mm_xor_si128(_B3, _B0); \ + _tt = _B3; \ + _B3 = _mm_and_si128(_B3, _B2); \ + _B3 = _mm_xor_si128(_B3, _B1); \ + _B1 = _mm_or_si128(_B1, _B2); \ + _B1 = _mm_xor_si128(_B1, _tt); \ + _tt = _mm_and_si128(_tt, _B3); \ + _B2 = _mm_xor_si128(_B2, _B3); \ + _tt = _mm_and_si128(_tt, _B0); \ + _tt = _mm_xor_si128(_tt, _B2); \ + _B2 = _mm_and_si128(_B2, _B1); \ + _B2 = _mm_or_si128(_B2, _B0); \ + _B3 = NOT_SI128(_B3); \ + _B2 = _mm_xor_si128(_B2, _B3); \ + _B0 = _mm_xor_si128(_B0, _B3); \ + _B0 = _mm_and_si128(_B0, _B1); \ + _B3 = _mm_xor_si128(_B3, _tt); \ + _B3 = _mm_xor_si128(_B3, _B0); \ + _B0 = _B1; \ + _B1 = _tt; \ +} + +#define sD4(_B0, _B1, _B2, _B3) { \ + __m128i _tt = _B2; \ + _B2 = _mm_xor_si128(_B2, _B1); \ + _B0 = _mm_xor_si128(_B0, _B2); \ + _tt = 
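/* sD4 = inverse of S-box S3; the sD1..sD8 macros undo sE1..sE8 */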
_mm_and_si128(_tt, _B2); \ + _tt = _mm_xor_si128(_tt, _B0); \ + _B0 = _mm_and_si128(_B0, _B1); \ + _B1 = _mm_xor_si128(_B1, _B3); \ + _B3 = _mm_or_si128(_B3, _tt); \ + _B2 = _mm_xor_si128(_B2, _B3); \ + _B0 = _mm_xor_si128(_B0, _B3); \ + _B1 = _mm_xor_si128(_B1, _tt); \ + _B3 = _mm_and_si128(_B3, _B2); \ + _B3 = _mm_xor_si128(_B3, _B1); \ + _B1 = _mm_xor_si128(_B1, _B0); \ + _B1 = _mm_or_si128(_B1, _B2); \ + _B0 = _mm_xor_si128(_B0, _B3); \ + _B1 = _mm_xor_si128(_B1, _tt); \ + _B0 = _mm_xor_si128(_B0, _B1); \ + _tt = _B0; \ + _B0 = _B2; \ + _B2 = _B3; \ + _B3 = _tt; \ +} + +#define sD5(_B0, _B1, _B2, _B3) { \ + __m128i _tt = _B2; \ + _B2 = _mm_and_si128(_B2, _B3); \ + _B2 = _mm_xor_si128(_B2, _B1); \ + _B1 = _mm_or_si128(_B1, _B3); \ + _B1 = _mm_and_si128(_B1, _B0); \ + _tt = _mm_xor_si128(_tt, _B2); \ + _tt = _mm_xor_si128(_tt, _B1); \ + _B1 = _mm_and_si128(_B1, _B2); \ + _B0 = NOT_SI128(_B0); \ + _B3 = _mm_xor_si128(_B3, _tt); \ + _B1 = _mm_xor_si128(_B1, _B3); \ + _B3 = _mm_and_si128(_B3, _B0); \ + _B3 = _mm_xor_si128(_B3, _B2); \ + _B0 = _mm_xor_si128(_B0, _B1); \ + _B2 = _mm_and_si128(_B2, _B0); \ + _B3 = _mm_xor_si128(_B3, _B0); \ + _B2 = _mm_xor_si128(_B2, _tt); \ + _B2 = _mm_or_si128(_B2, _B3); \ + _B3 = _mm_xor_si128(_B3, _B0); \ + _B2 = _mm_xor_si128(_B2, _B1); \ + _B1 = _B3; \ + _B3 = _tt; \ +} + +#define sD6(_B0, _B1, _B2, _B3) { \ + __m128i _tt = _B3; \ + _B1 = NOT_SI128(_B1); \ + _B2 = _mm_xor_si128(_B2, _B1); \ + _B3 = _mm_or_si128(_B3, _B0); \ + _B3 = _mm_xor_si128(_B3, _B2); \ + _B2 = _mm_or_si128(_B2, _B1); \ + _B2 = _mm_and_si128(_B2, _B0); \ + _tt = _mm_xor_si128(_tt, _B3); \ + _B2 = _mm_xor_si128(_B2, _tt); \ + _tt = _mm_or_si128(_tt, _B0); \ + _tt = _mm_xor_si128(_tt, _B1); \ + _B1 = _mm_and_si128(_B1, _B2); \ + _B1 = _mm_xor_si128(_B1, _B3); \ + _tt = _mm_xor_si128(_tt, _B2); \ + _B3 = _mm_and_si128(_B3, _tt); \ + _tt = _mm_xor_si128(_tt, _B1); \ + _B3 = _mm_xor_si128(_B3, _tt); \ + _tt = NOT_SI128(_tt); \ + _B3 = _mm_xor_si128(_B3, _B0); \ + _B0 = _B1; \ + _B1 = _tt; \ + _tt = _B3; \ + _B3 = _B2; \ + _B2 = _tt; \ +} + +#define sD7(_B0, _B1, _B2, _B3) { \ + __m128i _tt = _B2; \ + _B0 = _mm_xor_si128(_B0, _B2); \ + _B2 = _mm_and_si128(_B2, _B0); \ + _tt = _mm_xor_si128(_tt, _B3); \ + _B2 = NOT_SI128(_B2); \ + _B3 = _mm_xor_si128(_B3, _B1); \ + _B2 = _mm_xor_si128(_B2, _B3); \ + _tt = _mm_or_si128(_tt, _B0); \ + _B0 = _mm_xor_si128(_B0, _B2); \ + _B3 = _mm_xor_si128(_B3, _tt); \ + _tt = _mm_xor_si128(_tt, _B1); \ + _B1 = _mm_and_si128(_B1, _B3); \ + _B1 = _mm_xor_si128(_B1, _B0); \ + _B0 = _mm_xor_si128(_B0, _B3); \ + _B0 = _mm_or_si128(_B0, _B2); \ + _B3 = _mm_xor_si128(_B3, _B1); \ + _tt = _mm_xor_si128(_tt, _B0); \ + _B0 = _B1; \ + _B1 = _B2; \ + _B2 = _tt; \ +} + +#define sD8(_B0, _B1, _B2, _B3) { \ + __m128i _tt = _B2; \ + _B2 = _mm_xor_si128(_B2, _B0); \ + _B0 = _mm_and_si128(_B0, _B3); \ + _tt = _mm_or_si128(_tt, _B3); \ + _B2 = NOT_SI128(_B2); \ + _B3 = _mm_xor_si128(_B3, _B1); \ + _B1 = _mm_or_si128(_B1, _B0); \ + _B0 = _mm_xor_si128(_B0, _B2); \ + _B2 = _mm_and_si128(_B2, _tt); \ + _B3 = _mm_and_si128(_B3, _tt); \ + _B1 = _mm_xor_si128(_B1, _B2); \ + _B2 = _mm_xor_si128(_B2, _B0); \ + _B0 = _mm_or_si128(_B0, _B2); \ + _tt = _mm_xor_si128(_tt, _B1); \ + _B0 = _mm_xor_si128(_B0, _B3); \ + _B3 = _mm_xor_si128(_B3, _tt); \ + _tt = _mm_or_si128(_tt, _B0); \ + _B3 = _mm_xor_si128(_B3, _B2); \ + _tt = _mm_xor_si128(_tt, _B2); \ + _B2 = _B1; \ + _B1 = _B0; \ + _B0 = _B3; \ + _B3 = _tt; \ +} + + +#define serpent256_sse2_encrypt(_B0, _B1, _B2, _B3, _ctx) \ + 
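/* bit-slice four blocks, run 32 rounds of key mix + S-box (+ linear transform), final key mix, un-slice */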
transpose(_B0,_B1,_B2,_B3); \ + KXf(_B0,_B1,_B2,_B3,_ctx, 0); sE1(_B0,_B1,_B2,_B3); LTf(_B0,_B1,_B2,_B3); \ + KXf(_B0,_B1,_B2,_B3,_ctx, 1); sE2(_B0,_B1,_B2,_B3); LTf(_B0,_B1,_B2,_B3); \ + KXf(_B0,_B1,_B2,_B3,_ctx, 2); sE3(_B0,_B1,_B2,_B3); LTf(_B0,_B1,_B2,_B3); \ + KXf(_B0,_B1,_B2,_B3,_ctx, 3); sE4(_B0,_B1,_B2,_B3); LTf(_B0,_B1,_B2,_B3); \ + KXf(_B0,_B1,_B2,_B3,_ctx, 4); sE5(_B0,_B1,_B2,_B3); LTf(_B0,_B1,_B2,_B3); \ + KXf(_B0,_B1,_B2,_B3,_ctx, 5); sE6(_B0,_B1,_B2,_B3); LTf(_B0,_B1,_B2,_B3); \ + KXf(_B0,_B1,_B2,_B3,_ctx, 6); sE7(_B0,_B1,_B2,_B3); LTf(_B0,_B1,_B2,_B3); \ + KXf(_B0,_B1,_B2,_B3,_ctx, 7); sE8(_B0,_B1,_B2,_B3); LTf(_B0,_B1,_B2,_B3); \ + KXf(_B0,_B1,_B2,_B3,_ctx, 8); sE1(_B0,_B1,_B2,_B3); LTf(_B0,_B1,_B2,_B3); \ + KXf(_B0,_B1,_B2,_B3,_ctx, 9); sE2(_B0,_B1,_B2,_B3); LTf(_B0,_B1,_B2,_B3); \ + KXf(_B0,_B1,_B2,_B3,_ctx, 10); sE3(_B0,_B1,_B2,_B3); LTf(_B0,_B1,_B2,_B3); \ + KXf(_B0,_B1,_B2,_B3,_ctx, 11); sE4(_B0,_B1,_B2,_B3); LTf(_B0,_B1,_B2,_B3); \ + KXf(_B0,_B1,_B2,_B3,_ctx, 12); sE5(_B0,_B1,_B2,_B3); LTf(_B0,_B1,_B2,_B3); \ + KXf(_B0,_B1,_B2,_B3,_ctx, 13); sE6(_B0,_B1,_B2,_B3); LTf(_B0,_B1,_B2,_B3); \ + KXf(_B0,_B1,_B2,_B3,_ctx, 14); sE7(_B0,_B1,_B2,_B3); LTf(_B0,_B1,_B2,_B3); \ + KXf(_B0,_B1,_B2,_B3,_ctx, 15); sE8(_B0,_B1,_B2,_B3); LTf(_B0,_B1,_B2,_B3); \ + KXf(_B0,_B1,_B2,_B3,_ctx, 16); sE1(_B0,_B1,_B2,_B3); LTf(_B0,_B1,_B2,_B3); \ + KXf(_B0,_B1,_B2,_B3,_ctx, 17); sE2(_B0,_B1,_B2,_B3); LTf(_B0,_B1,_B2,_B3); \ + KXf(_B0,_B1,_B2,_B3,_ctx, 18); sE3(_B0,_B1,_B2,_B3); LTf(_B0,_B1,_B2,_B3); \ + KXf(_B0,_B1,_B2,_B3,_ctx, 19); sE4(_B0,_B1,_B2,_B3); LTf(_B0,_B1,_B2,_B3); \ + KXf(_B0,_B1,_B2,_B3,_ctx, 20); sE5(_B0,_B1,_B2,_B3); LTf(_B0,_B1,_B2,_B3); \ + KXf(_B0,_B1,_B2,_B3,_ctx, 21); sE6(_B0,_B1,_B2,_B3); LTf(_B0,_B1,_B2,_B3); \ + KXf(_B0,_B1,_B2,_B3,_ctx, 22); sE7(_B0,_B1,_B2,_B3); LTf(_B0,_B1,_B2,_B3); \ + KXf(_B0,_B1,_B2,_B3,_ctx, 23); sE8(_B0,_B1,_B2,_B3); LTf(_B0,_B1,_B2,_B3); \ + KXf(_B0,_B1,_B2,_B3,_ctx, 24); sE1(_B0,_B1,_B2,_B3); LTf(_B0,_B1,_B2,_B3); \ + KXf(_B0,_B1,_B2,_B3,_ctx, 25); sE2(_B0,_B1,_B2,_B3); LTf(_B0,_B1,_B2,_B3); \ + KXf(_B0,_B1,_B2,_B3,_ctx, 26); sE3(_B0,_B1,_B2,_B3); LTf(_B0,_B1,_B2,_B3); \ + KXf(_B0,_B1,_B2,_B3,_ctx, 27); sE4(_B0,_B1,_B2,_B3); LTf(_B0,_B1,_B2,_B3); \ + KXf(_B0,_B1,_B2,_B3,_ctx, 28); sE5(_B0,_B1,_B2,_B3); LTf(_B0,_B1,_B2,_B3); \ + KXf(_B0,_B1,_B2,_B3,_ctx, 29); sE6(_B0,_B1,_B2,_B3); LTf(_B0,_B1,_B2,_B3); \ + KXf(_B0,_B1,_B2,_B3,_ctx, 30); sE7(_B0,_B1,_B2,_B3); LTf(_B0,_B1,_B2,_B3); \ + KXf(_B0,_B1,_B2,_B3,_ctx, 31); sE8(_B0,_B1,_B2,_B3); KXf(_B0,_B1,_B2,_B3,_ctx, 32); \ + transpose(_B0,_B1,_B2,_B3); + +#define serpent256_sse2_decrypt(_B0, _B1, _B2, _B3, _ctx) \ + transpose(_B0,_B1,_B2,_B3); \ + KXf(_B0,_B1,_B2,_B3,_ctx, 32); sD8(_B0,_B1,_B2,_B3); KXf(_B0,_B1,_B2,_B3,_ctx, 31); \ + ITf(_B0,_B1,_B2,_B3); sD7(_B0,_B1,_B2,_B3); KXf(_B0,_B1,_B2,_B3,_ctx, 30); \ + ITf(_B0,_B1,_B2,_B3); sD6(_B0,_B1,_B2,_B3); KXf(_B0,_B1,_B2,_B3,_ctx, 29); \ + ITf(_B0,_B1,_B2,_B3); sD5(_B0,_B1,_B2,_B3); KXf(_B0,_B1,_B2,_B3,_ctx, 28); \ + ITf(_B0,_B1,_B2,_B3); sD4(_B0,_B1,_B2,_B3); KXf(_B0,_B1,_B2,_B3,_ctx, 27); \ + ITf(_B0,_B1,_B2,_B3); sD3(_B0,_B1,_B2,_B3); KXf(_B0,_B1,_B2,_B3,_ctx, 26); \ + ITf(_B0,_B1,_B2,_B3); sD2(_B0,_B1,_B2,_B3); KXf(_B0,_B1,_B2,_B3,_ctx, 25); \ + ITf(_B0,_B1,_B2,_B3); sD1(_B0,_B1,_B2,_B3); KXf(_B0,_B1,_B2,_B3,_ctx, 24); \ + ITf(_B0,_B1,_B2,_B3); sD8(_B0,_B1,_B2,_B3); KXf(_B0,_B1,_B2,_B3,_ctx, 23); \ + ITf(_B0,_B1,_B2,_B3); sD7(_B0,_B1,_B2,_B3); KXf(_B0,_B1,_B2,_B3,_ctx, 22); \ + ITf(_B0,_B1,_B2,_B3); sD6(_B0,_B1,_B2,_B3); KXf(_B0,_B1,_B2,_B3,_ctx, 
21); \ + ITf(_B0,_B1,_B2,_B3); sD5(_B0,_B1,_B2,_B3); KXf(_B0,_B1,_B2,_B3,_ctx, 20); \ + ITf(_B0,_B1,_B2,_B3); sD4(_B0,_B1,_B2,_B3); KXf(_B0,_B1,_B2,_B3,_ctx, 19); \ + ITf(_B0,_B1,_B2,_B3); sD3(_B0,_B1,_B2,_B3); KXf(_B0,_B1,_B2,_B3,_ctx, 18); \ + ITf(_B0,_B1,_B2,_B3); sD2(_B0,_B1,_B2,_B3); KXf(_B0,_B1,_B2,_B3,_ctx, 17); \ + ITf(_B0,_B1,_B2,_B3); sD1(_B0,_B1,_B2,_B3); KXf(_B0,_B1,_B2,_B3,_ctx, 16); \ + ITf(_B0,_B1,_B2,_B3); sD8(_B0,_B1,_B2,_B3); KXf(_B0,_B1,_B2,_B3,_ctx, 15); \ + ITf(_B0,_B1,_B2,_B3); sD7(_B0,_B1,_B2,_B3); KXf(_B0,_B1,_B2,_B3,_ctx, 14); \ + ITf(_B0,_B1,_B2,_B3); sD6(_B0,_B1,_B2,_B3); KXf(_B0,_B1,_B2,_B3,_ctx, 13); \ + ITf(_B0,_B1,_B2,_B3); sD5(_B0,_B1,_B2,_B3); KXf(_B0,_B1,_B2,_B3,_ctx, 12); \ + ITf(_B0,_B1,_B2,_B3); sD4(_B0,_B1,_B2,_B3); KXf(_B0,_B1,_B2,_B3,_ctx, 11); \ + ITf(_B0,_B1,_B2,_B3); sD3(_B0,_B1,_B2,_B3); KXf(_B0,_B1,_B2,_B3,_ctx, 10); \ + ITf(_B0,_B1,_B2,_B3); sD2(_B0,_B1,_B2,_B3); KXf(_B0,_B1,_B2,_B3,_ctx, 9); \ + ITf(_B0,_B1,_B2,_B3); sD1(_B0,_B1,_B2,_B3); KXf(_B0,_B1,_B2,_B3,_ctx, 8); \ + ITf(_B0,_B1,_B2,_B3); sD8(_B0,_B1,_B2,_B3); KXf(_B0,_B1,_B2,_B3,_ctx, 7); \ + ITf(_B0,_B1,_B2,_B3); sD7(_B0,_B1,_B2,_B3); KXf(_B0,_B1,_B2,_B3,_ctx, 6); \ + ITf(_B0,_B1,_B2,_B3); sD6(_B0,_B1,_B2,_B3); KXf(_B0,_B1,_B2,_B3,_ctx, 5); \ + ITf(_B0,_B1,_B2,_B3); sD5(_B0,_B1,_B2,_B3); KXf(_B0,_B1,_B2,_B3,_ctx, 4); \ + ITf(_B0,_B1,_B2,_B3); sD4(_B0,_B1,_B2,_B3); KXf(_B0,_B1,_B2,_B3,_ctx, 3); \ + ITf(_B0,_B1,_B2,_B3); sD3(_B0,_B1,_B2,_B3); KXf(_B0,_B1,_B2,_B3,_ctx, 2); \ + ITf(_B0,_B1,_B2,_B3); sD2(_B0,_B1,_B2,_B3); KXf(_B0,_B1,_B2,_B3,_ctx, 1); \ + ITf(_B0,_B1,_B2,_B3); sD1(_B0,_B1,_B2,_B3); KXf(_B0,_B1,_B2,_B3,_ctx, 0); \ + transpose(_B0,_B1,_B2,_B3); + + +#define sse2_next_tweak(_N, _O) { \ + __m128i _tt = _O; \ + __m128i _t2; \ + _tt = _mm_srai_epi16(_tt, 8); \ + _tt = _mm_srli_si128(_tt, 15); \ + _tt = _mm_and_si128(_tt, _mm_setr_epi32(135,0,0,0)); \ + _t2 = _O; \ + _t2 = _mm_slli_si128(_t2, 8); \ + _t2 = _mm_srli_si128(_t2, 7); \ + _t2 = _mm_srli_epi64(_t2, 7); \ + _N = _O; \ + _N = _mm_slli_epi64(_N, 1); \ + _N = _mm_or_si128(_N, _t2); \ + _N = _mm_xor_si128(_N, _tt); \ +} + +#ifdef USE_AVX +void _stdcall xts_serpent_avx_encrypt(const unsigned char *in, unsigned char *out, size_t len, unsigned __int64 offset, xts_key *key) +#else +void _stdcall xts_serpent_sse2_encrypt(const unsigned char *in, unsigned char *out, size_t len, unsigned __int64 offset, xts_key *key) +#endif +{ + __m128i t0, t1, t2, t3; + __m128i b0, b1, b2, b3; + __m128i idx; + int i; + + ((unsigned __int64*)&idx)[0] = offset / XTS_SECTOR_SIZE; + ((unsigned __int64*)&idx)[1] = 0; + do + { + // update tweak unit index + ((unsigned __int64*)&idx)[0]++; + // derive first tweak value + serpent256_encrypt((unsigned char*)&idx, (unsigned char*)&t0, &key->tweak_k.serpent); + + for (i = 0; i < XTS_BLOCKS_IN_SECTOR / 4; i++) + { + // derive t1-t3 + sse2_next_tweak(t1, t0); + sse2_next_tweak(t2, t1); + sse2_next_tweak(t3, t2); + // load and tweak 4 blocks + b0 = _mm_xor_si128(_mm_loadu_si128((const __m128i*)(in + 0 )), t0); + b1 = _mm_xor_si128(_mm_loadu_si128((const __m128i*)(in + 16)), t1); + b2 = _mm_xor_si128(_mm_loadu_si128((const __m128i*)(in + 32)), t2); + b3 = _mm_xor_si128(_mm_loadu_si128((const __m128i*)(in + 48)), t3); + // encrypt / decrypt + serpent256_sse2_encrypt(b0, b1, b2, b3, &key->crypt_k.serpent); + // tweak and store 4 blocks + _mm_storeu_si128((__m128i*)(out + 0 ), _mm_xor_si128(b0, t0)); + _mm_storeu_si128((__m128i*)(out + 16), _mm_xor_si128(b1, t1)); + _mm_storeu_si128((__m128i*)(out + 32), 
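/* XEX output whitening: XOR the same tweak again after the cipher */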
_mm_xor_si128(b2, t2)); + _mm_storeu_si128((__m128i*)(out + 48), _mm_xor_si128(b3, t3)); + // derive next t0 + sse2_next_tweak(t0, t3); + // update pointers + in += XTS_BLOCK_SIZE*4; out += XTS_BLOCK_SIZE*4; + } + } while (len -= XTS_SECTOR_SIZE); +} + +#ifdef USE_AVX +void _stdcall xts_serpent_avx_decrypt(const unsigned char *in, unsigned char *out, size_t len, unsigned __int64 offset, xts_key *key) +#else +void _stdcall xts_serpent_sse2_decrypt(const unsigned char *in, unsigned char *out, size_t len, unsigned __int64 offset, xts_key *key) +#endif +{ + __m128i t0, t1, t2, t3; + __m128i b0, b1, b2, b3; + __m128i idx; + int i; + + ((unsigned __int64*)&idx)[0] = offset / XTS_SECTOR_SIZE; + ((unsigned __int64*)&idx)[1] = 0; + do + { + // update tweak unit index + ((unsigned __int64*)&idx)[0]++; + // derive first tweak value + serpent256_encrypt((unsigned char*)&idx, (unsigned char*)&t0, &key->tweak_k.serpent); + + for (i = 0; i < XTS_BLOCKS_IN_SECTOR / 4; i++) + { + // derive t1-t3 + sse2_next_tweak(t1, t0); + sse2_next_tweak(t2, t1); + sse2_next_tweak(t3, t2); + // load and tweak 4 blocks + b0 = _mm_xor_si128(_mm_loadu_si128((const __m128i*)(in + 0 )), t0); + b1 = _mm_xor_si128(_mm_loadu_si128((const __m128i*)(in + 16)), t1); + b2 = _mm_xor_si128(_mm_loadu_si128((const __m128i*)(in + 32)), t2); + b3 = _mm_xor_si128(_mm_loadu_si128((const __m128i*)(in + 48)), t3); + // encrypt / decrypt + serpent256_sse2_decrypt(b0, b1, b2, b3, &key->crypt_k.serpent); + // tweak and store 4 blocks + _mm_storeu_si128((__m128i*)(out + 0 ), _mm_xor_si128(b0, t0)); + _mm_storeu_si128((__m128i*)(out + 16), _mm_xor_si128(b1, t1)); + _mm_storeu_si128((__m128i*)(out + 32), _mm_xor_si128(b2, t2)); + _mm_storeu_si128((__m128i*)(out + 48), _mm_xor_si128(b3, t3)); + // derive next t0 + sse2_next_tweak(t0, t3); + // update pointers + in += XTS_BLOCK_SIZE*4; out += XTS_BLOCK_SIZE*4; + } + } while (len -= XTS_SECTOR_SIZE); +} + +#ifdef USE_AVX + +int _stdcall xts_serpent_avx_available() +{ + int succs = 0; + __asm { + mov eax, 1 + cpuid + and ecx, 0x18000000 // check 27 bit (OS uses XSAVE/XRSTOR) + cmp ecx, 0x18000000 // and 28 (AVX supported by CPU) + jne not_supported + xor ecx, ecx // XFEATURE_ENABLED_MASK/XCR0 register number = 0 + xgetbv // XFEATURE_ENABLED_MASK register is in edx:eax + and eax, 6 + cmp eax, 6 // check the AVX registers restore at context switch + jne not_supported + mov [succs], 1 +not_supported: + } + return succs; +} + +#else +int _stdcall xts_serpent_sse2_available() +{ + int info[4]; __cpuid(info, 1); + return (info[3] & (1 << 26)) != 0; +} +#endif \ No newline at end of file diff --git a/ImBoxEnclave/crypto_fast/xts_serpent_sse2.h b/ImBoxEnclave/crypto_fast/xts_serpent_sse2.h new file mode 100644 index 0000000..1411b15 --- /dev/null +++ b/ImBoxEnclave/crypto_fast/xts_serpent_sse2.h @@ -0,0 +1,8 @@ +#ifndef _XTS_SERPENT_SSE2_H_ +#define _XTS_SERPENT_SSE2_H_ + +int _stdcall xts_serpent_sse2_available(); +void _stdcall xts_serpent_sse2_encrypt(const unsigned char *in, unsigned char *out, size_t len, unsigned __int64 offset, xts_key *key); +void _stdcall xts_serpent_sse2_decrypt(const unsigned char *in, unsigned char *out, size_t len, unsigned __int64 offset, xts_key *key); + +#endif \ No newline at end of file From 20856a8e83f2440cbbd7565067d77d6869cf6725 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E7=88=B1=E7=BC=96=E7=A8=8B=E7=9A=84=E5=8F=B6=E4=B8=80?= =?UTF-8?q?=E7=AC=91?= <92030377+love-code-yeyixiao@users.noreply.github.com> Date: Sat, 21 Jun 2025 10:37:32 +0800 Subject: [PATCH 3/4] Adjust 
Structure --- ImBox/CryptoIO.cpp | 30 +- ImBox/CryptoIO.h | 12 +- ImBox/ImBox.cpp | 49 ++ ImBox/ImBox.vcxproj | 12 +- ImBoxEnclave/CryptoIO.cpp | 522 ++++++++++++++++++++ ImBoxEnclave/CryptoIO.h | 50 ++ ImBoxEnclave/EnclaveEntry.cpp | 9 +- ImBoxEnclave/ImBoxEnclave.vcxproj | 4 +- ImBoxEnclave/ImBoxEnclave.vcxproj.filters | 171 ++++--- ImBoxEnclave/crypto_fast/aes_key.c | 1 + ImBoxEnclave/crypto_fast/crc32.c | 1 + ImBoxEnclave/crypto_fast/serpent.c | 1 + ImBoxEnclave/crypto_fast/sha512.c | 1 + ImBoxEnclave/crypto_fast/sha512_hmac.c | 1 + ImBoxEnclave/crypto_fast/sha512_hmac_drbg.c | 1 + ImBoxEnclave/crypto_fast/sha512_pkcs5_2.c | 1 + ImBoxEnclave/crypto_fast/twofish.c | 1 + ImBoxEnclave/crypto_fast/xts_fast.c | 2 +- ImBoxEnclave/crypto_fast/xts_serpent_sse2.c | 1 + ImBoxEnclave/framework.h | 3 +- 20 files changed, 774 insertions(+), 99 deletions(-) create mode 100644 ImBoxEnclave/CryptoIO.cpp create mode 100644 ImBoxEnclave/CryptoIO.h diff --git a/ImBox/CryptoIO.cpp b/ImBox/CryptoIO.cpp index 38e4a36..9e073f2 100644 --- a/ImBox/CryptoIO.cpp +++ b/ImBox/CryptoIO.cpp @@ -74,23 +74,22 @@ CCryptoIO::CCryptoIO(CAbstractIO* pIO, const WCHAR* pKey, const std::wstring& Ci m = new SCryptoIO; m->Cipher = Cipher; m->AllowFormat = false; - +/* #ifdef ENCLAVE_ENABLED if (!IsEnclaveTypeSupported(ENCLAVE_TYPE_VBS)) { OutputDebugString(L"Enclave not supported!\n"); ExitProcess(STATUS_NOT_SUPPORTED); } +#endif // ENCLAVE_ENABLED*/ - -#else if (m->password) { m->password->size = wcslen(pKey) * sizeof(wchar_t); if (m->password->size > MAX_PASSWORD * sizeof(wchar_t)) m->password->size = MAX_PASSWORD * sizeof(wchar_t); memcpy(m->password->pass, pKey, m->password->size); } -#endif // ENCLAVE_ENABLED + m->section = NULL; m_pIO = pIO; @@ -185,7 +184,7 @@ int CCryptoIO::InitCrypto() m_pIO->DiskRead(header.ptr, sizeof(dc_header), 0); int ret = dc_decrypt_header(header.ptr, m->password.ptr) ? ERR_OK : (m->AllowFormat ? 
ERR_INTERNAL : ERR_WRONG_PASSWORD); - +/* #ifdef ENCLAVE_ENABLED // Create the enclave if (ret == ERR_OK) { @@ -227,10 +226,25 @@ int CCryptoIO::InitCrypto() #endif // ENCLAVE_ENABLED - +*/ if (ret == ERR_OK) { +//#ifndef ENCLAVE_ENABLED xts_set_key(header->key_1, header->alg_1, &m->benc_k); - +//#else + /* + PENCLAVE_ROUTINE EnclaveSetKey = reinterpret_cast(GetProcAddress(reinterpret_cast(Enclave), "EnclaveSetKey")); + KeySetArgs args; + memset(&args,0,sizeof(args)); + args.alg = header->alg_1; + args.key= header->key_1; + args.skey= &m->benc_k; + LPVOID rtn = 0;//We ignore the return value + if (CallEnclave(EnclaveSetKey, &args, 0, &rtn) == FALSE) { + return ERR_INTERNAL; + } + SecureZeroMemory(&args,sizeof(args)); + SecureZeroMemory(rtn, sizeof(rtn)); +#endif*/ if (m->section && header->info_magic == DC_INFO_MAGIC) { m->section->magic = SECTION_MAGIC; m->section->id = SECTION_PARAM_ID_DATA; @@ -255,7 +269,7 @@ int CCryptoIO::Init() m->password.free(); #ifdef ENCLAVE_ENABLED delete& m->benc_k; - delete& m->Cipher; + //delete& m->Cipher; //Clear key in the enternal thread #endif diff --git a/ImBox/CryptoIO.h b/ImBox/CryptoIO.h index ec63086..e4db741 100644 --- a/ImBox/CryptoIO.h +++ b/ImBox/CryptoIO.h @@ -30,12 +30,20 @@ class CCryptoIO : public CAbstractIO protected: virtual int InitCrypto(); virtual int WriteHeader(struct _dc_header* header); -#ifdef ENCLAVE_ENABLED +/*#ifdef ENCLAVE_ENABLED PVOID Enclave; -#endif +#endif*/ struct SCryptoIO* m; public: CAbstractIO* m_pIO; }; +/*#ifdef ENCLAVE_ENABLED +typedef struct KeySetArgs { + const unsigned char* key; + int alg; + xts_key* skey; +} KeySetArgs; +#endif +*/ \ No newline at end of file diff --git a/ImBox/ImBox.cpp b/ImBox/ImBox.cpp index db1171b..3ad2b3b 100644 --- a/ImBox/ImBox.cpp +++ b/ImBox/ImBox.cpp @@ -142,6 +142,55 @@ int APIENTRY wWinMain(_In_ HINSTANCE hInstance, } if (!key.empty() || !section.empty()) { +#ifdef ENCLAVE_ENABLED + if (!IsEnclaveTypeSupported(ENCLAVE_TYPE_VBS)) + { + OutputDebugString(L"Enclave not supported!\n"); + ExitProcess(STATUS_NOT_SUPPORTED); + } +#endif // ENCLAVE_ENABLED +#ifdef ENCLAVE_ENABLED + // Create the enclave + DWORD ret = 0; + if (ret == ERR_OK) { + constexpr ENCLAVE_CREATE_INFO_VBS CreateInfo + { + //ENCLAVE_VBS_FLAG_DEBUG, // Flags + 0, + { 0x10, 0x22, 0x30, 0x45, 0x41, 0x37, 0x21, 0x13 }, // OwnerID + }; + Enclave = CreateEnclave(GetCurrentProcess(), + nullptr, // Preferred base address + 0x10000000, // size + 0, + ENCLAVE_TYPE_VBS, + &CreateInfo, + sizeof(ENCLAVE_CREATE_INFO_VBS), + nullptr); + } + if (Enclave == NULL) { + DbgPrint(L"CreateEnclave failed\n"); + ret = ERR_INTERNAL; + } + if (ret == ERR_OK) + if (LoadEnclaveImageW(Enclave, L"ImBoxEnclave.dll") == FALSE) + ret = ERR_INTERNAL; + if (ret == ERR_OK) { + ENCLAVE_INIT_INFO_VBS InitInfo{}; + + InitInfo.Length = sizeof(ENCLAVE_INIT_INFO_VBS); + InitInfo.ThreadCount = 1; + if (InitializeEnclave(GetCurrentProcess(), + Enclave, + &InitInfo, + InitInfo.Length, + nullptr) == 0) { + ret = ERR_INTERNAL; + } + } + + +#endif CCryptoIO* pCrypto; if (key.empty()) { if (!pSection) diff --git a/ImBox/ImBox.vcxproj b/ImBox/ImBox.vcxproj index b41577d..0793cd1 100644 --- a/ImBox/ImBox.vcxproj +++ b/ImBox/ImBox.vcxproj @@ -37,39 +37,39 @@ Application true - v142 + v143 Unicode Application false - v142 + v143 true Unicode Application true - v142 + v143 Unicode Application true - v142 + v143 Unicode Application false - v142 + v143 true Unicode Application false - v142 + v143 true Unicode diff --git a/ImBoxEnclave/CryptoIO.cpp 
b/ImBoxEnclave/CryptoIO.cpp new file mode 100644 index 0000000..67385e4 --- /dev/null +++ b/ImBoxEnclave/CryptoIO.cpp @@ -0,0 +1,522 @@ +#include "pch.h" +#include "framework.h" +#include +#include "CryptoIO.h" +#include "..\ImBox\ImBox.h" +#include "..\ImBox\Common\helpers.h" + +extern "C" { +#include "..\ImBox\dc\include\boot\dc_header.h" +#include ".\crypto_fast\crc32.h" +#include ".\crypto_fast\sha512_pkcs5_2.h" +} + +void make_rand(void* ptr, size_t size) +{ + BCryptGenRandom(NULL, (BYTE*)ptr, size, BCRYPT_USE_SYSTEM_PREFERRED_RNG); +} + +template +struct SSecureBuffer +{ + SSecureBuffer() { alloc(sizeof(T)); } + SSecureBuffer(ULONG length) { alloc(length); } + ~SSecureBuffer() { free(); } + + void alloc(ULONG length) + { + // on 32 bit system xts_key must be located in executable memory + // x64 does not require this +#ifdef _M_IX86 + ptr = (T*)VirtualAlloc(NULL, length, MEM_COMMIT + MEM_RESERVE, PAGE_EXECUTE_READWRITE); +#else + ptr = (T*)VirtualAlloc(NULL, length, MEM_COMMIT + MEM_RESERVE, PAGE_READWRITE); +#endif + if(ptr) + VirtualLock(ptr, length); + } + + void free() + { + if (!ptr) + return; + + MEMORY_BASIC_INFORMATION mbi; + if ( (VirtualQuery(ptr, &mbi, sizeof(mbi)) == sizeof(mbi) && mbi.BaseAddress == ptr && mbi.AllocationBase == ptr) ) + { + RtlSecureZeroMemory(ptr, mbi.RegionSize); + VirtualUnlock(ptr, mbi.RegionSize); + } + VirtualFree(ptr, 0, MEM_RELEASE); + + ptr = NULL; + } + + T* operator ->() { return ptr; } + explicit operator bool() { return ptr != NULL; } + + T* ptr; +}; + +struct SCryptoIO +{ + std::wstring Cipher; + bool AllowFormat; + + SSecureBuffer password; + + xts_key benc_k; + + SSection* section; +}; + +CCryptoIO::CCryptoIO(CAbstractIO* pIO, const WCHAR* pKey, const std::wstring& Cipher) +{ + m = new SCryptoIO; + m->Cipher = Cipher; + m->AllowFormat = false; +/* +#ifdef ENCLAVE_ENABLED + if (!IsEnclaveTypeSupported(ENCLAVE_TYPE_VBS)) + { + OutputDebugString(L"Enclave not supported!\n"); + ExitProcess(STATUS_NOT_SUPPORTED); + } +#endif // ENCLAVE_ENABLED*/ + + if (m->password) { + m->password->size = wcslen(pKey) * sizeof(wchar_t); + if (m->password->size > MAX_PASSWORD * sizeof(wchar_t)) + m->password->size = MAX_PASSWORD * sizeof(wchar_t); + memcpy(m->password->pass, pKey, m->password->size); + } + + m->section = NULL; + + m_pIO = pIO; + + xts_init(1); +} + +CCryptoIO::~CCryptoIO() +{ + delete m; +} + +ULONG64 CCryptoIO::GetDiskSize() const +{ + ULONG64 uSize = m_pIO->GetDiskSize(); + if (uSize < DC_AREA_SIZE) + return 0; + return uSize - DC_AREA_SIZE; +} + +bool CCryptoIO::CanBeFormated() const +{ + return m->AllowFormat; +} + +int CCryptoIO::InitCrypto() +{ + if (DC_AREA_SIZE != sizeof dc_header) { + DbgPrint(L"dc_header struct invalid!\n"); + return ERR_INTERNAL; + } + + if (!m->password) + return ERR_KEY_REQUIRED; + + int cipher; + if (m->Cipher.empty() || _wcsicmp(m->Cipher.c_str(), L"AES") == 0 || _wcsicmp(m->Cipher.c_str(), L"Rijndael") == 0) + cipher = CF_AES; + else if (_wcsicmp(m->Cipher.c_str(), L"TWOFISH") == 0) + cipher = CF_TWOFISH; + else if (_wcsicmp(m->Cipher.c_str(), L"SERPENT") == 0) + cipher = CF_SERPENT; + else if (_wcsicmp(m->Cipher.c_str(), L"AES-TWOFISH") == 0) + cipher = CF_AES_TWOFISH; + else if (_wcsicmp(m->Cipher.c_str(), L"TWOFISH-SERPENT") == 0) + cipher = CF_TWOFISH_SERPENT; + else if (_wcsicmp(m->Cipher.c_str(), L"SERPENT-AES") == 0) + cipher = CF_SERPENT_AES; + else if (_wcsicmp(m->Cipher.c_str(), L"AES-TWOFISH-SERPENT") == 0) + cipher = CF_AES_TWOFISH_SERPENT; + else { + DbgPrint(L"Unknown Cipher.\n"); + return 
ERR_UNKNOWN_CIPHER; + } + + m->AllowFormat = m_pIO->CanBeFormated(); + if (m->AllowFormat) { + + DbgPrint(L"Creating DC header\n"); + + SSecureBuffer header; + if (!header) { + DbgPrint(L"Malloc Failed\n"); + return ERR_MALLOC_ERROR; + } + + // create the volume header + memset((BYTE*)header.ptr, 0, sizeof(dc_header)); + + make_rand(&header->disk_id, sizeof(header->disk_id)); + make_rand(header->key_1, sizeof(header->key_1)); + + header->sign = DC_VOLUME_SIGN; + header->version = DC_HDR_VERSION; + header->flags = VF_NO_REDIR; + header->alg_1 = cipher; + header->data_off = sizeof(dc_header); + header->hdr_crc = crc32((const unsigned char*)&header->version, DC_CRC_AREA_SIZE); + + WriteHeader(header.ptr); + } + + + DbgPrint(L"Trying to decrypt header..."); + + SSecureBuffer header; + if (!header) { + DbgPrint(L"Malloc Failed\n"); + return ERR_MALLOC_ERROR; + } + + m_pIO->DiskRead(header.ptr, sizeof(dc_header), 0); + + int ret = dc_decrypt_header(header.ptr, m->password.ptr) ? ERR_OK : (m->AllowFormat ? ERR_INTERNAL : ERR_WRONG_PASSWORD); +/* +#ifdef ENCLAVE_ENABLED + // Create the enclave + if (ret == ERR_OK) { + constexpr ENCLAVE_CREATE_INFO_VBS CreateInfo + { + //ENCLAVE_VBS_FLAG_DEBUG, // Flags + 0, + { 0x10, 0x22, 0x30, 0x45, 0x41, 0x37, 0x21, 0x13 }, // OwnerID + }; + Enclave = CreateEnclave(GetCurrentProcess(), + nullptr, // Preferred base address + 0x10000000, // size + 0, + ENCLAVE_TYPE_VBS, + &CreateInfo, + sizeof(ENCLAVE_CREATE_INFO_VBS), + nullptr); + } + if (Enclave == NULL) { + DbgPrint(L"CreateEnclave failed\n"); + ret = ERR_INTERNAL; + } + if (ret == ERR_OK) + if (LoadEnclaveImageW(Enclave, L"ImBoxEnclave.dll") == FALSE) + ret = ERR_INTERNAL; + if (ret == ERR_OK) { + ENCLAVE_INIT_INFO_VBS InitInfo{}; + + InitInfo.Length = sizeof(ENCLAVE_INIT_INFO_VBS); + InitInfo.ThreadCount = 1; + if (InitializeEnclave(GetCurrentProcess(), + Enclave, + &InitInfo, + InitInfo.Length, + nullptr) == 0) { + ret= ERR_INTERNAL; + } + } + + +#endif // ENCLAVE_ENABLED +*/ + if (ret == ERR_OK) { +//#ifndef ENCLAVE_ENABLED + xts_set_key(header->key_1, header->alg_1, &m->benc_k); +//#else + /* + PENCLAVE_ROUTINE EnclaveSetKey = reinterpret_cast(GetProcAddress(reinterpret_cast(Enclave), "EnclaveSetKey")); + KeySetArgs args; + memset(&args,0,sizeof(args)); + args.alg = header->alg_1; + args.key= header->key_1; + args.skey= &m->benc_k; + LPVOID rtn = 0;//We ignore the return value + if (CallEnclave(EnclaveSetKey, &args, 0, &rtn) == FALSE) { + return ERR_INTERNAL; + } + SecureZeroMemory(&args,sizeof(args)); + SecureZeroMemory(rtn, sizeof(rtn)); +#endif*/ + if (m->section && header->info_magic == DC_INFO_MAGIC) { + m->section->magic = SECTION_MAGIC; + m->section->id = SECTION_PARAM_ID_DATA; + m->section->size = header->info_size; + memcpy(m->section->data, header->info_data, header->info_size); + } + + DbgPrint(L" SUCCESS.\n"); + } + else + DbgPrint(L" FAILED.\n"); + return ret; +} + +int CCryptoIO::Init() +{ + int ret = m_pIO ? 
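/* open the backing image before touching the header */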
m_pIO->Init() : ERR_UNKNOWN_TYPE; + + if (ret == ERR_OK) + ret = InitCrypto(); + + m->password.free(); +#ifdef ENCLAVE_ENABLED + delete& m->benc_k; + //delete& m->Cipher; + //Clear key in the enternal thread +#endif + + + return ret; +} + +int CCryptoIO::WriteHeader(struct _dc_header* header) +{ + SSecureBuffer header_key; + UCHAR salt[PKCS5_SALT_SIZE]; + SSecureBuffer dk(DISKKEY_SIZE); + + // allocate required memory + if (!header_key || !dk) + { + DbgPrint(L"Malloc Failed\n"); + return ERR_MALLOC_ERROR; + } + + make_rand(salt, PKCS5_SALT_SIZE); + + // derive the header key + sha512_pkcs5_2(1000, m->password->pass, m->password->size, salt, PKCS5_SALT_SIZE, dk.ptr, PKCS_DERIVE_MAX); + + // initialize encryption keys + xts_set_key(dk.ptr, header->alg_1, header_key.ptr); + + // encrypt the volume header + xts_encrypt((const unsigned char*)header, (unsigned char*)header, sizeof(dc_header), 0, header_key.ptr); + + // save salt + memcpy(header->salt, salt, PKCS5_SALT_SIZE); + + // write volume header to output file + m_pIO->DiskWrite(header, sizeof(dc_header), 0); + + return ERR_OK; +} + +int CCryptoIO::ChangePassword(const WCHAR* pNewKey) +{ + int ret = m_pIO ? m_pIO->Init() : ERR_UNKNOWN_TYPE; + + if (ret != ERR_OK) + return ret; + + SSecureBuffer header; + if (!header) { + DbgPrint(L"Malloc Failed\n"); + return ERR_MALLOC_ERROR; + } + + m_pIO->DiskRead(header.ptr, sizeof(dc_header), 0); + + ret = dc_decrypt_header(header.ptr, m->password.ptr) ? ERR_OK : ERR_WRONG_PASSWORD; + + if (ret != ERR_OK) + return ret; + + if (m->password) { + m->password->size = wcslen(pNewKey) * sizeof(wchar_t); + if (m->password->size > MAX_PASSWORD * sizeof(wchar_t)) + m->password->size = MAX_PASSWORD * sizeof(wchar_t); + memcpy(m->password->pass, pNewKey, m->password->size); + } + + ret = WriteHeader(header.ptr); + + return ret; +} + +bool CCryptoIO::DiskWrite(void* buf, int size, __int64 offset) +{ +#ifdef _DEBUG + if ((offset & 0x1FF) || (size & 0x1FF)) + DbgPrint(L"DiskWrite not full sector\n"); +#endif + + xts_encrypt((BYTE*)buf, (BYTE*)buf, size, offset, &m->benc_k); + + bool ret = m_pIO->DiskWrite(buf, size, offset + DC_AREA_SIZE); + + //xts_decrypt((BYTE*)buf, (BYTE*)buf, size, offset, &m->benc_k); // restore buffer - not needed + + return ret; +} + +bool CCryptoIO::DiskRead(void* buf, int size, __int64 offset) +{ +#ifdef _DEBUG + if ((offset & 0x1FF) || (size & 0x1FF)) + DbgPrint(L"DiskRead not full sector\n"); +#endif + + bool ret = m_pIO->DiskRead(buf, size, offset + DC_AREA_SIZE); + + if (ret) + xts_decrypt((BYTE*)buf, (BYTE*)buf, size, offset, &m->benc_k); + + return ret; +} + +void CCryptoIO::TrimProcess(DEVICE_DATA_SET_RANGE* range, int n) +{ + for (DEVICE_DATA_SET_RANGE* range2 = range; range2 < range + n; range2++) { + range2->StartingOffset += DC_AREA_SIZE; +#ifdef _DEBUG + if (range2->StartingOffset & 0x1FF || range2->LengthInBytes & 0x1FF) + DbgPrint(L"TrimProcess not full sector\n"); +#endif + } + + m_pIO->TrimProcess(range, n); +} + +int CCryptoIO::BackupHeader(CAbstractIO* pIO, const std::wstring& Path) +{ + int ret = pIO->Init(); + + if (ret != ERR_OK) + return ret; + + SSecureBuffer header; + if (!header) { + DbgPrint(L"Malloc Failed\n"); + return ERR_MALLOC_ERROR; + } + + if (!pIO->DiskRead(header.ptr, sizeof(dc_header), 0)) + ret = ERR_FILE_NOT_OPENED; + + if (ret != ERR_OK) + return ret; + + HANDLE hFile = CreateFile(Path.c_str(), GENERIC_READ | GENERIC_WRITE, FILE_SHARE_READ, NULL, OPEN_ALWAYS, FILE_FLAG_NO_BUFFERING | FILE_FLAG_WRITE_THROUGH, NULL); + if (hFile != 
INVALID_HANDLE_VALUE) { + DWORD BytesWritten; + if (!WriteFile(hFile, header.ptr, sizeof(dc_header), &BytesWritten, NULL)) { + ret = ERR_FILE_NOT_OPENED; + } + CloseHandle(hFile); + } else + ret = ERR_FILE_NOT_OPENED; + + return ret; +} + +int CCryptoIO::RestoreHeader(CAbstractIO* pIO, const std::wstring& Path) +{ + int ret = pIO->Init(); + + if (ret != ERR_OK) + return ret; + + SSecureBuffer header; + if (!header) { + DbgPrint(L"Malloc Failed\n"); + return ERR_MALLOC_ERROR; + } + + HANDLE hFile = CreateFile(Path.c_str(), GENERIC_READ | GENERIC_WRITE, FILE_SHARE_READ, NULL, OPEN_ALWAYS, FILE_FLAG_NO_BUFFERING | FILE_FLAG_WRITE_THROUGH, NULL); + if (hFile != INVALID_HANDLE_VALUE) { + DWORD BytesRead; + if (!ReadFile(hFile, header.ptr, sizeof(dc_header), &BytesRead, NULL)) { + ret = ERR_FILE_NOT_OPENED; + } + CloseHandle(hFile); + } else + ret = ERR_FILE_NOT_OPENED; + + if (ret != ERR_OK) + return ret; + + if (!pIO->DiskWrite(header.ptr, sizeof(dc_header), 0)) + ret = ERR_FILE_NOT_OPENED; + + return ret; +} + +void CCryptoIO::SetDataSection(SSection* pSection) +{ + m->section = pSection; +} + +int CCryptoIO::SetData(const UCHAR* pData, SIZE_T uSize) +{ + int ret = m_pIO ? m_pIO->Init() : ERR_UNKNOWN_TYPE; + + if (ret != ERR_OK) + return ret; + + SSecureBuffer header; + if (!header) { + DbgPrint(L"Malloc Failed\n"); + return ERR_MALLOC_ERROR; + } + + m_pIO->DiskRead(header.ptr, sizeof(dc_header), 0); + + ret = dc_decrypt_header(header.ptr, m->password.ptr) ? ERR_OK : ERR_WRONG_PASSWORD; + + if (ret != ERR_OK) + return ret; + + if(uSize> sizeof(header.ptr->info_data)) + return ERR_DATA_TO_LONG; + + header.ptr->info_magic = DC_INFO_MAGIC; + header.ptr->info_reserved = 0; + header.ptr->info_size = uSize; + memcpy(header.ptr->info_data, pData, uSize); + + ret = WriteHeader(header.ptr); + + return ret; +} + +int CCryptoIO::GetData(UCHAR* pData, SIZE_T* pSize) +{ + int ret = m_pIO ? m_pIO->Init() : ERR_UNKNOWN_TYPE; + + if (ret != ERR_OK) + return ret; + + SSecureBuffer header; + if (!header) { + DbgPrint(L"Malloc Failed\n"); + return ERR_MALLOC_ERROR; + } + + m_pIO->DiskRead(header.ptr, sizeof(dc_header), 0); + + ret = dc_decrypt_header(header.ptr, m->password.ptr) ? 
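/* a clean header decrypt doubles as the password check */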
ERR_OK : ERR_WRONG_PASSWORD; + + if (ret != ERR_OK) + return ret; + + if (header.ptr->info_magic != DC_INFO_MAGIC) + return ERR_DATA_NOT_FOUND; + + //if (*pSize < header.ptr->info_size) + // return ERR_BUFFER_TO_SMALL; + + *pSize = header.ptr->info_size; + memcpy(pData, header.ptr->info_data, *pSize); + + return ERR_OK; +} \ No newline at end of file diff --git a/ImBoxEnclave/CryptoIO.h b/ImBoxEnclave/CryptoIO.h new file mode 100644 index 0000000..a83d345 --- /dev/null +++ b/ImBoxEnclave/CryptoIO.h @@ -0,0 +1,50 @@ +#pragma once +#include "..\ImBox\AbstractIO.h" +#include + +class CCryptoIO : public CAbstractIO +{ +public: + CCryptoIO(CAbstractIO* pIO, const WCHAR* pKey, const std::wstring& Cipher = std::wstring()); + virtual ~CCryptoIO(); + + virtual ULONG64 GetAllocSize() const { return m_pIO->GetAllocSize(); } + virtual ULONG64 GetDiskSize() const; + virtual bool CanBeFormated() const; + + virtual int Init(); + virtual void PrepViewOfFile(BYTE* p) { m_pIO->PrepViewOfFile(p); } + virtual int ChangePassword(const WCHAR* pNewKey); + + virtual bool DiskWrite(void* buf, int size, __int64 offset); + virtual bool DiskRead(void* buf, int size, __int64 offset); + virtual void TrimProcess(DEVICE_DATA_SET_RANGE* range, int n); + + static int BackupHeader(CAbstractIO* pIO, const std::wstring& Path); + static int RestoreHeader(CAbstractIO* pIO, const std::wstring& Path); + + virtual void SetDataSection(struct SSection* pSection); + + virtual int SetData(const UCHAR* pData, SIZE_T uSize); + virtual int GetData(UCHAR* pData, SIZE_T* pSize); + +protected: + virtual int InitCrypto(); + virtual int WriteHeader(struct _dc_header* header); +/*#ifdef ENCLAVE_ENABLED + PVOID Enclave; +#endif*/ + struct SCryptoIO* m; + +public: + CAbstractIO* m_pIO; +}; +/*#ifdef ENCLAVE_ENABLED +typedef struct KeySetArgs { + const unsigned char* key; + int alg; + xts_key* skey; +} KeySetArgs; +#endif + +*/ \ No newline at end of file diff --git a/ImBoxEnclave/EnclaveEntry.cpp b/ImBoxEnclave/EnclaveEntry.cpp index bfeda4d..605bc93 100644 --- a/ImBoxEnclave/EnclaveEntry.cpp +++ b/ImBoxEnclave/EnclaveEntry.cpp @@ -85,11 +85,16 @@ EnclaveSetKey( _In_ void* Context ) { + assert(&Context); + KeySetArgs * args = static_cast(Context); + assert((VOID**)args->key); + assert((VOID**)args->skey); WCHAR String[32]; - swprintf_s(String, ARRAYSIZE(String), L"%s\n", L"CallEnclaveTest started"); + swprintf_s(String, ARRAYSIZE(String), L"%s\n", L"Enclave key set"); OutputDebugStringW(String); - return (void*)((ULONG_PTR)(Context) ^ InitialCookie); + xts_set_key(args->key, args->alg, args->skey); + return NULL; } BOOL APIENTRY DllMain( HMODULE hModule, diff --git a/ImBoxEnclave/ImBoxEnclave.vcxproj b/ImBoxEnclave/ImBoxEnclave.vcxproj index 74bb52b..6d87ecf 100644 --- a/ImBoxEnclave/ImBoxEnclave.vcxproj +++ b/ImBoxEnclave/ImBoxEnclave.vcxproj @@ -110,7 +110,7 @@ true _DEBUG;IMBOXENCLAVE_EXPORTS;_WINDOWS;_USRDLL;%(PreprocessorDefinitions) true - Use + NotUsing pch.h @@ -139,6 +139,7 @@ + @@ -158,6 +159,7 @@ + diff --git a/ImBoxEnclave/ImBoxEnclave.vcxproj.filters b/ImBoxEnclave/ImBoxEnclave.vcxproj.filters index b6304f5..d008373 100644 --- a/ImBoxEnclave/ImBoxEnclave.vcxproj.filters +++ b/ImBoxEnclave/ImBoxEnclave.vcxproj.filters @@ -13,15 +13,6 @@ {67DA6AB6-F800-4c08-8B7A-83BB121AAD01} rc;ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe;resx;tiff;tif;png;wav;mfcribbon-ms - - {4ce92d95-faa8-4f3a-861e-f7f0c9d49d18} - - - {479c6f66-f1af-4c42-88fd-b6de07ed71b1} - - - {332d4ec4-6fb7-426e-9f7d-ede4c0a906bd} - @@ -30,47 +21,53 @@ 头文件 - - crypto_fast + 
+ 头文件 - - crypto_fast + + 头文件 - - crypto_fast + + 头文件 - - crypto_fast + + 头文件 - - crypto_fast + + 头文件 - - crypto_fast + + 头文件 - - crypto_fast + + 头文件 - - crypto_fast + + 头文件 - - crypto_fast + + 头文件 - - crypto_fast + + 头文件 - - crypto_fast + + 头文件 - - crypto_fast + + 头文件 - - crypto_fast + + 头文件 - - crypto_fast + + 头文件 + + + 头文件 + + + 头文件 @@ -80,58 +77,76 @@ 源文件 - - crypto_fast + + 源文件 - - crypto_fast + + 源文件 - - crypto_fast + + 源文件 - - crypto_fast + + 源文件 - - crypto_fast + + 源文件 - - crypto_fast + + 源文件 - - crypto_fast + + 源文件 - - crypto_fast + + 源文件 - - crypto_fast + + 源文件 - - crypto_fast + + 源文件 + + + 源文件 - - crypto_fast\amd64 - - - crypto_fast\amd64 - - - crypto_fast\amd64 - - - crypto_fast\amd64 - - - crypto_fast\amd64 - - - crypto_fast\amd64 - - - crypto_fast\i386 - + + 源文件 + + + 源文件 + + + 源文件 + + + 源文件 + + + 源文件 + + + 源文件 + + + 源文件 + + + 源文件 + + + 源文件 + + + 源文件 + + + 源文件 + + + 源文件 + \ No newline at end of file diff --git a/ImBoxEnclave/crypto_fast/aes_key.c b/ImBoxEnclave/crypto_fast/aes_key.c index 9cd709a..fba8017 100644 --- a/ImBoxEnclave/crypto_fast/aes_key.c +++ b/ImBoxEnclave/crypto_fast/aes_key.c @@ -21,6 +21,7 @@ You should have received a copy of the GNU General Public License along with this program. If not, see . */ +#include "..\pch.h" #include #include #include "aes_key.h" diff --git a/ImBoxEnclave/crypto_fast/crc32.c b/ImBoxEnclave/crypto_fast/crc32.c index 2d7a0f8..f53ece9 100644 --- a/ImBoxEnclave/crypto_fast/crc32.c +++ b/ImBoxEnclave/crypto_fast/crc32.c @@ -17,6 +17,7 @@ You should have received a copy of the GNU General Public License along with this program. If not, see . */ +#include "..\pch.h" #include "crc32.h" static const unsigned long crc32_tab[] = { diff --git a/ImBoxEnclave/crypto_fast/serpent.c b/ImBoxEnclave/crypto_fast/serpent.c index 0b120f8..fe4667e 100644 --- a/ImBoxEnclave/crypto_fast/serpent.c +++ b/ImBoxEnclave/crypto_fast/serpent.c @@ -15,6 +15,7 @@ * the Free Software Foundation; either version 2 of the License, or * (at your option) any later version. */ +#include "..\pch.h" #include #include #include "serpent.h" diff --git a/ImBoxEnclave/crypto_fast/sha512.c b/ImBoxEnclave/crypto_fast/sha512.c index 13c2c14..459f152 100644 --- a/ImBoxEnclave/crypto_fast/sha512.c +++ b/ImBoxEnclave/crypto_fast/sha512.c @@ -9,6 +9,7 @@ * Tom St Denis, tomstdenis@gmail.com, http://libtomcrypt.com * modified by ntldr, http://diskcryptor.net/ */ +#include "..\pch.h" #include #include #include "sha512.h" diff --git a/ImBoxEnclave/crypto_fast/sha512_hmac.c b/ImBoxEnclave/crypto_fast/sha512_hmac.c index 32ef79f..b6cbfc3 100644 --- a/ImBoxEnclave/crypto_fast/sha512_hmac.c +++ b/ImBoxEnclave/crypto_fast/sha512_hmac.c @@ -16,6 +16,7 @@ You should have received a copy of the GNU General Public License along with this program. If not, see . */ +#include "..\pch.h" #include #include "sha512_hmac.h" diff --git a/ImBoxEnclave/crypto_fast/sha512_hmac_drbg.c b/ImBoxEnclave/crypto_fast/sha512_hmac_drbg.c index 2dff481..5071400 100644 --- a/ImBoxEnclave/crypto_fast/sha512_hmac_drbg.c +++ b/ImBoxEnclave/crypto_fast/sha512_hmac_drbg.c @@ -16,6 +16,7 @@ You should have received a copy of the GNU General Public License along with this program. If not, see . 
*/
+#include "..\pch.h"
 #include 
 #include "sha512_hmac_drbg.h"
diff --git a/ImBoxEnclave/crypto_fast/sha512_pkcs5_2.c b/ImBoxEnclave/crypto_fast/sha512_pkcs5_2.c
index 66b043c..e91f2ba 100644
--- a/ImBoxEnclave/crypto_fast/sha512_pkcs5_2.c
+++ b/ImBoxEnclave/crypto_fast/sha512_pkcs5_2.c
@@ -16,6 +16,7 @@
    You should have received a copy of the GNU General Public License
    along with this program. If not, see <http://www.gnu.org/licenses/>.
 */
+#include "..\pch.h"
 #include 
 #include 
 #include "sha512_hmac.h"
diff --git a/ImBoxEnclave/crypto_fast/twofish.c b/ImBoxEnclave/crypto_fast/twofish.c
index bb68f79..cd7f5fe 100644
--- a/ImBoxEnclave/crypto_fast/twofish.c
+++ b/ImBoxEnclave/crypto_fast/twofish.c
@@ -35,6 +35,7 @@
  * Abstract Algebra_ by Joseph A. Gallian, especially chapter 22 in the
  * Third Edition.
  */
+#include "..\pch.h"
 #include 
 #include "twofish.h"
diff --git a/ImBoxEnclave/crypto_fast/xts_fast.c b/ImBoxEnclave/crypto_fast/xts_fast.c
index c466e66..7ec9f5a 100644
--- a/ImBoxEnclave/crypto_fast/xts_fast.c
+++ b/ImBoxEnclave/crypto_fast/xts_fast.c
@@ -16,7 +16,7 @@
    You should have received a copy of the GNU General Public License
    along with this program. If not, see <http://www.gnu.org/licenses/>.
 */
-#define ENCLAVE_ENABLED
+#include "..\pch.h"
 #include 
 #include 
 //#include 
diff --git a/ImBoxEnclave/crypto_fast/xts_serpent_sse2.c b/ImBoxEnclave/crypto_fast/xts_serpent_sse2.c
index a4eb02b..c682352 100644
--- a/ImBoxEnclave/crypto_fast/xts_serpent_sse2.c
+++ b/ImBoxEnclave/crypto_fast/xts_serpent_sse2.c
@@ -16,6 +16,7 @@
    You should have received a copy of the GNU General Public License
    along with this program. If not, see <http://www.gnu.org/licenses/>.
 */
+#include "..\pch.h"
 #if defined(USE_AVX) && !defined(__INTEL_COMPILER)
 	#error Please use Intel C++ Compiler
 #endif
diff --git a/ImBoxEnclave/framework.h b/ImBoxEnclave/framework.h
index 039da1e..fba3798 100644
--- a/ImBoxEnclave/framework.h
+++ b/ImBoxEnclave/framework.h
@@ -2,4 +2,5 @@
 // Windows 头文件
 #include <windows.h>
-#include <winenclaveapi.h>
\ No newline at end of file
+#include <winenclaveapi.h>
+#define ENCLAVE_ENABLED
From 9bdf92fb70759cd1d794fe2d5e21308d35e3a08f Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=E7=88=B1=E7=BC=96=E7=A8=8B=E7=9A=84=E5=8F=B6=E4=B8=80?=
 =?UTF-8?q?=E7=AC=91?= <92030377+love-code-yeyixiao@users.noreply.github.com>
Date: Sat, 21 Jun 2025 10:58:09 +0800
Subject: [PATCH 4/4] Add BCrypt Link

---
 ImBoxEnclave/CryptoIO.cpp | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/ImBoxEnclave/CryptoIO.cpp b/ImBoxEnclave/CryptoIO.cpp
index 67385e4..9c9bddb 100644
--- a/ImBoxEnclave/CryptoIO.cpp
+++ b/ImBoxEnclave/CryptoIO.cpp
@@ -5,6 +5,8 @@
 #include "..\ImBox\ImBox.h"
 #include "..\ImBox\Common\helpers.h"
 
+#pragma comment(lib,"Bcrypt.lib")
+
 extern "C" {
 #include "..\ImBox\dc\include\boot\dc_header.h"
 #include ".\crypto_fast\crc32.h"
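
For reference, a minimal host-side sketch of how the KeySetArgs path (currently commented out in CryptoIO.cpp, see PATCH 3/4) would drive the EnclaveSetKey entry point added to EnclaveEntry.cpp. CallEnclave and PENCLAVE_ROUTINE are the documented Windows VBS-enclave APIs; the helper name and the error handling are illustrative assumptions, not part of the patches, and KeySetArgs/xts_key/ERR_* are the types and codes used elsewhere in this series.

// Illustrative only: assumes the enclave was created, loaded and
// initialized as in ImBox.cpp, and that ImBoxEnclave.dll exports
// EnclaveSetKey as shown in EnclaveEntry.cpp.
#include <windows.h>
#include <enclaveapi.h>

static int SetKeyInEnclave(PVOID Enclave, const unsigned char* key, int alg, xts_key* skey)
{
	PENCLAVE_ROUTINE EnclaveSetKey = reinterpret_cast<PENCLAVE_ROUTINE>(
		GetProcAddress(reinterpret_cast<HMODULE>(Enclave), "EnclaveSetKey"));
	if (EnclaveSetKey == NULL)
		return ERR_INTERNAL;

	KeySetArgs args = { key, alg, skey };
	LPVOID rtn = NULL; // the return value is ignored, as in the commented-out code
	if (CallEnclave(EnclaveSetKey, &args, FALSE /*fWaitForThread*/, &rtn) == FALSE)
		return ERR_INTERNAL;

	SecureZeroMemory(&args, sizeof(args)); // scrub the host-side copies of the pointers
	return ERR_OK;
}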