/* XMRig
 * Copyright 2010      Jeff Garzik <jgarzik@pobox.com>
 * Copyright 2012-2014 pooler      <pooler@litecoinpool.org>
 * Copyright 2014      Lucas Jones <https://github.com/lucasjones>
 * Copyright 2014-2016 Wolf9466    <https://github.com/OhGodAPet>
 * Copyright 2016      Jay D Dee   <jayddee246@gmail.com>
 * Copyright 2016-2017 XMRig       <support@xmrig.com>
 *
 *
 *   This program is free software: you can redistribute it and/or modify
 *   it under the terms of the GNU General Public License as published by
 *   the Free Software Foundation, either version 3 of the License, or
 *   (at your option) any later version.
 *
 *   This program is distributed in the hope that it will be useful,
 *   but WITHOUT ANY WARRANTY; without even the implied warranty of
 *   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 *   GNU General Public License for more details.
 *
 *   You should have received a copy of the GNU General Public License
 *   along with this program. If not, see <http://www.gnu.org/licenses/>.
 */

#ifndef __ALIGNED_MALLOC_H__
#define __ALIGNED_MALLOC_H__


#include <stdlib.h>

#ifndef __cplusplus
extern int posix_memalign(void **__memptr, size_t __alignment, size_t __size);
#else
// Some systems (e.g. those with GNU libc) declare posix_memalign with an
// exception specifier. Via an "egregious workaround" in
// Sema::CheckEquivalentExceptionSpec, Clang accepts the following as a valid
// redeclaration of glibc's declaration.
extern "C" int posix_memalign(void **__memptr, size_t __alignment, size_t __size);
#endif
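
/*
 * Note on the declaration above: posix_memalign() stores the allocation in
 * *__memptr and returns 0 on success or an error number (e.g. ENOMEM) on
 * failure. It requires __alignment to be a power of two and a multiple of
 * sizeof(void *), which is why _mm_malloc() below rounds small power-of-two
 * alignments up before calling it.
 */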


static __inline__ void *__attribute__((__always_inline__, __malloc__)) _mm_malloc(size_t __size, size_t __align)
{
    /* An alignment of 1 byte needs no special handling. */
    if (__align == 1) {
        return malloc(__size);
    }

    /* posix_memalign() needs the alignment to be at least sizeof(void *),
     * so round small power-of-two alignments up. */
    if (!(__align & (__align - 1)) && __align < sizeof(void *))
        __align = sizeof(void *);

    void *__mallocedMemory;
    if (posix_memalign(&__mallocedMemory, __align, __size)) {
        /* posix_memalign() returns non-zero on failure; report it as a null pointer. */
        return 0;
    }

    return __mallocedMemory;
}


static __inline__ void __attribute__((__always_inline__)) _mm_free(void *__p)
{
    free(__p);
}
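
/*
 * Illustrative usage only (not part of the original header); the size and
 * alignment below are arbitrary example values.
 *
 *     void *buf = _mm_malloc(4096, 16);   // 4 KiB, 16-byte aligned
 *     if (buf != NULL) {
 *         // ... use the aligned buffer ...
 *         _mm_free(buf);                  // pair with _mm_malloc()
 *     }
 */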

#endif /* __ALIGNED_MALLOC_H__ */