mm_malloc.h
/*===---- mm_malloc.h - Allocating and Freeing Aligned Memory Blocks -------===
 *
 * Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
 * See https://llvm.org/LICENSE.txt for license information.
 * SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
 *
 *===-----------------------------------------------------------------------===
 */

#ifndef __MM_MALLOC_H
#define __MM_MALLOC_H

#include <stdlib.h>

#ifdef _WIN32
#include <malloc.h>
#else
#ifndef __cplusplus
extern int posix_memalign(void **__memptr, size_t __alignment, size_t __size);
#else
// Some systems (e.g. those with GNU libc) declare posix_memalign with an
// exception specifier. Via an "egregious workaround" in
// Sema::CheckEquivalentExceptionSpec, Clang accepts the following as a valid
// redeclaration of glibc's declaration.
extern "C" int posix_memalign(void **__memptr, size_t __alignment, size_t __size);
#endif
#endif
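
// Illustrative note (not part of the original header): with GNU libc,
// <stdlib.h> declares posix_memalign with __THROW, which in C++ expands to an
// exception specifier, roughly:
//   extern "C" int posix_memalign(void **__memptr, size_t __alignment,
//                                 size_t __size) throw();
// The redeclaration above omits that specifier; the workaround in
// Sema::CheckEquivalentExceptionSpec is what lets Clang treat the two
// declarations as equivalent instead of rejecting the mismatch.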

#if !(defined(_WIN32) && defined(_mm_malloc))
static __inline__ void *__attribute__((__always_inline__, __nodebug__,
                                        __malloc__, __alloc_size__(1),
                                        __alloc_align__(2)))
_mm_malloc(size_t __size, size_t __align) {
  if (__align == 1) {
    return malloc(__size);
  }

  if (!(__align & (__align - 1)) && __align < sizeof(void *))
    __align = sizeof(void *);

  void *__mallocedMemory;
#if defined(__MINGW32__)
  __mallocedMemory = __mingw_aligned_malloc(__size, __align);
#elif defined(_WIN32)
  __mallocedMemory = _aligned_malloc(__size, __align);
#else
  if (posix_memalign(&__mallocedMemory, __align, __size))
    return 0;
#endif

  return __mallocedMemory;
}

static __inline__ void __attribute__((__always_inline__, __nodebug__))
_mm_free(void *__p)
{
#if defined(__MINGW32__)
  __mingw_aligned_free(__p);
#elif defined(_WIN32)
  _aligned_free(__p);
#else
  free(__p);
#endif
}
#endif

#endif /* __MM_MALLOC_H */
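
A minimal usage sketch (an illustration, not part of the header): _mm_malloc
returns a block whose address is a multiple of the requested power-of-two
alignment, and that block must be released with _mm_free rather than free(),
since on Windows it comes from _aligned_malloc or __mingw_aligned_malloc. The
example assumes a hosted environment and includes <mm_malloc.h> directly; in
practice the header is usually reached through <xmmintrin.h>.

/* Allocate a 64-byte-aligned buffer of 8 floats, verify the alignment, and
 * free it with the matching _mm_free. */
#include <mm_malloc.h>
#include <stdint.h>
#include <stdio.h>

int main(void) {
  float *buf = (float *)_mm_malloc(8 * sizeof(float), 64);
  if (!buf)
    return 1; /* _mm_malloc reports failure with a null pointer */

  for (int i = 0; i < 8; ++i)
    buf[i] = (float)i;

  printf("64-byte aligned: %s\n",
         ((uintptr_t)buf % 64 == 0) ? "yes" : "no");

  _mm_free(buf); /* do not mix with free(): the block may be _aligned_malloc'd */
  return 0;
}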