/*
 * default memory allocator for libavutil
 * Copyright (c) 2002 Fabrice Bellard
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * default memory allocator for libavutil
 */

#define _XOPEN_SOURCE 600

#include "config.h"

#include <limits.h>
#include <stdint.h>
#include <stdlib.h>
#include <stdatomic.h>
#include <string.h>
#if HAVE_MALLOC_H
#include <malloc.h>
#endif

#include "attributes.h"
#include "avassert.h"
#include "dynarray.h"
#include "error.h"
#include "internal.h"
#include "intreadwrite.h"
#include "macros.h"
#include "mem.h"

#ifdef MALLOC_PREFIX

#define malloc         AV_JOIN(MALLOC_PREFIX, malloc)
#define memalign       AV_JOIN(MALLOC_PREFIX, memalign)
#define posix_memalign AV_JOIN(MALLOC_PREFIX, posix_memalign)
#define realloc        AV_JOIN(MALLOC_PREFIX, realloc)
#define free           AV_JOIN(MALLOC_PREFIX, free)

void *malloc(size_t size);
void *memalign(size_t align, size_t size);
int   posix_memalign(void **ptr, size_t align, size_t size);
void *realloc(void *ptr, size_t size);
void  free(void *ptr);

#endif /* MALLOC_PREFIX */

#define ALIGN (HAVE_AVX512 ? 64 : (HAVE_AVX ? 32 : 16))

/* NOTE: if you want to override these functions with your own
 * implementations (not recommended) you have to link libav* as
 * dynamic libraries and remove -Wl,-Bsymbolic from the linker flags.
 * Note that this will cost performance. */

static atomic_size_t max_alloc_size = ATOMIC_VAR_INIT(INT_MAX);

void av_max_alloc(size_t max)
{
    atomic_store_explicit(&max_alloc_size, max, memory_order_relaxed);
}

static int size_mult(size_t a, size_t b, size_t *r)
{
    size_t t;

#if (!defined(__INTEL_COMPILER) && AV_GCC_VERSION_AT_LEAST(5,1)) || AV_HAS_BUILTIN(__builtin_mul_overflow)
    if (__builtin_mul_overflow(a, b, &t))
        return AVERROR(EINVAL);
#else
    t = a * b;
    /* Hack inspired from glibc: don't try the division if nelem and elsize
     * are both less than sqrt(SIZE_MAX). */
    if ((a | b) >= ((size_t)1 << (sizeof(size_t) * 4)) && a && t / a != b)
        return AVERROR(EINVAL);
#endif
    *r = t;
    return 0;
}
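
/* Worked example of the fallback overflow check above (illustrative only):
 * sizeof(size_t) * 4 is half the width of size_t in bits, so the division
 * test is skipped whenever both factors are below sqrt(SIZE_MAX + 1), in
 * which case their product cannot wrap. With a 64-bit size_t:
 *
 *     size_t a = (size_t)1 << 33, b = (size_t)1 << 33;
 *     size_t t = a * b;        // 2^66 wraps to 0
 *     // (a | b) >= 2^32 and t / a != b, so AVERROR(EINVAL) is returned
 */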

void *av_malloc(size_t size)
{
    void *ptr = NULL;

    if (size > atomic_load_explicit(&max_alloc_size, memory_order_relaxed))
        return NULL;

#if HAVE_POSIX_MEMALIGN
    if (size) //OS X on SDK 10.6 has a broken posix_memalign implementation
    if (posix_memalign(&ptr, ALIGN, size))
        ptr = NULL;
#elif HAVE_ALIGNED_MALLOC
    ptr = _aligned_malloc(size, ALIGN);
#elif HAVE_MEMALIGN
#ifndef __DJGPP__
    ptr = memalign(ALIGN, size);
#else
    ptr = memalign(size, ALIGN);
#endif
    /* Why 64?
     * Indeed, we should align it:
     *   on  4 for 386
     *   on 16 for 486
     *   on 32 for 586, PPro - K6-III
     *   on 64 for K7 (maybe for P3 too).
     * Because L1 and L2 caches are aligned on those values.
     * But I don't want to code such logic here!
     */
    /* Why 32?
     * For AVX ASM. SSE / NEON needs only 16.
     * Why not larger? Because I did not see a difference in benchmarks ...
     */
    /* benchmarks with P3
     * memalign(64) + 1   3071, 3051, 3032
     * memalign(64) + 2   3051, 3032, 3041
     * memalign(64) + 4   2911, 2896, 2915
     * memalign(64) + 8   2545, 2554, 2550
     * memalign(64) + 16  2543, 2572, 2563
     * memalign(64) + 32  2546, 2545, 2571
     * memalign(64) + 64  2570, 2533, 2558
     *
     * BTW, malloc seems to do 8-byte alignment by default here.
     */
#else
    ptr = malloc(size);
#endif
    if (!ptr && !size) {
        size = 1;
        ptr = av_malloc(1);
    }
#if CONFIG_MEMORY_POISONING
    if (ptr)
        memset(ptr, FF_MEMORY_POISON, size);
#endif
    return ptr;
}
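
/* Illustrative caller-side sketch (hypothetical names, not part of this
 * file): buffers from av_malloc() are aligned to ALIGN bytes and, since they
 * may come from _aligned_malloc() or posix_memalign(), they must be released
 * with av_free(), never with plain free():
 *
 *     uint8_t *buf = av_malloc(buf_size);
 *     if (!buf)
 *         return AVERROR(ENOMEM);
 *     ...
 *     av_free(buf);
 */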

void *av_realloc(void *ptr, size_t size)
{
    void *ret;
    if (size > atomic_load_explicit(&max_alloc_size, memory_order_relaxed))
        return NULL;

#if HAVE_ALIGNED_MALLOC
    ret = _aligned_realloc(ptr, size + !size, ALIGN);
#else
    ret = realloc(ptr, size + !size);
#endif
#if CONFIG_MEMORY_POISONING
    if (ret && !ptr)
        memset(ret, FF_MEMORY_POISON, size);
#endif
    return ret;
}

void *av_realloc_f(void *ptr, size_t nelem, size_t elsize)
{
    size_t size;
    void *r;

    if (size_mult(elsize, nelem, &size)) {
        av_free(ptr);
        return NULL;
    }
    r = av_realloc(ptr, size);
    if (!r)
        av_free(ptr);
    return r;
}

int av_reallocp(void *ptr, size_t size)
{
    void *val;

    if (!size) {
        av_freep(ptr);
        return 0;
    }

    memcpy(&val, ptr, sizeof(val));
    val = av_realloc(val, size);

    if (!val) {
        av_freep(ptr);
        return AVERROR(ENOMEM);
    }

    memcpy(ptr, &val, sizeof(val));
    return 0;
}
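
/* Usage sketch for the in-place variant (hypothetical names): av_reallocp()
 * takes the address of the pointer and frees the buffer on failure, so the
 * caller is not left with a dangling reference:
 *
 *     int ret = av_reallocp(&buf, new_size);
 *     if (ret < 0)
 *         return ret;   // buf has already been freed and set to NULL
 */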

void *av_malloc_array(size_t nmemb, size_t size)
{
    size_t result;
    if (size_mult(nmemb, size, &result) < 0)
        return NULL;
    return av_malloc(result);
}

#if FF_API_AV_MALLOCZ_ARRAY
void *av_mallocz_array(size_t nmemb, size_t size)
{
    size_t result;
    if (size_mult(nmemb, size, &result) < 0)
        return NULL;
    return av_mallocz(result);
}
#endif

void *av_realloc_array(void *ptr, size_t nmemb, size_t size)
{
    size_t result;
    if (size_mult(nmemb, size, &result) < 0)
        return NULL;
    return av_realloc(ptr, result);
}

int av_reallocp_array(void *ptr, size_t nmemb, size_t size)
{
    void *val;

    memcpy(&val, ptr, sizeof(val));
    val = av_realloc_f(val, nmemb, size);
    memcpy(ptr, &val, sizeof(val));
    if (!val && nmemb && size)
        return AVERROR(ENOMEM);

    return 0;
}
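
/* Note on the two array reallocators above: av_realloc_array() leaves the
 * caller's pointer untouched when it fails (it only returns NULL), while
 * av_reallocp_array() frees the buffer, writes NULL back through the
 * caller's pointer and then reports AVERROR(ENOMEM). */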

void av_free(void *ptr)
{
#if HAVE_ALIGNED_MALLOC
    _aligned_free(ptr);
#else
    free(ptr);
#endif
}

void av_freep(void *arg)
{
    void *val;

    memcpy(&val, arg, sizeof(val));
    memcpy(arg, &(void *){ NULL }, sizeof(val));
    av_free(val);
}
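
/* Illustrative sketch (hypothetical names): av_freep() takes the address of
 * the pointer and clears it, which makes an accidental second call harmless:
 *
 *     uint8_t *buf = av_malloc(size);
 *     av_freep(&buf);   // frees the buffer and sets buf to NULL
 *     av_freep(&buf);   // no-op, since av_free(NULL) does nothing
 */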

void *av_mallocz(size_t size)
{
    void *ptr = av_malloc(size);
    if (ptr)
        memset(ptr, 0, size);
    return ptr;
}

void *av_calloc(size_t nmemb, size_t size)
{
    size_t result;
    if (size_mult(nmemb, size, &result) < 0)
        return NULL;
    return av_mallocz(result);
}

char *av_strdup(const char *s)
{
    char *ptr = NULL;
    if (s) {
        size_t len = strlen(s) + 1;
        ptr = av_realloc(NULL, len);
        if (ptr)
            memcpy(ptr, s, len);
    }
    return ptr;
}

char *av_strndup(const char *s, size_t len)
{
    char *ret = NULL, *end;

    if (!s)
        return NULL;

    end = memchr(s, 0, len);
    if (end)
        len = end - s;

    ret = av_realloc(NULL, len + 1);
    if (!ret)
        return NULL;

    memcpy(ret, s, len);
    ret[len] = 0;
    return ret;
}

void *av_memdup(const void *p, size_t size)
{
    void *ptr = NULL;
    if (p) {
        ptr = av_malloc(size);
        if (ptr)
            memcpy(ptr, p, size);
    }
    return ptr;
}

int av_dynarray_add_nofree(void *tab_ptr, int *nb_ptr, void *elem)
{
    void **tab;
    memcpy(&tab, tab_ptr, sizeof(tab));

    FF_DYNARRAY_ADD(INT_MAX, sizeof(*tab), tab, *nb_ptr, {
        tab[*nb_ptr] = elem;
        memcpy(tab_ptr, &tab, sizeof(tab));
    }, {
        return AVERROR(ENOMEM);
    });
    return 0;
}

void av_dynarray_add(void *tab_ptr, int *nb_ptr, void *elem)
{
    void **tab;
    memcpy(&tab, tab_ptr, sizeof(tab));

    FF_DYNARRAY_ADD(INT_MAX, sizeof(*tab), tab, *nb_ptr, {
        tab[*nb_ptr] = elem;
        memcpy(tab_ptr, &tab, sizeof(tab));
    }, {
        *nb_ptr = 0;
        av_freep(tab_ptr);
    });
}

void *av_dynarray2_add(void **tab_ptr, int *nb_ptr, size_t elem_size,
                       const uint8_t *elem_data)
{
    uint8_t *tab_elem_data = NULL;

    FF_DYNARRAY_ADD(INT_MAX, elem_size, *tab_ptr, *nb_ptr, {
        tab_elem_data = (uint8_t *)*tab_ptr + (*nb_ptr) * elem_size;
        if (elem_data)
            memcpy(tab_elem_data, elem_data, elem_size);
        else if (CONFIG_MEMORY_POISONING)
            memset(tab_elem_data, FF_MEMORY_POISON, elem_size);
    }, {
        av_freep(tab_ptr);
        *nb_ptr = 0;
    });
    return tab_elem_data;
}
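
/* Usage sketch for the dynarray helpers above (MyItem and the variable names
 * are hypothetical): av_dynarray_add() frees the whole table and resets the
 * count on allocation failure, while av_dynarray_add_nofree() keeps the
 * existing table and reports the error instead:
 *
 *     MyItem **list    = NULL;
 *     int      nb_list = 0;
 *     if (av_dynarray_add_nofree(&list, &nb_list, item) < 0)
 *         goto fail;   // list and nb_list are left intact here
 */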

static void fill16(uint8_t *dst, int len)
{
    uint32_t v = AV_RN16(dst - 2);

    v |= v << 16;

    while (len >= 4) {
        AV_WN32(dst, v);
        dst += 4;
        len -= 4;
    }

    while (len--) {
        *dst = dst[-2];
        dst++;
    }
}

static void fill24(uint8_t *dst, int len)
{
#if HAVE_BIGENDIAN
    uint32_t v = AV_RB24(dst - 3);
    uint32_t a = v << 8  | v >> 16;
    uint32_t b = v << 16 | v >> 8;
    uint32_t c = v << 24 | v;
#else
    uint32_t v = AV_RL24(dst - 3);
    uint32_t a = v       | v << 24;
    uint32_t b = v >> 8  | v << 16;
    uint32_t c = v >> 16 | v << 8;
#endif

    while (len >= 12) {
        AV_WN32(dst,     a);
        AV_WN32(dst + 4, b);
        AV_WN32(dst + 8, c);
        dst += 12;
        len -= 12;
    }

    if (len >= 4) {
        AV_WN32(dst, a);
        dst += 4;
        len -= 4;
    }

    if (len >= 4) {
        AV_WN32(dst, b);
        dst += 4;
        len -= 4;
    }

    while (len--) {
        *dst = dst[-3];
        dst++;
    }
}

static void fill32(uint8_t *dst, int len)
{
    uint32_t v = AV_RN32(dst - 4);

#if HAVE_FAST_64BIT
    uint64_t v2 = v + ((uint64_t)v << 32);
    while (len >= 32) {
        AV_WN64(dst,      v2);
        AV_WN64(dst +  8, v2);
        AV_WN64(dst + 16, v2);
        AV_WN64(dst + 24, v2);
        dst += 32;
        len -= 32;
    }
#endif

    while (len >= 4) {
        AV_WN32(dst, v);
        dst += 4;
        len -= 4;
    }

    while (len--) {
        *dst = dst[-4];
        dst++;
    }
}

void av_memcpy_backptr(uint8_t *dst, int back, int cnt)
{
    const uint8_t *src = &dst[-back];
    if (!back)
        return;

    if (back == 1) {
        memset(dst, *src, cnt);
    } else if (back == 2) {
        fill16(dst, cnt);
    } else if (back == 3) {
        fill24(dst, cnt);
    } else if (back == 4) {
        fill32(dst, cnt);
    } else {
        if (cnt >= 16) {
            int blocklen = back;
            while (cnt > blocklen) {
                memcpy(dst, src, blocklen);
                dst       += blocklen;
                cnt       -= blocklen;
                blocklen <<= 1;
            }
            memcpy(dst, src, cnt);
            return;
        }
        if (cnt >= 8) {
            AV_COPY32U(dst,     src);
            AV_COPY32U(dst + 4, src + 4);
            src += 8;
            dst += 8;
            cnt -= 8;
        }
        if (cnt >= 4) {
            AV_COPY32U(dst, src);
            src += 4;
            dst += 4;
            cnt -= 4;
        }
        if (cnt >= 2) {
            AV_COPY16U(dst, src);
            src += 2;
            dst += 2;
            cnt -= 2;
        }
        if (cnt)
            *dst = *src;
    }
}
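
/* Illustrative use of av_memcpy_backptr() (hypothetical buffer): the copy is
 * well defined when the regions overlap, i.e. back < cnt, which is the common
 * case when expanding LZ-style matches, where output bytes may depend on
 * bytes the same call has just written:
 *
 *     // dst[0..1] already hold "ab"; replicate them into the next 6 bytes
 *     av_memcpy_backptr(dst + 2, 2, 6);   // dst now begins with "abababab"
 */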

void *av_fast_realloc(void *ptr, unsigned int *size, size_t min_size)
{
    size_t max_size;

    if (min_size <= *size)
        return ptr;

    max_size = atomic_load_explicit(&max_alloc_size, memory_order_relaxed);
    /* *size is an unsigned, so the real maximum is <= UINT_MAX. */
    max_size = FFMIN(max_size, UINT_MAX);

    if (min_size > max_size) {
        *size = 0;
        return NULL;
    }

    min_size = FFMIN(max_size, FFMAX(min_size + min_size / 16 + 32, min_size));

    ptr = av_realloc(ptr, min_size);
    /* we could set this to the unmodified min_size but this is safer
     * if the user lost the ptr and uses NULL now */
    if (!ptr)
        min_size = 0;

    *size = min_size;

    return ptr;
}
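
/* Typical caller pattern (hypothetical names): the caller keeps both the
 * pointer and the currently allocated size, and av_fast_realloc() only grows
 * the buffer (with roughly 1/16 headroom) when min_size exceeds *size:
 *
 *     buf = av_fast_realloc(buf, &buf_size, needed);
 *     if (!buf)
 *         return AVERROR(ENOMEM);   // buf_size has been reset to 0
 */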

static inline void fast_malloc(void *ptr, unsigned int *size, size_t min_size, int zero_realloc)
{
    size_t max_size;
    void *val;

    memcpy(&val, ptr, sizeof(val));
    if (min_size <= *size) {
        av_assert0(val || !min_size);
        return;
    }

    max_size = atomic_load_explicit(&max_alloc_size, memory_order_relaxed);
    /* *size is an unsigned, so the real maximum is <= UINT_MAX. */
    max_size = FFMIN(max_size, UINT_MAX);

    if (min_size > max_size) {
        av_freep(ptr);
        *size = 0;
        return;
    }
    min_size = FFMIN(max_size, FFMAX(min_size + min_size / 16 + 32, min_size));
    av_freep(ptr);
    val = zero_realloc ? av_mallocz(min_size) : av_malloc(min_size);
    memcpy(ptr, &val, sizeof(val));
    if (!val)
        min_size = 0;
    *size = min_size;
    return;
}

void av_fast_malloc(void *ptr, unsigned int *size, size_t min_size)
{
    fast_malloc(ptr, size, min_size, 0);
}

void av_fast_mallocz(void *ptr, unsigned int *size, size_t min_size)
{
    fast_malloc(ptr, size, min_size, 1);
}
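
/* Sketch of the difference from av_fast_realloc() (hypothetical names):
 * fast_malloc() frees the old buffer before allocating the new one, so
 * existing contents are NOT preserved across a grow; it is meant for scratch
 * buffers that are fully rewritten each time:
 *
 *     av_fast_malloc(&buf, &buf_size, needed);
 *     if (!buf)
 *         return AVERROR(ENOMEM);   // buf is NULL and buf_size is 0 here
 */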

int av_size_mult(size_t a, size_t b, size_t *r)
{
    return size_mult(a, b, r);
}