Diffstat (limited to 'media/ffvpx/libavutil/buffer.c')
 media/ffvpx/libavutil/buffer.c | 96
 1 file changed, 13 insertions(+), 83 deletions(-)
diff --git a/media/ffvpx/libavutil/buffer.c b/media/ffvpx/libavutil/buffer.c
index 694e116a3..8d1aa5fa8 100644
--- a/media/ffvpx/libavutil/buffer.c
+++ b/media/ffvpx/libavutil/buffer.c
@@ -16,10 +16,10 @@
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
+#include <stdatomic.h>
#include <stdint.h>
#include <string.h>
-#include "atomic.h"
#include "buffer_internal.h"
#include "common.h"
#include "mem.h"
@@ -40,7 +40,8 @@ AVBufferRef *av_buffer_create(uint8_t *data, int size,
buf->size = size;
buf->free = free ? free : av_buffer_default_free;
buf->opaque = opaque;
- buf->refcount = 1;
+
+ atomic_init(&buf->refcount, 1);
if (flags & AV_BUFFER_FLAG_READONLY)
buf->flags |= BUFFER_FLAG_READONLY;
@@ -98,7 +99,7 @@ AVBufferRef *av_buffer_ref(AVBufferRef *buf)
*ret = *buf;
- avpriv_atomic_int_add_and_fetch(&buf->buffer->refcount, 1);
+ atomic_fetch_add_explicit(&buf->buffer->refcount, 1, memory_order_relaxed);
return ret;
}
@@ -115,7 +116,7 @@ static void buffer_replace(AVBufferRef **dst, AVBufferRef **src)
} else
av_freep(dst);
- if (!avpriv_atomic_int_add_and_fetch(&b->refcount, -1)) {
+ if (atomic_fetch_add_explicit(&b->refcount, -1, memory_order_acq_rel) == 1) {
b->free(b->opaque, b->data);
av_freep(&b);
}
@@ -134,7 +135,7 @@ int av_buffer_is_writable(const AVBufferRef *buf)
if (buf->buffer->flags & AV_BUFFER_FLAG_READONLY)
return 0;
- return avpriv_atomic_int_get(&buf->buffer->refcount) == 1;
+ return atomic_load(&buf->buffer->refcount) == 1;
}
void *av_buffer_get_opaque(const AVBufferRef *buf)
@@ -144,7 +145,7 @@ void *av_buffer_get_opaque(const AVBufferRef *buf)
int av_buffer_get_ref_count(const AVBufferRef *buf)
{
- return buf->buffer->refcount;
+ return atomic_load(&buf->buffer->refcount);
}
int av_buffer_make_writable(AVBufferRef **pbuf)
@@ -191,7 +192,7 @@ int av_buffer_realloc(AVBufferRef **pbuf, int size)
return 0;
if (!(buf->buffer->flags & BUFFER_FLAG_REALLOCATABLE) ||
- !av_buffer_is_writable(buf)) {
+ !av_buffer_is_writable(buf) || buf->data != buf->buffer->data) {
/* cannot realloc, allocate a new reallocable buffer and copy data */
AVBufferRef *new = NULL;
@@ -229,7 +230,7 @@ AVBufferPool *av_buffer_pool_init2(int size, void *opaque,
pool->alloc2 = alloc;
pool->pool_free = pool_free;
- avpriv_atomic_int_set(&pool->refcount, 1);
+ atomic_init(&pool->refcount, 1);
return pool;
}
@@ -245,7 +246,7 @@ AVBufferPool *av_buffer_pool_init(int size, AVBufferRef* (*alloc)(int size))
pool->size = size;
pool->alloc = alloc ? alloc : av_buffer_alloc;
- avpriv_atomic_int_set(&pool->refcount, 1);
+ atomic_init(&pool->refcount, 1);
return pool;
}
@@ -280,48 +281,10 @@ void av_buffer_pool_uninit(AVBufferPool **ppool)
pool = *ppool;
*ppool = NULL;
- if (!avpriv_atomic_int_add_and_fetch(&pool->refcount, -1))
+ if (atomic_fetch_add_explicit(&pool->refcount, -1, memory_order_acq_rel) == 1)
buffer_pool_free(pool);
}
-#if USE_ATOMICS
-/* remove the whole buffer list from the pool and return it */
-static BufferPoolEntry *get_pool(AVBufferPool *pool)
-{
- BufferPoolEntry *cur = *(void * volatile *)&pool->pool, *last = NULL;
-
- while (cur != last) {
- last = cur;
- cur = avpriv_atomic_ptr_cas((void * volatile *)&pool->pool, last, NULL);
- if (!cur)
- return NULL;
- }
-
- return cur;
-}
-
-static void add_to_pool(BufferPoolEntry *buf)
-{
- AVBufferPool *pool;
- BufferPoolEntry *cur, *end = buf;
-
- if (!buf)
- return;
- pool = buf->pool;
-
- while (end->next)
- end = end->next;
-
- while (avpriv_atomic_ptr_cas((void * volatile *)&pool->pool, NULL, buf)) {
- /* pool is not empty, retrieve it and append it to our list */
- cur = get_pool(pool);
- end->next = cur;
- while (end->next)
- end = end->next;
- }
-}
-#endif
-
static void pool_release_buffer(void *opaque, uint8_t *data)
{
BufferPoolEntry *buf = opaque;
@@ -330,16 +293,12 @@ static void pool_release_buffer(void *opaque, uint8_t *data)
if(CONFIG_MEMORY_POISONING)
memset(buf->data, FF_MEMORY_POISON, pool->size);
-#if USE_ATOMICS
- add_to_pool(buf);
-#else
ff_mutex_lock(&pool->mutex);
buf->next = pool->pool;
pool->pool = buf;
ff_mutex_unlock(&pool->mutex);
-#endif
- if (!avpriv_atomic_int_add_and_fetch(&pool->refcount, -1))
+ if (atomic_fetch_add_explicit(&pool->refcount, -1, memory_order_acq_rel) == 1)
buffer_pool_free(pool);
}
@@ -369,11 +328,6 @@ static AVBufferRef *pool_alloc_buffer(AVBufferPool *pool)
ret->buffer->opaque = buf;
ret->buffer->free = pool_release_buffer;
-#if USE_ATOMICS
- avpriv_atomic_int_add_and_fetch(&pool->refcount, 1);
- avpriv_atomic_int_add_and_fetch(&pool->nb_allocated, 1);
-#endif
-
return ret;
}
@@ -382,29 +336,6 @@ AVBufferRef *av_buffer_pool_get(AVBufferPool *pool)
AVBufferRef *ret;
BufferPoolEntry *buf;
-#if USE_ATOMICS
- /* check whether the pool is empty */
- buf = get_pool(pool);
- if (!buf && pool->refcount <= pool->nb_allocated) {
- av_log(NULL, AV_LOG_DEBUG, "Pool race dectected, spining to avoid overallocation and eventual OOM\n");
- while (!buf && avpriv_atomic_int_get(&pool->refcount) <= avpriv_atomic_int_get(&pool->nb_allocated))
- buf = get_pool(pool);
- }
-
- if (!buf)
- return pool_alloc_buffer(pool);
-
- /* keep the first entry, return the rest of the list to the pool */
- add_to_pool(buf->next);
- buf->next = NULL;
-
- ret = av_buffer_create(buf->data, pool->size, pool_release_buffer,
- buf, 0);
- if (!ret) {
- add_to_pool(buf);
- return NULL;
- }
-#else
ff_mutex_lock(&pool->mutex);
buf = pool->pool;
if (buf) {
@@ -418,10 +349,9 @@ AVBufferRef *av_buffer_pool_get(AVBufferPool *pool)
ret = pool_alloc_buffer(pool);
}
ff_mutex_unlock(&pool->mutex);
-#endif
if (ret)
- avpriv_atomic_int_add_and_fetch(&pool->refcount, 1);
+ atomic_fetch_add_explicit(&pool->refcount, 1, memory_order_relaxed);
return ret;
}
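
The hunks above replace the legacy avpriv_atomic_* helpers with C11 <stdatomic.h> operations. The pattern they rely on is that atomic_fetch_add_explicit() returns the value the counter held before the addition, so a decrement whose return value is 1 means the caller just dropped the last reference and may free the buffer. Below is a minimal standalone sketch of that refcounting scheme; the my_buffer type and function names are hypothetical stand-ins, not FFmpeg API, but the memory orderings mirror the patch: relaxed when taking a reference, acquire-release when dropping one.

    #include <stdatomic.h>
    #include <stdint.h>
    #include <stdlib.h>

    /* Hypothetical stand-in for AVBuffer: a byte array plus a C11 atomic refcount. */
    typedef struct my_buffer {
        uint8_t *data;
        atomic_uint refcount;
    } my_buffer;

    static my_buffer *my_buffer_create(size_t size)
    {
        my_buffer *buf = malloc(sizeof(*buf));
        if (!buf)
            return NULL;
        buf->data = malloc(size);
        if (!buf->data) {
            free(buf);
            return NULL;
        }
        /* atomic_init() is sufficient: no other thread can see the buffer yet. */
        atomic_init(&buf->refcount, 1);
        return buf;
    }

    static my_buffer *my_buffer_ref(my_buffer *buf)
    {
        /* Taking an extra reference imposes no ordering requirements of its own,
         * hence memory_order_relaxed, matching av_buffer_ref() in the diff. */
        atomic_fetch_add_explicit(&buf->refcount, 1, memory_order_relaxed);
        return buf;
    }

    static void my_buffer_unref(my_buffer *buf)
    {
        /* fetch_add returns the previous value, so "== 1" means this call
         * released the last reference. acq_rel orders all earlier writes to the
         * buffer before the free, whichever thread ends up performing it. */
        if (atomic_fetch_add_explicit(&buf->refcount, -1, memory_order_acq_rel) == 1) {
            free(buf->data);
            free(buf);
        }
    }

    int main(void)
    {
        my_buffer *a = my_buffer_create(16);
        if (!a)
            return 1;
        my_buffer *b = my_buffer_ref(a);   /* refcount: 2 */
        my_buffer_unref(a);                /* refcount: 1 */
        my_buffer_unref(b);                /* refcount: 0 -> buffer freed */
        return 0;
    }

The same before-the-operation convention explains why the old code checked !avpriv_atomic_int_add_and_fetch(..., -1) (add-and-fetch returns the new value, zero) while the new code checks atomic_fetch_add_explicit(..., -1, ...) == 1 (fetch-add returns the old value, one).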