- copy kernel headers (waiting for libatomic..)

Original commit message from CVS:
- copy kernel headers (waiting for libatomic..)
- Make sure the atomic stuff is never seen by the app
- inline atomic stuff for core only, expose non-inlined version to apps.

hoping this one works... please test
Wim Taymans, 2002-12-31 03:21:08 +00:00
commit 132bab1098, parent aea79e82e1
11 changed files with 551 additions and 114 deletions
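
How the split described in the commit message is expected to hang together, as a condensed sketch drawn from the gstmacros.h, gstatomic.h and gstatomic.c hunks below (simplified, not a verbatim copy of any one file):

/* gstmacros.h: inside the core, GST_INLINE_FUNC expands to a GNU89
 * "extern __inline__", so any core file that includes gstatomic_impl.h
 * gets the per-architecture bodies inlined and emits no symbols. */
#if defined (__GNUC__) && !defined (GST_IMPLEMENT_INLINES)
# define GST_INLINE_FUNC extern __inline__
# define GST_CAN_INLINE 1
#else
# define GST_INLINE_FUNC extern
#endif

/* gstatomic.h: all an application ever sees is the plain declaration. */
void gst_atomic_int_inc (GstAtomicInt *aint);

/* gstatomic.c: defining GST_IMPLEMENT_INLINES first turns GST_INLINE_FUNC
 * into plain "extern", so one out-of-line copy of every function is
 * compiled into libgstreamer for applications to link against. */
#define GST_IMPLEMENT_INLINES 1
#define __GST_ATOMIC_C__
#include "gstatomic.h"
#include "gstatomic_impl.h"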

gst/Makefile.am

@ -52,6 +52,7 @@ libgstreamer_@GST_MAJORMINOR@_la_SOURCES = \
gstenumtypes.c \
gstobject.c \
$(GST_AUTOPLUG_SRC) \
gstatomic.c \
gstbin.c \
gstbuffer.c \
gstbufferpool-default.c \
@ -153,6 +154,7 @@ libgstreamer_@GST_MAJORMINOR@include_HEADERS = $(gst_headers) $(built_headers)
noinst_HEADERS = \
gst_private.h \
gstatomic_impl.h \
gstdata_private.h \
gstarch.h \
cothreads.h

gst/gstatomic.c (new file, 25 lines added)

@ -0,0 +1,25 @@
/* GStreamer
* Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#define GST_IMPLEMENT_INLINES 1
#define __GST_ATOMIC_C__
#include "gstatomic.h"
#include "gstatomic_impl.h"

gst/gstatomic.h

@ -20,15 +20,6 @@
#ifndef __GST_ATOMIC_H__
#define __GST_ATOMIC_H__
/* FIXME */
#ifdef HAVE_CONFIG_H
# include "config.h"
#endif
#ifdef HAVE_ATOMIC_H
# include <asm/atomic.h>
#endif
#include <glib.h>
G_BEGIN_DECLS
@ -36,82 +27,19 @@ G_BEGIN_DECLS
typedef struct _GstAtomicInt GstAtomicInt;
struct _GstAtomicInt {
#ifdef HAVE_ATOMIC_H
union {
atomic_t value;
struct {
int value;
GMutex *lock;
} unused;
} v;
#else
int value;
GMutex *lock;
#endif
volatile gint counter;
GMutex *lock; /* for C fallback */
};
#ifdef HAVE_ATOMIC_H
/* atomic functions */
#define GST_ATOMIC_INT_INIT(ref, val) (atomic_set(&((ref)->v.value), (val)))
#define GST_ATOMIC_INT_FREE(ref)
void gst_atomic_int_init (GstAtomicInt *aint, gint val);
void gst_atomic_int_destroy (GstAtomicInt *aint);
void gst_atomic_int_set (GstAtomicInt *aint, gint val);
gint gst_atomic_int_read (GstAtomicInt *aint);
void gst_atomic_int_add (GstAtomicInt *aint, gint val);
void gst_atomic_int_inc (GstAtomicInt *aint);
gboolean gst_atomic_int_dec_and_test (GstAtomicInt *aint);
#define GST_ATOMIC_INT_SET(ref,val) (atomic_set(&((ref)->v.value), (val)))
#define GST_ATOMIC_INT_VALUE(ref) (atomic_read(&((ref)->v.value)))
#define GST_ATOMIC_INT_READ(ref,res) (*res = atomic_read(&((ref)->v.value)))
#define GST_ATOMIC_INT_INC(ref) (atomic_inc (&((ref)->v.value)))
#define GST_ATOMIC_INT_DEC_AND_TEST(ref,zero) (*zero = atomic_dec_and_test (&((ref)->v.value)))
#define GST_ATOMIC_INT_ADD(ref, count) (atomic_add ((count), &((ref)->v.value)))
#else
/* fallback using a lock */
#define GST_ATOMIC_INT_INIT(ref, val) \
G_STMT_START { \
(ref)->value = (val); \
(ref)->lock = g_mutex_new(); \
} G_STMT_END
#define GST_ATOMIC_INT_FREE(ref) g_mutex_free ((ref)->lock)
#define GST_ATOMIC_INT_SET(ref,val) \
G_STMT_START { \
g_mutex_lock ((ref)->lock); \
(ref)->value = (val); \
g_mutex_unlock ((ref)->lock); \
} G_STMT_END
#define GST_ATOMIC_INT_VALUE(ref) ((ref)->value)
#define GST_ATOMIC_INT_READ(ref,res) \
G_STMT_START { \
g_mutex_lock ((ref)->lock); \
*res = (ref)->value; \
g_mutex_unlock ((ref)->lock); \
} G_STMT_END
#define GST_ATOMIC_INT_INC(ref) \
G_STMT_START { \
g_mutex_lock ((ref)->lock); \
(ref)->value++; \
g_mutex_unlock ((ref)->lock); \
} G_STMT_END
#define GST_ATOMIC_INT_DEC_AND_TEST(ref,zero) \
G_STMT_START { \
g_mutex_lock ((ref)->lock); \
(ref)->value--; \
*zero = ((ref)->value == 0); \
g_mutex_unlock ((ref)->lock); \
} G_STMT_END
#define GST_ATOMIC_INT_ADD(ref, count) \
G_STMT_START { \
g_mutex_lock ((ref)->lock); \
(ref)->value += count; \
g_mutex_unlock ((ref)->lock); \
} G_STMT_END
#endif /* HAVE_ATOMIC_H */
G_END_DECLS
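
For applications, which only ever see the declarations above (the inline bodies in gstatomic_impl.h are core-internal), usage looks roughly like this. MyThing and its functions are hypothetical, but the refcount pattern mirrors what gstdata.c does later in this commit:

#include "gstatomic.h"   /* application side: non-inlined API only */

/* hypothetical refcounted object, for illustration */
typedef struct {
  GstAtomicInt refcount;
  gchar *name;
} MyThing;

static MyThing *
my_thing_new (const gchar *name)
{
  MyThing *thing = g_new0 (MyThing, 1);

  gst_atomic_int_init (&thing->refcount, 1);   /* start with one reference */
  thing->name = g_strdup (name);
  return thing;
}

static MyThing *
my_thing_ref (MyThing *thing)
{
  gst_atomic_int_inc (&thing->refcount);
  return thing;
}

static void
my_thing_unref (MyThing *thing)
{
  /* free only when the last reference is dropped */
  if (gst_atomic_int_dec_and_test (&thing->refcount)) {
    gst_atomic_int_destroy (&thing->refcount);
    g_free (thing->name);
    g_free (thing);
  }
}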

gst/gstatomic_impl.h (new file, 472 lines added)

@ -0,0 +1,472 @@
/* GStreamer
* Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef __GST_ATOMIC_IMPL_H__
#define __GST_ATOMIC_IMPL_H__
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include <glib.h>
#include "gstatomic.h"
#include "gstmacros.h"
G_BEGIN_DECLS
#if defined (GST_CAN_INLINE) || defined (__GST_ATOMIC_C__)
/***** Intel x86 *****/
#if defined (HAVE_CPU_I386)
#ifdef GST_CONFIG_NO_SMP
#define SMP_LOCK ""
#else
#define SMP_LOCK "lock ; "
#endif
GST_INLINE_FUNC void gst_atomic_int_init (GstAtomicInt *aint, gint val) { aint->counter = val; }
GST_INLINE_FUNC void gst_atomic_int_destroy (GstAtomicInt *aint) { }
GST_INLINE_FUNC void gst_atomic_int_set (GstAtomicInt *aint, gint val) { aint->counter = val; }
GST_INLINE_FUNC gint gst_atomic_int_read (GstAtomicInt *aint) { return aint->counter; }
GST_INLINE_FUNC void
gst_atomic_int_add (GstAtomicInt *aint, gint val)
{
__asm__ __volatile__(
SMP_LOCK "addl %1,%0"
:"=m" (aint->counter)
:"ir" (val), "m" (aint->counter));
}
GST_INLINE_FUNC void
gst_atomic_int_inc (GstAtomicInt *aint)
{
__asm__ __volatile__(
SMP_LOCK "incl %0"
:"=m" (aint->counter)
:"m" (aint->counter));
}
GST_INLINE_FUNC gboolean
gst_atomic_int_dec_and_test (GstAtomicInt *aint)
{
guchar res;
__asm__ __volatile__(
SMP_LOCK "decl %0; sete %1"
:"=m" (aint->counter), "=qm" (res)
:"m" (aint->counter) : "memory");
return res != 0;
}
/***** PowerPC *****/
#elif defined (HAVE_CPU_PPC)
#ifdef GST_CONFIG_NO_SMP
#define SMP_SYNC ""
#define SMP_ISYNC
#else
#define SMP_SYNC "sync"
#define SMP_ISYNC "\n\tisync"
#endif
/* Erratum #77 on the 405 means we need a sync or dcbt before every stwcx.
* The old ATOMIC_SYNC_FIX covered some but not all of this.
*/
#ifdef GST_CONFIG_IBM405_ERR77
#define PPC405_ERR77(ra,rb) "dcbt " #ra "," #rb ";"
#else
#define PPC405_ERR77(ra,rb)
#endif
GST_INLINE_FUNC void gst_atomic_int_init (GstAtomicInt *aint, gint val) { aint->counter = val; }
GST_INLINE_FUNC void gst_atomic_int_destroy (GstAtomicInt *aint) { }
GST_INLINE_FUNC void gst_atomic_int_set (GstAtomicInt *aint, gint val) { aint->counter = val; }
GST_INLINE_FUNC gint gst_atomic_int_read (GstAtomicInt *aint) { return aint->counter; }
GST_INLINE_FUNC void
gst_atomic_int_add (GstAtomicInt *aint, gint val)
{
int t;
__asm__ __volatile__(
"1: lwarx %0,0,%3 # atomic_add\n\
add %0,%2,%0\n"
PPC405_ERR77(0,%3)
" stwcx. %0,0,%3 \n\
bne- 1b"
: "=&r" (t), "=m" (aint->counter)
: "r" (val), "r" (&aint->counter), "m" (aint->counter)
: "cc");
}
GST_INLINE_FUNC void
gst_atomic_int_inc (GstAtomicInt *aint)
{
int t;
__asm__ __volatile__(
"1: lwarx %0,0,%2 # atomic_inc\n\
addic %0,%0,1\n"
PPC405_ERR77(0,%2)
" stwcx. %0,0,%2 \n\
bne- 1b"
: "=&r" (t), "=m" (aint->counter)
: "r" (&aint->counter), "m" (aint->counter)
: "cc");
}
GST_INLINE_FUNC gboolean
gst_atomic_int_dec_and_test (GstAtomicInt *aint)
{
int t;
__asm__ __volatile__(
"1: lwarx %0,0,%1 # atomic_dec_return\n\
addic %0,%0,-1\n"
PPC405_ERR77(0,%1)
" stwcx. %0,0,%1\n\
bne- 1b"
SMP_ISYNC
: "=&r" (t)
: "r" (&aint->counter)
: "cc", "memory");
return t == 0;
}
/***** DEC[/Compaq/HP?/Intel?] Alpha *****/
#elif defined(HAVE_CPU_ALPHA)
GST_INLINE_FUNC void gst_atomic_int_init (GstAtomicInt *aint, gint val) { aint->counter = val; }
GST_INLINE_FUNC void gst_atomic_int_destroy (GstAtomicInt *aint) { }
GST_INLINE_FUNC void gst_atomic_int_set (GstAtomicInt *aint, gint val) { aint->counter = val; }
GST_INLINE_FUNC gint gst_atomic_int_read (GstAtomicInt *aint) { return aint->counter; }
GST_INLINE_FUNC void
gst_atomic_int_add (GstAtomicInt *aint, gint val)
{
unsigned long temp;
__asm__ __volatile__(
"1: ldl_l %0,%1\n"
" addl %0,%2,%0\n"
" stl_c %0,%1\n"
" beq %0,2f\n"
".subsection 2\n"
"2: br 1b\n"
".previous"
:"=&r" (temp), "=m" (aint->counter)
:"Ir" (val), "m" (aint->counter));
}
GST_INLINE_FUNC void
gst_atomic_int_inc (GstAtomicInt *aint)
{
gst_atomic_int_add (aint, 1);
}
GST_INLINE_FUNC gboolean
gst_atomic_int_dec_and_test (GstAtomicInt *aint)
{
long temp, result;
int val = 1;
__asm__ __volatile__(
"1: ldl_l %0,%1\n"
" subl %0,%3,%2\n"
" subl %0,%3,%0\n"
" stl_c %0,%1\n"
" beq %0,2f\n"
" mb\n"
".subsection 2\n"
"2: br 1b\n"
".previous"
:"=&r" (temp), "=m" (aint->counter), "=&r" (result)
:"Ir" (val), "m" (aint->counter) : "memory");
return result == 0;
}
/***** Sun SPARC *****/
#elif defined(HAVE_CPU_SPARC)
GST_INLINE_FUNC void gst_atomic_int_destroy (GstAtomicInt *aint) { }
#ifdef GST_CONFIG_NO_SMP
GST_INLINE_FUNC void gst_atomic_int_init (GstAtomicInt *aint, gint val) { aint->counter = val; }
GST_INLINE_FUNC void gst_atomic_int_set (GstAtomicInt *aint, gint val) { aint->counter = val; }
GST_INLINE_FUNC gint gst_atomic_int_read (GstAtomicInt *aint) { return aint->counter; }
#else
GST_INLINE_FUNC void gst_atomic_int_init (GstAtomicInt *aint, gint val) { aint->counter = (val<<8); }
GST_INLINE_FUNC void gst_atomic_int_set (GstAtomicInt *aint, gint val) { aint->counter = (val<<8); }
/*
* For SMP the trick is you embed the spin lock byte within
* the word, use the low byte so signedness is easily retained
* via a quick arithmetic shift. It looks like this:
*
* ----------------------------------------
* | signed 24-bit counter value | lock | atomic_t
* ----------------------------------------
* 31 8 7 0
*/
GST_INLINE_FUNC gint
gst_atomic_int_read (GstAtomicInt *aint)
{
int ret = aint->counter;
while (ret & 0xff)
ret = aint->counter;
return ret >> 8;
}
#endif /* GST_CONFIG_NO_SMP */
GST_INLINE_FUNC void
gst_atomic_int_add (GstAtomicInt *aint, gint val)
{
register volatile int *ptr asm ("g1");
register int increment asm ("g2");
ptr = &aint->counter;
increment = val;
__asm__ __volatile__(
"mov %%o7, %%g4\n\t"
"call ___atomic_add\n\t"
" add %%o7, 8, %%o7\n"
: "=&r" (increment)
: "0" (increment), "r" (ptr)
: "g3", "g4", "g7", "memory", "cc");
}
GST_INLINE_FUNC void
gst_atomic_int_inc (GstAtomicInt *aint)
{
gst_atomic_int_add (aint, 1);
}
GST_INLINE_FUNC gboolean
gst_atomic_int_dec_and_test (GstAtomicInt *aint)
{
register volatile int *ptr asm ("g1");
register int increment asm ("g2");
ptr = &aint->counter;
increment = 1;
__asm__ __volatile__(
"mov %%o7, %%g4\n\t"
"call ___atomic_sub\n\t"
" add %%o7, 8, %%o7\n"
: "=&r" (increment)
: "0" (increment), "r" (ptr)
: "g3", "g4", "g7", "memory", "cc");
return increment == 0;
}
/***** MIPS *****/
#elif defined(HAVE_CPU_MIPS)
GST_INLINE_FUNC void gst_atomic_int_init (GstAtomicInt *aint, gint val) { aint->counter = val; }
GST_INLINE_FUNC void gst_atomic_int_destroy (GstAtomicInt *aint) { }
GST_INLINE_FUNC void gst_atomic_int_set (GstAtomicInt *aint, gint val) { aint->counter = val; }
GST_INLINE_FUNC gint gst_atomic_int_read (GstAtomicInt *aint) { return aint->counter; }
/* this only works on MIPS II and better */
GST_INLINE_FUNC void
gst_atomic_int_add (GstAtomicInt *aint, gint val)
{
unsigned long temp;
__asm__ __volatile__(
"1: ll %0, %1 # atomic_add\n"
" addu %0, %2 \n"
" sc %0, %1 \n"
" beqz %0, 1b \n"
: "=&r" (temp), "=m" (aint->counter)
: "Ir" (val), "m" (aint->counter));
}
GST_INLINE_FUNC void
gst_atomic_int_inc (GstAtomicInt *aint)
{
gst_atomic_int_add (aint, 1);
}
GST_INLINE_FUNC gboolean
gst_atomic_int_dec_and_test (GstAtomicInt *aint)
{
unsigned long temp, result;
int val = 1;
__asm__ __volatile__(
".set push \n"
".set noreorder # atomic_sub_return\n"
"1: ll %1, %2 \n"
" subu %0, %1, %3 \n"
" sc %0, %2 \n"
" beqz %0, 1b \n"
" subu %0, %1, %3 \n"
".set pop \n"
: "=&r" (result), "=&r" (temp), "=m" (aint->counter)
: "Ir" (val), "m" (aint->counter)
: "memory");
return result == 0;
}
/***** S/390 *****/
#elif defined(HAVE_CPU_S390)
GST_INLINE_FUNC void gst_atomic_int_init (GstAtomicInt *aint, gint val) { aint->counter = val; }
GST_INLINE_FUNC void gst_atomic_int_destroy (GstAtomicInt *aint) { }
GST_INLINE_FUNC void gst_atomic_int_set (GstAtomicInt *aint, gint val) { aint->counter = val; }
GST_INLINE_FUNC gint gst_atomic_int_read (GstAtomicInt *aint) { return aint->counter; }
#define __CS_LOOP(old_val, new_val, ptr, op_val, op_string) \
__asm__ __volatile__(" l %0,0(%3)\n" \
"0: lr %1,%0\n" \
op_string " %1,%4\n" \
" cs %0,%1,0(%3)\n" \
" jl 0b" \
: "=&d" (old_val), "=&d" (new_val), \
"+m" (((atomic_t *)(ptr))->counter) \
: "a" (ptr), "d" (op_val) : "cc" );
GST_INLINE_FUNC void
gst_atomic_int_add (GstAtomicInt *aint, gint val)
{
int old_val, new_val;
__CS_LOOP(old_val, new_val, aint, val, "ar");
}
GST_INLINE_FUNC void
gst_atomic_int_inc (GstAtomicInt *aint)
{
int old_val, new_val;
__CS_LOOP(old_val, new_val, aint, 1, "ar");
}
GST_INLINE_FUNC gboolean
gst_atomic_int_dec_and_test (GstAtomicInt *aint)
{
int old_val, new_val;
__CS_LOOP(old_val, new_val, aint, 1, "sr");
return new_val == 0;
}
#else
#warning consider putting your architecture specific atomic implementations here
/*
* generic implementation
*/
GST_INLINE_FUNC void
gst_atomic_int_init (GstAtomicInt *aint, gint val)
{
aint->counter = val;
aint->lock = g_mutex_new ();
}
GST_INLINE_FUNC void
gst_atomic_int_destroy (GstAtomicInt *aint)
{
g_mutex_free (aint->lock);
}
GST_INLINE_FUNC void
gst_atomic_int_set (GstAtomicInt *aint, gint val)
{
g_mutex_lock (aint->lock);
aint->counter = val;
g_mutex_unlock (aint->lock);
}
GST_INLINE_FUNC gint
gst_atomic_int_read (GstAtomicInt *aint)
{
gint res;
g_mutex_lock (aint->lock);
res = aint->counter;
g_mutex_unlock (aint->lock);
return res;
}
GST_INLINE_FUNC void
gst_atomic_int_add (GstAtomicInt *aint, gint val)
{
g_mutex_lock (aint->lock);
aint->counter += val;
g_mutex_unlock (aint->lock);
}
GST_INLINE_FUNC void
gst_atomic_int_inc (GstAtomicInt *aint)
{
g_mutex_lock (aint->lock);
aint->counter++;
g_mutex_unlock (aint->lock);
}
GST_INLINE_FUNC gboolean
gst_atomic_int_dec_and_test (GstAtomicInt *aint)
{
gboolean res;
g_mutex_lock (aint->lock);
aint->counter--;
res = (aint->counter == 0);
g_mutex_unlock (aint->lock);
return res;
}
#endif
/*
* common functions
*/
GST_INLINE_FUNC GstAtomicInt*
gst_atomic_int_new (gint val)
{
GstAtomicInt *aint;
aint = g_new0 (GstAtomicInt, 1);
gst_atomic_int_init (aint, val);
return aint;
}
GST_INLINE_FUNC void
gst_atomic_int_free (GstAtomicInt *aint)
{
gst_atomic_int_destroy (aint);
g_free (aint);
}
#endif /* defined (GST_CAN_INLINE) || defined (__GST_ATOMIC_C__) */
G_END_DECLS
#endif /* __GST_ATOMIC_IMPL_H__ */
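
One detail worth a worked example is the SPARC SMP case above: the spin-lock byte is packed into the low 8 bits of the counter word, so stores shift the value left by 8 and reads wait until the low byte is clear, then shift back. A stand-alone illustration of that encoding (not part of the commit):

#include <stdio.h>

int
main (void)
{
  /* gst_atomic_int_set (aint, 5) on SPARC SMP stores 5 << 8 = 0x500:
   * the counter lives in bits 31..8, the lock byte in bits 7..0. */
  int counter = 5 << 8;

  if ((counter & 0xff) == 0)              /* lock byte clear: safe to read */
    printf ("%d\n", counter >> 8);        /* prints 5; the arithmetic shift
                                           * keeps the sign of the counter */
  return 0;
}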

gst/gstbuffer.c

@ -24,6 +24,7 @@
#define GST_DEBUG_FORCE_DISABLE
#include "gst_private.h"
#include "gstatomic_impl.h"
#include "gstdata_private.h"
#include "gstbuffer.h"
#include "gstmemchunk.h"

gst/gstdata.c

@ -21,9 +21,11 @@
*/
/* this file makes too much noise for most debugging sessions */
#define GST_DEBUG_FORCE_DISABLE
#include "gst_private.h"
#include "gstatomic_impl.h"
#include "gstdata.h"
#include "gstdata_private.h"
#include "gstlog.h"
@ -115,7 +117,7 @@ gst_data_needs_copy_on_write (GstData *data)
g_return_val_if_fail (data != NULL, FALSE);
GST_ATOMIC_INT_READ (&data->refcount, &refcount);
refcount = gst_atomic_int_read (&data->refcount);
if (refcount == 1 && !GST_DATA_FLAG_IS_SET (data, GST_DATA_READONLY))
return FALSE;
@ -143,7 +145,7 @@ gst_data_copy_on_write (GstData *data)
g_return_val_if_fail (data != NULL, NULL);
GST_ATOMIC_INT_READ (&data->refcount, &refcount);
refcount = gst_atomic_int_read (&data->refcount);
if (refcount == 1 && !GST_DATA_FLAG_IS_SET (data, GST_DATA_READONLY))
return GST_DATA (data);
@ -188,7 +190,7 @@ gst_data_ref (GstData *data)
g_return_val_if_fail (data != NULL, NULL);
g_return_val_if_fail (GST_DATA_REFCOUNT_VALUE(data) > 0, NULL);
GST_ATOMIC_INT_INC (&data->refcount);
gst_atomic_int_inc (&data->refcount);
return data;
}
@ -209,7 +211,7 @@ gst_data_ref_by_count (GstData *data, gint count)
g_return_val_if_fail (count >= 0, NULL);
g_return_val_if_fail (GST_DATA_REFCOUNT_VALUE(data) > 0, NULL);
GST_ATOMIC_INT_ADD (&data->refcount, count);
gst_atomic_int_add (&data->refcount, count);
return data;
}
@ -236,7 +238,7 @@ gst_data_unref (GstData *data)
data, GST_DATA_REFCOUNT_VALUE (data));
g_return_if_fail (GST_DATA_REFCOUNT_VALUE (data) > 0);
GST_ATOMIC_INT_DEC_AND_TEST (&data->refcount, &zero);
zero = gst_atomic_int_dec_and_test (&data->refcount);
/* if we ended up with the refcount at zero, free the data */
if (zero) {

gst/gstdata.h

@ -57,8 +57,7 @@ typedef enum
/* refcount */
#define GST_DATA_REFCOUNT(data) ((GST_DATA(data))->refcount)
#define GST_DATA_REFCOUNT_VALUE(data) (GST_ATOMIC_INT_VALUE((&GST_DATA_REFCOUNT (data))))
#define GST_DATA_REFCOUNT_READ(data,value) (GST_ATOMIC_INT_READ(&(GST_DATA_REFCOUNT (data)),value)
#define GST_DATA_REFCOUNT_VALUE(data) (gst_atomic_int_read (&(GST_DATA(data))->refcount))
/* copy/free functions */
#define GST_DATA_COPY_FUNC(data) (GST_DATA(data)->copy)

gst/gstdata_private.h

@ -20,9 +20,11 @@
* Boston, MA 02111-1307, USA.
*/
#include "gstatomic_impl.h"
#define _GST_DATA_INIT(data, ptype, pflags, pfree, pcopy) \
G_STMT_START { \
GST_ATOMIC_INT_INIT (&(data)->refcount, 1); \
gst_atomic_int_init (&(data)->refcount, 1); \
(data)->type = ptype; \
(data)->flags = pflags; \
(data)->free = pfree; \
@ -31,6 +33,6 @@ G_STMT_START { \
#define _GST_DATA_DISPOSE(data) \
G_STMT_START { \
GST_ATOMIC_INT_FREE (&(data)->refcount); \
gst_atomic_int_destroy (&(data)->refcount); \
} G_STMT_END;
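
A sketch of how a GstData-based type is expected to use these private macros after this change; GstDummy and its functions are made up for illustration, but the init/dispose pattern is the one the core data types follow:

/* Hypothetical GstData subclass, showing the intended use of the
 * private init/dispose macros above. */
typedef struct {
  GstData data;        /* must come first so GST_DATA() casts work */
} GstDummy;

static void
gst_dummy_free (GstData *data)
{
  /* last reference gone: release the refcount's resources, then the memory */
  _GST_DATA_DISPOSE (data);
  g_free (data);
}

static GstDummy *
gst_dummy_new (void)
{
  GstDummy *dummy = g_new0 (GstDummy, 1);

  /* sets type/flags/free/copy and starts the refcount at 1
   * via gst_atomic_int_init() */
  _GST_DATA_INIT (GST_DATA (dummy),
                  0,                /* type (placeholder) */
                  0,                /* flags */
                  gst_dummy_free,   /* called when the refcount hits zero */
                  NULL);            /* no copy function */
  return dummy;
}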

gst/gstmacros.h

@ -20,13 +20,21 @@
#ifndef __GST_MACROS_H__
#define __GST_MACROS_H__
#if __GNUC__ > 2 || (__GNUC__ == 2 && __GNUC_MINOR__ > 4)
# define GST_GNUC_CONSTRUCTOR \
#if __GNUC__ > 2 || (__GNUC__ == 2 && __GNUC_MINOR__ > 4)
# define GST_GNUC_CONSTRUCTOR \
__attribute__ ((constructor))
#else /* !__GNUC__ */
# define GST_GNUC_CONSTRUCTOR
# define GST_GNUC_CONSTRUCTOR
#endif /* !__GNUC__ */
#if defined (__GNUC__) && !defined (GST_IMPLEMENT_INLINES)
# define GST_INLINE_FUNC extern __inline__
# define GST_CAN_INLINE 1
#else
# define GST_INLINE_FUNC extern
# undef GST_CAN_INLINE
#endif
#endif /* __GST_MACROS_H__ */

gst/gstmemchunk.c

@ -22,6 +22,7 @@
#include "gstlog.h"
#include "gstutils.h"
#include "gstmemchunk.h"
#define __GST_TRASH_STACK_C__
#include "gsttrashstack.h"

gst/gsttrashstack.h

@ -20,11 +20,8 @@
#ifndef __GST_TRASH_STACK_H__
#define __GST_TRASH_STACK_H__
#ifdef HAVE_CONFIG_H
# include "config.h"
#endif
#include <glib.h>
#include "gstmacros.h"
G_BEGIN_DECLS
@ -41,34 +38,34 @@ struct _GstTrashStack {
GMutex *lock; /* lock for C fallback */
};
G_INLINE_FUNC GstTrashStack* gst_trash_stack_new (void);
G_INLINE_FUNC void gst_trash_stack_init (GstTrashStack *stack);
G_INLINE_FUNC void gst_trash_stack_destroy (GstTrashStack *stack);
G_INLINE_FUNC void gst_trash_stack_free (GstTrashStack *stack);
GST_INLINE_FUNC GstTrashStack* gst_trash_stack_new (void);
GST_INLINE_FUNC void gst_trash_stack_init (GstTrashStack *stack);
GST_INLINE_FUNC void gst_trash_stack_destroy (GstTrashStack *stack);
GST_INLINE_FUNC void gst_trash_stack_free (GstTrashStack *stack);
G_INLINE_FUNC void gst_trash_stack_push (GstTrashStack *stack, gpointer mem);
G_INLINE_FUNC gpointer gst_trash_stack_pop (GstTrashStack *stack);
GST_INLINE_FUNC void gst_trash_stack_push (GstTrashStack *stack, gpointer mem);
GST_INLINE_FUNC gpointer gst_trash_stack_pop (GstTrashStack *stack);
#if defined (G_CAN_INLINE) || defined (__GST_TRASH_STACK_C__)
#if defined (GST_CAN_INLINE) || defined (__GST_TRASH_STACK_C__)
#if defined (__i386__) && defined (__GNUC__) && __GNUC__ >= 2
/*
* intel ia32 optimized lockfree implementations
*/
G_INLINE_FUNC void
GST_INLINE_FUNC void
gst_trash_stack_init (GstTrashStack *stack)
{
stack->head = NULL;
stack->count = 0;
}
G_INLINE_FUNC void
GST_INLINE_FUNC void
gst_trash_stack_destroy (GstTrashStack *stack)
{
}
G_INLINE_FUNC void
GST_INLINE_FUNC void
gst_trash_stack_push (GstTrashStack *stack, gpointer mem)
{
__asm__ __volatile__ (
@ -83,7 +80,7 @@ gst_trash_stack_push (GstTrashStack *stack, gpointer mem)
);
}
G_INLINE_FUNC gpointer
GST_INLINE_FUNC gpointer
gst_trash_stack_pop (GstTrashStack *stack)
{
GstTrashStackElement *head;
@ -120,20 +117,20 @@ gst_trash_stack_pop (GstTrashStack *stack)
/*
* generic implementation
*/
G_INLINE_FUNC void
GST_INLINE_FUNC void
gst_trash_stack_init (GstTrashStack *stack)
{
stack->head = NULL;
stack->lock = g_mutex_new();
}
G_INLINE_FUNC void
GST_INLINE_FUNC void
gst_trash_stack_destroy (GstTrashStack *stack)
{
g_mutex_free (stack->lock);
}
G_INLINE_FUNC void
GST_INLINE_FUNC void
gst_trash_stack_push (GstTrashStack *stack, gpointer mem)
{
GstTrashStackElement *elem = (GstTrashStackElement *) mem;
@ -144,7 +141,7 @@ gst_trash_stack_push (GstTrashStack *stack, gpointer mem)
g_mutex_unlock (stack->lock);
}
G_INLINE_FUNC gpointer
GST_INLINE_FUNC gpointer
gst_trash_stack_pop (GstTrashStack *stack)
{
GstTrashStackElement *head;
@ -163,7 +160,7 @@ gst_trash_stack_pop (GstTrashStack *stack)
/*
* common functions
*/
G_INLINE_FUNC GstTrashStack*
GST_INLINE_FUNC GstTrashStack*
gst_trash_stack_new (void)
{
GstTrashStack *stack;
@ -174,7 +171,7 @@ gst_trash_stack_new (void)
return stack;
}
G_INLINE_FUNC void
GST_INLINE_FUNC void
gst_trash_stack_free (GstTrashStack *stack)
{
gst_trash_stack_destroy (stack);