+#endif
+
+#undef vlc_gc_init
+#undef vlc_hold
+#undef vlc_release
+
+/**
+ * Atomically set the reference count to 1.
+ * Must be called once, before the object is shared with other threads.
+ * @param p_gc reference counted object
+ * @param pf_destruct destruction callback
+ * @return p_gc.
+ */
+void *vlc_gc_init (gc_object_t *p_gc, void (*pf_destruct) (gc_object_t *))
+{
+ /* Plain (non-atomic) stores: no other thread can see the object yet. */
+ p_gc->pf_destructor = pf_destruct;
+
+ p_gc->refs = 1;
+#ifdef USE_SYNC
+ /* Full memory barrier: make the stores above visible before the object
+  * is published to other threads. */
+ __sync_synchronize ();
+#elif defined(__APPLE__)
+ OSMemoryBarrier ();
+#else
+ /* Nobody else can possibly lock the spin - it's there as a barrier */
+ vlc_spin_init (&p_gc->spin);
+ vlc_spin_lock (&p_gc->spin);
+ vlc_spin_unlock (&p_gc->spin);
+#endif
+ return p_gc;
+}
+
+/**
+ * Atomically increment the reference count.
+ * The caller must already hold a reference to the object.
+ * @param p_gc reference counted object
+ * @return p_gc.
+ */
+void *vlc_hold (gc_object_t * p_gc)
+{
+ uintptr_t refs;
+ assert( p_gc );
+
+#ifdef USE_SYNC
+ /* refs receives the value *before* the increment */
+ refs = __sync_fetch_and_add (&p_gc->refs, 1);
+#elif defined(__APPLE__)
+ /* OSAtomicIncrement32Barrier returns the *new* value; subtract 1 to
+  * recover the previous count, matching the fetch-and-add path above.
+  * NOTE(review): the (int*) cast assumes the counter is 32 bits wide;
+  * if refs is a uintptr_t on LP64 this only updates its low half —
+  * confirm the declared type of the refs field. */
+ refs = OSAtomicIncrement32Barrier((int*)&p_gc->refs) - 1;
+#else
+ vlc_spin_lock (&p_gc->spin);
+ refs = p_gc->refs++;
+ vlc_spin_unlock (&p_gc->spin);
+#endif
+ /* A prior count of zero means the object was already destroyed
+  * (use-after-free by the caller). */
+ assert (refs > 0);
+ return p_gc;
+}
+
+/**
+ * Atomically decrement the reference count and, if it reaches zero,
+ * invoke the destruction callback installed by vlc_gc_init().
+ * The caller must hold a reference, which is consumed by this call.
+ * @param p_gc reference counted object.
+ */
+void vlc_release (gc_object_t *p_gc)
+{
+ /* uintptr_t (not unsigned): matches vlc_hold() and the refs field,
+  * avoiding silent truncation on LP64 targets. */
+ uintptr_t refs;
+
+ assert( p_gc );
+
+#ifdef USE_SYNC
+ /* refs receives the value *before* the decrement */
+ refs = __sync_fetch_and_sub (&p_gc->refs, 1);
+#elif defined(__APPLE__)
+ /* OSAtomicDecrement32Barrier returns the *new* value; add 1 to
+  * recover the previous count, matching the fetch-and-sub path above. */
+ refs = OSAtomicDecrement32Barrier((int*)&p_gc->refs) + 1;
+#else
+ vlc_spin_lock (&p_gc->spin);
+ refs = p_gc->refs--;
+ vlc_spin_unlock (&p_gc->spin);
+#endif
+
+ /* A prior count of zero means a double release. */
+ assert (refs > 0);
+ if (refs == 1)
+ {
+#ifdef USE_SYNC
+#elif defined(__APPLE__)
+#else
+ /* Last reference is gone: the barrier spin lock is no longer needed. */
+ vlc_spin_destroy (&p_gc->spin);
+#endif
+ p_gc->pf_destructor (p_gc);
+ }
+}