View | Details | Raw Unified | Return to bug 42806
Collapse All | Expand All

(-)apr/atomic/unix/ppc.c (-17 / +46 lines)
Lines 18-54 Link Here
18
18
19
#ifdef USE_ATOMICS_PPC
19
#ifdef USE_ATOMICS_PPC
20
20
21
#ifdef HAVE_POWER4_LWSYNC
22
/* Lightweight Sync (POWER4 new instruction) */
23
#   define PPC_SYNC    "	lwsync\n"
24
#else
25
#   define PPC_SYNC    "	sync\n"
26
#endif
27
21
#ifdef PPC405_ERRATA
28
#ifdef PPC405_ERRATA
22
#   define PPC405_ERR77_SYNC   "	sync\n"
29
#   define PPC405_ERR77_SYNC   "	sync\n"
23
#else
30
#else
24
#   define PPC405_ERR77_SYNC
31
#   define PPC405_ERR77_SYNC
25
#endif
32
#endif
26
33
34
#define barrier() asm volatile ("sync" : : : "memory")
35
27
APR_DECLARE(apr_status_t) apr_atomic_init(apr_pool_t *p)
36
APR_DECLARE(apr_status_t) apr_atomic_init(apr_pool_t *p)
28
{
37
{
29
    return APR_SUCCESS;
38
    return APR_SUCCESS;
30
}
39
}
31
40
32
APR_DECLARE(apr_uint32_t) apr_atomic_read32(volatile apr_uint32_t *mem)
41
APR_DECLARE(apr_uint32_t) apr_atomic_read32(volatile apr_uint32_t *mem)
33
{
42
{
43
    barrier();
44
34
    return *mem;
45
    return *mem;
35
}
46
}
36
47
37
APR_DECLARE(void) apr_atomic_set32(volatile apr_uint32_t *mem, apr_uint32_t val)
48
APR_DECLARE(void) apr_atomic_set32(volatile apr_uint32_t *mem, apr_uint32_t val)
38
{
49
{
39
    *mem = val;
50
    *mem = val;
51
52
    barrier();
40
}
53
}
41
54
42
APR_DECLARE(apr_uint32_t) apr_atomic_add32(volatile apr_uint32_t *mem, apr_uint32_t val)
55
APR_DECLARE(apr_uint32_t) apr_atomic_add32(volatile apr_uint32_t *mem, apr_uint32_t val)
43
{
56
{
44
    apr_uint32_t prev, temp;
57
    apr_uint32_t prev, temp;
45
58
46
    asm volatile ("loop_%=:\n"                  /* lost reservation     */
59
    asm volatile (PPC_SYNC
60
                  "loop_%=:\n"                  /* lost reservation     */
47
                  "	lwarx   %0,0,%3\n"      /* load and reserve     */
61
                  "	lwarx   %0,0,%3\n"      /* load and reserve     */
48
                  "	add     %1,%0,%4\n"     /* add val and prev     */
62
                  "	add     %1,%0,%4\n"     /* add val and prev     */
49
                  PPC405_ERR77_SYNC             /* ppc405 Erratum 77    */
63
                  PPC405_ERR77_SYNC             /* ppc405 Erratum 77    */
50
                  "	stwcx.  %1,0,%3\n"      /* store new value      */
64
                  "	stwcx.  %1,0,%3\n"      /* store new value      */
51
                  "	bne-    loop_%=\n"      /* loop if lost         */
65
                  "	bne-    loop_%=\n"      /* loop if lost         */
66
                  "	isync\n"                /* memory barrier       */
52
                  : "=&r" (prev), "=&r" (temp), "=m" (*mem)
67
                  : "=&r" (prev), "=&r" (temp), "=m" (*mem)
53
                  : "b" (mem), "r" (val)
68
                  : "b" (mem), "r" (val)
54
                  : "cc", "memory");
69
                  : "cc", "memory");
Lines 56-86 APR_DECLARE(apr_uint32_t) apr_atomic_add Link Here
56
    return prev;
71
    return prev;
57
}
72
}
58
73
59
APR_DECLARE(void) apr_atomic_sub32(volatile apr_uint32_t *mem, apr_uint32_t val)
74
APR_DECLARE(void) apr_atomic_sub32(volatile apr_uint32_t *mem, apr_uint32_t val)
60
{
75
{
61
    apr_uint32_t temp;
76
    apr_uint32_t temp;
62
77
63
    asm volatile ("loop_%=:\n"                  /* lost reservation     */
78
    asm volatile (PPC_SYNC
79
                  "loop_%=:\n"                  /* lost reservation     */
64
                  "	lwarx   %0,0,%2\n"      /* load and reserve     */
80
                  "	lwarx   %0,0,%2\n"      /* load and reserve     */
65
                  "	subf    %0,%3,%0\n"     /* subtract val         */
81
                  "	subf    %0,%3,%0\n"     /* subtract val         */
66
                  PPC405_ERR77_SYNC             /* ppc405 Erratum 77    */
82
                  PPC405_ERR77_SYNC             /* ppc405 Erratum 77    */
67
                  "	stwcx.  %0,0,%2\n"      /* store new value      */
83
                  "	stwcx.  %0,0,%2\n"      /* store new value      */
68
                  "	bne-    loop_%=\n"      /* loop if lost         */
84
                  "	bne-    loop_%=\n"      /* loop if lost         */
85
                  "	isync\n"                /* memory barrier       */
69
                  : "=&r" (temp), "=m" (*mem)
86
                  : "=&r" (temp), "=m" (*mem)
70
                  : "b" (mem), "r" (val)
87
                  : "b" (mem), "r" (val)
71
                  : "cc", "memory");
88
                  : "cc", "memory");
72
}
89
}
73
90
74
APR_DECLARE(apr_uint32_t) apr_atomic_inc32(volatile apr_uint32_t *mem)
91
APR_DECLARE(apr_uint32_t) apr_atomic_inc32(volatile apr_uint32_t *mem)
75
{
92
{
76
    apr_uint32_t prev;
93
    apr_uint32_t prev;
77
94
78
    asm volatile ("loop_%=:\n"                  /* lost reservation     */
95
    asm volatile (PPC_SYNC
96
                  "loop_%=:\n"                  /* lost reservation     */
79
                  "	lwarx   %0,0,%2\n"      /* load and reserve     */
97
                  "	lwarx   %0,0,%2\n"      /* load and reserve     */
80
                  "	addi    %0,%0,1\n"      /* add immediate        */
98
                  "	addi    %0,%0,1\n"      /* add immediate        */
81
                  PPC405_ERR77_SYNC             /* ppc405 Erratum 77    */
99
                  PPC405_ERR77_SYNC             /* ppc405 Erratum 77    */
82
                  "	stwcx.  %0,0,%2\n"      /* store new value      */
100
                  "	stwcx.  %0,0,%2\n"      /* store new value      */
83
                  "	bne-    loop_%=\n"      /* loop if lost         */
101
                  "	bne-    loop_%=\n"      /* loop if lost         */
102
                  "	isync\n"                /* memory barrier       */
84
                  "	subi    %0,%0,1\n"      /* return old value     */
103
                  "	subi    %0,%0,1\n"      /* return old value     */
85
                  : "=&b" (prev), "=m" (*mem)
104
                  : "=&b" (prev), "=m" (*mem)
86
                  : "b" (mem), "m" (*mem)
105
                  : "b" (mem), "m" (*mem)
Lines 89-104 APR_DECLARE(apr_uint32_t) apr_atomic_inc Link Here
89
    return prev;
108
    return prev;
90
}
109
}
91
110
92
APR_DECLARE(int) apr_atomic_dec32(volatile apr_uint32_t *mem)
111
APR_DECLARE(int) apr_atomic_dec32(volatile apr_uint32_t *mem)
93
{
112
{
94
    apr_uint32_t prev;
113
    apr_uint32_t prev;
95
114
96
    asm volatile ("loop_%=:\n"                  /* lost reservation     */
115
    asm volatile (PPC_SYNC
116
                  "loop_%=:\n"                  /* lost reservation     */
97
                  "	lwarx   %0,0,%2\n"      /* load and reserve     */
117
                  "	lwarx   %0,0,%2\n"      /* load and reserve     */
98
                  "	subi    %0,%0,1\n"      /* subtract immediate   */
118
                  "	subi    %0,%0,1\n"      /* subtract immediate   */
99
                  PPC405_ERR77_SYNC             /* ppc405 Erratum 77    */
119
                  PPC405_ERR77_SYNC             /* ppc405 Erratum 77    */
100
                  "	stwcx.  %0,0,%2\n"      /* store new value      */
120
                  "	stwcx.  %0,0,%2\n"      /* store new value      */
101
                  "	bne-    loop_%=\n"      /* loop if lost         */
121
                  "	bne-    loop_%=\n"      /* loop if lost         */
122
                  "	isync\n"                /* memory barrier       */
102
                  : "=&b" (prev), "=m" (*mem)
123
                  : "=&b" (prev), "=m" (*mem)
103
                  : "b" (mem), "m" (*mem)
124
                  : "b" (mem), "m" (*mem)
104
                  : "cc", "memory");
125
                  : "cc", "memory");
Lines 106-117 APR_DECLARE(int) apr_atomic_dec32(volati Link Here
106
    return prev;
127
    return prev;
107
}
128
}
108
129
109
APR_DECLARE(apr_uint32_t) apr_atomic_cas32(volatile apr_uint32_t *mem, apr_uint32_t with,
130
APR_DECLARE(apr_uint32_t) apr_atomic_cas32(volatile apr_uint32_t *mem, apr_uint32_t with,
110
                                           apr_uint32_t cmp)
131
                                           apr_uint32_t cmp)
111
{
132
{
112
    apr_uint32_t prev;
133
    apr_uint32_t prev;
113
134
114
    asm volatile ("loop_%=:\n"                  /* lost reservation     */
135
    asm volatile (PPC_SYNC
136
                  "loop_%=:\n"                  /* lost reservation     */
115
                  "	lwarx   %0,0,%1\n"      /* load and reserve     */
137
                  "	lwarx   %0,0,%1\n"      /* load and reserve     */
116
                  "	cmpw    %0,%3\n"        /* compare operands     */
138
                  "	cmpw    %0,%3\n"        /* compare operands     */
117
                  "	bne-    exit_%=\n"      /* skip if not equal    */
139
                  "	bne-    exit_%=\n"      /* skip if not equal    */
Lines 119-124 APR_DECLARE(apr_uint32_t) apr_atomic_cas Link Here
119
                  "	stwcx.  %2,0,%1\n"      /* store new value      */
141
                  "	stwcx.  %2,0,%1\n"      /* store new value      */
120
                  "	bne-    loop_%=\n"      /* loop if lost         */
142
                  "	bne-    loop_%=\n"      /* loop if lost         */
121
                  "exit_%=:\n"                  /* not equal            */
143
                  "exit_%=:\n"                  /* not equal            */
144
                  "	isync\n"                /* memory barrier       */
122
                  : "=&r" (prev)
145
                  : "=&r" (prev)
123
                  : "b" (mem), "r" (with), "r" (cmp)
146
                  : "b" (mem), "r" (with), "r" (cmp)
124
                  : "cc", "memory");
147
                  : "cc", "memory");
Lines 126-140 APR_DECLARE(apr_uint32_t) apr_atomic_cas Link Here
126
    return prev;
149
    return prev;
127
}
150
}
128
151
129
APR_DECLARE(apr_uint32_t) apr_atomic_xchg32(volatile apr_uint32_t *mem, apr_uint32_t val)
152
APR_DECLARE(apr_uint32_t) apr_atomic_xchg32(volatile apr_uint32_t *mem, apr_uint32_t val)
130
{
153
{
131
    apr_uint32_t prev;
154
    apr_uint32_t prev;
132
155
133
    asm volatile ("loop_%=:\n"                  /* lost reservation     */
156
    asm volatile (PPC_SYNC
157
                  "loop_%=:\n"                  /* lost reservation     */
134
                  "	lwarx   %0,0,%1\n"      /* load and reserve     */
158
                  "	lwarx   %0,0,%1\n"      /* load and reserve     */
135
                  PPC405_ERR77_SYNC             /* ppc405 Erratum 77    */
159
                  PPC405_ERR77_SYNC             /* ppc405 Erratum 77    */
136
                  "	stwcx.  %2,0,%1\n"      /* store new value      */
160
                  "	stwcx.  %2,0,%1\n"      /* store new value      */
137
                  "	bne-    loop_%="        /* loop if lost         */
161
                  "	bne-    loop_%=\n"      /* loop if lost         */
162
                  "	isync\n"                /* memory barrier       */
138
                  : "=&r" (prev)
163
                  : "=&r" (prev)
139
                  : "b" (mem), "r" (val)
164
                  : "b" (mem), "r" (val)
140
                  : "cc", "memory");
165
                  : "cc", "memory");
Lines 142-152 APR_DECLARE(apr_uint32_t) apr_atomic_xch Link Here
142
    return prev;
167
    return prev;
143
}
168
}
144
169
145
APR_DECLARE(void*) apr_atomic_casptr(volatile void **mem, void *with, const void *cmp)
170
APR_DECLARE(void*) apr_atomic_casptr(volatile void **mem, void *with, const void *cmp)
146
{
171
{
147
    void *prev;
172
    void *prev;
148
#if APR_SIZEOF_VOIDP == 4
173
#if APR_SIZEOF_VOIDP == 4
149
    asm volatile ("loop_%=:\n"                  /* lost reservation     */
174
    asm volatile (PPC_SYNC
175
                  "loop_%=:\n"                  /* lost reservation     */
150
                  "	lwarx   %0,0,%1\n"      /* load and reserve     */
176
                  "	lwarx   %0,0,%1\n"      /* load and reserve     */
151
                  "	cmpw    %0,%3\n"        /* compare operands     */
177
                  "	cmpw    %0,%3\n"        /* compare operands     */
152
                  "	bne-    exit_%=\n"      /* skip if not equal    */
178
                  "	bne-    exit_%=\n"      /* skip if not equal    */
Lines 154-164 APR_DECLARE(void*) apr_atomic_casptr(vol Link Here
154
                  "	stwcx.  %2,0,%1\n"      /* store new value      */
180
                  "	stwcx.  %2,0,%1\n"      /* store new value      */
155
                  "	bne-    loop_%=\n"      /* loop if lost         */
181
                  "	bne-    loop_%=\n"      /* loop if lost         */
156
                  "exit_%=:\n"                  /* not equal            */
182
                  "exit_%=:\n"                  /* not equal            */
183
                  "	isync\n"                /* memory barrier       */
157
                  : "=&r" (prev)
184
                  : "=&r" (prev)
158
                  : "b" (mem), "r" (with), "r" (cmp)
185
                  : "b" (mem), "r" (with), "r" (cmp)
159
                  : "cc", "memory");
186
                  : "cc", "memory");
160
#elif APR_SIZEOF_VOIDP == 8
187
#elif APR_SIZEOF_VOIDP == 8
161
    asm volatile ("loop_%=:\n"                  /* lost reservation     */
188
    asm volatile (PPC_SYNC
189
                  "loop_%=:\n"                  /* lost reservation     */
162
                  "	ldarx   %0,0,%1\n"      /* load and reserve     */
190
                  "	ldarx   %0,0,%1\n"      /* load and reserve     */
163
                  "	cmpd    %0,%3\n"        /* compare operands     */
191
                  "	cmpd    %0,%3\n"        /* compare operands     */
164
                  "	bne-    exit_%=\n"      /* skip if not equal    */
192
                  "	bne-    exit_%=\n"      /* skip if not equal    */
Lines 166-171 APR_DECLARE(void*) apr_atomic_casptr(vol Link Here
166
                  "	stdcx.  %2,0,%1\n"      /* store new value      */
194
                  "	stdcx.  %2,0,%1\n"      /* store new value      */
167
                  "	bne-    loop_%=\n"      /* loop if lost         */
195
                  "	bne-    loop_%=\n"      /* loop if lost         */
168
                  "exit_%=:\n"                  /* not equal            */
196
                  "exit_%=:\n"                  /* not equal            */
197
                  "	isync\n"                /* memory barrier       */
169
                  : "=&r" (prev)
198
                  : "=&r" (prev)
170
                  : "b" (mem), "r" (with), "r" (cmp)
199
                  : "b" (mem), "r" (with), "r" (cmp)
171
                  : "cc", "memory");
200
                  : "cc", "memory");
(-)apr/configure.in (+13 lines)
Lines 401-406 case $host in Link Here
401
        ;;
401
        ;;
402
esac
402
esac
403
403
404
AC_CACHE_CHECK([for POWER4 lwsync mnemonic], [have_lwsync],
405
[AC_TRY_RUN([
406
int main()
407
{
408
    asm volatile ("lwsync" : : : "memory");
409
410
    return 0;
411
}], [have_lwsync=yes], [have_lwsync=no], [have_lwsync=no])])
412
413
if test "$have_lwsync" = "yes"; then
414
    AC_DEFINE(HAVE_POWER4_LWSYNC, 1, [Define if target CPU has lwsync])
415
fi
416
404
dnl Check the depend program we can use
417
dnl Check the depend program we can use
405
APR_CHECK_DEPEND
418
APR_CHECK_DEPEND
406
419

Return to bug 42806