ObjFW Check-in [c859e2230c]

Overview
Comment: Remove OF_*_ASM defines

Duplicating defines for every CPU architecture is not very useful.
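
Concretely, macros.h used to define one alias per architecture (see the macros.h diff below):

#ifdef __GNUC__
# ifdef OF_X86_64
#  define OF_X86_64_ASM
# endif
# ifdef OF_X86
#  define OF_X86_ASM
# endif
/* ... one such block per architecture ... */
#endif

so a test like #if defined(OF_X86_64_ASM) was merely shorthand for the two checks it expanded from. After this change, call sites spell the condition out directly:

#if defined(OF_X86_64) && defined(__GNUC__)

Both forms are taken verbatim from the diffs below.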

SHA3-256: c859e2230cd143ffd27b8fbac9d5e16461ef8c812c4c2ab85dc43cbe1c376c0a
User & Date: js on 2020-12-12 22:31:19
Context
2020-12-12
22:36  Add platform define for Elbrus 2000 (check-in: 3a9ef7d5cc, user: js, tags: trunk)
22:31  Remove OF_*_ASM defines (check-in: c859e2230c, user: js, tags: trunk)
22:17  Fix disabling encodings (check-in: ec50bf2a93, user: js, tags: trunk)
Changes

Modified src/OFSystemInfo.m from [97e07274dd] to [6b81ef8011].


Old (lines 240-260):


#if defined(OF_X86_64) || defined(OF_X86)
static OF_INLINE struct x86_regs OF_CONST_FUNC
x86_cpuid(uint32_t eax, uint32_t ecx)
{
	struct x86_regs regs;

# if defined(OF_X86_64_ASM)
	__asm__ (
	    "cpuid"
	    : "=a"(regs.eax), "=b"(regs.ebx), "=c"(regs.ecx), "=d"(regs.edx)
	    : "a"(eax), "c"(ecx)
	);
# elif defined(OF_X86_ASM)
	/*
	 * This workaround is required by older GCC versions when using -fPIC,
	 * as ebx is a special register in PIC code. Yes, GCC is indeed not
	 * able to just push a register onto the stack before the __asm__ block
	 * and to pop it afterwards.
	 */
	__asm__ (

New (lines 240-260):


#if defined(OF_X86_64) || defined(OF_X86)
static OF_INLINE struct x86_regs OF_CONST_FUNC
x86_cpuid(uint32_t eax, uint32_t ecx)
{
	struct x86_regs regs;

# if defined(OF_X86_64) && defined(__GNUC__)
	__asm__ (
	    "cpuid"
	    : "=a"(regs.eax), "=b"(regs.ebx), "=c"(regs.ecx), "=d"(regs.edx)
	    : "a"(eax), "c"(ecx)
	);
# elif defined(OF_X86) && defined(__GNUC__)
	/*
	 * This workaround is required by older GCC versions when using -fPIC,
	 * as ebx is a special register in PIC code. Yes, GCC is indeed not
	 * able to just push a register onto the stack before the __asm__ block
	 * and to pop it afterwards.
	 */
	__asm__ (
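
Aside: a hedged sketch of how a cpuid wrapper like this gets used. The feature-bit position is standard x86 (CPUID leaf 1 reports SSE in EDX bit 25) rather than anything this check-in touches, and has_sse() is a hypothetical caller:

#include <stdbool.h>

/* Hypothetical caller; struct x86_regs and x86_cpuid() as defined above. */
static bool
has_sse(void)
{
    struct x86_regs regs = x86_cpuid(1, 0);

    return regs.edx & (1U << 25);   /* CPUID.01H:EDX bit 25 = SSE */
}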

Old (lines 528-563):

	return [var stringByAppendingPathComponent: @".config"];
# endif
}
#endif

+ (OFString *)CPUVendor
{
#if defined(OF_X86_64_ASM) || defined(OF_X86_ASM)
	struct x86_regs regs = x86_cpuid(0, 0);
	uint32_t buffer[3];

	if (regs.eax == 0)
		return nil;

	buffer[0] = regs.ebx;
	buffer[1] = regs.edx;
	buffer[2] = regs.ecx;

	return [OFString stringWithCString: (char *)buffer
				  encoding: OF_STRING_ENCODING_ASCII
				    length: 12];
#else
	return nil;
#endif
}

+ (OFString *)CPUModel
{
#if defined(OF_X86_64_ASM) || defined(OF_X86_ASM)
	uint32_t buffer[12];
	size_t i;

	i = 0;
	for (uint32_t eax = 0x80000002; eax <= 0x80000004; eax++) {
		struct x86_regs regs = x86_cpuid(eax, 0);


New (lines 528-563):

	return [var stringByAppendingPathComponent: @".config"];
# endif
}
#endif

+ (OFString *)CPUVendor
{
#if (defined(OF_X86_64) || defined(OF_X86)) && defined(__GNUC__)
	struct x86_regs regs = x86_cpuid(0, 0);
	uint32_t buffer[3];

	if (regs.eax == 0)
		return nil;

	buffer[0] = regs.ebx;
	buffer[1] = regs.edx;
	buffer[2] = regs.ecx;

	return [OFString stringWithCString: (char *)buffer
				  encoding: OF_STRING_ENCODING_ASCII
				    length: 12];
#else
	return nil;
#endif
}

+ (OFString *)CPUModel
{
#if (defined(OF_X86_64) || defined(OF_X86)) && defined(__GNUC__)
	uint32_t buffer[12];
	size_t i;

	i = 0;
	for (uint32_t eax = 0x80000002; eax <= 0x80000004; eax++) {
		struct x86_regs regs = x86_cpuid(eax, 0);

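The loop queries the extended leaves 0x80000002 through 0x80000004, each of which returns 16 bytes of the processor brand string in eax/ebx/ecx/edx, 48 bytes in total, which is why the buffer is uint32_t buffer[12]. Since the hunk is cut off above, here is a hedged, self-contained sketch of the technique (get_brand_string() is hypothetical; struct x86_regs and x86_cpuid() as in the first hunk, not necessarily this file's exact code):

#include <stdint.h>

/* Hypothetical helper: fill out[] with the 48-byte CPUID brand string. */
static void
get_brand_string(uint32_t out[12])
{
    size_t i = 0;

    for (uint32_t eax = 0x80000002; eax <= 0x80000004; eax++) {
        struct x86_regs regs = x86_cpuid(eax, 0);

        out[i++] = regs.eax;
        out[i++] = regs.ebx;
        out[i++] = regs.ecx;
        out[i++] = regs.edx;
    }
}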
Modified src/atomic.h from [7db74776cf] to [6778d083ff].


Old (lines 21-40):


#ifndef OF_HAVE_ATOMIC_OPS
# error No atomic operations available!
#endif

#if !defined(OF_HAVE_THREADS)
# import "atomic_no_threads.h"
#elif defined(OF_X86_64_ASM) || defined(OF_X86_ASM)
# import "atomic_x86.h"
#elif defined(OF_POWERPC_ASM) && !defined(__APPLE_CC__) && !defined(OF_AIX)
# import "atomic_powerpc.h"
#elif defined(OF_HAVE_ATOMIC_BUILTINS)
# import "atomic_builtins.h"
#elif defined(OF_HAVE_SYNC_BUILTINS)
# import "atomic_sync_builtins.h"
#elif defined(OF_HAVE_OSATOMIC)
# import "atomic_osatomic.h"
#else
# error No atomic operations available!
#endif

New (lines 21-41):


#ifndef OF_HAVE_ATOMIC_OPS
# error No atomic operations available!
#endif

#if !defined(OF_HAVE_THREADS)
# import "atomic_no_threads.h"
#elif (defined(OF_X86_64) || defined(OF_X86)) && defined(__GNUC__)
# import "atomic_x86.h"
#elif defined(OF_POWERPC) && defined(__GNUC__) && !defined(__APPLE_CC__) && \
    !defined(OF_AIX)
# import "atomic_powerpc.h"
#elif defined(OF_HAVE_ATOMIC_BUILTINS)
# import "atomic_builtins.h"
#elif defined(OF_HAVE_SYNC_BUILTINS)
# import "atomic_sync_builtins.h"
#elif defined(OF_HAVE_OSATOMIC)
# import "atomic_osatomic.h"
#else
# error No atomic operations available!
#endif

Modified src/atomic_x86.h from [61a2fa833b] to [7fb17d08ce].


Old (lines 24-38):

		__asm__ __volatile__ (
		    "lock\n\t"
		    "xaddl	%0, %2\n\t"
		    "addl	%1, %0"
		    : "+&r"(i)
		    : "r"(i), "m"(*p)
		);
#ifdef OF_X86_64_ASM
	else if (sizeof(int) == 8)
		__asm__ __volatile__ (
		    "lock\n\t"
		    "xaddq	%0, %2\n\t"
		    "addq	%1, %0"
		    : "+&r"(i)
		    : "r"(i), "m"(*p)

New (lines 24-38):

		__asm__ __volatile__ (
		    "lock\n\t"
		    "xaddl	%0, %2\n\t"
		    "addl	%1, %0"
		    : "+&r"(i)
		    : "r"(i), "m"(*p)
		);
#ifdef OF_X86_64
	else if (sizeof(int) == 8)
		__asm__ __volatile__ (
		    "lock\n\t"
		    "xaddq	%0, %2\n\t"
		    "addq	%1, %0"
		    : "+&r"(i)
		    : "r"(i), "m"(*p)
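
The xadd-then-add idiom here (and in the subtraction, increment and decrement variants below) is how these functions return the updated value: lock xadd atomically adds the register to *p and leaves the old value in the register, so one more add of i produces old + i. The sub variants negate i first and subtract at the end, to the same effect. An equivalent C sketch using a GCC builtin (an illustration, not the file's code):

static int
xadd_then_add_equivalent(volatile int *p, int i)
{
    /* Like "lock xadd": atomically add i to *p, receive the old value. */
    int old = __atomic_fetch_add(p, i, __ATOMIC_SEQ_CST);

    /* Like the trailing "add": hand back the new value, old + i. */
    return old + i;
}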

Old (lines 57-81):


	return i;
}

static OF_INLINE void *_Nullable
of_atomic_ptr_add(void *volatile _Nullable *_Nonnull p, intptr_t i)
{
#if defined(OF_X86_64_ASM)
	__asm__ __volatile__ (
	    "lock\n\t"
	    "xaddq	%0, %2\n\t"
	    "addq	%1, %0"
	    : "+&r"(i)
	    : "r"(i), "m"(*p)
	);

	return (void *)i;
#elif defined(OF_X86_ASM)
	__asm__ __volatile__ (
	    "lock\n\t"
	    "xaddl	%0, %2\n\t"
	    "addl	%1, %0"
	    : "+&r"(i)
	    : "r"(i), "m"(*p)
	);

New (lines 57-81):


	return i;
}

static OF_INLINE void *_Nullable
of_atomic_ptr_add(void *volatile _Nullable *_Nonnull p, intptr_t i)
{
#if defined(OF_X86_64)
	__asm__ __volatile__ (
	    "lock\n\t"
	    "xaddq	%0, %2\n\t"
	    "addq	%1, %0"
	    : "+&r"(i)
	    : "r"(i), "m"(*p)
	);

	return (void *)i;
#elif defined(OF_X86)
	__asm__ __volatile__ (
	    "lock\n\t"
	    "xaddl	%0, %2\n\t"
	    "addl	%1, %0"
	    : "+&r"(i)
	    : "r"(i), "m"(*p)
	);
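
For illustration, a hypothetical use of of_atomic_ptr_add() as defined above. Note that it returns the advanced (new) pointer, not the old one; bump(), cursor and arena are made up for this sketch:

static char arena[64];
static void *volatile cursor = arena;

/* Hypothetical bump allocator: reserve n bytes, return their start.
 * of_atomic_ptr_add() yields the new cursor, so step back by n. */
static void *
bump(intptr_t n)
{
    return (char *)of_atomic_ptr_add(&cursor, n) - n;
}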

Old (lines 92-106):

		    "negl	%0\n\t"
		    "lock\n\t"
		    "xaddl	%0, %2\n\t"
		    "subl	%1, %0"
		    : "+&r"(i)
		    : "r"(i), "m"(*p)
		);
#ifdef OF_X86_64_ASM
	else if (sizeof(int) == 8)
		__asm__ __volatile__ (
		    "negq	%0\n\t"
		    "lock\n\t"
		    "xaddq	%0, %2\n\t"
		    "subq	%1, %0"
		    : "+&r"(i)

New (lines 92-106):

		    "negl	%0\n\t"
		    "lock\n\t"
		    "xaddl	%0, %2\n\t"
		    "subl	%1, %0"
		    : "+&r"(i)
		    : "r"(i), "m"(*p)
		);
#ifdef OF_X86_64
	else if (sizeof(int) == 8)
		__asm__ __volatile__ (
		    "negq	%0\n\t"
		    "lock\n\t"
		    "xaddq	%0, %2\n\t"
		    "subq	%1, %0"
		    : "+&r"(i)

Old (lines 127-152):


	return i;
}

static OF_INLINE void *_Nullable
of_atomic_ptr_sub(void *volatile _Nullable *_Nonnull p, intptr_t i)
{
#if defined(OF_X86_64_ASM)
	__asm__ __volatile__ (
	    "negq	%0\n\t"
	    "lock\n\t"
	    "xaddq	%0, %2\n\t"
	    "subq	%1, %0"
	    : "+&r"(i)
	    : "r"(i), "m"(*p)
	);

	return (void *)i;
#elif defined(OF_X86_ASM)
	__asm__ __volatile__ (
	    "negl	%0\n\t"
	    "lock\n\t"
	    "xaddl	%0, %2\n\t"
	    "subl	%1, %0"
	    : "+&r"(i)
	    : "r"(i), "m"(*p)

New (lines 127-152):


	return i;
}

static OF_INLINE void *_Nullable
of_atomic_ptr_sub(void *volatile _Nullable *_Nonnull p, intptr_t i)
{
#if defined(OF_X86_64)
	__asm__ __volatile__ (
	    "negq	%0\n\t"
	    "lock\n\t"
	    "xaddq	%0, %2\n\t"
	    "subq	%1, %0"
	    : "+&r"(i)
	    : "r"(i), "m"(*p)
	);

	return (void *)i;
#elif defined(OF_X86)
	__asm__ __volatile__ (
	    "negl	%0\n\t"
	    "lock\n\t"
	    "xaddl	%0, %2\n\t"
	    "subl	%1, %0"
	    : "+&r"(i)
	    : "r"(i), "m"(*p)

Old (lines 167-181):

		    "incl	%0\n\t"
		    "lock\n\t"
		    "xaddl	%0, %1\n\t"
		    "incl	%0"
		    : "=&r"(i)
		    : "m"(*p)
		);
#ifdef OF_X86_64_ASM
	else if (sizeof(int) == 8)
		__asm__ __volatile__ (
		    "xorq	%0, %0\n\t"
		    "incq	%0\n\t"
		    "lock\n\t"
		    "xaddq	%0, %1\n\t"
		    "incq	%0"

New (lines 167-181):

		    "incl	%0\n\t"
		    "lock\n\t"
		    "xaddl	%0, %1\n\t"
		    "incl	%0"
		    : "=&r"(i)
		    : "m"(*p)
		);
#ifdef OF_X86_64
	else if (sizeof(int) == 8)
		__asm__ __volatile__ (
		    "xorq	%0, %0\n\t"
		    "incq	%0\n\t"
		    "lock\n\t"
		    "xaddq	%0, %1\n\t"
		    "incq	%0"

Old (lines 218-232):

		    "decl	%0\n\t"
		    "lock\n\t"
		    "xaddl	%0, %1\n\t"
		    "decl	%0"
		    : "=&r"(i)
		    : "m"(*p)
		);
#ifdef OF_X86_64_ASM
	else if (sizeof(int) == 8)
		__asm__ __volatile__ (
		    "xorq	%0, %0\n\t"
		    "decq	%0\n\t"
		    "lock\n\t"
		    "xaddq	%0, %1\n\t"
		    "decq	%0"

New (lines 218-232):

		    "decl	%0\n\t"
		    "lock\n\t"
		    "xaddl	%0, %1\n\t"
		    "decl	%0"
		    : "=&r"(i)
		    : "m"(*p)
		);
#ifdef OF_X86_64
	else if (sizeof(int) == 8)
		__asm__ __volatile__ (
		    "xorq	%0, %0\n\t"
		    "decq	%0\n\t"
		    "lock\n\t"
		    "xaddq	%0, %1\n\t"
		    "decq	%0"

Old (lines 270-284):

		    "lock\n\t"
		    "cmpxchg	%0, %2\n\t"
		    "jne	0b"
		    : "=&r"(i)
		    : "r"(i), "m"(*p)
		    : "eax", "cc"
		);
#ifdef OF_X86_64_ASM
	else if (sizeof(int) == 8)
		__asm__ __volatile__ (
		    "0:\n\t"
		    "movq	%2, %0\n\t"
		    "movq	%0, %%rax\n\t"
		    "orq	%1, %0\n\t"
		    "lock\n\t"

New (lines 270-284):

		    "lock\n\t"
		    "cmpxchg	%0, %2\n\t"
		    "jne	0b"
		    : "=&r"(i)
		    : "r"(i), "m"(*p)
		    : "eax", "cc"
		);
#ifdef OF_X86_64
	else if (sizeof(int) == 8)
		__asm__ __volatile__ (
		    "0:\n\t"
		    "movq	%2, %0\n\t"
		    "movq	%0, %%rax\n\t"
		    "orq	%1, %0\n\t"
		    "lock\n\t"

Old (lines 326-340):

		    "lock\n\t"
		    "cmpxchg	%0, %2\n\t"
		    "jne	0b"
		    : "=&r"(i)
		    : "r"(i), "m"(*p)
		    : "eax", "cc"
		);
#ifdef OF_X86_64_ASM
	else if (sizeof(int) == 8)
		__asm__ __volatile__ (
		    "0:\n\t"
		    "movq	%2, %0\n\t"
		    "movq	%0, %%rax\n\t"
		    "andq	%1, %0\n\t"
		    "lock\n\t"

New (lines 326-340):

		    "lock\n\t"
		    "cmpxchg	%0, %2\n\t"
		    "jne	0b"
		    : "=&r"(i)
		    : "r"(i), "m"(*p)
		    : "eax", "cc"
		);
#ifdef OF_X86_64
	else if (sizeof(int) == 8)
		__asm__ __volatile__ (
		    "0:\n\t"
		    "movq	%2, %0\n\t"
		    "movq	%0, %%rax\n\t"
		    "andq	%1, %0\n\t"
		    "lock\n\t"

Old (lines 382-396):

		    "lock\n\t"
		    "cmpxchg	%0, %2\n\t"
		    "jne	0b"
		    : "=&r"(i)
		    : "r"(i), "m"(*p)
		    : "eax", "cc"
		);
#ifdef OF_X86_64_ASM
	else if (sizeof(int) == 8)
		__asm__ __volatile__ (
		    "0:\n\t"
		    "movq	%2, %0\n\t"
		    "movq	%0, %%rax\n\t"
		    "xorq	%1, %0\n\t"
		    "lock\n\t"

New (lines 382-396):

		    "lock\n\t"
		    "cmpxchg	%0, %2\n\t"
		    "jne	0b"
		    : "=&r"(i)
		    : "r"(i), "m"(*p)
		    : "eax", "cc"
		);
#ifdef OF_X86_64
	else if (sizeof(int) == 8)
		__asm__ __volatile__ (
		    "0:\n\t"
		    "movq	%2, %0\n\t"
		    "movq	%0, %%rax\n\t"
		    "xorq	%1, %0\n\t"
		    "lock\n\t"

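The or/and/xor operations in this file are compare-and-swap retry loops: each pass loads the current value, computes the result, and lock cmpxchg publishes it only if *p still equals the value cached in eax/rax; after a lost race, jne jumps back to label 0 and retries. An equivalent C sketch using a GCC __sync builtin (an illustration, not the file's code):

static int
cas_loop_equivalent(volatile int *p, int i)
{
    int old, updated;

    do {
        old = *p;           /* mov %2, %0; mov %0, %%eax */
        updated = old | i;  /* or %1, %0 (likewise and/xor) */
    } while (!__sync_bool_compare_and_swap(p, old, updated));
                            /* failed lock cmpxchg -> jne 0b: retry */

    return updated;
}
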
Modified src/macros.h from [7383bf6570] to [365b468ef6].


Old (lines 317-366):

#endif
#if __has_attribute(__objc_direct_members__) && defined(OF_APPLE_RUNTIME)
# define OF_DIRECT_MEMBERS __attribute__((__objc_direct_members__))
#else
# define OF_DIRECT_MEMBERS
#endif

#ifdef __GNUC__
# ifdef OF_X86_64
#  define OF_X86_64_ASM
# endif
# ifdef OF_X86
#  define OF_X86_ASM
# endif
# ifdef OF_POWERPC
#  define OF_POWERPC_ASM
# endif
# ifdef OF_ARM64
#  define OF_ARM64_ASM
# endif
# ifdef OF_ARM
#  define OF_ARM_ASM
# endif
# ifdef OF_ARMV7
#  define OF_ARMV7_ASM
# endif
# ifdef OF_ARMV6
#  define OF_ARMV6_ASM
# endif
# ifdef OF_MIPS64
#  define OF_MIPS64_ASM
# endif
# ifdef OF_MIPS
#  define OF_MIPS_ASM
# endif
# ifdef OF_PA_RISC
#  define OF_PA_RISC_ASM
# endif
# ifdef OF_ITANIUM
#  define OF_ITANIUM_ASM
# endif
#endif

#ifdef OF_APPLE_RUNTIME
# if defined(OF_X86_64) || defined(OF_X86) || defined(OF_ARM64) || \
    defined(OF_ARM) || defined(OF_POWERPC)
#  define OF_HAVE_FORWARDING_TARGET_FOR_SELECTOR
#  define OF_HAVE_FORWARDING_TARGET_FOR_SELECTOR_STRET
# endif
#else

New (lines 317-330):

#endif
#if __has_attribute(__objc_direct_members__) && defined(OF_APPLE_RUNTIME)
# define OF_DIRECT_MEMBERS __attribute__((__objc_direct_members__))
#else
# define OF_DIRECT_MEMBERS
#endif

#ifdef OF_APPLE_RUNTIME
# if defined(OF_X86_64) || defined(OF_X86) || defined(OF_ARM64) || \
    defined(OF_ARM) || defined(OF_POWERPC)
#  define OF_HAVE_FORWARDING_TARGET_FOR_SELECTOR
#  define OF_HAVE_FORWARDING_TARGET_FOR_SELECTOR_STRET
# endif
#else

Old (lines 465-547):

}

static OF_INLINE uint16_t OF_CONST_FUNC
OF_BSWAP16_NONCONST(uint16_t i)
{
#if defined(OF_HAVE_BUILTIN_BSWAP16)
	return __builtin_bswap16(i);
#elif defined(OF_X86_64_ASM) || defined(OF_X86_ASM)
	__asm__ (
	    "xchgb	%h0, %b0"
	    : "=Q"(i)
	    : "0"(i)
	);
#elif defined(OF_POWERPC_ASM)
	__asm__ (
	    "lhbrx	%0, 0, %1"
	    : "=r"(i)
	    : "r"(&i), "m"(i)
	);
#elif defined(OF_ARMV6_ASM)
	__asm__ (
	    "rev16	%0, %0"
	    : "=r"(i)
	    : "0"(i)
	);
#else
	i = (i & UINT16_C(0xFF00)) >> 8 |
	    (i & UINT16_C(0x00FF)) << 8;
#endif
	return i;
}

static OF_INLINE uint32_t OF_CONST_FUNC
OF_BSWAP32_NONCONST(uint32_t i)
{
#if defined(OF_HAVE_BUILTIN_BSWAP32)
	return __builtin_bswap32(i);
#elif defined(OF_X86_64_ASM) || defined(OF_X86_ASM)
	__asm__ (
	    "bswap	%0"
	    : "=q"(i)
	    : "0"(i)
	);
#elif defined(OF_POWERPC_ASM)
	__asm__ (
	    "lwbrx	%0, 0, %1"
	    : "=r"(i)
	    : "r"(&i), "m"(i)
	);
#elif defined(OF_ARMV6_ASM)
	__asm__ (
	    "rev	%0, %0"
	    : "=r"(i)
	    : "0"(i)
	);
#else
	i = (i & UINT32_C(0xFF000000)) >> 24 |
	    (i & UINT32_C(0x00FF0000)) >>  8 |
	    (i & UINT32_C(0x0000FF00)) <<  8 |
	    (i & UINT32_C(0x000000FF)) << 24;
#endif
	return i;
}

static OF_INLINE uint64_t OF_CONST_FUNC
OF_BSWAP64_NONCONST(uint64_t i)
{
#if defined(OF_HAVE_BUILTIN_BSWAP64)
	return __builtin_bswap64(i);
#elif defined(OF_X86_64_ASM)
	__asm__ (
	    "bswap	%0"
	    : "=r"(i)
	    : "0"(i)
	);
#elif defined(OF_X86_ASM)
	__asm__ (
	    "bswap	%%eax\n\t"
	    "bswap	%%edx\n\t"
	    "xchgl	%%eax, %%edx"
	    : "=A"(i)
	    : "0"(i)
	);

New (lines 429-511):

}

static OF_INLINE uint16_t OF_CONST_FUNC
OF_BSWAP16_NONCONST(uint16_t i)
{
#if defined(OF_HAVE_BUILTIN_BSWAP16)
	return __builtin_bswap16(i);
#elif (defined(OF_X86_64) || defined(OF_X86)) && defined(__GNUC__)
	__asm__ (
	    "xchgb	%h0, %b0"
	    : "=Q"(i)
	    : "0"(i)
	);
#elif defined(OF_POWERPC) && defined(__GNUC__)
	__asm__ (
	    "lhbrx	%0, 0, %1"
	    : "=r"(i)
	    : "r"(&i), "m"(i)
	);
#elif defined(OF_ARMV6) && defined(__GNUC__)
	__asm__ (
	    "rev16	%0, %0"
	    : "=r"(i)
	    : "0"(i)
	);
#else
	i = (i & UINT16_C(0xFF00)) >> 8 |
	    (i & UINT16_C(0x00FF)) << 8;
#endif
	return i;
}

static OF_INLINE uint32_t OF_CONST_FUNC
OF_BSWAP32_NONCONST(uint32_t i)
{
#if defined(OF_HAVE_BUILTIN_BSWAP32)
	return __builtin_bswap32(i);
#elif (defined(OF_X86_64) || defined(OF_X86)) && defined(__GNUC__)
	__asm__ (
	    "bswap	%0"
	    : "=q"(i)
	    : "0"(i)
	);
#elif defined(OF_POWERPC) && defined(__GNUC__)
	__asm__ (
	    "lwbrx	%0, 0, %1"
	    : "=r"(i)
	    : "r"(&i), "m"(i)
	);
#elif defined(OF_ARMV6) && defined(__GNUC__)
	__asm__ (
	    "rev	%0, %0"
	    : "=r"(i)
	    : "0"(i)
	);
#else
	i = (i & UINT32_C(0xFF000000)) >> 24 |
	    (i & UINT32_C(0x00FF0000)) >>  8 |
	    (i & UINT32_C(0x0000FF00)) <<  8 |
	    (i & UINT32_C(0x000000FF)) << 24;
#endif
	return i;
}

static OF_INLINE uint64_t OF_CONST_FUNC
OF_BSWAP64_NONCONST(uint64_t i)
{
#if defined(OF_HAVE_BUILTIN_BSWAP64)
	return __builtin_bswap64(i);
#elif defined(OF_X86_64) && defined(__GNUC__)
	__asm__ (
	    "bswap	%0"
	    : "=r"(i)
	    : "0"(i)
	);
#elif defined(OF_X86) && defined(__GNUC__)
	__asm__ (
	    "bswap	%%eax\n\t"
	    "bswap	%%edx\n\t"
	    "xchgl	%%eax, %%edx"
	    : "=A"(i)
	    : "0"(i)
	);
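
For reference, the values these swap routines compute, as an illustrative check (the driver below is not part of the diff):

#include <assert.h>
#include <stdint.h>

static void
check_bswap(void)
{
    assert(OF_BSWAP16_NONCONST(0x1234) == 0x3412);
    assert(OF_BSWAP32_NONCONST(UINT32_C(0x12345678)) ==
        UINT32_C(0x78563412));
    assert(OF_BSWAP64_NONCONST(UINT64_C(0x0123456789ABCDEF)) ==
        UINT64_C(0xEFCDAB8967452301));
}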