Location via proxy:   [ UP ]  
[Report a bug]   [Manage cookies]                
Skip to content

Commit f13a912

Browse files
committed
Fix issues in e8fdbd5.
When the 64-bit atomics simulation is in use, we can't necessarily guarantee the correct alignment of the atomics due to lack of compiler support for doing so. That's fine from a safety perspective, because everything is protected by a lock — but we asserted the alignment in all cases. Weaken those assertions. Per complaint from Álvaro Herrera. Additionally, my #ifdefery for PG_HAVE_8BYTE_SINGLE_COPY_ATOMICITY wasn't sufficient; fix that as well. Per complaint from Alexander Korotkov.
1 parent 453aaf7 commit f13a912

File tree

2 files changed

+62
-16
lines changed

2 files changed

+62
-16
lines changed

src/include/port/atomics.h

+27-2
Original file line numberDiff line numberDiff line change
@@ -425,82 +425,107 @@ pg_atomic_sub_fetch_u32(volatile pg_atomic_uint32 *ptr, int32 sub_)
425425
static inline void
426426
pg_atomic_init_u64(volatile pg_atomic_uint64 *ptr, uint64 val)
427427
{
428+
/*
429+
* Can't necessarily enforce alignment - and don't need it - when using
430+
* the spinlock-based fallback implementation. Therefore only assert when
431+
* not using it.
432+
*/
433+
#ifndef PG_HAVE_ATOMIC_U64_SIMULATION
428434
AssertPointerAlignment(ptr, 8);
429-
435+
#endif
430436
pg_atomic_init_u64_impl(ptr, val);
431437
}
432438

433439
static inline uint64
434440
pg_atomic_read_u64(volatile pg_atomic_uint64 *ptr)
435441
{
442+
#ifndef PG_HAVE_ATOMIC_U64_SIMULATION
436443
AssertPointerAlignment(ptr, 8);
444+
#endif
437445
return pg_atomic_read_u64_impl(ptr);
438446
}
439447

440448
static inline void
441449
pg_atomic_write_u64(volatile pg_atomic_uint64 *ptr, uint64 val)
442450
{
451+
#ifndef PG_HAVE_ATOMIC_U64_SIMULATION
443452
AssertPointerAlignment(ptr, 8);
453+
#endif
444454
pg_atomic_write_u64_impl(ptr, val);
445455
}
446456

447457
static inline uint64
448458
pg_atomic_exchange_u64(volatile pg_atomic_uint64 *ptr, uint64 newval)
449459
{
460+
#ifndef PG_HAVE_ATOMIC_U64_SIMULATION
450461
AssertPointerAlignment(ptr, 8);
451-
462+
#endif
452463
return pg_atomic_exchange_u64_impl(ptr, newval);
453464
}
454465

455466
static inline bool
456467
pg_atomic_compare_exchange_u64(volatile pg_atomic_uint64 *ptr,
457468
uint64 *expected, uint64 newval)
458469
{
470+
#ifndef PG_HAVE_ATOMIC_U64_SIMULATION
459471
AssertPointerAlignment(ptr, 8);
460472
AssertPointerAlignment(expected, 8);
473+
#endif
461474
return pg_atomic_compare_exchange_u64_impl(ptr, expected, newval);
462475
}
463476

464477
static inline uint64
465478
pg_atomic_fetch_add_u64(volatile pg_atomic_uint64 *ptr, int64 add_)
466479
{
480+
#ifndef PG_HAVE_ATOMIC_U64_SIMULATION
467481
AssertPointerAlignment(ptr, 8);
482+
#endif
468483
return pg_atomic_fetch_add_u64_impl(ptr, add_);
469484
}
470485

471486
static inline uint64
472487
pg_atomic_fetch_sub_u64(volatile pg_atomic_uint64 *ptr, int64 sub_)
473488
{
489+
#ifndef PG_HAVE_ATOMIC_U64_SIMULATION
474490
AssertPointerAlignment(ptr, 8);
491+
#endif
475492
Assert(sub_ != PG_INT64_MIN);
476493
return pg_atomic_fetch_sub_u64_impl(ptr, sub_);
477494
}
478495

479496
static inline uint64
480497
pg_atomic_fetch_and_u64(volatile pg_atomic_uint64 *ptr, uint64 and_)
481498
{
499+
#ifndef PG_HAVE_ATOMIC_U64_SIMULATION
482500
AssertPointerAlignment(ptr, 8);
501+
#endif
483502
return pg_atomic_fetch_and_u64_impl(ptr, and_);
484503
}
485504

486505
static inline uint64
487506
pg_atomic_fetch_or_u64(volatile pg_atomic_uint64 *ptr, uint64 or_)
488507
{
508+
#ifndef PG_HAVE_ATOMIC_U64_SIMULATION
489509
AssertPointerAlignment(ptr, 8);
510+
#endif
490511
return pg_atomic_fetch_or_u64_impl(ptr, or_);
491512
}
492513

493514
static inline uint64
494515
pg_atomic_add_fetch_u64(volatile pg_atomic_uint64 *ptr, int64 add_)
495516
{
517+
#ifndef PG_HAVE_ATOMIC_U64_SIMULATION
496518
AssertPointerAlignment(ptr, 8);
519+
#endif
497520
return pg_atomic_add_fetch_u64_impl(ptr, add_);
498521
}
499522

500523
static inline uint64
501524
pg_atomic_sub_fetch_u64(volatile pg_atomic_uint64 *ptr, int64 sub_)
502525
{
526+
#ifndef PG_HAVE_ATOMIC_U64_SIMULATION
503527
AssertPointerAlignment(ptr, 8);
528+
#endif
504529
Assert(sub_ != PG_INT64_MIN);
505530
return pg_atomic_sub_fetch_u64_impl(ptr, sub_);
506531
}

src/include/port/atomics/generic.h

+35-14
Original file line numberDiff line numberDiff line change
@@ -271,26 +271,26 @@ pg_atomic_exchange_u64_impl(volatile pg_atomic_uint64 *ptr, uint64 xchg_)
271271
}
272272
#endif
273273

274-
#ifndef PG_HAVE_ATOMIC_READ_U64
275-
#define PG_HAVE_ATOMIC_READ_U64
276-
static inline uint64
277-
pg_atomic_read_u64_impl(volatile pg_atomic_uint64 *ptr)
278-
{
279-
return *(&ptr->value);
280-
}
281-
#endif
282-
283274
#ifndef PG_HAVE_ATOMIC_WRITE_U64
284275
#define PG_HAVE_ATOMIC_WRITE_U64
276+
277+
#if defined(PG_HAVE_8BYTE_SINGLE_COPY_ATOMICITY) && \
278+
!defined(PG_HAVE_ATOMIC_U64_SIMULATION)
279+
285280
static inline void
286281
pg_atomic_write_u64_impl(volatile pg_atomic_uint64 *ptr, uint64 val)
287282
{
283+
/*
284+
* On this platform aligned 64bit writes are guaranteed to be atomic,
285+
* except if using the fallback implementation, where can't guarantee the
286+
* required alignment.
287+
*/
288+
AssertPointerAlignment(ptr, 8);
288289
ptr->value = val;
289290
}
290-
#endif
291291

292-
#ifndef PG_HAVE_ATOMIC_WRITE_U64
293-
#define PG_HAVE_ATOMIC_WRITE_U64
292+
#else
293+
294294
static inline void
295295
pg_atomic_write_u64_impl(volatile pg_atomic_uint64 *ptr, uint64 val)
296296
{
@@ -300,10 +300,30 @@ pg_atomic_write_u64_impl(volatile pg_atomic_uint64 *ptr, uint64 val)
300300
*/
301301
pg_atomic_exchange_u64_impl(ptr, val);
302302
}
303-
#endif
303+
304+
#endif /* PG_HAVE_8BYTE_SINGLE_COPY_ATOMICITY && !PG_HAVE_ATOMIC_U64_SIMULATION */
305+
#endif /* PG_HAVE_ATOMIC_WRITE_U64 */
304306

305307
#ifndef PG_HAVE_ATOMIC_READ_U64
306308
#define PG_HAVE_ATOMIC_READ_U64
309+
310+
#if defined(PG_HAVE_8BYTE_SINGLE_COPY_ATOMICITY) && \
311+
!defined(PG_HAVE_ATOMIC_U64_SIMULATION)
312+
313+
static inline uint64
314+
pg_atomic_read_u64_impl(volatile pg_atomic_uint64 *ptr)
315+
{
316+
/*
317+
* On this platform aligned 64bit reads are guaranteed to be atomic,
318+
* except if using the fallback implementation, where we can't guarantee the
319+
* required alignment.
320+
*/
321+
AssertPointerAlignment(ptr, 8);
322+
return *(&ptr->value);
323+
}
324+
325+
#else
326+
307327
static inline uint64
308328
pg_atomic_read_u64_impl(volatile pg_atomic_uint64 *ptr)
309329
{
@@ -319,7 +339,8 @@ pg_atomic_read_u64_impl(volatile pg_atomic_uint64 *ptr)
319339

320340
return old;
321341
}
322-
#endif
342+
#endif /* PG_HAVE_8BYTE_SINGLE_COPY_ATOMICITY && !PG_HAVE_ATOMIC_U64_SIMULATION */
343+
#endif /* PG_HAVE_ATOMIC_READ_U64 */
323344

324345
#ifndef PG_HAVE_ATOMIC_INIT_U64
325346
#define PG_HAVE_ATOMIC_INIT_U64

0 commit comments

Comments
 (0)