| | | |
|---|---|---|
| author | Linus Torvalds <torvalds@linux-foundation.org> | 2020-06-11 18:27:19 -0700 |
| committer | Linus Torvalds <torvalds@linux-foundation.org> | 2020-06-11 18:27:19 -0700 |
| commit | 9716e57a0195dae356ae1425df121988abd27131 (patch) | |
| tree | 27aab1864da1b0c51dc4c0a874e8c5c02016f0df /scripts | |
| parent | b1a62749946ee1956a480ba31e7d4929aa561d30 (diff) | |
| parent | 37f8173dd84936ea78000ed1cad24f8b18d48ebb (diff) | |
Merge tag 'locking-urgent-2020-06-11' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/tip
Pull atomics rework from Thomas Gleixner:
"Peter Zijlstras rework of atomics and fallbacks. This solves two
problems:
1) Compilers uninline small atomic_* static inline functions which
can expose them to instrumentation.
2) The instrumentation of atomic primitives was done at the
architecture level while composites or fallbacks were provided at
the generic level. As a result there are no uninstrumented
variants of the fallbacks.
Both issues were in the way of fully isolating fragile entry code
paths and especially the text poke int3 handler which is prone to an
endless recursion problem when anything in that code path is about to
be instrumented. This was always a problem, but got elevated due to
the new batch mode updates of tracing.
The solution is to mark the functions __always_inline and to flip the
fallback and instrumentation so the non-instrumented variants are at
the architecture level and the instrumentation is done in generic
code.
The latter introduces another fallback variant which will go away once
all architectures have been moved over to arch_atomic_*"
* tag 'locking-urgent-2020-06-11' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/tip:
locking/atomics: Flip fallbacks and instrumentation
asm-generic/atomic: Use __always_inline for fallback wrappers
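To make the shape of the change concrete, here is a minimal, illustrative sketch of the flipped layering. This is not the literal generated code (the real headers are produced by the scripts changed below), and the instrumentation hook shown is an assumption modelled on <linux/instrumented.h>:

```c
/* Illustrative sketch only -- not the literal generated headers. */

/* Arch level: a raw, uninstrumented primitive/fallback, forced inline so the
 * compiler can never turn it into an out-of-line (and thus instrumentable)
 * function.
 */
static __always_inline void
arch_atomic_inc(atomic_t *v)
{
        arch_atomic_add(1, v);          /* fallback composed from another arch_ op */
}

/* Generic level: the instrumented wrapper that ordinary kernel code calls.
 * Fragile paths (entry code, the text_poke int3 handler) can call the arch_
 * variant directly and stay free of instrumentation.
 */
static __always_inline void
atomic_inc(atomic_t *v)
{
        instrument_atomic_write(v, sizeof(*v));  /* KASAN/KCSAN hook (assumed) */
        arch_atomic_inc(v);
}
```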
Diffstat (limited to 'scripts')
| Mode | Path | Lines changed |
|---|---|---|
| -rwxr-xr-x | scripts/atomic/fallbacks/acquire | 6 |
| -rwxr-xr-x | scripts/atomic/fallbacks/add_negative | 8 |
| -rwxr-xr-x | scripts/atomic/fallbacks/add_unless | 8 |
| -rwxr-xr-x | scripts/atomic/fallbacks/andnot | 6 |
| -rwxr-xr-x | scripts/atomic/fallbacks/dec | 6 |
| -rwxr-xr-x | scripts/atomic/fallbacks/dec_and_test | 8 |
| -rwxr-xr-x | scripts/atomic/fallbacks/dec_if_positive | 8 |
| -rwxr-xr-x | scripts/atomic/fallbacks/dec_unless_positive | 8 |
| -rwxr-xr-x | scripts/atomic/fallbacks/fence | 6 |
| -rwxr-xr-x | scripts/atomic/fallbacks/fetch_add_unless | 10 |
| -rwxr-xr-x | scripts/atomic/fallbacks/inc | 6 |
| -rwxr-xr-x | scripts/atomic/fallbacks/inc_and_test | 8 |
| -rwxr-xr-x | scripts/atomic/fallbacks/inc_not_zero | 8 |
| -rwxr-xr-x | scripts/atomic/fallbacks/inc_unless_negative | 8 |
| -rwxr-xr-x | scripts/atomic/fallbacks/read_acquire | 4 |
| -rwxr-xr-x | scripts/atomic/fallbacks/release | 6 |
| -rwxr-xr-x | scripts/atomic/fallbacks/set_release | 4 |
| -rwxr-xr-x | scripts/atomic/fallbacks/sub_and_test | 8 |
| -rwxr-xr-x | scripts/atomic/fallbacks/try_cmpxchg | 6 |
| -rwxr-xr-x | scripts/atomic/gen-atomic-fallback.sh | 31 |
| -rw-r--r-- | scripts/atomic/gen-atomics.sh | 5 |
21 files changed, 84 insertions, 84 deletions
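For readers skimming the templates below: each fallback file is a shell heredoc that expands into a C helper in the generated header. With the new `${arch}` parameter set to `arch_`, two of the templates expand roughly as follows (a sketch assuming the atomic_t/int instantiation; the generated files are include/linux/atomic-arch-fallback.h and atomic-fallback.h):

```c
/* Sketch of scripts/atomic/fallbacks/acquire expanded for fetch_add:
 * an _acquire variant built from the _relaxed primitive plus an acquire fence.
 */
static __always_inline int
arch_atomic_fetch_add_acquire(int i, atomic_t *v)
{
        int ret = arch_atomic_fetch_add_relaxed(i, v);
        __atomic_acquire_fence();
        return ret;
}

/* Sketch of scripts/atomic/fallbacks/dec_and_test: composed from dec_return. */
static __always_inline bool
arch_atomic_dec_and_test(atomic_t *v)
{
        return arch_atomic_dec_return(v) == 0;
}
```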
```diff
diff --git a/scripts/atomic/fallbacks/acquire b/scripts/atomic/fallbacks/acquire
index e38871e64db6..59c00529dc7c 100755
--- a/scripts/atomic/fallbacks/acquire
+++ b/scripts/atomic/fallbacks/acquire
@@ -1,8 +1,8 @@
 cat <<EOF
-static inline ${ret}
-${atomic}_${pfx}${name}${sfx}_acquire(${params})
+static __always_inline ${ret}
+${arch}${atomic}_${pfx}${name}${sfx}_acquire(${params})
 {
-        ${ret} ret = ${atomic}_${pfx}${name}${sfx}_relaxed(${args});
+        ${ret} ret = ${arch}${atomic}_${pfx}${name}${sfx}_relaxed(${args});
         __atomic_acquire_fence();
         return ret;
 }
diff --git a/scripts/atomic/fallbacks/add_negative b/scripts/atomic/fallbacks/add_negative
index e6f4815637de..a66635bceefb 100755
--- a/scripts/atomic/fallbacks/add_negative
+++ b/scripts/atomic/fallbacks/add_negative
@@ -1,6 +1,6 @@
 cat <<EOF
 /**
- * ${atomic}_add_negative - add and test if negative
+ * ${arch}${atomic}_add_negative - add and test if negative
  * @i: integer value to add
  * @v: pointer of type ${atomic}_t
  *
@@ -8,9 +8,9 @@ cat <<EOF
  * if the result is negative, or false when
  * result is greater than or equal to zero.
  */
-static inline bool
-${atomic}_add_negative(${int} i, ${atomic}_t *v)
+static __always_inline bool
+${arch}${atomic}_add_negative(${int} i, ${atomic}_t *v)
 {
-        return ${atomic}_add_return(i, v) < 0;
+        return ${arch}${atomic}_add_return(i, v) < 0;
 }
 EOF
diff --git a/scripts/atomic/fallbacks/add_unless b/scripts/atomic/fallbacks/add_unless
index 792533885fbf..2ff598a3f9ec 100755
--- a/scripts/atomic/fallbacks/add_unless
+++ b/scripts/atomic/fallbacks/add_unless
@@ -1,6 +1,6 @@
 cat << EOF
 /**
- * ${atomic}_add_unless - add unless the number is already a given value
+ * ${arch}${atomic}_add_unless - add unless the number is already a given value
  * @v: pointer of type ${atomic}_t
  * @a: the amount to add to v...
  * @u: ...unless v is equal to u.
@@ -8,9 +8,9 @@ cat << EOF
  * Atomically adds @a to @v, if @v was not already @u.
  * Returns true if the addition was done.
  */
-static inline bool
-${atomic}_add_unless(${atomic}_t *v, ${int} a, ${int} u)
+static __always_inline bool
+${arch}${atomic}_add_unless(${atomic}_t *v, ${int} a, ${int} u)
 {
-        return ${atomic}_fetch_add_unless(v, a, u) != u;
+        return ${arch}${atomic}_fetch_add_unless(v, a, u) != u;
 }
 EOF
diff --git a/scripts/atomic/fallbacks/andnot b/scripts/atomic/fallbacks/andnot
index 9f3a3216b5e3..3f18663dcefb 100755
--- a/scripts/atomic/fallbacks/andnot
+++ b/scripts/atomic/fallbacks/andnot
@@ -1,7 +1,7 @@
 cat <<EOF
-static inline ${ret}
-${atomic}_${pfx}andnot${sfx}${order}(${int} i, ${atomic}_t *v)
+static __always_inline ${ret}
+${arch}${atomic}_${pfx}andnot${sfx}${order}(${int} i, ${atomic}_t *v)
 {
-        ${retstmt}${atomic}_${pfx}and${sfx}${order}(~i, v);
+        ${retstmt}${arch}${atomic}_${pfx}and${sfx}${order}(~i, v);
 }
 EOF
diff --git a/scripts/atomic/fallbacks/dec b/scripts/atomic/fallbacks/dec
index 10bbc82be31d..e2e01f0574bb 100755
--- a/scripts/atomic/fallbacks/dec
+++ b/scripts/atomic/fallbacks/dec
@@ -1,7 +1,7 @@
 cat <<EOF
-static inline ${ret}
-${atomic}_${pfx}dec${sfx}${order}(${atomic}_t *v)
+static __always_inline ${ret}
+${arch}${atomic}_${pfx}dec${sfx}${order}(${atomic}_t *v)
 {
-        ${retstmt}${atomic}_${pfx}sub${sfx}${order}(1, v);
+        ${retstmt}${arch}${atomic}_${pfx}sub${sfx}${order}(1, v);
 }
 EOF
diff --git a/scripts/atomic/fallbacks/dec_and_test b/scripts/atomic/fallbacks/dec_and_test
index 0ce7103b3df2..e8a5e492eb5f 100755
--- a/scripts/atomic/fallbacks/dec_and_test
+++ b/scripts/atomic/fallbacks/dec_and_test
@@ -1,15 +1,15 @@
 cat <<EOF
 /**
- * ${atomic}_dec_and_test - decrement and test
+ * ${arch}${atomic}_dec_and_test - decrement and test
  * @v: pointer of type ${atomic}_t
  *
  * Atomically decrements @v by 1 and
  * returns true if the result is 0, or false for all other
  * cases.
  */
-static inline bool
-${atomic}_dec_and_test(${atomic}_t *v)
+static __always_inline bool
+${arch}${atomic}_dec_and_test(${atomic}_t *v)
 {
-        return ${atomic}_dec_return(v) == 0;
+        return ${arch}${atomic}_dec_return(v) == 0;
 }
 EOF
diff --git a/scripts/atomic/fallbacks/dec_if_positive b/scripts/atomic/fallbacks/dec_if_positive
index c52eacec43c8..527adec89c37 100755
--- a/scripts/atomic/fallbacks/dec_if_positive
+++ b/scripts/atomic/fallbacks/dec_if_positive
@@ -1,14 +1,14 @@
 cat <<EOF
-static inline ${ret}
-${atomic}_dec_if_positive(${atomic}_t *v)
+static __always_inline ${ret}
+${arch}${atomic}_dec_if_positive(${atomic}_t *v)
 {
-        ${int} dec, c = ${atomic}_read(v);
+        ${int} dec, c = ${arch}${atomic}_read(v);
 
         do {
                 dec = c - 1;
                 if (unlikely(dec < 0))
                         break;
-        } while (!${atomic}_try_cmpxchg(v, &c, dec));
+        } while (!${arch}${atomic}_try_cmpxchg(v, &c, dec));
 
         return dec;
 }
diff --git a/scripts/atomic/fallbacks/dec_unless_positive b/scripts/atomic/fallbacks/dec_unless_positive
index 8a2578f14268..dcab6848ca1e 100755
--- a/scripts/atomic/fallbacks/dec_unless_positive
+++ b/scripts/atomic/fallbacks/dec_unless_positive
@@ -1,13 +1,13 @@
 cat <<EOF
-static inline bool
-${atomic}_dec_unless_positive(${atomic}_t *v)
+static __always_inline bool
+${arch}${atomic}_dec_unless_positive(${atomic}_t *v)
 {
-        ${int} c = ${atomic}_read(v);
+        ${int} c = ${arch}${atomic}_read(v);
 
         do {
                 if (unlikely(c > 0))
                         return false;
-        } while (!${atomic}_try_cmpxchg(v, &c, c - 1));
+        } while (!${arch}${atomic}_try_cmpxchg(v, &c, c - 1));
 
         return true;
 }
diff --git a/scripts/atomic/fallbacks/fence b/scripts/atomic/fallbacks/fence
index 82f68fa6931a..3764fc8ce945 100755
--- a/scripts/atomic/fallbacks/fence
+++ b/scripts/atomic/fallbacks/fence
@@ -1,10 +1,10 @@
 cat <<EOF
-static inline ${ret}
-${atomic}_${pfx}${name}${sfx}(${params})
+static __always_inline ${ret}
+${arch}${atomic}_${pfx}${name}${sfx}(${params})
 {
         ${ret} ret;
         __atomic_pre_full_fence();
-        ret = ${atomic}_${pfx}${name}${sfx}_relaxed(${args});
+        ret = ${arch}${atomic}_${pfx}${name}${sfx}_relaxed(${args});
         __atomic_post_full_fence();
         return ret;
 }
diff --git a/scripts/atomic/fallbacks/fetch_add_unless b/scripts/atomic/fallbacks/fetch_add_unless
index d2c091db7eae..0e0b9aef1515 100755
--- a/scripts/atomic/fallbacks/fetch_add_unless
+++ b/scripts/atomic/fallbacks/fetch_add_unless
@@ -1,6 +1,6 @@
 cat << EOF
 /**
- * ${atomic}_fetch_add_unless - add unless the number is already a given value
+ * ${arch}${atomic}_fetch_add_unless - add unless the number is already a given value
  * @v: pointer of type ${atomic}_t
  * @a: the amount to add to v...
  * @u: ...unless v is equal to u.
@@ -8,15 +8,15 @@ cat << EOF
  * Atomically adds @a to @v, so long as @v was not already @u.
  * Returns original value of @v
  */
-static inline ${int}
-${atomic}_fetch_add_unless(${atomic}_t *v, ${int} a, ${int} u)
+static __always_inline ${int}
+${arch}${atomic}_fetch_add_unless(${atomic}_t *v, ${int} a, ${int} u)
 {
-        ${int} c = ${atomic}_read(v);
+        ${int} c = ${arch}${atomic}_read(v);
 
         do {
                 if (unlikely(c == u))
                         break;
-        } while (!${atomic}_try_cmpxchg(v, &c, c + a));
+        } while (!${arch}${atomic}_try_cmpxchg(v, &c, c + a));
 
         return c;
 }
diff --git a/scripts/atomic/fallbacks/inc b/scripts/atomic/fallbacks/inc
index f866b3ad2353..15ec62946e8c 100755
--- a/scripts/atomic/fallbacks/inc
+++ b/scripts/atomic/fallbacks/inc
@@ -1,7 +1,7 @@
 cat <<EOF
-static inline ${ret}
-${atomic}_${pfx}inc${sfx}${order}(${atomic}_t *v)
+static __always_inline ${ret}
+${arch}${atomic}_${pfx}inc${sfx}${order}(${atomic}_t *v)
 {
-        ${retstmt}${atomic}_${pfx}add${sfx}${order}(1, v);
+        ${retstmt}${arch}${atomic}_${pfx}add${sfx}${order}(1, v);
 }
 EOF
diff --git a/scripts/atomic/fallbacks/inc_and_test b/scripts/atomic/fallbacks/inc_and_test
index 4e2068869f7e..cecc8322a21f 100755
--- a/scripts/atomic/fallbacks/inc_and_test
+++ b/scripts/atomic/fallbacks/inc_and_test
@@ -1,15 +1,15 @@
 cat <<EOF
 /**
- * ${atomic}_inc_and_test - increment and test
+ * ${arch}${atomic}_inc_and_test - increment and test
  * @v: pointer of type ${atomic}_t
  *
  * Atomically increments @v by 1
  * and returns true if the result is zero, or false for all
  * other cases.
  */
-static inline bool
-${atomic}_inc_and_test(${atomic}_t *v)
+static __always_inline bool
+${arch}${atomic}_inc_and_test(${atomic}_t *v)
 {
-        return ${atomic}_inc_return(v) == 0;
+        return ${arch}${atomic}_inc_return(v) == 0;
 }
 EOF
diff --git a/scripts/atomic/fallbacks/inc_not_zero b/scripts/atomic/fallbacks/inc_not_zero
index a7c45c8d107c..50f2d4d48279 100755
--- a/scripts/atomic/fallbacks/inc_not_zero
+++ b/scripts/atomic/fallbacks/inc_not_zero
@@ -1,14 +1,14 @@
 cat <<EOF
 /**
- * ${atomic}_inc_not_zero - increment unless the number is zero
+ * ${arch}${atomic}_inc_not_zero - increment unless the number is zero
  * @v: pointer of type ${atomic}_t
  *
  * Atomically increments @v by 1, if @v is non-zero.
  * Returns true if the increment was done.
  */
-static inline bool
-${atomic}_inc_not_zero(${atomic}_t *v)
+static __always_inline bool
+${arch}${atomic}_inc_not_zero(${atomic}_t *v)
 {
-        return ${atomic}_add_unless(v, 1, 0);
+        return ${arch}${atomic}_add_unless(v, 1, 0);
 }
 EOF
diff --git a/scripts/atomic/fallbacks/inc_unless_negative b/scripts/atomic/fallbacks/inc_unless_negative
index 0c266e71dbd4..87629e0d4a80 100755
--- a/scripts/atomic/fallbacks/inc_unless_negative
+++ b/scripts/atomic/fallbacks/inc_unless_negative
@@ -1,13 +1,13 @@
 cat <<EOF
-static inline bool
-${atomic}_inc_unless_negative(${atomic}_t *v)
+static __always_inline bool
+${arch}${atomic}_inc_unless_negative(${atomic}_t *v)
 {
-        ${int} c = ${atomic}_read(v);
+        ${int} c = ${arch}${atomic}_read(v);
 
         do {
                 if (unlikely(c < 0))
                         return false;
-        } while (!${atomic}_try_cmpxchg(v, &c, c + 1));
+        } while (!${arch}${atomic}_try_cmpxchg(v, &c, c + 1));
 
         return true;
 }
diff --git a/scripts/atomic/fallbacks/read_acquire b/scripts/atomic/fallbacks/read_acquire
index 75863b5203f7..341a88dccaa7 100755
--- a/scripts/atomic/fallbacks/read_acquire
+++ b/scripts/atomic/fallbacks/read_acquire
@@ -1,6 +1,6 @@
 cat <<EOF
-static inline ${ret}
-${atomic}_read_acquire(const ${atomic}_t *v)
+static __always_inline ${ret}
+${arch}${atomic}_read_acquire(const ${atomic}_t *v)
 {
         return smp_load_acquire(&(v)->counter);
 }
diff --git a/scripts/atomic/fallbacks/release b/scripts/atomic/fallbacks/release
index 3f628a3802d9..f8906d537c0f 100755
--- a/scripts/atomic/fallbacks/release
+++ b/scripts/atomic/fallbacks/release
@@ -1,8 +1,8 @@
 cat <<EOF
-static inline ${ret}
-${atomic}_${pfx}${name}${sfx}_release(${params})
+static __always_inline ${ret}
+${arch}${atomic}_${pfx}${name}${sfx}_release(${params})
 {
         __atomic_release_fence();
-        ${retstmt}${atomic}_${pfx}${name}${sfx}_relaxed(${args});
+        ${retstmt}${arch}${atomic}_${pfx}${name}${sfx}_relaxed(${args});
 }
 EOF
diff --git a/scripts/atomic/fallbacks/set_release b/scripts/atomic/fallbacks/set_release
index 45bb5e0cfc08..76068272d5f5 100755
--- a/scripts/atomic/fallbacks/set_release
+++ b/scripts/atomic/fallbacks/set_release
@@ -1,6 +1,6 @@
 cat <<EOF
-static inline void
-${atomic}_set_release(${atomic}_t *v, ${int} i)
+static __always_inline void
+${arch}${atomic}_set_release(${atomic}_t *v, ${int} i)
 {
         smp_store_release(&(v)->counter, i);
 }
diff --git a/scripts/atomic/fallbacks/sub_and_test b/scripts/atomic/fallbacks/sub_and_test
index 289ef17a2d7a..c580f4c2136e 100755
--- a/scripts/atomic/fallbacks/sub_and_test
+++ b/scripts/atomic/fallbacks/sub_and_test
@@ -1,6 +1,6 @@
 cat <<EOF
 /**
- * ${atomic}_sub_and_test - subtract value from variable and test result
+ * ${arch}${atomic}_sub_and_test - subtract value from variable and test result
  * @i: integer value to subtract
  * @v: pointer of type ${atomic}_t
  *
@@ -8,9 +8,9 @@ cat <<EOF
  * true if the result is zero, or false for all
  * other cases.
  */
-static inline bool
-${atomic}_sub_and_test(${int} i, ${atomic}_t *v)
+static __always_inline bool
+${arch}${atomic}_sub_and_test(${int} i, ${atomic}_t *v)
 {
-        return ${atomic}_sub_return(i, v) == 0;
+        return ${arch}${atomic}_sub_return(i, v) == 0;
 }
 EOF
diff --git a/scripts/atomic/fallbacks/try_cmpxchg b/scripts/atomic/fallbacks/try_cmpxchg
index 4ed85e2f5378..06db0f738e45 100755
--- a/scripts/atomic/fallbacks/try_cmpxchg
+++ b/scripts/atomic/fallbacks/try_cmpxchg
@@ -1,9 +1,9 @@
 cat <<EOF
-static inline bool
-${atomic}_try_cmpxchg${order}(${atomic}_t *v, ${int} *old, ${int} new)
+static __always_inline bool
+${arch}${atomic}_try_cmpxchg${order}(${atomic}_t *v, ${int} *old, ${int} new)
 {
         ${int} r, o = *old;
-        r = ${atomic}_cmpxchg${order}(v, o, new);
+        r = ${arch}${atomic}_cmpxchg${order}(v, o, new);
         if (unlikely(r != o))
                 *old = r;
         return likely(r == o);
diff --git a/scripts/atomic/gen-atomic-fallback.sh b/scripts/atomic/gen-atomic-fallback.sh
index 1bd7c1707633..0fd1cf0c2b94 100755
--- a/scripts/atomic/gen-atomic-fallback.sh
+++ b/scripts/atomic/gen-atomic-fallback.sh
@@ -2,10 +2,11 @@
 # SPDX-License-Identifier: GPL-2.0
 
 ATOMICDIR=$(dirname $0)
+ARCH=$2
 
 . ${ATOMICDIR}/atomic-tbl.sh
 
-#gen_template_fallback(template, meta, pfx, name, sfx, order, atomic, int, args...)
+#gen_template_fallback(template, meta, pfx, name, sfx, order, arch, atomic, int, args...)
 gen_template_fallback()
 {
         local template="$1"; shift
@@ -14,10 +15,11 @@ gen_template_fallback()
         local name="$1"; shift
         local sfx="$1"; shift
         local order="$1"; shift
+        local arch="$1"; shift
         local atomic="$1"; shift
         local int="$1"; shift
 
-        local atomicname="${atomic}_${pfx}${name}${sfx}${order}"
+        local atomicname="${arch}${atomic}_${pfx}${name}${sfx}${order}"
 
         local ret="$(gen_ret_type "${meta}" "${int}")"
         local retstmt="$(gen_ret_stmt "${meta}")"
@@ -32,7 +34,7 @@ gen_template_fallback()
         fi
 }
 
-#gen_proto_fallback(meta, pfx, name, sfx, order, atomic, int, args...)
+#gen_proto_fallback(meta, pfx, name, sfx, order, arch, atomic, int, args...)
 gen_proto_fallback()
 {
         local meta="$1"; shift
@@ -56,16 +58,17 @@ cat << EOF
 EOF
 }
 
-#gen_proto_order_variants(meta, pfx, name, sfx, atomic, int, args...)
+#gen_proto_order_variants(meta, pfx, name, sfx, arch, atomic, int, args...)
 gen_proto_order_variants()
 {
         local meta="$1"; shift
         local pfx="$1"; shift
         local name="$1"; shift
         local sfx="$1"; shift
-        local atomic="$1"
+        local arch="$1"
+        local atomic="$2"
 
-        local basename="${atomic}_${pfx}${name}${sfx}"
+        local basename="${arch}${atomic}_${pfx}${name}${sfx}"
 
         local template="$(find_fallback_template "${pfx}" "${name}" "${sfx}" "${order}")"
 
@@ -94,7 +97,7 @@ gen_proto_order_variants()
         gen_basic_fallbacks "${basename}"
 
         if [ ! -z "${template}" ]; then
-                printf "#endif /* ${atomic}_${pfx}${name}${sfx} */\n\n"
+                printf "#endif /* ${arch}${atomic}_${pfx}${name}${sfx} */\n\n"
                 gen_proto_fallback "${meta}" "${pfx}" "${name}" "${sfx}" "" "$@"
                 gen_proto_fallback "${meta}" "${pfx}" "${name}" "${sfx}" "_acquire" "$@"
                 gen_proto_fallback "${meta}" "${pfx}" "${name}" "${sfx}" "_release" "$@"
@@ -149,20 +152,19 @@ cat << EOF
 #ifndef _LINUX_ATOMIC_FALLBACK_H
 #define _LINUX_ATOMIC_FALLBACK_H
 
+#include <linux/compiler.h>
+
 EOF
 
-for xchg in "xchg" "cmpxchg" "cmpxchg64"; do
+for xchg in "${ARCH}xchg" "${ARCH}cmpxchg" "${ARCH}cmpxchg64"; do
         gen_xchg_fallbacks "${xchg}"
 done
 
 grep '^[a-z]' "$1" | while read name meta args; do
-        gen_proto "${meta}" "${name}" "atomic" "int" ${args}
+        gen_proto "${meta}" "${name}" "${ARCH}" "atomic" "int" ${args}
 done
 
 cat <<EOF
-#define atomic_cond_read_acquire(v, c) smp_cond_load_acquire(&(v)->counter, (c))
-#define atomic_cond_read_relaxed(v, c) smp_cond_load_relaxed(&(v)->counter, (c))
-
 #ifdef CONFIG_GENERIC_ATOMIC64
 #include <asm-generic/atomic64.h>
 #endif
@@ -170,12 +172,9 @@ cat <<EOF
 EOF
 
 grep '^[a-z]' "$1" | while read name meta args; do
-        gen_proto "${meta}" "${name}" "atomic64" "s64" ${args}
+        gen_proto "${meta}" "${name}" "${ARCH}" "atomic64" "s64" ${args}
 done
 
 cat <<EOF
-#define atomic64_cond_read_acquire(v, c) smp_cond_load_acquire(&(v)->counter, (c))
-#define atomic64_cond_read_relaxed(v, c) smp_cond_load_relaxed(&(v)->counter, (c))
-
 #endif /* _LINUX_ATOMIC_FALLBACK_H */
 EOF
diff --git a/scripts/atomic/gen-atomics.sh b/scripts/atomic/gen-atomics.sh
index 000dc6437893..d29e159ef489 100644
--- a/scripts/atomic/gen-atomics.sh
+++ b/scripts/atomic/gen-atomics.sh
@@ -10,10 +10,11 @@ LINUXDIR=${ATOMICDIR}/../..
 cat <<EOF |
 gen-atomic-instrumented.sh      asm-generic/atomic-instrumented.h
 gen-atomic-long.sh              asm-generic/atomic-long.h
+gen-atomic-fallback.sh          linux/atomic-arch-fallback.h    arch_
 gen-atomic-fallback.sh          linux/atomic-fallback.h
 EOF
-while read script header; do
-        /bin/sh ${ATOMICDIR}/${script} ${ATOMICTBL} > ${LINUXDIR}/include/${header}
+while read script header args; do
+        /bin/sh ${ATOMICDIR}/${script} ${ATOMICTBL} ${args} > ${LINUXDIR}/include/${header}
         HASH="$(sha1sum ${LINUXDIR}/include/${header})"
         HASH="${HASH%% *}"
         printf "// %s\n" "${HASH}" >> ${LINUXDIR}/include/${header}
```