Mock Version: 2.15
ENTER ['do_with_status'](['bash', '--login', '-c', '/usr/bin/rpmbuild -bs --target x86_64 --nodeps /builddir/build/SPECS/apache-spark3.spec'], chrootPath='/var/lib/mock/epel-8-x86_64-1640069965.608899/root'env={'TERM': 'vt100', 'SHELL': '/bin/bash', 'HOME': '/builddir', 'HOSTNAME': 'mock', 'PATH': '/usr/bin:/bin:/usr/sbin:/sbin', 'PROMPT_COMMAND': 'printf "\\033]0;\\007"', 'PS1': ' \\s-\\v\\$ ', 'LANG': 'C.UTF-8'}shell=Falselogger=timeout=0uid=1000gid=135user='mockbuild'nspawn_args=['--capability=cap_ipc_lock', '--rlimit=RLIMIT_NOFILE=10240', '--capability=cap_ipc_lock', '--bind=/tmp/mock-resolv.pw5w4nnb:/etc/resolv.conf', '--bind=/dev/btrfs-control', '--bind=/dev/loop-control', '--bind=/dev/loop0', '--bind=/dev/loop1', '--bind=/dev/loop2', '--bind=/dev/loop3', '--bind=/dev/loop4', '--bind=/dev/loop5', '--bind=/dev/loop6', '--bind=/dev/loop7', '--bind=/dev/loop8', '--bind=/dev/loop9', '--bind=/dev/loop10', '--bind=/dev/loop11']unshare_net=TrueprintOutput=True)
Using nspawn with args ['--capability=cap_ipc_lock', '--rlimit=RLIMIT_NOFILE=10240', '--capability=cap_ipc_lock', '--bind=/tmp/mock-resolv.pw5w4nnb:/etc/resolv.conf', '--bind=/dev/btrfs-control', '--bind=/dev/loop-control', '--bind=/dev/loop0', '--bind=/dev/loop1', '--bind=/dev/loop2', '--bind=/dev/loop3', '--bind=/dev/loop4', '--bind=/dev/loop5', '--bind=/dev/loop6', '--bind=/dev/loop7', '--bind=/dev/loop8', '--bind=/dev/loop9', '--bind=/dev/loop10', '--bind=/dev/loop11']
Executing command: ['/usr/bin/systemd-nspawn', '-q', '-M', '06a356e2ea3b4373adf46536538b4d47', '-D', '/var/lib/mock/epel-8-x86_64-1640069965.608899/root', '-a', '-u', 'mockbuild', '--capability=cap_ipc_lock', '--rlimit=RLIMIT_NOFILE=10240', '--capability=cap_ipc_lock', '--bind=/tmp/mock-resolv.pw5w4nnb:/etc/resolv.conf', '--bind=/dev/btrfs-control', '--bind=/dev/loop-control', '--bind=/dev/loop0', '--bind=/dev/loop1', '--bind=/dev/loop2', '--bind=/dev/loop3', '--bind=/dev/loop4', '--bind=/dev/loop5', '--bind=/dev/loop6', '--bind=/dev/loop7', '--bind=/dev/loop8', '--bind=/dev/loop9', '--bind=/dev/loop10', '--bind=/dev/loop11', '--console=pipe', '--setenv=TERM=vt100', '--setenv=SHELL=/bin/bash', '--setenv=HOME=/builddir', '--setenv=HOSTNAME=mock', '--setenv=PATH=/usr/bin:/bin:/usr/sbin:/sbin', '--setenv=PROMPT_COMMAND=printf "\\033]0;\\007"', '--setenv=PS1= \\s-\\v\\$ ', '--setenv=LANG=C.UTF-8', '--resolv-conf=off', 'bash', '--login', '-c', '/usr/bin/rpmbuild -bs --target x86_64 --nodeps /builddir/build/SPECS/apache-spark3.spec'] with env {'TERM': 'vt100', 'SHELL': '/bin/bash', 'HOME': '/builddir', 'HOSTNAME': 'mock', 'PATH': '/usr/bin:/bin:/usr/sbin:/sbin', 'PROMPT_COMMAND': 'printf "\\033]0;\\007"', 'PS1': ' \\s-\\v\\$ ', 'LANG': 'C.UTF-8', 'SYSTEMD_NSPAWN_TMPFS_TMP': '0'} and shell False
Building target platforms: x86_64
Building for target x86_64
Wrote: /builddir/build/SRPMS/apache-spark3-3.2.0-0.el8.src.rpm
Child return code was: 0
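The source RPM written above is the input for the binary (-bb) build that follows. For reference, an equivalent build can be reproduced outside this environment with mock itself; a minimal sketch, assuming the stock epel-8-x86_64 config and a local copy of the SRPM (paths are illustrative):

    # Rebuild the source RPM in a fresh EPEL 8 chroot, as this log does
    mock -r epel-8-x86_64 --rebuild apache-spark3-3.2.0-0.el8.src.rpm
    # The resulting noarch RPM lands in the configured result directory
    ls /var/lib/mock/epel-8-x86_64/result/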
ENTER ['do_with_status'](['bash', '--login', '-c', '/usr/bin/rpmbuild -bb --target x86_64 --nodeps /builddir/build/SPECS/apache-spark3.spec'], chrootPath='/var/lib/mock/epel-8-x86_64-1640069965.608899/root'env={'TERM': 'vt100', 'SHELL': '/bin/bash', 'HOME': '/builddir', 'HOSTNAME': 'mock', 'PATH': '/usr/bin:/bin:/usr/sbin:/sbin', 'PROMPT_COMMAND': 'printf "\\033]0;\\007"', 'PS1': ' \\s-\\v\\$ ', 'LANG': 'C.UTF-8'}shell=Falselogger=timeout=0uid=1000gid=135user='mockbuild'nspawn_args=['--capability=cap_ipc_lock', '--rlimit=RLIMIT_NOFILE=10240', '--capability=cap_ipc_lock', '--bind=/tmp/mock-resolv.pw5w4nnb:/etc/resolv.conf', '--bind=/dev/btrfs-control', '--bind=/dev/loop-control', '--bind=/dev/loop0', '--bind=/dev/loop1', '--bind=/dev/loop2', '--bind=/dev/loop3', '--bind=/dev/loop4', '--bind=/dev/loop5', '--bind=/dev/loop6', '--bind=/dev/loop7', '--bind=/dev/loop8', '--bind=/dev/loop9', '--bind=/dev/loop10', '--bind=/dev/loop11']unshare_net=TrueprintOutput=True)
Using nspawn with args ['--capability=cap_ipc_lock', '--rlimit=RLIMIT_NOFILE=10240', '--capability=cap_ipc_lock', '--bind=/tmp/mock-resolv.pw5w4nnb:/etc/resolv.conf', '--bind=/dev/btrfs-control', '--bind=/dev/loop-control', '--bind=/dev/loop0', '--bind=/dev/loop1', '--bind=/dev/loop2', '--bind=/dev/loop3', '--bind=/dev/loop4', '--bind=/dev/loop5', '--bind=/dev/loop6', '--bind=/dev/loop7', '--bind=/dev/loop8', '--bind=/dev/loop9', '--bind=/dev/loop10', '--bind=/dev/loop11']
Executing command: ['/usr/bin/systemd-nspawn', '-q', '-M', 'cc13717595ca4abf95205c9684d355c4', '-D', '/var/lib/mock/epel-8-x86_64-1640069965.608899/root', '-a', '-u', 'mockbuild', '--capability=cap_ipc_lock', '--rlimit=RLIMIT_NOFILE=10240', '--capability=cap_ipc_lock', '--bind=/tmp/mock-resolv.pw5w4nnb:/etc/resolv.conf', '--bind=/dev/btrfs-control', '--bind=/dev/loop-control', '--bind=/dev/loop0', '--bind=/dev/loop1', '--bind=/dev/loop2', '--bind=/dev/loop3', '--bind=/dev/loop4', '--bind=/dev/loop5', '--bind=/dev/loop6', '--bind=/dev/loop7', '--bind=/dev/loop8', '--bind=/dev/loop9', '--bind=/dev/loop10', '--bind=/dev/loop11', '--console=pipe', '--setenv=TERM=vt100', '--setenv=SHELL=/bin/bash', '--setenv=HOME=/builddir', '--setenv=HOSTNAME=mock', '--setenv=PATH=/usr/bin:/bin:/usr/sbin:/sbin', '--setenv=PROMPT_COMMAND=printf "\\033]0;\\007"', '--setenv=PS1= \\s-\\v\\$ ', '--setenv=LANG=C.UTF-8', '--resolv-conf=off', 'bash', '--login', '-c', '/usr/bin/rpmbuild -bb --target x86_64 --nodeps /builddir/build/SPECS/apache-spark3.spec'] with env {'TERM': 'vt100', 'SHELL': '/bin/bash', 'HOME': '/builddir', 'HOSTNAME': 'mock', 'PATH': '/usr/bin:/bin:/usr/sbin:/sbin', 'PROMPT_COMMAND': 'printf "\\033]0;\\007"', 'PS1': ' \\s-\\v\\$ ', 'LANG': 'C.UTF-8', 'SYSTEMD_NSPAWN_TMPFS_TMP': '0'} and shell False
Building target platforms: x86_64
Building for target x86_64
Executing(%prep): /bin/sh -e /var/tmp/rpm-tmp.t5f7av
+ umask 022
+ cd /builddir/build/BUILD
+ cd /builddir/build/BUILD
+ rm -rf spark-3.2.0-bin-hadoop3.2
+ /usr/bin/gzip -dc /builddir/build/SOURCES/spark-3.2.0-bin-hadoop3.2.tgz
+ /usr/bin/tar -xof -
+ STATUS=0
+ '[' 0 -ne 0 ']'
+ cd spark-3.2.0-bin-hadoop3.2
+ /usr/bin/chmod -Rf a+rX,u+w,g-w,o-w .
+ exit 0
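The %prep trace above (rm -rf, gzip -dc piped into tar, then chmod) is the standard expansion of rpm's %setup macro. A minimal sketch of the corresponding spec stanza, assuming the packager relies on the stock macro and only overrides the unpacked directory name:

    %prep
    # The binary distribution unpacks to spark-3.2.0-bin-hadoop3.2 rather than
    # %{name}-%{version}, so the directory has to be named explicitly with -n
    %setup -q -n spark-3.2.0-bin-hadoop3.2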
Executing(%build): /bin/sh -e /var/tmp/rpm-tmp.p4OkUu
+ umask 022
+ cd /builddir/build/BUILD
+ cd spark-3.2.0-bin-hadoop3.2
+ exit 0
Executing(%install): /bin/sh -e /var/tmp/rpm-tmp.1eACZv
+ umask 022
+ cd /builddir/build/BUILD
+ '[' /builddir/build/BUILDROOT/apache-spark3-3.2.0-0.el8.x86_64 '!=' / ']'
+ rm -rf /builddir/build/BUILDROOT/apache-spark3-3.2.0-0.el8.x86_64
++ dirname /builddir/build/BUILDROOT/apache-spark3-3.2.0-0.el8.x86_64
+ mkdir -p /builddir/build/BUILDROOT
+ mkdir /builddir/build/BUILDROOT/apache-spark3-3.2.0-0.el8.x86_64
+ cd spark-3.2.0-bin-hadoop3.2
+ mkdir -p /builddir/build/BUILDROOT/apache-spark3-3.2.0-0.el8.x86_64/opt/apache/spark-3.2.0-bin-hadoop3.2
+ mkdir -p /builddir/build/BUILDROOT/apache-spark3-3.2.0-0.el8.x86_64//usr/bin
+ mkdir -p /builddir/build/BUILDROOT/apache-spark3-3.2.0-0.el8.x86_64//usr/sbin
+ mkdir -p /builddir/build/BUILDROOT/apache-spark3-3.2.0-0.el8.x86_64//usr/lib/systemd/system
+ mkdir -p /builddir/build/BUILDROOT/apache-spark3-3.2.0-0.el8.x86_64//var/lib/spark3
+ mkdir -p /builddir/build/BUILDROOT/apache-spark3-3.2.0-0.el8.x86_64//etc/sysconfig
+ mkdir -p /builddir/build/BUILDROOT/apache-spark3-3.2.0-0.el8.x86_64//etc/spark3
+ mkdir -p /builddir/build/BUILDROOT/apache-spark3-3.2.0-0.el8.x86_64//var/log/spark3/
+ mkdir -p /builddir/build/BUILDROOT/apache-spark3-3.2.0-0.el8.x86_64//var/log/spark3/event_log/
+ mkdir -p /builddir/build/BUILDROOT/apache-spark3-3.2.0-0.el8.x86_64//var/lib/spark3/warehouse/
+ mkdir -p /builddir/build/BUILDROOT/apache-spark3-3.2.0-0.el8.x86_64//usr/share/apache-spark3/
+ cp /builddir/build/SOURCES/spark3-hive-metastore.sql /builddir/build/BUILDROOT/apache-spark3-3.2.0-0.el8.x86_64//usr/share/apache-spark3/hive-metastore.sql
+ cp -r LICENSE NOTICE R README.md RELEASE bin conf data examples jars kubernetes licenses python sbin yarn /builddir/build/BUILDROOT/apache-spark3-3.2.0-0.el8.x86_64/opt/apache/spark-3.2.0-bin-hadoop3.2
+ cp -r conf/fairscheduler.xml.template conf/log4j.properties.template conf/metrics.properties.template conf/spark-defaults.conf.template conf/spark-env.sh.template conf/workers.template /builddir/build/BUILDROOT/apache-spark3-3.2.0-0.el8.x86_64//etc/spark3
+ ln -s ./spark-3.2.0-bin-hadoop3.2 /builddir/build/BUILDROOT/apache-spark3-3.2.0-0.el8.x86_64/opt/apache/spark3
+ cat
+ cat
+ cat
+ cat
+ cat
+ cat
+ cat
+ cat
+ cat
+ cat
+ cat
+ cat
+ cat
+ cat
+ cat
+ cat
+ cat
+ '[' -f /usr/bin/pathfix3.6.py ']'
+ pathfix=/usr/bin/pathfix.py
+ '[' -z s ']'
+ shebang_flags=-kas
+ /usr/bin/pathfix.py -pni /usr/libexec/platform-python -kas /builddir/build/BUILDROOT/apache-spark3-3.2.0-0.el8.x86_64/opt/apache/spark-3.2.0-bin-hadoop3.2/bin/
recursedown('/builddir/build/BUILDROOT/apache-spark3-3.2.0-0.el8.x86_64/opt/apache/spark-3.2.0-bin-hadoop3.2/bin/')
+ '[' -f /usr/bin/pathfix3.6.py ']'
+ pathfix=/usr/bin/pathfix.py
+ '[' -z s ']'
+ shebang_flags=-kas
+ /usr/bin/pathfix.py -pni /usr/libexec/platform-python -kas /builddir/build/BUILDROOT/apache-spark3-3.2.0-0.el8.x86_64/opt/apache/spark-3.2.0-bin-hadoop3.2/python/pyspark/find_spark_home.py
/builddir/build/BUILDROOT/apache-spark3-3.2.0-0.el8.x86_64/opt/apache/spark-3.2.0-bin-hadoop3.2/python/pyspark/find_spark_home.py: updating
+ '[' -f /usr/bin/pathfix3.6.py ']'
+ pathfix=/usr/bin/pathfix.py
+ '[' -z s ']'
+ shebang_flags=-kas
+ /usr/bin/pathfix.py -pni /usr/libexec/platform-python -kas /builddir/build/BUILDROOT/apache-spark3-3.2.0-0.el8.x86_64/opt/apache/spark-3.2.0-bin-hadoop3.2/python/run-tests.py
/builddir/build/BUILDROOT/apache-spark3-3.2.0-0.el8.x86_64/opt/apache/spark-3.2.0-bin-hadoop3.2/python/run-tests.py: updating
+ cat
+ /usr/lib/rpm/find-debuginfo.sh -j2 --strict-build-id -m -i --build-id-seed 3.2.0-0.el8 --unique-debug-suffix -3.2.0-0.el8.x86_64 --unique-debug-src-base apache-spark3-3.2.0-0.el8.x86_64 --run-dwz --dwz-low-mem-die-limit 10000000 --dwz-max-die-limit 110000000 -S debugsourcefiles.list /builddir/build/BUILD/spark-3.2.0-bin-hadoop3.2
find: 'debug': No such file or directory
+ /usr/lib/rpm/check-buildroot
+ /usr/lib/rpm/redhat/brp-ldconfig
/sbin/ldconfig: Warning: ignoring configuration file that cannot be opened: /builddir/build/BUILDROOT/apache-spark3-3.2.0-0.el8.x86_64/etc/ld.so.conf: No such file or directory
+ /usr/lib/rpm/brp-compress
+ /usr/lib/rpm/brp-strip-static-archive /usr/bin/strip
+ /usr/lib/rpm/brp-python-bytecompile '' 1
+ /usr/lib/rpm/brp-python-hardlink
+ PYTHON3=/usr/libexec/platform-python
+ /usr/lib/rpm/redhat/brp-mangle-shebangs
mangling shebang in /etc/spark3/spark-env.sh.template from /usr/bin/env bash to #!/usr/bin/bash
mangling shebang in /opt/apache/spark-3.2.0-bin-hadoop3.2/sbin/stop-master.sh from /usr/bin/env bash to #!/usr/bin/bash
mangling shebang in /opt/apache/spark-3.2.0-bin-hadoop3.2/sbin/start-mesos-dispatcher.sh from /usr/bin/env bash to #!/usr/bin/bash
mangling shebang in /opt/apache/spark-3.2.0-bin-hadoop3.2/sbin/stop-all.sh from /usr/bin/env bash to #!/usr/bin/bash
mangling shebang in /opt/apache/spark-3.2.0-bin-hadoop3.2/sbin/stop-slaves.sh from /usr/bin/env bash to #!/usr/bin/bash
mangling shebang in /opt/apache/spark-3.2.0-bin-hadoop3.2/sbin/stop-workers.sh from /usr/bin/env bash to #!/usr/bin/bash
mangling shebang in /opt/apache/spark-3.2.0-bin-hadoop3.2/sbin/spark-daemons.sh from /usr/bin/env bash to #!/usr/bin/bash
mangling shebang in /opt/apache/spark-3.2.0-bin-hadoop3.2/sbin/stop-slave.sh from /usr/bin/env bash to #!/usr/bin/bash
mangling shebang in /opt/apache/spark-3.2.0-bin-hadoop3.2/sbin/start-all.sh from /usr/bin/env bash to #!/usr/bin/bash
mangling shebang in /opt/apache/spark-3.2.0-bin-hadoop3.2/sbin/start-slave.sh from /usr/bin/env bash to #!/usr/bin/bash
mangling shebang in /opt/apache/spark-3.2.0-bin-hadoop3.2/sbin/stop-thriftserver.sh from /usr/bin/env bash to #!/usr/bin/bash
mangling shebang in /opt/apache/spark-3.2.0-bin-hadoop3.2/sbin/start-thriftserver.sh from /usr/bin/env bash to #!/usr/bin/bash
mangling shebang in /opt/apache/spark-3.2.0-bin-hadoop3.2/sbin/stop-history-server.sh from /usr/bin/env bash to #!/usr/bin/bash
mangling shebang in /opt/apache/spark-3.2.0-bin-hadoop3.2/sbin/slaves.sh from /usr/bin/env bash to #!/usr/bin/bash
mangling shebang in /opt/apache/spark-3.2.0-bin-hadoop3.2/sbin/decommission-slave.sh from /usr/bin/env bash to #!/usr/bin/bash
mangling shebang in /opt/apache/spark-3.2.0-bin-hadoop3.2/sbin/start-mesos-shuffle-service.sh from /usr/bin/env bash to #!/usr/bin/bash
mangling shebang in /opt/apache/spark-3.2.0-bin-hadoop3.2/sbin/decommission-worker.sh from /usr/bin/env bash to #!/usr/bin/bash
mangling shebang in /opt/apache/spark-3.2.0-bin-hadoop3.2/sbin/stop-mesos-dispatcher.sh from /usr/bin/env bash to #!/usr/bin/bash
mangling shebang in /opt/apache/spark-3.2.0-bin-hadoop3.2/sbin/stop-worker.sh from /usr/bin/env bash to #!/usr/bin/bash
mangling shebang in /opt/apache/spark-3.2.0-bin-hadoop3.2/sbin/spark-daemon.sh from /usr/bin/env bash to #!/usr/bin/bash
mangling shebang in /opt/apache/spark-3.2.0-bin-hadoop3.2/sbin/start-slaves.sh from /usr/bin/env bash to #!/usr/bin/bash
mangling shebang in /opt/apache/spark-3.2.0-bin-hadoop3.2/sbin/start-history-server.sh from /usr/bin/env bash to #!/usr/bin/bash
*** WARNING: ./opt/apache/spark-3.2.0-bin-hadoop3.2/sbin/spark-config.sh is executable but has no shebang, removing executable bit
mangling shebang in /opt/apache/spark-3.2.0-bin-hadoop3.2/sbin/start-worker.sh from /usr/bin/env bash to #!/usr/bin/bash
mangling shebang in /opt/apache/spark-3.2.0-bin-hadoop3.2/sbin/workers.sh from /usr/bin/env bash to #!/usr/bin/bash
mangling shebang in /opt/apache/spark-3.2.0-bin-hadoop3.2/sbin/start-master.sh from /usr/bin/env bash to #!/usr/bin/bash
mangling shebang in /opt/apache/spark-3.2.0-bin-hadoop3.2/sbin/stop-mesos-shuffle-service.sh from /usr/bin/env bash to #!/usr/bin/bash
mangling shebang in /opt/apache/spark-3.2.0-bin-hadoop3.2/sbin/start-workers.sh from /usr/bin/env bash to #!/usr/bin/bash
mangling shebang in /opt/apache/spark-3.2.0-bin-hadoop3.2/python/run-tests from /usr/bin/env bash to #!/usr/bin/bash
*** WARNING: ./opt/apache/spark-3.2.0-bin-hadoop3.2/python/pyspark/ml/feature.py is executable but has no shebang, removing executable bit
*** WARNING: ./opt/apache/spark-3.2.0-bin-hadoop3.2/python/test_support/hello/hello.txt is executable but has no shebang, removing executable bit
*** WARNING: ./opt/apache/spark-3.2.0-bin-hadoop3.2/python/test_support/userlibrary.py is executable but has no shebang, removing executable bit
mangling shebang in /opt/apache/spark-3.2.0-bin-hadoop3.2/python/setup.py from /usr/bin/env python3 to #!/usr/libexec/platform-python
mangling shebang in /opt/apache/spark-3.2.0-bin-hadoop3.2/python/run-tests-with-coverage from /usr/bin/env bash to #!/usr/bin/bash
mangling shebang in /opt/apache/spark-3.2.0-bin-hadoop3.2/kubernetes/dockerfiles/spark/decom.sh from /usr/bin/env bash to #!/usr/bin/bash
*** WARNING: ./opt/apache/spark-3.2.0-bin-hadoop3.2/examples/src/main/python/pi.py is executable but has no shebang, removing executable bit
*** WARNING: ./opt/apache/spark-3.2.0-bin-hadoop3.2/examples/src/main/python/transitive_closure.py is executable but has no shebang, removing executable bit
*** WARNING: ./opt/apache/spark-3.2.0-bin-hadoop3.2/examples/src/main/python/sort.py is executable but has no shebang, removing executable bit
*** WARNING: ./opt/apache/spark-3.2.0-bin-hadoop3.2/examples/src/main/python/pagerank.py is executable but has no shebang, removing executable bit
*** WARNING: ./opt/apache/spark-3.2.0-bin-hadoop3.2/examples/src/main/python/wordcount.py is executable but has no shebang, removing executable bit
*** WARNING: ./opt/apache/spark-3.2.0-bin-hadoop3.2/examples/src/main/python/mllib/random_rdd_generation.py is executable but has no shebang, removing executable bit
*** WARNING: ./opt/apache/spark-3.2.0-bin-hadoop3.2/examples/src/main/python/mllib/sampled_rdds.py is executable but has no shebang, removing executable bit
*** WARNING: ./opt/apache/spark-3.2.0-bin-hadoop3.2/examples/src/main/python/mllib/logistic_regression.py is executable but has no shebang, removing executable bit
*** WARNING: ./opt/apache/spark-3.2.0-bin-hadoop3.2/examples/src/main/python/mllib/correlations.py is executable but has no shebang, removing executable bit
*** WARNING: ./opt/apache/spark-3.2.0-bin-hadoop3.2/examples/src/main/python/mllib/kmeans.py is executable but has no shebang, removing executable bit
*** WARNING: ./opt/apache/spark-3.2.0-bin-hadoop3.2/examples/src/main/python/als.py is executable but has no shebang, removing executable bit
*** WARNING: ./opt/apache/spark-3.2.0-bin-hadoop3.2/examples/src/main/python/logistic_regression.py is executable but has no shebang, removing executable bit
*** WARNING: ./opt/apache/spark-3.2.0-bin-hadoop3.2/examples/src/main/python/kmeans.py is executable but has no shebang, removing executable bit
mangling shebang in /opt/apache/spark-3.2.0-bin-hadoop3.2/examples/src/main/scripts/getGpusResources.sh from /usr/bin/env bash to #!/usr/bin/bash
*** WARNING: ./opt/apache/spark-3.2.0-bin-hadoop3.2/data/mllib/sample_linear_regression_data.txt is executable but has no shebang, removing executable bit
mangling shebang in /opt/apache/spark-3.2.0-bin-hadoop3.2/conf/spark-env.sh.template from /usr/bin/env bash to #!/usr/bin/bash
mangling shebang in /opt/apache/spark-3.2.0-bin-hadoop3.2/bin/spark-shell from /usr/bin/env bash to #!/usr/bin/bash
mangling shebang in /opt/apache/spark-3.2.0-bin-hadoop3.2/bin/run-example from /usr/bin/env bash to #!/usr/bin/bash
*** WARNING: ./opt/apache/spark-3.2.0-bin-hadoop3.2/bin/spark-class2.cmd is executable but has no shebang, removing executable bit
mangling shebang in /opt/apache/spark-3.2.0-bin-hadoop3.2/bin/beeline from /usr/bin/env bash to #!/usr/bin/bash
mangling shebang in /opt/apache/spark-3.2.0-bin-hadoop3.2/bin/find-spark-home from /usr/bin/env bash to #!/usr/bin/bash
mangling shebang in /opt/apache/spark-3.2.0-bin-hadoop3.2/bin/pyspark from /usr/bin/env bash to #!/usr/bin/bash
mangling shebang in /opt/apache/spark-3.2.0-bin-hadoop3.2/bin/docker-image-tool.sh from /usr/bin/env bash to #!/usr/bin/bash
mangling shebang in /opt/apache/spark-3.2.0-bin-hadoop3.2/bin/spark-sql from /usr/bin/env bash to #!/usr/bin/bash
mangling shebang in /opt/apache/spark-3.2.0-bin-hadoop3.2/bin/spark-class from /usr/bin/env bash to #!/usr/bin/bash
mangling shebang in /opt/apache/spark-3.2.0-bin-hadoop3.2/bin/spark-submit from /usr/bin/env bash to #!/usr/bin/bash
Processing files: apache-spark3-3.2.0-0.el8.noarch
Provides: apache-spark3 = 3.2.0-0.el8 config(apache-spark3) = 3.2.0-0.el8
Requires(interp): /bin/sh /bin/sh /bin/sh /bin/sh
Requires(rpmlib): rpmlib(CompressedFileNames) <= 3.0.4-1 rpmlib(FileDigests) <= 4.6.0-1 rpmlib(PayloadFilesHavePrefix) <= 4.0-1
Requires(pre): /bin/sh shadow-utils
Requires(post): /bin/sh
Requires(preun): /bin/sh
Requires(postun): /bin/sh
Requires: /bin/bash /usr/bin/bash /usr/libexec/platform-python
Checking for unpackaged file(s): /usr/lib/rpm/check-files /builddir/build/BUILDROOT/apache-spark3-3.2.0-0.el8.x86_64
Wrote: /builddir/build/RPMS/apache-spark3-3.2.0-0.el8.noarch.rpm
Executing(%clean): /bin/sh -e /var/tmp/rpm-tmp.EKPFFw
+ umask 022
+ cd /builddir/build/BUILD
+ cd spark-3.2.0-bin-hadoop3.2
+ /usr/bin/rm -rf /builddir/build/BUILDROOT/apache-spark3-3.2.0-0.el8.x86_64
+ exit 0
Child return code was: 0
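Most of the output above comes from brp-mangle-shebangs rewriting /usr/bin/env shebangs in the bundled Spark scripts. If that rewriting is unwanted, the redhat-rpm-config macros on EL8 provide opt-outs; a hedged sketch of the spec-level knobs (the path regex is illustrative, not taken from this spec):

    # Disable shebang mangling for the whole package...
    %undefine __brp_mangle_shebangs
    # ...or skip only files whose buildroot-relative paths match a regex
    %global __brp_mangle_shebangs_exclude_from ^/opt/apache/spark-.*/(examples|data)/.*$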