Mock Version: 3.5 ENTER ['do_with_status'](['bash', '--login', '-c', '/usr/bin/rpmbuild -bs --target x86_64 --nodeps /builddir/build/SPECS/python-azure-synapse-spark.spec'], chrootPath='/var/lib/mock/fedora-rawhide-x86_64-1674296134.647424/root'env={'TERM': 'vt100', 'SHELL': '/bin/bash', 'HOME': '/builddir', 'HOSTNAME': 'mock', 'PATH': '/usr/bin:/bin:/usr/sbin:/sbin', 'PROMPT_COMMAND': 'printf "\\033]0;\\007"', 'PS1': ' \\s-\\v\\$ ', 'LANG': 'C.UTF-8'}shell=Falselogger=timeout=0uid=1000gid=135user='mockbuild'nspawn_args=['--capability=cap_ipc_lock', '--rlimit=RLIMIT_NOFILE=10240', '--capability=cap_ipc_lock', '--bind=/tmp/mock-resolv.srlgrwue:/etc/resolv.conf', '--bind=/dev/btrfs-control', '--bind=/dev/mapper/control', '--bind=/dev/loop-control', '--bind=/dev/loop0', '--bind=/dev/loop1', '--bind=/dev/loop2', '--bind=/dev/loop3', '--bind=/dev/loop4', '--bind=/dev/loop5', '--bind=/dev/loop6', '--bind=/dev/loop7', '--bind=/dev/loop8', '--bind=/dev/loop9', '--bind=/dev/loop10', '--bind=/dev/loop11']unshare_net=TrueprintOutput=True) Using nspawn with args ['--capability=cap_ipc_lock', '--rlimit=RLIMIT_NOFILE=10240', '--capability=cap_ipc_lock', '--bind=/tmp/mock-resolv.srlgrwue:/etc/resolv.conf', '--bind=/dev/btrfs-control', '--bind=/dev/mapper/control', '--bind=/dev/loop-control', '--bind=/dev/loop0', '--bind=/dev/loop1', '--bind=/dev/loop2', '--bind=/dev/loop3', '--bind=/dev/loop4', '--bind=/dev/loop5', '--bind=/dev/loop6', '--bind=/dev/loop7', '--bind=/dev/loop8', '--bind=/dev/loop9', '--bind=/dev/loop10', '--bind=/dev/loop11'] Executing command: ['/usr/bin/systemd-nspawn', '-q', '-M', 'd9101a46d7c544c4b8eb57513ac854fa', '-D', '/var/lib/mock/fedora-rawhide-x86_64-1674296134.647424/root', '-a', '-u', 'mockbuild', '--capability=cap_ipc_lock', '--rlimit=RLIMIT_NOFILE=10240', '--capability=cap_ipc_lock', '--bind=/tmp/mock-resolv.srlgrwue:/etc/resolv.conf', '--bind=/dev/btrfs-control', '--bind=/dev/mapper/control', '--bind=/dev/loop-control', '--bind=/dev/loop0', '--bind=/dev/loop1', '--bind=/dev/loop2', '--bind=/dev/loop3', '--bind=/dev/loop4', '--bind=/dev/loop5', '--bind=/dev/loop6', '--bind=/dev/loop7', '--bind=/dev/loop8', '--bind=/dev/loop9', '--bind=/dev/loop10', '--bind=/dev/loop11', '--console=pipe', '--setenv=TERM=vt100', '--setenv=SHELL=/bin/bash', '--setenv=HOME=/builddir', '--setenv=HOSTNAME=mock', '--setenv=PATH=/usr/bin:/bin:/usr/sbin:/sbin', '--setenv=PROMPT_COMMAND=printf "\\033]0;\\007"', '--setenv=PS1= \\s-\\v\\$ ', '--setenv=LANG=C.UTF-8', '--resolv-conf=off', 'bash', '--login', '-c', '/usr/bin/rpmbuild -bs --target x86_64 --nodeps /builddir/build/SPECS/python-azure-synapse-spark.spec'] with env {'TERM': 'vt100', 'SHELL': '/bin/bash', 'HOME': '/builddir', 'HOSTNAME': 'mock', 'PATH': '/usr/bin:/bin:/usr/sbin:/sbin', 'PROMPT_COMMAND': 'printf "\\033]0;\\007"', 'PS1': ' \\s-\\v\\$ ', 'LANG': 'C.UTF-8', 'SYSTEMD_NSPAWN_TMPFS_TMP': '0', 'SYSTEMD_SECCOMP': '0'} and shell False Building target platforms: x86_64 Building for target x86_64 setting SOURCE_DATE_EPOCH=1674172800 Wrote: /builddir/build/SRPMS/python-azure-synapse-spark-0.2.0-9.fc38.src.rpm Child return code was: 0 ENTER ['do_with_status'](['bash', '--login', '-c', '/usr/bin/rpmbuild -br --target x86_64 --nodeps /builddir/build/SPECS/python-azure-synapse-spark.spec'], chrootPath='/var/lib/mock/fedora-rawhide-x86_64-1674296134.647424/root'env={'TERM': 'vt100', 'SHELL': '/bin/bash', 'HOME': '/builddir', 'HOSTNAME': 'mock', 'PATH': '/usr/bin:/bin:/usr/sbin:/sbin', 'PROMPT_COMMAND': 'printf "\\033]0;\\007"', 'PS1': ' \\s-\\v\\$ 
', 'LANG': 'C.UTF-8'}shell=Falselogger=timeout=0uid=1000gid=135user='mockbuild'nspawn_args=['--capability=cap_ipc_lock', '--rlimit=RLIMIT_NOFILE=10240', '--capability=cap_ipc_lock', '--bind=/tmp/mock-resolv.srlgrwue:/etc/resolv.conf', '--bind=/dev/btrfs-control', '--bind=/dev/mapper/control', '--bind=/dev/loop-control', '--bind=/dev/loop0', '--bind=/dev/loop1', '--bind=/dev/loop2', '--bind=/dev/loop3', '--bind=/dev/loop4', '--bind=/dev/loop5', '--bind=/dev/loop6', '--bind=/dev/loop7', '--bind=/dev/loop8', '--bind=/dev/loop9', '--bind=/dev/loop10', '--bind=/dev/loop11']unshare_net=TrueraiseExc=FalseprintOutput=True) Using nspawn with args ['--capability=cap_ipc_lock', '--rlimit=RLIMIT_NOFILE=10240', '--capability=cap_ipc_lock', '--bind=/tmp/mock-resolv.srlgrwue:/etc/resolv.conf', '--bind=/dev/btrfs-control', '--bind=/dev/mapper/control', '--bind=/dev/loop-control', '--bind=/dev/loop0', '--bind=/dev/loop1', '--bind=/dev/loop2', '--bind=/dev/loop3', '--bind=/dev/loop4', '--bind=/dev/loop5', '--bind=/dev/loop6', '--bind=/dev/loop7', '--bind=/dev/loop8', '--bind=/dev/loop9', '--bind=/dev/loop10', '--bind=/dev/loop11'] Executing command: ['/usr/bin/systemd-nspawn', '-q', '-M', '43d89462d77e4525a6eeb6ea43ccb2f0', '-D', '/var/lib/mock/fedora-rawhide-x86_64-1674296134.647424/root', '-a', '-u', 'mockbuild', '--capability=cap_ipc_lock', '--rlimit=RLIMIT_NOFILE=10240', '--capability=cap_ipc_lock', '--bind=/tmp/mock-resolv.srlgrwue:/etc/resolv.conf', '--bind=/dev/btrfs-control', '--bind=/dev/mapper/control', '--bind=/dev/loop-control', '--bind=/dev/loop0', '--bind=/dev/loop1', '--bind=/dev/loop2', '--bind=/dev/loop3', '--bind=/dev/loop4', '--bind=/dev/loop5', '--bind=/dev/loop6', '--bind=/dev/loop7', '--bind=/dev/loop8', '--bind=/dev/loop9', '--bind=/dev/loop10', '--bind=/dev/loop11', '--console=pipe', '--setenv=TERM=vt100', '--setenv=SHELL=/bin/bash', '--setenv=HOME=/builddir', '--setenv=HOSTNAME=mock', '--setenv=PATH=/usr/bin:/bin:/usr/sbin:/sbin', '--setenv=PROMPT_COMMAND=printf "\\033]0;\\007"', '--setenv=PS1= \\s-\\v\\$ ', '--setenv=LANG=C.UTF-8', '--resolv-conf=off', 'bash', '--login', '-c', '/usr/bin/rpmbuild -br --target x86_64 --nodeps /builddir/build/SPECS/python-azure-synapse-spark.spec'] with env {'TERM': 'vt100', 'SHELL': '/bin/bash', 'HOME': '/builddir', 'HOSTNAME': 'mock', 'PATH': '/usr/bin:/bin:/usr/sbin:/sbin', 'PROMPT_COMMAND': 'printf "\\033]0;\\007"', 'PS1': ' \\s-\\v\\$ ', 'LANG': 'C.UTF-8', 'SYSTEMD_NSPAWN_TMPFS_TMP': '0', 'SYSTEMD_SECCOMP': '0'} and shell False Building target platforms: x86_64 Building for target x86_64 setting SOURCE_DATE_EPOCH=1674172800 Executing(%prep): /bin/sh -e /var/tmp/rpm-tmp.DSVJKy + umask 022 + cd /builddir/build/BUILD + cd /builddir/build/BUILD + rm -rf azure-synapse-spark-0.2.0 + /usr/lib/rpm/rpmuncompress -x /builddir/build/SOURCES/azure-synapse-spark-0.2.0.zip + STATUS=0 + '[' 0 -ne 0 ']' + cd azure-synapse-spark-0.2.0 + /usr/bin/chmod -Rf a+rX,u+w,g-w,o-w . 
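Everything in this log is mock driving rpmbuild inside a systemd-nspawn container: the ENTER ['do_with_status'] dumps above show the exact argv, environment and bind mounts used for each pass. As a point of reference only (this invocation is assumed, it is not part of the log), the same flow can be reproduced from the SRPM written above with a plain mock rebuild:

  # hypothetical local reproduction; the config name matches the chroot path seen in this log
  mock -r fedora-rawhide-x86_64 --rebuild python-azure-synapse-spark-0.2.0-9.fc38.src.rpm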
+ RPM_EC=0 ++ jobs -p + exit 0 Executing(%generate_buildrequires): /bin/sh -e /var/tmp/rpm-tmp.bfiE8M + umask 022 + cd /builddir/build/BUILD + cd azure-synapse-spark-0.2.0 + CFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -m64 -mtune=generic -fasynchronous-unwind-tables -fstack-clash-protection -fcf-protection -fno-omit-frame-pointer -mno-omit-leaf-frame-pointer' + export CFLAGS + CXXFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -m64 -mtune=generic -fasynchronous-unwind-tables -fstack-clash-protection -fcf-protection -fno-omit-frame-pointer -mno-omit-leaf-frame-pointer' + export CXXFLAGS + FFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -m64 -mtune=generic -fasynchronous-unwind-tables -fstack-clash-protection -fcf-protection -fno-omit-frame-pointer -mno-omit-leaf-frame-pointer -I/usr/lib64/gfortran/modules' + export FFLAGS + FCFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -m64 -mtune=generic -fasynchronous-unwind-tables -fstack-clash-protection -fcf-protection -fno-omit-frame-pointer -mno-omit-leaf-frame-pointer -I/usr/lib64/gfortran/modules' + export FCFLAGS + VALAFLAGS=-g + export VALAFLAGS + LDFLAGS='-Wl,-z,relro -Wl,--as-needed -Wl,-z,now -specs=/usr/lib/rpm/redhat/redhat-hardened-ld -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -Wl,--build-id=sha1 ' + export LDFLAGS + LT_SYS_LIBRARY_PATH=/usr/lib64: + export LT_SYS_LIBRARY_PATH + CC=gcc + export CC + CXX=g++ + export CXX + echo pyproject-rpm-macros + echo python3-devel + echo 'python3dist(pip) >= 19' + echo 'python3dist(packaging)' + '[' -f pyproject.toml ']' + '[' -f setup.py ']' + echo 'python3dist(setuptools) >= 40.8' + echo 'python3dist(wheel)' + rm -rfv '*.dist-info/' + '[' -f /usr/bin/python3 ']' + mkdir -p /builddir/build/BUILD/azure-synapse-spark-0.2.0/.pyproject-builddir + CFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -m64 -mtune=generic -fasynchronous-unwind-tables -fstack-clash-protection -fcf-protection -fno-omit-frame-pointer -mno-omit-leaf-frame-pointer' + LDFLAGS='-Wl,-z,relro -Wl,--as-needed -Wl,-z,now -specs=/usr/lib/rpm/redhat/redhat-hardened-ld -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -Wl,--build-id=sha1 ' + TMPDIR=/builddir/build/BUILD/azure-synapse-spark-0.2.0/.pyproject-builddir + RPM_TOXENV=py311 + HOSTNAME=rpmbuild + /usr/bin/python3 -Bs 
/usr/lib/rpm/redhat/pyproject_buildrequires.py --generate-extras --python3_pkgversion 3 --wheeldir /builddir/build/BUILD/azure-synapse-spark-0.2.0/pyproject-wheeldir Handling setuptools >= 40.8 from default build backend Requirement not satisfied: setuptools >= 40.8 Handling wheel from default build backend Requirement not satisfied: wheel Exiting dependency generation pass: build backend + RPM_EC=0 ++ jobs -p + exit 0 Wrote: /builddir/build/SRPMS/python-azure-synapse-spark-0.2.0-9.fc38.buildreqs.nosrc.rpm Child return code was: 11 Dynamic buildrequires detected Going to install missing buildrequires. See root.log for details. ENTER ['do_with_status'](['bash', '--login', '-c', '/usr/bin/rpmbuild -br --target x86_64 --nodeps /builddir/build/SPECS/python-azure-synapse-spark.spec'], chrootPath='/var/lib/mock/fedora-rawhide-x86_64-1674296134.647424/root'env={'TERM': 'vt100', 'SHELL': '/bin/bash', 'HOME': '/builddir', 'HOSTNAME': 'mock', 'PATH': '/usr/bin:/bin:/usr/sbin:/sbin', 'PROMPT_COMMAND': 'printf "\\033]0;\\007"', 'PS1': ' \\s-\\v\\$ ', 'LANG': 'C.UTF-8'}shell=Falselogger=timeout=0uid=1000gid=135user='mockbuild'nspawn_args=['--capability=cap_ipc_lock', '--rlimit=RLIMIT_NOFILE=10240', '--capability=cap_ipc_lock', '--bind=/tmp/mock-resolv.srlgrwue:/etc/resolv.conf', '--bind=/dev/btrfs-control', '--bind=/dev/mapper/control', '--bind=/dev/loop-control', '--bind=/dev/loop0', '--bind=/dev/loop1', '--bind=/dev/loop2', '--bind=/dev/loop3', '--bind=/dev/loop4', '--bind=/dev/loop5', '--bind=/dev/loop6', '--bind=/dev/loop7', '--bind=/dev/loop8', '--bind=/dev/loop9', '--bind=/dev/loop10', '--bind=/dev/loop11']unshare_net=TrueraiseExc=FalseprintOutput=True) Using nspawn with args ['--capability=cap_ipc_lock', '--rlimit=RLIMIT_NOFILE=10240', '--capability=cap_ipc_lock', '--bind=/tmp/mock-resolv.srlgrwue:/etc/resolv.conf', '--bind=/dev/btrfs-control', '--bind=/dev/mapper/control', '--bind=/dev/loop-control', '--bind=/dev/loop0', '--bind=/dev/loop1', '--bind=/dev/loop2', '--bind=/dev/loop3', '--bind=/dev/loop4', '--bind=/dev/loop5', '--bind=/dev/loop6', '--bind=/dev/loop7', '--bind=/dev/loop8', '--bind=/dev/loop9', '--bind=/dev/loop10', '--bind=/dev/loop11'] Executing command: ['/usr/bin/systemd-nspawn', '-q', '-M', 'b8e91fd83ef74dc8a968fb640f1ba164', '-D', '/var/lib/mock/fedora-rawhide-x86_64-1674296134.647424/root', '-a', '-u', 'mockbuild', '--capability=cap_ipc_lock', '--rlimit=RLIMIT_NOFILE=10240', '--capability=cap_ipc_lock', '--bind=/tmp/mock-resolv.srlgrwue:/etc/resolv.conf', '--bind=/dev/btrfs-control', '--bind=/dev/mapper/control', '--bind=/dev/loop-control', '--bind=/dev/loop0', '--bind=/dev/loop1', '--bind=/dev/loop2', '--bind=/dev/loop3', '--bind=/dev/loop4', '--bind=/dev/loop5', '--bind=/dev/loop6', '--bind=/dev/loop7', '--bind=/dev/loop8', '--bind=/dev/loop9', '--bind=/dev/loop10', '--bind=/dev/loop11', '--console=pipe', '--setenv=TERM=vt100', '--setenv=SHELL=/bin/bash', '--setenv=HOME=/builddir', '--setenv=HOSTNAME=mock', '--setenv=PATH=/usr/bin:/bin:/usr/sbin:/sbin', '--setenv=PROMPT_COMMAND=printf "\\033]0;\\007"', '--setenv=PS1= \\s-\\v\\$ ', '--setenv=LANG=C.UTF-8', '--resolv-conf=off', 'bash', '--login', '-c', '/usr/bin/rpmbuild -br --target x86_64 --nodeps /builddir/build/SPECS/python-azure-synapse-spark.spec'] with env {'TERM': 'vt100', 'SHELL': '/bin/bash', 'HOME': '/builddir', 'HOSTNAME': 'mock', 'PATH': '/usr/bin:/bin:/usr/sbin:/sbin', 'PROMPT_COMMAND': 'printf "\\033]0;\\007"', 'PS1': ' \\s-\\v\\$ ', 'LANG': 'C.UTF-8', 'SYSTEMD_NSPAWN_TMPFS_TMP': '0', 'SYSTEMD_SECCOMP': '0'} 
and shell False Building target platforms: x86_64 Building for target x86_64 setting SOURCE_DATE_EPOCH=1674172800 Executing(%prep): /bin/sh -e /var/tmp/rpm-tmp.Ve36KJ + umask 022 + cd /builddir/build/BUILD + cd /builddir/build/BUILD + rm -rf azure-synapse-spark-0.2.0 + /usr/lib/rpm/rpmuncompress -x /builddir/build/SOURCES/azure-synapse-spark-0.2.0.zip + STATUS=0 + '[' 0 -ne 0 ']' + cd azure-synapse-spark-0.2.0 + /usr/bin/chmod -Rf a+rX,u+w,g-w,o-w . + RPM_EC=0 ++ jobs -p + exit 0 Executing(%generate_buildrequires): /bin/sh -e /var/tmp/rpm-tmp.AWaHtR + umask 022 + cd /builddir/build/BUILD + cd azure-synapse-spark-0.2.0 + CFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -m64 -mtune=generic -fasynchronous-unwind-tables -fstack-clash-protection -fcf-protection -fno-omit-frame-pointer -mno-omit-leaf-frame-pointer' + export CFLAGS + CXXFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -m64 -mtune=generic -fasynchronous-unwind-tables -fstack-clash-protection -fcf-protection -fno-omit-frame-pointer -mno-omit-leaf-frame-pointer' + export CXXFLAGS + FFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -m64 -mtune=generic -fasynchronous-unwind-tables -fstack-clash-protection -fcf-protection -fno-omit-frame-pointer -mno-omit-leaf-frame-pointer -I/usr/lib64/gfortran/modules' + export FFLAGS + FCFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -m64 -mtune=generic -fasynchronous-unwind-tables -fstack-clash-protection -fcf-protection -fno-omit-frame-pointer -mno-omit-leaf-frame-pointer -I/usr/lib64/gfortran/modules' + export FCFLAGS + VALAFLAGS=-g + export VALAFLAGS + LDFLAGS='-Wl,-z,relro -Wl,--as-needed -Wl,-z,now -specs=/usr/lib/rpm/redhat/redhat-hardened-ld -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -Wl,--build-id=sha1 ' + export LDFLAGS + LT_SYS_LIBRARY_PATH=/usr/lib64: + export LT_SYS_LIBRARY_PATH + CC=gcc + export CC + CXX=g++ + export CXX + echo pyproject-rpm-macros + echo python3-devel + echo 'python3dist(pip) >= 19' + echo 'python3dist(packaging)' + '[' -f pyproject.toml ']' + '[' -f setup.py ']' + echo 'python3dist(setuptools) >= 40.8' + echo 'python3dist(wheel)' + rm -rfv '*.dist-info/' + '[' -f /usr/bin/python3 ']' + mkdir -p /builddir/build/BUILD/azure-synapse-spark-0.2.0/.pyproject-builddir + CFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -m64 
-mtune=generic -fasynchronous-unwind-tables -fstack-clash-protection -fcf-protection -fno-omit-frame-pointer -mno-omit-leaf-frame-pointer' + LDFLAGS='-Wl,-z,relro -Wl,--as-needed -Wl,-z,now -specs=/usr/lib/rpm/redhat/redhat-hardened-ld -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -Wl,--build-id=sha1 ' + TMPDIR=/builddir/build/BUILD/azure-synapse-spark-0.2.0/.pyproject-builddir + RPM_TOXENV=py311 + HOSTNAME=rpmbuild + /usr/bin/python3 -Bs /usr/lib/rpm/redhat/pyproject_buildrequires.py --generate-extras --python3_pkgversion 3 --wheeldir /builddir/build/BUILD/azure-synapse-spark-0.2.0/pyproject-wheeldir Handling setuptools >= 40.8 from default build backend Requirement satisfied: setuptools >= 40.8 (installed: setuptools 65.5.1) Handling wheel from default build backend Requirement satisfied: wheel (installed: wheel 0.38.4) warning: no files found matching '*.py' under directory 'tests' warning: no files found matching '*.yaml' under directory 'tests' HOOK STDOUT: running egg_info HOOK STDOUT: writing azure_synapse_spark.egg-info/PKG-INFO HOOK STDOUT: writing dependency_links to azure_synapse_spark.egg-info/dependency_links.txt HOOK STDOUT: writing requirements to azure_synapse_spark.egg-info/requires.txt HOOK STDOUT: writing top-level names to azure_synapse_spark.egg-info/top_level.txt HOOK STDOUT: reading manifest file 'azure_synapse_spark.egg-info/SOURCES.txt' HOOK STDOUT: reading manifest template 'MANIFEST.in' HOOK STDOUT: writing manifest file 'azure_synapse_spark.egg-info/SOURCES.txt' Handling wheel from get_requires_for_build_wheel Requirement satisfied: wheel (installed: wheel 0.38.4) warning: no files found matching '*.py' under directory 'tests' warning: no files found matching '*.yaml' under directory 'tests' HOOK STDOUT: running dist_info HOOK STDOUT: writing azure_synapse_spark.egg-info/PKG-INFO HOOK STDOUT: writing dependency_links to azure_synapse_spark.egg-info/dependency_links.txt HOOK STDOUT: writing requirements to azure_synapse_spark.egg-info/requires.txt HOOK STDOUT: writing top-level names to azure_synapse_spark.egg-info/top_level.txt HOOK STDOUT: reading manifest file 'azure_synapse_spark.egg-info/SOURCES.txt' HOOK STDOUT: reading manifest template 'MANIFEST.in' HOOK STDOUT: writing manifest file 'azure_synapse_spark.egg-info/SOURCES.txt' HOOK STDOUT: creating '/builddir/build/BUILD/azure-synapse-spark-0.2.0/azure_synapse_spark-0.2.0.dist-info' Handling msrest (>=0.5.0) from hook generated metadata: Requires-Dist Requirement not satisfied: msrest (>=0.5.0) Handling azure-common (~=1.1) from hook generated metadata: Requires-Dist Requirement not satisfied: azure-common (~=1.1) Handling azure-core (<2.0.0,>=1.6.0) from hook generated metadata: Requires-Dist Requirement not satisfied: azure-core (<2.0.0,>=1.6.0) Handling azure-synapse-nspkg ; python_version<'3.0' from hook generated metadata: Requires-Dist Ignoring alien requirement: azure-synapse-nspkg ; python_version<'3.0' + RPM_EC=0 ++ jobs -p + exit 0 Wrote: /builddir/build/SRPMS/python-azure-synapse-spark-0.2.0-9.fc38.buildreqs.nosrc.rpm Child return code was: 11 Dynamic buildrequires detected Going to install missing buildrequires. See root.log for details. 
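This is the dynamic BuildRequires loop: rpmbuild -br stops after %generate_buildrequires, writes the *.buildreqs.nosrc.rpm seen above and exits with code 11; mock then installs whatever that package requires (see root.log) and re-runs -br until a pass reports every generated requirement as satisfied, after which it performs the real build without repeating %prep. Paraphrased from this log, the sequence of rpmbuild calls inside the chroot is:

  rpmbuild -bs --target x86_64 --nodeps SPECS/python-azure-synapse-spark.spec   # write the SRPM
  rpmbuild -br --target x86_64 --nodeps SPECS/python-azure-synapse-spark.spec   # exit 11, buildreqs written
  # mock installs the generated BuildRequires, then retries:
  rpmbuild -br --target x86_64 --nodeps SPECS/python-azure-synapse-spark.spec   # exit 11 again, more deps needed
  rpmbuild -br --target x86_64 --nodeps SPECS/python-azure-synapse-spark.spec   # all requirements satisfied
  rpmbuild -ba --noprep --target x86_64 --nodeps SPECS/python-azure-synapse-spark.spec   # the real build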
ENTER ['do_with_status'](['bash', '--login', '-c', '/usr/bin/rpmbuild -br --target x86_64 --nodeps /builddir/build/SPECS/python-azure-synapse-spark.spec'], chrootPath='/var/lib/mock/fedora-rawhide-x86_64-1674296134.647424/root'env={'TERM': 'vt100', 'SHELL': '/bin/bash', 'HOME': '/builddir', 'HOSTNAME': 'mock', 'PATH': '/usr/bin:/bin:/usr/sbin:/sbin', 'PROMPT_COMMAND': 'printf "\\033]0;\\007"', 'PS1': ' \\s-\\v\\$ ', 'LANG': 'C.UTF-8'}shell=Falselogger=timeout=0uid=1000gid=135user='mockbuild'nspawn_args=['--capability=cap_ipc_lock', '--rlimit=RLIMIT_NOFILE=10240', '--capability=cap_ipc_lock', '--bind=/tmp/mock-resolv.srlgrwue:/etc/resolv.conf', '--bind=/dev/btrfs-control', '--bind=/dev/mapper/control', '--bind=/dev/loop-control', '--bind=/dev/loop0', '--bind=/dev/loop1', '--bind=/dev/loop2', '--bind=/dev/loop3', '--bind=/dev/loop4', '--bind=/dev/loop5', '--bind=/dev/loop6', '--bind=/dev/loop7', '--bind=/dev/loop8', '--bind=/dev/loop9', '--bind=/dev/loop10', '--bind=/dev/loop11']unshare_net=TrueraiseExc=FalseprintOutput=True) Using nspawn with args ['--capability=cap_ipc_lock', '--rlimit=RLIMIT_NOFILE=10240', '--capability=cap_ipc_lock', '--bind=/tmp/mock-resolv.srlgrwue:/etc/resolv.conf', '--bind=/dev/btrfs-control', '--bind=/dev/mapper/control', '--bind=/dev/loop-control', '--bind=/dev/loop0', '--bind=/dev/loop1', '--bind=/dev/loop2', '--bind=/dev/loop3', '--bind=/dev/loop4', '--bind=/dev/loop5', '--bind=/dev/loop6', '--bind=/dev/loop7', '--bind=/dev/loop8', '--bind=/dev/loop9', '--bind=/dev/loop10', '--bind=/dev/loop11'] Executing command: ['/usr/bin/systemd-nspawn', '-q', '-M', '45b4267e056840bfb1d34613c3cea712', '-D', '/var/lib/mock/fedora-rawhide-x86_64-1674296134.647424/root', '-a', '-u', 'mockbuild', '--capability=cap_ipc_lock', '--rlimit=RLIMIT_NOFILE=10240', '--capability=cap_ipc_lock', '--bind=/tmp/mock-resolv.srlgrwue:/etc/resolv.conf', '--bind=/dev/btrfs-control', '--bind=/dev/mapper/control', '--bind=/dev/loop-control', '--bind=/dev/loop0', '--bind=/dev/loop1', '--bind=/dev/loop2', '--bind=/dev/loop3', '--bind=/dev/loop4', '--bind=/dev/loop5', '--bind=/dev/loop6', '--bind=/dev/loop7', '--bind=/dev/loop8', '--bind=/dev/loop9', '--bind=/dev/loop10', '--bind=/dev/loop11', '--console=pipe', '--setenv=TERM=vt100', '--setenv=SHELL=/bin/bash', '--setenv=HOME=/builddir', '--setenv=HOSTNAME=mock', '--setenv=PATH=/usr/bin:/bin:/usr/sbin:/sbin', '--setenv=PROMPT_COMMAND=printf "\\033]0;\\007"', '--setenv=PS1= \\s-\\v\\$ ', '--setenv=LANG=C.UTF-8', '--resolv-conf=off', 'bash', '--login', '-c', '/usr/bin/rpmbuild -br --target x86_64 --nodeps /builddir/build/SPECS/python-azure-synapse-spark.spec'] with env {'TERM': 'vt100', 'SHELL': '/bin/bash', 'HOME': '/builddir', 'HOSTNAME': 'mock', 'PATH': '/usr/bin:/bin:/usr/sbin:/sbin', 'PROMPT_COMMAND': 'printf "\\033]0;\\007"', 'PS1': ' \\s-\\v\\$ ', 'LANG': 'C.UTF-8', 'SYSTEMD_NSPAWN_TMPFS_TMP': '0', 'SYSTEMD_SECCOMP': '0'} and shell False Building target platforms: x86_64 Building for target x86_64 setting SOURCE_DATE_EPOCH=1674172800 Executing(%prep): /bin/sh -e /var/tmp/rpm-tmp.aEKhsO + umask 022 + cd /builddir/build/BUILD + cd /builddir/build/BUILD + rm -rf azure-synapse-spark-0.2.0 + /usr/lib/rpm/rpmuncompress -x /builddir/build/SOURCES/azure-synapse-spark-0.2.0.zip + STATUS=0 + '[' 0 -ne 0 ']' + cd azure-synapse-spark-0.2.0 + /usr/bin/chmod -Rf a+rX,u+w,g-w,o-w . 
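The SOURCE_DATE_EPOCH exported at the start of every pass pins file and archive timestamps for build reproducibility; on Fedora it is normally derived from the newest %changelog entry in the spec (an assumption here, since the spec itself is not in this log). The value used above decodes as:

  date -u -d @1674172800    # -> 2023-01-20 00:00:00 UTC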
+ RPM_EC=0 ++ jobs -p + exit 0 Executing(%generate_buildrequires): /bin/sh -e /var/tmp/rpm-tmp.x2OtnG + umask 022 + cd /builddir/build/BUILD + cd azure-synapse-spark-0.2.0 + CFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -m64 -mtune=generic -fasynchronous-unwind-tables -fstack-clash-protection -fcf-protection -fno-omit-frame-pointer -mno-omit-leaf-frame-pointer' + export CFLAGS + CXXFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -m64 -mtune=generic -fasynchronous-unwind-tables -fstack-clash-protection -fcf-protection -fno-omit-frame-pointer -mno-omit-leaf-frame-pointer' + export CXXFLAGS + FFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -m64 -mtune=generic -fasynchronous-unwind-tables -fstack-clash-protection -fcf-protection -fno-omit-frame-pointer -mno-omit-leaf-frame-pointer -I/usr/lib64/gfortran/modules' + export FFLAGS + FCFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -m64 -mtune=generic -fasynchronous-unwind-tables -fstack-clash-protection -fcf-protection -fno-omit-frame-pointer -mno-omit-leaf-frame-pointer -I/usr/lib64/gfortran/modules' + export FCFLAGS + VALAFLAGS=-g + export VALAFLAGS + LDFLAGS='-Wl,-z,relro -Wl,--as-needed -Wl,-z,now -specs=/usr/lib/rpm/redhat/redhat-hardened-ld -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -Wl,--build-id=sha1 ' + export LDFLAGS + LT_SYS_LIBRARY_PATH=/usr/lib64: + export LT_SYS_LIBRARY_PATH + CC=gcc + export CC + CXX=g++ + export CXX + echo pyproject-rpm-macros + echo python3-devel + echo 'python3dist(pip) >= 19' + echo 'python3dist(packaging)' + '[' -f pyproject.toml ']' + '[' -f setup.py ']' + echo 'python3dist(setuptools) >= 40.8' + echo 'python3dist(wheel)' + rm -rfv '*.dist-info/' + '[' -f /usr/bin/python3 ']' + mkdir -p /builddir/build/BUILD/azure-synapse-spark-0.2.0/.pyproject-builddir + CFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -m64 -mtune=generic -fasynchronous-unwind-tables -fstack-clash-protection -fcf-protection -fno-omit-frame-pointer -mno-omit-leaf-frame-pointer' + LDFLAGS='-Wl,-z,relro -Wl,--as-needed -Wl,-z,now -specs=/usr/lib/rpm/redhat/redhat-hardened-ld -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -Wl,--build-id=sha1 ' + TMPDIR=/builddir/build/BUILD/azure-synapse-spark-0.2.0/.pyproject-builddir + RPM_TOXENV=py311 + HOSTNAME=rpmbuild + /usr/bin/python3 -Bs 
/usr/lib/rpm/redhat/pyproject_buildrequires.py --generate-extras --python3_pkgversion 3 --wheeldir /builddir/build/BUILD/azure-synapse-spark-0.2.0/pyproject-wheeldir Handling setuptools >= 40.8 from default build backend Requirement satisfied: setuptools >= 40.8 (installed: setuptools 65.5.1) Handling wheel from default build backend Requirement satisfied: wheel (installed: wheel 0.38.4) warning: no files found matching '*.py' under directory 'tests' warning: no files found matching '*.yaml' under directory 'tests' HOOK STDOUT: running egg_info HOOK STDOUT: writing azure_synapse_spark.egg-info/PKG-INFO HOOK STDOUT: writing dependency_links to azure_synapse_spark.egg-info/dependency_links.txt HOOK STDOUT: writing requirements to azure_synapse_spark.egg-info/requires.txt HOOK STDOUT: writing top-level names to azure_synapse_spark.egg-info/top_level.txt HOOK STDOUT: reading manifest file 'azure_synapse_spark.egg-info/SOURCES.txt' HOOK STDOUT: reading manifest template 'MANIFEST.in' HOOK STDOUT: writing manifest file 'azure_synapse_spark.egg-info/SOURCES.txt' Handling wheel from get_requires_for_build_wheel Requirement satisfied: wheel (installed: wheel 0.38.4) warning: no files found matching '*.py' under directory 'tests' warning: no files found matching '*.yaml' under directory 'tests' HOOK STDOUT: running dist_info HOOK STDOUT: writing azure_synapse_spark.egg-info/PKG-INFO HOOK STDOUT: writing dependency_links to azure_synapse_spark.egg-info/dependency_links.txt HOOK STDOUT: writing requirements to azure_synapse_spark.egg-info/requires.txt HOOK STDOUT: writing top-level names to azure_synapse_spark.egg-info/top_level.txt HOOK STDOUT: reading manifest file 'azure_synapse_spark.egg-info/SOURCES.txt' HOOK STDOUT: reading manifest template 'MANIFEST.in' HOOK STDOUT: writing manifest file 'azure_synapse_spark.egg-info/SOURCES.txt' HOOK STDOUT: creating '/builddir/build/BUILD/azure-synapse-spark-0.2.0/azure_synapse_spark-0.2.0.dist-info' Handling msrest (>=0.5.0) from hook generated metadata: Requires-Dist Requirement satisfied: msrest (>=0.5.0) (installed: msrest 0.7.1) Handling azure-common (~=1.1) from hook generated metadata: Requires-Dist Requirement satisfied: azure-common (~=1.1) (installed: azure-common 1.1.28) Handling azure-core (<2.0.0,>=1.6.0) from hook generated metadata: Requires-Dist Requirement satisfied: azure-core (<2.0.0,>=1.6.0) (installed: azure-core 1.25.1) Handling azure-synapse-nspkg ; python_version<'3.0' from hook generated metadata: Requires-Dist Ignoring alien requirement: azure-synapse-nspkg ; python_version<'3.0' + RPM_EC=0 ++ jobs -p + exit 0 Wrote: /builddir/build/SRPMS/python-azure-synapse-spark-0.2.0-9.fc38.buildreqs.nosrc.rpm Child return code was: 11 Dynamic buildrequires detected Going to install missing buildrequires. See root.log for details. 
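At this point every Requires-Dist taken from the hook-generated metadata is satisfied inside the chroot (msrest 0.7.1, azure-common 1.1.28, azure-core 1.25.1) and the Python-2-only azure-synapse-nspkg requirement is ignored as alien; -br still exits 11 because it always stops after writing the buildreqs package, so mock moves on to the full -ba --noprep build seen next. The generated requirements are ordinary RPM capabilities of the form python3dist(name) and can be inspected with rpm, e.g. (illustrative query, not from this log):

  rpm -q --whatprovides 'python3dist(msrest)' 'python3dist(azure-common)' 'python3dist(azure-core)'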
ENTER ['do_with_status'](['bash', '--login', '-c', '/usr/bin/rpmbuild -ba --noprep --target x86_64 --nodeps /builddir/build/SPECS/python-azure-synapse-spark.spec'], chrootPath='/var/lib/mock/fedora-rawhide-x86_64-1674296134.647424/root'env={'TERM': 'vt100', 'SHELL': '/bin/bash', 'HOME': '/builddir', 'HOSTNAME': 'mock', 'PATH': '/usr/bin:/bin:/usr/sbin:/sbin', 'PROMPT_COMMAND': 'printf "\\033]0;\\007"', 'PS1': ' \\s-\\v\\$ ', 'LANG': 'C.UTF-8'}shell=Falselogger=timeout=0uid=1000gid=135user='mockbuild'nspawn_args=['--capability=cap_ipc_lock', '--rlimit=RLIMIT_NOFILE=10240', '--capability=cap_ipc_lock', '--bind=/tmp/mock-resolv.srlgrwue:/etc/resolv.conf', '--bind=/dev/btrfs-control', '--bind=/dev/mapper/control', '--bind=/dev/loop-control', '--bind=/dev/loop0', '--bind=/dev/loop1', '--bind=/dev/loop2', '--bind=/dev/loop3', '--bind=/dev/loop4', '--bind=/dev/loop5', '--bind=/dev/loop6', '--bind=/dev/loop7', '--bind=/dev/loop8', '--bind=/dev/loop9', '--bind=/dev/loop10', '--bind=/dev/loop11']unshare_net=TrueprintOutput=True) Using nspawn with args ['--capability=cap_ipc_lock', '--rlimit=RLIMIT_NOFILE=10240', '--capability=cap_ipc_lock', '--bind=/tmp/mock-resolv.srlgrwue:/etc/resolv.conf', '--bind=/dev/btrfs-control', '--bind=/dev/mapper/control', '--bind=/dev/loop-control', '--bind=/dev/loop0', '--bind=/dev/loop1', '--bind=/dev/loop2', '--bind=/dev/loop3', '--bind=/dev/loop4', '--bind=/dev/loop5', '--bind=/dev/loop6', '--bind=/dev/loop7', '--bind=/dev/loop8', '--bind=/dev/loop9', '--bind=/dev/loop10', '--bind=/dev/loop11'] Executing command: ['/usr/bin/systemd-nspawn', '-q', '-M', '79c8f3060cf6452ea4501881db8fb66c', '-D', '/var/lib/mock/fedora-rawhide-x86_64-1674296134.647424/root', '-a', '-u', 'mockbuild', '--capability=cap_ipc_lock', '--rlimit=RLIMIT_NOFILE=10240', '--capability=cap_ipc_lock', '--bind=/tmp/mock-resolv.srlgrwue:/etc/resolv.conf', '--bind=/dev/btrfs-control', '--bind=/dev/mapper/control', '--bind=/dev/loop-control', '--bind=/dev/loop0', '--bind=/dev/loop1', '--bind=/dev/loop2', '--bind=/dev/loop3', '--bind=/dev/loop4', '--bind=/dev/loop5', '--bind=/dev/loop6', '--bind=/dev/loop7', '--bind=/dev/loop8', '--bind=/dev/loop9', '--bind=/dev/loop10', '--bind=/dev/loop11', '--console=pipe', '--setenv=TERM=vt100', '--setenv=SHELL=/bin/bash', '--setenv=HOME=/builddir', '--setenv=HOSTNAME=mock', '--setenv=PATH=/usr/bin:/bin:/usr/sbin:/sbin', '--setenv=PROMPT_COMMAND=printf "\\033]0;\\007"', '--setenv=PS1= \\s-\\v\\$ ', '--setenv=LANG=C.UTF-8', '--resolv-conf=off', 'bash', '--login', '-c', '/usr/bin/rpmbuild -ba --noprep --target x86_64 --nodeps /builddir/build/SPECS/python-azure-synapse-spark.spec'] with env {'TERM': 'vt100', 'SHELL': '/bin/bash', 'HOME': '/builddir', 'HOSTNAME': 'mock', 'PATH': '/usr/bin:/bin:/usr/sbin:/sbin', 'PROMPT_COMMAND': 'printf "\\033]0;\\007"', 'PS1': ' \\s-\\v\\$ ', 'LANG': 'C.UTF-8', 'SYSTEMD_NSPAWN_TMPFS_TMP': '0', 'SYSTEMD_SECCOMP': '0'} and shell False Building target platforms: x86_64 Building for target x86_64 setting SOURCE_DATE_EPOCH=1674172800 Executing(%generate_buildrequires): /bin/sh -e /var/tmp/rpm-tmp.ZQC8TX + umask 022 + cd /builddir/build/BUILD + cd azure-synapse-spark-0.2.0 + CFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -m64 -mtune=generic -fasynchronous-unwind-tables -fstack-clash-protection 
-fcf-protection -fno-omit-frame-pointer -mno-omit-leaf-frame-pointer' + export CFLAGS + CXXFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -m64 -mtune=generic -fasynchronous-unwind-tables -fstack-clash-protection -fcf-protection -fno-omit-frame-pointer -mno-omit-leaf-frame-pointer' + export CXXFLAGS + FFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -m64 -mtune=generic -fasynchronous-unwind-tables -fstack-clash-protection -fcf-protection -fno-omit-frame-pointer -mno-omit-leaf-frame-pointer -I/usr/lib64/gfortran/modules' + export FFLAGS + FCFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -m64 -mtune=generic -fasynchronous-unwind-tables -fstack-clash-protection -fcf-protection -fno-omit-frame-pointer -mno-omit-leaf-frame-pointer -I/usr/lib64/gfortran/modules' + export FCFLAGS + VALAFLAGS=-g + export VALAFLAGS + LDFLAGS='-Wl,-z,relro -Wl,--as-needed -Wl,-z,now -specs=/usr/lib/rpm/redhat/redhat-hardened-ld -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -Wl,--build-id=sha1 ' + export LDFLAGS + LT_SYS_LIBRARY_PATH=/usr/lib64: + export LT_SYS_LIBRARY_PATH + CC=gcc + export CC + CXX=g++ + export CXX + echo pyproject-rpm-macros + echo python3-devel + echo 'python3dist(pip) >= 19' + echo 'python3dist(packaging)' + '[' -f pyproject.toml ']' + '[' -f setup.py ']' + echo 'python3dist(setuptools) >= 40.8' + echo 'python3dist(wheel)' + rm -rfv azure_synapse_spark-0.2.0.dist-info/ removed 'azure_synapse_spark-0.2.0.dist-info/METADATA' removed 'azure_synapse_spark-0.2.0.dist-info/top_level.txt' removed directory 'azure_synapse_spark-0.2.0.dist-info/' + '[' -f /usr/bin/python3 ']' + mkdir -p /builddir/build/BUILD/azure-synapse-spark-0.2.0/.pyproject-builddir + CFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -m64 -mtune=generic -fasynchronous-unwind-tables -fstack-clash-protection -fcf-protection -fno-omit-frame-pointer -mno-omit-leaf-frame-pointer' + LDFLAGS='-Wl,-z,relro -Wl,--as-needed -Wl,-z,now -specs=/usr/lib/rpm/redhat/redhat-hardened-ld -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -Wl,--build-id=sha1 ' + TMPDIR=/builddir/build/BUILD/azure-synapse-spark-0.2.0/.pyproject-builddir + RPM_TOXENV=py311 + HOSTNAME=rpmbuild + /usr/bin/python3 -Bs /usr/lib/rpm/redhat/pyproject_buildrequires.py --generate-extras --python3_pkgversion 3 --wheeldir /builddir/build/BUILD/azure-synapse-spark-0.2.0/pyproject-wheeldir Handling setuptools >= 40.8 from default build backend Requirement satisfied: setuptools >= 40.8 (installed: setuptools 65.5.1) Handling wheel from default build backend Requirement satisfied: wheel (installed: 
wheel 0.38.4) warning: no files found matching '*.py' under directory 'tests' warning: no files found matching '*.yaml' under directory 'tests' HOOK STDOUT: running egg_info HOOK STDOUT: writing azure_synapse_spark.egg-info/PKG-INFO HOOK STDOUT: writing dependency_links to azure_synapse_spark.egg-info/dependency_links.txt HOOK STDOUT: writing requirements to azure_synapse_spark.egg-info/requires.txt HOOK STDOUT: writing top-level names to azure_synapse_spark.egg-info/top_level.txt HOOK STDOUT: reading manifest file 'azure_synapse_spark.egg-info/SOURCES.txt' HOOK STDOUT: reading manifest template 'MANIFEST.in' HOOK STDOUT: writing manifest file 'azure_synapse_spark.egg-info/SOURCES.txt' Handling wheel from get_requires_for_build_wheel Requirement satisfied: wheel (installed: wheel 0.38.4) warning: no files found matching '*.py' under directory 'tests' warning: no files found matching '*.yaml' under directory 'tests' HOOK STDOUT: running dist_info HOOK STDOUT: writing azure_synapse_spark.egg-info/PKG-INFO HOOK STDOUT: writing dependency_links to azure_synapse_spark.egg-info/dependency_links.txt HOOK STDOUT: writing requirements to azure_synapse_spark.egg-info/requires.txt HOOK STDOUT: writing top-level names to azure_synapse_spark.egg-info/top_level.txt HOOK STDOUT: reading manifest file 'azure_synapse_spark.egg-info/SOURCES.txt' HOOK STDOUT: reading manifest template 'MANIFEST.in' HOOK STDOUT: writing manifest file 'azure_synapse_spark.egg-info/SOURCES.txt' HOOK STDOUT: creating '/builddir/build/BUILD/azure-synapse-spark-0.2.0/azure_synapse_spark-0.2.0.dist-info' Handling msrest (>=0.5.0) from hook generated metadata: Requires-Dist Requirement satisfied: msrest (>=0.5.0) (installed: msrest 0.7.1) Handling azure-common (~=1.1) from hook generated metadata: Requires-Dist Requirement satisfied: azure-common (~=1.1) (installed: azure-common 1.1.28) Handling azure-core (<2.0.0,>=1.6.0) from hook generated metadata: Requires-Dist Requirement satisfied: azure-core (<2.0.0,>=1.6.0) (installed: azure-core 1.25.1) Handling azure-synapse-nspkg ; python_version<'3.0' from hook generated metadata: Requires-Dist Ignoring alien requirement: azure-synapse-nspkg ; python_version<'3.0' + RPM_EC=0 ++ jobs -p + exit 0 Executing(%build): /bin/sh -e /var/tmp/rpm-tmp.MdlS49 + umask 022 + cd /builddir/build/BUILD + CFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -m64 -mtune=generic -fasynchronous-unwind-tables -fstack-clash-protection -fcf-protection -fno-omit-frame-pointer -mno-omit-leaf-frame-pointer' + export CFLAGS + CXXFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -m64 -mtune=generic -fasynchronous-unwind-tables -fstack-clash-protection -fcf-protection -fno-omit-frame-pointer -mno-omit-leaf-frame-pointer' + export CXXFLAGS + FFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong 
-specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -m64 -mtune=generic -fasynchronous-unwind-tables -fstack-clash-protection -fcf-protection -fno-omit-frame-pointer -mno-omit-leaf-frame-pointer -I/usr/lib64/gfortran/modules' + export FFLAGS + FCFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -m64 -mtune=generic -fasynchronous-unwind-tables -fstack-clash-protection -fcf-protection -fno-omit-frame-pointer -mno-omit-leaf-frame-pointer -I/usr/lib64/gfortran/modules' + export FCFLAGS + VALAFLAGS=-g + export VALAFLAGS + LDFLAGS='-Wl,-z,relro -Wl,--as-needed -Wl,-z,now -specs=/usr/lib/rpm/redhat/redhat-hardened-ld -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -Wl,--build-id=sha1 -specs=/usr/lib/rpm/redhat/redhat-package-notes' + export LDFLAGS + LT_SYS_LIBRARY_PATH=/usr/lib64: + export LT_SYS_LIBRARY_PATH + CC=gcc + export CC + CXX=g++ + export CXX + cd azure-synapse-spark-0.2.0 + mkdir -p /builddir/build/BUILD/azure-synapse-spark-0.2.0/.pyproject-builddir + CFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -m64 -mtune=generic -fasynchronous-unwind-tables -fstack-clash-protection -fcf-protection -fno-omit-frame-pointer -mno-omit-leaf-frame-pointer' + LDFLAGS='-Wl,-z,relro -Wl,--as-needed -Wl,-z,now -specs=/usr/lib/rpm/redhat/redhat-hardened-ld -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -Wl,--build-id=sha1 -specs=/usr/lib/rpm/redhat/redhat-package-notes' + TMPDIR=/builddir/build/BUILD/azure-synapse-spark-0.2.0/.pyproject-builddir + /usr/bin/python3 -Bs /usr/lib/rpm/redhat/pyproject_wheel.py /builddir/build/BUILD/azure-synapse-spark-0.2.0/pyproject-wheeldir Processing /builddir/build/BUILD/azure-synapse-spark-0.2.0 Preparing metadata (pyproject.toml): started Running command Preparing metadata (pyproject.toml) running dist_info creating /builddir/build/BUILD/azure-synapse-spark-0.2.0/.pyproject-builddir/pip-modern-metadata-5a8q339i/azure_synapse_spark.egg-info writing /builddir/build/BUILD/azure-synapse-spark-0.2.0/.pyproject-builddir/pip-modern-metadata-5a8q339i/azure_synapse_spark.egg-info/PKG-INFO writing dependency_links to /builddir/build/BUILD/azure-synapse-spark-0.2.0/.pyproject-builddir/pip-modern-metadata-5a8q339i/azure_synapse_spark.egg-info/dependency_links.txt writing requirements to /builddir/build/BUILD/azure-synapse-spark-0.2.0/.pyproject-builddir/pip-modern-metadata-5a8q339i/azure_synapse_spark.egg-info/requires.txt writing top-level names to /builddir/build/BUILD/azure-synapse-spark-0.2.0/.pyproject-builddir/pip-modern-metadata-5a8q339i/azure_synapse_spark.egg-info/top_level.txt writing manifest file '/builddir/build/BUILD/azure-synapse-spark-0.2.0/.pyproject-builddir/pip-modern-metadata-5a8q339i/azure_synapse_spark.egg-info/SOURCES.txt' reading manifest file '/builddir/build/BUILD/azure-synapse-spark-0.2.0/.pyproject-builddir/pip-modern-metadata-5a8q339i/azure_synapse_spark.egg-info/SOURCES.txt' reading manifest template 'MANIFEST.in' warning: no files found matching '*.py' under directory 'tests' warning: no files found matching '*.yaml' under directory 'tests' writing manifest file 
'/builddir/build/BUILD/azure-synapse-spark-0.2.0/.pyproject-builddir/pip-modern-metadata-5a8q339i/azure_synapse_spark.egg-info/SOURCES.txt' creating '/builddir/build/BUILD/azure-synapse-spark-0.2.0/.pyproject-builddir/pip-modern-metadata-5a8q339i/azure_synapse_spark-0.2.0.dist-info' Preparing metadata (pyproject.toml): finished with status 'done' Building wheels for collected packages: azure-synapse-spark Building wheel for azure-synapse-spark (pyproject.toml): started Running command Building wheel for azure-synapse-spark (pyproject.toml) running bdist_wheel running build running build_py creating build creating build/lib creating build/lib/azure creating build/lib/azure/synapse creating build/lib/azure/synapse/spark copying azure/synapse/spark/_configuration.py -> build/lib/azure/synapse/spark copying azure/synapse/spark/_version.py -> build/lib/azure/synapse/spark copying azure/synapse/spark/_spark_client.py -> build/lib/azure/synapse/spark copying azure/synapse/spark/__init__.py -> build/lib/azure/synapse/spark creating build/lib/azure/synapse/spark/models copying azure/synapse/spark/models/_models.py -> build/lib/azure/synapse/spark/models copying azure/synapse/spark/models/_models_py3.py -> build/lib/azure/synapse/spark/models copying azure/synapse/spark/models/__init__.py -> build/lib/azure/synapse/spark/models copying azure/synapse/spark/models/_spark_client_enums.py -> build/lib/azure/synapse/spark/models creating build/lib/azure/synapse/spark/aio copying azure/synapse/spark/aio/_configuration_async.py -> build/lib/azure/synapse/spark/aio copying azure/synapse/spark/aio/__init__.py -> build/lib/azure/synapse/spark/aio copying azure/synapse/spark/aio/_spark_client_async.py -> build/lib/azure/synapse/spark/aio creating build/lib/azure/synapse/spark/operations copying azure/synapse/spark/operations/_spark_batch_operations.py -> build/lib/azure/synapse/spark/operations copying azure/synapse/spark/operations/__init__.py -> build/lib/azure/synapse/spark/operations copying azure/synapse/spark/operations/_spark_session_operations.py -> build/lib/azure/synapse/spark/operations creating build/lib/azure/synapse/spark/aio/operations_async copying azure/synapse/spark/aio/operations_async/_spark_batch_operations_async.py -> build/lib/azure/synapse/spark/aio/operations_async copying azure/synapse/spark/aio/operations_async/__init__.py -> build/lib/azure/synapse/spark/aio/operations_async copying azure/synapse/spark/aio/operations_async/_spark_session_operations_async.py -> build/lib/azure/synapse/spark/aio/operations_async installing to build/bdist.linux-x86_64/wheel running install running install_lib creating build/bdist.linux-x86_64 creating build/bdist.linux-x86_64/wheel creating build/bdist.linux-x86_64/wheel/azure creating build/bdist.linux-x86_64/wheel/azure/synapse creating build/bdist.linux-x86_64/wheel/azure/synapse/spark creating build/bdist.linux-x86_64/wheel/azure/synapse/spark/operations copying build/lib/azure/synapse/spark/operations/_spark_session_operations.py -> build/bdist.linux-x86_64/wheel/azure/synapse/spark/operations copying build/lib/azure/synapse/spark/operations/__init__.py -> build/bdist.linux-x86_64/wheel/azure/synapse/spark/operations copying build/lib/azure/synapse/spark/operations/_spark_batch_operations.py -> build/bdist.linux-x86_64/wheel/azure/synapse/spark/operations creating build/bdist.linux-x86_64/wheel/azure/synapse/spark/aio creating build/bdist.linux-x86_64/wheel/azure/synapse/spark/aio/operations_async copying 
build/lib/azure/synapse/spark/aio/operations_async/_spark_session_operations_async.py -> build/bdist.linux-x86_64/wheel/azure/synapse/spark/aio/operations_async copying build/lib/azure/synapse/spark/aio/operations_async/__init__.py -> build/bdist.linux-x86_64/wheel/azure/synapse/spark/aio/operations_async copying build/lib/azure/synapse/spark/aio/operations_async/_spark_batch_operations_async.py -> build/bdist.linux-x86_64/wheel/azure/synapse/spark/aio/operations_async copying build/lib/azure/synapse/spark/aio/_spark_client_async.py -> build/bdist.linux-x86_64/wheel/azure/synapse/spark/aio copying build/lib/azure/synapse/spark/aio/__init__.py -> build/bdist.linux-x86_64/wheel/azure/synapse/spark/aio copying build/lib/azure/synapse/spark/aio/_configuration_async.py -> build/bdist.linux-x86_64/wheel/azure/synapse/spark/aio creating build/bdist.linux-x86_64/wheel/azure/synapse/spark/models copying build/lib/azure/synapse/spark/models/_spark_client_enums.py -> build/bdist.linux-x86_64/wheel/azure/synapse/spark/models copying build/lib/azure/synapse/spark/models/__init__.py -> build/bdist.linux-x86_64/wheel/azure/synapse/spark/models copying build/lib/azure/synapse/spark/models/_models_py3.py -> build/bdist.linux-x86_64/wheel/azure/synapse/spark/models copying build/lib/azure/synapse/spark/models/_models.py -> build/bdist.linux-x86_64/wheel/azure/synapse/spark/models copying build/lib/azure/synapse/spark/__init__.py -> build/bdist.linux-x86_64/wheel/azure/synapse/spark copying build/lib/azure/synapse/spark/_spark_client.py -> build/bdist.linux-x86_64/wheel/azure/synapse/spark copying build/lib/azure/synapse/spark/_version.py -> build/bdist.linux-x86_64/wheel/azure/synapse/spark copying build/lib/azure/synapse/spark/_configuration.py -> build/bdist.linux-x86_64/wheel/azure/synapse/spark running install_egg_info running egg_info writing azure_synapse_spark.egg-info/PKG-INFO writing dependency_links to azure_synapse_spark.egg-info/dependency_links.txt writing requirements to azure_synapse_spark.egg-info/requires.txt writing top-level names to azure_synapse_spark.egg-info/top_level.txt reading manifest file 'azure_synapse_spark.egg-info/SOURCES.txt' reading manifest template 'MANIFEST.in' warning: no files found matching '*.py' under directory 'tests' warning: no files found matching '*.yaml' under directory 'tests' writing manifest file 'azure_synapse_spark.egg-info/SOURCES.txt' Copying azure_synapse_spark.egg-info to build/bdist.linux-x86_64/wheel/azure_synapse_spark-0.2.0-py3.11.egg-info running install_scripts creating build/bdist.linux-x86_64/wheel/azure_synapse_spark-0.2.0.dist-info/WHEEL creating '/builddir/build/BUILD/azure-synapse-spark-0.2.0/.pyproject-builddir/pip-wheel-8nfnrgz7/tmp8iv8t3dr/azure_synapse_spark-0.2.0-py2.py3-none-any.whl' and adding 'build/bdist.linux-x86_64/wheel' to it adding 'azure/synapse/spark/__init__.py' adding 'azure/synapse/spark/_configuration.py' adding 'azure/synapse/spark/_spark_client.py' adding 'azure/synapse/spark/_version.py' adding 'azure/synapse/spark/aio/__init__.py' adding 'azure/synapse/spark/aio/_configuration_async.py' adding 'azure/synapse/spark/aio/_spark_client_async.py' adding 'azure/synapse/spark/aio/operations_async/__init__.py' adding 'azure/synapse/spark/aio/operations_async/_spark_batch_operations_async.py' adding 'azure/synapse/spark/aio/operations_async/_spark_session_operations_async.py' adding 'azure/synapse/spark/models/__init__.py' adding 'azure/synapse/spark/models/_models.py' adding 'azure/synapse/spark/models/_models_py3.py' 
adding 'azure/synapse/spark/models/_spark_client_enums.py' adding 'azure/synapse/spark/operations/__init__.py' adding 'azure/synapse/spark/operations/_spark_batch_operations.py' adding 'azure/synapse/spark/operations/_spark_session_operations.py' adding 'azure_synapse_spark-0.2.0.dist-info/METADATA' adding 'azure_synapse_spark-0.2.0.dist-info/WHEEL' adding 'azure_synapse_spark-0.2.0.dist-info/top_level.txt' adding 'azure_synapse_spark-0.2.0.dist-info/RECORD' removing build/bdist.linux-x86_64/wheel Building wheel for azure-synapse-spark (pyproject.toml): finished with status 'done' Created wheel for azure-synapse-spark: filename=azure_synapse_spark-0.2.0-py2.py3-none-any.whl size=29964 sha256=f2378a53091251b038c03602faaced45aef2c661540ecf3e002636a26c160e24 Stored in directory: /builddir/.cache/pip/wheels/fb/67/bd/71f164d13a63679050ee8fa72662c83ce2beb72f825f9fa77b Successfully built azure-synapse-spark + RPM_EC=0 ++ jobs -p + exit 0 Executing(%install): /bin/sh -e /var/tmp/rpm-tmp.lvOrhf + umask 022 + cd /builddir/build/BUILD + '[' /builddir/build/BUILDROOT/python-azure-synapse-spark-0.2.0-9.fc38.x86_64 '!=' / ']' + rm -rf /builddir/build/BUILDROOT/python-azure-synapse-spark-0.2.0-9.fc38.x86_64 ++ dirname /builddir/build/BUILDROOT/python-azure-synapse-spark-0.2.0-9.fc38.x86_64 + mkdir -p /builddir/build/BUILDROOT + mkdir /builddir/build/BUILDROOT/python-azure-synapse-spark-0.2.0-9.fc38.x86_64 + CFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -m64 -mtune=generic -fasynchronous-unwind-tables -fstack-clash-protection -fcf-protection -fno-omit-frame-pointer -mno-omit-leaf-frame-pointer' + export CFLAGS + CXXFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -m64 -mtune=generic -fasynchronous-unwind-tables -fstack-clash-protection -fcf-protection -fno-omit-frame-pointer -mno-omit-leaf-frame-pointer' + export CXXFLAGS + FFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -m64 -mtune=generic -fasynchronous-unwind-tables -fstack-clash-protection -fcf-protection -fno-omit-frame-pointer -mno-omit-leaf-frame-pointer -I/usr/lib64/gfortran/modules' + export FFLAGS + FCFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -m64 -mtune=generic -fasynchronous-unwind-tables -fstack-clash-protection -fcf-protection -fno-omit-frame-pointer -mno-omit-leaf-frame-pointer -I/usr/lib64/gfortran/modules' + export FCFLAGS + VALAFLAGS=-g + export VALAFLAGS + LDFLAGS='-Wl,-z,relro -Wl,--as-needed -Wl,-z,now -specs=/usr/lib/rpm/redhat/redhat-hardened-ld -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -Wl,--build-id=sha1 
-specs=/usr/lib/rpm/redhat/redhat-package-notes' + export LDFLAGS + LT_SYS_LIBRARY_PATH=/usr/lib64: + export LT_SYS_LIBRARY_PATH + CC=gcc + export CC + CXX=g++ + export CXX + cd azure-synapse-spark-0.2.0 ++ ls /builddir/build/BUILD/azure-synapse-spark-0.2.0/pyproject-wheeldir/azure_synapse_spark-0.2.0-py2.py3-none-any.whl ++ sed -E 's/([^-]+)-([^-]+)-.+\.whl/\1==\2/' ++ xargs basename --multiple + specifier=azure_synapse_spark==0.2.0 + TMPDIR=/builddir/build/BUILD/azure-synapse-spark-0.2.0/.pyproject-builddir + /usr/bin/python3 -m pip install --root /builddir/build/BUILDROOT/python-azure-synapse-spark-0.2.0-9.fc38.x86_64 --prefix /usr --no-deps --disable-pip-version-check --progress-bar off --verbose --ignore-installed --no-warn-script-location --no-index --no-cache-dir --find-links /builddir/build/BUILD/azure-synapse-spark-0.2.0/pyproject-wheeldir azure_synapse_spark==0.2.0 Using pip 22.3.1 from /usr/lib/python3.11/site-packages/pip (python 3.11) Looking in links: /builddir/build/BUILD/azure-synapse-spark-0.2.0/pyproject-wheeldir Processing ./pyproject-wheeldir/azure_synapse_spark-0.2.0-py2.py3-none-any.whl Installing collected packages: azure_synapse_spark Successfully installed azure_synapse_spark-0.2.0 + '[' -d /builddir/build/BUILDROOT/python-azure-synapse-spark-0.2.0-9.fc38.x86_64/usr/bin ']' + rm -f /builddir/build/BUILD/python-azure-synapse-spark-0.2.0-9.fc38.x86_64-pyproject-ghost-distinfo + site_dirs=() + '[' -d /builddir/build/BUILDROOT/python-azure-synapse-spark-0.2.0-9.fc38.x86_64/usr/lib/python3.11/site-packages ']' + site_dirs+=("/usr/lib/python3.11/site-packages") + '[' /builddir/build/BUILDROOT/python-azure-synapse-spark-0.2.0-9.fc38.x86_64/usr/lib64/python3.11/site-packages '!=' /builddir/build/BUILDROOT/python-azure-synapse-spark-0.2.0-9.fc38.x86_64/usr/lib/python3.11/site-packages ']' + '[' -d /builddir/build/BUILDROOT/python-azure-synapse-spark-0.2.0-9.fc38.x86_64/usr/lib64/python3.11/site-packages ']' + for site_dir in ${site_dirs[@]} + for distinfo in /builddir/build/BUILDROOT/python-azure-synapse-spark-0.2.0-9.fc38.x86_64$site_dir/*.dist-info + echo '%ghost /usr/lib/python3.11/site-packages/azure_synapse_spark-0.2.0.dist-info' + sed -i s/pip/rpm/ /builddir/build/BUILDROOT/python-azure-synapse-spark-0.2.0-9.fc38.x86_64/usr/lib/python3.11/site-packages/azure_synapse_spark-0.2.0.dist-info/INSTALLER + PYTHONPATH=/usr/lib/rpm/redhat + /usr/bin/python3 -B /usr/lib/rpm/redhat/pyproject_preprocess_record.py --buildroot /builddir/build/BUILDROOT/python-azure-synapse-spark-0.2.0-9.fc38.x86_64 --record /builddir/build/BUILDROOT/python-azure-synapse-spark-0.2.0-9.fc38.x86_64/usr/lib/python3.11/site-packages/azure_synapse_spark-0.2.0.dist-info/RECORD --output /builddir/build/BUILD/python-azure-synapse-spark-0.2.0-9.fc38.x86_64-pyproject-record + rm -fv /builddir/build/BUILDROOT/python-azure-synapse-spark-0.2.0-9.fc38.x86_64/usr/lib/python3.11/site-packages/azure_synapse_spark-0.2.0.dist-info/RECORD removed '/builddir/build/BUILDROOT/python-azure-synapse-spark-0.2.0-9.fc38.x86_64/usr/lib/python3.11/site-packages/azure_synapse_spark-0.2.0.dist-info/RECORD' + rm -fv /builddir/build/BUILDROOT/python-azure-synapse-spark-0.2.0-9.fc38.x86_64/usr/lib/python3.11/site-packages/azure_synapse_spark-0.2.0.dist-info/REQUESTED removed '/builddir/build/BUILDROOT/python-azure-synapse-spark-0.2.0-9.fc38.x86_64/usr/lib/python3.11/site-packages/azure_synapse_spark-0.2.0.dist-info/REQUESTED' ++ wc -l /builddir/build/BUILD/python-azure-synapse-spark-0.2.0-9.fc38.x86_64-pyproject-ghost-distinfo ++ cut 
+ lines=1
+ '[' 1 -ne 1 ']'
+ /usr/bin/python3 /usr/lib/rpm/redhat/pyproject_save_files.py --output-files /builddir/build/BUILD/python-azure-synapse-spark-0.2.0-9.fc38.x86_64-pyproject-files --output-modules /builddir/build/BUILD/python-azure-synapse-spark-0.2.0-9.fc38.x86_64-pyproject-modules --buildroot /builddir/build/BUILDROOT/python-azure-synapse-spark-0.2.0-9.fc38.x86_64 --sitelib /usr/lib/python3.11/site-packages --sitearch /usr/lib64/python3.11/site-packages --python-version 3.11 --pyproject-record /builddir/build/BUILD/python-azure-synapse-spark-0.2.0-9.fc38.x86_64-pyproject-record --prefix /usr azure
+ /usr/bin/find-debuginfo -j2 --strict-build-id -m -i --build-id-seed 0.2.0-9.fc38 --unique-debug-suffix -0.2.0-9.fc38.x86_64 --unique-debug-src-base python-azure-synapse-spark-0.2.0-9.fc38.x86_64 --run-dwz --dwz-low-mem-die-limit 10000000 --dwz-max-die-limit 110000000 -S debugsourcefiles.list /builddir/build/BUILD/azure-synapse-spark-0.2.0
find: 'debug': No such file or directory
+ /usr/lib/rpm/check-buildroot
+ /usr/lib/rpm/redhat/brp-ldconfig
+ /usr/lib/rpm/brp-compress
+ /usr/lib/rpm/redhat/brp-strip-lto /usr/bin/strip
+ /usr/lib/rpm/brp-strip-static-archive /usr/bin/strip
+ /usr/lib/rpm/check-rpaths
+ /usr/lib/rpm/redhat/brp-mangle-shebangs
+ /usr/lib/rpm/brp-remove-la-files
+ env /usr/lib/rpm/redhat/brp-python-bytecompile '' 1 0 -j2
Bytecompiling .py files below /builddir/build/BUILDROOT/python-azure-synapse-spark-0.2.0-9.fc38.x86_64/usr/lib/python3.11 using python3.11
+ /usr/lib/rpm/redhat/brp-python-hardlink
Executing(%check): /bin/sh -e /var/tmp/rpm-tmp.C3MmkB
+ umask 022
+ cd /builddir/build/BUILD
+ CFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -m64 -mtune=generic -fasynchronous-unwind-tables -fstack-clash-protection -fcf-protection -fno-omit-frame-pointer -mno-omit-leaf-frame-pointer'
+ export CFLAGS
+ CXXFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -m64 -mtune=generic -fasynchronous-unwind-tables -fstack-clash-protection -fcf-protection -fno-omit-frame-pointer -mno-omit-leaf-frame-pointer'
+ export CXXFLAGS
+ FFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -m64 -mtune=generic -fasynchronous-unwind-tables -fstack-clash-protection -fcf-protection -fno-omit-frame-pointer -mno-omit-leaf-frame-pointer -I/usr/lib64/gfortran/modules'
+ export FFLAGS
+ FCFLAGS='-O2 -flto=auto -ffat-lto-objects -fexceptions -g -grecord-gcc-switches -pipe -Wall -Werror=format-security -Wp,-U_FORTIFY_SOURCE,-D_FORTIFY_SOURCE=3 -Wp,-D_GLIBCXX_ASSERTIONS -specs=/usr/lib/rpm/redhat/redhat-hardened-cc1 -fstack-protector-strong -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -m64 -mtune=generic -fasynchronous-unwind-tables -fstack-clash-protection -fcf-protection -fno-omit-frame-pointer -mno-omit-leaf-frame-pointer -I/usr/lib64/gfortran/modules'
+ export FCFLAGS
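Note: the %check scriptlet that begins above re-exports the standard Fedora hardened build flags and then runs /usr/lib/rpm/redhat/import_all_modules.py against the module list that pyproject_save_files.py wrote earlier; each recorded module is simply imported from the buildroot instead of the system site-packages. Roughly the same smoke test can be run by hand (a sketch; the buildroot path and python3.11 directory are illustrative):

    # Sketch only: import the packaged modules from a staged buildroot.
    PYTHONPATH=/tmp/buildroot/usr/lib/python3.11/site-packages \
        python3 -c 'import azure.synapse.spark, azure.synapse.spark.aio, azure.synapse.spark.models'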
+ VALAFLAGS=-g
+ export VALAFLAGS
+ LDFLAGS='-Wl,-z,relro -Wl,--as-needed -Wl,-z,now -specs=/usr/lib/rpm/redhat/redhat-hardened-ld -specs=/usr/lib/rpm/redhat/redhat-annobin-cc1 -Wl,--build-id=sha1 -specs=/usr/lib/rpm/redhat/redhat-package-notes'
+ export LDFLAGS
+ LT_SYS_LIBRARY_PATH=/usr/lib64:
+ export LT_SYS_LIBRARY_PATH
+ CC=gcc
+ export CC
+ CXX=g++
+ export CXX
+ cd azure-synapse-spark-0.2.0
+ '[' '!' -f /builddir/build/BUILD/python-azure-synapse-spark-0.2.0-9.fc38.x86_64-pyproject-modules ']'
+ PATH=/builddir/build/BUILDROOT/python-azure-synapse-spark-0.2.0-9.fc38.x86_64/usr/bin:/builddir/.local/bin:/builddir/bin:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/sbin
+ PYTHONPATH=/builddir/build/BUILDROOT/python-azure-synapse-spark-0.2.0-9.fc38.x86_64/usr/lib64/python3.11/site-packages:/builddir/build/BUILDROOT/python-azure-synapse-spark-0.2.0-9.fc38.x86_64/usr/lib/python3.11/site-packages
+ _PYTHONSITE=/builddir/build/BUILDROOT/python-azure-synapse-spark-0.2.0-9.fc38.x86_64/usr/lib64/python3.11/site-packages:/builddir/build/BUILDROOT/python-azure-synapse-spark-0.2.0-9.fc38.x86_64/usr/lib/python3.11/site-packages
+ PYTHONDONTWRITEBYTECODE=1
+ /usr/bin/python3 -sP /usr/lib/rpm/redhat/import_all_modules.py -f /builddir/build/BUILD/python-azure-synapse-spark-0.2.0-9.fc38.x86_64-pyproject-modules
Check import: azure
Check import: azure.synapse
Check import: azure.synapse.spark
Check import: azure.synapse.spark.aio
Check import: azure.synapse.spark.aio.operations_async
Check import: azure.synapse.spark.models
Check import: azure.synapse.spark.operations
+ RPM_EC=0
++ jobs -p
+ exit 0
Processing files: python3-azure-synapse-spark-0.2.0-9.fc38.noarch
Executing(%doc): /bin/sh -e /var/tmp/rpm-tmp.dcCkK9
+ umask 022
+ cd /builddir/build/BUILD
+ cd azure-synapse-spark-0.2.0
+ DOCDIR=/builddir/build/BUILDROOT/python-azure-synapse-spark-0.2.0-9.fc38.x86_64/usr/share/doc/python3-azure-synapse-spark
+ export LC_ALL=C
+ LC_ALL=C
+ export DOCDIR
+ /usr/bin/mkdir -p /builddir/build/BUILDROOT/python-azure-synapse-spark-0.2.0-9.fc38.x86_64/usr/share/doc/python3-azure-synapse-spark
+ cp -pr README.md /builddir/build/BUILDROOT/python-azure-synapse-spark-0.2.0-9.fc38.x86_64/usr/share/doc/python3-azure-synapse-spark
+ RPM_EC=0
++ jobs -p
+ exit 0
Provides: python-azure-synapse-spark = 1:0.2.0-9.fc38 python3-azure-synapse-spark = 1:0.2.0-9.fc38 python3.11-azure-synapse-spark = 1:0.2.0-9.fc38 python3.11dist(azure-synapse-spark) = 0.2 python3dist(azure-synapse-spark) = 0.2
Requires(rpmlib): rpmlib(CompressedFileNames) <= 3.0.4-1 rpmlib(FileDigests) <= 4.6.0-1 rpmlib(PartialHardlinkSets) <= 4.0.4-1 rpmlib(PayloadFilesHavePrefix) <= 4.0-1
Requires: (python3.11dist(azure-common) >= 1.1 with python3.11dist(azure-common) < 2) (python3.11dist(azure-core) < 2~~ with python3.11dist(azure-core) >= 1.6) python(abi) = 3.11 python3.11dist(msrest) >= 0.5
Checking for unpackaged file(s): /usr/lib/rpm/check-files /builddir/build/BUILDROOT/python-azure-synapse-spark-0.2.0-9.fc38.x86_64
Wrote: /builddir/build/SRPMS/python-azure-synapse-spark-0.2.0-9.fc38.src.rpm
Wrote: /builddir/build/RPMS/python3-azure-synapse-spark-0.2.0-9.fc38.noarch.rpm
Executing(%clean): /bin/sh -e /var/tmp/rpm-tmp.B44VmK
+ umask 022
+ cd /builddir/build/BUILD
+ cd azure-synapse-spark-0.2.0
+ /usr/bin/rm -rf /builddir/build/BUILDROOT/python-azure-synapse-spark-0.2.0-9.fc38.x86_64
+ RPM_EC=0
++ jobs -p
+ exit 0
Executing(rmbuild): /bin/sh -e /var/tmp/rpm-tmp.42n5fS
+ umask 022
+ cd /builddir/build/BUILD
+ rm -rf azure-synapse-spark-0.2.0 azure-synapse-spark-0.2.0.gemspec
+ RPM_EC=0
++ jobs -p
+ exit 0
Child return code was: 0
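Note: the Provides/Requires recorded above were generated automatically from the installed dist-info metadata. They can be double-checked on the finished package with plain rpm queries once mock copies the results out of the chroot (a sketch; adjust the path to wherever the noarch RPM lands):

    # Sketch only: inspect the built package and its generated dependencies.
    rpm -qp --provides python3-azure-synapse-spark-0.2.0-9.fc38.noarch.rpm
    rpm -qp --requires python3-azure-synapse-spark-0.2.0-9.fc38.noarch.rpm
    rpm -qpl python3-azure-synapse-spark-0.2.0-9.fc38.noarch.rpm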