[2024-12-03 18:50:12,591][ INFO][PID:1182951] Marking build as starting
[2024-12-03 18:50:12,637][ INFO][PID:1182951] Trying to allocate VM
[2024-12-03 18:50:15,689][ INFO][PID:1182951] Allocated host ResallocHost, ticket_id=6183437, hostname=54.226.40.250
[2024-12-03 18:50:15,690][ INFO][PID:1182951] Allocating ssh connection to builder
[2024-12-03 18:50:15,690][ INFO][PID:1182951] Checking that builder machine is OK
[2024-12-03 18:50:16,136][ INFO][PID:1182951] Running remote command: copr-builder-ready srpm-builds
[2024-12-03 18:50:16,376][ INFO][PID:1182951] Red Hat subscription not needed for srpm-builds
Builder is ready to be used
[2024-12-03 18:50:16,377][ INFO][PID:1182951] Filling build.info file with builder info
[2024-12-03 18:50:16,378][ INFO][PID:1182951] Sending build state back to frontend: {
    "builds": [
        {
            "timeout": 108000,
            "frontend_base_url": "https://copr.fedorainfracloud.org",
            "memory_reqs": null,
            "enable_net": true,
            "project_owner": "@ai-ml",
            "project_name": "llama-cpp",
            "project_dirname": "llama-cpp",
            "submitter": "man2dev",
            "ended_on": null,
            "started_on": 1733251816.3783617,
            "submitted_on": null,
            "status": 3,
            "chroot": "srpm-builds",
            "arch": null,
            "buildroot_pkgs": null,
            "task_id": "8338687",
            "build_id": 8338687,
            "package_name": null,
            "package_version": null,
            "git_repo": null,
            "git_hash": null,
            "git_branch": null,
            "source_type": 2,
            "source_json": "{\"url\": \"https://copr.fedorainfracloud.org/tmp/tmp3l9vxcj4/llama-cpp.spec\", \"pkg\": \"llama-cpp.spec\", \"tmp\": \"tmp3l9vxcj4\"}",
            "pkg_name": null,
            "pkg_main_version": null,
            "pkg_epoch": null,
            "pkg_release": null,
            "srpm_url": null,
            "uses_devel_repo": null,
            "sandbox": "@ai-ml/llama-cpp--man2dev",
            "results": null,
            "appstream": false,
            "allow_user_ssh": null,
            "ssh_public_keys": null,
            "storage": null,
            "background": false,
            "repos": [],
            "destdir": "/var/lib/copr/public_html/results/@ai-ml/llama-cpp",
            "results_repo_url": "https://download.copr.fedorainfracloud.org/results/@ai-ml/llama-cpp",
            "result_dir": "08338687",
            "built_packages": "",
            "tags": [
                "arch_noarch"
            ],
            "id": 8338687,
            "mockchain_macros": {
                "copr_username": "@ai-ml",
                "copr_projectname": "llama-cpp",
                "vendor": "Fedora Project COPR (@ai-ml/llama-cpp)"
            }
        }
    ]
}
[2024-12-03 18:50:16,421][ INFO][PID:1182951] Sending fedora-messaging bus message in build.start
[2024-12-03 18:50:16,974][ INFO][PID:1182951] Sending fedora-messaging bus message in chroot.start
[2024-12-03 18:50:16,992][ INFO][PID:1182951] Starting remote build: copr-rpmbuild --verbose --drop-resultdir --srpm --task-url https://copr.fedorainfracloud.org/backend/get-srpm-build-task/8338687 --detached
[2024-12-03 18:50:17,433][ INFO][PID:1182951] Downloading the builder-live.log file, attempt 1
[2024-12-03 18:50:17,435][ INFO][PID:1182951] Popen command started: ssh -F /home/copr/.ssh/config mockbuilder@54.226.40.250 copr-rpmbuild-log
[2024-12-03 18:50:22,438][ INFO][PID:1182951] Periodic builder liveness probe: alive
[2024-12-03 18:50:38,239][ INFO][PID:1182951] Downloading results from builder
[2024-12-03 18:50:38,240][ INFO][PID:1182951] rsyncing of mockbuilder@54.226.40.250:/var/lib/copr-rpmbuild/results/ to /var/lib/copr/public_html/results/@ai-ml/llama-cpp/srpm-builds/08338687 started
[2024-12-03 18:50:38,241][ INFO][PID:1182951] Popen command started: /usr/bin/rsync -rltDvH --chmod=D755,F644 -e 'ssh -F /home/copr/.ssh/config' mockbuilder@54.226.40.250:/var/lib/copr-rpmbuild/results/ /var/lib/copr/public_html/results/@ai-ml/llama-cpp/srpm-builds/08338687/ &> /var/lib/copr/public_html/results/@ai-ml/llama-cpp/srpm-builds/08338687/build-08338687.rsync.log
[2024-12-03 18:50:38,606][ INFO][PID:1182951] rsyncing finished.
[2024-12-03 18:50:38,607][ INFO][PID:1182951] Releasing VM back to pool
[2024-12-03 18:50:38,614][ INFO][PID:1182951] Searching for 'success' file in resultdir
[2024-12-03 18:50:38,615][ INFO][PID:1182951] Getting build details
[2024-12-03 18:50:38,615][ INFO][PID:1182951] Retrieving SRPM info from /var/lib/copr/public_html/results/@ai-ml/llama-cpp/srpm-builds/08338687
[2024-12-03 18:50:38,616][ INFO][PID:1182951] SRPM URL: https://download.copr.fedorainfracloud.org/results/@ai-ml/llama-cpp/srpm-builds/08338687/llama-cpp-b4094-1.src.rpm
[2024-12-03 18:50:38,617][ INFO][PID:1182951] build details: {'srpm_url': 'https://download.copr.fedorainfracloud.org/results/@ai-ml/llama-cpp/srpm-builds/08338687/llama-cpp-b4094-1.src.rpm', 'pkg_name': 'llama-cpp', 'pkg_version': 'b4094-1'}
[2024-12-03 18:50:38,617][ INFO][PID:1182951] Finished build: id=8338687 failed=False timeout=108000 destdir=/var/lib/copr/public_html/results/@ai-ml/llama-cpp chroot=srpm-builds
[2024-12-03 18:50:38,619][ INFO][PID:1182951] Worker succeeded build, took 22.240811824798584
[2024-12-03 18:50:38,619][ INFO][PID:1182951] Sending build state back to frontend: {
    "builds": [
        {
            "timeout": 108000,
            "frontend_base_url": "https://copr.fedorainfracloud.org",
            "memory_reqs": null,
            "enable_net": true,
            "project_owner": "@ai-ml",
            "project_name": "llama-cpp",
            "project_dirname": "llama-cpp",
            "submitter": "man2dev",
            "ended_on": 1733251838.6191735,
            "started_on": 1733251816.3783617,
            "submitted_on": null,
            "status": 1,
            "chroot": "srpm-builds",
            "arch": null,
            "buildroot_pkgs": null,
            "task_id": "8338687",
            "build_id": 8338687,
            "package_name": null,
            "package_version": null,
            "git_repo": null,
            "git_hash": null,
            "git_branch": null,
            "source_type": 2,
            "source_json": "{\"url\": \"https://copr.fedorainfracloud.org/tmp/tmp3l9vxcj4/llama-cpp.spec\", \"pkg\": \"llama-cpp.spec\", \"tmp\": \"tmp3l9vxcj4\"}",
            "pkg_name": "llama-cpp",
            "pkg_main_version": null,
            "pkg_epoch": null,
            "pkg_release": null,
            "srpm_url": "https://download.copr.fedorainfracloud.org/results/@ai-ml/llama-cpp/srpm-builds/08338687/llama-cpp-b4094-1.src.rpm",
            "uses_devel_repo": null,
            "sandbox": "@ai-ml/llama-cpp--man2dev",
            "results": {
                "name": "llama-cpp",
                "epoch": null,
                "version": "b4094",
                "release": "1",
                "exclusivearch": [
                    "x86_64",
                    "aarch64"
                ],
                "excludearch": []
            },
            "appstream": false,
            "allow_user_ssh": null,
            "ssh_public_keys": null,
            "storage": null,
            "background": false,
            "repos": [],
            "destdir": "/var/lib/copr/public_html/results/@ai-ml/llama-cpp",
            "results_repo_url": "https://download.copr.fedorainfracloud.org/results/@ai-ml/llama-cpp",
            "result_dir": "08338687",
            "built_packages": "",
            "tags": [
                "arch_noarch"
            ],
            "pkg_version": "b4094-1",
            "id": 8338687,
            "mockchain_macros": {
                "copr_username": "@ai-ml",
                "copr_projectname": "llama-cpp",
                "vendor": "Fedora Project COPR (@ai-ml/llama-cpp)"
            }
        }
    ]
}
[2024-12-03 18:50:38,724][ INFO][PID:1182951] Sending fedora-messaging bus message in build.end
[2024-12-03 18:50:38,744][ INFO][PID:1182951] Compressing /var/lib/copr/public_html/results/@ai-ml/llama-cpp/srpm-builds/08338687/builder-live.log by gzip
[2024-12-03 18:50:38,745][ INFO][PID:1182951] Running command 'gzip /var/lib/copr/public_html/results/@ai-ml/llama-cpp/srpm-builds/08338687/builder-live.log' as PID 1183663
[2024-12-03 18:50:38,747][ INFO][PID:1182951] Finished after 0 seconds with exit code 0 (gzip /var/lib/copr/public_html/results/@ai-ml/llama-cpp/srpm-builds/08338687/builder-live.log)
[2024-12-03 18:50:38,748][ INFO][PID:1182951] Compressing /var/lib/copr/public_html/results/@ai-ml/llama-cpp/srpm-builds/08338687/backend.log by gzip
[2024-12-03 18:50:38,748][ INFO][PID:1182951] Running command 'gzip /var/lib/copr/public_html/results/@ai-ml/llama-cpp/srpm-builds/08338687/backend.log' as PID 1183664