[2024-12-17 16:39:42,867][ INFO][PID:1768193] Marking build as starting
[2024-12-17 16:39:42,927][ INFO][PID:1768193] Trying to allocate VM
[2024-12-17 16:39:49,056][ INFO][PID:1768193] Allocated host ResallocHost, ticket_id=228342, hostname=2620:52:3:1:dead:beef:cafe:c20a
[2024-12-17 16:39:49,057][ INFO][PID:1768193] Allocating ssh connection to builder
[2024-12-17 16:39:49,058][ INFO][PID:1768193] Checking that builder machine is OK
[2024-12-17 16:39:50,596][ INFO][PID:1768193] Filling build.info file with builder info
[2024-12-17 16:39:50,604][ INFO][PID:1768193] Sending build state back to frontend:
{
    "builds": [
        {
            "timeout": 108000,
            "frontend_base_url": "https://copr.fedorainfracloud.org",
            "memory_reqs": null,
            "enable_net": true,
            "project_owner": "@ai-ml",
            "project_name": "llama-cpp",
            "project_dirname": "llama-cpp",
            "submitter": "man2dev",
            "ended_on": null,
            "started_on": 1734453590.6044993,
            "submitted_on": null,
            "status": 3,
            "chroot": "srpm-builds",
            "arch": null,
            "buildroot_pkgs": null,
            "task_id": "8403654",
            "build_id": 8403654,
            "package_name": null,
            "package_version": null,
            "git_repo": null,
            "git_hash": null,
            "git_branch": null,
            "source_type": 2,
            "source_json": "{\"url\": \"https://copr.fedorainfracloud.org/tmp/tmp084uu67n/llama-cpp-b4338-2.fc42.src.rpm\", \"pkg\": \"llama-cpp-b4338-2.fc42.src.rpm\", \"tmp\": \"tmp084uu67n\"}",
            "pkg_name": null,
            "pkg_main_version": null,
            "pkg_epoch": null,
            "pkg_release": null,
            "srpm_url": null,
            "uses_devel_repo": null,
            "sandbox": "@ai-ml/llama-cpp--man2dev",
            "results": null,
            "appstream": false,
            "allow_user_ssh": null,
            "ssh_public_keys": null,
            "storage": null,
            "background": false,
            "repos": [],
            "destdir": "/var/lib/copr/public_html/results/@ai-ml/llama-cpp",
            "results_repo_url": "https://download.copr.fedorainfracloud.org/results/@ai-ml/llama-cpp",
            "result_dir": "08403654",
            "built_packages": "",
            "tags": [
                "arch_noarch"
            ],
            "id": 8403654,
            "mockchain_macros": {
                "copr_username": "@ai-ml",
                "copr_projectname": "llama-cpp",
                "vendor": "Fedora Project COPR (@ai-ml/llama-cpp)"
            }
        }
    ]
}
[2024-12-17 16:39:50,644][ INFO][PID:1768193] Sending fedora-messaging bus message in build.start
[2024-12-17 16:39:52,039][ INFO][PID:1768193] Sending fedora-messaging bus message in chroot.start
[2024-12-17 16:39:52,103][ INFO][PID:1768193] Starting remote build: copr-rpmbuild --verbose --drop-resultdir --srpm --task-url https://copr.fedorainfracloud.org/backend/get-srpm-build-task/8403654 --detached
[2024-12-17 16:39:53,763][ INFO][PID:1768193] Downloading the builder-live.log file, attempt 1
[2024-12-17 16:39:53,771][ INFO][PID:1768193] Popen command started: ssh -F /home/copr/.ssh/config mockbuilder@2620:52:3:1:dead:beef:cafe:c20a copr-rpmbuild-log
[2024-12-17 16:39:58,780][ INFO][PID:1768193] Periodic builder liveness probe: alive
[2024-12-17 16:40:07,263][ INFO][PID:1768193] Downloading results from builder
[2024-12-17 16:40:07,264][ INFO][PID:1768193] rsyncing of mockbuilder@[2620:52:3:1:dead:beef:cafe:c20a]:/var/lib/copr-rpmbuild/results/ to /var/lib/copr/public_html/results/@ai-ml/llama-cpp/srpm-builds/08403654 started
[2024-12-17 16:40:07,266][ INFO][PID:1768193] Popen command started: /usr/bin/rsync -rltDvH --chmod=D755,F644 -e 'ssh -F /home/copr/.ssh/config' mockbuilder@[2620:52:3:1:dead:beef:cafe:c20a]:/var/lib/copr-rpmbuild/results/ /var/lib/copr/public_html/results/@ai-ml/llama-cpp/srpm-builds/08403654/ &> /var/lib/copr/public_html/results/@ai-ml/llama-cpp/srpm-builds/08403654/build-08403654.rsync.log
[2024-12-17 16:40:09,081][ INFO][PID:1768193] rsyncing finished.
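Note on the "Sending build state back to frontend" payload above: its source_json field is itself a JSON-encoded string embedded inside the JSON document, so inspecting it takes two decode passes. A minimal Python sketch, not part of the Copr backend; the filename frontend-payload.json is a hypothetical local copy of the payload shown above:

    import json

    # Hypothetical local copy of the payload the worker logged above.
    with open("frontend-payload.json") as f:
        payload = json.load(f)

    build = payload["builds"][0]
    print(build["task_id"], build["status"])   # "8403654", 3

    # source_json is a JSON-encoded string, so it needs a second decode.
    source = json.loads(build["source_json"])
    print(source["url"])                       # .../tmp084uu67n/llama-cpp-b4338-2.fc42.src.rpm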
[2024-12-17 16:40:09,083][ INFO][PID:1768193] Releasing VM back to pool
[2024-12-17 16:40:09,104][ INFO][PID:1768193] Searching for 'success' file in resultdir
[2024-12-17 16:40:09,105][ INFO][PID:1768193] Getting build details
[2024-12-17 16:40:09,106][ INFO][PID:1768193] Retrieving SRPM info from /var/lib/copr/public_html/results/@ai-ml/llama-cpp/srpm-builds/08403654
[2024-12-17 16:40:09,108][ INFO][PID:1768193] SRPM URL: https://download.copr.fedorainfracloud.org/results/@ai-ml/llama-cpp/srpm-builds/08403654/llama-cpp-b4338-2.fc42.src.rpm
[2024-12-17 16:40:09,109][ INFO][PID:1768193] build details: {'srpm_url': 'https://download.copr.fedorainfracloud.org/results/@ai-ml/llama-cpp/srpm-builds/08403654/llama-cpp-b4338-2.fc42.src.rpm', 'pkg_name': 'llama-cpp', 'pkg_version': '1:b4338-2'}
[2024-12-17 16:40:09,110][ INFO][PID:1768193] Finished build: id=8403654 failed=False timeout=108000 destdir=/var/lib/copr/public_html/results/@ai-ml/llama-cpp chroot=srpm-builds
[2024-12-17 16:40:09,112][ INFO][PID:1768193] Worker succeeded build, took 18.508137941360474
[2024-12-17 16:40:09,114][ INFO][PID:1768193] Sending build state back to frontend:
{
    "builds": [
        {
            "timeout": 108000,
            "frontend_base_url": "https://copr.fedorainfracloud.org",
            "memory_reqs": null,
            "enable_net": true,
            "project_owner": "@ai-ml",
            "project_name": "llama-cpp",
            "project_dirname": "llama-cpp",
            "submitter": "man2dev",
            "ended_on": 1734453609.1126373,
            "started_on": 1734453590.6044993,
            "submitted_on": null,
            "status": 1,
            "chroot": "srpm-builds",
            "arch": null,
            "buildroot_pkgs": null,
            "task_id": "8403654",
            "build_id": 8403654,
            "package_name": null,
            "package_version": null,
            "git_repo": null,
            "git_hash": null,
            "git_branch": null,
            "source_type": 2,
            "source_json": "{\"url\": \"https://copr.fedorainfracloud.org/tmp/tmp084uu67n/llama-cpp-b4338-2.fc42.src.rpm\", \"pkg\": \"llama-cpp-b4338-2.fc42.src.rpm\", \"tmp\": \"tmp084uu67n\"}",
            "pkg_name": "llama-cpp",
            "pkg_main_version": null,
            "pkg_epoch": null,
            "pkg_release": null,
            "srpm_url": "https://download.copr.fedorainfracloud.org/results/@ai-ml/llama-cpp/srpm-builds/08403654/llama-cpp-b4338-2.fc42.src.rpm",
            "uses_devel_repo": null,
            "sandbox": "@ai-ml/llama-cpp--man2dev",
            "results": {
                "name": "llama-cpp",
                "epoch": 1,
                "version": "b4338",
                "release": "2",
                "exclusivearch": [
                    "x86_64",
                    "aarch64"
                ],
                "excludearch": []
            },
            "appstream": false,
            "allow_user_ssh": null,
            "ssh_public_keys": null,
            "storage": null,
            "background": false,
            "repos": [],
            "destdir": "/var/lib/copr/public_html/results/@ai-ml/llama-cpp",
            "results_repo_url": "https://download.copr.fedorainfracloud.org/results/@ai-ml/llama-cpp",
            "result_dir": "08403654",
            "built_packages": "",
            "tags": [
                "arch_noarch"
            ],
            "pkg_version": "1:b4338-2",
            "id": 8403654,
            "mockchain_macros": {
                "copr_username": "@ai-ml",
                "copr_projectname": "llama-cpp",
                "vendor": "Fedora Project COPR (@ai-ml/llama-cpp)"
            }
        }
    ]
}
[2024-12-17 16:40:09,164][ INFO][PID:1768193] Sending fedora-messaging bus message in build.end
[2024-12-17 16:40:09,194][ INFO][PID:1768193] Compressing /var/lib/copr/public_html/results/@ai-ml/llama-cpp/srpm-builds/08403654/builder-live.log by gzip
[2024-12-17 16:40:09,196][ INFO][PID:1768193] Running command 'gzip /var/lib/copr/public_html/results/@ai-ml/llama-cpp/srpm-builds/08403654/builder-live.log' as PID 1772963
[2024-12-17 16:40:09,198][ INFO][PID:1768193] Finished after 0 seconds with exit code 0 (gzip /var/lib/copr/public_html/results/@ai-ml/llama-cpp/srpm-builds/08403654/builder-live.log)
[2024-12-17 16:40:09,200][ INFO][PID:1768193] Compressing /var/lib/copr/public_html/results/@ai-ml/llama-cpp/srpm-builds/08403654/backend.log by gzip
[2024-12-17 16:40:09,205][ INFO][PID:1768193] Running command 'gzip /var/lib/copr/public_html/results/@ai-ml/llama-cpp/srpm-builds/08403654/backend.log' as PID 1772965
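Between the first and the second frontend payload the status field changes from 3 to 1, consistent with the worker first reporting the build as running and then as succeeded. The "took 18.508..." figure reported at the end is simply ended_on minus started_on from the final payload; a trivial check (plain Python, not Copr code):

    started_on = 1734453590.6044993   # from the final frontend payload
    ended_on = 1734453609.1126373
    # Prints ~18.508138, matching "Worker succeeded build, took 18.508137941360474".
    print(ended_on - started_on)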