[2024-11-30 17:43:18,616][ INFO][PID:1290008] Marking build as starting
[2024-11-30 17:43:18,661][ INFO][PID:1290008] Trying to allocate VM
[2024-11-30 17:43:21,716][ INFO][PID:1290008] Allocated host ResallocHost, ticket_id=6145512, hostname=54.80.74.7
[2024-11-30 17:43:21,716][ INFO][PID:1290008] Allocating ssh connection to builder
[2024-11-30 17:43:21,717][ INFO][PID:1290008] Checking that builder machine is OK
[2024-11-30 17:43:22,110][ INFO][PID:1290008] Running remote command: copr-builder-ready srpm-builds
[2024-11-30 17:43:22,354][ INFO][PID:1290008] Red Hat subscription not needed for srpm-builds
Builder is ready to be used
[2024-11-30 17:43:22,355][ INFO][PID:1290008] Filling build.info file with builder info
[2024-11-30 17:43:22,356][ INFO][PID:1290008] Sending build state back to frontend: {
    "builds": [
        {
            "timeout": 108000,
            "frontend_base_url": "https://copr.fedorainfracloud.org",
            "memory_reqs": null,
            "enable_net": true,
            "project_owner": "@ai-ml",
            "project_name": "llama-cpp",
            "project_dirname": "llama-cpp",
            "submitter": "man2dev",
            "ended_on": null,
            "started_on": 1732988602.3562434,
            "submitted_on": null,
            "status": 3,
            "chroot": "srpm-builds",
            "arch": null,
            "buildroot_pkgs": null,
            "task_id": "8330030",
            "build_id": 8330030,
            "package_name": null,
            "package_version": null,
            "git_repo": null,
            "git_hash": null,
            "git_branch": null,
            "source_type": 2,
            "source_json": "{\"url\": \"https://copr.fedorainfracloud.org/tmp/tmpjgl8o8eg/llama-cpp.spec\", \"pkg\": \"llama-cpp.spec\", \"tmp\": \"tmpjgl8o8eg\"}",
            "pkg_name": null,
            "pkg_main_version": null,
            "pkg_epoch": null,
            "pkg_release": null,
            "srpm_url": null,
            "uses_devel_repo": null,
            "sandbox": "@ai-ml/llama-cpp--man2dev",
            "results": null,
            "appstream": false,
            "allow_user_ssh": null,
            "ssh_public_keys": null,
            "storage": null,
            "background": false,
            "repos": [],
            "destdir": "/var/lib/copr/public_html/results/@ai-ml/llama-cpp",
            "results_repo_url": "https://download.copr.fedorainfracloud.org/results/@ai-ml/llama-cpp",
            "result_dir": "08330030",
            "built_packages": "",
            "tags": [
                "arch_noarch"
            ],
            "id": 8330030,
            "mockchain_macros": {
                "copr_username": "@ai-ml",
                "copr_projectname": "llama-cpp",
                "vendor": "Fedora Project COPR (@ai-ml/llama-cpp)"
            }
        }
    ]
}
[2024-11-30 17:43:22,399][ INFO][PID:1290008] Sending fedora-messaging bus message in build.start
[2024-11-30 17:43:22,962][ INFO][PID:1290008] Sending fedora-messaging bus message in chroot.start
[2024-11-30 17:43:22,981][ INFO][PID:1290008] Starting remote build: copr-rpmbuild --verbose --drop-resultdir --srpm --task-url https://copr.fedorainfracloud.org/backend/get-srpm-build-task/8330030 --detached
[2024-11-30 17:43:23,412][ INFO][PID:1290008] Downloading the builder-live.log file, attempt 1
[2024-11-30 17:43:23,413][ INFO][PID:1290008] Popen command started: ssh -F /home/copr/.ssh/config mockbuilder@54.80.74.7 copr-rpmbuild-log
[2024-11-30 17:43:28,418][ INFO][PID:1290008] Periodic builder liveness probe: alive
[2024-11-30 17:43:43,951][ INFO][PID:1290008] Downloading results from builder
[2024-11-30 17:43:43,952][ INFO][PID:1290008] rsyncing of mockbuilder@54.80.74.7:/var/lib/copr-rpmbuild/results/ to /var/lib/copr/public_html/results/@ai-ml/llama-cpp/srpm-builds/08330030 started
[2024-11-30 17:43:43,953][ INFO][PID:1290008] Popen command started: /usr/bin/rsync -rltDvH --chmod=D755,F644 -e 'ssh -F /home/copr/.ssh/config' mockbuilder@54.80.74.7:/var/lib/copr-rpmbuild/results/ /var/lib/copr/public_html/results/@ai-ml/llama-cpp/srpm-builds/08330030/ &> /var/lib/copr/public_html/results/@ai-ml/llama-cpp/srpm-builds/08330030/build-08330030.rsync.log
[2024-11-30 17:43:44,390][ INFO][PID:1290008] rsyncing finished.
[2024-11-30 17:43:44,391][ INFO][PID:1290008] Releasing VM back to pool
[2024-11-30 17:43:44,404][ INFO][PID:1290008] Searching for 'success' file in resultdir
[2024-11-30 17:43:44,405][ INFO][PID:1290008] Getting build details
[2024-11-30 17:43:44,406][ INFO][PID:1290008] Retrieving SRPM info from /var/lib/copr/public_html/results/@ai-ml/llama-cpp/srpm-builds/08330030
[2024-11-30 17:43:44,407][ INFO][PID:1290008] SRPM URL: https://download.copr.fedorainfracloud.org/results/@ai-ml/llama-cpp/srpm-builds/08330030/llama-cpp-b4206-1.src.rpm
[2024-11-30 17:43:44,408][ INFO][PID:1290008] build details: {'srpm_url': 'https://download.copr.fedorainfracloud.org/results/@ai-ml/llama-cpp/srpm-builds/08330030/llama-cpp-b4206-1.src.rpm', 'pkg_name': 'llama-cpp', 'pkg_version': '1:b4206-1'}
[2024-11-30 17:43:44,409][ INFO][PID:1290008] Finished build: id=8330030 failed=False timeout=108000 destdir=/var/lib/copr/public_html/results/@ai-ml/llama-cpp chroot=srpm-builds
[2024-11-30 17:43:44,411][ INFO][PID:1290008] Worker succeeded build, took 22.055203199386597
[2024-11-30 17:43:44,412][ INFO][PID:1290008] Sending build state back to frontend: {
    "builds": [
        {
            "timeout": 108000,
            "frontend_base_url": "https://copr.fedorainfracloud.org",
            "memory_reqs": null,
            "enable_net": true,
            "project_owner": "@ai-ml",
            "project_name": "llama-cpp",
            "project_dirname": "llama-cpp",
            "submitter": "man2dev",
            "ended_on": 1732988624.4114466,
            "started_on": 1732988602.3562434,
            "submitted_on": null,
            "status": 1,
            "chroot": "srpm-builds",
            "arch": null,
            "buildroot_pkgs": null,
            "task_id": "8330030",
            "build_id": 8330030,
            "package_name": null,
            "package_version": null,
            "git_repo": null,
            "git_hash": null,
            "git_branch": null,
            "source_type": 2,
            "source_json": "{\"url\": \"https://copr.fedorainfracloud.org/tmp/tmpjgl8o8eg/llama-cpp.spec\", \"pkg\": \"llama-cpp.spec\", \"tmp\": \"tmpjgl8o8eg\"}",
            "pkg_name": "llama-cpp",
            "pkg_main_version": null,
            "pkg_epoch": null,
            "pkg_release": null,
            "srpm_url": "https://download.copr.fedorainfracloud.org/results/@ai-ml/llama-cpp/srpm-builds/08330030/llama-cpp-b4206-1.src.rpm",
            "uses_devel_repo": null,
            "sandbox": "@ai-ml/llama-cpp--man2dev",
            "results": {
                "name": "llama-cpp",
                "epoch": 1,
                "version": "b4206",
                "release": "1",
                "exclusivearch": [
                    "x86_64",
                    "aarch64"
                ],
                "excludearch": []
            },
            "appstream": false,
            "allow_user_ssh": null,
            "ssh_public_keys": null,
            "storage": null,
            "background": false,
            "repos": [],
            "destdir": "/var/lib/copr/public_html/results/@ai-ml/llama-cpp",
            "results_repo_url": "https://download.copr.fedorainfracloud.org/results/@ai-ml/llama-cpp",
            "result_dir": "08330030",
            "built_packages": "",
            "tags": [
                "arch_noarch"
            ],
            "pkg_version": "1:b4206-1",
            "id": 8330030,
            "mockchain_macros": {
                "copr_username": "@ai-ml",
                "copr_projectname": "llama-cpp",
                "vendor": "Fedora Project COPR (@ai-ml/llama-cpp)"
            }
        }
    ]
}
[2024-11-30 17:43:44,529][ INFO][PID:1290008] Sending fedora-messaging bus message in build.end
[2024-11-30 17:43:44,560][ INFO][PID:1290008] Compressing /var/lib/copr/public_html/results/@ai-ml/llama-cpp/srpm-builds/08330030/builder-live.log by gzip
[2024-11-30 17:43:44,561][ INFO][PID:1290008] Running command 'gzip /var/lib/copr/public_html/results/@ai-ml/llama-cpp/srpm-builds/08330030/builder-live.log' as PID 1290519
[2024-11-30 17:43:44,564][ INFO][PID:1290008] Finished after 0 seconds with exit code 0 (gzip /var/lib/copr/public_html/results/@ai-ml/llama-cpp/srpm-builds/08330030/builder-live.log)
[2024-11-30 17:43:44,565][ INFO][PID:1290008] Compressing /var/lib/copr/public_html/results/@ai-ml/llama-cpp/srpm-builds/08330030/backend.log by gzip
[2024-11-30 17:43:44,566][ INFO][PID:1290008] Running command 'gzip /var/lib/copr/public_html/results/@ai-ml/llama-cpp/srpm-builds/08330030/backend.log' as PID 1290520