● XOSEncoder-01
    State: degraded
     Jobs: 0 queued
   Failed: 3 units
    Since: Mon 2025-11-17 16:03:49 UTC; 21h ago
   CGroup: /
           ├─user.slice
           │ └─cpuset-init.service
           │   └─2418 /bin/sleep infinity
           ├─init.scope
           │ └─1 /usr/lib/systemd/systemd --switched-root --system --deserialize 16
           ├─system.slice
           │ ├─irqbalance.service
           │ │ └─4576 /sbin/irqbalance --policyscript=/opt/omneon/sbin/irqbalance-ban.py --foreground
           │ ├─systemd-update-utmp.service
           │ ├─var-lib-kubelet-pods-9442a890\x2def09\x2d4a23\x2da5f5\x2d468078336436-volumes-kubernetes.io\x7eprojected-tls\x2dassets.mount
           │ ├─var-lib-pgdb-pg_log.mount
           │ ├─run-containerd-io.containerd.runtime.v2.task-k8s.io-9b7ebe191af1caee7c18acca4db8ef2c3c671c90ebed84dda923eaa5e4f16cad-rootfs.mount
           │ ├─var-log.mount
           │ ├─run-containerd-io.containerd.runtime.v2.task-k8s.io-7df7db3e64340a5a722340e86d3ff57667247c0af41c3901b967d7282dcabb82-rootfs.mount
           │ ├─lm_sensors.service
           │ ├─linux_performance_tuning_post_net.service
           │ ├─kube-scheduler.service
           │ │ └─10306 /usr/local/bin/kube-scheduler --bind-address=127.0.0.1 --config=/etc/kubernetes/config/kube-scheduler.yaml --profiling=false --v=2
           │ ├─run-containerd-io.containerd.grpc.v1.cri-sandboxes-fe68ab2c54c4e080fa59d2704214383fbd0d9afeee08b6f32fbda016dfe38caf-shm.mount
           │ ├─pcscd.socket
           │ ├─var-lib-kubelet-pods-282ed129\x2dc820\x2d4d17\x2d83ec\x2d5fd195d42947-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2dlbp6j.mount
           │ ├─run-containerd-io.containerd.runtime.v2.task-k8s.io-97fe6cf2172869acf381caf38e9c77a80dd4ffb5ec9ffd342bb1297327a0c61c-rootfs.mount
           │ ├─systemd-udevd-control.socket
           │ ├─run-netns-cni\x2dd4f2855c\x2d4183\x2db95a\x2d78af\x2deca3a4fecde8.mount
           │ ├─run-containerd-io.containerd.runtime.v2.task-k8s.io-f15a73eb2d9863156cc92fde623fb7c9f291dbcc6f4fd68c05f1d72e948c907e-rootfs.mount
           │ ├─run-netns-cni\x2dd8ff14c3\x2dae72\x2d93f7\x2d4338\x2d6c79dbfe2b1c.mount
           │ ├─lvm2-monitor.service
           │ ├─var-lib-kubelet-pods-180edcc9\x2dc2ff\x2d4d05\x2dbc3a\x2dffd533439e72-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2dhtvwm.mount
           │ ├─var-lib-triveni.mount
           │ ├─systemd-journal-flush.service
           │ ├─run-containerd-io.containerd.runtime.v2.task-k8s.io-d47371732f56f14f031e0b7b210ea0c0947efe1c4bf8fa70b5e0c3ebb888ecfa-rootfs.mount
           │ ├─containerd.service
           │ │ ├─ 8866 /usr/local/bin/containerd
           │ │ ├─13338 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 9b7ebe191af1caee7c18acca4db8ef2c3c671c90ebed84dda923eaa5e4f16cad -address /run/containerd/containerd.sock
           │ │ ├─13339 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id f4065ad589e13db0b84c7391204b93094e65cb429fb782299eb826113accb8ea -address /run/containerd/containerd.sock
           │ │ ├─13340 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id fe01a89fedb057cc98e77dfcaadada33e8d2afe86478b11f263d5f2e0a9cdf70 -address /run/containerd/containerd.sock
           │ │ ├─13342 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id b521bb9fc800522d5ddc3a138193d4a08f4d51ce79ee0d19a6b3e42d1d4a4ee4 -address /run/containerd/containerd.sock
           │ │ ├─13343 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 122f73268927d80259bbcdb7f8459c557a41c549eed251b9c19c06ea29fb9f3c -address /run/containerd/containerd.sock
           │ │ ├─13344 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 97fe6cf2172869acf381caf38e9c77a80dd4ffb5ec9ffd342bb1297327a0c61c -address /run/containerd/containerd.sock
           │ │ ├─13345 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id f35a227c889252874195cfa019db37de8dd32f8c0a56e2dabdc9040729bdebed -address /run/containerd/containerd.sock
           │ │ ├─13648 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 0aa4eddeb5b3286b2b60bf5210fa41e2b8bc42f89958b23ba2038700b985324e -address /run/containerd/containerd.sock
           │ │ ├─13836 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 82b42c91685186661522be88ab242d1fdfde63fc80e20d245e230c4a43b91076 -address /run/containerd/containerd.sock
           │ │ ├─13974 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id d47371732f56f14f031e0b7b210ea0c0947efe1c4bf8fa70b5e0c3ebb888ecfa -address /run/containerd/containerd.sock
           │ │ ├─14063 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id bd5c5cd2d6fdb3330412b7f2a136f6c40255e3f17d2e12cb4e727dbadb0715a2 -address /run/containerd/containerd.sock
           │ │ ├─14593 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id e32dbeb647266922c1ed7cbf28d0793f5eb1684ee8ea9245e799311514fddcf8 -address /run/containerd/containerd.sock
           │ │ ├─15347 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 945e7003246f1d85e24f7367e5a3332fcc3bfe456f7f7b9fcc12cd5399d0ed27 -address /run/containerd/containerd.sock
           │ │ ├─15584 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id d11743e05135a6b7daa52339ed04bbac1981ffc8018794fe1c83b1da7b182e49 -address /run/containerd/containerd.sock
           │ │ ├─16084 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 4410a4c85c7991ba762df13284942daddcb3f54a86de129e666c5728aaaa401c -address /run/containerd/containerd.sock
           │ │ ├─16614 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id bb8f9aba5a9cfe49eda5b1007ecac6c2228462f77806cb7801aa820df7b2f0a4 -address /run/containerd/containerd.sock
           │ │ ├─24364 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 0d7155d211873ab9dd56c492ca84a6e4b4923853bba2d738ec0b87355cd4d5c7 -address /run/containerd/containerd.sock
           │ │ ├─25399 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 7df7db3e64340a5a722340e86d3ff57667247c0af41c3901b967d7282dcabb82 -address /run/containerd/containerd.sock
           │ │ ├─25721 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 77a2d3f8700f25d768cab0b31d993da60466e1cfeb0c21451e503ba6b4caa4f0 -address /run/containerd/containerd.sock
           │ │ ├─26954 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 048a23daccd11173da5ccd3aed2475ac58bd2f5c48e564c56d3867c90e407e8b -address /run/containerd/containerd.sock
           │ │ ├─30076 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id bdd3f0371b20c26eabbf8c0c7141ac0992f046ae3f8d0673711e30a1078f8c0d -address /run/containerd/containerd.sock
           │ │ ├─30238 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 1d580c48cbcd18e29e222541f0507c3fb28987d91d35b55bd98d39e7d7c21917 -address /run/containerd/containerd.sock
           │ │ ├─30634 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 86235cf3a3f9e3cfc02fc7f4888ecb5e3cfa9c0ec97e426d2ae1ce316b32c197 -address /run/containerd/containerd.sock
           │ │ ├─40260 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 088d676e25b836ba1cad2c6a51c82f123437319e49f2e76950f72abfe9bf1927 -address /run/containerd/containerd.sock
           │ │ ├─40264 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 722977738813a4b4dba7b54d441862d21d5970ae817c4d9a8ce4634487fc6687 -address /run/containerd/containerd.sock
           │ │ ├─41057 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 68567ef6b45f1055433d0f3e6fbcd064ef9bb98190dd051f69fd12cff808e30f -address /run/containerd/containerd.sock
           │ │ ├─42434 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 4756130fd5ad708a904a99a0cad190c227812ba5f85f488054b470d5d9ab4fbb -address /run/containerd/containerd.sock
           │ │ ├─42443 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id ecb389f6e488e34ee05de07d5ba0b6a865880613de9512f762d02a25ef996982 -address /run/containerd/containerd.sock
           │ │ ├─42451 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 103288724dcc47b7a997f0c219c0fad5d24fe11321d3a9fbfa04ba4763764450 -address /run/containerd/containerd.sock
           │ │ ├─43094 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 8c433fb4c3fc17ad704bf6e54b939e9997d87dd176929ea899a6a96130f63b4f -address /run/containerd/containerd.sock
           │ │ ├─44628 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 42840c0de72c59943724e2371a66aadbd0d28beaf75a202ec52c78ab12091840 -address /run/containerd/containerd.sock
           │ │ ├─44852 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 00e98de63a20a316ed80b9fb58d481ce95ac134055b07fa246fb8be067636213 -address /run/containerd/containerd.sock
           │ │ ├─45020 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id fe68ab2c54c4e080fa59d2704214383fbd0d9afeee08b6f32fbda016dfe38caf -address /run/containerd/containerd.sock
           │ │ ├─45663 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id d8fdd5b3c74ad34b276029b4db554a5ac1526f30d1ed42e889a2d11810f97e27 -address /run/containerd/containerd.sock
           │ │ ├─46559 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id b4f17c609632632a613da8bf68ce25789705dbcb014364fa3b7419ae46c316a7 -address /run/containerd/containerd.sock
           │ │ ├─47074 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 8e6998139a624e92be3d5aec5ae450aef662eb452d6c1b589e89721e082040a1 -address /run/containerd/containerd.sock
           │ │ ├─47989 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 93457ab6bb3bd0b0033873f9f7b5eb98d2daaa300555d4b050a89460ee8dbe06 -address /run/containerd/containerd.sock
           │ │ ├─48450 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 6ae50f2a676c640e423edd4d4bf7cff29770d8a9e82293d7113d8fcef8912e53 -address /run/containerd/containerd.sock
           │ │ └─49612 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 3ad1a6a103e84d28055fe44b77c7699f1de9ca54723853a94bc8fe5f7309cb85 -address /run/containerd/containerd.sock
           │ ├─run-containerd-io.containerd.runtime.v2.task-k8s.io-00e98de63a20a316ed80b9fb58d481ce95ac134055b07fa246fb8be067636213-rootfs.mount
           │ ├─run-containerd-io.containerd.runtime.v2.task-k8s.io-4410a4c85c7991ba762df13284942daddcb3f54a86de129e666c5728aaaa401c-rootfs.mount
           │ ├─var-lib-kubelet-pods-2054d1e4\x2df2e3\x2d40bc\x2da3a0\x2db76f1ef5c374-volumes-kubernetes.io\x7eempty\x2ddir-gfxss.mount
           │ ├─run-containerd-io.containerd.grpc.v1.cri-sandboxes-bdd3f0371b20c26eabbf8c0c7141ac0992f046ae3f8d0673711e30a1078f8c0d-shm.mount
           │ ├─run-netns-cni\x2d965bdda6\x2de9a6\x2d2b1f\x2d1eea\x2d6e338a5b9271.mount
           │ ├─var-lib-containerd.mount
           │ ├─run-containerd-io.containerd.grpc.v1.cri-sandboxes-d8fdd5b3c74ad34b276029b4db554a5ac1526f30d1ed42e889a2d11810f97e27-shm.mount
           │ ├─cpqIde.service
           │ │ └─8863 /sbin/cpqIde -f
           │ ├─systemd-udevd.service
           │ │ └─2430 /usr/lib/systemd/systemd-udevd
           │ ├─preempt.service
           │ ├─systemd-udevd-kernel.socket
           │ ├─var-lib-kubelet-pods-706f2b88\x2d4937\x2d4630\x2da676\x2d3d637a0649ac-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2dvzdt6.mount
           │ ├─run-containerd-io.containerd.grpc.v1.cri-sandboxes-4756130fd5ad708a904a99a0cad190c227812ba5f85f488054b470d5d9ab4fbb-shm.mount
           │ ├─run-containerd-io.containerd.grpc.v1.cri-sandboxes-bb8f9aba5a9cfe49eda5b1007ecac6c2228462f77806cb7801aa820df7b2f0a4-shm.mount
           │ ├─run-containerd-io.containerd.runtime.v2.task-k8s.io-aa5225d693f714f7e75445136b3b211b51dddf9ae10e9b6533a4b762901249dd-rootfs.mount
           │ ├─run-containerd-io.containerd.runtime.v2.task-k8s.io-3ad1a6a103e84d28055fe44b77c7699f1de9ca54723853a94bc8fe5f7309cb85-rootfs.mount
           │ ├─fingerprint.service
           │ ├─nminet.service
           │ │ ├─6858 python3 /opt/omneon/nmi/nminet/bin/../src/nminet.py -v
           │ │ ├─6965 /usr/bin/teamd -N -o -U -d -n -t net1 -c {"device": "net1", "mcast_rejoin": {"count": 3, "interval": 25}, "notify_peers": {"count": 3, "interval": 25}, "runner": {"name": "activebackup", "hwaddr_policy": "same_all"}, "link_watch": {"name": "ethtool"}}
           │ │ ├─7002 /usr/bin/teamd -N -o -U -d -n -t net2 -c {"device": "net2", "mcast_rejoin": {"count": 3, "interval": 25}, "notify_peers": {"count": 3, "interval": 25}, "runner": {"name": "activebackup", "hwaddr_policy": "same_all"}, "link_watch": {"name": "ethtool"}}
           │ │ ├─7022 /usr/bin/teamd -N -o -U -d -n -t net3 -c {"device": "net3", "mcast_rejoin": {"count": 3, "interval": 25}, "notify_peers": {"count": 3, "interval": 25}, "runner": {"name": "activebackup", "hwaddr_policy": "same_all"}, "link_watch": {"name": "ethtool"}}
           │ │ ├─7058 /usr/bin/teamd -N -o -U -d -n -t net4 -c {"device": "net4", "mcast_rejoin": {"count": 3, "interval": 25}, "notify_peers": {"count": 3, "interval": 25}, "runner": {"name": "activebackup", "hwaddr_policy": "same_all"}, "link_watch": {"name": "ethtool"}}
           │ │ ├─7071 /usr/bin/teamd -N -o -U -d -n -t net5 -c {"device": "net5", "mcast_rejoin": {"count": 3, "interval": 25}, "notify_peers": {"count": 3, "interval": 25}, "runner": {"name": "activebackup", "hwaddr_policy": "same_all"}, "link_watch": {"name": "ethtool"}}
           │ │ ├─7107 /usr/bin/teamd -N -o -U -d -n -t net6 -c {"device": "net6", "mcast_rejoin": {"count": 3, "interval": 25}, "notify_peers": {"count": 3, "interval": 25}, "runner": {"name": "activebackup", "hwaddr_policy": "same_all"}, "link_watch": {"name": "ethtool"}}
           │ │ ├─7122 /usr/bin/teamd -N -o -U -d -n -t net9 -c {"device": "net9", "mcast_rejoin": {"count": 3, "interval": 25}, "notify_peers": {"count": 3, "interval": 25}, "runner": {"name": "activebackup", "hwaddr_policy": "same_all"}, "link_watch": {"name": "ethtool"}}
           │ │ └─7156 /usr/bin/teamd -N -o -U -d -n -t net10 -c {"device": "net10", "mcast_rejoin": {"count": 3, "interval": 25}, "notify_peers": {"count": 3, "interval": 25}, "runner": {"name": "activebackup", "hwaddr_policy": "same_all"}, "link_watch": {"name": "ethtool"}}
           │ ├─nmipoller_raidssa.service
           │ │ └─8856 python3.11 /opt/omneon/nmi/nmipollers/src/raidssa/main.py
           │ ├─var-lib-elasticsearch.mount
           │ ├─sys-fs-fuse-connections.mount
           │ ├─system-serial\x2dgetty.slice
           │ │ └─serial-getty@ttyS0.service
           │ │   └─8909 /sbin/agetty -o -p -- \u --keep-baud 115200,38400,9600 ttyS0 vt220
           │ ├─run-containerd-io.containerd.grpc.v1.cri-sandboxes-00e98de63a20a316ed80b9fb58d481ce95ac134055b07fa246fb8be067636213-shm.mount
           │ ├─rpc-statd-notify.service
           │ ├─var-lib-kubelet-pods-035d6c70\x2dd4ea\x2d4b47\x2d8cbd\x2d9045b8424260-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2dwbx58.mount
           │ ├─run-containerd-io.containerd.grpc.v1.cri-sandboxes-7df7db3e64340a5a722340e86d3ff57667247c0af41c3901b967d7282dcabb82-shm.mount
           │ ├─run-containerd-io.containerd.grpc.v1.cri-sandboxes-86235cf3a3f9e3cfc02fc7f4888ecb5e3cfa9c0ec97e426d2ae1ce316b32c197-shm.mount
           │ ├─run-containerd-io.containerd.grpc.v1.cri-sandboxes-82b42c91685186661522be88ab242d1fdfde63fc80e20d245e230c4a43b91076-shm.mount
           │ ├─run-containerd-io.containerd.runtime.v2.task-k8s.io-1d580c48cbcd18e29e222541f0507c3fb28987d91d35b55bd98d39e7d7c21917-rootfs.mount
           │ ├─run-netns-cni\x2d9600e88c\x2dccbc\x2dae1b\x2dfc9d\x2d62ec09c800af.mount
           │ ├─kube-apiserver.service
           │ │ └─8949 /usr/local/bin/kube-apiserver --advertise-address=192.0.2.248 --allow-privileged=true --apiserver-count=1 --audit-log-maxage=30 --audit-log-maxbackup=3 --audit-log-maxsize=100 --audit-log-path=/var/log/apiserver/audit.log --authorization-mode=Node,RBAC --bind-address=0.0.0.0 --client-ca-file=/var/lib/kubernetes/ca.pem --enable-admission-plugins=NamespaceLifecycle,NodeRestriction,LimitRanger,ServiceAccount,DefaultStorageClass,ResourceQuota --etcd-cafile=/var/lib/kubernetes/ca.pem --etcd-certfile=/var/lib/kubernetes/kubernetes.pem --etcd-keyfile=/var/lib/kubernetes/kubernetes-key.pem --etcd-servers=https://127.0.0.1:2379 --event-ttl=1h --encryption-provider-config=/var/lib/kubernetes/encryption-config.yaml --kubelet-certificate-authority=/var/lib/kubernetes/ca.pem --kubelet-client-certificate=/var/lib/kubernetes/kubernetes.pem --kubelet-client-key=/var/lib/kubernetes/kubernetes-key.pem --profiling=false --runtime-config=api/all=true --service-account-key-file=/var/lib/kubernetes/service-account.pem --service-account-signing-key-file=/var/lib/kubernetes/service-account-key.pem --service-account-issuer=https://192.0.2.248:6443 --service-cluster-ip-range=203.0.113.0/24 --service-node-port-range=2500-50055 --tls-cert-file=/var/lib/kubernetes/kubernetes.pem --tls-private-key-file=/var/lib/kubernetes/kubernetes-key.pem --tls-cipher-suites=TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256,TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305,TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384,TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305,TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384,TLS_RSA_WITH_AES_256_GCM_SHA384,TLS_RSA_WITH_AES_128_GCM_SHA256 --shutdown-watch-termination-grace-period=5s --v=2
           │ ├─run-netns-cni\x2dde0a1fc6\x2dce2c\x2d5aea\x2da63c\x2d94f2b4ae01ca.mount
           │ ├─run-containerd-io.containerd.runtime.v2.task-k8s.io-93457ab6bb3bd0b0033873f9f7b5eb98d2daaa300555d4b050a89460ee8dbe06-rootfs.mount
           │ ├─var-lib-kubelet-pods-8ffb9bdd\x2d9417\x2d450a\x2db140\x2d623dad767f01-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2drzl4m.mount
           │ ├─openibd.service
           │ ├─credentials-init.service
           │ ├─var-lib-kubelet-pods-03911f12\x2d9dd7\x2d491a\x2db444\x2da79c18d5bc39-volume\x2dsubpaths-pgdb\x2ddata-pgdb-1.mount
           │ ├─run-containerd-io.containerd.runtime.v2.task-k8s.io-945e7003246f1d85e24f7367e5a3332fcc3bfe456f7f7b9fcc12cd5399d0ed27-rootfs.mount
           │ ├─sys-kernel-config.mount
           │ ├─polkit.service
           │ │ └─9285 /usr/lib/polkit-1/polkitd --no-debug
           │ ├─run-containerd-io.containerd.runtime.v2.task-k8s.io-0cc8fb35afbc569f879b5340cf38cbc9ddc87773fce6b67a9c7fc07172660322-rootfs.mount
           │ ├─run-containerd-io.containerd.grpc.v1.cri-sandboxes-945e7003246f1d85e24f7367e5a3332fcc3bfe456f7f7b9fcc12cd5399d0ed27-shm.mount
           │ ├─systemd-remount-fs.service
           │ ├─ip6tables.service
           │ ├─rpcbind.socket
           │ ├─lshw-dump.service
           │ ├─update_files.mount
           │ ├─run-containerd-io.containerd.runtime.v2.task-k8s.io-6633e7d49882ec93c836eb9e2e2b57e3ede0d2cbfa2be9b417cc1f339e7241f5-rootfs.mount
           │ ├─run-containerd-io.containerd.runtime.v2.task-k8s.io-103288724dcc47b7a997f0c219c0fad5d24fe11321d3a9fbfa04ba4763764450-rootfs.mount
           │ ├─run-containerd-io.containerd.grpc.v1.cri-sandboxes-e32dbeb647266922c1ed7cbf28d0793f5eb1684ee8ea9245e799311514fddcf8-shm.mount
           │ ├─var-lib-kubelet-pods-2054d1e4\x2df2e3\x2d40bc\x2da3a0\x2db76f1ef5c374-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2dx2kbk.mount
           │ ├─run-netns-cni\x2d71b60ffe\x2d98a1\x2da12d\x2d66c0\x2d01a75d1c91d2.mount
           │ ├─var-lib-kubelet-pods-180edcc9\x2dc2ff\x2d4d05\x2dbc3a\x2dffd533439e72-volumes-kubernetes.io\x7esecret-certs.mount
           │ ├─var-lib-kubelet-pods-e8b5f050\x2d8a1e\x2d4433\x2db78d\x2d7d533407b8a0-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2dm4b28.mount
           │ ├─run-containerd-io.containerd.grpc.v1.cri-sandboxes-93457ab6bb3bd0b0033873f9f7b5eb98d2daaa300555d4b050a89460ee8dbe06-shm.mount
           │ ├─var-lib-kubelet-pods-0a0dbba7\x2db736\x2d437c\x2daf77\x2d183ba0386260-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2dfsmx7.mount
           │ ├─run-containerd-io.containerd.runtime.v2.task-k8s.io-e6c48aed1f74ce74c92c7fa0d44b00eb2cec24c30cb75bd0e44cc002979144d2-rootfs.mount
           │ ├─run-containerd-io.containerd.runtime.v2.task-k8s.io-bd2f5bcbccb5108ea75dd8fe3c107454e9443a9bf04d0d76ea762d52abf0de4d-rootfs.mount
           │ ├─sys-kernel-debug.mount
           │ ├─run-containerd-io.containerd.grpc.v1.cri-sandboxes-ecb389f6e488e34ee05de07d5ba0b6a865880613de9512f762d02a25ef996982-shm.mount
           │ ├─run-containerd-io.containerd.grpc.v1.cri-sandboxes-9b7ebe191af1caee7c18acca4db8ef2c3c671c90ebed84dda923eaa5e4f16cad-shm.mount
           │ ├─system-mlnx_interface_mgr.slice
           │ ├─run-containerd-io.containerd.grpc.v1.cri-sandboxes-722977738813a4b4dba7b54d441862d21d5970ae817c4d9a8ce4634487fc6687-shm.mount
           │ ├─mnt-data_drive.mount
           │ ├─nmiperfmon_fans.service
           │ │ └─8792 /opt/omneon/nmi/nmiperfmon
           │ ├─lvm2-lvmpolld.socket
           │ ├─linux_performance_tuning_pre_net.service
           │ ├─run-netns-cni\x2d7cefe5ea\x2dd575\x2de3ef\x2d8c22\x2d39d543dd3ee6.mount
           │ ├─run-containerd-io.containerd.runtime.v2.task-k8s.io-d8fdd5b3c74ad34b276029b4db554a5ac1526f30d1ed42e889a2d11810f97e27-rootfs.mount
           │ ├─var-lib-kubelet-pods-009abdc9\x2d2e33\x2d4e2e\x2d837f\x2dc454cee1b6cf-volumes-kubernetes.io\x7eempty\x2ddir-ingest\x2ddata.mount
           │ ├─tuned.service
           │ │ └─8832 /usr/libexec/platform-python -Es /usr/sbin/tuned -l -P
           │ ├─run-containerd-io.containerd.runtime.v2.task-k8s.io-fe3a782b6ddb59dc473fbf1bfad0404b1d5a2a9b06b8dc15cc7474ea935194c5-rootfs.mount
           │ ├─nmiset_hostname.service
           │ ├─shutdown-containers.service
           │ ├─var-lib-kubelet-pods-096bba8f\x2d0beb\x2d47cb\x2dac9c\x2dcc22c9bed927-volume\x2dsubpaths-config-nginx-0.mount
           │ ├─system-sshd\x2dkeygen.slice
           │ ├─systemd-tmpfiles-setup.service
           │ ├─run-containerd-io.containerd.runtime.v2.task-k8s.io-8e6998139a624e92be3d5aec5ae450aef662eb452d6c1b589e89721e082040a1-rootfs.mount
           │ ├─run-containerd-io.containerd.grpc.v1.cri-sandboxes-b521bb9fc800522d5ddc3a138193d4a08f4d51ce79ee0d19a6b3e42d1d4a4ee4-shm.mount
           │ ├─run-containerd-io.containerd.runtime.v2.task-k8s.io-42840c0de72c59943724e2371a66aadbd0d28beaf75a202ec52c78ab12091840-rootfs.mount
           │ ├─run-containerd-io.containerd.runtime.v2.task-k8s.io-3ddcf70649aeed995d1fdce890e993ead6f5b073b7de2211d2f93de524d2a7b5-rootfs.mount
           │ ├─var-lib-kubelet-pods-7cd2ae11\x2d942e\x2d42e8\x2d9ac7\x2dca02a995c553-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2d57cnh.mount
           │ ├─var-lib-kubelet-pods-096bba8f\x2d0beb\x2d47cb\x2dac9c\x2dcc22c9bed927-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2dcg4rv.mount
           │ ├─-.mount
           │ ├─var-lib-kubelet-pods-d3004940\x2d8948\x2d414f\x2d97ef\x2d9b881316670e-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2djdtgg.mount
           │ ├─run-containerd-io.containerd.runtime.v2.task-k8s.io-77a2d3f8700f25d768cab0b31d993da60466e1cfeb0c21451e503ba6b4caa4f0-rootfs.mount
           │ ├─run-containerd-io.containerd.grpc.v1.cri-sandboxes-4410a4c85c7991ba762df13284942daddcb3f54a86de129e666c5728aaaa401c-shm.mount
           │ ├─var-lib-kubelet-pods-aff14187\x2d181d\x2d4c0a\x2d9445\x2da2326a3bf487-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2d66jgh.mount
           │ ├─run-containerd-io.containerd.runtime.v2.task-k8s.io-1af4c9c62ebee28328f5112ba9d76d6b74a48a6627e620d82f2d49d3c5a29efa-rootfs.mount
           │ ├─setupdevops.service
           │ ├─kubelet.service
           │ │ └─12424 /usr/local/bin/kubelet --config=/var/lib/kubelet/kubelet-config.yaml --kubeconfig=/var/lib/kubelet/kubeconfig --hostname-override=vosflex --container-runtime-endpoint=unix:///run/containerd/containerd.sock --v=2
           │ ├─systemd-journald-dev-log.socket
           │ ├─run-containerd-io.containerd.runtime.v2.task-k8s.io-fe01a89fedb057cc98e77dfcaadada33e8d2afe86478b11f263d5f2e0a9cdf70-rootfs.mount
           │ ├─systemd-coredump.socket
           │ ├─kbdrate.service
           │ ├─init-dummy-net-interface.service
           │ ├─run-containerd-io.containerd.runtime.v2.task-k8s.io-0d7155d211873ab9dd56c492ca84a6e4b4923853bba2d738ec0b87355cd4d5c7-rootfs.mount
           │ ├─var-lib-kubelet-pods-7cd2ae11\x2d942e\x2d42e8\x2d9ac7\x2dca02a995c553-volumes-kubernetes.io\x7esecret-esam\x2doob\x2dbasic\x2dauth\x2dsecret\x2dvolume.mount
           │ ├─kdump.service
           │ ├─earlyoom.service
           │ │ └─4378 /opt/omneon/bin/earlyoom -i -m 4 -r 30
           │ ├─run-netns-cni\x2d3bebe15e\x2d87e5\x2dea94\x2d13d8\x2da7a368220828.mount
           │ ├─var-lib-dms_files.mount
           │ ├─run-netns-cni\x2d74f60784\x2dbf9c\x2de91e\x2db9e5\x2d3396f4072574.mount
           │ ├─run-containerd-io.containerd.runtime.v2.task-k8s.io-4756130fd5ad708a904a99a0cad190c227812ba5f85f488054b470d5d9ab4fbb-rootfs.mount
           │ ├─run-netns-cni\x2d78dc31f7\x2d2a46\x2d126d\x2d4ffe\x2d0e38dcc5b417.mount
           │ ├─var-lib-pgdb-backup_restore.mount
           │ ├─run-containerd-io.containerd.runtime.v2.task-k8s.io-86235cf3a3f9e3cfc02fc7f4888ecb5e3cfa9c0ec97e426d2ae1ce316b32c197-rootfs.mount
           │ ├─run-containerd-io.containerd.grpc.v1.cri-sandboxes-bd5c5cd2d6fdb3330412b7f2a136f6c40255e3f17d2e12cb4e727dbadb0715a2-shm.mount
           │ ├─run-containerd-io.containerd.runtime.v2.task-k8s.io-7149e251631bd2084f6135e3cb5b75287790d2f14422004e79a3c6f962e189b6-rootfs.mount
           │ ├─run-containerd-io.containerd.runtime.v2.task-k8s.io-b746563060551612d49e0b3e5ebfc32a5e99f5518389847c64602c6be63c8a9d-rootfs.mount
           │ ├─systemd-journald.service
           │ │ └─3086 /usr/lib/systemd/systemd-journald
           │ ├─run-containerd-io.containerd.runtime.v2.task-k8s.io-b521bb9fc800522d5ddc3a138193d4a08f4d51ce79ee0d19a6b3e42d1d4a4ee4-rootfs.mount
           │ ├─run-containerd-io.containerd.grpc.v1.cri-sandboxes-42840c0de72c59943724e2371a66aadbd0d28beaf75a202ec52c78ab12091840-shm.mount
           │ ├─run-containerd-io.containerd.runtime.v2.task-k8s.io-ba239e148103274ece222541571cef554d8f50d8e19cc55f13679a6a1e2d2076-rootfs.mount
           │ ├─run-containerd-io.containerd.runtime.v2.task-k8s.io-0aa4eddeb5b3286b2b60bf5210fa41e2b8bc42f89958b23ba2038700b985324e-rootfs.mount
           │ ├─var-lib-kubelet-pods-7a9dc476\x2d39fb\x2d46a5\x2d8dae\x2d2b3f334ed106-volumes-kubernetes.io\x7eempty\x2ddir-gfxss.mount
           │ ├─var-lib-kubelet-pods-9442a890\x2def09\x2d4a23\x2da5f5\x2d468078336436-volume\x2dsubpaths-web\x2dconfig-prometheus-4.mount
           │ ├─run-containerd-io.containerd.runtime.v2.task-k8s.io-21d714746405044fd80d02b52d0146c4a8e8ea066da93341094adb56168511e9-rootfs.mount
           │ ├─run-netns-cni\x2dfa3094c1\x2d793f\x2d879a\x2dc802\x2d0eb29392b279.mount
           │ ├─var-lib-kubelet-pods-e4417f1c\x2dec13\x2d46d6\x2da15e\x2de54cc6549410-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2dcftkq.mount
           │ ├─atd.service
           │ │ └─8918 /usr/sbin/atd -f
           │ ├─systemd-udev-trigger.service
           │ ├─var-lib-kubelet-pods-1a5c156d\x2d0493\x2d4d63\x2d9e6d\x2d1699216a43f7-volumes-kubernetes.io\x7esecret-secret\x2dconfig.mount
           │ ├─corefiles.mount
           │ ├─run-containerd-io.containerd.runtime.v2.task-k8s.io-bf7cf933a8910b9fb80cce11f576b2ce0282a4a972e5b0001806935cd7e4a995-rootfs.mount
           │ ├─systemd-rfkill.socket
           │ ├─sshd.service
           │ │ └─8807 /usr/sbin/sshd -D
           │ ├─var-lib-kubelet-pods-d223d476\x2dac55\x2d4a39\x2d8b62\x2d404db15c85ec-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2dfcnd7.mount
           │ ├─nmipoller_raidmr.service
           │ │ └─8787 python3.11 /opt/omneon/nmi/nmipollers/src/raidmr/main.py
           │ ├─run-containerd-io.containerd.grpc.v1.cri-sandboxes-f35a227c889252874195cfa019db37de8dd32f8c0a56e2dabdc9040729bdebed-shm.mount
           │ ├─var-lib-kubelet-pods-c3a4b089\x2d3305\x2d4918\x2db342\x2dedb4ab6fc5dc-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2d79r7z.mount
           │ ├─dev-mqueue.mount
           │ ├─run-containerd-io.containerd.grpc.v1.cri-sandboxes-8e6998139a624e92be3d5aec5ae450aef662eb452d6c1b589e89721e082040a1-shm.mount
           │ ├─crond.service
           │ │ └─8944 /usr/sbin/crond -n
           │ ├─run-containerd-io.containerd.runtime.v2.task-k8s.io-bdd3f0371b20c26eabbf8c0c7141ac0992f046ae3f8d0673711e30a1078f8c0d-rootfs.mount
           │ ├─run-netns-cni\x2d19ac231d\x2dc843\x2de559\x2dbe3f\x2db6295ccf8de2.mount
           │ ├─var-lib-kubelet-pods-03911f12\x2d9dd7\x2d491a\x2db444\x2da79c18d5bc39-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2dcbfc4.mount
           │ ├─run-containerd-io.containerd.runtime.v2.task-k8s.io-2ce57bc5a2491a58dd6362918c37ac6e80a3bd093e610da0db3cc4d9a6bc9fda-rootfs.mount
           │ ├─var-lib-kubelet-pods-7a9dc476\x2d39fb\x2d46a5\x2d8dae\x2d2b3f334ed106-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2dx5bsn.mount
           │ ├─run-containerd-io.containerd.runtime.v2.task-k8s.io-83cbb97e4fb7c9801bb5d1334c503eb1231e1f7903d8ed8f73c60a62819d0bba-rootfs.mount
           │ ├─run-netns-cni\x2d58ec732f\x2d6ad5\x2d15da\x2dcae7\x2db9933e40fa40.mount
           │ ├─kube-controller-manager.service
           │ │ └─10312 /usr/local/bin/kube-controller-manager --bind-address=127.0.0.1 --cluster-cidr=198.51.100.0/24 --cluster-name=kubernetes --cluster-signing-cert-file=/var/lib/kubernetes/ca.pem --cluster-signing-key-file=/var/lib/kubernetes/ca-key.pem --kubeconfig=/var/lib/kubernetes/kube-controller-manager.kubeconfig --leader-elect=true --profiling=false --root-ca-file=/var/lib/kubernetes/ca.pem --service-account-private-key-file=/var/lib/kubernetes/service-account-key.pem --service-cluster-ip-range=203.0.113.0/24 --terminated-pod-gc-threshold=10 --use-service-account-credentials=true --v=2
           │ ├─nmiperfmon_raid_controllers.service
           │ │ └─8804 /opt/omneon/nmi/nmiperfmon
           │ ├─var-lib-kubelet-pods-9442a890\x2def09\x2d4a23\x2da5f5\x2d468078336436-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2dpldp2.mount
           │ ├─run-containerd-io.containerd.runtime.v2.task-k8s.io-61c556810ef8dbb1aed7ea76255af0833233248a82e667d07e0b879d365b778f-rootfs.mount
           │ ├─var-lib-kubelet-pods-29ecd84d\x2d4896\x2d4ebc\x2da838\x2d464df976b54b-volumes-kubernetes.io\x7esecret-kubernetes\x2ddashboard\x2dcerts.mount
           │ ├─run-containerd-io.containerd.runtime.v2.task-k8s.io-722977738813a4b4dba7b54d441862d21d5970ae817c4d9a8ce4634487fc6687-rootfs.mount
           │ ├─var-lib-kubelet-pods-9442a890\x2def09\x2d4a23\x2da5f5\x2d468078336436-volumes-kubernetes.io\x7elocal\x2dvolume-prometheus\x2dpv.mount
           │ ├─irqbalance2110.service
           │ │ └─4710 /sbin/irqbalance --policyscript=/opt/omneon/sbin/irqbalance-ban.py --foreground
           │ ├─systemd-initctl.socket
           │ ├─iptables.service
           │ ├─var-lib-kubelet-pods-03911f12\x2d9dd7\x2d491a\x2db444\x2da79c18d5bc39-volume\x2dsubpaths-pgdb\x2ddata-pgdb-2.mount
           │ ├─NetworkManager.service
           │ │ └─4574 /usr/sbin/NetworkManager --no-daemon
           │ ├─run-containerd-io.containerd.runtime.v2.task-k8s.io-6ae50f2a676c640e423edd4d4bf7cff29770d8a9e82293d7113d8fcef8912e53-rootfs.mount
           │ ├─hw_check_state.service
           │ ├─var-lib-kubelet-pods-1a5c156d\x2d0493\x2d4d63\x2d9e6d\x2d1699216a43f7-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2dh2cbt.mount
           │ ├─etcd.service
           │ │ └─4383 /usr/local/bin/etcd --name etcd --cert-file=/etc/etcd/kubernetes.pem --key-file=/etc/etcd/kubernetes-key.pem --trusted-ca-file=/etc/etcd/ca.pem --client-cert-auth --listen-client-urls https://127.0.0.1:2379 --advertise-client-urls https://127.0.0.1:2379 --initial-cluster-state new --auto-compaction-mode=periodic --auto-compaction-retention=100m --data-dir=/var/lib/etcd --logger=zap
           │ ├─run-netns-cni\x2d34583cba\x2d9292\x2de688\x2dc483\x2deb33b5500982.mount
           │ ├─run-containerd-io.containerd.grpc.v1.cri-sandboxes-3ad1a6a103e84d28055fe44b77c7699f1de9ca54723853a94bc8fe5f7309cb85-shm.mount
           │ ├─run-containerd-io.containerd.runtime.v2.task-k8s.io-f4065ad589e13db0b84c7391204b93094e65cb429fb782299eb826113accb8ea-rootfs.mount
           │ ├─var-lib-kubelet-pods-3e49bb7f\x2d4353\x2d4a7c\x2d8d01\x2d161db867a3ee-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2dnlwf5.mount
           │ ├─mr_cpqScsi.service
           │ │ ├─8865 /sbin/mr_cpqScsi -f
           │ │ └─9343 /sbin/mr_cpqScsi -f
           │ ├─rtirq.service
           │ ├─systemd-random-seed.service
           │ ├─run-containerd-io.containerd.grpc.v1.cri-sandboxes-f4065ad589e13db0b84c7391204b93094e65cb429fb782299eb826113accb8ea-shm.mount
           │ ├─cleanup_zookeeper_on_startup.service
           │ ├─dbus.socket
           │ ├─run-containerd-io.containerd.grpc.v1.cri-sandboxes-d47371732f56f14f031e0b7b210ea0c0947efe1c4bf8fa70b5e0c3ebb888ecfa-shm.mount
           │ ├─run-containerd-io.containerd.runtime.v2.task-k8s.io-7e4e0613f3bdaa4e0315cfb163224a53ba0daec763fd26f1803fb7c0fe0b0c9a-rootfs.mount
           │ ├─var-lib-kubelet-pods-2054d1e4\x2df2e3\x2d40bc\x2da3a0\x2db76f1ef5c374-volumes-kubernetes.io\x7eempty\x2ddir-packager\x2dstate.mount
           │ ├─run-netns-cni\x2d33c9bd64\x2df94f\x2de7f3\x2dd4a0\x2d814248fc76e7.mount
           │ ├─var-lib-kubelet-pods-9442a890\x2def09\x2d4a23\x2da5f5\x2d468078336436-volumes-kubernetes.io\x7esecret-web\x2dconfig.mount
           │ ├─run-containerd-io.containerd.runtime.v2.task-k8s.io-386b74e96bce466252755a2db8492a0aa80fb81245a7e11ab02cadcb615d4e35-rootfs.mount
           │ ├─run-containerd-io.containerd.grpc.v1.cri-sandboxes-1d580c48cbcd18e29e222541f0507c3fb28987d91d35b55bd98d39e7d7c21917-shm.mount
           │ ├─run-netns-cni\x2da1d94c5c\x2d0718\x2d94e4\x2d24b9\x2d80f9835f2395.mount
           │ ├─dddvb-kmod.service
           │ ├─gssproxy.service
           │ │ └─8899 /usr/sbin/gssproxy -D
           │ ├─run-containerd-io.containerd.runtime.v2.task-k8s.io-900eab49b8c2ba07c64b2d9d3e37821d1dcf1ef16dcd585fb3feae19db6519a2-rootfs.mount
           │ ├─run-containerd-io.containerd.grpc.v1.cri-sandboxes-97fe6cf2172869acf381caf38e9c77a80dd4ffb5ec9ffd342bb1297327a0c61c-shm.mount
           │ ├─nmipoller_caminfo.service
           │ │ └─8796 python3.11 /opt/omneon/nmi/nmipollers/src/caminfo/main.py
           │ ├─var-lib-kubelet-pods-5cb575bc\x2de848\x2d4e17\x2da792\x2d8ede656bd25e-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2dd7nf4.mount
           │ ├─rsyslog.service
           │ │ └─8818 /usr/sbin/rsyslogd -n
           │ ├─run-netns-cni\x2de76763b3\x2d8c84\x2d2072\x2d8a73\x2d421ed689b3b6.mount
           │ ├─run-netns-cni\x2d9059af8c\x2d2cb7\x2d6ca8\x2d17f0\x2da5a8a72df9a8.mount
           │ ├─run-containerd-io.containerd.runtime.v2.task-k8s.io-e77396127a5205ad7263b1715ab488deed253092cf27f541d1d3a85180edc5a9-rootfs.mount
           │ ├─tmp.mount
           │ ├─run-containerd-io.containerd.runtime.v2.task-k8s.io-aad3f498ca0001b47bbc7d89f01988c218e9b80d529077e7152d13b61403f0bb-rootfs.mount
           │ ├─run-netns-cni\x2d03a85eca\x2d3519\x2dbd81\x2d54b1\x2dacb6905a7e24.mount
           │ ├─rc-local.service
           │ ├─var-nmi.mount
           │ ├─advantech_vega330x.service
           │ ├─hardware-monitor.service
           │ ├─systemd-udev-settle.service
           │ ├─var-lib-kubelet-pods-7a9dc476\x2d39fb\x2d46a5\x2d8dae\x2d2b3f334ed106-volumes-kubernetes.io\x7eempty\x2ddir-packager\x2dstate.mount
           │ ├─run-containerd-io.containerd.grpc.v1.cri-sandboxes-6ae50f2a676c640e423edd4d4bf7cff29770d8a9e82293d7113d8fcef8912e53-shm.mount
           │ ├─var-lib-kubelet-pods-d223d476\x2dac55\x2d4a39\x2d8b62\x2d404db15c85ec-volumes-kubernetes.io\x7eempty\x2ddir-gfxss.mount
           │ ├─var-lib-kubelet-pods-6ac986a8\x2d270f\x2d43b4\x2d8b04\x2d651fce2f139a-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2dstvbz.mount
           │ ├─run-netns-cni\x2d9320a390\x2d37f5\x2dca9d\x2daa28\x2d75ca569250c5.mount
           │ ├─var-lib-prometheus.mount
           │ ├─lspci-dump.service
           │ ├─delete_pods_on_startup.service
           │ ├─var-lib-kubelet-pods-2139364f\x2d7be5\x2d40b3\x2dbbec\x2d0c259f5c2559-volumes-kubernetes.io\x7eempty\x2ddir-test\x2dobject.mount
           │ ├─systemd-tmpfiles-setup-dev.service
           │ ├─amsd.service
           │ │ └─8867 /sbin/amsd -f
           │ ├─run-containerd-io.containerd.runtime.v2.task-k8s.io-d8b576a7151077a817ed7ccda23150d12c06cef14dff0d46cf1ddb8384a2c8b2-rootfs.mount
           │ ├─rpcbind.service
           │ │ └─3561 /usr/bin/rpcbind -w -f
           │ ├─nmiperfmon_power_supplies.service
           │ │ └─8800 /opt/omneon/nmi/nmiperfmon
           │ ├─kube-proxy.service
           │ │ └─10310 /usr/local/bin/kube-proxy --config=/var/lib/kube-proxy/kube-proxy-config.yaml --proxy-port-range 40000-42000
           │ ├─sys-kernel-debug-tracing.mount
           │ ├─var-lib-kubelet-pods-2139364f\x2d7be5\x2d40b3\x2dbbec\x2d0c259f5c2559-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2ds99pd.mount
           │ ├─run-containerd-io.containerd.grpc.v1.cri-sandboxes-b4f17c609632632a613da8bf68ce25789705dbcb014364fa3b7419ae46c316a7-shm.mount
           │ ├─run-netns-cni\x2de9cdd67a\x2ddac7\x2d8fc9\x2de83d\x2d67ac4b141242.mount
           │ ├─var-lib-kubelet-pods-6986b857\x2df59e\x2d4674\x2db383\x2df5319ad06f64-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2dgtgcv.mount
           │ ├─run-containerd-io.containerd.grpc.v1.cri-sandboxes-122f73268927d80259bbcdb7f8459c557a41c549eed251b9c19c06ea29fb9f3c-shm.mount
           │ ├─var-lib-persistent.service
           │ ├─docker-crashlogs.service
           │ ├─run-containerd-io.containerd.grpc.v1.cri-sandboxes-0aa4eddeb5b3286b2b60bf5210fa41e2b8bc42f89958b23ba2038700b985324e-shm.mount
           │ ├─dddvbfw.service
           │ ├─smad.service
           │ │ ├─8864 /sbin/smad
           │ │ ├─8881 /sbin/smad
           │ │ ├─8882 /sbin/smad
           │ │ └─9461 /sbin/smad
           │ ├─mnt-sda7-init.service
           │ ├─run-netns-cni\x2d0acaf869\x2d40de\x2da254\x2d8d1d\x2d7be3d8a0ed2c.mount
           │ ├─var-lib-kubelet-pods-009abdc9\x2d2e33\x2d4e2e\x2d837f\x2dc454cee1b6cf-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2dlvv29.mount
           │ ├─deltacast-kmod.service
           │ ├─run-netns-cni\x2db8e90953\x2d3c83\x2dcf68\x2d792e\x2dd2b99ff42b20.mount
           │ ├─bios.service
           │ ├─usb-dongle-check.service
           │ ├─var-lib-kubelet-pods-5c63f719\x2d4775\x2d4dfe\x2d970f\x2d71ff10f56311-volumes-kubernetes.io\x7esecret-grafana\x2ddatasources.mount
           │ ├─var-lib-kubelet-pods-3581198f\x2d9982\x2d4c55\x2d91af\x2d1388df22dece-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2d9vnr7.mount
           │ ├─run-containerd-io.containerd.runtime.v2.task-k8s.io-122f73268927d80259bbcdb7f8459c557a41c549eed251b9c19c06ea29fb9f3c-rootfs.mount
           │ ├─run-containerd-io.containerd.runtime.v2.task-k8s.io-fcac4959fbbd538807c041a643e8ae251eb6ca11af5c1415125b37871a5102e1-rootfs.mount
           │ ├─run-containerd-io.containerd.grpc.v1.cri-sandboxes-0d7155d211873ab9dd56c492ca84a6e4b4923853bba2d738ec0b87355cd4d5c7-shm.mount
           │ ├─systemd-journald.socket
           │ ├─var-lib-kubelet-pods-9442a890\x2def09\x2d4a23\x2da5f5\x2d468078336436-volume\x2dsubpaths-prometheus\x2dpv-prometheus-2.mount
           │ ├─teleportv2.service
           │ │ └─8811 sleep infinity
           │ ├─var-lib-kubelet-pods-6d657888\x2d4793\x2d498f\x2d9d00\x2d63bf11d5b00b-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2d8n5b4.mount
           │ ├─run-containerd-io.containerd.runtime.v2.task-k8s.io-b4f17c609632632a613da8bf68ce25789705dbcb014364fa3b7419ae46c316a7-rootfs.mount
           │ ├─var-lib-kubelet-pods-9810806a\x2d49a2\x2d47b3\x2dae21\x2ddd8237c0423f-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2dv94l2.mount
           │ ├─var-lib-kubelet-pods-b12c5b65\x2dd019\x2d4c39\x2da2bd\x2d3b70d80c5a52-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2dgnl9g.mount
           │ ├─run-containerd-io.containerd.runtime.v2.task-k8s.io-048a23daccd11173da5ccd3aed2475ac58bd2f5c48e564c56d3867c90e407e8b-rootfs.mount
           │ ├─run-netns-cni\x2d79376bb4\x2d67bf\x2df77d\x2d4cde\x2d080ae93b24c6.mount
           │ ├─var-lib-kubelet-pods-9442a890\x2def09\x2d4a23\x2da5f5\x2d468078336436-volumes-kubernetes.io\x7esecret-config.mount
           │ ├─node_teleport_runner.service
           │ │ └─29731 python3 /opt/teleport/bin/node_teleport_runner.py
           │ ├─run-containerd-io.containerd.grpc.v1.cri-sandboxes-103288724dcc47b7a997f0c219c0fad5d24fe11321d3a9fbfa04ba4763764450-shm.mount
           │ ├─var-lib-kubelet-pods-cc192fdc\x2dfcd9\x2d4e29\x2d8a4c\x2d12a71693a249-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2dc5t67.mount
           │ ├─run-containerd-io.containerd.grpc.v1.cri-sandboxes-048a23daccd11173da5ccd3aed2475ac58bd2f5c48e564c56d3867c90e407e8b-shm.mount
           │ ├─run-containerd-io.containerd.grpc.v1.cri-sandboxes-d11743e05135a6b7daa52339ed04bbac1981ffc8018794fe1c83b1da7b182e49-shm.mount
           │ ├─run-containerd-io.containerd.runtime.v2.task-k8s.io-d11743e05135a6b7daa52339ed04bbac1981ffc8018794fe1c83b1da7b182e49-rootfs.mount
           │ ├─kmod-static-nodes.service
           │ ├─var-lib-kubelet-pods-6e8ec5b4\x2d6cbd\x2d43e3\x2da002\x2d382ad74382de-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2dx6r4n.mount
           │ ├─run-containerd-io.containerd.runtime.v2.task-k8s.io-7c297996517c2ebdd207bc733957e7ea43d63c8630dab617c28d6518db1f0282-rootfs.mount
           │ ├─run-containerd-io.containerd.runtime.v2.task-k8s.io-68567ef6b45f1055433d0f3e6fbcd064ef9bb98190dd051f69fd12cff808e30f-rootfs.mount
           │ ├─proc-sys-fs-binfmt_misc.mount
           │ ├─run-containerd-io.containerd.runtime.v2.task-k8s.io-9f25cd91eb885e3f94e2d800003dd991e969dd57e3c8fde23045761f19fffe62-rootfs.mount
           │ ├─run-containerd-io.containerd.runtime.v2.task-k8s.io-bd5c5cd2d6fdb3330412b7f2a136f6c40255e3f17d2e12cb4e727dbadb0715a2-rootfs.mount
           │ ├─sssd-kcm.socket
           │ ├─run-containerd-io.containerd.runtime.v2.task-k8s.io-03b57a55d5b60b06581e669f2d1bb915d5866652d1fe45dababd7b973fa4a2d4-rootfs.mount
           │ ├─mellanoxfw.service
           │ ├─chrony-wait.service
           │ ├─run-containerd-io.containerd.runtime.v2.task-k8s.io-c8bba1ece9eddd59495885e39f255ad4f9769ae89b1b7fc3903cce8d3c7f7916-rootfs.mount
           │ ├─run-netns-cni\x2d5c620556\x2dc80e\x2d03e1\x2da05c\x2d3f60d58239c8.mount
           │ ├─atop.service
           │ │ └─440176 /usr/bin/atop -S -w /var/log/atop27/atop_20251118 600
           │ ├─run-containerd-io.containerd.runtime.v2.task-k8s.io-088d676e25b836ba1cad2c6a51c82f123437319e49f2e76950f72abfe9bf1927-rootfs.mount
           │ ├─nmiset_dns.service
           │ ├─run-containerd-io.containerd.runtime.v2.task-k8s.io-f1e21bbbc7cb957da13db6b46c64353a04a7c1d1fd670772e7adc9faf6e78fa3-rootfs.mount
           │ ├─dentry_cache_cleaner.service
           │ │ ├─ 4238 /bin/bash /usr/local/bin/dentry_cache_cleaner.sh
           │ │ └─10078 sleep 60
           │ ├─run-containerd-io.containerd.runtime.v2.task-k8s.io-acd873fc559f2941cb1dc8cf4355ff5bfd67df6a31d67ed10073845c4e40642b-rootfs.mount
           │ ├─var-lib-minio.mount
           │ ├─var-lib-kubelet-pods-53016956\x2d1fdc\x2d4182\x2db30d\x2d8975df13b8b3-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2dnfr2r.mount
           │ ├─Intel-nvm-fw.service
           │ ├─run-containerd-io.containerd.runtime.v2.task-k8s.io-92e14fd007887603c04b8210955ad6a1815b1fc9e0ee10ec0d6d862974e6748f-rootfs.mount
           │ ├─run-netns-cni\x2d99d93468\x2d14be\x2dddd3\x2d2dba\x2d40fe047165bb.mount
           │ ├─run-containerd-io.containerd.runtime.v2.task-k8s.io-794245c75ab20b17bcd8b43a373fad017d3bd653db684caf3add11d14754276c-rootfs.mount
           │ ├─run-containerd-io.containerd.runtime.v2.task-k8s.io-ecb389f6e488e34ee05de07d5ba0b6a865880613de9512f762d02a25ef996982-rootfs.mount
           │ ├─run-containerd-io.containerd.runtime.v2.task-k8s.io-bb8f9aba5a9cfe49eda5b1007ecac6c2228462f77806cb7801aa820df7b2f0a4-rootfs.mount
           │ ├─timemaster.service
           │ │ ├─8885 /usr/sbin/timemaster -f /etc/timemaster.conf
           │ │ └─8897 /usr/sbin/chronyd -u chrony -n -f /var/run/timemaster/chrony.conf
           │ ├─var-lib-kubelet-pods-d223d476\x2dac55\x2d4a39\x2d8b62\x2d404db15c85ec-volumes-kubernetes.io\x7eempty\x2ddir-packager\x2dstate.mount
           │ ├─run-containerd-io.containerd.runtime.v2.task-k8s.io-80162971b81ee8611d86c3f717d3087bd3b3f9ccb2ceb918d89e0bbc087e8a45-rootfs.mount
           │ ├─var-lib-kubelet-pods-5c63f719\x2d4775\x2d4dfe\x2d970f\x2d71ff10f56311-volumes-kubernetes.io\x7esecret-grafana\x2dconfig.mount
           │ ├─var-lib-kubelet-pods-7cd2ae11\x2d942e\x2d42e8\x2d9ac7\x2dca02a995c553-volumes-kubernetes.io\x7esecret-client\x2dcredential\x2dvolume.mount
           │ ├─run-containerd-io.containerd.runtime.v2.task-k8s.io-bd7b68359e81ea9f2992b99fa251dc23bca03911465b8cfcea15dc22ce0a6612-rootfs.mount
           │ ├─k8s-keygen.service
           │ ├─var-lib-kubelet-pods-92ba2f19\x2dec80\x2d4925\x2db13d\x2d1f0cb995aa75-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2d8pr7k.mount
           │ ├─run-netns-cni\x2d29b4dec7\x2d9eb9\x2dd78d\x2d88fd\x2dc707faa3af4c.mount
           │ ├─hardlockup_panic.service
           │ ├─run-containerd-io.containerd.grpc.v1.cri-sandboxes-68567ef6b45f1055433d0f3e6fbcd064ef9bb98190dd051f69fd12cff808e30f-shm.mount
           │ ├─var-lib-pgdb-pgdata.mount
           │ ├─run-containerd-io.containerd.grpc.v1.cri-sandboxes-77a2d3f8700f25d768cab0b31d993da60466e1cfeb0c21451e503ba6b4caa4f0-shm.mount
           │ ├─run-containerd-io.containerd.grpc.v1.cri-sandboxes-088d676e25b836ba1cad2c6a51c82f123437319e49f2e76950f72abfe9bf1927-shm.mount
           │ ├─run-containerd-io.containerd.runtime.v2.task-k8s.io-8fbeae4f599c70001a79ff8190e4d36b6208c699e369b374812b39107c435fab-rootfs.mount
           │ ├─dektecfw.service
           │ ├─var-lib-kubelet-pods-2c8d80ef\x2db1d9\x2d4b20\x2d9118\x2d2100b74d63b4-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2dhnxmm.mount
           │ ├─run-containerd-io.containerd.runtime.v2.task-k8s.io-ade64f14dfe395e14760ca207d41149a049d31cb2ec606827ff5a63c2670a816-rootfs.mount
           │ ├─var-lib-kubelet-pods-d0541ca5\x2dc841\x2d48eb\x2d9fd5\x2dc66f0f4710a9-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2d8pn77.mount
           │ ├─var-lib-persistent.mount
           │ ├─run-containerd-io.containerd.runtime.v2.task-k8s.io-eb0550778838139057fd93a2269c6ce7f78e4684614f299360ca096e1c538630-rootfs.mount
           │ ├─nis-domainname.service
           │ ├─run-containerd-io.containerd.runtime.v2.task-k8s.io-281948bceabba08c70eb15e4c693c86d3a1d4f4e96e38b8cf2590c39f52e9bb6-rootfs.mount
           │ ├─var-lib-kubelet-pods-03911f12\x2d9dd7\x2d491a\x2db444\x2da79c18d5bc39-volume\x2dsubpaths-pgdb\x2ddata-pgdb-0.mount
           │ ├─dev-hugepages.mount
           │ ├─mnt-sda7-clean.service
           │ ├─dbus.service
           │ │ └─4249 /usr/bin/dbus-daemon --system --address=systemd: --nofork --nopidfile --systemd-activation --syslog-only
           │ ├─run-containerd-io.containerd.runtime.v2.task-k8s.io-fe68ab2c54c4e080fa59d2704214383fbd0d9afeee08b6f32fbda016dfe38caf-rootfs.mount
           │ ├─var-lib-kubelet-pods-24a751e0\x2d91a9\x2d42bb\x2d9ad0\x2d6d401ff080a7-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2dkc6qm.mount
           │ ├─iloauth.service
           │ ├─var-lib-zookeeper.mount
           │ ├─dracut-shutdown.service
           │ ├─run-containerd-io.containerd.runtime.v2.task-k8s.io-8c433fb4c3fc17ad704bf6e54b939e9997d87dd176929ea899a6a96130f63b4f-rootfs.mount
           │ ├─run-containerd-io.containerd.runtime.v2.task-k8s.io-204e2017be70a182f8109ac13fcc46cc8373a95064050894e81e2a102817d84b-rootfs.mount
           │ ├─NetworkManager-wait-online.service
           │ ├─system-getty.slice
           │ │ ├─getty@tty1.service
           │ │ │ └─30411 /sbin/agetty -o -p -- \u --noclear tty1 linux
           │ │ ├─getty@tty3.service
           │ │ │ └─4103 /sbin/agetty -o -p -- \u --issue-file /etc/issue.early --noclear tty3 linux
           │ │ └─getty@tty4.service
           │ │   └─4117 /sbin/agetty -o -p -- \u --issue-file /etc/issue.early --noclear tty4 linux
           │ ├─run-containerd-io.containerd.runtime.v2.task-k8s.io-e32dbeb647266922c1ed7cbf28d0793f5eb1684ee8ea9245e799311514fddcf8-rootfs.mount
           │ ├─run-containerd-io.containerd.runtime.v2.task-k8s.io-f35a227c889252874195cfa019db37de8dd32f8c0a56e2dabdc9040729bdebed-rootfs.mount
           │ ├─nmipoller_ilostatus.service
           │ │ └─8852 python3.11 /opt/omneon/nmi/nmipollers/src/ilostatus/main.py
           │ ├─run-containerd-io.containerd.grpc.v1.cri-sandboxes-8c433fb4c3fc17ad704bf6e54b939e9997d87dd176929ea899a6a96130f63b4f-shm.mount
           │ ├─run-containerd-io.containerd.runtime.v2.task-k8s.io-295e2808e5e27f8c47f04bab1615d4a6cc6d426d0019d6298316c10262abd4a5-rootfs.mount
           │ ├─turbostatlog.service
           │ │ └─4190 /bin/turbostat -i 3600
           │ ├─run-containerd-io.containerd.runtime.v2.task-k8s.io-213f9d22cbd9897c1da8e5967af955d56e30f79c43d2fd15acb7e487c9d3c537-rootfs.mount
           │ ├─systemd-user-sessions.service
           │ ├─var-lib-cni.mount
           │ ├─dm-event.socket
           │ ├─run-containerd-io.containerd.runtime.v2.task-k8s.io-0549d98179a918e17cf119b8b8e147bc0ed93b3a2fe6800534943bc5cade26ec-rootfs.mount
           │ ├─var-lib-kubelet-pods-29ecd84d\x2d4896\x2d4ebc\x2da838\x2d464df976b54b-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2d57mxd.mount
           │ ├─var-lib-kubelet-pods-5c63f719\x2d4775\x2d4dfe\x2d970f\x2d71ff10f56311-volumes-kubernetes.io\x7eempty\x2ddir-tmp\x2dplugins.mount
           │ ├─ksoftirq-rt.service
           │ ├─perfstatserver.service
           │ │ ├─ 8798 /bin/java -XX:+UseSerialGC -Xms32M -Xmx512M -jar PerfStatsServer.jar
           │ │ ├─10641 /usr/sbin/arping -c 10 -D -I net1 10.10.106.144
           │ │ ├─11023 /usr/sbin/arping -c 10 -D -I net10 100.8.248.10
           │ │ └─11099 /usr/sbin/arping -c 10 -D -I net9 100.8.248.16
           │ ├─smartd.service
           │ │ └─4240 /usr/sbin/smartd -n -q never
           │ ├─mnt-sda7.mount
           │ ├─systemd-logind.service
           │ │ └─4257 /usr/lib/systemd/systemd-logind
           │ ├─var-lib-dms_sw_download_files.mount
           │ ├─run-containerd-io.containerd.runtime.v2.task-k8s.io-82b42c91685186661522be88ab242d1fdfde63fc80e20d245e230c4a43b91076-rootfs.mount
           │ └─nmi.service
           │   ├─ 8858 /bin/java -Dorg.apache.commons.logging.Log=org.apache.commons.logging.impl.SimpleLog -Dorg.apache.commons.logging.simplelog.defaultlog=warn -Dorg.apache.commons.logging.simplelog.showdatetime=false -XX:+UseConcMarkSweepGC -Xms128M -Xmx1024M -jar NMIService.jar
           │   ├─ 9551 /bin/bash /opt/omneon/sbin/techdump.sh /corefiles/TechDump-XOSEncoder-01-20251118-1355.zip
           │   ├─ 9571 /bin/bash /opt/omneon/sbin/techdump.sh /corefiles/TechDump-XOSEncoder-01-20251118-1355.zip
           │   ├─ 9585 /bin/bash /etc/techdump.d/10platform.sh
           │   ├─11148 sleep 0.2
           │   └─11154 /bin/systemctl status --all
           └─k8s.io
             ├─7c297996517c2ebdd207bc733957e7ea43d63c8630dab617c28d6518db1f0282
             │ └─49662 java -Xmx512m -jar /opt/harmonic/scs/scs.jar zookeeper:2181 /vos-apps/simulcrypt/config /vos-apps/simulcrypt/output 1515
             ├─122f73268927d80259bbcdb7f8459c557a41c549eed251b9c19c06ea29fb9f3c
             │ └─13786 /pause
             ├─92e14fd007887603c04b8210955ad6a1815b1fc9e0ee10ec0d6d862974e6748f
             │ ├─ 44959 bash /opt/srmpc/start.sh java -Dtmd.port=32818 -Drmp.mallocArenaMax=32 -Dkubernetes.namespace=cluster1 -DLOG_SERVER=logstash -XX:NativeMemoryTracking=summary -XX:+PrintGCDetails -XX:+PrintGCDateStamps -Xloggc:gc.log -XX:+UseGCLogFileRotation -XX:NumberOfGCLogFiles=5 -XX:GCLogFileSize=128K -Xms64m -Xmx1024m -Xshare:auto -XX:MaxMetaspaceSize=512m -XX:MaxHeapSize=1024m -XX:TieredStopAtLevel=1 -XX:+ExitOnOutOfMemoryError -XX:MaxHeapFreeRatio=30  -XX:MinHeapFreeRatio=10 -Djava.security.properties=/opt/harmonic/StreamRmpControllerCmd/java.security -jar /opt/harmonic/StreamRmpControllerCmd/StreamRmpControllerCmd.jar sample_stream_processing_engine v1 stream_processing 44DE0751-40F2-4A65-AB19-290B7B8BABB0 zookeeper:2181 44DE0751-40F2-4A65-AB19-290B7B8BABB0-1 /opt/harmonic/vos/voshome -1
             │ ├─ 44983 /usr/bin/python3 -s /usr/bin/supervisord -c /tmp/supervisor.conf -n
             │ ├─ 45062 /usr/bin/python3 /opt/srmpc/srmpc-watchdog
             │ ├─ 45063 /usr/share/filebeat/bin/filebeat -path.home /usr/share/filebeat -path.config /etc/filebeat -path.data /var/lib/filebeat -path.logs /var/log/filebeat
             │ ├─ 45064 /opt/goofys/goofys -f -o nonempty --stat-cache-ttl 0 --type-cache-ttl 0 --profile minio --endpoint http://minio-service:9000 vos-home-cluster1:app_data/harmonicinc.vos.configure/dolby_vision /graphics/dolby_vision
             │ ├─ 45065 /opt/goofys/goofys -f -o nonempty --stat-cache-ttl 0 --type-cache-ttl 0 --profile minio --endpoint http://minio-service:9000 vos-home-cluster1:app_data/harmonicinc.vos.configure/fonts /graphics/fonts
             │ ├─ 45066 /opt/goofys/goofys -f -o nonempty --stat-cache-ttl 0 --type-cache-ttl 0 --profile minio --endpoint http://minio-service:9000 vos-home-cluster1:app_data/harmonicinc.vos.configure/irdeto_license /opt/irdeto
             │ ├─ 45067 /opt/goofys/goofys -f -o nonempty --stat-cache-ttl 0 --type-cache-ttl 0 --profile minio --endpoint http://minio-service:9000 vos-home-cluster1:app_data/harmonicinc.vos.configure/lut /graphics/lut
             │ ├─ 45073 /opt/goofys/goofys -f -o nonempty --stat-cache-ttl 0 --type-cache-ttl 0 --profile minio --endpoint http://minio-service:9000 vos-home-cluster1:app_data/harmonicinc.vos.configure/sl_hdr_config /graphics/sl_hdr_config
             │ ├─ 45079 /opt/goofys/goofys -f -o nonempty --profile minio --endpoint http://minio-service:9000 vos-home-cluster1:app_data /opt/harmonic/vos/voshome/app_data
             │ ├─ 45090 /usr/local/bin/xinit /usr/local/etc/X11/xinitrc -- /usr/local/bin/Xorg -xkbdir /usr/local/share/X11/xkb -nolisten local -logverbose 0 vt7 -sharevts :0.0
             │ ├─ 45107 java -Dtmd.port=32818 -Drmp.mallocArenaMax=32 -Dkubernetes.namespace=cluster1 -DLOG_SERVER=logstash -XX:NativeMemoryTracking=summary -XX:+PrintGCDetails -XX:+PrintGCDateStamps -Xloggc:gc.log -XX:+UseGCLogFileRotation -XX:NumberOfGCLogFiles=5 -XX:GCLogFileSize=128K -Xms64m -Xmx1024m -Xshare:auto -XX:MaxMetaspaceSize=512m -XX:MaxHeapSize=1024m -XX:TieredStopAtLevel=1 -XX:+ExitOnOutOfMemoryError -XX:MaxHeapFreeRatio=30 -XX:MinHeapFreeRatio=10 -Djava.security.properties=/opt/harmonic/StreamRmpControllerCmd/java.security -jar /opt/harmonic/StreamRmpControllerCmd/StreamRmpControllerCmd.jar sample_stream_processing_engine v1 stream_processing 44DE0751-40F2-4A65-AB19-290B7B8BABB0 zookeeper:2181 44DE0751-40F2-4A65-AB19-290B7B8BABB0-1 /opt/harmonic/vos/voshome -1
             │ ├─ 45123 /usr/local/bin/Xorg :0 -xkbdir /usr/local/share/X11/xkb -nolisten local -logverbose 0 vt7 -sharevts :0.0
             │ ├─ 45458 sh /usr/local/etc/X11/xinitrc
             │ ├─ 45460 /usr/local/bin/xterm -g 90x50+0+0 -bg black -fg yellow -fn 10x20
             │ ├─ 45475 /usr/local/bin/dwm
             │ ├─ 46747 bash
             │ ├─ 46799 java -Xmx64m -Xshare:auto -XX:MaxMetaspaceSize=64m -XX:MaxHeapSize=64m -XX:TieredStopAtLevel=1 -Dlog_file_path=/var/log/rmp-controller-log -DLOG_SERVER=logstash -Dvos.home=/opt/harmonic/vos/voshome -jar /opt/harmonic/EsamAdapter/EsamAdapter.jar
             │ ├─ 47628 dbus-daemon --fork --config-file /etc/dbus-1/dbus-uvp-session.conf
             │ └─477745 /opt/harmonic/rmp/RmpWorker -Id=5f5663b2-8411-42a9-8237-a47063261a03 -KernelPath=/opt/harmonic/rmp/libRmpKernel.so -RpiPath=/opt/harmonic/rmp/rpi -rpiMessagePlugInPath=/opt/harmonic/rmp/rpiMessagePlugin -logPath=/var/log/rmp-controller-log/rmp-worker.log -velocimeterPath=/opt/harmonic/rmp/libRmpVelocimeter.so -rmpServiceId=sspe-44DE0751-40F2-4A65-AB19-290B7B8BABB0 -serverIP=127.0.0.1 -serverPort=40005 -oplanOutputPath=/var/log/rmp-controller-log/rmp-processing.opl -logMDCs=service_id:44DE0751-40F2-4A65-AB19-290B7B8BABB0 -enableRuntimeStateRestorer=true -enableRuntimeStateUpdater=true
             ├─82b42c91685186661522be88ab242d1fdfde63fc80e20d245e230c4a43b91076
             │ └─13925 /pause
             ├─68567ef6b45f1055433d0f3e6fbcd064ef9bb98190dd051f69fd12cff808e30f
             │ └─41311 /pause
             ├─fe3a782b6ddb59dc473fbf1bfad0404b1d5a2a9b06b8dc15cc7474ea935194c5
             │ └─48732 java -Xmx640m -XX:+UseG1GC -XX:NativeMemoryTracking=summary -jar /tmd/app.jar
             ├─b521bb9fc800522d5ddc3a138193d4a08f4d51ce79ee0d19a6b3e42d1d4a4ee4
             │ └─13772 /pause
             ├─ade64f14dfe395e14760ca207d41149a049d31cb2ec606827ff5a63c2670a816
             │ ├─26108 bash /usr/bin/runserver --mode=elasticsearch --es_http_port=9200 --es_cluster_http_port=9200 --es_transport_port=9300
             │ ├─26141 python3 /usr/bin/runserver.py --mode=elasticsearch --es_http_port=9200 --es_cluster_http_port=9200 --es_transport_port=9300
             │ ├─26252 su elasticsearch -c /opt/elasticsearch/bin/opensearch -Ehttp.port=9200 -Etransport.port=9300
             │ ├─26263 /opt/elasticsearch/jdk/bin/java -Xshare:auto -Dopensearch.networkaddress.cache.ttl=60 -Dopensearch.networkaddress.cache.negative.ttl=10 -XX:+AlwaysPreTouch -Xss1m -Djava.awt.headless=true -Dfile.encoding=UTF-8 -Djna.nosys=true -XX:-OmitStackTraceInFastThrow -Dio.netty.noUnsafe=true -Dio.netty.noKeySetOptimization=true -Dio.netty.recycler.maxCapacityPerThread=0 -Dio.netty.allocator.numDirectArenas=0 -Dlog4j.shutdownHookEnabled=false -Dlog4j2.disable.jmx=true -Djava.locale.providers=SPI,COMPAT -Xms1g -Xmx1g -XX:+UseG1GC -XX:G1ReservePercent=25 -XX:InitiatingHeapOccupancyPercent=30 -Djava.io.tmpdir=/tmp/opensearch-8757680722945077413 -XX:HeapDumpPath=data -XX:ErrorFile=logs/hs_err_pid%p.log -Xlog:gc*,gc+age=trace,safepoint:file=logs/gc.log:utctime,pid,tags:filecount=32,filesize=64m -XX:OnOutOfMemoryError=shutdown_elasticsearch -Des.allow_insecure_settings=true -Dlog4j2.formatMsgNoLookups=true -Xms1280m -Xmx1280m -XX:MaxDirectMemorySize=671088640 -Dopensearch.path.home=/opt/elasticsearch -Dopensearch.path.conf=/opt/elasticsearch/config -Dopensearch.distribution.type=tar -Dopensearch.bundled_jdk=true -cp /opt/elasticsearch/lib/* org.opensearch.bootstrap.OpenSearch -Ehttp.port=9200 -Etransport.port=9300
             │ └─28901 java -Xms10m -Xmx100m -XX:+ExitOnOutOfMemoryError -XX:+UseSerialGC -XX:MinHeapFreeRatio=10 -XX:MaxHeapFreeRatio=20 -Dmodes=elasticsearch -Delasticsearch.host=127.0.0.1 -Delasticsearch.http_port=9200 -Dzookeeper=zookeeper:2181 -jar /opt/harmonic/logstash-controller/LogstashController.jar
             ├─6633e7d49882ec93c836eb9e2e2b57e3ede0d2cbfa2be9b417cc1f339e7241f5
             │ ├─14737 bash -c /opt/harmonic/mediagrid/install_mg_fsd.sh & supervisord -c /opt/harmonic/supervisor/supervisord.conf
             │ ├─14825 /usr/bin/python3 -s /usr/bin/supervisord -c /opt/harmonic/supervisor/supervisord.conf
             │ ├─17043 /opt/harmonic/looprecord/looprecord_proxy
             │ ├─17044 nginx: master process nginx -c /opt/harmonic/nginx/nginx.conf
             │ ├─17056 nginx: worker process
             │ ├─17057 nginx: worker process
             │ ├─17058 nginx: worker process
             │ └─17059 nginx: worker process
             ├─00e98de63a20a316ed80b9fb58d481ce95ac134055b07fa246fb8be067636213
             │ └─44888 /pause
             ├─fcac4959fbbd538807c041a643e8ae251eb6ca11af5c1415125b37871a5102e1
             │ └─15733 java -jar /opt/harmonic/xos/upgrade-rollback/xos_upgrade_rollback_worker.jar -Xms48m -Xmx96m -XX:+ExitOnOutOfMemoryError -XX:MaxMetaspaceSize=96m -XX:CompressedClassSpaceSize=48m
             ├─e77396127a5205ad7263b1715ab488deed253092cf27f541d1d3a85180edc5a9
             │ └─16398 grafana-server --homepath=/usr/share/grafana --config=/etc/grafana/grafana.ini --packaging=docker cfg:default.log.mode=console cfg:default.paths.data=/var/lib/grafana cfg:default.paths.logs=/var/log/grafana cfg:default.paths.plugins=/var/lib/grafana/plugins cfg:default.paths.provisioning=/etc/grafana/provisioning
             ├─21d714746405044fd80d02b52d0146c4a8e8ea066da93341094adb56168511e9
             │ ├─14715 nginx: master process nginx -g daemon off;
             │ ├─14943 nginx: worker process
             │ ├─14944 nginx: worker process
             │ ├─14945 nginx: worker process
             │ ├─14946 nginx: worker process
             │ ├─14947 nginx: worker process
             │ ├─14948 nginx: worker process
             │ ├─14949 nginx: worker process
             │ ├─14951 nginx: worker process
             │ ├─14954 nginx: worker process
             │ ├─14958 nginx: worker process
             │ ├─14960 nginx: worker process
             │ ├─14963 nginx: worker process
             │ ├─14964 nginx: worker process
             │ ├─14965 nginx: worker process
             │ ├─14966 nginx: worker process
             │ ├─14967 nginx: worker process
             │ ├─14968 nginx: worker process
             │ ├─14969 nginx: worker process
             │ ├─14970 nginx: worker process
             │ ├─14971 nginx: worker process
             │ ├─14972 nginx: worker process
             │ ├─14973 nginx: worker process
             │ ├─14974 nginx: worker process
             │ ├─14975 nginx: worker process
             │ ├─14976 nginx: worker process
             │ ├─14977 nginx: worker process
             │ ├─14978 nginx: worker process
             │ ├─14979 nginx: worker process
             │ ├─14980 nginx: worker process
             │ ├─14981 nginx: worker process
             │ ├─14982 nginx: worker process
             │ ├─14983 nginx: worker process
             │ ├─14984 nginx: worker process
             │ ├─14985 nginx: worker process
             │ ├─14986 nginx: worker process
             │ ├─14987 nginx: worker process
             │ ├─14988 nginx: worker process
             │ ├─14989 nginx: worker process
             │ ├─14990 nginx: worker process
             │ ├─14991 nginx: worker process
             │ ├─14992 nginx: worker process
             │ ├─14993 nginx: worker process
             │ ├─14994 nginx: worker process
             │ ├─14995 nginx: worker process
             │ ├─14996 nginx: worker process
             │ ├─14997 nginx: worker process
             │ ├─14998 nginx: worker process
             │ ├─14999 nginx: worker process
             │ ├─15000 nginx: worker process
             │ ├─15001 nginx: worker process
             │ ├─15002 nginx: worker process
             │ ├─15003 nginx: worker process
             │ ├─15004 nginx: worker process
             │ ├─15005 nginx: worker process
             │ ├─15006 nginx: worker process
             │ ├─15007 nginx: worker process
             │ ├─15008 nginx: worker process
             │ ├─15009 nginx: worker process
             │ ├─15010 nginx: worker process
             │ ├─15011 nginx: worker process
             │ ├─15012 nginx: worker process
             │ ├─15013 nginx: worker process
             │ ├─15014 nginx: worker process
             │ ├─15015 nginx: worker process
             │ ├─15016 nginx: worker process
             │ ├─15017 nginx: worker process
             │ ├─15018 nginx: worker process
             │ ├─15019 nginx: worker process
             │ ├─15020 nginx: worker process
             │ ├─15021 nginx: worker process
             │ ├─15022 nginx: worker process
             │ ├─15023 nginx: worker process
             │ ├─15024 nginx: worker process
             │ ├─15025 nginx: worker process
             │ ├─15026 nginx: worker process
             │ ├─15027 nginx: worker process
             │ ├─15028 nginx: worker process
             │ ├─15029 nginx: worker process
             │ ├─15030 nginx: worker process
             │ ├─15031 nginx: worker process
             │ ├─15032 nginx: worker process
             │ ├─15033 nginx: worker process
             │ ├─15034 nginx: worker process
             │ ├─15035 nginx: worker process
             │ ├─15036 nginx: worker process
             │ ├─15037 nginx: worker process
             │ ├─15038 nginx: worker process
             │ ├─15040 nginx: worker process
             │ ├─15041 nginx: worker process
             │ ├─15042 nginx: worker process
             │ ├─15043 nginx: worker process
             │ ├─15044 nginx: worker process
             │ ├─15045 nginx: worker process
             │ ├─15046 nginx: worker process
             │ ├─15047 nginx: worker process
             │ ├─15048 nginx: worker process
             │ ├─15049 nginx: worker process
             │ ├─15050 nginx: worker process
             │ ├─15051 nginx: worker process
             │ ├─15052 nginx: worker process
             │ ├─15053 nginx: worker process
             │ ├─15054 nginx: worker process
             │ ├─15055 nginx: worker process
             │ ├─15056 nginx: worker process
             │ ├─15057 nginx: worker process
             │ ├─15058 nginx: worker process
             │ ├─15059 nginx: worker process
             │ ├─15060 nginx: worker process
             │ ├─15061 nginx: worker process
             │ ├─15062 nginx: worker process
             │ ├─15063 nginx: worker process
             │ ├─15064 nginx: worker process
             │ ├─15065 nginx: worker process
             │ ├─15066 nginx: worker process
             │ ├─15067 nginx: worker process
             │ ├─15068 nginx: worker process
             │ ├─15069 nginx: worker process
             │ ├─15071 nginx: worker process
             │ ├─15072 nginx: worker process
             │ ├─15074 nginx: worker process
             │ ├─15075 nginx: worker process
             │ ├─15078 nginx: worker process
             │ ├─15082 nginx: worker process
             │ ├─15083 nginx: worker process
             │ ├─15084 nginx: worker process
             │ ├─15085 nginx: worker process
             │ ├─15086 nginx: worker process
             │ ├─15087 nginx: worker process
             │ ├─15088 nginx: worker process
             │ ├─15089 nginx: worker process
             │ ├─15090 nginx: worker process
             │ ├─15091 nginx: worker process
             │ ├─15092 nginx: worker process
             │ ├─15093 nginx: worker process
             │ ├─15094 nginx: worker process
             │ ├─15095 nginx: worker process
             │ ├─15096 nginx: worker process
             │ ├─15097 nginx: worker process
             │ ├─15098 nginx: worker process
             │ ├─15099 nginx: worker process
             │ ├─15100 nginx: worker process
             │ ├─15101 nginx: worker process
             │ ├─15102 nginx: worker process
             │ ├─15103 nginx: worker process
             │ ├─15104 nginx: worker process
             │ ├─15105 nginx: worker process
             │ ├─15106 nginx: worker process
             │ ├─15107 nginx: worker process
             │ ├─15108 nginx: worker process
             │ ├─15110 nginx: worker process
             │ ├─15111 nginx: worker process
             │ ├─15112 nginx: worker process
             │ ├─15116 nginx: worker process
             │ ├─15119 nginx: worker process
             │ ├─15120 nginx: worker process
             │ ├─15121 nginx: worker process
             │ ├─15122 nginx: worker process
             │ ├─15123 nginx: worker process
             │ ├─15124 nginx: worker process
             │ ├─15126 nginx: worker process
             │ ├─15127 nginx: worker process
             │ ├─15128 nginx: worker process
             │ ├─15129 nginx: worker process
             │ ├─15130 nginx: worker process
             │ ├─15131 nginx: worker process
             │ ├─15132 nginx: worker process
             │ ├─15133 nginx: worker process
             │ └─15134 nginx: worker process
             ├─9f25cd91eb885e3f94e2d800003dd991e969dd57e3c8fde23045761f19fffe62
             │ └─k8s.io
             │   └─9f25cd91eb885e3f94e2d800003dd991e969dd57e3c8fde23045761f19fffe62
             │     ├─14675 /sbin/init
             │     ├─user.slice
             │     │ └─user-1000.slice
             │     │   └─session-1.scope
             │     │     ├─17071 lightdm --session-child 12 15
             │     │     ├─17077 /usr/bin/openbox --startup /usr/libexec/openbox-autostart OPENBOX
             │     │     ├─17105 dbus-launch --sh-syntax --exit-with-session
             │     │     ├─17106 /usr/bin/dbus-daemon --fork --print-pid 5 --print-address 7 --session
             │     │     ├─17114 /usr/bin/ssh-agent /bin/sh -c exec -l /bin/bash -c "/usr/bin/openbox-session"
             │     │     ├─17134 /usr/lib64/firefox/firefox
             │     │     ├─17765 /usr/libexec/at-spi-bus-launcher
             │     │     ├─17776 /usr/lib64/firefox/firefox -contentproc -parentBuildID 20240618122702 -prefsLen 24777 -prefMapSize 243868 -appDir /usr/lib64/firefox/browser {f2a25019-b106-4fa8-9f8c-74c61c8d41a3} 112 socket
             │     │     ├─17819 /usr/lib64/firefox/firefox -contentproc -childID 1 -isForBrowser -prefsLen 24839 -prefMapSize 243868 -jsInitLen 240916 -parentBuildID 20240618122702 -appDir /usr/lib64/firefox/browser {554d6d1b-9e6b-4731-93ff-d8e57eaf2ab6} 112 tab
             │     │     ├─17849 /usr/lib64/firefox/firefox -contentproc -childID 2 -isForBrowser -prefsLen 23057 -prefMapSize 243868 -jsInitLen 240916 -parentBuildID 20240618122702 -appDir /usr/lib64/firefox/browser {f81b6a31-5ab6-4607-96bf-930b69e24be1} 112 tab
             │     │     ├─23752 /usr/lib64/firefox/firefox -contentproc -childID 3 -isForBrowser -prefsLen 31342 -prefMapSize 243868 -jsInitLen 240916 -parentBuildID 20240618122702 -appDir /usr/lib64/firefox/browser {a9a73e53-3c64-495a-8373-e854a17cc2b5} 112 tab
             │     │     ├─23806 /usr/lib64/firefox/firefox -contentproc -childID 4 -isForBrowser -prefsLen 28344 -prefMapSize 243868 -jsInitLen 240916 -parentBuildID 20240618122702 -appDir /usr/lib64/firefox/browser {0d3f346d-be3e-4b08-ba23-ecfc34e78412} 112 tab
             │     │     ├─23813 /usr/lib64/firefox/firefox -contentproc -childID 5 -isForBrowser -prefsLen 28344 -prefMapSize 243868 -jsInitLen 240916 -parentBuildID 20240618122702 -appDir /usr/lib64/firefox/browser {7643dbaf-7ffa-441a-9dcb-911454c93f2f} 112 tab
             │     │     └─23846 /usr/lib64/firefox/firefox -contentproc -childID 6 -isForBrowser -prefsLen 28344 -prefMapSize 243868 -jsInitLen 240916 -parentBuildID 20240618122702 -appDir /usr/lib64/firefox/browser {dceb146d-9a5e-4d9f-806d-8f0461584ce7} 112 tab
             │     └─system.slice
             │       ├─systemd-update-utmp.service
             │       ├─systemd-vconsole-setup.service
             │       ├─systemd-journal-flush.service
             │       ├─systemd-sysctl.service
             │       ├─etc-remote\x2daccess\x2dsecret.mount
             │       ├─run-secrets-kubernetes.io-serviceaccount.mount
             │       ├─sys-kernel-config.mount
             │       ├─polkit.service
             │       │ └─16996 /usr/lib/polkit-1/polkitd --no-debug
             │       ├─systemd-hwdb-update.service
             │       ├─systemd-update-done.service
             │       ├─sys-kernel-debug.mount
             │       ├─accounts-daemon.service
             │       │ └─16992 /usr/libexec/accounts-daemon
             │       ├─systemd-tmpfiles-setup.service
             │       ├─-.mount
             │       ├─lightdm.service
             │       │ ├─16928 /usr/sbin/lightdm
             │       │ └─17017 Xvfb :0 -screen 0 1440x900x16
             │       ├─etc-hosts.mount
             │       ├─setup-jump-client.service
             │       │ └─17018 node /usr/local/bin/setup_jump_client.js
             │       ├─systemd-journald.service
             │       │ └─15640 /usr/lib/systemd/systemd-journald
             │       ├─home-devops-.ssh.mount
             │       ├─dev-mqueue.mount
             │       ├─systemd-journal-catalog-update.service
             │       ├─systemd-random-seed.service
             │       ├─etc-resolv.conf.mount
             │       ├─systemd-tmpfiles-setup-dev.service
             │       ├─run-user-1000.mount
             │       ├─etc-hostname.mount
             │       ├─dev-termination\x2dlog.mount
             │       ├─systemd-readahead-collect.service
             │       ├─dev-hugepages.mount
             │       ├─dbus.service
             │       │ └─16676 /usr/bin/dbus-daemon --system --address=systemd: --nofork --nopidfile --systemd-activation
             │       ├─system-getty.slice
             │       ├─systemd-user-sessions.service
             │       └─systemd-logind.service
             │         └─16908 /usr/lib/systemd/systemd-logind
             ├─7e4e0613f3bdaa4e0315cfb163224a53ba0daec763fd26f1803fb7c0fe0b0c9a
             │ └─14478 /metrics-sidecar
             ├─0d7155d211873ab9dd56c492ca84a6e4b4923853bba2d738ec0b87355cd4d5c7
             │ └─24571 /pause
             ├─bd5c5cd2d6fdb3330412b7f2a136f6c40255e3f17d2e12cb4e727dbadb0715a2
             │ └─14224 /pause
             ├─bdd3f0371b20c26eabbf8c0c7141ac0992f046ae3f8d0673711e30a1078f8c0d
             │ └─30398 /pause
             ├─103288724dcc47b7a997f0c219c0fad5d24fe11321d3a9fbfa04ba4763764450
             │ └─42625 /pause
             ├─722977738813a4b4dba7b54d441862d21d5970ae817c4d9a8ce4634487fc6687
             │ └─40454 /pause
             ├─295e2808e5e27f8c47f04bab1615d4a6cc6d426d0019d6298316c10262abd4a5
             │ ├─40572 java -Xmx512m -Dlog.server=logstash -Dlogging.config=/opt/harmonic/fileutils/log4j2.xml -XX:+ExitOnOutOfMemoryError -jar /opt/harmonic/fileutils/fileutils-worker.jar
             │ └─40643 /sbin/rpcbind
             ├─83cbb97e4fb7c9801bb5d1334c503eb1231e1f7903d8ed8f73c60a62819d0bba
             │ └─14798 /bin/postgres_exporter --web.listen-address=0.0.0.0:9187 --extend.query-path /additional/custom-query.yaml
             ├─80162971b81ee8611d86c3f717d3087bd3b3f9ccb2ceb918d89e0bbc087e8a45
             │ └─16920 /bin/operator --kubelet-service=kube-system/kubelet --prometheus-config-reloader=quay.io/prometheus-operator/prometheus-config-reloader:v0.57.0
             ├─e32dbeb647266922c1ed7cbf28d0793f5eb1684ee8ea9245e799311514fddcf8
             │ └─14628 /pause
             ├─e6c48aed1f74ce74c92c7fa0d44b00eb2cec24c30cb75bd0e44cc002979144d2
             │ └─14629 java -Xmx512m -jar /opt/harmonic/emmg_server/emmg_server.jar zookeeper:2181 /vos-apps/emmg-server/v1/config 4971 1516
             ├─eb0550778838139057fd93a2269c6ce7f78e4684614f299360ca096e1c538630
             │ ├─44684 bash /opt/srmpc/start.sh java -Dtmd.port=32791 -Drmp.mallocArenaMax=32 -Dkubernetes.namespace=cluster1 -DLOG_SERVER=logstash -XX:NativeMemoryTracking=summary -XX:+PrintGCDetails -XX:+PrintGCDateStamps -Xloggc:gc.log -XX:+UseGCLogFileRotation -XX:NumberOfGCLogFiles=5 -XX:GCLogFileSize=128K -Xms64m -Xmx1024m -Xshare:auto -XX:MaxMetaspaceSize=512m -XX:MaxHeapSize=1024m -XX:TieredStopAtLevel=1 -XX:+ExitOnOutOfMemoryError -XX:MaxHeapFreeRatio=30  -XX:MinHeapFreeRatio=10 -Djava.security.properties=/opt/harmonic/StreamRmpControllerCmd/java.security -jar /opt/harmonic/StreamRmpControllerCmd/StreamRmpControllerCmd.jar sample_stream_processing_engine v1 stream_processing E7395E3B-B18C-442C-A045-E1CECED0696B zookeeper:2181 E7395E3B-B18C-442C-A045-E1CECED0696B-1 /opt/harmonic/vos/voshome -1
             │ ├─44708 /usr/bin/python3 -s /usr/bin/supervisord -c /tmp/supervisor.conf -n
             │ ├─44719 /usr/bin/python3 /opt/srmpc/srmpc-watchdog
             │ ├─44720 /usr/share/filebeat/bin/filebeat -path.home /usr/share/filebeat -path.config /etc/filebeat -path.data /var/lib/filebeat -path.logs /var/log/filebeat
             │ ├─44721 /opt/goofys/goofys -f -o nonempty --stat-cache-ttl 0 --type-cache-ttl 0 --profile minio --endpoint http://minio-service:9000 vos-home-cluster1:app_data/harmonicinc.vos.configure/dolby_vision /graphics/dolby_vision
             │ ├─44722 /opt/goofys/goofys -f -o nonempty --stat-cache-ttl 0 --type-cache-ttl 0 --profile minio --endpoint http://minio-service:9000 vos-home-cluster1:app_data/harmonicinc.vos.configure/fonts /graphics/fonts
             │ ├─44723 /opt/goofys/goofys -f -o nonempty --stat-cache-ttl 0 --type-cache-ttl 0 --profile minio --endpoint http://minio-service:9000 vos-home-cluster1:app_data/harmonicinc.vos.configure/irdeto_license /opt/irdeto
             │ ├─44724 /opt/goofys/goofys -f -o nonempty --stat-cache-ttl 0 --type-cache-ttl 0 --profile minio --endpoint http://minio-service:9000 vos-home-cluster1:app_data/harmonicinc.vos.configure/lut /graphics/lut
             │ ├─44730 /opt/goofys/goofys -f -o nonempty --stat-cache-ttl 0 --type-cache-ttl 0 --profile minio --endpoint http://minio-service:9000 vos-home-cluster1:app_data/harmonicinc.vos.configure/sl_hdr_config /graphics/sl_hdr_config
             │ ├─44736 /opt/goofys/goofys -f -o nonempty --profile minio --endpoint http://minio-service:9000 vos-home-cluster1:app_data /opt/harmonic/vos/voshome/app_data
             │ ├─44747 /usr/local/bin/xinit /usr/local/etc/X11/xinitrc -- /usr/local/bin/Xorg -xkbdir /usr/local/share/X11/xkb -nolisten local -logverbose 0 vt7 -sharevts :0.0
             │ ├─44764 java -Dtmd.port=32791 -Drmp.mallocArenaMax=32 -Dkubernetes.namespace=cluster1 -DLOG_SERVER=logstash -XX:NativeMemoryTracking=summary -XX:+PrintGCDetails -XX:+PrintGCDateStamps -Xloggc:gc.log -XX:+UseGCLogFileRotation -XX:NumberOfGCLogFiles=5 -XX:GCLogFileSize=128K -Xms64m -Xmx1024m -Xshare:auto -XX:MaxMetaspaceSize=512m -XX:MaxHeapSize=1024m -XX:TieredStopAtLevel=1 -XX:+ExitOnOutOfMemoryError -XX:MaxHeapFreeRatio=30 -XX:MinHeapFreeRatio=10 -Djava.security.properties=/opt/harmonic/StreamRmpControllerCmd/java.security -jar /opt/harmonic/StreamRmpControllerCmd/StreamRmpControllerCmd.jar sample_stream_processing_engine v1 stream_processing E7395E3B-B18C-442C-A045-E1CECED0696B zookeeper:2181 E7395E3B-B18C-442C-A045-E1CECED0696B-1 /opt/harmonic/vos/voshome -1
             │ ├─44841 /usr/local/bin/Xorg :0 -xkbdir /usr/local/share/X11/xkb -nolisten local -logverbose 0 vt7 -sharevts :0.0
             │ ├─45443 sh /usr/local/etc/X11/xinitrc
             │ ├─45445 /usr/local/bin/xterm -g 90x50+0+0 -bg black -fg yellow -fn 10x20
             │ ├─45473 /usr/local/bin/dwm
             │ ├─46745 bash
             │ ├─46787 java -Xmx64m -Xshare:auto -XX:MaxMetaspaceSize=64m -XX:MaxHeapSize=64m -XX:TieredStopAtLevel=1 -Dlog_file_path=/var/log/rmp-controller-log -DLOG_SERVER=logstash -Dvos.home=/opt/harmonic/vos/voshome -jar /opt/harmonic/EsamAdapter/EsamAdapter.jar
             │ ├─47331 /opt/harmonic/rmp/RmpWorker -Id=e62c4674-384e-4326-b5ad-bbb6d9f0733f -KernelPath=/opt/harmonic/rmp/libRmpKernel.so -RpiPath=/opt/harmonic/rmp/rpi -rpiMessagePlugInPath=/opt/harmonic/rmp/rpiMessagePlugin -logPath=/var/log/rmp-controller-log/rmp-worker.log -velocimeterPath=/opt/harmonic/rmp/libRmpVelocimeter.so -rmpServiceId=sspe-E7395E3B-B18C-442C-A045-E1CECED0696B -serverIP=127.0.0.1 -serverPort=40003 -oplanOutputPath=/var/log/rmp-controller-log/rmp-processing.opl -logMDCs=service_id:E7395E3B-B18C-442C-A045-E1CECED0696B -enableRuntimeStateRestorer=true -enableRuntimeStateUpdater=true
             │ └─47625 dbus-daemon --fork --config-file /etc/dbus-1/dbus-uvp-session.conf
             ├─945e7003246f1d85e24f7367e5a3332fcc3bfe456f7f7b9fcc12cd5399d0ed27
             │ └─15641 /pause
             ├─7df7db3e64340a5a722340e86d3ff57667247c0af41c3901b967d7282dcabb82
             │ └─25640 /pause
             ├─d47371732f56f14f031e0b7b210ea0c0947efe1c4bf8fa70b5e0c3ebb888ecfa
             │ └─14139 /pause
             ├─c8bba1ece9eddd59495885e39f255ad4f9769ae89b1b7fc3903cce8d3c7f7916
             │ ├─42904 /usr/bin/python3 -s /usr/bin/supervisord -c /supervisord.conf
             │ ├─43444 python3 /opt/harmonic/unified-origin-engine/bin/supervisord-event-handler
             │ ├─43448 java -Xms50m -Xmx512m -XX:+ExitOnOutOfMemoryError -XX:+UseConcMarkSweepGC -Xloggc:/var/log/gc.log -XX:+UseGCLogFileRotation -XX:NumberOfGCLogFiles=3 -XX:GCLogFileSize=1M -Dorg.glassfish.grizzly.nio.transport.TCPNIOTransport.max-receive-buffer-size=1048576 -classpath /opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/remote-daemon-executor.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/potf-server-config-lib.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/KMSClientLib.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/cpixlib.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kms-soap-stub.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jaxb-impl-2.2.5.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/commons-logging-1.2.1.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/xmlsec-1.5.6.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/commons-codec-1.14.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jersey-apache-connector-2.5.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/httpcore-4.3.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/httpclient-4.3.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/guava-14.0.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jackson-dataformat-xml-2.10.3.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/stax2-api-4.2.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/RmpControllerSDK.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/unified-origin-engine-library.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/StreamRmpControllerCmd.jar:/opt/harmonic/MediaStreamPackageContro
ller/remote-daemon-executor/lib/rmp-controller-models.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/commons-math3-3.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/dnsjava-2.1.8.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jersey-container-grizzly2-http-2.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/grizzly-http-server-2.3.33.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jersey-container-jdk-http-2.7.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/protostuff-core-1.5.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/protostuff-runtime-1.5.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jackson-jaxrs-json-provider-2.10.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jersey-media-json-jackson-2.7.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jackson-jaxrs-1.9.13.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/velocity-1.7.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/joda-time-2.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/commons-validator-1.6.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/log4j-slf4j-impl-2.17.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/zookeeper-3.5.6.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/cron-utils-9.2.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-client-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-httpclient-okhttp-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-client-api-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/curator-recipes-
5.0.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/curator-framework-5.0.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/curator-client-5.0.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-gatewayapi-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-resource-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-rbac-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-admissionregistration-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-apps-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-autoscaling-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-apiextensions-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-batch-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-certificates-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-coordination-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-discovery-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-events-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-extensions-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-flowcontrol-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-networking-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-metrics-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubern
etes-model-policy-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-scheduling-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-storageclass-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-node-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-core-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-common-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/slf4j-api-1.7.36.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/log4j-1.2-api-2.17.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/simpleclient_servlet-0.12.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/simpleclient_servlet_common-0.12.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/simpleclient_common-0.12.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/simpleclient-0.12.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jersey-server-2.7.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jersey-client-2.7.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jersey-common-2.7.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/javax.ws.rs-api-2.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/log4j-layout-template-json-2.17.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/asset-mgmt-grpc-library.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/commons-configuration-1.10.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/commons-lang-2.6.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/gson-2.2.4.jar:/
opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/commons-io-2.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jackson-module-jaxb-annotations-2.10.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jackson-jaxrs-base-2.10.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/zjsonpatch-0.3.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jackson-dataformat-yaml-2.14.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jackson-datatype-jsr310-2.14.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jackson-databind-2.14.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jackson-annotations-2.14.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/property-binder-4.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/grizzly-http-2.3.33.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/hk2-locator-2.2.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/javax.inject-2.2.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/protostuff-collectionschema-1.5.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/protostuff-api-1.5.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jackson-xc-1.9.13.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jackson-mapper-asl-1.9.13.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jackson-core-asl-1.9.13.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/commons-beanutils-1.9.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/commons-collections-3.2.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/commons-digester-1.8.1.jar:/opt/harmonic/MediaStreamPackageContro
ller/remote-daemon-executor/lib/commons-logging-1.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/log4j-core-2.17.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/log4j-api-2.17.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/simpleclient_tracer_otel-0.12.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/simpleclient_tracer_otel_agent-0.12.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/zookeeper-jute-3.5.6.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/audience-annotations-0.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/netty-handler-4.1.42.Final.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/netty-transport-native-epoll-4.1.42.Final.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jackson-core-2.14.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jakarta.el-3.0.4.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/grizzly-framework-2.3.33.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/javax.annotation-api-1.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jersey-guava-2.7.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/hk2-api-2.2.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/osgi-resource-locator-1.0.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/validation-api-1.1.0.Final.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jakarta.xml.bind-api-2.3.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jakarta.activation-api-1.2.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/snakeyaml-engine-2.6.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/
lib/logging-interceptor-3.12.12.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/okhttp-3.12.12.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/simpleclient_tracer_common-0.12.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/netty-codec-4.1.42.Final.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/netty-transport-native-unix-common-4.1.42.Final.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/netty-transport-4.1.42.Final.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/netty-buffer-4.1.42.Final.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/netty-resolver-4.1.42.Final.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/netty-common-4.1.42.Final.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/hk2-utils-2.2.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/aopalliance-repackaged-2.2.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/javassist-3.18.1-GA.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/snakeyaml-1.33.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/okio-1.15.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/guava-27.0.1-jre.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/javax.inject-1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/failureaccess-1.0.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/listenablefuture-9999.0-empty-to-avoid-conflict-with-guava.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jsr305-3.0.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/checker-qual-2.5.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/error_prone_annotation
s-2.2.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/j2objc-annotations-1.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/animal-sniffer-annotations-1.17.jar com.harmonicinc.remotedaemonexecutor.RemoteDaemonExecutor asset-operator MEDIAGRID backupStorageIsNotConfigured
             │ ├─43451 /usr/share/filebeat/bin/filebeat --path.home /usr/share/filebeat --path.config /etc/filebeat --path.data /var/lib/filebeat --path.logs /var/log/filebeat -E output.logstash.hosts=['logstash:5044']
             │ └─44380 /opt/harmonic/MediaStreamPackageController/AssetManagementProxy --grpc-port 20208
             ├─6ae50f2a676c640e423edd4d4bf7cff29770d8a9e82293d7113d8fcef8912e53
             │ └─48584 /pause
             ├─bf7cf933a8910b9fb80cce11f576b2ce0282a4a972e5b0001806935cd7e4a995
             │ └─40571 java -Xmx512m -Dlog.server=logstash -Dlogging.config=/opt/harmonic/mediautils/log4j2.xml -XX:+ExitOnOutOfMemoryError -jar /opt/harmonic/mediautils/mediautils-worker.jar
             ├─9b7ebe191af1caee7c18acca4db8ef2c3c671c90ebed84dda923eaa5e4f16cad
             │ └─13776 /pause
             ├─ecb389f6e488e34ee05de07d5ba0b6a865880613de9512f762d02a25ef996982
             │ └─42632 /pause
             ├─aa5225d693f714f7e75445136b3b211b51dddf9ae10e9b6533a4b762901249dd
             │ ├─30938 /bin/bash /opt/harmonic/vos/bin/vos --httpPort=80 --httpsPort=443 --stopPort=-1 --assetStorageEnable=True --assetStorageType=mediagrid --assetStorageName=local-asset-storage --keystore=/opt/harmonic/vos/flex_keystore --keystore-pw=Vk9TaGFybW9uaWNGTEVY --truststore=/opt/harmonic/vos/flex_truststore --truststore-pw=Vk9TaGFybW9uaWNGTEVY
             │ ├─30981 /usr/bin/python3 -s /usr/bin/supervisord -c supervisor.conf -n
             │ ├─31042 /opt/goofys/goofys -f -o nonempty --profile minio --endpoint http://minio-service:9000 vos-home-cluster1 /opt/harmonic/vos/voshome/
             │ ├─31044 /opt/goofys/goofys -f -o nonempty --stat-cache-ttl 5s --type-cache-ttl 5s --profile minio --endpoint http://minio-service:9000 vos-home-cluster1:app_data/com.harmonicinc.vos.sspe/ /opt/harmonic/vos/voshome/app_data/com.harmonicinc.vos.sspe
             │ └─38063 /usr/bin/java -Dvos.use.k8s=true -Dcluster.multizone=False -Dvos.appbundles.bundledir=/opt/harmonic/vos/appbundleDir -Dvos.jdbc.url=jdbc:postgresql://pgdb:5432/vos -XX:+UseG1GC -Xmx5120m -Xss16m -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.port=9010 -Dcom.sun.management.jmxremote.rmi.port=9010 -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.local.only=false -Dcom.sun.management.jmxremote.ssl=false -Djava.net.preferIPv4Stack=true -Djava.rmi.server.hostname=127.0.0.1 -Djdk.tls.ephemeralDHKeySize=2048 -XX:OnOutOfMemoryError=pkill -f -9 java -Dvos.log4j.configuration=file:/opt/harmonic/vos/log4j2.xml -Dvos.configuration=/opt/harmonic/vos/vos.properties -Dvos.appbundles.srcdir=/opt/harmonic/vos/app_bundles -Dvos.systemapps.dir=/opt/harmonic/vos/system_apps -jar /opt/harmonic/vos/vos.war --voshome=/opt/harmonic/vos/voshome --httpPort=80 --httpsPort=443 --stopPort=-1 --keystore=/opt/harmonic/vos/flex_keystore --keystore-pw=Vk9TaGFybW9uaWNGTEVY --truststore=/opt/harmonic/vos/flex_truststore --truststore-pw=Vk9TaGFybW9uaWNGTEVY
             ├─7149e251631bd2084f6135e3cb5b75287790d2f14422004e79a3c6f962e189b6
             │ ├─30480 bash /usr/bin/runserver --mode=kibana --kibana4_port=5601 --es_cluster_http_port=9200 --always_reconf_kibana
             │ ├─30573 python3 /usr/bin/runserver.py --mode=kibana --kibana4_port=5601 --es_cluster_http_port=9200 --always_reconf_kibana
             │ ├─30984 su kibana -c NODE_OPTIONS=--max-old-space-size=512 nohup /opt/kibana/bin/opensearch-dashboards -p 5601 -e http://elasticsearch:9200
             │ ├─30989 /opt/kibana/bin/../node/bin/node /opt/kibana/bin/../src/cli/dist -p 5601 -e http://elasticsearch:9200
             │ ├─30993 nginx: master process nginx
             │ ├─30994 nginx: worker process
             │ ├─30995 nginx: worker process
             │ ├─30996 nginx: worker process
             │ ├─30997 nginx: worker process
             │ └─31002 java -Xms10m -Xmx100m -XX:+ExitOnOutOfMemoryError -XX:+UseSerialGC -XX:MinHeapFreeRatio=10 -XX:MaxHeapFreeRatio=20 -Dmodes=kibana -Delasticsearch.host=elasticsearch -Delasticsearch.http_port=9200 -Dzookeeper=zookeeper:2181 -jar /opt/harmonic/logstash-controller/LogstashController.jar
             ├─f4065ad589e13db0b84c7391204b93094e65cb429fb782299eb826113accb8ea
             │ └─13765 /pause
             ├─77a2d3f8700f25d768cab0b31d993da60466e1cfeb0c21451e503ba6b4caa4f0
             │ └─25753 /pause
             ├─1d580c48cbcd18e29e222541f0507c3fb28987d91d35b55bd98d39e7d7c21917
             │ └─30399 /pause
             ├─d8b576a7151077a817ed7ccda23150d12c06cef14dff0d46cf1ddb8384a2c8b2
             │ ├─14313 /bin/bash /opt/omneon/sbin/run_ecd_and_wait
             │ └─14824 /opt/omneon/bin/execCntld
             ├─4410a4c85c7991ba762df13284942daddcb3f54a86de129e666c5728aaaa401c
             │ └─16163 /pause
             ├─281948bceabba08c70eb15e4c693c86d3a1d4f4e96e38b8cf2590c39f52e9bb6
             │ ├─   692 postgres: v1: vos vos 198.51.100.20(52122) idle
             │ ├─  1036 postgres: v1: vos vos 198.51.100.20(34018) idle
             │ ├─  1046 postgres: v1: vos vos 198.51.100.20(34028) idle
             │ ├─  1438 postgres: v1: vos vos 198.51.100.20(56684) idle
             │ ├─  1578 postgres: v1: vos vos 198.51.100.21(52666) idle
             │ ├─  1642 postgres: v1: vos vos 198.51.100.20(58974) idle
             │ ├─  1672 postgres: v1: vos vos 198.51.100.21(42432) idle
             │ ├─  1685 postgres: v1: vos vos 198.51.100.20(58988) idle
             │ ├─  1723 postgres: v1: vos vos 198.51.100.21(42444) idle
             │ ├─  1880 postgres: v1: vos vos 198.51.100.20(58992) idle
             │ ├─  1977 postgres: v1: vos vos 198.51.100.20(58996) idle
             │ ├─  2005 postgres: v1: vos vos 198.51.100.20(35580) idle
             │ ├─  2182 postgres: v1: vos vos 198.51.100.22(37330) idle
             │ ├─  2436 postgres: v1: vos vos 198.51.100.22(37338) idle
             │ ├─  2484 postgres: v1: vos vos 198.51.100.20(53734) idle
             │ ├─  2580 postgres: v1: vos vos 198.51.100.21(49102) idle
             │ ├─  2736 postgres: v1: vos vos 198.51.100.22(51102) idle
             │ ├─  2742 postgres: v1: vos vos 198.51.100.20(53742) idle
             │ ├─  2743 postgres: v1: vos vos 198.51.100.22(51110) idle
             │ ├─  3006 postgres: v1: vos vos 198.51.100.20(52000) idle
             │ ├─  3101 postgres: v1: vos vos 198.51.100.20(40512) idle
             │ ├─  3126 postgres: v1: vos vos 198.51.100.22(47130) idle
             │ ├─  3551 postgres: v1: vos vos 198.51.100.21(35220) idle
             │ ├─  3703 postgres: v1: vos vos 198.51.100.20(45060) idle
             │ ├─  4085 postgres: v1: vos vos 198.51.100.22(32788) idle
             │ ├─  4160 postgres: v1: vos vos 198.51.100.21(48314) idle
             │ ├─  4412 postgres: v1: vos vos 198.51.100.20(44592) idle
             │ ├─  4607 postgres: v1: vos vos 198.51.100.22(54258) idle
             │ ├─  4888 postgres: v1: vos vos 198.51.100.20(50490) idle
             │ ├─  5444 postgres: v1: vos vos 198.51.100.20(38082) idle
             │ ├─  5629 postgres: v1: vos vos 198.51.100.22(44254) idle
             │ ├─  6971 postgres: v1: vos vos 198.51.100.20(41902) idle
             │ ├─  7812 postgres: v1: vos vos 198.51.100.20(56802) idle
             │ ├─  7813 postgres: v1: vos vos 198.51.100.20(56806) idle
             │ ├─  7814 postgres: v1: vos vos 198.51.100.20(56816) idle
             │ ├─  7815 postgres: v1: vos vos 198.51.100.20(56832) idle
             │ ├─  7855 sleep 60
             │ ├─  7856 sleep 60
             │ ├─ 10730 sleep 10
             │ ├─ 10731 sleep 10
             │ ├─ 25928 /usr/bin/dumb-init -c --rewrite 1:0 -- /bin/sh /launch.sh
             │ ├─ 25960 /bin/sh /launch.sh
             │ ├─ 26031 /usr/bin/runsvdir -P /etc/service
             │ ├─ 26156 runsv cron
             │ ├─ 26157 runsv patroni
             │ ├─ 26158 runsv backup_restore
             │ ├─ 26159 runsv cluster_manage
             │ ├─ 26160 runsv filebeat
             │ ├─ 26161 runsv init_db_for_vos
             │ ├─ 26162 runsv pgqd
             │ ├─ 26163 runsv replica_monitor
             │ ├─ 26164 svlogd -tt /var/log/cron
             │ ├─ 26165 svlogd -tt /var/log/backup_restore
             │ ├─ 26166 svlogd -tt /var/log/init_db_for_vos
             │ ├─ 26167 svlogd -tt /var/log/patroni
             │ ├─ 26168 svlogd -tt /var/log/cluster_manage
             │ ├─ 26169 svlogd -tt /var/log/replica_monitor
             │ ├─ 26170 svlogd -tt /var/log/pgqd
             │ ├─ 26171 svlogd -tt /var/log/filebeat
             │ ├─ 26172 /usr/sbin/cron -f
             │ ├─ 26173 python3 /usr/local/bin/init_db_for_vos.py --sleep
             │ ├─ 26174 /usr/bin/python3 /usr/local/bin/patroni
             │ ├─ 26175 /bin/bash /var/lib/database_middleware/rest_service/backup_restore/start.sh
             │ ├─ 26176 /bin/bash /var/lib/database_middleware/rest_service/cluster_manage/start.sh
             │ ├─ 26177 /usr/bin/pgqd /home/postgres/pgq_ticker.ini
             │ ├─ 26178 /bin/sh -e ./run
             │ ├─ 26179 ./filebeat -e --strict.perms=false
             │ ├─ 26185 sleep infinity
             │ ├─ 26207 /bin/bash /var/lib/database_middleware/rest_service/cluster_manage/start.sh
             │ ├─ 26208 /bin/bash /var/lib/database_middleware/rest_service/backup_restore/start.sh
             │ ├─ 26227 python3 -u /var/lib/database_middleware/rest_service/cluster_manage/DatabaseMiddlewareClusterManageService.py 0.0.0.0 5440
             │ ├─ 26228 python3 -u /var/lib/database_middleware/rest_service/backup_restore/DatabaseMiddlewareBackupRestoreService.py 0.0.0.0 5438
             │ ├─ 26474 postgres -D /home/postgres/pgroot/pgdata/standalone --config-file=/home/postgres/pgroot/pgdata/standalone/postgresql.conf --listen_addresses=198.51.100.16 --port=5432 --cluster_name=v1 --wal_level=replica --hot_standby=on --max_connections=200 --max_wal_senders=10 --max_prepared_transactions=0 --max_locks_per_transaction=64 --track_commit_timestamp=off --max_replication_slots=10 --max_worker_processes=8 --wal_log_hints=on
             │ ├─ 26476 postgres: v1: logger process   
             │ ├─ 26478 postgres: v1: bgworker: bg_mon   
             │ ├─ 26481 postgres: v1: checkpointer process   
             │ ├─ 26482 postgres: v1: writer process   
             │ ├─ 26483 postgres: v1: stats collector process   
             │ ├─ 26487 postgres: v1: postgres postgres [local] idle
             │ ├─ 26504 postgres: v1: wal writer process   
             │ ├─ 26505 postgres: v1: autovacuum launcher process   
             │ ├─ 26506 postgres: v1: archiver process   last was 000000100000000F00000059
             │ ├─ 26507 postgres: v1: bgworker: pg_cron launcher   
             │ ├─ 26508 postgres: v1: bgworker: TimescaleDB Background Worker Launcher   
             │ ├─ 26510 postgres: v1: bgworker: logical replication launcher   
             │ ├─ 26524 tail -f /var/log/backup_restore/current /var/log/cluster_manage/current /var/log/cron/current /var/log/filebeat/current /var/log/init_db_for_vos/current /var/log/patroni/current /var/log/pgqd/current /var/log/replica_monitor/current
             │ ├─ 38190 postgres: v1: vos vos 198.51.100.9(38916) idle
             │ ├─ 38191 postgres: v1: vos vos 198.51.100.9(38930) idle
             │ ├─ 47500 postgres: v1: vos vos 198.51.100.1(63158) idle
             │ ├─ 47525 postgres: v1: vos vos 198.51.100.1(5896) idle
             │ ├─ 47526 postgres: v1: vos vos 198.51.100.1(6944) idle
             │ ├─ 47528 postgres: v1: vos vos 198.51.100.1(4058) idle
             │ ├─ 49820 postgres: v1: vos vos 198.51.100.28(56246) idle
             │ ├─ 49821 postgres: v1: vos vos 198.51.100.28(56248) idle
             │ ├─ 49823 postgres: v1: vos vos 198.51.100.28(56260) idle
             │ ├─ 49825 postgres: v1: vos vos 198.51.100.28(56262) idle
             │ ├─ 49826 postgres: v1: vos vos 198.51.100.28(56264) idle
             │ ├─ 49827 postgres: v1: vos vos 198.51.100.28(56272) idle
             │ ├─ 49828 postgres: v1: vos vos 198.51.100.28(56282) idle
             │ ├─ 49829 postgres: v1: vos vos 198.51.100.28(56290) idle
             │ ├─ 49830 postgres: v1: vos vos 198.51.100.28(56292) idle
             │ ├─ 49831 postgres: v1: vos vos 198.51.100.28(56304) idle
             │ ├─455454 postgres: v1: vos vos 198.51.100.21(42792) idle
             │ ├─496318 postgres: v1: vos vos 198.51.100.21(41306) idle
             │ ├─497281 postgres: v1: vos vos 198.51.100.20(33370) idle
             │ ├─497309 postgres: v1: vos vos 198.51.100.20(33384) idle
             │ ├─497959 postgres: v1: vos vos 198.51.100.21(50216) idle
             │ ├─498229 postgres: v1: vos vos 198.51.100.20(59992) idle
             │ ├─498252 postgres: v1: vos vos 198.51.100.20(60002) idle
             │ ├─498475 postgres: v1: vos vos 198.51.100.22(32906) idle
             │ ├─499573 postgres: v1: vos vos 198.51.100.21(37024) idle
             │ ├─499717 postgres: v1: vos vos 198.51.100.22(34004) idle
             │ ├─499880 postgres: v1: vos vos 198.51.100.20(52088) idle
             │ ├─499965 postgres: v1: vos vos 198.51.100.20(52102) idle
             │ └─499993 postgres: v1: vos vos 198.51.100.20(52118) idle
             ├─8fbeae4f599c70001a79ff8190e4d36b6208c699e369b374812b39107c435fab
             │ ├─11104 /bin/sh -c zkCli.sh -server 127.0.0.1:2181 get /zookeeper/config | grep -E "^(server|version)"
             │ ├─11105 bash /zookeeper/bin/zkCli.sh -server 127.0.0.1:2181 get /zookeeper/config
             │ ├─11106 grep -E ^(server|version)
             │ ├─11111 /opt/java/openjdk/bin/java -Dzookeeper.log.dir=/zookeeper-volume/log -Dzookeeper.log.file=zookeeper-cli.log -cp /zookeeper/bin/../zookeeper-server/target/classes:/zookeeper/bin/../build/classes:/zookeeper/bin/../zookeeper-server/target/lib/*.jar:/zookeeper/bin/../build/lib/*.jar:/zookeeper/bin/../lib/zookeeper-prometheus-metrics-3.8.2.jar:/zookeeper/bin/../lib/zookeeper-jute-3.8.2.jar:/zookeeper/bin/../lib/zookeeper-3.8.2.jar:/zookeeper/bin/../lib/snappy-java-1.1.10.1.jar:/zookeeper/bin/../lib/slf4j-api-1.7.30.jar:/zookeeper/bin/../lib/simpleclient_servlet-0.9.0.jar:/zookeeper/bin/../lib/simpleclient_hotspot-0.9.0.jar:/zookeeper/bin/../lib/simpleclient_common-0.9.0.jar:/zookeeper/bin/../lib/simpleclient-0.9.0.jar:/zookeeper/bin/../lib/netty-transport-native-unix-common-4.1.94.Final.jar:/zookeeper/bin/../lib/netty-transport-native-epoll-4.1.94.Final.jar:/zookeeper/bin/../lib/netty-transport-classes-epoll-4.1.94.Final.jar:/zookeeper/bin/../lib/netty-transport-4.1.94.Final.jar:/zookeeper/bin/../lib/netty-resolver-4.1.94.Final.jar:/zookeeper/bin/../lib/netty-handler-4.1.94.Final.jar:/zookeeper/bin/../lib/netty-common-4.1.94.Final.jar:/zookeeper/bin/../lib/netty-codec-4.1.94.Final.jar:/zookeeper/bin/../lib/netty-buffer-4.1.94.Final.jar:/zookeeper/bin/../lib/metrics-core-4.1.12.1.jar:/zookeeper/bin/../lib/logback-core-1.2.10.jar:/zookeeper/bin/../lib/logback-classic-1.2.10.jar:/zookeeper/bin/../lib/jline-2.14.6.jar:/zookeeper/bin/../lib/jetty-util-ajax-9.4.51.v20230217.jar:/zookeeper/bin/../lib/jetty-util-9.4.51.v20230217.jar:/zookeeper/bin/../lib/jetty-servlet-9.4.51.v20230217.jar:/zookeeper/bin/../lib/jetty-server-9.4.51.v20230217.jar:/zookeeper/bin/../lib/jetty-security-9.4.51.v20230217.jar:/zookeeper/bin/../lib/jetty-io-9.4.51.v20230217.jar:/zookeeper/bin/../lib/jetty-http-9.4.51.v20230217.jar:/zookeeper/bin/../lib/javax.servlet-api-3.1.0.jar:/zookeeper/bin/../lib/jackson-databind-2.15.2.jar:/zookeeper/bin/../lib/jackson-core-2.15.2.jar:/zookeep
er/bin/../lib/jackson-annotations-2.15.2.jar:/zookeeper/bin/../lib/commons-io-2.11.0.jar:/zookeeper/bin/../lib/commons-cli-1.5.0.jar:/zookeeper/bin/../lib/audience-annotations-0.12.0.jar:/zookeeper/bin/../zookeeper-*.jar:/zookeeper/bin/../zookeeper-server/src/main/resources/lib/*.jar:/zookeeper-volume/conf: -Xmx256m org.apache.zookeeper.ZooKeeperMain -server 127.0.0.1:2181 get /zookeeper/config
             │ ├─15753 bash /dynamic_zk/docker-entrypoint.sh
             │ ├─16135 python3 /dynamic_zk/dynamic_zk.py
             │ └─17619 /opt/java/openjdk/bin/java -Dzookeeper.log.dir=/zookeeper-volume/log -Dzookeeper.log.file=zookeeper-server.log -XX:OnOutOfMemoryError=kill -9 %p -cp /zookeeper/bin/../zookeeper-server/target/classes:/zookeeper/bin/../build/classes:/zookeeper/bin/../zookeeper-server/target/lib/*.jar:/zookeeper/bin/../build/lib/*.jar:/zookeeper/bin/../lib/zookeeper-prometheus-metrics-3.8.2.jar:/zookeeper/bin/../lib/zookeeper-jute-3.8.2.jar:/zookeeper/bin/../lib/zookeeper-3.8.2.jar:/zookeeper/bin/../lib/snappy-java-1.1.10.1.jar:/zookeeper/bin/../lib/slf4j-api-1.7.30.jar:/zookeeper/bin/../lib/simpleclient_servlet-0.9.0.jar:/zookeeper/bin/../lib/simpleclient_hotspot-0.9.0.jar:/zookeeper/bin/../lib/simpleclient_common-0.9.0.jar:/zookeeper/bin/../lib/simpleclient-0.9.0.jar:/zookeeper/bin/../lib/netty-transport-native-unix-common-4.1.94.Final.jar:/zookeeper/bin/../lib/netty-transport-native-epoll-4.1.94.Final.jar:/zookeeper/bin/../lib/netty-transport-classes-epoll-4.1.94.Final.jar:/zookeeper/bin/../lib/netty-transport-4.1.94.Final.jar:/zookeeper/bin/../lib/netty-resolver-4.1.94.Final.jar:/zookeeper/bin/../lib/netty-handler-4.1.94.Final.jar:/zookeeper/bin/../lib/netty-common-4.1.94.Final.jar:/zookeeper/bin/../lib/netty-codec-4.1.94.Final.jar:/zookeeper/bin/../lib/netty-buffer-4.1.94.Final.jar:/zookeeper/bin/../lib/metrics-core-4.1.12.1.jar:/zookeeper/bin/../lib/logback-core-1.2.10.jar:/zookeeper/bin/../lib/logback-classic-1.2.10.jar:/zookeeper/bin/../lib/jline-2.14.6.jar:/zookeeper/bin/../lib/jetty-util-ajax-9.4.51.v20230217.jar:/zookeeper/bin/../lib/jetty-util-9.4.51.v20230217.jar:/zookeeper/bin/../lib/jetty-servlet-9.4.51.v20230217.jar:/zookeeper/bin/../lib/jetty-server-9.4.51.v20230217.jar:/zookeeper/bin/../lib/jetty-security-9.4.51.v20230217.jar:/zookeeper/bin/../lib/jetty-io-9.4.51.v20230217.jar:/zookeeper/bin/../lib/jetty-http-9.4.51.v20230217.jar:/zookeeper/bin/../lib/javax.servlet-api-3.1.0.jar:/zookeeper/bin/../lib/jackson-databind-2.15.2.jar:/zookeeper/bin/..
/lib/jackson-core-2.15.2.jar:/zookeeper/bin/../lib/jackson-annotations-2.15.2.jar:/zookeeper/bin/../lib/commons-io-2.11.0.jar:/zookeeper/bin/../lib/commons-cli-1.5.0.jar:/zookeeper/bin/../lib/audience-annotations-0.12.0.jar:/zookeeper/bin/../zookeeper-*.jar:/zookeeper/bin/../zookeeper-server/src/main/resources/lib/*.jar:/zookeeper-volume/conf: -Xmx1000m -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.local.only=false org.apache.zookeeper.server.quorum.QuorumPeerMain /zookeeper-volume/conf/zoo.cfg
             ├─2ce57bc5a2491a58dd6362918c37ac6e80a3bd093e610da0db3cc4d9a6bc9fda
             │ └─24130 /dashboard --insecure-bind-address=0.0.0.0 --bind-address=0.0.0.0 --namespace=kube-system --tls-cert-file=kubernetes.pem --tls-key-file=kubernetes-key.pem --enable-skip-login --authentication-mode=token --token-ttl=900
             ├─d11743e05135a6b7daa52339ed04bbac1981ffc8018794fe1c83b1da7b182e49
             │ └─15670 /pause
             ├─f35a227c889252874195cfa019db37de8dd32f8c0a56e2dabdc9040729bdebed
             │ └─13770 /pause
             ├─1af4c9c62ebee28328f5112ba9d76d6b74a48a6627e620d82f2d49d3c5a29efa
             │ ├─45204 bash /opt/srmpc/start.sh java -Dtmd.port=32845 -Drmp.mallocArenaMax=32 -Dkubernetes.namespace=cluster1 -DLOG_SERVER=logstash -XX:NativeMemoryTracking=summary -XX:+PrintGCDetails -XX:+PrintGCDateStamps -Xloggc:gc.log -XX:+UseGCLogFileRotation -XX:NumberOfGCLogFiles=5 -XX:GCLogFileSize=128K -Xms64m -Xmx1024m -Xshare:auto -XX:MaxMetaspaceSize=512m -XX:MaxHeapSize=1024m -XX:TieredStopAtLevel=1 -XX:+ExitOnOutOfMemoryError -XX:MaxHeapFreeRatio=30  -XX:MinHeapFreeRatio=10 -Djava.security.properties=/opt/harmonic/StreamRmpControllerCmd/java.security -jar /opt/harmonic/StreamRmpControllerCmd/StreamRmpControllerCmd.jar sample_stream_processing_engine v1 stream_processing 2F500BA9-D3B4-4332-BFA3-74BE6E00AAE2 zookeeper:2181 2F500BA9-D3B4-4332-BFA3-74BE6E00AAE2-1 /opt/harmonic/vos/voshome -1
             │ ├─45234 /usr/bin/python3 -s /usr/bin/supervisord -c /tmp/supervisor.conf -n
             │ ├─45268 /usr/bin/python3 /opt/srmpc/srmpc-watchdog
             │ ├─45269 /usr/share/filebeat/bin/filebeat -path.home /usr/share/filebeat -path.config /etc/filebeat -path.data /var/lib/filebeat -path.logs /var/log/filebeat
             │ ├─45270 /opt/goofys/goofys -f -o nonempty --stat-cache-ttl 0 --type-cache-ttl 0 --profile minio --endpoint http://minio-service:9000 vos-home-cluster1:app_data/harmonicinc.vos.configure/dolby_vision /graphics/dolby_vision
             │ ├─45271 /opt/goofys/goofys -f -o nonempty --stat-cache-ttl 0 --type-cache-ttl 0 --profile minio --endpoint http://minio-service:9000 vos-home-cluster1:app_data/harmonicinc.vos.configure/fonts /graphics/fonts
             │ ├─45272 /opt/goofys/goofys -f -o nonempty --stat-cache-ttl 0 --type-cache-ttl 0 --profile minio --endpoint http://minio-service:9000 vos-home-cluster1:app_data/harmonicinc.vos.configure/irdeto_license /opt/irdeto
             │ ├─45273 /opt/goofys/goofys -f -o nonempty --stat-cache-ttl 0 --type-cache-ttl 0 --profile minio --endpoint http://minio-service:9000 vos-home-cluster1:app_data/harmonicinc.vos.configure/lut /graphics/lut
             │ ├─45277 /opt/goofys/goofys -f -o nonempty --stat-cache-ttl 0 --type-cache-ttl 0 --profile minio --endpoint http://minio-service:9000 vos-home-cluster1:app_data/harmonicinc.vos.configure/sl_hdr_config /graphics/sl_hdr_config
             │ ├─45280 /opt/goofys/goofys -f -o nonempty --profile minio --endpoint http://minio-service:9000 vos-home-cluster1:app_data /opt/harmonic/vos/voshome/app_data
             │ ├─45297 /usr/local/bin/xinit /usr/local/etc/X11/xinitrc -- /usr/local/bin/Xorg -xkbdir /usr/local/share/X11/xkb -nolisten local -logverbose 0 vt7 -sharevts :0.0
             │ ├─45306 java -Dtmd.port=32845 -Drmp.mallocArenaMax=32 -Dkubernetes.namespace=cluster1 -DLOG_SERVER=logstash -XX:NativeMemoryTracking=summary -XX:+PrintGCDetails -XX:+PrintGCDateStamps -Xloggc:gc.log -XX:+UseGCLogFileRotation -XX:NumberOfGCLogFiles=5 -XX:GCLogFileSize=128K -Xms64m -Xmx1024m -Xshare:auto -XX:MaxMetaspaceSize=512m -XX:MaxHeapSize=1024m -XX:TieredStopAtLevel=1 -XX:+ExitOnOutOfMemoryError -XX:MaxHeapFreeRatio=30 -XX:MinHeapFreeRatio=10 -Djava.security.properties=/opt/harmonic/StreamRmpControllerCmd/java.security -jar /opt/harmonic/StreamRmpControllerCmd/StreamRmpControllerCmd.jar sample_stream_processing_engine v1 stream_processing 2F500BA9-D3B4-4332-BFA3-74BE6E00AAE2 zookeeper:2181 2F500BA9-D3B4-4332-BFA3-74BE6E00AAE2-1 /opt/harmonic/vos/voshome -1
             │ ├─45326 /usr/local/bin/Xorg :0 -xkbdir /usr/local/share/X11/xkb -nolisten local -logverbose 0 vt7 -sharevts :0.0
             │ ├─45548 sh /usr/local/etc/X11/xinitrc
             │ ├─45550 /usr/local/bin/xterm -g 90x50+0+0 -bg black -fg yellow -fn 10x20
             │ ├─45562 /usr/local/bin/dwm
             │ ├─46746 bash
             │ ├─46812 java -Xmx64m -Xshare:auto -XX:MaxMetaspaceSize=64m -XX:MaxHeapSize=64m -XX:TieredStopAtLevel=1 -Dlog_file_path=/var/log/rmp-controller-log -DLOG_SERVER=logstash -Dvos.home=/opt/harmonic/vos/voshome -jar /opt/harmonic/EsamAdapter/EsamAdapter.jar
             │ ├─47325 /opt/harmonic/rmp/RmpWorker -Id=3eca9eb6-6ee8-4946-9308-6de30cc98438 -KernelPath=/opt/harmonic/rmp/libRmpKernel.so -RpiPath=/opt/harmonic/rmp/rpi -rpiMessagePlugInPath=/opt/harmonic/rmp/rpiMessagePlugin -logPath=/var/log/rmp-controller-log/rmp-worker.log -velocimeterPath=/opt/harmonic/rmp/libRmpVelocimeter.so -rmpServiceId=sspe-2F500BA9-D3B4-4332-BFA3-74BE6E00AAE2 -serverIP=127.0.0.1 -serverPort=43081 -oplanOutputPath=/var/log/rmp-controller-log/rmp-processing.opl -logMDCs=service_id:2F500BA9-D3B4-4332-BFA3-74BE6E00AAE2 -enableRuntimeStateRestorer=true -enableRuntimeStateUpdater=true
             │ └─47626 dbus-daemon --fork --config-file /etc/dbus-1/dbus-uvp-session.conf
             ├─bd2f5bcbccb5108ea75dd8fe3c107454e9443a9bf04d0d76ea762d52abf0de4d
             │ └─24718 /bin/prometheus --web.console.templates=/etc/prometheus/consoles --web.console.libraries=/etc/prometheus/console_libraries --storage.tsdb.retention.time=1d --config.file=/etc/prometheus/config_out/prometheus.env.yaml --storage.tsdb.path=/prometheus --web.enable-lifecycle --web.route-prefix=/ --web.config.file=/etc/prometheus/web_config/web-config.yaml
             ├─fe01a89fedb057cc98e77dfcaadada33e8d2afe86478b11f263d5f2e0a9cdf70
             │ └─13739 /pause
             ├─bb8f9aba5a9cfe49eda5b1007ecac6c2228462f77806cb7801aa820df7b2f0a4
             │ └─16695 /pause
             ├─aad3f498ca0001b47bbc7d89f01988c218e9b80d529077e7152d13b61403f0bb
             │ └─27295 minio server /storage
             ├─f15a73eb2d9863156cc92fde623fb7c9f291dbcc6f4fd68c05f1d72e948c907e
             │ ├─42903 /usr/bin/python3 -s /usr/bin/supervisord -c /supervisord.conf
             │ ├─43446 python3 /opt/harmonic/unified-origin-engine/bin/supervisord-event-handler
             │ ├─43450 /opt/goofys/goofys -f -o nonempty --profile minio --endpoint http://minio-service:9000 vos-home-cluster1:app_data/com.harmonicinc.vos.cert.manager /tmp/drmCertValidationKey
             │ ├─43452 /opt/goofys/goofys -f -o nonempty --profile minio --endpoint http://minio-service:9000 vos-home-cluster1:app_data/harmonicinc.vos.scrambling/kmsCertKey /tmp/drmClientCertKey
             │ ├─43454 /opt/goofys/goofys --stat-cache-ttl 1s --type-cache-ttl 1s -f -o nonempty --profile minio --endpoint http://minio-service:9000 vos-home-cluster1:app_data/harmonicinc.vos.unified.origin.engine /opt/harmonic/vos/voshome/app_data/harmonicinc.vos.unified.origin.engine
             │ ├─43455 /usr/share/filebeat/bin/filebeat --path.home /usr/share/filebeat --path.config /etc/filebeat --path.data /var/lib/filebeat --path.logs /var/log/filebeat -E output.logstash.hosts=['logstash:5044']
             │ ├─43462 java -Xms50m -Xmx512m -XX:+ExitOnOutOfMemoryError -Xloggc:/var/log/gc.log -XX:+UseGCLogFileRotation -XX:NumberOfGCLogFiles=3 -XX:GCLogFileSize=1M -Dorg.glassfish.grizzly.nio.transport.TCPNIOTransport.max-receive-buffer-size=1048576 -Djava.security.properties=/opt/harmonic/java_security/java.security -classpath /opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/remote-daemon-executor.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/potf-server-config-lib.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/KMSClientLib.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/cpixlib.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kms-soap-stub.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jaxb-impl-2.2.5.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/commons-logging-1.2.1.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/xmlsec-1.5.6.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/commons-codec-1.14.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jersey-apache-connector-2.5.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/httpcore-4.3.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/httpclient-4.3.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/guava-14.0.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jackson-dataformat-xml-2.10.3.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/stax2-api-4.2.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/RmpControllerSDK.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/unified-origin-engine-library.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/StreamRmpControllerC
md.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/rmp-controller-models.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/commons-math3-3.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/dnsjava-2.1.8.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jersey-container-grizzly2-http-2.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/grizzly-http-server-2.3.33.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jersey-container-jdk-http-2.7.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/protostuff-core-1.5.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/protostuff-runtime-1.5.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jackson-jaxrs-json-provider-2.10.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jersey-media-json-jackson-2.7.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jackson-jaxrs-1.9.13.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/velocity-1.7.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/joda-time-2.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/commons-validator-1.6.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/log4j-slf4j-impl-2.17.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/zookeeper-3.5.6.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/cron-utils-9.2.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-client-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-httpclient-okhttp-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-client-api-6.5.0.jar:/opt/harmonic/MediaStreamPackageControlle
r/remote-daemon-executor/lib/curator-recipes-5.0.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/curator-framework-5.0.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/curator-client-5.0.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-gatewayapi-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-resource-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-rbac-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-admissionregistration-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-apps-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-autoscaling-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-apiextensions-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-batch-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-certificates-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-coordination-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-discovery-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-events-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-extensions-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-flowcontrol-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-networking-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-metrics-6.5.0.jar:/opt/harmonic/MediaStreamPackag
eController/remote-daemon-executor/lib/kubernetes-model-policy-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-scheduling-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-storageclass-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-node-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-core-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-common-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/slf4j-api-1.7.36.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/log4j-1.2-api-2.17.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/simpleclient_servlet-0.12.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/simpleclient_servlet_common-0.12.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/simpleclient_common-0.12.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/simpleclient-0.12.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jersey-server-2.7.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jersey-client-2.7.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jersey-common-2.7.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/javax.ws.rs-api-2.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/log4j-layout-template-json-2.17.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/asset-mgmt-grpc-library.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/commons-configuration-1.10.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/commons-lang-2.6.jar:/opt/harmonic/MediaStreamPackageControlle
r/remote-daemon-executor/lib/gson-2.2.4.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/commons-io-2.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jackson-module-jaxb-annotations-2.10.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jackson-jaxrs-base-2.10.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/zjsonpatch-0.3.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jackson-dataformat-yaml-2.14.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jackson-datatype-jsr310-2.14.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jackson-databind-2.14.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jackson-annotations-2.14.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/property-binder-4.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/grizzly-http-2.3.33.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/hk2-locator-2.2.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/javax.inject-2.2.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/protostuff-collectionschema-1.5.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/protostuff-api-1.5.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jackson-xc-1.9.13.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jackson-mapper-asl-1.9.13.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jackson-core-asl-1.9.13.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/commons-beanutils-1.9.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/commons-collections-3.2.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/commons-digester-1.8
.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/commons-logging-1.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/log4j-core-2.17.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/log4j-api-2.17.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/simpleclient_tracer_otel-0.12.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/simpleclient_tracer_otel_agent-0.12.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/zookeeper-jute-3.5.6.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/audience-annotations-0.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/netty-handler-4.1.42.Final.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/netty-transport-native-epoll-4.1.42.Final.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jackson-core-2.14.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jakarta.el-3.0.4.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/grizzly-framework-2.3.33.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/javax.annotation-api-1.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jersey-guava-2.7.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/hk2-api-2.2.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/osgi-resource-locator-1.0.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/validation-api-1.1.0.Final.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jakarta.xml.bind-api-2.3.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jakarta.activation-api-1.2.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/snakeyaml-engine-2.6.jar:/opt/harmonic/MediaSt
reamPackageController/remote-daemon-executor/lib/logging-interceptor-3.12.12.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/okhttp-3.12.12.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/simpleclient_tracer_common-0.12.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/netty-codec-4.1.42.Final.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/netty-transport-native-unix-common-4.1.42.Final.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/netty-transport-4.1.42.Final.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/netty-buffer-4.1.42.Final.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/netty-resolver-4.1.42.Final.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/netty-common-4.1.42.Final.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/hk2-utils-2.2.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/aopalliance-repackaged-2.2.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/javassist-3.18.1-GA.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/snakeyaml-1.33.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/okio-1.15.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/guava-27.0.1-jre.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/javax.inject-1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/failureaccess-1.0.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/listenablefuture-9999.0-empty-to-avoid-conflict-with-guava.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jsr305-3.0.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/checker-qual-2.5.2.jar:/opt/harmonic/MediaStreamPackageController/remo
te-daemon-executor/lib/error_prone_annotations-2.2.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/j2objc-annotations-1.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/animal-sniffer-annotations-1.17.jar com.harmonicinc.remotedaemonexecutor.RemoteDaemonExecutor media-delivery-server MEDIAGRID backupStorageIsNotConfigured
             │ ├─45241 /opt/harmonic/MediaStreamPackageController/rmp/MDSMain --ip 0.0.0.0 --port 20201
             │ ├─46973 ./ott_egress_proxy
             │ ├─47973 nginx: master process /usr/local/nginx/sbin/nginx -c /opt/harmonic/mds/nginx.conf
             │ ├─52324 nginx: worker process
             │ ├─52325 nginx: worker process
             │ ├─52326 nginx: worker process
             │ ├─52327 nginx: worker process
             │ ├─52328 nginx: worker process
             │ ├─52329 nginx: worker process
             │ ├─52330 nginx: worker process
             │ ├─52331 nginx: worker process
             │ ├─52332 nginx: worker process
             │ ├─52333 nginx: worker process
             │ ├─52334 nginx: worker process
             │ ├─52335 nginx: worker process
             │ ├─52336 nginx: worker process
             │ ├─52337 nginx: worker process
             │ ├─52338 nginx: worker process
             │ ├─52339 nginx: worker process
             │ ├─52340 nginx: worker process
             │ ├─52341 nginx: worker process
             │ ├─52342 nginx: worker process
             │ ├─52343 nginx: worker process
             │ ├─52344 nginx: worker process
             │ ├─52345 nginx: worker process
             │ ├─52346 nginx: worker process
             │ ├─52347 nginx: worker process
             │ ├─52348 nginx: worker process
             │ ├─52349 nginx: worker process
             │ ├─52350 nginx: worker process
             │ ├─52351 nginx: worker process
             │ ├─52352 nginx: worker process
             │ ├─52353 nginx: worker process
             │ ├─52354 nginx: worker process
             │ ├─52355 nginx: worker process
             │ ├─52356 nginx: worker process
             │ ├─52357 nginx: worker process
             │ ├─52358 nginx: worker process
             │ ├─52359 nginx: worker process
             │ ├─52360 nginx: worker process
             │ ├─52361 nginx: worker process
             │ ├─52362 nginx: worker process
             │ ├─52363 nginx: worker process
             │ ├─52364 nginx: worker process
             │ ├─52365 nginx: worker process
             │ ├─52366 nginx: worker process
             │ ├─52367 nginx: worker process
             │ ├─52368 nginx: worker process
             │ ├─52369 nginx: worker process
             │ ├─52370 nginx: worker process
             │ ├─52371 nginx: worker process
             │ ├─52372 nginx: worker process
             │ ├─52373 nginx: worker process
             │ ├─52374 nginx: worker process
             │ ├─52375 nginx: worker process
             │ ├─52376 nginx: worker process
             │ ├─52377 nginx: worker process
             │ ├─52378 nginx: worker process
             │ ├─52379 nginx: worker process
             │ ├─52380 nginx: worker process
             │ ├─52381 nginx: worker process
             │ ├─52382 nginx: worker process
             │ ├─52383 nginx: worker process
             │ ├─52384 nginx: worker process
             │ ├─52385 nginx: worker process
             │ ├─52386 nginx: worker process
             │ ├─52387 nginx: worker process
             │ ├─52388 nginx: worker process
             │ ├─52389 nginx: worker process
             │ ├─52390 nginx: worker process
             │ ├─52391 nginx: worker process
             │ ├─52392 nginx: worker process
             │ ├─52393 nginx: worker process
             │ ├─52394 nginx: worker process
             │ ├─52395 nginx: worker process
             │ ├─52396 nginx: worker process
             │ ├─52397 nginx: worker process
             │ ├─52398 nginx: worker process
             │ ├─52399 nginx: worker process
             │ ├─52400 nginx: worker process
             │ ├─52401 nginx: worker process
             │ ├─52402 nginx: worker process
             │ ├─52403 nginx: worker process
             │ ├─52404 nginx: worker process
             │ ├─52405 nginx: worker process
             │ ├─52406 nginx: worker process
             │ ├─52408 nginx: worker process
             │ ├─52409 nginx: worker process
             │ ├─52410 nginx: worker process
             │ ├─52411 nginx: worker process
             │ ├─52412 nginx: worker process
             │ ├─52413 nginx: worker process
             │ ├─52414 nginx: worker process
             │ ├─52415 nginx: worker process
             │ ├─52416 nginx: worker process
             │ ├─52417 nginx: worker process
             │ ├─52418 nginx: worker process
             │ ├─52419 nginx: worker process
             │ ├─52420 nginx: worker process
             │ ├─52421 nginx: worker process
             │ ├─52422 nginx: worker process
             │ ├─52423 nginx: worker process
             │ ├─52424 nginx: worker process
             │ ├─52426 nginx: worker process
             │ ├─52427 nginx: worker process
             │ ├─52428 nginx: worker process
             │ ├─52429 nginx: worker process
             │ ├─52430 nginx: worker process
             │ ├─52431 nginx: worker process
             │ ├─52432 nginx: worker process
             │ ├─52433 nginx: worker process
             │ ├─52434 nginx: worker process
             │ ├─52435 nginx: worker process
             │ ├─52436 nginx: worker process
             │ ├─52437 nginx: worker process
             │ ├─52438 nginx: worker process
             │ ├─52439 nginx: worker process
             │ ├─52440 nginx: worker process
             │ ├─52441 nginx: worker process
             │ ├─52442 nginx: worker process
             │ ├─52443 nginx: worker process
             │ ├─52448 nginx: worker process
             │ ├─52449 nginx: worker process
             │ ├─52450 nginx: worker process
             │ ├─52451 nginx: worker process
             │ ├─52452 nginx: worker process
             │ ├─52453 nginx: worker process
             │ ├─52454 nginx: worker process
             │ ├─52455 nginx: worker process
             │ ├─52456 nginx: worker process
             │ ├─52457 nginx: worker process
             │ ├─52458 nginx: worker process
             │ ├─52459 nginx: worker process
             │ ├─52460 nginx: worker process
             │ ├─52461 nginx: worker process
             │ ├─52462 nginx: worker process
             │ ├─52463 nginx: worker process
             │ ├─52464 nginx: worker process
             │ ├─52465 nginx: worker process
             │ ├─52466 nginx: worker process
             │ ├─52467 nginx: worker process
             │ ├─52468 nginx: worker process
             │ ├─52469 nginx: worker process
             │ ├─52470 nginx: worker process
             │ ├─52471 nginx: worker process
             │ ├─52472 nginx: worker process
             │ ├─52473 nginx: worker process
             │ ├─52474 nginx: worker process
             │ ├─52479 nginx: worker process
             │ ├─52480 nginx: worker process
             │ ├─52481 nginx: worker process
             │ ├─52482 nginx: worker process
             │ ├─52483 nginx: worker process
             │ ├─52484 nginx: worker process
             │ ├─52485 nginx: worker process
             │ ├─52486 nginx: worker process
             │ ├─52487 nginx: worker process
             │ ├─52488 nginx: worker process
             │ ├─52489 nginx: worker process
             │ ├─52490 nginx: worker process
             │ ├─52491 nginx: worker process
             │ ├─52492 nginx: worker process
             │ ├─52493 nginx: worker process
             │ ├─52494 nginx: worker process
             │ ├─52495 nginx: worker process
             │ ├─52496 nginx: worker process
             │ ├─52497 nginx: worker process
             │ ├─52498 nginx: worker process
             │ ├─52499 nginx: worker process
             │ ├─52500 nginx: worker process
             │ ├─52501 nginx: worker process
             │ └─52502 nginx: cache manager process
             ├─97fe6cf2172869acf381caf38e9c77a80dd4ffb5ec9ffd342bb1297327a0c61c
             │ └─13746 /pause
             ├─f1e21bbbc7cb957da13db6b46c64353a04a7c1d1fd670772e7adc9faf6e78fa3
             │ └─15804 java -Xms256m -Xmx512m -jar /opt/exhibitor/exhibitor.jar --port 8081 --defaultconfig /opt/exhibitor/exhibitor.properties --configtype file --filesystembackup false
             ├─213f9d22cbd9897c1da8e5967af955d56e30f79c43d2fd15acb7e487c9d3c537
             │ ├─46627 /usr/bin/python3 -s /usr/bin/supervisord -c supervisord.conf
             │ ├─47208 /opt/harmonic/esam_pois/esam-oob-adapter --port=8088
             │ ├─47209 /opt/harmonic/esam_pois/esam-translator --port=9999 --external-port=19999 --dbconn=postgres://vos:vossdk@pgdb:5432/vos?sslmode=disable
             │ ├─47210 java -Xmx512m -jar esam.pois.worker.jar
             │ └─47211 /usr/share/filebeat/bin/filebeat -path.home /usr/share/filebeat -path.config /etc/filebeat -path.data /var/lib/filebeat -path.logs /var/log/filebeat
             ├─0549d98179a918e17cf119b8b8e147bc0ed93b3a2fe6800534943bc5cade26ec
             │ ├─14476 /usr/bin/dumb-init -- /nginx-ingress-controller --configmap=ingress-nginx/ingress-nginx --tcp-services-configmap=ingress-nginx/tcp-services --udp-services-configmap=ingress-nginx/udp-services --annotations-prefix=nginx.ingress.kubernetes.io --default-ssl-certificate=ingress-nginx/ingress-nginx-vos-default-ssl-certificate
             │ ├─14578 /nginx-ingress-controller --configmap=ingress-nginx/ingress-nginx --tcp-services-configmap=ingress-nginx/tcp-services --udp-services-configmap=ingress-nginx/udp-services --annotations-prefix=nginx.ingress.kubernetes.io --default-ssl-certificate=ingress-nginx/ingress-nginx-vos-default-ssl-certificate
             │ ├─17843 nginx: master process /usr/bin/nginx -c /etc/nginx/nginx.conf
             │ ├─31898 nginx: worker process
             │ ├─31899 nginx: worker process
             │ ├─31900 nginx: worker process
             │ ├─31901 nginx: worker process
             │ ├─31923 nginx: worker process
             │ ├─31938 nginx: worker process
             │ ├─31976 nginx: worker process
             │ ├─32010 nginx: worker process
             │ ├─32039 nginx: worker process
             │ ├─32078 nginx: worker process
             │ ├─32113 nginx: worker process
             │ ├─32141 nginx: worker process
             │ ├─32183 nginx: worker process
             │ ├─32223 nginx: worker process
             │ ├─32260 nginx: worker process
             │ ├─32296 nginx: worker process
             │ ├─32322 nginx: worker process
             │ ├─32357 nginx: worker process
             │ ├─32385 nginx: worker process
             │ ├─32398 nginx: worker process
             │ ├─32448 nginx: worker process
             │ ├─32476 nginx: worker process
             │ ├─32518 nginx: worker process
             │ ├─32560 nginx: worker process
             │ ├─32588 nginx: worker process
             │ ├─32612 nginx: worker process
             │ ├─32650 nginx: worker process
             │ ├─32680 nginx: worker process
             │ ├─32719 nginx: worker process
             │ ├─32755 nginx: worker process
             │ ├─32785 nginx: worker process
             │ ├─32823 nginx: worker process
             │ ├─32860 nginx: worker process
             │ ├─32890 nginx: worker process
             │ ├─32920 nginx: worker process
             │ ├─32953 nginx: worker process
             │ ├─32990 nginx: worker process
             │ ├─33021 nginx: worker process
             │ ├─33055 nginx: worker process
             │ ├─33082 nginx: worker process
             │ ├─33123 nginx: worker process
             │ ├─33143 nginx: worker process
             │ ├─33183 nginx: worker process
             │ ├─33218 nginx: worker process
             │ ├─33256 nginx: worker process
             │ ├─33283 nginx: worker process
             │ ├─33322 nginx: worker process
             │ ├─33355 nginx: worker process
             │ ├─33385 nginx: worker process
             │ ├─33421 nginx: worker process
             │ ├─33454 nginx: worker process
             │ ├─33490 nginx: worker process
             │ ├─33526 nginx: worker process
             │ ├─33562 nginx: worker process
             │ ├─33586 nginx: worker process
             │ ├─33618 nginx: worker process
             │ ├─33653 nginx: worker process
             │ ├─33685 nginx: worker process
             │ ├─33724 nginx: worker process
             │ ├─33756 nginx: worker process
             │ ├─33784 nginx: worker process
             │ ├─33817 nginx: worker process
             │ ├─33850 nginx: worker process
             │ ├─33883 nginx: worker process
             │ ├─33919 nginx: worker process
             │ ├─33951 nginx: worker process
             │ ├─33984 nginx: worker process
             │ ├─34015 nginx: worker process
             │ ├─34049 nginx: worker process
             │ ├─34082 nginx: worker process
             │ ├─34114 nginx: worker process
             │ ├─34147 nginx: worker process
             │ ├─34181 nginx: worker process
             │ ├─34217 nginx: worker process
             │ ├─34249 nginx: worker process
             │ ├─34285 nginx: worker process
             │ ├─34312 nginx: worker process
             │ ├─34351 nginx: worker process
             │ ├─34381 nginx: worker process
             │ ├─34415 nginx: worker process
             │ ├─34449 nginx: worker process
             │ ├─34486 nginx: worker process
             │ ├─34514 nginx: worker process
             │ ├─34550 nginx: worker process
             │ ├─34582 nginx: worker process
             │ ├─34614 nginx: worker process
             │ ├─34651 nginx: worker process
             │ ├─34683 nginx: worker process
             │ ├─34712 nginx: worker process
             │ ├─34751 nginx: worker process
             │ ├─34784 nginx: worker process
             │ ├─34814 nginx: worker process
             │ ├─34851 nginx: worker process
             │ ├─34885 nginx: worker process
             │ ├─34919 nginx: worker process
             │ ├─34945 nginx: worker process
             │ ├─34975 nginx: worker process
             │ ├─35019 nginx: worker process
             │ ├─35040 nginx: worker process
             │ ├─35083 nginx: worker process
             │ ├─35108 nginx: worker process
             │ ├─35147 nginx: worker process
             │ ├─35185 nginx: worker process
             │ ├─35217 nginx: worker process
             │ ├─35247 nginx: worker process
             │ ├─35281 nginx: worker process
             │ ├─35308 nginx: worker process
             │ ├─35347 nginx: worker process
             │ ├─35377 nginx: worker process
             │ ├─35418 nginx: worker process
             │ ├─35444 nginx: worker process
             │ ├─35485 nginx: worker process
             │ ├─35514 nginx: worker process
             │ ├─35551 nginx: worker process
             │ ├─35584 nginx: worker process
             │ ├─35617 nginx: worker process
             │ ├─35652 nginx: worker process
             │ ├─35679 nginx: worker process
             │ ├─35714 nginx: worker process
             │ ├─35751 nginx: worker process
             │ ├─35778 nginx: worker process
             │ ├─35818 nginx: worker process
             │ ├─35848 nginx: worker process
             │ ├─35889 nginx: worker process
             │ ├─35918 nginx: worker process
             │ ├─35955 nginx: worker process
             │ ├─35990 nginx: worker process
             │ ├─36016 nginx: worker process
             │ ├─36031 nginx: worker process
             │ ├─36076 nginx: worker process
             │ ├─36109 nginx: worker process
             │ ├─36151 nginx: worker process
             │ ├─36187 nginx: worker process
             │ ├─36226 nginx: worker process
             │ ├─36236 nginx: worker process
             │ ├─36288 nginx: worker process
             │ ├─36316 nginx: worker process
             │ ├─36353 nginx: worker process
             │ ├─36384 nginx: worker process
             │ ├─36417 nginx: worker process
             │ ├─36442 nginx: worker process
             │ ├─36481 nginx: worker process
             │ ├─36518 nginx: worker process
             │ ├─36554 nginx: worker process
             │ ├─36586 nginx: worker process
             │ ├─36622 nginx: worker process
             │ ├─36651 nginx: worker process
             │ ├─36684 nginx: worker process
             │ ├─36716 nginx: worker process
             │ ├─36751 nginx: worker process
             │ ├─36787 nginx: worker process
             │ ├─36806 nginx: worker process
             │ ├─36851 nginx: worker process
             │ ├─36881 nginx: worker process
             │ ├─36921 nginx: worker process
             │ ├─36954 nginx: worker process
             │ ├─36984 nginx: worker process
             │ ├─37020 nginx: worker process
             │ ├─37047 nginx: worker process
             │ ├─37084 nginx: worker process
             │ ├─37109 nginx: worker process
             │ ├─37150 nginx: worker process
             │ ├─37179 nginx: worker process
             │ ├─37212 nginx: worker process
             │ ├─37244 nginx: worker process
             │ ├─37282 nginx: worker process
             │ ├─37317 nginx: worker process
             │ ├─37350 nginx: worker process
             │ └─37383 nginx: cache manager process
             ├─088d676e25b836ba1cad2c6a51c82f123437319e49f2e76950f72abfe9bf1927
             │ └─40452 /pause
             ├─42840c0de72c59943724e2371a66aadbd0d28beaf75a202ec52c78ab12091840
             │ └─44651 /pause
             ├─bd7b68359e81ea9f2992b99fa251dc23bca03911465b8cfcea15dc22ce0a6612
             │ ├─41342 /bin/bash /opt/harmonic/live-ingest-origin/bin/live-ingest-origin-server-controller
             │ ├─41354 /usr/bin/python /usr/bin/supervisord -c /supervisord.conf
             │ ├─41391 python3 /opt/harmonic/live-ingest-origin/bin/supervisord-event-handler
             │ ├─41392 java -Xms50m -Xmx200m -classpath /opt/harmonic/live-ingest-origin/live-ingest-origin-server-controller/lib/live-ingest-origin-server-controller.jar:/opt/harmonic/live-ingest-origin/live-ingest-origin-server-controller/lib/live-ingest-origin-library.jar:/opt/harmonic/live-ingest-origin/live-ingest-origin-server-controller/lib/log4j-slf4j-impl-2.17.1.jar:/opt/harmonic/live-ingest-origin/live-ingest-origin-server-controller/lib/zookeeper-3.5.6.jar:/opt/harmonic/live-ingest-origin/live-ingest-origin-server-controller/lib/curator-recipes-5.0.0.jar:/opt/harmonic/live-ingest-origin/live-ingest-origin-server-controller/lib/curator-framework-5.0.0.jar:/opt/harmonic/live-ingest-origin/live-ingest-origin-server-controller/lib/curator-client-5.0.0.jar:/opt/harmonic/live-ingest-origin/live-ingest-origin-server-controller/lib/slf4j-api-1.7.25.jar:/opt/harmonic/live-ingest-origin/live-ingest-origin-server-controller/lib/log4j-1.2-api-2.17.1.jar:/opt/harmonic/live-ingest-origin/live-ingest-origin-server-controller/lib/commons-io-2.2.jar:/opt/harmonic/live-ingest-origin/live-ingest-origin-server-controller/lib/velocity-1.7.jar:/opt/harmonic/live-ingest-origin/live-ingest-origin-server-controller/lib/commons-lang-2.4.jar:/opt/harmonic/live-ingest-origin/live-ingest-origin-server-controller/lib/joda-time-2.8.2.jar:/opt/harmonic/live-ingest-origin/live-ingest-origin-server-controller/lib/log4j-layout-template-json-2.17.1.jar:/opt/harmonic/live-ingest-origin/live-ingest-origin-server-controller/lib/gson-2.2.4.jar:/opt/harmonic/live-ingest-origin/live-ingest-origin-server-controller/lib/log4j-core-2.17.1.jar:/opt/harmonic/live-ingest-origin/live-ingest-origin-server-controller/lib/log4j-api-2.17.1.jar:/opt/harmonic/live-ingest-origin/live-ingest-origin-server-controller/lib/commons-collections-3.2.1.jar:/opt/harmonic/live-ingest-origin/live-ingest-origin-server-controller/lib/zookeeper-jute-3.5.6.jar:/opt/harmonic/live-ingest-origin/live-ingest-origin-server-contro
ller/lib/audience-annotations-0.5.0.jar:/opt/harmonic/live-ingest-origin/live-ingest-origin-server-controller/lib/netty-handler-4.1.42.Final.jar:/opt/harmonic/live-ingest-origin/live-ingest-origin-server-controller/lib/netty-transport-native-epoll-4.1.42.Final.jar:/opt/harmonic/live-ingest-origin/live-ingest-origin-server-controller/lib/netty-codec-4.1.42.Final.jar:/opt/harmonic/live-ingest-origin/live-ingest-origin-server-controller/lib/netty-transport-native-unix-common-4.1.42.Final.jar:/opt/harmonic/live-ingest-origin/live-ingest-origin-server-controller/lib/netty-transport-4.1.42.Final.jar:/opt/harmonic/live-ingest-origin/live-ingest-origin-server-controller/lib/netty-buffer-4.1.42.Final.jar:/opt/harmonic/live-ingest-origin/live-ingest-origin-server-controller/lib/netty-resolver-4.1.42.Final.jar:/opt/harmonic/live-ingest-origin/live-ingest-origin-server-controller/lib/netty-common-4.1.42.Final.jar:/opt/harmonic/live-ingest-origin/live-ingest-origin-server-controller/lib/guava-27.0.1-jre.jar:/opt/harmonic/live-ingest-origin/live-ingest-origin-server-controller/lib/failureaccess-1.0.1.jar:/opt/harmonic/live-ingest-origin/live-ingest-origin-server-controller/lib/listenablefuture-9999.0-empty-to-avoid-conflict-with-guava.jar:/opt/harmonic/live-ingest-origin/live-ingest-origin-server-controller/lib/jsr305-3.0.2.jar:/opt/harmonic/live-ingest-origin/live-ingest-origin-server-controller/lib/checker-qual-2.5.2.jar:/opt/harmonic/live-ingest-origin/live-ingest-origin-server-controller/lib/error_prone_annotations-2.2.0.jar:/opt/harmonic/live-ingest-origin/live-ingest-origin-server-controller/lib/j2objc-annotations-1.1.jar:/opt/harmonic/live-ingest-origin/live-ingest-origin-server-controller/lib/animal-sniffer-annotations-1.17.jar com.harmonicinc.liveingestoriginservercontroller.LiveIngestOriginServerController 125eb6b4-eb00-4000-b90b-369d72cb3b56 NGINX HTTP
             │ ├─41455 /usr/share/filebeat/bin/filebeat -path.home /usr/share/filebeat -path.config /etc/filebeat -path.data /var/lib/filebeat -path.logs /var/log/filebeat run &
             │ ├─41462 nginx: master process nginx -c /opt/harmonic/nginx/nginx.conf
             │ ├─41475 nginx: worker process
             │ ├─41476 nginx: worker process
             │ ├─41477 nginx: worker process
             │ ├─41479 nginx: worker process
             │ ├─41480 nginx: worker process
             │ ├─41481 nginx: worker process
             │ ├─41482 nginx: worker process
             │ ├─41483 nginx: worker process
             │ ├─41484 nginx: worker process
             │ ├─41485 nginx: worker process
             │ ├─41486 nginx: worker process
             │ ├─41487 nginx: worker process
             │ ├─41488 nginx: worker process
             │ ├─41489 nginx: worker process
             │ ├─41490 nginx: worker process
             │ ├─41491 nginx: worker process
             │ ├─41492 nginx: worker process
             │ ├─41493 nginx: worker process
             │ ├─41494 nginx: worker process
             │ ├─41495 nginx: worker process
             │ ├─41496 nginx: worker process
             │ ├─41497 nginx: worker process
             │ ├─41498 nginx: worker process
             │ ├─41499 nginx: worker process
             │ ├─41500 nginx: worker process
             │ ├─41501 nginx: worker process
             │ ├─41502 nginx: worker process
             │ ├─41503 nginx: worker process
             │ ├─41504 nginx: worker process
             │ ├─41505 nginx: worker process
             │ ├─41506 nginx: worker process
             │ ├─41507 nginx: worker process
             │ ├─41508 nginx: worker process
             │ ├─41509 nginx: worker process
             │ ├─41510 nginx: worker process
             │ ├─41511 nginx: worker process
             │ ├─41512 nginx: worker process
             │ ├─41513 nginx: worker process
             │ ├─41514 nginx: worker process
             │ ├─41515 nginx: worker process
             │ ├─41516 nginx: worker process
             │ ├─41517 nginx: worker process
             │ ├─41518 nginx: worker process
             │ ├─41519 nginx: worker process
             │ ├─41520 nginx: worker process
             │ ├─41521 nginx: worker process
             │ ├─41522 nginx: worker process
             │ ├─41523 nginx: worker process
             │ ├─41524 nginx: worker process
             │ ├─41525 nginx: worker process
             │ ├─41526 nginx: worker process
             │ ├─41527 nginx: worker process
             │ ├─41528 nginx: worker process
             │ ├─41529 nginx: worker process
             │ ├─41530 nginx: worker process
             │ ├─41531 nginx: worker process
             │ ├─41532 nginx: worker process
             │ ├─41533 nginx: worker process
             │ ├─41534 nginx: worker process
             │ ├─41535 nginx: worker process
             │ ├─41536 nginx: worker process
             │ ├─41537 nginx: worker process
             │ ├─41538 nginx: worker process
             │ ├─41539 nginx: worker process
             │ ├─41540 nginx: worker process
             │ ├─41541 nginx: worker process
             │ ├─41542 nginx: worker process
             │ ├─41543 nginx: worker process
             │ ├─41544 nginx: worker process
             │ ├─41545 nginx: worker process
             │ ├─41546 nginx: worker process
             │ ├─41547 nginx: worker process
             │ ├─41548 nginx: worker process
             │ ├─41549 nginx: worker process
             │ ├─41550 nginx: worker process
             │ ├─41551 nginx: worker process
             │ ├─41552 nginx: worker process
             │ ├─41553 nginx: worker process
             │ ├─41554 nginx: worker process
             │ ├─41555 nginx: worker process
             │ ├─41556 nginx: worker process
             │ ├─41557 nginx: worker process
             │ ├─41558 nginx: worker process
             │ ├─41559 nginx: worker process
             │ ├─41560 nginx: worker process
             │ ├─41561 nginx: worker process
             │ ├─41562 nginx: worker process
             │ ├─41563 nginx: worker process
             │ ├─41564 nginx: worker process
             │ ├─41565 nginx: worker process
             │ ├─41566 nginx: worker process
             │ ├─41567 nginx: worker process
             │ ├─41568 nginx: worker process
             │ ├─41569 nginx: worker process
             │ ├─41570 nginx: worker process
             │ ├─41571 nginx: worker process
             │ ├─41572 nginx: worker process
             │ ├─41573 nginx: worker process
             │ ├─41574 nginx: worker process
             │ ├─41575 nginx: worker process
             │ ├─41576 nginx: worker process
             │ ├─41577 nginx: worker process
             │ ├─41578 nginx: worker process
             │ ├─41579 nginx: worker process
             │ ├─41580 nginx: worker process
             │ ├─41581 nginx: worker process
             │ ├─41582 nginx: worker process
             │ ├─41583 nginx: worker process
             │ ├─41584 nginx: worker process
             │ ├─41585 nginx: worker process
             │ ├─41586 nginx: worker process
             │ ├─41587 nginx: worker process
             │ ├─41588 nginx: worker process
             │ ├─41589 nginx: worker process
             │ ├─41590 nginx: worker process
             │ ├─41591 nginx: worker process
             │ ├─41592 nginx: worker process
             │ ├─41593 nginx: worker process
             │ ├─41596 nginx: worker process
             │ ├─41597 nginx: worker process
             │ ├─41598 nginx: worker process
             │ ├─41599 nginx: worker process
             │ ├─41600 nginx: worker process
             │ ├─41601 nginx: worker process
             │ ├─41602 nginx: worker process
             │ ├─41603 nginx: worker process
             │ ├─41604 nginx: worker process
             │ ├─41605 nginx: worker process
             │ ├─41606 nginx: worker process
             │ ├─41607 nginx: worker process
             │ ├─41608 nginx: worker process
             │ ├─41609 nginx: worker process
             │ ├─41610 nginx: worker process
             │ ├─41611 nginx: worker process
             │ ├─41612 nginx: worker process
             │ ├─41613 nginx: worker process
             │ ├─41614 nginx: worker process
             │ ├─41615 nginx: worker process
             │ ├─41616 nginx: worker process
             │ ├─41617 nginx: worker process
             │ ├─41618 nginx: worker process
             │ ├─41619 nginx: worker process
             │ ├─41620 nginx: worker process
             │ ├─41621 nginx: worker process
             │ ├─41622 nginx: worker process
             │ ├─41623 nginx: worker process
             │ ├─41624 nginx: worker process
             │ ├─41625 nginx: worker process
             │ ├─41626 nginx: worker process
             │ ├─41627 nginx: worker process
             │ ├─41628 nginx: worker process
             │ ├─41629 nginx: worker process
             │ ├─41630 nginx: worker process
             │ ├─41631 nginx: worker process
             │ ├─41632 nginx: worker process
             │ ├─41633 nginx: worker process
             │ ├─41634 nginx: worker process
             │ ├─41635 nginx: worker process
             │ ├─41636 nginx: worker process
             │ ├─41637 nginx: worker process
             │ ├─41638 nginx: worker process
             │ ├─41639 nginx: worker process
             │ ├─41640 nginx: worker process
             │ ├─41641 nginx: worker process
             │ ├─41642 nginx: worker process
             │ ├─41643 nginx: worker process
             │ ├─41644 nginx: worker process
             │ ├─41645 nginx: worker process
             │ └─41646 nginx: cache manager process
             ├─0aa4eddeb5b3286b2b60bf5210fa41e2b8bc42f89958b23ba2038700b985324e
             │ └─13795 /pause
             ├─61c556810ef8dbb1aed7ea76255af0833233248a82e667d07e0b879d365b778f
             │ ├─30447 bash /usr/bin/runserver --mode=logstash --log4j_input_port=4560 --logshipper_input_port=5514 --es_cluster_http_port=9200
             │ ├─30473 python3 /usr/bin/runserver.py --mode=logstash --log4j_input_port=4560 --logshipper_input_port=5514 --es_cluster_http_port=9200
             │ ├─31370 java -Xms10m -Xmx100m -XX:+ExitOnOutOfMemoryError -XX:+UseSerialGC -XX:MinHeapFreeRatio=10 -XX:MaxHeapFreeRatio=20 -Dmodes=logstash -Delasticsearch.host=elasticsearch -Delasticsearch.http_port=9200 -Dzookeeper=zookeeper:2181 -jar /opt/harmonic/logstash-controller/LogstashController.jar
             │ ├─31515 rsyslogd -n
             │ └─44896 /opt/logstash/jdk/bin/java -Xms500m -Xmx500m -XX:+UseConcMarkSweepGC -XX:CMSInitiatingOccupancyFraction=75 -XX:+UseCMSInitiatingOccupancyOnly -Djava.awt.headless=true -Dfile.encoding=UTF-8 -Djdk.io.File.enableADS=true -Djruby.compile.invokedynamic=true -Djruby.jit.threshold=0 -Djruby.regexp.interruptible=true -XX:-HeapDumpOnOutOfMemoryError -Djava.security.egd=file:/dev/urandom -Dlog4j2.isThreadContextMapInheritable=true -Dlog4j2.formatMsgNoLookups=true -XX:OnOutOfMemoryError=shutdown_logstash -cp /opt/logstash/logstash-core/lib/jars/checker-qual-3.37.0.jar:/opt/logstash/logstash-core/lib/jars/commons-codec-1.14.jar:/opt/logstash/logstash-core/lib/jars/commons-compiler-3.1.0.jar:/opt/logstash/logstash-core/lib/jars/commons-logging-1.2.jar:/opt/logstash/logstash-core/lib/jars/error_prone_annotations-2.21.1.jar:/opt/logstash/logstash-core/lib/jars/failureaccess-1.0.1.jar:/opt/logstash/logstash-core/lib/jars/google-java-format-1.1.jar:/opt/logstash/logstash-core/lib/jars/guava-32.1.3-jre.jar:/opt/logstash/logstash-core/lib/jars/j2objc-annotations-2.8.jar:/opt/logstash/logstash-core/lib/jars/jackson-annotations-2.14.1.jar:/opt/logstash/logstash-core/lib/jars/jackson-core-2.14.1.jar:/opt/logstash/logstash-core/lib/jars/jackson-databind-2.14.1.jar:/opt/logstash/logstash-core/lib/jars/jackson-dataformat-cbor-2.14.1.jar:/opt/logstash/logstash-core/lib/jars/jackson-dataformat-yaml-2.14.1.jar:/opt/logstash/logstash-core/lib/jars/janino-3.1.0.jar:/opt/logstash/logstash-core/lib/jars/javassist-3.26.0-GA.jar:/opt/logstash/logstash-core/lib/jars/jruby-complete-9.2.20.1.jar:/opt/logstash/logstash-core/lib/jars/jsr305-3.0.2.jar:/opt/logstash/logstash-core/lib/jars/listenablefuture-9999.0-empty-to-avoid-conflict-with-guava.jar:/opt/logstash/logstash-core/lib/jars/log4j-api-2.17.1.jar:/opt/logstash/logstash-core/lib/jars/log4j-core-2.17.1.jar:/opt/logstash/logstash-core/lib/jars/log4j-jcl-2.17.1.jar:/opt/logstash/logstash-core/lib/jars/log4j-slf4j-impl-2.17.1.j
ar:/opt/logstash/logstash-core/lib/jars/logstash-core.jar:/opt/logstash/logstash-core/lib/jars/org.eclipse.core.commands-3.6.0.jar:/opt/logstash/logstash-core/lib/jars/org.eclipse.core.contenttype-3.4.100.jar:/opt/logstash/logstash-core/lib/jars/org.eclipse.core.expressions-3.4.300.jar:/opt/logstash/logstash-core/lib/jars/org.eclipse.core.filesystem-1.3.100.jar:/opt/logstash/logstash-core/lib/jars/org.eclipse.core.jobs-3.5.100.jar:/opt/logstash/logstash-core/lib/jars/org.eclipse.core.resources-3.7.100.jar:/opt/logstash/logstash-core/lib/jars/org.eclipse.core.runtime-3.7.0.jar:/opt/logstash/logstash-core/lib/jars/org.eclipse.equinox.app-1.3.100.jar:/opt/logstash/logstash-core/lib/jars/org.eclipse.equinox.common-3.6.0.jar:/opt/logstash/logstash-core/lib/jars/org.eclipse.equinox.preferences-3.4.1.jar:/opt/logstash/logstash-core/lib/jars/org.eclipse.equinox.registry-3.5.101.jar:/opt/logstash/logstash-core/lib/jars/org.eclipse.jdt.core-3.10.0.jar:/opt/logstash/logstash-core/lib/jars/org.eclipse.osgi-3.7.1.jar:/opt/logstash/logstash-core/lib/jars/org.eclipse.text-3.5.101.jar:/opt/logstash/logstash-core/lib/jars/reflections-0.9.11.jar:/opt/logstash/logstash-core/lib/jars/slf4j-api-1.7.30.jar:/opt/logstash/logstash-core/lib/jars/snakeyaml-1.33.jar org.logstash.Logstash -l /var/log -f /tmp/logstash-config --http.host 0.0.0.0 --pipeline.unsafe_shutdown -b 100 -u 5
             ├─4756130fd5ad708a904a99a0cad190c227812ba5f85f488054b470d5d9ab4fbb
             │ └─42622 /pause
             ├─acd873fc559f2941cb1dc8cf4355ff5bfd67df6a31d67ed10073845c4e40642b
             │ ├─46299 /bin/bash /opt/harmonic/casd/etc/start_casd.sh
             │ └─46472 /opt/harmonic/casd/casd
             ├─8c433fb4c3fc17ad704bf6e54b939e9997d87dd176929ea899a6a96130f63b4f
             │ └─43286 /pause
             ├─8e6998139a624e92be3d5aec5ae450aef662eb452d6c1b589e89721e082040a1
             │ └─47096 /pause
             ├─86235cf3a3f9e3cfc02fc7f4888ecb5e3cfa9c0ec97e426d2ae1ce316b32c197
             │ └─30781 /pause
             ├─b4f17c609632632a613da8bf68ce25789705dbcb014364fa3b7419ae46c316a7
             │ └─46581 /pause
             ├─93457ab6bb3bd0b0033873f9f7b5eb98d2daaa300555d4b050a89460ee8dbe06
             │ └─48052 /pause
             ├─ba239e148103274ece222541571cef554d8f50d8e19cc55f13679a6a1e2d2076
             │ └─16746 /pause
             ├─b746563060551612d49e0b3e5ebfc32a5e99f5518389847c64602c6be63c8a9d
             │ └─24747 /bin/prometheus-config-reloader --listen-address=:8080 --reload-url=http://localhost:9090/-/reload --config-file=/etc/prometheus/config/prometheus.yaml.gz --config-envsubst-file=/etc/prometheus/config_out/prometheus.env.yaml --watched-dir=/etc/prometheus/rules/prometheus-k8s-rulefiles-0
             ├─3ad1a6a103e84d28055fe44b77c7699f1de9ca54723853a94bc8fe5f7309cb85
             │ └─49632 /pause
             ├─386b74e96bce466252755a2db8492a0aa80fb81245a7e11ab02cadcb615d4e35
             │ ├─11029 sleep 5
             │ ├─14293 /opt/dektec/sbin/tini -g -- /opt/dektec/sbin/DtapiServiced-wrapper.sh
             │ ├─14678 /bin/bash /opt/dektec/sbin/DtapiServiced-wrapper.sh
             │ └─14866 /usr/sbin/DtapiServiced /var/run/DtapiServiced.pid
             ├─03b57a55d5b60b06581e669f2d1bb915d5866652d1fe45dababd7b973fa4a2d4
             │ ├─47131 /usr/bin/java -cp * com.harmonicinc.vos.hhp.MainControllerApp
             │ ├─47184 /usr/bin/python3 -s /usr/bin/supervisord -c /opt/harmonic/hhp-controller/etc/supervisord.conf
             │ └─47849 /opt/goofys/goofys -f -o nonempty --profile minio --endpoint http://minio-service:9000 vos-home-cluster1:app_data /opt/harmonic/vos/voshome/app_data
             ├─3ddcf70649aeed995d1fdce890e993ead6f5b073b7de2211d2f93de524d2a7b5
             │ ├─42895 /usr/bin/python3 -s /usr/bin/supervisord -c /supervisord.conf
             │ ├─43445 python3 /opt/harmonic/unified-origin-engine/bin/supervisord-event-handler
             │ ├─43447 /usr/share/filebeat/bin/filebeat --path.home /usr/share/filebeat --path.config /etc/filebeat --path.data /var/lib/filebeat --path.logs /var/log/filebeat -E output.logstash.hosts=['logstash:5044']
             │ ├─43449 java -Xms50m -Xmx512m -XX:+ExitOnOutOfMemoryError -Xloggc:/var/log/gc.log -XX:+UseGCLogFileRotation -XX:NumberOfGCLogFiles=3 -XX:GCLogFileSize=1M -classpath /opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/remote-daemon-executor.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/potf-server-config-lib.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/KMSClientLib.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/cpixlib.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kms-soap-stub.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jaxb-impl-2.2.5.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/commons-logging-1.2.1.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/xmlsec-1.5.6.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/commons-codec-1.14.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jersey-apache-connector-2.5.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/httpcore-4.3.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/httpclient-4.3.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/guava-14.0.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jackson-dataformat-xml-2.10.3.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/stax2-api-4.2.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/RmpControllerSDK.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/unified-origin-engine-library.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/StreamRmpControllerCmd.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/rmp-controller-models.jar:/opt/harmonic/MediaStreamPackageController/remote-da
emon-executor/lib/commons-math3-3.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/dnsjava-2.1.8.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jersey-container-grizzly2-http-2.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/grizzly-http-server-2.3.33.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jersey-container-jdk-http-2.7.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/protostuff-core-1.5.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/protostuff-runtime-1.5.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jackson-jaxrs-json-provider-2.10.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jersey-media-json-jackson-2.7.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jackson-jaxrs-1.9.13.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/velocity-1.7.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/joda-time-2.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/commons-validator-1.6.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/log4j-slf4j-impl-2.17.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/zookeeper-3.5.6.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/cron-utils-9.2.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-client-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-httpclient-okhttp-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-client-api-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/curator-recipes-5.0.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/curator-framework-5.0.0.jar:/o
pt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/curator-client-5.0.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-gatewayapi-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-resource-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-rbac-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-admissionregistration-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-apps-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-autoscaling-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-apiextensions-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-batch-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-certificates-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-coordination-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-discovery-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-events-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-extensions-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-flowcontrol-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-networking-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-metrics-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-policy-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-m
odel-scheduling-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-storageclass-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-node-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-core-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-common-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/slf4j-api-1.7.36.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/log4j-1.2-api-2.17.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/simpleclient_servlet-0.12.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/simpleclient_servlet_common-0.12.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/simpleclient_common-0.12.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/simpleclient-0.12.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jersey-server-2.7.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jersey-client-2.7.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jersey-common-2.7.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/javax.ws.rs-api-2.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/log4j-layout-template-json-2.17.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/asset-mgmt-grpc-library.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/commons-configuration-1.10.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/commons-lang-2.6.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/gson-2.2.4.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/commons-io-2.2.jar:/opt/harmonic/MediaStr
eamPackageController/remote-daemon-executor/lib/jackson-module-jaxb-annotations-2.10.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jackson-jaxrs-base-2.10.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/zjsonpatch-0.3.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jackson-dataformat-yaml-2.14.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jackson-datatype-jsr310-2.14.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jackson-databind-2.14.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jackson-annotations-2.14.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/property-binder-4.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/grizzly-http-2.3.33.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/hk2-locator-2.2.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/javax.inject-2.2.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/protostuff-collectionschema-1.5.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/protostuff-api-1.5.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jackson-xc-1.9.13.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jackson-mapper-asl-1.9.13.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jackson-core-asl-1.9.13.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/commons-beanutils-1.9.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/commons-collections-3.2.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/commons-digester-1.8.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/commons-logging-1.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daem
on-executor/lib/log4j-core-2.17.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/log4j-api-2.17.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/simpleclient_tracer_otel-0.12.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/simpleclient_tracer_otel_agent-0.12.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/zookeeper-jute-3.5.6.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/audience-annotations-0.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/netty-handler-4.1.42.Final.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/netty-transport-native-epoll-4.1.42.Final.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jackson-core-2.14.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jakarta.el-3.0.4.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/grizzly-framework-2.3.33.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/javax.annotation-api-1.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jersey-guava-2.7.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/hk2-api-2.2.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/osgi-resource-locator-1.0.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/validation-api-1.1.0.Final.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jakarta.xml.bind-api-2.3.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jakarta.activation-api-1.2.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/snakeyaml-engine-2.6.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/logging-interceptor-3.12.12.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/okht
tp-3.12.12.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/simpleclient_tracer_common-0.12.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/netty-codec-4.1.42.Final.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/netty-transport-native-unix-common-4.1.42.Final.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/netty-transport-4.1.42.Final.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/netty-buffer-4.1.42.Final.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/netty-resolver-4.1.42.Final.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/netty-common-4.1.42.Final.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/hk2-utils-2.2.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/aopalliance-repackaged-2.2.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/javassist-3.18.1-GA.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/snakeyaml-1.33.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/okio-1.15.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/guava-27.0.1-jre.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/javax.inject-1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/failureaccess-1.0.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/listenablefuture-9999.0-empty-to-avoid-conflict-with-guava.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jsr305-3.0.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/checker-qual-2.5.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/error_prone_annotations-2.2.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/j2objc-annotations-1.1.jar:/
opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/animal-sniffer-annotations-1.17.jar com.harmonicinc.remotedaemonexecutor.RemoteDaemonExecutor retention-worker MEDIAGRID backupStorageIsNotConfigured
             │ └─44323 /opt/harmonic/MediaStreamPackageController/rmp/RetentionWorker
             ├─0cc8fb35afbc569f879b5340cf38cbc9ddc87773fce6b67a9c7fc07172660322
             │ └─15073 /coredns -conf /etc/coredns/Corefile
             ├─fe68ab2c54c4e080fa59d2704214383fbd0d9afeee08b6f32fbda016dfe38caf
             │ └─45046 /pause
             ├─d8fdd5b3c74ad34b276029b4db554a5ac1526f30d1ed42e889a2d11810f97e27
             │ └─45806 /pause
             ├─204e2017be70a182f8109ac13fcc46cc8373a95064050894e81e2a102817d84b
             │ └─16955 /usr/local/bin/kube-rbac-proxy --logtostderr --secure-listen-address=:8443 --tls-cipher-suites=TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256,TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384,TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384,TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305,TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305 --upstream=http://127.0.0.1:8080/
       Unit boot.automount could not be found.
             ├─048a23daccd11173da5ccd3aed2475ac58bd2f5c48e564c56d3867c90e407e8b
             │ └─27170 /pause
             ├─794245c75ab20b17bcd8b43a373fad017d3bd653db684caf3add11d14754276c
             │ └─48108 /usr/bin/java -jar /opt/harmonic/ndcp-adapter/ndcp-adapter.jar
             └─900eab49b8c2ba07c64b2d9d3e37821d1dcf1ef16dcd585fb3feae19db6519a2
               ├─11142 /usr/bin/coreutils --coreutils-prog-shebang=sleep /usr/bin/sleep 1
               ├─43429 /usr/bin/python3 -s /usr/bin/supervisord -c /opt/harmonic/Atm/supervisord.conf
               ├─43722 /bin/bash /opt/harmonic/Atm/stop-supervisor.sh
               ├─43723 /usr/share/filebeat/bin/filebeat --path.home /usr/share/filebeat --path.config /etc/filebeat --path.data /var/lib/filebeat --path.logs /var/log/filebeat -E output.logstash.hosts=['logstash:5044']
               ├─43724 /bin/bash /opt/harmonic/Atm/shutdown_delay.sh
               └─43725 /opt/harmonic/Atm/atm

● proc-sys-fs-binfmt_misc.automount - Arbitrary Executable File Formats File System Automount Point
   Loaded: loaded (/usr/lib/systemd/system/proc-sys-fs-binfmt_misc.automount; static; vendor preset: disabled)
   Active: active (running) since Mon 2025-11-17 16:03:50 UTC; 21h ago
    Where: /proc/sys/fs/binfmt_misc
     Docs: https://www.kernel.org/doc/html/latest/admin-guide/binfmt-misc.html
           https://www.freedesktop.org/wiki/Software/systemd/APIFileSystems

Nov 18 13:55:35 XOSEncoder-01 systemd[1]: proc-sys-fs-binfmt_misc.automount: Got automount request for /proc/sys/fs/binfmt_misc, triggered by 9604 (sysctl)

● dev-disk-by\x2did-ata\x2dMK000480GXNXB_241247BD3772.device - MK000480GXNXB
   Follow: unit currently follows state of sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:2:1-0:2:1:0-block-sdb.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:49 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:2:1/0:2:1:0/block/sdb

● dev-disk-by\x2did-ata\x2dMK000480GXNXB_241247BD3772\x2dpart1.device - MK000480GXNXB BOOT
   Follow: unit currently follows state of sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:2:1-0:2:1:0-block-sdb-sdb1.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:49 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:2:1/0:2:1:0/block/sdb/sdb1

● dev-disk-by\x2did-ata\x2dMK000480GXNXB_241247BD3772\x2dpart2.device - MK000480GXNXB PERSISTENT
   Follow: unit currently follows state of sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:2:1-0:2:1:0-block-sdb-sdb2.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:49 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:2:1/0:2:1:0/block/sdb/sdb2

● dev-disk-by\x2did-ata\x2dMK000480GXNXB_241247BD3772\x2dpart3.device - MK000480GXNXB ext4
   Follow: unit currently follows state of sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:2:1-0:2:1:0-block-sdb-sdb3.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:49 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:2:1/0:2:1:0/block/sdb/sdb3

● dev-disk-by\x2did-ata\x2dMK000480GXNXB_241247BD3772\x2dpart4.device - MK000480GXNXB ext4
   Follow: unit currently follows state of sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:2:1-0:2:1:0-block-sdb-sdb4.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:49 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:2:1/0:2:1:0/block/sdb/sdb4

● dev-disk-by\x2did-ata\x2dMK000480GXNXB_241247BD3772\x2dpart5.device - MK000480GXNXB ROOTA
   Follow: unit currently follows state of sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:2:1-0:2:1:0-block-sdb-sdb5.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:49 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:2:1/0:2:1:0/block/sdb/sdb5

● dev-disk-by\x2did-ata\x2dMK000480GXNXB_241247BD3772\x2dpart6.device - MK000480GXNXB ROOTB
   Follow: unit currently follows state of sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:2:1-0:2:1:0-block-sdb-sdb6.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:49 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:2:1/0:2:1:0/block/sdb/sdb6

● dev-disk-by\x2did-ata\x2dMK000480GXNXB_241247BD3772\x2dpart7.device - MK000480GXNXB DATA
   Follow: unit currently follows state of sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:2:1-0:2:1:0-block-sdb-sdb7.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:49 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:2:1/0:2:1:0/block/sdb/sdb7

● dev-disk-by\x2did-dm\x2dname\x2dvos_docker.device - /dev/disk/by-id/dm-name-vos_docker
   Follow: unit currently follows state of sys-devices-virtual-block-dm\x2d0.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:04:08 UTC; 21h ago
   Device: /sys/devices/virtual/block/dm-0

● dev-disk-by\x2did-scsi\x2d3500a075147bd3772.device - MK000480GXNXB
   Follow: unit currently follows state of sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:2:1-0:2:1:0-block-sdb.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:51 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:2:1/0:2:1:0/block/sdb

● dev-disk-by\x2did-scsi\x2d3500a075147bd3772\x2dpart1.device - MK000480GXNXB BOOT
   Follow: unit currently follows state of sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:2:1-0:2:1:0-block-sdb-sdb1.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:51 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:2:1/0:2:1:0/block/sdb/sdb1

● dev-disk-by\x2did-scsi\x2d3500a075147bd3772\x2dpart2.device - MK000480GXNXB PERSISTENT
   Follow: unit currently follows state of sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:2:1-0:2:1:0-block-sdb-sdb2.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:51 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:2:1/0:2:1:0/block/sdb/sdb2

● dev-disk-by\x2did-scsi\x2d3500a075147bd3772\x2dpart3.device - MK000480GXNXB ext4
   Follow: unit currently follows state of sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:2:1-0:2:1:0-block-sdb-sdb3.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:51 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:2:1/0:2:1:0/block/sdb/sdb3

● dev-disk-by\x2did-scsi\x2d3500a075147bd3772\x2dpart4.device - MK000480GXNXB ext4
   Follow: unit currently follows state of sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:2:1-0:2:1:0-block-sdb-sdb4.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:51 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:2:1/0:2:1:0/block/sdb/sdb4

● dev-disk-by\x2did-scsi\x2d3500a075147bd3772\x2dpart5.device - MK000480GXNXB ROOTA
   Follow: unit currently follows state of sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:2:1-0:2:1:0-block-sdb-sdb5.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:51 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:2:1/0:2:1:0/block/sdb/sdb5

● dev-disk-by\x2did-scsi\x2d3500a075147bd3772\x2dpart6.device - MK000480GXNXB ROOTB
   Follow: unit currently follows state of sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:2:1-0:2:1:0-block-sdb-sdb6.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:51 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:2:1/0:2:1:0/block/sdb/sdb6

● dev-disk-by\x2did-scsi\x2d3500a075147bd3772\x2dpart7.device - MK000480GXNXB DATA
   Follow: unit currently follows state of sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:2:1-0:2:1:0-block-sdb-sdb7.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:51 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:2:1/0:2:1:0/block/sdb/sdb7

● dev-disk-by\x2did-scsi\x2d3600062b21e66d6402f41c79bd6c6aa37.device - MR416i-p_Gen11
   Follow: unit currently follows state of sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:3:110-0:3:110:0-block-sda.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:49 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:3:110/0:3:110:0/block/sda

● dev-disk-by\x2did-scsi\x2d3600062b21e66d6402f41c79bd6c6aa37\x2dpart1.device - MR416i-p_Gen11 DATA_DRIVE
   Follow: unit currently follows state of sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:3:110-0:3:110:0-block-sda-sda1.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:49 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:3:110/0:3:110:0/block/sda/sda1

● dev-disk-by\x2did-scsi\x2dSATA_MK000480GXNXB_241247BD3772.device - MK000480GXNXB
   Follow: unit currently follows state of sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:2:1-0:2:1:0-block-sdb.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:51 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:2:1/0:2:1:0/block/sdb

● dev-disk-by\x2did-scsi\x2dSATA_MK000480GXNXB_241247BD3772\x2dpart1.device - MK000480GXNXB BOOT
   Follow: unit currently follows state of sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:2:1-0:2:1:0-block-sdb-sdb1.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:51 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:2:1/0:2:1:0/block/sdb/sdb1

● dev-disk-by\x2did-scsi\x2dSATA_MK000480GXNXB_241247BD3772\x2dpart2.device - MK000480GXNXB PERSISTENT
   Follow: unit currently follows state of sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:2:1-0:2:1:0-block-sdb-sdb2.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:51 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:2:1/0:2:1:0/block/sdb/sdb2

● dev-disk-by\x2did-scsi\x2dSATA_MK000480GXNXB_241247BD3772\x2dpart3.device - MK000480GXNXB ext4
   Follow: unit currently follows state of sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:2:1-0:2:1:0-block-sdb-sdb3.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:51 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:2:1/0:2:1:0/block/sdb/sdb3

● dev-disk-by\x2did-scsi\x2dSATA_MK000480GXNXB_241247BD3772\x2dpart4.device - MK000480GXNXB ext4
   Follow: unit currently follows state of sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:2:1-0:2:1:0-block-sdb-sdb4.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:51 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:2:1/0:2:1:0/block/sdb/sdb4

● dev-disk-by\x2did-scsi\x2dSATA_MK000480GXNXB_241247BD3772\x2dpart5.device - MK000480GXNXB ROOTA
   Follow: unit currently follows state of sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:2:1-0:2:1:0-block-sdb-sdb5.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:51 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:2:1/0:2:1:0/block/sdb/sdb5

● dev-disk-by\x2did-scsi\x2dSATA_MK000480GXNXB_241247BD3772\x2dpart6.device - MK000480GXNXB ROOTB
   Follow: unit currently follows state of sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:2:1-0:2:1:0-block-sdb-sdb6.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:51 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:2:1/0:2:1:0/block/sdb/sdb6

● dev-disk-by\x2did-scsi\x2dSATA_MK000480GXNXB_241247BD3772\x2dpart7.device - MK000480GXNXB DATA
   Follow: unit currently follows state of sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:2:1-0:2:1:0-block-sdb-sdb7.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:51 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:2:1/0:2:1:0/block/sdb/sdb7

● dev-disk-by\x2did-scsi\x2dSHPE_MR416i\x2dp_Gen11_0037aac6d69bc7412f40d6661eb26200.device - MR416i-p_Gen11
   Follow: unit currently follows state of sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:3:110-0:3:110:0-block-sda.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:51 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:3:110/0:3:110:0/block/sda

● dev-disk-by\x2did-scsi\x2dSHPE_MR416i\x2dp_Gen11_0037aac6d69bc7412f40d6661eb26200\x2dpart1.device - MR416i-p_Gen11 DATA_DRIVE
   Follow: unit currently follows state of sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:3:110-0:3:110:0-block-sda-sda1.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:51 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:3:110/0:3:110:0/block/sda/sda1

● dev-disk-by\x2did-wwn\x2d0x500a075147bd3772.device - MK000480GXNXB
   Follow: unit currently follows state of sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:2:1-0:2:1:0-block-sdb.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:49 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:2:1/0:2:1:0/block/sdb

● dev-disk-by\x2did-wwn\x2d0x500a075147bd3772\x2dpart1.device - MK000480GXNXB BOOT
   Follow: unit currently follows state of sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:2:1-0:2:1:0-block-sdb-sdb1.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:49 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:2:1/0:2:1:0/block/sdb/sdb1

● dev-disk-by\x2did-wwn\x2d0x500a075147bd3772\x2dpart2.device - MK000480GXNXB PERSISTENT
   Follow: unit currently follows state of sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:2:1-0:2:1:0-block-sdb-sdb2.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:49 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:2:1/0:2:1:0/block/sdb/sdb2

● dev-disk-by\x2did-wwn\x2d0x500a075147bd3772\x2dpart3.device - MK000480GXNXB ext4
   Follow: unit currently follows state of sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:2:1-0:2:1:0-block-sdb-sdb3.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:49 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:2:1/0:2:1:0/block/sdb/sdb3

● dev-disk-by\x2did-wwn\x2d0x500a075147bd3772\x2dpart4.device - MK000480GXNXB ext4
   Follow: unit currently follows state of sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:2:1-0:2:1:0-block-sdb-sdb4.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:49 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:2:1/0:2:1:0/block/sdb/sdb4

● dev-disk-by\x2did-wwn\x2d0x500a075147bd3772\x2dpart5.device - MK000480GXNXB ROOTA
   Follow: unit currently follows state of sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:2:1-0:2:1:0-block-sdb-sdb5.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:49 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:2:1/0:2:1:0/block/sdb/sdb5

● dev-disk-by\x2did-wwn\x2d0x500a075147bd3772\x2dpart6.device - MK000480GXNXB ROOTB
   Follow: unit currently follows state of sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:2:1-0:2:1:0-block-sdb-sdb6.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:49 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:2:1/0:2:1:0/block/sdb/sdb6

● dev-disk-by\x2did-wwn\x2d0x500a075147bd3772\x2dpart7.device - MK000480GXNXB DATA
   Follow: unit currently follows state of sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:2:1-0:2:1:0-block-sdb-sdb7.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:49 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:2:1/0:2:1:0/block/sdb/sdb7

● dev-disk-by\x2did-wwn\x2d0x600062b21e66d6402f41c79bd6c6aa37.device - MR416i-p_Gen11
   Follow: unit currently follows state of sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:3:110-0:3:110:0-block-sda.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:49 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:3:110/0:3:110:0/block/sda

● dev-disk-by\x2did-wwn\x2d0x600062b21e66d6402f41c79bd6c6aa37\x2dpart1.device - MR416i-p_Gen11 DATA_DRIVE
   Follow: unit currently follows state of sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:3:110-0:3:110:0-block-sda-sda1.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:49 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:3:110/0:3:110:0/block/sda/sda1

● dev-disk-by\x2dlabel-BOOT.device - MK000480GXNXB BOOT
   Follow: unit currently follows state of sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:2:1-0:2:1:0-block-sdb-sdb1.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:49 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:2:1/0:2:1:0/block/sdb/sdb1

● dev-disk-by\x2dlabel-DATA.device - MK000480GXNXB DATA
   Follow: unit currently follows state of sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:2:1-0:2:1:0-block-sdb-sdb7.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:49 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:2:1/0:2:1:0/block/sdb/sdb7

● dev-disk-by\x2dlabel-DATA_DRIVE.device - MR416i-p_Gen11 DATA_DRIVE
   Follow: unit currently follows state of sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:3:110-0:3:110:0-block-sda-sda1.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:49 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:3:110/0:3:110:0/block/sda/sda1

● dev-disk-by\x2dlabel-PERSISTENT.device - MK000480GXNXB PERSISTENT
   Follow: unit currently follows state of sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:2:1-0:2:1:0-block-sdb-sdb2.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:49 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:2:1/0:2:1:0/block/sdb/sdb2

● dev-disk-by\x2dlabel-ROOTA.device - MK000480GXNXB ROOTA
   Follow: unit currently follows state of sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:2:1-0:2:1:0-block-sdb-sdb5.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:49 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:2:1/0:2:1:0/block/sdb/sdb5

● dev-disk-by\x2dlabel-ROOTB.device - MK000480GXNXB ROOTB
   Follow: unit currently follows state of sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:2:1-0:2:1:0-block-sdb-sdb6.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:49 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:2:1/0:2:1:0/block/sdb/sdb6

● dev-disk-by\x2dpartlabel-ext4.device - MK000480GXNXB PERSISTENT
   Follow: unit currently follows state of sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:2:1-0:2:1:0-block-sdb-sdb2.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:49 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:2:1/0:2:1:0/block/sdb/sdb2

● dev-disk-by\x2dpartlabel-fat32.device - MK000480GXNXB BOOT
   Follow: unit currently follows state of sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:2:1-0:2:1:0-block-sdb-sdb1.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:49 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:2:1/0:2:1:0/block/sdb/sdb1

● dev-disk-by\x2dpartuuid-07482439\x2db430\x2d484c\x2dbd1e\x2daad44370930e.device - MK000480GXNXB ROOTB
   Follow: unit currently follows state of sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:2:1-0:2:1:0-block-sdb-sdb6.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:49 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:2:1/0:2:1:0/block/sdb/sdb6

● dev-disk-by\x2dpartuuid-3c46637c\x2dca0a\x2d4932\x2d91c8\x2d9308db470930.device - MK000480GXNXB ext4
   Follow: unit currently follows state of sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:2:1-0:2:1:0-block-sdb-sdb3.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:49 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:2:1/0:2:1:0/block/sdb/sdb3

● dev-disk-by\x2dpartuuid-9dbbbd55\x2da976\x2d4fd2\x2d9053\x2de3c06c9325a4.device - MK000480GXNXB ext4
   Follow: unit currently follows state of sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:2:1-0:2:1:0-block-sdb-sdb4.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:49 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:2:1/0:2:1:0/block/sdb/sdb4

● dev-disk-by\x2dpartuuid-a5159b56\x2db49a\x2d455c\x2d9a5e\x2dc058a177412b.device - MK000480GXNXB BOOT
   Follow: unit currently follows state of sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:2:1-0:2:1:0-block-sdb-sdb1.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:49 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:2:1/0:2:1:0/block/sdb/sdb1

● dev-disk-by\x2dpartuuid-bb1e0a4b\x2d193d\x2d4a9f\x2d9c94\x2d67d5f804e3d1.device - MK000480GXNXB DATA
   Follow: unit currently follows state of sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:2:1-0:2:1:0-block-sdb-sdb7.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:49 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:2:1/0:2:1:0/block/sdb/sdb7

● dev-disk-by\x2dpartuuid-c7da38b3\x2dbb5b\x2d4f90\x2d8278\x2d5a396d407c05.device - MR416i-p_Gen11 DATA_DRIVE
   Follow: unit currently follows state of sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:3:110-0:3:110:0-block-sda-sda1.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:49 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:3:110/0:3:110:0/block/sda/sda1

● dev-disk-by\x2dpartuuid-e0bea7fe\x2d7149\x2d4d58\x2d9e00\x2d61ac4bdd7819.device - MK000480GXNXB PERSISTENT
   Follow: unit currently follows state of sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:2:1-0:2:1:0-block-sdb-sdb2.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:49 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:2:1/0:2:1:0/block/sdb/sdb2

● dev-disk-by\x2dpartuuid-fec2abfd\x2d6997\x2d40eb\x2d93e1\x2d72ae0cb6fee1.device - MK000480GXNXB ROOTA
   Follow: unit currently follows state of sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:2:1-0:2:1:0-block-sdb-sdb5.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:49 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:2:1/0:2:1:0/block/sdb/sdb5

● dev-disk-by\x2dpath-pci\x2d0000:07:00.0\x2dscsi\x2d0:2:1:0.device - MK000480GXNXB
   Follow: unit currently follows state of sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:2:1-0:2:1:0-block-sdb.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:49 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:2:1/0:2:1:0/block/sdb

● dev-disk-by\x2dpath-pci\x2d0000:07:00.0\x2dscsi\x2d0:2:1:0\x2dpart1.device - MK000480GXNXB BOOT
   Follow: unit currently follows state of sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:2:1-0:2:1:0-block-sdb-sdb1.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:49 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:2:1/0:2:1:0/block/sdb/sdb1

● dev-disk-by\x2dpath-pci\x2d0000:07:00.0\x2dscsi\x2d0:2:1:0\x2dpart2.device - MK000480GXNXB PERSISTENT
   Follow: unit currently follows state of sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:2:1-0:2:1:0-block-sdb-sdb2.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:49 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:2:1/0:2:1:0/block/sdb/sdb2

● dev-disk-by\x2dpath-pci\x2d0000:07:00.0\x2dscsi\x2d0:2:1:0\x2dpart3.device - MK000480GXNXB ext4
   Follow: unit currently follows state of sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:2:1-0:2:1:0-block-sdb-sdb3.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:49 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:2:1/0:2:1:0/block/sdb/sdb3

● dev-disk-by\x2dpath-pci\x2d0000:07:00.0\x2dscsi\x2d0:2:1:0\x2dpart4.device - MK000480GXNXB ext4
   Follow: unit currently follows state of sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:2:1-0:2:1:0-block-sdb-sdb4.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:49 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:2:1/0:2:1:0/block/sdb/sdb4

● dev-disk-by\x2dpath-pci\x2d0000:07:00.0\x2dscsi\x2d0:2:1:0\x2dpart5.device - MK000480GXNXB ROOTA
   Follow: unit currently follows state of sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:2:1-0:2:1:0-block-sdb-sdb5.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:49 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:2:1/0:2:1:0/block/sdb/sdb5

● dev-disk-by\x2dpath-pci\x2d0000:07:00.0\x2dscsi\x2d0:2:1:0\x2dpart6.device - MK000480GXNXB ROOTB
   Follow: unit currently follows state of sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:2:1-0:2:1:0-block-sdb-sdb6.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:49 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:2:1/0:2:1:0/block/sdb/sdb6

● dev-disk-by\x2dpath-pci\x2d0000:07:00.0\x2dscsi\x2d0:2:1:0\x2dpart7.device - MK000480GXNXB DATA
   Follow: unit currently follows state of sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:2:1-0:2:1:0-block-sdb-sdb7.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:49 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:2:1/0:2:1:0/block/sdb/sdb7

● dev-disk-by\x2dpath-pci\x2d0000:07:00.0\x2dscsi\x2d0:3:110:0.device - MR416i-p_Gen11
   Follow: unit currently follows state of sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:3:110-0:3:110:0-block-sda.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:49 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:3:110/0:3:110:0/block/sda

● dev-disk-by\x2dpath-pci\x2d0000:07:00.0\x2dscsi\x2d0:3:110:0\x2dpart1.device - MR416i-p_Gen11 DATA_DRIVE
   Follow: unit currently follows state of sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:3:110-0:3:110:0-block-sda-sda1.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:49 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:3:110/0:3:110:0/block/sda/sda1

● dev-disk-by\x2duuid-00e4c20d\x2dd888\x2d42bd\x2db008\x2ddf2db67929f9.device - MK000480GXNXB PERSISTENT
   Follow: unit currently follows state of sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:2:1-0:2:1:0-block-sdb-sdb2.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:49 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:2:1/0:2:1:0/block/sdb/sdb2

● dev-disk-by\x2duuid-23a241d1\x2df0e6\x2d411b\x2d9958\x2d270d17c6b19e.device - /dev/disk/by-uuid/23a241d1-f0e6-411b-9958-270d17c6b19e
   Follow: unit currently follows state of sys-devices-virtual-block-loop0.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:04:08 UTC; 21h ago
   Device: /sys/devices/virtual/block/loop0

● dev-disk-by\x2duuid-2D00\x2d3E52.device - MK000480GXNXB BOOT
   Follow: unit currently follows state of sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:2:1-0:2:1:0-block-sdb-sdb1.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:49 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:2:1/0:2:1:0/block/sdb/sdb1

● dev-disk-by\x2duuid-469aee03\x2d0c73\x2d4ef8\x2d9fe3\x2da9986343bc7d.device - MR416i-p_Gen11 DATA_DRIVE
   Follow: unit currently follows state of sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:3:110-0:3:110:0-block-sda-sda1.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:49 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:3:110/0:3:110:0/block/sda/sda1

● dev-disk-by\x2duuid-5cb91fbe\x2d5e7b\x2d44ef\x2d9e16\x2de9b0cd6acc96.device - MK000480GXNXB ROOTB
   Follow: unit currently follows state of sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:2:1-0:2:1:0-block-sdb-sdb6.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:49 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:2:1/0:2:1:0/block/sdb/sdb6

Nov 17 16:03:49 localhost systemd[1]: Found device MK000480GXNXB ROOTB.

● dev-disk-by\x2duuid-a0383805\x2d27e2\x2d4c5a\x2dabe2\x2dc99e6770b7c2.device - MK000480GXNXB DATA
   Follow: unit currently follows state of sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:2:1-0:2:1:0-block-sdb-sdb7.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:49 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:2:1/0:2:1:0/block/sdb/sdb7

● dev-disk-by\x2duuid-b9b40be0\x2da809\x2d4f49\x2dab72\x2d53f439c34ac7.device - MK000480GXNXB ROOTA
   Follow: unit currently follows state of sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:2:1-0:2:1:0-block-sdb-sdb5.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:49 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:2:1/0:2:1:0/block/sdb/sdb5

● dev-dm\x2d0.device - /dev/dm-0
   Follow: unit currently follows state of sys-devices-virtual-block-dm\x2d0.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:04:08 UTC; 21h ago
   Device: /sys/devices/virtual/block/dm-0

● dev-loop0.device - /dev/loop0
   Follow: unit currently follows state of sys-devices-virtual-block-loop0.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:04:08 UTC; 21h ago
   Device: /sys/devices/virtual/block/loop0

● dev-loop1.device - /dev/loop1
   Follow: unit currently follows state of sys-devices-virtual-block-loop1.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:04:08 UTC; 21h ago
   Device: /sys/devices/virtual/block/loop1

● dev-mapper-vos_docker.device - /dev/mapper/vos_docker
   Follow: unit currently follows state of sys-devices-virtual-block-dm\x2d0.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:04:08 UTC; 21h ago
   Device: /sys/devices/virtual/block/dm-0

● dev-rfkill.device - /dev/rfkill
   Follow: unit currently follows state of sys-devices-virtual-misc-rfkill.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:04:09 UTC; 21h ago
   Device: /sys/devices/virtual/misc/rfkill

● dev-sda.device - MR416i-p_Gen11
   Follow: unit currently follows state of sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:3:110-0:3:110:0-block-sda.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:49 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:3:110/0:3:110:0/block/sda

● dev-sda1.device - MR416i-p_Gen11 DATA_DRIVE
   Follow: unit currently follows state of sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:3:110-0:3:110:0-block-sda-sda1.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:49 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:3:110/0:3:110:0/block/sda/sda1

● dev-sdb.device - MK000480GXNXB
   Follow: unit currently follows state of sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:2:1-0:2:1:0-block-sdb.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:49 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:2:1/0:2:1:0/block/sdb

● dev-sdb1.device - MK000480GXNXB BOOT
   Follow: unit currently follows state of sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:2:1-0:2:1:0-block-sdb-sdb1.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:49 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:2:1/0:2:1:0/block/sdb/sdb1

● dev-sdb2.device - MK000480GXNXB PERSISTENT
   Follow: unit currently follows state of sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:2:1-0:2:1:0-block-sdb-sdb2.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:49 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:2:1/0:2:1:0/block/sdb/sdb2

● dev-sdb3.device - MK000480GXNXB ext4
   Follow: unit currently follows state of sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:2:1-0:2:1:0-block-sdb-sdb3.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:49 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:2:1/0:2:1:0/block/sdb/sdb3

● dev-sdb4.device - MK000480GXNXB ext4
   Follow: unit currently follows state of sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:2:1-0:2:1:0-block-sdb-sdb4.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:49 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:2:1/0:2:1:0/block/sdb/sdb4

● dev-sdb5.device - MK000480GXNXB ROOTA
   Follow: unit currently follows state of sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:2:1-0:2:1:0-block-sdb-sdb5.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:49 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:2:1/0:2:1:0/block/sdb/sdb5

● dev-sdb6.device - MK000480GXNXB ROOTB
   Follow: unit currently follows state of sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:2:1-0:2:1:0-block-sdb-sdb6.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:49 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:2:1/0:2:1:0/block/sdb/sdb6

● dev-sdb7.device - MK000480GXNXB DATA
   Follow: unit currently follows state of sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:2:1-0:2:1:0-block-sdb-sdb7.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:49 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:2:1/0:2:1:0/block/sdb/sdb7

● dev-ttyS0.device - /dev/ttyS0
   Follow: unit currently follows state of sys-devices-pnp0-00:03-tty-ttyS0.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:48 UTC; 21h ago
   Device: /sys/devices/pnp0/00:03/tty/ttyS0

● dev-ttyS1.device - /dev/ttyS1
   Follow: unit currently follows state of sys-devices-pnp0-00:02-tty-ttyS1.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:48 UTC; 21h ago
   Device: /sys/devices/pnp0/00:02/tty/ttyS1

● dev-ttyS2.device - /dev/ttyS2
   Follow: unit currently follows state of sys-devices-platform-serial8250-tty-ttyS2.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:48 UTC; 21h ago
   Device: /sys/devices/platform/serial8250/tty/ttyS2

● dev-ttyS3.device - /dev/ttyS3
   Follow: unit currently follows state of sys-devices-platform-serial8250-tty-ttyS3.device
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:48 UTC; 21h ago
   Device: /sys/devices/platform/serial8250/tty/ttyS3

● sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:2:1-0:2:1:0-block-sdb-sdb1.device - MK000480GXNXB BOOT
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:49 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:2:1/0:2:1:0/block/sdb/sdb1

● sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:2:1-0:2:1:0-block-sdb-sdb2.device - MK000480GXNXB PERSISTENT
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:49 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:2:1/0:2:1:0/block/sdb/sdb2

● sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:2:1-0:2:1:0-block-sdb-sdb3.device - MK000480GXNXB ext4
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:49 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:2:1/0:2:1:0/block/sdb/sdb3

● sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:2:1-0:2:1:0-block-sdb-sdb4.device - MK000480GXNXB ext4
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:49 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:2:1/0:2:1:0/block/sdb/sdb4

● sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:2:1-0:2:1:0-block-sdb-sdb5.device - MK000480GXNXB ROOTA
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:49 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:2:1/0:2:1:0/block/sdb/sdb5

● sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:2:1-0:2:1:0-block-sdb-sdb6.device - MK000480GXNXB ROOTB
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:49 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:2:1/0:2:1:0/block/sdb/sdb6

● sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:2:1-0:2:1:0-block-sdb-sdb7.device - MK000480GXNXB DATA
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:49 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:2:1/0:2:1:0/block/sdb/sdb7

● sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:2:1-0:2:1:0-block-sdb.device - MK000480GXNXB
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:49 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:2:1/0:2:1:0/block/sdb

● sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:3:110-0:3:110:0-block-sda-sda1.device - MR416i-p_Gen11 DATA_DRIVE
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:49 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:3:110/0:3:110:0/block/sda/sda1

● sys-devices-pci0000:00-0000:00:03.1-0000:07:00.0-host0-target0:3:110-0:3:110:0-block-sda.device - MR416i-p_Gen11
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:49 UTC; 21h ago
   Device: /sys/devices/pci0000:00/0000:00:03.1/0000:07:00.0/host0/target0:3:110/0:3:110:0/block/sda

● sys-devices-pci0000:40-0000:40:03.1-0000:46:00.0-net-ens22f0.device - I350 Gigabit Network Connection (Ethernet Network Adapter I350-T4 for OCP NIC 3.0)
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:52 UTC; 21h ago
   Device: /sys/devices/pci0000:40/0000:40:03.1/0000:46:00.0/net/ens22f0

● sys-devices-pci0000:40-0000:40:03.1-0000:46:00.1-net-ens22f1.device - I350 Gigabit Network Connection (Ethernet Network Adapter I350-T4 for OCP NIC 3.0)
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:52 UTC; 21h ago
   Device: /sys/devices/pci0000:40/0000:40:03.1/0000:46:00.1/net/ens22f1

● sys-devices-pci0000:40-0000:40:03.1-0000:46:00.2-net-ens22f2.device - I350 Gigabit Network Connection (Ethernet Network Adapter I350-T4 for OCP NIC 3.0)
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:52 UTC; 21h ago
   Device: /sys/devices/pci0000:40/0000:40:03.1/0000:46:00.2/net/ens22f2

● sys-devices-pci0000:40-0000:40:03.1-0000:46:00.3-net-ens22f3.device - I350 Gigabit Network Connection (Ethernet Network Adapter I350-T4 for OCP NIC 3.0)
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:52 UTC; 21h ago
   Device: /sys/devices/pci0000:40/0000:40:03.1/0000:46:00.3/net/ens22f3

● sys-devices-pci0000:80-0000:80:03.1-0000:86:00.0-net-ens1f0np0.device - MT27800 Family [ConnectX-5] (ConnectX®-5 EN network interface card, 10/25GbE dual-port SFP28, PCIe3.0 x8, tall bracket ; MCX512A-ACAT)
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:53 UTC; 21h ago
   Device: /sys/devices/pci0000:80/0000:80:03.1/0000:86:00.0/net/ens1f0np0

● sys-devices-pci0000:80-0000:80:03.1-0000:86:00.1-net-ens1f1np1.device - MT27800 Family [ConnectX-5] (ConnectX®-5 EN network interface card, 10/25GbE dual-port SFP28, PCIe3.0 x8, tall bracket ; MCX512A-ACAT)
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:53 UTC; 21h ago
   Device: /sys/devices/pci0000:80/0000:80:03.1/0000:86:00.1/net/ens1f1np1

● sys-devices-pci0000:c0-0000:c0:03.1-0000:c8:00.0-net-ens21f0.device - Ethernet Controller X710 for 10GbE SFP+ (Ethernet Network Adapter X710-2 for OCP NIC 3.0)
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:52 UTC; 21h ago
   Device: /sys/devices/pci0000:c0/0000:c0:03.1/0000:c8:00.0/net/ens21f0

● sys-devices-pci0000:c0-0000:c0:03.1-0000:c8:00.1-net-ens21f1.device - Ethernet Controller X710 for 10GbE SFP+ (Ethernet Converged Network Adapter X710)
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:51 UTC; 21h ago
   Device: /sys/devices/pci0000:c0/0000:c0:03.1/0000:c8:00.1/net/ens21f1

● sys-devices-platform-serial8250-tty-ttyS2.device - /sys/devices/platform/serial8250/tty/ttyS2
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:48 UTC; 21h ago
   Device: /sys/devices/platform/serial8250/tty/ttyS2

● sys-devices-platform-serial8250-tty-ttyS3.device - /sys/devices/platform/serial8250/tty/ttyS3
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:48 UTC; 21h ago
   Device: /sys/devices/platform/serial8250/tty/ttyS3

● sys-devices-pnp0-00:02-tty-ttyS1.device - /sys/devices/pnp0/00:02/tty/ttyS1
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:48 UTC; 21h ago
   Device: /sys/devices/pnp0/00:02/tty/ttyS1

● sys-devices-pnp0-00:03-tty-ttyS0.device - /sys/devices/pnp0/00:03/tty/ttyS0
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:48 UTC; 21h ago
   Device: /sys/devices/pnp0/00:03/tty/ttyS0

● sys-devices-virtual-block-dm\x2d0.device - /sys/devices/virtual/block/dm-0
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:04:08 UTC; 21h ago
   Device: /sys/devices/virtual/block/dm-0

● sys-devices-virtual-block-loop0.device - /sys/devices/virtual/block/loop0
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:04:08 UTC; 21h ago
   Device: /sys/devices/virtual/block/loop0

● sys-devices-virtual-block-loop1.device - /sys/devices/virtual/block/loop1
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:04:08 UTC; 21h ago
   Device: /sys/devices/virtual/block/loop1

● sys-devices-virtual-misc-rfkill.device - /sys/devices/virtual/misc/rfkill
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:04:09 UTC; 21h ago
   Device: /sys/devices/virtual/misc/rfkill

● sys-devices-virtual-net-cni0.device - /sys/devices/virtual/net/cni0
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:05:15 UTC; 21h ago
   Device: /sys/devices/virtual/net/cni0

● sys-devices-virtual-net-kub1.device - /sys/devices/virtual/net/kub1
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:04:08 UTC; 21h ago
   Device: /sys/devices/virtual/net/kub1

● sys-devices-virtual-net-net1.device - /sys/devices/virtual/net/net1
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:04:17 UTC; 21h ago
   Device: /sys/devices/virtual/net/net1

● sys-devices-virtual-net-net10.device - /sys/devices/virtual/net/net10
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:04:17 UTC; 21h ago
   Device: /sys/devices/virtual/net/net10

● sys-devices-virtual-net-net2.device - /sys/devices/virtual/net/net2
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:04:17 UTC; 21h ago
   Device: /sys/devices/virtual/net/net2

● sys-devices-virtual-net-net3.device - /sys/devices/virtual/net/net3
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:04:17 UTC; 21h ago
   Device: /sys/devices/virtual/net/net3

● sys-devices-virtual-net-net4.device - /sys/devices/virtual/net/net4
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:04:17 UTC; 21h ago
   Device: /sys/devices/virtual/net/net4

● sys-devices-virtual-net-net5.device - /sys/devices/virtual/net/net5
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:04:17 UTC; 21h ago
   Device: /sys/devices/virtual/net/net5

● sys-devices-virtual-net-net6.device - /sys/devices/virtual/net/net6
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:04:17 UTC; 21h ago
   Device: /sys/devices/virtual/net/net6

● sys-devices-virtual-net-net9.device - /sys/devices/virtual/net/net9
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:04:17 UTC; 21h ago
   Device: /sys/devices/virtual/net/net9

● sys-devices-virtual-net-veth0e128358.device - /sys/devices/virtual/net/veth0e128358
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:05:48 UTC; 21h ago
   Device: /sys/devices/virtual/net/veth0e128358

● sys-devices-virtual-net-veth13a8b6f2.device - /sys/devices/virtual/net/veth13a8b6f2
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:06:00 UTC; 21h ago
   Device: /sys/devices/virtual/net/veth13a8b6f2

● sys-devices-virtual-net-veth18440dce.device - /sys/devices/virtual/net/veth18440dce
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:08:52 UTC; 21h ago
   Device: /sys/devices/virtual/net/veth18440dce

● sys-devices-virtual-net-veth2361008a.device - /sys/devices/virtual/net/veth2361008a
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:06:31 UTC; 21h ago
   Device: /sys/devices/virtual/net/veth2361008a

● sys-devices-virtual-net-veth25f0a974.device - /sys/devices/virtual/net/veth25f0a974
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:05:16 UTC; 21h ago
   Device: /sys/devices/virtual/net/veth25f0a974

● sys-devices-virtual-net-veth4623844f.device - /sys/devices/virtual/net/veth4623844f
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:05:16 UTC; 21h ago
   Device: /sys/devices/virtual/net/veth4623844f

● sys-devices-virtual-net-veth4c902a9b.device - /sys/devices/virtual/net/veth4c902a9b
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:05:15 UTC; 21h ago
   Device: /sys/devices/virtual/net/veth4c902a9b

● sys-devices-virtual-net-veth50df2d02.device - /sys/devices/virtual/net/veth50df2d02
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:06:31 UTC; 21h ago
   Device: /sys/devices/virtual/net/veth50df2d02

● sys-devices-virtual-net-veth59671d56.device - /sys/devices/virtual/net/veth59671d56
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:09:10 UTC; 21h ago
   Device: /sys/devices/virtual/net/veth59671d56

● sys-devices-virtual-net-veth6772188c.device - /sys/devices/virtual/net/veth6772188c
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:05:15 UTC; 21h ago
   Device: /sys/devices/virtual/net/veth6772188c

● sys-devices-virtual-net-veth6eaaa5d2.device - /sys/devices/virtual/net/veth6eaaa5d2
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:08:39 UTC; 21h ago
   Device: /sys/devices/virtual/net/veth6eaaa5d2

● sys-devices-virtual-net-veth7a42c95b.device - /sys/devices/virtual/net/veth7a42c95b
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:08:52 UTC; 21h ago
   Device: /sys/devices/virtual/net/veth7a42c95b

● sys-devices-virtual-net-veth7b9ff6c6.device - /sys/devices/virtual/net/veth7b9ff6c6
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:05:15 UTC; 21h ago
   Device: /sys/devices/virtual/net/veth7b9ff6c6

● sys-devices-virtual-net-vetha3a8c7f4.device - /sys/devices/virtual/net/vetha3a8c7f4
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:08:39 UTC; 21h ago
   Device: /sys/devices/virtual/net/vetha3a8c7f4

● sys-devices-virtual-net-vetha71aa938.device - /sys/devices/virtual/net/vetha71aa938
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:08:41 UTC; 21h ago
   Device: /sys/devices/virtual/net/vetha71aa938

● sys-devices-virtual-net-vethb0808040.device - /sys/devices/virtual/net/vethb0808040
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:05:15 UTC; 21h ago
   Device: /sys/devices/virtual/net/vethb0808040

● sys-devices-virtual-net-vethbc21e52f.device - /sys/devices/virtual/net/vethbc21e52f
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:05:55 UTC; 21h ago
   Device: /sys/devices/virtual/net/vethbc21e52f

● sys-devices-virtual-net-vethbe8f5d8a.device - /sys/devices/virtual/net/vethbe8f5d8a
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:05:15 UTC; 21h ago
   Device: /sys/devices/virtual/net/vethbe8f5d8a

● sys-devices-virtual-net-vethbf0f32e1.device - /sys/devices/virtual/net/vethbf0f32e1
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:05:55 UTC; 21h ago
   Device: /sys/devices/virtual/net/vethbf0f32e1

● sys-devices-virtual-net-vethc232249a.device - /sys/devices/virtual/net/vethc232249a
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:05:15 UTC; 21h ago
   Device: /sys/devices/virtual/net/vethc232249a

● sys-devices-virtual-net-vethc2996d18.device - /sys/devices/virtual/net/vethc2996d18
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:06:31 UTC; 21h ago
   Device: /sys/devices/virtual/net/vethc2996d18

● sys-devices-virtual-net-vethd1459ad1.device - /sys/devices/virtual/net/vethd1459ad1
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:08:52 UTC; 21h ago
   Device: /sys/devices/virtual/net/vethd1459ad1

● sys-devices-virtual-net-vethd40dc250.device - /sys/devices/virtual/net/vethd40dc250
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:05:16 UTC; 21h ago
   Device: /sys/devices/virtual/net/vethd40dc250

● sys-devices-virtual-net-vethd766a42c.device - /sys/devices/virtual/net/vethd766a42c
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:05:15 UTC; 21h ago
   Device: /sys/devices/virtual/net/vethd766a42c

● sys-devices-virtual-net-vethd9c82a5d.device - /sys/devices/virtual/net/vethd9c82a5d
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:05:15 UTC; 21h ago
   Device: /sys/devices/virtual/net/vethd9c82a5d

● sys-devices-virtual-net-vethe9f4720d.device - /sys/devices/virtual/net/vethe9f4720d
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:08:53 UTC; 21h ago
   Device: /sys/devices/virtual/net/vethe9f4720d

● sys-devices-virtual-net-vethea94ad01.device - /sys/devices/virtual/net/vethea94ad01
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:05:15 UTC; 21h ago
   Device: /sys/devices/virtual/net/vethea94ad01

● sys-module-configfs.device - /sys/module/configfs
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:48 UTC; 21h ago
   Device: /sys/module/configfs

● sys-module-fuse.device - /sys/module/fuse
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:06:32 UTC; 21h ago
   Device: /sys/module/fuse

● sys-subsystem-net-devices-cni0.device - /sys/subsystem/net/devices/cni0
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:05:15 UTC; 21h ago
   Device: /sys/devices/virtual/net/cni0

● sys-subsystem-net-devices-ens1f0np0.device - MT27800 Family [ConnectX-5] (ConnectX®-5 EN network interface card, 10/25GbE dual-port SFP28, PCIe3.0 x8, tall bracket ; MCX512A-ACAT)
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:53 UTC; 21h ago
   Device: /sys/devices/pci0000:80/0000:80:03.1/0000:86:00.0/net/ens1f0np0

● sys-subsystem-net-devices-ens1f1np1.device - MT27800 Family [ConnectX-5] (ConnectX®-5 EN network interface card, 10/25GbE dual-port SFP28, PCIe3.0 x8, tall bracket ; MCX512A-ACAT)
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:53 UTC; 21h ago
   Device: /sys/devices/pci0000:80/0000:80:03.1/0000:86:00.1/net/ens1f1np1

● sys-subsystem-net-devices-ens21f0.device - Ethernet Controller X710 for 10GbE SFP+ (Ethernet Network Adapter X710-2 for OCP NIC 3.0)
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:52 UTC; 21h ago
   Device: /sys/devices/pci0000:c0/0000:c0:03.1/0000:c8:00.0/net/ens21f0

● sys-subsystem-net-devices-ens21f1.device - Ethernet Controller X710 for 10GbE SFP+ (Ethernet Converged Network Adapter X710)
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:51 UTC; 21h ago
   Device: /sys/devices/pci0000:c0/0000:c0:03.1/0000:c8:00.1/net/ens21f1

● sys-subsystem-net-devices-ens22f0.device - I350 Gigabit Network Connection (Ethernet Network Adapter I350-T4 for OCP NIC 3.0)
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:52 UTC; 21h ago
   Device: /sys/devices/pci0000:40/0000:40:03.1/0000:46:00.0/net/ens22f0

● sys-subsystem-net-devices-ens22f1.device - I350 Gigabit Network Connection (Ethernet Network Adapter I350-T4 for OCP NIC 3.0)
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:52 UTC; 21h ago
   Device: /sys/devices/pci0000:40/0000:40:03.1/0000:46:00.1/net/ens22f1

● sys-subsystem-net-devices-ens22f2.device - I350 Gigabit Network Connection (Ethernet Network Adapter I350-T4 for OCP NIC 3.0)
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:52 UTC; 21h ago
   Device: /sys/devices/pci0000:40/0000:40:03.1/0000:46:00.2/net/ens22f2

● sys-subsystem-net-devices-ens22f3.device - I350 Gigabit Network Connection (Ethernet Network Adapter I350-T4 for OCP NIC 3.0)
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:03:52 UTC; 21h ago
   Device: /sys/devices/pci0000:40/0000:40:03.1/0000:46:00.3/net/ens22f3

● sys-subsystem-net-devices-kub1.device - /sys/subsystem/net/devices/kub1
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:04:08 UTC; 21h ago
   Device: /sys/devices/virtual/net/kub1

● sys-subsystem-net-devices-net1.device - /sys/subsystem/net/devices/net1
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:04:17 UTC; 21h ago
   Device: /sys/devices/virtual/net/net1

● sys-subsystem-net-devices-net10.device - /sys/subsystem/net/devices/net10
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:04:17 UTC; 21h ago
   Device: /sys/devices/virtual/net/net10

● sys-subsystem-net-devices-net2.device - /sys/subsystem/net/devices/net2
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:04:17 UTC; 21h ago
   Device: /sys/devices/virtual/net/net2

● sys-subsystem-net-devices-net3.device - /sys/subsystem/net/devices/net3
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:04:17 UTC; 21h ago
   Device: /sys/devices/virtual/net/net3

● sys-subsystem-net-devices-net4.device - /sys/subsystem/net/devices/net4
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:04:17 UTC; 21h ago
   Device: /sys/devices/virtual/net/net4

● sys-subsystem-net-devices-net5.device - /sys/subsystem/net/devices/net5
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:04:17 UTC; 21h ago
   Device: /sys/devices/virtual/net/net5

● sys-subsystem-net-devices-net6.device - /sys/subsystem/net/devices/net6
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:04:17 UTC; 21h ago
   Device: /sys/devices/virtual/net/net6

● sys-subsystem-net-devices-net9.device - /sys/subsystem/net/devices/net9
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:04:17 UTC; 21h ago
   Device: /sys/devices/virtual/net/net9

● sys-subsystem-net-devices-veth0e128358.device - /sys/subsystem/net/devices/veth0e128358
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:05:48 UTC; 21h ago
   Device: /sys/devices/virtual/net/veth0e128358

● sys-subsystem-net-devices-veth13a8b6f2.device - /sys/subsystem/net/devices/veth13a8b6f2
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:06:00 UTC; 21h ago
   Device: /sys/devices/virtual/net/veth13a8b6f2

● sys-subsystem-net-devices-veth18440dce.device - /sys/subsystem/net/devices/veth18440dce
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:08:52 UTC; 21h ago
   Device: /sys/devices/virtual/net/veth18440dce

● sys-subsystem-net-devices-veth2361008a.device - /sys/subsystem/net/devices/veth2361008a
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:06:31 UTC; 21h ago
   Device: /sys/devices/virtual/net/veth2361008a

● sys-subsystem-net-devices-veth25f0a974.device - /sys/subsystem/net/devices/veth25f0a974
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:05:16 UTC; 21h ago
   Device: /sys/devices/virtual/net/veth25f0a974

● sys-subsystem-net-devices-veth4623844f.device - /sys/subsystem/net/devices/veth4623844f
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:05:16 UTC; 21h ago
   Device: /sys/devices/virtual/net/veth4623844f

● sys-subsystem-net-devices-veth4c902a9b.device - /sys/subsystem/net/devices/veth4c902a9b
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:05:15 UTC; 21h ago
   Device: /sys/devices/virtual/net/veth4c902a9b

● sys-subsystem-net-devices-veth50df2d02.device - /sys/subsystem/net/devices/veth50df2d02
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:06:31 UTC; 21h ago
   Device: /sys/devices/virtual/net/veth50df2d02

● sys-subsystem-net-devices-veth59671d56.device - /sys/subsystem/net/devices/veth59671d56
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:09:10 UTC; 21h ago
   Device: /sys/devices/virtual/net/veth59671d56

● sys-subsystem-net-devices-veth6772188c.device - /sys/subsystem/net/devices/veth6772188c
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:05:15 UTC; 21h ago
   Device: /sys/devices/virtual/net/veth6772188c

● sys-subsystem-net-devices-veth6eaaa5d2.device - /sys/subsystem/net/devices/veth6eaaa5d2
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:08:39 UTC; 21h ago
   Device: /sys/devices/virtual/net/veth6eaaa5d2

● sys-subsystem-net-devices-veth7a42c95b.device - /sys/subsystem/net/devices/veth7a42c95b
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:08:52 UTC; 21h ago
Unit boot.mount could not be found.
   Device: /sys/devices/virtual/net/veth7a42c95b

● sys-subsystem-net-devices-veth7b9ff6c6.device - /sys/subsystem/net/devices/veth7b9ff6c6
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:05:15 UTC; 21h ago
   Device: /sys/devices/virtual/net/veth7b9ff6c6

● sys-subsystem-net-devices-vetha3a8c7f4.device - /sys/subsystem/net/devices/vetha3a8c7f4
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:08:39 UTC; 21h ago
   Device: /sys/devices/virtual/net/vetha3a8c7f4

● sys-subsystem-net-devices-vetha71aa938.device - /sys/subsystem/net/devices/vetha71aa938
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:08:41 UTC; 21h ago
   Device: /sys/devices/virtual/net/vetha71aa938

● sys-subsystem-net-devices-vethb0808040.device - /sys/subsystem/net/devices/vethb0808040
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:05:15 UTC; 21h ago
   Device: /sys/devices/virtual/net/vethb0808040

● sys-subsystem-net-devices-vethbc21e52f.device - /sys/subsystem/net/devices/vethbc21e52f
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:05:55 UTC; 21h ago
   Device: /sys/devices/virtual/net/vethbc21e52f

● sys-subsystem-net-devices-vethbe8f5d8a.device - /sys/subsystem/net/devices/vethbe8f5d8a
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:05:15 UTC; 21h ago
   Device: /sys/devices/virtual/net/vethbe8f5d8a

● sys-subsystem-net-devices-vethbf0f32e1.device - /sys/subsystem/net/devices/vethbf0f32e1
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:05:55 UTC; 21h ago
   Device: /sys/devices/virtual/net/vethbf0f32e1

● sys-subsystem-net-devices-vethc232249a.device - /sys/subsystem/net/devices/vethc232249a
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:05:15 UTC; 21h ago
   Device: /sys/devices/virtual/net/vethc232249a

● sys-subsystem-net-devices-vethc2996d18.device - /sys/subsystem/net/devices/vethc2996d18
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:06:31 UTC; 21h ago
   Device: /sys/devices/virtual/net/vethc2996d18

● sys-subsystem-net-devices-vethd1459ad1.device - /sys/subsystem/net/devices/vethd1459ad1
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:08:52 UTC; 21h ago
   Device: /sys/devices/virtual/net/vethd1459ad1

● sys-subsystem-net-devices-vethd40dc250.device - /sys/subsystem/net/devices/vethd40dc250
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:05:16 UTC; 21h ago
   Device: /sys/devices/virtual/net/vethd40dc250

● sys-subsystem-net-devices-vethd766a42c.device - /sys/subsystem/net/devices/vethd766a42c
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:05:15 UTC; 21h ago
   Device: /sys/devices/virtual/net/vethd766a42c

● sys-subsystem-net-devices-vethd9c82a5d.device - /sys/subsystem/net/devices/vethd9c82a5d
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:05:15 UTC; 21h ago
   Device: /sys/devices/virtual/net/vethd9c82a5d

● sys-subsystem-net-devices-vethe9f4720d.device - /sys/subsystem/net/devices/vethe9f4720d
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:08:53 UTC; 21h ago
   Device: /sys/devices/virtual/net/vethe9f4720d

● sys-subsystem-net-devices-vethea94ad01.device - /sys/subsystem/net/devices/vethea94ad01
   Loaded: loaded
   Active: active (plugged) since Mon 2025-11-17 16:05:15 UTC; 21h ago
   Device: /sys/devices/virtual/net/vethea94ad01

● -.mount - Root Mount
   Loaded: loaded (/etc/fstab; generated)
   Active: active (mounted) since Mon 2025-11-17 16:03:48 UTC; 21h ago
    Where: /
     What: /dev/sdb6
     Docs: man:fstab(5)
           man:systemd-fstab-generator(8)
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/-.mount

Warning: Journal has been rotated since unit was started. Log output is incomplete or unavailable.

● corefiles.mount - Bind mount to move corefiles to sda7
   Loaded: loaded (/etc/systemd/system/corefiles.mount; enabled; vendor preset: disabled)
   Active: active (mounted) since Mon 2025-11-17 16:03:51 UTC; 21h ago
    Where: /corefiles
     What: /dev/sdb7
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/corefiles.mount

● dev-hugepages.mount - Huge Pages File System
   Loaded: loaded (/usr/lib/systemd/system/dev-hugepages.mount; static; vendor preset: disabled)
   Active: active (mounted) since Mon 2025-11-17 16:03:50 UTC; 21h ago
    Where: /dev/hugepages
     What: hugetlbfs
     Docs: https://www.kernel.org/doc/Documentation/vm/hugetlbpage.txt
           https://www.freedesktop.org/wiki/Software/systemd/APIFileSystems
    Tasks: 0 (limit: 399998)
   Memory: 60.0K
   CGroup: /system.slice/dev-hugepages.mount

● dev-mqueue.mount - POSIX Message Queue File System
   Loaded: loaded (/usr/lib/systemd/system/dev-mqueue.mount; static; vendor preset: disabled)
   Active: active (mounted) since Mon 2025-11-17 16:03:50 UTC; 21h ago
    Where: /dev/mqueue
     What: mqueue
     Docs: man:mq_overview(7)
           https://www.freedesktop.org/wiki/Software/systemd/APIFileSystems
    Tasks: 0 (limit: 399998)
   Memory: 4.0K
   CGroup: /system.slice/dev-mqueue.mount

● mnt-data_drive.mount - /mnt/data_drive
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:04:08 UTC; 21h ago
    Where: /mnt/data_drive
     What: /dev/sda1
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/mnt-data_drive.mount

● mnt-sda7.mount - /mnt/sda7
   Loaded: loaded (/etc/fstab; generated)
   Active: active (mounted) since Mon 2025-11-17 16:03:50 UTC; 21h ago
    Where: /mnt/sda7
     What: /dev/sdb7
     Docs: man:fstab(5)
           man:systemd-fstab-generator(8)
    Tasks: 0 (limit: 399998)
   Memory: 196.0K
   CGroup: /system.slice/mnt-sda7.mount

● proc-fs-nfsd.mount - NFSD configuration filesystem
   Loaded: loaded (/usr/lib/systemd/system/proc-fs-nfsd.mount; static; vendor preset: disabled)
   Active: inactive (dead)
    Where: /proc/fs/nfsd
     What: nfsd

● proc-sys-fs-binfmt_misc.mount - Arbitrary Executable File Formats File System
   Loaded: loaded (/usr/lib/systemd/system/proc-sys-fs-binfmt_misc.mount; static; vendor preset: disabled)
   Active: active (mounted) since Tue 2025-11-18 13:55:35 UTC; 31s ago
    Where: /proc/sys/fs/binfmt_misc
     What: binfmt_misc
     Docs: https://www.kernel.org/doc/html/latest/admin-guide/binfmt-misc.html
           https://www.freedesktop.org/wiki/Software/systemd/APIFileSystems
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/proc-sys-fs-binfmt_misc.mount

Nov 18 13:55:35 XOSEncoder-01 systemd[1]: Mounting Arbitrary Executable File Formats File System...
Nov 18 13:55:35 XOSEncoder-01 systemd[1]: Mounted Arbitrary Executable File Formats File System.

● run-containerd-io.containerd.grpc.v1.cri-sandboxes-00e98de63a20a316ed80b9fb58d481ce95ac134055b07fa246fb8be067636213-shm.mount - /run/containerd/io.containerd.grpc.v1.cri/sandboxes/00e98de63a20a316ed80b9fb58d481ce95ac134055b07fa246fb8be067636213/shm
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:09:06 UTC; 21h ago
    Where: /run/containerd/io.containerd.grpc.v1.cri/sandboxes/00e98de63a20a316ed80b9fb58d481ce95ac134055b07fa246fb8be067636213/shm
     What: shm
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.grpc.v1.cri-sandboxes-00e98de63a20a316ed80b9fb58d481ce95ac134055b07fa246fb8be067636213-shm.mount

● run-containerd-io.containerd.grpc.v1.cri-sandboxes-048a23daccd11173da5ccd3aed2475ac58bd2f5c48e564c56d3867c90e407e8b-shm.mount - /run/containerd/io.containerd.grpc.v1.cri/sandboxes/048a23daccd11173da5ccd3aed2475ac58bd2f5c48e564c56d3867c90e407e8b/shm
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:06:00 UTC; 21h ago
    Where: /run/containerd/io.containerd.grpc.v1.cri/sandboxes/048a23daccd11173da5ccd3aed2475ac58bd2f5c48e564c56d3867c90e407e8b/shm
     What: shm
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.grpc.v1.cri-sandboxes-048a23daccd11173da5ccd3aed2475ac58bd2f5c48e564c56d3867c90e407e8b-shm.mount

● run-containerd-io.containerd.grpc.v1.cri-sandboxes-088d676e25b836ba1cad2c6a51c82f123437319e49f2e76950f72abfe9bf1927-shm.mount - /run/containerd/io.containerd.grpc.v1.cri/sandboxes/088d676e25b836ba1cad2c6a51c82f123437319e49f2e76950f72abfe9bf1927/shm
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:08:39 UTC; 21h ago
    Where: /run/containerd/io.containerd.grpc.v1.cri/sandboxes/088d676e25b836ba1cad2c6a51c82f123437319e49f2e76950f72abfe9bf1927/shm
     What: shm
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.grpc.v1.cri-sandboxes-088d676e25b836ba1cad2c6a51c82f123437319e49f2e76950f72abfe9bf1927-shm.mount

● run-containerd-io.containerd.grpc.v1.cri-sandboxes-0aa4eddeb5b3286b2b60bf5210fa41e2b8bc42f89958b23ba2038700b985324e-shm.mount - /run/containerd/io.containerd.grpc.v1.cri/sandboxes/0aa4eddeb5b3286b2b60bf5210fa41e2b8bc42f89958b23ba2038700b985324e/shm
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:15 UTC; 21h ago
    Where: /run/containerd/io.containerd.grpc.v1.cri/sandboxes/0aa4eddeb5b3286b2b60bf5210fa41e2b8bc42f89958b23ba2038700b985324e/shm
     What: shm
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.grpc.v1.cri-sandboxes-0aa4eddeb5b3286b2b60bf5210fa41e2b8bc42f89958b23ba2038700b985324e-shm.mount

● run-containerd-io.containerd.grpc.v1.cri-sandboxes-0d7155d211873ab9dd56c492ca84a6e4b4923853bba2d738ec0b87355cd4d5c7-shm.mount - /run/containerd/io.containerd.grpc.v1.cri/sandboxes/0d7155d211873ab9dd56c492ca84a6e4b4923853bba2d738ec0b87355cd4d5c7/shm
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:49 UTC; 21h ago
    Where: /run/containerd/io.containerd.grpc.v1.cri/sandboxes/0d7155d211873ab9dd56c492ca84a6e4b4923853bba2d738ec0b87355cd4d5c7/shm
     What: shm
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.grpc.v1.cri-sandboxes-0d7155d211873ab9dd56c492ca84a6e4b4923853bba2d738ec0b87355cd4d5c7-shm.mount

● run-containerd-io.containerd.grpc.v1.cri-sandboxes-103288724dcc47b7a997f0c219c0fad5d24fe11321d3a9fbfa04ba4763764450-shm.mount - /run/containerd/io.containerd.grpc.v1.cri/sandboxes/103288724dcc47b7a997f0c219c0fad5d24fe11321d3a9fbfa04ba4763764450/shm
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:08:52 UTC; 21h ago
    Where: /run/containerd/io.containerd.grpc.v1.cri/sandboxes/103288724dcc47b7a997f0c219c0fad5d24fe11321d3a9fbfa04ba4763764450/shm
     What: shm
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.grpc.v1.cri-sandboxes-103288724dcc47b7a997f0c219c0fad5d24fe11321d3a9fbfa04ba4763764450-shm.mount

● run-containerd-io.containerd.grpc.v1.cri-sandboxes-122f73268927d80259bbcdb7f8459c557a41c549eed251b9c19c06ea29fb9f3c-shm.mount - /run/containerd/io.containerd.grpc.v1.cri/sandboxes/122f73268927d80259bbcdb7f8459c557a41c549eed251b9c19c06ea29fb9f3c/shm
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:15 UTC; 21h ago
    Where: /run/containerd/io.containerd.grpc.v1.cri/sandboxes/122f73268927d80259bbcdb7f8459c557a41c549eed251b9c19c06ea29fb9f3c/shm
     What: shm
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.grpc.v1.cri-sandboxes-122f73268927d80259bbcdb7f8459c557a41c549eed251b9c19c06ea29fb9f3c-shm.mount

● run-containerd-io.containerd.grpc.v1.cri-sandboxes-1d580c48cbcd18e29e222541f0507c3fb28987d91d35b55bd98d39e7d7c21917-shm.mount - /run/containerd/io.containerd.grpc.v1.cri/sandboxes/1d580c48cbcd18e29e222541f0507c3fb28987d91d35b55bd98d39e7d7c21917/shm
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:06:31 UTC; 21h ago
    Where: /run/containerd/io.containerd.grpc.v1.cri/sandboxes/1d580c48cbcd18e29e222541f0507c3fb28987d91d35b55bd98d39e7d7c21917/shm
     What: shm
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.grpc.v1.cri-sandboxes-1d580c48cbcd18e29e222541f0507c3fb28987d91d35b55bd98d39e7d7c21917-shm.mount

● run-containerd-io.containerd.grpc.v1.cri-sandboxes-3ad1a6a103e84d28055fe44b77c7699f1de9ca54723853a94bc8fe5f7309cb85-shm.mount - /run/containerd/io.containerd.grpc.v1.cri/sandboxes/3ad1a6a103e84d28055fe44b77c7699f1de9ca54723853a94bc8fe5f7309cb85/shm
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:09:12 UTC; 21h ago
    Where: /run/containerd/io.containerd.grpc.v1.cri/sandboxes/3ad1a6a103e84d28055fe44b77c7699f1de9ca54723853a94bc8fe5f7309cb85/shm
     What: shm
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.grpc.v1.cri-sandboxes-3ad1a6a103e84d28055fe44b77c7699f1de9ca54723853a94bc8fe5f7309cb85-shm.mount

● run-containerd-io.containerd.grpc.v1.cri-sandboxes-42840c0de72c59943724e2371a66aadbd0d28beaf75a202ec52c78ab12091840-shm.mount - /run/containerd/io.containerd.grpc.v1.cri/sandboxes/42840c0de72c59943724e2371a66aadbd0d28beaf75a202ec52c78ab12091840/shm
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:09:05 UTC; 21h ago
    Where: /run/containerd/io.containerd.grpc.v1.cri/sandboxes/42840c0de72c59943724e2371a66aadbd0d28beaf75a202ec52c78ab12091840/shm
     What: shm
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.grpc.v1.cri-sandboxes-42840c0de72c59943724e2371a66aadbd0d28beaf75a202ec52c78ab12091840-shm.mount

● run-containerd-io.containerd.grpc.v1.cri-sandboxes-4410a4c85c7991ba762df13284942daddcb3f54a86de129e666c5728aaaa401c-shm.mount - /run/containerd/io.containerd.grpc.v1.cri/sandboxes/4410a4c85c7991ba762df13284942daddcb3f54a86de129e666c5728aaaa401c/shm
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:16 UTC; 21h ago
    Where: /run/containerd/io.containerd.grpc.v1.cri/sandboxes/4410a4c85c7991ba762df13284942daddcb3f54a86de129e666c5728aaaa401c/shm
     What: shm
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.grpc.v1.cri-sandboxes-4410a4c85c7991ba762df13284942daddcb3f54a86de129e666c5728aaaa401c-shm.mount

● run-containerd-io.containerd.grpc.v1.cri-sandboxes-4756130fd5ad708a904a99a0cad190c227812ba5f85f488054b470d5d9ab4fbb-shm.mount - /run/containerd/io.containerd.grpc.v1.cri/sandboxes/4756130fd5ad708a904a99a0cad190c227812ba5f85f488054b470d5d9ab4fbb/shm
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:08:52 UTC; 21h ago
    Where: /run/containerd/io.containerd.grpc.v1.cri/sandboxes/4756130fd5ad708a904a99a0cad190c227812ba5f85f488054b470d5d9ab4fbb/shm
     What: shm
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.grpc.v1.cri-sandboxes-4756130fd5ad708a904a99a0cad190c227812ba5f85f488054b470d5d9ab4fbb-shm.mount

● run-containerd-io.containerd.grpc.v1.cri-sandboxes-68567ef6b45f1055433d0f3e6fbcd064ef9bb98190dd051f69fd12cff808e30f-shm.mount - /run/containerd/io.containerd.grpc.v1.cri/sandboxes/68567ef6b45f1055433d0f3e6fbcd064ef9bb98190dd051f69fd12cff808e30f/shm
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:08:41 UTC; 21h ago
    Where: /run/containerd/io.containerd.grpc.v1.cri/sandboxes/68567ef6b45f1055433d0f3e6fbcd064ef9bb98190dd051f69fd12cff808e30f/shm
     What: shm
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.grpc.v1.cri-sandboxes-68567ef6b45f1055433d0f3e6fbcd064ef9bb98190dd051f69fd12cff808e30f-shm.mount

● run-containerd-io.containerd.grpc.v1.cri-sandboxes-6ae50f2a676c640e423edd4d4bf7cff29770d8a9e82293d7113d8fcef8912e53-shm.mount - /run/containerd/io.containerd.grpc.v1.cri/sandboxes/6ae50f2a676c640e423edd4d4bf7cff29770d8a9e82293d7113d8fcef8912e53/shm
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:09:11 UTC; 21h ago
    Where: /run/containerd/io.containerd.grpc.v1.cri/sandboxes/6ae50f2a676c640e423edd4d4bf7cff29770d8a9e82293d7113d8fcef8912e53/shm
     What: shm
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.grpc.v1.cri-sandboxes-6ae50f2a676c640e423edd4d4bf7cff29770d8a9e82293d7113d8fcef8912e53-shm.mount

● run-containerd-io.containerd.grpc.v1.cri-sandboxes-722977738813a4b4dba7b54d441862d21d5970ae817c4d9a8ce4634487fc6687-shm.mount - /run/containerd/io.containerd.grpc.v1.cri/sandboxes/722977738813a4b4dba7b54d441862d21d5970ae817c4d9a8ce4634487fc6687/shm
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:08:39 UTC; 21h ago
    Where: /run/containerd/io.containerd.grpc.v1.cri/sandboxes/722977738813a4b4dba7b54d441862d21d5970ae817c4d9a8ce4634487fc6687/shm
     What: shm
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.grpc.v1.cri-sandboxes-722977738813a4b4dba7b54d441862d21d5970ae817c4d9a8ce4634487fc6687-shm.mount

● run-containerd-io.containerd.grpc.v1.cri-sandboxes-77a2d3f8700f25d768cab0b31d993da60466e1cfeb0c21451e503ba6b4caa4f0-shm.mount - /run/containerd/io.containerd.grpc.v1.cri/sandboxes/77a2d3f8700f25d768cab0b31d993da60466e1cfeb0c21451e503ba6b4caa4f0/shm
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:56 UTC; 21h ago
    Where: /run/containerd/io.containerd.grpc.v1.cri/sandboxes/77a2d3f8700f25d768cab0b31d993da60466e1cfeb0c21451e503ba6b4caa4f0/shm
     What: shm
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.grpc.v1.cri-sandboxes-77a2d3f8700f25d768cab0b31d993da60466e1cfeb0c21451e503ba6b4caa4f0-shm.mount

● run-containerd-io.containerd.grpc.v1.cri-sandboxes-7df7db3e64340a5a722340e86d3ff57667247c0af41c3901b967d7282dcabb82-shm.mount - /run/containerd/io.containerd.grpc.v1.cri/sandboxes/7df7db3e64340a5a722340e86d3ff57667247c0af41c3901b967d7282dcabb82/shm
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:55 UTC; 21h ago
    Where: /run/containerd/io.containerd.grpc.v1.cri/sandboxes/7df7db3e64340a5a722340e86d3ff57667247c0af41c3901b967d7282dcabb82/shm
     What: shm
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.grpc.v1.cri-sandboxes-7df7db3e64340a5a722340e86d3ff57667247c0af41c3901b967d7282dcabb82-shm.mount

● run-containerd-io.containerd.grpc.v1.cri-sandboxes-82b42c91685186661522be88ab242d1fdfde63fc80e20d245e230c4a43b91076-shm.mount - /run/containerd/io.containerd.grpc.v1.cri/sandboxes/82b42c91685186661522be88ab242d1fdfde63fc80e20d245e230c4a43b91076/shm
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:15 UTC; 21h ago
    Where: /run/containerd/io.containerd.grpc.v1.cri/sandboxes/82b42c91685186661522be88ab242d1fdfde63fc80e20d245e230c4a43b91076/shm
     What: shm
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.grpc.v1.cri-sandboxes-82b42c91685186661522be88ab242d1fdfde63fc80e20d245e230c4a43b91076-shm.mount

● run-containerd-io.containerd.grpc.v1.cri-sandboxes-86235cf3a3f9e3cfc02fc7f4888ecb5e3cfa9c0ec97e426d2ae1ce316b32c197-shm.mount - /run/containerd/io.containerd.grpc.v1.cri/sandboxes/86235cf3a3f9e3cfc02fc7f4888ecb5e3cfa9c0ec97e426d2ae1ce316b32c197/shm
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:06:31 UTC; 21h ago
    Where: /run/containerd/io.containerd.grpc.v1.cri/sandboxes/86235cf3a3f9e3cfc02fc7f4888ecb5e3cfa9c0ec97e426d2ae1ce316b32c197/shm
     What: shm
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.grpc.v1.cri-sandboxes-86235cf3a3f9e3cfc02fc7f4888ecb5e3cfa9c0ec97e426d2ae1ce316b32c197-shm.mount

● run-containerd-io.containerd.grpc.v1.cri-sandboxes-8c433fb4c3fc17ad704bf6e54b939e9997d87dd176929ea899a6a96130f63b4f-shm.mount - /run/containerd/io.containerd.grpc.v1.cri/sandboxes/8c433fb4c3fc17ad704bf6e54b939e9997d87dd176929ea899a6a96130f63b4f/shm
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:08:53 UTC; 21h ago
    Where: /run/containerd/io.containerd.grpc.v1.cri/sandboxes/8c433fb4c3fc17ad704bf6e54b939e9997d87dd176929ea899a6a96130f63b4f/shm
     What: shm
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.grpc.v1.cri-sandboxes-8c433fb4c3fc17ad704bf6e54b939e9997d87dd176929ea899a6a96130f63b4f-shm.mount

● run-containerd-io.containerd.grpc.v1.cri-sandboxes-8e6998139a624e92be3d5aec5ae450aef662eb452d6c1b589e89721e082040a1-shm.mount - /run/containerd/io.containerd.grpc.v1.cri/sandboxes/8e6998139a624e92be3d5aec5ae450aef662eb452d6c1b589e89721e082040a1/shm
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:09:08 UTC; 21h ago
    Where: /run/containerd/io.containerd.grpc.v1.cri/sandboxes/8e6998139a624e92be3d5aec5ae450aef662eb452d6c1b589e89721e082040a1/shm
     What: shm
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.grpc.v1.cri-sandboxes-8e6998139a624e92be3d5aec5ae450aef662eb452d6c1b589e89721e082040a1-shm.mount

● run-containerd-io.containerd.grpc.v1.cri-sandboxes-93457ab6bb3bd0b0033873f9f7b5eb98d2daaa300555d4b050a89460ee8dbe06-shm.mount - /run/containerd/io.containerd.grpc.v1.cri/sandboxes/93457ab6bb3bd0b0033873f9f7b5eb98d2daaa300555d4b050a89460ee8dbe06/shm
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:09:10 UTC; 21h ago
    Where: /run/containerd/io.containerd.grpc.v1.cri/sandboxes/93457ab6bb3bd0b0033873f9f7b5eb98d2daaa300555d4b050a89460ee8dbe06/shm
     What: shm
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.grpc.v1.cri-sandboxes-93457ab6bb3bd0b0033873f9f7b5eb98d2daaa300555d4b050a89460ee8dbe06-shm.mount

● run-containerd-io.containerd.grpc.v1.cri-sandboxes-945e7003246f1d85e24f7367e5a3332fcc3bfe456f7f7b9fcc12cd5399d0ed27-shm.mount - /run/containerd/io.containerd.grpc.v1.cri/sandboxes/945e7003246f1d85e24f7367e5a3332fcc3bfe456f7f7b9fcc12cd5399d0ed27/shm
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:15 UTC; 21h ago
    Where: /run/containerd/io.containerd.grpc.v1.cri/sandboxes/945e7003246f1d85e24f7367e5a3332fcc3bfe456f7f7b9fcc12cd5399d0ed27/shm
     What: shm
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.grpc.v1.cri-sandboxes-945e7003246f1d85e24f7367e5a3332fcc3bfe456f7f7b9fcc12cd5399d0ed27-shm.mount

● run-containerd-io.containerd.grpc.v1.cri-sandboxes-97fe6cf2172869acf381caf38e9c77a80dd4ffb5ec9ffd342bb1297327a0c61c-shm.mount - /run/containerd/io.containerd.grpc.v1.cri/sandboxes/97fe6cf2172869acf381caf38e9c77a80dd4ffb5ec9ffd342bb1297327a0c61c/shm
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:15 UTC; 21h ago
    Where: /run/containerd/io.containerd.grpc.v1.cri/sandboxes/97fe6cf2172869acf381caf38e9c77a80dd4ffb5ec9ffd342bb1297327a0c61c/shm
     What: shm
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.grpc.v1.cri-sandboxes-97fe6cf2172869acf381caf38e9c77a80dd4ffb5ec9ffd342bb1297327a0c61c-shm.mount

● run-containerd-io.containerd.grpc.v1.cri-sandboxes-9b7ebe191af1caee7c18acca4db8ef2c3c671c90ebed84dda923eaa5e4f16cad-shm.mount - /run/containerd/io.containerd.grpc.v1.cri/sandboxes/9b7ebe191af1caee7c18acca4db8ef2c3c671c90ebed84dda923eaa5e4f16cad/shm
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:15 UTC; 21h ago
    Where: /run/containerd/io.containerd.grpc.v1.cri/sandboxes/9b7ebe191af1caee7c18acca4db8ef2c3c671c90ebed84dda923eaa5e4f16cad/shm
     What: shm
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.grpc.v1.cri-sandboxes-9b7ebe191af1caee7c18acca4db8ef2c3c671c90ebed84dda923eaa5e4f16cad-shm.mount

● run-containerd-io.containerd.grpc.v1.cri-sandboxes-b4f17c609632632a613da8bf68ce25789705dbcb014364fa3b7419ae46c316a7-shm.mount - /run/containerd/io.containerd.grpc.v1.cri/sandboxes/b4f17c609632632a613da8bf68ce25789705dbcb014364fa3b7419ae46c316a7/shm
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:09:07 UTC; 21h ago
    Where: /run/containerd/io.containerd.grpc.v1.cri/sandboxes/b4f17c609632632a613da8bf68ce25789705dbcb014364fa3b7419ae46c316a7/shm
     What: shm
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.grpc.v1.cri-sandboxes-b4f17c609632632a613da8bf68ce25789705dbcb014364fa3b7419ae46c316a7-shm.mount

● run-containerd-io.containerd.grpc.v1.cri-sandboxes-b521bb9fc800522d5ddc3a138193d4a08f4d51ce79ee0d19a6b3e42d1d4a4ee4-shm.mount - /run/containerd/io.containerd.grpc.v1.cri/sandboxes/b521bb9fc800522d5ddc3a138193d4a08f4d51ce79ee0d19a6b3e42d1d4a4ee4/shm
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:15 UTC; 21h ago
    Where: /run/containerd/io.containerd.grpc.v1.cri/sandboxes/b521bb9fc800522d5ddc3a138193d4a08f4d51ce79ee0d19a6b3e42d1d4a4ee4/shm
     What: shm
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.grpc.v1.cri-sandboxes-b521bb9fc800522d5ddc3a138193d4a08f4d51ce79ee0d19a6b3e42d1d4a4ee4-shm.mount

● run-containerd-io.containerd.grpc.v1.cri-sandboxes-bb8f9aba5a9cfe49eda5b1007ecac6c2228462f77806cb7801aa820df7b2f0a4-shm.mount - /run/containerd/io.containerd.grpc.v1.cri/sandboxes/bb8f9aba5a9cfe49eda5b1007ecac6c2228462f77806cb7801aa820df7b2f0a4/shm
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:16 UTC; 21h ago
    Where: /run/containerd/io.containerd.grpc.v1.cri/sandboxes/bb8f9aba5a9cfe49eda5b1007ecac6c2228462f77806cb7801aa820df7b2f0a4/shm
     What: shm
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.grpc.v1.cri-sandboxes-bb8f9aba5a9cfe49eda5b1007ecac6c2228462f77806cb7801aa820df7b2f0a4-shm.mount

● run-containerd-io.containerd.grpc.v1.cri-sandboxes-bd5c5cd2d6fdb3330412b7f2a136f6c40255e3f17d2e12cb4e727dbadb0715a2-shm.mount - /run/containerd/io.containerd.grpc.v1.cri/sandboxes/bd5c5cd2d6fdb3330412b7f2a136f6c40255e3f17d2e12cb4e727dbadb0715a2/shm
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:15 UTC; 21h ago
    Where: /run/containerd/io.containerd.grpc.v1.cri/sandboxes/bd5c5cd2d6fdb3330412b7f2a136f6c40255e3f17d2e12cb4e727dbadb0715a2/shm
     What: shm
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.grpc.v1.cri-sandboxes-bd5c5cd2d6fdb3330412b7f2a136f6c40255e3f17d2e12cb4e727dbadb0715a2-shm.mount

● run-containerd-io.containerd.grpc.v1.cri-sandboxes-bdd3f0371b20c26eabbf8c0c7141ac0992f046ae3f8d0673711e30a1078f8c0d-shm.mount - /run/containerd/io.containerd.grpc.v1.cri/sandboxes/bdd3f0371b20c26eabbf8c0c7141ac0992f046ae3f8d0673711e30a1078f8c0d/shm
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:06:30 UTC; 21h ago
    Where: /run/containerd/io.containerd.grpc.v1.cri/sandboxes/bdd3f0371b20c26eabbf8c0c7141ac0992f046ae3f8d0673711e30a1078f8c0d/shm
     What: shm
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.grpc.v1.cri-sandboxes-bdd3f0371b20c26eabbf8c0c7141ac0992f046ae3f8d0673711e30a1078f8c0d-shm.mount

● run-containerd-io.containerd.grpc.v1.cri-sandboxes-d11743e05135a6b7daa52339ed04bbac1981ffc8018794fe1c83b1da7b182e49-shm.mount - /run/containerd/io.containerd.grpc.v1.cri/sandboxes/d11743e05135a6b7daa52339ed04bbac1981ffc8018794fe1c83b1da7b182e49/shm
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:15 UTC; 21h ago
    Where: /run/containerd/io.containerd.grpc.v1.cri/sandboxes/d11743e05135a6b7daa52339ed04bbac1981ffc8018794fe1c83b1da7b182e49/shm
     What: shm
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.grpc.v1.cri-sandboxes-d11743e05135a6b7daa52339ed04bbac1981ffc8018794fe1c83b1da7b182e49-shm.mount

● run-containerd-io.containerd.grpc.v1.cri-sandboxes-d47371732f56f14f031e0b7b210ea0c0947efe1c4bf8fa70b5e0c3ebb888ecfa-shm.mount - /run/containerd/io.containerd.grpc.v1.cri/sandboxes/d47371732f56f14f031e0b7b210ea0c0947efe1c4bf8fa70b5e0c3ebb888ecfa/shm
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:15 UTC; 21h ago
    Where: /run/containerd/io.containerd.grpc.v1.cri/sandboxes/d47371732f56f14f031e0b7b210ea0c0947efe1c4bf8fa70b5e0c3ebb888ecfa/shm
     What: shm
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.grpc.v1.cri-sandboxes-d47371732f56f14f031e0b7b210ea0c0947efe1c4bf8fa70b5e0c3ebb888ecfa-shm.mount

● run-containerd-io.containerd.grpc.v1.cri-sandboxes-d8fdd5b3c74ad34b276029b4db554a5ac1526f30d1ed42e889a2d11810f97e27-shm.mount - /run/containerd/io.containerd.grpc.v1.cri/sandboxes/d8fdd5b3c74ad34b276029b4db554a5ac1526f30d1ed42e889a2d11810f97e27/shm
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:09:06 UTC; 21h ago
    Where: /run/containerd/io.containerd.grpc.v1.cri/sandboxes/d8fdd5b3c74ad34b276029b4db554a5ac1526f30d1ed42e889a2d11810f97e27/shm
     What: shm
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.grpc.v1.cri-sandboxes-d8fdd5b3c74ad34b276029b4db554a5ac1526f30d1ed42e889a2d11810f97e27-shm.mount

● run-containerd-io.containerd.grpc.v1.cri-sandboxes-e32dbeb647266922c1ed7cbf28d0793f5eb1684ee8ea9245e799311514fddcf8-shm.mount - /run/containerd/io.containerd.grpc.v1.cri/sandboxes/e32dbeb647266922c1ed7cbf28d0793f5eb1684ee8ea9245e799311514fddcf8/shm
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:15 UTC; 21h ago
    Where: /run/containerd/io.containerd.grpc.v1.cri/sandboxes/e32dbeb647266922c1ed7cbf28d0793f5eb1684ee8ea9245e799311514fddcf8/shm
     What: shm
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.grpc.v1.cri-sandboxes-e32dbeb647266922c1ed7cbf28d0793f5eb1684ee8ea9245e799311514fddcf8-shm.mount

● run-containerd-io.containerd.grpc.v1.cri-sandboxes-ecb389f6e488e34ee05de07d5ba0b6a865880613de9512f762d02a25ef996982-shm.mount - /run/containerd/io.containerd.grpc.v1.cri/sandboxes/ecb389f6e488e34ee05de07d5ba0b6a865880613de9512f762d02a25ef996982/shm
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:08:52 UTC; 21h ago
    Where: /run/containerd/io.containerd.grpc.v1.cri/sandboxes/ecb389f6e488e34ee05de07d5ba0b6a865880613de9512f762d02a25ef996982/shm
     What: shm
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.grpc.v1.cri-sandboxes-ecb389f6e488e34ee05de07d5ba0b6a865880613de9512f762d02a25ef996982-shm.mount

● run-containerd-io.containerd.grpc.v1.cri-sandboxes-f35a227c889252874195cfa019db37de8dd32f8c0a56e2dabdc9040729bdebed-shm.mount - /run/containerd/io.containerd.grpc.v1.cri/sandboxes/f35a227c889252874195cfa019db37de8dd32f8c0a56e2dabdc9040729bdebed/shm
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:15 UTC; 21h ago
    Where: /run/containerd/io.containerd.grpc.v1.cri/sandboxes/f35a227c889252874195cfa019db37de8dd32f8c0a56e2dabdc9040729bdebed/shm
     What: shm
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.grpc.v1.cri-sandboxes-f35a227c889252874195cfa019db37de8dd32f8c0a56e2dabdc9040729bdebed-shm.mount

● run-containerd-io.containerd.grpc.v1.cri-sandboxes-f4065ad589e13db0b84c7391204b93094e65cb429fb782299eb826113accb8ea-shm.mount - /run/containerd/io.containerd.grpc.v1.cri/sandboxes/f4065ad589e13db0b84c7391204b93094e65cb429fb782299eb826113accb8ea/shm
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:15 UTC; 21h ago
    Where: /run/containerd/io.containerd.grpc.v1.cri/sandboxes/f4065ad589e13db0b84c7391204b93094e65cb429fb782299eb826113accb8ea/shm
     What: shm
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.grpc.v1.cri-sandboxes-f4065ad589e13db0b84c7391204b93094e65cb429fb782299eb826113accb8ea-shm.mount

● run-containerd-io.containerd.grpc.v1.cri-sandboxes-fe68ab2c54c4e080fa59d2704214383fbd0d9afeee08b6f32fbda016dfe38caf-shm.mount - /run/containerd/io.containerd.grpc.v1.cri/sandboxes/fe68ab2c54c4e080fa59d2704214383fbd0d9afeee08b6f32fbda016dfe38caf/shm
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:09:06 UTC; 21h ago
    Where: /run/containerd/io.containerd.grpc.v1.cri/sandboxes/fe68ab2c54c4e080fa59d2704214383fbd0d9afeee08b6f32fbda016dfe38caf/shm
     What: shm
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.grpc.v1.cri-sandboxes-fe68ab2c54c4e080fa59d2704214383fbd0d9afeee08b6f32fbda016dfe38caf-shm.mount

● run-containerd-io.containerd.runtime.v2.task-k8s.io-00e98de63a20a316ed80b9fb58d481ce95ac134055b07fa246fb8be067636213-rootfs.mount - /run/containerd/io.containerd.runtime.v2.task/k8s.io/00e98de63a20a316ed80b9fb58d481ce95ac134055b07fa246fb8be067636213/rootfs
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:09:06 UTC; 21h ago
    Where: /run/containerd/io.containerd.runtime.v2.task/k8s.io/00e98de63a20a316ed80b9fb58d481ce95ac134055b07fa246fb8be067636213/rootfs
     What: overlay
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.runtime.v2.task-k8s.io-00e98de63a20a316ed80b9fb58d481ce95ac134055b07fa246fb8be067636213-rootfs.mount

● run-containerd-io.containerd.runtime.v2.task-k8s.io-03b57a55d5b60b06581e669f2d1bb915d5866652d1fe45dababd7b973fa4a2d4-rootfs.mount - /run/containerd/io.containerd.runtime.v2.task/k8s.io/03b57a55d5b60b06581e669f2d1bb915d5866652d1fe45dababd7b973fa4a2d4/rootfs
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:09:09 UTC; 21h ago
    Where: /run/containerd/io.containerd.runtime.v2.task/k8s.io/03b57a55d5b60b06581e669f2d1bb915d5866652d1fe45dababd7b973fa4a2d4/rootfs
     What: overlay
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.runtime.v2.task-k8s.io-03b57a55d5b60b06581e669f2d1bb915d5866652d1fe45dababd7b973fa4a2d4-rootfs.mount

● run-containerd-io.containerd.runtime.v2.task-k8s.io-048a23daccd11173da5ccd3aed2475ac58bd2f5c48e564c56d3867c90e407e8b-rootfs.mount - /run/containerd/io.containerd.runtime.v2.task/k8s.io/048a23daccd11173da5ccd3aed2475ac58bd2f5c48e564c56d3867c90e407e8b/rootfs
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:06:00 UTC; 21h ago
    Where: /run/containerd/io.containerd.runtime.v2.task/k8s.io/048a23daccd11173da5ccd3aed2475ac58bd2f5c48e564c56d3867c90e407e8b/rootfs
     What: overlay
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.runtime.v2.task-k8s.io-048a23daccd11173da5ccd3aed2475ac58bd2f5c48e564c56d3867c90e407e8b-rootfs.mount

● run-containerd-io.containerd.runtime.v2.task-k8s.io-0549d98179a918e17cf119b8b8e147bc0ed93b3a2fe6800534943bc5cade26ec-rootfs.mount - /run/containerd/io.containerd.runtime.v2.task/k8s.io/0549d98179a918e17cf119b8b8e147bc0ed93b3a2fe6800534943bc5cade26ec/rootfs
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:15 UTC; 21h ago
    Where: /run/containerd/io.containerd.runtime.v2.task/k8s.io/0549d98179a918e17cf119b8b8e147bc0ed93b3a2fe6800534943bc5cade26ec/rootfs
     What: overlay
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.runtime.v2.task-k8s.io-0549d98179a918e17cf119b8b8e147bc0ed93b3a2fe6800534943bc5cade26ec-rootfs.mount

● run-containerd-io.containerd.runtime.v2.task-k8s.io-088d676e25b836ba1cad2c6a51c82f123437319e49f2e76950f72abfe9bf1927-rootfs.mount - /run/containerd/io.containerd.runtime.v2.task/k8s.io/088d676e25b836ba1cad2c6a51c82f123437319e49f2e76950f72abfe9bf1927/rootfs
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:08:39 UTC; 21h ago
    Where: /run/containerd/io.containerd.runtime.v2.task/k8s.io/088d676e25b836ba1cad2c6a51c82f123437319e49f2e76950f72abfe9bf1927/rootfs
     What: overlay
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.runtime.v2.task-k8s.io-088d676e25b836ba1cad2c6a51c82f123437319e49f2e76950f72abfe9bf1927-rootfs.mount

● run-containerd-io.containerd.runtime.v2.task-k8s.io-0aa4eddeb5b3286b2b60bf5210fa41e2b8bc42f89958b23ba2038700b985324e-rootfs.mount - /run/containerd/io.containerd.runtime.v2.task/k8s.io/0aa4eddeb5b3286b2b60bf5210fa41e2b8bc42f89958b23ba2038700b985324e/rootfs
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:15 UTC; 21h ago
    Where: /run/containerd/io.containerd.runtime.v2.task/k8s.io/0aa4eddeb5b3286b2b60bf5210fa41e2b8bc42f89958b23ba2038700b985324e/rootfs
     What: overlay
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.runtime.v2.task-k8s.io-0aa4eddeb5b3286b2b60bf5210fa41e2b8bc42f89958b23ba2038700b985324e-rootfs.mount

● run-containerd-io.containerd.runtime.v2.task-k8s.io-0cc8fb35afbc569f879b5340cf38cbc9ddc87773fce6b67a9c7fc07172660322-rootfs.mount - /run/containerd/io.containerd.runtime.v2.task/k8s.io/0cc8fb35afbc569f879b5340cf38cbc9ddc87773fce6b67a9c7fc07172660322/rootfs
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:15 UTC; 21h ago
    Where: /run/containerd/io.containerd.runtime.v2.task/k8s.io/0cc8fb35afbc569f879b5340cf38cbc9ddc87773fce6b67a9c7fc07172660322/rootfs
     What: overlay
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.runtime.v2.task-k8s.io-0cc8fb35afbc569f879b5340cf38cbc9ddc87773fce6b67a9c7fc07172660322-rootfs.mount

● run-containerd-io.containerd.runtime.v2.task-k8s.io-0d7155d211873ab9dd56c492ca84a6e4b4923853bba2d738ec0b87355cd4d5c7-rootfs.mount - /run/containerd/io.containerd.runtime.v2.task/k8s.io/0d7155d211873ab9dd56c492ca84a6e4b4923853bba2d738ec0b87355cd4d5c7/rootfs
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:49 UTC; 21h ago
    Where: /run/containerd/io.containerd.runtime.v2.task/k8s.io/0d7155d211873ab9dd56c492ca84a6e4b4923853bba2d738ec0b87355cd4d5c7/rootfs
     What: overlay
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.runtime.v2.task-k8s.io-0d7155d211873ab9dd56c492ca84a6e4b4923853bba2d738ec0b87355cd4d5c7-rootfs.mount

● run-containerd-io.containerd.runtime.v2.task-k8s.io-103288724dcc47b7a997f0c219c0fad5d24fe11321d3a9fbfa04ba4763764450-rootfs.mount - /run/containerd/io.containerd.runtime.v2.task/k8s.io/103288724dcc47b7a997f0c219c0fad5d24fe11321d3a9fbfa04ba4763764450/rootfs
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:08:52 UTC; 21h ago
    Where: /run/containerd/io.containerd.runtime.v2.task/k8s.io/103288724dcc47b7a997f0c219c0fad5d24fe11321d3a9fbfa04ba4763764450/rootfs
     What: overlay
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.runtime.v2.task-k8s.io-103288724dcc47b7a997f0c219c0fad5d24fe11321d3a9fbfa04ba4763764450-rootfs.mount

● run-containerd-io.containerd.runtime.v2.task-k8s.io-122f73268927d80259bbcdb7f8459c557a41c549eed251b9c19c06ea29fb9f3c-rootfs.mount - /run/containerd/io.containerd.runtime.v2.task/k8s.io/122f73268927d80259bbcdb7f8459c557a41c549eed251b9c19c06ea29fb9f3c/rootfs
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:15 UTC; 21h ago
    Where: /run/containerd/io.containerd.runtime.v2.task/k8s.io/122f73268927d80259bbcdb7f8459c557a41c549eed251b9c19c06ea29fb9f3c/rootfs
     What: overlay
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.runtime.v2.task-k8s.io-122f73268927d80259bbcdb7f8459c557a41c549eed251b9c19c06ea29fb9f3c-rootfs.mount

● run-containerd-io.containerd.runtime.v2.task-k8s.io-1af4c9c62ebee28328f5112ba9d76d6b74a48a6627e620d82f2d49d3c5a29efa-rootfs.mount - /run/containerd/io.containerd.runtime.v2.task/k8s.io/1af4c9c62ebee28328f5112ba9d76d6b74a48a6627e620d82f2d49d3c5a29efa/rootfs
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:09:06 UTC; 21h ago
    Where: /run/containerd/io.containerd.runtime.v2.task/k8s.io/1af4c9c62ebee28328f5112ba9d76d6b74a48a6627e620d82f2d49d3c5a29efa/rootfs
     What: overlay
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.runtime.v2.task-k8s.io-1af4c9c62ebee28328f5112ba9d76d6b74a48a6627e620d82f2d49d3c5a29efa-rootfs.mount

● run-containerd-io.containerd.runtime.v2.task-k8s.io-1d580c48cbcd18e29e222541f0507c3fb28987d91d35b55bd98d39e7d7c21917-rootfs.mount - /run/containerd/io.containerd.runtime.v2.task/k8s.io/1d580c48cbcd18e29e222541f0507c3fb28987d91d35b55bd98d39e7d7c21917/rootfs
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:06:31 UTC; 21h ago
    Where: /run/containerd/io.containerd.runtime.v2.task/k8s.io/1d580c48cbcd18e29e222541f0507c3fb28987d91d35b55bd98d39e7d7c21917/rootfs
     What: overlay
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.runtime.v2.task-k8s.io-1d580c48cbcd18e29e222541f0507c3fb28987d91d35b55bd98d39e7d7c21917-rootfs.mount

● run-containerd-io.containerd.runtime.v2.task-k8s.io-204e2017be70a182f8109ac13fcc46cc8373a95064050894e81e2a102817d84b-rootfs.mount - /run/containerd/io.containerd.runtime.v2.task/k8s.io/204e2017be70a182f8109ac13fcc46cc8373a95064050894e81e2a102817d84b/rootfs
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:16 UTC; 21h ago
    Where: /run/containerd/io.containerd.runtime.v2.task/k8s.io/204e2017be70a182f8109ac13fcc46cc8373a95064050894e81e2a102817d84b/rootfs
     What: overlay
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.runtime.v2.task-k8s.io-204e2017be70a182f8109ac13fcc46cc8373a95064050894e81e2a102817d84b-rootfs.mount

● run-containerd-io.containerd.runtime.v2.task-k8s.io-213f9d22cbd9897c1da8e5967af955d56e30f79c43d2fd15acb7e487c9d3c537-rootfs.mount - /run/containerd/io.containerd.runtime.v2.task/k8s.io/213f9d22cbd9897c1da8e5967af955d56e30f79c43d2fd15acb7e487c9d3c537/rootfs
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:09:08 UTC; 21h ago
    Where: /run/containerd/io.containerd.runtime.v2.task/k8s.io/213f9d22cbd9897c1da8e5967af955d56e30f79c43d2fd15acb7e487c9d3c537/rootfs
     What: overlay
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.runtime.v2.task-k8s.io-213f9d22cbd9897c1da8e5967af955d56e30f79c43d2fd15acb7e487c9d3c537-rootfs.mount

● run-containerd-io.containerd.runtime.v2.task-k8s.io-21d714746405044fd80d02b52d0146c4a8e8ea066da93341094adb56168511e9-rootfs.mount - /run/containerd/io.containerd.runtime.v2.task/k8s.io/21d714746405044fd80d02b52d0146c4a8e8ea066da93341094adb56168511e9/rootfs
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:15 UTC; 21h ago
    Where: /run/containerd/io.containerd.runtime.v2.task/k8s.io/21d714746405044fd80d02b52d0146c4a8e8ea066da93341094adb56168511e9/rootfs
     What: overlay
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.runtime.v2.task-k8s.io-21d714746405044fd80d02b52d0146c4a8e8ea066da93341094adb56168511e9-rootfs.mount

● run-containerd-io.containerd.runtime.v2.task-k8s.io-281948bceabba08c70eb15e4c693c86d3a1d4f4e96e38b8cf2590c39f52e9bb6-rootfs.mount - /run/containerd/io.containerd.runtime.v2.task/k8s.io/281948bceabba08c70eb15e4c693c86d3a1d4f4e96e38b8cf2590c39f52e9bb6/rootfs
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:56 UTC; 21h ago
    Where: /run/containerd/io.containerd.runtime.v2.task/k8s.io/281948bceabba08c70eb15e4c693c86d3a1d4f4e96e38b8cf2590c39f52e9bb6/rootfs
     What: overlay
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.runtime.v2.task-k8s.io-281948bceabba08c70eb15e4c693c86d3a1d4f4e96e38b8cf2590c39f52e9bb6-rootfs.mount

● run-containerd-io.containerd.runtime.v2.task-k8s.io-295e2808e5e27f8c47f04bab1615d4a6cc6d426d0019d6298316c10262abd4a5-rootfs.mount - /run/containerd/io.containerd.runtime.v2.task/k8s.io/295e2808e5e27f8c47f04bab1615d4a6cc6d426d0019d6298316c10262abd4a5/rootfs
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:08:39 UTC; 21h ago
    Where: /run/containerd/io.containerd.runtime.v2.task/k8s.io/295e2808e5e27f8c47f04bab1615d4a6cc6d426d0019d6298316c10262abd4a5/rootfs
     What: overlay
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.runtime.v2.task-k8s.io-295e2808e5e27f8c47f04bab1615d4a6cc6d426d0019d6298316c10262abd4a5-rootfs.mount

● run-containerd-io.containerd.runtime.v2.task-k8s.io-2ce57bc5a2491a58dd6362918c37ac6e80a3bd093e610da0db3cc4d9a6bc9fda-rootfs.mount - /run/containerd/io.containerd.runtime.v2.task/k8s.io/2ce57bc5a2491a58dd6362918c37ac6e80a3bd093e610da0db3cc4d9a6bc9fda/rootfs
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:46 UTC; 21h ago
    Where: /run/containerd/io.containerd.runtime.v2.task/k8s.io/2ce57bc5a2491a58dd6362918c37ac6e80a3bd093e610da0db3cc4d9a6bc9fda/rootfs
     What: overlay
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.runtime.v2.task-k8s.io-2ce57bc5a2491a58dd6362918c37ac6e80a3bd093e610da0db3cc4d9a6bc9fda-rootfs.mount

● run-containerd-io.containerd.runtime.v2.task-k8s.io-386b74e96bce466252755a2db8492a0aa80fb81245a7e11ab02cadcb615d4e35-rootfs.mount - /run/containerd/io.containerd.runtime.v2.task/k8s.io/386b74e96bce466252755a2db8492a0aa80fb81245a7e11ab02cadcb615d4e35/rootfs
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:15 UTC; 21h ago
    Where: /run/containerd/io.containerd.runtime.v2.task/k8s.io/386b74e96bce466252755a2db8492a0aa80fb81245a7e11ab02cadcb615d4e35/rootfs
     What: overlay
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.runtime.v2.task-k8s.io-386b74e96bce466252755a2db8492a0aa80fb81245a7e11ab02cadcb615d4e35-rootfs.mount

● run-containerd-io.containerd.runtime.v2.task-k8s.io-3ad1a6a103e84d28055fe44b77c7699f1de9ca54723853a94bc8fe5f7309cb85-rootfs.mount - /run/containerd/io.containerd.runtime.v2.task/k8s.io/3ad1a6a103e84d28055fe44b77c7699f1de9ca54723853a94bc8fe5f7309cb85/rootfs
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:09:12 UTC; 21h ago
    Where: /run/containerd/io.containerd.runtime.v2.task/k8s.io/3ad1a6a103e84d28055fe44b77c7699f1de9ca54723853a94bc8fe5f7309cb85/rootfs
     What: overlay
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.runtime.v2.task-k8s.io-3ad1a6a103e84d28055fe44b77c7699f1de9ca54723853a94bc8fe5f7309cb85-rootfs.mount

● run-containerd-io.containerd.runtime.v2.task-k8s.io-3ddcf70649aeed995d1fdce890e993ead6f5b073b7de2211d2f93de524d2a7b5-rootfs.mount - /run/containerd/io.containerd.runtime.v2.task/k8s.io/3ddcf70649aeed995d1fdce890e993ead6f5b073b7de2211d2f93de524d2a7b5/rootfs
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:08:53 UTC; 21h ago
    Where: /run/containerd/io.containerd.runtime.v2.task/k8s.io/3ddcf70649aeed995d1fdce890e993ead6f5b073b7de2211d2f93de524d2a7b5/rootfs
     What: overlay
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.runtime.v2.task-k8s.io-3ddcf70649aeed995d1fdce890e993ead6f5b073b7de2211d2f93de524d2a7b5-rootfs.mount

● run-containerd-io.containerd.runtime.v2.task-k8s.io-42840c0de72c59943724e2371a66aadbd0d28beaf75a202ec52c78ab12091840-rootfs.mount - /run/containerd/io.containerd.runtime.v2.task/k8s.io/42840c0de72c59943724e2371a66aadbd0d28beaf75a202ec52c78ab12091840/rootfs
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:09:05 UTC; 21h ago
    Where: /run/containerd/io.containerd.runtime.v2.task/k8s.io/42840c0de72c59943724e2371a66aadbd0d28beaf75a202ec52c78ab12091840/rootfs
     What: overlay
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.runtime.v2.task-k8s.io-42840c0de72c59943724e2371a66aadbd0d28beaf75a202ec52c78ab12091840-rootfs.mount

● run-containerd-io.containerd.runtime.v2.task-k8s.io-4410a4c85c7991ba762df13284942daddcb3f54a86de129e666c5728aaaa401c-rootfs.mount - /run/containerd/io.containerd.runtime.v2.task/k8s.io/4410a4c85c7991ba762df13284942daddcb3f54a86de129e666c5728aaaa401c/rootfs
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:16 UTC; 21h ago
    Where: /run/containerd/io.containerd.runtime.v2.task/k8s.io/4410a4c85c7991ba762df13284942daddcb3f54a86de129e666c5728aaaa401c/rootfs
     What: overlay
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.runtime.v2.task-k8s.io-4410a4c85c7991ba762df13284942daddcb3f54a86de129e666c5728aaaa401c-rootfs.mount

● run-containerd-io.containerd.runtime.v2.task-k8s.io-4756130fd5ad708a904a99a0cad190c227812ba5f85f488054b470d5d9ab4fbb-rootfs.mount - /run/containerd/io.containerd.runtime.v2.task/k8s.io/4756130fd5ad708a904a99a0cad190c227812ba5f85f488054b470d5d9ab4fbb/rootfs
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:08:52 UTC; 21h ago
    Where: /run/containerd/io.containerd.runtime.v2.task/k8s.io/4756130fd5ad708a904a99a0cad190c227812ba5f85f488054b470d5d9ab4fbb/rootfs
     What: overlay
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.runtime.v2.task-k8s.io-4756130fd5ad708a904a99a0cad190c227812ba5f85f488054b470d5d9ab4fbb-rootfs.mount

● run-containerd-io.containerd.runtime.v2.task-k8s.io-61c556810ef8dbb1aed7ea76255af0833233248a82e667d07e0b879d365b778f-rootfs.mount - /run/containerd/io.containerd.runtime.v2.task/k8s.io/61c556810ef8dbb1aed7ea76255af0833233248a82e667d07e0b879d365b778f/rootfs
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:06:31 UTC; 21h ago
    Where: /run/containerd/io.containerd.runtime.v2.task/k8s.io/61c556810ef8dbb1aed7ea76255af0833233248a82e667d07e0b879d365b778f/rootfs
     What: overlay
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.runtime.v2.task-k8s.io-61c556810ef8dbb1aed7ea76255af0833233248a82e667d07e0b879d365b778f-rootfs.mount

● run-containerd-io.containerd.runtime.v2.task-k8s.io-6633e7d49882ec93c836eb9e2e2b57e3ede0d2cbfa2be9b417cc1f339e7241f5-rootfs.mount - /run/containerd/io.containerd.runtime.v2.task/k8s.io/6633e7d49882ec93c836eb9e2e2b57e3ede0d2cbfa2be9b417cc1f339e7241f5/rootfs
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:15 UTC; 21h ago
    Where: /run/containerd/io.containerd.runtime.v2.task/k8s.io/6633e7d49882ec93c836eb9e2e2b57e3ede0d2cbfa2be9b417cc1f339e7241f5/rootfs
     What: overlay
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.runtime.v2.task-k8s.io-6633e7d49882ec93c836eb9e2e2b57e3ede0d2cbfa2be9b417cc1f339e7241f5-rootfs.mount

● run-containerd-io.containerd.runtime.v2.task-k8s.io-68567ef6b45f1055433d0f3e6fbcd064ef9bb98190dd051f69fd12cff808e30f-rootfs.mount - /run/containerd/io.containerd.runtime.v2.task/k8s.io/68567ef6b45f1055433d0f3e6fbcd064ef9bb98190dd051f69fd12cff808e30f/rootfs
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:08:41 UTC; 21h ago
    Where: /run/containerd/io.containerd.runtime.v2.task/k8s.io/68567ef6b45f1055433d0f3e6fbcd064ef9bb98190dd051f69fd12cff808e30f/rootfs
     What: overlay
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.runtime.v2.task-k8s.io-68567ef6b45f1055433d0f3e6fbcd064ef9bb98190dd051f69fd12cff808e30f-rootfs.mount

● run-containerd-io.containerd.runtime.v2.task-k8s.io-6ae50f2a676c640e423edd4d4bf7cff29770d8a9e82293d7113d8fcef8912e53-rootfs.mount - /run/containerd/io.containerd.runtime.v2.task/k8s.io/6ae50f2a676c640e423edd4d4bf7cff29770d8a9e82293d7113d8fcef8912e53/rootfs
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:09:11 UTC; 21h ago
    Where: /run/containerd/io.containerd.runtime.v2.task/k8s.io/6ae50f2a676c640e423edd4d4bf7cff29770d8a9e82293d7113d8fcef8912e53/rootfs
     What: overlay
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.runtime.v2.task-k8s.io-6ae50f2a676c640e423edd4d4bf7cff29770d8a9e82293d7113d8fcef8912e53-rootfs.mount

● run-containerd-io.containerd.runtime.v2.task-k8s.io-7149e251631bd2084f6135e3cb5b75287790d2f14422004e79a3c6f962e189b6-rootfs.mount - /run/containerd/io.containerd.runtime.v2.task/k8s.io/7149e251631bd2084f6135e3cb5b75287790d2f14422004e79a3c6f962e189b6/rootfs
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:06:31 UTC; 21h ago
    Where: /run/containerd/io.containerd.runtime.v2.task/k8s.io/7149e251631bd2084f6135e3cb5b75287790d2f14422004e79a3c6f962e189b6/rootfs
     What: overlay
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.runtime.v2.task-k8s.io-7149e251631bd2084f6135e3cb5b75287790d2f14422004e79a3c6f962e189b6-rootfs.mount

● run-containerd-io.containerd.runtime.v2.task-k8s.io-722977738813a4b4dba7b54d441862d21d5970ae817c4d9a8ce4634487fc6687-rootfs.mount - /run/containerd/io.containerd.runtime.v2.task/k8s.io/722977738813a4b4dba7b54d441862d21d5970ae817c4d9a8ce4634487fc6687/rootfs
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:08:39 UTC; 21h ago
    Where: /run/containerd/io.containerd.runtime.v2.task/k8s.io/722977738813a4b4dba7b54d441862d21d5970ae817c4d9a8ce4634487fc6687/rootfs
     What: overlay
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.runtime.v2.task-k8s.io-722977738813a4b4dba7b54d441862d21d5970ae817c4d9a8ce4634487fc6687-rootfs.mount

● run-containerd-io.containerd.runtime.v2.task-k8s.io-77a2d3f8700f25d768cab0b31d993da60466e1cfeb0c21451e503ba6b4caa4f0-rootfs.mount - /run/containerd/io.containerd.runtime.v2.task/k8s.io/77a2d3f8700f25d768cab0b31d993da60466e1cfeb0c21451e503ba6b4caa4f0/rootfs
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:56 UTC; 21h ago
    Where: /run/containerd/io.containerd.runtime.v2.task/k8s.io/77a2d3f8700f25d768cab0b31d993da60466e1cfeb0c21451e503ba6b4caa4f0/rootfs
     What: overlay
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.runtime.v2.task-k8s.io-77a2d3f8700f25d768cab0b31d993da60466e1cfeb0c21451e503ba6b4caa4f0-rootfs.mount

● run-containerd-io.containerd.runtime.v2.task-k8s.io-794245c75ab20b17bcd8b43a373fad017d3bd653db684caf3add11d14754276c-rootfs.mount - /run/containerd/io.containerd.runtime.v2.task/k8s.io/794245c75ab20b17bcd8b43a373fad017d3bd653db684caf3add11d14754276c/rootfs
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:09:10 UTC; 21h ago
    Where: /run/containerd/io.containerd.runtime.v2.task/k8s.io/794245c75ab20b17bcd8b43a373fad017d3bd653db684caf3add11d14754276c/rootfs
     What: overlay
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.runtime.v2.task-k8s.io-794245c75ab20b17bcd8b43a373fad017d3bd653db684caf3add11d14754276c-rootfs.mount

● run-containerd-io.containerd.runtime.v2.task-k8s.io-7c297996517c2ebdd207bc733957e7ea43d63c8630dab617c28d6518db1f0282-rootfs.mount - /run/containerd/io.containerd.runtime.v2.task/k8s.io/7c297996517c2ebdd207bc733957e7ea43d63c8630dab617c28d6518db1f0282/rootfs
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:09:13 UTC; 21h ago
    Where: /run/containerd/io.containerd.runtime.v2.task/k8s.io/7c297996517c2ebdd207bc733957e7ea43d63c8630dab617c28d6518db1f0282/rootfs
     What: overlay
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.runtime.v2.task-k8s.io-7c297996517c2ebdd207bc733957e7ea43d63c8630dab617c28d6518db1f0282-rootfs.mount

● run-containerd-io.containerd.runtime.v2.task-k8s.io-7df7db3e64340a5a722340e86d3ff57667247c0af41c3901b967d7282dcabb82-rootfs.mount - /run/containerd/io.containerd.runtime.v2.task/k8s.io/7df7db3e64340a5a722340e86d3ff57667247c0af41c3901b967d7282dcabb82/rootfs
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:55 UTC; 21h ago
    Where: /run/containerd/io.containerd.runtime.v2.task/k8s.io/7df7db3e64340a5a722340e86d3ff57667247c0af41c3901b967d7282dcabb82/rootfs
     What: overlay
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.runtime.v2.task-k8s.io-7df7db3e64340a5a722340e86d3ff57667247c0af41c3901b967d7282dcabb82-rootfs.mount

● run-containerd-io.containerd.runtime.v2.task-k8s.io-7e4e0613f3bdaa4e0315cfb163224a53ba0daec763fd26f1803fb7c0fe0b0c9a-rootfs.mount - /run/containerd/io.containerd.runtime.v2.task/k8s.io/7e4e0613f3bdaa4e0315cfb163224a53ba0daec763fd26f1803fb7c0fe0b0c9a/rootfs
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:15 UTC; 21h ago
    Where: /run/containerd/io.containerd.runtime.v2.task/k8s.io/7e4e0613f3bdaa4e0315cfb163224a53ba0daec763fd26f1803fb7c0fe0b0c9a/rootfs
     What: overlay
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.runtime.v2.task-k8s.io-7e4e0613f3bdaa4e0315cfb163224a53ba0daec763fd26f1803fb7c0fe0b0c9a-rootfs.mount

● run-containerd-io.containerd.runtime.v2.task-k8s.io-80162971b81ee8611d86c3f717d3087bd3b3f9ccb2ceb918d89e0bbc087e8a45-rootfs.mount - /run/containerd/io.containerd.runtime.v2.task/k8s.io/80162971b81ee8611d86c3f717d3087bd3b3f9ccb2ceb918d89e0bbc087e8a45/rootfs
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:16 UTC; 21h ago
    Where: /run/containerd/io.containerd.runtime.v2.task/k8s.io/80162971b81ee8611d86c3f717d3087bd3b3f9ccb2ceb918d89e0bbc087e8a45/rootfs
     What: overlay
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.runtime.v2.task-k8s.io-80162971b81ee8611d86c3f717d3087bd3b3f9ccb2ceb918d89e0bbc087e8a45-rootfs.mount

● run-containerd-io.containerd.runtime.v2.task-k8s.io-82b42c91685186661522be88ab242d1fdfde63fc80e20d245e230c4a43b91076-rootfs.mount - /run/containerd/io.containerd.runtime.v2.task/k8s.io/82b42c91685186661522be88ab242d1fdfde63fc80e20d245e230c4a43b91076/rootfs
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:15 UTC; 21h ago
    Where: /run/containerd/io.containerd.runtime.v2.task/k8s.io/82b42c91685186661522be88ab242d1fdfde63fc80e20d245e230c4a43b91076/rootfs
     What: overlay
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.runtime.v2.task-k8s.io-82b42c91685186661522be88ab242d1fdfde63fc80e20d245e230c4a43b91076-rootfs.mount

● run-containerd-io.containerd.runtime.v2.task-k8s.io-83cbb97e4fb7c9801bb5d1334c503eb1231e1f7903d8ed8f73c60a62819d0bba-rootfs.mount - /run/containerd/io.containerd.runtime.v2.task/k8s.io/83cbb97e4fb7c9801bb5d1334c503eb1231e1f7903d8ed8f73c60a62819d0bba/rootfs
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:15 UTC; 21h ago
    Where: /run/containerd/io.containerd.runtime.v2.task/k8s.io/83cbb97e4fb7c9801bb5d1334c503eb1231e1f7903d8ed8f73c60a62819d0bba/rootfs
     What: overlay
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.runtime.v2.task-k8s.io-83cbb97e4fb7c9801bb5d1334c503eb1231e1f7903d8ed8f73c60a62819d0bba-rootfs.mount

● run-containerd-io.containerd.runtime.v2.task-k8s.io-86235cf3a3f9e3cfc02fc7f4888ecb5e3cfa9c0ec97e426d2ae1ce316b32c197-rootfs.mount - /run/containerd/io.containerd.runtime.v2.task/k8s.io/86235cf3a3f9e3cfc02fc7f4888ecb5e3cfa9c0ec97e426d2ae1ce316b32c197/rootfs
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:06:31 UTC; 21h ago
    Where: /run/containerd/io.containerd.runtime.v2.task/k8s.io/86235cf3a3f9e3cfc02fc7f4888ecb5e3cfa9c0ec97e426d2ae1ce316b32c197/rootfs
     What: overlay
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.runtime.v2.task-k8s.io-86235cf3a3f9e3cfc02fc7f4888ecb5e3cfa9c0ec97e426d2ae1ce316b32c197-rootfs.mount

● run-containerd-io.containerd.runtime.v2.task-k8s.io-8c433fb4c3fc17ad704bf6e54b939e9997d87dd176929ea899a6a96130f63b4f-rootfs.mount - /run/containerd/io.containerd.runtime.v2.task/k8s.io/8c433fb4c3fc17ad704bf6e54b939e9997d87dd176929ea899a6a96130f63b4f/rootfs
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:08:53 UTC; 21h ago
    Where: /run/containerd/io.containerd.runtime.v2.task/k8s.io/8c433fb4c3fc17ad704bf6e54b939e9997d87dd176929ea899a6a96130f63b4f/rootfs
     What: overlay
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.runtime.v2.task-k8s.io-8c433fb4c3fc17ad704bf6e54b939e9997d87dd176929ea899a6a96130f63b4f-rootfs.mount

● run-containerd-io.containerd.runtime.v2.task-k8s.io-8e6998139a624e92be3d5aec5ae450aef662eb452d6c1b589e89721e082040a1-rootfs.mount - /run/containerd/io.containerd.runtime.v2.task/k8s.io/8e6998139a624e92be3d5aec5ae450aef662eb452d6c1b589e89721e082040a1/rootfs
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:09:08 UTC; 21h ago
    Where: /run/containerd/io.containerd.runtime.v2.task/k8s.io/8e6998139a624e92be3d5aec5ae450aef662eb452d6c1b589e89721e082040a1/rootfs
     What: overlay
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.runtime.v2.task-k8s.io-8e6998139a624e92be3d5aec5ae450aef662eb452d6c1b589e89721e082040a1-rootfs.mount

● run-containerd-io.containerd.runtime.v2.task-k8s.io-8fbeae4f599c70001a79ff8190e4d36b6208c699e369b374812b39107c435fab-rootfs.mount - /run/containerd/io.containerd.runtime.v2.task/k8s.io/8fbeae4f599c70001a79ff8190e4d36b6208c699e369b374812b39107c435fab/rootfs
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:16 UTC; 21h ago
    Where: /run/containerd/io.containerd.runtime.v2.task/k8s.io/8fbeae4f599c70001a79ff8190e4d36b6208c699e369b374812b39107c435fab/rootfs
     What: overlay
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.runtime.v2.task-k8s.io-8fbeae4f599c70001a79ff8190e4d36b6208c699e369b374812b39107c435fab-rootfs.mount

● run-containerd-io.containerd.runtime.v2.task-k8s.io-900eab49b8c2ba07c64b2d9d3e37821d1dcf1ef16dcd585fb3feae19db6519a2-rootfs.mount - /run/containerd/io.containerd.runtime.v2.task/k8s.io/900eab49b8c2ba07c64b2d9d3e37821d1dcf1ef16dcd585fb3feae19db6519a2/rootfs
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:08:53 UTC; 21h ago
    Where: /run/containerd/io.containerd.runtime.v2.task/k8s.io/900eab49b8c2ba07c64b2d9d3e37821d1dcf1ef16dcd585fb3feae19db6519a2/rootfs
     What: overlay
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.runtime.v2.task-k8s.io-900eab49b8c2ba07c64b2d9d3e37821d1dcf1ef16dcd585fb3feae19db6519a2-rootfs.mount

● run-containerd-io.containerd.runtime.v2.task-k8s.io-92e14fd007887603c04b8210955ad6a1815b1fc9e0ee10ec0d6d862974e6748f-rootfs.mount - /run/containerd/io.containerd.runtime.v2.task/k8s.io/92e14fd007887603c04b8210955ad6a1815b1fc9e0ee10ec0d6d862974e6748f/rootfs
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:09:06 UTC; 21h ago
    Where: /run/containerd/io.containerd.runtime.v2.task/k8s.io/92e14fd007887603c04b8210955ad6a1815b1fc9e0ee10ec0d6d862974e6748f/rootfs
     What: overlay
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.runtime.v2.task-k8s.io-92e14fd007887603c04b8210955ad6a1815b1fc9e0ee10ec0d6d862974e6748f-rootfs.mount

● run-containerd-io.containerd.runtime.v2.task-k8s.io-93457ab6bb3bd0b0033873f9f7b5eb98d2daaa300555d4b050a89460ee8dbe06-rootfs.mount - /run/containerd/io.containerd.runtime.v2.task/k8s.io/93457ab6bb3bd0b0033873f9f7b5eb98d2daaa300555d4b050a89460ee8dbe06/rootfs
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:09:10 UTC; 21h ago
    Where: /run/containerd/io.containerd.runtime.v2.task/k8s.io/93457ab6bb3bd0b0033873f9f7b5eb98d2daaa300555d4b050a89460ee8dbe06/rootfs
     What: overlay
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.runtime.v2.task-k8s.io-93457ab6bb3bd0b0033873f9f7b5eb98d2daaa300555d4b050a89460ee8dbe06-rootfs.mount

● run-containerd-io.containerd.runtime.v2.task-k8s.io-945e7003246f1d85e24f7367e5a3332fcc3bfe456f7f7b9fcc12cd5399d0ed27-rootfs.mount - /run/containerd/io.containerd.runtime.v2.task/k8s.io/945e7003246f1d85e24f7367e5a3332fcc3bfe456f7f7b9fcc12cd5399d0ed27/rootfs
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:16 UTC; 21h ago
    Where: /run/containerd/io.containerd.runtime.v2.task/k8s.io/945e7003246f1d85e24f7367e5a3332fcc3bfe456f7f7b9fcc12cd5399d0ed27/rootfs
     What: overlay
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.runtime.v2.task-k8s.io-945e7003246f1d85e24f7367e5a3332fcc3bfe456f7f7b9fcc12cd5399d0ed27-rootfs.mount

● run-containerd-io.containerd.runtime.v2.task-k8s.io-97fe6cf2172869acf381caf38e9c77a80dd4ffb5ec9ffd342bb1297327a0c61c-rootfs.mount - /run/containerd/io.containerd.runtime.v2.task/k8s.io/97fe6cf2172869acf381caf38e9c77a80dd4ffb5ec9ffd342bb1297327a0c61c/rootfs
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:15 UTC; 21h ago
    Where: /run/containerd/io.containerd.runtime.v2.task/k8s.io/97fe6cf2172869acf381caf38e9c77a80dd4ffb5ec9ffd342bb1297327a0c61c/rootfs
     What: overlay
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.runtime.v2.task-k8s.io-97fe6cf2172869acf381caf38e9c77a80dd4ffb5ec9ffd342bb1297327a0c61c-rootfs.mount

● run-containerd-io.containerd.runtime.v2.task-k8s.io-9b7ebe191af1caee7c18acca4db8ef2c3c671c90ebed84dda923eaa5e4f16cad-rootfs.mount - /run/containerd/io.containerd.runtime.v2.task/k8s.io/9b7ebe191af1caee7c18acca4db8ef2c3c671c90ebed84dda923eaa5e4f16cad/rootfs
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:15 UTC; 21h ago
    Where: /run/containerd/io.containerd.runtime.v2.task/k8s.io/9b7ebe191af1caee7c18acca4db8ef2c3c671c90ebed84dda923eaa5e4f16cad/rootfs
     What: overlay
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.runtime.v2.task-k8s.io-9b7ebe191af1caee7c18acca4db8ef2c3c671c90ebed84dda923eaa5e4f16cad-rootfs.mount

● run-containerd-io.containerd.runtime.v2.task-k8s.io-9f25cd91eb885e3f94e2d800003dd991e969dd57e3c8fde23045761f19fffe62-rootfs.mount - /run/containerd/io.containerd.runtime.v2.task/k8s.io/9f25cd91eb885e3f94e2d800003dd991e969dd57e3c8fde23045761f19fffe62/rootfs
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:15 UTC; 21h ago
    Where: /run/containerd/io.containerd.runtime.v2.task/k8s.io/9f25cd91eb885e3f94e2d800003dd991e969dd57e3c8fde23045761f19fffe62/rootfs
     What: overlay
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.runtime.v2.task-k8s.io-9f25cd91eb885e3f94e2d800003dd991e969dd57e3c8fde23045761f19fffe62-rootfs.mount

● run-containerd-io.containerd.runtime.v2.task-k8s.io-aa5225d693f714f7e75445136b3b211b51dddf9ae10e9b6533a4b762901249dd-rootfs.mount - /run/containerd/io.containerd.runtime.v2.task/k8s.io/aa5225d693f714f7e75445136b3b211b51dddf9ae10e9b6533a4b762901249dd/rootfs
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:06:31 UTC; 21h ago
    Where: /run/containerd/io.containerd.runtime.v2.task/k8s.io/aa5225d693f714f7e75445136b3b211b51dddf9ae10e9b6533a4b762901249dd/rootfs
     What: overlay
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.runtime.v2.task-k8s.io-aa5225d693f714f7e75445136b3b211b51dddf9ae10e9b6533a4b762901249dd-rootfs.mount

● run-containerd-io.containerd.runtime.v2.task-k8s.io-aad3f498ca0001b47bbc7d89f01988c218e9b80d529077e7152d13b61403f0bb-rootfs.mount - /run/containerd/io.containerd.runtime.v2.task/k8s.io/aad3f498ca0001b47bbc7d89f01988c218e9b80d529077e7152d13b61403f0bb/rootfs
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:06:01 UTC; 21h ago
    Where: /run/containerd/io.containerd.runtime.v2.task/k8s.io/aad3f498ca0001b47bbc7d89f01988c218e9b80d529077e7152d13b61403f0bb/rootfs
     What: overlay
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.runtime.v2.task-k8s.io-aad3f498ca0001b47bbc7d89f01988c218e9b80d529077e7152d13b61403f0bb-rootfs.mount

● run-containerd-io.containerd.runtime.v2.task-k8s.io-acd873fc559f2941cb1dc8cf4355ff5bfd67df6a31d67ed10073845c4e40642b-rootfs.mount - /run/containerd/io.containerd.runtime.v2.task/k8s.io/acd873fc559f2941cb1dc8cf4355ff5bfd67df6a31d67ed10073845c4e40642b/rootfs
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:09:07 UTC; 21h ago
    Where: /run/containerd/io.containerd.runtime.v2.task/k8s.io/acd873fc559f2941cb1dc8cf4355ff5bfd67df6a31d67ed10073845c4e40642b/rootfs
     What: overlay
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.runtime.v2.task-k8s.io-acd873fc559f2941cb1dc8cf4355ff5bfd67df6a31d67ed10073845c4e40642b-rootfs.mount

● run-containerd-io.containerd.runtime.v2.task-k8s.io-ade64f14dfe395e14760ca207d41149a049d31cb2ec606827ff5a63c2670a816-rootfs.mount - /run/containerd/io.containerd.runtime.v2.task/k8s.io/ade64f14dfe395e14760ca207d41149a049d31cb2ec606827ff5a63c2670a816/rootfs
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:56 UTC; 21h ago
    Where: /run/containerd/io.containerd.runtime.v2.task/k8s.io/ade64f14dfe395e14760ca207d41149a049d31cb2ec606827ff5a63c2670a816/rootfs
     What: overlay
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.runtime.v2.task-k8s.io-ade64f14dfe395e14760ca207d41149a049d31cb2ec606827ff5a63c2670a816-rootfs.mount

● run-containerd-io.containerd.runtime.v2.task-k8s.io-b4f17c609632632a613da8bf68ce25789705dbcb014364fa3b7419ae46c316a7-rootfs.mount - /run/containerd/io.containerd.runtime.v2.task/k8s.io/b4f17c609632632a613da8bf68ce25789705dbcb014364fa3b7419ae46c316a7/rootfs
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:09:07 UTC; 21h ago
    Where: /run/containerd/io.containerd.runtime.v2.task/k8s.io/b4f17c609632632a613da8bf68ce25789705dbcb014364fa3b7419ae46c316a7/rootfs
     What: overlay
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.runtime.v2.task-k8s.io-b4f17c609632632a613da8bf68ce25789705dbcb014364fa3b7419ae46c316a7-rootfs.mount

● run-containerd-io.containerd.runtime.v2.task-k8s.io-b521bb9fc800522d5ddc3a138193d4a08f4d51ce79ee0d19a6b3e42d1d4a4ee4-rootfs.mount - /run/containerd/io.containerd.runtime.v2.task/k8s.io/b521bb9fc800522d5ddc3a138193d4a08f4d51ce79ee0d19a6b3e42d1d4a4ee4/rootfs
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:15 UTC; 21h ago
    Where: /run/containerd/io.containerd.runtime.v2.task/k8s.io/b521bb9fc800522d5ddc3a138193d4a08f4d51ce79ee0d19a6b3e42d1d4a4ee4/rootfs
     What: overlay
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.runtime.v2.task-k8s.io-b521bb9fc800522d5ddc3a138193d4a08f4d51ce79ee0d19a6b3e42d1d4a4ee4-rootfs.mount

● run-containerd-io.containerd.runtime.v2.task-k8s.io-b746563060551612d49e0b3e5ebfc32a5e99f5518389847c64602c6be63c8a9d-rootfs.mount - /run/containerd/io.containerd.runtime.v2.task/k8s.io/b746563060551612d49e0b3e5ebfc32a5e99f5518389847c64602c6be63c8a9d/rootfs
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:49 UTC; 21h ago
    Where: /run/containerd/io.containerd.runtime.v2.task/k8s.io/b746563060551612d49e0b3e5ebfc32a5e99f5518389847c64602c6be63c8a9d/rootfs
     What: overlay
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.runtime.v2.task-k8s.io-b746563060551612d49e0b3e5ebfc32a5e99f5518389847c64602c6be63c8a9d-rootfs.mount

● run-containerd-io.containerd.runtime.v2.task-k8s.io-ba239e148103274ece222541571cef554d8f50d8e19cc55f13679a6a1e2d2076-rootfs.mount - /run/containerd/io.containerd.runtime.v2.task/k8s.io/ba239e148103274ece222541571cef554d8f50d8e19cc55f13679a6a1e2d2076/rootfs
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:16 UTC; 21h ago
    Where: /run/containerd/io.containerd.runtime.v2.task/k8s.io/ba239e148103274ece222541571cef554d8f50d8e19cc55f13679a6a1e2d2076/rootfs
     What: overlay
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.runtime.v2.task-k8s.io-ba239e148103274ece222541571cef554d8f50d8e19cc55f13679a6a1e2d2076-rootfs.mount

● run-containerd-io.containerd.runtime.v2.task-k8s.io-bb8f9aba5a9cfe49eda5b1007ecac6c2228462f77806cb7801aa820df7b2f0a4-rootfs.mount - /run/containerd/io.containerd.runtime.v2.task/k8s.io/bb8f9aba5a9cfe49eda5b1007ecac6c2228462f77806cb7801aa820df7b2f0a4/rootfs
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:16 UTC; 21h ago
    Where: /run/containerd/io.containerd.runtime.v2.task/k8s.io/bb8f9aba5a9cfe49eda5b1007ecac6c2228462f77806cb7801aa820df7b2f0a4/rootfs
     What: overlay
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.runtime.v2.task-k8s.io-bb8f9aba5a9cfe49eda5b1007ecac6c2228462f77806cb7801aa820df7b2f0a4-rootfs.mount

● run-containerd-io.containerd.runtime.v2.task-k8s.io-bd2f5bcbccb5108ea75dd8fe3c107454e9443a9bf04d0d76ea762d52abf0de4d-rootfs.mount - /run/containerd/io.containerd.runtime.v2.task/k8s.io/bd2f5bcbccb5108ea75dd8fe3c107454e9443a9bf04d0d76ea762d52abf0de4d/rootfs
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:49 UTC; 21h ago
    Where: /run/containerd/io.containerd.runtime.v2.task/k8s.io/bd2f5bcbccb5108ea75dd8fe3c107454e9443a9bf04d0d76ea762d52abf0de4d/rootfs
     What: overlay
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.runtime.v2.task-k8s.io-bd2f5bcbccb5108ea75dd8fe3c107454e9443a9bf04d0d76ea762d52abf0de4d-rootfs.mount

● run-containerd-io.containerd.runtime.v2.task-k8s.io-bd5c5cd2d6fdb3330412b7f2a136f6c40255e3f17d2e12cb4e727dbadb0715a2-rootfs.mount - /run/containerd/io.containerd.runtime.v2.task/k8s.io/bd5c5cd2d6fdb3330412b7f2a136f6c40255e3f17d2e12cb4e727dbadb0715a2/rootfs
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:15 UTC; 21h ago
    Where: /run/containerd/io.containerd.runtime.v2.task/k8s.io/bd5c5cd2d6fdb3330412b7f2a136f6c40255e3f17d2e12cb4e727dbadb0715a2/rootfs
     What: overlay
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.runtime.v2.task-k8s.io-bd5c5cd2d6fdb3330412b7f2a136f6c40255e3f17d2e12cb4e727dbadb0715a2-rootfs.mount

● run-containerd-io.containerd.runtime.v2.task-k8s.io-bd7b68359e81ea9f2992b99fa251dc23bca03911465b8cfcea15dc22ce0a6612-rootfs.mount - /run/containerd/io.containerd.runtime.v2.task/k8s.io/bd7b68359e81ea9f2992b99fa251dc23bca03911465b8cfcea15dc22ce0a6612/rootfs
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:08:42 UTC; 21h ago
    Where: /run/containerd/io.containerd.runtime.v2.task/k8s.io/bd7b68359e81ea9f2992b99fa251dc23bca03911465b8cfcea15dc22ce0a6612/rootfs
     What: overlay
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.runtime.v2.task-k8s.io-bd7b68359e81ea9f2992b99fa251dc23bca03911465b8cfcea15dc22ce0a6612-rootfs.mount

● run-containerd-io.containerd.runtime.v2.task-k8s.io-bdd3f0371b20c26eabbf8c0c7141ac0992f046ae3f8d0673711e30a1078f8c0d-rootfs.mount - /run/containerd/io.containerd.runtime.v2.task/k8s.io/bdd3f0371b20c26eabbf8c0c7141ac0992f046ae3f8d0673711e30a1078f8c0d/rootfs
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:06:31 UTC; 21h ago
    Where: /run/containerd/io.containerd.runtime.v2.task/k8s.io/bdd3f0371b20c26eabbf8c0c7141ac0992f046ae3f8d0673711e30a1078f8c0d/rootfs
     What: overlay
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.runtime.v2.task-k8s.io-bdd3f0371b20c26eabbf8c0c7141ac0992f046ae3f8d0673711e30a1078f8c0d-rootfs.mount

● run-containerd-io.containerd.runtime.v2.task-k8s.io-bf7cf933a8910b9fb80cce11f576b2ce0282a4a972e5b0001806935cd7e4a995-rootfs.mount - /run/containerd/io.containerd.runtime.v2.task/k8s.io/bf7cf933a8910b9fb80cce11f576b2ce0282a4a972e5b0001806935cd7e4a995/rootfs
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:08:39 UTC; 21h ago
    Where: /run/containerd/io.containerd.runtime.v2.task/k8s.io/bf7cf933a8910b9fb80cce11f576b2ce0282a4a972e5b0001806935cd7e4a995/rootfs
     What: overlay
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.runtime.v2.task-k8s.io-bf7cf933a8910b9fb80cce11f576b2ce0282a4a972e5b0001806935cd7e4a995-rootfs.mount

● run-containerd-io.containerd.runtime.v2.task-k8s.io-c8bba1ece9eddd59495885e39f255ad4f9769ae89b1b7fc3903cce8d3c7f7916-rootfs.mount - /run/containerd/io.containerd.runtime.v2.task/k8s.io/c8bba1ece9eddd59495885e39f255ad4f9769ae89b1b7fc3903cce8d3c7f7916/rootfs
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:08:53 UTC; 21h ago
    Where: /run/containerd/io.containerd.runtime.v2.task/k8s.io/c8bba1ece9eddd59495885e39f255ad4f9769ae89b1b7fc3903cce8d3c7f7916/rootfs
     What: overlay
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.runtime.v2.task-k8s.io-c8bba1ece9eddd59495885e39f255ad4f9769ae89b1b7fc3903cce8d3c7f7916-rootfs.mount

● run-containerd-io.containerd.runtime.v2.task-k8s.io-d11743e05135a6b7daa52339ed04bbac1981ffc8018794fe1c83b1da7b182e49-rootfs.mount - /run/containerd/io.containerd.runtime.v2.task/k8s.io/d11743e05135a6b7daa52339ed04bbac1981ffc8018794fe1c83b1da7b182e49/rootfs
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:16 UTC; 21h ago
    Where: /run/containerd/io.containerd.runtime.v2.task/k8s.io/d11743e05135a6b7daa52339ed04bbac1981ffc8018794fe1c83b1da7b182e49/rootfs
     What: overlay
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.runtime.v2.task-k8s.io-d11743e05135a6b7daa52339ed04bbac1981ffc8018794fe1c83b1da7b182e49-rootfs.mount

● run-containerd-io.containerd.runtime.v2.task-k8s.io-d47371732f56f14f031e0b7b210ea0c0947efe1c4bf8fa70b5e0c3ebb888ecfa-rootfs.mount - /run/containerd/io.containerd.runtime.v2.task/k8s.io/d47371732f56f14f031e0b7b210ea0c0947efe1c4bf8fa70b5e0c3ebb888ecfa/rootfs
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:15 UTC; 21h ago
    Where: /run/containerd/io.containerd.runtime.v2.task/k8s.io/d47371732f56f14f031e0b7b210ea0c0947efe1c4bf8fa70b5e0c3ebb888ecfa/rootfs
     What: overlay
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.runtime.v2.task-k8s.io-d47371732f56f14f031e0b7b210ea0c0947efe1c4bf8fa70b5e0c3ebb888ecfa-rootfs.mount

● run-containerd-io.containerd.runtime.v2.task-k8s.io-d8b576a7151077a817ed7ccda23150d12c06cef14dff0d46cf1ddb8384a2c8b2-rootfs.mount - /run/containerd/io.containerd.runtime.v2.task/k8s.io/d8b576a7151077a817ed7ccda23150d12c06cef14dff0d46cf1ddb8384a2c8b2/rootfs
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:15 UTC; 21h ago
    Where: /run/containerd/io.containerd.runtime.v2.task/k8s.io/d8b576a7151077a817ed7ccda23150d12c06cef14dff0d46cf1ddb8384a2c8b2/rootfs
     What: overlay
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.runtime.v2.task-k8s.io-d8b576a7151077a817ed7ccda23150d12c06cef14dff0d46cf1ddb8384a2c8b2-rootfs.mount

● run-containerd-io.containerd.runtime.v2.task-k8s.io-d8fdd5b3c74ad34b276029b4db554a5ac1526f30d1ed42e889a2d11810f97e27-rootfs.mount - /run/containerd/io.containerd.runtime.v2.task/k8s.io/d8fdd5b3c74ad34b276029b4db554a5ac1526f30d1ed42e889a2d11810f97e27/rootfs
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:09:06 UTC; 21h ago
    Where: /run/containerd/io.containerd.runtime.v2.task/k8s.io/d8fdd5b3c74ad34b276029b4db554a5ac1526f30d1ed42e889a2d11810f97e27/rootfs
     What: overlay
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.runtime.v2.task-k8s.io-d8fdd5b3c74ad34b276029b4db554a5ac1526f30d1ed42e889a2d11810f97e27-rootfs.mount

● run-containerd-io.containerd.runtime.v2.task-k8s.io-e32dbeb647266922c1ed7cbf28d0793f5eb1684ee8ea9245e799311514fddcf8-rootfs.mount - /run/containerd/io.containerd.runtime.v2.task/k8s.io/e32dbeb647266922c1ed7cbf28d0793f5eb1684ee8ea9245e799311514fddcf8/rootfs
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:15 UTC; 21h ago
    Where: /run/containerd/io.containerd.runtime.v2.task/k8s.io/e32dbeb647266922c1ed7cbf28d0793f5eb1684ee8ea9245e799311514fddcf8/rootfs
     What: overlay
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.runtime.v2.task-k8s.io-e32dbeb647266922c1ed7cbf28d0793f5eb1684ee8ea9245e799311514fddcf8-rootfs.mount

● run-containerd-io.containerd.runtime.v2.task-k8s.io-e6c48aed1f74ce74c92c7fa0d44b00eb2cec24c30cb75bd0e44cc002979144d2-rootfs.mount - /run/containerd/io.containerd.runtime.v2.task/k8s.io/e6c48aed1f74ce74c92c7fa0d44b00eb2cec24c30cb75bd0e44cc002979144d2/rootfs
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:15 UTC; 21h ago
    Where: /run/containerd/io.containerd.runtime.v2.task/k8s.io/e6c48aed1f74ce74c92c7fa0d44b00eb2cec24c30cb75bd0e44cc002979144d2/rootfs
     What: overlay
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.runtime.v2.task-k8s.io-e6c48aed1f74ce74c92c7fa0d44b00eb2cec24c30cb75bd0e44cc002979144d2-rootfs.mount

● run-containerd-io.containerd.runtime.v2.task-k8s.io-e77396127a5205ad7263b1715ab488deed253092cf27f541d1d3a85180edc5a9-rootfs.mount - /run/containerd/io.containerd.runtime.v2.task/k8s.io/e77396127a5205ad7263b1715ab488deed253092cf27f541d1d3a85180edc5a9/rootfs
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:16 UTC; 21h ago
    Where: /run/containerd/io.containerd.runtime.v2.task/k8s.io/e77396127a5205ad7263b1715ab488deed253092cf27f541d1d3a85180edc5a9/rootfs
     What: overlay
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.runtime.v2.task-k8s.io-e77396127a5205ad7263b1715ab488deed253092cf27f541d1d3a85180edc5a9-rootfs.mount

● run-containerd-io.containerd.runtime.v2.task-k8s.io-eb0550778838139057fd93a2269c6ce7f78e4684614f299360ca096e1c538630-rootfs.mount - /run/containerd/io.containerd.runtime.v2.task/k8s.io/eb0550778838139057fd93a2269c6ce7f78e4684614f299360ca096e1c538630/rootfs
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:09:06 UTC; 21h ago
    Where: /run/containerd/io.containerd.runtime.v2.task/k8s.io/eb0550778838139057fd93a2269c6ce7f78e4684614f299360ca096e1c538630/rootfs
     What: overlay
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.runtime.v2.task-k8s.io-eb0550778838139057fd93a2269c6ce7f78e4684614f299360ca096e1c538630-rootfs.mount

● run-containerd-io.containerd.runtime.v2.task-k8s.io-ecb389f6e488e34ee05de07d5ba0b6a865880613de9512f762d02a25ef996982-rootfs.mount - /run/containerd/io.containerd.runtime.v2.task/k8s.io/ecb389f6e488e34ee05de07d5ba0b6a865880613de9512f762d02a25ef996982/rootfs
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:08:52 UTC; 21h ago
    Where: /run/containerd/io.containerd.runtime.v2.task/k8s.io/ecb389f6e488e34ee05de07d5ba0b6a865880613de9512f762d02a25ef996982/rootfs
     What: overlay
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.runtime.v2.task-k8s.io-ecb389f6e488e34ee05de07d5ba0b6a865880613de9512f762d02a25ef996982-rootfs.mount

● run-containerd-io.containerd.runtime.v2.task-k8s.io-f15a73eb2d9863156cc92fde623fb7c9f291dbcc6f4fd68c05f1d72e948c907e-rootfs.mount - /run/containerd/io.containerd.runtime.v2.task/k8s.io/f15a73eb2d9863156cc92fde623fb7c9f291dbcc6f4fd68c05f1d72e948c907e/rootfs
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:08:53 UTC; 21h ago
    Where: /run/containerd/io.containerd.runtime.v2.task/k8s.io/f15a73eb2d9863156cc92fde623fb7c9f291dbcc6f4fd68c05f1d72e948c907e/rootfs
     What: overlay
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.runtime.v2.task-k8s.io-f15a73eb2d9863156cc92fde623fb7c9f291dbcc6f4fd68c05f1d72e948c907e-rootfs.mount

● run-containerd-io.containerd.runtime.v2.task-k8s.io-f1e21bbbc7cb957da13db6b46c64353a04a7c1d1fd670772e7adc9faf6e78fa3-rootfs.mount - /run/containerd/io.containerd.runtime.v2.task/k8s.io/f1e21bbbc7cb957da13db6b46c64353a04a7c1d1fd670772e7adc9faf6e78fa3/rootfs
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:16 UTC; 21h ago
    Where: /run/containerd/io.containerd.runtime.v2.task/k8s.io/f1e21bbbc7cb957da13db6b46c64353a04a7c1d1fd670772e7adc9faf6e78fa3/rootfs
     What: overlay
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.runtime.v2.task-k8s.io-f1e21bbbc7cb957da13db6b46c64353a04a7c1d1fd670772e7adc9faf6e78fa3-rootfs.mount

● run-containerd-io.containerd.runtime.v2.task-k8s.io-f35a227c889252874195cfa019db37de8dd32f8c0a56e2dabdc9040729bdebed-rootfs.mount - /run/containerd/io.containerd.runtime.v2.task/k8s.io/f35a227c889252874195cfa019db37de8dd32f8c0a56e2dabdc9040729bdebed/rootfs
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:15 UTC; 21h ago
    Where: /run/containerd/io.containerd.runtime.v2.task/k8s.io/f35a227c889252874195cfa019db37de8dd32f8c0a56e2dabdc9040729bdebed/rootfs
     What: overlay
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.runtime.v2.task-k8s.io-f35a227c889252874195cfa019db37de8dd32f8c0a56e2dabdc9040729bdebed-rootfs.mount

● run-containerd-io.containerd.runtime.v2.task-k8s.io-f4065ad589e13db0b84c7391204b93094e65cb429fb782299eb826113accb8ea-rootfs.mount - /run/containerd/io.containerd.runtime.v2.task/k8s.io/f4065ad589e13db0b84c7391204b93094e65cb429fb782299eb826113accb8ea/rootfs
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:15 UTC; 21h ago
    Where: /run/containerd/io.containerd.runtime.v2.task/k8s.io/f4065ad589e13db0b84c7391204b93094e65cb429fb782299eb826113accb8ea/rootfs
     What: overlay
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.runtime.v2.task-k8s.io-f4065ad589e13db0b84c7391204b93094e65cb429fb782299eb826113accb8ea-rootfs.mount

● run-containerd-io.containerd.runtime.v2.task-k8s.io-fcac4959fbbd538807c041a643e8ae251eb6ca11af5c1415125b37871a5102e1-rootfs.mount - /run/containerd/io.containerd.runtime.v2.task/k8s.io/fcac4959fbbd538807c041a643e8ae251eb6ca11af5c1415125b37871a5102e1/rootfs
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:16 UTC; 21h ago
    Where: /run/containerd/io.containerd.runtime.v2.task/k8s.io/fcac4959fbbd538807c041a643e8ae251eb6ca11af5c1415125b37871a5102e1/rootfs
     What: overlay
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.runtime.v2.task-k8s.io-fcac4959fbbd538807c041a643e8ae251eb6ca11af5c1415125b37871a5102e1-rootfs.mount

● run-containerd-io.containerd.runtime.v2.task-k8s.io-fe01a89fedb057cc98e77dfcaadada33e8d2afe86478b11f263d5f2e0a9cdf70-rootfs.mount - /run/containerd/io.containerd.runtime.v2.task/k8s.io/fe01a89fedb057cc98e77dfcaadada33e8d2afe86478b11f263d5f2e0a9cdf70/rootfs
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:15 UTC; 21h ago
    Where: /run/containerd/io.containerd.runtime.v2.task/k8s.io/fe01a89fedb057cc98e77dfcaadada33e8d2afe86478b11f263d5f2e0a9cdf70/rootfs
     What: overlay
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.runtime.v2.task-k8s.io-fe01a89fedb057cc98e77dfcaadada33e8d2afe86478b11f263d5f2e0a9cdf70-rootfs.mount

● run-containerd-io.containerd.runtime.v2.task-k8s.io-fe3a782b6ddb59dc473fbf1bfad0404b1d5a2a9b06b8dc15cc7474ea935194c5-rootfs.mount - /run/containerd/io.containerd.runtime.v2.task/k8s.io/fe3a782b6ddb59dc473fbf1bfad0404b1d5a2a9b06b8dc15cc7474ea935194c5/rootfs
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:09:11 UTC; 21h ago
    Where: /run/containerd/io.containerd.runtime.v2.task/k8s.io/fe3a782b6ddb59dc473fbf1bfad0404b1d5a2a9b06b8dc15cc7474ea935194c5/rootfs
     What: overlay
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.runtime.v2.task-k8s.io-fe3a782b6ddb59dc473fbf1bfad0404b1d5a2a9b06b8dc15cc7474ea935194c5-rootfs.mount

● run-containerd-io.containerd.runtime.v2.task-k8s.io-fe68ab2c54c4e080fa59d2704214383fbd0d9afeee08b6f32fbda016dfe38caf-rootfs.mount - /run/containerd/io.containerd.runtime.v2.task/k8s.io/fe68ab2c54c4e080fa59d2704214383fbd0d9afeee08b6f32fbda016dfe38caf/rootfs
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:09:06 UTC; 21h ago
    Where: /run/containerd/io.containerd.runtime.v2.task/k8s.io/fe68ab2c54c4e080fa59d2704214383fbd0d9afeee08b6f32fbda016dfe38caf/rootfs
     What: overlay
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-containerd-io.containerd.runtime.v2.task-k8s.io-fe68ab2c54c4e080fa59d2704214383fbd0d9afeee08b6f32fbda016dfe38caf-rootfs.mount

● run-netns-cni\x2d03a85eca\x2d3519\x2dbd81\x2d54b1\x2dacb6905a7e24.mount - /run/netns/cni-03a85eca-3519-bd81-54b1-acb6905a7e24
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:15 UTC; 21h ago
    Where: /run/netns/cni-03a85eca-3519-bd81-54b1-acb6905a7e24
     What: nsfs
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-netns-cni\x2d03a85eca\x2d3519\x2dbd81\x2d54b1\x2dacb6905a7e24.mount

● run-netns-cni\x2d0acaf869\x2d40de\x2da254\x2d8d1d\x2d7be3d8a0ed2c.mount - /run/netns/cni-0acaf869-40de-a254-8d1d-7be3d8a0ed2c
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:08:41 UTC; 21h ago
    Where: /run/netns/cni-0acaf869-40de-a254-8d1d-7be3d8a0ed2c
     What: nsfs
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-netns-cni\x2d0acaf869\x2d40de\x2da254\x2d8d1d\x2d7be3d8a0ed2c.mount

● run-netns-cni\x2d19ac231d\x2dc843\x2de559\x2dbe3f\x2db6295ccf8de2.mount - /run/netns/cni-19ac231d-c843-e559-be3f-b6295ccf8de2
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:15 UTC; 21h ago
    Where: /run/netns/cni-19ac231d-c843-e559-be3f-b6295ccf8de2
     What: nsfs
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-netns-cni\x2d19ac231d\x2dc843\x2de559\x2dbe3f\x2db6295ccf8de2.mount

● run-netns-cni\x2d29b4dec7\x2d9eb9\x2dd78d\x2d88fd\x2dc707faa3af4c.mount - /run/netns/cni-29b4dec7-9eb9-d78d-88fd-c707faa3af4c
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:08:39 UTC; 21h ago
    Where: /run/netns/cni-29b4dec7-9eb9-d78d-88fd-c707faa3af4c
     What: nsfs
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-netns-cni\x2d29b4dec7\x2d9eb9\x2dd78d\x2d88fd\x2dc707faa3af4c.mount

● run-netns-cni\x2d33c9bd64\x2df94f\x2de7f3\x2dd4a0\x2d814248fc76e7.mount - /run/netns/cni-33c9bd64-f94f-e7f3-d4a0-814248fc76e7
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:15 UTC; 21h ago
    Where: /run/netns/cni-33c9bd64-f94f-e7f3-d4a0-814248fc76e7
     What: nsfs
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-netns-cni\x2d33c9bd64\x2df94f\x2de7f3\x2dd4a0\x2d814248fc76e7.mount

● run-netns-cni\x2d34583cba\x2d9292\x2de688\x2dc483\x2deb33b5500982.mount - /run/netns/cni-34583cba-9292-e688-c483-eb33b5500982
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:06:31 UTC; 21h ago
    Where: /run/netns/cni-34583cba-9292-e688-c483-eb33b5500982
     What: nsfs
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-netns-cni\x2d34583cba\x2d9292\x2de688\x2dc483\x2deb33b5500982.mount

● run-netns-cni\x2d3bebe15e\x2d87e5\x2dea94\x2d13d8\x2da7a368220828.mount - /run/netns/cni-3bebe15e-87e5-ea94-13d8-a7a368220828
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:49 UTC; 21h ago
    Where: /run/netns/cni-3bebe15e-87e5-ea94-13d8-a7a368220828
     What: nsfs
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-netns-cni\x2d3bebe15e\x2d87e5\x2dea94\x2d13d8\x2da7a368220828.mount

● run-netns-cni\x2d58ec732f\x2d6ad5\x2d15da\x2dcae7\x2db9933e40fa40.mount - /run/netns/cni-58ec732f-6ad5-15da-cae7-b9933e40fa40
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:16 UTC; 21h ago
    Where: /run/netns/cni-58ec732f-6ad5-15da-cae7-b9933e40fa40
     What: nsfs
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-netns-cni\x2d58ec732f\x2d6ad5\x2d15da\x2dcae7\x2db9933e40fa40.mount

● run-netns-cni\x2d5c620556\x2dc80e\x2d03e1\x2da05c\x2d3f60d58239c8.mount - /run/netns/cni-5c620556-c80e-03e1-a05c-3f60d58239c8
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:15 UTC; 21h ago
    Where: /run/netns/cni-5c620556-c80e-03e1-a05c-3f60d58239c8
     What: nsfs
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-netns-cni\x2d5c620556\x2dc80e\x2d03e1\x2da05c\x2d3f60d58239c8.mount

● run-netns-cni\x2d71b60ffe\x2d98a1\x2da12d\x2d66c0\x2d01a75d1c91d2.mount - /run/netns/cni-71b60ffe-98a1-a12d-66c0-01a75d1c91d2
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:06:31 UTC; 21h ago
    Where: /run/netns/cni-71b60ffe-98a1-a12d-66c0-01a75d1c91d2
     What: nsfs
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-netns-cni\x2d71b60ffe\x2d98a1\x2da12d\x2d66c0\x2d01a75d1c91d2.mount

● run-netns-cni\x2d74f60784\x2dbf9c\x2de91e\x2db9e5\x2d3396f4072574.mount - /run/netns/cni-74f60784-bf9c-e91e-b9e5-3396f4072574
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:08:53 UTC; 21h ago
    Where: /run/netns/cni-74f60784-bf9c-e91e-b9e5-3396f4072574
     What: nsfs
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-netns-cni\x2d74f60784\x2dbf9c\x2de91e\x2db9e5\x2d3396f4072574.mount

● run-netns-cni\x2d78dc31f7\x2d2a46\x2d126d\x2d4ffe\x2d0e38dcc5b417.mount - /run/netns/cni-78dc31f7-2a46-126d-4ffe-0e38dcc5b417
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:15 UTC; 21h ago
    Where: /run/netns/cni-78dc31f7-2a46-126d-4ffe-0e38dcc5b417
     What: nsfs
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-netns-cni\x2d78dc31f7\x2d2a46\x2d126d\x2d4ffe\x2d0e38dcc5b417.mount

● run-netns-cni\x2d79376bb4\x2d67bf\x2df77d\x2d4cde\x2d080ae93b24c6.mount - /run/netns/cni-79376bb4-67bf-f77d-4cde-080ae93b24c6
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:15 UTC; 21h ago
    Where: /run/netns/cni-79376bb4-67bf-f77d-4cde-080ae93b24c6
     What: nsfs
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-netns-cni\x2d79376bb4\x2d67bf\x2df77d\x2d4cde\x2d080ae93b24c6.mount

● run-netns-cni\x2d7cefe5ea\x2dd575\x2de3ef\x2d8c22\x2d39d543dd3ee6.mount - /run/netns/cni-7cefe5ea-d575-e3ef-8c22-39d543dd3ee6
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:55 UTC; 21h ago
    Where: /run/netns/cni-7cefe5ea-d575-e3ef-8c22-39d543dd3ee6
     What: nsfs
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-netns-cni\x2d7cefe5ea\x2dd575\x2de3ef\x2d8c22\x2d39d543dd3ee6.mount

● run-netns-cni\x2d9059af8c\x2d2cb7\x2d6ca8\x2d17f0\x2da5a8a72df9a8.mount - /run/netns/cni-9059af8c-2cb7-6ca8-17f0-a5a8a72df9a8
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:15 UTC; 21h ago
    Where: /run/netns/cni-9059af8c-2cb7-6ca8-17f0-a5a8a72df9a8
     What: nsfs
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-netns-cni\x2d9059af8c\x2d2cb7\x2d6ca8\x2d17f0\x2da5a8a72df9a8.mount

● run-netns-cni\x2d9320a390\x2d37f5\x2dca9d\x2daa28\x2d75ca569250c5.mount - /run/netns/cni-9320a390-37f5-ca9d-aa28-75ca569250c5
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:15 UTC; 21h ago
    Where: /run/netns/cni-9320a390-37f5-ca9d-aa28-75ca569250c5
     What: nsfs
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-netns-cni\x2d9320a390\x2d37f5\x2dca9d\x2daa28\x2d75ca569250c5.mount

● run-netns-cni\x2d9600e88c\x2dccbc\x2dae1b\x2dfc9d\x2d62ec09c800af.mount - /run/netns/cni-9600e88c-ccbc-ae1b-fc9d-62ec09c800af
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:15 UTC; 21h ago
    Where: /run/netns/cni-9600e88c-ccbc-ae1b-fc9d-62ec09c800af
     What: nsfs
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-netns-cni\x2d9600e88c\x2dccbc\x2dae1b\x2dfc9d\x2d62ec09c800af.mount

● run-netns-cni\x2d965bdda6\x2de9a6\x2d2b1f\x2d1eea\x2d6e338a5b9271.mount - /run/netns/cni-965bdda6-e9a6-2b1f-1eea-6e338a5b9271
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:08:52 UTC; 21h ago
    Where: /run/netns/cni-965bdda6-e9a6-2b1f-1eea-6e338a5b9271
     What: nsfs
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-netns-cni\x2d965bdda6\x2de9a6\x2d2b1f\x2d1eea\x2d6e338a5b9271.mount

● run-netns-cni\x2d99d93468\x2d14be\x2dddd3\x2d2dba\x2d40fe047165bb.mount - /run/netns/cni-99d93468-14be-ddd3-2dba-40fe047165bb
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:08:39 UTC; 21h ago
    Where: /run/netns/cni-99d93468-14be-ddd3-2dba-40fe047165bb
     What: nsfs
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-netns-cni\x2d99d93468\x2d14be\x2dddd3\x2d2dba\x2d40fe047165bb.mount

● run-netns-cni\x2da1d94c5c\x2d0718\x2d94e4\x2d24b9\x2d80f9835f2395.mount - /run/netns/cni-a1d94c5c-0718-94e4-24b9-80f9835f2395
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:56 UTC; 21h ago
    Where: /run/netns/cni-a1d94c5c-0718-94e4-24b9-80f9835f2395
     What: nsfs
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-netns-cni\x2da1d94c5c\x2d0718\x2d94e4\x2d24b9\x2d80f9835f2395.mount

● run-netns-cni\x2db8e90953\x2d3c83\x2dcf68\x2d792e\x2dd2b99ff42b20.mount - /run/netns/cni-b8e90953-3c83-cf68-792e-d2b99ff42b20
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:08:52 UTC; 21h ago
    Where: /run/netns/cni-b8e90953-3c83-cf68-792e-d2b99ff42b20
     What: nsfs
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-netns-cni\x2db8e90953\x2d3c83\x2dcf68\x2d792e\x2dd2b99ff42b20.mount

● run-netns-cni\x2dd4f2855c\x2d4183\x2db95a\x2d78af\x2deca3a4fecde8.mount - /run/netns/cni-d4f2855c-4183-b95a-78af-eca3a4fecde8
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:09:11 UTC; 21h ago
    Where: /run/netns/cni-d4f2855c-4183-b95a-78af-eca3a4fecde8
     What: nsfs
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-netns-cni\x2dd4f2855c\x2d4183\x2db95a\x2d78af\x2deca3a4fecde8.mount

● run-netns-cni\x2dd8ff14c3\x2dae72\x2d93f7\x2d4338\x2d6c79dbfe2b1c.mount - /run/netns/cni-d8ff14c3-ae72-93f7-4338-6c79dbfe2b1c
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:15 UTC; 21h ago
    Where: /run/netns/cni-d8ff14c3-ae72-93f7-4338-6c79dbfe2b1c
     What: nsfs
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-netns-cni\x2dd8ff14c3\x2dae72\x2d93f7\x2d4338\x2d6c79dbfe2b1c.mount

● run-netns-cni\x2dde0a1fc6\x2dce2c\x2d5aea\x2da63c\x2d94f2b4ae01ca.mount - /run/netns/cni-de0a1fc6-ce2c-5aea-a63c-94f2b4ae01ca
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:06:00 UTC; 21h ago
    Where: /run/netns/cni-de0a1fc6-ce2c-5aea-a63c-94f2b4ae01ca
     What: nsfs
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-netns-cni\x2dde0a1fc6\x2dce2c\x2d5aea\x2da63c\x2d94f2b4ae01ca.mount

● run-netns-cni\x2de76763b3\x2d8c84\x2d2072\x2d8a73\x2d421ed689b3b6.mount - /run/netns/cni-e76763b3-8c84-2072-8a73-421ed689b3b6
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:16 UTC; 21h ago
    Where: /run/netns/cni-e76763b3-8c84-2072-8a73-421ed689b3b6
     What: nsfs
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-netns-cni\x2de76763b3\x2d8c84\x2d2072\x2d8a73\x2d421ed689b3b6.mount

● run-netns-cni\x2de9cdd67a\x2ddac7\x2d8fc9\x2de83d\x2d67ac4b141242.mount - /run/netns/cni-e9cdd67a-dac7-8fc9-e83d-67ac4b141242
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:08:52 UTC; 21h ago
    Where: /run/netns/cni-e9cdd67a-dac7-8fc9-e83d-67ac4b141242
     What: nsfs
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-netns-cni\x2de9cdd67a\x2ddac7\x2d8fc9\x2de83d\x2d67ac4b141242.mount

● run-netns-cni\x2dfa3094c1\x2d793f\x2d879a\x2dc802\x2d0eb29392b279.mount - /run/netns/cni-fa3094c1-793f-879a-c802-0eb29392b279
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:06:30 UTC; 21h ago
    Where: /run/netns/cni-fa3094c1-793f-879a-c802-0eb29392b279
     What: nsfs
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/run-netns-cni\x2dfa3094c1\x2d793f\x2d879a\x2dc802\x2d0eb29392b279.mount

● sys-fs-fuse-connections.mount - FUSE Control File System
   Loaded: loaded (/usr/lib/systemd/system/sys-fs-fuse-connections.mount; static; vendor preset: disabled)
   Active: active (mounted) since Mon 2025-11-17 16:06:32 UTC; 21h ago
    Where: /sys/fs/fuse/connections
     What: fusectl
     Docs: https://www.kernel.org/doc/Documentation/filesystems/fuse.txt
           https://www.freedesktop.org/wiki/Software/systemd/APIFileSystems
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/sys-fs-fuse-connections.mount

Nov 17 16:06:32 vosflex.localdomain systemd[1]: Mounting FUSE Control File System...
Nov 17 16:06:32 vosflex.localdomain systemd[1]: Mounted FUSE Control File System.

● sys-kernel-config.mount - Kernel Configuration File System
   Loaded: loaded (/usr/lib/systemd/system/sys-kernel-config.mount; static; vendor preset: disabled)
   Active: active (mounted) since Mon 2025-11-17 16:03:48 UTC; 21h ago
    Where: /sys/kernel/config
     What: configfs
     Docs: https://www.kernel.org/doc/Documentation/filesystems/configfs/configfs.txt
           https://www.freedesktop.org/wiki/Software/systemd/APIFileSystems
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/sys-kernel-config.mount

Nov 17 16:03:48 localhost systemd[1]: Mounting Kernel Configuration File System...
Nov 17 16:03:48 localhost systemd[1]: Mounted Kernel Configuration File System.
Unit sysroot.mount could not be found.

● sys-kernel-debug-tracing.mount - /sys/kernel/debug/tracing
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:03:53 UTC; 21h ago
    Where: /sys/kernel/debug/tracing
     What: tracefs
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/sys-kernel-debug-tracing.mount

● sys-kernel-debug.mount - Kernel Debug File System
   Loaded: loaded (/usr/lib/systemd/system/sys-kernel-debug.mount; static; vendor preset: disabled)
   Active: active (mounted) since Mon 2025-11-17 16:03:50 UTC; 21h ago
    Where: /sys/kernel/debug
     What: debugfs
     Docs: https://www.kernel.org/doc/Documentation/filesystems/debugfs.txt
           https://www.freedesktop.org/wiki/Software/systemd/APIFileSystems
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/sys-kernel-debug.mount

● tmp.mount - Temporary Directory
   Loaded: loaded (/etc/systemd/system/tmp.mount; enabled; vendor preset: disabled)
   Active: active (mounted) since Mon 2025-11-17 16:03:50 UTC; 21h ago
    Where: /tmp
     What: tmpfs
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/tmp.mount

● update_files.mount - Bind mount to move update_files to sda7
   Loaded: loaded (/etc/systemd/system/update_files.mount; enabled; vendor preset: disabled)
   Active: active (mounted) since Mon 2025-11-17 16:03:51 UTC; 21h ago
    Where: /update_files
     What: /dev/sdb7
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/update_files.mount

● var-lib-cni.mount - CNI state directory
   Loaded: loaded (/etc/systemd/system/var-lib-cni.mount; enabled; vendor preset: disabled)
   Active: active (mounted) since Mon 2025-11-17 16:03:50 UTC; 21h ago
    Where: /var/lib/cni
     What: tmpfs
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/var-lib-cni.mount

● var-lib-containerd.mount - /var/lib/containerd
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:04:12 UTC; 21h ago
    Where: /var/lib/containerd
     What: /dev/mapper/vos_docker
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/var-lib-containerd.mount

● var-lib-dms_files.mount - Bind mount to move dms_files to sda7
   Loaded: loaded (/etc/systemd/system/var-lib-dms_files.mount; enabled; vendor preset: disabled)
   Active: active (mounted) since Mon 2025-11-17 16:03:51 UTC; 21h ago
    Where: /var/lib/dms_files
     What: /dev/sdb7
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/var-lib-dms_files.mount

● var-lib-dms_sw_download_files.mount - Bind mount to move dms_sw_download_files to sda7
   Loaded: loaded (/etc/systemd/system/var-lib-dms_sw_download_files.mount; enabled; vendor preset: disabled)
   Active: active (mounted) since Mon 2025-11-17 16:03:51 UTC; 21h ago
    Where: /var/lib/dms_sw_download_files
     What: /dev/sdb7
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/var-lib-dms_sw_download_files.mount

● var-lib-elasticsearch.mount - Bind mount to move elasticsearch to sda7
   Loaded: loaded (/etc/systemd/system/var-lib-elasticsearch.mount; enabled; vendor preset: disabled)
   Active: active (mounted) since Mon 2025-11-17 16:03:51 UTC; 21h ago
    Where: /var/lib/elasticsearch
     What: /dev/sdb7
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/var-lib-elasticsearch.mount

● var-lib-kubelet-pods-009abdc9\x2d2e33\x2d4e2e\x2d837f\x2dc454cee1b6cf-volumes-kubernetes.io\x7eempty\x2ddir-ingest\x2ddata.mount - /var/lib/kubelet/pods/009abdc9-2e33-4e2e-837f-c454cee1b6cf/volumes/kubernetes.io~empty-dir/ingest-data
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:08:41 UTC; 21h ago
    Where: /var/lib/kubelet/pods/009abdc9-2e33-4e2e-837f-c454cee1b6cf/volumes/kubernetes.io~empty-dir/ingest-data
     What: tmpfs
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/var-lib-kubelet-pods-009abdc9\x2d2e33\x2d4e2e\x2d837f\x2dc454cee1b6cf-volumes-kubernetes.io\x7eempty\x2ddir-ingest\x2ddata.mount

● var-lib-kubelet-pods-009abdc9\x2d2e33\x2d4e2e\x2d837f\x2dc454cee1b6cf-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2dlvv29.mount - /var/lib/kubelet/pods/009abdc9-2e33-4e2e-837f-c454cee1b6cf/volumes/kubernetes.io~projected/kube-api-access-lvv29
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:08:41 UTC; 21h ago
    Where: /var/lib/kubelet/pods/009abdc9-2e33-4e2e-837f-c454cee1b6cf/volumes/kubernetes.io~projected/kube-api-access-lvv29
     What: tmpfs
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/var-lib-kubelet-pods-009abdc9\x2d2e33\x2d4e2e\x2d837f\x2dc454cee1b6cf-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2dlvv29.mount

● var-lib-kubelet-pods-035d6c70\x2dd4ea\x2d4b47\x2d8cbd\x2d9045b8424260-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2dwbx58.mount - /var/lib/kubelet/pods/035d6c70-d4ea-4b47-8cbd-9045b8424260/volumes/kubernetes.io~projected/kube-api-access-wbx58
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:08:51 UTC; 21h ago
    Where: /var/lib/kubelet/pods/035d6c70-d4ea-4b47-8cbd-9045b8424260/volumes/kubernetes.io~projected/kube-api-access-wbx58
     What: tmpfs
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/var-lib-kubelet-pods-035d6c70\x2dd4ea\x2d4b47\x2d8cbd\x2d9045b8424260-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2dwbx58.mount

● var-lib-kubelet-pods-03911f12\x2d9dd7\x2d491a\x2db444\x2da79c18d5bc39-volume\x2dsubpaths-pgdb\x2ddata-pgdb-0.mount - /var/lib/kubelet/pods/03911f12-9dd7-491a-b444-a79c18d5bc39/volume-subpaths/pgdb-data/pgdb/0
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:56 UTC; 21h ago
    Where: /var/lib/kubelet/pods/03911f12-9dd7-491a-b444-a79c18d5bc39/volume-subpaths/pgdb-data/pgdb/0
     What: /dev/sdb7
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/var-lib-kubelet-pods-03911f12\x2d9dd7\x2d491a\x2db444\x2da79c18d5bc39-volume\x2dsubpaths-pgdb\x2ddata-pgdb-0.mount

● var-lib-kubelet-pods-03911f12\x2d9dd7\x2d491a\x2db444\x2da79c18d5bc39-volume\x2dsubpaths-pgdb\x2ddata-pgdb-1.mount - /var/lib/kubelet/pods/03911f12-9dd7-491a-b444-a79c18d5bc39/volume-subpaths/pgdb-data/pgdb/1
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:56 UTC; 21h ago
    Where: /var/lib/kubelet/pods/03911f12-9dd7-491a-b444-a79c18d5bc39/volume-subpaths/pgdb-data/pgdb/1
     What: /dev/sdb7
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/var-lib-kubelet-pods-03911f12\x2d9dd7\x2d491a\x2db444\x2da79c18d5bc39-volume\x2dsubpaths-pgdb\x2ddata-pgdb-1.mount

● var-lib-kubelet-pods-03911f12\x2d9dd7\x2d491a\x2db444\x2da79c18d5bc39-volume\x2dsubpaths-pgdb\x2ddata-pgdb-2.mount - /var/lib/kubelet/pods/03911f12-9dd7-491a-b444-a79c18d5bc39/volume-subpaths/pgdb-data/pgdb/2
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:56 UTC; 21h ago
    Where: /var/lib/kubelet/pods/03911f12-9dd7-491a-b444-a79c18d5bc39/volume-subpaths/pgdb-data/pgdb/2
     What: /dev/sdb7
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/var-lib-kubelet-pods-03911f12\x2d9dd7\x2d491a\x2db444\x2da79c18d5bc39-volume\x2dsubpaths-pgdb\x2ddata-pgdb-2.mount

● var-lib-kubelet-pods-03911f12\x2d9dd7\x2d491a\x2db444\x2da79c18d5bc39-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2dcbfc4.mount - /var/lib/kubelet/pods/03911f12-9dd7-491a-b444-a79c18d5bc39/volumes/kubernetes.io~projected/kube-api-access-cbfc4
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:55 UTC; 21h ago
    Where: /var/lib/kubelet/pods/03911f12-9dd7-491a-b444-a79c18d5bc39/volumes/kubernetes.io~projected/kube-api-access-cbfc4
     What: tmpfs
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/var-lib-kubelet-pods-03911f12\x2d9dd7\x2d491a\x2db444\x2da79c18d5bc39-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2dcbfc4.mount

● var-lib-kubelet-pods-096bba8f\x2d0beb\x2d47cb\x2dac9c\x2dcc22c9bed927-volume\x2dsubpaths-config-nginx-0.mount - /var/lib/kubelet/pods/096bba8f-0beb-47cb-ac9c-cc22c9bed927/volume-subpaths/config/nginx/0
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:15 UTC; 21h ago
    Where: /var/lib/kubelet/pods/096bba8f-0beb-47cb-ac9c-cc22c9bed927/volume-subpaths/config/nginx/0
     What: /dev/sdb6
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/var-lib-kubelet-pods-096bba8f\x2d0beb\x2d47cb\x2dac9c\x2dcc22c9bed927-volume\x2dsubpaths-config-nginx-0.mount

● var-lib-kubelet-pods-096bba8f\x2d0beb\x2d47cb\x2dac9c\x2dcc22c9bed927-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2dcg4rv.mount - /var/lib/kubelet/pods/096bba8f-0beb-47cb-ac9c-cc22c9bed927/volumes/kubernetes.io~projected/kube-api-access-cg4rv
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:15 UTC; 21h ago
    Where: /var/lib/kubelet/pods/096bba8f-0beb-47cb-ac9c-cc22c9bed927/volumes/kubernetes.io~projected/kube-api-access-cg4rv
     What: tmpfs
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/var-lib-kubelet-pods-096bba8f\x2d0beb\x2d47cb\x2dac9c\x2dcc22c9bed927-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2dcg4rv.mount

● var-lib-kubelet-pods-0a0dbba7\x2db736\x2d437c\x2daf77\x2d183ba0386260-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2dfsmx7.mount - /var/lib/kubelet/pods/0a0dbba7-b736-437c-af77-183ba0386260/volumes/kubernetes.io~projected/kube-api-access-fsmx7
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:08:39 UTC; 21h ago
    Where: /var/lib/kubelet/pods/0a0dbba7-b736-437c-af77-183ba0386260/volumes/kubernetes.io~projected/kube-api-access-fsmx7
     What: tmpfs
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/var-lib-kubelet-pods-0a0dbba7\x2db736\x2d437c\x2daf77\x2d183ba0386260-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2dfsmx7.mount

● var-lib-kubelet-pods-180edcc9\x2dc2ff\x2d4d05\x2dbc3a\x2dffd533439e72-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2dhtvwm.mount - /var/lib/kubelet/pods/180edcc9-c2ff-4d05-bc3a-ffd533439e72/volumes/kubernetes.io~projected/kube-api-access-htvwm
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:15 UTC; 21h ago
    Where: /var/lib/kubelet/pods/180edcc9-c2ff-4d05-bc3a-ffd533439e72/volumes/kubernetes.io~projected/kube-api-access-htvwm
     What: tmpfs
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/var-lib-kubelet-pods-180edcc9\x2dc2ff\x2d4d05\x2dbc3a\x2dffd533439e72-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2dhtvwm.mount

● var-lib-kubelet-pods-180edcc9\x2dc2ff\x2d4d05\x2dbc3a\x2dffd533439e72-volumes-kubernetes.io\x7esecret-certs.mount - /var/lib/kubelet/pods/180edcc9-c2ff-4d05-bc3a-ffd533439e72/volumes/kubernetes.io~secret/certs
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:14 UTC; 21h ago
    Where: /var/lib/kubelet/pods/180edcc9-c2ff-4d05-bc3a-ffd533439e72/volumes/kubernetes.io~secret/certs
     What: tmpfs
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/var-lib-kubelet-pods-180edcc9\x2dc2ff\x2d4d05\x2dbc3a\x2dffd533439e72-volumes-kubernetes.io\x7esecret-certs.mount

● var-lib-kubelet-pods-1a5c156d\x2d0493\x2d4d63\x2d9e6d\x2d1699216a43f7-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2dh2cbt.mount - /var/lib/kubelet/pods/1a5c156d-0493-4d63-9e6d-1699216a43f7/volumes/kubernetes.io~projected/kube-api-access-h2cbt
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:15 UTC; 21h ago
    Where: /var/lib/kubelet/pods/1a5c156d-0493-4d63-9e6d-1699216a43f7/volumes/kubernetes.io~projected/kube-api-access-h2cbt
     What: tmpfs
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/var-lib-kubelet-pods-1a5c156d\x2d0493\x2d4d63\x2d9e6d\x2d1699216a43f7-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2dh2cbt.mount

● var-lib-kubelet-pods-1a5c156d\x2d0493\x2d4d63\x2d9e6d\x2d1699216a43f7-volumes-kubernetes.io\x7esecret-secret\x2dconfig.mount - /var/lib/kubelet/pods/1a5c156d-0493-4d63-9e6d-1699216a43f7/volumes/kubernetes.io~secret/secret-config
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:15 UTC; 21h ago
    Where: /var/lib/kubelet/pods/1a5c156d-0493-4d63-9e6d-1699216a43f7/volumes/kubernetes.io~secret/secret-config
     What: tmpfs
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/var-lib-kubelet-pods-1a5c156d\x2d0493\x2d4d63\x2d9e6d\x2d1699216a43f7-volumes-kubernetes.io\x7esecret-secret\x2dconfig.mount

● var-lib-kubelet-pods-2054d1e4\x2df2e3\x2d40bc\x2da3a0\x2db76f1ef5c374-volumes-kubernetes.io\x7eempty\x2ddir-gfxss.mount - /var/lib/kubelet/pods/2054d1e4-f2e3-40bc-a3a0-b76f1ef5c374/volumes/kubernetes.io~empty-dir/gfxss
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:09:05 UTC; 21h ago
    Where: /var/lib/kubelet/pods/2054d1e4-f2e3-40bc-a3a0-b76f1ef5c374/volumes/kubernetes.io~empty-dir/gfxss
     What: tmpfs
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/var-lib-kubelet-pods-2054d1e4\x2df2e3\x2d40bc\x2da3a0\x2db76f1ef5c374-volumes-kubernetes.io\x7eempty\x2ddir-gfxss.mount

● var-lib-kubelet-pods-2054d1e4\x2df2e3\x2d40bc\x2da3a0\x2db76f1ef5c374-volumes-kubernetes.io\x7eempty\x2ddir-packager\x2dstate.mount - /var/lib/kubelet/pods/2054d1e4-f2e3-40bc-a3a0-b76f1ef5c374/volumes/kubernetes.io~empty-dir/packager-state
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:09:05 UTC; 21h ago
    Where: /var/lib/kubelet/pods/2054d1e4-f2e3-40bc-a3a0-b76f1ef5c374/volumes/kubernetes.io~empty-dir/packager-state
     What: tmpfs
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/var-lib-kubelet-pods-2054d1e4\x2df2e3\x2d40bc\x2da3a0\x2db76f1ef5c374-volumes-kubernetes.io\x7eempty\x2ddir-packager\x2dstate.mount

● var-lib-kubelet-pods-2054d1e4\x2df2e3\x2d40bc\x2da3a0\x2db76f1ef5c374-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2dx2kbk.mount - /var/lib/kubelet/pods/2054d1e4-f2e3-40bc-a3a0-b76f1ef5c374/volumes/kubernetes.io~projected/kube-api-access-x2kbk
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:09:05 UTC; 21h ago
    Where: /var/lib/kubelet/pods/2054d1e4-f2e3-40bc-a3a0-b76f1ef5c374/volumes/kubernetes.io~projected/kube-api-access-x2kbk
     What: tmpfs
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/var-lib-kubelet-pods-2054d1e4\x2df2e3\x2d40bc\x2da3a0\x2db76f1ef5c374-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2dx2kbk.mount

● var-lib-kubelet-pods-2139364f\x2d7be5\x2d40b3\x2dbbec\x2d0c259f5c2559-volumes-kubernetes.io\x7eempty\x2ddir-test\x2dobject.mount - /var/lib/kubelet/pods/2139364f-7be5-40b3-bbec-0c259f5c2559/volumes/kubernetes.io~empty-dir/test-object
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:08:51 UTC; 21h ago
    Where: /var/lib/kubelet/pods/2139364f-7be5-40b3-bbec-0c259f5c2559/volumes/kubernetes.io~empty-dir/test-object
     What: tmpfs
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/var-lib-kubelet-pods-2139364f\x2d7be5\x2d40b3\x2dbbec\x2d0c259f5c2559-volumes-kubernetes.io\x7eempty\x2ddir-test\x2dobject.mount

● var-lib-kubelet-pods-2139364f\x2d7be5\x2d40b3\x2dbbec\x2d0c259f5c2559-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2ds99pd.mount - /var/lib/kubelet/pods/2139364f-7be5-40b3-bbec-0c259f5c2559/volumes/kubernetes.io~projected/kube-api-access-s99pd
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:08:51 UTC; 21h ago
    Where: /var/lib/kubelet/pods/2139364f-7be5-40b3-bbec-0c259f5c2559/volumes/kubernetes.io~projected/kube-api-access-s99pd
     What: tmpfs
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/var-lib-kubelet-pods-2139364f\x2d7be5\x2d40b3\x2dbbec\x2d0c259f5c2559-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2ds99pd.mount

● var-lib-kubelet-pods-24a751e0\x2d91a9\x2d42bb\x2d9ad0\x2d6d401ff080a7-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2dkc6qm.mount - /var/lib/kubelet/pods/24a751e0-91a9-42bb-9ad0-6d401ff080a7/volumes/kubernetes.io~projected/kube-api-access-kc6qm
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:14 UTC; 21h ago
    Where: /var/lib/kubelet/pods/24a751e0-91a9-42bb-9ad0-6d401ff080a7/volumes/kubernetes.io~projected/kube-api-access-kc6qm
     What: tmpfs
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/var-lib-kubelet-pods-24a751e0\x2d91a9\x2d42bb\x2d9ad0\x2d6d401ff080a7-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2dkc6qm.mount

● var-lib-kubelet-pods-282ed129\x2dc820\x2d4d17\x2d83ec\x2d5fd195d42947-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2dlbp6j.mount - /var/lib/kubelet/pods/282ed129-c820-4d17-83ec-5fd195d42947/volumes/kubernetes.io~projected/kube-api-access-lbp6j
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:09:08 UTC; 21h ago
    Where: /var/lib/kubelet/pods/282ed129-c820-4d17-83ec-5fd195d42947/volumes/kubernetes.io~projected/kube-api-access-lbp6j
     What: tmpfs
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/var-lib-kubelet-pods-282ed129\x2dc820\x2d4d17\x2d83ec\x2d5fd195d42947-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2dlbp6j.mount

● var-lib-kubelet-pods-29ecd84d\x2d4896\x2d4ebc\x2da838\x2d464df976b54b-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2d57mxd.mount - /var/lib/kubelet/pods/29ecd84d-4896-4ebc-a838-464df976b54b/volumes/kubernetes.io~projected/kube-api-access-57mxd
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:14 UTC; 21h ago
    Where: /var/lib/kubelet/pods/29ecd84d-4896-4ebc-a838-464df976b54b/volumes/kubernetes.io~projected/kube-api-access-57mxd
     What: tmpfs
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/var-lib-kubelet-pods-29ecd84d\x2d4896\x2d4ebc\x2da838\x2d464df976b54b-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2d57mxd.mount

● var-lib-kubelet-pods-29ecd84d\x2d4896\x2d4ebc\x2da838\x2d464df976b54b-volumes-kubernetes.io\x7esecret-kubernetes\x2ddashboard\x2dcerts.mount - /var/lib/kubelet/pods/29ecd84d-4896-4ebc-a838-464df976b54b/volumes/kubernetes.io~secret/kubernetes-dashboard-certs
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:14 UTC; 21h ago
    Where: /var/lib/kubelet/pods/29ecd84d-4896-4ebc-a838-464df976b54b/volumes/kubernetes.io~secret/kubernetes-dashboard-certs
     What: tmpfs
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/var-lib-kubelet-pods-29ecd84d\x2d4896\x2d4ebc\x2da838\x2d464df976b54b-volumes-kubernetes.io\x7esecret-kubernetes\x2ddashboard\x2dcerts.mount

● var-lib-kubelet-pods-2c8d80ef\x2db1d9\x2d4b20\x2d9118\x2d2100b74d63b4-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2dhnxmm.mount - /var/lib/kubelet/pods/2c8d80ef-b1d9-4b20-9118-2100b74d63b4/volumes/kubernetes.io~projected/kube-api-access-hnxmm
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:14 UTC; 21h ago
    Where: /var/lib/kubelet/pods/2c8d80ef-b1d9-4b20-9118-2100b74d63b4/volumes/kubernetes.io~projected/kube-api-access-hnxmm
     What: tmpfs
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/var-lib-kubelet-pods-2c8d80ef\x2db1d9\x2d4b20\x2d9118\x2d2100b74d63b4-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2dhnxmm.mount

● var-lib-kubelet-pods-3581198f\x2d9982\x2d4c55\x2d91af\x2d1388df22dece-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2d9vnr7.mount - /var/lib/kubelet/pods/3581198f-9982-4c55-91af-1388df22dece/volumes/kubernetes.io~projected/kube-api-access-9vnr7
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:14 UTC; 21h ago
    Where: /var/lib/kubelet/pods/3581198f-9982-4c55-91af-1388df22dece/volumes/kubernetes.io~projected/kube-api-access-9vnr7
     What: tmpfs
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/var-lib-kubelet-pods-3581198f\x2d9982\x2d4c55\x2d91af\x2d1388df22dece-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2d9vnr7.mount

● var-lib-kubelet-pods-3e49bb7f\x2d4353\x2d4a7c\x2d8d01\x2d161db867a3ee-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2dnlwf5.mount - /var/lib/kubelet/pods/3e49bb7f-4353-4a7c-8d01-161db867a3ee/volumes/kubernetes.io~projected/kube-api-access-nlwf5
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:15 UTC; 21h ago
    Where: /var/lib/kubelet/pods/3e49bb7f-4353-4a7c-8d01-161db867a3ee/volumes/kubernetes.io~projected/kube-api-access-nlwf5
     What: tmpfs
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/var-lib-kubelet-pods-3e49bb7f\x2d4353\x2d4a7c\x2d8d01\x2d161db867a3ee-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2dnlwf5.mount

● var-lib-kubelet-pods-53016956\x2d1fdc\x2d4182\x2db30d\x2d8975df13b8b3-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2dnfr2r.mount - /var/lib/kubelet/pods/53016956-1fdc-4182-b30d-8975df13b8b3/volumes/kubernetes.io~projected/kube-api-access-nfr2r
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:09:10 UTC; 21h ago
    Where: /var/lib/kubelet/pods/53016956-1fdc-4182-b30d-8975df13b8b3/volumes/kubernetes.io~projected/kube-api-access-nfr2r
     What: tmpfs
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/var-lib-kubelet-pods-53016956\x2d1fdc\x2d4182\x2db30d\x2d8975df13b8b3-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2dnfr2r.mount

● var-lib-kubelet-pods-5c63f719\x2d4775\x2d4dfe\x2d970f\x2d71ff10f56311-volumes-kubernetes.io\x7eempty\x2ddir-tmp\x2dplugins.mount - /var/lib/kubelet/pods/5c63f719-4775-4dfe-970f-71ff10f56311/volumes/kubernetes.io~empty-dir/tmp-plugins
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:16 UTC; 21h ago
    Where: /var/lib/kubelet/pods/5c63f719-4775-4dfe-970f-71ff10f56311/volumes/kubernetes.io~empty-dir/tmp-plugins
     What: tmpfs
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/var-lib-kubelet-pods-5c63f719\x2d4775\x2d4dfe\x2d970f\x2d71ff10f56311-volumes-kubernetes.io\x7eempty\x2ddir-tmp\x2dplugins.mount

● var-lib-kubelet-pods-5c63f719\x2d4775\x2d4dfe\x2d970f\x2d71ff10f56311-volumes-kubernetes.io\x7esecret-grafana\x2dconfig.mount - /var/lib/kubelet/pods/5c63f719-4775-4dfe-970f-71ff10f56311/volumes/kubernetes.io~secret/grafana-config
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:16 UTC; 21h ago
    Where: /var/lib/kubelet/pods/5c63f719-4775-4dfe-970f-71ff10f56311/volumes/kubernetes.io~secret/grafana-config
     What: tmpfs
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/var-lib-kubelet-pods-5c63f719\x2d4775\x2d4dfe\x2d970f\x2d71ff10f56311-volumes-kubernetes.io\x7esecret-grafana\x2dconfig.mount

● var-lib-kubelet-pods-5c63f719\x2d4775\x2d4dfe\x2d970f\x2d71ff10f56311-volumes-kubernetes.io\x7esecret-grafana\x2ddatasources.mount - /var/lib/kubelet/pods/5c63f719-4775-4dfe-970f-71ff10f56311/volumes/kubernetes.io~secret/grafana-datasources
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:16 UTC; 21h ago
    Where: /var/lib/kubelet/pods/5c63f719-4775-4dfe-970f-71ff10f56311/volumes/kubernetes.io~secret/grafana-datasources
     What: tmpfs
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/var-lib-kubelet-pods-5c63f719\x2d4775\x2d4dfe\x2d970f\x2d71ff10f56311-volumes-kubernetes.io\x7esecret-grafana\x2ddatasources.mount

● var-lib-kubelet-pods-5cb575bc\x2de848\x2d4e17\x2da792\x2d8ede656bd25e-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2dd7nf4.mount - /var/lib/kubelet/pods/5cb575bc-e848-4e17-a792-8ede656bd25e/volumes/kubernetes.io~projected/kube-api-access-d7nf4
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:08:38 UTC; 21h ago
    Where: /var/lib/kubelet/pods/5cb575bc-e848-4e17-a792-8ede656bd25e/volumes/kubernetes.io~projected/kube-api-access-d7nf4
     What: tmpfs
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/var-lib-kubelet-pods-5cb575bc\x2de848\x2d4e17\x2da792\x2d8ede656bd25e-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2dd7nf4.mount

● var-lib-kubelet-pods-6986b857\x2df59e\x2d4674\x2db383\x2df5319ad06f64-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2dgtgcv.mount - /var/lib/kubelet/pods/6986b857-f59e-4674-b383-f5319ad06f64/volumes/kubernetes.io~projected/kube-api-access-gtgcv
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:15 UTC; 21h ago
    Where: /var/lib/kubelet/pods/6986b857-f59e-4674-b383-f5319ad06f64/volumes/kubernetes.io~projected/kube-api-access-gtgcv
     What: tmpfs
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/var-lib-kubelet-pods-6986b857\x2df59e\x2d4674\x2db383\x2df5319ad06f64-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2dgtgcv.mount

● var-lib-kubelet-pods-6ac986a8\x2d270f\x2d43b4\x2d8b04\x2d651fce2f139a-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2dstvbz.mount - /var/lib/kubelet/pods/6ac986a8-270f-43b4-8b04-651fce2f139a/volumes/kubernetes.io~projected/kube-api-access-stvbz
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:15 UTC; 21h ago
    Where: /var/lib/kubelet/pods/6ac986a8-270f-43b4-8b04-651fce2f139a/volumes/kubernetes.io~projected/kube-api-access-stvbz
     What: tmpfs
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/var-lib-kubelet-pods-6ac986a8\x2d270f\x2d43b4\x2d8b04\x2d651fce2f139a-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2dstvbz.mount

● var-lib-kubelet-pods-6d657888\x2d4793\x2d498f\x2d9d00\x2d63bf11d5b00b-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2d8n5b4.mount - /var/lib/kubelet/pods/6d657888-4793-498f-9d00-63bf11d5b00b/volumes/kubernetes.io~projected/kube-api-access-8n5b4
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:06:00 UTC; 21h ago
    Where: /var/lib/kubelet/pods/6d657888-4793-498f-9d00-63bf11d5b00b/volumes/kubernetes.io~projected/kube-api-access-8n5b4
     What: tmpfs
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/var-lib-kubelet-pods-6d657888\x2d4793\x2d498f\x2d9d00\x2d63bf11d5b00b-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2d8n5b4.mount

● var-lib-kubelet-pods-6e8ec5b4\x2d6cbd\x2d43e3\x2da002\x2d382ad74382de-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2dx6r4n.mount - /var/lib/kubelet/pods/6e8ec5b4-6cbd-43e3-a002-382ad74382de/volumes/kubernetes.io~projected/kube-api-access-x6r4n
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:15 UTC; 21h ago
    Where: /var/lib/kubelet/pods/6e8ec5b4-6cbd-43e3-a002-382ad74382de/volumes/kubernetes.io~projected/kube-api-access-x6r4n
     What: tmpfs
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/var-lib-kubelet-pods-6e8ec5b4\x2d6cbd\x2d43e3\x2da002\x2d382ad74382de-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2dx6r4n.mount

● var-lib-kubelet-pods-706f2b88\x2d4937\x2d4630\x2da676\x2d3d637a0649ac-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2dvzdt6.mount - /var/lib/kubelet/pods/706f2b88-4937-4630-a676-3d637a0649ac/volumes/kubernetes.io~projected/kube-api-access-vzdt6
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:16 UTC; 21h ago
    Where: /var/lib/kubelet/pods/706f2b88-4937-4630-a676-3d637a0649ac/volumes/kubernetes.io~projected/kube-api-access-vzdt6
     What: tmpfs
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/var-lib-kubelet-pods-706f2b88\x2d4937\x2d4630\x2da676\x2d3d637a0649ac-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2dvzdt6.mount

● var-lib-kubelet-pods-7a9dc476\x2d39fb\x2d46a5\x2d8dae\x2d2b3f334ed106-volumes-kubernetes.io\x7eempty\x2ddir-gfxss.mount - /var/lib/kubelet/pods/7a9dc476-39fb-46a5-8dae-2b3f334ed106/volumes/kubernetes.io~empty-dir/gfxss
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:09:06 UTC; 21h ago
    Where: /var/lib/kubelet/pods/7a9dc476-39fb-46a5-8dae-2b3f334ed106/volumes/kubernetes.io~empty-dir/gfxss
     What: tmpfs
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/var-lib-kubelet-pods-7a9dc476\x2d39fb\x2d46a5\x2d8dae\x2d2b3f334ed106-volumes-kubernetes.io\x7eempty\x2ddir-gfxss.mount

● var-lib-kubelet-pods-7a9dc476\x2d39fb\x2d46a5\x2d8dae\x2d2b3f334ed106-volumes-kubernetes.io\x7eempty\x2ddir-packager\x2dstate.mount - /var/lib/kubelet/pods/7a9dc476-39fb-46a5-8dae-2b3f334ed106/volumes/kubernetes.io~empty-dir/packager-state
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:09:06 UTC; 21h ago
    Where: /var/lib/kubelet/pods/7a9dc476-39fb-46a5-8dae-2b3f334ed106/volumes/kubernetes.io~empty-dir/packager-state
     What: tmpfs
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/var-lib-kubelet-pods-7a9dc476\x2d39fb\x2d46a5\x2d8dae\x2d2b3f334ed106-volumes-kubernetes.io\x7eempty\x2ddir-packager\x2dstate.mount

● var-lib-kubelet-pods-7a9dc476\x2d39fb\x2d46a5\x2d8dae\x2d2b3f334ed106-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2dx5bsn.mount - /var/lib/kubelet/pods/7a9dc476-39fb-46a5-8dae-2b3f334ed106/volumes/kubernetes.io~projected/kube-api-access-x5bsn
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:09:06 UTC; 21h ago
    Where: /var/lib/kubelet/pods/7a9dc476-39fb-46a5-8dae-2b3f334ed106/volumes/kubernetes.io~projected/kube-api-access-x5bsn
     What: tmpfs
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/var-lib-kubelet-pods-7a9dc476\x2d39fb\x2d46a5\x2d8dae\x2d2b3f334ed106-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2dx5bsn.mount

● var-lib-kubelet-pods-7cd2ae11\x2d942e\x2d42e8\x2d9ac7\x2dca02a995c553-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2d57cnh.mount - /var/lib/kubelet/pods/7cd2ae11-942e-42e8-9ac7-ca02a995c553/volumes/kubernetes.io~projected/kube-api-access-57cnh
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:09:07 UTC; 21h ago
    Where: /var/lib/kubelet/pods/7cd2ae11-942e-42e8-9ac7-ca02a995c553/volumes/kubernetes.io~projected/kube-api-access-57cnh
     What: tmpfs
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/var-lib-kubelet-pods-7cd2ae11\x2d942e\x2d42e8\x2d9ac7\x2dca02a995c553-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2d57cnh.mount

● var-lib-kubelet-pods-7cd2ae11\x2d942e\x2d42e8\x2d9ac7\x2dca02a995c553-volumes-kubernetes.io\x7esecret-client\x2dcredential\x2dvolume.mount - /var/lib/kubelet/pods/7cd2ae11-942e-42e8-9ac7-ca02a995c553/volumes/kubernetes.io~secret/client-credential-volume
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:09:07 UTC; 21h ago
    Where: /var/lib/kubelet/pods/7cd2ae11-942e-42e8-9ac7-ca02a995c553/volumes/kubernetes.io~secret/client-credential-volume
     What: tmpfs
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/var-lib-kubelet-pods-7cd2ae11\x2d942e\x2d42e8\x2d9ac7\x2dca02a995c553-volumes-kubernetes.io\x7esecret-client\x2dcredential\x2dvolume.mount

● var-lib-kubelet-pods-7cd2ae11\x2d942e\x2d42e8\x2d9ac7\x2dca02a995c553-volumes-kubernetes.io\x7esecret-esam\x2doob\x2dbasic\x2dauth\x2dsecret\x2dvolume.mount - /var/lib/kubelet/pods/7cd2ae11-942e-42e8-9ac7-ca02a995c553/volumes/kubernetes.io~secret/esam-oob-basic-auth-secret-volume
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:09:07 UTC; 21h ago
    Where: /var/lib/kubelet/pods/7cd2ae11-942e-42e8-9ac7-ca02a995c553/volumes/kubernetes.io~secret/esam-oob-basic-auth-secret-volume
     What: tmpfs
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/var-lib-kubelet-pods-7cd2ae11\x2d942e\x2d42e8\x2d9ac7\x2dca02a995c553-volumes-kubernetes.io\x7esecret-esam\x2doob\x2dbasic\x2dauth\x2dsecret\x2dvolume.mount

● var-lib-kubelet-pods-8ffb9bdd\x2d9417\x2d450a\x2db140\x2d623dad767f01-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2drzl4m.mount - /var/lib/kubelet/pods/8ffb9bdd-9417-450a-b140-623dad767f01/volumes/kubernetes.io~projected/kube-api-access-rzl4m
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:09:06 UTC; 21h ago
    Where: /var/lib/kubelet/pods/8ffb9bdd-9417-450a-b140-623dad767f01/volumes/kubernetes.io~projected/kube-api-access-rzl4m
     What: tmpfs
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/var-lib-kubelet-pods-8ffb9bdd\x2d9417\x2d450a\x2db140\x2d623dad767f01-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2drzl4m.mount

● var-lib-kubelet-pods-92ba2f19\x2dec80\x2d4925\x2db13d\x2d1f0cb995aa75-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2d8pr7k.mount - /var/lib/kubelet/pods/92ba2f19-ec80-4925-b13d-1f0cb995aa75/volumes/kubernetes.io~projected/kube-api-access-8pr7k
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:15 UTC; 21h ago
    Where: /var/lib/kubelet/pods/92ba2f19-ec80-4925-b13d-1f0cb995aa75/volumes/kubernetes.io~projected/kube-api-access-8pr7k
     What: tmpfs
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/var-lib-kubelet-pods-92ba2f19\x2dec80\x2d4925\x2db13d\x2d1f0cb995aa75-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2d8pr7k.mount

● var-lib-kubelet-pods-9442a890\x2def09\x2d4a23\x2da5f5\x2d468078336436-volume\x2dsubpaths-prometheus\x2dpv-prometheus-2.mount - /var/lib/kubelet/pods/9442a890-ef09-4a23-a5f5-468078336436/volume-subpaths/prometheus-pv/prometheus/2
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:49 UTC; 21h ago
    Where: /var/lib/kubelet/pods/9442a890-ef09-4a23-a5f5-468078336436/volume-subpaths/prometheus-pv/prometheus/2
     What: /dev/sdb7
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/var-lib-kubelet-pods-9442a890\x2def09\x2d4a23\x2da5f5\x2d468078336436-volume\x2dsubpaths-prometheus\x2dpv-prometheus-2.mount

● var-lib-kubelet-pods-9442a890\x2def09\x2d4a23\x2da5f5\x2d468078336436-volume\x2dsubpaths-web\x2dconfig-prometheus-4.mount - /var/lib/kubelet/pods/9442a890-ef09-4a23-a5f5-468078336436/volume-subpaths/web-config/prometheus/4
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:49 UTC; 21h ago
    Where: /var/lib/kubelet/pods/9442a890-ef09-4a23-a5f5-468078336436/volume-subpaths/web-config/prometheus/4
     What: tmpfs
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/var-lib-kubelet-pods-9442a890\x2def09\x2d4a23\x2da5f5\x2d468078336436-volume\x2dsubpaths-web\x2dconfig-prometheus-4.mount

● var-lib-kubelet-pods-9442a890\x2def09\x2d4a23\x2da5f5\x2d468078336436-volumes-kubernetes.io\x7elocal\x2dvolume-prometheus\x2dpv.mount - /var/lib/kubelet/pods/9442a890-ef09-4a23-a5f5-468078336436/volumes/kubernetes.io~local-volume/prometheus-pv
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:48 UTC; 21h ago
    Where: /var/lib/kubelet/pods/9442a890-ef09-4a23-a5f5-468078336436/volumes/kubernetes.io~local-volume/prometheus-pv
     What: /dev/sdb7
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/var-lib-kubelet-pods-9442a890\x2def09\x2d4a23\x2da5f5\x2d468078336436-volumes-kubernetes.io\x7elocal\x2dvolume-prometheus\x2dpv.mount

● var-lib-kubelet-pods-9442a890\x2def09\x2d4a23\x2da5f5\x2d468078336436-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2dpldp2.mount - /var/lib/kubelet/pods/9442a890-ef09-4a23-a5f5-468078336436/volumes/kubernetes.io~projected/kube-api-access-pldp2
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:48 UTC; 21h ago
    Where: /var/lib/kubelet/pods/9442a890-ef09-4a23-a5f5-468078336436/volumes/kubernetes.io~projected/kube-api-access-pldp2
     What: tmpfs
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/var-lib-kubelet-pods-9442a890\x2def09\x2d4a23\x2da5f5\x2d468078336436-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2dpldp2.mount

● var-lib-kubelet-pods-9442a890\x2def09\x2d4a23\x2da5f5\x2d468078336436-volumes-kubernetes.io\x7eprojected-tls\x2dassets.mount - /var/lib/kubelet/pods/9442a890-ef09-4a23-a5f5-468078336436/volumes/kubernetes.io~projected/tls-assets
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:48 UTC; 21h ago
    Where: /var/lib/kubelet/pods/9442a890-ef09-4a23-a5f5-468078336436/volumes/kubernetes.io~projected/tls-assets
     What: tmpfs
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/var-lib-kubelet-pods-9442a890\x2def09\x2d4a23\x2da5f5\x2d468078336436-volumes-kubernetes.io\x7eprojected-tls\x2dassets.mount

● var-lib-kubelet-pods-9442a890\x2def09\x2d4a23\x2da5f5\x2d468078336436-volumes-kubernetes.io\x7esecret-config.mount - /var/lib/kubelet/pods/9442a890-ef09-4a23-a5f5-468078336436/volumes/kubernetes.io~secret/config
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:48 UTC; 21h ago
    Where: /var/lib/kubelet/pods/9442a890-ef09-4a23-a5f5-468078336436/volumes/kubernetes.io~secret/config
     What: tmpfs
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/var-lib-kubelet-pods-9442a890\x2def09\x2d4a23\x2da5f5\x2d468078336436-volumes-kubernetes.io\x7esecret-config.mount

● var-lib-kubelet-pods-9442a890\x2def09\x2d4a23\x2da5f5\x2d468078336436-volumes-kubernetes.io\x7esecret-web\x2dconfig.mount - /var/lib/kubelet/pods/9442a890-ef09-4a23-a5f5-468078336436/volumes/kubernetes.io~secret/web-config
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:48 UTC; 21h ago
    Where: /var/lib/kubelet/pods/9442a890-ef09-4a23-a5f5-468078336436/volumes/kubernetes.io~secret/web-config
     What: tmpfs
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/var-lib-kubelet-pods-9442a890\x2def09\x2d4a23\x2da5f5\x2d468078336436-volumes-kubernetes.io\x7esecret-web\x2dconfig.mount

● var-lib-kubelet-pods-9810806a\x2d49a2\x2d47b3\x2dae21\x2ddd8237c0423f-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2dv94l2.mount - /var/lib/kubelet/pods/9810806a-49a2-47b3-ae21-dd8237c0423f/volumes/kubernetes.io~projected/kube-api-access-v94l2
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:06:30 UTC; 21h ago
    Where: /var/lib/kubelet/pods/9810806a-49a2-47b3-ae21-dd8237c0423f/volumes/kubernetes.io~projected/kube-api-access-v94l2
     What: tmpfs
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/var-lib-kubelet-pods-9810806a\x2d49a2\x2d47b3\x2dae21\x2ddd8237c0423f-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2dv94l2.mount

● var-lib-kubelet-pods-aff14187\x2d181d\x2d4c0a\x2d9445\x2da2326a3bf487-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2d66jgh.mount - /var/lib/kubelet/pods/aff14187-181d-4c0a-9445-a2326a3bf487/volumes/kubernetes.io~projected/kube-api-access-66jgh
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:09:12 UTC; 21h ago
    Where: /var/lib/kubelet/pods/aff14187-181d-4c0a-9445-a2326a3bf487/volumes/kubernetes.io~projected/kube-api-access-66jgh
     What: tmpfs
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/var-lib-kubelet-pods-aff14187\x2d181d\x2d4c0a\x2d9445\x2da2326a3bf487-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2d66jgh.mount

● var-lib-kubelet-pods-b12c5b65\x2dd019\x2d4c39\x2da2bd\x2d3b70d80c5a52-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2dgnl9g.mount - /var/lib/kubelet/pods/b12c5b65-d019-4c39-a2bd-3b70d80c5a52/volumes/kubernetes.io~projected/kube-api-access-gnl9g
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:06:30 UTC; 21h ago
    Where: /var/lib/kubelet/pods/b12c5b65-d019-4c39-a2bd-3b70d80c5a52/volumes/kubernetes.io~projected/kube-api-access-gnl9g
     What: tmpfs
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/var-lib-kubelet-pods-b12c5b65\x2dd019\x2d4c39\x2da2bd\x2d3b70d80c5a52-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2dgnl9g.mount

● var-lib-kubelet-pods-c3a4b089\x2d3305\x2d4918\x2db342\x2dedb4ab6fc5dc-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2d79r7z.mount - /var/lib/kubelet/pods/c3a4b089-3305-4918-b342-edb4ab6fc5dc/volumes/kubernetes.io~projected/kube-api-access-79r7z
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:15 UTC; 21h ago
    Where: /var/lib/kubelet/pods/c3a4b089-3305-4918-b342-edb4ab6fc5dc/volumes/kubernetes.io~projected/kube-api-access-79r7z
     What: tmpfs
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/var-lib-kubelet-pods-c3a4b089\x2d3305\x2d4918\x2db342\x2dedb4ab6fc5dc-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2d79r7z.mount

● var-lib-kubelet-pods-cc192fdc\x2dfcd9\x2d4e29\x2d8a4c\x2d12a71693a249-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2dc5t67.mount - /var/lib/kubelet/pods/cc192fdc-fcd9-4e29-8a4c-12a71693a249/volumes/kubernetes.io~projected/kube-api-access-c5t67
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:09:11 UTC; 21h ago
    Where: /var/lib/kubelet/pods/cc192fdc-fcd9-4e29-8a4c-12a71693a249/volumes/kubernetes.io~projected/kube-api-access-c5t67
     What: tmpfs
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/var-lib-kubelet-pods-cc192fdc\x2dfcd9\x2d4e29\x2d8a4c\x2d12a71693a249-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2dc5t67.mount

● var-lib-kubelet-pods-d0541ca5\x2dc841\x2d48eb\x2d9fd5\x2dc66f0f4710a9-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2d8pn77.mount - /var/lib/kubelet/pods/d0541ca5-c841-48eb-9fd5-c66f0f4710a9/volumes/kubernetes.io~projected/kube-api-access-8pn77
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:05:55 UTC; 21h ago
    Where: /var/lib/kubelet/pods/d0541ca5-c841-48eb-9fd5-c66f0f4710a9/volumes/kubernetes.io~projected/kube-api-access-8pn77
     What: tmpfs
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/var-lib-kubelet-pods-d0541ca5\x2dc841\x2d48eb\x2d9fd5\x2dc66f0f4710a9-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2d8pn77.mount

● var-lib-kubelet-pods-d223d476\x2dac55\x2d4a39\x2d8b62\x2d404db15c85ec-volumes-kubernetes.io\x7eempty\x2ddir-gfxss.mount - /var/lib/kubelet/pods/d223d476-ac55-4a39-8b62-404db15c85ec/volumes/kubernetes.io~empty-dir/gfxss
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:09:06 UTC; 21h ago
    Where: /var/lib/kubelet/pods/d223d476-ac55-4a39-8b62-404db15c85ec/volumes/kubernetes.io~empty-dir/gfxss
     What: tmpfs
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/var-lib-kubelet-pods-d223d476\x2dac55\x2d4a39\x2d8b62\x2d404db15c85ec-volumes-kubernetes.io\x7eempty\x2ddir-gfxss.mount

● var-lib-kubelet-pods-d223d476\x2dac55\x2d4a39\x2d8b62\x2d404db15c85ec-volumes-kubernetes.io\x7eempty\x2ddir-packager\x2dstate.mount - /var/lib/kubelet/pods/d223d476-ac55-4a39-8b62-404db15c85ec/volumes/kubernetes.io~empty-dir/packager-state
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:09:06 UTC; 21h ago
    Where: /var/lib/kubelet/pods/d223d476-ac55-4a39-8b62-404db15c85ec/volumes/kubernetes.io~empty-dir/packager-state
     What: tmpfs
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/var-lib-kubelet-pods-d223d476\x2dac55\x2d4a39\x2d8b62\x2d404db15c85ec-volumes-kubernetes.io\x7eempty\x2ddir-packager\x2dstate.mount

● var-lib-kubelet-pods-d223d476\x2dac55\x2d4a39\x2d8b62\x2d404db15c85ec-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2dfcnd7.mount - /var/lib/kubelet/pods/d223d476-ac55-4a39-8b62-404db15c85ec/volumes/kubernetes.io~projected/kube-api-access-fcnd7
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:09:06 UTC; 21h ago
    Where: /var/lib/kubelet/pods/d223d476-ac55-4a39-8b62-404db15c85ec/volumes/kubernetes.io~projected/kube-api-access-fcnd7
     What: tmpfs
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/var-lib-kubelet-pods-d223d476\x2dac55\x2d4a39\x2d8b62\x2d404db15c85ec-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2dfcnd7.mount

● var-lib-kubelet-pods-d3004940\x2d8948\x2d414f\x2d97ef\x2d9b881316670e-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2djdtgg.mount - /var/lib/kubelet/pods/d3004940-8948-414f-97ef-9b881316670e/volumes/kubernetes.io~projected/kube-api-access-jdtgg
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:08:53 UTC; 21h ago
    Where: /var/lib/kubelet/pods/d3004940-8948-414f-97ef-9b881316670e/volumes/kubernetes.io~projected/kube-api-access-jdtgg
     What: tmpfs
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/var-lib-kubelet-pods-d3004940\x2d8948\x2d414f\x2d97ef\x2d9b881316670e-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2djdtgg.mount

● var-lib-kubelet-pods-e4417f1c\x2dec13\x2d46d6\x2da15e\x2de54cc6549410-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2dcftkq.mount - /var/lib/kubelet/pods/e4417f1c-ec13-46d6-a15e-e54cc6549410/volumes/kubernetes.io~projected/kube-api-access-cftkq
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:06:31 UTC; 21h ago
    Where: /var/lib/kubelet/pods/e4417f1c-ec13-46d6-a15e-e54cc6549410/volumes/kubernetes.io~projected/kube-api-access-cftkq
     What: tmpfs
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/var-lib-kubelet-pods-e4417f1c\x2dec13\x2d46d6\x2da15e\x2de54cc6549410-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2dcftkq.mount

● var-lib-kubelet-pods-e8b5f050\x2d8a1e\x2d4433\x2db78d\x2d7d533407b8a0-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2dm4b28.mount - /var/lib/kubelet/pods/e8b5f050-8a1e-4433-b78d-7d533407b8a0/volumes/kubernetes.io~projected/kube-api-access-m4b28
   Loaded: loaded (/proc/self/mountinfo)
   Active: active (mounted) since Mon 2025-11-17 16:08:51 UTC; 21h ago
    Where: /var/lib/kubelet/pods/e8b5f050-8a1e-4433-b78d-7d533407b8a0/volumes/kubernetes.io~projected/kube-api-access-m4b28
     What: tmpfs
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/var-lib-kubelet-pods-e8b5f050\x2d8a1e\x2d4433\x2db78d\x2d7d533407b8a0-volumes-kubernetes.io\x7eprojected-kube\x2dapi\x2daccess\x2dm4b28.mount

● var-lib-minio.mount - Bind mount to move minio to sda7
   Loaded: loaded (/etc/systemd/system/var-lib-minio.mount; enabled; vendor preset: disabled)
   Active: active (mounted) since Mon 2025-11-17 16:03:51 UTC; 21h ago
    Where: /var/lib/minio
     What: /dev/sdb7
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/var-lib-minio.mount

● var-lib-nfs-rpc_pipefs.mount - RPC Pipe File System
   Loaded: loaded (/usr/lib/systemd/system/var-lib-nfs-rpc_pipefs.mount; static; vendor preset: disabled)
   Active: failed (Result: protocol) since Mon 2025-11-17 16:03:53 UTC; 21h ago
    Where: /var/lib/nfs/rpc_pipefs
     What: sunrpc

Nov 17 16:03:53 vosflex.localdomain systemd[1]: Mounting RPC Pipe File System...
Nov 17 16:03:53 vosflex.localdomain systemd[1]: var-lib-nfs-rpc_pipefs.mount: Mount process finished, but there is no mount.
Nov 17 16:03:53 vosflex.localdomain systemd[1]: var-lib-nfs-rpc_pipefs.mount: Failed with result 'protocol'.
Nov 17 16:03:53 vosflex.localdomain systemd[1]: Failed to mount RPC Pipe File System.

● var-lib-persistent.mount - /var/lib/persistent
   Loaded: loaded (/etc/fstab; generated)
   Active: active (mounted) since Mon 2025-11-17 16:03:51 UTC; 21h ago
    Where: /var/lib/persistent
     What: /dev/sdb2
     Docs: man:fstab(5)
           man:systemd-fstab-generator(8)
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/var-lib-persistent.mount

● var-lib-pgdb-backup_restore.mount - Bind mount to move pgdb database_middleware to sda7
   Loaded: loaded (/etc/systemd/system/var-lib-pgdb-backup_restore.mount; enabled; vendor preset: disabled)
   Active: active (mounted) since Mon 2025-11-17 16:03:51 UTC; 21h ago
    Where: /var/lib/pgdb/backup_restore
     What: /dev/sdb7
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/var-lib-pgdb-backup_restore.mount

● var-lib-pgdb-pg_log.mount - Bind mount to move pgdb log to sda7
   Loaded: loaded (/etc/systemd/system/var-lib-pgdb-pg_log.mount; enabled; vendor preset: disabled)
   Active: active (mounted) since Mon 2025-11-17 16:03:51 UTC; 21h ago
    Where: /var/lib/pgdb/pg_log
     What: /dev/sdb7
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/var-lib-pgdb-pg_log.mount

● var-lib-pgdb-pgdata.mount - Bind mount to move pgdb data to sda7
   Loaded: loaded (/etc/systemd/system/var-lib-pgdb-pgdata.mount; enabled; vendor preset: disabled)
   Active: active (mounted) since Mon 2025-11-17 16:03:51 UTC; 21h ago
    Where: /var/lib/pgdb/pgdata
     What: /dev/sdb7
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/var-lib-pgdb-pgdata.mount

● var-lib-prometheus.mount - Bind mount to move prometheus to sda7
   Loaded: loaded (/etc/systemd/system/var-lib-prometheus.mount; enabled; vendor preset: disabled)
   Active: active (mounted) since Mon 2025-11-17 16:03:51 UTC; 21h ago
    Where: /var/lib/prometheus
     What: /dev/sdb7
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/var-lib-prometheus.mount

● var-lib-triveni.mount - Bind mount to move triveni files to sda7
   Loaded: loaded (/etc/systemd/system/var-lib-triveni.mount; enabled; vendor preset: disabled)
   Active: active (mounted) since Mon 2025-11-17 16:03:51 UTC; 21h ago
    Where: /var/lib/triveni
     What: /dev/sdb7
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/var-lib-triveni.mount

● var-lib-zookeeper.mount - Bind mount to move zookeeper to sda7
   Loaded: loaded (/etc/systemd/system/var-lib-zookeeper.mount; enabled; vendor preset: disabled)
   Active: active (mounted) since Mon 2025-11-17 16:03:51 UTC; 21h ago
    Where: /var/lib/zookeeper
     What: /dev/sdb7
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/var-lib-zookeeper.mount

● var-log.mount - Bind mount to move logs to sda7
   Loaded: loaded (/etc/systemd/system/var-log.mount; enabled; vendor preset: disabled)
   Active: active (mounted) since Mon 2025-11-17 16:03:51 UTC; 21h ago
    Where: /var/log
     What: /dev/sdb7
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/var-log.mount

● var-nmi.mount - NMI state directory
   Loaded: loaded (/etc/systemd/system/var-nmi.mount; enabled; vendor preset: disabled)
   Active: active (mounted) since Mon 2025-11-17 16:03:50 UTC; 21h ago
    Where: /var/nmi
     What: tmpfs
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/var-nmi.mount

● systemd-ask-password-console.path - Dispatch Password Requests to Console Directory Watch
   Loaded: loaded (/usr/lib/systemd/system/systemd-ask-password-console.path; static; vendor preset: disabled)
   Active: active (waiting) since Mon 2025-11-17 16:03:50 UTC; 21h ago
     Docs: man:systemd-ask-password-console.service(8)

● systemd-ask-password-wall.path - Forward Password Requests to Wall Directory Watch
   Loaded: loaded (/usr/lib/systemd/system/systemd-ask-password-wall.path; static; vendor preset: disabled)
   Active: active (waiting) since Mon 2025-11-17 16:03:50 UTC; 21h ago
     Docs: man:systemd-ask-password-console.service(8)

● init.scope - System and Service Manager
   Loaded: loaded
Transient: yes
   Active: active (running) since Mon 2025-11-17 16:03:48 UTC; 21h ago
     Docs: man:systemd(1)
    Tasks: 1 (limit: 399998)
   Memory: 32.8M
   CGroup: /init.scope
           └─1 /usr/lib/systemd/systemd --switched-root --system --deserialize 16

Nov 18 13:55:35 XOSEncoder-01 systemd[1]: proc-sys-fs-binfmt_misc.automount: Got automount request for /proc/sys/fs/binfmt_misc, triggered by 9604 (sysctl)
Nov 18 13:55:35 XOSEncoder-01 systemd[1]: Mounting Arbitrary Executable File Formats File System...
Nov 18 13:55:35 XOSEncoder-01 systemd[1]: Mounted Arbitrary Executable File Formats File System.
Nov 18 13:55:45 XOSEncoder-01 systemd[1]: nmiperfmon_nics.service: Service RestartSec=30s expired, scheduling restart.
Nov 18 13:55:45 XOSEncoder-01 systemd[1]: nmiperfmon_nics.service: Scheduled restart job, restart counter is at 2369.
Nov 18 13:55:45 XOSEncoder-01 systemd[1]: Stopped NMI Performance Monitor.
Nov 18 13:55:45 XOSEncoder-01 systemd[1]: Starting NMI Performance Monitor...
Nov 18 13:55:45 XOSEncoder-01 systemd[1]: Started NMI Performance Monitor.
Nov 18 13:55:48 XOSEncoder-01 systemd[1]: nmiperfmon_nics.service: Main process exited, code=exited, status=2/INVALIDARGUMENT
Nov 18 13:55:48 XOSEncoder-01 systemd[1]: nmiperfmon_nics.service: Failed with result 'exit-code'.

● advantech_vega330x.service - Advantech Vega board firmware load
   Loaded: loaded (/etc/systemd/system/advantech_vega330x.service; enabled; vendor preset: disabled)
   Active: active (exited) since Mon 2025-11-17 16:04:07 UTC; 21h ago
 Main PID: 4087 (code=exited, status=0/SUCCESS)
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/advantech_vega330x.service

Nov 17 16:04:07 vosflex.localdomain systemd[1]: Starting Advantech Vega board firmware load...
Nov 17 16:04:07 vosflex.localdomain systemd[1]: Started Advantech Vega board firmware load.

● ahslog.service - Active Health Service Logger
   Loaded: loaded (/usr/lib/systemd/system/ahslog.service; enabled; vendor preset: enabled)
  Drop-In: /etc/systemd/system/ahslog.service.d
           └─05-cpusetconfig.conf, 05-exec-condition.conf, 05-resource-limit.conf
   Active: inactive (dead) since Mon 2025-11-17 16:05:06 UTC; 21h ago
  Process: 12340 ExecStart=/sbin/ahslog -f $OPTIONS (code=exited, status=0/SUCCESS)
  Process: 12336 ExecStartPre=/opt/omneon/sbin/cpusetconfig ahslog.service (code=exited, status=0/SUCCESS)
  Process: 8816 ExecStartPre=/bin/sleep 10 (code=exited, status=0/SUCCESS)
 Main PID: 12340 (code=exited, status=0/SUCCESS)
      CPU: 215ms

Nov 17 16:04:54 vosflex.localdomain systemd[1]: Starting Active Health Service Logger...
Nov 17 16:05:04 vosflex.localdomain systemd[1]: Started Active Health Service Logger.
Nov 17 16:05:06 vosflex.localdomain systemd[1]: ahslog.service: Succeeded.
Nov 17 16:05:06 vosflex.localdomain systemd[1]: ahslog.service: Consumed 215ms CPU time

● amsd.service - Agentless Management Service daemon
   Loaded: loaded (/usr/lib/systemd/system/amsd.service; enabled; vendor preset: enabled)
  Drop-In: /etc/systemd/system/amsd.service.d
           └─05-cpusetconfig.conf, 05-exec-condition.conf, 05-resource-limit.conf
   Active: active (running) since Mon 2025-11-17 16:04:58 UTC; 21h ago
  Process: 8830 ExecStartPre=/opt/omneon/sbin/cpusetconfig amsd.service (code=exited, status=0/SUCCESS)
 Main PID: 8867 (amsd)
    Tasks: 1 (limit: 399998)
   Memory: 86.6M (high: 512.0M max: 1.0G)
      CPU: 1min 25.819s
   CGroup: /system.slice/amsd.service
           └─8867 /sbin/amsd -f

Nov 17 16:04:54 vosflex.localdomain systemd[1]: Starting Agentless Management Service daemon...
Nov 17 16:04:58 vosflex.localdomain systemd[1]: Started Agentless Management Service daemon.
Nov 17 16:04:58 vosflex.localdomain amsd[8867]: amsd Started . .

● atd.service - Job spooling tools
   Loaded: loaded (/usr/lib/systemd/system/atd.service; enabled; vendor preset: enabled)
  Drop-In: /etc/systemd/system/atd.service.d
           └─05-cpusetconfig.conf
   Active: active (running) since Mon 2025-11-17 16:04:54 UTC; 21h ago
  Process: 8908 ExecStartPre=/opt/omneon/sbin/cpusetconfig atd.service (code=exited, status=0/SUCCESS)
 Main PID: 8918 (atd)
    Tasks: 1 (limit: 399998)
   Memory: 264.0K
   CGroup: /system.slice/atd.service
           └─8918 /usr/sbin/atd -f

Nov 17 16:04:54 vosflex.localdomain systemd[1]: Starting Job spooling tools...
Nov 17 16:04:54 vosflex.localdomain cpusetconfig[8908]: + '[' -n atd.service ']'
Unit auto-cpufreq.service could not be found.
Nov 17 16:04:54 vosflex.localdomain cpusetconfig[8908]: + mkdir -p /run/cpusetconfig/atd.service
Nov 17 16:04:54 vosflex.localdomain cpusetconfig[8908]: + '[' -f /run/nmiirq/allowedcpulist ']'
Nov 17 16:04:54 vosflex.localdomain cpusetconfig[8908]: + CPUS=/sys/fs/cgroup/cpuset/system.slice/atd.service/cpuset.cpus
Nov 17 16:04:54 vosflex.localdomain cpusetconfig[8908]: + cat /run/nmiirq/allowedcpulist
Nov 17 16:04:54 vosflex.localdomain cpusetconfig[8908]: + exit 0
Nov 17 16:04:54 vosflex.localdomain systemd[1]: Started Job spooling tools.

● atop-rotate.service - Restart atop daemon to rotate logs
   Loaded: loaded (/usr/lib/systemd/system/atop-rotate.service; static; vendor preset: disabled)
   Active: inactive (dead) since Tue 2025-11-18 00:00:05 UTC; 13h ago
  Process: 439124 ExecStart=/usr/bin/systemctl try-restart atop.service (code=exited, status=0/SUCCESS)
 Main PID: 439124 (code=exited, status=0/SUCCESS)

Nov 18 00:00:05 XOSEncoder-01 systemd[1]: Starting Restart atop daemon to rotate logs...
Nov 18 00:00:05 XOSEncoder-01 systemd[1]: atop-rotate.service: Succeeded.
Nov 18 00:00:05 XOSEncoder-01 systemd[1]: Started Restart atop daemon to rotate logs.

● atop.service - Atop advanced performance monitor
   Loaded: loaded (/usr/lib/systemd/system/atop.service; enabled; vendor preset: disabled)
  Drop-In: /etc/systemd/system/atop.service.d
           └─05-cpusetconfig.conf, 50-atop.conf, 60-restart.conf
   Active: active (running) since Tue 2025-11-18 00:00:15 UTC; 13h ago
     Docs: man:atop(1)
  Process: 440177 ExecStartPost=/usr/bin/find ${LOGPATH} -name atop_* -mtime +${LOGGENERATIONS} -exec rm -v {} ; (code=exited, status=0/SUCCESS)
  Process: 440174 ExecStartPre=/bin/mkdir -p ${LOGPATH} (code=exited, status=0/SUCCESS)
  Process: 440170 ExecStartPre=/opt/omneon/sbin/cpusetconfig atop.service (code=exited, status=0/SUCCESS)
  Process: 440168 ExecStartPre=/bin/sh -c test -n "$LOGGENERATIONS" -a "$LOGGENERATIONS" -eq "$LOGGENERATIONS" (code=exited, status=0/SUCCESS)
  Process: 440166 ExecStartPre=/bin/sh -c test -n "$LOGINTERVAL" -a "$LOGINTERVAL" -eq "$LOGINTERVAL" (code=exited, status=0/SUCCESS)
 Main PID: 440176 (atop)
    Tasks: 1 (limit: 399998)
   Memory: 99.4M
   CGroup: /system.slice/atop.service
           └─440176 /usr/bin/atop -S -w /var/log/atop27/atop_20251118 600

Nov 18 00:00:15 XOSEncoder-01 systemd[1]: Starting Atop advanced performance monitor...
Nov 18 00:00:15 XOSEncoder-01 cpusetconfig[440170]: + '[' -n atop.service ']'
Nov 18 00:00:15 XOSEncoder-01 cpusetconfig[440170]: + mkdir -p /run/cpusetconfig/atop.service
Nov 18 00:00:15 XOSEncoder-01 cpusetconfig[440170]: + '[' -f /run/nmiirq/allowedcpulist ']'
Nov 18 00:00:15 XOSEncoder-01 cpusetconfig[440170]: + CPUS=/sys/fs/cgroup/cpuset/system.slice/atop.service/cpuset.cpus
Nov 18 00:00:15 XOSEncoder-01 cpusetconfig[440170]: + cat /run/nmiirq/allowedcpulist
Nov 18 00:00:15 XOSEncoder-01 cpusetconfig[440170]: + exit 0
Nov 18 00:00:15 XOSEncoder-01 systemd[1]: Started Atop advanced performance monitor.

● auditd.service - Security Auditing Service
   Loaded: loaded (/usr/lib/systemd/system/auditd.service; disabled; vendor preset: enabled)
   Active: inactive (dead)
     Docs: man:auditd(8)
           https://github.com/linux-audit/audit-documentation

● auth-rpcgss-module.service - Kernel Module supporting RPCSEC_GSS
   Loaded: loaded (/usr/lib/systemd/system/auth-rpcgss-module.service; static; vendor preset: disabled)
   Active: inactive (dead)
Condition: start condition failed at Mon 2025-11-17 16:03:50 UTC; 21h ago

● bios.service - System BIOS upgrade service
   Loaded: loaded (/etc/systemd/system/bios.service; enabled; vendor preset: disabled)
   Active: active (exited) since Mon 2025-11-17 16:04:06 UTC; 21h ago
 Main PID: 3734 (code=exited, status=0/SUCCESS)
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/bios.service

Nov 17 16:04:04 vosflex.localdomain fw[3849]: Discovering data...Done
Nov 17 16:04:04 vosflex.localdomain fw[3849]: Saving configuration...
Nov 17 16:04:05 vosflex.localdomain fw[3849]: Configuration saved to: /tmp/tmp.TBnepk4cCK
Nov 17 16:04:06 vosflex.localdomain fw[3852]: Info: BIOS parameter EmbeddedDiagnostics should be Enabled, but not undefined
Nov 17 16:04:06 vosflex.localdomain fw[3852]: Info: BIOS parameter IntelligentProvisioning should be Enabled, but not undefined
Nov 17 16:04:06 vosflex.localdomain fw[3852]: Info: BIOS parameter PciSlot7Aspm should be Disabled, but not undefined
Nov 17 16:04:06 vosflex.localdomain fw[3852]: Info: BIOS parameter PciSlot7Enable should be Auto, but not undefined
Nov 17 16:04:06 vosflex.localdomain fw[3852]: Info: BIOS parameter PciSlot7LinkSpeed should be Auto, but not undefined
Nov 17 16:04:06 vosflex.localdomain fw[3852]: Info: BIOS parameter PciSlot7OptionROM should be Enabled, but not undefined
Nov 17 16:04:06 vosflex.localdomain systemd[1]: Started System BIOS upgrade service.

● chrony-wait.service - Wait for chrony to synchronize system clock
   Loaded: loaded (/etc/systemd/system/chrony-wait.service; enabled; vendor preset: disabled)
   Active: active (exited) since Mon 2025-11-17 16:04:54 UTC; 21h ago
  Process: 8886 ExecStart=/opt/omneon/sbin/chrony-wait-ifneed.sh (code=exited, status=0/SUCCESS)
 Main PID: 8886 (code=exited, status=0/SUCCESS)
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/chrony-wait.service

Nov 17 16:04:54 vosflex.localdomain systemd[1]: Starting Wait for chrony to synchronize system clock...
Nov 17 16:04:54 vosflex.localdomain chrony-wait-ifneed.sh[8886]: No NTP servers or PTP domains configured
Nov 17 16:04:54 vosflex.localdomain chrony-wait-ifneed.sh[8898]: Build time: 2025-10-25 13:47:32+00:00
Nov 17 16:04:54 vosflex.localdomain chrony-wait-ifneed.sh[8898]: Current time: 2025-11-17 16:04:54.789499+00:00
Nov 17 16:04:54 vosflex.localdomain systemd[1]: Started Wait for chrony to synchronize system clock.

● chronyd.service
   Loaded: masked (Reason: Unit chronyd.service is masked.)
   Active: inactive (dead)

● cleanup_zookeeper_on_startup.service - Starting system services and getting ready to start VOS
   Loaded: loaded (/etc/systemd/system/cleanup_zookeeper_on_startup.service; enabled; vendor preset: disabled)
   Active: active (exited) since Mon 2025-11-17 16:06:30 UTC; 21h ago
  Process: 12446 ExecStart=/opt/omneon/sbin/cleanup_zookeeper_on_startup.sh --force=true (code=exited, status=0/SUCCESS)
 Main PID: 12446 (code=exited, status=0/SUCCESS)
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/cleanup_zookeeper_on_startup.service

Nov 17 16:06:05 vosflex.localdomain cleanup_zookeeper_on_startup.sh[27674]: Removing `minio/vos-home-cluster1/app_bundles/1.28.2.0.283/vos_appbundle_stream_processing-vosbundle-1.28.2.0.283/apps/rmp-spe.jar`.
Nov 17 16:06:05 vosflex.localdomain cleanup_zookeeper_on_startup.sh[27674]: Removing `minio/vos-home-cluster1/app_bundles/1.28.2.0.283/vos_appbundle_stream_processing-vosbundle-1.28.2.0.283/apps/vos-spi.jar`.
Nov 17 16:06:05 vosflex.localdomain cleanup_zookeeper_on_startup.sh[27674]: Removing `minio/vos-home-cluster1/app_bundles/1.28.2.0.283/vos_appbundle_stream_processing-vosbundle-1.28.2.0.283/bundle.json`.
Nov 17 16:06:30 vosflex.localdomain cleanup_zookeeper_on_startup.sh[29471]: deployment.apps/logstash scaled
Nov 17 16:06:30 vosflex.localdomain cleanup_zookeeper_on_startup.sh[29510]: deployment.apps/kibana scaled
Nov 17 16:06:30 vosflex.localdomain cleanup_zookeeper_on_startup.sh[29588]: deployment.apps/grafana scaled
Nov 17 16:06:30 vosflex.localdomain cleanup_zookeeper_on_startup.sh[29588]: deployment.apps/prometheus-operator scaled
Nov 17 16:06:30 vosflex.localdomain cleanup_zookeeper_on_startup.sh[29629]: prometheus.monitoring.coreos.com/k8s patched (no change)
Nov 17 16:06:30 vosflex.localdomain cleanup_zookeeper_on_startup.sh[29671]: deployment.apps/vosha scaled
Nov 17 16:06:30 vosflex.localdomain systemd[1]: Started Starting system services and getting ready to start VOS.

● configure-flavour-defaults.service - Checking default settings for NTP & DNS on the first boot
   Loaded: loaded (/etc/systemd/system/configure-flavour-defaults.service; enabled; vendor preset: disabled)
   Active: inactive (dead)
Condition: start condition failed at Mon 2025-11-17 16:04:07 UTC; 21h ago

● containerd.service - containerd container runtime
   Loaded: loaded (/etc/systemd/system/containerd.service; enabled; vendor preset: disabled)
  Drop-In: /etc/systemd/system/containerd.service.d
           └─05-cpusetconfig.conf
   Active: active (running) since Mon 2025-11-17 16:04:56 UTC; 21h ago
     Docs: https://containerd.io
  Process: 8824 ExecStartPre=/opt/omneon/sbin/cpusetconfig containerd.service (code=exited, status=0/SUCCESS)
  Process: 8757 ExecStartPre=/sbin/modprobe overlay (code=exited, status=0/SUCCESS)
 Main PID: 8866 (containerd)
    Tasks: 624
   Memory: 367.7M
   CGroup: /system.slice/containerd.service
           ├─ 8866 /usr/local/bin/containerd
           ├─13338 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 9b7ebe191af1caee7c18acca4db8ef2c3c671c90ebed84dda923eaa5e4f16cad -address /run/containerd/containerd.sock
           ├─13339 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id f4065ad589e13db0b84c7391204b93094e65cb429fb782299eb826113accb8ea -address /run/containerd/containerd.sock
           ├─13340 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id fe01a89fedb057cc98e77dfcaadada33e8d2afe86478b11f263d5f2e0a9cdf70 -address /run/containerd/containerd.sock
           ├─13342 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id b521bb9fc800522d5ddc3a138193d4a08f4d51ce79ee0d19a6b3e42d1d4a4ee4 -address /run/containerd/containerd.sock
           ├─13343 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 122f73268927d80259bbcdb7f8459c557a41c549eed251b9c19c06ea29fb9f3c -address /run/containerd/containerd.sock
           ├─13344 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 97fe6cf2172869acf381caf38e9c77a80dd4ffb5ec9ffd342bb1297327a0c61c -address /run/containerd/containerd.sock
           ├─13345 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id f35a227c889252874195cfa019db37de8dd32f8c0a56e2dabdc9040729bdebed -address /run/containerd/containerd.sock
           ├─13648 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 0aa4eddeb5b3286b2b60bf5210fa41e2b8bc42f89958b23ba2038700b985324e -address /run/containerd/containerd.sock
           ├─13836 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 82b42c91685186661522be88ab242d1fdfde63fc80e20d245e230c4a43b91076 -address /run/containerd/containerd.sock
           ├─13974 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id d47371732f56f14f031e0b7b210ea0c0947efe1c4bf8fa70b5e0c3ebb888ecfa -address /run/containerd/containerd.sock
           ├─14063 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id bd5c5cd2d6fdb3330412b7f2a136f6c40255e3f17d2e12cb4e727dbadb0715a2 -address /run/containerd/containerd.sock
           ├─14593 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id e32dbeb647266922c1ed7cbf28d0793f5eb1684ee8ea9245e799311514fddcf8 -address /run/containerd/containerd.sock
           ├─15347 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 945e7003246f1d85e24f7367e5a3332fcc3bfe456f7f7b9fcc12cd5399d0ed27 -address /run/containerd/containerd.sock
           ├─15584 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id d11743e05135a6b7daa52339ed04bbac1981ffc8018794fe1c83b1da7b182e49 -address /run/containerd/containerd.sock
           ├─16084 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 4410a4c85c7991ba762df13284942daddcb3f54a86de129e666c5728aaaa401c -address /run/containerd/containerd.sock
           ├─16614 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id bb8f9aba5a9cfe49eda5b1007ecac6c2228462f77806cb7801aa820df7b2f0a4 -address /run/containerd/containerd.sock
           ├─24364 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 0d7155d211873ab9dd56c492ca84a6e4b4923853bba2d738ec0b87355cd4d5c7 -address /run/containerd/containerd.sock
           ├─25399 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 7df7db3e64340a5a722340e86d3ff57667247c0af41c3901b967d7282dcabb82 -address /run/containerd/containerd.sock
           ├─25721 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 77a2d3f8700f25d768cab0b31d993da60466e1cfeb0c21451e503ba6b4caa4f0 -address /run/containerd/containerd.sock
           ├─26954 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 048a23daccd11173da5ccd3aed2475ac58bd2f5c48e564c56d3867c90e407e8b -address /run/containerd/containerd.sock
           ├─30076 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id bdd3f0371b20c26eabbf8c0c7141ac0992f046ae3f8d0673711e30a1078f8c0d -address /run/containerd/containerd.sock
           ├─30238 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 1d580c48cbcd18e29e222541f0507c3fb28987d91d35b55bd98d39e7d7c21917 -address /run/containerd/containerd.sock
           ├─30634 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 86235cf3a3f9e3cfc02fc7f4888ecb5e3cfa9c0ec97e426d2ae1ce316b32c197 -address /run/containerd/containerd.sock
           ├─40260 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 088d676e25b836ba1cad2c6a51c82f123437319e49f2e76950f72abfe9bf1927 -address /run/containerd/containerd.sock
           ├─40264 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 722977738813a4b4dba7b54d441862d21d5970ae817c4d9a8ce4634487fc6687 -address /run/containerd/containerd.sock
           ├─41057 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 68567ef6b45f1055433d0f3e6fbcd064ef9bb98190dd051f69fd12cff808e30f -address /run/containerd/containerd.sock
           ├─42434 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 4756130fd5ad708a904a99a0cad190c227812ba5f85f488054b470d5d9ab4fbb -address /run/containerd/containerd.sock
           ├─42443 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id ecb389f6e488e34ee05de07d5ba0b6a865880613de9512f762d02a25ef996982 -address /run/containerd/containerd.sock
           ├─42451 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 103288724dcc47b7a997f0c219c0fad5d24fe11321d3a9fbfa04ba4763764450 -address /run/containerd/containerd.sock
           ├─43094 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 8c433fb4c3fc17ad704bf6e54b939e9997d87dd176929ea899a6a96130f63b4f -address /run/containerd/containerd.sock
           ├─44628 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 42840c0de72c59943724e2371a66aadbd0d28beaf75a202ec52c78ab12091840 -address /run/containerd/containerd.sock
           ├─44852 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 00e98de63a20a316ed80b9fb58d481ce95ac134055b07fa246fb8be067636213 -address /run/containerd/containerd.sock
           ├─45020 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id fe68ab2c54c4e080fa59d2704214383fbd0d9afeee08b6f32fbda016dfe38caf -address /run/containerd/containerd.sock
           ├─45663 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id d8fdd5b3c74ad34b276029b4db554a5ac1526f30d1ed42e889a2d11810f97e27 -address /run/containerd/containerd.sock
           ├─46559 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id b4f17c609632632a613da8bf68ce25789705dbcb014364fa3b7419ae46c316a7 -address /run/containerd/containerd.sock
           ├─47074 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 8e6998139a624e92be3d5aec5ae450aef662eb452d6c1b589e89721e082040a1 -address /run/containerd/containerd.sock
           ├─47989 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 93457ab6bb3bd0b0033873f9f7b5eb98d2daaa300555d4b050a89460ee8dbe06 -address /run/containerd/containerd.sock
           ├─48450 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 6ae50f2a676c640e423edd4d4bf7cff29770d8a9e82293d7113d8fcef8912e53 -address /run/containerd/containerd.sock
           └─49612 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 3ad1a6a103e84d28055fe44b77c7699f1de9ca54723853a94bc8fe5f7309cb85 -address /run/containerd/containerd.sock

Nov 17 16:09:12 vosflex.localdomain containerd[8866]: time="2025-11-17T16:09:12.976732658Z" level=info msg="RunPodSandbox for &PodSandboxMetadata{Name:simulcrypt-6c4688f66d-4nj5q,Uid:aff14187-181d-4c0a-9445-a2326a3bf487,Namespace:cluster1,Attempt:0,}"
Nov 17 16:09:12 vosflex.localdomain containerd[8866]: time="2025-11-17T16:09:12.986213150Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
Nov 17 16:09:12 vosflex.localdomain containerd[8866]: time="2025-11-17T16:09:12.986544566Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
Nov 17 16:09:12 vosflex.localdomain containerd[8866]: time="2025-11-17T16:09:12.986554211Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
Nov 17 16:09:12 vosflex.localdomain containerd[8866]: time="2025-11-17T16:09:12.986629633Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
Nov 17 16:09:13 vosflex.localdomain containerd[8866]: time="2025-11-17T16:09:13.063245947Z" level=info msg="RunPodSandbox for &PodSandboxMetadata{Name:simulcrypt-6c4688f66d-4nj5q,Uid:aff14187-181d-4c0a-9445-a2326a3bf487,Namespace:cluster1,Attempt:0,} returns sandbox id \"3ad1a6a103e84d28055fe44b77c7699f1de9ca54723853a94bc8fe5f7309cb85\""
Nov 17 16:09:13 vosflex.localdomain containerd[8866]: time="2025-11-17T16:09:13.064697699Z" level=info msg="CreateContainer within sandbox \"3ad1a6a103e84d28055fe44b77c7699f1de9ca54723853a94bc8fe5f7309cb85\" for container &ContainerMetadata{Name:simulcrypt,Attempt:0,}"
Nov 17 16:09:13 vosflex.localdomain containerd[8866]: time="2025-11-17T16:09:13.068788418Z" level=info msg="CreateContainer within sandbox \"3ad1a6a103e84d28055fe44b77c7699f1de9ca54723853a94bc8fe5f7309cb85\" for &ContainerMetadata{Name:simulcrypt,Attempt:0,} returns container id \"7c297996517c2ebdd207bc733957e7ea43d63c8630dab617c28d6518db1f0282\""
Nov 17 16:09:13 vosflex.localdomain containerd[8866]: time="2025-11-17T16:09:13.069019063Z" level=info msg="StartContainer for \"7c297996517c2ebdd207bc733957e7ea43d63c8630dab617c28d6518db1f0282\""
Nov 17 16:09:13 vosflex.localdomain containerd[8866]: time="2025-11-17T16:09:13.096156649Z" level=info msg="StartContainer for \"7c297996517c2ebdd207bc733957e7ea43d63c8630dab617c28d6518db1f0282\" returns successfully"

● cpqFca.service - cpqFca MIB handler.
   Loaded: loaded (/usr/lib/systemd/system/cpqFca.service; enabled; vendor preset: enabled)
  Drop-In: /etc/systemd/system/cpqFca.service.d
           └─05-cpusetconfig.conf, 05-exec-condition.conf, 05-resource-limit.conf
   Active: inactive (dead)
Condition: start condition failed at Mon 2025-11-17 16:04:54 UTC; 21h ago
           └─ ConditionPathExists=/sys/class/fc_host was not met

● cpqIde.service - cpqIde MIB handler.
   Loaded: loaded (/usr/lib/systemd/system/cpqIde.service; enabled; vendor preset: enabled)
  Drop-In: /etc/systemd/system/cpqIde.service.d
           └─05-cpusetconfig.conf, 05-exec-condition.conf, 05-resource-limit.conf
   Active: active (running) since Mon 2025-11-17 16:04:55 UTC; 21h ago
  Process: 8814 ExecStartPre=/opt/omneon/sbin/cpusetconfig cpqIde.service (code=exited, status=0/SUCCESS)
 Main PID: 8863 (cpqIde)
    Tasks: 1 (limit: 399998)
   Memory: 2.0M (high: 512.0M max: 1.0G)
      CPU: 1.035s
   CGroup: /system.slice/cpqIde.service
           └─8863 /sbin/cpqIde -f

Nov 17 16:04:54 vosflex.localdomain systemd[1]: Starting cpqIde MIB handler....
Nov 17 16:04:55 vosflex.localdomain cpqIde[8863]: cpqIde Started . .
Nov 17 16:04:55 vosflex.localdomain systemd[1]: Started cpqIde MIB handler..

● cpqScsi.service - cpqScsi MIB handler.
   Loaded: loaded (/usr/lib/systemd/system/cpqScsi.service; enabled; vendor preset: enabled)
  Drop-In: /etc/systemd/system/cpqScsi.service.d
           └─05-cpusetconfig.conf, 05-exec-condition.conf, 05-resource-limit.conf
   Active: inactive (dead)
Condition: start condition failed at Mon 2025-11-17 16:04:54 UTC; 21h ago
           └─ ConditionPathExists=/dev/mpt2ctl was not met

● cpupower.service - Configure CPU power related settings
   Loaded: loaded (/usr/lib/systemd/system/cpupower.service; disabled; vendor preset: disabled)
   Active: inactive (dead)

● cpuset-init.service - Initialize cpuset system slice
   Loaded: loaded (/etc/systemd/system/cpuset-init.service; enabled; vendor preset: disabled)
   Active: active (running) since Mon 2025-11-17 16:03:50 UTC; 21h ago
 Main PID: 2418 (sleep)
    Tasks: 1 (limit: 399998)
   Memory: 376.0K
   CGroup: /user.slice/cpuset-init.service
           └─2418 /bin/sleep infinity

Nov 17 16:03:53 vosflex.localdomain bash[2374]: 0-167
Nov 17 16:03:53 vosflex.localdomain bash[2406]: 0
Nov 17 16:03:53 vosflex.localdomain bash[2411]: 1

● crash-clean.service - Service to manage /var/crash directory size
   Loaded: loaded (/etc/systemd/system/crash-clean.service; enabled; vendor preset: disabled)
   Active: inactive (dead)
Condition: start condition failed at Mon 2025-11-17 16:04:08 UTC; 21h ago

● credentials-init.service - Initialize user accounts credentials
   Loaded: loaded (/etc/systemd/system/credentials-init.service; enabled; vendor preset: disabled)
   Active: active (exited) since Mon 2025-11-17 16:04:08 UTC; 21h ago
  Process: 4377 ExecStart=/opt/omneon/sbin/pam-update-passwd update (code=exited, status=0/SUCCESS)
  Process: 4171 ExecStart=/opt/omneon/sbin/pam-update-ssh-access update (code=exited, status=0/SUCCESS)
 Main PID: 4377 (code=exited, status=0/SUCCESS)
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/credentials-init.service

Nov 17 16:04:08 vosflex.localdomain systemd[1]: Starting Initialize user accounts credentials...
Nov 17 16:04:08 vosflex.localdomain systemd[1]: Started Initialize user accounts credentials.

● crond.service - Command Scheduler
   Loaded: loaded (/usr/lib/systemd/system/crond.service; enabled; vendor preset: enabled)
  Drop-In: /etc/systemd/system/crond.service.d
           └─05-cpusetconfig.conf
   Active: active (running) since Mon 2025-11-17 16:04:54 UTC; 21h ago
  Process: 8931 ExecStartPre=/opt/omneon/sbin/cpusetconfig crond.service (code=exited, status=0/SUCCESS)
 Main PID: 8944 (crond)
    Tasks: 1 (limit: 399998)
   Memory: 1.2M
   CGroup: /system.slice/crond.service
           └─8944 /usr/sbin/crond -n

Nov 18 04:01:01 XOSEncoder-01 CROND[393257]: (root) CMD (run-parts /etc/cron.hourly)
Nov 18 05:01:01 XOSEncoder-01 CROND[4557]: (root) CMD (run-parts /etc/cron.hourly)
Nov 18 06:01:01 XOSEncoder-01 CROND[126507]: (root) CMD (run-parts /etc/cron.hourly)
Nov 18 07:01:01 XOSEncoder-01 CROND[236008]: (root) CMD (run-parts /etc/cron.hourly)
Nov 18 08:01:01 XOSEncoder-01 CROND[345399]: (root) CMD (run-parts /etc/cron.hourly)
Nov 18 09:01:01 XOSEncoder-01 CROND[454917]: (root) CMD (run-parts /etc/cron.hourly)
Nov 18 10:01:01 XOSEncoder-01 CROND[78507]: (root) CMD (run-parts /etc/cron.hourly)
Nov 18 11:01:01 XOSEncoder-01 CROND[188340]: (root) CMD (run-parts /etc/cron.hourly)
Nov 18 12:01:01 XOSEncoder-01 CROND[297768]: (root) CMD (run-parts /etc/cron.hourly)
Nov 18 13:01:01 XOSEncoder-01 CROND[407232]: (root) CMD (run-parts /etc/cron.hourly)

● dbus.service - D-Bus System Message Bus
   Loaded: loaded (/usr/lib/systemd/system/dbus.service; static; vendor preset: disabled)
  Drop-In: /etc/systemd/system/dbus.service.d
           └─05-cpusetconfig.conf
   Active: active (running) since Mon 2025-11-17 16:04:08 UTC; 21h ago
     Docs: man:dbus-daemon(1)
 Main PID: 4249 (dbus-daemon)
    Tasks: 1 (limit: 399998)
   Memory: 1.8M
   CGroup: /system.slice/dbus.service
           └─4249 /usr/bin/dbus-daemon --system --address=systemd: --nofork --nopidfile --systemd-activation --syslog-only

Nov 17 16:04:08 vosflex.localdomain systemd[1]: Started D-Bus System Message Bus.
Nov 17 16:04:08 vosflex.localdomain dbus-daemon[4249]: [system] Successfully activated service 'org.freedesktop.systemd1'
Nov 17 16:04:08 vosflex.localdomain dbus-daemon[4249]: [system] Activating via systemd: service name='org.freedesktop.hostname1' unit='dbus-org.freedesktop.hostname1.service' requested by ':1.10' (uid=0 pid=4574 comm="/usr/sbin/NetworkManager --no-daemon " label="kernel")
Nov 17 16:04:09 vosflex.localdomain dbus-daemon[4249]: [system] Successfully activated service 'org.freedesktop.hostname1'
Nov 17 16:04:09 vosflex.localdomain dbus-daemon[4249]: [system] Activating via systemd: service name='org.freedesktop.nm_dispatcher' unit='dbus-org.freedesktop.nm-dispatcher.service' requested by ':1.10' (uid=0 pid=4574 comm="/usr/sbin/NetworkManager --no-daemon " label="kernel")
Nov 17 16:04:09 vosflex.localdomain dbus-daemon[4249]: [system] Successfully activated service 'org.freedesktop.nm_dispatcher'
Nov 17 16:04:55 vosflex.localdomain dbus-daemon[4249]: [system] Activating via systemd: service name='org.freedesktop.PolicyKit1' unit='polkit.service' requested by ':1.15' (uid=0 pid=8832 comm="/usr/libexec/platform-python -Es /usr/sbin/tuned -" label="kernel")
Nov 17 16:04:55 vosflex.localdomain dbus-daemon[4249]: [system] Successfully activated service 'org.freedesktop.PolicyKit1'
Nov 17 16:06:38 vosflex.localdomain dbus-daemon[4249]: [system] Activating via systemd: service name='org.freedesktop.hostname1' unit='dbus-org.freedesktop.hostname1.service' requested by ':1.19' (uid=0 pid=31302 comm="systemd-analyze plot " label="kernel")
Nov 17 16:06:38 vosflex.localdomain dbus-daemon[4249]: [system] Successfully activated service 'org.freedesktop.hostname1'

● dcstatus.service - Deltacast port status monitor service
   Loaded: loaded (/etc/systemd/system/dcstatus.service; enabled; vendor preset: disabled)
   Active: inactive (dead)
Condition: start condition failed at Mon 2025-11-17 16:04:08 UTC; 21h ago

● dddvb-kmod.service - Digital Devices DVB board dddvb driver load
   Loaded: loaded (/etc/systemd/system/dddvb-kmod.service; enabled; vendor preset: disabled)
   Active: active (exited) since Mon 2025-11-17 16:04:07 UTC; 21h ago
 Main PID: 3898 (code=exited, status=0/SUCCESS)
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/dddvb-kmod.service

Nov 17 16:04:06 vosflex.localdomain systemd[1]: Starting Digital Devices DVB board dddvb driver load...
Nov 17 16:04:06 vosflex.localdomain dddvb-kmod[3898]: + /sbin/modprobe ddbridge fmode=0 no_voltage=1 dma_buf_num=32 dma_buf_size=43
Nov 17 16:04:06 vosflex.localdomain dddvb-kmod[3898]: + set +x
Nov 17 16:04:07 vosflex.localdomain systemd[1]: Started Digital Devices DVB board dddvb driver load.

● dddvbfw.service - dddvb firmware upgrade service
   Loaded: loaded (/etc/systemd/system/dddvbfw.service; enabled; vendor preset: disabled)
   Active: active (exited) since Mon 2025-11-17 16:04:07 UTC; 21h ago
 Main PID: 4053 (code=exited, status=0/SUCCESS)
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/dddvbfw.service

Nov 17 16:04:07 vosflex.localdomain systemd[1]: Starting dddvb firmware upgrade service...
Nov 17 16:04:07 vosflex.localdomain systemd[1]: Started dddvb firmware upgrade service.

● dektecfw.service - dektec firmware upgrade service
   Loaded: loaded (/etc/systemd/system/dektecfw.service; enabled; vendor preset: disabled)
   Active: active (exited) since Mon 2025-11-17 16:04:07 UTC; 21h ago
 Main PID: 4073 (code=exited, status=0/SUCCESS)
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/dektecfw.service

Nov 17 16:04:07 vosflex.localdomain systemd[1]: Starting dektec firmware upgrade service...
Nov 17 16:04:07 vosflex.localdomain systemd[1]: Started dektec firmware upgrade service.

● delete_pods_on_startup.service - Cleaning up K8s on start
   Loaded: loaded (/etc/systemd/system/delete_pods_on_startup.service; enabled; vendor preset: disabled)
   Active: active (exited) since Mon 2025-11-17 16:05:04 UTC; 21h ago
  Process: 10313 ExecStart=/opt/omneon/sbin/delete_pods_on_startup.sh --force=true (code=exited, status=0/SUCCESS)
 Main PID: 10313 (code=exited, status=0/SUCCESS)
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/delete_pods_on_startup.service

Nov 17 16:04:59 vosflex.localdomain delete_pods_on_startup.sh[12199]: Warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely.
Nov 17 16:04:59 vosflex.localdomain delete_pods_on_startup.sh[12199]: pod "ingress-nginx-controller-dqw4j" force deleted
Nov 17 16:04:59 vosflex.localdomain delete_pods_on_startup.sh[12242]: Warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely.
Nov 17 16:04:59 vosflex.localdomain delete_pods_on_startup.sh[12242]: pod "coredns-86d9c5c9c6-h2flf" force deleted
Nov 17 16:04:59 vosflex.localdomain delete_pods_on_startup.sh[12242]: pod "dashboard-metrics-scraper-8cb7b999d-67h5t" force deleted
Nov 17 16:04:59 vosflex.localdomain delete_pods_on_startup.sh[12242]: pod "kubernetes-dashboard-7d4d5b5d8c-ggrst" force deleted
Nov 17 16:04:59 vosflex.localdomain delete_pods_on_startup.sh[12284]: Warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely.
Nov 17 16:04:59 vosflex.localdomain delete_pods_on_startup.sh[12284]: pod "dtapi-serviced-8bpdn" force deleted
Nov 17 16:05:04 vosflex.localdomain delete_pods_on_startup.sh[12381]: node/vosflex uncordoned
Nov 17 16:05:04 vosflex.localdomain systemd[1]: Started Cleaning up K8s on start.

● deltacast-kmod.service - Deltacast board firmware load
   Loaded: loaded (/etc/systemd/system/deltacast-kmod.service; enabled; vendor preset: disabled)
   Active: active (exited) since Mon 2025-11-17 16:04:06 UTC; 21h ago
 Main PID: 3861 (code=exited, status=0/SUCCESS)
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/deltacast-kmod.service

Nov 17 16:04:06 vosflex.localdomain systemd[1]: Starting Deltacast board firmware load...
Nov 17 16:04:06 vosflex.localdomain systemd[1]: Started Deltacast board firmware load.

● dentry_cache_cleaner.service - Clean dentry cache
   Loaded: loaded (/etc/systemd/system/dentry_cache_cleaner.service; enabled; vendor preset: disabled)
  Drop-In: /etc/systemd/system/dentry_cache_cleaner.service.d
           └─05-cpusetconfig.conf
   Active: active (running) since Mon 2025-11-17 16:04:08 UTC; 21h ago
 Main PID: 4238 (dentry_cache_cl)
    Tasks: 2 (limit: 399998)
   Memory: 692.0K
   CGroup: /system.slice/dentry_cache_cleaner.service
           ├─ 4238 /bin/bash /usr/local/bin/dentry_cache_cleaner.sh
           └─10078 sleep 60

Nov 18 13:52:39 XOSEncoder-01 dentry_cache_cleaner.sh[4238]: 2025-11-18--13:52:39 Dentry cache is dropped.
Nov 18 13:53:39 XOSEncoder-01 dentry_cache_cleaner.sh[4238]: 2025-11-18--13:53:39 Unused percentage of dentries 64% exceeds limit 60%.
Nov 18 13:53:39 XOSEncoder-01 dentry_cache_cleaner.sh[4238]: 2025-11-18--13:53:39 Start dropping dentry cache.
Nov 18 13:53:39 XOSEncoder-01 dentry_cache_cleaner.sh[4238]: 2025-11-18--13:53:39 Dentry cache is dropped.
Nov 18 13:54:39 XOSEncoder-01 dentry_cache_cleaner.sh[4238]: 2025-11-18--13:54:39 Unused percentage of dentries 64% exceeds limit 60%.
Nov 18 13:54:39 XOSEncoder-01 dentry_cache_cleaner.sh[4238]: 2025-11-18--13:54:39 Start dropping dentry cache.
Nov 18 13:54:40 XOSEncoder-01 dentry_cache_cleaner.sh[4238]: 2025-11-18--13:54:40 Dentry cache is dropped.
Nov 18 13:55:40 XOSEncoder-01 dentry_cache_cleaner.sh[4238]: 2025-11-18--13:55:40 Unused percentage of dentries 74% exceeds limit 60%.
Nov 18 13:55:40 XOSEncoder-01 dentry_cache_cleaner.sh[4238]: 2025-11-18--13:55:40 Start dropping dentry cache.
Nov 18 13:55:40 XOSEncoder-01 dentry_cache_cleaner.sh[4238]: 2025-11-18--13:55:40 Dentry cache is dropped.

● disable_fallback_sensors.service - Disable HPE iLO Fallback sensors
   Loaded: loaded (/etc/systemd/system/disable_fallback_sensors.service; enabled; vendor preset: disabled)
   Active: inactive (dead) since Mon 2025-11-17 16:04:12 UTC; 21h ago
  Process: 6503 ExecStart=/opt/omneon/sbin/disable_fallback_sensors.sh start (code=exited, status=0/SUCCESS)
Unit display-manager.service could not be found.
 Main PID: 6503 (code=exited, status=0/SUCCESS)

Nov 17 16:04:12 vosflex.localdomain systemd[1]: Starting Disable HPE iLO Fallback sensors...
Nov 17 16:04:12 vosflex.localdomain disable_fallback_sensors[6503]: SUPPORTED_PRODUCT_CONFIG_LIST=ProLiant_DL320_Gen11:PCI
Nov 17 16:04:12 vosflex.localdomain disable_fallback_sensors[6503]: System Manufacturer: HPE
Nov 17 16:04:12 vosflex.localdomain disable_fallback_sensors[6503]: System Product Name: ProLiant DL325 Gen11
Nov 17 16:04:12 vosflex.localdomain disable_fallback_sensors[6503]: No configuration found for this product: ProLiant DL325 Gen11
Nov 17 16:04:12 vosflex.localdomain disable_fallback_sensors[6503]: Exiting...
Nov 17 16:04:12 vosflex.localdomain systemd[1]: disable_fallback_sensors.service: Succeeded.
Nov 17 16:04:12 vosflex.localdomain systemd[1]: Started Disable HPE iLO Fallback sensors.

● dm-event.service - Device-mapper event daemon
   Loaded: loaded (/usr/lib/systemd/system/dm-event.service; static; vendor preset: enabled)
   Active: inactive (dead)
     Docs: man:dmeventd(8)

● docker-crashlogs.service - Service to collect logs from stopper containers on start
   Loaded: loaded (/etc/systemd/system/docker-crashlogs.service; enabled; vendor preset: disabled)
   Active: active (exited) since Mon 2025-11-17 16:04:58 UTC; 21h ago
  Process: 9812 ExecStart=/opt/omneon/sbin/docker-crash-techdump.sh (code=exited, status=0/SUCCESS)
 Main PID: 9812 (code=exited, status=0/SUCCESS)
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/docker-crashlogs.service

Nov 17 16:04:56 vosflex.localdomain systemd[1]: Starting Service to collect logs from stopper containers on start...
Nov 17 16:04:57 vosflex.localdomain docker-crashlogs[11417]: tar: Removing leading `/' from member names
Nov 17 16:04:58 vosflex.localdomain systemd[1]: Started Service to collect logs from stopper containers on start.

● docker-overlay.service - Configuring container storage
   Loaded: loaded (/etc/systemd/system/docker-overlay.service; enabled; vendor preset: disabled)
   Active: inactive (dead) since Mon 2025-11-17 16:04:12 UTC; 21h ago
  Process: 4356 ExecStart=/opt/omneon/sbin/create-docker-overlayfs-disk.sh (code=exited, status=0/SUCCESS)
 Main PID: 4356 (code=exited, status=0/SUCCESS)

Nov 17 16:04:12 vosflex.localdomain create-docker-overlayfs-disk.sh[6471]: data     =                       bsize=4096   blocks=26738688, imaxpct=25
Nov 17 16:04:12 vosflex.localdomain create-docker-overlayfs-disk.sh[6471]:          =                       sunit=0      swidth=0 blks
Nov 17 16:04:12 vosflex.localdomain create-docker-overlayfs-disk.sh[6471]: naming   =version 2              bsize=4096   ascii-ci=0, ftype=1
Nov 17 16:04:12 vosflex.localdomain create-docker-overlayfs-disk.sh[6471]: log      =internal log           bsize=4096   blocks=4096, version=2
Nov 17 16:04:12 vosflex.localdomain create-docker-overlayfs-disk.sh[6471]:          =                       sectsz=512   sunit=0 blks, lazy-count=1
Nov 17 16:04:12 vosflex.localdomain create-docker-overlayfs-disk.sh[6471]: realtime =none                   extsz=4096   blocks=0, rtextents=0
Nov 17 16:04:12 vosflex.localdomain create-docker-overlayfs-disk.sh[4356]: + fstrim /var/lib/containerd
Nov 17 16:04:12 vosflex.localdomain create-docker-overlayfs-disk.sh[4356]: + exit 0
Nov 17 16:04:12 vosflex.localdomain systemd[1]: docker-overlay.service: Succeeded.
Nov 17 16:04:12 vosflex.localdomain systemd[1]: Started Configuring container storage.

● dracut-cmdline.service - dracut cmdline hook
   Loaded: loaded (/usr/lib/systemd/system/../../dracut/modules.d/98dracut-systemd/dracut-cmdline.service; static; vendor preset: disabled)
   Active: inactive (dead)
Condition: start condition failed at Mon 2025-11-17 16:03:48 UTC; 21h ago
     Docs: man:dracut-cmdline.service(8)

● dracut-initqueue.service - dracut initqueue hook
   Loaded: loaded (/usr/lib/systemd/system/../../dracut/modules.d/98dracut-systemd/dracut-initqueue.service; static; vendor preset: disabled)
   Active: inactive (dead)
Condition: start condition failed at Mon 2025-11-17 16:03:48 UTC; 21h ago
     Docs: man:dracut-initqueue.service(8)

● dracut-mount.service - dracut mount hook
   Loaded: loaded (/usr/lib/systemd/system/../../dracut/modules.d/98dracut-systemd/dracut-mount.service; static; vendor preset: disabled)
   Active: inactive (dead)
Condition: start condition failed at Mon 2025-11-17 16:03:49 UTC; 21h ago
     Docs: man:dracut-mount.service(8)

● dracut-pre-mount.service - dracut pre-mount hook
   Loaded: loaded (/usr/lib/systemd/system/../../dracut/modules.d/98dracut-systemd/dracut-pre-mount.service; static; vendor preset: disabled)
   Active: inactive (dead)
Condition: start condition failed at Mon 2025-11-17 16:03:48 UTC; 21h ago
     Docs: man:dracut-pre-mount.service(8)

● dracut-pre-pivot.service - dracut pre-pivot and cleanup hook
   Loaded: loaded (/usr/lib/systemd/system/../../dracut/modules.d/98dracut-systemd/dracut-pre-pivot.service; static; vendor preset: disabled)
   Active: inactive (dead)
Condition: start condition failed at Mon 2025-11-17 16:03:49 UTC; 21h ago
     Docs: man:dracut-pre-pivot.service(8)

● dracut-pre-trigger.service - dracut pre-trigger hook
   Loaded: loaded (/usr/lib/systemd/system/../../dracut/modules.d/98dracut-systemd/dracut-pre-trigger.service; static; vendor preset: disabled)
   Active: inactive (dead)
Condition: start condition failed at Mon 2025-11-17 16:03:48 UTC; 21h ago
     Docs: man:dracut-pre-trigger.service(8)

● dracut-pre-udev.service - dracut pre-udev hook
   Loaded: loaded (/usr/lib/systemd/system/../../dracut/modules.d/98dracut-systemd/dracut-pre-udev.service; static; vendor preset: disabled)
   Active: inactive (dead)
Condition: start condition failed at Mon 2025-11-17 16:03:48 UTC; 21h ago
     Docs: man:dracut-pre-udev.service(8)

● dracut-shutdown-onfailure.service - Service executing upon dracut-shutdown failure to perform cleanup
   Loaded: loaded (/usr/lib/systemd/system/../../dracut/modules.d/98dracut-systemd/dracut-shutdown-onfailure.service; static; vendor preset: disabled)
   Active: inactive (dead)
     Docs: man:dracut-shutdown.service(8)

● dracut-shutdown.service - Restore /run/initramfs on shutdown
   Loaded: loaded (/usr/lib/systemd/system/../../dracut/modules.d/98dracut-systemd/dracut-shutdown.service; static; vendor preset: disabled)
   Active: active (exited) since Mon 2025-11-17 16:04:08 UTC; 21h ago
     Docs: man:dracut-shutdown.service(8)
 Main PID: 4205 (code=exited, status=0/SUCCESS)
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/dracut-shutdown.service

Nov 17 16:04:08 vosflex.localdomain systemd[1]: Starting Restore /run/initramfs on shutdown...
Nov 17 16:04:08 vosflex.localdomain systemd[1]: Started Restore /run/initramfs on shutdown.

● dtstatus.service - DekTec port status monitor service
   Loaded: loaded (/etc/systemd/system/dtstatus.service; enabled; vendor preset: disabled)
  Drop-In: /etc/systemd/system/dtstatus.service.d
           └─05-cpusetconfig.conf
   Active: inactive (dead)
Condition: start condition failed at Mon 2025-11-17 16:04:07 UTC; 21h ago

● earlyoom.service - Early OOM Daemon
   Loaded: loaded (/etc/systemd/system/earlyoom.service; enabled; vendor preset: disabled)
  Drop-In: /etc/systemd/system/earlyoom.service.d
           └─05-cpusetconfig.conf
   Active: active (running) since Mon 2025-11-17 16:04:08 UTC; 21h ago
     Docs: man:earlyoom(1)
           https://github.com/rfjakob/earlyoom
  Process: 4198 ExecStartPre=/opt/omneon/sbin/cpusetconfig earlyoom.service (code=exited, status=0/SUCCESS)
 Main PID: 4378 (earlyoom)
    Tasks: 1 (limit: 50)
   Memory: 200.0K (limit: 500.0M)
   CGroup: /system.slice/earlyoom.service
           └─4378 /opt/omneon/bin/earlyoom -i -m 4 -r 30

Nov 18 13:51:11 XOSEncoder-01 earlyoom[4378]: mem avail: 164282 of 192496 MiB (85.34%), swap free:    0 of    0 MiB ( 0.00%)
Nov 18 13:51:41 XOSEncoder-01 earlyoom[4378]: mem avail: 164293 of 192496 MiB (85.35%), swap free:    0 of    0 MiB ( 0.00%)
Nov 18 13:52:11 XOSEncoder-01 earlyoom[4378]: mem avail: 164292 of 192496 MiB (85.35%), swap free:    0 of    0 MiB ( 0.00%)
Nov 18 13:52:41 XOSEncoder-01 earlyoom[4378]: mem avail: 164283 of 192496 MiB (85.34%), swap free:    0 of    0 MiB ( 0.00%)
Nov 18 13:53:11 XOSEncoder-01 earlyoom[4378]: mem avail: 164288 of 192496 MiB (85.35%), swap free:    0 of    0 MiB ( 0.00%)
Nov 18 13:53:41 XOSEncoder-01 earlyoom[4378]: mem avail: 164329 of 192496 MiB (85.37%), swap free:    0 of    0 MiB ( 0.00%)
Nov 18 13:54:11 XOSEncoder-01 earlyoom[4378]: mem avail: 164270 of 192496 MiB (85.34%), swap free:    0 of    0 MiB ( 0.00%)
Nov 18 13:54:41 XOSEncoder-01 earlyoom[4378]: mem avail: 164305 of 192496 MiB (85.35%), swap free:    0 of    0 MiB ( 0.00%)
Nov 18 13:55:11 XOSEncoder-01 earlyoom[4378]: mem avail: 164226 of 192496 MiB (85.31%), swap free:    0 of    0 MiB ( 0.00%)
Nov 18 13:55:41 XOSEncoder-01 earlyoom[4378]: mem avail: 164269 of 192496 MiB (85.34%), swap free:    0 of    0 MiB ( 0.00%)

● emergency.service - Emergency Shell
   Loaded: loaded (/usr/lib/systemd/system/emergency.service; static; vendor preset: disabled)
   Active: inactive (dead)
     Docs: man:sulogin(8)

● etcd.service - etcd
   Loaded: loaded (/etc/systemd/system/etcd.service; enabled; vendor preset: disabled)
  Drop-In: /etc/systemd/system/etcd.service.d
           └─05-cpusetconfig.conf
   Active: active (running) since Mon 2025-11-17 16:04:09 UTC; 21h ago
     Docs: https://github.com/coreos
  Process: 4354 ExecStartPre=/opt/omneon/sbin/cpusetconfig etcd.service (code=exited, status=0/SUCCESS)
 Main PID: 4383 (etcd)
    Tasks: 132 (limit: 399998)
   Memory: 305.4M
   CGroup: /system.slice/etcd.service
           └─4383 /usr/local/bin/etcd --name etcd --cert-file=/etc/etcd/kubernetes.pem --key-file=/etc/etcd/kubernetes-key.pem --trusted-ca-file=/etc/etcd/ca.pem --client-cert-auth --listen-client-urls https://127.0.0.1:2379 --advertise-client-urls https://127.0.0.1:2379 --initial-cluster-state new --auto-compaction-mode=periodic --auto-compaction-retention=100m --data-dir=/var/lib/etcd --logger=zap

Nov 18 13:44:56 XOSEncoder-01 etcd[4383]: {"level":"info","ts":"2025-11-18T13:44:56.617741Z","caller":"mvcc/kvstore_compaction.go:68","msg":"finished scheduled compaction","compact-revision":78542,"took":"892.085µs","hash":2069342923,"current-db-size-bytes":19894272,"current-db-size":"20 MB","current-db-size-in-use-bytes":3620864,"current-db-size-in-use":"3.6 MB"}
Nov 18 13:44:56 XOSEncoder-01 etcd[4383]: {"level":"info","ts":"2025-11-18T13:44:56.617756Z","caller":"mvcc/hash.go:137","msg":"storing new hash","hash":2069342923,"revision":78542,"compact-revision":78261}
Nov 18 13:46:08 XOSEncoder-01 etcd[4383]: {"level":"info","ts":"2025-11-18T13:46:08.663917Z","caller":"v3compactor/periodic.go:134","msg":"starting auto periodic compaction","revision":73166,"compact-period":"1h40m0s"}
Nov 18 13:46:08 XOSEncoder-01 etcd[4383]: {"level":"info","ts":"2025-11-18T13:46:08.664116Z","caller":"v3compactor/periodic.go:142","msg":"completed auto periodic compaction","revision":73166,"compact-period":"1h40m0s","took":"154.432µs"}
Nov 18 13:49:56 XOSEncoder-01 etcd[4383]: {"level":"info","ts":"2025-11-18T13:49:56.618179Z","caller":"mvcc/index.go:214","msg":"compact tree index","revision":78823}
Nov 18 13:49:56 XOSEncoder-01 etcd[4383]: {"level":"info","ts":"2025-11-18T13:49:56.619464Z","caller":"mvcc/kvstore_compaction.go:68","msg":"finished scheduled compaction","compact-revision":78823,"took":"980.867µs","hash":2298692740,"current-db-size-bytes":19894272,"current-db-size":"20 MB","current-db-size-in-use-bytes":3616768,"current-db-size-in-use":"3.6 MB"}
Nov 18 13:49:56 XOSEncoder-01 etcd[4383]: {"level":"info","ts":"2025-11-18T13:49:56.61948Z","caller":"mvcc/hash.go:137","msg":"storing new hash","hash":2298692740,"revision":78823,"compact-revision":78542}
Nov 18 13:54:56 XOSEncoder-01 etcd[4383]: {"level":"info","ts":"2025-11-18T13:54:56.620184Z","caller":"mvcc/index.go:214","msg":"compact tree index","revision":79103}
Nov 18 13:54:56 XOSEncoder-01 etcd[4383]: {"level":"info","ts":"2025-11-18T13:54:56.621382Z","caller":"mvcc/kvstore_compaction.go:68","msg":"finished scheduled compaction","compact-revision":79103,"took":"934.478µs","hash":530768048,"current-db-size-bytes":19894272,"current-db-size":"20 MB","current-db-size-in-use-bytes":3620864,"current-db-size-in-use":"3.6 MB"}
Nov 18 13:54:56 XOSEncoder-01 etcd[4383]: {"level":"info","ts":"2025-11-18T13:54:56.6214Z","caller":"mvcc/hash.go:137","msg":"storing new hash","hash":530768048,"revision":79103,"compact-revision":78823}

● fingerprint.service - Determining server model and part number
   Loaded: loaded (/etc/systemd/system/fingerprint.service; enabled; vendor preset: disabled)
   Active: active (exited) since Mon 2025-11-17 16:04:08 UTC; 21h ago
 Main PID: 4185 (code=exited, status=0/SUCCESS)
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/fingerprint.service

Nov 17 16:04:08 vosflex.localdomain systemd[1]: Starting Determining server model and part number...
Nov 17 16:04:08 vosflex.localdomain systemd[1]: Started Determining server model and part number.

● firewall_security_profile.service - Write firewall security.profile config on first boot (installation)
   Loaded: loaded (/etc/systemd/system/firewall_security_profile.service; enabled; vendor preset: disabled)
   Active: inactive (dead)
Condition: start condition failed at Mon 2025-11-17 16:04:08 UTC; 21h ago

● first-boot-cleanup.service - Cleanup first boot flag files
   Loaded: loaded (/etc/systemd/system/first-boot-cleanup.service; enabled; vendor preset: disabled)
   Active: inactive (dead)
Condition: start condition failed at Mon 2025-11-17 16:04:08 UTC; 21h ago
           └─ ConditionPathExistsGlob=/etc/.vosflex-first-boot-* was not met

● getty@tty1.service - Getty on tty1
   Loaded: loaded (/usr/lib/systemd/system/getty@.service; enabled; vendor preset: enabled)
  Drop-In: /etc/systemd/system/getty@tty1.service.d
           └─10-later.conf
   Active: active (running) since Mon 2025-11-17 16:06:31 UTC; 21h ago
     Docs: man:agetty(8)
           man:systemd-getty-generator(8)
           http://0pointer.de/blog/projects/serial-console.html
 Main PID: 30411 (agetty)
    Tasks: 1 (limit: 399998)
   Memory: 228.0K
   CGroup: /system.slice/system-getty.slice/getty@tty1.service
           └─30411 /sbin/agetty -o -p -- \u --noclear tty1 linux

Nov 17 16:06:31 vosflex.localdomain systemd[1]: Started Getty on tty1.

● getty@tty3.service - Getty on tty3
   Loaded: loaded (/etc/systemd/system/getty@tty3.service; enabled; vendor preset: disabled)
   Active: active (running) since Mon 2025-11-17 16:04:07 UTC; 21h ago
     Docs: man:agetty(8)
           man:systemd-getty-generator(8)
           http://0pointer.de/blog/projects/serial-console.html
 Main PID: 4103 (agetty)
    Tasks: 1 (limit: 399998)
   Memory: 148.0K
   CGroup: /system.slice/system-getty.slice/getty@tty3.service
           └─4103 /sbin/agetty -o -p -- \u --issue-file /etc/issue.early --noclear tty3 linux

Nov 17 16:04:07 vosflex.localdomain systemd[1]: Started Getty on tty3.

● getty@tty4.service - Getty on tty4
   Loaded: loaded (/etc/systemd/system/getty@tty4.service; enabled; vendor preset: disabled)
   Active: active (running) since Mon 2025-11-17 16:04:07 UTC; 21h ago
     Docs: man:agetty(8)
           man:systemd-getty-generator(8)
           http://0pointer.de/blog/projects/serial-console.html
 Main PID: 4117 (agetty)
    Tasks: 1 (limit: 399998)
   Memory: 176.0K
   CGroup: /system.slice/system-getty.slice/getty@tty4.service
           └─4117 /sbin/agetty -o -p -- \u --issue-file /etc/issue.early --noclear tty4 linux

Nov 17 16:04:07 vosflex.localdomain systemd[1]: Started Getty on tty4.

● grub-default-part.service - Set default boot partition
   Loaded: loaded (/etc/systemd/system/grub-default-part.service; enabled; vendor preset: disabled)
   Active: inactive (dead) since Mon 2025-11-17 16:03:53 UTC; 21h ago
 Main PID: 3564 (code=exited, status=0/SUCCESS)

Nov 17 16:03:53 vosflex.localdomain systemd[1]: Starting Set default boot partition...
Nov 17 16:03:53 vosflex.localdomain systemd[1]: grub-default-part.service: Succeeded.
Nov 17 16:03:53 vosflex.localdomain systemd[1]: Started Set default boot partition.

● gssproxy.service - GSSAPI Proxy Daemon
   Loaded: loaded (/usr/lib/systemd/system/gssproxy.service; disabled; vendor preset: disabled)
  Drop-In: /etc/systemd/system/gssproxy.service.d
           └─05-cpusetconfig.conf
   Active: active (running) since Mon 2025-11-17 16:04:54 UTC; 21h ago
  Process: 8835 ExecStart=/usr/sbin/gssproxy -D (code=exited, status=0/SUCCESS)
  Process: 8770 ExecStartPre=/opt/omneon/sbin/cpusetconfig gssproxy.service (code=exited, status=0/SUCCESS)
 Main PID: 8899 (gssproxy)
    Tasks: 6 (limit: 399998)
   Memory: 1.5M
   CGroup: /system.slice/gssproxy.service
           └─8899 /usr/sbin/gssproxy -D

Nov 17 16:04:54 vosflex.localdomain systemd[1]: Starting GSSAPI Proxy Daemon...
Nov 17 16:04:54 vosflex.localdomain cpusetconfig[8770]: + '[' -n gssproxy.service ']'
Nov 17 16:04:54 vosflex.localdomain cpusetconfig[8770]: + mkdir -p /run/cpusetconfig/gssproxy.service
Nov 17 16:04:54 vosflex.localdomain cpusetconfig[8770]: + '[' -f /run/nmiirq/allowedcpulist ']'
Nov 17 16:04:54 vosflex.localdomain cpusetconfig[8770]: + CPUS=/sys/fs/cgroup/cpuset/system.slice/gssproxy.service/cpuset.cpus
Nov 17 16:04:54 vosflex.localdomain cpusetconfig[8770]: + cat /run/nmiirq/allowedcpulist
Nov 17 16:04:54 vosflex.localdomain cpusetconfig[8770]: + exit 0
Nov 17 16:04:54 vosflex.localdomain systemd[1]: Started GSSAPI Proxy Daemon.

● hardlockup_panic.service - Enable kernel panic on hard lockup
   Loaded: loaded (/etc/systemd/system/hardlockup_panic.service; enabled; vendor preset: disabled)
   Active: active (exited) since Mon 2025-11-17 16:04:08 UTC; 21h ago
 Main PID: 4247 (code=exited, status=0/SUCCESS)
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/hardlockup_panic.service

Nov 17 16:04:08 vosflex.localdomain systemd[1]: Starting Enable kernel panic on hard lockup...
Nov 17 16:04:08 vosflex.localdomain sysctl[4247]: kernel.hardlockup_panic = 1
Nov 17 16:04:08 vosflex.localdomain systemd[1]: Started Enable kernel panic on hard lockup.

● hardware-monitor.service - Hardware Monitor
   Loaded: loaded (/etc/systemd/system/hardware-monitor.service; enabled; vendor preset: disabled)
   Active: active (exited) since Mon 2025-11-17 16:04:08 UTC; 21h ago
  Process: 4130 ExecStart=/opt/omneon/sbin/hardware_monitor.py (code=exited, status=0/SUCCESS)
 Main PID: 4130 (code=exited, status=0/SUCCESS)
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/hardware-monitor.service

Nov 17 16:04:08 vosflex.localdomain hardware_monitor.py[4130]: 2025-11-17 16:04:08,910 INFO    : /0/100/3.1/0/2.1.0 disk ATA Disk Toshiba MK000480GXNXB HPG0 241247BD3772 2.1.0 None 480103981056 None 8:16
Nov 17 16:04:08 vosflex.localdomain hardware_monitor.py[4130]: 2025-11-17 16:04:08,910 INFO    : /0/100/3.1/0/3.6e.0 disk SCSI Disk HPE MR416i-p Gen11 5.26 0037aac6d69bc7412f40d6661eb26200 3.6e.0 None 4798283776000 None 8:16
Nov 17 16:04:08 vosflex.localdomain hardware_monitor.py[4130]: 2025-11-17 16:04:08,910 INFO    : ---
Nov 17 16:04:08 vosflex.localdomain hardware_monitor.py[4130]: 2025-11-17 16:04:08,910 INFO    : /0/100/3.1/0/3.6e.0 disk SCSI Disk HPE MR416i-p Gen11 5.26 0037aac6d69bc7412f40d6661eb26200 3.6e.0 None 4798283776000 None 8:0
Nov 17 16:04:08 vosflex.localdomain hardware_monitor.py[4130]: 2025-11-17 16:04:08,911 INFO    : ----------------------------------------------------------------------------------------------------------------------------------
Nov 17 16:04:08 vosflex.localdomain hardware_monitor.py[4130]: 2025-11-17 16:04:08,911 INFO    :                                                                 USB
Nov 17 16:04:08 vosflex.localdomain hardware_monitor.py[4130]: 2025-11-17 16:04:08,911 INFO    : ----------------------------------------------------------------------------------------------------------------------------------
Nov 17 16:04:08 vosflex.localdomain hardware_monitor.py[4130]: 2025-11-17 16:04:08,911 INFO    : No changes has been found
Nov 17 16:04:08 vosflex.localdomain hardware_monitor.py[4130]: 2025-11-17 16:04:08,911 WARNING : Inventories have differences!
Nov 17 16:04:08 vosflex.localdomain systemd[1]: Started Hardware Monitor.

● hw_check_state.service - Basic HW verification
   Loaded: loaded (/etc/systemd/system/hw_check_state.service; enabled; vendor preset: disabled)
   Active: active (exited) since Mon 2025-11-17 16:04:54 UTC; 21h ago
  Process: 8868 ExecStart=/opt/omneon/sbin/hw_check_state.sh (code=exited, status=0/SUCCESS)
 Main PID: 8868 (code=exited, status=0/SUCCESS)
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/hw_check_state.service

Nov 17 16:04:54 vosflex.localdomain systemd[1]: Started Basic HW verification.

● iloauth.service - HPE iLO authentication service
   Loaded: loaded (/etc/systemd/system/iloauth.service; enabled; vendor preset: disabled)
  Drop-In: /etc/systemd/system/iloauth.service.d
           └─50-iloauth.conf
   Active: active (exited) since Mon 2025-11-17 16:04:12 UTC; 21h ago
  Process: 3868 ExecStart=/opt/omneon/nmi/nmipollers/bin/ilo_no_local_auth.sh (code=exited, status=0/SUCCESS)
 Main PID: 3868 (code=exited, status=0/SUCCESS)
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/iloauth.service

Nov 17 16:04:06 vosflex.localdomain systemd[1]: Starting HPE iLO authentication service...
Nov 17 16:04:12 vosflex.localdomain systemd[1]: Started HPE iLO authentication service.

● import-state.service - Import network configuration from initramfs
   Loaded: loaded (/usr/lib/systemd/system/import-state.service; enabled; vendor preset: enabled)
   Active: inactive (dead)
Condition: start condition failed at Mon 2025-11-17 16:03:51 UTC; 21h ago

● init-dummy-net-interface.service - Create dummy network interface
   Loaded: loaded (/etc/systemd/system/init-dummy-net-interface.service; enabled; vendor preset: disabled)
   Active: active (exited) since Mon 2025-11-17 16:04:08 UTC; 21h ago
 Main PID: 4129 (code=exited, status=0/SUCCESS)
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/init-dummy-net-interface.service

Nov 17 16:04:08 vosflex.localdomain systemd[1]: Starting Create dummy network interface...
Nov 17 16:04:08 vosflex.localdomain systemd[1]: Started Create dummy network interface.

● initrd-cleanup.service - Cleaning Up and Shutting Down Daemons
   Loaded: loaded (/usr/lib/systemd/system/initrd-cleanup.service; static; vendor preset: disabled)
   Active: inactive (dead) since Mon 2025-11-17 16:03:49 UTC; 21h ago
 Main PID: 2313 (code=exited, status=0/SUCCESS)

Nov 17 16:03:49 localhost systemd[1]: Starting Cleaning Up and Shutting Down Daemons...
Nov 17 16:03:49 localhost systemd[1]: initrd-cleanup.service: Succeeded.
Nov 17 16:03:49 localhost systemd[1]: Started Cleaning Up and Shutting Down Daemons.

● initrd-parse-etc.service - Reload Configuration from the Real Root
   Loaded: loaded (/usr/lib/systemd/system/initrd-parse-etc.service; static; vendor preset: disabled)
   Active: inactive (dead) since Mon 2025-11-17 16:03:49 UTC; 21h ago
 Main PID: 2311 (code=exited, status=0/SUCCESS)

Nov 17 16:03:49 localhost systemd[1]: Starting Reload Configuration from the Real Root...
Nov 17 16:03:49 localhost systemd[1]: initrd-parse-etc.service: Succeeded.
Nov 17 16:03:49 localhost systemd[1]: Started Reload Configuration from the Real Root.

● initrd-switch-root.service - Switch Root
   Loaded: loaded (/usr/lib/systemd/system/initrd-switch-root.service; static; vendor preset: disabled)
   Active: inactive (dead) since Mon 2025-11-17 16:03:50 UTC; 21h ago
 Main PID: 2338 (code=exited, status=0/SUCCESS)

Nov 17 16:03:49 localhost systemd[1]: Starting Switch Root...

● initrd-udevadm-cleanup-db.service - Cleanup udevd DB
   Loaded: loaded (/usr/lib/systemd/system/initrd-udevadm-cleanup-db.service; static; vendor preset: disabled)
   Active: inactive (dead) since Mon 2025-11-17 16:03:49 UTC; 21h ago
 Main PID: 2320 (code=exited, status=0/SUCCESS)

Nov 17 16:03:49 localhost systemd[1]: Starting Cleanup udevd DB...
Nov 17 16:03:49 localhost systemd[1]: initrd-udevadm-cleanup-db.service: Succeeded.
Nov 17 16:03:49 localhost systemd[1]: Started Cleanup udevd DB.

● Intel-nvm-fw.service - Intel NVM firmware upgrade service
   Loaded: loaded (/etc/systemd/system/Intel-nvm-fw.service; enabled; vendor preset: disabled)
   Active: active (exited) since Mon 2025-11-17 16:04:06 UTC; 21h ago
 Main PID: 3871 (code=exited, status=0/SUCCESS)
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/Intel-nvm-fw.service

Nov 17 16:04:06 vosflex.localdomain systemd[1]: Starting Intel NVM firmware upgrade service...
Nov 17 16:04:06 vosflex.localdomain systemd[1]: Started Intel NVM firmware upgrade service.

● ip6tables.service - IPv6 firewall with ip6tables
   Loaded: loaded (/usr/lib/systemd/system/ip6tables.service; enabled; vendor preset: disabled)
   Active: active (exited) since Mon 2025-11-17 16:04:08 UTC; 21h ago
  Process: 4192 ExecStart=/usr/libexec/iptables/ip6tables.init start (code=exited, status=0/SUCCESS)
 Main PID: 4192 (code=exited, status=0/SUCCESS)
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/ip6tables.service

Nov 17 16:04:08 vosflex.localdomain systemd[1]: Starting IPv6 firewall with ip6tables...
Nov 17 16:04:08 vosflex.localdomain ip6tables.init[4192]: Loading IPv6 rule set: source='/etc/sysconfig/ip6tables.XOS.STRICT', resolved='/etc/sysconfig/ip6tables.XOS.STRICT'
Nov 17 16:04:08 vosflex.localdomain ip6tables.init[4192]: ip6tables: Applying firewall rules: [  OK  ]
Nov 17 16:04:08 vosflex.localdomain systemd[1]: Started IPv6 firewall with ip6tables.

● ipmitool-sdr-dump.service - Dump Sensor Data Repository to file
   Loaded: loaded (/etc/systemd/system/ipmitool-sdr-dump.service; enabled; vendor preset: disabled)
   Active: inactive (dead) since Mon 2025-11-17 16:04:55 UTC; 21h ago
  Process: 8628 ExecStart=/opt/omneon/sbin/env_check.py --sdr-dump-only (code=exited, status=0/SUCCESS)
 Main PID: 8628 (code=exited, status=0/SUCCESS)

Nov 17 16:04:54 vosflex.localdomain systemd[1]: Started Dump Sensor Data Repository to file.
Nov 17 16:04:55 vosflex.localdomain env_check.py[8716]: Dumping Sensor Data Repository to '/run/ipmitool-sdr-dump.tmp'
Nov 17 16:04:55 vosflex.localdomain env_check.py[8628]:  
Nov 17 16:04:55 vosflex.localdomain env_check.py[8628]: Platform Environmental Report
Nov 17 16:04:55 vosflex.localdomain env_check.py[8628]:     Date/Time: Mon Nov 17 16:04:54 2025
Nov 17 16:04:55 vosflex.localdomain env_check.py[8628]:     Platform Hostname: vosflex.localdomain
Nov 17 16:04:55 vosflex.localdomain env_check.py[8628]:     Platform Type: Generic_HPE
Nov 17 16:04:55 vosflex.localdomain systemd[1]: ipmitool-sdr-dump.service: Succeeded.

● iptables.service - IPv4 firewall with iptables
   Loaded: loaded (/usr/lib/systemd/system/iptables.service; enabled; vendor preset: disabled)
   Active: active (exited) since Mon 2025-11-17 16:04:08 UTC; 21h ago
  Process: 4188 ExecStart=/usr/libexec/iptables/iptables.init start (code=exited, status=0/SUCCESS)
 Main PID: 4188 (code=exited, status=0/SUCCESS)
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/iptables.service

Nov 17 16:04:08 vosflex.localdomain systemd[1]: Starting IPv4 firewall with iptables...
Nov 17 16:04:08 vosflex.localdomain iptables.init[4188]: Loading IPv4 rule set: source='/etc/sysconfig/iptables.XOS.STRICT', resolved='/etc/sysconfig/iptables.XOS.STRICT'
Nov 17 16:04:08 vosflex.localdomain iptables.init[4188]: iptables: Applying firewall rules: [  OK  ]
Nov 17 16:04:08 vosflex.localdomain systemd[1]: Started IPv4 firewall with iptables.

● irqbalance.service - irqbalance daemon via nmi wrapper
   Loaded: loaded (/etc/systemd/system/irqbalance.service; enabled; vendor preset: enabled)
  Drop-In: /etc/systemd/system/irqbalance.service.d
           └─05-cpusetconfig.conf, 50-cpusetsync.conf
   Active: active (running) since Mon 2025-11-17 16:04:09 UTC; 21h ago
  Process: 4699 ExecStartPost=/opt/omneon/sbin/cpusetsync (code=exited, status=0/SUCCESS)
  Process: 4567 ExecStartPre=/opt/omneon/sbin/cpusetconfig irqbalance.service (code=exited, status=0/SUCCESS)
  Process: 4543 ExecStartPre=/bin/mkdir -p /run/nmiirq (code=exited, status=0/SUCCESS)
 Main PID: 4576 (irqbalance)
    Tasks: 2 (limit: 399998)
   Memory: 13.7M
   CGroup: /system.slice/irqbalance.service
           └─4576 /sbin/irqbalance --policyscript=/opt/omneon/sbin/irqbalance-ban.py --foreground

Nov 17 16:04:28 vosflex.localdomain /sbin/irqbalance[4576]: IRQ 544: Override ban to true
Nov 17 16:04:28 vosflex.localdomain /sbin/irqbalance[4576]: IRQ 572: Override ban to true
Nov 17 16:04:28 vosflex.localdomain /sbin/irqbalance[4576]: IRQ 534: Override ban to true
Nov 17 16:04:28 vosflex.localdomain /sbin/irqbalance[4576]: IRQ 562: Override ban to true
Nov 17 16:04:28 vosflex.localdomain /sbin/irqbalance[4576]: IRQ 524: Override ban to true
Nov 17 16:04:28 vosflex.localdomain /sbin/irqbalance[4576]: IRQ 552: Override ban to true
Nov 17 16:04:28 vosflex.localdomain /sbin/irqbalance[4576]: IRQ 580: Override ban to true
Nov 17 16:04:28 vosflex.localdomain /sbin/irqbalance[4576]: IRQ 542: Override ban to true
Nov 17 16:04:28 vosflex.localdomain /sbin/irqbalance[4576]: IRQ 570: Override ban to true
Nov 17 16:04:28 vosflex.localdomain /sbin/irqbalance[4576]: IRQ 532: Override ban to true

● irqbalance2110.service - irqbalance daemon via nmi wrapper (inverted)
   Loaded: loaded (/etc/systemd/system/irqbalance2110.service; enabled; vendor preset: disabled)
  Drop-In: /etc/systemd/system/irqbalance2110.service.d
           └─05-cpusetconfig.conf
   Active: active (running) since Mon 2025-11-17 16:04:10 UTC; 21h ago
  Process: 4706 ExecStartPre=/opt/omneon/sbin/cpusetconfig irqbalance2110.service (code=exited, status=0/SUCCESS)
  Process: 4702 ExecStartPre=/bin/mkdir -p /run/nmiirq (code=exited, status=0/SUCCESS)
 Main PID: 4710 (irqbalance)
    Tasks: 2 (limit: 399998)
   Memory: 1.2M
   CGroup: /system.slice/irqbalance2110.service
           └─4710 /sbin/irqbalance --policyscript=/opt/omneon/sbin/irqbalance-ban.py --foreground

Nov 17 16:04:29 vosflex.localdomain /sbin/irqbalance[4710]: IRQ 72: Override ban to true
Nov 17 16:04:29 vosflex.localdomain /sbin/irqbalance[4710]: IRQ 70: Override ban to true
Nov 17 16:04:29 vosflex.localdomain /sbin/irqbalance[4710]: IRQ 77: Override ban to true
Nov 17 16:04:29 vosflex.localdomain /sbin/irqbalance[4710]: IRQ 75: Override ban to true
Nov 17 16:04:29 vosflex.localdomain /sbin/irqbalance[4710]: IRQ 31: Override ban to true
Nov 17 16:04:29 vosflex.localdomain /sbin/irqbalance[4710]: IRQ 0: Override ban to true
Nov 17 16:04:29 vosflex.localdomain /sbin/irqbalance[4710]: IRQ 8: Override ban to true
Nov 17 16:04:29 vosflex.localdomain /sbin/irqbalance[4710]: IRQ 9: Override ban to true
Nov 17 16:04:29 vosflex.localdomain /sbin/irqbalance[4710]: IRQ 10: Override ban to true
Nov 17 16:05:00 vosflex.localdomain /sbin/irqbalance[4710]: IRQ 4: Override ban to true

● k8s-keygen.service - Generate new kubernetes certificate if needed
   Loaded: loaded (/etc/systemd/system/k8s-keygen.service; enabled; vendor preset: disabled)
   Active: active (exited) since Mon 2025-11-17 16:04:08 UTC; 21h ago
  Process: 4132 ExecStart=/opt/omneon/sbin/k8s-keygen.sh (code=exited, status=0/SUCCESS)
 Main PID: 4132 (code=exited, status=0/SUCCESS)
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/k8s-keygen.service

Nov 17 16:04:08 vosflex.localdomain k8s-keygen.sh[4132]: service-account.pem Exist
Nov 17 16:04:08 vosflex.localdomain k8s-keygen.sh[4132]: admin.kubeconfig Exist
Nov 17 16:04:08 vosflex.localdomain k8s-keygen.sh[4132]: kube-scheduler.kubeconfig Exist
Nov 17 16:04:08 vosflex.localdomain k8s-keygen.sh[4132]: kube-controller-manager.kubeconfig Exist
Nov 17 16:04:08 vosflex.localdomain k8s-keygen.sh[4132]: Valid admin.pem
Nov 17 16:04:08 vosflex.localdomain k8s-keygen.sh[4132]: Valid ca.pem
Nov 17 16:04:08 vosflex.localdomain k8s-keygen.sh[4132]: Valid kubernetes.pem
Nov 17 16:04:08 vosflex.localdomain k8s-keygen.sh[4132]: Valid service-account.pem
Nov 17 16:04:08 vosflex.localdomain k8s-keygen.sh[4132]: kubeconfig Exist
Nov 17 16:04:08 vosflex.localdomain systemd[1]: Started Generate new kubernetes certificate if needed.

● kbdrate.service - Keyboard repeat rate
   Loaded: loaded (/etc/systemd/system/kbdrate.service; enabled; vendor preset: disabled)
   Active: active (exited) since Mon 2025-11-17 16:03:51 UTC; 21h ago
 Main PID: 3081 (code=exited, status=0/SUCCESS)
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/kbdrate.service

Nov 17 16:03:53 vosflex.localdomain bash[3082]: Typematic Rate set to 0.0 cps (delay = 0 ms)
Nov 17 16:03:53 vosflex.localdomain bash[3082]: old delay 0, period 0
Nov 17 16:03:53 vosflex.localdomain bash[3082]: Typematic Rate set to 0.0 cps (delay = 0 ms)

● kdump.service - Crash recovery kernel arming
   Loaded: loaded (/usr/lib/systemd/system/kdump.service; enabled; vendor preset: enabled)
   Active: active (exited) since Mon 2025-11-17 16:04:56 UTC; 21h ago
  Process: 8905 ExecStart=/usr/bin/kdumpctl start (code=exited, status=0/SUCCESS)
 Main PID: 8905 (code=exited, status=0/SUCCESS)
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/kdump.service

Nov 17 16:04:54 vosflex.localdomain systemd[1]: Starting Crash recovery kernel arming...
Nov 17 16:04:56 vosflex.localdomain kdumpctl[8917]: kdump: kexec: loaded kdump kernel
Nov 17 16:04:56 vosflex.localdomain kdumpctl[8917]: kdump: Starting kdump: [OK]
Nov 17 16:04:56 vosflex.localdomain systemd[1]: Started Crash recovery kernel arming.

● kmod-static-nodes.service - Create list of required static device nodes for the current kernel
   Loaded: loaded (/usr/lib/systemd/system/kmod-static-nodes.service; static; vendor preset: disabled)
   Active: active (exited) since Mon 2025-11-17 16:03:50 UTC; 21h ago
 Main PID: 2388 (code=exited, status=0/SUCCESS)
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/kmod-static-nodes.service

● ksoftirq-rt.service - Change ksoftirqd priorty
   Loaded: loaded (/etc/systemd/system/ksoftirq-rt.service; enabled; vendor preset: disabled)
   Active: active (exited) since Mon 2025-11-17 16:06:31 UTC; 21h ago
  Process: 29714 ExecStart=/bin/bash -c for p in $(/bin/pgrep ksoftirqd/); do /bin/chrt -f -p 3 $p ; done (code=exited, status=0/SUCCESS)
 Main PID: 29714 (code=exited, status=0/SUCCESS)
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/ksoftirq-rt.service

Nov 17 16:06:30 vosflex.localdomain systemd[1]: Starting Change ksoftirqd priorty...
Nov 17 16:06:31 vosflex.localdomain systemd[1]: Started Change ksoftirqd priorty.

● kube-apiserver.service - Kubernetes API Server
   Loaded: loaded (/etc/systemd/system/kube-apiserver.service; enabled; vendor preset: disabled)
  Drop-In: /etc/systemd/system/kube-apiserver.service.d
           └─05-cpusetconfig.conf
   Active: active (running) since Mon 2025-11-17 16:04:56 UTC; 21h ago
     Docs: https://github.com/kubernetes/kubernetes
  Process: 8941 ExecStartPre=/opt/omneon/sbin/cpusetconfig kube-apiserver.service (code=exited, status=0/SUCCESS)
  Process: 8927 ExecStartPre=/bin/mkdir -p /var/log/apiserver (code=exited, status=0/SUCCESS)
 Main PID: 8949 (kube-apiserver)
    Tasks: 151 (limit: 399998)
   Memory: 453.5M
   CGroup: /system.slice/kube-apiserver.service
           └─8949 /usr/local/bin/kube-apiserver --advertise-address=192.0.2.248 --allow-privileged=true --apiserver-count=1 --audit-log-maxage=30 --audit-log-maxbackup=3 --audit-log-maxsize=100 --audit-log-path=/var/log/apiserver/audit.log --authorization-mode=Node,RBAC --bind-address=0.0.0.0 --client-ca-file=/var/lib/kubernetes/ca.pem --enable-admission-plugins=NamespaceLifecycle,NodeRestriction,LimitRanger,ServiceAccount,DefaultStorageClass,ResourceQuota --etcd-cafile=/var/lib/kubernetes/ca.pem --etcd-certfile=/var/lib/kubernetes/kubernetes.pem --etcd-keyfile=/var/lib/kubernetes/kubernetes-key.pem --etcd-servers=https://127.0.0.1:2379 --event-ttl=1h --encryption-provider-config=/var/lib/kubernetes/encryption-config.yaml --kubelet-certificate-authority=/var/lib/kubernetes/ca.pem --kubelet-client-certificate=/var/lib/kubernetes/kubernetes.pem --kubelet-client-key=/var/lib/kubernetes/kubernetes-key.pem --profiling=false --runtime-config=api/all=true --service-account-key-file=/var/lib/kubernetes/service-account.pem --service-account-signing-key-file=/var/lib/kubernetes/service-account-key.pem --service-account-issuer=https://192.0.2.248:6443 --service-cluster-ip-range=203.0.113.0/24 --service-node-port-range=2500-50055 --tls-cert-file=/var/lib/kubernetes/kubernetes.pem --tls-private-key-file=/var/lib/kubernetes/kubernetes-key.pem --tls-cipher-suites=TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256,TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305,TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384,TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305,TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384,TLS_RSA_WITH_AES_256_GCM_SHA384,TLS_RSA_WITH_AES_128_GCM_SHA256 --shutdown-watch-termination-grace-period=5s --v=2

Nov 18 13:49:57 XOSEncoder-01 kube-apiserver[8949]: I1118 13:49:57.041115    8949 controller.go:228] Updating CRD OpenAPI spec because servicemonitors.monitoring.coreos.com changed
Nov 18 13:49:57 XOSEncoder-01 kube-apiserver[8949]: I1118 13:49:57.041120    8949 controller.go:228] Updating CRD OpenAPI spec because thanosrulers.monitoring.coreos.com changed
Nov 18 13:54:57 XOSEncoder-01 kube-apiserver[8949]: I1118 13:54:57.040988    8949 controller.go:228] Updating CRD OpenAPI spec because probes.monitoring.coreos.com changed
Nov 18 13:54:57 XOSEncoder-01 kube-apiserver[8949]: I1118 13:54:57.041024    8949 controller.go:228] Updating CRD OpenAPI spec because prometheuses.monitoring.coreos.com changed
Nov 18 13:54:57 XOSEncoder-01 kube-apiserver[8949]: I1118 13:54:57.041045    8949 controller.go:228] Updating CRD OpenAPI spec because prometheusrules.monitoring.coreos.com changed
Nov 18 13:54:57 XOSEncoder-01 kube-apiserver[8949]: I1118 13:54:57.041095    8949 controller.go:228] Updating CRD OpenAPI spec because servicemonitors.monitoring.coreos.com changed
Nov 18 13:54:57 XOSEncoder-01 kube-apiserver[8949]: I1118 13:54:57.041137    8949 controller.go:228] Updating CRD OpenAPI spec because thanosrulers.monitoring.coreos.com changed
Nov 18 13:54:57 XOSEncoder-01 kube-apiserver[8949]: I1118 13:54:57.041147    8949 controller.go:228] Updating CRD OpenAPI spec because alertmanagerconfigs.monitoring.coreos.com changed
Nov 18 13:54:57 XOSEncoder-01 kube-apiserver[8949]: I1118 13:54:57.041198    8949 controller.go:228] Updating CRD OpenAPI spec because alertmanagers.monitoring.coreos.com changed
Nov 18 13:54:57 XOSEncoder-01 kube-apiserver[8949]: I1118 13:54:57.041213    8949 controller.go:228] Updating CRD OpenAPI spec because podmonitors.monitoring.coreos.com changed

● kube-controller-manager.service - Kubernetes Controller Manager
   Loaded: loaded (/etc/systemd/system/kube-controller-manager.service; enabled; vendor preset: disabled)
  Drop-In: /etc/systemd/system/kube-controller-manager.service.d
           └─05-cpusetconfig.conf
   Active: active (running) since Mon 2025-11-17 16:04:56 UTC; 21h ago
     Docs: https://github.com/kubernetes/kubernetes
  Process: 10301 ExecStartPre=/opt/omneon/sbin/cpusetconfig kube-controller-manager.service (code=exited, status=0/SUCCESS)
 Main PID: 10312 (kube-controller)
    Tasks: 99 (limit: 399998)
   Memory: 141.7M
   CGroup: /system.slice/kube-controller-manager.service
           └─10312 /usr/local/bin/kube-controller-manager --bind-address=127.0.0.1 --cluster-cidr=198.51.100.0/24 --cluster-name=kubernetes --cluster-signing-cert-file=/var/lib/kubernetes/ca.pem --cluster-signing-key-file=/var/lib/kubernetes/ca-key.pem --kubeconfig=/var/lib/kubernetes/kube-controller-manager.kubeconfig --leader-elect=true --profiling=false --root-ca-file=/var/lib/kubernetes/ca.pem --service-account-private-key-file=/var/lib/kubernetes/service-account-key.pem --service-cluster-ip-range=203.0.113.0/24 --terminated-pod-gc-threshold=10 --use-service-account-credentials=true --v=2

Nov 18 04:08:10 XOSEncoder-01 kube-controller-manager[10312]: I1118 04:08:10.964899   10312 replica_set.go:676] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="cluster1/mediautils-6567db66d8" duration="18.458µs"
Nov 18 04:08:10 XOSEncoder-01 kube-controller-manager[10312]: I1118 04:08:10.964901   10312 replica_set.go:676] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="cluster1/ndcp-78b498f786" duration="11.157µs"
Nov 18 04:08:10 XOSEncoder-01 kube-controller-manager[10312]: I1118 04:08:10.964932   10312 replica_set.go:676] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="cluster1/vosha-6f75897574" duration="65.338µs"
Nov 18 04:08:10 XOSEncoder-01 kube-controller-manager[10312]: I1118 04:08:10.964961   10312 replica_set.go:676] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="cluster1/scte104-server-7bcb458fcf" duration="207.591µs"
Nov 18 04:08:10 XOSEncoder-01 kube-controller-manager[10312]: I1118 04:08:10.964966   10312 replica_set.go:676] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="cluster1/origin-engine-asset-operator-b87b4f48f" duration="221.171µs"
Nov 18 04:08:10 XOSEncoder-01 kube-controller-manager[10312]: I1118 04:08:10.964991   10312 replica_set.go:676] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="cluster1/live-ingest-origin-task-125eb6b4-eb00-4000-b90b-369d72cb3b56-5788d57d94" duration="84.476µs"
Nov 18 04:08:10 XOSEncoder-01 kube-controller-manager[10312]: I1118 04:08:10.965014   10312 replica_set.go:676] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="cluster1/minio-6b644d9fc7" duration="25.178µs"
Nov 18 04:08:10 XOSEncoder-01 kube-controller-manager[10312]: I1118 04:08:10.965013   10312 replica_set.go:676] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="teleport/teleport-b-648ccc456f" duration="34.161µs"
Nov 18 04:08:10 XOSEncoder-01 kube-controller-manager[10312]: I1118 04:08:10.965035   10312 replica_set.go:676] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/kubernetes-dashboard-7d4d5b5d8c" duration="122.504µs"
Nov 18 04:08:10 XOSEncoder-01 kube-controller-manager[10312]: I1118 04:08:10.965047   10312 replica_set.go:676] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="cluster1/nmos-85c5775785" duration="11.648µs"

● kube-proxy.service - Kubernetes Proxy
   Loaded: loaded (/etc/systemd/system/kube-proxy.service; enabled; vendor preset: disabled)
  Drop-In: /etc/systemd/system/kube-proxy.service.d
           └─05-cpusetconfig.conf
   Active: active (running) since Mon 2025-11-17 16:04:56 UTC; 21h ago
     Docs: https://github.com/kubernetes/kubernetes
  Process: 10298 ExecStartPre=/opt/omneon/sbin/cpusetconfig kube-proxy.service (code=exited, status=0/SUCCESS)
 Main PID: 10310 (kube-proxy)
    Tasks: 43 (limit: 399998)
   Memory: 63.6M
   CGroup: /system.slice/kube-proxy.service
           └─10310 /usr/local/bin/kube-proxy --config=/var/lib/kube-proxy/kube-proxy-config.yaml --proxy-port-range 40000-42000

Nov 17 16:05:30 vosflex.localdomain kube-proxy[10310]: I1117 16:05:30.208997   10310 server.go:874] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
Nov 17 16:05:30 vosflex.localdomain kube-proxy[10310]: I1117 16:05:30.211104   10310 config.go:192] "Starting service config controller"
Nov 17 16:05:30 vosflex.localdomain kube-proxy[10310]: I1117 16:05:30.211476   10310 config.go:101] "Starting endpoint slice config controller"
Nov 17 16:05:30 vosflex.localdomain kube-proxy[10310]: I1117 16:05:30.211510   10310 config.go:319] "Starting node config controller"
Nov 17 16:05:30 vosflex.localdomain kube-proxy[10310]: I1117 16:05:30.212323   10310 shared_informer.go:313] Waiting for caches to sync for service config
Nov 17 16:05:30 vosflex.localdomain kube-proxy[10310]: I1117 16:05:30.212323   10310 shared_informer.go:313] Waiting for caches to sync for endpoint slice config
Nov 17 16:05:30 vosflex.localdomain kube-proxy[10310]: I1117 16:05:30.212349   10310 shared_informer.go:313] Waiting for caches to sync for node config
Nov 17 16:05:30 vosflex.localdomain kube-proxy[10310]: I1117 16:05:30.313085   10310 shared_informer.go:320] Caches are synced for endpoint slice config
Nov 17 16:05:30 vosflex.localdomain kube-proxy[10310]: I1117 16:05:30.313124   10310 shared_informer.go:320] Caches are synced for service config
Nov 17 16:05:30 vosflex.localdomain kube-proxy[10310]: I1117 16:05:30.313085   10310 shared_informer.go:320] Caches are synced for node config

● kube-scheduler.service - Kubernetes Scheduler
   Loaded: loaded (/etc/systemd/system/kube-scheduler.service; enabled; vendor preset: disabled)
  Drop-In: /etc/systemd/system/kube-scheduler.service.d
           └─05-cpusetconfig.conf
   Active: active (running) since Mon 2025-11-17 16:04:56 UTC; 21h ago
     Docs: https://github.com/kubernetes/kubernetes
  Process: 10242 ExecStartPre=/opt/omneon/sbin/cpusetconfig kube-scheduler.service (code=exited, status=0/SUCCESS)
 Main PID: 10306 (kube-scheduler)
    Tasks: 55 (limit: 399998)
   Memory: 71.3M
   CGroup: /system.slice/kube-scheduler.service
           └─10306 /usr/local/bin/kube-scheduler --bind-address=127.0.0.1 --config=/etc/kubernetes/config/kube-scheduler.yaml --profiling=false --v=2

Nov 17 16:08:52 vosflex.localdomain kube-scheduler[10306]: I1117 16:08:52.737643   10306 schedule_one.go:304] "Successfully bound pod to node" pod="cluster1/atm-86776fb854-jpdss" node="vosflex" evaluatedNodes=1 feasibleNodes=1
Nov 17 16:09:04 vosflex.localdomain kube-scheduler[10306]: I1117 16:09:04.978384   10306 schedule_one.go:304] "Successfully bound pod to node" pod="cluster1/stream-processing-e7395e3b-b18c-442c-a045-e1ceced0696b-1.1dn5qw" node="vosflex" evaluatedNodes=1 feasibleNodes=1
Nov 17 16:09:05 vosflex.localdomain kube-scheduler[10306]: I1117 16:09:05.256237   10306 schedule_one.go:304] "Successfully bound pod to node" pod="cluster1/stream-processing-44de0751-40f2-4a65-ab19-290b7b8babb0-1.fmf7gj" node="vosflex" evaluatedNodes=1 feasibleNodes=1
Nov 17 16:09:05 vosflex.localdomain kube-scheduler[10306]: I1117 16:09:05.421101   10306 schedule_one.go:304] "Successfully bound pod to node" pod="cluster1/stream-processing-2f500ba9-d3b4-4332-bfa3-74be6e00aae2-1.9kqq5p" node="vosflex" evaluatedNodes=1 feasibleNodes=1
Nov 17 16:09:06 vosflex.localdomain kube-scheduler[10306]: I1117 16:09:06.385184   10306 schedule_one.go:304] "Successfully bound pod to node" pod="cluster1/casd-7476b47766-gdcn2" node="vosflex" evaluatedNodes=1 feasibleNodes=1
Nov 17 16:09:07 vosflex.localdomain kube-scheduler[10306]: I1117 16:09:07.044917   10306 schedule_one.go:304] "Successfully bound pod to node" pod="cluster1/esam-pois-d6b578dcd-ppg9x" node="vosflex" evaluatedNodes=1 feasibleNodes=1
Nov 17 16:09:07 vosflex.localdomain kube-scheduler[10306]: I1117 16:09:07.893595   10306 schedule_one.go:304] "Successfully bound pod to node" pod="cluster1/hhp-controller-647d64d7cc-jtlqg" node="vosflex" evaluatedNodes=1 feasibleNodes=1
Nov 17 16:09:09 vosflex.localdomain kube-scheduler[10306]: I1117 16:09:09.868145   10306 schedule_one.go:304] "Successfully bound pod to node" pod="cluster1/ndcp-78b498f786-kttxf" node="vosflex" evaluatedNodes=1 feasibleNodes=1
Nov 17 16:09:10 vosflex.localdomain kube-scheduler[10306]: I1117 16:09:10.661019   10306 schedule_one.go:304] "Successfully bound pod to node" pod="cluster1/tmd-control-79b998fd7-ljgsk" node="vosflex" evaluatedNodes=1 feasibleNodes=1
Nov 17 16:09:12 vosflex.localdomain kube-scheduler[10306]: I1117 16:09:12.373679   10306 schedule_one.go:304] "Successfully bound pod to node" pod="cluster1/simulcrypt-6c4688f66d-4nj5q" node="vosflex" evaluatedNodes=1 feasibleNodes=1

● kubelet.service - Kubernetes Kubelet
   Loaded: loaded (/etc/systemd/system/kubelet.service; enabled; vendor preset: disabled)
   Active: active (running) since Mon 2025-11-17 16:05:05 UTC; 21h ago
     Docs: https://github.com/kubernetes/kubernetes
 Main PID: 12424 (kubelet)
    Tasks: 11 (limit: 399998)
   Memory: 153.0M
      CPU: 1h 4min 19.505s
   CGroup: /system.slice/kubelet.service
           └─12424 /usr/local/bin/kubelet --config=/var/lib/kubelet/kubelet-config.yaml --kubeconfig=/var/lib/kubelet/kubeconfig --hostname-override=vosflex --container-runtime-endpoint=unix:///run/containerd/containerd.sock --v=2

Nov 17 16:09:12 vosflex.localdomain taskset[12424]: I1117 16:09:12.374782   12424 kubelet.go:2423] "SyncLoop ADD" source="api" pods=["cluster1/simulcrypt-6c4688f66d-4nj5q"]
Nov 17 16:09:12 vosflex.localdomain taskset[12424]: I1117 16:09:12.374826   12424 topology_manager.go:215] "Topology Admit Handler" podUID="aff14187-181d-4c0a-9445-a2326a3bf487" podNamespace="cluster1" podName="simulcrypt-6c4688f66d-4nj5q"
Nov 17 16:09:12 vosflex.localdomain taskset[12424]: I1117 16:09:12.375133   12424 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cluster1/simulcrypt-6c4688f66d-4nj5q"
Nov 17 16:09:12 vosflex.localdomain taskset[12424]: I1117 16:09:12.570733   12424 reconciler_common.go:247] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-66jgh\" (UniqueName: \"kubernetes.io/projected/aff14187-181d-4c0a-9445-a2326a3bf487-kube-api-access-66jgh\") pod \"simulcrypt-6c4688f66d-4nj5q\" (UID: \"aff14187-181d-4c0a-9445-a2326a3bf487\") " pod="cluster1/simulcrypt-6c4688f66d-4nj5q"
Nov 17 16:09:12 vosflex.localdomain taskset[12424]: I1117 16:09:12.671321   12424 reconciler_common.go:220] "operationExecutor.MountVolume started for volume \"kube-api-access-66jgh\" (UniqueName: \"kubernetes.io/projected/aff14187-181d-4c0a-9445-a2326a3bf487-kube-api-access-66jgh\") pod \"simulcrypt-6c4688f66d-4nj5q\" (UID: \"aff14187-181d-4c0a-9445-a2326a3bf487\") " pod="cluster1/simulcrypt-6c4688f66d-4nj5q"
Nov 17 16:09:12 vosflex.localdomain taskset[12424]: I1117 16:09:12.676798   12424 operation_generator.go:721] "MountVolume.SetUp succeeded for volume \"kube-api-access-66jgh\" (UniqueName: \"kubernetes.io/projected/aff14187-181d-4c0a-9445-a2326a3bf487-kube-api-access-66jgh\") pod \"simulcrypt-6c4688f66d-4nj5q\" (UID: \"aff14187-181d-4c0a-9445-a2326a3bf487\") " pod="cluster1/simulcrypt-6c4688f66d-4nj5q"
Nov 17 16:09:12 vosflex.localdomain taskset[12424]: I1117 16:09:12.976340   12424 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cluster1/simulcrypt-6c4688f66d-4nj5q"
Nov 17 16:09:13 vosflex.localdomain taskset[12424]: I1117 16:09:13.121607   12424 kubelet.go:2455] "SyncLoop (PLEG): event for pod" pod="cluster1/simulcrypt-6c4688f66d-4nj5q" event={"ID":"aff14187-181d-4c0a-9445-a2326a3bf487","Type":"ContainerStarted","Data":"7c297996517c2ebdd207bc733957e7ea43d63c8630dab617c28d6518db1f0282"}
Nov 17 16:09:13 vosflex.localdomain taskset[12424]: I1117 16:09:13.121634   12424 kubelet.go:2455] "SyncLoop (PLEG): event for pod" pod="cluster1/simulcrypt-6c4688f66d-4nj5q" event={"ID":"aff14187-181d-4c0a-9445-a2326a3bf487","Type":"ContainerStarted","Data":"3ad1a6a103e84d28055fe44b77c7699f1de9ca54723853a94bc8fe5f7309cb85"}
Nov 17 16:09:13 vosflex.localdomain taskset[12424]: I1117 16:09:13.127063   12424 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cluster1/simulcrypt-6c4688f66d-4nj5q" podStartSLOduration=1.12705396 podStartE2EDuration="1.12705396s" podCreationTimestamp="2025-11-17 16:09:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-17 16:09:13.12618473 +0000 UTC m=+248.171344602" watchObservedRunningTime="2025-11-17 16:09:13.12705396 +0000 UTC m=+248.172213832"

● ldconfig.service - Rebuild Dynamic Linker Cache
   Loaded: loaded (/usr/lib/systemd/system/ldconfig.service; static; vendor preset: disabled)
   Active: inactive (dead)
Condition: start condition failed at Mon 2025-11-17 16:03:51 UTC; 21h ago
     Docs: man:ldconfig(8)

● linux_performance_tuning_post_net.service - System performance tuning
   Loaded: loaded (/etc/systemd/system/linux_performance_tuning_post_net.service; enabled; vendor preset: disabled)
   Active: active (exited) since Mon 2025-11-17 16:04:57 UTC; 21h ago
  Process: 9402 ExecStart=/usr/local/bin/linux_performance_tuning.py --force --post-net (code=exited, status=0/SUCCESS)
 Main PID: 9402 (code=exited, status=0/SUCCESS)
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/linux_performance_tuning_post_net.service

Nov 17 16:04:55 vosflex.localdomain systemd[1]: Starting System performance tuning...
Nov 17 16:04:55 vosflex.localdomain linux_performance_tuning.py[9402]: Exec: ['/sbin/tuned-adm', 'profile', 'latency-performance']
Nov 17 16:04:57 vosflex.localdomain systemd[1]: Started System performance tuning.

● linux_performance_tuning_pre_net.service - System performance tuning (network)
   Loaded: loaded (/etc/systemd/system/linux_performance_tuning_pre_net.service; enabled; vendor preset: disabled)
   Active: active (exited) since Mon 2025-11-17 16:04:17 UTC; 21h ago
  Process: 4551 ExecStart=/usr/local/bin/linux_performance_tuning.py --force --pre-net (code=exited, status=0/SUCCESS)
 Main PID: 4551 (code=exited, status=0/SUCCESS)
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/linux_performance_tuning_pre_net.service

Nov 17 16:04:14 vosflex.localdomain linux_performance_tuning.py[4551]: Exec: ['/sbin/ethtool', '-A', 'ens1f0np0', 'rx', 'off', 'tx', 'off']
Nov 17 16:04:14 vosflex.localdomain linux_performance_tuning.py[4551]: Exec: ['/sbin/ethtool', '-G', 'ens1f0np0', 'rx', '8192', 'tx', '4096']
Nov 17 16:04:14 vosflex.localdomain linux_performance_tuning.py[4551]: Exec: ['/sbin/ethtool', '-N', 'ens1f1np1', 'rx-flow-hash', 'udp4', 'sdfn']
Nov 17 16:04:14 vosflex.localdomain linux_performance_tuning.py[4551]: Exec: ['/sbin/ethtool', '-C', 'ens1f1np1', 'adaptive-rx', 'off']
Nov 17 16:04:15 vosflex.localdomain linux_performance_tuning.py[4551]: Exec: ['/sbin/ethtool', '-C', 'ens1f1np1', 'rx-usecs', '950', 'rx-frames', '50', 'adaptive-tx', 'on']
Nov 17 16:04:16 vosflex.localdomain linux_performance_tuning.py[4551]: Exec: ['/sbin/ethtool', '-K', 'ens1f1np1', 'gro', 'off']
Nov 17 16:04:16 vosflex.localdomain linux_performance_tuning.py[4551]: Exec: ['/sbin/ethtool', '-A', 'ens1f1np1', 'autoneg', 'off', 'rx', 'off', 'tx', 'off']
Nov 17 16:04:16 vosflex.localdomain linux_performance_tuning.py[4551]: Exec: ['/sbin/ethtool', '-A', 'ens1f1np1', 'rx', 'off', 'tx', 'off']
Nov 17 16:04:17 vosflex.localdomain linux_performance_tuning.py[4551]: Exec: ['/sbin/ethtool', '-G', 'ens1f1np1', 'rx', '8192', 'tx', '4096']
Nov 17 16:04:17 vosflex.localdomain systemd[1]: Started System performance tuning (network).

● lm_sensors.service - Hardware Monitoring Sensors
   Loaded: loaded (/usr/lib/systemd/system/lm_sensors.service; enabled; vendor preset: enabled)
   Active: active (exited) since Mon 2025-11-17 16:04:08 UTC; 21h ago
 Main PID: 4242 (code=exited, status=0/SUCCESS)
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/lm_sensors.service

Nov 17 16:04:08 vosflex.localdomain systemd[1]: Starting Hardware Monitoring Sensors...
Nov 17 16:04:08 vosflex.localdomain lm_sensors-modprobe-wrapper[4127]: No sensors with loadable kernel modules configured.
Nov 17 16:04:08 vosflex.localdomain lm_sensors-modprobe-wrapper[4127]: Please, run 'sensors-detect' as root in order to search for available sensors.
Nov 17 16:04:08 vosflex.localdomain systemd[1]: Started Hardware Monitoring Sensors.

● loadmodules.service - Load legacy module configuration
   Loaded: loaded (/usr/lib/systemd/system/loadmodules.service; enabled; vendor preset: enabled)
   Active: inactive (dead)
Condition: start condition failed at Mon 2025-11-17 16:03:50 UTC; 21h ago

● lshw-dump.service - Hardware Lister XML Dump For TechDump
   Loaded: loaded (/etc/systemd/system/lshw-dump.service; enabled; vendor preset: disabled)
   Active: active (exited) since Mon 2025-11-17 16:06:31 UTC; 21h ago
  Process: 29729 ExecStart=/bin/bash -c lshw -xml > /var/run/lshw/lshw.xml (code=exited, status=0/SUCCESS)
  Process: 29719 ExecStartPre=/bin/mkdir -p /var/run/lshw/ (code=exited, status=0/SUCCESS)
 Main PID: 29729 (code=exited, status=0/SUCCESS)
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/lshw-dump.service

Nov 17 16:06:30 vosflex.localdomain systemd[1]: Starting Hardware Lister XML Dump For TechDump...
Unit lvm2-activation.service could not be found.
Unit modprobe@efi_pstore.service could not be found.
Nov 17 16:06:31 vosflex.localdomain systemd[1]: Started Hardware Lister XML Dump For TechDump.

● lspci-dump.service - Dump All PCI Devices For TechDump
   Loaded: loaded (/etc/systemd/system/lspci-dump.service; enabled; vendor preset: disabled)
   Active: active (exited) since Mon 2025-11-17 16:06:31 UTC; 21h ago
  Process: 29730 ExecStart=/bin/bash -c lspci -vvv > /var/run/lspci/lspci-v (code=exited, status=0/SUCCESS)
  Process: 29722 ExecStartPre=/bin/mkdir -p /var/run/lspci/ (code=exited, status=0/SUCCESS)
 Main PID: 29730 (code=exited, status=0/SUCCESS)
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/lspci-dump.service

Nov 17 16:06:30 vosflex.localdomain systemd[1]: Starting Dump All PCI Devices For TechDump...
Nov 17 16:06:31 vosflex.localdomain systemd[1]: Started Dump All PCI Devices For TechDump.

● lvm2-lvmpolld.service - LVM2 poll daemon
   Loaded: loaded (/usr/lib/systemd/system/lvm2-lvmpolld.service; static; vendor preset: disabled)
   Active: inactive (dead)
     Docs: man:lvmpolld(8)

● lvm2-monitor.service - Monitoring of LVM2 mirrors, snapshots etc. using dmeventd or progress polling
   Loaded: loaded (/usr/lib/systemd/system/lvm2-monitor.service; enabled; vendor preset: enabled)
   Active: active (exited) since Mon 2025-11-17 16:03:50 UTC; 21h ago
     Docs: man:dmeventd(8)
           man:lvcreate(8)
           man:lvchange(8)
           man:vgchange(8)
 Main PID: 2380 (code=exited, status=0/SUCCESS)
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/lvm2-monitor.service

● mellanoxfw.service - Mellanox ConnectX firmware upgrade service
   Loaded: loaded (/etc/systemd/system/mellanoxfw.service; enabled; vendor preset: disabled)
   Active: active (exited) since Mon 2025-11-17 16:04:06 UTC; 21h ago
 Main PID: 3869 (code=exited, status=0/SUCCESS)
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/mellanoxfw.service

Nov 17 16:04:06 vosflex.localdomain systemd[1]: Starting Mellanox ConnectX firmware upgrade service...
Nov 17 16:04:06 vosflex.localdomain fw[3914]: Info: Checking if Mellanox firmware update is needed...
Nov 17 16:04:06 vosflex.localdomain fw[3914]: Info: Mellanox firmware update is not needed.
Nov 17 16:04:06 vosflex.localdomain systemd[1]: Started Mellanox ConnectX firmware upgrade service.

● microcode.service - Load CPU microcode update
   Loaded: loaded (/usr/lib/systemd/system/microcode.service; enabled; vendor preset: enabled)
   Active: inactive (dead)
Condition: start condition failed at Mon 2025-11-17 16:04:07 UTC; 21h ago

● mnt-sda7-clean.service - Erase VOS data
   Loaded: loaded (/etc/systemd/system/mnt-sda7-clean.service; static; vendor preset: disabled)
   Active: active (exited) since Mon 2025-11-17 16:03:50 UTC; 21h ago
 Main PID: 2431 (code=exited, status=0/SUCCESS)
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/mnt-sda7-clean.service

● mnt-sda7-init.service - Internal data storage initialization service
   Loaded: loaded (/etc/systemd/system/mnt-sda7-init.service; static; vendor preset: disabled)
   Active: active (exited) since Mon 2025-11-17 16:03:50 UTC; 21h ago
 Main PID: 2553 (code=exited, status=0/SUCCESS)
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/mnt-sda7-init.service

● mr_cpqScsi.service - cpqScsi MIB handler for Smart Aray 824i-p MR Gen10 Controller.
   Loaded: loaded (/usr/lib/systemd/system/mr_cpqScsi.service; enabled; vendor preset: enabled)
  Drop-In: /etc/systemd/system/mr_cpqScsi.service.d
           └─05-cpusetconfig.conf, 05-exec-condition.conf, 05-resource-limit.conf
   Active: active (running) since Mon 2025-11-17 16:04:55 UTC; 21h ago
  Process: 8821 ExecStartPre=/opt/omneon/sbin/cpusetconfig mr_cpqScsi.service (code=exited, status=0/SUCCESS)
 Main PID: 8865 (mr_cpqScsi)
    Tasks: 3 (limit: 399998)
   Memory: 3.5M (high: 512.0M max: 1.0G)
      CPU: 1.268s
   CGroup: /system.slice/mr_cpqScsi.service
           ├─8865 /sbin/mr_cpqScsi -f
           └─9343 /sbin/mr_cpqScsi -f

Unit network.service could not be found.
Unit networking.service could not be found.
Nov 17 16:04:54 vosflex.localdomain systemd[1]: Starting cpqScsi MIB handler for Smart Aray 824i-p MR Gen10 Controller....
Nov 17 16:04:55 vosflex.localdomain mr_cpqScsi[8865]: mr_cpqScsi Started . .
Nov 17 16:04:55 vosflex.localdomain systemd[1]: Started cpqScsi MIB handler for Smart Aray 824i-p MR Gen10 Controller..

● NetworkManager-wait-online.service - Network Manager Wait Online
   Loaded: loaded (/usr/lib/systemd/system/NetworkManager-wait-online.service; enabled; vendor preset: disabled)
   Active: active (exited) since Mon 2025-11-17 16:04:09 UTC; 21h ago
     Docs: man:nm-online(1)
  Process: 4608 ExecStart=/usr/bin/nm-online -s -q (code=exited, status=0/SUCCESS)
 Main PID: 4608 (code=exited, status=0/SUCCESS)
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/NetworkManager-wait-online.service

Nov 17 16:04:08 vosflex.localdomain systemd[1]: Starting Network Manager Wait Online...
Nov 17 16:04:09 vosflex.localdomain systemd[1]: Started Network Manager Wait Online.

● NetworkManager.service - Network Manager
   Loaded: loaded (/usr/lib/systemd/system/NetworkManager.service; enabled; vendor preset: enabled)
  Drop-In: /etc/systemd/system/NetworkManager.service.d
           └─05-cpusetconfig.conf
   Active: active (running) since Mon 2025-11-17 16:04:08 UTC; 21h ago
     Docs: man:NetworkManager(8)
  Process: 4553 ExecStartPre=/opt/omneon/sbin/cpusetconfig NetworkManager.service (code=exited, status=0/SUCCESS)
 Main PID: 4574 (NetworkManager)
    Tasks: 3 (limit: 399998)
   Memory: 11.1M
   CGroup: /system.slice/NetworkManager.service
           └─4574 /usr/sbin/NetworkManager --no-daemon

Nov 17 16:08:52 vosflex.localdomain NetworkManager[4574]: <info>  [1763395732.1035] manager: (veth7a42c95b): new Veth device (/org/freedesktop/NetworkManager/Devices/42)
Nov 17 16:08:52 vosflex.localdomain NetworkManager[4574]: <info>  [1763395732.1124] manager: (vethd1459ad1): new Veth device (/org/freedesktop/NetworkManager/Devices/43)
Nov 17 16:08:52 vosflex.localdomain NetworkManager[4574]: <info>  [1763395732.1142] device (veth7a42c95b): carrier: link connected
Nov 17 16:08:52 vosflex.localdomain NetworkManager[4574]: <info>  [1763395732.1149] manager: (veth18440dce): new Veth device (/org/freedesktop/NetworkManager/Devices/44)
Nov 17 16:08:52 vosflex.localdomain NetworkManager[4574]: <info>  [1763395732.1178] device (veth18440dce): carrier: link connected
Nov 17 16:08:52 vosflex.localdomain NetworkManager[4574]: <info>  [1763395732.1188] device (vethd1459ad1): carrier: link connected
Nov 17 16:08:53 vosflex.localdomain NetworkManager[4574]: <info>  [1763395733.0514] manager: (vethe9f4720d): new Veth device (/org/freedesktop/NetworkManager/Devices/45)
Nov 17 16:08:53 vosflex.localdomain NetworkManager[4574]: <info>  [1763395733.0549] device (vethe9f4720d): carrier: link connected
Nov 17 16:09:10 vosflex.localdomain NetworkManager[4574]: <info>  [1763395750.9758] manager: (veth59671d56): new Veth device (/org/freedesktop/NetworkManager/Devices/46)
Nov 17 16:09:10 vosflex.localdomain NetworkManager[4574]: <info>  [1763395750.9793] device (veth59671d56): carrier: link connected

● nfs-idmapd.service - NFSv4 ID-name mapping service
   Loaded: loaded (/usr/lib/systemd/system/nfs-idmapd.service; static; vendor preset: disabled)
   Active: inactive (dead)

● nfs-mountd.service - NFS Mount Daemon
   Loaded: loaded (/usr/lib/systemd/system/nfs-mountd.service; static; vendor preset: disabled)
   Active: inactive (dead)

● nfs-server.service - NFS server and services
   Loaded: loaded (/usr/lib/systemd/system/nfs-server.service; disabled; vendor preset: disabled)
   Active: inactive (dead)

● nfs-utils.service - NFS server and client services
   Loaded: loaded (/usr/lib/systemd/system/nfs-utils.service; static; vendor preset: disabled)
   Active: inactive (dead)

● nfsdcld.service - NFSv4 Client Tracking Daemon
   Loaded: loaded (/usr/lib/systemd/system/nfsdcld.service; static; vendor preset: disabled)
   Active: inactive (dead)

● nis-domainname.service - Read and set NIS domainname from /etc/sysconfig/network
   Loaded: loaded (/usr/lib/systemd/system/nis-domainname.service; enabled; vendor preset: enabled)
   Active: active (exited) since Mon 2025-11-17 16:03:50 UTC; 21h ago
 Main PID: 2364 (code=exited, status=0/SUCCESS)
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/nis-domainname.service

● nmi.service - NMI Service
   Loaded: loaded (/etc/systemd/system/nmi.service; enabled; vendor preset: disabled)
  Drop-In: /etc/systemd/system/nmi.service.d
           └─05-cpusetconfig.conf
   Active: active (running) since Mon 2025-11-17 16:04:54 UTC; 21h ago
  Process: 8790 ExecStartPre=/opt/omneon/sbin/cpusetconfig nmi.service (code=exited, status=0/SUCCESS)
  Process: 8668 ExecStartPre=/bin/rm -f /var/tmp/update_distro.log /var/tmp/wget_download.log /var/tmp/update_urlinfo.txt /var/tmp/unpack_progress (code=exited, status=0/SUCCESS)
 Main PID: 8858 (java)
    Tasks: 188 (limit: 399998)
   Memory: 1.6G
   CGroup: /system.slice/nmi.service
           ├─ 8858 /bin/java -Dorg.apache.commons.logging.Log=org.apache.commons.logging.impl.SimpleLog -Dorg.apache.commons.logging.simplelog.defaultlog=warn -Dorg.apache.commons.logging.simplelog.showdatetime=false -XX:+UseConcMarkSweepGC -Xms128M -Xmx1024M -jar NMIService.jar
           ├─ 9551 /bin/bash /opt/omneon/sbin/techdump.sh /corefiles/TechDump-XOSEncoder-01-20251118-1355.zip
           ├─ 9571 /bin/bash /opt/omneon/sbin/techdump.sh /corefiles/TechDump-XOSEncoder-01-20251118-1355.zip
           ├─ 9585 /bin/bash /etc/techdump.d/10platform.sh
           ├─11154 /bin/systemctl status --all
           └─11637 sleep 0.2

Nov 18 13:56:06 XOSEncoder-01 java[8858]: P8858 T404 A8030 [invokeOperation] [Originator: NMX_1001] [OperationName: LockDevice] [OperationParam: <?xml version="1.0" encoding="utf-8"?><LockDeviceParam xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema"><Originator>10.10.106.214</Originator></LockDeviceParam>] Start
Nov 18 13:56:06 XOSEncoder-01 java[8858]: P8858 T404 A8030 [invokeOperation] [Originator: NMX_1001] [OperationName: LockDevice] Successful
Nov 18 13:56:07 XOSEncoder-01 java[8858]: P8858 T737 A8030 [invokeOperation] [Originator: VOS] [OperationName: QueryBackgroundDownloadStatus] [OperationParam: ] Start
Nov 18 13:56:07 XOSEncoder-01 java[8858]: P8858 T737 A8030 [invokeOperation] [Originator: VOS] [OperationName: QueryBackgroundDownloadStatus] Successful
Nov 18 13:56:08 XOSEncoder-01 java[8858]: P8858 T737 A8030 [invokeOperation] [Originator: VOS] [OperationName: QueryBackgroundTechdumpStatus] [OperationParam: ] Start
Nov 18 13:56:08 XOSEncoder-01 java[8858]: P8858 T737 A8030 [invokeOperation] [Originator: VOS] [OperationName: QueryBackgroundTechdumpStatus] Successful
Nov 18 13:56:08 XOSEncoder-01 java[8858]: P8858 T737 A8030 [invokeOperation] [Originator: VOS] [OperationName: QueryPendingSoftwareVersion] [OperationParam: ] Start
Nov 18 13:56:08 XOSEncoder-01 java[8858]: P8858 T737 A8030 [invokeOperation] [Originator: VOS] [OperationName: QueryPendingSoftwareVersion] Successful
Nov 18 13:56:08 XOSEncoder-01 java[8858]: P8858 T747 A8030 [invokeOperation] [Originator: NMX_1001] [OperationName: LockDevice] [OperationParam: <?xml version="1.0" encoding="utf-8"?><LockDeviceParam xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema"><Originator>10.10.106.214</Originator></LockDeviceParam>] Start
Nov 18 13:56:08 XOSEncoder-01 java[8858]: P8858 T747 A8030 [invokeOperation] [Originator: NMX_1001] [OperationName: LockDevice] Successful

● nminet.service - NMINet network service
   Loaded: loaded (/etc/systemd/system/nminet.service; enabled; vendor preset: disabled)
  Drop-In: /etc/systemd/system/nminet.service.d
           └─05-cpusetconfig.conf
   Active: active (running) since Mon 2025-11-17 16:04:54 UTC; 21h ago
  Process: 6845 ExecStartPre=/opt/omneon/sbin/cpusetconfig nminet.service (code=exited, status=0/SUCCESS)
 Main PID: 6858 (python3)
    Tasks: 19 (limit: 399998)
   Memory: 51.7M
   CGroup: /system.slice/nminet.service
           ├─6858 python3 /opt/omneon/nmi/nminet/bin/../src/nminet.py -v
           ├─6965 /usr/bin/teamd -N -o -U -d -n -t net1 -c {"device": "net1", "mcast_rejoin": {"count": 3, "interval": 25}, "notify_peers": {"count": 3, "interval": 25}, "runner": {"name": "activebackup", "hwaddr_policy": "same_all"}, "link_watch": {"name": "ethtool"}}
           ├─7002 /usr/bin/teamd -N -o -U -d -n -t net2 -c {"device": "net2", "mcast_rejoin": {"count": 3, "interval": 25}, "notify_peers": {"count": 3, "interval": 25}, "runner": {"name": "activebackup", "hwaddr_policy": "same_all"}, "link_watch": {"name": "ethtool"}}
           ├─7022 /usr/bin/teamd -N -o -U -d -n -t net3 -c {"device": "net3", "mcast_rejoin": {"count": 3, "interval": 25}, "notify_peers": {"count": 3, "interval": 25}, "runner": {"name": "activebackup", "hwaddr_policy": "same_all"}, "link_watch": {"name": "ethtool"}}
           ├─7058 /usr/bin/teamd -N -o -U -d -n -t net4 -c {"device": "net4", "mcast_rejoin": {"count": 3, "interval": 25}, "notify_peers": {"count": 3, "interval": 25}, "runner": {"name": "activebackup", "hwaddr_policy": "same_all"}, "link_watch": {"name": "ethtool"}}
           ├─7071 /usr/bin/teamd -N -o -U -d -n -t net5 -c {"device": "net5", "mcast_rejoin": {"count": 3, "interval": 25}, "notify_peers": {"count": 3, "interval": 25}, "runner": {"name": "activebackup", "hwaddr_policy": "same_all"}, "link_watch": {"name": "ethtool"}}
           ├─7107 /usr/bin/teamd -N -o -U -d -n -t net6 -c {"device": "net6", "mcast_rejoin": {"count": 3, "interval": 25}, "notify_peers": {"count": 3, "interval": 25}, "runner": {"name": "activebackup", "hwaddr_policy": "same_all"}, "link_watch": {"name": "ethtool"}}
           ├─7122 /usr/bin/teamd -N -o -U -d -n -t net9 -c {"device": "net9", "mcast_rejoin": {"count": 3, "interval": 25}, "notify_peers": {"count": 3, "interval": 25}, "runner": {"name": "activebackup", "hwaddr_policy": "same_all"}, "link_watch": {"name": "ethtool"}}
           └─7156 /usr/bin/teamd -N -o -U -d -n -t net10 -c {"device": "net10", "mcast_rejoin": {"count": 3, "interval": 25}, "notify_peers": {"count": 3, "interval": 25}, "runner": {"name": "activebackup", "hwaddr_policy": "same_all"}, "link_watch": {"name": "ethtool"}}

Nov 18 07:54:58 XOSEncoder-01 nminet[6858]: DEBUG(pyroute2.netlink.nlsocket) - Packet burst: delta=11 qsize=11 delay=0.01
Nov 18 08:35:30 XOSEncoder-01 nminet[6858]: DEBUG(pyroute2.netlink.nlsocket) - Packet burst: delta=12 qsize=12 delay=0.01
Nov 18 08:39:04 XOSEncoder-01 nminet[6858]: DEBUG(pyroute2.netlink.nlsocket) - Packet burst: delta=12 qsize=12 delay=0.01
Nov 18 09:32:15 XOSEncoder-01 nminet[6858]: DEBUG(pyroute2.netlink.nlsocket) - Packet burst: delta=13 qsize=13 delay=0.01
Nov 18 09:49:26 XOSEncoder-01 nminet[6858]: DEBUG(pyroute2.netlink.nlsocket) - Packet burst: delta=12 qsize=12 delay=0.01
Nov 18 09:54:20 XOSEncoder-01 nminet[6858]: DEBUG(pyroute2.netlink.nlsocket) - Packet burst: delta=11 qsize=11 delay=0.01
Nov 18 11:24:19 XOSEncoder-01 nminet[6858]: DEBUG(pyroute2.netlink.nlsocket) - Packet burst: delta=13 qsize=13 delay=0.01
Nov 18 11:50:40 XOSEncoder-01 nminet[6858]: DEBUG(pyroute2.netlink.nlsocket) - Packet burst: delta=12 qsize=12 delay=0.01
Nov 18 13:24:32 XOSEncoder-01 nminet[6858]: DEBUG(pyroute2.netlink.nlsocket) - Packet burst: delta=12 qsize=12 delay=0.01
Nov 18 13:54:24 XOSEncoder-01 nminet[6858]: DEBUG(pyroute2.netlink.nlsocket) - Packet burst: delta=13 qsize=13 delay=0.01

● nmiperfmon_fans.service - NMI Performance Monitor
   Loaded: loaded (/etc/systemd/system/nmiperfmon_fans.service; enabled; vendor preset: disabled)
  Drop-In: /etc/systemd/system/nmiperfmon_fans.service.d
           └─05-cpusetconfig.conf
   Active: active (running) since Mon 2025-11-17 16:04:54 UTC; 21h ago
  Process: 8675 ExecStartPre=/opt/omneon/sbin/cpusetconfig nmiperfmon_fans.service (code=exited, status=0/SUCCESS)
 Main PID: 8792 (nmiperfmon)
    Tasks: 28 (limit: 399998)
   Memory: 10.8M
   CGroup: /system.slice/nmiperfmon_fans.service
           └─8792 /opt/omneon/nmi/nmiperfmon

Nov 17 16:04:54 vosflex.localdomain nmiperfmon_fans[8792]: 	DekTecMetricBuffer: -1
Nov 17 16:04:54 vosflex.localdomain nmiperfmon_fans[8792]: 	DigitalDevicesMetricBuffer: 1024
Nov 17 16:04:54 vosflex.localdomain nmiperfmon_fans[8792]: 	HPEFanMetricBuffer: -1
Nov 17 16:04:54 vosflex.localdomain nmiperfmon_fans[8792]: 	HPEPumpMetricBuffer: -1
Nov 17 16:04:54 vosflex.localdomain nmiperfmon_fans[8792]: 	HPEPrimaryPowerMetricBuffer: -1
Nov 17 16:04:54 vosflex.localdomain nmiperfmon_fans[8792]: 	HPEBackupPowerMetricBuffer: -1
Nov 17 16:04:54 vosflex.localdomain nmiperfmon_fans[8792]: 16:04:54.911668 alarm_mapping.go:58: INFO: Available cards: map[]
Nov 17 16:04:54 vosflex.localdomain nmiperfmon_fans[8792]: 16:04:54.912436 alarm_mapping.go:133: INFO: Available cooling: map[HPE_Gen11Fun:{1 3 [Harmonic_FanFailure]} HPE_Gen11Pump:{2 4 [Harmonic_PumpFailure]}]
Nov 17 16:04:54 vosflex.localdomain nmiperfmon_fans[8792]: 16:04:54.912460 alarm_mapping.go:167: INFO: Available power supplies: map[HPE_Gen11B:{4 6 [Harmonic_BackupPowerSupplyFailure]} HPE_Gen11P:{4 5 [Harmonic_PowerSupplyFailure]}]
Nov 17 16:04:54 vosflex.localdomain nmiperfmon_fans[8792]: 16:04:54.912475 handlers.go:106: INFO: Metric Handlers: map[HPE Fan:[0x51a380] HPE Pump:[0x51a380]]

● nmiperfmon_nics.service - NMI Performance Monitor
   Loaded: loaded (/etc/systemd/system/nmiperfmon_nics.service; enabled; vendor preset: disabled)
  Drop-In: /etc/systemd/system/nmiperfmon_nics.service.d
           └─05-cpusetconfig.conf
   Active: activating (auto-restart) (Result: exit-code) since Tue 2025-11-18 13:55:48 UTC; 21s ago
  Process: 10256 ExecStart=/opt/omneon/nmi/nmiperfmon (code=exited, status=2)
  Process: 10248 ExecStartPre=/opt/omneon/sbin/cpusetconfig nmiperfmon_nics.service (code=exited, status=0/SUCCESS)
 Main PID: 10256 (code=exited, status=2)

● nmiperfmon_power_supplies.service - NMI Performance Monitor
   Loaded: loaded (/etc/systemd/system/nmiperfmon_power_supplies.service; enabled; vendor preset: disabled)
  Drop-In: /etc/systemd/system/nmiperfmon_power_supplies.service.d
           └─05-cpusetconfig.conf
   Active: active (running) since Mon 2025-11-17 16:04:54 UTC; 21h ago
  Process: 8696 ExecStartPre=/opt/omneon/sbin/cpusetconfig nmiperfmon_power_supplies.service (code=exited, status=0/SUCCESS)
 Main PID: 8800 (nmiperfmon)
    Tasks: 28 (limit: 399998)
   Memory: 10.6M
   CGroup: /system.slice/nmiperfmon_power_supplies.service
           └─8800 /opt/omneon/nmi/nmiperfmon

Nov 17 16:04:54 vosflex.localdomain nmiperfmon_power_supplies[8800]: 	DekTecMetricBuffer: -1
Nov 17 16:04:54 vosflex.localdomain nmiperfmon_power_supplies[8800]: 	DigitalDevicesMetricBuffer: 1024
Nov 17 16:04:54 vosflex.localdomain nmiperfmon_power_supplies[8800]: 	HPEFanMetricBuffer: -1
Nov 17 16:04:54 vosflex.localdomain nmiperfmon_power_supplies[8800]: 	HPEPumpMetricBuffer: -1
Nov 17 16:04:54 vosflex.localdomain nmiperfmon_power_supplies[8800]: 	HPEPrimaryPowerMetricBuffer: -1
Nov 17 16:04:54 vosflex.localdomain nmiperfmon_power_supplies[8800]: 	HPEBackupPowerMetricBuffer: -1
Nov 17 16:04:54 vosflex.localdomain nmiperfmon_power_supplies[8800]: 16:04:54.908792 alarm_mapping.go:58: INFO: Available cards: map[]
Nov 17 16:04:54 vosflex.localdomain nmiperfmon_power_supplies[8800]: 16:04:54.909606 alarm_mapping.go:133: INFO: Available cooling: map[HPE_Gen11Fun:{1 3 [Harmonic_FanFailure]} HPE_Gen11Pump:{2 4 [Harmonic_PumpFailure]}]
Nov 17 16:04:54 vosflex.localdomain nmiperfmon_power_supplies[8800]: 16:04:54.909634 alarm_mapping.go:167: INFO: Available power supplies: map[HPE_Gen11B:{4 6 [Harmonic_BackupPowerSupplyFailure]} HPE_Gen11P:{4 5 [Harmonic_PowerSupplyFailure]}]
Nov 17 16:04:54 vosflex.localdomain nmiperfmon_power_supplies[8800]: 16:04:54.909648 handlers.go:106: INFO: Metric Handlers: map[HPE Primary Power Supply:[0x51a1a0] HPE Backup Power Supply:[0x51a1a0]]

● nmiperfmon_raid_controllers.service - NMI Performance Monitor
   Loaded: loaded (/etc/systemd/system/nmiperfmon_raid_controllers.service; enabled; vendor preset: disabled)
  Drop-In: /etc/systemd/system/nmiperfmon_raid_controllers.service.d
           └─05-cpusetconfig.conf
   Active: active (running) since Mon 2025-11-17 16:04:54 UTC; 21h ago
  Process: 8702 ExecStartPre=/opt/omneon/sbin/cpusetconfig nmiperfmon_raid_controllers.service (code=exited, status=0/SUCCESS)
 Main PID: 8804 (nmiperfmon)
    Tasks: 19 (limit: 399998)
   Memory: 5.6M
   CGroup: /system.slice/nmiperfmon_raid_controllers.service
           └─8804 /opt/omneon/nmi/nmiperfmon

Nov 17 16:04:54 vosflex.localdomain nmiperfmon_raid_controllers[8804]: 	DekTecMetricBuffer: -1
Nov 17 16:04:54 vosflex.localdomain nmiperfmon_raid_controllers[8804]: 	DigitalDevicesMetricBuffer: 1024
Nov 17 16:04:54 vosflex.localdomain nmiperfmon_raid_controllers[8804]: 	HPEFanMetricBuffer: -1
Nov 17 16:04:54 vosflex.localdomain nmiperfmon_raid_controllers[8804]: 	HPEPumpMetricBuffer: -1
Nov 17 16:04:54 vosflex.localdomain nmiperfmon_raid_controllers[8804]: 	HPEPrimaryPowerMetricBuffer: -1
Nov 17 16:04:54 vosflex.localdomain nmiperfmon_raid_controllers[8804]: 	HPEBackupPowerMetricBuffer: -1
Nov 17 16:04:54 vosflex.localdomain nmiperfmon_raid_controllers[8804]: 16:04:54.912784 alarm_mapping.go:58: INFO: Available cards: map[]
Nov 17 16:04:54 vosflex.localdomain nmiperfmon_raid_controllers[8804]: 16:04:54.913531 alarm_mapping.go:133: INFO: Available cooling: map[HPE_Gen11Fun:{1 3 [Harmonic_FanFailure]} HPE_Gen11Pump:{2 4 [Harmonic_PumpFailure]}]
Nov 17 16:04:54 vosflex.localdomain nmiperfmon_raid_controllers[8804]: 16:04:54.913556 alarm_mapping.go:167: INFO: Available power supplies: map[HPE_Gen11B:{4 6 [Harmonic_BackupPowerSupplyFailure]} HPE_Gen11P:{4 5 [Harmonic_PowerSupplyFailure]}]
Nov 17 16:04:54 vosflex.localdomain nmiperfmon_raid_controllers[8804]: 16:04:54.913569 handlers.go:106: INFO: Metric Handlers: map[RAID:[0x51aaa0]]

● nmipoller_caminfo.service - DVB-CI status poller
   Loaded: loaded (/etc/systemd/system/nmipoller_caminfo.service; enabled; vendor preset: disabled)
  Drop-In: /etc/systemd/system/nmipoller_caminfo.service.d
           └─05-cpusetconfig.conf
   Active: active (running) since Mon 2025-11-17 16:04:54 UTC; 21h ago
  Process: 8690 ExecStartPre=/opt/omneon/sbin/cpusetconfig nmipoller_caminfo.service (code=exited, status=0/SUCCESS)
 Main PID: 8796 (python3.11)
    Tasks: 1 (limit: 399998)
   Memory: 15.3M
   CGroup: /system.slice/nmipoller_caminfo.service
           └─8796 python3.11 /opt/omneon/nmi/nmipollers/src/caminfo/main.py

Nov 17 16:04:54 vosflex.localdomain nmipoller_caminfo[8690]: + '[' -n nmipoller_caminfo.service ']'
Nov 17 16:04:54 vosflex.localdomain nmipoller_caminfo[8690]: + mkdir -p /run/cpusetconfig/nmipoller_caminfo.service
Nov 17 16:04:54 vosflex.localdomain nmipoller_caminfo[8690]: + '[' -f /run/nmiirq/allowedcpulist ']'
Nov 17 16:04:54 vosflex.localdomain nmipoller_caminfo[8690]: + CPUS=/sys/fs/cgroup/cpuset/system.slice/nmipoller_caminfo.service/cpuset.cpus
Nov 17 16:04:54 vosflex.localdomain nmipoller_caminfo[8690]: + cat /run/nmiirq/allowedcpulist
Nov 17 16:04:54 vosflex.localdomain nmipoller_caminfo[8690]: + exit 0
Nov 17 16:04:54 vosflex.localdomain systemd[1]: Started DVB-CI status poller.
Nov 17 16:04:55 vosflex.localdomain nmipoller_caminfo[8796]: 2025-11-17 16:04:55,081 - App - INFO - Starting nmipoller caminfo
Nov 17 16:04:55 vosflex.localdomain nmipoller_caminfo[8796]: 2025-11-17 16:04:55,081 - App - INFO - Adapters: []
Nov 17 16:04:55 vosflex.localdomain nmipoller_caminfo[8796]: 2025-11-17 16:04:55,081 - IdlingApp - INFO - Nothing to do, idling

● nmipoller_ilostatus.service - ILO status poller
   Loaded: loaded (/etc/systemd/system/nmipoller_ilostatus.service; enabled; vendor preset: disabled)
  Drop-In: /etc/systemd/system/nmipoller_ilostatus.service.d
           └─05-cpusetconfig.conf
   Active: active (running) since Mon 2025-11-17 16:04:54 UTC; 21h ago
  Process: 8776 ExecStartPre=/opt/omneon/sbin/cpusetconfig nmipoller_ilostatus.service (code=exited, status=0/SUCCESS)
 Main PID: 8852 (python3.11)
    Tasks: 1 (limit: 399998)
   Memory: 21.1M
   CGroup: /system.slice/nmipoller_ilostatus.service
           └─8852 python3.11 /opt/omneon/nmi/nmipollers/src/ilostatus/main.py

Nov 18 13:54:18 XOSEncoder-01 nmipoller_ilostatus[8852]: 2025-11-18 13:54:18,828 - redfish.rest.connections - INFO - iLO Response Time to /redfish/v1/Chassis: 0.027500152587890625 secs.
Nov 18 13:54:18 XOSEncoder-01 nmipoller_ilostatus[8852]: 2025-11-18 13:54:18,861 - redfish.rest.connections - INFO - iLO Response Time to /redfish/v1/Chassis/1: 0.03324770927429199 secs.
Nov 18 13:54:18 XOSEncoder-01 nmipoller_ilostatus[8852]: 2025-11-18 13:54:18,893 - redfish.rest.connections - INFO - iLO Response Time to /redfish/v1/Chassis/1/Power: 0.0309903621673584 secs.
Nov 18 13:54:18 XOSEncoder-01 nmipoller_ilostatus[8852]: 2025-11-18 13:54:18,990 - redfish.rest.connections - INFO - iLO Response Time to /redfish/v1/Chassis/1/Thermal: 0.09687328338623047 secs.
Nov 18 13:54:19 XOSEncoder-01 nmipoller_ilostatus[8852]: 2025-11-18 13:54:19,022 - redfish.rest.connections - INFO - iLO Response Time to /redfish/v1/Chassis/DE042000: 0.03071451187133789 secs.
Nov 18 13:55:19 XOSEncoder-01 nmipoller_ilostatus[8852]: 2025-11-18 13:55:19,055 - redfish.rest.connections - INFO - iLO Response Time to /redfish/v1/Chassis: 0.02906346321105957 secs.
Nov 18 13:55:19 XOSEncoder-01 nmipoller_ilostatus[8852]: 2025-11-18 13:55:19,089 - redfish.rest.connections - INFO - iLO Response Time to /redfish/v1/Chassis/1: 0.03290104866027832 secs.
Nov 18 13:55:19 XOSEncoder-01 nmipoller_ilostatus[8852]: 2025-11-18 13:55:19,122 - redfish.rest.connections - INFO - iLO Response Time to /redfish/v1/Chassis/1/Power: 0.033344268798828125 secs.
Nov 18 13:55:19 XOSEncoder-01 nmipoller_ilostatus[8852]: 2025-11-18 13:55:19,220 - redfish.rest.connections - INFO - iLO Response Time to /redfish/v1/Chassis/1/Thermal: 0.09695911407470703 secs.
Nov 18 13:55:19 XOSEncoder-01 nmipoller_ilostatus[8852]: 2025-11-18 13:55:19,253 - redfish.rest.connections - INFO - iLO Response Time to /redfish/v1/Chassis/DE042000: 0.03132438659667969 secs.

● nmipoller_raidmr.service - MegaRAID status poller
   Loaded: loaded (/etc/systemd/system/nmipoller_raidmr.service; enabled; vendor preset: disabled)
  Drop-In: /etc/systemd/system/nmipoller_raidmr.service.d
           └─05-cpusetconfig.conf
   Active: active (running) since Mon 2025-11-17 16:04:54 UTC; 21h ago
  Process: 8667 ExecStartPre=/opt/omneon/sbin/cpusetconfig nmipoller_raidmr.service (code=exited, status=0/SUCCESS)
 Main PID: 8787 (python3.11)
    Tasks: 1 (limit: 399998)
   Memory: 18.6M
   CGroup: /system.slice/nmipoller_raidmr.service
           └─8787 python3.11 /opt/omneon/nmi/nmipollers/src/raidmr/main.py

Nov 17 16:04:54 vosflex.localdomain systemd[1]: Starting MegaRAID status poller...
Nov 17 16:04:54 vosflex.localdomain nmipoller_raidmr[8667]: + '[' -n nmipoller_raidmr.service ']'
Nov 17 16:04:54 vosflex.localdomain nmipoller_raidmr[8667]: + mkdir -p /run/cpusetconfig/nmipoller_raidmr.service
Nov 17 16:04:54 vosflex.localdomain nmipoller_raidmr[8667]: + '[' -f /run/nmiirq/allowedcpulist ']'
Nov 17 16:04:54 vosflex.localdomain nmipoller_raidmr[8667]: + CPUS=/sys/fs/cgroup/cpuset/system.slice/nmipoller_raidmr.service/cpuset.cpus
Nov 17 16:04:54 vosflex.localdomain nmipoller_raidmr[8667]: + cat /run/nmiirq/allowedcpulist
Nov 17 16:04:54 vosflex.localdomain nmipoller_raidmr[8667]: + exit 0
Nov 17 16:04:54 vosflex.localdomain systemd[1]: Started MegaRAID status poller.
Nov 17 16:04:54 vosflex.localdomain nmipoller_raidmr[8787]: 2025-11-17 16:04:54,942 - App - INFO - Starting nmipoller raidmr

● nmipoller_raidssa.service - SSA RAID status poller
   Loaded: loaded (/etc/systemd/system/nmipoller_raidssa.service; enabled; vendor preset: disabled)
  Drop-In: /etc/systemd/system/nmipoller_raidssa.service.d
           └─05-cpusetconfig.conf
   Active: active (running) since Mon 2025-11-17 16:04:54 UTC; 21h ago
  Process: 8781 ExecStartPre=/opt/omneon/sbin/cpusetconfig nmipoller_raidssa.service (code=exited, status=0/SUCCESS)
 Main PID: 8856 (python3.11)
    Tasks: 1 (limit: 399998)
   Memory: 22.8M
   CGroup: /system.slice/nmipoller_raidssa.service
           └─8856 python3.11 /opt/omneon/nmi/nmipollers/src/raidssa/main.py

Nov 17 16:04:54 vosflex.localdomain nmipoller_raidssa[8781]: + '[' -n nmipoller_raidssa.service ']'
Nov 17 16:04:54 vosflex.localdomain nmipoller_raidssa[8781]: + mkdir -p /run/cpusetconfig/nmipoller_raidssa.service
Nov 17 16:04:54 vosflex.localdomain nmipoller_raidssa[8781]: + '[' -f /run/nmiirq/allowedcpulist ']'
Nov 17 16:04:54 vosflex.localdomain nmipoller_raidssa[8781]: + CPUS=/sys/fs/cgroup/cpuset/system.slice/nmipoller_raidssa.service/cpuset.cpus
Nov 17 16:04:54 vosflex.localdomain nmipoller_raidssa[8781]: + cat /run/nmiirq/allowedcpulist
Nov 17 16:04:54 vosflex.localdomain nmipoller_raidssa[8781]: + exit 0
Nov 17 16:04:54 vosflex.localdomain systemd[1]: Started SSA RAID status poller.
Nov 17 16:04:54 vosflex.localdomain nmipoller_raidssa[8856]: 2025-11-17 16:04:54,942 - App - INFO - Starting nmipoller raidssa
Nov 17 16:04:55 vosflex.localdomain nmipoller_raidssa[8856]: 2025-11-17 16:04:55,437 - App - INFO - Controller not detected
Nov 17 16:04:55 vosflex.localdomain nmipoller_raidssa[8856]: 2025-11-17 16:04:55,437 - IdlingApp - INFO - Nothing to do, idling

● nmiset_dns.service - NMINet initial DNS configuration
   Loaded: loaded (/etc/systemd/system/nmiset_dns.service; enabled; vendor preset: disabled)
   Active: active (exited) since Mon 2025-11-17 16:04:08 UTC; 21h ago
  Process: 4559 ExecStart=/opt/omneon/sbin/set_dns.py nodocker (code=exited, status=0/SUCCESS)
 Main PID: 4559 (code=exited, status=0/SUCCESS)
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/nmiset_dns.service

Nov 17 16:04:08 vosflex.localdomain systemd[1]: Starting NMINet initial DNS configuration...
Nov 17 16:04:08 vosflex.localdomain systemd[1]: Started NMINet initial DNS configuration.

● nmiset_hostname.service - NMINet initial /etc/hosts configuration
   Loaded: loaded (/etc/systemd/system/nmiset_hostname.service; enabled; vendor preset: disabled)
   Active: active (exited) since Mon 2025-11-17 16:04:08 UTC; 21h ago
  Process: 4552 ExecStart=/opt/omneon/sbin/set_hostname.py (code=exited, status=0/SUCCESS)
 Main PID: 4552 (code=exited, status=0/SUCCESS)
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/nmiset_hostname.service

Nov 17 16:04:08 vosflex.localdomain systemd[1]: Starting NMINet initial /etc/hosts configuration...
Nov 17 16:04:08 vosflex.localdomain set_hostname.py[4552]: Hostname of the system is: vosflex.localdomain
Nov 17 16:04:08 vosflex.localdomain set_hostname.py[4552]: Nothing to do. Hostname vosflex.localdomain is in /etc/hosts
Nov 17 16:04:08 vosflex.localdomain systemd[1]: Started NMINet initial /etc/hosts configuration.

● node_teleport_runner.service - Remote access services configuration monitor
   Loaded: loaded (/etc/systemd/system/node_teleport_runner.service; enabled; vendor preset: disabled)
  Drop-In: /etc/systemd/system/node_teleport_runner.service.d
           └─05-cpusetconfig.conf
   Active: active (running) since Mon 2025-11-17 16:06:30 UTC; 21h ago
  Process: 29723 ExecStartPre=/opt/omneon/sbin/cpusetconfig node_teleport_runner.service (code=exited, status=0/SUCCESS)
 Main PID: 29731 (python3)
    Tasks: 4 (limit: 399998)
   Memory: 10.1M
   CGroup: /system.slice/node_teleport_runner.service
           └─29731 python3 /opt/teleport/bin/node_teleport_runner.py

Nov 17 16:06:30 vosflex.localdomain cpusetconfig[29723]: + '[' -n node_teleport_runner.service ']'
Nov 17 16:06:30 vosflex.localdomain cpusetconfig[29723]: + mkdir -p /run/cpusetconfig/node_teleport_runner.service
Nov 17 16:06:30 vosflex.localdomain cpusetconfig[29723]: + '[' -f /run/nmiirq/allowedcpulist ']'
Nov 17 16:06:30 vosflex.localdomain cpusetconfig[29723]: + CPUS=/sys/fs/cgroup/cpuset/system.slice/node_teleport_runner.service/cpuset.cpus
Nov 17 16:06:30 vosflex.localdomain cpusetconfig[29723]: + cat /run/nmiirq/allowedcpulist
Nov 17 16:06:30 vosflex.localdomain cpusetconfig[29723]: + exit 0
Nov 17 16:06:30 vosflex.localdomain systemd[1]: Started Remote access services configuration monitor.
Nov 17 16:06:30 vosflex.localdomain node_teleport_runner.py[29731]: INFO: Connecting to zookeeper.cluster1.svc.cluster.local(203.0.113.35):2181, use_ssl: False
Unit ntpd.service could not be found.
Unit ntpdate.service could not be found.
Unit plymouth-quit-wait.service could not be found.
Unit plymouth-start.service could not be found.
Nov 17 16:06:30 vosflex.localdomain node_teleport_runner.py[29731]: INFO: Zookeeper connection established, state: CONNECTED
Nov 17 16:06:41 vosflex.localdomain node_teleport_runner.py[29731]: INFO: Got new Teleport config, trying to update

● openibd.service - openibd - configure Mellanox devices
   Loaded: loaded (/usr/lib/systemd/system/openibd.service; enabled; vendor preset: disabled)
  Drop-In: /etc/systemd/system/openibd.service.d
           └─50-order.conf
   Active: active (exited) since Mon 2025-11-17 16:03:53 UTC; 21h ago
     Docs: file:/etc/infiniband/openib.conf
 Main PID: 3541 (code=exited, status=0/SUCCESS)
    Tasks: 0 (limit: 399998)
   Memory: 5.3M
   CGroup: /system.slice/openibd.service

Nov 17 16:03:53 vosflex.localdomain systemd[1]: Starting openibd - configure Mellanox devices...
Nov 17 16:03:53 vosflex.localdomain openibd[3541]: Loading HCA driver and Access Layer: [  OK  ]
Nov 17 16:03:53 vosflex.localdomain systemd[1]: Started openibd - configure Mellanox devices.

● pcscd.service - PC/SC Smart Card Daemon
   Loaded: loaded (/usr/lib/systemd/system/pcscd.service; indirect; vendor preset: disabled)
  Drop-In: /etc/systemd/system/pcscd.service.d
           └─50-priority.conf
   Active: inactive (dead)
     Docs: man:pcscd(8)

● perfstatserver.service - NMI PerfStatServer Service
   Loaded: loaded (/etc/systemd/system/perfstatserver.service; enabled; vendor preset: disabled)
  Drop-In: /etc/systemd/system/perfstatserver.service.d
           └─05-cpusetconfig.conf
   Active: active (running) since Mon 2025-11-17 16:04:54 UTC; 21h ago
  Process: 8693 ExecStartPre=/opt/omneon/sbin/cpusetconfig perfstatserver.service (code=exited, status=0/SUCCESS)
 Main PID: 8798 (java)
    Tasks: 102 (limit: 399998)
   Memory: 189.3M
   CGroup: /system.slice/perfstatserver.service
           ├─ 8798 /bin/java -XX:+UseSerialGC -Xms32M -Xmx512M -jar PerfStatsServer.jar
           ├─11218 /usr/sbin/arping -c 10 -D -I net1 10.10.106.144
           ├─11572 /usr/sbin/arping -c 10 -D -I net10 100.8.248.10
           └─11638 /usr/sbin/arping -c 10 -D -I net9 100.8.248.16

Nov 17 16:04:55 vosflex.localdomain java[8798]: P8798 T1 A8031 Environment: vm:false, rpm:false
Nov 17 16:04:55 vosflex.localdomain java[8798]: P8798 T1 A8031 Starting Platform monitoring
Nov 17 16:04:56 vosflex.localdomain java[8798]: P8798 T1 A8031 Starting NTP and PTP monitoring
Nov 17 16:04:56 vosflex.localdomain java[8798]: P8798 T1 A8031 Entry PTP configuration: []
Nov 17 16:04:56 vosflex.localdomain java[8798]: P8798 T1 A8031 Starting Hardware monitoring
Nov 17 16:04:57 vosflex.localdomain java[8798]: P8798 T1 A8031 Starting ECC memory monitoring
Nov 17 16:04:57 vosflex.localdomain java[8798]: P8798 T1 A8031 Starting RAID stats monitoring
Nov 17 16:04:57 vosflex.localdomain java[8798]: P8798 T1 A8031 Starting SSD wear-out monitoring
Nov 17 16:04:57 vosflex.localdomain perfstatserver[8798]: Nov 17, 2025 4:04:57 PM com.harmonic.nmi.perfstatsserver.SSDMon checkDeviceHealth
Nov 17 16:04:57 vosflex.localdomain perfstatserver[8798]: INFO: Flash drive is 0% used

● phc2sys.service - Synchronize system clock or PTP hardware clock (PHC)
   Loaded: loaded (/usr/lib/systemd/system/phc2sys.service; disabled; vendor preset: disabled)
   Active: inactive (dead)

● polkit.service - Authorization Manager
   Loaded: loaded (/usr/lib/systemd/system/polkit.service; static; vendor preset: disabled)
  Drop-In: /etc/systemd/system/polkit.service.d
           └─05-cpusetconfig.conf
   Active: active (running) since Mon 2025-11-17 16:04:55 UTC; 21h ago
     Docs: man:polkit(8)
  Process: 9269 ExecStartPre=/opt/omneon/sbin/cpusetconfig polkit.service (code=exited, status=0/SUCCESS)
 Main PID: 9285 (polkitd)
    Tasks: 12 (limit: 399998)
   Memory: 15.2M
   CGroup: /system.slice/polkit.service
           └─9285 /usr/lib/polkit-1/polkitd --no-debug

Nov 17 16:04:55 vosflex.localdomain cpusetconfig[9269]: + '[' -f /run/nmiirq/allowedcpulist ']'
Unit power-profiles-daemon.service could not be found.
Nov 17 16:04:55 vosflex.localdomain cpusetconfig[9269]: + CPUS=/sys/fs/cgroup/cpuset/system.slice/polkit.service/cpuset.cpus
Nov 17 16:04:55 vosflex.localdomain cpusetconfig[9269]: + cat /run/nmiirq/allowedcpulist
Nov 17 16:04:55 vosflex.localdomain cpusetconfig[9269]: + exit 0
Nov 17 16:04:55 vosflex.localdomain polkitd[9285]: Started polkitd version 0.115
Nov 17 16:04:55 vosflex.localdomain polkitd[9285]: Loading rules from directory /etc/polkit-1/rules.d
Nov 17 16:04:55 vosflex.localdomain polkitd[9285]: Loading rules from directory /usr/share/polkit-1/rules.d
Nov 17 16:04:55 vosflex.localdomain polkitd[9285]: Finished loading, compiling and executing 3 rules
Nov 17 16:04:55 vosflex.localdomain polkitd[9285]: Acquired the name org.freedesktop.PolicyKit1 on the system bus
Nov 17 16:04:55 vosflex.localdomain systemd[1]: Started Authorization Manager.

● post_upgrade.service - Post upgrade scripts
   Loaded: loaded (/etc/systemd/system/post_upgrade.service; enabled; vendor preset: disabled)
   Active: inactive (dead) since Mon 2025-11-17 16:04:08 UTC; 21h ago
 Main PID: 4125 (code=exited, status=0/SUCCESS)

Nov 17 16:04:07 vosflex.localdomain systemd[1]: Started Post upgrade scripts.
Nov 17 16:04:08 vosflex.localdomain systemd[1]: post_upgrade.service: Succeeded.

● preempt.service - Configure task preemption model
   Loaded: loaded (/etc/systemd/system/preempt.service; enabled; vendor preset: disabled)
   Active: active (exited) since Mon 2025-11-17 16:06:31 UTC; 21h ago
  Process: 30395 ExecStart=/bin/bash -c echo ${PREEMPT:-full} > /sys/kernel/debug/sched/preempt (code=exited, status=0/SUCCESS)
 Main PID: 30395 (code=exited, status=0/SUCCESS)
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/preempt.service

Nov 17 16:06:31 vosflex.localdomain systemd[1]: Starting Configure task preemption model...
Nov 17 16:06:31 vosflex.localdomain systemd[1]: Started Configure task preemption model.

● product_early_mounts.service - Storage initialization and mount service
   Loaded: loaded (/etc/systemd/system/product_early_mounts.service; enabled; vendor preset: disabled)
   Active: inactive (dead) since Mon 2025-11-17 16:04:08 UTC; 21h ago
 Main PID: 4104 (code=exited, status=0/SUCCESS)

Nov 17 16:04:07 vosflex.localdomain systemd[1]: Started Storage initialization and mount service.
Nov 17 16:04:07 vosflex.localdomain product_early_mounts.sh[4107]: TARGET    SOURCE    FSTYPE OPTIONS
Nov 17 16:04:07 vosflex.localdomain product_early_mounts.sh[4107]: /mnt/sda7 /dev/sdb7 ext4   rw,relatime
Nov 17 16:04:07 vosflex.localdomain product_early_mounts.sh[4104]: /mnt/sda7 is already a mount point!
Nov 17 16:04:07 vosflex.localdomain product_early_mounts.sh[4104]: Mounting DATA_DRIVE /dev/sda1 to /mnt/data_drive
Nov 17 16:04:08 vosflex.localdomain systemd[1]: product_early_mounts.service: Succeeded.

● ptp4l.service - Precision Time Protocol (PTP) service
   Loaded: loaded (/usr/lib/systemd/system/ptp4l.service; disabled; vendor preset: disabled)
   Active: inactive (dead)

● rc-local.service - /etc/rc.d/rc.local Compatibility
   Loaded: loaded (/usr/lib/systemd/system/rc-local.service; enabled-runtime; vendor preset: disabled)
   Active: active (exited) since Mon 2025-11-17 16:04:54 UTC; 21h ago
     Docs: man:systemd-rc-local-generator(8)
  Process: 8718 ExecStart=/etc/rc.d/rc.local start (code=exited, status=0/SUCCESS)
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/rc-local.service

Nov 17 16:04:54 vosflex.localdomain systemd[1]: Starting /etc/rc.d/rc.local Compatibility...
Nov 17 16:04:54 vosflex.localdomain systemd[1]: Started /etc/rc.d/rc.local Compatibility.

● reboot_after_fw_upgrade.service - Service to force system reboot if FW upgrade took place
   Loaded: loaded (/etc/systemd/system/reboot_after_fw_upgrade.service; enabled; vendor preset: disabled)
   Active: inactive (dead)
Condition: start condition failed at Mon 2025-11-17 16:04:07 UTC; 21h ago

● rescue.service - Rescue Shell
   Loaded: loaded (/usr/lib/systemd/system/rescue.service; static; vendor preset: disabled)
   Active: inactive (dead)
     Docs: man:sulogin(8)

● rpc-gssd.service - RPC security service for NFS client and server
   Loaded: loaded (/usr/lib/systemd/system/rpc-gssd.service; static; vendor preset: disabled)
   Active: inactive (dead)

Nov 17 16:03:53 vosflex.localdomain systemd[1]: Dependency failed for RPC security service for NFS client and server.
Nov 17 16:03:53 vosflex.localdomain systemd[1]: rpc-gssd.service: Job rpc-gssd.service/start failed with result 'dependency'.

● rpc-statd-notify.service - Notify NFS peers of a restart
   Loaded: loaded (/usr/lib/systemd/system/rpc-statd-notify.service; static; vendor preset: disabled)
   Active: active (exited) since Mon 2025-11-17 16:04:54 UTC; 21h ago
  Process: 8715 ExecStart=/usr/sbin/sm-notify (code=exited, status=0/SUCCESS)
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/rpc-statd-notify.service

Nov 17 16:04:54 vosflex.localdomain systemd[1]: Starting Notify NFS peers of a restart...
Nov 17 16:04:54 vosflex.localdomain sm-notify[8715]: Version 2.3.3 starting
Nov 17 16:04:54 vosflex.localdomain systemd[1]: Started Notify NFS peers of a restart.

● rpc-statd.service - NFS status monitor for NFSv2/3 locking.
   Loaded: loaded (/usr/lib/systemd/system/rpc-statd.service; static; vendor preset: disabled)
   Active: inactive (dead)

● rpcbind.service - RPC Bind
   Loaded: loaded (/usr/lib/systemd/system/rpcbind.service; enabled; vendor preset: enabled)
  Drop-In: /etc/systemd/system/rpcbind.service.d
           └─05-cpusetconfig.conf
   Active: active (running) since Mon 2025-11-17 16:03:53 UTC; 21h ago
     Docs: man:rpcbind(8)
 Main PID: 3561 (rpcbind)
    Tasks: 1 (limit: 399998)
   Memory: 1.6M
   CGroup: /system.slice/rpcbind.service
           └─3561 /usr/bin/rpcbind -w -f

Nov 17 16:03:53 vosflex.localdomain systemd[1]: Starting RPC Bind...
Nov 17 16:03:53 vosflex.localdomain cpusetconfig[3553]: + '[' -n rpcbind.service ']'
Nov 17 16:03:53 vosflex.localdomain cpusetconfig[3553]: + mkdir -p /run/cpusetconfig/rpcbind.service
Nov 17 16:03:53 vosflex.localdomain cpusetconfig[3553]: + '[' -f /run/nmiirq/allowedcpulist ']'
Nov 17 16:03:53 vosflex.localdomain cpusetconfig[3553]: + exit 0
Nov 17 16:03:53 vosflex.localdomain systemd[1]: Started RPC Bind.

● rsyslog.service - System Logging Service
   Loaded: loaded (/usr/lib/systemd/system/rsyslog.service; enabled; vendor preset: enabled)
  Drop-In: /etc/systemd/system/rsyslog.service.d
           └─05-cpusetconfig.conf
   Active: active (running) since Mon 2025-11-17 16:04:54 UTC; 21h ago
     Docs: man:rsyslogd(8)
           https://www.rsyslog.com/doc/
  Process: 8741 ExecStartPre=/opt/omneon/sbin/cpusetconfig rsyslog.service (code=exited, status=0/SUCCESS)
 Main PID: 8818 (rsyslogd)
    Tasks: 4 (limit: 399998)
   Memory: 153.0M
   CGroup: /system.slice/rsyslog.service
           └─8818 /usr/sbin/rsyslogd -n

Nov 17 16:09:16 vosflex.localdomain rsyslogd[8818]: action 'action-0-builtin:omfwd' suspended (module 'builtin:omfwd'), next retry is Mon Nov 17 16:09:46 2025, retry nbr 0. There should be messages before this one giving the reason for suspension. [v8.2102.0-15.el8_10.1 try https://www.rsyslog.com/e/2007 ]
Nov 17 16:12:32 XOSEncoder-01 rsyslogd[8818]: action 'action-0-builtin:omfwd' resumed (module 'builtin:omfwd') [v8.2102.0-15.el8_10.1 try https://www.rsyslog.com/e/2359 ]
Nov 17 17:57:22 XOSEncoder-01 rsyslogd[8818]: imjournal: journal files changed, reloading...  [v8.2102.0-15.el8_10.1 try https://www.rsyslog.com/e/0 ]
Nov 17 20:29:02 XOSEncoder-01 rsyslogd[8818]: imjournal: journal files changed, reloading...  [v8.2102.0-15.el8_10.1 try https://www.rsyslog.com/e/0 ]
Nov 17 23:02:09 XOSEncoder-01 rsyslogd[8818]: imjournal: journal files changed, reloading...  [v8.2102.0-15.el8_10.1 try https://www.rsyslog.com/e/0 ]
Nov 18 01:32:37 XOSEncoder-01 rsyslogd[8818]: imjournal: journal files changed, reloading...  [v8.2102.0-15.el8_10.1 try https://www.rsyslog.com/e/0 ]
Nov 18 04:04:39 XOSEncoder-01 rsyslogd[8818]: imjournal: journal files changed, reloading...  [v8.2102.0-15.el8_10.1 try https://www.rsyslog.com/e/0 ]
Nov 18 06:37:34 XOSEncoder-01 rsyslogd[8818]: imjournal: journal files changed, reloading...  [v8.2102.0-15.el8_10.1 try https://www.rsyslog.com/e/0 ]
Nov 18 09:09:37 XOSEncoder-01 rsyslogd[8818]: imjournal: journal files changed, reloading...  [v8.2102.0-15.el8_10.1 try https://www.rsyslog.com/e/0 ]
Nov 18 11:43:12 XOSEncoder-01 rsyslogd[8818]: imjournal: journal files changed, reloading...  [v8.2102.0-15.el8_10.1 try https://www.rsyslog.com/e/0 ]

● rtirq.service - Realtime IRQ thread system tuning
   Loaded: loaded (/etc/systemd/system/rtirq.service; enabled; vendor preset: disabled)
   Active: active (exited) since Mon 2025-11-17 16:06:31 UTC; 21h ago
  Process: 31032 ExecStart=/usr/local/bin/rtirq.sh start (code=exited, status=0/SUCCESS)
 Main PID: 31032 (code=exited, status=0/SUCCESS)
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/rtirq.service

Nov 17 16:06:31 vosflex.localdomain systemd[1]: Starting Realtime IRQ thread system tuning...
Nov 17 16:06:31 vosflex.localdomain systemd[1]: Started Realtime IRQ thread system tuning.

● selinux-autorelabel-mark.service - Mark the need to relabel after reboot
   Loaded: loaded (/usr/lib/systemd/system/selinux-autorelabel-mark.service; enabled; vendor preset: enabled)
   Active: inactive (dead)
Condition: start condition failed at Mon 2025-11-17 16:03:51 UTC; 21h ago

● serial-getty@ttyS0.service - Serial Getty on ttyS0
   Loaded: loaded (/usr/lib/systemd/system/serial-getty@.service; enabled; vendor preset: disabled)
   Active: active (running) since Mon 2025-11-17 16:04:54 UTC; 21h ago
     Docs: man:agetty(8)
           man:systemd-getty-generator(8)
           http://0pointer.de/blog/projects/serial-console.html
 Main PID: 8909 (agetty)
    Tasks: 1 (limit: 399998)
   Memory: 228.0K
   CGroup: /system.slice/system-serial\x2dgetty.slice/serial-getty@ttyS0.service
           └─8909 /sbin/agetty -o -p -- \u --keep-baud 115200,38400,9600 ttyS0 vt220

Nov 17 16:04:54 vosflex.localdomain systemd[1]: Started Serial Getty on ttyS0.

● setupdevops.service - Setup devops user
   Loaded: loaded (/etc/systemd/system/setupdevops.service; enabled; vendor preset: disabled)
   Active: active (exited) since Mon 2025-11-17 16:05:05 UTC; 21h ago
  Process: 12475 ExecStart=/opt/omneon/sbin/setupdevops (code=exited, status=0/SUCCESS)
 Main PID: 12475 (code=exited, status=0/SUCCESS)
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/setupdevops.service

Nov 17 16:05:05 vosflex.localdomain setupdevops[12475]: + chown devops:devops /home/devops/.ssh/authorized_keys.new
Nov 17 16:05:05 vosflex.localdomain setupdevops[12475]: + mv -f /home/devops/.ssh/authorized_keys.new /home/devops/.ssh/authorized_keys
Nov 17 16:05:05 vosflex.localdomain setupdevops[12475]: + sync
Nov 17 16:05:05 vosflex.localdomain setupdevops[12475]: + '[' '!' -f /var/lib/bomgar-misc/.ssh/config ']'
Nov 17 16:05:05 vosflex.localdomain setupdevops[12475]: + set +e
Nov 17 16:05:05 vosflex.localdomain setupdevops[12475]: + '[' '!' -f /var/lib/bomgar-misc/.ssh/known_hosts -a -n 198.51.100.1 ']'
Nov 17 16:05:05 vosflex.localdomain setupdevops[12475]: + chown devops:devops -R /home/devops/.ssh
Nov 17 16:05:05 vosflex.localdomain setupdevops[12475]: + chown 1000:1000 -R /var/lib/bomgar-misc/.ssh
Nov 17 16:05:05 vosflex.localdomain setupdevops[12517]: chown: changing ownership of '/var/lib/bomgar-misc/.ssh/this-xos_ed25519': Operation not permitted
Nov 17 16:05:05 vosflex.localdomain setupdevops[12475]: + chattr +i /var/lib/bomgar-misc/.ssh/this-xos_ed25519

● shutdown-containers.service - Shutdown containers
   Loaded: loaded (/etc/systemd/system/shutdown-containers.service; enabled; vendor preset: disabled)
   Active: active (exited) since Mon 2025-11-17 16:04:56 UTC; 21h ago
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/shutdown-containers.service

Nov 17 16:04:56 vosflex.localdomain systemd[1]: Started Shutdown containers.

Unit sntp.service could not be found.
● smad.service - System Management Assistant daemon
   Loaded: loaded (/usr/lib/systemd/system/smad.service; enabled; vendor preset: enabled)
  Drop-In: /etc/systemd/system/smad.service.d
           └─05-cpusetconfig.conf, 05-exec-condition.conf, 05-resource-limit.conf
   Active: active (running) since Mon 2025-11-17 16:04:54 UTC; 21h ago
  Process: 8815 ExecStartPre=/opt/omneon/sbin/cpusetconfig smad.service (code=exited, status=0/SUCCESS)
 Main PID: 8864 (smad)
    Tasks: 4 (limit: 399998)
   Memory: 652.0K (high: 512.0M max: 1.0G)
      CPU: 3.209s
   CGroup: /system.slice/smad.service
           ├─8864 /sbin/smad
           ├─8881 /sbin/smad
           ├─8882 /sbin/smad
           └─9461 /sbin/smad

Nov 17 16:04:58 vosflex.localdomain smad[9461]: [NOTICE]: AgentX trap MIB-II (.1.3.6.1.6.3.1.1.4.1.0:.1.3.6.1.6.3.1.1.5.1)
Nov 17 16:05:26 vosflex.localdomain smad[8881]: [NOTICE]: IML received: 125 bytes
Nov 17 16:05:26 vosflex.localdomain smad[8881]: [ALERT ]: NOTICE: Network Controller Connectivity status changed to OK for adapter in slot 1, port 1 has been repaired
Nov 17 16:05:26 vosflex.localdomain smad[8881]: [INFO  ]: Log the IML info to syslog
Nov 17 16:05:26 vosflex.localdomain smad[8881]: [NOTICE]: IML received: 125 bytes
Nov 17 16:05:26 vosflex.localdomain smad[8881]: [ALERT ]: NOTICE: Network Controller Connectivity status changed to OK for adapter in slot 1, port 2 has been repaired
Nov 17 16:05:26 vosflex.localdomain smad[8881]: [INFO  ]: Log the IML info to syslog
Nov 17 16:05:26 vosflex.localdomain smad[8881]: [NOTICE]: IML received: 100 bytes
Nov 17 16:05:26 vosflex.localdomain smad[8881]: [ALERT ]: NOTICE: iLO is in High Security Mode and there is no System ROM Admin Password set.
Nov 17 16:05:26 vosflex.localdomain smad[8881]: [INFO  ]: Log the IML info to syslog

● smartd.service - Self Monitoring and Reporting Technology (SMART) Daemon
   Loaded: loaded (/usr/lib/systemd/system/smartd.service; enabled; vendor preset: enabled)
  Drop-In: /etc/systemd/system/smartd.service.d
           └─05-cpusetconfig.conf
   Active: active (running) since Mon 2025-11-17 16:04:08 UTC; 21h ago
     Docs: man:smartd(8)
           man:smartd.conf(5)
 Main PID: 4240 (smartd)
   Status: "Next check of 1 device will start at 14:04:08"
    Tasks: 1 (limit: 399998)
   Memory: 2.5M
   CGroup: /system.slice/smartd.service
           └─4240 /usr/sbin/smartd -n -q never

Nov 17 16:04:08 vosflex.localdomain smartd[4240]: Device: /dev/bus/0 [megaraid_disk_05] [SAT], not capable of SMART Health Status check
Nov 17 16:04:08 vosflex.localdomain smartd[4240]: Device: /dev/bus/0 [megaraid_disk_05] [SAT], no ATA CHECK POWER STATUS support, ignoring -n Directive
Nov 17 16:04:08 vosflex.localdomain smartd[4240]: Device: /dev/bus/0 [megaraid_disk_06], type changed from 'megaraid,6' to 'sat+megaraid,6'
Nov 17 16:04:08 vosflex.localdomain smartd[4240]: Device: /dev/bus/0 [megaraid_disk_06] [SAT], opened
Nov 17 16:04:08 vosflex.localdomain smartd[4240]: Device: /dev/bus/0 [megaraid_disk_06] [SAT], MK000960GXNZK, S/N:24344A996651, WWN:5-00a075-14a996651, FW:HPG0, 960 GB
Nov 17 16:04:08 vosflex.localdomain smartd[4240]: Device: /dev/bus/0 [megaraid_disk_06] [SAT], not found in smartd database.
Nov 17 16:04:08 vosflex.localdomain smartd[4240]: Device: /dev/bus/0 [megaraid_disk_06] [SAT], not capable of SMART Health Status check
Nov 17 16:04:08 vosflex.localdomain smartd[4240]: Device: /dev/bus/0 [megaraid_disk_06] [SAT], no ATA CHECK POWER STATUS support, ignoring -n Directive
Nov 17 16:04:08 vosflex.localdomain smartd[4240]: Monitoring 1 ATA/SATA, 0 SCSI/SAS and 0 NVMe devices
Nov 17 16:04:08 vosflex.localdomain systemd[1]: Started Self Monitoring and Reporting Technology (SMART) Daemon.

● sshd-keygen@ecdsa.service - OpenSSH ecdsa Server Key Generation
   Loaded: loaded (/usr/lib/systemd/system/sshd-keygen@.service; disabled; vendor preset: disabled)
   Active: inactive (dead)
Condition: start condition failed at Mon 2025-11-17 16:04:07 UTC; 21h ago

● sshd-keygen@ed25519.service - OpenSSH ed25519 Server Key Generation
   Loaded: loaded (/usr/lib/systemd/system/sshd-keygen@.service; disabled; vendor preset: disabled)
   Active: inactive (dead)
Condition: start condition failed at Mon 2025-11-17 16:04:08 UTC; 21h ago

● sshd-keygen@rsa.service - OpenSSH rsa Server Key Generation
   Loaded: loaded (/usr/lib/systemd/system/sshd-keygen@.service; disabled; vendor preset: disabled)
   Active: inactive (dead)
Condition: start condition failed at Mon 2025-11-17 16:04:07 UTC; 21h ago

● sshd.service - OpenSSH server daemon
   Loaded: loaded (/usr/lib/systemd/system/sshd.service; enabled; vendor preset: enabled)
  Drop-In: /etc/systemd/system/sshd.service.d
           └─05-cpusetconfig.conf
   Active: active (running) since Mon 2025-11-17 16:04:54 UTC; 21h ago
     Docs: man:sshd(8)
           man:sshd_config(5)
  Process: 8708 ExecStartPre=/opt/omneon/sbin/cpusetconfig sshd.service (code=exited, status=0/SUCCESS)
 Main PID: 8807 (sshd)
    Tasks: 1 (limit: 399998)
   Memory: 1.8M
   CGroup: /system.slice/sshd.service
           └─8807 /usr/sbin/sshd -D

Nov 17 16:04:54 vosflex.localdomain systemd[1]: Starting OpenSSH server daemon...
Nov 17 16:04:54 vosflex.localdomain cpusetconfig[8708]: + '[' -n sshd.service ']'
Nov 17 16:04:54 vosflex.localdomain cpusetconfig[8708]: + mkdir -p /run/cpusetconfig/sshd.service
Nov 17 16:04:54 vosflex.localdomain cpusetconfig[8708]: + '[' -f /run/nmiirq/allowedcpulist ']'
Nov 17 16:04:54 vosflex.localdomain cpusetconfig[8708]: + CPUS=/sys/fs/cgroup/cpuset/system.slice/sshd.service/cpuset.cpus
Nov 17 16:04:54 vosflex.localdomain cpusetconfig[8708]: + cat /run/nmiirq/allowedcpulist
Nov 17 16:04:54 vosflex.localdomain cpusetconfig[8708]: + exit 0
Nov 17 16:04:54 vosflex.localdomain sshd[8807]: Server listening on 0.0.0.0 port 22.
Nov 17 16:04:54 vosflex.localdomain systemd[1]: Started OpenSSH server daemon.

● sssd-kcm.service - SSSD Kerberos Cache Manager
   Loaded: loaded (/usr/lib/systemd/system/sssd-kcm.service; indirect; vendor preset: disabled)
   Active: inactive (dead)
     Docs: man:sssd-kcm(5)

● sssd.service - System Security Services Daemon
   Loaded: loaded (/usr/lib/systemd/system/sssd.service; enabled; vendor preset: enabled)
   Active: inactive (dead)
Condition: start condition failed at Mon 2025-11-17 16:04:08 UTC; 21h ago

● start-reboot-guard.service - Start Reboot Guard
   Loaded: loaded (/etc/systemd/system/start-reboot-guard.service; enabled; vendor preset: disabled)
   Active: inactive (dead) since Mon 2025-11-17 16:03:53 UTC; 21h ago
 Main PID: 3577 (code=exited, status=0/SUCCESS)

Nov 17 16:03:53 vosflex.localdomain systemd[1]: Starting Start Reboot Guard...
Nov 17 16:03:53 vosflex.localdomain systemctl[3577]: Created symlink /etc/systemd/system/shutdown.target.requires/reboot-guard.service → /etc/systemd/system/reboot-guard.service.
Nov 17 16:03:53 vosflex.localdomain systemd[1]: start-reboot-guard.service: Succeeded.
Nov 17 16:03:53 vosflex.localdomain systemd[1]: Started Start Reboot Guard.

● stop-reboot-guard.service - Stop Reboot Guard
   Loaded: loaded (/etc/systemd/system/stop-reboot-guard.service; enabled; vendor preset: disabled)
   Active: inactive (dead) since Mon 2025-11-17 16:04:08 UTC; 21h ago
  Process: 4219 ExecStart=/bin/systemctl disable reboot-guard (code=exited, status=0/SUCCESS)
 Main PID: 4219 (code=exited, status=0/SUCCESS)

Nov 17 16:04:08 vosflex.localdomain systemd[1]: Started Stop Reboot Guard.
Nov 17 16:04:08 vosflex.localdomain systemctl[4219]: Removed /etc/systemd/system/shutdown.target.requires/reboot-guard.service.
Nov 17 16:04:08 vosflex.localdomain systemd[1]: stop-reboot-guard.service: Succeeded.

● systemd-ask-password-console.service - Dispatch Password Requests to Console
   Loaded: loaded (/usr/lib/systemd/system/systemd-ask-password-console.service; static; vendor preset: disabled)
   Active: inactive (dead)
     Docs: man:systemd-ask-password-console.service(8)

● systemd-ask-password-wall.service - Forward Password Requests to Wall
   Loaded: loaded (/usr/lib/systemd/system/systemd-ask-password-wall.service; static; vendor preset: disabled)
   Active: inactive (dead)
     Docs: man:systemd-ask-password-console.service(8)

● systemd-binfmt.service - Set Up Additional Binary Formats
   Loaded: loaded (/usr/lib/systemd/system/systemd-binfmt.service; static; vendor preset: disabled)
   Active: inactive (dead)
Condition: start condition failed at Mon 2025-11-17 16:03:50 UTC; 21h ago
     Docs: man:systemd-binfmt.service(8)
           man:binfmt.d(5)
           https://www.kernel.org/doc/html/latest/admin-guide/binfmt-misc.html
           https://www.freedesktop.org/wiki/Software/systemd/APIFileSystems

● systemd-firstboot.service - First Boot Wizard
   Loaded: loaded (/usr/lib/systemd/system/systemd-firstboot.service; static; vendor preset: disabled)
   Active: inactive (dead)
Condition: start condition failed at Mon 2025-11-17 16:03:50 UTC; 21h ago
     Docs: man:systemd-firstboot(1)

● systemd-fsck-root.service - File System Check on Root Device
   Loaded: loaded (/usr/lib/systemd/system/systemd-fsck-root.service; static; vendor preset: disabled)
   Active: inactive (dead) since Mon 2025-11-17 16:03:50 UTC; 21h ago
     Docs: man:systemd-fsck-root.service(8)
 Main PID: 2263 (code=exited, status=0/SUCCESS)

Nov 17 16:03:49 localhost systemd[1]: Starting File System Check on /dev/disk/by-uuid/5cb91fbe-5e7b-44ef-9e16-e9b0cd6acc96...
Nov 17 16:03:49 localhost systemd-fsck[2266]: ROOTB: clean, 86925/2883584 files, 8512732/11520000 blocks
Nov 17 16:03:49 localhost systemd[1]: Started File System Check on /dev/disk/by-uuid/5cb91fbe-5e7b-44ef-9e16-e9b0cd6acc96.

● systemd-hostnamed.service - Hostname Service
   Loaded: loaded (/usr/lib/systemd/system/systemd-hostnamed.service; static; vendor preset: disabled)
   Active: inactive (dead) since Mon 2025-11-17 16:07:08 UTC; 21h ago
     Docs: man:systemd-hostnamed.service(8)
           man:hostname(5)
           man:machine-info(5)
           https://www.freedesktop.org/wiki/Software/systemd/hostnamed
  Process: 31303 ExecStart=/usr/lib/systemd/systemd-hostnamed (code=exited, status=0/SUCCESS)
 Main PID: 31303 (code=exited, status=0/SUCCESS)

Nov 17 16:06:38 vosflex.localdomain systemd[1]: Starting Hostname Service...
Nov 17 16:06:38 vosflex.localdomain systemd[1]: Started Hostname Service.
Nov 17 16:07:08 vosflex.localdomain systemd[1]: systemd-hostnamed.service: Succeeded.

● systemd-hwdb-update.service - Rebuild Hardware Database
   Loaded: loaded (/usr/lib/systemd/system/systemd-hwdb-update.service; static; vendor preset: disabled)
   Active: inactive (dead)
Condition: start condition failed at Mon 2025-11-17 16:03:50 UTC; 21h ago
     Docs: man:hwdb(7)
           man:systemd-hwdb(8)

● systemd-initctl.service - initctl Compatibility Daemon
   Loaded: loaded (/usr/lib/systemd/system/systemd-initctl.service; static; vendor preset: disabled)
   Active: inactive (dead)
     Docs: man:systemd-initctl.service(8)

● systemd-journal-catalog-update.service - Rebuild Journal Catalog
   Loaded: loaded (/usr/lib/systemd/system/systemd-journal-catalog-update.service; static; vendor preset: disabled)
   Active: inactive (dead)
Condition: start condition failed at Mon 2025-11-17 16:03:51 UTC; 21h ago
     Docs: man:systemd-journald.service(8)
           man:journald.conf(5)

● systemd-journal-flush.service - Flush Journal to Persistent Storage
   Loaded: loaded (/usr/lib/systemd/system/systemd-journal-flush.service; static; vendor preset: disabled)
   Active: active (exited) since Mon 2025-11-17 16:03:53 UTC; 21h ago
     Docs: man:systemd-journald.service(8)
           man:journald.conf(5)
 Main PID: 3456 (code=exited, status=0/SUCCESS)
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/systemd-journal-flush.service

Nov 17 16:03:53 vosflex.localdomain systemd[1]: Starting Flush Journal to Persistent Storage...
Nov 17 16:03:53 vosflex.localdomain systemd[1]: Started Flush Journal to Persistent Storage.

● systemd-journald.service - Journal Service
   Loaded: loaded (/usr/lib/systemd/system/systemd-journald.service; static; vendor preset: disabled)
  Drop-In: /etc/systemd/system/systemd-journald.service.d
           └─05-cpusetconfig.conf
   Active: active (running) since Mon 2025-11-17 16:03:53 UTC; 21h ago
     Docs: man:systemd-journald.service(8)
           man:journald.conf(5)
 Main PID: 3086 (systemd-journal)
   Status: "Processing requests..."
    Tasks: 1 (limit: 399998)
   Memory: 210.2M
   CGroup: /system.slice/systemd-journald.service
           └─3086 /usr/lib/systemd/systemd-journald

Nov 17 16:03:53 vosflex.localdomain systemd-journald[3086]: Journal started
Nov 17 16:03:53 vosflex.localdomain systemd-journald[3086]: Runtime journal (/run/log/journal/c04ddb266e6b4d668ae5add906f11001) is 8.0M, max 4.0G, 3.9G free.
Nov 17 16:03:50 vosflex.localdomain systemd[1]: systemd-journald.service: Succeeded.
Nov 17 16:03:53 vosflex.localdomain systemd-journald[3086]: Time spent on flushing to /var is 87.026ms for 2277 entries.
Nov 17 16:03:53 vosflex.localdomain systemd-journald[3086]: System journal (/var/log/journal/c04ddb266e6b4d668ae5add906f11001) is 1.9G, max 1.9G, 51.6M free.

● systemd-log-bootinfo.service - Log stats about boot process
   Loaded: loaded (/etc/systemd/system/systemd-log-bootinfo.service; enabled; vendor preset: disabled)
   Active: inactive (dead) since Mon 2025-11-17 16:06:38 UTC; 21h ago
  Process: 4210 ExecStart=/usr/local/bin/systemd-log-bootinfo.sh (code=exited, status=0/SUCCESS)
 Main PID: 4210 (code=exited, status=0/SUCCESS)

Nov 17 16:06:38 vosflex.localdomain systemd-log-bootinfo.sh[31293]:   timers.target                                                                                                                                 loaded active     active       Timers
Nov 17 16:06:38 vosflex.localdomain systemd-log-bootinfo.sh[31293]:   atop-rotate.timer                                                                                                                             loaded active     waiting      Daily atop restart
Nov 17 16:06:38 vosflex.localdomain systemd-log-bootinfo.sh[31293]:   systemd-tmpfiles-clean.timer                                                                                                                  loaded active     waiting      Daily Cleanup of Temporary Directories
Nov 17 16:06:38 vosflex.localdomain systemd-log-bootinfo.sh[31293]:   unbound-anchor.timer                                                                                                                          loaded active     waiting      daily update of the root trust anchor for DNSSEC
Nov 17 16:06:38 vosflex.localdomain systemd-log-bootinfo.sh[31293]: LOAD   = Reflects whether the unit definition was properly loaded.
Nov 17 16:06:38 vosflex.localdomain systemd-log-bootinfo.sh[31293]: ACTIVE = The high-level unit activation state, i.e. generalization of SUB.
Nov 17 16:06:38 vosflex.localdomain systemd-log-bootinfo.sh[31293]: SUB    = The low-level unit activation state, values depend on unit type.
Nov 17 16:06:38 vosflex.localdomain systemd-log-bootinfo.sh[31293]: 410 loaded units listed. Pass --all to see loaded but inactive units, too.
Nov 17 16:06:38 vosflex.localdomain systemd-log-bootinfo.sh[31293]: To show all installed unit files use 'systemctl list-unit-files'.
Nov 17 16:06:38 vosflex.localdomain systemd[1]: systemd-log-bootinfo.service: Succeeded.

● systemd-logind.service - Login Service
   Loaded: loaded (/usr/lib/systemd/system/systemd-logind.service; static; vendor preset: disabled)
  Drop-In: /etc/systemd/system/systemd-logind.service.d
           └─05-cpusetconfig.conf
   Active: active (running) since Mon 2025-11-17 16:04:08 UTC; 21h ago
     Docs: man:systemd-logind.service(8)
           man:logind.conf(5)
           https://www.freedesktop.org/wiki/Software/systemd/logind
           https://www.freedesktop.org/wiki/Software/systemd/multiseat
 Main PID: 4257 (systemd-logind)
   Status: "Processing requests..."
    Tasks: 1 (limit: 399998)
   Memory: 1.1M
   CGroup: /system.slice/systemd-logind.service
           └─4257 /usr/lib/systemd/systemd-logind

Nov 17 16:04:08 vosflex.localdomain systemd[1]: Starting Login Service...
Nov 17 16:04:08 vosflex.localdomain cpusetconfig[4179]: + '[' -n systemd-logind.service ']'
Nov 17 16:04:08 vosflex.localdomain cpusetconfig[4179]: + mkdir -p /run/cpusetconfig/systemd-logind.service
Nov 17 16:04:08 vosflex.localdomain cpusetconfig[4179]: + '[' -f /run/nmiirq/allowedcpulist ']'
Nov 17 16:04:08 vosflex.localdomain cpusetconfig[4179]: + exit 0
Nov 17 16:04:08 vosflex.localdomain systemd-logind[4257]: New seat seat0.
Nov 17 16:04:08 vosflex.localdomain systemd-logind[4257]: Watching system buttons on /dev/input/event1 (Power Button)
Nov 17 16:04:08 vosflex.localdomain systemd-logind[4257]: Watching system buttons on /dev/input/event0 (Power Button)
Nov 17 16:04:08 vosflex.localdomain systemd[1]: Started Login Service.

● systemd-machine-id-commit.service - Commit a transient machine-id on disk
   Loaded: loaded (/usr/lib/systemd/system/systemd-machine-id-commit.service; static; vendor preset: disabled)
   Active: inactive (dead)
Condition: start condition failed at Mon 2025-11-17 16:03:51 UTC; 21h ago
     Docs: man:systemd-machine-id-commit.service(8)

● systemd-modules-load.service - Load Kernel Modules
   Loaded: loaded (/usr/lib/systemd/system/systemd-modules-load.service; static; vendor preset: disabled)
   Active: failed (Result: exit-code) since Mon 2025-11-17 16:03:50 UTC; 21h ago
     Docs: man:systemd-modules-load.service(8)
           man:modules-load.d(5)
 Main PID: 2370 (code=exited, status=1/FAILURE)

● systemd-pstore.service - Platform Persistent Storage Archival
   Loaded: loaded (/usr/lib/systemd/system/systemd-pstore.service; enabled; vendor preset: enabled)
   Active: inactive (dead)
Condition: start condition failed at Mon 2025-11-17 16:03:50 UTC; 21h ago
     Docs: man:systemd-pstore(8)

● systemd-random-seed.service - Load/Save Random Seed
   Loaded: loaded (/usr/lib/systemd/system/systemd-random-seed.service; static; vendor preset: disabled)
   Active: active (exited) since Mon 2025-11-17 16:03:50 UTC; 21h ago
     Docs: man:systemd-random-seed.service(8)
           man:random(4)
 Main PID: 2410 (code=exited, status=0/SUCCESS)
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/systemd-random-seed.service

● systemd-remount-fs.service - Remount Root and Kernel File Systems
   Loaded: loaded (/usr/lib/systemd/system/systemd-remount-fs.service; static; vendor preset: disabled)
   Active: active (exited) since Mon 2025-11-17 16:03:50 UTC; 21h ago
     Docs: man:systemd-remount-fs.service(8)
           https://www.freedesktop.org/wiki/Software/systemd/APIFileSystems
 Main PID: 2392 (code=exited, status=0/SUCCESS)
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/systemd-remount-fs.service

● systemd-rfkill.service - Load/Save RF Kill Switch Status
   Loaded: loaded (/usr/lib/systemd/system/systemd-rfkill.service; static; vendor preset: disabled)
   Active: inactive (dead)
     Docs: man:systemd-rfkill.service(8)

● systemd-sysctl.service - Apply Kernel Variables
   Loaded: loaded (/usr/lib/systemd/system/systemd-sysctl.service; static; vendor preset: disabled)
  Drop-In: /etc/systemd/system/systemd-sysctl.service.d
           └─override.conf
   Active: failed (Result: exit-code) since Mon 2025-11-17 16:03:53 UTC; 21h ago
     Docs: man:systemd-sysctl.service(8)
           man:sysctl.d(5)
 Main PID: 3540 (code=exited, status=1/FAILURE)

Nov 17 16:03:53 vosflex.localdomain systemd[1]: Starting Apply Kernel Variables...
Nov 17 16:03:53 vosflex.localdomain systemd-sysctl[3540]: Couldn't write '0' to 'kernel/sched_energy_aware': Operation not supported
Nov 17 16:03:53 vosflex.localdomain systemd[1]: systemd-sysctl.service: Main process exited, code=exited, status=1/FAILURE
Nov 17 16:03:53 vosflex.localdomain systemd[1]: systemd-sysctl.service: Failed with result 'exit-code'.
Nov 17 16:03:53 vosflex.localdomain systemd[1]: Failed to start Apply Kernel Variables.

● systemd-sysusers.service - Create System Users
   Loaded: loaded (/usr/lib/systemd/system/systemd-sysusers.service; static; vendor preset: disabled)
   Active: inactive (dead)
Condition: start condition failed at Mon 2025-11-17 16:03:50 UTC; 21h ago
     Docs: man:sysusers.d(5)
           man:systemd-sysusers.service(8)

● systemd-tmpfiles-clean.service - Cleanup of Temporary Directories
   Loaded: loaded (/usr/lib/systemd/system/systemd-tmpfiles-clean.service; static; vendor preset: disabled)
   Active: inactive (dead) since Mon 2025-11-17 16:18:38 UTC; 21h ago
     Docs: man:tmpfiles.d(5)
           man:systemd-tmpfiles(8)
  Process: 73109 ExecStart=/usr/bin/systemd-tmpfiles --clean (code=exited, status=0/SUCCESS)
 Main PID: 73109 (code=exited, status=0/SUCCESS)

Nov 17 16:18:38 XOSEncoder-01 systemd[1]: Starting Cleanup of Temporary Directories...
Nov 17 16:18:38 XOSEncoder-01 systemd[1]: systemd-tmpfiles-clean.service: Succeeded.
Nov 17 16:18:38 XOSEncoder-01 systemd[1]: Started Cleanup of Temporary Directories.

● systemd-tmpfiles-setup-dev.service - Create Static Device Nodes in /dev
   Loaded: loaded (/usr/lib/systemd/system/systemd-tmpfiles-setup-dev.service; static; vendor preset: disabled)
   Active: active (exited) since Mon 2025-11-17 16:03:50 UTC; 21h ago
     Docs: man:tmpfiles.d(5)
           man:systemd-tmpfiles(8)
 Main PID: 2414 (code=exited, status=0/SUCCESS)
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/systemd-tmpfiles-setup-dev.service

● systemd-tmpfiles-setup.service - Create Volatile Files and Directories
   Loaded: loaded (/usr/lib/systemd/system/systemd-tmpfiles-setup.service; static; vendor preset: disabled)
   Active: active (exited) since Mon 2025-11-17 16:03:53 UTC; 21h ago
     Docs: man:tmpfiles.d(5)
           man:systemd-tmpfiles(8)
 Main PID: 3522 (code=exited, status=0/SUCCESS)
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/systemd-tmpfiles-setup.service

Nov 17 16:03:53 vosflex.localdomain systemd[1]: Starting Create Volatile Files and Directories...
Nov 17 16:03:53 vosflex.localdomain systemd[1]: Started Create Volatile Files and Directories.

● systemd-udev-settle.service - udev Wait for Complete Device Initialization
   Loaded: loaded (/usr/lib/systemd/system/systemd-udev-settle.service; static; vendor preset: disabled)
  Drop-In: /etc/systemd/system/systemd-udev-settle.service.d
           └─50-timeout.conf
   Active: active (exited) since Mon 2025-11-17 16:03:53 UTC; 21h ago
     Docs: man:udev(7)
           man:systemd-udevd.service(8)
 Main PID: 2429 (code=exited, status=0/SUCCESS)
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/systemd-udev-settle.service

Nov 17 16:03:53 vosflex.localdomain systemd[1]: Started udev Wait for Complete Device Initialization.

● systemd-udev-trigger.service - udev Coldplug all Devices
   Loaded: loaded (/usr/lib/systemd/system/systemd-udev-trigger.service; static; vendor preset: disabled)
  Drop-In: /usr/lib/systemd/system/systemd-udev-trigger.service.d
           └─systemd-udev-trigger-no-reload.conf
   Active: active (exited) since Mon 2025-11-17 16:03:50 UTC; 21h ago
     Docs: man:udev(7)
           man:systemd-udevd.service(8)
 Main PID: 2404 (code=exited, status=0/SUCCESS)
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/systemd-udev-trigger.service

● systemd-udevd.service - udev Kernel Device Manager
   Loaded: loaded (/usr/lib/systemd/system/systemd-udevd.service; static; vendor preset: disabled)
  Drop-In: /etc/systemd/system/systemd-udevd.service.d
           └─05-cpusetconfig.conf
   Active: active (running) since Mon 2025-11-17 16:03:50 UTC; 21h ago
     Docs: man:systemd-udevd.service(8)
           man:udev(7)
 Main PID: 2430 (systemd-udevd)
   Status: "Processing with 344 children at max"
    Tasks: 1
   Memory: 36.0M
   CGroup: /system.slice/systemd-udevd.service
           └─2430 /usr/lib/systemd/systemd-udevd

Nov 17 16:08:52 vosflex.localdomain systemd-udevd[42251]: link_config: autonegotiation is unset or enabled, the speed and duplex are not writable.
Nov 17 16:08:52 vosflex.localdomain systemd-udevd[42251]: Could not generate persistent MAC address for veth7a42c95b: No such file or directory
Nov 17 16:08:52 vosflex.localdomain systemd-udevd[42276]: link_config: autonegotiation is unset or enabled, the speed and duplex are not writable.
Nov 17 16:08:52 vosflex.localdomain systemd-udevd[42276]: Could not generate persistent MAC address for vethd1459ad1: No such file or directory
Nov 17 16:08:52 vosflex.localdomain systemd-udevd[42605]: link_config: autonegotiation is unset or enabled, the speed and duplex are not writable.
Nov 17 16:08:52 vosflex.localdomain systemd-udevd[42605]: Could not generate persistent MAC address for veth18440dce: No such file or directory
Nov 17 16:08:53 vosflex.localdomain systemd-udevd[42992]: link_config: autonegotiation is unset or enabled, the speed and duplex are not writable.
Nov 17 16:08:53 vosflex.localdomain systemd-udevd[42992]: Could not generate persistent MAC address for vethe9f4720d: No such file or directory
Nov 17 16:09:10 vosflex.localdomain systemd-udevd[48297]: link_config: autonegotiation is unset or enabled, the speed and duplex are not writable.
Nov 17 16:09:10 vosflex.localdomain systemd-udevd[48297]: Could not generate persistent MAC address for veth59671d56: No such file or directory

● systemd-update-done.service - Update is Completed
   Loaded: loaded (/usr/lib/systemd/system/systemd-update-done.service; static; vendor preset: disabled)
   Active: inactive (dead)
Condition: start condition failed at Mon 2025-11-17 16:03:51 UTC; 21h ago
     Docs: man:systemd-update-done.service(8)

● systemd-update-utmp-runlevel.service - Update UTMP about System Runlevel Changes
   Loaded: loaded (/usr/lib/systemd/system/systemd-update-utmp-runlevel.service; static; vendor preset: disabled)
   Active: inactive (dead) since Mon 2025-11-17 16:06:31 UTC; 21h ago
     Docs: man:systemd-update-utmp.service(8)
           man:utmp(5)
  Process: 31025 ExecStart=/usr/lib/systemd/systemd-update-utmp runlevel (code=exited, status=0/SUCCESS)
 Main PID: 31025 (code=exited, status=0/SUCCESS)

Nov 17 16:06:31 vosflex.localdomain systemd[1]: Starting Update UTMP about System Runlevel Changes...
Nov 17 16:06:31 vosflex.localdomain systemd[1]: systemd-update-utmp-runlevel.service: Succeeded.
Nov 17 16:06:31 vosflex.localdomain systemd[1]: Started Update UTMP about System Runlevel Changes.

● systemd-update-utmp.service - Update UTMP about System Boot/Shutdown
   Loaded: loaded (/usr/lib/systemd/system/systemd-update-utmp.service; static; vendor preset: disabled)
   Active: active (exited) since Mon 2025-11-17 16:03:53 UTC; 21h ago
     Docs: man:systemd-update-utmp.service(8)
           man:utmp(5)
 Main PID: 3555 (code=exited, status=0/SUCCESS)
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/systemd-update-utmp.service

Nov 17 16:03:53 vosflex.localdomain systemd[1]: Starting Update UTMP about System Boot/Shutdown...
Nov 17 16:03:53 vosflex.localdomain systemd[1]: Started Update UTMP about System Boot/Shutdown.

● systemd-user-sessions.service - Permit User Sessions
   Loaded: loaded (/usr/lib/systemd/system/systemd-user-sessions.service; static; vendor preset: disabled)
   Active: active (exited) since Mon 2025-11-17 16:04:54 UTC; 21h ago
     Docs: man:systemd-user-sessions.service(8)
  Process: 8906 ExecStart=/usr/lib/systemd/systemd-user-sessions start (code=exited, status=0/SUCCESS)
 Main PID: 8906 (code=exited, status=0/SUCCESS)
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/systemd-user-sessions.service

Nov 17 16:04:54 vosflex.localdomain systemd[1]: Starting Permit User Sessions...
Nov 17 16:04:54 vosflex.localdomain systemd[1]: Started Permit User Sessions.

● systemd-vconsole-setup.service - Setup Virtual Console
   Loaded: loaded (/usr/lib/systemd/system/systemd-vconsole-setup.service; static; vendor preset: disabled)
   Active: inactive (dead) since Mon 2025-11-17 16:03:48 UTC; 21h ago
     Docs: man:systemd-vconsole-setup.service(8)
           man:vconsole.conf(5)
 Main PID: 1762 (code=exited, status=0/SUCCESS)

Warning: Journal has been rotated since unit was started. Log output is incomplete or unavailable.

● teleportv2.service - Teleport V2
Unit tlp.service could not be found.
   Loaded: loaded (/etc/systemd/system/teleportv2.service; enabled; vendor preset: disabled)
  Drop-In: /etc/systemd/system/teleportv2.service.d
           └─05-cpusetconfig.conf, 70-j2path.conf
   Active: active (running) since Mon 2025-11-17 16:04:54 UTC; 21h ago
  Process: 8720 ExecStartPre=/opt/omneon/sbin/cpusetconfig teleportv2.service (code=exited, status=0/SUCCESS)
 Main PID: 8811 (sleep)
    Tasks: 1 (limit: 399998)
   Memory: 96.0K
   CGroup: /system.slice/teleportv2.service
           └─8811 sleep infinity

Nov 17 16:04:54 vosflex.localdomain cpusetconfig[8720]: + CPUS=/sys/fs/cgroup/cpuset/system.slice/teleportv2.service/cpuset.cpus
Nov 17 16:04:54 vosflex.localdomain cpusetconfig[8720]: + cat /run/nmiirq/allowedcpulist
Nov 17 16:04:54 vosflex.localdomain cpusetconfig[8720]: + exit 0
Nov 17 16:04:54 vosflex.localdomain systemd[1]: Started Teleport V2.
Nov 17 16:04:54 vosflex.localdomain teleportv2.sh[8836]: Failed to disable unit: Unit file teleport-a.service does not exist.
Nov 17 16:04:54 vosflex.localdomain teleportv2.sh[8869]: Failed to stop teleport-a.service: Unit teleport-a.service not loaded.
Nov 17 16:04:54 vosflex.localdomain teleportv2.sh[8869]: Failed to stop teleport-b.service: Unit teleport-b.service not loaded.
Nov 17 16:04:54 vosflex.localdomain teleportv2.sh[8869]: Failed to stop teleport-a-update.service: Unit teleport-a-update.service not loaded.
Nov 17 16:04:54 vosflex.localdomain teleportv2.sh[8869]: Failed to stop teleport-b-update.service: Unit teleport-b-update.service not loaded.
Nov 17 16:04:54 vosflex.localdomain teleportv2.sh[8811]: Not configured, sleeping forever

● timemaster.service - Synchronize system clock to NTP and PTP time sources
   Loaded: loaded (/usr/lib/systemd/system/timemaster.service; enabled; vendor preset: disabled)
  Drop-In: /etc/systemd/system/timemaster.service.d
           └─05-cpusetconfig.conf, 20-no-networkmanager.conf, 50-set_ntp_server.conf
   Active: active (running) since Mon 2025-11-17 16:04:54 UTC; 21h ago
  Process: 8861 ExecStartPre=/opt/omneon/sbin/set_ntp_server.py (code=exited, status=0/SUCCESS)
  Process: 8809 ExecStartPre=/bin/chmod 0644 /etc/NetworkManager/dispatcher.d/20-chrony (code=exited, status=1/FAILURE)
  Process: 8713 ExecStartPre=/opt/omneon/sbin/cpusetconfig timemaster.service (code=exited, status=0/SUCCESS)
 Main PID: 8885 (timemaster)
    Tasks: 2 (limit: 399998)
   Memory: 1.1M
   CGroup: /system.slice/timemaster.service
           ├─8885 /usr/sbin/timemaster -f /etc/timemaster.conf
           └─8897 /usr/sbin/chronyd -u chrony -n -f /var/run/timemaster/chrony.conf

Nov 17 16:04:54 vosflex.localdomain cpusetconfig[8713]: + exit 0
Nov 17 16:04:54 vosflex.localdomain chmod[8809]: /bin/chmod: cannot access '/etc/NetworkManager/dispatcher.d/20-chrony': No such file or directory
Nov 17 16:04:54 vosflex.localdomain systemd[1]: Started Synchronize system clock to NTP and PTP time sources.
Nov 17 16:04:54 vosflex.localdomain timemaster[8885]: [76.770] process 8897 started: /usr/sbin/chronyd -u chrony -n -f /var/run/timemaster/chrony.conf
Nov 17 16:04:54 vosflex.localdomain chronyd[8897]: chronyd version 4.5 starting (+CMDMON +NTP +REFCLOCK +RTC +PRIVDROP +SCFILTER +SIGND +ASYNCDNS +NTS +SECHASH +IPV6 +DEBUG)
Nov 17 16:04:54 vosflex.localdomain chronyd[8897]: commandkey directive is no longer supported
Nov 17 16:04:54 vosflex.localdomain chronyd[8897]: generatecommandkey directive is no longer supported
Nov 17 16:04:54 vosflex.localdomain chronyd[8897]: Loaded 0 symmetric keys
Nov 17 16:04:54 vosflex.localdomain chronyd[8897]: Frequency 4.459 +/- 0.386 ppm read from /var/lib/chrony/drift
Nov 17 16:04:54 vosflex.localdomain chronyd[8897]: Using right/UTC timezone to obtain leap second data

● tuned.service - Dynamic System Tuning Daemon
   Loaded: loaded (/usr/lib/systemd/system/tuned.service; enabled; vendor preset: enabled)
  Drop-In: /etc/systemd/system/tuned.service.d
           └─05-cpusetconfig.conf
   Active: active (running) since Mon 2025-11-17 16:04:55 UTC; 21h ago
     Docs: man:tuned(8)
           man:tuned.conf(5)
           man:tuned-adm(8)
  Process: 8766 ExecStartPre=/opt/omneon/sbin/cpusetconfig tuned.service (code=exited, status=0/SUCCESS)
 Main PID: 8832 (tuned)
    Tasks: 4 (limit: 399998)
   Memory: 24.1M
   CGroup: /system.slice/tuned.service
           └─8832 /usr/libexec/platform-python -Es /usr/sbin/tuned -l -P

Nov 17 16:04:54 vosflex.localdomain systemd[1]: Starting Dynamic System Tuning Daemon...
Nov 17 16:04:54 vosflex.localdomain cpusetconfig[8766]: + '[' -n tuned.service ']'
Nov 17 16:04:54 vosflex.localdomain cpusetconfig[8766]: + mkdir -p /run/cpusetconfig/tuned.service
Nov 17 16:04:54 vosflex.localdomain cpusetconfig[8766]: + '[' -f /run/nmiirq/allowedcpulist ']'
Nov 17 16:04:54 vosflex.localdomain cpusetconfig[8766]: + CPUS=/sys/fs/cgroup/cpuset/system.slice/tuned.service/cpuset.cpus
Nov 17 16:04:54 vosflex.localdomain cpusetconfig[8766]: + cat /run/nmiirq/allowedcpulist
Nov 17 16:04:54 vosflex.localdomain cpusetconfig[8766]: + exit 0
Nov 17 16:04:55 vosflex.localdomain systemd[1]: Started Dynamic System Tuning Daemon.

● turbostatlog.service - turbostat log service
   Loaded: loaded (/etc/systemd/system/turbostatlog.service; enabled; vendor preset: disabled)
   Active: active (running) since Mon 2025-11-17 16:04:08 UTC; 21h ago
 Main PID: 4190 (turbostat)
    Tasks: 2 (limit: 399998)
   Memory: 604.0K
   CGroup: /system.slice/turbostatlog.service
           └─4190 /bin/turbostat -i 3600

Nov 18 13:04:08 XOSEncoder-01 turbostat[4190]: 90	58	152	4.11	3693	2247	1.63	  662774	      10	  595247	0.00	95.93	0.75
Nov 18 13:04:08 XOSEncoder-01 turbostat[4190]: 90	142	89	2.42	3694	2247	1.61	  588511	       6	  536898	0.00	97.61
Nov 18 13:04:08 XOSEncoder-01 turbostat[4190]: 91	59	139	3.77	3693	2247	1.66	  660901	       1	  606919	0.00	96.27	0.74
Nov 18 13:04:08 XOSEncoder-01 turbostat[4190]: 91	143	89	2.42	3692	2247	1.62	  503757	       2	  464671	0.00	97.61
Nov 18 13:04:08 XOSEncoder-01 turbostat[4190]: 92	60	102	2.77	3692	2247	1.54	 1092016	     199	 1035201	0.00	97.28	0.70
Nov 18 13:04:08 XOSEncoder-01 turbostat[4190]: 92	144	75	2.04	3691	2247	1.51	  740426	      89	  701303	0.00	98.00
Nov 18 13:04:08 XOSEncoder-01 turbostat[4190]: 93	61	106	2.86	3693	2247	1.61	  529003	       2	  480146	0.00	97.17	0.74
Nov 18 13:04:08 XOSEncoder-01 turbostat[4190]: 93	145	125	3.38	3692	2247	1.70	  522063	      13	  474459	0.00	96.65
Nov 18 13:04:08 XOSEncoder-01 turbostat[4190]: 94	62	87	2.34	3692	2247	1.60	  518250	       3	  477501	0.00	97.69	0.69
Nov 18 13:04:08 XOSEncoder-01 turbostat[4190]: 94	146	91	2.47	3693	2247	1.80	  430057	       8	  392918	0.00	97.56

● unbound-anchor.service - update of the root trust anchor for DNSSEC validation in unbound
   Loaded: loaded (/usr/lib/systemd/system/unbound-anchor.service; static; vendor preset: disabled)
   Active: inactive (dead) since Tue 2025-11-18 00:00:30 UTC; 13h ago
     Docs: man:unbound-anchor(8)
  Process: 439125 ExecStart=/usr/sbin/unbound-anchor -a /var/lib/unbound/root.key -c /etc/unbound/icannbundle.pem -f /etc/resolv.conf -R (code=exited, status=0/SUCCESS)
 Main PID: 439125 (code=exited, status=0/SUCCESS)

Nov 18 00:00:05 XOSEncoder-01 systemd[1]: Starting update of the root trust anchor for DNSSEC validation in unbound...
Nov 18 00:00:30 XOSEncoder-01 systemd[1]: unbound-anchor.service: Succeeded.
Nov 18 00:00:30 XOSEncoder-01 systemd[1]: Started update of the root trust anchor for DNSSEC validation in unbound.

● usb-dongle-check.service - Check SC USB Dongle and power-cycle once if necessary
   Loaded: loaded (/etc/systemd/system/usb-dongle-check.service; enabled; vendor preset: disabled)
   Active: active (exited) since Mon 2025-11-17 16:03:53 UTC; 21h ago
 Main PID: 3735 (code=exited, status=0/SUCCESS)
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/usb-dongle-check.service

Nov 17 16:03:53 vosflex.localdomain systemd[1]: Starting Check SC USB Dongle and power-cycle once if necessary...
Nov 17 16:03:53 vosflex.localdomain usbdongle[3735]: USB Smart Card DONGLE CHECK
Nov 17 16:03:53 vosflex.localdomain systemd[1]: Started Check SC USB Dongle and power-cycle once if necessary.
Unit wickedd-nanny.service could not be found.
Unit wickedd.service could not be found.
Unit ypbind.service could not be found.
Unit yppasswdd.service could not be found.
Unit ypserv.service could not be found.
Unit ypxfrd.service could not be found.

● var-lib-persistent.service - Setup Persistent volume
   Loaded: loaded (/etc/systemd/system/var-lib-persistent.service; enabled; vendor preset: disabled)
   Active: active (exited) since Mon 2025-11-17 16:03:51 UTC; 21h ago
 Main PID: 2617 (code=exited, status=0/SUCCESS)
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/var-lib-persistent.service

Nov 17 16:03:53 vosflex.localdomain setup-persistent-vol.sh[2646]: dumpe2fs 1.45.6 (20-Mar-2020)
Nov 17 16:03:53 vosflex.localdomain setup-persistent-vol.sh[2671]: PERSISTENT: 23/25376 files (0.0% non-contiguous), 12043/101376 blocks

● -.slice - Root Slice
   Loaded: loaded
   Active: active since Mon 2025-11-17 16:03:48 UTC; 21h ago
     Docs: man:systemd.special(7)
    Tasks: 15362
   Memory: 43.3G
      CPU: 5d 9h 41min 52.880s
   CGroup: /
           ├─init.scope
           │ └─1 /usr/lib/systemd/systemd --switched-root --system --deserialize 16
           ├─k8s.io
           │ ├─00e98de63a20a316ed80b9fb58d481ce95ac134055b07fa246fb8be067636213
           │ │ └─44888 /pause
           │ ├─03b57a55d5b60b06581e669f2d1bb915d5866652d1fe45dababd7b973fa4a2d4
           │ │ ├─47131 /usr/bin/java -cp * com.harmonicinc.vos.hhp.MainControllerApp
           │ │ ├─47184 /usr/bin/python3 -s /usr/bin/supervisord -c /opt/harmonic/hhp-controller/etc/supervisord.conf
           │ │ └─47849 /opt/goofys/goofys -f -o nonempty --profile minio --endpoint http://minio-service:9000 vos-home-cluster1:app_data /opt/harmonic/vos/voshome/app_data
           │ ├─048a23daccd11173da5ccd3aed2475ac58bd2f5c48e564c56d3867c90e407e8b
           │ │ └─27170 /pause
           │ ├─0549d98179a918e17cf119b8b8e147bc0ed93b3a2fe6800534943bc5cade26ec
           │ │ ├─14476 /usr/bin/dumb-init -- /nginx-ingress-controller --configmap=ingress-nginx/ingress-nginx --tcp-services-configmap=ingress-nginx/tcp-services --udp-services-configmap=ingress-nginx/udp-services --annotations-prefix=nginx.ingress.kubernetes.io --default-ssl-certificate=ingress-nginx/ingress-nginx-vos-default-ssl-certificate
           │ │ ├─14578 /nginx-ingress-controller --configmap=ingress-nginx/ingress-nginx --tcp-services-configmap=ingress-nginx/tcp-services --udp-services-configmap=ingress-nginx/udp-services --annotations-prefix=nginx.ingress.kubernetes.io --default-ssl-certificate=ingress-nginx/ingress-nginx-vos-default-ssl-certificate
           │ │ ├─17843 nginx: master process /usr/bin/nginx -c /etc/nginx/nginx.conf
           │ │ ├─31898 nginx: worker process
           │ │ ├─31899 nginx: worker process
           │ │ ├─31900 nginx: worker process
           │ │ ├─31901 nginx: worker process
           │ │ ├─31923 nginx: worker process
           │ │ ├─31938 nginx: worker process
           │ │ ├─31976 nginx: worker process
           │ │ ├─32010 nginx: worker process
           │ │ ├─32039 nginx: worker process
           │ │ ├─32078 nginx: worker process
           │ │ ├─32113 nginx: worker process
           │ │ ├─32141 nginx: worker process
           │ │ ├─32183 nginx: worker process
           │ │ ├─32223 nginx: worker process
           │ │ ├─32260 nginx: worker process
           │ │ ├─32296 nginx: worker process
           │ │ ├─32322 nginx: worker process
           │ │ ├─32357 nginx: worker process
           │ │ ├─32385 nginx: worker process
           │ │ ├─32398 nginx: worker process
           │ │ ├─32448 nginx: worker process
           │ │ ├─32476 nginx: worker process
           │ │ ├─32518 nginx: worker process
           │ │ ├─32560 nginx: worker process
           │ │ ├─32588 nginx: worker process
           │ │ ├─32612 nginx: worker process
           │ │ ├─32650 nginx: worker process
           │ │ ├─32680 nginx: worker process
           │ │ ├─32719 nginx: worker process
           │ │ ├─32755 nginx: worker process
           │ │ ├─32785 nginx: worker process
           │ │ ├─32823 nginx: worker process
           │ │ ├─32860 nginx: worker process
           │ │ ├─32890 nginx: worker process
           │ │ ├─32920 nginx: worker process
           │ │ ├─32953 nginx: worker process
           │ │ ├─32990 nginx: worker process
           │ │ ├─33021 nginx: worker process
           │ │ ├─33055 nginx: worker process
           │ │ ├─33082 nginx: worker process
           │ │ ├─33123 nginx: worker process
           │ │ ├─33143 nginx: worker process
           │ │ ├─33183 nginx: worker process
           │ │ ├─33218 nginx: worker process
           │ │ ├─33256 nginx: worker process
           │ │ ├─33283 nginx: worker process
           │ │ ├─33322 nginx: worker process
           │ │ ├─33355 nginx: worker process
           │ │ ├─33385 nginx: worker process
           │ │ ├─33421 nginx: worker process
           │ │ ├─33454 nginx: worker process
           │ │ ├─33490 nginx: worker process
           │ │ ├─33526 nginx: worker process
           │ │ ├─33562 nginx: worker process
           │ │ ├─33586 nginx: worker process
           │ │ ├─33618 nginx: worker process
           │ │ ├─33653 nginx: worker process
           │ │ ├─33685 nginx: worker process
           │ │ ├─33724 nginx: worker process
           │ │ ├─33756 nginx: worker process
           │ │ ├─33784 nginx: worker process
           │ │ ├─33817 nginx: worker process
           │ │ ├─33850 nginx: worker process
           │ │ ├─33883 nginx: worker process
           │ │ ├─33919 nginx: worker process
           │ │ ├─33951 nginx: worker process
           │ │ ├─33984 nginx: worker process
           │ │ ├─34015 nginx: worker process
           │ │ ├─34049 nginx: worker process
           │ │ ├─34082 nginx: worker process
           │ │ ├─34114 nginx: worker process
           │ │ ├─34147 nginx: worker process
           │ │ ├─34181 nginx: worker process
           │ │ ├─34217 nginx: worker process
           │ │ ├─34249 nginx: worker process
           │ │ ├─34285 nginx: worker process
           │ │ ├─34312 nginx: worker process
           │ │ ├─34351 nginx: worker process
           │ │ ├─34381 nginx: worker process
           │ │ ├─34415 nginx: worker process
           │ │ ├─34449 nginx: worker process
           │ │ ├─34486 nginx: worker process
           │ │ ├─34514 nginx: worker process
           │ │ ├─34550 nginx: worker process
           │ │ ├─34582 nginx: worker process
           │ │ ├─34614 nginx: worker process
           │ │ ├─34651 nginx: worker process
           │ │ ├─34683 nginx: worker process
           │ │ ├─34712 nginx: worker process
           │ │ ├─34751 nginx: worker process
           │ │ ├─34784 nginx: worker process
           │ │ ├─34814 nginx: worker process
           │ │ ├─34851 nginx: worker process
           │ │ ├─34885 nginx: worker process
           │ │ ├─34919 nginx: worker process
           │ │ ├─34945 nginx: worker process
           │ │ ├─34975 nginx: worker process
           │ │ ├─35019 nginx: worker process
           │ │ ├─35040 nginx: worker process
           │ │ ├─35083 nginx: worker process
           │ │ ├─35108 nginx: worker process
           │ │ ├─35147 nginx: worker process
           │ │ ├─35185 nginx: worker process
           │ │ ├─35217 nginx: worker process
           │ │ ├─35247 nginx: worker process
           │ │ ├─35281 nginx: worker process
           │ │ ├─35308 nginx: worker process
           │ │ ├─35347 nginx: worker process
           │ │ ├─35377 nginx: worker process
           │ │ ├─35418 nginx: worker process
           │ │ ├─35444 nginx: worker process
           │ │ ├─35485 nginx: worker process
           │ │ ├─35514 nginx: worker process
           │ │ ├─35551 nginx: worker process
           │ │ ├─35584 nginx: worker process
           │ │ ├─35617 nginx: worker process
           │ │ ├─35652 nginx: worker process
           │ │ ├─35679 nginx: worker process
           │ │ ├─35714 nginx: worker process
           │ │ ├─35751 nginx: worker process
           │ │ ├─35778 nginx: worker process
           │ │ ├─35818 nginx: worker process
           │ │ ├─35848 nginx: worker process
           │ │ ├─35889 nginx: worker process
           │ │ ├─35918 nginx: worker process
           │ │ ├─35955 nginx: worker process
           │ │ ├─35990 nginx: worker process
           │ │ ├─36016 nginx: worker process
           │ │ ├─36031 nginx: worker process
           │ │ ├─36076 nginx: worker process
           │ │ ├─36109 nginx: worker process
           │ │ ├─36151 nginx: worker process
           │ │ ├─36187 nginx: worker process
           │ │ ├─36226 nginx: worker process
           │ │ ├─36236 nginx: worker process
           │ │ ├─36288 nginx: worker process
           │ │ ├─36316 nginx: worker process
           │ │ ├─36353 nginx: worker process
           │ │ ├─36384 nginx: worker process
           │ │ ├─36417 nginx: worker process
           │ │ ├─36442 nginx: worker process
           │ │ ├─36481 nginx: worker process
           │ │ ├─36518 nginx: worker process
           │ │ ├─36554 nginx: worker process
           │ │ ├─36586 nginx: worker process
           │ │ ├─36622 nginx: worker process
           │ │ ├─36651 nginx: worker process
           │ │ ├─36684 nginx: worker process
           │ │ ├─36716 nginx: worker process
           │ │ ├─36751 nginx: worker process
           │ │ ├─36787 nginx: worker process
           │ │ ├─36806 nginx: worker process
           │ │ ├─36851 nginx: worker process
           │ │ ├─36881 nginx: worker process
           │ │ ├─36921 nginx: worker process
           │ │ ├─36954 nginx: worker process
           │ │ ├─36984 nginx: worker process
           │ │ ├─37020 nginx: worker process
           │ │ ├─37047 nginx: worker process
           │ │ ├─37084 nginx: worker process
           │ │ ├─37109 nginx: worker process
           │ │ ├─37150 nginx: worker process
           │ │ ├─37179 nginx: worker process
           │ │ ├─37212 nginx: worker process
           │ │ ├─37244 nginx: worker process
           │ │ ├─37282 nginx: worker process
           │ │ ├─37317 nginx: worker process
           │ │ ├─37350 nginx: worker process
           │ │ └─37383 nginx: cache manager process
           │ ├─088d676e25b836ba1cad2c6a51c82f123437319e49f2e76950f72abfe9bf1927
           │ │ └─40452 /pause
           │ ├─0aa4eddeb5b3286b2b60bf5210fa41e2b8bc42f89958b23ba2038700b985324e
           │ │ └─13795 /pause
           │ ├─0cc8fb35afbc569f879b5340cf38cbc9ddc87773fce6b67a9c7fc07172660322
           │ │ └─15073 /coredns -conf /etc/coredns/Corefile
           │ ├─0d7155d211873ab9dd56c492ca84a6e4b4923853bba2d738ec0b87355cd4d5c7
           │ │ └─24571 /pause
           │ ├─103288724dcc47b7a997f0c219c0fad5d24fe11321d3a9fbfa04ba4763764450
           │ │ └─42625 /pause
           │ ├─122f73268927d80259bbcdb7f8459c557a41c549eed251b9c19c06ea29fb9f3c
           │ │ └─13786 /pause
           │ ├─1af4c9c62ebee28328f5112ba9d76d6b74a48a6627e620d82f2d49d3c5a29efa
           │ │ ├─45204 bash /opt/srmpc/start.sh java -Dtmd.port=32845 -Drmp.mallocArenaMax=32 -Dkubernetes.namespace=cluster1 -DLOG_SERVER=logstash -XX:NativeMemoryTracking=summary -XX:+PrintGCDetails -XX:+PrintGCDateStamps -Xloggc:gc.log -XX:+UseGCLogFileRotation -XX:NumberOfGCLogFiles=5 -XX:GCLogFileSize=128K -Xms64m -Xmx1024m -Xshare:auto -XX:MaxMetaspaceSize=512m -XX:MaxHeapSize=1024m -XX:TieredStopAtLevel=1 -XX:+ExitOnOutOfMemoryError -XX:MaxHeapFreeRatio=30  -XX:MinHeapFreeRatio=10 -Djava.security.properties=/opt/harmonic/StreamRmpControllerCmd/java.security -jar /opt/harmonic/StreamRmpControllerCmd/StreamRmpControllerCmd.jar sample_stream_processing_engine v1 stream_processing 2F500BA9-D3B4-4332-BFA3-74BE6E00AAE2 zookeeper:2181 2F500BA9-D3B4-4332-BFA3-74BE6E00AAE2-1 /opt/harmonic/vos/voshome -1
           │ │ ├─45234 /usr/bin/python3 -s /usr/bin/supervisord -c /tmp/supervisor.conf -n
           │ │ ├─45268 /usr/bin/python3 /opt/srmpc/srmpc-watchdog
           │ │ ├─45269 /usr/share/filebeat/bin/filebeat -path.home /usr/share/filebeat -path.config /etc/filebeat -path.data /var/lib/filebeat -path.logs /var/log/filebeat
           │ │ ├─45270 /opt/goofys/goofys -f -o nonempty --stat-cache-ttl 0 --type-cache-ttl 0 --profile minio --endpoint http://minio-service:9000 vos-home-cluster1:app_data/harmonicinc.vos.configure/dolby_vision /graphics/dolby_vision
           │ │ ├─45271 /opt/goofys/goofys -f -o nonempty --stat-cache-ttl 0 --type-cache-ttl 0 --profile minio --endpoint http://minio-service:9000 vos-home-cluster1:app_data/harmonicinc.vos.configure/fonts /graphics/fonts
           │ │ ├─45272 /opt/goofys/goofys -f -o nonempty --stat-cache-ttl 0 --type-cache-ttl 0 --profile minio --endpoint http://minio-service:9000 vos-home-cluster1:app_data/harmonicinc.vos.configure/irdeto_license /opt/irdeto
           │ │ ├─45273 /opt/goofys/goofys -f -o nonempty --stat-cache-ttl 0 --type-cache-ttl 0 --profile minio --endpoint http://minio-service:9000 vos-home-cluster1:app_data/harmonicinc.vos.configure/lut /graphics/lut
           │ │ ├─45277 /opt/goofys/goofys -f -o nonempty --stat-cache-ttl 0 --type-cache-ttl 0 --profile minio --endpoint http://minio-service:9000 vos-home-cluster1:app_data/harmonicinc.vos.configure/sl_hdr_config /graphics/sl_hdr_config
           │ │ ├─45280 /opt/goofys/goofys -f -o nonempty --profile minio --endpoint http://minio-service:9000 vos-home-cluster1:app_data /opt/harmonic/vos/voshome/app_data
           │ │ ├─45297 /usr/local/bin/xinit /usr/local/etc/X11/xinitrc -- /usr/local/bin/Xorg -xkbdir /usr/local/share/X11/xkb -nolisten local -logverbose 0 vt7 -sharevts :0.0
           │ │ ├─45306 java -Dtmd.port=32845 -Drmp.mallocArenaMax=32 -Dkubernetes.namespace=cluster1 -DLOG_SERVER=logstash -XX:NativeMemoryTracking=summary -XX:+PrintGCDetails -XX:+PrintGCDateStamps -Xloggc:gc.log -XX:+UseGCLogFileRotation -XX:NumberOfGCLogFiles=5 -XX:GCLogFileSize=128K -Xms64m -Xmx1024m -Xshare:auto -XX:MaxMetaspaceSize=512m -XX:MaxHeapSize=1024m -XX:TieredStopAtLevel=1 -XX:+ExitOnOutOfMemoryError -XX:MaxHeapFreeRatio=30 -XX:MinHeapFreeRatio=10 -Djava.security.properties=/opt/harmonic/StreamRmpControllerCmd/java.security -jar /opt/harmonic/StreamRmpControllerCmd/StreamRmpControllerCmd.jar sample_stream_processing_engine v1 stream_processing 2F500BA9-D3B4-4332-BFA3-74BE6E00AAE2 zookeeper:2181 2F500BA9-D3B4-4332-BFA3-74BE6E00AAE2-1 /opt/harmonic/vos/voshome -1
           │ │ ├─45326 /usr/local/bin/Xorg :0 -xkbdir /usr/local/share/X11/xkb -nolisten local -logverbose 0 vt7 -sharevts :0.0
           │ │ ├─45548 sh /usr/local/etc/X11/xinitrc
           │ │ ├─45550 /usr/local/bin/xterm -g 90x50+0+0 -bg black -fg yellow -fn 10x20
           │ │ ├─45562 /usr/local/bin/dwm
           │ │ ├─46746 bash
           │ │ ├─46812 java -Xmx64m -Xshare:auto -XX:MaxMetaspaceSize=64m -XX:MaxHeapSize=64m -XX:TieredStopAtLevel=1 -Dlog_file_path=/var/log/rmp-controller-log -DLOG_SERVER=logstash -Dvos.home=/opt/harmonic/vos/voshome -jar /opt/harmonic/EsamAdapter/EsamAdapter.jar
           │ │ ├─47325 /opt/harmonic/rmp/RmpWorker -Id=3eca9eb6-6ee8-4946-9308-6de30cc98438 -KernelPath=/opt/harmonic/rmp/libRmpKernel.so -RpiPath=/opt/harmonic/rmp/rpi -rpiMessagePlugInPath=/opt/harmonic/rmp/rpiMessagePlugin -logPath=/var/log/rmp-controller-log/rmp-worker.log -velocimeterPath=/opt/harmonic/rmp/libRmpVelocimeter.so -rmpServiceId=sspe-2F500BA9-D3B4-4332-BFA3-74BE6E00AAE2 -serverIP=127.0.0.1 -serverPort=43081 -oplanOutputPath=/var/log/rmp-controller-log/rmp-processing.opl -logMDCs=service_id:2F500BA9-D3B4-4332-BFA3-74BE6E00AAE2 -enableRuntimeStateRestorer=true -enableRuntimeStateUpdater=true
           │ │ └─47626 dbus-daemon --fork --config-file /etc/dbus-1/dbus-uvp-session.conf
           │ ├─1d580c48cbcd18e29e222541f0507c3fb28987d91d35b55bd98d39e7d7c21917
           │ │ └─30399 /pause
           │ ├─204e2017be70a182f8109ac13fcc46cc8373a95064050894e81e2a102817d84b
           │ │ └─16955 /usr/local/bin/kube-rbac-proxy --logtostderr --secure-listen-address=:8443 --tls-cipher-suites=TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256,TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384,TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384,TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305,TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305 --upstream=http://127.0.0.1:8080/
           │ ├─213f9d22cbd9897c1da8e5967af955d56e30f79c43d2fd15acb7e487c9d3c537
           │ │ ├─46627 /usr/bin/python3 -s /usr/bin/supervisord -c supervisord.conf
           │ │ ├─47208 /opt/harmonic/esam_pois/esam-oob-adapter --port=8088
           │ │ ├─47209 /opt/harmonic/esam_pois/esam-translator --port=9999 --external-port=19999 --dbconn=postgres://vos:vossdk@pgdb:5432/vos?sslmode=disable
           │ │ ├─47210 java -Xmx512m -jar esam.pois.worker.jar
           │ │ └─47211 /usr/share/filebeat/bin/filebeat -path.home /usr/share/filebeat -path.config /etc/filebeat -path.data /var/lib/filebeat -path.logs /var/log/filebeat
           │ ├─21d714746405044fd80d02b52d0146c4a8e8ea066da93341094adb56168511e9
           │ │ ├─14715 nginx: master process nginx -g daemon off;
           │ │ ├─14943 nginx: worker process
           │ │ ├─14944 nginx: worker process
           │ │ ├─14945 nginx: worker process
           │ │ ├─14946 nginx: worker process
           │ │ ├─14947 nginx: worker process
           │ │ ├─14948 nginx: worker process
           │ │ ├─14949 nginx: worker process
           │ │ ├─14951 nginx: worker process
           │ │ ├─14954 nginx: worker process
           │ │ ├─14958 nginx: worker process
           │ │ ├─14960 nginx: worker process
           │ │ ├─14963 nginx: worker process
           │ │ ├─14964 nginx: worker process
           │ │ ├─14965 nginx: worker process
           │ │ ├─14966 nginx: worker process
           │ │ ├─14967 nginx: worker process
           │ │ ├─14968 nginx: worker process
           │ │ ├─14969 nginx: worker process
           │ │ ├─14970 nginx: worker process
           │ │ ├─14971 nginx: worker process
           │ │ ├─14972 nginx: worker process
           │ │ ├─14973 nginx: worker process
           │ │ ├─14974 nginx: worker process
           │ │ ├─14975 nginx: worker process
           │ │ ├─14976 nginx: worker process
           │ │ ├─14977 nginx: worker process
           │ │ ├─14978 nginx: worker process
           │ │ ├─14979 nginx: worker process
           │ │ ├─14980 nginx: worker process
           │ │ ├─14981 nginx: worker process
           │ │ ├─14982 nginx: worker process
           │ │ ├─14983 nginx: worker process
           │ │ ├─14984 nginx: worker process
           │ │ ├─14985 nginx: worker process
           │ │ ├─14986 nginx: worker process
           │ │ ├─14987 nginx: worker process
           │ │ ├─14988 nginx: worker process
           │ │ ├─14989 nginx: worker process
           │ │ ├─14990 nginx: worker process
           │ │ ├─14991 nginx: worker process
           │ │ ├─14992 nginx: worker process
           │ │ ├─14993 nginx: worker process
           │ │ ├─14994 nginx: worker process
           │ │ ├─14995 nginx: worker process
           │ │ ├─14996 nginx: worker process
           │ │ ├─14997 nginx: worker process
           │ │ ├─14998 nginx: worker process
           │ │ ├─14999 nginx: worker process
           │ │ ├─15000 nginx: worker process
           │ │ ├─15001 nginx: worker process
           │ │ ├─15002 nginx: worker process
           │ │ ├─15003 nginx: worker process
           │ │ ├─15004 nginx: worker process
           │ │ ├─15005 nginx: worker process
           │ │ ├─15006 nginx: worker process
           │ │ ├─15007 nginx: worker process
           │ │ ├─15008 nginx: worker process
           │ │ ├─15009 nginx: worker process
           │ │ ├─15010 nginx: worker process
           │ │ ├─15011 nginx: worker process
           │ │ ├─15012 nginx: worker process
           │ │ ├─15013 nginx: worker process
           │ │ ├─15014 nginx: worker process
           │ │ ├─15015 nginx: worker process
           │ │ ├─15016 nginx: worker process
           │ │ ├─15017 nginx: worker process
           │ │ ├─15018 nginx: worker process
           │ │ ├─15019 nginx: worker process
           │ │ ├─15020 nginx: worker process
           │ │ ├─15021 nginx: worker process
           │ │ ├─15022 nginx: worker process
           │ │ ├─15023 nginx: worker process
           │ │ ├─15024 nginx: worker process
           │ │ ├─15025 nginx: worker process
           │ │ ├─15026 nginx: worker process
           │ │ ├─15027 nginx: worker process
           │ │ ├─15028 nginx: worker process
           │ │ ├─15029 nginx: worker process
           │ │ ├─15030 nginx: worker process
           │ │ ├─15031 nginx: worker process
           │ │ ├─15032 nginx: worker process
           │ │ ├─15033 nginx: worker process
           │ │ ├─15034 nginx: worker process
           │ │ ├─15035 nginx: worker process
           │ │ ├─15036 nginx: worker process
           │ │ ├─15037 nginx: worker process
           │ │ ├─15038 nginx: worker process
           │ │ ├─15040 nginx: worker process
           │ │ ├─15041 nginx: worker process
           │ │ ├─15042 nginx: worker process
           │ │ ├─15043 nginx: worker process
           │ │ ├─15044 nginx: worker process
           │ │ ├─15045 nginx: worker process
           │ │ ├─15046 nginx: worker process
           │ │ ├─15047 nginx: worker process
           │ │ ├─15048 nginx: worker process
           │ │ ├─15049 nginx: worker process
           │ │ ├─15050 nginx: worker process
           │ │ ├─15051 nginx: worker process
           │ │ ├─15052 nginx: worker process
           │ │ ├─15053 nginx: worker process
           │ │ ├─15054 nginx: worker process
           │ │ ├─15055 nginx: worker process
           │ │ ├─15056 nginx: worker process
           │ │ ├─15057 nginx: worker process
           │ │ ├─15058 nginx: worker process
           │ │ ├─15059 nginx: worker process
           │ │ ├─15060 nginx: worker process
           │ │ ├─15061 nginx: worker process
           │ │ ├─15062 nginx: worker process
           │ │ ├─15063 nginx: worker process
           │ │ ├─15064 nginx: worker process
           │ │ ├─15065 nginx: worker process
           │ │ ├─15066 nginx: worker process
           │ │ ├─15067 nginx: worker process
           │ │ ├─15068 nginx: worker process
           │ │ ├─15069 nginx: worker process
           │ │ ├─15071 nginx: worker process
           │ │ ├─15072 nginx: worker process
           │ │ ├─15074 nginx: worker process
           │ │ ├─15075 nginx: worker process
           │ │ ├─15078 nginx: worker process
           │ │ ├─15082 nginx: worker process
           │ │ ├─15083 nginx: worker process
           │ │ ├─15084 nginx: worker process
           │ │ ├─15085 nginx: worker process
           │ │ ├─15086 nginx: worker process
           │ │ ├─15087 nginx: worker process
           │ │ ├─15088 nginx: worker process
           │ │ ├─15089 nginx: worker process
           │ │ ├─15090 nginx: worker process
           │ │ ├─15091 nginx: worker process
           │ │ ├─15092 nginx: worker process
           │ │ ├─15093 nginx: worker process
           │ │ ├─15094 nginx: worker process
           │ │ ├─15095 nginx: worker process
           │ │ ├─15096 nginx: worker process
           │ │ ├─15097 nginx: worker process
           │ │ ├─15098 nginx: worker process
           │ │ ├─15099 nginx: worker process
           │ │ ├─15100 nginx: worker process
           │ │ ├─15101 nginx: worker process
           │ │ ├─15102 nginx: worker process
           │ │ ├─15103 nginx: worker process
           │ │ ├─15104 nginx: worker process
           │ │ ├─15105 nginx: worker process
           │ │ ├─15106 nginx: worker process
           │ │ ├─15107 nginx: worker process
           │ │ ├─15108 nginx: worker process
           │ │ ├─15110 nginx: worker process
           │ │ ├─15111 nginx: worker process
           │ │ ├─15112 nginx: worker process
           │ │ ├─15116 nginx: worker process
           │ │ ├─15119 nginx: worker process
           │ │ ├─15120 nginx: worker process
           │ │ ├─15121 nginx: worker process
           │ │ ├─15122 nginx: worker process
           │ │ ├─15123 nginx: worker process
           │ │ ├─15124 nginx: worker process
           │ │ ├─15126 nginx: worker process
           │ │ ├─15127 nginx: worker process
           │ │ ├─15128 nginx: worker process
           │ │ ├─15129 nginx: worker process
           │ │ ├─15130 nginx: worker process
           │ │ ├─15131 nginx: worker process
           │ │ ├─15132 nginx: worker process
           │ │ ├─15133 nginx: worker process
           │ │ └─15134 nginx: worker process
           │ ├─281948bceabba08c70eb15e4c693c86d3a1d4f4e96e38b8cf2590c39f52e9bb6
           │ │ ├─   692 postgres: v1: vos vos 198.51.100.20(52122) idle
           │ │ ├─  1036 postgres: v1: vos vos 198.51.100.20(34018) idle
           │ │ ├─  1046 postgres: v1: vos vos 198.51.100.20(34028) idle
           │ │ ├─  1438 postgres: v1: vos vos 198.51.100.20(56684) idle
           │ │ ├─  1578 postgres: v1: vos vos 198.51.100.21(52666) idle
           │ │ ├─  1642 postgres: v1: vos vos 198.51.100.20(58974) idle
           │ │ ├─  1672 postgres: v1: vos vos 198.51.100.21(42432) idle
           │ │ ├─  1685 postgres: v1: vos vos 198.51.100.20(58988) idle
           │ │ ├─  1723 postgres: v1: vos vos 198.51.100.21(42444) idle
           │ │ ├─  1880 postgres: v1: vos vos 198.51.100.20(58992) idle
           │ │ ├─  1977 postgres: v1: vos vos 198.51.100.20(58996) idle
           │ │ ├─  2005 postgres: v1: vos vos 198.51.100.20(35580) idle
           │ │ ├─  2182 postgres: v1: vos vos 198.51.100.22(37330) idle
           │ │ ├─  2436 postgres: v1: vos vos 198.51.100.22(37338) idle
           │ │ ├─  2484 postgres: v1: vos vos 198.51.100.20(53734) idle
           │ │ ├─  2580 postgres: v1: vos vos 198.51.100.21(49102) idle
           │ │ ├─  2736 postgres: v1: vos vos 198.51.100.22(51102) idle
           │ │ ├─  2742 postgres: v1: vos vos 198.51.100.20(53742) idle
           │ │ ├─  2743 postgres: v1: vos vos 198.51.100.22(51110) idle
           │ │ ├─  3006 postgres: v1: vos vos 198.51.100.20(52000) idle
           │ │ ├─  3101 postgres: v1: vos vos 198.51.100.20(40512) idle
           │ │ ├─  3126 postgres: v1: vos vos 198.51.100.22(47130) idle
           │ │ ├─  3551 postgres: v1: vos vos 198.51.100.21(35220) idle
           │ │ ├─  3703 postgres: v1: vos vos 198.51.100.20(45060) idle
           │ │ ├─  4085 postgres: v1: vos vos 198.51.100.22(32788) idle
           │ │ ├─  4160 postgres: v1: vos vos 198.51.100.21(48314) idle
           │ │ ├─  4412 postgres: v1: vos vos 198.51.100.20(44592) idle
           │ │ ├─  4607 postgres: v1: vos vos 198.51.100.22(54258) idle
           │ │ ├─  4888 postgres: v1: vos vos 198.51.100.20(50490) idle
           │ │ ├─  5444 postgres: v1: vos vos 198.51.100.20(38082) idle
           │ │ ├─  5629 postgres: v1: vos vos 198.51.100.22(44254) idle
           │ │ ├─  6971 postgres: v1: vos vos 198.51.100.20(41902) idle
           │ │ ├─  7812 postgres: v1: vos vos 198.51.100.20(56802) idle
           │ │ ├─  7813 postgres: v1: vos vos 198.51.100.20(56806) idle
           │ │ ├─  7814 postgres: v1: vos vos 198.51.100.20(56816) idle
           │ │ ├─  7815 postgres: v1: vos vos 198.51.100.20(56832) idle
           │ │ ├─ 11242 sleep 60
           │ │ ├─ 11243 sleep 60
           │ │ ├─ 11295 sleep 10
           │ │ ├─ 11296 sleep 10
           │ │ ├─ 25928 /usr/bin/dumb-init -c --rewrite 1:0 -- /bin/sh /launch.sh
           │ │ ├─ 25960 /bin/sh /launch.sh
           │ │ ├─ 26031 /usr/bin/runsvdir -P /etc/service
           │ │ ├─ 26156 runsv cron
           │ │ ├─ 26157 runsv patroni
           │ │ ├─ 26158 runsv backup_restore
           │ │ ├─ 26159 runsv cluster_manage
           │ │ ├─ 26160 runsv filebeat
           │ │ ├─ 26161 runsv init_db_for_vos
           │ │ ├─ 26162 runsv pgqd
           │ │ ├─ 26163 runsv replica_monitor
           │ │ ├─ 26164 svlogd -tt /var/log/cron
           │ │ ├─ 26165 svlogd -tt /var/log/backup_restore
           │ │ ├─ 26166 svlogd -tt /var/log/init_db_for_vos
           │ │ ├─ 26167 svlogd -tt /var/log/patroni
           │ │ ├─ 26168 svlogd -tt /var/log/cluster_manage
           │ │ ├─ 26169 svlogd -tt /var/log/replica_monitor
           │ │ ├─ 26170 svlogd -tt /var/log/pgqd
           │ │ ├─ 26171 svlogd -tt /var/log/filebeat
           │ │ ├─ 26172 /usr/sbin/cron -f
           │ │ ├─ 26173 python3 /usr/local/bin/init_db_for_vos.py --sleep
           │ │ ├─ 26174 /usr/bin/python3 /usr/local/bin/patroni
           │ │ ├─ 26175 /bin/bash /var/lib/database_middleware/rest_service/backup_restore/start.sh
           │ │ ├─ 26176 /bin/bash /var/lib/database_middleware/rest_service/cluster_manage/start.sh
           │ │ ├─ 26177 /usr/bin/pgqd /home/postgres/pgq_ticker.ini
           │ │ ├─ 26178 /bin/sh -e ./run
           │ │ ├─ 26179 ./filebeat -e --strict.perms=false
           │ │ ├─ 26185 sleep infinity
           │ │ ├─ 26207 /bin/bash /var/lib/database_middleware/rest_service/cluster_manage/start.sh
           │ │ ├─ 26208 /bin/bash /var/lib/database_middleware/rest_service/backup_restore/start.sh
           │ │ ├─ 26227 python3 -u /var/lib/database_middleware/rest_service/cluster_manage/DatabaseMiddlewareClusterManageService.py 0.0.0.0 5440
           │ │ ├─ 26228 python3 -u /var/lib/database_middleware/rest_service/backup_restore/DatabaseMiddlewareBackupRestoreService.py 0.0.0.0 5438
           │ │ ├─ 26474 postgres -D /home/postgres/pgroot/pgdata/standalone --config-file=/home/postgres/pgroot/pgdata/standalone/postgresql.conf --listen_addresses=198.51.100.16 --port=5432 --cluster_name=v1 --wal_level=replica --hot_standby=on --max_connections=200 --max_wal_senders=10 --max_prepared_transactions=0 --max_locks_per_transaction=64 --track_commit_timestamp=off --max_replication_slots=10 --max_worker_processes=8 --wal_log_hints=on
           │ │ ├─ 26476 postgres: v1: logger process   
           │ │ ├─ 26478 postgres: v1: bgworker: bg_mon   
           │ │ ├─ 26481 postgres: v1: checkpointer process   
           │ │ ├─ 26482 postgres: v1: writer process   
           │ │ ├─ 26483 postgres: v1: stats collector process   
           │ │ ├─ 26487 postgres: v1: postgres postgres [local] idle
           │ │ ├─ 26504 postgres: v1: wal writer process   
           │ │ ├─ 26505 postgres: v1: autovacuum launcher process   
           │ │ ├─ 26506 postgres: v1: archiver process   last was 000000100000000F00000059
           │ │ ├─ 26507 postgres: v1: bgworker: pg_cron launcher   
           │ │ ├─ 26508 postgres: v1: bgworker: TimescaleDB Background Worker Launcher   
           │ │ ├─ 26510 postgres: v1: bgworker: logical replication launcher   
           │ │ ├─ 26524 tail -f /var/log/backup_restore/current /var/log/cluster_manage/current /var/log/cron/current /var/log/filebeat/current /var/log/init_db_for_vos/current /var/log/patroni/current /var/log/pgqd/current /var/log/replica_monitor/current
           │ │ ├─ 38190 postgres: v1: vos vos 198.51.100.9(38916) idle
           │ │ ├─ 38191 postgres: v1: vos vos 198.51.100.9(38930) idle
           │ │ ├─ 47500 postgres: v1: vos vos 198.51.100.1(63158) idle
           │ │ ├─ 47525 postgres: v1: vos vos 198.51.100.1(5896) idle
           │ │ ├─ 47526 postgres: v1: vos vos 198.51.100.1(6944) idle
           │ │ ├─ 47528 postgres: v1: vos vos 198.51.100.1(4058) idle
           │ │ ├─ 49820 postgres: v1: vos vos 198.51.100.28(56246) idle
           │ │ ├─ 49821 postgres: v1: vos vos 198.51.100.28(56248) idle
           │ │ ├─ 49823 postgres: v1: vos vos 198.51.100.28(56260) idle
           │ │ ├─ 49825 postgres: v1: vos vos 198.51.100.28(56262) idle
           │ │ ├─ 49826 postgres: v1: vos vos 198.51.100.28(56264) idle
           │ │ ├─ 49827 postgres: v1: vos vos 198.51.100.28(56272) idle
           │ │ ├─ 49828 postgres: v1: vos vos 198.51.100.28(56282) idle
           │ │ ├─ 49829 postgres: v1: vos vos 198.51.100.28(56290) idle
           │ │ ├─ 49830 postgres: v1: vos vos 198.51.100.28(56292) idle
           │ │ ├─ 49831 postgres: v1: vos vos 198.51.100.28(56304) idle
           │ │ ├─455454 postgres: v1: vos vos 198.51.100.21(42792) idle
           │ │ ├─496318 postgres: v1: vos vos 198.51.100.21(41306) idle
           │ │ ├─497281 postgres: v1: vos vos 198.51.100.20(33370) idle
           │ │ ├─497309 postgres: v1: vos vos 198.51.100.20(33384) idle
           │ │ ├─497959 postgres: v1: vos vos 198.51.100.21(50216) idle
           │ │ ├─498229 postgres: v1: vos vos 198.51.100.20(59992) idle
           │ │ ├─498252 postgres: v1: vos vos 198.51.100.20(60002) idle
           │ │ ├─498475 postgres: v1: vos vos 198.51.100.22(32906) idle
           │ │ ├─499573 postgres: v1: vos vos 198.51.100.21(37024) idle
           │ │ ├─499717 postgres: v1: vos vos 198.51.100.22(34004) idle
           │ │ ├─499880 postgres: v1: vos vos 198.51.100.20(52088) idle
           │ │ ├─499965 postgres: v1: vos vos 198.51.100.20(52102) idle
           │ │ └─499993 postgres: v1: vos vos 198.51.100.20(52118) idle
           │ ├─295e2808e5e27f8c47f04bab1615d4a6cc6d426d0019d6298316c10262abd4a5
           │ │ ├─40572 java -Xmx512m -Dlog.server=logstash -Dlogging.config=/opt/harmonic/fileutils/log4j2.xml -XX:+ExitOnOutOfMemoryError -jar /opt/harmonic/fileutils/fileutils-worker.jar
           │ │ └─40643 /sbin/rpcbind
           │ ├─2ce57bc5a2491a58dd6362918c37ac6e80a3bd093e610da0db3cc4d9a6bc9fda
           │ │ └─24130 /dashboard --insecure-bind-address=0.0.0.0 --bind-address=0.0.0.0 --namespace=kube-system --tls-cert-file=kubernetes.pem --tls-key-file=kubernetes-key.pem --enable-skip-login --authentication-mode=token --token-ttl=900
           │ ├─386b74e96bce466252755a2db8492a0aa80fb81245a7e11ab02cadcb615d4e35
           │ │ ├─11577 sleep 5
           │ │ ├─14293 /opt/dektec/sbin/tini -g -- /opt/dektec/sbin/DtapiServiced-wrapper.sh
           │ │ ├─14678 /bin/bash /opt/dektec/sbin/DtapiServiced-wrapper.sh
           │ │ └─14866 /usr/sbin/DtapiServiced /var/run/DtapiServiced.pid
           │ ├─3ad1a6a103e84d28055fe44b77c7699f1de9ca54723853a94bc8fe5f7309cb85
           │ │ └─49632 /pause
           │ ├─3ddcf70649aeed995d1fdce890e993ead6f5b073b7de2211d2f93de524d2a7b5
           │ │ ├─42895 /usr/bin/python3 -s /usr/bin/supervisord -c /supervisord.conf
           │ │ ├─43445 python3 /opt/harmonic/unified-origin-engine/bin/supervisord-event-handler
           │ │ ├─43447 /usr/share/filebeat/bin/filebeat --path.home /usr/share/filebeat --path.config /etc/filebeat --path.data /var/lib/filebeat --path.logs /var/log/filebeat -E output.logstash.hosts=['logstash:5044']
           │ │ ├─43449 java -Xms50m -Xmx512m -XX:+ExitOnOutOfMemoryError -Xloggc:/var/log/gc.log -XX:+UseGCLogFileRotation -XX:NumberOfGCLogFiles=3 -XX:GCLogFileSize=1M -classpath /opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/remote-daemon-executor.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/potf-server-config-lib.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/KMSClientLib.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/cpixlib.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kms-soap-stub.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jaxb-impl-2.2.5.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/commons-logging-1.2.1.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/xmlsec-1.5.6.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/commons-codec-1.14.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jersey-apache-connector-2.5.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/httpcore-4.3.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/httpclient-4.3.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/guava-14.0.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jackson-dataformat-xml-2.10.3.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/stax2-api-4.2.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/RmpControllerSDK.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/unified-origin-engine-library.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/StreamRmpControllerCmd.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/rmp-controller-models.jar:/opt/harmonic/MediaStreamPackageController/remote-da
emon-executor/lib/commons-math3-3.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/dnsjava-2.1.8.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jersey-container-grizzly2-http-2.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/grizzly-http-server-2.3.33.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jersey-container-jdk-http-2.7.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/protostuff-core-1.5.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/protostuff-runtime-1.5.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jackson-jaxrs-json-provider-2.10.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jersey-media-json-jackson-2.7.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jackson-jaxrs-1.9.13.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/velocity-1.7.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/joda-time-2.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/commons-validator-1.6.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/log4j-slf4j-impl-2.17.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/zookeeper-3.5.6.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/cron-utils-9.2.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-client-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-httpclient-okhttp-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-client-api-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/curator-recipes-5.0.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/curator-framework-5.0.0.jar:/o
pt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/curator-client-5.0.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-gatewayapi-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-resource-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-rbac-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-admissionregistration-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-apps-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-autoscaling-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-apiextensions-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-batch-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-certificates-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-coordination-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-discovery-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-events-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-extensions-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-flowcontrol-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-networking-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-metrics-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-policy-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-m
odel-scheduling-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-storageclass-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-node-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-core-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-common-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/slf4j-api-1.7.36.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/log4j-1.2-api-2.17.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/simpleclient_servlet-0.12.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/simpleclient_servlet_common-0.12.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/simpleclient_common-0.12.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/simpleclient-0.12.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jersey-server-2.7.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jersey-client-2.7.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jersey-common-2.7.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/javax.ws.rs-api-2.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/log4j-layout-template-json-2.17.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/asset-mgmt-grpc-library.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/commons-configuration-1.10.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/commons-lang-2.6.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/gson-2.2.4.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/commons-io-2.2.jar:/opt/harmonic/MediaStr
eamPackageController/remote-daemon-executor/lib/jackson-module-jaxb-annotations-2.10.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jackson-jaxrs-base-2.10.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/zjsonpatch-0.3.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jackson-dataformat-yaml-2.14.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jackson-datatype-jsr310-2.14.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jackson-databind-2.14.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jackson-annotations-2.14.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/property-binder-4.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/grizzly-http-2.3.33.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/hk2-locator-2.2.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/javax.inject-2.2.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/protostuff-collectionschema-1.5.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/protostuff-api-1.5.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jackson-xc-1.9.13.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jackson-mapper-asl-1.9.13.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jackson-core-asl-1.9.13.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/commons-beanutils-1.9.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/commons-collections-3.2.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/commons-digester-1.8.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/commons-logging-1.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daem
on-executor/lib/log4j-core-2.17.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/log4j-api-2.17.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/simpleclient_tracer_otel-0.12.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/simpleclient_tracer_otel_agent-0.12.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/zookeeper-jute-3.5.6.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/audience-annotations-0.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/netty-handler-4.1.42.Final.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/netty-transport-native-epoll-4.1.42.Final.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jackson-core-2.14.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jakarta.el-3.0.4.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/grizzly-framework-2.3.33.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/javax.annotation-api-1.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jersey-guava-2.7.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/hk2-api-2.2.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/osgi-resource-locator-1.0.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/validation-api-1.1.0.Final.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jakarta.xml.bind-api-2.3.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jakarta.activation-api-1.2.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/snakeyaml-engine-2.6.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/logging-interceptor-3.12.12.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/okht
tp-3.12.12.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/simpleclient_tracer_common-0.12.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/netty-codec-4.1.42.Final.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/netty-transport-native-unix-common-4.1.42.Final.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/netty-transport-4.1.42.Final.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/netty-buffer-4.1.42.Final.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/netty-resolver-4.1.42.Final.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/netty-common-4.1.42.Final.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/hk2-utils-2.2.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/aopalliance-repackaged-2.2.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/javassist-3.18.1-GA.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/snakeyaml-1.33.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/okio-1.15.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/guava-27.0.1-jre.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/javax.inject-1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/failureaccess-1.0.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/listenablefuture-9999.0-empty-to-avoid-conflict-with-guava.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jsr305-3.0.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/checker-qual-2.5.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/error_prone_annotations-2.2.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/j2objc-annotations-1.1.jar:/
opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/animal-sniffer-annotations-1.17.jar com.harmonicinc.remotedaemonexecutor.RemoteDaemonExecutor retention-worker MEDIAGRID backupStorageIsNotConfigured
           │ │ └─44323 /opt/harmonic/MediaStreamPackageController/rmp/RetentionWorker
           │ ├─42840c0de72c59943724e2371a66aadbd0d28beaf75a202ec52c78ab12091840
           │ │ └─44651 /pause
           │ ├─4410a4c85c7991ba762df13284942daddcb3f54a86de129e666c5728aaaa401c
           │ │ └─16163 /pause
           │ ├─4756130fd5ad708a904a99a0cad190c227812ba5f85f488054b470d5d9ab4fbb
           │ │ └─42622 /pause
           │ ├─61c556810ef8dbb1aed7ea76255af0833233248a82e667d07e0b879d365b778f
           │ │ ├─30447 bash /usr/bin/runserver --mode=logstash --log4j_input_port=4560 --logshipper_input_port=5514 --es_cluster_http_port=9200
           │ │ ├─30473 python3 /usr/bin/runserver.py --mode=logstash --log4j_input_port=4560 --logshipper_input_port=5514 --es_cluster_http_port=9200
           │ │ ├─31370 java -Xms10m -Xmx100m -XX:+ExitOnOutOfMemoryError -XX:+UseSerialGC -XX:MinHeapFreeRatio=10 -XX:MaxHeapFreeRatio=20 -Dmodes=logstash -Delasticsearch.host=elasticsearch -Delasticsearch.http_port=9200 -Dzookeeper=zookeeper:2181 -jar /opt/harmonic/logstash-controller/LogstashController.jar
           │ │ ├─31515 rsyslogd -n
           │ │ └─44896 /opt/logstash/jdk/bin/java -Xms500m -Xmx500m -XX:+UseConcMarkSweepGC -XX:CMSInitiatingOccupancyFraction=75 -XX:+UseCMSInitiatingOccupancyOnly -Djava.awt.headless=true -Dfile.encoding=UTF-8 -Djdk.io.File.enableADS=true -Djruby.compile.invokedynamic=true -Djruby.jit.threshold=0 -Djruby.regexp.interruptible=true -XX:-HeapDumpOnOutOfMemoryError -Djava.security.egd=file:/dev/urandom -Dlog4j2.isThreadContextMapInheritable=true -Dlog4j2.formatMsgNoLookups=true -XX:OnOutOfMemoryError=shutdown_logstash -cp /opt/logstash/logstash-core/lib/jars/checker-qual-3.37.0.jar:/opt/logstash/logstash-core/lib/jars/commons-codec-1.14.jar:/opt/logstash/logstash-core/lib/jars/commons-compiler-3.1.0.jar:/opt/logstash/logstash-core/lib/jars/commons-logging-1.2.jar:/opt/logstash/logstash-core/lib/jars/error_prone_annotations-2.21.1.jar:/opt/logstash/logstash-core/lib/jars/failureaccess-1.0.1.jar:/opt/logstash/logstash-core/lib/jars/google-java-format-1.1.jar:/opt/logstash/logstash-core/lib/jars/guava-32.1.3-jre.jar:/opt/logstash/logstash-core/lib/jars/j2objc-annotations-2.8.jar:/opt/logstash/logstash-core/lib/jars/jackson-annotations-2.14.1.jar:/opt/logstash/logstash-core/lib/jars/jackson-core-2.14.1.jar:/opt/logstash/logstash-core/lib/jars/jackson-databind-2.14.1.jar:/opt/logstash/logstash-core/lib/jars/jackson-dataformat-cbor-2.14.1.jar:/opt/logstash/logstash-core/lib/jars/jackson-dataformat-yaml-2.14.1.jar:/opt/logstash/logstash-core/lib/jars/janino-3.1.0.jar:/opt/logstash/logstash-core/lib/jars/javassist-3.26.0-GA.jar:/opt/logstash/logstash-core/lib/jars/jruby-complete-9.2.20.1.jar:/opt/logstash/logstash-core/lib/jars/jsr305-3.0.2.jar:/opt/logstash/logstash-core/lib/jars/listenablefuture-9999.0-empty-to-avoid-conflict-with-guava.jar:/opt/logstash/logstash-core/lib/jars/log4j-api-2.17.1.jar:/opt/logstash/logstash-core/lib/jars/log4j-core-2.17.1.jar:/opt/logstash/logstash-core/lib/jars/log4j-jcl-2.17.1.jar:/opt/logstash/logstash-core/lib/jars/log4j-slf4j-impl-2.17.1.j
ar:/opt/logstash/logstash-core/lib/jars/logstash-core.jar:/opt/logstash/logstash-core/lib/jars/org.eclipse.core.commands-3.6.0.jar:/opt/logstash/logstash-core/lib/jars/org.eclipse.core.contenttype-3.4.100.jar:/opt/logstash/logstash-core/lib/jars/org.eclipse.core.expressions-3.4.300.jar:/opt/logstash/logstash-core/lib/jars/org.eclipse.core.filesystem-1.3.100.jar:/opt/logstash/logstash-core/lib/jars/org.eclipse.core.jobs-3.5.100.jar:/opt/logstash/logstash-core/lib/jars/org.eclipse.core.resources-3.7.100.jar:/opt/logstash/logstash-core/lib/jars/org.eclipse.core.runtime-3.7.0.jar:/opt/logstash/logstash-core/lib/jars/org.eclipse.equinox.app-1.3.100.jar:/opt/logstash/logstash-core/lib/jars/org.eclipse.equinox.common-3.6.0.jar:/opt/logstash/logstash-core/lib/jars/org.eclipse.equinox.preferences-3.4.1.jar:/opt/logstash/logstash-core/lib/jars/org.eclipse.equinox.registry-3.5.101.jar:/opt/logstash/logstash-core/lib/jars/org.eclipse.jdt.core-3.10.0.jar:/opt/logstash/logstash-core/lib/jars/org.eclipse.osgi-3.7.1.jar:/opt/logstash/logstash-core/lib/jars/org.eclipse.text-3.5.101.jar:/opt/logstash/logstash-core/lib/jars/reflections-0.9.11.jar:/opt/logstash/logstash-core/lib/jars/slf4j-api-1.7.30.jar:/opt/logstash/logstash-core/lib/jars/snakeyaml-1.33.jar org.logstash.Logstash -l /var/log -f /tmp/logstash-config --http.host 0.0.0.0 --pipeline.unsafe_shutdown -b 100 -u 5
           │ ├─6633e7d49882ec93c836eb9e2e2b57e3ede0d2cbfa2be9b417cc1f339e7241f5
           │ │ ├─14737 bash -c /opt/harmonic/mediagrid/install_mg_fsd.sh & supervisord -c /opt/harmonic/supervisor/supervisord.conf
           │ │ ├─14825 /usr/bin/python3 -s /usr/bin/supervisord -c /opt/harmonic/supervisor/supervisord.conf
           │ │ ├─17043 /opt/harmonic/looprecord/looprecord_proxy
           │ │ ├─17044 nginx: master process nginx -c /opt/harmonic/nginx/nginx.conf
           │ │ ├─17056 nginx: worker process
           │ │ ├─17057 nginx: worker process
           │ │ ├─17058 nginx: worker process
           │ │ └─17059 nginx: worker process
           │ ├─68567ef6b45f1055433d0f3e6fbcd064ef9bb98190dd051f69fd12cff808e30f
           │ │ └─41311 /pause
           │ ├─6ae50f2a676c640e423edd4d4bf7cff29770d8a9e82293d7113d8fcef8912e53
           │ │ └─48584 /pause
           │ ├─7149e251631bd2084f6135e3cb5b75287790d2f14422004e79a3c6f962e189b6
           │ │ ├─30480 bash /usr/bin/runserver --mode=kibana --kibana4_port=5601 --es_cluster_http_port=9200 --always_reconf_kibana
           │ │ ├─30573 python3 /usr/bin/runserver.py --mode=kibana --kibana4_port=5601 --es_cluster_http_port=9200 --always_reconf_kibana
           │ │ ├─30984 su kibana -c NODE_OPTIONS=--max-old-space-size=512 nohup /opt/kibana/bin/opensearch-dashboards -p 5601 -e http://elasticsearch:9200
           │ │ ├─30989 /opt/kibana/bin/../node/bin/node /opt/kibana/bin/../src/cli/dist -p 5601 -e http://elasticsearch:9200
           │ │ ├─30993 nginx: master process nginx
           │ │ ├─30994 nginx: worker process
           │ │ ├─30995 nginx: worker process
           │ │ ├─30996 nginx: worker process
           │ │ ├─30997 nginx: worker process
           │ │ └─31002 java -Xms10m -Xmx100m -XX:+ExitOnOutOfMemoryError -XX:+UseSerialGC -XX:MinHeapFreeRatio=10 -XX:MaxHeapFreeRatio=20 -Dmodes=kibana -Delasticsearch.host=elasticsearch -Delasticsearch.http_port=9200 -Dzookeeper=zookeeper:2181 -jar /opt/harmonic/logstash-controller/LogstashController.jar
           │ ├─722977738813a4b4dba7b54d441862d21d5970ae817c4d9a8ce4634487fc6687
           │ │ └─40454 /pause
           │ ├─77a2d3f8700f25d768cab0b31d993da60466e1cfeb0c21451e503ba6b4caa4f0
           │ │ └─25753 /pause
           │ ├─794245c75ab20b17bcd8b43a373fad017d3bd653db684caf3add11d14754276c
           │ │ └─48108 /usr/bin/java -jar /opt/harmonic/ndcp-adapter/ndcp-adapter.jar
           │ ├─7c297996517c2ebdd207bc733957e7ea43d63c8630dab617c28d6518db1f0282
           │ │ └─49662 java -Xmx512m -jar /opt/harmonic/scs/scs.jar zookeeper:2181 /vos-apps/simulcrypt/config /vos-apps/simulcrypt/output 1515
           │ ├─7df7db3e64340a5a722340e86d3ff57667247c0af41c3901b967d7282dcabb82
           │ │ └─25640 /pause
           │ ├─7e4e0613f3bdaa4e0315cfb163224a53ba0daec763fd26f1803fb7c0fe0b0c9a
           │ │ └─14478 /metrics-sidecar
           │ ├─80162971b81ee8611d86c3f717d3087bd3b3f9ccb2ceb918d89e0bbc087e8a45
           │ │ └─16920 /bin/operator --kubelet-service=kube-system/kubelet --prometheus-config-reloader=quay.io/prometheus-operator/prometheus-config-reloader:v0.57.0
           │ ├─82b42c91685186661522be88ab242d1fdfde63fc80e20d245e230c4a43b91076
           │ │ └─13925 /pause
           │ ├─83cbb97e4fb7c9801bb5d1334c503eb1231e1f7903d8ed8f73c60a62819d0bba
           │ │ └─14798 /bin/postgres_exporter --web.listen-address=0.0.0.0:9187 --extend.query-path /additional/custom-query.yaml
           │ ├─86235cf3a3f9e3cfc02fc7f4888ecb5e3cfa9c0ec97e426d2ae1ce316b32c197
           │ │ └─30781 /pause
           │ ├─8c433fb4c3fc17ad704bf6e54b939e9997d87dd176929ea899a6a96130f63b4f
           │ │ └─43286 /pause
           │ ├─8e6998139a624e92be3d5aec5ae450aef662eb452d6c1b589e89721e082040a1
           │ │ └─47096 /pause
           │ ├─8fbeae4f599c70001a79ff8190e4d36b6208c699e369b374812b39107c435fab
           │ │ ├─15753 bash /dynamic_zk/docker-entrypoint.sh
           │ │ ├─16135 python3 /dynamic_zk/dynamic_zk.py
           │ │ └─17619 /opt/java/openjdk/bin/java -Dzookeeper.log.dir=/zookeeper-volume/log -Dzookeeper.log.file=zookeeper-server.log -XX:OnOutOfMemoryError=kill -9 %p -cp /zookeeper/bin/../zookeeper-server/target/classes:/zookeeper/bin/../build/classes:/zookeeper/bin/../zookeeper-server/target/lib/*.jar:/zookeeper/bin/../build/lib/*.jar:/zookeeper/bin/../lib/zookeeper-prometheus-metrics-3.8.2.jar:/zookeeper/bin/../lib/zookeeper-jute-3.8.2.jar:/zookeeper/bin/../lib/zookeeper-3.8.2.jar:/zookeeper/bin/../lib/snappy-java-1.1.10.1.jar:/zookeeper/bin/../lib/slf4j-api-1.7.30.jar:/zookeeper/bin/../lib/simpleclient_servlet-0.9.0.jar:/zookeeper/bin/../lib/simpleclient_hotspot-0.9.0.jar:/zookeeper/bin/../lib/simpleclient_common-0.9.0.jar:/zookeeper/bin/../lib/simpleclient-0.9.0.jar:/zookeeper/bin/../lib/netty-transport-native-unix-common-4.1.94.Final.jar:/zookeeper/bin/../lib/netty-transport-native-epoll-4.1.94.Final.jar:/zookeeper/bin/../lib/netty-transport-classes-epoll-4.1.94.Final.jar:/zookeeper/bin/../lib/netty-transport-4.1.94.Final.jar:/zookeeper/bin/../lib/netty-resolver-4.1.94.Final.jar:/zookeeper/bin/../lib/netty-handler-4.1.94.Final.jar:/zookeeper/bin/../lib/netty-common-4.1.94.Final.jar:/zookeeper/bin/../lib/netty-codec-4.1.94.Final.jar:/zookeeper/bin/../lib/netty-buffer-4.1.94.Final.jar:/zookeeper/bin/../lib/metrics-core-4.1.12.1.jar:/zookeeper/bin/../lib/logback-core-1.2.10.jar:/zookeeper/bin/../lib/logback-classic-1.2.10.jar:/zookeeper/bin/../lib/jline-2.14.6.jar:/zookeeper/bin/../lib/jetty-util-ajax-9.4.51.v20230217.jar:/zookeeper/bin/../lib/jetty-util-9.4.51.v20230217.jar:/zookeeper/bin/../lib/jetty-servlet-9.4.51.v20230217.jar:/zookeeper/bin/../lib/jetty-server-9.4.51.v20230217.jar:/zookeeper/bin/../lib/jetty-security-9.4.51.v20230217.jar:/zookeeper/bin/../lib/jetty-io-9.4.51.v20230217.jar:/zookeeper/bin/../lib/jetty-http-9.4.51.v20230217.jar:/zookeeper/bin/../lib/javax.servlet-api-3.1.0.jar:/zookeeper/bin/../lib/jackson-databind-2.15.2.jar:/zookeeper/bin/..
/lib/jackson-core-2.15.2.jar:/zookeeper/bin/../lib/jackson-annotations-2.15.2.jar:/zookeeper/bin/../lib/commons-io-2.11.0.jar:/zookeeper/bin/../lib/commons-cli-1.5.0.jar:/zookeeper/bin/../lib/audience-annotations-0.12.0.jar:/zookeeper/bin/../zookeeper-*.jar:/zookeeper/bin/../zookeeper-server/src/main/resources/lib/*.jar:/zookeeper-volume/conf: -Xmx1000m -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.local.only=false org.apache.zookeeper.server.quorum.QuorumPeerMain /zookeeper-volume/conf/zoo.cfg
           │ ├─900eab49b8c2ba07c64b2d9d3e37821d1dcf1ef16dcd585fb3feae19db6519a2
           │ │ ├─11646 /usr/bin/coreutils --coreutils-prog-shebang=sleep /usr/bin/sleep 1
           │ │ ├─43429 /usr/bin/python3 -s /usr/bin/supervisord -c /opt/harmonic/Atm/supervisord.conf
           │ │ ├─43722 /bin/bash /opt/harmonic/Atm/stop-supervisor.sh
           │ │ ├─43723 /usr/share/filebeat/bin/filebeat --path.home /usr/share/filebeat --path.config /etc/filebeat --path.data /var/lib/filebeat --path.logs /var/log/filebeat -E output.logstash.hosts=['logstash:5044']
           │ │ ├─43724 /bin/bash /opt/harmonic/Atm/shutdown_delay.sh
           │ │ └─43725 /opt/harmonic/Atm/atm
           │ ├─92e14fd007887603c04b8210955ad6a1815b1fc9e0ee10ec0d6d862974e6748f
           │ │ ├─ 44959 bash /opt/srmpc/start.sh java -Dtmd.port=32818 -Drmp.mallocArenaMax=32 -Dkubernetes.namespace=cluster1 -DLOG_SERVER=logstash -XX:NativeMemoryTracking=summary -XX:+PrintGCDetails -XX:+PrintGCDateStamps -Xloggc:gc.log -XX:+UseGCLogFileRotation -XX:NumberOfGCLogFiles=5 -XX:GCLogFileSize=128K -Xms64m -Xmx1024m -Xshare:auto -XX:MaxMetaspaceSize=512m -XX:MaxHeapSize=1024m -XX:TieredStopAtLevel=1 -XX:+ExitOnOutOfMemoryError -XX:MaxHeapFreeRatio=30  -XX:MinHeapFreeRatio=10 -Djava.security.properties=/opt/harmonic/StreamRmpControllerCmd/java.security -jar /opt/harmonic/StreamRmpControllerCmd/StreamRmpControllerCmd.jar sample_stream_processing_engine v1 stream_processing 44DE0751-40F2-4A65-AB19-290B7B8BABB0 zookeeper:2181 44DE0751-40F2-4A65-AB19-290B7B8BABB0-1 /opt/harmonic/vos/voshome -1
           │ │ ├─ 44983 /usr/bin/python3 -s /usr/bin/supervisord -c /tmp/supervisor.conf -n
           │ │ ├─ 45062 /usr/bin/python3 /opt/srmpc/srmpc-watchdog
           │ │ ├─ 45063 /usr/share/filebeat/bin/filebeat -path.home /usr/share/filebeat -path.config /etc/filebeat -path.data /var/lib/filebeat -path.logs /var/log/filebeat
           │ │ ├─ 45064 /opt/goofys/goofys -f -o nonempty --stat-cache-ttl 0 --type-cache-ttl 0 --profile minio --endpoint http://minio-service:9000 vos-home-cluster1:app_data/harmonicinc.vos.configure/dolby_vision /graphics/dolby_vision
           │ │ ├─ 45065 /opt/goofys/goofys -f -o nonempty --stat-cache-ttl 0 --type-cache-ttl 0 --profile minio --endpoint http://minio-service:9000 vos-home-cluster1:app_data/harmonicinc.vos.configure/fonts /graphics/fonts
           │ │ ├─ 45066 /opt/goofys/goofys -f -o nonempty --stat-cache-ttl 0 --type-cache-ttl 0 --profile minio --endpoint http://minio-service:9000 vos-home-cluster1:app_data/harmonicinc.vos.configure/irdeto_license /opt/irdeto
           │ │ ├─ 45067 /opt/goofys/goofys -f -o nonempty --stat-cache-ttl 0 --type-cache-ttl 0 --profile minio --endpoint http://minio-service:9000 vos-home-cluster1:app_data/harmonicinc.vos.configure/lut /graphics/lut
           │ │ ├─ 45073 /opt/goofys/goofys -f -o nonempty --stat-cache-ttl 0 --type-cache-ttl 0 --profile minio --endpoint http://minio-service:9000 vos-home-cluster1:app_data/harmonicinc.vos.configure/sl_hdr_config /graphics/sl_hdr_config
           │ │ ├─ 45079 /opt/goofys/goofys -f -o nonempty --profile minio --endpoint http://minio-service:9000 vos-home-cluster1:app_data /opt/harmonic/vos/voshome/app_data
           │ │ ├─ 45090 /usr/local/bin/xinit /usr/local/etc/X11/xinitrc -- /usr/local/bin/Xorg -xkbdir /usr/local/share/X11/xkb -nolisten local -logverbose 0 vt7 -sharevts :0.0
           │ │ ├─ 45107 java -Dtmd.port=32818 -Drmp.mallocArenaMax=32 -Dkubernetes.namespace=cluster1 -DLOG_SERVER=logstash -XX:NativeMemoryTracking=summary -XX:+PrintGCDetails -XX:+PrintGCDateStamps -Xloggc:gc.log -XX:+UseGCLogFileRotation -XX:NumberOfGCLogFiles=5 -XX:GCLogFileSize=128K -Xms64m -Xmx1024m -Xshare:auto -XX:MaxMetaspaceSize=512m -XX:MaxHeapSize=1024m -XX:TieredStopAtLevel=1 -XX:+ExitOnOutOfMemoryError -XX:MaxHeapFreeRatio=30 -XX:MinHeapFreeRatio=10 -Djava.security.properties=/opt/harmonic/StreamRmpControllerCmd/java.security -jar /opt/harmonic/StreamRmpControllerCmd/StreamRmpControllerCmd.jar sample_stream_processing_engine v1 stream_processing 44DE0751-40F2-4A65-AB19-290B7B8BABB0 zookeeper:2181 44DE0751-40F2-4A65-AB19-290B7B8BABB0-1 /opt/harmonic/vos/voshome -1
           │ │ ├─ 45123 /usr/local/bin/Xorg :0 -xkbdir /usr/local/share/X11/xkb -nolisten local -logverbose 0 vt7 -sharevts :0.0
           │ │ ├─ 45458 sh /usr/local/etc/X11/xinitrc
           │ │ ├─ 45460 /usr/local/bin/xterm -g 90x50+0+0 -bg black -fg yellow -fn 10x20
           │ │ ├─ 45475 /usr/local/bin/dwm
           │ │ ├─ 46747 bash
           │ │ ├─ 46799 java -Xmx64m -Xshare:auto -XX:MaxMetaspaceSize=64m -XX:MaxHeapSize=64m -XX:TieredStopAtLevel=1 -Dlog_file_path=/var/log/rmp-controller-log -DLOG_SERVER=logstash -Dvos.home=/opt/harmonic/vos/voshome -jar /opt/harmonic/EsamAdapter/EsamAdapter.jar
           │ │ ├─ 47628 dbus-daemon --fork --config-file /etc/dbus-1/dbus-uvp-session.conf
           │ │ └─477745 /opt/harmonic/rmp/RmpWorker -Id=5f5663b2-8411-42a9-8237-a47063261a03 -KernelPath=/opt/harmonic/rmp/libRmpKernel.so -RpiPath=/opt/harmonic/rmp/rpi -rpiMessagePlugInPath=/opt/harmonic/rmp/rpiMessagePlugin -logPath=/var/log/rmp-controller-log/rmp-worker.log -velocimeterPath=/opt/harmonic/rmp/libRmpVelocimeter.so -rmpServiceId=sspe-44DE0751-40F2-4A65-AB19-290B7B8BABB0 -serverIP=127.0.0.1 -serverPort=40005 -oplanOutputPath=/var/log/rmp-controller-log/rmp-processing.opl -logMDCs=service_id:44DE0751-40F2-4A65-AB19-290B7B8BABB0 -enableRuntimeStateRestorer=true -enableRuntimeStateUpdater=true
           │ ├─93457ab6bb3bd0b0033873f9f7b5eb98d2daaa300555d4b050a89460ee8dbe06
           │ │ └─48052 /pause
           │ ├─945e7003246f1d85e24f7367e5a3332fcc3bfe456f7f7b9fcc12cd5399d0ed27
           │ │ └─15641 /pause
           │ ├─97fe6cf2172869acf381caf38e9c77a80dd4ffb5ec9ffd342bb1297327a0c61c
           │ │ └─13746 /pause
           │ ├─9b7ebe191af1caee7c18acca4db8ef2c3c671c90ebed84dda923eaa5e4f16cad
           │ │ └─13776 /pause
           │ ├─9f25cd91eb885e3f94e2d800003dd991e969dd57e3c8fde23045761f19fffe62
           │ │ └─k8s.io
           │ │   └─9f25cd91eb885e3f94e2d800003dd991e969dd57e3c8fde23045761f19fffe62
           │ │     ├─14675 /sbin/init
           │ │     ├─system.slice
           │ │     │ ├─accounts-daemon.service
           │ │     │ │ └─16992 /usr/libexec/accounts-daemon
           │ │     │ ├─dbus.service
           │ │     │ │ └─16676 /usr/bin/dbus-daemon --system --address=systemd: --nofork --nopidfile --systemd-activation
           │ │     │ ├─lightdm.service
           │ │     │ │ ├─16928 /usr/sbin/lightdm
           │ │     │ │ └─17017 Xvfb :0 -screen 0 1440x900x16
           │ │     │ ├─polkit.service
           │ │     │ │ └─16996 /usr/lib/polkit-1/polkitd --no-debug
           │ │     │ ├─setup-jump-client.service
           │ │     │ │ └─17018 node /usr/local/bin/setup_jump_client.js
           │ │     │ ├─systemd-journald.service
           │ │     │ │ └─15640 /usr/lib/systemd/systemd-journald
           │ │     │ └─systemd-logind.service
           │ │     │   └─16908 /usr/lib/systemd/systemd-logind
           │ │     └─user.slice
           │ │       └─user-1000.slice
           │ │         └─session-1.scope
           │ │           ├─17071 lightdm --session-child 12 15
           │ │           ├─17077 /usr/bin/openbox --startup /usr/libexec/openbox-autostart OPENBOX
           │ │           ├─17105 dbus-launch --sh-syntax --exit-with-session
           │ │           ├─17106 /usr/bin/dbus-daemon --fork --print-pid 5 --print-address 7 --session
           │ │           ├─17114 /usr/bin/ssh-agent /bin/sh -c exec -l /bin/bash -c "/usr/bin/openbox-session"
           │ │           ├─17134 /usr/lib64/firefox/firefox
           │ │           ├─17765 /usr/libexec/at-spi-bus-launcher
           │ │           ├─17776 /usr/lib64/firefox/firefox -contentproc -parentBuildID 20240618122702 -prefsLen 24777 -prefMapSize 243868 -appDir /usr/lib64/firefox/browser {f2a25019-b106-4fa8-9f8c-74c61c8d41a3} 112 socket
           │ │           ├─17819 /usr/lib64/firefox/firefox -contentproc -childID 1 -isForBrowser -prefsLen 24839 -prefMapSize 243868 -jsInitLen 240916 -parentBuildID 20240618122702 -appDir /usr/lib64/firefox/browser {554d6d1b-9e6b-4731-93ff-d8e57eaf2ab6} 112 tab
           │ │           ├─17849 /usr/lib64/firefox/firefox -contentproc -childID 2 -isForBrowser -prefsLen 23057 -prefMapSize 243868 -jsInitLen 240916 -parentBuildID 20240618122702 -appDir /usr/lib64/firefox/browser {f81b6a31-5ab6-4607-96bf-930b69e24be1} 112 tab
           │ │           ├─23752 /usr/lib64/firefox/firefox -contentproc -childID 3 -isForBrowser -prefsLen 31342 -prefMapSize 243868 -jsInitLen 240916 -parentBuildID 20240618122702 -appDir /usr/lib64/firefox/browser {a9a73e53-3c64-495a-8373-e854a17cc2b5} 112 tab
           │ │           ├─23806 /usr/lib64/firefox/firefox -contentproc -childID 4 -isForBrowser -prefsLen 28344 -prefMapSize 243868 -jsInitLen 240916 -parentBuildID 20240618122702 -appDir /usr/lib64/firefox/browser {0d3f346d-be3e-4b08-ba23-ecfc34e78412} 112 tab
           │ │           ├─23813 /usr/lib64/firefox/firefox -contentproc -childID 5 -isForBrowser -prefsLen 28344 -prefMapSize 243868 -jsInitLen 240916 -parentBuildID 20240618122702 -appDir /usr/lib64/firefox/browser {7643dbaf-7ffa-441a-9dcb-911454c93f2f} 112 tab
           │ │           └─23846 /usr/lib64/firefox/firefox -contentproc -childID 6 -isForBrowser -prefsLen 28344 -prefMapSize 243868 -jsInitLen 240916 -parentBuildID 20240618122702 -appDir /usr/lib64/firefox/browser {dceb146d-9a5e-4d9f-806d-8f0461584ce7} 112 tab
           │ ├─aa5225d693f714f7e75445136b3b211b51dddf9ae10e9b6533a4b762901249dd
           │ │ ├─30938 /bin/bash /opt/harmonic/vos/bin/vos --httpPort=80 --httpsPort=443 --stopPort=-1 --assetStorageEnable=True --assetStorageType=mediagrid --assetStorageName=local-asset-storage --keystore=/opt/harmonic/vos/flex_keystore --keystore-pw=Vk9TaGFybW9uaWNGTEVY --truststore=/opt/harmonic/vos/flex_truststore --truststore-pw=Vk9TaGFybW9uaWNGTEVY
           │ │ ├─30981 /usr/bin/python3 -s /usr/bin/supervisord -c supervisor.conf -n
           │ │ ├─31042 /opt/goofys/goofys -f -o nonempty --profile minio --endpoint http://minio-service:9000 vos-home-cluster1 /opt/harmonic/vos/voshome/
           │ │ ├─31044 /opt/goofys/goofys -f -o nonempty --stat-cache-ttl 5s --type-cache-ttl 5s --profile minio --endpoint http://minio-service:9000 vos-home-cluster1:app_data/com.harmonicinc.vos.sspe/ /opt/harmonic/vos/voshome/app_data/com.harmonicinc.vos.sspe
           │ │ └─38063 /usr/bin/java -Dvos.use.k8s=true -Dcluster.multizone=False -Dvos.appbundles.bundledir=/opt/harmonic/vos/appbundleDir -Dvos.jdbc.url=jdbc:postgresql://pgdb:5432/vos -XX:+UseG1GC -Xmx5120m -Xss16m -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.port=9010 -Dcom.sun.management.jmxremote.rmi.port=9010 -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.local.only=false -Dcom.sun.management.jmxremote.ssl=false -Djava.net.preferIPv4Stack=true -Djava.rmi.server.hostname=127.0.0.1 -Djdk.tls.ephemeralDHKeySize=2048 -XX:OnOutOfMemoryError=pkill -f -9 java -Dvos.log4j.configuration=file:/opt/harmonic/vos/log4j2.xml -Dvos.configuration=/opt/harmonic/vos/vos.properties -Dvos.appbundles.srcdir=/opt/harmonic/vos/app_bundles -Dvos.systemapps.dir=/opt/harmonic/vos/system_apps -jar /opt/harmonic/vos/vos.war --voshome=/opt/harmonic/vos/voshome --httpPort=80 --httpsPort=443 --stopPort=-1 --keystore=/opt/harmonic/vos/flex_keystore --keystore-pw=Vk9TaGFybW9uaWNGTEVY --truststore=/opt/harmonic/vos/flex_truststore --truststore-pw=Vk9TaGFybW9uaWNGTEVY
           │ ├─aad3f498ca0001b47bbc7d89f01988c218e9b80d529077e7152d13b61403f0bb
           │ │ └─27295 minio server /storage
           │ ├─acd873fc559f2941cb1dc8cf4355ff5bfd67df6a31d67ed10073845c4e40642b
           │ │ ├─46299 /bin/bash /opt/harmonic/casd/etc/start_casd.sh
           │ │ └─46472 /opt/harmonic/casd/casd
           │ ├─ade64f14dfe395e14760ca207d41149a049d31cb2ec606827ff5a63c2670a816
           │ │ ├─26108 bash /usr/bin/runserver --mode=elasticsearch --es_http_port=9200 --es_cluster_http_port=9200 --es_transport_port=9300
           │ │ ├─26141 python3 /usr/bin/runserver.py --mode=elasticsearch --es_http_port=9200 --es_cluster_http_port=9200 --es_transport_port=9300
           │ │ ├─26252 su elasticsearch -c /opt/elasticsearch/bin/opensearch -Ehttp.port=9200 -Etransport.port=9300
           │ │ ├─26263 /opt/elasticsearch/jdk/bin/java -Xshare:auto -Dopensearch.networkaddress.cache.ttl=60 -Dopensearch.networkaddress.cache.negative.ttl=10 -XX:+AlwaysPreTouch -Xss1m -Djava.awt.headless=true -Dfile.encoding=UTF-8 -Djna.nosys=true -XX:-OmitStackTraceInFastThrow -Dio.netty.noUnsafe=true -Dio.netty.noKeySetOptimization=true -Dio.netty.recycler.maxCapacityPerThread=0 -Dio.netty.allocator.numDirectArenas=0 -Dlog4j.shutdownHookEnabled=false -Dlog4j2.disable.jmx=true -Djava.locale.providers=SPI,COMPAT -Xms1g -Xmx1g -XX:+UseG1GC -XX:G1ReservePercent=25 -XX:InitiatingHeapOccupancyPercent=30 -Djava.io.tmpdir=/tmp/opensearch-8757680722945077413 -XX:HeapDumpPath=data -XX:ErrorFile=logs/hs_err_pid%p.log -Xlog:gc*,gc+age=trace,safepoint:file=logs/gc.log:utctime,pid,tags:filecount=32,filesize=64m -XX:OnOutOfMemoryError=shutdown_elasticsearch -Des.allow_insecure_settings=true -Dlog4j2.formatMsgNoLookups=true -Xms1280m -Xmx1280m -XX:MaxDirectMemorySize=671088640 -Dopensearch.path.home=/opt/elasticsearch -Dopensearch.path.conf=/opt/elasticsearch/config -Dopensearch.distribution.type=tar -Dopensearch.bundled_jdk=true -cp /opt/elasticsearch/lib/* org.opensearch.bootstrap.OpenSearch -Ehttp.port=9200 -Etransport.port=9300
           │ │ └─28901 java -Xms10m -Xmx100m -XX:+ExitOnOutOfMemoryError -XX:+UseSerialGC -XX:MinHeapFreeRatio=10 -XX:MaxHeapFreeRatio=20 -Dmodes=elasticsearch -Delasticsearch.host=127.0.0.1 -Delasticsearch.http_port=9200 -Dzookeeper=zookeeper:2181 -jar /opt/harmonic/logstash-controller/LogstashController.jar
           │ ├─b4f17c609632632a613da8bf68ce25789705dbcb014364fa3b7419ae46c316a7
           │ │ └─46581 /pause
           │ ├─b521bb9fc800522d5ddc3a138193d4a08f4d51ce79ee0d19a6b3e42d1d4a4ee4
           │ │ └─13772 /pause
           │ ├─b746563060551612d49e0b3e5ebfc32a5e99f5518389847c64602c6be63c8a9d
           │ │ └─24747 /bin/prometheus-config-reloader --listen-address=:8080 --reload-url=http://localhost:9090/-/reload --config-file=/etc/prometheus/config/prometheus.yaml.gz --config-envsubst-file=/etc/prometheus/config_out/prometheus.env.yaml --watched-dir=/etc/prometheus/rules/prometheus-k8s-rulefiles-0
           │ ├─ba239e148103274ece222541571cef554d8f50d8e19cc55f13679a6a1e2d2076
           │ │ └─16746 /pause
           │ ├─bb8f9aba5a9cfe49eda5b1007ecac6c2228462f77806cb7801aa820df7b2f0a4
           │ │ └─16695 /pause
           │ ├─bd2f5bcbccb5108ea75dd8fe3c107454e9443a9bf04d0d76ea762d52abf0de4d
           │ │ └─24718 /bin/prometheus --web.console.templates=/etc/prometheus/consoles --web.console.libraries=/etc/prometheus/console_libraries --storage.tsdb.retention.time=1d --config.file=/etc/prometheus/config_out/prometheus.env.yaml --storage.tsdb.path=/prometheus --web.enable-lifecycle --web.route-prefix=/ --web.config.file=/etc/prometheus/web_config/web-config.yaml
           │ ├─bd5c5cd2d6fdb3330412b7f2a136f6c40255e3f17d2e12cb4e727dbadb0715a2
           │ │ └─14224 /pause
           │ ├─bd7b68359e81ea9f2992b99fa251dc23bca03911465b8cfcea15dc22ce0a6612
           │ │ ├─41342 /bin/bash /opt/harmonic/live-ingest-origin/bin/live-ingest-origin-server-controller
           │ │ ├─41354 /usr/bin/python /usr/bin/supervisord -c /supervisord.conf
           │ │ ├─41391 python3 /opt/harmonic/live-ingest-origin/bin/supervisord-event-handler
           │ │ ├─41392 java -Xms50m -Xmx200m -classpath /opt/harmonic/live-ingest-origin/live-ingest-origin-server-controller/lib/live-ingest-origin-server-controller.jar:/opt/harmonic/live-ingest-origin/live-ingest-origin-server-controller/lib/live-ingest-origin-library.jar:/opt/harmonic/live-ingest-origin/live-ingest-origin-server-controller/lib/log4j-slf4j-impl-2.17.1.jar:/opt/harmonic/live-ingest-origin/live-ingest-origin-server-controller/lib/zookeeper-3.5.6.jar:/opt/harmonic/live-ingest-origin/live-ingest-origin-server-controller/lib/curator-recipes-5.0.0.jar:/opt/harmonic/live-ingest-origin/live-ingest-origin-server-controller/lib/curator-framework-5.0.0.jar:/opt/harmonic/live-ingest-origin/live-ingest-origin-server-controller/lib/curator-client-5.0.0.jar:/opt/harmonic/live-ingest-origin/live-ingest-origin-server-controller/lib/slf4j-api-1.7.25.jar:/opt/harmonic/live-ingest-origin/live-ingest-origin-server-controller/lib/log4j-1.2-api-2.17.1.jar:/opt/harmonic/live-ingest-origin/live-ingest-origin-server-controller/lib/commons-io-2.2.jar:/opt/harmonic/live-ingest-origin/live-ingest-origin-server-controller/lib/velocity-1.7.jar:/opt/harmonic/live-ingest-origin/live-ingest-origin-server-controller/lib/commons-lang-2.4.jar:/opt/harmonic/live-ingest-origin/live-ingest-origin-server-controller/lib/joda-time-2.8.2.jar:/opt/harmonic/live-ingest-origin/live-ingest-origin-server-controller/lib/log4j-layout-template-json-2.17.1.jar:/opt/harmonic/live-ingest-origin/live-ingest-origin-server-controller/lib/gson-2.2.4.jar:/opt/harmonic/live-ingest-origin/live-ingest-origin-server-controller/lib/log4j-core-2.17.1.jar:/opt/harmonic/live-ingest-origin/live-ingest-origin-server-controller/lib/log4j-api-2.17.1.jar:/opt/harmonic/live-ingest-origin/live-ingest-origin-server-controller/lib/commons-collections-3.2.1.jar:/opt/harmonic/live-ingest-origin/live-ingest-origin-server-controller/lib/zookeeper-jute-3.5.6.jar:/opt/harmonic/live-ingest-origin/live-ingest-origin-server-contro
ller/lib/audience-annotations-0.5.0.jar:/opt/harmonic/live-ingest-origin/live-ingest-origin-server-controller/lib/netty-handler-4.1.42.Final.jar:/opt/harmonic/live-ingest-origin/live-ingest-origin-server-controller/lib/netty-transport-native-epoll-4.1.42.Final.jar:/opt/harmonic/live-ingest-origin/live-ingest-origin-server-controller/lib/netty-codec-4.1.42.Final.jar:/opt/harmonic/live-ingest-origin/live-ingest-origin-server-controller/lib/netty-transport-native-unix-common-4.1.42.Final.jar:/opt/harmonic/live-ingest-origin/live-ingest-origin-server-controller/lib/netty-transport-4.1.42.Final.jar:/opt/harmonic/live-ingest-origin/live-ingest-origin-server-controller/lib/netty-buffer-4.1.42.Final.jar:/opt/harmonic/live-ingest-origin/live-ingest-origin-server-controller/lib/netty-resolver-4.1.42.Final.jar:/opt/harmonic/live-ingest-origin/live-ingest-origin-server-controller/lib/netty-common-4.1.42.Final.jar:/opt/harmonic/live-ingest-origin/live-ingest-origin-server-controller/lib/guava-27.0.1-jre.jar:/opt/harmonic/live-ingest-origin/live-ingest-origin-server-controller/lib/failureaccess-1.0.1.jar:/opt/harmonic/live-ingest-origin/live-ingest-origin-server-controller/lib/listenablefuture-9999.0-empty-to-avoid-conflict-with-guava.jar:/opt/harmonic/live-ingest-origin/live-ingest-origin-server-controller/lib/jsr305-3.0.2.jar:/opt/harmonic/live-ingest-origin/live-ingest-origin-server-controller/lib/checker-qual-2.5.2.jar:/opt/harmonic/live-ingest-origin/live-ingest-origin-server-controller/lib/error_prone_annotations-2.2.0.jar:/opt/harmonic/live-ingest-origin/live-ingest-origin-server-controller/lib/j2objc-annotations-1.1.jar:/opt/harmonic/live-ingest-origin/live-ingest-origin-server-controller/lib/animal-sniffer-annotations-1.17.jar com.harmonicinc.liveingestoriginservercontroller.LiveIngestOriginServerController 125eb6b4-eb00-4000-b90b-369d72cb3b56 NGINX HTTP
           │ │ ├─41455 /usr/share/filebeat/bin/filebeat -path.home /usr/share/filebeat -path.config /etc/filebeat -path.data /var/lib/filebeat -path.logs /var/log/filebeat run &
           │ │ ├─41462 nginx: master process nginx -c /opt/harmonic/nginx/nginx.conf
           │ │ ├─41475 nginx: worker process
           │ │ ├─41476 nginx: worker process
           │ │ ├─41477 nginx: worker process
           │ │ ├─41479 nginx: worker process
           │ │ ├─41480 nginx: worker process
           │ │ ├─41481 nginx: worker process
           │ │ ├─41482 nginx: worker process
           │ │ ├─41483 nginx: worker process
           │ │ ├─41484 nginx: worker process
           │ │ ├─41485 nginx: worker process
           │ │ ├─41486 nginx: worker process
           │ │ ├─41487 nginx: worker process
           │ │ ├─41488 nginx: worker process
           │ │ ├─41489 nginx: worker process
           │ │ ├─41490 nginx: worker process
           │ │ ├─41491 nginx: worker process
           │ │ ├─41492 nginx: worker process
           │ │ ├─41493 nginx: worker process
           │ │ ├─41494 nginx: worker process
           │ │ ├─41495 nginx: worker process
           │ │ ├─41496 nginx: worker process
           │ │ ├─41497 nginx: worker process
           │ │ ├─41498 nginx: worker process
           │ │ ├─41499 nginx: worker process
           │ │ ├─41500 nginx: worker process
           │ │ ├─41501 nginx: worker process
           │ │ ├─41502 nginx: worker process
           │ │ ├─41503 nginx: worker process
           │ │ ├─41504 nginx: worker process
           │ │ ├─41505 nginx: worker process
           │ │ ├─41506 nginx: worker process
           │ │ ├─41507 nginx: worker process
           │ │ ├─41508 nginx: worker process
           │ │ ├─41509 nginx: worker process
           │ │ ├─41510 nginx: worker process
           │ │ ├─41511 nginx: worker process
           │ │ ├─41512 nginx: worker process
           │ │ ├─41513 nginx: worker process
           │ │ ├─41514 nginx: worker process
           │ │ ├─41515 nginx: worker process
           │ │ ├─41516 nginx: worker process
           │ │ ├─41517 nginx: worker process
           │ │ ├─41518 nginx: worker process
           │ │ ├─41519 nginx: worker process
           │ │ ├─41520 nginx: worker process
           │ │ ├─41521 nginx: worker process
           │ │ ├─41522 nginx: worker process
           │ │ ├─41523 nginx: worker process
           │ │ ├─41524 nginx: worker process
           │ │ ├─41525 nginx: worker process
           │ │ ├─41526 nginx: worker process
           │ │ ├─41527 nginx: worker process
           │ │ ├─41528 nginx: worker process
           │ │ ├─41529 nginx: worker process
           │ │ ├─41530 nginx: worker process
           │ │ ├─41531 nginx: worker process
           │ │ ├─41532 nginx: worker process
           │ │ ├─41533 nginx: worker process
           │ │ ├─41534 nginx: worker process
           │ │ ├─41535 nginx: worker process
           │ │ ├─41536 nginx: worker process
           │ │ ├─41537 nginx: worker process
           │ │ ├─41538 nginx: worker process
           │ │ ├─41539 nginx: worker process
           │ │ ├─41540 nginx: worker process
           │ │ ├─41541 nginx: worker process
           │ │ ├─41542 nginx: worker process
           │ │ ├─41543 nginx: worker process
           │ │ ├─41544 nginx: worker process
           │ │ ├─41545 nginx: worker process
           │ │ ├─41546 nginx: worker process
           │ │ ├─41547 nginx: worker process
           │ │ ├─41548 nginx: worker process
           │ │ ├─41549 nginx: worker process
           │ │ ├─41550 nginx: worker process
           │ │ ├─41551 nginx: worker process
           │ │ ├─41552 nginx: worker process
           │ │ ├─41553 nginx: worker process
           │ │ ├─41554 nginx: worker process
           │ │ ├─41555 nginx: worker process
           │ │ ├─41556 nginx: worker process
           │ │ ├─41557 nginx: worker process
           │ │ ├─41558 nginx: worker process
           │ │ ├─41559 nginx: worker process
           │ │ ├─41560 nginx: worker process
           │ │ ├─41561 nginx: worker process
           │ │ ├─41562 nginx: worker process
           │ │ ├─41563 nginx: worker process
           │ │ ├─41564 nginx: worker process
           │ │ ├─41565 nginx: worker process
           │ │ ├─41566 nginx: worker process
           │ │ ├─41567 nginx: worker process
           │ │ ├─41568 nginx: worker process
           │ │ ├─41569 nginx: worker process
           │ │ ├─41570 nginx: worker process
           │ │ ├─41571 nginx: worker process
           │ │ ├─41572 nginx: worker process
           │ │ ├─41573 nginx: worker process
           │ │ ├─41574 nginx: worker process
           │ │ ├─41575 nginx: worker process
           │ │ ├─41576 nginx: worker process
           │ │ ├─41577 nginx: worker process
           │ │ ├─41578 nginx: worker process
           │ │ ├─41579 nginx: worker process
           │ │ ├─41580 nginx: worker process
           │ │ ├─41581 nginx: worker process
           │ │ ├─41582 nginx: worker process
           │ │ ├─41583 nginx: worker process
           │ │ ├─41584 nginx: worker process
           │ │ ├─41585 nginx: worker process
           │ │ ├─41586 nginx: worker process
           │ │ ├─41587 nginx: worker process
           │ │ ├─41588 nginx: worker process
           │ │ ├─41589 nginx: worker process
           │ │ ├─41590 nginx: worker process
           │ │ ├─41591 nginx: worker process
           │ │ ├─41592 nginx: worker process
           │ │ ├─41593 nginx: worker process
           │ │ ├─41596 nginx: worker process
           │ │ ├─41597 nginx: worker process
           │ │ ├─41598 nginx: worker process
           │ │ ├─41599 nginx: worker process
           │ │ ├─41600 nginx: worker process
           │ │ ├─41601 nginx: worker process
           │ │ ├─41602 nginx: worker process
           │ │ ├─41603 nginx: worker process
           │ │ ├─41604 nginx: worker process
           │ │ ├─41605 nginx: worker process
           │ │ ├─41606 nginx: worker process
           │ │ ├─41607 nginx: worker process
           │ │ ├─41608 nginx: worker process
           │ │ ├─41609 nginx: worker process
           │ │ ├─41610 nginx: worker process
           │ │ ├─41611 nginx: worker process
           │ │ ├─41612 nginx: worker process
           │ │ ├─41613 nginx: worker process
           │ │ ├─41614 nginx: worker process
           │ │ ├─41615 nginx: worker process
           │ │ ├─41616 nginx: worker process
           │ │ ├─41617 nginx: worker process
           │ │ ├─41618 nginx: worker process
           │ │ ├─41619 nginx: worker process
           │ │ ├─41620 nginx: worker process
           │ │ ├─41621 nginx: worker process
           │ │ ├─41622 nginx: worker process
           │ │ ├─41623 nginx: worker process
           │ │ ├─41624 nginx: worker process
           │ │ ├─41625 nginx: worker process
           │ │ ├─41626 nginx: worker process
           │ │ ├─41627 nginx: worker process
           │ │ ├─41628 nginx: worker process
           │ │ ├─41629 nginx: worker process
           │ │ ├─41630 nginx: worker process
           │ │ ├─41631 nginx: worker process
           │ │ ├─41632 nginx: worker process
           │ │ ├─41633 nginx: worker process
           │ │ ├─41634 nginx: worker process
           │ │ ├─41635 nginx: worker process
           │ │ ├─41636 nginx: worker process
           │ │ ├─41637 nginx: worker process
           │ │ ├─41638 nginx: worker process
           │ │ ├─41639 nginx: worker process
           │ │ ├─41640 nginx: worker process
           │ │ ├─41641 nginx: worker process
           │ │ ├─41642 nginx: worker process
           │ │ ├─41643 nginx: worker process
           │ │ ├─41644 nginx: worker process
           │ │ ├─41645 nginx: worker process
           │ │ └─41646 nginx: cache manager process
           │ ├─bdd3f0371b20c26eabbf8c0c7141ac0992f046ae3f8d0673711e30a1078f8c0d
           │ │ └─30398 /pause
           │ ├─bf7cf933a8910b9fb80cce11f576b2ce0282a4a972e5b0001806935cd7e4a995
           │ │ └─40571 java -Xmx512m -Dlog.server=logstash -Dlogging.config=/opt/harmonic/mediautils/log4j2.xml -XX:+ExitOnOutOfMemoryError -jar /opt/harmonic/mediautils/mediautils-worker.jar
           │ ├─c8bba1ece9eddd59495885e39f255ad4f9769ae89b1b7fc3903cce8d3c7f7916
           │ │ ├─42904 /usr/bin/python3 -s /usr/bin/supervisord -c /supervisord.conf
           │ │ ├─43444 python3 /opt/harmonic/unified-origin-engine/bin/supervisord-event-handler
           │ │ ├─43448 java -Xms50m -Xmx512m -XX:+ExitOnOutOfMemoryError -XX:+UseConcMarkSweepGC -Xloggc:/var/log/gc.log -XX:+UseGCLogFileRotation -XX:NumberOfGCLogFiles=3 -XX:GCLogFileSize=1M -Dorg.glassfish.grizzly.nio.transport.TCPNIOTransport.max-receive-buffer-size=1048576 -classpath /opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/remote-daemon-executor.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/potf-server-config-lib.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/KMSClientLib.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/cpixlib.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kms-soap-stub.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jaxb-impl-2.2.5.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/commons-logging-1.2.1.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/xmlsec-1.5.6.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/commons-codec-1.14.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jersey-apache-connector-2.5.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/httpcore-4.3.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/httpclient-4.3.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/guava-14.0.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jackson-dataformat-xml-2.10.3.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/stax2-api-4.2.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/RmpControllerSDK.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/unified-origin-engine-library.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/StreamRmpControllerCmd.jar:/opt/harmonic/MediaStreamPackageContro
ller/remote-daemon-executor/lib/rmp-controller-models.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/commons-math3-3.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/dnsjava-2.1.8.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jersey-container-grizzly2-http-2.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/grizzly-http-server-2.3.33.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jersey-container-jdk-http-2.7.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/protostuff-core-1.5.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/protostuff-runtime-1.5.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jackson-jaxrs-json-provider-2.10.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jersey-media-json-jackson-2.7.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jackson-jaxrs-1.9.13.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/velocity-1.7.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/joda-time-2.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/commons-validator-1.6.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/log4j-slf4j-impl-2.17.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/zookeeper-3.5.6.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/cron-utils-9.2.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-client-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-httpclient-okhttp-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-client-api-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/curator-recipes-
5.0.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/curator-framework-5.0.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/curator-client-5.0.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-gatewayapi-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-resource-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-rbac-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-admissionregistration-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-apps-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-autoscaling-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-apiextensions-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-batch-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-certificates-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-coordination-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-discovery-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-events-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-extensions-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-flowcontrol-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-networking-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-metrics-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubern
etes-model-policy-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-scheduling-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-storageclass-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-node-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-core-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-common-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/slf4j-api-1.7.36.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/log4j-1.2-api-2.17.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/simpleclient_servlet-0.12.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/simpleclient_servlet_common-0.12.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/simpleclient_common-0.12.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/simpleclient-0.12.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jersey-server-2.7.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jersey-client-2.7.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jersey-common-2.7.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/javax.ws.rs-api-2.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/log4j-layout-template-json-2.17.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/asset-mgmt-grpc-library.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/commons-configuration-1.10.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/commons-lang-2.6.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/gson-2.2.4.jar:/
opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/commons-io-2.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jackson-module-jaxb-annotations-2.10.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jackson-jaxrs-base-2.10.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/zjsonpatch-0.3.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jackson-dataformat-yaml-2.14.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jackson-datatype-jsr310-2.14.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jackson-databind-2.14.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jackson-annotations-2.14.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/property-binder-4.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/grizzly-http-2.3.33.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/hk2-locator-2.2.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/javax.inject-2.2.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/protostuff-collectionschema-1.5.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/protostuff-api-1.5.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jackson-xc-1.9.13.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jackson-mapper-asl-1.9.13.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jackson-core-asl-1.9.13.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/commons-beanutils-1.9.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/commons-collections-3.2.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/commons-digester-1.8.1.jar:/opt/harmonic/MediaStreamPackageContro
ller/remote-daemon-executor/lib/commons-logging-1.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/log4j-core-2.17.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/log4j-api-2.17.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/simpleclient_tracer_otel-0.12.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/simpleclient_tracer_otel_agent-0.12.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/zookeeper-jute-3.5.6.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/audience-annotations-0.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/netty-handler-4.1.42.Final.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/netty-transport-native-epoll-4.1.42.Final.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jackson-core-2.14.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jakarta.el-3.0.4.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/grizzly-framework-2.3.33.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/javax.annotation-api-1.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jersey-guava-2.7.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/hk2-api-2.2.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/osgi-resource-locator-1.0.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/validation-api-1.1.0.Final.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jakarta.xml.bind-api-2.3.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jakarta.activation-api-1.2.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/snakeyaml-engine-2.6.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/
lib/logging-interceptor-3.12.12.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/okhttp-3.12.12.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/simpleclient_tracer_common-0.12.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/netty-codec-4.1.42.Final.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/netty-transport-native-unix-common-4.1.42.Final.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/netty-transport-4.1.42.Final.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/netty-buffer-4.1.42.Final.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/netty-resolver-4.1.42.Final.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/netty-common-4.1.42.Final.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/hk2-utils-2.2.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/aopalliance-repackaged-2.2.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/javassist-3.18.1-GA.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/snakeyaml-1.33.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/okio-1.15.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/guava-27.0.1-jre.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/javax.inject-1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/failureaccess-1.0.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/listenablefuture-9999.0-empty-to-avoid-conflict-with-guava.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jsr305-3.0.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/checker-qual-2.5.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/error_prone_annotation
s-2.2.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/j2objc-annotations-1.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/animal-sniffer-annotations-1.17.jar com.harmonicinc.remotedaemonexecutor.RemoteDaemonExecutor asset-operator MEDIAGRID backupStorageIsNotConfigured
           │ │ ├─43451 /usr/share/filebeat/bin/filebeat --path.home /usr/share/filebeat --path.config /etc/filebeat --path.data /var/lib/filebeat --path.logs /var/log/filebeat -E output.logstash.hosts=['logstash:5044']
           │ │ └─44380 /opt/harmonic/MediaStreamPackageController/AssetManagementProxy --grpc-port 20208
           │ ├─d11743e05135a6b7daa52339ed04bbac1981ffc8018794fe1c83b1da7b182e49
           │ │ └─15670 /pause
           │ ├─d47371732f56f14f031e0b7b210ea0c0947efe1c4bf8fa70b5e0c3ebb888ecfa
           │ │ └─14139 /pause
           │ ├─d8b576a7151077a817ed7ccda23150d12c06cef14dff0d46cf1ddb8384a2c8b2
           │ │ ├─14313 /bin/bash /opt/omneon/sbin/run_ecd_and_wait
           │ │ └─14824 /opt/omneon/bin/execCntld
           │ ├─d8fdd5b3c74ad34b276029b4db554a5ac1526f30d1ed42e889a2d11810f97e27
           │ │ └─45806 /pause
           │ ├─e32dbeb647266922c1ed7cbf28d0793f5eb1684ee8ea9245e799311514fddcf8
           │ │ └─14628 /pause
           │ ├─e6c48aed1f74ce74c92c7fa0d44b00eb2cec24c30cb75bd0e44cc002979144d2
           │ │ └─14629 java -Xmx512m -jar /opt/harmonic/emmg_server/emmg_server.jar zookeeper:2181 /vos-apps/emmg-server/v1/config 4971 1516
           │ ├─e77396127a5205ad7263b1715ab488deed253092cf27f541d1d3a85180edc5a9
           │ │ └─16398 grafana-server --homepath=/usr/share/grafana --config=/etc/grafana/grafana.ini --packaging=docker cfg:default.log.mode=console cfg:default.paths.data=/var/lib/grafana cfg:default.paths.logs=/var/log/grafana cfg:default.paths.plugins=/var/lib/grafana/plugins cfg:default.paths.provisioning=/etc/grafana/provisioning
           │ ├─eb0550778838139057fd93a2269c6ce7f78e4684614f299360ca096e1c538630
           │ │ ├─44684 bash /opt/srmpc/start.sh java -Dtmd.port=32791 -Drmp.mallocArenaMax=32 -Dkubernetes.namespace=cluster1 -DLOG_SERVER=logstash -XX:NativeMemoryTracking=summary -XX:+PrintGCDetails -XX:+PrintGCDateStamps -Xloggc:gc.log -XX:+UseGCLogFileRotation -XX:NumberOfGCLogFiles=5 -XX:GCLogFileSize=128K -Xms64m -Xmx1024m -Xshare:auto -XX:MaxMetaspaceSize=512m -XX:MaxHeapSize=1024m -XX:TieredStopAtLevel=1 -XX:+ExitOnOutOfMemoryError -XX:MaxHeapFreeRatio=30  -XX:MinHeapFreeRatio=10 -Djava.security.properties=/opt/harmonic/StreamRmpControllerCmd/java.security -jar /opt/harmonic/StreamRmpControllerCmd/StreamRmpControllerCmd.jar sample_stream_processing_engine v1 stream_processing E7395E3B-B18C-442C-A045-E1CECED0696B zookeeper:2181 E7395E3B-B18C-442C-A045-E1CECED0696B-1 /opt/harmonic/vos/voshome -1
           │ │ ├─44708 /usr/bin/python3 -s /usr/bin/supervisord -c /tmp/supervisor.conf -n
           │ │ ├─44719 /usr/bin/python3 /opt/srmpc/srmpc-watchdog
           │ │ ├─44720 /usr/share/filebeat/bin/filebeat -path.home /usr/share/filebeat -path.config /etc/filebeat -path.data /var/lib/filebeat -path.logs /var/log/filebeat
           │ │ ├─44721 /opt/goofys/goofys -f -o nonempty --stat-cache-ttl 0 --type-cache-ttl 0 --profile minio --endpoint http://minio-service:9000 vos-home-cluster1:app_data/harmonicinc.vos.configure/dolby_vision /graphics/dolby_vision
           │ │ ├─44722 /opt/goofys/goofys -f -o nonempty --stat-cache-ttl 0 --type-cache-ttl 0 --profile minio --endpoint http://minio-service:9000 vos-home-cluster1:app_data/harmonicinc.vos.configure/fonts /graphics/fonts
           │ │ ├─44723 /opt/goofys/goofys -f -o nonempty --stat-cache-ttl 0 --type-cache-ttl 0 --profile minio --endpoint http://minio-service:9000 vos-home-cluster1:app_data/harmonicinc.vos.configure/irdeto_license /opt/irdeto
           │ │ ├─44724 /opt/goofys/goofys -f -o nonempty --stat-cache-ttl 0 --type-cache-ttl 0 --profile minio --endpoint http://minio-service:9000 vos-home-cluster1:app_data/harmonicinc.vos.configure/lut /graphics/lut
           │ │ ├─44730 /opt/goofys/goofys -f -o nonempty --stat-cache-ttl 0 --type-cache-ttl 0 --profile minio --endpoint http://minio-service:9000 vos-home-cluster1:app_data/harmonicinc.vos.configure/sl_hdr_config /graphics/sl_hdr_config
           │ │ ├─44736 /opt/goofys/goofys -f -o nonempty --profile minio --endpoint http://minio-service:9000 vos-home-cluster1:app_data /opt/harmonic/vos/voshome/app_data
           │ │ ├─44747 /usr/local/bin/xinit /usr/local/etc/X11/xinitrc -- /usr/local/bin/Xorg -xkbdir /usr/local/share/X11/xkb -nolisten local -logverbose 0 vt7 -sharevts :0.0
           │ │ ├─44764 java -Dtmd.port=32791 -Drmp.mallocArenaMax=32 -Dkubernetes.namespace=cluster1 -DLOG_SERVER=logstash -XX:NativeMemoryTracking=summary -XX:+PrintGCDetails -XX:+PrintGCDateStamps -Xloggc:gc.log -XX:+UseGCLogFileRotation -XX:NumberOfGCLogFiles=5 -XX:GCLogFileSize=128K -Xms64m -Xmx1024m -Xshare:auto -XX:MaxMetaspaceSize=512m -XX:MaxHeapSize=1024m -XX:TieredStopAtLevel=1 -XX:+ExitOnOutOfMemoryError -XX:MaxHeapFreeRatio=30 -XX:MinHeapFreeRatio=10 -Djava.security.properties=/opt/harmonic/StreamRmpControllerCmd/java.security -jar /opt/harmonic/StreamRmpControllerCmd/StreamRmpControllerCmd.jar sample_stream_processing_engine v1 stream_processing E7395E3B-B18C-442C-A045-E1CECED0696B zookeeper:2181 E7395E3B-B18C-442C-A045-E1CECED0696B-1 /opt/harmonic/vos/voshome -1
           │ │ ├─44841 /usr/local/bin/Xorg :0 -xkbdir /usr/local/share/X11/xkb -nolisten local -logverbose 0 vt7 -sharevts :0.0
           │ │ ├─45443 sh /usr/local/etc/X11/xinitrc
           │ │ ├─45445 /usr/local/bin/xterm -g 90x50+0+0 -bg black -fg yellow -fn 10x20
           │ │ ├─45473 /usr/local/bin/dwm
           │ │ ├─46745 bash
           │ │ ├─46787 java -Xmx64m -Xshare:auto -XX:MaxMetaspaceSize=64m -XX:MaxHeapSize=64m -XX:TieredStopAtLevel=1 -Dlog_file_path=/var/log/rmp-controller-log -DLOG_SERVER=logstash -Dvos.home=/opt/harmonic/vos/voshome -jar /opt/harmonic/EsamAdapter/EsamAdapter.jar
           │ │ ├─47331 /opt/harmonic/rmp/RmpWorker -Id=e62c4674-384e-4326-b5ad-bbb6d9f0733f -KernelPath=/opt/harmonic/rmp/libRmpKernel.so -RpiPath=/opt/harmonic/rmp/rpi -rpiMessagePlugInPath=/opt/harmonic/rmp/rpiMessagePlugin -logPath=/var/log/rmp-controller-log/rmp-worker.log -velocimeterPath=/opt/harmonic/rmp/libRmpVelocimeter.so -rmpServiceId=sspe-E7395E3B-B18C-442C-A045-E1CECED0696B -serverIP=127.0.0.1 -serverPort=40003 -oplanOutputPath=/var/log/rmp-controller-log/rmp-processing.opl -logMDCs=service_id:E7395E3B-B18C-442C-A045-E1CECED0696B -enableRuntimeStateRestorer=true -enableRuntimeStateUpdater=true
           │ │ └─47625 dbus-daemon --fork --config-file /etc/dbus-1/dbus-uvp-session.conf
           │ ├─ecb389f6e488e34ee05de07d5ba0b6a865880613de9512f762d02a25ef996982
           │ │ └─42632 /pause
           │ ├─f15a73eb2d9863156cc92fde623fb7c9f291dbcc6f4fd68c05f1d72e948c907e
           │ │ ├─42903 /usr/bin/python3 -s /usr/bin/supervisord -c /supervisord.conf
           │ │ ├─43446 python3 /opt/harmonic/unified-origin-engine/bin/supervisord-event-handler
           │ │ ├─43450 /opt/goofys/goofys -f -o nonempty --profile minio --endpoint http://minio-service:9000 vos-home-cluster1:app_data/com.harmonicinc.vos.cert.manager /tmp/drmCertValidationKey
           │ │ ├─43452 /opt/goofys/goofys -f -o nonempty --profile minio --endpoint http://minio-service:9000 vos-home-cluster1:app_data/harmonicinc.vos.scrambling/kmsCertKey /tmp/drmClientCertKey
           │ │ ├─43454 /opt/goofys/goofys --stat-cache-ttl 1s --type-cache-ttl 1s -f -o nonempty --profile minio --endpoint http://minio-service:9000 vos-home-cluster1:app_data/harmonicinc.vos.unified.origin.engine /opt/harmonic/vos/voshome/app_data/harmonicinc.vos.unified.origin.engine
           │ │ ├─43455 /usr/share/filebeat/bin/filebeat --path.home /usr/share/filebeat --path.config /etc/filebeat --path.data /var/lib/filebeat --path.logs /var/log/filebeat -E output.logstash.hosts=['logstash:5044']
           │ │ ├─43462 java -Xms50m -Xmx512m -XX:+ExitOnOutOfMemoryError -Xloggc:/var/log/gc.log -XX:+UseGCLogFileRotation -XX:NumberOfGCLogFiles=3 -XX:GCLogFileSize=1M -Dorg.glassfish.grizzly.nio.transport.TCPNIOTransport.max-receive-buffer-size=1048576 -Djava.security.properties=/opt/harmonic/java_security/java.security -classpath /opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/remote-daemon-executor.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/potf-server-config-lib.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/KMSClientLib.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/cpixlib.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kms-soap-stub.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jaxb-impl-2.2.5.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/commons-logging-1.2.1.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/xmlsec-1.5.6.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/commons-codec-1.14.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jersey-apache-connector-2.5.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/httpcore-4.3.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/httpclient-4.3.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/guava-14.0.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jackson-dataformat-xml-2.10.3.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/stax2-api-4.2.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/RmpControllerSDK.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/unified-origin-engine-library.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/StreamRmpControllerC
md.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/rmp-controller-models.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/commons-math3-3.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/dnsjava-2.1.8.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jersey-container-grizzly2-http-2.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/grizzly-http-server-2.3.33.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jersey-container-jdk-http-2.7.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/protostuff-core-1.5.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/protostuff-runtime-1.5.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jackson-jaxrs-json-provider-2.10.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jersey-media-json-jackson-2.7.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jackson-jaxrs-1.9.13.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/velocity-1.7.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/joda-time-2.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/commons-validator-1.6.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/log4j-slf4j-impl-2.17.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/zookeeper-3.5.6.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/cron-utils-9.2.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-client-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-httpclient-okhttp-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-client-api-6.5.0.jar:/opt/harmonic/MediaStreamPackageControlle
r/remote-daemon-executor/lib/curator-recipes-5.0.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/curator-framework-5.0.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/curator-client-5.0.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-gatewayapi-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-resource-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-rbac-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-admissionregistration-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-apps-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-autoscaling-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-apiextensions-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-batch-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-certificates-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-coordination-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-discovery-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-events-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-extensions-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-flowcontrol-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-networking-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-metrics-6.5.0.jar:/opt/harmonic/MediaStreamPackag
eController/remote-daemon-executor/lib/kubernetes-model-policy-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-scheduling-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-storageclass-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-node-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-core-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/kubernetes-model-common-6.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/slf4j-api-1.7.36.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/log4j-1.2-api-2.17.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/simpleclient_servlet-0.12.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/simpleclient_servlet_common-0.12.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/simpleclient_common-0.12.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/simpleclient-0.12.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jersey-server-2.7.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jersey-client-2.7.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jersey-common-2.7.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/javax.ws.rs-api-2.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/log4j-layout-template-json-2.17.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/asset-mgmt-grpc-library.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/commons-configuration-1.10.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/commons-lang-2.6.jar:/opt/harmonic/MediaStreamPackageControlle
r/remote-daemon-executor/lib/gson-2.2.4.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/commons-io-2.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jackson-module-jaxb-annotations-2.10.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jackson-jaxrs-base-2.10.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/zjsonpatch-0.3.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jackson-dataformat-yaml-2.14.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jackson-datatype-jsr310-2.14.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jackson-databind-2.14.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jackson-annotations-2.14.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/property-binder-4.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/grizzly-http-2.3.33.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/hk2-locator-2.2.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/javax.inject-2.2.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/protostuff-collectionschema-1.5.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/protostuff-api-1.5.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jackson-xc-1.9.13.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jackson-mapper-asl-1.9.13.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jackson-core-asl-1.9.13.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/commons-beanutils-1.9.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/commons-collections-3.2.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/commons-digester-1.8
.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/commons-logging-1.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/log4j-core-2.17.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/log4j-api-2.17.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/simpleclient_tracer_otel-0.12.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/simpleclient_tracer_otel_agent-0.12.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/zookeeper-jute-3.5.6.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/audience-annotations-0.5.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/netty-handler-4.1.42.Final.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/netty-transport-native-epoll-4.1.42.Final.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jackson-core-2.14.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jakarta.el-3.0.4.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/grizzly-framework-2.3.33.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/javax.annotation-api-1.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jersey-guava-2.7.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/hk2-api-2.2.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/osgi-resource-locator-1.0.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/validation-api-1.1.0.Final.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jakarta.xml.bind-api-2.3.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jakarta.activation-api-1.2.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/snakeyaml-engine-2.6.jar:/opt/harmonic/MediaSt
reamPackageController/remote-daemon-executor/lib/logging-interceptor-3.12.12.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/okhttp-3.12.12.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/simpleclient_tracer_common-0.12.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/netty-codec-4.1.42.Final.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/netty-transport-native-unix-common-4.1.42.Final.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/netty-transport-4.1.42.Final.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/netty-buffer-4.1.42.Final.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/netty-resolver-4.1.42.Final.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/netty-common-4.1.42.Final.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/hk2-utils-2.2.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/aopalliance-repackaged-2.2.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/javassist-3.18.1-GA.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/snakeyaml-1.33.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/okio-1.15.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/guava-27.0.1-jre.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/javax.inject-1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/failureaccess-1.0.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/listenablefuture-9999.0-empty-to-avoid-conflict-with-guava.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/jsr305-3.0.2.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/checker-qual-2.5.2.jar:/opt/harmonic/MediaStreamPackageController/remo
te-daemon-executor/lib/error_prone_annotations-2.2.0.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/j2objc-annotations-1.1.jar:/opt/harmonic/MediaStreamPackageController/remote-daemon-executor/lib/animal-sniffer-annotations-1.17.jar com.harmonicinc.remotedaemonexecutor.RemoteDaemonExecutor media-delivery-server MEDIAGRID backupStorageIsNotConfigured
           │ │ ├─45241 /opt/harmonic/MediaStreamPackageController/rmp/MDSMain --ip 0.0.0.0 --port 20201
           │ │ ├─46973 ./ott_egress_proxy
           │ │ ├─47973 nginx: master process /usr/local/nginx/sbin/nginx -c /opt/harmonic/mds/nginx.conf
           │ │ ├─52324 nginx: worker process
           │ │ ├─52325 nginx: worker process
           │ │ ├─52326 nginx: worker process
           │ │ ├─52327 nginx: worker process
           │ │ ├─52328 nginx: worker process
           │ │ ├─52329 nginx: worker process
           │ │ ├─52330 nginx: worker process
           │ │ ├─52331 nginx: worker process
           │ │ ├─52332 nginx: worker process
           │ │ ├─52333 nginx: worker process
           │ │ ├─52334 nginx: worker process
           │ │ ├─52335 nginx: worker process
           │ │ ├─52336 nginx: worker process
           │ │ ├─52337 nginx: worker process
           │ │ ├─52338 nginx: worker process
           │ │ ├─52339 nginx: worker process
           │ │ ├─52340 nginx: worker process
           │ │ ├─52341 nginx: worker process
           │ │ ├─52342 nginx: worker process
           │ │ ├─52343 nginx: worker process
           │ │ ├─52344 nginx: worker process
           │ │ ├─52345 nginx: worker process
           │ │ ├─52346 nginx: worker process
           │ │ ├─52347 nginx: worker process
           │ │ ├─52348 nginx: worker process
           │ │ ├─52349 nginx: worker process
           │ │ ├─52350 nginx: worker process
           │ │ ├─52351 nginx: worker process
           │ │ ├─52352 nginx: worker process
           │ │ ├─52353 nginx: worker process
           │ │ ├─52354 nginx: worker process
           │ │ ├─52355 nginx: worker process
           │ │ ├─52356 nginx: worker process
           │ │ ├─52357 nginx: worker process
           │ │ ├─52358 nginx: worker process
           │ │ ├─52359 nginx: worker process
           │ │ ├─52360 nginx: worker process
           │ │ ├─52361 nginx: worker process
           │ │ ├─52362 nginx: worker process
           │ │ ├─52363 nginx: worker process
           │ │ ├─52364 nginx: worker process
           │ │ ├─52365 nginx: worker process
           │ │ ├─52366 nginx: worker process
           │ │ ├─52367 nginx: worker process
           │ │ ├─52368 nginx: worker process
           │ │ ├─52369 nginx: worker process
           │ │ ├─52370 nginx: worker process
           │ │ ├─52371 nginx: worker process
           │ │ ├─52372 nginx: worker process
           │ │ ├─52373 nginx: worker process
           │ │ ├─52374 nginx: worker process
           │ │ ├─52375 nginx: worker process
           │ │ ├─52376 nginx: worker process
           │ │ ├─52377 nginx: worker process
           │ │ ├─52378 nginx: worker process
           │ │ ├─52379 nginx: worker process
           │ │ ├─52380 nginx: worker process
           │ │ ├─52381 nginx: worker process
           │ │ ├─52382 nginx: worker process
           │ │ ├─52383 nginx: worker process
           │ │ ├─52384 nginx: worker process
           │ │ ├─52385 nginx: worker process
           │ │ ├─52386 nginx: worker process
           │ │ ├─52387 nginx: worker process
           │ │ ├─52388 nginx: worker process
           │ │ ├─52389 nginx: worker process
           │ │ ├─52390 nginx: worker process
           │ │ ├─52391 nginx: worker process
           │ │ ├─52392 nginx: worker process
           │ │ ├─52393 nginx: worker process
           │ │ ├─52394 nginx: worker process
           │ │ ├─52395 nginx: worker process
           │ │ ├─52396 nginx: worker process
           │ │ ├─52397 nginx: worker process
           │ │ ├─52398 nginx: worker process
           │ │ ├─52399 nginx: worker process
           │ │ ├─52400 nginx: worker process
           │ │ ├─52401 nginx: worker process
           │ │ ├─52402 nginx: worker process
           │ │ ├─52403 nginx: worker process
           │ │ ├─52404 nginx: worker process
           │ │ ├─52405 nginx: worker process
           │ │ ├─52406 nginx: worker process
           │ │ ├─52408 nginx: worker process
           │ │ ├─52409 nginx: worker process
           │ │ ├─52410 nginx: worker process
           │ │ ├─52411 nginx: worker process
           │ │ ├─52412 nginx: worker process
           │ │ ├─52413 nginx: worker process
           │ │ ├─52414 nginx: worker process
           │ │ ├─52415 nginx: worker process
           │ │ ├─52416 nginx: worker process
           │ │ ├─52417 nginx: worker process
           │ │ ├─52418 nginx: worker process
           │ │ ├─52419 nginx: worker process
           │ │ ├─52420 nginx: worker process
           │ │ ├─52421 nginx: worker process
           │ │ ├─52422 nginx: worker process
           │ │ ├─52423 nginx: worker process
           │ │ ├─52424 nginx: worker process
           │ │ ├─52426 nginx: worker process
           │ │ ├─52427 nginx: worker process
           │ │ ├─52428 nginx: worker process
           │ │ ├─52429 nginx: worker process
           │ │ ├─52430 nginx: worker process
           │ │ ├─52431 nginx: worker process
           │ │ ├─52432 nginx: worker process
           │ │ ├─52433 nginx: worker process
           │ │ ├─52434 nginx: worker process
           │ │ ├─52435 nginx: worker process
           │ │ ├─52436 nginx: worker process
           │ │ ├─52437 nginx: worker process
           │ │ ├─52438 nginx: worker process
           │ │ ├─52439 nginx: worker process
           │ │ ├─52440 nginx: worker process
           │ │ ├─52441 nginx: worker process
           │ │ ├─52442 nginx: worker process
           │ │ ├─52443 nginx: worker process
           │ │ ├─52448 nginx: worker process
           │ │ ├─52449 nginx: worker process
           │ │ ├─52450 nginx: worker process
           │ │ ├─52451 nginx: worker process
           │ │ ├─52452 nginx: worker process
           │ │ ├─52453 nginx: worker process
           │ │ ├─52454 nginx: worker process
           │ │ ├─52455 nginx: worker process
           │ │ ├─52456 nginx: worker process
           │ │ ├─52457 nginx: worker process
           │ │ ├─52458 nginx: worker process
           │ │ ├─52459 nginx: worker process
           │ │ ├─52460 nginx: worker process
           │ │ ├─52461 nginx: worker process
           │ │ ├─52462 nginx: worker process
           │ │ ├─52463 nginx: worker process
           │ │ ├─52464 nginx: worker process
           │ │ ├─52465 nginx: worker process
           │ │ ├─52466 nginx: worker process
           │ │ ├─52467 nginx: worker process
           │ │ ├─52468 nginx: worker process
           │ │ ├─52469 nginx: worker process
           │ │ ├─52470 nginx: worker process
           │ │ ├─52471 nginx: worker process
           │ │ ├─52472 nginx: worker process
           │ │ ├─52473 nginx: worker process
           │ │ ├─52474 nginx: worker process
           │ │ ├─52479 nginx: worker process
           │ │ ├─52480 nginx: worker process
           │ │ ├─52481 nginx: worker process
           │ │ ├─52482 nginx: worker process
           │ │ ├─52483 nginx: worker process
           │ │ ├─52484 nginx: worker process
           │ │ ├─52485 nginx: worker process
           │ │ ├─52486 nginx: worker process
           │ │ ├─52487 nginx: worker process
           │ │ ├─52488 nginx: worker process
           │ │ ├─52489 nginx: worker process
           │ │ ├─52490 nginx: worker process
           │ │ ├─52491 nginx: worker process
           │ │ ├─52492 nginx: worker process
           │ │ ├─52493 nginx: worker process
           │ │ ├─52494 nginx: worker process
           │ │ ├─52495 nginx: worker process
           │ │ ├─52496 nginx: worker process
           │ │ ├─52497 nginx: worker process
           │ │ ├─52498 nginx: worker process
           │ │ ├─52499 nginx: worker process
           │ │ ├─52500 nginx: worker process
           │ │ ├─52501 nginx: worker process
           │ │ └─52502 nginx: cache manager process
           │ ├─f1e21bbbc7cb957da13db6b46c64353a04a7c1d1fd670772e7adc9faf6e78fa3
           │ │ └─15804 java -Xms256m -Xmx512m -jar /opt/exhibitor/exhibitor.jar --port 8081 --defaultconfig /opt/exhibitor/exhibitor.properties --configtype file --filesystembackup false
           │ ├─f35a227c889252874195cfa019db37de8dd32f8c0a56e2dabdc9040729bdebed
           │ │ └─13770 /pause
           │ ├─f4065ad589e13db0b84c7391204b93094e65cb429fb782299eb826113accb8ea
           │ │ └─13765 /pause
           │ ├─fcac4959fbbd538807c041a643e8ae251eb6ca11af5c1415125b37871a5102e1
           │ │ └─15733 java -jar /opt/harmonic/xos/upgrade-rollback/xos_upgrade_rollback_worker.jar -Xms48m -Xmx96m -XX:+ExitOnOutOfMemoryError -XX:MaxMetaspaceSize=96m -XX:CompressedClassSpaceSize=48m
           │ ├─fe01a89fedb057cc98e77dfcaadada33e8d2afe86478b11f263d5f2e0a9cdf70
           │ │ └─13739 /pause
           │ ├─fe3a782b6ddb59dc473fbf1bfad0404b1d5a2a9b06b8dc15cc7474ea935194c5
           │ │ └─48732 java -Xmx640m -XX:+UseG1GC -XX:NativeMemoryTracking=summary -jar /tmd/app.jar
           │ └─fe68ab2c54c4e080fa59d2704214383fbd0d9afeee08b6f32fbda016dfe38caf
           │   └─45046 /pause
           ├─system.slice
           │ ├─NetworkManager.service
           │ │ └─4574 /usr/sbin/NetworkManager --no-daemon
           │ ├─amsd.service
           │ │ └─8867 /sbin/amsd -f
           │ ├─atd.service
           │ │ └─8918 /usr/sbin/atd -f
           │ ├─atop.service
           │ │ └─440176 /usr/bin/atop -S -w /var/log/atop27/atop_20251118 600
           │ ├─containerd.service
           │ │ ├─ 8866 /usr/local/bin/containerd
           │ │ ├─13338 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 9b7ebe191af1caee7c18acca4db8ef2c3c671c90ebed84dda923eaa5e4f16cad -address /run/containerd/containerd.sock
           │ │ ├─13339 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id f4065ad589e13db0b84c7391204b93094e65cb429fb782299eb826113accb8ea -address /run/containerd/containerd.sock
           │ │ ├─13340 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id fe01a89fedb057cc98e77dfcaadada33e8d2afe86478b11f263d5f2e0a9cdf70 -address /run/containerd/containerd.sock
           │ │ ├─13342 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id b521bb9fc800522d5ddc3a138193d4a08f4d51ce79ee0d19a6b3e42d1d4a4ee4 -address /run/containerd/containerd.sock
           │ │ ├─13343 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 122f73268927d80259bbcdb7f8459c557a41c549eed251b9c19c06ea29fb9f3c -address /run/containerd/containerd.sock
           │ │ ├─13344 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 97fe6cf2172869acf381caf38e9c77a80dd4ffb5ec9ffd342bb1297327a0c61c -address /run/containerd/containerd.sock
           │ │ ├─13345 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id f35a227c889252874195cfa019db37de8dd32f8c0a56e2dabdc9040729bdebed -address /run/containerd/containerd.sock
           │ │ ├─13648 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 0aa4eddeb5b3286b2b60bf5210fa41e2b8bc42f89958b23ba2038700b985324e -address /run/containerd/containerd.sock
           │ │ ├─13836 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 82b42c91685186661522be88ab242d1fdfde63fc80e20d245e230c4a43b91076 -address /run/containerd/containerd.sock
           │ │ ├─13974 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id d47371732f56f14f031e0b7b210ea0c0947efe1c4bf8fa70b5e0c3ebb888ecfa -address /run/containerd/containerd.sock
           │ │ ├─14063 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id bd5c5cd2d6fdb3330412b7f2a136f6c40255e3f17d2e12cb4e727dbadb0715a2 -address /run/containerd/containerd.sock
           │ │ ├─14593 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id e32dbeb647266922c1ed7cbf28d0793f5eb1684ee8ea9245e799311514fddcf8 -address /run/containerd/containerd.sock
           │ │ ├─15347 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 945e7003246f1d85e24f7367e5a3332fcc3bfe456f7f7b9fcc12cd5399d0ed27 -address /run/containerd/containerd.sock
           │ │ ├─15584 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id d11743e05135a6b7daa52339ed04bbac1981ffc8018794fe1c83b1da7b182e49 -address /run/containerd/containerd.sock
           │ │ ├─16084 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 4410a4c85c7991ba762df13284942daddcb3f54a86de129e666c5728aaaa401c -address /run/containerd/containerd.sock
           │ │ ├─16614 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id bb8f9aba5a9cfe49eda5b1007ecac6c2228462f77806cb7801aa820df7b2f0a4 -address /run/containerd/containerd.sock
           │ │ ├─24364 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 0d7155d211873ab9dd56c492ca84a6e4b4923853bba2d738ec0b87355cd4d5c7 -address /run/containerd/containerd.sock
           │ │ ├─25399 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 7df7db3e64340a5a722340e86d3ff57667247c0af41c3901b967d7282dcabb82 -address /run/containerd/containerd.sock
           │ │ ├─25721 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 77a2d3f8700f25d768cab0b31d993da60466e1cfeb0c21451e503ba6b4caa4f0 -address /run/containerd/containerd.sock
           │ │ ├─26954 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 048a23daccd11173da5ccd3aed2475ac58bd2f5c48e564c56d3867c90e407e8b -address /run/containerd/containerd.sock
           │ │ ├─30076 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id bdd3f0371b20c26eabbf8c0c7141ac0992f046ae3f8d0673711e30a1078f8c0d -address /run/containerd/containerd.sock
           │ │ ├─30238 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 1d580c48cbcd18e29e222541f0507c3fb28987d91d35b55bd98d39e7d7c21917 -address /run/containerd/containerd.sock
           │ │ ├─30634 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 86235cf3a3f9e3cfc02fc7f4888ecb5e3cfa9c0ec97e426d2ae1ce316b32c197 -address /run/containerd/containerd.sock
           │ │ ├─40260 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 088d676e25b836ba1cad2c6a51c82f123437319e49f2e76950f72abfe9bf1927 -address /run/containerd/containerd.sock
           │ │ ├─40264 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 722977738813a4b4dba7b54d441862d21d5970ae817c4d9a8ce4634487fc6687 -address /run/containerd/containerd.sock
           │ │ ├─41057 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 68567ef6b45f1055433d0f3e6fbcd064ef9bb98190dd051f69fd12cff808e30f -address /run/containerd/containerd.sock
           │ │ ├─42434 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 4756130fd5ad708a904a99a0cad190c227812ba5f85f488054b470d5d9ab4fbb -address /run/containerd/containerd.sock
           │ │ ├─42443 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id ecb389f6e488e34ee05de07d5ba0b6a865880613de9512f762d02a25ef996982 -address /run/containerd/containerd.sock
           │ │ ├─42451 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 103288724dcc47b7a997f0c219c0fad5d24fe11321d3a9fbfa04ba4763764450 -address /run/containerd/containerd.sock
           │ │ ├─43094 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 8c433fb4c3fc17ad704bf6e54b939e9997d87dd176929ea899a6a96130f63b4f -address /run/containerd/containerd.sock
           │ │ ├─44628 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 42840c0de72c59943724e2371a66aadbd0d28beaf75a202ec52c78ab12091840 -address /run/containerd/containerd.sock
           │ │ ├─44852 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 00e98de63a20a316ed80b9fb58d481ce95ac134055b07fa246fb8be067636213 -address /run/containerd/containerd.sock
           │ │ ├─45020 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id fe68ab2c54c4e080fa59d2704214383fbd0d9afeee08b6f32fbda016dfe38caf -address /run/containerd/containerd.sock
           │ │ ├─45663 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id d8fdd5b3c74ad34b276029b4db554a5ac1526f30d1ed42e889a2d11810f97e27 -address /run/containerd/containerd.sock
           │ │ ├─46559 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id b4f17c609632632a613da8bf68ce25789705dbcb014364fa3b7419ae46c316a7 -address /run/containerd/containerd.sock
           │ │ ├─47074 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 8e6998139a624e92be3d5aec5ae450aef662eb452d6c1b589e89721e082040a1 -address /run/containerd/containerd.sock
           │ │ ├─47989 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 93457ab6bb3bd0b0033873f9f7b5eb98d2daaa300555d4b050a89460ee8dbe06 -address /run/containerd/containerd.sock
           │ │ ├─48450 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 6ae50f2a676c640e423edd4d4bf7cff29770d8a9e82293d7113d8fcef8912e53 -address /run/containerd/containerd.sock
           │ │ └─49612 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 3ad1a6a103e84d28055fe44b77c7699f1de9ca54723853a94bc8fe5f7309cb85 -address /run/containerd/containerd.sock
           │ ├─cpqIde.service
           │ │ └─8863 /sbin/cpqIde -f
           │ ├─crond.service
           │ │ └─8944 /usr/sbin/crond -n
           │ ├─dbus.service
           │ │ └─4249 /usr/bin/dbus-daemon --system --address=systemd: --nofork --nopidfile --systemd-activation --syslog-only
           │ ├─dentry_cache_cleaner.service
           │ │ ├─ 4238 /bin/bash /usr/local/bin/dentry_cache_cleaner.sh
           │ │ └─10078 sleep 60
           │ ├─earlyoom.service
           │ │ └─4378 /opt/omneon/bin/earlyoom -i -m 4 -r 30
           │ ├─etcd.service
           │ │ └─4383 /usr/local/bin/etcd --name etcd --cert-file=/etc/etcd/kubernetes.pem --key-file=/etc/etcd/kubernetes-key.pem --trusted-ca-file=/etc/etcd/ca.pem --client-cert-auth --listen-client-urls https://127.0.0.1:2379 --advertise-client-urls https://127.0.0.1:2379 --initial-cluster-state new --auto-compaction-mode=periodic --auto-compaction-retention=100m --data-dir=/var/lib/etcd --logger=zap
           │ ├─gssproxy.service
           │ │ └─8899 /usr/sbin/gssproxy -D
           │ ├─irqbalance.service
           │ │ └─4576 /sbin/irqbalance --policyscript=/opt/omneon/sbin/irqbalance-ban.py --foreground
           │ ├─irqbalance2110.service
           │ │ └─4710 /sbin/irqbalance --policyscript=/opt/omneon/sbin/irqbalance-ban.py --foreground
           │ ├─kube-apiserver.service
           │ │ └─8949 /usr/local/bin/kube-apiserver --advertise-address=192.0.2.248 --allow-privileged=true --apiserver-count=1 --audit-log-maxage=30 --audit-log-maxbackup=3 --audit-log-maxsize=100 --audit-log-path=/var/log/apiserver/audit.log --authorization-mode=Node,RBAC --bind-address=0.0.0.0 --client-ca-file=/var/lib/kubernetes/ca.pem --enable-admission-plugins=NamespaceLifecycle,NodeRestriction,LimitRanger,ServiceAccount,DefaultStorageClass,ResourceQuota --etcd-cafile=/var/lib/kubernetes/ca.pem --etcd-certfile=/var/lib/kubernetes/kubernetes.pem --etcd-keyfile=/var/lib/kubernetes/kubernetes-key.pem --etcd-servers=https://127.0.0.1:2379 --event-ttl=1h --encryption-provider-config=/var/lib/kubernetes/encryption-config.yaml --kubelet-certificate-authority=/var/lib/kubernetes/ca.pem --kubelet-client-certificate=/var/lib/kubernetes/kubernetes.pem --kubelet-client-key=/var/lib/kubernetes/kubernetes-key.pem --profiling=false --runtime-config=api/all=true --service-account-key-file=/var/lib/kubernetes/service-account.pem --service-account-signing-key-file=/var/lib/kubernetes/service-account-key.pem --service-account-issuer=https://192.0.2.248:6443 --service-cluster-ip-range=203.0.113.0/24 --service-node-port-range=2500-50055 --tls-cert-file=/var/lib/kubernetes/kubernetes.pem --tls-private-key-file=/var/lib/kubernetes/kubernetes-key.pem --tls-cipher-suites=TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256,TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305,TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384,TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305,TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384,TLS_RSA_WITH_AES_256_GCM_SHA384,TLS_RSA_WITH_AES_128_GCM_SHA256 --shutdown-watch-termination-grace-period=5s --v=2
           │ ├─kube-controller-manager.service
           │ │ └─10312 /usr/local/bin/kube-controller-manager --bind-address=127.0.0.1 --cluster-cidr=198.51.100.0/24 --cluster-name=kubernetes --cluster-signing-cert-file=/var/lib/kubernetes/ca.pem --cluster-signing-key-file=/var/lib/kubernetes/ca-key.pem --kubeconfig=/var/lib/kubernetes/kube-controller-manager.kubeconfig --leader-elect=true --profiling=false --root-ca-file=/var/lib/kubernetes/ca.pem --service-account-private-key-file=/var/lib/kubernetes/service-account-key.pem --service-cluster-ip-range=203.0.113.0/24 --terminated-pod-gc-threshold=10 --use-service-account-credentials=true --v=2
           │ ├─kube-proxy.service
           │ │ └─10310 /usr/local/bin/kube-proxy --config=/var/lib/kube-proxy/kube-proxy-config.yaml --proxy-port-range 40000-42000
           │ ├─kube-scheduler.service
           │ │ └─10306 /usr/local/bin/kube-scheduler --bind-address=127.0.0.1 --config=/etc/kubernetes/config/kube-scheduler.yaml --profiling=false --v=2
           │ ├─kubelet.service
           │ │ └─12424 /usr/local/bin/kubelet --config=/var/lib/kubelet/kubelet-config.yaml --kubeconfig=/var/lib/kubelet/kubeconfig --hostname-override=vosflex --container-runtime-endpoint=unix:///run/containerd/containerd.sock --v=2
           │ ├─mr_cpqScsi.service
           │ │ ├─8865 /sbin/mr_cpqScsi -f
           │ │ └─9343 /sbin/mr_cpqScsi -f
           │ ├─nmi.service
           │ │ ├─ 8858 /bin/java -Dorg.apache.commons.logging.Log=org.apache.commons.logging.impl.SimpleLog -Dorg.apache.commons.logging.simplelog.defaultlog=warn -Dorg.apache.commons.logging.simplelog.showdatetime=false -XX:+UseConcMarkSweepGC -Xms128M -Xmx1024M -jar NMIService.jar
           │ │ ├─ 9551 /bin/bash /opt/omneon/sbin/techdump.sh /corefiles/TechDump-XOSEncoder-01-20251118-1355.zip
           │ │ ├─ 9571 /bin/bash /opt/omneon/sbin/techdump.sh /corefiles/TechDump-XOSEncoder-01-20251118-1355.zip
           │ │ ├─ 9585 /bin/bash /etc/techdump.d/10platform.sh
           │ │ ├─11154 /bin/systemctl status --all
           │ │ └─11652 sleep 0.2
           │ ├─nminet.service
           │ │ ├─6858 python3 /opt/omneon/nmi/nminet/bin/../src/nminet.py -v
           │ │ ├─6965 /usr/bin/teamd -N -o -U -d -n -t net1 -c {"device": "net1", "mcast_rejoin": {"count": 3, "interval": 25}, "notify_peers": {"count": 3, "interval": 25}, "runner": {"name": "activebackup", "hwaddr_policy": "same_all"}, "link_watch": {"name": "ethtool"}}
           │ │ ├─7002 /usr/bin/teamd -N -o -U -d -n -t net2 -c {"device": "net2", "mcast_rejoin": {"count": 3, "interval": 25}, "notify_peers": {"count": 3, "interval": 25}, "runner": {"name": "activebackup", "hwaddr_policy": "same_all"}, "link_watch": {"name": "ethtool"}}
           │ │ ├─7022 /usr/bin/teamd -N -o -U -d -n -t net3 -c {"device": "net3", "mcast_rejoin": {"count": 3, "interval": 25}, "notify_peers": {"count": 3, "interval": 25}, "runner": {"name": "activebackup", "hwaddr_policy": "same_all"}, "link_watch": {"name": "ethtool"}}
           │ │ ├─7058 /usr/bin/teamd -N -o -U -d -n -t net4 -c {"device": "net4", "mcast_rejoin": {"count": 3, "interval": 25}, "notify_peers": {"count": 3, "interval": 25}, "runner": {"name": "activebackup", "hwaddr_policy": "same_all"}, "link_watch": {"name": "ethtool"}}
           │ │ ├─7071 /usr/bin/teamd -N -o -U -d -n -t net5 -c {"device": "net5", "mcast_rejoin": {"count": 3, "interval": 25}, "notify_peers": {"count": 3, "interval": 25}, "runner": {"name": "activebackup", "hwaddr_policy": "same_all"}, "link_watch": {"name": "ethtool"}}
           │ │ ├─7107 /usr/bin/teamd -N -o -U -d -n -t net6 -c {"device": "net6", "mcast_rejoin": {"count": 3, "interval": 25}, "notify_peers": {"count": 3, "interval": 25}, "runner": {"name": "activebackup", "hwaddr_policy": "same_all"}, "link_watch": {"name": "ethtool"}}
           │ │ ├─7122 /usr/bin/teamd -N -o -U -d -n -t net9 -c {"device": "net9", "mcast_rejoin": {"count": 3, "interval": 25}, "notify_peers": {"count": 3, "interval": 25}, "runner": {"name": "activebackup", "hwaddr_policy": "same_all"}, "link_watch": {"name": "ethtool"}}
           │ │ └─7156 /usr/bin/teamd -N -o -U -d -n -t net10 -c {"device": "net10", "mcast_rejoin": {"count": 3, "interval": 25}, "notify_peers": {"count": 3, "interval": 25}, "runner": {"name": "activebackup", "hwaddr_policy": "same_all"}, "link_watch": {"name": "ethtool"}}
           │ ├─nmiperfmon_fans.service
           │ │ └─8792 /opt/omneon/nmi/nmiperfmon
           │ ├─nmiperfmon_power_supplies.service
           │ │ └─8800 /opt/omneon/nmi/nmiperfmon
           │ ├─nmiperfmon_raid_controllers.service
           │ │ └─8804 /opt/omneon/nmi/nmiperfmon
           │ ├─nmipoller_caminfo.service
           │ │ └─8796 python3.11 /opt/omneon/nmi/nmipollers/src/caminfo/main.py
           │ ├─nmipoller_ilostatus.service
           │ │ └─8852 python3.11 /opt/omneon/nmi/nmipollers/src/ilostatus/main.py
           │ ├─nmipoller_raidmr.service
           │ │ └─8787 python3.11 /opt/omneon/nmi/nmipollers/src/raidmr/main.py
           │ ├─nmipoller_raidssa.service
           │ │ └─8856 python3.11 /opt/omneon/nmi/nmipollers/src/raidssa/main.py
           │ ├─node_teleport_runner.service
           │ │ └─29731 python3 /opt/teleport/bin/node_teleport_runner.py
           │ ├─perfstatserver.service
           │ │ ├─ 8798 /bin/java -XX:+UseSerialGC -Xms32M -Xmx512M -jar PerfStatsServer.jar
           │ │ ├─11218 /usr/sbin/arping -c 10 -D -I net1 10.10.106.144
           │ │ ├─11572 /usr/sbin/arping -c 10 -D -I net10 100.8.248.10
           │ │ └─11638 /usr/sbin/arping -c 10 -D -I net9 100.8.248.16
           │ ├─polkit.service
           │ │ └─9285 /usr/lib/polkit-1/polkitd --no-debug
           │ ├─rpcbind.service
           │ │ └─3561 /usr/bin/rpcbind -w -f
           │ ├─rsyslog.service
           │ │ └─8818 /usr/sbin/rsyslogd -n
           │ ├─smad.service
           │ │ ├─8864 /sbin/smad
           │ │ ├─8881 /sbin/smad
           │ │ ├─8882 /sbin/smad
           │ │ └─9461 /sbin/smad
           │ ├─smartd.service
           │ │ └─4240 /usr/sbin/smartd -n -q never
           │ ├─sshd.service
           │ │ └─8807 /usr/sbin/sshd -D
           │ ├─system-getty.slice
           │ │ ├─getty@tty1.service
           │ │ │ └─30411 /sbin/agetty -o -p -- \u --noclear tty1 linux
           │ │ ├─getty@tty3.service
           │ │ │ └─4103 /sbin/agetty -o -p -- \u --issue-file /etc/issue.early --noclear tty3 linux
           │ │ └─getty@tty4.service
           │ │   └─4117 /sbin/agetty -o -p -- \u --issue-file /etc/issue.early --noclear tty4 linux
           │ ├─system-serial\x2dgetty.slice
           │ │ └─serial-getty@ttyS0.service
           │ │   └─8909 /sbin/agetty -o -p -- \u --keep-baud 115200,38400,9600 ttyS0 vt220
           │ ├─systemd-journald.service
           │ │ └─3086 /usr/lib/systemd/systemd-journald
           │ ├─systemd-logind.service
           │ │ └─4257 /usr/lib/systemd/systemd-logind
           │ ├─systemd-udevd.service
           │ │ └─2430 /usr/lib/systemd/systemd-udevd
           │ ├─teleportv2.service
           │ │ └─8811 sleep infinity
           │ ├─timemaster.service
           │ │ ├─8885 /usr/sbin/timemaster -f /etc/timemaster.conf
           │ │ └─8897 /usr/sbin/chronyd -u chrony -n -f /var/run/timemaster/chrony.conf
           │ ├─tuned.service
           │ │ └─8832 /usr/libexec/platform-python -Es /usr/sbin/tuned -l -P
           │ └─turbostatlog.service
           │   └─4190 /bin/turbostat -i 3600
           └─user.slice
             └─cpuset-init.service
               └─2418 /bin/sleep infinity

Nov 18 13:55:35 XOSEncoder-01 systemd[1]: proc-sys-fs-binfmt_misc.automount: Got automount request for /proc/sys/fs/binfmt_misc, triggered by 9604 (sysctl)
Nov 18 13:55:35 XOSEncoder-01 systemd[1]: Mounting Arbitrary Executable File Formats File System...
Nov 18 13:55:35 XOSEncoder-01 systemd[1]: Mounted Arbitrary Executable File Formats File System.
Nov 18 13:55:45 XOSEncoder-01 systemd[1]: nmiperfmon_nics.service: Service RestartSec=30s expired, scheduling restart.
Nov 18 13:55:45 XOSEncoder-01 systemd[1]: nmiperfmon_nics.service: Scheduled restart job, restart counter is at 2369.
Nov 18 13:55:45 XOSEncoder-01 systemd[1]: Stopped NMI Performance Monitor.
Nov 18 13:55:45 XOSEncoder-01 systemd[1]: Starting NMI Performance Monitor...
Nov 18 13:55:45 XOSEncoder-01 systemd[1]: Started NMI Performance Monitor.
Nov 18 13:55:48 XOSEncoder-01 systemd[1]: nmiperfmon_nics.service: Main process exited, code=exited, status=2/INVALIDARGUMENT
Nov 18 13:55:48 XOSEncoder-01 systemd[1]: nmiperfmon_nics.service: Failed with result 'exit-code'.

● system-getty.slice
   Loaded: loaded
   Active: active since Mon 2025-11-17 16:03:50 UTC; 21h ago
    Tasks: 3
   Memory: 552.0K
   CGroup: /system.slice/system-getty.slice
           ├─getty@tty1.service
           │ └─30411 /sbin/agetty -o -p -- \u --noclear tty1 linux
           ├─getty@tty3.service
           │ └─4103 /sbin/agetty -o -p -- \u --issue-file /etc/issue.early --noclear tty3 linux
           └─getty@tty4.service
             └─4117 /sbin/agetty -o -p -- \u --issue-file /etc/issue.early --noclear tty4 linux

● system-mlnx_interface_mgr.slice
   Loaded: loaded
   Active: active since Mon 2025-11-17 16:03:53 UTC; 21h ago
    Tasks: 0
   Memory: 488.0K
   CGroup: /system.slice/system-mlnx_interface_mgr.slice

Nov 17 16:03:53 vosflex.localdomain systemd[1]: Created slice system-mlnx_interface_mgr.slice.

● system-serial\x2dgetty.slice
   Loaded: loaded
   Active: active since Mon 2025-11-17 16:03:50 UTC; 21h ago
    Tasks: 1
   Memory: 228.0K
   CGroup: /system.slice/system-serial\x2dgetty.slice
           └─serial-getty@ttyS0.service
             └─8909 /sbin/agetty -o -p -- \u --keep-baud 115200,38400,9600 ttyS0 vt220

● system-sshd\x2dkeygen.slice
   Loaded: loaded
   Active: active since Mon 2025-11-17 16:03:50 UTC; 21h ago
    Tasks: 0
   Memory: 0B
   CGroup: /system.slice/system-sshd\x2dkeygen.slice

● system.slice - System Slice
   Loaded: loaded
   Active: active since Mon 2025-11-17 16:03:48 UTC; 21h ago
     Docs: man:systemd.special(7)
    Tasks: 1571
   Memory: 4.4G
   CGroup: /system.slice
           ├─NetworkManager.service
           │ └─4574 /usr/sbin/NetworkManager --no-daemon
           ├─amsd.service
           │ └─8867 /sbin/amsd -f
           ├─atd.service
           │ └─8918 /usr/sbin/atd -f
           ├─atop.service
           │ └─440176 /usr/bin/atop -S -w /var/log/atop27/atop_20251118 600
           ├─containerd.service
           │ ├─ 8866 /usr/local/bin/containerd
           │ ├─13338 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 9b7ebe191af1caee7c18acca4db8ef2c3c671c90ebed84dda923eaa5e4f16cad -address /run/containerd/containerd.sock
           │ ├─13339 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id f4065ad589e13db0b84c7391204b93094e65cb429fb782299eb826113accb8ea -address /run/containerd/containerd.sock
           │ ├─13340 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id fe01a89fedb057cc98e77dfcaadada33e8d2afe86478b11f263d5f2e0a9cdf70 -address /run/containerd/containerd.sock
           │ ├─13342 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id b521bb9fc800522d5ddc3a138193d4a08f4d51ce79ee0d19a6b3e42d1d4a4ee4 -address /run/containerd/containerd.sock
           │ ├─13343 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 122f73268927d80259bbcdb7f8459c557a41c549eed251b9c19c06ea29fb9f3c -address /run/containerd/containerd.sock
           │ ├─13344 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 97fe6cf2172869acf381caf38e9c77a80dd4ffb5ec9ffd342bb1297327a0c61c -address /run/containerd/containerd.sock
           │ ├─13345 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id f35a227c889252874195cfa019db37de8dd32f8c0a56e2dabdc9040729bdebed -address /run/containerd/containerd.sock
           │ ├─13648 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 0aa4eddeb5b3286b2b60bf5210fa41e2b8bc42f89958b23ba2038700b985324e -address /run/containerd/containerd.sock
           │ ├─13836 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 82b42c91685186661522be88ab242d1fdfde63fc80e20d245e230c4a43b91076 -address /run/containerd/containerd.sock
           │ ├─13974 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id d47371732f56f14f031e0b7b210ea0c0947efe1c4bf8fa70b5e0c3ebb888ecfa -address /run/containerd/containerd.sock
           │ ├─14063 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id bd5c5cd2d6fdb3330412b7f2a136f6c40255e3f17d2e12cb4e727dbadb0715a2 -address /run/containerd/containerd.sock
           │ ├─14593 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id e32dbeb647266922c1ed7cbf28d0793f5eb1684ee8ea9245e799311514fddcf8 -address /run/containerd/containerd.sock
           │ ├─15347 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 945e7003246f1d85e24f7367e5a3332fcc3bfe456f7f7b9fcc12cd5399d0ed27 -address /run/containerd/containerd.sock
           │ ├─15584 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id d11743e05135a6b7daa52339ed04bbac1981ffc8018794fe1c83b1da7b182e49 -address /run/containerd/containerd.sock
           │ ├─16084 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 4410a4c85c7991ba762df13284942daddcb3f54a86de129e666c5728aaaa401c -address /run/containerd/containerd.sock
           │ ├─16614 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id bb8f9aba5a9cfe49eda5b1007ecac6c2228462f77806cb7801aa820df7b2f0a4 -address /run/containerd/containerd.sock
           │ ├─24364 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 0d7155d211873ab9dd56c492ca84a6e4b4923853bba2d738ec0b87355cd4d5c7 -address /run/containerd/containerd.sock
           │ ├─25399 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 7df7db3e64340a5a722340e86d3ff57667247c0af41c3901b967d7282dcabb82 -address /run/containerd/containerd.sock
           │ ├─25721 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 77a2d3f8700f25d768cab0b31d993da60466e1cfeb0c21451e503ba6b4caa4f0 -address /run/containerd/containerd.sock
           │ ├─26954 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 048a23daccd11173da5ccd3aed2475ac58bd2f5c48e564c56d3867c90e407e8b -address /run/containerd/containerd.sock
           │ ├─30076 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id bdd3f0371b20c26eabbf8c0c7141ac0992f046ae3f8d0673711e30a1078f8c0d -address /run/containerd/containerd.sock
           │ ├─30238 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 1d580c48cbcd18e29e222541f0507c3fb28987d91d35b55bd98d39e7d7c21917 -address /run/containerd/containerd.sock
           │ ├─30634 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 86235cf3a3f9e3cfc02fc7f4888ecb5e3cfa9c0ec97e426d2ae1ce316b32c197 -address /run/containerd/containerd.sock
           │ ├─40260 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 088d676e25b836ba1cad2c6a51c82f123437319e49f2e76950f72abfe9bf1927 -address /run/containerd/containerd.sock
           │ ├─40264 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 722977738813a4b4dba7b54d441862d21d5970ae817c4d9a8ce4634487fc6687 -address /run/containerd/containerd.sock
           │ ├─41057 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 68567ef6b45f1055433d0f3e6fbcd064ef9bb98190dd051f69fd12cff808e30f -address /run/containerd/containerd.sock
           │ ├─42434 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 4756130fd5ad708a904a99a0cad190c227812ba5f85f488054b470d5d9ab4fbb -address /run/containerd/containerd.sock
           │ ├─42443 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id ecb389f6e488e34ee05de07d5ba0b6a865880613de9512f762d02a25ef996982 -address /run/containerd/containerd.sock
           │ ├─42451 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 103288724dcc47b7a997f0c219c0fad5d24fe11321d3a9fbfa04ba4763764450 -address /run/containerd/containerd.sock
           │ ├─43094 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 8c433fb4c3fc17ad704bf6e54b939e9997d87dd176929ea899a6a96130f63b4f -address /run/containerd/containerd.sock
           │ ├─44628 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 42840c0de72c59943724e2371a66aadbd0d28beaf75a202ec52c78ab12091840 -address /run/containerd/containerd.sock
           │ ├─44852 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 00e98de63a20a316ed80b9fb58d481ce95ac134055b07fa246fb8be067636213 -address /run/containerd/containerd.sock
           │ ├─45020 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id fe68ab2c54c4e080fa59d2704214383fbd0d9afeee08b6f32fbda016dfe38caf -address /run/containerd/containerd.sock
           │ ├─45663 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id d8fdd5b3c74ad34b276029b4db554a5ac1526f30d1ed42e889a2d11810f97e27 -address /run/containerd/containerd.sock
           │ ├─46559 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id b4f17c609632632a613da8bf68ce25789705dbcb014364fa3b7419ae46c316a7 -address /run/containerd/containerd.sock
           │ ├─47074 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 8e6998139a624e92be3d5aec5ae450aef662eb452d6c1b589e89721e082040a1 -address /run/containerd/containerd.sock
           │ ├─47989 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 93457ab6bb3bd0b0033873f9f7b5eb98d2daaa300555d4b050a89460ee8dbe06 -address /run/containerd/containerd.sock
           │ ├─48450 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 6ae50f2a676c640e423edd4d4bf7cff29770d8a9e82293d7113d8fcef8912e53 -address /run/containerd/containerd.sock
           │ └─49612 /usr/local/bin/containerd-shim-runc-v2 -namespace k8s.io -id 3ad1a6a103e84d28055fe44b77c7699f1de9ca54723853a94bc8fe5f7309cb85 -address /run/containerd/containerd.sock
           ├─cpqIde.service
           │ └─8863 /sbin/cpqIde -f
           ├─crond.service
           │ └─8944 /usr/sbin/crond -n
           ├─dbus.service
           │ └─4249 /usr/bin/dbus-daemon --system --address=systemd: --nofork --nopidfile --systemd-activation --syslog-only
           ├─dentry_cache_cleaner.service
           │ ├─ 4238 /bin/bash /usr/local/bin/dentry_cache_cleaner.sh
           │ └─10078 sleep 60
           ├─earlyoom.service
           │ └─4378 /opt/omneon/bin/earlyoom -i -m 4 -r 30
           ├─etcd.service
           │ └─4383 /usr/local/bin/etcd --name etcd --cert-file=/etc/etcd/kubernetes.pem --key-file=/etc/etcd/kubernetes-key.pem --trusted-ca-file=/etc/etcd/ca.pem --client-cert-auth --listen-client-urls https://127.0.0.1:2379 --advertise-client-urls https://127.0.0.1:2379 --initial-cluster-state new --auto-compaction-mode=periodic --auto-compaction-retention=100m --data-dir=/var/lib/etcd --logger=zap
           ├─gssproxy.service
           │ └─8899 /usr/sbin/gssproxy -D
           ├─irqbalance.service
           │ └─4576 /sbin/irqbalance --policyscript=/opt/omneon/sbin/irqbalance-ban.py --foreground
           ├─irqbalance2110.service
           │ └─4710 /sbin/irqbalance --policyscript=/opt/omneon/sbin/irqbalance-ban.py --foreground
           ├─kube-apiserver.service
           │ └─8949 /usr/local/bin/kube-apiserver --advertise-address=192.0.2.248 --allow-privileged=true --apiserver-count=1 --audit-log-maxage=30 --audit-log-maxbackup=3 --audit-log-maxsize=100 --audit-log-path=/var/log/apiserver/audit.log --authorization-mode=Node,RBAC --bind-address=0.0.0.0 --client-ca-file=/var/lib/kubernetes/ca.pem --enable-admission-plugins=NamespaceLifecycle,NodeRestriction,LimitRanger,ServiceAccount,DefaultStorageClass,ResourceQuota --etcd-cafile=/var/lib/kubernetes/ca.pem --etcd-certfile=/var/lib/kubernetes/kubernetes.pem --etcd-keyfile=/var/lib/kubernetes/kubernetes-key.pem --etcd-servers=https://127.0.0.1:2379 --event-ttl=1h --encryption-provider-config=/var/lib/kubernetes/encryption-config.yaml --kubelet-certificate-authority=/var/lib/kubernetes/ca.pem --kubelet-client-certificate=/var/lib/kubernetes/kubernetes.pem --kubelet-client-key=/var/lib/kubernetes/kubernetes-key.pem --profiling=false --runtime-config=api/all=true --service-account-key-file=/var/lib/kubernetes/service-account.pem --service-account-signing-key-file=/var/lib/kubernetes/service-account-key.pem --service-account-issuer=https://192.0.2.248:6443 --service-cluster-ip-range=203.0.113.0/24 --service-node-port-range=2500-50055 --tls-cert-file=/var/lib/kubernetes/kubernetes.pem --tls-private-key-file=/var/lib/kubernetes/kubernetes-key.pem --tls-cipher-suites=TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256,TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305,TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384,TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305,TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384,TLS_RSA_WITH_AES_256_GCM_SHA384,TLS_RSA_WITH_AES_128_GCM_SHA256 --shutdown-watch-termination-grace-period=5s --v=2
           ├─kube-controller-manager.service
           │ └─10312 /usr/local/bin/kube-controller-manager --bind-address=127.0.0.1 --cluster-cidr=198.51.100.0/24 --cluster-name=kubernetes --cluster-signing-cert-file=/var/lib/kubernetes/ca.pem --cluster-signing-key-file=/var/lib/kubernetes/ca-key.pem --kubeconfig=/var/lib/kubernetes/kube-controller-manager.kubeconfig --leader-elect=true --profiling=false --root-ca-file=/var/lib/kubernetes/ca.pem --service-account-private-key-file=/var/lib/kubernetes/service-account-key.pem --service-cluster-ip-range=203.0.113.0/24 --terminated-pod-gc-threshold=10 --use-service-account-credentials=true --v=2
           ├─kube-proxy.service
           │ └─10310 /usr/local/bin/kube-proxy --config=/var/lib/kube-proxy/kube-proxy-config.yaml --proxy-port-range 40000-42000
           ├─kube-scheduler.service
           │ └─10306 /usr/local/bin/kube-scheduler --bind-address=127.0.0.1 --config=/etc/kubernetes/config/kube-scheduler.yaml --profiling=false --v=2
           ├─kubelet.service
           │ └─12424 /usr/local/bin/kubelet --config=/var/lib/kubelet/kubelet-config.yaml --kubeconfig=/var/lib/kubelet/kubeconfig --hostname-override=vosflex --container-runtime-endpoint=unix:///run/containerd/containerd.sock --v=2
           ├─mr_cpqScsi.service
           │ ├─8865 /sbin/mr_cpqScsi -f
           │ └─9343 /sbin/mr_cpqScsi -f
           ├─nmi.service
           │ ├─ 8858 /bin/java -Dorg.apache.commons.logging.Log=org.apache.commons.logging.impl.SimpleLog -Dorg.apache.commons.logging.simplelog.defaultlog=warn -Dorg.apache.commons.logging.simplelog.showdatetime=false -XX:+UseConcMarkSweepGC -Xms128M -Xmx1024M -jar NMIService.jar
           │ ├─ 9551 /bin/bash /opt/omneon/sbin/techdump.sh /corefiles/TechDump-XOSEncoder-01-20251118-1355.zip
           │ ├─ 9571 /bin/bash /opt/omneon/sbin/techdump.sh /corefiles/TechDump-XOSEncoder-01-20251118-1355.zip
           │ ├─ 9585 /bin/bash /etc/techdump.d/10platform.sh
           │ ├─11154 /bin/systemctl status --all
           │ └─11652 sleep 0.2
           ├─nminet.service
           │ ├─6858 python3 /opt/omneon/nmi/nminet/bin/../src/nminet.py -v
           │ ├─6965 /usr/bin/teamd -N -o -U -d -n -t net1 -c {"device": "net1", "mcast_rejoin": {"count": 3, "interval": 25}, "notify_peers": {"count": 3, "interval": 25}, "runner": {"name": "activebackup", "hwaddr_policy": "same_all"}, "link_watch": {"name": "ethtool"}}
           │ ├─7002 /usr/bin/teamd -N -o -U -d -n -t net2 -c {"device": "net2", "mcast_rejoin": {"count": 3, "interval": 25}, "notify_peers": {"count": 3, "interval": 25}, "runner": {"name": "activebackup", "hwaddr_policy": "same_all"}, "link_watch": {"name": "ethtool"}}
           │ ├─7022 /usr/bin/teamd -N -o -U -d -n -t net3 -c {"device": "net3", "mcast_rejoin": {"count": 3, "interval": 25}, "notify_peers": {"count": 3, "interval": 25}, "runner": {"name": "activebackup", "hwaddr_policy": "same_all"}, "link_watch": {"name": "ethtool"}}
           │ ├─7058 /usr/bin/teamd -N -o -U -d -n -t net4 -c {"device": "net4", "mcast_rejoin": {"count": 3, "interval": 25}, "notify_peers": {"count": 3, "interval": 25}, "runner": {"name": "activebackup", "hwaddr_policy": "same_all"}, "link_watch": {"name": "ethtool"}}
           │ ├─7071 /usr/bin/teamd -N -o -U -d -n -t net5 -c {"device": "net5", "mcast_rejoin": {"count": 3, "interval": 25}, "notify_peers": {"count": 3, "interval": 25}, "runner": {"name": "activebackup", "hwaddr_policy": "same_all"}, "link_watch": {"name": "ethtool"}}
           │ ├─7107 /usr/bin/teamd -N -o -U -d -n -t net6 -c {"device": "net6", "mcast_rejoin": {"count": 3, "interval": 25}, "notify_peers": {"count": 3, "interval": 25}, "runner": {"name": "activebackup", "hwaddr_policy": "same_all"}, "link_watch": {"name": "ethtool"}}
           │ ├─7122 /usr/bin/teamd -N -o -U -d -n -t net9 -c {"device": "net9", "mcast_rejoin": {"count": 3, "interval": 25}, "notify_peers": {"count": 3, "interval": 25}, "runner": {"name": "activebackup", "hwaddr_policy": "same_all"}, "link_watch": {"name": "ethtool"}}
           │ └─7156 /usr/bin/teamd -N -o -U -d -n -t net10 -c {"device": "net10", "mcast_rejoin": {"count": 3, "interval": 25}, "notify_peers": {"count": 3, "interval": 25}, "runner": {"name": "activebackup", "hwaddr_policy": "same_all"}, "link_watch": {"name": "ethtool"}}
           ├─nmiperfmon_fans.service
           │ └─8792 /opt/omneon/nmi/nmiperfmon
           ├─nmiperfmon_power_supplies.service
           │ └─8800 /opt/omneon/nmi/nmiperfmon
           ├─nmiperfmon_raid_controllers.service
           │ └─8804 /opt/omneon/nmi/nmiperfmon
           ├─nmipoller_caminfo.service
           │ └─8796 python3.11 /opt/omneon/nmi/nmipollers/src/caminfo/main.py
           ├─nmipoller_ilostatus.service
           │ └─8852 python3.11 /opt/omneon/nmi/nmipollers/src/ilostatus/main.py
           ├─nmipoller_raidmr.service
           │ └─8787 python3.11 /opt/omneon/nmi/nmipollers/src/raidmr/main.py
           ├─nmipoller_raidssa.service
           │ └─8856 python3.11 /opt/omneon/nmi/nmipollers/src/raidssa/main.py
           ├─node_teleport_runner.service
           │ └─29731 python3 /opt/teleport/bin/node_teleport_runner.py
           ├─perfstatserver.service
           │ ├─ 8798 /bin/java -XX:+UseSerialGC -Xms32M -Xmx512M -jar PerfStatsServer.jar
           │ ├─11218 /usr/sbin/arping -c 10 -D -I net1 10.10.106.144
           │ ├─11572 /usr/sbin/arping -c 10 -D -I net10 100.8.248.10
           │ └─11638 /usr/sbin/arping -c 10 -D -I net9 100.8.248.16
           ├─polkit.service
           │ └─9285 /usr/lib/polkit-1/polkitd --no-debug
           ├─rpcbind.service
           │ └─3561 /usr/bin/rpcbind -w -f
           ├─rsyslog.service
           │ └─8818 /usr/sbin/rsyslogd -n
           ├─smad.service
           │ ├─8864 /sbin/smad
           │ ├─8881 /sbin/smad
           │ ├─8882 /sbin/smad
           │ └─9461 /sbin/smad
           ├─smartd.service
           │ └─4240 /usr/sbin/smartd -n -q never
           ├─sshd.service
           │ └─8807 /usr/sbin/sshd -D
           ├─system-getty.slice
           │ ├─getty@tty1.service
           │ │ └─30411 /sbin/agetty -o -p -- \u --noclear tty1 linux
           │ ├─getty@tty3.service
           │ │ └─4103 /sbin/agetty -o -p -- \u --issue-file /etc/issue.early --noclear tty3 linux
           │ └─getty@tty4.service
           │   └─4117 /sbin/agetty -o -p -- \u --issue-file /etc/issue.early --noclear tty4 linux
           ├─system-serial\x2dgetty.slice
           │ └─serial-getty@ttyS0.service
           │   └─8909 /sbin/agetty -o -p -- \u --keep-baud 115200,38400,9600 ttyS0 vt220
           ├─systemd-journald.service
           │ └─3086 /usr/lib/systemd/systemd-journald
           ├─systemd-logind.service
           │ └─4257 /usr/lib/systemd/systemd-logind
           ├─systemd-udevd.service
           │ └─2430 /usr/lib/systemd/systemd-udevd
           ├─teleportv2.service
           │ └─8811 sleep infinity
           ├─timemaster.service
           │ ├─8885 /usr/sbin/timemaster -f /etc/timemaster.conf
           │ └─8897 /usr/sbin/chronyd -u chrony -n -f /var/run/timemaster/chrony.conf
           ├─tuned.service
           │ └─8832 /usr/libexec/platform-python -Es /usr/sbin/tuned -l -P
           └─turbostatlog.service
             └─4190 /bin/turbostat -i 3600

Nov 18 13:56:07 XOSEncoder-01 java[8858]: P8858 T737 A8030 [invokeOperation] [Originator: VOS] [OperationName: QueryBackgroundDownloadStatus] [OperationParam: ] Start
Nov 18 13:56:07 XOSEncoder-01 java[8858]: P8858 T737 A8030 [invokeOperation] [Originator: VOS] [OperationName: QueryBackgroundDownloadStatus] Successful
Nov 18 13:56:08 XOSEncoder-01 java[8858]: P8858 T737 A8030 [invokeOperation] [Originator: VOS] [OperationName: QueryBackgroundTechdumpStatus] [OperationParam: ] Start
Nov 18 13:56:08 XOSEncoder-01 java[8858]: P8858 T737 A8030 [invokeOperation] [Originator: VOS] [OperationName: QueryBackgroundTechdumpStatus] Successful
Nov 18 13:56:08 XOSEncoder-01 java[8858]: P8858 T737 A8030 [invokeOperation] [Originator: VOS] [OperationName: QueryPendingSoftwareVersion] [OperationParam: ] Start
Nov 18 13:56:08 XOSEncoder-01 java[8858]: P8858 T737 A8030 [invokeOperation] [Originator: VOS] [OperationName: QueryPendingSoftwareVersion] Successful
Nov 18 13:56:08 XOSEncoder-01 java[8858]: P8858 T747 A8030 [invokeOperation] [Originator: NMX_1001] [OperationName: LockDevice] [OperationParam: <?xml version="1.0" encoding="utf-8"?><LockDeviceParam xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema"><Originator>10.10.106.214</Originator></LockDeviceParam>] Start
Nov 18 13:56:08 XOSEncoder-01 java[8858]: P8858 T747 A8030 [invokeOperation] [Originator: NMX_1001] [OperationName: LockDevice] Successful
Nov 18 13:56:10 XOSEncoder-01 java[8858]: P8858 T737 A8030 [invokeOperation] [Originator: VOS] [OperationName: QueryBackgroundTechdumpStatus] [OperationParam: ] Start
Nov 18 13:56:10 XOSEncoder-01 java[8858]: P8858 T737 A8030 [invokeOperation] [Originator: VOS] [OperationName: QueryBackgroundTechdumpStatus] Successful

● user.slice - User and Session Slice
   Loaded: loaded (/usr/lib/systemd/system/user.slice; static; vendor preset: disabled)
   Active: active since Mon 2025-11-17 16:03:50 UTC; 21h ago
     Docs: man:systemd.special(7)
    Tasks: 1
   Memory: 376.0K
   CGroup: /user.slice
           └─cpuset-init.service
             └─2418 /bin/sleep infinity

● dbus.socket - D-Bus System Message Bus Socket
   Loaded: loaded (/usr/lib/systemd/system/dbus.socket; static; vendor preset: enabled)
   Active: active (running) since Mon 2025-11-17 16:03:53 UTC; 21h ago
   Listen: /run/dbus/system_bus_socket (Stream)
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/dbus.socket

Nov 17 16:03:53 vosflex.localdomain systemd[1]: Listening on D-Bus System Message Bus Socket.

● dm-event.socket - Device-mapper event daemon FIFOs
   Loaded: loaded (/usr/lib/systemd/system/dm-event.socket; enabled; vendor preset: enabled)
   Active: active (listening) since Mon 2025-11-17 16:03:50 UTC; 21h ago
     Docs: man:dmeventd(8)
   Listen: /run/dmeventd-server (FIFO)
           /run/dmeventd-client (FIFO)
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/dm-event.socket

● lvm2-lvmpolld.socket - LVM2 poll daemon socket
   Loaded: loaded (/usr/lib/systemd/system/lvm2-lvmpolld.socket; enabled; vendor preset: enabled)
   Active: active (listening) since Mon 2025-11-17 16:03:50 UTC; 21h ago
     Docs: man:lvmpolld(8)
   Listen: /run/lvm/lvmpolld.socket (Stream)
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/lvm2-lvmpolld.socket

● pcscd.socket - PC/SC Smart Card Daemon Activation Socket
   Loaded: loaded (/usr/lib/systemd/system/pcscd.socket; enabled; vendor preset: enabled)
   Active: active (listening) since Mon 2025-11-17 16:03:53 UTC; 21h ago
   Listen: /run/pcscd/pcscd.comm (Stream)
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/pcscd.socket

Nov 17 16:03:53 vosflex.localdomain systemd[1]: Listening on PC/SC Smart Card Daemon Activation Socket.

● rpcbind.socket - RPCbind Server Activation Socket
   Loaded: loaded (/usr/lib/systemd/system/rpcbind.socket; enabled; vendor preset: enabled)
   Active: active (running) since Mon 2025-11-17 16:03:50 UTC; 21h ago
   Listen: /run/rpcbind.sock (Stream)
           0.0.0.0:111 (Stream)
           0.0.0.0:111 (Datagram)
           [::]:111 (Stream)
           [::]:111 (Datagram)
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/rpcbind.socket

● sssd-kcm.socket - SSSD Kerberos Cache Manager responder socket
   Loaded: loaded (/usr/lib/systemd/system/sssd-kcm.socket; enabled; vendor preset: enabled)
   Active: active (listening) since Mon 2025-11-17 16:03:53 UTC; 21h ago
     Docs: man:sssd-kcm(8)
   Listen: /var/run/.heim_org.h5l.kcm-socket (Stream)
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/sssd-kcm.socket

Nov 17 16:03:53 vosflex.localdomain systemd[1]: Listening on SSSD Kerberos Cache Manager responder socket.

● syslog.socket - Syslog Socket
   Loaded: loaded (/usr/lib/systemd/system/syslog.socket; static; vendor preset: disabled)
   Active: inactive (dead)
     Docs: man:systemd.special(7)
           https://www.freedesktop.org/wiki/Software/systemd/syslog
   Listen: /run/systemd/journal/syslog (Datagram)

● systemd-coredump.socket - Process Core Dump Socket
   Loaded: loaded (/usr/lib/systemd/system/systemd-coredump.socket; static; vendor preset: disabled)
   Active: active (listening) since Mon 2025-11-17 16:03:50 UTC; 21h ago
     Docs: man:systemd-coredump(8)
   Listen: /run/systemd/coredump (SequentialPacket)
 Accepted: 0; Connected: 0;
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/systemd-coredump.socket

● systemd-initctl.socket - initctl Compatibility Named Pipe
   Loaded: loaded (/usr/lib/systemd/system/systemd-initctl.socket; static; vendor preset: disabled)
   Active: active (listening) since Mon 2025-11-17 16:03:50 UTC; 21h ago
     Docs: man:systemd-initctl.service(8)
   Listen: /run/initctl (FIFO)
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/systemd-initctl.socket

● systemd-journald-audit.socket - Journal Audit Socket
   Loaded: loaded (/usr/lib/systemd/system/systemd-journald-audit.socket; static; vendor preset: disabled)
   Active: inactive (dead)
     Docs: man:systemd-journald.service(8)
           man:journald.conf(5)
   Listen: audit 1 (Netlink)

● systemd-journald-dev-log.socket - Journal Socket (/dev/log)
   Loaded: loaded (/usr/lib/systemd/system/systemd-journald-dev-log.socket; static; vendor preset: disabled)
   Active: active (running) since Mon 2025-11-17 16:03:48 UTC; 21h ago
     Docs: man:systemd-journald.service(8)
           man:journald.conf(5)
   Listen: /run/systemd/journal/dev-log (Datagram)
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/systemd-journald-dev-log.socket

Warning: Journal has been rotated since unit was started. Log output is incomplete or unavailable.

● systemd-journald.socket - Journal Socket
   Loaded: loaded (/usr/lib/systemd/system/systemd-journald.socket; static; vendor preset: disabled)
   Active: active (running) since Mon 2025-11-17 16:03:48 UTC; 21h ago
     Docs: man:systemd-journald.service(8)
           man:journald.conf(5)
   Listen: /run/systemd/journal/stdout (Stream)
           /run/systemd/journal/socket (Datagram)
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/systemd-journald.socket

Warning: Journal has been rotated since unit was started. Log output is incomplete or unavailable.

● systemd-rfkill.socket - Load/Save RF Kill Switch Status /dev/rfkill Watch
   Loaded: loaded (/usr/lib/systemd/system/systemd-rfkill.socket; static; vendor preset: disabled)
   Active: active (listening) since Mon 2025-11-17 16:04:09 UTC; 21h ago
     Docs: man:systemd-rfkill.socket(8)
   Listen: /dev/rfkill (Special)
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/systemd-rfkill.socket

Nov 17 16:04:09 vosflex.localdomain systemd[1]: Listening on Load/Save RF Kill Switch Status /dev/rfkill Watch.

● systemd-udevd-control.socket - udev Control Socket
   Loaded: loaded (/usr/lib/systemd/system/systemd-udevd-control.socket; static; vendor preset: disabled)
   Active: active (running) since Mon 2025-11-17 16:03:50 UTC; 21h ago
     Docs: man:systemd-udevd.service(8)
           man:udev(7)
   Listen: /run/udev/control (SequentialPacket)
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/systemd-udevd-control.socket

● systemd-udevd-kernel.socket - udev Kernel Socket
   Loaded: loaded (/usr/lib/systemd/system/systemd-udevd-kernel.socket; static; vendor preset: disabled)
   Active: active (running) since Mon 2025-11-17 16:03:50 UTC; 21h ago
     Docs: man:systemd-udevd.service(8)
           man:udev(7)
   Listen: kobject-uevent 1 (Netlink)
    Tasks: 0 (limit: 399998)
   Memory: 0B
   CGroup: /system.slice/systemd-udevd-kernel.socket

● basic.target - Basic System
   Loaded: loaded (/usr/lib/systemd/system/basic.target; static; vendor preset: disabled)
   Active: active since Mon 2025-11-17 16:04:07 UTC; 21h ago
     Docs: man:systemd.special(7)

Nov 17 16:04:07 vosflex.localdomain systemd[1]: Reached target Basic System.

● cryptsetup.target - Local Encrypted Volumes
   Loaded: loaded (/usr/lib/systemd/system/cryptsetup.target; static; vendor preset: disabled)
   Active: active since Mon 2025-11-17 16:03:50 UTC; 21h ago
     Docs: man:systemd.special(7)

● emergency.target - Emergency Mode
   Loaded: loaded (/usr/lib/systemd/system/emergency.target; static; vendor preset: disabled)
   Active: inactive (dead)
     Docs: man:systemd.special(7)

● firmware.target - System Firmwares and drivers
   Loaded: loaded (/etc/systemd/system/firmware.target; enabled; vendor preset: disabled)
   Active: active since Mon 2025-11-17 16:03:53 UTC; 21h ago

Nov 17 16:03:53 vosflex.localdomain systemd[1]: Reached target System Firmwares and drivers.

● getty-pre.target - Login Prompts (Pre)
   Loaded: loaded (/usr/lib/systemd/system/getty-pre.target; static; vendor preset: disabled)
   Active: inactive (dead)
     Docs: man:systemd.special(7)
           man:systemd-getty-generator(8)
           http://0pointer.de/blog/projects/serial-console.html

● getty.target - Login Prompts
   Loaded: loaded (/usr/lib/systemd/system/getty.target; static; vendor preset: disabled)
   Active: active since Mon 2025-11-17 16:06:31 UTC; 21h ago
     Docs: man:systemd.special(7)
           man:systemd-getty-generator(8)
           http://0pointer.de/blog/projects/serial-console.html

Nov 17 16:06:31 vosflex.localdomain systemd[1]: Reached target Login Prompts.

● graphical.target - Graphical Interface
   Loaded: loaded (/usr/lib/systemd/system/graphical.target; static; vendor preset: disabled)
   Active: active since Mon 2025-11-17 16:06:31 UTC; 21h ago
     Docs: man:systemd.special(7)

Nov 17 16:06:31 vosflex.localdomain systemd[1]: Reached target Graphical Interface.

● initrd-fs.target - Initrd File Systems
   Loaded: loaded (/usr/lib/systemd/system/initrd-fs.target; static; vendor preset: disabled)
   Active: inactive (dead) since Mon 2025-11-17 16:03:50 UTC; 21h ago
     Docs: man:systemd.special(7)

Nov 17 16:03:49 localhost systemd[1]: Reached target Initrd File Systems.

● initrd-root-device.target - Initrd Root Device
   Loaded: loaded (/usr/lib/systemd/system/initrd-root-device.target; static; vendor preset: disabled)
   Active: inactive (dead) since Mon 2025-11-17 16:03:49 UTC; 21h ago
     Docs: man:systemd.special(7)

Nov 17 16:03:49 localhost systemd[1]: Reached target Initrd Root Device.
Nov 17 16:03:49 localhost systemd[1]: Stopped target Initrd Root Device.

● initrd-root-fs.target - Initrd Root File System
   Loaded: loaded (/usr/lib/systemd/system/initrd-root-fs.target; static; vendor preset: disabled)
   Active: inactive (dead) since Mon 2025-11-17 16:03:50 UTC; 21h ago
     Docs: man:systemd.special(7)

Nov 17 16:03:49 localhost systemd[1]: Reached target Initrd Root File System.

● initrd-switch-root.target - Switch Root
   Loaded: loaded (/usr/lib/systemd/system/initrd-switch-root.target; static; vendor preset: disabled)
   Active: inactive (dead) since Mon 2025-11-17 16:03:50 UTC; 21h ago

Nov 17 16:03:49 localhost systemd[1]: Reached target Switch Root.

● initrd.target - Initrd Default Target
   Loaded: loaded (/usr/lib/systemd/system/initrd.target; static; vendor preset: disabled)
   Active: inactive (dead) since Mon 2025-11-17 16:03:49 UTC; 21h ago
     Docs: man:systemd.special(7)

Nov 17 16:03:49 localhost systemd[1]: Reached target Initrd Default Target.
Nov 17 16:03:49 localhost systemd[1]: Stopped target Initrd Default Target.

● local-fs-pre.target - Local File Systems (Pre)
   Loaded: loaded (/usr/lib/systemd/system/local-fs-pre.target; static; vendor preset: disabled)
   Active: active since Mon 2025-11-17 16:03:50 UTC; 21h ago
     Docs: man:systemd.special(7)

● local-fs.target - Local File Systems
   Loaded: loaded (/usr/lib/systemd/system/local-fs.target; static; vendor preset: disabled)
   Active: active since Mon 2025-11-17 16:03:51 UTC; 21h ago
     Docs: man:systemd.special(7)

● multi-user.target - Multi-User System
   Loaded: loaded (/usr/lib/systemd/system/multi-user.target; static; vendor preset: disabled)
   Active: active since Mon 2025-11-17 16:06:31 UTC; 21h ago
     Docs: man:systemd.special(7)

Nov 17 16:06:31 vosflex.localdomain systemd[1]: Reached target Multi-User System.

● network-online.target - Network is Online
   Loaded: loaded (/usr/lib/systemd/system/network-online.target; static; vendor preset: disabled)
   Active: active since Mon 2025-11-17 16:04:54 UTC; 21h ago
     Docs: man:systemd.special(7)
           https://www.freedesktop.org/wiki/Software/systemd/NetworkTarget

Nov 17 16:04:54 vosflex.localdomain systemd[1]: Reached target Network is Online.

● network-pre.target - Network (Pre)
   Loaded: loaded (/usr/lib/systemd/system/network-pre.target; static; vendor preset: disabled)
   Active: active since Mon 2025-11-17 16:04:08 UTC; 21h ago
     Docs: man:systemd.special(7)
           https://www.freedesktop.org/wiki/Software/systemd/NetworkTarget

Nov 17 16:04:08 vosflex.localdomain systemd[1]: Reached target Network (Pre).

● network.target - Network
   Loaded: loaded (/usr/lib/systemd/system/network.target; static; vendor preset: disabled)
   Active: active since Mon 2025-11-17 16:04:54 UTC; 21h ago
     Docs: man:systemd.special(7)
           https://www.freedesktop.org/wiki/Software/systemd/NetworkTarget

Nov 17 16:04:54 vosflex.localdomain systemd[1]: Reached target Network.

● nfs-client.target - NFS client services
   Loaded: loaded (/usr/lib/systemd/system/nfs-client.target; enabled; vendor preset: enabled)
   Active: active since Mon 2025-11-17 16:04:54 UTC; 21h ago

Nov 17 16:04:54 vosflex.localdomain systemd[1]: Reached target NFS client services.

● nss-lookup.target - Host and Network Name Lookups
   Loaded: loaded (/usr/lib/systemd/system/nss-lookup.target; static; vendor preset: disabled)
   Active: inactive (dead)
     Docs: man:systemd.special(7)

● nss-user-lookup.target - User and Group Name Lookups
   Loaded: loaded (/usr/lib/systemd/system/nss-user-lookup.target; static; vendor preset: disabled)
   Active: active since Mon 2025-11-17 16:04:08 UTC; 21h ago
     Docs: man:systemd.special(7)

Nov 17 16:04:08 vosflex.localdomain systemd[1]: Reached target User and Group Name Lookups.

● paths.target - Paths
   Loaded: loaded (/usr/lib/systemd/system/paths.target; static; vendor preset: disabled)
   Active: active since Mon 2025-11-17 16:03:50 UTC; 21h ago
     Docs: man:systemd.special(7)

● remote-fs-pre.target - Remote File Systems (Pre)
   Loaded: loaded (/usr/lib/systemd/system/remote-fs-pre.target; static; vendor preset: disabled)
   Active: active since Mon 2025-11-17 16:04:54 UTC; 21h ago
     Docs: man:systemd.special(7)

Nov 17 16:04:54 vosflex.localdomain systemd[1]: Reached target Remote File Systems (Pre).

● remote-fs.target - Remote File Systems
   Loaded: loaded (/usr/lib/systemd/system/remote-fs.target; enabled; vendor preset: enabled)
   Active: active since Mon 2025-11-17 16:04:54 UTC; 21h ago
     Docs: man:systemd.special(7)

Nov 17 16:04:54 vosflex.localdomain systemd[1]: Reached target Remote File Systems.

● rescue.target - Rescue Mode
   Loaded: loaded (/usr/lib/systemd/system/rescue.target; static; vendor preset: disabled)
   Active: inactive (dead)
     Docs: man:systemd.special(7)

● rpc_pipefs.target
   Loaded: loaded (/usr/lib/systemd/system/rpc_pipefs.target; static; vendor preset: disabled)
   Active: inactive (dead)

Nov 17 16:03:53 vosflex.localdomain systemd[1]: Dependency failed for rpc_pipefs.target.
Nov 17 16:03:53 vosflex.localdomain systemd[1]: rpc_pipefs.target: Job rpc_pipefs.target/start failed with result 'dependency'.

● rpcbind.target - RPC Port Mapper
   Loaded: loaded (/usr/lib/systemd/system/rpcbind.target; static; vendor preset: enabled)
   Active: active since Mon 2025-11-17 16:03:50 UTC; 21h ago
     Docs: man:systemd.special(7)

● shutdown.target - Shutdown
   Loaded: loaded (/usr/lib/systemd/system/shutdown.target; static; vendor preset: disabled)
   Active: inactive (dead)
     Docs: man:systemd.special(7)

● slices.target - Slices
   Loaded: loaded (/usr/lib/systemd/system/slices.target; static; vendUnit syslog.target could not be found.
or preset: disabled)
   Active: active since Mon 2025-11-17 16:03:50 UTC; 21h ago
     Docs: man:systemd.special(7)

● sockets.target - Sockets
   Loaded: loaded (/usr/lib/systemd/system/sockets.target; static; vendor preset: disabled)
   Active: active since Mon 2025-11-17 16:03:53 UTC; 21h ago
     Docs: man:systemd.special(7)

Nov 17 16:03:53 vosflex.localdomain systemd[1]: Reached target Sockets.

● sshd-keygen.target
   Loaded: loaded (/usr/lib/systemd/system/sshd-keygen.target; static; vendor preset: disabled)
   Active: active since Mon 2025-11-17 16:04:08 UTC; 21h ago

Nov 17 16:04:08 vosflex.localdomain systemd[1]: Reached target sshd-keygen.target.

● swap.target - Swap
   Loaded: loaded (/usr/lib/systemd/system/swap.target; static; vendor preset: disabled)
   Active: active since Mon 2025-11-17 16:03:50 UTC; 21h ago
     Docs: man:systemd.special(7)

● sysinit.target - System Initialization
   Loaded: loaded (/usr/lib/systemd/system/sysinit.target; static; vendor preset: disabled)
   Active: active since Mon 2025-11-17 16:03:53 UTC; 21h ago
     Docs: man:systemd.special(7)

Nov 17 16:03:53 vosflex.localdomain systemd[1]: Reached target System Initialization.

● time-sync.target - System Time Synchronized
   Loaded: loaded (/usr/lib/systemd/system/time-sync.target; static; vendor preset: disabled)
   Active: active since Mon 2025-11-17 16:04:54 UTC; 21h ago
     Docs: man:systemd.special(7)

Nov 17 16:04:54 vosflex.localdomain systemd[1]: Reached target System Time Synchronized.

● timers.target - Timers
   Loaded: loaded (/usr/lib/systemd/system/timers.target; static; vendor preset: disabled)
   Active: active since Mon 2025-11-17 16:04:54 UTC; 21h ago
     Docs: man:systemd.special(7)

Nov 17 16:04:54 vosflex.localdomain systemd[1]: Reached target Timers.

● umount.target - Unmount All Filesystems
   Loaded: loaded (/usr/lib/systemd/system/umount.target; static; vendor preset: disabled)
   Active: inactive (dead)
     Docs: man:systemd.special(7)

● atop-rotate.timer - Daily atop restart
   Loaded: loaded (/usr/lib/systemd/system/atop-rotate.timer; enabled; vendor preset: disabled)
   Active: active (waiting) since Mon 2025-11-17 16:04:54 UTC; 21h ago
  Trigger: Wed 2025-11-19 00:00:00 UTC; 10h left

Nov 17 16:04:54 vosflex.localdomain systemd[1]: Started Daily atop restart.

● systemd-tmpfiles-clean.timer - Daily Cleanup of Temporary Directories
   Loaded: loaded (/usr/lib/systemd/system/systemd-tmpfiles-clean.timer; static; vendor preset: disabled)
   Active: active (waiting) since Mon 2025-11-17 16:03:53 UTC; 21h ago
  Trigger: Tue 2025-11-18 16:18:38 UTC; 2h 22min left
     Docs: man:tmpfiles.d(5)
           man:systemd-tmpfiles(8)

Nov 17 16:03:53 vosflex.localdomain systemd[1]: Started Daily Cleanup of Temporary Directories.

● unbound-anchor.timer - daily update of the root trust anchor for DNSSEC
   Loaded: loaded (/usr/lib/systemd/system/unbound-anchor.timer; enabled; vendor preset: enabled)
   Active: active (waiting) since Mon 2025-11-17 16:04:54 UTC; 21h ago
  Trigger: Wed 2025-11-19 00:00:00 UTC; 10h left
     Docs: man:unbound-anchor(8)

Nov 17 16:04:54 vosflex.localdomain systemd[1]: Started daily update of the root trust anchor for DNSSEC.
