{
"os": "Linux-6.1.58+-x86_64-with-glibc2.35",
"python": "3.10.12",
"heartbeatAt": "2024-03-21T01:55:04.228731",
"startedAt": "2024-03-21T01:54:58.281206",
"docker": null,
"cuda": null,
"args": [],
"state": "running",
"program": "Mistral-7B-Finetuning-Insurance.ipynb%EC%9D%98%20%EC%82%AC%EB%B3%B8",
"codePathLocal": null,
"colab": "https://colab.research.google.com/notebook#fileId=1glmddDRkqHDhfUT8slC17FnAssneTUkr",
"host": "d72aea89d4b6",
"username": "root",
"executable": "/usr/bin/python3",
"cpu_count": 1,
"cpu_count_logical": 2,
"cpu_freq": {
"current": 2000.176,
"min": 0.0,
"max": 0.0
},
"cpu_freq_per_core": [
{
"current": 2000.176,
"min": 0.0,
"max": 0.0
},
{
"current": 2000.176,
"min": 0.0,
"max": 0.0
}
],
"disk": {
"/": {
"total": 78.1898422241211,
"used": 42.871490478515625
}
},
"gpu": "Tesla T4",
"gpu_count": 1,
"gpu_devices": [
{
"name": "Tesla T4",
"memory_total": 16106127360
}
],
"memory": {
"total": 12.674781799316406
}
}