Femboyuwu2000 committed
Commit
df772d3
1 Parent(s): 1adad76

lierotica_llama2

Files changed (36)
  1. README.md +5 -6
  2. adapter_config.json +4 -4
  3. runs/Apr09_21-37-48_6e44b39f6877/events.out.tfevents.1712698950.6e44b39f6877.114.0 +3 -0
  4. runs/Apr09_21-55-28_6e44b39f6877/events.out.tfevents.1712699861.6e44b39f6877.266.0 +3 -0
  5. runs/Apr09_22-04-47_6e44b39f6877/events.out.tfevents.1712700418.6e44b39f6877.415.0 +3 -0
  6. tokenizer.json +6 -1
  7. training_args.bin +1 -1
  8. wandb/debug-internal.log +0 -0
  9. wandb/debug.log +29 -28
  10. wandb/run-20240409_214603-e7cki9vp/files/conda-environment.yaml +0 -0
  11. wandb/run-20240409_214603-e7cki9vp/files/config.yaml +686 -0
  12. wandb/run-20240409_214603-e7cki9vp/files/output.log +54 -0
  13. wandb/run-20240409_214603-e7cki9vp/files/requirements.txt +864 -0
  14. wandb/run-20240409_214603-e7cki9vp/files/wandb-metadata.json +66 -0
  15. wandb/run-20240409_214603-e7cki9vp/files/wandb-summary.json +1 -0
  16. wandb/run-20240409_214603-e7cki9vp/logs/debug-internal.log +480 -0
  17. wandb/run-20240409_214603-e7cki9vp/logs/debug.log +34 -0
  18. wandb/run-20240409_214603-e7cki9vp/run-e7cki9vp.wandb +0 -0
  19. wandb/run-20240409_215743-953wtybl/files/conda-environment.yaml +0 -0
  20. wandb/run-20240409_215743-953wtybl/files/config.yaml +686 -0
  21. wandb/run-20240409_215743-953wtybl/files/output.log +48 -0
  22. wandb/run-20240409_215743-953wtybl/files/requirements.txt +864 -0
  23. wandb/run-20240409_215743-953wtybl/files/wandb-metadata.json +66 -0
  24. wandb/run-20240409_215743-953wtybl/files/wandb-summary.json +1 -0
  25. wandb/run-20240409_215743-953wtybl/logs/debug-internal.log +400 -0
  26. wandb/run-20240409_215743-953wtybl/logs/debug.log +33 -0
  27. wandb/run-20240409_215743-953wtybl/run-953wtybl.wandb +0 -0
  28. wandb/run-20240409_220700-9aom042n/files/conda-environment.yaml +0 -0
  29. wandb/run-20240409_220700-9aom042n/files/config.yaml +686 -0
  30. wandb/run-20240409_220700-9aom042n/files/output.log +37 -0
  31. wandb/run-20240409_220700-9aom042n/files/requirements.txt +864 -0
  32. wandb/run-20240409_220700-9aom042n/files/wandb-metadata.json +66 -0
  33. wandb/run-20240409_220700-9aom042n/files/wandb-summary.json +1 -0
  34. wandb/run-20240409_220700-9aom042n/logs/debug-internal.log +145 -0
  35. wandb/run-20240409_220700-9aom042n/logs/debug.log +31 -0
  36. wandb/run-20240409_220700-9aom042n/run-9aom042n.wandb +0 -0
README.md CHANGED
@@ -9,7 +9,6 @@ base_model: TinyLlama/TinyLlama-1.1B-intermediate-step-1431k-3T
  model-index:
  - name: working
    results: []
- pipeline_tag: text-generation
  ---

  <!-- This model card has been generated automatically according to the information the Trainer had access to. You
@@ -36,14 +35,14 @@ More information needed
  ### Training hyperparameters

  The following hyperparameters were used during training:
- - learning_rate: 3e-05
- - train_batch_size: 4
- - eval_batch_size: 16
+ - learning_rate: 1e-06
+ - train_batch_size: 2
+ - eval_batch_size: 8
  - seed: 42
  - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
  - lr_scheduler_type: cosine
  - lr_scheduler_warmup_ratio: 0.03
- - training_steps: 17000
+ - training_steps: 200

  ### Training results

@@ -52,7 +51,7 @@ The following hyperparameters were used during training:
  ### Framework versions

  - PEFT 0.10.0
- - Transformers 4.38.2
+ - Transformers 4.39.3
  - Pytorch 2.1.2
  - Datasets 2.16.0
  - Tokenizers 0.15.2
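The hyperparameter changes above correspond to a Trainer setup roughly like the sketch below. This is a reconstruction from the model card and the wandb config logged later in this commit, not the notebook's actual code; the model, dataset, and PEFT wrapping are omitted.

```python
from transformers import TrainingArguments

# Values taken from the updated README and the new wandb/debug.log config
training_args = TrainingArguments(
    output_dir="/kaggle/working/",   # from the logged wandb config
    learning_rate=1e-06,
    per_device_train_batch_size=2,
    per_device_eval_batch_size=8,
    seed=42,
    lr_scheduler_type="cosine",
    warmup_ratio=0.03,
    max_steps=200,
    optim="paged_adamw_32bit",       # from wandb/debug.log below
    weight_decay=0.001,
    max_grad_norm=0.1,
    logging_steps=100,
    save_steps=100,
    save_total_limit=1,
    gradient_checkpointing=True,
    report_to=["tensorboard", "wandb"],
)
```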
adapter_config.json CHANGED
@@ -20,13 +20,13 @@
  "rank_pattern": {},
  "revision": null,
  "target_modules": [
- "q_proj",
+ "o_proj",
  "up_proj",
+ "k_proj",
  "gate_proj",
- "o_proj",
  "down_proj",
- "v_proj",
- "k_proj"
+ "q_proj",
+ "v_proj"
  ],
  "task_type": "CAUSAL_LM",
  "use_dora": false,
runs/Apr09_21-37-48_6e44b39f6877/events.out.tfevents.1712698950.6e44b39f6877.114.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:81d07277f330e769b3b9df4b96e30f8d58d11d4a6d492fe15de8bc5fa4064339
+ size 7052
runs/Apr09_21-55-28_6e44b39f6877/events.out.tfevents.1712699861.6e44b39f6877.266.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c3fb8e9e457b0a39f288cc805081ef3247b50cfd1307c46b6649bfaab3503756
+ size 6419
runs/Apr09_22-04-47_6e44b39f6877/events.out.tfevents.1712700418.6e44b39f6877.415.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1ad37060afaf8fad0d862cd7d1842b311a5a5f6eae23003328b338c60a9087bb
+ size 5927
tokenizer.json CHANGED
@@ -1,6 +1,11 @@
  {
  "version": "1.0",
- "truncation": null,
+ "truncation": {
+ "direction": "Right",
+ "max_length": 2048,
+ "strategy": "LongestFirst",
+ "stride": 0
+ },
  "padding": null,
  "added_tokens": [
  {
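The tokenizer.json change enables truncation at 2048 tokens. A sketch of how that serialized block is typically produced with the tokenizers API (the file path is illustrative; the same block may equally result from calling the tokenizer with truncation=True, max_length=2048 before saving):

```python
from tokenizers import Tokenizer

# Enable truncation on the fast tokenizer, then re-save it;
# this writes the "truncation" object shown in the diff above.
tok = Tokenizer.from_file("tokenizer.json")
tok.enable_truncation(max_length=2048, stride=0,
                      strategy="longest_first", direction="right")
tok.save("tokenizer.json")
```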
training_args.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:75aa6e38939ebbd4d58f8eaad581a1244adc1ae25513698261d543576e873783
+ oid sha256:416389bcac852ac676a55c6402b08f29b42e4b09e1c5ff217c88feb67998544d
  size 4920
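training_args.bin is a pickled TrainingArguments object tracked by Git LFS, and both the old and new pointers report the same size (4920 bytes), so the diff only shows the changed oid. A sketch of one way to see what actually changed, assuming both revisions have been fetched from LFS into hypothetical old/ and new/ directories:

```python
import torch
import transformers  # needed so the pickled TrainingArguments class resolves

old_args = torch.load("old/training_args.bin")  # hypothetical checkout of the parent commit
new_args = torch.load("new/training_args.bin")  # hypothetical checkout of this commit

# Compare attribute by attribute; per the README diff above this should
# surface learning_rate, batch sizes, and max_steps, among others.
changed = {
    name: (value, getattr(new_args, name))
    for name, value in vars(old_args).items()
    if getattr(new_args, name, value) != value
}
print(changed)
```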
wandb/debug-internal.log CHANGED
The diff for this file is too large to render. See raw diff
 
wandb/debug.log CHANGED
@@ -1,30 +1,31 @@
1
- 2024-04-08 20:30:54,358 INFO MainThread:1139 [wandb_setup.py:_flush():76] Current SDK version is 0.16.4
2
- 2024-04-08 20:30:54,358 INFO MainThread:1139 [wandb_setup.py:_flush():76] Configure stats pid to 1139
3
- 2024-04-08 20:30:54,358 INFO MainThread:1139 [wandb_setup.py:_flush():76] Loading settings from /root/.config/wandb/settings
4
- 2024-04-08 20:30:54,358 INFO MainThread:1139 [wandb_setup.py:_flush():76] Loading settings from /kaggle/working/wandb/settings
5
- 2024-04-08 20:30:54,358 INFO MainThread:1139 [wandb_setup.py:_flush():76] Loading settings from environment variables: {}
6
- 2024-04-08 20:30:54,358 INFO MainThread:1139 [wandb_setup.py:_flush():76] Applying setup settings: {'_disable_service': False}
7
- 2024-04-08 20:30:54,358 INFO MainThread:1139 [wandb_setup.py:_flush():76] Inferring run settings from compute environment: {'program': '<python with no main file>'}
8
- 2024-04-08 20:30:54,358 INFO MainThread:1139 [wandb_init.py:_log_setup():526] Logging user logs to /kaggle/working/wandb/run-20240408_203054-ldiuneeg/logs/debug.log
9
- 2024-04-08 20:30:54,359 INFO MainThread:1139 [wandb_init.py:_log_setup():527] Logging internal logs to /kaggle/working/wandb/run-20240408_203054-ldiuneeg/logs/debug-internal.log
10
- 2024-04-08 20:30:54,359 INFO MainThread:1139 [wandb_init.py:_jupyter_setup():472] configuring jupyter hooks <wandb.sdk.wandb_init._WandbInit object at 0x7a14022dcc40>
11
- 2024-04-08 20:30:54,359 INFO MainThread:1139 [wandb_init.py:init():566] calling init triggers
12
- 2024-04-08 20:30:54,359 INFO MainThread:1139 [wandb_init.py:init():573] wandb.init called with sweep_config: {}
 
13
  config: {}
14
- 2024-04-08 20:30:54,359 INFO MainThread:1139 [wandb_init.py:init():616] starting backend
15
- 2024-04-08 20:30:54,359 INFO MainThread:1139 [wandb_init.py:init():620] setting up manager
16
- 2024-04-08 20:30:54,361 INFO MainThread:1139 [backend.py:_multiprocessing_setup():105] multiprocessing start_methods=fork,spawn,forkserver, using: spawn
17
- 2024-04-08 20:30:54,362 INFO MainThread:1139 [wandb_init.py:init():628] backend started and connected
18
- 2024-04-08 20:30:54,373 INFO MainThread:1139 [wandb_run.py:_label_probe_notebook():1295] probe notebook
19
- 2024-04-08 20:30:54,756 INFO MainThread:1139 [wandb_init.py:init():720] updated telemetry
20
- 2024-04-08 20:30:54,760 INFO MainThread:1139 [wandb_init.py:init():753] communicating run to backend with 90.0 second timeout
21
- 2024-04-08 20:30:54,922 INFO MainThread:1139 [wandb_run.py:_on_init():2262] communicating current version
22
- 2024-04-08 20:30:55,013 INFO MainThread:1139 [wandb_run.py:_on_init():2271] got version response upgrade_message: "wandb version 0.16.6 is available! To upgrade, please run:\n $ pip install wandb --upgrade"
23
 
24
- 2024-04-08 20:30:55,013 INFO MainThread:1139 [wandb_init.py:init():804] starting run threads in backend
25
- 2024-04-08 20:31:25,981 INFO MainThread:1139 [wandb_run.py:_console_start():2241] atexit reg
26
- 2024-04-08 20:31:25,981 INFO MainThread:1139 [wandb_run.py:_redirect():2096] redirect: wrap_raw
27
- 2024-04-08 20:31:25,982 INFO MainThread:1139 [wandb_run.py:_redirect():2161] Wrapping output streams.
28
- 2024-04-08 20:31:25,982 INFO MainThread:1139 [wandb_run.py:_redirect():2186] Redirects installed.
29
- 2024-04-08 20:31:25,983 INFO MainThread:1139 [wandb_init.py:init():847] run started, returning control to user process
30
- 2024-04-08 20:31:25,989 INFO MainThread:1139 [wandb_run.py:_config_callback():1343] config_cb None None {'vocab_size': 32000, 'max_position_embeddings': 2048, 'hidden_size': 2048, 'intermediate_size': 5632, 'num_hidden_layers': 22, 'num_attention_heads': 32, 'num_key_value_heads': 4, 'hidden_act': 'silu', 'initializer_range': 0.02, 'rms_norm_eps': 1e-05, 'pretraining_tp': 1, 'use_cache': False, 'rope_theta': 10000.0, 'rope_scaling': None, 'attention_bias': False, 'attention_dropout': 0.0, 'return_dict': True, 'output_hidden_states': False, 'output_attentions': False, 'torchscript': False, 'torch_dtype': 'float32', 'use_bfloat16': False, 'tf_legacy_loss': False, 'pruned_heads': {}, 'tie_word_embeddings': False, 'chunk_size_feed_forward': 0, 'is_encoder_decoder': False, 'is_decoder': False, 'cross_attention_hidden_size': None, 'add_cross_attention': False, 'tie_encoder_decoder': False, 'max_length': 20, 'min_length': 0, 'do_sample': False, 'early_stopping': False, 'num_beams': 1, 'num_beam_groups': 1, 'diversity_penalty': 0.0, 'temperature': 1.0, 'top_k': 50, 'top_p': 1.0, 'typical_p': 1.0, 'repetition_penalty': 1.0, 'length_penalty': 1.0, 'no_repeat_ngram_size': 0, 'encoder_no_repeat_ngram_size': 0, 'bad_words_ids': None, 'num_return_sequences': 1, 'output_scores': False, 'return_dict_in_generate': False, 'forced_bos_token_id': None, 'forced_eos_token_id': None, 'remove_invalid_values': False, 'exponential_decay_length_penalty': None, 'suppress_tokens': None, 'begin_suppress_tokens': None, 'architectures': ['LlamaForCausalLM'], 'finetuning_task': None, 'id2label': {0: 'LABEL_0', 1: 'LABEL_1'}, 'label2id': {'LABEL_0': 0, 'LABEL_1': 1}, 'tokenizer_class': None, 'prefix': None, 'bos_token_id': 1, 'pad_token_id': None, 'eos_token_id': 2, 'sep_token_id': None, 'decoder_start_token_id': None, 'task_specific_params': None, 'problem_type': None, '_name_or_path': 'TinyLlama/TinyLlama-1.1B-intermediate-step-1431k-3T', 'transformers_version': '4.38.2', 'model_type': 'llama', 'quantization_config': {'quant_method': 'QuantizationMethod.BITS_AND_BYTES', '_load_in_8bit': False, '_load_in_4bit': True, 'llm_int8_threshold': 6.0, 'llm_int8_skip_modules': None, 'llm_int8_enable_fp32_cpu_offload': False, 'llm_int8_has_fp16_weight': False, 'bnb_4bit_quant_type': 'nf4', 'bnb_4bit_use_double_quant': False, 'bnb_4bit_compute_dtype': 'float16', 'load_in_4bit': True, 'load_in_8bit': False}, 'output_dir': '/kaggle/working/', 'overwrite_output_dir': False, 'do_train': False, 'do_eval': False, 'do_predict': False, 'evaluation_strategy': 'no', 'prediction_loss_only': False, 'per_device_train_batch_size': 2, 'per_device_eval_batch_size': 8, 'per_gpu_train_batch_size': None, 'per_gpu_eval_batch_size': None, 'gradient_accumulation_steps': 1, 'eval_accumulation_steps': None, 'eval_delay': 0, 'learning_rate': 3e-05, 'weight_decay': 0.001, 'adam_beta1': 0.9, 'adam_beta2': 0.999, 'adam_epsilon': 1e-08, 'max_grad_norm': 0.1, 'num_train_epochs': 1, 'max_steps': 17000, 'lr_scheduler_type': 'cosine', 'lr_scheduler_kwargs': {}, 'warmup_ratio': 0.03, 'warmup_steps': 0, 'log_level': 'passive', 'log_level_replica': 'warning', 'log_on_each_node': True, 'logging_dir': '/kaggle/working/runs/Apr08_20-30-47_622eb14e717e', 'logging_strategy': 'steps', 'logging_first_step': False, 'logging_steps': 20, 'logging_nan_inf_filter': True, 'save_strategy': 'steps', 'save_steps': 20, 'save_total_limit': 1, 'save_safetensors': True, 'save_on_each_node': False, 'save_only_model': False, 'no_cuda': False, 'use_cpu': False, 'use_mps_device': False, 
'seed': 42, 'data_seed': None, 'jit_mode_eval': False, 'use_ipex': False, 'bf16': False, 'fp16': False, 'fp16_opt_level': 'O1', 'half_precision_backend': 'auto', 'bf16_full_eval': False, 'fp16_full_eval': False, 'tf32': None, 'local_rank': 0, 'ddp_backend': None, 'tpu_num_cores': None, 'tpu_metrics_debug': False, 'debug': [], 'dataloader_drop_last': False, 'eval_steps': None, 'dataloader_num_workers': 0, 'dataloader_prefetch_factor': None, 'past_index': -1, 'run_name': '/kaggle/working/', 'disable_tqdm': False, 'remove_unused_columns': True, 'label_names': None, 'load_best_model_at_end': False, 'metric_for_best_model': None, 'greater_is_better': None, 'ignore_data_skip': False, 'fsdp': [], 'fsdp_min_num_params': 0, 'fsdp_config': {'min_num_params': 0, 'xla': False, 'xla_fsdp_v2': False, 'xla_fsdp_grad_ckpt': False}, 'fsdp_transformer_layer_cls_to_wrap': None, 'accelerator_config': {'split_batches': False, 'dispatch_batches': None, 'even_batches': True, 'use_seedable_sampler': True}, 'deepspeed': None, 'label_smoothing_factor': 0.0, 'optim': 'paged_adamw_32bit', 'optim_args': None, 'adafactor': False, 'group_by_length': False, 'length_column_name': 'length', 'report_to': ['tensorboard', 'wandb'], 'ddp_find_unused_parameters': None, 'ddp_bucket_cap_mb': None, 'ddp_broadcast_buffers': None, 'dataloader_pin_memory': True, 'dataloader_persistent_workers': False, 'skip_memory_metrics': True, 'use_legacy_prediction_loop': False, 'push_to_hub': False, 'resume_from_checkpoint': None, 'hub_model_id': None, 'hub_strategy': 'every_save', 'hub_token': '<HUB_TOKEN>', 'hub_private_repo': False, 'hub_always_push': False, 'gradient_checkpointing': False, 'gradient_checkpointing_kwargs': None, 'include_inputs_for_metrics': False, 'fp16_backend': 'auto', 'push_to_hub_model_id': None, 'push_to_hub_organization': None, 'push_to_hub_token': '<PUSH_TO_HUB_TOKEN>', 'mp_parameters': '', 'auto_find_batch_size': False, 'full_determinism': False, 'torchdynamo': None, 'ray_scope': 'last', 'ddp_timeout': 1800, 'torch_compile': False, 'torch_compile_backend': None, 'torch_compile_mode': None, 'dispatch_batches': None, 'split_batches': None, 'include_tokens_per_second': False, 'include_num_input_tokens_seen': False, 'neftune_noise_alpha': None}
 
1
+ 2024-04-09 22:07:00,438 INFO MainThread:415 [wandb_setup.py:_flush():76] Current SDK version is 0.16.5
2
+ 2024-04-09 22:07:00,438 INFO MainThread:415 [wandb_setup.py:_flush():76] Configure stats pid to 415
3
+ 2024-04-09 22:07:00,439 INFO MainThread:415 [wandb_setup.py:_flush():76] Loading settings from /root/.config/wandb/settings
4
+ 2024-04-09 22:07:00,439 INFO MainThread:415 [wandb_setup.py:_flush():76] Loading settings from /kaggle/working/wandb/settings
5
+ 2024-04-09 22:07:00,439 INFO MainThread:415 [wandb_setup.py:_flush():76] Loading settings from environment variables: {}
6
+ 2024-04-09 22:07:00,439 INFO MainThread:415 [wandb_setup.py:_flush():76] Applying setup settings: {'_disable_service': False}
7
+ 2024-04-09 22:07:00,439 INFO MainThread:415 [wandb_setup.py:_flush():76] Inferring run settings from compute environment: {'program': '<python with no main file>'}
8
+ 2024-04-09 22:07:00,439 INFO MainThread:415 [wandb_setup.py:_flush():76] Applying login settings: {}
9
+ 2024-04-09 22:07:00,439 INFO MainThread:415 [wandb_init.py:_log_setup():527] Logging user logs to /kaggle/working/wandb/run-20240409_220700-9aom042n/logs/debug.log
10
+ 2024-04-09 22:07:00,439 INFO MainThread:415 [wandb_init.py:_log_setup():528] Logging internal logs to /kaggle/working/wandb/run-20240409_220700-9aom042n/logs/debug-internal.log
11
+ 2024-04-09 22:07:00,439 INFO MainThread:415 [wandb_init.py:_jupyter_setup():473] configuring jupyter hooks <wandb.sdk.wandb_init._WandbInit object at 0x7d36045756c0>
12
+ 2024-04-09 22:07:00,439 INFO MainThread:415 [wandb_init.py:init():567] calling init triggers
13
+ 2024-04-09 22:07:00,439 INFO MainThread:415 [wandb_init.py:init():574] wandb.init called with sweep_config: {}
14
  config: {}
15
+ 2024-04-09 22:07:00,439 INFO MainThread:415 [wandb_init.py:init():617] starting backend
16
+ 2024-04-09 22:07:00,439 INFO MainThread:415 [wandb_init.py:init():621] setting up manager
17
+ 2024-04-09 22:07:00,441 INFO MainThread:415 [backend.py:_multiprocessing_setup():105] multiprocessing start_methods=fork,spawn,forkserver, using: spawn
18
+ 2024-04-09 22:07:00,442 INFO MainThread:415 [wandb_init.py:init():629] backend started and connected
19
+ 2024-04-09 22:07:00,454 INFO MainThread:415 [wandb_run.py:_label_probe_notebook():1299] probe notebook
20
+ 2024-04-09 22:07:00,766 INFO MainThread:415 [wandb_init.py:init():721] updated telemetry
21
+ 2024-04-09 22:07:00,769 INFO MainThread:415 [wandb_init.py:init():754] communicating run to backend with 90.0 second timeout
22
+ 2024-04-09 22:07:03,999 INFO MainThread:415 [wandb_run.py:_on_init():2344] communicating current version
23
+ 2024-04-09 22:07:04,090 INFO MainThread:415 [wandb_run.py:_on_init():2353] got version response upgrade_message: "wandb version 0.16.6 is available! To upgrade, please run:\n $ pip install wandb --upgrade"
24
 
25
+ 2024-04-09 22:07:04,090 INFO MainThread:415 [wandb_init.py:init():805] starting run threads in backend
26
+ 2024-04-09 22:07:20,151 INFO MainThread:415 [wandb_run.py:_console_start():2323] atexit reg
27
+ 2024-04-09 22:07:20,151 INFO MainThread:415 [wandb_run.py:_redirect():2178] redirect: wrap_raw
28
+ 2024-04-09 22:07:20,153 INFO MainThread:415 [wandb_run.py:_redirect():2243] Wrapping output streams.
29
+ 2024-04-09 22:07:20,153 INFO MainThread:415 [wandb_run.py:_redirect():2268] Redirects installed.
30
+ 2024-04-09 22:07:20,154 INFO MainThread:415 [wandb_init.py:init():848] run started, returning control to user process
31
+ 2024-04-09 22:07:20,160 INFO MainThread:415 [wandb_run.py:_config_callback():1347] config_cb None None {'vocab_size': 32000, 'max_position_embeddings': 2048, 'hidden_size': 2048, 'intermediate_size': 5632, 'num_hidden_layers': 22, 'num_attention_heads': 32, 'num_key_value_heads': 4, 'hidden_act': 'silu', 'initializer_range': 0.02, 'rms_norm_eps': 1e-05, 'pretraining_tp': 1, 'use_cache': False, 'rope_theta': 10000.0, 'rope_scaling': None, 'attention_bias': False, 'attention_dropout': 0.0, 'return_dict': True, 'output_hidden_states': False, 'output_attentions': False, 'torchscript': False, 'torch_dtype': 'float32', 'use_bfloat16': False, 'tf_legacy_loss': False, 'pruned_heads': {}, 'tie_word_embeddings': False, 'chunk_size_feed_forward': 0, 'is_encoder_decoder': False, 'is_decoder': False, 'cross_attention_hidden_size': None, 'add_cross_attention': False, 'tie_encoder_decoder': False, 'max_length': 20, 'min_length': 0, 'do_sample': False, 'early_stopping': False, 'num_beams': 1, 'num_beam_groups': 1, 'diversity_penalty': 0.0, 'temperature': 1.0, 'top_k': 50, 'top_p': 1.0, 'typical_p': 1.0, 'repetition_penalty': 1.0, 'length_penalty': 1.0, 'no_repeat_ngram_size': 0, 'encoder_no_repeat_ngram_size': 0, 'bad_words_ids': None, 'num_return_sequences': 1, 'output_scores': False, 'return_dict_in_generate': False, 'forced_bos_token_id': None, 'forced_eos_token_id': None, 'remove_invalid_values': False, 'exponential_decay_length_penalty': None, 'suppress_tokens': None, 'begin_suppress_tokens': None, 'architectures': ['LlamaForCausalLM'], 'finetuning_task': None, 'id2label': {0: 'LABEL_0', 1: 'LABEL_1'}, 'label2id': {'LABEL_0': 0, 'LABEL_1': 1}, 'tokenizer_class': None, 'prefix': None, 'bos_token_id': 1, 'pad_token_id': None, 'eos_token_id': 2, 'sep_token_id': None, 'decoder_start_token_id': None, 'task_specific_params': None, 'problem_type': None, '_name_or_path': 'TinyLlama/TinyLlama-1.1B-intermediate-step-1431k-3T', 'transformers_version': '4.39.3', 'model_type': 'llama', 'quantization_config': {'quant_method': 'QuantizationMethod.BITS_AND_BYTES', '_load_in_8bit': False, '_load_in_4bit': True, 'llm_int8_threshold': 6.0, 'llm_int8_skip_modules': None, 'llm_int8_enable_fp32_cpu_offload': False, 'llm_int8_has_fp16_weight': False, 'bnb_4bit_quant_type': 'nf4', 'bnb_4bit_use_double_quant': False, 'bnb_4bit_compute_dtype': 'float16', 'bnb_4bit_quant_storage': 'uint8', 'load_in_4bit': True, 'load_in_8bit': False}, 'output_dir': '/kaggle/working/', 'overwrite_output_dir': False, 'do_train': False, 'do_eval': False, 'do_predict': False, 'evaluation_strategy': 'no', 'prediction_loss_only': False, 'per_device_train_batch_size': 2, 'per_device_eval_batch_size': 8, 'per_gpu_train_batch_size': None, 'per_gpu_eval_batch_size': None, 'gradient_accumulation_steps': 1, 'eval_accumulation_steps': None, 'eval_delay': 0, 'learning_rate': 1e-06, 'weight_decay': 0.001, 'adam_beta1': 0.9, 'adam_beta2': 0.999, 'adam_epsilon': 1e-08, 'max_grad_norm': 0.1, 'num_train_epochs': 5, 'max_steps': 200, 'lr_scheduler_type': 'cosine', 'lr_scheduler_kwargs': {}, 'warmup_ratio': 0.03, 'warmup_steps': 0, 'log_level': 'passive', 'log_level_replica': 'warning', 'log_on_each_node': True, 'logging_dir': '/kaggle/working/runs/Apr09_22-04-47_6e44b39f6877', 'logging_strategy': 'steps', 'logging_first_step': False, 'logging_steps': 100, 'logging_nan_inf_filter': True, 'save_strategy': 'steps', 'save_steps': 100, 'save_total_limit': 1, 'save_safetensors': True, 'save_on_each_node': False, 'save_only_model': False, 'no_cuda': False, 'use_cpu': 
False, 'use_mps_device': False, 'seed': 42, 'data_seed': None, 'jit_mode_eval': False, 'use_ipex': False, 'bf16': False, 'fp16': False, 'fp16_opt_level': 'O1', 'half_precision_backend': 'auto', 'bf16_full_eval': False, 'fp16_full_eval': False, 'tf32': None, 'local_rank': 0, 'ddp_backend': None, 'tpu_num_cores': None, 'tpu_metrics_debug': False, 'debug': [], 'dataloader_drop_last': False, 'eval_steps': None, 'dataloader_num_workers': 8, 'dataloader_prefetch_factor': None, 'past_index': -1, 'run_name': '/kaggle/working/', 'disable_tqdm': False, 'remove_unused_columns': True, 'label_names': None, 'load_best_model_at_end': False, 'metric_for_best_model': None, 'greater_is_better': None, 'ignore_data_skip': False, 'fsdp': [], 'fsdp_min_num_params': 0, 'fsdp_config': {'min_num_params': 0, 'xla': False, 'xla_fsdp_v2': False, 'xla_fsdp_grad_ckpt': False}, 'fsdp_transformer_layer_cls_to_wrap': None, 'accelerator_config': {'split_batches': False, 'dispatch_batches': None, 'even_batches': True, 'use_seedable_sampler': True}, 'deepspeed': None, 'label_smoothing_factor': 0.0, 'optim': 'paged_adamw_32bit', 'optim_args': None, 'adafactor': False, 'group_by_length': False, 'length_column_name': 'length', 'report_to': ['tensorboard', 'wandb'], 'ddp_find_unused_parameters': None, 'ddp_bucket_cap_mb': None, 'ddp_broadcast_buffers': None, 'dataloader_pin_memory': True, 'dataloader_persistent_workers': False, 'skip_memory_metrics': True, 'use_legacy_prediction_loop': False, 'push_to_hub': False, 'resume_from_checkpoint': None, 'hub_model_id': None, 'hub_strategy': 'every_save', 'hub_token': '<HUB_TOKEN>', 'hub_private_repo': False, 'hub_always_push': False, 'gradient_checkpointing': True, 'gradient_checkpointing_kwargs': None, 'include_inputs_for_metrics': False, 'fp16_backend': 'auto', 'push_to_hub_model_id': None, 'push_to_hub_organization': None, 'push_to_hub_token': '<PUSH_TO_HUB_TOKEN>', 'mp_parameters': '', 'auto_find_batch_size': True, 'full_determinism': False, 'torchdynamo': None, 'ray_scope': 'last', 'ddp_timeout': 1800, 'torch_compile': False, 'torch_compile_backend': None, 'torch_compile_mode': None, 'dispatch_batches': None, 'split_batches': None, 'include_tokens_per_second': False, 'include_num_input_tokens_seen': False, 'neftune_noise_alpha': None, 'optim_target_modules': None}
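The quantization_config recorded in the new debug.log (4-bit NF4, float16 compute, no double quantization) corresponds to a bitsandbytes load along these lines; this is a reconstruction from the logged values, not the notebook's code:

```python
import torch
from transformers import AutoModelForCausalLM, BitsAndBytesConfig

bnb_config = BitsAndBytesConfig(
    load_in_4bit=True,
    bnb_4bit_quant_type="nf4",
    bnb_4bit_compute_dtype=torch.float16,
    bnb_4bit_use_double_quant=False,
)
model = AutoModelForCausalLM.from_pretrained(
    "TinyLlama/TinyLlama-1.1B-intermediate-step-1431k-3T",
    quantization_config=bnb_config,
    device_map="auto",  # assumption; device placement is not logged
)
```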
wandb/run-20240409_214603-e7cki9vp/files/conda-environment.yaml ADDED
File without changes
wandb/run-20240409_214603-e7cki9vp/files/config.yaml ADDED
@@ -0,0 +1,686 @@
1
+ wandb_version: 1
2
+
3
+ _wandb:
4
+ desc: null
5
+ value:
6
+ python_version: 3.10.13
7
+ cli_version: 0.16.5
8
+ framework: huggingface
9
+ huggingface_version: 4.39.3
10
+ is_jupyter_run: true
11
+ is_kaggle_kernel: true
12
+ start_time: 1712699163.0
13
+ t:
14
+ 1:
15
+ - 1
16
+ - 2
17
+ - 3
18
+ - 5
19
+ - 11
20
+ - 12
21
+ - 49
22
+ - 51
23
+ - 53
24
+ - 55
25
+ - 71
26
+ - 84
27
+ - 98
28
+ - 105
29
+ 2:
30
+ - 1
31
+ - 2
32
+ - 3
33
+ - 5
34
+ - 11
35
+ - 12
36
+ - 49
37
+ - 51
38
+ - 53
39
+ - 55
40
+ - 71
41
+ - 84
42
+ - 98
43
+ - 105
44
+ 3:
45
+ - 7
46
+ - 23
47
+ 4: 3.10.13
48
+ 5: 0.16.5
49
+ 6: 4.39.3
50
+ 8:
51
+ - 1
52
+ - 2
53
+ - 5
54
+ 9:
55
+ 1: transformers_trainer
56
+ 13: linux-x86_64
57
+ m:
58
+ - 1: train/global_step
59
+ 6:
60
+ - 3
61
+ - 1: train/loss
62
+ 5: 1
63
+ 6:
64
+ - 1
65
+ - 1: train/grad_norm
66
+ 5: 1
67
+ 6:
68
+ - 1
69
+ - 1: train/learning_rate
70
+ 5: 1
71
+ 6:
72
+ - 1
73
+ - 1: train/epoch
74
+ 5: 1
75
+ 6:
76
+ - 1
77
+ vocab_size:
78
+ desc: null
79
+ value: 32000
80
+ max_position_embeddings:
81
+ desc: null
82
+ value: 2048
83
+ hidden_size:
84
+ desc: null
85
+ value: 2048
86
+ intermediate_size:
87
+ desc: null
88
+ value: 5632
89
+ num_hidden_layers:
90
+ desc: null
91
+ value: 22
92
+ num_attention_heads:
93
+ desc: null
94
+ value: 32
95
+ num_key_value_heads:
96
+ desc: null
97
+ value: 4
98
+ hidden_act:
99
+ desc: null
100
+ value: silu
101
+ initializer_range:
102
+ desc: null
103
+ value: 0.02
104
+ rms_norm_eps:
105
+ desc: null
106
+ value: 1.0e-05
107
+ pretraining_tp:
108
+ desc: null
109
+ value: 1
110
+ use_cache:
111
+ desc: null
112
+ value: false
113
+ rope_theta:
114
+ desc: null
115
+ value: 10000.0
116
+ rope_scaling:
117
+ desc: null
118
+ value: null
119
+ attention_bias:
120
+ desc: null
121
+ value: false
122
+ attention_dropout:
123
+ desc: null
124
+ value: 0.0
125
+ return_dict:
126
+ desc: null
127
+ value: true
128
+ output_hidden_states:
129
+ desc: null
130
+ value: false
131
+ output_attentions:
132
+ desc: null
133
+ value: false
134
+ torchscript:
135
+ desc: null
136
+ value: false
137
+ torch_dtype:
138
+ desc: null
139
+ value: float32
140
+ use_bfloat16:
141
+ desc: null
142
+ value: false
143
+ tf_legacy_loss:
144
+ desc: null
145
+ value: false
146
+ pruned_heads:
147
+ desc: null
148
+ value: {}
149
+ tie_word_embeddings:
150
+ desc: null
151
+ value: false
152
+ chunk_size_feed_forward:
153
+ desc: null
154
+ value: 0
155
+ is_encoder_decoder:
156
+ desc: null
157
+ value: false
158
+ is_decoder:
159
+ desc: null
160
+ value: false
161
+ cross_attention_hidden_size:
162
+ desc: null
163
+ value: null
164
+ add_cross_attention:
165
+ desc: null
166
+ value: false
167
+ tie_encoder_decoder:
168
+ desc: null
169
+ value: false
170
+ max_length:
171
+ desc: null
172
+ value: 20
173
+ min_length:
174
+ desc: null
175
+ value: 0
176
+ do_sample:
177
+ desc: null
178
+ value: false
179
+ early_stopping:
180
+ desc: null
181
+ value: false
182
+ num_beams:
183
+ desc: null
184
+ value: 1
185
+ num_beam_groups:
186
+ desc: null
187
+ value: 1
188
+ diversity_penalty:
189
+ desc: null
190
+ value: 0.0
191
+ temperature:
192
+ desc: null
193
+ value: 1.0
194
+ top_k:
195
+ desc: null
196
+ value: 50
197
+ top_p:
198
+ desc: null
199
+ value: 1.0
200
+ typical_p:
201
+ desc: null
202
+ value: 1.0
203
+ repetition_penalty:
204
+ desc: null
205
+ value: 1.0
206
+ length_penalty:
207
+ desc: null
208
+ value: 1.0
209
+ no_repeat_ngram_size:
210
+ desc: null
211
+ value: 0
212
+ encoder_no_repeat_ngram_size:
213
+ desc: null
214
+ value: 0
215
+ bad_words_ids:
216
+ desc: null
217
+ value: null
218
+ num_return_sequences:
219
+ desc: null
220
+ value: 1
221
+ output_scores:
222
+ desc: null
223
+ value: false
224
+ return_dict_in_generate:
225
+ desc: null
226
+ value: false
227
+ forced_bos_token_id:
228
+ desc: null
229
+ value: null
230
+ forced_eos_token_id:
231
+ desc: null
232
+ value: null
233
+ remove_invalid_values:
234
+ desc: null
235
+ value: false
236
+ exponential_decay_length_penalty:
237
+ desc: null
238
+ value: null
239
+ suppress_tokens:
240
+ desc: null
241
+ value: null
242
+ begin_suppress_tokens:
243
+ desc: null
244
+ value: null
245
+ architectures:
246
+ desc: null
247
+ value:
248
+ - LlamaForCausalLM
249
+ finetuning_task:
250
+ desc: null
251
+ value: null
252
+ id2label:
253
+ desc: null
254
+ value:
255
+ '0': LABEL_0
256
+ '1': LABEL_1
257
+ label2id:
258
+ desc: null
259
+ value:
260
+ LABEL_0: 0
261
+ LABEL_1: 1
262
+ tokenizer_class:
263
+ desc: null
264
+ value: null
265
+ prefix:
266
+ desc: null
267
+ value: null
268
+ bos_token_id:
269
+ desc: null
270
+ value: 1
271
+ pad_token_id:
272
+ desc: null
273
+ value: null
274
+ eos_token_id:
275
+ desc: null
276
+ value: 2
277
+ sep_token_id:
278
+ desc: null
279
+ value: null
280
+ decoder_start_token_id:
281
+ desc: null
282
+ value: null
283
+ task_specific_params:
284
+ desc: null
285
+ value: null
286
+ problem_type:
287
+ desc: null
288
+ value: null
289
+ _name_or_path:
290
+ desc: null
291
+ value: TinyLlama/TinyLlama-1.1B-intermediate-step-1431k-3T
292
+ transformers_version:
293
+ desc: null
294
+ value: 4.39.3
295
+ model_type:
296
+ desc: null
297
+ value: llama
298
+ quantization_config:
299
+ desc: null
300
+ value:
301
+ quant_method: QuantizationMethod.BITS_AND_BYTES
302
+ _load_in_8bit: false
303
+ _load_in_4bit: true
304
+ llm_int8_threshold: 6.0
305
+ llm_int8_skip_modules: null
306
+ llm_int8_enable_fp32_cpu_offload: false
307
+ llm_int8_has_fp16_weight: false
308
+ bnb_4bit_quant_type: nf4
309
+ bnb_4bit_use_double_quant: false
310
+ bnb_4bit_compute_dtype: float16
311
+ bnb_4bit_quant_storage: uint8
312
+ load_in_4bit: true
313
+ load_in_8bit: false
314
+ output_dir:
315
+ desc: null
316
+ value: /kaggle/working/
317
+ overwrite_output_dir:
318
+ desc: null
319
+ value: false
320
+ do_train:
321
+ desc: null
322
+ value: false
323
+ do_eval:
324
+ desc: null
325
+ value: false
326
+ do_predict:
327
+ desc: null
328
+ value: false
329
+ evaluation_strategy:
330
+ desc: null
331
+ value: 'no'
332
+ prediction_loss_only:
333
+ desc: null
334
+ value: false
335
+ per_device_train_batch_size:
336
+ desc: null
337
+ value: 2
338
+ per_device_eval_batch_size:
339
+ desc: null
340
+ value: 8
341
+ per_gpu_train_batch_size:
342
+ desc: null
343
+ value: null
344
+ per_gpu_eval_batch_size:
345
+ desc: null
346
+ value: null
347
+ gradient_accumulation_steps:
348
+ desc: null
349
+ value: 1
350
+ eval_accumulation_steps:
351
+ desc: null
352
+ value: null
353
+ eval_delay:
354
+ desc: null
355
+ value: 0
356
+ learning_rate:
357
+ desc: null
358
+ value: 5.0e-06
359
+ weight_decay:
360
+ desc: null
361
+ value: 0.001
362
+ adam_beta1:
363
+ desc: null
364
+ value: 0.9
365
+ adam_beta2:
366
+ desc: null
367
+ value: 0.999
368
+ adam_epsilon:
369
+ desc: null
370
+ value: 1.0e-08
371
+ max_grad_norm:
372
+ desc: null
373
+ value: 0.1
374
+ num_train_epochs:
375
+ desc: null
376
+ value: 5
377
+ max_steps:
378
+ desc: null
379
+ value: 70000
380
+ lr_scheduler_type:
381
+ desc: null
382
+ value: cosine
383
+ lr_scheduler_kwargs:
384
+ desc: null
385
+ value: {}
386
+ warmup_ratio:
387
+ desc: null
388
+ value: 0.03
389
+ warmup_steps:
390
+ desc: null
391
+ value: 0
392
+ log_level:
393
+ desc: null
394
+ value: passive
395
+ log_level_replica:
396
+ desc: null
397
+ value: warning
398
+ log_on_each_node:
399
+ desc: null
400
+ value: true
401
+ logging_dir:
402
+ desc: null
403
+ value: /kaggle/working/runs/Apr09_21-37-48_6e44b39f6877
404
+ logging_strategy:
405
+ desc: null
406
+ value: steps
407
+ logging_first_step:
408
+ desc: null
409
+ value: false
410
+ logging_steps:
411
+ desc: null
412
+ value: 100
413
+ logging_nan_inf_filter:
414
+ desc: null
415
+ value: true
416
+ save_strategy:
417
+ desc: null
418
+ value: steps
419
+ save_steps:
420
+ desc: null
421
+ value: 100
422
+ save_total_limit:
423
+ desc: null
424
+ value: 1
425
+ save_safetensors:
426
+ desc: null
427
+ value: true
428
+ save_on_each_node:
429
+ desc: null
430
+ value: false
431
+ save_only_model:
432
+ desc: null
433
+ value: false
434
+ no_cuda:
435
+ desc: null
436
+ value: false
437
+ use_cpu:
438
+ desc: null
439
+ value: false
440
+ use_mps_device:
441
+ desc: null
442
+ value: false
443
+ seed:
444
+ desc: null
445
+ value: 42
446
+ data_seed:
447
+ desc: null
448
+ value: null
449
+ jit_mode_eval:
450
+ desc: null
451
+ value: false
452
+ use_ipex:
453
+ desc: null
454
+ value: false
455
+ bf16:
456
+ desc: null
457
+ value: false
458
+ fp16:
459
+ desc: null
460
+ value: false
461
+ fp16_opt_level:
462
+ desc: null
463
+ value: O1
464
+ half_precision_backend:
465
+ desc: null
466
+ value: auto
467
+ bf16_full_eval:
468
+ desc: null
469
+ value: false
470
+ fp16_full_eval:
471
+ desc: null
472
+ value: false
473
+ tf32:
474
+ desc: null
475
+ value: null
476
+ local_rank:
477
+ desc: null
478
+ value: 0
479
+ ddp_backend:
480
+ desc: null
481
+ value: null
482
+ tpu_num_cores:
483
+ desc: null
484
+ value: null
485
+ tpu_metrics_debug:
486
+ desc: null
487
+ value: false
488
+ debug:
489
+ desc: null
490
+ value: []
491
+ dataloader_drop_last:
492
+ desc: null
493
+ value: false
494
+ eval_steps:
495
+ desc: null
496
+ value: null
497
+ dataloader_num_workers:
498
+ desc: null
499
+ value: 8
500
+ dataloader_prefetch_factor:
501
+ desc: null
502
+ value: null
503
+ past_index:
504
+ desc: null
505
+ value: -1
506
+ run_name:
507
+ desc: null
508
+ value: /kaggle/working/
509
+ disable_tqdm:
510
+ desc: null
511
+ value: false
512
+ remove_unused_columns:
513
+ desc: null
514
+ value: true
515
+ label_names:
516
+ desc: null
517
+ value: null
518
+ load_best_model_at_end:
519
+ desc: null
520
+ value: false
521
+ metric_for_best_model:
522
+ desc: null
523
+ value: null
524
+ greater_is_better:
525
+ desc: null
526
+ value: null
527
+ ignore_data_skip:
528
+ desc: null
529
+ value: false
530
+ fsdp:
531
+ desc: null
532
+ value: []
533
+ fsdp_min_num_params:
534
+ desc: null
535
+ value: 0
536
+ fsdp_config:
537
+ desc: null
538
+ value:
539
+ min_num_params: 0
540
+ xla: false
541
+ xla_fsdp_v2: false
542
+ xla_fsdp_grad_ckpt: false
543
+ fsdp_transformer_layer_cls_to_wrap:
544
+ desc: null
545
+ value: null
546
+ accelerator_config:
547
+ desc: null
548
+ value:
549
+ split_batches: false
550
+ dispatch_batches: null
551
+ even_batches: true
552
+ use_seedable_sampler: true
553
+ deepspeed:
554
+ desc: null
555
+ value: null
556
+ label_smoothing_factor:
557
+ desc: null
558
+ value: 0.0
559
+ optim:
560
+ desc: null
561
+ value: paged_adamw_32bit
562
+ optim_args:
563
+ desc: null
564
+ value: null
565
+ adafactor:
566
+ desc: null
567
+ value: false
568
+ group_by_length:
569
+ desc: null
570
+ value: false
571
+ length_column_name:
572
+ desc: null
573
+ value: length
574
+ report_to:
575
+ desc: null
576
+ value:
577
+ - tensorboard
578
+ - wandb
579
+ ddp_find_unused_parameters:
580
+ desc: null
581
+ value: null
582
+ ddp_bucket_cap_mb:
583
+ desc: null
584
+ value: null
585
+ ddp_broadcast_buffers:
586
+ desc: null
587
+ value: null
588
+ dataloader_pin_memory:
589
+ desc: null
590
+ value: true
591
+ dataloader_persistent_workers:
592
+ desc: null
593
+ value: false
594
+ skip_memory_metrics:
595
+ desc: null
596
+ value: true
597
+ use_legacy_prediction_loop:
598
+ desc: null
599
+ value: false
600
+ push_to_hub:
601
+ desc: null
602
+ value: false
603
+ resume_from_checkpoint:
604
+ desc: null
605
+ value: null
606
+ hub_model_id:
607
+ desc: null
608
+ value: null
609
+ hub_strategy:
610
+ desc: null
611
+ value: every_save
612
+ hub_token:
613
+ desc: null
614
+ value: <HUB_TOKEN>
615
+ hub_private_repo:
616
+ desc: null
617
+ value: false
618
+ hub_always_push:
619
+ desc: null
620
+ value: false
621
+ gradient_checkpointing:
622
+ desc: null
623
+ value: true
624
+ gradient_checkpointing_kwargs:
625
+ desc: null
626
+ value: null
627
+ include_inputs_for_metrics:
628
+ desc: null
629
+ value: false
630
+ fp16_backend:
631
+ desc: null
632
+ value: auto
633
+ push_to_hub_model_id:
634
+ desc: null
635
+ value: null
636
+ push_to_hub_organization:
637
+ desc: null
638
+ value: null
639
+ push_to_hub_token:
640
+ desc: null
641
+ value: <PUSH_TO_HUB_TOKEN>
642
+ mp_parameters:
643
+ desc: null
644
+ value: ''
645
+ auto_find_batch_size:
646
+ desc: null
647
+ value: true
648
+ full_determinism:
649
+ desc: null
650
+ value: false
651
+ torchdynamo:
652
+ desc: null
653
+ value: null
654
+ ray_scope:
655
+ desc: null
656
+ value: last
657
+ ddp_timeout:
658
+ desc: null
659
+ value: 1800
660
+ torch_compile:
661
+ desc: null
662
+ value: false
663
+ torch_compile_backend:
664
+ desc: null
665
+ value: null
666
+ torch_compile_mode:
667
+ desc: null
668
+ value: null
669
+ dispatch_batches:
670
+ desc: null
671
+ value: null
672
+ split_batches:
673
+ desc: null
674
+ value: null
675
+ include_tokens_per_second:
676
+ desc: null
677
+ value: false
678
+ include_num_input_tokens_seen:
679
+ desc: null
680
+ value: false
681
+ neftune_noise_alpha:
682
+ desc: null
683
+ value: null
684
+ optim_target_modules:
685
+ desc: null
686
+ value: null
wandb/run-20240409_214603-e7cki9vp/files/output.log ADDED
@@ -0,0 +1,54 @@
1
+ /opt/conda/lib/python3.10/site-packages/torch/utils/data/dataloader.py:557: UserWarning: This DataLoader will create 8 worker processes in total. Our suggested max number of worker in current system is 4, which is smaller than what this DataLoader is going to create. Please be aware that excessive worker creation might get DataLoader running slow or even freeze, lower the worker number to avoid potential slowness/freeze if necessary.
2
+ warnings.warn(_create_warning_msg(
3
+ huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
4
+ To disable this warning, you can either:
5
+ - Avoid using `tokenizers` before the fork if possible
6
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
7
+ huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
8
+ To disable this warning, you can either:
9
+ - Avoid using `tokenizers` before the fork if possible
10
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
11
+ huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
12
+ To disable this warning, you can either:
13
+ - Avoid using `tokenizers` before the fork if possible
14
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
15
+ huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
16
+ To disable this warning, you can either:
17
+ - Avoid using `tokenizers` before the fork if possible
18
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
19
+ huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
20
+ To disable this warning, you can either:
21
+ - Avoid using `tokenizers` before the fork if possible
22
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
23
+ huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
24
+ To disable this warning, you can either:
25
+ - Avoid using `tokenizers` before the fork if possible
26
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
27
+ huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
28
+ To disable this warning, you can either:
29
+ - Avoid using `tokenizers` before the fork if possible
30
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
31
+ huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
32
+ To disable this warning, you can either:
33
+ - Avoid using `tokenizers` before the fork if possible
34
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
35
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
36
+ warnings.warn(
37
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
38
+ warnings.warn(
39
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
40
+ warnings.warn(
41
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
42
+ warnings.warn(
43
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
44
+ warnings.warn(
45
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
46
+ warnings.warn(
47
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
48
+ warnings.warn(
49
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
50
+ warnings.warn(
51
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
52
+ warnings.warn(
53
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
54
+ warnings.warn(
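The warnings repeated in this output.log (the tokenizers fork warning, the DataLoader worker-count warning, and the torch.utils.checkpoint use_reentrant warning) are benign but noisy. A sketch of the usual ways to silence them; these are suggested settings, not part of this commit:

```python
import os

# Tokenizers fork warning: disable tokenizer parallelism before DataLoader workers fork.
os.environ["TOKENIZERS_PARALLELISM"] = "false"

from transformers import TrainingArguments

args = TrainingArguments(
    output_dir="/kaggle/working/",
    dataloader_num_workers=4,  # the warning suggests at most 4 workers on this machine
    gradient_checkpointing=True,
    # Silences the use_reentrant deprecation warning from torch.utils.checkpoint.
    gradient_checkpointing_kwargs={"use_reentrant": False},
)
```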
wandb/run-20240409_214603-e7cki9vp/files/requirements.txt ADDED
@@ -0,0 +1,864 @@
1
+ Babel==2.14.0
2
+ Boruta==0.3
3
+ Brotli==1.0.9
4
+ CVXcanon==0.1.2
5
+ Cartopy==0.22.0
6
+ Cython==3.0.8
7
+ Deprecated==1.2.14
8
+ Farama-Notifications==0.0.4
9
+ Flask==3.0.2
10
+ Geohash==1.0
11
+ GitPython==3.1.41
12
+ ImageHash==4.3.1
13
+ Janome==0.5.0
14
+ Jinja2==3.1.2
15
+ LunarCalendar==0.0.9
16
+ Mako==1.3.2
17
+ Markdown==3.5.2
18
+ MarkupSafe==2.1.3
19
+ MarkupSafe==2.1.5
20
+ Pillow==9.5.0
21
+ PuLP==2.8.0
22
+ PyArabic==0.6.15
23
+ PyJWT==2.8.0
24
+ PyMeeus==0.5.12
25
+ PySocks==1.7.1
26
+ PyUpSet==0.1.1.post7
27
+ PyWavelets==1.5.0
28
+ PyYAML==6.0.1
29
+ Pygments==2.17.2
30
+ Pympler==1.0.1
31
+ QtPy==2.4.1
32
+ Rtree==1.2.0
33
+ SQLAlchemy==2.0.25
34
+ SecretStorage==3.3.3
35
+ Send2Trash==1.8.2
36
+ Shapely==1.8.5.post1
37
+ Shimmy==1.3.0
38
+ SimpleITK==2.3.1
39
+ TPOT==0.12.1
40
+ Theano-PyMC==1.1.2
41
+ Theano==1.0.5
42
+ Wand==0.6.13
43
+ Werkzeug==3.0.2
44
+ absl-py==1.4.0
45
+ accelerate==0.28.0
46
+ access==1.1.9
47
+ affine==2.4.0
48
+ aiobotocore==2.12.2
49
+ aiofiles==22.1.0
50
+ aiohttp-cors==0.7.0
51
+ aiohttp==3.9.1
52
+ aioitertools==0.11.0
53
+ aiorwlock==1.3.0
54
+ aiosignal==1.3.1
55
+ aiosqlite==0.19.0
56
+ albumentations==1.4.0
57
+ alembic==1.13.1
58
+ altair==5.3.0
59
+ annotated-types==0.6.0
60
+ annoy==1.17.3
61
+ anyio==4.2.0
62
+ apache-beam==2.46.0
63
+ aplus==0.11.0
64
+ appdirs==1.4.4
65
+ archspec==0.2.3
66
+ argon2-cffi-bindings==21.2.0
67
+ argon2-cffi==23.1.0
68
+ array-record==0.5.0
69
+ arrow==1.3.0
70
+ arviz==0.17.1
71
+ astroid==3.1.0
72
+ astropy-iers-data==0.2024.4.1.0.33.14
73
+ astropy==6.0.1
74
+ asttokens==2.4.1
75
+ astunparse==1.6.3
76
+ async-lru==2.0.4
77
+ async-timeout==4.0.3
78
+ attrs==23.2.0
79
+ audioread==3.0.1
80
+ autopep8==2.0.4
81
+ backoff==2.2.1
82
+ bayesian-optimization==1.4.3
83
+ beatrix_jupyterlab==2023.128.151533
84
+ beautifulsoup4==4.12.2
85
+ bitsandbytes==0.43.0
86
+ blake3==0.2.1
87
+ bleach==6.1.0
88
+ blessed==1.20.0
89
+ blinker==1.7.0
90
+ blis==0.7.10
91
+ blosc2==2.6.0
92
+ bokeh==3.3.4
93
+ boltons==23.1.1
94
+ boto3==1.26.100
95
+ botocore==1.34.51
96
+ bq_helper==0.4.1
97
+ bqplot==0.12.43
98
+ branca==0.7.1
99
+ brewer2mpl==1.4.1
100
+ brotlipy==0.7.0
101
+ cached-property==1.5.2
102
+ cachetools==4.2.4
103
+ cachetools==5.3.2
104
+ catalogue==2.0.10
105
+ catalyst==22.4
106
+ catboost==1.2.3
107
+ category-encoders==2.6.3
108
+ certifi==2024.2.2
109
+ cesium==0.12.1
110
+ cffi==1.16.0
111
+ charset-normalizer==3.3.2
112
+ chex==0.1.86
113
+ cleverhans==4.0.0
114
+ click-plugins==1.1.1
115
+ click==8.1.7
116
+ cligj==0.7.2
117
+ cloud-tpu-client==0.10
118
+ cloud-tpu-profiler==2.4.0
119
+ cloudpathlib==0.16.0
120
+ cloudpickle==2.2.1
121
+ cloudpickle==3.0.0
122
+ cmdstanpy==1.2.2
123
+ colorama==0.4.6
124
+ colorcet==3.1.0
125
+ colorful==0.5.6
126
+ colorlog==6.8.2
127
+ colorlover==0.3.0
128
+ comm==0.2.1
129
+ conda-libmamba-solver==23.7.0
130
+ conda-package-handling==2.2.0
131
+ conda==23.7.4
132
+ conda_package_streaming==0.9.0
133
+ confection==0.1.4
134
+ contextily==1.6.0
135
+ contourpy==1.2.0
136
+ convertdate==2.4.0
137
+ crcmod==1.7
138
+ cryptography==41.0.7
139
+ cuda-python==12.4.0
140
+ cudf==23.8.0
141
+ cufflinks==0.17.3
142
+ cuml==23.8.0
143
+ cupy==13.0.0
144
+ cycler==0.12.1
145
+ cymem==2.0.8
146
+ cytoolz==0.12.3
147
+ daal4py==2024.2.0
148
+ daal==2024.2.0
149
+ dacite==1.8.1
150
+ dask-cuda==23.8.0
151
+ dask-cudf==23.8.0
152
+ dask-expr==1.0.9
153
+ dask==2024.4.0
154
+ dataclasses-json==0.6.4
155
+ dataproc_jupyter_plugin==0.1.66
156
+ datasets==2.16.0
157
+ datashader==0.16.0
158
+ datatile==1.0.3
159
+ db-dtypes==1.2.0
160
+ deap==1.4.1
161
+ debugpy==1.8.0
162
+ decorator==5.1.1
163
+ deepdiff==6.7.1
164
+ defusedxml==0.7.1
165
+ deprecation==2.1.0
166
+ descartes==1.1.0
167
+ dill==0.3.7
168
+ dipy==1.9.0
169
+ distlib==0.3.8
170
+ distributed==2023.7.1
171
+ distro==1.9.0
172
+ dm-tree==0.1.8
173
+ docker-pycreds==0.4.0
174
+ docker==7.0.0
175
+ docopt==0.6.2
176
+ docstring-parser==0.15
177
+ docstring-to-markdown==0.15
178
+ docutils==0.20.1
179
+ earthengine-api==0.1.395
180
+ easydict==1.13
181
+ easyocr==1.7.1
182
+ ecos==2.0.13
183
+ eli5==0.13.0
184
+ emoji==2.11.0
185
+ en-core-web-lg==3.7.1
186
+ en-core-web-sm==3.7.1
187
+ entrypoints==0.4
188
+ ephem==4.1.5
189
+ esda==2.5.1
190
+ essentia==2.1b6.dev1110
191
+ et-xmlfile==1.1.0
192
+ etils==1.6.0
193
+ exceptiongroup==1.2.0
194
+ executing==2.0.1
195
+ explainable-ai-sdk==1.3.3
196
+ fastai==2.7.14
197
+ fastapi==0.108.0
198
+ fastavro==1.9.3
199
+ fastcore==1.5.29
200
+ fastdownload==0.0.7
201
+ fasteners==0.19
202
+ fastjsonschema==2.19.1
203
+ fastprogress==1.0.3
204
+ fastrlock==0.8.2
205
+ fasttext==0.9.2
206
+ feather-format==0.4.1
207
+ featuretools==1.30.0
208
+ filelock==3.13.1
209
+ fiona==1.9.6
210
+ fitter==1.7.0
211
+ flake8==7.0.0
212
+ flashtext==2.7
213
+ flatbuffers==23.5.26
214
+ flax==0.8.2
215
+ folium==0.16.0
216
+ fonttools==4.47.0
217
+ fonttools==4.50.0
218
+ fqdn==1.5.1
219
+ frozendict==2.4.1
220
+ frozenlist==1.4.1
221
+ fsspec==2023.10.0
222
+ fsspec==2024.3.1
223
+ funcy==2.0
224
+ fury==0.10.0
225
+ future==1.0.0
226
+ fuzzywuzzy==0.18.0
227
+ gast==0.5.4
228
+ gatspy==0.3
229
+ gcsfs==2024.2.0
230
+ gensim==4.3.2
231
+ geographiclib==2.0
232
+ geojson==3.1.0
233
+ geopandas==0.14.3
234
+ geoplot==0.5.1
235
+ geopy==2.4.1
236
+ geoviews==1.11.1
237
+ ggplot==0.11.5
238
+ giddy==2.3.5
239
+ gitdb==4.0.11
240
+ google-ai-generativelanguage==0.4.0
241
+ google-api-core==2.11.1
242
+ google-api-core==2.18.0
243
+ google-api-python-client==2.125.0
244
+ google-apitools==0.5.31
245
+ google-auth-httplib2==0.2.0
246
+ google-auth-oauthlib==1.2.0
247
+ google-auth==2.26.1
248
+ google-cloud-aiplatform==0.6.0a1
249
+ google-cloud-artifact-registry==1.10.0
250
+ google-cloud-automl==1.0.1
251
+ google-cloud-bigquery==2.34.4
252
+ google-cloud-bigtable==1.7.3
253
+ google-cloud-core==2.4.1
254
+ google-cloud-datastore==2.19.0
255
+ google-cloud-dlp==3.14.0
256
+ google-cloud-jupyter-config==0.0.5
257
+ google-cloud-language==2.13.3
258
+ google-cloud-monitoring==2.18.0
259
+ google-cloud-pubsub==2.19.0
260
+ google-cloud-pubsublite==1.9.0
261
+ google-cloud-recommendations-ai==0.7.1
262
+ google-cloud-resource-manager==1.11.0
263
+ google-cloud-spanner==3.40.1
264
+ google-cloud-storage==1.44.0
265
+ google-cloud-translate==3.12.1
266
+ google-cloud-videointelligence==2.13.3
267
+ google-cloud-vision==2.8.0
268
+ google-crc32c==1.5.0
269
+ google-generativeai==0.4.1
270
+ google-pasta==0.2.0
271
+ google-resumable-media==2.7.0
272
+ googleapis-common-protos==1.62.0
273
+ gplearn==0.4.2
274
+ gpustat==1.0.0
275
+ gpxpy==1.6.2
276
+ graphviz==0.20.3
277
+ greenlet==3.0.3
278
+ grpc-google-iam-v1==0.12.7
279
+ grpcio-status==1.48.1
280
+ grpcio-status==1.48.2
281
+ grpcio==1.51.1
282
+ grpcio==1.60.0
283
+ gviz-api==1.10.0
284
+ gym-notices==0.0.8
285
+ gym==0.26.2
286
+ gymnasium==0.29.0
287
+ h11==0.14.0
288
+ h2o==3.46.0.1
289
+ h5netcdf==1.3.0
290
+ h5py==3.10.0
291
+ haversine==2.8.1
292
+ hdfs==2.7.3
293
+ hep-ml==0.7.2
294
+ hijri-converter==2.3.1
295
+ hmmlearn==0.3.2
296
+ holidays==0.24
297
+ holoviews==1.18.3
298
+ hpsklearn==0.1.0
299
+ html5lib==1.1
300
+ htmlmin==0.1.12
301
+ httpcore==1.0.5
302
+ httplib2==0.21.0
303
+ httptools==0.6.1
304
+ httpx==0.27.0
305
+ huggingface-hub==0.22.2
306
+ hunspell==0.5.5
307
+ hydra-slayer==0.5.0
308
+ hyperopt==0.2.7
309
+ hypertools==0.8.0
310
+ idna==3.6
311
+ igraph==0.11.4
312
+ imagecodecs==2024.1.1
313
+ imageio==2.33.1
314
+ imbalanced-learn==0.12.2
315
+ imgaug==0.4.0
316
+ importlib-metadata==6.11.0
317
+ importlib-metadata==7.0.1
318
+ importlib-resources==6.1.1
319
+ inequality==1.0.1
320
+ iniconfig==2.0.0
321
+ ipydatawidgets==4.3.5
322
+ ipykernel==6.28.0
323
+ ipyleaflet==0.18.2
324
+ ipympl==0.7.0
325
+ ipython-genutils==0.2.0
326
+ ipython-genutils==0.2.0
327
+ ipython-sql==0.5.0
328
+ ipython==8.20.0
329
+ ipyvolume==0.6.3
330
+ ipyvue==1.10.2
331
+ ipyvuetify==1.9.3
332
+ ipywebrtc==0.6.0
333
+ ipywidgets==7.7.1
334
+ isoduration==20.11.0
335
+ isort==5.13.2
336
+ isoweek==1.3.3
337
+ itsdangerous==2.1.2
338
+ jaraco.classes==3.3.0
339
+ jax-jumpy==1.0.0
340
+ jax==0.4.23
341
+ jaxlib==0.4.23.dev20240116
342
+ jedi==0.19.1
343
+ jeepney==0.8.0
344
+ jieba==0.42.1
345
+ jmespath==1.0.1
346
+ joblib==1.3.2
347
+ json5==0.9.14
348
+ jsonpatch==1.33
349
+ jsonpointer==2.4
350
+ jsonschema-specifications==2023.12.1
351
+ jsonschema==4.20.0
352
+ jupyter-console==6.6.3
353
+ jupyter-events==0.9.0
354
+ jupyter-http-over-ws==0.0.8
355
+ jupyter-lsp==1.5.1
356
+ jupyter-server-mathjax==0.2.6
357
+ jupyter-ydoc==0.2.5
358
+ jupyter_client==7.4.9
359
+ jupyter_client==8.6.0
360
+ jupyter_core==5.7.1
361
+ jupyter_server==2.13.0
362
+ jupyter_server_fileid==0.9.1
363
+ jupyter_server_proxy==4.1.0
364
+ jupyter_server_terminals==0.5.1
365
+ jupyter_server_ydoc==0.8.0
366
+ jupyterlab-lsp==5.1.0
367
+ jupyterlab-widgets==3.0.9
368
+ jupyterlab==4.1.5
369
+ jupyterlab_git==0.44.0
370
+ jupyterlab_pygments==0.3.0
371
+ jupyterlab_server==2.25.2
372
+ jupytext==1.16.0
373
+ kaggle-environments==1.14.3
374
+ kaggle==1.6.8
375
+ kagglehub==0.2.2
376
+ keras-cv==0.8.2
377
+ keras-nlp==0.8.2
378
+ keras-tuner==1.4.6
379
+ keras==3.1.1
380
+ kernels-mixer==0.0.7
381
+ keyring==24.3.0
382
+ keyrings.google-artifactregistry-auth==1.1.2
383
+ kfp-pipeline-spec==0.2.2
384
+ kfp-server-api==2.0.5
385
+ kfp==2.5.0
386
+ kiwisolver==1.4.5
387
+ kmapper==2.0.1
388
+ kmodes==0.12.2
389
+ korean-lunar-calendar==0.3.1
390
+ kornia==0.7.2
391
+ kornia_rs==0.1.3
392
+ kt-legacy==1.0.5
393
+ kubernetes==26.1.0
394
+ langcodes==3.3.0
395
+ langid==1.1.6
396
+ lazy_loader==0.3
397
+ learntools==0.3.4
398
+ leven==1.0.4
399
+ libclang==16.0.6
400
+ libmambapy==1.5.0
401
+ libpysal==4.9.2
402
+ librosa==0.10.1
403
+ lightgbm==4.2.0
404
+ lightning-utilities==0.11.2
405
+ lime==0.2.0.1
406
+ line-profiler==4.1.2
407
+ linkify-it-py==2.0.3
408
+ llvmlite==0.41.1
409
+ llvmlite==0.42.0
410
+ lml==0.1.0
411
+ locket==1.0.0
412
+ loguru==0.7.2
413
+ lxml==5.2.1
414
+ lz4==4.3.3
415
+ mamba==1.5.0
416
+ mapclassify==2.6.1
417
+ markdown-it-py==3.0.0
418
+ marshmallow==3.21.1
419
+ matplotlib-inline==0.1.6
420
+ matplotlib-venn==0.11.10
421
+ matplotlib==3.7.5
422
+ matplotlib==3.8.3
423
+ mccabe==0.7.0
424
+ mdit-py-plugins==0.4.0
425
+ mdurl==0.1.2
426
+ memory-profiler==0.61.0
427
+ menuinst==2.0.1
428
+ mercantile==1.2.1
429
+ mgwr==2.2.1
430
+ missingno==0.5.2
431
+ mistune==0.8.4
432
+ mizani==0.11.1
433
+ ml-dtypes==0.2.0
434
+ mlcrate==0.2.0
435
+ mlens==0.2.3
436
+ mlxtend==0.23.1
437
+ mne==1.6.1
438
+ mnist==0.2.2
439
+ momepy==0.7.0
440
+ more-itertools==10.2.0
441
+ mpld3==0.5.10
442
+ mpmath==1.3.0
443
+ msgpack==1.0.7
444
+ multidict==6.0.4
445
+ multimethod==1.10
446
+ multipledispatch==1.0.0
447
+ multiprocess==0.70.15
448
+ munkres==1.1.4
449
+ murmurhash==1.0.10
450
+ mypy-extensions==1.0.0
451
+ namex==0.0.7
452
+ nb-conda-kernels==2.3.1
453
+ nb_conda==2.2.1
454
+ nbclassic==1.0.0
455
+ nbclient==0.5.13
456
+ nbconvert==6.4.5
457
+ nbdime==3.2.0
458
+ nbformat==5.9.2
459
+ ndindex==1.8
460
+ nest-asyncio==1.5.8
461
+ networkx==3.2.1
462
+ nibabel==5.2.1
463
+ nilearn==0.10.3
464
+ ninja==1.11.1.1
465
+ nltk==3.2.4
466
+ nose==1.3.7
467
+ notebook==6.5.4
468
+ notebook==6.5.6
469
+ notebook_executor==0.2
470
+ notebook_shim==0.2.3
471
+ numba==0.58.1
472
+ numba==0.59.1
473
+ numexpr==2.10.0
474
+ numpy==1.26.4
475
+ nvidia-ml-py==11.495.46
476
+ nvtx==0.2.10
477
+ oauth2client==4.1.3
478
+ oauthlib==3.2.2
479
+ objsize==0.6.1
480
+ odfpy==1.4.1
481
+ olefile==0.47
482
+ onnx==1.16.0
483
+ opencensus-context==0.1.3
484
+ opencensus==0.11.4
485
+ opencv-contrib-python==4.9.0.80
486
+ opencv-python-headless==4.9.0.80
487
+ opencv-python==4.9.0.80
488
+ openpyxl==3.1.2
489
+ openslide-python==1.3.1
490
+ opentelemetry-api==1.22.0
491
+ opentelemetry-exporter-otlp-proto-common==1.22.0
492
+ opentelemetry-exporter-otlp-proto-grpc==1.22.0
493
+ opentelemetry-exporter-otlp-proto-http==1.22.0
494
+ opentelemetry-exporter-otlp==1.22.0
495
+ opentelemetry-proto==1.22.0
496
+ opentelemetry-sdk==1.22.0
497
+ opentelemetry-semantic-conventions==0.43b0
498
+ opt-einsum==3.3.0
499
+ optax==0.2.2
500
+ optree==0.11.0
501
+ optuna==3.6.1
502
+ orbax-checkpoint==0.5.7
503
+ ordered-set==4.1.0
504
+ orjson==3.9.10
505
+ ortools==9.4.1874
506
+ osmnx==1.9.2
507
+ overrides==7.4.0
508
+ packaging==21.3
509
+ pandas-datareader==0.10.0
510
+ pandas-profiling==3.6.6
511
+ pandas-summary==0.2.0
512
+ pandas==2.1.4
513
+ pandas==2.2.1
514
+ pandasql==0.7.3
515
+ pandocfilters==1.5.0
516
+ panel==1.3.8
517
+ papermill==2.5.0
518
+ param==2.1.0
519
+ parso==0.8.3
520
+ partd==1.4.1
521
+ path.py==12.5.0
522
+ path==16.10.0
523
+ pathos==0.3.2
524
+ pathy==0.10.3
525
+ patsy==0.5.6
526
+ pdf2image==1.17.0
527
+ peft==0.10.0
528
+ pettingzoo==1.24.0
529
+ pexpect==4.8.0
530
+ pexpect==4.9.0
531
+ phik==0.12.4
532
+ pickleshare==0.7.5
533
+ pillow==10.3.0
534
+ pip==23.3.2
535
+ pkgutil_resolve_name==1.3.10
536
+ platformdirs==4.2.0
537
+ plotly-express==0.4.1
538
+ plotly==5.18.0
539
+ plotnine==0.13.4
540
+ pluggy==1.4.0
541
+ pointpats==2.4.0
542
+ polars==0.20.18
543
+ polyglot==16.7.4
544
+ pooch==1.8.1
545
+ pox==0.3.4
546
+ ppca==0.0.4
547
+ ppft==1.7.6.8
548
+ preprocessing==0.1.13
549
+ preshed==3.0.9
550
+ prettytable==3.9.0
551
+ progressbar2==4.4.2
552
+ prometheus-client==0.19.0
553
+ promise==2.3
554
+ prompt-toolkit==3.0.42
555
+ prompt-toolkit==3.0.43
556
+ prophet==1.1.1
557
+ proto-plus==1.23.0
558
+ protobuf==3.20.3
559
+ protobuf==4.21.12
560
+ psutil==5.9.3
561
+ psutil==5.9.7
562
+ ptyprocess==0.7.0
563
+ pudb==2024.1
564
+ pure-eval==0.2.2
565
+ py-cpuinfo==9.0.0
566
+ py-spy==0.3.14
567
+ py4j==0.10.9.7
568
+ pyLDAvis==3.4.1
569
+ pyOpenSSL==23.3.0
570
+ pyaml==23.12.0
571
+ pyarrow-hotfix==0.6
572
+ pyarrow==15.0.2
573
+ pyasn1-modules==0.3.0
574
+ pyasn1==0.5.1
575
+ pybind11==2.12.0
576
+ pyclipper==1.3.0.post5
577
+ pycodestyle==2.11.1
578
+ pycosat==0.6.6
579
+ pycparser==2.21
580
+ pycryptodome==3.20.0
581
+ pyct==0.5.0
582
+ pycuda==2024.1
583
+ pydantic==2.5.3
584
+ pydantic==2.6.4
585
+ pydantic_core==2.14.6
586
+ pydantic_core==2.16.3
587
+ pydegensac==0.1.2
588
+ pydicom==2.4.4
589
+ pydocstyle==6.3.0
590
+ pydot==1.4.2
591
+ pydub==0.25.1
592
+ pyemd==1.0.0
593
+ pyerfa==2.0.1.1
594
+ pyexcel-io==0.6.6
595
+ pyexcel-ods==0.6.0
596
+ pyflakes==3.2.0
597
+ pygltflib==1.16.2
598
+ pykalman==0.9.7
599
+ pylibraft==23.8.0
600
+ pylint==3.1.0
601
+ pymc3==3.11.4
602
+ pymongo==3.13.0
603
+ pynndescent==0.5.12
604
+ pynvml==11.4.1
605
+ pynvrtc==9.2
606
+ pyparsing==3.1.1
607
+ pyparsing==3.1.2
608
+ pypdf==4.1.0
609
+ pyproj==3.6.1
610
+ pysal==24.1
611
+ pyshp==2.3.1
612
+ pytesseract==0.3.10
613
+ pytest==8.1.1
614
+ python-bidi==0.4.2
615
+ python-dateutil==2.9.0.post0
616
+ python-dotenv==1.0.0
617
+ python-json-logger==2.0.7
618
+ python-louvain==0.16
619
+ python-lsp-jsonrpc==1.1.2
620
+ python-lsp-server==1.11.0
621
+ python-slugify==8.0.4
622
+ python-utils==3.8.2
623
+ pythreejs==2.4.2
624
+ pytoolconfig==1.3.1
625
+ pytools==2024.1.1
626
+ pytorch-ignite==0.5.0.post2
627
+ pytorch-lightning==2.2.1
628
+ pytz==2023.3.post1
629
+ pytz==2024.1
630
+ pyu2f==0.1.5
631
+ pyviz_comms==3.0.2
632
+ pyzmq==24.0.1
633
+ pyzmq==25.1.2
634
+ qgrid==1.3.1
635
+ qtconsole==5.5.1
636
+ quantecon==0.7.2
637
+ qudida==0.0.4
638
+ raft-dask==23.8.0
639
+ rasterio==1.3.9
640
+ rasterstats==0.19.0
641
+ ray-cpp==2.9.0
642
+ ray==2.9.0
643
+ referencing==0.32.1
644
+ regex==2023.12.25
645
+ requests-oauthlib==1.3.1
646
+ requests-toolbelt==0.10.1
647
+ requests==2.31.0
648
+ retrying==1.3.3
649
+ retrying==1.3.4
650
+ rfc3339-validator==0.1.4
651
+ rfc3986-validator==0.1.1
652
+ rgf-python==3.12.0
653
+ rich-click==1.7.4
654
+ rich==13.7.0
655
+ rich==13.7.1
656
+ rmm==23.8.0
657
+ rope==1.13.0
658
+ rpds-py==0.16.2
659
+ rsa==4.9
660
+ ruamel-yaml-conda==0.15.100
661
+ ruamel.yaml.clib==0.2.7
662
+ ruamel.yaml==0.17.40
663
+ s2sphere==0.2.5
664
+ s3fs==2024.2.0
665
+ s3transfer==0.6.2
666
+ safetensors==0.4.2
667
+ scattertext==0.1.19
668
+ scikit-image==0.22.0
669
+ scikit-learn-intelex==2024.2.0
670
+ scikit-learn==1.2.2
671
+ scikit-multilearn==0.2.0
672
+ scikit-optimize==0.10.1
673
+ scikit-plot==0.3.7
674
+ scikit-surprise==1.1.3
675
+ scipy==1.11.4
676
+ scipy==1.12.0
677
+ seaborn==0.12.2
678
+ segment_anything==1.0
679
+ segregation==2.5
680
+ semver==3.0.2
681
+ sentencepiece==0.2.0
682
+ sentry-sdk==1.44.1
683
+ setproctitle==1.3.3
684
+ setuptools-git==1.2
685
+ setuptools-scm==8.0.4
686
+ setuptools==69.0.3
687
+ shap==0.44.1
688
+ shapely==2.0.3
689
+ shellingham==1.5.4
690
+ shtab==1.7.1
691
+ simpervisor==1.0.0
692
+ simplejson==3.19.2
693
+ six==1.16.0
694
+ sklearn-pandas==2.2.0
695
+ slicer==0.0.7
696
+ smart-open==6.4.0
697
+ smmap==5.0.1
698
+ sniffio==1.3.0
699
+ snowballstemmer==2.2.0
700
+ snuggs==1.4.7
701
+ sortedcontainers==2.4.0
702
+ soundfile==0.12.1
703
+ soupsieve==2.5
704
+ soxr==0.3.7
705
+ spacy-legacy==3.0.12
706
+ spacy-loggers==1.0.5
707
+ spacy==3.7.2
708
+ spaghetti==1.7.5.post1
709
+ spectral==0.23.1
710
+ spglm==1.1.0
711
+ sphinx-rtd-theme==0.2.4
712
+ spint==1.0.7
713
+ splot==1.1.5.post1
714
+ spopt==0.6.0
715
+ spreg==1.4.2
716
+ spvcm==0.3.0
717
+ sqlparse==0.4.4
718
+ squarify==0.4.3
719
+ srsly==2.4.8
720
+ stable-baselines3==2.1.0
721
+ stack-data==0.6.2
722
+ stack-data==0.6.3
723
+ stanio==0.5.0
724
+ starlette==0.32.0.post1
725
+ statsmodels==0.14.1
726
+ stemming==1.0.1
727
+ stop-words==2018.7.23
728
+ stopit==1.1.2
729
+ stumpy==1.12.0
730
+ sympy==1.12
731
+ tables==3.9.2
732
+ tabulate==0.9.0
733
+ tangled-up-in-unicode==0.2.0
734
+ tbb==2021.12.0
735
+ tblib==3.0.0
736
+ tenacity==8.2.3
737
+ tensorboard-data-server==0.7.2
738
+ tensorboard-plugin-profile==2.15.0
739
+ tensorboard==2.15.1
740
+ tensorboardX==2.6.2.2
741
+ tensorflow-cloud==0.1.16
742
+ tensorflow-datasets==4.9.4
743
+ tensorflow-decision-forests==1.8.1
744
+ tensorflow-estimator==2.15.0
745
+ tensorflow-hub==0.16.1
746
+ tensorflow-io-gcs-filesystem==0.35.0
747
+ tensorflow-io==0.35.0
748
+ tensorflow-metadata==0.14.0
749
+ tensorflow-probability==0.23.0
750
+ tensorflow-serving-api==2.14.1
751
+ tensorflow-text==2.15.0
752
+ tensorflow-transform==0.14.0
753
+ tensorflow==2.15.0
754
+ tensorstore==0.1.56
755
+ termcolor==2.4.0
756
+ terminado==0.18.0
757
+ testpath==0.6.0
758
+ text-unidecode==1.3
759
+ textblob==0.18.0.post0
760
+ texttable==1.7.0
761
+ tf_keras==2.15.1
762
+ tfp-nightly==0.24.0.dev0
763
+ thinc==8.2.2
764
+ threadpoolctl==3.2.0
765
+ tifffile==2023.12.9
766
+ timm==0.9.16
767
+ tinycss2==1.2.1
768
+ tobler==0.11.2
769
+ tokenizers==0.15.2
770
+ toml==0.10.2
771
+ tomli==2.0.1
772
+ tomlkit==0.12.4
773
+ toolz==0.12.1
774
+ torch==2.1.2
775
+ torchaudio==2.1.2
776
+ torchdata==0.7.1
777
+ torchinfo==1.8.0
778
+ torchmetrics==1.3.2
779
+ torchtext==0.16.2
780
+ torchvision==0.16.2
781
+ tornado==6.3.3
782
+ tqdm==4.66.1
783
+ traceml==1.0.8
784
+ traitlets==5.9.0
785
+ traittypes==0.2.1
786
+ transformers==4.39.3
787
+ treelite-runtime==3.2.0
788
+ treelite==3.2.0
789
+ trl==0.8.1
790
+ truststore==0.8.0
791
+ trx-python==0.2.9
792
+ tsfresh==0.20.2
793
+ typeguard==4.1.5
794
+ typer==0.9.0
795
+ typer==0.9.4
796
+ types-python-dateutil==2.8.19.20240106
797
+ typing-inspect==0.9.0
798
+ typing-utils==0.1.0
799
+ typing_extensions==4.9.0
800
+ tyro==0.8.3
801
+ tzdata==2023.4
802
+ uc-micro-py==1.0.3
803
+ ucx-py==0.33.0
804
+ ujson==5.9.0
805
+ umap-learn==0.5.5
806
+ unicodedata2==15.1.0
807
+ update-checker==0.18.0
808
+ uri-template==1.3.0
809
+ uritemplate==3.0.1
810
+ urllib3==1.26.18
811
+ urllib3==2.1.0
812
+ urwid==2.6.10
813
+ urwid_readline==0.14
814
+ uvicorn==0.25.0
815
+ uvloop==0.19.0
816
+ vaex-astro==0.9.3
817
+ vaex-core==4.17.1
818
+ vaex-hdf5==0.14.1
819
+ vaex-jupyter==0.8.2
820
+ vaex-ml==0.18.3
821
+ vaex-server==0.9.0
822
+ vaex-viz==0.5.4
823
+ vaex==4.17.0
824
+ vec_noise==1.1.4
825
+ vecstack==0.4.0
826
+ virtualenv==20.21.0
827
+ visions==0.7.5
828
+ vowpalwabbit==9.9.0
829
+ vtk==9.3.0
830
+ wandb==0.16.5
831
+ wasabi==1.1.2
832
+ watchfiles==0.21.0
833
+ wavio==0.0.8
834
+ wcwidth==0.2.13
835
+ weasel==0.3.4
836
+ webcolors==1.13
837
+ webencodings==0.5.1
838
+ websocket-client==1.7.0
839
+ websockets==12.0
840
+ wfdb==4.1.2
841
+ whatthepatch==1.0.5
842
+ wheel==0.42.0
843
+ widgetsnbextension==3.6.6
844
+ witwidget==1.8.1
845
+ woodwork==0.29.0
846
+ wordcloud==1.9.3
847
+ wordsegment==1.3.1
848
+ wrapt==1.14.1
849
+ xarray-einstats==0.7.0
850
+ xarray==2024.3.0
851
+ xgboost==2.0.3
852
+ xvfbwrapper==0.2.9
853
+ xxhash==3.4.1
854
+ xyzservices==2023.10.1
855
+ y-py==0.6.2
856
+ yapf==0.40.2
857
+ yarl==1.9.3
858
+ yarl==1.9.4
859
+ ydata-profiling==4.6.4
860
+ yellowbrick==1.5
861
+ ypy-websocket==0.8.4
862
+ zict==3.0.0
863
+ zipp==3.17.0
864
+ zstandard==0.22.0
wandb/run-20240409_214603-e7cki9vp/files/wandb-metadata.json ADDED
@@ -0,0 +1,66 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "os": "Linux-5.15.133+-x86_64-with-glibc2.31",
3
+ "python": "3.10.13",
4
+ "heartbeatAt": "2024-04-09T21:46:03.782958",
5
+ "startedAt": "2024-04-09T21:46:03.059424",
6
+ "docker": null,
7
+ "cuda": null,
8
+ "args": [],
9
+ "state": "running",
10
+ "program": "kaggle.ipynb",
11
+ "codePathLocal": null,
12
+ "root": "/kaggle/working",
13
+ "host": "6e44b39f6877",
14
+ "username": "root",
15
+ "executable": "/opt/conda/bin/python3.10",
16
+ "cpu_count": 2,
17
+ "cpu_count_logical": 4,
18
+ "cpu_freq": {
19
+ "current": 2000.152,
20
+ "min": 0.0,
21
+ "max": 0.0
22
+ },
23
+ "cpu_freq_per_core": [
24
+ {
25
+ "current": 2000.152,
26
+ "min": 0.0,
27
+ "max": 0.0
28
+ },
29
+ {
30
+ "current": 2000.152,
31
+ "min": 0.0,
32
+ "max": 0.0
33
+ },
34
+ {
35
+ "current": 2000.152,
36
+ "min": 0.0,
37
+ "max": 0.0
38
+ },
39
+ {
40
+ "current": 2000.152,
41
+ "min": 0.0,
42
+ "max": 0.0
43
+ }
44
+ ],
45
+ "disk": {
46
+ "/": {
47
+ "total": 8062.387607574463,
48
+ "used": 5568.826061248779
49
+ }
50
+ },
51
+ "gpu": "Tesla T4",
52
+ "gpu_count": 2,
53
+ "gpu_devices": [
54
+ {
55
+ "name": "Tesla T4",
56
+ "memory_total": 16106127360
57
+ },
58
+ {
59
+ "name": "Tesla T4",
60
+ "memory_total": 16106127360
61
+ }
62
+ ],
63
+ "memory": {
64
+ "total": 31.357559204101562
65
+ }
66
+ }
wandb/run-20240409_214603-e7cki9vp/files/wandb-summary.json ADDED
@@ -0,0 +1 @@
 
 
1
+ {"train/loss": 2.8094, "train/grad_norm": 0.0, "train/learning_rate": 2.1428571428571427e-06, "train/epoch": 0.0, "train/global_step": 900, "_timestamp": 1712699676.9552884, "_runtime": 513.8878283500671, "_step": 8, "_wandb": {"runtime": 535}}
wandb/run-20240409_214603-e7cki9vp/logs/debug-internal.log ADDED
@@ -0,0 +1,480 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ 2024-04-09 21:46:03,067 INFO StreamThr :160 [internal.py:wandb_internal():86] W&B internal server running at pid: 160, started at: 2024-04-09 21:46:03.066337
2
+ 2024-04-09 21:46:03,068 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status
3
+ 2024-04-09 21:46:03,465 INFO WriterThread:160 [datastore.py:open_for_write():87] open: /kaggle/working/wandb/run-20240409_214603-e7cki9vp/run-e7cki9vp.wandb
4
+ 2024-04-09 21:46:03,466 DEBUG SenderThread:160 [sender.py:send():379] send: header
5
+ 2024-04-09 21:46:03,470 DEBUG SenderThread:160 [sender.py:send():379] send: run
6
+ 2024-04-09 21:46:03,649 INFO SenderThread:160 [dir_watcher.py:__init__():211] watching files in: /kaggle/working/wandb/run-20240409_214603-e7cki9vp/files
7
+ 2024-04-09 21:46:03,649 INFO SenderThread:160 [sender.py:_start_run_threads():1124] run started: e7cki9vp with start time 1712699163.06746
8
+ 2024-04-09 21:46:03,658 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: check_version
9
+ 2024-04-09 21:46:03,658 DEBUG SenderThread:160 [sender.py:send_request():406] send_request: check_version
10
+ 2024-04-09 21:46:03,756 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: run_start
11
+ 2024-04-09 21:46:03,768 DEBUG HandlerThread:160 [system_info.py:__init__():26] System info init
12
+ 2024-04-09 21:46:03,768 DEBUG HandlerThread:160 [system_info.py:__init__():41] System info init done
13
+ 2024-04-09 21:46:03,768 INFO HandlerThread:160 [system_monitor.py:start():194] Starting system monitor
14
+ 2024-04-09 21:46:03,768 INFO SystemMonitor:160 [system_monitor.py:_start():158] Starting system asset monitoring threads
15
+ 2024-04-09 21:46:03,768 INFO HandlerThread:160 [system_monitor.py:probe():214] Collecting system info
16
+ 2024-04-09 21:46:03,769 INFO SystemMonitor:160 [interfaces.py:start():190] Started cpu monitoring
17
+ 2024-04-09 21:46:03,769 INFO SystemMonitor:160 [interfaces.py:start():190] Started disk monitoring
18
+ 2024-04-09 21:46:03,770 INFO SystemMonitor:160 [interfaces.py:start():190] Started gpu monitoring
19
+ 2024-04-09 21:46:03,771 INFO SystemMonitor:160 [interfaces.py:start():190] Started memory monitoring
20
+ 2024-04-09 21:46:03,771 INFO SystemMonitor:160 [interfaces.py:start():190] Started network monitoring
21
+ 2024-04-09 21:46:03,782 DEBUG HandlerThread:160 [system_info.py:probe():150] Probing system
22
+ 2024-04-09 21:46:03,785 DEBUG HandlerThread:160 [gitlib.py:_init_repo():56] git repository is invalid
23
+ 2024-04-09 21:46:03,785 DEBUG HandlerThread:160 [system_info.py:probe():198] Probing system done
24
+ 2024-04-09 21:46:03,785 DEBUG HandlerThread:160 [system_monitor.py:probe():223] {'os': 'Linux-5.15.133+-x86_64-with-glibc2.31', 'python': '3.10.13', 'heartbeatAt': '2024-04-09T21:46:03.782958', 'startedAt': '2024-04-09T21:46:03.059424', 'docker': None, 'cuda': None, 'args': (), 'state': 'running', 'program': 'kaggle.ipynb', 'codePathLocal': None, 'root': '/kaggle/working', 'host': '6e44b39f6877', 'username': 'root', 'executable': '/opt/conda/bin/python3.10', 'cpu_count': 2, 'cpu_count_logical': 4, 'cpu_freq': {'current': 2000.152, 'min': 0.0, 'max': 0.0}, 'cpu_freq_per_core': [{'current': 2000.152, 'min': 0.0, 'max': 0.0}, {'current': 2000.152, 'min': 0.0, 'max': 0.0}, {'current': 2000.152, 'min': 0.0, 'max': 0.0}, {'current': 2000.152, 'min': 0.0, 'max': 0.0}], 'disk': {'/': {'total': 8062.387607574463, 'used': 5568.826061248779}}, 'gpu': 'Tesla T4', 'gpu_count': 2, 'gpu_devices': [{'name': 'Tesla T4', 'memory_total': 16106127360}, {'name': 'Tesla T4', 'memory_total': 16106127360}], 'memory': {'total': 31.357559204101562}}
25
+ 2024-04-09 21:46:03,785 INFO HandlerThread:160 [system_monitor.py:probe():224] Finished collecting system info
26
+ 2024-04-09 21:46:03,785 INFO HandlerThread:160 [system_monitor.py:probe():227] Publishing system info
27
+ 2024-04-09 21:46:03,785 DEBUG HandlerThread:160 [system_info.py:_save_conda():207] Saving list of conda packages installed into the current environment
28
+ 2024-04-09 21:46:04,652 INFO Thread-12 :160 [dir_watcher.py:_on_file_created():271] file/dir created: /kaggle/working/wandb/run-20240409_214603-e7cki9vp/files/conda-environment.yaml
29
+ 2024-04-09 21:46:18,800 ERROR HandlerThread:160 [system_info.py:_save_conda():221] Error saving conda packages: Command '['conda', 'env', 'export']' timed out after 15 seconds
30
+ Traceback (most recent call last):
31
+ File "/opt/conda/lib/python3.10/site-packages/wandb/sdk/internal/system/system_info.py", line 214, in _save_conda
32
+ subprocess.call(
33
+ File "/opt/conda/lib/python3.10/subprocess.py", line 347, in call
34
+ return p.wait(timeout=timeout)
35
+ File "/opt/conda/lib/python3.10/subprocess.py", line 1209, in wait
36
+ return self._wait(timeout=timeout)
37
+ File "/opt/conda/lib/python3.10/subprocess.py", line 1951, in _wait
38
+ raise TimeoutExpired(self.args, timeout)
39
+ subprocess.TimeoutExpired: Command '['conda', 'env', 'export']' timed out after 15 seconds
40
+ 2024-04-09 21:46:18,802 DEBUG HandlerThread:160 [system_info.py:_save_conda():222] Saving conda packages done
41
+ 2024-04-09 21:46:18,803 INFO HandlerThread:160 [system_monitor.py:probe():229] Finished publishing system info
42
+ 2024-04-09 21:46:18,810 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
43
+ 2024-04-09 21:46:18,810 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: keepalive
44
+ 2024-04-09 21:46:18,810 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
45
+ 2024-04-09 21:46:18,810 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: keepalive
46
+ 2024-04-09 21:46:18,810 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
47
+ 2024-04-09 21:46:18,810 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: keepalive
48
+ 2024-04-09 21:46:18,811 DEBUG SenderThread:160 [sender.py:send():379] send: files
49
+ 2024-04-09 21:46:18,811 INFO SenderThread:160 [sender.py:_save_file():1390] saving file wandb-metadata.json with policy now
50
+ 2024-04-09 21:46:19,179 INFO wandb-upload_0:160 [upload_job.py:push():131] Uploaded file /tmp/tmp92pjcj01wandb/kvu2hqxk-wandb-metadata.json
51
+ 2024-04-09 21:46:19,654 INFO Thread-12 :160 [dir_watcher.py:_on_file_created():271] file/dir created: /kaggle/working/wandb/run-20240409_214603-e7cki9vp/files/wandb-metadata.json
52
+ 2024-04-09 21:46:19,806 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: python_packages
53
+ 2024-04-09 21:46:19,806 DEBUG SenderThread:160 [sender.py:send_request():406] send_request: python_packages
54
+ 2024-04-09 21:46:19,810 DEBUG SenderThread:160 [sender.py:send():379] send: telemetry
55
+ 2024-04-09 21:46:19,812 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: stop_status
56
+ 2024-04-09 21:46:19,812 DEBUG SenderThread:160 [sender.py:send_request():406] send_request: stop_status
57
+ 2024-04-09 21:46:19,862 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: internal_messages
58
+ 2024-04-09 21:46:19,872 DEBUG SenderThread:160 [sender.py:send():379] send: config
59
+ 2024-04-09 21:46:19,874 DEBUG SenderThread:160 [sender.py:send():379] send: metric
60
+ 2024-04-09 21:46:19,874 DEBUG SenderThread:160 [sender.py:send():379] send: telemetry
61
+ 2024-04-09 21:46:19,874 DEBUG SenderThread:160 [sender.py:send():379] send: metric
62
+ 2024-04-09 21:46:19,874 WARNING SenderThread:160 [sender.py:send_metric():1341] Seen metric with glob (shouldn't happen)
63
+ 2024-04-09 21:46:19,874 DEBUG SenderThread:160 [sender.py:send():379] send: telemetry
64
+ 2024-04-09 21:46:20,655 INFO Thread-12 :160 [dir_watcher.py:_on_file_created():271] file/dir created: /kaggle/working/wandb/run-20240409_214603-e7cki9vp/files/requirements.txt
65
+ 2024-04-09 21:46:20,655 INFO Thread-12 :160 [dir_watcher.py:_on_file_created():271] file/dir created: /kaggle/working/wandb/run-20240409_214603-e7cki9vp/files/output.log
66
+ 2024-04-09 21:46:22,655 INFO Thread-12 :160 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240409_214603-e7cki9vp/files/output.log
67
+ 2024-04-09 21:46:24,034 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
68
+ 2024-04-09 21:46:29,035 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
69
+ 2024-04-09 21:46:34,041 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
70
+ 2024-04-09 21:46:34,660 INFO Thread-12 :160 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240409_214603-e7cki9vp/files/config.yaml
71
+ 2024-04-09 21:46:34,809 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: internal_messages
72
+ 2024-04-09 21:46:34,810 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: stop_status
73
+ 2024-04-09 21:46:34,811 DEBUG SenderThread:160 [sender.py:send_request():406] send_request: stop_status
74
+ 2024-04-09 21:46:39,961 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
75
+ 2024-04-09 21:46:44,962 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
76
+ 2024-04-09 21:46:49,807 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: stop_status
77
+ 2024-04-09 21:46:49,807 DEBUG SenderThread:160 [sender.py:send_request():406] send_request: stop_status
78
+ 2024-04-09 21:46:49,847 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: internal_messages
79
+ 2024-04-09 21:46:50,920 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
80
+ 2024-04-09 21:46:55,921 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
81
+ 2024-04-09 21:47:00,922 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
82
+ 2024-04-09 21:47:03,772 DEBUG SystemMonitor:160 [system_monitor.py:_start():172] Starting system metrics aggregation loop
83
+ 2024-04-09 21:47:03,774 DEBUG SenderThread:160 [sender.py:send():379] send: stats
84
+ 2024-04-09 21:47:04,807 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: stop_status
85
+ 2024-04-09 21:47:04,807 DEBUG SenderThread:160 [sender.py:send_request():406] send_request: stop_status
86
+ 2024-04-09 21:47:04,848 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: internal_messages
87
+ 2024-04-09 21:47:05,925 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
88
+ 2024-04-09 21:47:10,926 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
89
+ 2024-04-09 21:47:14,340 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: partial_history
90
+ 2024-04-09 21:47:14,342 DEBUG SenderThread:160 [sender.py:send():379] send: metric
91
+ 2024-04-09 21:47:14,342 DEBUG SenderThread:160 [sender.py:send():379] send: metric
92
+ 2024-04-09 21:47:14,343 DEBUG SenderThread:160 [sender.py:send():379] send: metric
93
+ 2024-04-09 21:47:14,343 DEBUG SenderThread:160 [sender.py:send():379] send: metric
94
+ 2024-04-09 21:47:14,343 DEBUG SenderThread:160 [sender.py:send():379] send: history
95
+ 2024-04-09 21:47:14,343 DEBUG SenderThread:160 [sender.py:send_request():406] send_request: summary_record
96
+ 2024-04-09 21:47:14,345 INFO SenderThread:160 [sender.py:_save_file():1390] saving file wandb-summary.json with policy end
97
+ 2024-04-09 21:47:14,675 INFO Thread-12 :160 [dir_watcher.py:_on_file_created():271] file/dir created: /kaggle/working/wandb/run-20240409_214603-e7cki9vp/files/wandb-summary.json
98
+ 2024-04-09 21:47:16,676 INFO Thread-12 :160 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240409_214603-e7cki9vp/files/output.log
99
+ 2024-04-09 21:47:16,676 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
100
+ 2024-04-09 21:47:19,808 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: stop_status
101
+ 2024-04-09 21:47:19,808 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: internal_messages
102
+ 2024-04-09 21:47:19,809 DEBUG SenderThread:160 [sender.py:send_request():406] send_request: stop_status
103
+ 2024-04-09 21:47:21,900 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
104
+ 2024-04-09 21:47:26,901 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
105
+ 2024-04-09 21:47:31,902 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
106
+ 2024-04-09 21:47:33,775 DEBUG SenderThread:160 [sender.py:send():379] send: stats
107
+ 2024-04-09 21:47:34,807 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: stop_status
108
+ 2024-04-09 21:47:34,807 DEBUG SenderThread:160 [sender.py:send_request():406] send_request: stop_status
109
+ 2024-04-09 21:47:34,848 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: internal_messages
110
+ 2024-04-09 21:47:37,879 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
111
+ 2024-04-09 21:47:38,684 INFO Thread-12 :160 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240409_214603-e7cki9vp/files/config.yaml
112
+ 2024-04-09 21:47:42,992 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
113
+ 2024-04-09 21:47:47,992 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
114
+ 2024-04-09 21:47:49,807 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: stop_status
115
+ 2024-04-09 21:47:49,808 DEBUG SenderThread:160 [sender.py:send_request():406] send_request: stop_status
116
+ 2024-04-09 21:47:49,848 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: internal_messages
117
+ 2024-04-09 21:47:53,904 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
118
+ 2024-04-09 21:47:58,905 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
119
+ 2024-04-09 21:48:03,776 DEBUG SenderThread:160 [sender.py:send():379] send: stats
120
+ 2024-04-09 21:48:04,777 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
121
+ 2024-04-09 21:48:04,808 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: stop_status
122
+ 2024-04-09 21:48:04,808 DEBUG SenderThread:160 [sender.py:send_request():406] send_request: stop_status
123
+ 2024-04-09 21:48:04,849 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: internal_messages
124
+ 2024-04-09 21:48:09,962 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
125
+ 2024-04-09 21:48:12,408 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: partial_history
126
+ 2024-04-09 21:48:12,409 DEBUG SenderThread:160 [sender.py:send():379] send: history
127
+ 2024-04-09 21:48:12,409 DEBUG SenderThread:160 [sender.py:send_request():406] send_request: summary_record
128
+ 2024-04-09 21:48:12,409 INFO SenderThread:160 [sender.py:_save_file():1390] saving file wandb-summary.json with policy end
129
+ 2024-04-09 21:48:12,699 INFO Thread-12 :160 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240409_214603-e7cki9vp/files/wandb-summary.json
130
+ 2024-04-09 21:48:14,700 INFO Thread-12 :160 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240409_214603-e7cki9vp/files/output.log
131
+ 2024-04-09 21:48:15,653 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
132
+ 2024-04-09 21:48:19,808 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: stop_status
133
+ 2024-04-09 21:48:19,808 DEBUG SenderThread:160 [sender.py:send_request():406] send_request: stop_status
134
+ 2024-04-09 21:48:19,809 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: internal_messages
135
+ 2024-04-09 21:48:20,941 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
136
+ 2024-04-09 21:48:25,943 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
137
+ 2024-04-09 21:48:30,943 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
138
+ 2024-04-09 21:48:33,777 DEBUG SenderThread:160 [sender.py:send():379] send: stats
139
+ 2024-04-09 21:48:34,808 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: stop_status
140
+ 2024-04-09 21:48:34,808 DEBUG SenderThread:160 [sender.py:send_request():406] send_request: stop_status
141
+ 2024-04-09 21:48:34,849 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: internal_messages
142
+ 2024-04-09 21:48:35,949 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
143
+ 2024-04-09 21:48:40,950 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
144
+ 2024-04-09 21:48:45,951 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
145
+ 2024-04-09 21:48:49,808 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: stop_status
146
+ 2024-04-09 21:48:49,808 DEBUG SenderThread:160 [sender.py:send_request():406] send_request: stop_status
147
+ 2024-04-09 21:48:49,849 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: internal_messages
148
+ 2024-04-09 21:48:50,961 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
149
+ 2024-04-09 21:48:55,962 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
150
+ 2024-04-09 21:49:00,963 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
151
+ 2024-04-09 21:49:03,779 DEBUG SenderThread:160 [sender.py:send():379] send: stats
152
+ 2024-04-09 21:49:04,458 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: partial_history
153
+ 2024-04-09 21:49:04,459 DEBUG SenderThread:160 [sender.py:send():379] send: history
154
+ 2024-04-09 21:49:04,459 DEBUG SenderThread:160 [sender.py:send_request():406] send_request: summary_record
155
+ 2024-04-09 21:49:04,460 INFO SenderThread:160 [sender.py:_save_file():1390] saving file wandb-summary.json with policy end
156
+ 2024-04-09 21:49:04,719 INFO Thread-12 :160 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240409_214603-e7cki9vp/files/wandb-summary.json
157
+ 2024-04-09 21:49:04,808 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: stop_status
158
+ 2024-04-09 21:49:04,809 DEBUG SenderThread:160 [sender.py:send_request():406] send_request: stop_status
159
+ 2024-04-09 21:49:04,812 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: internal_messages
160
+ 2024-04-09 21:49:06,719 INFO Thread-12 :160 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240409_214603-e7cki9vp/files/output.log
161
+ 2024-04-09 21:49:06,903 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
162
+ 2024-04-09 21:49:11,904 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
163
+ 2024-04-09 21:49:16,904 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
164
+ 2024-04-09 21:49:19,808 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: stop_status
165
+ 2024-04-09 21:49:19,809 DEBUG SenderThread:160 [sender.py:send_request():406] send_request: stop_status
166
+ 2024-04-09 21:49:19,849 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: internal_messages
167
+ 2024-04-09 21:49:21,960 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
168
+ 2024-04-09 21:49:26,961 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
169
+ 2024-04-09 21:49:31,962 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
170
+ 2024-04-09 21:49:33,779 DEBUG SenderThread:160 [sender.py:send():379] send: stats
171
+ 2024-04-09 21:49:34,809 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: stop_status
172
+ 2024-04-09 21:49:34,809 DEBUG SenderThread:160 [sender.py:send_request():406] send_request: stop_status
173
+ 2024-04-09 21:49:34,849 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: internal_messages
174
+ 2024-04-09 21:49:37,903 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
175
+ 2024-04-09 21:49:42,904 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
176
+ 2024-04-09 21:49:47,905 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
177
+ 2024-04-09 21:49:49,809 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: stop_status
178
+ 2024-04-09 21:49:49,809 DEBUG SenderThread:160 [sender.py:send_request():406] send_request: stop_status
179
+ 2024-04-09 21:49:49,850 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: internal_messages
180
+ 2024-04-09 21:49:52,947 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
181
+ 2024-04-09 21:49:57,948 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
182
+ 2024-04-09 21:50:02,949 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
183
+ 2024-04-09 21:50:03,253 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: partial_history
184
+ 2024-04-09 21:50:03,254 DEBUG SenderThread:160 [sender.py:send():379] send: history
185
+ 2024-04-09 21:50:03,254 DEBUG SenderThread:160 [sender.py:send_request():406] send_request: summary_record
186
+ 2024-04-09 21:50:03,256 INFO SenderThread:160 [sender.py:_save_file():1390] saving file wandb-summary.json with policy end
187
+ 2024-04-09 21:50:03,742 INFO Thread-12 :160 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240409_214603-e7cki9vp/files/wandb-summary.json
188
+ 2024-04-09 21:50:03,781 DEBUG SenderThread:160 [sender.py:send():379] send: stats
189
+ 2024-04-09 21:50:04,809 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: stop_status
190
+ 2024-04-09 21:50:04,810 DEBUG SenderThread:160 [sender.py:send_request():406] send_request: stop_status
191
+ 2024-04-09 21:50:04,813 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: internal_messages
192
+ 2024-04-09 21:50:06,743 INFO Thread-12 :160 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240409_214603-e7cki9vp/files/output.log
193
+ 2024-04-09 21:50:08,908 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
194
+ 2024-04-09 21:50:13,909 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
195
+ 2024-04-09 21:50:18,910 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
196
+ 2024-04-09 21:50:19,809 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: stop_status
197
+ 2024-04-09 21:50:19,810 DEBUG SenderThread:160 [sender.py:send_request():406] send_request: stop_status
198
+ 2024-04-09 21:50:19,850 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: internal_messages
199
+ 2024-04-09 21:50:23,921 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
200
+ 2024-04-09 21:50:28,922 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
201
+ 2024-04-09 21:50:33,782 DEBUG SenderThread:160 [sender.py:send():379] send: stats
202
+ 2024-04-09 21:50:34,783 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
203
+ 2024-04-09 21:50:34,809 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: stop_status
204
+ 2024-04-09 21:50:34,809 DEBUG SenderThread:160 [sender.py:send_request():406] send_request: stop_status
205
+ 2024-04-09 21:50:34,850 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: internal_messages
206
+ 2024-04-09 21:50:39,971 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
207
+ 2024-04-09 21:50:44,972 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
208
+ 2024-04-09 21:50:49,809 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: stop_status
209
+ 2024-04-09 21:50:49,810 DEBUG SenderThread:160 [sender.py:send_request():406] send_request: stop_status
210
+ 2024-04-09 21:50:49,850 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: internal_messages
211
+ 2024-04-09 21:50:50,889 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
212
+ 2024-04-09 21:50:55,890 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
213
+ 2024-04-09 21:51:00,589 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: partial_history
214
+ 2024-04-09 21:51:00,590 DEBUG SenderThread:160 [sender.py:send():379] send: history
215
+ 2024-04-09 21:51:00,590 DEBUG SenderThread:160 [sender.py:send_request():406] send_request: summary_record
216
+ 2024-04-09 21:51:00,592 INFO SenderThread:160 [sender.py:_save_file():1390] saving file wandb-summary.json with policy end
217
+ 2024-04-09 21:51:00,764 INFO Thread-12 :160 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240409_214603-e7cki9vp/files/wandb-summary.json
218
+ 2024-04-09 21:51:00,908 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
219
+ 2024-04-09 21:51:02,765 INFO Thread-12 :160 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240409_214603-e7cki9vp/files/output.log
220
+ 2024-04-09 21:51:03,783 DEBUG SenderThread:160 [sender.py:send():379] send: stats
221
+ 2024-04-09 21:51:04,809 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: stop_status
222
+ 2024-04-09 21:51:04,810 DEBUG SenderThread:160 [sender.py:send_request():406] send_request: stop_status
223
+ 2024-04-09 21:51:04,813 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: internal_messages
224
+ 2024-04-09 21:51:05,944 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
225
+ 2024-04-09 21:51:10,945 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
226
+ 2024-04-09 21:51:15,946 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
227
+ 2024-04-09 21:51:19,830 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: internal_messages
228
+ 2024-04-09 21:51:19,841 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: stop_status
229
+ 2024-04-09 21:51:19,842 DEBUG SenderThread:160 [sender.py:send_request():406] send_request: stop_status
230
+ 2024-04-09 21:51:20,989 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
231
+ 2024-04-09 21:51:25,990 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
232
+ 2024-04-09 21:51:30,991 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
233
+ 2024-04-09 21:51:33,783 DEBUG SenderThread:160 [sender.py:send():379] send: stats
234
+ 2024-04-09 21:51:34,816 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: stop_status
235
+ 2024-04-09 21:51:34,816 DEBUG SenderThread:160 [sender.py:send_request():406] send_request: stop_status
236
+ 2024-04-09 21:51:34,819 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: internal_messages
237
+ 2024-04-09 21:51:36,912 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
238
+ 2024-04-09 21:51:41,914 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
239
+ 2024-04-09 21:51:46,913 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
240
+ 2024-04-09 21:51:49,815 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: stop_status
241
+ 2024-04-09 21:51:49,816 DEBUG SenderThread:160 [sender.py:send_request():406] send_request: stop_status
242
+ 2024-04-09 21:51:49,856 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: internal_messages
243
+ 2024-04-09 21:51:51,966 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
244
+ 2024-04-09 21:51:55,844 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: partial_history
245
+ 2024-04-09 21:51:55,845 DEBUG SenderThread:160 [sender.py:send():379] send: history
246
+ 2024-04-09 21:51:55,845 DEBUG SenderThread:160 [sender.py:send_request():406] send_request: summary_record
247
+ 2024-04-09 21:51:55,845 INFO SenderThread:160 [sender.py:_save_file():1390] saving file wandb-summary.json with policy end
248
+ 2024-04-09 21:51:56,784 INFO Thread-12 :160 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240409_214603-e7cki9vp/files/wandb-summary.json
249
+ 2024-04-09 21:51:57,094 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
250
+ 2024-04-09 21:51:58,785 INFO Thread-12 :160 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240409_214603-e7cki9vp/files/output.log
251
+ 2024-04-09 21:52:02,095 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
252
+ 2024-04-09 21:52:03,784 DEBUG SenderThread:160 [sender.py:send():379] send: stats
253
+ 2024-04-09 21:52:04,816 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: stop_status
254
+ 2024-04-09 21:52:04,816 DEBUG SenderThread:160 [sender.py:send_request():406] send_request: stop_status
255
+ 2024-04-09 21:52:04,820 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: internal_messages
256
+ 2024-04-09 21:52:07,900 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
257
+ 2024-04-09 21:52:12,900 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
258
+ 2024-04-09 21:52:17,901 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
259
+ 2024-04-09 21:52:19,816 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: stop_status
260
+ 2024-04-09 21:52:19,816 DEBUG SenderThread:160 [sender.py:send_request():406] send_request: stop_status
261
+ 2024-04-09 21:52:19,857 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: internal_messages
262
+ 2024-04-09 21:52:23,893 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
263
+ 2024-04-09 21:52:28,893 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
264
+ 2024-04-09 21:52:33,785 DEBUG SenderThread:160 [sender.py:send():379] send: stats
265
+ 2024-04-09 21:52:34,786 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
266
+ 2024-04-09 21:52:34,818 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: internal_messages
267
+ 2024-04-09 21:52:34,818 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: stop_status
268
+ 2024-04-09 21:52:34,818 DEBUG SenderThread:160 [sender.py:send_request():406] send_request: stop_status
269
+ 2024-04-09 21:52:39,947 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
270
+ 2024-04-09 21:52:44,947 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
271
+ 2024-04-09 21:52:49,816 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: internal_messages
272
+ 2024-04-09 21:52:49,817 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: stop_status
273
+ 2024-04-09 21:52:49,817 DEBUG SenderThread:160 [sender.py:send_request():406] send_request: stop_status
274
+ 2024-04-09 21:52:50,486 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: partial_history
275
+ 2024-04-09 21:52:50,487 DEBUG SenderThread:160 [sender.py:send():379] send: history
276
+ 2024-04-09 21:52:50,487 DEBUG SenderThread:160 [sender.py:send_request():406] send_request: summary_record
277
+ 2024-04-09 21:52:50,487 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
278
+ 2024-04-09 21:52:50,488 INFO SenderThread:160 [sender.py:_save_file():1390] saving file wandb-summary.json with policy end
279
+ 2024-04-09 21:52:50,805 INFO Thread-12 :160 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240409_214603-e7cki9vp/files/wandb-summary.json
280
+ 2024-04-09 21:52:52,806 INFO Thread-12 :160 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240409_214603-e7cki9vp/files/output.log
281
+ 2024-04-09 21:52:55,739 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
282
+ 2024-04-09 21:53:00,740 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
283
+ 2024-04-09 21:53:03,786 DEBUG SenderThread:160 [sender.py:send():379] send: stats
284
+ 2024-04-09 21:53:04,817 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: stop_status
285
+ 2024-04-09 21:53:04,817 DEBUG SenderThread:160 [sender.py:send_request():406] send_request: stop_status
286
+ 2024-04-09 21:53:04,820 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: internal_messages
287
+ 2024-04-09 21:53:05,903 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
288
+ 2024-04-09 21:53:10,904 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
289
+ 2024-04-09 21:53:15,905 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
290
+ 2024-04-09 21:53:19,816 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: stop_status
291
+ 2024-04-09 21:53:19,816 DEBUG SenderThread:160 [sender.py:send_request():406] send_request: stop_status
292
+ 2024-04-09 21:53:19,857 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: internal_messages
293
+ 2024-04-09 21:53:21,885 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
294
+ 2024-04-09 21:53:26,886 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
295
+ 2024-04-09 21:53:31,887 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
296
+ 2024-04-09 21:53:33,787 DEBUG SenderThread:160 [sender.py:send():379] send: stats
297
+ 2024-04-09 21:53:34,816 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: stop_status
298
+ 2024-04-09 21:53:34,816 DEBUG SenderThread:160 [sender.py:send_request():406] send_request: stop_status
299
+ 2024-04-09 21:53:34,857 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: internal_messages
300
+ 2024-04-09 21:53:36,987 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
301
+ 2024-04-09 21:53:41,988 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
302
+ 2024-04-09 21:53:43,389 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: partial_history
303
+ 2024-04-09 21:53:43,390 DEBUG SenderThread:160 [sender.py:send():379] send: history
304
+ 2024-04-09 21:53:43,390 DEBUG SenderThread:160 [sender.py:send_request():406] send_request: summary_record
305
+ 2024-04-09 21:53:43,391 INFO SenderThread:160 [sender.py:_save_file():1390] saving file wandb-summary.json with policy end
306
+ 2024-04-09 21:53:43,829 INFO Thread-12 :160 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240409_214603-e7cki9vp/files/wandb-summary.json
307
+ 2024-04-09 21:53:44,829 INFO Thread-12 :160 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240409_214603-e7cki9vp/files/output.log
308
+ 2024-04-09 21:53:47,638 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
309
+ 2024-04-09 21:53:49,816 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: stop_status
310
+ 2024-04-09 21:53:49,817 DEBUG SenderThread:160 [sender.py:send_request():406] send_request: stop_status
311
+ 2024-04-09 21:53:49,820 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: internal_messages
312
+ 2024-04-09 21:53:52,887 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
313
+ 2024-04-09 21:53:57,888 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
314
+ 2024-04-09 21:54:02,889 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
315
+ 2024-04-09 21:54:03,788 DEBUG SenderThread:160 [sender.py:send():379] send: stats
316
+ 2024-04-09 21:54:04,844 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: internal_messages
317
+ 2024-04-09 21:54:04,845 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: stop_status
318
+ 2024-04-09 21:54:04,846 DEBUG SenderThread:160 [sender.py:send_request():406] send_request: stop_status
319
+ 2024-04-09 21:54:07,926 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
320
+ 2024-04-09 21:54:12,927 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
321
+ 2024-04-09 21:54:17,928 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
322
+ 2024-04-09 21:54:19,824 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: stop_status
323
+ 2024-04-09 21:54:19,824 DEBUG SenderThread:160 [sender.py:send_request():406] send_request: stop_status
324
+ 2024-04-09 21:54:19,828 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: internal_messages
325
+ 2024-04-09 21:54:23,918 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
326
+ 2024-04-09 21:54:28,919 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
327
+ 2024-04-09 21:54:33,789 DEBUG SenderThread:160 [sender.py:send():379] send: stats
328
+ 2024-04-09 21:54:34,790 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
329
+ 2024-04-09 21:54:34,824 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: stop_status
330
+ 2024-04-09 21:54:34,825 DEBUG SenderThread:160 [sender.py:send_request():406] send_request: stop_status
331
+ 2024-04-09 21:54:34,865 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: internal_messages
332
+ 2024-04-09 21:54:36,955 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: partial_history
333
+ 2024-04-09 21:54:36,956 DEBUG SenderThread:160 [sender.py:send():379] send: history
334
+ 2024-04-09 21:54:36,957 DEBUG SenderThread:160 [sender.py:send_request():406] send_request: summary_record
335
+ 2024-04-09 21:54:36,957 INFO SenderThread:160 [sender.py:_save_file():1390] saving file wandb-summary.json with policy end
336
+ 2024-04-09 21:54:37,849 INFO Thread-12 :160 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240409_214603-e7cki9vp/files/wandb-summary.json
337
+ 2024-04-09 21:54:38,850 INFO Thread-12 :160 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240409_214603-e7cki9vp/files/output.log
338
+ 2024-04-09 21:54:40,220 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
339
+ 2024-04-09 21:54:45,221 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
340
+ 2024-04-09 21:54:49,822 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: stop_status
341
+ 2024-04-09 21:54:49,823 DEBUG SenderThread:160 [sender.py:send_request():406] send_request: stop_status
342
+ 2024-04-09 21:54:49,826 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: internal_messages
343
+ 2024-04-09 21:54:50,962 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
344
+ 2024-04-09 21:54:55,963 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
345
+ 2024-04-09 21:54:58,795 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: pause
346
+ 2024-04-09 21:54:58,796 INFO HandlerThread:160 [handler.py:handle_request_pause():708] stopping system metrics thread
347
+ 2024-04-09 21:54:58,796 INFO HandlerThread:160 [system_monitor.py:finish():203] Stopping system monitor
348
+ 2024-04-09 21:54:58,796 DEBUG SystemMonitor:160 [system_monitor.py:_start():179] Finished system metrics aggregation loop
349
+ 2024-04-09 21:54:58,796 INFO HandlerThread:160 [interfaces.py:finish():202] Joined cpu monitor
350
+ 2024-04-09 21:54:58,797 DEBUG SystemMonitor:160 [system_monitor.py:_start():183] Publishing last batch of metrics
351
+ 2024-04-09 21:54:58,797 INFO HandlerThread:160 [interfaces.py:finish():202] Joined disk monitor
352
+ 2024-04-09 21:54:58,807 INFO HandlerThread:160 [interfaces.py:finish():202] Joined gpu monitor
353
+ 2024-04-09 21:54:58,808 INFO HandlerThread:160 [interfaces.py:finish():202] Joined memory monitor
354
+ 2024-04-09 21:54:58,808 INFO HandlerThread:160 [interfaces.py:finish():202] Joined network monitor
355
+ 2024-04-09 21:54:58,812 DEBUG SenderThread:160 [sender.py:send():379] send: stats
356
+ 2024-04-09 21:55:01,813 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
357
+ 2024-04-09 21:55:02,316 DEBUG SenderThread:160 [sender.py:send():379] send: exit
358
+ 2024-04-09 21:55:02,316 INFO SenderThread:160 [sender.py:send_exit():586] handling exit code: 0
359
+ 2024-04-09 21:55:02,316 INFO SenderThread:160 [sender.py:send_exit():588] handling runtime: 535
360
+ 2024-04-09 21:55:02,317 INFO SenderThread:160 [sender.py:_save_file():1390] saving file wandb-summary.json with policy end
361
+ 2024-04-09 21:55:02,317 INFO SenderThread:160 [sender.py:send_exit():594] send defer
362
+ 2024-04-09 21:55:02,317 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: defer
363
+ 2024-04-09 21:55:02,317 INFO HandlerThread:160 [handler.py:handle_request_defer():172] handle defer: 0
364
+ 2024-04-09 21:55:02,318 DEBUG SenderThread:160 [sender.py:send_request():406] send_request: defer
365
+ 2024-04-09 21:55:02,318 INFO SenderThread:160 [sender.py:send_request_defer():610] handle sender defer: 0
366
+ 2024-04-09 21:55:02,318 INFO SenderThread:160 [sender.py:transition_state():614] send defer: 1
367
+ 2024-04-09 21:55:02,318 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: defer
368
+ 2024-04-09 21:55:02,318 INFO HandlerThread:160 [handler.py:handle_request_defer():172] handle defer: 1
369
+ 2024-04-09 21:55:02,318 DEBUG SenderThread:160 [sender.py:send_request():406] send_request: defer
370
+ 2024-04-09 21:55:02,318 INFO SenderThread:160 [sender.py:send_request_defer():610] handle sender defer: 1
371
+ 2024-04-09 21:55:02,318 INFO SenderThread:160 [sender.py:transition_state():614] send defer: 2
372
+ 2024-04-09 21:55:02,318 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: defer
373
+ 2024-04-09 21:55:02,318 INFO HandlerThread:160 [handler.py:handle_request_defer():172] handle defer: 2
374
+ 2024-04-09 21:55:02,318 DEBUG SenderThread:160 [sender.py:send_request():406] send_request: defer
375
+ 2024-04-09 21:55:02,318 INFO SenderThread:160 [sender.py:send_request_defer():610] handle sender defer: 2
376
+ 2024-04-09 21:55:02,319 INFO SenderThread:160 [sender.py:transition_state():614] send defer: 3
377
+ 2024-04-09 21:55:02,319 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: defer
378
+ 2024-04-09 21:55:02,319 INFO HandlerThread:160 [handler.py:handle_request_defer():172] handle defer: 3
379
+ 2024-04-09 21:55:02,319 DEBUG SenderThread:160 [sender.py:send_request():406] send_request: defer
380
+ 2024-04-09 21:55:02,319 INFO SenderThread:160 [sender.py:send_request_defer():610] handle sender defer: 3
381
+ 2024-04-09 21:55:02,319 INFO SenderThread:160 [sender.py:transition_state():614] send defer: 4
382
+ 2024-04-09 21:55:02,319 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: defer
383
+ 2024-04-09 21:55:02,319 INFO HandlerThread:160 [handler.py:handle_request_defer():172] handle defer: 4
384
+ 2024-04-09 21:55:02,319 DEBUG SenderThread:160 [sender.py:send_request():406] send_request: defer
385
+ 2024-04-09 21:55:02,319 INFO SenderThread:160 [sender.py:send_request_defer():610] handle sender defer: 4
386
+ 2024-04-09 21:55:02,319 INFO SenderThread:160 [sender.py:transition_state():614] send defer: 5
387
+ 2024-04-09 21:55:02,319 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: defer
388
+ 2024-04-09 21:55:02,320 INFO HandlerThread:160 [handler.py:handle_request_defer():172] handle defer: 5
389
+ 2024-04-09 21:55:02,320 DEBUG SenderThread:160 [sender.py:send():379] send: summary
390
+ 2024-04-09 21:55:02,320 INFO SenderThread:160 [sender.py:_save_file():1390] saving file wandb-summary.json with policy end
391
+ 2024-04-09 21:55:02,321 DEBUG SenderThread:160 [sender.py:send_request():406] send_request: defer
392
+ 2024-04-09 21:55:02,321 INFO SenderThread:160 [sender.py:send_request_defer():610] handle sender defer: 5
393
+ 2024-04-09 21:55:02,321 INFO SenderThread:160 [sender.py:transition_state():614] send defer: 6
394
+ 2024-04-09 21:55:02,321 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: defer
395
+ 2024-04-09 21:55:02,321 INFO HandlerThread:160 [handler.py:handle_request_defer():172] handle defer: 6
396
+ 2024-04-09 21:55:02,321 DEBUG SenderThread:160 [sender.py:send_request():406] send_request: defer
397
+ 2024-04-09 21:55:02,321 INFO SenderThread:160 [sender.py:send_request_defer():610] handle sender defer: 6
398
+ 2024-04-09 21:55:02,321 INFO SenderThread:160 [sender.py:transition_state():614] send defer: 7
399
+ 2024-04-09 21:55:02,321 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: status_report
400
+ 2024-04-09 21:55:02,321 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: defer
401
+ 2024-04-09 21:55:02,321 INFO HandlerThread:160 [handler.py:handle_request_defer():172] handle defer: 7
402
+ 2024-04-09 21:55:02,322 DEBUG SenderThread:160 [sender.py:send_request():406] send_request: defer
403
+ 2024-04-09 21:55:02,322 INFO SenderThread:160 [sender.py:send_request_defer():610] handle sender defer: 7
404
+ 2024-04-09 21:55:02,501 INFO SenderThread:160 [sender.py:transition_state():614] send defer: 8
405
+ 2024-04-09 21:55:02,501 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: defer
406
+ 2024-04-09 21:55:02,501 INFO HandlerThread:160 [handler.py:handle_request_defer():172] handle defer: 8
407
+ 2024-04-09 21:55:02,502 DEBUG SenderThread:160 [sender.py:send_request():406] send_request: defer
408
+ 2024-04-09 21:55:02,502 INFO SenderThread:160 [sender.py:send_request_defer():610] handle sender defer: 8
409
+ 2024-04-09 21:55:02,502 INFO SenderThread:160 [job_builder.py:build():318] Attempting to build job artifact
410
+ 2024-04-09 21:55:02,504 INFO SenderThread:160 [job_builder.py:_get_source_type():466] no source found
411
+ 2024-04-09 21:55:02,504 INFO SenderThread:160 [sender.py:transition_state():614] send defer: 9
412
+ 2024-04-09 21:55:02,504 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: defer
413
+ 2024-04-09 21:55:02,504 INFO HandlerThread:160 [handler.py:handle_request_defer():172] handle defer: 9
414
+ 2024-04-09 21:55:02,504 DEBUG SenderThread:160 [sender.py:send_request():406] send_request: defer
415
+ 2024-04-09 21:55:02,504 INFO SenderThread:160 [sender.py:send_request_defer():610] handle sender defer: 9
416
+ 2024-04-09 21:55:02,504 INFO SenderThread:160 [dir_watcher.py:finish():358] shutting down directory watcher
417
+ 2024-04-09 21:55:02,855 INFO SenderThread:160 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240409_214603-e7cki9vp/files/wandb-summary.json
418
+ 2024-04-09 21:55:02,855 INFO SenderThread:160 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240409_214603-e7cki9vp/files/output.log
419
+ 2024-04-09 21:55:02,855 INFO SenderThread:160 [dir_watcher.py:finish():388] scan: /kaggle/working/wandb/run-20240409_214603-e7cki9vp/files
420
+ 2024-04-09 21:55:02,856 INFO SenderThread:160 [dir_watcher.py:finish():402] scan save: /kaggle/working/wandb/run-20240409_214603-e7cki9vp/files/conda-environment.yaml conda-environment.yaml
421
+ 2024-04-09 21:55:02,856 INFO SenderThread:160 [dir_watcher.py:finish():402] scan save: /kaggle/working/wandb/run-20240409_214603-e7cki9vp/files/requirements.txt requirements.txt
422
+ 2024-04-09 21:55:02,856 INFO SenderThread:160 [dir_watcher.py:finish():402] scan save: /kaggle/working/wandb/run-20240409_214603-e7cki9vp/files/wandb-metadata.json wandb-metadata.json
423
+ 2024-04-09 21:55:02,856 INFO SenderThread:160 [dir_watcher.py:finish():402] scan save: /kaggle/working/wandb/run-20240409_214603-e7cki9vp/files/wandb-summary.json wandb-summary.json
424
+ 2024-04-09 21:55:02,856 INFO SenderThread:160 [dir_watcher.py:finish():402] scan save: /kaggle/working/wandb/run-20240409_214603-e7cki9vp/files/config.yaml config.yaml
425
+ 2024-04-09 21:55:02,862 INFO SenderThread:160 [dir_watcher.py:finish():402] scan save: /kaggle/working/wandb/run-20240409_214603-e7cki9vp/files/output.log output.log
426
+ 2024-04-09 21:55:02,867 INFO SenderThread:160 [sender.py:transition_state():614] send defer: 10
427
+ 2024-04-09 21:55:02,870 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: defer
428
+ 2024-04-09 21:55:02,870 INFO HandlerThread:160 [handler.py:handle_request_defer():172] handle defer: 10
429
+ 2024-04-09 21:55:02,871 DEBUG SenderThread:160 [sender.py:send_request():406] send_request: defer
430
+ 2024-04-09 21:55:02,871 INFO SenderThread:160 [sender.py:send_request_defer():610] handle sender defer: 10
431
+ 2024-04-09 21:55:02,871 INFO SenderThread:160 [file_pusher.py:finish():172] shutting down file pusher
432
+ 2024-04-09 21:55:03,144 INFO wandb-upload_0:160 [upload_job.py:push():131] Uploaded file /kaggle/working/wandb/run-20240409_214603-e7cki9vp/files/requirements.txt
433
+ 2024-04-09 21:55:03,156 INFO wandb-upload_3:160 [upload_job.py:push():131] Uploaded file /kaggle/working/wandb/run-20240409_214603-e7cki9vp/files/output.log
434
+ 2024-04-09 21:55:03,171 INFO wandb-upload_1:160 [upload_job.py:push():131] Uploaded file /kaggle/working/wandb/run-20240409_214603-e7cki9vp/files/wandb-summary.json
435
+ 2024-04-09 21:55:03,185 INFO wandb-upload_2:160 [upload_job.py:push():131] Uploaded file /kaggle/working/wandb/run-20240409_214603-e7cki9vp/files/config.yaml
436
+ 2024-04-09 21:55:03,316 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: poll_exit
437
+ 2024-04-09 21:55:03,317 DEBUG SenderThread:160 [sender.py:send_request():406] send_request: poll_exit
438
+ 2024-04-09 21:55:03,385 INFO Thread-11 (_thread_body):160 [sender.py:transition_state():614] send defer: 11
439
+ 2024-04-09 21:55:03,386 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: defer
440
+ 2024-04-09 21:55:03,386 INFO HandlerThread:160 [handler.py:handle_request_defer():172] handle defer: 11
441
+ 2024-04-09 21:55:03,386 DEBUG SenderThread:160 [sender.py:send_request():406] send_request: defer
442
+ 2024-04-09 21:55:03,386 INFO SenderThread:160 [sender.py:send_request_defer():610] handle sender defer: 11
443
+ 2024-04-09 21:55:03,387 INFO SenderThread:160 [file_pusher.py:join():178] waiting for file pusher
444
+ 2024-04-09 21:55:03,387 INFO SenderThread:160 [sender.py:transition_state():614] send defer: 12
445
+ 2024-04-09 21:55:03,387 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: defer
446
+ 2024-04-09 21:55:03,387 INFO HandlerThread:160 [handler.py:handle_request_defer():172] handle defer: 12
447
+ 2024-04-09 21:55:03,387 DEBUG SenderThread:160 [sender.py:send_request():406] send_request: defer
448
+ 2024-04-09 21:55:03,387 INFO SenderThread:160 [sender.py:send_request_defer():610] handle sender defer: 12
449
+ 2024-04-09 21:55:03,387 INFO SenderThread:160 [file_stream.py:finish():614] file stream finish called
450
+ 2024-04-09 21:55:03,572 INFO SenderThread:160 [file_stream.py:finish():618] file stream finish is done
451
+ 2024-04-09 21:55:03,572 INFO SenderThread:160 [sender.py:transition_state():614] send defer: 13
452
+ 2024-04-09 21:55:03,572 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: defer
453
+ 2024-04-09 21:55:03,572 INFO HandlerThread:160 [handler.py:handle_request_defer():172] handle defer: 13
454
+ 2024-04-09 21:55:03,573 DEBUG SenderThread:160 [sender.py:send_request():406] send_request: defer
455
+ 2024-04-09 21:55:03,573 INFO SenderThread:160 [sender.py:send_request_defer():610] handle sender defer: 13
456
+ 2024-04-09 21:55:03,573 INFO SenderThread:160 [sender.py:transition_state():614] send defer: 14
457
+ 2024-04-09 21:55:03,573 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: defer
458
+ 2024-04-09 21:55:03,573 INFO HandlerThread:160 [handler.py:handle_request_defer():172] handle defer: 14
459
+ 2024-04-09 21:55:03,573 DEBUG SenderThread:160 [sender.py:send():379] send: final
460
+ 2024-04-09 21:55:03,574 DEBUG SenderThread:160 [sender.py:send():379] send: footer
461
+ 2024-04-09 21:55:03,574 DEBUG SenderThread:160 [sender.py:send_request():406] send_request: defer
462
+ 2024-04-09 21:55:03,574 INFO SenderThread:160 [sender.py:send_request_defer():610] handle sender defer: 14
463
+ 2024-04-09 21:55:03,575 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: poll_exit
464
+ 2024-04-09 21:55:03,575 DEBUG SenderThread:160 [sender.py:send_request():406] send_request: poll_exit
465
+ 2024-04-09 21:55:03,576 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: poll_exit
466
+ 2024-04-09 21:55:03,576 DEBUG SenderThread:160 [sender.py:send_request():406] send_request: poll_exit
467
+ 2024-04-09 21:55:03,577 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: server_info
468
+ 2024-04-09 21:55:03,577 DEBUG SenderThread:160 [sender.py:send_request():406] send_request: server_info
469
+ 2024-04-09 21:55:03,580 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: get_summary
470
+ 2024-04-09 21:55:03,580 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: sampled_history
471
+ 2024-04-09 21:55:03,581 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: internal_messages
472
+ 2024-04-09 21:55:03,648 INFO MainThread:160 [wandb_run.py:_footer_history_summary_info():3920] rendering history
473
+ 2024-04-09 21:55:03,648 INFO MainThread:160 [wandb_run.py:_footer_history_summary_info():3952] rendering summary
474
+ 2024-04-09 21:55:03,648 INFO MainThread:160 [wandb_run.py:_footer_sync_info():3879] logging synced files
475
+ 2024-04-09 21:55:03,649 DEBUG HandlerThread:160 [handler.py:handle_request():146] handle_request: shutdown
476
+ 2024-04-09 21:55:03,649 INFO HandlerThread:160 [handler.py:finish():866] shutting down handler
477
+ 2024-04-09 21:55:04,577 INFO WriterThread:160 [datastore.py:close():296] close: /kaggle/working/wandb/run-20240409_214603-e7cki9vp/run-e7cki9vp.wandb
478
+ 2024-04-09 21:55:04,648 INFO SenderThread:160 [sender.py:finish():1546] shutting down sender
479
+ 2024-04-09 21:55:04,648 INFO SenderThread:160 [file_pusher.py:finish():172] shutting down file pusher
480
+ 2024-04-09 21:55:04,648 INFO SenderThread:160 [file_pusher.py:join():178] waiting for file pusher
wandb/run-20240409_214603-e7cki9vp/logs/debug.log ADDED
@@ -0,0 +1,34 @@
1
+ 2024-04-09 21:46:03,061 INFO MainThread:114 [wandb_setup.py:_flush():76] Current SDK version is 0.16.5
2
+ 2024-04-09 21:46:03,061 INFO MainThread:114 [wandb_setup.py:_flush():76] Configure stats pid to 114
3
+ 2024-04-09 21:46:03,061 INFO MainThread:114 [wandb_setup.py:_flush():76] Loading settings from /root/.config/wandb/settings
4
+ 2024-04-09 21:46:03,061 INFO MainThread:114 [wandb_setup.py:_flush():76] Loading settings from /kaggle/working/wandb/settings
5
+ 2024-04-09 21:46:03,061 INFO MainThread:114 [wandb_setup.py:_flush():76] Loading settings from environment variables: {}
6
+ 2024-04-09 21:46:03,061 INFO MainThread:114 [wandb_setup.py:_flush():76] Applying setup settings: {'_disable_service': False}
7
+ 2024-04-09 21:46:03,061 INFO MainThread:114 [wandb_setup.py:_flush():76] Inferring run settings from compute environment: {'program': '<python with no main file>'}
8
+ 2024-04-09 21:46:03,061 INFO MainThread:114 [wandb_setup.py:_flush():76] Applying login settings: {}
9
+ 2024-04-09 21:46:03,061 INFO MainThread:114 [wandb_setup.py:_flush():76] Applying login settings: {'api_key': '***REDACTED***'}
10
+ 2024-04-09 21:46:03,061 INFO MainThread:114 [wandb_init.py:_log_setup():527] Logging user logs to /kaggle/working/wandb/run-20240409_214603-e7cki9vp/logs/debug.log
11
+ 2024-04-09 21:46:03,061 INFO MainThread:114 [wandb_init.py:_log_setup():528] Logging internal logs to /kaggle/working/wandb/run-20240409_214603-e7cki9vp/logs/debug-internal.log
12
+ 2024-04-09 21:46:03,062 INFO MainThread:114 [wandb_init.py:_jupyter_setup():473] configuring jupyter hooks <wandb.sdk.wandb_init._WandbInit object at 0x7a38881130d0>
13
+ 2024-04-09 21:46:03,062 INFO MainThread:114 [wandb_init.py:init():567] calling init triggers
14
+ 2024-04-09 21:46:03,062 INFO MainThread:114 [wandb_init.py:init():574] wandb.init called with sweep_config: {}
15
+ config: {}
16
+ 2024-04-09 21:46:03,062 INFO MainThread:114 [wandb_init.py:init():617] starting backend
17
+ 2024-04-09 21:46:03,062 INFO MainThread:114 [wandb_init.py:init():621] setting up manager
18
+ 2024-04-09 21:46:03,064 INFO MainThread:114 [backend.py:_multiprocessing_setup():105] multiprocessing start_methods=fork,spawn,forkserver, using: spawn
19
+ 2024-04-09 21:46:03,067 INFO MainThread:114 [wandb_init.py:init():629] backend started and connected
20
+ 2024-04-09 21:46:03,079 INFO MainThread:114 [wandb_run.py:_label_probe_notebook():1299] probe notebook
21
+ 2024-04-09 21:46:03,465 INFO MainThread:114 [wandb_init.py:init():721] updated telemetry
22
+ 2024-04-09 21:46:03,469 INFO MainThread:114 [wandb_init.py:init():754] communicating run to backend with 90.0 second timeout
23
+ 2024-04-09 21:46:03,657 INFO MainThread:114 [wandb_run.py:_on_init():2344] communicating current version
24
+ 2024-04-09 21:46:03,750 INFO MainThread:114 [wandb_run.py:_on_init():2353] got version response upgrade_message: "wandb version 0.16.6 is available! To upgrade, please run:\n $ pip install wandb --upgrade"
25
+
26
+ 2024-04-09 21:46:03,751 INFO MainThread:114 [wandb_init.py:init():805] starting run threads in backend
27
+ 2024-04-09 21:46:19,807 INFO MainThread:114 [wandb_run.py:_console_start():2323] atexit reg
28
+ 2024-04-09 21:46:19,807 INFO MainThread:114 [wandb_run.py:_redirect():2178] redirect: wrap_raw
29
+ 2024-04-09 21:46:19,809 INFO MainThread:114 [wandb_run.py:_redirect():2243] Wrapping output streams.
30
+ 2024-04-09 21:46:19,809 INFO MainThread:114 [wandb_run.py:_redirect():2268] Redirects installed.
31
+ 2024-04-09 21:46:19,811 INFO MainThread:114 [wandb_init.py:init():848] run started, returning control to user process
32
+ 2024-04-09 21:46:19,818 INFO MainThread:114 [wandb_run.py:_config_callback():1347] config_cb None None {'vocab_size': 32000, 'max_position_embeddings': 2048, 'hidden_size': 2048, 'intermediate_size': 5632, 'num_hidden_layers': 22, 'num_attention_heads': 32, 'num_key_value_heads': 4, 'hidden_act': 'silu', 'initializer_range': 0.02, 'rms_norm_eps': 1e-05, 'pretraining_tp': 1, 'use_cache': False, 'rope_theta': 10000.0, 'rope_scaling': None, 'attention_bias': False, 'attention_dropout': 0.0, 'return_dict': True, 'output_hidden_states': False, 'output_attentions': False, 'torchscript': False, 'torch_dtype': 'float32', 'use_bfloat16': False, 'tf_legacy_loss': False, 'pruned_heads': {}, 'tie_word_embeddings': False, 'chunk_size_feed_forward': 0, 'is_encoder_decoder': False, 'is_decoder': False, 'cross_attention_hidden_size': None, 'add_cross_attention': False, 'tie_encoder_decoder': False, 'max_length': 20, 'min_length': 0, 'do_sample': False, 'early_stopping': False, 'num_beams': 1, 'num_beam_groups': 1, 'diversity_penalty': 0.0, 'temperature': 1.0, 'top_k': 50, 'top_p': 1.0, 'typical_p': 1.0, 'repetition_penalty': 1.0, 'length_penalty': 1.0, 'no_repeat_ngram_size': 0, 'encoder_no_repeat_ngram_size': 0, 'bad_words_ids': None, 'num_return_sequences': 1, 'output_scores': False, 'return_dict_in_generate': False, 'forced_bos_token_id': None, 'forced_eos_token_id': None, 'remove_invalid_values': False, 'exponential_decay_length_penalty': None, 'suppress_tokens': None, 'begin_suppress_tokens': None, 'architectures': ['LlamaForCausalLM'], 'finetuning_task': None, 'id2label': {0: 'LABEL_0', 1: 'LABEL_1'}, 'label2id': {'LABEL_0': 0, 'LABEL_1': 1}, 'tokenizer_class': None, 'prefix': None, 'bos_token_id': 1, 'pad_token_id': None, 'eos_token_id': 2, 'sep_token_id': None, 'decoder_start_token_id': None, 'task_specific_params': None, 'problem_type': None, '_name_or_path': 'TinyLlama/TinyLlama-1.1B-intermediate-step-1431k-3T', 'transformers_version': '4.39.3', 'model_type': 'llama', 'quantization_config': {'quant_method': 'QuantizationMethod.BITS_AND_BYTES', '_load_in_8bit': False, '_load_in_4bit': True, 'llm_int8_threshold': 6.0, 'llm_int8_skip_modules': None, 'llm_int8_enable_fp32_cpu_offload': False, 'llm_int8_has_fp16_weight': False, 'bnb_4bit_quant_type': 'nf4', 'bnb_4bit_use_double_quant': False, 'bnb_4bit_compute_dtype': 'float16', 'bnb_4bit_quant_storage': 'uint8', 'load_in_4bit': True, 'load_in_8bit': False}, 'output_dir': '/kaggle/working/', 'overwrite_output_dir': False, 'do_train': False, 'do_eval': False, 'do_predict': False, 'evaluation_strategy': 'no', 'prediction_loss_only': False, 'per_device_train_batch_size': 2, 'per_device_eval_batch_size': 8, 'per_gpu_train_batch_size': None, 'per_gpu_eval_batch_size': None, 'gradient_accumulation_steps': 1, 'eval_accumulation_steps': None, 'eval_delay': 0, 'learning_rate': 5e-06, 'weight_decay': 0.001, 'adam_beta1': 0.9, 'adam_beta2': 0.999, 'adam_epsilon': 1e-08, 'max_grad_norm': 0.1, 'num_train_epochs': 5, 'max_steps': 70000, 'lr_scheduler_type': 'cosine', 'lr_scheduler_kwargs': {}, 'warmup_ratio': 0.03, 'warmup_steps': 0, 'log_level': 'passive', 'log_level_replica': 'warning', 'log_on_each_node': True, 'logging_dir': '/kaggle/working/runs/Apr09_21-37-48_6e44b39f6877', 'logging_strategy': 'steps', 'logging_first_step': False, 'logging_steps': 100, 'logging_nan_inf_filter': True, 'save_strategy': 'steps', 'save_steps': 100, 'save_total_limit': 1, 'save_safetensors': True, 'save_on_each_node': False, 'save_only_model': False, 'no_cuda': False, 
'use_cpu': False, 'use_mps_device': False, 'seed': 42, 'data_seed': None, 'jit_mode_eval': False, 'use_ipex': False, 'bf16': False, 'fp16': False, 'fp16_opt_level': 'O1', 'half_precision_backend': 'auto', 'bf16_full_eval': False, 'fp16_full_eval': False, 'tf32': None, 'local_rank': 0, 'ddp_backend': None, 'tpu_num_cores': None, 'tpu_metrics_debug': False, 'debug': [], 'dataloader_drop_last': False, 'eval_steps': None, 'dataloader_num_workers': 8, 'dataloader_prefetch_factor': None, 'past_index': -1, 'run_name': '/kaggle/working/', 'disable_tqdm': False, 'remove_unused_columns': True, 'label_names': None, 'load_best_model_at_end': False, 'metric_for_best_model': None, 'greater_is_better': None, 'ignore_data_skip': False, 'fsdp': [], 'fsdp_min_num_params': 0, 'fsdp_config': {'min_num_params': 0, 'xla': False, 'xla_fsdp_v2': False, 'xla_fsdp_grad_ckpt': False}, 'fsdp_transformer_layer_cls_to_wrap': None, 'accelerator_config': {'split_batches': False, 'dispatch_batches': None, 'even_batches': True, 'use_seedable_sampler': True}, 'deepspeed': None, 'label_smoothing_factor': 0.0, 'optim': 'paged_adamw_32bit', 'optim_args': None, 'adafactor': False, 'group_by_length': False, 'length_column_name': 'length', 'report_to': ['tensorboard', 'wandb'], 'ddp_find_unused_parameters': None, 'ddp_bucket_cap_mb': None, 'ddp_broadcast_buffers': None, 'dataloader_pin_memory': True, 'dataloader_persistent_workers': False, 'skip_memory_metrics': True, 'use_legacy_prediction_loop': False, 'push_to_hub': False, 'resume_from_checkpoint': None, 'hub_model_id': None, 'hub_strategy': 'every_save', 'hub_token': '<HUB_TOKEN>', 'hub_private_repo': False, 'hub_always_push': False, 'gradient_checkpointing': True, 'gradient_checkpointing_kwargs': None, 'include_inputs_for_metrics': False, 'fp16_backend': 'auto', 'push_to_hub_model_id': None, 'push_to_hub_organization': None, 'push_to_hub_token': '<PUSH_TO_HUB_TOKEN>', 'mp_parameters': '', 'auto_find_batch_size': True, 'full_determinism': False, 'torchdynamo': None, 'ray_scope': 'last', 'ddp_timeout': 1800, 'torch_compile': False, 'torch_compile_backend': None, 'torch_compile_mode': None, 'dispatch_batches': None, 'split_batches': None, 'include_tokens_per_second': False, 'include_num_input_tokens_seen': False, 'neftune_noise_alpha': None, 'optim_target_modules': None}
33
+ 2024-04-09 21:54:58,795 INFO MainThread:114 [jupyter.py:save_ipynb():373] not saving jupyter notebook
34
+ 2024-04-09 21:54:58,795 INFO MainThread:114 [wandb_init.py:_pause_backend():438] pausing backend
wandb/run-20240409_214603-e7cki9vp/run-e7cki9vp.wandb ADDED
Binary file (33.3 kB).
wandb/run-20240409_215743-953wtybl/files/conda-environment.yaml ADDED
File without changes
wandb/run-20240409_215743-953wtybl/files/config.yaml ADDED
@@ -0,0 +1,686 @@
1
+ wandb_version: 1
2
+
3
+ _wandb:
4
+ desc: null
5
+ value:
6
+ python_version: 3.10.13
7
+ cli_version: 0.16.5
8
+ framework: huggingface
9
+ huggingface_version: 4.39.3
10
+ is_jupyter_run: true
11
+ is_kaggle_kernel: true
12
+ start_time: 1712699863.0
13
+ t:
14
+ 1:
15
+ - 1
16
+ - 2
17
+ - 3
18
+ - 5
19
+ - 11
20
+ - 12
21
+ - 49
22
+ - 51
23
+ - 53
24
+ - 55
25
+ - 71
26
+ - 84
27
+ - 98
28
+ - 105
29
+ 2:
30
+ - 1
31
+ - 2
32
+ - 3
33
+ - 5
34
+ - 11
35
+ - 12
36
+ - 49
37
+ - 51
38
+ - 53
39
+ - 55
40
+ - 71
41
+ - 84
42
+ - 98
43
+ - 105
44
+ 3:
45
+ - 7
46
+ - 23
47
+ 4: 3.10.13
48
+ 5: 0.16.5
49
+ 6: 4.39.3
50
+ 8:
51
+ - 1
52
+ - 2
53
+ - 5
54
+ 9:
55
+ 1: transformers_trainer
56
+ 13: linux-x86_64
57
+ m:
58
+ - 1: train/global_step
59
+ 6:
60
+ - 3
61
+ - 1: train/loss
62
+ 5: 1
63
+ 6:
64
+ - 1
65
+ - 1: train/grad_norm
66
+ 5: 1
67
+ 6:
68
+ - 1
69
+ - 1: train/learning_rate
70
+ 5: 1
71
+ 6:
72
+ - 1
73
+ - 1: train/epoch
74
+ 5: 1
75
+ 6:
76
+ - 1
77
+ vocab_size:
78
+ desc: null
79
+ value: 32000
80
+ max_position_embeddings:
81
+ desc: null
82
+ value: 2048
83
+ hidden_size:
84
+ desc: null
85
+ value: 2048
86
+ intermediate_size:
87
+ desc: null
88
+ value: 5632
89
+ num_hidden_layers:
90
+ desc: null
91
+ value: 22
92
+ num_attention_heads:
93
+ desc: null
94
+ value: 32
95
+ num_key_value_heads:
96
+ desc: null
97
+ value: 4
98
+ hidden_act:
99
+ desc: null
100
+ value: silu
101
+ initializer_range:
102
+ desc: null
103
+ value: 0.02
104
+ rms_norm_eps:
105
+ desc: null
106
+ value: 1.0e-05
107
+ pretraining_tp:
108
+ desc: null
109
+ value: 1
110
+ use_cache:
111
+ desc: null
112
+ value: false
113
+ rope_theta:
114
+ desc: null
115
+ value: 10000.0
116
+ rope_scaling:
117
+ desc: null
118
+ value: null
119
+ attention_bias:
120
+ desc: null
121
+ value: false
122
+ attention_dropout:
123
+ desc: null
124
+ value: 0.0
125
+ return_dict:
126
+ desc: null
127
+ value: true
128
+ output_hidden_states:
129
+ desc: null
130
+ value: false
131
+ output_attentions:
132
+ desc: null
133
+ value: false
134
+ torchscript:
135
+ desc: null
136
+ value: false
137
+ torch_dtype:
138
+ desc: null
139
+ value: float32
140
+ use_bfloat16:
141
+ desc: null
142
+ value: false
143
+ tf_legacy_loss:
144
+ desc: null
145
+ value: false
146
+ pruned_heads:
147
+ desc: null
148
+ value: {}
149
+ tie_word_embeddings:
150
+ desc: null
151
+ value: false
152
+ chunk_size_feed_forward:
153
+ desc: null
154
+ value: 0
155
+ is_encoder_decoder:
156
+ desc: null
157
+ value: false
158
+ is_decoder:
159
+ desc: null
160
+ value: false
161
+ cross_attention_hidden_size:
162
+ desc: null
163
+ value: null
164
+ add_cross_attention:
165
+ desc: null
166
+ value: false
167
+ tie_encoder_decoder:
168
+ desc: null
169
+ value: false
170
+ max_length:
171
+ desc: null
172
+ value: 20
173
+ min_length:
174
+ desc: null
175
+ value: 0
176
+ do_sample:
177
+ desc: null
178
+ value: false
179
+ early_stopping:
180
+ desc: null
181
+ value: false
182
+ num_beams:
183
+ desc: null
184
+ value: 1
185
+ num_beam_groups:
186
+ desc: null
187
+ value: 1
188
+ diversity_penalty:
189
+ desc: null
190
+ value: 0.0
191
+ temperature:
192
+ desc: null
193
+ value: 1.0
194
+ top_k:
195
+ desc: null
196
+ value: 50
197
+ top_p:
198
+ desc: null
199
+ value: 1.0
200
+ typical_p:
201
+ desc: null
202
+ value: 1.0
203
+ repetition_penalty:
204
+ desc: null
205
+ value: 1.0
206
+ length_penalty:
207
+ desc: null
208
+ value: 1.0
209
+ no_repeat_ngram_size:
210
+ desc: null
211
+ value: 0
212
+ encoder_no_repeat_ngram_size:
213
+ desc: null
214
+ value: 0
215
+ bad_words_ids:
216
+ desc: null
217
+ value: null
218
+ num_return_sequences:
219
+ desc: null
220
+ value: 1
221
+ output_scores:
222
+ desc: null
223
+ value: false
224
+ return_dict_in_generate:
225
+ desc: null
226
+ value: false
227
+ forced_bos_token_id:
228
+ desc: null
229
+ value: null
230
+ forced_eos_token_id:
231
+ desc: null
232
+ value: null
233
+ remove_invalid_values:
234
+ desc: null
235
+ value: false
236
+ exponential_decay_length_penalty:
237
+ desc: null
238
+ value: null
239
+ suppress_tokens:
240
+ desc: null
241
+ value: null
242
+ begin_suppress_tokens:
243
+ desc: null
244
+ value: null
245
+ architectures:
246
+ desc: null
247
+ value:
248
+ - LlamaForCausalLM
249
+ finetuning_task:
250
+ desc: null
251
+ value: null
252
+ id2label:
253
+ desc: null
254
+ value:
255
+ '0': LABEL_0
256
+ '1': LABEL_1
257
+ label2id:
258
+ desc: null
259
+ value:
260
+ LABEL_0: 0
261
+ LABEL_1: 1
262
+ tokenizer_class:
263
+ desc: null
264
+ value: null
265
+ prefix:
266
+ desc: null
267
+ value: null
268
+ bos_token_id:
269
+ desc: null
270
+ value: 1
271
+ pad_token_id:
272
+ desc: null
273
+ value: null
274
+ eos_token_id:
275
+ desc: null
276
+ value: 2
277
+ sep_token_id:
278
+ desc: null
279
+ value: null
280
+ decoder_start_token_id:
281
+ desc: null
282
+ value: null
283
+ task_specific_params:
284
+ desc: null
285
+ value: null
286
+ problem_type:
287
+ desc: null
288
+ value: null
289
+ _name_or_path:
290
+ desc: null
291
+ value: TinyLlama/TinyLlama-1.1B-intermediate-step-1431k-3T
292
+ transformers_version:
293
+ desc: null
294
+ value: 4.39.3
295
+ model_type:
296
+ desc: null
297
+ value: llama
298
+ quantization_config:
299
+ desc: null
300
+ value:
301
+ quant_method: QuantizationMethod.BITS_AND_BYTES
302
+ _load_in_8bit: false
303
+ _load_in_4bit: true
304
+ llm_int8_threshold: 6.0
305
+ llm_int8_skip_modules: null
306
+ llm_int8_enable_fp32_cpu_offload: false
307
+ llm_int8_has_fp16_weight: false
308
+ bnb_4bit_quant_type: nf4
309
+ bnb_4bit_use_double_quant: false
310
+ bnb_4bit_compute_dtype: float16
311
+ bnb_4bit_quant_storage: uint8
312
+ load_in_4bit: true
313
+ load_in_8bit: false
314
+ output_dir:
315
+ desc: null
316
+ value: /kaggle/working/
317
+ overwrite_output_dir:
318
+ desc: null
319
+ value: false
320
+ do_train:
321
+ desc: null
322
+ value: false
323
+ do_eval:
324
+ desc: null
325
+ value: false
326
+ do_predict:
327
+ desc: null
328
+ value: false
329
+ evaluation_strategy:
330
+ desc: null
331
+ value: 'no'
332
+ prediction_loss_only:
333
+ desc: null
334
+ value: false
335
+ per_device_train_batch_size:
336
+ desc: null
337
+ value: 2
338
+ per_device_eval_batch_size:
339
+ desc: null
340
+ value: 8
341
+ per_gpu_train_batch_size:
342
+ desc: null
343
+ value: null
344
+ per_gpu_eval_batch_size:
345
+ desc: null
346
+ value: null
347
+ gradient_accumulation_steps:
348
+ desc: null
349
+ value: 1
350
+ eval_accumulation_steps:
351
+ desc: null
352
+ value: null
353
+ eval_delay:
354
+ desc: null
355
+ value: 0
356
+ learning_rate:
357
+ desc: null
358
+ value: 1.0e-06
359
+ weight_decay:
360
+ desc: null
361
+ value: 0.001
362
+ adam_beta1:
363
+ desc: null
364
+ value: 0.9
365
+ adam_beta2:
366
+ desc: null
367
+ value: 0.999
368
+ adam_epsilon:
369
+ desc: null
370
+ value: 1.0e-08
371
+ max_grad_norm:
372
+ desc: null
373
+ value: 0.1
374
+ num_train_epochs:
375
+ desc: null
376
+ value: 5
377
+ max_steps:
378
+ desc: null
379
+ value: 70000
380
+ lr_scheduler_type:
381
+ desc: null
382
+ value: cosine
383
+ lr_scheduler_kwargs:
384
+ desc: null
385
+ value: {}
386
+ warmup_ratio:
387
+ desc: null
388
+ value: 0.03
389
+ warmup_steps:
390
+ desc: null
391
+ value: 0
392
+ log_level:
393
+ desc: null
394
+ value: passive
395
+ log_level_replica:
396
+ desc: null
397
+ value: warning
398
+ log_on_each_node:
399
+ desc: null
400
+ value: true
401
+ logging_dir:
402
+ desc: null
403
+ value: /kaggle/working/runs/Apr09_21-55-28_6e44b39f6877
404
+ logging_strategy:
405
+ desc: null
406
+ value: steps
407
+ logging_first_step:
408
+ desc: null
409
+ value: false
410
+ logging_steps:
411
+ desc: null
412
+ value: 100
413
+ logging_nan_inf_filter:
414
+ desc: null
415
+ value: true
416
+ save_strategy:
417
+ desc: null
418
+ value: steps
419
+ save_steps:
420
+ desc: null
421
+ value: 100
422
+ save_total_limit:
423
+ desc: null
424
+ value: 1
425
+ save_safetensors:
426
+ desc: null
427
+ value: true
428
+ save_on_each_node:
429
+ desc: null
430
+ value: false
431
+ save_only_model:
432
+ desc: null
433
+ value: false
434
+ no_cuda:
435
+ desc: null
436
+ value: false
437
+ use_cpu:
438
+ desc: null
439
+ value: false
440
+ use_mps_device:
441
+ desc: null
442
+ value: false
443
+ seed:
444
+ desc: null
445
+ value: 42
446
+ data_seed:
447
+ desc: null
448
+ value: null
449
+ jit_mode_eval:
450
+ desc: null
451
+ value: false
452
+ use_ipex:
453
+ desc: null
454
+ value: false
455
+ bf16:
456
+ desc: null
457
+ value: false
458
+ fp16:
459
+ desc: null
460
+ value: false
461
+ fp16_opt_level:
462
+ desc: null
463
+ value: O1
464
+ half_precision_backend:
465
+ desc: null
466
+ value: auto
467
+ bf16_full_eval:
468
+ desc: null
469
+ value: false
470
+ fp16_full_eval:
471
+ desc: null
472
+ value: false
473
+ tf32:
474
+ desc: null
475
+ value: null
476
+ local_rank:
477
+ desc: null
478
+ value: 0
479
+ ddp_backend:
480
+ desc: null
481
+ value: null
482
+ tpu_num_cores:
483
+ desc: null
484
+ value: null
485
+ tpu_metrics_debug:
486
+ desc: null
487
+ value: false
488
+ debug:
489
+ desc: null
490
+ value: []
491
+ dataloader_drop_last:
492
+ desc: null
493
+ value: false
494
+ eval_steps:
495
+ desc: null
496
+ value: null
497
+ dataloader_num_workers:
498
+ desc: null
499
+ value: 8
500
+ dataloader_prefetch_factor:
501
+ desc: null
502
+ value: null
503
+ past_index:
504
+ desc: null
505
+ value: -1
506
+ run_name:
507
+ desc: null
508
+ value: /kaggle/working/
509
+ disable_tqdm:
510
+ desc: null
511
+ value: false
512
+ remove_unused_columns:
513
+ desc: null
514
+ value: true
515
+ label_names:
516
+ desc: null
517
+ value: null
518
+ load_best_model_at_end:
519
+ desc: null
520
+ value: false
521
+ metric_for_best_model:
522
+ desc: null
523
+ value: null
524
+ greater_is_better:
525
+ desc: null
526
+ value: null
527
+ ignore_data_skip:
528
+ desc: null
529
+ value: false
530
+ fsdp:
531
+ desc: null
532
+ value: []
533
+ fsdp_min_num_params:
534
+ desc: null
535
+ value: 0
536
+ fsdp_config:
537
+ desc: null
538
+ value:
539
+ min_num_params: 0
540
+ xla: false
541
+ xla_fsdp_v2: false
542
+ xla_fsdp_grad_ckpt: false
543
+ fsdp_transformer_layer_cls_to_wrap:
544
+ desc: null
545
+ value: null
546
+ accelerator_config:
547
+ desc: null
548
+ value:
549
+ split_batches: false
550
+ dispatch_batches: null
551
+ even_batches: true
552
+ use_seedable_sampler: true
553
+ deepspeed:
554
+ desc: null
555
+ value: null
556
+ label_smoothing_factor:
557
+ desc: null
558
+ value: 0.0
559
+ optim:
560
+ desc: null
561
+ value: paged_adamw_32bit
562
+ optim_args:
563
+ desc: null
564
+ value: null
565
+ adafactor:
566
+ desc: null
567
+ value: false
568
+ group_by_length:
569
+ desc: null
570
+ value: false
571
+ length_column_name:
572
+ desc: null
573
+ value: length
574
+ report_to:
575
+ desc: null
576
+ value:
577
+ - tensorboard
578
+ - wandb
579
+ ddp_find_unused_parameters:
580
+ desc: null
581
+ value: null
582
+ ddp_bucket_cap_mb:
583
+ desc: null
584
+ value: null
585
+ ddp_broadcast_buffers:
586
+ desc: null
587
+ value: null
588
+ dataloader_pin_memory:
589
+ desc: null
590
+ value: true
591
+ dataloader_persistent_workers:
592
+ desc: null
593
+ value: false
594
+ skip_memory_metrics:
595
+ desc: null
596
+ value: true
597
+ use_legacy_prediction_loop:
598
+ desc: null
599
+ value: false
600
+ push_to_hub:
601
+ desc: null
602
+ value: false
603
+ resume_from_checkpoint:
604
+ desc: null
605
+ value: null
606
+ hub_model_id:
607
+ desc: null
608
+ value: null
609
+ hub_strategy:
610
+ desc: null
611
+ value: every_save
612
+ hub_token:
613
+ desc: null
614
+ value: <HUB_TOKEN>
615
+ hub_private_repo:
616
+ desc: null
617
+ value: false
618
+ hub_always_push:
619
+ desc: null
620
+ value: false
621
+ gradient_checkpointing:
622
+ desc: null
623
+ value: true
624
+ gradient_checkpointing_kwargs:
625
+ desc: null
626
+ value: null
627
+ include_inputs_for_metrics:
628
+ desc: null
629
+ value: false
630
+ fp16_backend:
631
+ desc: null
632
+ value: auto
633
+ push_to_hub_model_id:
634
+ desc: null
635
+ value: null
636
+ push_to_hub_organization:
637
+ desc: null
638
+ value: null
639
+ push_to_hub_token:
640
+ desc: null
641
+ value: <PUSH_TO_HUB_TOKEN>
642
+ mp_parameters:
643
+ desc: null
644
+ value: ''
645
+ auto_find_batch_size:
646
+ desc: null
647
+ value: true
648
+ full_determinism:
649
+ desc: null
650
+ value: false
651
+ torchdynamo:
652
+ desc: null
653
+ value: null
654
+ ray_scope:
655
+ desc: null
656
+ value: last
657
+ ddp_timeout:
658
+ desc: null
659
+ value: 1800
660
+ torch_compile:
661
+ desc: null
662
+ value: false
663
+ torch_compile_backend:
664
+ desc: null
665
+ value: null
666
+ torch_compile_mode:
667
+ desc: null
668
+ value: null
669
+ dispatch_batches:
670
+ desc: null
671
+ value: null
672
+ split_batches:
673
+ desc: null
674
+ value: null
675
+ include_tokens_per_second:
676
+ desc: null
677
+ value: false
678
+ include_num_input_tokens_seen:
679
+ desc: null
680
+ value: false
681
+ neftune_noise_alpha:
682
+ desc: null
683
+ value: null
684
+ optim_target_modules:
685
+ desc: null
686
+ value: null
wandb/run-20240409_215743-953wtybl/files/output.log ADDED
@@ -0,0 +1,48 @@
1
+ /opt/conda/lib/python3.10/site-packages/torch/utils/data/dataloader.py:557: UserWarning: This DataLoader will create 8 worker processes in total. Our suggested max number of worker in current system is 4, which is smaller than what this DataLoader is going to create. Please be aware that excessive worker creation might get DataLoader running slow or even freeze, lower the worker number to avoid potential slowness/freeze if necessary.
2
+ warnings.warn(_create_warning_msg(
3
+ huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
4
+ To disable this warning, you can either:
5
+ - Avoid using `tokenizers` before the fork if possible
6
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
7
+ huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
8
+ To disable this warning, you can either:
9
+ - Avoid using `tokenizers` before the fork if possible
10
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
11
+ huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
12
+ To disable this warning, you can either:
13
+ - Avoid using `tokenizers` before the fork if possible
14
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
15
+ huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
16
+ To disable this warning, you can either:
17
+ - Avoid using `tokenizers` before the fork if possible
18
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
19
+ huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
20
+ To disable this warning, you can either:
21
+ - Avoid using `tokenizers` before the fork if possible
22
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
23
+ huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
24
+ To disable this warning, you can either:
25
+ - Avoid using `tokenizers` before the fork if possible
26
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
27
+ huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
28
+ To disable this warning, you can either:
29
+ - Avoid using `tokenizers` before the fork if possible
30
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
31
+ huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
32
+ To disable this warning, you can either:
33
+ - Avoid using `tokenizers` before the fork if possible
34
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
35
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
36
+ warnings.warn(
37
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
38
+ warnings.warn(
39
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
40
+ warnings.warn(
41
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
42
+ warnings.warn(
43
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
44
+ warnings.warn(
45
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
46
+ warnings.warn(
47
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
48
+ warnings.warn(
wandb/run-20240409_215743-953wtybl/files/requirements.txt ADDED
@@ -0,0 +1,864 @@
1
+ Babel==2.14.0
2
+ Boruta==0.3
3
+ Brotli==1.0.9
4
+ CVXcanon==0.1.2
5
+ Cartopy==0.22.0
6
+ Cython==3.0.8
7
+ Deprecated==1.2.14
8
+ Farama-Notifications==0.0.4
9
+ Flask==3.0.2
10
+ Geohash==1.0
11
+ GitPython==3.1.41
12
+ ImageHash==4.3.1
13
+ Janome==0.5.0
14
+ Jinja2==3.1.2
15
+ LunarCalendar==0.0.9
16
+ Mako==1.3.2
17
+ Markdown==3.5.2
18
+ MarkupSafe==2.1.3
19
+ MarkupSafe==2.1.5
20
+ Pillow==9.5.0
21
+ PuLP==2.8.0
22
+ PyArabic==0.6.15
23
+ PyJWT==2.8.0
24
+ PyMeeus==0.5.12
25
+ PySocks==1.7.1
26
+ PyUpSet==0.1.1.post7
27
+ PyWavelets==1.5.0
28
+ PyYAML==6.0.1
29
+ Pygments==2.17.2
30
+ Pympler==1.0.1
31
+ QtPy==2.4.1
32
+ Rtree==1.2.0
33
+ SQLAlchemy==2.0.25
34
+ SecretStorage==3.3.3
35
+ Send2Trash==1.8.2
36
+ Shapely==1.8.5.post1
37
+ Shimmy==1.3.0
38
+ SimpleITK==2.3.1
39
+ TPOT==0.12.1
40
+ Theano-PyMC==1.1.2
41
+ Theano==1.0.5
42
+ Wand==0.6.13
43
+ Werkzeug==3.0.2
44
+ absl-py==1.4.0
45
+ accelerate==0.28.0
46
+ access==1.1.9
47
+ affine==2.4.0
48
+ aiobotocore==2.12.2
49
+ aiofiles==22.1.0
50
+ aiohttp-cors==0.7.0
51
+ aiohttp==3.9.1
52
+ aioitertools==0.11.0
53
+ aiorwlock==1.3.0
54
+ aiosignal==1.3.1
55
+ aiosqlite==0.19.0
56
+ albumentations==1.4.0
57
+ alembic==1.13.1
58
+ altair==5.3.0
59
+ annotated-types==0.6.0
60
+ annoy==1.17.3
61
+ anyio==4.2.0
62
+ apache-beam==2.46.0
63
+ aplus==0.11.0
64
+ appdirs==1.4.4
65
+ archspec==0.2.3
66
+ argon2-cffi-bindings==21.2.0
67
+ argon2-cffi==23.1.0
68
+ array-record==0.5.0
69
+ arrow==1.3.0
70
+ arviz==0.17.1
71
+ astroid==3.1.0
72
+ astropy-iers-data==0.2024.4.1.0.33.14
73
+ astropy==6.0.1
74
+ asttokens==2.4.1
75
+ astunparse==1.6.3
76
+ async-lru==2.0.4
77
+ async-timeout==4.0.3
78
+ attrs==23.2.0
79
+ audioread==3.0.1
80
+ autopep8==2.0.4
81
+ backoff==2.2.1
82
+ bayesian-optimization==1.4.3
83
+ beatrix_jupyterlab==2023.128.151533
84
+ beautifulsoup4==4.12.2
85
+ bitsandbytes==0.43.0
86
+ blake3==0.2.1
87
+ bleach==6.1.0
88
+ blessed==1.20.0
89
+ blinker==1.7.0
90
+ blis==0.7.10
91
+ blosc2==2.6.0
92
+ bokeh==3.3.4
93
+ boltons==23.1.1
94
+ boto3==1.26.100
95
+ botocore==1.34.51
96
+ bq_helper==0.4.1
97
+ bqplot==0.12.43
98
+ branca==0.7.1
99
+ brewer2mpl==1.4.1
100
+ brotlipy==0.7.0
101
+ cached-property==1.5.2
102
+ cachetools==4.2.4
103
+ cachetools==5.3.2
104
+ catalogue==2.0.10
105
+ catalyst==22.4
106
+ catboost==1.2.3
107
+ category-encoders==2.6.3
108
+ certifi==2024.2.2
109
+ cesium==0.12.1
110
+ cffi==1.16.0
111
+ charset-normalizer==3.3.2
112
+ chex==0.1.86
113
+ cleverhans==4.0.0
114
+ click-plugins==1.1.1
115
+ click==8.1.7
116
+ cligj==0.7.2
117
+ cloud-tpu-client==0.10
118
+ cloud-tpu-profiler==2.4.0
119
+ cloudpathlib==0.16.0
120
+ cloudpickle==2.2.1
121
+ cloudpickle==3.0.0
122
+ cmdstanpy==1.2.2
123
+ colorama==0.4.6
124
+ colorcet==3.1.0
125
+ colorful==0.5.6
126
+ colorlog==6.8.2
127
+ colorlover==0.3.0
128
+ comm==0.2.1
129
+ conda-libmamba-solver==23.7.0
130
+ conda-package-handling==2.2.0
131
+ conda==23.7.4
132
+ conda_package_streaming==0.9.0
133
+ confection==0.1.4
134
+ contextily==1.6.0
135
+ contourpy==1.2.0
136
+ convertdate==2.4.0
137
+ crcmod==1.7
138
+ cryptography==41.0.7
139
+ cuda-python==12.4.0
140
+ cudf==23.8.0
141
+ cufflinks==0.17.3
142
+ cuml==23.8.0
143
+ cupy==13.0.0
144
+ cycler==0.12.1
145
+ cymem==2.0.8
146
+ cytoolz==0.12.3
147
+ daal4py==2024.2.0
148
+ daal==2024.2.0
149
+ dacite==1.8.1
150
+ dask-cuda==23.8.0
151
+ dask-cudf==23.8.0
152
+ dask-expr==1.0.9
153
+ dask==2024.4.0
154
+ dataclasses-json==0.6.4
155
+ dataproc_jupyter_plugin==0.1.66
156
+ datasets==2.16.0
157
+ datashader==0.16.0
158
+ datatile==1.0.3
159
+ db-dtypes==1.2.0
160
+ deap==1.4.1
161
+ debugpy==1.8.0
162
+ decorator==5.1.1
163
+ deepdiff==6.7.1
164
+ defusedxml==0.7.1
165
+ deprecation==2.1.0
166
+ descartes==1.1.0
167
+ dill==0.3.7
168
+ dipy==1.9.0
169
+ distlib==0.3.8
170
+ distributed==2023.7.1
171
+ distro==1.9.0
172
+ dm-tree==0.1.8
173
+ docker-pycreds==0.4.0
174
+ docker==7.0.0
175
+ docopt==0.6.2
176
+ docstring-parser==0.15
177
+ docstring-to-markdown==0.15
178
+ docutils==0.20.1
179
+ earthengine-api==0.1.395
180
+ easydict==1.13
181
+ easyocr==1.7.1
182
+ ecos==2.0.13
183
+ eli5==0.13.0
184
+ emoji==2.11.0
185
+ en-core-web-lg==3.7.1
186
+ en-core-web-sm==3.7.1
187
+ entrypoints==0.4
188
+ ephem==4.1.5
189
+ esda==2.5.1
190
+ essentia==2.1b6.dev1110
191
+ et-xmlfile==1.1.0
192
+ etils==1.6.0
193
+ exceptiongroup==1.2.0
194
+ executing==2.0.1
195
+ explainable-ai-sdk==1.3.3
196
+ fastai==2.7.14
197
+ fastapi==0.108.0
198
+ fastavro==1.9.3
199
+ fastcore==1.5.29
200
+ fastdownload==0.0.7
201
+ fasteners==0.19
202
+ fastjsonschema==2.19.1
203
+ fastprogress==1.0.3
204
+ fastrlock==0.8.2
205
+ fasttext==0.9.2
206
+ feather-format==0.4.1
207
+ featuretools==1.30.0
208
+ filelock==3.13.1
209
+ fiona==1.9.6
210
+ fitter==1.7.0
211
+ flake8==7.0.0
212
+ flashtext==2.7
213
+ flatbuffers==23.5.26
214
+ flax==0.8.2
215
+ folium==0.16.0
216
+ fonttools==4.47.0
217
+ fonttools==4.50.0
218
+ fqdn==1.5.1
219
+ frozendict==2.4.1
220
+ frozenlist==1.4.1
221
+ fsspec==2023.10.0
222
+ fsspec==2024.3.1
223
+ funcy==2.0
224
+ fury==0.10.0
225
+ future==1.0.0
226
+ fuzzywuzzy==0.18.0
227
+ gast==0.5.4
228
+ gatspy==0.3
229
+ gcsfs==2024.2.0
230
+ gensim==4.3.2
231
+ geographiclib==2.0
232
+ geojson==3.1.0
233
+ geopandas==0.14.3
234
+ geoplot==0.5.1
235
+ geopy==2.4.1
236
+ geoviews==1.11.1
237
+ ggplot==0.11.5
238
+ giddy==2.3.5
239
+ gitdb==4.0.11
240
+ google-ai-generativelanguage==0.4.0
241
+ google-api-core==2.11.1
242
+ google-api-core==2.18.0
243
+ google-api-python-client==2.125.0
244
+ google-apitools==0.5.31
245
+ google-auth-httplib2==0.2.0
246
+ google-auth-oauthlib==1.2.0
247
+ google-auth==2.26.1
248
+ google-cloud-aiplatform==0.6.0a1
249
+ google-cloud-artifact-registry==1.10.0
250
+ google-cloud-automl==1.0.1
251
+ google-cloud-bigquery==2.34.4
252
+ google-cloud-bigtable==1.7.3
253
+ google-cloud-core==2.4.1
254
+ google-cloud-datastore==2.19.0
255
+ google-cloud-dlp==3.14.0
256
+ google-cloud-jupyter-config==0.0.5
257
+ google-cloud-language==2.13.3
258
+ google-cloud-monitoring==2.18.0
259
+ google-cloud-pubsub==2.19.0
260
+ google-cloud-pubsublite==1.9.0
261
+ google-cloud-recommendations-ai==0.7.1
262
+ google-cloud-resource-manager==1.11.0
263
+ google-cloud-spanner==3.40.1
264
+ google-cloud-storage==1.44.0
265
+ google-cloud-translate==3.12.1
266
+ google-cloud-videointelligence==2.13.3
267
+ google-cloud-vision==2.8.0
268
+ google-crc32c==1.5.0
269
+ google-generativeai==0.4.1
270
+ google-pasta==0.2.0
271
+ google-resumable-media==2.7.0
272
+ googleapis-common-protos==1.62.0
273
+ gplearn==0.4.2
274
+ gpustat==1.0.0
275
+ gpxpy==1.6.2
276
+ graphviz==0.20.3
277
+ greenlet==3.0.3
278
+ grpc-google-iam-v1==0.12.7
279
+ grpcio-status==1.48.1
280
+ grpcio-status==1.48.2
281
+ grpcio==1.51.1
282
+ grpcio==1.60.0
283
+ gviz-api==1.10.0
284
+ gym-notices==0.0.8
285
+ gym==0.26.2
286
+ gymnasium==0.29.0
287
+ h11==0.14.0
288
+ h2o==3.46.0.1
289
+ h5netcdf==1.3.0
290
+ h5py==3.10.0
291
+ haversine==2.8.1
292
+ hdfs==2.7.3
293
+ hep-ml==0.7.2
294
+ hijri-converter==2.3.1
295
+ hmmlearn==0.3.2
296
+ holidays==0.24
297
+ holoviews==1.18.3
298
+ hpsklearn==0.1.0
299
+ html5lib==1.1
300
+ htmlmin==0.1.12
301
+ httpcore==1.0.5
302
+ httplib2==0.21.0
303
+ httptools==0.6.1
304
+ httpx==0.27.0
305
+ huggingface-hub==0.22.2
306
+ hunspell==0.5.5
307
+ hydra-slayer==0.5.0
308
+ hyperopt==0.2.7
309
+ hypertools==0.8.0
310
+ idna==3.6
311
+ igraph==0.11.4
312
+ imagecodecs==2024.1.1
313
+ imageio==2.33.1
314
+ imbalanced-learn==0.12.2
315
+ imgaug==0.4.0
316
+ importlib-metadata==6.11.0
317
+ importlib-metadata==7.0.1
318
+ importlib-resources==6.1.1
319
+ inequality==1.0.1
320
+ iniconfig==2.0.0
321
+ ipydatawidgets==4.3.5
322
+ ipykernel==6.28.0
323
+ ipyleaflet==0.18.2
324
+ ipympl==0.7.0
325
+ ipython-genutils==0.2.0
326
+ ipython-genutils==0.2.0
327
+ ipython-sql==0.5.0
328
+ ipython==8.20.0
329
+ ipyvolume==0.6.3
330
+ ipyvue==1.10.2
331
+ ipyvuetify==1.9.3
332
+ ipywebrtc==0.6.0
333
+ ipywidgets==7.7.1
334
+ isoduration==20.11.0
335
+ isort==5.13.2
336
+ isoweek==1.3.3
337
+ itsdangerous==2.1.2
338
+ jaraco.classes==3.3.0
339
+ jax-jumpy==1.0.0
340
+ jax==0.4.23
341
+ jaxlib==0.4.23.dev20240116
342
+ jedi==0.19.1
343
+ jeepney==0.8.0
344
+ jieba==0.42.1
345
+ jmespath==1.0.1
346
+ joblib==1.3.2
347
+ json5==0.9.14
348
+ jsonpatch==1.33
349
+ jsonpointer==2.4
350
+ jsonschema-specifications==2023.12.1
351
+ jsonschema==4.20.0
352
+ jupyter-console==6.6.3
353
+ jupyter-events==0.9.0
354
+ jupyter-http-over-ws==0.0.8
355
+ jupyter-lsp==1.5.1
356
+ jupyter-server-mathjax==0.2.6
357
+ jupyter-ydoc==0.2.5
358
+ jupyter_client==7.4.9
359
+ jupyter_client==8.6.0
360
+ jupyter_core==5.7.1
361
+ jupyter_server==2.13.0
362
+ jupyter_server_fileid==0.9.1
363
+ jupyter_server_proxy==4.1.0
364
+ jupyter_server_terminals==0.5.1
365
+ jupyter_server_ydoc==0.8.0
366
+ jupyterlab-lsp==5.1.0
367
+ jupyterlab-widgets==3.0.9
368
+ jupyterlab==4.1.5
369
+ jupyterlab_git==0.44.0
370
+ jupyterlab_pygments==0.3.0
371
+ jupyterlab_server==2.25.2
372
+ jupytext==1.16.0
373
+ kaggle-environments==1.14.3
374
+ kaggle==1.6.8
375
+ kagglehub==0.2.2
376
+ keras-cv==0.8.2
377
+ keras-nlp==0.8.2
378
+ keras-tuner==1.4.6
379
+ keras==3.1.1
380
+ kernels-mixer==0.0.7
381
+ keyring==24.3.0
382
+ keyrings.google-artifactregistry-auth==1.1.2
383
+ kfp-pipeline-spec==0.2.2
384
+ kfp-server-api==2.0.5
385
+ kfp==2.5.0
386
+ kiwisolver==1.4.5
387
+ kmapper==2.0.1
388
+ kmodes==0.12.2
389
+ korean-lunar-calendar==0.3.1
390
+ kornia==0.7.2
391
+ kornia_rs==0.1.3
392
+ kt-legacy==1.0.5
393
+ kubernetes==26.1.0
394
+ langcodes==3.3.0
395
+ langid==1.1.6
396
+ lazy_loader==0.3
397
+ learntools==0.3.4
398
+ leven==1.0.4
399
+ libclang==16.0.6
400
+ libmambapy==1.5.0
401
+ libpysal==4.9.2
402
+ librosa==0.10.1
403
+ lightgbm==4.2.0
404
+ lightning-utilities==0.11.2
405
+ lime==0.2.0.1
406
+ line-profiler==4.1.2
407
+ linkify-it-py==2.0.3
408
+ llvmlite==0.41.1
409
+ llvmlite==0.42.0
410
+ lml==0.1.0
411
+ locket==1.0.0
412
+ loguru==0.7.2
413
+ lxml==5.2.1
414
+ lz4==4.3.3
415
+ mamba==1.5.0
416
+ mapclassify==2.6.1
417
+ markdown-it-py==3.0.0
418
+ marshmallow==3.21.1
419
+ matplotlib-inline==0.1.6
420
+ matplotlib-venn==0.11.10
421
+ matplotlib==3.7.5
422
+ matplotlib==3.8.3
423
+ mccabe==0.7.0
424
+ mdit-py-plugins==0.4.0
425
+ mdurl==0.1.2
426
+ memory-profiler==0.61.0
427
+ menuinst==2.0.1
428
+ mercantile==1.2.1
429
+ mgwr==2.2.1
430
+ missingno==0.5.2
431
+ mistune==0.8.4
432
+ mizani==0.11.1
433
+ ml-dtypes==0.2.0
434
+ mlcrate==0.2.0
435
+ mlens==0.2.3
436
+ mlxtend==0.23.1
437
+ mne==1.6.1
438
+ mnist==0.2.2
439
+ momepy==0.7.0
440
+ more-itertools==10.2.0
441
+ mpld3==0.5.10
442
+ mpmath==1.3.0
443
+ msgpack==1.0.7
444
+ multidict==6.0.4
445
+ multimethod==1.10
446
+ multipledispatch==1.0.0
447
+ multiprocess==0.70.15
448
+ munkres==1.1.4
449
+ murmurhash==1.0.10
450
+ mypy-extensions==1.0.0
451
+ namex==0.0.7
452
+ nb-conda-kernels==2.3.1
453
+ nb_conda==2.2.1
454
+ nbclassic==1.0.0
455
+ nbclient==0.5.13
456
+ nbconvert==6.4.5
457
+ nbdime==3.2.0
458
+ nbformat==5.9.2
459
+ ndindex==1.8
460
+ nest-asyncio==1.5.8
461
+ networkx==3.2.1
462
+ nibabel==5.2.1
463
+ nilearn==0.10.3
464
+ ninja==1.11.1.1
465
+ nltk==3.2.4
466
+ nose==1.3.7
467
+ notebook==6.5.4
468
+ notebook==6.5.6
469
+ notebook_executor==0.2
470
+ notebook_shim==0.2.3
471
+ numba==0.58.1
472
+ numba==0.59.1
473
+ numexpr==2.10.0
474
+ numpy==1.26.4
475
+ nvidia-ml-py==11.495.46
476
+ nvtx==0.2.10
477
+ oauth2client==4.1.3
478
+ oauthlib==3.2.2
479
+ objsize==0.6.1
480
+ odfpy==1.4.1
481
+ olefile==0.47
482
+ onnx==1.16.0
483
+ opencensus-context==0.1.3
484
+ opencensus==0.11.4
485
+ opencv-contrib-python==4.9.0.80
486
+ opencv-python-headless==4.9.0.80
487
+ opencv-python==4.9.0.80
488
+ openpyxl==3.1.2
489
+ openslide-python==1.3.1
490
+ opentelemetry-api==1.22.0
491
+ opentelemetry-exporter-otlp-proto-common==1.22.0
492
+ opentelemetry-exporter-otlp-proto-grpc==1.22.0
493
+ opentelemetry-exporter-otlp-proto-http==1.22.0
494
+ opentelemetry-exporter-otlp==1.22.0
495
+ opentelemetry-proto==1.22.0
496
+ opentelemetry-sdk==1.22.0
497
+ opentelemetry-semantic-conventions==0.43b0
498
+ opt-einsum==3.3.0
499
+ optax==0.2.2
500
+ optree==0.11.0
501
+ optuna==3.6.1
502
+ orbax-checkpoint==0.5.7
503
+ ordered-set==4.1.0
504
+ orjson==3.9.10
505
+ ortools==9.4.1874
506
+ osmnx==1.9.2
507
+ overrides==7.4.0
508
+ packaging==21.3
509
+ pandas-datareader==0.10.0
510
+ pandas-profiling==3.6.6
511
+ pandas-summary==0.2.0
512
+ pandas==2.1.4
513
+ pandas==2.2.1
514
+ pandasql==0.7.3
515
+ pandocfilters==1.5.0
516
+ panel==1.3.8
517
+ papermill==2.5.0
518
+ param==2.1.0
519
+ parso==0.8.3
520
+ partd==1.4.1
521
+ path.py==12.5.0
522
+ path==16.10.0
523
+ pathos==0.3.2
524
+ pathy==0.10.3
525
+ patsy==0.5.6
526
+ pdf2image==1.17.0
527
+ peft==0.10.0
528
+ pettingzoo==1.24.0
529
+ pexpect==4.8.0
530
+ pexpect==4.9.0
531
+ phik==0.12.4
532
+ pickleshare==0.7.5
533
+ pillow==10.3.0
534
+ pip==23.3.2
535
+ pkgutil_resolve_name==1.3.10
536
+ platformdirs==4.2.0
537
+ plotly-express==0.4.1
538
+ plotly==5.18.0
539
+ plotnine==0.13.4
540
+ pluggy==1.4.0
541
+ pointpats==2.4.0
542
+ polars==0.20.18
543
+ polyglot==16.7.4
544
+ pooch==1.8.1
545
+ pox==0.3.4
546
+ ppca==0.0.4
547
+ ppft==1.7.6.8
548
+ preprocessing==0.1.13
549
+ preshed==3.0.9
550
+ prettytable==3.9.0
551
+ progressbar2==4.4.2
552
+ prometheus-client==0.19.0
553
+ promise==2.3
554
+ prompt-toolkit==3.0.42
555
+ prompt-toolkit==3.0.43
556
+ prophet==1.1.1
557
+ proto-plus==1.23.0
558
+ protobuf==3.20.3
559
+ protobuf==4.21.12
560
+ psutil==5.9.3
561
+ psutil==5.9.7
562
+ ptyprocess==0.7.0
563
+ pudb==2024.1
564
+ pure-eval==0.2.2
565
+ py-cpuinfo==9.0.0
566
+ py-spy==0.3.14
567
+ py4j==0.10.9.7
568
+ pyLDAvis==3.4.1
569
+ pyOpenSSL==23.3.0
570
+ pyaml==23.12.0
571
+ pyarrow-hotfix==0.6
572
+ pyarrow==15.0.2
573
+ pyasn1-modules==0.3.0
574
+ pyasn1==0.5.1
575
+ pybind11==2.12.0
576
+ pyclipper==1.3.0.post5
577
+ pycodestyle==2.11.1
578
+ pycosat==0.6.6
579
+ pycparser==2.21
580
+ pycryptodome==3.20.0
581
+ pyct==0.5.0
582
+ pycuda==2024.1
583
+ pydantic==2.5.3
584
+ pydantic==2.6.4
585
+ pydantic_core==2.14.6
586
+ pydantic_core==2.16.3
587
+ pydegensac==0.1.2
588
+ pydicom==2.4.4
589
+ pydocstyle==6.3.0
590
+ pydot==1.4.2
591
+ pydub==0.25.1
592
+ pyemd==1.0.0
593
+ pyerfa==2.0.1.1
594
+ pyexcel-io==0.6.6
595
+ pyexcel-ods==0.6.0
596
+ pyflakes==3.2.0
597
+ pygltflib==1.16.2
598
+ pykalman==0.9.7
599
+ pylibraft==23.8.0
600
+ pylint==3.1.0
601
+ pymc3==3.11.4
602
+ pymongo==3.13.0
603
+ pynndescent==0.5.12
604
+ pynvml==11.4.1
605
+ pynvrtc==9.2
606
+ pyparsing==3.1.1
607
+ pyparsing==3.1.2
608
+ pypdf==4.1.0
609
+ pyproj==3.6.1
610
+ pysal==24.1
611
+ pyshp==2.3.1
612
+ pytesseract==0.3.10
613
+ pytest==8.1.1
614
+ python-bidi==0.4.2
615
+ python-dateutil==2.9.0.post0
616
+ python-dotenv==1.0.0
617
+ python-json-logger==2.0.7
618
+ python-louvain==0.16
619
+ python-lsp-jsonrpc==1.1.2
620
+ python-lsp-server==1.11.0
621
+ python-slugify==8.0.4
622
+ python-utils==3.8.2
623
+ pythreejs==2.4.2
624
+ pytoolconfig==1.3.1
625
+ pytools==2024.1.1
626
+ pytorch-ignite==0.5.0.post2
627
+ pytorch-lightning==2.2.1
628
+ pytz==2023.3.post1
629
+ pytz==2024.1
630
+ pyu2f==0.1.5
631
+ pyviz_comms==3.0.2
632
+ pyzmq==24.0.1
633
+ pyzmq==25.1.2
634
+ qgrid==1.3.1
635
+ qtconsole==5.5.1
636
+ quantecon==0.7.2
637
+ qudida==0.0.4
638
+ raft-dask==23.8.0
639
+ rasterio==1.3.9
640
+ rasterstats==0.19.0
641
+ ray-cpp==2.9.0
642
+ ray==2.9.0
643
+ referencing==0.32.1
644
+ regex==2023.12.25
645
+ requests-oauthlib==1.3.1
646
+ requests-toolbelt==0.10.1
647
+ requests==2.31.0
648
+ retrying==1.3.3
649
+ retrying==1.3.4
650
+ rfc3339-validator==0.1.4
651
+ rfc3986-validator==0.1.1
652
+ rgf-python==3.12.0
653
+ rich-click==1.7.4
654
+ rich==13.7.0
655
+ rich==13.7.1
656
+ rmm==23.8.0
657
+ rope==1.13.0
658
+ rpds-py==0.16.2
659
+ rsa==4.9
660
+ ruamel-yaml-conda==0.15.100
661
+ ruamel.yaml.clib==0.2.7
662
+ ruamel.yaml==0.17.40
663
+ s2sphere==0.2.5
664
+ s3fs==2024.2.0
665
+ s3transfer==0.6.2
666
+ safetensors==0.4.2
667
+ scattertext==0.1.19
668
+ scikit-image==0.22.0
669
+ scikit-learn-intelex==2024.2.0
670
+ scikit-learn==1.2.2
671
+ scikit-multilearn==0.2.0
672
+ scikit-optimize==0.10.1
673
+ scikit-plot==0.3.7
674
+ scikit-surprise==1.1.3
675
+ scipy==1.11.4
676
+ scipy==1.12.0
677
+ seaborn==0.12.2
678
+ segment_anything==1.0
679
+ segregation==2.5
680
+ semver==3.0.2
681
+ sentencepiece==0.2.0
682
+ sentry-sdk==1.44.1
683
+ setproctitle==1.3.3
684
+ setuptools-git==1.2
685
+ setuptools-scm==8.0.4
686
+ setuptools==69.0.3
687
+ shap==0.44.1
688
+ shapely==2.0.3
689
+ shellingham==1.5.4
690
+ shtab==1.7.1
691
+ simpervisor==1.0.0
692
+ simplejson==3.19.2
693
+ six==1.16.0
694
+ sklearn-pandas==2.2.0
695
+ slicer==0.0.7
696
+ smart-open==6.4.0
697
+ smmap==5.0.1
698
+ sniffio==1.3.0
699
+ snowballstemmer==2.2.0
700
+ snuggs==1.4.7
701
+ sortedcontainers==2.4.0
702
+ soundfile==0.12.1
703
+ soupsieve==2.5
704
+ soxr==0.3.7
705
+ spacy-legacy==3.0.12
706
+ spacy-loggers==1.0.5
707
+ spacy==3.7.2
708
+ spaghetti==1.7.5.post1
709
+ spectral==0.23.1
710
+ spglm==1.1.0
711
+ sphinx-rtd-theme==0.2.4
712
+ spint==1.0.7
713
+ splot==1.1.5.post1
714
+ spopt==0.6.0
715
+ spreg==1.4.2
716
+ spvcm==0.3.0
717
+ sqlparse==0.4.4
718
+ squarify==0.4.3
719
+ srsly==2.4.8
720
+ stable-baselines3==2.1.0
721
+ stack-data==0.6.2
722
+ stack-data==0.6.3
723
+ stanio==0.5.0
724
+ starlette==0.32.0.post1
725
+ statsmodels==0.14.1
726
+ stemming==1.0.1
727
+ stop-words==2018.7.23
728
+ stopit==1.1.2
729
+ stumpy==1.12.0
730
+ sympy==1.12
731
+ tables==3.9.2
732
+ tabulate==0.9.0
733
+ tangled-up-in-unicode==0.2.0
734
+ tbb==2021.12.0
735
+ tblib==3.0.0
736
+ tenacity==8.2.3
737
+ tensorboard-data-server==0.7.2
738
+ tensorboard-plugin-profile==2.15.0
739
+ tensorboard==2.15.1
740
+ tensorboardX==2.6.2.2
741
+ tensorflow-cloud==0.1.16
742
+ tensorflow-datasets==4.9.4
743
+ tensorflow-decision-forests==1.8.1
744
+ tensorflow-estimator==2.15.0
745
+ tensorflow-hub==0.16.1
746
+ tensorflow-io-gcs-filesystem==0.35.0
747
+ tensorflow-io==0.35.0
748
+ tensorflow-metadata==0.14.0
749
+ tensorflow-probability==0.23.0
750
+ tensorflow-serving-api==2.14.1
751
+ tensorflow-text==2.15.0
752
+ tensorflow-transform==0.14.0
753
+ tensorflow==2.15.0
754
+ tensorstore==0.1.56
755
+ termcolor==2.4.0
756
+ terminado==0.18.0
757
+ testpath==0.6.0
758
+ text-unidecode==1.3
759
+ textblob==0.18.0.post0
760
+ texttable==1.7.0
761
+ tf_keras==2.15.1
762
+ tfp-nightly==0.24.0.dev0
763
+ thinc==8.2.2
764
+ threadpoolctl==3.2.0
765
+ tifffile==2023.12.9
766
+ timm==0.9.16
767
+ tinycss2==1.2.1
768
+ tobler==0.11.2
769
+ tokenizers==0.15.2
770
+ toml==0.10.2
771
+ tomli==2.0.1
772
+ tomlkit==0.12.4
773
+ toolz==0.12.1
774
+ torch==2.1.2
775
+ torchaudio==2.1.2
776
+ torchdata==0.7.1
777
+ torchinfo==1.8.0
778
+ torchmetrics==1.3.2
779
+ torchtext==0.16.2
780
+ torchvision==0.16.2
781
+ tornado==6.3.3
782
+ tqdm==4.66.1
783
+ traceml==1.0.8
784
+ traitlets==5.9.0
785
+ traittypes==0.2.1
786
+ transformers==4.39.3
787
+ treelite-runtime==3.2.0
788
+ treelite==3.2.0
789
+ trl==0.8.1
790
+ truststore==0.8.0
791
+ trx-python==0.2.9
792
+ tsfresh==0.20.2
793
+ typeguard==4.1.5
794
+ typer==0.9.0
795
+ typer==0.9.4
796
+ types-python-dateutil==2.8.19.20240106
797
+ typing-inspect==0.9.0
798
+ typing-utils==0.1.0
799
+ typing_extensions==4.9.0
800
+ tyro==0.8.3
801
+ tzdata==2023.4
802
+ uc-micro-py==1.0.3
803
+ ucx-py==0.33.0
804
+ ujson==5.9.0
805
+ umap-learn==0.5.5
806
+ unicodedata2==15.1.0
807
+ update-checker==0.18.0
808
+ uri-template==1.3.0
809
+ uritemplate==3.0.1
810
+ urllib3==1.26.18
811
+ urllib3==2.1.0
812
+ urwid==2.6.10
813
+ urwid_readline==0.14
814
+ uvicorn==0.25.0
815
+ uvloop==0.19.0
816
+ vaex-astro==0.9.3
817
+ vaex-core==4.17.1
818
+ vaex-hdf5==0.14.1
819
+ vaex-jupyter==0.8.2
820
+ vaex-ml==0.18.3
821
+ vaex-server==0.9.0
822
+ vaex-viz==0.5.4
823
+ vaex==4.17.0
824
+ vec_noise==1.1.4
825
+ vecstack==0.4.0
826
+ virtualenv==20.21.0
827
+ visions==0.7.5
828
+ vowpalwabbit==9.9.0
829
+ vtk==9.3.0
830
+ wandb==0.16.5
831
+ wasabi==1.1.2
832
+ watchfiles==0.21.0
833
+ wavio==0.0.8
834
+ wcwidth==0.2.13
835
+ weasel==0.3.4
836
+ webcolors==1.13
837
+ webencodings==0.5.1
838
+ websocket-client==1.7.0
839
+ websockets==12.0
840
+ wfdb==4.1.2
841
+ whatthepatch==1.0.5
842
+ wheel==0.42.0
843
+ widgetsnbextension==3.6.6
844
+ witwidget==1.8.1
845
+ woodwork==0.29.0
846
+ wordcloud==1.9.3
847
+ wordsegment==1.3.1
848
+ wrapt==1.14.1
849
+ xarray-einstats==0.7.0
850
+ xarray==2024.3.0
851
+ xgboost==2.0.3
852
+ xvfbwrapper==0.2.9
853
+ xxhash==3.4.1
854
+ xyzservices==2023.10.1
855
+ y-py==0.6.2
856
+ yapf==0.40.2
857
+ yarl==1.9.3
858
+ yarl==1.9.4
859
+ ydata-profiling==4.6.4
860
+ yellowbrick==1.5
861
+ ypy-websocket==0.8.4
862
+ zict==3.0.0
863
+ zipp==3.17.0
864
+ zstandard==0.22.0
wandb/run-20240409_215743-953wtybl/files/wandb-metadata.json ADDED
@@ -0,0 +1,66 @@
1
+ {
2
+ "os": "Linux-5.15.133+-x86_64-with-glibc2.31",
3
+ "python": "3.10.13",
4
+ "heartbeatAt": "2024-04-09T21:57:44.570411",
5
+ "startedAt": "2024-04-09T21:57:43.901624",
6
+ "docker": null,
7
+ "cuda": null,
8
+ "args": [],
9
+ "state": "running",
10
+ "program": "kaggle.ipynb",
11
+ "codePathLocal": null,
12
+ "root": "/kaggle/working",
13
+ "host": "6e44b39f6877",
14
+ "username": "root",
15
+ "executable": "/opt/conda/bin/python3.10",
16
+ "cpu_count": 2,
17
+ "cpu_count_logical": 4,
18
+ "cpu_freq": {
19
+ "current": 2000.152,
20
+ "min": 0.0,
21
+ "max": 0.0
22
+ },
23
+ "cpu_freq_per_core": [
24
+ {
25
+ "current": 2000.152,
26
+ "min": 0.0,
27
+ "max": 0.0
28
+ },
29
+ {
30
+ "current": 2000.152,
31
+ "min": 0.0,
32
+ "max": 0.0
33
+ },
34
+ {
35
+ "current": 2000.152,
36
+ "min": 0.0,
37
+ "max": 0.0
38
+ },
39
+ {
40
+ "current": 2000.152,
41
+ "min": 0.0,
42
+ "max": 0.0
43
+ }
44
+ ],
45
+ "disk": {
46
+ "/": {
47
+ "total": 8062.387607574463,
48
+ "used": 5569.163803100586
49
+ }
50
+ },
51
+ "gpu": "Tesla T4",
52
+ "gpu_count": 2,
53
+ "gpu_devices": [
54
+ {
55
+ "name": "Tesla T4",
56
+ "memory_total": 16106127360
57
+ },
58
+ {
59
+ "name": "Tesla T4",
60
+ "memory_total": 16106127360
61
+ }
62
+ ],
63
+ "memory": {
64
+ "total": 31.357559204101562
65
+ }
66
+ }
wandb/run-20240409_215743-953wtybl/files/wandb-summary.json ADDED
@@ -0,0 +1 @@
1
+ {"train/loss": 2.8502, "train/grad_norm": 0.0, "train/learning_rate": 2.857142857142857e-07, "train/epoch": 0.0, "train/global_step": 600, "_timestamp": 1712700205.542545, "_runtime": 341.634428024292, "_step": 5, "_wandb": {"runtime": 386}}
wandb/run-20240409_215743-953wtybl/logs/debug-internal.log ADDED
@@ -0,0 +1,400 @@
1
+ 2024-04-09 21:57:43,909 INFO StreamThr :311 [internal.py:wandb_internal():86] W&B internal server running at pid: 311, started at: 2024-04-09 21:57:43.908396
2
+ 2024-04-09 21:57:43,910 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: status
3
+ 2024-04-09 21:57:44,253 INFO WriterThread:311 [datastore.py:open_for_write():87] open: /kaggle/working/wandb/run-20240409_215743-953wtybl/run-953wtybl.wandb
4
+ 2024-04-09 21:57:44,253 DEBUG SenderThread:311 [sender.py:send():379] send: header
5
+ 2024-04-09 21:57:44,256 DEBUG SenderThread:311 [sender.py:send():379] send: run
6
+ 2024-04-09 21:57:44,435 INFO SenderThread:311 [dir_watcher.py:__init__():211] watching files in: /kaggle/working/wandb/run-20240409_215743-953wtybl/files
7
+ 2024-04-09 21:57:44,435 INFO SenderThread:311 [sender.py:_start_run_threads():1124] run started: 953wtybl with start time 1712699863.908117
8
+ 2024-04-09 21:57:44,444 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: check_version
9
+ 2024-04-09 21:57:44,444 DEBUG SenderThread:311 [sender.py:send_request():406] send_request: check_version
10
+ 2024-04-09 21:57:44,544 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: run_start
11
+ 2024-04-09 21:57:44,555 DEBUG HandlerThread:311 [system_info.py:__init__():26] System info init
12
+ 2024-04-09 21:57:44,555 DEBUG HandlerThread:311 [system_info.py:__init__():41] System info init done
13
+ 2024-04-09 21:57:44,555 INFO HandlerThread:311 [system_monitor.py:start():194] Starting system monitor
14
+ 2024-04-09 21:57:44,555 INFO SystemMonitor:311 [system_monitor.py:_start():158] Starting system asset monitoring threads
15
+ 2024-04-09 21:57:44,556 INFO HandlerThread:311 [system_monitor.py:probe():214] Collecting system info
16
+ 2024-04-09 21:57:44,556 INFO SystemMonitor:311 [interfaces.py:start():190] Started cpu monitoring
17
+ 2024-04-09 21:57:44,557 INFO SystemMonitor:311 [interfaces.py:start():190] Started disk monitoring
18
+ 2024-04-09 21:57:44,558 INFO SystemMonitor:311 [interfaces.py:start():190] Started gpu monitoring
19
+ 2024-04-09 21:57:44,559 INFO SystemMonitor:311 [interfaces.py:start():190] Started memory monitoring
20
+ 2024-04-09 21:57:44,560 INFO SystemMonitor:311 [interfaces.py:start():190] Started network monitoring
21
+ 2024-04-09 21:57:44,570 DEBUG HandlerThread:311 [system_info.py:probe():150] Probing system
22
+ 2024-04-09 21:57:44,572 DEBUG HandlerThread:311 [gitlib.py:_init_repo():56] git repository is invalid
23
+ 2024-04-09 21:57:44,572 DEBUG HandlerThread:311 [system_info.py:probe():198] Probing system done
24
+ 2024-04-09 21:57:44,572 DEBUG HandlerThread:311 [system_monitor.py:probe():223] {'os': 'Linux-5.15.133+-x86_64-with-glibc2.31', 'python': '3.10.13', 'heartbeatAt': '2024-04-09T21:57:44.570411', 'startedAt': '2024-04-09T21:57:43.901624', 'docker': None, 'cuda': None, 'args': (), 'state': 'running', 'program': 'kaggle.ipynb', 'codePathLocal': None, 'root': '/kaggle/working', 'host': '6e44b39f6877', 'username': 'root', 'executable': '/opt/conda/bin/python3.10', 'cpu_count': 2, 'cpu_count_logical': 4, 'cpu_freq': {'current': 2000.152, 'min': 0.0, 'max': 0.0}, 'cpu_freq_per_core': [{'current': 2000.152, 'min': 0.0, 'max': 0.0}, {'current': 2000.152, 'min': 0.0, 'max': 0.0}, {'current': 2000.152, 'min': 0.0, 'max': 0.0}, {'current': 2000.152, 'min': 0.0, 'max': 0.0}], 'disk': {'/': {'total': 8062.387607574463, 'used': 5569.163803100586}}, 'gpu': 'Tesla T4', 'gpu_count': 2, 'gpu_devices': [{'name': 'Tesla T4', 'memory_total': 16106127360}, {'name': 'Tesla T4', 'memory_total': 16106127360}], 'memory': {'total': 31.357559204101562}}
25
+ 2024-04-09 21:57:44,572 INFO HandlerThread:311 [system_monitor.py:probe():224] Finished collecting system info
26
+ 2024-04-09 21:57:44,572 INFO HandlerThread:311 [system_monitor.py:probe():227] Publishing system info
27
+ 2024-04-09 21:57:44,572 DEBUG HandlerThread:311 [system_info.py:_save_conda():207] Saving list of conda packages installed into the current environment
28
+ 2024-04-09 21:57:45,437 INFO Thread-12 :311 [dir_watcher.py:_on_file_created():271] file/dir created: /kaggle/working/wandb/run-20240409_215743-953wtybl/files/conda-environment.yaml
29
+ 2024-04-09 21:57:59,586 ERROR HandlerThread:311 [system_info.py:_save_conda():221] Error saving conda packages: Command '['conda', 'env', 'export']' timed out after 15 seconds
30
+ Traceback (most recent call last):
31
+ File "/opt/conda/lib/python3.10/site-packages/wandb/sdk/internal/system/system_info.py", line 214, in _save_conda
32
+ subprocess.call(
33
+ File "/opt/conda/lib/python3.10/subprocess.py", line 347, in call
34
+ return p.wait(timeout=timeout)
35
+ File "/opt/conda/lib/python3.10/subprocess.py", line 1209, in wait
36
+ return self._wait(timeout=timeout)
37
+ File "/opt/conda/lib/python3.10/subprocess.py", line 1951, in _wait
38
+ raise TimeoutExpired(self.args, timeout)
39
+ subprocess.TimeoutExpired: Command '['conda', 'env', 'export']' timed out after 15 seconds
40
+ 2024-04-09 21:57:59,587 DEBUG HandlerThread:311 [system_info.py:_save_conda():222] Saving conda packages done
41
+ 2024-04-09 21:57:59,588 INFO HandlerThread:311 [system_monitor.py:probe():229] Finished publishing system info
42
+ 2024-04-09 21:57:59,594 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: status_report
43
+ 2024-04-09 21:57:59,595 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: keepalive
44
+ 2024-04-09 21:57:59,595 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: status_report
45
+ 2024-04-09 21:57:59,595 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: keepalive
46
+ 2024-04-09 21:57:59,595 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: status_report
47
+ 2024-04-09 21:57:59,596 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: keepalive
48
+ 2024-04-09 21:57:59,596 DEBUG SenderThread:311 [sender.py:send():379] send: files
49
+ 2024-04-09 21:57:59,596 INFO SenderThread:311 [sender.py:_save_file():1390] saving file wandb-metadata.json with policy now
50
+ 2024-04-09 21:57:59,960 INFO wandb-upload_0:311 [upload_job.py:push():131] Uploaded file /tmp/tmpvft6cwy7wandb/ii0dkg4d-wandb-metadata.json
51
+ 2024-04-09 21:58:00,441 INFO Thread-12 :311 [dir_watcher.py:_on_file_created():271] file/dir created: /kaggle/working/wandb/run-20240409_215743-953wtybl/files/wandb-metadata.json
52
+ 2024-04-09 21:58:00,562 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: python_packages
53
+ 2024-04-09 21:58:00,562 DEBUG SenderThread:311 [sender.py:send_request():406] send_request: python_packages
54
+ 2024-04-09 21:58:00,566 DEBUG SenderThread:311 [sender.py:send():379] send: telemetry
55
+ 2024-04-09 21:58:00,576 DEBUG SenderThread:311 [sender.py:send():379] send: config
56
+ 2024-04-09 21:58:00,579 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: stop_status
57
+ 2024-04-09 21:58:00,579 DEBUG SenderThread:311 [sender.py:send():379] send: metric
58
+ 2024-04-09 21:58:00,579 DEBUG SenderThread:311 [sender.py:send():379] send: telemetry
59
+ 2024-04-09 21:58:00,580 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: internal_messages
60
+ 2024-04-09 21:58:00,581 DEBUG SenderThread:311 [sender.py:send_request():406] send_request: stop_status
61
+ 2024-04-09 21:58:00,730 DEBUG SenderThread:311 [sender.py:send():379] send: metric
62
+ 2024-04-09 21:58:00,730 WARNING SenderThread:311 [sender.py:send_metric():1341] Seen metric with glob (shouldn't happen)
63
+ 2024-04-09 21:58:00,730 DEBUG SenderThread:311 [sender.py:send():379] send: telemetry
64
+ 2024-04-09 21:58:01,441 INFO Thread-12 :311 [dir_watcher.py:_on_file_created():271] file/dir created: /kaggle/working/wandb/run-20240409_215743-953wtybl/files/requirements.txt
65
+ 2024-04-09 21:58:01,442 INFO Thread-12 :311 [dir_watcher.py:_on_file_created():271] file/dir created: /kaggle/working/wandb/run-20240409_215743-953wtybl/files/output.log
66
+ 2024-04-09 21:58:03,442 INFO Thread-12 :311 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240409_215743-953wtybl/files/output.log
67
+ 2024-04-09 21:58:04,758 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: status_report
68
+ 2024-04-09 21:58:09,758 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: status_report
69
+ 2024-04-09 21:58:14,764 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: status_report
70
+ 2024-04-09 21:58:15,448 INFO Thread-12 :311 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240409_215743-953wtybl/files/config.yaml
71
+ 2024-04-09 21:58:15,565 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: stop_status
72
+ 2024-04-09 21:58:15,566 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: internal_messages
73
+ 2024-04-09 21:58:15,566 DEBUG SenderThread:311 [sender.py:send_request():406] send_request: stop_status
74
+ 2024-04-09 21:58:20,729 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: status_report
75
+ 2024-04-09 21:58:25,730 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: status_report
76
+ 2024-04-09 21:58:30,564 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: stop_status
77
+ 2024-04-09 21:58:30,564 DEBUG SenderThread:311 [sender.py:send_request():406] send_request: stop_status
78
+ 2024-04-09 21:58:30,605 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: internal_messages
79
+ 2024-04-09 21:58:31,675 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: status_report
80
+ 2024-04-09 21:58:36,676 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: status_report
81
+ 2024-04-09 21:58:41,677 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: status_report
82
+ 2024-04-09 21:58:44,561 DEBUG SystemMonitor:311 [system_monitor.py:_start():172] Starting system metrics aggregation loop
83
+ 2024-04-09 21:58:44,562 DEBUG SenderThread:311 [sender.py:send():379] send: stats
84
+ 2024-04-09 21:58:45,563 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: stop_status
85
+ 2024-04-09 21:58:45,564 DEBUG SenderThread:311 [sender.py:send_request():406] send_request: stop_status
86
+ 2024-04-09 21:58:45,604 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: internal_messages
87
+ 2024-04-09 21:58:47,649 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: status_report
88
+ 2024-04-09 21:58:52,650 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: status_report
89
+ 2024-04-09 21:58:54,897 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: partial_history
90
+ 2024-04-09 21:58:54,899 DEBUG SenderThread:311 [sender.py:send():379] send: metric
91
+ 2024-04-09 21:58:54,899 DEBUG SenderThread:311 [sender.py:send():379] send: metric
92
+ 2024-04-09 21:58:54,899 DEBUG SenderThread:311 [sender.py:send():379] send: metric
93
+ 2024-04-09 21:58:54,899 DEBUG SenderThread:311 [sender.py:send():379] send: metric
94
+ 2024-04-09 21:58:54,899 DEBUG SenderThread:311 [sender.py:send():379] send: history
95
+ 2024-04-09 21:58:54,900 DEBUG SenderThread:311 [sender.py:send_request():406] send_request: summary_record
96
+ 2024-04-09 21:58:54,901 INFO SenderThread:311 [sender.py:_save_file():1390] saving file wandb-summary.json with policy end
97
+ 2024-04-09 21:58:55,467 INFO Thread-12 :311 [dir_watcher.py:_on_file_created():271] file/dir created: /kaggle/working/wandb/run-20240409_215743-953wtybl/files/wandb-summary.json
98
+ 2024-04-09 21:58:57,467 INFO Thread-12 :311 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240409_215743-953wtybl/files/output.log
99
+ 2024-04-09 21:58:58,180 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: status_report
100
+ 2024-04-09 21:59:00,563 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: stop_status
101
+ 2024-04-09 21:59:00,564 DEBUG SenderThread:311 [sender.py:send_request():406] send_request: stop_status
102
+ 2024-04-09 21:59:00,591 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: internal_messages
103
+ 2024-04-09 21:59:03,662 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: status_report
104
+ 2024-04-09 21:59:08,663 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: status_report
105
+ 2024-04-09 21:59:13,664 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: status_report
106
+ 2024-04-09 21:59:14,563 DEBUG SenderThread:311 [sender.py:send():379] send: stats
107
+ 2024-04-09 21:59:15,563 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: stop_status
108
+ 2024-04-09 21:59:15,564 DEBUG SenderThread:311 [sender.py:send_request():406] send_request: stop_status
109
+ 2024-04-09 21:59:15,605 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: internal_messages
110
+ 2024-04-09 21:59:18,726 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: status_report
111
+ 2024-04-09 21:59:19,476 INFO Thread-12 :311 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240409_215743-953wtybl/files/config.yaml
112
+ 2024-04-09 21:59:23,873 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: status_report
113
+ 2024-04-09 21:59:28,874 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: status_report
114
+ 2024-04-09 21:59:30,564 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: stop_status
115
+ 2024-04-09 21:59:30,564 DEBUG SenderThread:311 [sender.py:send_request():406] send_request: stop_status
116
+ 2024-04-09 21:59:30,604 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: internal_messages
117
+ 2024-04-09 21:59:34,018 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: status_report
118
+ 2024-04-09 21:59:39,019 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: status_report
119
+ 2024-04-09 21:59:44,020 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: status_report
120
+ 2024-04-09 21:59:44,564 DEBUG SenderThread:311 [sender.py:send():379] send: stats
121
+ 2024-04-09 21:59:45,564 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: stop_status
122
+ 2024-04-09 21:59:45,564 DEBUG SenderThread:311 [sender.py:send_request():406] send_request: stop_status
123
+ 2024-04-09 21:59:45,605 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: internal_messages
124
+ 2024-04-09 21:59:49,730 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: status_report
125
+ 2024-04-09 21:59:52,746 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: partial_history
126
+ 2024-04-09 21:59:52,748 DEBUG SenderThread:311 [sender.py:send():379] send: history
127
+ 2024-04-09 21:59:52,748 DEBUG SenderThread:311 [sender.py:send_request():406] send_request: summary_record
128
+ 2024-04-09 21:59:52,748 INFO SenderThread:311 [sender.py:_save_file():1390] saving file wandb-summary.json with policy end
129
+ 2024-04-09 21:59:53,489 INFO Thread-12 :311 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240409_215743-953wtybl/files/wandb-summary.json
130
+ 2024-04-09 21:59:55,000 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: status_report
131
+ 2024-04-09 21:59:55,490 INFO Thread-12 :311 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240409_215743-953wtybl/files/output.log
132
+ 2024-04-09 22:00:00,001 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: status_report
133
+ 2024-04-09 22:00:00,564 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: stop_status
134
+ 2024-04-09 22:00:00,564 DEBUG SenderThread:311 [sender.py:send_request():406] send_request: stop_status
135
+ 2024-04-09 22:00:00,566 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: internal_messages
136
+ 2024-04-09 22:00:05,675 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: status_report
137
+ 2024-04-09 22:00:10,676 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: status_report
138
+ 2024-04-09 22:00:14,565 DEBUG SenderThread:311 [sender.py:send():379] send: stats
139
+ 2024-04-09 22:00:15,572 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: stop_status
140
+ 2024-04-09 22:00:15,572 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: internal_messages
141
+ 2024-04-09 22:00:15,572 DEBUG SenderThread:311 [sender.py:send_request():406] send_request: stop_status
142
+ 2024-04-09 22:00:15,684 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: status_report
143
+ 2024-04-09 22:00:20,684 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: status_report
144
+ 2024-04-09 22:00:25,685 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: status_report
145
+ 2024-04-09 22:00:30,566 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: internal_messages
146
+ 2024-04-09 22:00:30,568 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: stop_status
147
+ 2024-04-09 22:00:30,569 DEBUG SenderThread:311 [sender.py:send_request():406] send_request: stop_status
148
+ 2024-04-09 22:00:30,704 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: status_report
149
+ 2024-04-09 22:00:35,706 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: status_report
150
+ 2024-04-09 22:00:40,706 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: status_report
151
+ 2024-04-09 22:00:44,566 DEBUG SenderThread:311 [sender.py:send():379] send: stats
152
+ 2024-04-09 22:00:45,568 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: internal_messages
153
+ 2024-04-09 22:00:45,569 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: stop_status
154
+ 2024-04-09 22:00:45,570 DEBUG SenderThread:311 [sender.py:send_request():406] send_request: stop_status
155
+ 2024-04-09 22:00:45,704 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: partial_history
156
+ 2024-04-09 22:00:45,706 DEBUG SenderThread:311 [sender.py:send():379] send: history
157
+ 2024-04-09 22:00:45,706 DEBUG SenderThread:311 [sender.py:send_request():406] send_request: summary_record
158
+ 2024-04-09 22:00:45,706 INFO SenderThread:311 [sender.py:_save_file():1390] saving file wandb-summary.json with policy end
159
+ 2024-04-09 22:00:45,707 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: status_report
160
+ 2024-04-09 22:00:46,509 INFO Thread-12 :311 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240409_215743-953wtybl/files/wandb-summary.json
161
+ 2024-04-09 22:00:47,510 INFO Thread-12 :311 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240409_215743-953wtybl/files/output.log
162
+ 2024-04-09 22:00:50,959 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: status_report
163
+ 2024-04-09 22:00:55,960 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: status_report
164
+ 2024-04-09 22:01:00,566 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: internal_messages
165
+ 2024-04-09 22:01:00,569 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: stop_status
166
+ 2024-04-09 22:01:00,569 DEBUG SenderThread:311 [sender.py:send_request():406] send_request: stop_status
167
+ 2024-04-09 22:01:01,685 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: status_report
168
+ 2024-04-09 22:01:06,686 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: status_report
169
+ 2024-04-09 22:01:11,687 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: status_report
170
+ 2024-04-09 22:01:14,567 DEBUG SenderThread:311 [sender.py:send():379] send: stats
171
+ 2024-04-09 22:01:15,566 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: internal_messages
172
+ 2024-04-09 22:01:15,569 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: stop_status
173
+ 2024-04-09 22:01:15,569 DEBUG SenderThread:311 [sender.py:send_request():406] send_request: stop_status
174
+ 2024-04-09 22:01:16,731 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: status_report
175
+ 2024-04-09 22:01:21,732 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: status_report
176
+ 2024-04-09 22:01:26,733 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: status_report
177
+ 2024-04-09 22:01:30,567 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: internal_messages
178
+ 2024-04-09 22:01:30,569 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: stop_status
179
+ 2024-04-09 22:01:30,570 DEBUG SenderThread:311 [sender.py:send_request():406] send_request: stop_status
180
+ 2024-04-09 22:01:32,643 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: status_report
181
+ 2024-04-09 22:01:37,644 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: status_report
182
+ 2024-04-09 22:01:39,267 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: partial_history
183
+ 2024-04-09 22:01:39,268 DEBUG SenderThread:311 [sender.py:send():379] send: history
184
+ 2024-04-09 22:01:39,269 DEBUG SenderThread:311 [sender.py:send_request():406] send_request: summary_record
185
+ 2024-04-09 22:01:39,271 INFO SenderThread:311 [sender.py:_save_file():1390] saving file wandb-summary.json with policy end
186
+ 2024-04-09 22:01:39,530 INFO Thread-12 :311 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240409_215743-953wtybl/files/wandb-summary.json
187
+ 2024-04-09 22:01:41,531 INFO Thread-12 :311 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240409_215743-953wtybl/files/output.log
188
+ 2024-04-09 22:01:43,542 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: status_report
189
+ 2024-04-09 22:01:44,568 DEBUG SenderThread:311 [sender.py:send():379] send: stats
190
+ 2024-04-09 22:01:45,567 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: internal_messages
191
+ 2024-04-09 22:01:45,569 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: stop_status
192
+ 2024-04-09 22:01:45,569 DEBUG SenderThread:311 [sender.py:send_request():406] send_request: stop_status
193
+ 2024-04-09 22:01:48,729 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: status_report
194
+ 2024-04-09 22:01:53,730 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: status_report
195
+ 2024-04-09 22:01:58,731 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: status_report
196
+ 2024-04-09 22:02:00,567 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: internal_messages
197
+ 2024-04-09 22:02:00,570 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: stop_status
198
+ 2024-04-09 22:02:00,570 DEBUG SenderThread:311 [sender.py:send_request():406] send_request: stop_status
199
+ 2024-04-09 22:02:04,681 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: status_report
200
+ 2024-04-09 22:02:09,682 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: status_report
201
+ 2024-04-09 22:02:14,569 DEBUG SenderThread:311 [sender.py:send():379] send: stats
202
+ 2024-04-09 22:02:15,567 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: internal_messages
203
+ 2024-04-09 22:02:15,568 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: status_report
204
+ 2024-04-09 22:02:15,570 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: stop_status
205
+ 2024-04-09 22:02:15,570 DEBUG SenderThread:311 [sender.py:send_request():406] send_request: stop_status
206
+ 2024-04-09 22:02:20,676 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: status_report
207
+ 2024-04-09 22:02:25,677 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: status_report
208
+ 2024-04-09 22:02:30,568 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: internal_messages
209
+ 2024-04-09 22:02:30,570 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: stop_status
210
+ 2024-04-09 22:02:30,570 DEBUG SenderThread:311 [sender.py:send_request():406] send_request: stop_status
211
+ 2024-04-09 22:02:30,687 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: status_report
212
+ 2024-04-09 22:02:34,022 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: partial_history
213
+ 2024-04-09 22:02:34,023 DEBUG SenderThread:311 [sender.py:send():379] send: history
214
+ 2024-04-09 22:02:34,023 DEBUG SenderThread:311 [sender.py:send_request():406] send_request: summary_record
215
+ 2024-04-09 22:02:34,025 INFO SenderThread:311 [sender.py:_save_file():1390] saving file wandb-summary.json with policy end
216
+ 2024-04-09 22:02:34,551 INFO Thread-12 :311 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240409_215743-953wtybl/files/wandb-summary.json
217
+ 2024-04-09 22:02:35,551 INFO Thread-12 :311 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240409_215743-953wtybl/files/output.log
218
+ 2024-04-09 22:02:36,273 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: status_report
219
+ 2024-04-09 22:02:41,274 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: status_report
220
+ 2024-04-09 22:02:44,570 DEBUG SenderThread:311 [sender.py:send():379] send: stats
221
+ 2024-04-09 22:02:45,567 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: internal_messages
222
+ 2024-04-09 22:02:45,570 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: stop_status
223
+ 2024-04-09 22:02:45,570 DEBUG SenderThread:311 [sender.py:send_request():406] send_request: stop_status
224
+ 2024-04-09 22:02:46,736 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: status_report
225
+ 2024-04-09 22:02:51,737 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: status_report
226
+ 2024-04-09 22:02:56,738 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: status_report
227
+ 2024-04-09 22:03:00,586 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: internal_messages
228
+ 2024-04-09 22:03:00,587 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: stop_status
229
+ 2024-04-09 22:03:00,588 DEBUG SenderThread:311 [sender.py:send_request():406] send_request: stop_status
230
+ 2024-04-09 22:03:02,683 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: status_report
231
+ 2024-04-09 22:03:07,684 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: status_report
232
+ 2024-04-09 22:03:12,685 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: status_report
233
+ 2024-04-09 22:03:14,570 DEBUG SenderThread:311 [sender.py:send():379] send: stats
234
+ 2024-04-09 22:03:15,573 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: stop_status
235
+ 2024-04-09 22:03:15,574 DEBUG SenderThread:311 [sender.py:send_request():406] send_request: stop_status
236
+ 2024-04-09 22:03:15,577 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: internal_messages
237
+ 2024-04-09 22:03:18,664 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: status_report
238
+ 2024-04-09 22:03:23,665 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: status_report
239
+ 2024-04-09 22:03:25,543 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: partial_history
240
+ 2024-04-09 22:03:25,544 DEBUG SenderThread:311 [sender.py:send():379] send: history
241
+ 2024-04-09 22:03:25,544 DEBUG SenderThread:311 [sender.py:send_request():406] send_request: summary_record
242
+ 2024-04-09 22:03:25,545 INFO SenderThread:311 [sender.py:_save_file():1390] saving file wandb-summary.json with policy end
243
+ 2024-04-09 22:03:25,571 INFO Thread-12 :311 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240409_215743-953wtybl/files/wandb-summary.json
244
+ 2024-04-09 22:03:27,572 INFO Thread-12 :311 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240409_215743-953wtybl/files/output.log
245
+ 2024-04-09 22:03:28,792 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: status_report
246
+ 2024-04-09 22:03:30,573 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: stop_status
247
+ 2024-04-09 22:03:30,574 DEBUG SenderThread:311 [sender.py:send_request():406] send_request: stop_status
248
+ 2024-04-09 22:03:30,577 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: internal_messages
249
+ 2024-04-09 22:03:34,642 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: status_report
250
+ 2024-04-09 22:03:39,643 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: status_report
251
+ 2024-04-09 22:03:44,571 DEBUG SenderThread:311 [sender.py:send():379] send: stats
252
+ 2024-04-09 22:03:45,572 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: status_report
253
+ 2024-04-09 22:03:45,573 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: internal_messages
254
+ 2024-04-09 22:03:45,574 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: stop_status
255
+ 2024-04-09 22:03:45,574 DEBUG SenderThread:311 [sender.py:send_request():406] send_request: stop_status
256
+ 2024-04-09 22:03:50,702 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: status_report
257
+ 2024-04-09 22:03:55,703 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: status_report
258
+ 2024-04-09 22:04:00,574 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: internal_messages
259
+ 2024-04-09 22:04:00,574 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: stop_status
260
+ 2024-04-09 22:04:00,575 DEBUG SenderThread:311 [sender.py:send_request():406] send_request: stop_status
261
+ 2024-04-09 22:04:01,690 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: status_report
262
+ 2024-04-09 22:04:06,691 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: status_report
263
+ 2024-04-09 22:04:10,838 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: pause
264
+ 2024-04-09 22:04:10,838 INFO HandlerThread:311 [handler.py:handle_request_pause():708] stopping system metrics thread
265
+ 2024-04-09 22:04:10,838 INFO HandlerThread:311 [system_monitor.py:finish():203] Stopping system monitor
266
+ 2024-04-09 22:04:10,838 INFO HandlerThread:311 [interfaces.py:finish():202] Joined cpu monitor
267
+ 2024-04-09 22:04:10,839 INFO HandlerThread:311 [interfaces.py:finish():202] Joined disk monitor
268
+ 2024-04-09 22:04:10,839 DEBUG SystemMonitor:311 [system_monitor.py:_start():179] Finished system metrics aggregation loop
269
+ 2024-04-09 22:04:10,840 DEBUG SystemMonitor:311 [system_monitor.py:_start():183] Publishing last batch of metrics
270
+ 2024-04-09 22:04:10,850 INFO HandlerThread:311 [interfaces.py:finish():202] Joined gpu monitor
271
+ 2024-04-09 22:04:10,851 INFO HandlerThread:311 [interfaces.py:finish():202] Joined memory monitor
272
+ 2024-04-09 22:04:10,851 INFO HandlerThread:311 [interfaces.py:finish():202] Joined network monitor
273
+ 2024-04-09 22:04:10,851 DEBUG SenderThread:311 [sender.py:send():379] send: stats
274
+ 2024-04-09 22:04:11,852 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: status_report
275
+ 2024-04-09 22:04:14,618 DEBUG SenderThread:311 [sender.py:send():379] send: exit
276
+ 2024-04-09 22:04:14,618 INFO SenderThread:311 [sender.py:send_exit():586] handling exit code: 0
277
+ 2024-04-09 22:04:14,618 INFO SenderThread:311 [sender.py:send_exit():588] handling runtime: 386
278
+ 2024-04-09 22:04:14,620 INFO SenderThread:311 [sender.py:_save_file():1390] saving file wandb-summary.json with policy end
279
+ 2024-04-09 22:04:14,621 INFO SenderThread:311 [sender.py:send_exit():594] send defer
280
+ 2024-04-09 22:04:14,621 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: defer
281
+ 2024-04-09 22:04:14,621 INFO HandlerThread:311 [handler.py:handle_request_defer():172] handle defer: 0
282
+ 2024-04-09 22:04:14,621 DEBUG SenderThread:311 [sender.py:send_request():406] send_request: defer
283
+ 2024-04-09 22:04:14,621 INFO SenderThread:311 [sender.py:send_request_defer():610] handle sender defer: 0
284
+ 2024-04-09 22:04:14,621 INFO SenderThread:311 [sender.py:transition_state():614] send defer: 1
285
+ 2024-04-09 22:04:14,622 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: defer
286
+ 2024-04-09 22:04:14,622 INFO HandlerThread:311 [handler.py:handle_request_defer():172] handle defer: 1
287
+ 2024-04-09 22:04:14,622 DEBUG SenderThread:311 [sender.py:send_request():406] send_request: defer
288
+ 2024-04-09 22:04:14,622 INFO SenderThread:311 [sender.py:send_request_defer():610] handle sender defer: 1
289
+ 2024-04-09 22:04:14,622 INFO SenderThread:311 [sender.py:transition_state():614] send defer: 2
290
+ 2024-04-09 22:04:14,622 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: defer
291
+ 2024-04-09 22:04:14,622 INFO HandlerThread:311 [handler.py:handle_request_defer():172] handle defer: 2
292
+ 2024-04-09 22:04:14,622 DEBUG SenderThread:311 [sender.py:send_request():406] send_request: defer
293
+ 2024-04-09 22:04:14,622 INFO SenderThread:311 [sender.py:send_request_defer():610] handle sender defer: 2
294
+ 2024-04-09 22:04:14,622 INFO SenderThread:311 [sender.py:transition_state():614] send defer: 3
295
+ 2024-04-09 22:04:14,622 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: defer
296
+ 2024-04-09 22:04:14,623 INFO HandlerThread:311 [handler.py:handle_request_defer():172] handle defer: 3
297
+ 2024-04-09 22:04:14,623 DEBUG SenderThread:311 [sender.py:send_request():406] send_request: defer
298
+ 2024-04-09 22:04:14,623 INFO SenderThread:311 [sender.py:send_request_defer():610] handle sender defer: 3
299
+ 2024-04-09 22:04:14,623 INFO SenderThread:311 [sender.py:transition_state():614] send defer: 4
300
+ 2024-04-09 22:04:14,623 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: defer
301
+ 2024-04-09 22:04:14,623 INFO HandlerThread:311 [handler.py:handle_request_defer():172] handle defer: 4
302
+ 2024-04-09 22:04:14,623 DEBUG SenderThread:311 [sender.py:send_request():406] send_request: defer
303
+ 2024-04-09 22:04:14,623 INFO SenderThread:311 [sender.py:send_request_defer():610] handle sender defer: 4
304
+ 2024-04-09 22:04:14,623 INFO SenderThread:311 [sender.py:transition_state():614] send defer: 5
305
+ 2024-04-09 22:04:14,623 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: defer
306
+ 2024-04-09 22:04:14,623 INFO HandlerThread:311 [handler.py:handle_request_defer():172] handle defer: 5
307
+ 2024-04-09 22:04:14,624 DEBUG SenderThread:311 [sender.py:send():379] send: summary
308
+ 2024-04-09 22:04:14,624 INFO SenderThread:311 [sender.py:_save_file():1390] saving file wandb-summary.json with policy end
309
+ 2024-04-09 22:04:14,624 DEBUG SenderThread:311 [sender.py:send_request():406] send_request: defer
310
+ 2024-04-09 22:04:14,624 INFO SenderThread:311 [sender.py:send_request_defer():610] handle sender defer: 5
311
+ 2024-04-09 22:04:14,624 INFO SenderThread:311 [sender.py:transition_state():614] send defer: 6
312
+ 2024-04-09 22:04:14,625 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: defer
313
+ 2024-04-09 22:04:14,625 INFO HandlerThread:311 [handler.py:handle_request_defer():172] handle defer: 6
314
+ 2024-04-09 22:04:14,625 DEBUG SenderThread:311 [sender.py:send_request():406] send_request: defer
315
+ 2024-04-09 22:04:14,625 INFO SenderThread:311 [sender.py:send_request_defer():610] handle sender defer: 6
316
+ 2024-04-09 22:04:14,625 INFO SenderThread:311 [sender.py:transition_state():614] send defer: 7
317
+ 2024-04-09 22:04:14,625 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: status_report
318
+ 2024-04-09 22:04:14,625 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: defer
319
+ 2024-04-09 22:04:14,625 INFO HandlerThread:311 [handler.py:handle_request_defer():172] handle defer: 7
320
+ 2024-04-09 22:04:14,626 DEBUG SenderThread:311 [sender.py:send_request():406] send_request: defer
321
+ 2024-04-09 22:04:14,626 INFO SenderThread:311 [sender.py:send_request_defer():610] handle sender defer: 7
322
+ 2024-04-09 22:04:15,160 INFO SenderThread:311 [sender.py:transition_state():614] send defer: 8
323
+ 2024-04-09 22:04:15,160 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: defer
324
+ 2024-04-09 22:04:15,160 INFO HandlerThread:311 [handler.py:handle_request_defer():172] handle defer: 8
325
+ 2024-04-09 22:04:15,161 DEBUG SenderThread:311 [sender.py:send_request():406] send_request: defer
326
+ 2024-04-09 22:04:15,161 INFO SenderThread:311 [sender.py:send_request_defer():610] handle sender defer: 8
327
+ 2024-04-09 22:04:15,161 INFO SenderThread:311 [job_builder.py:build():318] Attempting to build job artifact
328
+ 2024-04-09 22:04:15,163 INFO SenderThread:311 [job_builder.py:_get_source_type():466] no source found
329
+ 2024-04-09 22:04:15,163 INFO SenderThread:311 [sender.py:transition_state():614] send defer: 9
330
+ 2024-04-09 22:04:15,163 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: defer
331
+ 2024-04-09 22:04:15,163 INFO HandlerThread:311 [handler.py:handle_request_defer():172] handle defer: 9
332
+ 2024-04-09 22:04:15,163 DEBUG SenderThread:311 [sender.py:send_request():406] send_request: defer
333
+ 2024-04-09 22:04:15,164 INFO SenderThread:311 [sender.py:send_request_defer():610] handle sender defer: 9
334
+ 2024-04-09 22:04:15,164 INFO SenderThread:311 [dir_watcher.py:finish():358] shutting down directory watcher
335
+ 2024-04-09 22:04:15,586 INFO SenderThread:311 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240409_215743-953wtybl/files/output.log
336
+ 2024-04-09 22:04:15,587 INFO SenderThread:311 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240409_215743-953wtybl/files/wandb-summary.json
337
+ 2024-04-09 22:04:15,587 INFO SenderThread:311 [dir_watcher.py:finish():388] scan: /kaggle/working/wandb/run-20240409_215743-953wtybl/files
338
+ 2024-04-09 22:04:15,587 INFO SenderThread:311 [dir_watcher.py:finish():402] scan save: /kaggle/working/wandb/run-20240409_215743-953wtybl/files/conda-environment.yaml conda-environment.yaml
339
+ 2024-04-09 22:04:15,587 INFO SenderThread:311 [dir_watcher.py:finish():402] scan save: /kaggle/working/wandb/run-20240409_215743-953wtybl/files/requirements.txt requirements.txt
340
+ 2024-04-09 22:04:15,587 INFO SenderThread:311 [dir_watcher.py:finish():402] scan save: /kaggle/working/wandb/run-20240409_215743-953wtybl/files/wandb-metadata.json wandb-metadata.json
341
+ 2024-04-09 22:04:15,587 INFO SenderThread:311 [dir_watcher.py:finish():402] scan save: /kaggle/working/wandb/run-20240409_215743-953wtybl/files/wandb-summary.json wandb-summary.json
342
+ 2024-04-09 22:04:15,588 INFO SenderThread:311 [dir_watcher.py:finish():402] scan save: /kaggle/working/wandb/run-20240409_215743-953wtybl/files/config.yaml config.yaml
343
+ 2024-04-09 22:04:15,594 INFO SenderThread:311 [dir_watcher.py:finish():402] scan save: /kaggle/working/wandb/run-20240409_215743-953wtybl/files/output.log output.log
344
+ 2024-04-09 22:04:15,597 INFO SenderThread:311 [sender.py:transition_state():614] send defer: 10
345
+ 2024-04-09 22:04:15,601 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: defer
346
+ 2024-04-09 22:04:15,601 INFO HandlerThread:311 [handler.py:handle_request_defer():172] handle defer: 10
347
+ 2024-04-09 22:04:15,601 DEBUG SenderThread:311 [sender.py:send_request():406] send_request: defer
348
+ 2024-04-09 22:04:15,601 INFO SenderThread:311 [sender.py:send_request_defer():610] handle sender defer: 10
349
+ 2024-04-09 22:04:15,601 INFO SenderThread:311 [file_pusher.py:finish():172] shutting down file pusher
350
+ 2024-04-09 22:04:15,618 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: poll_exit
351
+ 2024-04-09 22:04:15,619 DEBUG SenderThread:311 [sender.py:send_request():406] send_request: poll_exit
352
+ 2024-04-09 22:04:15,829 INFO wandb-upload_2:311 [upload_job.py:push():131] Uploaded file /kaggle/working/wandb/run-20240409_215743-953wtybl/files/config.yaml
353
+ 2024-04-09 22:04:15,857 INFO wandb-upload_1:311 [upload_job.py:push():131] Uploaded file /kaggle/working/wandb/run-20240409_215743-953wtybl/files/wandb-summary.json
354
+ 2024-04-09 22:04:15,912 INFO wandb-upload_3:311 [upload_job.py:push():131] Uploaded file /kaggle/working/wandb/run-20240409_215743-953wtybl/files/output.log
355
+ 2024-04-09 22:04:15,917 INFO wandb-upload_0:311 [upload_job.py:push():131] Uploaded file /kaggle/working/wandb/run-20240409_215743-953wtybl/files/requirements.txt
356
+ 2024-04-09 22:04:16,117 INFO Thread-11 (_thread_body):311 [sender.py:transition_state():614] send defer: 11
357
+ 2024-04-09 22:04:16,118 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: defer
358
+ 2024-04-09 22:04:16,118 INFO HandlerThread:311 [handler.py:handle_request_defer():172] handle defer: 11
359
+ 2024-04-09 22:04:16,118 DEBUG SenderThread:311 [sender.py:send_request():406] send_request: defer
360
+ 2024-04-09 22:04:16,118 INFO SenderThread:311 [sender.py:send_request_defer():610] handle sender defer: 11
361
+ 2024-04-09 22:04:16,118 INFO SenderThread:311 [file_pusher.py:join():178] waiting for file pusher
362
+ 2024-04-09 22:04:16,119 INFO SenderThread:311 [sender.py:transition_state():614] send defer: 12
363
+ 2024-04-09 22:04:16,119 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: defer
364
+ 2024-04-09 22:04:16,119 INFO HandlerThread:311 [handler.py:handle_request_defer():172] handle defer: 12
365
+ 2024-04-09 22:04:16,119 DEBUG SenderThread:311 [sender.py:send_request():406] send_request: defer
366
+ 2024-04-09 22:04:16,119 INFO SenderThread:311 [sender.py:send_request_defer():610] handle sender defer: 12
367
+ 2024-04-09 22:04:16,119 INFO SenderThread:311 [file_stream.py:finish():614] file stream finish called
368
+ 2024-04-09 22:04:16,297 INFO SenderThread:311 [file_stream.py:finish():618] file stream finish is done
369
+ 2024-04-09 22:04:16,297 INFO SenderThread:311 [sender.py:transition_state():614] send defer: 13
370
+ 2024-04-09 22:04:16,297 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: defer
371
+ 2024-04-09 22:04:16,298 INFO HandlerThread:311 [handler.py:handle_request_defer():172] handle defer: 13
372
+ 2024-04-09 22:04:16,298 DEBUG SenderThread:311 [sender.py:send_request():406] send_request: defer
373
+ 2024-04-09 22:04:16,298 INFO SenderThread:311 [sender.py:send_request_defer():610] handle sender defer: 13
374
+ 2024-04-09 22:04:16,298 INFO SenderThread:311 [sender.py:transition_state():614] send defer: 14
375
+ 2024-04-09 22:04:16,298 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: defer
376
+ 2024-04-09 22:04:16,298 INFO HandlerThread:311 [handler.py:handle_request_defer():172] handle defer: 14
377
+ 2024-04-09 22:04:16,299 DEBUG SenderThread:311 [sender.py:send():379] send: final
378
+ 2024-04-09 22:04:16,299 DEBUG SenderThread:311 [sender.py:send():379] send: footer
379
+ 2024-04-09 22:04:16,299 DEBUG SenderThread:311 [sender.py:send_request():406] send_request: defer
380
+ 2024-04-09 22:04:16,299 INFO SenderThread:311 [sender.py:send_request_defer():610] handle sender defer: 14
381
+ 2024-04-09 22:04:16,300 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: poll_exit
382
+ 2024-04-09 22:04:16,300 DEBUG SenderThread:311 [sender.py:send_request():406] send_request: poll_exit
383
+ 2024-04-09 22:04:16,301 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: poll_exit
384
+ 2024-04-09 22:04:16,301 DEBUG SenderThread:311 [sender.py:send_request():406] send_request: poll_exit
385
+ 2024-04-09 22:04:16,301 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: server_info
386
+ 2024-04-09 22:04:16,302 DEBUG SenderThread:311 [sender.py:send_request():406] send_request: server_info
387
+ 2024-04-09 22:04:16,304 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: get_summary
388
+ 2024-04-09 22:04:16,305 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: sampled_history
389
+ 2024-04-09 22:04:16,306 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: internal_messages
390
+ 2024-04-09 22:04:16,357 INFO MainThread:311 [wandb_run.py:_footer_history_summary_info():3920] rendering history
391
+ 2024-04-09 22:04:16,357 INFO MainThread:311 [wandb_run.py:_footer_history_summary_info():3952] rendering summary
392
+ 2024-04-09 22:04:16,358 INFO MainThread:311 [wandb_run.py:_footer_sync_info():3879] logging synced files
393
+ 2024-04-09 22:04:16,358 DEBUG HandlerThread:311 [handler.py:handle_request():146] handle_request: shutdown
394
+ 2024-04-09 22:04:16,358 INFO HandlerThread:311 [handler.py:finish():866] shutting down handler
395
+ 2024-04-09 22:04:17,302 INFO WriterThread:311 [datastore.py:close():296] close: /kaggle/working/wandb/run-20240409_215743-953wtybl/run-953wtybl.wandb
396
+ 2024-04-09 22:04:17,308 WARNING StreamThr :311 [internal.py:is_dead():414] Internal process exiting, parent pid 266 disappeared
397
+ 2024-04-09 22:04:17,309 ERROR StreamThr :311 [internal.py:wandb_internal():152] Internal process shutdown.
398
+ 2024-04-09 22:04:17,357 INFO SenderThread:311 [sender.py:finish():1546] shutting down sender
399
+ 2024-04-09 22:04:17,357 INFO SenderThread:311 [file_pusher.py:finish():172] shutting down file pusher
400
+ 2024-04-09 22:04:17,357 INFO SenderThread:311 [file_pusher.py:join():178] waiting for file pusher
wandb/run-20240409_215743-953wtybl/logs/debug.log ADDED
@@ -0,0 +1,33 @@
1
+ 2024-04-09 21:57:43,903 INFO MainThread:266 [wandb_setup.py:_flush():76] Current SDK version is 0.16.5
2
+ 2024-04-09 21:57:43,904 INFO MainThread:266 [wandb_setup.py:_flush():76] Configure stats pid to 266
3
+ 2024-04-09 21:57:43,904 INFO MainThread:266 [wandb_setup.py:_flush():76] Loading settings from /root/.config/wandb/settings
4
+ 2024-04-09 21:57:43,904 INFO MainThread:266 [wandb_setup.py:_flush():76] Loading settings from /kaggle/working/wandb/settings
5
+ 2024-04-09 21:57:43,904 INFO MainThread:266 [wandb_setup.py:_flush():76] Loading settings from environment variables: {}
6
+ 2024-04-09 21:57:43,904 INFO MainThread:266 [wandb_setup.py:_flush():76] Applying setup settings: {'_disable_service': False}
7
+ 2024-04-09 21:57:43,904 INFO MainThread:266 [wandb_setup.py:_flush():76] Inferring run settings from compute environment: {'program': '<python with no main file>'}
8
+ 2024-04-09 21:57:43,904 INFO MainThread:266 [wandb_setup.py:_flush():76] Applying login settings: {}
9
+ 2024-04-09 21:57:43,904 INFO MainThread:266 [wandb_init.py:_log_setup():527] Logging user logs to /kaggle/working/wandb/run-20240409_215743-953wtybl/logs/debug.log
10
+ 2024-04-09 21:57:43,904 INFO MainThread:266 [wandb_init.py:_log_setup():528] Logging internal logs to /kaggle/working/wandb/run-20240409_215743-953wtybl/logs/debug-internal.log
11
+ 2024-04-09 21:57:43,904 INFO MainThread:266 [wandb_init.py:_jupyter_setup():473] configuring jupyter hooks <wandb.sdk.wandb_init._WandbInit object at 0x7d919da01720>
12
+ 2024-04-09 21:57:43,904 INFO MainThread:266 [wandb_init.py:init():567] calling init triggers
13
+ 2024-04-09 21:57:43,904 INFO MainThread:266 [wandb_init.py:init():574] wandb.init called with sweep_config: {}
14
+ config: {}
15
+ 2024-04-09 21:57:43,904 INFO MainThread:266 [wandb_init.py:init():617] starting backend
16
+ 2024-04-09 21:57:43,905 INFO MainThread:266 [wandb_init.py:init():621] setting up manager
17
+ 2024-04-09 21:57:43,906 INFO MainThread:266 [backend.py:_multiprocessing_setup():105] multiprocessing start_methods=fork,spawn,forkserver, using: spawn
18
+ 2024-04-09 21:57:43,907 INFO MainThread:266 [wandb_init.py:init():629] backend started and connected
19
+ 2024-04-09 21:57:43,919 INFO MainThread:266 [wandb_run.py:_label_probe_notebook():1299] probe notebook
20
+ 2024-04-09 21:57:44,252 INFO MainThread:266 [wandb_init.py:init():721] updated telemetry
21
+ 2024-04-09 21:57:44,255 INFO MainThread:266 [wandb_init.py:init():754] communicating run to backend with 90.0 second timeout
22
+ 2024-04-09 21:57:44,443 INFO MainThread:266 [wandb_run.py:_on_init():2344] communicating current version
23
+ 2024-04-09 21:57:44,538 INFO MainThread:266 [wandb_run.py:_on_init():2353] got version response upgrade_message: "wandb version 0.16.6 is available! To upgrade, please run:\n $ pip install wandb --upgrade"
24
+
25
+ 2024-04-09 21:57:44,538 INFO MainThread:266 [wandb_init.py:init():805] starting run threads in backend
26
+ 2024-04-09 21:58:00,563 INFO MainThread:266 [wandb_run.py:_console_start():2323] atexit reg
27
+ 2024-04-09 21:58:00,563 INFO MainThread:266 [wandb_run.py:_redirect():2178] redirect: wrap_raw
28
+ 2024-04-09 21:58:00,564 INFO MainThread:266 [wandb_run.py:_redirect():2243] Wrapping output streams.
29
+ 2024-04-09 21:58:00,564 INFO MainThread:266 [wandb_run.py:_redirect():2268] Redirects installed.
30
+ 2024-04-09 21:58:00,566 INFO MainThread:266 [wandb_init.py:init():848] run started, returning control to user process
31
+ 2024-04-09 21:58:00,572 INFO MainThread:266 [wandb_run.py:_config_callback():1347] config_cb None None {'vocab_size': 32000, 'max_position_embeddings': 2048, 'hidden_size': 2048, 'intermediate_size': 5632, 'num_hidden_layers': 22, 'num_attention_heads': 32, 'num_key_value_heads': 4, 'hidden_act': 'silu', 'initializer_range': 0.02, 'rms_norm_eps': 1e-05, 'pretraining_tp': 1, 'use_cache': False, 'rope_theta': 10000.0, 'rope_scaling': None, 'attention_bias': False, 'attention_dropout': 0.0, 'return_dict': True, 'output_hidden_states': False, 'output_attentions': False, 'torchscript': False, 'torch_dtype': 'float32', 'use_bfloat16': False, 'tf_legacy_loss': False, 'pruned_heads': {}, 'tie_word_embeddings': False, 'chunk_size_feed_forward': 0, 'is_encoder_decoder': False, 'is_decoder': False, 'cross_attention_hidden_size': None, 'add_cross_attention': False, 'tie_encoder_decoder': False, 'max_length': 20, 'min_length': 0, 'do_sample': False, 'early_stopping': False, 'num_beams': 1, 'num_beam_groups': 1, 'diversity_penalty': 0.0, 'temperature': 1.0, 'top_k': 50, 'top_p': 1.0, 'typical_p': 1.0, 'repetition_penalty': 1.0, 'length_penalty': 1.0, 'no_repeat_ngram_size': 0, 'encoder_no_repeat_ngram_size': 0, 'bad_words_ids': None, 'num_return_sequences': 1, 'output_scores': False, 'return_dict_in_generate': False, 'forced_bos_token_id': None, 'forced_eos_token_id': None, 'remove_invalid_values': False, 'exponential_decay_length_penalty': None, 'suppress_tokens': None, 'begin_suppress_tokens': None, 'architectures': ['LlamaForCausalLM'], 'finetuning_task': None, 'id2label': {0: 'LABEL_0', 1: 'LABEL_1'}, 'label2id': {'LABEL_0': 0, 'LABEL_1': 1}, 'tokenizer_class': None, 'prefix': None, 'bos_token_id': 1, 'pad_token_id': None, 'eos_token_id': 2, 'sep_token_id': None, 'decoder_start_token_id': None, 'task_specific_params': None, 'problem_type': None, '_name_or_path': 'TinyLlama/TinyLlama-1.1B-intermediate-step-1431k-3T', 'transformers_version': '4.39.3', 'model_type': 'llama', 'quantization_config': {'quant_method': 'QuantizationMethod.BITS_AND_BYTES', '_load_in_8bit': False, '_load_in_4bit': True, 'llm_int8_threshold': 6.0, 'llm_int8_skip_modules': None, 'llm_int8_enable_fp32_cpu_offload': False, 'llm_int8_has_fp16_weight': False, 'bnb_4bit_quant_type': 'nf4', 'bnb_4bit_use_double_quant': False, 'bnb_4bit_compute_dtype': 'float16', 'bnb_4bit_quant_storage': 'uint8', 'load_in_4bit': True, 'load_in_8bit': False}, 'output_dir': '/kaggle/working/', 'overwrite_output_dir': False, 'do_train': False, 'do_eval': False, 'do_predict': False, 'evaluation_strategy': 'no', 'prediction_loss_only': False, 'per_device_train_batch_size': 2, 'per_device_eval_batch_size': 8, 'per_gpu_train_batch_size': None, 'per_gpu_eval_batch_size': None, 'gradient_accumulation_steps': 1, 'eval_accumulation_steps': None, 'eval_delay': 0, 'learning_rate': 1e-06, 'weight_decay': 0.001, 'adam_beta1': 0.9, 'adam_beta2': 0.999, 'adam_epsilon': 1e-08, 'max_grad_norm': 0.1, 'num_train_epochs': 5, 'max_steps': 70000, 'lr_scheduler_type': 'cosine', 'lr_scheduler_kwargs': {}, 'warmup_ratio': 0.03, 'warmup_steps': 0, 'log_level': 'passive', 'log_level_replica': 'warning', 'log_on_each_node': True, 'logging_dir': '/kaggle/working/runs/Apr09_21-55-28_6e44b39f6877', 'logging_strategy': 'steps', 'logging_first_step': False, 'logging_steps': 100, 'logging_nan_inf_filter': True, 'save_strategy': 'steps', 'save_steps': 100, 'save_total_limit': 1, 'save_safetensors': True, 'save_on_each_node': False, 'save_only_model': False, 'no_cuda': False, 
'use_cpu': False, 'use_mps_device': False, 'seed': 42, 'data_seed': None, 'jit_mode_eval': False, 'use_ipex': False, 'bf16': False, 'fp16': False, 'fp16_opt_level': 'O1', 'half_precision_backend': 'auto', 'bf16_full_eval': False, 'fp16_full_eval': False, 'tf32': None, 'local_rank': 0, 'ddp_backend': None, 'tpu_num_cores': None, 'tpu_metrics_debug': False, 'debug': [], 'dataloader_drop_last': False, 'eval_steps': None, 'dataloader_num_workers': 8, 'dataloader_prefetch_factor': None, 'past_index': -1, 'run_name': '/kaggle/working/', 'disable_tqdm': False, 'remove_unused_columns': True, 'label_names': None, 'load_best_model_at_end': False, 'metric_for_best_model': None, 'greater_is_better': None, 'ignore_data_skip': False, 'fsdp': [], 'fsdp_min_num_params': 0, 'fsdp_config': {'min_num_params': 0, 'xla': False, 'xla_fsdp_v2': False, 'xla_fsdp_grad_ckpt': False}, 'fsdp_transformer_layer_cls_to_wrap': None, 'accelerator_config': {'split_batches': False, 'dispatch_batches': None, 'even_batches': True, 'use_seedable_sampler': True}, 'deepspeed': None, 'label_smoothing_factor': 0.0, 'optim': 'paged_adamw_32bit', 'optim_args': None, 'adafactor': False, 'group_by_length': False, 'length_column_name': 'length', 'report_to': ['tensorboard', 'wandb'], 'ddp_find_unused_parameters': None, 'ddp_bucket_cap_mb': None, 'ddp_broadcast_buffers': None, 'dataloader_pin_memory': True, 'dataloader_persistent_workers': False, 'skip_memory_metrics': True, 'use_legacy_prediction_loop': False, 'push_to_hub': False, 'resume_from_checkpoint': None, 'hub_model_id': None, 'hub_strategy': 'every_save', 'hub_token': '<HUB_TOKEN>', 'hub_private_repo': False, 'hub_always_push': False, 'gradient_checkpointing': True, 'gradient_checkpointing_kwargs': None, 'include_inputs_for_metrics': False, 'fp16_backend': 'auto', 'push_to_hub_model_id': None, 'push_to_hub_organization': None, 'push_to_hub_token': '<PUSH_TO_HUB_TOKEN>', 'mp_parameters': '', 'auto_find_batch_size': True, 'full_determinism': False, 'torchdynamo': None, 'ray_scope': 'last', 'ddp_timeout': 1800, 'torch_compile': False, 'torch_compile_backend': None, 'torch_compile_mode': None, 'dispatch_batches': None, 'split_batches': None, 'include_tokens_per_second': False, 'include_num_input_tokens_seen': False, 'neftune_noise_alpha': None, 'optim_target_modules': None}
32
+ 2024-04-09 22:04:10,837 INFO MainThread:266 [jupyter.py:save_ipynb():373] not saving jupyter notebook
33
+ 2024-04-09 22:04:10,837 INFO MainThread:266 [wandb_init.py:_pause_backend():438] pausing backend
wandb/run-20240409_215743-953wtybl/run-953wtybl.wandb ADDED
Binary file (26.6 kB)
wandb/run-20240409_220700-9aom042n/files/conda-environment.yaml ADDED
File without changes
wandb/run-20240409_220700-9aom042n/files/config.yaml ADDED
@@ -0,0 +1,686 @@
1
+ wandb_version: 1
2
+
3
+ _wandb:
4
+ desc: null
5
+ value:
6
+ python_version: 3.10.13
7
+ cli_version: 0.16.5
8
+ framework: huggingface
9
+ huggingface_version: 4.39.3
10
+ is_jupyter_run: true
11
+ is_kaggle_kernel: true
12
+ start_time: 1712700420.0
13
+ t:
14
+ 1:
15
+ - 1
16
+ - 2
17
+ - 3
18
+ - 5
19
+ - 11
20
+ - 12
21
+ - 49
22
+ - 51
23
+ - 53
24
+ - 55
25
+ - 71
26
+ - 84
27
+ - 98
28
+ - 105
29
+ 2:
30
+ - 1
31
+ - 2
32
+ - 3
33
+ - 5
34
+ - 11
35
+ - 12
36
+ - 49
37
+ - 51
38
+ - 53
39
+ - 55
40
+ - 71
41
+ - 84
42
+ - 98
43
+ - 105
44
+ 3:
45
+ - 7
46
+ - 23
47
+ 4: 3.10.13
48
+ 5: 0.16.5
49
+ 6: 4.39.3
50
+ 8:
51
+ - 1
52
+ - 2
53
+ - 5
54
+ 9:
55
+ 1: transformers_trainer
56
+ 13: linux-x86_64
57
+ m:
58
+ - 1: train/global_step
59
+ 6:
60
+ - 3
61
+ - 1: train/loss
62
+ 5: 1
63
+ 6:
64
+ - 1
65
+ - 1: train/grad_norm
66
+ 5: 1
67
+ 6:
68
+ - 1
69
+ - 1: train/learning_rate
70
+ 5: 1
71
+ 6:
72
+ - 1
73
+ - 1: train/epoch
74
+ 5: 1
75
+ 6:
76
+ - 1
77
+ vocab_size:
78
+ desc: null
79
+ value: 32000
80
+ max_position_embeddings:
81
+ desc: null
82
+ value: 2048
83
+ hidden_size:
84
+ desc: null
85
+ value: 2048
86
+ intermediate_size:
87
+ desc: null
88
+ value: 5632
89
+ num_hidden_layers:
90
+ desc: null
91
+ value: 22
92
+ num_attention_heads:
93
+ desc: null
94
+ value: 32
95
+ num_key_value_heads:
96
+ desc: null
97
+ value: 4
98
+ hidden_act:
99
+ desc: null
100
+ value: silu
101
+ initializer_range:
102
+ desc: null
103
+ value: 0.02
104
+ rms_norm_eps:
105
+ desc: null
106
+ value: 1.0e-05
107
+ pretraining_tp:
108
+ desc: null
109
+ value: 1
110
+ use_cache:
111
+ desc: null
112
+ value: false
113
+ rope_theta:
114
+ desc: null
115
+ value: 10000.0
116
+ rope_scaling:
117
+ desc: null
118
+ value: null
119
+ attention_bias:
120
+ desc: null
121
+ value: false
122
+ attention_dropout:
123
+ desc: null
124
+ value: 0.0
125
+ return_dict:
126
+ desc: null
127
+ value: true
128
+ output_hidden_states:
129
+ desc: null
130
+ value: false
131
+ output_attentions:
132
+ desc: null
133
+ value: false
134
+ torchscript:
135
+ desc: null
136
+ value: false
137
+ torch_dtype:
138
+ desc: null
139
+ value: float32
140
+ use_bfloat16:
141
+ desc: null
142
+ value: false
143
+ tf_legacy_loss:
144
+ desc: null
145
+ value: false
146
+ pruned_heads:
147
+ desc: null
148
+ value: {}
149
+ tie_word_embeddings:
150
+ desc: null
151
+ value: false
152
+ chunk_size_feed_forward:
153
+ desc: null
154
+ value: 0
155
+ is_encoder_decoder:
156
+ desc: null
157
+ value: false
158
+ is_decoder:
159
+ desc: null
160
+ value: false
161
+ cross_attention_hidden_size:
162
+ desc: null
163
+ value: null
164
+ add_cross_attention:
165
+ desc: null
166
+ value: false
167
+ tie_encoder_decoder:
168
+ desc: null
169
+ value: false
170
+ max_length:
171
+ desc: null
172
+ value: 20
173
+ min_length:
174
+ desc: null
175
+ value: 0
176
+ do_sample:
177
+ desc: null
178
+ value: false
179
+ early_stopping:
180
+ desc: null
181
+ value: false
182
+ num_beams:
183
+ desc: null
184
+ value: 1
185
+ num_beam_groups:
186
+ desc: null
187
+ value: 1
188
+ diversity_penalty:
189
+ desc: null
190
+ value: 0.0
191
+ temperature:
192
+ desc: null
193
+ value: 1.0
194
+ top_k:
195
+ desc: null
196
+ value: 50
197
+ top_p:
198
+ desc: null
199
+ value: 1.0
200
+ typical_p:
201
+ desc: null
202
+ value: 1.0
203
+ repetition_penalty:
204
+ desc: null
205
+ value: 1.0
206
+ length_penalty:
207
+ desc: null
208
+ value: 1.0
209
+ no_repeat_ngram_size:
210
+ desc: null
211
+ value: 0
212
+ encoder_no_repeat_ngram_size:
213
+ desc: null
214
+ value: 0
215
+ bad_words_ids:
216
+ desc: null
217
+ value: null
218
+ num_return_sequences:
219
+ desc: null
220
+ value: 1
221
+ output_scores:
222
+ desc: null
223
+ value: false
224
+ return_dict_in_generate:
225
+ desc: null
226
+ value: false
227
+ forced_bos_token_id:
228
+ desc: null
229
+ value: null
230
+ forced_eos_token_id:
231
+ desc: null
232
+ value: null
233
+ remove_invalid_values:
234
+ desc: null
235
+ value: false
236
+ exponential_decay_length_penalty:
237
+ desc: null
238
+ value: null
239
+ suppress_tokens:
240
+ desc: null
241
+ value: null
242
+ begin_suppress_tokens:
243
+ desc: null
244
+ value: null
245
+ architectures:
246
+ desc: null
247
+ value:
248
+ - LlamaForCausalLM
249
+ finetuning_task:
250
+ desc: null
251
+ value: null
252
+ id2label:
253
+ desc: null
254
+ value:
255
+ '0': LABEL_0
256
+ '1': LABEL_1
257
+ label2id:
258
+ desc: null
259
+ value:
260
+ LABEL_0: 0
261
+ LABEL_1: 1
262
+ tokenizer_class:
263
+ desc: null
264
+ value: null
265
+ prefix:
266
+ desc: null
267
+ value: null
268
+ bos_token_id:
269
+ desc: null
270
+ value: 1
271
+ pad_token_id:
272
+ desc: null
273
+ value: null
274
+ eos_token_id:
275
+ desc: null
276
+ value: 2
277
+ sep_token_id:
278
+ desc: null
279
+ value: null
280
+ decoder_start_token_id:
281
+ desc: null
282
+ value: null
283
+ task_specific_params:
284
+ desc: null
285
+ value: null
286
+ problem_type:
287
+ desc: null
288
+ value: null
289
+ _name_or_path:
290
+ desc: null
291
+ value: TinyLlama/TinyLlama-1.1B-intermediate-step-1431k-3T
292
+ transformers_version:
293
+ desc: null
294
+ value: 4.39.3
295
+ model_type:
296
+ desc: null
297
+ value: llama
298
+ quantization_config:
299
+ desc: null
300
+ value:
301
+ quant_method: QuantizationMethod.BITS_AND_BYTES
302
+ _load_in_8bit: false
303
+ _load_in_4bit: true
304
+ llm_int8_threshold: 6.0
305
+ llm_int8_skip_modules: null
306
+ llm_int8_enable_fp32_cpu_offload: false
307
+ llm_int8_has_fp16_weight: false
308
+ bnb_4bit_quant_type: nf4
309
+ bnb_4bit_use_double_quant: false
310
+ bnb_4bit_compute_dtype: float16
311
+ bnb_4bit_quant_storage: uint8
312
+ load_in_4bit: true
313
+ load_in_8bit: false
314
+ output_dir:
315
+ desc: null
316
+ value: /kaggle/working/
317
+ overwrite_output_dir:
318
+ desc: null
319
+ value: false
320
+ do_train:
321
+ desc: null
322
+ value: false
323
+ do_eval:
324
+ desc: null
325
+ value: false
326
+ do_predict:
327
+ desc: null
328
+ value: false
329
+ evaluation_strategy:
330
+ desc: null
331
+ value: 'no'
332
+ prediction_loss_only:
333
+ desc: null
334
+ value: false
335
+ per_device_train_batch_size:
336
+ desc: null
337
+ value: 2
338
+ per_device_eval_batch_size:
339
+ desc: null
340
+ value: 8
341
+ per_gpu_train_batch_size:
342
+ desc: null
343
+ value: null
344
+ per_gpu_eval_batch_size:
345
+ desc: null
346
+ value: null
347
+ gradient_accumulation_steps:
348
+ desc: null
349
+ value: 1
350
+ eval_accumulation_steps:
351
+ desc: null
352
+ value: null
353
+ eval_delay:
354
+ desc: null
355
+ value: 0
356
+ learning_rate:
357
+ desc: null
358
+ value: 1.0e-06
359
+ weight_decay:
360
+ desc: null
361
+ value: 0.001
362
+ adam_beta1:
363
+ desc: null
364
+ value: 0.9
365
+ adam_beta2:
366
+ desc: null
367
+ value: 0.999
368
+ adam_epsilon:
369
+ desc: null
370
+ value: 1.0e-08
371
+ max_grad_norm:
372
+ desc: null
373
+ value: 0.1
374
+ num_train_epochs:
375
+ desc: null
376
+ value: 5
377
+ max_steps:
378
+ desc: null
379
+ value: 200
380
+ lr_scheduler_type:
381
+ desc: null
382
+ value: cosine
383
+ lr_scheduler_kwargs:
384
+ desc: null
385
+ value: {}
386
+ warmup_ratio:
387
+ desc: null
388
+ value: 0.03
389
+ warmup_steps:
390
+ desc: null
391
+ value: 0
392
+ log_level:
393
+ desc: null
394
+ value: passive
395
+ log_level_replica:
396
+ desc: null
397
+ value: warning
398
+ log_on_each_node:
399
+ desc: null
400
+ value: true
401
+ logging_dir:
402
+ desc: null
403
+ value: /kaggle/working/runs/Apr09_22-04-47_6e44b39f6877
404
+ logging_strategy:
405
+ desc: null
406
+ value: steps
407
+ logging_first_step:
408
+ desc: null
409
+ value: false
410
+ logging_steps:
411
+ desc: null
412
+ value: 100
413
+ logging_nan_inf_filter:
414
+ desc: null
415
+ value: true
416
+ save_strategy:
417
+ desc: null
418
+ value: steps
419
+ save_steps:
420
+ desc: null
421
+ value: 100
422
+ save_total_limit:
423
+ desc: null
424
+ value: 1
425
+ save_safetensors:
426
+ desc: null
427
+ value: true
428
+ save_on_each_node:
429
+ desc: null
430
+ value: false
431
+ save_only_model:
432
+ desc: null
433
+ value: false
434
+ no_cuda:
435
+ desc: null
436
+ value: false
437
+ use_cpu:
438
+ desc: null
439
+ value: false
440
+ use_mps_device:
441
+ desc: null
442
+ value: false
443
+ seed:
444
+ desc: null
445
+ value: 42
446
+ data_seed:
447
+ desc: null
448
+ value: null
449
+ jit_mode_eval:
450
+ desc: null
451
+ value: false
452
+ use_ipex:
453
+ desc: null
454
+ value: false
455
+ bf16:
456
+ desc: null
457
+ value: false
458
+ fp16:
459
+ desc: null
460
+ value: false
461
+ fp16_opt_level:
462
+ desc: null
463
+ value: O1
464
+ half_precision_backend:
465
+ desc: null
466
+ value: auto
467
+ bf16_full_eval:
468
+ desc: null
469
+ value: false
470
+ fp16_full_eval:
471
+ desc: null
472
+ value: false
473
+ tf32:
474
+ desc: null
475
+ value: null
476
+ local_rank:
477
+ desc: null
478
+ value: 0
479
+ ddp_backend:
480
+ desc: null
481
+ value: null
482
+ tpu_num_cores:
483
+ desc: null
484
+ value: null
485
+ tpu_metrics_debug:
486
+ desc: null
487
+ value: false
488
+ debug:
489
+ desc: null
490
+ value: []
491
+ dataloader_drop_last:
492
+ desc: null
493
+ value: false
494
+ eval_steps:
495
+ desc: null
496
+ value: null
497
+ dataloader_num_workers:
498
+ desc: null
499
+ value: 8
500
+ dataloader_prefetch_factor:
501
+ desc: null
502
+ value: null
503
+ past_index:
504
+ desc: null
505
+ value: -1
506
+ run_name:
507
+ desc: null
508
+ value: /kaggle/working/
509
+ disable_tqdm:
510
+ desc: null
511
+ value: false
512
+ remove_unused_columns:
513
+ desc: null
514
+ value: true
515
+ label_names:
516
+ desc: null
517
+ value: null
518
+ load_best_model_at_end:
519
+ desc: null
520
+ value: false
521
+ metric_for_best_model:
522
+ desc: null
523
+ value: null
524
+ greater_is_better:
525
+ desc: null
526
+ value: null
527
+ ignore_data_skip:
528
+ desc: null
529
+ value: false
530
+ fsdp:
531
+ desc: null
532
+ value: []
533
+ fsdp_min_num_params:
534
+ desc: null
535
+ value: 0
536
+ fsdp_config:
537
+ desc: null
538
+ value:
539
+ min_num_params: 0
540
+ xla: false
541
+ xla_fsdp_v2: false
542
+ xla_fsdp_grad_ckpt: false
543
+ fsdp_transformer_layer_cls_to_wrap:
544
+ desc: null
545
+ value: null
546
+ accelerator_config:
547
+ desc: null
548
+ value:
549
+ split_batches: false
550
+ dispatch_batches: null
551
+ even_batches: true
552
+ use_seedable_sampler: true
553
+ deepspeed:
554
+ desc: null
555
+ value: null
556
+ label_smoothing_factor:
557
+ desc: null
558
+ value: 0.0
559
+ optim:
560
+ desc: null
561
+ value: paged_adamw_32bit
562
+ optim_args:
563
+ desc: null
564
+ value: null
565
+ adafactor:
566
+ desc: null
567
+ value: false
568
+ group_by_length:
569
+ desc: null
570
+ value: false
571
+ length_column_name:
572
+ desc: null
573
+ value: length
574
+ report_to:
575
+ desc: null
576
+ value:
577
+ - tensorboard
578
+ - wandb
579
+ ddp_find_unused_parameters:
580
+ desc: null
581
+ value: null
582
+ ddp_bucket_cap_mb:
583
+ desc: null
584
+ value: null
585
+ ddp_broadcast_buffers:
586
+ desc: null
587
+ value: null
588
+ dataloader_pin_memory:
589
+ desc: null
590
+ value: true
591
+ dataloader_persistent_workers:
592
+ desc: null
593
+ value: false
594
+ skip_memory_metrics:
595
+ desc: null
596
+ value: true
597
+ use_legacy_prediction_loop:
598
+ desc: null
599
+ value: false
600
+ push_to_hub:
601
+ desc: null
602
+ value: false
603
+ resume_from_checkpoint:
604
+ desc: null
605
+ value: null
606
+ hub_model_id:
607
+ desc: null
608
+ value: null
609
+ hub_strategy:
610
+ desc: null
611
+ value: every_save
612
+ hub_token:
613
+ desc: null
614
+ value: <HUB_TOKEN>
615
+ hub_private_repo:
616
+ desc: null
617
+ value: false
618
+ hub_always_push:
619
+ desc: null
620
+ value: false
621
+ gradient_checkpointing:
622
+ desc: null
623
+ value: true
624
+ gradient_checkpointing_kwargs:
625
+ desc: null
626
+ value: null
627
+ include_inputs_for_metrics:
628
+ desc: null
629
+ value: false
630
+ fp16_backend:
631
+ desc: null
632
+ value: auto
633
+ push_to_hub_model_id:
634
+ desc: null
635
+ value: null
636
+ push_to_hub_organization:
637
+ desc: null
638
+ value: null
639
+ push_to_hub_token:
640
+ desc: null
641
+ value: <PUSH_TO_HUB_TOKEN>
642
+ mp_parameters:
643
+ desc: null
644
+ value: ''
645
+ auto_find_batch_size:
646
+ desc: null
647
+ value: true
648
+ full_determinism:
649
+ desc: null
650
+ value: false
651
+ torchdynamo:
652
+ desc: null
653
+ value: null
654
+ ray_scope:
655
+ desc: null
656
+ value: last
657
+ ddp_timeout:
658
+ desc: null
659
+ value: 1800
660
+ torch_compile:
661
+ desc: null
662
+ value: false
663
+ torch_compile_backend:
664
+ desc: null
665
+ value: null
666
+ torch_compile_mode:
667
+ desc: null
668
+ value: null
669
+ dispatch_batches:
670
+ desc: null
671
+ value: null
672
+ split_batches:
673
+ desc: null
674
+ value: null
675
+ include_tokens_per_second:
676
+ desc: null
677
+ value: false
678
+ include_num_input_tokens_seen:
679
+ desc: null
680
+ value: false
681
+ neftune_noise_alpha:
682
+ desc: null
683
+ value: null
684
+ optim_target_modules:
685
+ desc: null
686
+ value: null
wandb/run-20240409_220700-9aom042n/files/output.log ADDED
@@ -0,0 +1,37 @@
1
+ /opt/conda/lib/python3.10/site-packages/torch/utils/data/dataloader.py:557: UserWarning: This DataLoader will create 8 worker processes in total. Our suggested max number of worker in current system is 4, which is smaller than what this DataLoader is going to create. Please be aware that excessive worker creation might get DataLoader running slow or even freeze, lower the worker number to avoid potential slowness/freeze if necessary.
2
+ warnings.warn(_create_warning_msg(
3
+ huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
4
+ To disable this warning, you can either:
5
+ - Avoid using `tokenizers` before the fork if possible
6
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
7
+ huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
8
+ To disable this warning, you can either:
9
+ - Avoid using `tokenizers` before the fork if possible
10
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
11
+ huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
12
+ To disable this warning, you can either:
13
+ - Avoid using `tokenizers` before the fork if possible
14
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
15
+ huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
16
+ To disable this warning, you can either:
17
+ - Avoid using `tokenizers` before the fork if possible
18
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
19
+ huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
20
+ To disable this warning, you can either:
21
+ - Avoid using `tokenizers` before the fork if possible
22
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
23
+ huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
24
+ To disable this warning, you can either:
25
+ - Avoid using `tokenizers` before the fork if possible
26
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
27
+ huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
28
+ To disable this warning, you can either:
29
+ - Avoid using `tokenizers` before the fork if possible
30
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
31
+ huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
32
+ To disable this warning, you can either:
33
+ - Avoid using `tokenizers` before the fork if possible
34
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
35
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
36
+ warnings.warn(
37
+ /opt/conda/lib/python3.10/site-packages/torch/utils/checkpoint.py:429: UserWarning: torch.utils.checkpoint: please pass in use_reentrant=True or use_reentrant=False explicitly. The default value of use_reentrant will be updated to be False in the future. To maintain current behavior, pass use_reentrant=True. It is recommended that you use use_reentrant=False. Refer to docs for more details on the differences between the two variants.
wandb/run-20240409_220700-9aom042n/files/requirements.txt ADDED
@@ -0,0 +1,864 @@
1
+ Babel==2.14.0
2
+ Boruta==0.3
3
+ Brotli==1.0.9
4
+ CVXcanon==0.1.2
5
+ Cartopy==0.22.0
6
+ Cython==3.0.8
7
+ Deprecated==1.2.14
8
+ Farama-Notifications==0.0.4
9
+ Flask==3.0.2
10
+ Geohash==1.0
11
+ GitPython==3.1.41
12
+ ImageHash==4.3.1
13
+ Janome==0.5.0
14
+ Jinja2==3.1.2
15
+ LunarCalendar==0.0.9
16
+ Mako==1.3.2
17
+ Markdown==3.5.2
18
+ MarkupSafe==2.1.3
19
+ MarkupSafe==2.1.5
20
+ Pillow==9.5.0
21
+ PuLP==2.8.0
22
+ PyArabic==0.6.15
23
+ PyJWT==2.8.0
24
+ PyMeeus==0.5.12
25
+ PySocks==1.7.1
26
+ PyUpSet==0.1.1.post7
27
+ PyWavelets==1.5.0
28
+ PyYAML==6.0.1
29
+ Pygments==2.17.2
30
+ Pympler==1.0.1
31
+ QtPy==2.4.1
32
+ Rtree==1.2.0
33
+ SQLAlchemy==2.0.25
34
+ SecretStorage==3.3.3
35
+ Send2Trash==1.8.2
36
+ Shapely==1.8.5.post1
37
+ Shimmy==1.3.0
38
+ SimpleITK==2.3.1
39
+ TPOT==0.12.1
40
+ Theano-PyMC==1.1.2
41
+ Theano==1.0.5
42
+ Wand==0.6.13
43
+ Werkzeug==3.0.2
44
+ absl-py==1.4.0
45
+ accelerate==0.28.0
46
+ access==1.1.9
47
+ affine==2.4.0
48
+ aiobotocore==2.12.2
49
+ aiofiles==22.1.0
50
+ aiohttp-cors==0.7.0
51
+ aiohttp==3.9.1
52
+ aioitertools==0.11.0
53
+ aiorwlock==1.3.0
54
+ aiosignal==1.3.1
55
+ aiosqlite==0.19.0
56
+ albumentations==1.4.0
57
+ alembic==1.13.1
58
+ altair==5.3.0
59
+ annotated-types==0.6.0
60
+ annoy==1.17.3
61
+ anyio==4.2.0
62
+ apache-beam==2.46.0
63
+ aplus==0.11.0
64
+ appdirs==1.4.4
65
+ archspec==0.2.3
66
+ argon2-cffi-bindings==21.2.0
67
+ argon2-cffi==23.1.0
68
+ array-record==0.5.0
69
+ arrow==1.3.0
70
+ arviz==0.17.1
71
+ astroid==3.1.0
72
+ astropy-iers-data==0.2024.4.1.0.33.14
73
+ astropy==6.0.1
74
+ asttokens==2.4.1
75
+ astunparse==1.6.3
76
+ async-lru==2.0.4
77
+ async-timeout==4.0.3
78
+ attrs==23.2.0
79
+ audioread==3.0.1
80
+ autopep8==2.0.4
81
+ backoff==2.2.1
82
+ bayesian-optimization==1.4.3
83
+ beatrix_jupyterlab==2023.128.151533
84
+ beautifulsoup4==4.12.2
85
+ bitsandbytes==0.43.0
86
+ blake3==0.2.1
87
+ bleach==6.1.0
88
+ blessed==1.20.0
89
+ blinker==1.7.0
90
+ blis==0.7.10
91
+ blosc2==2.6.0
92
+ bokeh==3.3.4
93
+ boltons==23.1.1
94
+ boto3==1.26.100
95
+ botocore==1.34.51
96
+ bq_helper==0.4.1
97
+ bqplot==0.12.43
98
+ branca==0.7.1
99
+ brewer2mpl==1.4.1
100
+ brotlipy==0.7.0
101
+ cached-property==1.5.2
102
+ cachetools==4.2.4
103
+ cachetools==5.3.2
104
+ catalogue==2.0.10
105
+ catalyst==22.4
106
+ catboost==1.2.3
107
+ category-encoders==2.6.3
108
+ certifi==2024.2.2
109
+ cesium==0.12.1
110
+ cffi==1.16.0
111
+ charset-normalizer==3.3.2
112
+ chex==0.1.86
113
+ cleverhans==4.0.0
114
+ click-plugins==1.1.1
115
+ click==8.1.7
116
+ cligj==0.7.2
117
+ cloud-tpu-client==0.10
118
+ cloud-tpu-profiler==2.4.0
119
+ cloudpathlib==0.16.0
120
+ cloudpickle==2.2.1
121
+ cloudpickle==3.0.0
122
+ cmdstanpy==1.2.2
123
+ colorama==0.4.6
124
+ colorcet==3.1.0
125
+ colorful==0.5.6
126
+ colorlog==6.8.2
127
+ colorlover==0.3.0
128
+ comm==0.2.1
129
+ conda-libmamba-solver==23.7.0
130
+ conda-package-handling==2.2.0
131
+ conda==23.7.4
132
+ conda_package_streaming==0.9.0
133
+ confection==0.1.4
134
+ contextily==1.6.0
135
+ contourpy==1.2.0
136
+ convertdate==2.4.0
137
+ crcmod==1.7
138
+ cryptography==41.0.7
139
+ cuda-python==12.4.0
140
+ cudf==23.8.0
141
+ cufflinks==0.17.3
142
+ cuml==23.8.0
143
+ cupy==13.0.0
144
+ cycler==0.12.1
145
+ cymem==2.0.8
146
+ cytoolz==0.12.3
147
+ daal4py==2024.2.0
148
+ daal==2024.2.0
149
+ dacite==1.8.1
150
+ dask-cuda==23.8.0
151
+ dask-cudf==23.8.0
152
+ dask-expr==1.0.9
153
+ dask==2024.4.0
154
+ dataclasses-json==0.6.4
155
+ dataproc_jupyter_plugin==0.1.66
156
+ datasets==2.16.0
157
+ datashader==0.16.0
158
+ datatile==1.0.3
159
+ db-dtypes==1.2.0
160
+ deap==1.4.1
161
+ debugpy==1.8.0
162
+ decorator==5.1.1
163
+ deepdiff==6.7.1
164
+ defusedxml==0.7.1
165
+ deprecation==2.1.0
166
+ descartes==1.1.0
167
+ dill==0.3.7
168
+ dipy==1.9.0
169
+ distlib==0.3.8
170
+ distributed==2023.7.1
171
+ distro==1.9.0
172
+ dm-tree==0.1.8
173
+ docker-pycreds==0.4.0
174
+ docker==7.0.0
175
+ docopt==0.6.2
176
+ docstring-parser==0.15
177
+ docstring-to-markdown==0.15
178
+ docutils==0.20.1
179
+ earthengine-api==0.1.395
180
+ easydict==1.13
181
+ easyocr==1.7.1
182
+ ecos==2.0.13
183
+ eli5==0.13.0
184
+ emoji==2.11.0
185
+ en-core-web-lg==3.7.1
186
+ en-core-web-sm==3.7.1
187
+ entrypoints==0.4
188
+ ephem==4.1.5
189
+ esda==2.5.1
190
+ essentia==2.1b6.dev1110
191
+ et-xmlfile==1.1.0
192
+ etils==1.6.0
193
+ exceptiongroup==1.2.0
194
+ executing==2.0.1
195
+ explainable-ai-sdk==1.3.3
196
+ fastai==2.7.14
197
+ fastapi==0.108.0
198
+ fastavro==1.9.3
199
+ fastcore==1.5.29
200
+ fastdownload==0.0.7
201
+ fasteners==0.19
202
+ fastjsonschema==2.19.1
203
+ fastprogress==1.0.3
204
+ fastrlock==0.8.2
205
+ fasttext==0.9.2
206
+ feather-format==0.4.1
207
+ featuretools==1.30.0
208
+ filelock==3.13.1
209
+ fiona==1.9.6
210
+ fitter==1.7.0
211
+ flake8==7.0.0
212
+ flashtext==2.7
213
+ flatbuffers==23.5.26
214
+ flax==0.8.2
215
+ folium==0.16.0
216
+ fonttools==4.47.0
217
+ fonttools==4.50.0
218
+ fqdn==1.5.1
219
+ frozendict==2.4.1
220
+ frozenlist==1.4.1
221
+ fsspec==2023.10.0
222
+ fsspec==2024.3.1
223
+ funcy==2.0
224
+ fury==0.10.0
225
+ future==1.0.0
226
+ fuzzywuzzy==0.18.0
227
+ gast==0.5.4
228
+ gatspy==0.3
229
+ gcsfs==2024.2.0
230
+ gensim==4.3.2
231
+ geographiclib==2.0
232
+ geojson==3.1.0
233
+ geopandas==0.14.3
234
+ geoplot==0.5.1
235
+ geopy==2.4.1
236
+ geoviews==1.11.1
237
+ ggplot==0.11.5
238
+ giddy==2.3.5
239
+ gitdb==4.0.11
240
+ google-ai-generativelanguage==0.4.0
241
+ google-api-core==2.11.1
242
+ google-api-core==2.18.0
243
+ google-api-python-client==2.125.0
244
+ google-apitools==0.5.31
245
+ google-auth-httplib2==0.2.0
246
+ google-auth-oauthlib==1.2.0
247
+ google-auth==2.26.1
248
+ google-cloud-aiplatform==0.6.0a1
249
+ google-cloud-artifact-registry==1.10.0
250
+ google-cloud-automl==1.0.1
251
+ google-cloud-bigquery==2.34.4
252
+ google-cloud-bigtable==1.7.3
253
+ google-cloud-core==2.4.1
254
+ google-cloud-datastore==2.19.0
255
+ google-cloud-dlp==3.14.0
256
+ google-cloud-jupyter-config==0.0.5
257
+ google-cloud-language==2.13.3
258
+ google-cloud-monitoring==2.18.0
259
+ google-cloud-pubsub==2.19.0
260
+ google-cloud-pubsublite==1.9.0
261
+ google-cloud-recommendations-ai==0.7.1
262
+ google-cloud-resource-manager==1.11.0
263
+ google-cloud-spanner==3.40.1
264
+ google-cloud-storage==1.44.0
265
+ google-cloud-translate==3.12.1
266
+ google-cloud-videointelligence==2.13.3
267
+ google-cloud-vision==2.8.0
268
+ google-crc32c==1.5.0
269
+ google-generativeai==0.4.1
270
+ google-pasta==0.2.0
271
+ google-resumable-media==2.7.0
272
+ googleapis-common-protos==1.62.0
273
+ gplearn==0.4.2
274
+ gpustat==1.0.0
275
+ gpxpy==1.6.2
276
+ graphviz==0.20.3
277
+ greenlet==3.0.3
278
+ grpc-google-iam-v1==0.12.7
279
+ grpcio-status==1.48.1
280
+ grpcio-status==1.48.2
281
+ grpcio==1.51.1
282
+ grpcio==1.60.0
283
+ gviz-api==1.10.0
284
+ gym-notices==0.0.8
285
+ gym==0.26.2
286
+ gymnasium==0.29.0
287
+ h11==0.14.0
288
+ h2o==3.46.0.1
289
+ h5netcdf==1.3.0
290
+ h5py==3.10.0
291
+ haversine==2.8.1
292
+ hdfs==2.7.3
293
+ hep-ml==0.7.2
294
+ hijri-converter==2.3.1
295
+ hmmlearn==0.3.2
296
+ holidays==0.24
297
+ holoviews==1.18.3
298
+ hpsklearn==0.1.0
299
+ html5lib==1.1
300
+ htmlmin==0.1.12
301
+ httpcore==1.0.5
302
+ httplib2==0.21.0
303
+ httptools==0.6.1
304
+ httpx==0.27.0
305
+ huggingface-hub==0.22.2
306
+ hunspell==0.5.5
307
+ hydra-slayer==0.5.0
308
+ hyperopt==0.2.7
309
+ hypertools==0.8.0
310
+ idna==3.6
311
+ igraph==0.11.4
312
+ imagecodecs==2024.1.1
313
+ imageio==2.33.1
314
+ imbalanced-learn==0.12.2
315
+ imgaug==0.4.0
316
+ importlib-metadata==6.11.0
317
+ importlib-metadata==7.0.1
318
+ importlib-resources==6.1.1
319
+ inequality==1.0.1
320
+ iniconfig==2.0.0
321
+ ipydatawidgets==4.3.5
322
+ ipykernel==6.28.0
323
+ ipyleaflet==0.18.2
324
+ ipympl==0.7.0
325
+ ipython-genutils==0.2.0
326
+ ipython-genutils==0.2.0
327
+ ipython-sql==0.5.0
328
+ ipython==8.20.0
329
+ ipyvolume==0.6.3
330
+ ipyvue==1.10.2
331
+ ipyvuetify==1.9.3
332
+ ipywebrtc==0.6.0
333
+ ipywidgets==7.7.1
334
+ isoduration==20.11.0
335
+ isort==5.13.2
336
+ isoweek==1.3.3
337
+ itsdangerous==2.1.2
338
+ jaraco.classes==3.3.0
339
+ jax-jumpy==1.0.0
340
+ jax==0.4.23
341
+ jaxlib==0.4.23.dev20240116
342
+ jedi==0.19.1
343
+ jeepney==0.8.0
344
+ jieba==0.42.1
345
+ jmespath==1.0.1
346
+ joblib==1.3.2
347
+ json5==0.9.14
348
+ jsonpatch==1.33
349
+ jsonpointer==2.4
350
+ jsonschema-specifications==2023.12.1
351
+ jsonschema==4.20.0
352
+ jupyter-console==6.6.3
353
+ jupyter-events==0.9.0
354
+ jupyter-http-over-ws==0.0.8
355
+ jupyter-lsp==1.5.1
356
+ jupyter-server-mathjax==0.2.6
357
+ jupyter-ydoc==0.2.5
358
+ jupyter_client==7.4.9
359
+ jupyter_client==8.6.0
360
+ jupyter_core==5.7.1
361
+ jupyter_server==2.13.0
362
+ jupyter_server_fileid==0.9.1
363
+ jupyter_server_proxy==4.1.0
364
+ jupyter_server_terminals==0.5.1
365
+ jupyter_server_ydoc==0.8.0
366
+ jupyterlab-lsp==5.1.0
367
+ jupyterlab-widgets==3.0.9
368
+ jupyterlab==4.1.5
369
+ jupyterlab_git==0.44.0
370
+ jupyterlab_pygments==0.3.0
371
+ jupyterlab_server==2.25.2
372
+ jupytext==1.16.0
373
+ kaggle-environments==1.14.3
374
+ kaggle==1.6.8
375
+ kagglehub==0.2.2
376
+ keras-cv==0.8.2
377
+ keras-nlp==0.8.2
378
+ keras-tuner==1.4.6
379
+ keras==3.1.1
380
+ kernels-mixer==0.0.7
381
+ keyring==24.3.0
382
+ keyrings.google-artifactregistry-auth==1.1.2
383
+ kfp-pipeline-spec==0.2.2
384
+ kfp-server-api==2.0.5
385
+ kfp==2.5.0
386
+ kiwisolver==1.4.5
387
+ kmapper==2.0.1
388
+ kmodes==0.12.2
389
+ korean-lunar-calendar==0.3.1
390
+ kornia==0.7.2
391
+ kornia_rs==0.1.3
392
+ kt-legacy==1.0.5
393
+ kubernetes==26.1.0
394
+ langcodes==3.3.0
395
+ langid==1.1.6
396
+ lazy_loader==0.3
397
+ learntools==0.3.4
398
+ leven==1.0.4
399
+ libclang==16.0.6
400
+ libmambapy==1.5.0
401
+ libpysal==4.9.2
402
+ librosa==0.10.1
403
+ lightgbm==4.2.0
404
+ lightning-utilities==0.11.2
405
+ lime==0.2.0.1
406
+ line-profiler==4.1.2
407
+ linkify-it-py==2.0.3
408
+ llvmlite==0.41.1
409
+ llvmlite==0.42.0
410
+ lml==0.1.0
411
+ locket==1.0.0
412
+ loguru==0.7.2
413
+ lxml==5.2.1
414
+ lz4==4.3.3
415
+ mamba==1.5.0
416
+ mapclassify==2.6.1
417
+ markdown-it-py==3.0.0
418
+ marshmallow==3.21.1
419
+ matplotlib-inline==0.1.6
420
+ matplotlib-venn==0.11.10
421
+ matplotlib==3.7.5
422
+ matplotlib==3.8.3
423
+ mccabe==0.7.0
424
+ mdit-py-plugins==0.4.0
425
+ mdurl==0.1.2
426
+ memory-profiler==0.61.0
427
+ menuinst==2.0.1
428
+ mercantile==1.2.1
429
+ mgwr==2.2.1
430
+ missingno==0.5.2
431
+ mistune==0.8.4
432
+ mizani==0.11.1
433
+ ml-dtypes==0.2.0
434
+ mlcrate==0.2.0
435
+ mlens==0.2.3
436
+ mlxtend==0.23.1
437
+ mne==1.6.1
438
+ mnist==0.2.2
439
+ momepy==0.7.0
440
+ more-itertools==10.2.0
441
+ mpld3==0.5.10
442
+ mpmath==1.3.0
443
+ msgpack==1.0.7
444
+ multidict==6.0.4
445
+ multimethod==1.10
446
+ multipledispatch==1.0.0
447
+ multiprocess==0.70.15
448
+ munkres==1.1.4
449
+ murmurhash==1.0.10
450
+ mypy-extensions==1.0.0
451
+ namex==0.0.7
452
+ nb-conda-kernels==2.3.1
453
+ nb_conda==2.2.1
454
+ nbclassic==1.0.0
455
+ nbclient==0.5.13
456
+ nbconvert==6.4.5
457
+ nbdime==3.2.0
458
+ nbformat==5.9.2
459
+ ndindex==1.8
460
+ nest-asyncio==1.5.8
461
+ networkx==3.2.1
462
+ nibabel==5.2.1
463
+ nilearn==0.10.3
464
+ ninja==1.11.1.1
465
+ nltk==3.2.4
466
+ nose==1.3.7
467
+ notebook==6.5.4
468
+ notebook==6.5.6
469
+ notebook_executor==0.2
470
+ notebook_shim==0.2.3
471
+ numba==0.58.1
472
+ numba==0.59.1
473
+ numexpr==2.10.0
474
+ numpy==1.26.4
475
+ nvidia-ml-py==11.495.46
476
+ nvtx==0.2.10
477
+ oauth2client==4.1.3
478
+ oauthlib==3.2.2
479
+ objsize==0.6.1
480
+ odfpy==1.4.1
481
+ olefile==0.47
482
+ onnx==1.16.0
483
+ opencensus-context==0.1.3
484
+ opencensus==0.11.4
485
+ opencv-contrib-python==4.9.0.80
486
+ opencv-python-headless==4.9.0.80
487
+ opencv-python==4.9.0.80
488
+ openpyxl==3.1.2
489
+ openslide-python==1.3.1
490
+ opentelemetry-api==1.22.0
491
+ opentelemetry-exporter-otlp-proto-common==1.22.0
492
+ opentelemetry-exporter-otlp-proto-grpc==1.22.0
493
+ opentelemetry-exporter-otlp-proto-http==1.22.0
494
+ opentelemetry-exporter-otlp==1.22.0
495
+ opentelemetry-proto==1.22.0
496
+ opentelemetry-sdk==1.22.0
497
+ opentelemetry-semantic-conventions==0.43b0
498
+ opt-einsum==3.3.0
499
+ optax==0.2.2
500
+ optree==0.11.0
501
+ optuna==3.6.1
502
+ orbax-checkpoint==0.5.7
503
+ ordered-set==4.1.0
504
+ orjson==3.9.10
505
+ ortools==9.4.1874
506
+ osmnx==1.9.2
507
+ overrides==7.4.0
508
+ packaging==21.3
509
+ pandas-datareader==0.10.0
510
+ pandas-profiling==3.6.6
511
+ pandas-summary==0.2.0
512
+ pandas==2.1.4
513
+ pandas==2.2.1
514
+ pandasql==0.7.3
515
+ pandocfilters==1.5.0
516
+ panel==1.3.8
517
+ papermill==2.5.0
518
+ param==2.1.0
519
+ parso==0.8.3
520
+ partd==1.4.1
521
+ path.py==12.5.0
522
+ path==16.10.0
523
+ pathos==0.3.2
524
+ pathy==0.10.3
525
+ patsy==0.5.6
526
+ pdf2image==1.17.0
527
+ peft==0.10.0
528
+ pettingzoo==1.24.0
529
+ pexpect==4.8.0
530
+ pexpect==4.9.0
531
+ phik==0.12.4
532
+ pickleshare==0.7.5
533
+ pillow==10.3.0
534
+ pip==23.3.2
535
+ pkgutil_resolve_name==1.3.10
536
+ platformdirs==4.2.0
537
+ plotly-express==0.4.1
538
+ plotly==5.18.0
539
+ plotnine==0.13.4
540
+ pluggy==1.4.0
541
+ pointpats==2.4.0
542
+ polars==0.20.18
543
+ polyglot==16.7.4
544
+ pooch==1.8.1
545
+ pox==0.3.4
546
+ ppca==0.0.4
547
+ ppft==1.7.6.8
548
+ preprocessing==0.1.13
549
+ preshed==3.0.9
550
+ prettytable==3.9.0
551
+ progressbar2==4.4.2
552
+ prometheus-client==0.19.0
553
+ promise==2.3
554
+ prompt-toolkit==3.0.42
555
+ prompt-toolkit==3.0.43
556
+ prophet==1.1.1
557
+ proto-plus==1.23.0
558
+ protobuf==3.20.3
559
+ protobuf==4.21.12
560
+ psutil==5.9.3
561
+ psutil==5.9.7
562
+ ptyprocess==0.7.0
563
+ pudb==2024.1
564
+ pure-eval==0.2.2
565
+ py-cpuinfo==9.0.0
566
+ py-spy==0.3.14
567
+ py4j==0.10.9.7
568
+ pyLDAvis==3.4.1
569
+ pyOpenSSL==23.3.0
570
+ pyaml==23.12.0
571
+ pyarrow-hotfix==0.6
572
+ pyarrow==15.0.2
573
+ pyasn1-modules==0.3.0
574
+ pyasn1==0.5.1
575
+ pybind11==2.12.0
576
+ pyclipper==1.3.0.post5
577
+ pycodestyle==2.11.1
578
+ pycosat==0.6.6
579
+ pycparser==2.21
580
+ pycryptodome==3.20.0
581
+ pyct==0.5.0
582
+ pycuda==2024.1
583
+ pydantic==2.5.3
584
+ pydantic==2.6.4
585
+ pydantic_core==2.14.6
586
+ pydantic_core==2.16.3
587
+ pydegensac==0.1.2
588
+ pydicom==2.4.4
589
+ pydocstyle==6.3.0
590
+ pydot==1.4.2
591
+ pydub==0.25.1
592
+ pyemd==1.0.0
593
+ pyerfa==2.0.1.1
594
+ pyexcel-io==0.6.6
595
+ pyexcel-ods==0.6.0
596
+ pyflakes==3.2.0
597
+ pygltflib==1.16.2
598
+ pykalman==0.9.7
599
+ pylibraft==23.8.0
600
+ pylint==3.1.0
601
+ pymc3==3.11.4
602
+ pymongo==3.13.0
603
+ pynndescent==0.5.12
604
+ pynvml==11.4.1
605
+ pynvrtc==9.2
606
+ pyparsing==3.1.1
607
+ pyparsing==3.1.2
608
+ pypdf==4.1.0
609
+ pyproj==3.6.1
610
+ pysal==24.1
611
+ pyshp==2.3.1
612
+ pytesseract==0.3.10
613
+ pytest==8.1.1
614
+ python-bidi==0.4.2
615
+ python-dateutil==2.9.0.post0
616
+ python-dotenv==1.0.0
617
+ python-json-logger==2.0.7
618
+ python-louvain==0.16
619
+ python-lsp-jsonrpc==1.1.2
620
+ python-lsp-server==1.11.0
621
+ python-slugify==8.0.4
622
+ python-utils==3.8.2
623
+ pythreejs==2.4.2
624
+ pytoolconfig==1.3.1
625
+ pytools==2024.1.1
626
+ pytorch-ignite==0.5.0.post2
627
+ pytorch-lightning==2.2.1
628
+ pytz==2023.3.post1
629
+ pytz==2024.1
630
+ pyu2f==0.1.5
631
+ pyviz_comms==3.0.2
632
+ pyzmq==24.0.1
633
+ pyzmq==25.1.2
634
+ qgrid==1.3.1
635
+ qtconsole==5.5.1
636
+ quantecon==0.7.2
637
+ qudida==0.0.4
638
+ raft-dask==23.8.0
639
+ rasterio==1.3.9
640
+ rasterstats==0.19.0
641
+ ray-cpp==2.9.0
642
+ ray==2.9.0
643
+ referencing==0.32.1
644
+ regex==2023.12.25
645
+ requests-oauthlib==1.3.1
646
+ requests-toolbelt==0.10.1
647
+ requests==2.31.0
648
+ retrying==1.3.3
649
+ retrying==1.3.4
650
+ rfc3339-validator==0.1.4
651
+ rfc3986-validator==0.1.1
652
+ rgf-python==3.12.0
653
+ rich-click==1.7.4
654
+ rich==13.7.0
+ rich==13.7.1
+ rmm==23.8.0
+ rope==1.13.0
+ rpds-py==0.16.2
+ rsa==4.9
+ ruamel-yaml-conda==0.15.100
+ ruamel.yaml.clib==0.2.7
+ ruamel.yaml==0.17.40
+ s2sphere==0.2.5
+ s3fs==2024.2.0
+ s3transfer==0.6.2
+ safetensors==0.4.2
+ scattertext==0.1.19
+ scikit-image==0.22.0
+ scikit-learn-intelex==2024.2.0
+ scikit-learn==1.2.2
+ scikit-multilearn==0.2.0
+ scikit-optimize==0.10.1
+ scikit-plot==0.3.7
+ scikit-surprise==1.1.3
+ scipy==1.11.4
+ scipy==1.12.0
+ seaborn==0.12.2
+ segment_anything==1.0
+ segregation==2.5
+ semver==3.0.2
+ sentencepiece==0.2.0
+ sentry-sdk==1.44.1
+ setproctitle==1.3.3
+ setuptools-git==1.2
+ setuptools-scm==8.0.4
+ setuptools==69.0.3
+ shap==0.44.1
+ shapely==2.0.3
+ shellingham==1.5.4
+ shtab==1.7.1
+ simpervisor==1.0.0
+ simplejson==3.19.2
+ six==1.16.0
+ sklearn-pandas==2.2.0
+ slicer==0.0.7
+ smart-open==6.4.0
+ smmap==5.0.1
+ sniffio==1.3.0
+ snowballstemmer==2.2.0
+ snuggs==1.4.7
+ sortedcontainers==2.4.0
+ soundfile==0.12.1
+ soupsieve==2.5
+ soxr==0.3.7
+ spacy-legacy==3.0.12
+ spacy-loggers==1.0.5
+ spacy==3.7.2
+ spaghetti==1.7.5.post1
+ spectral==0.23.1
+ spglm==1.1.0
+ sphinx-rtd-theme==0.2.4
+ spint==1.0.7
+ splot==1.1.5.post1
+ spopt==0.6.0
+ spreg==1.4.2
+ spvcm==0.3.0
+ sqlparse==0.4.4
+ squarify==0.4.3
+ srsly==2.4.8
+ stable-baselines3==2.1.0
+ stack-data==0.6.2
+ stack-data==0.6.3
+ stanio==0.5.0
+ starlette==0.32.0.post1
+ statsmodels==0.14.1
+ stemming==1.0.1
+ stop-words==2018.7.23
+ stopit==1.1.2
+ stumpy==1.12.0
+ sympy==1.12
+ tables==3.9.2
+ tabulate==0.9.0
+ tangled-up-in-unicode==0.2.0
+ tbb==2021.12.0
+ tblib==3.0.0
+ tenacity==8.2.3
+ tensorboard-data-server==0.7.2
+ tensorboard-plugin-profile==2.15.0
+ tensorboard==2.15.1
+ tensorboardX==2.6.2.2
+ tensorflow-cloud==0.1.16
+ tensorflow-datasets==4.9.4
+ tensorflow-decision-forests==1.8.1
+ tensorflow-estimator==2.15.0
+ tensorflow-hub==0.16.1
+ tensorflow-io-gcs-filesystem==0.35.0
+ tensorflow-io==0.35.0
+ tensorflow-metadata==0.14.0
+ tensorflow-probability==0.23.0
+ tensorflow-serving-api==2.14.1
+ tensorflow-text==2.15.0
+ tensorflow-transform==0.14.0
+ tensorflow==2.15.0
+ tensorstore==0.1.56
+ termcolor==2.4.0
+ terminado==0.18.0
+ testpath==0.6.0
+ text-unidecode==1.3
+ textblob==0.18.0.post0
+ texttable==1.7.0
+ tf_keras==2.15.1
+ tfp-nightly==0.24.0.dev0
+ thinc==8.2.2
+ threadpoolctl==3.2.0
+ tifffile==2023.12.9
+ timm==0.9.16
+ tinycss2==1.2.1
+ tobler==0.11.2
+ tokenizers==0.15.2
+ toml==0.10.2
+ tomli==2.0.1
+ tomlkit==0.12.4
+ toolz==0.12.1
+ torch==2.1.2
+ torchaudio==2.1.2
+ torchdata==0.7.1
+ torchinfo==1.8.0
+ torchmetrics==1.3.2
+ torchtext==0.16.2
+ torchvision==0.16.2
+ tornado==6.3.3
+ tqdm==4.66.1
+ traceml==1.0.8
+ traitlets==5.9.0
+ traittypes==0.2.1
+ transformers==4.39.3
+ treelite-runtime==3.2.0
+ treelite==3.2.0
+ trl==0.8.1
+ truststore==0.8.0
+ trx-python==0.2.9
+ tsfresh==0.20.2
+ typeguard==4.1.5
+ typer==0.9.0
+ typer==0.9.4
+ types-python-dateutil==2.8.19.20240106
+ typing-inspect==0.9.0
+ typing-utils==0.1.0
+ typing_extensions==4.9.0
+ tyro==0.8.3
+ tzdata==2023.4
+ uc-micro-py==1.0.3
+ ucx-py==0.33.0
+ ujson==5.9.0
+ umap-learn==0.5.5
+ unicodedata2==15.1.0
+ update-checker==0.18.0
+ uri-template==1.3.0
+ uritemplate==3.0.1
+ urllib3==1.26.18
+ urllib3==2.1.0
+ urwid==2.6.10
+ urwid_readline==0.14
+ uvicorn==0.25.0
+ uvloop==0.19.0
+ vaex-astro==0.9.3
+ vaex-core==4.17.1
+ vaex-hdf5==0.14.1
+ vaex-jupyter==0.8.2
+ vaex-ml==0.18.3
+ vaex-server==0.9.0
+ vaex-viz==0.5.4
+ vaex==4.17.0
+ vec_noise==1.1.4
+ vecstack==0.4.0
+ virtualenv==20.21.0
+ visions==0.7.5
+ vowpalwabbit==9.9.0
+ vtk==9.3.0
+ wandb==0.16.5
+ wasabi==1.1.2
+ watchfiles==0.21.0
+ wavio==0.0.8
+ wcwidth==0.2.13
+ weasel==0.3.4
+ webcolors==1.13
+ webencodings==0.5.1
+ websocket-client==1.7.0
+ websockets==12.0
+ wfdb==4.1.2
+ whatthepatch==1.0.5
+ wheel==0.42.0
+ widgetsnbextension==3.6.6
+ witwidget==1.8.1
+ woodwork==0.29.0
+ wordcloud==1.9.3
+ wordsegment==1.3.1
+ wrapt==1.14.1
+ xarray-einstats==0.7.0
+ xarray==2024.3.0
+ xgboost==2.0.3
+ xvfbwrapper==0.2.9
+ xxhash==3.4.1
+ xyzservices==2023.10.1
+ y-py==0.6.2
+ yapf==0.40.2
+ yarl==1.9.3
+ yarl==1.9.4
+ ydata-profiling==4.6.4
+ yellowbrick==1.5
+ ypy-websocket==0.8.4
+ zict==3.0.0
+ zipp==3.17.0
+ zstandard==0.22.0
wandb/run-20240409_220700-9aom042n/files/wandb-metadata.json ADDED
@@ -0,0 +1,66 @@
+ {
+ "os": "Linux-5.15.133+-x86_64-with-glibc2.31",
+ "python": "3.10.13",
+ "heartbeatAt": "2024-04-09T22:07:04.127016",
+ "startedAt": "2024-04-09T22:07:00.437103",
+ "docker": null,
+ "cuda": null,
+ "args": [],
+ "state": "running",
+ "program": "kaggle.ipynb",
+ "codePathLocal": null,
+ "root": "/kaggle/working",
+ "host": "6e44b39f6877",
+ "username": "root",
+ "executable": "/opt/conda/bin/python3.10",
+ "cpu_count": 2,
+ "cpu_count_logical": 4,
+ "cpu_freq": {
+ "current": 2000.152,
+ "min": 0.0,
+ "max": 0.0
+ },
+ "cpu_freq_per_core": [
+ {
+ "current": 2000.152,
+ "min": 0.0,
+ "max": 0.0
+ },
+ {
+ "current": 2000.152,
+ "min": 0.0,
+ "max": 0.0
+ },
+ {
+ "current": 2000.152,
+ "min": 0.0,
+ "max": 0.0
+ },
+ {
+ "current": 2000.152,
+ "min": 0.0,
+ "max": 0.0
+ }
+ ],
+ "disk": {
+ "/": {
+ "total": 8062.387607574463,
+ "used": 5569.50146484375
+ }
+ },
+ "gpu": "Tesla T4",
+ "gpu_count": 2,
+ "gpu_devices": [
+ {
+ "name": "Tesla T4",
+ "memory_total": 16106127360
+ },
+ {
+ "name": "Tesla T4",
+ "memory_total": 16106127360
+ }
+ ],
+ "memory": {
+ "total": 31.357559204101562
+ }
+ }
wandb/run-20240409_220700-9aom042n/files/wandb-summary.json ADDED
@@ -0,0 +1 @@
+ {"train/loss": 2.8162, "train/grad_norm": 0.0, "train/learning_rate": 0.0, "train/epoch": 0.0, "train/global_step": 200, "_timestamp": 1712700553.0282009, "_runtime": 132.5851058959961, "_step": 2, "train_runtime": 134.1736, "train_samples_per_second": 2.981, "train_steps_per_second": 1.491, "total_flos": 202235532804096.0, "train_loss": 2.806353759765625}
wandb/run-20240409_220700-9aom042n/logs/debug-internal.log ADDED
@@ -0,0 +1,145 @@
+ 2024-04-09 22:07:00,443 INFO StreamThr :462 [internal.py:wandb_internal():86] W&B internal server running at pid: 462, started at: 2024-04-09 22:07:00.443187
+ 2024-04-09 22:07:00,445 DEBUG HandlerThread:462 [handler.py:handle_request():146] handle_request: status
+ 2024-04-09 22:07:00,767 INFO WriterThread:462 [datastore.py:open_for_write():87] open: /kaggle/working/wandb/run-20240409_220700-9aom042n/run-9aom042n.wandb
+ 2024-04-09 22:07:00,767 DEBUG SenderThread:462 [sender.py:send():379] send: header
+ 2024-04-09 22:07:00,770 DEBUG SenderThread:462 [sender.py:send():379] send: run
+ 2024-04-09 22:07:03,990 INFO SenderThread:462 [dir_watcher.py:__init__():211] watching files in: /kaggle/working/wandb/run-20240409_220700-9aom042n/files
+ 2024-04-09 22:07:03,990 INFO SenderThread:462 [sender.py:_start_run_threads():1124] run started: 9aom042n with start time 1712700420.443095
+ 2024-04-09 22:07:04,000 DEBUG HandlerThread:462 [handler.py:handle_request():146] handle_request: check_version
+ 2024-04-09 22:07:04,000 DEBUG SenderThread:462 [sender.py:send_request():406] send_request: check_version
+ 2024-04-09 22:07:04,096 DEBUG HandlerThread:462 [handler.py:handle_request():146] handle_request: run_start
+ 2024-04-09 22:07:04,107 DEBUG HandlerThread:462 [system_info.py:__init__():26] System info init
+ 2024-04-09 22:07:04,107 DEBUG HandlerThread:462 [system_info.py:__init__():41] System info init done
+ 2024-04-09 22:07:04,107 INFO HandlerThread:462 [system_monitor.py:start():194] Starting system monitor
+ 2024-04-09 22:07:04,107 INFO SystemMonitor:462 [system_monitor.py:_start():158] Starting system asset monitoring threads
+ 2024-04-09 22:07:04,107 INFO HandlerThread:462 [system_monitor.py:probe():214] Collecting system info
+ 2024-04-09 22:07:04,108 INFO SystemMonitor:462 [interfaces.py:start():190] Started cpu monitoring
+ 2024-04-09 22:07:04,109 INFO SystemMonitor:462 [interfaces.py:start():190] Started disk monitoring
+ 2024-04-09 22:07:04,111 INFO SystemMonitor:462 [interfaces.py:start():190] Started gpu monitoring
+ 2024-04-09 22:07:04,111 INFO SystemMonitor:462 [interfaces.py:start():190] Started memory monitoring
+ 2024-04-09 22:07:04,112 INFO SystemMonitor:462 [interfaces.py:start():190] Started network monitoring
+ 2024-04-09 22:07:04,126 DEBUG HandlerThread:462 [system_info.py:probe():150] Probing system
+ 2024-04-09 22:07:04,129 DEBUG HandlerThread:462 [gitlib.py:_init_repo():56] git repository is invalid
+ 2024-04-09 22:07:04,129 DEBUG HandlerThread:462 [system_info.py:probe():198] Probing system done
+ 2024-04-09 22:07:04,129 DEBUG HandlerThread:462 [system_monitor.py:probe():223] {'os': 'Linux-5.15.133+-x86_64-with-glibc2.31', 'python': '3.10.13', 'heartbeatAt': '2024-04-09T22:07:04.127016', 'startedAt': '2024-04-09T22:07:00.437103', 'docker': None, 'cuda': None, 'args': (), 'state': 'running', 'program': 'kaggle.ipynb', 'codePathLocal': None, 'root': '/kaggle/working', 'host': '6e44b39f6877', 'username': 'root', 'executable': '/opt/conda/bin/python3.10', 'cpu_count': 2, 'cpu_count_logical': 4, 'cpu_freq': {'current': 2000.152, 'min': 0.0, 'max': 0.0}, 'cpu_freq_per_core': [{'current': 2000.152, 'min': 0.0, 'max': 0.0}, {'current': 2000.152, 'min': 0.0, 'max': 0.0}, {'current': 2000.152, 'min': 0.0, 'max': 0.0}, {'current': 2000.152, 'min': 0.0, 'max': 0.0}], 'disk': {'/': {'total': 8062.387607574463, 'used': 5569.50146484375}}, 'gpu': 'Tesla T4', 'gpu_count': 2, 'gpu_devices': [{'name': 'Tesla T4', 'memory_total': 16106127360}, {'name': 'Tesla T4', 'memory_total': 16106127360}], 'memory': {'total': 31.357559204101562}}
+ 2024-04-09 22:07:04,129 INFO HandlerThread:462 [system_monitor.py:probe():224] Finished collecting system info
+ 2024-04-09 22:07:04,129 INFO HandlerThread:462 [system_monitor.py:probe():227] Publishing system info
+ 2024-04-09 22:07:04,129 DEBUG HandlerThread:462 [system_info.py:_save_conda():207] Saving list of conda packages installed into the current environment
+ 2024-04-09 22:07:04,992 INFO Thread-12 :462 [dir_watcher.py:_on_file_created():271] file/dir created: /kaggle/working/wandb/run-20240409_220700-9aom042n/files/conda-environment.yaml
+ 2024-04-09 22:07:19,144 ERROR HandlerThread:462 [system_info.py:_save_conda():221] Error saving conda packages: Command '['conda', 'env', 'export']' timed out after 15 seconds
+ Traceback (most recent call last):
+ File "/opt/conda/lib/python3.10/site-packages/wandb/sdk/internal/system/system_info.py", line 214, in _save_conda
+ subprocess.call(
+ File "/opt/conda/lib/python3.10/subprocess.py", line 347, in call
+ return p.wait(timeout=timeout)
+ File "/opt/conda/lib/python3.10/subprocess.py", line 1209, in wait
+ return self._wait(timeout=timeout)
+ File "/opt/conda/lib/python3.10/subprocess.py", line 1951, in _wait
+ raise TimeoutExpired(self.args, timeout)
+ subprocess.TimeoutExpired: Command '['conda', 'env', 'export']' timed out after 15 seconds
+ 2024-04-09 22:07:19,144 DEBUG HandlerThread:462 [system_info.py:_save_conda():222] Saving conda packages done
+ 2024-04-09 22:07:19,145 INFO HandlerThread:462 [system_monitor.py:probe():229] Finished publishing system info
+ 2024-04-09 22:07:19,150 DEBUG HandlerThread:462 [handler.py:handle_request():146] handle_request: status_report
+ 2024-04-09 22:07:19,150 DEBUG HandlerThread:462 [handler.py:handle_request():146] handle_request: keepalive
+ 2024-04-09 22:07:19,150 DEBUG HandlerThread:462 [handler.py:handle_request():146] handle_request: status_report
+ 2024-04-09 22:07:19,151 DEBUG HandlerThread:462 [handler.py:handle_request():146] handle_request: keepalive
+ 2024-04-09 22:07:19,151 DEBUG HandlerThread:462 [handler.py:handle_request():146] handle_request: status_report
+ 2024-04-09 22:07:19,151 DEBUG HandlerThread:462 [handler.py:handle_request():146] handle_request: keepalive
+ 2024-04-09 22:07:19,151 DEBUG SenderThread:462 [sender.py:send():379] send: files
+ 2024-04-09 22:07:19,151 INFO SenderThread:462 [sender.py:_save_file():1390] saving file wandb-metadata.json with policy now
+ 2024-04-09 22:07:19,461 INFO wandb-upload_0:462 [upload_job.py:push():131] Uploaded file /tmp/tmpjk_7pw69wandb/fv30folz-wandb-metadata.json
+ 2024-04-09 22:07:19,995 INFO Thread-12 :462 [dir_watcher.py:_on_file_created():271] file/dir created: /kaggle/working/wandb/run-20240409_220700-9aom042n/files/wandb-metadata.json
+ 2024-04-09 22:07:20,150 DEBUG HandlerThread:462 [handler.py:handle_request():146] handle_request: python_packages
+ 2024-04-09 22:07:20,151 DEBUG SenderThread:462 [sender.py:send_request():406] send_request: python_packages
+ 2024-04-09 22:07:20,154 DEBUG SenderThread:462 [sender.py:send():379] send: telemetry
+ 2024-04-09 22:07:20,164 DEBUG SenderThread:462 [sender.py:send():379] send: config
+ 2024-04-09 22:07:20,167 DEBUG SenderThread:462 [sender.py:send():379] send: metric
+ 2024-04-09 22:07:20,167 DEBUG SenderThread:462 [sender.py:send():379] send: telemetry
+ 2024-04-09 22:07:20,167 DEBUG SenderThread:462 [sender.py:send():379] send: metric
+ 2024-04-09 22:07:20,167 WARNING SenderThread:462 [sender.py:send_metric():1341] Seen metric with glob (shouldn't happen)
+ 2024-04-09 22:07:20,168 DEBUG HandlerThread:462 [handler.py:handle_request():146] handle_request: stop_status
+ 2024-04-09 22:07:20,168 DEBUG SenderThread:462 [sender.py:send():379] send: telemetry
+ 2024-04-09 22:07:20,169 DEBUG SenderThread:462 [sender.py:send_request():406] send_request: stop_status
+ 2024-04-09 22:07:20,171 DEBUG HandlerThread:462 [handler.py:handle_request():146] handle_request: internal_messages
+ 2024-04-09 22:07:20,996 INFO Thread-12 :462 [dir_watcher.py:_on_file_created():271] file/dir created: /kaggle/working/wandb/run-20240409_220700-9aom042n/files/output.log
+ 2024-04-09 22:07:20,996 INFO Thread-12 :462 [dir_watcher.py:_on_file_created():271] file/dir created: /kaggle/working/wandb/run-20240409_220700-9aom042n/files/requirements.txt
+ 2024-04-09 22:07:21,386 DEBUG HandlerThread:462 [handler.py:handle_request():146] handle_request: status_report
+ 2024-04-09 22:07:23,000 INFO Thread-12 :462 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240409_220700-9aom042n/files/output.log
+ 2024-04-09 22:07:26,387 DEBUG HandlerThread:462 [handler.py:handle_request():146] handle_request: status_report
+ 2024-04-09 22:07:31,393 DEBUG HandlerThread:462 [handler.py:handle_request():146] handle_request: status_report
+ 2024-04-09 22:07:32,004 INFO Thread-12 :462 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240409_220700-9aom042n/files/config.yaml
+ 2024-04-09 22:07:35,154 DEBUG HandlerThread:462 [handler.py:handle_request():146] handle_request: stop_status
+ 2024-04-09 22:07:35,154 DEBUG HandlerThread:462 [handler.py:handle_request():146] handle_request: internal_messages
+ 2024-04-09 22:07:35,154 DEBUG SenderThread:462 [sender.py:send_request():406] send_request: stop_status
+ 2024-04-09 22:07:37,289 DEBUG HandlerThread:462 [handler.py:handle_request():146] handle_request: status_report
+ 2024-04-09 22:07:42,290 DEBUG HandlerThread:462 [handler.py:handle_request():146] handle_request: status_report
+ 2024-04-09 22:07:47,291 DEBUG HandlerThread:462 [handler.py:handle_request():146] handle_request: status_report
+ 2024-04-09 22:07:50,153 DEBUG HandlerThread:462 [handler.py:handle_request():146] handle_request: internal_messages
+ 2024-04-09 22:07:50,153 DEBUG HandlerThread:462 [handler.py:handle_request():146] handle_request: stop_status
+ 2024-04-09 22:07:50,154 DEBUG SenderThread:462 [sender.py:send_request():406] send_request: stop_status
+ 2024-04-09 22:07:53,243 DEBUG HandlerThread:462 [handler.py:handle_request():146] handle_request: status_report
+ 2024-04-09 22:07:58,243 DEBUG HandlerThread:462 [handler.py:handle_request():146] handle_request: status_report
+ 2024-04-09 22:08:03,244 DEBUG HandlerThread:462 [handler.py:handle_request():146] handle_request: status_report
+ 2024-04-09 22:08:04,112 DEBUG SystemMonitor:462 [system_monitor.py:_start():172] Starting system metrics aggregation loop
+ 2024-04-09 22:08:04,114 DEBUG SenderThread:462 [sender.py:send():379] send: stats
+ 2024-04-09 22:08:05,151 DEBUG HandlerThread:462 [handler.py:handle_request():146] handle_request: stop_status
+ 2024-04-09 22:08:05,152 DEBUG SenderThread:462 [sender.py:send_request():406] send_request: stop_status
+ 2024-04-09 22:08:05,155 DEBUG HandlerThread:462 [handler.py:handle_request():146] handle_request: internal_messages
+ 2024-04-09 22:08:08,312 DEBUG HandlerThread:462 [handler.py:handle_request():146] handle_request: status_report
+ 2024-04-09 22:08:13,313 DEBUG HandlerThread:462 [handler.py:handle_request():146] handle_request: status_report
+ 2024-04-09 22:08:14,561 DEBUG HandlerThread:462 [handler.py:handle_request():146] handle_request: partial_history
+ 2024-04-09 22:08:14,564 DEBUG SenderThread:462 [sender.py:send():379] send: metric
+ 2024-04-09 22:08:14,564 DEBUG SenderThread:462 [sender.py:send():379] send: metric
+ 2024-04-09 22:08:14,564 DEBUG SenderThread:462 [sender.py:send():379] send: metric
+ 2024-04-09 22:08:14,565 DEBUG SenderThread:462 [sender.py:send():379] send: metric
+ 2024-04-09 22:08:14,565 DEBUG SenderThread:462 [sender.py:send():379] send: history
+ 2024-04-09 22:08:14,565 DEBUG SenderThread:462 [sender.py:send_request():406] send_request: summary_record
+ 2024-04-09 22:08:14,567 INFO SenderThread:462 [sender.py:_save_file():1390] saving file wandb-summary.json with policy end
+ 2024-04-09 22:08:15,020 INFO Thread-12 :462 [dir_watcher.py:_on_file_created():271] file/dir created: /kaggle/working/wandb/run-20240409_220700-9aom042n/files/wandb-summary.json
+ 2024-04-09 22:08:17,021 INFO Thread-12 :462 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240409_220700-9aom042n/files/output.log
+ 2024-04-09 22:08:19,130 DEBUG HandlerThread:462 [handler.py:handle_request():146] handle_request: status_report
+ 2024-04-09 22:08:20,152 DEBUG HandlerThread:462 [handler.py:handle_request():146] handle_request: stop_status
+ 2024-04-09 22:08:20,152 DEBUG SenderThread:462 [sender.py:send_request():406] send_request: stop_status
+ 2024-04-09 22:08:20,156 DEBUG HandlerThread:462 [handler.py:handle_request():146] handle_request: internal_messages
+ 2024-04-09 22:08:24,247 DEBUG HandlerThread:462 [handler.py:handle_request():146] handle_request: status_report
+ 2024-04-09 22:08:29,248 DEBUG HandlerThread:462 [handler.py:handle_request():146] handle_request: status_report
+ 2024-04-09 22:08:34,115 DEBUG SenderThread:462 [sender.py:send():379] send: stats
+ 2024-04-09 22:08:35,121 DEBUG HandlerThread:462 [handler.py:handle_request():146] handle_request: status_report
+ 2024-04-09 22:08:35,202 DEBUG HandlerThread:462 [handler.py:handle_request():146] handle_request: internal_messages
+ 2024-04-09 22:08:35,203 DEBUG HandlerThread:462 [handler.py:handle_request():146] handle_request: stop_status
+ 2024-04-09 22:08:35,256 DEBUG SenderThread:462 [sender.py:send_request():406] send_request: stop_status
+ 2024-04-09 22:08:36,029 INFO Thread-12 :462 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240409_220700-9aom042n/files/config.yaml
+ 2024-04-09 22:08:40,388 DEBUG HandlerThread:462 [handler.py:handle_request():146] handle_request: status_report
+ 2024-04-09 22:08:45,389 DEBUG HandlerThread:462 [handler.py:handle_request():146] handle_request: status_report
+ 2024-04-09 22:08:50,159 DEBUG HandlerThread:462 [handler.py:handle_request():146] handle_request: internal_messages
+ 2024-04-09 22:08:50,160 DEBUG HandlerThread:462 [handler.py:handle_request():146] handle_request: stop_status
+ 2024-04-09 22:08:50,160 DEBUG SenderThread:462 [sender.py:send_request():406] send_request: stop_status
+ 2024-04-09 22:08:51,289 DEBUG HandlerThread:462 [handler.py:handle_request():146] handle_request: status_report
+ 2024-04-09 22:08:56,289 DEBUG HandlerThread:462 [handler.py:handle_request():146] handle_request: status_report
+ 2024-04-09 22:09:01,290 DEBUG HandlerThread:462 [handler.py:handle_request():146] handle_request: status_report
+ 2024-04-09 22:09:04,116 DEBUG SenderThread:462 [sender.py:send():379] send: stats
+ 2024-04-09 22:09:05,159 DEBUG HandlerThread:462 [handler.py:handle_request():146] handle_request: internal_messages
+ 2024-04-09 22:09:05,160 DEBUG HandlerThread:462 [handler.py:handle_request():146] handle_request: stop_status
+ 2024-04-09 22:09:05,160 DEBUG SenderThread:462 [sender.py:send_request():406] send_request: stop_status
+ 2024-04-09 22:09:06,333 DEBUG HandlerThread:462 [handler.py:handle_request():146] handle_request: status_report
+ 2024-04-09 22:09:11,334 DEBUG HandlerThread:462 [handler.py:handle_request():146] handle_request: status_report
+ 2024-04-09 22:09:12,622 DEBUG HandlerThread:462 [handler.py:handle_request():146] handle_request: partial_history
+ 2024-04-09 22:09:12,623 DEBUG SenderThread:462 [sender.py:send():379] send: history
+ 2024-04-09 22:09:12,623 DEBUG SenderThread:462 [sender.py:send_request():406] send_request: summary_record
+ 2024-04-09 22:09:12,624 INFO SenderThread:462 [sender.py:_save_file():1390] saving file wandb-summary.json with policy end
+ 2024-04-09 22:09:13,028 DEBUG SenderThread:462 [sender.py:send():379] send: telemetry
+ 2024-04-09 22:09:13,028 DEBUG SenderThread:462 [sender.py:send_request():406] send_request: summary_record
+ 2024-04-09 22:09:13,030 DEBUG HandlerThread:462 [handler.py:handle_request():146] handle_request: partial_history
+ 2024-04-09 22:09:13,032 INFO SenderThread:462 [sender.py:_save_file():1390] saving file wandb-summary.json with policy end
+ 2024-04-09 22:09:13,032 DEBUG SenderThread:462 [sender.py:send_request():406] send_request: summary_record
+ 2024-04-09 22:09:13,032 INFO SenderThread:462 [sender.py:_save_file():1390] saving file wandb-summary.json with policy end
+ 2024-04-09 22:09:13,033 DEBUG SenderThread:462 [sender.py:send_request():406] send_request: summary_record
+ 2024-04-09 22:09:13,033 INFO SenderThread:462 [sender.py:_save_file():1390] saving file wandb-summary.json with policy end
+ 2024-04-09 22:09:13,033 DEBUG SenderThread:462 [sender.py:send_request():406] send_request: summary_record
+ 2024-04-09 22:09:13,034 INFO SenderThread:462 [sender.py:_save_file():1390] saving file wandb-summary.json with policy end
+ 2024-04-09 22:09:13,034 DEBUG SenderThread:462 [sender.py:send_request():406] send_request: summary_record
+ 2024-04-09 22:09:13,034 INFO SenderThread:462 [sender.py:_save_file():1390] saving file wandb-summary.json with policy end
+ 2024-04-09 22:09:13,034 DEBUG SenderThread:462 [sender.py:send():379] send: history
+ 2024-04-09 22:09:13,034 DEBUG SenderThread:462 [sender.py:send_request():406] send_request: summary_record
+ 2024-04-09 22:09:13,035 INFO SenderThread:462 [sender.py:_save_file():1390] saving file wandb-summary.json with policy end
+ 2024-04-09 22:09:13,042 INFO Thread-12 :462 [dir_watcher.py:_on_file_modified():288] file/dir modified: /kaggle/working/wandb/run-20240409_220700-9aom042n/files/wandb-summary.json
wandb/run-20240409_220700-9aom042n/logs/debug.log ADDED
@@ -0,0 +1,31 @@
+ 2024-04-09 22:07:00,438 INFO MainThread:415 [wandb_setup.py:_flush():76] Current SDK version is 0.16.5
+ 2024-04-09 22:07:00,438 INFO MainThread:415 [wandb_setup.py:_flush():76] Configure stats pid to 415
+ 2024-04-09 22:07:00,439 INFO MainThread:415 [wandb_setup.py:_flush():76] Loading settings from /root/.config/wandb/settings
+ 2024-04-09 22:07:00,439 INFO MainThread:415 [wandb_setup.py:_flush():76] Loading settings from /kaggle/working/wandb/settings
+ 2024-04-09 22:07:00,439 INFO MainThread:415 [wandb_setup.py:_flush():76] Loading settings from environment variables: {}
+ 2024-04-09 22:07:00,439 INFO MainThread:415 [wandb_setup.py:_flush():76] Applying setup settings: {'_disable_service': False}
+ 2024-04-09 22:07:00,439 INFO MainThread:415 [wandb_setup.py:_flush():76] Inferring run settings from compute environment: {'program': '<python with no main file>'}
+ 2024-04-09 22:07:00,439 INFO MainThread:415 [wandb_setup.py:_flush():76] Applying login settings: {}
+ 2024-04-09 22:07:00,439 INFO MainThread:415 [wandb_init.py:_log_setup():527] Logging user logs to /kaggle/working/wandb/run-20240409_220700-9aom042n/logs/debug.log
+ 2024-04-09 22:07:00,439 INFO MainThread:415 [wandb_init.py:_log_setup():528] Logging internal logs to /kaggle/working/wandb/run-20240409_220700-9aom042n/logs/debug-internal.log
+ 2024-04-09 22:07:00,439 INFO MainThread:415 [wandb_init.py:_jupyter_setup():473] configuring jupyter hooks <wandb.sdk.wandb_init._WandbInit object at 0x7d36045756c0>
+ 2024-04-09 22:07:00,439 INFO MainThread:415 [wandb_init.py:init():567] calling init triggers
+ 2024-04-09 22:07:00,439 INFO MainThread:415 [wandb_init.py:init():574] wandb.init called with sweep_config: {}
+ config: {}
+ 2024-04-09 22:07:00,439 INFO MainThread:415 [wandb_init.py:init():617] starting backend
+ 2024-04-09 22:07:00,439 INFO MainThread:415 [wandb_init.py:init():621] setting up manager
+ 2024-04-09 22:07:00,441 INFO MainThread:415 [backend.py:_multiprocessing_setup():105] multiprocessing start_methods=fork,spawn,forkserver, using: spawn
+ 2024-04-09 22:07:00,442 INFO MainThread:415 [wandb_init.py:init():629] backend started and connected
+ 2024-04-09 22:07:00,454 INFO MainThread:415 [wandb_run.py:_label_probe_notebook():1299] probe notebook
+ 2024-04-09 22:07:00,766 INFO MainThread:415 [wandb_init.py:init():721] updated telemetry
+ 2024-04-09 22:07:00,769 INFO MainThread:415 [wandb_init.py:init():754] communicating run to backend with 90.0 second timeout
+ 2024-04-09 22:07:03,999 INFO MainThread:415 [wandb_run.py:_on_init():2344] communicating current version
+ 2024-04-09 22:07:04,090 INFO MainThread:415 [wandb_run.py:_on_init():2353] got version response upgrade_message: "wandb version 0.16.6 is available! To upgrade, please run:\n $ pip install wandb --upgrade"
+
+ 2024-04-09 22:07:04,090 INFO MainThread:415 [wandb_init.py:init():805] starting run threads in backend
+ 2024-04-09 22:07:20,151 INFO MainThread:415 [wandb_run.py:_console_start():2323] atexit reg
+ 2024-04-09 22:07:20,151 INFO MainThread:415 [wandb_run.py:_redirect():2178] redirect: wrap_raw
+ 2024-04-09 22:07:20,153 INFO MainThread:415 [wandb_run.py:_redirect():2243] Wrapping output streams.
+ 2024-04-09 22:07:20,153 INFO MainThread:415 [wandb_run.py:_redirect():2268] Redirects installed.
+ 2024-04-09 22:07:20,154 INFO MainThread:415 [wandb_init.py:init():848] run started, returning control to user process
+ 2024-04-09 22:07:20,160 INFO MainThread:415 [wandb_run.py:_config_callback():1347] config_cb None None {'vocab_size': 32000, 'max_position_embeddings': 2048, 'hidden_size': 2048, 'intermediate_size': 5632, 'num_hidden_layers': 22, 'num_attention_heads': 32, 'num_key_value_heads': 4, 'hidden_act': 'silu', 'initializer_range': 0.02, 'rms_norm_eps': 1e-05, 'pretraining_tp': 1, 'use_cache': False, 'rope_theta': 10000.0, 'rope_scaling': None, 'attention_bias': False, 'attention_dropout': 0.0, 'return_dict': True, 'output_hidden_states': False, 'output_attentions': False, 'torchscript': False, 'torch_dtype': 'float32', 'use_bfloat16': False, 'tf_legacy_loss': False, 'pruned_heads': {}, 'tie_word_embeddings': False, 'chunk_size_feed_forward': 0, 'is_encoder_decoder': False, 'is_decoder': False, 'cross_attention_hidden_size': None, 'add_cross_attention': False, 'tie_encoder_decoder': False, 'max_length': 20, 'min_length': 0, 'do_sample': False, 'early_stopping': False, 'num_beams': 1, 'num_beam_groups': 1, 'diversity_penalty': 0.0, 'temperature': 1.0, 'top_k': 50, 'top_p': 1.0, 'typical_p': 1.0, 'repetition_penalty': 1.0, 'length_penalty': 1.0, 'no_repeat_ngram_size': 0, 'encoder_no_repeat_ngram_size': 0, 'bad_words_ids': None, 'num_return_sequences': 1, 'output_scores': False, 'return_dict_in_generate': False, 'forced_bos_token_id': None, 'forced_eos_token_id': None, 'remove_invalid_values': False, 'exponential_decay_length_penalty': None, 'suppress_tokens': None, 'begin_suppress_tokens': None, 'architectures': ['LlamaForCausalLM'], 'finetuning_task': None, 'id2label': {0: 'LABEL_0', 1: 'LABEL_1'}, 'label2id': {'LABEL_0': 0, 'LABEL_1': 1}, 'tokenizer_class': None, 'prefix': None, 'bos_token_id': 1, 'pad_token_id': None, 'eos_token_id': 2, 'sep_token_id': None, 'decoder_start_token_id': None, 'task_specific_params': None, 'problem_type': None, '_name_or_path': 'TinyLlama/TinyLlama-1.1B-intermediate-step-1431k-3T', 'transformers_version': '4.39.3', 'model_type': 'llama', 'quantization_config': {'quant_method': 'QuantizationMethod.BITS_AND_BYTES', '_load_in_8bit': False, '_load_in_4bit': True, 'llm_int8_threshold': 6.0, 'llm_int8_skip_modules': None, 'llm_int8_enable_fp32_cpu_offload': False, 'llm_int8_has_fp16_weight': False, 'bnb_4bit_quant_type': 'nf4', 'bnb_4bit_use_double_quant': False, 'bnb_4bit_compute_dtype': 'float16', 'bnb_4bit_quant_storage': 'uint8', 'load_in_4bit': True, 'load_in_8bit': False}, 'output_dir': '/kaggle/working/', 'overwrite_output_dir': False, 'do_train': False, 'do_eval': False, 'do_predict': False, 'evaluation_strategy': 'no', 'prediction_loss_only': False, 'per_device_train_batch_size': 2, 'per_device_eval_batch_size': 8, 'per_gpu_train_batch_size': None, 'per_gpu_eval_batch_size': None, 'gradient_accumulation_steps': 1, 'eval_accumulation_steps': None, 'eval_delay': 0, 'learning_rate': 1e-06, 'weight_decay': 0.001, 'adam_beta1': 0.9, 'adam_beta2': 0.999, 'adam_epsilon': 1e-08, 'max_grad_norm': 0.1, 'num_train_epochs': 5, 'max_steps': 200, 'lr_scheduler_type': 'cosine', 'lr_scheduler_kwargs': {}, 'warmup_ratio': 0.03, 'warmup_steps': 0, 'log_level': 'passive', 'log_level_replica': 'warning', 'log_on_each_node': True, 'logging_dir': '/kaggle/working/runs/Apr09_22-04-47_6e44b39f6877', 'logging_strategy': 'steps', 'logging_first_step': False, 'logging_steps': 100, 'logging_nan_inf_filter': True, 'save_strategy': 'steps', 'save_steps': 100, 'save_total_limit': 1, 'save_safetensors': True, 'save_on_each_node': False, 'save_only_model': False, 'no_cuda': False, 'use_cpu': 
False, 'use_mps_device': False, 'seed': 42, 'data_seed': None, 'jit_mode_eval': False, 'use_ipex': False, 'bf16': False, 'fp16': False, 'fp16_opt_level': 'O1', 'half_precision_backend': 'auto', 'bf16_full_eval': False, 'fp16_full_eval': False, 'tf32': None, 'local_rank': 0, 'ddp_backend': None, 'tpu_num_cores': None, 'tpu_metrics_debug': False, 'debug': [], 'dataloader_drop_last': False, 'eval_steps': None, 'dataloader_num_workers': 8, 'dataloader_prefetch_factor': None, 'past_index': -1, 'run_name': '/kaggle/working/', 'disable_tqdm': False, 'remove_unused_columns': True, 'label_names': None, 'load_best_model_at_end': False, 'metric_for_best_model': None, 'greater_is_better': None, 'ignore_data_skip': False, 'fsdp': [], 'fsdp_min_num_params': 0, 'fsdp_config': {'min_num_params': 0, 'xla': False, 'xla_fsdp_v2': False, 'xla_fsdp_grad_ckpt': False}, 'fsdp_transformer_layer_cls_to_wrap': None, 'accelerator_config': {'split_batches': False, 'dispatch_batches': None, 'even_batches': True, 'use_seedable_sampler': True}, 'deepspeed': None, 'label_smoothing_factor': 0.0, 'optim': 'paged_adamw_32bit', 'optim_args': None, 'adafactor': False, 'group_by_length': False, 'length_column_name': 'length', 'report_to': ['tensorboard', 'wandb'], 'ddp_find_unused_parameters': None, 'ddp_bucket_cap_mb': None, 'ddp_broadcast_buffers': None, 'dataloader_pin_memory': True, 'dataloader_persistent_workers': False, 'skip_memory_metrics': True, 'use_legacy_prediction_loop': False, 'push_to_hub': False, 'resume_from_checkpoint': None, 'hub_model_id': None, 'hub_strategy': 'every_save', 'hub_token': '<HUB_TOKEN>', 'hub_private_repo': False, 'hub_always_push': False, 'gradient_checkpointing': True, 'gradient_checkpointing_kwargs': None, 'include_inputs_for_metrics': False, 'fp16_backend': 'auto', 'push_to_hub_model_id': None, 'push_to_hub_organization': None, 'push_to_hub_token': '<PUSH_TO_HUB_TOKEN>', 'mp_parameters': '', 'auto_find_batch_size': True, 'full_determinism': False, 'torchdynamo': None, 'ray_scope': 'last', 'ddp_timeout': 1800, 'torch_compile': False, 'torch_compile_backend': None, 'torch_compile_mode': None, 'dispatch_batches': None, 'split_batches': None, 'include_tokens_per_second': False, 'include_num_input_tokens_seen': False, 'neftune_noise_alpha': None, 'optim_target_modules': None}
wandb/run-20240409_220700-9aom042n/run-9aom042n.wandb ADDED
Binary file (14 kB).