Upload folder using huggingface_hub
- logs_oct12/eval_qwen2.5-0_5b_base_masktune_42_llm-connector_text-3.0_1.3_2e-1_connector-3.0_1.3_2e-1_ablation_20251013_213724.log +0 -0
- logs_oct12/eval_qwen2.5-0_5b_base_masktune_42_llm-connector_text-3.0_1.5_2e-1_connector-3.0_1.5_2e-1_ablation_20251013_214412.log +0 -0
- logs_oct12/eval_qwen2.5-0_5b_base_masktune_42_llm-connector_text-3.0_1.7_2e-1_connector-3.0_1.7_2e-1_ablation_20251013_215018.log +0 -0
- logs_oct12/eval_qwen2.5-0_5b_base_masktune_42_llm-connector_text-3.0_1.9_2e-1_connector-3.0_1.9_2e-1_ablation_20251013_215700.log +0 -0
- logs_oct12/eval_qwen2.5-0_5b_base_masktune_42_llm-connector_text-3.0_2.1_2e-1_connector-3.0_2.1_2e-1_ablation_20251013_220444.log +0 -0
- logs_oct12/eval_qwen2.5-0_5b_base_masktune_42_llm-connector_text-3.0_2.3_2e-1_connector-3.0_2.3_2e-1_ablation_20251013_221214.log +92 -0
- logs_oct12/eval_qwen2.5-0_5b_base_masktune_42_llm-connector_text-3.0_2.5_2e-1_connector-3.0_2.5_2e-1_ablation_20251013_221228.log +0 -0
- logs_oct12/eval_qwen2.5-0_5b_base_masktune_42_llm-connector_text-3.0_2.7_2e-1_connector-3.0_2.7_2e-1_ablation_20251013_221828.log +0 -0
- logs_oct12/eval_qwen2.5-0_5b_base_masktune_42_llm-connector_text-3.0_2.9_2e-1_connector-3.0_2.9_2e-1_ablation_20251013_222533.log +0 -0
- logs_oct12/eval_qwen2.5-0_5b_base_masktune_42_llm-connector_text-5.0_0.7_2e-1_connector-5.0_0.7_2e-1_ablation_20251013_223153.log +681 -0
- logs_oct12/qwen2.5-0_5b_base_masktune_42_llm-connector_text-3.0_1.3_2e-1_connector-3.0_1.3_2e-1_ablation_20251013_065736.log +0 -0
- logs_oct12/qwen2.5-0_5b_base_masktune_42_llm-connector_text-3.0_1.5_2e-1_connector-3.0_1.5_2e-1_ablation_20251013_073153.log +0 -0
- logs_oct12/qwen2.5-0_5b_base_masktune_42_llm-connector_text-3.0_1.7_2e-1_connector-3.0_1.7_2e-1_ablation_20251013_080601.log +0 -0
- logs_oct12/qwen2.5-0_5b_base_masktune_42_llm-connector_text-3.0_1.9_2e-1_connector-3.0_1.9_2e-1_ablation_20251013_104850.log +0 -0
- logs_oct12/qwen2.5-0_5b_base_masktune_42_llm-connector_text-3.0_2.1_2e-1_connector-3.0_2.1_2e-1_ablation_20251013_113216.log +0 -0
- logs_oct12/qwen2.5-0_5b_base_masktune_42_llm-connector_text-3.0_2.3_2e-1_connector-3.0_2.3_2e-1_ablation_20251013_130305.log +0 -0
- logs_oct12/qwen2.5-0_5b_base_masktune_42_llm-connector_text-3.0_2.5_2e-1_connector-3.0_2.5_2e-1_ablation_20251013_143914.log +0 -0
- logs_oct12/qwen2.5-0_5b_base_masktune_42_llm-connector_text-3.0_2.7_2e-1_connector-3.0_2.7_2e-1_ablation_20251013_151303.log +0 -0
- logs_oct12/qwen2.5-0_5b_base_masktune_42_llm-connector_text-3.0_2.9_2e-1_connector-3.0_2.9_2e-1_ablation_20251013_154739.log +0 -0
- logs_oct12/qwen2.5-0_5b_base_masktune_42_llm-connector_text-5.0_0.7_2e-1_connector-5.0_0.7_2e-1_ablation_20251013_162143.log +0 -0
- logs_oct12/qwen2.5-0_5b_base_masktune_42_llm-connector_text-5.0_0.9_2e-1_connector-5.0_0.9_2e-1_ablation_20251013_165603.log +0 -0
- logs_oct12/qwen2.5-0_5b_base_masktune_42_llm-connector_text-5.0_1.1_2e-1_connector-5.0_1.1_2e-1_ablation_20251013_173027.log +0 -0
- logs_oct12/qwen2.5-0_5b_base_masktune_42_llm-connector_text-5.0_1.3_2e-1_connector-5.0_1.3_2e-1_ablation_20251013_180430.log +0 -0
- logs_oct12/qwen2.5-0_5b_base_masktune_42_llm-connector_text-5.0_1.5_2e-1_connector-5.0_1.5_2e-1_ablation_20251013_183828.log +0 -0
- logs_oct12/qwen2.5-0_5b_base_masktune_42_llm-connector_text-5.0_1.7_2e-1_connector-5.0_1.7_2e-1_ablation_20251013_191236.log +0 -0
- logs_oct12/qwen2.5-0_5b_base_masktune_42_llm-connector_text-5.0_1.9_2e-1_connector-5.0_1.9_2e-1_ablation_20251013_194705.log +0 -0
- logs_oct12/qwen2.5-0_5b_base_masktune_42_llm-connector_text-5.0_2.1_2e-1_connector-5.0_2.1_2e-1_ablation_20251013_202134.log +0 -0
- logs_oct12/qwen2.5-0_5b_base_masktune_42_llm-connector_text-5.0_2.3_2e-1_connector-5.0_2.3_2e-1_ablation_20251013_205557.log +0 -0
- logs_oct12/qwen2.5-0_5b_base_masktune_42_llm-connector_text-5.0_2.5_2e-1_connector-5.0_2.5_2e-1_ablation_20251013_213037.log +0 -0
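For reference, an upload like the one recorded in this commit can be reproduced with the huggingface_hub API. A minimal sketch, assuming the local folder is logs_oct12/ and using a placeholder repo id (the actual target repo is not named on this page):

from huggingface_hub import HfApi

api = HfApi()  # picks up the token saved by `huggingface-cli login`

# One commit containing the whole folder of experiment logs.
api.upload_folder(
    folder_path="logs_oct12",            # local directory with the .log files
    path_in_repo="logs_oct12",           # keep the same layout in the repo
    repo_id="username/tinyllava-logs",   # placeholder, not the real repo id
    repo_type="dataset",                 # assumption: the logs live in a dataset repo
    commit_message="Upload folder using huggingface_hub",
)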
logs_oct12/eval_qwen2.5-0_5b_base_masktune_42_llm-connector_text-3.0_1.3_2e-1_connector-3.0_1.3_2e-1_ablation_20251013_213724.log
ADDED (diff too large to render; see raw file)
logs_oct12/eval_qwen2.5-0_5b_base_masktune_42_llm-connector_text-3.0_1.5_2e-1_connector-3.0_1.5_2e-1_ablation_20251013_214412.log
ADDED (diff too large to render; see raw file)
logs_oct12/eval_qwen2.5-0_5b_base_masktune_42_llm-connector_text-3.0_1.7_2e-1_connector-3.0_1.7_2e-1_ablation_20251013_215018.log
ADDED (diff too large to render; see raw file)
logs_oct12/eval_qwen2.5-0_5b_base_masktune_42_llm-connector_text-3.0_1.9_2e-1_connector-3.0_1.9_2e-1_ablation_20251013_215700.log
ADDED (diff too large to render; see raw file)
logs_oct12/eval_qwen2.5-0_5b_base_masktune_42_llm-connector_text-3.0_2.1_2e-1_connector-3.0_2.1_2e-1_ablation_20251013_220444.log
ADDED (diff too large to render; see raw file)
logs_oct12/eval_qwen2.5-0_5b_base_masktune_42_llm-connector_text-3.0_2.3_2e-1_connector-3.0_2.3_2e-1_ablation_20251013_221214.log
ADDED
@@ -0,0 +1,92 @@
==== STARTING EXPERIMENT: eval_qwen2.5-0_5b_base_masktune_42_llm-connector_text-3.0_2.3_2e-1_connector-3.0_2.3_2e-1_ablation ====
Log File: eval_qwen2.5-0_5b_base_masktune_42_llm-connector_text-3.0_2.3_2e-1_connector-3.0_2.3_2e-1_ablation_20251013_221214.log
Timestamp: 2025-10-13 22:12:14
=====================================
Processing: /nfs/ywang29/TinyLLaVA/checkpoints/qwen2.5-0_5b_base_masktune_42_llm-connector_text-3.0_2.3_2e-1_connector-3.0_2.3_2e-1_ablation
/opt/conda/envs/tinyllava/lib/python3.10/site-packages/torch/cuda/__init__.py:51: FutureWarning: The pynvml package is deprecated. Please install nvidia-ml-py instead. If you did not install pynvml directly, please report this to the maintainers of the package that installed pynvml for you.
  import pynvml # type: ignore[import]
[2025-10-13 22:12:17,426] [INFO] [real_accelerator.py:191:get_accelerator] Setting ds_accelerator to cuda (auto detect)
Traceback (most recent call last):
  File "/nfs/ywang29/TinyLLaVA/scripts/apply_masks.py", line 488, in <module>
    main()
  File "/nfs/ywang29/TinyLLaVA/scripts/apply_masks.py", line 123, in main
    config_mask = TinyLlavaConfig.from_pretrained(model_args.mask_model_name_or_path)
  File "/nfs/ywang29/TinyLLaVA/transformers/src/transformers/configuration_utils.py", line 602, in from_pretrained
    config_dict, kwargs = cls.get_config_dict(pretrained_model_name_or_path, **kwargs)
  File "/nfs/ywang29/TinyLLaVA/transformers/src/transformers/configuration_utils.py", line 631, in get_config_dict
    config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs)
  File "/nfs/ywang29/TinyLLaVA/transformers/src/transformers/configuration_utils.py", line 686, in _get_config_dict
    resolved_config_file = cached_file(
  File "/nfs/ywang29/TinyLLaVA/transformers/src/transformers/utils/hub.py", line 369, in cached_file
    raise EnvironmentError(
OSError: /nfs/ywang29/TinyLLaVA/checkpoints/qwen2.5-0_5b_base_masktune_42_llm-connector_text-3.0_2.3_2e-1_connector-3.0_2.3_2e-1_ablation does not appear to have a file named config.json. Checkout 'https://huggingface.co//nfs/ywang29/TinyLLaVA/checkpoints/qwen2.5-0_5b_base_masktune_42_llm-connector_text-3.0_2.3_2e-1_connector-3.0_2.3_2e-1_ablation/tree/main' for available files.
/opt/conda/envs/tinyllava/lib/python3.10/site-packages/torch/cuda/__init__.py:51: FutureWarning: The pynvml package is deprecated. Please install nvidia-ml-py instead. If you did not install pynvml directly, please report this to the maintainers of the package that installed pynvml for you.
  import pynvml # type: ignore[import]
[2025-10-13 22:12:24,641] [INFO] [real_accelerator.py:191:get_accelerator] Setting ds_accelerator to cuda (auto detect)
Traceback (most recent call last):
  File "/nfs/ywang29/TinyLLaVA/transformers/src/transformers/utils/hub.py", line 398, in cached_file
    resolved_file = hf_hub_download(
  File "/opt/conda/envs/tinyllava/lib/python3.10/site-packages/huggingface_hub/utils/_validators.py", line 106, in _inner_fn
    validate_repo_id(arg_value)
  File "/opt/conda/envs/tinyllava/lib/python3.10/site-packages/huggingface_hub/utils/_validators.py", line 154, in validate_repo_id
    raise HFValidationError(
huggingface_hub.errors.HFValidationError: Repo id must be in the form 'repo_name' or 'namespace/repo_name': '/nfs/ywang29/TinyLLaVA/checkpoints/qwen2.5-0_5b_base_masktune_42_llm-connector_text-3.0_2.3_2e-1_connector-3.0_2.3_2e-1_ablation/mask_applied'. Use `repo_type` argument if needed.

The above exception was the direct cause of the following exception:

Traceback (most recent call last):
  File "/nfs/ywang29/TinyLLaVA/tinyllava/model/load_model.py", line 38, in load_pretrained_model
    model = TinyLlavaForConditionalGeneration.from_pretrained(model_name_or_path,low_cpu_mem_usage=True)
  File "/nfs/ywang29/TinyLLaVA/transformers/src/transformers/modeling_utils.py", line 3015, in from_pretrained
    resolved_config_file = cached_file(
  File "/nfs/ywang29/TinyLLaVA/transformers/src/transformers/utils/hub.py", line 462, in cached_file
    raise EnvironmentError(
OSError: Incorrect path_or_model_id: '/nfs/ywang29/TinyLLaVA/checkpoints/qwen2.5-0_5b_base_masktune_42_llm-connector_text-3.0_2.3_2e-1_connector-3.0_2.3_2e-1_ablation/mask_applied'. Please provide either the path to a local folder or the repo_id of a model on the Hub.

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
  File "/nfs/ywang29/TinyLLaVA/transformers/src/transformers/utils/hub.py", line 398, in cached_file
    resolved_file = hf_hub_download(
  File "/opt/conda/envs/tinyllava/lib/python3.10/site-packages/huggingface_hub/utils/_validators.py", line 106, in _inner_fn
    validate_repo_id(arg_value)
  File "/opt/conda/envs/tinyllava/lib/python3.10/site-packages/huggingface_hub/utils/_validators.py", line 154, in validate_repo_id
    raise HFValidationError(
huggingface_hub.errors.HFValidationError: Repo id must be in the form 'repo_name' or 'namespace/repo_name': '/nfs/ywang29/TinyLLaVA/checkpoints/qwen2.5-0_5b_base_masktune_42_llm-connector_text-3.0_2.3_2e-1_connector-3.0_2.3_2e-1_ablation/mask_applied'. Use `repo_type` argument if needed.

The above exception was the direct cause of the following exception:

Traceback (most recent call last):
  File "/opt/conda/envs/tinyllava/lib/python3.10/runpy.py", line 196, in _run_module_as_main
    return _run_code(code, main_globals, None,
  File "/opt/conda/envs/tinyllava/lib/python3.10/runpy.py", line 86, in _run_code
    exec(code, run_globals)
  File "/nfs/ywang29/TinyLLaVA/tinyllava/eval/model_vqa_mmmu.py", line 180, in <module>
    eval_model(args)
  File "/nfs/ywang29/TinyLLaVA/tinyllava/eval/model_vqa_mmmu.py", line 88, in eval_model
    model, tokenizer, image_processor, context_len = load_pretrained_model(model_path)
  File "/nfs/ywang29/TinyLLaVA/tinyllava/model/load_model.py", line 40, in load_pretrained_model
    model_config = TinyLlavaConfig.from_pretrained(model_name_or_path)
  File "/nfs/ywang29/TinyLLaVA/transformers/src/transformers/configuration_utils.py", line 602, in from_pretrained
    config_dict, kwargs = cls.get_config_dict(pretrained_model_name_or_path, **kwargs)
  File "/nfs/ywang29/TinyLLaVA/transformers/src/transformers/configuration_utils.py", line 631, in get_config_dict
    config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs)
  File "/nfs/ywang29/TinyLLaVA/transformers/src/transformers/configuration_utils.py", line 686, in _get_config_dict
    resolved_config_file = cached_file(
  File "/nfs/ywang29/TinyLLaVA/transformers/src/transformers/utils/hub.py", line 462, in cached_file
    raise EnvironmentError(
OSError: Incorrect path_or_model_id: '/nfs/ywang29/TinyLLaVA/checkpoints/qwen2.5-0_5b_base_masktune_42_llm-connector_text-3.0_2.3_2e-1_connector-3.0_2.3_2e-1_ablation/mask_applied'. Please provide either the path to a local folder or the repo_id of a model on the Hub.
Traceback (most recent call last):
  File "/nfs/ywang29/TinyLLaVA/scripts/convert_answer_to_mmmu.py", line 31, in <module>
    eval_model(args)
  File "/nfs/ywang29/TinyLLaVA/scripts/convert_answer_to_mmmu.py", line 7, in eval_model
    answers = [json.loads(q) for q in open(os.path.expanduser(args.answers_file), "r")]
FileNotFoundError: [Errno 2] No such file or directory: '/s3-code/ywang29/datasets/tinyllava/eval/MMMU/answers/qwen2.5-0_5b_base_masktune_42_llm-connector_text-3.0_2.3_2e-1_connector-3.0_2.3_2e-1_ablation-mask_applied.jsonl'
Traceback (most recent call last):
  File "/s3-code/ywang29/datasets/tinyllava/eval/MMMU/eval/main_eval_only.py", line 19, in <module>
    output_dict = json.load(open(args.output_path))
FileNotFoundError: [Errno 2] No such file or directory: '/s3-code/ywang29/datasets/tinyllava/eval/MMMU/answers/qwen2.5-0_5b_base_masktune_42_llm-connector_text-3.0_2.3_2e-1_connector-3.0_2.3_2e-1_ablation-mask_applied_output.json'
==== EXPERIMENT COMPLETED: eval_qwen2.5-0_5b_base_masktune_42_llm-connector_text-3.0_2.3_2e-1_connector-3.0_2.3_2e-1_ablation ====
Log File: eval_qwen2.5-0_5b_base_masktune_42_llm-connector_text-3.0_2.3_2e-1_connector-3.0_2.3_2e-1_ablation_20251013_221214.log
Timestamp: 2025-10-13 22:12:28
=====================================
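Editorial note on the failure in this log: from_pretrained only treats its argument as a local checkpoint when the folder exists and contains config.json; otherwise transformers falls back to parsing the string as a Hub repo id, which is why the absolute .../mask_applied path surfaces as an HFValidationError. A minimal guard sketch, with a placeholder path; this helper is not part of the TinyLLaVA scripts:

import os

def assert_checkpoint_complete(path: str) -> None:
    # Fail early with a clear message instead of the confusing Hub-repo-id fallback above.
    if not os.path.isdir(path):
        raise FileNotFoundError(f"checkpoint folder not found: {path}")
    if not os.path.isfile(os.path.join(path, "config.json")):
        raise FileNotFoundError(
            f"{path} has no config.json; the mask-apply/export step probably did not finish"
        )

# Hypothetical usage before TinyLlavaConfig.from_pretrained / load_pretrained_model:
# assert_checkpoint_complete("/nfs/ywang29/TinyLLaVA/checkpoints/<run_name>/mask_applied")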
logs_oct12/eval_qwen2.5-0_5b_base_masktune_42_llm-connector_text-3.0_2.5_2e-1_connector-3.0_2.5_2e-1_ablation_20251013_221228.log
ADDED (diff too large to render; see raw file)
logs_oct12/eval_qwen2.5-0_5b_base_masktune_42_llm-connector_text-3.0_2.7_2e-1_connector-3.0_2.7_2e-1_ablation_20251013_221828.log
ADDED (diff too large to render; see raw file)
logs_oct12/eval_qwen2.5-0_5b_base_masktune_42_llm-connector_text-3.0_2.9_2e-1_connector-3.0_2.9_2e-1_ablation_20251013_222533.log
ADDED (diff too large to render; see raw file)
logs_oct12/eval_qwen2.5-0_5b_base_masktune_42_llm-connector_text-5.0_0.7_2e-1_connector-5.0_0.7_2e-1_ablation_20251013_223153.log
ADDED
@@ -0,0 +1,681 @@
[MMMU evaluation progress output for this run, shown by the diff viewer as its own block. It opens with:]
0%| | 0/900 [00:00<?, ?it/s]
/nfs/ywang29/TinyLLaVA/transformers/src/transformers/generation/configuration_utils.py:492: UserWarning: `do_sample` is set to `False`. However, `temperature` is set to `0.0` -- this flag is only used in sample-based generation modes. You should set `do_sample=True` or unset `temperature`.
[The tqdm bar then ticks from 1/900 up to 313/900, mostly at around 20 s per item with occasional much faster items, ending at 35%, 313/900 [1:16:20<57:08, 5.84s/it] where the rendered block stops. Around item 166 the run also emits:]
/opt/conda/envs/tinyllava/lib/python3.10/site-packages/PIL/Image.py:1047: UserWarning: Palette images with Transparency expressed in bytes should be converted to RGBA images
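The UserWarning at the top of that progress output appears because a temperature is passed while do_sample=False (greedy decoding), so the flag is ignored. A minimal sketch of the two ways to silence it; model and inputs are placeholders for whatever the eval script builds, not the actual TinyLLaVA entry points:

import torch

@torch.inference_mode()
def greedy_generate(model, inputs, max_new_tokens: int = 128):
    # Option 1: pure greedy decoding; do not pass `temperature` at all.
    return model.generate(**inputs, do_sample=False, max_new_tokens=max_new_tokens)

@torch.inference_mode()
def sampled_generate(model, inputs, temperature: float = 0.7, max_new_tokens: int = 128):
    # Option 2: keep a temperature, but enable sampling so the flag is actually used.
    return model.generate(**inputs, do_sample=True, temperature=temperature,
                          max_new_tokens=max_new_tokens)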
==== STARTING EXPERIMENT: eval_qwen2.5-0_5b_base_masktune_42_llm-connector_text-5.0_0.7_2e-1_connector-5.0_0.7_2e-1_ablation ====
Log File: eval_qwen2.5-0_5b_base_masktune_42_llm-connector_text-5.0_0.7_2e-1_connector-5.0_0.7_2e-1_ablation_20251013_223153.log
Timestamp: 2025-10-13 22:31:53
=====================================
Processing: /nfs/ywang29/TinyLLaVA/checkpoints/qwen2.5-0_5b_base_masktune_42_llm-connector_text-5.0_0.7_2e-1_connector-5.0_0.7_2e-1_ablation
/opt/conda/envs/tinyllava/lib/python3.10/site-packages/torch/cuda/__init__.py:51: FutureWarning: The pynvml package is deprecated. Please install nvidia-ml-py instead. If you did not install pynvml directly, please report this to the maintainers of the package that installed pynvml for you.
  import pynvml # type: ignore[import]
[2025-10-13 22:31:56,706] [INFO] [real_accelerator.py:191:get_accelerator] Setting ds_accelerator to cuda (auto detect)
/opt/conda/envs/tinyllava/lib/python3.10/site-packages/huggingface_hub/file_download.py:945: FutureWarning: `resume_download` is deprecated and will be removed in version 1.0.0. Downloads always resume when possible. If you want to force a new download, use `force_download=True`.
  warnings.warn(
config_mask.torch_dtype: torch.bfloat16
Special tokens have been added in the vocabulary, make sure the associated word embeddings are fine-tuned or trained.
Load mask model from /nfs/ywang29/TinyLLaVA/checkpoints/qwen2.5-0_5b_base_masktune_42_llm-connector_text-5.0_0.7_2e-1_connector-5.0_0.7_2e-1_ablation over.
TinyLlavaConfig {
  "architectures": [
    "TinyLlavaForConditionalGeneration"
  ],
  "backward_type_connector": "normal",
  "cache_dir": null,
  "connector_type": "mlp2x_gelu",
  "hidden_size": 896,
  "ignore_index": -100,
  "image_aspect_ratio": "square",
  "image_token_index": -200,
  "llm_model_name_or_path": "Qwen/Qwen2.5-0.5B",
  "mask_model": [
    "llm",
    "connector"
  ],
  "mask_type_connector": "soft",
  "model_type": "tinyllava",
  "num_queries": 128,
  "num_resampler_layers": 3,
  "pad_token": "<|endoftext|>",
  "resampler_hidden_size": 768,
  "sparsity_connector": null,
  "subnet_type_connector": "global",
  "temperature_connector": 0.7,
  "text_config": {
    "_name_or_path": "Qwen/Qwen2.5-0.5B",
    "architectures": [
      "Qwen2ForCausalLM"
    ],
    "backward_type": "normal",
    "bos_token_id": 151643,
    "eos_token_id": 151643,
    "hidden_size": 896,
    "intermediate_size": 4864,
    "mask_type": "soft",
    "masked_layers": "all",
    "max_position_embeddings": 32768,
    "max_window_layers": 24,
    "model_type": "qwen2",
    "num_attention_heads": 14,
    "num_hidden_layers": 24,
    "num_key_value_heads": 2,
    "rope_theta": 1000000.0,
    "sliding_window": 32768,
    "subnet_mode": "both",
    "subnet_type": "None",
    "temperature_attn": 0.7,
    "temperature_mlp": 0.7,
    "tie_word_embeddings": true,
    "torch_dtype": "bfloat16",
    "use_mrope": false,
    "use_sliding_window": false,
    "vocab_size": 151936
  },
  "threshold_connector": null,
  "tokenizer_model_max_length": 2048,
  "tokenizer_name_or_path": "Qwen/Qwen2.5-0.5B",
  "tokenizer_padding_side": "right",
  "tokenizer_use_fast": false,
  "torch_dtype": "bfloat16",
  "transformers_version": "4.40.1",
  "tune_type_connector": "full",
  "tune_type_llm": "full",
  "tune_type_vision_tower": "frozen",
  "tune_vision_tower_from_layer": 0,
  "use_cache": true,
  "vision_config": {
    "hidden_act": "gelu_pytorch_tanh",
    "hidden_size": 1152,
    "image_size": 384,
    "intermediate_size": 4304,
    "layer_norm_eps": 1e-06,
    "model_name_or_path": "google/siglip-so400m-patch14-384",
    "model_name_or_path2": "",
    "model_type": "siglip_vision_model",
    "num_attention_heads": 16,
    "num_hidden_layers": 27,
    "patch_size": 14
  },
  "vision_feature_layer": -2,
  "vision_feature_select_strategy": "patch",
  "vision_hidden_size": 1152,
  "vision_model_name_or_path": "google/siglip-so400m-patch14-384",
  "vision_model_name_or_path2": "",
  "vocab_size": 151936
}

TinyLlavaForConditionalGeneration(
  (language_model): Qwen2ForCausalLM(
    (model): Qwen2Model(
      (embed_tokens): Embedding(151936, 896)
      (layers): ModuleList(
        (0-23): 24 x Qwen2DecoderLayer(
          (self_attn): Qwen2Attention(
            (q_proj): SupermaskLinearSparsity_SoftForward_Normal(in_features=896, out_features=896, bias=True)
            (k_proj): SupermaskLinearSparsity_SoftForward_Normal(in_features=896, out_features=128, bias=True)
            (v_proj): SupermaskLinearSparsity_SoftForward_Normal(in_features=896, out_features=128, bias=True)
            (o_proj): SupermaskLinearSparsity_SoftForward_Normal(in_features=896, out_features=896, bias=False)
            (rotary_emb): Qwen2RotaryEmbedding()
          )
          (mlp): Qwen2MLP(
            (gate_proj): SupermaskLinearSparsity_SoftForward_Normal(in_features=896, out_features=4864, bias=False)
            (up_proj): SupermaskLinearSparsity_SoftForward_Normal(in_features=896, out_features=4864, bias=False)
            (down_proj): SupermaskLinearSparsity_SoftForward_Normal(in_features=4864, out_features=896, bias=False)
            (act_fn): SiLU()
          )
          (input_layernorm): Qwen2RMSNorm()
          (post_attention_layernorm): Qwen2RMSNorm()
        )
      )
      (norm): Qwen2RMSNorm()
    )
    (lm_head): Linear(in_features=896, out_features=151936, bias=False)
  )
  (vision_tower): SIGLIPVisionTower(
    (_vision_tower): SiglipVisionModel(
      (vision_model): SiglipVisionTransformer(
        (embeddings): SiglipVisionEmbeddings(
          (patch_embedding): Conv2d(3, 1152, kernel_size=(14, 14), stride=(14, 14), padding=valid)
          (position_embedding): Embedding(729, 1152)
        )
        (encoder): SiglipEncoder(
          (layers): ModuleList(
            (0-26): 27 x SiglipEncoderLayer(
              (self_attn): SiglipAttention(
                (k_proj): Linear(in_features=1152, out_features=1152, bias=True)
                (v_proj): Linear(in_features=1152, out_features=1152, bias=True)
                (q_proj): Linear(in_features=1152, out_features=1152, bias=True)
                (out_proj): Linear(in_features=1152, out_features=1152, bias=True)
              )
              (layer_norm1): LayerNorm((1152,), eps=1e-06, elementwise_affine=True)
              (mlp): SiglipMLP(
                (activation_fn): PytorchGELUTanh()
                (fc1): Linear(in_features=1152, out_features=4304, bias=True)
                (fc2): Linear(in_features=4304, out_features=1152, bias=True)
              )
              (layer_norm2): LayerNorm((1152,), eps=1e-06, elementwise_affine=True)
            )
          )
        )
        (post_layernorm): LayerNorm((1152,), eps=1e-06, elementwise_affine=True)
        (head): SiglipMultiheadAttentionPoolingHead(
          (attention): MultiheadAttention(
            (out_proj): NonDynamicallyQuantizableLinear(in_features=1152, out_features=1152, bias=True)
          )
          (layernorm): LayerNorm((1152,), eps=1e-06, elementwise_affine=True)
          (mlp): SiglipMLP(
            (activation_fn): PytorchGELUTanh()
            (fc1): Linear(in_features=1152, out_features=4304, bias=True)
            (fc2): Linear(in_features=4304, out_features=1152, bias=True)
          )
        )
      )
    )
  )
  (connector): MLPConnector(
    (_connector): Sequential(
      (0): SupermaskLinearSparsity_SoftForward_Normal(in_features=1152, out_features=896, bias=True)
      (1): GELU(approximate='none')
      (2): SupermaskLinearSparsity_SoftForward_Normal(in_features=896, out_features=896, bias=True)
    )
  )
)
Special tokens have been added in the vocabulary, make sure the associated word embeddings are fine-tuned or trained.
/opt/conda/envs/tinyllava/lib/python3.10/site-packages/torch/_utils.py:831: UserWarning: TypedStorage is deprecated. It will be removed in the future and UntypedStorage will be the only storage class. This should only matter to you if you are using storages directly. To access UntypedStorage directly, use tensor.untyped_storage() instead of tensor.storage()
  return self.fget.__get__(instance, owner)()
loading language model from /nfs/ywang29/TinyLLaVA/checkpoints/tiny-llava-Qwen2.5-0.5B-siglip-so400m-patch14-384-pretrain/language_model
Loading vision tower from /nfs/ywang29/TinyLLaVA/checkpoints/tiny-llava-Qwen2.5-0.5B-siglip-so400m-patch14-384-pretrain/vision_tower
Loading connector from /nfs/ywang29/TinyLLaVA/checkpoints/tiny-llava-Qwen2.5-0.5B-siglip-so400m-patch14-384-pretrain/connector/pytorch_model.bin...
Load base model from /nfs/ywang29/TinyLLaVA/checkpoints/tiny-llava-Qwen2.5-0.5B-siglip-so400m-patch14-384-pretrain over.
TinyLlavaConfig {
  "cache_dir": null,
  "connector_type": "mlp2x_gelu",
  "hidden_size": 896,
  "ignore_index": -100,
  "image_aspect_ratio": "square",
  "image_token_index": -200,
  "llm_model_name_or_path": "Qwen/Qwen2.5-0.5B",
  "model_type": "tinyllava",
  "num_queries": 128,
  "num_resampler_layers": 3,
  "pad_token": "<|endoftext|>",
  "pad_token_id": 151643,
  "resampler_hidden_size": 768,
  "text_config": {
    "_name_or_path": "Qwen/Qwen2.5-0.5B",
    "architectures": [
      "Qwen2ForCausalLM"
    ],
    "bos_token_id": 151643,
    "eos_token_id": 151643,
    "hidden_size": 896,
    "intermediate_size": 4864,
    "max_position_embeddings": 32768,
    "max_window_layers": 24,
    "model_type": "qwen2",
    "num_attention_heads": 14,
    "num_hidden_layers": 24,
    "num_key_value_heads": 2,
    "rope_theta": 1000000.0,
    "sliding_window": 32768,
    "tie_word_embeddings": true,
    "use_mrope": false,
    "use_sliding_window": false,
    "vocab_size": 151936
  },
  "tokenizer_model_max_length": 2048,
  "tokenizer_name_or_path": "Qwen/Qwen2.5-0.5B",
  "tokenizer_padding_side": "right",
  "tokenizer_use_fast": false,
  "transformers_version": "4.40.1",
  "tune_type_connector": "full",
  "tune_type_llm": "frozen",
  "tune_type_vision_tower": "frozen",
  "tune_vision_tower_from_layer": 0,
  "use_cache": true,
  "vision_config": {
    "hidden_act": "gelu_pytorch_tanh",
    "hidden_size": 1152,
    "image_size": 384,
    "intermediate_size": 4304,
    "layer_norm_eps": 1e-06,
    "model_name_or_path": "google/siglip-so400m-patch14-384",
    "model_name_or_path2": "",
    "model_type": "siglip_vision_model",
    "num_attention_heads": 16,
    "num_hidden_layers": 27,
    "patch_size": 14
  },
  "vision_feature_layer": -2,
  "vision_feature_select_strategy": "patch",
  "vision_hidden_size": 1152,
  "vision_model_name_or_path": "google/siglip-so400m-patch14-384",
  "vision_model_name_or_path2": "",
  "vocab_size": 151936
}

[The freshly loaded base model is then printed. Its module tree is identical to the masked-model printout above, except that every SupermaskLinearSparsity_SoftForward_Normal projection (the attention q/k/v/o_proj, the MLP gate/up/down_proj, and the two connector layers) is a plain Linear with the same in/out features and bias settings; the vision tower and lm_head are unchanged.]
Collect masks for language model over.
Collect masks for connector over.
Applying mask on model.layers.0.self_attn.q_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
Applied soft mask on model.layers.0.self_attn.q_proj.
Applying mask on model.layers.0.self_attn.k_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
Applied soft mask on model.layers.0.self_attn.k_proj.
Applying mask on model.layers.0.self_attn.v_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
Applied soft mask on model.layers.0.self_attn.v_proj.
Applying mask on model.layers.0.self_attn.o_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
Applied soft mask on model.layers.0.self_attn.o_proj.
Applying mask on model.layers.0.mlp.gate_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
Applied soft mask on model.layers.0.mlp.gate_proj.
Applying mask on model.layers.0.mlp.up_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
Applied soft mask on model.layers.0.mlp.up_proj.
Applying mask on model.layers.0.mlp.down_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
Applied soft mask on model.layers.0.mlp.down_proj.
[The same "Applying mask ... / Applied soft mask ..." pair repeats for the q/k/v/o_proj and gate/up/down_proj of layers 1 through 5, always with mask_dtype=torch.bfloat16 and module_dtype=torch.bfloat16; the rendered portion of this diff ends at the Applying line for model.layers.5.mlp.gate_proj.]
|
| 409 |
+
Applied soft mask on model.layers.5.mlp.gate_proj.
|
| 410 |
+
Applying mask on model.layers.5.mlp.up_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 411 |
+
Applied soft mask on model.layers.5.mlp.up_proj.
|
| 412 |
+
Applying mask on model.layers.5.mlp.down_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 413 |
+
Applied soft mask on model.layers.5.mlp.down_proj.
|
| 414 |
+
Applying mask on model.layers.6.self_attn.q_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 415 |
+
Applied soft mask on model.layers.6.self_attn.q_proj.
|
| 416 |
+
Applying mask on model.layers.6.self_attn.k_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 417 |
+
Applied soft mask on model.layers.6.self_attn.k_proj.
|
| 418 |
+
Applying mask on model.layers.6.self_attn.v_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 419 |
+
Applied soft mask on model.layers.6.self_attn.v_proj.
|
| 420 |
+
Applying mask on model.layers.6.self_attn.o_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 421 |
+
Applied soft mask on model.layers.6.self_attn.o_proj.
|
| 422 |
+
Applying mask on model.layers.6.mlp.gate_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 423 |
+
Applied soft mask on model.layers.6.mlp.gate_proj.
|
| 424 |
+
Applying mask on model.layers.6.mlp.up_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 425 |
+
Applied soft mask on model.layers.6.mlp.up_proj.
|
| 426 |
+
Applying mask on model.layers.6.mlp.down_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 427 |
+
Applied soft mask on model.layers.6.mlp.down_proj.
|
| 428 |
+
Applying mask on model.layers.7.self_attn.q_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 429 |
+
Applied soft mask on model.layers.7.self_attn.q_proj.
|
| 430 |
+
Applying mask on model.layers.7.self_attn.k_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 431 |
+
Applied soft mask on model.layers.7.self_attn.k_proj.
|
| 432 |
+
Applying mask on model.layers.7.self_attn.v_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 433 |
+
Applied soft mask on model.layers.7.self_attn.v_proj.
|
| 434 |
+
Applying mask on model.layers.7.self_attn.o_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 435 |
+
Applied soft mask on model.layers.7.self_attn.o_proj.
|
| 436 |
+
Applying mask on model.layers.7.mlp.gate_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 437 |
+
Applied soft mask on model.layers.7.mlp.gate_proj.
|
| 438 |
+
Applying mask on model.layers.7.mlp.up_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 439 |
+
Applied soft mask on model.layers.7.mlp.up_proj.
|
| 440 |
+
Applying mask on model.layers.7.mlp.down_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 441 |
+
Applied soft mask on model.layers.7.mlp.down_proj.
|
| 442 |
+
Applying mask on model.layers.8.self_attn.q_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 443 |
+
Applied soft mask on model.layers.8.self_attn.q_proj.
|
| 444 |
+
Applying mask on model.layers.8.self_attn.k_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 445 |
+
Applied soft mask on model.layers.8.self_attn.k_proj.
|
| 446 |
+
Applying mask on model.layers.8.self_attn.v_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 447 |
+
Applied soft mask on model.layers.8.self_attn.v_proj.
|
| 448 |
+
Applying mask on model.layers.8.self_attn.o_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 449 |
+
Applied soft mask on model.layers.8.self_attn.o_proj.
|
| 450 |
+
Applying mask on model.layers.8.mlp.gate_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 451 |
+
Applied soft mask on model.layers.8.mlp.gate_proj.
|
| 452 |
+
Applying mask on model.layers.8.mlp.up_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 453 |
+
Applied soft mask on model.layers.8.mlp.up_proj.
|
| 454 |
+
Applying mask on model.layers.8.mlp.down_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 455 |
+
Applied soft mask on model.layers.8.mlp.down_proj.
|
| 456 |
+
Applying mask on model.layers.9.self_attn.q_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 457 |
+
Applied soft mask on model.layers.9.self_attn.q_proj.
|
| 458 |
+
Applying mask on model.layers.9.self_attn.k_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 459 |
+
Applied soft mask on model.layers.9.self_attn.k_proj.
|
| 460 |
+
Applying mask on model.layers.9.self_attn.v_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 461 |
+
Applied soft mask on model.layers.9.self_attn.v_proj.
|
| 462 |
+
Applying mask on model.layers.9.self_attn.o_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 463 |
+
Applied soft mask on model.layers.9.self_attn.o_proj.
|
| 464 |
+
Applying mask on model.layers.9.mlp.gate_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 465 |
+
Applied soft mask on model.layers.9.mlp.gate_proj.
|
| 466 |
+
Applying mask on model.layers.9.mlp.up_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 467 |
+
Applied soft mask on model.layers.9.mlp.up_proj.
|
| 468 |
+
Applying mask on model.layers.9.mlp.down_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 469 |
+
Applied soft mask on model.layers.9.mlp.down_proj.
|
| 470 |
+
Applying mask on model.layers.10.self_attn.q_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 471 |
+
Applied soft mask on model.layers.10.self_attn.q_proj.
|
| 472 |
+
Applying mask on model.layers.10.self_attn.k_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 473 |
+
Applied soft mask on model.layers.10.self_attn.k_proj.
|
| 474 |
+
Applying mask on model.layers.10.self_attn.v_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 475 |
+
Applied soft mask on model.layers.10.self_attn.v_proj.
|
| 476 |
+
Applying mask on model.layers.10.self_attn.o_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 477 |
+
Applied soft mask on model.layers.10.self_attn.o_proj.
|
| 478 |
+
Applying mask on model.layers.10.mlp.gate_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 479 |
+
Applied soft mask on model.layers.10.mlp.gate_proj.
|
| 480 |
+
Applying mask on model.layers.10.mlp.up_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 481 |
+
Applied soft mask on model.layers.10.mlp.up_proj.
|
| 482 |
+
Applying mask on model.layers.10.mlp.down_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 483 |
+
Applied soft mask on model.layers.10.mlp.down_proj.
|
| 484 |
+
Applying mask on model.layers.11.self_attn.q_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 485 |
+
Applied soft mask on model.layers.11.self_attn.q_proj.
|
| 486 |
+
Applying mask on model.layers.11.self_attn.k_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 487 |
+
Applied soft mask on model.layers.11.self_attn.k_proj.
|
| 488 |
+
Applying mask on model.layers.11.self_attn.v_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 489 |
+
Applied soft mask on model.layers.11.self_attn.v_proj.
|
| 490 |
+
Applying mask on model.layers.11.self_attn.o_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 491 |
+
Applied soft mask on model.layers.11.self_attn.o_proj.
|
| 492 |
+
Applying mask on model.layers.11.mlp.gate_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 493 |
+
Applied soft mask on model.layers.11.mlp.gate_proj.
|
| 494 |
+
Applying mask on model.layers.11.mlp.up_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 495 |
+
Applied soft mask on model.layers.11.mlp.up_proj.
|
| 496 |
+
Applying mask on model.layers.11.mlp.down_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 497 |
+
Applied soft mask on model.layers.11.mlp.down_proj.
|
| 498 |
+
Applying mask on model.layers.12.self_attn.q_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 499 |
+
Applied soft mask on model.layers.12.self_attn.q_proj.
|
| 500 |
+
Applying mask on model.layers.12.self_attn.k_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 501 |
+
Applied soft mask on model.layers.12.self_attn.k_proj.
|
| 502 |
+
Applying mask on model.layers.12.self_attn.v_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 503 |
+
Applied soft mask on model.layers.12.self_attn.v_proj.
|
| 504 |
+
Applying mask on model.layers.12.self_attn.o_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 505 |
+
Applied soft mask on model.layers.12.self_attn.o_proj.
|
| 506 |
+
Applying mask on model.layers.12.mlp.gate_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 507 |
+
Applied soft mask on model.layers.12.mlp.gate_proj.
|
| 508 |
+
Applying mask on model.layers.12.mlp.up_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 509 |
+
Applied soft mask on model.layers.12.mlp.up_proj.
|
| 510 |
+
Applying mask on model.layers.12.mlp.down_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 511 |
+
Applied soft mask on model.layers.12.mlp.down_proj.
|
| 512 |
+
Applying mask on model.layers.13.self_attn.q_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 513 |
+
Applied soft mask on model.layers.13.self_attn.q_proj.
|
| 514 |
+
Applying mask on model.layers.13.self_attn.k_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 515 |
+
Applied soft mask on model.layers.13.self_attn.k_proj.
|
| 516 |
+
Applying mask on model.layers.13.self_attn.v_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 517 |
+
Applied soft mask on model.layers.13.self_attn.v_proj.
|
| 518 |
+
Applying mask on model.layers.13.self_attn.o_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 519 |
+
Applied soft mask on model.layers.13.self_attn.o_proj.
|
| 520 |
+
Applying mask on model.layers.13.mlp.gate_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 521 |
+
Applied soft mask on model.layers.13.mlp.gate_proj.
|
| 522 |
+
Applying mask on model.layers.13.mlp.up_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 523 |
+
Applied soft mask on model.layers.13.mlp.up_proj.
|
| 524 |
+
Applying mask on model.layers.13.mlp.down_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 525 |
+
Applied soft mask on model.layers.13.mlp.down_proj.
|
| 526 |
+
Applying mask on model.layers.14.self_attn.q_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 527 |
+
Applied soft mask on model.layers.14.self_attn.q_proj.
|
| 528 |
+
Applying mask on model.layers.14.self_attn.k_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 529 |
+
Applied soft mask on model.layers.14.self_attn.k_proj.
|
| 530 |
+
Applying mask on model.layers.14.self_attn.v_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 531 |
+
Applied soft mask on model.layers.14.self_attn.v_proj.
|
| 532 |
+
Applying mask on model.layers.14.self_attn.o_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 533 |
+
Applied soft mask on model.layers.14.self_attn.o_proj.
|
| 534 |
+
Applying mask on model.layers.14.mlp.gate_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 535 |
+
Applied soft mask on model.layers.14.mlp.gate_proj.
|
| 536 |
+
Applying mask on model.layers.14.mlp.up_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 537 |
+
Applied soft mask on model.layers.14.mlp.up_proj.
|
| 538 |
+
Applying mask on model.layers.14.mlp.down_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 539 |
+
Applied soft mask on model.layers.14.mlp.down_proj.
|
| 540 |
+
Applying mask on model.layers.15.self_attn.q_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 541 |
+
Applied soft mask on model.layers.15.self_attn.q_proj.
|
| 542 |
+
Applying mask on model.layers.15.self_attn.k_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 543 |
+
Applied soft mask on model.layers.15.self_attn.k_proj.
|
| 544 |
+
Applying mask on model.layers.15.self_attn.v_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 545 |
+
Applied soft mask on model.layers.15.self_attn.v_proj.
|
| 546 |
+
Applying mask on model.layers.15.self_attn.o_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 547 |
+
Applied soft mask on model.layers.15.self_attn.o_proj.
|
| 548 |
+
Applying mask on model.layers.15.mlp.gate_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 549 |
+
Applied soft mask on model.layers.15.mlp.gate_proj.
|
| 550 |
+
Applying mask on model.layers.15.mlp.up_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 551 |
+
Applied soft mask on model.layers.15.mlp.up_proj.
|
| 552 |
+
Applying mask on model.layers.15.mlp.down_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 553 |
+
Applied soft mask on model.layers.15.mlp.down_proj.
|
| 554 |
+
Applying mask on model.layers.16.self_attn.q_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 555 |
+
Applied soft mask on model.layers.16.self_attn.q_proj.
|
| 556 |
+
Applying mask on model.layers.16.self_attn.k_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 557 |
+
Applied soft mask on model.layers.16.self_attn.k_proj.
|
| 558 |
+
Applying mask on model.layers.16.self_attn.v_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 559 |
+
Applied soft mask on model.layers.16.self_attn.v_proj.
|
| 560 |
+
Applying mask on model.layers.16.self_attn.o_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 561 |
+
Applied soft mask on model.layers.16.self_attn.o_proj.
|
| 562 |
+
Applying mask on model.layers.16.mlp.gate_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 563 |
+
Applied soft mask on model.layers.16.mlp.gate_proj.
|
| 564 |
+
Applying mask on model.layers.16.mlp.up_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 565 |
+
Applied soft mask on model.layers.16.mlp.up_proj.
|
| 566 |
+
Applying mask on model.layers.16.mlp.down_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 567 |
+
Applied soft mask on model.layers.16.mlp.down_proj.
|
| 568 |
+
Applying mask on model.layers.17.self_attn.q_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 569 |
+
Applied soft mask on model.layers.17.self_attn.q_proj.
|
| 570 |
+
Applying mask on model.layers.17.self_attn.k_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 571 |
+
Applied soft mask on model.layers.17.self_attn.k_proj.
|
| 572 |
+
Applying mask on model.layers.17.self_attn.v_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 573 |
+
Applied soft mask on model.layers.17.self_attn.v_proj.
|
| 574 |
+
Applying mask on model.layers.17.self_attn.o_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 575 |
+
Applied soft mask on model.layers.17.self_attn.o_proj.
|
| 576 |
+
Applying mask on model.layers.17.mlp.gate_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 577 |
+
Applied soft mask on model.layers.17.mlp.gate_proj.
|
| 578 |
+
Applying mask on model.layers.17.mlp.up_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 579 |
+
Applied soft mask on model.layers.17.mlp.up_proj.
|
| 580 |
+
Applying mask on model.layers.17.mlp.down_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 581 |
+
Applied soft mask on model.layers.17.mlp.down_proj.
|
| 582 |
+
Applying mask on model.layers.18.self_attn.q_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 583 |
+
Applied soft mask on model.layers.18.self_attn.q_proj.
|
| 584 |
+
Applying mask on model.layers.18.self_attn.k_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 585 |
+
Applied soft mask on model.layers.18.self_attn.k_proj.
|
| 586 |
+
Applying mask on model.layers.18.self_attn.v_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 587 |
+
Applied soft mask on model.layers.18.self_attn.v_proj.
|
| 588 |
+
Applying mask on model.layers.18.self_attn.o_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 589 |
+
Applied soft mask on model.layers.18.self_attn.o_proj.
|
| 590 |
+
Applying mask on model.layers.18.mlp.gate_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 591 |
+
Applied soft mask on model.layers.18.mlp.gate_proj.
|
| 592 |
+
Applying mask on model.layers.18.mlp.up_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 593 |
+
Applied soft mask on model.layers.18.mlp.up_proj.
|
| 594 |
+
Applying mask on model.layers.18.mlp.down_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 595 |
+
Applied soft mask on model.layers.18.mlp.down_proj.
|
| 596 |
+
Applying mask on model.layers.19.self_attn.q_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 597 |
+
Applied soft mask on model.layers.19.self_attn.q_proj.
|
| 598 |
+
Applying mask on model.layers.19.self_attn.k_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 599 |
+
Applied soft mask on model.layers.19.self_attn.k_proj.
|
| 600 |
+
Applying mask on model.layers.19.self_attn.v_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 601 |
+
Applied soft mask on model.layers.19.self_attn.v_proj.
|
| 602 |
+
Applying mask on model.layers.19.self_attn.o_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 603 |
+
Applied soft mask on model.layers.19.self_attn.o_proj.
|
| 604 |
+
Applying mask on model.layers.19.mlp.gate_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 605 |
+
Applied soft mask on model.layers.19.mlp.gate_proj.
|
| 606 |
+
Applying mask on model.layers.19.mlp.up_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 607 |
+
Applied soft mask on model.layers.19.mlp.up_proj.
|
| 608 |
+
Applying mask on model.layers.19.mlp.down_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 609 |
+
Applied soft mask on model.layers.19.mlp.down_proj.
|
| 610 |
+
Applying mask on model.layers.20.self_attn.q_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 611 |
+
Applied soft mask on model.layers.20.self_attn.q_proj.
|
| 612 |
+
Applying mask on model.layers.20.self_attn.k_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 613 |
+
Applied soft mask on model.layers.20.self_attn.k_proj.
|
| 614 |
+
Applying mask on model.layers.20.self_attn.v_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 615 |
+
Applied soft mask on model.layers.20.self_attn.v_proj.
|
| 616 |
+
Applying mask on model.layers.20.self_attn.o_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 617 |
+
Applied soft mask on model.layers.20.self_attn.o_proj.
|
| 618 |
+
Applying mask on model.layers.20.mlp.gate_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 619 |
+
Applied soft mask on model.layers.20.mlp.gate_proj.
|
| 620 |
+
Applying mask on model.layers.20.mlp.up_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 621 |
+
Applied soft mask on model.layers.20.mlp.up_proj.
|
| 622 |
+
Applying mask on model.layers.20.mlp.down_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 623 |
+
Applied soft mask on model.layers.20.mlp.down_proj.
|
| 624 |
+
Applying mask on model.layers.21.self_attn.q_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 625 |
+
Applied soft mask on model.layers.21.self_attn.q_proj.
|
| 626 |
+
Applying mask on model.layers.21.self_attn.k_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 627 |
+
Applied soft mask on model.layers.21.self_attn.k_proj.
|
| 628 |
+
Applying mask on model.layers.21.self_attn.v_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 629 |
+
Applied soft mask on model.layers.21.self_attn.v_proj.
|
| 630 |
+
Applying mask on model.layers.21.self_attn.o_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 631 |
+
Applied soft mask on model.layers.21.self_attn.o_proj.
|
| 632 |
+
Applying mask on model.layers.21.mlp.gate_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 633 |
+
Applied soft mask on model.layers.21.mlp.gate_proj.
|
| 634 |
+
Applying mask on model.layers.21.mlp.up_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 635 |
+
Applied soft mask on model.layers.21.mlp.up_proj.
|
| 636 |
+
Applying mask on model.layers.21.mlp.down_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 637 |
+
Applied soft mask on model.layers.21.mlp.down_proj.
|
| 638 |
+
Applying mask on model.layers.22.self_attn.q_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 639 |
+
Applied soft mask on model.layers.22.self_attn.q_proj.
|
| 640 |
+
Applying mask on model.layers.22.self_attn.k_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 641 |
+
Applied soft mask on model.layers.22.self_attn.k_proj.
|
| 642 |
+
Applying mask on model.layers.22.self_attn.v_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 643 |
+
Applied soft mask on model.layers.22.self_attn.v_proj.
|
| 644 |
+
Applying mask on model.layers.22.self_attn.o_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 645 |
+
Applied soft mask on model.layers.22.self_attn.o_proj.
|
| 646 |
+
Applying mask on model.layers.22.mlp.gate_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 647 |
+
Applied soft mask on model.layers.22.mlp.gate_proj.
|
| 648 |
+
Applying mask on model.layers.22.mlp.up_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 649 |
+
Applied soft mask on model.layers.22.mlp.up_proj.
|
| 650 |
+
Applying mask on model.layers.22.mlp.down_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 651 |
+
Applied soft mask on model.layers.22.mlp.down_proj.
|
| 652 |
+
Applying mask on model.layers.23.self_attn.q_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 653 |
+
Applied soft mask on model.layers.23.self_attn.q_proj.
|
| 654 |
+
Applying mask on model.layers.23.self_attn.k_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 655 |
+
Applied soft mask on model.layers.23.self_attn.k_proj.
|
| 656 |
+
Applying mask on model.layers.23.self_attn.v_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 657 |
+
Applied soft mask on model.layers.23.self_attn.v_proj.
|
| 658 |
+
Applying mask on model.layers.23.self_attn.o_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 659 |
+
Applied soft mask on model.layers.23.self_attn.o_proj.
|
| 660 |
+
Applying mask on model.layers.23.mlp.gate_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 661 |
+
Applied soft mask on model.layers.23.mlp.gate_proj.
|
| 662 |
+
Applying mask on model.layers.23.mlp.up_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 663 |
+
Applied soft mask on model.layers.23.mlp.up_proj.
|
| 664 |
+
Applying mask on model.layers.23.mlp.down_proj with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 665 |
+
Applied soft mask on model.layers.23.mlp.down_proj.
|
| 666 |
+
Applying mask on _connector.0 with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 667 |
+
Applied soft mask on _connector.0.
|
| 668 |
+
Applying mask on _connector.2 with dtype, mask_dtype=torch.bfloat16, module_dtype=torch.bfloat16
|
| 669 |
+
Applied soft mask on _connector.2.
|
| 670 |
+
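The messages above record a soft-mask pass that wraps every attention and MLP projection of layers 0-23, plus the two connector layers, so that each weight matrix is scaled element-wise by a learned mask of the same dtype. As a rough illustration only (the actual TinyLLaVA mask-tuning code is not part of this log, and `MaskedLinear` / `apply_soft_masks` are hypothetical names), a minimal PyTorch sketch of such a pass could look like this:

```python
import torch
import torch.nn as nn
import torch.nn.functional as F

class MaskedLinear(nn.Module):
    """Wraps an nn.Linear so its weight is element-wise scaled by a soft mask."""
    def __init__(self, linear: nn.Linear):
        super().__init__()
        self.linear = linear
        # Soft mask with the same shape/dtype as the wrapped weight (bfloat16 in the log above).
        self.mask = nn.Parameter(torch.ones_like(linear.weight))

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        return F.linear(x, self.linear.weight * self.mask, self.linear.bias)

def apply_soft_masks(model: nn.Module,
                     targets=("q_proj", "k_proj", "v_proj", "o_proj",
                              "gate_proj", "up_proj", "down_proj")):
    """Replace each targeted nn.Linear with a masked wrapper, logging one pair of lines per module."""
    for name, module in model.named_modules():
        for child_name, child in list(module.named_children()):
            if isinstance(child, nn.Linear) and child_name in targets:
                print(f"Applying mask on {name}.{child_name} with "
                      f"mask_dtype={child.weight.dtype}, module_dtype={child.weight.dtype}")
                setattr(module, child_name, MaskedLinear(child))
                print(f"Applied soft mask on {name}.{child_name}.")
```

Calling `apply_soft_masks(model)` on a Qwen2-style decoder would then emit one "Applying mask" / "Applied soft mask" pair per projection, in the same order as the log above.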
Using cleaned config_mask (without mask parameters) for saving.
/opt/conda/envs/tinyllava/lib/python3.10/site-packages/torch/cuda/__init__.py:51: FutureWarning: The pynvml package is deprecated. Please install nvidia-ml-py instead. If you did not install pynvml directly, please report this to the maintainers of the package that installed pynvml for you.
import pynvml # type: ignore[import]
[2025-10-13 22:32:38,278] [INFO] [real_accelerator.py:191:get_accelerator] Setting ds_accelerator to cuda (auto detect)
/opt/conda/envs/tinyllava/lib/python3.10/site-packages/huggingface_hub/file_download.py:945: FutureWarning: `resume_download` is deprecated and will be removed in version 1.0.0. Downloads always resume when possible. If you want to force a new download, use `force_download=True`.
warnings.warn(
Special tokens have been added in the vocabulary, make sure the associated word embeddings are fine-tuned or trained.
0%| | 0/900 [00:00<?, ?it/s]/nfs/ywang29/TinyLLaVA/transformers/src/transformers/generation/configuration_utils.py:492: UserWarning: `do_sample` is set to `False`. However, `temperature` is set to `0.0` -- this flag is only used in sample-based generation modes. You should set `do_sample=True` or unset `temperature`.
warnings.warn(
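This UserWarning is transformers noting that `temperature=0.0` has no effect once `do_sample=False`. One way an eval script like this could avoid it, shown only as a sketch (the `max_new_tokens` value is illustrative, and `model` / `inputs` are assumed to come from the surrounding script), is to pass a GenerationConfig that leaves the sampling-only knobs at their defaults:

```python
from transformers import GenerationConfig

# Greedy decoding: do_sample=False, and temperature/top_p/top_k are simply not set
# (instead of being forced to 0), so the warning above is not triggered.
generation_config = GenerationConfig(do_sample=False, max_new_tokens=128)

# outputs = model.generate(**inputs, generation_config=generation_config)
```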
0%| | 1/900 [00:01<18:03, 1.21s/it]
[... per-step tqdm progress updates for steps 2/900 through 165/900 omitted ...]
18%|ββ | 166/900 [35:11<2:42:52, 13.31s/it]/opt/conda/envs/tinyllava/lib/python3.10/site-packages/PIL/Image.py:1047: UserWarning: Palette images with Transparency expressed in bytes should be converted to RGBA images
warnings.warn(
[... per-step tqdm progress updates for steps 167/900 through 312/900 omitted ...]
35%|ββββ | 313/900 [1:16:20<57:08, 5.84s/it]
The following log files were ADDED in this commit; each diff is too large to render (see the raw diff for the full contents):
logs_oct12/qwen2.5-0_5b_base_masktune_42_llm-connector_text-3.0_1.3_2e-1_connector-3.0_1.3_2e-1_ablation_20251013_065736.log
logs_oct12/qwen2.5-0_5b_base_masktune_42_llm-connector_text-3.0_1.5_2e-1_connector-3.0_1.5_2e-1_ablation_20251013_073153.log
logs_oct12/qwen2.5-0_5b_base_masktune_42_llm-connector_text-3.0_1.7_2e-1_connector-3.0_1.7_2e-1_ablation_20251013_080601.log
logs_oct12/qwen2.5-0_5b_base_masktune_42_llm-connector_text-3.0_1.9_2e-1_connector-3.0_1.9_2e-1_ablation_20251013_104850.log
logs_oct12/qwen2.5-0_5b_base_masktune_42_llm-connector_text-3.0_2.1_2e-1_connector-3.0_2.1_2e-1_ablation_20251013_113216.log
logs_oct12/qwen2.5-0_5b_base_masktune_42_llm-connector_text-3.0_2.3_2e-1_connector-3.0_2.3_2e-1_ablation_20251013_130305.log
logs_oct12/qwen2.5-0_5b_base_masktune_42_llm-connector_text-3.0_2.5_2e-1_connector-3.0_2.5_2e-1_ablation_20251013_143914.log
logs_oct12/qwen2.5-0_5b_base_masktune_42_llm-connector_text-3.0_2.7_2e-1_connector-3.0_2.7_2e-1_ablation_20251013_151303.log
logs_oct12/qwen2.5-0_5b_base_masktune_42_llm-connector_text-3.0_2.9_2e-1_connector-3.0_2.9_2e-1_ablation_20251013_154739.log
logs_oct12/qwen2.5-0_5b_base_masktune_42_llm-connector_text-5.0_0.7_2e-1_connector-5.0_0.7_2e-1_ablation_20251013_162143.log
logs_oct12/qwen2.5-0_5b_base_masktune_42_llm-connector_text-5.0_0.9_2e-1_connector-5.0_0.9_2e-1_ablation_20251013_165603.log
logs_oct12/qwen2.5-0_5b_base_masktune_42_llm-connector_text-5.0_1.1_2e-1_connector-5.0_1.1_2e-1_ablation_20251013_173027.log
logs_oct12/qwen2.5-0_5b_base_masktune_42_llm-connector_text-5.0_1.3_2e-1_connector-5.0_1.3_2e-1_ablation_20251013_180430.log
logs_oct12/qwen2.5-0_5b_base_masktune_42_llm-connector_text-5.0_1.5_2e-1_connector-5.0_1.5_2e-1_ablation_20251013_183828.log
logs_oct12/qwen2.5-0_5b_base_masktune_42_llm-connector_text-5.0_1.7_2e-1_connector-5.0_1.7_2e-1_ablation_20251013_191236.log
logs_oct12/qwen2.5-0_5b_base_masktune_42_llm-connector_text-5.0_1.9_2e-1_connector-5.0_1.9_2e-1_ablation_20251013_194705.log
logs_oct12/qwen2.5-0_5b_base_masktune_42_llm-connector_text-5.0_2.1_2e-1_connector-5.0_2.1_2e-1_ablation_20251013_202134.log
logs_oct12/qwen2.5-0_5b_base_masktune_42_llm-connector_text-5.0_2.3_2e-1_connector-5.0_2.3_2e-1_ablation_20251013_205557.log
logs_oct12/qwen2.5-0_5b_base_masktune_42_llm-connector_text-5.0_2.5_2e-1_connector-5.0_2.5_2e-1_ablation_20251013_213037.log