valeriaWong committed (verified) · Commit 9a3e816 · 1 parent: bfd1982

Upload folder using huggingface_hub
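For context, a minimal sketch of the kind of huggingface_hub call that produces a commit like this one; the local path and repo id below are placeholders, not values taken from the commit itself:

from huggingface_hub import HfApi

# Upload a local training folder to an existing Hub repo in a single commit.
# folder_path and repo_id are hypothetical examples.
api = HfApi()
api.upload_folder(
    folder_path="./work_dirs",          # hypothetical local folder
    repo_id="valeriaWong/<repo-name>",  # placeholder repo id
    repo_type="model",
    commit_message="Upload folder using huggingface_hub",
)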

This view is limited to 50 files because the commit contains too many changes; see the raw diff for the full set.
Files changed (50)
  1. .gitattributes +8 -0
  2. internvl_ft_run_10_filter/iter_4000.pth +3 -0
  3. internvl_ft_run_10_filter/last_checkpoint +1 -0
  4. internvl_ft_run_12_filter/20250304_104043/20250304_104043.log +195 -0
  5. internvl_ft_run_12_filter/20250304_104403/20250304_104403.log +203 -0
  6. internvl_ft_run_12_filter/20250304_104403/vis_data/events.out.tfevents.1741056245.intern-studio-40019814.17053.0 +3 -0
  7. internvl_ft_run_12_filter/20250304_111639/20250304_111639.log +0 -0
  8. internvl_ft_run_12_filter/20250304_111639/vis_data/events.out.tfevents.1741058201.intern-studio-40019814.23140.0 +3 -0
  9. internvl_ft_run_12_filter/20250304_112305/20250304_112305.log +0 -0
  10. internvl_ft_run_12_filter/20250304_112305/vis_data/events.out.tfevents.1741058586.intern-studio-40019814.25156.0 +3 -0
  11. internvl_ft_run_12_filter/20250304_112538/20250304_112538.log +0 -0
  12. internvl_ft_run_12_filter/20250304_112538/vis_data/events.out.tfevents.1741058739.intern-studio-40019814.26649.0 +3 -0
  13. internvl_ft_run_12_filter/20250304_113017/20250304_113017.log +633 -0
  14. internvl_ft_run_12_filter/20250304_113017/vis_data/events.out.tfevents.1741059019.intern-studio-40019814.28433.0 +3 -0
  15. internvl_ft_run_12_filter/20250304_114757/20250304_114757.log +0 -0
  16. internvl_ft_run_12_filter/20250304_114757/vis_data/events.out.tfevents.1741060079.intern-studio-40019814.34025.0 +3 -0
  17. internvl_ft_run_12_filter/internvl_v2_internlm2_2b_qlora_finetune_copy.py +145 -0
  18. internvl_ft_run_12_filter/iter_1000.pth +3 -0
  19. internvl_ft_run_12_filter/iter_2000.pth +3 -0
  20. internvl_ft_run_12_filter/iter_3000.pth +3 -0
  21. internvl_ft_run_12_filter/iter_4000.pth +3 -0
  22. internvl_ft_run_12_filter/iter_5000.pth +3 -0
  23. internvl_ft_run_12_filter/iter_6000.pth +3 -0
  24. internvl_ft_run_12_filter/iter_7000.pth +3 -0
  25. internvl_ft_run_12_filter/iter_8000.pth +3 -0
  26. internvl_ft_run_12_filter/iter_9000.pth +3 -0
  27. internvl_ft_run_12_filter/iter_9612.pth +3 -0
  28. internvl_ft_run_12_filter/last_checkpoint +1 -0
  29. internvl_ft_run_13_filter/20250304_121519/20250304_121519.log +464 -0
  30. internvl_ft_run_13_filter/20250304_121519/vis_data/events.out.tfevents.1741061720.intern-studio-40019814.41268.0 +3 -0
  31. internvl_ft_run_13_filter/20250304_213711/20250304_213711.log +0 -0
  32. internvl_ft_run_13_filter/20250304_213711/vis_data/events.out.tfevents.1741095432.intern-studio-40019814.159243.0 +3 -0
  33. internvl_ft_run_13_filter/internvl_v2_internlm2_2b_qlora_finetune_copy.py +146 -0
  34. internvl_ft_run_13_filter/iter_1000.pth +3 -0
  35. internvl_ft_run_13_filter/iter_10000.pth +3 -0
  36. internvl_ft_run_13_filter/iter_11000.pth +3 -0
  37. internvl_ft_run_13_filter/iter_12000.pth +3 -0
  38. internvl_ft_run_13_filter/iter_13000.pth +3 -0
  39. internvl_ft_run_13_filter/iter_14000.pth +3 -0
  40. internvl_ft_run_13_filter/iter_15000.pth +3 -0
  41. internvl_ft_run_13_filter/iter_16000.pth +3 -0
  42. internvl_ft_run_13_filter/iter_17000.pth +3 -0
  43. internvl_ft_run_13_filter/iter_18000.pth +3 -0
  44. internvl_ft_run_13_filter/iter_19000.pth +3 -0
  45. internvl_ft_run_13_filter/iter_19224.pth +3 -0
  46. internvl_ft_run_13_filter/iter_2000.pth +3 -0
  47. internvl_ft_run_13_filter/iter_3000.pth +3 -0
  48. internvl_ft_run_13_filter/iter_4000.pth +3 -0
  49. internvl_ft_run_13_filter/iter_5000.pth +3 -0
  50. internvl_ft_run_13_filter/iter_6000.pth +3 -0
.gitattributes CHANGED
@@ -33,3 +33,11 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ llava_ft/20240623_134854/vis_data/wandb/run-20240623_134900-1tj91a85/run-1tj91a85.wandb filter=lfs diff=lfs merge=lfs -text
+ llava_ft/20240623_135747/vis_data/wandb/run-20240623_135753-haxnz4ri/run-haxnz4ri.wandb filter=lfs diff=lfs merge=lfs -text
+ llava_ft/20240623_141103/vis_data/wandb/run-20240623_141111-j57x5qxb/run-j57x5qxb.wandb filter=lfs diff=lfs merge=lfs -text
+ llava_ft/20240623_150212/vis_data/wandb/run-20240623_150217-mlt4pykm/run-mlt4pykm.wandb filter=lfs diff=lfs merge=lfs -text
+ llava_ft/20240623_151455/vis_data/wandb/run-20240623_151501-nv52dul7/run-nv52dul7.wandb filter=lfs diff=lfs merge=lfs -text
+ llava_ft/20240623_153346/vis_data/wandb/run-20240623_153353-q9ka8o44/run-q9ka8o44.wandb filter=lfs diff=lfs merge=lfs -text
+ llava_ft/20240623_191953/vis_data/wandb/run-20240623_191959-ljo9rur3/run-ljo9rur3.wandb filter=lfs diff=lfs merge=lfs -text
+ llava_ft/20240623_193616/vis_data/wandb/run-20240623_193622-jigkwuih/run-jigkwuih.wandb filter=lfs diff=lfs merge=lfs -text
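These rules route matching files through Git LFS, so large binaries (zip/zstd archives, TensorBoard event files, and now the wandb run files) are stored as pointers instead of plain blobs. A rough Python sketch of the matching logic; fnmatch only approximates gitattributes pattern semantics, and the wandb pattern is simplified for illustration:

from fnmatch import fnmatch

# Patterns adapted from the .gitattributes diff above.
lfs_patterns = ["*.zip", "*.zst", "*tfevents*",
                "llava_ft/*/vis_data/wandb/*/*.wandb"]

def is_lfs_tracked(path: str) -> bool:
    # A path matching any pattern is stored as an LFS pointer.
    return any(fnmatch(path, pat) for pat in lfs_patterns)

print(is_lfs_tracked("internvl_ft_run_12_filter/20250304_104403/vis_data/"
                     "events.out.tfevents.1741056245.intern-studio-40019814.17053.0"))  # True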
internvl_ft_run_10_filter/iter_4000.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:83c4428c4241b5c7f332d93048ca22713c8b704dab05f6f747be468ed4a705e6
+ size 300000
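The three lines above are the standard Git LFS pointer format: spec version, sha256 object id, and byte size. A small sketch for parsing it (function and variable names are illustrative):

def parse_lfs_pointer(text: str) -> dict:
    # Each pointer line is "<key> <value>".
    fields = dict(line.split(" ", 1) for line in text.strip().splitlines())
    return {
        "version": fields["version"],
        "sha256": fields["oid"].removeprefix("sha256:"),
        "size_bytes": int(fields["size"]),
    }

pointer = (
    "version https://git-lfs.github.com/spec/v1\n"
    "oid sha256:83c4428c4241b5c7f332d93048ca22713c8b704dab05f6f747be468ed4a705e6\n"
    "size 300000\n"
)
print(parse_lfs_pointer(pointer)["size_bytes"])  # 300000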
internvl_ft_run_10_filter/last_checkpoint ADDED
@@ -0,0 +1 @@
+ /root/wangqun/work_dirs/internvl_ft_run_10_filter/iter_4608.pth
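last_checkpoint is a one-line marker file pointing at the newest checkpoint in the work dir, which a resume step can read back. An illustrative sketch, not xtuner's exact resume code:

from pathlib import Path

# Path taken from the diff above.
work_dir = Path("/root/wangqun/work_dirs/internvl_ft_run_10_filter")
latest = (work_dir / "last_checkpoint").read_text().strip()
print(latest)  # /root/wangqun/work_dirs/internvl_ft_run_10_filter/iter_4608.pth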
internvl_ft_run_12_filter/20250304_104043/20250304_104043.log ADDED
@@ -0,0 +1,195 @@
+ 2025/03/04 10:40:44 - mmengine - DEBUG - An `DeepSpeedStrategy` instance is built from registry, and its implementation can be found in xtuner.engine._strategy.deepspeed
+ 2025/03/04 10:40:44 - mmengine - INFO -
+ ------------------------------------------------------------
+ System environment:
+     sys.platform: linux
+     Python: 3.10.13 (main, Sep 11 2023, 13:44:35) [GCC 11.2.0]
+     CUDA available: True
+     MUSA available: False
+     numpy_random_seed: 1685007727
+     GPU 0: NVIDIA A100-SXM4-80GB
+     CUDA_HOME: /usr/local/cuda
+     NVCC: Cuda compilation tools, release 12.2, V12.2.140
+     GCC: gcc (Ubuntu 9.4.0-1ubuntu1~20.04.2) 9.4.0
+     PyTorch: 2.4.1+cu121
+     PyTorch compiling details: PyTorch built with:
+   - GCC 9.3
+   - C++ Version: 201703
+   - Intel(R) oneAPI Math Kernel Library Version 2022.2-Product Build 20220804 for Intel(R) 64 architecture applications
+   - Intel(R) MKL-DNN v3.4.2 (Git Hash 1137e04ec0b5251ca2b4400a4fd3c667ce843d67)
+   - OpenMP 201511 (a.k.a. OpenMP 4.5)
+   - LAPACK is enabled (usually provided by MKL)
+   - NNPACK is enabled
+   - CPU capability usage: AVX512
+   - CUDA Runtime 12.1
+   - NVCC architecture flags: -gencode;arch=compute_50,code=sm_50;-gencode;arch=compute_60,code=sm_60;-gencode;arch=compute_70,code=sm_70;-gencode;arch=compute_75,code=sm_75;-gencode;arch=compute_80,code=sm_80;-gencode;arch=compute_86,code=sm_86;-gencode;arch=compute_90,code=sm_90
+   - CuDNN 90.1 (built against CUDA 12.4)
+   - Magma 2.6.1
+   - Build settings: BLAS_INFO=mkl, BUILD_TYPE=Release, CUDA_VERSION=12.1, CUDNN_VERSION=9.1.0, CXX_COMPILER=/opt/rh/devtoolset-9/root/usr/bin/c++, CXX_FLAGS= -D_GLIBCXX_USE_CXX11_ABI=0 -fabi-version=11 -fvisibility-inlines-hidden -DUSE_PTHREADPOOL -DNDEBUG -DUSE_KINETO -DLIBKINETO_NOROCTRACER -DUSE_FBGEMM -DUSE_PYTORCH_QNNPACK -DUSE_XNNPACK -DSYMBOLICATE_MOBILE_DEBUG_HANDLE -O2 -fPIC -Wall -Wextra -Werror=return-type -Werror=non-virtual-dtor -Werror=bool-operation -Wnarrowing -Wno-missing-field-initializers -Wno-type-limits -Wno-array-bounds -Wno-unknown-pragmas -Wno-unused-parameter -Wno-unused-function -Wno-unused-result -Wno-strict-overflow -Wno-strict-aliasing -Wno-stringop-overflow -Wsuggest-override -Wno-psabi -Wno-error=pedantic -Wno-error=old-style-cast -Wno-missing-braces -fdiagnostics-color=always -faligned-new -Wno-unused-but-set-variable -Wno-maybe-uninitialized -fno-math-errno -fno-trapping-math -Werror=format -Wno-stringop-overflow, LAPACK_INFO=mkl, PERF_WITH_AVX=1, PERF_WITH_AVX2=1, PERF_WITH_AVX512=1, TORCH_VERSION=2.4.1, USE_CUDA=ON, USE_CUDNN=ON, USE_CUSPARSELT=1, USE_EXCEPTION_PTR=1, USE_GFLAGS=OFF, USE_GLOG=OFF, USE_GLOO=ON, USE_MKL=ON, USE_MKLDNN=ON, USE_MPI=OFF, USE_NCCL=1, USE_NNPACK=ON, USE_OPENMP=ON, USE_ROCM=OFF, USE_ROCM_KERNEL_ASSERT=OFF,
+
+     TorchVision: 0.19.1+cu121
+     OpenCV: 4.9.0
+     MMEngine: 0.10.6
+
+ Runtime environment:
+     launcher: none
+     randomness: {'seed': None, 'deterministic': False}
+     cudnn_benchmark: False
+     mp_cfg: {'mp_start_method': 'fork', 'opencv_num_threads': 0}
+     dist_cfg: {'backend': 'nccl'}
+     seed: None
+     deterministic: False
+     Distributed launcher: none
+     Distributed training: False
+     GPU number: 1
+ ------------------------------------------------------------
+
+ 2025/03/04 10:40:44 - mmengine - INFO - Config:
+ accumulative_counts = 2
+ batch_size = 1
+ betas = (
+     0.9,
+     0.999,
+ )
+ custom_hooks = [
+     dict(
+         tokenizer=dict(
+             pretrained_model_name_or_path='/root/models/InternVL2_2B',
+             trust_remote_code=True,
+             type='transformers.AutoTokenizer.from_pretrained'),
+         type='xtuner.engine.hooks.DatasetInfoHook'),
+ ]
+ data_path = '/root/data/tempData/screenshot_od/layout_ocr_multi_scale.json'
+ data_root = '/root/data/tempData/screenshot_od'
+ dataloader_num_workers = 4
+ default_hooks = dict(
+     checkpoint=dict(
+         by_epoch=False,
+         interval=1000,
+         max_keep_ckpts=-1,
+         save_optimizer=False,
+         type='mmengine.hooks.CheckpointHook'),
+     logger=dict(
+         interval=10,
+         log_metric_by_epoch=False,
+         type='mmengine.hooks.LoggerHook'),
+     param_scheduler=dict(type='mmengine.hooks.ParamSchedulerHook'),
+     sampler_seed=dict(type='mmengine.hooks.DistSamplerSeedHook'),
+     timer=dict(type='mmengine.hooks.IterTimerHook'))
+ env_cfg = dict(
+     cudnn_benchmark=False,
+     dist_cfg=dict(backend='nccl'),
+     mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0))
+ image_folder = '/root/data/tempData/screenshot_odimages'
+ launcher = 'none'
+ llava_dataset = dict(
+     data_paths='/root/data/tempData/screenshot_od/layout_ocr_multi_scale.json',
+     image_folders='/root/data/tempData/screenshot_odimages',
+     max_length=8192,
+     model_path='/root/models/InternVL2_2B',
+     template='xtuner.utils.PROMPT_TEMPLATE.internlm2_chat',
+     type='xtuner.dataset.InternVL_V1_5_Dataset')
+ load_from = None
+ log_level = 'DEBUG'
+ log_processor = dict(by_epoch=False)
+ lr = 2e-05
+ max_epochs = 4
+ max_length = 8192
+ max_norm = 1
+ model = dict(
+     freeze_llm=True,
+     freeze_visual_encoder=True,
+     llm_lora=dict(
+         lora_alpha=256,
+         lora_dropout=0.05,
+         r=128,
+         target_modules=None,
+         task_type='CAUSAL_LM',
+         type='peft.LoraConfig'),
+     model_path='/root/models/InternVL2_2B',
+     quantization_llm=True,
+     quantization_vit=False,
+     type='xtuner.model.InternVL_V1_5')
+ optim_type = 'torch.optim.AdamW'
+ optim_wrapper = dict(
+     optimizer=dict(
+         betas=(
+             0.9,
+             0.999,
+         ),
+         lr=2e-05,
+         type='torch.optim.AdamW',
+         weight_decay=0.05),
+     type='DeepSpeedOptimWrapper')
+ param_scheduler = [
+     dict(
+         begin=0,
+         by_epoch=True,
+         convert_to_iter_based=True,
+         end=0.12,
+         start_factor=1e-05,
+         type='mmengine.optim.LinearLR'),
+     dict(
+         begin=0.12,
+         by_epoch=True,
+         convert_to_iter_based=True,
+         end=4,
+         eta_min=0.0,
+         type='mmengine.optim.CosineAnnealingLR'),
+ ]
+ path = '/root/models/InternVL2_2B'
+ prompt_template = 'xtuner.utils.PROMPT_TEMPLATE.internlm2_chat'
+ randomness = dict(deterministic=False, seed=None)
+ resume = False
+ runner_type = 'FlexibleRunner'
+ save_steps = 1000
+ save_total_limit = -1
+ strategy = dict(
+     config=dict(
+         bf16=dict(enabled=True),
+         fp16=dict(enabled=False, initial_scale_power=16),
+         gradient_accumulation_steps='auto',
+         gradient_clipping='auto',
+         train_micro_batch_size_per_gpu='auto',
+         zero_allow_untested_optimizer=True,
+         zero_force_ds_cpu_optimizer=False,
+         zero_optimization=dict(overlap_comm=True, stage=2)),
+     exclude_frozen_parameters=True,
+     gradient_accumulation_steps=2,
+     gradient_clipping=1,
+     sequence_parallel_size=1,
+     train_micro_batch_size_per_gpu=1,
+     type='xtuner.engine.DeepSpeedStrategy')
+ tokenizer = dict(
+     pretrained_model_name_or_path='/root/models/InternVL2_2B',
+     trust_remote_code=True,
+     type='transformers.AutoTokenizer.from_pretrained')
+ train_cfg = dict(max_epochs=4, type='xtuner.engine.runner.TrainLoop')
+ train_dataloader = dict(
+     batch_size=1,
+     collate_fn=dict(type='xtuner.dataset.collate_fns.default_collate_fn'),
+     dataset=dict(
+         data_paths=
+         '/root/data/tempData/screenshot_od/layout_ocr_multi_scale.json',
+         image_folders='/root/data/tempData/screenshot_odimages',
+         max_length=8192,
+         model_path='/root/models/InternVL2_2B',
+         template='xtuner.utils.PROMPT_TEMPLATE.internlm2_chat',
+         type='xtuner.dataset.InternVL_V1_5_Dataset'),
+     num_workers=4,
+     sampler=dict(
+         length_property='modality_length',
+         per_device_batch_size=2,
+         type='xtuner.dataset.samplers.LengthGroupedSampler'))
+ visualizer = dict(
+     type='mmengine.visualization.Visualizer',
+     vis_backends=[
+         dict(type='mmengine.visualization.TensorboardVisBackend'),
+     ])
+ warmup_ratio = 0.03
+ weight_decay = 0.05
+ work_dir = '/root/wangqun/work_dirs/internvl_ft_run_12_filter'
+
+ 2025/03/04 10:40:44 - mmengine - DEBUG - An `TensorboardVisBackend` instance is built from registry, and its implementation can be found in mmengine.visualization.vis_backend
+ 2025/03/04 10:40:44 - mmengine - DEBUG - An `Visualizer` instance is built from registry, and its implementation can be found in mmengine.visualization.visualizer
+ 2025/03/04 10:40:44 - mmengine - DEBUG - Attribute `_env_initialized` is not defined in <class 'mmengine.visualization.vis_backend.TensorboardVisBackend'> or `<class 'mmengine.visualization.vis_backend.TensorboardVisBackend'>._env_initialized is False, `_init_env` will be called and <class 'mmengine.visualization.vis_backend.TensorboardVisBackend'>._env_initialized will be set to True
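The llm_lora block in the config above maps one-to-one onto a peft LoraConfig; a minimal equivalent sketch, assuming only the values shown in the log:

from peft import LoraConfig

lora_cfg = LoraConfig(
    r=128,                # LoRA rank, as in the config
    lora_alpha=256,       # scaling factor
    lora_dropout=0.05,
    target_modules=None,  # let the framework choose default target layers
    task_type="CAUSAL_LM",
)

Together with quantization_llm=True this is a QLoRA setup: the frozen LLM weights are 4-bit quantized (the Linear4bit layers visible in the model dump later in these logs) while only the LoRA adapters are trained.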
internvl_ft_run_12_filter/20250304_104403/20250304_104403.log ADDED
@@ -0,0 +1,203 @@
+ 2025/03/04 10:44:03 - mmengine - DEBUG - An `DeepSpeedStrategy` instance is built from registry, and its implementation can be found in xtuner.engine._strategy.deepspeed
+ 2025/03/04 10:44:03 - mmengine - INFO -
+ ------------------------------------------------------------
+ System environment:
+     sys.platform: linux
+     Python: 3.10.13 (main, Sep 11 2023, 13:44:35) [GCC 11.2.0]
+     CUDA available: True
+     MUSA available: False
+     numpy_random_seed: 1125505633
+     GPU 0: NVIDIA A100-SXM4-80GB
+     CUDA_HOME: /usr/local/cuda
+     NVCC: Cuda compilation tools, release 12.2, V12.2.140
+     GCC: gcc (Ubuntu 9.4.0-1ubuntu1~20.04.2) 9.4.0
+     PyTorch: 2.4.1+cu121
+     PyTorch compiling details: PyTorch built with:
+   - GCC 9.3
+   - C++ Version: 201703
+   - Intel(R) oneAPI Math Kernel Library Version 2022.2-Product Build 20220804 for Intel(R) 64 architecture applications
+   - Intel(R) MKL-DNN v3.4.2 (Git Hash 1137e04ec0b5251ca2b4400a4fd3c667ce843d67)
+   - OpenMP 201511 (a.k.a. OpenMP 4.5)
+   - LAPACK is enabled (usually provided by MKL)
+   - NNPACK is enabled
+   - CPU capability usage: AVX512
+   - CUDA Runtime 12.1
+   - NVCC architecture flags: -gencode;arch=compute_50,code=sm_50;-gencode;arch=compute_60,code=sm_60;-gencode;arch=compute_70,code=sm_70;-gencode;arch=compute_75,code=sm_75;-gencode;arch=compute_80,code=sm_80;-gencode;arch=compute_86,code=sm_86;-gencode;arch=compute_90,code=sm_90
+   - CuDNN 90.1 (built against CUDA 12.4)
+   - Magma 2.6.1
+   - Build settings: BLAS_INFO=mkl, BUILD_TYPE=Release, CUDA_VERSION=12.1, CUDNN_VERSION=9.1.0, CXX_COMPILER=/opt/rh/devtoolset-9/root/usr/bin/c++, CXX_FLAGS= -D_GLIBCXX_USE_CXX11_ABI=0 -fabi-version=11 -fvisibility-inlines-hidden -DUSE_PTHREADPOOL -DNDEBUG -DUSE_KINETO -DLIBKINETO_NOROCTRACER -DUSE_FBGEMM -DUSE_PYTORCH_QNNPACK -DUSE_XNNPACK -DSYMBOLICATE_MOBILE_DEBUG_HANDLE -O2 -fPIC -Wall -Wextra -Werror=return-type -Werror=non-virtual-dtor -Werror=bool-operation -Wnarrowing -Wno-missing-field-initializers -Wno-type-limits -Wno-array-bounds -Wno-unknown-pragmas -Wno-unused-parameter -Wno-unused-function -Wno-unused-result -Wno-strict-overflow -Wno-strict-aliasing -Wno-stringop-overflow -Wsuggest-override -Wno-psabi -Wno-error=pedantic -Wno-error=old-style-cast -Wno-missing-braces -fdiagnostics-color=always -faligned-new -Wno-unused-but-set-variable -Wno-maybe-uninitialized -fno-math-errno -fno-trapping-math -Werror=format -Wno-stringop-overflow, LAPACK_INFO=mkl, PERF_WITH_AVX=1, PERF_WITH_AVX2=1, PERF_WITH_AVX512=1, TORCH_VERSION=2.4.1, USE_CUDA=ON, USE_CUDNN=ON, USE_CUSPARSELT=1, USE_EXCEPTION_PTR=1, USE_GFLAGS=OFF, USE_GLOG=OFF, USE_GLOO=ON, USE_MKL=ON, USE_MKLDNN=ON, USE_MPI=OFF, USE_NCCL=1, USE_NNPACK=ON, USE_OPENMP=ON, USE_ROCM=OFF, USE_ROCM_KERNEL_ASSERT=OFF,
+
+     TorchVision: 0.19.1+cu121
+     OpenCV: 4.9.0
+     MMEngine: 0.10.6
+
+ Runtime environment:
+     launcher: none
+     randomness: {'seed': None, 'deterministic': False}
+     cudnn_benchmark: False
+     mp_cfg: {'mp_start_method': 'fork', 'opencv_num_threads': 0}
+     dist_cfg: {'backend': 'nccl'}
+     seed: None
+     deterministic: False
+     Distributed launcher: none
+     Distributed training: False
+     GPU number: 1
+ ------------------------------------------------------------
+
+ 2025/03/04 10:44:04 - mmengine - INFO - Config:
+ accumulative_counts = 2
+ batch_size = 1
+ betas = (
+     0.9,
+     0.999,
+ )
+ custom_hooks = [
+     dict(
+         tokenizer=dict(
+             pretrained_model_name_or_path='/root/models/InternVL2_2B',
+             trust_remote_code=True,
+             type='transformers.AutoTokenizer.from_pretrained'),
+         type='xtuner.engine.hooks.DatasetInfoHook'),
+ ]
+ data_path = '/root/data/tempData/screenshot_od/layout_ocr_multi_scale.json'
+ data_root = '/root/data/tempData/screenshot_od'
+ dataloader_num_workers = 4
+ default_hooks = dict(
+     checkpoint=dict(
+         by_epoch=False,
+         interval=1000,
+         max_keep_ckpts=-1,
+         save_optimizer=False,
+         type='mmengine.hooks.CheckpointHook'),
+     logger=dict(
+         interval=10,
+         log_metric_by_epoch=False,
+         type='mmengine.hooks.LoggerHook'),
+     param_scheduler=dict(type='mmengine.hooks.ParamSchedulerHook'),
+     sampler_seed=dict(type='mmengine.hooks.DistSamplerSeedHook'),
+     timer=dict(type='mmengine.hooks.IterTimerHook'))
+ env_cfg = dict(
+     cudnn_benchmark=False,
+     dist_cfg=dict(backend='nccl'),
+     mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0))
+ image_folder = '/root/data/tempData/screenshot_odimages'
+ launcher = 'none'
+ llava_dataset = dict(
+     data_paths='/root/data/tempData/screenshot_od/layout_ocr_multi_scale.json',
+     image_folders='/root/data/tempData/screenshot_odimages',
+     max_length=8192,
+     model_path='/root/models/InternVL2_2B',
+     template='xtuner.utils.PROMPT_TEMPLATE.internlm2_chat',
+     type='xtuner.dataset.InternVL_V1_5_Dataset')
+ load_from = None
+ log_level = 'DEBUG'
+ log_processor = dict(by_epoch=False)
+ lr = 2e-05
+ max_epochs = 4
+ max_length = 8192
+ max_norm = 1
+ model = dict(
+     freeze_llm=True,
+     freeze_visual_encoder=True,
+     llm_lora=dict(
+         lora_alpha=256,
+         lora_dropout=0.05,
+         r=128,
+         target_modules=None,
+         task_type='CAUSAL_LM',
+         type='peft.LoraConfig'),
+     model_path='/root/models/InternVL2_2B',
+     quantization_llm=True,
+     quantization_vit=False,
+     type='xtuner.model.InternVL_V1_5')
+ optim_type = 'torch.optim.AdamW'
+ optim_wrapper = dict(
+     optimizer=dict(
+         betas=(
+             0.9,
+             0.999,
+         ),
+         lr=2e-05,
+         type='torch.optim.AdamW',
+         weight_decay=0.05),
+     type='DeepSpeedOptimWrapper')
+ param_scheduler = [
+     dict(
+         begin=0,
+         by_epoch=True,
+         convert_to_iter_based=True,
+         end=0.12,
+         start_factor=1e-05,
+         type='mmengine.optim.LinearLR'),
+     dict(
+         begin=0.12,
+         by_epoch=True,
+         convert_to_iter_based=True,
+         end=4,
+         eta_min=0.0,
+         type='mmengine.optim.CosineAnnealingLR'),
+ ]
+ path = '/root/models/InternVL2_2B'
+ prompt_template = 'xtuner.utils.PROMPT_TEMPLATE.internlm2_chat'
+ randomness = dict(deterministic=False, seed=None)
+ resume = False
+ runner_type = 'FlexibleRunner'
+ save_steps = 1000
+ save_total_limit = -1
+ strategy = dict(
+     config=dict(
+         bf16=dict(enabled=True),
+         fp16=dict(enabled=False, initial_scale_power=16),
+         gradient_accumulation_steps='auto',
+         gradient_clipping='auto',
+         train_micro_batch_size_per_gpu='auto',
+         zero_allow_untested_optimizer=True,
+         zero_force_ds_cpu_optimizer=False,
+         zero_optimization=dict(overlap_comm=True, stage=2)),
+     exclude_frozen_parameters=True,
+     gradient_accumulation_steps=2,
+     gradient_clipping=1,
+     sequence_parallel_size=1,
+     train_micro_batch_size_per_gpu=1,
+     type='xtuner.engine.DeepSpeedStrategy')
+ tokenizer = dict(
+     pretrained_model_name_or_path='/root/models/InternVL2_2B',
+     trust_remote_code=True,
+     type='transformers.AutoTokenizer.from_pretrained')
+ train_cfg = dict(max_epochs=4, type='xtuner.engine.runner.TrainLoop')
+ train_dataloader = dict(
+     batch_size=1,
+     collate_fn=dict(type='xtuner.dataset.collate_fns.default_collate_fn'),
+     dataset=dict(
+         data_paths=
+         '/root/data/tempData/screenshot_od/layout_ocr_multi_scale.json',
+         image_folders='/root/data/tempData/screenshot_odimages',
+         max_length=8192,
+         model_path='/root/models/InternVL2_2B',
+         template='xtuner.utils.PROMPT_TEMPLATE.internlm2_chat',
+         type='xtuner.dataset.InternVL_V1_5_Dataset'),
+     num_workers=4,
+     sampler=dict(
+         length_property='modality_length',
+         per_device_batch_size=2,
+         type='xtuner.dataset.samplers.LengthGroupedSampler'))
+ visualizer = dict(
+     type='mmengine.visualization.Visualizer',
+     vis_backends=[
+         dict(type='mmengine.visualization.TensorboardVisBackend'),
+     ])
+ warmup_ratio = 0.03
+ weight_decay = 0.05
+ work_dir = '/root/wangqun/work_dirs/internvl_ft_run_12_filter'
+
+ 2025/03/04 10:44:04 - mmengine - DEBUG - An `TensorboardVisBackend` instance is built from registry, and its implementation can be found in mmengine.visualization.vis_backend
+ 2025/03/04 10:44:04 - mmengine - DEBUG - An `Visualizer` instance is built from registry, and its implementation can be found in mmengine.visualization.visualizer
+ 2025/03/04 10:44:04 - mmengine - DEBUG - Attribute `_env_initialized` is not defined in <class 'mmengine.visualization.vis_backend.TensorboardVisBackend'> or `<class 'mmengine.visualization.vis_backend.TensorboardVisBackend'>._env_initialized is False, `_init_env` will be called and <class 'mmengine.visualization.vis_backend.TensorboardVisBackend'>._env_initialized will be set to True
+ 2025/03/04 10:44:05 - mmengine - DEBUG - Get class `RuntimeInfoHook` from "hook" registry in "mmengine"
+ 2025/03/04 10:44:05 - mmengine - DEBUG - An `RuntimeInfoHook` instance is built from registry, and its implementation can be found in mmengine.hooks.runtime_info_hook
+ 2025/03/04 10:44:05 - mmengine - DEBUG - An `IterTimerHook` instance is built from registry, and its implementation can be found in mmengine.hooks.iter_timer_hook
+ 2025/03/04 10:44:05 - mmengine - DEBUG - An `DistSamplerSeedHook` instance is built from registry, and its implementation can be found in mmengine.hooks.sampler_seed_hook
+ 2025/03/04 10:44:05 - mmengine - DEBUG - An `LoggerHook` instance is built from registry, and its implementation can be found in mmengine.hooks.logger_hook
+ 2025/03/04 10:44:05 - mmengine - DEBUG - An `ParamSchedulerHook` instance is built from registry, and its implementation can be found in mmengine.hooks.param_scheduler_hook
+ 2025/03/04 10:44:05 - mmengine - DEBUG - An `CheckpointHook` instance is built from registry, and its implementation can be found in mmengine.hooks.checkpoint_hook
+ 2025/03/04 10:44:05 - mmengine - WARNING - Failed to search registry with scope "mmengine" in the "builder" registry tree. As a workaround, the current "builder" registry in "xtuner" is used to build instance. This may cause unexpected failure when running the built modules. Please check whether "mmengine" is a correct scope, or whether the registry is initialized.
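The param_scheduler list in the config above is a linear warmup over the first 3% of the 4 epochs (hence by_epoch end=0.12) followed by cosine decay to zero. A plain-PyTorch sketch of the same shape; the 9,612 total iterations are inferred from run 12's final checkpoint (iter_9612.pth) and the 288-step warmup is 3% of that, so both counts are assumptions rather than values read from the log (xtuner builds this via mmengine schedulers, not literally with SequentialLR):

import torch

params = [torch.nn.Parameter(torch.zeros(1))]
opt = torch.optim.AdamW(params, lr=2e-05, betas=(0.9, 0.999), weight_decay=0.05)
warmup = torch.optim.lr_scheduler.LinearLR(opt, start_factor=1e-05, total_iters=288)
cosine = torch.optim.lr_scheduler.CosineAnnealingLR(opt, T_max=9612 - 288, eta_min=0.0)
sched = torch.optim.lr_scheduler.SequentialLR(
    opt, schedulers=[warmup, cosine], milestones=[288])

for _ in range(288):   # one scheduler step per optimizer step
    opt.step()
    sched.step()
print(sched.get_last_lr())  # ~2e-05: warmup has just finished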
internvl_ft_run_12_filter/20250304_104403/vis_data/events.out.tfevents.1741056245.intern-studio-40019814.17053.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a1c288ad221462a4a8a981e1feef2141f61fb02c92b0d81c47078d4d277a3fa4
+ size 4845
internvl_ft_run_12_filter/20250304_111639/20250304_111639.log ADDED
The diff for this file is too large to render. See raw diff
 
internvl_ft_run_12_filter/20250304_111639/vis_data/events.out.tfevents.1741058201.intern-studio-40019814.23140.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6c197cbc5fe46cd6ee94a44546a7ff96eed17a9f96594b200c09a174483bdf28
+ size 4983
internvl_ft_run_12_filter/20250304_112305/20250304_112305.log ADDED
The diff for this file is too large to render. See raw diff
 
internvl_ft_run_12_filter/20250304_112305/vis_data/events.out.tfevents.1741058586.intern-studio-40019814.25156.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a3ecc6d8f1eb6882b13ebc2091216762d530789a91dd5d211545f065c08500c1
+ size 4983
internvl_ft_run_12_filter/20250304_112538/20250304_112538.log ADDED
The diff for this file is too large to render. See raw diff
 
internvl_ft_run_12_filter/20250304_112538/vis_data/events.out.tfevents.1741058739.intern-studio-40019814.26649.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:be6291cbd77f2207ec598e9e75e13fab608b52277bea94cf9dda0e5d99ff87c1
+ size 4987
internvl_ft_run_12_filter/20250304_113017/20250304_113017.log ADDED
@@ -0,0 +1,633 @@
+ 2025/03/04 11:30:18 - mmengine - DEBUG - An `DeepSpeedStrategy` instance is built from registry, and its implementation can be found in xtuner.engine._strategy.deepspeed
+ 2025/03/04 11:30:18 - mmengine - INFO -
+ ------------------------------------------------------------
+ System environment:
+     sys.platform: linux
+     Python: 3.10.13 (main, Sep 11 2023, 13:44:35) [GCC 11.2.0]
+     CUDA available: True
+     MUSA available: False
+     numpy_random_seed: 1251330375
+     GPU 0: NVIDIA A100-SXM4-80GB
+     CUDA_HOME: /usr/local/cuda
+     NVCC: Cuda compilation tools, release 12.2, V12.2.140
+     GCC: gcc (Ubuntu 9.4.0-1ubuntu1~20.04.2) 9.4.0
+     PyTorch: 2.4.1+cu121
+     PyTorch compiling details: PyTorch built with:
+   - GCC 9.3
+   - C++ Version: 201703
+   - Intel(R) oneAPI Math Kernel Library Version 2022.2-Product Build 20220804 for Intel(R) 64 architecture applications
+   - Intel(R) MKL-DNN v3.4.2 (Git Hash 1137e04ec0b5251ca2b4400a4fd3c667ce843d67)
+   - OpenMP 201511 (a.k.a. OpenMP 4.5)
+   - LAPACK is enabled (usually provided by MKL)
+   - NNPACK is enabled
+   - CPU capability usage: AVX512
+   - CUDA Runtime 12.1
+   - NVCC architecture flags: -gencode;arch=compute_50,code=sm_50;-gencode;arch=compute_60,code=sm_60;-gencode;arch=compute_70,code=sm_70;-gencode;arch=compute_75,code=sm_75;-gencode;arch=compute_80,code=sm_80;-gencode;arch=compute_86,code=sm_86;-gencode;arch=compute_90,code=sm_90
+   - CuDNN 90.1 (built against CUDA 12.4)
+   - Magma 2.6.1
+   - Build settings: BLAS_INFO=mkl, BUILD_TYPE=Release, CUDA_VERSION=12.1, CUDNN_VERSION=9.1.0, CXX_COMPILER=/opt/rh/devtoolset-9/root/usr/bin/c++, CXX_FLAGS= -D_GLIBCXX_USE_CXX11_ABI=0 -fabi-version=11 -fvisibility-inlines-hidden -DUSE_PTHREADPOOL -DNDEBUG -DUSE_KINETO -DLIBKINETO_NOROCTRACER -DUSE_FBGEMM -DUSE_PYTORCH_QNNPACK -DUSE_XNNPACK -DSYMBOLICATE_MOBILE_DEBUG_HANDLE -O2 -fPIC -Wall -Wextra -Werror=return-type -Werror=non-virtual-dtor -Werror=bool-operation -Wnarrowing -Wno-missing-field-initializers -Wno-type-limits -Wno-array-bounds -Wno-unknown-pragmas -Wno-unused-parameter -Wno-unused-function -Wno-unused-result -Wno-strict-overflow -Wno-strict-aliasing -Wno-stringop-overflow -Wsuggest-override -Wno-psabi -Wno-error=pedantic -Wno-error=old-style-cast -Wno-missing-braces -fdiagnostics-color=always -faligned-new -Wno-unused-but-set-variable -Wno-maybe-uninitialized -fno-math-errno -fno-trapping-math -Werror=format -Wno-stringop-overflow, LAPACK_INFO=mkl, PERF_WITH_AVX=1, PERF_WITH_AVX2=1, PERF_WITH_AVX512=1, TORCH_VERSION=2.4.1, USE_CUDA=ON, USE_CUDNN=ON, USE_CUSPARSELT=1, USE_EXCEPTION_PTR=1, USE_GFLAGS=OFF, USE_GLOG=OFF, USE_GLOO=ON, USE_MKL=ON, USE_MKLDNN=ON, USE_MPI=OFF, USE_NCCL=1, USE_NNPACK=ON, USE_OPENMP=ON, USE_ROCM=OFF, USE_ROCM_KERNEL_ASSERT=OFF,
+
+     TorchVision: 0.19.1+cu121
+     OpenCV: 4.9.0
+     MMEngine: 0.10.6
+
+ Runtime environment:
+     launcher: none
+     randomness: {'seed': None, 'deterministic': False}
+     cudnn_benchmark: False
+     mp_cfg: {'mp_start_method': 'fork', 'opencv_num_threads': 0}
+     dist_cfg: {'backend': 'nccl'}
+     seed: None
+     deterministic: False
+     Distributed launcher: none
+     Distributed training: False
+     GPU number: 1
+ ------------------------------------------------------------
+
+ 2025/03/04 11:30:18 - mmengine - INFO - Config:
+ accumulative_counts = 2
+ batch_size = 1
+ betas = (
+     0.9,
+     0.999,
+ )
+ custom_hooks = [
+     dict(
+         tokenizer=dict(
+             pretrained_model_name_or_path=
+             '/root/share/new_models/OpenGVLab/InternVL2-2B',
+             trust_remote_code=True,
+             type='transformers.AutoTokenizer.from_pretrained'),
+         type='xtuner.engine.hooks.DatasetInfoHook'),
+ ]
+ data_path = '/root/data/tempData/screenshot_od/layout_ocr_multi_scale.json'
+ dataloader_num_workers = 4
+ default_hooks = dict(
+     checkpoint=dict(
+         by_epoch=False,
+         interval=1000,
+         max_keep_ckpts=-1,
+         save_optimizer=False,
+         type='mmengine.hooks.CheckpointHook'),
+     logger=dict(
+         interval=10,
+         log_metric_by_epoch=False,
+         type='mmengine.hooks.LoggerHook'),
+     param_scheduler=dict(type='mmengine.hooks.ParamSchedulerHook'),
+     sampler_seed=dict(type='mmengine.hooks.DistSamplerSeedHook'),
+     timer=dict(type='mmengine.hooks.IterTimerHook'))
+ env_cfg = dict(
+     cudnn_benchmark=False,
+     dist_cfg=dict(backend='nccl'),
+     mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0))
+ image_folder = '/'
+ launcher = 'none'
+ llava_dataset = dict(
+     data_paths='/root/data/tempData/screenshot_od/layout_ocr_multi_scale.json',
+     image_folders='/',
+     max_length=8192,
+     model_path='/root/share/new_models/OpenGVLab/InternVL2-2B',
+     template='xtuner.utils.PROMPT_TEMPLATE.internlm2_chat',
+     type='xtuner.dataset.InternVL_V1_5_Dataset')
+ load_from = None
+ log_level = 'DEBUG'
+ log_processor = dict(by_epoch=False)
+ lr = 2e-05
+ max_epochs = 4
+ max_length = 8192
+ max_norm = 1
+ model = dict(
+     freeze_llm=True,
+     freeze_visual_encoder=True,
+     llm_lora=dict(
+         lora_alpha=256,
+         lora_dropout=0.05,
+         r=128,
+         target_modules=None,
+         task_type='CAUSAL_LM',
+         type='peft.LoraConfig'),
+     model_path='/root/share/new_models/OpenGVLab/InternVL2-2B',
+     quantization_llm=True,
+     quantization_vit=False,
+     type='xtuner.model.InternVL_V1_5')
+ optim_type = 'torch.optim.AdamW'
+ optim_wrapper = dict(
+     optimizer=dict(
+         betas=(
+             0.9,
+             0.999,
+         ),
+         lr=2e-05,
+         type='torch.optim.AdamW',
+         weight_decay=0.05),
+     type='DeepSpeedOptimWrapper')
+ param_scheduler = [
+     dict(
+         begin=0,
+         by_epoch=True,
+         convert_to_iter_based=True,
+         end=0.12,
+         start_factor=1e-05,
+         type='mmengine.optim.LinearLR'),
+     dict(
+         begin=0.12,
+         by_epoch=True,
+         convert_to_iter_based=True,
+         end=4,
+         eta_min=0.0,
+         type='mmengine.optim.CosineAnnealingLR'),
+ ]
+ path = '/root/share/new_models/OpenGVLab/InternVL2-2B'
+ prompt_template = 'xtuner.utils.PROMPT_TEMPLATE.internlm2_chat'
+ randomness = dict(deterministic=False, seed=None)
+ resume = False
+ runner_type = 'FlexibleRunner'
+ save_steps = 1000
+ save_total_limit = -1
+ strategy = dict(
+     config=dict(
+         bf16=dict(enabled=True),
+         fp16=dict(enabled=False, initial_scale_power=16),
+         gradient_accumulation_steps='auto',
+         gradient_clipping='auto',
+         train_micro_batch_size_per_gpu='auto',
+         zero_allow_untested_optimizer=True,
+         zero_force_ds_cpu_optimizer=False,
+         zero_optimization=dict(overlap_comm=True, stage=2)),
+     exclude_frozen_parameters=True,
+     gradient_accumulation_steps=2,
+     gradient_clipping=1,
+     sequence_parallel_size=1,
+     train_micro_batch_size_per_gpu=1,
+     type='xtuner.engine.DeepSpeedStrategy')
+ tokenizer = dict(
+     pretrained_model_name_or_path=
+     '/root/share/new_models/OpenGVLab/InternVL2-2B',
+     trust_remote_code=True,
+     type='transformers.AutoTokenizer.from_pretrained')
+ train_cfg = dict(max_epochs=4, type='xtuner.engine.runner.TrainLoop')
+ train_dataloader = dict(
+     batch_size=1,
+     collate_fn=dict(type='xtuner.dataset.collate_fns.default_collate_fn'),
+     dataset=dict(
+         data_paths=
+         '/root/data/tempData/screenshot_od/layout_ocr_multi_scale.json',
+         image_folders='/',
+         max_length=8192,
+         model_path='/root/share/new_models/OpenGVLab/InternVL2-2B',
+         template='xtuner.utils.PROMPT_TEMPLATE.internlm2_chat',
+         type='xtuner.dataset.InternVL_V1_5_Dataset'),
+     num_workers=4,
+     sampler=dict(
+         length_property='modality_length',
+         per_device_batch_size=2,
+         type='xtuner.dataset.samplers.LengthGroupedSampler'))
+ visualizer = dict(
+     type='mmengine.visualization.Visualizer',
+     vis_backends=[
+         dict(type='mmengine.visualization.TensorboardVisBackend'),
+     ])
+ warmup_ratio = 0.03
+ weight_decay = 0.05
+ work_dir = '/root/wangqun/work_dirs/internvl_ft_run_12_filter'
+
+ 2025/03/04 11:30:18 - mmengine - DEBUG - An `TensorboardVisBackend` instance is built from registry, and its implementation can be found in mmengine.visualization.vis_backend
+ 2025/03/04 11:30:18 - mmengine - DEBUG - An `Visualizer` instance is built from registry, and its implementation can be found in mmengine.visualization.visualizer
+ 2025/03/04 11:30:18 - mmengine - DEBUG - Attribute `_env_initialized` is not defined in <class 'mmengine.visualization.vis_backend.TensorboardVisBackend'> or `<class 'mmengine.visualization.vis_backend.TensorboardVisBackend'>._env_initialized is False, `_init_env` will be called and <class 'mmengine.visualization.vis_backend.TensorboardVisBackend'>._env_initialized will be set to True
+ 2025/03/04 11:30:19 - mmengine - DEBUG - Get class `RuntimeInfoHook` from "hook" registry in "mmengine"
+ 2025/03/04 11:30:19 - mmengine - DEBUG - An `RuntimeInfoHook` instance is built from registry, and its implementation can be found in mmengine.hooks.runtime_info_hook
+ 2025/03/04 11:30:19 - mmengine - DEBUG - An `IterTimerHook` instance is built from registry, and its implementation can be found in mmengine.hooks.iter_timer_hook
+ 2025/03/04 11:30:19 - mmengine - DEBUG - An `DistSamplerSeedHook` instance is built from registry, and its implementation can be found in mmengine.hooks.sampler_seed_hook
+ 2025/03/04 11:30:19 - mmengine - DEBUG - An `LoggerHook` instance is built from registry, and its implementation can be found in mmengine.hooks.logger_hook
+ 2025/03/04 11:30:19 - mmengine - DEBUG - An `ParamSchedulerHook` instance is built from registry, and its implementation can be found in mmengine.hooks.param_scheduler_hook
+ 2025/03/04 11:30:19 - mmengine - DEBUG - An `CheckpointHook` instance is built from registry, and its implementation can be found in mmengine.hooks.checkpoint_hook
+ 2025/03/04 11:30:19 - mmengine - WARNING - Failed to search registry with scope "mmengine" in the "builder" registry tree. As a workaround, the current "builder" registry in "xtuner" is used to build instance. This may cause unexpected failure when running the built modules. Please check whether "mmengine" is a correct scope, or whether the registry is initialized.
+ 2025/03/04 11:30:19 - mmengine - DEBUG - An `from_pretrained` instance is built from registry, and its implementation can be found in transformers.models.auto.tokenization_auto
+ 2025/03/04 11:30:19 - mmengine - DEBUG - An `DatasetInfoHook` instance is built from registry, and its implementation can be found in xtuner.engine.hooks.dataset_info_hook
+ 2025/03/04 11:30:19 - mmengine - INFO - Hooks will be executed in the following order:
+ before_run:
+ (VERY_HIGH ) RuntimeInfoHook
+ (BELOW_NORMAL) LoggerHook
+ --------------------
+ before_train:
+ (VERY_HIGH ) RuntimeInfoHook
+ (NORMAL ) IterTimerHook
+ (NORMAL ) DatasetInfoHook
+ (VERY_LOW ) CheckpointHook
+ --------------------
+ before_train_epoch:
+ (VERY_HIGH ) RuntimeInfoHook
+ (NORMAL ) IterTimerHook
+ (NORMAL ) DistSamplerSeedHook
+ --------------------
+ before_train_iter:
+ (VERY_HIGH ) RuntimeInfoHook
+ (NORMAL ) IterTimerHook
+ --------------------
+ after_train_iter:
+ (VERY_HIGH ) RuntimeInfoHook
+ (NORMAL ) IterTimerHook
+ (BELOW_NORMAL) LoggerHook
+ (LOW ) ParamSchedulerHook
+ (VERY_LOW ) CheckpointHook
+ --------------------
+ after_train_epoch:
+ (NORMAL ) IterTimerHook
+ (LOW ) ParamSchedulerHook
+ (VERY_LOW ) CheckpointHook
+ --------------------
+ before_val:
+ (VERY_HIGH ) RuntimeInfoHook
+ (NORMAL ) DatasetInfoHook
+ --------------------
+ before_val_epoch:
+ (NORMAL ) IterTimerHook
+ --------------------
+ before_val_iter:
+ (NORMAL ) IterTimerHook
+ --------------------
+ after_val_iter:
+ (NORMAL ) IterTimerHook
+ (BELOW_NORMAL) LoggerHook
+ --------------------
+ after_val_epoch:
+ (VERY_HIGH ) RuntimeInfoHook
+ (NORMAL ) IterTimerHook
+ (BELOW_NORMAL) LoggerHook
+ (LOW ) ParamSchedulerHook
+ (VERY_LOW ) CheckpointHook
+ --------------------
+ after_val:
+ (VERY_HIGH ) RuntimeInfoHook
+ --------------------
+ after_train:
+ (VERY_HIGH ) RuntimeInfoHook
+ (VERY_LOW ) CheckpointHook
+ --------------------
+ before_test:
+ (VERY_HIGH ) RuntimeInfoHook
+ (NORMAL ) DatasetInfoHook
+ --------------------
+ before_test_epoch:
+ (NORMAL ) IterTimerHook
+ --------------------
+ before_test_iter:
+ (NORMAL ) IterTimerHook
+ --------------------
+ after_test_iter:
+ (NORMAL ) IterTimerHook
+ (BELOW_NORMAL) LoggerHook
+ --------------------
+ after_test_epoch:
+ (VERY_HIGH ) RuntimeInfoHook
+ (NORMAL ) IterTimerHook
+ (BELOW_NORMAL) LoggerHook
+ --------------------
+ after_test:
+ (VERY_HIGH ) RuntimeInfoHook
+ --------------------
+ after_run:
+ (BELOW_NORMAL) LoggerHook
+ --------------------
+ 2025/03/04 11:30:19 - mmengine - DEBUG - An `FlexibleRunner` instance is built from registry, its implementation can be found in mmengine.runner._flexible_runner
+ 2025/03/04 11:30:19 - mmengine - INFO - Starting to loading data and calc length
+ 2025/03/04 11:30:19 - mmengine - INFO - =======Starting to process /root/data/tempData/screenshot_od/layout_ocr_multi_scale.json =======
+ 2025/03/04 11:30:26 - mmengine - INFO - =======total 4806 samples of /root/data/tempData/screenshot_od/layout_ocr_multi_scale.json=======
+ 2025/03/04 11:30:26 - mmengine - INFO - end loading data and calc length
+ 2025/03/04 11:30:26 - mmengine - INFO - =======total 4806 samples=======
+ 2025/03/04 11:30:26 - mmengine - DEBUG - An `InternVL_V1_5_Dataset` instance is built from registry, and its implementation can be found in xtuner.dataset.internvl_dataset
+ 2025/03/04 11:30:26 - mmengine - INFO - LengthGroupedSampler is used.
+ 2025/03/04 11:30:26 - mmengine - INFO - LengthGroupedSampler construction is complete, and the selected attribute is modality_length
+ 2025/03/04 11:30:26 - mmengine - DEBUG - An `LengthGroupedSampler` instance is built from registry, and its implementation can be found in xtuner.dataset.samplers.length_grouped
+ 2025/03/04 11:30:26 - mmengine - WARNING - Dataset InternVL_V1_5_Dataset has no metainfo. ``dataset_meta`` in visualizer will be None.
+ 2025/03/04 11:30:26 - mmengine - DEBUG - An `TrainLoop` instance is built from registry, and its implementation can be found in xtuner.engine.runner.loops
+ 2025/03/04 11:30:26 - mmengine - INFO - Start to load InternVL_V1_5 model.
+ 2025/03/04 11:30:26 - mmengine - DEBUG - Get class `BaseDataPreprocessor` from "model" registry in "mmengine"
+ 2025/03/04 11:30:26 - mmengine - DEBUG - An `BaseDataPreprocessor` instance is built from registry, and its implementation can be found in mmengine.model.base_model.data_preprocessor
+ 2025/03/04 11:30:32 - mmengine - DEBUG - An `LoraConfig` instance is built from registry, and its implementation can be found in peft.tuners.lora.config
+ 2025/03/04 11:30:34 - mmengine - INFO - InternVL_V1_5(
+   (data_preprocessor): BaseDataPreprocessor()
+   (model): InternVLChatModel(
+     (vision_model): InternVisionModel(
+       (embeddings): InternVisionEmbeddings(
+         (patch_embedding): Conv2d(3, 1024, kernel_size=(14, 14), stride=(14, 14))
+       )
+       (encoder): InternVisionEncoder(
+         (layers): ModuleList(
+           (0-23): 24 x InternVisionEncoderLayer(
+             (attn): InternAttention(
+               (qkv): Linear(in_features=1024, out_features=3072, bias=True)
+               (attn_drop): Dropout(p=0.0, inplace=False)
+               (proj_drop): Dropout(p=0.0, inplace=False)
+               (proj): Linear(in_features=1024, out_features=1024, bias=True)
+             )
+             (mlp): InternMLP(
+               (act): GELUActivation()
+               (fc1): Linear(in_features=1024, out_features=4096, bias=True)
+               (fc2): Linear(in_features=4096, out_features=1024, bias=True)
+             )
+             (norm1): LayerNorm((1024,), eps=1e-06, elementwise_affine=True)
+             (norm2): LayerNorm((1024,), eps=1e-06, elementwise_affine=True)
+             (drop_path1): Identity()
+             (drop_path2): Identity()
+           )
+         )
+       )
+     )
+     (language_model): PeftModelForCausalLM(
+       (base_model): LoraModel(
+         (model): InternLM2ForCausalLM(
+           (model): InternLM2Model(
+             (tok_embeddings): Embedding(92553, 2048, padding_idx=2)
+             (layers): ModuleList(
+               (0-23): 24 x InternLM2DecoderLayer(
+                 (attention): InternLM2Attention(
+                   (wqkv): lora.Linear(
+                     (base_layer): Linear4bit(in_features=2048, out_features=4096, bias=False)
+                     (lora_dropout): ModuleDict(
+                       (default): Dropout(p=0.05, inplace=False)
+                     )
+                     (lora_A): ModuleDict(
+                       (default): Linear(in_features=2048, out_features=128, bias=False)
+                     )
+                     (lora_B): ModuleDict(
+                       (default): Linear(in_features=128, out_features=4096, bias=False)
+                     )
+                     (lora_embedding_A): ParameterDict()
+                     (lora_embedding_B): ParameterDict()
+                     (lora_magnitude_vector): ModuleDict()
+                   )
+                   (wo): lora.Linear(
+                     (base_layer): Linear4bit(in_features=2048, out_features=2048, bias=False)
+                     (lora_dropout): ModuleDict(
+                       (default): Dropout(p=0.05, inplace=False)
+                     )
+                     (lora_A): ModuleDict(
+                       (default): Linear(in_features=2048, out_features=128, bias=False)
+                     )
+                     (lora_B): ModuleDict(
+                       (default): Linear(in_features=128, out_features=2048, bias=False)
+                     )
+                     (lora_embedding_A): ParameterDict()
+                     (lora_embedding_B): ParameterDict()
+                     (lora_magnitude_vector): ModuleDict()
+                   )
+                   (rotary_emb): InternLM2DynamicNTKScalingRotaryEmbedding()
+                 )
+                 (feed_forward): InternLM2MLP(
+                   (w1): lora.Linear(
+                     (base_layer): Linear4bit(in_features=2048, out_features=8192, bias=False)
+                     (lora_dropout): ModuleDict(
+                       (default): Dropout(p=0.05, inplace=False)
+                     )
+                     (lora_A): ModuleDict(
+                       (default): Linear(in_features=2048, out_features=128, bias=False)
+                     )
+                     (lora_B): ModuleDict(
+                       (default): Linear(in_features=128, out_features=8192, bias=False)
+                     )
+                     (lora_embedding_A): ParameterDict()
+                     (lora_embedding_B): ParameterDict()
+                     (lora_magnitude_vector): ModuleDict()
+                   )
+                   (w3): lora.Linear(
+                     (base_layer): Linear4bit(in_features=2048, out_features=8192, bias=False)
+                     (lora_dropout): ModuleDict(
+                       (default): Dropout(p=0.05, inplace=False)
+                     )
+                     (lora_A): ModuleDict(
+                       (default): Linear(in_features=2048, out_features=128, bias=False)
+                     )
+                     (lora_B): ModuleDict(
+                       (default): Linear(in_features=128, out_features=8192, bias=False)
+                     )
+                     (lora_embedding_A): ParameterDict()
+                     (lora_embedding_B): ParameterDict()
+                     (lora_magnitude_vector): ModuleDict()
+                   )
+                   (w2): lora.Linear(
+                     (base_layer): Linear4bit(in_features=8192, out_features=2048, bias=False)
+                     (lora_dropout): ModuleDict(
+                       (default): Dropout(p=0.05, inplace=False)
+                     )
+                     (lora_A): ModuleDict(
+                       (default): Linear(in_features=8192, out_features=128, bias=False)
+                     )
+                     (lora_B): ModuleDict(
+                       (default): Linear(in_features=128, out_features=2048, bias=False)
+                     )
+                     (lora_embedding_A): ParameterDict()
+                     (lora_embedding_B): ParameterDict()
+                     (lora_magnitude_vector): ModuleDict()
+                   )
+                   (act_fn): SiLU()
+                 )
+                 (attention_norm): InternLM2RMSNorm()
+                 (ffn_norm): InternLM2RMSNorm()
+               )
+             )
+             (norm): InternLM2RMSNorm()
+           )
+           (output): lora.Linear(
+             (base_layer): Linear4bit(in_features=2048, out_features=92553, bias=False)
+             (lora_dropout): ModuleDict(
+               (default): Dropout(p=0.05, inplace=False)
+             )
+             (lora_A): ModuleDict(
+               (default): Linear(in_features=2048, out_features=128, bias=False)
+             )
+             (lora_B): ModuleDict(
+               (default): Linear(in_features=128, out_features=92553, bias=False)
+             )
+             (lora_embedding_A): ParameterDict()
+             (lora_embedding_B): ParameterDict()
+             (lora_magnitude_vector): ModuleDict()
+           )
+         )
+       )
+     )
+     (mlp1): Sequential(
+       (0): LayerNorm((4096,), eps=1e-05, elementwise_affine=True)
+       (1): Linear(in_features=4096, out_features=2048, bias=True)
+       (2): GELU(approximate='none')
+       (3): Linear(in_features=2048, out_features=2048, bias=True)
+     )
+   )
+ )
+ 2025/03/04 11:30:34 - mmengine - INFO - InternVL_V1_5 construction is complete
+ 2025/03/04 11:30:34 - mmengine - DEBUG - An `InternVL_V1_5` instance is built from registry, and its implementation can be found in xtuner.model.internvl
+ 2025/03/04 11:30:34 - mmengine - DEBUG - Get class `DefaultOptimWrapperConstructor` from "optimizer wrapper constructor" registry in "mmengine"
+ 2025/03/04 11:30:34 - mmengine - DEBUG - An `DefaultOptimWrapperConstructor` instance is built from registry, and its implementation can be found in mmengine.optim.optimizer.default_constructor
+ 2025/03/04 11:30:34 - mmengine - DEBUG - An `AdamW` instance is built from registry, and its implementation can be found in torch.optim.adamw
+ 2025/03/04 11:30:34 - mmengine - DEBUG - Get class `DeepSpeedOptimWrapper` from "optim_wrapper" registry in "mmengine"
+ 2025/03/04 11:30:34 - mmengine - DEBUG - An `DeepSpeedOptimWrapper` instance is built from registry, and its implementation can be found in mmengine._strategy.deepspeed
+ 2025/03/04 11:30:36 - mmengine - DEBUG - The `end` of <class 'mmengine.optim.scheduler.lr_scheduler.LinearLR'> is not set. Use the max epochs/iters of train loop as default.
+ 2025/03/04 11:30:36 - mmengine - DEBUG - The `end` of <class 'mmengine.optim.scheduler.lr_scheduler.CosineAnnealingLR'> is not set. Use the max epochs/iters of train loop as default.
+ 2025/03/04 11:30:36 - mmengine - INFO - Num train samples 4806
+ 2025/03/04 11:30:36 - mmengine - INFO - train example:
+ 2025/03/04 11:30:36 - mmengine - INFO - <s><|im_start|> system
+ You are an AI assistant whose name is InternLM (书生·浦语).<|im_end|><|im_start|>user
+ <img> <IMG_CONTEXT> <IMG_CONTEXT> ... (long run of identical <IMG_CONTEXT> image-placeholder tokens; view truncated here)
<IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> 
<IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> 
<IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> 
<IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> 
<IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> 
<IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> 
<IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> 
<IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> 
<IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> 
<IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> <IMG_CONTEXT> </img>
+ 请从这张聊天截图中提取结构化信息<|im_end|><|im_start|> assistant
+ {
+     "dialog_name": "<对方正在输入...",
+     "conversation": [
+         {
+             "timestamp": "",
+             "speaker": "<对方正在输入...",
+             "content": "不是",
+             "message_bbox": {
+                 "min_x": 855,
+                 "max_x": 923,
+                 "min_y": 236,
+                 "max_y": 269
+             },
+             "image": "",
+             "transfer": [],
+             "file": []
+         },
+         {
+             "timestamp": "",
+             "speaker": "<对方正在输入...",
+             "content": "在淘宝里",
+             "message_bbox": {
+                 "min_x": 783,
+                 "max_x": 921,
+                 "min_y": 345,
+                 "max_y": 377
+             },
+             "image": "",
+             "transfer": [],
+             "file": []
+         },
+         {
+             "timestamp": "",
+             "speaker": "<对方正在输入...",
+             "content": "不能发微信",
+             "message_bbox": {
+                 "min_x": 747,
+                 "max_x": 923,
+                 "min_y": 452,
+                 "max_y": 486
+             },
+             "image": "",
+             "transfer": [],
+             "file": []
+         },
+         {
+             "timestamp": "",
+             "speaker": "<对方正在输入...",
+             "content": "两字",
+             "message_bbox": {
+                 "min_x": 854,
+                 "max_x": 922,
+                 "min_y": 560,
+                 "max_y": 594
+             },
+             "image": "",
+             "transfer": [],
+             "file": []
+         },
+         {
+             "timestamp": "",
+             "speaker": "<对方正在输入...",
+             "content": "微信",
+             "message_bbox": {
+                 "min_x": 854,
+                 "max_x": 924,
+                 "min_y": 670,
+                 "max_y": 702
+             },
+             "image": "",
+             "transfer": [],
+             "file": []
+         },
+         {
+             "timestamp": "",
+             "speaker": "<对方正在输入...",
+             "content": "①微信",
+             "message_bbox": {
+                 "min_x": 788,
+                 "max_x": 922,
+                 "min_y": 777,
+                 "max_y": 811
+             },
+             "image": "",
+             "transfer": [],
+             "file": []
+         }
+     ]
+ }<|im_end|>
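For reference, the sample above is a complete training target: the prompt asks the model to extract structured information from a chat screenshot, and the assistant turn answers with a fixed schema (a top-level dialog_name plus a conversation list whose entries carry speaker, content, a pixel-space message_bbox, and empty image/transfer/file slots), terminated by <|im_end|>. A minimal validation sketch for outputs in this shape; the helper name and checks are illustrative, not part of the training code:

import json

# Keys mirrored from the sample output above; this checker is an assumption,
# not something shipped with xtuner.
REQUIRED_MSG_KEYS = {"timestamp", "speaker", "content",
                     "message_bbox", "image", "transfer", "file"}
BBOX_KEYS = {"min_x", "max_x", "min_y", "max_y"}

def validate_extraction(raw: str) -> dict:
    # Drop the chat terminator before parsing.
    raw = raw.split("<|im_end|>")[0]
    data = json.loads(raw)  # raises json.JSONDecodeError on malformed output
    assert "dialog_name" in data and "conversation" in data
    for msg in data["conversation"]:
        assert REQUIRED_MSG_KEYS <= msg.keys()
        bbox = msg["message_bbox"]
        assert BBOX_KEYS <= bbox.keys()
        # bbox values are screenshot pixel coordinates, so min <= max
        assert bbox["min_x"] <= bbox["max_x"] and bbox["min_y"] <= bbox["max_y"]
    return data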
+ 2025/03/04 11:30:36 - mmengine - WARNING - "FileClient" will be deprecated in future. Please use io functions in https://mmengine.readthedocs.io/en/latest/api/fileio.html#file-io
+ 2025/03/04 11:30:36 - mmengine - WARNING - "HardDiskBackend" is the alias of "LocalBackend" and the former will be deprecated in future.
+ 2025/03/04 11:30:36 - mmengine - INFO - Checkpoints will be saved to /root/wangqun/work_dirs/internvl_ft_run_12_filter.
+ 2025/03/04 11:31:22 - mmengine - INFO - Iter(train) [ 10/19224] lr: 3.1324e-07 eta: 1 day, 0:14:22 time: 4.5416 data_time: 0.0107 memory: 17874 loss: 0.4848
+ 2025/03/04 11:31:37 - mmengine - INFO - Iter(train) [ 20/19224] lr: 6.6106e-07 eta: 16:10:09 time: 1.5206 data_time: 0.0143 memory: 11809 loss: 0.5176
+ 2025/03/04 11:31:52 - mmengine - INFO - Iter(train) [ 30/19224] lr: 1.0089e-06 eta: 13:23:24 time: 1.4721 data_time: 0.0109 memory: 11450 loss: 0.5703
+ 2025/03/04 11:32:06 - mmengine - INFO - Iter(train) [ 40/19224] lr: 1.3567e-06 eta: 11:59:30 time: 1.4671 data_time: 0.0111 memory: 11364 loss: 0.4955
+ 2025/03/04 11:32:20 - mmengine - INFO - Iter(train) [ 50/19224] lr: 1.7045e-06 eta: 11:03:04 time: 1.3731 data_time: 0.0113 memory: 11364 loss: 0.6113
+ 2025/03/04 11:32:34 - mmengine - INFO - Iter(train) [ 60/19224] lr: 2.0524e-06 eta: 10:26:18 time: 1.3907 data_time: 0.0112 memory: 11117 loss: 0.6183
+ 2025/03/04 11:32:47 - mmengine - INFO - Iter(train) [ 70/19224] lr: 2.4002e-06 eta: 9:57:44 time: 1.3416 data_time: 0.0109 memory: 11027 loss: 0.6026
+ 2025/03/04 11:33:00 - mmengine - INFO - Iter(train) [ 80/19224] lr: 2.7480e-06 eta: 9:33:38 time: 1.2760 data_time: 0.0107 memory: 11055 loss: 0.6811
+ 2025/03/04 11:33:10 - mmengine - INFO - Iter(train) [ 90/19224] lr: 3.0958e-06 eta: 9:05:42 time: 1.0182 data_time: 0.0101 memory: 10558 loss: 0.5202
+ 2025/03/04 11:33:18 - mmengine - INFO - Iter(train) [ 100/19224] lr: 3.4436e-06 eta: 8:34:44 time: 0.7483 data_time: 0.0091 memory: 9661 loss: 0.5684
+ 2025/03/04 11:33:36 - mmengine - INFO - Iter(train) [ 110/19224] lr: 3.7915e-06 eta: 8:39:45 time: 1.7979 data_time: 0.0105 memory: 15104 loss: 0.3638
+ 2025/03/04 11:33:52 - mmengine - INFO - Iter(train) [ 120/19224] lr: 4.1393e-06 eta: 8:39:11 time: 1.6204 data_time: 0.0104 memory: 12016 loss: 0.3501
+ 2025/03/04 11:34:07 - mmengine - INFO - Iter(train) [ 130/19224] lr: 4.4871e-06 eta: 8:36:04 time: 1.5144 data_time: 0.0105 memory: 11578 loss: 0.3903
+ 2025/03/04 11:34:22 - mmengine - INFO - Iter(train) [ 140/19224] lr: 4.8349e-06 eta: 8:32:49 time: 1.4901 data_time: 0.0109 memory: 11441 loss: 0.3476
+ 2025/03/04 11:34:36 - mmengine - INFO - Iter(train) [ 150/19224] lr: 5.1828e-06 eta: 8:29:01 time: 1.4462 data_time: 0.0114 memory: 11303 loss: 0.3756
+ 2025/03/04 11:34:50 - mmengine - INFO - Iter(train) [ 160/19224] lr: 5.5306e-06 eta: 8:23:51 time: 1.3546 data_time: 0.0105 memory: 11123 loss: 0.3872
+ 2025/03/04 11:35:02 - mmengine - INFO - Iter(train) [ 170/19224] lr: 5.8784e-06 eta: 8:17:20 time: 1.2510 data_time: 0.0105 memory: 10922 loss: 0.3984
+ 2025/03/04 11:35:14 - mmengine - INFO - Iter(train) [ 180/19224] lr: 6.2262e-06 eta: 8:09:41 time: 1.1468 data_time: 0.0114 memory: 10672 loss: 0.4183
+ 2025/03/04 11:35:24 - mmengine - INFO - Iter(train) [ 190/19224] lr: 6.5740e-06 eta: 8:00:03 time: 0.9815 data_time: 0.0096 memory: 10248 loss: 0.3544
+ 2025/03/04 11:35:31 - mmengine - INFO - Iter(train) [ 200/19224] lr: 6.9219e-06 eta: 7:46:45 time: 0.6902 data_time: 0.0091 memory: 9804 loss: 0.3184
+ 2025/03/04 11:35:51 - mmengine - INFO - Iter(train) [ 210/19224] lr: 7.2697e-06 eta: 7:55:34 time: 2.0721 data_time: 0.0104 memory: 17788 loss: 0.2691
+ 2025/03/04 11:36:07 - mmengine - INFO - Iter(train) [ 220/19224] lr: 7.6175e-06 eta: 7:56:38 time: 1.5929 data_time: 0.0118 memory: 11865 loss: 0.3032
+ 2025/03/04 11:36:22 - mmengine - INFO - Iter(train) [ 230/19224] lr: 7.9653e-06 eta: 7:56:16 time: 1.4955 data_time: 0.0109 memory: 11605 loss: 0.3026
+ 2025/03/04 11:36:37 - mmengine - INFO - Iter(train) [ 240/19224] lr: 8.3132e-06 eta: 7:55:03 time: 1.4318 data_time: 0.0110 memory: 11348 loss: 0.2916
+ 2025/03/04 11:36:51 - mmengine - INFO - Iter(train) [ 250/19224] lr: 8.6610e-06 eta: 7:53:24 time: 1.3910 data_time: 0.0106 memory: 11212 loss: 0.3301
+ 2025/03/04 11:37:04 - mmengine - INFO - Iter(train) [ 260/19224] lr: 9.0088e-06 eta: 7:51:32 time: 1.3642 data_time: 0.0106 memory: 11092 loss: 0.3150
+ 2025/03/04 11:37:16 - mmengine - INFO - Iter(train) [ 270/19224] lr: 9.3566e-06 eta: 7:47:31 time: 1.1699 data_time: 0.0109 memory: 10895 loss: 0.4017
+ 2025/03/04 11:37:26 - mmengine - INFO - Iter(train) [ 280/19224] lr: 9.7045e-06 eta: 7:41:37 time: 0.9782 data_time: 0.0094 memory: 10390 loss: 0.2858
+ 2025/03/04 11:37:34 - mmengine - INFO - Iter(train) [ 290/19224] lr: 1.0052e-05 eta: 7:35:05 time: 0.8845 data_time: 0.0096 memory: 9940 loss: 0.3397
+ 2025/03/04 11:37:41 - mmengine - INFO - Iter(train) [ 300/19224] lr: 1.0400e-05 eta: 7:26:50 time: 0.6804 data_time: 0.0089 memory: 9679 loss: 0.3477
+ 2025/03/04 11:37:58 - mmengine - INFO - Iter(train) [ 310/19224] lr: 1.0748e-05 eta: 7:29:22 time: 1.6891 data_time: 0.0105 memory: 13096 loss: 0.2749
+ 2025/03/04 11:38:14 - mmengine - INFO - Iter(train) [ 320/19224] lr: 1.1096e-05 eta: 7:30:40 time: 1.5811 data_time: 0.0108 memory: 11972 loss: 0.2802
+ 2025/03/04 11:38:29 - mmengine - INFO - Iter(train) [ 330/19224] lr: 1.1444e-05 eta: 7:31:27 time: 1.5370 data_time: 0.0105 memory: 11557 loss: 0.2905
+ 2025/03/04 11:38:44 - mmengine - INFO - Iter(train) [ 340/19224] lr: 1.1791e-05 eta: 7:31:20 time: 1.4478 data_time: 0.0110 memory: 11408 loss: 0.2810
+ 2025/03/04 11:38:58 - mmengine - INFO - Iter(train) [ 350/19224] lr: 1.2139e-05 eta: 7:30:44 time: 1.3936 data_time: 0.0107 memory: 11262 loss: 0.3028
+ 2025/03/04 11:39:11 - mmengine - INFO - Iter(train) [ 360/19224] lr: 1.2487e-05 eta: 7:29:38 time: 1.3336 data_time: 0.0114 memory: 11153 loss: 0.2933
+ 2025/03/04 11:39:24 - mmengine - INFO - Iter(train) [ 370/19224] lr: 1.2835e-05 eta: 7:28:05 time: 1.2765 data_time: 0.0108 memory: 11027 loss: 0.3327
+ 2025/03/04 11:39:35 - mmengine - INFO - Iter(train) [ 380/19224] lr: 1.3183e-05 eta: 7:25:06 time: 1.0944 data_time: 0.0100 memory: 10580 loss: 0.3867
+ 2025/03/04 11:39:45 - mmengine - INFO - Iter(train) [ 390/19224] lr: 1.3530e-05 eta: 7:21:20 time: 0.9780 data_time: 0.0097 memory: 10354 loss: 0.3154
+ 2025/03/04 11:39:52 - mmengine - INFO - Iter(train) [ 400/19224] lr: 1.3878e-05 eta: 7:16:13 time: 0.7822 data_time: 0.0089 memory: 9683 loss: 0.3445
+ 2025/03/04 11:40:09 - mmengine - INFO - Iter(train) [ 410/19224] lr: 1.4226e-05 eta: 7:17:55 time: 1.6434 data_time: 0.0107 memory: 12505 loss: 0.2493
+ 2025/03/04 11:40:25 - mmengine - INFO - Iter(train) [ 420/19224] lr: 1.4574e-05 eta: 7:19:01 time: 1.5764 data_time: 0.0106 memory: 11912 loss: 0.2502
+ 2025/03/04 11:40:40 - mmengine - INFO - Iter(train) [ 430/19224] lr: 1.4922e-05 eta: 7:19:47 time: 1.5375 data_time: 0.0108 memory: 11529 loss: 0.2680
+ 2025/03/04 11:40:54 - mmengine - INFO - Iter(train) [ 440/19224] lr: 1.5270e-05 eta: 7:19:43 time: 1.4275 data_time: 0.0106 memory: 11389 loss: 0.2787
+ 2025/03/04 11:41:08 - mmengine - INFO - Iter(train) [ 450/19224] lr: 1.5617e-05 eta: 7:19:33 time: 1.4134 data_time: 0.0110 memory: 11266 loss: 0.3042
+ 2025/03/04 11:41:21 - mmengine - INFO - Iter(train) [ 460/19224] lr: 1.5965e-05 eta: 7:18:12 time: 1.2417 data_time: 0.0103 memory: 11039 loss: 0.3354
+ 2025/03/04 11:41:34 - mmengine - INFO - Iter(train) [ 470/19224] lr: 1.6313e-05 eta: 7:17:27 time: 1.3231 data_time: 0.0105 memory: 10907 loss: 0.3018
+ 2025/03/04 11:41:46 - mmengine - INFO - Iter(train) [ 480/19224] lr: 1.6661e-05 eta: 7:15:48 time: 1.1828 data_time: 0.0098 memory: 10818 loss: 0.3590
+ 2025/03/04 11:41:56 - mmengine - INFO - Iter(train) [ 490/19224] lr: 1.7009e-05 eta: 7:13:20 time: 1.0446 data_time: 0.0096 memory: 10395 loss: 0.3291
+ 2025/03/04 11:42:04 - mmengine - INFO - Iter(train) [ 500/19224] lr: 1.7357e-05 eta: 7:09:04 time: 0.7395 data_time: 0.0089 memory: 9992 loss: 0.2996
+ 2025/03/04 11:42:21 - mmengine - INFO - Iter(train) [ 510/19224] lr: 1.7704e-05 eta: 7:11:10 time: 1.7567 data_time: 0.0102 memory: 14188 loss: 0.2741
+ 2025/03/04 11:42:37 - mmengine - INFO - Iter(train) [ 520/19224] lr: 1.8052e-05 eta: 7:12:15 time: 1.6026 data_time: 0.0107 memory: 12266 loss: 0.2345
+ 2025/03/04 11:42:52 - mmengine - INFO - Iter(train) [ 530/19224] lr: 1.8400e-05 eta: 7:12:25 time: 1.4521 data_time: 0.0105 memory: 11810 loss: 0.2394
+ 2025/03/04 11:43:08 - mmengine - INFO - Iter(train) [ 540/19224] lr: 1.8748e-05 eta: 7:13:25 time: 1.6034 data_time: 0.0117 memory: 11765 loss: 0.2383
+ 2025/03/04 11:43:22 - mmengine - INFO - Iter(train) [ 550/19224] lr: 1.9096e-05 eta: 7:13:17 time: 1.4077 data_time: 0.0110 memory: 11367 loss: 0.2818
+ 2025/03/04 11:43:36 - mmengine - INFO - Iter(train) [ 560/19224] lr: 1.9443e-05 eta: 7:13:12 time: 1.4201 data_time: 0.0108 memory: 11259 loss: 0.2765
+ 2025/03/04 11:43:50 - mmengine - INFO - Iter(train) [ 570/19224] lr: 1.9791e-05 eta: 7:12:48 time: 1.3616 data_time: 0.0130 memory: 11167 loss: 0.2869
+ 2025/03/04 11:44:03 - mmengine - INFO - Iter(train) [ 580/19224] lr: 2.0000e-05 eta: 7:12:11 time: 1.3208 data_time: 0.0104 memory: 10925 loss: 0.3095
+ 2025/03/04 11:44:14 - mmengine - INFO - Iter(train) [ 590/19224] lr: 2.0000e-05 eta: 7:10:19 time: 1.0787 data_time: 0.0098 memory: 10596 loss: 0.2788
+ 2025/03/04 11:44:23 - mmengine - INFO - Iter(train) [ 600/19224] lr: 2.0000e-05 eta: 7:07:43 time: 0.9279 data_time: 0.0097 memory: 10005 loss: 0.2877
+ 2025/03/04 11:44:41 - mmengine - INFO - Iter(train) [ 610/19224] lr: 2.0000e-05 eta: 7:09:22 time: 1.7489 data_time: 0.0104 memory: 13855 loss: 0.2535
+ 2025/03/04 11:44:56 - mmengine - INFO - Iter(train) [ 620/19224] lr: 2.0000e-05 eta: 7:09:57 time: 1.5451 data_time: 0.0106 memory: 12281 loss: 0.2177
+ 2025/03/04 11:45:11 - mmengine - INFO - Iter(train) [ 630/19224] lr: 2.0000e-05 eta: 7:10:25 time: 1.5314 data_time: 0.0107 memory: 11697 loss: 0.2348
+ 2025/03/04 11:45:26 - mmengine - INFO - Iter(train) [ 640/19224] lr: 1.9999e-05 eta: 7:10:31 time: 1.4570 data_time: 0.0107 memory: 11459 loss: 0.2709
+ 2025/03/04 11:45:40 - mmengine - INFO - Iter(train) [ 650/19224] lr: 1.9999e-05 eta: 7:10:22 time: 1.4061 data_time: 0.0109 memory: 11390 loss: 0.2576
+ 2025/03/04 11:45:53 - mmengine - INFO - Iter(train) [ 660/19224] lr: 1.9999e-05 eta: 7:09:48 time: 1.3197 data_time: 0.0109 memory: 11218 loss: 0.3110
+ 2025/03/04 11:46:06 - mmengine - INFO - Iter(train) [ 670/19224] lr: 1.9999e-05 eta: 7:09:04 time: 1.2780 data_time: 0.0106 memory: 10981 loss: 0.3224
+ 2025/03/04 11:46:17 - mmengine - INFO - Iter(train) [ 680/19224] lr: 1.9998e-05 eta: 7:07:38 time: 1.1260 data_time: 0.0104 memory: 10725 loss: 0.3063
+ 2025/03/04 11:46:27 - mmengine - INFO - Iter(train) [ 690/19224] lr: 1.9998e-05 eta: 7:05:30 time: 0.9567 data_time: 0.0097 memory: 10172 loss: 0.2703
+ 2025/03/04 11:46:35 - mmengine - INFO - Iter(train) [ 700/19224] lr: 1.9998e-05 eta: 7:02:45 time: 0.8067 data_time: 0.0091 memory: 9872 loss: 0.3140
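Each Iter(train) line above follows one fixed layout (iteration out of 19224 total, learning rate, ETA, per-iteration time, data time, memory, loss), so the loss curve can be recovered from the text log alone. A small sketch, assuming the log was saved to disk as shown; the path is a placeholder:

import re

# Matches the mmengine line layout shown in this run's log.
LINE_RE = re.compile(
    r"Iter\(train\) \[\s*(\d+)/\d+\]\s+lr: ([\d.e+-]+).*?loss: ([\d.]+)")

def parse_log(path="20250304_113017.log"):
    points = []
    with open(path, encoding="utf-8") as f:
        for line in f:
            m = LINE_RE.search(line)
            if m:
                # (iteration, learning rate, loss)
                points.append((int(m.group(1)),
                               float(m.group(2)),
                               float(m.group(3))))
    return points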
internvl_ft_run_12_filter/20250304_113017/vis_data/events.out.tfevents.1741059019.intern-studio-40019814.28433.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fd425b2aca9e0b39d18b30a09b663885a921b56d30f9da0e450be82102f9a02a
+ size 23159
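The binary artifacts in this commit (the .pth checkpoints and the tfevents files) are stored through Git LFS, so the diff shows only a three-line pointer per file: a spec version, the sha256 of the real blob, and its size in bytes. A sketch for reading one back; the function name is illustrative:

# Each LFS pointer is three "key value" lines standing in for the real binary.
def read_lfs_pointer(path):
    fields = {}
    with open(path, encoding="utf-8") as f:
        for line in f:
            if not line.strip():
                continue
            key, _, value = line.strip().partition(" ")
            fields[key] = value
    return fields

# e.g. read_lfs_pointer on the pointer above ->
# {'version': 'https://git-lfs.github.com/spec/v1',
#  'oid': 'sha256:fd425b2a...', 'size': '23159'}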
internvl_ft_run_12_filter/20250304_114757/20250304_114757.log ADDED
The diff for this file is too large to render. See raw diff
internvl_ft_run_12_filter/20250304_114757/vis_data/events.out.tfevents.1741060079.intern-studio-40019814.34025.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:79ff3861cce2bb33eeb67d1b5be27e2cd23b93bed47b5cfbe8aaced8a16400cb
+ size 257492
internvl_ft_run_12_filter/internvl_v2_internlm2_2b_qlora_finetune_copy.py ADDED
@@ -0,0 +1,145 @@
1
+ accumulative_counts = 1
2
+ batch_size = 2
3
+ betas = (
4
+ 0.9,
5
+ 0.999,
6
+ )
7
+ custom_hooks = [
8
+ dict(
9
+ tokenizer=dict(
10
+ pretrained_model_name_or_path=
11
+ '/root/wangqun/models/internvl2-2B',
12
+ trust_remote_code=True,
13
+ type='transformers.AutoTokenizer.from_pretrained'),
14
+ type='xtuner.engine.hooks.DatasetInfoHook'),
15
+ ]
16
+ data_path = '/root/data/tempData/screenshot_od/layout_ocr_multi_scale.json'
17
+ dataloader_num_workers = 4
18
+ default_hooks = dict(
19
+ checkpoint=dict(
20
+ by_epoch=False,
21
+ interval=1000,
22
+ max_keep_ckpts=-1,
23
+ save_optimizer=False,
24
+ type='mmengine.hooks.CheckpointHook'),
25
+ logger=dict(
26
+ interval=10,
27
+ log_metric_by_epoch=False,
28
+ type='mmengine.hooks.LoggerHook'),
29
+ param_scheduler=dict(type='mmengine.hooks.ParamSchedulerHook'),
30
+ sampler_seed=dict(type='mmengine.hooks.DistSamplerSeedHook'),
31
+ timer=dict(type='mmengine.hooks.IterTimerHook'))
32
+ env_cfg = dict(
33
+ cudnn_benchmark=False,
34
+ dist_cfg=dict(backend='nccl'),
35
+ mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0))
36
+ image_folder = '/'
37
+ launcher = 'none'
38
+ llava_dataset = dict(
39
+ data_paths='/root/data/tempData/screenshot_od/layout_ocr_multi_scale.json',
40
+ image_folders='/',
41
+ max_length=8192,
42
+ model_path='/root/wangqun/models/internvl2-2B',
43
+ template='xtuner.utils.PROMPT_TEMPLATE.internlm2_chat',
44
+ type='xtuner.dataset.InternVL_V1_5_Dataset')
45
+ load_from = None
46
+ log_level = 'DEBUG'
47
+ log_processor = dict(by_epoch=False)
48
+ lr = 2e-05
49
+ max_epochs = 4
50
+ max_length = 8192
51
+ max_norm = 1
52
+ model = dict(
53
+ freeze_llm=True,
54
+ freeze_visual_encoder=True,
55
+ llm_lora=dict(
56
+ lora_alpha=256,
57
+ lora_dropout=0.05,
58
+ r=128,
59
+ target_modules=None,
60
+ task_type='CAUSAL_LM',
61
+ type='peft.LoraConfig'),
62
+ model_path='/root/wangqun/models/internvl2-2B',
63
+ quantization_llm=True,
64
+ quantization_vit=False,
65
+ type='xtuner.model.InternVL_V1_5')
66
+ optim_type = 'torch.optim.AdamW'
67
+ optim_wrapper = dict(
68
+ optimizer=dict(
69
+ betas=(
70
+ 0.9,
71
+ 0.999,
72
+ ),
73
+ lr=2e-05,
74
+ type='torch.optim.AdamW',
75
+ weight_decay=0.05),
76
+ type='DeepSpeedOptimWrapper')
77
+ param_scheduler = [
78
+ dict(
79
+ begin=0,
80
+ by_epoch=True,
81
+ convert_to_iter_based=True,
82
+ end=0.12,
83
+ start_factor=1e-05,
84
+ type='mmengine.optim.LinearLR'),
85
+ dict(
86
+ begin=0.12,
87
+ by_epoch=True,
88
+ convert_to_iter_based=True,
89
+ end=4,
90
+ eta_min=0.0,
91
+ type='mmengine.optim.CosineAnnealingLR'),
92
+ ]
93
+ path = '/root/wangqun/models/internvl2-2B'
94
+ prompt_template = 'xtuner.utils.PROMPT_TEMPLATE.internlm2_chat'
95
+ randomness = dict(deterministic=False, seed=None)
96
+ resume = False
97
+ runner_type = 'FlexibleRunner'
98
+ save_steps = 1000
99
+ save_total_limit = -1
100
+ strategy = dict(
101
+ config=dict(
102
+ bf16=dict(enabled=True),
103
+ fp16=dict(enabled=False, initial_scale_power=16),
104
+ gradient_accumulation_steps='auto',
105
+ gradient_clipping='auto',
106
+ train_micro_batch_size_per_gpu='auto',
107
+ zero_allow_untested_optimizer=True,
108
+ zero_force_ds_cpu_optimizer=False,
109
+ zero_optimization=dict(overlap_comm=True, stage=2)),
110
+ exclude_frozen_parameters=True,
111
+ gradient_accumulation_steps=1,
112
+ gradient_clipping=1,
113
+ sequence_parallel_size=1,
114
+ train_micro_batch_size_per_gpu=2,
115
+ type='xtuner.engine.DeepSpeedStrategy')
116
+ tokenizer = dict(
117
+ pretrained_model_name_or_path=
118
+ '/root/wangqun/models/internvl2-2B',
119
+ trust_remote_code=True,
120
+ type='transformers.AutoTokenizer.from_pretrained')
121
+ train_cfg = dict(max_epochs=4, type='xtuner.engine.runner.TrainLoop')
122
+ train_dataloader = dict(
123
+ batch_size=2,
124
+ collate_fn=dict(type='xtuner.dataset.collate_fns.default_collate_fn'),
125
+ dataset=dict(
126
+ data_paths=
127
+ '/root/data/tempData/screenshot_od/layout_ocr_multi_scale.json',
128
+ image_folders='/',
129
+ max_length=8192,
130
+ model_path='/root/wangqun/models/internvl2-2B',
131
+ template='xtuner.utils.PROMPT_TEMPLATE.internlm2_chat',
132
+ type='xtuner.dataset.InternVL_V1_5_Dataset'),
133
+ num_workers=4,
134
+ sampler=dict(
135
+ length_property='modality_length',
136
+ per_device_batch_size=2,
137
+ type='xtuner.dataset.samplers.LengthGroupedSampler'))
138
+ visualizer = dict(
139
+ type='mmengine.visualization.Visualizer',
140
+ vis_backends=[
141
+ dict(type='mmengine.visualization.TensorboardVisBackend'),
142
+ ])
143
+ warmup_ratio = 0.03
144
+ weight_decay = 0.05
145
+ work_dir = '/root/wangqun/work_dirs/internvl_ft_run_12_filter'
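A quick sanity check on the config above is to recompute the run length it implies. A sketch assuming mmengine is installed, the config is saved locally under the (hypothetical) path shown, and using the 4806-sample dataset size reported in the training logs further down:

import math
from mmengine.config import Config

cfg = Config.fromfile('internvl_v2_internlm2_2b_qlora_finetune_copy.py')  # hypothetical local path
num_samples = 4806                                         # from the dataset loader logs
iters_per_epoch = math.ceil(num_samples / cfg.batch_size)  # 2403 with batch_size = 2
print(iters_per_epoch * cfg.max_epochs)                    # 9612, matching iter_9612.pth below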
internvl_ft_run_12_filter/iter_1000.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ff54e611914bd3584430380a53da9940c23015b16917b0eb7de9fb652280f0d8
+ size 301244162
internvl_ft_run_12_filter/iter_2000.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d42f971e9c3b56fa86cc64566873987c3b8654ac9495f287f4f8bf2e1c191472
+ size 301318338
internvl_ft_run_12_filter/iter_3000.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cb25e46fa242cc70a082db9a47c87593ff9e15290cad1e6d2bde7e4d6e58f18e
+ size 301392386
internvl_ft_run_12_filter/iter_4000.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a2666d6ebb5c3f4476d5118e25a858ce6c628983fffbdddc5bfda13b262d60a6
+ size 301466626
internvl_ft_run_12_filter/iter_5000.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5320f8061e8cd2d95a58555e0717bc49ba5aa06195808a762994fc2f7cf40986
+ size 301540866
internvl_ft_run_12_filter/iter_6000.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:18a21687fe64d7dd3943dbea5cec4c0f46cc8c09d7e0e0a24cec1710e6f2e91a
+ size 301615042
internvl_ft_run_12_filter/iter_7000.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0b9c67595b834aaabc5cb02b9faa6cecdb478f622152668fe6493675b94378f5
+ size 301689218
internvl_ft_run_12_filter/iter_8000.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d957225a068b5fbd164698068c5df34c796ee6f58ee76438a6e00804d864447f
+ size 301763330
internvl_ft_run_12_filter/iter_9000.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a7a49c32e1fb80179256e644dcaf8ab29f19cd1c90edf14d10ff136016f8412a
+ size 301837506
internvl_ft_run_12_filter/iter_9612.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:167ac77f8c658792a016e1a6fe123f62036edb2cc67cf0fb40248b3feeb0c651
+ size 301882434
internvl_ft_run_12_filter/last_checkpoint ADDED
@@ -0,0 +1 @@
+ /root/wangqun/work_dirs/internvl_ft_run_12_filter/iter_9612.pth
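Because the strategy above sets exclude_frozen_parameters=True and the checkpoint hook sets save_optimizer=False, each iter_*.pth should hold only the trainable tensors (the LoRA adapters plus the mlp1 projector) rather than the full model. A rough inspection sketch, assuming PyTorch is installed and the blob has been pulled from LFS to a local (hypothetical) path:

import torch

ckpt = torch.load('iter_9612.pth', map_location='cpu')
state = ckpt.get('state_dict', ckpt)  # mmengine checkpoints nest weights under 'state_dict'
print(len(state), 'tensors,', sum(v.numel() for v in state.values()), 'parameters saved')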
internvl_ft_run_13_filter/20250304_121519/20250304_121519.log ADDED
@@ -0,0 +1,464 @@
+ 2025/03/04 12:15:19 - mmengine - DEBUG - An `DeepSpeedStrategy` instance is built from registry, and its implementation can be found in xtuner.engine._strategy.deepspeed
+ 2025/03/04 12:15:19 - mmengine - INFO -
+ ------------------------------------------------------------
+ System environment:
+ sys.platform: linux
+ Python: 3.10.13 (main, Sep 11 2023, 13:44:35) [GCC 11.2.0]
+ CUDA available: True
+ MUSA available: False
+ numpy_random_seed: 49487546
+ GPU 0: NVIDIA A100-SXM4-80GB
+ CUDA_HOME: /usr/local/cuda
+ NVCC: Cuda compilation tools, release 12.2, V12.2.140
+ GCC: gcc (Ubuntu 9.4.0-1ubuntu1~20.04.2) 9.4.0
+ PyTorch: 2.4.1+cu121
+ PyTorch compiling details: PyTorch built with:
+ - GCC 9.3
+ - C++ Version: 201703
+ - Intel(R) oneAPI Math Kernel Library Version 2022.2-Product Build 20220804 for Intel(R) 64 architecture applications
+ - Intel(R) MKL-DNN v3.4.2 (Git Hash 1137e04ec0b5251ca2b4400a4fd3c667ce843d67)
+ - OpenMP 201511 (a.k.a. OpenMP 4.5)
+ - LAPACK is enabled (usually provided by MKL)
+ - NNPACK is enabled
+ - CPU capability usage: AVX512
+ - CUDA Runtime 12.1
+ - NVCC architecture flags: -gencode;arch=compute_50,code=sm_50;-gencode;arch=compute_60,code=sm_60;-gencode;arch=compute_70,code=sm_70;-gencode;arch=compute_75,code=sm_75;-gencode;arch=compute_80,code=sm_80;-gencode;arch=compute_86,code=sm_86;-gencode;arch=compute_90,code=sm_90
+ - CuDNN 90.1 (built against CUDA 12.4)
+ - Magma 2.6.1
+ - Build settings: BLAS_INFO=mkl, BUILD_TYPE=Release, CUDA_VERSION=12.1, CUDNN_VERSION=9.1.0, CXX_COMPILER=/opt/rh/devtoolset-9/root/usr/bin/c++, CXX_FLAGS= -D_GLIBCXX_USE_CXX11_ABI=0 -fabi-version=11 -fvisibility-inlines-hidden -DUSE_PTHREADPOOL -DNDEBUG -DUSE_KINETO -DLIBKINETO_NOROCTRACER -DUSE_FBGEMM -DUSE_PYTORCH_QNNPACK -DUSE_XNNPACK -DSYMBOLICATE_MOBILE_DEBUG_HANDLE -O2 -fPIC -Wall -Wextra -Werror=return-type -Werror=non-virtual-dtor -Werror=bool-operation -Wnarrowing -Wno-missing-field-initializers -Wno-type-limits -Wno-array-bounds -Wno-unknown-pragmas -Wno-unused-parameter -Wno-unused-function -Wno-unused-result -Wno-strict-overflow -Wno-strict-aliasing -Wno-stringop-overflow -Wsuggest-override -Wno-psabi -Wno-error=pedantic -Wno-error=old-style-cast -Wno-missing-braces -fdiagnostics-color=always -faligned-new -Wno-unused-but-set-variable -Wno-maybe-uninitialized -fno-math-errno -fno-trapping-math -Werror=format -Wno-stringop-overflow, LAPACK_INFO=mkl, PERF_WITH_AVX=1, PERF_WITH_AVX2=1, PERF_WITH_AVX512=1, TORCH_VERSION=2.4.1, USE_CUDA=ON, USE_CUDNN=ON, USE_CUSPARSELT=1, USE_EXCEPTION_PTR=1, USE_GFLAGS=OFF, USE_GLOG=OFF, USE_GLOO=ON, USE_MKL=ON, USE_MKLDNN=ON, USE_MPI=OFF, USE_NCCL=1, USE_NNPACK=ON, USE_OPENMP=ON, USE_ROCM=OFF, USE_ROCM_KERNEL_ASSERT=OFF,
+
+ TorchVision: 0.19.1+cu121
+ OpenCV: 4.9.0
+ MMEngine: 0.10.6
+
+ Runtime environment:
+ launcher: none
+ randomness: {'seed': None, 'deterministic': False}
+ cudnn_benchmark: False
+ mp_cfg: {'mp_start_method': 'fork', 'opencv_num_threads': 0}
+ dist_cfg: {'backend': 'nccl'}
+ seed: None
+ deterministic: False
+ Distributed launcher: none
+ Distributed training: False
+ GPU number: 1
+ ------------------------------------------------------------
+
+ 2025/03/04 12:15:20 - mmengine - INFO - Config:
+ accumulative_counts = 2
+ batch_size = 1
+ betas = (
+ 0.9,
+ 0.999,
+ )
+ custom_hooks = [
+ dict(
+ tokenizer=dict(
+ pretrained_model_name_or_path=
+ '/root/share/new_models/OpenGVLab/InternVL2_5/InternVL2_5-2B',
+ trust_remote_code=True,
+ type='transformers.AutoTokenizer.from_pretrained'),
+ type='xtuner.engine.hooks.DatasetInfoHook'),
+ ]
+ data_path = '/root/data/tempData/screenshot_od/layout_ocr_multi_scale.json'
+ dataloader_num_workers = 4
+ default_hooks = dict(
+ checkpoint=dict(
+ by_epoch=False,
+ interval=1000,
+ max_keep_ckpts=-1,
+ save_optimizer=False,
+ type='mmengine.hooks.CheckpointHook'),
+ logger=dict(
+ interval=10,
+ log_metric_by_epoch=False,
+ type='mmengine.hooks.LoggerHook'),
+ param_scheduler=dict(type='mmengine.hooks.ParamSchedulerHook'),
+ sampler_seed=dict(type='mmengine.hooks.DistSamplerSeedHook'),
+ timer=dict(type='mmengine.hooks.IterTimerHook'))
+ env_cfg = dict(
+ cudnn_benchmark=False,
+ dist_cfg=dict(backend='nccl'),
+ mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0))
+ image_folder = '/'
+ launcher = 'none'
+ llava_dataset = dict(
+ data_paths='/root/data/tempData/screenshot_od/layout_ocr_multi_scale.json',
+ image_folders='/',
+ max_length=8192,
+ model_path='/root/share/new_models/OpenGVLab/InternVL2_5/InternVL2_5-2B',
+ template='xtuner.utils.PROMPT_TEMPLATE.internlm2_chat',
+ type='xtuner.dataset.InternVL_V1_5_Dataset')
+ load_from = None
+ log_level = 'DEBUG'
+ log_processor = dict(by_epoch=False)
+ lr = 2e-05
+ max_epochs = 4
+ max_length = 8192
+ max_norm = 1
+ model = dict(
+ freeze_llm=True,
+ freeze_visual_encoder=True,
+ llm_lora=dict(
+ lora_alpha=256,
+ lora_dropout=0.05,
+ r=128,
+ target_modules=None,
+ task_type='CAUSAL_LM',
+ type='peft.LoraConfig'),
+ model_path='/root/share/new_models/OpenGVLab/InternVL2_5/InternVL2_5-2B',
+ quantization_llm=True,
+ quantization_vit=False,
+ type='xtuner.model.InternVL_V1_5')
+ optim_type = 'torch.optim.AdamW'
+ optim_wrapper = dict(
+ optimizer=dict(
+ betas=(
+ 0.9,
+ 0.999,
+ ),
+ lr=2e-05,
+ type='torch.optim.AdamW',
+ weight_decay=0.05),
+ type='DeepSpeedOptimWrapper')
+ param_scheduler = [
+ dict(
+ begin=0,
+ by_epoch=True,
+ convert_to_iter_based=True,
+ end=0.12,
+ start_factor=1e-05,
+ type='mmengine.optim.LinearLR'),
+ dict(
+ begin=0.12,
+ by_epoch=True,
+ convert_to_iter_based=True,
+ end=4,
+ eta_min=0.0,
+ type='mmengine.optim.CosineAnnealingLR'),
+ ]
+ path = '/root/share/new_models/OpenGVLab/InternVL2_5/InternVL2_5-2B'
+ prompt_template = 'xtuner.utils.PROMPT_TEMPLATE.internlm2_chat'
+ randomness = dict(deterministic=False, seed=None)
+ resume = False
+ runner_type = 'FlexibleRunner'
+ save_steps = 1000
+ save_total_limit = -1
+ strategy = dict(
+ config=dict(
+ bf16=dict(enabled=True),
+ fp16=dict(enabled=False, initial_scale_power=16),
+ gradient_accumulation_steps='auto',
+ gradient_clipping='auto',
+ train_micro_batch_size_per_gpu='auto',
+ zero_allow_untested_optimizer=True,
+ zero_force_ds_cpu_optimizer=False,
+ zero_optimization=dict(overlap_comm=True, stage=2)),
+ exclude_frozen_parameters=True,
+ gradient_accumulation_steps=2,
+ gradient_clipping=1,
+ sequence_parallel_size=1,
+ train_micro_batch_size_per_gpu=1,
+ type='xtuner.engine.DeepSpeedStrategy')
+ tokenizer = dict(
+ pretrained_model_name_or_path=
+ '/root/share/new_models/OpenGVLab/InternVL2_5/InternVL2_5-2B',
+ trust_remote_code=True,
+ type='transformers.AutoTokenizer.from_pretrained')
+ train_cfg = dict(max_epochs=4, type='xtuner.engine.runner.TrainLoop')
+ train_dataloader = dict(
+ batch_size=1,
+ collate_fn=dict(type='xtuner.dataset.collate_fns.default_collate_fn'),
+ dataset=dict(
+ data_paths=
+ '/root/data/tempData/screenshot_od/layout_ocr_multi_scale.json',
+ image_folders='/',
+ max_length=8192,
+ model_path=
+ '/root/share/new_models/OpenGVLab/InternVL2_5/InternVL2_5-2B',
+ template='xtuner.utils.PROMPT_TEMPLATE.internlm2_chat',
+ type='xtuner.dataset.InternVL_V1_5_Dataset'),
+ num_workers=4,
+ sampler=dict(
+ length_property='modality_length',
+ per_device_batch_size=2,
+ type='xtuner.dataset.samplers.LengthGroupedSampler'))
+ visualizer = dict(
+ type='mmengine.visualization.Visualizer',
+ vis_backends=[
+ dict(type='mmengine.visualization.TensorboardVisBackend'),
+ ])
+ warmup_ratio = 0.03
+ weight_decay = 0.05
+ work_dir = '/root/wangqun/work_dirs/internvl_ft_run_13_filter'
+
+ 2025/03/04 12:15:20 - mmengine - DEBUG - An `TensorboardVisBackend` instance is built from registry, and its implementation can be found in mmengine.visualization.vis_backend
+ 2025/03/04 12:15:20 - mmengine - DEBUG - An `Visualizer` instance is built from registry, and its implementation can be found in mmengine.visualization.visualizer
+ 2025/03/04 12:15:20 - mmengine - DEBUG - Attribute `_env_initialized` is not defined in <class 'mmengine.visualization.vis_backend.TensorboardVisBackend'> or `<class 'mmengine.visualization.vis_backend.TensorboardVisBackend'>._env_initialized is False, `_init_env` will be called and <class 'mmengine.visualization.vis_backend.TensorboardVisBackend'>._env_initialized will be set to True
+ 2025/03/04 12:15:20 - mmengine - DEBUG - Get class `RuntimeInfoHook` from "hook" registry in "mmengine"
+ 2025/03/04 12:15:20 - mmengine - DEBUG - An `RuntimeInfoHook` instance is built from registry, and its implementation can be found in mmengine.hooks.runtime_info_hook
+ 2025/03/04 12:15:20 - mmengine - DEBUG - An `IterTimerHook` instance is built from registry, and its implementation can be found in mmengine.hooks.iter_timer_hook
+ 2025/03/04 12:15:20 - mmengine - DEBUG - An `DistSamplerSeedHook` instance is built from registry, and its implementation can be found in mmengine.hooks.sampler_seed_hook
+ 2025/03/04 12:15:20 - mmengine - DEBUG - An `LoggerHook` instance is built from registry, and its implementation can be found in mmengine.hooks.logger_hook
+ 2025/03/04 12:15:20 - mmengine - DEBUG - An `ParamSchedulerHook` instance is built from registry, and its implementation can be found in mmengine.hooks.param_scheduler_hook
+ 2025/03/04 12:15:20 - mmengine - DEBUG - An `CheckpointHook` instance is built from registry, and its implementation can be found in mmengine.hooks.checkpoint_hook
+ 2025/03/04 12:15:20 - mmengine - WARNING - Failed to search registry with scope "mmengine" in the "builder" registry tree. As a workaround, the current "builder" registry in "xtuner" is used to build instance. This may cause unexpected failure when running the built modules. Please check whether "mmengine" is a correct scope, or whether the registry is initialized.
+ 2025/03/04 12:15:21 - mmengine - DEBUG - An `from_pretrained` instance is built from registry, and its implementation can be found in transformers.models.auto.tokenization_auto
+ 2025/03/04 12:15:21 - mmengine - DEBUG - An `DatasetInfoHook` instance is built from registry, and its implementation can be found in xtuner.engine.hooks.dataset_info_hook
+ 2025/03/04 12:15:21 - mmengine - INFO - Hooks will be executed in the following order:
+ before_run:
+ (VERY_HIGH ) RuntimeInfoHook
+ (BELOW_NORMAL) LoggerHook
+ --------------------
+ before_train:
+ (VERY_HIGH ) RuntimeInfoHook
+ (NORMAL ) IterTimerHook
+ (NORMAL ) DatasetInfoHook
+ (VERY_LOW ) CheckpointHook
+ --------------------
+ before_train_epoch:
+ (VERY_HIGH ) RuntimeInfoHook
+ (NORMAL ) IterTimerHook
+ (NORMAL ) DistSamplerSeedHook
+ --------------------
+ before_train_iter:
+ (VERY_HIGH ) RuntimeInfoHook
+ (NORMAL ) IterTimerHook
+ --------------------
+ after_train_iter:
+ (VERY_HIGH ) RuntimeInfoHook
+ (NORMAL ) IterTimerHook
+ (BELOW_NORMAL) LoggerHook
+ (LOW ) ParamSchedulerHook
+ (VERY_LOW ) CheckpointHook
+ --------------------
+ after_train_epoch:
+ (NORMAL ) IterTimerHook
+ (LOW ) ParamSchedulerHook
+ (VERY_LOW ) CheckpointHook
+ --------------------
+ before_val:
+ (VERY_HIGH ) RuntimeInfoHook
+ (NORMAL ) DatasetInfoHook
+ --------------------
+ before_val_epoch:
+ (NORMAL ) IterTimerHook
+ --------------------
+ before_val_iter:
+ (NORMAL ) IterTimerHook
+ --------------------
+ after_val_iter:
+ (NORMAL ) IterTimerHook
+ (BELOW_NORMAL) LoggerHook
+ --------------------
+ after_val_epoch:
+ (VERY_HIGH ) RuntimeInfoHook
+ (NORMAL ) IterTimerHook
+ (BELOW_NORMAL) LoggerHook
+ (LOW ) ParamSchedulerHook
+ (VERY_LOW ) CheckpointHook
+ --------------------
+ after_val:
+ (VERY_HIGH ) RuntimeInfoHook
+ --------------------
+ after_train:
+ (VERY_HIGH ) RuntimeInfoHook
+ (VERY_LOW ) CheckpointHook
+ --------------------
+ before_test:
+ (VERY_HIGH ) RuntimeInfoHook
+ (NORMAL ) DatasetInfoHook
+ --------------------
+ before_test_epoch:
+ (NORMAL ) IterTimerHook
+ --------------------
+ before_test_iter:
+ (NORMAL ) IterTimerHook
+ --------------------
+ after_test_iter:
+ (NORMAL ) IterTimerHook
+ (BELOW_NORMAL) LoggerHook
+ --------------------
+ after_test_epoch:
+ (VERY_HIGH ) RuntimeInfoHook
+ (NORMAL ) IterTimerHook
+ (BELOW_NORMAL) LoggerHook
+ --------------------
+ after_test:
+ (VERY_HIGH ) RuntimeInfoHook
+ --------------------
+ after_run:
+ (BELOW_NORMAL) LoggerHook
+ --------------------
+ 2025/03/04 12:15:21 - mmengine - DEBUG - An `FlexibleRunner` instance is built from registry, its implementation can be found in mmengine.runner._flexible_runner
+ 2025/03/04 12:15:21 - mmengine - INFO - Starting to loading data and calc length
+ 2025/03/04 12:15:21 - mmengine - INFO - =======Starting to process /root/data/tempData/screenshot_od/layout_ocr_multi_scale.json =======
+ 2025/03/04 12:15:28 - mmengine - INFO - =======total 4806 samples of /root/data/tempData/screenshot_od/layout_ocr_multi_scale.json=======
+ 2025/03/04 12:15:28 - mmengine - INFO - end loading data and calc length
+ 2025/03/04 12:15:28 - mmengine - INFO - =======total 4806 samples=======
+ 2025/03/04 12:15:28 - mmengine - DEBUG - An `InternVL_V1_5_Dataset` instance is built from registry, and its implementation can be found in xtuner.dataset.internvl_dataset
+ 2025/03/04 12:15:28 - mmengine - INFO - LengthGroupedSampler is used.
+ 2025/03/04 12:15:28 - mmengine - INFO - LengthGroupedSampler construction is complete, and the selected attribute is modality_length
+ 2025/03/04 12:15:28 - mmengine - DEBUG - An `LengthGroupedSampler` instance is built from registry, and its implementation can be found in xtuner.dataset.samplers.length_grouped
+ 2025/03/04 12:15:28 - mmengine - WARNING - Dataset InternVL_V1_5_Dataset has no metainfo. ``dataset_meta`` in visualizer will be None.
+ 2025/03/04 12:15:28 - mmengine - DEBUG - An `TrainLoop` instance is built from registry, and its implementation can be found in xtuner.engine.runner.loops
+ 2025/03/04 12:15:28 - mmengine - INFO - Start to load InternVL_V1_5 model.
+ 2025/03/04 12:15:28 - mmengine - DEBUG - Get class `BaseDataPreprocessor` from "model" registry in "mmengine"
+ 2025/03/04 12:15:28 - mmengine - DEBUG - An `BaseDataPreprocessor` instance is built from registry, and its implementation can be found in mmengine.model.base_model.data_preprocessor
+ 2025/03/04 12:15:55 - mmengine - DEBUG - An `LoraConfig` instance is built from registry, and its implementation can be found in peft.tuners.lora.config
+ 2025/03/04 12:15:58 - mmengine - INFO - InternVL_V1_5(
+ (data_preprocessor): BaseDataPreprocessor()
+ (model): InternVLChatModel(
+ (vision_model): InternVisionModel(
+ (embeddings): InternVisionEmbeddings(
+ (patch_embedding): Conv2d(3, 1024, kernel_size=(14, 14), stride=(14, 14))
+ )
+ (encoder): InternVisionEncoder(
+ (layers): ModuleList(
+ (0-23): 24 x InternVisionEncoderLayer(
+ (attn): InternAttention(
+ (qkv): Linear(in_features=1024, out_features=3072, bias=True)
+ (attn_drop): Dropout(p=0.0, inplace=False)
+ (proj_drop): Dropout(p=0.0, inplace=False)
+ (proj): Linear(in_features=1024, out_features=1024, bias=True)
+ )
+ (mlp): InternMLP(
+ (act): GELUActivation()
+ (fc1): Linear(in_features=1024, out_features=4096, bias=True)
+ (fc2): Linear(in_features=4096, out_features=1024, bias=True)
+ )
+ (norm1): LayerNorm((1024,), eps=1e-06, elementwise_affine=True)
+ (norm2): LayerNorm((1024,), eps=1e-06, elementwise_affine=True)
+ (drop_path1): Identity()
+ (drop_path2): Identity()
+ )
+ )
+ )
+ )
+ (language_model): PeftModelForCausalLM(
+ (base_model): LoraModel(
+ (model): InternLM2ForCausalLM(
+ (model): InternLM2Model(
+ (tok_embeddings): Embedding(92553, 2048, padding_idx=2)
+ (layers): ModuleList(
+ (0-23): 24 x InternLM2DecoderLayer(
+ (attention): InternLM2Attention(
+ (wqkv): lora.Linear(
+ (base_layer): Linear4bit(in_features=2048, out_features=4096, bias=False)
+ (lora_dropout): ModuleDict(
+ (default): Dropout(p=0.05, inplace=False)
+ )
+ (lora_A): ModuleDict(
+ (default): Linear(in_features=2048, out_features=128, bias=False)
+ )
+ (lora_B): ModuleDict(
+ (default): Linear(in_features=128, out_features=4096, bias=False)
+ )
+ (lora_embedding_A): ParameterDict()
+ (lora_embedding_B): ParameterDict()
+ (lora_magnitude_vector): ModuleDict()
+ )
+ (wo): lora.Linear(
+ (base_layer): Linear4bit(in_features=2048, out_features=2048, bias=False)
+ (lora_dropout): ModuleDict(
+ (default): Dropout(p=0.05, inplace=False)
+ )
+ (lora_A): ModuleDict(
+ (default): Linear(in_features=2048, out_features=128, bias=False)
+ )
+ (lora_B): ModuleDict(
+ (default): Linear(in_features=128, out_features=2048, bias=False)
+ )
+ (lora_embedding_A): ParameterDict()
+ (lora_embedding_B): ParameterDict()
+ (lora_magnitude_vector): ModuleDict()
+ )
+ (rotary_emb): InternLM2DynamicNTKScalingRotaryEmbedding()
+ )
+ (feed_forward): InternLM2MLP(
+ (w1): lora.Linear(
+ (base_layer): Linear4bit(in_features=2048, out_features=8192, bias=False)
+ (lora_dropout): ModuleDict(
+ (default): Dropout(p=0.05, inplace=False)
+ )
+ (lora_A): ModuleDict(
+ (default): Linear(in_features=2048, out_features=128, bias=False)
+ )
+ (lora_B): ModuleDict(
+ (default): Linear(in_features=128, out_features=8192, bias=False)
+ )
+ (lora_embedding_A): ParameterDict()
+ (lora_embedding_B): ParameterDict()
+ (lora_magnitude_vector): ModuleDict()
+ )
+ (w3): lora.Linear(
+ (base_layer): Linear4bit(in_features=2048, out_features=8192, bias=False)
+ (lora_dropout): ModuleDict(
+ (default): Dropout(p=0.05, inplace=False)
+ )
+ (lora_A): ModuleDict(
+ (default): Linear(in_features=2048, out_features=128, bias=False)
+ )
+ (lora_B): ModuleDict(
+ (default): Linear(in_features=128, out_features=8192, bias=False)
+ )
+ (lora_embedding_A): ParameterDict()
+ (lora_embedding_B): ParameterDict()
+ (lora_magnitude_vector): ModuleDict()
+ )
+ (w2): lora.Linear(
+ (base_layer): Linear4bit(in_features=8192, out_features=2048, bias=False)
+ (lora_dropout): ModuleDict(
+ (default): Dropout(p=0.05, inplace=False)
+ )
+ (lora_A): ModuleDict(
+ (default): Linear(in_features=8192, out_features=128, bias=False)
+ )
+ (lora_B): ModuleDict(
+ (default): Linear(in_features=128, out_features=2048, bias=False)
+ )
+ (lora_embedding_A): ParameterDict()
+ (lora_embedding_B): ParameterDict()
+ (lora_magnitude_vector): ModuleDict()
+ )
+ (act_fn): SiLU()
+ )
+ (attention_norm): InternLM2RMSNorm()
+ (ffn_norm): InternLM2RMSNorm()
+ )
+ )
+ (norm): InternLM2RMSNorm()
+ )
+ (output): lora.Linear(
+ (base_layer): Linear4bit(in_features=2048, out_features=92553, bias=False)
+ (lora_dropout): ModuleDict(
+ (default): Dropout(p=0.05, inplace=False)
+ )
+ (lora_A): ModuleDict(
+ (default): Linear(in_features=2048, out_features=128, bias=False)
+ )
+ (lora_B): ModuleDict(
+ (default): Linear(in_features=128, out_features=92553, bias=False)
+ )
+ (lora_embedding_A): ParameterDict()
+ (lora_embedding_B): ParameterDict()
+ (lora_magnitude_vector): ModuleDict()
+ )
+ )
+ )
+ )
+ (mlp1): Sequential(
+ (0): LayerNorm((4096,), eps=1e-05, elementwise_affine=True)
+ (1): Linear(in_features=4096, out_features=2048, bias=True)
+ (2): GELU(approximate='none')
+ (3): Linear(in_features=2048, out_features=2048, bias=True)
+ )
+ )
+ )
+ 2025/03/04 12:15:58 - mmengine - INFO - InternVL_V1_5 construction is complete
+ 2025/03/04 12:15:58 - mmengine - DEBUG - An `InternVL_V1_5` instance is built from registry, and its implementation can be found in xtuner.model.internvl
+ 2025/03/04 12:15:58 - mmengine - DEBUG - Get class `DefaultOptimWrapperConstructor` from "optimizer wrapper constructor" registry in "mmengine"
+ 2025/03/04 12:15:58 - mmengine - DEBUG - An `DefaultOptimWrapperConstructor` instance is built from registry, and its implementation can be found in mmengine.optim.optimizer.default_constructor
+ 2025/03/04 12:15:58 - mmengine - DEBUG - An `AdamW` instance is built from registry, and its implementation can be found in torch.optim.adamw
+ 2025/03/04 12:15:58 - mmengine - DEBUG - Get class `DeepSpeedOptimWrapper` from "optim_wrapper" registry in "mmengine"
+ 2025/03/04 12:15:58 - mmengine - DEBUG - An `DeepSpeedOptimWrapper` instance is built from registry, and its implementation can be found in mmengine._strategy.deepspeed
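The module dump in the log above is enough to cross-check the ~301 MB checkpoint sizes in this folder. A back-of-the-envelope sketch, assuming the saved tensors are bf16 (2 bytes per element) and that the mlp1 projector is trainable alongside the LoRA adapters:

r = 128  # LoRA rank from the config

def lora_params(in_f, out_f):
    # one lora_A (in_f x r) plus one lora_B (r x out_f) pair, no biases
    return in_f * r + r * out_f

per_layer = (lora_params(2048, 4096)        # wqkv
             + lora_params(2048, 2048)      # wo
             + 2 * lora_params(2048, 8192)  # w1, w3
             + lora_params(8192, 2048))     # w2
lora_total = 24 * per_layer + lora_params(2048, 92553)  # 24 decoder layers + output head
mlp1 = 2 * 4096 + (4096 * 2048 + 2048) + (2048 * 2048 + 2048)  # LayerNorm + two Linears
print((lora_total + mlp1) * 2 / 1e6)  # ~301 MB, in line with the iter_*.pth sizes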
internvl_ft_run_13_filter/20250304_121519/vis_data/events.out.tfevents.1741061720.intern-studio-40019814.41268.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:46db7041eca09f33cd2c42f058a3f2665a3e1928517a93cb3132aa388a2accdb
+ size 4914
internvl_ft_run_13_filter/20250304_213711/20250304_213711.log ADDED
The diff for this file is too large to render. See raw diff
 
internvl_ft_run_13_filter/20250304_213711/vis_data/events.out.tfevents.1741095432.intern-studio-40019814.159243.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1d38718bec644b61e1e53b07ceca6b2c0adb59aedff598ab38d1fa9244ac23d0
+ size 512032
internvl_ft_run_13_filter/internvl_v2_internlm2_2b_qlora_finetune_copy.py ADDED
@@ -0,0 +1,146 @@
+ accumulative_counts = 2
+ batch_size = 1
+ betas = (
+ 0.9,
+ 0.999,
+ )
+ custom_hooks = [
+ dict(
+ tokenizer=dict(
+ pretrained_model_name_or_path=
+ '/data/wangqun/models/InternVL2_5-2B',
+ trust_remote_code=True,
+ type='transformers.AutoTokenizer.from_pretrained'),
+ type='xtuner.engine.hooks.DatasetInfoHook'),
+ ]
+ data_path = '/root/data/tempData/screenshot_od/layout_ocr_multi_scale.json'
+ dataloader_num_workers = 4
+ default_hooks = dict(
+ checkpoint=dict(
+ by_epoch=False,
+ interval=1000,
+ max_keep_ckpts=-1,
+ save_optimizer=False,
+ type='mmengine.hooks.CheckpointHook'),
+ logger=dict(
+ interval=10,
+ log_metric_by_epoch=False,
+ type='mmengine.hooks.LoggerHook'),
+ param_scheduler=dict(type='mmengine.hooks.ParamSchedulerHook'),
+ sampler_seed=dict(type='mmengine.hooks.DistSamplerSeedHook'),
+ timer=dict(type='mmengine.hooks.IterTimerHook'))
+ env_cfg = dict(
+ cudnn_benchmark=False,
+ dist_cfg=dict(backend='nccl'),
+ mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0))
+ image_folder = '/'
+ launcher = 'none'
+ llava_dataset = dict(
+ data_paths='/root/data/tempData/screenshot_od/layout_ocr_multi_scale.json',
+ image_folders='/',
+ max_length=8192,
+ model_path='/data/wangqun/models/InternVL2_5-2B',
+ template='xtuner.utils.PROMPT_TEMPLATE.internlm2_chat',
+ type='xtuner.dataset.InternVL_V1_5_Dataset')
+ load_from = None
+ log_level = 'DEBUG'
+ log_processor = dict(by_epoch=False)
+ lr = 2e-05
+ max_epochs = 4
+ max_length = 8192
+ max_norm = 1
+ model = dict(
+ freeze_llm=True,
+ freeze_visual_encoder=True,
+ llm_lora=dict(
+ lora_alpha=256,
+ lora_dropout=0.05,
+ r=128,
+ target_modules=None,
+ task_type='CAUSAL_LM',
+ type='peft.LoraConfig'),
+ model_path='/data/wangqun/models/InternVL2_5-2B',
+ quantization_llm=True,
+ quantization_vit=False,
+ type='xtuner.model.InternVL_V1_5')
+ optim_type = 'torch.optim.AdamW'
+ optim_wrapper = dict(
+ optimizer=dict(
+ betas=(
+ 0.9,
+ 0.999,
+ ),
+ lr=2e-05,
+ type='torch.optim.AdamW',
+ weight_decay=0.05),
+ type='DeepSpeedOptimWrapper')
+ param_scheduler = [
+ dict(
+ begin=0,
+ by_epoch=True,
+ convert_to_iter_based=True,
+ end=0.12,
+ start_factor=1e-05,
+ type='mmengine.optim.LinearLR'),
+ dict(
+ begin=0.12,
+ by_epoch=True,
+ convert_to_iter_based=True,
+ end=4,
+ eta_min=0.0,
+ type='mmengine.optim.CosineAnnealingLR'),
+ ]
+ path = '/data/wangqun/models/InternVL2_5-2B'
+ prompt_template = 'xtuner.utils.PROMPT_TEMPLATE.internlm2_chat'
+ randomness = dict(deterministic=False, seed=None)
+ resume = False
+ runner_type = 'FlexibleRunner'
+ save_steps = 1000
+ save_total_limit = -1
+ strategy = dict(
+ config=dict(
+ bf16=dict(enabled=True),
+ fp16=dict(enabled=False, initial_scale_power=16),
+ gradient_accumulation_steps='auto',
+ gradient_clipping='auto',
+ train_micro_batch_size_per_gpu='auto',
+ zero_allow_untested_optimizer=True,
+ zero_force_ds_cpu_optimizer=False,
+ zero_optimization=dict(overlap_comm=True, stage=2)),
+ exclude_frozen_parameters=True,
+ gradient_accumulation_steps=2,
+ gradient_clipping=1,
+ sequence_parallel_size=1,
+ train_micro_batch_size_per_gpu=1,
+ type='xtuner.engine.DeepSpeedStrategy')
+ tokenizer = dict(
+ pretrained_model_name_or_path=
+ '/data/wangqun/models/InternVL2_5-2B',
+ trust_remote_code=True,
+ type='transformers.AutoTokenizer.from_pretrained')
+ train_cfg = dict(max_epochs=4, type='xtuner.engine.runner.TrainLoop')
+ train_dataloader = dict(
+ batch_size=1,
+ collate_fn=dict(type='xtuner.dataset.collate_fns.default_collate_fn'),
+ dataset=dict(
+ data_paths=
+ '/root/data/tempData/screenshot_od/layout_ocr_multi_scale.json',
+ image_folders='/',
+ max_length=8192,
+ model_path=
+ '/data/wangqun/models/InternVL2_5-2B',
+ template='xtuner.utils.PROMPT_TEMPLATE.internlm2_chat',
+ type='xtuner.dataset.InternVL_V1_5_Dataset'),
+ num_workers=4,
+ sampler=dict(
+ length_property='modality_length',
+ per_device_batch_size=2,
+ type='xtuner.dataset.samplers.LengthGroupedSampler'))
+ visualizer = dict(
+ type='mmengine.visualization.Visualizer',
+ vis_backends=[
+ dict(type='mmengine.visualization.TensorboardVisBackend'),
+ ])
+ warmup_ratio = 0.03
+ weight_decay = 0.05
+ work_dir = '/root/wangqun/work_dirs/internvl_ft_run_13_filter'
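For reference, the LinearLR stage above ends at 0.12 epochs because warmup_ratio × max_epochs = 0.03 × 4 = 0.12, after which CosineAnnealingLR decays the rate to eta_min=0.0 at epoch 4. With convert_to_iter_based=True, batch_size=1, and the 4806-sample dataset from the logs, that is about 576 warmup iterations out of 4806 × 4 = 19224 total, matching the final iter_19224.pth below. An illustrative re-implementation of the resulting curve (a sketch, not xtuner code):

import math

lr, start_factor = 2e-05, 1e-05
total_iters = 4806 * 4           # 19224 iterations over 4 epochs
warmup_iters = int(0.12 * 4806)  # 576 linear-warmup iterations

def lr_at(i):
    if i < warmup_iters:  # LinearLR: factor ramps from start_factor up to 1
        return lr * (start_factor + (1 - start_factor) * i / warmup_iters)
    t = (i - warmup_iters) / (total_iters - warmup_iters)
    return 0.5 * lr * (1 + math.cos(math.pi * t))  # cosine decay to eta_min = 0

print(lr_at(0), lr_at(warmup_iters), lr_at(total_iters))  # 2e-10 -> 2e-05 -> 0.0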
internvl_ft_run_13_filter/iter_1000.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b24f6c6bd3851bce9ce45f4bba96fdbcee632c7c622ebe837ec5b973085bbb73
+ size 301244994
internvl_ft_run_13_filter/iter_10000.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b1c692ec736bd23df366f5c00daad06614571f7fca440f7964cff27fbfd8bb3e
+ size 301920578
internvl_ft_run_13_filter/iter_11000.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ea8fcd7154e0d6ded70bc9e6b67d7a1a8cf59c57c566eeb6bea962cf721f1591
+ size 301995330
internvl_ft_run_13_filter/iter_12000.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:43f6034da940b55224f9205875cc2740bfccc058a3f145d0b7bcaccd115536f5
+ size 302070338
internvl_ft_run_13_filter/iter_13000.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:92bfbc4a13bfd7b0eea33d12fc9615ec744297673ed571cd140f40871481f46d
+ size 302145474
internvl_ft_run_13_filter/iter_14000.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b0eb4fa01c3378ea8eb3acf2b084fa954a72d29fcf63b2bfe27553d61e79fb8b
+ size 302220482
internvl_ft_run_13_filter/iter_15000.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1bd81bba2007b30f19aa53382d801968c7f349487162031fe2dc8ebd823e4c27
+ size 302295490
internvl_ft_run_13_filter/iter_16000.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8603f7e271fcefb72f50bf1397f05f03b55505c3272e4680fdfa27c4de9dc157
+ size 302370562
internvl_ft_run_13_filter/iter_17000.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3d9041144ef0a48ad437e9ce94558444dbbc6c485d9a779048f6fe08a44e324a
+ size 302445570
internvl_ft_run_13_filter/iter_18000.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1032dda4ee201c9f851825745b001b30864d7c84cdb11ace42b1d9ffa70be77b
+ size 302520514
internvl_ft_run_13_filter/iter_19000.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f17fa3c628a60d737567747eb440d606b0ee0fa89bf4c8f5acbcb48e13e3089f
+ size 302594882
internvl_ft_run_13_filter/iter_19224.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:75eca927c93ffe7129d4f1fc84bdeb42a6563e6bc97c1f66d98a17dd1f479ae6
+ size 302611458
internvl_ft_run_13_filter/iter_2000.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:039ecebe9872764ca4c96ff04029e8921a938cad5ad026009a090270788aad4b
+ size 301320450
internvl_ft_run_13_filter/iter_3000.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:779c990e71dfbac4e0c18606852eb8f564bb232947404a0342a4c06c2792e2e9
+ size 301395330
internvl_ft_run_13_filter/iter_4000.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:85bd648ac928cf3e76058af3404fb87ce098e5e05dc6e126707f1a5318f74119
+ size 301470594
internvl_ft_run_13_filter/iter_5000.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a9378786acb4c41e9f719d475acb3806e467e198d0ca929ae82880e8b826ff06
+ size 301545538
internvl_ft_run_13_filter/iter_6000.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f5341528784c8762d29de4b7d16e32266b87ee3aade8daede3996d4a6fc6989e
+ size 301620546