diff --git a/llava_repr_requirements.txt b/llava_repr_requirements.txt
index f1f6dcf8b..07afb6866 100644
--- a/llava_repr_requirements.txt
+++ b/llava_repr_requirements.txt
@@ -1,33 +1,32 @@
-llava@git+https://github.com/haotian-liu/LLaVA@v1.1.3
-accelerate>=0.21.0
-black==24.1.0
+accelerate==0.21.0
 datasets==2.16.1
-evaluate>=0.4.0
-jsonlines
-numexpr
-peft>=0.2.0
-pybind11>=2.6.2
-pytablewriter
-rouge-score>=0.0.4
-sacrebleu>=1.5.0
-scikit-learn>=0.24.1
-sqlitedict
+evaluate==0.4.1
+hf_transfer==0.1.6
+Jinja2==3.1.3
+numpy==1.26.4
+openai==1.13.3
+packaging==23.2
+pandas==2.2.1
+Pillow==10.2.0
+protobuf==4.25.3
+pycocoevalcap==1.2
+pycocotools==2.0.7
+pytablewriter==1.2.0
+pytest==8.0.2
+python_Levenshtein==0.25.0
+pytz==2024.1
+PyYAML==6.0.1
+PyYAML==6.0.1
+Requests==2.31.0
+sacrebleu==2.4.0
+scikit_learn==1.2.2
+sentencepiece==0.1.99
+setuptools==68.2.2
+sglang==0.1.12
+shortuuid==1.0.12
+sqlitedict==2.1.0
+tenacity==8.2.3
 torch==2.0.1
-openai>=1.0.0
-pycocoevalcap
-tqdm-multiprocess
-transformers>=4.36.2
-zstandard
-pillow
-pyyaml
-sympy
-mpmath
-Jinja2
-openpyxl
-Levenshtein
-hf_transfer
-tenacity
-wandb>=0.16.0
-transformers-stream-generator
-tiktoken
-pre-commit
\ No newline at end of file
+tokenizers==0.15.2
+tqdm==4.66.2
+transformers==4.37.2
\ No newline at end of file
diff --git a/miscs/repr_scripts.sh b/miscs/repr_scripts.sh
index f5a743099..27fccbafc 100644
--- a/miscs/repr_scripts.sh
+++ b/miscs/repr_scripts.sh
@@ -2,9 +2,13 @@
 cd lmms_eval;
 pip install --no-deps -U -e .
 
+# install LLaVA without building dependencies
+cd LLaVA
+pip install --no-deps -U -e .
+
 # install all the requirements that require for reproduce llava results
 pip install -r llava_repr_requirements.txt
 
 # Run and exactly reproduce llava_v1.5 results!
 # mme as an example
-accelerate launch --num_processes=1 -m lmms_eval --model llava --model_args pretrained="liuhaotian/llava-v1.5-7b" --tasks mme --batch_size 1 --log_samples --log_samples_sufix reproduce --output_path ./logs/
\ No newline at end of file
+accelerate launch --num_processes=1 -m lmms_eval --model llava --model_args pretrained="liuhaotian/llava-v1.5-7b,use_flash_attention_2=False" --tasks mme --batch_size 1 --log_samples --log_samples_sufix reproduce --output_path ./logs/
\ No newline at end of file