
Commit

Remove llava
mattmazzola committed Mar 3, 2024
1 parent d4a9f90 commit 47490f4
Showing 3 changed files with 3 additions and 216 deletions.
74 changes: 0 additions & 74 deletions .vscode/launch.json
@@ -60,79 +60,5 @@
"LOGLEVEL": "DEBUG",
},
},
{
"name": "Python: Eval - MathVista - (LLaVA) evaluation.generate_response",
"type": "debugpy",
"request": "launch",
"module": "evaluation.generate_response",
"justMyCode": true,
"args": [
"--conv-mode",
"vicuna_v1",
"--max_num_problems",
"100",
"--output_dir",
"_results/eval/mathvista/llava/debug",
"--output_file",
"llava-v1.5-7b.json",
"--model_path",
"liuhaotian/llava-v1.5-7b",
// TODO: Find out why loading from a folder during debug does not work
// "/mnt/mattmprojects/exp/projects/mattm-projectwillow/amlt-results/7293878818.78845-fe8da8ef-7f38-4b40-b2e1-3259c7bf7a3e/llava/checkpoints/llava-vicuna-7b-v1.5-finetune",
"--save_every",
"1",
],
"envFile": "${workspaceFolder}/.env",
"env": {
"CUDA_VISIBLE_DEVICES": "0",
"LOGLEVEL": "DEBUG",
},
},
{
"name": "Python: Eval - MathVista - (LLaVA) evaluation.extract_answer",
"type": "debugpy",
"request": "launch",
"module": "evaluation.extract_answer",
"justMyCode": true,
"args": [
"--max_num_problems",
"100",
"--results_file_path",
"_results/eval/mathvista/llava/debug/llava-v1.5-7b.json",
// "${workspaceFolder}/_results/eval/mathvista/20240213_223832/llava-v1.5-7b.json",
// "${workspaceFolder}/_results/eval/mathvista/debug/output_gpt4_2shot_solution_use_caption_ocr.json",
],
"envFile": "${workspaceFolder}/.env",
"env": {
"CUDA_VISIBLE_DEVICES": "0",
"LOGLEVEL": "DEBUG",
},
},
{
"name": "Python: Eval - MathVista - (LLaVA) evaluation.calculate_score",
"type": "debugpy",
"request": "launch",
"module": "evaluation.calculate_score",
"justMyCode": true,
"args": [
"--output_dir",
// All Empty Responses
// "_results/eval/mathvista/20240214_210722",
// Legitimate Responses
// "_results/eval/mathvista/20240214_220834",
"_results/eval/mathvista/20240215_204602",
"--output_file",
"llava-v1.5-7b.json",
// "llava-v1.5-7b_false_positives.json",
"--score_file",
"llava-v1.5-7b_metrics.json",
"--ignore_empty_extractions",
],
"envFile": "${workspaceFolder}/.env",
"env": {
"CUDA_VISIBLE_DEVICES": "0",
"LOGLEVEL": "DEBUG",
},
},
]
}
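For reference, the removed "generate_response" debug entry above maps roughly to the plain invocation sketched below. The module name, flags, paths, and environment variables are copied from the deleted launch config; the subprocess wrapper itself is illustrative and not part of the repository. Note that after this commit, passing --model_path raises NotImplementedError, so this invocation is only meaningful against a checkout prior to this change.

# Rough command-line equivalent of the removed "generate_response" debug config,
# expressed as a Python subprocess call. Flags and paths come from the deleted
# launch entry; this wrapper is illustrative only.
import os
import subprocess

env = {**os.environ, "CUDA_VISIBLE_DEVICES": "0", "LOGLEVEL": "DEBUG"}
subprocess.run(
    [
        "python", "-m", "evaluation.generate_response",
        "--conv-mode", "vicuna_v1",
        "--max_num_problems", "100",
        "--output_dir", "_results/eval/mathvista/llava/debug",
        "--output_file", "llava-v1.5-7b.json",
        "--model_path", "liuhaotian/llava-v1.5-7b",
        "--save_every", "1",
    ],
    env=env,
    check=True,
)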
14 changes: 3 additions & 11 deletions evaluation/generate_response.py
@@ -146,19 +146,11 @@ def main():

     # If we were given a custom model path, load that model, otherwise use a remote service model
     if args.model_path:
-        from models import llava
+        # from models import llava
 
         logging.info(f"Loading model from {args.model_path}...")
-        model = llava.Llava_Model(
-            model_path=args.model_path,
-            model_base=args.model_base,
-            conv_mode=args.conv_mode,
-            temperature=args.temperature,
-            top_p=args.top_p,
-            num_beams=args.num_beams,
-            max_new_tokens=args.max_new_tokens,
-            seed_value=42,
-        )
+        # TODO: Add support for local models
+        raise NotImplementedError("Local models are not yet supported.")
     else:
         model_name = args.azure_openai_model if args.azure_openai_model else args.model
         logging.info(f"Loading {model_name}...")
131 changes: 0 additions & 131 deletions models/llava.py

This file was deleted.
