diff --git a/examples/nlp/huggingface_model/text_generation/llama/quantization/weight_only/main.py b/examples/nlp/huggingface_model/text_generation/llama/quantization/weight_only/main.py
index e70c8a188..5509c021a 100644
--- a/examples/nlp/huggingface_model/text_generation/llama/quantization/weight_only/main.py
+++ b/examples/nlp/huggingface_model/text_generation/llama/quantization/weight_only/main.py
@@ -23,7 +23,6 @@
 import torch
 import logging
 import argparse
-import random
 import numpy as np
 import datasets
 import onnxruntime as ort
diff --git a/examples/nlp/huggingface_model/text_generation/llama/quantization/weight_only/run_benchmark.sh b/examples/nlp/huggingface_model/text_generation/llama/quantization/weight_only/run_benchmark.sh
index 1f728c0f1..199188f62 100644
--- a/examples/nlp/huggingface_model/text_generation/llama/quantization/weight_only/run_benchmark.sh
+++ b/examples/nlp/huggingface_model/text_generation/llama/quantization/weight_only/run_benchmark.sh
@@ -35,7 +35,7 @@ function init_params {
 
 # run_benchmark
 function run_benchmark {
-    
+
     # Check if the input_model ends with the filename extension ".onnx"
     if [[ $input_model =~ \.onnx$ ]]; then
         # If the string ends with the filename extension, get the path of the file
@@ -50,7 +50,7 @@ function run_benchmark {
         --mode=${mode} \
         --intra_op_num_threads=${intra_op_num_threads-24} \
         --benchmark
-    
+
 }
 
 main "$@"
diff --git a/examples/nlp/huggingface_model/text_generation/llama/quantization/weight_only/run_quant.sh b/examples/nlp/huggingface_model/text_generation/llama/quantization/weight_only/run_quant.sh
index 295b47249..bb4c2ae05 100644
--- a/examples/nlp/huggingface_model/text_generation/llama/quantization/weight_only/run_quant.sh
+++ b/examples/nlp/huggingface_model/text_generation/llama/quantization/weight_only/run_quant.sh
@@ -52,18 +52,18 @@ function run_tuning {
 
     # Check if the directory exists
     if [ ! -d "$output_model" ]; then
         # If the directory doesn't exist, create it
-        mkdir -p "$output_model"
-        echo "Created directory $output_model"
+        mkdir -p "$output_model"
+        echo "Created directory $output_model"
     fi
 
     python main.py \
            --model_path ${input_model} \
-           --tokenizer ${tokenizer-meta-llama/Llama-2-7b-hf} \
+           --tokenizer ${tokenizer-meta-llama/Llama-2-7b-hf} \
            --output_model ${output_model} \
            --batch_size ${batch_size-1} \
            --dataset ${dataset-NeelNanda/pile-10k} \
-           --algorithm ${algorithm-WOQ_TUNE} \
-           --tasks ${tasks-lambada_openai} \
+           --algorithm ${algorithm-WOQ_TUNE} \
+           --tasks ${tasks-lambada_openai} \
            --tune
 }
diff --git a/onnx_neural_compressor/config.py b/onnx_neural_compressor/config.py
index 61ab8fc67..bf6bb2c42 100644
--- a/onnx_neural_compressor/config.py
+++ b/onnx_neural_compressor/config.py
@@ -277,7 +277,7 @@ def to_dict(self):
         return result
 
     def get_params_dict(self):
-        result = dict()
+        result = {}
         for param, value in self.__dict__.items():
             if param not in ["_global_config", "_local_config", "_white_list"]:
                 result[param] = value
@@ -448,8 +448,8 @@ def expand(self) -> List[BaseConfig]:
         return config_list
 
     def _get_op_name_op_type_config(self):
-        op_type_config_dict = dict()
-        op_name_config_dict = dict()
+        op_type_config_dict = {}
+        op_name_config_dict = {}
         for name, config in self.local_config.items():
             if self._is_op_type(name):
                 op_type_config_dict[name] = config
@@ -551,7 +551,7 @@ def get_config_set_for_tuning(cls) -> None:
         return None
 
     def get_model_info(self, model, *args, **kwargs):
-        model_info_dict = dict()
+        model_info_dict = {}
         for config in self.config_list:
             model_info_dict.update({config.name: config.get_model_info(model, *args, **kwargs)})
         return model_info_dict
@@ -650,7 +650,7 @@ def __init__(
         self._post_init()
 
     def get_model_params_dict(self):
-        result = dict()
+        result = {}
         for param in self.model_params_list:
             result[param] = getattr(self, param)
         return result
@@ -812,7 +812,7 @@ def __init__(
         self._post_init()
 
     def get_model_params_dict(self):
-        result = dict()
+        result = {}
         for param in self.model_params_list:
             result[param] = getattr(self, param)
         return result
@@ -964,7 +964,7 @@ def __init__(
         self._post_init()
 
     def get_model_params_dict(self):
-        result = dict()
+        result = {}
         for param in self.model_params_list:
             result[param] = getattr(self, param)
         return result
diff --git a/onnx_neural_compressor/onnx_model.py b/onnx_neural_compressor/onnx_model.py
index 2db10353f..34ecbbd71 100644
--- a/onnx_neural_compressor/onnx_model.py
+++ b/onnx_neural_compressor/onnx_model.py
@@ -984,8 +984,8 @@ def write_external_data_to_new_location(self, external_data_location="external.d
     def merge_split_models(self, to_merge_model):
         """Merge two split model into final model."""
         to_merge_model.write_external_data_to_new_location()
-        self.add_nodes([node for node in to_merge_model.nodes()])
-        self.add_initializers([init for init in to_merge_model.initializer()])
+        self.add_nodes(list(to_merge_model.nodes()))
+        self.add_initializers(list(to_merge_model.initializer()))
         self.update()
 
         # add new output