Upload README.md with huggingface_hub
README.md CHANGED
@@ -19,36 +19,37 @@ How to use
 ```python
 from transformers import AutoModelForCausalLM, AutoTokenizer
 base_model = 'llama-2-7b'
-
-
-model_path = f'vita-group/
+comp_method = 'magnitude_unstructured'
+comp_degree = 0.2
+model_path = f'vita-group/{base_model}_{comp_method}'
 model = AutoModelForCausalLM.from_pretrained(
     model_path,
+    revision=f's{comp_degree}',
     torch_dtype=torch.float16,
     low_cpu_mem_usage=True,
     device_map="auto"
 )
-tokenizer = AutoTokenizer.from_pretrained('meta-llama/Llama-2-7b')
+tokenizer = AutoTokenizer.from_pretrained('meta-llama/Llama-2-7b-hf')
 input_ids = tokenizer('Hello! I am a VITA-compressed-LLM chatbot!', return_tensors='pt').input_ids
 outputs = model.generate(input_ids)
+print(tokenizer.decode(outputs[0]))
 ```


-| | Base Model | Model Size | Compression Method
-
-| 0 | Llama-2 | 7b | magnitude_unstructured | [s0.1](https://huggingface.co/vita-group/
-| 1 | Llama-2 | 7b | magnitude_unstructured | [s0.2](https://huggingface.co/vita-group/
-| 2 | Llama-2 | 7b | magnitude_unstructured | [s0.3](https://huggingface.co/vita-group/
-| 3 | Llama-2 | 7b | magnitude_unstructured | [s0.5](https://huggingface.co/vita-group/
-| 4 | Llama-2 | 7b | magnitude_unstructured | [s0.6](https://huggingface.co/vita-group/
-| 5 | Llama-2 | 7b | sparsegpt_unstructured | [s0.1](https://huggingface.co/vita-group/
-| 6 | Llama-2 | 7b | sparsegpt_unstructured | [s0.2](https://huggingface.co/vita-group/
-| 7 | Llama-2 | 7b | sparsegpt_unstructured | [s0.3](https://huggingface.co/vita-group/
-| 8 | Llama-2 | 7b | sparsegpt_unstructured | [s0.5](https://huggingface.co/vita-group/
-| 9 | Llama-2 | 7b | sparsegpt_unstructured | [s0.6](https://huggingface.co/vita-group/
-| 10 | Llama-2 | 7b | wanda_unstructured
-| 11 | Llama-2 | 7b | wanda_unstructured
-| 12 | Llama-2 | 7b | wanda_unstructured
-| 13 | Llama-2 | 7b | wanda_unstructured
-| 14 | Llama-2 | 7b | wanda_unstructured
-
+| | Base Model | Model Size | Compression Method | Compression Degree |
+|---:|:-------------|:-------------|:----------------------------------------------------------------------------------------------|:--------------------------------------------------------------------------------------|
+| 0 | Llama-2 | 7b | [magnitude_unstructured](https://huggingface.co/vita-group/llama-2-7b_magnitude_unstructured) | [s0.1](https://huggingface.co/vita-group/llama-2-7b_magnitude_unstructured/tree/s0.1) |
+| 1 | Llama-2 | 7b | [magnitude_unstructured](https://huggingface.co/vita-group/llama-2-7b_magnitude_unstructured) | [s0.2](https://huggingface.co/vita-group/llama-2-7b_magnitude_unstructured/tree/s0.2) |
+| 2 | Llama-2 | 7b | [magnitude_unstructured](https://huggingface.co/vita-group/llama-2-7b_magnitude_unstructured) | [s0.3](https://huggingface.co/vita-group/llama-2-7b_magnitude_unstructured/tree/s0.3) |
+| 3 | Llama-2 | 7b | [magnitude_unstructured](https://huggingface.co/vita-group/llama-2-7b_magnitude_unstructured) | [s0.5](https://huggingface.co/vita-group/llama-2-7b_magnitude_unstructured/tree/s0.5) |
+| 4 | Llama-2 | 7b | [magnitude_unstructured](https://huggingface.co/vita-group/llama-2-7b_magnitude_unstructured) | [s0.6](https://huggingface.co/vita-group/llama-2-7b_magnitude_unstructured/tree/s0.6) |
+| 5 | Llama-2 | 7b | [sparsegpt_unstructured](https://huggingface.co/vita-group/llama-2-7b_sparsegpt_unstructured) | [s0.1](https://huggingface.co/vita-group/llama-2-7b_sparsegpt_unstructured/tree/s0.1) |
+| 6 | Llama-2 | 7b | [sparsegpt_unstructured](https://huggingface.co/vita-group/llama-2-7b_sparsegpt_unstructured) | [s0.2](https://huggingface.co/vita-group/llama-2-7b_sparsegpt_unstructured/tree/s0.2) |
+| 7 | Llama-2 | 7b | [sparsegpt_unstructured](https://huggingface.co/vita-group/llama-2-7b_sparsegpt_unstructured) | [s0.3](https://huggingface.co/vita-group/llama-2-7b_sparsegpt_unstructured/tree/s0.3) |
+| 8 | Llama-2 | 7b | [sparsegpt_unstructured](https://huggingface.co/vita-group/llama-2-7b_sparsegpt_unstructured) | [s0.5](https://huggingface.co/vita-group/llama-2-7b_sparsegpt_unstructured/tree/s0.5) |
+| 9 | Llama-2 | 7b | [sparsegpt_unstructured](https://huggingface.co/vita-group/llama-2-7b_sparsegpt_unstructured) | [s0.6](https://huggingface.co/vita-group/llama-2-7b_sparsegpt_unstructured/tree/s0.6) |
+| 10 | Llama-2 | 7b | [wanda_unstructured](https://huggingface.co/vita-group/llama-2-7b_wanda_unstructured) | [s0.1](https://huggingface.co/vita-group/llama-2-7b_wanda_unstructured/tree/s0.1) |
+| 11 | Llama-2 | 7b | [wanda_unstructured](https://huggingface.co/vita-group/llama-2-7b_wanda_unstructured) | [s0.2](https://huggingface.co/vita-group/llama-2-7b_wanda_unstructured/tree/s0.2) |
+| 12 | Llama-2 | 7b | [wanda_unstructured](https://huggingface.co/vita-group/llama-2-7b_wanda_unstructured) | [s0.3](https://huggingface.co/vita-group/llama-2-7b_wanda_unstructured/tree/s0.3) |
+| 13 | Llama-2 | 7b | [wanda_unstructured](https://huggingface.co/vita-group/llama-2-7b_wanda_unstructured) | [s0.5](https://huggingface.co/vita-group/llama-2-7b_wanda_unstructured/tree/s0.5) |
+| 14 | Llama-2 | 7b | [wanda_unstructured](https://huggingface.co/vita-group/llama-2-7b_wanda_unstructured) | [s0.6](https://huggingface.co/vita-group/llama-2-7b_wanda_unstructured/tree/s0.6) |
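
The updated snippet selects a pruning method and a sparsity degree, then loads the matching checkpoint: each degree is published as a separate Hub revision (`s0.1` through `s0.6`), as the `/tree/s*` links in the table show. The sketch below is a minimal end-to-end version of that pattern, not part of the original README: it adds the `import torch` that `torch_dtype=torch.float16` requires, takes its repository ids and revision names from the table above, and, purely as an illustrative check, reports the fraction of exactly-zero weights in the 2-D parameter matrices.

```python
# Sketch: load one pruned checkpoint from the table and sanity-check its sparsity.
# Assumptions: repo ids 'vita-group/llama-2-7b_<method>' and branches 's<degree>'
# as listed in the table; the zero-weight check is an illustration, not from the README.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

base_model = 'llama-2-7b'
comp_method = 'wanda_unstructured'   # or 'magnitude_unstructured', 'sparsegpt_unstructured'
comp_degree = 0.5                    # one of 0.1, 0.2, 0.3, 0.5, 0.6

model = AutoModelForCausalLM.from_pretrained(
    f'vita-group/{base_model}_{comp_method}',
    revision=f's{comp_degree}',      # each degree lives on its own branch (see the /tree/s* links)
    torch_dtype=torch.float16,
    low_cpu_mem_usage=True,
    device_map='auto',
)
tokenizer = AutoTokenizer.from_pretrained('meta-llama/Llama-2-7b-hf')

# Fraction of exactly-zero entries in the weight matrices; for unstructured
# pruning this should roughly track comp_degree.
zeros, total = 0, 0
for name, p in model.named_parameters():
    if p.dim() == 2 and p.device.type != 'meta':   # skip any weights accelerate has offloaded
        zeros += (p == 0).sum().item()
        total += p.numel()
print(f'zero-weight fraction: {zeros / total:.3f} (degree {comp_degree})')

inputs = tokenizer('Hello! I am a VITA-compressed-LLM chatbot!', return_tensors='pt').to(model.device)
outputs = model.generate(**inputs, max_new_tokens=32)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```

Switching to a different row of the table only means changing `comp_method` and `comp_degree`: the repository is per method, and the degree is just a different branch to download via `revision`.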