Update README.md
README.md
CHANGED
@@ -31,7 +31,6 @@ We introduce AceCoder, the first work to propose a fully automated pipeline for

## Performance on RM Bench

-```markdown
| Model                               | Code | Chat  | Math  | Safety | Easy  | Normal | Hard | Avg  |
| ----------------------------------- | ---- | ----- | ----- | ------ | ----- | ------ | ---- | ---- |
| Skywork/Skywork-Reward-Llama-3.1-8B | 54.5 | 69.5  | 60.6  | 95.7   | 89    | 74.7   | 46.6 | 70.1 |
@@ -50,7 +49,7 @@ We introduce AceCoder, the first work to propose a fully automated pipeline for
| AceCoder-RM-32B                     | 72.1 | 73.7  | 70.5  | 88     | 84.5  | 78.3   | 65.5 | 76.1 |
| Delta (AceCoder 7B - Others)        | 7.5  | \-4.6 | \-6.1 | \-6.1  | \-9.1 | \-0.3  | 6.1  | 2.1  |
| Delta (AceCoder 32B - Others)       | 12.7 | 2.4   | \-0.9 | \-8    | \-4.5 | 3.6    | 9.4  | 6    |
-```
+

## Performance on Best-of-N sampling

@@ -62,11 +61,11 @@ We introduce AceCoder, the first work to propose a fully automated pipeline for

```python
"""pip install git+https://github.com/TIGER-AI-Lab/AceCoder"""
-from acecoder import
+from acecoder import AceCodeRM
from transformers import AutoTokenizer

model_path = "TIGER-Lab/AceCodeRM-7B"
-model =
+model = AceCodeRM.from_pretrained(model_path, device_map="auto")
tokenizer = AutoTokenizer.from_pretrained(model_path, trust_remote_code=True)

question = """\
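The updated snippet is cut off at the `question` prompt in this hunk. As context for the Best-of-N results above, here is a minimal sketch of how the loaded reward model and tokenizer could rank candidate completions; the candidate list, the direct `model(input_ids)` call, and the assumption that it returns one scalar reward per sequence are not part of this diff and are only illustrative.

```python
# Illustrative Best-of-N sketch continuing the README snippet above
# (assumes `model`, `tokenizer`, and `question` are already defined there).
import torch

# Hypothetical candidate completions sampled from a code LLM (N = 2 here).
candidates = [
    "def add(a, b):\n    return a + b",
    "def add(a, b):\n    return a - b",
]

rewards = []
for program in candidates:
    chat = [
        {"role": "user", "content": question},
        {"role": "assistant", "content": program},
    ]
    input_ids = tokenizer.apply_chat_template(chat, tokenize=True, return_tensors="pt")
    with torch.no_grad():
        # Assumption: calling the reward model on the tokenized chat yields a
        # scalar (or 1-element tensor) reward for the (question, program) pair.
        reward = model(input_ids.to(model.device))
    rewards.append(float(reward))

# Best-of-N: keep the candidate with the highest reward.
best = max(range(len(candidates)), key=lambda i: rewards[i])
print(rewards, candidates[best])
```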