Update sft_fastapi.py
Browse files — sft_fastapi.py (+6 −1)
sft_fastapi.py
CHANGED
|
@@ -7,7 +7,6 @@ from transformers import AutoTokenizer, AutoModelForCausalLM
|
|
| 7 |
import random
|
| 8 |
import os
|
| 9 |
from typing import List, Optional
|
| 10 |
-
from sft_dataset import load_config
|
| 11 |
|
| 12 |
app = FastAPI(title="PVS Step Recommender API", version="1.0.0")
|
| 13 |
|
|
@@ -19,6 +18,12 @@ MODEL = None
|
|
| 19 |
TEST_DATASET = None
|
| 20 |
DEVICE = "cuda:0" if torch.cuda.is_available() else "cpu"
|
| 21 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 22 |
def load_model_and_tokenizer(path: str):
|
| 23 |
global TOKENIZER, MODEL
|
| 24 |
if TOKENIZER is None or MODEL is None:
|
|
|
|
| 7 |
import random
|
| 8 |
import os
|
| 9 |
from typing import List, Optional
|
|
|
|
| 10 |
|
| 11 |
app = FastAPI(title="PVS Step Recommender API", version="1.0.0")
|
| 12 |
|
|
|
|
| 18 |
TEST_DATASET = None
|
| 19 |
DEVICE = "cuda:0" if torch.cuda.is_available() else "cpu"
|
| 20 |
|
| 21 |
+
def load_config(path="config.yaml"):
    """Read the YAML configuration at *path* and return it as a namespace.

    Parameters:
        path: Filesystem path to the YAML config file (default "config.yaml").

    Returns:
        types.SimpleNamespace whose attributes are the top-level YAML keys.
    """
    # safe_load is deliberate: never use yaml.load on config files.
    with open(path, "r") as handle:
        parsed = yaml.safe_load(handle)
    return SimpleNamespace(**parsed)
|
| 25 |
+
|
| 26 |
+
|
| 27 |
def load_model_and_tokenizer(path: str):
|
| 28 |
global TOKENIZER, MODEL
|
| 29 |
if TOKENIZER is None or MODEL is None:
|