Ksjsjjdj committed on
Commit 865fcd1 · verified · 1 Parent(s): b9cf174

Update app.py

Files changed (1): app.py (+3 −3)
app.py CHANGED
@@ -69,7 +69,7 @@ def run_training(hf_token, model_name, new_repo_name, lora_r, lora_alpha, lora_d
 
     def load_single(ds_name, cfg):
         try:
-            ds = load_dataset(ds_name, cfg, streaming=True, trust_remote_code=True)
+            ds = load_dataset(ds_name, cfg, streaming=True)
             if isinstance(ds, dict):
                 ds = next(iter(ds.values()))
 
@@ -117,7 +117,7 @@ def run_training(hf_token, model_name, new_repo_name, lora_r, lora_alpha, lora_d
 
     progress(0.3, desc="Cargando Tokenizer...")
     try:
-        tokenizer = AutoTokenizer.from_pretrained(model_name, trust_remote_code=True, padding_side="left", add_eos_token=True, add_bos_token=True)
+        tokenizer = AutoTokenizer.from_pretrained(model_name, padding_side="left", add_eos_token=True, add_bos_token=True)
         tokenizer.pad_token = tokenizer.eos_token
     except Exception as e:
         return f"Error cargando tokenizer: {str(e)}"
@@ -167,7 +167,7 @@ def run_training(hf_token, model_name, new_repo_name, lora_r, lora_alpha, lora_d
 
     progress(0.4, desc="Cargando Modelo...")
     try:
-        original_model = AutoModelForCausalLM.from_pretrained(model_name, trust_remote_code=True)
+        original_model = AutoModelForCausalLM.from_pretrained(model_name)
     except Exception as e:
         return f"Error cargando modelo: {str(e)}"
 
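For reference, a minimal sketch of the loading path after this change, with trust_remote_code dropped from all three calls. The model and dataset names below are placeholders chosen for illustration and are not taken from the commit.

# Minimal sketch of the post-change loading path; "gpt2" and "imdb" are
# placeholder names for illustration only, not from app.py.
from datasets import load_dataset
from transformers import AutoTokenizer, AutoModelForCausalLM

model_name = "gpt2"  # placeholder model

# Without trust_remote_code=True, repos that require custom loading scripts
# will generally raise instead of executing repo-provided code.
ds = load_dataset("imdb", streaming=True)
if isinstance(ds, dict):  # streaming mode may return a dict of splits
    ds = next(iter(ds.values()))

tokenizer = AutoTokenizer.from_pretrained(
    model_name, padding_side="left", add_eos_token=True, add_bos_token=True
)  # add_eos/add_bos are honored by tokenizers that support them, ignored otherwise
tokenizer.pad_token = tokenizer.eos_token

model = AutoModelForCausalLM.from_pretrained(model_name)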