Upload app.py
app.py CHANGED
@@ -27,6 +27,9 @@ def measure_performance(func):
 
 @measure_performance
 def generate_story_text(theme):
+    if generate_story_pipe.tokenizer.pad_token is None:
+        generate_story_pipe.tokenizer.pad_token = generate_story_pipe.tokenizer.eos_token
+
     prompt = f"Tell me a storie about {theme}"
     story = generate_story_pipe(prompt, max_length=400, num_return_sequences=1, temperature=0.7, do_sample=True, repetition_penalty=3.0, eos_token_id=generate_story_pipe.model.config.eos_token_id, pad_token_id=generate_story_pipe.model.config.pad_token_id, truncation=True, padding=True)
     generated_story = story[0]["generated_text"]
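For context, below is a minimal, self-contained sketch of what this part of app.py could look like after the change. It is an illustration only: the "gpt2" checkpoint, the body of measure_performance, and the final return statement are assumptions not shown in the diff. The added guard reuses the EOS token as the pad token, which GPT-2-family tokenizers do not define on their own, so the padding=True and pad_token_id arguments in the pipeline call can be honored without a missing-pad-token error.

import time
from functools import wraps

from transformers import pipeline

# Assumed checkpoint; the Space's actual model is not shown in this diff.
generate_story_pipe = pipeline("text-generation", model="gpt2")


def measure_performance(func):
    # Stand-in timing decorator; the Space's own implementation is not shown here.
    @wraps(func)
    def wrapper(*args, **kwargs):
        start = time.perf_counter()
        result = func(*args, **kwargs)
        print(f"{func.__name__} took {time.perf_counter() - start:.2f}s")
        return result
    return wrapper


@measure_performance
def generate_story_text(theme):
    # GPT-2-style tokenizers ship without a pad token; fall back to the EOS token
    # so that padding works in the pipeline call below.
    if generate_story_pipe.tokenizer.pad_token is None:
        generate_story_pipe.tokenizer.pad_token = generate_story_pipe.tokenizer.eos_token

    prompt = f"Tell me a storie about {theme}"
    story = generate_story_pipe(
        prompt,
        max_length=400,
        num_return_sequences=1,
        temperature=0.7,
        do_sample=True,
        repetition_penalty=3.0,
        eos_token_id=generate_story_pipe.model.config.eos_token_id,
        pad_token_id=generate_story_pipe.model.config.pad_token_id,
        truncation=True,
        padding=True,
    )
    generated_story = story[0]["generated_text"]
    return generated_story  # assumed; the diff does not show the rest of the function


A call such as generate_story_text("dragons") would then return a single sampled continuation of up to roughly 400 tokens, with the timing printed by the decorator.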