Cyitron committed on
Commit
0dd62e8
·
verified ·
1 Parent(s): beb3a84

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +30 -14
app.py CHANGED
@@ -2,15 +2,21 @@ import torch
2
  import gradio as gr
3
  from transformers import AutoModelForQuestionAnswering, AutoTokenizer, pipeline
4
 
5
- model_name = "deepset/roberta-base-squad2"
6
-
7
- pipe = pipeline('question-answering', model=model_name, tokenizer=model_name)
8
-
9
- model = AutoModelForQuestionAnswering.from_pretrained(model_name)
10
- tokenizer = AutoTokenizer.from_pretrained(model_name)
11
-
12
- # context = "I am gabriel"
13
- # question = "what is my name?"
 
 
 
 
 
 
14
 
15
  def read_file(file_content):
16
  try:
@@ -19,7 +25,7 @@ def read_file(file_content):
19
  except Exception as e:
20
  return f"Ocorreu um erro: {e}"
21
 
22
- def get_answer(text_content, question, file_content):
23
  if text_content and file_content:
24
  return "Selecione apenas uma opção!"
25
 
@@ -31,6 +37,10 @@ def get_answer(text_content, question, file_content):
31
  else:
32
  content = text_content
33
 
 
 
 
 
34
  res = pipe(
35
  context=content,
36
  question=question,
@@ -38,10 +48,16 @@ def get_answer(text_content, question, file_content):
38
  return res
39
 
40
  demo = gr.Interface(fn=get_answer,
41
- inputs=[gr.Textbox(label="Input your context", lines=4), gr.Textbox(label="Input your question", lines=4), gr.File(label="Uploadl your file")],
42
- outputs=[gr.Textbox(label="Answer", lines=3)],
43
- title="QnA for files, txt or essays",
44
- description="Esse programa em python foi feito para a disciplina de sistemas multimidia em 2025/1",)
 
 
 
 
 
 
45
 
46
  if __name__ == "__main__":
47
  demo.launch()
 
2
  import gradio as gr
3
  from transformers import AutoModelForQuestionAnswering, AutoTokenizer, pipeline
4
 
5
# Hugging Face model identifiers for each supported language.
model_eng = "deepset/roberta-base-squad2"
model_br = "mrm8488/bert-base-portuguese-cased-finetuned-squad-v1-pt"

# Language label -> ready-to-use extractive-QA pipeline.
# Built at import time so the (slow) model download/load happens once,
# not on every request. Fix: reuse the model-name constants above instead
# of duplicating the repo-id strings inline, so they cannot drift apart.
PIPELINES = {
    "English": pipeline(
        "question-answering",
        model=model_eng,
        tokenizer=model_eng,
    ),
    "Português": pipeline(
        "question-answering",
        model=model_br,
        tokenizer=model_br,
    ),
}
20
 
21
  def read_file(file_content):
22
  try:
 
25
  except Exception as e:
26
  return f"Ocorreu um erro: {e}"
27
 
28
+ def get_answer(lang , text_content, question, file_content):
29
  if text_content and file_content:
30
  return "Selecione apenas uma opção!"
31
 
 
37
  else:
38
  content = text_content
39
 
40
+ pipe = PIPELINES.get(lang)
41
+ if pipe is None:
42
+ return "Idioma não suportado."
43
+
44
  res = pipe(
45
  context=content,
46
  question=question,
 
48
  return res
49
 
50
# Gradio UI: language selector, free-text context, question box, and an
# optional .txt upload; the model's answer is rendered in a single textbox.
_language_selector = gr.Radio(["English", "Português"], label="Selecione o idioma")
_context_box = gr.Textbox(label="Contexto (texto livre)", placeholder="Digite seu texto aqui...", lines=6)
_question_box = gr.Textbox(label="Pergunta", placeholder="Digite sua pergunta aqui...", lines=2)
_file_upload = gr.File(label="Ou faça upload de um arquivo .txt", file_types=['.txt'])

demo = gr.Interface(
    fn=get_answer,
    inputs=[_language_selector, _context_box, _question_box, _file_upload],
    outputs=gr.Textbox(label="Resposta"),
    title="QnA Multilíngue",
    description="Faça perguntas em inglês ou português. Carrega o modelo adequado automaticamente.",
)
61
 
62
  if __name__ == "__main__":
63
  demo.launch()