Compare commits
No commits in common. "481d8e1eee26c3a22057b0be2e18cb930a9a5520" and "a959f6dd1459fb6d1c0a8ef13acc64094a5e499d" have entirely different histories.
481d8e1eee
...
a959f6dd14
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
|
|
@ -1,219 +0,0 @@
|
||||||
from fastapi import FastAPI, HTTPException
from fastapi.middleware.cors import CORSMiddleware
from pydantic import BaseModel
import httpx
import logging
import random

# FastAPI application instance for the question-generation service.
app = FastAPI()

# CORS: wide open so any frontend origin can call this API.
# NOTE(review): "*" origins together with allow_credentials=True is very
# permissive — tighten for production.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# Ollama text-generation endpoint on the local network.
OLLAMA_URL = "http://192.168.60.110:11434/api/generate"

logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s")
|
|
||||||
|
|
||||||
# Static source material (Indonesian folk stories, pantun, and poems) used
# as the basis for generated exercise questions. Keys are titles; values are
# the full source texts fed into the model prompts.
data_sources = {
    "cerita": {
        "Malin Kundang": "Malin Kundang adalah seorang anak dari keluarga miskin yang menjadi kaya raya namun menolak mengakui ibunya, hingga akhirnya dikutuk menjadi batu.",
        "Bawang Merah Bawang Putih": "Bawang Putih adalah gadis baik hati yang diperlakukan buruk oleh ibu dan saudara tirinya, tetapi kebaikannya membuahkan hasil berkat ikan ajaib.",
        "Sangkuriang": "Sangkuriang jatuh cinta pada ibunya, Dayang Sumbi, dan diberi tugas mustahil untuk membangun perahu dalam satu malam. Ia gagal dan akhirnya marah, menendang perahu hingga menjadi Gunung Tangkuban Perahu.",
        "Si Kancil": "Si Kancil dengan kecerdikannya berhasil menipu buaya untuk menyeberangi sungai dengan aman.",
        "Timun Mas": "Seorang ibu tua mendapatkan anak dari biji timun emas. Namun, anak itu harus melarikan diri dari raksasa jahat yang ingin memakannya."
    },
    "pantun": {
        "Pantun Nasihat": "Jalan-jalan ke kota Blitar,\nJangan lupa membeli roti.\nRajin belajar sejak pintar,\nAgar sukses di kemudian hari.",
        "Pantun Jenaka": "Ke pasar beli ikan teri,\nIkan habis tinggal kepala.\nJangan suka mencuri,\nNanti ketahuan malah celaka."
    },
    "puisi": {
        "Puisi Alam": "Langit biru membentang luas,\nBurung-burung terbang bebas.\nAngin sepoi menyapu dedaunan,\nAlam indah penuh kedamaian.",
        "Puisi Persahabatan": "Sahabat sejati selalu ada,\nDalam suka dan dalam duka.\nBersama kita jalani hari,\nMengukir cerita tak terlupa."
    }
}
|
|
||||||
|
|
||||||
@app.post("/generate/")
async def generate_text():
    """Generate exercise questions from random stories, a pantun, and a puisi.

    Picks 3 stories plus one pantun and one puisi from ``data_sources``,
    builds two prompts, sends both to the Ollama API, and returns the
    selected source texts together with the generated questions.

    Raises:
        HTTPException: mirroring Ollama's status on upstream HTTP errors,
            or 500 when generation fails or produces empty output.
    """
    try:
        selected_stories = random.sample(list(data_sources["cerita"].keys()), 3)
        selected_pantun = random.choice(list(data_sources["pantun"].keys()))
        selected_puisi = random.choice(list(data_sources["puisi"].keys()))

        # STORY PROMPT SECTION
        story_prompts = "\n\n".join([
            f"Judul: {story}\nIsi:\n{data_sources['cerita'][story]}"
            for story in selected_stories
        ])

        story_prompt_full = f"""
Kamu adalah asisten pengajar untuk siswa SD kelas 3. Berdasarkan teks cerita di bawah ini, buat soal latihan.

**Instruksi:**
- Untuk setiap cerita, buat:
- 1 soal pilihan ganda (A-D) + jawabannya (Jawaban Benar: X)
- 1 soal isian + jawabannya (Jawaban Ideal: ...)

Gunakan format berikut:

---

Judul: [judul]
Isi:
[isi teks]

**Soal Pilihan Ganda:**
1. ...
A. ...
B. ...
C. ...
D. ...
Jawaban Benar: X

**Soal Isian:**
...
Jawaban Ideal: ...

---

Berikut teks ceritanya:

{story_prompts}
"""

        # PANTUN & PUISI PROMPT SECTION
        pantun_prompt = f"Judul: {selected_pantun}\nIsi:\n{data_sources['pantun'][selected_pantun]}"
        puisi_prompt = f"Judul: {selected_puisi}\nIsi:\n{data_sources['puisi'][selected_puisi]}"

        pantun_puisi_full = f"""
Kamu adalah asisten pengajar untuk siswa SD kelas 3. Berdasarkan teks pantun dan puisi di bawah ini, buat soal latihan.

**Instruksi:**
- Untuk setiap teks, buat:
- 1 soal pilihan ganda (A-D) + jawabannya (Jawaban Benar: X)
- 1 soal isian + jawabannya (Jawaban Ideal: ...)

Gunakan format berikut:

---

Judul: [judul]
Isi:
[isi teks]

**Soal Pilihan Ganda:**
1. ...
A. ...
B. ...
C. ...
D. ...
Jawaban Benar: X

**Soal Isian:**
...
Jawaban Ideal: ...

---

Berikut teks pantun dan puisinya:

{pantun_prompt}

{puisi_prompt}
"""

        # One client for both sequential Ollama requests.
        async with httpx.AsyncClient(timeout=300) as client:
            # Request for the STORIES
            res1 = await client.post(OLLAMA_URL, json={
                "model": "llama3.1:latest",
                "prompt": story_prompt_full,
                "stream": False,
                "options": {
                    "num_predict": 2048
                }
            })
            res1.raise_for_status()
            response_story = res1.json().get("response", "").strip()

            # Request for PANTUN + PUISI
            res2 = await client.post(OLLAMA_URL, json={
                "model": "llama3.1:latest",
                "prompt": pantun_puisi_full,
                "stream": False,
                "options": {
                    "num_predict": 1024
                }
            })
            res2.raise_for_status()
            response_pantun_puisi = res2.json().get("response", "").strip()

        if not response_story or not response_pantun_puisi:
            raise HTTPException(status_code=500, detail="Ollama tidak menghasilkan pertanyaan")

        return {
            "selected_stories": [
                {"title": title, "content": data_sources["cerita"][title]}
                for title in selected_stories
            ],
            "selected_pantun": {
                "title": selected_pantun,
                "content": data_sources["pantun"][selected_pantun]
            },
            "selected_puisi": {
                "title": selected_puisi,
                "content": data_sources["puisi"][selected_puisi]
            },
            "generated_questions": response_story + "\n\n" + response_pantun_puisi
        }

    except HTTPException:
        # Bug fix: the broad handler below used to rewrap deliberate
        # HTTPExceptions (e.g. "no questions generated") into a generic 500,
        # losing the detail message. Re-raise them untouched.
        raise

    except httpx.HTTPStatusError as e:
        logging.error(f"HTTP error dari Ollama API: {e.response.text}")
        raise HTTPException(status_code=e.response.status_code, detail=e.response.text)

    except Exception as e:
        logging.error(f"Terjadi kesalahan: {str(e)}")
        raise HTTPException(status_code=500, detail="Terjadi kesalahan internal")
|
|
||||||
|
|
||||||
class FeedbackRequest(BaseModel):
    """Request body for /generate-feedback/: a student answer to grade."""

    # The student's free-text answer.
    user_answer: str
    # The ideal/expected answer to compare against.
    expected_answer: str
|
|
||||||
|
|
||||||
@app.post("/generate-feedback/")
async def generate_feedback(request: FeedbackRequest):
    """Ask Ollama for short, constructive feedback on a student's answer.

    Raises:
        HTTPException: 500 when Ollama fails or returns empty feedback.
    """
    try:
        prompt = f"""
Kamu adalah asisten pengajar untuk siswa SD kelas 3. Siswa memberikan jawaban berikut untuk soal isian.

**Jawaban Siswa:** {request.user_answer.strip()}
**Jawaban Ideal:** {request.expected_answer.strip()}

Beri feedback singkat dan membangun, maksimal 2 kalimat. Gunakan bahasa yang mudah dimengerti oleh siswa SD. Jika jawaban siswa salah, berikan petunjuk atau koreksi yang membantu.
"""

        payload = {
            "model": "llama3.1:latest",
            "prompt": prompt,
            "stream": False
        }

        logging.info("Mengirim permintaan feedback ke Ollama...")
        async with httpx.AsyncClient(timeout=60) as client:
            response = await client.post(OLLAMA_URL, json=payload)

        response.raise_for_status()
        result = response.json()
        feedback = result.get("response", "").strip()

        if not feedback:
            raise HTTPException(status_code=500, detail="Ollama tidak memberikan feedback")

        return {"feedback": feedback}

    except HTTPException:
        # Bug fix: re-raise deliberate HTTPExceptions as-is instead of
        # letting the broad handler below wrap them in a second, generic 500.
        raise
    except Exception as e:
        logging.error(f"Gagal menghasilkan feedback dari Ollama: {e}")
        raise HTTPException(status_code=500, detail=f"Terjadi kesalahan: {str(e)}")
|
|
||||||
|
|
@ -1,342 +0,0 @@
|
||||||
from fastapi import FastAPI, HTTPException, Request
from fastapi.middleware.cors import CORSMiddleware
from fastapi.exceptions import RequestValidationError
from fastapi.responses import JSONResponse
from fastapi.encoders import jsonable_encoder
from pydantic import BaseModel
import logging, hashlib, re
import httpx

# Logging
logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s")

app = FastAPI()

# CORS — wide open; any origin may call this API.
# NOTE(review): "*" origins with allow_credentials=True is permissive.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# Ollama generation endpoint (previous deployment kept for reference).
# OLLAMA_URL = "http://167.71.212.60:111/api/generate"
OLLAMA_URL = "http://labai.polinema.ac.id:11434/api/generate"
|
|
||||||
|
|
||||||
@app.exception_handler(RequestValidationError)
async def validation_exception_handler(request: Request, exc: RequestValidationError):
    """Return a 422 JSON payload carrying the validation errors and raw body."""
    payload = {"detail": jsonable_encoder(exc.errors()), "body": exc.body}
    return JSONResponse(status_code=422, content=payload)
|
|
||||||
|
|
||||||
class MaterialRequest(BaseModel):
    """Request body for generating questions from teaching material."""

    # Raw material text used as the basis for the questions.
    content: str
    # only 'multiple_choice' or 'essay'
    question_type: str
    # Number of questions to generate (endpoints enforce 1-20).
    question_count: int = 5
|
|
||||||
|
|
||||||
def potong_konten(text: str, max_chars: int = 5000):
    """Truncate *text* to at most *max_chars* characters."""
    if len(text) > max_chars:
        return text[:max_chars]
    return text
|
|
||||||
|
|
||||||
@app.post("/generate-from-material/")
async def generate_from_material(request: MaterialRequest):
    """Generate questions from material, then weight them in a second pass.

    Step 1 asks gemma3 to write the questions with "Bobot: TBD" placeholders;
    step 2 asks llama3.1 to replace each placeholder with a 1-5 weight.

    Raises:
        HTTPException: 400 on invalid parameters, or mirroring Ollama's
            status on upstream HTTP errors.
    """
    if request.question_count < 1 or request.question_count > 20:
        raise HTTPException(status_code=400, detail="Jumlah soal harus antara 1-20")

    if request.question_type not in ["multiple_choice", "essay"]:
        raise HTTPException(status_code=400, detail="Jenis soal hanya bisa 'multiple_choice' atau 'essay'")

    mc_count = essay_count = 0
    if request.question_type == "multiple_choice":
        mc_count = request.question_count
    else:
        essay_count = request.question_count

    content_bersih = potong_konten(request.content.strip())

    prompt_generate = f"""
Buat soal latihan berdasarkan teks materi berikut untuk siswa SD kelas 3.

**Instruksi:**
1. Buat total {request.question_count} soal:
- Pilihan ganda: {mc_count}
- Isian/essay: {essay_count}
2. Setiap soal sertakan:
- Kutipan dari teks (1 kalimat)
- Pertanyaan
- Jawaban
- Bobot default: isi `Bobot: TBD` (nanti akan ditentukan otomatis)
3. Gunakan bahasa yang sederhana dan sesuai untuk siswa SD kelas 3.

**Format Output:**
---
**Soal Pilihan Ganda:**
1. Kalimat sumber: "[kutipan]"
Pertanyaan: [pertanyaan]
A. ...
B. ...
C. ...
D. ...
Jawaban: [opsi]
Bobot: TBD

**Soal Isian:**
1. Kalimat sumber: "[kutipan]"
Pertanyaan: [pertanyaan]
Jawaban: [jawaban]
Bobot: TBD
---

**Materi:**
{content_bersih}
"""

    try:
        # Step 1: generate raw questions with placeholder weights.
        async with httpx.AsyncClient(timeout=300) as client:
            res = await client.post(OLLAMA_URL, json={
                "model": "gemma3:12b",
                "prompt": prompt_generate,
                "stream": False,
                "options": {"num_predict": 2048}
            })
            res.raise_for_status()
            raw_output = res.json().get("response", "").strip()

        prompt_bobot = f"""
Tentukan bobot untuk setiap soal berikut berdasarkan kompleksitas:

**Panduan Penilaian:**
- Pilihan ganda:
- 1 = mudah
- 2 = agak sulit
- Isian:
- 3 = sedang
- 4 = agak sulit
- 5 = sulit

Kembalikan soal yang sama, tapi ganti baris "Bobot: TBD" dengan bobot sesuai tingkat kesulitan. Jangan ubah bagian lainnya.

Soal:
{raw_output}
""".strip()

        # Step 2: replace "Bobot: TBD" with actual 1-5 weights.
        async with httpx.AsyncClient(timeout=120) as client:
            bobot_res = await client.post(OLLAMA_URL, json={
                "model": "llama3.1:latest",
                "prompt": prompt_bobot,
                "stream": False
            })
            bobot_res.raise_for_status()
            final_output = bobot_res.json().get("response", "").strip()
    except httpx.HTTPStatusError as e:
        # Bug fix: upstream failures used to propagate as unhandled errors;
        # surface them with Ollama's own status and message, like siblings do.
        logging.error(f"HTTP error dari Ollama API: {e.response.text}")
        raise HTTPException(status_code=e.response.status_code, detail=e.response.text)

    return {
        "generated_questions": final_output,
        "question_type": request.question_type,
        "question_count": request.question_count,
        "mc_count": mc_count,
        "essay_count": essay_count
    }
|
|
||||||
|
|
||||||
@app.post("/generate-with-bobot/")
async def generate_with_bobot(request: MaterialRequest):
    """Two-pass question generation: raw questions, then 1-5 weighting.

    Raises:
        HTTPException: 400 on invalid parameters, or mirroring Ollama's
            status on upstream HTTP errors.
    """
    if request.question_count < 1 or request.question_count > 20:
        raise HTTPException(status_code=400, detail="Jumlah soal harus antara 1-20")

    if request.question_type not in ["multiple_choice", "essay"]:
        raise HTTPException(status_code=400, detail="Jenis soal hanya bisa 'multiple_choice' atau 'essay'")

    mc_count = request.question_count if request.question_type == "multiple_choice" else 0
    essay_count = request.question_count if request.question_type == "essay" else 0

    content_bersih = potong_konten(request.content.strip())

    # Step 1: generate raw questions with "Bobot: TBD" placeholders.
    prompt_generate = f"""
Buat soal latihan berdasarkan teks berikut untuk siswa SD kelas 3.

Instruksi:
- Total soal: {request.question_count}
- Jenis soal:
- Pilihan Ganda: {mc_count}
- Isian/Essay: {essay_count}
- Setiap soal berisi:
- Kalimat sumber
- Pertanyaan
- Jawaban
- Bobot: isi "Bobot: TBD"

Format:
---
**Soal Pilihan Ganda:**
1. Kalimat sumber: "[kutipan]"
Pertanyaan: ...
A. ...
B. ...
C. ...
D. ...
Jawaban: ...
Bobot: TBD

**Soal Isian:**
1. Kalimat sumber: "[kutipan]"
Pertanyaan: ...
Jawaban: ...
Bobot: TBD
---

Teks:
{content_bersih}
"""

    try:
        async with httpx.AsyncClient(timeout=300) as client:
            res = await client.post(OLLAMA_URL, json={
                "model": "llama3.1:latest",
                "prompt": prompt_generate,
                "stream": False
            })
            res.raise_for_status()
            raw_output = res.json().get("response", "").strip()

        # Step 2: fill in the weights.
        prompt_bobot = f"""
Tentukan bobot untuk setiap soal berdasarkan kompleksitas.

Panduan Penilaian:
- Pilihan Ganda:
- 1 = mudah
- 2 = agak sulit
- Isian:
- 3 = sedang
- 4 = agak sulit
- 5 = sulit

Ganti baris "Bobot: TBD" menjadi "Bobot: [1-5]". Jangan ubah bagian lainnya.

Soal:
{raw_output}
"""

        async with httpx.AsyncClient(timeout=180) as client:
            res_bobot = await client.post(OLLAMA_URL, json={
                "model": "llama3.1:latest",
                "prompt": prompt_bobot,
                "stream": False
            })
            res_bobot.raise_for_status()
            final_output = res_bobot.json().get("response", "").strip()
    except httpx.HTTPStatusError as e:
        # Bug fix: this endpoint had no error handling — translate upstream
        # HTTP failures into a matching HTTPException instead of crashing.
        logging.error(f"HTTP error dari Ollama API: {e.response.text}")
        raise HTTPException(status_code=e.response.status_code, detail=e.response.text)

    return {
        "generated_questions": final_output,
        "question_type": request.question_type,
        "question_count": request.question_count,
        "mc_count": mc_count,
        "essay_count": essay_count
    }
|
|
||||||
|
|
||||||
class GenerateQuestionsRequest(BaseModel):
    """Request body for /generate-questions/."""

    # Source material text.
    content: str
    # Number of questions to generate.
    question_count: int
    # 'multiple_choice' or 'essay' — the original comment also mentioned
    # 'both', but the endpoint rejects anything else; verify intent.
    question_type: str
    # When False, skip the second weighting pass.
    with_bobot: bool = True
|
|
||||||
|
|
||||||
@app.post("/generate-questions/")
async def generate_questions(request: GenerateQuestionsRequest):
    """Generate questions from text, optionally followed by a weighting pass.

    Raises:
        HTTPException: 400 on invalid question type or count.
    """
    # Bug fix: this endpoint validated the type but, unlike its sibling
    # endpoints, never bounded question_count — enforce the same 1-20 range.
    if request.question_count < 1 or request.question_count > 20:
        raise HTTPException(status_code=400, detail="Jumlah soal harus antara 1-20")

    mc_count = essay_count = 0
    if request.question_type == "multiple_choice":
        mc_count = request.question_count
    elif request.question_type == "essay":
        essay_count = request.question_count
    else:
        raise HTTPException(status_code=400, detail="Jenis soal tidak valid.")

    prompt = f"""
Buat soal dari teks berikut untuk siswa SD kelas 3.
Jumlah soal: {request.question_count}
- Pilihan Ganda: {mc_count}
- Isian: {essay_count}

Setiap soal sertakan:
- Kalimat sumber
- Pertanyaan
- Jawaban
- Bobot: TBD

Format:
...
Teks:
{potong_konten(request.content)}
""".strip()

    async with httpx.AsyncClient(timeout=300) as client:
        result = await client.post(OLLAMA_URL, json={
            "model": "llama3.1:latest",
            "prompt": prompt,
            "stream": False
        })
        result.raise_for_status()
        output = result.json().get("response", "").strip()

    if not request.with_bobot:
        return {
            "generated_questions": output,
            "mc_count": mc_count,
            "essay_count": essay_count
        }

    # Second pass: add weights
    prompt_bobot = f"""
Tentukan bobot soal berdasarkan kompleksitas. Ganti 'Bobot: TBD' dengan angka 1-5 sesuai panduan. Jangan ubah bagian lainnya.

Soal:
{output}
"""
    async with httpx.AsyncClient(timeout=180) as client:
        bobot_result = await client.post(OLLAMA_URL, json={
            "model": "llama3.1:latest",
            "prompt": prompt_bobot,
            "stream": False
        })
        bobot_result.raise_for_status()
        final_output = bobot_result.json().get("response", "").strip()

    return {
        "generated_questions": final_output,
        "mc_count": mc_count,
        "essay_count": essay_count
    }
|
|
||||||
|
|
||||||
class FeedbackRequest(BaseModel):
    """Request body for /generate-feedback/."""

    # Student's submitted answer.
    user_answer: str
    # Expected ("ideal") answer to compare against.
    expected_answer: str


# In-memory feedback cache keyed by sha256("user|expected").
# NOTE(review): unbounded and per-process only — consider an LRU bound.
feedback_cache = {}
|
|
||||||
|
|
||||||
@app.post("/generate-feedback/")
async def generate_feedback(request: FeedbackRequest):
    """Return short model-written feedback for a student's answer, with caching.

    Raises:
        HTTPException: 500 when Ollama returns an empty response.
    """
    user_answer = request.user_answer.strip()
    expected_answer = request.expected_answer.strip()
    # Cache key over both answers so identical submissions skip the model call.
    prompt_hash = hashlib.sha256(f"{user_answer}|{expected_answer}".encode()).hexdigest()

    if prompt_hash in feedback_cache:
        return {"feedback": feedback_cache[prompt_hash]}

    prompt = f"""
Kamu adalah asisten pengajar SD kelas 3. Siswa memberikan jawaban berikut.

**Jawaban Siswa:** {user_answer}
**Jawaban Ideal:** {expected_answer}

Beri feedback singkat dan membangun, maksimal 2 kalimat, dengan bahasa mudah dipahami.
"""

    async with httpx.AsyncClient(timeout=120) as client:
        response = await client.post(OLLAMA_URL, json={
            "model": "llama3.1:latest",
            "prompt": prompt,
            "stream": False
        })

    response.raise_for_status()
    feedback = response.json().get("response", "").strip()

    # Bug fix: an empty model response used to be cached and then served
    # from the cache forever for this answer pair. Fail loudly instead.
    if not feedback:
        raise HTTPException(status_code=500, detail="Ollama tidak memberikan feedback")

    feedback_cache[prompt_hash] = feedback
    return {"feedback": feedback}
|
|
||||||
|
|
@ -1,164 +0,0 @@
|
||||||
from fastapi import FastAPI, HTTPException, Request
from fastapi.middleware.cors import CORSMiddleware
from fastapi.exceptions import RequestValidationError
from fastapi.responses import JSONResponse
from fastapi.encoders import jsonable_encoder
from pydantic import BaseModel
import logging, hashlib
import os
from groq import Groq

logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s")

app = FastAPI()

# CORS — wide open; tighten for production.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# SECURITY fix: the Groq API key was hard-coded in source control; treat the
# old key as leaked and rotate it. Read the key from the environment instead.
# This fails fast at startup with a KeyError if GROQ_API_KEY is unset.
client = Groq(api_key=os.environ["GROQ_API_KEY"])
|
|
||||||
|
|
||||||
@app.exception_handler(RequestValidationError)
async def validation_exception_handler(request: Request, exc: RequestValidationError):
    """Log request-validation failures and answer with a detailed 422."""
    logging.error(f"Validation error: {exc}")
    body = {"detail": jsonable_encoder(exc.errors()), "body": exc.body}
    return JSONResponse(status_code=422, content=body)
|
|
||||||
|
|
||||||
class MaterialRequest(BaseModel):
    """Request body for generating questions from teaching material."""

    # Raw material text used as the basis for the questions.
    content: str
    # 'multiple_choice' or 'essay'; defaults to multiple choice.
    question_type: str = "multiple_choice"
    # Number of questions to generate (endpoint enforces 1-20).
    question_count: int = 5
|
|
||||||
|
|
||||||
def potong_konten(text: str, max_chars: int = 5000):
    """Truncate *text* to *max_chars*, logging a warning when truncation occurs."""
    if len(text) <= max_chars:
        return text
    logging.warning(f"Teks terlalu panjang ({len(text)} karakter), dipotong jadi {max_chars}")
    return text[:max_chars]
|
|
||||||
|
|
||||||
@app.post("/generate-from-material/")
async def generate_from_material(request: MaterialRequest):
    """Generate weighted questions from material text using the Groq API.

    Raises:
        HTTPException: 400 on invalid parameters; 500 on Groq/internal errors.
    """
    try:
        if request.question_count < 1 or request.question_count > 20:
            raise HTTPException(status_code=400, detail="Jumlah soal harus antara 1-20")

        if request.question_type not in ["multiple_choice", "essay"]:
            raise HTTPException(status_code=400, detail="Jenis soal tidak valid. Pilih: multiple_choice atau essay")

        content_bersih = potong_konten(request.content.strip())

        mc_count = request.question_count if request.question_type == "multiple_choice" else 0
        essay_count = request.question_count if request.question_type == "essay" else 0

        prompt = f"""
Buat soal latihan berdasarkan teks materi berikut untuk siswa SD kelas 3.

**Instruksi:**
1. Buat total {request.question_count} soal dengan rincian:
- Soal pilihan ganda: {mc_count}
- Soal isian/essay: {essay_count}
2. Setiap soal harus disertai:
- Kutipan 1 kalimat dari teks materi sebagai dasar soal
- Jawaban
- ✅ Bobot soal antara:
- 1-2 untuk soal pilihan ganda
- 3-5 untuk soal isian/essay
- Gunakan penilaian kompleksitas soal untuk menentukan bobotnya
3. Gunakan bahasa yang sederhana dan sesuai untuk siswa SD kelas 3.
4. Jangan menambahkan informasi di luar materi.

**Format Output:**
---
**Soal Pilihan Ganda:**
1. Kalimat sumber: "[kutipan kalimat dari teks]"
Pertanyaan: [Pertanyaan]
A. [Opsi A]
B. [Opsi B]
C. [Opsi C]
D. [Opsi D]
Jawaban: [Huruf Opsi]
Bobot: [1 atau 2]

**Soal Isian:**
1. Kalimat sumber: "[kutipan kalimat dari teks]"
Pertanyaan: [Pertanyaan]
Jawaban: [Jawaban]
Bobot: [3 - 5]
---

**Materi:**
{content_bersih}
""".strip()

        logging.info("Mengirim prompt ke Groq...")

        completion = client.chat.completions.create(
            model="llama-3.1-8b-instant",
            messages=[{"role": "user", "content": prompt}],
            temperature=0.7,
            max_tokens=1500
        )

        generated_text = completion.choices[0].message.content.strip()

        return {
            "generated_questions": generated_text,
            "question_type": request.question_type,
            "question_count": request.question_count,
            "mc_count": mc_count,
            "essay_count": essay_count
        }

    except HTTPException:
        # Bug fix: the broad handler below used to swallow the 400
        # validation errors raised above and re-raise them as generic 500s.
        raise
    except Exception as e:
        logging.error(f"Error saat generate: {e}")
        raise HTTPException(status_code=500, detail=f"Terjadi kesalahan internal: {str(e)}")
|
|
||||||
|
|
||||||
# Feedback cache: in-memory, keyed by sha256("user|expected").
# NOTE(review): unbounded and per-process only — consider an LRU bound.
feedback_cache = {}


class FeedbackRequest(BaseModel):
    """Request body for /generate-feedback/."""

    # Student's submitted answer.
    user_answer: str
    # Expected ("ideal") answer to compare against.
    expected_answer: str
|
|
||||||
|
|
||||||
@app.post("/generate-feedback/")
async def generate_feedback(request: FeedbackRequest):
    """Return short Groq-written feedback for a student's answer, with caching.

    Raises:
        HTTPException: 500 on Groq/internal errors.
    """
    try:
        user_answer = request.user_answer.strip()
        expected_answer = request.expected_answer.strip()

        # Hash both answers for the cache key
        prompt_hash = hashlib.sha256(f"{user_answer}|{expected_answer}".encode()).hexdigest()
        if prompt_hash in feedback_cache:
            logging.info("Feedback dari cache.")
            return {"feedback": feedback_cache[prompt_hash]}

        prompt = f"""
Kamu adalah asisten pengajar untuk siswa SD kelas 3. Siswa memberikan jawaban berikut untuk soal isian.

**Jawaban Siswa:** {user_answer}
**Jawaban Ideal:** {expected_answer}

Beri feedback singkat dan membangun, maksimal 2 kalimat. Gunakan bahasa yang mudah dimengerti oleh siswa SD. Jika jawaban siswa salah, berikan petunjuk atau koreksi yang membantu.
"""

        logging.info("Mengirim prompt feedback ke Groq...")

        completion = client.chat.completions.create(
            model="llama-3.1-8b-instant",
            messages=[{"role": "user", "content": prompt}],
            temperature=0.7,
            max_tokens=150
        )

        feedback = completion.choices[0].message.content.strip()

        # Bug fix: never cache an empty completion — it would be served from
        # the cache forever for this answer pair.
        if feedback:
            feedback_cache[prompt_hash] = feedback

        return {"feedback": feedback}

    except HTTPException:
        raise
    except Exception as e:
        logging.error(f"Error saat generate feedback: {e}")
        raise HTTPException(status_code=500, detail=f"Terjadi kesalahan: {str(e)}")
|
|
||||||
|
|
@ -1,177 +0,0 @@
|
||||||
from fastapi import FastAPI, HTTPException, Request
from fastapi.middleware.cors import CORSMiddleware
from fastapi.exceptions import RequestValidationError
from fastapi.responses import JSONResponse
from fastapi.encoders import jsonable_encoder
from pydantic import BaseModel
import logging, hashlib
import httpx

# Logging setup
logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s")

# FastAPI instance
app = FastAPI()

# CORS — wide open; any origin may call this API.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# Ollama endpoint used by all generation routes below.
OLLAMA_URL = "http://labai.polinema.ac.id:11434/api/generate"
|
|
||||||
|
|
||||||
# Error handler
|
|
||||||
@app.exception_handler(RequestValidationError)
async def validation_exception_handler(request: Request, exc: RequestValidationError):
    """Log the validation failure, then answer 422 with errors and raw body."""
    logging.error(f"Validation error: {exc}")
    detail = jsonable_encoder(exc.errors())
    return JSONResponse(status_code=422, content={"detail": detail, "body": exc.body})
|
|
||||||
|
|
||||||
# Input models
class MaterialRequest(BaseModel):
    """Request body for generating questions from teaching material."""

    # Raw material text used as the basis for the questions.
    content: str
    # 'multiple_choice', 'essay', or 'mixed' (validated by the endpoint).
    question_type: str = "multiple_choice"
    # Number of questions to generate (endpoint enforces 1-20).
    question_count: int = 5


class FeedbackRequest(BaseModel):
    """Request body for /generate-feedback/."""

    # Student's submitted answer.
    user_answer: str
    # Expected ("ideal") answer to compare against.
    expected_answer: str


# Utility: in-memory feedback cache keyed by sha256("user|expected").
# NOTE(review): unbounded and per-process only.
feedback_cache = {}
|
|
||||||
|
|
||||||
def potong_konten(text: str, max_chars: int = 5000):
    """Return *text* capped at *max_chars* characters, warning on truncation."""
    if len(text) <= max_chars:
        return text
    logging.warning(f"Teks terlalu panjang ({len(text)} karakter), dipotong jadi {max_chars}")
    return text[:max_chars]
|
|
||||||
|
|
||||||
@app.post("/generate-from-material/")
async def generate_from_material(request: MaterialRequest):
    """Generate weighted questions from material via Ollama (supports 'mixed').

    Raises:
        HTTPException: 400 on invalid parameters; 500 on Ollama/internal errors.
    """
    try:
        if request.question_count < 1 or request.question_count > 20:
            raise HTTPException(status_code=400, detail="Jumlah soal harus antara 1-20")

        if request.question_type not in ["multiple_choice", "essay", "mixed"]:
            raise HTTPException(status_code=400, detail="Jenis soal tidak valid. Pilih: multiple_choice, essay, atau mixed")

        mc_count = request.question_count if request.question_type == "multiple_choice" else 0
        essay_count = request.question_count if request.question_type == "essay" else 0
        if request.question_type == "mixed":
            # Split roughly in half, guaranteeing at least one MC question.
            mc_count = max(1, request.question_count // 2)
            essay_count = request.question_count - mc_count

        content_bersih = potong_konten(request.content.strip())

        prompt = f"""
Buat soal latihan berdasarkan teks materi berikut untuk siswa SD kelas 3.

**Instruksi:**
1. Buat total {request.question_count} soal dengan rincian:
- Soal pilihan ganda: {mc_count}
- Soal isian/essay: {essay_count}
2. Setiap soal harus disertai:
- Kutipan 1 kalimat dari teks materi sebagai dasar soal
- Jawaban
- ✅ Bobot soal antara:
- 1-2 untuk soal pilihan ganda
- 3-5 untuk soal isian/essay
- Gunakan penilaian kompleksitas soal untuk menentukan bobotnya
3. Gunakan bahasa yang sederhana dan sesuai untuk siswa SD kelas 3.
4. Jangan menambahkan informasi di luar materi.

**Format Output:**
---
**Soal Pilihan Ganda:**
1. Kalimat sumber: "[kutipan kalimat dari teks]"
Pertanyaan: [Pertanyaan]
A. [Opsi A]
B. [Opsi B]
C. [Opsi C]
D. [Opsi D]
Jawaban: [Huruf Opsi]
Bobot: [1 atau 2]

**Soal Isian:**
1. Kalimat sumber: "[kutipan kalimat dari teks]"
Pertanyaan: [Pertanyaan]
Jawaban: [Jawaban]
Bobot: [3 - 5]
---

**Materi:**
{content_bersih}
""".strip()

        logging.info("Mengirim prompt ke Ollama...")
        async with httpx.AsyncClient(timeout=120) as client:
            response = await client.post(OLLAMA_URL, json={
                "model": "llama3.1:latest",
                "prompt": prompt,
                "stream": False,
                "options": {
                    "num_predict": 2048
                }
            })

        response.raise_for_status()
        result = response.json()
        generated_text = result.get("response", "").strip()

        return {
            "generated_questions": generated_text,
            "question_type": request.question_type,
            "question_count": request.question_count,
            "mc_count": mc_count,
            "essay_count": essay_count
        }

    except HTTPException:
        # Bug fix: the broad handler below used to swallow the 400
        # validation errors raised above and re-raise them as generic 500s.
        raise
    except Exception as e:
        logging.error(f"Error saat generate: {e}")
        raise HTTPException(status_code=500, detail=f"Terjadi kesalahan internal: {str(e)}")
|
|
||||||
|
|
||||||
@app.post("/generate-feedback/")
async def generate_feedback(request: FeedbackRequest):
    """Generate short, constructive feedback for a student's essay answer.

    Checks a SHA-256-keyed in-memory cache first; on a miss, asks the
    Ollama model for at most two sentences of feedback (per the prompt),
    stores the result, and returns it.

    Raises:
        HTTPException: 500 on any failure (network error, bad JSON, ...).
    """
    try:
        user_answer = request.user_answer.strip()
        expected_answer = request.expected_answer.strip()

        # Cache key covers both answers; the "|" separator keeps pairs like
        # ("ab", "c") and ("a", "bc") from colliding.
        prompt_hash = hashlib.sha256(f"{user_answer}|{expected_answer}".encode()).hexdigest()
        if prompt_hash in feedback_cache:
            logging.info("Feedback dari cache.")
            return {"feedback": feedback_cache[prompt_hash]}

        prompt = f"""
Kamu adalah asisten pengajar untuk siswa SD kelas 3. Siswa memberikan jawaban berikut untuk soal isian.

**Jawaban Siswa:** {user_answer}
**Jawaban Ideal:** {expected_answer}

Beri feedback singkat dan membangun, maksimal 2 kalimat. Gunakan bahasa yang mudah dimengerti oleh siswa SD. Jika jawaban siswa salah, berikan petunjuk atau koreksi yang membantu.
"""

        logging.info("Mengirim prompt feedback ke Ollama...")
        async with httpx.AsyncClient(timeout=60) as client:
            response = await client.post(OLLAMA_URL, json={
                "model": "llama3.1:latest",
                "prompt": prompt,
                "stream": False
            })
            response.raise_for_status()
            result = response.json()

        feedback = result.get("response", "").strip()

        # Bound the cache: it previously accumulated one entry per unique
        # answer pair for the life of the process (an unbounded memory leak).
        # Evict the oldest entry once the cap is reached (dicts preserve
        # insertion order, so next(iter(...)) is the oldest key).
        if len(feedback_cache) >= 1024:
            feedback_cache.pop(next(iter(feedback_cache)))
        feedback_cache[prompt_hash] = feedback
        return {"feedback": feedback}

    except Exception as e:
        logging.error(f"Error saat generate feedback: {e}")
        raise HTTPException(status_code=500, detail=f"Terjadi kesalahan: {str(e)}")
|
|
||||||
|
|
@ -1,144 +1,7 @@
|
||||||
# Menggunakan Huggingface
|
from fastapi import FastAPI, HTTPException
|
||||||
# from fastapi import FastAPI, HTTPException
|
|
||||||
# from fastapi.middleware.cors import CORSMiddleware
|
|
||||||
# from huggingface_hub import InferenceClient
|
|
||||||
# import random
|
|
||||||
# import logging
|
|
||||||
|
|
||||||
# app = FastAPI()
|
|
||||||
|
|
||||||
# app.add_middleware(
|
|
||||||
# CORSMiddleware,
|
|
||||||
# allow_origins=["*"],
|
|
||||||
# allow_credentials=True,
|
|
||||||
# allow_methods=["*"],
|
|
||||||
# allow_headers=["*"],
|
|
||||||
# )
|
|
||||||
|
|
||||||
# # Logging setup
|
|
||||||
# logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s")
|
|
||||||
|
|
||||||
# # Token Hugging Face (gunakan .env di production)
|
|
||||||
# HF_TOKEN = "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"
|
|
||||||
# #meta-llama/Llama-2-7b-chat-hf
|
|
||||||
# #meta-llama/Llama-3.2-3B-Instruct
|
|
||||||
# client = InferenceClient(model="meta-llama/Llama-3.2-3B-Instruct", token=HF_TOKEN)
|
|
||||||
|
|
||||||
# data_sources = {
|
|
||||||
# "cerita": {
|
|
||||||
# "Malin Kundang": "Malin Kundang adalah seorang anak dari keluarga miskin yang menjadi kaya raya namun menolak mengakui ibunya, hingga akhirnya dikutuk menjadi batu.",
|
|
||||||
# "Bawang Merah Bawang Putih": "Bawang Putih adalah gadis baik hati yang diperlakukan buruk oleh ibu dan saudara tirinya, tetapi kebaikannya membuahkan hasil berkat ikan ajaib.",
|
|
||||||
# "Sangkuriang": "Sangkuriang jatuh cinta pada ibunya, Dayang Sumbi, dan diberi tugas mustahil untuk membangun perahu dalam satu malam. Ia gagal dan akhirnya marah, menendang perahu hingga menjadi Gunung Tangkuban Perahu.",
|
|
||||||
# "Si Kancil": "Si Kancil dengan kecerdikannya berhasil menipu buaya untuk menyeberangi sungai dengan aman.",
|
|
||||||
# "Timun Mas": "Seorang ibu tua mendapatkan anak dari biji timun emas. Namun, anak itu harus melarikan diri dari raksasa jahat yang ingin memakannya."
|
|
||||||
# },
|
|
||||||
# "pantun": {
|
|
||||||
# "Pantun Nasihat": "Jalan-jalan ke kota Blitar,\nJangan lupa membeli roti.\nRajin belajar sejak pintar,\nAgar sukses di kemudian hari.",
|
|
||||||
# "Pantun Jenaka": "Ke pasar beli ikan teri,\nIkan habis tinggal kepala.\nJangan suka mencuri,\nNanti ketahuan malah celaka."
|
|
||||||
# },
|
|
||||||
# "puisi": {
|
|
||||||
# "Puisi Alam": "Langit biru membentang luas,\nBurung-burung terbang bebas.\nAngin sepoi menyapu dedaunan,\nAlam indah penuh kedamaian.",
|
|
||||||
# "Puisi Persahabatan": "Sahabat sejati selalu ada,\nDalam suka dan dalam duka.\nBersama kita jalani hari,\nMengukir cerita tak terlupa."
|
|
||||||
# }
|
|
||||||
# }
|
|
||||||
|
|
||||||
# @app.post("/generate/")
|
|
||||||
# async def generate_text():
|
|
||||||
# try:
|
|
||||||
# selected_stories = random.sample(list(data_sources["cerita"].keys()), 3)
|
|
||||||
# selected_pantun = random.choice(list(data_sources["pantun"].keys()))
|
|
||||||
# selected_puisi = random.choice(list(data_sources["puisi"].keys()))
|
|
||||||
|
|
||||||
# story_prompts = "\n\n".join([
|
|
||||||
# f"Judul: {story}\nIsi:\n{data_sources['cerita'][story]}"
|
|
||||||
# for story in selected_stories
|
|
||||||
# ])
|
|
||||||
|
|
||||||
# pantun_prompt = f"Judul: {selected_pantun}\nIsi:\n{data_sources['pantun'][selected_pantun]}"
|
|
||||||
# puisi_prompt = f"Judul: {selected_puisi}\nIsi:\n{data_sources['puisi'][selected_puisi]}"
|
|
||||||
|
|
||||||
# full_prompt = f"""
|
|
||||||
# Kamu adalah asisten pengajar untuk siswa SD kelas 3. Berdasarkan teks di bawah ini, buat soal untuk latihan siswa.
|
|
||||||
|
|
||||||
# **Instruksi:**
|
|
||||||
# 1. Untuk setiap teks (cerita, pantun, puisi), tampilkan:
|
|
||||||
# - Judul
|
|
||||||
# - Isi lengkap teks
|
|
||||||
# - 1 soal pilihan ganda (A-D) beserta jawaban benar dalam format: Jawaban Benar: X
|
|
||||||
# - 1 soal isian beserta jawaban ideal dalam format: Jawaban Ideal: [isi jawaban])
|
|
||||||
# 2. Gunakan bahasa sederhana dan mudah dipahami siswa SD kelas 3.
|
|
||||||
# 3. Gunakan format seperti contoh ini:
|
|
||||||
|
|
||||||
# ---
|
|
||||||
|
|
||||||
# Judul: [judul]
|
|
||||||
# Isi:
|
|
||||||
# [isi teks]
|
|
||||||
|
|
||||||
# **Soal Pilihan Ganda:**
|
|
||||||
# 1. [pertanyaan]
|
|
||||||
# A. ...
|
|
||||||
# B. ...
|
|
||||||
# C. ...
|
|
||||||
# D. ...
|
|
||||||
# Jawaban Benar: X
|
|
||||||
|
|
||||||
# **Soal Isian:**
|
|
||||||
# [pertanyaan]
|
|
||||||
# Jawaban Ideal: [isi jawaban]
|
|
||||||
|
|
||||||
# ---
|
|
||||||
|
|
||||||
# **TEKS CERITA:**
|
|
||||||
# {story_prompts}
|
|
||||||
|
|
||||||
# **TEKS PANTUN:**
|
|
||||||
# {pantun_prompt}
|
|
||||||
|
|
||||||
# **TEKS PUISI:**
|
|
||||||
# {puisi_prompt}
|
|
||||||
|
|
||||||
# Jangan gabungkan semua soal jadi satu bagian. Setiap teks harus punya blok tersendiri seperti format di atas.
|
|
||||||
# """
|
|
||||||
|
|
||||||
# logging.info("Mengirim prompt ke Hugging Face...")
|
|
||||||
# response = client.text_generation(full_prompt, max_new_tokens=1000)
|
|
||||||
# generated_text = response.strip()
|
|
||||||
|
|
||||||
# if not generated_text:
|
|
||||||
# raise HTTPException(status_code=500, detail="Model tidak menghasilkan pertanyaan.")
|
|
||||||
|
|
||||||
# return {
|
|
||||||
# "selected_stories": [
|
|
||||||
# {
|
|
||||||
# "title": story,
|
|
||||||
# "content": data_sources["cerita"][story]
|
|
||||||
# } for story in selected_stories
|
|
||||||
# ],
|
|
||||||
# "selected_pantun": {
|
|
||||||
# "title": selected_pantun,
|
|
||||||
# "content": data_sources["pantun"][selected_pantun]
|
|
||||||
# },
|
|
||||||
# "selected_puisi": {
|
|
||||||
# "title": selected_puisi,
|
|
||||||
# "content": data_sources["puisi"][selected_puisi]
|
|
||||||
# },
|
|
||||||
# "generated_questions": generated_text
|
|
||||||
# }
|
|
||||||
|
|
||||||
# except Exception as e:
|
|
||||||
# logging.error(f"Error saat generate: {e}")
|
|
||||||
# raise HTTPException(status_code=500, detail=f"Terjadi kesalahan: {str(e)}")
|
|
||||||
|
|
||||||
|
|
||||||
# Menggunakan Groq
|
|
||||||
from fastapi import FastAPI, HTTPException, Query
|
|
||||||
from fastapi.middleware.cors import CORSMiddleware
|
from fastapi.middleware.cors import CORSMiddleware
|
||||||
from groq import Groq
|
from huggingface_hub import InferenceClient
|
||||||
import random
|
import random
|
||||||
import logging
|
|
||||||
from pydantic import BaseModel
|
|
||||||
from difflib import SequenceMatcher
|
|
||||||
|
|
||||||
app = FastAPI()
|
app = FastAPI()
|
||||||
|
|
||||||
|
|
@ -150,135 +13,80 @@ app.add_middleware(
|
||||||
allow_headers=["*"],
|
allow_headers=["*"],
|
||||||
)
|
)
|
||||||
|
|
||||||
# Logging setup
|
# Ganti dengan token API Hugging Face kamu
|
||||||
logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s")
|
HF_TOKEN = ""
|
||||||
|
client = InferenceClient(model="TinyLlama/TinyLlama-1.1B-chat-v1.0", token=HF_TOKEN)
|
||||||
# Langsung masukkan API Key Groq di sini
|
|
||||||
client = Groq(api_key="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")
|
|
||||||
|
|
||||||
|
# Data ringkasan cerita, pantun, dan puisi (bisa ditambah lagi)
|
||||||
data_sources = {
|
data_sources = {
|
||||||
"cerita": {
|
"cerita": {
|
||||||
"Malin Kundang": "Pada zaman dahulu, hiduplah seorang anak bernama Malin Kundang...",
|
"Malin Kundang": "Malin Kundang adalah seorang anak dari keluarga miskin yang menjadi kaya raya namun menolak mengakui ibunya, hingga akhirnya dikutuk menjadi batu.",
|
||||||
"Bawang Merah Bawang Putih": "Bawang Merah selalu iri dengan kebaikan Bawang Putih...",
|
"Bawang Merah Bawang Putih": "Bawang Putih adalah gadis baik hati yang diperlakukan buruk oleh ibu dan saudara tirinya, tetapi kebaikannya membuahkan hasil berkat ikan ajaib.",
|
||||||
"Timun Mas": "Timun Mas adalah seorang gadis pemberian raksasa kepada seorang petani..."
|
"Sangkuriang": "Sangkuriang jatuh cinta pada ibunya, Dayang Sumbi, dan diberi tugas mustahil untuk membangun perahu dalam satu malam. Ia gagal dan akhirnya marah, menendang perahu hingga menjadi Gunung Tangkuban Perahu.",
|
||||||
|
"Si Kancil": "Si Kancil dengan kecerdikannya berhasil menipu buaya untuk menyeberangi sungai dengan aman.",
|
||||||
|
"Timun Mas": "Seorang ibu tua mendapatkan anak dari biji timun emas. Namun, anak itu harus melarikan diri dari raksasa jahat yang ingin memakannya."
|
||||||
},
|
},
|
||||||
"pantun": {
|
"pantun": {
|
||||||
"Pantun Pendidikan": "Belajar pagi membaca buku,\nSiang datang janganlah lesu,\nMenuntut ilmu jangan jemu,\nAgar sukses di masa depanmu."
|
"Pantun Nasihat": "Jalan-jalan ke kota Blitar,\nJangan lupa membeli roti.\nRajin belajar sejak kecil,\nAgar sukses di kemudian hari.",
|
||||||
|
"Pantun Jenaka": "Ke pasar beli ikan teri,\nIkan habis tinggal kepala.\nJangan suka mencuri,\nNanti ketahuan malah celaka."
|
||||||
},
|
},
|
||||||
"puisi": {
|
"puisi": {
|
||||||
"Puisi Alam": "Langit biru terbentang luas,\nGunung tinggi menjulang tegas,\nHijau daun menari bebas,\nAlam indah ciptaan yang cerdas."
|
"Puisi Alam": "Langit biru membentang luas,\nBurung-burung terbang bebas.\nAngin sepoi menyapu dedaunan,\nAlam indah penuh kedamaian.",
|
||||||
|
"Puisi Persahabatan": "Sahabat sejati selalu ada,\nDalam suka dan dalam duka.\nBersama kita jalani hari,\nMengukir cerita tak terlupa."
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@app.post("/generate/") # Endpoint untuk generate soal
|
@app.post("/generate/")
|
||||||
async def generate_text():
|
async def generate_text():
|
||||||
try:
|
try:
|
||||||
selected_stories = random.sample(list(data_sources["cerita"].keys()), 3)
|
# Pilih 2 cerita, 1 pantun, dan 1 puisi secara acak
|
||||||
|
selected_stories = random.sample(list(data_sources["cerita"].keys()), 2)
|
||||||
selected_pantun = random.choice(list(data_sources["pantun"].keys()))
|
selected_pantun = random.choice(list(data_sources["pantun"].keys()))
|
||||||
selected_puisi = random.choice(list(data_sources["puisi"].keys()))
|
selected_puisi = random.choice(list(data_sources["puisi"].keys()))
|
||||||
|
|
||||||
story_prompts = "\n\n".join([f"Judul: {story}\nIsi:\n{data_sources['cerita'][story]}" for story in selected_stories])
|
# Buat format prompt dengan cerita, pantun, dan puisi
|
||||||
pantun_prompt = f"Judul: {selected_pantun}\nIsi:\n{data_sources['pantun'][selected_pantun]}"
|
story_prompts = "\n\n".join([
|
||||||
puisi_prompt = f"Judul: {selected_puisi}\nIsi:\n{data_sources['puisi'][selected_puisi]}"
|
f"**{story}**\n{data_sources['cerita'][story]}"
|
||||||
|
for story in selected_stories
|
||||||
|
])
|
||||||
|
|
||||||
|
pantun_prompt = f"**{selected_pantun}**\n{data_sources['pantun'][selected_pantun]}"
|
||||||
|
puisi_prompt = f"**{selected_puisi}**\n{data_sources['puisi'][selected_puisi]}"
|
||||||
|
|
||||||
|
# Gabungkan semua prompt dengan instruksi eksplisit
|
||||||
full_prompt = f"""
|
full_prompt = f"""
|
||||||
Kamu adalah asisten pengajar untuk siswa SD kelas 3. Berdasarkan teks di bawah ini, buat soal untuk latihan siswa.
|
Berdasarkan teks berikut ini, buatlah soal pilihan ganda dengan format berikut:
|
||||||
|
- Setiap soal harus memiliki **1 pertanyaan** dan **4 pilihan jawaban (A, B, C, D)**.
|
||||||
|
- Berikan **jawaban yang benar dengan format 'Jawaban Benar: X'** di akhir setiap soal.
|
||||||
|
- Buat total **5 soal** dari teks yang tersedia.
|
||||||
|
|
||||||
**Instruksi:**
|
### **Teks Bacaan**
|
||||||
1. Untuk setiap teks (cerita, pantun, puisi), tampilkan:
|
|
||||||
- Judul
|
|
||||||
- Isi lengkap teks
|
|
||||||
- 1 soal pilihan ganda (A-D) beserta jawaban benar dalam format: Jawaban Benar: X
|
|
||||||
- 1 soal isian beserta jawaban ideal dalam format: Jawaban Ideal: [isi jawaban])
|
|
||||||
2. Gunakan bahasa sederhana dan mudah dipahami siswa SD kelas 3.
|
|
||||||
3. Gunakan format seperti contoh ini:
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
Judul: [judul]
|
|
||||||
Isi:
|
|
||||||
[isi teks]
|
|
||||||
|
|
||||||
**Soal Pilihan Ganda:**
|
|
||||||
1. [pertanyaan]
|
|
||||||
A. ...
|
|
||||||
B. ...
|
|
||||||
C. ...
|
|
||||||
D. ...
|
|
||||||
Jawaban Benar: X
|
|
||||||
|
|
||||||
**Soal Isian:**
|
|
||||||
[pertanyaan]
|
|
||||||
Jawaban Ideal: [isi jawaban]
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
**TEKS CERITA:**
|
|
||||||
{story_prompts}
|
{story_prompts}
|
||||||
|
|
||||||
**TEKS PANTUN:**
|
|
||||||
{pantun_prompt}
|
{pantun_prompt}
|
||||||
|
|
||||||
**TEKS PUISI:**
|
|
||||||
{puisi_prompt}
|
{puisi_prompt}
|
||||||
|
|
||||||
Jangan gabungkan semua soal jadi satu bagian. Setiap teks harus punya blok tersendiri seperti format di atas.
|
### **Contoh Format Soal**
|
||||||
|
1. Siapakah tokoh utama dalam cerita '{selected_stories[0]}'?
|
||||||
|
A. Bawang Putih
|
||||||
|
B. Timun Mas
|
||||||
|
C. {selected_stories[0]}
|
||||||
|
D. Si Kancil
|
||||||
|
**Jawaban Benar: C**
|
||||||
|
|
||||||
|
Sekarang, buatlah **5 soal pilihan ganda** berdasarkan teks di atas.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
logging.info("Mengirim prompt ke Groq LLaMA-3.1-8b-instant...")
|
# Kirim permintaan ke model TinyLlama
|
||||||
|
response = client.text_generation(full_prompt, max_new_tokens=400)
|
||||||
completion = client.chat.completions.create(
|
|
||||||
model="llama-3.1-8b-instant",
|
|
||||||
messages=[{"role": "user", "content": full_prompt}],
|
|
||||||
temperature=0.7,
|
|
||||||
max_tokens=1000
|
|
||||||
)
|
|
||||||
|
|
||||||
generated_text = completion.choices[0].message.content.strip()
|
|
||||||
|
|
||||||
return {
|
return {
|
||||||
"selected_stories": [{"title": story, "content": data_sources["cerita"][story]} for story in selected_stories],
|
"selected_stories": selected_stories,
|
||||||
"selected_pantun": {"title": selected_pantun, "content": data_sources["pantun"][selected_pantun]},
|
"selected_pantun": selected_pantun,
|
||||||
"selected_puisi": {"title": selected_puisi, "content": data_sources["puisi"][selected_puisi]},
|
"selected_puisi": selected_puisi,
|
||||||
"generated_questions": generated_text
|
"generated_questions": response
|
||||||
}
|
}
|
||||||
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logging.error(f"Error saat generate: {e}")
|
raise HTTPException(status_code=500, detail=f"Error: {str(e)}")
|
||||||
raise HTTPException(status_code=500, detail=f"Terjadi kesalahan: {str(e)}")
|
|
||||||
|
|
||||||
class FeedbackRequest(BaseModel):
    """Request body for /generate-feedback/."""

    # The student's submitted answer; the endpoint strips surrounding whitespace.
    user_answer: str
    # The reference ("ideal") answer the submission is compared against.
    expected_answer: str
|
|
||||||
|
|
||||||
@app.post("/generate-feedback/")
async def generate_feedback(request: FeedbackRequest):
    """Ask the Groq model for brief, constructive feedback on a student's
    essay answer and return it as ``{"feedback": ...}``.

    Any failure (network, API, unexpected response shape) is converted into
    an HTTP 500 with the error message in the detail.
    """
    try:
        answer_given = request.user_answer.strip()
        answer_ideal = request.expected_answer.strip()

        feedback_prompt = f"""
Kamu adalah asisten pengajar untuk siswa SD kelas 3. Siswa memberikan jawaban berikut untuk soal isian.

**Jawaban Siswa:** {answer_given}
**Jawaban Ideal:** {answer_ideal}

Beri feedback singkat dan membangun, maksimal 2 kalimat. Gunakan bahasa yang mudah dimengerti oleh siswa SD. Jika jawaban siswa salah, berikan petunjuk atau koreksi yang membantu.
"""

        logging.info("Mengirim prompt feedback ke Groq...")

        chat_response = client.chat.completions.create(
            model="llama-3.1-8b-instant",
            messages=[{"role": "user", "content": feedback_prompt}],
            temperature=0.7,
            max_tokens=150,
        )

        return {"feedback": chat_response.choices[0].message.content.strip()}

    except Exception as e:
        logging.error(f"Error saat generate feedback: {e}")
        raise HTTPException(status_code=500, detail=f"Terjadi kesalahan: {str(e)}")
|
|
||||||
|
|
|
||||||
|
|
@ -1,190 +0,0 @@
|
||||||
from fastapi import FastAPI, HTTPException, Request
|
|
||||||
from fastapi.middleware.cors import CORSMiddleware
|
|
||||||
from starlette.middleware.base import BaseHTTPMiddleware
|
|
||||||
from starlette.responses import JSONResponse
|
|
||||||
from groq import Groq
|
|
||||||
from pydantic import BaseModel
|
|
||||||
from collections import defaultdict
|
|
||||||
import random, time, logging, hashlib
|
|
||||||
|
|
||||||
# Inisialisasi FastAPI
|
|
||||||
app = FastAPI()
|
|
||||||
|
|
||||||
# Middleware CORS
|
|
||||||
app.add_middleware(
|
|
||||||
CORSMiddleware,
|
|
||||||
allow_origins=["*"],
|
|
||||||
allow_credentials=True,
|
|
||||||
allow_methods=["*"],
|
|
||||||
allow_headers=["*"],
|
|
||||||
)
|
|
||||||
|
|
||||||
# Logging
|
|
||||||
logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s")
|
|
||||||
|
|
||||||
# Groq Client
# SECURITY(review): the API key is hardcoded in source and has been committed
# to version control — move it to an environment variable or secrets manager
# and rotate/revoke this exposed key.
client = Groq(api_key="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")
|
|
||||||
|
|
||||||
# Rate Limiting Middleware
|
|
||||||
class RateLimitMiddleware(BaseHTTPMiddleware):
    """Per-client-IP sliding-window rate limiter.

    Allows at most ``max_requests`` requests from one IP within the last
    ``window_seconds`` seconds; requests over the limit receive an HTTP 429
    JSON response instead of reaching the endpoint.
    """

    def __init__(self, app, max_requests: int = 5, window_seconds: int = 60):
        super().__init__(app)
        self.max_requests = max_requests  # allowed requests per window
        self.window = window_seconds      # window length, in seconds
        # Maps client IP -> list of request timestamps (time.time() floats).
        # NOTE(review): entries for IPs that stop sending requests are never
        # removed, so this dict grows with the number of distinct client IPs.
        self.ip_timestamps = defaultdict(list)

    async def dispatch(self, request: Request, call_next):
        ip = request.client.host
        now = time.time()
        # Prune timestamps that have fallen out of the window before counting,
        # so only requests within the last `self.window` seconds are counted.
        timestamps = self.ip_timestamps[ip]
        self.ip_timestamps[ip] = [t for t in timestamps if now - t < self.window]

        if len(self.ip_timestamps[ip]) >= self.max_requests:
            # Over the limit: short-circuit with 429, do not call the endpoint.
            return JSONResponse(
                status_code=429,
                content={"detail": "Terlalu banyak permintaan. Silakan coba lagi beberapa saat."},
            )

        # Record this request only when it is allowed through.
        self.ip_timestamps[ip].append(now)
        return await call_next(request)
|
|
||||||
|
|
||||||
# Tambahkan RateLimit Middleware
|
|
||||||
app.add_middleware(RateLimitMiddleware)
|
|
||||||
|
|
||||||
# Data sumber
|
|
||||||
# Static reading material the /generate/ endpoint samples from.
# Keys are titles; values are the text shown to the model and returned to
# the caller. Texts are truncated summaries (note the trailing "...").
data_sources = {
    # Folk-tale summaries (3 entries; /generate/ samples 3, i.e. all of them).
    "cerita": {
        "Malin Kundang": "Pada zaman dahulu, hiduplah seorang anak bernama Malin Kundang...",
        "Bawang Merah Bawang Putih": "Bawang Merah selalu iri dengan kebaikan Bawang Putih...",
        "Timun Mas": "Timun Mas adalah seorang gadis pemberian raksasa kepada seorang petani..."
    },
    # Traditional four-line rhymes (pantun).
    "pantun": {
        "Pantun Pendidikan": "Belajar pagi membaca buku,\nSiang datang janganlah lesu,\nMenuntut ilmu jangan jemu,\nAgar sukses di masa depanmu."
    },
    # Short poems.
    "puisi": {
        "Puisi Alam": "Langit biru terbentang luas,\nGunung tinggi menjulang tegas,\nHijau daun menari bebas,\nAlam indah ciptaan yang cerdas."
    }
}
|
|
||||||
|
|
||||||
@app.post("/generate/")
async def generate_text():
    """Generate exercise questions for grade-3 students via the Groq model.

    Randomly selects 3 stories, 1 pantun, and 1 poem from ``data_sources``,
    builds one instruction prompt, and asks the model for a multiple-choice
    and a fill-in question per text. Returns the selected texts plus the raw
    generated question text.

    Raises:
        HTTPException: 500 on any failure (sampling, API call, ...).
    """
    try:
        # NOTE(review): "cerita" currently holds exactly 3 entries, so this
        # sample takes all of them; random.sample raises ValueError if the
        # pool ever shrinks below 3.
        selected_stories = random.sample(list(data_sources["cerita"].keys()), 3)
        selected_pantun = random.choice(list(data_sources["pantun"].keys()))
        selected_puisi = random.choice(list(data_sources["puisi"].keys()))

        # One "Judul/Isi" section per selected text, joined into the prompt.
        story_prompts = "\n\n".join([f"Judul: {story}\nIsi:\n{data_sources['cerita'][story]}" for story in selected_stories])
        pantun_prompt = f"Judul: {selected_pantun}\nIsi:\n{data_sources['pantun'][selected_pantun]}"
        puisi_prompt = f"Judul: {selected_puisi}\nIsi:\n{data_sources['puisi'][selected_puisi]}"

        # Instruction prompt (Indonesian): per text, ask for one A-D
        # multiple-choice question ("Jawaban Benar: X") and one fill-in
        # question ("Jawaban Ideal: ..."), each in its own block.
        full_prompt = f"""
Kamu adalah asisten pengajar untuk siswa SD kelas 3. Berdasarkan teks di bawah ini, buat soal untuk latihan siswa.

**Instruksi:**
1. Untuk setiap teks (cerita, pantun, puisi), tampilkan:
- Judul
- Isi lengkap teks
- 1 soal pilihan ganda (A-D) beserta jawaban benar dalam format: Jawaban Benar: X
- 1 soal isian beserta jawaban ideal dalam format: Jawaban Ideal: [isi jawaban])
2. Gunakan bahasa sederhana dan mudah dipahami siswa SD kelas 3.
3. Gunakan format seperti contoh ini:

---

Judul: [judul]
Isi:
[isi teks]

**Soal Pilihan Ganda:**
1. [pertanyaan]
A. ...
B. ...
C. ...
D. ...
Jawaban Benar: X

**Soal Isian:**
[pertanyaan]
Jawaban Ideal: [isi jawaban]

---

**TEKS CERITA:**
{story_prompts}

**TEKS PANTUN:**
{pantun_prompt}

**TEKS PUISI:**
{puisi_prompt}

Jangan gabungkan semua soal jadi satu bagian. Setiap teks harus punya blok tersendiri seperti format di atas.
"""

        logging.info("Mengirim prompt ke Groq...")

        completion = client.chat.completions.create(
            model="llama-3.1-8b-instant",
            messages=[{"role": "user", "content": full_prompt}],
            temperature=0.7,
            max_tokens=1000
        )

        # Model output is returned verbatim; no parsing/validation is done here.
        generated_text = completion.choices[0].message.content.strip()

        return {
            "selected_stories": [{"title": s, "content": data_sources["cerita"][s]} for s in selected_stories],
            "selected_pantun": {"title": selected_pantun, "content": data_sources["pantun"][selected_pantun]},
            "selected_puisi": {"title": selected_puisi, "content": data_sources["puisi"][selected_puisi]},
            "generated_questions": generated_text
        }

    except Exception as e:
        logging.error(f"Error saat generate: {e}")
        raise HTTPException(status_code=500, detail=f"Terjadi kesalahan: {str(e)}")
|
|
||||||
|
|
||||||
# Cache Feedback
# In-memory cache: sha256("user|expected") hex digest -> feedback string.
# NOTE(review): unbounded — grows for the life of the process, one entry per
# unique answer pair; consider a size cap or TTL.
feedback_cache = {}
|
|
||||||
|
|
||||||
class FeedbackRequest(BaseModel):
    """Payload for /generate-feedback/: student answer plus the ideal answer."""

    # Answer the student typed; whitespace-stripped by the endpoint before use.
    user_answer: str
    # Expected ("ideal") answer used as the grading reference.
    expected_answer: str
|
|
||||||
|
|
||||||
@app.post("/generate-feedback/")
async def generate_feedback(request: FeedbackRequest):
    """Return short, constructive feedback on a student's essay answer.

    Serves from the sha256-keyed ``feedback_cache`` when possible; otherwise
    asks the Groq model (max 2 sentences per the prompt), caches the result,
    and returns ``{"feedback": ...}``.

    Raises:
        HTTPException: 500 on any failure.
    """
    try:
        user_answer = request.user_answer.strip()
        expected_answer = request.expected_answer.strip()

        # Hash both answers ("|"-separated) to form the cache key.
        prompt_hash = hashlib.sha256(f"{user_answer}|{expected_answer}".encode()).hexdigest()
        if prompt_hash in feedback_cache:
            logging.info("Feedback dari cache.")
            return {"feedback": feedback_cache[prompt_hash]}

        prompt = f"""
Kamu adalah asisten pengajar untuk siswa SD kelas 3. Siswa memberikan jawaban berikut untuk soal isian.

**Jawaban Siswa:** {user_answer}
**Jawaban Ideal:** {expected_answer}

Beri feedback singkat dan membangun, maksimal 2 kalimat. Gunakan bahasa yang mudah dimengerti oleh siswa SD. Jika jawaban siswa salah, berikan petunjuk atau koreksi yang membantu.
"""

        logging.info("Mengirim prompt feedback ke Groq...")

        completion = client.chat.completions.create(
            model="llama-3.1-8b-instant",
            messages=[{"role": "user", "content": prompt}],
            temperature=0.7,
            max_tokens=150
        )

        feedback = completion.choices[0].message.content.strip()
        # Store for future identical (user_answer, expected_answer) pairs.
        feedback_cache[prompt_hash] = feedback

        return {"feedback": feedback}

    except Exception as e:
        logging.error(f"Error saat generate feedback: {e}")
        raise HTTPException(status_code=500, detail=f"Terjadi kesalahan: {str(e)}")
|
|
||||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Loading…
Reference in New Issue
Block a user