Merge branch 'devel' into feat/OPS-85

Christoph J. Scherr 2025-03-23 18:51:08 +01:00
commit 6dd8400b61
No known key found for this signature in database
GPG key ID: 9EB784BB202BB7BB
15 changed files with 587 additions and 45 deletions


@@ -1,2 +1,2 @@
[run]
omit = tests/*
omit = tests/*

.gitignore vendored

@@ -180,4 +180,4 @@ ollama
*.kate-swp
# sphinx rst files
docs/source/_modules
poetry.lock


@@ -14,8 +14,6 @@ RUN apt update && apt install curl bash jq
RUN pip install poetry
RUN poetry install -v
FROM base as dev
# Expose development port
EXPOSE 5000


@@ -2,7 +2,6 @@ services:
senju:
build:
context: .
target: dev
ports:
- "127.0.0.1:5000:5000"
volumes:

docs/auto_docu.sh Normal file → Executable file


@@ -3,10 +3,13 @@
# For the full list of built-in configuration values, see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html
from __future__ import annotations
# -- Project information -----------------------------------------------------
# https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information
import os
import sys
sys.path.insert(0, os.path.abspath("../../senju"))
project = 'senju'


@@ -16,4 +16,4 @@ documentation for details.
:caption: Contents:
usage
_modules/modules
_modules/modules


@@ -1,4 +1,5 @@
#!/bin/bash
set -e
# First create a readable multiline string
SYSTEM_PROMPT=$(cat <<EOF
@@ -65,4 +66,4 @@ CONF=$(jq -n --arg system "$SYSTEM_PROMPT" '{
curl http://ollama:11434/api/pull -d '{"model": "phi3"}'
curl http://ollama:11434/api/create -d "$CONF"
cd /app
poetry run sh -c 'flask --app senju/main run --host=0.0.0.0'
poetry run sh -c 'waitress-serve --listen=*:5000 senju.main:app'
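The entrypoint now hands the app to Waitress instead of the Flask development server. A minimal programmatic equivalent of the waitress-serve --listen=*:5000 senju.main:app invocation, sketched here as an illustration rather than something this commit adds:

# Sketch only: programmatic equivalent of the waitress-serve CLI call above.
from waitress import serve

from senju.main import app  # the Flask app defined in senju/main.py

if __name__ == "__main__":
    # Waitress is a production WSGI server; unlike `flask run` it has no
    # debugger or auto-reloader, which is why it replaces the dev command.
    serve(app, host="0.0.0.0", port=5000)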

poetry.lock generated

@@ -1711,6 +1711,22 @@ h2 = ["h2 (>=4,<5)"]
socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"]
zstd = ["zstandard (>=0.18.0)"]
[[package]]
name = "waitress"
version = "3.0.2"
description = "Waitress WSGI server"
optional = false
python-versions = ">=3.9.0"
groups = ["main"]
files = [
{file = "waitress-3.0.2-py3-none-any.whl", hash = "sha256:c56d67fd6e87c2ee598b76abdd4e96cfad1f24cacdea5078d382b1f9d7b5ed2e"},
{file = "waitress-3.0.2.tar.gz", hash = "sha256:682aaaf2af0c44ada4abfb70ded36393f0e307f4ab9456a215ce0020baefc31f"},
]
[package.extras]
docs = ["Sphinx (>=1.8.1)", "docutils", "pylons-sphinx-themes (>=1.0.9)"]
testing = ["coverage (>=7.6.0)", "pytest", "pytest-cov"]
[[package]]
name = "werkzeug"
version = "3.1.3"
@@ -1732,4 +1748,4 @@ watchdog = ["watchdog (>=2.3)"]
[metadata]
lock-version = "2.1"
python-versions = ">=3.10,<3.13"
content-hash = "b7a4de48ebf806e2217d5e7a8d2bc3b39babdaacbd09705b93c86c66845111a6"
content-hash = "5492deb1adff40a0e4571b4520e4c19449090cffa82a9b25308b6d7575ca4933"


@@ -19,6 +19,7 @@ dependencies = [
"pillow (>=11.1.0,<12.0.0)",
"torch (>=2.6.0,<3.0.0)",
"transformers (>=4.50.0,<5.0.0)",
"waitress (>=3.0.2,<4.0.0)",
]
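pyproject.toml now declares waitress as a runtime dependency, matching the waitress 3.0.2 entry added to poetry.lock above. A throwaway check of the resolved version, purely illustrative:

# Illustrative only: confirm the locked Waitress version satisfies the pin.
from importlib.metadata import version

v = version("waitress")
assert v.startswith("3."), f"unexpected waitress version: {v}"
print(v)  # 3.0.2 according to poetry.lock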


@@ -1,3 +1,66 @@
"""
Haiku Generation Module
=======================
A client interface for AI-powered haiku poem generation.
This module provides the core functionality for communicating
with an Ollama-based
AI service to generate three-line haiku poems. It handles the
entire generation
process, from sending properly formatted requests to processing
and validating
the returned poems.
Classes
-------
Haiku
A dataclass representation of a haiku poem, providing structure
for storage,
manipulation and serialization of poem data.
**Methods**:
* ``get_json()``: Converts a haiku instance to JSON format for API responses
* ``request_haiku(seed)``: Creates a new haiku using the AI service
Constants
---------
AI_SERVICE_URL
The endpoint URL for the Ollama API service.
AI_MODEL_NAME
The specific AI model used for haiku generation.
REQUEST_TIMEOUT
The maximum time (in seconds) to wait for AI service responses.
Dependencies
------------
* requests: HTTP client library for API communication
* dataclasses: Support for the Haiku data structure
* logging: Error and diagnostic information capture
* json: Processing of API responses
Implementation Details
----------------------
The module implements a robust communication pattern with the
AI service, including:
1. Proper request formatting with seed text integration
2. Multiple retry attempts for handling temporary service issues
3. Response validation to ensure the returned text follows haiku structure
4. Fallback mechanisms when the AI service returns unsuitable content
5. JSON serialization for consistent data exchange
When communicating with the AI service, the module maintains appropriate
error handling and logging to help diagnose any generation issues. It aims
to provide a reliable haiku generation experience even when dealing with the
inherent unpredictability of AI-generated content.
"""
from __future__ import annotations
import json
@@ -12,16 +75,43 @@ AI_GEN_ENDPOINT: str = "/generate"
@dataclass
class Haiku:
"""
A class representing a haiku poem with three lines.
:ivar lines: A list containing the three lines of the haiku.
:type lines: list[str]
"""
lines: list[str]
def get_json(self):
"""
Converts the haiku lines to a JSON string.
:return: A JSON string representation of the haiku lines.
:rtype: str
"""
return json.dumps(self.lines)
@staticmethod
def request_haiku(seed: str, url=AI_BASE_URL + AI_GEN_ENDPOINT) -> Haiku:
"""This function prompts the ai to generate
the hauku based on the user input"""
"""
Generates a haiku using an AI model based on the
provided seed text.
This function prompts the AI to generate a haiku based on the
user input.
It validates that the response contains exactly 3 lines.
The function will retry until a valid haiku is generated.
:param seed: The input text used to inspire the haiku generation.
:param url: The URL to the AI endpoint
:type seed: str
:return: A new Haiku object containing the generated three lines.
:rtype: Haiku
:raises: Possible JSONDecodeError which is caught and handled
with retries.
"""
ai_gen_request = {
"model": "haiku",
"prompt": f"{seed}",
@@ -30,6 +120,7 @@ class Haiku:
}
tries = 0
while True:
tries += 1
try:
@@ -62,8 +153,8 @@ class Haiku:
lines[1],
lines[2]
])
break
except json.JSONDecodeError as e:
logging.error(f"error while reading json from LLM: {e}")
raise e
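Per the docstrings above, Haiku.request_haiku keeps retrying until the model returns exactly three lines, and the result can then be serialized with get_json. A usage sketch, assuming the Ollama-backed "haiku" model configured by the entrypoint script is reachable at AI_BASE_URL:

# Usage sketch only, not part of the diff.
from senju.haiku import Haiku

haiku = Haiku.request_haiku("cherry blossoms at dawn")
for line in haiku.lines:    # exactly three lines after validation
    print(line)
print(haiku.get_json())     # JSON array of the three lines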


@@ -1,16 +1,58 @@
"""
Senju Haiku Web Application
===========================
A Flask-based web interface for generating, viewing, and managing haiku poetry.
This application provides a comprehensive interface between users
and an AI-powered
haiku generation service, with persistent storage capabilities.
Users can interact
with the system through both a web interface and a RESTful API.
Features
--------
* **Landing page**: Welcome interface introducing users to the Senju service
* **Browsing interface**: Gallery-style viewing of previously generated haikus
* **Prompt interface**: Text input system for generating haikus from seed text
* **Image scanning**: Experimental interface for creating haikus
from visual inputs
* **RESTful API**: Programmatic access for integration with other services
Architecture
------------
The application implements a RESTful architecture using Flask's routing system
and template rendering. All user interactions are handled through
clearly defined
routes, with appropriate error handling for exceptional cases.
Dependencies
------------
* future.annotations: Enhanced type hint support
* os, Path: Filesystem operations for storage management
* Flask: Core web application framework
* Haiku: Custom class for poem representation and generation
* StoreManager: Database abstraction for persistence operations
Implementation
--------------
The module initializes both a Flask application instance and a StoreManager
with a configured storage location. All routes and view functions required
for the complete web interface are defined within this module.
"""
from __future__ import annotations
import os
from pathlib import Path
from flask import (Flask, redirect, render_template, request, url_for,
send_from_directory)
from flask import (Flask, redirect, render_template, request,
send_from_directory, url_for)
from senju.haiku import Haiku
from senju.image_reco import gen_response
from senju.store_manager import StoreManager
import os
app = Flask(__name__)
store = StoreManager(Path("/tmp/store.db"))
@@ -18,11 +60,24 @@ store = StoreManager(Path("/tmp/store.db"))
@app.route("/")
def index_view():
"""
Render the main index page of the application.
:return: The index.html template with title "Senju".
:rtype: flask.Response
"""
return render_template("index.html", title="Senju")
@app.route("/haiku/")
def haiku_index_view():
"""
Redirect to the most recently created haiku.
:return: Redirects to the haiku_view route with the latest haiku ID.
:rtype: flask.Response
:raises KeyError: If no haikus exist in the store yet.
"""
haiku_id: int | None = store.get_id_of_latest_haiku()
if haiku_id is None:
haiku_id = 0
@@ -31,14 +86,29 @@ def haiku_index_view():
@app.route("/haiku/<int:haiku_id>")
def haiku_view(haiku_id):
"""test"""
"""
Display a specific haiku by its ID.
Loads the haiku with the given ID from the store and renders it using
the haiku.html template. If no haiku is found with the provided ID,
raises a KeyError.
:param haiku_id: The ID of the haiku to display.
:type haiku_id: int
:return: The haiku.html template with the haiku data in context.
:rtype: flask.Response
:raises KeyError: If no haiku exists with the given ID.
"""
haiku: Haiku | None = store.load_haiku(haiku_id)
if haiku is None:
# TODO: add "haiku not found" page
raise KeyError("haiku not found")
is_default: bool = request.args.get("is_default") == "1"
haiku: Haiku = store.load_haiku(haiku_id)
context: dict = {
"haiku": haiku,
"is_default": is_default
}
return render_template(
"haiku.html",
context=context,
@@ -47,6 +117,12 @@ def haiku_view(haiku_id):
@app.route("/prompt")
def prompt_view():
"""
Render the haiku generation prompt page.
:return: The prompt.html template with title "Haiku generation".
:rtype: flask.Response
"""
return render_template(
"prompt.html",
title="Haiku generation"
@@ -55,6 +131,12 @@ def prompt_view():
@app.route("/scan")
def scan_view():
"""
Render the image scanning page.
:return: The scan.html template with title "Image scanning".
:rtype: flask.Response
"""
return render_template(
"scan.html",
title="Image scanning"
@@ -79,6 +161,17 @@ def image_recognition():
@app.route("/api/v1/haiku", methods=['POST'])
def generate_haiku():
"""
API endpoint to generate a new haiku based on the provided prompt.
Accepts POST requests with JSON data containing a 'prompt' field.
Generates a haiku using the prompt, saves it to the store,
and returns the ID.
:return: The ID of the newly created haiku if method is POST.
Error message and status code 405 if method is not POST.
:rtype: Union[str, Tuple[str, int]]
"""
if request.method == 'POST':
json_data = request.get_json()
prompt = json_data["prompt"]
@@ -93,6 +186,12 @@ def generate_haiku():
@app.route('/favicon.ico')
def favicon():
"""
Serve the favicon.ico file from the static directory.
:return: The favicon.ico file with the appropriate MIME type.
:rtype: flask.Response
"""
return send_from_directory(os.path.join(app.root_path, 'static/img'),
'favicon.ico',
mimetype='image/vnd.microsoft.icon')
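The POST endpoint documented above accepts a JSON body with a "prompt" field, stores the generated haiku, and returns its ID. An illustrative client call using requests (already a project dependency), assuming the app listens on port 5000 as configured in docker-compose and the entrypoint script:

# Illustrative client call, not part of the diff.
import requests

resp = requests.post(
    "http://127.0.0.1:5000/api/v1/haiku",
    json={"prompt": "quiet mountain lake"},
    timeout=60,
)
haiku_id = resp.text.strip()  # the endpoint returns the new haiku's ID
print(f"view it at http://127.0.0.1:5000/haiku/{haiku_id}")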


@@ -1,3 +1,53 @@
"""
Senju Database Management Module
================================
A database interaction layer for the Senju haiku management system.
This module implements a lightweight document database
abstraction using TinyDB
for persistent storage of haiku poems. It provides a
clean interface for storing,
retrieving, updating, and managing haiku entries in the system.
Classes
-------
StoreManager
The primary class responsible for all database operations.
Handles connection
management, CRUD operations, and query capabilities for haiku data.
Functions
---------
utility_function
Provides simple arithmetic operations to support
database functionalities.
Constants
---------
DEFAULT_DB_PATH
The default filesystem location for the TinyDB database file
(/var/lib/senju.json).
Dependencies
------------
* future.annotations: Enhanced type hint support
* logging.Logger: Diagnostic and error logging capabilities
* pathlib.Path: Cross-platform filesystem path handling
* typing.Optional: Type annotations for nullable values
* tinydb.TinyDB: Lightweight document database implementation
* tinydb.QueryImpl: Query builder for database searches
* senju.haiku.Haiku: Data model for haiku representation
Implementation Details
----------------------
The module uses TinyDB as its storage engine, providing a JSON-based document
storage solution that balances simplicity with functionality. The StoreManager
abstracts all database operations behind a clean API,
handling connection lifecycle
and providing methods for common operations on haiku data.
"""
from __future__ import annotations
from logging import Logger
@@ -22,11 +72,34 @@ class BadStoreManagerFileError(Exception):
class StoreManager:
"""
Manages the storage and retrieval of haiku
data using TinyDB.
This class provides an interface for saving and
loading haikus from
a TinyDB database file.
:ivar _db: Database instance for storing haiku data.
:type _db: TinyDB
:ivar logger: Logger for tracking operations and errors.
:type logger: Logger
"""
__slots__ = "_db", "logger"
_db: TinyDB
logger: Logger
def __init__(self, path_to_db: Path = DEFAULT_DB_PATH) -> None:
"""
Initialize the StoreManager with a database path.
:param path_to_db: Path to the TinyDB database file.
Defaults to DEFAULT_DB_PATH.
:type path_to_db: Path, optional
:return: None
"""
self._db = TinyDB(path_to_db)
try:
self._db = TinyDB(path_to_db)
except Exception as e:
@@ -34,9 +107,29 @@ class StoreManager:
self.logger = Logger(__name__)
def _query(self, query: QueryImpl) -> list[dict]:
"""
Execute a query against the database.
:param query: TinyDB query to execute.
:type query: QueryImpl
:return: List of documents matching the query.
:rtype: list[dict]
"""
return self._db.search(query)
def _load(self, id: int) -> Optional[dict]:
"""
Load a document by its ID.
:param id: Document ID to load.
:type id: int
:return: The document if found, None otherwise.
:rtype: Optional[dict]
.. note::
Logs a warning if document with specified
ID is not found.
"""
try:
return self._db.get(doc_id=id)
except IndexError as e:
@@ -44,9 +137,25 @@ class StoreManager:
return None
def _save(self, data: dict) -> int:
"""
Save a document to the database.
:param data: Document data to save.
:type data: dict
:return: The document ID of the saved document.
:rtype: int
"""
return self._db.insert(data)
def load_haiku(self, key: Optional[int]) -> Haiku:
"""
Load a haiku by its ID.
:param key: The ID of the haiku to load.
:type key: int
:return: A Haiku object if found, None otherwise.
:rtype: Optional[Haiku]
"""
if key is None:
return DEFAULT_HAIKU
raw_haiku: dict | None = self._load(key)
@@ -56,9 +165,27 @@ class StoreManager:
return h
def save_haiku(self, data: Haiku) -> int:
"""
Save a haiku to the database.
:param data: The Haiku object to save.
:type data: Haiku
:return: The document ID of the saved haiku.
:rtype: int
"""
return self._save(data.__dict__)
def get_id_of_latest_haiku(self) -> Optional[int]:
"""
Get the ID of the most recently added haiku.
:return: The ID of the latest haiku if any exists,
None otherwise.
:rtype: Optional[int]
.. note::
Logs an error if the database is empty.
"""
try:
id = self._db.all()[-1].doc_id
return id
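StoreManager wraps TinyDB behind save_haiku, load_haiku and get_id_of_latest_haiku. A usage sketch against the same /tmp path that senju/main.py passes in, illustrative only:

# Usage sketch only, not part of the diff.
from pathlib import Path

from senju.haiku import Haiku
from senju.store_manager import StoreManager

store = StoreManager(Path("/tmp/store.db"))
doc_id = store.save_haiku(Haiku(lines=[
    "an old silent pond",
    "a frog jumps into the pond",
    "splash! silence again",
]))
assert store.get_id_of_latest_haiku() == doc_id
print(store.load_haiku(doc_id).lines)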


@@ -1,15 +1,25 @@
{% extends "base.html" %}
{% block content %}
<div class="bg-violet-900 min-h-screen flex items-center justify-center text-white">
<div class="text-center">
<div class="bg-white text-gray-900 p-10 rounded-lg shadow-lg max-w-2xl mx-auto transform -translate-y-10">
<h1 class="text-4xl font-bold text-violet-700 mb-6">{{ title }}</h1>
<p class="text-2xl italic leading-relaxed text-left">
<p id="haiku-text" class="text-2xl italic leading-relaxed text-left">
{% for line in context.haiku.lines %}
{{ line }}<br>
{% endfor %}
</p>
<div class="mt-6 flex flex-col sm:flex-row items-center justify-center gap-3">
<button id="speak-button" class="bg-violet-600 hover:bg-violet-700 text-white font-bold py-2 px-4 rounded-lg flex items-center justify-center">
<svg xmlns="http://www.w3.org/2000/svg" class="h-5 w-5 mr-2" viewBox="0 0 20 20" fill="currentColor">
<path fill-rule="evenodd" d="M9.383 3.076A1 1 0 0110 4v12a1 1 0 01-1.707.707L4.586 13H2a1 1 0 01-1-1V8a1 1 0 011-1h2.586l3.707-3.707a1 1 0 011.09-.217zM14.657 2.929a1 1 0 011.414 0A9.972 9.972 0 0119 10a9.972 9.972 0 01-2.929 7.071a1 1 0 01-1.414-1.414A7.971 7.971 0 0017 10c0-2.21-.894-4.208-2.343-5.657a1 1 0 010-1.414zm-2.829 2.828a1 1 0 011.415 0A5.983 5.983 0 0115 10a5.984 5.984 0 01-1.757 4.243a1 1 0 01-1.415-1.415A3.984 3.984 0 0013 10a3.983 3.983 0 00-1.172-2.828a1 1 0 010-1.415z" clip-rule="evenodd" />
</svg>
Speak Haiku
</button>
<select id="voice-select" class="rounded-lg border border-gray-300 px-3 py-2 text-gray-700 max-w-full truncate" style="max-width: 200px; text-overflow: ellipsis;">
<option value="">Default Voice</option>
</select>
</div>
</div>
{% if context.is_default %}
<div class="mb-5">
@@ -17,9 +27,209 @@
</div>
{% endif %}
<a href="{{ url_for('index_view') }}"
class=" inline-block bg-violet-600 hover:bg-violet-700 text-white font-bold py-2 px-4 rounded-lg">
class="inline-block bg-violet-600 hover:bg-violet-700 text-white font-bold py-2 px-4 rounded-lg mt-6">
Back to Home
</a>
</div>
</div>
<script>
document.addEventListener('DOMContentLoaded', function() {
const speakButton = document.getElementById('speak-button');
const haikuText = document.getElementById('haiku-text');
const voiceSelect = document.getElementById('voice-select');
let speaking = false;
let voices = [];
// Check if speech synthesis is supported
if (!('speechSynthesis' in window)) {
speakButton.disabled = true;
voiceSelect.disabled = true;
speakButton.title = "Speech synthesis not supported in your browser";
speakButton.classList.add('opacity-50');
console.error("Speech synthesis not supported");
}
function loadVoices() {
voices = window.speechSynthesis.getVoices();
voiceSelect.innerHTML = '<option value="">Default Voice</option>';
const preferredVoices = voices.filter(voice =>
voice.name.includes('Natural') ||
voice.name.includes('Premium') ||
voice.name.includes('Neural') ||
voice.name.includes('Enhanced')
);
// Add preferred voices first
preferredVoices.forEach(voice => {
const option = document.createElement('option');
option.value = voice.name;
option.textContent = `${voice.name} (${voice.lang}) ★`;
voiceSelect.appendChild(option);
});
// Add remaining voices
voices.forEach(voice => {
if (!preferredVoices.includes(voice)) {
const option = document.createElement('option');
option.value = voice.name;
option.textContent = `${voice.name} (${voice.lang})`;
voiceSelect.appendChild(option);
}
});
// Pre-select a good voice if available
for (const searchTerm of ['Neural', 'Premium', 'Natural', 'Enhanced', 'Daniel', 'Samantha', 'Karen']) {
const goodVoice = Array.from(voiceSelect.options).find(option =>
option.text.includes(searchTerm)
);
if (goodVoice) {
voiceSelect.value = goodVoice.value;
break;
}
}
}
// Load voices when available
if (window.speechSynthesis.onvoiceschanged !== undefined) {
window.speechSynthesis.onvoiceschanged = loadVoices;
}
// Initial load attempt
setTimeout(loadVoices, 100);
// Function to extract the haiku text properly
function getHaikuText() {
try {
// First try using innerText
let rawText = haikuText.innerText;
if (rawText && rawText.trim()) {
return rawText.trim();
}
// If that fails, try getting individual text nodes
let lines = [];
Array.from(haikuText.childNodes).forEach(node => {
if (node.nodeType === Node.TEXT_NODE && node.textContent.trim()) {
lines.push(node.textContent.trim());
}
});
// If we got lines, join them
if (lines.length > 0) {
return lines.join(' ');
}
// If nothing worked, fall back to extracting from HTML
return haikuText.textContent.replace(/<br>/g, ' ').trim();
} catch (e) {
console.error("Error extracting haiku text:", e);
try {
return "{{ context.haiku.lines|join(' ') }}";
} catch (e2) {
return "Could not retrieve haiku text.";
}
}
}
// Function to add natural pauses between haiku lines
function addPausesToHaiku(text) {
// Split by line breaks or typical line separators
const lines = text.split(/[\n\r]+|<br>|\. /).filter(line => line.trim().length > 0);
if (lines.length <= 1) {
return text;
}
// Join with pauses (using SSML pause syntax)
return lines.join('. ');
}
speakButton.addEventListener('click', function() {
try {
// If already speaking, stop
if (speaking) {
window.speechSynthesis.cancel();
speaking = false;
speakButton.classList.remove('bg-violet-800');
speakButton.innerHTML = '<svg xmlns="http://www.w3.org/2000/svg" class="h-5 w-5 mr-2" viewBox="0 0 20 20" fill="currentColor"><path fill-rule="evenodd" d="M9.383 3.076A1 1 0 0110 4v12a1 1 0 01-1.707.707L4.586 13H2a1 1 0 01-1-1V8a1 1 0 011-1h2.586l3.707-3.707a1 1 0 011.09-.217zM14.657 2.929a1 1 0 011.414 0A9.972 9.972 0 0119 10a9.972 9.972 0 01-2.929 7.071a1 1 0 01-1.414-1.414A7.971 7.971 0 0017 10c0-2.21-.894-4.208-2.343-5.657a1 1 0 010-1.414zm-2.829 2.828a1 1 0 011.415 0A5.983 5.983 0 0115 10a5.984 5.984 0 01-1.757 4.243a1 1 0 01-1.415-1.415A3.984 3.984 0 0013 10a3.983 3.983 0 00-1.172-2.828a1 1 0 010-1.415z" clip-rule="evenodd" /></svg> Speak Haiku';
return;
}
// Get the haiku text
let textContent = getHaikuText();
console.log("Speaking text:", textContent);
if (!textContent || textContent === "") {
console.error("No text to speak");
alert("No text to speak");
return;
}
// Add natural pauses
textContent = addPausesToHaiku(textContent);
// Create a new speech synthesis instance
const msg = new SpeechSynthesisUtterance();
msg.text = textContent;
// Set human-like speech parameters
msg.rate = 0.85; // Slightly slower pace for poetry
msg.pitch = 1.0; // Natural pitch
msg.volume = 1.0; // Full volume
// Set selected voice if available
if (voiceSelect.value) {
const selectedVoice = voices.find(voice => voice.name === voiceSelect.value);
if (selectedVoice) {
msg.voice = selectedVoice;
}
}
// Stop any ongoing speech
window.speechSynthesis.cancel();
// Set up event handlers
msg.onstart = function() {
speaking = true;
speakButton.classList.add('bg-violet-800');
speakButton.innerHTML = '<svg xmlns="http://www.w3.org/2000/svg" class="h-5 w-5 mr-2" viewBox="0 0 20 20" fill="currentColor"><path fill-rule="evenodd" d="M10 18a8 8 0 100-16 8 8 0 000 16zM8 7a1 1 0 00-1 1v4a1 1 0 001 1h4a1 1 0 001-1V8a1 1 0 00-1-1H8z" clip-rule="evenodd" /></svg> Stop Speaking';
};
msg.onend = function() {
speaking = false;
speakButton.classList.remove('bg-violet-800');
speakButton.innerHTML = '<svg xmlns="http://www.w3.org/2000/svg" class="h-5 w-5 mr-2" viewBox="0 0 20 20" fill="currentColor"><path fill-rule="evenodd" d="M9.383 3.076A1 1 0 0110 4v12a1 1 0 01-1.707.707L4.586 13H2a1 1 0 01-1-1V8a1 1 0 011-1h2.586l3.707-3.707a1 1 0 011.09-.217zM14.657 2.929a1 1 0 011.414 0A9.972 9.972 0 0119 10a9.972 9.972 0 01-2.929 7.071a1 1 0 01-1.414-1.414A7.971 7.971 0 0017 10c0-2.21-.894-4.208-2.343-5.657a1 1 0 010-1.414zm-2.829 2.828a1 1 0 011.415 0A5.983 5.983 0 0115 10a5.984 5.984 0 01-1.757 4.243a1 1 0 01-1.415-1.415A3.984 3.984 0 0013 10a3.983 3.983 0 00-1.172-2.828a1 1 0 010-1.415z" clip-rule="evenodd" /></svg> Speak Haiku';
};
msg.onerror = function(event) {
console.error("Speech synthesis error:", event);
speaking = false;
speakButton.classList.remove('bg-violet-800');
speakButton.innerHTML = '<svg xmlns="http://www.w3.org/2000/svg" class="h-5 w-5 mr-2" viewBox="0 0 20 20" fill="currentColor"><path fill-rule="evenodd" d="M9.383 3.076A1 1 0 0110 4v12a1 1 0 01-1.707.707L4.586 13H2a1 1 0 01-1-1V8a1 1 0 011-1h2.586l3.707-3.707a1 1 0 011.09-.217zM14.657 2.929a1 1 0 011.414 0A9.972 9.972 0 0119 10a9.972 9.972 0 01-2.929 7.071a1 1 0 01-1.414-1.414A7.971 7.971 0 0017 10c0-2.21-.894-4.208-2.343-5.657a1 1 0 010-1.414zm-2.829 2.828a1 1 0 011.415 0A5.983 5.983 0 0115 10a5.984 5.984 0 01-1.757 4.243a1 1 0 01-1.415-1.415A3.984 3.984 0 0013 10a3.983 3.983 0 00-1.172-2.828a1 1 0 010-1.415z" clip-rule="evenodd" /></svg> Speak Haiku';
};
// Introduce a very slight delay before each line (to ensure natural pacing)
setTimeout(() => {
window.speechSynthesis.speak(msg);
}, 100);
} catch (error) {
console.error("Speech synthesis error:", error);
speaking = false;
speakButton.classList.remove('bg-violet-800');
alert("Speech synthesis failed: " + error.message);
}
});
// Ensure speech is canceled when navigating away from the page
window.addEventListener('beforeunload', function() {
if (window.speechSynthesis) {
window.speechSynthesis.cancel();
}
});
});
</script>
{% endblock %}


@@ -1,28 +1,25 @@
{% extends "base.html" %} {% block content %}
<div
class="flex flex-col items-center justify-center min-h-screen bg-violet-900 text-white p-6"
>
<div
class="bg-white text-gray-900 p-8 rounded-xl shadow-lg max-w-lg w-full text-center transform transition duration-300 hover:scale-105"
>
<h1 class="text-3xl font-bold text-violet-700 mb-4">
Very 1337 prompt input
</h1>
<div class="flex flex-col gap-4">
<input
type="text"
id="user-input"
minlength="0"
maxlength="100"
placeholder="Type your prompt here..."
class="w-full px-4 py-3 text-lg border-2 border-gray-300 rounded-lg focus:outline-none focus:ring-2 focus:ring-violet-600"
/>
<button
id="submit-btn"
class="bg-violet-600 text-white font-bold py-3 px-6 rounded-lg text-lg shadow-md transition duration-300 hover:bg-violet-700 hover:scale-105"
>
🚀 Submit
</button>
{% extends "base.html" %}
{% block content %}
<div class="flex flex-col items-center justify-center min-h-screen bg-violet-900 text-white p-6">
<div class="bg-white text-gray-900 p-8 rounded-xl shadow-lg max-w-lg w-full text-center transform transition duration-300 hover:scale-105">
<h1 class="text-3xl font-bold text-violet-700 mb-4">Very 1337 prompt input</h1>
<div class="flex flex-col gap-4">
<input
type="text"
id="user-input"
minlength="0"
maxlength="100"
placeholder="Type your prompt here..."
class="w-full px-4 py-3 text-lg border-2 border-gray-300 rounded-lg focus:outline-none focus:ring-2 focus:ring-violet-600"
/>
<button
id="submit-btn"
class="bg-violet-600 text-white font-bold py-3 px-6 rounded-lg text-lg shadow-md transition duration-300 hover:bg-violet-700 hover:scale-105"
>
🚀 Submit
</button>
</div>
</div>
</div>