import gzip
import json
import pickle
import flet as ft
import os
from pathlib import Path
import threading
import urllib.request
import urllib.error
import time
from functools import partial
import queue
import re
import shutil
# Plattformunabh\C3\A4ngige Pfade
app_data_path = Path(os.getenv("FLET_APP_STORAGE_DATA", default="~/.flet"))
ASSETS_DIR = app_data_path / "assets"
ASSETS_DIR.mkdir(parents=True, exist_ok=True)
SETTINGS_FILE = app_data_path / "settings.json"
INDEX_DIR = ASSETS_DIR / "indexes"
INDEX_DIR.mkdir(parents=True, exist_ok=True)
LANGUAGES = {
"Dutch / Nederlands": ("nl-extract.jsonl.gz", "nl"),
"French / Fran\C3\A7ais": ("fr-extract.jsonl.gz", "fr"),
"German / Deutsch": ("de-extract.jsonl.gz", "de"),
"Italian / Italiano": ("it-extract.jsonl.gz", "it"),
"Polish / Polski": ("pl-extract.jsonl.gz", "pl"),
"Russian / \D0\A0\D1\83\D1\81\D1\81\D0\BA\D0\B8\D0\B9": ("ru-extract.jsonl.gz", "ru"),
"Spanish / Espa\C3\B1ol": ("es-extract.jsonl.gz", "es"),
}
ETYMOLOGY_TERMS = {
"nl": "Etymologie",
"fr": "\C3\89tymologie",
"de": "Etymologie",
"it": "Etimologia",
"pl": "Etymologia",
"ru": "\D0\AD\D1\82\D0\B8\D0\BC\D0\BE\D0\BB\D0\BE\D0\B3\D0\B8\D1\8F",
"es": "Etimolog\C3\ADa",
}
SIZES = {
"Dutch / Nederlands": "100 MB",
"French / Fran\C3\A7ais": "600 MB",
"German / Deutsch": "300 MB",
"Italian / Italiano": "40 MB",
"Polish / Polski": "100 MB",
"Russian / \D0\A0\D1\83\D1\81\D1\81\D0\BA\D0\B8\D0\B9": "300 MB",
"Spanish / Espa\C3\B1ol": "100 MB",
}
# Status f\C3\BCr Index-Erstellung
index_creation_status = {lang: False for lang in LANGUAGES}
def load_settings():
if SETTINGS_FILE.exists():
with open(SETTINGS_FILE, "r", encoding="utf-8") as f:
try:
return json.load(f)
except json.JSONDecodeError:
return {}
return {}
def save_settings(settings):
with open(SETTINGS_FILE, "w", encoding="utf-8") as f:
json.dump(settings, f)
def create_index(data_path, index_path, lang_code):
index = {}
with gzip.open(data_path, 'rb') as f:
position = 0
while True:
current_position = position
line_bytes = f.readline()
if not line_bytes:
break
try:
line = line_bytes.decode('utf-8')
entry = json.loads(line)
word = entry.get("word", "").lower()
if word and entry.get("lang_code") == lang_code:
if word not in index:
index[word] = []
index[word].append(current_position)
position = f.tell()
except (json.JSONDecodeError, UnicodeDecodeError):
position = f.tell()
continue
with open(index_path, 'wb') as f:
pickle.dump(index, f)
def create_index_process(lang, data_path, index_path, progress_queue):
lang_code = LANGUAGES[lang][1]
try:
create_index(data_path, index_path, lang_code)
progress_queue.put((lang, 100))
except Exception as ex:
print(f"Indexierung fehlgeschlagen f\C3\BCr {lang}: {ex}")
progress_queue.put((lang, None))
finally:
progress_queue.put((lang, "done"))
def load_index(index_path):
with open(index_path, 'rb') as f:
return pickle.load(f)
def search_word_with_index(word, data_path, index_path, lang_code):
word_lower = word.lower()
index = load_index(index_path)
entries = []
if word_lower in index:
positions = index[word_lower]
with gzip.open(data_path, 'rb') as f:
for position in positions:
f.seek(position)
line_bytes = f.readline()
try:
line = line_bytes.decode('utf-8')
entry = json.loads(line)
if entry.get("word", "").lower() == word_lower:
entries.append(entry)
except (json.JSONDecodeError, UnicodeDecodeError):
continue
return entries
def fetch_online_definition(word, language_code="en"):
url = f"https://{language_code}.wiktionary.org/wiki/{word}"
try:
with urllib.request.urlopen(url, timeout=10) as response:
html = response.read().decode('utf-8')
soup = BeautifulSoup(html, "html.parser")
definitions = []
ols = soup.select("ol")
if ols:
definitions = [li.get_text(separator=" ").strip() for li in ols[0].find_all("li", recursive=False)][:5]
if not definitions:
definitions = ["Keine Definition gefunden."]
etymology_term = ETYMOLOGY_TERMS.get(language_code, "Etymology").lower()
etymology = None
for header in soup.find_all(['h2', 'h3', 'h4']):
header_text = header.get_text(strip=True).lower()
if header_text.startswith(etymology_term):
next_node = header.find_next(["p", "ul", "ol"])
if next_node:
etymology = next_node.get_text(separator=" ").strip()
break
return {"definitions": definitions, "etymology": etymology}
except urllib.error.HTTPError as e:
return {"definitions": [f"Fehler beim Abrufen der Seite ({e.code})."], "etymology": None}
except Exception as ex:
return {"definitions": [f"Fehler: {ex}"], "etymology": None}
def cleanup_partial_downloads():
for lang in LANGUAGES.values():
filename = lang[0] + ".part"
part_path = ASSETS_DIR / filename
if part_path.exists():
try:
part_path.unlink()
print(f"Unvollst\C3\A4ndige Datei gel\C3\B6scht: {part_path}")
except Exception as e:
print(f"Fehler beim L\C3\B6schen von {part_path}: {e}")
def download_process(lang, url, temp_path, dest_path, stop_event, progress_queue):
try:
headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.36'}
req = urllib.request.Request(url, headers=headers)
with urllib.request.urlopen(req, timeout=30) as response:
total = int(response.headers.get('Content-Length', 0))
downloaded = 0
with open(temp_path, "wb") as f:
while True:
if stop_event.is_set():
print(f"Download f\C3\BCr {lang} abgebrochen.")
f.close()
try:
if temp_path.exists():
temp_path.unlink()
print(f"Partielle Download-Datei gel\C3\B6scht: {temp_path}")
except Exception as ex:
print(f"Fehler beim L\C3\B6schen von {temp_path}: {ex}")
progress_queue.put((lang, None))
return
chunk = response.read(8192)
if not chunk:
break
f.write(chunk)
downloaded += len(chunk)
if total:
percent = int(downloaded * 100 / total)
progress_queue.put((lang, percent))
if not stop_event.is_set():
temp_path.rename(dest_path)
progress_queue.put((lang, 100))
print(f"Download f\C3\BCr {lang} abgeschlossen.")
except urllib.error.HTTPError as e:
print(f"Download fehlgeschlagen f\C3\BCr {lang}: HTTP Error {e.code}")
progress_queue.put((lang, None))
except Exception as ex:
print(f"Download fehlgeschlagen f\C3\BCr {lang}: {ex}")
progress_queue.put((lang, None))
def remove_special_chars(text):
"""Entfernt Pfeilzeichen und ^ aus dem Text."""
if not text:
return text
# Unicode-Bereiche f\C3\BCr Pfeilzeichen: U+2190 bis U+21FF, U+27A1, etc., plus ^
pattern = r'[\u2190-\u21FF\u27A1\u2B05-\u2B07\u2B95\uFF08]|\^'
return re.sub(pattern, '', text)
def main(page: ft.Page):
cleanup_partial_downloads()
page.title = "earth Lexica"
page.vertical_alignment = ft.MainAxisAlignment.SPACE_BETWEEN
page.padding = 20
page.window_min_width = 400
page.bgcolor = ft.Colors.BLACK
page.fonts = {
"figtree": "Figtree-VariableFont_wght.ttf",
"notosans": "NotoSans-VariableFont_wdth,wght.ttf",
"notosansthai": "NotoSansThai-VariableFont_wdth,wght.ttf",
"notosanssc": "NotoSansSC-VariableFont_wght.ttf",
"notosanskr": "NotoSansKR-VariableFont_wght.ttf",
"notosansjp": "NotoSansJP-VariableFont_wght.ttf",
"notosansrunic": "NotoSansRunic-Regular.ttf",
"notosansolditalic": "NotoSansOldItalic-Regular.ttf",
"notosansgothic": "NotoSansGothic-Regular.ttf",
"notosansarabic": "NotoSansArabic-VariableFont_wdth,wght.ttf",
"notosansarmenian": "NotoSansArmenian-VariableFont_wdth,wght.ttf",
"notosanscoptic": "NotoSansCoptic-Regular.ttf",
"notosansdevanagari": "NotoSansDevanagari-VariableFont_wdth,wght.ttf",
"notosansethiopic": "NotoSansEthiopic-VariableFont_wdth,wght.ttf",
"notosanshebrew": "NotoSansHebrew-VariableFont_wdth,wght.ttf",
"notosanssymbols": "NotoSansSymbols-VariableFont_wght.ttf",
}
light_text_color = ft.Colors.WHITE
dark_text_color = ft.Colors.WHITE
light_theme = ft.Theme(
color_scheme_seed=ft.Colors.BLUE,
text_theme=ft.TextTheme(
body_large=ft.TextStyle(font_family="figtree", color=light_text_color),
body_medium=ft.TextStyle(font_family="figtree", color=light_text_color),
label_large=ft.TextStyle(font_family="figtree", color=light_text_color),
),
icon_theme=ft.IconTheme(color=light_text_color)
)
dark_theme = ft.Theme(
color_scheme=ft.ColorScheme(
on_surface=ft.Colors.WHITE,
primary=ft.Colors.BLUE,
background=ft.Colors.BLACK,
),
text_theme=ft.TextTheme(
body_large=ft.TextStyle(font_family="figtree", color=dark_text_color),
body_medium=ft.TextStyle(font_family="figtree", color=dark_text_color),
label_large=ft.TextStyle(font_family="figtree", color=dark_text_color),
),
icon_theme=ft.IconTheme(color=dark_text_color)
)
page.theme = light_theme
page.dark_theme = dark_theme
page.theme_mode = "dark"
settings = load_settings()
if "has_been_opened" not in settings:
settings["has_been_opened"] = True
save_settings(settings)
selected_language = settings.get("selected_language", "German / Deutsch")
download_progress = {lang: None for lang in LANGUAGES}
download_processes = {}
download_stop_flags = {}
language_controls_cache = {}
result_scroll = ft.ListView(expand=True, spacing=10, padding=10, auto_scroll=False)
# Queue f\C3\BCr Fortschrittsupdates (nun threading-kompatibel)
progress_queue = queue.Queue()
def update_progress_from_queue():
updated = False
while True:
try:
lang, value = progress_queue.get_nowait()
if value == "done":
index_creation_status[lang] = False
download_progress[lang] = None
if lang == selected_language and len(result_scroll.controls) == 1 and isinstance(result_scroll.controls[0], ft.Text) and result_scroll.controls[0].value in ["processing...", "Processing after download can take up to 10 minutes."]:
query = search_field.value.strip()
if query:
on_search(None)
else:
result_scroll.controls.clear()
result_scroll.update()
elif value is not None:
download_progress[lang] = value
else:
download_progress[lang] = None
updated = True
except queue.Empty:
break
if updated:
update_language_list(page)
page.update()
def get_font_for_language(lang):
font_map = {
"Thai": "notosansthai",
"Chinese": "notosanssc",
"Korean": "notosanskr",
"Japanese": "notosansjp",
"Russian / \D0\A0\D1\83\D1\81\D1\81\D0\BA\D0\B8\D0\B9": "notosans"
}
return font_map.get(lang, "figtree")
def apply_fallback_fonts(text):
text = remove_special_chars(text) # Pfeilzeichen und ^ entfernen, bevor Spans erstellt werden
spans = []
runic_chars = set(chr(i) for i in range(0x16A0, 0x16FF+1))
old_italic_chars = set(chr(i) for i in range(0x10300, 0x1032F+1))
gothic_chars = set(chr(i) for i in range(0x10330, 0x1034F+1))
arabic_chars = set(chr(i) for i in range(0x0600, 0x06FF+1))
armenian_chars = set(chr(i) for i in range(0x0530, 0x058F+1))
coptic_chars = set(chr(i) for i in range(0x2C80, 0x2CFF+1))
devanagari_chars = set(chr(i) for i in range(0x0900, 0x097F+1))
ethiopic_chars = set(chr(i) for i in range(0x1200, 0x137F+1))
hebrew_chars = set(chr(i) for i in range(0x0590, 0x05FF+1))
symbols_chars = set(chr(i) for i in range(0x2600, 0x26FF+1)) | set(chr(i) for i in range(0x2700, 0x27BF+1))
for char in text:
if char in runic_chars:
spans.append(ft.TextSpan(char, style=ft.TextStyle(font_family="notosansrunic", color=ft.Colors.WHITE)))
elif char in old_italic_chars:
spans.append(ft.TextSpan(char, style=ft.TextStyle(font_family="notosansolditalic", color=ft.Colors.WHITE)))
elif char in gothic_chars:
spans.append(ft.TextSpan(char, style=ft.TextStyle(font_family="notosansgothic", color=ft.Colors.WHITE)))
elif char in arabic_chars:
spans.append(ft.TextSpan(char, style=ft.TextStyle(font_family="notosansarabic", color=ft.Colors.WHITE)))
elif char in armenian_chars:
spans.append(ft.TextSpan(char, style=ft.TextStyle(font_family="notosansarmenian", color=ft.Colors.WHITE)))
elif char in coptic_chars:
spans.append(ft.TextSpan(char, style=ft.TextStyle(font_family="notosanscoptic", color=ft.Colors.WHITE)))
elif char in devanagari_chars:
spans.append(ft.TextSpan(char, style=ft.TextStyle(font_family="notosansdevanagari", color=ft.Colors.WHITE)))
elif char in ethiopic_chars:
spans.append(ft.TextSpan(char, style=ft.TextStyle(font_family="notosansethiopic", color=ft.Colors.WHITE)))
elif char in hebrew_chars:
spans.append(ft.TextSpan(char, style=ft.TextStyle(font_family="notosanshebrew", color=ft.Colors.WHITE)))
elif char in symbols_chars:
spans.append(ft.TextSpan(char, style=ft.TextStyle(font_family="notosanssymbols", color=ft.Colors.WHITE)))
else:
spans.append(ft.TextSpan(char, style=ft.TextStyle(font_family="notosans", color=ft.Colors.WHITE)))
return spans
def on_search(e):
query = search_field.value.strip()
if not query:
return
filename, lang_code = LANGUAGES[selected_language]
data_path = ASSETS_DIR / filename
index_path = INDEX_DIR / f"{filename}.index.pkl"
if not data_path.exists():
result_scroll.controls.clear()
result_scroll.update()
message = "download required"
result_scroll.controls.append(ft.Text(message, size=18, selectable=True, font_family="figtree", color=ft.Colors.WHITE))
page.update()
return
result_scroll.controls.clear()
result_scroll.update()
progress_ring_container.visible = True
page.update()
font_family = get_font_for_language(selected_language)
if not index_path.exists():
if index_creation_status[selected_language]:
result_scroll.controls.append(ft.Text("Processing after download can take a few minutes.", size=18, selectable=True, font_family="figtree", color=ft.Colors.WHITE))
else:
index_creation_status[selected_language] = True
update_language_list(page)
p = threading.Thread(target=create_index_process, args=(selected_language, data_path, index_path, progress_queue))
p.start()
result_scroll.controls.append(ft.Text("processing...", size=18, selectable=True, font_family="figtree", color=ft.Colors.WHITE))
progress_ring_container.visible = False
page.update()
return
entries = search_word_with_index(query, data_path, index_path, lang_code)
result_scroll.controls.clear()
result_scroll.update()
if entries:
for entry_idx, entry in enumerate(entries):
if entry_idx > 0:
result_scroll.controls.append(ft.Container(height=20))
result_scroll.controls.append(ft.Text(" ", size=18, italic=True, weight=600, selectable=True, font_family="figtree", color=ft.Colors.WHITE))
word = remove_special_chars(entry.get("word", ""))
result_scroll.controls.append(ft.Text(value=word, size=27, weight=900, selectable=True, font_family=font_family, color=ft.Colors.WHITE))
pos = entry.get("pos")
if pos:
pos = remove_special_chars(pos)
result_scroll.controls.append(ft.Text(f"({pos})", size=18, selectable=True, font_family=font_family, color=ft.Colors.GREY_400))
seen_glosses = []
for sense in entry.get("senses", []):
gloss = sense.get("glosses", [])
if gloss:
gloss_text = remove_special_chars(gloss[0])
if gloss_text not in seen_glosses:
seen_glosses.append(gloss_text)
for idx, gloss_text in enumerate(seen_glosses, start=1):
result_scroll.controls.append(ft.Text(value=f"{idx}. {gloss_text}", size=18, selectable=True, font_family=font_family, color=ft.Colors.WHITE))
ety_text = None
if "etymology_text" in entry and entry["etymology_text"]:
ety_text = entry["etymology_text"]
elif "etymology_texts" in entry and entry["etymology_texts"]:
ety_text = "\n".join(entry["etymology_texts"])
elif "etymology" in entry and entry["etymology"]:
ety_text = entry["etymology"]
elif "sections" in entry:
for sec in entry["sections"]:
if sec.get("section", "").lower() in ["etymology", "etymologie"] and "text" in sec:
ety_text = sec["text"]
break
if ety_text:
ety_text = remove_special_chars(ety_text)
result_scroll.controls.append(ft.Container(height=10))
result_scroll.controls.append(ft.Text(value="Etymology:", size=18, italic=True, weight=600, selectable=True, font_family="notosans", color=ft.Colors.WHITE))
result_scroll.controls.append(ft.Text(
spans=apply_fallback_fonts(ety_text),
size=16,
weight=400,
selectable=True,
))
else:
query_cleaned = remove_special_chars(query)
result_scroll.controls.append(ft.Text(value=query_cleaned, size=30, weight=900, selectable=True, font_family=font_family, color=ft.Colors.WHITE))
result_scroll.controls.append(ft.Text(value="word not found", size=18, selectable=True, font_family="figtree", color=ft.Colors.WHITE))
progress_ring_container.visible = False
page.update()
font_family = get_font_for_language(selected_language)
search_field = ft.TextField(
hint_text="one word search...",
expand=True,
border_radius=50,
filled=True,
autofocus=True,
on_submit=on_search,
text_style=ft.TextStyle(font_family=font_family, color=ft.Colors.WHITE),
hint_style=ft.TextStyle(color=ft.Colors.GREY_400),
border_color=None,
focused_border_color=None,
border_width=0,
content_padding=ft.padding.symmetric(vertical=24, horizontal=20),
bgcolor=ft.Colors.GREY_900,
cursor_color=ft.Colors.BLUE,
)
language_controls = []
language_list_view = ft.ListView(controls=language_controls, expand=True, spacing=0, padding=ft.padding.only(top=10, bottom=10), height=330, cache_extent=1000)
def is_language_downloaded(lang):
filename, _ = LANGUAGES[lang]
return (ASSETS_DIR / filename).exists()
def stop_and_delete_download(lang, e=None):
print(f"Stoppe und l\C3\B6sche Download f\C3\BCr {lang}...")
filename, _ = LANGUAGES[lang]
part_path = ASSETS_DIR / (filename + ".part")
if lang in download_processes and download_processes[lang].is_alive():
print(f"Download-Thread f\C3\BCr {lang} l\C3\A4uft, setze Stop-Flag...")
if lang in download_stop_flags:
download_stop_flags[lang].set()
download_processes[lang].join(timeout=3)
if download_processes[lang].is_alive():
print(f"Download-Thread f\C3\BCr {lang} konnte nicht rechtzeitig gestoppt werden.")
else:
print(f"Download-Thread f\C3\BCr {lang} erfolgreich gestoppt.")
time.sleep(0.5)
try:
if part_path.exists():
part_path.unlink()
print(f"Partielle Download-Datei gel\C3\B6scht: {part_path}")
else:
print(f"Keine .part-Datei gefunden f\C3\BCr {lang}.")
except Exception as ex:
print(f"Fehler beim L\C3\B6schen von {part_path}: {ex}")
download_progress[lang] = None
download_processes.pop(lang, None)
download_stop_flags.pop(lang, None)
update_language_list(page)
page.update()
print(f"Download f\C3\BCr {lang} abgebrochen und UI aktualisiert.")
def delete_language(lang, e=None):
print(f"L\C3\B6sche Sprache {lang}...")
filename, _ = LANGUAGES[lang]
file_path = ASSETS_DIR / filename
part_path = ASSETS_DIR / (filename + ".part")
stop_and_delete_download(lang)
try:
if file_path.exists():
file_path.unlink()
print(f"Datei gel\C3\B6scht: {file_path}")
except Exception as ex:
print(f"Konnte {file_path} nicht l\C3\B6schen: {ex}")
download_progress[lang] = None
download_processes.pop(lang, None)
download_stop_flags.pop(lang, None)
index_creation_status[lang] = False
update_language_list(page)
page.update()
print(f"Sprache {lang} gel\C3\B6scht und UI aktualisiert.")
def download_language(lang, e=None):
if lang in download_processes and download_processes[lang].is_alive():
print(f"Download f\C3\BCr {lang} l\C3\A4uft bereits.")
return
print(f"Starte Download f\C3\BCr {lang}...")
download_progress[lang] = 0
update_language_list(page)
page.update()
filename, lang_code = LANGUAGES[lang]
url = f"https://{filename}"
dest_path = ASSETS_DIR / filename
temp_path = ASSETS_DIR / (filename + ".part")
stop_event = threading.Event()
download_stop_flags[lang] = stop_event
p = threading.Thread(target=download_process, args=(lang, url, temp_path, dest_path, stop_event, progress_queue))
download_processes[lang] = p
p.start()
print(f"Download-Thread f\C3\BCr {lang} gestartet.")
def check_and_index():
while p.is_alive():
update_progress_from_queue()
time.sleep(1)
download_processes.pop(lang, None)
update_progress_from_queue()
if download_progress[lang] == 100 and not stop_event.is_set():
index_path = INDEX_DIR / f"{filename}.index.pkl"
source_dir = Path(__file__).parent / 'assets'
source_index = source_dir / f"{filename}.index.pkl"
if source_index.exists():
try:
shutil.copy(source_index, index_path)
print(f"Index kopiert f\C3\BCr {lang} von {source_index} nach {index_path}")
except Exception as ex:
print(f"Fehler beim Kopieren des Index f\C3\BCr {lang}: {ex}")
if not index_path.exists():
index_creation_status[lang] = True
update_language_list(page)
page.update()
index_p = threading.Thread(target=create_index_process, args=(lang, dest_path, index_path, progress_queue))
index_p.start()
while index_p.is_alive():
update_progress_from_queue()
time.sleep(1)
update_progress_from_queue()
threading.Thread(target=check_and_index, daemon=True).start()
def update_language_progress(lang, percent, page):
download_progress[lang] = percent
update_language_list(page)
page.update()
def select_language(e):
nonlocal selected_language
selected_language = e.control.data
settings["selected_language"] = selected_language
save_settings(settings)
font_family = get_font_for_language(selected_language)
search_field.text_style = ft.TextStyle(font_family=font_family, color=ft.Colors.WHITE)
search_field.value = ""
result_scroll.controls.clear()
update_language_list(page)
custom_sheet.open = False
page.update()
def update_language_list(page):
language_controls.clear()
langs_sorted = sorted(LANGUAGES.keys())
for i, lang in enumerate(langs_sorted):
is_selected = (lang == selected_language)
filename, lang_code = LANGUAGES[lang]
progress = download_progress[lang]
progress_text = ""
if progress is not None and progress < 100:
progress_text = f"{progress}%"
elif progress == 100:
progress_text = " "
downloaded = is_language_downloaded(lang)
download_running = (progress is not None and progress < 100 and lang in download_processes)
show_trash = downloaded or download_running
right_icon = ft.IconButton(
icon=ft.Icons.DELETE if show_trash else ft.Icons.DOWNLOAD_FOR_OFFLINE_OUTLINED,
icon_color=ft.Colors.WHITE,
tooltip="Abbrechen und l\C3\B6schen" if download_running else f"{lang} l\C3\B6schen" if downloaded else f"{lang} herunterladen",
on_click=partial(stop_and_delete_download, lang) if download_running else partial(delete_language, lang) if downloaded else partial(download_language, lang),
disabled=False
)
progress_text_control = ft.Text(progress_text, size=14, font_family="figtree", color=ft.Colors.WHITE)
icon_row = []
if progress_text:
icon_row.append(progress_text_control)
text_color = ft.Colors.BLUE if is_selected else ft.Colors.WHITE
if lang == "Russian / \D0\A0\D1\83\D1\81\D1\81\D0\BA\D0\B8\D0\B9":
text_control = ft.Text(
spans=[
ft.TextSpan(f"{SIZES[lang]} ", ft.TextStyle(font_family="figtree", color=text_color)),
ft.TextSpan("Russian / ", ft.TextStyle(font_family="figtree", color=text_color)),
ft.TextSpan("\D0\A0\D1\83\D1\81\D1\81\D0\BA\D0\B8\D0\B9", ft.TextStyle(font_family="notosans", color=text_color))
],
size=16,
expand=True,
overflow=ft.TextOverflow.ELLIPSIS,
no_wrap=True
)
else:
text_control = ft.Text(f"{SIZES[lang]} {lang}", font_family="figtree", size=16, color=text_color, expand=True, overflow=ft.TextOverflow.ELLIPSIS, no_wrap=True)
index_path = INDEX_DIR / f"{filename}.index.pkl"
if downloaded and not index_path.exists() and index_creation_status[lang]:
progress_ring = ft.ProgressRing(width=20, height=20, color=ft.Colors.BLUE)
processing_text = ft.Text("processing", size=14, font_family="figtree", color=ft.Colors.WHITE)
text_row = ft.Row([text_control, progress_ring, processing_text], alignment=ft.MainAxisAlignment.START)
else:
text_row = text_control
tile_row = ft.Row([
text_row,
ft.Container(width=20),
ft.Row(icon_row, spacing=5),
right_icon
], alignment=ft.MainAxisAlignment.SPACE_BETWEEN, vertical_alignment=ft.CrossAxisAlignment.CENTER)
list_tile = ft.ListTile(
title=tile_row,
on_click=select_language,
data=lang,
content_padding=ft.padding.only(left=5, right=10),
dense=True,
)
language_controls.append(list_tile)
if i < len(langs_sorted) - 1:
language_controls.append(ft.Container(ft.Divider(height=1, color=ft.Colors.GREY_800), padding=ft.padding.only(left=2)))
language_controls.append(
ft.Container(
ft.Text(
spans=[
ft.TextSpan("Data from ", style=ft.TextStyle(color=ft.Colors.GREY_400)),
ft.TextSpan(
"Wiktionary",
ft.TextStyle(decoration=ft.TextDecoration.NONE, color=ft.Colors.GREY_400),
on_click=lambda e: page.launch_url("https://de.wiktionary.org/"),
),
ft.TextSpan(", licensed under CC BY-SA 4.0 and GFDL.", style=ft.TextStyle(color=ft.Colors.GREY_400)),
],
size=12,
font_family="figtree",
),
padding=ft.padding.only(top=10, left=5),
)
)
language_controls.append(
ft.Container(
ft.Text(
spans=[
ft.TextSpan(
"Click this sentence to contact us.",
ft.TextStyle(decoration=ft.TextDecoration.NONE, color=ft.Colors.GREY_400),
on_click=lambda e: page.launch_url("https://www.earth-website.com/contact"),
),
],
size=12,
font_family="figtree",
),
padding=ft.padding.only(top=10, left=5),
)
)
language_list_view.controls = language_controls
language_list_view.update()
custom_sheet = ft.BottomSheet(
ft.Container(
content=ft.Column(
[
ft.Container(
ft.Text("Language", size=20, weight=700, font_family="figtree", color=ft.Colors.WHITE),
padding=ft.padding.only(left=5)
),
language_list_view,
],
horizontal_alignment=ft.CrossAxisAlignment.START,
tight=True,
spacing=10,
),
padding=20,
alignment=ft.alignment.top_left,
bgcolor=ft.Colors.GREY_900,
border_radius=ft.border_radius.only(top_left=20, top_right=20),
),
open=False,
bgcolor=ft.Colors.BLACK,
)
page.overlay.append(custom_sheet)
def on_download_click(e):
update_language_list(page)
custom_sheet.open = True
page.update()
top_bar = ft.Row(
[
ft.Container(expand=True),
ft.Container(
content=ft.Text(
"Download",
color=ft.Colors.BLUE,
size=17,
font_family="figtree"
),
on_click=on_download_click,
ink=False,
tooltip="select language",
padding=ft.padding.only(left=8, right=8, top=10)
)
],
vertical_alignment=ft.CrossAxisAlignment.CENTER,
height=50,
)
progress_ring_container = ft.Container(
visible=False,
content=ft.Image(src="video.gif", width=100, height=100, fit=ft.ImageFit.CONTAIN),
alignment=ft.alignment.top_left,
padding=ft.padding.only(left=-10, top=10),
)
result_stack = ft.Stack([result_scroll, progress_ring_container], expand=True)
made_by_container = ft.Container(
content=ft.Text(
"made by earth.",
size=12,
font_family="figtree",
color=ft.Colors.GREY_400,
text_align=ft.TextAlign.CENTER
),
on_click=lambda e: page.launch_url("https://www.earth-website.com"),
ink=False,
alignment=ft.alignment.center,
padding=ft.padding.only(top=5, bottom=0),
)
page.add(
ft.Column(
[ft.Container(height=20), top_bar, result_stack, ft.Container(search_field, padding=ft.padding.only(top=10)), made_by_container],
expand=True,
)
)
update_language_list(page)
for lang in LANGUAGES:
filename, lang_code = LANGUAGES[lang]
data_path = ASSETS_DIR / filename
index_path = INDEX_DIR / f"{filename}.index.pkl"
if data_path.exists() and not index_path.exists():
index_creation_status[lang] = True
update_language_list(page)
page.update()
p = threading.Thread(target=create_index_process, args=(lang, data_path, index_path, progress_queue))
p.start()
def check_queue():
update_progress_from_queue()
page.update()
threading.Timer(1, check_queue).start()
check_queue()
if __name__ == "__main__":
ft.app(main)