v1.8.58: results context menu — Copy Cell/Row/All As 16 formats
Right-click on the results table now offers:
- Copy Cell — single cell value
- Copy Row — tab-delimited row
- Copy Row As → 16-format submenu
- Copy All As → 16-format submenu

Formats: Excel CSV, Tab-delimited, HTML, XML, SQL INSERTs, SQL INSERT IGNOREs,
SQL REPLACEs, SQL DELETE/INSERTs, SQL UPDATEs, LaTeX, Textile, Jira, PHP Array,
Markdown, JSON, JSON Lines

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
15
core/i18n.py
15
core/i18n.py
@@ -340,6 +340,11 @@ _EN = {
|
|||||||
"tree_no_tables": "(no tables)",
|
"tree_no_tables": "(no tables)",
|
||||||
"tree_no_columns": "(no columns)",
|
"tree_no_columns": "(no columns)",
|
||||||
"tree_connected": "Connected",
|
"tree_connected": "Connected",
|
||||||
|
# Results context menu
|
||||||
|
"res_copy_cell": "Copy Cell",
|
||||||
|
"res_copy_row": "Copy Row",
|
||||||
|
"res_copy_row_as": "Copy Row As",
|
||||||
|
"res_copy_all_as": "Copy All As",
|
||||||
"query_exported": "Exported to {path}",
|
"query_exported": "Exported to {path}",
|
||||||
|
|
||||||
# Redis tab
|
# Redis tab
|
||||||
@@ -766,6 +771,11 @@ _RU = {
|
|||||||
"tree_no_tables": "(нет таблиц)",
|
"tree_no_tables": "(нет таблиц)",
|
||||||
"tree_no_columns": "(нет колонок)",
|
"tree_no_columns": "(нет колонок)",
|
||||||
"tree_connected": "Подключено",
|
"tree_connected": "Подключено",
|
||||||
|
# Results context menu
|
||||||
|
"res_copy_cell": "Копировать ячейку",
|
||||||
|
"res_copy_row": "Копировать строку",
|
||||||
|
"res_copy_row_as": "Копировать строку как",
|
||||||
|
"res_copy_all_as": "Копировать всё как",
|
||||||
|
|
||||||
# Redis tab
|
# Redis tab
|
||||||
"redis_clear": "Очистить",
|
"redis_clear": "Очистить",
|
||||||
@@ -1191,6 +1201,11 @@ _ZH = {
|
|||||||
"tree_no_tables": "(无表)",
|
"tree_no_tables": "(无表)",
|
||||||
"tree_no_columns": "(无列)",
|
"tree_no_columns": "(无列)",
|
||||||
"tree_connected": "已连接",
|
"tree_connected": "已连接",
|
||||||
|
# Results context menu
|
||||||
|
"res_copy_cell": "复制单元格",
|
||||||
|
"res_copy_row": "复制行",
|
||||||
|
"res_copy_row_as": "复制行为",
|
||||||
|
"res_copy_all_as": "全部复制为",
|
||||||
|
|
||||||
# Redis tab
|
# Redis tab
|
||||||
"redis_clear": "清除",
|
"redis_clear": "清除",
|
||||||
|
|||||||
@@ -3,6 +3,9 @@ Query tab — SQL database interaction with tree explorer, editor, results grid,
|
|||||||
"""
|
"""
|
||||||
|
|
||||||
import csv
|
import csv
|
||||||
|
import io
|
||||||
|
import json
|
||||||
|
import re
|
||||||
import time
|
import time
|
||||||
import threading
|
import threading
|
||||||
import tkinter as tk
|
import tkinter as tk
|
||||||
@@ -77,6 +80,25 @@ def _apply_db_tree_theme():
|
|||||||
|
|
||||||
|
|
||||||
class QueryTab(ctk.CTkFrame):
|
class QueryTab(ctk.CTkFrame):
|
||||||
|
# Export formats offered by the results context menu.
# Each entry is (menu label, key); the key selects the `_fmt_<key>`
# converter method dispatched by _format_data.
_FORMAT_OPTIONS = [
    ("Excel CSV", "csv"),
    ("Delimited Text (Tab)", "tsv"),
    ("HTML Table", "html"),
    ("XML", "xml"),
    ("SQL INSERTs", "sql_insert"),
    ("SQL INSERT IGNOREs", "sql_insert_ignore"),
    ("SQL REPLACEs", "sql_replace"),
    ("SQL DELETE/INSERTs", "sql_delete_insert"),
    ("SQL UPDATEs", "sql_update"),
    ("LaTeX", "latex"),
    ("Textile", "textile"),
    ("Jira Textile", "jira"),
    ("PHP Array", "php"),
    ("Markdown", "markdown"),
    ("JSON", "json"),
    ("JSON Lines", "jsonl"),
]
|
||||||
|
|
||||||
def __init__(self, master, store):
|
def __init__(self, master, store):
|
||||||
super().__init__(master, fg_color="transparent")
|
super().__init__(master, fg_color="transparent")
|
||||||
self._current_alias: str | None = None
|
self._current_alias: str | None = None
|
||||||
@@ -216,6 +238,7 @@ class QueryTab(ctk.CTkFrame):
|
|||||||
yscrollcommand=self._res_yscroll.set,
|
yscrollcommand=self._res_yscroll.set,
|
||||||
)
|
)
|
||||||
self._results_tree.pack(fill="both", expand=True)
|
self._results_tree.pack(fill="both", expand=True)
|
||||||
|
self._results_tree.bind("<Button-3>", self._on_results_rightclick)
|
||||||
|
|
||||||
self._res_xscroll.config(command=self._results_tree.xview)
|
self._res_xscroll.config(command=self._results_tree.xview)
|
||||||
self._res_yscroll.config(command=self._results_tree.yview)
|
self._res_yscroll.config(command=self._results_tree.yview)
|
||||||
@@ -586,6 +609,231 @@ class QueryTab(ctk.CTkFrame):
|
|||||||
def _insert_text(self, text: str):
    """Insert *text* into the SQL editor at the current cursor position."""
    self._editor.insert("insert", text)
||||||
|
|
||||||
|
# ── Results context menu ────────────────────────────────────────

def _on_results_rightclick(self, event):
    """Context menu on the results Treeview — copy cell/row/all in 16 formats.

    Builds the menu lazily from the clicked position: a cell entry when a
    concrete cell was hit, a row entry when a row was hit, and the
    "Copy Row As"/"Copy All As" submenus driven by _FORMAT_OPTIONS.

    Fix: tk_popup is now wrapped in try/finally with grab_release() —
    without it the pointer grab can be left active (notably on X11) if
    popping the menu raises.
    """
    if not self._columns or not self._results:
        return

    row_iid = self._results_tree.identify_row(event.y)
    col_id = self._results_tree.identify_column(event.x)

    if row_iid:
        self._results_tree.selection_set(row_iid)

    row_values = None
    if row_iid:
        row_values = list(self._results_tree.item(row_iid, "values"))

    # Resolve the clicked cell; identify_column returns "#<1-based index>".
    cell_value = None
    if row_values and col_id:
        try:
            col_index = int(col_id.replace("#", "")) - 1
            if 0 <= col_index < len(row_values):
                cell_value = row_values[col_index]
        except (ValueError, IndexError):
            pass

    table_name = self._extract_table_name()

    _menu_kw = dict(tearoff=0, bg="#2b2b2b", fg="#dcdcdc",
                    activebackground="#3b82f6", activeforeground="#ffffff",
                    font=("Segoe UI", 10))
    menu = tk.Menu(self, **_menu_kw)

    if cell_value is not None:
        menu.add_command(
            label=t("res_copy_cell"),
            command=lambda: self._copy_to_clipboard(str(cell_value)),
        )

    if row_values is not None:
        menu.add_command(
            label=t("res_copy_row"),
            command=lambda: self._copy_to_clipboard(
                "\t".join(str(v) for v in row_values)),
        )

    if cell_value is not None or row_values is not None:
        menu.add_separator()

    if row_values is not None:
        row_sub = tk.Menu(menu, **_menu_kw)
        for label, fmt_key in self._FORMAT_OPTIONS:
            # Bind fmt_key/row_values as defaults to avoid late-binding closures.
            row_sub.add_command(
                label=label,
                command=lambda fk=fmt_key, rv=row_values: self._copy_to_clipboard(
                    self._format_data(fk, self._columns, [rv], table_name)),
            )
        menu.add_cascade(label=t("res_copy_row_as"), menu=row_sub)

    all_sub = tk.Menu(menu, **_menu_kw)
    all_rows = [list(self._results_tree.item(iid, "values"))
                for iid in self._results_tree.get_children()]
    for label, fmt_key in self._FORMAT_OPTIONS:
        all_sub.add_command(
            label=label,
            command=lambda fk=fmt_key, ar=all_rows: self._copy_to_clipboard(
                self._format_data(fk, self._columns, ar, table_name)),
        )
    menu.add_cascade(label=t("res_copy_all_as"), menu=all_sub)

    try:
        menu.tk_popup(event.x_root, event.y_root)
    finally:
        menu.grab_release()  # per tkinter docs: always release the grab
||||||
|
|
||||||
|
def _extract_table_name(self) -> str:
|
||||||
|
sql = self._editor.get("0.0", "end").strip()
|
||||||
|
m = re.search(r'\bFROM\s+`?(\w+)`?', sql, re.IGNORECASE)
|
||||||
|
return m.group(1) if m else "table"
|
||||||
|
|
||||||
|
# ── Format converters ──────────────────────────────────────────
|
||||||
|
|
||||||
|
def _format_data(self, fmt: str, columns: list[str],
|
||||||
|
rows: list[list], table: str) -> str:
|
||||||
|
formatter = getattr(self, f"_fmt_{fmt}", None)
|
||||||
|
if formatter:
|
||||||
|
return formatter(columns, rows, table)
|
||||||
|
return "\t".join(columns) + "\n" + "\n".join(
|
||||||
|
"\t".join(str(v) for v in r) for r in rows)
|
||||||
|
|
||||||
|
def _fmt_csv(self, cols, rows, _t):
|
||||||
|
buf = io.StringIO()
|
||||||
|
w = csv.writer(buf)
|
||||||
|
w.writerow(cols)
|
||||||
|
for r in rows:
|
||||||
|
w.writerow(r)
|
||||||
|
return buf.getvalue()
|
||||||
|
|
||||||
|
def _fmt_tsv(self, cols, rows, _t):
|
||||||
|
lines = ["\t".join(cols)]
|
||||||
|
for r in rows:
|
||||||
|
lines.append("\t".join(str(v) for v in r))
|
||||||
|
return "\n".join(lines)
|
||||||
|
|
||||||
|
def _fmt_html(self, cols, rows, _t):
|
||||||
|
h = "<table>\n<tr>" + "".join(f"<th>{c}</th>" for c in cols) + "</tr>\n"
|
||||||
|
for r in rows:
|
||||||
|
h += "<tr>" + "".join(f"<td>{v}</td>" for v in r) + "</tr>\n"
|
||||||
|
h += "</table>"
|
||||||
|
return h
|
||||||
|
|
||||||
|
def _fmt_xml(self, cols, rows, _t):
|
||||||
|
lines = ['<?xml version="1.0" encoding="UTF-8"?>', "<results>"]
|
||||||
|
for r in rows:
|
||||||
|
lines.append(" <row>")
|
||||||
|
for c, v in zip(cols, r):
|
||||||
|
tag = re.sub(r'[^a-zA-Z0-9_]', '_', c)
|
||||||
|
lines.append(f" <{tag}>{v}</{tag}>")
|
||||||
|
lines.append(" </row>")
|
||||||
|
lines.append("</results>")
|
||||||
|
return "\n".join(lines)
|
||||||
|
|
||||||
|
def _fmt_sql_insert(self, cols, rows, tbl):
    """One INSERT statement per row; identifiers backtick-quoted (MySQL style)."""
    col_list = ", ".join(f"`{c}`" for c in cols)
    statements = []
    for row in rows:
        values = ", ".join(self._sql_val(v) for v in row)
        statements.append(
            f"INSERT INTO `{tbl}` ({col_list}) VALUES ({values});")
    return "\n".join(statements)
||||||
|
|
||||||
|
def _fmt_sql_insert_ignore(self, cols, rows, tbl):
    """One INSERT IGNORE statement per row; identifiers backtick-quoted."""
    col_list = ", ".join(f"`{c}`" for c in cols)
    statements = []
    for row in rows:
        values = ", ".join(self._sql_val(v) for v in row)
        statements.append(
            f"INSERT IGNORE INTO `{tbl}` ({col_list}) VALUES ({values});")
    return "\n".join(statements)
||||||
|
|
||||||
|
def _fmt_sql_replace(self, cols, rows, tbl):
    """One REPLACE statement per row; identifiers backtick-quoted."""
    col_list = ", ".join(f"`{c}`" for c in cols)
    statements = []
    for row in rows:
        values = ", ".join(self._sql_val(v) for v in row)
        statements.append(
            f"REPLACE INTO `{tbl}` ({col_list}) VALUES ({values});")
    return "\n".join(statements)
||||||
|
|
||||||
|
def _fmt_sql_delete_insert(self, cols, rows, tbl):
    """DELETE-then-INSERT pair per row, keyed on the first column.

    NOTE(review): assumes the first result column is the row's key —
    confirm against how the results grid orders columns.
    """
    col_list = ", ".join(f"`{c}`" for c in cols)
    statements = []
    for row in rows:
        key_pred = f"`{cols[0]}` = {self._sql_val(row[0])}"
        statements.append(f"DELETE FROM `{tbl}` WHERE {key_pred};")
        values = ", ".join(self._sql_val(v) for v in row)
        statements.append(
            f"INSERT INTO `{tbl}` ({col_list}) VALUES ({values});")
    return "\n".join(statements)
||||||
|
|
||||||
|
def _fmt_sql_update(self, cols, rows, tbl):
    """One UPDATE statement per row, keyed on the first column.

    NOTE(review): assumes the first result column is the row's key —
    confirm against callers.

    Fix: with a single-column result the original emitted
    "UPDATE … SET  WHERE …" (empty SET list — invalid SQL); in that
    degenerate case the key column itself is now used as the SET clause,
    keeping the statement syntactically valid and a no-op.
    """
    statements = []
    for row in rows:
        pairs = list(zip(cols[1:], row[1:]))
        if not pairs:  # single-column result set
            pairs = list(zip(cols[:1], row[:1]))
        sets = ", ".join(f"`{c}` = {self._sql_val(v)}" for c, v in pairs)
        where = f"`{cols[0]}` = {self._sql_val(row[0])}"
        statements.append(f"UPDATE `{tbl}` SET {sets} WHERE {where};")
    return "\n".join(statements)
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _sql_val(v) -> str:
|
||||||
|
if v is None or str(v) == "NULL":
|
||||||
|
return "NULL"
|
||||||
|
s = str(v)
|
||||||
|
try:
|
||||||
|
float(s)
|
||||||
|
return s
|
||||||
|
except ValueError:
|
||||||
|
return "'" + s.replace("'", "''") + "'"
|
||||||
|
|
||||||
|
def _fmt_latex(self, cols, rows, _t):
|
||||||
|
align = "|".join("l" * len(cols))
|
||||||
|
lines = [f"\\begin{{tabular}}{{|{align}|}}", "\\hline"]
|
||||||
|
lines.append(" & ".join(cols) + " \\\\")
|
||||||
|
lines.append("\\hline")
|
||||||
|
for r in rows:
|
||||||
|
lines.append(" & ".join(str(v) for v in r) + " \\\\")
|
||||||
|
lines.append("\\hline")
|
||||||
|
lines.append("\\end{tabular}")
|
||||||
|
return "\n".join(lines)
|
||||||
|
|
||||||
|
def _fmt_textile(self, cols, rows, _t):
|
||||||
|
lines = ["|_. " + " |_. ".join(cols) + " |"]
|
||||||
|
for r in rows:
|
||||||
|
lines.append("| " + " | ".join(str(v) for v in r) + " |")
|
||||||
|
return "\n".join(lines)
|
||||||
|
|
||||||
|
def _fmt_jira(self, cols, rows, _t):
|
||||||
|
lines = ["|| " + " || ".join(cols) + " ||"]
|
||||||
|
for r in rows:
|
||||||
|
lines.append("| " + " | ".join(str(v) for v in r) + " |")
|
||||||
|
return "\n".join(lines)
|
||||||
|
|
||||||
|
def _fmt_php(self, cols, rows, _t):
    """Render rows as a PHP array-of-associative-arrays literal.

    NOTE(review): column names are interpolated as keys without
    escaping — assumes identifiers contain no quotes; confirm.
    """
    body = []
    for row in rows:
        entries = ", ".join(
            f"'{c}' => {self._php_val(v)}" for c, v in zip(cols, row))
        body.append(f"    [{entries}],")
    return "\n".join(["[", *body, "]"])
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _php_val(v) -> str:
|
||||||
|
if v is None or str(v) == "NULL":
|
||||||
|
return "null"
|
||||||
|
s = str(v)
|
||||||
|
try:
|
||||||
|
float(s)
|
||||||
|
return s
|
||||||
|
except ValueError:
|
||||||
|
return "'" + s.replace("'", "\\'") + "'"
|
||||||
|
|
||||||
|
def _fmt_markdown(self, cols, rows, _t):
|
||||||
|
lines = ["| " + " | ".join(cols) + " |"]
|
||||||
|
lines.append("| " + " | ".join("---" for _ in cols) + " |")
|
||||||
|
for r in rows:
|
||||||
|
lines.append("| " + " | ".join(str(v) for v in r) + " |")
|
||||||
|
return "\n".join(lines)
|
||||||
|
|
||||||
|
def _fmt_json(self, cols, rows, _t):
|
||||||
|
data = [dict(zip(cols, [str(v) for v in r])) for r in rows]
|
||||||
|
return json.dumps(data, indent=2, ensure_ascii=False)
|
||||||
|
|
||||||
|
def _fmt_jsonl(self, cols, rows, _t):
|
||||||
|
lines = []
|
||||||
|
for r in rows:
|
||||||
|
obj = dict(zip(cols, [str(v) for v in r]))
|
||||||
|
lines.append(json.dumps(obj, ensure_ascii=False))
|
||||||
|
return "\n".join(lines)
|
||||||
|
|
||||||
# ── Query execution ────────────────────────────────────────────
|
# ── Query execution ────────────────────────────────────────────
|
||||||
|
|
||||||
def _execute_query(self):
|
def _execute_query(self):
|
||||||
|
|||||||
BIN
releases/ServerManager-v1.8.58-win-x64.exe
Normal file
BIN
releases/ServerManager-v1.8.58-win-x64.exe
Normal file
Binary file not shown.
@@ -1,6 +1,6 @@
|
|||||||
"""Version info for ServerManager."""
|
"""Version info for ServerManager."""
|
||||||
|
|
||||||
__version__ = "1.8.57"
|
__version__ = "1.8.58"
|
||||||
__app_name__ = "ServerManager"
|
__app_name__ = "ServerManager"
|
||||||
__author__ = "aibot777"
|
__author__ = "aibot777"
|
||||||
__description__ = "Desktop GUI for managing remote servers"
|
__description__ = "Desktop GUI for managing remote servers"
|
||||||
|
|||||||
Reference in New Issue
Block a user