aaaaa

This commit is contained in:
parent 04d9f40ab3
commit 93fda4bf6e

Pipfile | 3
@@ -14,6 +14,9 @@ matplotlib = "*"
bokeh = "*"
bokeh-sampledata = "*"
scipy = "*"
dash = "*"
dash-bootstrap-components = "*"
dash-basecomponent = "*"

[dev-packages]
@@ -0,0 +1,112 @@
from io import StringIO

from dash import callback
from dash.dcc import Interval
import dash_bootstrap_components as dbc
from dash_basecomponent import BaseComponent
from dash_bootstrap_components._components import Row


def pwms_parse(text: str):
    # Parse comma-separated PWM throttle values into a list of ints.
    return [int(t.strip()) for t in text.split(',')]


ATTRS = ['Description', 'Motor', 'Prop']


class CaptureTab(Row, BaseComponent):

    def __init__(self, **kwargs):

        pwms_input = dbc.Row(
            [
                dbc.Label('PWMs', html_for=self.child_id('input-pwms'), width=2),
                dbc.Col(
                    dbc.Input(type='text', id=self.child_id('input-pwms'), placeholder='PWM throttle values', persistence=True),
                    width=5
                ),
            ],
            className='mb-3',
        )

        button = dbc.Button('Start measurement', id='button-measurement', color='primary', n_clicks=0)

        attr_inputs = [self.attr_input(attr, i) for i, attr in enumerate(ATTRS)]

        form = dbc.Form([
            pwms_input,
            *attr_inputs,
            button
        ], className='m-3')

        super().__init__([
            dbc.Col([
                dbc.Card(dbc.CardBody([form]), className='m-3'),
            ]),
            dbc.Col(dbc.Card(dbc.Textarea(readOnly=True, id=self.child_id('area-log'), rows=20, value='test123\n'*10))),
            Interval(id=self.child_id('log-interval'), interval=1000, n_intervals=0, disabled=True)
        ])

    def attr_input(self, name: str, index: int):
        return dbc.Row(
            [
                dbc.Label(name, html_for=self.child_id(f'input-attr{index}'), width=2),
                dbc.Col(
                    dbc.Input(type='text', id=self.child_id(f'input-attr{index}'), placeholder=name, persistence=True),
                    width=5
                ),
            ],
            className='mb-3',
        )

    @callback(
        BaseComponent.ChildOutput('input-pwms', 'valid'),
        BaseComponent.ChildOutput('input-pwms', 'invalid'),
        BaseComponent.ChildInput('input-pwms', 'value'),
    )
    @staticmethod
    def check_validity(text: str):
        # Flag the PWM input as valid/invalid depending on whether it parses.
        if text:
            try:
                pwms_parse(text)
                return True, False
            except ValueError:
                pass
            return False, True

        return False, False


strio = StringIO()


@callback(
    BaseComponent.ChildOutput('area-log', 'value'),
    BaseComponent.ChildInput('log-interval', 'n_intervals'),
)
def update_log(_):
    print('a')
    # print(strio.getvalue())
    print('b')
    # Push the captured output into the log textarea.
    return strio.getvalue()


@callback(
    BaseComponent.ChildOutput('log-interval', 'disabled'),
    BaseComponent.ChildInput('button-measurement', 'n_clicks'),
    prevent_initial_call=True,
)
def on_start(_):
    print('thra')
    # print(asyncio.get_event_loop())
    # Thread(target=meas_thread, daemon=True).start()
    print('thrb')
    return False


# def meas_thread():
#     print('XXXX')
#     with contextlib.redirect_stdout(strio), contextlib.redirect_stderr(strio):
#         sleep(3)
#         strio.write('baba')
#         asyncio.run(testmeas())
#     print('YYYY')


# async def testmeas():
#     for i in range(10):
#         await asyncio.sleep(1)
#         print(i)
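For reference, a minimal sketch of the parsing behaviour the PWM field relies on (hypothetical example values, not part of the commit): pwms_parse expects comma-separated integer throttle values and raises ValueError otherwise, which check_validity maps onto the input's valid/invalid flags.

# Hypothetical inputs illustrating pwms_parse from the capture module above.
assert pwms_parse('1100, 1300, 1500') == [1100, 1300, 1500]
try:
    pwms_parse('1100, fast')   # non-numeric token
except ValueError:
    pass                       # check_validity returns (False, True) in this case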
@@ -0,0 +1,103 @@
import asyncio
import contextlib
from io import StringIO
from logging import debug
from threading import Thread
from time import sleep

import dash
from dash.dash import dcc, html
from dash_basecomponent import BaseComponent
import dash_bootstrap_components as dbc
import pandas as pd


data = pd.DataFrame([
    {'Prop': 'N/A', 'Motor': 'N/A', 'Description': 'baseline_chamber', 'Shroud': 'None', 'id': 'data_misc/benchmark_tent/baseline_chamber'},
    {'Prop': 'N/A', 'Motor': 'N/A', 'Description': 'baseline_notent', 'Shroud': 'None', 'id': 'data_misc/benchmark_tent/baseline_notent'},
    {'Prop': 'N/A', 'Motor': 'N/A', 'Description': 'baseline_tent', 'Shroud': 'None', 'id': 'data_misc/benchmark_tent/baseline_tent'},
    {'Prop': 'N/A', 'Motor': 'N/A', 'Description': 'shroud_chamber', 'Shroud': 'Yes', 'id': 'data_misc/benchmark_tent/shroud_chamber'},
    {'Prop': 'N/A', 'Motor': 'N/A', 'Description': 'shroud_nochamber', 'Shroud': 'Yes', 'id': 'data_misc/benchmark_tent/shroud_nochamber'},
    {'Prop': 'N/A', 'Motor': 'N/A', 'Description': 'shroud_foam_chamber', 'Shroud': 'With foam', 'id': 'data_misc/benchmark_tent/shroud_foam_chamber'},
])

ATTRS = ['Description', 'Motor', 'Prop', 'Shroud']
# ATTRS = ['Description']


def is_checked(value) -> bool:
    # A row's "Compare" checklist value is a list when ticked; unticked rows pass [].
    if not isinstance(value, list):
        return False
    return None in value


class BrowseTab(dbc.Container, BaseComponent):

    def __init__(self, **kwargs):

        attr_filters = [self.attr_filter(attr, i) for i, attr in enumerate(ATTRS)]

        super().__init__([
            html.H2("Database browser"),
            *attr_filters,

            dbc.Row([
                dbc.Col(dbc.Table(id=self.child_id("data-table"), bordered=True, hover=True, responsive=True, striped=True), width=8)
            ])
        ])

        @dash.callback(
            dash.Output(self.child_id('data-table'), 'children'),
            dash.Input({'role': 'filter-attr', 'index': dash.ALL}, "value"),
        )
        def update_table(filters):
            filtered_data = data.copy()

            for attr, filt in zip(ATTRS, filters):
                if filt:
                    filtered_data = filtered_data[filtered_data[attr].isin(filt)]  # type: ignore

            def attr_col(attr: str):
                vals = [html.Td(x) for x in filtered_data[attr]]
                return html.Th(attr), vals

            cols_attrs = [attr_col(attr) for attr in ATTRS]  # type: ignore
            col_sel = html.Th('Compare'), [html.Td(dcc.Checklist(options=[{'label': ''}], value=True, inline=True, id={'role': 'row-compare', 'id': row["id"]})) for _, row in filtered_data.iterrows()]  # type: ignore

            cols = cols_attrs + [col_sel]

            header = html.Thead(html.Tr([h for h, b in cols]))
            bodyrows = zip(*[b for h, b in cols])
            body = html.Tbody([html.Tr(list(row)) for row in bodyrows])

            return [header, body]

        @dash.callback(
            # dash.Output(self.child_id('data-table'), 'children'),
            dash.Output('data_files', 'data'),
            dash.Input({'role': 'row-compare', 'id': dash.ALL}, "value"),
            dash.State({'role': 'row-compare', 'id': dash.ALL}, "id"),
        )
        def duppa(values, ids):
            print('kkkk')
            values = list(map(is_checked, values))
            # values = list(map(bool, values))
            ids = list(map(lambda x: x['id'], ids))
            # print(values)
            # print(ids)
            return [idd for idd, val in zip(ids, values) if val]

    def attr_filter(self, name: str, index: int):
        r = dbc.Row([
            dbc.Col(dcc.Dropdown(
                id={'role': 'filter-attr', 'index': index},
                options=[{"label": cat, "value": cat} for cat in sorted(data[name].unique())],
                placeholder=f"Select {name}",
                multi=True,
                clearable=True
            ), width=6)
        ], className="mb-3")
        return r
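The BrowseTab filters and the per-row Compare checklists both rely on Dash pattern-matching callbacks: components share a dict-shaped id and dash.ALL gathers every matching value into one list. A small standalone sketch of that mechanism (generic Dash usage with made-up ids, not code from this commit):

import dash
from dash import dcc, html

app = dash.Dash(__name__)
app.layout = html.Div([
    # Three dropdowns that share the same dict-shaped id, differing only in 'index'.
    *[dcc.Dropdown(id={'role': 'filter-attr', 'index': i}, options=['A', 'B'], multi=True) for i in range(3)],
    html.Div(id='summary'),
])

@dash.callback(
    dash.Output('summary', 'children'),
    dash.Input({'role': 'filter-attr', 'index': dash.ALL}, 'value'),
)
def show_selection(values):
    # `values` holds one entry per matching dropdown, in component order.
    return str(values)

if __name__ == '__main__':
    app.run(host='0.0.0.0')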
@@ -0,0 +1,53 @@
import asyncio
import contextlib
from io import StringIO
from logging import debug
from threading import Thread
from time import sleep
from urllib.parse import urlencode

import dash
from dash.dash import html
from dash.dcc import Interval, Store
import dash_bootstrap_components as dbc

from capture import CaptureTab
from db_browser import BrowseTab


app = dash.Dash(
    external_stylesheets=[dbc.themes.BOOTSTRAP]
)

app.layout = dbc.Container(
    [
        dbc.Row(
            dbc.Col(
                dbc.Tabs(
                    [
                        dbc.Tab(label="Capture", children=CaptureTab()),
                        dbc.Tab(label="Browse", children=BrowseTab()),
                        dbc.Tab(label="Analyze", id='tab_analyze', children=html.Iframe(src='http://localhost:5000/', width='100%', height=800)),
                    ]
                ),
                # width=12
            )
        ),
        Store(id='data_files', data=[])
    ],
    fluid=True
)


@dash.callback(
    dash.Output('tab_analyze', 'children'),
    dash.Input('data_files', 'data'),
)
def update_analyzer(files):
    # print('dupa')
    # print(files)
    if files:
        return html.Iframe(src='http://localhost:5000/?' + urlencode({f'f{i}': file for i, file in enumerate(files)}), width='100%', height=800)
    return dbc.Label("No measurement selected for analysis")


if __name__ == '__main__':
    app.run(host='0.0.0.0')
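The Analyze tab hands the selected measurement ids to the embedded Bokeh analyzer as numbered f0, f1, ... query parameters, and make_doc later reads them back until a key is missing. A standalone sketch of that round-trip (using parse_qs here in place of Bokeh's doc.session_context.request.arguments, which yields byte values):

from urllib.parse import urlencode, parse_qs
import itertools

files = ['data_misc/benchmark_tent/baseline_chamber', 'data_misc/benchmark_tent/shroud_chamber']

# Dash side: encode the Store contents into the iframe URL.
query = urlencode({f'f{i}': f for i, f in enumerate(files)})

# Bokeh side: read f0, f1, ... until a key is absent (mirrors the loop in make_doc).
args = parse_qs(query)
decoded = []
for i in itertools.count():
    key = f'f{i}'
    if key not in args:
        break
    decoded.append(args[key][0])

assert decoded == files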
@@ -0,0 +1,50 @@
import asyncio
import contextlib
from io import StringIO
from logging import debug
from threading import Thread
from time import sleep

import dash
from dash.dash import html
from dash.dcc import Interval
import dash_bootstrap_components as dbc

from capture import CaptureTab
from db_browser import BrowseTab


app = dash.Dash(
    external_stylesheets=[dbc.themes.BOOTSTRAP]
)

app.layout = dbc.Container(
    [
        dbc.Row([
            dbc.Col(dash.dcc.Dropdown(
                id={'dupa': 'dupa', 'index': 0},
                options=[{"label": cat, "value": cat} for cat in ['A', 'B']],
                placeholder="Select",
                multi=True,
                clearable=True
            ), width=6),
            dbc.Col(dash.dcc.Dropdown(
                id={'dupa': 'dupa', 'index': 1},
                options=[{"label": cat, "value": cat} for cat in ['A', 'B']],
                placeholder="Select",
                multi=True,
                clearable=True
            ), width=6),
        ])
    ],
    fluid=True
)


@dash.callback(
    dash.Input({'dupa': 'dupa', 'index': dash.ALL}, 'value')
)
def cb(x):
    print(x)


if __name__ == '__main__':
    app.run(host='0.0.0.0')
@@ -0,0 +1,73 @@
import dash
import dash_bootstrap_components as dbc
import pandas as pd
from dash import dcc, html, Input, Output

# Sample Data
data = pd.DataFrame([
    {"Prop": "Item 1", "Motor": "A", "Status": "Active"},
    {"Prop": "Item 2", "Motor": "B", "Status": "Inactive"},
    {"Prop": "Item 3", "Motor": "A", "Status": "Inactive"},
    {"Prop": "Item 4", "Motor": "C", "Status": "Active"},
    {"Prop": "Item 5", "Motor": "B", "Status": "Active"},
    {"Prop": "Item 6", "Motor": "C", "Status": "Inactive"},
    {"Prop": "Item 7", "Motor": "A", "Status": "Active"},
])

# Initialize Dash App
app = dash.Dash(__name__, external_stylesheets=[dbc.themes.BOOTSTRAP])

app.layout = dbc.Container([
    html.H2("Filterable List"),

    # Multi-Select Dropdown for Category Filter
    # NOTE: the sample data has no "Category" column; the "Motor" column plays the category role here.
    dbc.Row([
        dbc.Col(dcc.Dropdown(
            id="category-filter",
            options=[{"label": cat, "value": cat} for cat in sorted(data["Motor"].unique())],
            placeholder="Select Categories",
            multi=True,
            clearable=True
        ), width=6)
    ], className="mb-3"),

    # Checklist for Status Filter
    dbc.Row([
        dbc.Col(dcc.Checklist(
            id="status-filter",
            options=[{"label": status, "value": status} for status in data["Status"].unique()],
            value=[],
            inline=True
        ), width=6)
    ], className="mb-3"),

    # Table to Display Data
    dbc.Row([
        dbc.Col(dbc.Table(id="data-table", bordered=True, hover=True, responsive=True, striped=True), width=8)
    ])
])

# Callback to update table based on filters
@app.callback(
    Output("data-table", "children"),
    Input("category-filter", "value"),
    Input("status-filter", "value")
)
def update_table(selected_categories, selected_status):
    filtered_data = data.copy()

    if selected_categories:
        filtered_data = filtered_data[filtered_data["Motor"].isin(selected_categories)]

    if selected_status:
        filtered_data = filtered_data[filtered_data["Status"].isin(selected_status)]

    # Generate Table
    table_header = [html.Thead(html.Tr([html.Th(col) for col in filtered_data.columns]))]
    table_body = [html.Tbody([html.Tr([html.Td(row[col]) for col in filtered_data.columns]) for _, row in filtered_data.iterrows()])]

    return table_header + table_body if not filtered_data.empty else [html.Thead(html.Tr([html.Th("No Results Found")]))]

# Run the app
if __name__ == "__main__":
    app.run_server(debug=True)
@@ -0,0 +1,12 @@
from typing import Optional
from dataclasses import dataclass


@dataclass
class MeasurementAttributes:
    motor: Optional[str] = None
    prop: Optional[str] = None
    description: Optional[str] = None


class Database:
@@ -5,6 +5,7 @@ from logging import debug
import logging
from typing import Optional

from fft import FFTData
from logger import spr
from norsonic_parser import parse_report
import thrust_stand
@@ -17,6 +18,7 @@ from thrust_stand import ThrustStand, ThrustStandMeasurement
class OpPointData:
    data_thrust_stand: Sequence[ThrustStandMeasurement]
    raw_nor_report: Optional[bytes]
    pwm_setpoint: int

    @property
    def nor_report_parsed(self):
@@ -29,19 +31,25 @@ class OpPointData:
        return self.nor_report_parsed.profile

    @property
    def data_thrust_stand_avg(self):
    def data_thrust_stand_avg(self) -> ThrustStandMeasurement:
        return sum(self.data_thrust_stand, ThrustStandMeasurement.zero())/len(self.data_thrust_stand)

    @property
    def data_accustic_avg(self):
    def data_accustic_avg(self) -> dict[str, float]:
        return {k: sum(map(float, (row[k] for row in self.data_accustic)))/len(self.data_accustic) for k in self.data_accustic[0].keys() if k != 'Date'}

import sys
async def ainput(string: str) -> str:
    await asyncio.get_event_loop().run_in_executor(
        None, lambda s=string: sys.stdout.write(s+' '))
    return await asyncio.get_event_loop().run_in_executor(
        None, sys.stdin.readline)
    @property
    def data_fft(self) -> FFTData:
        return FFTData(self.nor_report_parsed.glob_fft)


# import sys
# async def ainput(string: str) -> str:
#     await asyncio.get_event_loop().run_in_executor(
#         None, lambda s=string: sys.stdout.write(s+' '))
#     return await asyncio.get_event_loop().run_in_executor(
#         None, sys.stdin.readline)


@dataclass
@@ -1,15 +1,19 @@
from dataclasses import fields
from typing import Callable, cast
from bokeh.io import curdoc
from bokeh.layouts import column, row
from bokeh.models import ColumnDataSource, MultiChoice, Select
from bokeh.models import ColumnDataSource, MultiChoice, Select, Slider, TextInput
from bokeh.plotting import figure
import pickle
from fft import FFTData
from interpolation import interp_keyed
from measurement_station import OpPointData
import math
from bokeh.server.server import Server
from bokeh.application import Application
from bokeh.application.handlers.function import FunctionHandler
import sys
import itertools


def oppoint_data_src(p: OpPointData) -> dict[str, float]:
@@ -34,15 +38,35 @@ def series_dataclosrc(series: dict[int, OpPointData]) -> ColumnDataSource:
    data = { key: [e[key] for e in entries] for key in keys }
    return ColumnDataSource(data)

def fft_datasrc(op: OpPointData) -> ColumnDataSource:
    fft_data = op.nor_report_parsed.glob_fft
def fft_datasrc(fft: FFTData) -> ColumnDataSource:
    fft_data = fft.data
    freqs = sorted(fft_data.keys())
    vals = [fft_data[f] for f in freqs]
    data = { "FREQ": freqs, "POWER": vals }
    return data

def interp_fft(oppoints: dict[int, OpPointData], key: Callable[[OpPointData], float], x: float) -> FFTData:
    seq = [oppoints[k] for k in sorted(oppoints.keys())]
    return interp_keyed(seq, key, lambda op: op.data_fft, x)


def make_doc(doc, files):

    # print('aaaaaaa')
    # print(doc.session_context.request.arguments)

    args = doc.session_context.request.arguments

    print(args)
    files = []
    for i in itertools.count():
        key = f'f{i}'
        if key in args:
            files.append(args[key][0].decode())
        else:
            break


    series = [read_series(f) for f in files]
    sources = [series_dataclosrc(s) for s in series]
    keys = list(sources[0].data.keys())
@@ -59,6 +83,9 @@ def make_doc(doc, files):
    pfft = figure(title='', x_axis_label='X', y_axis_label='Y', tools=['hover', 'pan', 'xwheel_zoom'])
    pfft.sizing_mode = 'scale_both'  # type: ignore

    # fftslider = Slider(start=0, end=15, value=1, step=.1, title="fft X")
    fftslider = TextInput(title='fft X')

    def update_plot(attr, _, new_values):
        p.renderers = []  # type: ignore
@@ -81,12 +108,15 @@ def make_doc(doc, files):
        p.y_range.end = ymax + margin


        pfft.renderers = []  # type: ignore
        for serie, fname, color in zip(series, files, colors):
            if fname not in srcsel.value:  # type: ignore
                continue
            data = fft_datasrc(serie[1500])
            pfft.line(x='FREQ', y='POWER', source=data, legend_label=f'{fname} FFT', line_width=2, color=color)
        try:
            pfft.renderers = []  # type: ignore
            for serie, fname, color in zip(series, files, colors):
                if fname not in srcsel.value:  # type: ignore
                    continue
                data = fft_datasrc(interp_fft(serie, lambda op: oppoint_data_src(op)[xsel.value], float(fftslider.value)))
                pfft.line(x='FREQ', y='POWER', source=data, legend_label=f'{fname} FFT', line_width=2, color=color)
        except ValueError:
            print('Omitting FFT')
@@ -94,8 +124,9 @@ def make_doc(doc, files):
    multi_choice.on_change('value', update_plot)
    xsel.on_change('value', update_plot)
    srcsel.on_change('value', update_plot)
    fftslider.on_change('value', update_plot)

    layout = column(column(srcsel, multi_choice, xsel), p, pfft)
    layout = column(column(srcsel, multi_choice, xsel), p, pfft, fftslider)
    layout.sizing_mode = 'scale_both'  # type: ignore

    doc.add_root(layout)