Replies: 4 comments 9 replies
-
You'll need to read the file asynchronously: https://github.com/zauberzeug/nicegui/wiki/FAQs#why-is-my-long-running-function-blocking-ui-updates
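For example (a minimal sketch along the lines of that FAQ entry; the handler name and the notification are placeholders, not code from this discussion):

```python
from nicegui import run, ui


async def handle_upload(e) -> None:
    # read the uploaded file in a thread so the event loop stays responsive;
    # for CPU-heavy parsing, use run.cpu_bound instead (see the examples below)
    content = await run.io_bound(e.content.read)
    ui.notify(f"received {len(content)} bytes")


ui.upload(on_upload=handle_upload, auto_upload=True)

ui.run()
```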
-
Thanks for providing the example to generate such large CSV files (~38 MB) @LimaVsC. With it I could build a small demo which does not use the upload at all but still freezes:

```python
from nicegui import run, ui
import pandas as pd
from pathlib import Path
from io import StringIO
import time


def read_csv():
    with StringIO(Path("trace.csv").read_bytes().decode("utf-8")) as f:
        return pd.read_csv(f, engine="python", sep=None)


async def load():
    status = ui.notification("reading csv")
    df = await run.cpu_bound(read_csv)  # parse in a separate process so the event loop stays free
    print(df)
    status.message = "creating ui.table"
    ui.table.from_pandas(df)
    status.dismiss()


ui.timer(1, load, once=True)

alive_indicator = ui.label()
ui.timer(0.1, lambda: alive_indicator.set_text(f"time: {time.monotonic():.1f}s"))

ui.run()
```

We need to make …
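So the freeze is not caused by the upload itself but by building the `ui.table` from such a wide dataframe on the event loop. As a stopgap, here is a sketch of capping what the client has to receive and render (the 1000-row preview and 25-row page size are arbitrary assumptions, not a fix from this thread):

```python
import pandas as pd
from nicegui import ui


def show_preview(df: pd.DataFrame) -> None:
    # cap the rows so the client doesn't have to receive and render the whole frame
    preview = df.head(1000)
    columns = [{"name": str(c), "label": str(c), "field": str(c)} for c in preview.columns]
    ui.table(columns=columns, rows=preview.to_dict("records"), pagination=25)
```

In the demo above, `show_preview(df)` would take the place of `ui.table.from_pandas(df)`.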
-
The upload works fine as long as the reading is done in a background process:

```python
from nicegui import run, ui
import pandas as pd
from io import StringIO


def create_dataframe(content):
    with StringIO(content.decode("utf-8")) as f:
        return pd.read_csv(f, engine="python", sep=None)


async def load(e):
    status = ui.notification("creating dataframe")
    df = await run.cpu_bound(create_dataframe, e.content.read())
    status.dismiss()
    print(df)


ui.upload(on_upload=load, auto_upload=True)

ui.run()
```

I'll update the title accordingly.
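Note that `run.cpu_bound` executes the function in a separate process, so everything passed to it must be picklable. That's why the handler reads the bytes from `e.content` in the main process and hands them to `create_dataframe`, rather than passing the upload event itself.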
-
Many other large dataframes seem to work fine. So for a full reproduction I currently have this (not so minimal) code:

```python
import asyncio
import time
from io import StringIO

import numpy as np
import pandas as pd
import pymc as pm
from nicegui import app, run, ui


def create_df():
    np.random.seed(42)
    true_mu = 5.0
    true_sigma = 2.0
    n_samples = 500
    normal = np.random.normal(loc=true_mu, scale=true_sigma, size=n_samples)
    noise = np.random.normal(0, 0.5, size=n_samples)
    data = normal + noise
    with pm.Model():
        mu = pm.Normal("mu", mu=0, sigma=10)
        sigma = pm.HalfNormal("sigma", sigma=10)
        y_obs = pm.Normal("y_obs", mu=mu, sigma=sigma, observed=data)
        trace = pm.sample(chains=4, idata_kwargs={"log_likelihood": True})
    # NOTE: convert to csv and back to df to make it serializable for ui.table
    df = trace.to_dataframe()
    buffer = StringIO()
    df.to_csv(buffer, index=False)
    buffer.seek(0)
    return pd.read_csv(buffer)


async def load():
    status = ui.notification("creating data")
    df = await run.cpu_bound(create_df)
    print(df)
    status.message = "creating ui.table"
    ui.table.from_pandas(df)
    status.dismiss()


ui.timer(1, load, once=True)

alive_indicator = ui.label()
ui.timer(0.1, lambda: alive_indicator.set_text(f"time: {time.monotonic():.1f}s"))


def startup():
    loop = asyncio.get_running_loop()
    loop.set_debug(True)
    loop.slow_callback_duration = 0.05


app.on_startup(startup)

ui.run()
```
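For reference: the startup hook enables asyncio's debug mode and lowers `slow_callback_duration` to 0.05 s, so any callback that blocks the event loop for longer than 50 ms gets logged. That makes it easy to spot which step is responsible for the freeze.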
-
Question
When uploading a large CSV file (4000 rows x 521 columns, in my case), the app freezes, and the `on_upload` function is not executed. Here's an example that reproduces the issue:
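(The original snippet is missing here. A rough reconstruction of its shape, based on the replies above — names and details are guesses — would be a handler that parses the CSV synchronously on the event loop:)

```python
from io import StringIO

import pandas as pd
from nicegui import ui


def handle_upload(e) -> None:
    # everything below runs on the event loop;
    # with 4000 x 521 cells, parsing and table creation block the UI
    df = pd.read_csv(StringIO(e.content.read().decode("utf-8")))
    ui.table.from_pandas(df)


ui.upload(on_upload=handle_upload, auto_upload=True)

ui.run()
```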