diff --git a/volumetric_imaging/anaconda-project-lock.yml b/volumetric_imaging/anaconda-project-lock.yml index 103a4e5d3..2bf1a4651 100644 --- a/volumetric_imaging/anaconda-project-lock.yml +++ b/volumetric_imaging/anaconda-project-lock.yml @@ -17,7 +17,7 @@ locking_enabled: true env_specs: default: locked: true - env_spec_hash: d394747b111fb937e4709cee44a9d55e485797b9 + env_spec_hash: cd29392052f232673b3eb6e507406068d3799ed2 platforms: - linux-64 - osx-64 @@ -25,57 +25,57 @@ env_specs: - win-64 packages: all: - - anyio=4.6.2.post1=pyhd8ed1ab_0 + - anyio=4.7.0=pyhd8ed1ab_0 - argon2-cffi=23.1.0=pyhd8ed1ab_1 - - arrow=1.3.0=pyhd8ed1ab_0 + - arrow=1.3.0=pyhd8ed1ab_1 - asttokens=3.0.0=pyhd8ed1ab_1 - - async-lru=2.0.4=pyhd8ed1ab_0 - - attrs=24.2.0=pyh71513ae_0 + - async-lru=2.0.4=pyhd8ed1ab_1 + - attrs=24.3.0=pyh71513ae_0 - babel=2.16.0=pyhd8ed1ab_1 - beautifulsoup4=4.12.3=pyha770c72_1 - bleach=6.2.0=pyhd8ed1ab_1 - - bokeh=3.6.2=pyhd8ed1ab_0 + - bokeh=3.6.2=pyhd8ed1ab_1 - cached-property=1.5.2=hd8ed1ab_1 - cached_property=1.5.2=pyha770c72_1 - - certifi=2024.8.30=pyhd8ed1ab_0 + - certifi=2024.12.14=pyhd8ed1ab_0 - charset-normalizer=3.4.0=pyhd8ed1ab_1 - colorama=0.4.6=pyhd8ed1ab_1 - - comm=0.2.2=pyhd8ed1ab_0 + - comm=0.2.2=pyhd8ed1ab_1 - decorator=5.1.1=pyhd8ed1ab_1 - defusedxml=0.7.1=pyhd8ed1ab_0 - entrypoints=0.4=pyhd8ed1ab_1 - exceptiongroup=1.2.2=pyhd8ed1ab_1 - - executing=2.1.0=pyhd8ed1ab_0 + - executing=2.1.0=pyhd8ed1ab_1 - fqdn=1.5.1=pyhd8ed1ab_1 - h11=0.14.0=pyhd8ed1ab_1 - h2=4.1.0=pyhd8ed1ab_1 - hpack=4.0.0=pyhd8ed1ab_1 - httpcore=1.0.7=pyh29332c3_1 - - httpx=0.28.0=pyhd8ed1ab_0 + - httpx=0.28.1=pyhd8ed1ab_0 - hyperframe=6.0.1=pyhd8ed1ab_1 - idna=3.10=pyhd8ed1ab_1 - importlib-metadata=8.5.0=pyha770c72_1 - importlib_resources=6.4.5=pyhd8ed1ab_1 - - isoduration=20.11.0=pyhd8ed1ab_0 + - isoduration=20.11.0=pyhd8ed1ab_1 - jedi=0.19.2=pyhd8ed1ab_1 - jinja2=3.1.4=pyhd8ed1ab_1 - json5=0.10.0=pyhd8ed1ab_1 - - jsonschema-specifications=2024.10.1=pyhd8ed1ab_0 - - jsonschema-with-format-nongpl=4.23.0=hd8ed1ab_0 - - jsonschema=4.23.0=pyhd8ed1ab_0 - - jupyter-lsp=2.2.5=pyhd8ed1ab_0 - - jupyter_client=8.6.3=pyhd8ed1ab_0 - - jupyter_events=0.10.0=pyhd8ed1ab_0 + - jsonschema-specifications=2024.10.1=pyhd8ed1ab_1 + - jsonschema-with-format-nongpl=4.23.0=hd8ed1ab_1 + - jsonschema=4.23.0=pyhd8ed1ab_1 + - jupyter-lsp=2.2.5=pyhd8ed1ab_1 + - jupyter_client=8.6.3=pyhd8ed1ab_1 + - jupyter_events=0.10.0=pyhd8ed1ab_1 - jupyter_server=2.14.2=pyhd8ed1ab_1 - jupyter_server_terminals=0.5.3=pyhd8ed1ab_1 - - jupyterlab=4.3.2=pyhd8ed1ab_0 + - jupyterlab=4.3.3=pyhd8ed1ab_0 - jupyterlab_pygments=0.3.0=pyhd8ed1ab_2 - - jupyterlab_server=2.27.3=pyhd8ed1ab_0 - - linkify-it-py=2.0.3=pyhd8ed1ab_0 + - jupyterlab_server=2.27.3=pyhd8ed1ab_1 + - linkify-it-py=2.0.3=pyhd8ed1ab_1 - markdown-it-py=3.0.0=pyhd8ed1ab_1 - markdown=3.6=pyhd8ed1ab_0 - matplotlib-inline=0.1.7=pyhd8ed1ab_1 - - mdit-py-plugins=0.4.2=pyhd8ed1ab_0 + - mdit-py-plugins=0.4.2=pyhd8ed1ab_1 - mdurl=0.1.2=pyhd8ed1ab_1 - mistune=3.0.2=pyhd8ed1ab_1 - nbclient=0.10.1=pyhd8ed1ab_0 @@ -88,15 +88,15 @@ env_specs: - packaging=24.2=pyhd8ed1ab_2 - pandocfilters=1.5.0=pyhd8ed1ab_0 - panel=1.5.4=pyhd8ed1ab_0 - - param=2.1.1=pyhff2d567_0 + - param=2.2.0=pyhd8ed1ab_0 - parso=0.8.4=pyhd8ed1ab_1 - pickleshare=0.7.5=pyhd8ed1ab_1004 - - pip=24.3.1=pyh8b19718_0 + - pip=24.3.1=pyh145f28c_1 - pkgutil-resolve-name=1.3.10=pyhd8ed1ab_2 - platformdirs=4.3.6=pyhd8ed1ab_1 - prometheus_client=0.21.1=pyhd8ed1ab_0 - prompt-toolkit=3.0.48=pyha770c72_1 - - pure_eval=0.2.3=pyhd8ed1ab_0 + - 
pure_eval=0.2.3=pyhd8ed1ab_1 - pycparser=2.22=pyh29332c3_1 - pygments=2.18.0=pyhd8ed1ab_1 - python-dateutil=2.9.0.post0=pyhff2d567_1 @@ -108,30 +108,29 @@ env_specs: - pyviz_comms=3.0.3=pyhd8ed1ab_0 - referencing=0.35.1=pyhd8ed1ab_1 - requests=2.32.3=pyhd8ed1ab_1 - - rfc3339-validator=0.1.4=pyhd8ed1ab_0 + - rfc3339-validator=0.1.4=pyhd8ed1ab_1 - rfc3986-validator=0.1.1=pyh9f0ad1d_0 - setuptools=75.6.0=pyhff2d567_1 - six=1.17.0=pyhd8ed1ab_0 - sniffio=1.3.1=pyhd8ed1ab_1 - soupsieve=2.5=pyhd8ed1ab_1 - - stack_data=0.6.2=pyhd8ed1ab_0 + - stack_data=0.6.3=pyhd8ed1ab_1 - tinycss2=1.4.0=pyhd8ed1ab_0 - tomli=2.2.1=pyhd8ed1ab_1 - tqdm=4.67.1=pyhd8ed1ab_0 - traitlets=5.14.3=pyhd8ed1ab_1 - - types-python-dateutil=2.9.0.20241003=pyhd8ed1ab_1 + - types-python-dateutil=2.9.0.20241206=pyhd8ed1ab_0 - typing-extensions=4.12.2=hd8ed1ab_1 - typing_extensions=4.12.2=pyha770c72_1 - typing_utils=0.1.0=pyhd8ed1ab_1 - tzdata=2024b=hc8b5060_0 - - uc-micro-py=1.0.3=pyhd8ed1ab_0 + - uc-micro-py=1.0.3=pyhd8ed1ab_1 - uri-template=1.3.0=pyhd8ed1ab_1 - urllib3=2.2.3=pyhd8ed1ab_1 - wcwidth=0.2.13=pyhd8ed1ab_1 - webcolors=24.11.1=pyhd8ed1ab_0 - webencodings=0.5.1=pyhd8ed1ab_3 - websocket-client=1.8.0=pyhd8ed1ab_1 - - wheel=0.45.1=pyhd8ed1ab_1 - xyzservices=2024.9.0=pyhd8ed1ab_1 - zipp=3.21.0=pyhd8ed1ab_1 unix: @@ -151,10 +150,10 @@ env_specs: - argon2-cffi-bindings=21.2.0=py311h9ecbd09_5 - brotli-python=1.1.0=py311hfdbb021_2 - bzip2=1.0.8=h4bc722e_7 - - ca-certificates=2024.8.30=hbcca054_0 + - ca-certificates=2024.12.14=hbcca054_0 - cffi=1.17.1=py311hf29c0ef_0 - contourpy=1.3.1=py311hd18a35c_0 - - debugpy=1.8.9=py311hfdbb021_0 + - debugpy=1.8.11=py311hfdbb021_0 - freetype=2.12.1=h267a509_2 - ipykernel=6.29.5=pyh3099207_0 - jsonpointer=3.0.0=py311h38be061_1 @@ -163,9 +162,9 @@ env_specs: - lcms2=2.16=hb7c19ff_0 - ld_impl_linux-64=2.43=h712a8e2_2 - lerc=4.0.0=h27087fc_0 - - libblas=3.9.0=25_linux64_openblas - - libcblas=3.9.0=25_linux64_openblas - - libdeflate=1.22=hb9d3cd8_0 + - libblas=3.9.0=26_linux64_openblas + - libcblas=3.9.0=26_linux64_openblas + - libdeflate=1.23=h4ddbbb0_0 - libedit=3.1.20191231=he28a2e2_2 - libexpat=2.6.4=h5888daf_0 - libffi=3.4.2=h7f98852_5 @@ -175,17 +174,16 @@ env_specs: - libgfortran=14.2.0=h69a702a_1 - libgomp=14.2.0=h77fa898_1 - libjpeg-turbo=3.0.0=hd590300_1 - - liblapack=3.9.0=25_linux64_openblas - - liblzma-devel=5.6.3=hb9d3cd8_1 + - liblapack=3.9.0=26_linux64_openblas - liblzma=5.6.3=hb9d3cd8_1 - libnsl=2.0.1=hd590300_0 - libopenblas=0.3.28=pthreads_h94d23a6_1 - libpng=1.6.44=hadc24fc_0 - libsodium=1.0.20=h4ab18f5_0 - - libsqlite=3.47.0=hadc24fc_1 + - libsqlite=3.47.2=hee588c1_0 - libstdcxx-ng=14.2.0=h4852527_1 - libstdcxx=14.2.0=hc0a3c3a_1 - - libtiff=4.7.0=he137b08_1 + - libtiff=4.7.0=hd9ff511_3 - libuuid=2.38.1=h0b41bf4_0 - libwebp-base=1.4.0=hd590300_0 - libxcb=1.17.0=h8a09558_0 @@ -193,8 +191,8 @@ env_specs: - libzlib=1.3.1=hb9d3cd8_2 - markupsafe=3.0.2=py311h2dc5d0c_1 - ncurses=6.5=he02047a_1 - - numpy=2.1.3=py311h71ddf71_0 - - openjpeg=2.5.2=h488ebb8_0 + - numpy=2.2.0=py311hf916aec_0 + - openjpeg=2.5.3=h5fbd93e_0 - openssl=3.4.0=hb9d3cd8_0 - pandas=2.2.3=py311h7db5c69_1 - pillow=11.0.0=py311h49e9ac3_0 @@ -209,11 +207,8 @@ env_specs: - terminado=0.18.1=pyh0d859eb_0 - tk=8.6.13=noxft_h4845f30_101 - tornado=6.4.2=py311h9ecbd09_0 - - xorg-libxau=1.0.11=hb9d3cd8_1 + - xorg-libxau=1.0.12=hb9d3cd8_0 - xorg-libxdmcp=1.1.5=hb9d3cd8_0 - - xz-gpl-tools=5.6.3=hbcc6ac9_1 - - xz-tools=5.6.3=hb9d3cd8_1 - - xz=5.6.3=hbcc6ac9_1 - yaml=0.2.5=h7f98852_2 - zeromq=4.3.5=h3b0a872_7 - 
zstandard=0.23.0=py311hbc35293_1 @@ -222,41 +217,40 @@ env_specs: - argon2-cffi-bindings=21.2.0=py311h3336109_5 - brotli-python=1.1.0=py311hd89902b_2 - bzip2=1.0.8=hfdf4475_7 - - ca-certificates=2024.8.30=h8857fd0_0 + - ca-certificates=2024.12.14=h8857fd0_0 - cffi=1.17.1=py311h137bacd_0 - contourpy=1.3.1=py311h4e34fa0_0 - - debugpy=1.8.9=py311hc356e98_0 + - debugpy=1.8.11=py311hc356e98_0 - freetype=2.12.1=h60636b9_2 - jsonpointer=3.0.0=py311h6eed73b_1 - krb5=1.21.3=h37d8d59_0 - lcms2=2.16=ha2f27b4_0 - lerc=4.0.0=hb486fe8_0 - - libblas=3.9.0=25_osx64_openblas - - libcblas=3.9.0=25_osx64_openblas + - libblas=3.9.0=26_osx64_openblas + - libcblas=3.9.0=26_osx64_openblas - libcxx=19.1.5=hf95d169_0 - - libdeflate=1.22=h00291cd_0 + - libdeflate=1.23=he65b83e_0 - libedit=3.1.20191231=h0678c8f_2 - libexpat=2.6.4=h240833e_0 - libffi=3.4.2=h0d85af4_5 - libgfortran5=13.2.0=h2873a65_3 - libgfortran=5.0.0=13_2_0_h97931a8_3 - libjpeg-turbo=3.0.0=h0dc2134_1 - - liblapack=3.9.0=25_osx64_openblas - - liblzma-devel=5.6.3=hd471939_1 + - liblapack=3.9.0=26_osx64_openblas - liblzma=5.6.3=hd471939_1 - libopenblas=0.3.28=openmp_hbf64a52_1 - libpng=1.6.44=h4b8f8c9_0 - libsodium=1.0.20=hfdf4475_0 - - libsqlite=3.47.0=h2f8c449_1 - - libtiff=4.7.0=h583c2ba_1 + - libsqlite=3.47.2=hdb6dae5_0 + - libtiff=4.7.0=hb77a491_3 - libwebp-base=1.4.0=h10d778d_0 - libxcb=1.17.0=hf1f96e2_0 - libzlib=1.3.1=hd23fc13_2 - llvm-openmp=19.1.5=ha54dae1_0 - markupsafe=3.0.2=py311ha3cf9ac_1 - ncurses=6.5=hf036a51_1 - - numpy=2.1.3=py311h14ed71f_0 - - openjpeg=2.5.2=h7310d3a_0 + - numpy=2.2.0=py311h4632d39_0 + - openjpeg=2.5.3=h7fd6d84_0 - openssl=3.4.0=hd471939_0 - pandas=2.2.3=py311haeb46be_1 - pillow=11.0.0=py311h1f68098_0 @@ -271,11 +265,8 @@ env_specs: - rpds-py=0.22.3=py311h3b9c2be_0 - tk=8.6.13=h1abcd95_1 - tornado=6.4.2=py311h4d7f069_0 - - xorg-libxau=1.0.11=h00291cd_1 + - xorg-libxau=1.0.12=h6e16a3a_0 - xorg-libxdmcp=1.1.5=h00291cd_0 - - xz-gpl-tools=5.6.3=h357f2ed_1 - - xz-tools=5.6.3=hd471939_1 - - xz=5.6.3=h357f2ed_1 - yaml=0.2.5=h0d85af4_2 - zeromq=4.3.5=h7130eaa_7 - zstandard=0.23.0=py311hdf6fcd6_1 @@ -284,41 +275,40 @@ env_specs: - argon2-cffi-bindings=21.2.0=py311h460d6c5_5 - brotli-python=1.1.0=py311h3f08180_2 - bzip2=1.0.8=h99b78c6_7 - - ca-certificates=2024.8.30=hf0a4a13_0 + - ca-certificates=2024.12.14=hf0a4a13_0 - cffi=1.17.1=py311h3a79f62_0 - contourpy=1.3.1=py311h210dab8_0 - - debugpy=1.8.9=py311h155a34a_0 + - debugpy=1.8.11=py311h155a34a_0 - freetype=2.12.1=hadb7bae_2 - jsonpointer=3.0.0=py311h267d04e_1 - krb5=1.21.3=h237132a_0 - lcms2=2.16=ha0e7c42_0 - lerc=4.0.0=h9a09cb3_0 - - libblas=3.9.0=25_osxarm64_openblas - - libcblas=3.9.0=25_osxarm64_openblas + - libblas=3.9.0=26_osxarm64_openblas + - libcblas=3.9.0=26_osxarm64_openblas - libcxx=19.1.5=ha82da77_0 - - libdeflate=1.22=hd74edd7_0 + - libdeflate=1.23=hec38601_0 - libedit=3.1.20191231=hc8eb9b7_2 - libexpat=2.6.4=h286801f_0 - libffi=3.4.2=h3422bc3_5 - libgfortran5=13.2.0=hf226fd6_3 - libgfortran=5.0.0=13_2_0_hd922786_3 - libjpeg-turbo=3.0.0=hb547adb_1 - - liblapack=3.9.0=25_osxarm64_openblas - - liblzma-devel=5.6.3=h39f12f2_1 + - liblapack=3.9.0=26_osxarm64_openblas - liblzma=5.6.3=h39f12f2_1 - libopenblas=0.3.28=openmp_hf332438_1 - libpng=1.6.44=hc14010f_0 - libsodium=1.0.20=h99b78c6_0 - - libsqlite=3.47.0=hbaaea75_1 - - libtiff=4.7.0=hfce79cd_1 + - libsqlite=3.47.2=h3f77e49_0 + - libtiff=4.7.0=h551f018_3 - libwebp-base=1.4.0=h93a5062_0 - libxcb=1.17.0=hdb1d25a_0 - libzlib=1.3.1=h8359307_2 - llvm-openmp=19.1.5=hdb05f8b_0 - markupsafe=3.0.2=py311h4921393_1 - 
ncurses=6.5=h7bae524_1 - - numpy=2.1.3=py311h649a571_0 - - openjpeg=2.5.2=h9f1df11_0 + - numpy=2.2.0=py311h4b914e2_0 + - openjpeg=2.5.3=h8a3d83b_0 - openssl=3.4.0=h39f12f2_0 - pandas=2.2.3=py311h9cb3ce9_1 - pillow=11.0.0=py311h3894ae9_0 @@ -333,11 +323,8 @@ env_specs: - rpds-py=0.22.3=py311h3ff9189_0 - tk=8.6.13=h5083fa2_1 - tornado=6.4.2=py311h917b07b_0 - - xorg-libxau=1.0.11=hd74edd7_1 + - xorg-libxau=1.0.12=h5505292_0 - xorg-libxdmcp=1.1.5=hd74edd7_0 - - xz-gpl-tools=5.6.3=h9a6d368_1 - - xz-tools=5.6.3=h39f12f2_1 - - xz=5.6.3=h9a6d368_1 - yaml=0.2.5=h3422bc3_2 - zeromq=4.3.5=hc1bb282_7 - zstandard=0.23.0=py311ha60cc69_1 @@ -347,11 +334,11 @@ env_specs: - argon2-cffi-bindings=21.2.0=py311he736701_5 - brotli-python=1.1.0=py311hda3d55a_2 - bzip2=1.0.8=h2466b09_7 - - ca-certificates=2024.8.30=h56e8100_0 + - ca-certificates=2024.12.14=h56e8100_0 - cffi=1.17.1=py311he736701_0 - contourpy=1.3.1=py311h3257749_0 - cpython=3.11.11=py311hd8ed1ab_1 - - debugpy=1.8.9=py311hda3d55a_0 + - debugpy=1.8.11=py311hda3d55a_0 - freetype=2.12.1=hdaf720e_2 - intel-openmp=2024.2.1=h57928b3_1083 - ipykernel=6.29.5=pyh4bbf305_0 @@ -361,9 +348,9 @@ env_specs: - krb5=1.21.3=hdf4eb48_0 - lcms2=2.16=h67d730c_0 - lerc=4.0.0=h63175ca_0 - - libblas=3.9.0=25_win64_mkl - - libcblas=3.9.0=25_win64_mkl - - libdeflate=1.22=h2466b09_0 + - libblas=3.9.0=26_win64_mkl + - libcblas=3.9.0=26_win64_mkl + - libdeflate=1.23=h9062f6e_0 - libexpat=2.6.4=he0c23c2_0 - libffi=3.4.2=h8ffe710_5 - libgcc=14.2.0=h1383e82_1 @@ -371,22 +358,21 @@ env_specs: - libhwloc=2.11.2=default_ha69328c_1001 - libiconv=1.17=hcfcfb64_2 - libjpeg-turbo=3.0.0=hcfcfb64_1 - - liblapack=3.9.0=25_win64_mkl - - liblzma-devel=5.6.3=h2466b09_1 + - liblapack=3.9.0=26_win64_mkl - liblzma=5.6.3=h2466b09_1 - libpng=1.6.44=h3ca93ac_0 - libsodium=1.0.20=hc70643c_0 - - libsqlite=3.47.0=h2466b09_1 - - libtiff=4.7.0=hfc51747_1 + - libsqlite=3.47.2=h67fdade_0 + - libtiff=4.7.0=h797046b_3 - libwebp-base=1.4.0=hcfcfb64_0 - libwinpthread=12.0.0.r4.gg4f2fc60ca=h57928b3_8 - libxcb=1.17.0=h0e4246c_0 - - libxml2=2.13.5=h442d1da_0 + - libxml2=2.13.5=he286e8c_1 - libzlib=1.3.1=h2466b09_2 - markupsafe=3.0.2=py311h5082efb_1 - - mkl=2024.2.2=h66d3029_14 - - numpy=2.1.3=py311h35ffc71_0 - - openjpeg=2.5.2=h3d672ee_0 + - mkl=2024.2.2=h66d3029_15 + - numpy=2.2.0=py311hc213d13_0 + - openjpeg=2.5.3=h4d64b90_0 - openssl=3.4.0=h2466b09_0 - pandas=2.2.3=py311hcf9f919_1 - pillow=11.0.0=py311h4fbf6a9_0 @@ -410,10 +396,8 @@ env_specs: - vs2015_runtime=14.42.34433=hdffcdeb_23 - win_inet_pton=1.1.0=pyh7428d3b_8 - winpty=0.4.3=4 - - xorg-libxau=1.0.11=h0e40799_1 + - xorg-libxau=1.0.12=h0e40799_0 - xorg-libxdmcp=1.1.5=h0e40799_0 - - xz-tools=5.6.3=h2466b09_1 - - xz=5.6.3=h208afaa_1 - yaml=0.2.5=h8ffe710_2 - zeromq=4.3.5=ha9f60a1_7 - zstandard=0.23.0=py311h53056dc_1 @@ -423,10 +407,11 @@ env_specs: - cachetools==5.5.0 - fasteners==0.19 - google-apitools==0.5.32 - - google-auth==2.36.0 + - google-auth==2.37.0 - httplib2==0.22.0 - neuroglancer==2.40.1 - oauth2client==4.1.3 + - panel-neuroglancer==0.1.0 - pyasn1==0.6.1 - pyasn1-modules==0.4.1 - pyparsing==3.2.0 diff --git a/volumetric_imaging/anaconda-project.yml b/volumetric_imaging/anaconda-project.yml index f1574d23c..9962cc435 100644 --- a/volumetric_imaging/anaconda-project.yml +++ b/volumetric_imaging/anaconda-project.yml @@ -30,6 +30,7 @@ packages: &pkgs - pip - pip: - neuroglancer>=2.40.1 # auto min pinned 2024-12-06 + - panel-neuroglancer>=0.1.0 dependencies: *pkgs diff --git a/volumetric_imaging/assets/CustomNeuroglancer.png 
b/volumetric_imaging/assets/CustomNeuroglancer.png new file mode 100644 index 000000000..8ef90dade Binary files /dev/null and b/volumetric_imaging/assets/CustomNeuroglancer.png differ diff --git a/volumetric_imaging/assets/approach1.png b/volumetric_imaging/assets/approach1.png index 89699226c..671c2a421 100644 Binary files a/volumetric_imaging/assets/approach1.png and b/volumetric_imaging/assets/approach1.png differ diff --git a/volumetric_imaging/assets/approach2.png b/volumetric_imaging/assets/approach2.png index 48a8764be..e50f77b0d 100644 Binary files a/volumetric_imaging/assets/approach2.png and b/volumetric_imaging/assets/approach2.png differ diff --git a/volumetric_imaging/volumetric_imaging.ipynb b/volumetric_imaging/volumetric_imaging.ipynb index bb48f6b74..e693bf5c4 100644 --- a/volumetric_imaging/volumetric_imaging.ipynb +++ b/volumetric_imaging/volumetric_imaging.ipynb @@ -59,37 +59,41 @@ "source": [ "## Overview\n", "\n", - "**Volumetric imaging** refers to techniques that capture data in three dimensions, allowing researchers to visualize and analyze the internal structures of objects, including depth and spatial relationships. Unlike traditional 2D imaging, volumetric imaging provides a more comprehensive view of specimens.\n", + "**Volumetric imaging** refers to techniques that capture or reconstruct data in three dimensions, enabling researchers to study the internal structures and spatial relationships within a sample. For instance, in neuroscience, volumetric imaging is used to reconstruct the intricate spatial arrangements of neurons and synapses in the brain. This ability to explore spatial depth and organization makes it an essential tool in fields like biology, neuroscience, and materials science.\n", "\n", "One of the most powerful volumetric imaging techniques is **electron microscopy** ([EM](https://en.wikipedia.org/wiki/Electron_microscope)). EM uses a beam of electrons to create high-resolution images at the nanometer scale, enabling the exploration of fine structural details of biological specimens such as cells, tissues, and molecular structures. However, handling and visualizing the massive datasets generated by EM poses significant challenges due to their size and complexity.\n", "\n", - "### Introducing Neuroglancer\n", + "### Introducing **Neuroglancer**\n", "\n", - "[Neuroglancer](https://github.com/google/neuroglancer) by Google is a WebGL-based viewer designed specifically for volumetric data, offering efficient handling of large-scale datasets through data streaming. Its key features include:\n", + "[Neuroglancer](https://github.com/google/neuroglancer), developed by Google, is a browser-based 3D viewer tailored for volumetric datasets. 
Some key features include:\n", "\n", - "- **Interactive Visualization:** Smooth, real-time navigation through volumetric data.\n", - "- **Customizable Layers:** Support for raw images, segmented regions, and annotations.\n", - "- **Web-Based Interface:** Accessible directly from browsers without the need for specialized software.\n", + "- **Interactive Visualization:** Smoothly navigate large, multi-dimensional volumes.\n", + "- **Customizable Layers:** Overlay raw images, segmented regions, and annotations as separate layers.\n", + "- **Web-Based Interface:** Open a Neuroglancer session directly in your browser—no special software needed.\n", "\n", - "Originally developed for neuroscience, Neuroglancer empowers researchers to explore complex 3D structures by tracing neural pathways, identifying cellular components, and annotating regions of interest.\n", + "Neuroglancer was originally created for neuroscientists studying complex neural circuits. However, its capabilities are widely relevant to anyone dealing with large volumetric data.\n", "\n", - "### Integrating Neuroglancer with Jupyter Notebooks\n", + "### Integrating Neuroglancer with **Jupyter Notebooks**\n", "\n", - "While Neuroglancer is a powerful tool for exploring large volumes, it is typically used as a standalone application. Researchers often utilize **Jupyter Notebooks** to conduct reproducible research and combine code, data analysis, and visualizations.\n", + "While Neuroglancer is a powerful tool for exploring large volumes, it is typically used as a standalone application. Researchers often utilize **Jupyter Notebooks** to conduct reproducible research and combine code, data analysis, and visualizations. Embedding Neuroglancer into a Jupyter Notebook provides several important benefits:\n", "\n", - "By integrating Neuroglancer within Jupyter Notebooks using [HoloViz Panel](https://panel.holoviz.org/), researchers can:\n", + "- **Single-Environment Workflow:** Code, analysis, and interactive visualization live together in the same environment.\n", + "- **Reproducibility:** You can share notebooks that not only contain the analysis steps but also embedded views of the dataset.\n", + "- **Collaboration:** Colleagues can open the same notebook and interact with the volumetric data directly.\n", "\n", - "- **Consolidate Workflow:** Keep code, data analysis, and visualization in a single environment.\n", - "- **Enhance Reproducibility:** Share notebooks that include both computational steps and interactive visualizations.\n", - "- **Facilitate Collaboration:** Allow collaborators to interact with the same data and visualizations within the notebook.\n", + "### Using **HoloViz Panel** for the Integration\n", "\n", - "### Using HoloViz for the Integration\n", + "Panel provides a flexible way to embed web-based tools. With it, you can:\n", "\n", - "In this workflow, we will demonstrate how to embed Neuroglancer within a Panel application, highlighting how HoloViz tools can seamlessly extend third-party applications. 
By using Panel, we can:\n", + "- Put the Neuroglancer viewer right inside a notebook cell.\n", + "- Add widgets for controlling Neuroglancer parameters (layers, position, segmentation).\n", + "- Integrate Neuroglancer views with other visualizations, annotations, or computational results side-by-side.\n", "\n", - "- Embed the Neuroglancer viewer directly within a notebook cell.\n", - "- Create interactive widgets and controls to manipulate and report the state of the viewer.\n", - "- Combine Neuroglancer views alongside other visualizations.\n", + "### Quick-Start with the **Panel-Neuroglancer** Package\n", + "\n", + "To simplify the process, the class that integrates Neuroglancer with Panel is available as an installable Python package called [Panel-Neuroglancer](https://github.com/panel-extensions/panel-neuroglancer).\n", + "\n", + "By installing and using this package, you can avoid manually defining the class code. Simply import the package and start visualizing your data. We'll demonstrate the out-of-the-box usage first, followed by instructions on how to customize or rebuild the integration if needed.\n", "\n", "---\n", "\n", @@ -97,7 +101,8 @@ "\n", "| Topic | Type | Notes |\n", "| --- | --- | --- |\n", - "| [Panel Documentation](https://panel.holoviz.org/) | Prerequisite | Familiarity with Panel for building interactive apps |\n", + "| [Panel-Neuroglancer Repo](https://github.com/panel-extensions/panel-neuroglancer) | Prerequisite | Awareness of installation and latest development |\n", + "| [Neuroglancer Repo](https://github.com/google/neuroglancer) | Prerequisite | Neuroglancer web app guidance |\n", "\n", "---" ] }, @@ -133,13 +138,132 @@ }, "outputs": [], "source": [ - "import param\n", - "import panel as pn\n", "import neuroglancer\n", + "import panel as pn\n", + "import param\n", + "from neuroglancer.viewer import Viewer\n", + "from panel.custom import PyComponent\n", + "from panel_neuroglancer import Neuroglancer\n", "\n", "pn.extension()" ] }, { "cell_type": "markdown", "id": "fc64a149-d29f-4662-85da-22782dd9defc", "metadata": {}, "source": [ "## Using Panel-Neuroglancer\n", "Once installed, using Panel-Neuroglancer is straightforward. You can launch a Neuroglancer viewer in two main ways:\n", "\n", "**Approach 1: Load Viewer State from a URL** \n", "Perfect if a collaborator sends you a Neuroglancer link that already has data and layers configured. Just paste that URL into the widget and load it directly.\n", "\n", "**Approach 2: Start from a Pre-Configured Viewer**\n", "Ideal if you want programmatic control. You create a `neuroglancer.Viewer`, set its layers and parameters with Python code, and then display it using the `panel_neuroglancer.Neuroglancer` class.\n", "\n", "### Approach 1: Launch Viewer from a URL\n", "\n", "You can either:\n", "\n", "- Pass a valid [Neuroglancer URL](https://github.com/google/neuroglancer#examples) to `Neuroglancer(source=)`.\n", "- Launch a blank viewer using `Neuroglancer()`, paste the URL into the text input, and hit `Load`.\n", "\n", "Optionally, Panel-Neuroglancer includes a predefined `demo` URL loader button (or `load_demo=True` when instantiating) to quickly load an example dataset and state. You can find example URLs in the [Neuroglancer repository](https://github.com/google/neuroglancer#examples)."
+ ] + }, + { + "cell_type": "markdown", + "id": "be4c05fb-c167-4992-9304-acb2b809292a", + "metadata": {}, + "source": [ + "\"panel-neuroglancer\"\n", + "\n", + "**Here's a static snapshot of what the next cell produces. 👉**\n", + "\n", + "
\n", + " " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "43e94acb-a658-4544-8bc3-eb4302982851", + "metadata": {}, + "outputs": [], + "source": [ + "Neuroglancer(show_state=True, load_demo=True)" + ] + }, + { + "cell_type": "markdown", + "id": "1cd8112c-dcdd-4fbb-a931-7916a0792f4e", + "metadata": {}, + "source": [ + "## Approach 2: Displaying a Pre-Configured Viewer\n", + "\n", + "Alternatively, you can create a `neuroglancer.viewer.Viewer` object, specify layers and other settings, and then provide this as the `source` parameter. This allows you to set up the viewer programmatically and then embed it within the context of the notebook.\n", + "\n", + "Below:\n", + "- We create a Neuroglancer `Viewer` instance\n", + "- Within a transaction (`viewer.txn()`), we add layers to the viewer:\n", + " - An image layer from a precomputed data source\n", + " - A segmentation layer\n", + "- We then pass this configured viewer to our `Neuroglancer` class\n", + "- The viewer is embedded within the Panel app and displayed in the notebook\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "195dc09b-7fbd-4940-a451-a6247b0216de", + "metadata": {}, + "outputs": [], + "source": [ + "viewer = neuroglancer.Viewer()\n", + "\n", + "with viewer.txn() as s:\n", + " # Add an image layer from a precomputed data source\n", + " s.layers[\"image\"] = neuroglancer.ImageLayer(\n", + " source=\"precomputed://gs://neuroglancer-janelia-flyem-hemibrain/emdata/clahe_yz/jpeg\",\n", + " )\n", + " # Add a segmentation layer\n", + " s.layers[\"segmentation\"] = neuroglancer.SegmentationLayer(\n", + " source=\"precomputed://gs://neuroglancer-janelia-flyem-hemibrain/v1.1/segmentation\",\n", + " )" + ] + }, + { + "cell_type": "markdown", + "id": "251278f8-4634-49b1-a2ba-cb95fb7b8728", + "metadata": {}, + "source": [ + "\"Panel\n", + "\n", + "**Here's a static snapshot of what the next cell produces. 👉**\n", + "\n", + "
\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "d19a63a4-a19f-435b-a691-07e3f82a9bc1", + "metadata": {}, + "outputs": [], + "source": [ + "Neuroglancer(source=viewer, show_state=True)" + ] + }, { "cell_type": "markdown", "id": "64a50727-8c91-4d88-a82d-626e5a7715ba", @@ -151,9 +275,13 @@ } }, "source": [ - "## Defining the `NeuroglancerNB` Class\n", + "## Customizing the `Neuroglancer` Class\n", + "\n", + "The Panel-Neuroglancer package will likely meet most of your needs out of the box. However, if you'd like to customize or extend the class—or simply understand how it works—you can easily reconstruct it directly in the notebook. Customization lets you fine-tune the interface, add specific features, or streamline the user experience for your workflows.\n", + "\n", + "Our class below will create a Panel object that includes the Neuroglancer viewer embedded within an iframe, along with controls to load a Neuroglancer state from a URL, display the current state JSON, and generate shareable links.\n", "\n", - "To embed Neuroglancer within a Panel application, we'll define a custom class `NeuroglancerNB`. This class creates a Panel viewable object that includes the Neuroglancer viewer embedded within an iframe, along with controls to load a Neuroglancer state from a URL, display the current state JSON, and generate shareable links." + "As an example of customization, imagine receiving a `NEW_URL` from a colleague and deciding to simplify the interface by reducing the number of UI elements. By commenting out a few lines of code, you can remove the `Demo` loading feature and hide the URL `Load` widget, leaving only the JSON state panel, shareable state URL dropdown, and the viewer." ] }, { @@ -163,174 +291,175 @@ "metadata": {}, "outputs": [], "source": [ - "class NeuroglancerNB(pn.viewable.Viewer):\n", + "class CustomNeuroglancer(PyComponent):\n", " \"\"\"\n", - " A HoloViz Panel app for visualizing and interacting with Neuroglancer viewers\n", - " within a Jupyter Notebook.\n", - " \"\"\"\n", - " \n", - " source = param.Parameter(default=None, doc=\"\"\"\n", - " Source for the initial state of the viewer, which can be a URL string \n", - " or an existing neuroglancer.viewer.Viewer instance.\n", - " \"\"\")\n", - " height = param.Number(default=600, doc=\"height of the viewer.\")\n", - " width = param.Number(default=800, doc=\"width of the viewer.\")\n", - " show_state = param.Boolean(default=False, doc=\"Show the viewer state for debugging.\")\n", - " load_demo = param.Boolean(default=False, doc=\"Load the demo dataset on initialization.\")\n", - " \n", - " DEMO_URL = 
\"https://neuroglancer-demo.appspot.com/#!%7B%22dimensions%22%3A%7B%22x%22%3A%5B6.000000000000001e-9%2C%22m%22%5D%2C%22y%22%3A%5B6.000000000000001e-9%2C%22m%22%5D%2C%22z%22%3A%5B3.0000000000000004e-8%2C%22m%22%5D%7D%2C%22position%22%3A%5B5029.42333984375%2C6217.5849609375%2C1182.5%5D%2C%22crossSectionScale%22%3A3.7621853549999242%2C%22projectionOrientation%22%3A%5B-0.05179581791162491%2C-0.8017329573631287%2C0.0831851214170456%2C-0.5895944833755493%5D%2C%22projectionScale%22%3A4699.372698097029%2C%22layers%22%3A%5B%7B%22type%22%3A%22image%22%2C%22source%22%3A%22precomputed%3A%2F%2Fgs%3A%2F%2Fneuroglancer-public-data%2Fkasthuri2011%2Fimage%22%2C%22tab%22%3A%22source%22%2C%22name%22%3A%22original-image%22%7D%2C%7B%22type%22%3A%22image%22%2C%22source%22%3A%22precomputed%3A%2F%2Fgs%3A%2F%2Fneuroglancer-public-data%2Fkasthuri2011%2Fimage_color_corrected%22%2C%22tab%22%3A%22source%22%2C%22name%22%3A%22corrected-image%22%7D%2C%7B%22type%22%3A%22segmentation%22%2C%22source%22%3A%22precomputed%3A%2F%2Fgs%3A%2F%2Fneuroglancer-public-data%2Fkasthuri2011%2Fground_truth%22%2C%22tab%22%3A%22source%22%2C%22selectedAlpha%22%3A0.63%2C%22notSelectedAlpha%22%3A0.14%2C%22segments%22%3A%5B%223208%22%2C%224901%22%2C%2213%22%2C%224965%22%2C%224651%22%2C%222282%22%2C%223189%22%2C%223758%22%2C%2215%22%2C%224027%22%2C%223228%22%2C%22444%22%2C%223207%22%2C%223224%22%2C%223710%22%5D%2C%22name%22%3A%22ground_truth%22%7D%5D%2C%22layout%22%3A%224panel%22%7D\"\n", - "\n", - " def __init__(self, **params):\n", - " super().__init__(**params)\n", + " CustomNeuroglancer is a Panel component for visualizing and interacting with a Neuroglancer Viewer\n", + " (without the demo loading stuff).\n", "\n", - " self.source_not_provided = (self.source is None)\n", - " self.viewer = (self.source if isinstance(self.source, neuroglancer.viewer.Viewer)\n", - " else neuroglancer.Viewer())\n", + " The component can be initialized from a URL or a `neuroglancer.viewer.Viewer` instance.\n", + " \"\"\"\n", "\n", - " # Setup UI\n", - " self.url_input = pn.widgets.TextInput(\n", + " show_state = param.Boolean(default=False, doc=\"\"\"\n", + " Provides a collapsible card widget under the viewer that displays the viewer's state.\"\"\")\n", + "\n", + " url = param.String(default=None, doc=\"\"\"\n", + " The URL public URL of the Neuroglancer Viewer.\"\"\")\n", + "\n", + " def __init__(\n", + " self,\n", + " source=None,\n", + " # load_demo=False,\n", + " **params,\n", + " ):\n", + " viewer = None\n", + " if isinstance(source, str):\n", + " params['url'] = source\n", + " elif isinstance(source, Viewer):\n", + " params['url'] = source.get_viewer_url()\n", + " viewer = source\n", + " elif source is not None:\n", + " raise ValueError('Neuroglancer source must be a URL or neuroglancer Viewer instance')\n", + " self._source_not_provided = source is None\n", + " super().__init__(**params)\n", + " if viewer is None:\n", + " viewer = neuroglancer.Viewer()\n", + " self.viewer = viewer\n", + "\n", + " self._setup_ui_components()\n", + " self._configure_viewer()\n", + " self._setup_callbacks()\n", + "\n", + " if isinstance(source, str):\n", + " self._load_state_from_url(source)\n", + " # if load_demo:\n", + " # self._load_demo()\n", + "\n", + " def _setup_ui_components(self):\n", + " self.url_input = pn.widgets.TextInput.from_param(\n", + " self.param.url,\n", " placeholder=\"Enter a Neuroglancer URL and click Load\",\n", " name=\"Input URL\",\n", - " width=self.width,\n", + " width=700,\n", + " )\n", + "\n", + " self.load_button = pn.widgets.Button(\n", + " 
name=\"Load\", button_type=\"primary\", width=75\n", " )\n", - " self.load_button = pn.widgets.Button(name=\"Load\", button_type=\"primary\", width=75)\n", - " self.demo_button = pn.widgets.Button(name=\"Demo\", button_type=\"warning\", width=75)\n", - " self.json_pane = pn.pane.JSON({}, theme=\"light\", depth=2, name=\"Viewer State\", \n", - " sizing_mode='stretch_both')\n", + " # self.demo_button = pn.widgets.Button(\n", + " # name=\"Demo\", button_type=\"warning\", width=75\n", + " # )\n", + "\n", + " self.json_pane = pn.pane.JSON(\n", + " {}, theme=\"light\", depth=2, name=\"Viewer State\", sizing_mode='stretch_both'\n", + " )\n", + "\n", " self.shareable_url_pane = pn.pane.Markdown(\"**Shareable URL:**\")\n", " self.local_url_pane = pn.pane.Markdown(\"**Local URL:**\")\n", "\n", " self.iframe = pn.pane.HTML(\n", - " height=self.height,\n", - " width=self.width,\n", + " sizing_mode=\"stretch_both\",\n", + " aspect_ratio=self.param.aspect_ratio,\n", + " margin=0,\n", + " min_height=800,\n", " styles={\"resize\": \"both\", \"overflow\": \"hidden\"},\n", " )\n", "\n", - " # Configure viewer and UI\n", - " local_url = self.viewer.get_viewer_url()\n", - " self.local_url_pane.object = self._generate_dropdown_markup(\"Local URL\", local_url)\n", - " iframe_style = ('frameborder=\"0\" scrolling=\"no\" marginheight=\"0\" marginwidth=\"0\" '\n", - " 'style=\"width:100%; height:100%; min-width:500px; min-height:500px;\"')\n", - " self.iframe.object = f''\n", + " def _configure_viewer(self):\n", + " self._update_local_url()\n", + " self._update_iframe_with_local_url()\n", "\n", - " # UI Actions\n", - " self.load_button.on_click(self._on_load_button_clicked)\n", - " self.demo_button.on_click(self._on_demo_button_clicked)\n", + " def _setup_callbacks(self):\n", + " self.load_button.on_click(self._load_url)\n", + " # self.demo_button.on_click(self._load_demo)\n", " self.viewer.shared_state.add_changed_callback(self._on_viewer_state_changed)\n", "\n", - " # If a source URL was provided, initialize the viewer from it\n", - " if self.source and not isinstance(self.source, neuroglancer.viewer.Viewer):\n", - " self._initialize_viewer_from_url(self.source)\n", - "\n", - " # Load the demo if requested\n", - " if self.load_demo:\n", - " self.demo_button.clicks += 1\n", + " # def _load_demo(self, event=None):\n", + " # self.url = DEMO_URL\n", + " # self._load_state_from_url(self.url)\n", "\n", - " def _initialize_viewer_from_url(self, source: str):\n", - " self.url_input.value = source\n", - " self._load_state_from_url(source)\n", - "\n", - " def _on_demo_button_clicked(self, event):\n", - " self.url_input.value = self.DEMO_URL\n", - " self._load_state_from_url(self.url_input.value)\n", - "\n", - " def _on_load_button_clicked(self, event):\n", - " self._load_state_from_url(self.url_input.value)\n", + " def _load_url(self, event=None):\n", + " self._load_state_from_url(self.url)\n", "\n", " def _load_state_from_url(self, url):\n", " try:\n", - " new_state = neuroglancer.parse_url(url)\n", + " new_state = self._parse_state_from_url(url)\n", " self.viewer.set_state(new_state)\n", " except Exception as e:\n", - " print(f\"Error loading Neuroglancer state: {e}\")\n", + " print(f\"Error loading Neuroglancer state: {e}\") # noqa\n", + "\n", + " def _parse_state_from_url(self, url):\n", + " return neuroglancer.parse_url(url)\n", "\n", " def _on_viewer_state_changed(self):\n", - " # Update shareable URL and JSON pane\n", - " shareable_url = neuroglancer.to_url(self.viewer.state)\n", - " self.shareable_url_pane.object 
= self._generate_dropdown_markup(\"Shareable URL\", shareable_url)\n", + " self._update_shareable_url()\n", + " self._update_json_pane()\n", + "\n", + " def _update_shareable_url(self):\n", + " self.url = shareable_url = neuroglancer.to_url(self.viewer.state)\n", + " self.shareable_url_pane.object = self._generate_dropdown_markup(\n", + " \"Shareable URL\", shareable_url\n", + " )\n", + "\n", + " def _update_local_url(self):\n", + " self.local_url_pane.object = self._generate_dropdown_markup(\n", + " \"Local URL\", self.viewer.get_viewer_url()\n", + " )\n", + "\n", + " def _update_iframe_with_local_url(self):\n", + " iframe_style = (\n", + " 'frameborder=\"0\" scrolling=\"no\" marginheight=\"0\" marginwidth=\"0\" '\n", + " 'style=\"width:100%; height:100%; min-width:500px; min-height:500px;\"'\n", + " )\n", + " self.iframe.object = (\n", + " f'<iframe src=\"{self.viewer.get_viewer_url()}\" {iframe_style}></iframe>'\n", + " )\n", + "\n", + " def _update_json_pane(self):\n", " self.json_pane.object = self.viewer.state.to_json()\n", "\n", " def _generate_dropdown_markup(self, title, url):\n", " return f\"\"\"\n", "
\n", - " {title}:\n", - " {url}\n", - "
\n", + "
\n", + " {title}:\n", + " {url}\n", + "
\n", " \"\"\"\n", "\n", " def __panel__(self):\n", - " controls_layout = pn.Column(\n", - " pn.Row(self.demo_button, self.load_button),\n", - " pn.Row(self.url_input),\n", - " visible=self.source_not_provided,\n", + " controls = pn.Column(\n", + " # pn.Row(self.demo_button, self.load_button, visible=self._source_not_provided),\n", + " # pn.Row(self.url_input, visible=self._source_not_provided),\n", + " # self.local_url_pane,\n", + " self.shareable_url_pane\n", " )\n", - " links_layout = pn.Column(self.local_url_pane, self.shareable_url_pane)\n", - "\n", " state_widget = pn.Card(\n", " self.json_pane,\n", " title=\"State\",\n", " collapsed=False,\n", - " visible=self.show_state,\n", + " visible=self.param.show_state,\n", " styles={\"background\": \"WhiteSmoke\"},\n", " max_width=350\n", " )\n", - "\n", " return pn.Column(\n", - " controls_layout,\n", - " links_layout,\n", - " pn.Row(state_widget, self.iframe)\n", + " controls,\n", + " pn.Row(state_widget, self.iframe),\n", " )" ] }, { "cell_type": "markdown", - "id": "4dac819c-c3e9-4a51-be83-bf54b28f62b2", - "metadata": { - "panel-layout": { - "height": 50.850006103515625, - "visible": true, - "width": 100 - } - }, - "source": [ - "## Approach 1: Launching a New Viewer and Loading State from URL\n" - ] - }, - { - "cell_type": "markdown", - "id": "58f1de1e-d307-4e6a-8f49-ec7f4a04cc24", - "metadata": { - "panel-layout": { - "height": 110.35000610351562, - "visible": true, - "width": 100 - } - }, + "id": "d864fe66-15b1-43c4-bef9-d295b356e4ba", + "metadata": {}, "source": [ - "In this workflow, we'll initialize a new Neuroglancer viewer and load a dataset using a parameterized URL. This allows us to explore different datasets by simply changing the URL.\n", + "### How This Custom Class Works\n", "\n", - "Either use `NeuroglancerNB(source=)` or just run `NeuroglancerNB()` and input the URL in the GUI:\n", + "Here is a summary of the key aspect of this custom Panel component:\n", "\n", - "To use a demo URL, either `load_demo=True` or click the **Demo** button. You can find other demo links on the [Neuroglancer repository](https://github.com/google/neuroglancer#examples).\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "615b234d-e8f3-4600-826d-c1db1d820bbf", - "metadata": { - "panel-layout": { - "height": 763.5833129882812, - "visible": true, - "width": 100 - }, - "tags": [ - "remove-output" - ] - }, - "outputs": [], - "source": [ - "NeuroglancerNB(show_state=True, load_demo=True)" + "- **Constructing the Panel Layout:** The class inherits from PyComponent, a mechanism that lets us assemble multiple Panel components into a unified widget. The `__panel__` method defines the final layout: a card showing the viewer state (if requested) next to an iframe embedding the Neuroglancer viewer.\n", + "\n", + "- **Viewer State Synchronization:** When the Neuroglancer viewer’s state changes, the callback (`_on_viewer_state_changed`) updates the JSON pane and shareable URL, maintaining alignment between the viewer and the notebook widgets. The `shared_state` object in the viewer enables this synchronization by alerting Panel to any changes within Neuroglancer.\n", + "\n", + "- **URL Parsing and Generation:** Neuroglancer states can be encoded in URLs. By calling `neuroglancer.parse_url()`, we load an existing state from a shareable link. Conversely, `neuroglancer.to_url()` generates a new URL from the current state, making it easy to share your exact view with others." 
] }, { @@ -345,111 +474,25 @@ }, "source": [ "\"Panel\n", "\n", - "**Here's a static snapshot of what the previous cell produces. 👉**\n", + "**Here's a static snapshot of what the next cell produces. 👉**\n", "\n", "
\n" ] }, - { - "cell_type": "markdown", - "id": "afa26a39-e22f-4ae8-880e-559350e1bfc9", - "metadata": { - "panel-layout": { - "height": 50.850006103515625, - "visible": true, - "width": 100 - } - }, - "source": [ - "## Approach 2: Displaying a Pre-specified Viewer" - ] - }, - { - "cell_type": "markdown", - "id": "2717e7f4-f098-4ba8-87a5-1e673496d740", - "metadata": { - "panel-layout": { - "height": 185, - "visible": true, - "width": 100 - } - }, - "source": [ - "Alternatively, you can provide a pre-configured `neuroglancer.viewer.Viewer` object as the `source` to display that viewer in the notebook. This allows you to set up the viewer programmatically and then embed it.\n", - "\n", - "- We create a Neuroglancer `Viewer` instance.\n", - "- Within a transaction (`viewer.txn()`), we add layers to the viewer:\n", - " - An image layer from a precomputed data source.\n", - " - A segmentation layer.\n", - "- We then pass this configured viewer to our `NeuroglancerNB` class.\n", - "- The viewer is embedded within the Panel app and displayed in the notebook.\n", - "\n" - ] - }, { "cell_type": "code", "execution_count": null, - "id": "1d178d1c-7f5b-4be2-b000-26157cb28416", + "id": "b3db73dd-f7e3-4896-b76a-a6743042713c", "metadata": {}, "outputs": [], "source": [ - "viewer = neuroglancer.Viewer()\n", - "\n", - "with viewer.txn() as s:\n", - " # Add an image layer from a precomputed data source\n", - " s.layers[\"image\"] = neuroglancer.ImageLayer(\n", - " source=\"precomputed://gs://neuroglancer-janelia-flyem-hemibrain/emdata/clahe_yz/jpeg\",\n", - " )\n", - " # Add a segmentation layer\n", - " s.layers[\"segmentation\"] = neuroglancer.SegmentationLayer(\n", - " source=\"precomputed://gs://neuroglancer-janelia-flyem-hemibrain/v1.1/segmentation\",\n", - " )" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "26d8d75a-4512-4d92-8c96-a5f974fc72c7", - "metadata": { - "panel-layout": { - "height": 664.2999877929688, - "visible": true, - "width": 100 - }, - "tags": [ - "remove-output" - ] - }, - "outputs": [], - "source": [ - "NeuroglancerNB(source=viewer, show_state=True)" - ] - }, - { - "cell_type": "markdown", - "id": "94b0e8a2-4fa3-44b0-b7a4-8b43f9ad081f", - "metadata": { - "panel-layout": { - "height": 68.29998779296875, - "visible": false, - "width": 100 - } - }, - "source": [ - "\"Panel\n", - "\n", - "**Here's a static snapshot of what the previous cell produces. 👉**\n", - "\n", - "
\n" + "NEW_URL = \"https://fafb-dot-neuroglancer-demo.appspot.com/#!%7B%22dimensions%22:%7B%22x%22:%5B4e-9%2C%22m%22%5D%2C%22y%22:%5B4e-9%2C%22m%22%5D%2C%22z%22:%5B4e-8%2C%22m%22%5D%7D%2C%22position%22:%5B109421.8984375%2C41044.6796875%2C5417%5D%2C%22crossSectionScale%22:2.1875%2C%22projectionOrientation%22:%5B-0.08939177542924881%2C-0.9848012924194336%2C-0.07470247149467468%2C0.12882165610790253%5D%2C%22projectionScale%22:27773.019357116023%2C%22layers%22:%5B%7B%22type%22:%22image%22%2C%22source%22:%22precomputed://gs://neuroglancer-fafb-data/fafb_v14/fafb_v14_orig%22%2C%22tab%22:%22source%22%2C%22name%22:%22fafb_v14%22%2C%22visible%22:false%7D%2C%7B%22type%22:%22image%22%2C%22source%22:%22precomputed://gs://neuroglancer-fafb-data/fafb_v14/fafb_v14_clahe%22%2C%22tab%22:%22source%22%2C%22name%22:%22fafb_v14_clahe%22%7D%2C%7B%22type%22:%22segmentation%22%2C%22source%22:%22precomputed://gs://fafb-ffn1-20190805/segmentation%22%2C%22tab%22:%22source%22%2C%22segments%22:%5B%22710435991%22%5D%2C%22name%22:%22fafb-ffn1-20190805%22%7D%2C%7B%22type%22:%22annotation%22%2C%22source%22:%22precomputed://gs://neuroglancer-20191211_fafbv14_buhmann2019_li20190805%22%2C%22tab%22:%22rendering%22%2C%22annotationColor%22:%22#cecd11%22%2C%22shader%22:%22#uicontrol%20vec3%20preColor%20color%28default=%5C%22blue%5C%22%29%5Cn#uicontrol%20vec3%20postColor%20color%28default=%5C%22red%5C%22%29%5Cn#uicontrol%20float%20scorethr%20slider%28min=0%2C%20max=1000%29%5Cn#uicontrol%20int%20showautapse%20slider%28min=0%2C%20max=1%29%5Cn%5Cnvoid%20main%28%29%20%7B%5Cn%20%20setColor%28defaultColor%28%29%29%3B%5Cn%20%20setEndpointMarkerColor%28%5Cn%20%20%20%20vec4%28preColor%2C%200.5%29%2C%5Cn%20%20%20%20vec4%28postColor%2C%200.5%29%29%3B%5Cn%20%20setEndpointMarkerSize%285.0%2C%205.0%29%3B%5Cn%20%20setLineWidth%282.0%29%3B%5Cn%20%20if%20%28int%28prop_autapse%28%29%29%20%3E%20showautapse%29%20discard%3B%5Cn%20%20if%20%28prop_score%28%29%3Cscorethr%29%20discard%3B%5Cn%7D%5Cn%5Cn%22%2C%22shaderControls%22:%7B%22scorethr%22:80%7D%2C%22linkedSegmentationLayer%22:%7B%22pre_segment%22:%22fafb-ffn1-20190805%22%2C%22post_segment%22:%22fafb-ffn1-20190805%22%7D%2C%22filterBySegmentation%22:%5B%22post_segment%22%2C%22pre_segment%22%5D%2C%22name%22:%22synapses_buhmann2019%22%7D%2C%7B%22type%22:%22image%22%2C%22source%22:%22n5://gs://fafb-v14-synaptic-clefts-heinrich-et-al-2018-n5/synapses_dt_reblocked%22%2C%22tab%22:%22source%22%2C%22opacity%22:0.73%2C%22shader%22:%22void%20main%28%29%20%7BemitRGBA%28vec4%280.0%2C0.0%2C1.0%2CtoNormalized%28getDataValue%28%29%29%29%29%3B%7D%22%2C%22name%22:%22clefts_Heinrich_etal%22%2C%22visible%22:false%7D%2C%7B%22type%22:%22segmentation%22%2C%22source%22:%22precomputed://gs://neuroglancer-fafb-data/elmr-data/FAFBNP.surf/mesh#type=mesh%22%2C%22tab%22:%22source%22%2C%22segments%22:%5B%221%22%2C%2210%22%2C%2211%22%2C%2212%22%2C%2213%22%2C%2214%22%2C%2215%22%2C%2216%22%2C%2217%22%2C%2218%22%2C%2219%22%2C%222%22%2C%2220%22%2C%2221%22%2C%2222%22%2C%2223%22%2C%2224%22%2C%2225%22%2C%2226%22%2C%2227%22%2C%2228%22%2C%2229%22%2C%223%22%2C%2230%22%2C%2231%22%2C%2232%22%2C%2233%22%2C%2234%22%2C%2235%22%2C%2236%22%2C%2237%22%2C%2238%22%2C%2239%22%2C%224%22%2C%2240%22%2C%2241%22%2C%2242%22%2C%2243%22%2C%2244%22%2C%2245%22%2C%2246%22%2C%2247%22%2C%2248%22%2C%2249%22%2C%225%22%2C%2250%22%2C%2251%22%2C%2252%22%2C%2253%22%2C%2254%22%2C%2255%22%2C%2256%22%2C%2257%22%2C%2258%22%2C%2259%22%2C%226%22%2C%2260%22%2C%2261%22%2C%2262%22%2C%2263%22%2C%2264%22%2C%2265%22%2C%2266%22%2C%2267%22%2C%2268%22%2C%2269%22%2C%227%22%2C%2270%22%2
C%2271%22%2C%2272%22%2C%2273%22%2C%2274%22%2C%2275%22%2C%228%22%2C%229%22%5D%2C%22name%22:%22neuropil-regions-surface%22%2C%22visible%22:false%7D%2C%7B%22type%22:%22mesh%22%2C%22source%22:%22vtk://https://storage.googleapis.com/neuroglancer-fafb-data/elmr-data/FAFB.surf.vtk.gz%22%2C%22tab%22:%22source%22%2C%22shader%22:%22void%20main%28%29%20%7BemitRGBA%28vec4%281.0%2C%200.0%2C%200.0%2C%200.5%29%29%3B%7D%22%2C%22name%22:%22neuropil-full-surface%22%2C%22visible%22:false%7D%2C%7B%22type%22:%22segmentation%22%2C%22source%22:%5B%7B%22url%22:%22precomputed://gs://fafb-ffn1-20190805/segmentation%22%2C%22subsources%22:%7B%22default%22:true%2C%22bounds%22:true%7D%2C%22enableDefaultSubsources%22:false%7D%2C%22precomputed://gs://fafb-ffn1-20190805/segmentation/skeletons_32nm%22%5D%2C%22tab%22:%22source%22%2C%22selectedAlpha%22:0%2C%22segments%22:%5B%224613663523%22%5D%2C%22name%22:%22skeletons_32nm%22%2C%22visible%22:false%7D%2C%7B%22type%22:%22segmentation%22%2C%22source%22:%22precomputed://gs://fafb-ffn1/fafb-public-skeletons%22%2C%22tab%22:%22source%22%2C%22segments%22:%5B%5D%2C%22name%22:%22public_skeletons%22%2C%22visible%22:false%7D%5D%2C%22showAxisLines%22:false%2C%22showSlices%22:false%2C%22layout%22:%22xy-3d%22%7D\"\n", + "CustomNeuroglancer(source=NEW_URL, show_state=True)" ] }, { @@ -475,11 +518,17 @@ "\n", "| Resource | Description |\n", "| --- | --- |\n", - "| [Neuroglancer GitHub Repository](https://github.com/google/neuroglancer) | Neuroglancer source code and documentation |\n", - "| [Neuroglancer Python Integration](https://github.com/google/neuroglancer/tree/master/python) | Python interface for controlling Neuroglancer |\n", - "\n", - "---\n" + "| [Panel Tutorials](https://panel.holoviz.org/) | Familiarity with Panel for building interactive apps |\n", + "| [Neuroglancer Python Integration](https://github.com/google/neuroglancer/tree/master/python) | Python interface for controlling Neuroglancer |\n" ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "f63b5019-7814-46f0-b968-c6eb28b9c40a", + "metadata": {}, + "outputs": [], + "source": [] } ], "metadata": {