From 0c40b6a9a6096cc7e68c49cb7de646f30c707b69 Mon Sep 17 00:00:00 2001 From: Thinh Nguyen Date: Mon, 25 Mar 2024 16:00:46 -0500 Subject: [PATCH 1/2] feat(report): prototyping tables for visualization and rendering on sciviz --- webapps/sciviz/docker-compose.yaml | 2 +- webapps/sciviz/sciviz_specsheet.yaml | 317 +++++--------------- workflow/pipeline/report.py | 146 +++++++++ workflow/utils/plotting/__init__.py | 0 workflow/utils/plotting/photometry_plots.py | 90 ++++++ 5 files changed, 305 insertions(+), 250 deletions(-) create mode 100644 workflow/pipeline/report.py create mode 100644 workflow/utils/plotting/__init__.py create mode 100644 workflow/utils/plotting/photometry_plots.py diff --git a/webapps/sciviz/docker-compose.yaml b/webapps/sciviz/docker-compose.yaml index c23f3e3..110099b 100644 --- a/webapps/sciviz/docker-compose.yaml +++ b/webapps/sciviz/docker-compose.yaml @@ -1,4 +1,4 @@ -#MODE= PROD docker-compose up +#MODE=PROD docker-compose up # # Access using fakeservices.datajoint.io version: '2.4' diff --git a/webapps/sciviz/sciviz_specsheet.yaml b/webapps/sciviz/sciviz_specsheet.yaml index 5ca9ba5..35bfad6 100644 --- a/webapps/sciviz/sciviz_specsheet.yaml +++ b/webapps/sciviz/sciviz_specsheet.yaml @@ -22,105 +22,8 @@ SciViz: # top level tab image_route: ../../logo.svg text: | # Welcome to the data navigator for the SabatiniLab data pipeline - Workflow Monitor: - route: /workflow_monitor - grids: - grid1: - type: fixed - columns: 1 - row_height: 680 - components: - Worker Status: - route: /workflow_monitor_workerstatus - x: 0 - y: 0 - height: 1 - width: 1 - type: antd-table - restriction: > - def restriction(**kwargs): - return dict(**kwargs) - dj_query: > - def dj_query(sabatini_dj_workerlog): - cls = sabatini_dj_workerlog.WorkerLog - backtrack_minutes = 60 - recent = ( - cls.proj( - minute_elapsed="TIMESTAMPDIFF(MINUTE, process_timestamp, UTC_TIMESTAMP())" - ) - & f"minute_elapsed < {backtrack_minutes}" - ) - recent_jobs = dj.U("process").aggr( - cls & recent, - worker_count="count(DISTINCT pid)", - minutes_since_oldest="TIMESTAMPDIFF(MINUTE, MIN(process_timestamp), UTC_TIMESTAMP())", - minutes_since_newest="TIMESTAMPDIFF(MINUTE, MAX(process_timestamp), UTC_TIMESTAMP())", - ) - - return {'query': recent_jobs, 'fetch_args': {'order_by': 'minutes_since_newest ASC'}} - Error Log: - route: /sabatini_dj_errorlog - x: 0 - y: 1 - height: 1 - width: 1 - type: antd-table - restriction: > - def restriction(**kwargs): - return dict(**kwargs) - dj_query: > - def dj_query(sabatini_dj_workerlog): - cls = sabatini_dj_workerlog.ErrorLog.proj(..., '-error_timestamp', minutes_elapsed='TIMESTAMPDIFF(MINUTE, error_timestamp, UTC_TIMESTAMP())') - return {'query': cls, 'fetch_args': {'order_by': 'minutes_elapsed ASC'}} - Jobs Log: - route: /sabatini_dj_jobslog - x: 0 - y: 2 - height: 1 - width: 1 - type: antd-table - restriction: > - def restriction(**kwargs): - return dict(**kwargs) - dj_query: > - def dj_query(sabatini_dj_workerlog): - workerlog_vm = sabatini_dj_workerlog - org_name, wf_name = workerlog_vm.schema.database.replace('support_', '').split('_')[:2] - db_prefix = f'{org_name}_{wf_name}' - connection = dj.conn( - host=workerlog_vm.schema.connection.conn_info['host'], - user=workerlog_vm.schema.connection.conn_info['user'], - password=workerlog_vm.schema.connection.conn_info['passwd'], - reset=True) - schema_names = [s for s in dj.list_schemas(connection=connection) if (s.startswith(db_prefix) or - s.startswith(f'{org_name}_support_{wf_name}'))] - jobs_table = None - print(schema_names, 
flush=True) - for schema_name in schema_names: - vm = dj.VirtualModule(schema_name, schema_name, connection=connection) - jobs_query = dj.U(*vm.schema.jobs.heading.names) & vm.schema.jobs - if jobs_table is None: - jobs_table = jobs_query - else: - jobs_table += jobs_query - jobs_table = jobs_table.proj(..., minutes_elapsed='TIMESTAMPDIFF(MINUTE, timestamp, UTC_TIMESTAMP())') - return {'query': jobs_table, 'fetch_args': {'order_by': 'status DESC, minutes_elapsed ASC'}} - Worker Log: - route: /sabatini_dj_workerlog - x: 0 - y: 3 - height: 1 - width: 1 - type: antd-table - restriction: > - def restriction(**kwargs): - return dict(**kwargs) - dj_query: > - def dj_query(sabatini_dj_workerlog): - cls = sabatini_dj_workerlog.WorkerLog.proj(..., minutes_elapsed='TIMESTAMPDIFF(MINUTE, process_timestamp, UTC_TIMESTAMP())') - return {'query': cls, 'fetch_args': {'order_by': 'minutes_elapsed ASC'}} - Ephys Session Overview: - route: /session_overview_ephys + Session Overview: + route: /session_overview grids: grid1: type: fixed @@ -128,7 +31,8 @@ SciViz: # top level tab row_height: 680 components: Session Status: - route: /session_sessionstatus_ephys + route: /session_sessionstatus + link: /per_session_plots x: 0 y: 0 height: 1 @@ -138,183 +42,98 @@ SciViz: # top level tab def restriction(**kwargs): return dict(**kwargs) dj_query: > - def dj_query(sabatini_dj_session, sabatini_dj_ephys): + def dj_query(sabatini_dj_session, sabatini_dj_ephys, sabatini_dj_imaging, sabatini_dj_photometry, sabatini_dj_event, sabatini_dj_model): session = sabatini_dj_session ephys = sabatini_dj_ephys + imaging = sabatini_dj_imaging + photometry = sabatini_dj_photometry + event = sabatini_dj_event + model = sabatini_dj_model session_process_status = session.Session - - session_process_status *= session.Session.aggr(ephys.ProbeInsertion, - insertion='count(insertion_number)', keep_all_rows=True) - session_process_status *= session.Session.aggr(ephys.EphysRecording, - ephys_recording='count(insertion_number)', keep_all_rows=True) - session_process_status *= session.Session.aggr(ephys.LFP, - lfp='count(insertion_number)', keep_all_rows=True) - session_process_status *= session.Session.aggr(ephys.ClusteringTask, - clustering_task='count(insertion_number)', keep_all_rows=True) - session_process_status *= session.Session.aggr(ephys.Clustering, - clustering='count(insertion_number)', keep_all_rows=True) + session_process_status *= session.Session.aggr(event.BehaviorRecording, + behav='count(session_id)', keep_all_rows=True) session_process_status *= session.Session.aggr(ephys.CuratedClustering, - curated_clustering='count(insertion_number)', keep_all_rows=True) - session_process_status *= session.Session.aggr(ephys.QualityMetrics, - qc_metrics='count(insertion_number)', keep_all_rows=True) - session_process_status *= session.Session.aggr(ephys.WaveformSet, - waveform='count(insertion_number)', keep_all_rows=True) - - query = session_process_status.proj(..., all_done='insertion > 0 AND waveform = clustering_task') + ephys='count(insertion_number)', keep_all_rows=True) + session_process_status *= session.Session.aggr(imaging.Fluorescence, + ophys='count(scan_id)', keep_all_rows=True) + session_process_status *= session.Session.aggr(model.PoseEstimation, + pose='count(recording_id)', keep_all_rows=True) + session_process_status *= session.Session.aggr(photometry.FiberPhotometrySynced.SyncedTrace, + photometry='count(fiber_id)', keep_all_rows=True) + + query = session_process_status return {'query': query, 'fetch_args': {'order_by': 
'session_datetime DESC'}} - Calcium Imaging Session Overview: - route: /session_overview_imaging + + PerSessionPlots: + hidden: true + route: /per_session_plots grids: - grid1: + fiber_photometry: type: fixed + route: /per_session_plots_fiber_photometry columns: 1 - row_height: 680 + row_height: 1500 components: - Session Status: - route: /session_sessionstatus_imaging + comp1: + route: /per_session_plots_fiber_photometry_comp1 x: 0 y: 0 - height: 1 - width: 1 - type: antd-table + height: 0.2 + width: 0.8 + type: file:image:attach restriction: > def restriction(**kwargs): return dict(**kwargs) dj_query: > - def dj_query(sabatini_dj_session, sabatini_dj_imaging, sabatini_dj_scan): - session = sabatini_dj_session - imaging = sabatini_dj_imaging - scan = sabatini_dj_scan - - session_process_status = session.Session - - session_process_status *= session.Session.aggr(scan.Scan, - scan='count(scan_id)', keep_all_rows=True) - session_process_status *= session.Session.aggr(scan.ScanInfo, - scan_info='count(scan_id)', keep_all_rows=True) - session_process_status *= session.Session.aggr(imaging.ProcessingTask, - processing_task='count(scan_id)', keep_all_rows=True) - session_process_status *= session.Session.aggr(imaging.Processing, - processing='count(scan_id)', keep_all_rows=True) - session_process_status *= session.Session.aggr(imaging.Segmentation, - segmentation='count(scan_id)', keep_all_rows=True) - session_process_status *= session.Session.aggr(imaging.Fluorescence, - fluorescence='count(scan_id)', keep_all_rows=True) - session_process_status *= session.Session.aggr(imaging.MaskClassification, - mask_classfication='count(scan_id)', keep_all_rows=True) - session_process_status *= session.Session.aggr(imaging.Activity, - activity='count(scan_id)', keep_all_rows=True) - session_process_status *= session.Session.aggr(imaging.MotionCorrection, - motion_correction='count(scan_id)', keep_all_rows=True) - - query = session_process_status.proj(..., all_done='scan > 0 AND activity = processing_task') - - return {'query': query, 'fetch_args': {'order_by': 'session_datetime DESC'}} - Photometry Session Overview: - route: /session_overview_photometry - grids: - grid1: - type: fixed - columns: 1 - row_height: 680 - components: - Session Status: - route: /session_sessionstatus_photometry + def dj_query(sabatini_dj_report): + report = sabatini_dj_report + return {'query': report.FiberPhotometryPlots(), 'fetch_args': ['demodulated_trace_plot']} + comp2: + route: /per_session_plots_fiber_photometry_comp2 x: 0 - y: 0 - height: 1 - width: 1 - type: antd-table + y: 0.2 + height: 0.5 + width: 0.8 + type: file:image:attach restriction: > def restriction(**kwargs): return dict(**kwargs) dj_query: > - def dj_query(sabatini_dj_session, sabatini_dj_photometry): - session = sabatini_dj_session - photometry = sabatini_dj_photometry - - session_process_status = session.Session - - session_process_status *= session.Session.aggr(photometry.FiberPhotometry, - fiber_photometry='count(session_id)', keep_all_rows=True) - session_process_status *= session.Session.aggr(photometry.FiberPhotometrySynced, - fiber_photometry_synced='count(session_id)', keep_all_rows=True) - - query = session_process_status.proj(..., all_done='fiber_photometry > 0 AND fiber_photometry_synced = fiber_photometry') - - return {'query': query, 'fetch_args': {'order_by': 'session_datetime DESC'}} - Behavior Session Overview: - route: /session_overview_behavior - grids: - grid1: - type: fixed + def dj_query(sabatini_dj_report): + report = 
sabatini_dj_report + return {'query': report.FiberPhotometryPlots(), 'fetch_args': ['event_aligned_plot']} + pose_estimation: + route: /per_session_plots_pose_estimation + type: dynamic columns: 1 - row_height: 680 - components: - Session Status: - route: /session_sessionstatus_behavior - x: 0 - y: 0 - height: 1 - width: 1 - type: antd-table + row_height: 1000 + restriction: > + def restriction(**kwargs): + return dict(**kwargs) + dj_query: > + def dj_query(sabatini_dj_report): + report = sabatini_dj_report + return {'query': report.PoseEstimationPlots.BodyPart.proj(), 'fetch_args': []} + component_templates: + comp1: + route: /per_session_plots_pose_estimation_comp1 + type: file:image:attach restriction: > def restriction(**kwargs): return dict(**kwargs) dj_query: > - def dj_query(sabatini_dj_session, sabatini_dj_event, sabatini_dj_ingestion): - session = sabatini_dj_session - event = sabatini_dj_event - ingestion = sabatini_dj_ingestion - - session_process_status = session.Session - - session_process_status *= session.Session.aggr(event.BehaviorRecording, - behavior_recording='count(session_id)', keep_all_rows=True) - session_process_status *= session.Session.aggr(event.Event, - event='count(session_id)', keep_all_rows=True) - session_process_status *= session.Session.aggr(ingestion.BehaviorIngestion, - ingestion='count(session_id)', keep_all_rows=True) - - query = session_process_status.proj(..., all_done='behavior_recording > 0 AND ingestion = behavior_recording') - - return {'query': query, 'fetch_args': {'order_by': 'session_datetime DESC'}} - DLC Session Overview: - route: /session_overview_dlc - grids: - grid1: - type: fixed - columns: 1 - row_height: 680 - components: - Session Status: - route: /session_sessionstatus_dlc - x: 0 - y: 0 - height: 1 - width: 1 - type: antd-table + def dj_query(sabatini_dj_report): + report = sabatini_dj_report + return {'query': report.PoseEstimationPlots.BodyPart(), 'fetch_args': ['bodypart_xy_plot']} + comp2: + route: /per_session_plots_pose_estimation_comp2 + type: file:image:attach restriction: > def restriction(**kwargs): return dict(**kwargs) dj_query: > - def dj_query(sabatini_dj_session, sabatini_dj_model): - session = sabatini_dj_session - model = sabatini_dj_model - - session_process_status = session.Session - - session_process_status *= session.Session.aggr(model.VideoRecording, - video_recording='count(recording_id)', keep_all_rows=True) - session_process_status *= session.Session.aggr(model.RecordingInfo, - recording_info='count(recording_id)', keep_all_rows=True) - session_process_status *= session.Session.aggr(model.PoseEstimationTask, - pose_estimation_task='count(recording_id)', keep_all_rows=True) - session_process_status *= session.Session.aggr(model.PoseEstimation, - pose_estimation='count(recording_id)', keep_all_rows=True) - - query = session_process_status.proj(..., all_done='video_recording > 0 AND pose_estimation = pose_estimation_task') - - return {'query': query, 'fetch_args': {'order_by': 'session_datetime DESC'}} + def dj_query(sabatini_dj_report): + report = sabatini_dj_report + return {'query': report.PoseEstimationPlots.BodyPart(), 'fetch_args': ['bodypart_time_plot']} \ No newline at end of file diff --git a/workflow/pipeline/report.py b/workflow/pipeline/report.py new file mode 100644 index 0000000..1c23ef9 --- /dev/null +++ b/workflow/pipeline/report.py @@ -0,0 +1,146 @@ +import datajoint as dj +import pandas as pd +import numpy as np +import seaborn as sns +import matplotlib.pyplot as plt + +from workflow import 
db_prefix
+from workflow.pipeline import session, event, model, photometry
+
+import workflow.utils.photometry_preprocessing as pp
+from workflow.utils.paths import get_processed_root_data_dir
+
+
+schema = dj.schema(db_prefix + "report")
+
+report_figures_dir = get_processed_root_data_dir() / "report_figures"
+report_figures_dir.mkdir(exist_ok=True, parents=True)
+
+
+# Pose estimation plots
+
+@schema
+class PoseEstimationPlots(dj.Computed):
+    definition = """
+    -> model.PoseEstimation
+    """
+
+    class BodyPart(dj.Part):
+        definition = """
+        -> master
+        body_part: varchar(64)
+        ---
+        bodypart_xy_plot: attach
+        bodypart_time_plot: attach
+        """
+
+    def make(self, key):
+        body_parts = (model.PoseEstimation.BodyPartPosition & key).fetch('body_part')
+
+        pose_df = (model.PoseEstimation.BodyPartPosition & key).fetch(format='frame').reset_index()
+        pose_df = pose_df.explode(column=["frame_index", "x_pos", "y_pos", "likelihood"])  # one row per frame per body part
+
+        self.insert1(key)
+
+        for body_part in body_parts:
+            body_part_df = pose_df[pose_df['body_part'] == body_part]
+            fig1, ax = plt.subplots(figsize=(12, 6))
+            sns.scatterplot(data=body_part_df, x='x_pos', y='y_pos', hue='frame_index', style='body_part', alpha=0.3, ax=ax)
+
+            fig2, axs = plt.subplots(2, 1, figsize=(12, 6))
+            axs[0].plot(body_part_df['frame_index'], body_part_df['x_pos'], 'r', label='x_pos')
+            axs[1].plot(body_part_df['frame_index'], body_part_df['y_pos'], 'b', label='y_pos')
+
+            saved_fig_paths = save_figs(
+                {"bodypart_xy_plot": fig1, "bodypart_time_plot": fig2},
+                save_dir=get_session_figs_dir(key),
+                fig_prefix="-".join([str(v) for v in key.values()]) + "-" + body_part,
+                extension=".png",
+            )
+            self.BodyPart.insert1({**key, "body_part": body_part, **saved_fig_paths})
+
+# Photometry plots
+
+@schema
+class FiberPhotometryPlots(dj.Computed):
+    definition = """
+    -> photometry.FiberPhotometrySynced
+    ---
+    demodulated_trace_plot: attach
+    event_aligned_plot: attach
+    photometry_analysis_summary: longblob
+    """
+
+    key_source = photometry.FiberPhotometrySynced & event.BehaviorRecording
+
+    def make(self, key):
+        from workflow.utils.plotting.photometry_plots import plot_event_aligned_photometry
+
+        # Demodulated trace plot
+        query = photometry.FiberPhotometry.DemodulatedTrace & key
+        traces = query.fetch("trace_name", "emission_color", "hemisphere", "trace", as_dict=True)
+
+        i = 8  # vertical offset of the first stacked trace
+        inc_height = -1.5  # vertical spacing between stacked traces
+        window_start = 1000  # sample range to display
+        window_stop = 3000
+        n_colors = len(query)
+        fig0, ax = plt.subplots(figsize=(10, 3))
+        sns.set_palette('deep', n_colors)
+
+        for j, trace in enumerate(traces):
+            name = '_'.join([trace["trace_name"], trace["emission_color"], trace["hemisphere"]])
+            ax.plot(pp.normalize(pd.DataFrame(trace["trace"]), window=500)[window_start:window_stop] + i,
+                    label=name)
+            i += inc_height
+            ax.text(x=window_stop + 2, y=i - inc_height, s=name, fontsize=12, va="bottom", color=sns.color_palette()[j])
+
+        ax.set_title(f"{key}")
+        ax.set_xlabel("Sample")
+        ax.set_yticks([])
+        sns.despine(left=True)
+
+        # Event-aligned plot
+        events_OI = ['lick', 'water']
+        fig1, CI, RMS, avg_trace, SEM = plot_event_aligned_photometry(key, trace_name='photom', emission_color='green',
+                                                                      hemisphere='right', events_OI=events_OI)
+        analysis_summary = {'mean': avg_trace, 'RMS': RMS, 'SEM': SEM, 'aligned_events': events_OI}
+
+        saved_fig_paths = save_figs(
+            {"demodulated_trace_plot": fig0, "event_aligned_plot": fig1},
+            save_dir=get_session_figs_dir(key),
+            fig_prefix="-".join([str(v) for v in key.values()]),
+            extension=".png",
+        )
+
+        self.insert1({**key, **saved_fig_paths, 
"photometry_analysis_summary": analysis_summary}) + + +# ---- Helper functions ---- + +def save_figs( + fig_dict, save_dir, fig_prefix, extension=".png" +): + """ + Save figures in fig_dict to save_dir with the specified prefix and extension + Returns a dictionary of saved figure paths with the same keys as fig_dict + """ + save_dir.mkdir(exist_ok=True, parents=True) + saved_fig_paths = {} + for fig_name, fig in fig_dict.items(): + if fig: + fig_filepath = save_dir / (fig_prefix + "_" + fig_name + extension) + saved_fig_paths[fig_name] = fig_filepath.as_posix() + fig.tight_layout() + fig.savefig(fig_filepath) + plt.close(fig) + + return saved_fig_paths + + +def get_session_figs_dir(key): + """ + Get the directory to save figures for a given session key + """ + session_key = (session.Session & key).fetch1("KEY") + return report_figures_dir / "-".join([str(v) for v in session_key.values()]) diff --git a/workflow/utils/plotting/__init__.py b/workflow/utils/plotting/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/workflow/utils/plotting/photometry_plots.py b/workflow/utils/plotting/photometry_plots.py new file mode 100644 index 0000000..d7f9e51 --- /dev/null +++ b/workflow/utils/plotting/photometry_plots.py @@ -0,0 +1,90 @@ +import seaborn as sns +import matplotlib.pyplot as plt +import pandas as pd +import numpy as np +from scipy.stats import sem + +from workflow.pipeline import photometry, event + +def plot_event_aligned_photometry(session_key, *, trace_name, emission_color, hemisphere, events_OI): + restr = { + "trace_name": trace_name, + "emission_color": emission_color, + "hemisphere": hemisphere + } + + time_buffer = (20, 60) # before and after each event + + trace = (photometry.FiberPhotometrySynced.SyncedTrace & session_key & restr).fetch1("trace") + timestamps = np.array((photometry.FiberPhotometrySynced & session_key).fetch1("timestamps")) + + fig, axes = plt.subplots(1, len(events_OI), figsize=(23, 3)) + + RMS = [] + CI = [] + avg_trace = [] + SEM = [] + + for ind, (event_type, ax) in enumerate(zip(events_OI, axes)): + + event_traces = [] # Store traces for this event type + + desired_length = len(trace) + new_timestamps = np.linspace(timestamps[0], timestamps[-1], desired_length) + df = pd.DataFrame({"timestamps": new_timestamps, "photometry_trace": trace}) + + # Query the event_start_time for the respective event type + query = event.Event & session_key & f"event_type='{event_type}'" + event_ts = query.fetch("event_start_time") + + # Iterate over each event time + for ts in event_ts: + # Find the corresponding index in the trace for the event time + index = np.searchsorted(df["timestamps"], ts) + + # Define the time window around the event + window_start = index - int(time_buffer[0]) + window_end = index + int(time_buffer[1]) + 1 + + # Extract the peri-event window + peri_event_window = df.iloc[window_start:window_end] + + if len(peri_event_window["photometry_trace"]) == len(range(window_start, window_end)): + event_traces.append(peri_event_window["photometry_trace"].values) + + if event_traces: # Check if there are event traces + event_traces = np.array(event_traces) # trial x time + + # Compute the mean and standard error of the event traces + mean_trace = np.mean(event_traces, axis=0) + sem_trace = sem(event_traces, axis=0) + mean_trace_timestamps = np.arange(-time_buffer[0], time_buffer[1] + 1) + avg_trace.append(mean_trace) + SEM.append(sem_trace) + + # compute confidence interval + from scipy.stats import norm + confidence = 0.95 + alpha_2 = (1 - 
confidence) / 2
+            critical_value = norm.ppf(1 - alpha_2)
+
+            ci = [(mean_trace - (critical_value * sem_trace)),
+                  (mean_trace + (critical_value * sem_trace))]
+            CI.append(ci)
+
+            # compute the RMS of the mean trace over the peri-event window
+            rms = np.sqrt(np.mean(mean_trace ** 2))
+            RMS.append(rms)
+
+            # Plot the mean trace with standard error
+            ax.plot(mean_trace_timestamps, mean_trace, label=event_type, lw=2)
+            ax.fill_between(mean_trace_timestamps, mean_trace - sem_trace, mean_trace + sem_trace, alpha=0.3)
+
+        ax.axvline(x=0, linewidth=0.5, ls='--')
+        if ind == 0:
+            ax.set_ylabel(trace_name, fontsize=15)
+        ax.set(xlabel='Sample', title=event_type)
+        sns.despine()
+
+    return fig, CI, RMS, avg_trace, SEM
+

From 587524b64c4d65e9255491db34d60bb01d41214b Mon Sep 17 00:00:00 2001
From: Thinh Nguyen
Date: Tue, 26 Mar 2024 11:54:08 -0500
Subject: [PATCH 2/2] update readme for sciviz

---
 README.md                          | 16 ++++++++++++++++
 webapps/sciviz/docker-compose.yaml |  1 -
 2 files changed, 16 insertions(+), 1 deletion(-)

diff --git a/README.md b/README.md
index a72c045..b804a31 100644
--- a/README.md
+++ b/README.md
@@ -56,3 +56,19 @@ Note that the ``Subject`` is the top level directory, and all other data types a
 For DLC related projects, the ``dlc_projects`` directory is expected to be in the Inbox directory *not* the subject directory.
 ```
+
+
+#### Testing the Data Viewer Locally
+
+1. After making the code changes locally, run the following command to start the application:
+
+```
+docker compose -f webapps/sciviz/docker-compose.yaml up -d
+```
+
+2. Access the application at fakeservices.datajoint.io in an incognito window and log in with your DataJoint Works credentials.
+3. When you have finished testing, stop and remove the Docker container by running the following command:
+
+```
+docker compose -f webapps/sciviz/docker-compose.yaml down
+```
\ No newline at end of file
diff --git a/webapps/sciviz/docker-compose.yaml b/webapps/sciviz/docker-compose.yaml
index 110099b..b5fcd7e 100644
--- a/webapps/sciviz/docker-compose.yaml
+++ b/webapps/sciviz/docker-compose.yaml
@@ -10,7 +10,6 @@ services:
     environment:
       PHARUS_SPEC_PATH: /main/specs/sciviz_specsheet.yaml # for dynamic api spec
       PHARUS_MODE: ${MODE} # DEV | PROD
-    env_file: .env
     volumes:
      - ./sciviz_specsheet.yaml:/main/specs/sciviz_specsheet.yaml:ro #copy the spec over to /main/specs/YOUR_SPEC_NAME
     command:
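
Note for testing PATCH 1/2: the new `report` tables are standard DataJoint `Computed` tables, so they can be filled and spot-checked before the SciViz pages are wired up. A minimal sketch, assuming a configured DataJoint connection and the module layout introduced above (`populate` and its `display_progress` flag are standard DataJoint, not specific to this repo):

```python
# Populate the new report tables and spot-check a stored figure attachment.
from workflow.pipeline import report

# FiberPhotometryPlots.key_source restricts to synced-photometry sessions that
# also have a behavior recording; PoseEstimationPlots follows model.PoseEstimation.
report.PoseEstimationPlots.populate(display_progress=True)
report.FiberPhotometryPlots.populate(display_progress=True)

# Each `attach` attribute stores a saved .png; fetching returns a local file path.
print(report.FiberPhotometryPlots().fetch("demodulated_trace_plot", limit=1))
```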