From e7da83d906942e2b44f21e87269f65e8370abdfd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?B=C3=ADma=2C=20Jan?= Date: Thu, 2 Nov 2023 22:31:15 +0100 Subject: [PATCH] Sync changes, add workflows --- .github/workflows/CompatHelper.yml | 45 + .github/workflows/Documenter.yml | 40 + .github/workflows/Formatter.yml | 36 + .github/workflows/TagBot.yml | 33 + .github/workflows/Tests.yml | 35 + Project.toml | 4 +- docs/make.jl | 2 +- docs/src/api.md | 10 +- docs/src/assets/ceed_dark.svg | 3 +- docs/src/assets/ceed_light.svg | 3 +- docs/src/index.md | 4 +- docs/src/tutorials/GenerativeDesigns.jl | 107 ++- docs/src/tutorials/GenerativeDesigns.md | 120 ++- docs/src/tutorials/StaticDesigns.jl | 2 +- docs/src/tutorials/StaticDesigns.md | 2 +- docs/src/tutorials/data/glioma_grading.csv | 858 ++++++++++++++++++ readme.md | 3 +- src/CEED.jl | 1 - src/GenerativeDesigns/EfficientValueMDP.jl | 234 +++++ src/GenerativeDesigns/GenerativeDesigns.jl | 332 +------ .../UncertaintyReductionMDP.jl | 364 ++++++++ src/GenerativeDesigns/distancebased.jl | 38 +- src/StaticDesigns/StaticDesigns.jl | 61 +- src/StaticDesigns/arrangements.jl | 8 +- src/fronts.jl | 11 +- test/GenerativeDesigns/test.jl | 71 +- tutorials/GenerativeDesigns.jl | 108 ++- tutorials/GliomaGrading/GliomaGrading.jl | 526 +++++++++++ tutorials/GliomaGrading/GliomaGrading_anim.jl | 468 ++++++++++ .../GliomaGrading/GliomaGrading_left_cells.jl | 43 + tutorials/GliomaGrading/accuracy.png | Bin 0 -> 157057 bytes tutorials/GliomaGrading/anim.gif | Bin 0 -> 173431 bytes tutorials/GliomaGrading/architecture.png | Bin 0 -> 233900 bytes tutorials/GliomaGrading/effect_sizes.png | Bin 0 -> 56214 bytes tutorials/GliomaGrading/glioma_slide.jpeg | Bin 0 -> 178054 bytes tutorials/GliomaGrading/mutations.png | Bin 0 -> 140284 bytes tutorials/Project.toml | 3 + tutorials/StaticDesigns.jl | 2 +- tutorials/anim_fps05.gif | Bin 0 -> 173431 bytes tutorials/data/glioma_grading.csv | 858 ++++++++++++++++++ 40 files changed, 3958 insertions(+), 477 
deletions(-) create mode 100644 .github/workflows/CompatHelper.yml create mode 100644 .github/workflows/Documenter.yml create mode 100644 .github/workflows/Formatter.yml create mode 100644 .github/workflows/TagBot.yml create mode 100644 .github/workflows/Tests.yml create mode 100644 docs/src/tutorials/data/glioma_grading.csv create mode 100644 src/GenerativeDesigns/EfficientValueMDP.jl create mode 100644 src/GenerativeDesigns/UncertaintyReductionMDP.jl create mode 100644 tutorials/GliomaGrading/GliomaGrading.jl create mode 100644 tutorials/GliomaGrading/GliomaGrading_anim.jl create mode 100644 tutorials/GliomaGrading/GliomaGrading_left_cells.jl create mode 100644 tutorials/GliomaGrading/accuracy.png create mode 100644 tutorials/GliomaGrading/anim.gif create mode 100644 tutorials/GliomaGrading/architecture.png create mode 100644 tutorials/GliomaGrading/effect_sizes.png create mode 100644 tutorials/GliomaGrading/glioma_slide.jpeg create mode 100644 tutorials/GliomaGrading/mutations.png create mode 100644 tutorials/anim_fps05.gif create mode 100644 tutorials/data/glioma_grading.csv diff --git a/.github/workflows/CompatHelper.yml b/.github/workflows/CompatHelper.yml new file mode 100644 index 0000000..c7f7a64 --- /dev/null +++ b/.github/workflows/CompatHelper.yml @@ -0,0 +1,45 @@ +name: CompatHelper +on: + schedule: + - cron: 0 0 * * * + workflow_dispatch: +permissions: + contents: write + pull-requests: write +jobs: + CompatHelper: + runs-on: ubuntu-latest + steps: + - name: Check if Julia is already available in the PATH + id: julia_in_path + run: which julia + continue-on-error: true + - name: Install Julia, but only if it is not already available in the PATH + uses: julia-actions/setup-julia@v1 + with: + version: '1.9' + arch: ${{ runner.arch }} + if: steps.julia_in_path.outcome != 'success' + - name: "Add the General registry via Git" + run: | + import Pkg + ENV["JULIA_PKG_SERVER"] = "" + Pkg.Registry.add("General") + shell: julia --color=yes {0} + - name: 
"Install CompatHelper" + run: | + import Pkg + name = "CompatHelper" + uuid = "aa819f21-2bde-4658-8897-bab36330d9b7" + version = "3" + Pkg.add(; name, uuid, version) + shell: julia --color=yes {0} + - name: "Run CompatHelper" + run: | + import CompatHelper + CompatHelper.main() + shell: julia --color=yes {0} + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + COMPATHELPER_PRIV: ${{ secrets.DOCUMENTER_KEY }} + # COMPATHELPER_PRIV: ${{ secrets.COMPATHELPER_PRIV }} \ No newline at end of file diff --git a/.github/workflows/Documenter.yml b/.github/workflows/Documenter.yml new file mode 100644 index 0000000..20bbd92 --- /dev/null +++ b/.github/workflows/Documenter.yml @@ -0,0 +1,40 @@ +name: Documentation +on: + push: + branches: + - main # update to match your development branch (master, main, dev, trunk, ...) + tags: '*' + pull_request: +jobs: + build: + permissions: + contents: write + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - uses: julia-actions/setup-julia@latest + with: + version: '1.9' + - uses: actions/cache@v3 + env: + cache-name: cache-artifacts + with: + path: ~/.julia/artifacts + key: ${{ runner.os }}-docs-${{ env.cache-name }}-${{ hashFiles('**/Project.toml') }} + restore-keys: | + ${{ runner.os }}-docs-${{ env.cache-name }}- + ${{ runner.os }}-docs- + ${{ runner.os }}- + - name: Install binary dependencies + run: sudo apt-get update && sudo apt-get install -y xorg-dev mesa-utils xvfb libgl1 freeglut3-dev libxrandr-dev libxinerama-dev libxcursor-dev libxi-dev libxext-dev + - name: Install Julia dependencies + run: > + DISPLAY=:0 xvfb-run -s '-screen 0 1024x768x24' + julia --project=docs/ -e 'using Pkg; Pkg.develop(PackageSpec(path=pwd())); Pkg.instantiate()' + - name: Build and deploy + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # If authenticating with GitHub Actions token + DOCUMENTER_KEY: ${{ secrets.DOCUMENTER_KEY }} # If authenticating with SSH deploy key + run: > + DISPLAY=:0 xvfb-run -s '-screen 0 1024x768x24' + julia 
--project=docs --color=yes docs/make.jl \ No newline at end of file diff --git a/.github/workflows/Formatter.yml b/.github/workflows/Formatter.yml new file mode 100644 index 0000000..08e348f --- /dev/null +++ b/.github/workflows/Formatter.yml @@ -0,0 +1,36 @@ +name: FormatCheck +on: + push: + branches: + - 'main' + tags: '*' + pull_request: +jobs: + build: + runs-on: ${{ matrix.os }} + strategy: + matrix: + julia-version: [1] + julia-arch: [x86] + os: [ubuntu-latest] + steps: + - uses: julia-actions/setup-julia@latest + with: + version: ${{ matrix.julia-version }} + + - uses: actions/checkout@v3 + - name: Install JuliaFormatter and format + run: | + julia -e 'using Pkg; Pkg.add(PackageSpec(name="JuliaFormatter"))' + julia -e 'using JuliaFormatter; format(".", verbose=true)' + - name: Format check + run: | + julia -e ' + out = Cmd(`git diff --name-only`) |> read |> String + if out == "" + exit(0) + else + @error "Some files have not been formatted !!!" + write(stdout, out) + exit(1) + end' \ No newline at end of file diff --git a/.github/workflows/TagBot.yml b/.github/workflows/TagBot.yml new file mode 100644 index 0000000..3042569 --- /dev/null +++ b/.github/workflows/TagBot.yml @@ -0,0 +1,33 @@ +name: TagBot +on: + issue_comment: + types: + - created + workflow_dispatch: + inputs: + lookback: + default: 3 +permissions: + actions: read + checks: read + contents: write + deployments: read + issues: read + discussions: read + packages: read + pages: read + pull-requests: read + repository-projects: read + security-events: read + statuses: read +jobs: + TagBot: + if: github.event_name == 'workflow_dispatch' || github.actor == 'JuliaTagBot' + runs-on: ubuntu-latest + steps: + - uses: JuliaRegistries/TagBot@v1 + with: + token: ${{ secrets.GITHUB_TOKEN }} + # Edit the following line to reflect the actual name of the GitHub Secret containing your private key + ssh: ${{ secrets.DOCUMENTER_KEY }} + # ssh: ${{ secrets.NAME_OF_MY_SSH_PRIVATE_KEY_SECRET }} \ No newline at end 
of file diff --git a/.github/workflows/Tests.yml b/.github/workflows/Tests.yml new file mode 100644 index 0000000..ddaf9c1 --- /dev/null +++ b/.github/workflows/Tests.yml @@ -0,0 +1,35 @@ +name: Tests +on: + push: + branches: + - main # update to match your development branch (master, main, dev, trunk, ...) + tags: '*' + pull_request: +jobs: + test: + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + julia_version: ['1.9'] + os: [ubuntu-latest] + steps: + - uses: actions/checkout@v3 + - name: "Set up Julia" + uses: julia-actions/setup-julia@latest + with: + version: ${{ matrix.julia_version }} + - uses: actions/cache@v3 + env: + cache-name: cache-artifacts + with: + path: ~/.julia/artifacts + key: ${{ runner.os }}-test-${{ env.cache-name }}-${{ hashFiles('**/Project.toml') }} + restore-keys: | + ${{ runner.os }}-test-${{ env.cache-name }}- + ${{ runner.os }}-test- + ${{ runner.os }}- + - name: "Build package" + uses: julia-actions/julia-buildpkg@latest + - name: "Run tests" + uses: julia-actions/julia-runtest@latest \ No newline at end of file diff --git a/Project.toml b/Project.toml index 70295fe..2430ac2 100644 --- a/Project.toml +++ b/Project.toml @@ -1,6 +1,6 @@ name = "CEED" uuid = "e939450b-799e-4198-a5f5-3f2f7fb1c671" -version = "0.3.0" +version = "0.3.4" [deps] Clustering = "aaaa29a8-35af-508c-8bc3-b662a17a0fe5" @@ -38,7 +38,7 @@ Distances = "0.10" POMDPs = "0.9" JSON = "0.21" Clustering = "0.15" -MCTS = "0.5" +MCTS = "0.5.5" MLJ = "0.19" Requires = "1.3" POMDPSimulators = "0.3" diff --git a/docs/make.jl b/docs/make.jl index f2ab774..864f8f0 100644 --- a/docs/make.jl +++ b/docs/make.jl @@ -4,7 +4,7 @@ using CEED # Literate for tutorials const literate_dir = joinpath(@__DIR__, "..", "tutorials") const tutorials_src = - ["StaticDesigns.jl", "StaticDesignsFiltration.jl", "GenerativeDesigns.jl", "paper.jl"] + ["StaticDesigns.jl", "StaticDesignsFiltration.jl", "GenerativeDesigns.jl"] const generated_dir = joinpath(@__DIR__, "src", 
"tutorials/") # copy tutorials src diff --git a/docs/src/api.md b/docs/src/api.md index 803ed8e..d12dd50 100644 --- a/docs/src/api.md +++ b/docs/src/api.md @@ -14,13 +14,19 @@ CEED.StaticDesigns.evaluate_experiments ## `GenerativeDesigns` ```@docs -CEED.GenerativeDesigns.ResearchMDP +CEED.GenerativeDesigns.UncertaintyReductionMDP +CEED.GenerativeDesigns.EfficientValueMDP CEED.GenerativeDesigns.State -CEED.GenerativeDesigns.efficient_designs CEED.GenerativeDesigns.Variance CEED.GenerativeDesigns.Entropy ``` +```@docs +CEED.GenerativeDesigns.efficient_design +CEED.GenerativeDesigns.efficient_designs +CEED.GenerativeDesigns.efficient_value +``` + ### Distance-Based Sampling ```@docs diff --git a/docs/src/assets/ceed_dark.svg b/docs/src/assets/ceed_dark.svg index 182a1cc..b9959c9 100644 --- a/docs/src/assets/ceed_dark.svg +++ b/docs/src/assets/ceed_dark.svg @@ -1,4 +1,3 @@ - -
C
C
E
E
E
E
D
D
Cost-Efficient Experimental Designs
Cost-Efficient Experimental...
Text is not SVG - cannot display
\ No newline at end of file +
C
C
E
E
E
E
D
D
Cost-Efficient Experimental Designs
Cost-Efficient Experimental...
Text is not SVG - cannot display
\ No newline at end of file diff --git a/docs/src/assets/ceed_light.svg b/docs/src/assets/ceed_light.svg index cf2ba40..9d0df98 100644 --- a/docs/src/assets/ceed_light.svg +++ b/docs/src/assets/ceed_light.svg @@ -1,4 +1,3 @@ - -
C
C
E
E
E
E
D
D
Cost-Efficient Experimental Designs
Cost-Efficient Experimental...
Text is not SVG - cannot display
\ No newline at end of file +
C
C
E
E
E
E
D
D
Cost-Efficient Experimental Designs
Cost-Efficient Experimental...
Text is not SVG - cannot display
\ No newline at end of file diff --git a/docs/src/index.md b/docs/src/index.md index c557830..44bd73f 100644 --- a/docs/src/index.md +++ b/docs/src/index.md @@ -29,4 +29,6 @@ At the beginning of the triage process, an entity's prior data is used to projec code ``` -We conceptualized the triage as a Markov decision process, in which we iteratively choose to conduct a subset of experiments and then, based on the experimental evidence, update our belief about the distribution of outcomes for the experiments that have not yet been conducted. The information value associated with the state, derived from experimental evidence, can be modeled through any statistical or information-theoretic measure such as the variance or uncertainty associated with the target variable posterior. \ No newline at end of file +We conceptualized the triage as a Markov decision process, in which we iteratively choose to conduct a subset of experiments and then, based on the experimental evidence, update our belief about the distribution of outcomes for the experiments that have not yet been conducted. The information value associated with the state, derived from experimental evidence, can be modeled through any statistical or information-theoretic measure such as the variance or uncertainty associated with the target variable posterior. + +We implemented the following two variants of the decision-making process: Firstly, assuming that the decision-making process only terminates when the uncertainty drops below a given threshold, we minimize the expected resource spend. Secondly, we can optimize the value of experimental evidence, adjusted for the incurred experimental costs. 
\ No newline at end of file diff --git a/docs/src/tutorials/GenerativeDesigns.jl b/docs/src/tutorials/GenerativeDesigns.jl index 77cc4ca..03a694c 100644 --- a/docs/src/tutorials/GenerativeDesigns.jl +++ b/docs/src/tutorials/GenerativeDesigns.jl @@ -17,7 +17,7 @@ # We conceptualize the triage as a Markov decision process, in which we iteratively choose to conduct a subset of experiments $S \subseteq E$ and then, based on the experimental evidence, update our belief about the distribution of outcomes for the experiments that have not yet been conducted. # Within the framework, -# - _state_ is modeled as the set of experiments conducted so far along with the acquired experimental evidence; +# - _state_ is modeled as the set of experiments conducted so far along with the acquired experimental evidence and accumulated costs; # - _actions_ are subsets of experiments that have not yet been conducted; the size of these subsets is restricted by the maximum number of parallel experiments. # Importantly, the outcome of a set $S$ of experiments is modeled as a random variable $e_S$, conditioned on the current state, i.e., combined evidence. This means that if in a given state outcomes from experiments in $S \subseteq E$ are available, the outcome of experiments in $S' \subseteq E \setminus S$ is drawn from a posterior $r \sim q(e_{S'} | e_S)$. @@ -56,8 +56,9 @@ # ### Objective Sense # The reward and stopping condition of the triage process can be interpreted in various ways. -# - In our implementation, the triage continues until the uncertainty about the posterior distribution of the target variable falls below a certain level. Our aim is to minimize the anticipated combined monetary cost and execution time of the triage (considered as a 'negative' reward). If all experiments are conducted without reaching below the required uncertainty level, or if the maximum number of experiments is exceeded, we penalize this scenario with a 'minus infinite' reward. 
-# - Alternatively, one could aim to minimize the expected uncertainty while being constrained by the costs of the experiment. +# - The triage may continue until the uncertainty about the posterior distribution of the target variable falls below a certain level. Our aim is to minimize the anticipated combined monetary cost and execution time of the triage (considered as a 'negative' reward). If all experiments are conducted without reaching below the required uncertainty level, or if the maximum number of experiments is exceeded, we penalize this scenario with a 'minus infinite' reward. +# - We may aim to minimize the expected uncertainty while being constrained by the costs of the experiment. +# - Alternatively, we could maximize the value of experimental evidence, adjusted for the incurred experimental costs. # ### Policy Search # Standard MDP algorithms can be used to solve this problem (offline learning) or construct the policy (online learning) for the sequential decision-making. @@ -117,28 +118,42 @@ using CEED, CEED.GenerativeDesigns # As previously discussed, we provide a dataset of historical records, the target variable, along with an information-theoretic measure to quantify the uncertainty about the target variable. # In what follows, we obtain three functions: -# - `sampler`: this is a function of `(state, features, rng)`, in which `state` denotes the current experimental state, `features` represent the set of features we want to sample from, and `rng` is a random number generator; -# - `uncertainty`: this is a function of `state`, -# - `weights`: this represents a function of `state` that distributes probabilistic weights across the rows in the dataset. 
+# - `sampler`: this is a function of `(evidence, features, rng)`, in which `evidence` denotes the current experimental evidence, `features` represent the set of features we want to sample from, and `rng` is a random number generator; +# - `uncertainty`: this is a function of `evidence`, +# - `weights`: this represents a function of `evidence` that distributes probabilistic weights across the rows in the dataset. + +# Note that internally, a state of the decision process is represented as a tuple `(evidence, costs)`. (; sampler, uncertainty, weights) = DistanceBased(data, "HeartDisease", Entropy, Exponential(; λ = 5)); +# The CEED package offers an additional flexibility by allowing an experiment to yield readouts over multiple features at the same time. In our scenario, we can consider the features `RestingECG`, `Oldpeak`, `ST_Slope`, and `MaxHR` to be obtained from a single experiment `ECG`. + +# We specify the experiments along with the associated features: + +experiments = Dict( + ## experiment => features + "BloodPressure" => 1.0 => ["RestingBP"], + "ECG" => 5.0 => ["RestingECG", "Oldpeak", "ST_Slope", "MaxHR"], + "BloodCholesterol" => 20.0 => ["Cholesterol"], + "BloodSugar" => 20.0 => ["FastingBS"], + "HeartDisease" => 100.0, +) + # Let us inspect the distribution of belief for the following experimental evidence: -state = State("Age" => 55, "Sex" => "M") +evidence = Evidence("Age" => 55, "Sex" => "M") # using StatsBase: countmap using Plots # -target_belief = countmap(data[!, "HeartDisease"], weights(state)) +target_belief = countmap(data[!, "HeartDisease"], weights(evidence)) p = bar( 0:1, [target_belief[0], target_belief[1]]; xrot = 40, - c = :teal, ylabel = "probability", - title = "unc: $(round(uncertainty(state), digits=1))", + title = "unc: $(round(uncertainty(evidence), digits=1))", kind = :bar, legend = false, ); @@ -147,36 +162,24 @@ p # Let us next add an outcome of blood pressure measurement: -state_with_bp = merge(state, Dict("RestingBP" => 190)) 
+evidence_with_bp = merge(evidence, Dict("RestingBP" => 190)) -target_belief = countmap(data[!, "HeartDisease"], weights(state_with_bp)) +target_belief = countmap(data[!, "HeartDisease"], weights(evidence_with_bp)) p = bar( 0:1, [target_belief[0], target_belief[1]]; xrot = 40, - c = :teal, ylabel = "probability", - title = "unc: $(round(uncertainty(state_with_bp), digits=2))", + title = "unc: $(round(uncertainty(evidence_with_bp), digits=2))", kind = :bar, legend = false, ); xticks!(p, 0:1, ["no disease", "disease"]); p -# ## Cost-Efficient Designs +# ## Cost-Efficient Experimental Designs for Uncertainty Reduction -# The CEED package offers an additional flexibility by allowing an experiment to yield readouts over multiple features at the same time. In our scenario, we can consider the features `RestingECG`, `Oldpeak`, `ST_Slope`, and `MaxHR` to be obtained from a single experiment `ECG`. - -# We specify the experiments along with the associated features: - -experiments = Dict( - ## experiment => features - "BloodPressure" => 1.0 => ["RestingBP"], - "ECG" => 5.0 => ["RestingECG", "Oldpeak", "ST_Slope", "MaxHR"], - "BloodCholesterol" => 20.0 => ["Cholesterol"], - "BloodSugar" => 20.0 => ["FastingBS"], - "HeartDisease" => 100.0, -) +# In this experimental setup, our objective is to minimize the expected experimental cost while ensuring the uncertainty remains below a specified threshold. # We use the provided function `efficient_designs` to construct the set of cost-efficient experimental designs for various levels of uncertainty threshold. In the following example, we generate 6 thresholds spaces evenly between 0 and 1, inclusive. @@ -186,16 +189,20 @@ experiments = Dict( using Random: seed! 
seed!(1) # -state = State("Age" => 35, "Sex" => "M") +evidence = Evidence("Age" => 35, "Sex" => "M") # ## use less number of iterations to speed up build process -solver = GenerativeDesigns.DPWSolver(; n_iterations = 20_000, tree_in_info = true) +solver = GenerativeDesigns.DPWSolver(; + n_iterations = 20_000, + exploration_constant = 5.0, + tree_in_info = true, +) designs = efficient_designs( experiments, sampler, uncertainty, 6, - state; + evidence; solver, mdp_options = (; max_parallel = 1), repetitions = 5, @@ -228,13 +235,17 @@ experiments = Dict( ## minimize time, two concurrent experiments at maximum seed!(1) ## use less number of iterations to speed up build process -solver = GenerativeDesigns.DPWSolver(; n_iterations = 30_000, tree_in_info = true) +solver = GenerativeDesigns.DPWSolver(; + n_iterations = 20_000, + exploration_constant = 5.0, + tree_in_info = true, +) designs = efficient_designs( experiments, sampler, uncertainty, 6, - state; + evidence; solver, mdp_options = (; max_parallel = 2, costs_tradeoff = [0, 1.0]), repetitions = 5, @@ -243,3 +254,35 @@ designs = efficient_designs( # We plot the Pareto-efficient actions: plot_front(designs; labels = make_labels(designs), ylabel = "% uncertainty") + +# ## Efficient Value Experimental Designs + +# In this experimental setup, we aim to maximize the value of experimental evidence, adjusted for the incurred experimental costs. + +# For this purpose, we need to specify a function that quantifies the 'value' of decision-process making state, modeled as a tuple of experimental evidence and costs. + +value = function (evidence, (monetary_cost, execution_time)) + return (1 - uncertainty(evidence)) - (0.005 * sum(monetary_cost)) +end + +# Considering a discount factor $\lambda$, the total reward associated with the experimental state in an $n$-step decision process is given by $r = r_1 + \sum_{i=2}^n \lambda^{i-1} (r_i - r_{i-1})$, where $r_i$ is the value associated with the $i$-th state. 
+ +# In the following example, we also limit the maximum rollout horizon to 4. +# +seed!(1) +## use less number of iterations to speed up build process +solver = + GenerativeDesigns.DPWSolver(; n_iterations = 20_000, depth = 4, tree_in_info = true) +design = efficient_value( + experiments, + sampler, + value, + evidence; + solver, + repetitions = 5, + mdp_options = (; discount = 0.8), +); +# +design[1] # optimized cost-adjusted value +# +d3tree = D3Tree(design[2].tree; init_expand = 2) diff --git a/docs/src/tutorials/GenerativeDesigns.md b/docs/src/tutorials/GenerativeDesigns.md index b87c1ce..10b3e51 100644 --- a/docs/src/tutorials/GenerativeDesigns.md +++ b/docs/src/tutorials/GenerativeDesigns.md @@ -21,7 +21,7 @@ Let us consider a set of $n$ experiments $E = \{ e_1, \ldots, e_n\}$, and let $y We conceptualize the triage as a Markov decision process, in which we iteratively choose to conduct a subset of experiments $S \subseteq E$ and then, based on the experimental evidence, update our belief about the distribution of outcomes for the experiments that have not yet been conducted. Within the framework, -- _state_ is modeled as the set of experiments conducted so far along with the acquired experimental evidence; +- _state_ is modeled as the set of experiments conducted so far along with the acquired experimental evidence and accumulated costs; - _actions_ are subsets of experiments that have not yet been conducted; the size of these subsets is restricted by the maximum number of parallel experiments. Importantly, the outcome of a set $S$ of experiments is modeled as a random variable $e_S$, conditioned on the current state, i.e., combined evidence. This means that if in a given state outcomes from experiments in $S \subseteq E$ are available, the outcome of experiments in $S' \subseteq E \setminus S$ is drawn from a posterior $r \sim q(e_{S'} | e_S)$. 
@@ -60,8 +60,9 @@ Assuming the weights $w_j$ have been assigned, we can sample an index $\hat j \i ### Objective Sense The reward and stopping condition of the triage process can be interpreted in various ways. -- In our implementation, the triage continues until the uncertainty about the posterior distribution of the target variable falls below a certain level. Our aim is to minimize the anticipated combined monetary cost and execution time of the triage (considered as a 'negative' reward). If all experiments are conducted without reaching below the required uncertainty level, or if the maximum number of experiments is exceeded, we penalize this scenario with a 'minus infinite' reward. -- Alternatively, one could aim to minimize the expected uncertainty while being constrained by the costs of the experiment. +- The triage may continue until the uncertainty about the posterior distribution of the target variable falls below a certain level. Our aim is to minimize the anticipated combined monetary cost and execution time of the triage (considered as a 'negative' reward). If all experiments are conducted without reaching below the required uncertainty level, or if the maximum number of experiments is exceeded, we penalize this scenario with a 'minus infinite' reward. +- We may aim to minimize the expected uncertainty while being constrained by the costs of the experiment. +- Alternatively, we could maximize the value of experimental evidence, adjusted for the incurred experimental costs. ### Policy Search Standard MDP algorithms can be used to solve this problem (offline learning) or construct the policy (online learning) for the sequential decision-making. @@ -128,9 +129,11 @@ using CEED, CEED.GenerativeDesigns As previously discussed, we provide a dataset of historical records, the target variable, along with an information-theoretic measure to quantify the uncertainty about the target variable. 
In what follows, we obtain three functions: -- `sampler`: this is a function of `(state, features, rng)`, in which `state` denotes the current experimental state, `features` represent the set of features we want to sample from, and `rng` is a random number generator; -- `uncertainty`: this is a function of `state`, -- `weights`: this represents a function of `state` that distributes probabilistic weights across the rows in the dataset. +- `sampler`: this is a function of `(evidence, features, rng)`, in which `evidence` denotes the current experimental evidence, `features` represent the set of features we want to sample from, and `rng` is a random number generator; +- `uncertainty`: this is a function of `evidence`, +- `weights`: this represents a function of `evidence` that distributes probabilistic weights across the rows in the dataset. + +Note that internally, a state of the decision process is represented as a tuple `(evidence, costs)`. ````@example GenerativeDesigns (; sampler, uncertainty, weights) = @@ -138,10 +141,25 @@ In what follows, we obtain three functions: nothing #hide ```` +The CEED package offers an additional flexibility by allowing an experiment to yield readouts over multiple features at the same time. In our scenario, we can consider the features `RestingECG`, `Oldpeak`, `ST_Slope`, and `MaxHR` to be obtained from a single experiment `ECG`. 
+ +We specify the experiments along with the associated features: + +````@example GenerativeDesigns +experiments = Dict( + # experiment => features + "BloodPressure" => 1.0 => ["RestingBP"], + "ECG" => 5.0 => ["RestingECG", "Oldpeak", "ST_Slope", "MaxHR"], + "BloodCholesterol" => 20.0 => ["Cholesterol"], + "BloodSugar" => 20.0 => ["FastingBS"], + "HeartDisease" => 100.0, +) +```` + Let us inspect the distribution of belief for the following experimental evidence: ````@example GenerativeDesigns -state = State("Age" => 55, "Sex" => "M") +evidence = Evidence("Age" => 55, "Sex" => "M") ```` ````@example GenerativeDesigns @@ -150,14 +168,13 @@ using Plots ```` ````@example GenerativeDesigns -target_belief = countmap(data[!, "HeartDisease"], weights(state)) +target_belief = countmap(data[!, "HeartDisease"], weights(evidence)) p = bar( 0:1, [target_belief[0], target_belief[1]]; xrot = 40, - c = CEED.MRK_COLOR_TEAL, ylabel = "probability", - title = "unc: $(round(uncertainty(state), digits=1))", + title = "unc: $(round(uncertainty(evidence), digits=1))", kind = :bar, legend = false, ); @@ -168,16 +185,15 @@ p Let us next add an outcome of blood pressure measurement: ````@example GenerativeDesigns -state_with_bp = merge(state, Dict("RestingBP" => 190)) +evidence_with_bp = merge(evidence, Dict("RestingBP" => 190)) -target_belief = countmap(data[!, "HeartDisease"], weights(state_with_bp)) +target_belief = countmap(data[!, "HeartDisease"], weights(evidence_with_bp)) p = bar( 0:1, [target_belief[0], target_belief[1]]; xrot = 40, - c = CEED.MRK_COLOR_TEAL, ylabel = "probability", - title = "unc: $(round(uncertainty(state_with_bp), digits=2))", + title = "unc: $(round(uncertainty(evidence_with_bp), digits=2))", kind = :bar, legend = false, ); @@ -185,22 +201,9 @@ xticks!(p, 0:1, ["no disease", "disease"]); p ```` -## Cost-Efficient Designs - -The CEED package offers an additional flexibility by allowing an experiment to yield readouts over multiple features at the same time. 
In our scenario, we can consider the features `RestingECG`, `Oldpeak`, `ST_Slope`, and `MaxHR` to be obtained from a single experiment `ECG`. +## Cost-Efficient Experimental Designs for Uncertainty Reduction -We specify the experiments along with the associated features: - -````@example GenerativeDesigns -experiments = Dict( - # experiment => features - "BloodPressure" => 1.0 => ["RestingBP"], - "ECG" => 5.0 => ["RestingECG", "Oldpeak", "ST_Slope", "MaxHR"], - "BloodCholesterol" => 20.0 => ["Cholesterol"], - "BloodSugar" => 20.0 => ["FastingBS"], - "HeartDisease" => 100.0, -) -```` +In this experimental setup, our objective is to minimize the expected experimental cost while ensuring the uncertainty remains below a specified threshold. We use the provided function `efficient_designs` to construct the set of cost-efficient experimental designs for various levels of uncertainty threshold. In the following example, we generate 6 thresholds spaces evenly between 0 and 1, inclusive. @@ -213,18 +216,22 @@ seed!(1) ```` ````@example GenerativeDesigns -state = State("Age" => 35, "Sex" => "M") +evidence = Evidence("Age" => 35, "Sex" => "M") ```` ````@example GenerativeDesigns # use less number of iterations to speed up build process -solver = GenerativeDesigns.DPWSolver(; n_iterations = 20_000, tree_in_info = true) +solver = GenerativeDesigns.DPWSolver(; + n_iterations = 20_000, + exploration_constant = 5.0, + tree_in_info = true, +) designs = efficient_designs( experiments, sampler, uncertainty, 6, - state; + evidence; solver, mdp_options = (; max_parallel = 1), repetitions = 5, @@ -266,13 +273,17 @@ We have to provide the maximum number of concurrent experiments. 
Additionally, w # minimize time, two concurrent experiments at maximum seed!(1) # use less number of iterations to speed up build process -solver = GenerativeDesigns.DPWSolver(; n_iterations = 30_000, tree_in_info = true) +solver = GenerativeDesigns.DPWSolver(; + n_iterations = 20_000, + exploration_constant = 5.0, + tree_in_info = true, +) designs = efficient_designs( experiments, sampler, uncertainty, 6, - state; + evidence; solver, mdp_options = (; max_parallel = 2, costs_tradeoff = [0, 1.0]), repetitions = 5, @@ -286,3 +297,44 @@ We plot the Pareto-efficient actions: plot_front(designs; labels = make_labels(designs), ylabel = "% uncertainty") ```` +## Efficient Value Experimental Designs + +In this experimental setup, we aim to maximize the value of experimental evidence, adjusted for the incurred experimental costs. + +For this purpose, we need to specify a function that quantifies the 'value' of decision-process making state, modeled as a tuple of experimental evidence and costs. + +````@example GenerativeDesigns +value = function (evidence, (monetary_cost, execution_time)) + return (1 - uncertainty(evidence)) - (0.005 * sum(monetary_cost)) +end +```` + +Considering a discount factor $\lambda$, the total reward associated with the experimental state in an $n$-step decision process is given by $r = r_1 + \sum_{i=2}^n \lambda^{i-1} (r_i - r_{i-1})$, where $r_i$ is the value associated with the $i$-th state. + +In the following example, we also limit the maximum rollout horizon to 4. 
+ +````@example GenerativeDesigns +seed!(1) +# use less number of iterations to speed up build process +solver = + GenerativeDesigns.DPWSolver(; n_iterations = 20_000, depth = 4, tree_in_info = true) +design = efficient_value( + experiments, + sampler, + value, + evidence; + solver, + repetitions = 5, + mdp_options = (; discount = 0.8), +); +nothing #hide +```` + +````@example GenerativeDesigns +design[1] # optimized cost-adjusted value +```` + +````@example GenerativeDesigns +d3tree = D3Tree(design[2].tree; init_expand = 2) +```` + diff --git a/docs/src/tutorials/StaticDesigns.jl b/docs/src/tutorials/StaticDesigns.jl index 257aee5..effdcf5 100644 --- a/docs/src/tutorials/StaticDesigns.jl +++ b/docs/src/tutorials/StaticDesigns.jl @@ -20,7 +20,7 @@ # Given the constraint on the maximum number of parallel experiments, we devise an arrangement $o$ of experiments $S$ such that, for a fixed tradeoff between monetary cost and execution time, the expected combined cost $c_{(o, \lambda)} = \lambda m_o + (1-\lambda) t_o$ is minimized (i.e., the execution time is minimized). -# In fact, it can be readily demonstrated that the optimal arrangement can be found by ordering the experiments in set $S$ in descending order according to their execution times. Consequently, the experiments are grouped sequentially into sets whose size equals to the maximum number of parallel experiments, except possibly for the final set. +# In fact, it can be readily demonstrated that the optimal arrangement can be found by ordering the experiments in set $S$ in descending order according to their execution times. Consequently, the experiments are grouped sequentially into sets whose size equals the maximum number of parallel experiments, except possibly for the final set. # Continuing our example and assuming a maximum of two parallel experiments, the optimal arrangement is to conduct $e_1$ in parallel with $e_2$, and $e_3$ with $e_4$. 
This results in an arrangement $o = (\{ e_1, e_2 \}, \{ e_3, e_4 \})$ with a total cost of $m_o = 4$ and $t_o = 2 + 4 = 6$. diff --git a/docs/src/tutorials/StaticDesigns.md b/docs/src/tutorials/StaticDesigns.md index 48c0376..3e209b4 100644 --- a/docs/src/tutorials/StaticDesigns.md +++ b/docs/src/tutorials/StaticDesigns.md @@ -24,7 +24,7 @@ However, if we decide to conduct $e_1$ in parallel with $e_3$, and $e_2$ with $e Given the constraint on the maximum number of parallel experiments, we devise an arrangement $o$ of experiments $S$ such that, for a fixed tradeoff between monetary cost and execution time, the expected combined cost $c_{(o, \lambda)} = \lambda m_o + (1-\lambda) t_o$ is minimized (i.e., the execution time is minimized). -In fact, it can be readily demonstrated that the optimal arrangement can be found by ordering the experiments in set $S$ in descending order according to their execution times. Consequently, the experiments are grouped sequentially into sets whose size equals to the maximum number of parallel experiments, except possibly for the final set. +In fact, it can be readily demonstrated that the optimal arrangement can be found by ordering the experiments in set $S$ in descending order according to their execution times. Consequently, the experiments are grouped sequentially into sets whose size equals the maximum number of parallel experiments, except possibly for the final set. Continuing our example and assuming a maximum of two parallel experiments, the optimal arrangement is to conduct $e_1$ in parallel with $e_2$, and $e_3$ with $e_4$. This results in an arrangement $o = (\{ e_1, e_2 \}, \{ e_3, e_4 \})$ with a total cost of $m_o = 4$ and $t_o = 2 + 4 = 6$. 
diff --git a/docs/src/tutorials/data/glioma_grading.csv b/docs/src/tutorials/data/glioma_grading.csv new file mode 100644 index 0000000..46a2bdc --- /dev/null +++ b/docs/src/tutorials/data/glioma_grading.csv @@ -0,0 +1,858 @@ +IDH1,TP53,ATRX,PTEN,EGFR,CIC,MUC16,Grade,Age_at_diagnosis,Gender,Race,percent_tumor_nuclei,percent_normal_cells,percent_stromal_cells,percent_necrosis,percent_tumor_cells +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,51.0,Male,white,95.0,0.0,15.0,10.0,75.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,38.0,Male,white,100.0,0.0,0.0,5.0,95.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,35.0,Male,white,85.0,0.0,0.0,5.0,95.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,LGG,32.0,Female,white,100.0,0.0,0.0,10.0,90.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,31.0,Male,white,100.0,0.0,0.0,20.0,80.0 +MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,33.0,Female,white,80.0,50.0,0.0,0.0,50.0 +MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,35.0,Female,white,75.0,0.0,0.0,0.0,100.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,44.0,Female,white,0.0,0.0,0.0,0.0,0.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,33.0,Female,white,100.0,0.0,0.0,20.0,80.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,87.0,Male,white,85.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,51.0,Male,asian,100.0,0.0,0.0,20.0,80.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,54.0,Male,white,85.0,0.0,10.0,15.0,75.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,52.0,Male,white,90.0,10.0,5.0,0.0,85.0 
+MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,47.0,Male,white,85.0,0.0,0.0,30.0,70.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,34.0,Male,white,80.0,0.0,20.0,0.0,80.0 +MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,40.0,Female,white,80.0,5.0,20.0,5.0,70.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,53.0,Female,white,85.0,0.0,0.0,0.0,100.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,41.0,Female,white,90.0,0.0,10.0,25.0,65.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,43.0,Female,white,80.0,0.0,5.0,5.0,90.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,37.0,Female,white,95.0,0.0,0.0,5.0,95.0 +MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,47.0,Female,white,85.0,0.0,20.0,20.0,60.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,31.0,Female,black or african american,80.0,20.0,20.0,0.0,60.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,25.0,Male,white,80.0,0.0,0.0,3.0,97.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,66.0,Male,white,80.0,0.0,0.0,20.0,80.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,56.0,Male,asian,100.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,35.0,Male,white,90.0,0.0,30.0,30.0,40.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,51.0,Male,white,80.0,0.0,0.0,1.0,99.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,37.0,Male,white,100.0,0.0,0.0,5.0,95.0 +NOT_MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,LGG,54.0,Male,white,100.0,0.0,0.0,20.0,80.0 
+NOT_MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,LGG,32.0,Male,white,90.0,0.0,20.0,20.0,60.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,65.0,Female,white,90.0,5.0,5.0,10.0,80.0 +MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,43.0,Male,white,90.0,0.0,5.0,5.0,90.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,51.0,Female,white,90.0,5.0,5.0,15.0,75.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,43.0,Male,white,80.0,0.0,0.0,40.0,60.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,25.0,Male,white,75.0,0.0,5.0,70.0,25.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,45.0,Female,white,95.0,0.0,25.0,0.0,75.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,50.0,Female,white,85.0,0.0,0.0,5.0,95.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,27.0,Male,white,85.0,0.0,0.0,30.0,70.0 +MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,53.0,Female,white,95.0,0.0,0.0,10.0,90.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,46.0,Female,white,100.0,0.0,0.0,20.0,80.0 +MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,24.0,Female,white,75.0,0.0,0.0,0.0,100.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,34.0,Male,white,70.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,29.0,Female,white,80.0,0.0,0.0,10.0,90.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,45.0,Female,white,80.0,0.0,0.0,7.0,93.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,62.0,Male,white,80.0,0.0,0.0,0.0,100.0 
+MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,46.0,Male,white,90.0,0.0,10.0,10.0,80.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,36.0,Female,white,75.0,0.0,0.0,8.0,92.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,62.0,Female,white,100.0,0.0,0.0,10.0,90.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,70.0,Male,white,80.0,5.0,40.0,5.0,50.0 +MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,53.0,Female,white,85.0,0.0,0.0,0.0,100.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,48.0,Female,white,100.0,5.0,0.0,0.0,95.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,40.0,Male,white,90.0,0.0,20.0,5.0,75.0 +MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,LGG,40.0,Female,white,85.0,0.0,5.0,3.0,97.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,20.0,Female,white,80.0,0.0,0.0,0.0,100.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,57.0,Female,white,85.0,0.0,10.0,40.0,50.0 +MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,38.0,Male,white,95.0,0.0,0.0,5.0,95.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,52.0,Male,white,85.0,0.0,0.0,40.0,60.0 +MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,56.0,Female,white,80.0,0.0,0.0,40.0,60.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,59.0,Male,white,100.0,0.0,0.0,10.0,90.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,67.0,Male,white,95.0,0.0,2.0,15.0,83.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,LGG,48.0,Female,white,85.0,0.0,0.0,0.0,100.0 
+NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,59.0,Female,white,90.0,0.0,5.0,0.0,95.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,LGG,58.0,Female,white,100.0,0.0,0.0,5.0,95.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,27.0,Male,white,100.0,0.0,0.0,5.0,95.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,MUTATED,LGG,51.0,Female,asian,100.0,0.0,0.0,5.0,95.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,74.0,Female,black or african american,95.0,0.0,0.0,2.0,98.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,52.0,Male,white,98.0,0.0,2.0,10.0,88.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,LGG,61.0,Male,white,85.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,MUTATED,NOT_MUTATED,MUTATED,LGG,66.0,Male,white,80.0,0.0,0.0,20.0,80.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,42.0,Female,white,95.0,0.0,0.0,0.0,100.0 +MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,31.0,Male,not reported,80.0,15.0,10.0,0.0,75.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,48.0,Female,white,100.0,0.0,0.0,5.0,95.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,33.0,Male,white,80.0,0.0,0.0,0.0,100.0 +MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,LGG,34.0,Female,white,50.0,0.0,100.0,0.0,0.0 +MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,LGG,24.0,Male,white,95.0,0.0,0.0,0.0,100.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,MUTATED,LGG,55.0,Female,white,100.0,0.0,0.0,0.0,100.0 +MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,27.0,Male,white,100.0,0.0,0.0,30.0,30.0 
+MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,LGG,29.0,Male,white,85.0,5.0,10.0,0.0,85.0 +MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,39.0,Female,white,85.0,0.0,0.0,1.0,99.0 +NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,70.0,Male,white,90.0,0.0,0.0,15.0,85.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,30.0,Female,white,100.0,0.0,0.0,15.0,85.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,39.0,Male,white,85.0,0.0,20.0,0.0,80.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,25.0,Female,white,95.0,0.0,5.0,0.0,95.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,48.0,Male,white,100.0,0.0,0.0,7.0,95.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,57.0,Female,white,85.0,0.0,0.0,20.0,0.0 +MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,34.0,Male,white,100.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,58.0,Male,white,100.0,0.0,0.0,20.0,80.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,55.0,Female,white,100.0,0.0,0.0,10.0,90.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,60.0,Female,white,80.0,20.0,5.0,5.0,70.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,38.0,Male,white,100.0,0.0,0.0,0.0,100.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,56.0,Male,white,0.0,100.0,0.0,0.0,0.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,54.0,Female,white,80.0,0.0,0.0,0.0,100.0 +MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,31.0,Male,white,80.0,20.0,30.0,0.0,50.0 
+MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,54.0,Female,white,85.0,0.0,0.0,10.0,90.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,52.0,Male,white,90.0,0.0,10.0,10.0,80.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,LGG,33.0,Female,black or african american,85.0,0.0,0.0,7.0,93.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,42.0,Female,white,60.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,20.0,Male,white,85.0,0.0,0.0,20.0,80.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,64.0,Female,white,80.0,50.0,0.0,0.0,50.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,29.0,Male,white,80.0,0.0,0.0,15.0,85.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,LGG,36.0,Male,white,100.0,0.0,0.0,30.0,70.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,31.0,Male,white,100.0,0.0,0.0,10.0,90.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,35.0,Male,white,85.0,0.0,0.0,0.0,100.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,36.0,Male,white,80.0,0.0,10.0,0.0,90.0 +NOT_MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,32.0,Female,white,100.0,0.0,0.0,10.0,90.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,38.0,Male,black or african american,60.0,0.0,0.0,5.0,95.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,29.0,Male,black or african american,100.0,0.0,0.0,5.0,95.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,41.0,Male,white,100.0,0.0,0.0,20.0,80.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,LGG,35.0,Male,white,80.0,0.0,50.0,5.0,45.0 
+MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,33.0,Female,white,90.0,10.0,10.0,0.0,80.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,32.0,Male,white,85.0,0.0,0.0,12.0,88.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,49.0,Female,black or african american,85.0,0.0,0.0,5.0,95.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,69.0,Male,white,80.0,15.0,5.0,0.0,85.0 +MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,14.0,Male,white,95.0,0.0,20.0,0.0,80.0 +MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,33.0,Female,white,85.0,0.0,5.0,5.0,90.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,47.0,Male,white,95.0,0.0,0.0,30.0,70.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,33.0,Male,white,80.0,0.0,0.0,1.0,99.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,63.0,Male,white,95.0,15.0,0.0,5.0,80.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,62.0,Male,white,100.0,10.0,30.0,40.0,30.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,37.0,Female,black or african american,85.0,0.0,0.0,10.0,90.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,53.0,Male,white,80.0,0.0,0.0,10.0,90.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,53.0,Female,asian,100.0,0.0,0.0,10.0,90.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,45.0,Female,white,95.0,0.0,10.0,0.0,90.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,33.0,Female,white,100.0,0.0,0.0,20.0,80.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,53.0,Female,black or african 
american,85.0,0.0,0.0,15.0,85.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,52.0,Male,white,95.0,0.0,0.0,5.0,95.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,64.0,Female,white,80.0,0.0,0.0,15.0,85.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,71.0,Female,white,80.0,0.0,0.0,0.0,100.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,40.0,Female,white,100.0,0.0,0.0,0.0,100.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,36.0,Male,white,100.0,0.0,0.0,10.0,90.0 +MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,25.0,Male,white,85.0,0.0,0.0,2.0,98.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,63.0,Female,white,75.0,0.0,0.0,0.0,100.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,40.0,Male,white,85.0,0.0,0.0,3.0,97.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,47.0,Male,white,90.0,0.0,5.0,40.0,55.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,73.0,Female,white,75.0,35.0,0.0,0.0,65.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,24.0,Male,white,65.0,0.0,0.0,10.0,90.0 +MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,48.0,Female,white,100.0,0.0,0.0,0.0,100.0 +MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,41.0,Male,white,80.0,0.0,0.0,0.0,100.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,36.0,Female,white,100.0,0.0,0.0,20.0,80.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,45.0,Male,white,100.0,0.0,0.0,0.0,100.0 +MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,LGG,39.0,Male,white,80.0,0.0,0.0,0.0,100.0 
+MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,44.0,Female,white,100.0,0.0,0.0,50.0,50.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,27.0,Female,white,98.0,0.0,5.0,0.0,95.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,LGG,50.0,Male,black or african american,95.0,0.0,3.0,2.0,95.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,42.0,Male,white,85.0,0.0,0.0,0.0,100.0 +MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,LGG,40.0,Male,white,90.0,0.0,10.0,0.0,90.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,62.0,Female,white,95.0,0.0,20.0,20.0,60.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,33.0,Female,white,100.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,46.0,Male,white,95.0,0.0,0.0,5.0,95.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,41.0,Male,white,75.0,15.0,15.0,0.0,70.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,47.0,Male,white,100.0,0.0,0.0,20.0,80.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,56.0,Male,white,100.0,0.0,0.0,40.0,60.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,38.0,Female,white,85.0,0.0,0.0,0.0,100.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,60.0,Male,white,100.0,0.0,0.0,10.0,90.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,29.0,Male,white,95.0,0.0,5.0,0.0,95.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,59.0,Female,black or african american,100.0,0.0,0.0,40.0,60.0 +MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,44.0,Male,white,85.0,0.0,0.0,15.0,85.0 
+MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,69.0,Female,white,85.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,63.0,Male,white,90.0,0.0,50.0,0.0,50.0 +MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,43.0,Male,white,70.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,61.0,Female,white,85.0,0.0,5.0,20.0,75.0 +MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,LGG,39.0,Male,white,85.0,0.0,25.0,0.0,75.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,31.0,Female,not reported,80.0,0.0,0.0,0.0,100.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,51.0,Male,white,85.0,0.0,0.0,10.0,90.0 +MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,41.0,Female,white,95.0,0.0,20.0,0.0,80.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,60.0,Male,white,80.0,0.0,0.0,0.0,100.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,74.0,Male,white,100.0,0.0,10.0,20.0,70.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,63.0,Male,white,80.0,0.0,0.0,15.0,85.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,MUTATED,LGG,66.0,Male,white,100.0,0.0,0.0,5.0,95.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,51.0,Male,white,75.0,0.0,10.0,10.0,90.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,39.0,Male,white,85.0,10.0,5.0,5.0,80.0 +MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,38.0,Male,white,90.0,0.0,0.0,0.0,100.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,37.0,Female,asian,90.0,0.0,5.0,25.0,70.0 
+MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,30.0,Female,white,100.0,0.0,0.0,5.0,95.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,54.0,Female,white,100.0,0.0,0.0,10.0,90.0 +MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,36.0,Male,white,95.0,0.0,20.0,5.0,75.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,48.0,Male,white,95.0,5.0,0.0,10.0,85.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,29.0,Male,white,85.0,0.0,5.0,10.0,85.0 +MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,32.0,Male,white,100.0,0.0,0.0,80.0,20.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,44.0,Female,white,85.0,50.0,0.0,20.0,30.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,30.0,Male,white,90.0,0.0,5.0,15.0,80.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,LGG,57.0,Female,white,80.0,0.0,15.0,0.0,85.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,19.0,Male,white,95.0,0.0,0.0,10.0,90.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,70.0,Male,white,100.0,0.0,0.0,25.0,75.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,63.0,Male,white,100.0,0.0,0.0,0.0,100.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,41.0,Male,white,100.0,0.0,0.0,10.0,90.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,29.0,Male,white,95.0,0.0,10.0,0.0,90.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,42.0,Male,white,85.0,0.0,15.0,0.0,85.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,51.0,Female,white,90.0,0.0,20.0,20.0,60.0 
+MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,LGG,38.0,Female,white,100.0,0.0,0.0,25.0,75.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,28.0,Male,white,65.0,0.0,8.0,5.0,87.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,32.0,Female,white,80.0,25.0,5.0,0.0,70.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,26.0,Female,white,100.0,0.0,0.0,20.0,80.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,67.0,Male,white,100.0,0.0,0.0,10.0,90.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,MUTATED,LGG,40.0,Male,white,85.0,0.0,0.0,0.0,100.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,59.0,Male,white,100.0,0.0,0.0,15.0,85.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,58.0,Male,white,80.0,0.0,0.0,0.0,100.0 +MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,56.0,Female,white,85.0,0.0,0.0,5.0,95.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,20.0,Female,white,75.0,0.0,0.0,30.0,70.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,49.0,Male,white,60.0,65.0,0.0,5.0,30.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,37.0,Female,white,100.0,0.0,0.0,10.0,90.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,48.0,Female,white,95.0,0.0,0.0,5.0,95.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,71.0,Male,black or african american,80.0,0.0,0.0,60.0,40.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,43.0,Female,white,85.0,0.0,10.0,15.0,75.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,27.0,Female,white,85.0,0.0,0.0,20.0,80.0 
+MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,LGG,25.0,Female,white,80.0,0.0,0.0,20.0,80.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,30.0,Male,white,80.0,0.0,0.0,0.0,100.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,47.0,Male,not reported,100.0,0.0,0.0,10.0,90.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,31.0,Male,white,95.0,0.0,5.0,40.0,55.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,30.0,Male,white,95.0,0.0,25.0,0.0,75.0 +MUTATED,MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,30.0,Female,white,85.0,0.0,0.0,10.0,90.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,24.0,Female,white,100.0,0.0,0.0,20.0,80.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,41.0,Female,white,85.0,0.0,0.0,0.0,100.0 +MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,34.0,Male,white,85.0,0.0,0.0,5.0,95.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,56.0,Female,white,85.0,0.0,5.0,20.0,75.0 +MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,39.0,Female,white,95.0,0.0,0.0,5.0,95.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,23.0,Female,white,90.0,5.0,0.0,40.0,55.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,31.0,Female,white,100.0,0.0,0.0,10.0,90.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,17.0,Male,white,80.0,0.0,0.0,35.0,65.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,64.0,Male,white,100.0,0.0,0.0,20.0,80.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,LGG,27.0,Female,white,80.0,0.0,0.0,0.0,100.0 
+NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,69.0,Male,white,80.0,5.0,5.0,15.0,75.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,37.0,Male,white,80.0,0.0,10.0,0.0,90.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,44.0,Male,white,90.0,5.0,10.0,0.0,85.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,25.0,Male,white,75.0,0.0,0.0,0.0,100.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,29.0,Female,white,100.0,0.0,0.0,0.0,100.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,26.0,Male,white,85.0,0.0,0.0,2.0,98.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,47.0,Female,white,70.0,0.0,0.0,2.0,98.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,39.0,Male,white,0.0,100.0,0.0,0.0,0.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,35.0,Female,white,85.0,0.0,0.0,15.0,85.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,33.0,Male,not reported,80.0,0.0,0.0,0.0,100.0 +MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,74.0,Female,white,70.0,0.0,0.0,0.0,100.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,50.0,Male,white,85.0,0.0,0.0,2.0,98.0 +MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,65.0,Male,white,100.0,0.0,0.0,10.0,90.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,60.0,Female,white,95.0,0.0,0.0,5.0,95.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,34.0,Female,white,85.0,0.0,15.0,5.0,80.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,44.0,Female,white,80.0,0.0,0.0,0.0,100.0 
+MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,21.0,Male,white,95.0,10.0,10.0,0.0,80.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,55.0,Male,white,85.0,10.0,10.0,0.0,80.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,43.0,Male,white,75.0,0.0,0.0,45.0,55.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,36.0,Male,white,95.0,0.0,0.0,40.0,60.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,70.0,Male,white,90.0,10.0,40.0,0.0,50.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,23.0,Female,white,80.0,0.0,0.0,0.0,100.0 +MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,43.0,Male,white,90.0,10.0,10.0,0.0,80.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,30.0,Male,white,95.0,0.0,5.0,10.0,85.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,38.0,Female,white,80.0,0.0,0.0,30.0,70.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,34.0,Female,white,75.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,55.0,Male,white,80.0,0.0,0.0,10.0,90.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,MUTATED,LGG,30.0,Male,white,95.0,0.0,2.0,10.0,88.0 +NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,61.0,Female,white,85.0,0.0,0.0,3.0,97.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,44.0,Male,white,85.0,0.0,0.0,1.0,99.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,35.0,Female,white,60.0,20.0,20.0,0.0,60.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,68.0,Male,white,80.0,0.0,0.0,15.0,85.0 
+MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,LGG,44.0,Female,white,100.0,0.0,0.0,10.0,90.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,38.0,Male,white,90.0,0.0,0.0,20.0,80.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,36.0,Female,not reported,100.0,0.0,0.0,40.0,60.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,52.0,Male,white,75.0,0.0,0.0,0.0,100.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,LGG,36.0,Female,white,60.0,0.0,0.0,0.0,100.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,33.0,Female,white,90.0,10.0,10.0,0.0,80.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,LGG,60.0,Female,white,70.0,0.0,0.0,0.0,100.0 +MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,44.0,Female,white,65.0,0.0,0.0,0.0,0.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,58.0,Female,black or african american,70.0,10.0,65.0,0.0,25.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,33.0,Male,white,90.0,10.0,10.0,0.0,80.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,62.0,Male,white,80.0,5.0,45.0,0.0,50.0 +MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,44.0,Female,black or african american,60.0,10.0,40.0,0.0,50.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,36.0,Male,white,85.0,5.0,20.0,0.0,75.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,29.0,Female,white,80.0,10.0,60.0,0.0,30.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,25.0,Female,not reported,80.0,10.0,40.0,0.0,50.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,20.0,Female,white,80.0,5.0,45.0,0.0,50.0 
+MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,61.0,Female,white,90.0,5.0,45.0,0.0,50.0 +NOT_MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,41.0,Female,white,75.0,0.0,0.0,0.0,100.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,47.0,Male,white,90.0,10.0,10.0,0.0,80.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,59.0,Female,white,90.0,10.0,20.0,0.0,70.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,LGG,48.0,Male,white,70.0,10.0,60.0,0.0,30.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,38.0,Female,white,90.0,5.0,45.0,0.0,50.0 +MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,27.0,Female,white,90.0,10.0,15.0,0.0,75.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,58.0,Female,white,65.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,41.0,Male,white,80.0,5.0,45.0,0.0,50.0 +NOT_MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,26.0,Male,white,70.0,0.0,0.0,0.0,100.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,44.0,Female,white,80.0,0.0,0.0,0.0,100.0 +MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,32.0,Male,white,90.0,5.0,15.0,0.0,80.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,22.0,Male,white,80.0,0.0,0.0,0.0,100.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,25.0,Female,white,80.0,0.0,0.0,0.0,100.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,34.0,Female,white,90.0,10.0,10.0,0.0,80.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,LGG,24.0,Male,white,80.0,0.0,0.0,0.0,100.0 
+MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,30.0,Female,white,75.0,0.0,0.0,0.0,100.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,39.0,Male,black or african american,80.0,0.0,0.0,0.0,100.0 +MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,22.0,Female,white,75.0,0.0,0.0,0.0,100.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,53.0,Male,white,85.0,10.0,50.0,0.0,40.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,30.0,Male,white,75.0,10.0,45.0,0.0,45.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,37.0,Female,white,75.0,10.0,60.0,0.0,30.0 +MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,36.0,Female,white,90.0,10.0,20.0,0.0,70.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,41.0,Female,white,70.0,50.0,0.0,0.0,50.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,57.0,Male,white,70.0,10.0,60.0,0.0,30.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,52.0,Male,white,70.0,0.0,0.0,0.0,100.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,31.0,Male,white,90.0,5.0,15.0,0.0,80.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,32.0,Female,white,75.0,0.0,0.0,0.0,100.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,44.0,Male,asian,70.0,10.0,60.0,0.0,30.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,40.0,Male,white,70.0,30.0,0.0,0.0,70.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,44.0,Female,white,75.0,5.0,85.0,0.0,10.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,39.0,Female,white,65.0,0.0,0.0,0.0,100.0 
+NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,62.0,Male,white,80.0,0.0,0.0,0.0,100.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,LGG,45.0,Male,white,90.0,15.0,0.0,5.0,80.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,48.0,Male,white,85.0,0.0,0.0,0.0,100.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,49.0,Male,white,80.0,10.0,30.0,0.0,60.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,38.0,Female,white,75.0,0.0,0.0,0.0,100.0 +MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,28.0,Female,white,90.0,10.0,50.0,0.0,40.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,38.0,Female,white,60.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,37.0,Female,white,80.0,10.0,45.0,0.0,45.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,58.0,Female,white,80.0,10.0,40.0,0.0,50.0 +MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,41.0,Female,white,70.0,10.0,70.0,0.0,20.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,60.0,Male,white,80.0,0.0,0.0,0.0,100.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,50.0,Female,white,90.0,10.0,30.0,0.0,60.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,75.0,Male,white,60.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,43.0,Male,black or african american,80.0,0.0,0.0,0.0,100.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,52.0,Female,white,80.0,0.0,0.0,0.0,100.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,38.0,Male,white,75.0,0.0,0.0,0.0,100.0 
+MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,50.0,Male,white,75.0,10.0,50.0,0.0,40.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,66.0,Male,white,60.0,0.0,0.0,0.0,100.0 +MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,30.0,Male,white,68.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,59.0,Male,white,80.0,5.0,70.0,0.0,25.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,35.0,Female,asian,65.0,0.0,0.0,0.0,100.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,38.0,Female,white,75.0,0.0,0.0,0.0,100.0 +MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,42.0,Male,black or african american,70.0,10.0,40.0,0.0,50.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,59.0,Female,white,95.0,5.0,15.0,0.0,80.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,26.0,Male,white,85.0,5.0,15.0,0.0,80.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,58.0,Male,white,90.0,10.0,45.0,0.0,45.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,31.0,Male,white,90.0,10.0,40.0,0.0,50.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,LGG,30.0,Female,white,80.0,10.0,50.0,0.0,40.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,45.0,Female,white,85.0,5.0,55.0,0.0,40.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,37.0,Female,white,75.0,0.0,0.0,0.0,100.0 +MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,34.0,Female,white,60.0,0.0,0.0,0.0,100.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,34.0,Female,white,90.0,10.0,45.0,0.0,45.0 
+MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,32.0,Male,white,80.0,10.0,20.0,0.0,70.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,66.0,Male,white,70.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,46.0,Male,white,75.0,0.0,0.0,0.0,100.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,38.0,Female,black or african american,75.0,10.0,15.0,10.0,65.0 +MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,33.0,Male,white,80.0,10.0,60.0,0.0,30.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,40.0,Male,white,80.0,0.0,0.0,0.0,100.0 +MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,52.0,Female,white,70.0,10.0,70.0,0.0,20.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,59.0,Male,not reported,80.0,10.0,45.0,0.0,45.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,58.0,Female,white,80.0,0.0,0.0,0.0,100.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,31.0,Female,white,70.0,0.0,0.0,0.0,100.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,43.0,Female,black or african american,85.0,5.0,35.0,0.0,60.0 +MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,33.0,Male,white,90.0,5.0,5.0,0.0,90.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,55.0,Female,white,80.0,10.0,50.0,0.0,40.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,43.0,Male,white,70.0,10.0,40.0,0.0,50.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,60.0,Male,white,60.0,0.0,0.0,0.0,100.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,33.0,Male,white,75.0,0.0,0.0,0.0,100.0 
+MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,48.0,Female,white,70.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,70.0,Female,white,80.0,20.0,0.0,0.0,80.0 +MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,LGG,53.0,Male,white,90.0,5.0,15.0,0.0,80.0 +NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,66.0,Male,white,75.0,0.0,0.0,0.0,100.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,36.0,Male,white,75.0,0.0,0.0,0.0,100.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,40.0,Male,white,85.0,0.0,0.0,0.0,100.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,MUTATED,LGG,73.0,Female,white,80.0,10.0,50.0,0.0,40.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,36.0,Male,white,90.0,10.0,20.0,0.0,70.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,74.0,Female,white,80.0,10.0,50.0,0.0,40.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,43.0,Female,white,70.0,0.0,10.0,0.0,90.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,40.0,Male,white,60.0,0.0,0.0,0.0,100.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,38.0,Male,white,90.0,5.0,5.0,0.0,90.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,54.0,Female,white,75.0,10.0,20.0,0.0,70.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,63.0,Female,white,80.0,10.0,60.0,0.0,30.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,62.0,Male,white,85.0,10.0,10.0,0.0,80.0 +NOT_MUTATED,MUTATED,MUTATED,MUTATED,MUTATED,MUTATED,MUTATED,LGG,35.0,Female,white,80.0,10.0,50.0,0.0,40.0 
+MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,39.0,Male,white,80.0,0.0,0.0,0.0,100.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,29.0,Male,white,80.0,10.0,30.0,0.0,60.0 +MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,46.0,Male,white,75.0,0.0,0.0,0.0,100.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,46.0,Female,white,75.0,0.0,0.0,0.0,100.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,51.0,Female,white,90.0,10.0,40.0,0.0,50.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,54.0,Male,white,75.0,0.0,0.0,0.0,100.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,42.0,Female,white,90.0,5.0,15.0,0.0,80.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,64.0,Male,white,75.0,20.0,55.0,0.0,25.0 +MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,29.0,Male,white,90.0,5.0,45.0,0.0,50.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,65.0,Female,white,85.0,10.0,50.0,0.0,40.0 +MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,41.0,Female,white,80.0,10.0,50.0,0.0,40.0 +MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,31.0,Female,white,80.0,10.0,60.0,0.0,30.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,58.0,Female,white,90.0,5.0,5.0,0.0,90.0 +MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,54.0,Male,white,80.0,5.0,25.0,0.0,70.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,28.0,Male,white,65.0,0.0,0.0,0.0,100.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,20.0,Female,white,90.0,10.0,40.0,0.0,50.0 
+MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,34.0,Male,asian,80.0,10.0,50.0,0.0,40.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,54.0,Male,white,90.0,10.0,40.0,0.0,50.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,45.0,Male,white,80.0,10.0,60.0,0.0,30.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,48.0,Female,white,80.0,10.0,50.0,0.0,40.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,29.0,Male,white,70.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,22.0,Male,white,75.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,32.0,Male,white,70.0,0.0,50.0,0.0,50.0 +MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,MUTATED,LGG,40.0,Male,white,80.0,0.0,0.0,0.0,100.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,61.0,Female,white,75.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,57.0,Male,white,90.0,5.0,25.0,0.0,70.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,54.0,Male,white,60.0,0.0,0.0,0.0,100.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,MUTATED,LGG,56.0,Female,white,85.0,5.0,25.0,0.0,70.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,34.0,Female,white,75.0,0.0,0.0,0.0,100.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,27.0,Female,white,75.0,10.0,60.0,0.0,30.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,37.0,Female,not reported,80.0,0.0,0.0,0.0,100.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,53.0,Male,white,90.0,10.0,50.0,0.0,40.0 
+MUTATED,MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,43.0,Male,white,80.0,0.0,0.0,0.0,100.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,41.0,Female,white,80.0,0.0,0.0,0.0,100.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,33.0,Male,white,80.0,10.0,60.0,0.0,30.0 +MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,39.0,Male,white,90.0,10.0,20.0,0.0,70.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,31.0,Male,white,90.0,5.0,35.0,0.0,60.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,67.0,Female,white,85.0,10.0,45.0,0.0,45.0 +NOT_MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,30.0,Male,white,70.0,5.0,40.0,0.0,55.0 +MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,48.0,Male,white,85.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,50.0,Male,white,90.0,10.0,10.0,0.0,80.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,45.0,Male,white,70.0,10.0,45.0,0.0,45.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,47.0,Female,white,65.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,32.0,Male,white,75.0,0.0,0.0,0.0,100.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,38.0,Female,white,90.0,10.0,50.0,0.0,40.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,26.0,Male,white,70.0,0.0,0.0,0.0,100.0 +MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,29.0,Male,white,60.0,0.0,0.0,0.0,100.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,28.0,Male,white,80.0,5.0,35.0,0.0,60.0 
+MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,66.0,Female,white,80.0,5.0,30.0,15.0,50.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,39.0,Male,white,80.0,10.0,40.0,0.0,50.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,33.0,Male,white,85.0,10.0,50.0,0.0,40.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,60.0,Female,white,80.0,0.0,0.0,0.0,100.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,35.0,Male,white,90.0,5.0,25.0,0.0,70.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,35.0,Male,white,80.0,0.0,0.0,0.0,100.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,58.0,Male,white,60.0,0.0,0.0,0.0,100.0 +MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,26.0,Female,white,70.0,10.0,60.0,0.0,30.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,30.0,Male,white,75.0,0.0,0.0,0.0,100.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,38.0,Female,white,80.0,15.0,45.0,0.0,40.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,35.0,Male,white,60.0,0.0,0.0,0.0,100.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,39.0,Male,white,90.0,5.0,15.0,0.0,80.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,55.0,Male,black or african american,80.0,10.0,50.0,0.0,40.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,49.0,Female,white,90.0,10.0,50.0,0.0,40.0 +MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,50.0,Male,white,70.0,0.0,0.0,0.0,100.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,52.0,Male,white,90.0,5.0,15.0,0.0,80.0 
+NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,51.0,Female,white,60.0,20.0,0.0,0.0,80.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,61.0,Female,white,80.0,10.0,40.0,0.0,50.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,52.0,Female,white,85.0,5.0,25.0,0.0,70.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,35.0,Male,white,55.0,0.0,0.0,0.0,0.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,47.0,Female,white,80.0,10.0,60.0,0.0,30.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,35.0,Female,white,75.0,10.0,50.0,0.0,40.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,31.0,Female,white,80.0,10.0,50.0,0.0,40.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,31.0,Male,white,65.0,0.0,0.0,0.0,100.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,37.0,Female,white,90.0,5.0,45.0,0.0,50.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,37.0,Male,white,65.0,0.0,0.0,0.0,100.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,52.0,Female,white,90.0,0.0,40.0,0.0,60.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,42.0,Male,white,75.0,0.0,0.0,0.0,100.0 +MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,32.0,Male,white,65.0,0.0,0.0,0.0,100.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,61.0,Male,black or african american,80.0,0.0,0.0,0.0,100.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,48.0,Female,white,85.0,0.0,0.0,0.0,100.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,LGG,34.0,Male,white,75.0,0.0,0.0,0.0,100.0 
+MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,30.0,Male,white,85.0,0.0,0.0,0.0,100.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,30.0,Male,white,75.0,10.0,30.0,0.0,60.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,28.0,Male,white,90.0,10.0,50.0,0.0,40.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,47.0,Male,white,85.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,35.0,Male,white,80.0,10.0,60.0,0.0,30.0 +NOT_MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,23.0,Male,white,75.0,0.0,0.0,0.0,100.0 +MUTATED,MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,49.0,Male,white,70.0,10.0,65.0,0.0,25.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,36.0,Male,white,80.0,10.0,50.0,0.0,40.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,54.0,Male,not reported,85.0,10.0,30.0,0.0,60.0 +MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,22.0,Male,white,90.0,5.0,10.0,0.0,85.0 +MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,42.0,Male,white,70.0,10.0,60.0,0.0,30.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,28.0,Female,white,90.0,5.0,35.0,0.0,60.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,25.0,Male,white,90.0,10.0,35.0,0.0,55.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,66.0,Male,black or african american,75.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,46.0,Male,white,75.0,10.0,40.0,0.0,50.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,57.0,Female,white,80.0,0.0,0.0,0.0,100.0 
+MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,43.0,Male,white,70.0,10.0,45.0,0.0,45.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,49.0,Female,white,60.0,0.0,0.0,0.0,100.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,33.0,Male,white,90.0,10.0,40.0,0.0,50.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,27.0,Male,white,65.0,0.0,0.0,0.0,100.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,62.0,Female,white,85.0,15.0,40.0,0.0,45.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,55.0,Female,white,70.0,5.0,60.0,0.0,35.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,37.0,Female,white,70.0,0.0,0.0,0.0,100.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,37.0,Male,white,75.0,0.0,0.0,0.0,100.0 +MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,32.0,Male,white,90.0,5.0,10.0,0.0,85.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,38.0,Male,white,75.0,0.0,0.0,0.0,100.0 +MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,42.0,Female,white,80.0,10.0,30.0,0.0,60.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,47.0,Male,american indian or alaska native,90.0,15.0,5.0,0.0,80.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,54.0,Male,white,90.0,5.0,15.0,0.0,80.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,29.0,Female,white,90.0,0.0,25.0,0.0,75.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,39.0,Female,white,70.0,10.0,70.0,0.0,20.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,28.0,Male,white,80.0,5.0,15.0,0.0,80.0 
+MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,23.0,Female,white,85.0,0.0,0.0,0.0,100.0 +MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,21.0,Male,white,60.0,25.0,0.0,0.0,75.0 +MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,38.0,Male,white,80.0,0.0,0.0,0.0,100.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,49.0,Female,white,90.0,10.0,40.0,0.0,50.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,38.0,Female,white,75.0,0.0,0.0,0.0,100.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,LGG,22.0,Female,white,90.0,10.0,50.0,0.0,40.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,67.0,Male,white,65.0,3.0,0.0,0.0,97.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,73.0,Female,white,75.0,0.0,0.0,0.0,100.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,62.0,Male,white,65.0,0.0,0.0,0.0,100.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,57.0,Female,white,90.0,5.0,45.0,0.0,50.0 +MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,30.0,Male,white,80.0,0.0,0.0,0.0,100.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,24.0,Female,not reported,80.0,0.0,0.0,0.0,100.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,20.0,Male,white,65.0,0.0,0.0,0.0,100.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,41.0,Female,white,75.0,0.0,0.0,0.0,100.0 +MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,LGG,53.0,Female,white,85.0,0.0,15.0,0.0,85.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,34.0,Male,white,80.0,0.0,0.0,0.0,100.0 
+MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,51.0,Female,white,90.0,5.0,50.0,0.0,45.0 +MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,30.0,Female,white,70.0,10.0,60.0,0.0,30.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,57.0,Male,white,90.0,10.0,5.0,0.0,85.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,LGG,31.0,Male,white,90.0,10.0,0.0,0.0,90.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,57.0,Female,black or african american,90.0,0.0,0.0,20.0,80.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,49.0,Female,black or african american,100.0,10.0,0.0,0.0,90.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,60.0,Male,white,100.0,0.0,0.0,75.0,25.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,73.0,Male,white,100.0,0.0,0.0,10.0,90.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,64.0,Female,white,90.0,0.0,5.0,15.0,80.0 +NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,73.0,Female,not reported,80.0,0.0,40.0,10.0,50.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,33.0,Male,white,85.0,0.0,0.0,8.0,92.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,67.0,Male,white,95.0,5.0,0.0,0.0,95.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,72.0,Female,black or african american,100.0,0.0,0.0,25.0,75.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,69.0,Male,asian,80.0,0.0,15.0,10.0,75.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,79.0,Female,white,100.0,0.0,0.0,25.0,75.0 
+NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,GBM,68.0,Male,white,100.0,0.0,0.0,15.0,85.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,MUTATED,NOT_MUTATED,MUTATED,GBM,76.0,Female,white,60.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,82.0,Male,white,80.0,0.0,0.0,3.0,97.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,58.0,Male,white,80.0,0.0,10.0,25.0,65.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,52.0,Female,white,90.0,0.0,5.0,5.0,90.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,81.0,Female,white,80.0,0.0,5.0,10.0,85.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,GBM,74.0,Female,white,100.0,0.0,0.0,20.0,80.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,76.0,Male,white,85.0,10.0,10.0,0.0,80.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,57.0,Male,black or african american,85.0,0.0,0.0,10.0,90.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,67.0,Female,white,80.0,10.0,15.0,0.0,75.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,MUTATED,GBM,63.0,Male,white,70.0,10.0,15.0,45.0,30.0 +NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,64.0,Female,white,80.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,69.0,Female,white,75.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,66.0,Male,white,100.0,0.0,0.0,20.0,80.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,54.0,Male,white,100.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,60.0,Female,not 
reported,100.0,30.0,20.0,0.0,50.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,45.0,Female,white,75.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,36.0,Male,white,94.0,0.0,10.0,20.0,70.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,49.0,Male,white,75.0,0.0,0.0,10.0,90.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,GBM,69.0,Female,white,85.0,10.0,0.0,10.0,80.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,GBM,54.0,Female,white,85.0,0.0,3.0,15.0,82.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,58.0,Male,white,95.0,0.0,5.0,5.0,90.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,61.0,Female,white,75.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,64.0,Male,white,80.0,10.0,50.0,0.0,40.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,MUTATED,GBM,65.0,Male,white,100.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,60.0,Male,white,100.0,0.0,0.0,10.0,90.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,53.0,Female,white,100.0,0.0,0.0,85.0,15.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,39.0,Male,black or african american,85.0,0.0,0.0,5.0,95.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,58.0,Male,white,85.0,0.0,0.0,5.0,95.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,56.0,Male,white,95.0,20.0,0.0,5.0,75.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,65.0,Male,white,100.0,0.0,0.0,30.0,70.0 
+MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,36.0,Female,white,95.0,0.0,0.0,5.0,95.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,72.0,Male,white,90.0,0.0,0.0,20.0,80.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,48.0,Male,white,100.0,0.0,0.0,10.0,90.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,GBM,39.0,Male,white,80.0,0.0,10.0,5.0,85.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,GBM,54.0,Male,black or african american,85.0,0.0,0.0,2.0,98.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,MUTATED,NOT_MUTATED,MUTATED,GBM,63.0,Female,white,80.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,MUTATED,GBM,66.0,Male,black or african american,100.0,0.0,0.0,5.0,95.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,52.0,Female,white,100.0,0.0,0.0,10.0,90.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,MUTATED,GBM,64.0,Male,black or african american,70.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,89.0,Male,white,100.0,0.0,0.0,20.0,80.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,54.0,Male,white,80.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,60.0,Male,white,75.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,GBM,77.0,Female,white,100.0,0.0,0.0,15.0,85.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,GBM,60.0,Male,white,100.0,0.0,0.0,5.0,95.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,80.0,Male,white,100.0,0.0,0.0,20.0,80.0 
+NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,51.0,Male,white,75.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,GBM,53.0,Male,white,80.0,0.0,20.0,0.0,80.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,43.0,Male,white,100.0,0.0,0.0,20.0,80.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,GBM,74.0,Male,white,100.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,GBM,76.0,Male,white,80.0,0.0,0.0,50.0,50.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,49.0,Male,white,100.0,0.0,0.0,20.0,80.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,48.0,Female,black or african american,100.0,0.0,0.0,40.0,60.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,68.0,Male,white,85.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,74.0,Female,white,100.0,0.0,0.0,10.0,90.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,69.0,Male,white,75.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,64.0,Female,asian,80.0,0.0,0.0,20.0,80.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,58.0,Male,white,80.0,20.0,40.0,0.0,40.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,47.0,Female,white,85.0,0.0,10.0,30.0,60.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,61.0,Male,white,65.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,75.0,Male,white,40.0,0.0,80.0,0.0,20.0 
+NOT_MUTATED,MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,52.0,Male,white,80.0,10.0,5.0,5.0,80.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,47.0,Male,black or african american,85.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,21.0,Female,black or african american,85.0,20.0,20.0,0.0,60.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,72.0,Male,white,100.0,0.0,0.0,50.0,50.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,68.0,Female,white,70.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,67.0,Male,white,100.0,0.0,0.0,10.0,90.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,52.0,Female,white,85.0,0.0,0.0,3.0,97.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,59.0,Male,white,80.0,0.0,0.0,30.0,70.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,GBM,59.0,Female,black or african american,75.0,0.0,0.0,2.0,98.0 +NOT_MUTATED,MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,48.0,Male,white,90.0,5.0,5.0,0.0,90.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,76.0,Male,white,85.0,0.0,5.0,1.0,94.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,MUTATED,GBM,72.0,Male,white,95.0,0.0,10.0,10.0,80.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,51.0,Male,not reported,100.0,0.0,0.0,15.0,85.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,GBM,67.0,Male,white,95.0,0.0,5.0,5.0,90.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,82.0,Female,white,90.0,0.0,0.0,5.0,95.0 
+NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,64.0,Female,white,80.0,5.0,25.0,30.0,40.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,75.0,Male,white,90.0,0.0,10.0,10.0,80.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,85.0,Female,white,95.0,0.0,15.0,5.0,80.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,73.0,Male,white,85.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,78.0,Female,white,80.0,0.0,0.0,30.0,70.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,45.0,Female,white,90.0,5.0,25.0,0.0,70.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,42.0,Female,white,80.0,10.0,10.0,0.0,80.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,53.0,Male,white,90.0,5.0,5.0,5.0,85.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,GBM,68.0,Male,white,100.0,0.0,0.0,5.0,95.0 +MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,43.0,Male,white,90.0,0.0,0.0,20.0,80.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,59.0,Male,white,85.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,52.0,Male,white,95.0,0.0,10.0,20.0,70.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,GBM,61.0,Female,white,100.0,0.0,0.0,20.0,80.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,40.0,Female,white,80.0,0.0,0.0,5.0,95.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,63.0,Female,white,90.0,0.0,0.0,25.0,75.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,63.0,Male,white,95.0,0.0,0.0,5.0,95.0 
+NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,70.0,Male,white,85.0,5.0,55.0,5.0,35.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,59.0,Male,white,95.0,0.0,5.0,5.0,90.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,GBM,56.0,Male,white,80.0,0.0,0.0,15.0,85.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,62.0,Male,white,80.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,24.0,Male,white,80.0,0.0,0.0,5.0,95.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,72.0,Female,white,100.0,0.0,0.0,10.0,90.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,67.0,Male,not reported,85.0,5.0,5.0,0.0,90.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,54.0,Male,white,100.0,0.0,0.0,10.0,90.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,86.0,Male,white,85.0,0.0,0.0,5.0,95.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,62.0,Male,white,100.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,63.0,Female,white,100.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,58.0,Female,white,0.0,0.0,0.0,0.0,0.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,60.0,Male,white,100.0,0.0,0.0,50.0,50.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,67.0,Male,white,0.0,0.0,0.0,0.0,0.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,23.0,Male,white,95.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,58.0,Male,white,100.0,0.0,0.0,10.0,90.0 
+NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,64.0,Male,white,90.0,0.0,10.0,0.0,90.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,65.0,Male,white,90.0,0.0,0.0,30.0,70.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,GBM,61.0,Female,white,100.0,0.0,0.0,5.0,100.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,52.0,Male,white,80.0,0.0,0.0,20.0,80.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,77.0,Male,not reported,65.0,10.0,30.0,0.0,70.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,GBM,58.0,Male,not reported,80.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,GBM,78.0,Female,white,75.0,0.0,0.0,2.0,98.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,63.0,Male,white,90.0,0.0,5.0,5.0,90.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,63.0,Male,white,80.0,0.0,0.0,10.0,90.0 +NOT_MUTATED,MUTATED,MUTATED,NOT_MUTATED,MUTATED,MUTATED,MUTATED,GBM,23.0,Female,white,80.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,65.0,Male,white,100.0,0.0,0.0,10.0,90.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,50.0,Male,white,100.0,0.0,0.0,20.0,80.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,55.0,Male,white,95.0,0.0,10.0,10.0,80.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,60.0,Male,white,100.0,0.0,0.0,5.0,95.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,62.0,Female,white,95.0,0.0,5.0,10.0,85.0 +NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,69.0,Male,white,80.0,0.0,0.0,0.0,100.0 
+NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,56.0,Male,white,80.0,0.0,0.0,20.0,80.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,76.0,Female,white,85.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,76.0,Male,white,90.0,0.0,50.0,0.0,50.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,86.0,Male,white,100.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,69.0,Female,white,85.0,0.0,0.0,18.0,82.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,59.0,Male,white,95.0,0.0,0.0,10.0,90.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,75.0,Female,white,80.0,0.0,15.0,0.0,85.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,57.0,Male,white,60.0,40.0,0.0,10.0,50.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,69.0,Male,white,85.0,0.0,5.0,35.0,60.0 +NOT_MUTATED,MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,GBM,45.0,Female,white,95.0,5.0,0.0,25.0,75.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,47.0,Male,white,80.0,0.0,0.0,2.0,98.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,74.0,Male,white,85.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,60.0,Male,white,80.0,0.0,0.0,1.0,99.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,78.0,Male,white,80.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,59.0,Male,white,85.0,5.0,10.0,25.0,60.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,21.0,Female,white,60.0,0.0,0.0,0.0,100.0 
+NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,81.0,Male,white,80.0,0.0,20.0,0.0,80.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,GBM,70.0,Male,white,90.0,0.0,10.0,30.0,60.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,62.0,Female,white,95.0,0.0,0.0,10.0,90.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,75.0,Female,white,85.0,0.0,0.0,1.0,99.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,60.0,Male,white,100.0,0.0,0.0,20.0,80.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,GBM,63.0,Female,white,80.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,GBM,61.0,Male,white,75.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,47.0,Female,white,95.0,0.0,15.0,10.0,75.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,76.0,Female,white,90.0,0.0,10.0,30.0,60.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,84.0,Female,white,90.0,0.0,5.0,10.0,85.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,77.0,Male,white,80.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,48.0,Female,white,90.0,0.0,30.0,10.0,60.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,69.0,Female,white,85.0,0.0,0.0,10.0,90.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,56.0,Male,white,95.0,0.0,5.0,15.0,80.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,MUTATED,MUTATED,NOT_MUTATED,MUTATED,GBM,76.0,Female,white,85.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,70.0,Male,white,100.0,0.0,0.0,0.0,100.0 
+MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,GBM,34.0,Male,white,100.0,0.0,0.0,10.0,90.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,68.0,Male,black or african american,100.0,0.0,0.0,40.0,60.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,54.0,Male,white,95.0,0.0,0.0,30.0,70.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,74.0,Male,white,50.0,0.0,90.0,10.0,0.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,73.0,Male,white,85.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,57.0,Male,white,95.0,0.0,5.0,5.0,90.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,MUTATED,GBM,44.0,Male,white,100.0,0.0,0.0,5.0,95.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,83.0,Male,white,85.0,0.0,0.0,30.0,70.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,74.0,Male,white,85.0,0.0,0.0,18.0,82.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,51.0,Male,white,95.0,0.0,0.0,32.0,68.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,52.0,Male,white,100.0,0.0,0.0,10.0,90.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,73.0,Male,white,80.0,0.0,20.0,5.0,75.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,81.0,Male,white,75.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,78.0,Female,white,80.0,0.0,5.0,5.0,90.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,57.0,Male,white,90.0,0.0,10.0,10.0,80.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,53.0,Female,white,100.0,0.0,0.0,10.0,90.0 
+NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,50.0,Female,white,95.0,0.0,10.0,10.0,80.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,78.0,Female,black or african american,95.0,0.0,20.0,20.0,60.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,76.0,Female,white,100.0,0.0,0.0,20.0,80.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,57.0,Male,white,75.0,0.0,0.0,5.0,95.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,36.0,Male,white,75.0,0.0,0.0,5.0,95.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,72.0,Female,black or african american,100.0,0.0,0.0,0.0,100.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,31.0,Male,white,85.0,0.0,10.0,20.0,70.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,65.0,Male,white,90.0,0.0,0.0,20.0,80.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,63.0,Male,white,75.0,0.0,0.0,0.0,0.0 +MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,49.0,Female,white,95.0,0.0,0.0,40.0,60.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,59.0,Female,white,98.0,0.0,0.0,20.0,80.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,69.0,Male,white,85.0,0.0,15.0,15.0,70.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,62.0,Male,white,70.0,10.0,50.0,0.0,40.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,52.0,Female,white,100.0,0.0,0.0,5.0,95.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,56.0,Male,white,80.0,0.0,0.0,0.0,100.0 
+NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,81.0,Male,white,80.0,0.0,0.0,3.0,97.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,40.0,Male,white,75.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,GBM,81.0,Female,white,85.0,10.0,10.0,15.0,65.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,50.0,Female,white,90.0,0.0,5.0,5.0,90.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,38.0,Female,white,85.0,0.0,0.0,3.0,97.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,48.0,Male,white,85.0,0.0,5.0,20.0,75.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,51.0,Male,black or african american,100.0,0.0,0.0,5.0,95.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,MUTATED,GBM,66.0,Male,white,10.0,0.0,0.0,95.0,5.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,71.0,Male,black or african american,85.0,5.0,5.0,10.0,80.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,24.0,Female,black or african american,80.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,63.0,Female,white,90.0,0.0,0.0,25.0,75.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,69.0,Male,white,85.0,0.0,10.0,10.0,80.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,78.0,Female,white,80.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,30.0,Male,white,85.0,0.0,5.0,0.0,95.0 +MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,52.0,Female,white,85.0,0.0,0.0,0.0,100.0 
+NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,65.0,Male,white,90.0,5.0,25.0,20.0,50.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,78.0,Male,white,100.0,0.0,0.0,5.0,95.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,GBM,61.0,Male,black or african american,100.0,0.0,0.0,50.0,50.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,50.0,Male,white,80.0,0.0,0.0,0.0,100.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,39.0,Male,white,100.0,0.0,0.0,20.0,80.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,55.0,Male,white,65.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,GBM,66.0,Male,white,80.0,10.0,60.0,0.0,30.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,49.0,Male,black or african american,80.0,0.0,15.0,0.0,85.0 +NOT_MUTATED,NOT_MUTATED,MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,54.0,Female,black or african american,100.0,0.0,0.0,5.0,95.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,58.0,Male,white,80.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,73.0,Female,white,85.0,0.0,5.0,2.0,93.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,53.0,Female,white,80.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,63.0,Male,white,85.0,0.0,0.0,30.0,70.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,66.0,Female,white,100.0,0.0,0.0,20.0,80.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,49.0,Male,black or african american,100.0,0.0,0.0,10.0,90.0 
+NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,GBM,59.0,Male,white,90.0,0.0,10.0,10.0,80.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,54.0,Male,white,80.0,5.0,5.0,30.0,60.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,57.0,Male,white,75.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,59.0,Female,white,80.0,0.0,0.0,40.0,60.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,44.0,Female,white,70.0,0.0,10.0,40.0,50.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,58.0,Female,white,100.0,0.0,0.0,20.0,80.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,65.0,Male,white,70.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,67.0,Male,white,95.0,0.0,0.0,20.0,80.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,75.0,Female,black or african american,100.0,0.0,0.0,5.0,95.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,88.0,Male,white,80.0,0.0,0.0,2.0,98.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,38.0,Female,black or african american,80.0,0.0,10.0,0.0,90.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,53.0,Female,black or african american,80.0,0.0,0.0,50.0,50.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,75.0,Female,white,70.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,66.0,Male,white,75.0,20.0,5.0,5.0,70.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,51.0,Male,white,80.0,0.0,0.0,0.0,100.0 
+MUTATED,MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,40.0,Male,black or african american,90.0,0.0,0.0,20.0,80.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,GBM,58.0,Female,white,95.0,0.0,20.0,0.0,80.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,57.0,Male,white,85.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,55.0,Female,not reported,100.0,0.0,0.0,5.0,95.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,40.0,Male,white,90.0,0.0,15.0,0.0,85.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,40.0,Male,black or african american,95.0,0.0,0.0,5.0,95.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,54.0,Male,white,80.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,MUTATED,GBM,53.0,Male,black or african american,100.0,10.0,0.0,0.0,90.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,56.0,Female,white,90.0,10.0,0.0,0.0,90.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,59.0,Female,white,100.0,0.0,0.0,10.0,90.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,75.0,Female,white,70.0,0.0,0.0,20.0,80.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,59.0,Male,asian,80.0,15.0,10.0,5.0,70.0 +NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,44.0,Female,white,90.0,0.0,5.0,0.0,95.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,46.0,Male,white,10.0,0.0,100.0,0.0,0.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,68.0,Male,white,100.0,0.0,0.0,5.0,100.0 
+MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,30.0,Female,black or african american,95.0,0.0,5.0,10.0,85.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,64.0,Female,white,100.0,0.0,0.0,5.0,95.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,36.0,Female,white,60.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,78.0,Male,white,85.0,0.0,0.0,15.0,85.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,GBM,83.0,Male,white,100.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,51.0,Male,white,85.0,0.0,0.0,10.0,90.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,MUTATED,NOT_MUTATED,MUTATED,GBM,63.0,Female,white,70.0,5.0,50.0,0.0,45.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,59.0,Female,white,90.0,5.0,50.0,0.0,45.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,72.0,Female,white,85.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,53.0,Female,white,75.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,74.0,Male,white,90.0,5.0,25.0,0.0,70.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,GBM,60.0,Female,white,50.0,10.0,0.0,0.0,90.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,54.0,Male,white,70.0,10.0,50.0,0.0,40.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,63.0,Male,white,85.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,58.0,Male,white,75.0,0.0,0.0,0.0,100.0 
+NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,MUTATED,GBM,60.0,Male,white,75.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,71.0,Male,white,70.0,10.0,50.0,0.0,40.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,MUTATED,GBM,75.0,Male,black or african american,80.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,NOT_MUTATED,MUTATED,MUTATED,MUTATED,NOT_MUTATED,MUTATED,GBM,50.0,Male,black or african american,80.0,10.0,40.0,0.0,50.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,67.0,Male,white,80.0,20.0,40.0,0.0,40.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,GBM,60.0,Male,white,80.0,10.0,40.0,0.0,50.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,65.0,Female,white,75.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,51.0,Male,white,70.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,45.0,Male,asian,80.0,10.0,20.0,0.0,70.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,39.0,Female,white,90.0,10.0,40.0,0.0,50.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,70.0,Female,white,90.0,10.0,35.0,0.0,55.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,73.0,Female,black or african american,80.0,10.0,50.0,0.0,40.0 +NOT_MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,65.0,Female,white,0.0,0.0,0.0,0.0,0.0 +NOT_MUTATED,MUTATED,MUTATED,MUTATED,MUTATED,MUTATED,MUTATED,GBM,53.0,Female,white,90.0,5.0,40.0,0.0,55.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,GBM,72.0,Female,white,75.0,0.0,0.0,0.0,100.0 
+NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,MUTATED,GBM,61.0,Male,white,70.0,10.0,60.0,0.0,30.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,65.0,Male,white,75.0,0.0,0.0,0.0,100.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,30.0,Male,asian,70.0,10.0,50.0,0.0,40.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,64.0,Male,white,75.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,55.0,Male,white,75.0,10.0,55.0,0.0,35.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,GBM,79.0,Female,white,70.0,10.0,15.0,0.0,75.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,74.0,Male,white,80.0,10.0,60.0,0.0,30.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,GBM,63.0,Male,white,65.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,81.0,Female,white,80.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,53.0,Female,white,80.0,10.0,50.0,0.0,40.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,69.0,Male,black or african american,85.0,10.0,40.0,0.0,50.0 +MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,60.0,Male,white,90.0,10.0,30.0,0.0,60.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,55.0,Male,black or african american,90.0,5.0,30.0,0.0,65.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,75.0,Female,white,80.0,20.0,30.0,0.0,50.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,MUTATED,GBM,47.0,Male,white,90.0,10.0,30.0,0.0,60.0 
+NOT_MUTATED,MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,71.0,Female,white,80.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,71.0,Male,white,90.0,10.0,40.0,0.0,50.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,25.0,Male,white,80.0,5.0,20.0,0.0,75.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,61.0,Male,white,85.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,47.0,Female,white,80.0,10.0,20.0,0.0,70.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,50.0,Female,white,90.0,10.0,40.0,0.0,50.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,72.0,Female,white,80.0,10.0,50.0,0.0,40.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,31.0,Male,white,70.0,10.0,70.0,0.0,20.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,GBM,86.0,Male,white,75.0,0.0,0.0,0.0,100.0 +MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,38.0,Female,white,85.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,72.0,Female,white,90.0,5.0,5.0,0.0,90.0 +NOT_MUTATED,NOT_MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,55.0,Female,white,75.0,10.0,40.0,0.0,50.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,66.0,Female,white,90.0,10.0,50.0,0.0,40.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,55.0,Female,white,80.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,65.0,Male,white,85.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,GBM,72.0,Female,white,90.0,0.0,10.0,0.0,90.0 
+NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,33.0,Female,white,85.0,15.0,0.0,0.0,85.0 +NOT_MUTATED,NOT_MUTATED,MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,83.0,Male,white,80.0,0.0,0.0,0.0,100.0 +MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,43.0,Male,asian,80.0,10.0,40.0,0.0,50.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,68.0,Male,white,80.0,10.0,40.0,0.0,50.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,47.0,Female,white,80.0,10.0,40.0,0.0,50.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,79.0,Male,black or african american,85.0,10.0,40.0,0.0,50.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,67.0,Male,white,90.0,0.0,20.0,0.0,80.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,49.0,Female,white,90.0,15.0,20.0,0.0,65.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,66.0,Female,white,75.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,68.0,Male,black or african american,50.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,47.0,Female,black or african american,90.0,10.0,20.0,0.0,70.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,61.0,Male,white,65.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,GBM,49.0,Female,white,85.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,67.0,Male,white,75.0,0.0,5.0,0.0,95.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,32.0,Female,white,70.0,10.0,60.0,0.0,30.0 
+NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,65.0,Female,white,85.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,66.0,Female,white,80.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,62.0,Male,white,80.0,10.0,50.0,0.0,40.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,50.0,Male,white,85.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,34.0,Female,white,80.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,61.0,Female,white,90.0,5.0,25.0,0.0,70.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,77.0,Male,white,90.0,5.0,50.0,0.0,45.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,54.0,Male,white,80.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,51.0,Male,white,70.0,10.0,40.0,0.0,50.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,53.0,Male,white,65.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,78.0,Male,not reported,80.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,GBM,46.0,Male,white,85.0,35.0,5.0,0.0,60.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,52.0,Male,white,80.0,10.0,60.0,0.0,30.0 +NOT_MUTATED,MUTATED,MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,48.0,Female,white,90.0,10.0,40.0,0.0,50.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,51.0,Male,white,90.0,10.0,55.0,0.0,35.0 
+NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,62.0,Female,white,70.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,58.0,Male,white,70.0,5.0,45.0,0.0,50.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,61.0,Male,white,80.0,10.0,50.0,0.0,40.0 +MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,66.0,Male,white,70.0,20.0,40.0,0.0,40.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,64.0,Male,white,80.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,56.0,Male,white,95.0,5.0,20.0,0.0,75.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,77.0,Female,white,85.0,10.0,30.0,0.0,60.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,85.0,Male,white,70.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,77.0,Female,white,75.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,MUTATED,GBM,63.0,Male,white,65.0,0.0,0.0,0.0,100.0 +NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,NOT_MUTATED,GBM,76.0,Male,black or african american,90.0,5.0,15.0,0.0,80.0 diff --git a/readme.md b/readme.md index d3db339..c43c4a1 100644 --- a/readme.md +++ b/readme.md @@ -32,6 +32,7 @@ At the beginning of the triage process, an entity's prior data is used to projec We conceptualized the triage as a Markov decision process, in which we iteratively choose to conduct a subset of experiments and then, based on the experimental evidence, update our belief about the distribution of outcomes for the experiments that have not yet been conducted. 
The information value associated with the state, derived from experimental evidence, can be modeled through any statistical or information-theoretic measure such as the variance or uncertainty associated with the target variable posterior. +We implemented the following two variants of the decision-making process: Firstly, assuming that the decision-making process only terminates when the uncertainty drops below a given threshold, we minimize the expected resource spend. Secondly, we can optimize the value of experimental evidence, adjusted for the incurred experimental costs. ## Context: Dynamics of Value Evolution (DyVE) @@ -41,4 +42,4 @@ As the framework evolves, multiple functionalities have matured enough to become This includes **[ReactiveDynamics.jl](https://github.com/Merck/ReactiveDynamics.jl)**, a package which implements a category of reaction (transportation) network-type dynamical systems. The central concept of the package is of a stateful, parametric transition; simultaneous action of the transitions then evolves the dynamical system. Moreover, a network's dynamics can be specified using a compact modeling metalanguage. -Another package is **[AlgebraicAgents.jl](https://github.com/Merck/AlgebraicAgents.jl)**, a lightweight package to enable hierarchical, heterogeneous dynamical systems co-integration. It implements a highly scalable, fully customizable interface featuring sums and compositions of dynamical systems. In present context, we note it can be used to co-integrate a reaction network problem with, e.g., a stochastic ordinary differential problem! +Another package is **[AlgebraicAgents.jl](https://github.com/Merck/AlgebraicAgents.jl)**, a lightweight package to enable hierarchical, heterogeneous dynamical systems co-integration. It implements a highly scalable, fully customizable interface featuring sums and compositions of dynamical systems. 
In present context, we note it can be used to co-integrate a reaction network problem with, e.g., a stochastic ordinary differential problem! \ No newline at end of file diff --git a/src/CEED.jl b/src/CEED.jl index fd720a9..9b19da0 100644 --- a/src/CEED.jl +++ b/src/CEED.jl @@ -1,7 +1,6 @@ module CEED using DataFrames, Plots - export front, plot_front export make_labels, plot_evals diff --git a/src/GenerativeDesigns/EfficientValueMDP.jl b/src/GenerativeDesigns/EfficientValueMDP.jl new file mode 100644 index 0000000..f0aa54e --- /dev/null +++ b/src/GenerativeDesigns/EfficientValueMDP.jl @@ -0,0 +1,234 @@ +""" + EfficientValueMDP(costs, sampler, value, evidence=Evidence(); ) + +Structure that parametrizes the experimental decision-making process. It is used in the object interface of POMDPs. + +In this experimental setup, our objective is to maximize the value of the experimental evidence (such as clinical utility), adjusted for experimental costs. + +Internally, the reward associated with a particular experimental `evidence` and with total accumulated `monetary_cost` and (optionally) `execution_time` is computed as `value(evidence) - costs_tradeoff' * [monetary_cost, execution_time]`. + +# Arguments + + - `costs`: a dictionary containing pairs `experiment => cost`, where `cost` can either be a scalar cost (modelled as a monetary cost) or a tuple `(monetary cost, execution time)`. + - `sampler`: a function of `(evidence, features, rng)`, in which `evidence` denotes the current experimental evidence, `features` represent the set of features we want to sample from, and `rng` is a random number generator; it returns a dictionary mapping the features to outcomes. + - `value`: a function of `(evidence)`; it quantifies the utility of experimental evidence. + - `evidence=Evidence()`: initial experimental evidence. + +# Keyword Arguments + + - 'costs_tradeoff': a vector of weights that trade off monetary cost and execution time. Defaults to `[1, 0]`. 
+ - `max_parallel`: maximum number of parallel experiments. + - `discount`: this is the discounting factor utilized in reward computation. + - `bigM`: it refers to the penalty that arises in a scenario where further experimental action is not an option, yet the uncertainty exceeds the allowable limit. +""" +struct EfficientValueMDP <: POMDPs.MDP{State,Vector{String}} + # initial state + initial_state::State + + # actions and costs + costs::Dict{String,ActionCost} + # maximum number of assays that can be run in parallel + max_parallel::Int + # discount + discount::Float64 + + ## sample readouts from the posterior + sampler::Function + ## measure of utility + value::Function + + function EfficientValueMDP( + costs, + sampler, + value, + evidence = Evidence(); + max_parallel::Int = 1, + discount = 1.0, + ) + state = State((evidence, zeros(2))) + + # check if `sampler`, `uncertainty` are compatible + @assert hasmethod(sampler, Tuple{Evidence,Vector{String},AbstractRNG}) """`sampler` must implement a method accepting `(evidence, readout features, rng)` as its arguments.""" + @assert hasmethod(value, Tuple{Evidence,Vector{Float64}}) """`value` must implement a method accepting `(evidence, costs)` as its argument.""" + + # actions and their costs + costs = Dict{String,ActionCost}( + try + if action isa Pair && action[2] isa Pair + string(action[1]) => (; + costs = Float64[action[2][1]..., 0][1:2], + features = convert(Vector{String}, action[2][2]), + ) + elseif action isa Pair + string(action[1]) => (; + costs = Float64[action[2]..., 0][1:2], + features = String[action[1]], + ) + else + error() + end + catch + error("could not parse $action as an action") + end for action in costs + ) + + return new(state, costs, max_parallel, discount, sampler, value) + end +end + +function POMDPs.actions(m::EfficientValueMDP, state) + all_actions = filter!(collect(keys(m.costs))) do a + return !isempty(m.costs[a].features) && + !in(first(m.costs[a].features), keys(state.evidence)) + end 
+ + return collect(powerset(all_actions, 1, m.max_parallel)) +end + +POMDPs.isterminal(m::EfficientValueMDP, state) = isempty(actions(m, state)) + +POMDPs.discount(m::EfficientValueMDP) = m.discount + +POMDPs.initialstate(m::EfficientValueMDP) = Deterministic(m.initial_state) + +function POMDPs.transition(m::EfficientValueMDP, state, action_set) + # costs + costs = zeros(2) + for experiment in action_set + costs[1] += m.costs[experiment].costs[1] # monetary cost + costs[2] = max(costs[2], m.costs[experiment].costs[2]) # time + end + + # readout features + features = vcat(map(action -> m.costs[action].features, action_set)...) + ImplicitDistribution() do rng + # sample readouts from history + observation = m.sampler(state.evidence, features, rng) + + # create new evidence, add new information + return merge(state, observation, costs) + end +end + +function POMDPs.reward(m::EfficientValueMDP, previous_state::State, _, state::State) + return m.value(state.evidence, state.costs) - + m.value(previous_state.evidence, previous_state.costs) +end + +""" + efficient_value(costs, sampler, value, evidence=Evidence(); ) + +Estimate the maximum value of experimental evidence (such as clinical utility), adjusted for experimental costs. + +Internally, an instance of the `EfficientValueMDP` structure is created and a summary over `repetitions` runoffs is returned. + +# Arguments + + - `costs`: a dictionary containing pairs `experiment => cost`, where `cost` can either be a scalar cost (modelled as a monetary cost) or a tuple `(monetary cost, execution time)`. + - `sampler`: a function of `(evidence, features, rng)`, in which `evidence` denotes the current experimental evidence, `features` represent the set of features we want to sample from, and `rng` is a random number generator; it returns a dictionary mapping the features to outcomes. + - `value`: a function of `(evidence, (monetary costs, execution time))`; it quantifies the utility of experimental evidence. 
+ - `evidence=Evidence()`: initial experimental evidence. + +# Keyword Arguments + + - `solver=default_solver`: a POMDPs.jl compatible solver used to solve the decision process. The default solver is [`DPWSolver`](https://juliapomdp.github.io/MCTS.jl/dev/dpw/). + - `repetitions=0`: number of runoffs used to estimate the expected experimental cost. + - `mdp_options`: a `NamedTuple` of additional keyword arguments that will be passed to the constructor of [`EfficientValueMDP`](@ref). + +# Example + +```julia +(; sampler, uncertainty, weights) = + DistanceBased(data, "HeartDisease", Entropy, Exponential(; λ = 5)); +value = (evidence, costs) -> (1 - uncertainty(evidence) + 0.005 * sum(costs)); +# initialize evidence +evidence = Evidence("Age" => 35, "Sex" => "M") +# set up solver (or use default) +solver = + GenerativeDesigns.DPWSolver(; n_iterations = 10_000, depth = 3, tree_in_info = true) +design = efficient_value( + experiments, + sampler, + value, + evidence; + solver, # planner + mdp_options = (; max_parallel = 1), + repetitions = 5, +) +``` +""" +function efficient_value( + costs, + sampler, + value, + evidence = Evidence(); + solver = default_solver, + repetitions = 0, + mdp_options = (;), +) + mdp = EfficientValueMDP(costs, sampler, value, evidence; mdp_options...) 
+ + # planner + planner = solve(solver, mdp) + action, info = action_info(planner, mdp.initial_state) + + if repetitions > 0 + queue = [Sim(mdp, planner) for _ = 1:repetitions] + + stats = run_parallel(queue) do _, hist + monetary_cost, time = hist[end][:s].costs + return (; + monetary_cost, + time, + adjusted_value = value(mdp.initial_state.evidence, mdp.initial_state.costs) + discounted_reward(hist), + terminal_value = value(hist[end][:s].evidence, hist[end][:s].costs), + ) + end + + if haskey(info, :tree) + return ( + value(mdp.initial_state.evidence, mdp.initial_state.costs) + info[:best_Q], + (; + planner, + arrangement = [action], + monetary_cost = mean(stats.monetary_cost), + time = mean(stats.time), + terminal_value = mean(stats.terminal_value), + tree = info[:tree], + stats, + ), + ) + else + ( + value(mdp.initial_state.evidence, mdp.initial_state.costs) + info[:best_Q], + (; + planner, + arrangement = [action], + monetary_cost = mean(stats.monetary_cost), + time = mean(stats.time), + terminal_value = mean(stats.terminal_value), + stats, + ), + ) + end + else + if haskey(info, :tree) + return ( + value(mdp.initial_state.evidence, mdp.initial_state.costs) + info[:best_Q], + (; + planner, + arrangement = [action], + tree = info[:tree], + ), + ) + else + ( + value(mdp.initial_state.evidence, mdp.initial_state.costs) + info[:best_Q], + (; + planner, + arrangement = [action], + ), + ) + end + end +end diff --git a/src/GenerativeDesigns/GenerativeDesigns.jl b/src/GenerativeDesigns/GenerativeDesigns.jl index 3e10062..1506012 100644 --- a/src/GenerativeDesigns/GenerativeDesigns.jl +++ b/src/GenerativeDesigns/GenerativeDesigns.jl @@ -1,8 +1,7 @@ module GenerativeDesigns using POMDPs -using POMDPTools: ImplicitDistribution, Deterministic -using POMDPSimulators +using POMDPTools using Combinatorics using DataFrames, ScientificTypes @@ -13,339 +12,52 @@ using MCTS using ..CEED: front -export ResearchMDP, DistanceBased +export UncertaintyReductionMDP, DistanceBased 
export QuadraticStandardizedDistance, DiscreteMetric, Exponential export Variance, Entropy -export State -export efficient_designs +export Evidence, State +export efficient_design, efficient_designs +export efficient_value """ -Represent state (knowledge) as an immutable dictionary. +Represent experimental evidence as an immutable dictionary. """ -const State = Base.ImmutableDict{String,Any} +const Evidence = Base.ImmutableDict{String,Any} -function Base.merge(d::State, dsrc::Dict) +function Base.merge(d::Evidence, dsrc::Dict) for (k, v) in dsrc - d = State(d, k, v) + d = Evidence(d, k, v) end return d end -State(p1::Pair, pairs::Pair...) = merge(State(), Dict(p1, pairs...)) - -include("distancebased.jl") - -""" -Represent action as a named tuple `(; costs=[monetary cost, time], features)`. -""" -const ActionCost = NamedTuple{(:costs, :features),<:Tuple{Vector{Float64},Vector{String}}} - -const const_bigM = 1_000_000 - -## define the MDP -""" - ResearchMDP(costs, sampler, uncertainty, threshold, state=State(); ) - -Structure that parametrizes the experimental decision-making process. It is used in the object interface of POMDPs. - -# Arguments - - - `costs`: a dictionary containing pairs `experiment => cost`, where `cost` can either be a scalar cost (modelled as a monetary cost) or a tuple `(monetary cost, execution time)`. - - `sampler`: a function of `(state, features, rng)`, in which `state` denotes the current experimental state, `features` represent the set of features we want to sample from, and `rng` is a random number generator; it returns a dictionary mapping the features to outcomes. - - `uncertainty`: a function of `state`; it returns the measure of variance or uncertainty about the target variable, conditioned on the experimental evidence acquired so far. - - `threshold`: a number representing the acceptable level of uncertainty about the target variable. - - `state=State()`: initial experimental evidence. 
- -# Keyword Arguments - - - `costs_tradeoff`: tradeoff between monetary cost and execution time of an experimental designs, modeled as a vector of length two. - - `max_parallel`: maximum number of parallel experiments. - - `discount`: this is the discounting factor utilized in reward computation. - - `bigM`: it refers to the penalty that arises in a scenario where further experimental action is not an option, yet the uncertainty exceeds the allowable limit. - - `max_experiments`: this denotes the maximum number of experiments that are permissible to be conducted. # initial state -""" -struct ResearchMDP <: POMDPs.MDP{State,Vector{String}} - # initial state - initial_state::State - # uncertainty threshold - threshold::Float64 - - # actions and costs - costs::Dict{String,ActionCost} - # monetary cost v. time tradeoff - costs_tradeoff::Vector{Float64} - # maximum number of assays that can be run in parallel - max_parallel::Int - # discount - discount::Float64 - # max experiments - max_experiments::Int64 - # penalty if max number of experiments exceeded - bigM::Int64 - - ## sample readouts from the posterior - sampler::Function - ## measure of uncertainty about the ground truth - uncertainty::Function - - function ResearchMDP( - costs, - sampler, - uncertainty, - threshold, - state = State(); - costs_tradeoff = [1, 0], - max_parallel::Int = 1, - discount = 1.0, - bigM = const_bigM, - max_experiments = bigM, - ) - - # check if `sampler`, `uncertainty` are compatible - @assert hasmethod(sampler, Tuple{State,Vector{String},AbstractRNG}) """`sampler` must implement a method accepting `(state, readout features, rng)` as its arguments.""" - @assert hasmethod(uncertainty, Tuple{State}) """`uncertainty` must implement a method accepting `state` as its argument.""" - - # actions and their costs - costs = Dict{String,ActionCost}( - try - if action isa Pair && action[2] isa Pair - string(action[1]) => (; - costs = Float64[action[2][1]..., 0][1:2], - features = 
convert(Vector{String}, action[2][2]), - ) - elseif action isa Pair - string(action[1]) => (; - costs = Float64[action[2]..., 0][1:2], - features = String[action[1]], - ) - else - error() - end - catch - error("could not parse $action as an action") - end for action in costs - ) - - new( - State(state...), - threshold, - costs, - costs_tradeoff, - max_parallel, - discount, - max_experiments, - bigM, - sampler, - uncertainty, - ) - end -end +Evidence(p1::Pair, pairs::Pair...) = merge(Evidence(), Dict(p1, pairs...)) """ -A penalized action that results in a terminal state, e.g., in situations where conducting additional experiments is not possible, but the level of uncertainty remains above an acceptable threshold. +Represent experimental state as a tuple of experimental costs and evidence. """ -const eox = "EOX" - -function POMDPs.actions(m::ResearchMDP, state) - all_actions = filter!(collect(keys(m.costs))) do a - !isempty(m.costs[a].features) && !in(first(m.costs[a].features), keys(state)) - end - - if !isempty(all_actions) && (length(state) < m.max_experiments) - collect(powerset(all_actions, 1, m.max_parallel)) - else - [[eox]] - end -end +const State = NamedTuple{(:evidence, :costs),Tuple{Evidence,<:Vector{Real}}} -function POMDPs.isterminal(m::ResearchMDP, state) - #haskey(state, eox) || (haskey(state, "kpuu") && println(m.uncertainty(state))) - return haskey(state, eox) || (m.uncertainty(state) <= m.threshold) +function Base.merge(state::State, evidence, costs) + return State((merge(state.evidence, evidence), state.costs .+ costs)) end -POMDPs.discount(m::ResearchMDP) = m.discount - -POMDPs.initialstate(m::ResearchMDP) = Deterministic(m.initial_state) - -function POMDPs.transition(m::ResearchMDP, state, action_set) - if action_set == [eox] - Deterministic(merge(state, Dict(eox => -1))) - else - # readout features - features = vcat(map(action -> m.costs[action].features, action_set)...) 
- - ImplicitDistribution() do rng - # sample readouts from history - observation = m.sampler(state, features, rng) - - # create new state, add new information - merge(state, observation) - end - end -end - -function POMDPs.reward(m::ResearchMDP, _, action) - if action == [eox] - -m.bigM - else - costs = zeros(2) - for experiment in action - costs[1] += m.costs[experiment].costs[1] # monetary cost - costs[2] = max(costs[2], m.costs[experiment].costs[2]) # time - end - - -costs' * m.costs_tradeoff - end -end +include("distancebased.jl") """ - compute_execution_cost(costs, actions; discount=1.) - -Compute monetary cost and execution time for a sequence of actions. Returns a named tuple `(; monetary_cost, time)`. +Represent action as a named tuple `(; costs=[monetary cost, time], features)`. """ -function compute_execution_cost(costs, actions; discount = 1.0) - costs = Dict{String,ActionCost}( - try - if action isa Pair && action[2] isa Pair - string(action[1]) => (; - costs = Float64[action[2][1]..., 0][1:2], - features = convert(Vector{String}, action[2][2]), - ) - elseif action isa Pair - string(action[1]) => (; - costs = Float64[action[2]..., 0][1:2], - features = String[action[1]], - ) - else - error() - end - catch - error("could not parse $action as an action") - end for action in costs - ) - - monetary_cost = time = 0 - - for action in actions - (action == [eox]) && break - - time_group = 0 # total duration of parallel assays - for experiment in action - monetary_cost += discount * costs[experiment].costs[1] # monetary cost - time_group = max(time_group, costs[experiment].costs[2]) # time - end - time += discount * time_group - end +const ActionCost = NamedTuple{(:costs, :features),<:Tuple{Vector{Float64},Vector{String}}} - (; monetary_cost, time) -end +const const_bigM = 1_000_000 const default_solver = DPWSolver(; n_iterations = 100_000, tree_in_info = true) -const default_repetitions = 20 - -""" - efficient_designs(costs, sampler, uncertainty, n_thresholds, 
state=State(); ) - -Estimate the combined experimental costs of 'generative' experimental designs over a range of uncertainty thresholds, and return the set of Pareto-efficient designs in the dimension of cost and uncertainty threshold. - -Internally, an instance of the `ResearchMDP` reference is created for every selected uncertainty threshold and the corresponding runoffs are simulated. - -# Arguments - - - `costs`: a dictionary containing pairs `experiment => cost`, where `cost` can either be a scalar cost (modelled as a monetary cost) or a tuple `(monetary cost, execution time)`. - - `sampler`: a function of `(state, features, rng)`, in which `state` denotes the current experimental state, `features` represent the set of features we want to sample from, and `rng` is a random number generator; it returns a dictionary mapping the features to outcomes. - - `uncertainty`: a function of `state`; it returns the measure of variance or uncertainty about the target variable, conditioned on the experimental evidence acquired so far. - - `n_thresholds`: number of thresholds to consider uniformly in the range between 0 and 1, inclusive. - - `state=State()`: initial experimental evidence. -# Keyword Arguments +# minimize the expected experimental cost while ensuring the uncertainty remains below a specified threshold. +include("UncertaintyReductionMDP.jl") - - `solver=default_solver`: a POMDPs.jl compatible solver used to solve the decision process. The default solver is [`DPWSolver`](https://juliapomdp.github.io/MCTS.jl/dev/dpw/). - - `repetitions=default_repetitions`: number of runoffs used to estimate the expected experimental cost. - - `mdp_options`: a `NamedTuple` of additional keyword arguments that will be passed to the constructor of [`ResearchMDP`](@ref). 
- -# Example - -```julia -(; sampler, uncertainty, weights) = - DistanceBased(data, "HeartDisease", Entropy, Exponential(; λ = 5)); -# initialize state -state = State("Age" => 35, "Sex" => "M") -# set up solver (or use default) -solver = GenerativeDesigns.DPWSolver(; n_iterations = 60_000, tree_in_info = true) -designs = efficient_designs( - experiments, - sampler, - uncertainty, - 6, - state; - solver, # planner - mdp_options = (; max_parallel = 1), - repetitions = 5, -) -``` -""" -function efficient_designs( - costs, - sampler, - uncertainty, - n_thresholds, - state = State(); - solver = default_solver, - repetitions = default_repetitions, - mdp_options = (;), -) - designs = [] - for threshold in range(0.0, 1.0, n_thresholds) - @info "Current threshold level : $threshold" - mdp = ResearchMDP(costs, sampler, uncertainty, threshold, state; mdp_options...) - if isterminal(mdp, state) - push!(designs, ((0.0, threshold), (; monetary_cost = 0.0, time = 0.0))) - else - # planner - planner = solve(solver, mdp) - queue = [Sim(mdp, planner) for _ = 1:repetitions] - - stats = run_parallel(queue) do _, hist - (; monetary_cost, time) = compute_execution_cost(costs, hist[:a]) - return (; monetary_cost, time, combined_cost = -discounted_reward(hist)) - end - show(stats) - action, info = action_info(planner, state) - - if haskey(info, :tree) - push!( - designs, - ( - (mean(stats.combined_cost), threshold), - (; - planner, - arrangement = [action], - monetary_cost = mean(stats.monetary_cost), - time = mean(stats.time), - tree = info[:tree], - ), - ), - ) - else - push!( - designs, - ( - (mean(stats.combined_cost), threshold), - (; - planner, - arrangement = [action], - monetary_cost = mean(stats.monetary_cost), - time = mean(stats.time), - ), - ), - ) - end - end - end - ## rewrite - front(x -> x[1], designs) -end +# maximize the value of the experimental evidence (such as clinical utility), adjusted for experimental costs. 
+include("EfficientValueMDP.jl") end diff --git a/src/GenerativeDesigns/UncertaintyReductionMDP.jl b/src/GenerativeDesigns/UncertaintyReductionMDP.jl new file mode 100644 index 0000000..6a186a8 --- /dev/null +++ b/src/GenerativeDesigns/UncertaintyReductionMDP.jl @@ -0,0 +1,364 @@ +""" + UncertaintyReductionMDP(costs, sampler, uncertainty, threshold, evidence=Evidence(); ) + +Structure that parametrizes the experimental decision-making process. It is used in the object interface of POMDPs. + +In this experimental setup, our objective is to minimize the expected experimental cost while ensuring the uncertainty remains below a specified threshold. + +Internally, a state of the decision process is modeled as a tuple `(evidence::Evidence, [total accumulated monetary cost, total accumulated execution time])`. + +# Arguments + + - `costs`: a dictionary containing pairs `experiment => cost`, where `cost` can either be a scalar cost (modelled as a monetary cost) or a tuple `(monetary cost, execution time)`. + - `sampler`: a function of `(evidence, features, rng)`, in which `evidence` denotes the current experimental evidence, `features` represent the set of features we want to sample from, and `rng` is a random number generator; it returns a dictionary mapping the features to outcomes. + - `uncertainty`: a function of `evidence`; it returns the measure of variance or uncertainty about the target variable, conditioned on the experimental evidence acquired so far. + - `threshold`: a number representing the acceptable level of uncertainty about the target variable. + - `evidence=Evidence()`: initial experimental evidence. + +# Keyword Arguments + + - `costs_tradeoff`: tradeoff between monetary cost and execution time of an experimental designs, modeled as a vector of length two. + - `max_parallel`: maximum number of parallel experiments. + - `discount`: this is the discounting factor utilized in reward computation. 
+ - `bigM`: it refers to the penalty that arises in a scenario where further experimental action is not an option, yet the uncertainty exceeds the allowable limit. + - `max_experiments`: this denotes the maximum number of experiments that are permissible to be conducted. +""" +struct UncertaintyReductionMDP <: POMDPs.MDP{State,Vector{String}} + # initial state + initial_state::State + # uncertainty threshold + threshold::Float64 + + # actions and costs + costs::Dict{String,ActionCost} + # monetary cost v. time tradeoff + costs_tradeoff::Vector{Float64} + # maximum number of assays that can be run in parallel + max_parallel::Int + # discount + discount::Float64 + # max experiments + max_experiments::Int64 + # penalty if max number of experiments exceeded + bigM::Int64 + + ## sample readouts from the posterior + sampler::Function + ## measure of uncertainty about the ground truth + uncertainty::Function + + function UncertaintyReductionMDP( + costs, + sampler, + uncertainty, + threshold, + evidence = Evidence(); + costs_tradeoff = [1, 0], + max_parallel::Int = 1, + discount = 1.0, + bigM = const_bigM, + max_experiments = bigM, + ) + state = State((evidence, zeros(2))) + + # check if `sampler`, `uncertainty` are compatible + @assert hasmethod(sampler, Tuple{Evidence,Vector{String},AbstractRNG}) """`sampler` must implement a method accepting `(evidence, readout features, rng)` as its arguments.""" + @assert hasmethod(uncertainty, Tuple{Evidence}) """`uncertainty` must implement a method accepting `evidence` as its argument.""" + + # actions and their costs + costs = Dict{String,ActionCost}( + try + if action isa Pair && action[2] isa Pair + string(action[1]) => (; + costs = Float64[action[2][1]..., 0][1:2], + features = convert(Vector{String}, action[2][2]), + ) + elseif action isa Pair + string(action[1]) => (; + costs = Float64[action[2]..., 0][1:2], + features = String[action[1]], + ) + else + error() + end + catch + error("could not parse $action as an action") + 
end for action in costs + ) + + return new( + state, + threshold, + costs, + costs_tradeoff, + max_parallel, + discount, + max_experiments, + bigM, + sampler, + uncertainty, + ) + end +end + +""" +A penalized action that results in a terminal state, e.g., in situations where conducting additional experiments is not possible, but the level of uncertainty remains above an acceptable threshold. +""" +const eox = "EOX" + +function POMDPs.actions(m::UncertaintyReductionMDP, state) + all_actions = filter!(collect(keys(m.costs))) do a + return !isempty(m.costs[a].features) && + !in(first(m.costs[a].features), keys(state.evidence)) + end + + if !isempty(all_actions) && (length(state.evidence) < m.max_experiments) + collect(powerset(all_actions, 1, m.max_parallel)) + else + [[eox]] + end +end + +function POMDPs.isterminal(m::UncertaintyReductionMDP, state) + return haskey(state.evidence, eox) || (m.uncertainty(state.evidence) <= m.threshold) +end + +POMDPs.discount(m::UncertaintyReductionMDP) = m.discount + +POMDPs.initialstate(m::UncertaintyReductionMDP) = Deterministic(m.initial_state) + +function POMDPs.transition(m::UncertaintyReductionMDP, state, action_set) + if action_set == [eox] + Deterministic(merge(state, Dict(eox => -1), [0.0, 0.0])) + else + # costs + costs = zeros(2) + for experiment in action_set + costs[1] += m.costs[experiment].costs[1] # monetary cost + costs[2] = max(costs[2], m.costs[experiment].costs[2]) # time + end + + # readout features + features = vcat(map(action -> m.costs[action].features, action_set)...) 
+ ImplicitDistribution() do rng + # sample readouts from history + observation = m.sampler(state.evidence, features, rng) + + # create new evidence, add new information + return merge(state, observation, costs) + end + end +end + +function POMDPs.reward(m::UncertaintyReductionMDP, _, action, state) + if action == [eox] + -m.bigM + else + -state.costs' * m.costs_tradeoff + end +end + +""" + efficient_design(costs, sampler, uncertainty, thresholds, evidence=Evidence(); ) + +In the uncertainty reduction setup, minimize the expected experimental cost while ensuring the uncertainty remains below a specified threshold. + +# Arguments + + - `costs`: a dictionary containing pairs `experiment => cost`, where `cost` can either be a scalar cost (modelled as a monetary cost) or a tuple `(monetary cost, execution time)`. + - `sampler`: a function of `(evidence, features, rng)`, in which `evidence` denotes the current experimental evidence, `features` represent the set of features we want to sample from, and `rng` is a random number generator; it returns a dictionary mapping the features to outcomes. + - `uncertainty`: a function of `evidence`; it returns the measure of variance or uncertainty about the target variable, conditioned on the experimental evidence acquired so far. + - `thresholds`: uncertainty threshold. + - `evidence=Evidence()`: initial experimental evidence. + +# Keyword Arguments + + - `solver=default_solver`: a POMDPs.jl compatible solver used to solve the decision process. The default solver is [`DPWSolver`](https://juliapomdp.github.io/MCTS.jl/dev/dpw/). + - `repetitions=0`: number of runoffs used to estimate the expected experimental cost. + - `mdp_options`: a `NamedTuple` of additional keyword arguments that will be passed to the constructor of [`UncertaintyReductionMDP`](@ref). + - `realized_uncertainty=false`: whenever the initial state uncertainty is below the selected threshold, return the actual uncertainty of this state. 
+ +# Example + +```julia +(; sampler, uncertainty, weights) = + DistanceBased(data, "HeartDisease", Entropy, Exponential(; λ = 5)); +# initialize evidence +evidence = Evidence("Age" => 35, "Sex" => "M") +# set up solver (or use default) +solver = GenerativeDesigns.DPWSolver(; n_iterations = 60_000, tree_in_info = true) +designs = efficient_design( + costs, + experiments, + sampler, + uncertainty, + 0.6, + evidence; + solver, # planner + mdp_options = (; max_parallel = 1), + repetitions = 5, +) +``` +""" +function efficient_design( + costs, + sampler, + uncertainty, + threshold, + evidence = Evidence(); + solver = default_solver, + repetitions = 0, + realized_uncertainty = false, + mdp_options = (;), +) + mdp = UncertaintyReductionMDP( + costs, + sampler, + uncertainty, + threshold, + evidence; + mdp_options..., + ) + if isterminal(mdp, mdp.initial_state) + return ((0.0, realized_uncertainty ? mdp.uncertainty(mdp.initial_state.evidence) : threshold), (; monetary_cost = 0.0, time = 0.0)) + else + # planner + planner = solve(solver, mdp) + action, info = action_info(planner, mdp.initial_state) + + if repetitions > 0 + queue = [Sim(mdp, planner) for _ = 1:repetitions] + + stats = run_parallel(queue) do _, hist + monetary_cost, time = hist[end][:s].costs + return (; + monetary_cost, + time, + combined_cost = -discounted_reward(hist), + actions = hist[:a], + ) + end + + if haskey(info, :tree) + return ( + (-info[:best_Q], threshold), + (; + planner, + arrangement = [action], + monetary_cost = mean(stats.monetary_cost), + time = mean(stats.time), + tree = info[:tree], + stats, + ), + ) + else + return ( + (-info[:best_Q], threshold), + (; + planner, + arrangement = [action], + monetary_cost = mean(stats.monetary_cost), + time = mean(stats.time), + stats, + ), + ) + end + else + if haskey(info, :tree) + return ( + (-info[:best_Q], threshold), + (; + planner, + arrangement = [action], + tree = info[:tree], + ), + ) + else + return ( + (-info[:best_Q], threshold), + (; + 
planner, + arrangement = [action], + ), + ) + end + end + end +end + +""" + efficient_designs(costs, sampler, uncertainty, n_thresholds, evidence=Evidence(); ) + +In the uncertainty reduction setup, minimize the expected experimental resource spend over a range of uncertainty thresholds, and return the set of Pareto-efficient designs in the dimension of cost and uncertainty threshold. + +Internally, an instance of the `UncertaintyReductionMDP` structure is created for every selected uncertainty threshold and the corresponding runoffs are simulated. + +# Arguments + + - `costs`: a dictionary containing pairs `experiment => cost`, where `cost` can either be a scalar cost (modelled as a monetary cost) or a tuple `(monetary cost, execution time)`. + - `sampler`: a function of `(evidence, features, rng)`, in which `evidence` denotes the current experimental evidence, `features` represent the set of features we want to sample from, and `rng` is a random number generator; it returns a dictionary mapping the features to outcomes. + - `uncertainty`: a function of `evidence`; it returns the measure of variance or uncertainty about the target variable, conditioned on the experimental evidence acquired so far. + - `n_thresholds`: number of thresholds to consider uniformly in the range between 0 and 1, inclusive. + - `evidence=Evidence()`: initial experimental evidence. + +# Keyword Arguments + + - `solver=default_solver`: a POMDPs.jl compatible solver used to solve the decision process. The default solver is [`DPWSolver`](https://juliapomdp.github.io/MCTS.jl/dev/dpw/). + - `repetitions=0`: number of runoffs used to estimate the expected experimental cost. + - `mdp_options`: a `NamedTuple` of additional keyword arguments that will be passed to the constructor of [`UncertaintyReductionMDP`](@ref). + - `realized_uncertainty=false`: whenever the initial state uncertainty is below the selected threshold, return the actual uncertainty of this state. 
+ +# Example + +```julia +(; sampler, uncertainty, weights) = + DistanceBased(data, "HeartDisease", Entropy, Exponential(; λ = 5)); +# initialize evidence +evidence = Evidence("Age" => 35, "Sex" => "M") +# set up solver (or use default) +solver = GenerativeDesigns.DPWSolver(; n_iterations = 60_000, tree_in_info = true) +designs = efficient_designs( + costs, + experiments, + sampler, + uncertainty, + 6, + evidence; + solver, # planner + mdp_options = (; max_parallel = 1), + repetitions = 5, +) +``` +""" +function efficient_designs( + costs, + sampler, + uncertainty, + n_thresholds, + evidence = Evidence(); + solver = default_solver, + repetitions = 0, + realized_uncertainty = false, + mdp_options = (;), +) + designs = [] + for threshold in range(0.0, 1.0, n_thresholds) + @info "Current threshold level : $threshold" + push!( + designs, + efficient_design( + costs, + sampler, + uncertainty, + threshold, + evidence; + solver, + repetitions, + realized_uncertainty, + mdp_options, + ), + ) + end + ## rewrite + return front(x -> x[1], designs) +end diff --git a/src/GenerativeDesigns/distancebased.jl b/src/GenerativeDesigns/distancebased.jl index 2f08797..7067624 100644 --- a/src/GenerativeDesigns/distancebased.jl +++ b/src/GenerativeDesigns/distancebased.jl @@ -7,7 +7,7 @@ Return an anonymous function `(x, col; prior) -> λ * (x .- col).^2 / (2*σ2)`, QuadraticStandardizedDistance(; λ = 1) = function (x, col; prior = ones(length(col))) σ2 = var(col, Weights(prior); corrected=false) - λ * (x .- col) .^ 2 / (2 * σ2) + return λ * (x .- col) .^ 2 / (2 * σ2) end """ @@ -16,7 +16,7 @@ end Return an anonymous function `(x, col) -> λ * (x .== col)`. """ DiscreteMetric(; λ = 1) = function (x, col; _...) - map(y -> y == x ? λ : 0.0, col) + return map(y -> y == x ? 
λ : 0.0, col) end # default similarity functional @@ -30,7 +30,7 @@ Exponential(; λ = 1) = x -> exp(-λ * sum(x; init = 0)) # default uncertainty functionals compute_variance(data::AbstractVector; weights) = var(data, Weights(weights)) -compute_variance(data; weights) = var(Matrix(data), Weights(repeat(weights, size(data, 2)))) +compute_variance(data; weights) = sum(var(Matrix(data), Weights(weights), 1)) """ Variance(data; prior) @@ -67,9 +67,9 @@ Compute distances between experimental evidence and historical readouts, and app A named tuple with the following fields: - - `sampler`: a function of `(state, features, rng)`, in which `state` denotes the current experimental state, `features` represent the set of features we want to sample from, and `rng` is a random number generator; it returns a dictionary mapping the features to outcomes. - - `uncertainty`: a function of `state`; it returns the measure of variance or uncertainty about the target variable, conditioned on the experimental evidence acquired so far. - - `weights`: a function of `state`; it returns probabilities (posterior) acrss the rows in `data`. + - `sampler`: a function of `(evidence, features, rng)`, in which `evidence` denotes the current experimental evidence, `features` represent the set of features we want to sample from, and `rng` is a random number generator; it returns a dictionary mapping the features to outcomes. + - `uncertainty`: a function of `evidence`; it returns the measure of variance or uncertainty about the target variable, conditioned on the experimental evidence acquired so far. + - `weights`: a function of `evidence`; it returns probabilities (posterior) acrss the rows in `data`. # Arguments @@ -119,17 +119,17 @@ function DistanceBased( prior = Weights(prior) targets = target isa AbstractVector ? 
target : [target] - compute_weights = function (state::State) - if isempty(state) + compute_weights = function (evidence::Evidence) + if isempty(evidence) return prior else - array_distances = zeros((nrow(data), length(state))) - for (i, colname) in enumerate(keys(state)) + array_distances = zeros((nrow(data), length(evidence))) + for (i, colname) in enumerate(keys(evidence)) if colname ∈ targets continue else array_distances[:, i] .= - distances[colname](state[colname], data[!, colname]; prior) + distances[colname](evidence[colname], data[!, colname]; prior) end end @@ -138,24 +138,24 @@ function DistanceBased( map(i -> similarity(array_distances[i, :]), 1:size(array_distances, 1)) # hard match on target columns - for colname in collect(keys(state)) ∩ targets - similarities .*= data[!, colname] .== state[colname] + for colname in collect(keys(evidence)) ∩ targets + similarities .*= data[!, colname] .== evidence[colname] end return Weights(similarities ./ sum(similarities)) end end - sampler = function (state::State, columns, rng = default_rng()) - observed = data[sample(rng, compute_weights(state)), :] + sampler = function (evidence::Evidence, columns, rng = default_rng()) + observed = data[sample(rng, compute_weights(evidence)), :] - Dict(c => observed[c] for c in columns) + return Dict(c => observed[c] for c in columns) end f_uncertainty = uncertainty(data[!, target]; prior) - compute_uncertainty = function (state::State) - f_uncertainty(compute_weights(state)) + compute_uncertainty = function (evidence::Evidence) + return f_uncertainty(compute_weights(evidence)) end - (; sampler, uncertainty = compute_uncertainty, weights = compute_weights) + return (; sampler, uncertainty = compute_uncertainty, weights = compute_weights) end diff --git a/src/StaticDesigns/StaticDesigns.jl b/src/StaticDesigns/StaticDesigns.jl index 8e7920e..459c66b 100644 --- a/src/StaticDesigns/StaticDesigns.jl +++ b/src/StaticDesigns/StaticDesigns.jl @@ -30,6 +30,8 @@ 
include("arrangements.jl") Evaluate predictive accuracy over subsets of experiments, and return the metrics. The evaluation is facilitated by `MLJ.evaluate`; additional keyword arguments to this function will be passed to `evaluate`. +Evaluations are run in parallel. + # Arguments - `experiments`: a dictionary containing pairs `experiment => (cost =>) features`, where `features` is a subset of column names in `data`. @@ -39,6 +41,7 @@ Evaluate predictive accuracy over subsets of experiments, and return the metrics # Keyword arguments + - `max_cardinality`: maximum cardinality of experimental subsets (defaults to the number of experiments). - `zero_cost_features`: additional zero-cost features available for each experimental subset (defaults to an empty list). - `evaluate_empty_subset`: flag indicating whether to evaluate empty experimental subset. A constant column will be added if `zero_cost_features` is empty (defaults to true). - `return_full_metrics`: flag indicating whether to return full `MLJ.PerformanceEvaluation` metrics. Otherwise return an aggregate "measurement" for the first measure (defaults to false). @@ -61,14 +64,20 @@ function evaluate_experiments( model, X, y; + max_cardinality = length(experiments), zero_cost_features = [], evaluate_empty_subset::Bool = true, return_full_metrics::Bool = false, kwargs..., ) where {T} + # predictive accuracy scores over subsets of experiments scores = Dict{Set{String},return_full_metrics ? PerformanceEvaluation : Float64}() + # generate all the possible subsets from the set of experiments, with a minimum size of 1 and maximum size of 'max_cardinality' + experimental_subsets = collect(powerset(collect(keys(experiments)), 1, max_cardinality)) + # lock + lk = ReentrantLock() - for exp_set in powerset(collect(keys(experiments)), 1) + Threads.@threads for exp_set in collect(experimental_subsets) features = eltype(names(X))[zero_cost_features...] 
foreach( x -> append!( @@ -79,10 +88,14 @@ function evaluate_experiments( ) perf_eval = evaluate(model, X[:, features], y; kwargs...) - push!( - scores, - Set(exp_set) => return_full_metrics ? perf_eval : first(perf_eval.measurement), - ) + # acquire the lock to prevent race conditions + lock(lk) do + return push!( + scores, + Set(exp_set) => + return_full_metrics ? perf_eval : first(perf_eval.measurement), + ) + end end if evaluate_empty_subset @@ -99,7 +112,7 @@ function evaluate_experiments( ) end - scores + return scores end """ @@ -107,6 +120,8 @@ end Evaluate discriminative power for subsets of experiments, and return the metrics. +Evaluations are run in parallel. + # Arguments - `experiments`: a dictionary containing pairs `experiment => (cost =>) features`, where `features` is a subset of column names in `X`. @@ -157,7 +172,7 @@ function evaluate_experiments( push!(scores, Set{String}() => (; loss = perf_eval, filtration = perf_eval)) end - scores + return scores end """ @@ -210,8 +225,12 @@ function efficient_designs( end for (s, e) in evals ) + # find the optimal arrangement for each experimental subset designs = [] - for design in evals + # lock to prevent race condition + lk = ReentrantLock() + + Threads.@threads for design in collect(evals) arrangement = optimal_arrangement( experimental_costs, evals, @@ -221,20 +240,22 @@ function efficient_designs( mdp_kwargs, ) - push!( - designs, - ( - (arrangement.combined_cost, design[2].loss), - (; - arrangement = arrangement.arrangement, - monetary_cost = arrangement.monetary_cost, - time = arrangement.time, + lock(lk) do + return push!( + designs, + ( + (arrangement.combined_cost, design[2].loss), + (; + arrangement = arrangement.arrangement, + monetary_cost = arrangement.monetary_cost, + time = arrangement.time, + ), ), - ), - ) + ) + end end - front(x -> x[1], designs) + return front(x -> x[1], designs) end """ @@ -268,7 +289,7 @@ function efficient_designs( ) where {T} evals = 
evaluate_experiments(experiments, args...; eval_options...) - efficient_designs(experiments, evals; arrangement_options...) + return efficient_designs(experiments, evals; arrangement_options...) end end diff --git a/src/StaticDesigns/arrangements.jl b/src/StaticDesigns/arrangements.jl index 917d582..e507f3a 100644 --- a/src/StaticDesigns/arrangements.jl +++ b/src/StaticDesigns/arrangements.jl @@ -18,7 +18,9 @@ Base.@kwdef struct ArrangementMDP{T1<:Number,T2<:Number} <: end function POMDPs.actions(m::ArrangementMDP, state) - Set.(collect(powerset(collect(setdiff(m.experiments, state)), 1, m.max_parallel))) + return Set.( + collect(powerset(collect(setdiff(m.experiments, state)), 1, m.max_parallel)) + ) end POMDPs.isterminal(m::ArrangementMDP, state) = m.experiments == state @@ -27,14 +29,14 @@ POMDPs.initialstate(::ArrangementMDP) = Deterministic(Set{String}()) function POMDPs.transition(::ArrangementMDP, state, action) # readout features - Deterministic(state ∪ action) + return Deterministic(state ∪ action) end function POMDPs.reward(m::ArrangementMDP, state, action) monetary_cost = m.evals[state].filtration * sum(a -> m.experimental_costs[a][1], action) time = maximum(a -> m.experimental_costs[a][2], action) - -sum(m.tradeoff .* (monetary_cost, time)) + return -sum(m.tradeoff .* (monetary_cost, time)) end POMDPs.discount(::ArrangementMDP) = 1.0 diff --git a/src/fronts.jl b/src/fronts.jl index 44742da..4153b47 100644 --- a/src/fronts.jl +++ b/src/fronts.jl @@ -37,7 +37,7 @@ front(v; atol2 = 0.2) function front end function front(v::T; atol1::Float64 = 0.0, atol2::Float64 = atol1) where {T<:AbstractVector} - front(identity, v; atol1, atol2) + return front(identity, v; atol1, atol2) end function front( @@ -66,7 +66,7 @@ function front( end end - v_sorted + return v_sorted end """ @@ -110,7 +110,7 @@ function plot_front( end end - p + return p end """ @@ -139,11 +139,11 @@ Create a stick plot that visualizes the performance measures evaluated for subse function 
plot_evals(evals; ylabel = "information measure", kwargs...) xs = sort!(collect(keys(evals)); by = x -> length(x)) ys = map(xs) do x - evals[x] isa Number ? evals[x] : evals[x].loss + return evals[x] isa Number ? evals[x] : evals[x].loss end xformatter = i -> isempty(xs[Int(i)]) ? "∅" : join(xs[Int(i)], ", ") - sticks( + return sticks( 1:length(evals), ys; ticks = 1:length(evals), @@ -151,7 +151,6 @@ function plot_evals(evals; ylabel = "information measure", kwargs...) guidefontsize = 8, tickfontsize = 8, ylabel, - c = :teal, label = nothing, xrotation = 30, ) diff --git a/test/GenerativeDesigns/test.jl b/test/GenerativeDesigns/test.jl index 9762a77..e9bbadc 100644 --- a/test/GenerativeDesigns/test.jl +++ b/test/GenerativeDesigns/test.jl @@ -21,7 +21,7 @@ data = coerce(data, types); using CEED, CEED.GenerativeDesigns -state = State("Age" => 35, "Sex" => "M") +evidence = Evidence("Age" => 35, "Sex" => "M") # test `DistanceBased` sampler r = DistanceBased(data, "HeartDisease", Entropy, Exponential(; λ = 5)); @@ -30,10 +30,10 @@ r = DistanceBased(data, "HeartDisease", Entropy, Exponential(; λ = 5)); # test signatures using Random: default_rng -@test applicable(sampler, state, ["HeartDisease"], default_rng) +@test applicable(sampler, evidence, ["HeartDisease"], default_rng) -@test applicable(uncertainty, state) -@test applicable(weights, state) +@test applicable(uncertainty, evidence) +@test applicable(weights, evidence) experiments = Dict( ## experiment => features @@ -44,16 +44,75 @@ experiments = Dict( "HeartDisease" => 100.0, ) -solver = GenerativeDesigns.DPWSolver(; n_iterations = 10_000, tree_in_info = true) +# test `UncertaintyReductionMDP` + +solver = GenerativeDesigns.DPWSolver(; n_iterations = 100, tree_in_info = true) + +design = efficient_design( + experiments, + sampler, + uncertainty, + 0.0, + evidence; + solver, + mdp_options = (; max_parallel = 1), + repetitions = 5, +); + +@test design isa Tuple + designs = efficient_designs( experiments, sampler, 
uncertainty, 4, - state; + evidence; solver, mdp_options = (; max_parallel = 1), repetitions = 5, ); @test designs isa Vector +@test all(design -> (design[1][1] == 0) || hasproperty(design[2], :stats), designs) + +designs = efficient_designs( + experiments, + sampler, + uncertainty, + 4, + evidence; + solver, + mdp_options = (; max_parallel = 1), +); + +@test !hasproperty(designs[1][2], :stats) + +designs = efficient_designs( + experiments, + sampler, + uncertainty, + 4, + evidence; + solver, + realized_uncertainty = true, + mdp_options = (; max_parallel = 1), +); + +@test designs[begin][1][2] == uncertainty(evidence) + +# test `EfficientValueMDP`` + +value = function (evidence, (monetary_cost, execution_time)) + return (1 - uncertainty(evidence)) - (0.005 * sum(monetary_cost)) +end + +## use less number of iterations to speed up build process +solver = GenerativeDesigns.DPWSolver(; n_iterations = 100, depth = 2, tree_in_info = true) + +design = efficient_value(experiments, sampler, value, evidence; solver, repetitions = 5); +@test design isa Tuple +@test hasproperty(design[2], :stats) + +design = efficient_value(experiments, sampler, value, evidence; solver); +@test design isa Tuple +@test !hasproperty(design[2], :stats) \ No newline at end of file diff --git a/tutorials/GenerativeDesigns.jl b/tutorials/GenerativeDesigns.jl index 332bb12..5983a52 100644 --- a/tutorials/GenerativeDesigns.jl +++ b/tutorials/GenerativeDesigns.jl @@ -17,7 +17,7 @@ # We conceptualize the triage as a Markov decision process, in which we iteratively choose to conduct a subset of experiments $S \subseteq E$ and then, based on the experimental evidence, update our belief about the distribution of outcomes for the experiments that have not yet been conducted. 
# Within the framework, -# - _state_ is modeled as the set of experiments conducted so far along with the acquired experimental evidence; +# - _state_ is modeled as the set of experiments conducted so far along with the acquired experimental evidence and accumulated costs; # - _actions_ are subsets of experiments that have not yet been conducted; the size of these subsets is restricted by the maximum number of parallel experiments. # Importantly, the outcome of a set $S$ of experiments is modeled as a random variable $e_S$, conditioned on the current state, i.e., combined evidence. This means that if in a given state outcomes from experiments in $S \subseteq E$ are available, the outcome of experiments in $S' \subseteq E \setminus S$ is drawn from a posterior $r \sim q(e_{S'} | e_S)$. @@ -56,8 +56,9 @@ # ### Objective Sense # The reward and stopping condition of the triage process can be interpreted in various ways. -# - In our implementation, the triage continues until the uncertainty about the posterior distribution of the target variable falls below a certain level. Our aim is to minimize the anticipated combined monetary cost and execution time of the triage (considered as a 'negative' reward). If all experiments are conducted without reaching below the required uncertainty level, or if the maximum number of experiments is exceeded, we penalize this scenario with a 'minus infinite' reward. -# - Alternatively, one could aim to minimize the expected uncertainty while being constrained by the costs of the experiment. +# - The triage may continue until the uncertainty about the posterior distribution of the target variable falls below a certain level. Our aim is to minimize the anticipated combined monetary cost and execution time of the triage (considered as a 'negative' reward). 
If all experiments are conducted without reaching below the required uncertainty level, or if the maximum number of experiments is exceeded, we penalize this scenario with a 'minus infinite' reward. +# - We may aim to minimize the expected uncertainty while being constrained by the costs of the experiment. +# - Alternatively, we could maximize the value of experimental evidence, adjusted for the incurred experimental costs. # ### Policy Search # Standard MDP algorithms can be used to solve this problem (offline learning) or construct the policy (online learning) for the sequential decision-making. @@ -117,28 +118,43 @@ using CEED, CEED.GenerativeDesigns # As previously discussed, we provide a dataset of historical records, the target variable, along with an information-theoretic measure to quantify the uncertainty about the target variable. # In what follows, we obtain three functions: -# - `sampler`: this is a function of `(state, features, rng)`, in which `state` denotes the current experimental state, `features` represent the set of features we want to sample from, and `rng` is a random number generator; -# - `uncertainty`: this is a function of `state`, -# - `weights`: this represents a function of `state` that distributes probabilistic weights across the rows in the dataset. +# - `sampler`: this is a function of `(evidence, features, rng)`, in which `evidence` denotes the current experimental evidence, `features` represent the set of features we want to sample from, and `rng` is a random number generator; +# - `uncertainty`: this is a function of `evidence`, +# - `weights`: this represents a function of `evidence` that distributes probabilistic weights across the rows in the dataset. + +# Note that internally, a state of the decision process is represented as a tuple `(evidence, costs)`. 
(; sampler, uncertainty, weights) = DistanceBased(data, "HeartDisease", Entropy, Exponential(; λ = 5)); +# The CEED package offers an additional flexibility by allowing an experiment to yield readouts over multiple features at the same time. In our scenario, we can consider the features `RestingECG`, `Oldpeak`, `ST_Slope`, and `MaxHR` to be obtained from a single experiment `ECG`. + +# We specify the experiments along with the associated features: + +experiments = Dict( + ## experiment => features + "BloodPressure" => 1.0 => ["RestingBP"], + "ECG" => 5.0 => ["RestingECG", "Oldpeak", "ST_Slope", "MaxHR"], + "BloodCholesterol" => 20.0 => ["Cholesterol"], + "BloodSugar" => 20.0 => ["FastingBS"], + "HeartDisease" => 100.0, +) + # Let us inspect the distribution of belief for the following experimental evidence: -state = State("Age" => 55, "Sex" => "M") +evidence = Evidence("Age" => 55, "Sex" => "M") # using StatsBase: countmap using Plots # -target_belief = countmap(data[!, "HeartDisease"], weights(state)) +target_belief = countmap(data[!, "HeartDisease"], weights(evidence)) p = bar( 0:1, [target_belief[0], target_belief[1]]; xrot = 40, - c = :teal, ylabel = "probability", - title = "unc: $(round(uncertainty(state), digits=1))", + color = :teal, + title = "unc: $(round(uncertainty(evidence), digits=1))", kind = :bar, legend = false, ); @@ -147,36 +163,24 @@ p # Let us next add an outcome of blood pressure measurement: -state_with_bp = merge(state, Dict("RestingBP" => 190)) +evidence_with_bp = merge(evidence, Dict("RestingBP" => 190)) -target_belief = countmap(data[!, "HeartDisease"], weights(state_with_bp)) +target_belief = countmap(data[!, "HeartDisease"], weights(evidence_with_bp)) p = bar( 0:1, [target_belief[0], target_belief[1]]; xrot = 40, - c = :teal, ylabel = "probability", - title = "unc: $(round(uncertainty(state_with_bp), digits=2))", + title = "unc: $(round(uncertainty(evidence_with_bp), digits=2))", kind = :bar, legend = false, ); xticks!(p, 0:1, ["no 
disease", "disease"]); p -# ## Cost-Efficient Designs +# ## Cost-Efficient Experimental Designs for Uncertainty Reduction -# The CEED package offers an additional flexibility by allowing an experiment to yield readouts over multiple features at the same time. In our scenario, we can consider the features `RestingECG`, `Oldpeak`, `ST_Slope`, and `MaxHR` to be obtained from a single experiment `ECG`. - -# We specify the experiments along with the associated features: - -experiments = Dict( - ## experiment => features - "BloodPressure" => 1.0 => ["RestingBP"], - "ECG" => 5.0 => ["RestingECG", "Oldpeak", "ST_Slope", "MaxHR"], - "BloodCholesterol" => 20.0 => ["Cholesterol"], - "BloodSugar" => 20.0 => ["FastingBS"], - "HeartDisease" => 100.0, -) +# In this experimental setup, our objective is to minimize the expected experimental cost while ensuring the uncertainty remains below a specified threshold. # We use the provided function `efficient_designs` to construct the set of cost-efficient experimental designs for various levels of uncertainty threshold. In the following example, we generate 6 thresholds spaced evenly between 0 and 1, inclusive. @@ -186,16 +190,20 @@ experiments = Dict( using Random: seed! 
seed!(1) # -state = State("Age" => 35, "Sex" => "M") +evidence = Evidence("Age" => 35, "Sex" => "M") # ## use less number of iterations to speed up build process -solver = GenerativeDesigns.DPWSolver(; n_iterations = 40_000, tree_in_info = true) +solver = GenerativeDesigns.DPWSolver(; + n_iterations = 20_000, + exploration_constant = 5.0, + tree_in_info = true, +) designs = efficient_designs( experiments, sampler, uncertainty, 6, - state; + evidence; solver, mdp_options = (; max_parallel = 1), repetitions = 5, @@ -228,13 +236,17 @@ experiments = Dict( ## minimize time, two concurrent experiments at maximum seed!(1) ## use less number of iterations to speed up build process -solver = GenerativeDesigns.DPWSolver(; n_iterations = 60_000, tree_in_info = true) +solver = GenerativeDesigns.DPWSolver(; + n_iterations = 20_000, + exploration_constant = 5.0, + tree_in_info = true, +) designs = efficient_designs( experiments, sampler, uncertainty, 6, - state; + evidence; solver, mdp_options = (; max_parallel = 2, costs_tradeoff = [0, 1.0]), repetitions = 5, @@ -243,3 +255,35 @@ designs = efficient_designs( # We plot the Pareto-efficient actions: plot_front(designs; labels = make_labels(designs), ylabel = "% uncertainty") + +# ## Efficient Value Experimental Designs + +# In this experimental setup, we aim to maximize the value of experimental evidence, adjusted for the incurred experimental costs. + +# For this purpose, we need to specify a function that quantifies the 'value' of a state of the decision-making process, modeled as a tuple of experimental evidence and costs. + +value = function (evidence, (monetary_cost, execution_time)) + return (1 - uncertainty(evidence)) - (0.005 * sum(monetary_cost)) +end + +# Considering a discount factor $\lambda$, the total reward associated with the experimental state in an $n$-step decision process is given by $r = r_1 + \sum_{i=2}^n \lambda^{i-1} (r_i - r_{i-1})$, where $r_i$ is the value associated with the $i$-th state. 
+ +# In the following example, we also limit the maximum rollout horizon to 4. +# +seed!(1) +## use less number of iterations to speed up build process +solver = + GenerativeDesigns.DPWSolver(; n_iterations = 20_000, depth = 4, tree_in_info = true) +design = efficient_value( + experiments, + sampler, + value, + evidence; + solver, + repetitions = 5, + mdp_options = (; discount = 0.8), +); +# +design[1] # optimized cost-adjusted value +# +d3tree = D3Tree(design[2].tree; init_expand = 2) diff --git a/tutorials/GliomaGrading/GliomaGrading.jl b/tutorials/GliomaGrading/GliomaGrading.jl new file mode 100644 index 0000000..d397ba0 --- /dev/null +++ b/tutorials/GliomaGrading/GliomaGrading.jl @@ -0,0 +1,526 @@ +### A Pluto.jl notebook ### +# v0.19.27 + +using Markdown +using InteractiveUtils + +# This Pluto notebook uses @bind for interactivity. When running this notebook outside of Pluto, the following 'mock version' of @bind gives bound variables a default value (instead of an error). +macro bind(def, element) + quote + local iv = try Base.loaded_modules[Base.PkgId(Base.UUID("6e696c72-6542-2067-7265-42206c756150"), "AbstractPlutoDingetjes")].Bonds.initial_value catch; b -> missing; end + local el = $(esc(element)) + global $(esc(def)) = Core.applicable(Base.get, el) ? 
Base.get(el) : iv(el) + el + end +end + +# ╔═╡ fc17a594-02f3-46b5-a012-136ab5d0ba38 +# ╠═╡ show_logs = false +begin + import Pkg + Pkg.activate("..") + + using PlutoUI + md""" +Case [_TCGA-HT-8564_](https://portal.gdc.cancer.gov/cases/f625e522-226b-450f-af94-dd2f5adb605e?filters=%7B%22content%22%3A%5B%7B%22content%22%3A%7B%22field%22%3A%22cases.project.project_id%22%2C%22value%22%3A%5B%22TCGA-LGG%22%5D%7D%2C%22op%22%3A%22in%22%7D%5D%2C%22op%22%3A%22and%22%7D), Diagnosis _Astrocytoma, anaplastic_: + +$(LocalResource("glioma_slide.jpeg", :width => 500, :style => "display: block; margin-left: auto; margin-right: auto;")) + """ +end + +# ╔═╡ d66ad52c-4ac9-4a04-884b-bfa013f9acd9 +begin + using CSV, DataFrames + data = CSV.File("../data/glioma_grading.csv") |> DataFrame +end + +# ╔═╡ b7b62e65-9bd3-4bd8-9a71-2bea4ba109c4 +begin + using MLJ + import BetaML, MLJModels + using Random: seed! + + md"We fix the scientific types of features." +end + +# ╔═╡ 6ebd71e0-b49a-4d12-b441-4805efc69520 +begin + using CEED, CEED.StaticDesigns + + md""" + ### Cost-Efficient Feature Selection + + We use `evaluate_experiments` from `CEED.StaticDesigns` to evaluate the predictive accuracy over subsets of experiments. We use `LogLoss` as a measure of accuracy. It is possible to pass additional keyword arguments, which will be passed to `MLJ.evaluate` (such as `measure`, shown below). + """ +end + +# ╔═╡ 7f2e19e0-4cf3-11ee-0e10-dba25ffccc94 +md""" +# Cost-Efficient Experimental Design Towards Glioma Grading + +Gliomas are the most common primary tumors of the brain. They can be graded as LGG (Lower-Grade Glioma) or GBM (Glioblastoma Multiforme), depending on the histological and imaging criteria. Clinical and molecular or genetic factors are also very crucial for the grading process. The ultimate aim is to identify the optimal subset of clinical, molecular or genetic, and histological features for the glioma grading process to improve diagnostic accuracy and reduce costs. 
+ +## Theoretical Framework + +Let us consider a set of $n$ experiments $E = \{ e_1, \ldots, e_n\}$. + +For each subset $S \subseteq E$ of experiments, we denote by $v_S$ the value of information acquired from conducting experiments in $S$. + +In the cost-sensitive setting of CEED, conducting an experiment $e$ incurs a cost $(m_e, t_e)$. Generally, this cost is specified in terms of monetary cost and execution time of the experiment. + +To compute the cost associated with carrying out a set of experiments $S$, we first need to introduce the notion of an arrangement $o$ of the experiments $S$. An arrangement is modeled as a sequence of mutually disjoint subsets of $S$. In other words, $o = (o_1, \ldots, o_l)$ for a given $l\in\mathbb N$, where $\bigcup_{i=1}^l o_i = S$ and $o_i \cap o_j = \emptyset$ for each $1\leq i < j \leq l$. + +Given a subset $S$ of experiments and their arrangement $o$, the total monetary cost and execution time of the experimental design is given as $m_o = \sum_{e\in S} m_e$ and $t_o = \sum_{i=1}^l \max \{ t_e : e\in o_i\}$, respectively. + +For instance, consider the experiments $e_1,\, e_2,\, e_3$, and $e_4$ with associated costs $(1, 1)$, $(1, 3)$, $(1, 2)$, and $(1, 4)$. If we conduct experiments $e_1$ through $e_4$ in sequence, this would correspond to an arrangement $o = (\{ e_1 \}, \{ e_2 \}, \{ e_3 \}, \{ e_4 \})$ with a total cost of $m_o = 4$ and $t_o = 10$. + +However, if we decide to conduct $e_1$ in parallel with $e_3$, and $e_2$ with $e_4$, we would obtain an arrangement $o = (\{ e_1, e_3 \}, \{ e_2, e_4 \})$ with a total cost of $m_o = 4$, and $t_o = 3 + 4 = 7$. + +Given the constraint on the maximum number of parallel experiments, we devise an arrangement $o$ of experiments $S$ such that, for a fixed tradeoff between monetary cost and execution time, the expected combined cost $c_{(o, \lambda)} = \lambda m_o + (1-\lambda) t_o$ is minimized (i.e., the execution time is minimized). 
+ +In fact, it can be readily demonstrated that the optimal arrangement can be found by ordering the experiments in set $S$ in descending order according to their execution times. Consequently, the experiments are grouped sequentially into sets whose size equals the maximum number of parallel experiments, except possibly for the final set. + +Continuing our example and assuming a maximum of two parallel experiments, the optimal arrangement is to conduct $e_1$ in parallel with $e_2$, and $e_3$ with $e_4$. This results in an arrangement $o = (\{ e_1, e_2 \}, \{ e_3, e_4 \})$ with a total cost of $m_o = 4$ and $t_o = 2 + 4 = 6$. + +Assuming the information values $v_S$ and optimized experimental costs $c_S$ for each subset $S \subseteq E$ of experiments, we then generate a set of cost-efficient experimental designs. + +### Application to Predictive Modeling + +Consider a dataset of historical readouts over $m$ features $X = \{x_1, \ldots, x_m\}$, and let $y$ denote the target variable that we want to predict. + +We assume that each experiment $e \in E$ yields readouts over a subset $X_e \subseteq X$ of features. + +Then, for each subset $S \subseteq E$ of experiments, we may model the value of information acquired by conducting the experiments in $S$ as the accuracy of a predictive model that predicts the value of $y$ based on readouts over features in $X_S = \bigcup_{e\in S} X_e$. +""" + +# ╔═╡ 2edaa133-6907-44f9-b39c-da06dae3eead +md""" +## Glioma Grading Clinical and Mutation Dataset + +In this dataset, the instances represent patient records of those diagnosed with brain glioma. The dataset is publicly available at [Glioma Grading Clinical and Mutation Features](https://archive.ics.uci.edu/dataset/759/glioma+grading+clinical+and+mutation+features+dataset). It is constructed based on the TCGA-LGG and TCGA-GBM brain glioma projects available at the [NIH GDC Data Portal](https://portal.gdc.cancer.gov). 
+ +Each record is characterized by + +- 3 clinical features (age, gender, race), +- 5 mutation factors (IDH1, TP53, ATRX, PTEN, EGFR; each of which can be 'mutated' or 'not_mutated'). + +We list somatic mutations with the highest number of affected cases in cohort: +""" + +# ╔═╡ 6737a98e-20b9-4016-85f6-c8f4a07d04f8 +md"""$(LocalResource("mutations.png", :width => 500, :style => "display: block; margin-left: auto; margin-right: auto;"))""" + +# ╔═╡ 1364c91a-4331-4a1b-8c3d-fb40743c1df7 +md"We load the dataset:" + +# ╔═╡ 87f9826f-865f-4d1f-b122-e1ccae51bfdf +md"## Assessing the Predictive Accuracy + +We specify the clinical features and mutation factors." + +# ╔═╡ 9835f38b-b720-429d-8a8b-eab742fe7e05 +features_clinical = ["Age_at_diagnosis", "Gender", "Race"] + +# ╔═╡ 0bb794a2-d0b4-493d-8a10-d19a515271ec +features_mutation = ["IDH1", "TP53", "ATRX", "PTEN", "EGFR", "CIC", "MUC16"] + +# ╔═╡ 1c72a85d-19cb-44f3-b330-a312b9ef9b7c +md"Classification target is just the glioma grade" + +# ╔═╡ 7e2bb98c-1175-4964-8c41-8e3ac8e6eb9f +target = "Grade" + +# ╔═╡ d74684f6-7fd4-41c8-9917-d99dfc1f5f64 +md"In the cost-sensitive setting of CEED, obtaining additional experimental evidence comes with a cost. We assume that each gene mutation factor is obtained through a separate experiment." + +# ╔═╡ f176f8cf-8941-4257-ba41-60fff864aa56 +# We assume that each feature is measured separately and the measurement incurs a monetary cost. +experiments = Dict( + ## experiment => features + "TP53" => 3.0 => ["TP53"], + "EGFR" => 2.0 => ["EGFR"], + "PTEN" => 4.0 => ["PTEN"], + "ATRX" => 2.0 => ["ATRX"], + "IDH1" => 3.0 => ["IDH1"], + "CIC" => 1.0 => ["CIC"], + "MUC16" => 2.0 => ["MUC16"] +) + +# ╔═╡ 9547fc15-7dff-49a3-b5d2-43b54a6dc443 +md""" +### Classifier + +We use a package called [MLJ.jl](https://alan-turing-institute.github.io/MLJ.jl/dev/) to evaluate the predictive accuracy over subsets of experimental features. 
+""" + +# ╔═╡ 61219167-805c-4624-932e-d050a13ada07 +begin + types = Dict( + name => Multiclass for name in [Symbol.(features_mutation); :Grade; :Gender; :Race] + ) + + data_typefix = coerce(data, types) + schema(data_typefix) +end + +# ╔═╡ 63b7b1f3-660d-4a42-8b2c-3c0851d2b859 +md"Next, we choose a particular predictive model that will be evaluated in the sequel. We can list all models that are compatible with the dataset:" + +# ╔═╡ d0973a67-f24e-4ffd-8343-7fe7be400d07 +models(matching(data_typefix, data_typefix[:, target])) + +# ╔═╡ f797ccd0-e98e-4617-a966-455469d16096 +md"Eventually, we fix `RandomForestClassifier` from [BetaML](https://github.com/sylvaticus/BetaML.jl)" + +# ╔═╡ b987567b-f500-4837-8f23-412a2eecec51 +classifier = @load RandomForestClassifier pkg = BetaML verbosity = -1 + +# ╔═╡ 88a20956-e665-4b0f-81f2-0e7ec8a1e28f +model = classifier(; n_trees = 8, max_depth = 5) + +# ╔═╡ c1860fca-f304-4bb4-9266-5a6d71467e27 +# ╠═╡ show_logs = false +begin + seed!(1) # evaluation process generally is not deterministic + perf_eval = evaluate_experiments( + experiments, + model, + data_typefix[!, Not(target)], + data_typefix[!, target]; + zero_cost_features = features_clinical, + measure = LogLoss(), + resampling = CV(; nfolds = 10), + ) +end + +# ╔═╡ ccdef55c-0570-404a-bb3d-ea0b9487c321 +# ╠═╡ show_logs = false +begin + using Plots + plotly() + designs = efficient_designs(experiments, perf_eval) + + function plot_front_invert( + designs; + grad = cgrad(:Paired_12), + xlabel = "combined cost", + ylabel = "information measure", + labels = make_labels(designs), + ) + xs = map(x -> x[1][1], designs) + ys = map(x -> x[1][2], designs) + + p = scatter( + [xs[1]], + [1 - ys[1]]; + xlabel, + ylabel, + label = labels[1], + c = grad[1], + mscolor = nothing, + fontsize = 16, + legendposition = :bottomright, + title = "cost-efficient experimental designs", + ) + for i = 2:length(designs) + if xs[i] < 10_000 + scatter!( + p, + [xs[i]], + [1 - ys[i]]; + label = labels[i], + c = 
grad[i], + mscolor = nothing, + ) + end + end + + return p + end + + plot_front_invert( + designs; + labels = make_labels(designs), + ylabel = "accuracy (1-logloss)", + ) # fill("", (1, length(designs))) +end + +# ╔═╡ 01646835-55a9-42af-8eb3-29816c9780b7 +md"We proceed to construct the set of cost-efficient experimental designs. In doing so, our goal is to identify the optimal sets of mutation factors for the glioma grading task, balancing the conflicting objectives of enhancing prediction accuracy and reducing incurred costs." + +# ╔═╡ dcd790bd-36a5-4536-9e21-4f54fe2cf4e4 +begin + function predict( + X; + positive_label = 1, + negative_label = 0, + sensitivity::Float64, + specificity::Float64, + ) + y_pred = similar(X, Union{typeof(positive_label), typeof(negative_label)}) + + # simulate a predictor with given sensitivity and specificity + for i in eachindex(X) + if X[i] == positive_label + y_pred[i] = rand() < sensitivity ? positive_label : negative_label + else + y_pred[i] = rand() < (1 - specificity) ? positive_label : negative_label + end + end + + return y_pred + end + + sensitivity_slider = @bind sensitivity Slider(0:0.01:1, default = 0.73, show_value = true) + specificity_slider = @bind specificity Slider(0:0.01:1, default = 0.74, show_value = true) + cost_slider = @bind cost Slider(1:1:10, default = 1, show_value = true) + + md""" + ## Assessing the Impact of Histopathology Image Analysis on the Experimental Cost-Efficiency + + Building on the previous example, we will consider the introduction of a new feature in the task of glioma grading, where this feature will essentially function as a predictor of the glioma grade. + + In [Glioma Grading via Analysis of Digital Pathology Images Using Machine Learning](https://www.ncbi.nlm.nih.gov/pmc/articles/PMC7139732/), the authors proposed a computational method that exploits pattern analysis methods for grade prediction in gliomas using digital pathology images. 
+ + From the abstract, _according to the remarkable performance of computational approaches in the digital pathology domain, we hypothesized that machine learning can help to distinguish low-grade gliomas (LGG) from high-grade gliomas (HGG) by exploiting the rich phenotypic information that reflects the microvascular proliferation level, mitotic activity, presence of necrosis, and nuclear atypia present in digital pathology images. A set of 735 whole-slide digital pathology images of glioma patients (median age: 49.65 years, male: 427, female: 308, median survival: 761.26 days) were obtained from TCGA. Sub-images that contained a viable tumor area, showing sufficient histologic characteristics, and that did not have any staining artifact were extracted. Several clinical measures and imaging features, including conventional (intensity, morphology) and advanced textures features (gray-level co-occurrence matrix and gray-level run-length matrix), extracted from the sub-images were further used for training the support vector machine model with linear configuration._ + + $(LocalResource("architecture.png", :width => 600, :style => "display: block; margin-left: auto; margin-right: auto;")) + + The authors aimed to evaluate the combined effect of conventional imaging, clinical, and texture features by assessing the predictive value of each feature type and their combinations through a predictive classifier. + + For our specific intent, we will focus on the predictive accuracy of a classifier that utilizes only imaging features. + + $(LocalResource("accuracy.png", :width => 600, :style => "display: block; margin-left: auto; margin-right: auto;")) + + We will artificially produce the grade predictions, modelling them as predictor outputs with the defined sensitivity and specificity. Note that we will not incorporate any further correlations with other features such as clinical factors, mutation factors, or histology. 
+ + In addition, we consider the cost of the predictive classifier development. + + | parameter | value picker | + | :-----: | :------: | + | sensitivity | $sensitivity_slider | + | specificity | $specificity_slider | + | cost | $cost_slider | + """ +end + +# ╔═╡ c2121983-7135-4833-a33a-2dbc331272f3 +digital_pathology = + map(x -> x == "LGG" ? "lower grade" : "glioblastoma", predict(data_typefix[!, target]; positive_label = "GBM", negative_label="LGG", sensitivity, specificity)) + +# ╔═╡ a271f163-046e-40cc-8134-c7619bf6a63a +begin + experiments_new_feature = + push!(copy(experiments), "digital_pathology" => cost => ["digital_pathology"]) +end + +# ╔═╡ 9ee21001-8404-4d5d-875b-6ea7952f65b8 +md"We add the new feature to the dataset:" + +# ╔═╡ bc31504a-9374-4da3-9d68-030994ba8fcf +begin + data_new_feature = data_new_feature = copy(data_typefix) + data_new_feature.digital_pathology = digital_pathology + + data_new_feature_typefix = coerce( + data_new_feature, + Dict( + ( + name => Multiclass for + name in [Symbol.(features_mutation); :Grade; :Gender; :Race] + )..., + :new_feature => Multiclass, + ), + ) + + data_new_feature_typefix[!, [["IDH1", "digital_pathology"]; setdiff(names(data_new_feature_typefix), ("digital_pathology", "IDH1"))]] +end + +# ╔═╡ 0c58ddff-ad6f-4e80-8f1a-3c0c8d5245d2 +# ╠═╡ show_logs = false +begin + seed!(1) # evaluation process generally is not deterministic + perf_eval_new_feature = evaluate_experiments( + experiments_new_feature, + model, + data_new_feature_typefix[!, Not(target)], + data_new_feature_typefix[!, target]; + zero_cost_features = features_clinical, + measure = LogLoss(), + resampling = CV(; nfolds = 10), + ) +end + +# ╔═╡ 3d710dad-9b8c-47a3-966b-9718cb76fad8 +md"We evaluate performance measures across different experimental subsets, comparing those that include the histology feature and those that do not." 
+ +# ╔═╡ 6ed9b5e5-754c-47f6-b1e4-2514d23039d2 +begin + evals1_new_feature = filter(x -> "digital_pathology" ∉ x[1], perf_eval_new_feature) + xs1_new_feature = + sort!(collect(keys(evals1_new_feature)); by = x -> -perf_eval_new_feature[x]) + #xs1_new_feature = xs1_new_feature[begin:2:end] + ys1_new_feature = map(xs1_new_feature) do x + return if perf_eval_new_feature[x] isa Number + perf_eval_new_feature[x] + else + perf_eval_new_feature[x].loss + end + end + + evals2_new_feature = filter(x -> "digital_pathology" ∈ x[1], perf_eval_new_feature) + xs2_new_feature_new_feature = sort!( + collect(keys(evals2_new_feature)); + by = x -> -perf_eval_new_feature[setdiff(x, ["histology"])], + ) + #xs2_new_feature_new_feature = xs2_new_feature_new_feature[begin:2:end] + ys2_new_feature = map(xs2_new_feature_new_feature) do x + return if perf_eval_new_feature[x] isa Number + perf_eval_new_feature[x] + else + perf_eval_new_feature[x].loss + end + end + + p_evals_new_feature = Plots.sticks( + 1:length(xs2_new_feature_new_feature), + 1 .- min.(ys1_new_feature, ys2_new_feature); + ticks = 1:4:length(evals2_new_feature), + #xformatter=i -> isempty(xs2_new_feature_new_feature[Int(i)]) ? "∅" : join(xs1_new_feature[Int(i)], ", "), + guidefontsize = 8, + tickfontsize = 8, + ylabel = "accuracy", + c = CEED.colorant"rgb(110,206,178)", + label = "w/ histology feature", + xrotation = 50, + ) + + Plots.sticks!( + p_evals_new_feature, + 1:length(xs1_new_feature), + 1 .- ys1_new_feature; + ticks = 1:4:length(evals1_new_feature), + xformatter = i -> + isempty(xs1_new_feature[Int(i)]) ? 
"∅" : join(xs1_new_feature[Int(i)], ", "), + guidefontsize = 8, + tickfontsize = 8, + ylabel = "accuracy", + c = CEED.colorant"rgb(104,140,232)", + label = "w/o histology feature", + width = 2, + xrotation = 50, + ) +end + +# ╔═╡ 765ed41a-50e5-4989-98a1-1d2abfa3ba28 +md"We proceed to construct the set of cost-efficient experimental designs, comparing the frontier that incorporates the histology feature with the one that does not." + +# ╔═╡ 95567d4d-6594-4a25-8449-b7b95738c56c +# ╠═╡ show_logs = false +begin + experiments_new_feature_no_feature = copy(experiments_new_feature) + delete!(experiments_new_feature_no_feature, "digital_pathology") + + seed!(1) + perf_eval_new_feature_no_feature = evaluate_experiments( + experiments_new_feature_no_feature, + model, + data_new_feature_typefix[!, Not(target)], + data_new_feature_typefix[!, target]; + zero_cost_features = features_clinical, + measure = LogLoss(), + resampling = CV(; nfolds = 10), + ) + + for (k, v) in perf_eval_new_feature_no_feature + perf_eval_new_feature[k] = v + end + + design_new_feature = efficient_designs(experiments_new_feature, perf_eval_new_feature) + + design_new_feature_no_feature = efficient_designs( + experiments_new_feature_no_feature, + perf_eval_new_feature_no_feature, + ) + + p_new_feature = scatter( + map(x -> x[1][1], design_new_feature), + map(x -> 1 - x[1][2], design_new_feature); + xlabel = "combined cost", + ylabel = "accuracy", + label = "w/ histology feature", + c = CEED.colorant"rgb(110,206,178)", + mscolor = nothing, + fontsize = 16, + #fill = (0, CEED.colorant"rgb(110,206,178)"), + fillalpha = 0.2, + legend = :bottomright, + ) + + scatter!( + p_new_feature, + map(x -> x[1][1], design_new_feature_no_feature), + map(x -> 1 - x[1][2], design_new_feature_no_feature); + label = "w/o histology feature", + c = CEED.colorant"rgb(104,140,232)", + mscolor = nothing, + fontsize = 16, + #fill = (0, CEED.colorant"rgb(104,140,232)"), + fillalpha = 0.15, + title = "sensitivity = $sensitivity, 
specificity = $specificity, cost = $cost", + ) +end + +# ╔═╡ 53023ce3-2c74-46b7-82eb-589a78ca89c0 +md"This is a dynamic illustration demonstrating the efficient frontiers generated for a range of predictive model parameters." + +# ╔═╡ 6900000d-8bfd-4f8b-bf5b-46cefce652e4 +LocalResource("anim.gif") + +# ╔═╡ 471eaf63-ca16-486e-b63a-ea3852768a0f +html"""