build(deps-dev): bump ruff from 0.5.1 to 0.6.1 (opentargets#732)
* build(deps-dev): bump ruff from 0.5.1 to 0.6.1

Bumps [ruff](https://github.com/astral-sh/ruff) from 0.5.1 to 0.6.1.
- [Release notes](https://github.com/astral-sh/ruff/releases)
- [Changelog](https://github.com/astral-sh/ruff/blob/main/CHANGELOG.md)
- [Commits](astral-sh/ruff@0.5.1...0.6.1)

---
updated-dependencies:
- dependency-name: ruff
  dependency-type: direct:development
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <[email protected]>

* fix: linting issues associated with ruff 0.6

* chore: fixing imports for notebooks

* chore: removing old notebook

---------

Signed-off-by: dependabot[bot] <[email protected]>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: David Ochoa <[email protected]>
Co-authored-by: Daniel Considine <[email protected]>
3 people authored Aug 22, 2024
1 parent e45f295 commit f49a5c5
Showing 13 changed files with 2,393 additions and 4,637 deletions.
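Context for the notebook diffs below: ruff 0.6.0 began linting and formatting Jupyter notebooks by default, so these .ipynb files now get the same treatment as .py modules — double-quote normalization from the formatter, isort-style import ordering, and (plausibly via the flake8-print T20 rules, if the project enables them) removal of bare print calls. A minimal before/after sketch of the quote fix; the pd.set_option call is borrowed from the diff and the pairing is illustrative, not the project's exact configuration:

    import pandas as pd

    # Before the bump: single quotes survived because ruff skipped notebooks.
    pd.set_option('display.max_rows', None)

    # After `ruff format` on 0.6: double quotes, matching the .py style.
    pd.set_option("display.max_rows", None)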
32 changes: 7 additions & 25 deletions notebooks/FineMappingSimmuations.ipynb
@@ -82,7 +82,7 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"ld_matrix = np.load('/Users/yt4/Projects/ot_data/tmp/ld_matrix.npy')\n",
+"ld_matrix = np.load(\"/Users/yt4/Projects/ot_data/tmp/ld_matrix.npy\")\n",
 "ld_index=session.spark.read.parquet(\"/Users/yt4/Projects/ot_data/tmp/ld_index\")\n",
 "ld_matrix_for_sim=ld_matrix[0:500,:][:,0:500]\n",
 "ld_index_for_sim=ld_index.limit(500)"
@@ -129,11 +129,8 @@
 }
 ],
 "source": [
-"print(FineMappingSimulations.ProvideSummary(cred_sets=x1,n_causal=n_causal))\n",
 "x2=x1[(x1[\"pValueExponent\"]<=-6) | (x1[\"credibleSetIndex\"]==1)]\n",
-"print(FineMappingSimulations.ProvideSummary(cred_sets=x2,n_causal=n_causal))\n",
-"x3=x2[(x2[\"purityMinR2\"]>=0.25) | (x2[\"credibleSetIndex\"]==1)]\n",
-"print(FineMappingSimulations.ProvideSummary(cred_sets=x3,n_causal=n_causal))"
+"x3=x2[(x2[\"purityMinR2\"]>=0.25) | (x2[\"credibleSetIndex\"]==1)]"
 ]
 },
 {
@@ -177,11 +174,8 @@
 }
 ],
 "source": [
-"print(FineMappingSimulations.ProvideSummary(cred_sets=x1,n_causal=n_causal))\n",
 "x2=x1[(x1[\"pValueExponent\"]<=-6) | (x1[\"credibleSetIndex\"]==1)]\n",
-"print(FineMappingSimulations.ProvideSummary(cred_sets=x2,n_causal=n_causal))\n",
-"x3=x2[(x2[\"purityMinR2\"]>=0.25) | (x2[\"credibleSetIndex\"]==1)]\n",
-"print(FineMappingSimulations.ProvideSummary(cred_sets=x3,n_causal=n_causal))"
+"x3=x2[(x2[\"purityMinR2\"]>=0.25) | (x2[\"credibleSetIndex\"]==1)]"
 ]
 },
 {
@@ -227,11 +221,8 @@
 }
 ],
 "source": [
-"print(FineMappingSimulations.ProvideSummary(cred_sets=x1,n_causal=n_causal))\n",
 "x2=x1[(x1[\"pValueExponent\"]<=-6) | (x1[\"credibleSetIndex\"]==1)]\n",
-"print(FineMappingSimulations.ProvideSummary(cred_sets=x2,n_causal=n_causal))\n",
-"x3=x2[(x2[\"purityMinR2\"]>=0.25) | (x2[\"credibleSetIndex\"]==1)]\n",
-"print(FineMappingSimulations.ProvideSummary(cred_sets=x3,n_causal=n_causal))"
+"x3=x2[(x2[\"purityMinR2\"]>=0.25) | (x2[\"credibleSetIndex\"]==1)]"
 ]
 },
 {
@@ -277,11 +268,8 @@
 }
 ],
 "source": [
-"print(FineMappingSimulations.ProvideSummary(cred_sets=x1,n_causal=n_causal))\n",
 "x2=x1[(x1[\"pValueExponent\"]<=-6) | (x1[\"credibleSetIndex\"]==1)]\n",
-"print(FineMappingSimulations.ProvideSummary(cred_sets=x2,n_causal=n_causal))\n",
-"x3=x2[(x2[\"purityMinR2\"]>=0.25) | (x2[\"credibleSetIndex\"]==1)]\n",
-"print(FineMappingSimulations.ProvideSummary(cred_sets=x3,n_causal=n_causal))"
+"x3=x2[(x2[\"purityMinR2\"]>=0.25) | (x2[\"credibleSetIndex\"]==1)]"
 ]
 },
 {
@@ -335,11 +323,8 @@
 }
 ],
 "source": [
-"print(FineMappingSimulations.ProvideSummary(cred_sets=x1,n_causal=n_causal))\n",
 "x2=x1[(x1[\"pValueExponent\"]<=-6) | (x1[\"credibleSetIndex\"]==1)]\n",
-"print(FineMappingSimulations.ProvideSummary(cred_sets=x2,n_causal=n_causal))\n",
-"x3=x2[(x2[\"purityMinR2\"]>=0.25) | (x2[\"credibleSetIndex\"]==1)]\n",
-"print(FineMappingSimulations.ProvideSummary(cred_sets=x3,n_causal=n_causal))"
+"x3=x2[(x2[\"purityMinR2\"]>=0.25) | (x2[\"credibleSetIndex\"]==1)]"
 ]
 },
 {
@@ -386,11 +371,8 @@
 }
 ],
 "source": [
-"print(FineMappingSimulations.ProvideSummary(cred_sets=x1,n_causal=n_causal))\n",
 "x2=x1[(x1[\"pValueExponent\"]<=-6) | (x1[\"credibleSetIndex\"]==1)]\n",
-"print(FineMappingSimulations.ProvideSummary(cred_sets=x2,n_causal=n_causal))\n",
-"x3=x2[(x2[\"purityMinR2\"]>=0.25) | (x2[\"credibleSetIndex\"]==1)]\n",
-"print(FineMappingSimulations.ProvideSummary(cred_sets=x3,n_causal=n_causal))"
+"x3=x2[(x2[\"purityMinR2\"]>=0.25) | (x2[\"credibleSetIndex\"]==1)]"
 ]
 },
 {
51 changes: 16 additions & 35 deletions notebooks/FineMapping_AlzheimierDisease.ipynb
@@ -121,19 +121,20 @@
 ],
 "source": [
 "import os\n",
 "\n",
 "import hail as hl\n",
-"import pyspark.sql.functions as f\n",
 "import pandas as pd\n",
-"pd.set_option('display.max_colwidth', None)\n",
-"pd.set_option('display.expand_frame_repr', False)\n",
+"import pyspark.sql.functions as f\n",
 "\n",
 "from gentropy.common.session import Session\n",
+"from gentropy.dataset.study_index import StudyIndex\n",
 "from gentropy.dataset.summary_statistics import SummaryStatistics\n",
-"from gentropy.dataset.study_index import StudyIndex\n",
 "from gentropy.method.window_based_clumping import WindowBasedClumping\n",
 "from gentropy.susie_finemapper import SusieFineMapperStep\n",
+"\n",
+"pd.set_option(\"display.max_colwidth\", None)\n",
+"pd.set_option(\"display.expand_frame_repr\", False)\n",
 "\n",
 "hail_dir = os.path.dirname(hl.__file__)\n",
 "session = Session(hail_home=hail_dir, start_hail=True, extended_spark_conf={\"spark.driver.memory\": \"12g\",\n",
 " \"spark.kryoserializer.buffer.max\": \"500m\",\"spark.driver.maxResultSize\":\"3g\"})"
@@ -195,10 +196,7 @@
 "study_index = StudyIndex.from_parquet(session, path_si)\n",
 "\n",
 "slt=WindowBasedClumping.clump(gwas1,gwas_significance=5e-8,distance=1e6)\n",
-"slt_df=slt._df\n",
-"\n",
-"print(\"Number of SNPs in GWAS: \",gwas1._df.count())\n",
-"print(\"Number of clumps: \",slt_df.count())"
+"slt_df=slt._df\n"
 ]
 },
 {
@@ -254,9 +252,7 @@
 ]
 }
 ],
-"source": [
-"print(slt_df.show())"
-]
+"source": []
 },
 {
 "cell_type": "markdown",
@@ -1071,7 +1067,7 @@
 "source": [
 "df = slt_df.withColumn(\"row_index\", f.monotonically_increasing_id())\n",
 "\n",
-"columns = ['N_gwas', 'N_ld', 'N_overlap', 'N_outliers', 'N_imputed', 'N_final_to_fm', 'eleapsed_time']\n",
+"columns = [\"N_gwas\", \"N_ld\", \"N_overlap\", \"N_outliers\", \"N_imputed\", \"N_final_to_fm\", \"eleapsed_time\"]\n",
 "logs = pd.DataFrame(columns=columns)\n",
 "\n",
 "for i in range(0,df.count()):\n",
@@ -1095,7 +1091,6 @@
 "\n",
 "    sl=res[\"study_locus\"]\n",
 "    #print(sl._df.withColumn(\"size\", f.size(sl._df[\"locus\"])).show())\n",
-"    print(\"Region: \",sl._df.collect()[0]['region'], \"; number of CSs: \",sl._df.count(), \"; log:\")\n",
 "    #print(res[\"log\"])\n",
 "    logs=pd.concat([logs,res[\"log\"]])"
 ]
@@ -1146,8 +1141,7 @@
 }
 ],
 "source": [
-"pd.set_option('display.max_rows', None)\n",
-"print(logs)"
+"pd.set_option(\"display.max_rows\", None)"
 ]
 },
 {
@@ -1164,8 +1158,7 @@
 }
 ],
 "source": [
-"summary = logs['N_overlap'].mean()\n",
-"print(summary)"
+"summary = logs[\"N_overlap\"].mean()"
 ]
 },
 {
@@ -1318,9 +1311,7 @@
 "    imputed_r2_threshold=0.8,\n",
 "    ld_score_threshold=4\n",
 ")\n",
-"sl=res[\"study_locus\"]\n",
-"print(sl._df.withColumn(\"size\", f.size(sl._df[\"locus\"])).show())\n",
-"print(res[\"log\"])"
+"sl=res[\"study_locus\"]"
 ]
 },
 {
@@ -1382,9 +1373,7 @@
 "    imputed_r2_threshold=0.8,\n",
 "    ld_score_threshold=4\n",
 ")\n",
-"sl=res[\"study_locus\"]\n",
-"print(sl._df.withColumn(\"size\", f.size(sl._df[\"locus\"])).show())\n",
-"print(res[\"log\"])"
+"sl=res[\"study_locus\"]"
 ]
 },
 {
@@ -1482,9 +1471,7 @@
 "    imputed_r2_threshold=0.8,\n",
 "    ld_score_threshold=4\n",
 ")\n",
-"sl=res[\"study_locus\"]\n",
-"print(sl._df.withColumn(\"size\", f.size(sl._df[\"locus\"])).show())\n",
-"print(res[\"log\"])"
+"sl=res[\"study_locus\"]"
 ]
 },
 {
@@ -1546,9 +1533,7 @@
 "    imputed_r2_threshold=0.8,\n",
 "    ld_score_threshold=4\n",
 ")\n",
-"sl=res[\"study_locus\"]\n",
-"print(sl._df.withColumn(\"size\", f.size(sl._df[\"locus\"])).show())\n",
-"print(res[\"log\"])"
+"sl=res[\"study_locus\"]"
 ]
 },
 {
@@ -1610,9 +1595,7 @@
 "    imputed_r2_threshold=0.8,\n",
 "    ld_score_threshold=4\n",
 ")\n",
-"sl=res[\"study_locus\"]\n",
-"print(sl._df.withColumn(\"size\", f.size(sl._df[\"locus\"])).show())\n",
-"print(res[\"log\"])"
+"sl=res[\"study_locus\"]"
 ]
 },
 {
@@ -1703,9 +1686,7 @@
 "    imputed_r2_threshold=0.8,\n",
 "    ld_score_threshold=4\n",
 ")\n",
-"sl=res[\"study_locus\"]\n",
-"print(sl._df.withColumn(\"size\", f.size(sl._df[\"locus\"])).show())\n",
-"print(res[\"log\"])"
+"sl=res[\"study_locus\"]"
 ]
 }
 ],
