Skip to content

Commit

Permalink
Update dependencies, removal of redundant dependencies. Removal of re…
Browse files Browse the repository at this point in the history
…dundant code.
  • Loading branch information
dinarior committed Nov 29, 2020
1 parent fa63b5f commit 99132e6
Show file tree
Hide file tree
Showing 5 changed files with 12 additions and 61 deletions.
5 changes: 2 additions & 3 deletions .travis.yml
Original file line number Diff line number Diff line change
@@ -1,13 +1,12 @@
language: julia
julia:
- 1.2
- 1.3
- 1.4
- 1.5

jobs:
include:
- stage: "Documentation"
julia: 1.3
julia: 1.5
os: linux
script:
- julia --project=docs/ -e 'using Pkg; Pkg.develop(PackageSpec(path=pwd()));
Expand Down
20 changes: 9 additions & 11 deletions Project.toml
Original file line number Diff line number Diff line change
@@ -1,10 +1,9 @@
name = "DPMMSubClusters"
uuid = "2841fd70-8698-11e9-176d-6dfa142d2ee7"
authors = ["Or Dinari <[email protected]>"]
version = "0.1.8"
version = "0.1.9"

[deps]
CatViews = "81a5f4ea-a946-549a-aa7e-2a7f63a27d31"
Clustering = "aaaa29a8-35af-508c-8bc3-b662a17a0fe5"
Distributed = "8ba89e20-285c-5b6f-9357-94700520ee1b"
DistributedArrays = "aaf54ef3-cdf8-58ed-94cc-d582ad619b94"
Expand All @@ -17,15 +16,14 @@ SpecialFunctions = "276daf66-3868-5448-9aa4-cd146d93841b"
StatsBase = "2913bbd2-ae8a-5f71-8c99-4fb6c76f3a91"

[compat]
CatViews = "1"
Clustering = "0.13.3"
DistributedArrays = "0, 1"
Distributions = "0, 1"
JLD2 = "0, 1"
NPZ = "0, 1"
SpecialFunctions = "0.8, 0.9, 0.10, 1"
StatsBase = "0,1"
julia = "1"
Clustering = "^0.13.3,0.14"
DistributedArrays = "0.1,0.2,0.3,0.4,0.5,0.6,0.7,0.8"
Distributions = "0.16 - 0.25"
JLD2 = "0.1, 0.2,0.3,0.4"
NPZ = "0.1, 0.2,0.3,0.4,0.5,0.6"
SpecialFunctions = "0.8, 0.9, 0.10, 1.0"
StatsBase = "0.32,0.33,0.34,0.35"
julia = "1.0,1.1,1.2,1.3,1.4,1.5,1.6"

[extras]
Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
Expand Down
3 changes: 1 addition & 2 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@
# DPMMSubClusters.jl
This is the code repository for the *Julia* package (with an optional [Python wrapper](https://github.com/BGU-CS-VIL/dpmmpython)) that corresponds to our paper, [Distributed MCMC Inference in Dirichlet Process Mixture Models Using Julia](https://www.cs.bgu.ac.il/~dinari/papers/dpmm_hpml2019.pdf), which was presented at CCGrid2019 High Performance Computing Machine Learning workshop (HPML).

Note that due to improvements in the code we have made since the time of the pulication of the paper, this package is now faster than what we reported there.
Note that due to improvements in the code we have made since the time of the publication of the paper, this package is now faster than what we reported there.

<br>
<p align="center">
Expand All @@ -19,7 +19,6 @@ Note that due to improvements in the code we have made since the time of the pul
## Requirements
This package was developed and tested on *Julia 1.0.3*, prior versions will not work.
The following dependencies are required:
- CatViews
- Distributed
- DistributedArrays
- Distributions
Expand Down
1 change: 0 additions & 1 deletion src/DPMMSubClusters.jl
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,6 @@ using DistributedArrays
using StatsBase
using Distributions
using SpecialFunctions
using CatViews
using LinearAlgebra
using JLD2
using Clustering
Expand Down
44 changes: 0 additions & 44 deletions src/local_clusters_actions.jl
Original file line number Diff line number Diff line change
Expand Up @@ -67,14 +67,6 @@ function create_subclusters_labels!(labels::AbstractArray{Int64,1},
if size(labels,1) == 0
return
end
# clusts_dist = [cluster_params.l_dist, cluster_params.r_dist]
# log_weights = log.(cluster_params.lr_weights)
# @inbounds for i=1:size(labels,1)
# x = @view points[:,i]
# probs = RestrictedClusterProbs(log_weights,clusts_dist,x)
# labels[i] = sample(1:2, ProbabilityWeights(probs))
# # println(labels[i])
# end
parr = zeros(Float32,length(labels), 2)
log_likelihood!((@view parr[:,1]),points,cluster_params.l_dist)
log_likelihood!((@view parr[:,2]),points,cluster_params.r_dist)
Expand All @@ -98,19 +90,6 @@ function sample_labels!(labels::AbstractArray{Int64,1},
end


function RestrictedClusterProbs(logπs::AbstractVector{V}, clusters,
x::AbstractVector) where V<:Real
p = Array{V,1}(undef,length(clusters))
max = typemin(V)
for (j,c) in enumerate(values(clusters))
@inbounds s = p[j] = logπs[j] + logαpdf(c,x)
max = s>max ? s : max
end
pc = exp.(p .- max)
return pc ./ sum(pc)
end


function sample_labels_worker!(labels::AbstractArray{Int64,1},
points::AbstractArray{Float32,2},
final::Bool,
Expand All @@ -122,19 +101,8 @@ function sample_labels_worker!(labels::AbstractArray{Int64,1},
parr = zeros(Float32,length(indices), length(clusters_vector))
tic = time()
for (k,cluster) in enumerate(clusters_vector)
# if no_more_splits == false && k==1
# parr[:,k] .= -Inf
# continue
# end
log_likelihood!(reshape((@view parr[:,k]),:,1), pts,cluster.cluster_dist)
end
# println("Time: "* string(time()-tic) * " size:" *string(size(pts)))
# parr = zeros(Float32,length(indices), length(clusters_vector))
# newx = copy(localpart(points)')
# @time log_likelihood!(parr, localpart(points),[c.cluster_dist for c in clusters_vector],log.(clusters_weights))
#
#
#
for (k,v) in enumerate(clusters_weights)
parr[:,k] .+= log(v)
end
Expand All @@ -144,18 +112,6 @@ function sample_labels_worker!(labels::AbstractArray{Int64,1},
else
sample_log_cat_array!(lbls,parr)
end

# clust_dists = [c.cluster_dist for c in clusters_vector]
#
# @inbounds for i=1:size(lbls,1)
# x = @view pts[:,i]
# probs = RestrictedClusterProbs(log_weights,clust_dists,x)
# # println(probs)
# lbls[i] = sample(1:length(clusters_vector), ProbabilityWeights(probs))
# # println(lbls[i])
# end


end


Expand Down

0 comments on commit 99132e6

Please sign in to comment.