903 changes: 604 additions & 299 deletions Manifest.toml

Large diffs are not rendered by default.

2 changes: 2 additions & 0 deletions Project.toml
@@ -30,6 +30,7 @@ OptimalTransport = "7e02d93a-ae51-4f58-b602-d97af76e3b33"
OrderedCollections = "bac558e1-5e72-5ebc-8fee-abe8a469f55d"
Parameters = "d96e819e-fc66-5662-9728-84c9c7592b0a"
PyCall = "438e738f-606a-5dbb-bf0a-cddfbfd45ab0"
Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
Revise = "295af30f-e4ad-537b-8983-00126c2a3abe"
Setfield = "efcf1570-3423-57d1-acb7-fd33fddbac46"
SimpleWeightedGraphs = "47aef6b3-ad0c-573a-a1e2-d07658019622"
@@ -38,3 +39,4 @@ StatProfilerHTML = "a8a75453-ed82-57c9-9e16-4cd1196ecbf5"
StaticArrays = "90137ffa-7385-5640-81b9-e52037218182"
Statistics = "10745b16-79ce-11e8-11f9-7d13ad32a3b2"
UnicodePlots = "b8865327-cd53-5732-bb35-84acbb429228"
WaveFunctionCollapse = "bc91b3db-6012-4156-9697-e03a5f4b0c51"
63 changes: 13 additions & 50 deletions env.d/Singularity
@@ -1,63 +1,26 @@
bootstrap: docker
from: nvidia/cuda:11.7.0-cudnn8-devel-ubuntu20.04
bootstrap: localimage
from: env.d/base.sif

%environment
# setup PATH to point to julia and blender
# export PATH=$PATH:"/usr/local/blender"
export PATH=$PATH:"/usr/local/julia-1.8.5/bin"
export PATH=$PATH:"/usr/local/blender"
export PATH=$PATH:"/usr/local/julia-1.9.3/bin"

%runscript
exec bash "$@"

%post
export DEBIAN_FRONTEND=noninteractive
export TZ=Etc/UTC
rm /etc/apt/sources.list.d/cuda.list
# rm /etc/apt/sources.list.d/nvidia-ml.list
apt-get update
apt-get install -y software-properties-common
apt-get install -y build-essential \
wget \
git \
ffmpeg \
cmake \
python3.9-dev \
python3-pip \
libopencv-dev \
libturbojpeg0-dev \
blender
apt-get clean

python3.9 -m pip install --upgrade pip
python3.9 -m pip install pipenv virtualenv

# build context
mkdir /build-ctx && cd /build-ctx
%files
env.d/blender-3.6.3-linux-x64.tar.xz /build-ctx/

# Setup blender
# wget "https://yale.box.com/shared/static/nn6n5iyo5m4tzl5u9yoy2dvv1ohk22xj.xz" \
# -O blender.tar.gz
# tar -xf blender.tar.gz
# mv blender-2.* "/usr/local/blender"
# chmod +x "/usr/local/blender/blender"

# Set up Julia
JURL="https://julialang-s3.julialang.org/bin/linux/x64/1.8/julia-1.8.5-linux-x86_64.tar.gz"
wget "$JURL" -O "julia.tar.gz"
tar -xzf "julia.tar.gz" -C "/usr/local/"
chmod +x /usr/local/julia-1.8.5/bin/*
%post
cd /build-ctx
# blender
apt-get install -y libsm6 libxext6
tar -xf "blender-3.6.3-linux-x64.tar.xz"
mv blender-3.6.3-linux-x64 "/usr/local/blender"
chmod +x "/usr/local/blender/blender"

# clean up
rm -rf /build-ctx

# Add an sbatch workaround
echo '#!/bin/bash\nssh -y "$HOSTNAME" sbatch "$@"' > /usr/bin/sbatch
chmod +x /usr/bin/sbatch

# Add an scancel workaround
echo '#!/bin/bash\nssh -y "$HOSTNAME" scancel "$@"' > /usr/bin/scancel
chmod +x /usr/bin/scancel

# Add an srun workaround
echo '#!/bin/bash\nssh -y "$HOSTNAME" srun "$@"' > /usr/bin/srun
chmod +x /usr/bin/srun
2 changes: 2 additions & 0 deletions env.d/requirements.txt
@@ -5,8 +5,10 @@ sqlalchemy
numba
opencv-python-headless
ninja
plotly
torch
torchvision
drjit
mitsuba
fvcore
iopath
64 changes: 62 additions & 2 deletions scripts/analysis/ccn_2023_change_detection.Rmd
@@ -148,7 +148,7 @@ delta_hr_by_scene %>%
t0 = mean(delta_hr_by_scene$abs_f_diff)


f1 <- delta_hr_by_scene %>%
delta_hr_by_scene %>%
ggplot(aes(abs_f_diff)) +
geom_vline(xintercept = `t0`,
size = 1.25,
@@ -220,7 +220,7 @@ inv_t0 = 0.063
sum((inv_t0 < reps$t)) / length(reps$t)

rdf = data.frame(t = reps$t)
p <- rdf %>%
rdf %>%
ggplot(aes(x = t)) +
geom_histogram(fill = "grey") +
geom_vline(xintercept = reps$t0,
@@ -296,6 +296,66 @@ t.test(upright_dhr_analysis$up_diff, upright_dhr_analysis$abs_f_diff)

```


### Path length analysis


```{r}


path_metrics <- read.csv("~/project/spaths/datasets/ccn_2023_exp_path_metrics.csv") %>%
rename(scene = id)

path_comps <- path_metrics %>%
group_by(scene, door) %>%
select(scene, door, starts_with("path")) %>%
summarise(across(everything(),
list(mean = mean, max = max, min = min, diff = diff))) %>%
left_join(passed_hits)

path_comps %>%
ggplot(aes(x = path_cost_diff, y = f, label = scene, color = factor(door))) +
geom_point() +
geom_text(hjust=0, vjust=0)

path_comps %>%
ggplot(aes(x = path_dist_mean, y = f, label = scene, color = factor(door), group = scene)) +
geom_point() +
geom_text(hjust=0, vjust=0) +
geom_line()

path_comps %>%
with(lm(f ~ path_dist_mean)) %>%
summary()

path_comps %>%
ggplot(aes(x = path_dist_max, y = f, label = scene, color = factor(door))) +
geom_point() +
geom_text(hjust=0, vjust=0)

path_comps %>%
with(lm(f ~ path_dist_max)) %>%
summary()

pairwise_differences <- path_comps %>%
group_by(scene) %>%
summarise(across(c(path_dist_mean),
list(mean = mean, diff = diff))) %>%
left_join(delta_hr_by_scene)

pairwise_differences %>%
ggplot(aes(x = path_dist_mean_diff, y = f_diff, label = scene)) +
geom_point() +
geom_text(hjust=0, vjust=0)

pairwise_differences %>%
ggplot(aes(x = path_dist_mean_mean, y = f_diff, label = scene)) +
geom_point() +
geom_text(hjust=0, vjust=0)

```


### Multi-granular Attention

```{r}
132 changes: 132 additions & 0 deletions scripts/analysis/scene_path_analysis.jl
@@ -0,0 +1,132 @@
using Gen
using CSV
using JSON
using PyCall
using FileIO
using Images
using ArgParse
using DataFrames
using FunctionalScenes
using Random

np = pyimport("numpy")

function save_path_render(out::String, scene::Int, door::Int,
isshifted::Bool,
r::GridRoom, path)
m = FunctionalScenes.draw_room(r, path)
m = imresize(m, (128,128))
shifted = isshifted ? "shifted" : "unshifted"
save("$(out)/$(scene)_$(door)_$(shifted).png", m)
return nothing
end

name = "ccn_2023_exp"
# name = "pathcost_4.0"
function main()

src = "/spaths/datasets/$(name)"
df = DataFrame(CSV.File("$(src)/scenes.csv"))

n = nrow(df) * 2

out = "/spaths/datasets/$(name)/path_analysis"
isdir(out) || mkdir(out)


fc = 1.0
# for c = LinRange(0.1, 5.0, 5), k = [5,7,9]
for c = [32.0], k = [5]
# params = AStarPath(
# obstacle_cost = c,
# floor_cost = fc,
# )
params = NoisyPath(
obstacle_cost = c,
floor_cost = fc,
kernel_sigma = 3.0,
kernel_width = k,
)
analysis_params = Dict(
:kernel => k,
:p => 0.35,
:n => 7
)
param_data = Dict{Symbol, Any}(
:obstacle_cost => c,
:floor_cost => fc,
:kernel_width => k)
# paths = Array{Float64}(undef, (30, 2, 2, 32, 32))
results = DataFrame(scene = Int64[],
door = Int64[],
is_shifted = Bool[],
obstacle_size = Int64[],
density = Float64[],
diffusion_ct = Float64[],
diffusion_ct_max = Float64[],
diffusion_ct_alt = Float64[],
diffusion_prop = Float64[],
diffusion_tot = Float64[],
path_dist = Float64[],
path_length = Int64[],
obstacle_cost = Float64[],
floor_cost = Float64[],
kernel_width = Int64[])
render_out = "$(out)/$(c)_$(fc)_$(k)_renders"
isdir(render_out) || mkdir(render_out)
metric_out = "$(out)/$(c)_$(fc)_$(k)_path_metrics.csv"
@show metric_out
# isfile(metric_out) && continue
for row in eachrow(df)

base_p = "/spaths/datasets/$(name)/scenes/$(row.scene)_$(row.door).json"
local base_s
open(base_p, "r") do f
base_s = JSON.parse(f)
end
base = from_json(GridRoom, base_s)
to_shift = furniture(base)[row.furniture]
obs_size = length(to_shift)

scene_data = Dict{Symbol, Any}(
:scene => row.scene,
:door => row.door,
:obstacle_size => obs_size)

base_path, base_result = path_analysis(base, params, to_shift;
analysis_params...)

save_path_render(render_out, row.scene, row.door, false, base,
base_path)

# paths[row.scene, row.door, 1, :, :] = base_path

trial_data = Dict{Symbol, Any}(:is_shifted => false)
merge!(trial_data, base_result, scene_data, param_data)
push!(results, trial_data)


# shifted = remove(base, to_shift)
shifted = shift_furniture(base,
to_shift,
Symbol(row.move))
shifted_path, shifted_result = path_analysis(shifted, params, to_shift;
analysis_params...)

save_path_render(render_out, row.scene, row.door, true, shifted,
shifted_path)

# paths[row.scene, row.door, 2, :, :] = shift_path
trial_data = Dict{Symbol, Any}(:is_shifted => true)
merge!(trial_data, shifted_result, scene_data, param_data)
push!(results, trial_data)
end

display(results)

# np.save("$(out)/$(c)_$(fc)_$(k)_noisy_paths.npy", paths)
CSV.write(metric_out, results)
end
end

main();
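
The metrics CSV written above has one row per scene × door × shifted/unshifted condition. As a rough sketch of how those rows could be paired into per-scene changes (the kind of `*_diff` quantities used in the Rmd analysis), assuming the column names defined in the `results` DataFrame above; the file path and the boolean normalization are illustrative, not taken from the scripts:

```python
# Sketch only: pair shifted/unshifted rows from a metrics CSV like the one
# written by scene_path_analysis.jl. File path is illustrative.
import pandas as pd

metrics = pd.read_csv("path_metrics.csv")

# CSV.write may serialize Bools as "true"/"false"; normalize to Python bools.
metrics["is_shifted"] = metrics["is_shifted"].astype(str).str.lower() == "true"

# Wide table: one row per scene/door, one column per condition.
wide = metrics.pivot_table(index=["scene", "door"],
                           columns="is_shifted",
                           values="path_dist")

# Change in (noisy) path distance caused by shifting the obstacle.
wide["path_dist_diff"] = wide[True] - wide[False]
print(wide.reset_index()[["scene", "door", "path_dist_diff"]].head())
```
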
55 changes: 55 additions & 0 deletions scripts/analysis/scene_path_viz.py
@@ -0,0 +1,55 @@
#!/usr/bin/env python3

import numpy as np
import pandas as pd
import plotly.graph_objects as go
from plotly.subplots import make_subplots

def downsample(a, n:int = 2):
if n == 1:
return a
b = a.shape[0]//n
a_downsampled = a.reshape(-1, n, b, n).sum((-1, -3)) / (n*n)
return a_downsampled


EXPNAME = 'pathcost_3.0'
scale = 1

def main():

df_path = f"/spaths/datasets/{EXPNAME}/scenes.csv"
df = pd.read_csv(df_path)
# df = df.loc[map(lambda x: x in scenes, df['id'])]

row_count = 1
fig = make_subplots(rows=60, cols=2,
shared_xaxes=True,
shared_yaxes=True)

path_file = '1.64_0.01_7'
paths = np.load(f'/spaths/datasets/{EXPNAME}_path/{path_file}_noisy_paths.npy')

for scene in range(30):
for door in range(2):
fig.update_yaxes(title_text=f"{scene+1}, {door+1}",
row=row_count, col=1)
pmat_a = downsample(paths[scene, door, 0], n = scale)
pmat_hm = go.Heatmap(z = pmat_a.T, coloraxis="coloraxis3")
fig.add_trace(pmat_hm, row = row_count, col = 1)

pmat_b = downsample(paths[scene, door, 1], n = scale)
pmat_hm = go.Heatmap(z = pmat_b.T, coloraxis="coloraxis3")
fig.add_trace(pmat_hm, row = row_count, col = 2)
row_count += 1

fig.update_layout(
height = 300 * 30 * 2,
width = 800,
coloraxis3=dict(colorscale='greens'),
showlegend=False
)
fig.write_html(f'/spaths/datasets/{EXPNAME}_path/{path_file}_noisy_paths.html')

if __name__ == '__main__':
main()
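
For reference, the `downsample` helper above block-averages a square array by a factor of `n` with a single reshape. A quick self-contained check with illustrative values (the helper is reproduced here lightly condensed):

```python
# Self-contained check of the reshape-based block averaging in downsample().
# Input values are illustrative, not taken from any dataset.
import numpy as np

def downsample(a, n: int = 2):
    if n == 1:
        return a
    b = a.shape[0] // n
    # Split rows and columns into n-sized blocks, then average each block.
    return a.reshape(-1, n, b, n).sum((-1, -3)) / (n * n)

a = np.arange(16, dtype=float).reshape(4, 4)
print(downsample(a, n=2))
# [[ 2.5  4.5]
#  [10.5 12.5]]
```
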