
Commit

update
nerminsamet committed Apr 25, 2023
1 parent edd10ae commit 01dd851
Showing 13 changed files with 19,655 additions and 125 deletions.
114 changes: 114 additions & 0 deletions datasets/base_dataset.py
@@ -7,6 +7,8 @@
import pickle
import pickle5 as pickle
from sklearn.cluster import KMeans
from scipy import stats
import numpy as np

class PCData:

@@ -114,5 +116,117 @@ def cluster_scene(self, cluster_num, im_feats):
        kmeans = KMeans(n_clusters=cluster_num, random_state=123).fit(im_feats)
        return kmeans.cluster_centers_

    def extract_scene_attributes(self):

        ret = self.load_scene_attributes()
        if ret:  # if we managed to load then no need to run again!
            return

        all_scenes, all_scene_keys = self.get_scenes()

        self.load_cluster_centers()

        scene_attributes = {}

        for kk in all_scene_keys:
            xbb = self.cluster_centers.get(kk, None)

            if xbb is not None:
                curr_dim = len(xbb)
                ordered_D = self.sim_object.calculate_dino_aff_matrix_from_feats(xbb)
                # keep each unordered pair of cluster centers once (strict upper triangle)
                final_distance_list = list(ordered_D[np.triu_indices(curr_dim, 1)])

                desc = stats.describe(final_distance_list)
                scene_attributes[kk] = {
                    'mean': desc.mean,
                    'variance': desc.variance,
                    'minmax': desc.minmax,
                }

        self.scene_attributes = scene_attributes
        # cache the per-scene attributes as a binary pickle file
        with open(self.scene_attributes_file, "wb") as f:
            pickle.dump(self.scene_attributes, f)
        print(f'Len of final rooms {len(self.scene_attributes)}')

    def extract_pairwise_scene_attributes(self):

        ret = self.load_pairwise_scene_attributes()
        if ret:  # if we managed to load then no need to run again!
            return

        all_scenes, all_scene_keys = self.get_scenes()
        total_scene_number = len(all_scene_keys)

        self.load_cluster_centers()

        pairwise_scene_attributes = {}

        for ind, s1 in enumerate(all_scene_keys):
            for st in range(ind + 1, total_scene_number):
                s2 = all_scene_keys[st]
                kk = f'{s1}*{s2}'

                cluster_centers_s1 = self.cluster_centers[s1]
                cluster_centers_s2 = self.cluster_centers[s2]

                # stack the two sets of cluster centers: the first rows come from s1,
                # the remaining rows from s2
                xbb = np.concatenate([cluster_centers_s1, cluster_centers_s2],
                                     axis=0).astype(np.float32)

                ordered_D = self.sim_object.calculate_dino_aff_matrix_from_feats(xbb)

                # off-diagonal block of the stacked affinity matrix, i.e. affinities
                # across the two scenes (assumes both scenes have the same number of
                # cluster centers)
                final_distance_list = list(
                    ordered_D[len(cluster_centers_s1):, 0:len(cluster_centers_s2)].flatten())

                desc = stats.describe(final_distance_list)
                pairwise_scene_attributes[kk] = {
                    'mean': desc.mean,
                    'variance': desc.variance,
                    'minmax': desc.minmax,
                }

        self.pairwise_scene_attributes = pairwise_scene_attributes
        # cache the pairwise attributes as a binary pickle file
        with open(self.pairwise_scene_attributes_file, "wb") as f:
            pickle.dump(self.pairwise_scene_attributes, f)
        print(f'Len: {len(self.pairwise_scene_attributes)}')

    def prepare_data_for_optimization(self):

        self.load_scene_attributes()
        self.load_data_stats()
        self.load_pairwise_scene_attributes()

        all_scenes, all_scene_keys = self.get_scenes()
        total_scene_number = len(all_scene_keys)

        pair_scores = []
        all_pairs = []
        for i, scene_i in enumerate(all_scene_keys):
            for j in range(i + 1, total_scene_number):
                scene_j = all_scene_keys[j]
                # intra-scene dissimilarity: one minus the mean affinity among a
                # scene's own cluster centers
                dsim_i = 1 - self.scene_attributes[scene_i]['mean']
                dsim_j = 1 - self.scene_attributes[scene_j]['mean']

                dsim = dsim_i * dsim_j

                kk = f'{scene_i}*{scene_j}'

                # cross-scene dissimilarity of the pair
                pairwise_sim = self.pairwise_scene_attributes[kk]['mean']
                pairwise_dsim = 1 - pairwise_sim
                final_score = pairwise_dsim * dsim

                pair_scores.append(final_score)
                all_pairs.append((scene_i, scene_j))

        return all_pairs, self.data_stats, pair_scores, self.reduction_size, self.target_point_num
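
The pair score assembled in prepare_data_for_optimization is the product of three dissimilarity terms: one minus the mean intra-scene affinity of each scene, and one minus the mean cross-scene affinity of the pair, so a pair scores high only when both scenes are internally diverse and also different from each other. A self-contained toy calculation makes the arithmetic concrete; the scene names and affinity values below are invented purely for illustration and are not part of this commit:

# Toy mean affinities in [0, 1]; names and numbers are made up for illustration only.
scene_mean = {'roomA': 0.82, 'roomB': 0.65}   # intra-scene mean affinity per scene
pair_mean = {'roomA*roomB': 0.40}             # mean affinity across the two scenes

dsim_a = 1 - scene_mean['roomA']              # 0.18: roomA is internally quite uniform
dsim_b = 1 - scene_mean['roomB']              # 0.35
pair_dsim = 1 - pair_mean['roomA*roomB']      # 0.60: the two rooms look different

final_score = pair_dsim * dsim_a * dsim_b     # ~0.0378
print(final_score)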




120 changes: 4 additions & 116 deletions datasets/s3dis.py
@@ -109,12 +109,11 @@ def extract_scene_clusters(self):
         if ret:  # if we managed to load then no need to run again!
             return

-        self.all_scenes = self.get_scenes()
+        all_scenes, all_scene_keys = self.get_scenes()
         self.cluster_centers = {}
-        total_scene_number = len(self.all_scenes)
-        all_scene_keys = list(self.all_scenes.keys())

         for ind, s1 in enumerate(all_scene_keys):
-            im_feats = np.squeeze(np.asarray(self.all_scenes[s1]), axis=1)
+            im_feats = np.squeeze(np.asarray(all_scenes[s1]), axis=1)

             cluster_num = self.cluster_num
             if len(im_feats) < cluster_num:
@@ -137,88 +136,7 @@ def get_scenes(self):
                     values = [value for key, value in self.feat_vec_image_dict.items() if kk.lower() in key.lower()]
                     if values:
                         all_scenes[kk[:-1]] = values
-        return all_scenes
-
-    def extract_scene_attributes(self):
-
-        ret = self.load_scene_attributes()
-        if ret:  # if we managed to load then no need to run again!
-            return
-
-        self.load_cluster_centers()
-        scene_attributes = {}
-        for scene in self.train_scenes:
-            for room in self.rooms:
-                for i in range(self.max_room_num):
-                    kk = scene + '_' + room + str(i)
-                    xbb = self.cluster_centers.get(kk, None)
-
-                    if xbb is not None:
-                        xbb = self.cluster_centers[kk]
-                        curr_dim = len(xbb)
-                        ordered_D = self.sim_object.calculate_dino_aff_matrix_from_feats(xbb)
-                        final_distance_list = list(ordered_D[np.triu_indices(curr_dim, 1)])
-
-                        d = {}
-                        d[f"{kk}"] = {}
-                        d[f"{kk}"]['mean'] = stats.describe(final_distance_list).mean
-                        d[f"{kk}"]['variance'] = stats.describe(final_distance_list).variance
-                        d[f"{kk}"]['minmax'] = stats.describe(final_distance_list).minmax
-                        scene_attributes.update(d)
-
-        self.scene_attributes = scene_attributes
-        f = open(self.scene_attributes_file, "wb")
-        pickle.dump(self.scene_attributes, f)
-        f.close()
-        print(f'Len of final rooms {len(self.scene_attributes)}')
-
-    def extract_pairwise_scene_attributes(self):
-
-        ret = self.load_pairwise_scene_attributes()
-        if ret:  # if we managed to load then no need to run again!
-            return
-
-        self.all_scenes = self.get_scenes()
-        all_scene_keys = list(self.all_scenes.keys())
-        total_scene_number = len(all_scene_keys)
-
-        self.load_cluster_centers()
-
-        pairwise_scene_attributes = {}
-
-        for ind, s1 in enumerate(all_scene_keys):
-            for st in range(ind + 1, total_scene_number):
-                s2 = all_scene_keys[st]
-                kk = f'{s1}*{s2}'
-
-                cluster_centers_s1 = self.cluster_centers[s1]
-                cluster_centers_s2 = self.cluster_centers[s2]
-
-                curr_dim = len(cluster_centers_s1) + len(cluster_centers_s2)
-                xbb = np.zeros((curr_dim, cluster_centers_s1.shape[1]), dtype=np.float32)
-                for ii, v in enumerate(cluster_centers_s1):
-                    xbb[ii] = v
-                for ii, v in enumerate(cluster_centers_s2):
-                    xbb[len(cluster_centers_s1) + ii] = v
-
-                ordered_D = self.sim_object.calculate_dino_aff_matrix_from_feats(xbb)
-
-                final_distance_list = list(
-                    ordered_D[len(cluster_centers_s1):, 0: len(cluster_centers_s2)].flatten())
-
-                d = {}
-                d[f"{kk}"] = {}
-                d[f"{kk}"]['mean'] = stats.describe(final_distance_list).mean
-                d[f"{kk}"]['variance'] = stats.describe(final_distance_list).variance
-                d[f"{kk}"]['minmax'] = stats.describe(final_distance_list).minmax
-                pairwise_scene_attributes.update(d)
-
-        self.pairwise_scene_attributes = pairwise_scene_attributes
-        # create a binary pickle file
-        f = open(self.pairwise_scene_attributes_file, "wb")
-        pickle.dump(self.pairwise_scene_attributes, f)
-        f.close()
-        print(f'Len of final rooms {len(self.pairwise_scene_attributes)}')
+        return all_scenes, list(all_scenes.keys())

     def create_initial_set(self, scene_list):

@@ -284,37 +202,7 @@ def create_initial_set(self, scene_list):
         fi.write(f'Region Num: {total_region_num} and Point Num: {total_point_num} in the current set')
         fi.close()

-    def prepare_data_for_optimization(self):
-
-        self.load_scene_attributes()
-        self.load_data_stats()
-        self.load_pairwise_scene_attributes()
-
-        self.all_scenes = self.get_scenes()
-        all_scene_keys = list(self.all_scenes.keys())
-        total_scene_number = len(all_scene_keys)
-
-        pair_scores = []
-        all_pairs = []
-        for i, scene_i in enumerate(all_scene_keys):
-            for j in range(i + 1, total_scene_number):
-
-                scene_j = all_scene_keys[j]
-                dsim_i = 1 - self.scene_attributes[scene_i]['mean']
-                dsim_j = 1 - self.scene_attributes[scene_j]['mean']
-
-                dsim = dsim_i * dsim_j
-
-                kk = f'{scene_i}*{scene_j}'
-
-                pairwise_sim = self.pairwise_scene_attributes[kk]['mean']
-                pairwise_dsim = 1 - pairwise_sim
-                final_score = pairwise_dsim * dsim
-
-                pair_scores.append(final_score)
-                all_pairs.append((scene_i, scene_j))
-
-        return all_pairs, self.data_stats, pair_scores, self.reduction_size, self.target_point_num
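
After this refactor, extract_scene_attributes, extract_pairwise_scene_attributes and prepare_data_for_optimization live only in the PCData base class, so a dataset-specific class such as the S3DIS loader mainly supplies the scene grouping plus its own clustering and I/O helpers. Below is a minimal, hypothetical sketch of such a subclass; the class name, the path-prefix grouping rule and the assumption that feat_vec_image_dict is already populated are illustrative and not part of this commit:

from datasets.base_dataset import PCData


class MyDataset(PCData):
    """Hypothetical dataset: provides only the scene grouping; attribute
    extraction and pair scoring are inherited from PCData."""

    def get_scenes(self):
        # Group per-image feature vectors by a scene key (here the path prefix,
        # purely as an example) and return both the mapping and its key list,
        # matching the (all_scenes, all_scene_keys) contract used above.
        all_scenes = {}
        for key, value in self.feat_vec_image_dict.items():
            scene_key = key.split('/')[0]
            all_scenes.setdefault(scene_key, []).append(value)
        return all_scenes, list(all_scenes.keys())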



(Diffs for the remaining 11 changed files in this commit are not shown.)
