helper_functions.py
import json
from os import listdir

import numpy as np
from keras.layers import Input, Dense
from keras.models import Model, load_model
from keras.callbacks import EarlyStopping, TensorBoard
from keras import metrics


def import_data(json_path):
    """
    Parameter: path to a JSON-lines file (string)
    Output: two numpy arrays

    Reads the JSON-lines file at 'json_path', where each line holds a sparse
    count vector ('count_vec') and a star rating ('star_rating'). Builds a
    dense matrix from the sparse vectors (capped at 50,000 rows) and returns
    it together with the array of star ratings.
    """
    count_vec_list = []
    star_rating_list = []
    vocab_size_saved = False
    with open(json_path, 'r') as f:
        for i, line in enumerate(f):
            if i == 50000:
                break
            record = json.loads(line)
            # collects the sparse vector data and the star rating separately
            count_vec_list.append(record['count_vec'])
            star_rating_list.append(record['star_rating'])
            # saves the vocabulary size on the first pass
            if not vocab_size_saved:
                num_matrix_columns = record['count_vec']['size']
                vocab_size_saved = True
    # creates an empty matrix sized to fit the data that was read in
    num_matrix_rows = len(count_vec_list)
    dense_matrix = np.zeros((num_matrix_rows, num_matrix_columns))
    for i, row in enumerate(count_vec_list):
        # writes the sparse values into row i of the dense matrix
        dense_matrix[i, row['indices']] = row['values']
    return dense_matrix, np.array(star_rating_list)
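

# Illustrative example of the input format: the field layout below is inferred
# from the keys this module reads; the concrete values and the file name are
# assumptions, not part of this repository.
#
#   {"count_vec": {"size": 5000, "indices": [3, 17, 402], "values": [2.0, 1.0, 1.0]},
#    "star_rating": 5, "star_rating_filtered": 1}
#
#   X, y = import_data('reviews_vectorized.json')   # hypothetical file name
#   print(X.shape, y.shape)                         # (num_rows, vocab_size), (num_rows,)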


def count_stars(json_path):
    """
    Counts how many reviews in the JSON-lines file at 'json_path'
    have a star rating of 0 and returns that count.
    """
    count = 0
    with open(json_path, 'r') as f:
        for line in f:
            if json.loads(line)['star_rating'] == 0:
                count += 1
    return count


def batch_generator(json_directory, num_rows):
    """
    Parameters: path to a directory of JSON-lines files (string), batch size (int)
    Yields: two numpy arrays per batch

    Infinite generator (e.g. for Keras' fit_generator). Reads the JSON-lines
    files in 'json_directory' in a shuffled order, collects sparse count
    vectors until it has num_rows / 2 positive and num_rows / 2 negative
    reviews, then yields a shuffled, class-balanced dense batch of features
    and labels.
    """
    count_vec_list_positive = []
    count_vec_list_negative = []
    num_positive, num_negative, file_count = 0, 0, 0
    json_paths = listdir(json_directory)
    path_shuffle = np.arange(len(json_paths))
    np.random.shuffle(path_shuffle)
    while True:
        vocab_size_saved = False
        with open(json_directory + '/' + json_paths[path_shuffle[file_count]], 'r') as f:
            for line in f:
                record = json.loads(line)
                # collects positive and negative reviews separately and
                # tracks how many of each have been seen
                if record['star_rating_filtered'] == 1:
                    count_vec_list_positive.append(record['count_vec'])
                    num_positive += 1
                else:
                    count_vec_list_negative.append(record['count_vec'])
                    num_negative += 1
                # saves the vocabulary size on the first pass
                if not vocab_size_saved:
                    num_matrix_columns = record['count_vec']['size']
                    vocab_size_saved = True
                # builds a dense batch once enough of each class has been gathered
                if num_positive >= num_rows / 2 and num_negative >= num_rows / 2:
                    num_positive, num_negative = 0, 0
                    # empty matrix sized to the batch, with an extra column
                    # to hold the positive/negative label
                    dense_matrix = np.zeros((num_rows, num_matrix_columns + 1))
                    # shuffles the positive indices
                    positive_shuffle_idx = np.arange(len(count_vec_list_positive))
                    np.random.shuffle(positive_shuffle_idx)
                    # fills the first half of the batch with randomly chosen positive reviews
                    for i in range(len(count_vec_list_positive)):
                        # stops once half the matrix is filled
                        if i >= num_rows // 2:
                            break
                        # writes the sparse values of a random positive review
                        # into row i of the dense matrix
                        row = count_vec_list_positive[positive_shuffle_idx[i]]
                        dense_matrix[i, row['indices']] = row['values']
                        # last column set to 1 marks the row as positive
                        dense_matrix[i, -1] = 1
                    # shuffles the negative indices
                    negative_shuffle_idx = np.arange(len(count_vec_list_negative))
                    np.random.shuffle(negative_shuffle_idx)
                    # fills the second half of the batch with randomly chosen negative reviews
                    for i in range(len(count_vec_list_negative)):
                        # stops once the matrix is filled
                        if i >= num_rows // 2:
                            break
                        # writes the sparse values of a random negative review
                        # into the second half of the dense matrix
                        row = count_vec_list_negative[negative_shuffle_idx[i]]
                        row_idx = i + num_rows // 2
                        dense_matrix[row_idx, row['indices']] = row['values']
                        # last column set to 0 marks the row as negative
                        dense_matrix[row_idx, -1] = 0
                    # shuffles the rows of the assembled batch so the classes are interleaved
                    shuffle_dense_idx = np.arange(dense_matrix.shape[0])
                    np.random.shuffle(shuffle_dense_idx)
                    dense_matrix = dense_matrix[shuffle_dense_idx]
                    # resets the accumulators for the next batch
                    count_vec_list_positive = []
                    count_vec_list_negative = []
                    yield dense_matrix[:, :-1], dense_matrix[:, -1]
        # tracks the number of files used and reshuffles the order
        # once every file in the directory has been read
        file_count += 1
        if file_count == len(json_paths):
            file_count = 0
            np.random.shuffle(path_shuffle)


def test_creator(json_directory, num_rows):
    """
    Parameters: path to a directory of JSON-lines files (string), number of rows (int)
    Output: two numpy arrays

    Reads the JSON-lines files in 'json_directory' in a shuffled order until
    num_rows reviews have been collected, then builds a dense matrix from the
    sparse count vectors and returns it together with the array of labels.
    """
    count_vec_list = []
    star_list = []
    json_paths = listdir(json_directory)
    path_shuffle = np.arange(len(json_paths))
    np.random.shuffle(path_shuffle)
    file_count = 0
    row_count = 0
    while True:
        vocab_size_saved = False
        with open(json_directory + '/' + json_paths[path_shuffle[file_count]], 'r') as f:
            for line in f:
                record = json.loads(line)
                # collects the sparse vector data and the label for each review
                count_vec_list.append(record['count_vec'])
                star_list.append(record['star_rating_filtered'])
                row_count += 1
                # saves the vocabulary size on the first pass
                if not vocab_size_saved:
                    num_matrix_columns = record['count_vec']['size']
                    vocab_size_saved = True
                # stops reading lines once num_rows is reached
                if row_count == num_rows:
                    break
        # once enough rows have been collected, builds the dense matrix
        # with an extra column to hold the positive/negative label
        if row_count == num_rows:
            dense_matrix = np.zeros((num_rows, num_matrix_columns + 1))
            for i in range(len(count_vec_list)):
                # writes the sparse values of review i into row i of the dense matrix
                row = count_vec_list[i]
                dense_matrix[i, row['indices']] = row['values']
                # last column holds the review's label
                dense_matrix[i, -1] = star_list[i]
            return dense_matrix[:, :-1], dense_matrix[:, -1]
        # tracks the number of files used and reshuffles the order
        # once every file in the directory has been read
        file_count += 1
        if file_count == len(json_paths):
            file_count = 0
            np.random.shuffle(path_shuffle)
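

# Minimal usage sketch. Assumptions: the directory names, vocabulary size, and
# model architecture below are illustrative only; the real training script that
# consumes these helpers is not part of this file.
if __name__ == '__main__':
    vocab_size = 5000          # assumed; must match count_vec['size'] in the data
    batch_size = 100

    # small feed-forward classifier over the bag-of-words vectors
    inputs = Input(shape=(vocab_size,))
    hidden = Dense(64, activation='relu')(inputs)
    outputs = Dense(1, activation='sigmoid')(hidden)
    model = Model(inputs, outputs)
    model.compile(optimizer='adam', loss='binary_crossentropy',
                  metrics=[metrics.binary_accuracy])

    # hypothetical directory names for the train/test JSON-lines files
    train_gen = batch_generator('train_json', batch_size)
    x_test, y_test = test_creator('test_json', 1000)

    model.fit_generator(train_gen,
                        steps_per_epoch=100,
                        epochs=5,
                        validation_data=(x_test, y_test),
                        callbacks=[EarlyStopping(patience=2),
                                   TensorBoard(log_dir='./logs')])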