Fix users of NumPy APIs that are removed in NumPy 2.0.
This change migrates users of APIs removed in NumPy 2.0 to their recommended replacements (https://numpy.org/devdocs/numpy_2_0_migration_guide.html).

PiperOrigin-RevId: 655943417
tfx-copybara committed Jul 25, 2024
1 parent 03987b2 commit 1c4cfff
Showing 6 changed files with 24 additions and 24 deletions.
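
Every hunk in this commit applies the same one-token substitution: the capital-N `np.NaN` alias, removed in NumPy 2.0, becomes the lowercase `np.nan`. A minimal sketch of the pattern, assuming a made-up RecordBatch for illustration rather than any of the test fixtures below:

import numpy as np
import pyarrow as pa

# NumPy 2.0 drops the np.NaN alias; np.nan is the recommended replacement.
values = [3.0, 4.0, np.nan, 5.0]  # previously spelled [3.0, 4.0, np.NaN, 5.0]
batch = pa.RecordBatch.from_arrays(
    [pa.array([values], type=pa.list_(pa.float32()))], ['a'])
assert np.isnan(batch.column(0).to_pylist()[0][2])

The diff below repeats this substitution in each affected test fixture.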
2 changes: 1 addition & 1 deletion tensorflow_data_validation/api/stats_api_test.py
@@ -51,7 +51,7 @@ def test_stats_pipeline(self):
pa.array([np.linspace(1, 500, 500, dtype=np.int32)]),
], ['a', 'b', 'c']),
pa.RecordBatch.from_arrays([
-pa.array([[3.0, 4.0, np.NaN, 5.0]]),
+pa.array([[3.0, 4.0, np.nan, 5.0]]),
pa.array([['a', 'c', '∞', 'a']]),
pa.array([np.linspace(501, 1250, 750, dtype=np.int32)]),
], ['a', 'b', 'c']),
@@ -826,7 +826,7 @@ def test_with_weight_feature(self):
pa.array([[1.0], [2.0]])
], ['a', 'b', 'w'])
b2 = pa.RecordBatch.from_arrays([
-pa.array([[1.0, np.NaN, np.NaN, np.NaN], None]),
+pa.array([[1.0, np.nan, np.nan, np.nan], None]),
pa.array([[1], None]),
pa.array([[3.0], [2.0]])
], ['a', 'b', 'w'])
@@ -1269,7 +1269,7 @@ def test_with_per_feature_weight(self):
pa.array([[2.0], [1.0]]),
], ['a', 'b', 'w_a', 'w_b'])
b2 = pa.RecordBatch.from_arrays([
-pa.array([[1.0, np.NaN, np.NaN, np.NaN], None]),
+pa.array([[1.0, np.nan, np.nan, np.nan], None]),
pa.array([[1], None]),
pa.array([[3.0], [2.0]]),
pa.array([[2.0], [3.0]]),
@@ -1706,8 +1706,8 @@ def test_with_individual_feature_value_missing(self):
# input with two batches: first batch has two examples and second batch
# has a single example.
b1 = pa.RecordBatch.from_arrays(
-[pa.array([[1.0, 2.0], [3.0, 4.0, np.NaN, 5.0]])], ['a'])
-b2 = pa.RecordBatch.from_arrays([pa.array([[np.NaN, 1.0]])], ['a'])
+[pa.array([[1.0, 2.0], [3.0, 4.0, np.nan, 5.0]])], ['a'])
+b2 = pa.RecordBatch.from_arrays([pa.array([[np.nan, 1.0]])], ['a'])
batches = [b1, b2]

expected_result = {
@@ -2286,7 +2286,7 @@ def test_no_value_in_batch(self):

def test_only_nan(self):
b1 = pa.RecordBatch.from_arrays(
-[pa.array([[np.NaN]], type=pa.list_(pa.float32()))], ['a'])
+[pa.array([[np.nan]], type=pa.list_(pa.float32()))], ['a'])
batches = [b1]
expected_result = {
types.FeaturePath(['a']): text_format.Parse(
@@ -81,7 +81,7 @@ def test_encoder_feature_excluded(self):

def test_encoder_multivalent_numerical_with_nulls(self):
batch = pa.RecordBatch.from_arrays(
-[pa.array([[1.0, 1.0, np.NaN], None, [2.0, 2.0, 1.0], []])], ["fa"])
+[pa.array([[1.0, 1.0, np.nan], None, [2.0, 2.0, 1.0], []])], ["fa"])
expected = {
types.FeaturePath(["fa"]): [[2, 0, 0], [None, None, None], [1, 0, 2],
[None, None, None]]
@@ -92,7 +92,7 @@ def test_encoder_multivalent_numerical_with_nulls(self):

def test_encoder_univalent_with_nulls(self):
batch = pa.RecordBatch.from_arrays(
-[pa.array([None, [2.0], [], [None], [np.NaN]])], ["fa"])
+[pa.array([None, [2.0], [], [None], [np.nan]])], ["fa"])
expected = {
types.FeaturePath(["fa"]): [[None], [2], [None], [None], [None]]
}
@@ -205,7 +205,7 @@ def test_mi_with_univalent_features(self):
label_array = pa.array([[0.1], [0.2], [0.7], [0.2], None, [0.9], [0.4],
[0.8]])
# Random floats that do not map onto the label
-terrible_feat_array = pa.array([[0.4], [0.1], [0.4], [np.NaN], [0.8], [0.2],
+terrible_feat_array = pa.array([[0.4], [0.1], [0.4], [np.nan], [0.8], [0.2],
[0.5], [0.1]])
batch = pa.RecordBatch.from_arrays(
[label_array, label_array, terrible_feat_array],
@@ -318,7 +318,7 @@ def test_mi_batch_smaller_than_k(self):
def test_mi_normalized(self):
label_array = pa.array([[0.1], [0.2], [0.7], [0.2], None, [0.9], [0.4],
[0.8]])
-terrible_feat_array = pa.array([[0.4], [0.1], [0.4], [np.NaN], [0.8], [0.2],
+terrible_feat_array = pa.array([[0.4], [0.1], [0.4], [np.nan], [0.8], [0.2],
[0.5], [0.1]])
batch = pa.RecordBatch.from_arrays(
[label_array, label_array, terrible_feat_array],
@@ -468,7 +468,7 @@ def test_mi_with_unicode_labels(self):

def test_mi_with_univalent_feature_all_null(self):
label_array = pa.array([[0.1], [0.2], [0.7], [0.7]])
-null_feat_array = pa.array([[np.NaN], [np.NaN], [np.NaN], [np.NaN]])
+null_feat_array = pa.array([[np.nan], [np.nan], [np.nan], [np.nan]])
batch = pa.RecordBatch.from_arrays([label_array, null_feat_array],
["label_key", "null_feature"])

@@ -510,7 +510,7 @@ def test_mi_with_univalent_feature_all_null(self):

def test_mi_with_multivalent_feature_all_null(self):
label_array = pa.array([[0.1], [0.2], [0.7], [0.7]])
-null_feat_array = pa.array([[np.NaN], [np.NaN], [np.NaN], [np.NaN]])
+null_feat_array = pa.array([[np.nan], [np.nan], [np.nan], [np.nan]])
batch = pa.RecordBatch.from_arrays([label_array, null_feat_array],
["label_key", "null_feature"])

@@ -551,7 +551,7 @@ def test_mi_with_multivalent_feature_all_null(self):

def test_mi_with_multivalent_feature_all_empty(self):
label_array = pa.array([[0.1], [0.2], [0.7], [0.7]])
-empty_feat_array = pa.array([[np.NaN], [], [], []])
+empty_feat_array = pa.array([[np.nan], [], [], []])
batch = pa.RecordBatch.from_arrays([label_array, empty_feat_array],
["label_key", "empty_feature"])

@@ -633,7 +633,7 @@ def test_mi_with_multivalent_feature_univalent_label(self):
types.FeaturePath(["label_key"]))

def test_mi_with_multivalent_numeric_feature(self):
-feat_array = pa.array([[3.1], None, [4.0], [np.NaN], [1.2, 8.5], [2.3],
+feat_array = pa.array([[3.1], None, [4.0], [np.nan], [1.2, 8.5], [2.3],
[1.2, 3.2, 3.9]])
label_array = pa.array([[3.3], None, [4.0], [2.0, 8.0], [1.3, 8.5], [2.3],
[1.0, 3.1, 4]])
@@ -1395,7 +1395,7 @@ def setUp(self):
], ["fa", "fb", "fc", "fd", "label_key"]),
pa.RecordBatch.from_arrays([
pa.array([["1"]]),
-pa.array([[np.NaN]], type=pa.list_(pa.float64())),
+pa.array([[np.nan]], type=pa.list_(pa.float64())),
pa.array([["0", "0"]]),
pa.array([[0.0, 0.2]]),
pa.array([["label"]]),
@@ -567,7 +567,7 @@ def setUp(self):
pa.array([['Green']]),
pa.array([[2.3]]),
pa.array([[0]]),
-pa.array([[np.NaN]], type=pa.list_(pa.float64())),
+pa.array([[np.nan]], type=pa.list_(pa.float64())),
pa.array([['Label']]),
], ['fa', 'fb', 'fc', 'fd', 'label_key']),
pa.RecordBatch.from_arrays([
@@ -612,7 +612,7 @@ def test_mi_with_imputed_numerical_feature(self):
[0.1], [0.2], [0.8], [0.7], [0.2], [0.2], [0.3],
[0.1], [0.2], [0.8], [0.7], [0.2], [0.2], [0.3]])
feat_array = pa.array([
-[0.1], [0.2], [0.8], [0.7], [0.2], [np.NaN], None,
+[0.1], [0.2], [0.8], [0.7], [0.2], [np.nan], None,
[0.1], [0.2], [0.8], [0.7], [0.2], [0.2], [0.3]])
batch = pa.RecordBatch.from_arrays([label_array, feat_array],
["label_key", "fa"])
@@ -716,7 +716,7 @@ def test_mi_with_imputed_categorical_label(self):

def test_mi_with_imputed_numerical_label(self):
label_array = pa.array([
-[0.1], [0.2], [0.8], [0.7], [0.2], [np.NaN], None,
+[0.1], [0.2], [0.8], [0.7], [0.2], [np.nan], None,
[0.1], [0.2], [0.8], [0.7], [0.2], [0.2], [0.3]])
feat_array = pa.array([
[0.1], [0.2], [0.8], [0.7], [0.2], [0.2], [0.3],
12 changes: 6 additions & 6 deletions tensorflow_data_validation/statistics/stats_impl_test.py
@@ -117,7 +117,7 @@ def extract_output(self, accumulator):
pa.array([np.linspace(1, 500, 500, dtype=np.int64)]),
], ['a', 'b', 'c']),
pa.RecordBatch.from_arrays([
-pa.array([[3.0, 4.0, np.NaN, 5.0]], type=pa.list_(
+pa.array([[3.0, 4.0, np.nan, 5.0]], type=pa.list_(
pa.float32())),
pa.array([[b'a', b'c', b'd', b'a']], type=pa.list_(
pa.binary())),
@@ -1891,7 +1891,7 @@ def extract_output(self, accumulator):
pa.array([np.linspace(1, 500, 500, dtype=np.int64)]),
], ['a', 'b', 'c']),
pa.RecordBatch.from_arrays([
-pa.array([[3.0, 4.0, np.NaN, 5.0]], type=pa.list_(pa.float32())),
+pa.array([[3.0, 4.0, np.nan, 5.0]], type=pa.list_(pa.float32())),
pa.array([[b'a', b'b']], type=pa.list_(pa.binary())),
pa.array([np.linspace(501, 1250, 750, dtype=np.int64)]),
], ['a', 'b', 'c']),
@@ -2015,7 +2015,7 @@ def extract_output(self, accumulator):
pa.array([np.linspace(1, 500, 500, dtype=np.int64)]),
], ['a', 'b', 'c']),
pa.RecordBatch.from_arrays([
-pa.array([[3.0, 4.0, np.NaN, 5.0]], type=pa.list_(
+pa.array([[3.0, 4.0, np.nan, 5.0]], type=pa.list_(
pa.float32())),
pa.array([[b'a', b'b']], type=pa.list_(pa.binary())),
pa.array([np.linspace(501, 1250, 750, dtype=np.int64)]),
@@ -2114,7 +2114,7 @@ def test_stats_impl_slicing_sql(self):
pa.array([np.linspace(1, 500, 500, dtype=np.int64)]),
], ['a', 'b', 'c']),
pa.RecordBatch.from_arrays([
-pa.array([[3.0, 4.0, np.NaN, 5.0]], type=pa.list_(
+pa.array([[3.0, 4.0, np.nan, 5.0]], type=pa.list_(
pa.float32())),
pa.array([[b'a', b'b']], type=pa.list_(pa.binary())),
pa.array([np.linspace(501, 1250, 750, dtype=np.int64)]),
@@ -2160,7 +2160,7 @@ def test_stats_impl_slicing_sql_in_config(self):
pa.array([np.linspace(1, 500, 500, dtype=np.int64)]),
], ['a', 'b', 'c']),
pa.RecordBatch.from_arrays([
-pa.array([[3.0, 4.0, np.NaN, 5.0]], type=pa.list_(
+pa.array([[3.0, 4.0, np.nan, 5.0]], type=pa.list_(
pa.float32())),
pa.array([[b'a', b'b']], type=pa.list_(pa.binary())),
pa.array([np.linspace(501, 1250, 750, dtype=np.int64)]),
@@ -2677,7 +2677,7 @@ def test_tfdv_telemetry(self):
pa.array([None]),
], ['a', 'b', 'c']),
pa.RecordBatch.from_arrays([
-pa.array([[3.0, 4.0, np.NaN, 5.0]]),
+pa.array([[3.0, 4.0, np.nan, 5.0]]),
pa.array([['d', 'e', 'f']]),
pa.array([None]),
], ['a', 'b', 'c']),
