From 0883baf21f18dcc7d1221889b2d7e69347a6a368 Mon Sep 17 00:00:00 2001
From: yaoyangz
Date: Tue, 14 Jan 2025 03:48:02 +0000
Subject: [PATCH] untrack a test file

---
 test.py | 28 ----------------------------
 1 file changed, 28 deletions(-)
 delete mode 100644 test.py

diff --git a/test.py b/test.py
deleted file mode 100644
index c508ede9fb6c..000000000000
--- a/test.py
+++ /dev/null
@@ -1,28 +0,0 @@
-import torch
-import torch_xla
-import torch_xla.core.xla_model as xm
-
-
-"""
-    t_inp, t_args, t_kwargs = sample_input.input, sample_input.args, sample_input.kwargs
-    cpu_inp, cpu_args, cpu_kwargs = cpu(sample_input)
-
-    actual = torch_fn(t_inp, *t_args, **t_kwargs)
-    expected = torch_fn(cpu_inp, *cpu_args, **cpu_kwargs)
-
-[test_reference_eager] sample_input: SampleInput(input=7.358110427856445, args=(0,), kwargs={}, broadcasts_input=False, name='')
-[test_reference_eager] sample_input: SampleInput(input=7, args=(0,), kwargs={}, broadcasts_input=False, name='')
-"""
-
-a = torch.tensor(7.358110427856445, device=xm.xla_device())
-print(f'[xla]: {torch.cumsum(a, 0)}')
-
-b = torch.tensor(7.358110427856445)
-print(f'[cpu]: {torch.cumsum(b, 0)}')
-
-# import torch_xla.debug.metrics as met
-
-# # For short report that only contains a few key metrics.
-# print(met.short_metrics_report())
-# # For full report that includes all metrics.
-# print(met.metrics_report())
\ No newline at end of file