gan_gsters | Master-IASD | coktailjet | Error | 0 | 1000 | 0 | 0 | captured output:

[tqdm download progress: four dataset files (9.91M, 28.9k, 1.65M, 4.54k) each downloaded to 100%]

diffaug-epochs:   0% | 0/150 [00:00, ?it/s]
epoch-1:   0% | 0/938 [00:00, ?it/s]

/home/lamsade/testplatform/test-platform-a2/repos/Master-IASD/gan_gsters/venv/lib/python3.13/site-packages/torch/nn/modules/linear.py:134: UserWarning: Attempting to run cuBLAS, but there was no current CUDA context! Attempting to set the primary context... (Triggered internally at /pytorch/aten/src/ATen/cuda/CublasHandlePool.cpp:270.)
  return F.linear(input, self.weight, self.bias)

Traceback (most recent call last):
  File "/home/lamsade/testplatform/test-platform-a2/repos/Master-IASD/gan_gsters/generate.py", line 111, in <module>
    main()
  File "/home/lamsade/testplatform/test-platform-a2/repos/Master-IASD/gan_gsters/generate.py", line 67, in main
    ensure_diffaug_weights(checkpoint_dir)
  File "/home/lamsade/testplatform/test-platform-a2/repos/Master-IASD/gan_gsters/train_diffaug.py", line 263, in ensure_diffaug_weights
    train_diffaug(
        epochs=150,
        ...<4 lines>...
        requested_gpus=requested_gpus,
    )
  File "/home/lamsade/testplatform/test-platform-a2/repos/Master-IASD/gan_gsters/train_diffaug.py", line 226, in train_diffaug
    d_loss, w_dist = d_step(real_images, G, D, d_optimizer, device, policy)
  File "/home/lamsade/testplatform/test-platform-a2/repos/Master-IASD/gan_gsters/train_diffaug.py", line 136, in d_step
    d_loss.backward()
  File "/home/lamsade/testplatform/test-platform-a2/repos/Master-IASD/gan_gsters/venv/lib/python3.13/site-packages/torch/_tensor.py", line 625, in backward
    torch.autograd.backward(
        self, gradient, retain_graph, create_graph, inputs=inputs
    )
  File "/home/lamsade/testplatform/test-platform-a2/repos/Master-IASD/gan_gsters/venv/lib/python3.13/site-packages/torch/autograd/__init__.py", line 354, in backward
    _engine_run_backward(
        tensors,
        ...<5 lines>...
        accumulate_grad=True,
    )
  File "/home/lamsade/testplatform/test-platform-a2/repos/Master-IASD/gan_gsters/venv/lib/python3.13/site-packages/torch/autograd/graph.py", line 841, in _engine_run_backward
    return Variable._execution_engine.run_backward(  # Calls into the C++ engine to run the backward pass
        t_outputs, *args, **kwargs
    )  # Calls into the C++ engine to run the backward pass
RuntimeError: derivative for aten::grid_sampler_2d_backward is not implemented
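The failure surfaces when `d_loss.backward()` runs in `d_step`, and the message points at a missing second-order derivative: `aten::grid_sampler_2d_backward` is itself a backward op, so its derivative is only needed when a gradient that was kept in the graph (e.g. via `create_graph=True`) flows back through `torch.nn.functional.grid_sample`. The `w_dist` value returned by `d_step` suggests a Wasserstein-style discriminator loss, which commonly includes such a gradient penalty, and a grid_sample-based augmentation in the DiffAugment policy would put that op on the penalty's backward path. The sketch below is a minimal, hypothetical reconstruction of that double-backward path, not the repo's actual `d_step` or policy code; the `translate_with_grid_sample` helper and all tensor shapes are illustrative assumptions.

# Minimal sketch (assumption): a translation-style augmentation implemented with
# torch.nn.functional.grid_sample, combined with a gradient penalty that keeps
# the first-order gradient in the graph (create_graph=True). Backpropagating
# through that penalty requests the derivative of grid_sampler_2d_backward; on
# builds where it is not implemented, the RuntimeError above is raised.
# All names and shapes here are illustrative, not taken from train_diffaug.py.
import torch
import torch.nn.functional as F

def translate_with_grid_sample(x, shift=0.1):
    # Hypothetical augmentation: horizontal shift via an affine grid.
    theta = torch.tensor([[1.0, 0.0, shift],
                          [0.0, 1.0, 0.0]], device=x.device)
    theta = theta.unsqueeze(0).repeat(x.size(0), 1, 1)
    grid = F.affine_grid(theta, list(x.size()), align_corners=False)
    return F.grid_sample(x, grid, align_corners=False)

x = torch.randn(4, 1, 28, 28, requires_grad=True)    # stand-in for real_images
d_out = translate_with_grid_sample(x).sum()          # stand-in for D(aug(x)).sum()

# Gradient-penalty term: first-order gradient kept in the graph.
(grad_x,) = torch.autograd.grad(d_out, x, create_graph=True)
penalty = ((grad_x.flatten(1).norm(2, dim=1) - 1.0) ** 2).mean()

# The second backward pass is where the missing derivative would be requested.
penalty.backward()

If this is indeed the path taken in `d_step`, a common workaround is to implement the shift with padding and integer-offset indexing instead of grid_sample, or to compute the penalty term on images that bypass the grid_sample-based augmentation.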