Last active
November 6, 2025 11:58
-
-
Save rec/ae475424290a9ba0df0867ca5593698c to your computer and use it in GitHub Desktop.
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| # interesting | |
| torch.fx.experimental.proxy_tensor.handle_sym_dispatch(args=) | |
| torch.fx.experimental.proxy_tensor.handle_sym_dispatch(kwargs=) | |
| # done | |
| torch.DisableTorchFunction | |
| torch.DisableTorchFunctionSubclass | |
| torch.distributed.distributed_c10d.get_debug_level | |
| torch.distributed.fsdp.fully_sharded_data_parallel.FullyShardedDataParallel.register_comm_hook(hook=) | |
| torch.distributed.optim.zero_redundancy_optimizer.ZeroRedundancyOptimizer.join_hook | |
| torch.return_types.aminmax.__new__ | |
| torch.return_types.cummax.__new__ | |
| torch.return_types.cummin.__new__ | |
| torch.return_types.frexp.__new__ | |
| torch.return_types.geqrf.__new__ | |
| torch.return_types.histogram.__new__ | |
| torch.return_types.histogramdd.__new__ | |
| torch.return_types.kthvalue.__new__ | |
| torch.return_types.lu_unpack.__new__ | |
| torch.return_types.max.__new__ | |
| torch.return_types.median.__new__ | |
| torch.return_types.min.__new__ | |
| torch.return_types.mode.__new__ | |
| torch.return_types.nanmedian.__new__ | |
| torch.return_types.qr.__new__ | |
| torch.return_types.slogdet.__new__ | |
| torch.return_types.sort.__new__ | |
| torch.return_types.svd.__new__ | |
| torch.return_types.topk.__new__ | |
| torch.return_types.triangular_solve.__new__ | |
| torch.jit.interface | |
| torch.jit.script | |
| # False positive or can't be done. | |
| torch.ao.nn.quantized.functional.adaptive_avg_pool2d(output_size=) | |
| torch.ao.nn.quantized.functional.adaptive_avg_pool3d(output_size=) | |
| # https://github.com/pytorch/pytorch/blob/a51208c656fb3e9a8b091a4d181f9a9cda783c04/torch/ao/nn/quantized/functional.py#L134 | |
| # https://github.com/pytorch/pytorch/blob/a51208c656fb3e9a8b091a4d181f9a9cda783c04/torch/ao/nn/quantized/functional.py#L154 | |
| torch.ao.quantization.experimental.APoT_tensor.TensorAPoT.__init__(quantizer=) | |
| # https://github.com/pytorch/pytorch/blob/a51208c656fb3e9a8b091a4d181f9a9cda783c04/torch/ao/quantization/experimental/APoT_tensor.py#L10 | |
| torch.cuda.get_device_properties | |
| torch.cuda.green_contexts.GreenContext.create | |
| torch.xpu.get_device_properties | |
| torch.nn.functional.interpolate | |
| torch.nn.functional.softsign | |
| torch.nn.functional.tanh | |
| torch.nn.functional.tanhshrink | |
| torch.nn.functional.upsample | |
| torch.nn.functional.upsample_bilinear | |
| torch.nn.functional.upsample_nearest | |
| torch.nn.quantized.functional.adaptive_avg_pool2d(output_size=) | |
| torch.nn.quantized.functional.adaptive_avg_pool3d(output_size=) | |
| torch.utils.data.datapipes.datapipe.DFIterDataPipe.__iter__ | |
| torch.utils.data.datapipes.datapipe.IterDataPipe.__getattr__ | |
| torch.utils.data.datapipes.datapipe.IterDataPipe.__getstate__ | |
| torch.utils.data.datapipes.datapipe.IterDataPipe.__reduce_ex__ | |
| torch.utils.data.datapipes.datapipe.IterDataPipe.register_datapipe_as_function | |
| torch.utils.data.datapipes.datapipe.MapDataPipe.__getattr__ | |
| torch.utils.data.datapipes.datapipe.MapDataPipe.__getstate__ | |
| torch.utils.data.datapipes.datapipe.MapDataPipe.__reduce_ex__ | |
| torch.utils.data.datapipes.datapipe.MapDataPipe.register_datapipe_as_function | |
| # Still evaluating. | |
| torch._C.CompilationUnit.define | |
| torch._C.ConcreteModuleTypeBuilder.add_attribute | |
| torch._C.ConcreteModuleTypeBuilder.add_builtin_function | |
| torch._C.ConcreteModuleTypeBuilder.add_constant | |
| torch._C.ConcreteModuleTypeBuilder.add_failed_attribute | |
| torch._C.ConcreteModuleTypeBuilder.add_forward_hook | |
| torch._C.ConcreteModuleTypeBuilder.add_forward_pre_hook | |
| torch._C.ConcreteModuleTypeBuilder.add_function_attribute | |
| torch._C.ConcreteModuleTypeBuilder.add_ignored_attribute | |
| torch._C.ConcreteModuleTypeBuilder.add_ignored_attributes | |
| torch._C.ConcreteModuleTypeBuilder.add_module | |
| torch._C.ConcreteModuleTypeBuilder.add_overload | |
| torch._C.ConcreteModuleTypeBuilder.set_module_dict | |
| torch._C.ConcreteModuleTypeBuilder.set_module_list | |
| torch._C.ConcreteModuleTypeBuilder.set_parameter_dict | |
| torch._C.ConcreteModuleTypeBuilder.set_parameter_list | |
| torch._C.LiteScriptModule.__call__ | |
| torch._C.LiteScriptModule.find_method | |
| torch._C.LiteScriptModule.run_method | |
| torch._C.ScriptObject.setattr | |
| torch._C._distributed_c10d.Logger.set_construction_data_and_log | |
| torch._C._distributed_c10d.Store.set | |
| torch._C._distributed_c10d.Store.set_timeout | |
| torch._C._distributed_c10d.Store.wait | |
| torch.jit._script.RecursiveScriptClass.__getattr__ | |
| torch.jit._script.RecursiveScriptClass.__iadd__ | |
| torch.jit._script.RecursiveScriptClass.forward_magic_method | |
| torch.jit._script.RecursiveScriptModule.__copy__ | |
| torch.jit._script.RecursiveScriptModule.__deepcopy__ | |
| torch.jit._script.RecursiveScriptModule.__dir__ | |
| torch.jit._script.RecursiveScriptModule.__getattr__ | |
| torch.jit._script.RecursiveScriptModule.__getitem__ | |
| torch.jit._script.RecursiveScriptModule.__iter__ | |
| torch.jit._script.RecursiveScriptModule.extra_repr | |
| torch.jit._script.RecursiveScriptModule.forward_magic_method | |
| torch.jit._script.RecursiveScriptModule.get_debug_state | |
| torch.jit._script.RecursiveScriptModule.graph_for | |
| torch.jit._script.RecursiveScriptModule.save | |
| torch.jit._script.RecursiveScriptModule.save_to_buffer | |
| torch.jit._script.ScriptModule.__getattr__ | |
| torch.jit._script.ScriptModule.__reduce_package__ | |
| torch.jit._script.ScriptModule.define | |
| torch._subclasses.fake_tensor.FakeTensor.__new__ | |
| torch.utils._sympy.value_ranges.ValueRanges.convex_min_zero_map | |
| torch.utils._sympy.value_ranges.ValueRanges.coordinatewise_increasing_map | |
| torch.utils._sympy.value_ranges.ValueRanges.decreasing_map | |
| torch.utils._sympy.value_ranges.ValueRanges.increasing_map | |
| torch.utils._sympy.value_ranges.ValueRanges.monotone_map | |
| torch.utils._sympy.value_ranges.ValueRanges.wrap |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment