import torch

Tensors

torch.is_tensor(obj): returns whether obj is a PyTorch tensor
x = torch.tensor([1,2,3])
torch.is_tensor(x)
True
torch.is_storage(obj): returns whether obj is a PyTorch storage object
a = torch.rand(3,5)
a
tensor([[0.3135, 0.2202, 0.0493, 0.1774, 0.4600],
        [0.1035, 0.5486, 0.3794, 0.2942, 0.4146],
        [0.3640, 0.8552, 0.2304, 0.3706, 0.4923]])
sto = a.storage()
torch.is_storage(sto)
True
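For contrast, torch.is_storage returns False for the tensor itself; only the storage object qualifies. Continuing with the same a:
torch.is_storage(a)
False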
torch.is_complex(input): returns whether input has a complex dtype
x = torch.tensor(4)
torch.is_complex(x)
False
a = torch.tensor([1, 2],dtype=torch.float32)
b = torch.tensor([3, 4],dtype=torch.float32)
z = torch.complex(a, b)
print(z)
print(z.dtype)
print(torch.is_complex(z))
tensor([1.+3.j, 2.+4.j])
torch.complex64
True
torch.is_conj(input): returns whether input is a conjugated view, i.e. a view of a complex tensor whose conjugate bit is set to True
x = torch.tensor([1+2j])
y = x.conj()
torch.is_conj(y)
True
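The original x is not a conjugated view, and Tensor.resolve_conj() materializes the conjugation into a tensor whose conjugate bit is cleared again; a small check along these lines (z is just an illustration name):
torch.is_conj(x)
False
z = y.resolve_conj()
torch.is_conj(z)
False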
torch.is_floating_point(input): returns whether input has a floating-point dtype
x = torch.tensor([1,2],dtype = torch.float16)
torch.is_floating_point(x)
True
torch.is_nonzero(input): tests whether input, a single-element tensor, is not zero after type conversion,
i.e. not equal to torch.tensor([0.]), torch.tensor([0]) or torch.tensor([False])
(nonzero means "not 0")
a = torch.tensor([0])
b = torch.tensor([0.])
c = torch.tensor([False])
print(torch.is_nonzero(a))
print(torch.is_nonzero(b))
print(torch.is_nonzero(c))
False
False
False
d = torch.tensor([1])
torch.is_nonzero(d)
True
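torch.is_nonzero is only defined for single-element tensors; with more than one element it raises a RuntimeError instead of returning a value. A sketch of what to expect (e is just an illustration name):
e = torch.tensor([1, 2])
torch.is_nonzero(e)   # raises RuntimeError: the tensor has more than one element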
torch.set_default_dtype(d): sets the default floating-point dtype in PyTorch;
the default complex dtype changes along with it
(PyTorch defaults: torch.float32 and torch.complex64)
x = torch.tensor([1.,2.])
x.dtype
torch.float32
torch.set_default_dtype(torch.float64)
y = torch.tensor([1.,2.])
y.dtype
torch.float64
torch.tensor([1.2,3j]).dtype
torch.complex128
torch.get_default_dtype(): returns the current default floating-point dtype
torch.get_default_dtype()
torch.float64
torch.set_default_tensor_type(t): sets the default floating-point tensor type
The default is torch.FloatTensor; it can be changed to torch.DoubleTensor, which is what the current session uses after the set_default_dtype call above
torch.tensor([1.2,3]).dtype
torch.float64
torch.set_default_tensor_type(torch.FloatTensor)
torch.tensor([1.2,3]).dtype
torch.float32
torch.numel(input): returns the total number of elements in the input tensor
a = torch.randn(1,2,3,4,5)
torch.numel(a)
120
b = torch.zeros(4,4)
torch.numel(b)
16
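The same count is also available as a tensor method, so either spelling works:
a.numel()
120
b.numel()
16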
torch.set_printoptions(precision=None, threshold=None, edgeitems=None, linewidth=None, profile=None, sci_mode=None): changes PyTorch's print options
precision=None: number of digits of precision for floating-point output; default 4
x = torch.rand(5)
x
tensor([0.3292, 0.7998, 0.1258, 0.0310, 0.5057])
torch.set_printoptions(precision=6)
x
tensor([0.329211, 0.799845, 0.125833, 0.030979, 0.505715])
threshold=None: total number of elements above which the output is summarized (folded) instead of printed in full; default 1000
x = torch.rand(300,5)
x
tensor([[0.547976, 0.653326, 0.642392, 0.672673, 0.791259],
        [0.510795, 0.429108, 0.908404, 0.114354, 0.682855],
        [0.293834, 0.415299, 0.137108, 0.200969, 0.093728],
        ...,
        [0.528094, 0.974976, 0.202145, 0.356833, 0.817997],
        [0.049833, 0.735271, 0.655568, 0.055305, 0.189063],
        [0.160168, 0.885137, 0.151047, 0.628637, 0.627643]])
torch.set_printoptions(threshold=2000) # now the whole tensor is printed without summarization
x
tensor([[5.479759e-01, 6.533259e-01, 6.423922e-01, 6.726730e-01, 7.912594e-01],
        [5.107947e-01, 4.291083e-01, 9.084044e-01, 1.143541e-01, 6.828547e-01],
        [2.938338e-01, 4.152987e-01, 1.371077e-01, 2.009691e-01, 9.372818e-02],
        (... 296 more rows: with threshold=2000 all 300 rows are printed in full; the output is abbreviated here ...)
        [1.601676e-01, 8.851370e-01, 1.510473e-01, 6.286365e-01, 6.276430e-01]])
edgeitems=None: number of array items shown at the beginning and end of each dimension when the output is summarized; default 3
x = torch.rand(300,5)
torch.set_printoptions(threshold=1000,edgeitems=5)
x
tensor([[0.823003, 0.724627, 0.151349, 0.635556, 0.758480],
        [0.195002, 0.188467, 0.361890, 0.999455, 0.381826],
        [0.519000, 0.382240, 0.323936, 0.613176, 0.493356],
        [0.598428, 0.890287, 0.178512, 0.734590, 0.792948],
        [0.089123, 0.625326, 0.732094, 0.586968, 0.001416],
        ...,
        [0.793325, 0.430178, 0.076151, 0.990313, 0.359378],
        [0.624305, 0.199056, 0.143817, 0.275211, 0.781591],
        [0.343071, 0.362729, 0.453921, 0.185471, 0.387750],
        [0.071589, 0.276224, 0.036687, 0.046026, 0.297211],
        [0.015935, 0.340304, 0.203909, 0.793265, 0.022778]])
linewidth=None: number of characters per line before a line break is inserted; default 80
x = torch.rand(300,100) ### the output below was produced with linewidth set to 90
x
tensor([[0.837755, 0.946588, 0.125825, 0.313039, 0.401871,  ..., 0.297322, 0.100671,0.134698, 0.281824, 0.604187],[0.648208, 0.380088, 0.617546, 0.713590, 0.103169,  ..., 0.649760, 0.635475,0.158828, 0.202886, 0.620849],[0.802481, 0.103543, 0.192517, 0.915203, 0.594341,  ..., 0.538125, 0.563634,0.727168, 0.056273, 0.468143],[0.310724, 0.671067, 0.539035, 0.984445, 0.767107,  ..., 0.795432, 0.740041,0.094645, 0.466614, 0.065598],[0.669992, 0.551989, 0.014595, 0.399467, 0.295915,  ..., 0.978176, 0.635362,0.194325, 0.757090, 0.584492],...,[0.599014, 0.101226, 0.187463, 0.550024, 0.481907,  ..., 0.588663, 0.316834,0.593799, 0.738091, 0.284613],[0.379680, 0.202076, 0.765683, 0.827797, 0.799012,  ..., 0.879597, 0.120351,0.934854, 0.868876, 0.978365],[0.744969, 0.437267, 0.441280, 0.558972, 0.001062,  ..., 0.817802, 0.877915,0.550309, 0.944025, 0.297106],[0.624466, 0.270907, 0.391876, 0.519749, 0.655235,  ..., 0.700256, 0.211640,0.578399, 0.245057, 0.712004],[0.934933, 0.252441, 0.795266, 0.255975, 0.453355,  ..., 0.683163, 0.235009,0.449879, 0.967997, 0.307974]])
torch.set_printoptions(linewidth=80)
x
tensor([[0.837755, 0.946588, 0.125825, 0.313039, 0.401871,  ..., 0.297322,0.100671, 0.134698, 0.281824, 0.604187],[0.648208, 0.380088, 0.617546, 0.713590, 0.103169,  ..., 0.649760,0.635475, 0.158828, 0.202886, 0.620849],[0.802481, 0.103543, 0.192517, 0.915203, 0.594341,  ..., 0.538125,0.563634, 0.727168, 0.056273, 0.468143],[0.310724, 0.671067, 0.539035, 0.984445, 0.767107,  ..., 0.795432,0.740041, 0.094645, 0.466614, 0.065598],[0.669992, 0.551989, 0.014595, 0.399467, 0.295915,  ..., 0.978176,0.635362, 0.194325, 0.757090, 0.584492],...,[0.599014, 0.101226, 0.187463, 0.550024, 0.481907,  ..., 0.588663,0.316834, 0.593799, 0.738091, 0.284613],[0.379680, 0.202076, 0.765683, 0.827797, 0.799012,  ..., 0.879597,0.120351, 0.934854, 0.868876, 0.978365],[0.744969, 0.437267, 0.441280, 0.558972, 0.001062,  ..., 0.817802,0.877915, 0.550309, 0.944025, 0.297106],[0.624466, 0.270907, 0.391876, 0.519749, 0.655235,  ..., 0.700256,0.211640, 0.578399, 0.245057, 0.712004],[0.934933, 0.252441, 0.795266, 0.255975, 0.453355,  ..., 0.683163,0.235009, 0.449879, 0.967997, 0.307974]])
profile=None: a preset that overrides the other print options; one of default, short, full
x = torch.rand(5,5)
x
tensor([[0.362303, 0.429224, 0.995742, 0.731385, 0.024793],[0.360837, 0.312959, 0.680765, 0.089889, 0.648550],[0.360773, 0.811612, 0.558717, 0.160916, 0.996822],[0.077603, 0.359483, 0.455077, 0.266479, 0.963603],[0.647150, 0.650537, 0.443523, 0.556489, 0.162916]])
torch.set_printoptions(profile='short')
x
tensor([[0.36, 0.43, 1.00, 0.73, 0.02],[0.36, 0.31, 0.68, 0.09, 0.65],[0.36, 0.81, 0.56, 0.16, 1.00],[0.08, 0.36, 0.46, 0.27, 0.96],[0.65, 0.65, 0.44, 0.56, 0.16]])
torch.set_printoptions(profile='full')
x
tensor([[0.3623, 0.4292, 0.9957, 0.7314, 0.0248],[0.3608, 0.3130, 0.6808, 0.0899, 0.6485],[0.3608, 0.8116, 0.5587, 0.1609, 0.9968],[0.0776, 0.3595, 0.4551, 0.2665, 0.9636],[0.6471, 0.6505, 0.4435, 0.5565, 0.1629]])
torch.set_printoptions(profile='default')
x
tensor([[0.3623, 0.4292, 0.9957, 0.7314, 0.0248],[0.3608, 0.3130, 0.6808, 0.0899, 0.6485],[0.3608, 0.8116, 0.5587, 0.1609, 0.9968],[0.0776, 0.3595, 0.4551, 0.2665, 0.9636],[0.6471, 0.6505, 0.4435, 0.5565, 0.1629]])
sci_mode=None: whether numbers are displayed in scientific notation
x = torch.rand(3,3)
x
tensor([[0.2731, 0.7779, 0.0257],[0.5678, 0.0704, 0.4254],[0.3297, 0.3354, 0.3181]])
torch.set_printoptions(sci_mode=True)
x
tensor([[2.7311e-01, 7.7790e-01, 2.5717e-02],[5.6780e-01, 7.0381e-02, 4.2540e-01],[3.2972e-01, 3.3539e-01, 3.1805e-01]])
torch.set_flush_denormal(mode): disables denormal floating-point numbers on the CPU when mode is True (denormals are flushed to zero)
torch.set_flush_denormal(True)
True
torch.tensor([1e-323], dtype=torch.float64)
tensor([0.], dtype=torch.float64)
torch.set_flush_denormal(False)
True
torch.tensor([1e-323], dtype=torch.float64)
tensor([9.8813e-324], dtype=torch.float64)
torch.tensor(data, *, dtype=None, device=None, requires_grad=False, pin_memory=False): constructs a tensor with no autograd history
data: initial data for the tensor; can be a list, tuple, NumPy ndarray, scalar, or other types
dtype: desired data type of the tensor
device: device on which to construct the tensor, CPU or GPU; default CPU
requires_grad: whether the created tensor should record gradients
pin_memory: if True, the tensor is allocated in pinned (page-locked) memory; only applies to CPU tensors
torch.tensor([[0.11111, 0.222222, 0.3333333]],dtype=torch.float64,device=torch.device('cuda:0'))
tensor([[1.1111e-01, 2.2222e-01, 3.3333e-01]], device='cuda:0',dtype=torch.float64)
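Note that torch.tensor always copies data. To track gradients on the new tensor, pass requires_grad=True (only valid for floating-point or complex dtypes); a minimal sketch (t is just an illustration name):
t = torch.tensor([1.0, 2.0, 3.0], requires_grad=True)
t.requires_grad
True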
torch.sparse_coo_tensor(indices, values, size=None, *, dtype=None, device=None, requires_grad=False): creates a sparse tensor in COO format; the return value is a tensor
indices: positions of the nonzero elements, rows + columns
values: values of the nonzero elements
size: size of the sparse tensor
dtype: data type of the returned tensor
device: whether the tensor is created on the CPU or the GPU
requires_grad: whether the returned tensor should record gradients; default False
indices = torch.tensor([[4,2,1],[2,0,2]])
values = torch.tensor([3,4,5],dtype = torch.float32)
x = torch.sparse_coo_tensor(indices=indices,values = values,size = [5,5])
x
tensor(indices=tensor([[4, 2, 1],
                       [2, 0, 2]]),
       values=tensor([3.0000e+00, 4.0000e+00, 5.0000e+00]),
       size=(5, 5), nnz=3, layout=torch.sparse_coo)
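To check where the values ended up, the sparse tensor can be converted to a dense one with Tensor.to_dense(); here the nonzeros land at [4,2]=3, [2,0]=4 and [1,2]=5:
x.to_dense()   # 5x5 dense tensor, zero everywhere except those three positions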
torch.asarray(obj, *, dtype=None, device=None, copy=None, requires_grad=False): converts obj into a tensor
obj: a tensor, a NumPy array, a DLPack capsule, an object implementing the Python buffer protocol, a scalar, or a sequence of scalars
dtype: data type of the returned tensor
copy: controls whether the returned tensor shares memory with obj (None: avoid a copy when possible; True: always copy; False: never copy)
device: device of the returned tensor
requires_grad: whether the returned tensor should record gradients; default False
import numpy as np
a = np.array([1,2,3])
b = torch.asarray(a)
b
tensor([1, 2, 3], dtype=torch.int32)
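With matching dtype and device, torch.asarray shares memory with the source by default, while copy=True forces a copy; a sketch of the difference (c is just an illustration name):
a[0] = 10
b[0]                          # 10: b shares memory with a
c = torch.asarray(a, copy=True)
a[0] = 7
c[0]                          # still 10: c owns its own data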
torch.as_tensor(data, dtype=None, device=None): converts data into a tensor
data: initial data; can be a list, tuple, NumPy ndarray, scalar, or other types
dtype: data type of the returned tensor
device: device on which to construct the tensor
a = np.array([1, 2, 3])
t = torch.as_tensor(a, device=torch.device('cuda'))
t
tensor([1, 2, 3], device='cuda:0', dtype=torch.int32)
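Like torch.asarray, torch.as_tensor avoids a copy when it can: for a NumPy array with a matching dtype, the CPU tensor shares memory with the array, while asking for a different dtype (or device, as in the CUDA example above) forces a copy. A small sketch (a2, t_cpu, t_f are illustration names):
a2 = np.array([1, 2, 3])
t_cpu = torch.as_tensor(a2)                     # shares memory with a2
a2[0] = -1
t_cpu[0]                                        # reflects the change: -1
t_f = torch.as_tensor(a2, dtype=torch.float32)  # different dtype, so this is a copy
a2[0] = 5
t_f[0]                                          # still -1.0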
torch.as_strided(input, size, stride, storage_offset=0): creates a view of an existing tensor with the specified size, strides and storage offset; the result is still a tensor

A view shares memory with the original data: it allocates no storage of its own and stores no data, it merely presents the existing data in another way, e.g. showing part of it or reordering it.

input: the tensor on which to create the view
size: size of the resulting view
stride: strides of the output tensor
storage_offset: offset of the output tensor in the underlying storage
torch.set_printoptions(sci_mode=False)
x = torch.randn(3,3)
x
tensor([[ 0.8032,  1.4086, -0.6369],
        [-0.2773,  1.3125, -0.1569],
        [-0.8273, -0.0994,  1.3168]])
t = torch.as_strided(x, (2, 2), (1, 2), 1)
t
tensor([[ 1.4086, -0.2773],
        [-0.6369,  1.3125]])
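Because the result is a view, it still reads from the storage of x: element t[i][j] comes from storage position storage_offset + i*stride[0] + j*stride[1], and writing through t writes into x. For instance:
t[0, 0] = 0.
x[0, 1]       # now 0: t[0,0] maps to storage offset 1, which is x[0][1]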
torch.from_numpy(ndarray): creates a tensor from a NumPy ndarray
The created tensor shares memory with the ndarray
a = np.array([1, 2, 3])
t = torch.from_numpy(a)
t
tensor([1, 2, 3], dtype=torch.int32)
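The sharing works in both directions, so in-place changes on either side are visible on the other:
a[0] = -1
t             # tensor([-1,  2,  3], dtype=torch.int32)
t[1] = 10
a             # array([-1, 10,  3])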
torch.from_dlpack(ext_tensor): converts a tensor from an external library into a torch tensor
import torch.utils.dlpack
t = torch.arange(4)
t2 = torch.from_dlpack(t)
t2[:2] = -1
t2
tensor([-1, -1,  2,  3])
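from_dlpack shares memory with the source, which is why the assignment above also changes the original tensor:
t
tensor([-1, -1,  2,  3])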
torch.frombuffer(buffer, *, dtype, count=-1, offset=0, requires_grad=False): creates a 1-D tensor from a Python buffer object
buffer: a Python object exposing the buffer interface
dtype: data type of the returned tensor
count: number of elements to read; default -1 reads all elements until the end of the buffer
offset: number of bytes to skip at the start of the buffer; default 0
requires_grad: whether the returned tensor should record gradients; default False
import array
a = array.array('i', [1, 2, 3])
t = torch.frombuffer(a, dtype=torch.int32)
t
tensor([1, 2, 3], dtype=torch.int32)
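count and offset select a slice of the buffer (offset is counted in bytes), and the resulting tensor shares memory with the buffer; a sketch, assuming a platform where 'i' is a 4-byte int (b is just an illustration name):
b = array.array('i', [1, 2, 3, 4])
torch.frombuffer(b, dtype=torch.int32, count=2, offset=4)   # skip the first int, read two
tensor([2, 3], dtype=torch.int32)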
torch.zeros(*size, *, out=None, dtype=None, layout=torch.strided, device=None, requires_grad=False): returns a tensor filled with zeros
size: sequence of integers defining the shape of the output tensor; a variable number of arguments, a list, a tuple, ...
out: output tensor
dtype: data type of the returned tensor
layout: desired layout of the returned tensor
device: device on which to create the tensor
requires_grad: whether the returned tensor should record gradients; default False
torch.zeros(2, 3)
tensor([[0., 0., 0.],
        [0., 0., 0.]])
torch.zeros_like(input, *, dtype=None, layout=None, device=None, requires_grad=False, memory_format=torch.preserve_format): returns a tensor of zeros with the same shape as the given tensor
input: input tensor
dtype: data type of the returned tensor
layout: desired layout of the returned tensor
device: desired device of the returned tensor
requires_grad: whether the returned tensor should record gradients; default False
memory_format=torch.preserve_format: desired memory format
input = torch.empty(2, 3)
torch.zeros_like(input)
tensor([[0., 0., 0.],
        [0., 0., 0.]])
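The dtype (and the other properties) can still be overridden, in which case only the shape is taken from the input; for example:
torch.zeros_like(input, dtype=torch.int64)
tensor([[0, 0, 0],
        [0, 0, 0]])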

There is a lot more; I'll stop here...

Official documentation:

https://pytorch.org/docs/stable/torch.html#

