莫煩Pytorch神經網絡第三章代碼修改

3.1Regression回歸

import torch
import torch.nn.functional as F
from torch.autograd import Variable
import matplotlib.pyplot as plt

# --- create the data ---
# 100 points on [-1, 1]; unsqueeze turns shape (100,) into (100, 1) for nn.Linear
x = torch.unsqueeze(torch.linspace(-1, 1, 100), dim=1)
y = x.pow(2) + 0.2 * torch.rand(x.size())   # quadratic curve plus uniform noise
x, y = Variable(x), Variable(y)             # Variable is a no-op wrapper on modern PyTorch

# plt.scatter(x.data.numpy(), y.data.numpy())   # visualize the raw data
# plt.show()

# --- build the network ---
class Net(torch.nn.Module):
    """Two-layer regression network: Linear -> ReLU -> Linear.

    Args:
        n_features: input dimension.
        n_hidden: hidden-layer width.
        n_out: output dimension.
    """

    def __init__(self, n_features, n_hidden, n_out):
        super(Net, self).__init__()
        self.hidden = torch.nn.Linear(n_features, n_hidden)   # hidden layer
        self.predict = torch.nn.Linear(n_hidden, n_out)       # output layer

    def forward(self, x):
        x = F.relu(self.hidden(x))   # hidden layer + activation
        x = self.predict(x)          # no activation: raw value for regression
        return x


net = Net(1, 10, 1)
# print(net)

plt.ion()   # interactive mode: redraw the figure while training
plt.show()

# --- optimize the network ---
optimizer = torch.optim.SGD(net.parameters(), lr=0.5)
loss_func = torch.nn.MSELoss()      # mean-squared error, the usual regression loss

for t in range(100):
    prediction = net(x)
    loss = loss_func(prediction, y)

    optimizer.zero_grad()
    loss.backward()
    optimizer.step()

    if t % 5 == 0:
        # replot the fit every 5 steps
        plt.cla()
        plt.scatter(x.data.numpy(), y.data.numpy())
        plt.plot(x.data.numpy(), prediction.data.numpy(), 'r-', lw=5)
        # tutorial's loss.data[0] must be loss.item() on modern PyTorch
        plt.text(0.5, 0, 'Loss=%.4f' % loss.item(),
                 fontdict={'size': 20, 'color': 'red'})
        plt.pause(0.1)

plt.ioff()
plt.show()

3.2Classification分類

import torch
import torch.nn.functional as F
from torch.autograd import Variable
import matplotlib.pyplot as plt

# --- create the data: two Gaussian clusters, labelled 0 and 1 ---
n_data = torch.ones(100, 2)
x0 = torch.normal(2 * n_data, 1)    # cluster 0 centred at (2, 2)
y0 = torch.zeros(100)               # label 0
x1 = torch.normal(-2 * n_data, 1)   # cluster 1 centred at (-2, -2)
y1 = torch.ones(100)                # label 1
x = torch.cat((x0, x1), 0).type(torch.FloatTensor)  # (200, 2) features
y = torch.cat((y0, y1), ).type(torch.LongTensor)    # (200,) class indices for CrossEntropyLoss
x, y = Variable(x), Variable(y)

# plt.scatter(x.data.numpy()[:,0],x.data.numpy()[:,1],c=y.data.numpy(),s=100,lw=0,cmap='RdYlGn')
# plt.show()

# --- build the network ---
class Net(torch.nn.Module):
    """Two-layer classifier: Linear -> ReLU -> Linear (outputs raw logits).

    Args:
        n_features: input dimension.
        n_hidden: hidden-layer width.
        n_out: number of classes.
    """

    def __init__(self, n_features, n_hidden, n_out):
        super(Net, self).__init__()
        self.hidden = torch.nn.Linear(n_features, n_hidden)   # hidden layer
        self.predict = torch.nn.Linear(n_hidden, n_out)       # output layer

    def forward(self, x):
        x = F.relu(self.hidden(x))   # hidden layer + activation
        x = self.predict(x)          # raw logits; CrossEntropyLoss applies softmax itself
        return x


net = Net(2, 10, 2)
# print(net)

plt.ion()   # interactive mode: redraw the figure while training
plt.show()

optimizer = torch.optim.SGD(net.parameters(), lr=0.02)
loss_func = torch.nn.CrossEntropyLoss()     # standard loss for multi-class classification

# --- train and plot the decision result ---
for t in range(100):
    out = net(x)                # raw logits, shape (200, 2)
    loss = loss_func(out, y)

    optimizer.zero_grad()
    loss.backward()
    optimizer.step()

    if t % 2 == 0:
        plt.cla()
        # softmax over the class dimension; [1] of torch.max is the argmax index.
        # dim=1 is required: the implicit-dim form is deprecated on modern PyTorch.
        prediction = torch.max(F.softmax(out, dim=1), 1)[1]
        pred_y = prediction.data.numpy().squeeze()
        target_y = y.data.numpy()
        plt.scatter(x.data.numpy()[:, 0], x.data.numpy()[:, 1],
                    c=pred_y, s=100, lw=0, cmap='RdYlGn')
        accuracy = sum(pred_y == target_y) / 200   # fraction of the 200 points classified correctly
        plt.text(1.5, -4, 'Accuracy=%.2f' % accuracy,
                 fontdict={'size': 20, 'color': 'red'})
        plt.pause(0.1)

plt.ioff()
plt.show()

3.3快速搭建法

import torch
import torch.nn.functional as F
from torch.autograd import Variable
import matplotlib.pyplot as plt

# --- create the data: two Gaussian clusters, labelled 0 and 1 ---
n_data = torch.ones(100, 2)
x0 = torch.normal(2 * n_data, 1)    # cluster 0 centred at (2, 2)
y0 = torch.zeros(100)               # label 0
x1 = torch.normal(-2 * n_data, 1)   # cluster 1 centred at (-2, -2)
y1 = torch.ones(100)                # label 1
x = torch.cat((x0, x1), 0).type(torch.FloatTensor)  # (200, 2) features
y = torch.cat((y0, y1), ).type(torch.LongTensor)    # (200,) class indices
x, y = Variable(x), Variable(y)

# plt.scatter(x.data.numpy()[:,0],x.data.numpy()[:,1],c=y.data.numpy(),s=100,lw=0,cmap='RdYlGn')
# plt.show()

# --- ordinary network construction ---
class Net(torch.nn.Module):
    """Hand-written two-layer network: Linear -> ReLU -> Linear."""

    def __init__(self, n_features, n_hidden, n_out):
        super(Net, self).__init__()
        self.hidden = torch.nn.Linear(n_features, n_hidden)
        self.predict = torch.nn.Linear(n_hidden, n_out)

    def forward(self, x):
        x = F.relu(self.hidden(x))
        x = self.predict(x)
        return x


net1 = Net(2, 10, 2)

# --- quick construction: Sequential builds the same architecture in one expression ---
net2 = torch.nn.Sequential(
    torch.nn.Linear(2, 10),
    torch.nn.ReLU(),
    torch.nn.Linear(10, 2),
)

print(net1)
print(net2)

3.4保存提取

import torch
from torch.autograd import Variable
import matplotlib.pyplot as plt

# fake data: noisy quadratic, shape (100, 1)
x = torch.unsqueeze(torch.linspace(-1, 1, 100), dim=1)
y = x.pow(2) + 0.2 * torch.rand(x.size())
x, y = Variable(x, requires_grad=False), Variable(y, requires_grad=False)
def save():
    """Train a small regression net on the module-level (x, y), then save it.

    Writes two files to the working directory:
    - 'net.pkl': the entire network object.
    - 'net_params.pkl': only the state_dict (parameters).
    Also plots the fit into subplot 131.
    """
    net1 = torch.nn.Sequential(
        torch.nn.Linear(1, 10),
        torch.nn.ReLU(),
        torch.nn.Linear(10, 1),
    )
    optimizer = torch.optim.SGD(net1.parameters(), lr=0.5)
    loss_func = torch.nn.MSELoss()

    for t in range(100):
        prediction = net1(x)
        loss = loss_func(prediction, y)
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()

    torch.save(net1, 'net.pkl')                      # save the whole network
    torch.save(net1.state_dict(), 'net_params.pkl')  # save only the parameters

    # plot the trained fit
    plt.figure(1, figsize=(10, 3))
    plt.subplot(131)
    plt.title('Net1')
    plt.scatter(x.data.numpy(), y.data.numpy())
    plt.plot(x.data.numpy(), prediction.data.numpy(), 'r-', lw=5)

# --- restore the whole network ---
def restore_net():
    """Reload the complete network saved by save() and plot its fit (subplot 132)."""
    net2 = torch.load('net.pkl')
    prediction = net2(x)

    plt.subplot(132)
    plt.title('Net2')
    plt.scatter(x.data.numpy(), y.data.numpy())
    plt.plot(x.data.numpy(), prediction.data.numpy(), 'r-', lw=5)

# --- restore only the network parameters ---
def restore_params():
    """Rebuild the same architecture, then load only the saved parameters (subplot 133)."""
    net3 = torch.nn.Sequential(
        torch.nn.Linear(1, 10),
        torch.nn.ReLU(),
        torch.nn.Linear(10, 1),
    )
    net3.load_state_dict(torch.load('net_params.pkl'))
    prediction = net3(x)

    plt.subplot(133)
    plt.title('Net3')
    plt.scatter(x.data.numpy(), y.data.numpy())
    plt.plot(x.data.numpy(), prediction.data.numpy(), 'r-', lw=5)
    plt.show()


save()
restore_net()
restore_params()

3.5批數據訓練

import torch
import torch.utils.data as Data

BATCH_SIZE = 5

x = torch.linspace(1, 10, 10)   # ten points 1..10
y = torch.linspace(10, 1, 10)   # ten points 10..1

# The tutorial's Data.TensorDataset(data_tensor=x, target_tensor=y) fails on
# newer PyTorch; positional arguments work on all recent versions.
torch_dataset = Data.TensorDataset(x, y)
loader = Data.DataLoader(
    dataset=torch_dataset,
    batch_size=BATCH_SIZE,
    shuffle=True,
    # num_workers=2,  # worker processes; omit on Windows (no fork), it errors there
)

for epoch in range(3):
    for step, (batch_x, batch_y) in enumerate(loader):
        # training would happen here
        print('Epoch:', epoch, '|Step:', step,
              '|batch x:', batch_x.numpy(), '|batch y:', batch_y.numpy())

3.6Optimizer優化器

import torch
import torch.nn.functional as F
from torch.autograd import Variable
import matplotlib.pyplot as plt
import torch.utils.data as Data

# hyper parameters
LR = 0.01
BATCH_SIZE = 32
EPOCH = 12

# noisy quadratic data, shape (100, 1)
x = torch.unsqueeze(torch.linspace(-1, 1, 100), dim=1)
y = x.pow(2) + 0.2 * torch.rand(x.size())

# plt.scatter(x.numpy(), y.numpy())
# plt.show()

torch_dataset = Data.TensorDataset(x, y)
loader = Data.DataLoader(dataset=torch_dataset, batch_size=BATCH_SIZE, shuffle=True)


class Net(torch.nn.Module):
    """Small regression net (1 -> 20 -> 1) used to compare optimizers."""

    def __init__(self):
        super(Net, self).__init__()
        self.hidden = torch.nn.Linear(1, 20)
        self.predict = torch.nn.Linear(20, 1)

    def forward(self, x):
        x = F.relu(self.hidden(x))
        x = self.predict(x)
        return x


# one independent network per optimizer so the comparison is fair
net_SGD = Net()
net_Momentum = Net()
net_RMSprop = Net()
net_Adam = Net()
nets = [net_SGD, net_Momentum, net_RMSprop, net_Adam]

opt_SGD = torch.optim.SGD(net_SGD.parameters(), lr=LR)
opt_Momentum = torch.optim.SGD(net_Momentum.parameters(), lr=LR, momentum=0.8)
opt_RMSprop = torch.optim.RMSprop(net_RMSprop.parameters(), lr=LR, alpha=0.9)
opt_Adam = torch.optim.Adam(net_Adam.parameters(), lr=LR, betas=(0.9, 0.99))
optimizers = [opt_SGD, opt_Momentum, opt_RMSprop, opt_Adam]

loss_func = torch.nn.MSELoss()
losses_his = [[], [], [], []]   # loss history, one list per optimizer
for epoch in range(EPOCH):
    print(epoch)
    for step, (batch_x, batch_y) in enumerate(loader):
        # b_x = Variable(batch_x)   # Variable wrapping is unnecessary on modern PyTorch
        # b_y = Variable(batch_y)
        for net, opt, l_his in zip(nets, optimizers, losses_his):
            output = net(batch_x)
            loss = loss_func(output, batch_y)
            opt.zero_grad()
            loss.backward()
            opt.step()
            l_his.append(loss.item())   # record this optimizer's loss

# plot the four loss curves for comparison
labels = ['SGD', 'Momentum', 'RMSprop', 'Adam']
for i, l_his in enumerate(losses_his):
    plt.plot(l_his, label=labels[i])
plt.legend(loc='best')
plt.xlabel('Steps')
plt.ylabel('Loss')
plt.show()

本文來自互聯網用戶投稿,該文觀點僅代表作者本人,不代表本站立場。本站僅提供信息存儲空間服務,不擁有所有權,不承擔相關法律責任。
如若轉載,請注明出處:http://www.pswp.cn/news/389444.shtml
繁體地址,請注明出處:http://hk.pswp.cn/news/389444.shtml
英文地址,請注明出處:http://en.pswp.cn/news/389444.shtml

如若內容造成侵權/違法違規/事實不符,請聯系多彩編程網進行投訴反饋email:809451989@qq.com,一經查實,立即刪除!

相關文章

為什么餅圖有問題

介紹 (Introduction) It seems as if people are split on pie charts: either you passionately hate them, or you are indifferent. In this article, I am going to explain why pie charts are problematic and, if you fall into the latter category, what you can do w…

New Distinct Substrings(后綴數組)

New Distinct Substrings（后綴數組） 給定一個字符串，求不相同的子串的個數。\(n<50005\)。 顯然，任何一個子串一定是后綴上的前綴。先（按套路）把后綴排好序，對于當前的后綴\(S_i\)，…

Android dependency 'com.android.support:support-v4' has different version for the compile (26.1.0...

在項目中加入react-native-camera的時候 出現的錯誤. 解決方案: 修改 implementation project(:react-native-camera)為 implementation (project(:react-native-camera)) {exclude group: "com.android.support"}查看原文 Could not find play-services-basement.aa…

先知模型 facebook_使用Facebook先知進行犯罪率預測

先知模型 facebookTime series prediction is one of the must-know techniques for any data scientist. Questions like predicting the weather, product sales, customer visit in the shopping center, or amount of inventory to maintain, etc - all about time series …

莫煩Pytorch神經網絡第四章代碼修改

4.1CNN卷積神經網絡 import torch import torch.nn as nn from torch.autograd import Variable import torch.utils.data as Data import torchvision import matplotlib.pyplot as pltEPOCH 1 BATCH_SIZE 50 LR 0.001 DOWNLOAD_MNIST False #如果數據集已經下載到…

github gists 101使代碼共享漂亮

If you’ve been going through Medium, looking at technical articles, you’ve undoubtedly seen little windows that look like the below:如果您一直在閱讀Medium，并查看技術文章，那麼您無疑會看到類似于以下內容的小窗口： def hello_…

loj #6278. 數列分塊入門 2

題目 題解 區間修改&#xff0c;詢問區間小于c的個數。分塊排序&#xff0c;用vector。至于那個塊的大小&#xff0c;好像要用到均值不等式 我不太會。。。就開始一個個試&#xff0c;發現sizsqrt(n)/4時最快&#xff01;&#xff01;&#xff01;明天去學一下算分塊復雜度的方…

基于Netty的百萬級推送服務設計要點

1. 背景1.1. 話題來源最近很多從事移動互聯網和物聯網開發的同學給我發郵件或者微博私信我&#xff0c;咨詢推送服務相關的問題。問題五花八門&#xff0c;在幫助大家答疑解惑的過程中&#xff0c;我也對問題進行了總結&#xff0c;大概可以歸納為如下幾類&#xff1a;1&#x…

莫煩Pytorch神經網絡第五章代碼修改

5.1動態Dynamic import torch from torch import nn import numpy as np import matplotlib.pyplot as plt# torch.manual_seed(1) # reproducible# Hyper Parameters INPUT_SIZE 1 # rnn input size / image width LR 0.02 # learning rateclass…

鮮為人知的6個黑科技網站_6種鮮為人知的熊貓繪圖工具

鮮為人知的6個黑科技網站Pandas is the go-to Python library for data analysis and manipulation. It provides numerous functions and methods that expedice the data analysis process.Pandas是用于數據分析和處理的Python庫。 它提供了加速數據分析過程的眾多功能和方法…

VRRP網關冗余

實驗要求?1、R1創建環回口&#xff0c;模擬外網?2、R2&#xff0c;R3使用VRRP技術?3、路由器之間使用EIGRP路由協議? 實驗拓撲? 實驗配置??R1(config)#interface loopback 0R1(config-if)#ip address 1.1.1.1 255.255.255.0R1(config-if)#int e0/0R1(config-if)#ip addr…

網頁JS獲取當前地理位置(省市區)

網頁JS獲取當前地理位置&#xff08;省市區&#xff09; 一、總結 一句話總結&#xff1a;ip查詢接口 二、網頁JS獲取當前地理位置&#xff08;省市區&#xff09; 眼看2014又要過去了&#xff0c;翻翻今年的文章好像沒有寫幾篇&#xff0c;忙真的或許已經不能成為借口了&#…

大熊貓卸妝后_您不應錯過的6大熊貓行動

大熊貓卸妝后數據科學 (Data Science) Pandas is used mainly for reading, cleaning, and extracting insights from data. We will see an advanced use of Pandas which are very important to a Data Scientist. These operations are used to analyze data and manipulate…

數據eda_關于分類和有序數據的EDA

數據eda數據科學和機器學習統計 (STATISTICS FOR DATA SCIENCE AND MACHINE LEARNING) Categorical variables are the ones where the possible values are provided as a set of options, it can be pre-defined or open. An example can be the gender of a person. In the …

PyTorch官方教程中文版:PYTORCH之60MIN入門教程代碼學習

Pytorch入門 import torch""" 構建非初始化的矩陣 """x torch.empty(5,3) #print(x)""" 構建隨機初始化矩陣 """x torch.rand(5,3)""" 構造一個矩陣全為 0&#xff0c;而且數據類型是 long &qu…

Flexbox 最簡單的表單

彈性布局(Flexbox)逐漸流行，越來越多的人開始使用，因為它寫Css布局真是太簡單了一一、<form>元素表單使用<form>元素<form></form>復制代碼上面是一個空的表單，根據HTML標準，它是一個塊級元素，…

CSS中的盒子模型

一.為什么使用CSS 1.有效的傳遞頁面信息 2.使用CSS美化過的頁面文本&#xff0c;使頁面漂亮、美觀&#xff0c;吸引用戶 3.可以很好的突出頁面的主題內容&#xff0c;使用戶第一眼可以看到頁面主要內容 4.具有良好的用戶體驗 二.字體樣式屬性 1.font-family:英…

jdk重啟后步行_向后介紹步行以一種新穎的方式來預測未來

jdk重啟后步行“永遠不要做出預測&#xff0c;尤其是關于未來的預測。” (KK Steincke) (“Never Make Predictions, Especially About the Future.” (K. K. Steincke)) Does this picture portray a horse or a car? 這張照片描繪的是馬還是汽車&#xff1f; How likely is …

PyTorch官方教程中文版:入門強化教程代碼學習

PyTorch之數據加載和處理 from __future__ import print_function, division import os import torch import pandas as pd #用于更容易地進行csv解析 from skimage import io, transform #用于圖像的IO和變換 import numpy as np import matplotlib.pyplot a…

css3-2 CSS3選擇器和文本字體樣式

css3-2 CSS3選擇器和文本字體樣式 一、總結 一句話總結&#xff1a;是要記下來的&#xff0c;記下來可以省很多事。 1、css的基本選擇器中的:first-letter和:first-line是什么意思&#xff1f; :first-letter選擇第一個單詞&#xff0c;:first-line選擇第一行 2、css的偽類選…