# coding: utf-8
# In[59]:
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt


def normalize(X):
    """Z-score normalize the input data.

    Subtracts the mean and divides by the standard deviation so the
    result has mean 0 and (for non-constant input) unit variance.

    Args:
        X: array-like of numeric values.

    Returns:
        The normalized array.
    """
    mean = np.mean(X)  # sample mean
    std = np.std(X)    # sample standard deviation
    if std == 0:
        # Constant input: avoid division by zero; just center the data.
        return X - mean
    return (X - mean) / std


# Load the dataset.
# Load the Boston housing data with the TensorFlow contrib loader.
# NOTE(review): tf.contrib is deprecated and removed in TF 2.0 — the
# runtime warnings below suggest migrating to sklearn or tf.data.
boston = tf.contrib.learn.datasets.load_dataset('boston')

# Single feature: column index 5 of the data matrix; target: house price.
X_train = boston.data[:, 5]
Y_train = boston.target

# Z-score the feature so gradient descent behaves well.
X_train = normalize(X_train)
n_samples = len(X_train)

# print(X_train)

# Define the model.
# Placeholders: one scalar sample is fed per training step.
X = tf.placeholder(tf.float32, name='X')
Y = tf.placeholder(tf.float32, name='Y')

# Trainable parameters of the line Y_hat = w * X + b, both start at 0.
b = tf.Variable(0.0)  # bias
w = tf.Variable(0.0)  # weight

Y_hat = X * w + b                         # linear regression model
loss = tf.square(Y - Y_hat, name='loss')  # per-sample squared error
optimizer = tf.train.GradientDescentOptimizer(learning_rate=0.01).minimize(loss)

# Training: plain SGD, one sample at a time, 100 passes over the data.
init = tf.global_variables_initializer()
total = []
with tf.Session() as sess:
    sess.run(init)
    for i in range(100):
        total_loss = 0
        # zip pairs each feature value with its target.
        for x_val, y_val in zip(X_train, Y_train):
            _, step_loss = sess.run([optimizer, loss],
                                    feed_dict={X: x_val, Y: y_val})
            total_loss += step_loss
        total.append(total_loss / n_samples)
        print('Epoch {0}: Loss {1}'.format(i, total_loss / n_samples))
    # Read back the fitted parameters before the session closes.
    b_value, w_value = sess.run([b, w])
WARNING:tensorflow:
The TensorFlow contrib module will not be included in TensorFlow 2.0.
For more information, please see:* https://github.com/tensorflow/community/blob/master/rfcs/20180907-contrib-sunset.md* https://github.com/tensorflow/addons* https://github.com/tensorflow/io (for I/O related ops)
If you depend on functionality not listed there, please file an issue.WARNING:tensorflow:From <ipython-input-1-5577675f5b64>:15: load_dataset (from tensorflow.contrib.learn.python.learn.datasets) is deprecated and will be removed in a future version.
Instructions for updating:
Please use tf.data.
WARNING:tensorflow:From C:\Users\Administrator\AppData\Roaming\Python\Python37\site-packages\tensorflow\contrib\learn\python\learn\datasets\__init__.py:80: load_boston (from tensorflow.contrib.learn.python.learn.datasets.base) is deprecated and will be removed in a future version.
Instructions for updating:
Use scikits.learn.datasets.
WARNING:tensorflow:From C:\Users\Administrator\AppData\Roaming\Python\Python37\site-packages\tensorflow\contrib\learn\python\learn\datasets\base.py:129: load_csv_with_header (from tensorflow.contrib.learn.python.learn.datasets.base) is deprecated and will be removed in a future version.
Instructions for updating:
Use tf.data instead.
Epoch 0: Loss 61.6679900079541
Epoch 1: Loss 34.238465964702186
Epoch 2: Loss 34.237623845216135
Epoch 3: Loss 34.237623861201385
Epoch 4: Loss 34.237623861201385
Epoch 5: Loss 34.237623861201385
Epoch 6: Loss 34.237623861201385
Epoch 7: Loss 34.237623861201385
Epoch 8: Loss 34.237623861201385
Epoch 9: Loss 34.237623861201385
Epoch 10: Loss 34.237623861201385
Epoch 11: Loss 34.237623861201385
Epoch 12: Loss 34.237623861201385
Epoch 13: Loss 34.237623861201385
Epoch 14: Loss 34.237623861201385
Epoch 15: Loss 34.237623861201385
Epoch 16: Loss 34.237623861201385
Epoch 17: Loss 34.237623861201385
Epoch 18: Loss 34.237623861201385
Epoch 19: Loss 34.237623861201385
Epoch 20: Loss 34.237623861201385
Epoch 21: Loss 34.237623861201385
Epoch 22: Loss 34.237623861201385
Epoch 23: Loss 34.237623861201385
Epoch 24: Loss 34.237623861201385
Epoch 25: Loss 34.237623861201385
Epoch 26: Loss 34.237623861201385
Epoch 27: Loss 34.237623861201385
Epoch 28: Loss 34.237623861201385
Epoch 29: Loss 34.237623861201385
Epoch 30: Loss 34.237623861201385
Epoch 31: Loss 34.237623861201385
Epoch 32: Loss 34.237623861201385
Epoch 33: Loss 34.237623861201385
Epoch 34: Loss 34.237623861201385
Epoch 35: Loss 34.237623861201385
Epoch 36: Loss 34.237623861201385
Epoch 37: Loss 34.237623861201385
Epoch 38: Loss 34.237623861201385
Epoch 39: Loss 34.237623861201385
Epoch 40: Loss 34.237623861201385
Epoch 41: Loss 34.237623861201385
Epoch 42: Loss 34.237623861201385
Epoch 43: Loss 34.237623861201385
Epoch 44: Loss 34.237623861201385
Epoch 45: Loss 34.237623861201385
Epoch 46: Loss 34.237623861201385
Epoch 47: Loss 34.237623861201385
Epoch 48: Loss 34.237623861201385
Epoch 49: Loss 34.237623861201385
Epoch 50: Loss 34.237623861201385
Epoch 51: Loss 34.237623861201385
Epoch 52: Loss 34.237623861201385
Epoch 53: Loss 34.237623861201385
Epoch 54: Loss 34.237623861201385
Epoch 55: Loss 34.237623861201385
Epoch 56: Loss 34.237623861201385
Epoch 57: Loss 34.237623861201385
Epoch 58: Loss 34.237623861201385
Epoch 59: Loss 34.237623861201385
Epoch 60: Loss 34.237623861201385
Epoch 61: Loss 34.237623861201385
Epoch 62: Loss 34.237623861201385
Epoch 63: Loss 34.237623861201385
Epoch 64: Loss 34.237623861201385
Epoch 65: Loss 34.237623861201385
Epoch 66: Loss 34.237623861201385
Epoch 67: Loss 34.237623861201385
Epoch 68: Loss 34.237623861201385
Epoch 69: Loss 34.237623861201385
Epoch 70: Loss 34.237623861201385
Epoch 71: Loss 34.237623861201385
Epoch 72: Loss 34.237623861201385
Epoch 73: Loss 34.237623861201385
Epoch 74: Loss 34.237623861201385
Epoch 75: Loss 34.237623861201385
Epoch 76: Loss 34.237623861201385
Epoch 77: Loss 34.237623861201385
Epoch 78: Loss 34.237623861201385
Epoch 79: Loss 34.237623861201385
Epoch 80: Loss 34.237623861201385
Epoch 81: Loss 34.237623861201385
Epoch 82: Loss 34.237623861201385
Epoch 83: Loss 34.237623861201385
Epoch 84: Loss 34.237623861201385
Epoch 85: Loss 34.237623861201385
Epoch 86: Loss 34.237623861201385
Epoch 87: Loss 34.237623861201385
Epoch 88: Loss 34.237623861201385
Epoch 89: Loss 34.237623861201385
Epoch 90: Loss 34.237623861201385
Epoch 91: Loss 34.237623861201385
Epoch 92: Loss 34.237623861201385
Epoch 93: Loss 34.237623861201385
Epoch 94: Loss 34.237623861201385
Epoch 95: Loss 34.237623861201385
Epoch 96: Loss 34.237623861201385
Epoch 97: Loss 34.237623861201385
Epoch 98: Loss 34.237623861201385
Epoch 99: Loss 34.237623861201385
Done
# Predict with the learned parameters.
Y_pred = X_train * w_value + b_value
print('Done')

# Evaluation: scatter of real data vs. the fitted regression line.
plt.plot(X_train, Y_train, 'bo', label='Real Data')
plt.plot(X_train, Y_pred, 'r', label='Predicted Data')
plt.legend()
plt.show()

# Training-loss curve. The original plotted this twice back to back;
# the accidental duplicate has been removed.
plt.plot(total)
plt.show()


# In[74]:
# allow_soft_placement lets TF fall back to an available device when
# '/gpu:1' does not exist; log_device_placement prints op placement.
config = tf.ConfigProto(allow_soft_placement=True,
                        log_device_placement=True)

# In[76]:
with tf.device('/gpu:1'):
    rand_t = tf.random_uniform([50, 50], 0, 10, dtype=tf.float32, seed=0)
    a = tf.Variable(rand_t)
    b = tf.Variable(rand_t)
    c = tf.matmul(a, b)
    init = tf.global_variables_initializer()

# Fixes vs. the original:
#  - the session now uses `config` (the original built a fresh ConfigProto
#    without allow_soft_placement, so it failed on machines lacking gpu:1);
#  - variables are initialized before use (the original never ran `init`
#    inside the session, raising FailedPreconditionError);
#  - the dangling `sess.close` (missing parentheses, outside any session)
#    is removed — the `with` block closes the session automatically.
with tf.Session(config=config) as sess:
    sess.run(init)
    print(sess.run(c))

这里由于电脑配置不足(没有可用的 GPU),无法运行出结果。

tensorflow预测波士顿房价相关推荐

  1. python预测波士顿房价代码

    这是一份使用 Python 预测波士顿房价的示例代码: import numpy as np import pandas as pd from sklearn.datasets import load ...

  2. 项目 1: 预测波士顿房价

    机器学习工程师纳米学位 模型评价与验证 项目 1: 预测波士顿房价 欢迎来到机器学习工程师纳米学位的第一个项目!在此文件中,有些示例代码已经提供给你,但你还需要实现更多的功能来让项目成功运行.除非有明 ...

  3. 机器学习项目-预测波士顿房价-整体流程

    项目 1: 预测波士顿房价¶ 第一步. 导入数据 在这个项目中,你将利用马萨诸塞州波士顿郊区的房屋信息数据训练和测试一个模型,并对模型的性能和预测能力进行测试.通过该数据训练后的好的模型可以被用来对房 ...

  4. p1项目,预测波士顿房价

    机器学习工程师纳米学位 模型评价与验证 项目 1: 预测波士顿房价 欢迎来到机器学习工程师纳米学位的第一个项目!在此文件中,有些示例代码已经提供给你,但你还需要实现更多的功能来让项目成功运行.除非有明 ...

  5. Udacity机器学习入门项目5:预测波士顿房价

    机器学习工程师纳米学位 模型评价与验证 项目 1: 预测波士顿房价 欢迎来到机器学习工程师纳米学位的第一个项目!在此文件中,有些示例代码已经提供给你,但你还需要实现更多的功能来让项目成功运行.除非有明 ...

  6. 项目 : 预测波士顿房价_团结波士顿更新:早鸟票销售将于明天结束。 部分时间表到了!

    项目 : 预测波士顿房价 Unite Boston is just two months away! Early Bird ticket pricing ends tomorrow (Friday, ...

  7. 【Python】Sklearn线性回归模型预测波士顿房价并绘图

    波士顿房价 这是 sklearn.datasets 里的一种 Toy Dataset ,包含503个美国波士顿房价的观测值,是内置的小数据集,也是研究回归算法的优秀数据集. Python编程实现 im ...

  8. python 基于xgboost预测波士顿房价

    一.意义 这是一个机器学习练习项目,旨在熟悉xgboost的建模过程和数据分析的思路,目标数据选取sklearn自带数据集--波士顿房价 二.开始 1. 导入要用的库 from sklearn.dat ...

  9. 梯度下降法实现线性回归, 实例---预测波士顿房价

    本文先手动实现一个线性回归模型, 然后用sklearn的线性回归模型作对比 import pandas as pd df = pd.read_csv('house_data.csv') #数据集可到网 ...

  10. 【sklearn】线性回归 - 预测波士顿房价

    目的 本文使用Python的sklearn类库,基于对机器学习线性回归算法的理论学习,利用sklearn中集成的波士顿房价数据,以此来对线性回归的理论知识进行一次实践总结. 本文不以预测的准确率为目的 ...

最新文章

  1. emoji mysql 转 unicode_unicode和emoji编码
  2. 6.DeepFM: A Factorization-Machine based Neural Network for CTR Prediction论文详解和代码实现
  3. 23.使用load()方法异步请求数据
  4. [MSSQL2012]LEAD函数
  5. Debian下Cannot set LC_CTYPE to default locale: No such file or directory解决方法
  6. Linux中自带正则表达式应用举例
  7. JavaScript选择器
  8. CSS 制作垂直导航
  9. Spring进阶教程之在ApplicationContext初始化完成后重定义Bean
  10. CentOS 7 配置网络连接
  11. 在.NET环境中实现每日构建--NAnt篇
  12. 【观点讨论与支撑】读书到底有没有用?
  13. Python将图片转换成二进制的.txt文件
  14. 如何改变Android-studio中的APP的名字和图标
  15. Top 10 JavaScript编辑器,你在用哪个?
  16. Word自动生成的目录超出页边距
  17. 天正对应cad版本_天正CAD图形找不到可用的AutoCAD版本怎么办?
  18. 第十届蓝桥杯 2019年国赛 最优旅行
  19. python matplotlib坐标轴刻度设置
  20. win10 风格的 OA 管理系统 skyeye 更新,新增项目模块

热门文章

  1. Android学习——写个小实例
  2. 玩玩AJAX之使用ashx文件响应来自JQuery的JSON请求.
  3. 剑指offer题解 带讲解 python版 第二部分
  4. TurboFan-Sea of Nodes概念讲解
  5. Java多线程如何确定线程数
  6. ubuntu 18.04 解决无法联网的问题
  7. ftp 相关知识集合
  8. shiro+springmvc+mybatis【转】
  9. 一个Android上的弹幕控件Open Danmaku
  10. python2.7.7笔记if in