百度翻译

  • baidufanyi.py
"""Command-line Baidu Translate client.

Flow per query: fetch the translate page to obtain the anti-bot ``token``
(scraped out of the inlined ``window['common']`` script), compute the
``sign`` form field by running the site's own JS (``baidufanyi.js``) through
execjs, then POST both to the v2transapi endpoint and print the translation.
"""
import requests
import re
import json
import execjs

# Page URL (yields token + session cookies) and the two translate endpoints;
# only TRANSLATE_API (v2, signed) is actually used below.
URL = 'https://fanyi.baidu.com/?aldtype=16047#zh/en/'
TRANSLATE_API = 'https://fanyi.baidu.com/v2transapi'
REALTRANSLATE_API = 'https://fanyi.baidu.com/transapi'

# Browser-like headers without a Cookie header (kept for the transapi variant).
HEADERS = {
    'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.36 SE 2.X MetaSr 1.0',
    'Accept': '*/*',
    'Accept-Language': 'zh-CN,zh;q=0.9',
    'Connection': 'keep-alive',
    'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8',
    'Host': 'fanyi.baidu.com',
    'Origin': 'https://fanyi.baidu.com',
    'Referer': 'https://fanyi.baidu.com/',
    'X-Requested-With': 'XMLHttpRequest',
}

# Same headers plus a captured Cookie header; v2transapi rejects cookieless
# requests, so this recorded BAIDUID cookie is sent with every call.
HEADERS2 = {
    'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.36 SE 2.X MetaSr 1.0',
    'Accept': '*/*',
    'Accept-Language': 'zh-CN,zh;q=0.9',
    'Connection': 'keep-alive',
    'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8',
    'Host': 'fanyi.baidu.com',
    'Origin': 'https://fanyi.baidu.com',
    'Referer': 'https://fanyi.baidu.com/',
    'X-Requested-With': 'XMLHttpRequest',
    'Cookie': 'BAIDUID=BEA2658FC962DF6CA0C053E5690C1934:FG=1; locale=zh; REALTIME_TRANS_SWITCH=1; FANYI_WORD_SWITCH=1; HISTORY_SWITCH=1; SOUND_SPD_SWITCH=1; SOUND_PREFER_SWITCH=1; Hm_lvt_64ecd82404c51e03dc91cb9e8c025574=1540531940; Hm_lpvt_64ecd82404c51e03dc91cb9e8c025574=1540531984; from_lang_often=%5B%7B%22value%22%3A%22en%22%2C%22text%22%3A%22%u82F1%u8BED%22%7D%2C%7B%22value%22%3A%22zh%22%2C%22text%22%3A%22%u4E2D%u6587%22%7D%5D; to_lang_often=%5B%7B%22value%22%3A%22zh%22%2C%22text%22%3A%22%u4E2D%u6587%22%7D%2C%7B%22value%22%3A%22en%22%2C%22text%22%3A%22%u82F1%u8BED%22%7D%5D',
}

# Cookie tail without BAIDUID, kept for building a Cookie header from the
# live session's cookies (the transapi code path).
Cookie = 'REALTIME_TRANS_SWITCH=1; FANYI_WORD_SWITCH=1; HISTORY_SWITCH=1; SOUND_SPD_SWITCH=1; SOUND_PREFER_SWITCH=1; Hm_lvt_64ecd82404c51e03dc91cb9e8c025574=1540531940; Hm_lpvt_64ecd82404c51e03dc91cb9e8c025574=1540531984; from_lang_often=%5B%7B%22value%22%3A%22en%22%2C%22text%22%3A%22%u82F1%u8BED%22%7D%2C%7B%22value%22%3A%22zh%22%2C%22text%22%3A%22%u4E2D%u6587%22%7D%5D; to_lang_often=%5B%7B%22value%22%3A%22zh%22%2C%22text%22%3A%22%u4E2D%u6587%22%7D%2C%7B%22value%22%3A%22en%22%2C%22text%22%3A%22%u82F1%u8BED%22%7D%5D'


class fanYi:
    """Holds one requests session and the form data posted to v2transapi."""

    def __init__(self):
        self._session = requests.session()
        # Fixed en->zh form; 'query', 'sign' and 'token' are filled per request.
        self._data = {
            'from': 'en',
            'to': 'zh',
            'query': '',
            'transtype': 'realtime',
            'simple_means_flag': '3',
            'sign': '',
            'token': '',
        }

    def _set_words(self, words):
        # Remember the text to translate; used by _get_sign()/_translate().
        self._words = words

    def _get_token(self):
        """Scrape the per-session token out of the translate page's HTML."""
        response = self._session.get(URL, headers=HEADERS2)
        html = response.text
        li = re.search(r"<script>\s*window\[\'common\'\] = ([\s\S]*?)</script>", html)
        token = re.search(r"token: \'([a-zA-Z0-9]+)\',", li.group(1))
        self._data['token'] = token.group(1)

    def _get_sign(self):
        """Compute the 'sign' field by calling function e() in baidufanyi.js."""
        # Explicit encoding: the JS file contains only ASCII today, but the
        # platform default codec should not be relied on.
        with open('baidufanyi.js', encoding='utf-8') as f:
            js = f.read()
        sign = execjs.compile(js).call('e', self._words)
        self._data['sign'] = sign

    def _translate(self):
        """Post the filled form to v2transapi and print the first translation."""
        self._get_token()
        self._get_sign()
        self._data['query'] = self._words
        response = self._session.post(TRANSLATE_API, data=self._data, headers=HEADERS2)
        payload = response.json()  # UTF-8 JSON; replaces manual decode+loads
        print(payload['trans_result']['data'][0]['dst'])


if __name__ == "__main__":
    fanyi = fanYi()
    while True:  # translate lines from stdin until interrupted (Ctrl-C)
        fanyi._set_words(input())
        fanyi._translate()
  • baidufanyi.js
// baidufanyi.js — Baidu Translate "sign" algorithm, lifted from the site's
// obfuscated bundle. Entry point is e(r): given the query text r it returns
// the sign string "<p>.<p ^ m>" expected by the v2transapi form.
// NOTE(review): i is a hard-coded "gtk" seed; the live page serves it as
// window.gtk, so a captured value may go stale — confirm before relying on it.
var i = "320305.131321201"
// a(r): array-copy helper (spread/Array.from polyfill used by e()).
function a(r){if(Array.isArray(r)){for(var o=0,t=Array(r.length);o<r.length;o++)t[o]=r[o];
return t}return Array.from(r)}
// n(r,o): bit-mixing round. o encodes a schedule in 3-char groups
// "<op1><op2><amt>": op2 '+' selects >>> else <<, op1 '+' selects add
// (masked to 32 bits) else xor; amt is a digit or a-z (charCode-87).
function n(r,o){for(var t=0;t<o.length-2;t+=3){var a=o.charAt(t+2);a=a>="a"?a.charCodeAt(0)-87:Number(a),a="+"===o.charAt(t+1)?r>>>a:r<<a,r="+"===o.charat===undefined?r:r
}return r}
// e(r): trims long input to a 30-char sample (first 10 + middle 10 + last 10,
// surrogate-pair aware), UTF-8 encodes it into byte array S, then folds the
// bytes through n() with schedules "+-a^+6" and "+-3^+b+-f" built from
// fromCharCode, xors with the second half of the gtk, and clamps to 6 digits.
function e(r) {var o = r.match(/[\uD800-\uDBFF][\uDC00-\uDFFF]/g);if (null === o) {var t = r.length;t > 30 && (r = "" + r.substr(0, 10) + r.substr(Math.floor(t / 2) - 5, 10) + r.substr( - 10, 10))} else {for (var e = r.split(/[\uD800-\uDBFF][\uDC00-\uDFFF]/), C = 0, h = e.length, f = []; h > C; C++)"" !== e[C] && f.push.apply(f, a(e[C].split(""))),C !== h - 1 && f.push(o[C]);var g = f.length;g > 30 && (r = f.slice(0, 10).join("") + f.slice(Math.floor(g / 2) - 5, Math.floor(g / 2) + 5).join("") + f.slice( - 10).join(""))}var u = void 0,l = "" + String.fromCharCode(103) + String.fromCharCode(116) + String.fromCharCode(107);u = null !== i ? i: (i = window[l] || "") || "";for (var d = u.split("."), m = Number(d[0]) || 0, s = Number(d[1]) || 0, S = [], c = 0, v = 0; v < r.length; v++) {var A = r.charCodeAt(v);128 > A ? S[c++] = A: (2048 > A ? S[c++] = A >> 6 | 192 : (55296 === (64512 & A) && v + 1 < r.length && 56320 === (64512 & r.charCodeAt(v + 1)) ? (A = 65536 + ((1023 & A) << 10) + (1023 & r.charCodeAt(++v)), S[c++] = A >> 18 | 240, S[c++] = A >> 12 & 63 | 128) : S[c++] = A >> 12 | 224, S[c++] = A >> 6 & 63 | 128), S[c++] = 63 & A | 128)}for (var p = m,F = "" + String.fromCharCode(43) + String.fromCharCode(45) + String.fromCharCode(97) + ("" + String.fromCharCode(94) + String.fromCharCode(43) + String.fromCharCode(54)), D = "" + String.fromCharCode(43) + String.fromCharCode(45) + String.fromCharCode(51) + ("" + String.fromCharCode(94) + String.fromCharCode(43) + String.fromCharCode(98)) + ("" + String.fromCharCode(43) + String.fromCharCode(45) + String.fromCharCode(102)), b = 0; b < S.length; b++) p += S[b],p = n(p, F);return p = n(p, D),p ^= s,0 > p && (p = (2147483647 & p) + 2147483648),p %= 1e6,p.toString() + "." + (p ^ m)}

有道翻译

单纯翻译:

__author__ = 'hugowen'
# -*- coding:utf-8 -*-
'''
[33]python Web 框架:Tornado
https://blog.csdn.net/xc_zhou/article/details/80637714
https://pypi.org/project/tornado/
'''
from urllib.parse import quote

from bs4 import BeautifulSoup
import tornado.httpclient


def is_chinese(uchar):
    """Return True if the first character of *uchar* is a CJK unified ideograph.

    The original compared the whole string lexicographically against the
    range bounds, which misclassifies multi-character input whose first
    character sits at a range boundary (e.g. '\u9fa5x') and is undefined
    for empty input; checking only the first character fixes both while
    agreeing on all typical queries.
    """
    return bool(uchar) and u'\u4e00' <= uchar[0] <= u'\u9fa5'


if __name__ == "__main__":
    # One-shot dictionary lookup: scrape dict.youdao.com's search page and
    # print the translations found in the first 'trans-container' block.
    cli = tornado.httpclient.HTTPClient()
    link = 'http://dict.youdao.com/search?q='
    search = input('search: ')
    # Percent-encode the query: a raw CJK term in the URL is not a valid URI.
    link += quote(search)
    data = cli.fetch(link)
    body = data.body
    # Name the parser explicitly (matches the logging variant of this script
    # and avoids bs4's parser-guessing warning / platform-dependent choice).
    soup = BeautifulSoup(body, "html.parser")
    group = soup.find_all(class_='trans-container')
    if is_chinese(search):
        # Chinese -> English page layout: pronunciation span + linked titles.
        content = group[0].find('ul').find('p')
        print(content.find_all('span')[0].get_text())
        for ele in content.find_all(class_='contentTitle'):
            print(ele.find('a').get_text())
    else:
        # English -> Chinese layout: one <li> per translation sense.
        content = group[0].find('ul').find_all('li')
        for ele in content:
            print(ele.get_text())

翻译并记录翻译日志: 输出到 words.md 文件中

__author__ = 'hugowen'
# -*- coding:utf-8 -*-
'''
[33]python Web 框架:Tornado
https://blog.csdn.net/xc_zhou/article/details/80637714
https://pypi.org/project/tornado/
'''
from urllib.parse import quote

from bs4 import BeautifulSoup
import tornado.httpclient


def is_chinese(uchar):
    """Return True if the first character of *uchar* is a CJK unified ideograph.

    The original compared the whole string lexicographically against the
    range bounds, which misclassifies multi-character input whose first
    character sits at a range boundary (e.g. '\u9fa5x') and is undefined
    for empty input; checking only the first character fixes both while
    agreeing on all typical queries.
    """
    return bool(uchar) and u'\u4e00' <= uchar[0] <= u'\u9fa5'


def translate(search):
    """Look up *search* on dict.youdao.com and return a list of translations.

    Scrapes the first 'trans-container' block of the search-result page.
    For a Chinese query the pronunciation span is printed as a side effect
    (original behavior) and the linked English titles are returned; for a
    non-Chinese query each <li> translation sense is returned.
    """
    cli = tornado.httpclient.HTTPClient()
    link = 'http://dict.youdao.com/search?q='
    # Percent-encode the query: a raw CJK term in the URL is not a valid URI.
    link += quote(search)
    data = cli.fetch(link)
    body = data.body
    soup = BeautifulSoup(body, "html.parser")
    group = soup.find_all(class_='trans-container')
    result = []
    if is_chinese(search):
        content = group[0].find('ul').find('p')
        print(content.find_all('span')[0].get_text())
        for ele in content.find_all(class_='contentTitle'):
            result.append(ele.find('a').get_text())
    else:
        content = group[0].find('ul').find_all('li')
        for ele in content:
            result.append(ele.get_text())
    return result


if __name__ == "__main__":
    # REPL: translate each query, append it as a markdown checklist section
    # to words.md, and echo the results. 'Q'/'q' quits.
    while True:
        search = input('search: ')
        if search in ['Q', 'q']:
            break
        result = translate(search)
        # Explicit UTF-8: the file mixes '▌' and CJK text, which would fail
        # (or silently vary) under a non-UTF-8 platform default encoding.
        with open('words.md', 'a', encoding='utf-8') as f:
            f.write('###  ▌' + search + '\n')
            for r in result:
                f.write('- [ ] ' + r + '\n')
                print('▌> ' + r)

python 百度翻译 有道翻译相关推荐

  1. Python 打造基于有道翻译的命令行翻译工具(命令行爱好者必备)

    Python 打造基于有道翻译的命令行翻译工具(命令行爱好者必备) 之前基于百度写了一篇博文 Python 打造基于百度翻译的命令行翻译工具,本来这工具用得挺好的.但是没想到,近期出于不知道啥原因,不 ...

  2. python实战爬虫有道翻译与解决{“errorCode”:50}反爬虫问题

    title: python实战爬虫有道翻译与解决有道翻译反爬虫问题 date: 2020-03-22 20:21:12 description: 最近在学Python,一直没有尝试过实战.于是昨天试了 ...

  3. python有道翻译接口-Python通过调用有道翻译api实现翻译功能示例

    本文实例讲述了Python通过调用有道翻译api实现翻译功能.分享给大家供大家参考,具体如下: 通过调用有道翻译的api,实现中译英.其他语言译中文 Python代码: # coding=utf-8 ...

  4. python爬虫模拟有道翻译

    python爬虫模拟有道翻译 案例目的: 通过模拟有道翻译,介绍携带form表单发送post请求以及破解form表单中的加密数据. 案例实现功能: 模拟有道翻译,实现中英互译. 爬取过程分析: 找到目 ...

  5. 基于python爬取有道翻译,并在线翻译

    基于python爬取有道翻译,并在线翻译 由于我也是爬虫新学者,有什么做的不对的请多加包涵 我们需要使用的库如下 from urllib import request import urllib im ...

  6. Python爬虫破解有道翻译

    有道翻译是以异步方式实现数据加载的,要实现对此类网站的数据抓取,其过程相对繁琐,本节我以有道翻译为例进行详细讲解. 通过控制台抓包,我们得知了 POST 请求的参数以及相应的参数值,如下所示: 图1: ...

  7. Python 编写一个有道翻译的 workflow 教程

    最近使用有道翻译的 workflow 总是翻译不了,可能是 appKey 失效了或者超过调用上限,所以打算自己实现一个. 创建 workflow 打开 Alfred3 的 Preferences,选择 ...

  8. 教你用Python爬虫自制有道翻译词典

    Python爬虫能够实现的功能有很多,就看你如何去使用它了.今天小千就来教大家如何去利用Python爬虫自制一个有道翻译词典. 首先打开有道翻译页面,尝试输入hello,就出现翻译了,可以自动翻译.有 ...

  9. python如何爬有道翻译_Python网络爬虫(八) - 利用有道词典实现一个简单翻译程序...

    目录: 1.爬虫前的分析 因为要实现有道翻译的翻译功能,就需要找到它的接口,打开审查元素,来到网络监听窗口(Network),查看API接口. 我们可以找到有道翻译的API接口,同时是以Post方式提 ...

最新文章

  1. NSString属性什么时候用copy,什么时候用strong?
  2. 苹果后门、微软垄断与Linux缺位
  3. python【力扣LeetCode算法题库】20- 有效的括号(辅助栈)
  4. kpc v0.8.3发布,跨框架的组件库解决方案
  5. NYOJ 904 search 二分查找
  6. linux tty core code,linux tty core 源码分析(7)
  7. java中如何对汉字进行排序?
  8. 从github上克隆hibernate项目
  9. 至强cpu型号列表_装机必看——CPU型号参数详解
  10. 如何让DataGrid能够在客户端点击某一行的任意位置,而触发服务端的事件(Asp.net)...
  11. 书单丨把握Java技术发展的新趋势!
  12. flink sink jdbc没有数据_一套 SQL 搞定数据仓库?Flink 有了新尝试
  13. Newcoder lxh裁木棍 (不开long double见祖宗 ceil前不加long long也去
  14. linux中如何查看本机ip,Linux中如何查看本机IP地址呢?
  15. 实验四——反汇编工具的使用
  16. JS-part12.3-ES6- 箭头函数 / 函数的参数默认值 / 模板字符串 / 点点点运算符 / 解构赋值 / 对象的简写形式
  17. 初步了解FPGA中的HLS
  18. 网站使用第三方Steam帐号登录[OpenID]
  19. 软件项目管理 3.4.增量生存期模型
  20. 线性回归模型的度量参数1- SST SSR SSE R-Squared

热门文章

  1. 2022年GDCPC广东省大学生程序设计竞赛题解
  2. ssh密钥验证失败,是否启动ssh密钥代理
  3. 华为接近获准为伦敦地铁供应移动搜集
  4. 华为服务器修改登录密码,如何修改服务器的登录密码
  5. EEPROM的学习和使用方法
  6. Microsoft 加入HPC战团
  7. iOS开发-------- iPhone X代码适配 (齐刘海处理)
  8. jmeter-1-apache ant-集成测试
  9. Android managedQuery查询如果加入group by条件(及其猥琐的方法)
  10. x265源码分析:main函数及CLIOptions结构体解释