# -*- coding: utf-8 -*-
from urllib.request import urlopen
from bs4 import BeautifulSoup as bs
import re
import pymysql

# Fetch the page and decode the response as UTF-8
resp = urlopen('https://en.wikipedia.org/wiki/Main_Page').read().decode('utf-8')

# Parse the HTML with BeautifulSoup
soup = bs(resp, 'html.parser')

# Collect every <a> tag whose href attribute starts with /wiki/
listUrls = soup.find_all('a', href=re.compile('^/wiki/'))

# Open one database connection for all inserts
conn = pymysql.Connect(
    host='localhost',
    port=3306,
    user='root',
    password='root',
    db='wikiurl',
    charset='utf8'
)

try:
    # "with" closes the cursor automatically when the block exits
    with conn.cursor() as cur:
        sql = "insert into `urls`(`urlname`,`urlhref`) values(%s,%s)"
        for url in listUrls:
            # Skip links that point to image files
            if not re.search(r'\.(jpg|JPG)$', url['href']):
                # .string only returns a single child string; get_text() returns all text under the tag
                print(url.get_text(), '< - - - >', 'https://en.wikipedia.org' + url['href'])
                # Execute the INSERT statement
                cur.execute(sql, (url.get_text(), 'https://en.wikipedia.org' + url['href']))
                # Commit the transaction
                conn.commit()
finally:
    conn.close()
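
# The script above assumes a local MySQL database named `wikiurl` that already
# contains an `urls` table with `urlname` and `urlhref` columns. The exact
# schema is not part of the original source; a minimal sketch that matches the
# column names used in the INSERT statement might look like the following
# (the `id` column and the VARCHAR sizes are assumptions, adjust as needed):
#
#   CREATE DATABASE IF NOT EXISTS wikiurl CHARACTER SET utf8;
#   USE wikiurl;
#   CREATE TABLE IF NOT EXISTS urls (
#       id INT AUTO_INCREMENT PRIMARY KEY,
#       urlname VARCHAR(255),
#       urlhref VARCHAR(1000)
#   );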