# Parse a local HTML file and print every <li> element.
import bs4

# Context manager closes the file (original leaked the handle);
# parser name is 'html.parser' — 'html.perser' is not a valid parser
# and raises FeatureNotFound.
with open('index.html', 'r', encoding='utf-8') as html:
    soup = bs4.BeautifulSoup(html, 'html.parser')
li = soup.find_all('li')
print(li)
スクレイピング文字列だけ取り出す
import bs4

# Extract only the text content of each <li> element.
with open('index.html', 'r', encoding='utf-8') as html:
    soup = bs4.BeautifulSoup(html, 'html.parser')  # fixed 'html.perser' typo

items = []  # renamed from `list` to avoid shadowing the builtin
for li in soup.find_all('li'):  # tag name must be the string 'li', not the (undefined) variable li
    items.append(li.string)
print(items)  # print the collected strings (original printed only the last tag)
入れ子要素の取得
import bs4

# Nested elements: collect the text of every <li> inside every <ol>.
with open('index.html', 'r', encoding='utf-8') as html:
    soup = bs4.BeautifulSoup(html, 'html.parser')  # fixed 'html.perser' typo

items = []  # renamed from `list` to avoid shadowing the builtin
for ol in soup.find_all('ol'):
    for li in ol.find_all('li'):
        items.append(li.string)
print(items)
条件を指定して要素を取得する
import bs4

# Filter by attribute: only <ol class="dessert"> elements are searched.
with open('index.html', 'r', encoding='utf-8') as html:
    soup = bs4.BeautifulSoup(html, 'html.parser')  # fixed 'html.perser' typo

items = []  # renamed from `list` to avoid shadowing the builtin
for ol in soup.find_all('ol', class_='dessert'):  # class_ because `class` is a keyword
    for li in ol.find_all('li'):
        items.append(li.string)
print(items)
URLにアクセスしてHTMLを取得する
import bs4
import requests  # HTTP client
import time

# Fetch one page over HTTP and collect the text of <span class="sub"> elements.
URL = 'http://www.otsuma.ac.jp/news'
r = requests.get(URL)
soup = bs4.BeautifulSoup(r.content, 'html.parser')  # fixed 'html.perser' typo

titles = []  # renamed from `list` to avoid shadowing the builtin
for span in soup.find_all('span', class_='sub'):
    titles.append(span.string)
time.sleep(3600)  # wait 3600 s — NOTE(review): a delay only makes sense between repeated requests; after a single fetch it just blocks the script for an hour
print(titles)
複数ページにアクセスする
import bs4
import requests  # HTTP client
import time

# Crawl pages .../page1 through .../page9 and collect <span class="sub"> text.
URL = 'http://example.com/news/page'

titles = []  # initialized ONCE, before the loop: the original reset it every page, discarding all earlier pages' results
for count in range(1, 10):
    r = requests.get(URL + str(count))
    soup = bs4.BeautifulSoup(r.content, 'html.parser')  # fixed 'html.perser' typo
    for span in soup.find_all('span', class_='sub'):
        titles.append(span.string)
    time.sleep(5)  # politeness delay between requests (the duplicate 3600 s sleep was removed)
print(titles)
データベースのimport
基本パターン
import sqlite3

# Basic pattern: connect, run a query, read the rows, close.
con = sqlite3.connect('data.db')  # connect to the database file
try:
    c = con.cursor()  # create a Cursor object (original comment was mojibake: Cursor帯ジェクト)
    c.execute('SELECT * FROM users')
    rows = c.fetchall()  # the original executed the SELECT but never read the result set
finally:
    con.close()  # SELECT modifies nothing, so no commit() is needed; always close
# Echo-server main loop: multiplex the listening socket (`svr`) and all
# connected client sockets (`socks`, both presumably created above — not
# visible in this chunk) with select().
while True:
    # Block until at least one socket is readable.
    # Fixed undefined name: `sockes` -> `socks` (typo; `socks` is the list used everywhere else).
    r, w, x = select.select(socks, [], [])
    for s in r:
        if s is svr:
            # The listening socket is readable: accept the new client.
            c, addr = svr.accept()
            socks.append(c)
        else:
            msg = s.recv(1024)
            if len(msg) != 0:
                print(str(msg, encoding='utf-8'))
                s.send(msg)  # echo the data back to the sender
            else:
                # recv() returned b'': the peer closed the connection.
                socks.remove(s)
                s.close()
# Chat-server main loop: broadcast every received message to all clients;
# the text 'bye' is treated as a disconnect request.
while True:
    # Fixed undefined name: `sockes` -> `socks`.
    r, w, x = select.select(socks, [], [])
    for s in r:
        if s is svr:
            c, addr = svr.accept()
            socks.append(c)
        else:
            msg = s.recv(1024)
            txt = str(msg, encoding='utf-8')
            if txt != 'bye':
                # Broadcast to every socket except the listener (the sender
                # also receives its own message, as in the original).
                # Loop variable renamed from `c`, which clobbered the
                # client socket bound by accept() above.
                for peer in socks:
                    if peer is not svr:
                        peer.send(msg)
            else:
                print('切断要求')
                s.send(msg)  # echo the 'bye' back before closing
                socks.remove(s)
                s.close()
# Element-wise NumPy array arithmetic (add/subtract, scalar multiply/divide,
# element-wise multiply/divide).
a = np.array([1, 2, 3])
b = np.array([4, 5, 6])
print(a + b)  # [5 7 9]
print(a - b)  # [-3 -3 -3]
print(a * 2)  # [2 4 6]
print(a / 2)  # [0.5 1.  1.5]  (original line lacked both print and the '#' -> SyntaxError)
print(a * b)  # [4 10 18]     (original bare expression discarded the result)
print(a / b)  # [0.25 0.4 0.5]
加減、スカラー倍、乗除算
2列
# Two 2x2 matrices, rebinding a and b for the two-dimensional examples.
a = np.array([[1, 2],
              [3, 4]])
b = np.array([[5, 6],
              [7, 8]])
# Face detection with the bundled pre-trained frontal-face Haar cascade.
# `img` is assumed to be an image loaded earlier (not visible in this chunk).
cas = cv2.CascadeClassifier(cv2.data.haarcascades + 'haarcascade_frontalface_alt.xml')
# Fixed method-name typo: detctMultiScale -> detectMultiScale.
# minSize=(100,100) ignores detections smaller than 100x100 pixels.
faces = cas.detectMultiScale(img, minSize=(100, 100))
動画の表示
import cv2

# Play a video file frame by frame; Esc (key code 27) quits early.
v = cv2.VideoCapture('ball.mp4')
while True:
    ret, frame = v.read()
    if not ret:  # idiomatic truthiness test instead of `ret == False`; read failed -> end of stream
        break
    cv2.imshow('image', frame)
    if cv2.waitKey(10) == 27:  # wait up to 10 ms for a key; 27 == Esc
        break
v.release()
cv2.destroyAllWindows()
動物体検出
import cv2

# Moving-object detection by background subtraction: grayscale each frame
# and display its absolute difference from a static background image.
back = cv2.imread('background.png')
back = cv2.cvtColor(back, cv2.COLOR_BGR2GRAY)
v = cv2.VideoCapture('ball.mp4')
while True:
    ret, frame = v.read()
    if not ret:  # end of stream
        break
    frame = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
    diff = cv2.absdiff(frame, back)
    # Show the difference image: the original computed `diff` but then
    # displayed `frame`, leaving the detection result unused.
    cv2.imshow('image', diff)
    if cv2.waitKey(10) == 27:  # Esc quits
        break
v.release()
cv2.destroyAllWindows()
機械学習ライブラリ
畳み込みニューラルネットワーク
pip install scikit-learn
pip install tensorflow
pip install keras