script.py
import json

import requests
import pandas as pd
from bs4 import BeautifulSoup as bs

# Scrape the Arabic Wikipedia list of the 100 best Arabic novels.
domain = 'https://ar.wikipedia.org'
url = "https://ar.wikipedia.org/wiki/%D9%82%D8%A7%D8%A6%D9%85%D8%A9_%D8%A3%D9%81%D8%B6%D9%84_%D9%85%D8%A6%D8%A9_%D8%B1%D9%88%D8%A7%D9%8A%D8%A9_%D8%B9%D8%B1%D8%A8%D9%8A%D8%A9"

response = requests.get(url)
soup = bs(response.content, features='html.parser')

# The ranking is the first wikitable on the page.
table = soup.select('table.wikitable')[0]

# Column names come from the header cells; three extra columns hold the
# hyperlinks found in each row ("book link", "author link", "country link").
columns = [th.get_text(strip=True) for th in table.find_all("th")]
columns += ["رابط الكتاب", "رابط المؤلف", "رابط البلد"]

data = []
for tr in table.find("tbody").find_all("tr"):
    tds = tr.find_all('td')
    if not tds:
        continue  # skip the header row, which has <th> cells only
    cells = []
    links = []
    for td in tds:
        cells.append(td.get_text(strip=True))
        a = td.find('a')
        if a:
            links.append(domain + a['href'])
    data.append(cells + links)

# Save the table to Excel (requires openpyxl), then reload it and
# dump the records as JSON, keeping the Arabic text unescaped.
df = pd.DataFrame(data, columns=columns)
df.to_excel("data.xlsx", index=False)

dict_data = pd.read_excel("data.xlsx")
rec = dict_data.to_dict("index")
final = json.dumps(rec, ensure_ascii=False).encode('utf8')
print(final)

# with open('test.txt', 'w') as fp:
#     fp.write(str(rec))