import traceback

import pandas as pd
import requests
from bs4 import BeautifulSoup as bs
from pyrx import Ap, Ax, Db, Ed, Ge, Gi
# sample source https://www.scraperapi.com/blog/python-loop-through-html-table/
def scraper9000():
    """Scrape the employee demo table from datatables.net.

    Returns:
        dict[str, list[str]]: column name -> list of cell text values, one
        entry per ``<tr>`` in the table body.  The mixed key casing
        ('age'/'salary' lowercase) is kept intact because callers use the
        keys verbatim as column headers.
    """
    url = "https://datatables.net/examples/styling/stripe.html"
    # Timeout so a hung connection cannot block the caller forever.
    response = requests.get(url, timeout=30)
    soup = bs(response.text, "html.parser")
    table = soup.find("table", class_="stripe")
    data = {
        'Name': [],
        'Position': [],
        'Office': [],
        'age': [],
        'Start date': [],
        'salary': [],
    }
    for employee_data in table.find_all("tbody"):
        for row in employee_data.find_all("tr"):
            # Query the <td> cells once per row instead of six separate
            # find_all() traversals; dict insertion order matches the
            # original positional indexing 0..5.
            cells = row.find_all("td")
            for key, cell in zip(data, cells):
                data[key].append(cell.text)
    return data
def doit():
    """Scrape the demo table and append it to model space as a Db.Table.

    Sizes the table for one title row, one header row, and one row per
    scraped record.  Exceptions are printed rather than raised, since this
    is intended to run as a top-level command handler.
    """
    try:
        db = Db.curDb()
        df = pd.DataFrame(scraper9000())
        table = Db.Table()
        table.setDatabaseDefaults(db)
        # +2 rows: one for the title, one for the column headers.
        table.setSize(df.shape[0] + 2, df.shape[1])
        # Title row.
        table.setTextString(0, 0, "Scrape Master 9000")
        # Header row.
        headers = df.columns.values.tolist()
        for col, value in enumerate(headers):
            table.setTextString(1, col, "{}".format(value))
        # Data rows start below the title and header rows.
        datas = df.values.tolist()
        for row, data in enumerate(datas):
            for col, value in enumerate(data):
                table.setTextString(row + 2, col, "{}".format(value))
        model = Db.BlockTableRecord(db.modelSpaceId(), Db.OpenMode.kForWrite)
        model.appendAcDbEntity(table)
    except Exception:
        # print_exception(err) as a single positional arg needs Python
        # 3.10+; print_exc() is portable and prints the same traceback
        # for the exception currently being handled.
        traceback.print_exc()