This is a personal record of my Python programming experiences. It will let me see how I am progressing as I learn a new language.
Sunday, March 31, 2013
My first Server written in Python
This is my first successful server program written in Python. It helps me understand how servers and clients work. Firefox would not connect, but Chromium worked just fine. I typed "localhost:1088" into my browser and the server sent a small HTML file back.
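The listing itself didn't make it into this post, so here is a minimal sketch of the kind of server I mean, assuming a plain socket-based approach: it listens on port 1088 (the address I typed into the browser) and answers every request with a small HTML page. The response text and the socket details are fill-ins, not the exact code I ran that day.

#Minimal sketch of a socket-based server (assumed approach; not the exact code from that day)
import socket

html = "<html><body><h1>Hello from my Python server</h1></body></html>"

server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
server.bind(('localhost', 1088))    #the port I typed into the browser
server.listen(1)
print('Serving on http://localhost:1088 ...')

while True:
    conn, addr = server.accept()
    request = conn.recv(1024)       #read the browser request; contents are ignored here
    #send a minimal HTTP response so the browser can render the page
    conn.sendall('HTTP/1.1 200 OK\r\n'
                 'Content-Type: text/html\r\n'
                 'Content-Length: ' + str(len(html)) + '\r\n'
                 '\r\n' + html)
    conn.close()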
Saturday, March 30, 2013
I added a GUI to my webpage scraper
#!/usr/bin/python
#Appraiser Data scraper example.
#Written by Steve Atchison March 23 2013
import urllib2
from Tkinter import *

def getwebhtml():
    #This gets the html text from the appraiser webpage using the parcel number entered by the user and assigns it to the_page
    pid = pinnum.get()
    req = urllib2.Request('http://www.snco.us/ap/R_prop/Comp.asp?PRCL_ID='+str(pid)+'&PRCL_CD=01&YEAR=2013')
    response = urllib2.urlopen(req)
    the_page = response.read()
    startchar = the_page.find('PID1=')
    #comparable pin
    comp1 = the_page[int(startchar):int(startchar)+21]
    #sale price
    saleprice = the_page[int(startchar+88):int(startchar)+99]
    #sale date
    saledate = the_page[int(startchar+63):int(startchar)+67]
    txtbox.insert("1.0",comp1+'\n')
    txtbox.insert("2.0",'Date: '+saledate+'\n')
    txtbox.insert("3.0",'Sale price:'+saleprice+'\n\n')
    startchar = the_page.find('PID2=')
    #comparable pin
    comp1 = the_page[int(startchar):int(startchar)+21]
    #sale price
    saleprice = the_page[int(startchar+88):int(startchar)+99]
    #sale date
    saledate = the_page[int(startchar+63):int(startchar)+67]
    txtbox.insert("1.0",comp1+'\n')
    txtbox.insert("2.0",'Date: '+saledate+'\n')
    txtbox.insert("3.0",'Sale price:'+saleprice+'\n\n')
    startchar = the_page.find('PID3=')
    #comparable pin
    comp1 = the_page[int(startchar):int(startchar)+21]
    #sale price
    saleprice = the_page[int(startchar+88):int(startchar)+99]
    #sale date
    saledate = the_page[int(startchar+63):int(startchar)+67]
    txtbox.insert("1.0",comp1+'\n')
    txtbox.insert("2.0",'Date: '+saledate+'\n')
    txtbox.insert("3.0",'Sale price:'+saleprice+'\n\n')
    startchar = the_page.find('PID4=')
    #comparable pin
    comp1 = the_page[int(startchar):int(startchar)+21]
    #sale price
    saleprice = the_page[int(startchar+88):int(startchar)+99]
    #sale date
    saledate = the_page[int(startchar+63):int(startchar)+67]
    txtbox.insert("1.0",comp1+'\n')
    txtbox.insert("2.0",'Date: '+saledate+'\n')
    txtbox.insert("3.0",'Sale price:'+saleprice+'\n\n')
    startchar = the_page.find('PID5=')
    #comparable pin
    comp1 = the_page[int(startchar):int(startchar)+21]
    #sale price
    saleprice = the_page[int(startchar+88):int(startchar)+99]
    #sale date
    saledate = the_page[int(startchar+63):int(startchar)+67]
    txtbox.insert("1.0",comp1+'\n')
    txtbox.insert("2.0",'Date: '+saledate+'\n')
    txtbox.insert("3.0",'Sale price:'+saleprice+'\n\n')

def cleartext():
    txtbox.delete("1.0", END)
    pinnum.delete(0, END)

root = Tk()
btn = Button(root, text="Get Comparables", command=getwebhtml)
btn.pack()
btn2 = Button(root, text="Clear", command=cleartext)
btn2.pack()
pinnum = Entry(root)
pinnum.pack()
txtbox = Text(root)
txtbox.pack()
root.mainloop()
Eric Python Editor
I tried a new Python editor called Eric, and I like it a lot. Eric has lots of cool features like code folding and smart indenting, and it can export my code to several formats, including PDF.
Unfortunately, my netbook's screen is not big enough for it, but it is great on a larger display. I think I have stepped up to the next level of Python IDE, but I still like good old IDLE too.
Sunday, March 24, 2013
My new and improved Webpage scraper
#!/usr/bin/python
#Appraiser Data scraper
#Written by Steve Atchison March 23 2013
import urllib2

pid = raw_input('Enter parcel number: ')
#This gets the html text from the appraiser webpage using the parcel number entered by the user.
req = urllib2.Request('http://www.snco.us/ap/R_prop/Comp.asp?PRCL_ID='+str(pid)+'&PRCL_CD=01&YEAR=2013')
response = urllib2.urlopen(req)
the_page = response.read()
#erase all data in the scrape_data.txt file
f2 = open("scrape_data.txt",'w')
f2.close()

def getmoredata(start):
    #This function grabs the sale price and date from the appraiser webpage and saves them to scrape_data.txt
    print('--------------------------------')
    #comparable PIN
    thepin = the_page[int(start+5):int(start)+21]
    print(thepin)
    #actual sale price
    saleprice = the_page[int(start+88):int(start)+99]
    print('Actual sales price = %s' % saleprice)
    #sale date
    saledate = the_page[int(start+63):int(start)+67]
    print('Sale date = %s' % saledate)
    f2 = open("scrape_data.txt",'a')
    f2.write(thepin + ' ')
    f2.write(saleprice + ' ')
    f2.write(saledate + '\n')
    f2.close()

#Finds the beginning point of each parcel number in the html file,
#and then calls the getmoredata function
startchar = the_page.find('PID1=')
getmoredata(startchar)
startchar = the_page.find('PID2=')
getmoredata(startchar)
startchar = the_page.find('PID3=')
getmoredata(startchar)
startchar = the_page.find('PID4=')
getmoredata(startchar)
startchar = the_page.find('PID5=')
getmoredata(startchar)
Friday, March 22, 2013
My web scraper program, Works Great!
#Appraiser Data scraper example.
#Written by Steve Atchison 3/22/2013
import urllib2
import arcpy

mxd = arcpy.mapping.MapDocument('CURRENT')
df = arcpy.mapping.ListDataFrames(mxd)[0]

def getsalesdate():
    #get parcel number
    parnum1 = the_page[int(startchar+5):int(startchar)+21]
    print(parnum1)
    #actual sale price
    saleprice = the_page[int(startchar+88):int(startchar)+99]
    print('Actual sales price = %s' % saleprice)
    #get sale date
    saledate = the_page[int(startchar+63):int(startchar)+67]
    print('Sale date = %s' % saledate)
    print('-------------------------------------------------')

for row in arcpy.SearchCursor("Owners"):
    parnum = row.getValue('PIN')
    req = urllib2.Request('http://www.snco.us/ap/R_prop/Comp.asp?PRCL_ID='+str(parnum)+'&PRCL_CD=01&YEAR=2013')
    response = urllib2.urlopen(req)
    the_page = response.read()
    startchar = the_page.find('PID1=')
    parnum1 = the_page[int(startchar+5):int(startchar)+21]
    getsalesdate()
    startchar = the_page.find('PID2=')
    parnum2 = the_page[int(startchar+5):int(startchar)+21]
    getsalesdate()
    startchar = the_page.find('PID3=')
    parnum3 = the_page[int(startchar+5):int(startchar)+21]
    getsalesdate()
    startchar = the_page.find('PID4=')
    parnum4 = the_page[int(startchar+5):int(startchar)+21]
    getsalesdate()
    startchar = the_page.find('PID5=')
    parnum5 = the_page[int(startchar+5):int(startchar)+21]
    getsalesdate()

arcpy.SelectLayerByAttribute_management("Owners","NEW_SELECTION","PIN="+parnum1)
arcpy.SelectLayerByAttribute_management("Owners","ADD_TO_SELECTION","PIN="+parnum2)
arcpy.SelectLayerByAttribute_management("Owners","ADD_TO_SELECTION","PIN="+parnum3)
arcpy.SelectLayerByAttribute_management("Owners","ADD_TO_SELECTION","PIN="+parnum4)
arcpy.SelectLayerByAttribute_management("Owners","ADD_TO_SELECTION","PIN="+parnum5)
df.zoomToSelectedFeatures()
#Select a parcel from the Owners feature class before running this code!!