import urllib2
from BeautifulSoup import BeautifulSoup  # BeautifulSoup 3; for bs4 use "from bs4 import BeautifulSoup"

# Fetch and parse the page ('url' is assumed to be defined earlier).
f = urllib2.urlopen(url)
req = f.read()
soup = BeautifulSoup(req)

# Submission id: second comma-separated field of the element named "readonlycounter2".
content = soup.findAll(attrs={"name": "readonlycounter2"})
subId = content[0].string.split(',')[1]
# Submission name: text of the <span> inside the page's <h1>.
subName = soup.html.body.h1.span.string

# Description cells (class "subdes_td"): the first holds the type, the second the next description field.
content = soup.findAll(attrs={"class": "subdes_td"})
subType = content[0].string
subLeg = content[1].string

# Among the cells with colspan="3", the third holds the time and the eighth wraps the file field in a <div>.
content = soup.findAll(attrs={"colspan": "3"})
subTime = content[2].string
subFile = content[7].div.string