fund_alerts.py

from bs4 import BeautifulSoup
from urllib2 import urlopen
from time import strftime
import time


def send_email(user, pwd, recipient, subject, body):
    import smtplib

    gmail_user = user
    gmail_pwd = pwd
    FROM = user
    TO = recipient if type(recipient) is list else [recipient]
    SUBJECT = subject
    TEXT = body

    # Prepare actual message
    message = """From: %s\nTo: %s\nSubject: %s\n\n%s\n""" % (FROM, ", ".join(TO), SUBJECT, TEXT)
    try:
        server = smtplib.SMTP("smtp.gmail.com", 587)
        server.ehlo()
        server.starttls()
        server.login(gmail_user, gmail_pwd)
        server.sendmail(FROM, TO, message)
        server.close()
        print 'successfully sent the mail'
    except:
        print "failed to send mail"
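
# Example usage (a sketch; the addresses and password are placeholders, and
# Gmail may require an app-specific password or "less secure app" access for a
# plain SMTP login like the one above):
#   send_email('me@gmail.com', 'my-app-password', 'you@example.com',
#              'Test subject', 'Hello from fund_alerts')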


def allianz():
    try:
        url = 'http://www.allianzgi.hk/en/retail/fund-prices?type=&sort=&order=&page_no=&action=&fund_series=3&fund_name=74&fund_class=SHARE_CLASS_RETAIL_CLASS_AM&fund_currency=CCY_DISPLAY_H2_AUD&fund_name_text='
        html = urlopen(url).read()
        soup = BeautifulSoup(html, 'html5lib')
        # map td class names on the page to display labels
        lookups = {'unit_price': 'Unit Price', 'mth_nav': '12 month NAV',
                   'daily_change_pct': 'daily chg%', 'valuation_date': 'valuation date'}
        tvs = soup.findAll('td', [k for k in lookups.keys()])
        fund = soup.findAll('td', 'fund_name')[0].a.text
        purchase_px = 10.07
        curr_px = float(soup.find('td', 'unit_price').text)
        percent_chg = (curr_px - purchase_px) / purchase_px * 100
        s = '*****************************************\n'
        s += ' %s\n' % fund
        s += '\n'
        s += ', '.join('%s: %s' % (lookups.get(e.get('class')[0], 'Daily Chg%'), e.text) for e in tvs)
        s += '\n\n'
        s += ' purchase price = %0.2f (%0.2f%%)\n' % (purchase_px, percent_chg)
        s += '\n'
        s += '\n'
        s += ' %s\n' % url
        s += '\n'
        s += '*****************************************\n'
        return s
    except:
        return 'error extracting allianz fund price'


def cn_huaxia():
    try:
        url = 'http://finance.sina.com.cn/fund/quotes/000011/bc.shtml'
        html = urlopen(url).read()
        soup = BeautifulSoup(html, 'html5lib')
        # lookups = {'unit_price': 'Unit Price', 'mth_nav': '12 month NAV', 'daily_change_pct': 'daily chg%', 'valuation_date': 'valuation date'}
        # fundname = soup.findAll('div', {'class': 'top_fixed_fund_name'})
        # funddiv = soup.findAll('div', {'class': 'top_fixed_fund_dwjz'})
        blk = soup.findAll('div', {'class': 'fund_info_blk2'})
        fundpx = blk[0].findAll('div', {'class': 'fund_data_item'})[0].find('span', {'class': 'fund_data'}).text
        return 'huaxia %s' % (fundpx)
    except:
        return 'error extracting cn fund price'


def bct_funds():
    try:
        url = 'http://www.bcthk.com/BCT/html/eng/page/WMP0240/FIF0100/fund.jsp'
        html = urlopen(url).read()
        soup = BeautifulSoup(html, 'html5lib')
        lookups = ['BCT (Pro) Absolute Return Fund', 'BCT (Pro) Global Bond Fund',
                   'BCT (Pro) Hong Kong Dollar Bond Fund', 'BCT (Pro) MPF Conservative Fund']
        # for e in soup.findAll('a', 'green03'):
        #     print e.text
        anchors = filter(lambda x: x.text[:(x.text.find('Fund') + 4)] in lookups, soup.findAll('a', 'green03'))

        def fundinfo_extract(felem):
            node = felem.parent.parent.findAll('td')
            e = {}
            e['name'] = felem.text[:(felem.text.find('Fund') + 4)]
            e['bid'] = node[2].text
            e['ask'] = node[3].text
            return e

        finfo = map(fundinfo_extract, anchors)
        return ''.join('%s:%s\n' % (e['name'], e['ask']) for e in finfo)
    except:
        return 'error extracting bct fund price'
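
# Note (an assumption about the BCT page layout): fundinfo_extract expects each
# fund link to sit inside a table row whose <td> cells carry the bid price at
# index 2 and the ask price at index 3; if the page layout changes, those
# indices need updating.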


def send_daily_alert():
    user = 'cigarbar@gmail.com'
    pwd = 'taipeii0i'
    recipient = 'larry1chan@gmail.com'
    body = '%s\n%s\n%s' % (allianz(), cn_huaxia(), bct_funds())
    subject = 'Daily fund price'
    send_email(user, pwd, recipient, subject, body)
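
# send_daily_alert() needs an external trigger; a minimal sketch of one way to
# schedule it (assumes a Unix host with cron, this file saved as
# /path/to/fund_alerts.py, and send_daily_alert() uncommented in __main__):
#   30 18 * * 1-5  python /path/to/fund_alerts.py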


def retrieve_hk_holidays(year):
    month_names = ['January', 'February', 'March', 'April', 'May', 'June',
                   'July', 'August', 'September', 'October', 'November', 'December']
    try:
        url = 'http://www.gov.hk/en/about/abouthk/holiday/{{year}}.htm'
        url = url.replace('{{year}}', str(year))
        html = urlopen(url).read()
        soup = BeautifulSoup(html, 'html5lib')
        tds = soup.findAll('h3')[0].parent.findAll('td', 'date')
        # each date cell appears to be '<day> <month name>'; skip the first row
        d1 = map(lambda x: (x.text.split(' ')[0], x.text.split(' ')[1]), tds[1:])
        # format as YYYYMMDD, using the requested year rather than a hard-coded one
        return map(lambda x: strftime('%Y%m%d', time.strptime('%s %s %s' % (month_names.index(x[1]) + 1, x[0], year), "%m %d %Y")), d1)
    except:
        print 'error retrieving HK holidays'
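
# Example (hedged): assuming the gov.hk holiday page for the requested year
# keeps the same table markup, retrieve_hk_holidays(2015) returns the holidays
# as 'YYYYMMDD' strings, e.g. '20150101' for New Year's Day.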


if __name__ == '__main__':
    # send_daily_alert()
    print retrieve_hk_holidays(2015)
    # print allianz()
    # print cn_huaxia()
    # print bct_funds()
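
# Possible wiring (a sketch, not part of the original flow): skip the alert on
# weekends and HK public holidays before emailing.
#   today = strftime('%Y%m%d')
#   holidays = retrieve_hk_holidays(strftime('%Y')) or []
#   if strftime('%w') not in ('0', '6') and today not in holidays:
#       send_daily_alert()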