test4.py

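"""Fetch daily prices through an Alphavantage wrapper, cache them in Redis,
and print the series as '[new Date(...), value]' rows (a format that looks
intended for a JavaScript charting library, though no front end lives in
this file).
"""
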
import json

import redis
from ystockquote import Alphavantage


def download_historical_px(code):
    # fetch the full daily adjusted series for `code` and cache it in Redis
    rs = redis.Redis('localhost', 6379, 0)
    av = Alphavantage()
    # d = av.time_series_daily_adjusted('AUDUSD=X', 'csv', 'full')
    d = av.time_series_daily_adjusted(code, 'csv', 'full')
    rs.set('ts-%s' % code, json.dumps(d))


def load_historical(code):
    # read the cached series back out of Redis
    rs = redis.Redis('localhost', 6379, 0)
    return json.loads(rs.get('ts-%s' % code))


def get_daily_percent_change(ts):
    # keep (date, close) pairs, dropping any row whose close is zero
    ts = [(row[0], float(row[4])) for row in ts if float(row[4]) != 0.0]
    ts0 = ts[:-1]
    ts1 = ts[1:]
    # print(ts0)  # debug
    # print(ts1)  # debug

    def compute_day_change(i):
        # return a tuple of (t+1 date, % change);
        # the series is sorted with the most recent items appearing first
        return (ts0[i][0], (ts0[i][1] - ts1[i][1]) / ts1[i][1])

    day_changes = [compute_day_change(i) for i in range(len(ts0))]
    return ', '.join('[new Date("%s"), %0.6f]' % (elem[0], elem[1] * 100)
                     for elem in day_changes)
    # alternative: change within each row, (close - open) / close:
    # return ', '.join('[new Date("%s"), %0.6f]' % (elem[0],
    #     (float(elem[4]) - float(elem[1])) / float(elem[4])) for elem in ts)
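

# Worked example of the pairing above, using hypothetical rows (newest
# first); row[0] is the date and row[4] the close, as in the real data:
#
#   ts  = [('2015-01-06', 101.0), ('2015-01-05', 100.0)]
#   ts0 = [('2015-01-06', 101.0)]         # prices at t+1
#   ts1 = [('2015-01-05', 100.0)]         # prices at t
#   day_changes = [('2015-01-06', 0.01)]  # (101.0 - 100.0) / 100.0 = +1%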


def get_daily_close(ts):
    # emit (date, close) rows in the same bracketed format as above
    return ', '.join('[new Date("%s"), %s]' % (elem[0], float(elem[4])) for elem in ts)


if __name__ == '__main__':
    # code = '^HSI'
    code = '000001.SS'
    download_historical_px(code)
    ts = load_historical(code)
    # keep rows from 2015 onward, and skip the first header row
    ts = [row for row in ts[1:] if int(row[0][0:4]) >= 2015]
    print(get_daily_close(ts))
    print(get_daily_percent_change(ts))
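
# Quick sanity check of the cached payload (assumes Redis on localhost:6379
# and that download_historical_px(code) has already run; 'ts-<code>' is the
# key scheme used above):
#
#   rs = redis.Redis('localhost', 6379, 0)
#   rows = json.loads(rs.get('ts-000001.SS'))
#   print(rows[0])  # the CSV header row that the main block skips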