 import dateutil.relativedelta as _relativedelta
 import numpy as _np
 import pandas as _pd
-import pandas as pd

 from . import shared, utils
 from .data import TickerData
@@ -159,8 +158,8 @@ def _get_1y_prices(self, fullDaysOnly=False):
             self._md = self._tkr.get_history_metadata()
             try:
                 ctp = self._md["currentTradingPeriod"]
-                self._today_open = pd.to_datetime(ctp["regular"]["start"], unit="s", utc=True).tz_convert(self.timezone)
-                self._today_close = pd.to_datetime(ctp["regular"]["end"], unit="s", utc=True).tz_convert(self.timezone)
+                self._today_open = _pd.to_datetime(ctp["regular"]["start"], unit="s", utc=True).tz_convert(self.timezone)
+                self._today_close = _pd.to_datetime(ctp["regular"]["end"], unit="s", utc=True).tz_convert(self.timezone)
                 self._today_midnight = self._today_close.ceil("D")
             except Exception:
                 self._today_open = None
@@ -171,7 +170,7 @@ def _get_1y_prices(self, fullDaysOnly=False):
         if self._prices_1y.empty:
             return self._prices_1y

-        dnow = pd.Timestamp.utcnow().tz_convert(self.timezone).date()
+        dnow = _pd.Timestamp.utcnow().tz_convert(self.timezone).date()
         d1 = dnow
         d0 = (d1 + _datetime.timedelta(days=1)) - utils._interval_to_timedelta("1y")
         if fullDaysOnly and self._exchange_open_now():
@@ -202,7 +201,7 @@ def _get_exchange_metadata(self):
         return self._md

     def _exchange_open_now(self):
-        t = pd.Timestamp.utcnow()
+        t = _pd.Timestamp.utcnow()
         self._get_exchange_metadata()

         # if self._today_open is None and self._today_close is None:
@@ -267,12 +266,12 @@ def shares(self):
         if self._shares is not None:
             return self._shares

-        shares = self._tkr.get_shares_full(start=pd.Timestamp.utcnow().date() - pd.Timedelta(days=548))
+        shares = self._tkr.get_shares_full(start=_pd.Timestamp.utcnow().date() - _pd.Timedelta(days=548))
         if shares is None:
             # Requesting 18 months failed, so fallback to shares which should include last year
             shares = self._tkr.get_shares()
         if shares is not None:
-            if isinstance(shares, pd.DataFrame):
+            if isinstance(shares, _pd.DataFrame):
                 shares = shares[shares.columns[0]]
             self._shares = int(shares.iloc[-1])
         return self._shares
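For reference, the shares property above prefers the 18-month series from get_shares_full() and only falls back to get_shares(); a minimal standalone sketch of that last step, using made-up share counts in place of a real Yahoo response:

import pandas as pd

# Hypothetical stand-in for what get_shares_full()/get_shares() might return.
shares = pd.Series(
    [15_900_000_000, 15_850_000_000],
    index=pd.to_datetime(["2022-06-30", "2022-09-30"]),
)
if isinstance(shares, pd.DataFrame):    # get_shares() can hand back a DataFrame
    shares = shares[shares.columns[0]]  # keep its first column as a Series
print(int(shares.iloc[-1]))             # most recent share count: 15850000000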
@@ -574,7 +573,7 @@ def history(
             timeout=10,
             debug=True,
             raise_errors=False,
-    ) -> pd.DataFrame:
+    ) -> _pd.DataFrame:
         """
         :Parameters:
             period : str
@@ -621,22 +620,20 @@ def history(
                 If True, then raise errors as
                 exceptions instead of printing to console.
         """
-
+        tz = self._get_ticker_tz(debug, proxy, timeout)
+        if tz is None:
+            # Every valid ticker has a timezone. Missing = problem
+            err_msg = "No timezone found, symbol may be delisted"
+            shared._DFS[self.ticker] = utils.empty_df()
+            shared._ERRORS[self.ticker] = err_msg
+            if debug:
+                if raise_errors:
+                    raise Exception("%s: %s" % (self.ticker, err_msg))
+                else:
+                    print("- %s: %s" % (self.ticker, err_msg))
+            return utils.empty_df()
         if start or period is None or period.lower() == "max":
             # Check can get TZ. Fail => probably delisted
-            tz = self._get_ticker_tz(debug, proxy, timeout)
-            if tz is None:
-                # Every valid ticker has a timezone. Missing = problem
-                err_msg = "No timezone found, symbol may be delisted"
-                shared._DFS[self.ticker] = utils.empty_df()
-                shared._ERRORS[self.ticker] = err_msg
-                if debug:
-                    if raise_errors:
-                        raise Exception("%s: %s" % (self.ticker, err_msg))
-                    else:
-                        print("- %s: %s" % (self.ticker, err_msg))
-                return utils.empty_df()
-
             if end is None:
                 end = int(_time.time())
             else:
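The hunk above hoists the timezone lookup out of the start/period == "max" branch, so every history() call now fails fast when no timezone can be found. A minimal sketch of how that path looks from the caller's side, assuming the package is importable as yfinance and using a made-up symbol:

import yfinance as yf

t = yf.Ticker("NOSUCHTICKER123")   # hypothetical symbol with no timezone data
df = t.history(period="5d")        # prints "- NOSUCHTICKER123: No timezone found, symbol may be delisted"
print(df.empty)                    # True: utils.empty_df() is returned

# With raise_errors=True the same failure raises instead of printing:
# t.history(period="5d", raise_errors=True)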
@@ -792,30 +789,28 @@ def history(
         if not expect_capital_gains:
             capital_gains = None

+        if splits is not None:
+            splits = utils.set_df_tz(splits, interval, tz_exchange)
+        if dividends is not None:
+            dividends = utils.set_df_tz(dividends, interval, tz_exchange)
+        if capital_gains is not None:
+            capital_gains = utils.set_df_tz(capital_gains, interval, tz_exchange)
         if start is not None:
-            # Note: use pandas Timestamp as datetime.utcfromtimestamp has bugs on windows
-            # https://github.com/python/cpython/issues/81708
-            startDt = _pd.Timestamp(start, unit="s")
+            startDt = quotes.index[0].floor('D')
             if dividends is not None:
-                dividends = dividends[dividends.index >= startDt]
+                dividends = dividends.loc[startDt:]
             if capital_gains is not None:
-                capital_gains = capital_gains[capital_gains.index >= startDt]
+                capital_gains = capital_gains.loc[startDt:]
             if splits is not None:
-                splits = splits[splits.index >= startDt]
+                splits = splits.loc[startDt:]
         if end is not None:
-            endDt = _pd.Timestamp(end, unit="s")
+            endDt = _pd.Timestamp(end, unit='s').tz_localize(tz)
             if dividends is not None:
                 dividends = dividends[dividends.index < endDt]
             if capital_gains is not None:
                 capital_gains = capital_gains[capital_gains.index < endDt]
             if splits is not None:
                 splits = splits[splits.index < endDt]
-        if splits is not None:
-            splits = utils.set_df_tz(splits, interval, tz_exchange)
-        if dividends is not None:
-            dividends = utils.set_df_tz(dividends, interval, tz_exchange)
-        if capital_gains is not None:
-            capital_gains = utils.set_df_tz(capital_gains, interval, tz_exchange)

         # Prepare for combine
         intraday = params["interval"][-1] in ("m", "h")
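The reordering above applies utils.set_df_tz() to the actions before trimming them, then slices with timezone-aware bounds: startDt from quotes.index[0].floor('D') and endDt from the epoch-seconds end localized to the exchange timezone. A self-contained pandas sketch of that slicing pattern, with invented dividend data:

import pandas as pd

tz = "America/New_York"   # example exchange timezone
dividends = pd.Series(    # invented action data
    [0.22, 0.23, 0.24],
    index=pd.DatetimeIndex(["2022-02-10", "2022-05-12", "2022-08-11"], tz=tz),
)

startDt = pd.Timestamp("2022-04-01", tz=tz).floor("D")      # like quotes.index[0].floor('D')
endDt = pd.Timestamp(1660000000, unit="s").tz_localize(tz)  # like _pd.Timestamp(end, unit='s').tz_localize(tz)

dividends = dividends.loc[startDt:]              # keep rows on/after startDt
dividends = dividends[dividends.index < endDt]   # drop rows on/after endDt
print(dividends)                                 # only the 2022-05-12 payment survives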
@@ -964,7 +959,7 @@ def _reconstruct_intervals_batch(self, df, interval, prepost, tag=-1.0, silent=False):
             return df

         dts_to_repair = df.index[f_repair_rows]
-        indices_to_repair = _np.where(f_repair_rows)[0]
+        # indices_to_repair = _np.where(f_repair_rows)[0]

         if len(dts_to_repair) == 0:
             if debug:
@@ -978,9 +973,9 @@ def _reconstruct_intervals_batch(self, df, interval, prepost, tag=-1.0, silent=F
978973
979974 # Group nearby NaN-intervals together to reduce number of Yahoo fetches
980975 dts_groups = [[dts_to_repair [0 ]]]
981- last_dt = dts_to_repair [0 ]
982- last_ind = indices_to_repair [0 ]
983- td = utils ._interval_to_timedelta (interval )
976+ # last_dt = dts_to_repair[0]
977+ # last_ind = indices_to_repair[0]
978+ # td = utils._interval_to_timedelta(interval)
984979 # Note on setting max size: have to allow space for adding good data
985980 if sub_interval == "1mo" :
986981 grp_max_size = _relativedelta .relativedelta (years = 2 )
@@ -1091,7 +1086,7 @@ def _reconstruct_intervals_batch(self, df, interval, prepost, tag=-1.0, silent=False):
                 df_fine["Week Start"] = df_fine.index.tz_localize(None).to_period("W-" + week_end_day).start_time
                 grp_col = "Week Start"
             elif interval == "1d":
-                df_fine["Day Start"] = pd.to_datetime(df_fine.index.date)
+                df_fine["Day Start"] = _pd.to_datetime(df_fine.index.date)
                 grp_col = "Day Start"
             else:
                 df_fine.loc[df_fine.index.isin(df_block.index), "ctr"] = 1
@@ -1355,7 +1350,7 @@ def _fix_zeroes(self, df, interval, tz_exchange, prepost, silent=False):
         df2_reserve = None
         if intraday:
             # Ignore days with >50% intervals containing NaNs
-            df_nans = pd.DataFrame(f_prices_bad.any(axis=1), columns=["nan"])
+            df_nans = _pd.DataFrame(f_prices_bad.any(axis=1), columns=["nan"])
             df_nans["_date"] = df_nans.index.date
             grp = df_nans.groupby("_date")
             nan_pct = grp.sum() / grp.count()
@@ -1876,7 +1871,7 @@ def get_news(self, proxy=None):
         self._news = data.get("news", [])
         return self._news

-    def get_earnings_dates(self, limit=12, proxy=None) -> Optional[pd.DataFrame]:
+    def get_earnings_dates(self, limit=12, proxy=None) -> Optional[_pd.DataFrame]:
         """
         Get earning dates (future and historic)
         :param limit: max amount of upcoming and recent earnings dates to return.
@@ -1989,7 +1984,7 @@ def get_query1_info(self) -> dict:
             elif isinstance(v, str):
                 query1_info[k] = v.replace("\xa0", " ")
         if "firstTradeDateMilliseconds" in query1_info:
-            query1_info["firstTradeDateMilliseconds"] = pd.Timestamp(
+            query1_info["firstTradeDateMilliseconds"] = _pd.Timestamp(
                 query1_info["firstTradeDateMilliseconds"], unit="ms", tz="UTC"
             ).tz_convert(query1_info.get("exchangeTimezoneName", "UTC"))
         self._query1_info = query1_info
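The last hunk only swaps the pandas alias, but the surrounding conversion is worth spelling out: firstTradeDateMilliseconds arrives as epoch milliseconds and ends up as an exchange-local Timestamp. A small sketch with an illustrative value (not taken from the commit):

import pandas as pd

query1_info = {
    "firstTradeDateMilliseconds": 345479400000,   # example epoch-ms value
    "exchangeTimezoneName": "America/New_York",
}
ts = pd.Timestamp(
    query1_info["firstTradeDateMilliseconds"], unit="ms", tz="UTC"
).tz_convert(query1_info.get("exchangeTimezoneName", "UTC"))
print(ts)   # 1980-12-12 09:30:00-05:00 for this example value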