diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index d6efa7db..e93de2b2 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -16,13 +16,13 @@ jobs: fail-fast: false matrix: os: ["ubuntu-latest", "macos-latest", "windows-latest"] - python-version: ["3.10", "3.11", "3.12"] + python-version: ["3.11", "3.12"] numpy_ver: ["latest"] test_config: ["latest"] include: # NEP29 compliance settings - - python-version: "3.9" - numpy_ver: "1.23" + - python-version: "3.10" + numpy_ver: "1.24" os: ubuntu-latest test_config: "NEP29" # Operational compliance settings @@ -70,22 +70,25 @@ jobs: run: flake8 . --count --exit-zero --max-complexity=10 --statistics - name: Test with pytest - run: pytest + run: pytest --cov=pysatSpaceWeather --cov-report xml - - name: Publish results to coveralls - env: - GITHUB_TOKEN: ${{ secrets.github_token }} - COVERALLS_PARALLEL: true - run: coveralls --rcfile=pyproject.toml --service=github + - name: Coveralls Parallel + uses: coverallsapp/github-action@v2 + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + flag-name: run=${{ join(matrix.*, '-') }} + parallel: true + format: cobertura + debug: true finish: name: Finish Coverage Analysis needs: build + if: ${{ always() }} runs-on: ubuntu-latest steps: - name: Coveralls Finished - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - run: | - pip install --upgrade coveralls - coveralls --service=github --finish + uses: coverallsapp/github-action@v2 + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + parallel-finished: true diff --git a/.github/workflows/pysat_rc.yml b/.github/workflows/pysat_rc.yml index ec5b3174..a9afe6f1 100644 --- a/.github/workflows/pysat_rc.yml +++ b/.github/workflows/pysat_rc.yml @@ -12,7 +12,7 @@ jobs: fail-fast: false matrix: os: ["ubuntu-latest", "macos-latest", "windows-latest"] - python-version: ["3.11"] + python-version: ["3.12"] name: Python ${{ matrix.python-version }} on ${{ matrix.os }} runs-on: ${{ matrix.os }} @@ -35,9 +35,25 @@ jobs: python -c "import pysat; pysat.params['data_dirs'] = 'pysatData'" - name: Test with pytest - run: pytest + run: pytest --cov=pysatSpaceWeather --cov-report xml - - name: Publish results to coveralls - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - run: coveralls --rcfile=setup.cfg --service=github + - name: Coveralls Parallel + uses: coverallsapp/github-action@v2 + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + flag-name: run=${{ join(matrix.*, '-') }} + parallel: true + format: cobertura + debug: true + + finish: + name: Finish Coverage Analysis + needs: build + if: ${{ always() }} + runs-on: ubuntu-latest + steps: + - name: Coveralls Finished + uses: coverallsapp/github-action@v2 + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + parallel-finished: true diff --git a/.zenodo.json b/.zenodo.json index 88e91af8..6c665aa8 100644 --- a/.zenodo.json +++ b/.zenodo.json @@ -24,7 +24,7 @@ "orcid": "0000-0001-8321-6074" }, { - "affiliation": "Stoneris LLC", + "affiliation": "Cosmic Studio", "name": "Stoneback, Russell", "orcid": "0000-0001-7216-4336" }, diff --git a/CHANGELOG.md b/CHANGELOG.md index e14b8991..69f53667 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,6 +3,23 @@ Change Log All notable changes to this project will be documented in this file. This project adheres to [Semantic Versioning](https://semver.org/). 
+[0.2.0] - 2024-08-30
+--------------------
+* Enhancements
+  * Added an instrument for the daily Nobeyama Radio Polarimeters solar flux
+* Maintenance
+  * Removed unneeded keyword arguments from Kp method functions
+  * Replaced `fillna` with `asfreq` to maintain the same behaviour
+  * Implemented `iloc` in pandas Series and DataFrame index access
+  * Added `verify=False` to GFZ requests
+  * Updated documentation links and fixed intersphinx mapping
+  * Replaced `utcnow` with `now` and the UTC timezone
+  * Removed support for deprecated Kp tag, ''
+  * Updated minimum supported pysat version to 3.2.0+.
+* Bugs
+  * Fixed errors introduced by assuming files will be present when combining
+    F10.7 or Kp data in `combine_f107` and `combine_kp` functions.
+
 [0.1.0] - 2024-02-16
 --------------------
 * Enhancements
diff --git a/README.md b/README.md
index 88f51490..921b6ad4 100644
--- a/README.md
+++ b/README.md
@@ -26,7 +26,7 @@ Python 3.7+.
 
 | Common modules | Community modules |
 | -------------- | ----------------- |
-| netCDF4        | pysat >= 3.1.0    |
+| netCDF4        | pysat >= 3.2.0    |
 | numpy          |                   |
 | pandas         |                   |
 | requests       |                   |
diff --git a/docs/conf.py b/docs/conf.py
index e35e7b25..325a82a4 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -65,7 +65,7 @@ author = ', '.join([auth['name'] for auth in zenodo['creators']])
 description = 'Tools for space weather indices.'
 category = 'Space Physics'
 
-copyright = ', '.join(['2022', author])
+copyright = ', '.join(['2024', author])
 
 # The short X.Y version
 version = info.project['version'].base_version
@@ -177,8 +177,9 @@ epub_exclude_files = ['search.html']
 
 # Example configuration for intersphinx: refer to the Python standard library.
-intersphinx_mapping = {'https://docs.python.org/': None}
+intersphinx_mapping = {'python': ('https://docs.python.org/', None)}
 
 # Links to ignore when checking for stability
 linkcheck_ignore = ['https://lasp.colorado.edu/space_weather/dsttemerin/',
-                    'https://*QUERY']
+                    'https://*QUERY',
+                    'https://datapub.gfz-potsdam.de/download/10.5880.Kp.0001/*']
diff --git a/docs/installation.rst b/docs/installation.rst
index 35c0eda9..fba38944 100644
--- a/docs/installation.rst
+++ b/docs/installation.rst
@@ -24,7 +24,7 @@ Python 3.6 and 3.9+.
 ============== =================
 Common modules Community modules
 ============== =================
-netCDF4        pysat >= 3.1.0
+netCDF4        pysat >= 3.2.0
 numpy
 pandas
 requests
diff --git a/docs/supported_instruments.rst b/docs/supported_instruments.rst
index efb508a6..0e18db41 100644
--- a/docs/supported_instruments.rst
+++ b/docs/supported_instruments.rst
@@ -66,6 +66,31 @@ Supports ACE Solar Wind Electron Proton Alpha Monitor data.
    :members:
 
+.. _norp-inst:
+
+NoRP
+----
+
+The Nobeyama Radio Polarimeters (NoRP) platform encompasses solar flux
+measurements provided by the Japanese Solar Science Observatory.
+`NoRP <https://solar.nro.nao.ac.jp/norp/>`_ provides
+additional information and processing tools on their website.
+
+.. _norp-rf-inst:
+
+RF
+^^^
+
+RF is the radio flux measured from the Sun at different wavelengths. This
+provides a different measure of solar activity and has been corrected to be
+in solar flux units at 1 AU. The daily data set currently starts in Nov 1951
+and is updated to extend to the current period, but not in real-time.
+
+
+.. automodule:: pysatSpaceWeather.instruments.norp_rf
+   :members:
+
+
 .. _sw-inst:
 
 SW
 ---
@@ -179,10 +204,9 @@ Dst
 ^^^
 
 The Disturbance Storm Time (Dst) Index is a measure of magnetic activity
-associated with the ring current.
The National Geophysical Data Center (NGDC) -maintains the -`current database `_ from which -the historic Dst is downloaded. +associated with the ring current. The National Centers for Environmental +Information (NCEI), formerly the National Geophysical Data Center (NGDC), +maintains the current database from which the historic Dst is downloaded. `LASP `_ performs the calculates and provides the predicted Dst for the last 96 hours. You can learn more about the Dst Index at the diff --git a/pyproject.toml b/pyproject.toml index 9d75a5e0..a2b86624 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "pysatSpaceWeather" -version = "0.1.0" +version = "0.2.0" description = 'pysat support for Space Weather Indices' readme = "README.md" requires-python = ">=3.6" @@ -22,7 +22,6 @@ classifiers = [ "Natural Language :: English", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.6", - "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", @@ -44,13 +43,12 @@ dependencies = [ "numpy", "packaging", "pandas", - "pysat>=3.1.0", + "pysat>=3.2.0", "requests", "xarray"] [project.optional-dependencies] test = [ - "coveralls < 3.3", "flake8", "flake8-docstrings", "hacking >= 1.0", @@ -65,7 +63,7 @@ doc = [ "numpydoc", "pyproject_parser", "sphinx", - "sphinx_rtd_theme >= 1.2.2,<2.0.0" + "sphinx_rtd_theme >= 1.2.2, < 2.0.0" ] [project.urls] @@ -75,7 +73,6 @@ Source = "https://github.com/pysat/pysatSpaceWeather" [tool.coverage.report] [tool.pytest.ini_options] -addopts = "--cov=pysatSpaceWeather" markers = [ "all_inst", "download", diff --git a/pysatSpaceWeather/instruments/__init__.py b/pysatSpaceWeather/instruments/__init__.py index efa92021..fe819bef 100644 --- a/pysatSpaceWeather/instruments/__init__.py +++ b/pysatSpaceWeather/instruments/__init__.py @@ -1,9 +1,9 @@ from pysatSpaceWeather.instruments import methods # noqa F401 -__all__ = ['ace_epam', 'ace_mag', 'ace_sis', 'ace_swepam', 'sw_ae', 'sw_al', - 'sw_au', 'sw_ap', 'sw_apo', 'sw_cp', 'sw_dst', 'sw_f107', 'sw_flare', - 'sw_hpo', 'sw_kp', 'sw_mgii', 'sw_polarcap', 'sw_sbfield', 'sw_ssn', - 'sw_stormprob'] +__all__ = ['ace_epam', 'ace_mag', 'ace_sis', 'ace_swepam', 'norp_rf', 'sw_ae', + 'sw_al', 'sw_au', 'sw_ap', 'sw_apo', 'sw_cp', 'sw_dst', 'sw_f107', + 'sw_flare', 'sw_hpo', 'sw_kp', 'sw_mgii', 'sw_polarcap', + 'sw_sbfield', 'sw_ssn', 'sw_stormprob'] for inst in __all__: exec("from pysatSpaceWeather.instruments import {inst}".format(inst=inst)) diff --git a/pysatSpaceWeather/instruments/ace_epam.py b/pysatSpaceWeather/instruments/ace_epam.py index ae439c6c..a895fd15 100644 --- a/pysatSpaceWeather/instruments/ace_epam.py +++ b/pysatSpaceWeather/instruments/ace_epam.py @@ -68,7 +68,7 @@ inst_ids = {inst_id: [tag for tag in tags.keys()] for inst_id in ['']} # Define today's date -now = dt.datetime.utcnow() +now = dt.datetime.now(tz=dt.timezone.utc) # ---------------------------------------------------------------------------- # Instrument test attributes diff --git a/pysatSpaceWeather/instruments/ace_mag.py b/pysatSpaceWeather/instruments/ace_mag.py index 27e82fbd..fbf874da 100644 --- a/pysatSpaceWeather/instruments/ace_mag.py +++ b/pysatSpaceWeather/instruments/ace_mag.py @@ -68,7 +68,7 @@ inst_ids = {inst_id: [tag for tag in tags.keys()] for inst_id in ['']} # Define today's date -now = dt.datetime.utcnow() +now = dt.datetime.now(tz=dt.timezone.utc) 
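+# Note: datetime.utcnow() is deprecated in Python 3.12; now(tz=dt.timezone.utc)
+# returns the same UTC moment as a timezone-aware datetime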
# ---------------------------------------------------------------------------- # Instrument test attributes diff --git a/pysatSpaceWeather/instruments/ace_sis.py b/pysatSpaceWeather/instruments/ace_sis.py index 61f6546a..405ab18b 100644 --- a/pysatSpaceWeather/instruments/ace_sis.py +++ b/pysatSpaceWeather/instruments/ace_sis.py @@ -68,7 +68,7 @@ inst_ids = {inst_id: [tag for tag in tags.keys()] for inst_id in ['']} # Define today's date -now = dt.datetime.utcnow() +now = dt.datetime.now(tz=dt.timezone.utc) # ---------------------------------------------------------------------------- # Instrument test attributes @@ -115,8 +115,10 @@ def clean(self): # Evaluate the different proton fluxes. Replace bad values with NaN and # times with no valid data - self.data['int_pflux_10MeV'][self.data['status_10'] > max_status] = np.nan - self.data['int_pflux_30MeV'][self.data['status_30'] > max_status] = np.nan + self.data['int_pflux_10MeV'] = self.data['int_pflux_10MeV'].where( + (self.data['status_10'] <= max_status), other=np.nan) + self.data['int_pflux_30MeV'] = self.data['int_pflux_30MeV'].where( + (self.data['status_30'] <= max_status), other=np.nan) eval_cols = ['int_pflux_10MeV', 'int_pflux_30MeV'] diff --git a/pysatSpaceWeather/instruments/ace_swepam.py b/pysatSpaceWeather/instruments/ace_swepam.py index 174d4245..e6a164cc 100644 --- a/pysatSpaceWeather/instruments/ace_swepam.py +++ b/pysatSpaceWeather/instruments/ace_swepam.py @@ -67,7 +67,7 @@ inst_ids = {inst_id: [tag for tag in tags.keys()] for inst_id in ['']} # Define today's date -now = dt.datetime.utcnow() +now = dt.datetime.now(tz=dt.timezone.utc) # ---------------------------------------------------------------------------- # Instrument test attributes diff --git a/pysatSpaceWeather/instruments/methods/__init__.py b/pysatSpaceWeather/instruments/methods/__init__.py index 28ada584..c2e258fe 100644 --- a/pysatSpaceWeather/instruments/methods/__init__.py +++ b/pysatSpaceWeather/instruments/methods/__init__.py @@ -4,6 +4,7 @@ from pysatSpaceWeather.instruments.methods import f107 # noqa F401 from pysatSpaceWeather.instruments.methods import general # noqa F401 from pysatSpaceWeather.instruments.methods import gfz # noqa F401 +from pysatSpaceWeather.instruments.methods import norp # noqa F401 from pysatSpaceWeather.instruments.methods import kp_ap # noqa F401 from pysatSpaceWeather.instruments.methods import lasp # noqa F401 from pysatSpaceWeather.instruments.methods import lisird # noqa F401 diff --git a/pysatSpaceWeather/instruments/methods/ace.py b/pysatSpaceWeather/instruments/methods/ace.py index 83f756fa..5e11e2b8 100644 --- a/pysatSpaceWeather/instruments/methods/ace.py +++ b/pysatSpaceWeather/instruments/methods/ace.py @@ -202,7 +202,7 @@ def download(date_array, name, tag='', inst_id='', data_path='', now=None, """ # Ensure now is up-to-date, if desired if now is None: - now = dt.datetime.utcnow() + now = dt.datetime.now(tz=dt.timezone.utc) # Define the file information for each data type and check the # date range diff --git a/pysatSpaceWeather/instruments/methods/f107.py b/pysatSpaceWeather/instruments/methods/f107.py index 3fe7edf7..b32a5f4c 100644 --- a/pysatSpaceWeather/instruments/methods/f107.py +++ b/pysatSpaceWeather/instruments/methods/f107.py @@ -12,7 +12,6 @@ import datetime as dt import numpy as np -from packaging.version import Version import pandas as pds import pysat @@ -117,9 +116,12 @@ def combine_f107(standard_inst, forecast_inst, start=None, stop=None): Notes ----- Merging prioritizes the standard data, then 
the 45day data, and finally - the forecast data + the forecast data. - Will not attempt to download any missing data, but will load data + Will not attempt to download any missing data, but will load data. + + If no data is present, but dates are provided, supplies a series of fill + values. """ @@ -169,12 +171,6 @@ def combine_f107(standard_inst, forecast_inst, start=None, stop=None): # Set the load kwargs, which vary by pysat version and tag load_kwargs = {'date': itime} - # TODO(#131): Remove version check after minimum version - # supported is 3.2.0 - if all([Version(pysat.__version__) > Version('3.0.1'), - Version(pysat.__version__) < Version('3.2.0')]): - load_kwargs['use_header'] = True - if standard_inst.tag == 'daily': # Add 30 days load_kwargs['date'] += dt.timedelta(days=30) @@ -211,7 +207,10 @@ def combine_f107(standard_inst, forecast_inst, start=None, stop=None): if inst_flag == "forecast": # Determine which files should be loaded if len(forecast_inst.index) == 0: - files = np.unique(forecast_inst.files.files[itime:stop]) + if len(forecast_inst.files.files) > 0: + files = np.unique(forecast_inst.files.files[itime:stop]) + else: + files = [None] # No load, because no files are available else: files = [None] # No load needed, if already initialized @@ -219,30 +218,23 @@ def combine_f107(standard_inst, forecast_inst, start=None, stop=None): # data for filename in files: if filename is not None: - load_kwargs = {'fname': filename} - - # TODO(#131): Remove version check after minimum version - # supported is 3.2.0 - if all([Version(pysat.__version__) > Version('3.0.1'), - Version(pysat.__version__) < Version('3.2.0')]): - load_kwargs['use_header'] = True - - forecast_inst.load(**load_kwargs) + forecast_inst.load(fname=filename) if notes.find("forecast") < 0: notes += " the {:} source ({:} to ".format(inst_flag, itime.date()) - # Check in case there was a problem with the standard data - if fill_val is None: - f107_inst.meta = forecast_inst.meta - fill_val = f107_inst.meta['f107'][ - f107_inst.meta.labels.fill_val] - # Determine which times to save if forecast_inst.empty: good_vals = [] else: + # Check in case there was a problem with the standard data + if fill_val is None: + f107_inst.meta = forecast_inst.meta + fill_val = f107_inst.meta['f107'][ + f107_inst.meta.labels.fill_val] + + # Get the good times and values good_times = ((forecast_inst.index >= itime) & (forecast_inst.index < stop)) good_vals = forecast_inst['f107'][good_times] != fill_val @@ -304,7 +296,7 @@ def combine_f107(standard_inst, forecast_inst, start=None, stop=None): # Resample the output data, filling missing values if (date_range.shape != f107_inst.index.shape or abs(date_range - f107_inst.index).max().total_seconds() > 0.0): - f107_inst.data = f107_inst.data.resample(freq).fillna(method=None) + f107_inst.data = f107_inst.data.resample(freq).asfreq() if np.isfinite(fill_val): f107_inst.data[np.isnan(f107_inst.data)] = fill_val @@ -354,7 +346,7 @@ def calc_f107a(f107_inst, f107_name='f107', f107a_name='f107a', min_pnts=41): # # Ensure the data are evenly sampled at a daily frequency, since this is # how often F10.7 is calculated. 
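+    # Note: asfreq() keeps the even daily sampling and leaves missing days as
+    # NaN, matching the behaviour of the deprecated fillna(method=None) call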
- f107_fill = f107_inst.data.resample('1D').fillna(method=None) + f107_fill = f107_inst.data.resample('1D').asfreq() # Replace the time index with an ordinal time_ind = f107_fill.index @@ -373,14 +365,14 @@ def calc_f107a(f107_inst, f107_name='f107', f107a_name='f107a', min_pnts=41): # Resample to the original frequency, if it is not equal to 1 day freq = pysat.utils.time.calc_freq(f107_inst.index) - if freq != "86400S": + if freq != "86400s": # Resample to the desired frequency f107_fill = f107_fill.resample(freq).ffill() # Save the output in a list f107a = list(f107_fill[f107a_name]) - # Fill any dates that fall + # Fill any dates that fall just outside of the range time_ind = pds.date_range(f107_fill.index[0], f107_inst.index[-1], freq=freq) for itime in time_ind[f107_fill.index.shape[0]:]: diff --git a/pysatSpaceWeather/instruments/methods/gfz.py b/pysatSpaceWeather/instruments/methods/gfz.py index 12ffcebb..2d5ad550 100644 --- a/pysatSpaceWeather/instruments/methods/gfz.py +++ b/pysatSpaceWeather/instruments/methods/gfz.py @@ -128,7 +128,7 @@ def json_downloads(date_array, data_path, local_file_prefix, local_date_fmt, query_url = '{:s}&status=def'.format(query_url) # The data is returned as a JSON file - req = requests.get(query_url) + req = requests.get(query_url, verify=False) # Process the JSON file if req.text.find('Gateway Timeout') >= 0: @@ -249,7 +249,7 @@ def kp_ap_cp_download(platform, name, date_array, tag, inst_id, data_path, dl_date.strftime('%Y')))) if mock_download_dir is None: furl = ''.join([burl, fname]) - req = requests.get(furl) + req = requests.get(furl, verify=False) raw_txt = req.text if req.ok else None else: diff --git a/pysatSpaceWeather/instruments/methods/kp_ap.py b/pysatSpaceWeather/instruments/methods/kp_ap.py index efc2eec0..24489be9 100644 --- a/pysatSpaceWeather/instruments/methods/kp_ap.py +++ b/pysatSpaceWeather/instruments/methods/kp_ap.py @@ -11,7 +11,6 @@ import datetime as dt import numpy as np -from packaging.version import Version import pandas as pds import pysat @@ -36,8 +35,7 @@ def acknowledgements(name, tag): """ - ackn = {'kp': {'': 'Provided by GFZ German Research Centre for Geosciences', - 'forecast': swpc.ackn, 'recent': swpc.ackn, 'def': gfz.ackn, + ackn = {'kp': {'forecast': swpc.ackn, 'recent': swpc.ackn, 'def': gfz.ackn, 'now': gfz.ackn, 'prediction': swpc.ackn}, 'ap': {'forecast': swpc.ackn, 'recent': swpc.ackn, 'prediction': swpc.ackn, '45day': swpc.ackn, @@ -81,7 +79,7 @@ def references(name, tag): "and availability, Rev. Geophys. 
29, 3, ", "415-432, 1991."])]) - refs = {'kp': {'': gen_refs, 'forecast': gen_refs, 'recent': gen_refs, + refs = {'kp': {'forecast': gen_refs, 'recent': gen_refs, 'prediction': gen_refs, 'def': gfz.geoind_refs, 'now': gfz.geoind_refs}, 'ap': {'recent': gen_refs, 'forecast': gen_refs, '45day': gen_refs, @@ -306,12 +304,12 @@ def calc_daily_Ap(ap_inst, ap_name='3hr_ap', daily_name='Ap', index=[ap_mean.index[0] - pds.DateOffset(hours=3)]) # Extract the mean that only uses data for one day - ap_sel = ap_pad.combine_first(ap_mean[[i for i, tt in - enumerate(ap_mean.index) - if tt.hour == 21]]) + ap_sel = ap_pad.combine_first(ap_mean.iloc[[i for i, tt in + enumerate(ap_mean.index) + if tt.hour == 21]]) # Backfill this data - ap_data = ap_sel.resample('3H').bfill() + ap_data = ap_sel.resample('3h').bfill() # Save the output for the original time range ap_inst[daily_name] = pds.Series(ap_data[1:], index=ap_data.index[1:]) @@ -363,8 +361,9 @@ def filter_geomag(inst, min_kp=0, max_kp=9, filter_time=24, kp_inst=None, Default max and min values accept all Kp, so changing only one will cause the filter to act as a high- or low-pass function. - This routine is written for standard Kp data (tag=''), not the forecast or - recent data. However, it will work with these Kp data if they are supplied. + This routine is written for standard Kp data (tags of 'def', 'now'), not + the forecast or recent data. However, it will work with these Kp data if + they are supplied. """ # Load the desired data @@ -375,13 +374,6 @@ def filter_geomag(inst, min_kp=0, max_kp=9, filter_time=24, kp_inst=None, if kp_inst.empty: load_kwargs = {'date': inst.index[0], 'end_date': inst.index[-1], 'verifyPad': True} - - # TODO(#131): Remove version check after minimum version supported - # is 3.2.0 - if all([Version(pysat.__version__) > Version('3.0.1'), - Version(pysat.__version__) < Version('3.2.0')]): - load_kwargs['use_header'] = True - kp_inst.load(**load_kwargs) if kp_inst.empty: @@ -540,9 +532,12 @@ def combine_kp(standard_inst=None, recent_inst=None, forecast_inst=None, Note ---- Merging prioritizes the standard data, then the recent data, and finally - the forecast data + the forecast data. + + Will not attempt to download any missing data, but will load data. - Will not attempt to download any missing data, but will load data + If no data is present, but dates are provided, supplies a series of fill + values. 
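+
+    Examples
+    --------
+    A minimal sketch, assuming the definitive, recent, and forecast Kp data
+    have already been downloaded for the desired period:
+    ::
+
+        kp_inst = combine_kp(
+            standard_inst=pysat.Instrument('sw', 'kp', tag='def'),
+            recent_inst=pysat.Instrument('sw', 'kp', tag='recent'),
+            forecast_inst=pysat.Instrument('sw', 'kp', tag='forecast'),
+            start=dt.datetime(2024, 1, 1), stop=dt.datetime(2024, 2, 1))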
""" @@ -587,13 +582,8 @@ def combine_kp(standard_inst=None, recent_inst=None, forecast_inst=None, "provide starting and ending times"))) # Initialize the output instrument - # TODO(#136): Remove if/else when pysat is 3.2.0+ - if hasattr(all_inst[0], "meta_labels"): - meta_kwargs = {"labels": all_inst[0].meta_labels} - kp_inst = pysat.Instrument(labels=all_inst[0].meta_labels) - else: - meta_kwargs = all_inst[0].meta_kwargs - kp_inst = pysat.Instrument(meta_kwargs=meta_kwargs) + meta_kwargs = all_inst[0].meta_kwargs + kp_inst = pysat.Instrument(meta_kwargs=meta_kwargs) kp_inst.inst_module = pysat_sw.instruments.sw_kp kp_inst.tag = tag @@ -610,15 +600,7 @@ def combine_kp(standard_inst=None, recent_inst=None, forecast_inst=None, while itime < stop and inst_flag is not None: # Load and save the standard data for as many times as possible if inst_flag == 'standard': - load_kwargs = {'date': itime} - - # TODO(#131): Remove version check after minimum version supported - # is 3.2.0 - if all([Version(pysat.__version__) > Version('3.0.1'), - Version(pysat.__version__) < Version('3.2.0')]): - load_kwargs['use_header'] = True - - standard_inst.load(**load_kwargs) + standard_inst.load(date=itime) if notes.find("standard") < 0: notes += " the {:} source ({:} to ".format(inst_flag, @@ -636,7 +618,10 @@ def combine_kp(standard_inst=None, recent_inst=None, forecast_inst=None, if inst_flag == 'recent': # Determine which files should be loaded if len(recent_inst.index) == 0: - files = np.unique(recent_inst.files.files[itime:stop]) + if len(recent_inst.files.files) > 0: + files = np.unique(recent_inst.files.files[itime:stop]) + else: + files = [None] # No files available else: files = [None] # No load needed, if already initialized @@ -644,30 +629,29 @@ def combine_kp(standard_inst=None, recent_inst=None, forecast_inst=None, # data for filename in files: if filename is not None: - load_kwargs = {'fname': filename} - - # TODO(#131): Remove version check after minimum version - # supported is 3.2.0 - if all([Version(pysat.__version__) > Version('3.0.1'), - Version(pysat.__version__) < Version('3.2.0')]): - load_kwargs['use_header'] = True - recent_inst.load(**load_kwargs) + recent_inst.load(fname=filename) if notes.find("recent") < 0: notes += " the {:} source ({:} to ".format(inst_flag, itime.date()) # Determine which times to save - local_fill_val = recent_inst.meta[ - 'Kp', recent_inst.meta.labels.fill_val] - good_times = ((recent_inst.index >= itime) - & (recent_inst.index < stop)) - good_vals = recent_inst['Kp'][good_times] != local_fill_val + if recent_inst.empty: + good_vals = [] + else: + local_fill_val = recent_inst.meta[ + 'Kp', recent_inst.meta.labels.fill_val] + good_times = ((recent_inst.index >= itime) + & (recent_inst.index < stop)) + good_vals = recent_inst['Kp'][good_times] != local_fill_val # Save output data and cycle time - kp_times.extend(list(recent_inst.index[good_times][good_vals])) - kp_values.extend(list(recent_inst['Kp'][good_times][good_vals])) - itime = kp_times[-1] + pds.DateOffset(hours=3) + if len(good_vals): + kp_times.extend(list( + recent_inst.index[good_times][good_vals])) + kp_values.extend(list( + recent_inst['Kp'][good_times][good_vals])) + itime = kp_times[-1] + pds.DateOffset(hours=3) inst_flag = 'forecast' if forecast_inst is not None else None notes += "{:})".format(itime.date()) @@ -676,7 +660,10 @@ def combine_kp(standard_inst=None, recent_inst=None, forecast_inst=None, if inst_flag == "forecast": # Determine which files should be loaded if len(forecast_inst.index) 
== 0: - files = np.unique(forecast_inst.files.files[itime:stop]) + if len(forecast_inst.files.files) > 0: + files = np.unique(forecast_inst.files.files[itime:stop]) + else: + files = [None] # No files have been downloaded else: files = [None] # No load needed, if already initialized @@ -684,32 +671,29 @@ def combine_kp(standard_inst=None, recent_inst=None, forecast_inst=None, # data for filename in files: if filename is not None: - load_kwargs = {'fname': filename} - - # TODO(#131): Remove version check after minimum version - # supported is 3.2.0 - if all([Version(pysat.__version__) > Version('3.0.1'), - Version(pysat.__version__) < Version('3.2.0')]): - load_kwargs['use_header'] = True - forecast_inst.load(**load_kwargs) + forecast_inst.load(fname=filename) if notes.find("forecast") < 0: notes += " the {:} source ({:} to ".format(inst_flag, itime.date()) # Determine which times to save - local_fill_val = forecast_inst.meta[ - 'Kp', forecast_inst.meta.labels.fill_val] - good_times = ((forecast_inst.index >= itime) - & (forecast_inst.index < stop)) - good_vals = forecast_inst['Kp'][good_times] != local_fill_val - - # Save desired data and cycle time - new_times = list(forecast_inst.index[good_times][good_vals]) - kp_times.extend(new_times) - new_vals = list(forecast_inst['Kp'][good_times][good_vals]) - kp_values.extend(new_vals) - itime = kp_times[-1] + pds.DateOffset(hours=3) + if not forecast_inst.empty: + local_fill_val = forecast_inst.meta[ + 'Kp', forecast_inst.meta.labels.fill_val] + good_times = ((forecast_inst.index >= itime) + & (forecast_inst.index < stop)) + good_vals = forecast_inst['Kp'][ + good_times] != local_fill_val + + # Save desired data + new_times = list(forecast_inst.index[good_times][good_vals]) + kp_times.extend(new_times) + new_vals = list(forecast_inst['Kp'][good_times][good_vals]) + kp_values.extend(new_vals) + + # Cycle time + itime = kp_times[-1] + pds.DateOffset(hours=3) notes += "{:})".format(itime.date()) inst_flag = None @@ -751,7 +735,7 @@ def combine_kp(standard_inst=None, recent_inst=None, forecast_inst=None, # Resample the output data, filling missing values if (date_range.shape != kp_inst.index.shape or abs(date_range - kp_inst.index).max().total_seconds() > 0.0): - kp_inst.data = kp_inst.data.resample(freq).fillna(method=None) + kp_inst.data = kp_inst.data.resample(freq).asfreq() if np.isfinite(fill_val): kp_inst.data[np.isnan(kp_inst.data)] = fill_val diff --git a/pysatSpaceWeather/instruments/methods/norp.py b/pysatSpaceWeather/instruments/methods/norp.py new file mode 100644 index 00000000..f6a8bb80 --- /dev/null +++ b/pysatSpaceWeather/instruments/methods/norp.py @@ -0,0 +1,225 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*-. +# Full license can be found in License.md +# Full author list can be found in .zenodo.json file +# DOI:10.5281/zenodo.3986138 +# ---------------------------------------------------------------------------- +"""Supports data from the Nobeyama Radio Polarimeters.""" + +import datetime as dt +import numpy as np +import os +import pandas as pds +import requests + +import pysat + + +def acknowledgements(): + """Define the acknowledgements for NoRP data. + + Returns + ------- + ackn : str + Acknowledgements associated with the appropriate NoRP name and tag. 
+ + """ + ackn = ''.join(['The Nobeyama Radio Polarimeters (NoRP) are operated by ', + 'Solar Science Observatory, a branch of National ', + 'Astronomical Observatory of Japan, and their observing ', + 'data are verified scientifically by the consortium for ', + 'NoRP scientific operations.', + '\nFor questions regarding the data please contact ', + 'solar_helpdesk@ml.nao.ac.jp']) + + return ackn + + +def references(name, tag): + """Define the references for NoRP data. + + Parameters + ---------- + name : str + Instrument name for the NoRP data. + tag : str + Instrument tag for the NoRP data. + + Returns + ------- + refs : str + Reference string associated with the appropriate F10.7 tag. + + """ + refs = {'rf': {'daily': "\n".join([ + ''.join(['Shimojo and Iwai "Over seven decades of solar microwave ', + 'data obtained with Toyokawa and Nobeyama Radio Polarimeters', + '", GDJ, 10, 114-129 (2023)']), + ''.join(['Nakajima et al. "The Radiometer and Polarimeters at 80, 35,', + ' and 17 GHz for Solar Observations at Nobeyama", PASJ, 37,', + ' 163 (1985)']), + ''.join(['Torii et al. "Full-Automatic Radiopolarimeters for Solar ', + 'Patrol at Microwave Frequencies", Proc. of the Res. Inst. ', + 'of Atmospherics, Nagoya Univ., 26, 129 (1979)']), + ''.join(['Shibasaki et al. "Solar Radio Data Acquisition and ', + 'Communication System (SORDACS) of Toyokawa Observatory", ', + 'Proc. of the Res. Inst. of Atmospherics, Nagoya Univ., 26, ', + '117 (1979)']), + 'Tanaka, "Toyokawa Observatory", Solar Physics, 1, 2, 295 (1967)', + ''.join(['Tsuchiya and Nagase "Atmospheric Absorption in Microwave ', + 'Solar Observation and Solar Flux Measurement at 17 Gc/s", ', + 'PASJ, 17, 86 (1965)']), + ''.join(['Tanaka and Kakinuma. "EQUIPMENT FOR THE OBSERVATION OF SOLAR', + ' RADIO EMISSION AT 9400, 3750, 2000 AND 1000 Mc/s", Proc. ', + 'of the Res. Inst. of Atmospherics, Nagoya Univ., 4, 60 ', + '(1957)']), + ''.join(['Tanaka et al. "EQUIPMENT FOR THE OBSERVATION OF SOLAR NOISE ', + 'AT 3,750 MC", Proc. of the Res. Inst. of Atmospherics, ', + 'Nagoya Univ., 1, 71 (1953)'])])}} + + return refs[name][tag] + + +def daily_rf_downloads(data_path, mock_download_dir=None, start=None, + stop=None): + """Download LASP 96-hour prediction data. + + Parameters + ---------- + data_path : str + Path to data directory. + mock_download_dir : str or NoneType + Local directory with downloaded files or None. If not None, will + process any files with the correct name and date (following the local + file prefix and date format) as if they were downloaded (default=None) + + Raises + ------ + IOError + If the data link has an unexpected format or an unknown mock download + directory is supplied. 
+
+    Note
+    ----
+    Saves data in month-long files
+
+    """
+    # Initialize the output information
+    times = list()
+    data_dict = dict()
+
+    # Set the file name
+    fname = 'TYKW-NoRP_dailyflux.txt'
+
+    if mock_download_dir is None:
+        # Set the remote data variables
+        url = '/'.join(['https://solar.nro.nao.ac.jp/norp/data/daily', fname])
+
+        # Download the webpage
+        req = requests.get(url)
+
+        # Test to see if the file was found on the server
+        if req.text.find('not found on this server') > 0:
+            pysat.logger.warning(''.join(['NoRP daily flux file not found on ',
+                                          'server: ', url]))
+            raw_txt = None
+        else:
+            raw_txt = req.text
+    else:
+        # If a mock download directory was supplied, test to see if it exists
+        if mock_download_dir is not None:
+            if not os.path.isdir(mock_download_dir):
+                raise IOError('file location is not a directory: {:}'.format(
+                    mock_download_dir))
+
+        # Get the data from the mock download directory
+        url = os.path.join(mock_download_dir, fname)
+        if os.path.isfile(url):
+            with open(url, 'r') as fpin:
+                raw_txt = fpin.read()
+        else:
+            pysat.logger.warning(''.join(['NoRP daily flux file not found in ',
+                                          'the local directory: ', url,
+                                          ", data may have been saved to an ",
+                                          "unexpected filename"]))
+            raw_txt = None
+
+    if raw_txt is not None:
+        # Split the text to get the header lines
+        file_lines = raw_txt.split('\n')[:2]
+
+        # If needed, set or adjust the start and end times
+        line_cols = file_lines[0].split()
+        file_start = dt.datetime.strptime(line_cols[-3], '(%Y-%m-%d')
+        file_stop = dt.datetime.strptime(line_cols[-1], '%Y-%m-%d)')
+
+        # Evaluate the file start time
+        if start is None or start < file_start:
+            start = file_start
+        elif start.day > 1:
+            # Set the start time to be the start of the month
+            start = dt.datetime(start.year, start.month, 1)
+
+        # Evaluate the file stop time
+        if stop is None or stop < file_stop:
+            stop = file_stop
+        elif stop.day < 31:
+            # Set the stop time to be the end of the month
+            month_end = stop + dt.timedelta(days=1)
+            while month_end.month == stop.month:
+                stop = dt.datetime(month_end.year, month_end.month,
+                                   month_end.day)
+                month_end += dt.timedelta(days=1)
+
+        # Set the data columns
+        data_cols = [col.replace(" ", "_") for col in file_lines[1].split(',')]
+        for col in data_cols[1:]:
+            data_dict[col] = list()
+
+        # Split the text to get the file lines for the desired period
+        start_txt = raw_txt.split(start.strftime('"%Y-%m-%d"'))[1]
+        stop_txt = ''.join([start.strftime('"%Y-%m-%d"'), start_txt]).split(
+            stop.strftime('"%Y-%m-%d"'))
+        file_lines = stop_txt[0].split('\n')[:-1]
+        if len(stop_txt) > 1:
+            file_lines.append(''.join([stop.strftime('"%Y-%m-%d"'),
+                                       stop_txt[1]]).split('\n')[0])
+
+        # Format the data for the desired time period
+        for line in file_lines:
+            # Split the line on comma
+            line_cols = line.split(',')
+
+            if len(line_cols) != len(data_cols):
+                raise IOError(''.join(['unexpected line encountered in file ',
+                                       'retrieved from ', url, ':\n', line]))
+
+            # Format the time and values
+            times.append(dt.datetime.strptime(line_cols[0], '"%Y-%m-%d"'))
+            for i, col in enumerate(data_cols[1:]):
+                if line_cols[i + 1].lower().find('nan') == 0:
+                    data_dict[col].append(np.nan)
+                else:
+                    data_dict[col].append(np.float64(line_cols[i + 1]))
+
+        # Re-cast the data as a pandas DataFrame
+        data = pds.DataFrame(data_dict, index=times)
+
+        # Write out the files using a monthly cadence
+        file_base = '_'.join(['norp', 'rf', 'daily', '%Y-%m.txt'])
+
+        while start < stop:
+            # Set the output file name
+            file_name = os.path.join(data_path,
start.strftime(file_base)) + + # Downselect the output data + file_data = data[start:start + pds.DateOffset(months=1) + - dt.timedelta(microseconds=1)] + + # Save the output data to file + file_data.to_csv(file_name) + + # Cycle the time + start += pds.DateOffset(months=1) + + return diff --git a/pysatSpaceWeather/instruments/methods/swpc.py b/pysatSpaceWeather/instruments/methods/swpc.py index 0635a599..ca70e228 100644 --- a/pysatSpaceWeather/instruments/methods/swpc.py +++ b/pysatSpaceWeather/instruments/methods/swpc.py @@ -77,10 +77,16 @@ def daily_dsd_download(name, today, data_path, mock_download_dir=None): general.get_instrument_data_path( 'sw_{:s}'.format(data_name), tag='daily') for data_name in ['f107', 'flare', 'ssn', 'sbfield']} + + # Try and obtain the file date; otherwise assume it was issued today + fdate = find_issue_date(raw_txt) + if fdate is None: + fdate = today + outfiles = { data_name: os.path.join(file_paths[data_name], '_'.join([ data_name, 'daily', '{:s}.txt'.format( - today.strftime('%Y-%m-%d'))])) + fdate.strftime('%Y-%m-%d'))])) for data_name in file_paths.keys()} # Check that the directories exist @@ -88,7 +94,7 @@ def daily_dsd_download(name, today, data_path, mock_download_dir=None): pysat.utils.files.check_and_make_path(data_path) # Save the output - rewrite_daily_solar_data_file(today.year, outfiles, raw_txt) + rewrite_daily_solar_data_file(fdate.year, outfiles, raw_txt) return @@ -318,6 +324,7 @@ def rewrite_daily_solar_data_file(year, outfiles, lines): # Write out as a file data.to_csv(outfiles[data_name], header=True) + pysat.logger.info('Wrote: {:}'.format(outfiles[data_name])) return @@ -441,8 +448,7 @@ def solar_geomag_predictions_download(name, date_array, data_path, "filename."])) else: # Parse text to get the date the prediction was generated - date_str = raw_txt.split(':Issued: ')[-1].split(' UTC')[0] - dl_date = dt.datetime.strptime(date_str, '%Y %b %d %H%M') + dl_date = find_issue_date(raw_txt, '%Y %b %d %H%M UTC') # Parse the data to get the prediction dates date_strs = raw_txt.split(':Prediction_dates:')[-1].split('\n')[0] @@ -478,7 +484,7 @@ def solar_geomag_predictions_download(name, date_array, data_path, # Process the Kp data hr_strs = ['00-03UT', '03-06UT', '06-09UT', '09-12UT', '12-15UT', '15-18UT', '18-21UT', '21-00UT'] - data_times['kp'] = pds.date_range(pred_times[0], periods=24, freq='3H') + data_times['kp'] = pds.date_range(pred_times[0], periods=24, freq='3h') for line in kp_raw.split('\n'): if line.find("Prob_Mid") >= 0: @@ -615,8 +621,7 @@ def geomag_forecast_download(name, date_array, data_path, "been saved to an unexpected filename."])) else: # Parse text to get the date the prediction was generated - date_str = raw_txt.split(':Issued: ')[-1].split(' UTC')[0] - dl_date = dt.datetime.strptime(date_str, '%Y %b %d %H%M') + dl_date = find_issue_date(raw_txt, '%Y %b %d %H%M UTC') # Separate out the data by chunks ap_raw = raw_txt.split('NOAA Ap Index Forecast')[-1] @@ -643,7 +648,7 @@ def geomag_forecast_download(name, date_array, data_path, kp_day2.append(float(cols[-2])) kp_day3.append(float(cols[-1])) - kp_times = pds.date_range(forecast_date, periods=24, freq='3H') + kp_times = pds.date_range(forecast_date, periods=24, freq='3h') kp_day = [] for dd in [kp_day1, kp_day2, kp_day3]: kp_day.extend(dd) @@ -744,8 +749,7 @@ def kp_ap_recent_download(name, date_array, data_path, mock_download_dir=None): "filename."])) else: # Parse text to get the date the prediction was generated - date_str = raw_txt.split(':Issued: 
')[-1].split('\n')[0]
-        dl_date = dt.datetime.strptime(date_str, '%H%M UT %d %b %Y')
+        dl_date = find_issue_date(raw_txt)
 
         # Data is the forecast value for the next three days
         raw_data = raw_txt.split('# Date ')[-1]
@@ -784,7 +788,7 @@ def kp_ap_recent_download(name, date_array, data_path, mock_download_dir=None):
                 sub_aps[i].append(np.int64(ap_sub_lines[i]))
 
     # Create times on 3 hour cadence
-    kp_times = pds.date_range(times[0], periods=(8 * 30), freq='3H')
+    kp_times = pds.date_range(times[0], periods=(8 * 30), freq='3h')
 
     # Put both data sets into DataFrames
     data = {'kp': pds.DataFrame({'mid_lat_Kp': sub_kps[0],
@@ -857,8 +861,7 @@ def recent_ap_f107_download(name, date_array, data_path,
                                       " saved to an unexpected filename."]))
     else:
         # Parse text to get the date the prediction was generated
-        date_str = raw_txt.split(':Issued: ')[-1].split(' UTC')[0]
-        dl_date = dt.datetime.strptime(date_str, '%Y %b %d %H%M')
+        dl_date = find_issue_date(raw_txt, '%Y %b %d %H%M UTC')
 
         # Get to the forecast data
         raw_data = raw_txt.split('45-DAY AP FORECAST')[-1]
@@ -879,6 +882,10 @@ def recent_ap_f107_download(name, date_array, data_path,
     data = {'ap': pds.DataFrame(ap, index=ap_times, columns=['daily_Ap']),
             'f107': pds.DataFrame(f107, index=f107_times, columns=['f107'])}
 
+    # Ensure there is a download date
+    if dl_date is None:
+        dl_date = ap_times[0]
+
     # Write out the data files
     for data_name in data.keys():
         file_name = '{:s}_45day_{:s}.txt'.format(
@@ -962,3 +969,43 @@ def list_files(name, tag, inst_id, data_path, format_str=None):
         files.loc[files.index[-1] + pds_offset] = files.values[-1]
 
     return files
+
+
+def find_issue_date(file_txt, date_fmt="%H%M UT %d %b %Y"):
+    r"""Find the issue date for a SWPC file.
+
+    Parameters
+    ----------
+    file_txt : str
+        String containing all text in a file.
+    date_fmt : str
+        Expected date format (default='%H%M UT %d %b %Y')
+
+    Returns
+    -------
+    dl_date : dt.datetime or NoneType
+        Datetime object with time that the file was issued or None if an
+        error is encountered
+
+    Note
+    ----
+    Assumes the file has a line formatted as: ':Issued: <date_fmt>\n'
+
+    """
+    dl_date = None
+
+    # Parse text to get the date the prediction was generated
+    if file_txt.find(':Issued:') >= 0:
+        date_str = file_txt.split(':Issued: ')[-1].split('\n')[0]
+
+        try:
+            dl_date = dt.datetime.strptime(date_str, date_fmt)
+        except Exception as err:
+            pysat.logger.critical(''.join(['Unexpected issued date format ',
+                                           repr(date_str), ' did not match ',
+                                           date_fmt, '; failed with error: ',
+                                           repr(err)]))
+    else:
+        pysat.logger.critical('Unable to find issue line in file')
+
+    return dl_date
diff --git a/pysatSpaceWeather/instruments/norp_rf.py b/pysatSpaceWeather/instruments/norp_rf.py
new file mode 100644
index 00000000..fc32a0af
--- /dev/null
+++ b/pysatSpaceWeather/instruments/norp_rf.py
@@ -0,0 +1,223 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-.
+# Full license can be found in License.md
+# Full author list can be found in .zenodo.json file
+# DOI:10.5281/zenodo.3986138
+# ----------------------------------------------------------------------------
+"""Supports solar radio flux values from the Nobeyama Radio Polarimeters.
+
+Properties
+----------
+platform
+    'norp'
+name
+    'rf'
+tag
+    - 'daily' Daily solar flux values from 1951-11-01 onward
+inst_id
+    - None supported
+
+Examples
+--------
+Download and load all of the daily radio flux data.
+::
+
+    rf = pysat.Instrument('norp', 'rf', tag='daily')
+    rf.download(start=dt.datetime(1951, 11, 1), stop=rf.today())
+    rf.load(date=dt.datetime(1951, 11, 1), end_date=rf.today())
+
+"""
+
+import datetime as dt
+import numpy as np
+import pandas as pds
+
+import pysat
+
+from pysatSpaceWeather.instruments import methods
+
+# ----------------------------------------------------------------------------
+# Instrument attributes
+
+platform = 'norp'
+name = 'rf'
+tags = {'daily': 'Daily solar flux values'}
+inst_ids = {'': [tag for tag in tags.keys()]}
+
+# ----------------------------------------------------------------------------
+# Instrument test attributes
+
+_test_dates = {'': {'daily': dt.datetime(1951, 11, 1)}}
+
+# ----------------------------------------------------------------------------
+# Instrument methods
+
+preprocess = methods.general.preprocess
+
+
+def init(self):
+    """Initialize the Instrument object with instrument specific values."""
+
+    # Set the required Instrument attributes
+    self.acknowledgements = methods.norp.acknowledgements()
+    self.references = methods.norp.references(self.name, self.tag)
+    pysat.logger.info(self.acknowledgements)
+
+    return
+
+
+def clean(self):
+    """Clean the solar radio flux, empty function as this is not necessary."""
+
+    return
+
+
+# ----------------------------------------------------------------------------
+# Instrument functions
+
+
+def load(fnames, tag='', inst_id=''):
+    """Load NoRP solar radio flux files.
+
+    Parameters
+    ----------
+    fnames : pandas.Series
+        Series of filenames.
+    tag : str
+        Instrument tag. (default='')
+    inst_id : str
+        Instrument ID, not used. (default='')
+
+    Returns
+    -------
+    data : pandas.DataFrame
+        Object containing satellite data.
+    meta : pysat.Meta
+        Object containing metadata such as column names and units.
+
+    See Also
+    --------
+    pysat.instruments.methods.general.load_csv_data
+
+    Note
+    ----
+    Called by pysat. Not intended for direct use by user.
+
+    """
+
+    # Get the desired file dates and file names from the daily indexed list
+    file_dates = list()
+    if tag in ['daily']:
+        unique_files = list()
+        for fname in fnames:
+            file_dates.append(dt.datetime.strptime(fname[-10:], '%Y-%m-%d'))
+            if fname[0:-11] not in unique_files:
+                unique_files.append(fname[0:-11])
+        fnames = unique_files
+
+    # Load the CSV data files
+    data = pysat.instruments.methods.general.load_csv_data(
+        fnames, read_csv_kwargs={"index_col": 0, "parse_dates": True})
+
+    # If there is a date range, downselect here
+    if len(file_dates) > 0:
+        idx, = np.where((data.index >= min(file_dates))
+                        & (data.index < max(file_dates) + dt.timedelta(days=1)))
+        data = data.iloc[idx, :]
+
+    # Initialize the metadata
+    meta = pysat.Meta()
+    for col in data.columns:
+        meta[col] = {
+            meta.labels.units: 'SFU', meta.labels.notes: '',
+            meta.labels.name: 'NoRP solar radio flux {:} w/AU corr'.format(
+                col.replace("_", " ")),
+            meta.labels.desc: ''.join([
+                'NoRP solar radio flux at ', col.replace("_", " "),
+                ' with Astronomical Unit (AU) correction in Solar Flux Units',
+                ' (SFU).']),
+            meta.labels.fill_val: np.nan,
+            meta.labels.min_val: 0, meta.labels.max_val: np.inf}
+
+    return data, meta
+
+
+def list_files(tag='', inst_id='', data_path='', format_str=None):
+    """List local NoRP solar radio flux data files.
+
+    Parameters
+    ----------
+    tag : str
+        Instrument tag, accepts any value from `tags`. (default='')
+    inst_id : str
+        Instrument ID, not used. (default='')
+    data_path : str
+        Path to data directory.
(default='') + format_str : str or NoneType + User specified file format. If None is specified, the default + formats associated with the supplied tags are used. (default=None) + + Returns + ------- + out_files : pds.Series + A Series containing the verified available files + + Note + ---- + Called by pysat. Not intended for direct use by user. + + """ + # Files are by month, going to add date to monthly filename for each day of + # the month. The load routine will load a month of data and use the + # appended date to select out appropriate data. + if format_str is None: + format_str = "_".join(["norp", "rf", tag, '{year:04d}-{month:02d}.txt']) + + out_files = pysat.Files.from_os(data_path=data_path, format_str=format_str) + if not out_files.empty: + out_files.loc[out_files.index[-1] + pds.DateOffset(months=1) + - pds.DateOffset(days=1)] = out_files.iloc[-1] + out_files = out_files.asfreq('D', 'pad') + out_files = out_files + '_' + out_files.index.strftime('%Y-%m-%d') + + return out_files + + +def download(date_array, tag, inst_id, data_path, update_files=False, + mock_download_dir=None): + """Download NoRP solar radio flux data. + + Parameters + ---------- + date_array : array-like + Sequence of dates for which files will be downloaded. + tag : str + Denotes type of file to load. + inst_id : str + Specifies the satellite ID for a constellation. + data_path : str + Path to data directory. + update_files : bool + Re-download data for files that already exist if True (default=False) + mock_download_dir : str or NoneType + Local directory with downloaded files or None. If not None, will + process any files with the correct name and date as if they were + downloaded (default=None) + + Raises + ------ + IOError + If a problem is encountered connecting to the gateway or retrieving + data from the remote or local repository. + + Note + ---- + Called by pysat. Not intended for direct use by user. + + """ + # Download the daily radio flux data from NoRP, saving data in monthly files + methods.norp.daily_rf_downloads(data_path, + mock_download_dir=mock_download_dir, + start=date_array[0], stop=date_array[-1]) + + return diff --git a/pysatSpaceWeather/instruments/sw_ap.py b/pysatSpaceWeather/instruments/sw_ap.py index cc20853e..f87562fd 100644 --- a/pysatSpaceWeather/instruments/sw_ap.py +++ b/pysatSpaceWeather/instruments/sw_ap.py @@ -89,7 +89,7 @@ inst_ids = {'': list(tags.keys())} # Generate todays date to support loading forecast data -now = dt.datetime.utcnow() +now = dt.datetime.now(tz=dt.timezone.utc) today = dt.datetime(now.year, now.month, now.day) tomorrow = today + dt.timedelta(days=1) diff --git a/pysatSpaceWeather/instruments/sw_f107.py b/pysatSpaceWeather/instruments/sw_f107.py index d3ca6b54..e2ec995b 100644 --- a/pysatSpaceWeather/instruments/sw_f107.py +++ b/pysatSpaceWeather/instruments/sw_f107.py @@ -95,7 +95,7 @@ # Dict keyed by inst_id that lists supported tags and a good day of test data # generate todays date to support loading forecast data -now = dt.datetime.utcnow() +now = dt.datetime.now(tz=dt.timezone.utc) today = dt.datetime(now.year, now.month, now.day) tomorrow = today + dt.timedelta(days=1) @@ -157,7 +157,7 @@ def load(fnames, tag='', inst_id=''): tag : str Instrument tag. (default='') inst_id : str - Instrument ID, not used. (default='') + Instrument ID. 
(default='') Returns ------- @@ -263,8 +263,8 @@ def list_files(tag='', inst_id='', data_path='', format_str=None): Returns ------- - out_files : pysat._files.Files - A class containing the verified available files + out_files : pds.Series + A Series containing the verified available files Note ---- @@ -399,7 +399,7 @@ def download(date_array, tag, inst_id, data_path, update_files=False, # Cut the date from the end of the local files for i, lfile in enumerate(local_files): - local_files[i] = lfile[:-11] + local_files.iloc[i] = lfile[:-11] methods.swpc.old_indices_dsd_download( name, date_array, data_path, local_files, today, diff --git a/pysatSpaceWeather/instruments/sw_flare.py b/pysatSpaceWeather/instruments/sw_flare.py index 54bd1470..78870e02 100644 --- a/pysatSpaceWeather/instruments/sw_flare.py +++ b/pysatSpaceWeather/instruments/sw_flare.py @@ -74,7 +74,7 @@ # Dict keyed by inst_id that lists supported tags and a good day of test data # generate todays date to support loading forecast data -now = dt.datetime.utcnow() +now = dt.datetime.now(tz=dt.timezone.utc) today = dt.datetime(now.year, now.month, now.day) tomorrow = today + dt.timedelta(days=1) diff --git a/pysatSpaceWeather/instruments/sw_kp.py b/pysatSpaceWeather/instruments/sw_kp.py index 030b93b9..f8a331c0 100644 --- a/pysatSpaceWeather/instruments/sw_kp.py +++ b/pysatSpaceWeather/instruments/sw_kp.py @@ -16,7 +16,6 @@ name 'kp' tag - - '' Deprecated, mixed definitive and nowcast Kp data from GFZ - 'def' Definitive Kp data from GFZ - 'now' Nowcast Kp data from GFZ - 'prediction' Predictions from SWPC for the next 3 days @@ -69,7 +68,6 @@ import datetime as dt import numpy as np import pandas as pds -import warnings import pysat @@ -80,8 +78,7 @@ platform = 'sw' name = 'kp' -tags = {'': 'Deprecated, mixed definitive and nowcast Kp data from GFZ', - 'def': 'Definitive Kp data from GFZ', +tags = {'def': 'Definitive Kp data from GFZ', 'now': 'Nowcast Kp data from GFZ', 'prediction': 'SWPC Predictions for the next three days', 'forecast': 'SWPC Forecast data next (3 days)', @@ -89,7 +86,7 @@ inst_ids = {'': list(tags.keys())} # Generate todays date to support loading forecast data -now = dt.datetime.utcnow() +now = dt.datetime.now(tz=dt.timezone.utc) today = dt.datetime(now.year, now.month, now.day) # ---------------------------------------------------------------------------- @@ -102,9 +99,6 @@ 'prediction': today + dt.timedelta(days=1), 'recent': today}} -# Other tags assumed to be True -_test_download_ci = {'': {'': False}} - # ---------------------------------------------------------------------------- # Instrument methods @@ -154,117 +148,10 @@ def load(fnames, tag='', inst_id=''): ---- Called by pysat. Not intended for direct use by user. - Warnings - -------- - tag '' has been deprecated, will be removed in version 0.2.0+ - """ meta = pysat.Meta() - if tag == '': - # This data type has been deprecated due to changes at GFZ - warnings.warn("".join(["Changes at the GFZ database have led to this", - " data type being deprecated. Switch to using", - " 'def' for definitive Kp or 'now' for Kp ", - "nowcasts from GFZ. Load support will be ", - "removed in version 0.2.0+"]), - DeprecationWarning, stacklevel=2) - - # Kp data stored monthly, need to return data daily. The daily date is - # attached to filename. 
Parse off the last date, load month of data, - # and downselect to the desired day - data = pds.DataFrame() - - # Set up fixed width format for these files, only selecting the date - # and daily 3-hour Kp values - date_slice = slice(0, 6) - kp_slice = [slice(7, 10), slice(10, 13), slice(13, 16), slice(16, 19), - slice(19, 23), slice(23, 26), slice(26, 29), slice(29, 32)] - - # These are monthly files, if a date range is desired, test here. - # Does not assume an ordered list, but the date range must be continous - # within a given month. - unique_fnames = dict() - for filename in fnames: - fname = filename[0:-11] - fdate = dt.datetime.strptime(filename[-10:], '%Y-%m-%d') - if fname not in unique_fnames.keys(): - unique_fnames[fname] = [fdate] - else: - unique_fnames[fname].append(fdate) - - # Load all of the desired filenames - all_data = [] - for fname in unique_fnames.keys(): - # The daily date is attached to the filename. Parse off the last - # date, load month of data, downselect to the desired day - fdate = min(unique_fnames[fname]) - - with open(fname, 'r') as fin: - temp = fin.readlines() - - if len(temp) == 0: - pysat.logger.warn('Empty file: {:}'.format(fname)) - continue - - # This file has a different format if it is historic or a file that - # is being actively updated. In either case, this line will - # remove the appropriate number of summmary lines. - ilast = -1 if temp[-1].find('Mean') > 0 else -4 - temp = np.array(temp[:ilast]) - - # Re-format the time data - temp_index = np.array([dt.datetime.strptime(tline[date_slice], - '%y%m%d') - for tline in temp]) - - idx, = np.where((temp_index >= fdate) - & (temp_index < max(unique_fnames[fname]) - + dt.timedelta(days=1))) - - temp_data = list() - for tline in temp[idx]: - temp_data.append(list()) - for col in kp_slice: - temp_data[-1].append(tline[col].strip()) - - # Select the desired times and add to data list - all_data.append(pds.DataFrame(temp_data, index=temp_index[idx])) - - # Combine data together - data = pds.concat(all_data, axis=0, sort=True) - - if len(data.index) > 0: - # Each column increments UT by three hours. Produce a single data - # series that has Kp value monotonically increasing in time with - # appropriate datetime indices - data_series = pds.Series(dtype='float64') - for i in np.arange(8): - tind = data.index + pds.DateOffset(hours=int(3 * i)) - temp = pds.Series(data.iloc[:, i].values, index=tind) - data_series = pds.concat([data_series, temp]) - - data_series = data_series.sort_index() - data_series.index.name = 'time' - - # Kp comes in non-user friendly values like 2-, 2o, and 2+. Relate - # these to 1.667, 2.0, 2.333 for processing and user friendliness - first = np.array([np.float64(str_val[0]) - for str_val in data_series]) - flag = np.array([str_val[1] for str_val in data_series]) - - ind, = np.where(flag == '+') - first[ind] += 1.0 / 3.0 - ind, = np.where(flag == '-') - first[ind] -= 1.0 / 3.0 - - result = pds.DataFrame(first, columns=['Kp'], - index=data_series.index) - else: - result = pds.DataFrame() - - fill_val = np.nan - elif tag in ['def', 'now']: + if tag in ['def', 'now']: # Load the definitive or nowcast data. The Kp data stored in yearly # files, and we need to return data daily. The daily date is # attached to filename. 
Parse off the last date, load month of data, @@ -282,7 +169,7 @@ def load(fnames, tag='', inst_id=''): fill_val = -1 # Initalize the meta data - if tag in ['', 'forecast', 'recent', 'prediction']: + if tag in ['forecast', 'recent', 'prediction']: for kk in result.keys(): methods.kp_ap.initialize_kp_metadata(meta, kk, fill_val) else: @@ -319,35 +206,9 @@ def list_files(tag='', inst_id='', data_path='', format_str=None): ---- Called by pysat. Not intended for direct use by user. - Warnings - -------- - The '' tag has been deprecated and local file listing support will - be removed in version 0.2.0+ - """ - if tag == '': - # This data type has been deprecated due to changes at GFZ - warnings.warn("".join(["Changes at the GFZ database have led to this", - " data type being deprecated. Switch to using", - " 'def' for definitive Kp or 'now' for Kp ", - "nowcasts from GFZ. Local file listing support ", - "will be removed in version 0.2.0+"]), - DeprecationWarning, stacklevel=2) - - # Files are by month, going to add date to monthly filename for - # each day of the month. The load routine will load a month of - # data and use the appended date to select out appropriate data. - if format_str is None: - format_str = 'kp{year:2d}{month:02d}.tab' - files = pysat.Files.from_os(data_path=data_path, format_str=format_str, - two_digit_year_break=0) - if not files.empty: - files.loc[files.index[-1] + pds.DateOffset(months=1) - - pds.DateOffset(days=1)] = files.iloc[-1] - files = files.asfreq('D', 'pad') - files = files + '_' + files.index.strftime('%Y-%m-%d') - elif tag in ['def', 'now']: + if tag in ['def', 'now']: files = methods.gfz.kp_ap_cp_list_files(name, tag, inst_id, data_path, format_str=format_str) else: @@ -389,21 +250,10 @@ def download(date_array, tag, inst_id, data_path, mock_download_dir=None): -------- Only able to download current forecast data, not archived forecasts. - The '' tag has been deprecated and downloads are no longer supported by - the source. Use 'dep' or 'now' instead. - """ # Download standard Kp data - if tag == '': - # This data type has been deprecated due to changes at GFZ - warnings.warn("".join(["Changes at the GFZ database have led to this", - " data type being deprecated. Switch to using", - " 'def' for definitive Kp or 'now' for Kp ", - "nowcasts from GFZ. 
@@ -389,21 +250,10 @@ def download(date_array, tag, inst_id, data_path, mock_download_dir=None):
     --------
     Only able to download current forecast data, not archived forecasts.

-    The '' tag has been deprecated and downloads are no longer supported by
-    the source. Use 'dep' or 'now' instead.
-
     """
     # Download standard Kp data
-    if tag == '':
-        # This data type has been deprecated due to changes at GFZ
-        warnings.warn("".join(["Changes at the GFZ database have led to this",
-                               " data type being deprecated. Switch to using",
-                               " 'def' for definitive Kp or 'now' for Kp ",
-                               "nowcasts from GFZ. Downloads are no longer ",
-                               "supported by GFZ."]),
-                      DeprecationWarning, stacklevel=2)
-    elif tag in ['def', 'now']:
+    if tag in ['def', 'now']:
         methods.gfz.kp_ap_cp_download(platform, name, date_array, tag,
                                       inst_id, data_path, mock_download_dir)
     elif tag == 'forecast':
diff --git a/pysatSpaceWeather/instruments/sw_mgii.py b/pysatSpaceWeather/instruments/sw_mgii.py
index c624a196..2dc766ef 100644
--- a/pysatSpaceWeather/instruments/sw_mgii.py
+++ b/pysatSpaceWeather/instruments/sw_mgii.py
@@ -55,7 +55,7 @@
 # Dict keyed by inst_id that lists supported tags and a good day of test data

 # generate todays date to support loading forecast data
-now = dt.datetime.utcnow()
+now = dt.datetime.now(tz=dt.timezone.utc)
 today = dt.datetime(now.year, now.month, now.day)
 tomorrow = today + pds.DateOffset(days=1)
diff --git a/pysatSpaceWeather/instruments/sw_polarcap.py b/pysatSpaceWeather/instruments/sw_polarcap.py
index 9a85df01..7739ff1c 100644
--- a/pysatSpaceWeather/instruments/sw_polarcap.py
+++ b/pysatSpaceWeather/instruments/sw_polarcap.py
@@ -69,7 +69,7 @@
 inst_ids = {'': list(tags.keys())}

 # Generate todays date to support loading forecast data
-now = dt.datetime.utcnow()
+now = dt.datetime.now(tz=dt.timezone.utc)
 today = dt.datetime(now.year, now.month, now.day)
 tomorrow = today + dt.timedelta(days=1)
diff --git a/pysatSpaceWeather/instruments/sw_sbfield.py b/pysatSpaceWeather/instruments/sw_sbfield.py
index f4207f41..352dcbf8 100644
--- a/pysatSpaceWeather/instruments/sw_sbfield.py
+++ b/pysatSpaceWeather/instruments/sw_sbfield.py
@@ -52,7 +52,7 @@
 # Dict keyed by inst_id that lists supported tags and a good day of test data

 # generate todays date to support loading forecast data
-now = dt.datetime.utcnow()
+now = dt.datetime.now(tz=dt.timezone.utc)
 today = dt.datetime(now.year, now.month, now.day)
 tomorrow = today + pds.DateOffset(days=1)
diff --git a/pysatSpaceWeather/instruments/sw_ssn.py b/pysatSpaceWeather/instruments/sw_ssn.py
index 77eafba2..81f6e9ca 100644
--- a/pysatSpaceWeather/instruments/sw_ssn.py
+++ b/pysatSpaceWeather/instruments/sw_ssn.py
@@ -55,7 +55,7 @@
 # Dict keyed by inst_id that lists supported tags and a good day of test data

 # generate todays date to support loading forecast data
-now = dt.datetime.utcnow()
+now = dt.datetime.now(tz=dt.timezone.utc)
 today = dt.datetime(now.year, now.month, now.day)
 tomorrow = today + pds.DateOffset(days=1)
diff --git a/pysatSpaceWeather/instruments/sw_stormprob.py b/pysatSpaceWeather/instruments/sw_stormprob.py
index 1dea8079..aec1fde0 100644
--- a/pysatSpaceWeather/instruments/sw_stormprob.py
+++ b/pysatSpaceWeather/instruments/sw_stormprob.py
@@ -70,7 +70,7 @@
 inst_ids = {'': list(tags.keys())}

 # Generate todays date to support loading forecast data
-now = dt.datetime.utcnow()
+now = dt.datetime.now(tz=dt.timezone.utc)
 today = dt.datetime(now.year, now.month, now.day)
 tomorrow = today + dt.timedelta(days=1)
diff --git a/pysatSpaceWeather/tests/test_data/TYKW-NoRP_dailyflux.txt b/pysatSpaceWeather/tests/test_data/TYKW-NoRP_dailyflux.txt
new file mode 100644
index 00000000..789b026d
--- /dev/null
+++ b/pysatSpaceWeather/tests/test_data/TYKW-NoRP_dailyflux.txt
@@ -0,0 +1,7 @@
+TYKW-NoRP solar daily flux (1951-11-02 -- 1951-11-06)
+Date,1 GHz,2 GHz,3.75 GHz,9.4 GHz
+"1951-11-02",NaN,NaN,NaN,NaN
+"1951-11-03",NaN,NaN,NaN,NaN
+"1951-11-04",NaN,NaN,NaN,NaN
+"1951-11-05",NaN,NaN,NaN,NaN
+"1951-11-06",NaN,NaN,115.000,NaN
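Every `utcnow` replacement in this patch follows the same recipe, shown in isolation below. `datetime.utcnow` is deprecated, and the timezone-aware form keeps the same wall-clock values; rebuilding `today` from the date components intentionally drops the timezone, so downstream pysat date arithmetic still sees naive timestamps. A minimal sketch:

```python
import datetime as dt

# Timezone-aware replacement for the deprecated dt.datetime.utcnow()
now = dt.datetime.now(tz=dt.timezone.utc)

# Rebuilding from components drops tzinfo, leaving a naive UTC midnight
today = dt.datetime(now.year, now.month, now.day)
tomorrow = today + dt.timedelta(days=1)

print(now.tzinfo)    # UTC
print(today.tzinfo)  # None
```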
diff --git a/pysatSpaceWeather/tests/test_instruments.py b/pysatSpaceWeather/tests/test_instruments.py
index ac685c45..a1c03550 100644
--- a/pysatSpaceWeather/tests/test_instruments.py
+++ b/pysatSpaceWeather/tests/test_instruments.py
@@ -21,7 +21,6 @@
 import pytest
 import sys
 import tempfile
-import warnings

 # Make sure to import your instrument library here
 import pysatSpaceWeather
@@ -49,52 +48,6 @@ class TestInstruments(clslib.InstLibTests):
     """


-class TestLocalDeprecation(object):
-    """Unit tests for local instrument deprecation warnings."""
-
-    def setup_method(self):
-        """Set up the unit test environment for each method."""
-
-        warnings.simplefilter("always", DeprecationWarning)
-        self.in_kwargs = [
-            {"inst_module": pysatSpaceWeather.instruments.sw_kp, 'tag': ''}]
-        self.ref_time = dt.datetime(2001, 1, 1)
-        self.warn_msgs = []
-        self.war = ""
-        return
-
-    def teardown_method(self):
-        """Clean up the unit test environment after each method."""
-
-        del self.in_kwargs, self.ref_time, self.warn_msgs, self.war
-        return
-
-    def eval_warnings(self):
-        """Evaluate the number and message of the raised warnings."""
-
-        # Ensure the minimum number of warnings were raised.
-        assert len(self.war) >= len(self.warn_msgs)
-
-        # Test the warning messages, ensuring each attribute is present.
-        testing.eval_warnings(self.war, self.warn_msgs)
-        return
-
-    def test_sw_kp_default_tag_deprecation(self):
-        """Test the deprecation of the '' tag for the sw_kp Instrument."""
-
-        with warnings.catch_warnings(record=True) as self.war:
-            pysat.Instrument(**self.in_kwargs[0])
-
-        self.warn_msgs = ["".join(["Changes at the GFZ database have led to ",
-                                   "this data type being deprecated. Switch ",
-                                   "to using 'def' for definitive Kp or ",
-                                   "'now' for Kp nowcasts from GFZ."])]
-
-        # Evaluate the warning output
-        self.eval_warnings()
-        return
-
-
 class TestSWInstrumentLogging(object):
     """Test logging messages raised under instrument-specific conditions."""
diff --git a/pysatSpaceWeather/tests/test_methods_ace.py b/pysatSpaceWeather/tests/test_methods_ace.py
index 41be5173..ef92e287 100644
--- a/pysatSpaceWeather/tests/test_methods_ace.py
+++ b/pysatSpaceWeather/tests/test_methods_ace.py
@@ -8,7 +8,6 @@
 # ----------------------------------------------------------------------------
 """Integration and unit test suite for ACE methods."""

-from packaging.version import Version
 import pytest

 import pysat
@@ -61,21 +60,12 @@ def test_clean_bad_inst(self):
         return


-@pytest.mark.skipif(Version(pysat.__version__) < Version('3.0.2'),
-                    reason="Requires time routine available in pysat 3.0.2+")
 class TestACESWEPAMMethods(object):
     """Test class for ACE SWEPAM methods."""

     def setup_method(self):
         """Create a clean testing setup."""
-
-        # TODO(#131): Remove version check after min version supported is 3.2.0
-        inst_kwargs = dict()
-        if all([Version(pysat.__version__) > Version('3.0.1'),
-                Version(pysat.__version__) < Version('3.2.0')]):
-            inst_kwargs['use_header'] = True
-
-        self.testInst = pysat.Instrument('pysat', 'testing', **inst_kwargs)
+        self.testInst = pysat.Instrument('pysat', 'testing')
         self.testInst.load(date=self.testInst.inst_module._test_dates[''][''])

         self.omni_keys = ['sw_proton_dens_norm', 'sw_ion_temp_norm']
diff --git a/pysatSpaceWeather/tests/test_methods_f107.py b/pysatSpaceWeather/tests/test_methods_f107.py
index 30bc16e5..812e95fd 100644
--- a/pysatSpaceWeather/tests/test_methods_f107.py
+++ b/pysatSpaceWeather/tests/test_methods_f107.py
@@ -10,7 +10,6 @@
 import datetime as dt
 import numpy as np
-from packaging.version import Version
 import pandas as pds

 import pysat
@@ -142,12 +141,6 @@ def setup_method(self):
                              for tag in sw_f107.tags.keys()}
         self.combine_times = {"start": self.test_day - dt.timedelta(days=30),
                               "stop": self.test_day + dt.timedelta(days=3)}
-        self.load_kwargs = {}
-
-        # TODO(#131): Remove version check after min version supported is 3.2.0
-        if all([Version(pysat.__version__) > Version('3.0.1'),
-                Version(pysat.__version__) < Version('3.2.0')]):
-            self.load_kwargs['use_header'] = True

         return

@@ -155,7 +148,6 @@ def teardown_method(self):
         """Clean up previous testing setup."""
         pysat.params.data['data_dirs'] = self.saved_path
         del self.combine_inst, self.test_day, self.combine_times
-        del self.load_kwargs
         return

     def test_combine_f107_none(self):
@@ -203,14 +195,34 @@ def test_combine_f107_no_data(self):
         del combo_in, f107_inst
         return

+    def test_combine_f107_no_data_no_files(self):
+        """Test `combine_f107` without data or files for the specified times."""
+
+        # Unset the file list for the instrument
+        self.combine_inst['forecast'].files.files = pds.Series([])
+
+        # Set the function inputs
+        combo_in = {kk: self.combine_inst['forecast'] for kk in
+                    ['standard_inst', 'forecast_inst']}
+        combo_in['start'] = dt.datetime(2014, 2, 19)
+        combo_in['stop'] = dt.datetime(2014, 2, 24)
+
+        # Run the method
+        f107_inst = mm_f107.combine_f107(**combo_in)
+
+        # Test the output
+        assert f107_inst.data.isnull().all()["f107"]
+
+        del combo_in, f107_inst
+        return
+
     def test_combine_f107_inst_time(self):
         """Test `combine_f107` with times provided through datasets."""

         self.combine_inst['historic'].load(
             date=self.combine_inst['historic'].lasp_stime,
-            end_date=self.combine_times['start'], **self.load_kwargs)
-        self.combine_inst['forecast'].load(date=self.test_day,
-                                           **self.load_kwargs)
+            end_date=self.combine_times['start'])
+        self.combine_inst['forecast'].load(date=self.test_day)

         f107_inst = mm_f107.combine_f107(self.combine_inst['historic'],
                                          self.combine_inst['forecast'])
diff --git a/pysatSpaceWeather/tests/test_methods_general.py b/pysatSpaceWeather/tests/test_methods_general.py
index bc7db985..eab60b20 100644
--- a/pysatSpaceWeather/tests/test_methods_general.py
+++ b/pysatSpaceWeather/tests/test_methods_general.py
@@ -9,26 +9,18 @@
 """Integration and unit test suite for ACE methods."""

 import numpy as np
-from packaging.version import Version
+
 import pysat
-import pytest

 from pysatSpaceWeather.instruments.methods import general


-@pytest.mark.skipif(Version(pysat.__version__) < Version('3.0.2'),
-                    reason="Test setup requires pysat 3.0.2+")
 class TestGeneralMethods(object):
     """Test class for general methods."""

     def setup_method(self):
         """Create a clean testing setup."""
-        # TODO(#131): Remove version check after min version supported is 3.2.0
-        inst_kwargs = dict()
-        if all([Version(pysat.__version__) > Version('3.0.1'),
-                Version(pysat.__version__) < Version('3.2.0')]):
-            inst_kwargs['use_header'] = True
-        self.testInst = pysat.Instrument('pysat', 'testing', **inst_kwargs)
+        self.testInst = pysat.Instrument('pysat', 'testing')
         self.testInst.load(date=self.testInst.inst_module._test_dates[''][''])
         return
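All of the `TODO(#131)` deletions in these test modules remove one recurring guard, isolated below for reference. With pysat 3.2.0 as the new minimum, the `use_header` keyword no longer needs to be passed conditionally, so both the guard and the `packaging.version` import disappear. The removed pattern, as a standalone sketch (requires pysat and packaging to run):

```python
from packaging.version import Version

import pysat

inst_kwargs = dict()

# Only pysat releases after 3.0.1 and before 3.2.0 needed `use_header`
if all([Version(pysat.__version__) > Version('3.0.1'),
        Version(pysat.__version__) < Version('3.2.0')]):
    inst_kwargs['use_header'] = True

inst = pysat.Instrument('pysat', 'testing', **inst_kwargs)
```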
diff --git a/pysatSpaceWeather/tests/test_methods_gfz.py b/pysatSpaceWeather/tests/test_methods_gfz.py
index c3954336..dfdb8fbd 100644
--- a/pysatSpaceWeather/tests/test_methods_gfz.py
+++ b/pysatSpaceWeather/tests/test_methods_gfz.py
@@ -40,7 +40,8 @@ def test_kp_ap_cp_download_bad_inst(self):
         """Test the download doesn't work for an incorrect Instrument."""
         with pytest.raises(ValueError) as verr:
             gfz.kp_ap_cp_download('platform', 'name', 'tag', 'inst_id',
-                                  [dt.datetime.utcnow()], 'data/path')
+                                  [dt.datetime.now(tz=dt.timezone.utc)],
+                                  'data/path')

         assert str(verr).find('Unknown Instrument module') >= 0
         return
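The CHANGELOG's "Implemented `iloc`" entry shows up in the test_methods_kp.py hunks below, where `self.testInst['3hr_ap'][8]` becomes `.iloc[8]`. Recent pandas deprecates integer keys falling back to positional access on a non-integer index, so position and label lookups must be explicit. A small sketch of the distinction (the sample values are arbitrary):

```python
import pandas as pds

series = pds.Series([10.0, 20.0, 30.0],
                    index=pds.to_datetime(['2019-03-18', '2019-03-19',
                                           '2019-03-20']))

# Explicit positional access; series[1] on a datetime index is deprecated
print(series.iloc[1])            # 20.0, by position
print(series.loc['2019-03-19'])  # 20.0, the same element by label
```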
diff --git a/pysatSpaceWeather/tests/test_methods_kp.py b/pysatSpaceWeather/tests/test_methods_kp.py
index 9f10e3c1..f6d2165d 100644
--- a/pysatSpaceWeather/tests/test_methods_kp.py
+++ b/pysatSpaceWeather/tests/test_methods_kp.py
@@ -10,7 +10,6 @@
 import datetime as dt

 import numpy as np
-from packaging.version import Version
 import pandas as pds

 import pysat
@@ -28,22 +27,12 @@ def setup_method(self):
         """Create a clean testing setup."""
         self.test_function = kp_ap.initialize_kp_metadata

-        inst_dict = {'num_samples': 12}
-        # TODO(#131): Remove version check after min version supported is 3.2.0
-        if all([Version(pysat.__version__) > Version('3.0.1'),
-                Version(pysat.__version__) < Version('3.2.0')]):
-            inst_dict['use_header'] = True
-
         # Load a test instrument
+        inst_dict = {'num_samples': 12}
         self.testInst = pysat.Instrument('pysat', 'testing', **inst_dict)
         test_time = pysat.instruments.pysat_testing._test_dates['']['']
         load_kwargs = {'date': test_time}

-        # TODO(#131): Remove version check after min version supported is 3.2.0
-        if all([Version(pysat.__version__) > Version('3.0.1'),
-                Version(pysat.__version__) < Version('3.2.0')]):
-            load_kwargs['use_header'] = True
-
         self.testInst.load(**load_kwargs)

         # Create Kp data
@@ -132,22 +121,12 @@ def setup_method(self):
         """Create a clean testing setup."""
         self.test_function = kp_ap.initialize_ap_metadata

-        inst_dict = {'num_samples': 12}
-        # TODO(#131): Remove version check after min version supported is 3.2.0
-        if all([Version(pysat.__version__) > Version('3.0.1'),
-                Version(pysat.__version__) < Version('3.2.0')]):
-            inst_dict['use_header'] = True
-
         # Load a test instrument
+        inst_dict = {'num_samples': 12}
         self.testInst = pysat.Instrument('pysat', 'testing', **inst_dict)
         test_time = pysat.instruments.pysat_testing._test_dates['']['']
         load_kwargs = {'date': test_time}

-        # TODO(#131): Remove version check after min version supported is 3.2.0
-        if all([Version(pysat.__version__) > Version('3.0.1'),
-                Version(pysat.__version__) < Version('3.2.0')]):
-            load_kwargs['use_header'] = True
-
         self.testInst.load(**load_kwargs)

         # Create Kp data
@@ -187,22 +166,12 @@ def setup_method(self):
         """Create a clean testing setup."""
         self.test_function = kp_ap.initialize_bartel_metadata

-        inst_dict = {'num_samples': 12}
-        # TODO(#131): Remove version check after min version supported is 3.2.0
-        if all([Version(pysat.__version__) > Version('3.0.1'),
-                Version(pysat.__version__) < Version('3.2.0')]):
-            inst_dict['use_header'] = True
-
         # Load a test instrument
+        inst_dict = {'num_samples': 12}
         self.testInst = pysat.Instrument('pysat', 'testing', **inst_dict)
         test_time = pysat.instruments.pysat_testing._test_dates['']['']
         load_kwargs = {'date': test_time}

-        # TODO(#131): Remove version check after min version supported is 3.2.0
-        if all([Version(pysat.__version__) > Version('3.0.1'),
-                Version(pysat.__version__) < Version('3.2.0')]):
-            load_kwargs['use_header'] = True
-
         self.testInst.load(**load_kwargs)

         # Create Kp data
@@ -291,22 +260,13 @@ class TestSWKp(object):
     def setup_method(self):
         """Create a clean testing setup."""
-        inst_dict = {'num_samples': 12}
-        # TODO(#131): Remove version check after min version supported is 3.2.0
-        if all([Version(pysat.__version__) > Version('3.0.1'),
-                Version(pysat.__version__) < Version('3.2.0')]):
-            inst_dict['use_header'] = True

         # Load a test instrument
+        inst_dict = {'num_samples': 12}
         self.testInst = pysat.Instrument('pysat', 'testing', **inst_dict)
         test_time = pysat.instruments.pysat_testing._test_dates['']['']
         load_kwargs = {'date': test_time}

-        # TODO(#131): Remove version check after min version supported is 3.2.0
-        if all([Version(pysat.__version__) > Version('3.0.1'),
-                Version(pysat.__version__) < Version('3.2.0')]):
-            load_kwargs['use_header'] = True
-
         self.testInst.load(**load_kwargs)

         # Create Kp data
@@ -395,7 +355,7 @@ def test_convert_ap_to_kp_middle(self):
         """Test conversion of ap to Kp where ap is not an exact Kp value."""

         kp_ap.convert_3hr_kp_to_ap(self.testInst)
-        new_val = self.testInst['3hr_ap'].iloc[8] + 1
+        new_val = self.testInst['3hr_ap'].iloc[8] + 1
         self.testInst.data.at[self.testInst.index[8], '3hr_ap'] = new_val

         kp_out, kp_meta = kp_ap.convert_ap_to_kp(self.testInst['3hr_ap'])
@@ -508,10 +468,6 @@ def setup_method(self):
         # Set combination testing input
         test_day = dt.datetime(2019, 3, 18)
         idict = {'inst_module': sw_kp, 'update_files': True}
-        # TODO(#131): Remove version check after min version supported is 3.2.0
-        if all([Version(pysat.__version__) > Version('3.0.1'),
-                Version(pysat.__version__) < Version('3.2.0')]):
-            idict['use_header'] = True

         self.combine = {"standard_inst": pysat.Instrument(tag="def", **idict),
                         "recent_inst": pysat.Instrument(tag="recent", **idict),
@@ -521,10 +477,6 @@ def setup_method(self):
                         "stop": test_day + dt.timedelta(days=3),
                         "fill_val": -1}
         self.load_kwargs = {"date": test_day}
-        # TODO(#131): Remove version check after min version supported is 3.2.0
-        if all([Version(pysat.__version__) > Version('3.0.1'),
-                Version(pysat.__version__) < Version('3.2.0')]):
-            self.load_kwargs['use_header'] = True

         return

@@ -591,6 +543,27 @@ def test_combine_kp_no_data(self):
         del combo_in, kp_inst
         return

+    def test_combine_kp_no_data_no_files(self):
+        """Test combine_kp without data or files for the specified times."""
+
+        # Unset the file list for the instrument
+        self.combine['forecast_inst'].files.files = pds.Series([])
+
+        # Set the function inputs
+        combo_in = {kk: self.combine['forecast_inst'] for kk in
+                    ['standard_inst', 'recent_inst', 'forecast_inst']}
+        combo_in['start'] = dt.datetime(2014, 2, 19)
+        combo_in['stop'] = dt.datetime(2014, 2, 24)
+
+        # Run the method
+        kp_inst = kp_ap.combine_kp(**combo_in)
+
+        # Test the output
+        assert kp_inst.data.isnull().all()["Kp"]
+
+        del combo_in, kp_inst
+        return
+
     def test_combine_kp_inst_time(self):
         """Test combine_kp when times are provided through the instruments."""

@@ -695,20 +668,10 @@ class TestSWAp(object):
     def setup_method(self):
         """Create a clean testing setup."""
         inst_dict = {'num_samples': 10}
-        # TODO(#131): Remove version check after min version supported is 3.2.0
-        if all([Version(pysat.__version__) > Version('3.0.1'),
-                Version(pysat.__version__) < Version('3.2.0')]):
-            inst_dict['use_header'] = True
-
         self.test_inst = pysat.Instrument('pysat', 'testing', **inst_dict)
         test_time = pysat.instruments.pysat_testing._test_dates['']['']
         load_kwargs = {'date': test_time}

-        # TODO(#131): Remove version check after min version supported is 3.2.0
-        if all([Version(pysat.__version__) > Version('3.0.1'),
-                Version(pysat.__version__) < Version('3.2.0')]):
-            load_kwargs['use_header'] = True
-
         self.test_inst.load(**load_kwargs)

         # Create 3 hr Ap data
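The two new `*_no_data_no_files` tests, together with the matching f107 test earlier in this patch, exercise the bug fix listed in the CHANGELOG: the combine functions previously assumed files would be present. Both tests force the empty-file path the same way, by overwriting the instrument's file list; a sketch of just that setup step (any instrument works for illustration, the tests themselves use the Kp and F10.7 forecast instruments):

```python
import pandas as pds

import pysat

inst = pysat.Instrument('pysat', 'testing')

# An empty Series simulates a platform with no files on disk, forcing
# combine_kp and combine_f107 down their missing-file branches
inst.files.files = pds.Series([])

print(inst.files.files.empty)  # True
```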
diff --git a/pysatSpaceWeather/tests/test_methods_norp.py b/pysatSpaceWeather/tests/test_methods_norp.py
new file mode 100644
index 00000000..e042d047
--- /dev/null
+++ b/pysatSpaceWeather/tests/test_methods_norp.py
@@ -0,0 +1,55 @@
+#!/usr/bin/env python
+# Full license can be found in License.md
+# Full author list can be found in .zenodo.json file
+# DOI:10.5281/zenodo.3986138
+# ----------------------------------------------------------------------------
+"""Integration and unit test suite for NoRP methods."""
+
+import pytest
+
+from pysatSpaceWeather.instruments.methods import norp as mm_norp
+
+
+class TestNoRPMethods(object):
+    """Test class for NoRP methods."""
+
+    def setup_method(self):
+        """Create a clean testing setup."""
+        self.out = None
+        return
+
+    def teardown_method(self):
+        """Clean up previous testing setup."""
+        del self.out
+        return
+
+    def test_acknowledgements(self):
+        """Test the NoRP acknowledgements."""
+        self.out = mm_norp.acknowledgements()
+        assert self.out.find('NoRP') >= 0
+        return
+
+    @pytest.mark.parametrize('name,tag', [('rf', 'daily')])
+    def test_references(self, name, tag):
+        """Test the references for a NoRP instrument.
+
+        Parameters
+        ----------
+        name : str
+            Instrument name
+        tag : str
+            Instrument tag
+
+        """
+        self.out = mm_norp.references(name, tag)
+        assert self.out.find('Toyokawa Observatory') > 0
+        return
+
+    def test_references_bad_name(self):
+        """Test that the references raise an informative error for a bad name."""
+        with pytest.raises(KeyError) as kerr:
+            mm_norp.references('ace', 'sis')
+
+        assert str(kerr.value).find('ace') >= 0, \
+            "Unknown KeyError message: {:}".format(kerr.value)
+        return
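For context on what this new test suite covers: once the package's instruments are registered with pysat, the NoRP radio flux data documented in supported_instruments.rst loads through the standard pysat interface. A usage sketch (assumes network access for the download; platform 'norp', name 'rf', and tag 'daily' follow the module and test names above):

```python
import pysat

import pysatSpaceWeather

# Register the package's instruments with pysat (needed once per install)
pysat.utils.registry.register_by_module(pysatSpaceWeather.instruments)

# Daily NoRP solar radio flux from the norp_rf instrument module
norp = pysat.Instrument(platform='norp', name='rf', tag='daily')
norp.download(start=norp.today())
norp.load(date=norp.today())
print(norp.data.head())
```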