From b4a9166e8b5325d1607c29950cbc1c029914602d Mon Sep 17 00:00:00 2001 From: Marc Pound <22331890+mpound@users.noreply.github.com> Date: Fri, 1 Dec 2023 12:52:45 -0500 Subject: [PATCH 01/37] new core file, veldef function and test --- src/dysh/fits/core.py | 78 ++++++++++++++++++++++++++++++++ src/dysh/fits/tests/test_core.py | 48 ++++++++++++++++++++ 2 files changed, 126 insertions(+) create mode 100644 src/dysh/fits/core.py create mode 100644 src/dysh/fits/tests/test_core.py diff --git a/src/dysh/fits/core.py b/src/dysh/fits/core.py new file mode 100644 index 00000000..9202f917 --- /dev/null +++ b/src/dysh/fits/core.py @@ -0,0 +1,78 @@ +""" +Core functions for FITS/SDFITS +""" + +# Velocity frame conventions, partially stolen from pyspeckit. +# See also Section6.2.5.2 of the GBT observer's guide https://www.gb.nrao.edu/scienceDocs/GBTog.pdf + +frame_dict = { + "VLSR": "LSRK", + "VRAD": "LSRK", + "VELO": "LSRK", + "VOPT": "LSRK", + "LSRD": "LSRD", + "LSRK": "LSRK", + "-LSR": "LSRK", + "-HEL": "heliocentric", + "-BAR": "barycentric", + "BAR": "barycentric", + "BARY": "barycentric", + "-OBS": "obs", + "VHEL": "heliocentric", + "VGEO": "topocentric", + "TOPO": "topocentric", + "VREST": "rest", + "Z": "rest", + "FREQ": "rest", + "WAV": "rest", + "WAVE": "rest", + "CMB": "cmb", + "GALAC": "galactic", + "GALA": "galactic", + "ALAC": "galactic", +} + +# Dictionary to convert from FITS velocity convention to specutils string. +# At GBT, VELO was written by sdfits filler for some unknown amount of +# time instead of RELA, so allow for it here +vconv_dict = { + "OPTI": "doppler_optical", + "RADI": "doppler_radio", + "RELA": "doppler_relativistic", + "VELO": "doppler_relativistic", +} + + +def decode_veldef(veldef): + """ + Parse the SDFITS VELDEF value into its two components, the velocity + definition and velocity reference frame. This value must contain + no more than 8 characters where the first 4 characters describe the velocity + definition and the last 4 characters describe the reference frame. 
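+    The frame portion may be shorter than four characters; for example,
+    ``'OPTICMB'`` decodes to ``('doppler_optical', 'cmb')``.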
+ + Parameters + ---------- + veldef : str + The definition string, consisting of a velocity convention and a velocity frame, e.g., 'OPTI-LSR' + + Returns + ------- + A str tuple of velocity convention and velocity frame type, e.g., ('doppler_radio', 'LSRK') + """ + if len(veldef) > 8: + # in future, possibly relax this requirement + # if string not coming from FITS + raise ValueError(f"VELDEF string {veldef} must be no more than 8 characters.") + vconv = veldef[:4] + try: + velocity_convention = vconv_dict[vconv] + except KeyError: + raise KeyError(f"Velocity convention {vconv} not recognized.") + + frame = veldef[4:] + try: + frame_type = frame_dict[frame] + except KeyError: + raise KeyError(f"Velocity frame {frame} not recognized.") + + return velocity_convention, frame_type diff --git a/src/dysh/fits/tests/test_core.py b/src/dysh/fits/tests/test_core.py new file mode 100644 index 00000000..bfb9060f --- /dev/null +++ b/src/dysh/fits/tests/test_core.py @@ -0,0 +1,48 @@ +from dysh.fits import decode_veldef + + +class TestCore: + """Test dysh.fits core functions""" + + def test_veldef(self): + # first make sure we get correct answers for normal inputs + inputs = [ + "RADILSRK", + "RADI-LSR", + "RADILSRD", + "OPTICMB", + "VELO-BAR", + "OPTIBARY", + "RELATOPO", + "RADIGALA", + "OPTI-HEL", + ] + outputs = [ + ("doppler_radio", "LSRK"), + ("doppler_radio", "LSRK"), + ("doppler_radio", "LSRD"), + ("doppler_optical", "cmb"), + ("doppler_relativistic", "barycentric"), + ("doppler_optical", "barycentric"), + ("doppler_relativistic", "topocentric"), + ("doppler_radio", "galactic"), + ("doppler_optical", "heliocentric"), + ] + for i, j in zip(inputs, outputs): + assert decode_veldef(i) == j + + # Now test that bad input raises an exception + try: + decode_veldef("This is more than 8 chars") + except ValueError: + assert True + try: + # frame fails + decode_veldef("OPTI-LRS") + except KeyError: + assert True + try: + # convention fails + decode_veldef("MAXILSRK") + except KeyError: + assert True From 1f3b836b3cf74b24f74a79d097a42112f553082e Mon Sep 17 00:00:00 2001 From: Marc Pound <22331890+mpound@users.noreply.github.com> Date: Fri, 1 Dec 2023 13:23:29 -0500 Subject: [PATCH 02/37] rename because pytest doesn't like files with the same name --- src/dysh/fits/tests/test_fits_core.py | 48 ++++++++++++++++ src/dysh/spectra/tests/test_spectra_core.py | 62 +++++++++++++++++++++ 2 files changed, 110 insertions(+) create mode 100644 src/dysh/fits/tests/test_fits_core.py create mode 100644 src/dysh/spectra/tests/test_spectra_core.py diff --git a/src/dysh/fits/tests/test_fits_core.py b/src/dysh/fits/tests/test_fits_core.py new file mode 100644 index 00000000..bfb9060f --- /dev/null +++ b/src/dysh/fits/tests/test_fits_core.py @@ -0,0 +1,48 @@ +from dysh.fits import decode_veldef + + +class TestCore: + """Test dysh.fits core functions""" + + def test_veldef(self): + # first make sure we get correct answers for normal inputs + inputs = [ + "RADILSRK", + "RADI-LSR", + "RADILSRD", + "OPTICMB", + "VELO-BAR", + "OPTIBARY", + "RELATOPO", + "RADIGALA", + "OPTI-HEL", + ] + outputs = [ + ("doppler_radio", "LSRK"), + ("doppler_radio", "LSRK"), + ("doppler_radio", "LSRD"), + ("doppler_optical", "cmb"), + ("doppler_relativistic", "barycentric"), + ("doppler_optical", "barycentric"), + ("doppler_relativistic", "topocentric"), + ("doppler_radio", "galactic"), + ("doppler_optical", "heliocentric"), + ] + for i, j in zip(inputs, outputs): + assert decode_veldef(i) == j + + # Now test that bad input raises an exception + try: 
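+            # length fails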
+ decode_veldef("This is more than 8 chars") + except ValueError: + assert True + try: + # frame fails + decode_veldef("OPTI-LRS") + except KeyError: + assert True + try: + # convention fails + decode_veldef("MAXILSRK") + except KeyError: + assert True diff --git a/src/dysh/spectra/tests/test_spectra_core.py b/src/dysh/spectra/tests/test_spectra_core.py new file mode 100644 index 00000000..5882cd64 --- /dev/null +++ b/src/dysh/spectra/tests/test_spectra_core.py @@ -0,0 +1,62 @@ +import os + +import numpy as np +import pytest +from astropy.io import fits + +from dysh import util +from dysh.spectra import core + +LOCALDIR = os.path.dirname(os.path.realpath(__file__)) + + +class TestMeanTsys: + """ + Tests for `dysh.spectra.core.dcmeantsys` function. + """ + + def setup_method(self): + self.root_dir = util.get_project_root() + self.data_dir = f"{self.root_dir}/testdata" + + def test_tsys(self): + expected = np.array([17.24000345, 17.17140405, 17.15663698]) + + path_to_file = f"{self.data_dir}/TGBT21A_501_11" + filename = "TGBT21A_501_11_ifnum_0_int_0-2.fits" + sdf_file = f"{path_to_file}/{filename}" + + # Open and select data. + hdu_sdf = fits.open(sdf_file) + table = hdu_sdf[1].data + table_pl0 = table[table["PLNUM"] == 0] + table_pl0_off = table_pl0[table_pl0["SCAN"] == 153] + tcal = table_pl0_off["TCAL"][0] + tsys_dysh = np.empty(table_pl0_off["DATA"].shape[0] // 2, dtype=float) + for i in range(len(tsys_dysh)): + tsys_dysh[i] = core.mean_tsys( + calon=table_pl0_off["DATA"][1::2][i], caloff=table_pl0_off["DATA"][0::2][i], tcal=tcal + ) + # Compare. + assert tsys_dysh == pytest.approx(expected) + + def test_tsys2(self): + path_to_file = f"{self.data_dir}/TGBT21A_501_11" + filein = f"{path_to_file}/TGBT21A_501_11.raw.vegas.fits" + gbtidl_file = f"{path_to_file}/TGBT21A_501_11_getps_scan_152_intnum_0_ifnum_0_plnum_0.fits" + + hdu = fits.open(filein) + table = hdu[1].data + mask = (table["SCAN"] == 153) & (table["IFNUM"] == 0) & (table["PLNUM"] == 0) + mask_on = table[mask]["CAL"] == "T" + mask_off = table[mask]["CAL"] == "F" + table_on = table[mask][mask_on] + table_off = table[mask][mask_off] + nchan = table["DATA"].shape[1] + tsys_dysh = core.mean_tsys(table_on["DATA"][0], table_off["DATA"][0], table_on["TCAL"][0]) + + hdu = fits.open(gbtidl_file) + gbtidl_table = hdu[1].data + gbtidl_tsys = gbtidl_table["TSYS"] + + assert tsys_dysh == gbtidl_tsys From e90d6fed21374443a579b7dac51a90240e342df5 Mon Sep 17 00:00:00 2001 From: Marc Pound <22331890+mpound@users.noreply.github.com> Date: Fri, 1 Dec 2023 13:23:53 -0500 Subject: [PATCH 03/37] add core --- src/dysh/fits/__init__.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/dysh/fits/__init__.py b/src/dysh/fits/__init__.py index b3597907..4b1b1ab4 100644 --- a/src/dysh/fits/__init__.py +++ b/src/dysh/fits/__init__.py @@ -1,4 +1,5 @@ """Classes and functions for importing SDFITS files""" +from dysh.fits.core import * # noqa from dysh.fits.gb20mfitsload import GB20MFITSLoad # noqa from dysh.fits.gbtfitsload import GBTFITSLoad # noqa from dysh.fits.sdfitsload import SDFITSLoad # noqa From f931368b2dab709c82c0d02100c23e5919501905 Mon Sep 17 00:00:00 2001 From: Marc Pound <22331890+mpound@users.noreply.github.com> Date: Mon, 4 Dec 2023 09:46:12 -0500 Subject: [PATCH 04/37] delete old names --- src/dysh/fits/tests/test_core.py | 48 ---------------------- src/dysh/spectra/tests/test_core.py | 62 ----------------------------- 2 files changed, 110 deletions(-) delete mode 100644 src/dysh/fits/tests/test_core.py delete mode 100644 
src/dysh/spectra/tests/test_core.py diff --git a/src/dysh/fits/tests/test_core.py b/src/dysh/fits/tests/test_core.py deleted file mode 100644 index bfb9060f..00000000 --- a/src/dysh/fits/tests/test_core.py +++ /dev/null @@ -1,48 +0,0 @@ -from dysh.fits import decode_veldef - - -class TestCore: - """Test dysh.fits core functions""" - - def test_veldef(self): - # first make sure we get correct answers for normal inputs - inputs = [ - "RADILSRK", - "RADI-LSR", - "RADILSRD", - "OPTICMB", - "VELO-BAR", - "OPTIBARY", - "RELATOPO", - "RADIGALA", - "OPTI-HEL", - ] - outputs = [ - ("doppler_radio", "LSRK"), - ("doppler_radio", "LSRK"), - ("doppler_radio", "LSRD"), - ("doppler_optical", "cmb"), - ("doppler_relativistic", "barycentric"), - ("doppler_optical", "barycentric"), - ("doppler_relativistic", "topocentric"), - ("doppler_radio", "galactic"), - ("doppler_optical", "heliocentric"), - ] - for i, j in zip(inputs, outputs): - assert decode_veldef(i) == j - - # Now test that bad input raises an exception - try: - decode_veldef("This is more than 8 chars") - except ValueError: - assert True - try: - # frame fails - decode_veldef("OPTI-LRS") - except KeyError: - assert True - try: - # convention fails - decode_veldef("MAXILSRK") - except KeyError: - assert True diff --git a/src/dysh/spectra/tests/test_core.py b/src/dysh/spectra/tests/test_core.py deleted file mode 100644 index 5882cd64..00000000 --- a/src/dysh/spectra/tests/test_core.py +++ /dev/null @@ -1,62 +0,0 @@ -import os - -import numpy as np -import pytest -from astropy.io import fits - -from dysh import util -from dysh.spectra import core - -LOCALDIR = os.path.dirname(os.path.realpath(__file__)) - - -class TestMeanTsys: - """ - Tests for `dysh.spectra.core.dcmeantsys` function. - """ - - def setup_method(self): - self.root_dir = util.get_project_root() - self.data_dir = f"{self.root_dir}/testdata" - - def test_tsys(self): - expected = np.array([17.24000345, 17.17140405, 17.15663698]) - - path_to_file = f"{self.data_dir}/TGBT21A_501_11" - filename = "TGBT21A_501_11_ifnum_0_int_0-2.fits" - sdf_file = f"{path_to_file}/{filename}" - - # Open and select data. - hdu_sdf = fits.open(sdf_file) - table = hdu_sdf[1].data - table_pl0 = table[table["PLNUM"] == 0] - table_pl0_off = table_pl0[table_pl0["SCAN"] == 153] - tcal = table_pl0_off["TCAL"][0] - tsys_dysh = np.empty(table_pl0_off["DATA"].shape[0] // 2, dtype=float) - for i in range(len(tsys_dysh)): - tsys_dysh[i] = core.mean_tsys( - calon=table_pl0_off["DATA"][1::2][i], caloff=table_pl0_off["DATA"][0::2][i], tcal=tcal - ) - # Compare. 
- assert tsys_dysh == pytest.approx(expected) - - def test_tsys2(self): - path_to_file = f"{self.data_dir}/TGBT21A_501_11" - filein = f"{path_to_file}/TGBT21A_501_11.raw.vegas.fits" - gbtidl_file = f"{path_to_file}/TGBT21A_501_11_getps_scan_152_intnum_0_ifnum_0_plnum_0.fits" - - hdu = fits.open(filein) - table = hdu[1].data - mask = (table["SCAN"] == 153) & (table["IFNUM"] == 0) & (table["PLNUM"] == 0) - mask_on = table[mask]["CAL"] == "T" - mask_off = table[mask]["CAL"] == "F" - table_on = table[mask][mask_on] - table_off = table[mask][mask_off] - nchan = table["DATA"].shape[1] - tsys_dysh = core.mean_tsys(table_on["DATA"][0], table_off["DATA"][0], table_on["TCAL"][0]) - - hdu = fits.open(gbtidl_file) - gbtidl_table = hdu[1].data - gbtidl_tsys = gbtidl_table["TSYS"] - - assert tsys_dysh == gbtidl_tsys From 24ece765faad71447513240a4d642407d75af1cc Mon Sep 17 00:00:00 2001 From: Thomas Chamberlin Date: Wed, 29 Nov 2023 11:06:37 -0500 Subject: [PATCH 05/37] Fix dependencies --- pyproject.toml | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 858c164f..354af00a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -29,11 +29,8 @@ dependencies = [ "pandas", "scipy", "specutils", - "sphinx", - "myst-parser", - "sphinx-inline-tabs", "ipython", - "wget" + "wget", ] [project.optional-dependencies] @@ -46,8 +43,14 @@ dev = [ "sphinx-autobuild", "sphinx-rtd-theme", "sphinxcontrib-mermaid", - "numpydoc" + "numpydoc", + "sphinx-inline-tabs", +] +nb = [ + "jupyter", + "jupyterlab", ] +all = ["dysh[dev]", "dysh[nb]"] [project.urls] Documentation = "https://github.com/GreenBankObservatory/dysh#readme" From a2340c2772dba2acdcdac24e2873ffad3be6f552 Mon Sep 17 00:00:00 2001 From: Thomas Chamberlin Date: Wed, 29 Nov 2023 10:53:00 -0500 Subject: [PATCH 06/37] Remove GUI; will move to separate repo --- docs/source/gui/build/executable.rst | 60 ------ docs/source/gui/build/index.rst | 10 - docs/source/gui/index.rst | 10 - docs/source/index.rst | 1 - gui/app.py | 305 --------------------------- gui/static/favicon.ico | Bin 67646 -> 0 bytes gui/tests/.gitkeep | 0 gui/util/__init__.py | 0 gui/util/core.py | 31 --- gui/util/dataload.py | 158 -------------- gui/widgets/QIPython.py | 52 ----- gui/widgets/__init__.py | 0 gui/widgets/graphs.py | 129 ----------- gui/widgets/layouts.py | 76 ------- gui/widgets/splash.py | 64 ------ gui/widgets/tables.py | 88 -------- 16 files changed, 984 deletions(-) delete mode 100644 docs/source/gui/build/executable.rst delete mode 100644 docs/source/gui/build/index.rst delete mode 100644 docs/source/gui/index.rst delete mode 100644 gui/app.py delete mode 100644 gui/static/favicon.ico delete mode 100644 gui/tests/.gitkeep delete mode 100644 gui/util/__init__.py delete mode 100644 gui/util/core.py delete mode 100644 gui/util/dataload.py delete mode 100644 gui/widgets/QIPython.py delete mode 100644 gui/widgets/__init__.py delete mode 100644 gui/widgets/graphs.py delete mode 100644 gui/widgets/layouts.py delete mode 100644 gui/widgets/splash.py delete mode 100644 gui/widgets/tables.py diff --git a/docs/source/gui/build/executable.rst b/docs/source/gui/build/executable.rst deleted file mode 100644 index d68aa4da..00000000 --- a/docs/source/gui/build/executable.rst +++ /dev/null @@ -1,60 +0,0 @@ -*********************** -Building the Executable -*********************** - -PyInstaller -=========== - -To build with PyInstaller - -.. 
code:: bash - - (dysh) $ cd gui - (dysh) $ pyinstaller app.py - -Building From Scratch -##################### - -If you somehow lose or clear the ``dysh.spec`` file, you need to run a lot more flags through `PyInstaller` - -.. code:: bash - - (dysh) $ cd gui - (dysh) $ pyinstaller --onefile --noconsole --name "dysh" --icon=./static/favicon.ico --clean -y --collect-all asdf --collect-all asdf_standard --collect-all asdf_transform_schemas --collect-all packaging --collect-all pkg_resources --collect-all astropy --collect-all lz4 --recursive-copy-metadata asdf --recursive-copy-metadata astropy app.py - -Troubleshooting -=============== - -Windows -####### - -If you get the following error: - -.. code:: bash - - OSError: [WinError 225] Operation did not complete successfully because the file contains a virus or potentially unwanted software. - ... - win32ctypes.pywin32.pywintypes.error: (225, 'BeginUpdateResourceW', 'Operation did not complete successfully because the file contains a virus or potentially unwanted software.') - -This is the antivirus program thinking you're getting a virus. To circumvent this: - -1. Open the Windows Security app - -2. Navigate to "Virus & threat protection" - -3. Click "Manage settings" under "Virus & threat protection settings" - -4. Turn off "Real-time protection" - -5. Run the `PyInstaller` build command - -6. Turn "Real-time protection" back on - -What You Can Ignore -################### - -You can safely ignore the following messages: - -* ``WARNING: Library {LIBNAME} required via ctypes not found`` - - * See https://github.com/pyinstaller/pyinstaller/issues/1403 diff --git a/docs/source/gui/build/index.rst b/docs/source/gui/build/index.rst deleted file mode 100644 index a0e97b1e..00000000 --- a/docs/source/gui/build/index.rst +++ /dev/null @@ -1,10 +0,0 @@ -**************** -Building the GUI -**************** - -Stuff about the GUI design - -.. toctree:: - :maxdepth: 2 - - executable diff --git a/docs/source/gui/index.rst b/docs/source/gui/index.rst deleted file mode 100644 index fdfc6952..00000000 --- a/docs/source/gui/index.rst +++ /dev/null @@ -1,10 +0,0 @@ -********** -GUI Design -********** - -Stuff about the GUI design - -.. 
toctree:: - :maxdepth: 2 - - build/index diff --git a/docs/source/index.rst b/docs/source/index.rst index 426d5ce6..2b2e6bc5 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -21,7 +21,6 @@ Contents for_developers/index performance_testing/index design/index - gui/index Indices and tables ================== diff --git a/gui/app.py b/gui/app.py deleted file mode 100644 index 076271a3..00000000 --- a/gui/app.py +++ /dev/null @@ -1,305 +0,0 @@ -# PACKAGE IMPORTS -import sys # , os, psutil, getpass, socket - -# PARALLELIZATION -from concurrent.futures import ThreadPoolExecutor -from threading import Thread - -import wget -from PyQt5.QtCore import * -from PyQt5.QtGui import * -from PyQt5.QtWidgets import * -from qt_material import apply_stylesheet - -# import numpy as np -# import pyqtgraph as pg -# from astropy.io import fits -# from time import time -# import pandas as pd -# import argparse -from screeninfo import get_monitors -from util.core import DyshWorker, ThreadCallbacks -from util.dataload import DataLoader, FITSFileDialog -from widgets.graphs import * -from widgets.layouts import * -from widgets.QIPython import QIPythonConsoleWidget -from widgets.splash import SplashScreen - -# LOCAL GUI IMPORTS -from widgets.tables import FITSHeaderTable - -from dysh.fits.gbtfitsload import GBTFITSLoad - -# DYSH IMPORTS -from dysh.util.messages import * -from dysh.util.parallelization import SingleThread - - -class SelectPanel(QGridLayout): - """The startup window of the GUI""" - - def __init__(self): - """Initializes the startup window""" - super().__init__() - self._init_UI() - - def _init_UI(self): - """Creates the skeleton structure of the GUI""" - - # Make the UI Items - self.main_text = QLabel("Welcome to the Dysh GUI") - self.button = QPushButton("Select file") - self.button.clicked.connect(self.get_files) - - self._init_site_selection() - - # Add the UI Items - self.addWidget(self.main_text, 0, 0, 1, 2) - self.addWidget(self.combo_telescope, 1, 0, 1, 1) - self.addWidget(self.combo_rx, 1, 1, 1, 1) - self.addWidget(self.button, 2, 0, 1, 2) - - def _init_site_selection(self): - self.combo_telescope = QComboBox() - self.combo_telescope.addItem("Auto-Detect") - self.combo_telescope.addItem("Green Bank Telescope (GBT)") - self.combo_telescope.addItem("Green Bank 20-meter Telescope") - self.combo_telescope.addItem("Large Millimeter Telescope (LMT)") - - self.combo_rx = QComboBox() - self.update_combo_rx() - - self.combo_telescope.currentIndexChanged.connect(self.update_combo_rx) - - def update_combo_rx(self): - # [TODO] Load the RX info from a JSON file - self.combo_rx.clear() - self.combo_rx.setEnabled(True) - - ci = int(self.combo_telescope.currentIndex()) - if ci == 0: - # Auto-Detect - self.combo_rx.setEnabled(False) - elif ci == 1: - # Green Bank Telescope (GBT) - self.combo_rx.addItem("PF1 (0.29 - 0.395 GHz)") - self.combo_rx.addItem("PF1 (0.385 - 0.52 GHz)") - self.combo_rx.addItem("PF1 (0.51 - 0.69 GHz)") - self.combo_rx.addItem("PF1 (0.68 - 0.92 GHz)") - self.combo_rx.addItem("PF2 (0.9 - 1.23 GHz)") - self.combo_rx.addItem("L (1.15 - 1.73 GHz)") - self.combo_rx.addItem("S (1.73 - 2.6 GHz)") - self.combo_rx.addItem("UWBR (0.7 - 4.0 GHz)") - self.combo_rx.addItem("C (3.95 - 8.0 GHz)") - self.combo_rx.addItem("X (8.0 - 12.0 GHz)") - self.combo_rx.addItem("Ku (12.0 - 15.4 GHz)") - self.combo_rx.addItem("KFPA (17.0 - 27.5 GHz)") - self.combo_rx.addItem("Ka F1 (26.0 - 31.0 GHz)") - self.combo_rx.addItem("Ka F2 (30.5 - 37.0 GHz)") - self.combo_rx.addItem("Ka F3 (36.0 - 39.5 GHz)") 
- self.combo_rx.addItem("Q (38.2 - 49.8 GHz)") - self.combo_rx.addItem("W1 (68.0 - 74.0 GHz)") - self.combo_rx.addItem("W2 (73.0 - 80.0 GHz)") - self.combo_rx.addItem("W3 (79.0 - 86.0 GHz)") - self.combo_rx.addItem("W4 (85.0 - 92.0 GHz)") - self.combo_rx.addItem("ARGUS (75.0 - 115.5 GHz)") - elif ci == 2: - # Green Bank 20-meter telescope - self.combo_rx.addItem("L (1.15 - 1.73 GHz)") - self.combo_rx.addItem("X (8.0 - 12.0 GHz)") - elif ci == 3: - # Green Bank 20-meter telescope - self.combo_rx.addItem("RSR") - self.combo_rx.addItem("SEQUOIA") - self.combo_rx.addItem("MSIP1mm") - self.combo_rx.addItem("B4R") - self.combo_rx.addItem("TolTEC") - - def get_files(self): - # [TODO] Figure out why this makes you do it twice? - self.file_dialog = FITSFileDialog() - - -class DyshMainWindow(QMainWindow): - """The main window of the GUI""" - - def __init__(self, fpath=None): - """Initializes the main window""" - super(DyshMainWindow, self).__init__() - FriendlyMessages.hello() - - self.setWindowTitle("Dysh GUI") - self._init_geometry(0.8) - - self.info_threads() - # self._init_select_panel() - self._init_main_panel() - - self.show() - - def _init_geometry(self, mult): - """ - Draws the GUI on the primary monitor - - Parameters - ---------- - mult : int or float - proportion of total size to draw window (0.8 = 80%) - - """ - for m in get_monitors(): - if m.is_primary: - self.width = int(m.width * mult) - self.height = int(m.height * mult) - self.xpos = int(m.x + (m.width * (1 - mult)) / 2) - self.ypos = int(m.y + (m.height * (1 - mult)) / 2) - self.setGeometry(self.xpos, self.ypos, self.width, self.height) - - def info_threads(self): - """Updates info on available threads""" - self.threadCountActive = QThreadPool.globalInstance().activeThreadCount() - self.threadCountTotal = QThreadPool.globalInstance().maxThreadCount() - # print(f"You are using {self.threadCountActive} of the {self.threadCountTotal} available QThreads") - - def _init_select_panel(self): - self.main_widget = QWidget() - self.main_layout = SelectPanel() - self.main_widget.setLayout(self.main_layout) - self.setCentralWidget(self.main_widget) - if self.main_layout.file_dialog.exec_() == QDialog.Accepted: - self.fpath = self.main_layout.file_dialog.selectedFiles()[0] - - def _init_main_panel(self): - # self._clear_all() - self._load_data() - self._init_UI() - - # @SingleThread - def SDFITS_load_all(self, fpath): - self.sdfits = GBTFITSLoad(fpath) - - def _load_data(self): - """Opens up the FITS file""" - # [TODO] Load lists in a QThread so the main screen can be created - # [TODO] Add logic to determine if GBTFITSLoad or another - # s_load = DyshWorker(target=self.SDFITS_load_all, args=(self.fpath, 1)) - # s_load.start() - # url = "https://www.gb.nrao.edu/dysh/example_data/onoff-L/data/TGBT21A_501_11.raw.vegas.fits" - # self.fpath = wget.download(url) - self.fpath = "TGBT21A_501_11.raw.vegas.fits" - - self.SDFITS_load_all(self.fpath) # s_load.join() - self.scan = self.sdfits.getps(152, ifnum=0, plnum=0) - self.scan.calibrate() - self.fdata = self.scan.timeaverage(weights="tsys") - - def _init_UI(self): - """Creates the skeleton structure of the GUI""" - self.main_widget = QWidget() - self.main_layout = QGridLayout() - self.main_widget.setLayout(self.main_layout) - self.setCentralWidget(self.main_widget) - - self._init_sidebar() - self._init_toggle_btn() - self._init_tabs() - - self.main_layout.addWidget(self.toggle_btn, 0, 0, 1, 1) - self.main_layout.addWidget(self.sidebar, 1, 0, 1, 1) - - self.main_layout.addWidget(self.tabs, 0, 1, 2, 
2) - self._init_tables() - self._init_plots() - self._init_terminal() - - def _init_tabs(self): - self.tabs = QTabWidget() - self.tab1 = QWidget() - self.tab2 = QWidget() - self.tab3 = QWidget() - self.tab4 = QWidget() - - self.tab1_layout = QGridLayout() - self.tab2_layout = QGridLayout() - self.tab3_layout = QGridLayout() - self.tab4_layout = QGridLayout() - - self.tab1.setLayout(self.tab1_layout) - self.tab2.setLayout(self.tab2_layout) - self.tab3.setLayout(self.tab3_layout) - self.tab4.setLayout(self.tab4_layout) - - self.tabs.addTab(self.tab1, "File") - self.tabs.addTab(self.tab2, "Waterfall") - self.tabs.addTab(self.tab3, "Calibrated Spectrum") - self.tabs.addTab(self.tab4, "Console") - - def _init_sidebar(self): - self.sidebar = CollapsibleSideBar() - self.sidebar.add_box(title="my box 1", contentWidget=QLabel("content 1")) - self.sidebar.add_box(title="my box 2", contentWidget=QLabel("content 2")) - - def _init_toggle_btn(self): - self.toggle_btn = QPushButton("Dock") - self.toggle_btn.clicked.connect(self.toggle_hidden) - - def toggle_hidden(self): - if self.sidebar.isHidden() == True: - self.sidebar.setHidden(False) - else: - self.sidebar.setHidden(True) - - def _init_tables(self): - """Creates tables of FITS information""" - # [TODO] Add selection logic for if len(bintable) > 1 - # [TODO] Do this in a QThread so the main screen can be created - self.hdr0_tbl = FITSHeaderTable() - self.hdr0_tbl.load(self.sdfits.primaryheader()) - self.hdr1_tbl = FITSHeaderTable() - self.hdr1_tbl.load(self.sdfits.binheader()[0]) - self.tab1_layout.addWidget(self.hdr0_tbl, 0, 0, 1, 1) - self.tab1_layout.addWidget(self.hdr1_tbl, 0, 1, 1, 1) - - def _init_plots(self): - """Creates the plot canvases""" - # [TODO] Do this in a QThread so the main screen can be created - self.spec_plot = SingleSpectrum(self.fdata) - # print(f"NINTEGRATIONS: {self.fdata.nintegrations(1)}") - # self.waterfall = WaterfallSpectrum(self.fdata) - self.tab3_layout.addWidget(self.spec_plot, 0, 0, 1, 2) - - def _init_plot_sidebar(self): - pass - - def _init_terminal(self): - self.terminal = QIPythonConsoleWidget() - self.tab4_layout.addWidget(self.terminal, 0, 0, 1, 1) - - def _clear_all(self): - while self.main_layout.count(): - child = self.main_layout.takeAt(0) - if child.widget(): - child.widget().deleteLater() - - def closeEvent(self, *args, **kwargs): - super(QMainWindow, self).closeEvent(*args, **kwargs) - self.terminal.stop() - FriendlyMessages.goodbye() - - -class App(QApplication): - def __init__(self, *args): - QApplication.__init__(self, *args) - self.main = DyshMainWindow() - self.main.show() - - -def main(args): - # global app - app = App(args) - apply_stylesheet(app, theme="dark_purple.xml") - app.exec_() - - -if __name__ == "__main__": - main(sys.argv) diff --git a/gui/static/favicon.ico b/gui/static/favicon.ico deleted file mode 100644 index 2ca452fe2be213c41782537580eb35dc30bd6fc2..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 67646 zcmeHw2XtK5dFGwsSj9S_o8)!Y66LBK=SWW0n<#r?n?8zci;|<=W5@QfERwQfIUD=z z#<8V18|{)}Cjk&334jeGs4m57ECAR<)ERW54yFR2NP+}N3K$fzfWW-{zWeSS40;C! 
[... remaining base85 binary delta for gui/static/favicon.ico omitted ...]
[... hunks deleting gui/tests/.gitkeep, gui/util/*, gui/widgets/__init__.py, gui/widgets/QIPython.py, gui/widgets/graphs.py, gui/widgets/layouts.py, and the start of gui/widgets/splash.py omitted ...]
-        if count > 0:
-            message = f"Processing... {count}"
-        else:
-            message = "Finished!"
- self.showMessage(message, Qt.AlignHCenter | Qt.AlignBottom, Qt.white) - - def handleFrameChange(self): - pixmap = self.movie.currentPixmap() - self.setPixmap(pixmap) - self.setMask(pixmap.mask()) - - -if __name__ == "__main__": - print("Splash screen!") - dir_path = os.path.dirname(os.path.realpath(__file__)) - - app = QApplication(sys.argv) - window = Window() - - splash = SplashScreen(Qt.WindowStaysOnTopHint) - worker = Worker() - worker.progressChanged.connect(splash.updateProgress) - worker.finished.connect(lambda: (splash.finish(window), window.show())) - splash.show() - worker.start() - app.exec_() diff --git a/gui/widgets/tables.py b/gui/widgets/tables.py deleted file mode 100644 index c6819650..00000000 --- a/gui/widgets/tables.py +++ /dev/null @@ -1,88 +0,0 @@ -import getpass -import os -import socket -import sys - -import psutil -from PyQt5.QtCore import * -from PyQt5.QtGui import * -from PyQt5.QtWidgets import * -from pyqtgraph import GraphicsLayoutWidget, ImageItem - - -class FITSHeaderTable(QWidget): - """Table of FITS Header information""" - - def __init__(self): - """Initializes the table widget""" - super().__init__() - self.make_layout() - - def make_layout(self): - self.title = QLabel("FITS Header") - self.tbl = QTableWidget() - self.tbl_layout = QVBoxLayout() - self.setLayout(self.tbl_layout) - self.tbl_layout.addWidget(self.title) - self.tbl_layout.addWidget(self.tbl) - - def load(self, data): - """ - Gets the keys - - Parameters - ---------- - data : dict - A dictionary of the FITS header - - """ - ks = [k for k in data.keys()] - - self.tbl.setRowCount(len(ks)) - self.tbl.setColumnCount(2) - self.tbl.setHorizontalHeaderLabels(["Header Key", "Header Value"]) - - for i, ki in enumerate(ks): - self.tbl.setItem(i, 0, QTableWidgetItem(str(ki))) - self.tbl.setItem(i, 1, QTableWidgetItem(str(data[ki]))) - - -class FITSDataTable(QTableWidget): - """Table of FITS Header information""" - - def __init__(self): - """Initializes the table widget""" - super().__init__() - - def make_layout(self): - self.title = QLabel("FITS Data") - self.tbl = QTableWidget() - self.tbl_layout = QVBoxLayout() - self.setLayout(self.tbl_layout) - self.tbl_layout.addWidget(self.title) - self.tbl_layout.addWidget(self.tbl) - - def get_keys(self, data): - """ - Gets the keys - - Parameters - ---------- - data : dict - A dictionary of the FITS column names - - """ - ks = data.keys() - - self.tbl.setRowCount(len(ks)) - self.tbl.setColumnCount(4) - self.tbl.setHorizontalHeaderLabels(["Header Key", "Value", "Unit", "TFORM"]) - - for i, ki in enumerate(ks): - try: - self.tbl.setItem(i, 0, QTableWidgetItem(str(ki))) - self.tbl.setItem(i, 1, QTableWidgetItem(str(self.hdr_df[ki][0]))) - self.tbl.setItem(i, 2, QTableWidgetItem(str(self.h_data_info[ki]["TUNIT"]))) - self.tbl.setItem(i, 3, QTableWidgetItem(str(self.h_data_info[ki]["TFORM"]))) - except: - print(f"Issue encountered for {ki} (data)") From 2a825494dcd5a21ab06209a97599809950e54d21 Mon Sep 17 00:00:00 2001 From: Thomas Chamberlin Date: Wed, 29 Nov 2023 10:33:19 -0500 Subject: [PATCH 07/37] Remove autoconf/make files --- configure | 2921 ---------------------------------------------- configure.ac | 52 - docs/Makefile | 23 - dysh_start.sh.in | 12 - 4 files changed, 3008 deletions(-) delete mode 100755 configure delete mode 100644 configure.ac delete mode 100644 docs/Makefile delete mode 100644 dysh_start.sh.in diff --git a/configure b/configure deleted file mode 100755 index a541a855..00000000 --- a/configure +++ /dev/null @@ -1,2921 +0,0 @@ -#! 
/bin/sh -# Guess values for system-dependent variables and create Makefiles. -# Generated by GNU Autoconf 2.71. -# -# -# Copyright (C) 1992-1996, 1998-2017, 2020-2021 Free Software Foundation, -# Inc. -# -# -# This configure script is free software; the Free Software Foundation -# gives unlimited permission to copy, distribute and modify it. -## -------------------- ## -## M4sh Initialization. ## -## -------------------- ## - -# Be more Bourne compatible -DUALCASE=1; export DUALCASE # for MKS sh -as_nop=: -if test ${ZSH_VERSION+y} && (emulate sh) >/dev/null 2>&1 -then : - emulate sh - NULLCMD=: - # Pre-4.2 versions of Zsh do word splitting on ${1+"$@"}, which - # is contrary to our usage. Disable this feature. - alias -g '${1+"$@"}'='"$@"' - setopt NO_GLOB_SUBST -else $as_nop - case `(set -o) 2>/dev/null` in #( - *posix*) : - set -o posix ;; #( - *) : - ;; -esac -fi - - - -# Reset variables that may have inherited troublesome values from -# the environment. - -# IFS needs to be set, to space, tab, and newline, in precisely that order. -# (If _AS_PATH_WALK were called with IFS unset, it would have the -# side effect of setting IFS to empty, thus disabling word splitting.) -# Quoting is to prevent editors from complaining about space-tab. -as_nl=' -' -export as_nl -IFS=" "" $as_nl" - -PS1='$ ' -PS2='> ' -PS4='+ ' - -# Ensure predictable behavior from utilities with locale-dependent output. -LC_ALL=C -export LC_ALL -LANGUAGE=C -export LANGUAGE - -# We cannot yet rely on "unset" to work, but we need these variables -# to be unset--not just set to an empty or harmless value--now, to -# avoid bugs in old shells (e.g. pre-3.0 UWIN ksh). This construct -# also avoids known problems related to "unset" and subshell syntax -# in other old shells (e.g. bash 2.01 and pdksh 5.2.14). -for as_var in BASH_ENV ENV MAIL MAILPATH CDPATH -do eval test \${$as_var+y} \ - && ( (unset $as_var) || exit 1) >/dev/null 2>&1 && unset $as_var || : -done - -# Ensure that fds 0, 1, and 2 are open. -if (exec 3>&0) 2>/dev/null; then :; else exec 0&1) 2>/dev/null; then :; else exec 1>/dev/null; fi -if (exec 3>&2) ; then :; else exec 2>/dev/null; fi - -# The user is always right. -if ${PATH_SEPARATOR+false} :; then - PATH_SEPARATOR=: - (PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 && { - (PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 || - PATH_SEPARATOR=';' - } -fi - - -# Find who we are. Look in the path if we contain no directory separator. -as_myself= -case $0 in #(( - *[\\/]* ) as_myself=$0 ;; - *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - case $as_dir in #((( - '') as_dir=./ ;; - */) ;; - *) as_dir=$as_dir/ ;; - esac - test -r "$as_dir$0" && as_myself=$as_dir$0 && break - done -IFS=$as_save_IFS - - ;; -esac -# We did not find ourselves, most probably we were run as `sh COMMAND' -# in which case we are not to be found in the path. -if test "x$as_myself" = x; then - as_myself=$0 -fi -if test ! -f "$as_myself"; then - printf "%s\n" "$as_myself: error: cannot find myself; rerun with an absolute file name" >&2 - exit 1 -fi - - -# Use a proper internal environment variable to ensure we don't fall - # into an infinite loop, continuously re-executing ourselves. - if test x"${_as_can_reexec}" != xno && test "x$CONFIG_SHELL" != x; then - _as_can_reexec=no; export _as_can_reexec; - # We cannot yet assume a decent shell, so we have to provide a -# neutralization value for shells without unset; and this also -# works around shells that cannot unset nonexistent variables. 
-# Preserve -v and -x to the replacement shell. -BASH_ENV=/dev/null -ENV=/dev/null -(unset BASH_ENV) >/dev/null 2>&1 && unset BASH_ENV ENV -case $- in # (((( - *v*x* | *x*v* ) as_opts=-vx ;; - *v* ) as_opts=-v ;; - *x* ) as_opts=-x ;; - * ) as_opts= ;; -esac -exec $CONFIG_SHELL $as_opts "$as_myself" ${1+"$@"} -# Admittedly, this is quite paranoid, since all the known shells bail -# out after a failed `exec'. -printf "%s\n" "$0: could not re-execute with $CONFIG_SHELL" >&2 -exit 255 - fi - # We don't want this to propagate to other subprocesses. - { _as_can_reexec=; unset _as_can_reexec;} -if test "x$CONFIG_SHELL" = x; then - as_bourne_compatible="as_nop=: -if test \${ZSH_VERSION+y} && (emulate sh) >/dev/null 2>&1 -then : - emulate sh - NULLCMD=: - # Pre-4.2 versions of Zsh do word splitting on \${1+\"\$@\"}, which - # is contrary to our usage. Disable this feature. - alias -g '\${1+\"\$@\"}'='\"\$@\"' - setopt NO_GLOB_SUBST -else \$as_nop - case \`(set -o) 2>/dev/null\` in #( - *posix*) : - set -o posix ;; #( - *) : - ;; -esac -fi -" - as_required="as_fn_return () { (exit \$1); } -as_fn_success () { as_fn_return 0; } -as_fn_failure () { as_fn_return 1; } -as_fn_ret_success () { return 0; } -as_fn_ret_failure () { return 1; } - -exitcode=0 -as_fn_success || { exitcode=1; echo as_fn_success failed.; } -as_fn_failure && { exitcode=1; echo as_fn_failure succeeded.; } -as_fn_ret_success || { exitcode=1; echo as_fn_ret_success failed.; } -as_fn_ret_failure && { exitcode=1; echo as_fn_ret_failure succeeded.; } -if ( set x; as_fn_ret_success y && test x = \"\$1\" ) -then : - -else \$as_nop - exitcode=1; echo positional parameters were not saved. -fi -test x\$exitcode = x0 || exit 1 -blah=\$(echo \$(echo blah)) -test x\"\$blah\" = xblah || exit 1 -test -x / || exit 1" - as_suggested=" as_lineno_1=";as_suggested=$as_suggested$LINENO;as_suggested=$as_suggested" as_lineno_1a=\$LINENO - as_lineno_2=";as_suggested=$as_suggested$LINENO;as_suggested=$as_suggested" as_lineno_2a=\$LINENO - eval 'test \"x\$as_lineno_1'\$as_run'\" != \"x\$as_lineno_2'\$as_run'\" && - test \"x\`expr \$as_lineno_1'\$as_run' + 1\`\" = \"x\$as_lineno_2'\$as_run'\"' || exit 1" - if (eval "$as_required") 2>/dev/null -then : - as_have_required=yes -else $as_nop - as_have_required=no -fi - if test x$as_have_required = xyes && (eval "$as_suggested") 2>/dev/null -then : - -else $as_nop - as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -as_found=false -for as_dir in /bin$PATH_SEPARATOR/usr/bin$PATH_SEPARATOR$PATH -do - IFS=$as_save_IFS - case $as_dir in #((( - '') as_dir=./ ;; - */) ;; - *) as_dir=$as_dir/ ;; - esac - as_found=: - case $as_dir in #( - /*) - for as_base in sh bash ksh sh5; do - # Try only shells that exist, to save several forks. 
- as_shell=$as_dir$as_base - if { test -f "$as_shell" || test -f "$as_shell.exe"; } && - as_run=a "$as_shell" -c "$as_bourne_compatible""$as_required" 2>/dev/null -then : - CONFIG_SHELL=$as_shell as_have_required=yes - if as_run=a "$as_shell" -c "$as_bourne_compatible""$as_suggested" 2>/dev/null -then : - break 2 -fi -fi - done;; - esac - as_found=false -done -IFS=$as_save_IFS -if $as_found -then : - -else $as_nop - if { test -f "$SHELL" || test -f "$SHELL.exe"; } && - as_run=a "$SHELL" -c "$as_bourne_compatible""$as_required" 2>/dev/null -then : - CONFIG_SHELL=$SHELL as_have_required=yes -fi -fi - - - if test "x$CONFIG_SHELL" != x -then : - export CONFIG_SHELL - # We cannot yet assume a decent shell, so we have to provide a -# neutralization value for shells without unset; and this also -# works around shells that cannot unset nonexistent variables. -# Preserve -v and -x to the replacement shell. -BASH_ENV=/dev/null -ENV=/dev/null -(unset BASH_ENV) >/dev/null 2>&1 && unset BASH_ENV ENV -case $- in # (((( - *v*x* | *x*v* ) as_opts=-vx ;; - *v* ) as_opts=-v ;; - *x* ) as_opts=-x ;; - * ) as_opts= ;; -esac -exec $CONFIG_SHELL $as_opts "$as_myself" ${1+"$@"} -# Admittedly, this is quite paranoid, since all the known shells bail -# out after a failed `exec'. -printf "%s\n" "$0: could not re-execute with $CONFIG_SHELL" >&2 -exit 255 -fi - - if test x$as_have_required = xno -then : - printf "%s\n" "$0: This script requires a shell more modern than all" - printf "%s\n" "$0: the shells that I found on your system." - if test ${ZSH_VERSION+y} ; then - printf "%s\n" "$0: In particular, zsh $ZSH_VERSION has bugs and should" - printf "%s\n" "$0: be upgraded to zsh 4.3.4 or later." - else - printf "%s\n" "$0: Please tell bug-autoconf@gnu.org about your system, -$0: including any error possibly output before this -$0: message. Then install a modern shell, or manually run -$0: the script under such a shell if you do have one." - fi - exit 1 -fi -fi -fi -SHELL=${CONFIG_SHELL-/bin/sh} -export SHELL -# Unset more variables known to interfere with behavior of common tools. -CLICOLOR_FORCE= GREP_OPTIONS= -unset CLICOLOR_FORCE GREP_OPTIONS - -## --------------------- ## -## M4sh Shell Functions. ## -## --------------------- ## -# as_fn_unset VAR -# --------------- -# Portably unset VAR. -as_fn_unset () -{ - { eval $1=; unset $1;} -} -as_unset=as_fn_unset - - -# as_fn_set_status STATUS -# ----------------------- -# Set $? to STATUS, without forking. -as_fn_set_status () -{ - return $1 -} # as_fn_set_status - -# as_fn_exit STATUS -# ----------------- -# Exit the shell with STATUS, even in a "trap 0" or "set -e" context. -as_fn_exit () -{ - set +e - as_fn_set_status $1 - exit $1 -} # as_fn_exit -# as_fn_nop -# --------- -# Do nothing but, unlike ":", preserve the value of $?. -as_fn_nop () -{ - return $? -} -as_nop=as_fn_nop - -# as_fn_mkdir_p -# ------------- -# Create "$as_dir" as a directory, including parents if necessary. -as_fn_mkdir_p () -{ - - case $as_dir in #( - -*) as_dir=./$as_dir;; - esac - test -d "$as_dir" || eval $as_mkdir_p || { - as_dirs= - while :; do - case $as_dir in #( - *\'*) as_qdir=`printf "%s\n" "$as_dir" | sed "s/'/'\\\\\\\\''/g"`;; #'( - *) as_qdir=$as_dir;; - esac - as_dirs="'$as_qdir' $as_dirs" - as_dir=`$as_dirname -- "$as_dir" || -$as_expr X"$as_dir" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ - X"$as_dir" : 'X\(//\)[^/]' \| \ - X"$as_dir" : 'X\(//\)$' \| \ - X"$as_dir" : 'X\(/\)' \| . 
2>/dev/null || -printf "%s\n" X"$as_dir" | - sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ - s//\1/ - q - } - /^X\(\/\/\)[^/].*/{ - s//\1/ - q - } - /^X\(\/\/\)$/{ - s//\1/ - q - } - /^X\(\/\).*/{ - s//\1/ - q - } - s/.*/./; q'` - test -d "$as_dir" && break - done - test -z "$as_dirs" || eval "mkdir $as_dirs" - } || test -d "$as_dir" || as_fn_error $? "cannot create directory $as_dir" - - -} # as_fn_mkdir_p - -# as_fn_executable_p FILE -# ----------------------- -# Test if FILE is an executable regular file. -as_fn_executable_p () -{ - test -f "$1" && test -x "$1" -} # as_fn_executable_p -# as_fn_append VAR VALUE -# ---------------------- -# Append the text in VALUE to the end of the definition contained in VAR. Take -# advantage of any shell optimizations that allow amortized linear growth over -# repeated appends, instead of the typical quadratic growth present in naive -# implementations. -if (eval "as_var=1; as_var+=2; test x\$as_var = x12") 2>/dev/null -then : - eval 'as_fn_append () - { - eval $1+=\$2 - }' -else $as_nop - as_fn_append () - { - eval $1=\$$1\$2 - } -fi # as_fn_append - -# as_fn_arith ARG... -# ------------------ -# Perform arithmetic evaluation on the ARGs, and store the result in the -# global $as_val. Take advantage of shells that can avoid forks. The arguments -# must be portable across $(()) and expr. -if (eval "test \$(( 1 + 1 )) = 2") 2>/dev/null -then : - eval 'as_fn_arith () - { - as_val=$(( $* )) - }' -else $as_nop - as_fn_arith () - { - as_val=`expr "$@" || test $? -eq 1` - } -fi # as_fn_arith - -# as_fn_nop -# --------- -# Do nothing but, unlike ":", preserve the value of $?. -as_fn_nop () -{ - return $? -} -as_nop=as_fn_nop - -# as_fn_error STATUS ERROR [LINENO LOG_FD] -# ---------------------------------------- -# Output "`basename $0`: error: ERROR" to stderr. If LINENO and LOG_FD are -# provided, also output the error to LOG_FD, referencing LINENO. Then exit the -# script with STATUS, using 1 if that was 0. -as_fn_error () -{ - as_status=$1; test $as_status -eq 0 && as_status=1 - if test "$4"; then - as_lineno=${as_lineno-"$3"} as_lineno_stack=as_lineno_stack=$as_lineno_stack - printf "%s\n" "$as_me:${as_lineno-$LINENO}: error: $2" >&$4 - fi - printf "%s\n" "$as_me: error: $2" >&2 - as_fn_exit $as_status -} # as_fn_error - -if expr a : '\(a\)' >/dev/null 2>&1 && - test "X`expr 00001 : '.*\(...\)'`" = X001; then - as_expr=expr -else - as_expr=false -fi - -if (basename -- /) >/dev/null 2>&1 && test "X`basename -- / 2>&1`" = "X/"; then - as_basename=basename -else - as_basename=false -fi - -if (as_dir=`dirname -- /` && test "X$as_dir" = X/) >/dev/null 2>&1; then - as_dirname=dirname -else - as_dirname=false -fi - -as_me=`$as_basename -- "$0" || -$as_expr X/"$0" : '.*/\([^/][^/]*\)/*$' \| \ - X"$0" : 'X\(//\)$' \| \ - X"$0" : 'X\(/\)' \| . 2>/dev/null || -printf "%s\n" X/"$0" | - sed '/^.*\/\([^/][^/]*\)\/*$/{ - s//\1/ - q - } - /^X\/\(\/\/\)$/{ - s//\1/ - q - } - /^X\/\(\/\).*/{ - s//\1/ - q - } - s/.*/./; q'` - -# Avoid depending upon Character Ranges. -as_cr_letters='abcdefghijklmnopqrstuvwxyz' -as_cr_LETTERS='ABCDEFGHIJKLMNOPQRSTUVWXYZ' -as_cr_Letters=$as_cr_letters$as_cr_LETTERS -as_cr_digits='0123456789' -as_cr_alnum=$as_cr_Letters$as_cr_digits - - - as_lineno_1=$LINENO as_lineno_1a=$LINENO - as_lineno_2=$LINENO as_lineno_2a=$LINENO - eval 'test "x$as_lineno_1'$as_run'" != "x$as_lineno_2'$as_run'" && - test "x`expr $as_lineno_1'$as_run' + 1`" = "x$as_lineno_2'$as_run'"' || { - # Blame Lee E. McMahon (1931-1989) for sed's syntax. 
:-) - sed -n ' - p - /[$]LINENO/= - ' <$as_myself | - sed ' - s/[$]LINENO.*/&-/ - t lineno - b - :lineno - N - :loop - s/[$]LINENO\([^'$as_cr_alnum'_].*\n\)\(.*\)/\2\1\2/ - t loop - s/-\n.*// - ' >$as_me.lineno && - chmod +x "$as_me.lineno" || - { printf "%s\n" "$as_me: error: cannot create $as_me.lineno; rerun with a POSIX shell" >&2; as_fn_exit 1; } - - # If we had to re-execute with $CONFIG_SHELL, we're ensured to have - # already done that, so ensure we don't try to do so again and fall - # in an infinite loop. This has already happened in practice. - _as_can_reexec=no; export _as_can_reexec - # Don't try to exec as it changes $[0], causing all sort of problems - # (the dirname of $[0] is not the place where we might find the - # original and so on. Autoconf is especially sensitive to this). - . "./$as_me.lineno" - # Exit status is that of the last command. - exit -} - - -# Determine whether it's possible to make 'echo' print without a newline. -# These variables are no longer used directly by Autoconf, but are AC_SUBSTed -# for compatibility with existing Makefiles. -ECHO_C= ECHO_N= ECHO_T= -case `echo -n x` in #((((( --n*) - case `echo 'xy\c'` in - *c*) ECHO_T=' ';; # ECHO_T is single tab character. - xy) ECHO_C='\c';; - *) echo `echo ksh88 bug on AIX 6.1` > /dev/null - ECHO_T=' ';; - esac;; -*) - ECHO_N='-n';; -esac - -# For backward compatibility with old third-party macros, we provide -# the shell variables $as_echo and $as_echo_n. New code should use -# AS_ECHO(["message"]) and AS_ECHO_N(["message"]), respectively. -as_echo='printf %s\n' -as_echo_n='printf %s' - - -rm -f conf$$ conf$$.exe conf$$.file -if test -d conf$$.dir; then - rm -f conf$$.dir/conf$$.file -else - rm -f conf$$.dir - mkdir conf$$.dir 2>/dev/null -fi -if (echo >conf$$.file) 2>/dev/null; then - if ln -s conf$$.file conf$$ 2>/dev/null; then - as_ln_s='ln -s' - # ... but there are two gotchas: - # 1) On MSYS, both `ln -s file dir' and `ln file dir' fail. - # 2) DJGPP < 2.04 has no symlinks; `ln -s' creates a wrapper executable. - # In both cases, we have to default to `cp -pR'. - ln -s conf$$.file conf$$.dir 2>/dev/null && test ! -f conf$$.exe || - as_ln_s='cp -pR' - elif ln conf$$.file conf$$ 2>/dev/null; then - as_ln_s=ln - else - as_ln_s='cp -pR' - fi -else - as_ln_s='cp -pR' -fi -rm -f conf$$ conf$$.exe conf$$.dir/conf$$.file conf$$.file -rmdir conf$$.dir 2>/dev/null - -if mkdir -p . 2>/dev/null; then - as_mkdir_p='mkdir -p "$as_dir"' -else - test -d ./-p && rmdir ./-p - as_mkdir_p=false -fi - -as_test_x='test -x' -as_executable_p=as_fn_executable_p - -# Sed expression to map a string onto a valid CPP name. -as_tr_cpp="eval sed 'y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g'" - -# Sed expression to map a string onto a valid variable name. -as_tr_sh="eval sed 'y%*+%pp%;s%[^_$as_cr_alnum]%_%g'" - - -test -n "$DJDIR" || exec 7<&0 &1 - -# Name of the host. -# hostname on some systems (SVR3.2, old GNU/Linux) returns a bogus exit status, -# so uname gets run too. -ac_hostname=`(hostname || uname -n) 2>/dev/null | sed 1q` - -# -# Initializations. -# -ac_default_prefix=/usr/local -ac_clean_files= -ac_config_libobj_dir=. -LIBOBJS= -cross_compiling=no -subdirs= -MFLAGS= -MAKEFLAGS= - -# Identity of this package. 
-PACKAGE_NAME='' -PACKAGE_TARNAME='' -PACKAGE_VERSION='' -PACKAGE_STRING='' -PACKAGE_BUGREPORT='' -PACKAGE_URL='' - -ac_unique_file="README.md" -ac_subst_vars='LTLIBOBJS -LIBOBJS -DYSH_DATA -EDIT_MSG -DYSH -target_alias -host_alias -build_alias -LIBS -ECHO_T -ECHO_N -ECHO_C -DEFS -mandir -localedir -libdir -psdir -pdfdir -dvidir -htmldir -infodir -docdir -oldincludedir -includedir -runstatedir -localstatedir -sharedstatedir -sysconfdir -datadir -datarootdir -libexecdir -sbindir -bindir -program_transform_name -prefix -exec_prefix -PACKAGE_URL -PACKAGE_BUGREPORT -PACKAGE_STRING -PACKAGE_VERSION -PACKAGE_TARNAME -PACKAGE_NAME -PATH_SEPARATOR -SHELL' -ac_subst_files='' -ac_user_opts=' -enable_option_checking -with_data -' - ac_precious_vars='build_alias -host_alias -target_alias' - - -# Initialize some variables set by options. -ac_init_help= -ac_init_version=false -ac_unrecognized_opts= -ac_unrecognized_sep= -# The variables have the same names as the options, with -# dashes changed to underlines. -cache_file=/dev/null -exec_prefix=NONE -no_create= -no_recursion= -prefix=NONE -program_prefix=NONE -program_suffix=NONE -program_transform_name=s,x,x, -silent= -site= -srcdir= -verbose= -x_includes=NONE -x_libraries=NONE - -# Installation directory options. -# These are left unexpanded so users can "make install exec_prefix=/foo" -# and all the variables that are supposed to be based on exec_prefix -# by default will actually change. -# Use braces instead of parens because sh, perl, etc. also accept them. -# (The list follows the same order as the GNU Coding Standards.) -bindir='${exec_prefix}/bin' -sbindir='${exec_prefix}/sbin' -libexecdir='${exec_prefix}/libexec' -datarootdir='${prefix}/share' -datadir='${datarootdir}' -sysconfdir='${prefix}/etc' -sharedstatedir='${prefix}/com' -localstatedir='${prefix}/var' -runstatedir='${localstatedir}/run' -includedir='${prefix}/include' -oldincludedir='/usr/include' -docdir='${datarootdir}/doc/${PACKAGE}' -infodir='${datarootdir}/info' -htmldir='${docdir}' -dvidir='${docdir}' -pdfdir='${docdir}' -psdir='${docdir}' -libdir='${exec_prefix}/lib' -localedir='${datarootdir}/locale' -mandir='${datarootdir}/man' - -ac_prev= -ac_dashdash= -for ac_option -do - # If the previous option needs an argument, assign it. 
- if test -n "$ac_prev"; then - eval $ac_prev=\$ac_option - ac_prev= - continue - fi - - case $ac_option in - *=?*) ac_optarg=`expr "X$ac_option" : '[^=]*=\(.*\)'` ;; - *=) ac_optarg= ;; - *) ac_optarg=yes ;; - esac - - case $ac_dashdash$ac_option in - --) - ac_dashdash=yes ;; - - -bindir | --bindir | --bindi | --bind | --bin | --bi) - ac_prev=bindir ;; - -bindir=* | --bindir=* | --bindi=* | --bind=* | --bin=* | --bi=*) - bindir=$ac_optarg ;; - - -build | --build | --buil | --bui | --bu) - ac_prev=build_alias ;; - -build=* | --build=* | --buil=* | --bui=* | --bu=*) - build_alias=$ac_optarg ;; - - -cache-file | --cache-file | --cache-fil | --cache-fi \ - | --cache-f | --cache- | --cache | --cach | --cac | --ca | --c) - ac_prev=cache_file ;; - -cache-file=* | --cache-file=* | --cache-fil=* | --cache-fi=* \ - | --cache-f=* | --cache-=* | --cache=* | --cach=* | --cac=* | --ca=* | --c=*) - cache_file=$ac_optarg ;; - - --config-cache | -C) - cache_file=config.cache ;; - - -datadir | --datadir | --datadi | --datad) - ac_prev=datadir ;; - -datadir=* | --datadir=* | --datadi=* | --datad=*) - datadir=$ac_optarg ;; - - -datarootdir | --datarootdir | --datarootdi | --datarootd | --dataroot \ - | --dataroo | --dataro | --datar) - ac_prev=datarootdir ;; - -datarootdir=* | --datarootdir=* | --datarootdi=* | --datarootd=* \ - | --dataroot=* | --dataroo=* | --dataro=* | --datar=*) - datarootdir=$ac_optarg ;; - - -disable-* | --disable-*) - ac_useropt=`expr "x$ac_option" : 'x-*disable-\(.*\)'` - # Reject names that are not valid shell variable names. - expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && - as_fn_error $? "invalid feature name: \`$ac_useropt'" - ac_useropt_orig=$ac_useropt - ac_useropt=`printf "%s\n" "$ac_useropt" | sed 's/[-+.]/_/g'` - case $ac_user_opts in - *" -"enable_$ac_useropt" -"*) ;; - *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--disable-$ac_useropt_orig" - ac_unrecognized_sep=', ';; - esac - eval enable_$ac_useropt=no ;; - - -docdir | --docdir | --docdi | --doc | --do) - ac_prev=docdir ;; - -docdir=* | --docdir=* | --docdi=* | --doc=* | --do=*) - docdir=$ac_optarg ;; - - -dvidir | --dvidir | --dvidi | --dvid | --dvi | --dv) - ac_prev=dvidir ;; - -dvidir=* | --dvidir=* | --dvidi=* | --dvid=* | --dvi=* | --dv=*) - dvidir=$ac_optarg ;; - - -enable-* | --enable-*) - ac_useropt=`expr "x$ac_option" : 'x-*enable-\([^=]*\)'` - # Reject names that are not valid shell variable names. - expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && - as_fn_error $? "invalid feature name: \`$ac_useropt'" - ac_useropt_orig=$ac_useropt - ac_useropt=`printf "%s\n" "$ac_useropt" | sed 's/[-+.]/_/g'` - case $ac_user_opts in - *" -"enable_$ac_useropt" -"*) ;; - *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--enable-$ac_useropt_orig" - ac_unrecognized_sep=', ';; - esac - eval enable_$ac_useropt=\$ac_optarg ;; - - -exec-prefix | --exec_prefix | --exec-prefix | --exec-prefi \ - | --exec-pref | --exec-pre | --exec-pr | --exec-p | --exec- \ - | --exec | --exe | --ex) - ac_prev=exec_prefix ;; - -exec-prefix=* | --exec_prefix=* | --exec-prefix=* | --exec-prefi=* \ - | --exec-pref=* | --exec-pre=* | --exec-pr=* | --exec-p=* | --exec-=* \ - | --exec=* | --exe=* | --ex=*) - exec_prefix=$ac_optarg ;; - - -gas | --gas | --ga | --g) - # Obsolete; use --with-gas. 
- with_gas=yes ;; - - -help | --help | --hel | --he | -h) - ac_init_help=long ;; - -help=r* | --help=r* | --hel=r* | --he=r* | -hr*) - ac_init_help=recursive ;; - -help=s* | --help=s* | --hel=s* | --he=s* | -hs*) - ac_init_help=short ;; - - -host | --host | --hos | --ho) - ac_prev=host_alias ;; - -host=* | --host=* | --hos=* | --ho=*) - host_alias=$ac_optarg ;; - - -htmldir | --htmldir | --htmldi | --htmld | --html | --htm | --ht) - ac_prev=htmldir ;; - -htmldir=* | --htmldir=* | --htmldi=* | --htmld=* | --html=* | --htm=* \ - | --ht=*) - htmldir=$ac_optarg ;; - - -includedir | --includedir | --includedi | --included | --include \ - | --includ | --inclu | --incl | --inc) - ac_prev=includedir ;; - -includedir=* | --includedir=* | --includedi=* | --included=* | --include=* \ - | --includ=* | --inclu=* | --incl=* | --inc=*) - includedir=$ac_optarg ;; - - -infodir | --infodir | --infodi | --infod | --info | --inf) - ac_prev=infodir ;; - -infodir=* | --infodir=* | --infodi=* | --infod=* | --info=* | --inf=*) - infodir=$ac_optarg ;; - - -libdir | --libdir | --libdi | --libd) - ac_prev=libdir ;; - -libdir=* | --libdir=* | --libdi=* | --libd=*) - libdir=$ac_optarg ;; - - -libexecdir | --libexecdir | --libexecdi | --libexecd | --libexec \ - | --libexe | --libex | --libe) - ac_prev=libexecdir ;; - -libexecdir=* | --libexecdir=* | --libexecdi=* | --libexecd=* | --libexec=* \ - | --libexe=* | --libex=* | --libe=*) - libexecdir=$ac_optarg ;; - - -localedir | --localedir | --localedi | --localed | --locale) - ac_prev=localedir ;; - -localedir=* | --localedir=* | --localedi=* | --localed=* | --locale=*) - localedir=$ac_optarg ;; - - -localstatedir | --localstatedir | --localstatedi | --localstated \ - | --localstate | --localstat | --localsta | --localst | --locals) - ac_prev=localstatedir ;; - -localstatedir=* | --localstatedir=* | --localstatedi=* | --localstated=* \ - | --localstate=* | --localstat=* | --localsta=* | --localst=* | --locals=*) - localstatedir=$ac_optarg ;; - - -mandir | --mandir | --mandi | --mand | --man | --ma | --m) - ac_prev=mandir ;; - -mandir=* | --mandir=* | --mandi=* | --mand=* | --man=* | --ma=* | --m=*) - mandir=$ac_optarg ;; - - -nfp | --nfp | --nf) - # Obsolete; use --without-fp. 
- with_fp=no ;; - - -no-create | --no-create | --no-creat | --no-crea | --no-cre \ - | --no-cr | --no-c | -n) - no_create=yes ;; - - -no-recursion | --no-recursion | --no-recursio | --no-recursi \ - | --no-recurs | --no-recur | --no-recu | --no-rec | --no-re | --no-r) - no_recursion=yes ;; - - -oldincludedir | --oldincludedir | --oldincludedi | --oldincluded \ - | --oldinclude | --oldinclud | --oldinclu | --oldincl | --oldinc \ - | --oldin | --oldi | --old | --ol | --o) - ac_prev=oldincludedir ;; - -oldincludedir=* | --oldincludedir=* | --oldincludedi=* | --oldincluded=* \ - | --oldinclude=* | --oldinclud=* | --oldinclu=* | --oldincl=* | --oldinc=* \ - | --oldin=* | --oldi=* | --old=* | --ol=* | --o=*) - oldincludedir=$ac_optarg ;; - - -prefix | --prefix | --prefi | --pref | --pre | --pr | --p) - ac_prev=prefix ;; - -prefix=* | --prefix=* | --prefi=* | --pref=* | --pre=* | --pr=* | --p=*) - prefix=$ac_optarg ;; - - -program-prefix | --program-prefix | --program-prefi | --program-pref \ - | --program-pre | --program-pr | --program-p) - ac_prev=program_prefix ;; - -program-prefix=* | --program-prefix=* | --program-prefi=* \ - | --program-pref=* | --program-pre=* | --program-pr=* | --program-p=*) - program_prefix=$ac_optarg ;; - - -program-suffix | --program-suffix | --program-suffi | --program-suff \ - | --program-suf | --program-su | --program-s) - ac_prev=program_suffix ;; - -program-suffix=* | --program-suffix=* | --program-suffi=* \ - | --program-suff=* | --program-suf=* | --program-su=* | --program-s=*) - program_suffix=$ac_optarg ;; - - -program-transform-name | --program-transform-name \ - | --program-transform-nam | --program-transform-na \ - | --program-transform-n | --program-transform- \ - | --program-transform | --program-transfor \ - | --program-transfo | --program-transf \ - | --program-trans | --program-tran \ - | --progr-tra | --program-tr | --program-t) - ac_prev=program_transform_name ;; - -program-transform-name=* | --program-transform-name=* \ - | --program-transform-nam=* | --program-transform-na=* \ - | --program-transform-n=* | --program-transform-=* \ - | --program-transform=* | --program-transfor=* \ - | --program-transfo=* | --program-transf=* \ - | --program-trans=* | --program-tran=* \ - | --progr-tra=* | --program-tr=* | --program-t=*) - program_transform_name=$ac_optarg ;; - - -pdfdir | --pdfdir | --pdfdi | --pdfd | --pdf | --pd) - ac_prev=pdfdir ;; - -pdfdir=* | --pdfdir=* | --pdfdi=* | --pdfd=* | --pdf=* | --pd=*) - pdfdir=$ac_optarg ;; - - -psdir | --psdir | --psdi | --psd | --ps) - ac_prev=psdir ;; - -psdir=* | --psdir=* | --psdi=* | --psd=* | --ps=*) - psdir=$ac_optarg ;; - - -q | -quiet | --quiet | --quie | --qui | --qu | --q \ - | -silent | --silent | --silen | --sile | --sil) - silent=yes ;; - - -runstatedir | --runstatedir | --runstatedi | --runstated \ - | --runstate | --runstat | --runsta | --runst | --runs \ - | --run | --ru | --r) - ac_prev=runstatedir ;; - -runstatedir=* | --runstatedir=* | --runstatedi=* | --runstated=* \ - | --runstate=* | --runstat=* | --runsta=* | --runst=* | --runs=* \ - | --run=* | --ru=* | --r=*) - runstatedir=$ac_optarg ;; - - -sbindir | --sbindir | --sbindi | --sbind | --sbin | --sbi | --sb) - ac_prev=sbindir ;; - -sbindir=* | --sbindir=* | --sbindi=* | --sbind=* | --sbin=* \ - | --sbi=* | --sb=*) - sbindir=$ac_optarg ;; - - -sharedstatedir | --sharedstatedir | --sharedstatedi \ - | --sharedstated | --sharedstate | --sharedstat | --sharedsta \ - | --sharedst | --shareds | --shared | --share | --shar \ - | --sha | --sh) - 
ac_prev=sharedstatedir ;; - -sharedstatedir=* | --sharedstatedir=* | --sharedstatedi=* \ - | --sharedstated=* | --sharedstate=* | --sharedstat=* | --sharedsta=* \ - | --sharedst=* | --shareds=* | --shared=* | --share=* | --shar=* \ - | --sha=* | --sh=*) - sharedstatedir=$ac_optarg ;; - - -site | --site | --sit) - ac_prev=site ;; - -site=* | --site=* | --sit=*) - site=$ac_optarg ;; - - -srcdir | --srcdir | --srcdi | --srcd | --src | --sr) - ac_prev=srcdir ;; - -srcdir=* | --srcdir=* | --srcdi=* | --srcd=* | --src=* | --sr=*) - srcdir=$ac_optarg ;; - - -sysconfdir | --sysconfdir | --sysconfdi | --sysconfd | --sysconf \ - | --syscon | --sysco | --sysc | --sys | --sy) - ac_prev=sysconfdir ;; - -sysconfdir=* | --sysconfdir=* | --sysconfdi=* | --sysconfd=* | --sysconf=* \ - | --syscon=* | --sysco=* | --sysc=* | --sys=* | --sy=*) - sysconfdir=$ac_optarg ;; - - -target | --target | --targe | --targ | --tar | --ta | --t) - ac_prev=target_alias ;; - -target=* | --target=* | --targe=* | --targ=* | --tar=* | --ta=* | --t=*) - target_alias=$ac_optarg ;; - - -v | -verbose | --verbose | --verbos | --verbo | --verb) - verbose=yes ;; - - -version | --version | --versio | --versi | --vers | -V) - ac_init_version=: ;; - - -with-* | --with-*) - ac_useropt=`expr "x$ac_option" : 'x-*with-\([^=]*\)'` - # Reject names that are not valid shell variable names. - expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && - as_fn_error $? "invalid package name: \`$ac_useropt'" - ac_useropt_orig=$ac_useropt - ac_useropt=`printf "%s\n" "$ac_useropt" | sed 's/[-+.]/_/g'` - case $ac_user_opts in - *" -"with_$ac_useropt" -"*) ;; - *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--with-$ac_useropt_orig" - ac_unrecognized_sep=', ';; - esac - eval with_$ac_useropt=\$ac_optarg ;; - - -without-* | --without-*) - ac_useropt=`expr "x$ac_option" : 'x-*without-\(.*\)'` - # Reject names that are not valid shell variable names. - expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && - as_fn_error $? "invalid package name: \`$ac_useropt'" - ac_useropt_orig=$ac_useropt - ac_useropt=`printf "%s\n" "$ac_useropt" | sed 's/[-+.]/_/g'` - case $ac_user_opts in - *" -"with_$ac_useropt" -"*) ;; - *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--without-$ac_useropt_orig" - ac_unrecognized_sep=', ';; - esac - eval with_$ac_useropt=no ;; - - --x) - # Obsolete; use --with-x. - with_x=yes ;; - - -x-includes | --x-includes | --x-include | --x-includ | --x-inclu \ - | --x-incl | --x-inc | --x-in | --x-i) - ac_prev=x_includes ;; - -x-includes=* | --x-includes=* | --x-include=* | --x-includ=* | --x-inclu=* \ - | --x-incl=* | --x-inc=* | --x-in=* | --x-i=*) - x_includes=$ac_optarg ;; - - -x-libraries | --x-libraries | --x-librarie | --x-librari \ - | --x-librar | --x-libra | --x-libr | --x-lib | --x-li | --x-l) - ac_prev=x_libraries ;; - -x-libraries=* | --x-libraries=* | --x-librarie=* | --x-librari=* \ - | --x-librar=* | --x-libra=* | --x-libr=* | --x-lib=* | --x-li=* | --x-l=*) - x_libraries=$ac_optarg ;; - - -*) as_fn_error $? "unrecognized option: \`$ac_option' -Try \`$0 --help' for more information" - ;; - - *=*) - ac_envvar=`expr "x$ac_option" : 'x\([^=]*\)='` - # Reject names that are not valid shell variable names. - case $ac_envvar in #( - '' | [0-9]* | *[!_$as_cr_alnum]* ) - as_fn_error $? "invalid variable name: \`$ac_envvar'" ;; - esac - eval $ac_envvar=\$ac_optarg - export $ac_envvar ;; - - *) - # FIXME: should be removed in autoconf 3.0. 
- printf "%s\n" "$as_me: WARNING: you should use --build, --host, --target" >&2 - expr "x$ac_option" : ".*[^-._$as_cr_alnum]" >/dev/null && - printf "%s\n" "$as_me: WARNING: invalid host type: $ac_option" >&2 - : "${build_alias=$ac_option} ${host_alias=$ac_option} ${target_alias=$ac_option}" - ;; - - esac -done - -if test -n "$ac_prev"; then - ac_option=--`echo $ac_prev | sed 's/_/-/g'` - as_fn_error $? "missing argument to $ac_option" -fi - -if test -n "$ac_unrecognized_opts"; then - case $enable_option_checking in - no) ;; - fatal) as_fn_error $? "unrecognized options: $ac_unrecognized_opts" ;; - *) printf "%s\n" "$as_me: WARNING: unrecognized options: $ac_unrecognized_opts" >&2 ;; - esac -fi - -# Check all directory arguments for consistency. -for ac_var in exec_prefix prefix bindir sbindir libexecdir datarootdir \ - datadir sysconfdir sharedstatedir localstatedir includedir \ - oldincludedir docdir infodir htmldir dvidir pdfdir psdir \ - libdir localedir mandir runstatedir -do - eval ac_val=\$$ac_var - # Remove trailing slashes. - case $ac_val in - */ ) - ac_val=`expr "X$ac_val" : 'X\(.*[^/]\)' \| "X$ac_val" : 'X\(.*\)'` - eval $ac_var=\$ac_val;; - esac - # Be sure to have absolute directory names. - case $ac_val in - [\\/$]* | ?:[\\/]* ) continue;; - NONE | '' ) case $ac_var in *prefix ) continue;; esac;; - esac - as_fn_error $? "expected an absolute directory name for --$ac_var: $ac_val" -done - -# There might be people who depend on the old broken behavior: `$host' -# used to hold the argument of --host etc. -# FIXME: To remove some day. -build=$build_alias -host=$host_alias -target=$target_alias - -# FIXME: To remove some day. -if test "x$host_alias" != x; then - if test "x$build_alias" = x; then - cross_compiling=maybe - elif test "x$build_alias" != "x$host_alias"; then - cross_compiling=yes - fi -fi - -ac_tool_prefix= -test -n "$host_alias" && ac_tool_prefix=$host_alias- - -test "$silent" = yes && exec 6>/dev/null - - -ac_pwd=`pwd` && test -n "$ac_pwd" && -ac_ls_di=`ls -di .` && -ac_pwd_ls_di=`cd "$ac_pwd" && ls -di .` || - as_fn_error $? "working directory cannot be determined" -test "X$ac_ls_di" = "X$ac_pwd_ls_di" || - as_fn_error $? "pwd does not report name of working directory" - - -# Find the source files, if location was not specified. -if test -z "$srcdir"; then - ac_srcdir_defaulted=yes - # Try the directory containing this script, then the parent directory. - ac_confdir=`$as_dirname -- "$as_myself" || -$as_expr X"$as_myself" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ - X"$as_myself" : 'X\(//\)[^/]' \| \ - X"$as_myself" : 'X\(//\)$' \| \ - X"$as_myself" : 'X\(/\)' \| . 2>/dev/null || -printf "%s\n" X"$as_myself" | - sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ - s//\1/ - q - } - /^X\(\/\/\)[^/].*/{ - s//\1/ - q - } - /^X\(\/\/\)$/{ - s//\1/ - q - } - /^X\(\/\).*/{ - s//\1/ - q - } - s/.*/./; q'` - srcdir=$ac_confdir - if test ! -r "$srcdir/$ac_unique_file"; then - srcdir=.. - fi -else - ac_srcdir_defaulted=no -fi -if test ! -r "$srcdir/$ac_unique_file"; then - test "$ac_srcdir_defaulted" = yes && srcdir="$ac_confdir or .." - as_fn_error $? "cannot find sources ($ac_unique_file) in $srcdir" -fi -ac_msg="sources are in $srcdir, but \`cd $srcdir' does not work" -ac_abs_confdir=`( - cd "$srcdir" && test -r "./$ac_unique_file" || as_fn_error $? "$ac_msg" - pwd)` -# When building in place, set srcdir=. -if test "$ac_abs_confdir" = "$ac_pwd"; then - srcdir=. -fi -# Remove unnecessary trailing slashes from srcdir. 
-# Double slashes in file names in object file debugging info -# mess up M-x gdb in Emacs. -case $srcdir in -*/) srcdir=`expr "X$srcdir" : 'X\(.*[^/]\)' \| "X$srcdir" : 'X\(.*\)'`;; -esac -for ac_var in $ac_precious_vars; do - eval ac_env_${ac_var}_set=\${${ac_var}+set} - eval ac_env_${ac_var}_value=\$${ac_var} - eval ac_cv_env_${ac_var}_set=\${${ac_var}+set} - eval ac_cv_env_${ac_var}_value=\$${ac_var} -done - -# -# Report the --help message. -# -if test "$ac_init_help" = "long"; then - # Omit some internal or obsolete options to make the list less imposing. - # This message is too long to be a string in the A/UX 3.1 sh. - cat <<_ACEOF -\`configure' configures this package to adapt to many kinds of systems. - -Usage: $0 [OPTION]... [VAR=VALUE]... - -To assign environment variables (e.g., CC, CFLAGS...), specify them as -VAR=VALUE. See below for descriptions of some of the useful variables. - -Defaults for the options are specified in brackets. - -Configuration: - -h, --help display this help and exit - --help=short display options specific to this package - --help=recursive display the short help of all the included packages - -V, --version display version information and exit - -q, --quiet, --silent do not print \`checking ...' messages - --cache-file=FILE cache test results in FILE [disabled] - -C, --config-cache alias for \`--cache-file=config.cache' - -n, --no-create do not create output files - --srcdir=DIR find the sources in DIR [configure dir or \`..'] - -Installation directories: - --prefix=PREFIX install architecture-independent files in PREFIX - [$ac_default_prefix] - --exec-prefix=EPREFIX install architecture-dependent files in EPREFIX - [PREFIX] - -By default, \`make install' will install all the files in -\`$ac_default_prefix/bin', \`$ac_default_prefix/lib' etc. You can specify -an installation prefix other than \`$ac_default_prefix' using \`--prefix', -for instance \`--prefix=\$HOME'. - -For better control, use the options below. - -Fine tuning of the installation directories: - --bindir=DIR user executables [EPREFIX/bin] - --sbindir=DIR system admin executables [EPREFIX/sbin] - --libexecdir=DIR program executables [EPREFIX/libexec] - --sysconfdir=DIR read-only single-machine data [PREFIX/etc] - --sharedstatedir=DIR modifiable architecture-independent data [PREFIX/com] - --localstatedir=DIR modifiable single-machine data [PREFIX/var] - --runstatedir=DIR modifiable per-process data [LOCALSTATEDIR/run] - --libdir=DIR object code libraries [EPREFIX/lib] - --includedir=DIR C header files [PREFIX/include] - --oldincludedir=DIR C header files for non-gcc [/usr/include] - --datarootdir=DIR read-only arch.-independent data root [PREFIX/share] - --datadir=DIR read-only architecture-independent data [DATAROOTDIR] - --infodir=DIR info documentation [DATAROOTDIR/info] - --localedir=DIR locale-dependent data [DATAROOTDIR/locale] - --mandir=DIR man documentation [DATAROOTDIR/man] - --docdir=DIR documentation root [DATAROOTDIR/doc/PACKAGE] - --htmldir=DIR html documentation [DOCDIR] - --dvidir=DIR dvi documentation [DOCDIR] - --pdfdir=DIR pdf documentation [DOCDIR] - --psdir=DIR ps documentation [DOCDIR] -_ACEOF - - cat <<\_ACEOF -_ACEOF -fi - -if test -n "$ac_init_help"; then - - cat <<\_ACEOF - -Optional Packages: - --with-PACKAGE[=ARG] use PACKAGE [ARG=yes] - --without-PACKAGE do not use PACKAGE (same as --with-PACKAGE=no) - --with-data=DYSH_DATA Root directory of DYSH_DATA (or set $DYSH_DATA) - -Report bugs to the package provider. -_ACEOF -ac_status=$? 
-fi - -if test "$ac_init_help" = "recursive"; then - # If there are subdirs, report their specific --help. - for ac_dir in : $ac_subdirs_all; do test "x$ac_dir" = x: && continue - test -d "$ac_dir" || - { cd "$srcdir" && ac_pwd=`pwd` && srcdir=. && test -d "$ac_dir"; } || - continue - ac_builddir=. - -case "$ac_dir" in -.) ac_dir_suffix= ac_top_builddir_sub=. ac_top_build_prefix= ;; -*) - ac_dir_suffix=/`printf "%s\n" "$ac_dir" | sed 's|^\.[\\/]||'` - # A ".." for each directory in $ac_dir_suffix. - ac_top_builddir_sub=`printf "%s\n" "$ac_dir_suffix" | sed 's|/[^\\/]*|/..|g;s|/||'` - case $ac_top_builddir_sub in - "") ac_top_builddir_sub=. ac_top_build_prefix= ;; - *) ac_top_build_prefix=$ac_top_builddir_sub/ ;; - esac ;; -esac -ac_abs_top_builddir=$ac_pwd -ac_abs_builddir=$ac_pwd$ac_dir_suffix -# for backward compatibility: -ac_top_builddir=$ac_top_build_prefix - -case $srcdir in - .) # We are building in place. - ac_srcdir=. - ac_top_srcdir=$ac_top_builddir_sub - ac_abs_top_srcdir=$ac_pwd ;; - [\\/]* | ?:[\\/]* ) # Absolute name. - ac_srcdir=$srcdir$ac_dir_suffix; - ac_top_srcdir=$srcdir - ac_abs_top_srcdir=$srcdir ;; - *) # Relative name. - ac_srcdir=$ac_top_build_prefix$srcdir$ac_dir_suffix - ac_top_srcdir=$ac_top_build_prefix$srcdir - ac_abs_top_srcdir=$ac_pwd/$srcdir ;; -esac -ac_abs_srcdir=$ac_abs_top_srcdir$ac_dir_suffix - - cd "$ac_dir" || { ac_status=$?; continue; } - # Check for configure.gnu first; this name is used for a wrapper for - # Metaconfig's "Configure" on case-insensitive file systems. - if test -f "$ac_srcdir/configure.gnu"; then - echo && - $SHELL "$ac_srcdir/configure.gnu" --help=recursive - elif test -f "$ac_srcdir/configure"; then - echo && - $SHELL "$ac_srcdir/configure" --help=recursive - else - printf "%s\n" "$as_me: WARNING: no configuration information is in $ac_dir" >&2 - fi || ac_status=$? - cd "$ac_pwd" || { ac_status=$?; break; } - done -fi - -test -n "$ac_init_help" && exit $ac_status -if $ac_init_version; then - cat <<\_ACEOF -configure -generated by GNU Autoconf 2.71 - -Copyright (C) 2021 Free Software Foundation, Inc. -This configure script is free software; the Free Software Foundation -gives unlimited permission to copy, distribute and modify it. -_ACEOF - exit -fi - -## ------------------------ ## -## Autoconf initialization. ## -## ------------------------ ## -ac_configure_args_raw= -for ac_arg -do - case $ac_arg in - *\'*) - ac_arg=`printf "%s\n" "$ac_arg" | sed "s/'/'\\\\\\\\''/g"` ;; - esac - as_fn_append ac_configure_args_raw " '$ac_arg'" -done - -case $ac_configure_args_raw in - *$as_nl*) - ac_safe_unquote= ;; - *) - ac_unsafe_z='|&;<>()$`\\"*?[ '' ' # This string ends in space, tab. - ac_unsafe_a="$ac_unsafe_z#~" - ac_safe_unquote="s/ '\\([^$ac_unsafe_a][^$ac_unsafe_z]*\\)'/ \\1/g" - ac_configure_args_raw=` printf "%s\n" "$ac_configure_args_raw" | sed "$ac_safe_unquote"`;; -esac - -cat >config.log <<_ACEOF -This file contains any messages produced by compilers while -running configure, to aid debugging if configure makes a mistake. - -It was created by $as_me, which was -generated by GNU Autoconf 2.71. Invocation command line was - - $ $0$ac_configure_args_raw - -_ACEOF -exec 5>>config.log -{ -cat <<_ASUNAME -## --------- ## -## Platform. 
## -## --------- ## - -hostname = `(hostname || uname -n) 2>/dev/null | sed 1q` -uname -m = `(uname -m) 2>/dev/null || echo unknown` -uname -r = `(uname -r) 2>/dev/null || echo unknown` -uname -s = `(uname -s) 2>/dev/null || echo unknown` -uname -v = `(uname -v) 2>/dev/null || echo unknown` - -/usr/bin/uname -p = `(/usr/bin/uname -p) 2>/dev/null || echo unknown` -/bin/uname -X = `(/bin/uname -X) 2>/dev/null || echo unknown` - -/bin/arch = `(/bin/arch) 2>/dev/null || echo unknown` -/usr/bin/arch -k = `(/usr/bin/arch -k) 2>/dev/null || echo unknown` -/usr/convex/getsysinfo = `(/usr/convex/getsysinfo) 2>/dev/null || echo unknown` -/usr/bin/hostinfo = `(/usr/bin/hostinfo) 2>/dev/null || echo unknown` -/bin/machine = `(/bin/machine) 2>/dev/null || echo unknown` -/usr/bin/oslevel = `(/usr/bin/oslevel) 2>/dev/null || echo unknown` -/bin/universe = `(/bin/universe) 2>/dev/null || echo unknown` - -_ASUNAME - -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - case $as_dir in #((( - '') as_dir=./ ;; - */) ;; - *) as_dir=$as_dir/ ;; - esac - printf "%s\n" "PATH: $as_dir" - done -IFS=$as_save_IFS - -} >&5 - -cat >&5 <<_ACEOF - - -## ----------- ## -## Core tests. ## -## ----------- ## - -_ACEOF - - -# Keep a trace of the command line. -# Strip out --no-create and --no-recursion so they do not pile up. -# Strip out --silent because we don't want to record it for future runs. -# Also quote any args containing shell meta-characters. -# Make two passes to allow for proper duplicate-argument suppression. -ac_configure_args= -ac_configure_args0= -ac_configure_args1= -ac_must_keep_next=false -for ac_pass in 1 2 -do - for ac_arg - do - case $ac_arg in - -no-create | --no-c* | -n | -no-recursion | --no-r*) continue ;; - -q | -quiet | --quiet | --quie | --qui | --qu | --q \ - | -silent | --silent | --silen | --sile | --sil) - continue ;; - *\'*) - ac_arg=`printf "%s\n" "$ac_arg" | sed "s/'/'\\\\\\\\''/g"` ;; - esac - case $ac_pass in - 1) as_fn_append ac_configure_args0 " '$ac_arg'" ;; - 2) - as_fn_append ac_configure_args1 " '$ac_arg'" - if test $ac_must_keep_next = true; then - ac_must_keep_next=false # Got value, back to normal. - else - case $ac_arg in - *=* | --config-cache | -C | -disable-* | --disable-* \ - | -enable-* | --enable-* | -gas | --g* | -nfp | --nf* \ - | -q | -quiet | --q* | -silent | --sil* | -v | -verb* \ - | -with-* | --with-* | -without-* | --without-* | --x) - case "$ac_configure_args0 " in - "$ac_configure_args1"*" '$ac_arg' "* ) continue ;; - esac - ;; - -* ) ac_must_keep_next=true ;; - esac - fi - as_fn_append ac_configure_args " '$ac_arg'" - ;; - esac - done -done -{ ac_configure_args0=; unset ac_configure_args0;} -{ ac_configure_args1=; unset ac_configure_args1;} - -# When interrupted or exit'd, cleanup temporary files, and complete -# config.log. We remove comments because anyway the quotes in there -# would cause problems or look ugly. -# WARNING: Use '\'' to represent an apostrophe within the trap. -# WARNING: Do not start the trap code with a newline, due to a FreeBSD 4.0 bug. -trap 'exit_status=$? - # Sanitize IFS. - IFS=" "" $as_nl" - # Save into config.log some information that might help in debugging. - { - echo - - printf "%s\n" "## ---------------- ## -## Cache variables. 
## -## ---------------- ##" - echo - # The following way of writing the cache mishandles newlines in values, -( - for ac_var in `(set) 2>&1 | sed -n '\''s/^\([a-zA-Z_][a-zA-Z0-9_]*\)=.*/\1/p'\''`; do - eval ac_val=\$$ac_var - case $ac_val in #( - *${as_nl}*) - case $ac_var in #( - *_cv_*) { printf "%s\n" "$as_me:${as_lineno-$LINENO}: WARNING: cache variable $ac_var contains a newline" >&5 -printf "%s\n" "$as_me: WARNING: cache variable $ac_var contains a newline" >&2;} ;; - esac - case $ac_var in #( - _ | IFS | as_nl) ;; #( - BASH_ARGV | BASH_SOURCE) eval $ac_var= ;; #( - *) { eval $ac_var=; unset $ac_var;} ;; - esac ;; - esac - done - (set) 2>&1 | - case $as_nl`(ac_space='\'' '\''; set) 2>&1` in #( - *${as_nl}ac_space=\ *) - sed -n \ - "s/'\''/'\''\\\\'\'''\''/g; - s/^\\([_$as_cr_alnum]*_cv_[_$as_cr_alnum]*\\)=\\(.*\\)/\\1='\''\\2'\''/p" - ;; #( - *) - sed -n "/^[_$as_cr_alnum]*_cv_[_$as_cr_alnum]*=/p" - ;; - esac | - sort -) - echo - - printf "%s\n" "## ----------------- ## -## Output variables. ## -## ----------------- ##" - echo - for ac_var in $ac_subst_vars - do - eval ac_val=\$$ac_var - case $ac_val in - *\'\''*) ac_val=`printf "%s\n" "$ac_val" | sed "s/'\''/'\''\\\\\\\\'\'''\''/g"`;; - esac - printf "%s\n" "$ac_var='\''$ac_val'\''" - done | sort - echo - - if test -n "$ac_subst_files"; then - printf "%s\n" "## ------------------- ## -## File substitutions. ## -## ------------------- ##" - echo - for ac_var in $ac_subst_files - do - eval ac_val=\$$ac_var - case $ac_val in - *\'\''*) ac_val=`printf "%s\n" "$ac_val" | sed "s/'\''/'\''\\\\\\\\'\'''\''/g"`;; - esac - printf "%s\n" "$ac_var='\''$ac_val'\''" - done | sort - echo - fi - - if test -s confdefs.h; then - printf "%s\n" "## ----------- ## -## confdefs.h. ## -## ----------- ##" - echo - cat confdefs.h - echo - fi - test "$ac_signal" != 0 && - printf "%s\n" "$as_me: caught signal $ac_signal" - printf "%s\n" "$as_me: exit $exit_status" - } >&5 - rm -f core *.core core.conftest.* && - rm -f -r conftest* confdefs* conf$$* $ac_clean_files && - exit $exit_status -' 0 -for ac_signal in 1 2 13 15; do - trap 'ac_signal='$ac_signal'; as_fn_exit 1' $ac_signal -done -ac_signal=0 - -# confdefs.h avoids OS command line length limits that DEFS can exceed. -rm -f -r conftest* confdefs.h - -printf "%s\n" "/* confdefs.h */" > confdefs.h - -# Predefined preprocessor variables. - -printf "%s\n" "#define PACKAGE_NAME \"$PACKAGE_NAME\"" >>confdefs.h - -printf "%s\n" "#define PACKAGE_TARNAME \"$PACKAGE_TARNAME\"" >>confdefs.h - -printf "%s\n" "#define PACKAGE_VERSION \"$PACKAGE_VERSION\"" >>confdefs.h - -printf "%s\n" "#define PACKAGE_STRING \"$PACKAGE_STRING\"" >>confdefs.h - -printf "%s\n" "#define PACKAGE_BUGREPORT \"$PACKAGE_BUGREPORT\"" >>confdefs.h - -printf "%s\n" "#define PACKAGE_URL \"$PACKAGE_URL\"" >>confdefs.h - - -# Let the site file select an alternate cache file if it wants to. -# Prefer an explicitly selected file to automatically selected ones. 
-if test -n "$CONFIG_SITE"; then - ac_site_files="$CONFIG_SITE" -elif test "x$prefix" != xNONE; then - ac_site_files="$prefix/share/config.site $prefix/etc/config.site" -else - ac_site_files="$ac_default_prefix/share/config.site $ac_default_prefix/etc/config.site" -fi - -for ac_site_file in $ac_site_files -do - case $ac_site_file in #( - */*) : - ;; #( - *) : - ac_site_file=./$ac_site_file ;; -esac - if test -f "$ac_site_file" && test -r "$ac_site_file"; then - { printf "%s\n" "$as_me:${as_lineno-$LINENO}: loading site script $ac_site_file" >&5 -printf "%s\n" "$as_me: loading site script $ac_site_file" >&6;} - sed 's/^/| /' "$ac_site_file" >&5 - . "$ac_site_file" \ - || { { printf "%s\n" "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 -printf "%s\n" "$as_me: error: in \`$ac_pwd':" >&2;} -as_fn_error $? "failed to load site script $ac_site_file -See \`config.log' for more details" "$LINENO" 5; } - fi -done - -if test -r "$cache_file"; then - # Some versions of bash will fail to source /dev/null (special files - # actually), so we avoid doing that. DJGPP emulates it as a regular file. - if test /dev/null != "$cache_file" && test -f "$cache_file"; then - { printf "%s\n" "$as_me:${as_lineno-$LINENO}: loading cache $cache_file" >&5 -printf "%s\n" "$as_me: loading cache $cache_file" >&6;} - case $cache_file in - [\\/]* | ?:[\\/]* ) . "$cache_file";; - *) . "./$cache_file";; - esac - fi -else - { printf "%s\n" "$as_me:${as_lineno-$LINENO}: creating cache $cache_file" >&5 -printf "%s\n" "$as_me: creating cache $cache_file" >&6;} - >$cache_file -fi - -# Check that the precious variables saved in the cache have kept the same -# value. -ac_cache_corrupted=false -for ac_var in $ac_precious_vars; do - eval ac_old_set=\$ac_cv_env_${ac_var}_set - eval ac_new_set=\$ac_env_${ac_var}_set - eval ac_old_val=\$ac_cv_env_${ac_var}_value - eval ac_new_val=\$ac_env_${ac_var}_value - case $ac_old_set,$ac_new_set in - set,) - { printf "%s\n" "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' was set to \`$ac_old_val' in the previous run" >&5 -printf "%s\n" "$as_me: error: \`$ac_var' was set to \`$ac_old_val' in the previous run" >&2;} - ac_cache_corrupted=: ;; - ,set) - { printf "%s\n" "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' was not set in the previous run" >&5 -printf "%s\n" "$as_me: error: \`$ac_var' was not set in the previous run" >&2;} - ac_cache_corrupted=: ;; - ,);; - *) - if test "x$ac_old_val" != "x$ac_new_val"; then - # differences in whitespace do not lead to failure. - ac_old_val_w=`echo x $ac_old_val` - ac_new_val_w=`echo x $ac_new_val` - if test "$ac_old_val_w" != "$ac_new_val_w"; then - { printf "%s\n" "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' has changed since the previous run:" >&5 -printf "%s\n" "$as_me: error: \`$ac_var' has changed since the previous run:" >&2;} - ac_cache_corrupted=: - else - { printf "%s\n" "$as_me:${as_lineno-$LINENO}: warning: ignoring whitespace changes in \`$ac_var' since the previous run:" >&5 -printf "%s\n" "$as_me: warning: ignoring whitespace changes in \`$ac_var' since the previous run:" >&2;} - eval $ac_var=\$ac_old_val - fi - { printf "%s\n" "$as_me:${as_lineno-$LINENO}: former value: \`$ac_old_val'" >&5 -printf "%s\n" "$as_me: former value: \`$ac_old_val'" >&2;} - { printf "%s\n" "$as_me:${as_lineno-$LINENO}: current value: \`$ac_new_val'" >&5 -printf "%s\n" "$as_me: current value: \`$ac_new_val'" >&2;} - fi;; - esac - # Pass precious variables to config.status. 
- if test "$ac_new_set" = set; then - case $ac_new_val in - *\'*) ac_arg=$ac_var=`printf "%s\n" "$ac_new_val" | sed "s/'/'\\\\\\\\''/g"` ;; - *) ac_arg=$ac_var=$ac_new_val ;; - esac - case " $ac_configure_args " in - *" '$ac_arg' "*) ;; # Avoid dups. Use of quotes ensures accuracy. - *) as_fn_append ac_configure_args " '$ac_arg'" ;; - esac - fi -done -if $ac_cache_corrupted; then - { printf "%s\n" "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 -printf "%s\n" "$as_me: error: in \`$ac_pwd':" >&2;} - { printf "%s\n" "$as_me:${as_lineno-$LINENO}: error: changes in the environment can compromise the build" >&5 -printf "%s\n" "$as_me: error: changes in the environment can compromise the build" >&2;} - as_fn_error $? "run \`${MAKE-make} distclean' and/or \`rm $cache_file' - and start over" "$LINENO" 5 -fi -## -------------------- ## -## Main body of script. ## -## -------------------- ## - -ac_ext=c -ac_cpp='$CPP $CPPFLAGS' -ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' -ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' -ac_compiler_gnu=$ac_cv_c_compiler_gnu - - - - -{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking DYSH config" >&5 -printf %s "checking DYSH config... " >&6; } -echo "" - -DYSH="`pwd`" - -echo Using DYSH=$DYSH - -EDIT_MSG="Do not edit this file, it has been generated via configure in DYSH" - - - - - -if test ! -z $DYSH_DATA ; then - DYSH_DATA=$DYSH_DATA -elif test -d ~/GBT/data ; then - DYSH_DATA=~/GBT/data -elif test -d /home/gbt ; then - DYSH_DATA=/home/gbt -else - echo "Could not find the dysh_data, use --with-data=" - DYSH_DATA=/tmp -fi -data_default=$DYSH_DATA - - -# Check whether --with-data was given. -if test ${with_data+y} -then : - withval=$with_data; dysh_data="$withval" -else $as_nop - dysh_data="$data_default" -fi - -DYSH_DATA=$dysh_data - - -echo "Using DYSH_DATA=$DYSH_DATA" - - - -# put these in a single AC_OUTPUT command so config.status is not called multiple times. -ac_config_files="$ac_config_files dysh_start.sh" - -cat >confcache <<\_ACEOF -# This file is a shell script that caches the results of configure -# tests run on this system so they can be shared between configure -# scripts and configure runs, see configure's option --config-cache. -# It is not useful on other systems. If it contains results you don't -# want to keep, you may remove or edit it. -# -# config.status only pays attention to the cache file if you give it -# the --recheck option to rerun configure. -# -# `ac_cv_env_foo' variables (set or unset) will be overridden when -# loading this file, other *unset* `ac_cv_foo' will be assigned the -# following values. - -_ACEOF - -# The following way of writing the cache mishandles newlines in values, -# but we know of no workaround that is simple, portable, and efficient. -# So, we kill variables containing newlines. -# Ultrix sh set writes to stderr and can't be redirected directly, -# and sets the high bit in the cache file unless we assign to the vars. 
-( - for ac_var in `(set) 2>&1 | sed -n 's/^\([a-zA-Z_][a-zA-Z0-9_]*\)=.*/\1/p'`; do - eval ac_val=\$$ac_var - case $ac_val in #( - *${as_nl}*) - case $ac_var in #( - *_cv_*) { printf "%s\n" "$as_me:${as_lineno-$LINENO}: WARNING: cache variable $ac_var contains a newline" >&5 -printf "%s\n" "$as_me: WARNING: cache variable $ac_var contains a newline" >&2;} ;; - esac - case $ac_var in #( - _ | IFS | as_nl) ;; #( - BASH_ARGV | BASH_SOURCE) eval $ac_var= ;; #( - *) { eval $ac_var=; unset $ac_var;} ;; - esac ;; - esac - done - - (set) 2>&1 | - case $as_nl`(ac_space=' '; set) 2>&1` in #( - *${as_nl}ac_space=\ *) - # `set' does not quote correctly, so add quotes: double-quote - # substitution turns \\\\ into \\, and sed turns \\ into \. - sed -n \ - "s/'/'\\\\''/g; - s/^\\([_$as_cr_alnum]*_cv_[_$as_cr_alnum]*\\)=\\(.*\\)/\\1='\\2'/p" - ;; #( - *) - # `set' quotes correctly as required by POSIX, so do not add quotes. - sed -n "/^[_$as_cr_alnum]*_cv_[_$as_cr_alnum]*=/p" - ;; - esac | - sort -) | - sed ' - /^ac_cv_env_/b end - t clear - :clear - s/^\([^=]*\)=\(.*[{}].*\)$/test ${\1+y} || &/ - t end - s/^\([^=]*\)=\(.*\)$/\1=${\1=\2}/ - :end' >>confcache -if diff "$cache_file" confcache >/dev/null 2>&1; then :; else - if test -w "$cache_file"; then - if test "x$cache_file" != "x/dev/null"; then - { printf "%s\n" "$as_me:${as_lineno-$LINENO}: updating cache $cache_file" >&5 -printf "%s\n" "$as_me: updating cache $cache_file" >&6;} - if test ! -f "$cache_file" || test -h "$cache_file"; then - cat confcache >"$cache_file" - else - case $cache_file in #( - */* | ?:*) - mv -f confcache "$cache_file"$$ && - mv -f "$cache_file"$$ "$cache_file" ;; #( - *) - mv -f confcache "$cache_file" ;; - esac - fi - fi - else - { printf "%s\n" "$as_me:${as_lineno-$LINENO}: not updating unwritable cache $cache_file" >&5 -printf "%s\n" "$as_me: not updating unwritable cache $cache_file" >&6;} - fi -fi -rm -f confcache - -test "x$prefix" = xNONE && prefix=$ac_default_prefix -# Let make expand exec_prefix. -test "x$exec_prefix" = xNONE && exec_prefix='${prefix}' - -# Transform confdefs.h into DEFS. -# Protect against shell expansion while executing Makefile rules. -# Protect against Makefile macro expansion. -# -# If the first sed substitution is executed (which looks for macros that -# take arguments), then branch to the quote section. Otherwise, -# look for a macro that doesn't take arguments. -ac_script=' -:mline -/\\$/{ - N - s,\\\n,, - b mline -} -t clear -:clear -s/^[ ]*#[ ]*define[ ][ ]*\([^ (][^ (]*([^)]*)\)[ ]*\(.*\)/-D\1=\2/g -t quote -s/^[ ]*#[ ]*define[ ][ ]*\([^ ][^ ]*\)[ ]*\(.*\)/-D\1=\2/g -t quote -b any -:quote -s/[ `~#$^&*(){}\\|;'\''"<>?]/\\&/g -s/\[/\\&/g -s/\]/\\&/g -s/\$/$$/g -H -:any -${ - g - s/^\n// - s/\n/ /g - p -} -' -DEFS=`sed -n "$ac_script" confdefs.h` - - -ac_libobjs= -ac_ltlibobjs= -U= -for ac_i in : $LIBOBJS; do test "x$ac_i" = x: && continue - # 1. Remove the extension, and $U if already installed. - ac_script='s/\$U\././;s/\.o$//;s/\.obj$//' - ac_i=`printf "%s\n" "$ac_i" | sed "$ac_script"` - # 2. Prepend LIBOBJDIR. When used with automake>=1.10 LIBOBJDIR - # will be set to the directory where LIBOBJS objects are built. 
- as_fn_append ac_libobjs " \${LIBOBJDIR}$ac_i\$U.$ac_objext" - as_fn_append ac_ltlibobjs " \${LIBOBJDIR}$ac_i"'$U.lo' -done -LIBOBJS=$ac_libobjs - -LTLIBOBJS=$ac_ltlibobjs - - - -: "${CONFIG_STATUS=./config.status}" -ac_write_fail=0 -ac_clean_files_save=$ac_clean_files -ac_clean_files="$ac_clean_files $CONFIG_STATUS" -{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: creating $CONFIG_STATUS" >&5 -printf "%s\n" "$as_me: creating $CONFIG_STATUS" >&6;} -as_write_fail=0 -cat >$CONFIG_STATUS <<_ASEOF || as_write_fail=1 -#! $SHELL -# Generated by $as_me. -# Run this file to recreate the current configuration. -# Compiler output produced by configure, useful for debugging -# configure, is in config.log if it exists. - -debug=false -ac_cs_recheck=false -ac_cs_silent=false - -SHELL=\${CONFIG_SHELL-$SHELL} -export SHELL -_ASEOF -cat >>$CONFIG_STATUS <<\_ASEOF || as_write_fail=1 -## -------------------- ## -## M4sh Initialization. ## -## -------------------- ## - -# Be more Bourne compatible -DUALCASE=1; export DUALCASE # for MKS sh -as_nop=: -if test ${ZSH_VERSION+y} && (emulate sh) >/dev/null 2>&1 -then : - emulate sh - NULLCMD=: - # Pre-4.2 versions of Zsh do word splitting on ${1+"$@"}, which - # is contrary to our usage. Disable this feature. - alias -g '${1+"$@"}'='"$@"' - setopt NO_GLOB_SUBST -else $as_nop - case `(set -o) 2>/dev/null` in #( - *posix*) : - set -o posix ;; #( - *) : - ;; -esac -fi - - - -# Reset variables that may have inherited troublesome values from -# the environment. - -# IFS needs to be set, to space, tab, and newline, in precisely that order. -# (If _AS_PATH_WALK were called with IFS unset, it would have the -# side effect of setting IFS to empty, thus disabling word splitting.) -# Quoting is to prevent editors from complaining about space-tab. -as_nl=' -' -export as_nl -IFS=" "" $as_nl" - -PS1='$ ' -PS2='> ' -PS4='+ ' - -# Ensure predictable behavior from utilities with locale-dependent output. -LC_ALL=C -export LC_ALL -LANGUAGE=C -export LANGUAGE - -# We cannot yet rely on "unset" to work, but we need these variables -# to be unset--not just set to an empty or harmless value--now, to -# avoid bugs in old shells (e.g. pre-3.0 UWIN ksh). This construct -# also avoids known problems related to "unset" and subshell syntax -# in other old shells (e.g. bash 2.01 and pdksh 5.2.14). -for as_var in BASH_ENV ENV MAIL MAILPATH CDPATH -do eval test \${$as_var+y} \ - && ( (unset $as_var) || exit 1) >/dev/null 2>&1 && unset $as_var || : -done - -# Ensure that fds 0, 1, and 2 are open. -if (exec 3>&0) 2>/dev/null; then :; else exec 0&1) 2>/dev/null; then :; else exec 1>/dev/null; fi -if (exec 3>&2) ; then :; else exec 2>/dev/null; fi - -# The user is always right. -if ${PATH_SEPARATOR+false} :; then - PATH_SEPARATOR=: - (PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 && { - (PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 || - PATH_SEPARATOR=';' - } -fi - - -# Find who we are. Look in the path if we contain no directory separator. -as_myself= -case $0 in #(( - *[\\/]* ) as_myself=$0 ;; - *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - case $as_dir in #((( - '') as_dir=./ ;; - */) ;; - *) as_dir=$as_dir/ ;; - esac - test -r "$as_dir$0" && as_myself=$as_dir$0 && break - done -IFS=$as_save_IFS - - ;; -esac -# We did not find ourselves, most probably we were run as `sh COMMAND' -# in which case we are not to be found in the path. -if test "x$as_myself" = x; then - as_myself=$0 -fi -if test ! 
-f "$as_myself"; then - printf "%s\n" "$as_myself: error: cannot find myself; rerun with an absolute file name" >&2 - exit 1 -fi - - - -# as_fn_error STATUS ERROR [LINENO LOG_FD] -# ---------------------------------------- -# Output "`basename $0`: error: ERROR" to stderr. If LINENO and LOG_FD are -# provided, also output the error to LOG_FD, referencing LINENO. Then exit the -# script with STATUS, using 1 if that was 0. -as_fn_error () -{ - as_status=$1; test $as_status -eq 0 && as_status=1 - if test "$4"; then - as_lineno=${as_lineno-"$3"} as_lineno_stack=as_lineno_stack=$as_lineno_stack - printf "%s\n" "$as_me:${as_lineno-$LINENO}: error: $2" >&$4 - fi - printf "%s\n" "$as_me: error: $2" >&2 - as_fn_exit $as_status -} # as_fn_error - - - -# as_fn_set_status STATUS -# ----------------------- -# Set $? to STATUS, without forking. -as_fn_set_status () -{ - return $1 -} # as_fn_set_status - -# as_fn_exit STATUS -# ----------------- -# Exit the shell with STATUS, even in a "trap 0" or "set -e" context. -as_fn_exit () -{ - set +e - as_fn_set_status $1 - exit $1 -} # as_fn_exit - -# as_fn_unset VAR -# --------------- -# Portably unset VAR. -as_fn_unset () -{ - { eval $1=; unset $1;} -} -as_unset=as_fn_unset - -# as_fn_append VAR VALUE -# ---------------------- -# Append the text in VALUE to the end of the definition contained in VAR. Take -# advantage of any shell optimizations that allow amortized linear growth over -# repeated appends, instead of the typical quadratic growth present in naive -# implementations. -if (eval "as_var=1; as_var+=2; test x\$as_var = x12") 2>/dev/null -then : - eval 'as_fn_append () - { - eval $1+=\$2 - }' -else $as_nop - as_fn_append () - { - eval $1=\$$1\$2 - } -fi # as_fn_append - -# as_fn_arith ARG... -# ------------------ -# Perform arithmetic evaluation on the ARGs, and store the result in the -# global $as_val. Take advantage of shells that can avoid forks. The arguments -# must be portable across $(()) and expr. -if (eval "test \$(( 1 + 1 )) = 2") 2>/dev/null -then : - eval 'as_fn_arith () - { - as_val=$(( $* )) - }' -else $as_nop - as_fn_arith () - { - as_val=`expr "$@" || test $? -eq 1` - } -fi # as_fn_arith - - -if expr a : '\(a\)' >/dev/null 2>&1 && - test "X`expr 00001 : '.*\(...\)'`" = X001; then - as_expr=expr -else - as_expr=false -fi - -if (basename -- /) >/dev/null 2>&1 && test "X`basename -- / 2>&1`" = "X/"; then - as_basename=basename -else - as_basename=false -fi - -if (as_dir=`dirname -- /` && test "X$as_dir" = X/) >/dev/null 2>&1; then - as_dirname=dirname -else - as_dirname=false -fi - -as_me=`$as_basename -- "$0" || -$as_expr X/"$0" : '.*/\([^/][^/]*\)/*$' \| \ - X"$0" : 'X\(//\)$' \| \ - X"$0" : 'X\(/\)' \| . 2>/dev/null || -printf "%s\n" X/"$0" | - sed '/^.*\/\([^/][^/]*\)\/*$/{ - s//\1/ - q - } - /^X\/\(\/\/\)$/{ - s//\1/ - q - } - /^X\/\(\/\).*/{ - s//\1/ - q - } - s/.*/./; q'` - -# Avoid depending upon Character Ranges. -as_cr_letters='abcdefghijklmnopqrstuvwxyz' -as_cr_LETTERS='ABCDEFGHIJKLMNOPQRSTUVWXYZ' -as_cr_Letters=$as_cr_letters$as_cr_LETTERS -as_cr_digits='0123456789' -as_cr_alnum=$as_cr_Letters$as_cr_digits - - -# Determine whether it's possible to make 'echo' print without a newline. -# These variables are no longer used directly by Autoconf, but are AC_SUBSTed -# for compatibility with existing Makefiles. -ECHO_C= ECHO_N= ECHO_T= -case `echo -n x` in #((((( --n*) - case `echo 'xy\c'` in - *c*) ECHO_T=' ';; # ECHO_T is single tab character. 
- xy) ECHO_C='\c';; - *) echo `echo ksh88 bug on AIX 6.1` > /dev/null - ECHO_T=' ';; - esac;; -*) - ECHO_N='-n';; -esac - -# For backward compatibility with old third-party macros, we provide -# the shell variables $as_echo and $as_echo_n. New code should use -# AS_ECHO(["message"]) and AS_ECHO_N(["message"]), respectively. -as_echo='printf %s\n' -as_echo_n='printf %s' - -rm -f conf$$ conf$$.exe conf$$.file -if test -d conf$$.dir; then - rm -f conf$$.dir/conf$$.file -else - rm -f conf$$.dir - mkdir conf$$.dir 2>/dev/null -fi -if (echo >conf$$.file) 2>/dev/null; then - if ln -s conf$$.file conf$$ 2>/dev/null; then - as_ln_s='ln -s' - # ... but there are two gotchas: - # 1) On MSYS, both `ln -s file dir' and `ln file dir' fail. - # 2) DJGPP < 2.04 has no symlinks; `ln -s' creates a wrapper executable. - # In both cases, we have to default to `cp -pR'. - ln -s conf$$.file conf$$.dir 2>/dev/null && test ! -f conf$$.exe || - as_ln_s='cp -pR' - elif ln conf$$.file conf$$ 2>/dev/null; then - as_ln_s=ln - else - as_ln_s='cp -pR' - fi -else - as_ln_s='cp -pR' -fi -rm -f conf$$ conf$$.exe conf$$.dir/conf$$.file conf$$.file -rmdir conf$$.dir 2>/dev/null - - -# as_fn_mkdir_p -# ------------- -# Create "$as_dir" as a directory, including parents if necessary. -as_fn_mkdir_p () -{ - - case $as_dir in #( - -*) as_dir=./$as_dir;; - esac - test -d "$as_dir" || eval $as_mkdir_p || { - as_dirs= - while :; do - case $as_dir in #( - *\'*) as_qdir=`printf "%s\n" "$as_dir" | sed "s/'/'\\\\\\\\''/g"`;; #'( - *) as_qdir=$as_dir;; - esac - as_dirs="'$as_qdir' $as_dirs" - as_dir=`$as_dirname -- "$as_dir" || -$as_expr X"$as_dir" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ - X"$as_dir" : 'X\(//\)[^/]' \| \ - X"$as_dir" : 'X\(//\)$' \| \ - X"$as_dir" : 'X\(/\)' \| . 2>/dev/null || -printf "%s\n" X"$as_dir" | - sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ - s//\1/ - q - } - /^X\(\/\/\)[^/].*/{ - s//\1/ - q - } - /^X\(\/\/\)$/{ - s//\1/ - q - } - /^X\(\/\).*/{ - s//\1/ - q - } - s/.*/./; q'` - test -d "$as_dir" && break - done - test -z "$as_dirs" || eval "mkdir $as_dirs" - } || test -d "$as_dir" || as_fn_error $? "cannot create directory $as_dir" - - -} # as_fn_mkdir_p -if mkdir -p . 2>/dev/null; then - as_mkdir_p='mkdir -p "$as_dir"' -else - test -d ./-p && rmdir ./-p - as_mkdir_p=false -fi - - -# as_fn_executable_p FILE -# ----------------------- -# Test if FILE is an executable regular file. -as_fn_executable_p () -{ - test -f "$1" && test -x "$1" -} # as_fn_executable_p -as_test_x='test -x' -as_executable_p=as_fn_executable_p - -# Sed expression to map a string onto a valid CPP name. -as_tr_cpp="eval sed 'y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g'" - -# Sed expression to map a string onto a valid variable name. -as_tr_sh="eval sed 'y%*+%pp%;s%[^_$as_cr_alnum]%_%g'" - - -exec 6>&1 -## ----------------------------------- ## -## Main body of $CONFIG_STATUS script. ## -## ----------------------------------- ## -_ASEOF -test $as_write_fail = 0 && chmod +x $CONFIG_STATUS || ac_write_fail=1 - -cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 -# Save the log message, to keep $0 and so on meaningful, and to -# report actual input values of CONFIG_FILES etc. instead of their -# values after options handling. -ac_log=" -This file was extended by $as_me, which was -generated by GNU Autoconf 2.71. 
Invocation command line was - - CONFIG_FILES = $CONFIG_FILES - CONFIG_HEADERS = $CONFIG_HEADERS - CONFIG_LINKS = $CONFIG_LINKS - CONFIG_COMMANDS = $CONFIG_COMMANDS - $ $0 $@ - -on `(hostname || uname -n) 2>/dev/null | sed 1q` -" - -_ACEOF - -case $ac_config_files in *" -"*) set x $ac_config_files; shift; ac_config_files=$*;; -esac - - - -cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 -# Files that config.status was made for. -config_files="$ac_config_files" - -_ACEOF - -cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 -ac_cs_usage="\ -\`$as_me' instantiates files and other configuration actions -from templates according to the current configuration. Unless the files -and actions are specified as TAGs, all are instantiated by default. - -Usage: $0 [OPTION]... [TAG]... - - -h, --help print this help, then exit - -V, --version print version number and configuration settings, then exit - --config print configuration, then exit - -q, --quiet, --silent - do not print progress messages - -d, --debug don't remove temporary files - --recheck update $as_me by reconfiguring in the same conditions - --file=FILE[:TEMPLATE] - instantiate the configuration file FILE - -Configuration files: -$config_files - -Report bugs to the package provider." - -_ACEOF -ac_cs_config=`printf "%s\n" "$ac_configure_args" | sed "$ac_safe_unquote"` -ac_cs_config_escaped=`printf "%s\n" "$ac_cs_config" | sed "s/^ //; s/'/'\\\\\\\\''/g"` -cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 -ac_cs_config='$ac_cs_config_escaped' -ac_cs_version="\\ -config.status -configured by $0, generated by GNU Autoconf 2.71, - with options \\"\$ac_cs_config\\" - -Copyright (C) 2021 Free Software Foundation, Inc. -This config.status script is free software; the Free Software Foundation -gives unlimited permission to copy, distribute and modify it." - -ac_pwd='$ac_pwd' -srcdir='$srcdir' -test -n "\$AWK" || AWK=awk -_ACEOF - -cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 -# The default lists apply if the user does not specify any file. -ac_need_defaults=: -while test $# != 0 -do - case $1 in - --*=?*) - ac_option=`expr "X$1" : 'X\([^=]*\)='` - ac_optarg=`expr "X$1" : 'X[^=]*=\(.*\)'` - ac_shift=: - ;; - --*=) - ac_option=`expr "X$1" : 'X\([^=]*\)='` - ac_optarg= - ac_shift=: - ;; - *) - ac_option=$1 - ac_optarg=$2 - ac_shift=shift - ;; - esac - - case $ac_option in - # Handling of the options. - -recheck | --recheck | --rechec | --reche | --rech | --rec | --re | --r) - ac_cs_recheck=: ;; - --version | --versio | --versi | --vers | --ver | --ve | --v | -V ) - printf "%s\n" "$ac_cs_version"; exit ;; - --config | --confi | --conf | --con | --co | --c ) - printf "%s\n" "$ac_cs_config"; exit ;; - --debug | --debu | --deb | --de | --d | -d ) - debug=: ;; - --file | --fil | --fi | --f ) - $ac_shift - case $ac_optarg in - *\'*) ac_optarg=`printf "%s\n" "$ac_optarg" | sed "s/'/'\\\\\\\\''/g"` ;; - '') as_fn_error $? "missing file argument" ;; - esac - as_fn_append CONFIG_FILES " '$ac_optarg'" - ac_need_defaults=false;; - --he | --h | --help | --hel | -h ) - printf "%s\n" "$ac_cs_usage"; exit ;; - -q | -quiet | --quiet | --quie | --qui | --qu | --q \ - | -silent | --silent | --silen | --sile | --sil | --si | --s) - ac_cs_silent=: ;; - - # This is an error. - -*) as_fn_error $? "unrecognized option: \`$1' -Try \`$0 --help' for more information." 
;; - - *) as_fn_append ac_config_targets " $1" - ac_need_defaults=false ;; - - esac - shift -done - -ac_configure_extra_args= - -if $ac_cs_silent; then - exec 6>/dev/null - ac_configure_extra_args="$ac_configure_extra_args --silent" -fi - -_ACEOF -cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 -if \$ac_cs_recheck; then - set X $SHELL '$0' $ac_configure_args \$ac_configure_extra_args --no-create --no-recursion - shift - \printf "%s\n" "running CONFIG_SHELL=$SHELL \$*" >&6 - CONFIG_SHELL='$SHELL' - export CONFIG_SHELL - exec "\$@" -fi - -_ACEOF -cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 -exec 5>>config.log -{ - echo - sed 'h;s/./-/g;s/^.../## /;s/...$/ ##/;p;x;p;x' <<_ASBOX -## Running $as_me. ## -_ASBOX - printf "%s\n" "$ac_log" -} >&5 - -_ACEOF -cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 -_ACEOF - -cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 - -# Handling of arguments. -for ac_config_target in $ac_config_targets -do - case $ac_config_target in - "dysh_start.sh") CONFIG_FILES="$CONFIG_FILES dysh_start.sh" ;; - - *) as_fn_error $? "invalid argument: \`$ac_config_target'" "$LINENO" 5;; - esac -done - - -# If the user did not use the arguments to specify the items to instantiate, -# then the envvar interface is used. Set only those that are not. -# We use the long form for the default assignment because of an extremely -# bizarre bug on SunOS 4.1.3. -if $ac_need_defaults; then - test ${CONFIG_FILES+y} || CONFIG_FILES=$config_files -fi - -# Have a temporary directory for convenience. Make it in the build tree -# simply because there is no reason against having it here, and in addition, -# creating and moving files from /tmp can sometimes cause problems. -# Hook for its removal unless debugging. -# Note that there is a small window in which the directory will not be cleaned: -# after its creation but before its name has been assigned to `$tmp'. -$debug || -{ - tmp= ac_tmp= - trap 'exit_status=$? - : "${ac_tmp:=$tmp}" - { test ! -d "$ac_tmp" || rm -fr "$ac_tmp"; } && exit $exit_status -' 0 - trap 'as_fn_exit 1' 1 2 13 15 -} -# Create a (secure) tmp directory for tmp files. - -{ - tmp=`(umask 077 && mktemp -d "./confXXXXXX") 2>/dev/null` && - test -d "$tmp" -} || -{ - tmp=./conf$$-$RANDOM - (umask 077 && mkdir "$tmp") -} || as_fn_error $? "cannot create a temporary directory in ." "$LINENO" 5 -ac_tmp=$tmp - -# Set up the scripts for CONFIG_FILES section. -# No need to generate them if there are no CONFIG_FILES. -# This happens for instance with `./config.status config.h'. -if test -n "$CONFIG_FILES"; then - - -ac_cr=`echo X | tr X '\015'` -# On cygwin, bash can eat \r inside `` if the user requested igncr. -# But we know of no other shell where ac_cr would be empty at this -# point, so we can use a bashism as a fallback. -if test "x$ac_cr" = x; then - eval ac_cr=\$\'\\r\' -fi -ac_cs_awk_cr=`$AWK 'BEGIN { print "a\rb" }' /dev/null` -if test "$ac_cs_awk_cr" = "a${ac_cr}b"; then - ac_cs_awk_cr='\\r' -else - ac_cs_awk_cr=$ac_cr -fi - -echo 'BEGIN {' >"$ac_tmp/subs1.awk" && -_ACEOF - - -{ - echo "cat >conf$$subs.awk <<_ACEOF" && - echo "$ac_subst_vars" | sed 's/.*/&!$&$ac_delim/' && - echo "_ACEOF" -} >conf$$subs.sh || - as_fn_error $? "could not make $CONFIG_STATUS" "$LINENO" 5 -ac_delim_num=`echo "$ac_subst_vars" | grep -c '^'` -ac_delim='%!_!# ' -for ac_last_try in false false false false false :; do - . ./conf$$subs.sh || - as_fn_error $? 
"could not make $CONFIG_STATUS" "$LINENO" 5 - - ac_delim_n=`sed -n "s/.*$ac_delim\$/X/p" conf$$subs.awk | grep -c X` - if test $ac_delim_n = $ac_delim_num; then - break - elif $ac_last_try; then - as_fn_error $? "could not make $CONFIG_STATUS" "$LINENO" 5 - else - ac_delim="$ac_delim!$ac_delim _$ac_delim!! " - fi -done -rm -f conf$$subs.sh - -cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 -cat >>"\$ac_tmp/subs1.awk" <<\\_ACAWK && -_ACEOF -sed -n ' -h -s/^/S["/; s/!.*/"]=/ -p -g -s/^[^!]*!// -:repl -t repl -s/'"$ac_delim"'$// -t delim -:nl -h -s/\(.\{148\}\)..*/\1/ -t more1 -s/["\\]/\\&/g; s/^/"/; s/$/\\n"\\/ -p -n -b repl -:more1 -s/["\\]/\\&/g; s/^/"/; s/$/"\\/ -p -g -s/.\{148\}// -t nl -:delim -h -s/\(.\{148\}\)..*/\1/ -t more2 -s/["\\]/\\&/g; s/^/"/; s/$/"/ -p -b -:more2 -s/["\\]/\\&/g; s/^/"/; s/$/"\\/ -p -g -s/.\{148\}// -t delim -' >$CONFIG_STATUS || ac_write_fail=1 -rm -f conf$$subs.awk -cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 -_ACAWK -cat >>"\$ac_tmp/subs1.awk" <<_ACAWK && - for (key in S) S_is_set[key] = 1 - FS = "" - -} -{ - line = $ 0 - nfields = split(line, field, "@") - substed = 0 - len = length(field[1]) - for (i = 2; i < nfields; i++) { - key = field[i] - keylen = length(key) - if (S_is_set[key]) { - value = S[key] - line = substr(line, 1, len) "" value "" substr(line, len + keylen + 3) - len += length(value) + length(field[++i]) - substed = 1 - } else - len += 1 + keylen - } - - print line -} - -_ACAWK -_ACEOF -cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 -if sed "s/$ac_cr//" < /dev/null > /dev/null 2>&1; then - sed "s/$ac_cr\$//; s/$ac_cr/$ac_cs_awk_cr/g" -else - cat -fi < "$ac_tmp/subs1.awk" > "$ac_tmp/subs.awk" \ - || as_fn_error $? "could not setup config files machinery" "$LINENO" 5 -_ACEOF - -# VPATH may cause trouble with some makes, so we remove sole $(srcdir), -# ${srcdir} and @srcdir@ entries from VPATH if srcdir is ".", strip leading and -# trailing colons and then remove the whole line if VPATH becomes empty -# (actually we leave an empty line to preserve line numbers). -if test "x$srcdir" = x.; then - ac_vpsub='/^[ ]*VPATH[ ]*=[ ]*/{ -h -s/// -s/^/:/ -s/[ ]*$/:/ -s/:\$(srcdir):/:/g -s/:\${srcdir}:/:/g -s/:@srcdir@:/:/g -s/^:*// -s/:*$// -x -s/\(=[ ]*\).*/\1/ -G -s/\n// -s/^[^=]*=[ ]*$// -}' -fi - -cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 -fi # test -n "$CONFIG_FILES" - - -eval set X " :F $CONFIG_FILES " -shift -for ac_tag -do - case $ac_tag in - :[FHLC]) ac_mode=$ac_tag; continue;; - esac - case $ac_mode$ac_tag in - :[FHL]*:*);; - :L* | :C*:*) as_fn_error $? "invalid tag \`$ac_tag'" "$LINENO" 5;; - :[FH]-) ac_tag=-:-;; - :[FH]*) ac_tag=$ac_tag:$ac_tag.in;; - esac - ac_save_IFS=$IFS - IFS=: - set x $ac_tag - IFS=$ac_save_IFS - shift - ac_file=$1 - shift - - case $ac_mode in - :L) ac_source=$1;; - :[FH]) - ac_file_inputs= - for ac_f - do - case $ac_f in - -) ac_f="$ac_tmp/stdin";; - *) # Look for the file first in the build tree, then in the source tree - # (if the path is not absolute). The absolute path cannot be DOS-style, - # because $ac_f cannot contain `:'. - test -f "$ac_f" || - case $ac_f in - [\\/$]*) false;; - *) test -f "$srcdir/$ac_f" && ac_f="$srcdir/$ac_f";; - esac || - as_fn_error 1 "cannot find input file: \`$ac_f'" "$LINENO" 5;; - esac - case $ac_f in *\'*) ac_f=`printf "%s\n" "$ac_f" | sed "s/'/'\\\\\\\\''/g"`;; esac - as_fn_append ac_file_inputs " '$ac_f'" - done - - # Let's still pretend it is `configure' which instantiates (i.e., don't - # use $as_me), people would be surprised to read: - # /* config.h. 
Generated by config.status. */ - configure_input='Generated from '` - printf "%s\n" "$*" | sed 's|^[^:]*/||;s|:[^:]*/|, |g' - `' by configure.' - if test x"$ac_file" != x-; then - configure_input="$ac_file. $configure_input" - { printf "%s\n" "$as_me:${as_lineno-$LINENO}: creating $ac_file" >&5 -printf "%s\n" "$as_me: creating $ac_file" >&6;} - fi - # Neutralize special characters interpreted by sed in replacement strings. - case $configure_input in #( - *\&* | *\|* | *\\* ) - ac_sed_conf_input=`printf "%s\n" "$configure_input" | - sed 's/[\\\\&|]/\\\\&/g'`;; #( - *) ac_sed_conf_input=$configure_input;; - esac - - case $ac_tag in - *:-:* | *:-) cat >"$ac_tmp/stdin" \ - || as_fn_error $? "could not create $ac_file" "$LINENO" 5 ;; - esac - ;; - esac - - ac_dir=`$as_dirname -- "$ac_file" || -$as_expr X"$ac_file" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ - X"$ac_file" : 'X\(//\)[^/]' \| \ - X"$ac_file" : 'X\(//\)$' \| \ - X"$ac_file" : 'X\(/\)' \| . 2>/dev/null || -printf "%s\n" X"$ac_file" | - sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ - s//\1/ - q - } - /^X\(\/\/\)[^/].*/{ - s//\1/ - q - } - /^X\(\/\/\)$/{ - s//\1/ - q - } - /^X\(\/\).*/{ - s//\1/ - q - } - s/.*/./; q'` - as_dir="$ac_dir"; as_fn_mkdir_p - ac_builddir=. - -case "$ac_dir" in -.) ac_dir_suffix= ac_top_builddir_sub=. ac_top_build_prefix= ;; -*) - ac_dir_suffix=/`printf "%s\n" "$ac_dir" | sed 's|^\.[\\/]||'` - # A ".." for each directory in $ac_dir_suffix. - ac_top_builddir_sub=`printf "%s\n" "$ac_dir_suffix" | sed 's|/[^\\/]*|/..|g;s|/||'` - case $ac_top_builddir_sub in - "") ac_top_builddir_sub=. ac_top_build_prefix= ;; - *) ac_top_build_prefix=$ac_top_builddir_sub/ ;; - esac ;; -esac -ac_abs_top_builddir=$ac_pwd -ac_abs_builddir=$ac_pwd$ac_dir_suffix -# for backward compatibility: -ac_top_builddir=$ac_top_build_prefix - -case $srcdir in - .) # We are building in place. - ac_srcdir=. - ac_top_srcdir=$ac_top_builddir_sub - ac_abs_top_srcdir=$ac_pwd ;; - [\\/]* | ?:[\\/]* ) # Absolute name. - ac_srcdir=$srcdir$ac_dir_suffix; - ac_top_srcdir=$srcdir - ac_abs_top_srcdir=$srcdir ;; - *) # Relative name. - ac_srcdir=$ac_top_build_prefix$srcdir$ac_dir_suffix - ac_top_srcdir=$ac_top_build_prefix$srcdir - ac_abs_top_srcdir=$ac_pwd/$srcdir ;; -esac -ac_abs_srcdir=$ac_abs_top_srcdir$ac_dir_suffix - - - case $ac_mode in - :F) - # - # CONFIG_FILE - # - -_ACEOF - -cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 -# If the template does not know about datarootdir, expand it. -# FIXME: This hack should be removed a few years after 2.60. -ac_datarootdir_hack=; ac_datarootdir_seen= -ac_sed_dataroot=' -/datarootdir/ { - p - q -} -/@datadir@/p -/@docdir@/p -/@infodir@/p -/@localedir@/p -/@mandir@/p' -case `eval "sed -n \"\$ac_sed_dataroot\" $ac_file_inputs"` in -*datarootdir*) ac_datarootdir_seen=yes;; -*@datadir@*|*@docdir@*|*@infodir@*|*@localedir@*|*@mandir@*) - { printf "%s\n" "$as_me:${as_lineno-$LINENO}: WARNING: $ac_file_inputs seems to ignore the --datarootdir setting" >&5 -printf "%s\n" "$as_me: WARNING: $ac_file_inputs seems to ignore the --datarootdir setting" >&2;} -_ACEOF -cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 - ac_datarootdir_hack=' - s&@datadir@&$datadir&g - s&@docdir@&$docdir&g - s&@infodir@&$infodir&g - s&@localedir@&$localedir&g - s&@mandir@&$mandir&g - s&\\\${datarootdir}&$datarootdir&g' ;; -esac -_ACEOF - -# Neutralize VPATH when `$srcdir' = `.'. -# Shell code in configure.ac might set extrasub. -# FIXME: do we really want to maintain this feature? 
-cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 -ac_sed_extra="$ac_vpsub -$extrasub -_ACEOF -cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 -:t -/@[a-zA-Z_][a-zA-Z_0-9]*@/!b -s|@configure_input@|$ac_sed_conf_input|;t t -s&@top_builddir@&$ac_top_builddir_sub&;t t -s&@top_build_prefix@&$ac_top_build_prefix&;t t -s&@srcdir@&$ac_srcdir&;t t -s&@abs_srcdir@&$ac_abs_srcdir&;t t -s&@top_srcdir@&$ac_top_srcdir&;t t -s&@abs_top_srcdir@&$ac_abs_top_srcdir&;t t -s&@builddir@&$ac_builddir&;t t -s&@abs_builddir@&$ac_abs_builddir&;t t -s&@abs_top_builddir@&$ac_abs_top_builddir&;t t -$ac_datarootdir_hack -" -eval sed \"\$ac_sed_extra\" "$ac_file_inputs" | $AWK -f "$ac_tmp/subs.awk" \ - >$ac_tmp/out || as_fn_error $? "could not create $ac_file" "$LINENO" 5 - -test -z "$ac_datarootdir_hack$ac_datarootdir_seen" && - { ac_out=`sed -n '/\${datarootdir}/p' "$ac_tmp/out"`; test -n "$ac_out"; } && - { ac_out=`sed -n '/^[ ]*datarootdir[ ]*:*=/p' \ - "$ac_tmp/out"`; test -z "$ac_out"; } && - { printf "%s\n" "$as_me:${as_lineno-$LINENO}: WARNING: $ac_file contains a reference to the variable \`datarootdir' -which seems to be undefined. Please make sure it is defined" >&5 -printf "%s\n" "$as_me: WARNING: $ac_file contains a reference to the variable \`datarootdir' -which seems to be undefined. Please make sure it is defined" >&2;} - - rm -f "$ac_tmp/stdin" - case $ac_file in - -) cat "$ac_tmp/out" && rm -f "$ac_tmp/out";; - *) rm -f "$ac_file" && mv "$ac_tmp/out" "$ac_file";; - esac \ - || as_fn_error $? "could not create $ac_file" "$LINENO" 5 - ;; - - - - esac - -done # for ac_tag - - -as_fn_exit 0 -_ACEOF -ac_clean_files=$ac_clean_files_save - -test $ac_write_fail = 0 || - as_fn_error $? "write failure creating $CONFIG_STATUS" "$LINENO" 5 - - -# configure is writing to config.log, and then calls config.status. -# config.status does its own redirection, appending to config.log. -# Unfortunately, on DOS this fails, as config.log is still kept open -# by configure, so config.status won't be able to write to it; its -# output is simply discarded. So we exec the FD to /dev/null, -# effectively closing config.log, so it can be properly (re)opened and -# appended to by config.status. When coming back to configure, we -# need to make the FD available again. -if test "$no_create" != yes; then - ac_cs_success=: - ac_config_status_args= - test "$silent" = yes && - ac_config_status_args="$ac_config_status_args --quiet" - exec 5>/dev/null - $SHELL $CONFIG_STATUS $ac_config_status_args || ac_cs_success=false - exec 5>>config.log - # Use ||, not &&, to avoid exiting from the if with $? = 1, which - # would make configure fail if this is the last instruction. - $ac_cs_success || as_fn_exit 1 -fi -if test -n "$ac_unrecognized_opts" && test "$enable_option_checking" != no; then - { printf "%s\n" "$as_me:${as_lineno-$LINENO}: WARNING: unrecognized options: $ac_unrecognized_opts" >&5 -printf "%s\n" "$as_me: WARNING: unrecognized options: $ac_unrecognized_opts" >&2;} -fi diff --git a/configure.ac b/configure.ac deleted file mode 100644 index b1669941..00000000 --- a/configure.ac +++ /dev/null @@ -1,52 +0,0 @@ -dnl Process this file with autoconf to produce a new configure script if it has changed. 
-dnl ------------------------------------------------------------------------------------ - -AC_INIT -AC_CONFIG_SRCDIR([README.md]) - -AC_MSG_CHECKING([DYSH config]) -echo "" - -DYSH="`pwd`" -AC_SUBST(DYSH) -echo Using DYSH=$DYSH - -EDIT_MSG="Do not edit this file, it has been generated via configure in DYSH" -AC_SUBST(EDIT_MSG) - - - -dnl Override with: --with-data= -dnl Tested in this order: -dnl $DATA_LMT -dnl ~/LMT/data_lmt Suggested private sandboxes -dnl /data_lmt At the UMasss machine "cln" -dnl /lma1/lmt/data_lmt At the UMD machine "lma" -dnl ??? At the LMT site (wares?) - -if test ! -z $DYSH_DATA ; then - DYSH_DATA=$DYSH_DATA -elif test -d ~/GBT/data ; then - DYSH_DATA=~/GBT/data -elif test -d /home/gbt ; then - DYSH_DATA=/home/gbt -else - echo "Could not find the dysh_data, use --with-data=" - DYSH_DATA=/tmp -fi -data_default=$DYSH_DATA - -AC_ARG_WITH(data, - [ --with-data=DYSH_DATA Root directory of DYSH_DATA (or set $DYSH_DATA)], - dysh_data="$withval", - dysh_data="$data_default") -DYSH_DATA=$dysh_data -AC_SUBST(DYSH_DATA) - -echo "Using DYSH_DATA=$DYSH_DATA" - - - -# put these in a single AC_OUTPUT command so config.status is not called multiple times. -AC_CONFIG_FILES([dysh_start.sh]) -AC_OUTPUT diff --git a/docs/Makefile b/docs/Makefile deleted file mode 100644 index 514daac3..00000000 --- a/docs/Makefile +++ /dev/null @@ -1,23 +0,0 @@ -# Minimal makefile for Sphinx documentation -# - -# You can set these variables from the command line. -SPHINXOPTS = -SPHINXBUILD = python -m sphinx -SPHINXPROJ = dysh -SOURCEDIR = ./source -BUILDDIR = ./_build - -# Put it first so that "make" without argument is like "make help". -help: - @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) - -.PHONY: help Makefile - -# Catch-all target: route all unknown targets to Sphinx using the new -# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). -%: Makefile - @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) - -dev: - sphinx-autobuild "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) diff --git a/dysh_start.sh.in b/dysh_start.sh.in deleted file mode 100644 index 9849e9fc..00000000 --- a/dysh_start.sh.in +++ /dev/null @@ -1,12 +0,0 @@ -# @EDIT_MSG@ -# for (ba)sh : source this file - -export DYSH=@DYSH@ - -export DYSH_DATA=@DYSH_DATA@ - -for f in $DYSH/dysh_local.sh $HOME/.dysh_start.sh; do - if [ -e $f ]; then - source $f - fi -done From 4c1e46c626ab0b8cf69734cef76274f00d208395 Mon Sep 17 00:00:00 2001 From: Thomas Chamberlin Date: Wed, 29 Nov 2023 11:01:42 -0500 Subject: [PATCH 08/37] Fixed usage of __all__ I _think_ __all__ should now be functioning as intended. 
But I'm not sure that the way it's intended to work makes sense -- need to discuss --- src/dysh/plot/__init__.py | 3 ++- src/dysh/spectra/__init__.py | 4 +++- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/src/dysh/plot/__init__.py b/src/dysh/plot/__init__.py index f0ff421e..1e58501d 100644 --- a/src/dysh/plot/__init__.py +++ b/src/dysh/plot/__init__.py @@ -1,2 +1,3 @@ """Classes and functions for plotting spectra and SDFITS data""" -__all__ = ["specplot "] +__all__ = ["specplot"] +from .specplot import * diff --git a/src/dysh/spectra/__init__.py b/src/dysh/spectra/__init__.py index e69d1ade..43d1a239 100644 --- a/src/dysh/spectra/__init__.py +++ b/src/dysh/spectra/__init__.py @@ -1,3 +1,5 @@ """Classes and functions for managing and processing spectra""" -__all__ = ["obsblock", "spectrum", "core"] +__all__ = ["scan", "spectrum", "core"] from .core import * +from .scan import * +from .spectrum import * From 16214ffb0f358a6a737238daf29ca6d346550b07 Mon Sep 17 00:00:00 2001 From: Thomas Chamberlin Date: Wed, 29 Nov 2023 10:42:46 -0500 Subject: [PATCH 09/37] Remove _dysh_init.py --- src/dysh/_dysh_init.py | 57 ------------------------------------------ 1 file changed, 57 deletions(-) delete mode 100644 src/dysh/_dysh_init.py diff --git a/src/dysh/_dysh_init.py b/src/dysh/_dysh_init.py deleted file mode 100644 index 42dc57d3..00000000 --- a/src/dysh/_dysh_init.py +++ /dev/null @@ -1,57 +0,0 @@ -"""dysh entry point""" - -import argparse -import logging -from pathlib import Path - -import specutils - -logger = logging.getLogger(__name__) - - -def init_logging(verbosity): - if verbosity >= 2: - level = logging.DEBUG - elif verbosity == 1: - level = logging.INFO - elif verbosity == 0: - level = logging.WARNING - else: - raise ValueError(f"Invalid verbosity level: {verbosity}") - - root_logger = logging.getLogger() - root_logger.setLevel(level) - logger.setLevel(level) - console_handler = logging.StreamHandler() - console_handler.setLevel(level) - # See: https://docs.python.org/3/library/logging.html#logrecord-attributes - formatter = logging.Formatter("[%(asctime)s - %(levelname)s] %(message)s") - console_handler.setFormatter(formatter) - root_logger.addHandler(console_handler) - - -def dysh(input_path: Path, output_path: Path): - logger.info("hello world") - specutils.__file__ - return True - - -def main(): - if False: - args = parse_args() - init_logging(args.verbosity) - dysh(input_path=args.input_path, output_path=args.output_path) - print(version()) - - -def parse_args(): - parser = argparse.ArgumentParser() - parser.add_argument("input_path", type=Path) - parser.add_argument("output_path", type=Path, default=Path(".")) - parser.add_argument("-v", "--verbosity", type=int, choices=[0, 1, 2, 3], default=1) - - return parser.parse_args() - - -if __name__ == "__main__": - main() From 3326d0a8cbad9381f45f63a5f91617b7e2ad9825 Mon Sep 17 00:00:00 2001 From: Thomas Chamberlin Date: Wed, 29 Nov 2023 10:51:36 -0500 Subject: [PATCH 10/37] Reorganize shell code --- src/dysh/__main__.py | 3 +++ src/dysh/shell/__init__.py | 1 + src/dysh/{ => shell}/shell.py | 0 src/dysh/{ => shell}/test_shell.py | 0 4 files changed, 4 insertions(+) create mode 100644 src/dysh/__main__.py create mode 100644 src/dysh/shell/__init__.py rename src/dysh/{ => shell}/shell.py (100%) rename src/dysh/{ => shell}/test_shell.py (100%) diff --git a/src/dysh/__main__.py b/src/dysh/__main__.py new file mode 100644 index 00000000..a9cd2e6e --- /dev/null +++ b/src/dysh/__main__.py @@ -0,0 +1,3 @@ +from dysh.shell import 
main + +main() diff --git a/src/dysh/shell/__init__.py b/src/dysh/shell/__init__.py new file mode 100644 index 00000000..d5a1765f --- /dev/null +++ b/src/dysh/shell/__init__.py @@ -0,0 +1 @@ +from dysh.shell.shell import * diff --git a/src/dysh/shell.py b/src/dysh/shell/shell.py similarity index 100% rename from src/dysh/shell.py rename to src/dysh/shell/shell.py diff --git a/src/dysh/test_shell.py b/src/dysh/shell/test_shell.py similarity index 100% rename from src/dysh/test_shell.py rename to src/dysh/shell/test_shell.py From 5793e0b2c6f7916fd960136cf68c2b1d8119ba67 Mon Sep 17 00:00:00 2001 From: Thomas Chamberlin Date: Tue, 5 Dec 2023 13:55:20 -0500 Subject: [PATCH 11/37] Remove myst-parser dep Sort deps alphabetically. Change dysh[all] syntax --- pyproject.toml | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 354af00a..66427432 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -24,33 +24,32 @@ classifiers = [ ] dependencies = [ "astropy", + "ipython", "matplotlib", "numpy", "pandas", "scipy", "specutils", - "ipython", "wget", ] [project.optional-dependencies] dev = [ "ipdb", + "numpydoc", "pytest", "pytest-cov", - "myst-parser", "sphinx", "sphinx-autobuild", + "sphinx-inline-tabs", "sphinx-rtd-theme", "sphinxcontrib-mermaid", - "numpydoc", - "sphinx-inline-tabs", ] nb = [ "jupyter", "jupyterlab", ] -all = ["dysh[dev]", "dysh[nb]"] +all = ["dysh[dev,nb]"] [project.urls] Documentation = "https://github.com/GreenBankObservatory/dysh#readme" From cc8f94213e4809adce0274ecf5f748342a1628e6 Mon Sep 17 00:00:00 2001 From: Thomas Chamberlin Date: Tue, 5 Dec 2023 14:56:32 -0500 Subject: [PATCH 12/37] Provide default value for plnum --- src/dysh/fits/gbtfitsload.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/dysh/fits/gbtfitsload.py b/src/dysh/fits/gbtfitsload.py index 94e2140e..8159adab 100644 --- a/src/dysh/fits/gbtfitsload.py +++ b/src/dysh/fits/gbtfitsload.py @@ -781,6 +781,7 @@ def subbeamnod(self, scan, bintable=None, **kwargs): kwargs_opts.update(kwargs) ifnum = kwargs_opts["ifnum"] fdnum = kwargs_opts["fdnum"] + plnum = kwargs_opts["plnum"] docal = kwargs_opts["calibrate"] w = kwargs_opts["weights"] method = kwargs_opts["method"] From ab608b3f40bc3fa712675a7a197ad90e28f0abbb Mon Sep 17 00:00:00 2001 From: Thomas Chamberlin Date: Tue, 5 Dec 2023 15:43:23 -0500 Subject: [PATCH 13/37] Updates to GitHub Actions/Workflows (#151) - Updates to GH Actions and pre-commit hooks - Enable caching of pip reqs - Enable code coverage generation and reporting - Removed unused workflow files - Update README to add badges - Re-enable pip-compile hook; regenerate requirements.txt - Remove requirements_dev.txt; not used --- .github/workflows/build.yml | 31 -- .github/workflows/ci.yml | 86 ++++ .github/workflows/hatch-and-pytest.yml | 40 -- .github/workflows/pre-commit.yml | 20 - .github/workflows/pyinstaller.yml | 38 -- .github/workflows/release.yml | 15 +- .github/workflows/workflow.yml | 16 - .pre-commit-config.yaml | 15 +- README.md | 2 + pyproject.toml | 19 +- requirements.txt | 582 +++++++++++++++++++++++-- requirements_dev.txt | 15 - src/dysh/__init__.py | 1 + src/dysh/fits/__init__.py | 9 +- src/dysh/fits/sdfitsload.py | 1 + src/dysh/plot/__init__.py | 1 + src/dysh/spectra/__init__.py | 1 + src/dysh/util/__init__.py | 1 + 18 files changed, 665 insertions(+), 228 deletions(-) delete mode 100644 .github/workflows/build.yml create mode 100644 .github/workflows/ci.yml delete mode 100644 .github/workflows/hatch-and-pytest.yml delete 
mode 100644 .github/workflows/pre-commit.yml delete mode 100644 .github/workflows/pyinstaller.yml delete mode 100644 .github/workflows/workflow.yml delete mode 100644 requirements_dev.txt diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml deleted file mode 100644 index 44c02b78..00000000 --- a/.github/workflows/build.yml +++ /dev/null @@ -1,31 +0,0 @@ -name: Build with hatch - -on: - release: - types: [created] - workflow_dispatch: # needed for "Run" button to show up in action menu - -jobs: - build: - runs-on: ${{ matrix.os }} - environment: hatch build - - strategy: - fail-fast: false - matrix: - os: [ubuntu-latest] - python-version: ["3.x"] - steps: - - uses: actions/checkout@v3 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v3 - with: - python-version: ${{ matrix.python-version }} - - name: Install dependencies - run: | - python -m pip install --upgrade pip - pip install -r requirements_dev.txt - pip install -e . - - name: Build with hatch - run: | - hatch build -c diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 00000000..84be9456 --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,86 @@ +# This workflow will install Python dependencies, run tests and lint with a variety of Python versions +# For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python + +name: CI +on: + push: + branches: + - "main" + - "release*" + - "*-devel" + - "*_devel" + - "*-ci" + - "*_ci" + pull_request: # Run on all pull requests + workflow_dispatch: # needed for "Run" button to show up in action +env: + FORCE_COLOR: "1" # Make tools pretty. + PIP_DISABLE_PIP_VERSION_CHECK: "1" + PIP_NO_PYTHON_VERSION_WARNING: "1" + +jobs: + tests: + runs-on: ${{ matrix.os }} + + strategy: + # Ensure that if even if a build in the matrix fails, the others continue + fail-fast: false + matrix: + os: [ubuntu-latest, windows-latest, macos-latest] + python-version: ["3.9", "3.10", "3.11", "3.12"] + steps: + - uses: actions/checkout@v4 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + # Enable caching of pip packages between workflow jobs. This can speed things up dramatically, _if_ + # jobs are executed fairly close together in time + # See: https://github.com/actions/setup-python/blob/main/docs/advanced-usage.md#caching-packages + cache: 'pip' + cache-dependency-path: 'requirements.txt' + - name: Install dependencies + run: | + # Install requirements, as compiled by pip-compile + pip install -r requirements.txt + # Install dysh itself, in editable mode (which is required to avoid breaking the caching mechanism above) + pip install -e . 
+ - name: Test with pytest + run: | + # Write coverage data files, namespaced using matrix info + coverage run --data-file=".coverage.${{ matrix.os }}.${{ matrix.python-version }}" -m pytest + - name: Upload coverage data + # Upload only ubuntu results, since we are only running the coverage step on ubuntu + if: matrix.os == 'ubuntu-latest' + uses: actions/upload-artifact@v3 + with: + name: coverage-data + path: ".coverage.ubuntu-latest*" + coverage: + needs: tests + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-python@v4 + with: + python-version: 3.9 + cache: pip + - run: pip install --upgrade coverage[toml] + - uses: actions/download-artifact@v3 + with: + name: coverage-data + - name: Combine coverage + run: | + coverage combine + coverage html --skip-covered --skip-empty + + # Report and write to summary. + coverage report | sed 's/^/ /' >> $GITHUB_STEP_SUMMARY + + # Report again and fail if under 100%. + # coverage report --fail-under=100 + - name: Upload HTML report + uses: actions/upload-artifact@v3 + with: + name: html-report + path: htmlcov diff --git a/.github/workflows/hatch-and-pytest.yml b/.github/workflows/hatch-and-pytest.yml deleted file mode 100644 index e86b9aa4..00000000 --- a/.github/workflows/hatch-and-pytest.yml +++ /dev/null @@ -1,40 +0,0 @@ -# This workflow will install Python dependencies, run tests and lint with a variety of Python versions -# For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python - -name: Build and Pytest - -on: - push: - branches: [ "main", "release-*", "cat-devel", "mwp-devel", "pedro-devel", "evan-devel" ] - pull_request: - branches: [ "main", "release-*", "cat-devel", "mwp-devel", "pedro-devel", "evan-devel" ] - workflow_dispatch: # needed for "Run" button to show up in action -jobs: - build: - runs-on: ${{ matrix.os }} - environment: hatch build - - strategy: - fail-fast: false - matrix: - os: [ubuntu-latest, windows-latest, macos-latest] - python-version: ["3.9", "3.10", "3.11", "3.12"] - - steps: - - uses: actions/checkout@v3 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v3 - with: - python-version: ${{ matrix.python-version }} - - name: Install dependencies - run: | - python -m pip install --upgrade pip - python -m pip install flake8 pytest - pip install -r requirements.txt - pip install -e . 
- - name: Build with hatch - run: | - hatch build -c - - name: Test with pytest - run: | - pytest diff --git a/.github/workflows/pre-commit.yml b/.github/workflows/pre-commit.yml deleted file mode 100644 index 3e5f8cc2..00000000 --- a/.github/workflows/pre-commit.yml +++ /dev/null @@ -1,20 +0,0 @@ -name: pre-commit - -on: - pull_request: - push: - branches: [main, release*] - - -jobs: - pre-commit: - env: - SKIP: ruff - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - - uses: actions/setup-python@v4 - with: - # must match the version in .pre-commit-config.yaml:default_language_version.python - python-version: '3.9' - - uses: pre-commit/action@v3.0.0 diff --git a/.github/workflows/pyinstaller.yml b/.github/workflows/pyinstaller.yml deleted file mode 100644 index d7ddcfc1..00000000 --- a/.github/workflows/pyinstaller.yml +++ /dev/null @@ -1,38 +0,0 @@ - -name: Package GUI with Pyinstaller - -on: - push: - branches: [ "cat-devel" ] - pull_request: - branches: [ "cat-devel" ] - -jobs: - build: - runs-on: ${{ matrix.os }} - environment: hatch build - - strategy: - fail-fast: false - matrix: - os: [ubuntu-latest, windows-latest, macos-latest] - python-version: ["3.9", "3.10", "3.11", "3.12"] - - steps: - - uses: actions/checkout@v3 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v3 - with: - python-version: ${{ matrix.python-version }} - - name: Install dependencies - run: | - python -m pip install --upgrade pip - pip install -r requirements.txt - pip install -e . - - name: Build with hatch - run: | - hatch build -c - - name: Package GUI with PyInstaller - run: | - cd gui - pyinstaller app.py diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 56305228..05c51ae7 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -11,25 +11,22 @@ jobs: environment: release permissions: # IMPORTANT: this permission is mandatory for trusted publishing + # See: https://github.com/pypa/gh-action-pypi-publish#trusted-publishing id-token: write - strategy: - fail-fast: false - matrix: - os: [ubuntu-latest] - python-version: ["3.x"] steps: - uses: actions/checkout@v3 - - name: Set up Python ${{ matrix.python-version }} + - name: Set up Python uses: actions/setup-python@v3 with: - python-version: ${{ matrix.python-version }} + python-version: '3.9' # Should always be the minimum supported Python version + cache: 'pip' + cache-dependency-path': 'requirements.txt' - name: Install dependencies run: | python -m pip install --upgrade pip pip install -r requirements.txt - pip install -e . - name: Build with hatch run: | - hatch build -c + hatch build --clean - name: upload release to PyPI uses: pypa/gh-action-pypi-publish@release/v1 diff --git a/.github/workflows/workflow.yml b/.github/workflows/workflow.yml deleted file mode 100644 index b10a84b6..00000000 --- a/.github/workflows/workflow.yml +++ /dev/null @@ -1,16 +0,0 @@ -name: TestWorkflow - -on: - release: - types: [created] - workflow_dispatch: - -jobs: - build: - runs-on: ubuntu-latest - - steps: - - uses: actions/checkout@v3 - - - name: Run a one-line script - run: echo Hello, world! 
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 1837eee9..e7b665e4 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,39 +1,36 @@ --- # See https://pre-commit.com for more information default_language_version: - python: python3.10 + python: python3.9 # See https://pre-commit.com/hooks.html for more hooks repos: - repo: 'https://github.com/pre-commit/pre-commit-hooks' - rev: v4.4.0 + rev: v4.5.0 hooks: - id: trailing-whitespace exclude: '(notebooks|attic|benchmark|testdata)/.*' - id: end-of-file-fixer exclude: LICENSE - id: check-yaml + - id: check-toml - id: check-added-large-files args: - '--maxkb=1024' - id: debug-statements + - id: detect-private-key - id: mixed-line-ending args: - '--fix=lf' - id: check-docstring-first + - id: check-case-conflict # Check for files with names that would conflict on a case-insensitive filesystem - repo: https://github.com/pycqa/isort rev: 5.12.0 hooks: - id: isort exclude: '(notebooks|attic|benchmark|testdata)/.*' - repo: 'https://github.com/psf/black' - rev: 23.1.0 + rev: 23.11.0 hooks: - id: black exclude: '(notebooks|attic|benchmark|testdata)/.*' -# - repo: https://github.com/jazzband/pip-tools -# rev: 6.12.3 -# hooks: -# - id: pip-compile -# args: -# - '--resolver=backtracking' diff --git a/README.md b/README.md index a6511564..9e6ddad1 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,6 @@ [![Documentation Status](https://readthedocs.org/projects/dysh/badge/?version=latest)](https://dysh.readthedocs.io/en/latest/?badge=latest) +[![pre-commit.ci Status](https://results.pre-commit.ci/badge/github/GreenBankObservatory/dysh/main.svg)](https://results.pre-commit.ci/latest/github/GreenBankObservatory/dysh/main) +[![CI Workflow Build Status](https://github.com/GreenBankObservatory/dysh/actions/workflows/ci.yml/badge.svg)](https://github.com/GreenBankObservatory/dysh/actions/workflows/ci.yml) # dysh diff --git a/pyproject.toml b/pyproject.toml index 66427432..25b5ee35 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -35,6 +35,7 @@ dependencies = [ [project.optional-dependencies] dev = [ + "coverage[toml]", "ipdb", "numpydoc", "pytest", @@ -81,7 +82,7 @@ docs-build = "sphinx-build {root}/docs/source {root}/docs/build -b html {args}" # run via: $ hatch run test: