diff --git a/.env.template b/.env.template new file mode 100644 index 00000000..c1ca3e4a --- /dev/null +++ b/.env.template @@ -0,0 +1,8 @@ +# Template environment file for building the docs +# Copy this to .env, then edit all indicated lines + +# Documentation Settings +export DOCS_ROOT="path/to/dysh/docs" # EDIT ME +export DOCS_HOST="" # EDIT ME +export DOCS_PORT="" # EDIT ME +alias startdocs="cd $DOCS_ROOT && cd source && sphinx-autobuild . _build -b html --host $DOCS_HOST --port $DOCS_PORT" diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md deleted file mode 100644 index c3d37298..00000000 --- a/.github/ISSUE_TEMPLATE/bug_report.md +++ /dev/null @@ -1,19 +0,0 @@ ---- -name: Bug report -about: Create a report to help us improve -title: '' -labels: bug -assignees: '' - ---- - -**Describe the bug** -What is the bug? What did you expect to happen vs. what actually happened? - -**How to Reproduce** -Include a minimal example that reproduces your issue. This should probably be a brief snippet of Python code along with a traceback. Please format [using triple backticks](https://docs.github.com/en/get-started/writing-on-github/getting-started-with-writing-and-formatting-on-github/basic-writing-and-formatting-syntax#quoting-code). - -**Environment** -- Dysh version -- Python version -- OS diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md deleted file mode 100644 index a083121a..00000000 --- a/.github/ISSUE_TEMPLATE/feature_request.md +++ /dev/null @@ -1,17 +0,0 @@ ---- -name: Feature request -about: Suggest an idea for this project -title: '' -labels: enhancement -assignees: '' - ---- - -**Feature description** -What problem are you trying to solve that you currently can't? - -**Solution** -If you have an idea of how to solve the problem, please describe it! - -**Additional context** -Add any other context or screenshots about the feature request here. 
For example, if you can do the same thing in GBTIDL or another pipeline tool, tell us how it works there diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml new file mode 100644 index 00000000..44c02b78 --- /dev/null +++ b/.github/workflows/build.yml @@ -0,0 +1,31 @@ +name: Build with hatch + +on: + release: + types: [created] + workflow_dispatch: # needed for "Run" button to show up in action menu + +jobs: + build: + runs-on: ${{ matrix.os }} + environment: hatch build + + strategy: + fail-fast: false + matrix: + os: [ubuntu-latest] + python-version: ["3.x"] + steps: + - uses: actions/checkout@v3 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v3 + with: + python-version: ${{ matrix.python-version }} + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install -r requirements_dev.txt + pip install -e . + - name: Build with hatch + run: | + hatch build -c diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml deleted file mode 100644 index 84be9456..00000000 --- a/.github/workflows/ci.yml +++ /dev/null @@ -1,86 +0,0 @@ -# This workflow will install Python dependencies, run tests and lint with a variety of Python versions -# For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python - -name: CI -on: - push: - branches: - - "main" - - "release*" - - "*-devel" - - "*_devel" - - "*-ci" - - "*_ci" - pull_request: # Run on all pull requests - workflow_dispatch: # needed for "Run" button to show up in action -env: - FORCE_COLOR: "1" # Make tools pretty. 
- PIP_DISABLE_PIP_VERSION_CHECK: "1" - PIP_NO_PYTHON_VERSION_WARNING: "1" - -jobs: - tests: - runs-on: ${{ matrix.os }} - - strategy: - # Ensure that if even if a build in the matrix fails, the others continue - fail-fast: false - matrix: - os: [ubuntu-latest, windows-latest, macos-latest] - python-version: ["3.9", "3.10", "3.11", "3.12"] - steps: - - uses: actions/checkout@v4 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v4 - with: - python-version: ${{ matrix.python-version }} - # Enable caching of pip packages between workflow jobs. This can speed things up dramatically, _if_ - # jobs are executed fairly close together in time - # See: https://github.com/actions/setup-python/blob/main/docs/advanced-usage.md#caching-packages - cache: 'pip' - cache-dependency-path: 'requirements.txt' - - name: Install dependencies - run: | - # Install requirements, as compiled by pip-compile - pip install -r requirements.txt - # Install dysh itself, in editable mode (which is required to avoid breaking the caching mechanism above) - pip install -e . - - name: Test with pytest - run: | - # Write coverage data files, namespaced using matrix info - coverage run --data-file=".coverage.${{ matrix.os }}.${{ matrix.python-version }}" -m pytest - - name: Upload coverage data - # Upload only ubuntu results, since we are only running the coverage step on ubuntu - if: matrix.os == 'ubuntu-latest' - uses: actions/upload-artifact@v3 - with: - name: coverage-data - path: ".coverage.ubuntu-latest*" - coverage: - needs: tests - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - uses: actions/setup-python@v4 - with: - python-version: 3.9 - cache: pip - - run: pip install --upgrade coverage[toml] - - uses: actions/download-artifact@v3 - with: - name: coverage-data - - name: Combine coverage - run: | - coverage combine - coverage html --skip-covered --skip-empty - - # Report and write to summary. 
- coverage report | sed 's/^/ /' >> $GITHUB_STEP_SUMMARY - - # Report again and fail if under 100%. - # coverage report --fail-under=100 - - name: Upload HTML report - uses: actions/upload-artifact@v3 - with: - name: html-report - path: htmlcov diff --git a/.github/workflows/hatch-and-pytest.yml b/.github/workflows/hatch-and-pytest.yml new file mode 100644 index 00000000..e86b9aa4 --- /dev/null +++ b/.github/workflows/hatch-and-pytest.yml @@ -0,0 +1,40 @@ +# This workflow will install Python dependencies, run tests and lint with a variety of Python versions +# For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python + +name: Build and Pytest + +on: + push: + branches: [ "main", "release-*", "cat-devel", "mwp-devel", "pedro-devel", "evan-devel" ] + pull_request: + branches: [ "main", "release-*", "cat-devel", "mwp-devel", "pedro-devel", "evan-devel" ] + workflow_dispatch: # needed for "Run" button to show up in action +jobs: + build: + runs-on: ${{ matrix.os }} + environment: hatch build + + strategy: + fail-fast: false + matrix: + os: [ubuntu-latest, windows-latest, macos-latest] + python-version: ["3.9", "3.10", "3.11", "3.12"] + + steps: + - uses: actions/checkout@v3 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v3 + with: + python-version: ${{ matrix.python-version }} + - name: Install dependencies + run: | + python -m pip install --upgrade pip + python -m pip install flake8 pytest + pip install -r requirements.txt + pip install -e . 
+ - name: Build with hatch + run: | + hatch build -c + - name: Test with pytest + run: | + pytest diff --git a/.github/workflows/pre-commit.yml b/.github/workflows/pre-commit.yml new file mode 100644 index 00000000..3e5f8cc2 --- /dev/null +++ b/.github/workflows/pre-commit.yml @@ -0,0 +1,20 @@ +name: pre-commit + +on: + pull_request: + push: + branches: [main, release*] + + +jobs: + pre-commit: + env: + SKIP: ruff + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-python@v4 + with: + # must match the version in .pre-commit-config.yaml:default_language_version.python + python-version: '3.9' + - uses: pre-commit/action@v3.0.0 diff --git a/.github/workflows/pyinstaller.yml b/.github/workflows/pyinstaller.yml new file mode 100644 index 00000000..d7ddcfc1 --- /dev/null +++ b/.github/workflows/pyinstaller.yml @@ -0,0 +1,38 @@ + +name: Package GUI with Pyinstaller + +on: + push: + branches: [ "cat-devel" ] + pull_request: + branches: [ "cat-devel" ] + +jobs: + build: + runs-on: ${{ matrix.os }} + environment: hatch build + + strategy: + fail-fast: false + matrix: + os: [ubuntu-latest, windows-latest, macos-latest] + python-version: ["3.9", "3.10", "3.11", "3.12"] + + steps: + - uses: actions/checkout@v3 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v3 + with: + python-version: ${{ matrix.python-version }} + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install -r requirements.txt + pip install -e . 
+ - name: Build with hatch + run: | + hatch build -c + - name: Package GUI with PyInstaller + run: | + cd gui + pyinstaller app.py diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 05c51ae7..56305228 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -11,22 +11,25 @@ jobs: environment: release permissions: # IMPORTANT: this permission is mandatory for trusted publishing - # See: https://github.com/pypa/gh-action-pypi-publish#trusted-publishing id-token: write + strategy: + fail-fast: false + matrix: + os: [ubuntu-latest] + python-version: ["3.x"] steps: - uses: actions/checkout@v3 - - name: Set up Python + - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v3 with: - python-version: '3.9' # Should always be the minimum supported Python version - cache: 'pip' - cache-dependency-path': 'requirements.txt' + python-version: ${{ matrix.python-version }} - name: Install dependencies run: | python -m pip install --upgrade pip pip install -r requirements.txt + pip install -e . - name: Build with hatch run: | - hatch build --clean + hatch build -c - name: upload release to PyPI uses: pypa/gh-action-pypi-publish@release/v1 diff --git a/.github/workflows/workflow.yml b/.github/workflows/workflow.yml new file mode 100644 index 00000000..b10a84b6 --- /dev/null +++ b/.github/workflows/workflow.yml @@ -0,0 +1,16 @@ +name: TestWorkflow + +on: + release: + types: [created] + workflow_dispatch: + +jobs: + build: + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v3 + + - name: Run a one-line script + run: echo Hello, world! 
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index b96f2d77..1837eee9 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,36 +1,39 @@ --- # See https://pre-commit.com for more information default_language_version: - python: python3.9 + python: python3.10 # See https://pre-commit.com/hooks.html for more hooks repos: - repo: 'https://github.com/pre-commit/pre-commit-hooks' - rev: v4.5.0 + rev: v4.4.0 hooks: - id: trailing-whitespace exclude: '(notebooks|attic|benchmark|testdata)/.*' - id: end-of-file-fixer exclude: LICENSE - id: check-yaml - - id: check-toml - id: check-added-large-files args: - '--maxkb=1024' - id: debug-statements - - id: detect-private-key - id: mixed-line-ending args: - '--fix=lf' - id: check-docstring-first - - id: check-case-conflict # Check for files with names that would conflict on a case-insensitive filesystem - repo: https://github.com/pycqa/isort - rev: 5.13.2 + rev: 5.12.0 hooks: - id: isort exclude: '(notebooks|attic|benchmark|testdata)/.*' - repo: 'https://github.com/psf/black' - rev: 23.12.1 + rev: 23.1.0 hooks: - id: black exclude: '(notebooks|attic|benchmark|testdata)/.*' +# - repo: https://github.com/jazzband/pip-tools +# rev: 6.12.3 +# hooks: +# - id: pip-compile +# args: +# - '--resolver=backtracking' diff --git a/README.md b/README.md index 9e6ddad1..a6511564 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,4 @@ [![Documentation Status](https://readthedocs.org/projects/dysh/badge/?version=latest)](https://dysh.readthedocs.io/en/latest/?badge=latest) -[![pre-commit.ci Status](https://results.pre-commit.ci/badge/github/GreenBankObservatory/dysh/main.svg)](https://results.pre-commit.ci/latest/github/GreenBankObservatory/dysh/main) -[![CI Workflow Build Status](https://github.com/GreenBankObservatory/dysh/actions/workflows/ci.yml/badge.svg)](https://github.com/GreenBankObservatory/dysh/actions/workflows/ci.yml) # dysh diff --git a/configure b/configure new file mode 100755 index 
00000000..a541a855 --- /dev/null +++ b/configure @@ -0,0 +1,2921 @@ +#! /bin/sh +# Guess values for system-dependent variables and create Makefiles. +# Generated by GNU Autoconf 2.71. +# +# +# Copyright (C) 1992-1996, 1998-2017, 2020-2021 Free Software Foundation, +# Inc. +# +# +# This configure script is free software; the Free Software Foundation +# gives unlimited permission to copy, distribute and modify it. +## -------------------- ## +## M4sh Initialization. ## +## -------------------- ## + +# Be more Bourne compatible +DUALCASE=1; export DUALCASE # for MKS sh +as_nop=: +if test ${ZSH_VERSION+y} && (emulate sh) >/dev/null 2>&1 +then : + emulate sh + NULLCMD=: + # Pre-4.2 versions of Zsh do word splitting on ${1+"$@"}, which + # is contrary to our usage. Disable this feature. + alias -g '${1+"$@"}'='"$@"' + setopt NO_GLOB_SUBST +else $as_nop + case `(set -o) 2>/dev/null` in #( + *posix*) : + set -o posix ;; #( + *) : + ;; +esac +fi + + + +# Reset variables that may have inherited troublesome values from +# the environment. + +# IFS needs to be set, to space, tab, and newline, in precisely that order. +# (If _AS_PATH_WALK were called with IFS unset, it would have the +# side effect of setting IFS to empty, thus disabling word splitting.) +# Quoting is to prevent editors from complaining about space-tab. +as_nl=' +' +export as_nl +IFS=" "" $as_nl" + +PS1='$ ' +PS2='> ' +PS4='+ ' + +# Ensure predictable behavior from utilities with locale-dependent output. +LC_ALL=C +export LC_ALL +LANGUAGE=C +export LANGUAGE + +# We cannot yet rely on "unset" to work, but we need these variables +# to be unset--not just set to an empty or harmless value--now, to +# avoid bugs in old shells (e.g. pre-3.0 UWIN ksh). This construct +# also avoids known problems related to "unset" and subshell syntax +# in other old shells (e.g. bash 2.01 and pdksh 5.2.14). 
+for as_var in BASH_ENV ENV MAIL MAILPATH CDPATH +do eval test \${$as_var+y} \ + && ( (unset $as_var) || exit 1) >/dev/null 2>&1 && unset $as_var || : +done + +# Ensure that fds 0, 1, and 2 are open. +if (exec 3>&0) 2>/dev/null; then :; else exec 0&1) 2>/dev/null; then :; else exec 1>/dev/null; fi +if (exec 3>&2) ; then :; else exec 2>/dev/null; fi + +# The user is always right. +if ${PATH_SEPARATOR+false} :; then + PATH_SEPARATOR=: + (PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 && { + (PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 || + PATH_SEPARATOR=';' + } +fi + + +# Find who we are. Look in the path if we contain no directory separator. +as_myself= +case $0 in #(( + *[\\/]* ) as_myself=$0 ;; + *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + case $as_dir in #((( + '') as_dir=./ ;; + */) ;; + *) as_dir=$as_dir/ ;; + esac + test -r "$as_dir$0" && as_myself=$as_dir$0 && break + done +IFS=$as_save_IFS + + ;; +esac +# We did not find ourselves, most probably we were run as `sh COMMAND' +# in which case we are not to be found in the path. +if test "x$as_myself" = x; then + as_myself=$0 +fi +if test ! -f "$as_myself"; then + printf "%s\n" "$as_myself: error: cannot find myself; rerun with an absolute file name" >&2 + exit 1 +fi + + +# Use a proper internal environment variable to ensure we don't fall + # into an infinite loop, continuously re-executing ourselves. + if test x"${_as_can_reexec}" != xno && test "x$CONFIG_SHELL" != x; then + _as_can_reexec=no; export _as_can_reexec; + # We cannot yet assume a decent shell, so we have to provide a +# neutralization value for shells without unset; and this also +# works around shells that cannot unset nonexistent variables. +# Preserve -v and -x to the replacement shell. 
+BASH_ENV=/dev/null +ENV=/dev/null +(unset BASH_ENV) >/dev/null 2>&1 && unset BASH_ENV ENV +case $- in # (((( + *v*x* | *x*v* ) as_opts=-vx ;; + *v* ) as_opts=-v ;; + *x* ) as_opts=-x ;; + * ) as_opts= ;; +esac +exec $CONFIG_SHELL $as_opts "$as_myself" ${1+"$@"} +# Admittedly, this is quite paranoid, since all the known shells bail +# out after a failed `exec'. +printf "%s\n" "$0: could not re-execute with $CONFIG_SHELL" >&2 +exit 255 + fi + # We don't want this to propagate to other subprocesses. + { _as_can_reexec=; unset _as_can_reexec;} +if test "x$CONFIG_SHELL" = x; then + as_bourne_compatible="as_nop=: +if test \${ZSH_VERSION+y} && (emulate sh) >/dev/null 2>&1 +then : + emulate sh + NULLCMD=: + # Pre-4.2 versions of Zsh do word splitting on \${1+\"\$@\"}, which + # is contrary to our usage. Disable this feature. + alias -g '\${1+\"\$@\"}'='\"\$@\"' + setopt NO_GLOB_SUBST +else \$as_nop + case \`(set -o) 2>/dev/null\` in #( + *posix*) : + set -o posix ;; #( + *) : + ;; +esac +fi +" + as_required="as_fn_return () { (exit \$1); } +as_fn_success () { as_fn_return 0; } +as_fn_failure () { as_fn_return 1; } +as_fn_ret_success () { return 0; } +as_fn_ret_failure () { return 1; } + +exitcode=0 +as_fn_success || { exitcode=1; echo as_fn_success failed.; } +as_fn_failure && { exitcode=1; echo as_fn_failure succeeded.; } +as_fn_ret_success || { exitcode=1; echo as_fn_ret_success failed.; } +as_fn_ret_failure && { exitcode=1; echo as_fn_ret_failure succeeded.; } +if ( set x; as_fn_ret_success y && test x = \"\$1\" ) +then : + +else \$as_nop + exitcode=1; echo positional parameters were not saved. 
+fi +test x\$exitcode = x0 || exit 1 +blah=\$(echo \$(echo blah)) +test x\"\$blah\" = xblah || exit 1 +test -x / || exit 1" + as_suggested=" as_lineno_1=";as_suggested=$as_suggested$LINENO;as_suggested=$as_suggested" as_lineno_1a=\$LINENO + as_lineno_2=";as_suggested=$as_suggested$LINENO;as_suggested=$as_suggested" as_lineno_2a=\$LINENO + eval 'test \"x\$as_lineno_1'\$as_run'\" != \"x\$as_lineno_2'\$as_run'\" && + test \"x\`expr \$as_lineno_1'\$as_run' + 1\`\" = \"x\$as_lineno_2'\$as_run'\"' || exit 1" + if (eval "$as_required") 2>/dev/null +then : + as_have_required=yes +else $as_nop + as_have_required=no +fi + if test x$as_have_required = xyes && (eval "$as_suggested") 2>/dev/null +then : + +else $as_nop + as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +as_found=false +for as_dir in /bin$PATH_SEPARATOR/usr/bin$PATH_SEPARATOR$PATH +do + IFS=$as_save_IFS + case $as_dir in #((( + '') as_dir=./ ;; + */) ;; + *) as_dir=$as_dir/ ;; + esac + as_found=: + case $as_dir in #( + /*) + for as_base in sh bash ksh sh5; do + # Try only shells that exist, to save several forks. + as_shell=$as_dir$as_base + if { test -f "$as_shell" || test -f "$as_shell.exe"; } && + as_run=a "$as_shell" -c "$as_bourne_compatible""$as_required" 2>/dev/null +then : + CONFIG_SHELL=$as_shell as_have_required=yes + if as_run=a "$as_shell" -c "$as_bourne_compatible""$as_suggested" 2>/dev/null +then : + break 2 +fi +fi + done;; + esac + as_found=false +done +IFS=$as_save_IFS +if $as_found +then : + +else $as_nop + if { test -f "$SHELL" || test -f "$SHELL.exe"; } && + as_run=a "$SHELL" -c "$as_bourne_compatible""$as_required" 2>/dev/null +then : + CONFIG_SHELL=$SHELL as_have_required=yes +fi +fi + + + if test "x$CONFIG_SHELL" != x +then : + export CONFIG_SHELL + # We cannot yet assume a decent shell, so we have to provide a +# neutralization value for shells without unset; and this also +# works around shells that cannot unset nonexistent variables. +# Preserve -v and -x to the replacement shell. 
+BASH_ENV=/dev/null +ENV=/dev/null +(unset BASH_ENV) >/dev/null 2>&1 && unset BASH_ENV ENV +case $- in # (((( + *v*x* | *x*v* ) as_opts=-vx ;; + *v* ) as_opts=-v ;; + *x* ) as_opts=-x ;; + * ) as_opts= ;; +esac +exec $CONFIG_SHELL $as_opts "$as_myself" ${1+"$@"} +# Admittedly, this is quite paranoid, since all the known shells bail +# out after a failed `exec'. +printf "%s\n" "$0: could not re-execute with $CONFIG_SHELL" >&2 +exit 255 +fi + + if test x$as_have_required = xno +then : + printf "%s\n" "$0: This script requires a shell more modern than all" + printf "%s\n" "$0: the shells that I found on your system." + if test ${ZSH_VERSION+y} ; then + printf "%s\n" "$0: In particular, zsh $ZSH_VERSION has bugs and should" + printf "%s\n" "$0: be upgraded to zsh 4.3.4 or later." + else + printf "%s\n" "$0: Please tell bug-autoconf@gnu.org about your system, +$0: including any error possibly output before this +$0: message. Then install a modern shell, or manually run +$0: the script under such a shell if you do have one." + fi + exit 1 +fi +fi +fi +SHELL=${CONFIG_SHELL-/bin/sh} +export SHELL +# Unset more variables known to interfere with behavior of common tools. +CLICOLOR_FORCE= GREP_OPTIONS= +unset CLICOLOR_FORCE GREP_OPTIONS + +## --------------------- ## +## M4sh Shell Functions. ## +## --------------------- ## +# as_fn_unset VAR +# --------------- +# Portably unset VAR. +as_fn_unset () +{ + { eval $1=; unset $1;} +} +as_unset=as_fn_unset + + +# as_fn_set_status STATUS +# ----------------------- +# Set $? to STATUS, without forking. +as_fn_set_status () +{ + return $1 +} # as_fn_set_status + +# as_fn_exit STATUS +# ----------------- +# Exit the shell with STATUS, even in a "trap 0" or "set -e" context. +as_fn_exit () +{ + set +e + as_fn_set_status $1 + exit $1 +} # as_fn_exit +# as_fn_nop +# --------- +# Do nothing but, unlike ":", preserve the value of $?. +as_fn_nop () +{ + return $? 
+} +as_nop=as_fn_nop + +# as_fn_mkdir_p +# ------------- +# Create "$as_dir" as a directory, including parents if necessary. +as_fn_mkdir_p () +{ + + case $as_dir in #( + -*) as_dir=./$as_dir;; + esac + test -d "$as_dir" || eval $as_mkdir_p || { + as_dirs= + while :; do + case $as_dir in #( + *\'*) as_qdir=`printf "%s\n" "$as_dir" | sed "s/'/'\\\\\\\\''/g"`;; #'( + *) as_qdir=$as_dir;; + esac + as_dirs="'$as_qdir' $as_dirs" + as_dir=`$as_dirname -- "$as_dir" || +$as_expr X"$as_dir" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ + X"$as_dir" : 'X\(//\)[^/]' \| \ + X"$as_dir" : 'X\(//\)$' \| \ + X"$as_dir" : 'X\(/\)' \| . 2>/dev/null || +printf "%s\n" X"$as_dir" | + sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ + s//\1/ + q + } + /^X\(\/\/\)[^/].*/{ + s//\1/ + q + } + /^X\(\/\/\)$/{ + s//\1/ + q + } + /^X\(\/\).*/{ + s//\1/ + q + } + s/.*/./; q'` + test -d "$as_dir" && break + done + test -z "$as_dirs" || eval "mkdir $as_dirs" + } || test -d "$as_dir" || as_fn_error $? "cannot create directory $as_dir" + + +} # as_fn_mkdir_p + +# as_fn_executable_p FILE +# ----------------------- +# Test if FILE is an executable regular file. +as_fn_executable_p () +{ + test -f "$1" && test -x "$1" +} # as_fn_executable_p +# as_fn_append VAR VALUE +# ---------------------- +# Append the text in VALUE to the end of the definition contained in VAR. Take +# advantage of any shell optimizations that allow amortized linear growth over +# repeated appends, instead of the typical quadratic growth present in naive +# implementations. +if (eval "as_var=1; as_var+=2; test x\$as_var = x12") 2>/dev/null +then : + eval 'as_fn_append () + { + eval $1+=\$2 + }' +else $as_nop + as_fn_append () + { + eval $1=\$$1\$2 + } +fi # as_fn_append + +# as_fn_arith ARG... +# ------------------ +# Perform arithmetic evaluation on the ARGs, and store the result in the +# global $as_val. Take advantage of shells that can avoid forks. The arguments +# must be portable across $(()) and expr. 
+if (eval "test \$(( 1 + 1 )) = 2") 2>/dev/null +then : + eval 'as_fn_arith () + { + as_val=$(( $* )) + }' +else $as_nop + as_fn_arith () + { + as_val=`expr "$@" || test $? -eq 1` + } +fi # as_fn_arith + +# as_fn_nop +# --------- +# Do nothing but, unlike ":", preserve the value of $?. +as_fn_nop () +{ + return $? +} +as_nop=as_fn_nop + +# as_fn_error STATUS ERROR [LINENO LOG_FD] +# ---------------------------------------- +# Output "`basename $0`: error: ERROR" to stderr. If LINENO and LOG_FD are +# provided, also output the error to LOG_FD, referencing LINENO. Then exit the +# script with STATUS, using 1 if that was 0. +as_fn_error () +{ + as_status=$1; test $as_status -eq 0 && as_status=1 + if test "$4"; then + as_lineno=${as_lineno-"$3"} as_lineno_stack=as_lineno_stack=$as_lineno_stack + printf "%s\n" "$as_me:${as_lineno-$LINENO}: error: $2" >&$4 + fi + printf "%s\n" "$as_me: error: $2" >&2 + as_fn_exit $as_status +} # as_fn_error + +if expr a : '\(a\)' >/dev/null 2>&1 && + test "X`expr 00001 : '.*\(...\)'`" = X001; then + as_expr=expr +else + as_expr=false +fi + +if (basename -- /) >/dev/null 2>&1 && test "X`basename -- / 2>&1`" = "X/"; then + as_basename=basename +else + as_basename=false +fi + +if (as_dir=`dirname -- /` && test "X$as_dir" = X/) >/dev/null 2>&1; then + as_dirname=dirname +else + as_dirname=false +fi + +as_me=`$as_basename -- "$0" || +$as_expr X/"$0" : '.*/\([^/][^/]*\)/*$' \| \ + X"$0" : 'X\(//\)$' \| \ + X"$0" : 'X\(/\)' \| . 2>/dev/null || +printf "%s\n" X/"$0" | + sed '/^.*\/\([^/][^/]*\)\/*$/{ + s//\1/ + q + } + /^X\/\(\/\/\)$/{ + s//\1/ + q + } + /^X\/\(\/\).*/{ + s//\1/ + q + } + s/.*/./; q'` + +# Avoid depending upon Character Ranges. 
+as_cr_letters='abcdefghijklmnopqrstuvwxyz' +as_cr_LETTERS='ABCDEFGHIJKLMNOPQRSTUVWXYZ' +as_cr_Letters=$as_cr_letters$as_cr_LETTERS +as_cr_digits='0123456789' +as_cr_alnum=$as_cr_Letters$as_cr_digits + + + as_lineno_1=$LINENO as_lineno_1a=$LINENO + as_lineno_2=$LINENO as_lineno_2a=$LINENO + eval 'test "x$as_lineno_1'$as_run'" != "x$as_lineno_2'$as_run'" && + test "x`expr $as_lineno_1'$as_run' + 1`" = "x$as_lineno_2'$as_run'"' || { + # Blame Lee E. McMahon (1931-1989) for sed's syntax. :-) + sed -n ' + p + /[$]LINENO/= + ' <$as_myself | + sed ' + s/[$]LINENO.*/&-/ + t lineno + b + :lineno + N + :loop + s/[$]LINENO\([^'$as_cr_alnum'_].*\n\)\(.*\)/\2\1\2/ + t loop + s/-\n.*// + ' >$as_me.lineno && + chmod +x "$as_me.lineno" || + { printf "%s\n" "$as_me: error: cannot create $as_me.lineno; rerun with a POSIX shell" >&2; as_fn_exit 1; } + + # If we had to re-execute with $CONFIG_SHELL, we're ensured to have + # already done that, so ensure we don't try to do so again and fall + # in an infinite loop. This has already happened in practice. + _as_can_reexec=no; export _as_can_reexec + # Don't try to exec as it changes $[0], causing all sort of problems + # (the dirname of $[0] is not the place where we might find the + # original and so on. Autoconf is especially sensitive to this). + . "./$as_me.lineno" + # Exit status is that of the last command. + exit +} + + +# Determine whether it's possible to make 'echo' print without a newline. +# These variables are no longer used directly by Autoconf, but are AC_SUBSTed +# for compatibility with existing Makefiles. +ECHO_C= ECHO_N= ECHO_T= +case `echo -n x` in #((((( +-n*) + case `echo 'xy\c'` in + *c*) ECHO_T=' ';; # ECHO_T is single tab character. + xy) ECHO_C='\c';; + *) echo `echo ksh88 bug on AIX 6.1` > /dev/null + ECHO_T=' ';; + esac;; +*) + ECHO_N='-n';; +esac + +# For backward compatibility with old third-party macros, we provide +# the shell variables $as_echo and $as_echo_n. 
New code should use +# AS_ECHO(["message"]) and AS_ECHO_N(["message"]), respectively. +as_echo='printf %s\n' +as_echo_n='printf %s' + + +rm -f conf$$ conf$$.exe conf$$.file +if test -d conf$$.dir; then + rm -f conf$$.dir/conf$$.file +else + rm -f conf$$.dir + mkdir conf$$.dir 2>/dev/null +fi +if (echo >conf$$.file) 2>/dev/null; then + if ln -s conf$$.file conf$$ 2>/dev/null; then + as_ln_s='ln -s' + # ... but there are two gotchas: + # 1) On MSYS, both `ln -s file dir' and `ln file dir' fail. + # 2) DJGPP < 2.04 has no symlinks; `ln -s' creates a wrapper executable. + # In both cases, we have to default to `cp -pR'. + ln -s conf$$.file conf$$.dir 2>/dev/null && test ! -f conf$$.exe || + as_ln_s='cp -pR' + elif ln conf$$.file conf$$ 2>/dev/null; then + as_ln_s=ln + else + as_ln_s='cp -pR' + fi +else + as_ln_s='cp -pR' +fi +rm -f conf$$ conf$$.exe conf$$.dir/conf$$.file conf$$.file +rmdir conf$$.dir 2>/dev/null + +if mkdir -p . 2>/dev/null; then + as_mkdir_p='mkdir -p "$as_dir"' +else + test -d ./-p && rmdir ./-p + as_mkdir_p=false +fi + +as_test_x='test -x' +as_executable_p=as_fn_executable_p + +# Sed expression to map a string onto a valid CPP name. +as_tr_cpp="eval sed 'y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g'" + +# Sed expression to map a string onto a valid variable name. +as_tr_sh="eval sed 'y%*+%pp%;s%[^_$as_cr_alnum]%_%g'" + + +test -n "$DJDIR" || exec 7<&0 &1 + +# Name of the host. +# hostname on some systems (SVR3.2, old GNU/Linux) returns a bogus exit status, +# so uname gets run too. +ac_hostname=`(hostname || uname -n) 2>/dev/null | sed 1q` + +# +# Initializations. +# +ac_default_prefix=/usr/local +ac_clean_files= +ac_config_libobj_dir=. +LIBOBJS= +cross_compiling=no +subdirs= +MFLAGS= +MAKEFLAGS= + +# Identity of this package. 
+PACKAGE_NAME='' +PACKAGE_TARNAME='' +PACKAGE_VERSION='' +PACKAGE_STRING='' +PACKAGE_BUGREPORT='' +PACKAGE_URL='' + +ac_unique_file="README.md" +ac_subst_vars='LTLIBOBJS +LIBOBJS +DYSH_DATA +EDIT_MSG +DYSH +target_alias +host_alias +build_alias +LIBS +ECHO_T +ECHO_N +ECHO_C +DEFS +mandir +localedir +libdir +psdir +pdfdir +dvidir +htmldir +infodir +docdir +oldincludedir +includedir +runstatedir +localstatedir +sharedstatedir +sysconfdir +datadir +datarootdir +libexecdir +sbindir +bindir +program_transform_name +prefix +exec_prefix +PACKAGE_URL +PACKAGE_BUGREPORT +PACKAGE_STRING +PACKAGE_VERSION +PACKAGE_TARNAME +PACKAGE_NAME +PATH_SEPARATOR +SHELL' +ac_subst_files='' +ac_user_opts=' +enable_option_checking +with_data +' + ac_precious_vars='build_alias +host_alias +target_alias' + + +# Initialize some variables set by options. +ac_init_help= +ac_init_version=false +ac_unrecognized_opts= +ac_unrecognized_sep= +# The variables have the same names as the options, with +# dashes changed to underlines. +cache_file=/dev/null +exec_prefix=NONE +no_create= +no_recursion= +prefix=NONE +program_prefix=NONE +program_suffix=NONE +program_transform_name=s,x,x, +silent= +site= +srcdir= +verbose= +x_includes=NONE +x_libraries=NONE + +# Installation directory options. +# These are left unexpanded so users can "make install exec_prefix=/foo" +# and all the variables that are supposed to be based on exec_prefix +# by default will actually change. +# Use braces instead of parens because sh, perl, etc. also accept them. +# (The list follows the same order as the GNU Coding Standards.) 
+bindir='${exec_prefix}/bin' +sbindir='${exec_prefix}/sbin' +libexecdir='${exec_prefix}/libexec' +datarootdir='${prefix}/share' +datadir='${datarootdir}' +sysconfdir='${prefix}/etc' +sharedstatedir='${prefix}/com' +localstatedir='${prefix}/var' +runstatedir='${localstatedir}/run' +includedir='${prefix}/include' +oldincludedir='/usr/include' +docdir='${datarootdir}/doc/${PACKAGE}' +infodir='${datarootdir}/info' +htmldir='${docdir}' +dvidir='${docdir}' +pdfdir='${docdir}' +psdir='${docdir}' +libdir='${exec_prefix}/lib' +localedir='${datarootdir}/locale' +mandir='${datarootdir}/man' + +ac_prev= +ac_dashdash= +for ac_option +do + # If the previous option needs an argument, assign it. + if test -n "$ac_prev"; then + eval $ac_prev=\$ac_option + ac_prev= + continue + fi + + case $ac_option in + *=?*) ac_optarg=`expr "X$ac_option" : '[^=]*=\(.*\)'` ;; + *=) ac_optarg= ;; + *) ac_optarg=yes ;; + esac + + case $ac_dashdash$ac_option in + --) + ac_dashdash=yes ;; + + -bindir | --bindir | --bindi | --bind | --bin | --bi) + ac_prev=bindir ;; + -bindir=* | --bindir=* | --bindi=* | --bind=* | --bin=* | --bi=*) + bindir=$ac_optarg ;; + + -build | --build | --buil | --bui | --bu) + ac_prev=build_alias ;; + -build=* | --build=* | --buil=* | --bui=* | --bu=*) + build_alias=$ac_optarg ;; + + -cache-file | --cache-file | --cache-fil | --cache-fi \ + | --cache-f | --cache- | --cache | --cach | --cac | --ca | --c) + ac_prev=cache_file ;; + -cache-file=* | --cache-file=* | --cache-fil=* | --cache-fi=* \ + | --cache-f=* | --cache-=* | --cache=* | --cach=* | --cac=* | --ca=* | --c=*) + cache_file=$ac_optarg ;; + + --config-cache | -C) + cache_file=config.cache ;; + + -datadir | --datadir | --datadi | --datad) + ac_prev=datadir ;; + -datadir=* | --datadir=* | --datadi=* | --datad=*) + datadir=$ac_optarg ;; + + -datarootdir | --datarootdir | --datarootdi | --datarootd | --dataroot \ + | --dataroo | --dataro | --datar) + ac_prev=datarootdir ;; + -datarootdir=* | --datarootdir=* | 
--datarootdi=* | --datarootd=* \ + | --dataroot=* | --dataroo=* | --dataro=* | --datar=*) + datarootdir=$ac_optarg ;; + + -disable-* | --disable-*) + ac_useropt=`expr "x$ac_option" : 'x-*disable-\(.*\)'` + # Reject names that are not valid shell variable names. + expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && + as_fn_error $? "invalid feature name: \`$ac_useropt'" + ac_useropt_orig=$ac_useropt + ac_useropt=`printf "%s\n" "$ac_useropt" | sed 's/[-+.]/_/g'` + case $ac_user_opts in + *" +"enable_$ac_useropt" +"*) ;; + *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--disable-$ac_useropt_orig" + ac_unrecognized_sep=', ';; + esac + eval enable_$ac_useropt=no ;; + + -docdir | --docdir | --docdi | --doc | --do) + ac_prev=docdir ;; + -docdir=* | --docdir=* | --docdi=* | --doc=* | --do=*) + docdir=$ac_optarg ;; + + -dvidir | --dvidir | --dvidi | --dvid | --dvi | --dv) + ac_prev=dvidir ;; + -dvidir=* | --dvidir=* | --dvidi=* | --dvid=* | --dvi=* | --dv=*) + dvidir=$ac_optarg ;; + + -enable-* | --enable-*) + ac_useropt=`expr "x$ac_option" : 'x-*enable-\([^=]*\)'` + # Reject names that are not valid shell variable names. + expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && + as_fn_error $? 
"invalid feature name: \`$ac_useropt'" + ac_useropt_orig=$ac_useropt + ac_useropt=`printf "%s\n" "$ac_useropt" | sed 's/[-+.]/_/g'` + case $ac_user_opts in + *" +"enable_$ac_useropt" +"*) ;; + *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--enable-$ac_useropt_orig" + ac_unrecognized_sep=', ';; + esac + eval enable_$ac_useropt=\$ac_optarg ;; + + -exec-prefix | --exec_prefix | --exec-prefix | --exec-prefi \ + | --exec-pref | --exec-pre | --exec-pr | --exec-p | --exec- \ + | --exec | --exe | --ex) + ac_prev=exec_prefix ;; + -exec-prefix=* | --exec_prefix=* | --exec-prefix=* | --exec-prefi=* \ + | --exec-pref=* | --exec-pre=* | --exec-pr=* | --exec-p=* | --exec-=* \ + | --exec=* | --exe=* | --ex=*) + exec_prefix=$ac_optarg ;; + + -gas | --gas | --ga | --g) + # Obsolete; use --with-gas. + with_gas=yes ;; + + -help | --help | --hel | --he | -h) + ac_init_help=long ;; + -help=r* | --help=r* | --hel=r* | --he=r* | -hr*) + ac_init_help=recursive ;; + -help=s* | --help=s* | --hel=s* | --he=s* | -hs*) + ac_init_help=short ;; + + -host | --host | --hos | --ho) + ac_prev=host_alias ;; + -host=* | --host=* | --hos=* | --ho=*) + host_alias=$ac_optarg ;; + + -htmldir | --htmldir | --htmldi | --htmld | --html | --htm | --ht) + ac_prev=htmldir ;; + -htmldir=* | --htmldir=* | --htmldi=* | --htmld=* | --html=* | --htm=* \ + | --ht=*) + htmldir=$ac_optarg ;; + + -includedir | --includedir | --includedi | --included | --include \ + | --includ | --inclu | --incl | --inc) + ac_prev=includedir ;; + -includedir=* | --includedir=* | --includedi=* | --included=* | --include=* \ + | --includ=* | --inclu=* | --incl=* | --inc=*) + includedir=$ac_optarg ;; + + -infodir | --infodir | --infodi | --infod | --info | --inf) + ac_prev=infodir ;; + -infodir=* | --infodir=* | --infodi=* | --infod=* | --info=* | --inf=*) + infodir=$ac_optarg ;; + + -libdir | --libdir | --libdi | --libd) + ac_prev=libdir ;; + -libdir=* | --libdir=* | --libdi=* | --libd=*) + libdir=$ac_optarg ;; + + 
-libexecdir | --libexecdir | --libexecdi | --libexecd | --libexec \ + | --libexe | --libex | --libe) + ac_prev=libexecdir ;; + -libexecdir=* | --libexecdir=* | --libexecdi=* | --libexecd=* | --libexec=* \ + | --libexe=* | --libex=* | --libe=*) + libexecdir=$ac_optarg ;; + + -localedir | --localedir | --localedi | --localed | --locale) + ac_prev=localedir ;; + -localedir=* | --localedir=* | --localedi=* | --localed=* | --locale=*) + localedir=$ac_optarg ;; + + -localstatedir | --localstatedir | --localstatedi | --localstated \ + | --localstate | --localstat | --localsta | --localst | --locals) + ac_prev=localstatedir ;; + -localstatedir=* | --localstatedir=* | --localstatedi=* | --localstated=* \ + | --localstate=* | --localstat=* | --localsta=* | --localst=* | --locals=*) + localstatedir=$ac_optarg ;; + + -mandir | --mandir | --mandi | --mand | --man | --ma | --m) + ac_prev=mandir ;; + -mandir=* | --mandir=* | --mandi=* | --mand=* | --man=* | --ma=* | --m=*) + mandir=$ac_optarg ;; + + -nfp | --nfp | --nf) + # Obsolete; use --without-fp. 
+ with_fp=no ;; + + -no-create | --no-create | --no-creat | --no-crea | --no-cre \ + | --no-cr | --no-c | -n) + no_create=yes ;; + + -no-recursion | --no-recursion | --no-recursio | --no-recursi \ + | --no-recurs | --no-recur | --no-recu | --no-rec | --no-re | --no-r) + no_recursion=yes ;; + + -oldincludedir | --oldincludedir | --oldincludedi | --oldincluded \ + | --oldinclude | --oldinclud | --oldinclu | --oldincl | --oldinc \ + | --oldin | --oldi | --old | --ol | --o) + ac_prev=oldincludedir ;; + -oldincludedir=* | --oldincludedir=* | --oldincludedi=* | --oldincluded=* \ + | --oldinclude=* | --oldinclud=* | --oldinclu=* | --oldincl=* | --oldinc=* \ + | --oldin=* | --oldi=* | --old=* | --ol=* | --o=*) + oldincludedir=$ac_optarg ;; + + -prefix | --prefix | --prefi | --pref | --pre | --pr | --p) + ac_prev=prefix ;; + -prefix=* | --prefix=* | --prefi=* | --pref=* | --pre=* | --pr=* | --p=*) + prefix=$ac_optarg ;; + + -program-prefix | --program-prefix | --program-prefi | --program-pref \ + | --program-pre | --program-pr | --program-p) + ac_prev=program_prefix ;; + -program-prefix=* | --program-prefix=* | --program-prefi=* \ + | --program-pref=* | --program-pre=* | --program-pr=* | --program-p=*) + program_prefix=$ac_optarg ;; + + -program-suffix | --program-suffix | --program-suffi | --program-suff \ + | --program-suf | --program-su | --program-s) + ac_prev=program_suffix ;; + -program-suffix=* | --program-suffix=* | --program-suffi=* \ + | --program-suff=* | --program-suf=* | --program-su=* | --program-s=*) + program_suffix=$ac_optarg ;; + + -program-transform-name | --program-transform-name \ + | --program-transform-nam | --program-transform-na \ + | --program-transform-n | --program-transform- \ + | --program-transform | --program-transfor \ + | --program-transfo | --program-transf \ + | --program-trans | --program-tran \ + | --progr-tra | --program-tr | --program-t) + ac_prev=program_transform_name ;; + -program-transform-name=* | --program-transform-name=* \ + | 
--program-transform-nam=* | --program-transform-na=* \ + | --program-transform-n=* | --program-transform-=* \ + | --program-transform=* | --program-transfor=* \ + | --program-transfo=* | --program-transf=* \ + | --program-trans=* | --program-tran=* \ + | --progr-tra=* | --program-tr=* | --program-t=*) + program_transform_name=$ac_optarg ;; + + -pdfdir | --pdfdir | --pdfdi | --pdfd | --pdf | --pd) + ac_prev=pdfdir ;; + -pdfdir=* | --pdfdir=* | --pdfdi=* | --pdfd=* | --pdf=* | --pd=*) + pdfdir=$ac_optarg ;; + + -psdir | --psdir | --psdi | --psd | --ps) + ac_prev=psdir ;; + -psdir=* | --psdir=* | --psdi=* | --psd=* | --ps=*) + psdir=$ac_optarg ;; + + -q | -quiet | --quiet | --quie | --qui | --qu | --q \ + | -silent | --silent | --silen | --sile | --sil) + silent=yes ;; + + -runstatedir | --runstatedir | --runstatedi | --runstated \ + | --runstate | --runstat | --runsta | --runst | --runs \ + | --run | --ru | --r) + ac_prev=runstatedir ;; + -runstatedir=* | --runstatedir=* | --runstatedi=* | --runstated=* \ + | --runstate=* | --runstat=* | --runsta=* | --runst=* | --runs=* \ + | --run=* | --ru=* | --r=*) + runstatedir=$ac_optarg ;; + + -sbindir | --sbindir | --sbindi | --sbind | --sbin | --sbi | --sb) + ac_prev=sbindir ;; + -sbindir=* | --sbindir=* | --sbindi=* | --sbind=* | --sbin=* \ + | --sbi=* | --sb=*) + sbindir=$ac_optarg ;; + + -sharedstatedir | --sharedstatedir | --sharedstatedi \ + | --sharedstated | --sharedstate | --sharedstat | --sharedsta \ + | --sharedst | --shareds | --shared | --share | --shar \ + | --sha | --sh) + ac_prev=sharedstatedir ;; + -sharedstatedir=* | --sharedstatedir=* | --sharedstatedi=* \ + | --sharedstated=* | --sharedstate=* | --sharedstat=* | --sharedsta=* \ + | --sharedst=* | --shareds=* | --shared=* | --share=* | --shar=* \ + | --sha=* | --sh=*) + sharedstatedir=$ac_optarg ;; + + -site | --site | --sit) + ac_prev=site ;; + -site=* | --site=* | --sit=*) + site=$ac_optarg ;; + + -srcdir | --srcdir | --srcdi | --srcd | --src | --sr) + 
ac_prev=srcdir ;; + -srcdir=* | --srcdir=* | --srcdi=* | --srcd=* | --src=* | --sr=*) + srcdir=$ac_optarg ;; + + -sysconfdir | --sysconfdir | --sysconfdi | --sysconfd | --sysconf \ + | --syscon | --sysco | --sysc | --sys | --sy) + ac_prev=sysconfdir ;; + -sysconfdir=* | --sysconfdir=* | --sysconfdi=* | --sysconfd=* | --sysconf=* \ + | --syscon=* | --sysco=* | --sysc=* | --sys=* | --sy=*) + sysconfdir=$ac_optarg ;; + + -target | --target | --targe | --targ | --tar | --ta | --t) + ac_prev=target_alias ;; + -target=* | --target=* | --targe=* | --targ=* | --tar=* | --ta=* | --t=*) + target_alias=$ac_optarg ;; + + -v | -verbose | --verbose | --verbos | --verbo | --verb) + verbose=yes ;; + + -version | --version | --versio | --versi | --vers | -V) + ac_init_version=: ;; + + -with-* | --with-*) + ac_useropt=`expr "x$ac_option" : 'x-*with-\([^=]*\)'` + # Reject names that are not valid shell variable names. + expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && + as_fn_error $? "invalid package name: \`$ac_useropt'" + ac_useropt_orig=$ac_useropt + ac_useropt=`printf "%s\n" "$ac_useropt" | sed 's/[-+.]/_/g'` + case $ac_user_opts in + *" +"with_$ac_useropt" +"*) ;; + *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--with-$ac_useropt_orig" + ac_unrecognized_sep=', ';; + esac + eval with_$ac_useropt=\$ac_optarg ;; + + -without-* | --without-*) + ac_useropt=`expr "x$ac_option" : 'x-*without-\(.*\)'` + # Reject names that are not valid shell variable names. + expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && + as_fn_error $? "invalid package name: \`$ac_useropt'" + ac_useropt_orig=$ac_useropt + ac_useropt=`printf "%s\n" "$ac_useropt" | sed 's/[-+.]/_/g'` + case $ac_user_opts in + *" +"with_$ac_useropt" +"*) ;; + *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--without-$ac_useropt_orig" + ac_unrecognized_sep=', ';; + esac + eval with_$ac_useropt=no ;; + + --x) + # Obsolete; use --with-x. 
+ with_x=yes ;; + + -x-includes | --x-includes | --x-include | --x-includ | --x-inclu \ + | --x-incl | --x-inc | --x-in | --x-i) + ac_prev=x_includes ;; + -x-includes=* | --x-includes=* | --x-include=* | --x-includ=* | --x-inclu=* \ + | --x-incl=* | --x-inc=* | --x-in=* | --x-i=*) + x_includes=$ac_optarg ;; + + -x-libraries | --x-libraries | --x-librarie | --x-librari \ + | --x-librar | --x-libra | --x-libr | --x-lib | --x-li | --x-l) + ac_prev=x_libraries ;; + -x-libraries=* | --x-libraries=* | --x-librarie=* | --x-librari=* \ + | --x-librar=* | --x-libra=* | --x-libr=* | --x-lib=* | --x-li=* | --x-l=*) + x_libraries=$ac_optarg ;; + + -*) as_fn_error $? "unrecognized option: \`$ac_option' +Try \`$0 --help' for more information" + ;; + + *=*) + ac_envvar=`expr "x$ac_option" : 'x\([^=]*\)='` + # Reject names that are not valid shell variable names. + case $ac_envvar in #( + '' | [0-9]* | *[!_$as_cr_alnum]* ) + as_fn_error $? "invalid variable name: \`$ac_envvar'" ;; + esac + eval $ac_envvar=\$ac_optarg + export $ac_envvar ;; + + *) + # FIXME: should be removed in autoconf 3.0. + printf "%s\n" "$as_me: WARNING: you should use --build, --host, --target" >&2 + expr "x$ac_option" : ".*[^-._$as_cr_alnum]" >/dev/null && + printf "%s\n" "$as_me: WARNING: invalid host type: $ac_option" >&2 + : "${build_alias=$ac_option} ${host_alias=$ac_option} ${target_alias=$ac_option}" + ;; + + esac +done + +if test -n "$ac_prev"; then + ac_option=--`echo $ac_prev | sed 's/_/-/g'` + as_fn_error $? "missing argument to $ac_option" +fi + +if test -n "$ac_unrecognized_opts"; then + case $enable_option_checking in + no) ;; + fatal) as_fn_error $? "unrecognized options: $ac_unrecognized_opts" ;; + *) printf "%s\n" "$as_me: WARNING: unrecognized options: $ac_unrecognized_opts" >&2 ;; + esac +fi + +# Check all directory arguments for consistency. 
+for ac_var in exec_prefix prefix bindir sbindir libexecdir datarootdir \ + datadir sysconfdir sharedstatedir localstatedir includedir \ + oldincludedir docdir infodir htmldir dvidir pdfdir psdir \ + libdir localedir mandir runstatedir +do + eval ac_val=\$$ac_var + # Remove trailing slashes. + case $ac_val in + */ ) + ac_val=`expr "X$ac_val" : 'X\(.*[^/]\)' \| "X$ac_val" : 'X\(.*\)'` + eval $ac_var=\$ac_val;; + esac + # Be sure to have absolute directory names. + case $ac_val in + [\\/$]* | ?:[\\/]* ) continue;; + NONE | '' ) case $ac_var in *prefix ) continue;; esac;; + esac + as_fn_error $? "expected an absolute directory name for --$ac_var: $ac_val" +done + +# There might be people who depend on the old broken behavior: `$host' +# used to hold the argument of --host etc. +# FIXME: To remove some day. +build=$build_alias +host=$host_alias +target=$target_alias + +# FIXME: To remove some day. +if test "x$host_alias" != x; then + if test "x$build_alias" = x; then + cross_compiling=maybe + elif test "x$build_alias" != "x$host_alias"; then + cross_compiling=yes + fi +fi + +ac_tool_prefix= +test -n "$host_alias" && ac_tool_prefix=$host_alias- + +test "$silent" = yes && exec 6>/dev/null + + +ac_pwd=`pwd` && test -n "$ac_pwd" && +ac_ls_di=`ls -di .` && +ac_pwd_ls_di=`cd "$ac_pwd" && ls -di .` || + as_fn_error $? "working directory cannot be determined" +test "X$ac_ls_di" = "X$ac_pwd_ls_di" || + as_fn_error $? "pwd does not report name of working directory" + + +# Find the source files, if location was not specified. +if test -z "$srcdir"; then + ac_srcdir_defaulted=yes + # Try the directory containing this script, then the parent directory. + ac_confdir=`$as_dirname -- "$as_myself" || +$as_expr X"$as_myself" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ + X"$as_myself" : 'X\(//\)[^/]' \| \ + X"$as_myself" : 'X\(//\)$' \| \ + X"$as_myself" : 'X\(/\)' \| . 
2>/dev/null || +printf "%s\n" X"$as_myself" | + sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ + s//\1/ + q + } + /^X\(\/\/\)[^/].*/{ + s//\1/ + q + } + /^X\(\/\/\)$/{ + s//\1/ + q + } + /^X\(\/\).*/{ + s//\1/ + q + } + s/.*/./; q'` + srcdir=$ac_confdir + if test ! -r "$srcdir/$ac_unique_file"; then + srcdir=.. + fi +else + ac_srcdir_defaulted=no +fi +if test ! -r "$srcdir/$ac_unique_file"; then + test "$ac_srcdir_defaulted" = yes && srcdir="$ac_confdir or .." + as_fn_error $? "cannot find sources ($ac_unique_file) in $srcdir" +fi +ac_msg="sources are in $srcdir, but \`cd $srcdir' does not work" +ac_abs_confdir=`( + cd "$srcdir" && test -r "./$ac_unique_file" || as_fn_error $? "$ac_msg" + pwd)` +# When building in place, set srcdir=. +if test "$ac_abs_confdir" = "$ac_pwd"; then + srcdir=. +fi +# Remove unnecessary trailing slashes from srcdir. +# Double slashes in file names in object file debugging info +# mess up M-x gdb in Emacs. +case $srcdir in +*/) srcdir=`expr "X$srcdir" : 'X\(.*[^/]\)' \| "X$srcdir" : 'X\(.*\)'`;; +esac +for ac_var in $ac_precious_vars; do + eval ac_env_${ac_var}_set=\${${ac_var}+set} + eval ac_env_${ac_var}_value=\$${ac_var} + eval ac_cv_env_${ac_var}_set=\${${ac_var}+set} + eval ac_cv_env_${ac_var}_value=\$${ac_var} +done + +# +# Report the --help message. +# +if test "$ac_init_help" = "long"; then + # Omit some internal or obsolete options to make the list less imposing. + # This message is too long to be a string in the A/UX 3.1 sh. + cat <<_ACEOF +\`configure' configures this package to adapt to many kinds of systems. + +Usage: $0 [OPTION]... [VAR=VALUE]... + +To assign environment variables (e.g., CC, CFLAGS...), specify them as +VAR=VALUE. See below for descriptions of some of the useful variables. + +Defaults for the options are specified in brackets. 
+ +Configuration: + -h, --help display this help and exit + --help=short display options specific to this package + --help=recursive display the short help of all the included packages + -V, --version display version information and exit + -q, --quiet, --silent do not print \`checking ...' messages + --cache-file=FILE cache test results in FILE [disabled] + -C, --config-cache alias for \`--cache-file=config.cache' + -n, --no-create do not create output files + --srcdir=DIR find the sources in DIR [configure dir or \`..'] + +Installation directories: + --prefix=PREFIX install architecture-independent files in PREFIX + [$ac_default_prefix] + --exec-prefix=EPREFIX install architecture-dependent files in EPREFIX + [PREFIX] + +By default, \`make install' will install all the files in +\`$ac_default_prefix/bin', \`$ac_default_prefix/lib' etc. You can specify +an installation prefix other than \`$ac_default_prefix' using \`--prefix', +for instance \`--prefix=\$HOME'. + +For better control, use the options below. 
+ +Fine tuning of the installation directories: + --bindir=DIR user executables [EPREFIX/bin] + --sbindir=DIR system admin executables [EPREFIX/sbin] + --libexecdir=DIR program executables [EPREFIX/libexec] + --sysconfdir=DIR read-only single-machine data [PREFIX/etc] + --sharedstatedir=DIR modifiable architecture-independent data [PREFIX/com] + --localstatedir=DIR modifiable single-machine data [PREFIX/var] + --runstatedir=DIR modifiable per-process data [LOCALSTATEDIR/run] + --libdir=DIR object code libraries [EPREFIX/lib] + --includedir=DIR C header files [PREFIX/include] + --oldincludedir=DIR C header files for non-gcc [/usr/include] + --datarootdir=DIR read-only arch.-independent data root [PREFIX/share] + --datadir=DIR read-only architecture-independent data [DATAROOTDIR] + --infodir=DIR info documentation [DATAROOTDIR/info] + --localedir=DIR locale-dependent data [DATAROOTDIR/locale] + --mandir=DIR man documentation [DATAROOTDIR/man] + --docdir=DIR documentation root [DATAROOTDIR/doc/PACKAGE] + --htmldir=DIR html documentation [DOCDIR] + --dvidir=DIR dvi documentation [DOCDIR] + --pdfdir=DIR pdf documentation [DOCDIR] + --psdir=DIR ps documentation [DOCDIR] +_ACEOF + + cat <<\_ACEOF +_ACEOF +fi + +if test -n "$ac_init_help"; then + + cat <<\_ACEOF + +Optional Packages: + --with-PACKAGE[=ARG] use PACKAGE [ARG=yes] + --without-PACKAGE do not use PACKAGE (same as --with-PACKAGE=no) + --with-data=DYSH_DATA Root directory of DYSH_DATA (or set $DYSH_DATA) + +Report bugs to the package provider. +_ACEOF +ac_status=$? +fi + +if test "$ac_init_help" = "recursive"; then + # If there are subdirs, report their specific --help. + for ac_dir in : $ac_subdirs_all; do test "x$ac_dir" = x: && continue + test -d "$ac_dir" || + { cd "$srcdir" && ac_pwd=`pwd` && srcdir=. && test -d "$ac_dir"; } || + continue + ac_builddir=. + +case "$ac_dir" in +.) ac_dir_suffix= ac_top_builddir_sub=. 
ac_top_build_prefix= ;; +*) + ac_dir_suffix=/`printf "%s\n" "$ac_dir" | sed 's|^\.[\\/]||'` + # A ".." for each directory in $ac_dir_suffix. + ac_top_builddir_sub=`printf "%s\n" "$ac_dir_suffix" | sed 's|/[^\\/]*|/..|g;s|/||'` + case $ac_top_builddir_sub in + "") ac_top_builddir_sub=. ac_top_build_prefix= ;; + *) ac_top_build_prefix=$ac_top_builddir_sub/ ;; + esac ;; +esac +ac_abs_top_builddir=$ac_pwd +ac_abs_builddir=$ac_pwd$ac_dir_suffix +# for backward compatibility: +ac_top_builddir=$ac_top_build_prefix + +case $srcdir in + .) # We are building in place. + ac_srcdir=. + ac_top_srcdir=$ac_top_builddir_sub + ac_abs_top_srcdir=$ac_pwd ;; + [\\/]* | ?:[\\/]* ) # Absolute name. + ac_srcdir=$srcdir$ac_dir_suffix; + ac_top_srcdir=$srcdir + ac_abs_top_srcdir=$srcdir ;; + *) # Relative name. + ac_srcdir=$ac_top_build_prefix$srcdir$ac_dir_suffix + ac_top_srcdir=$ac_top_build_prefix$srcdir + ac_abs_top_srcdir=$ac_pwd/$srcdir ;; +esac +ac_abs_srcdir=$ac_abs_top_srcdir$ac_dir_suffix + + cd "$ac_dir" || { ac_status=$?; continue; } + # Check for configure.gnu first; this name is used for a wrapper for + # Metaconfig's "Configure" on case-insensitive file systems. + if test -f "$ac_srcdir/configure.gnu"; then + echo && + $SHELL "$ac_srcdir/configure.gnu" --help=recursive + elif test -f "$ac_srcdir/configure"; then + echo && + $SHELL "$ac_srcdir/configure" --help=recursive + else + printf "%s\n" "$as_me: WARNING: no configuration information is in $ac_dir" >&2 + fi || ac_status=$? + cd "$ac_pwd" || { ac_status=$?; break; } + done +fi + +test -n "$ac_init_help" && exit $ac_status +if $ac_init_version; then + cat <<\_ACEOF +configure +generated by GNU Autoconf 2.71 + +Copyright (C) 2021 Free Software Foundation, Inc. +This configure script is free software; the Free Software Foundation +gives unlimited permission to copy, distribute and modify it. +_ACEOF + exit +fi + +## ------------------------ ## +## Autoconf initialization. 
## +## ------------------------ ## +ac_configure_args_raw= +for ac_arg +do + case $ac_arg in + *\'*) + ac_arg=`printf "%s\n" "$ac_arg" | sed "s/'/'\\\\\\\\''/g"` ;; + esac + as_fn_append ac_configure_args_raw " '$ac_arg'" +done + +case $ac_configure_args_raw in + *$as_nl*) + ac_safe_unquote= ;; + *) + ac_unsafe_z='|&;<>()$`\\"*?[ '' ' # This string ends in space, tab. + ac_unsafe_a="$ac_unsafe_z#~" + ac_safe_unquote="s/ '\\([^$ac_unsafe_a][^$ac_unsafe_z]*\\)'/ \\1/g" + ac_configure_args_raw=` printf "%s\n" "$ac_configure_args_raw" | sed "$ac_safe_unquote"`;; +esac + +cat >config.log <<_ACEOF +This file contains any messages produced by compilers while +running configure, to aid debugging if configure makes a mistake. + +It was created by $as_me, which was +generated by GNU Autoconf 2.71. Invocation command line was + + $ $0$ac_configure_args_raw + +_ACEOF +exec 5>>config.log +{ +cat <<_ASUNAME +## --------- ## +## Platform. ## +## --------- ## + +hostname = `(hostname || uname -n) 2>/dev/null | sed 1q` +uname -m = `(uname -m) 2>/dev/null || echo unknown` +uname -r = `(uname -r) 2>/dev/null || echo unknown` +uname -s = `(uname -s) 2>/dev/null || echo unknown` +uname -v = `(uname -v) 2>/dev/null || echo unknown` + +/usr/bin/uname -p = `(/usr/bin/uname -p) 2>/dev/null || echo unknown` +/bin/uname -X = `(/bin/uname -X) 2>/dev/null || echo unknown` + +/bin/arch = `(/bin/arch) 2>/dev/null || echo unknown` +/usr/bin/arch -k = `(/usr/bin/arch -k) 2>/dev/null || echo unknown` +/usr/convex/getsysinfo = `(/usr/convex/getsysinfo) 2>/dev/null || echo unknown` +/usr/bin/hostinfo = `(/usr/bin/hostinfo) 2>/dev/null || echo unknown` +/bin/machine = `(/bin/machine) 2>/dev/null || echo unknown` +/usr/bin/oslevel = `(/usr/bin/oslevel) 2>/dev/null || echo unknown` +/bin/universe = `(/bin/universe) 2>/dev/null || echo unknown` + +_ASUNAME + +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + case $as_dir in #((( + '') as_dir=./ ;; + */) ;; + *) 
as_dir=$as_dir/ ;; + esac + printf "%s\n" "PATH: $as_dir" + done +IFS=$as_save_IFS + +} >&5 + +cat >&5 <<_ACEOF + + +## ----------- ## +## Core tests. ## +## ----------- ## + +_ACEOF + + +# Keep a trace of the command line. +# Strip out --no-create and --no-recursion so they do not pile up. +# Strip out --silent because we don't want to record it for future runs. +# Also quote any args containing shell meta-characters. +# Make two passes to allow for proper duplicate-argument suppression. +ac_configure_args= +ac_configure_args0= +ac_configure_args1= +ac_must_keep_next=false +for ac_pass in 1 2 +do + for ac_arg + do + case $ac_arg in + -no-create | --no-c* | -n | -no-recursion | --no-r*) continue ;; + -q | -quiet | --quiet | --quie | --qui | --qu | --q \ + | -silent | --silent | --silen | --sile | --sil) + continue ;; + *\'*) + ac_arg=`printf "%s\n" "$ac_arg" | sed "s/'/'\\\\\\\\''/g"` ;; + esac + case $ac_pass in + 1) as_fn_append ac_configure_args0 " '$ac_arg'" ;; + 2) + as_fn_append ac_configure_args1 " '$ac_arg'" + if test $ac_must_keep_next = true; then + ac_must_keep_next=false # Got value, back to normal. + else + case $ac_arg in + *=* | --config-cache | -C | -disable-* | --disable-* \ + | -enable-* | --enable-* | -gas | --g* | -nfp | --nf* \ + | -q | -quiet | --q* | -silent | --sil* | -v | -verb* \ + | -with-* | --with-* | -without-* | --without-* | --x) + case "$ac_configure_args0 " in + "$ac_configure_args1"*" '$ac_arg' "* ) continue ;; + esac + ;; + -* ) ac_must_keep_next=true ;; + esac + fi + as_fn_append ac_configure_args " '$ac_arg'" + ;; + esac + done +done +{ ac_configure_args0=; unset ac_configure_args0;} +{ ac_configure_args1=; unset ac_configure_args1;} + +# When interrupted or exit'd, cleanup temporary files, and complete +# config.log. We remove comments because anyway the quotes in there +# would cause problems or look ugly. +# WARNING: Use '\'' to represent an apostrophe within the trap. 
+# WARNING: Do not start the trap code with a newline, due to a FreeBSD 4.0 bug. +trap 'exit_status=$? + # Sanitize IFS. + IFS=" "" $as_nl" + # Save into config.log some information that might help in debugging. + { + echo + + printf "%s\n" "## ---------------- ## +## Cache variables. ## +## ---------------- ##" + echo + # The following way of writing the cache mishandles newlines in values, +( + for ac_var in `(set) 2>&1 | sed -n '\''s/^\([a-zA-Z_][a-zA-Z0-9_]*\)=.*/\1/p'\''`; do + eval ac_val=\$$ac_var + case $ac_val in #( + *${as_nl}*) + case $ac_var in #( + *_cv_*) { printf "%s\n" "$as_me:${as_lineno-$LINENO}: WARNING: cache variable $ac_var contains a newline" >&5 +printf "%s\n" "$as_me: WARNING: cache variable $ac_var contains a newline" >&2;} ;; + esac + case $ac_var in #( + _ | IFS | as_nl) ;; #( + BASH_ARGV | BASH_SOURCE) eval $ac_var= ;; #( + *) { eval $ac_var=; unset $ac_var;} ;; + esac ;; + esac + done + (set) 2>&1 | + case $as_nl`(ac_space='\'' '\''; set) 2>&1` in #( + *${as_nl}ac_space=\ *) + sed -n \ + "s/'\''/'\''\\\\'\'''\''/g; + s/^\\([_$as_cr_alnum]*_cv_[_$as_cr_alnum]*\\)=\\(.*\\)/\\1='\''\\2'\''/p" + ;; #( + *) + sed -n "/^[_$as_cr_alnum]*_cv_[_$as_cr_alnum]*=/p" + ;; + esac | + sort +) + echo + + printf "%s\n" "## ----------------- ## +## Output variables. ## +## ----------------- ##" + echo + for ac_var in $ac_subst_vars + do + eval ac_val=\$$ac_var + case $ac_val in + *\'\''*) ac_val=`printf "%s\n" "$ac_val" | sed "s/'\''/'\''\\\\\\\\'\'''\''/g"`;; + esac + printf "%s\n" "$ac_var='\''$ac_val'\''" + done | sort + echo + + if test -n "$ac_subst_files"; then + printf "%s\n" "## ------------------- ## +## File substitutions. 
## +## ------------------- ##" + echo + for ac_var in $ac_subst_files + do + eval ac_val=\$$ac_var + case $ac_val in + *\'\''*) ac_val=`printf "%s\n" "$ac_val" | sed "s/'\''/'\''\\\\\\\\'\'''\''/g"`;; + esac + printf "%s\n" "$ac_var='\''$ac_val'\''" + done | sort + echo + fi + + if test -s confdefs.h; then + printf "%s\n" "## ----------- ## +## confdefs.h. ## +## ----------- ##" + echo + cat confdefs.h + echo + fi + test "$ac_signal" != 0 && + printf "%s\n" "$as_me: caught signal $ac_signal" + printf "%s\n" "$as_me: exit $exit_status" + } >&5 + rm -f core *.core core.conftest.* && + rm -f -r conftest* confdefs* conf$$* $ac_clean_files && + exit $exit_status +' 0 +for ac_signal in 1 2 13 15; do + trap 'ac_signal='$ac_signal'; as_fn_exit 1' $ac_signal +done +ac_signal=0 + +# confdefs.h avoids OS command line length limits that DEFS can exceed. +rm -f -r conftest* confdefs.h + +printf "%s\n" "/* confdefs.h */" > confdefs.h + +# Predefined preprocessor variables. + +printf "%s\n" "#define PACKAGE_NAME \"$PACKAGE_NAME\"" >>confdefs.h + +printf "%s\n" "#define PACKAGE_TARNAME \"$PACKAGE_TARNAME\"" >>confdefs.h + +printf "%s\n" "#define PACKAGE_VERSION \"$PACKAGE_VERSION\"" >>confdefs.h + +printf "%s\n" "#define PACKAGE_STRING \"$PACKAGE_STRING\"" >>confdefs.h + +printf "%s\n" "#define PACKAGE_BUGREPORT \"$PACKAGE_BUGREPORT\"" >>confdefs.h + +printf "%s\n" "#define PACKAGE_URL \"$PACKAGE_URL\"" >>confdefs.h + + +# Let the site file select an alternate cache file if it wants to. +# Prefer an explicitly selected file to automatically selected ones. 
+if test -n "$CONFIG_SITE"; then + ac_site_files="$CONFIG_SITE" +elif test "x$prefix" != xNONE; then + ac_site_files="$prefix/share/config.site $prefix/etc/config.site" +else + ac_site_files="$ac_default_prefix/share/config.site $ac_default_prefix/etc/config.site" +fi + +for ac_site_file in $ac_site_files +do + case $ac_site_file in #( + */*) : + ;; #( + *) : + ac_site_file=./$ac_site_file ;; +esac + if test -f "$ac_site_file" && test -r "$ac_site_file"; then + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: loading site script $ac_site_file" >&5 +printf "%s\n" "$as_me: loading site script $ac_site_file" >&6;} + sed 's/^/| /' "$ac_site_file" >&5 + . "$ac_site_file" \ + || { { printf "%s\n" "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 +printf "%s\n" "$as_me: error: in \`$ac_pwd':" >&2;} +as_fn_error $? "failed to load site script $ac_site_file +See \`config.log' for more details" "$LINENO" 5; } + fi +done + +if test -r "$cache_file"; then + # Some versions of bash will fail to source /dev/null (special files + # actually), so we avoid doing that. DJGPP emulates it as a regular file. + if test /dev/null != "$cache_file" && test -f "$cache_file"; then + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: loading cache $cache_file" >&5 +printf "%s\n" "$as_me: loading cache $cache_file" >&6;} + case $cache_file in + [\\/]* | ?:[\\/]* ) . "$cache_file";; + *) . "./$cache_file";; + esac + fi +else + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: creating cache $cache_file" >&5 +printf "%s\n" "$as_me: creating cache $cache_file" >&6;} + >$cache_file +fi + +# Check that the precious variables saved in the cache have kept the same +# value. 
+ac_cache_corrupted=false +for ac_var in $ac_precious_vars; do + eval ac_old_set=\$ac_cv_env_${ac_var}_set + eval ac_new_set=\$ac_env_${ac_var}_set + eval ac_old_val=\$ac_cv_env_${ac_var}_value + eval ac_new_val=\$ac_env_${ac_var}_value + case $ac_old_set,$ac_new_set in + set,) + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' was set to \`$ac_old_val' in the previous run" >&5 +printf "%s\n" "$as_me: error: \`$ac_var' was set to \`$ac_old_val' in the previous run" >&2;} + ac_cache_corrupted=: ;; + ,set) + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' was not set in the previous run" >&5 +printf "%s\n" "$as_me: error: \`$ac_var' was not set in the previous run" >&2;} + ac_cache_corrupted=: ;; + ,);; + *) + if test "x$ac_old_val" != "x$ac_new_val"; then + # differences in whitespace do not lead to failure. + ac_old_val_w=`echo x $ac_old_val` + ac_new_val_w=`echo x $ac_new_val` + if test "$ac_old_val_w" != "$ac_new_val_w"; then + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' has changed since the previous run:" >&5 +printf "%s\n" "$as_me: error: \`$ac_var' has changed since the previous run:" >&2;} + ac_cache_corrupted=: + else + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: warning: ignoring whitespace changes in \`$ac_var' since the previous run:" >&5 +printf "%s\n" "$as_me: warning: ignoring whitespace changes in \`$ac_var' since the previous run:" >&2;} + eval $ac_var=\$ac_old_val + fi + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: former value: \`$ac_old_val'" >&5 +printf "%s\n" "$as_me: former value: \`$ac_old_val'" >&2;} + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: current value: \`$ac_new_val'" >&5 +printf "%s\n" "$as_me: current value: \`$ac_new_val'" >&2;} + fi;; + esac + # Pass precious variables to config.status. 
+ if test "$ac_new_set" = set; then + case $ac_new_val in + *\'*) ac_arg=$ac_var=`printf "%s\n" "$ac_new_val" | sed "s/'/'\\\\\\\\''/g"` ;; + *) ac_arg=$ac_var=$ac_new_val ;; + esac + case " $ac_configure_args " in + *" '$ac_arg' "*) ;; # Avoid dups. Use of quotes ensures accuracy. + *) as_fn_append ac_configure_args " '$ac_arg'" ;; + esac + fi +done +if $ac_cache_corrupted; then + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 +printf "%s\n" "$as_me: error: in \`$ac_pwd':" >&2;} + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: error: changes in the environment can compromise the build" >&5 +printf "%s\n" "$as_me: error: changes in the environment can compromise the build" >&2;} + as_fn_error $? "run \`${MAKE-make} distclean' and/or \`rm $cache_file' + and start over" "$LINENO" 5 +fi +## -------------------- ## +## Main body of script. ## +## -------------------- ## + +ac_ext=c +ac_cpp='$CPP $CPPFLAGS' +ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' +ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' +ac_compiler_gnu=$ac_cv_c_compiler_gnu + + + + +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking DYSH config" >&5 +printf %s "checking DYSH config... " >&6; } +echo "" + +DYSH="`pwd`" + +echo Using DYSH=$DYSH + +EDIT_MSG="Do not edit this file, it has been generated via configure in DYSH" + + + + + +if test ! -z $DYSH_DATA ; then + DYSH_DATA=$DYSH_DATA +elif test -d ~/GBT/data ; then + DYSH_DATA=~/GBT/data +elif test -d /home/gbt ; then + DYSH_DATA=/home/gbt +else + echo "Could not find the dysh_data, use --with-data=" + DYSH_DATA=/tmp +fi +data_default=$DYSH_DATA + + +# Check whether --with-data was given. +if test ${with_data+y} +then : + withval=$with_data; dysh_data="$withval" +else $as_nop + dysh_data="$data_default" +fi + +DYSH_DATA=$dysh_data + + +echo "Using DYSH_DATA=$DYSH_DATA" + + + +# put these in a single AC_OUTPUT command so config.status is not called multiple times. 
+ac_config_files="$ac_config_files dysh_start.sh" + +cat >confcache <<\_ACEOF +# This file is a shell script that caches the results of configure +# tests run on this system so they can be shared between configure +# scripts and configure runs, see configure's option --config-cache. +# It is not useful on other systems. If it contains results you don't +# want to keep, you may remove or edit it. +# +# config.status only pays attention to the cache file if you give it +# the --recheck option to rerun configure. +# +# `ac_cv_env_foo' variables (set or unset) will be overridden when +# loading this file, other *unset* `ac_cv_foo' will be assigned the +# following values. + +_ACEOF + +# The following way of writing the cache mishandles newlines in values, +# but we know of no workaround that is simple, portable, and efficient. +# So, we kill variables containing newlines. +# Ultrix sh set writes to stderr and can't be redirected directly, +# and sets the high bit in the cache file unless we assign to the vars. +( + for ac_var in `(set) 2>&1 | sed -n 's/^\([a-zA-Z_][a-zA-Z0-9_]*\)=.*/\1/p'`; do + eval ac_val=\$$ac_var + case $ac_val in #( + *${as_nl}*) + case $ac_var in #( + *_cv_*) { printf "%s\n" "$as_me:${as_lineno-$LINENO}: WARNING: cache variable $ac_var contains a newline" >&5 +printf "%s\n" "$as_me: WARNING: cache variable $ac_var contains a newline" >&2;} ;; + esac + case $ac_var in #( + _ | IFS | as_nl) ;; #( + BASH_ARGV | BASH_SOURCE) eval $ac_var= ;; #( + *) { eval $ac_var=; unset $ac_var;} ;; + esac ;; + esac + done + + (set) 2>&1 | + case $as_nl`(ac_space=' '; set) 2>&1` in #( + *${as_nl}ac_space=\ *) + # `set' does not quote correctly, so add quotes: double-quote + # substitution turns \\\\ into \\, and sed turns \\ into \. + sed -n \ + "s/'/'\\\\''/g; + s/^\\([_$as_cr_alnum]*_cv_[_$as_cr_alnum]*\\)=\\(.*\\)/\\1='\\2'/p" + ;; #( + *) + # `set' quotes correctly as required by POSIX, so do not add quotes. 
+ sed -n "/^[_$as_cr_alnum]*_cv_[_$as_cr_alnum]*=/p" + ;; + esac | + sort +) | + sed ' + /^ac_cv_env_/b end + t clear + :clear + s/^\([^=]*\)=\(.*[{}].*\)$/test ${\1+y} || &/ + t end + s/^\([^=]*\)=\(.*\)$/\1=${\1=\2}/ + :end' >>confcache +if diff "$cache_file" confcache >/dev/null 2>&1; then :; else + if test -w "$cache_file"; then + if test "x$cache_file" != "x/dev/null"; then + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: updating cache $cache_file" >&5 +printf "%s\n" "$as_me: updating cache $cache_file" >&6;} + if test ! -f "$cache_file" || test -h "$cache_file"; then + cat confcache >"$cache_file" + else + case $cache_file in #( + */* | ?:*) + mv -f confcache "$cache_file"$$ && + mv -f "$cache_file"$$ "$cache_file" ;; #( + *) + mv -f confcache "$cache_file" ;; + esac + fi + fi + else + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: not updating unwritable cache $cache_file" >&5 +printf "%s\n" "$as_me: not updating unwritable cache $cache_file" >&6;} + fi +fi +rm -f confcache + +test "x$prefix" = xNONE && prefix=$ac_default_prefix +# Let make expand exec_prefix. +test "x$exec_prefix" = xNONE && exec_prefix='${prefix}' + +# Transform confdefs.h into DEFS. +# Protect against shell expansion while executing Makefile rules. +# Protect against Makefile macro expansion. +# +# If the first sed substitution is executed (which looks for macros that +# take arguments), then branch to the quote section. Otherwise, +# look for a macro that doesn't take arguments. +ac_script=' +:mline +/\\$/{ + N + s,\\\n,, + b mline +} +t clear +:clear +s/^[ ]*#[ ]*define[ ][ ]*\([^ (][^ (]*([^)]*)\)[ ]*\(.*\)/-D\1=\2/g +t quote +s/^[ ]*#[ ]*define[ ][ ]*\([^ ][^ ]*\)[ ]*\(.*\)/-D\1=\2/g +t quote +b any +:quote +s/[ `~#$^&*(){}\\|;'\''"<>?]/\\&/g +s/\[/\\&/g +s/\]/\\&/g +s/\$/$$/g +H +:any +${ + g + s/^\n// + s/\n/ /g + p +} +' +DEFS=`sed -n "$ac_script" confdefs.h` + + +ac_libobjs= +ac_ltlibobjs= +U= +for ac_i in : $LIBOBJS; do test "x$ac_i" = x: && continue + # 1. 
Remove the extension, and $U if already installed. + ac_script='s/\$U\././;s/\.o$//;s/\.obj$//' + ac_i=`printf "%s\n" "$ac_i" | sed "$ac_script"` + # 2. Prepend LIBOBJDIR. When used with automake>=1.10 LIBOBJDIR + # will be set to the directory where LIBOBJS objects are built. + as_fn_append ac_libobjs " \${LIBOBJDIR}$ac_i\$U.$ac_objext" + as_fn_append ac_ltlibobjs " \${LIBOBJDIR}$ac_i"'$U.lo' +done +LIBOBJS=$ac_libobjs + +LTLIBOBJS=$ac_ltlibobjs + + + +: "${CONFIG_STATUS=./config.status}" +ac_write_fail=0 +ac_clean_files_save=$ac_clean_files +ac_clean_files="$ac_clean_files $CONFIG_STATUS" +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: creating $CONFIG_STATUS" >&5 +printf "%s\n" "$as_me: creating $CONFIG_STATUS" >&6;} +as_write_fail=0 +cat >$CONFIG_STATUS <<_ASEOF || as_write_fail=1 +#! $SHELL +# Generated by $as_me. +# Run this file to recreate the current configuration. +# Compiler output produced by configure, useful for debugging +# configure, is in config.log if it exists. + +debug=false +ac_cs_recheck=false +ac_cs_silent=false + +SHELL=\${CONFIG_SHELL-$SHELL} +export SHELL +_ASEOF +cat >>$CONFIG_STATUS <<\_ASEOF || as_write_fail=1 +## -------------------- ## +## M4sh Initialization. ## +## -------------------- ## + +# Be more Bourne compatible +DUALCASE=1; export DUALCASE # for MKS sh +as_nop=: +if test ${ZSH_VERSION+y} && (emulate sh) >/dev/null 2>&1 +then : + emulate sh + NULLCMD=: + # Pre-4.2 versions of Zsh do word splitting on ${1+"$@"}, which + # is contrary to our usage. Disable this feature. + alias -g '${1+"$@"}'='"$@"' + setopt NO_GLOB_SUBST +else $as_nop + case `(set -o) 2>/dev/null` in #( + *posix*) : + set -o posix ;; #( + *) : + ;; +esac +fi + + + +# Reset variables that may have inherited troublesome values from +# the environment. + +# IFS needs to be set, to space, tab, and newline, in precisely that order. 
+# (If _AS_PATH_WALK were called with IFS unset, it would have the
+# side effect of setting IFS to empty, thus disabling word splitting.)
+# Quoting is to prevent editors from complaining about space-tab.
+as_nl='
+'
+export as_nl
+IFS=" ""	$as_nl"
+
+PS1='$ '
+PS2='> '
+PS4='+ '
+
+# Ensure predictable behavior from utilities with locale-dependent output.
+LC_ALL=C
+export LC_ALL
+LANGUAGE=C
+export LANGUAGE
+
+# We cannot yet rely on "unset" to work, but we need these variables
+# to be unset--not just set to an empty or harmless value--now, to
+# avoid bugs in old shells (e.g. pre-3.0 UWIN ksh). This construct
+# also avoids known problems related to "unset" and subshell syntax
+# in other old shells (e.g. bash 2.01 and pdksh 5.2.14).
+for as_var in BASH_ENV ENV MAIL MAILPATH CDPATH
+do eval test \${$as_var+y} \
+  && ( (unset $as_var) || exit 1) >/dev/null 2>&1 && unset $as_var || :
+done
+
+# Ensure that fds 0, 1, and 2 are open.
+if (exec 3>&0) 2>/dev/null; then :; else exec 0</dev/null; fi
+if (exec 3>&1) 2>/dev/null; then :; else exec 1>/dev/null; fi
+if (exec 3>&2) ; then :; else exec 2>/dev/null; fi
+
+# The user is always right.
+if ${PATH_SEPARATOR+false} :; then
+  PATH_SEPARATOR=:
+  (PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 && {
+    (PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 ||
+      PATH_SEPARATOR=';'
+  }
+fi
+
+
+# Find who we are. Look in the path if we contain no directory separator.
+as_myself=
+case $0 in #((
+  *[\\/]* ) as_myself=$0 ;;
+  *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  case $as_dir in #(((
+    '') as_dir=./ ;;
+    */) ;;
+    *) as_dir=$as_dir/ ;;
+  esac
+    test -r "$as_dir$0" && as_myself=$as_dir$0 && break
+  done
+IFS=$as_save_IFS
+
+     ;;
+esac
+# We did not find ourselves, most probably we were run as `sh COMMAND'
+# in which case we are not to be found in the path.
+if test "x$as_myself" = x; then
+  as_myself=$0
+fi
+if test ! 
-f "$as_myself"; then + printf "%s\n" "$as_myself: error: cannot find myself; rerun with an absolute file name" >&2 + exit 1 +fi + + + +# as_fn_error STATUS ERROR [LINENO LOG_FD] +# ---------------------------------------- +# Output "`basename $0`: error: ERROR" to stderr. If LINENO and LOG_FD are +# provided, also output the error to LOG_FD, referencing LINENO. Then exit the +# script with STATUS, using 1 if that was 0. +as_fn_error () +{ + as_status=$1; test $as_status -eq 0 && as_status=1 + if test "$4"; then + as_lineno=${as_lineno-"$3"} as_lineno_stack=as_lineno_stack=$as_lineno_stack + printf "%s\n" "$as_me:${as_lineno-$LINENO}: error: $2" >&$4 + fi + printf "%s\n" "$as_me: error: $2" >&2 + as_fn_exit $as_status +} # as_fn_error + + + +# as_fn_set_status STATUS +# ----------------------- +# Set $? to STATUS, without forking. +as_fn_set_status () +{ + return $1 +} # as_fn_set_status + +# as_fn_exit STATUS +# ----------------- +# Exit the shell with STATUS, even in a "trap 0" or "set -e" context. +as_fn_exit () +{ + set +e + as_fn_set_status $1 + exit $1 +} # as_fn_exit + +# as_fn_unset VAR +# --------------- +# Portably unset VAR. +as_fn_unset () +{ + { eval $1=; unset $1;} +} +as_unset=as_fn_unset + +# as_fn_append VAR VALUE +# ---------------------- +# Append the text in VALUE to the end of the definition contained in VAR. Take +# advantage of any shell optimizations that allow amortized linear growth over +# repeated appends, instead of the typical quadratic growth present in naive +# implementations. +if (eval "as_var=1; as_var+=2; test x\$as_var = x12") 2>/dev/null +then : + eval 'as_fn_append () + { + eval $1+=\$2 + }' +else $as_nop + as_fn_append () + { + eval $1=\$$1\$2 + } +fi # as_fn_append + +# as_fn_arith ARG... +# ------------------ +# Perform arithmetic evaluation on the ARGs, and store the result in the +# global $as_val. Take advantage of shells that can avoid forks. The arguments +# must be portable across $(()) and expr. 
+if (eval "test \$(( 1 + 1 )) = 2") 2>/dev/null +then : + eval 'as_fn_arith () + { + as_val=$(( $* )) + }' +else $as_nop + as_fn_arith () + { + as_val=`expr "$@" || test $? -eq 1` + } +fi # as_fn_arith + + +if expr a : '\(a\)' >/dev/null 2>&1 && + test "X`expr 00001 : '.*\(...\)'`" = X001; then + as_expr=expr +else + as_expr=false +fi + +if (basename -- /) >/dev/null 2>&1 && test "X`basename -- / 2>&1`" = "X/"; then + as_basename=basename +else + as_basename=false +fi + +if (as_dir=`dirname -- /` && test "X$as_dir" = X/) >/dev/null 2>&1; then + as_dirname=dirname +else + as_dirname=false +fi + +as_me=`$as_basename -- "$0" || +$as_expr X/"$0" : '.*/\([^/][^/]*\)/*$' \| \ + X"$0" : 'X\(//\)$' \| \ + X"$0" : 'X\(/\)' \| . 2>/dev/null || +printf "%s\n" X/"$0" | + sed '/^.*\/\([^/][^/]*\)\/*$/{ + s//\1/ + q + } + /^X\/\(\/\/\)$/{ + s//\1/ + q + } + /^X\/\(\/\).*/{ + s//\1/ + q + } + s/.*/./; q'` + +# Avoid depending upon Character Ranges. +as_cr_letters='abcdefghijklmnopqrstuvwxyz' +as_cr_LETTERS='ABCDEFGHIJKLMNOPQRSTUVWXYZ' +as_cr_Letters=$as_cr_letters$as_cr_LETTERS +as_cr_digits='0123456789' +as_cr_alnum=$as_cr_Letters$as_cr_digits + + +# Determine whether it's possible to make 'echo' print without a newline. +# These variables are no longer used directly by Autoconf, but are AC_SUBSTed +# for compatibility with existing Makefiles. +ECHO_C= ECHO_N= ECHO_T= +case `echo -n x` in #((((( +-n*) + case `echo 'xy\c'` in + *c*) ECHO_T=' ';; # ECHO_T is single tab character. + xy) ECHO_C='\c';; + *) echo `echo ksh88 bug on AIX 6.1` > /dev/null + ECHO_T=' ';; + esac;; +*) + ECHO_N='-n';; +esac + +# For backward compatibility with old third-party macros, we provide +# the shell variables $as_echo and $as_echo_n. New code should use +# AS_ECHO(["message"]) and AS_ECHO_N(["message"]), respectively. 
+as_echo='printf %s\n' +as_echo_n='printf %s' + +rm -f conf$$ conf$$.exe conf$$.file +if test -d conf$$.dir; then + rm -f conf$$.dir/conf$$.file +else + rm -f conf$$.dir + mkdir conf$$.dir 2>/dev/null +fi +if (echo >conf$$.file) 2>/dev/null; then + if ln -s conf$$.file conf$$ 2>/dev/null; then + as_ln_s='ln -s' + # ... but there are two gotchas: + # 1) On MSYS, both `ln -s file dir' and `ln file dir' fail. + # 2) DJGPP < 2.04 has no symlinks; `ln -s' creates a wrapper executable. + # In both cases, we have to default to `cp -pR'. + ln -s conf$$.file conf$$.dir 2>/dev/null && test ! -f conf$$.exe || + as_ln_s='cp -pR' + elif ln conf$$.file conf$$ 2>/dev/null; then + as_ln_s=ln + else + as_ln_s='cp -pR' + fi +else + as_ln_s='cp -pR' +fi +rm -f conf$$ conf$$.exe conf$$.dir/conf$$.file conf$$.file +rmdir conf$$.dir 2>/dev/null + + +# as_fn_mkdir_p +# ------------- +# Create "$as_dir" as a directory, including parents if necessary. +as_fn_mkdir_p () +{ + + case $as_dir in #( + -*) as_dir=./$as_dir;; + esac + test -d "$as_dir" || eval $as_mkdir_p || { + as_dirs= + while :; do + case $as_dir in #( + *\'*) as_qdir=`printf "%s\n" "$as_dir" | sed "s/'/'\\\\\\\\''/g"`;; #'( + *) as_qdir=$as_dir;; + esac + as_dirs="'$as_qdir' $as_dirs" + as_dir=`$as_dirname -- "$as_dir" || +$as_expr X"$as_dir" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ + X"$as_dir" : 'X\(//\)[^/]' \| \ + X"$as_dir" : 'X\(//\)$' \| \ + X"$as_dir" : 'X\(/\)' \| . 2>/dev/null || +printf "%s\n" X"$as_dir" | + sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ + s//\1/ + q + } + /^X\(\/\/\)[^/].*/{ + s//\1/ + q + } + /^X\(\/\/\)$/{ + s//\1/ + q + } + /^X\(\/\).*/{ + s//\1/ + q + } + s/.*/./; q'` + test -d "$as_dir" && break + done + test -z "$as_dirs" || eval "mkdir $as_dirs" + } || test -d "$as_dir" || as_fn_error $? "cannot create directory $as_dir" + + +} # as_fn_mkdir_p +if mkdir -p . 
2>/dev/null; then + as_mkdir_p='mkdir -p "$as_dir"' +else + test -d ./-p && rmdir ./-p + as_mkdir_p=false +fi + + +# as_fn_executable_p FILE +# ----------------------- +# Test if FILE is an executable regular file. +as_fn_executable_p () +{ + test -f "$1" && test -x "$1" +} # as_fn_executable_p +as_test_x='test -x' +as_executable_p=as_fn_executable_p + +# Sed expression to map a string onto a valid CPP name. +as_tr_cpp="eval sed 'y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g'" + +# Sed expression to map a string onto a valid variable name. +as_tr_sh="eval sed 'y%*+%pp%;s%[^_$as_cr_alnum]%_%g'" + + +exec 6>&1 +## ----------------------------------- ## +## Main body of $CONFIG_STATUS script. ## +## ----------------------------------- ## +_ASEOF +test $as_write_fail = 0 && chmod +x $CONFIG_STATUS || ac_write_fail=1 + +cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 +# Save the log message, to keep $0 and so on meaningful, and to +# report actual input values of CONFIG_FILES etc. instead of their +# values after options handling. +ac_log=" +This file was extended by $as_me, which was +generated by GNU Autoconf 2.71. Invocation command line was + + CONFIG_FILES = $CONFIG_FILES + CONFIG_HEADERS = $CONFIG_HEADERS + CONFIG_LINKS = $CONFIG_LINKS + CONFIG_COMMANDS = $CONFIG_COMMANDS + $ $0 $@ + +on `(hostname || uname -n) 2>/dev/null | sed 1q` +" + +_ACEOF + +case $ac_config_files in *" +"*) set x $ac_config_files; shift; ac_config_files=$*;; +esac + + + +cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 +# Files that config.status was made for. +config_files="$ac_config_files" + +_ACEOF + +cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 +ac_cs_usage="\ +\`$as_me' instantiates files and other configuration actions +from templates according to the current configuration. Unless the files +and actions are specified as TAGs, all are instantiated by default. + +Usage: $0 [OPTION]... [TAG]... 
+ + -h, --help print this help, then exit + -V, --version print version number and configuration settings, then exit + --config print configuration, then exit + -q, --quiet, --silent + do not print progress messages + -d, --debug don't remove temporary files + --recheck update $as_me by reconfiguring in the same conditions + --file=FILE[:TEMPLATE] + instantiate the configuration file FILE + +Configuration files: +$config_files + +Report bugs to the package provider." + +_ACEOF +ac_cs_config=`printf "%s\n" "$ac_configure_args" | sed "$ac_safe_unquote"` +ac_cs_config_escaped=`printf "%s\n" "$ac_cs_config" | sed "s/^ //; s/'/'\\\\\\\\''/g"` +cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 +ac_cs_config='$ac_cs_config_escaped' +ac_cs_version="\\ +config.status +configured by $0, generated by GNU Autoconf 2.71, + with options \\"\$ac_cs_config\\" + +Copyright (C) 2021 Free Software Foundation, Inc. +This config.status script is free software; the Free Software Foundation +gives unlimited permission to copy, distribute and modify it." + +ac_pwd='$ac_pwd' +srcdir='$srcdir' +test -n "\$AWK" || AWK=awk +_ACEOF + +cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 +# The default lists apply if the user does not specify any file. +ac_need_defaults=: +while test $# != 0 +do + case $1 in + --*=?*) + ac_option=`expr "X$1" : 'X\([^=]*\)='` + ac_optarg=`expr "X$1" : 'X[^=]*=\(.*\)'` + ac_shift=: + ;; + --*=) + ac_option=`expr "X$1" : 'X\([^=]*\)='` + ac_optarg= + ac_shift=: + ;; + *) + ac_option=$1 + ac_optarg=$2 + ac_shift=shift + ;; + esac + + case $ac_option in + # Handling of the options. 
+ -recheck | --recheck | --rechec | --reche | --rech | --rec | --re | --r) + ac_cs_recheck=: ;; + --version | --versio | --versi | --vers | --ver | --ve | --v | -V ) + printf "%s\n" "$ac_cs_version"; exit ;; + --config | --confi | --conf | --con | --co | --c ) + printf "%s\n" "$ac_cs_config"; exit ;; + --debug | --debu | --deb | --de | --d | -d ) + debug=: ;; + --file | --fil | --fi | --f ) + $ac_shift + case $ac_optarg in + *\'*) ac_optarg=`printf "%s\n" "$ac_optarg" | sed "s/'/'\\\\\\\\''/g"` ;; + '') as_fn_error $? "missing file argument" ;; + esac + as_fn_append CONFIG_FILES " '$ac_optarg'" + ac_need_defaults=false;; + --he | --h | --help | --hel | -h ) + printf "%s\n" "$ac_cs_usage"; exit ;; + -q | -quiet | --quiet | --quie | --qui | --qu | --q \ + | -silent | --silent | --silen | --sile | --sil | --si | --s) + ac_cs_silent=: ;; + + # This is an error. + -*) as_fn_error $? "unrecognized option: \`$1' +Try \`$0 --help' for more information." ;; + + *) as_fn_append ac_config_targets " $1" + ac_need_defaults=false ;; + + esac + shift +done + +ac_configure_extra_args= + +if $ac_cs_silent; then + exec 6>/dev/null + ac_configure_extra_args="$ac_configure_extra_args --silent" +fi + +_ACEOF +cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 +if \$ac_cs_recheck; then + set X $SHELL '$0' $ac_configure_args \$ac_configure_extra_args --no-create --no-recursion + shift + \printf "%s\n" "running CONFIG_SHELL=$SHELL \$*" >&6 + CONFIG_SHELL='$SHELL' + export CONFIG_SHELL + exec "\$@" +fi + +_ACEOF +cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 +exec 5>>config.log +{ + echo + sed 'h;s/./-/g;s/^.../## /;s/...$/ ##/;p;x;p;x' <<_ASBOX +## Running $as_me. ## +_ASBOX + printf "%s\n" "$ac_log" +} >&5 + +_ACEOF +cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 +_ACEOF + +cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 + +# Handling of arguments. 
+for ac_config_target in $ac_config_targets +do + case $ac_config_target in + "dysh_start.sh") CONFIG_FILES="$CONFIG_FILES dysh_start.sh" ;; + + *) as_fn_error $? "invalid argument: \`$ac_config_target'" "$LINENO" 5;; + esac +done + + +# If the user did not use the arguments to specify the items to instantiate, +# then the envvar interface is used. Set only those that are not. +# We use the long form for the default assignment because of an extremely +# bizarre bug on SunOS 4.1.3. +if $ac_need_defaults; then + test ${CONFIG_FILES+y} || CONFIG_FILES=$config_files +fi + +# Have a temporary directory for convenience. Make it in the build tree +# simply because there is no reason against having it here, and in addition, +# creating and moving files from /tmp can sometimes cause problems. +# Hook for its removal unless debugging. +# Note that there is a small window in which the directory will not be cleaned: +# after its creation but before its name has been assigned to `$tmp'. +$debug || +{ + tmp= ac_tmp= + trap 'exit_status=$? + : "${ac_tmp:=$tmp}" + { test ! -d "$ac_tmp" || rm -fr "$ac_tmp"; } && exit $exit_status +' 0 + trap 'as_fn_exit 1' 1 2 13 15 +} +# Create a (secure) tmp directory for tmp files. + +{ + tmp=`(umask 077 && mktemp -d "./confXXXXXX") 2>/dev/null` && + test -d "$tmp" +} || +{ + tmp=./conf$$-$RANDOM + (umask 077 && mkdir "$tmp") +} || as_fn_error $? "cannot create a temporary directory in ." "$LINENO" 5 +ac_tmp=$tmp + +# Set up the scripts for CONFIG_FILES section. +# No need to generate them if there are no CONFIG_FILES. +# This happens for instance with `./config.status config.h'. +if test -n "$CONFIG_FILES"; then + + +ac_cr=`echo X | tr X '\015'` +# On cygwin, bash can eat \r inside `` if the user requested igncr. +# But we know of no other shell where ac_cr would be empty at this +# point, so we can use a bashism as a fallback. 
+if test "x$ac_cr" = x; then
+  eval ac_cr=\$\'\\r\'
+fi
+ac_cs_awk_cr=`$AWK 'BEGIN { print "a\rb" }' </dev/null 2>/dev/null`
+if test "$ac_cs_awk_cr" = "a${ac_cr}b"; then
+  ac_cs_awk_cr='\\r'
+else
+  ac_cs_awk_cr=$ac_cr
+fi
+
+echo 'BEGIN {' >"$ac_tmp/subs1.awk" &&
+_ACEOF
+
+
+{
+  echo "cat >conf$$subs.awk <<_ACEOF" &&
+  echo "$ac_subst_vars" | sed 's/.*/&!$&$ac_delim/' &&
+  echo "_ACEOF"
+} >conf$$subs.sh ||
+  as_fn_error $? "could not make $CONFIG_STATUS" "$LINENO" 5
+ac_delim_num=`echo "$ac_subst_vars" | grep -c '^'`
+ac_delim='%!_!# '
+for ac_last_try in false false false false false :; do
+  . ./conf$$subs.sh ||
+    as_fn_error $? "could not make $CONFIG_STATUS" "$LINENO" 5
+
+  ac_delim_n=`sed -n "s/.*$ac_delim\$/X/p" conf$$subs.awk | grep -c X`
+  if test $ac_delim_n = $ac_delim_num; then
+    break
+  elif $ac_last_try; then
+    as_fn_error $? "could not make $CONFIG_STATUS" "$LINENO" 5
+  else
+    ac_delim="$ac_delim!$ac_delim _$ac_delim!! "
+  fi
+done
+rm -f conf$$subs.sh
+
+cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
+cat >>"\$ac_tmp/subs1.awk" <<\\_ACAWK &&
+_ACEOF
+sed -n '
+h
+s/^/S["/; s/!.*/"]=/
+p
+g
+s/^[^!]*!//
+:repl
+t repl
+s/'"$ac_delim"'$//
+t delim
+:nl
+h
+s/\(.\{148\}\)..*/\1/
+t more1
+s/["\\]/\\&/g; s/^/"/; s/$/\\n"\\/
+p
+n
+b repl
+:more1
+s/["\\]/\\&/g; s/^/"/; s/$/"\\/
+p
+g
+s/.\{148\}//
+t nl
+:delim
+h
+s/\(.\{148\}\)..*/\1/
+t more2
+s/["\\]/\\&/g; s/^/"/; s/$/"/
+p
+b
+:more2
+s/["\\]/\\&/g; s/^/"/; s/$/"\\/
+p
+g
+s/.\{148\}//
+t delim
+' <conf$$subs.awk | sed '
+/^[^""]/{
+  N
+  s/\n//
+}
+' >>$CONFIG_STATUS || ac_write_fail=1
+rm -f conf$$subs.awk
+cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
+_ACAWK
+cat >>"\$ac_tmp/subs1.awk" <<_ACAWK &&
+  for (key in S) S_is_set[key] = 1
+  FS = ""
+
+}
+{
+  line = $ 0
+  nfields = split(line, field, "@")
+  substed = 0
+  len = length(field[1])
+  for (i = 2; i < nfields; i++) {
+    key = field[i]
+    keylen = length(key)
+    if (S_is_set[key]) {
+      value = S[key]
+      line = substr(line, 1, len) "" value "" substr(line, len + keylen + 3)
+      len += length(value) + 
length(field[++i]) + substed = 1 + } else + len += 1 + keylen + } + + print line +} + +_ACAWK +_ACEOF +cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 +if sed "s/$ac_cr//" < /dev/null > /dev/null 2>&1; then + sed "s/$ac_cr\$//; s/$ac_cr/$ac_cs_awk_cr/g" +else + cat +fi < "$ac_tmp/subs1.awk" > "$ac_tmp/subs.awk" \ + || as_fn_error $? "could not setup config files machinery" "$LINENO" 5 +_ACEOF + +# VPATH may cause trouble with some makes, so we remove sole $(srcdir), +# ${srcdir} and @srcdir@ entries from VPATH if srcdir is ".", strip leading and +# trailing colons and then remove the whole line if VPATH becomes empty +# (actually we leave an empty line to preserve line numbers). +if test "x$srcdir" = x.; then + ac_vpsub='/^[ ]*VPATH[ ]*=[ ]*/{ +h +s/// +s/^/:/ +s/[ ]*$/:/ +s/:\$(srcdir):/:/g +s/:\${srcdir}:/:/g +s/:@srcdir@:/:/g +s/^:*// +s/:*$// +x +s/\(=[ ]*\).*/\1/ +G +s/\n// +s/^[^=]*=[ ]*$// +}' +fi + +cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 +fi # test -n "$CONFIG_FILES" + + +eval set X " :F $CONFIG_FILES " +shift +for ac_tag +do + case $ac_tag in + :[FHLC]) ac_mode=$ac_tag; continue;; + esac + case $ac_mode$ac_tag in + :[FHL]*:*);; + :L* | :C*:*) as_fn_error $? "invalid tag \`$ac_tag'" "$LINENO" 5;; + :[FH]-) ac_tag=-:-;; + :[FH]*) ac_tag=$ac_tag:$ac_tag.in;; + esac + ac_save_IFS=$IFS + IFS=: + set x $ac_tag + IFS=$ac_save_IFS + shift + ac_file=$1 + shift + + case $ac_mode in + :L) ac_source=$1;; + :[FH]) + ac_file_inputs= + for ac_f + do + case $ac_f in + -) ac_f="$ac_tmp/stdin";; + *) # Look for the file first in the build tree, then in the source tree + # (if the path is not absolute). The absolute path cannot be DOS-style, + # because $ac_f cannot contain `:'. 
+ test -f "$ac_f" || + case $ac_f in + [\\/$]*) false;; + *) test -f "$srcdir/$ac_f" && ac_f="$srcdir/$ac_f";; + esac || + as_fn_error 1 "cannot find input file: \`$ac_f'" "$LINENO" 5;; + esac + case $ac_f in *\'*) ac_f=`printf "%s\n" "$ac_f" | sed "s/'/'\\\\\\\\''/g"`;; esac + as_fn_append ac_file_inputs " '$ac_f'" + done + + # Let's still pretend it is `configure' which instantiates (i.e., don't + # use $as_me), people would be surprised to read: + # /* config.h. Generated by config.status. */ + configure_input='Generated from '` + printf "%s\n" "$*" | sed 's|^[^:]*/||;s|:[^:]*/|, |g' + `' by configure.' + if test x"$ac_file" != x-; then + configure_input="$ac_file. $configure_input" + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: creating $ac_file" >&5 +printf "%s\n" "$as_me: creating $ac_file" >&6;} + fi + # Neutralize special characters interpreted by sed in replacement strings. + case $configure_input in #( + *\&* | *\|* | *\\* ) + ac_sed_conf_input=`printf "%s\n" "$configure_input" | + sed 's/[\\\\&|]/\\\\&/g'`;; #( + *) ac_sed_conf_input=$configure_input;; + esac + + case $ac_tag in + *:-:* | *:-) cat >"$ac_tmp/stdin" \ + || as_fn_error $? "could not create $ac_file" "$LINENO" 5 ;; + esac + ;; + esac + + ac_dir=`$as_dirname -- "$ac_file" || +$as_expr X"$ac_file" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ + X"$ac_file" : 'X\(//\)[^/]' \| \ + X"$ac_file" : 'X\(//\)$' \| \ + X"$ac_file" : 'X\(/\)' \| . 2>/dev/null || +printf "%s\n" X"$ac_file" | + sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ + s//\1/ + q + } + /^X\(\/\/\)[^/].*/{ + s//\1/ + q + } + /^X\(\/\/\)$/{ + s//\1/ + q + } + /^X\(\/\).*/{ + s//\1/ + q + } + s/.*/./; q'` + as_dir="$ac_dir"; as_fn_mkdir_p + ac_builddir=. + +case "$ac_dir" in +.) ac_dir_suffix= ac_top_builddir_sub=. ac_top_build_prefix= ;; +*) + ac_dir_suffix=/`printf "%s\n" "$ac_dir" | sed 's|^\.[\\/]||'` + # A ".." for each directory in $ac_dir_suffix. 
+ ac_top_builddir_sub=`printf "%s\n" "$ac_dir_suffix" | sed 's|/[^\\/]*|/..|g;s|/||'` + case $ac_top_builddir_sub in + "") ac_top_builddir_sub=. ac_top_build_prefix= ;; + *) ac_top_build_prefix=$ac_top_builddir_sub/ ;; + esac ;; +esac +ac_abs_top_builddir=$ac_pwd +ac_abs_builddir=$ac_pwd$ac_dir_suffix +# for backward compatibility: +ac_top_builddir=$ac_top_build_prefix + +case $srcdir in + .) # We are building in place. + ac_srcdir=. + ac_top_srcdir=$ac_top_builddir_sub + ac_abs_top_srcdir=$ac_pwd ;; + [\\/]* | ?:[\\/]* ) # Absolute name. + ac_srcdir=$srcdir$ac_dir_suffix; + ac_top_srcdir=$srcdir + ac_abs_top_srcdir=$srcdir ;; + *) # Relative name. + ac_srcdir=$ac_top_build_prefix$srcdir$ac_dir_suffix + ac_top_srcdir=$ac_top_build_prefix$srcdir + ac_abs_top_srcdir=$ac_pwd/$srcdir ;; +esac +ac_abs_srcdir=$ac_abs_top_srcdir$ac_dir_suffix + + + case $ac_mode in + :F) + # + # CONFIG_FILE + # + +_ACEOF + +cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 +# If the template does not know about datarootdir, expand it. +# FIXME: This hack should be removed a few years after 2.60. +ac_datarootdir_hack=; ac_datarootdir_seen= +ac_sed_dataroot=' +/datarootdir/ { + p + q +} +/@datadir@/p +/@docdir@/p +/@infodir@/p +/@localedir@/p +/@mandir@/p' +case `eval "sed -n \"\$ac_sed_dataroot\" $ac_file_inputs"` in +*datarootdir*) ac_datarootdir_seen=yes;; +*@datadir@*|*@docdir@*|*@infodir@*|*@localedir@*|*@mandir@*) + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: WARNING: $ac_file_inputs seems to ignore the --datarootdir setting" >&5 +printf "%s\n" "$as_me: WARNING: $ac_file_inputs seems to ignore the --datarootdir setting" >&2;} +_ACEOF +cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 + ac_datarootdir_hack=' + s&@datadir@&$datadir&g + s&@docdir@&$docdir&g + s&@infodir@&$infodir&g + s&@localedir@&$localedir&g + s&@mandir@&$mandir&g + s&\\\${datarootdir}&$datarootdir&g' ;; +esac +_ACEOF + +# Neutralize VPATH when `$srcdir' = `.'. +# Shell code in configure.ac might set extrasub. 
+# FIXME: do we really want to maintain this feature? +cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 +ac_sed_extra="$ac_vpsub +$extrasub +_ACEOF +cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 +:t +/@[a-zA-Z_][a-zA-Z_0-9]*@/!b +s|@configure_input@|$ac_sed_conf_input|;t t +s&@top_builddir@&$ac_top_builddir_sub&;t t +s&@top_build_prefix@&$ac_top_build_prefix&;t t +s&@srcdir@&$ac_srcdir&;t t +s&@abs_srcdir@&$ac_abs_srcdir&;t t +s&@top_srcdir@&$ac_top_srcdir&;t t +s&@abs_top_srcdir@&$ac_abs_top_srcdir&;t t +s&@builddir@&$ac_builddir&;t t +s&@abs_builddir@&$ac_abs_builddir&;t t +s&@abs_top_builddir@&$ac_abs_top_builddir&;t t +$ac_datarootdir_hack +" +eval sed \"\$ac_sed_extra\" "$ac_file_inputs" | $AWK -f "$ac_tmp/subs.awk" \ + >$ac_tmp/out || as_fn_error $? "could not create $ac_file" "$LINENO" 5 + +test -z "$ac_datarootdir_hack$ac_datarootdir_seen" && + { ac_out=`sed -n '/\${datarootdir}/p' "$ac_tmp/out"`; test -n "$ac_out"; } && + { ac_out=`sed -n '/^[ ]*datarootdir[ ]*:*=/p' \ + "$ac_tmp/out"`; test -z "$ac_out"; } && + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: WARNING: $ac_file contains a reference to the variable \`datarootdir' +which seems to be undefined. Please make sure it is defined" >&5 +printf "%s\n" "$as_me: WARNING: $ac_file contains a reference to the variable \`datarootdir' +which seems to be undefined. Please make sure it is defined" >&2;} + + rm -f "$ac_tmp/stdin" + case $ac_file in + -) cat "$ac_tmp/out" && rm -f "$ac_tmp/out";; + *) rm -f "$ac_file" && mv "$ac_tmp/out" "$ac_file";; + esac \ + || as_fn_error $? "could not create $ac_file" "$LINENO" 5 + ;; + + + + esac + +done # for ac_tag + + +as_fn_exit 0 +_ACEOF +ac_clean_files=$ac_clean_files_save + +test $ac_write_fail = 0 || + as_fn_error $? "write failure creating $CONFIG_STATUS" "$LINENO" 5 + + +# configure is writing to config.log, and then calls config.status. +# config.status does its own redirection, appending to config.log. 
+# Unfortunately, on DOS this fails, as config.log is still kept open
+# by configure, so config.status won't be able to write to it; its
+# output is simply discarded. So we exec the FD to /dev/null,
+# effectively closing config.log, so it can be properly (re)opened and
+# appended to by config.status. When coming back to configure, we
+# need to make the FD available again.
+if test "$no_create" != yes; then
+  ac_cs_success=:
+  ac_config_status_args=
+  test "$silent" = yes &&
+    ac_config_status_args="$ac_config_status_args --quiet"
+  exec 5>/dev/null
+  $SHELL $CONFIG_STATUS $ac_config_status_args || ac_cs_success=false
+  exec 5>>config.log
+  # Use ||, not &&, to avoid exiting from the if with $? = 1, which
+  # would make configure fail if this is the last instruction.
+  $ac_cs_success || as_fn_exit 1
+fi
+if test -n "$ac_unrecognized_opts" && test "$enable_option_checking" != no; then
+  { printf "%s\n" "$as_me:${as_lineno-$LINENO}: WARNING: unrecognized options: $ac_unrecognized_opts" >&5
+printf "%s\n" "$as_me: WARNING: unrecognized options: $ac_unrecognized_opts" >&2;}
+fi
diff --git a/configure.ac b/configure.ac
new file mode 100644
index 00000000..b1669941
--- /dev/null
+++ b/configure.ac
@@ -0,0 +1,52 @@
+dnl Process this file with autoconf to produce a new configure script if it has changed.
+dnl ------------------------------------------------------------------------------------
+
+AC_INIT
+AC_CONFIG_SRCDIR([README.md])
+
+AC_MSG_CHECKING([DYSH config])
+echo ""
+
+DYSH="`pwd`"
+AC_SUBST(DYSH)
+echo Using DYSH=$DYSH
+
+EDIT_MSG="Do not edit this file, it has been generated via configure in DYSH"
+AC_SUBST(EDIT_MSG)
+
+
+
+dnl Override with: --with-data=
+dnl Tested in this order:
+dnl   $DYSH_DATA            Environment variable, if already set
+dnl   ~/GBT/data            Suggested private sandboxes
+dnl   /home/gbt             At the GBT site
+dnl   /tmp                  Fallback when nothing else is found
+dnl   (keep this list in sync with the test chain below)
+
+if test ! 
-z $DYSH_DATA ; then + DYSH_DATA=$DYSH_DATA +elif test -d ~/GBT/data ; then + DYSH_DATA=~/GBT/data +elif test -d /home/gbt ; then + DYSH_DATA=/home/gbt +else + echo "Could not find the dysh_data, use --with-data=" + DYSH_DATA=/tmp +fi +data_default=$DYSH_DATA + +AC_ARG_WITH(data, + [ --with-data=DYSH_DATA Root directory of DYSH_DATA (or set $DYSH_DATA)], + dysh_data="$withval", + dysh_data="$data_default") +DYSH_DATA=$dysh_data +AC_SUBST(DYSH_DATA) + +echo "Using DYSH_DATA=$DYSH_DATA" + + + +# put these in a single AC_OUTPUT command so config.status is not called multiple times. +AC_CONFIG_FILES([dysh_start.sh]) +AC_OUTPUT diff --git a/docs/Makefile b/docs/Makefile new file mode 100644 index 00000000..514daac3 --- /dev/null +++ b/docs/Makefile @@ -0,0 +1,23 @@ +# Minimal makefile for Sphinx documentation +# + +# You can set these variables from the command line. +SPHINXOPTS = +SPHINXBUILD = python -m sphinx +SPHINXPROJ = dysh +SOURCEDIR = ./source +BUILDDIR = ./_build + +# Put it first so that "make" without argument is like "make help". +help: + @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + +.PHONY: help Makefile + +# Catch-all target: route all unknown targets to Sphinx using the new +# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). +%: Makefile + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + +dev: + sphinx-autobuild "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) diff --git a/docs/source/conf.py b/docs/source/conf.py index 2805c829..1016220b 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -164,13 +164,15 @@ # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). 
-latex_documents = [( - master_doc, - "dysh.tex", - "dysh Documentation", - ["Marc Pound", "Victoria Catlett", "Peter Teuben"], - "manual", -)] +latex_documents = [ + ( + master_doc, + "dysh.tex", + "dysh Documentation", + ["Marc Pound", "Victoria Catlett", "Peter Teuben"], + "manual", + ) +] # -- Options for manual page output ------------------------------------------ @@ -185,15 +187,17 @@ # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) -texinfo_documents = [( - master_doc, - "dysh", - "dysh Documentation", - author, - "dysh", - "One line description of project.", - "Miscellaneous", -)] +texinfo_documents = [ + ( + master_doc, + "dysh", + "dysh Documentation", + author, + "dysh", + "One line description of project.", + "Miscellaneous", + ) +] # The reST default role (used for this markup: `text`) to use for all # documents. Set to the "smart" one. diff --git a/docs/source/examples/positionswitch.rst b/docs/source/examples/positionswitch.rst index 2040f67d..b4e91a30 100644 --- a/docs/source/examples/positionswitch.rst +++ b/docs/source/examples/positionswitch.rst @@ -146,9 +146,6 @@ You can also print a concise (or verbose if you choose `verbose=True`) summary : Retrieve a scan and its partner ON or OFF, selecting an IF number and polarization, then calibrate it -.. note:: - For each scan in the summary `dysh` shows the mean of the VELOCITY, RESTFREQ, DOPFREQ, AZIMUTH and ELEVATIO columns, while `GBTIDL` reports the value of the first integration for a scan. If you use `verbose=True` in `dysh` you get all the integrations. - .. 
code:: python >>> psscan = sdfits.getps(152, ifnum=0, plnum=0) diff --git a/docs/source/examples/subbeamnod.rst b/docs/source/examples/subbeamnod.rst index 88f75b54..969371f7 100644 --- a/docs/source/examples/subbeamnod.rst +++ b/docs/source/examples/subbeamnod.rst @@ -71,10 +71,6 @@ You can also print a concise (or verbose if you choose `verbose=True`) :meth:`~d The SubBeamNod scans are 43, 46, and 54. Retrieve and calibrate a SubBeamNod scan, then plot it -.. note:: - For each scan in the summary `dysh` shows the mean of the VELOCITY, RESTFREQ, DOPFREQ, AZIMUTH and ELEVATIO columns, while `GBTIDL` reports the value of the first integration for a scan. If you use `verbo - se=True` in `dysh` you get all the integrations. - .. code:: python >>> sbn = sdfits.subbeamnod(scan=43, fdnum=1, ifnum=0, weights='tsys') diff --git a/docs/source/examples/totalpower.rst b/docs/source/examples/totalpower.rst index 0d7285cb..c56e0760 100644 --- a/docs/source/examples/totalpower.rst +++ b/docs/source/examples/totalpower.rst @@ -54,10 +54,6 @@ You can also print a concise (or verbose if you choose `verbose=True`) summary : Retrieve a scan, selecting and IF number and polarization -.. note:: - For each scan in the summary `dysh` shows the mean of the VELOCITY, RESTFREQ, DOPFREQ, AZIMUTH and ELEVATIO columns, while `GBTIDL` reports the value of the first integration for a scan. If you use `verbo - se=True` in `dysh` you get all the integrations. - .. code:: python >>> tpscan = sdfits.gettp(152, ifnum=0, plnum=0) diff --git a/docs/source/for_beta_testers/beta_testing.rst b/docs/source/for_beta_testers/beta_testing.rst index 40f9a028..e7138af3 100644 --- a/docs/source/for_beta_testers/beta_testing.rst +++ b/docs/source/for_beta_testers/beta_testing.rst @@ -40,7 +40,6 @@ Here are examples of feedback on GitHub * reporting an issue, `Issue #88 `_ * requesting a modification, `Issue #78 `_ -.. 
_beta-install Installing `dysh` ================= diff --git a/docs/source/for_developers/doc_standards.rst b/docs/source/for_developers/doc_standards.rst index 05827250..200f8580 100644 --- a/docs/source/for_developers/doc_standards.rst +++ b/docs/source/for_developers/doc_standards.rst @@ -13,24 +13,29 @@ Here are the steps to set up Sphinx autobuilds so that you can check your docume $ hatch shell -2. Next, tell hatch to run the docs. The docs will be published at `http://127.0.0.1:8000/`. +2. Next, copy the environment file template. .. code-block:: bash - (dysh) $ hatch run docs + (dysh) $ cp .env.template .env -3. If you would like the docs to publish at a specific host and port, such as `http://thales:9876`, then add the appropriate flags: +3. Add values for ``DOCS_ROOT``, ``DOCS_HOST``, and ``DOCS_PORT`` in `.env` +4. Start the autobuild .. code-block:: bash - (dysh) $ hatch run docs --host thales --port 9876 + (dysh) $ source .env + (dysh) $ startdocs -4. You may now make changes in the `dysh/docs/` directory and see the live changes at the appropriate URL in your browser. To close the server, simply `CTRL+C`. +5. Go to `http://{$DOCS_HOST}:{$DOCS_PORT}` in a web browser. You should now see the documentation with live edits as you save changes. + +.. note:: + Do not commit the `.env` file to `git`. Docstring Format ================ -All Python functions must contain a docstring which follows the NumPy convention. You can learn more about this convention here: https://numpydoc.readthedocs.io/en/latest/format.html +Gotta format the docstrings Mermaid Diagrams ================ diff --git a/docs/source/for_developers/install.rst b/docs/source/for_developers/install.rst index e6eca46a..25b23ccb 100644 --- a/docs/source/for_developers/install.rst +++ b/docs/source/for_developers/install.rst @@ -45,209 +45,13 @@ The usual caveats apply how you set up your python development environment. 
Notice you can *only* do that from within the original install directory tree. - Additional Installation Options ------------------------------- -The previous instructions (and the ``dysh`` README) suggest a route to install ``dysh`` using `hatch`. -However, there are several ways how you can install ``dysh`` for development. -We give a few practical examples, all based on having "dyshN" directories in a ~/GBT directory. -It is imperative that a developer install takes place in a shielded environment, generally using a virtual environment. - -We list a few, but if you found another approach, please share. - -.. _dysh1: - -dysh1: native Python -^^^^^^^^^^^^^^^^^^^^ - -Here is an example using native python on a vanilla Ubuntu system (python version 3.11 may be different). -You will need initial admin privilages for this. - -.. code:: bash - - # first ensure your native python has at least a way to run pip and allow a venv - sudo apt install python3 python3-pip ipython3 python3.11-venv jupyter-core - - # setup a venv, for example in a $HOME/venv hierarchy - mkdir -p $HOME/venv - python3 -m venv $HOME/venv/dysh1 - - # activate this venv - source $HOME/venv/dysh1/bin/activate - - # install hatch - pip install hatch notebook - -After this dysh can be installed in a virtual environment controlled by hatch - -.. code:: bash - - mkdir ~/GBT/dysh1 - cd ~/GBT/dysh1 - git clone https://github.com/GreenBankObservatory/dysh - cd dysh - - # setup dysh with hatch (be sure to be in the dysh directory) - hatch shell - pip install -r requirements_dev.txt - # some warning about running ipython - pip install -r docs/requirements.txt - hatch build - pip install -e . - ipython # this initially gave a matplotlib error, but it went away - exit - -Any time development is needed: - -.. code:: bash - - source $HOME/venv/dysh1/bin/activate - cd ~/GBT/dysh1/dysh - hatch shell - -and as always, verify it's there: - -.. 
code:: bash - - python -c 'import dysh; print(dysh.__version__)' - echo "git BRANCH: $(git branch --show-current) HEAD: $(git rev-list --count HEAD)" - -and when done, exit the hatch sub-shell - -.. code:: bash - - exit - -this will still return to the native virtual environment, so one more exit is needed to kill this shell - -.. code:: bash - - exit - -.. _dysh2: - -dysh2: anaconda3 python -^^^^^^^^^^^^^^^^^^^^^^^ - -Here is an example using an anaconda3 python, no virtual environments, no hatch, no nothing. -Simple and self-contained, but with an anaconda3 to maintain. - -.. code:: bash - - mkdir ~/GBT/dysh2 - cd ~/GBT/dysh2 - - ../install_anaconda3 # DM me for a copy - source python_start.sh - - git clone https://github.com/GreenBankObservatory/dysh - cd dysh - pip install -r requirements_dev.txt - pip install -r docs/requirements.txt - pip install -e . - -any time development is needed: - -.. code:: bash - - source ~/GBT/dysh2/python_start.sh - -and verify - -.. code:: bash - - python -c 'import dysh; print(dysh.__version__)' - echo "git BRANCH: $(git branch --show-current) HEAD: $(git rev-list --count HEAD)" - -and when done, exit will terminate the shell - -.. code:: bash - - exit - -.. _dysh3: - -dysh3: anaconda3 python with virtual environment -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -Here is an example using an anaconda3 python, but now using hatch - -.. code:: bash - - mkdir ~/GBT/dysh3 - cd ~/GBT/dysh3 - - ../install_anaconda3 # DM me for a copy - source python_start.sh - - pip install hatch - -After this dysh can be installed in a virtual environment controlled by hatch, -pretty much following what we did in :ref:`dysh1`: - -.. code:: bash - - git clone https://github.com/GreenBankObservatory/dysh - cd dysh - - # setup dysh with hatch (be sure to be in the dysh directory) - hatch shell - pip install -r requirements_dev.txt - pip install -r docs/requirements.txt - hatch build - pip install -e . - -and verify - -.. 
code:: bash - - python -c 'import dysh; print(dysh.__version__)' - echo "git BRANCH: $(git branch --show-current) HEAD: $(git rev-list --count HEAD)" - -and when done, exit will terminate the shell - -.. code:: bash - - exit - - -Any time development is needed: - -.. code:: bash - - source $HOME/GBT/dysh3/python_start.sh - cd ~/GBT/dysh3/dysh - hatch shell - - -Sample workflows ----------------- - -Minor issue: with hatch, if you're not in the code tree (much like git) you don't know -where your code tree is. Do we need peter's "rc" files. Do we need a module file? - - -Simple ``dysh`` Commands -^^^^^^^^^^^^^^^^^^^^^^^^ - -.. code:: bash - - python -c 'import dysh; print(dysh.__version__)' - python -c 'import dysh; print(dysh.__file__)' - -Building Documentation -^^^^^^^^^^^^^^^^^^^^^^ - -.. code:: bash - - cd dysh/docs - make html - xdg-open _build/html/index.html +.. include:: install_developer.rst Testing ======= - We use `pytest` for unit and integration testing. From the top-level dysh directory, run: diff --git a/docs/source/for_developers/install_developer.rst b/docs/source/for_developers/install_developer.rst new file mode 100644 index 00000000..ee666f1a --- /dev/null +++ b/docs/source/for_developers/install_developer.rst @@ -0,0 +1,196 @@ + +The previous instructions (and the ``dysh`` README) suggest a route to install ``dysh`` using `hatch`. +However, there are several ways how you can install ``dysh`` for development. +We give a few practical examples, all based on having "dyshN" directories in a ~/GBT directory. +It is imperative that a developer install takes place in a shielded environment, generally using a virtual environment. + +We list a few, but if you found another approach, please share. + +.. _dysh1: + +dysh1: native Python +^^^^^^^^^^^^^^^^^^^^ + +Here is an example using native python on a vanilla Ubuntu system (python version 3.11 may be different). +You will need initial admin privilages for this. + +.. 
code:: bash + + # first ensure your native python has at least a way to run pip and allow a venv + sudo apt install python3 python3-pip ipython3 python3.11-venv jupyter-core + + # setup a venv, for example in a $HOME/venv hierarchy + mkdir -p $HOME/venv + python3 -m venv $HOME/venv/dysh1 + + # activate this venv + source $HOME/venv/dysh1/bin/activate + + # install hatch + pip install hatch notebook + +After this dysh can be installed in a virtual environment controlled by hatch + +.. code:: bash + + mkdir ~/GBT/dysh1 + cd ~/GBT/dysh1 + git clone https://github.com/GreenBankObservatory/dysh + cd dysh + + # setup dysh with hatch (be sure to be in the dysh directory) + hatch shell + pip install -r requirements_dev.txt + # some warning about running ipython + pip install -r docs/requirements.txt + hatch build + pip install -e . + ipython # this initially gave a matplotlib error, but it went away + exit + +Any time development is needed: + +.. code:: bash + + source $HOME/venv/dysh1/bin/activate + cd ~/GBT/dysh1/dysh + hatch shell + +and as always, verify it's there: + +.. code:: bash + + python -c 'import dysh; print(dysh.__version__)' + echo "git BRANCH: $(git branch --show-current) HEAD: $(git rev-list --count HEAD)" + +and when done, exit the hatch sub-shell + +.. code:: bash + + exit + +this will still return to the native virtual environment, so one more exit is needed to kill this shell + +.. code:: bash + + exit + +.. _dysh2: + +dysh2: anaconda3 python +^^^^^^^^^^^^^^^^^^^^^^^ + +Here is an example using an anaconda3 python, no virtual environments, no hatch, no nothing. +Simple and self-contained, but with an anaconda3 to maintain. + +.. code:: bash + + mkdir ~/GBT/dysh2 + cd ~/GBT/dysh2 + + ../install_anaconda3 # DM me for a copy + source python_start.sh + + git clone https://github.com/GreenBankObservatory/dysh + cd dysh + pip install -r requirements_dev.txt + pip install -r docs/requirements.txt + pip install -e . + +any time development is needed: + +.. 
code:: bash + + source ~/GBT/dysh2/python_start.sh + +and verify + +.. code:: bash + + python -c 'import dysh; print(dysh.__version__)' + echo "git BRANCH: $(git branch --show-current) HEAD: $(git rev-list --count HEAD)" + +and when done, exit will terminate the shell + +.. code:: bash + + exit + +.. _dysh3: + +dysh3: anaconda3 python with virtual environment +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Here is an example using an anaconda3 python, but now using hatch + +.. code:: bash + + mkdir ~/GBT/dysh3 + cd ~/GBT/dysh3 + + ../install_anaconda3 # DM me for a copy + source python_start.sh + + pip install hatch + +After this dysh can be installed in a virtual environment controlled by hatch, +pretty much following what we did in :ref:`dysh1`: + +.. code:: bash + + git clone https://github.com/GreenBankObservatory/dysh + cd dysh + + # setup dysh with hatch (be sure to be in the dysh directory) + hatch shell + pip install -r requirements_dev.txt + pip install -r docs/requirements.txt + hatch build + pip install -e . + +and verify + +.. code:: bash + + python -c 'import dysh; print(dysh.__version__)' + echo "git BRANCH: $(git branch --show-current) HEAD: $(git rev-list --count HEAD)" + +and when done, exit will terminate the shell + +.. code:: bash + + exit + + +Any time development is needed: + +.. code:: bash + + source $HOME/GBT/dysh3/python_start.sh + cd ~/GBT/dysh3/dysh + hatch shell + + +Sample workflows +---------------- + +Minor issue: with hatch, if you're not in the code tree (much like git) you don't know +where your code tree is. Do we need peter's "rc" files. Do we need a module file? + + +Simple ``dysh`` Commands +^^^^^^^^^^^^^^^^^^^^^^^^ + +.. code:: bash + + python -c 'import dysh; print(dysh.__version__)' + python -c 'import dysh; print(dysh.__file__)' + +Building Documentation +^^^^^^^^^^^^^^^^^^^^^^ + +.. 
code:: bash + + cd dysh/docs + make html + xdg-open _build/html/index.html diff --git a/docs/source/getting_started/install.rst b/docs/source/getting_started/install.rst index 077407f1..a61a702b 100644 --- a/docs/source/getting_started/install.rst +++ b/docs/source/getting_started/install.rst @@ -21,9 +21,6 @@ Index `_. $ pip install dysh -.. warning:: - `dysh` is currently in development and the above command will install the latest stable version of `dysh` which might not reflect the contents of the documentation. For beta testing please see :ref:`beta-install`. - From github =========== diff --git a/docs/source/getting_started/tutorials/GBT/index.rst b/docs/source/getting_started/tutorials/GBT/index.rst index e7574f09..deafffec 100644 --- a/docs/source/getting_started/tutorials/GBT/index.rst +++ b/docs/source/getting_started/tutorials/GBT/index.rst @@ -4,5 +4,4 @@ Tutorials for GBT Data .. toctree:: :maxdepth: 2 - tutorial-positionswitch diff --git a/docs/source/getting_started/tutorials/index.rst b/docs/source/getting_started/tutorials/index.rst index fcdab94c..57c7bf1e 100644 --- a/docs/source/getting_started/tutorials/index.rst +++ b/docs/source/getting_started/tutorials/index.rst @@ -7,4 +7,4 @@ Tutorials on how to use `dysh`. .. toctree:: :maxdepth: 1 - GBT/index + GBT/tutorial-positionswitch diff --git a/docs/source/gui/build/executable.rst b/docs/source/gui/build/executable.rst new file mode 100644 index 00000000..d68aa4da --- /dev/null +++ b/docs/source/gui/build/executable.rst @@ -0,0 +1,60 @@ +*********************** +Building the Executable +*********************** + +PyInstaller +=========== + +To build with PyInstaller + +.. code:: bash + + (dysh) $ cd gui + (dysh) $ pyinstaller app.py + +Building From Scratch +##################### + +If you somehow lose or clear the ``dysh.spec`` file, you need to run a lot more flags through `PyInstaller` + +.. 
code:: bash + + (dysh) $ cd gui + (dysh) $ pyinstaller --onefile --noconsole --name "dysh" --icon=./static/favicon.ico --clean -y --collect-all asdf --collect-all asdf_standard --collect-all asdf_transform_schemas --collect-all packaging --collect-all pkg_resources --collect-all astropy --collect-all lz4 --recursive-copy-metadata asdf --recursive-copy-metadata astropy app.py + +Troubleshooting +=============== + +Windows +####### + +If you get the following error: + +.. code:: bash + + OSError: [WinError 225] Operation did not complete successfully because the file contains a virus or potentially unwanted software. + ... + win32ctypes.pywin32.pywintypes.error: (225, 'BeginUpdateResourceW', 'Operation did not complete successfully because the file contains a virus or potentially unwanted software.') + +This is the antivirus program thinking you're getting a virus. To circumvent this: + +1. Open the Windows Security app + +2. Navigate to "Virus & threat protection" + +3. Click "Manage settings" under "Virus & threat protection settings" + +4. Turn off "Real-time protection" + +5. Run the `PyInstaller` build command + +6. Turn "Real-time protection" back on + +What You Can Ignore +################### + +You can safely ignore the following messages: + +* ``WARNING: Library {LIBNAME} required via ctypes not found`` + + * See https://github.com/pyinstaller/pyinstaller/issues/1403 diff --git a/docs/source/gui/build/index.rst b/docs/source/gui/build/index.rst new file mode 100644 index 00000000..a0e97b1e --- /dev/null +++ b/docs/source/gui/build/index.rst @@ -0,0 +1,10 @@ +**************** +Building the GUI +**************** + +Stuff about the GUI design + +.. toctree:: + :maxdepth: 2 + + executable diff --git a/docs/source/gui/index.rst b/docs/source/gui/index.rst new file mode 100644 index 00000000..fdfc6952 --- /dev/null +++ b/docs/source/gui/index.rst @@ -0,0 +1,10 @@ +********** +GUI Design +********** + +Stuff about the GUI design + +.. 
toctree:: + :maxdepth: 2 + + build/index diff --git a/docs/source/index.rst b/docs/source/index.rst index 18ceebe9..426d5ce6 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -1,12 +1,11 @@ -********************** +****************** ``dysh`` Documentation -********************** - +****************** ``dysh`` is a Python spectral line data reduction and analysis program for singledish data with specific emphasis on data from the Green Bank Telescope. It is currently under development in collaboration between the `Green Bank Observatory `_ and the Laboratory for Millimeter-Wave Astronomy (LMA) at the `University of Maryland (UMD) `_. -It is intended to be an alternative for `GBTIDL `_, GBO's current spectral line data reduction package. +It is intended to be a full replacement for the GBO's current reduction package `GBTIDL `_. Contents =============== @@ -22,6 +21,7 @@ Contents for_developers/index performance_testing/index design/index + gui/index Indices and tables ================== diff --git a/dysh_start.sh.in b/dysh_start.sh.in new file mode 100644 index 00000000..9849e9fc --- /dev/null +++ b/dysh_start.sh.in @@ -0,0 +1,12 @@ +# @EDIT_MSG@ +# for (ba)sh : source this file + +export DYSH=@DYSH@ + +export DYSH_DATA=@DYSH_DATA@ + +for f in $DYSH/dysh_local.sh $HOME/.dysh_start.sh; do + if [ -e $f ]; then + source $f + fi +done diff --git a/gui/app.py b/gui/app.py new file mode 100644 index 00000000..076271a3 --- /dev/null +++ b/gui/app.py @@ -0,0 +1,305 @@ +# PACKAGE IMPORTS +import sys # , os, psutil, getpass, socket + +# PARALLELIZATION +from concurrent.futures import ThreadPoolExecutor +from threading import Thread + +import wget +from PyQt5.QtCore import * +from PyQt5.QtGui import * +from PyQt5.QtWidgets import * +from qt_material import apply_stylesheet + +# import numpy as np +# import pyqtgraph as pg +# from astropy.io import fits +# from time import time +# import pandas as pd +# import argparse +from screeninfo import get_monitors +from 
util.core import DyshWorker, ThreadCallbacks +from util.dataload import DataLoader, FITSFileDialog +from widgets.graphs import * +from widgets.layouts import * +from widgets.QIPython import QIPythonConsoleWidget +from widgets.splash import SplashScreen + +# LOCAL GUI IMPORTS +from widgets.tables import FITSHeaderTable + +from dysh.fits.gbtfitsload import GBTFITSLoad + +# DYSH IMPORTS +from dysh.util.messages import * +from dysh.util.parallelization import SingleThread + + +class SelectPanel(QGridLayout): + """The startup window of the GUI""" + + def __init__(self): + """Initializes the startup window""" + super().__init__() + self._init_UI() + + def _init_UI(self): + """Creates the skeleton structure of the GUI""" + + # Make the UI Items + self.main_text = QLabel("Welcome to the Dysh GUI") + self.button = QPushButton("Select file") + self.button.clicked.connect(self.get_files) + + self._init_site_selection() + + # Add the UI Items + self.addWidget(self.main_text, 0, 0, 1, 2) + self.addWidget(self.combo_telescope, 1, 0, 1, 1) + self.addWidget(self.combo_rx, 1, 1, 1, 1) + self.addWidget(self.button, 2, 0, 1, 2) + + def _init_site_selection(self): + self.combo_telescope = QComboBox() + self.combo_telescope.addItem("Auto-Detect") + self.combo_telescope.addItem("Green Bank Telescope (GBT)") + self.combo_telescope.addItem("Green Bank 20-meter Telescope") + self.combo_telescope.addItem("Large Millimeter Telescope (LMT)") + + self.combo_rx = QComboBox() + self.update_combo_rx() + + self.combo_telescope.currentIndexChanged.connect(self.update_combo_rx) + + def update_combo_rx(self): + # [TODO] Load the RX info from a JSON file + self.combo_rx.clear() + self.combo_rx.setEnabled(True) + + ci = int(self.combo_telescope.currentIndex()) + if ci == 0: + # Auto-Detect + self.combo_rx.setEnabled(False) + elif ci == 1: + # Green Bank Telescope (GBT) + self.combo_rx.addItem("PF1 (0.29 - 0.395 GHz)") + self.combo_rx.addItem("PF1 (0.385 - 0.52 GHz)") + self.combo_rx.addItem("PF1 (0.51 
- 0.69 GHz)") + self.combo_rx.addItem("PF1 (0.68 - 0.92 GHz)") + self.combo_rx.addItem("PF2 (0.9 - 1.23 GHz)") + self.combo_rx.addItem("L (1.15 - 1.73 GHz)") + self.combo_rx.addItem("S (1.73 - 2.6 GHz)") + self.combo_rx.addItem("UWBR (0.7 - 4.0 GHz)") + self.combo_rx.addItem("C (3.95 - 8.0 GHz)") + self.combo_rx.addItem("X (8.0 - 12.0 GHz)") + self.combo_rx.addItem("Ku (12.0 - 15.4 GHz)") + self.combo_rx.addItem("KFPA (17.0 - 27.5 GHz)") + self.combo_rx.addItem("Ka F1 (26.0 - 31.0 GHz)") + self.combo_rx.addItem("Ka F2 (30.5 - 37.0 GHz)") + self.combo_rx.addItem("Ka F3 (36.0 - 39.5 GHz)") + self.combo_rx.addItem("Q (38.2 - 49.8 GHz)") + self.combo_rx.addItem("W1 (68.0 - 74.0 GHz)") + self.combo_rx.addItem("W2 (73.0 - 80.0 GHz)") + self.combo_rx.addItem("W3 (79.0 - 86.0 GHz)") + self.combo_rx.addItem("W4 (85.0 - 92.0 GHz)") + self.combo_rx.addItem("ARGUS (75.0 - 115.5 GHz)") + elif ci == 2: + # Green Bank 20-meter telescope + self.combo_rx.addItem("L (1.15 - 1.73 GHz)") + self.combo_rx.addItem("X (8.0 - 12.0 GHz)") + elif ci == 3: + # Green Bank 20-meter telescope + self.combo_rx.addItem("RSR") + self.combo_rx.addItem("SEQUOIA") + self.combo_rx.addItem("MSIP1mm") + self.combo_rx.addItem("B4R") + self.combo_rx.addItem("TolTEC") + + def get_files(self): + # [TODO] Figure out why this makes you do it twice? 
+ self.file_dialog = FITSFileDialog() + + +class DyshMainWindow(QMainWindow): + """The main window of the GUI""" + + def __init__(self, fpath=None): + """Initializes the main window""" + super(DyshMainWindow, self).__init__() + FriendlyMessages.hello() + + self.setWindowTitle("Dysh GUI") + self._init_geometry(0.8) + + self.info_threads() + # self._init_select_panel() + self._init_main_panel() + + self.show() + + def _init_geometry(self, mult): + """ + Draws the GUI on the primary monitor + + Parameters + ---------- + mult : int or float + proportion of total size to draw window (0.8 = 80%) + + """ + for m in get_monitors(): + if m.is_primary: + self.width = int(m.width * mult) + self.height = int(m.height * mult) + self.xpos = int(m.x + (m.width * (1 - mult)) / 2) + self.ypos = int(m.y + (m.height * (1 - mult)) / 2) + self.setGeometry(self.xpos, self.ypos, self.width, self.height) + + def info_threads(self): + """Updates info on available threads""" + self.threadCountActive = QThreadPool.globalInstance().activeThreadCount() + self.threadCountTotal = QThreadPool.globalInstance().maxThreadCount() + # print(f"You are using {self.threadCountActive} of the {self.threadCountTotal} available QThreads") + + def _init_select_panel(self): + self.main_widget = QWidget() + self.main_layout = SelectPanel() + self.main_widget.setLayout(self.main_layout) + self.setCentralWidget(self.main_widget) + if self.main_layout.file_dialog.exec_() == QDialog.Accepted: + self.fpath = self.main_layout.file_dialog.selectedFiles()[0] + + def _init_main_panel(self): + # self._clear_all() + self._load_data() + self._init_UI() + + # @SingleThread + def SDFITS_load_all(self, fpath): + self.sdfits = GBTFITSLoad(fpath) + + def _load_data(self): + """Opens up the FITS file""" + # [TODO] Load lists in a QThread so the main screen can be created + # [TODO] Add logic to determine if GBTFITSLoad or another + # s_load = DyshWorker(target=self.SDFITS_load_all, args=(self.fpath, 1)) + # s_load.start() + # 
url = "https://www.gb.nrao.edu/dysh/example_data/onoff-L/data/TGBT21A_501_11.raw.vegas.fits" + # self.fpath = wget.download(url) + self.fpath = "TGBT21A_501_11.raw.vegas.fits" + + self.SDFITS_load_all(self.fpath) # s_load.join() + self.scan = self.sdfits.getps(152, ifnum=0, plnum=0) + self.scan.calibrate() + self.fdata = self.scan.timeaverage(weights="tsys") + + def _init_UI(self): + """Creates the skeleton structure of the GUI""" + self.main_widget = QWidget() + self.main_layout = QGridLayout() + self.main_widget.setLayout(self.main_layout) + self.setCentralWidget(self.main_widget) + + self._init_sidebar() + self._init_toggle_btn() + self._init_tabs() + + self.main_layout.addWidget(self.toggle_btn, 0, 0, 1, 1) + self.main_layout.addWidget(self.sidebar, 1, 0, 1, 1) + + self.main_layout.addWidget(self.tabs, 0, 1, 2, 2) + self._init_tables() + self._init_plots() + self._init_terminal() + + def _init_tabs(self): + self.tabs = QTabWidget() + self.tab1 = QWidget() + self.tab2 = QWidget() + self.tab3 = QWidget() + self.tab4 = QWidget() + + self.tab1_layout = QGridLayout() + self.tab2_layout = QGridLayout() + self.tab3_layout = QGridLayout() + self.tab4_layout = QGridLayout() + + self.tab1.setLayout(self.tab1_layout) + self.tab2.setLayout(self.tab2_layout) + self.tab3.setLayout(self.tab3_layout) + self.tab4.setLayout(self.tab4_layout) + + self.tabs.addTab(self.tab1, "File") + self.tabs.addTab(self.tab2, "Waterfall") + self.tabs.addTab(self.tab3, "Calibrated Spectrum") + self.tabs.addTab(self.tab4, "Console") + + def _init_sidebar(self): + self.sidebar = CollapsibleSideBar() + self.sidebar.add_box(title="my box 1", contentWidget=QLabel("content 1")) + self.sidebar.add_box(title="my box 2", contentWidget=QLabel("content 2")) + + def _init_toggle_btn(self): + self.toggle_btn = QPushButton("Dock") + self.toggle_btn.clicked.connect(self.toggle_hidden) + + def toggle_hidden(self): + if self.sidebar.isHidden() == True: + self.sidebar.setHidden(False) + else: + 
self.sidebar.setHidden(True) + + def _init_tables(self): + """Creates tables of FITS information""" + # [TODO] Add selection logic for if len(bintable) > 1 + # [TODO] Do this in a QThread so the main screen can be created + self.hdr0_tbl = FITSHeaderTable() + self.hdr0_tbl.load(self.sdfits.primaryheader()) + self.hdr1_tbl = FITSHeaderTable() + self.hdr1_tbl.load(self.sdfits.binheader()[0]) + self.tab1_layout.addWidget(self.hdr0_tbl, 0, 0, 1, 1) + self.tab1_layout.addWidget(self.hdr1_tbl, 0, 1, 1, 1) + + def _init_plots(self): + """Creates the plot canvases""" + # [TODO] Do this in a QThread so the main screen can be created + self.spec_plot = SingleSpectrum(self.fdata) + # print(f"NINTEGRATIONS: {self.fdata.nintegrations(1)}") + # self.waterfall = WaterfallSpectrum(self.fdata) + self.tab3_layout.addWidget(self.spec_plot, 0, 0, 1, 2) + + def _init_plot_sidebar(self): + pass + + def _init_terminal(self): + self.terminal = QIPythonConsoleWidget() + self.tab4_layout.addWidget(self.terminal, 0, 0, 1, 1) + + def _clear_all(self): + while self.main_layout.count(): + child = self.main_layout.takeAt(0) + if child.widget(): + child.widget().deleteLater() + + def closeEvent(self, *args, **kwargs): + super(QMainWindow, self).closeEvent(*args, **kwargs) + self.terminal.stop() + FriendlyMessages.goodbye() + + +class App(QApplication): + def __init__(self, *args): + QApplication.__init__(self, *args) + self.main = DyshMainWindow() + self.main.show() + + +def main(args): + # global app + app = App(args) + apply_stylesheet(app, theme="dark_purple.xml") + app.exec_() + + +if __name__ == "__main__": + main(sys.argv) diff --git a/gui/static/favicon.ico b/gui/static/favicon.ico new file mode 100644 index 00000000..2ca452fe Binary files /dev/null and b/gui/static/favicon.ico differ diff --git a/gui/tests/.gitkeep b/gui/tests/.gitkeep new file mode 100644 index 00000000..e69de29b diff --git a/gui/util/__init__.py b/gui/util/__init__.py new file mode 100644 index 00000000..e69de29b diff 
--git a/gui/util/core.py b/gui/util/core.py new file mode 100644 index 00000000..31585a73 --- /dev/null +++ b/gui/util/core.py @@ -0,0 +1,31 @@ +import os +import sys +from threading import Thread + +from PyQt5.QtCore import QObject, QThread, pyqtSignal + + +class ThreadCallbacks: + def progress(future): + pass + # print('.', end='', flush=True) + + +class DyshWorker(QObject, Thread): + """Thread to run a function that returns a value""" + + def __init__(self, group=None, target=None, name=None, args=(), kwargs={}): + self.finished = pyqtSignal() + self.progress = pyqtSignal(int) + QObject.__init__(self) + Thread.__init__(self, group=group, target=target, name=name, args=args, kwargs=kwargs) + self._return = None + # print(self.__dir__()) + + def run(self): + if self._target is not None: + self._return = self._target(*self._args, **self._kwargs) + + def join(self): + Thread.join(self) + return self._return diff --git a/gui/util/dataload.py b/gui/util/dataload.py new file mode 100644 index 00000000..cd962c8a --- /dev/null +++ b/gui/util/dataload.py @@ -0,0 +1,158 @@ +import argparse +import os +import sys +from time import time + +import numpy as np +import pandas as pd +from PyQt5.QtCore import * + +# from PyQt5.QtCore import QRunnable, QObject, Qt, QThreadPool, pyqtSignal, pyqtSlot, QMutex +from PyQt5.QtGui import * +from PyQt5.QtWidgets import * + +# sys.path.insert(0, os.path.abspath('../src')) +from dysh.fits.sdfitsload import SDFITSLoad + + +class FITSFileDialog(QFileDialog): + """File dialog to select a FITS file""" + + def __init__(self): + """Initializes the file dialog widget""" + super().__init__() + self.get_fpaths() + self.config() + self.getOpenFileName(caption="Open an SDFITS file") + + def config(self): + self.setDirectory(self.f_root) + # self.setFilter(self.f_filter) + + def get_fpaths(self): + # [TODO] Have this check what system you're on and build the QFileSystemModel + # [TODO] Add the FITS file filter + self.f_root = 
QDir("/home/dysh/example_data/") + # self.f_filter = "FITS Files (*.fits)" + # self.f_model = QFileSystemModel() + # self.f_model.setRootPath("/home/dysh/example_data/") + + def get_selection(self): + # [TODO] Closing the file dialog without selecting anything produces an error. Fix that + fdialog = QFileDialog(self, "Select an SDFITS File", froot, filter_sdfits) + if fdialog.exec_() == QDialog.Accepted: + fpath = fdialog.selectedFiles()[0] + + +class DataLoader(QObject): + finished = pyqtSignal() + my_data = pyqtSignal() + my_hdr = () + + def load_data(self, fpath): + print("RUNNABLE: LOADING DATA") + t0 = time() + p0, a0 = self.get_memory_usage() + + parser = argparse.ArgumentParser(prog="revisedstructure") + parser.add_argument("--wcs", action="store_true", help="create WCS for each Spectrum1D") + parser.add_argument("--fix", action="store_true", help="fix warnings about keywords in WCS creation") + parser.add_argument("--profile", "-p", action="store_true", help="run the profiler") + parser.add_argument("--baseline", "-b", action="store_true", help="remove baselines") + parser.add_argument( + "--maxload", "-m", action="store", help="maximum number of spectra to load (create obsblocks)", default=1e16 + ) + args = parser.parse_args() + + data_sl = SDFITSLoad(fpath) + for h in range(1, len(data_sl._hdu)): + data_sl._loadlists(fix=args.fix, wcs=args.wcs, hdu=int(h), maxspect=float(args.maxload)) + + t1 = time() + p1, a1 = self.get_memory_usage() + + for o in data_sl._obsblock: + data = [] + for oi in range(len(o)): + data.append(o[oi].spectral_axis.data) + # self.data = np.asarray(data).T + # print(f'NEW DATA: {np.shape(self.data)}, {type(self.data)}') + print(f"SHAPE: {np.shape(o)}, {np.shape(o[0].spectral_axis)}") + print(o) + print(f"It took {t1-t0:.2f} seconds to load the file with the SDFITS Loader.") + print(f"This process added {p1-p0:.2f} MiB of RAM usage.\n") + + def load_header(self, fdata): + print("RUNNABLE: LOADING HEADER") + # [TODO] These are only 
loading one value, but they may change for each scan + all_hks = [] + self.fhdr = {} + self.tfhdr = {} + # self.fhdr_d = {} + for hk in self.fdata[1].header: + hkv = str(fd[1].header[hk]) + all_hks.append(hkv) + + n_tfield = int(fdata[1].header["TFIELDS"]) + n_points = int(fdata[1].header["NAXIS1"]) + + # PLACES TO SAVE THE INFO + self.h_fits = {} + self.h_data_info = {} + h_data_all = {} + + # GET INFO ON THE WHOLE FILE + h_fits_info = [ + "XTENSION", + "BITPIX", + "NAXIS", + "NAXIS1", + "NAXIS2", + "PCOUNT", + "GCOUNT", + "TFIELDS", + "TELESCOP", + "BACKEND", + "SITELONG", + "SITELAT", + "SITEELEV", + "EXTNAME", + ] + for hfi in h_fits_info: + try: + self.h_fits[hfi] = fd[1].header[hfi] + # print(f'Header key {hfi} has value {fd[1].header[hfi]}') + # all_hks.remove(hfi) + except: + print(f"Issue with header {hfi}") + + # GET SCAN-SPECIFIC INFORMATION + for i in range(n_tfield): + # Use i+1 because the TFIELDs use 1-based counting, while range() uses 0-based + try: + tf0 = fdata[1].header[f"TTYPE{i+1}"] + tf_data = fdata[1].data[tf0] + all_hks.remove(tf0) + except: + tf0 = np.zeros(n_points) + try: + tf1 = fdata[1].header[f"TFORM{i+1}"] + all_hks.remove(tf1) + except: + tf1 = "-" + try: + tf2 = fdata[1].header[f"TUNIT{i+1}"] + all_hks.remove(tf2) + except: + tf2 = "-" + self.h_data_info[tf0] = {"TFORM": tf1, "TUNIT": tf2} + h_data_all[tf0] = list(tf_data) + self.hdr_df = pd.DataFrame(h_data_all) + # print(f'{len(all_hks)} UNUSED HEADERS: {all_hks}') + + def run(self): + print("Loading Data...") + self.load_data() + print("Loading Header...") + self.load_header() + self.signal.finished.emit() diff --git a/gui/widgets/QIPython.py b/gui/widgets/QIPython.py new file mode 100644 index 00000000..0712ed8e --- /dev/null +++ b/gui/widgets/QIPython.py @@ -0,0 +1,52 @@ +import os + +os.environ["QT_API"] = "pyqt5" +import sip + +sip.setapi("QString", 2) +sip.setapi("QVariant", 2) + +from IPython.lib import guisupport +from PyQt5.QtCore import * +from qtconsole.inprocess 
import QtInProcessKernelManager +from qtconsole.rich_ipython_widget import RichIPythonWidget + + +class QIPythonConsoleWidget(RichIPythonWidget): + """Convenience class for a live IPython console widget. We can replace the standard banner using the customBanner argument""" + + def __init__(self, *args, **kwargs): + super(QIPythonConsoleWidget, self).__init__(*args, **kwargs) + + self.makeBanner() + self.kernel_manager = QtInProcessKernelManager() + self.kernel_manager.start_kernel() + self.kernel_manager.kernel.gui = "qt4" + self.kernel_client = self.kernel_manager.client() + self.kernel_client.start_channels() + + self.exit_requested.connect(self.stop) + + def stop(self): + self.kernel_client.stop_channels() + self.kernel_manager.shutdown_kernel() + guisupport.get_app_qt4().exit() + + def makeBanner(self): + self.banner = "Welcome to the Dysh console!\n" + + def pushVariables(self, variableDict): + """Given a dictionary containing name / value pairs, push those variables to the IPython console widget""" + self.kernel_manager.kernel.shell.push(variableDict) + + def clearTerminal(self): + """Clears the terminal""" + self._control.clear() + + def printText(self, text): + """Prints some plain text to the console""" + self._append_plain_text(text) + + def executeCommand(self, command): + """Execute a command in the frame of the console widget""" + self._execute(command, False) diff --git a/gui/widgets/__init__.py b/gui/widgets/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/gui/widgets/graphs.py b/gui/widgets/graphs.py new file mode 100644 index 00000000..cc6e2e72 --- /dev/null +++ b/gui/widgets/graphs.py @@ -0,0 +1,129 @@ +import getpass +import os +import socket +import sys + +import numpy as np +import psutil +from PyQt5.QtCore import * +from PyQt5.QtGui import * +from PyQt5.QtWidgets import * +from pyqtgraph import GraphicsLayoutWidget, ImageView, InfiniteLine, PlotWidget + + +class SpectrumSelectLine(InfiniteLine): + """Horizontal Line to 
select spectrum in waterfall""" + + def __init__(self): + super().__init__(pos=0, angle=0, movable=True) + self.config() + + def config(self): + self.setPen(color="r", width=2) + self.setHoverPen(color="r", width=2) + + def connect(self, func): + self.sigDragged.connect(func) + + +class WaterfallSpectrum(ImageView): + """Waterfall Plot""" + + def __init__(self, data): + super().__init__() + self.add_data(data) + self.config() + + def add_data(self, data): + self.setImage(self.data.T) + + def config(self): + # self.setLimits(xMin=0, xMax=self.xlim0, yMin=self.ylim0, yMax=0) + self.setTitle("Waterfall Plot") + self.setLabels(left="Spectrum", bottom="Frequency") + self.setColorMap("viridis") + + def add_hline(self, change_func): + self.hline = SpectrumSelectLine() + self.hline.connect(change_func) + self.addItem(self.hline) + + +class SingleSpectrum(PlotWidget): + """Spectrum Plot""" + + def __init__(self, spectrum, **kwargs): + super().__init__() + self._spectrum = spectrum + self.add_data() + + def config(self): + # [TODO] connect spec_num to the hline value + spec_num = 0 + self.setLabels(left="Intensity", bottom="Frequency") + self.setTitle(f"Spectrum {spec_num}") + + def update_data(self, spectrum): + self.clear() + self._spectrum = spectrum + self.add_data() + + def add_data(self, **kwargs): + # self._plot_kwargs.update(kwargs) + # self.get_kwargs(**kwargs) + + s = self._spectrum + sa = s.spectral_axis + sf = s.flux + + self.plot(sa, sf) + + def get_kwargs(self, **kwargs): + self.lw = self._plot_kwargs["linewidth"] + self.xunit = self._plot_kwargs["xaxis_unit"] + self.yunit = self._plot_kwargs["yaxis_unit"] + + +class MemoryUsage(PlotWidget): + """Memory Usage Plot""" + + def __init__(self): + super().__init__() + self.username = getpass.getuser() + self.hostname = socket.gethostname() + + def _init_mem_use_plot(self): + self.list_mem_time = [] + self.list_pmem_val = [] + self.list_amem_val = [] + self.plotWidget_mem = pg.GraphicsLayoutWidget() + self.ax_mem = 
self.plotWidget_mem.addPlot(title=f"RAM Usage ({self.username}@{self.hostname})", row=1, col=0) + self.ax_mem.setLabels(left="Resident Set Size (MiB)", bottom="Time") + # self.ax_mem.setLogMode(False, True) + + self.timer_mem = QTimer() + self.timer_mem.timeout.connect(self.update_mem_use_plot) + self.timer_mem.start(1000) + + def update_mem_use_plot(self): + # [TODO] Figure out what to do if this is left open for a long time. The memory lists will get huge. + pmem, amem = self.get_memory_usage() + self.list_pmem_val.append(pmem) + self.list_amem_val.append(amem) + self.list_mem_time.append(time()) + self.ax_mem.clear() + # self.ax_mem.addLegend() + self.hline_fsize = pg.InfiniteLine( + pos=self.fsize, angle=0, movable=False, label="SDFITS File Size", labelOpts={"color": "r"} + ) + self.hline_fsize.setPen(color="r", width=1) + self.ax_mem.addItem(self.hline_fsize, name="SDFITS File Size") + # self.ax_mem.plot(self.list_mem_time, self.list_amem_val, name=f"Available RAM on {self.hostname}") + self.ax_mem.plot(self.list_mem_time, self.list_pmem_val, name="Current RAM Usage") + + def get_memory_usage(self): + # [TODO] Verify that this is not underestimating usage bc of caches + process = psutil.Process(os.getpid()) + pmem = process.memory_info().rss / 1048576 # Bytes to MiB + amem = psutil.virtual_memory().available / 1048576 # Bytes to MiB + return pmem, amem diff --git a/gui/widgets/layouts.py b/gui/widgets/layouts.py new file mode 100644 index 00000000..0f55cdb1 --- /dev/null +++ b/gui/widgets/layouts.py @@ -0,0 +1,76 @@ +from PyQt5.QtCore import * +from PyQt5.QtGui import * +from PyQt5.QtWidgets import * + + +class CollapsibleBox(QWidget): + def __init__(self, title="", parent=None): + super(CollapsibleBox, self).__init__(parent) + + self.toggle_button = QToolButton(text=title, checkable=True, checked=False) + self.toggle_button.setStyleSheet("QToolButton { border: none; }") + self.toggle_button.setToolButtonStyle(Qt.ToolButtonTextBesideIcon) + 
self.toggle_button.setArrowType(Qt.RightArrow) + self.toggle_button.pressed.connect(self.on_pressed) + + self.toggle_animation = QParallelAnimationGroup(self) + + self.content_area = QScrollArea(maximumHeight=0, minimumHeight=0) + self.content_area.setSizePolicy(QSizePolicy.Expanding, QSizePolicy.Fixed) + self.content_area.setFrameShape(QFrame.NoFrame) + + lay = QVBoxLayout(self) + lay.setSpacing(0) + lay.setContentsMargins(0, 0, 0, 0) + lay.addWidget(self.toggle_button) + lay.addWidget(self.content_area) + + self.toggle_animation.addAnimation(QPropertyAnimation(self, b"minimumHeight")) + self.toggle_animation.addAnimation(QPropertyAnimation(self, b"maximumHeight")) + self.toggle_animation.addAnimation(QPropertyAnimation(self.content_area, b"maximumHeight")) + + @pyqtSlot() + def on_pressed(self): + checked = self.toggle_button.isChecked() + self.toggle_button.setArrowType(Qt.DownArrow if not checked else Qt.RightArrow) + self.toggle_animation.setDirection(QAbstractAnimation.Forward if not checked else QAbstractAnimation.Backward) + self.toggle_animation.start() + + def setContentLayout(self, layout): + lay = self.content_area.layout() + del lay + self.content_area.setLayout(layout) + collapsed_height = self.sizeHint().height() - self.content_area.maximumHeight() + content_height = layout.sizeHint().height() + for i in range(self.toggle_animation.animationCount()): + animation = self.toggle_animation.animationAt(i) + animation.setDuration(500) + animation.setStartValue(collapsed_height) + animation.setEndValue(collapsed_height + content_height) + + content_animation = self.toggle_animation.animationAt(self.toggle_animation.animationCount() - 1) + content_animation.setDuration(500) + content_animation.setStartValue(0) + content_animation.setEndValue(content_height) + + +class CollapsibleSideBar(QDockWidget): + def __init__(self): + super().__init__() + self._init_UI() + + def _init_UI(self): + self.scroll = QScrollArea() + self.setWidget(self.scroll) + self.content 
= QWidget() + self.scroll.setWidget(self.content) + self.scroll.setWidgetResizable(True) + self.vlay = QVBoxLayout(self.content) + self.vlay.addStretch() + + def add_box(self, title, contentWidget): + box = CollapsibleBox(title) + lay = QVBoxLayout() + lay.addWidget(contentWidget) + box.setContentLayout(lay) + self.vlay.addWidget(box) diff --git a/gui/widgets/splash.py b/gui/widgets/splash.py new file mode 100644 index 00000000..3963551c --- /dev/null +++ b/gui/widgets/splash.py @@ -0,0 +1,64 @@ +import os +import sys +from pathlib import Path + +from PyQt5.QtCore import Qt, QThread, pyqtSignal +from PyQt5.QtGui import QMovie +from PyQt5.QtWidgets import QApplication, QMainWindow, QSplashScreen + +GUI_BASE_DIR = Path(__file__).resolve().parent.parent +# https://stackoverflow.com/questions/71627508/pyqt-show-animated-gif-while-other-operations-are-running + + +class Worker(QThread): + progressChanged = pyqtSignal(int) + + def run(self): + for count in range(6): + self.progressChanged.emit(count) + self.sleep(1) + self.progressChanged.emit(-1) + + +class Window(QMainWindow): + def __init__(self): + super().__init__() + + +class SplashScreen(QSplashScreen): + def __init__(self, flags=0): + super().__init__(flags=Qt.WindowFlags(flags)) + self.load_gif_dir = os.path.join(GUI_BASE_DIR, "static/img/loading.gif") + self.movie = QMovie(self.load_gif_dir, parent=self) + self.movie.frameChanged.connect(self.handleFrameChange) + self.movie.start() + + def updateProgress(self, count=0): + if count == 0: + message = "Starting..." + elif count > 0: + message = f"Processing... {count}" + else: + message = "Finished!" 
+ self.showMessage(message, Qt.AlignHCenter | Qt.AlignBottom, Qt.white) + + def handleFrameChange(self): + pixmap = self.movie.currentPixmap() + self.setPixmap(pixmap) + self.setMask(pixmap.mask()) + + +if __name__ == "__main__": + print("Splash screen!") + dir_path = os.path.dirname(os.path.realpath(__file__)) + + app = QApplication(sys.argv) + window = Window() + + splash = SplashScreen(Qt.WindowStaysOnTopHint) + worker = Worker() + worker.progressChanged.connect(splash.updateProgress) + worker.finished.connect(lambda: (splash.finish(window), window.show())) + splash.show() + worker.start() + app.exec_() diff --git a/gui/widgets/tables.py b/gui/widgets/tables.py new file mode 100644 index 00000000..c6819650 --- /dev/null +++ b/gui/widgets/tables.py @@ -0,0 +1,88 @@ +import getpass +import os +import socket +import sys + +import psutil +from PyQt5.QtCore import * +from PyQt5.QtGui import * +from PyQt5.QtWidgets import * +from pyqtgraph import GraphicsLayoutWidget, ImageItem + + +class FITSHeaderTable(QWidget): + """Table of FITS Header information""" + + def __init__(self): + """Initializes the table widget""" + super().__init__() + self.make_layout() + + def make_layout(self): + self.title = QLabel("FITS Header") + self.tbl = QTableWidget() + self.tbl_layout = QVBoxLayout() + self.setLayout(self.tbl_layout) + self.tbl_layout.addWidget(self.title) + self.tbl_layout.addWidget(self.tbl) + + def load(self, data): + """ + Gets the keys + + Parameters + ---------- + data : dict + A dictionary of the FITS header + + """ + ks = [k for k in data.keys()] + + self.tbl.setRowCount(len(ks)) + self.tbl.setColumnCount(2) + self.tbl.setHorizontalHeaderLabels(["Header Key", "Header Value"]) + + for i, ki in enumerate(ks): + self.tbl.setItem(i, 0, QTableWidgetItem(str(ki))) + self.tbl.setItem(i, 1, QTableWidgetItem(str(data[ki]))) + + +class FITSDataTable(QTableWidget): + """Table of FITS Header information""" + + def __init__(self): + """Initializes the table widget""" + 
super().__init__() + + def make_layout(self): + self.title = QLabel("FITS Data") + self.tbl = QTableWidget() + self.tbl_layout = QVBoxLayout() + self.setLayout(self.tbl_layout) + self.tbl_layout.addWidget(self.title) + self.tbl_layout.addWidget(self.tbl) + + def get_keys(self, data): + """ + Gets the keys + + Parameters + ---------- + data : dict + A dictionary of the FITS column names + + """ + ks = data.keys() + + self.tbl.setRowCount(len(ks)) + self.tbl.setColumnCount(4) + self.tbl.setHorizontalHeaderLabels(["Header Key", "Value", "Unit", "TFORM"]) + + for i, ki in enumerate(ks): + try: + self.tbl.setItem(i, 0, QTableWidgetItem(str(ki))) + self.tbl.setItem(i, 1, QTableWidgetItem(str(self.hdr_df[ki][0]))) + self.tbl.setItem(i, 2, QTableWidgetItem(str(self.h_data_info[ki]["TUNIT"]))) + self.tbl.setItem(i, 3, QTableWidgetItem(str(self.h_data_info[ki]["TFORM"]))) + except: + print(f"Issue encountered for {ki} (data)") diff --git a/pyproject.toml b/pyproject.toml index 25b5ee35..858c164f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -24,33 +24,30 @@ classifiers = [ ] dependencies = [ "astropy", - "ipython", "matplotlib", "numpy", "pandas", "scipy", "specutils", - "wget", + "sphinx", + "myst-parser", + "sphinx-inline-tabs", + "ipython", + "wget" ] [project.optional-dependencies] dev = [ - "coverage[toml]", "ipdb", - "numpydoc", "pytest", "pytest-cov", + "myst-parser", "sphinx", "sphinx-autobuild", - "sphinx-inline-tabs", "sphinx-rtd-theme", "sphinxcontrib-mermaid", + "numpydoc" ] -nb = [ - "jupyter", - "jupyterlab", -] -all = ["dysh[dev,nb]"] [project.urls] Documentation = "https://github.com/GreenBankObservatory/dysh#readme" @@ -82,7 +79,7 @@ docs-build = "sphinx-build {root}/docs/source {root}/docs/build -b html {args}" # run via: $ hatch run test: