diff --git a/.flake8 b/.flake8
index e660df602..972c6429e 100644
--- a/.flake8
+++ b/.flake8
@@ -11,9 +11,12 @@ in-place = true
recursive = true
aggressive = 2
exclude =
- agents/meinberg_m1000/mibs/MBG-SNMP-LTNG-MIB.py,
- agents/meinberg_m1000/mibs/SNMPv2-MIB.py,
- agents/meinberg_m1000/mibs/MBG-SNMP-ROOT-MIB.py,
- agents/ibootbar/mibs/IBOOTPDU-MIB.py,
+ socs/mibs/MBG-SNMP-LTNG-MIB.py,
+ socs/mibs/SNMPv2-MIB.py,
+ socs/mibs/MBG-SNMP-ROOT-MIB.py,
+ socs/mibs/IBOOTPDU-MIB.py,
versioneer.py,
docs/conf.py,
+per-file-ignores =
+ # Windows-only dependency
+ socs/agents/thorlabs_mc2000b/agent.py:F405
diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml
index 016910988..dcca97f4f 100644
--- a/.github/workflows/deploy.yml
+++ b/.github/workflows/deploy.yml
@@ -75,14 +75,14 @@ jobs:
run: |
python3 -m pip install dist/socs*.whl
- - name: install testing requirements
+ - name: install requirements for testing
run: |
- pip3 install -r requirements/testing.txt
+ pip3 install -r requirements.txt
- name: Run unit tests
working-directory: ./tests
run: |
- python3 -m pytest -m 'not (integtest or spt3g)'
+ python3 -m pytest -m 'not integtest'
- name: upload to PyPI
run: |
diff --git a/.github/workflows/develop.yml b/.github/workflows/develop.yml
index 81a46f4ee..33962d5b7 100644
--- a/.github/workflows/develop.yml
+++ b/.github/workflows/develop.yml
@@ -79,9 +79,9 @@ jobs:
run: |
python3 -m pip install dist/socs*.whl
- - name: install testing requirements
+ - name: install requirements for testing
run: |
- pip3 install -r requirements/testing.txt
+ pip3 install -r requirements.txt
- name: Run unit tests
working-directory: ./tests
diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml
index 21e78a5c9..2d4a7f095 100644
--- a/.github/workflows/pytest.yml
+++ b/.github/workflows/pytest.yml
@@ -17,7 +17,7 @@ jobs:
steps:
- name: Cancel Previous Runs
- uses: styfle/cancel-workflow-action@0.10.0
+ uses: styfle/cancel-workflow-action@0.11.0
with:
access_token: ${{ github.token }}
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index f04007192..5e9a9289b 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -6,8 +6,13 @@ repos:
- id: check-yaml
- id: end-of-file-fixer
- id: trailing-whitespace
+- repo: https://github.com/pycqa/isort
+ rev: 5.10.1
+ hooks:
+ - id: isort
+ name: isort (python)
- repo: https://github.com/pre-commit/mirrors-autopep8
- rev: v1.7.0
+ rev: v2.0.0
hooks:
- id: autopep8
- repo: https://github.com/pycqa/flake8
diff --git a/Dockerfile b/Dockerfile
index c8b1fc0b3..cd2665827 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -2,13 +2,52 @@
# A container setup with an installation of socs.
# Use the ocs image as a base
-FROM simonsobs/ocs:v0.9.3
+FROM simonsobs/ocs:v0.10.0
-# Copy the current directory contents into the container at /app
-COPY . /app/socs/
+# Set up the cryo/smurf user and group so this can run on smurf-servers
+# See link for how all other smurf-containers are set up:
+# https://github.com/slaclab/smurf-base-docker/blob/master/Dockerfile
+RUN useradd -d /home/cryo -M cryo -u 1000 && \
+ groupadd smurf -g 1001 && \
+ usermod -aG smurf cryo && \
+ usermod -g smurf cryo && \
+ mkdir /home/cryo && \
+ chown cryo:smurf /home/cryo
+
+# Install packages
+# suprsync agent - rsync
+# labjack agent - wget, python3-pip, libusb-1.0-0-dev, udev
+RUN apt-get update && apt-get install -y rsync \
+ wget \
+ python3-pip \
+ libusb-1.0-0-dev \
+ udev
+# Install labjack ljm module
+# Copied from the labjack ljm dockerfile:
+# https://hub.docker.com/r/labjack/ljm/dockerfile
+WORKDIR /app/labjack/
+RUN wget https://cdn.docsie.io/file/workspace_u4AEu22YJT50zKF8J/doc_VDWGWsJAhd453cYSI/boo_9BFzMKFachlhscG9Z/file_NNCdkmsmvPHtgkHk8/labjack_ljm_software_2020_03_30_x86_64_betatar.gz -O labjack_ljm_software_2020_03_30_x86_64_beta.tar.gz
+RUN tar zxf ./labjack_ljm_software_2020_03_30_x86_64_beta.tar.gz
+RUN ./labjack_ljm_software_2020_03_30_x86_64/labjack_ljm_installer.run -- --no-restart-device-rules
+
+# Copy in and install requirements
+COPY requirements/ /app/socs/requirements
+COPY requirements.txt /app/socs/requirements.txt
WORKDIR /app/socs/
+RUN pip3 install -r requirements.txt
+
+# Copy the current directory contents into the container at /app
+COPY . /app/socs/
# Install socs
-RUN pip3 install -r requirements.txt && \
- pip3 install .
+RUN pip3 install .
+
+# Reset workdir to avoid local imports
+WORKDIR /
+
+# Port for HWP Encoder Beaglebone connection
+EXPOSE 8080/udp
+
+# Run agent on container startup
+ENTRYPOINT ["dumb-init", "ocs-agent-cli"]
diff --git a/README.rst b/README.rst
index 6a3f9853d..198e3e670 100644
--- a/README.rst
+++ b/README.rst
@@ -41,9 +41,19 @@ Install and update with pip::
$ pip3 install -U socs
-If you need to install the optional so3g module you can do so via::
+You may install optional dependencies by including one or more agent group
+names on installation, for example::
- $ pip3 install -U socs[so3g]
+ $ pip3 install -U socs[labjack,pysmurf]
+
+For a complete list of agent groups see the `Installation Documentation`_.
+
+If you would like to install all optional dependencies, use the special variant
+"all"::
+
+ $ pip3 install -U socs[all]
+
+.. _`Installation Documentation`: https://socs.readthedocs.io/en/develop/user/installation.html
Installing from Source
``````````````````````
diff --git a/agents/bluefors/Dockerfile b/agents/bluefors/Dockerfile
deleted file mode 100644
index c8867e219..000000000
--- a/agents/bluefors/Dockerfile
+++ /dev/null
@@ -1,19 +0,0 @@
-# SOCS Bluefors Agent
-# socs Agent container for running the Bluefors log tracking Agent.
-
-# Use socs base image
-FROM socs:latest
-
-# Set the working directory to proper agent directory
-WORKDIR /app/socs/agents/bluefors/
-
-## Copy this agent into the app/agents directory
-COPY . .
-
-# Run registry on container startup
-ENTRYPOINT ["dumb-init", "python3", "-u", "bluefors_log_tracker.py"]
-
-# Sensible default instance-id
-CMD ["--instance-id=bluefors", \
- "--site-hub=ws://crossbar:8001/ws", \
- "--site-http=http://crossbar:8001/call"]
diff --git a/agents/chwp/Dockerfile b/agents/chwp/Dockerfile
deleted file mode 100644
index ef1312a4e..000000000
--- a/agents/chwp/Dockerfile
+++ /dev/null
@@ -1,19 +0,0 @@
-# CHWP Agent
-
-# Use socs base image
-FROM socs:latest
-
-# Set the working directory to registry directory
-WORKDIR /app/socs/agents/chwp/
-
-# Copy this agent into the app/agents directory
-COPY . .
-
-# Run registry on container startup
-ENTRYPOINT ["dumb-init", "python3", "-u", "hwpbbb_agent.py"]
-
-# Sensible default arguments
-CMD ["--site-hub=ws://crossbar:8001/ws", \
- "--site-http=http://crossbar:8001/call"]
-
-EXPOSE 8080/udp
diff --git a/agents/cryomech_cpa/CPA_digital_panel_user_manual.pdf b/agents/cryomech_cpa/CPA_digital_panel_user_manual.pdf
deleted file mode 100644
index 13c81ffa5..000000000
Binary files a/agents/cryomech_cpa/CPA_digital_panel_user_manual.pdf and /dev/null differ
diff --git a/agents/cryomech_cpa/Dockerfile b/agents/cryomech_cpa/Dockerfile
deleted file mode 100644
index e7b5c3a79..000000000
--- a/agents/cryomech_cpa/Dockerfile
+++ /dev/null
@@ -1,21 +0,0 @@
-# SOCS PTC Agent
-
-# SOCS Agent container for interacting with PTC over ethernet
-# Use socs base image
-
-FROM socs:latest
-
-# Set the working directory to registry directory
-
-WORKDIR /app/socs/agents/cryomech-cpa/
-
-# Copy this agent into the app/socs/agents directory
-
-COPY . /app/socs/agents/cryomech-cpa/
-
-# Run agent on container startup
-
-ENTRYPOINT ["dumb-init", "python3", "-u", "cryomech_cpa_agent.py"]
-
-CMD ["--site-hub=ws://crossbar:8001/ws", \
- "--site-http=http://crossbar:8001/call"]
diff --git a/agents/cryomech_cpa/PT420RM_CPA1114_manual.pdf b/agents/cryomech_cpa/PT420RM_CPA1114_manual.pdf
deleted file mode 100644
index 774c5c120..000000000
Binary files a/agents/cryomech_cpa/PT420RM_CPA1114_manual.pdf and /dev/null differ
diff --git a/agents/hwp_rotation/Dockerfile b/agents/hwp_rotation/Dockerfile
deleted file mode 100644
index 0cf8308f4..000000000
--- a/agents/hwp_rotation/Dockerfile
+++ /dev/null
@@ -1,16 +0,0 @@
-# SOCS Agent container for controlling the HWP rotation speed and direction
-
-# Use socs base image
-FROM socs:latest
-
-# Set the working directory to registry directory
-WORKDIR /app/socs/agents/hwp_rotation/
-
-## Copy this agent into the app/agents directory
-COPY . .
-
-# Run agent on container startup
-ENTRYPOINT ["dumb-init", "python3", "-u", "rotation_agent.py"]
-
-CMD ["--site-hub=ws://crossbar:8001/ws", \
- "--site-http=http://crossbar:8001/call"]
diff --git a/agents/ibootbar/Dockerfile b/agents/ibootbar/Dockerfile
deleted file mode 100644
index d383635d4..000000000
--- a/agents/ibootbar/Dockerfile
+++ /dev/null
@@ -1,24 +0,0 @@
-# SOCS ibootbar Agent
-
-# Use socs base image
-FROM socs:latest
-
-# Set the working directory
-WORKDIR /app/socs/agents/ibootbar/
-
-# Copy in the Agent
-COPY ibootbar.py .
-
-# Avoid build error from successive COPY's
-# https://stackoverflow.com/a/62409523
-RUN true
-
-# Copy in the MIBS
-COPY mibs/ /root/.pysnmp/mibs/
-
-# Run agent on container startup
-ENTRYPOINT ["dumb-init", "python3", "-u", "ibootbar.py"]
-
-# Default site-hub
-CMD ["--site-hub=ws://crossbar:8001/ws", \
- "--site-http=http://crossbar:8001/call"]
diff --git a/agents/labjack/Dockerfile b/agents/labjack/Dockerfile
deleted file mode 100644
index 8ccff8d76..000000000
--- a/agents/labjack/Dockerfile
+++ /dev/null
@@ -1,30 +0,0 @@
-# SOCS LabJack Agent
-# socs Agent container for running the LabJack Agent.
-
-# Use socs base image
-FROM socs:latest
-
-# Set the working directory to proper agent directory
-WORKDIR /app/socs/agents/labjack/
-
-# Install ljm module from labjack, copied from the labjack ljm dockerfile
-# located at https://hub.docker.com/r/labjack/ljm/dockerfile
-RUN apt-get update
-RUN apt-get install -y wget python3-pip libusb-1.0-0-dev udev
-
-RUN wget https://labjack.com/sites/default/files/software/labjack_ljm_minimal_2020_03_30_x86_64_beta.tar.gz
-RUN tar zxf ./labjack_ljm_minimal_2020_03_30_x86_64_beta.tar.gz
-RUN ./labjack_ljm_minimal_2020_03_30_x86_64/labjack_ljm_installer.run -- --no-restart-device-rules
-
-RUN pip3 install --no-cache-dir https://labjack.com/sites/default/files/software/Python_LJM_2019_04_03.zip
-
-## Copy this agent into the app/agents directory
-COPY . .
-RUN pip3 install -r requirements.txt
-
-# Run registry on container startup
-ENTRYPOINT ["dumb-init", "python3", "-u", "labjack_agent.py"]
-
-# Sensible default arguments
-CMD ["--site-hub=ws://crossbar:8001/ws", \
- "--site-http=http://crossbar:8001/call"]
diff --git a/agents/labjack/requirements.txt b/agents/labjack/requirements.txt
deleted file mode 100644
index 6b419d67a..000000000
--- a/agents/labjack/requirements.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-numexpr
-scipy
-labjack-ljm
diff --git a/agents/lakeshore240/Dockerfile b/agents/lakeshore240/Dockerfile
deleted file mode 100644
index 0a3e145e6..000000000
--- a/agents/lakeshore240/Dockerfile
+++ /dev/null
@@ -1,17 +0,0 @@
-# SOCS Lakeshore 240 Agent
-# socs Agent container for interacting with a Lakeshore 240.
-
-# Use socs base image
-FROM socs:latest
-
-# Set the working directory to registry directory
-WORKDIR /app/socs/agents/lakeshore240/
-
-# Copy this agent into the app/agents directory
-COPY . .
-
-# Run registry on container startup
-ENTRYPOINT ["dumb-init", "python3", "-u", "LS240_agent.py"]
-
-CMD ["--site-hub=ws://crossbar:8001/ws", \
- "--site-http=http://crossbar:8001/call"]
diff --git a/agents/lakeshore336/Dockerfile b/agents/lakeshore336/Dockerfile
deleted file mode 100644
index 8bdb809ac..000000000
--- a/agents/lakeshore336/Dockerfile
+++ /dev/null
@@ -1,18 +0,0 @@
-# SOCS Lakeshore 336 Agent
-# socs Agent container for interacting with a Lakeshore 336.
-
-# Use socs base image
-FROM socs:latest
-
-# Set the working directory to registry directory
-WORKDIR /app/socs/agents/lakeshore336/
-
-# Copy this agent into the app/agents directory
-COPY . .
-
-# Run registry on container startup
-ENTRYPOINT ["dumb-init", "python3", "-u", "LS336_agent.py"]
-
-# Sensible default arguments
-CMD ["--site-hub=ws://sisock-crossbar:8001/ws", \
- "--site-http=http://sisock-crossbar:8001/call"]
diff --git a/agents/lakeshore370/Dockerfile b/agents/lakeshore370/Dockerfile
deleted file mode 100644
index abfc3397d..000000000
--- a/agents/lakeshore370/Dockerfile
+++ /dev/null
@@ -1,18 +0,0 @@
-# SOCS Lakeshore 370 Agent
-# socs Agent container for interacting with a Lakeshore 370.
-
-# Use socs base image
-FROM socs:latest
-
-# Set the working directory to registry directory
-WORKDIR /app/socs/agents/lakeshore370/
-
-# Copy this agent into the app/agents directory
-COPY . .
-
-# Run registry on container startup
-ENTRYPOINT ["dumb-init", "python3", "-u", "LS370_agent.py"]
-
-# Sensible default arguments
-CMD ["--site-hub=ws://sisock-crossbar:8001/ws", \
- "--site-http=http://sisock-crossbar:8001/call"]
diff --git a/agents/lakeshore372/Dockerfile b/agents/lakeshore372/Dockerfile
deleted file mode 100644
index 0661962e9..000000000
--- a/agents/lakeshore372/Dockerfile
+++ /dev/null
@@ -1,18 +0,0 @@
-# SOCS Lakeshore 372 Agent
-# socs Agent container for interacting with a Lakeshore 372.
-
-# Use socs base image
-FROM socs:latest
-
-# Set the working directory to registry directory
-WORKDIR /app/socs/agents/lakeshore372/
-
-# Copy this agent into the app/agents directory
-COPY . .
-
-# Run registry on container startup
-ENTRYPOINT ["dumb-init", "python3", "-u", "LS372_agent.py"]
-
-# Sensible default arguments
-CMD ["--site-hub=ws://crossbar:8001/ws", \
- "--site-http=http://crossbar:8001/call"]
diff --git a/agents/lakeshore425/Dockerfile b/agents/lakeshore425/Dockerfile
deleted file mode 100644
index 454dcc516..000000000
--- a/agents/lakeshore425/Dockerfile
+++ /dev/null
@@ -1,17 +0,0 @@
-# SOCS Lakeshore 425 Agent
-# socs Agent container for interacting with a Lakeshore 425.
-
-# Use socs base image
-FROM simonsobs/socs:latest
-
-# Set the working directory to registry directory
-WORKDIR /app/socs/agents/lakeshore425/
-
-# Copy this agent into the app/agents directory
-COPY . .
-
-# Run registry on container startup
-ENTRYPOINT ["dumb-init", "python3", "-u", "LS425_agent.py"]
-
-CMD ["--site-hub=ws://crossbar:8001/ws", \
- "--site-http=http://crossbar:8001/call"]
diff --git a/agents/magpie/Dockerfile b/agents/magpie/Dockerfile
deleted file mode 100644
index 32c02e02c..000000000
--- a/agents/magpie/Dockerfile
+++ /dev/null
@@ -1,12 +0,0 @@
-# Use socs base image
-FROM socs:latest
-
-RUN pip3 install pandas
-
-# Set the working directory to registry directory
-WORKDIR /app/socs/agents/magpie
-COPY . .
-
-
-# Run registry on container startup
-ENTRYPOINT ["dumb-init", "python3", "-u", "magpie_agent.py"]
diff --git a/agents/meinberg_m1000/Dockerfile b/agents/meinberg_m1000/Dockerfile
deleted file mode 100644
index d3d744f79..000000000
--- a/agents/meinberg_m1000/Dockerfile
+++ /dev/null
@@ -1,24 +0,0 @@
-# SOCS Meinberg M1000 Agent
-
-# Use socs base image
-FROM socs:latest
-
-# Set the working directory
-WORKDIR /app/socs/agents/meinberg_m1000/
-
-# Copy in the Agent
-COPY meinberg_m1000_agent.py .
-
-# Avoid build error from successive COPY's
-# https://stackoverflow.com/a/62409523
-RUN true
-
-# Copy in the MIBS
-COPY mibs/ /root/.pysnmp/mibs/
-
-# Run agent on container startup
-ENTRYPOINT ["dumb-init", "python3", "-u", "meinberg_m1000_agent.py"]
-
-# Default site-hub
-CMD ["--site-hub=ws://crossbar:8001/ws", \
- "--site-http=http://crossbar:8001/call"]
diff --git a/agents/ocs_plugin_so.py b/agents/ocs_plugin_so.py
deleted file mode 100644
index a994e7b75..000000000
--- a/agents/ocs_plugin_so.py
+++ /dev/null
@@ -1,28 +0,0 @@
-"""
-Register our agents in ocs central. In order for this script to
-be imported by site_config.scan_for_agents(), it must be in the python
-path and called something like ocs_plugin_*.
-"""
-
-import ocs
-import os
-root = os.path.abspath(os.path.split(__file__)[0])
-
-for n, f in [
- ('Lakeshore372Agent', 'lakeshore372/LS372_agent.py'),
- ('Lakeshore370Agent', 'lakeshore370/LS370_agent.py'),
- ('Lakeshore240Agent', 'lakeshore240/LS240_agent.py'),
- ('Keithley2230G-PSU', 'keithley2230G-psu/keithley_agent.py'),
- ('PysmurfController', 'pysmurf_controller/pysmurf_controller.py'),
- ('BlueforsAgent', 'bluefors/bluefors_log_tracker.py'),
- ('CryomechCPAAgent', 'cryomech_cpa/cryomech_cpa_agent.py'),
- ('LATRtXYStageAgent', 'xy_stage/xy_latrt_agent.py'),
- ('ACUAgent', 'acu/acu_agent.py'),
- ('FTSAerotechAgent', 'fts_aerotech_stage/fts_aerotech_agent.py'),
- ('VantagePro2Agent', 'vantagePro2_agent/vantage_pro2_agent.py'),
- ('HWPPicoscopeAgent', 'hwp_picoscope/pico_agent.py'),
- ('FPGAAgent', 'holo_fpga/roach_agent.py'),
- ('SynthAgent', 'holo_synth/synth_agent.py'),
- ('SupRsync', 'suprsync/suprsync.py'),
-]:
- ocs.site_config.register_agent_class(n, os.path.join(root, f))
diff --git a/agents/pfeiffer_tc400/Dockerfile b/agents/pfeiffer_tc400/Dockerfile
deleted file mode 100644
index c8970fccc..000000000
--- a/agents/pfeiffer_tc400/Dockerfile
+++ /dev/null
@@ -1,22 +0,0 @@
-# SOCS Pfeiffer tc400 Agent
-# socs Agent container for interacting with Pfeiffer TC 400 electronic drive unit
-# using a moxa serial to ethernet controller converter.
-
-# Use socs base image
-FROM socs:latest
-
-# Set the working directory to registry directory
-WORKDIR /app/socs/agents/pfeiffer_tc400/
-
-# Copy and install requirements
-COPY requirements.txt .
-RUN pip3 install -r requirements.txt
-
-## Copy this agent into the app/agents directory
-COPY . .
-
-# Run agent on container startup
-ENTRYPOINT ["dumb-init", "python3", "-u", "pfeiffer_tc400_agent.py"]
-
-CMD ["--site-hub=ws://crossbar:8001/ws", \
- "--site-http=http://crossbar:8001/call"]
diff --git a/agents/pfeiffer_tc400/requirements.txt b/agents/pfeiffer_tc400/requirements.txt
deleted file mode 100644
index 3b8e8fc39..000000000
--- a/agents/pfeiffer_tc400/requirements.txt
+++ /dev/null
@@ -1 +0,0 @@
-pfeiffer-vacuum-protocol==0.4
diff --git a/agents/pfeiffer_tpg366/Dockerfile b/agents/pfeiffer_tpg366/Dockerfile
deleted file mode 100644
index b474ce75a..000000000
--- a/agents/pfeiffer_tpg366/Dockerfile
+++ /dev/null
@@ -1,17 +0,0 @@
-# SOCS Pfeiffer TPG-366 Agent
-# socs Agent container for interacting with a Pfeiffer TPG-366 pressure gauge reader.
-
-# Use socs base image
-FROM socs:latest
-
-# Set the working directory to registry directory
-WORKDIR /app/socs/agents/pfeiffer_tpg366/
-
-# Copy this agent into the app/agents directory
-COPY . .
-
-# Run registry on container startup
-ENTRYPOINT ["dumb-init", "python3", "-u", "pfeiffer_tpg366_agent.py"]
-
-CMD ["--site-hub=ws://crossbar:8001/ws", \
- "--site-http=http://crossbar:8001/call"]
diff --git a/agents/pysmurf_controller/Dockerfile b/agents/pysmurf_controller/Dockerfile
deleted file mode 100644
index 9b3c2f54b..000000000
--- a/agents/pysmurf_controller/Dockerfile
+++ /dev/null
@@ -1,21 +0,0 @@
-FROM simonsobs/sodetlib:v0.4.0
-
-WORKDIR /app
-
-# SOCS installation
-RUN python3 -m pip install --src=/app/ -e git+https://github.com/simonsobs/socs.git@py36#egg=socs
-
-ENV OCS_CONFIG_DIR /config
-
-
-# Run registry on container startup
-WORKDIR /app/socs/agents/pysmurf_controller
-
-COPY . .
-RUN pip3 install -r requirements.txt
-
-ENTRYPOINT ["dumb-init", "python3", "-u", "pysmurf_controller.py"]
-
-# Sensible defaults for setup with sisock
-CMD ["--site-hub=ws://crossbar:8001/ws", \
- "--site-http=http://crossbar:8001/call"]
diff --git a/agents/pysmurf_controller/requirements.txt b/agents/pysmurf_controller/requirements.txt
deleted file mode 100644
index 1edd89c5b..000000000
--- a/agents/pysmurf_controller/requirements.txt
+++ /dev/null
@@ -1 +0,0 @@
-dumb-init
diff --git a/agents/pysmurf_monitor/Dockerfile b/agents/pysmurf_monitor/Dockerfile
deleted file mode 100644
index aa1a0160b..000000000
--- a/agents/pysmurf_monitor/Dockerfile
+++ /dev/null
@@ -1,30 +0,0 @@
-# SOCS Pysmurf Monitor agent
-# socs Agent for monitoring Pysmurf Publisher, and writing file info to database.
-
-# Use socs base image
-FROM socs:latest
-
-# Sets up the cryo / smurf user and group so this can run on smurf-servers
-# See link for how all other smurf-containers are set up:
-# https://github.com/slaclab/smurf-base-docker/blob/master/Dockerfile
-RUN useradd -d /home/cryo -M cryo -u 1000 && \
- groupadd smurf -g 1001 && \
- usermod -aG smurf cryo && \
- usermod -g smurf cryo && \
- mkdir /home/cryo && \
- chown cryo:smurf /home/cryo
-
-# Set the working directory to registry directory
-WORKDIR /app/socs/agents/pysmurf_monitor/
-
-# Copy this agent into the app/agents directory
-COPY . .
-RUN pip3 install -r requirements.txt
-
-USER cryo:smurf
-
-# Run registry on container startup
-ENTRYPOINT ["dumb-init", "python3", "-u", "pysmurf_monitor.py"]
-
-CMD ["--site-hub=ws://crossbar:8001/ws", \
- "--site-http=http://crossbar:8001/call"]
diff --git a/agents/pysmurf_monitor/requirements.txt b/agents/pysmurf_monitor/requirements.txt
deleted file mode 100644
index 09cf856af..000000000
--- a/agents/pysmurf_monitor/requirements.txt
+++ /dev/null
@@ -1 +0,0 @@
-sqlalchemy >= 1.4.0
diff --git a/agents/scpi_psu/Dockerfile b/agents/scpi_psu/Dockerfile
deleted file mode 100644
index 2535e7caf..000000000
--- a/agents/scpi_psu/Dockerfile
+++ /dev/null
@@ -1,17 +0,0 @@
-# SOCS PSU Agent
-# socs Agent container for interacting with PSUs over GPIB to Ethernet
-
-# Use socs base image
-FROM socs:latest
-
-# Set the working directory to registry directory
-WORKDIR /app/socs/agents/scpi_psu/
-
-## Copy this agent into the app/agents directory
-COPY . .
-
-# Run agent on container startup
-ENTRYPOINT ["dumb-init", "python3", "-u", "scpi_psu_agent.py"]
-
-CMD ["--site-hub=ws://crossbar:8001/ws", \
- "--site-http=http://crossbar:8001/call"]
diff --git a/agents/smurf_crate_monitor/Dockerfile b/agents/smurf_crate_monitor/Dockerfile
deleted file mode 100644
index 8e696038e..000000000
--- a/agents/smurf_crate_monitor/Dockerfile
+++ /dev/null
@@ -1,17 +0,0 @@
-# OCS SMuRF Crate Agent
-# ocs Agent container for running the smurf crate monitor.
-
-# Use ocs base image
-FROM socs:latest
-
-# Set the working directory to registry directory
-WORKDIR /app/socs/agents/smurf_crate_monitor/
-
-COPY . .
-
-# Run registry on container startup
-ENTRYPOINT ["dumb-init", "python3", "-u", "smurf_crate_monitor.py"]
-
-# Sensible defaults for setup with sisock
-CMD ["--site-hub=ws://sisock-crossbar:8001/ws", \
- "--site-http=http://sisock-crossbar:8001/call"]
diff --git a/agents/smurf_file_emulator/Dockerfile b/agents/smurf_file_emulator/Dockerfile
deleted file mode 100644
index fe9fe4ae4..000000000
--- a/agents/smurf_file_emulator/Dockerfile
+++ /dev/null
@@ -1,17 +0,0 @@
-# SMuRF File Emulator Agent docker
-
-# Use socs base image
-FROM socs:latest
-
-# Set the working directory to registry directory
-WORKDIR /app/socs/agents/smurf_file_emulator/
-
-# Copy this agent into the app/agents directory
-COPY . .
-
-# Run registry on container startup
-ENTRYPOINT ["dumb-init", "python3", "-u", "smurf_file_emulator.py"]
-
-# Sensible default arguments
-CMD ["--site-hub=ws://crossbar:8001/ws", \
- "--site-http=http://crossbar:8001/call"]
diff --git a/agents/smurf_stream_simulator/Dockerfile b/agents/smurf_stream_simulator/Dockerfile
deleted file mode 100644
index fd2a75218..000000000
--- a/agents/smurf_stream_simulator/Dockerfile
+++ /dev/null
@@ -1,14 +0,0 @@
-# SOCS smurf stream simulator
-
-# Use socs base image
-FROM socs:latest
-
-# Set the working directory to registry directory
-WORKDIR /app/socs/agents/smurf_stream_simulator/
-COPY . .
-# Run registry on container startup
-ENTRYPOINT ["dumb-init", "python3", "-u", "smurf_stream_simulator.py"]
-
-
-CMD ["--site-hub=ws://crossbar:8001/ws", \
- "--site-http=http://crossbar:8001/call"]
diff --git a/agents/suprsync/Dockerfile b/agents/suprsync/Dockerfile
deleted file mode 100644
index 03c6b5571..000000000
--- a/agents/suprsync/Dockerfile
+++ /dev/null
@@ -1,26 +0,0 @@
-# SupRsync docker file
-FROM socs:latest
-
-# Sets up the cryo / smurf user and group so this can run on smurf-servers
-# See link for how all other smurf-containers are set up:
-# https://github.com/slaclab/smurf-base-docker/blob/master/Dockerfile
-RUN useradd -d /home/cryo -M cryo -u 1000 && \
- groupadd smurf -g 1001 && \
- usermod -aG smurf cryo && \
- usermod -g smurf cryo && \
- mkdir /home/cryo && \
- chown cryo:smurf /home/cryo
-
-# Set the working directory to registry directory
-WORKDIR /app/socs/agents/suprsync
-# Copy this agent into the app/agents directory
-RUN apt-get update && apt-get install -y rsync
-COPY . .
-RUN pip3 install -r requirements.txt
-
-USER cryo:smurf
-
-ENTRYPOINT ["dumb-init", "python3", "-u", "suprsync.py"]
-
-CMD ["--site-hub=ws://crossbar:8001/ws", \
- "--site-http=http://crossbar:8001/call"]
diff --git a/agents/suprsync/requirements.txt b/agents/suprsync/requirements.txt
deleted file mode 100644
index 8eda5dcab..000000000
--- a/agents/suprsync/requirements.txt
+++ /dev/null
@@ -1 +0,0 @@
-SQLAlchemy >= 1.4
diff --git a/agents/synacc/Dockerfile b/agents/synacc/Dockerfile
deleted file mode 100644
index cda2a6857..000000000
--- a/agents/synacc/Dockerfile
+++ /dev/null
@@ -1,19 +0,0 @@
-# SOCS Synaccess Agent, used to run the Synaccess power strip
-
-# Use socs base image
-FROM socs:latest
-
-# Set the working directory to proper agent directory
-WORKDIR /app/socs/agents/synacc/
-
-## Copy this agent into the app/agents directory
-COPY . .
-
-RUN pip3 install -r requirements.txt
-
-# Run registry on container startup
-ENTRYPOINT ["dumb-init", "python3", "-u", "synacc.py"]
-
-# Sensible default arguments
-CMD ["--site-hub=ws://crossbar:8001/ws", \
- "--site-http=http://crossbar:8001/call"]
diff --git a/agents/synacc/requirements.txt b/agents/synacc/requirements.txt
deleted file mode 100644
index f2293605c..000000000
--- a/agents/synacc/requirements.txt
+++ /dev/null
@@ -1 +0,0 @@
-requests
diff --git a/agents/tektronix3021c/Dockerfile b/agents/tektronix3021c/Dockerfile
deleted file mode 100644
index efd5143cb..000000000
--- a/agents/tektronix3021c/Dockerfile
+++ /dev/null
@@ -1,18 +0,0 @@
-# SOCS AWG Agent
-# socs Agent container for interacting with AWG over GPIB to Ethernet
-# converts.
-
-# Use socs base image
-FROM socs:latest
-
-# Set the working directory to registry directory
-WORKDIR /app/socs/agents/tektronix3021c/
-
-## Copy this agent into the app/agents directory
-COPY . .
-
-# Run agent on container startup
-ENTRYPOINT ["python3", "-u", "tektronix_agent.py"]
-
-CMD ["--site-hub=ws://sisock-crossbar:8001/ws", \
- "--site-http=http://sisock-crossbar:8001/call"]
diff --git a/agents/tektronix3021c/tektronix_agent.py b/agents/tektronix3021c/tektronix_agent.py
deleted file mode 100644
index 3e048c9f3..000000000
--- a/agents/tektronix3021c/tektronix_agent.py
+++ /dev/null
@@ -1,212 +0,0 @@
-"""Michael Randall
-mrandall@ucsd.edu"""
-
-import time
-import os
-import socket
-import argparse
-
-from socs.agent.tektronix3021c_driver import TektronixInterface
-
-on_rtd = os.environ.get('READTHEDOCS') == 'True'
-if not on_rtd:
- from ocs import ocs_agent, site_config
- from ocs.ocs_twisted import TimeoutLock
-
-
-class TektronixAWGAgent:
- """Tektronix3021c Agent.
-
- Args:
- ip_address (string): the IP address of the gpib to ethernet
- controller connected to the function generator.
-
- gpib_slot (int): the gpib address currently set
- on the function generator.
-
- """
-
- def __init__(self, agent, ip_address, gpib_slot):
- self.agent = agent
- self.log = agent.log
- self.lock = TimeoutLock()
-
- self.job = None
-
- self.ip_address = ip_address
- self.gpib_slot = gpib_slot
- self.monitor = False
-
- self.awg = None
- # Registers data feeds
- agg_params = {
- 'frame_length': 60,
- }
- self.agent.register_feed('awg',
- record=True,
- agg_params=agg_params)
-
- def init_awg(self, session, params=None):
- """ Task to connect to Tektronix AWG """
-
- with self.lock.acquire_timeout(0) as acquired:
- if not acquired:
- return False, "Could not acquire lock"
-
- try:
- self.awg = TektronixInterface(self.ip_address, self.gpib_slot)
- self.idn = self.awg.identify()
-
- except socket.timeout as e:
- self.log.error("""Tektronix AWG
- timed out during connect -> {}""".format(e))
- return False, "Timeout"
-
- self.log.info("Connected to AWG: {}".format(self.idn))
-
- return True, 'Initialized AWG.'
-
- def set_frequency(self, session, params=None):
- """
- Sets frequency of function generator:
-
- Args:
- frequency (float): Frequency to set in Hz.
- Must be between 0 and 25,000,000.
- """
-
- with self.lock.acquire_timeout(1) as acquired:
- if acquired:
- freq = params.get("frequency")
-
- try:
- float(freq)
-
- except ValueError as e:
- return False, """Frequency must
- be a float or int -> {}""".format(e)
-
- except TypeError as e:
- return False, """Frequency must
- not be of NoneType -> {}""".format(e)
-
- if 0 < freq < 25E6:
- self.awg.set_freq(freq)
-
- data = {'timestamp': time.time(),
- 'block_name': "AWG_frequency_cmd",
- 'data': {'AWG_frequency_cmd': freq}
- }
- self.agent.publish_to_feed('awg', data)
-
- else:
- return False, """Invalid input:
- Frequency must be between 0 and 25,000,000 Hz"""
-
- else:
- return False, "Could not acquire lock"
-
- return True, 'Set frequency {} Hz'.format(params)
-
- def set_amplitude(self, session, params=None):
- """
- Sets current of power supply:
-
- Args:
- amplitude (float): Peak to Peak voltage to set.
- Must be between 0 and 10.
- """
- with self.lock.acquire_timeout(1) as acquired:
- if acquired:
- amp = params.get('amplitude')
- try:
- float(amp)
-
- except ValueError as e:
- return False, """Amplitude must be
- a float or int -> {}""".format(e)
-
- except TypeError as e:
- return False, """Amplitude must not be
- of NoneType -> {}""".format(e)
-
- if 0 < amp < 10:
- self.awg.set_amp(amp)
-
- data = {'timestamp': time.time(),
- 'block_name': "AWG_amplitude_cmd",
- 'data': {'AWG_amplitude_cmd': amp}
- }
- self.agent.publish_to_feed('awg', data)
-
- else:
- return False, """Amplitude must be
- between 0 and 10 Volts peak to peak"""
-
- else:
- return False, "Could not acquire lock"
-
- return True, 'Set amplitude to {} Vpp'.format(params)
-
- def set_output(self, session, params=None):
- """
- Task to turn channel on or off.
-
- Args:
- state (bool): True for on, False for off.
- """
- with self.lock.acquire_timeout(1) as acquired:
- if acquired:
- state = params.get("state")
-
- try:
- bool(state)
-
- except ValueError as e:
- return False, "State must be a boolean -> {}".format(e)
-
- except TypeError as e:
- return False, """State must not
- be of NoneType -> {}""".format(e)
-
- self.awg.set_output(state)
-
- data = {'timestamp': time.time(),
- 'block_name': "AWG_output_cmd",
- 'data': {'AWG_output_cmd': int(state)}
- }
- self.agent.publish_to_feed('awg', data)
-
- else:
- return False, "Could not acquire lock"
-
- return True, 'Set Output to {}.'.format(params)
-
-
-def make_parser(parser=None):
- if parser is None:
- parser = argparse.ArgumentParser()
-
- pgroup = parser.add_argument_group('Agent Options')
- pgroup.add_argument('--ip-address', type=str,
- help="IP address of tektronix device")
- pgroup.add_argument('--gpib-slot', type=int,
- help="GPIB slot of tektronix device")
- return parser
-
-
-if __name__ == '__main__':
-
- parser = make_parser()
- args = site_config.parse_args(agent_class="Tektronix AWG", parser=parser)
-
- agent, runner = ocs_agent.init_site_agent(args)
-
- p = TektronixAWGAgent(agent, args.ip_address, args.gpib_slot)
-
- agent.register_task('init', p.init_awg, startup=True)
- agent.register_task('set_frequency', p.set_frequency)
- agent.register_task('set_amplitude', p.set_amplitude)
- agent.register_task('set_output', p.set_output)
-
- runner.run(agent, auto_reconnect=True)
diff --git a/agents/vantagePro2_agent/Dockerfile b/agents/vantagePro2_agent/Dockerfile
deleted file mode 100644
index 12c06a9b2..000000000
--- a/agents/vantagePro2_agent/Dockerfile
+++ /dev/null
@@ -1,17 +0,0 @@
-# SOCS Weather Monitor Agent
-# socs Agent container for interacting with the Vantrage Pro2 Weather Monitor.
-
-# Use socs base image
-FROM socs:latest
-
-# Set the working directory to registry directory
-WORKDIR /app/socs/agents/vantage_pro2/
-
-# Copy this agent into the app/agents directory
-COPY . .
-
-# Run registry on container startup
-ENTRYPOINT ["dumb-init", "python3", "-u", "vantage_pro2_agent.py"]
-
-CMD ["--site-hub=ws://crossbar:8001/ws", \
- "--site-http=http://crossbar:8001/call"]
diff --git a/agents/wiregrid_encoder/Dockerfile b/agents/wiregrid_encoder/Dockerfile
deleted file mode 100644
index 7d7592549..000000000
--- a/agents/wiregrid_encoder/Dockerfile
+++ /dev/null
@@ -1,17 +0,0 @@
-# WireGrid Encoder Agent
-
-# Use socs base image
-FROM socs:latest
-
-# Set the working directory to registry directory
-WORKDIR /app/socs/agents/wiregrid_encoder/
-
-# Copy this agent into the app/agents directory
-COPY . .
-
-# Run registry on container startup
-ENTRYPOINT ["dumb-init", "python3", "-u", "wiregrid_encoder.py"]
-
-# Sensible default arguments
-CMD ["--site-hub=ws://crossbar:8001/ws", \
- "--site-http=http://crossbar:8001/call"]
diff --git a/agents/wiregrid_kikusui/Dockerfile b/agents/wiregrid_kikusui/Dockerfile
deleted file mode 100644
index e665403ef..000000000
--- a/agents/wiregrid_kikusui/Dockerfile
+++ /dev/null
@@ -1,18 +0,0 @@
-# SOCS AWG Agent
-# socs Agent container for interacting with AWG over GPIB to Ethernet
-# converts.
-
-# Use socs base image
-FROM socs:latest
-
-# Set the working directory to registry directory
-WORKDIR /app/socs/agents/wiregrid_kikusui/
-
-## Copy this agent into the app/agents directory
-COPY . .
-
-# Run agent on container startup
-ENTRYPOINT ["dumb-init", "python3", "-u", "kikusui_agent.py"]
-
-CMD ["--site-hub=ws://crossbar:8001/ws", \
- "--site-http=http://crossbar:8001/call"]
diff --git a/bin/rename_fields b/bin/rename_fields
index ceeb194ca..968f4af11 100755
--- a/bin/rename_fields
+++ b/bin/rename_fields
@@ -2,19 +2,17 @@
# Rename fields to valid names in recorded .g3 files. Also translate from HK v0
# to HK v1. This is heavily based on the so3g.hk.translator.
-import os
import hashlib
import logging
+import os
import sqlite3
-
from pathlib import Path
-from tqdm import tqdm
import so3g
+from ocs.ocs_feed import Feed
from so3g.hk.translator import HKTranslator
from spt3g import core
-
-from ocs.ocs_feed import Feed
+from tqdm import tqdm
class RenamerRecord:
diff --git a/bin/suprsync b/bin/suprsync
index 1c98b68bb..1e1aa3847 100755
--- a/bin/suprsync
+++ b/bin/suprsync
@@ -2,10 +2,11 @@
"""
Utility script for interacting with the suprsync db.
"""
-from socs.db.suprsync import SupRsyncFilesManager, SupRsyncFile
import argparse
import os
+from socs.db.suprsync import SupRsyncFile, SupRsyncFilesManager
+
def check_func(args):
srfm = SupRsyncFilesManager(args.db, create_all=False)
diff --git a/docker-compose.yml b/docker-compose.yml
index c614e89dd..84d975f65 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -1,7 +1,7 @@
version: '3.2'
services:
# --------------------------------------------------------------------------
- # The socs library.
+ # SOCS Base Image
# --------------------------------------------------------------------------
socs:
image: "socs"
@@ -10,257 +10,26 @@ services:
# --------------------------------------------------------------------------
# SOCS Agents
# --------------------------------------------------------------------------
-
- # --------------------------------------------------------------------------
- # The Lakeshore 372 Agent
- # --------------------------------------------------------------------------
- ocs-lakeshore372-agent:
- image: "ocs-lakeshore372-agent"
- build: ./agents/lakeshore372/
- depends_on:
- - "socs"
-
- # --------------------------------------------------------------------------
- # The Lakeshore 370 Agent
- # --------------------------------------------------------------------------
- ocs-lakeshore370-agent:
- image: "ocs-lakeshore370-agent"
- build: ./agents/lakeshore370/
- depends_on:
- - "socs"
-
- # --------------------------------------------------------------------------
- # The Lakeshore 240 Agent
- # --------------------------------------------------------------------------
- ocs-lakeshore240-agent:
- image: "ocs-lakeshore240-agent"
- build: ./agents/lakeshore240/
- depends_on:
- - "socs"
-
- # --------------------------------------------------------------------------
- # The Lakeshore 336 Agent
- # --------------------------------------------------------------------------
- ocs-lakeshore336-agent:
- image: "ocs-lakeshore336-agent"
- build: ./agents/lakeshore336/
- depends_on:
- - "socs"
-
- # --------------------------------------------------------------------------
- # The Lakeshore 425 Agent
- # --------------------------------------------------------------------------
- ocs-lakeshore425-agent:
- image: "ocs-lakeshore425-agent"
- build: ./agents/lakeshore425/
-
- # --------------------------------------------------------------------------
- # The Pysmurf Controller Agent
- # --------------------------------------------------------------------------
ocs-pysmurf-agent:
image: "ocs-pysmurf-agent"
- build: ./agents/pysmurf_controller/
-
- # --------------------------------------------------------------------------
- # The Pysmurf Monitor Agent
- # --------------------------------------------------------------------------
- ocs-pysmurf-monitor-agent:
- image: "ocs-pysmurf-monitor-agent"
- build: ./agents/pysmurf_monitor/
- depends_on:
- - "socs"
+ build: ./docker/pysmurf_controller/
- # --------------------------------------------------------------------------
- # Smurf stream simulator agent
- # --------------------------------------------------------------------------
- ocs-smurf-stream-sim-agent:
- image: "ocs-smurf-stream-sim"
- build: ./agents/smurf_stream_simulator/
- depends_on:
- - "socs"
-
- # --------------------------------------------------------------------------
- # The Bluefors log tracking Agent
- # --------------------------------------------------------------------------
- ocs-bluefors-agent:
- image: "ocs-bluefors-agent"
- build: ./agents/bluefors/
- depends_on:
- - "socs"
-
- # --------------------------------------------------------------------------
- # Power Supply control Agent
- # --------------------------------------------------------------------------
- ocs-scpi-psu-agent:
- image: "ocs-scpi-psu-agent"
- build: ./agents/scpi_psu/
- depends_on:
- - "socs"
-
- # --------------------------------------------------------------------------
- # Cryomech CPA compressor log agent
- # --------------------------------------------------------------------------
- ocs-cryomech-cpa-agent:
- image: "ocs-cryomech-cpa-agent"
- build: ./agents/cryomech_cpa/
- depends_on:
- - "socs"
-
- # --------------------------------------------------------------------------
- # LabJack Agent
- # --------------------------------------------------------------------------
- ocs-labjack-agent:
- image: "ocs-labjack-agent"
- build: ./agents/labjack/
- depends_on:
- - "socs"
-
- # --------------------------------------------------------------------------
- # Pfeiffer tpg366 agent
- # --------------------------------------------------------------------------
- ocs-pfeiffer-tpg366-agent:
- image: "ocs-pfeiffer-tpg366-agent"
- build: ./agents/pfeiffer_tpg366/
- depends_on:
- - "socs"
-
- # --------------------------------------------------------------------------
- # Synaccess power strip agent
- # --------------------------------------------------------------------------
- ocs-synaccess-agent:
- image: "ocs-synaccess-agent"
- build: ./agents/synacc/
- depends_on:
- - "socs"
-
- # --------------------------------------------------------------------------
- # Meinberg M1000
- # --------------------------------------------------------------------------
- ocs-meinberg-m1000-agent:
- image: "ocs-meinberg-m1000-agent"
- build: ./agents/meinberg_m1000/
- depends_on:
- - "socs"
-
- # -------------------------------------------------------------------------
- # SMuRF Crate Monitor
- # -------------------------------------------------------------------------
- ocs-smurf-crate-monitor:
- image: "ocs-smurf-crate-monitor"
- build: ./agents/smurf_crate_monitor/
- depends_on:
- - "socs"
-
- # --------------------------------------------------------------------------
- # Tektronix3021c
- # --------------------------------------------------------------------------
- ocs-tektronix3021c-agent:
- image: "ocs-tektronix3021c-agent"
- build: ./agents/tektronix3021c/
- depends_on:
- - "socs"
-
- # --------------------------------------------------------------------------
- # Pfeiffer Turbo Controller
- # --------------------------------------------------------------------------
- ocs-pfeiffer-tc400-agent:
- image: "ocs-pfeiffer-tc400-agent"
- build: ./agents/pfeiffer_tc400/
- depends_on:
- - "socs"
-
- # --------------------------------------------------------------------------
- # CHWP Encoder BBB agent
- # --------------------------------------------------------------------------
- ocs-hwpbbb-agent:
- image: "ocs-hwpbbb-agent"
- build: ./agents/chwp/
- depends_on:
- - "socs"
-
- # --------------------------------------------------------------------------
- # CHWP picoscope agent
- # --------------------------------------------------------------------------
- ocs-hwp-picoscope-agent:
- image: "ocs-hwp-picoscope-agent"
- build: ./agents/hwp_picoscope/
- depends_on:
- - "socs"
-
- # --------------------------------------------------------------------------
- # HWP Rotation Agent
- # --------------------------------------------------------------------------
- ocs-hwp-rotation-agent:
- image: "ocs-hwp-rotation-agent"
- build: ./agents/hwp_rotation/
- depends_on:
- - "socs"
-
- # --------------------------------------------------------------------------
- # Smurf file emulator agent
- # --------------------------------------------------------------------------
- ocs-smurf-file-emulator-agent:
- image: "ocs-smurf-file-emulator-agent"
- build: ./agents/smurf_file_emulator/
-
- ocs-suprsync-agent:
- image: 'ocs-suprsync-agent'
- build: ./agents/suprsync/
- depends_on:
- - "socs"
-
- # --------------------------------------------------------------------------
- # Magpie agent
- # --------------------------------------------------------------------------
- ocs-magpie-agent:
- image: "ocs-magpie-agent"
- build: ./agents/magpie
- depends_on:
- - "socs"
-
- # --------------------------------------------------------------------------
- # Wiregrid KIKUSUI power supply agent
- # --------------------------------------------------------------------------
- ocs-wgkikusui-agent:
- image: "ocs-wgkikusui-agent"
- build: ./agents/wiregrid_kikusui/
- depends_on:
- - "socs"
-
- # --------------------------------------------------------------------------
- # Wiregrid actuator agent
- # --------------------------------------------------------------------------
ocs-wgactuator-agent:
image: "ocs-wgactuator-agent"
- build: ./agents/wiregrid_actuator/
- depends_on:
- - "socs"
-
- # --------------------------------------------------------------------------
- # Wiregrid encoder agent
- # --------------------------------------------------------------------------
- ocs-wgencoder-agent:
- image: "ocs-wgencoder-agent"
- build: ./agents/wiregrid_encoder/
+ build: ./docker/wiregrid_actuator/
depends_on:
- "socs"
- # --------------------------------------------------------------------------
- # iBootbar agent
- # --------------------------------------------------------------------------
- ocs-ibootbar-agent:
- image: "ocs-ibootbar-agent"
- build: ./agents/ibootbar/
- depends_on:
- - "socs"
+ # Only works with --privileged, so users must build the image themselves
+ # ocs-hwp-picoscope-agent:
+ # image: "ocs-hwp-picoscope-agent"
+ # build: ./docker/hwp_picoscope/
+ # depends_on:
+ # - "socs"
# --------------------------------------------------------------------------
# SOCS Simulators
# --------------------------------------------------------------------------
-
- # --------------------------------------------------------------------------
- # Lakeshore240 Simulator
- # --------------------------------------------------------------------------
ocs-lakeshore240-simulator:
image: "ocs-lakeshore240-simulator"
build: ./simulators/lakeshore240/
diff --git a/agents/hwp_picoscope/Dockerfile b/docker/hwp_picoscope/Dockerfile
similarity index 63%
rename from agents/hwp_picoscope/Dockerfile
rename to docker/hwp_picoscope/Dockerfile
index 9fe0f934b..871fa90ab 100644
--- a/agents/hwp_picoscope/Dockerfile
+++ b/docker/hwp_picoscope/Dockerfile
@@ -1,4 +1,5 @@
-FROM simonsobs/socs
+# hwp_picoscope Agent image
+FROM socs:latest
RUN apt update && apt install -y wget udev libusb-1.0-0
@@ -13,13 +14,5 @@ WORKDIR /make/picosdk-python-wrappers
RUN git switch -c 89003868b5bc52511ee57419f0afbfade25f1882
RUN python3 setup.py install
-WORKDIR /app/socs/agents/picoscope/
-
-# Copy this agent into the app/agents directory
-COPY . .
-
-# Run registry on container startup
-ENTRYPOINT ["dumb-init", "python3", "-u", "pico_agent.py"]
-
-CMD ["--site-hub=ws://crossbar:8001/ws", \
- "--site-http=http://crossbar:8001/call"]
+# Run agent on container startup
+ENTRYPOINT ["dumb-init", "ocs-agent-cli"]
diff --git a/docker/pysmurf_controller/Dockerfile b/docker/pysmurf_controller/Dockerfile
new file mode 100644
index 000000000..8fe678df9
--- /dev/null
+++ b/docker/pysmurf_controller/Dockerfile
@@ -0,0 +1,12 @@
+FROM simonsobs/sodetlib:v0.4.1
+
+WORKDIR /app
+
+ENV OCS_CONFIG_DIR /config
+
+# SOCS installation
+RUN python3 -m pip install --src=/app/ -e git+https://github.com/simonsobs/socs.git@py36#egg=socs
+
+RUN pip3 install dumb-init
+
+ENTRYPOINT ["dumb-init", "ocs-agent-cli"]
diff --git a/agents/wiregrid_actuator/Dockerfile b/docker/wiregrid_actuator/Dockerfile
similarity index 95%
rename from agents/wiregrid_actuator/Dockerfile
rename to docker/wiregrid_actuator/Dockerfile
index 23151c20f..bb6d4770e 100644
--- a/agents/wiregrid_actuator/Dockerfile
+++ b/docker/wiregrid_actuator/Dockerfile
@@ -1,6 +1,6 @@
FROM socs:latest
-WORKDIR /app/socs/agents/wiregrid_actuator
+WORKDIR /home/ocs/
RUN apt-get update \
&& apt-get install -y apt-utils \
diff --git a/agents/wiregrid_actuator/wg-actuator-entrypoint.sh b/docker/wiregrid_actuator/wg-actuator-entrypoint.sh
similarity index 77%
rename from agents/wiregrid_actuator/wg-actuator-entrypoint.sh
rename to docker/wiregrid_actuator/wg-actuator-entrypoint.sh
index 44161896f..656eafdd6 100755
--- a/agents/wiregrid_actuator/wg-actuator-entrypoint.sh
+++ b/docker/wiregrid_actuator/wg-actuator-entrypoint.sh
@@ -5,4 +5,4 @@
/usr/sbin/gcapsd &
# "$@" is all of the arguments on this script: wg-actuator-entrypoint.sh
-dumb-init python3 -u wiregrid_actuator.py "$@"
+dumb-init ocs-agent-cli "$@"
diff --git a/docs/agents/acu_agent.rst b/docs/agents/acu_agent.rst
index a852b3f2a..01a0ee550 100644
--- a/docs/agents/acu_agent.rst
+++ b/docs/agents/acu_agent.rst
@@ -11,9 +11,9 @@ It is used for readout of encoder measurements and control of telescope
platforms.
.. argparse::
- :filename: ../agents/acu/acu_agent.py
+ :filename: ../socs/agents/acu/agent.py
:func: add_agent_args
- :prog: python3 acu_agent.py
+ :prog: python3 agent.py
Dependencies
------------
@@ -85,7 +85,7 @@ example configuration block is below::
Agent API
---------
-.. autoclass:: agents.acu.acu_agent.ACUAgent
+.. autoclass:: socs.agents.acu.agent.ACUAgent
:members:
Example Clients
@@ -109,5 +109,5 @@ acquisition processes are running::
Supporting APIs
---------------
-.. automodule:: agents.acu.scan_helpers
+.. automodule:: socs.agents.acu.drivers
:members:
diff --git a/docs/agents/bluefors_agent.rst b/docs/agents/bluefors_agent.rst
index d008e62bc..39a1ceaac 100644
--- a/docs/agents/bluefors_agent.rst
+++ b/docs/agents/bluefors_agent.rst
@@ -11,9 +11,9 @@ logs and passes them to the live monitor and to the OCS housekeeping data
aggregator.
.. argparse::
- :module: agents.bluefors.bluefors_log_tracker
+ :module: socs.agents.bluefors.agent
:func: make_parser
- :prog: bluefors_log_tracker.py
+ :prog: agent.py
Configuration File Examples
---------------------------
@@ -41,19 +41,18 @@ Docker Compose
Example docker-compose configuration::
ocs-bluefors:
- image: simonsobs/ocs-bluefors-agent:latest
+ image: simonsobs/socs:latest
hostname: ocs-docker
network_mode: "host"
- volumes:
- - ${OCS_CONFIG_DIR}:/config:ro
- - /home/simonsobs/bluefors/logs/:/logs:ro
environment:
- LOGLEVEL: "info"
+ INSTANCE_ID: bluefors
+ LOGLEVEL: info
FRAME_LENGTH: 600
STALE_TIME: 2
- MODE: "follow"
- command:
- - "--instance-id=bluefors"
+ MODE: follow
+ volumes:
+ - ${OCS_CONFIG_DIR}:/config:ro
+ - /home/simonsobs/bluefors/logs/:/logs:ro
Depending on how you are running your containers it might be easier to hard
code the `OCS_CONFIG_DIR` environment variable.
@@ -189,14 +188,14 @@ minutes old. This defaults to two minutes, but can be set with the
Agent API
---------
-.. autoclass:: agents.bluefors.bluefors_log_tracker.BlueforsAgent
+.. autoclass:: socs.agents.bluefors.agent.BlueforsAgent
:members:
Supporting API
--------------
-.. autoclass:: agents.bluefors.bluefors_log_tracker.LogTracker
+.. autoclass:: socs.agents.bluefors.agent.LogTracker
:members:
-.. autoclass:: agents.bluefors.bluefors_log_tracker.LogParser
+.. autoclass:: socs.agents.bluefors.agent.LogParser
:members:
diff --git a/docs/agents/cryomech_cpa.rst b/docs/agents/cryomech_cpa.rst
index 1e8fe3d06..79dbe0cdf 100644
--- a/docs/agents/cryomech_cpa.rst
+++ b/docs/agents/cryomech_cpa.rst
@@ -13,9 +13,9 @@ temperature and pressure, oil temperature, and more. Control is not yet
implemented.
.. argparse::
- :filename: ../agents/cryomech_cpa/cryomech_cpa_agent.py
+ :filename: ../socs/agents/cryomech_cpa/agent.py
:func: make_parser
- :prog: python3 crypmech_cpa_agent.py
+ :prog: python3 agent.py
Configuration File Examples
@@ -50,13 +50,13 @@ The Cryomech CPA Agent should be configured to run in a Docker container.
An example docker-compose service configuration is shown here::
ocs-ptc1:
- image: simonsobs/ocs-cryomech-cpa-agent:latest
+ image: simonsobs/socs:latest
hostname: ocs-docker
network_mode: "host"
+ environment:
+ - INSTANCE_ID=ptc1
volumes:
- ${OCS_CONFIG_DIR}:/config
- command:
- - "--instance-id=ptc1"
Since the agent within the container needs to communicate with hardware on the
host network you must use ``network_mode: "host"`` in your compose file.
@@ -83,11 +83,11 @@ Below is an example client to start data acquisition::
Agent API
---------
-.. autoclass:: agents.cryomech_cpa.cryomech_cpa_agent.PTCAgent
+.. autoclass:: socs.agents.cryomech_cpa.agent.PTCAgent
:members:
Supporting APIs
---------------
-.. autoclass:: agents.cryomech_cpa.cryomech_cpa_agent.PTC
+.. autoclass:: socs.agents.cryomech_cpa.agent.PTC
:members:
diff --git a/docs/agents/fts_agent.rst b/docs/agents/fts_agent.rst
index b6983f33f..391dda266 100644
--- a/docs/agents/fts_agent.rst
+++ b/docs/agents/fts_agent.rst
@@ -2,17 +2,17 @@
.. _fts_aerotech_stage:
-=====================
+==================
FTS Aerotech Agent
-=====================
+==================
This agent is used to communicate with the FTS mirror stage for two FTSs with
Aerotech motion controllers.
.. argparse::
- :filename: ../agents/fts_aerotech_stage/fts_aerotech_agent.py
+ :filename: ../socs/agents/fts_aerotech/agent.py
:func: make_parser
- :prog: python3 fts_aerotech_agent.py
+ :prog: python3 agent.py
Configuration File Examples
@@ -20,8 +20,9 @@ Configuration File Examples
Below are configuration examples for the ocs config file and for running the
Agent in a docker container.
-ocs-config
-``````````
+OCS Site Config
+```````````````
+
To configure the FTS Agent we need to add a block to our ocs
configuration file. Here is an example configuration block using all of
the available arguments::
@@ -37,8 +38,9 @@ the available arguments::
]},
-fts-config
+FTS Config
``````````
+
The FTS takes a separate YAML config file to specify some inner paramters. Here
is an example using all the available arguments.::
@@ -47,30 +49,30 @@ is an example using all the available arguments.::
speed: 10
timeout: 10
-Example Client
---------------
+Agent API
+---------
+
+.. autoclass:: socs.agents.fts_aerotech.agent.FTSAerotechAgent
+ :members:
+
+Example Clients
+---------------
+
Below is an example client demonstrating full agent functionality.
Note that all tasks can be run even while the data acquisition process
is running.::
- from ocs.matched_client import MatchedClient
+ from ocs.ocs_client import OCSClient
- #Initialize the Stages
- fts_agent = MatchedClient('Falcon', args=[])
+ # Initialize the Stages
+ fts_agent = OCSClient('Falcon', args=[])
fts_agent.init.start()
fts_agent.init.wait()
- #Home Axis
+ # Home Axis
fts_agent.home.start()
fts_agent.home.wait()
- #Move to a specific position
+ # Move to a specific position
fts_agent.move_to.start( position=0)
fts_agent.move_to.wait()
-
-
-Agent API
----------
-
-.. autoclass:: agents.fts_aerotech_stage.fts_aerotech_agent.FTSAerotechAgent
- :members: init_stage_task, home_task, move_to, start_acq, stop_acq
diff --git a/docs/agents/holo_fpga.rst b/docs/agents/holo_fpga.rst
index 036009c92..e4c622e49 100644
--- a/docs/agents/holo_fpga.rst
+++ b/docs/agents/holo_fpga.rst
@@ -11,18 +11,35 @@ issues with the holography ROACH2 FPGA. It will connect the computer to the
ROACH via an ethernet port, take data, and pass it to the OCS feed.
.. argparse::
- :module: agents.holo_fpga.roach_agent
+ :module: socs.agents.holo_fpga.agent
:func: make_parser
- :prog: roach_agent.py
+ :prog: python3 agent.py
Dependencies
------------
-Python Packages:
+.. note::
+ These dependencies only support Python 3.8! As such, they are not
+ automatically installed when you install ``socs``. You can manually install
+ them, or follow the instructions below.
- `casperfpga `_
- `holo_daq `_
+You can install these by first checking you are running Python 3.8::
+
+ $ python --version
+ Python 3.8.13
+
+Then by either installing via the optional dependency "holography"::
+
+ $ python -m pip install socs[holography]
+
+Or by cloning the socs repository and using the provided requirements file::
+
+ $ git clone https://github.com/simonsobs/socs.git
+ $ python -m pip install -r socs/requirements/holography.txt
+
Configuration File Examples
---------------------------
@@ -67,5 +84,5 @@ function. This will record the cross-correlations A, BB, AB, and phase.
Agent API
---------
-.. autoclass:: agents.holo_fpga.roach_agent.FPGAAgent
+.. autoclass:: socs.agents.holo_fpga.agent.FPGAAgent
:members:
diff --git a/docs/agents/holo_synth.rst b/docs/agents/holo_synth.rst
index c6690e51a..154a9a1df 100644
--- a/docs/agents/holo_synth.rst
+++ b/docs/agents/holo_synth.rst
@@ -14,17 +14,36 @@ initialize the LO's, set the frequency of each and pass the frequency to the
OCS feed.
.. argparse::
- :module: agents.holo_synth.synth_agent
+ :module: socs.agents.holo_synth.agent
:func: make_parser
- :prog: synth_agent.py
+ :prog: python3 agent.py
Dependencies
------------
-Python Packages:
+.. note::
+ These dependencies are not automatically installed when you install
+ ``socs``. You can manually install them, or follow the instructions below.
+
+ Also note that since this agent is tightly coupled with the
+ :ref:`holo_fpga_agent`, the instructions below will pull dependencies
+ related to that agent as well.
- `holo_daq `_
+You can install these by first checking you are running Python 3.8::
+
+ $ python --version
+ Python 3.8.13
+
+Then by either installing via the optional dependency "holography"::
+
+ $ python -m pip install socs[holography]
+
+Or by cloning the socs repository and using the provided requirements file::
+
+ $ git clone https://github.com/simonsobs/socs.git
+ $ python -m pip install -r socs/requirements/holography.txt
Configuration File Examples
---------------------------
@@ -71,5 +90,5 @@ output of BOTH synthesizers. The user-specified frequency should be in GHz.
Agent API
---------
-.. autoclass:: agents.holo_synth.synth_agent.SynthAgent
+.. autoclass:: socs.agents.holo_synth.agent.SynthAgent
:members:
diff --git a/docs/agents/chwp_encoder.rst b/docs/agents/hwp_encoder.rst
similarity index 80%
rename from docs/agents/chwp_encoder.rst
rename to docs/agents/hwp_encoder.rst
index 04f1a8c87..19a3f9ea1 100644
--- a/docs/agents/chwp_encoder.rst
+++ b/docs/agents/hwp_encoder.rst
@@ -2,9 +2,9 @@
.. _chwp_encoder:
-======================
-CHWP Encoder BBB Agent
-======================
+=====================
+HWP Encoder BBB Agent
+=====================
The optical encoder signals of the CHWP are captured by Beaglebone Black (BBB)
boards with the IRIG-B timing reference.
@@ -12,17 +12,18 @@ This agent receives and decodes UDP packets from BBB and publishes the data
feeds.
.. argparse::
- :filename: ../agents/chwp/hwpbbb_agent.py
+ :filename: ../socs/agents/hwp_encoder/agent.py
:func: make_parser
- :prog: python3 hwpbbb_agent.py
+ :prog: python3 agent.py
Configuration File Examples
---------------------------
Below are useful configurations examples for the relevant OCS files and for
running the agent in a docker container.
-ocs-config
-``````````
+OCS Site Config
+```````````````
+
To configure the CHWP encoder BBB agent we need to add a HWPBBBAgent
block to our ocs configuration file. Here is an example configuration block
using all of the available arguments::
@@ -44,35 +45,32 @@ Multiple BBBs on the same network are distinguished by port numbers.
You should assign a port for each BBB, which should be consistent with
the setting on the BBB side.
-Docker
-``````
+Docker Compose
+``````````````
+
The CHWP BBB agent can be run via a Docker container. The following is an
example of what to insert into your institution's docker-compose file.
This again is an example to run multiple agents::
ocs-hwpbbb-agent-HBA0:
- image: simonsobs/ocs-hwpbb-agent:latest
+ image: simonsobs/socs:latest
ports:
- "8080:8080/udp"
hostname: ocs-docker
+ environment:
+ - INSTANCE_ID=HBA0
volumes:
- ${OCS_CONFIG_DIR}:/config:ro
- command:
- - "--instance-id=HBA0"
- - "--site-hub=ws://crossbar:8001/ws"
- - "--site-http=http://crossbar:8001/call"
ocs-hwpbbb-agent-HBA1:
- image: simonsobs/ocs-hwpbb-agent:latest
+ image: simonsobs/socs:latest
ports:
- "8081:8081/udp"
hostname: ocs-docker
+ environment:
+ - INSTANCE_ID=HBA1
volumes:
- ${OCS_CONFIG_DIR}:/config:ro
- command:
- - "--instance-id=HBA1"
- - "--site-hub=ws://crossbar:8001/ws"
- - "--site-http=http://crossbar:8001/call"
Description
-----------
@@ -83,8 +81,15 @@ The most recent data collected is stored in session.data in the following struct
The approx_hwp_freq is initialized to -1 and will be updated with a non-negative rotation frequency
if the encoder agent is receiving the encoder signal.
If the CHWP is completely stopped, approx_hwp_freq will not be updated.::
+
>>> response.session['data']
{'approx_hwp_freq': 2.0,
'encoder_last_updated': 1659486962.3731978,
'irig_time': 1659486983,
'irig_last_updated': 1659486983.8985631}
+
+Agent API
+---------
+
+.. autoclass:: socs.agents.hwp_encoder.agent.HWPBBBAgent
+ :members:
diff --git a/docs/agents/hwp_picoscope.rst b/docs/agents/hwp_picoscope.rst
index b43d4cf10..72b33e775 100644
--- a/docs/agents/hwp_picoscope.rst
+++ b/docs/agents/hwp_picoscope.rst
@@ -10,9 +10,9 @@ The HWP picoscope agent interfaces with Picoscope 3403D MSO to operate the LC se
This agent biases the LC sensors and measures the 4 channels of analog input and 8 channels of digital input.
.. argparse::
- :filename: ../agents/hwp_picoscope/pico_agent.py
+ :filename: ../socs/agents/hwp_picoscope/agent.py
:func: make_parser
- :prog: python3 pico_agent.py
+ :prog: python3 agent.py
Dependencies
---------------------------
@@ -51,23 +51,24 @@ Docker
that you instead run this Agent directly on the host via the Host Manager
Agent.
+ If you want to run in a container you will need to build the image
+ yourself. A Dockerfile is provided in ``socs/docker/hwp_picoscope/``.
+
The HWP picoscope agent can be run via a Docker container. The following is an
example of what to insert into your institution's docker-compose file::
picoscope:
- image: simonsobs/ocs-hwp-picoscope-agent:latest
+ image: ocs-hwp-picoscope-agent:latest
hostname: ocs-docker
privileged: true
+ environment:
+ - INSTANCE_ID=picoscope
volumes:
- ${OCS_CONFIG_DIR}:/config:ro
- /dev:/dev
- command:
- - "--instance-id=picoscope"
- - "--site-hub=ws://crossbar:8001/ws"
- - "--site-http=http://crossbar:8001/call"
Agent API
---------
-.. autoclass:: agents.hwp_picoscope.pico_agent.PicoAgent
+.. autoclass:: socs.agents.hwp_picoscope.agent.PicoAgent
:members:
diff --git a/docs/agents/hwp_rotation_agent.rst b/docs/agents/hwp_rotation_agent.rst
index 1defd9631..63c0ecbbb 100644
--- a/docs/agents/hwp_rotation_agent.rst
+++ b/docs/agents/hwp_rotation_agent.rst
@@ -7,9 +7,9 @@ HWP Rotation Agent
==================
.. argparse::
- :filename: ../agents/hwp_rotation/rotation_agent.py
+ :filename: ../socs/agents/hwp_rotation/agent.py
:func: make_parser
- :prog: python3 rotation_agent.py
+ :prog: python3 agent.py
Configuration File Examples
---------------------------
@@ -36,15 +36,15 @@ Docker Compose
An example docker-compose configuration::
ocs-hwp-rotation:
- image: simonsobs/ocs-hwp-rotation-agent:latest
+ image: simonsobs/socs:latest
hostname: ocs-docker
network_mode: "host"
+ environment:
+ - INSTANCE_ID=rotator
+ - SITE_HUB=ws://127.0.0.1:8001/ws
+ - SITE_HTTP=http://127.0.0.1:8001/call
volumes:
- ${OCS_CONFIG_DIR}:/config:ro
- command:
- - "--instance-id=hwp-rotation"
- - "--site-hub=ws://127.0.0.1:8001/ws"
- - "--site-http=http://127.0.0.1:8001/call"
.. note::
Since the Agent container needs ``network_mode: "host"``, it must be
@@ -59,12 +59,12 @@ Description
Agent API
---------
-.. autoclass:: agents.hwp_rotation.rotation_agent.RotationAgent
+.. autoclass:: socs.agents.hwp_rotation.agent.RotationAgent
:members:
Supporting APIs
---------------
-.. automodule:: socs.agent.pmx
+.. automodule:: socs.common.pmx
:members:
:noindex:
diff --git a/docs/agents/ibootbar.rst b/docs/agents/ibootbar.rst
index 5a5b9dbda..775a737f4 100644
--- a/docs/agents/ibootbar.rst
+++ b/docs/agents/ibootbar.rst
@@ -10,9 +10,9 @@ The iBootbar Agent is an OCS Agent which monitors and sends commands to the iBoo
Monitoring and commanding is performed via SNMP.
.. argparse::
- :filename: ../agents/ibootbar/ibootbar.py
+ :filename: ../socs/agents/ibootbar/agent.py
:func: add_agent_args
- :prog: python3 ibootbar.py
+ :prog: python3 agent.py
Configuration File Examples
---------------------------
@@ -31,7 +31,7 @@ using all of the available arguments::
'instance-id': 'ibootbar',
'arguments': [['--address', '10.10.10.50'],
['--port', 161],
- ['--auto-start', True],
+ ['--mode', 'acq'],
['--snmp-version', 2]]},
.. note::
@@ -44,18 +44,16 @@ The iBootbar Agent should be configured to run in a Docker container. An
example docker-compose service configuration is shown here::
ocs-ibootbar:
- image: simonsobs/ocs-ibootbar
+ image: simonsobs/socs:latest
hostname: ocs-docker
network_mode: "host"
volumes:
- ${OCS_CONFIG_DIR}:/config:ro
environment:
- - "LOGLEVEL=info"
- command:
- - "--instance-id=ibootbar"
- - "--site-hub=ws://127.0.0.1:8001/ws"
- - "--site-http=http://127.0.0.1:8001/call"
-
+ - INSTANCE_ID=ibootbar
+ - SITE_HUB=ws://127.0.0.1:8001/ws
+ - SITE_HTTP=http://127.0.0.1:8001/call
+ - LOGLEVEL=info
The ``LOGLEVEL`` environment variable can be used to set the log level for
debugging. The default level is "info".
@@ -96,7 +94,7 @@ values, refer to the MIB file.
Agent API
---------
-.. autoclass:: agents.ibootbar.ibootbar.ibootbarAgent
+.. autoclass:: socs.agents.ibootbar.agent.ibootbarAgent
:members:
Example Clients
@@ -120,6 +118,6 @@ Below is an example client to control outlets::
Supporting APIs
---------------
-.. autoclass:: agents.ibootbar.ibootbar.update_cache
+.. autoclass:: socs.agents.ibootbar.agent.update_cache
:members:
:noindex:
diff --git a/docs/agents/labjack.rst b/docs/agents/labjack.rst
index ac5a07752..946de83f1 100644
--- a/docs/agents/labjack.rst
+++ b/docs/agents/labjack.rst
@@ -11,10 +11,28 @@ analog and digital inputs and outputs. They are then commanded and queried over
Ethernet.
.. argparse::
- :filename: ../agents/labjack/labjack_agent.py
+ :filename: ../socs/agents/labjack/agent.py
:func: make_parser
- :prog: python3 labjack_agent.py
+ :prog: python3 agent.py
+Dependencies
+------------
+
+* `labjack-ljm `_ - LabJack LJM Library
+
+While there is the above PyPI package for the LJM library, it does not provide
+the ``libLabJackM.so`` shared object file that is needed. This can be obtained
+by running the LJM installer provided on the `LabJack website
+`_. You can do so by running::
+
+ $ wget https://labjack.com/sites/default/files/software/labjack_ljm_minimal_2020_03_30_x86_64_beta.tar.gz
+ $ tar zxf ./labjack_ljm_minimal_2020_03_30_x86_64_beta.tar.gz
+ $ ./labjack_ljm_minimal_2020_03_30_x86_64/labjack_ljm_installer.run -- --no-restart-device-rules
+
+.. note::
+    This library is bundled into the socs base Docker image. If you are running
+ this Agent in Docker you do *not* also need to install the library on the
+ host system.
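+
+If you have installed the library on the host, a quick way to confirm that both
+the Python package and the shared object are usable is to open a connection with
+the LJM module. This is only a minimal sketch; the device type, connection type,
+and identifier below are placeholders and should be replaced with values for
+your own setup::
+
+    from labjack import ljm
+
+    # Open the first LabJack found over Ethernet (placeholders; replace with
+    # your own device type, connection type, and identifier).
+    handle = ljm.openS("ANY", "ETHERNET", "ANY")
+
+    # Read a single analog input to confirm communication, then close.
+    print(ljm.eReadName(handle, "AIN0"))
+    ljm.close(handle)
+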
Configuration File Examples
---------------------------
@@ -87,21 +105,21 @@ The LabJack Agent should be configured to run in a Docker container. An
example docker-compose service configuration is shown here::
ocs-labjack:
- image: simonsobs/ocs-labjack-agent:latest
+ image: simonsobs/socs:latest
<<: *log-options
hostname: ocs-docker
network_mode: "host"
+ environment:
+ - INSTANCE_ID=labjack
volumes:
- ${OCS_CONFIG_DIR}:/config
- command:
- - "--instance-id=labjack"
Since the agent within the container needs to communicate with hardware on the
host network you must use ``network_mode: "host"`` in your compose file.
Custom Register Readout
-----------------------
-Labjack has many other registers available to access besides just the voltages
+LabJack has many other registers available to access besides just the voltages
on AIN# which we use in the `mode=acq` option of this agent to readout directly
or convert to useful units using the functions module. These extra registers
however cannot be streamed out using the `ljstream` module which is the
@@ -173,11 +191,11 @@ shows the basic acquisition functionality:
Agent API
---------
-.. autoclass:: agents.labjack.labjack_agent.LabJackAgent
+.. autoclass:: socs.agents.labjack.agent.LabJackAgent
:members:
Supporting APIs
---------------
-.. autoclass:: agents.labjack.labjack_agent.LabJackFunctions
+.. autoclass:: socs.agents.labjack.agent.LabJackFunctions
:members:
diff --git a/docs/agents/lakeshore240.rst b/docs/agents/lakeshore240.rst
index 18f16f216..3cac61b9d 100644
--- a/docs/agents/lakeshore240.rst
+++ b/docs/agents/lakeshore240.rst
@@ -10,9 +10,9 @@ The Lakeshore 240 is a 4-lead meausrement device used for readout of ROXes and
Diodes at 1K and above.
.. argparse::
- :filename: ../agents/lakeshore240/LS240_agent.py
+ :filename: ../socs/agents/lakeshore240/agent.py
:func: make_parser
- :prog: python3 LS240_agent.py
+ :prog: python3 agent.py
Dependencies
------------
@@ -74,17 +74,15 @@ Docker Compose
The Lakeshore 240 Agent can (and probably should) be configured to run in a
Docker container. An example configuration is::
- ocs-LSA24MA:
- image: simonsobs/ocs-lakeshore240-agent:latest
+ ocs-LSA22Z2:
+ image: simonsobs/socs:latest
devices:
- - "/dev/LSA24MA:/dev/LSA24MA"
+ - "/dev/LSA22Z2:/dev/LSA22Z2"
hostname: nuc-docker
+ environment:
+ - INSTANCE_ID=LSA22Z2
volumes:
- ${OCS_CONFIG_DIR}:/config:ro
- command:
- - "--instance-id=LSA24MA"
- - "--site-hub=ws://crossbar:8001/ws"
- - "--site-http=http://crossbar:8001/call"
The serial number will need to be updated in your configuration. The hostname
should also match your configured host in your OCS configuration file. The
@@ -106,7 +104,7 @@ client that sets Channel 1 of a 240 to read a diode::
from ocs.matched_client import MatchedClient
- ls_client = MatchedClient("LSA24MA")
+ ls_client = MatchedClient("LSA22Z2")
diode_params = {
'sensor': 1,
@@ -121,7 +119,7 @@ client that sets Channel 1 of a 240 to read a diode::
Agent API
---------
-.. autoclass:: agents.lakeshore240.LS240_agent.LS240_Agent
+.. autoclass:: socs.agents.lakeshore240.agent.LS240_Agent
:members:
Supporting APIs
diff --git a/docs/agents/lakeshore336.rst b/docs/agents/lakeshore336.rst
index 5b0cdbfc9..ddf838342 100644
--- a/docs/agents/lakeshore336.rst
+++ b/docs/agents/lakeshore336.rst
@@ -13,9 +13,9 @@ Basic functionality to interface with and control an LS336 is provided by
the :ref:`336_driver`.
.. argparse::
- :filename: ../agents/lakeshore336/LS336_agent.py
+ :filename: ../socs/agents/lakeshore336/agent.py
:func: make_parser
- :prog: python3 LS336_agent.py
+ :prog: python3 agent.py
Configuration File Examples
---------------------------
@@ -33,7 +33,7 @@ configuration block::
{'agent-class': 'Lakeshore336Agent',
'instance-id': 'LSA2833',
'arguments': [['--serial-number', 'LSA2833'],
- ['--ip-address', '10.10.10.2],
+ ['--ip-address', '10.10.10.2'],
['--f-sample', 0.1],
['--threshold', 0.05],
['--window', 600],
@@ -49,15 +49,15 @@ The Lakeshore 336 agent should be configured to run in a Docker container.
An example configuration is::
ocs-LSA2833:
- image: simonsobs/ocs-lakeshore336-agent:latest
+ image: simonsobs/socs:latest
hostname: ocs-docker
network_mode: "host"
+ environment:
+ - INSTANCE_ID=LSA2833
+ - SITE_HUB=ws://127.0.0.1:8001/ws
+ - SITE_HTTP=http://127.0.0.1:8001/call
volumes:
- ${OCS_CONFIG_DIR}:/config:ro
- command:
- - "--instance-id=LSA2833"
- - "--site-hub=ws://127.0.0.1:8001/ws"
- - "--site-http=http://127.0.0.1:8001/call"
.. note::
Since the 336 Agent container needs ``network_mode: "host"``, it must be
@@ -80,7 +80,7 @@ Lakeshore336.py script.
Agent API
---------
-.. autoclass:: agents.lakeshore336.LS336_agent.LS336_Agent
+.. autoclass:: socs.agents.lakeshore336.agent.LS336_Agent
:members:
.. _336_driver:
diff --git a/docs/agents/lakeshore370.rst b/docs/agents/lakeshore370.rst
index f7e6f861f..78c06d7b3 100644
--- a/docs/agents/lakeshore370.rst
+++ b/docs/agents/lakeshore370.rst
@@ -12,12 +12,15 @@ control an LS370 is provided by the
``socs.Lakeshore.Lakeshore370.py`` module.
.. argparse::
- :filename: ../agents/lakeshore370/LS370_agent.py
+ :filename: ../socs/agents/lakeshore370/agent.py
:func: make_parser
- :prog: python3 LS370_agent.py
+ :prog: python3 agent.py
-OCS Configuration
------------------
+Configuration File Examples
+---------------------------
+
+OCS Site Config
+```````````````
To configure your Lakeshore 370 for use with OCS you need to add a
Lakeshore370Agent block to your ocs configuration file. Here is an example
@@ -32,23 +35,21 @@ configuration block::
Each device requires configuration under 'agent-instances'. See the OCS site
configs documentation for more details.
-Docker Configuration
---------------------
+Docker Compose
+``````````````
The Lakeshore 370 Agent should be configured to run in a Docker container. An
example configuration is::
- ocs-LSA22YE:
- image: simonsobs/ocs-lakeshore370-agent
+ ocs-LSA22YG:
+ image: simonsobs/socs:latest
hostname: ocs-docker
+ environment:
+ - INSTANCE_ID=LSA22YG
volumes:
- ${OCS_CONFIG_DIR}:/config:ro
devices:
- "/dev/ttyUSB1:/dev/ttyUSB1"
- command:
- - "--instance-id=LSA22YE"
- - "--site-hub=ws://crossbar:8001/ws"
- - "--site-http=http://crossbar:8001/call"
.. note::
The serial numbers here will need to be updated for your device.
@@ -57,8 +58,11 @@ example configuration is::
The device path may differ on your machine, and if only using the ttyUSB
value as shown here, is not guaranteed to be static.
+Description
+-----------
+
Direct Communication
---------------------
+````````````````````
Direct communication with the Lakeshore can be achieved without OCS, using the
``Lakeshore370.py`` module in ``socs/socs/Lakeshore/``. From that directory,
you can run a script like::
@@ -80,8 +84,8 @@ please file a Github issue.
Agent API
---------
-.. autoclass:: agents.lakeshore370.LS370_agent.LS370_Agent
- :members: init_lakeshore_task, start_acq
+.. autoclass:: socs.agents.lakeshore370.agent.LS370_Agent
+ :members:
Supporting APIs
---------------
diff --git a/docs/agents/lakeshore372.rst b/docs/agents/lakeshore372.rst
index 70743a31d..cb44cd89b 100644
--- a/docs/agents/lakeshore372.rst
+++ b/docs/agents/lakeshore372.rst
@@ -12,9 +12,9 @@ functionality to interface and control an LS372 is provided by the
:ref:`372_driver`.
.. argparse::
- :filename: ../agents/lakeshore372/LS372_agent.py
+ :filename: ../socs/agents/lakeshore372/agent.py
:func: make_parser
- :prog: python3 LS372_agent.py
+ :prog: python3 agent.py
Configuration File Examples
---------------------------
@@ -120,16 +120,16 @@ Docker Compose
The Lakeshore 372 Agent should be configured to run in a Docker container. An
example configuration is::
- ocs-LSA22YE:
- image: simonsobs/ocs-lakeshore372-agent:latest
+ ocs-LSA22YG:
+ image: simonsobs/socs:latest
hostname: ocs-docker
network_mode: "host"
+ environment:
+ - INSTANCE_ID=LSA22YG
+ - SITE_HUB=ws://127.0.0.1:8001/ws
+ - SITE_HTTP=http://127.0.0.1:8001/call
volumes:
- ${OCS_CONFIG_DIR}:/config:ro
- command:
- - "--instance-id=LSA22YE"
- - "--site-hub=ws://127.0.0.1:8001/ws"
- - "--site-http=http://127.0.0.1:8001/call"
.. note::
Since the 372 Agent container needs ``network_mode: "host"``, it must be
@@ -172,7 +172,7 @@ please file a Github issue.
Agent API
---------
-.. autoclass:: agents.lakeshore372.LS372_agent.LS372_Agent
+.. autoclass:: socs.agents.lakeshore372.agent.LS372_Agent
:members:
.. _372_driver:
diff --git a/docs/agents/lakeshore425.rst b/docs/agents/lakeshore425.rst
index 24eeecd10..8c55a3a0f 100644
--- a/docs/agents/lakeshore425.rst
+++ b/docs/agents/lakeshore425.rst
@@ -10,9 +10,9 @@ The Lakeshore Model 425 gaussmeter is a device which measure the magnetic field
This agent is used to measure the magnetic field from the superconducting magnetic bearing of the CHWP rotation mechanism and to monitor the status of the floating and rotating CHWP.
.. argparse::
- :filename: ../agents/lakeshore425/LS425_agent.py
+ :filename: ../socs/agents/lakeshore425/agent.py
:func: make_parser
- :prog: python3 LS425_agent.py
+ :prog: python3 agent.py
Configuration File Examples
---------------------------
@@ -41,21 +41,19 @@ The ocs-lakeshore425-agent can be run via a Docker container. The following is a
example of what to insert into your institution's docker-compose file.::
ocs-lakeshore425-agent:
- image: simonsobs/ocs-lakeshore425-agent:latest
+ image: simonsobs/socs:latest
+ hostname: ocs-docker
+ environment:
+ - INSTANCE_ID=LS425
device:
- /dev/LS425:/dev/LS425
- hostname: ocs-docker
volumes:
- ${OCS_CONFIG_DIR}:/config:ro
- command:
- - "--instance-id=LS425"
- - "--site-hub=ws://crossbar:8001/ws"
- - "--site-http=http://crossbar:8001/call"
Agent API
---------
-.. autoclass:: agents.lakeshore425.LS425_agent.LS425Agent
+.. autoclass:: socs.agents.lakeshore425.agent.LS425Agent
:members:
Supporting APIs
diff --git a/docs/agents/latrt_xy_stage.rst b/docs/agents/latrt_xy_stage.rst
index 2052829e1..960c6fba7 100644
--- a/docs/agents/latrt_xy_stage.rst
+++ b/docs/agents/latrt_xy_stage.rst
@@ -15,18 +15,18 @@ their drivers are shared `here
`_.
.. argparse::
- :filename: ../agents/xy_stage/xy_latrt_agent.py
+ :filename: ../socs/agents/xy_stage/agent.py
:func: make_parser
- :prog: python3 xy_latrt_agent.py
+ :prog: python3 agent.py
Configuration File Examples
---------------------------
-Below are configuration examples for the ocs config file and for running the
-Agent in a docker container.
+Below are configuration examples for the ocs config file.
+
+OCS Site Config
+```````````````
-ocs-config
-``````````
To configure the LATRt XY Stage Agent we need to add a block to our ocs
configuration file. Here is an example configuration block using all of
the available arguments::
@@ -37,36 +37,37 @@ the available arguments::
['--ip-address', '192.168.10.15'],
['--port', 3010],
['--mode', 'acq'],
- ['--sampling_freqency', 2'],
+ ['--sampling_freqency', '2'],
]},
-Example Client
---------------
+Agent API
+---------
+
+.. autoclass:: socs.agents.xy_stage.agent.LATRtXYStageAgent
+ :members:
+
+Example Clients
+---------------
+
Below is an example client demonstrating full agent functionality.
Note that all tasks can be run even while the data acquisition process
is running.::
- from ocs.matched_client import MatchedClient
+ from ocs.ocs_client import OCSClient
- #Initialize the Stages
- xy_agent = MatchedClient('XYWing', args=[])
+ # Initialize the Stages
+ xy_agent = OCSClient('XYWing', args=[])
xy_agent.init.start()
xy_agent.init.wait()
- #Move in X
+ # Move in X
xy_agent.move_x_cm.start( distance=6, velocity=1)
xy_agent.move_x_cm.wait()
- #Move in Y
+ # Move in Y
xy_agent.move_y_cm.start( distance=6, velocity=1)
xy_agent.move_y_cm.wait()
- #Get instantaneous position
+ # Get instantaneous position
     status, message, session = xy_agent.acq.status()
print(session['data']['data'])
-
-Agent API
----------
-
-.. autoclass:: agents.xy_stage.xy_latrt_agent.LATRtXYStageAgent
- :members: init_xy_stage_task, move_x_cm, move_y_cm, set_position, start_acq, stop_acq
diff --git a/docs/agents/magpie.rst b/docs/agents/magpie.rst
index 46f19b803..49bb8f55a 100644
--- a/docs/agents/magpie.rst
+++ b/docs/agents/magpie.rst
@@ -15,9 +15,9 @@ a small bit of pre-processing and downsampling that is done whenever a new
frame is received.
.. argparse::
- :filename: ../agents/magpie/magpie_agent.py
+ :filename: ../socs/agents/magpie/agent.py
:func: make_parser
- :prog: python3 magpie_agent.py
+ :prog: python3 agent.py
Configuration File Examples
------------------------------
@@ -71,17 +71,17 @@ crate-id 1 and slot 2. If crossbar is being run on a different server, you'll
have to modify the ``site-hub`` and ``site-http`` args accordingly::
ocs-magpie-crate1slot2:
- image: simonsobs/ocs-magpie:${SOCS_TAG}
+ image: simonsobs/socs:latest
hostname: ocs-docker
user: ocs:ocs
network_mode: host
+ environment:
+ - INSTANCE_ID=magpie-crate1slot2
+ - SITE_HUB=ws://localhost:8001/ws
+ - SITE_HTTP=http://localhost:8001/call
volumes:
- ${OCS_CONFIG_DIR}:/config
- /data:/data
- command:
- - "--site-hub=ws://localhost:8001/ws"
- - "--site-http=http://localhost:8001/call"
- - "--instance-id=magpie-crate1slot2"
Lyrebird
------------
@@ -329,22 +329,22 @@ to generate a focal-plane layout.
Agent API
------------------
-.. autoclass:: agents.magpie.magpie_agent.MagpieAgent
+.. autoclass:: socs.agents.magpie.agent.MagpieAgent
:members:
Supporting APIs
------------------
-.. autoclass:: agents.magpie.magpie_agent.FIRFilter
+.. autoclass:: socs.agents.magpie.agent.FIRFilter
:members:
-.. autoclass:: agents.magpie.magpie_agent.Demodulator
+.. autoclass:: socs.agents.magpie.agent.Demodulator
:members:
-.. autoclass:: agents.magpie.magpie_agent.WhiteNoiseCalculator
+.. autoclass:: socs.agents.magpie.agent.WhiteNoiseCalculator
:members:
-.. autoclass:: agents.magpie.magpie_agent.VisElem
+.. autoclass:: socs.agents.magpie.agent.VisElem
:members:
-.. autoclass:: agents.magpie.magpie_agent.FocalplaneConfig
+.. autoclass:: socs.agents.magpie.agent.FocalplaneConfig
:members:
diff --git a/docs/agents/meinberg_m1000_agent.rst b/docs/agents/meinberg_m1000_agent.rst
index a24d6c909..63d0cc54e 100644
--- a/docs/agents/meinberg_m1000_agent.rst
+++ b/docs/agents/meinberg_m1000_agent.rst
@@ -8,9 +8,9 @@ The Meinberg M1000 Agent is an OCS Agent which monitors the Meinberg M1000, the
main source of timing for the SO site. Monitoring is performed via SNMP.
.. argparse::
- :filename: ../agents/meinberg_m1000/meinberg_m1000_agent.py
+ :filename: ../socs/agents/meinberg_m1000/agent.py
:func: make_parser
- :prog: python3 meinberg_m1000_agent.py
+ :prog: python3 agent.py
Description
-----------
@@ -75,17 +75,16 @@ The Meinberg M1000 Agent should be configured to run in a Docker container. An
example docker-compose service configuration is shown here::
ocs-m1000:
- image: simonsobs/ocs-meinberg-m1000-agent
+ image: simonsobs/socs:latest
hostname: ocs-docker
network_mode: "host"
volumes:
- ${OCS_CONFIG_DIR}:/config:ro
environment:
- - "LOGLEVEL=info"
- command:
- - "--instance-id=meinberg-m1000"
- - "--site-hub=ws://10.10.10.2:8001/ws"
- - "--site-http=http://10.10.10.2:8001/call"
+ - INSTANCE_ID=meinberg-m1000
+ - SITE_HUB=ws://10.10.10.2:8001/ws
+ - SITE_HTTP=http://10.10.10.2:8001/call
+ - LOGLEVEL=info
The ``LOGLEVEL`` environment variable can be used to set the log level for
@@ -94,11 +93,11 @@ debugging. The default level is "info".
Agent API
---------
-.. autoclass:: agents.meinberg_m1000.meinberg_m1000_agent.MeinbergM1000Agent
+.. autoclass:: socs.agents.meinberg_m1000.agent.MeinbergM1000Agent
:members:
Supporting APIs
----------------
-.. autoclass:: agents.meinberg_m1000.meinberg_m1000_agent.MeinbergSNMP
+.. autoclass:: socs.agents.meinberg_m1000.agent.MeinbergSNMP
:members:
diff --git a/docs/agents/pfeiffer.rst b/docs/agents/pfeiffer.rst
index f2c03ae3a..00f26c843 100644
--- a/docs/agents/pfeiffer.rst
+++ b/docs/agents/pfeiffer.rst
@@ -12,18 +12,19 @@ Pfeiffer agent communicates with the Controller module, and reads out
pressure readings from the six different channels.
.. argparse::
- :filename: ../agents/pfeiffer_tpg366/pfeiffer_tpg366_agent.py
+ :filename: ../socs/agents/pfeiffer_tpg366/agent.py
:func: make_parser
- :prog: python3 pfeiffer_tpg366_agent.py
+ :prog: python3 agent.py
Configuration File Examples
---------------------------
Below are useful configuration examples for the relevant OCS files and for
running the agent in a docker container.
-ocs-config
-``````````
-To configure the Cryomech CPA Agent we need to add a CryomechCPAAgent
+OCS Site Config
+```````````````
+
+To configure the Pfeiffer TPG 366 Agent we need to add a PfeifferAgent
block to our ocs configuration file. Here is an example configuration block
using all of the available arguments::
@@ -39,32 +40,35 @@ You should assign a static IP address to Pfeiffer device, and record it here.
In general, the Pfeiffer device will assign port 8000 by default. This should
not need to be changed unless you specify the port otherwise.
+Docker Compose
+``````````````
-Docker
-``````
The Pfeiffer Agent can be run via a Docker container. The following is an
example of what to insert into your institution's docker-compose file. ::
ocs-pfeiffer:
- image: simonsobs/ocs-pfeiffer-tpg366-agent:latest
+ image: simonsobs/socs:latest
hostname: ocs-docker
network_mode: "host"
+ environment:
+ - INSTANCE_ID=pfeiffer
volumes:
- ${OCS_CONFIG_DIR}:/config:ro
- command:
- - "--instance-id=pfeiffer"
+Agent API
+---------
-Example Client
---------------
-Below is an example client to start data acquisition
+.. autoclass:: socs.agents.pfeiffer_tpg366.agent.PfeifferAgent
+ :members:
-::
+Example Clients
+---------------
+Below is an example client to start data acquisition::
- from ocs.matched_client import MatchedClienti
+    from ocs.ocs_client import OCSClient
import time
- pfeiffer = MatchedClient("pfeiffer", args=[])
+ pfeiffer = OCSClient("pfeiffer", args=[])
params = {'auto_acquire': True}
pfeiffer.acq.start(**params)
pfeiffer.acq.wait()
diff --git a/docs/agents/pfeiffer_tc400.rst b/docs/agents/pfeiffer_tc400.rst
index 73b3be90f..3b889940d 100644
--- a/docs/agents/pfeiffer_tc400.rst
+++ b/docs/agents/pfeiffer_tc400.rst
@@ -2,19 +2,19 @@
.. _pfeiffer_tc400_agent:
-==============
-Pfeiffer TC400
-==============
+=====================
+Pfeiffer TC 400 Agent
+=====================
-The Pfeiffer TC400 agent is an OCS Agent which controls the
-Pfeiffer TC400 electronic drive unit, which control the turbos used
+The Pfeiffer TC 400 Agent is an OCS Agent which controls the
+Pfeiffer TC 400 electronic drive unit, which controls the turbos used
for the Bluefors DR. The communication is done over serial, and should be
integrated into OCS using a serial-to-ethernet converter.
.. argparse::
- :filename: ../agents/pfeiffer_tc400/pfeiffer_tc400_agent.py
+ :filename: ../socs/agents/pfeiffer_tc400/agent.py
:func: make_parser
- :prog: python3 pfeiffer_tc400_agent.py
+ :prog: python3 agent.py
Description
-----------
@@ -38,7 +38,7 @@ Agent in a docker container.
OCS Site Config
```````````````
-To configure the Pfeiffer TC400 Agent we need to add a PfeifferTC400Agent
+To configure the Pfeiffer TC 400 Agent we need to add a PfeifferTC400Agent
block to our ocs configuration file. The IP address and port
number are from the serial-to-ethernet converter. The turbo address is
visible on the power supply front panel. Here is an example configuration
@@ -57,14 +57,14 @@ The agent should be configured to run in a Docker container. An
example docker-compose service configuration is shown here::
ocs-pfeiffer-turboA:
- image: simonsobs/ocs-pfeiffer-tc400-agent
+ image: simonsobs/socs:latest
<<: *log-options
hostname: manny-docker
network_mode: "host"
+ environment:
+ - INSTANCE_ID=pfeifferturboA
volumes:
- ${OCS_CONFIG_DIR}:/config
- command:
- - "--instance-id=pfeifferturboA"
Since the agent within the container needs to communicate with hardware on the
host network you must use ``network_mode: "host"`` in your compose file.
@@ -72,7 +72,7 @@ host network you must use ``network_mode: "host"`` in your compose file.
Agent API
---------
-.. autoclass:: agents.pfeiffer_tc400.pfeiffer_tc400_agent.PfeifferTC400Agent
+.. autoclass:: socs.agents.pfeiffer_tc400.agent.PfeifferTC400Agent
:members:
Example Clients
@@ -104,5 +104,5 @@ example client shows all of this functionality::
Driver API
----------
-.. autoclass:: agents.pfeiffer_tc400.pfeiffer_tc400_driver.PfeifferTC400
+.. autoclass:: socs.agents.pfeiffer_tc400.drivers.PfeifferTC400
:members:
diff --git a/docs/agents/pysmurf-controller.rst b/docs/agents/pysmurf-controller.rst
index 6b00ff21d..344812005 100644
--- a/docs/agents/pysmurf-controller.rst
+++ b/docs/agents/pysmurf-controller.rst
@@ -10,9 +10,9 @@ The Pysmurf Controller OCS agent provides an interface to run pysmurf and
sodetlib control scripts on the smurf-server through an OCS client.
.. argparse::
- :filename: ../agents/pysmurf_controller/pysmurf_controller.py
+ :filename: ../socs/agents/pysmurf_controller/agent.py
:func: make_parser
- :prog: python3 pysmurf_controller.py
+ :prog: python3 agent.py
Configuration File Examples
-----------------------------------
@@ -50,6 +50,9 @@ named ``ocs-pysmurf-monitor`` might look something like::
security_opt:
- "aparmor=docker-smurf"
environment:
+        INSTANCE_ID: pysmurf-controller-s2
+        SITE_HUB: ws://${CB_HOST}:8001/ws
+        SITE_HTTP: http://${CB_HOST}:8001/call
SMURFPUB_BACKEND: udp
SMURFPUB_ID: crate1slot2
SMURFPUB_UDP_HOST: ocs-pysmurf-monitor
@@ -63,10 +66,6 @@ named ``ocs-pysmurf-monitor`` might look something like::
- /data:/data
- /home/cryo/repos/pysmurf/client:/usr/local/src/pysmurf/python/pysmurf/client
- /home/cryo/repos/sodetlib:/sodetlib
- command:
- - "--site-hub=ws://${CB_HOST}:8001/ws"
- - "--site-http=ws://${CB_HOST}:8001/call"
- - "--instance-id=pysmurf-controller-s2"
where ``CB_HOST`` and ``SOCS_TAG`` are set as environment variables or in the
``.env`` file.
@@ -187,11 +186,11 @@ This prints the dictionary::
Agent API
---------------
-.. autoclass:: agents.pysmurf_controller.pysmurf_controller.PysmurfController
+.. autoclass:: socs.agents.pysmurf_controller.agent.PysmurfController
:members:
Supporting APIs
---------------
-.. autoclass:: agents.pysmurf_controller.pysmurf_controller.PysmurfScriptProtocol
+.. autoclass:: socs.agents.pysmurf_controller.agent.PysmurfScriptProtocol
:members:
diff --git a/docs/agents/pysmurf-monitor.rst b/docs/agents/pysmurf-monitor.rst
index 8bab14292..291a90fee 100644
--- a/docs/agents/pysmurf-monitor.rst
+++ b/docs/agents/pysmurf-monitor.rst
@@ -12,9 +12,9 @@ to the pysmurf_files database, and send session info to pysmurf-controller
agents through an OCS Feed.
.. argparse::
- :filename: ../agents/pysmurf_monitor/pysmurf_monitor.py
+ :filename: ../socs/agents/pysmurf_monitor/agent.py
:func: make_parser
- :prog: python3 pysmurf_monitor.py
+ :prog: python3 agent.py
Configuration File Examples
---------------------------
@@ -34,17 +34,18 @@ Docker Compose
An example docker-compose entry might look like::
ocs-pysmurf-monitor:
- image: simonsobs/ocs-pysmurf-monitor-agent:${SOCS_TAG}
+ image: simonsobs/socs:latest
hostname: ocs-docker
user: cryo:smurf
network_mode: host
container_name: ocs-pysmurf-monitor
+ environment:
+ - INSTANCE_ID=pysmurf-monitor
+ - SITE_HUB=ws://${CB_HOST}:8001/ws
+ - SITE_HTTP=http://${CB_HOST}:8001/call
volumes:
- ${OCS_CONFIG_DIR}:/config
- /data:/data
- command:
- - "--site-hub=ws://${CB_HOST}:8001/ws"
- - "--site-http=http://${CB_HOST}:8001/call"
Where SOCS_TAG and CB_HOST are set in the ``.env`` file in the same dir as the
docker-compose file.
@@ -76,12 +77,12 @@ what files to copy over to a daq node or simons1.
Agent API
---------
-.. autoclass:: agents.pysmurf_monitor.pysmurf_monitor.PysmurfMonitor
+.. autoclass:: socs.agents.pysmurf_monitor.agent.PysmurfMonitor
:members:
:exclude-members: datagramReceived
Supporting APIs
---------------
-.. automethod:: agents.pysmurf_monitor.pysmurf_monitor.create_remote_path
+.. automethod:: socs.agents.pysmurf_monitor.agent.create_remote_path
-.. automethod:: agents.pysmurf_monitor.pysmurf_monitor.PysmurfMonitor.datagramReceived
+.. automethod:: socs.agents.pysmurf_monitor.agent.PysmurfMonitor.datagramReceived
diff --git a/docs/agents/scpi_psu.rst b/docs/agents/scpi_psu.rst
index 9dfba1852..5310e0a0b 100644
--- a/docs/agents/scpi_psu.rst
+++ b/docs/agents/scpi_psu.rst
@@ -2,9 +2,9 @@
.. _scpi_psu:
-==================
+==============
SCPI PSU Agent
-==================
+==============
This agent uses Standard Commands for Programmable Instruments (SCPI)
It works for many power supplies, including the Keithley 2230G
@@ -13,9 +13,9 @@ users to set current, voltage, and turn channels on/off. It also allows for
live monitoring of the PSU output.
.. argparse::
- :filename: ../agents/scpi_psu/scpi_psu_agent.py
+ :filename: ../socs/agents/scpi_psu/agent.py
:func: make_parser
- :prog: python3 scpi_psu_agent.py
+ :prog: python3 agent.py
Configuration File Examples
@@ -23,8 +23,9 @@ Configuration File Examples
Below are configuration examples for the ocs config file and for running the
Agent in a docker container.
-ocs-config
-``````````
+OCS Site Config
+```````````````
+
To configure the SCPI PSU Agent we need to add a block to our ocs
configuration file. Here is an example configuration block using all of
the available arguments::
@@ -41,56 +42,57 @@ have GPIB ports rather than ethernet ports. Therefore a GPIB-to-ethernet
converter is required, and the gpib slot must be specified in the ocs
configuration file. The IP address is then associated with the converter.
-Docker
-``````
+Docker Compose
+``````````````
+
The SCPI PSU Agent should be configured to run in a Docker container.
An example docker-compose service configuration is shown here::
ocs-psuK:
- image: simonsobs/ocs-scpi-psu-agent:latest
+ image: simonsobs/socs:latest
hostname: ocs-docker
network_mode: "host"
+ environment:
+ - INSTANCE_ID=psuK
volumes:
- ${OCS_CONFIG_DIR}:/config:ro
- command:
- - "--instance-id=psuK"
-Example Client
---------------
+Agent API
+---------
+
+.. autoclass:: socs.agents.scpi_psu.agent.ScpiPsuAgent
+ :members:
+
+Example Clients
+---------------
+
Below is an example client demonstrating full agent functionality.
Note that all tasks can be run even while the data acquisition process
is running.::
- from ocs.matched_client import MatchedClient
+ from ocs.ocs_client import OCSClient
- #Initialize the power supply
- psuK = MatchedClient('psuK', args=[])
+ # Initialize the power supply
+ psuK = OCSClient('psuK', args=[])
psuK.init.start()
psuK.init.wait()
- #Turn on channel 1
+ # Turn on channel 1
psuK.set_output.start(channel = 1, state=True)
psuK.set_output.wait()
- #Set channel 1 voltage
+ # Set channel 1 voltage
psuK.set_voltage.start(channel=1, volts=30)
psuK.set_voltage.wait()
- #Set channel 1 current
+ # Set channel 1 current
psuK.set_current.start(channel=1, current=0.1)
psuK.set_current.wait()
- #Get instantaneous reading of current and voltage output
+ # Get instantaneous reading of current and voltage output
statusK, messageK, sessionK = psuK.monitor_output.status()
print(sessionK['data']['data'])
- #Start live monitoring of current and voltage output
+ # Start live monitoring of current and voltage output
statusK, messageK, sessionK = psuK.monitor_output.start()
print(sessionK)
-
-
-Agent API
----------
-
-.. autoclass:: agents.scpi_psu.scpi_psu_agent.ScpiPsuAgent
- :members: monitor_output, set_voltage, set_current, set_output
diff --git a/docs/agents/smurf_crate_monitor.rst b/docs/agents/smurf_crate_monitor.rst
index 843ef1ee9..3c3f334e5 100644
--- a/docs/agents/smurf_crate_monitor.rst
+++ b/docs/agents/smurf_crate_monitor.rst
@@ -8,31 +8,22 @@ Smurf Crate Monitor Agent
The SMuRF readout system uses Advanced Telecommunications Computing Architecture
(ATCA) crates for powering and communicating between boards and the site networking
-and timing infrastructure. These crates have a small computer on board called a
-shelf manager which monitors all of the sensors in the crate including ammeters, and
-voltmeters for the power into the crates and into each front and rear module of each
-active slot used in the crate. There are also tachometers on each of the crate fans
-and various thermometers withing the crate and each of the boards plugged into the
-crate which the shelf manager monitors. There are multiple crate manufacturers
-but the shelf managers all share the same set of programming/communication called
-Pigeon Poing Communication so this agent should work across multiple crate
-manufacturers. This agent connects to a shell terminal of a crate shelf
-manager over ssh through the python subprocess package and then runs the
-command 'clia sensordata' and parses its output to identify all of the available
-sensors then stream and publish them.
+and timing infrastructure. This Agent monitors the sensors in these ATCA crates.
.. argparse::
- :filename: ../agents/smurf_crate_monitor/smurf_crate_monitor.py
+ :filename: ../socs/agents/smurf_crate_monitor/agent.py
:func: make_parser
- :prog: python3 smurf_crate_monitor.py
+ :prog: python3 agent.py
Configuration File Examples
---------------------------
+
Below are configuration examples for the ocs config file and for running the
Agent in a docker container.
-ocs-config
-``````````
+OCS Site Config
+```````````````
+
To configure the SMuRF Crate Monitor Agent we need to add a CrateAgent entry
to our site configuration file. Here is an example configuration block using
all of the available arguments::
@@ -69,34 +60,51 @@ the ocs-user in your 'docker-compose' file, see below for an example.
The second argument, 'crate-id', is just an identifier for your feed names
to distinguish between identical sensors on different crates.
-Docker
-``````
+Docker Compose
+``````````````
+
The SMuRF Crate Agent should be configured to run in a Docker container. An
example docker-compose service configuration is shown here::
ocs-smurf-crate-monitor:
<<: *ocs-base
- image: simonsobs/ocs-smurf-crate-monitor:latest
+ image: simonsobs/socs:latest
hostname: adaq1-docker
network_mode: "host"
+ environment:
+ - INSTANCE_ID=crate1-monitor
+ - LOGLEVEL=debug
volumes:
- ${OCS_CONFIG_DIR}:/config
- /home/ocs:/home/ocs
- command:
- - "--instance-id=crate1-monitor"
An example of the 'ocs-base' anchor is shown here::
x-ocs-base: &ocs-base
hostname: adaq1-docker
user: "9000"
- environment:
- LOGLEVEL: debug
volumes:
- ${OCS_CONFIG_DIR}:/config
+Description
+-----------
+
+The ATCA crates have a small computer on board called a shelf manager which
+monitors all of the sensors in the crate, including ammeters and voltmeters for
+the power into the crates and into each front and rear module of each active
+slot used in the crate. There are also tachometers on each of the crate fans
+and various thermometers within the crate and each of the boards plugged into
+the crate, all of which the shelf manager monitors.
+
+There are multiple crate manufacturers, but the shelf managers all share the
+same programming/communication interface, called Pigeon Point communication,
+so this agent should work across multiple crate manufacturers. This agent
+connects to a shell terminal of a crate shelf manager over ssh through the
+Python ``subprocess`` package, then runs the command ``clia sensordata`` and
+parses its output to identify all of the available sensors, which it then
+streams and publishes.
+
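+The approach can be illustrated with a much simplified sketch. This is not the
+Agent's actual implementation; the shelf manager hostname and the
+``Processed data`` line format used below are assumptions::
+
+    import subprocess
+
+    # Run 'clia sensordata' on the shelf manager over ssh (the hostname is a
+    # placeholder for your own crate's shelf manager).
+    out = subprocess.check_output(
+        ['ssh', 'root@shelf-manager', 'clia', 'sensordata'], text=True)
+
+    # Keep only the lines that report a processed sensor reading.
+    readings = [line.strip() for line in out.splitlines()
+                if 'Processed data' in line]
+    print(readings[:5])
+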
Agent API
---------
-.. autoclass:: agents.smurf_crate_monitor.smurf_crate_monitor.SmurfCrateMonitor
- :members: init_crate, start_acq
+.. autoclass:: socs.agents.smurf_crate_monitor.agent.SmurfCrateMonitor
+ :members:
diff --git a/docs/agents/smurf_file_emulator.rst b/docs/agents/smurf_file_emulator.rst
index ee38d92e8..7c0623396 100644
--- a/docs/agents/smurf_file_emulator.rst
+++ b/docs/agents/smurf_file_emulator.rst
@@ -15,9 +15,9 @@ and writes out fake files based on current examples of smurf anciliary data
that's present on simons1.
.. argparse::
- :filename: ../agents/smurf_file_emulator/smurf_file_emulator.py
+ :filename: ../socs/agents/smurf_file_emulator/agent.py
:func: make_parser
- :prog: python3 smurf_file_emulator.py
+ :prog: python3 agent.py
Configuration File Examples
---------------------------
@@ -52,8 +52,10 @@ This agent doesn't really need to run in a docker container, but if you're so
inclined an example config entry is::
ocs-smurf-file-emulator:
- image: simonsobs/ocs-smurf-file-emulator-agent:latest
+ image: simonsobs/socs:latest
hostname: ocs-docker
+ environment:
+ - INSTANCE_ID=smurf-file-emulator
volumes:
- ${OCS_CONFIG_DIR}:/config:ro
- /path/to/fake/data/dir:/data
@@ -61,17 +63,17 @@ inclined an example config entry is::
Agent API
---------
-.. autoclass:: agents.smurf_file_emulator.smurf_file_emulator.SmurfFileEmulator
+.. autoclass:: socs.agents.smurf_file_emulator.agent.SmurfFileEmulator
:members:
Supporting API
---------------
-.. autoclass:: agents.smurf_file_emulator.smurf_file_emulator.Tune
+.. autoclass:: socs.agents.smurf_file_emulator.agent.Tune
:members:
-.. autoclass:: agents.smurf_file_emulator.smurf_file_emulator.DataStreamer
+.. autoclass:: socs.agents.smurf_file_emulator.agent.DataStreamer
:members:
-.. autoclass:: agents.smurf_file_emulator.smurf_file_emulator.G3FrameGenerator
+.. autoclass:: socs.agents.smurf_file_emulator.agent.G3FrameGenerator
:members:
diff --git a/docs/agents/suprsync.rst b/docs/agents/suprsync.rst
index 2d859e519..ecd7c0903 100644
--- a/docs/agents/suprsync.rst
+++ b/docs/agents/suprsync.rst
@@ -12,9 +12,9 @@ by checking the md5sum and deleting the local files after a specified amount
of time if the local and remote checksums match.
.. argparse::
- :filename: ../agents/suprsync/suprsync.py
+ :filename: ../socs/agents/suprsync/agent.py
:func: make_parser
- :prog: python3 suprsync.py
+ :prog: python3 agent.py
Configuration File Examples
---------------------------
@@ -76,34 +76,34 @@ only possible because the ``cryo:smurf`` user is already built into the
SuprSync docker::
ocs-timestream-sync:
- image: simonsobs/ocs-suprsync-agent:latest
+ image: simonsobs/socs:latest
hostname: ocs-docker
user: cryo:smurf
network_mode: host
container_name: ocs-timestream-sync
+ environment:
+ - INSTANCE_ID=timestream-sync
+ - SITE_HUB=ws://${CB_HOST}:8001/ws
+ - SITE_HTTP=http://${CB_HOST}:8001/call
volumes:
- ${OCS_CONFIG_DIR}:/config
- /data:/data
- /home/cryo/.ssh:/home/cryo/.ssh
- command:
- - '--instance-id=timestream-sync'
- - "--site-hub=ws://${CB_HOST}:8001/ws"
- - "--site-http=http://${CB_HOST}:8001/call"
ocs-smurf-sync:
- image: simonsobs/ocs-suprsync-agent:latest
+ image: simonsobs/socs:latest
hostname: ocs-docker
user: cryo:smurf
network_mode: host
container_name: ocs-smurf-sync
+ environment:
+ - INSTANCE_ID=smurf-sync
+ - SITE_HUB=ws://${CB_HOST}:8001/ws
+ - SITE_HTTP=http://${CB_HOST}:8001/call
volumes:
- ${OCS_CONFIG_DIR}:/config
- /data:/data
- /home/cryo/.ssh:/home/cryo/.ssh
- command:
- - '--instance-id=smurf-sync'
- - "--site-hub=ws://${CB_HOST}:8001/ws"
- - "--site-http=http://${CB_HOST}:8001/call"
.. note::
@@ -173,7 +173,7 @@ we'll be running one SupRsync agent for each of these two archives.
Agent API
---------
-.. autoclass:: agents.suprsync.suprsync.SupRsync
+.. autoclass:: socs.agents.suprsync.agent.SupRsync
:members:
Supporting APIs
diff --git a/docs/agents/synacc.rst b/docs/agents/synacc.rst
index 5ee0f3d6d..00ccda91b 100644
--- a/docs/agents/synacc.rst
+++ b/docs/agents/synacc.rst
@@ -9,9 +9,9 @@ The Synaccess Agent interfaces with the power strip over ethernet to control
different outlets as well as get their status.
.. argparse::
- :filename: ../agents/synacc/synacc.py
+ :filename: ../socs/agents/synacc/agent.py
:func: make_parser
- :prog: python3 synacc.py
+ :prog: python3 agent.py
Configuration File Examples
@@ -19,8 +19,9 @@ Configuration File Examples
Below are configuration examples for the ocs config file and for running the
Agent in a docker container.
-ocs-config
-``````````
+OCS Site Config
+```````````````
+
To configure the Synaccess Agent we need to add a Synaccess Agent block to our ocs
configuration file. Here is an example configuration block using all of the
available arguments::
@@ -33,29 +34,36 @@ available arguments::
['--password', 'admin'],
]}
-Docker
-``````
+Docker Compose
+``````````````
+
The Synaccess Agent should be configured to run in a Docker container.
An example docker-compose service configuration is shown here::
ocs-synacc:
- image: simonsobs/ocs-synaccess-agent
+ image: simonsobs/socs:latest
hostname: ocs-docker
network_mode: "host"
+ environment:
+ - INSTANCE_ID=synacc
volumes:
- ${OCS_CONFIG_DIR}:/config:ro
- command:
- - "--instance-id=synacc"
Since the agent within the container needs to communicate with hardware on the
host network you must use ``network_mode: "host"`` in your compose file.
-Example Client
---------------
+Agent API
+---------
+
+.. autoclass:: socs.agents.synacc.agent.SynaccessAgent
+ :members:
+
+Example Clients
+---------------
Below is an example client to control outlets::
- from ocs import matched_client
- synaccess = matched_client.MatchedClient('synacc', args=[])
+ from ocs import ocs_client
+ synaccess = ocs_client.OCSClient('synacc', args=[])
#Get status of the strip
synaccess.get_status.start()
@@ -73,10 +81,3 @@ Below is an example client to control outlets::
#Turn on/off all outlets
synaccess.set_all.start(on=True)
synaccess.set_all.wait()
-
-
-Agent API
----------
-
-.. autoclass:: agents.synacc.synacc.SynaccessAgent
- :members:
diff --git a/docs/agents/tektronix3021c.rst b/docs/agents/tektronix3021c.rst
index 3443908e6..635d475ad 100644
--- a/docs/agents/tektronix3021c.rst
+++ b/docs/agents/tektronix3021c.rst
@@ -12,9 +12,9 @@ It connects to the function generator over ethernet, and allows
users to set frequency, peak to peak voltage, and turn the AWG on/off.
.. argparse::
- :filename: ../agents/tektronix3021c/tektronix_agent.py
+ :filename: ../socs/agents/tektronix3021c/agent.py
:func: make_parser
- :prog: python3 tektronix_agent.py
+ :prog: python3 agent.py
Configuration File Examples
@@ -22,8 +22,9 @@ Configuration File Examples
Below are configuration examples for the ocs config file and for running the
Agent in a docker container.
-ocs-config
-``````````
+OCS Site Config
+```````````````
+
To configure the Tektronix AWG Agent we need to add a block to our ocs
configuration file. Here is an example configuration block using all of
the available arguments::
@@ -40,46 +41,47 @@ have GPIB ports rather than ethernet ports. Therefore a GPIB-to-ethernet
converter is required, and the gpib slot must be specified in the ocs
configuration file. The IP address is then associated with the converter.
-Docker
-``````
+Docker Compose
+``````````````
+
The Tektronix AWG Agent should be configured to run in a Docker container.
An example docker-compose service configuration is shown here::
ocs-psuK:
- image: simonsobs/ocs-tektronix-agent:latest
+ image: simonsobs/socs:latest
hostname: ocs-docker
+ environment:
+ - INSTANCE_ID=tektronix
volumes:
- ${OCS_CONFIG_DIR}:/config:ro
- command:
- - "--instance-id=tektronix"
-Example Client
---------------
+Agent API
+---------
+
+.. autoclass:: socs.agents.tektronix3021c.agent.TektronixAWGAgent
+ :members:
+
+Example Clients
+---------------
Below is an example client demonstrating full agent functionality.
Note that all tasks can be run even while the data acquisition process
is running.::
- from ocs.matched_client import MatchedClient
+ from ocs.ocs_client import OCSClient
- #Initialize the power supply
- tek = MatchedClient('tektronix', args=[])
+ # Initialize the power supply
+ tek = OCSClient('tektronix', args=[])
tek.init.start()
tek.init.wait()
- #Set AWG frequency
+ # Set AWG frequency
     tek.set_frequency.start(frequency=200)
     tek.set_frequency.wait()
-    #Set AWG peak to peak voltage
+    # Set AWG peak to peak voltage
     tek.set_amplitude.start(amplitude=5)
     tek.set_amplitude.wait()
-    #Set AWG on/off
+    # Set AWG on/off
     tek.set_output.start(state=True)
     tek.set_output.wait()
-
-Agent API
----------
-
-.. autoclass:: agents.tektronix3021c.tektronix_agent.TektronixAWGAgent
- :members: set_frequency, set_amplitude, set_output
diff --git a/docs/agents/thorlabs_mc2000b.rst b/docs/agents/thorlabs_mc2000b.rst
new file mode 100644
index 000000000..ddb1a8d79
--- /dev/null
+++ b/docs/agents/thorlabs_mc2000b.rst
@@ -0,0 +1,65 @@
+.. highlight:: rst
+
+.. _thorlabs_mc2000b_agent:
+
+========================
+Thorlabs MC2000B Agent
+========================
+
+The Thorlabs MC2000B Agent is an OCS agent which helps monitor input and output
+frequencies of the Thorlabs chopper, and sends commands to set the frequency of the chopper,
+as well as other features such as the bladetype and reference modes of the device.
+
+Dependencies
+------------
+
+This agent must run on a Windows machine, outside of a Docker container. This
+requires manually setting the $OCS_CONFIG_DIR environment variable to the
+ocs-site-configs path on your computer. Because we are outside of a Docker
+container, the Thorlabs MC2000B Agent also needs to be run from within the
+$OCS_CONFIG_DIR path. Instructions for setting an environment variable on
+a Windows computer:
+
+- `Set Env Variable in Windows `_
+
+Python Packages
+```````````````
+
+The MC2000B optical chopper is a Thorlabs device. To use the Agent, a software package must be installed
+from Thorlabs' website. The software package includes the Python software development kit for third-party
+development. That software is imported as MC2000B_COMMAND_LIB in the OCS Agent.
+
+- `MC2000B python command library `_
+
+
+
+Configuration File Examples
+---------------------------
+
+Below are configuration examples for the ocs config file.
+
+OCS Site Config
+````````````````
+
+To configure the Thorlabs MC2000B chopper Agent we need to add a ThorlabsMC2000BAgent block to our
+ocs configuration file. Here is an example configuration block using all of the
+available arguments::
+
+ {'agent-class': 'ThorlabsMC2000BAgent',
+ 'instance-id': 'chopper',
+ 'arguments': [['--mode', 'acq'],
+ ['--com-port', 'COM3']]},
+
+Description
+-----------
+
+The Thorlabs MC2000B Agent accepts 15 bladetypes, whose names (the item
+numbers) can be found at the link below under "Single and Dual Frequency
+Optical Chopper Blades" and "Harmonic Frequency Optical Chopper Blades":
+
+- `Bladetypes `_
+
+Agent API
+---------
+
+.. autoclass:: socs.agents.thorlabs_mc2000b.agent.ThorlabsMC2000BAgent
+ :members:
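+
+Example Clients
+---------------
+
+Below is a minimal sketch of checking the chopper's most recent readings from a
+control client. The ``acq`` process name (suggested by the ``--mode acq``
+argument above) and the layout of ``session['data']`` are assumptions here and
+should be checked against the Agent API above::
+
+    from ocs.ocs_client import OCSClient
+
+    chopper = OCSClient('chopper')
+
+    # Query the (assumed) data acquisition process and print its session data.
+    status, message, session = chopper.acq.status()
+    print(session['data'])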
diff --git a/docs/agents/vantage_pro2.rst b/docs/agents/vantage_pro2.rst
index d5ce20770..40170fe9e 100644
--- a/docs/agents/vantage_pro2.rst
+++ b/docs/agents/vantage_pro2.rst
@@ -12,12 +12,63 @@ the laboratory computer via usb cable and data is sent though that connection.
Here is the `VantagePro2 Operations manual`_.
.. argparse::
- :filename: ../agents/vantagePro2_agent/vantage_pro2_agent.py
+ :filename: ../socs/agents/vantagepro2/agent.py
:func: make_parser
- :prog: python3 vantage_pro2_agent.py
+ :prog: python3 agent.py
.. _`VantagePro2 Operations manual`: https://www.davisinstruments.com/support/weather/download/VantageSerialProtocolDocs_v261.pdf
+Configuration File Examples
+---------------------------
+Below are configuration examples for the ocs config file and for running the
+Agent in a docker container.
+
+OCS Site Config
+```````````````
+
+To configure the Vantage Pro2 Agent we need to add a VantagePro2Agent block to
+our ocs configuration file. Here is an example configuration block,
+where we do not specify the port.
+Note: One should first add the serial number of the VantagePro 2 device
+to the udev file and create a SYMLINK.
+The Vendor ID is "10c4" and the Product ID is "ea60" for the Vantage Pro2.
+Here, we associate the vendor and product IDs with the SYMLINK 'VP2'.
+So, we're setting the serial number argument as 'VP2'::
+
+ {'agent-class': 'VantagePro2Agent',
+ 'instance-id': 'vantagepro2agent',
+ 'arguments': [['--mode', 'acq'],
+ ['--serial-number', 'VP2'],
+ ['--sample-freq', '0.5']]},
+
+An example block of the udev rules file for the VantagePro 2 follows::
+
+ SUBSYSTEM=="tty", ATTRS{idVendor}=="10c4", ATTRS{idProduct}=="ea60",
+ SYMLINK="VP2"
+
+
+The agent will attempt to find the port that the Vantage Pro2 is connected to
+based on the serial number.
+
+Note the '--sample-freq' argument specifies the sample frequency at which the
+Vantage Pro2 Monitor collects weather data. The Vantage Pro2 and weather
+station can sample weather data at a maximum sample frequency of 0.5 Hz.
+The user can define slower sample frequencies if they so desire.
+
+Docker Compose
+``````````````
+
+The Vantage Pro2 Agent should be configured to run in a Docker container. An
+example docker-compose service configuration is shown here::
+
+ ocs-vantage-pro2:
+ image: simonsobs/socs:latest
+ hostname: ocs-docker
+     environment:
+ - INSTANCE_ID=vantagepro2agent
+ volumes:
+ - ${OCS_CONFIG_DIR}:/config:ro
+
Description
-----------
Out of the box, one just needs to connect the weather station to the
@@ -87,59 +138,8 @@ types of weather data:
- Time of Sunrise: Time is stored as hour x 100 + min
- Time of Sunset: Time is stored as hour x 100 + min
-Configuration File Examples
----------------------------
-Below are configuration examples for the ocs config file and for running the
-Agent in a docker container.
-
-ocs-config
-``````````
-To configure the Vantage Pro2 Agent we need to add a VantagePro2Agent block to
-our ocs configuration file. Here is an example configuration block,
-where we do not specify the port.
-Note: One should first add the serial number of the VantagePro 2 device
-to the udev file and create SYMLINK.
-The Vendor ID is "10c4" and the Prodcut ID is "ea60" for the Vantage Pro2.
-Here, we associate the vendor and product ID's with the SYMLINK 'VP2'.
-So, we're setting the serial number argument as 'VP2'::
-
- {'agent-class': 'VantagePro2Agent',
- 'instance-id': 'vantagepro2agent',
- 'arguments': [['--mode', 'acq'],
- ['--serial-number', 'VP2'],
- ['--sample-freq', '0.5']]},
-
-An example block of the udev rules file for the VantagePro 2 follows::
-
- SUBSYSTEM=="tty", ATTRS{idVendor}=="10c4", ATTRS{idProduct}=="ea60",
- SYMLINK="VP2"
-
-
-The agent will attempt to find the port that the Vantage Pro2 is connected to
-based on the serial number.
-
-Note the '--freq' argument specifies the sample frequency that the Vantage Pro2
-Monitor collects weather data. The Vantage Pro2 and weather station can
-sample weathervdata at a maximum sample frequency of 0.5 Hz.
-The user can define slower sample frequencies if they so desire.
-
-Docker
-``````
-The Vantage Pro2 Agent should be configured to run in a Docker container. An
-example docker-compose service configuration is shown here::
-
- ocs-vantage-pro2:
- build: /socs/agents/vantagePro2_agent
-
- volumes:
- - ${OCS_CONFIG_DIR}:/config:ro
- command:
- - "--instance-id=vantagepro2agent"
- - "--site-hub=ws://crossbar:8001/ws"
- - "--site-http=http://crossbar:8001/call"
-
Agent API
---------
-.. autoclass:: agents.vantagePro2_agent.vantage_pro2_agent.VantagePro2Agent
- :members: init_VantagePro2_task, start_acq
+.. autoclass:: socs.agents.vantagepro2.agent.VantagePro2Agent
+ :members:
diff --git a/docs/agents/wiregrid_actuator.rst b/docs/agents/wiregrid_actuator.rst
index fc330c4c7..a0965b47e 100644
--- a/docs/agents/wiregrid_actuator.rst
+++ b/docs/agents/wiregrid_actuator.rst
@@ -13,9 +13,9 @@ It also reads ON/OFF of the limit-switches on the ends of the actuators
and lock/unlock the stoppers to lock/unlock the actuators.
.. argparse::
- :filename: ../agents/wiregrid_actuator/wiregrid_actuator.py
+ :filename: ../socs/agents/wiregrid_actuator/agent.py
:func: make_parser
- :prog: python3 wiregrid_actuator.py
+ :prog: python3 agent.py
Dependencies
------------
@@ -53,10 +53,10 @@ An example docker-compose configuration::
image: simonsobs/ocs-wgactuator-agent:latest
hostname: ocs-docker
network_mode: "host"
+ environment:
+ - INSTANCE_ID=wgactuator
volumes:
- ${OCS_CONFIG_DIR}:/config:ro
- command:
- - "--instance-id=wgactuator"
- Since the agent within the container needs to communicate with hardware on the
host network you must use ``network_mode: "host"`` in your compose file.
@@ -111,7 +111,7 @@ These list are configured in ``limitswitch_config.py`` and ``stopper_config.py``
Agent API
---------
-.. autoclass:: agents.wiregrid_actuator.wiregrid_actuator.WiregridActuatorAgent
+.. autoclass:: socs.agents.wiregrid_actuator.agent.WiregridActuatorAgent
:members:
Example Clients
@@ -131,8 +131,8 @@ Below is an example client to insert and eject the actuator::
Supporting APIs
---------------
-.. autoclass:: agents.wiregrid_actuator.src.Actuator.Actuator
+.. autoclass:: socs.agents.wiregrid_actuator.drivers.Actuator.Actuator
:members:
-.. autoclass:: agents.wiregrid_actuator.src.DigitalIO.DigitalIO
+.. autoclass:: socs.agents.wiregrid_actuator.drivers.DigitalIO.DigitalIO
:members:
diff --git a/docs/agents/wiregrid_encoder.rst b/docs/agents/wiregrid_encoder.rst
index 2100bceba..1774c9733 100644
--- a/docs/agents/wiregrid_encoder.rst
+++ b/docs/agents/wiregrid_encoder.rst
@@ -15,9 +15,9 @@ via ethernet UDP connection.
This agent parses the received data to a readable data and records it.
.. argparse::
- :filename: ../agents/wiregrid_encoder/wiregrid_encoder.py
+ :filename: ../socs/agents/wiregrid_encoder/agent.py
:func: make_parser
- :prog: python3 wiregrid_encoder.py
+ :prog: python3 agent.py
Dependencies
------------
@@ -51,15 +51,15 @@ Docker Compose
An example docker-compose configuration::
ocs-wgencoder-agent:
- image: simonsobs/ocs-wgencoder-agent:latest
+ image: simonsobs/socs:latest
restart: always
hostname: ocs-docker
network_mode: "host"
+ environment:
+ - INSTANCE_ID=wgencoder
volumes:
- ${OCS_CONFIG_DIR}:/config:ro
- "/data/wg-data:/data/wg-data"
- command:
- - "--instance-id=wgencoder"
ports:
- "localhost:50007:50007/udp"
@@ -76,9 +76,11 @@ Description
Hardware Configurations
```````````````````````
-The hardware-related variables are defined in ``wiregrid_encoder.py``.
+The hardware-related variables are defined in ``wiregrid_encoder.py``:
+
- COUNTER_INFO_LENGTH = 100
- COUNTS_ON_BELT = 52000
+
These should be consistent with the script running in the BeagleBoneBlack,
and these numbers will rarely be changed because they depend on the hardware.
@@ -90,5 +92,5 @@ They also will rarely be changed.
Agent API
---------
-.. autoclass:: agents.wiregrid_encoder.wiregrid_encoder.WiregridEncoderAgent
+.. autoclass:: socs.agents.wiregrid_encoder.agent.WiregridEncoderAgent
:members:
diff --git a/docs/agents/wiregrid_kikusui.rst b/docs/agents/wiregrid_kikusui.rst
index a4dc8a6c4..244f73252 100644
--- a/docs/agents/wiregrid_kikusui.rst
+++ b/docs/agents/wiregrid_kikusui.rst
@@ -14,9 +14,9 @@ via RS-232 (D-sub 9pin cable).
The agent communicates with the converter via Ethernet.
.. argparse::
- :filename: ../agents/wiregrid_kikusui/kikusui_agent.py
+ :filename: ../socs/agents/wiregrid_kikusui/agent.py
:func: make_parser
- :prog: python3 kikusui_agent.py
+ :prog: python3 agent.py
Configuration File Examples
---------------------------
@@ -49,17 +49,14 @@ Docker Compose
An example docker-compose configuration::
ocs-wgkikusui-agent:
- image: simonsobs/ocs-wgkikusui-agent:latest
- restart: always
+ image: simonsobs/socs:latest
hostname: ocs-docker
network_mode: "host"
- depends_on:
- - "crossbar"
+      environment:
+ - INSTANCE_ID=wgkikusui
volumes:
- ${OCS_CONFIG_DIR}:/config:ro
- ":/data/wg-data"
- command:
- - "--instance-id=wgkikusui"
- Since the agent within the container needs to communicate with hardware on the
host network you must use ``network_mode: "host"`` in your compose file.
@@ -107,7 +104,7 @@ However, if you want to rotate the wire-grid continuousely, you can use the foll
Agent API
---------
-.. autoclass:: agents.wiregrid_kikusui.kikusui_agent.WiregridKikusuiAgent
+.. autoclass:: socs.agents.wiregrid_kikusui.agent.WiregridKikusuiAgent
:members:
diff --git a/docs/api.rst b/docs/api.rst
index a46683f53..2ef255f28 100644
--- a/docs/api.rst
+++ b/docs/api.rst
@@ -3,55 +3,59 @@ API
This page contains the auto-generated documentation for the socs package.
-socs.agent
-----------
-
-The ``agent/`` directory contains code that supports one or more socs Agents.
+socs.agents
+-----------
-socs.agent.moxaSerial
-`````````````````````
+socs.agents.scpi_psu.drivers
+````````````````````````````
-.. automodule:: socs.agent.moxaSerial
+.. automodule:: socs.agents.scpi_psu.drivers
:members:
:undoc-members:
:show-inheritance:
-socs.agent.pmx
-``````````````
+socs.agents.tektronix3021c.drivers
+``````````````````````````````````
-.. automodule:: socs.agent.pmx
+.. automodule:: socs.agents.tektronix3021c.drivers
:members:
:undoc-members:
:show-inheritance:
-socs.agent.prologix_interface
-`````````````````````````````
+socs.agents.vantagepro2.drivers
+```````````````````````````````
-.. automodule:: socs.agent.prologix_interface
+.. automodule:: socs.agents.vantagepro2.drivers
:members:
:undoc-members:
:show-inheritance:
-socs.agent.scpi_psu_driver
-``````````````````````````
+socs.common
+-----------
+
+The ``common/`` directory contains driver code that is used by multiple socs
+Agents.
-.. automodule:: socs.agent.scpi_psu_driver
+socs.common.moxa_serial
+```````````````````````
+
+.. automodule:: socs.common.moxa_serial
:members:
:undoc-members:
:show-inheritance:
-socs.agent.tektronix3021c_driver
-````````````````````````````````
+socs.common.pmx
+```````````````
-.. automodule:: socs.agent.tektronix3021c_driver
+.. automodule:: socs.common.pmx
:members:
:undoc-members:
:show-inheritance:
-socs.agent.vantage_pro2
-```````````````````````
+socs.common.prologix_interface
+``````````````````````````````
-.. automodule:: socs.agent.vantage_pro2.vantage_pro2
+.. automodule:: socs.common.prologix_interface
:members:
:undoc-members:
:show-inheritance:
diff --git a/docs/conf.py b/docs/conf.py
index e54357e20..48826cfb4 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -14,6 +14,7 @@
#
import os
import sys
+
sys.path.insert(0, os.path.abspath('..'))
from socs_version import get_versions
@@ -68,7 +69,7 @@
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
-language = None
+language = 'en'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
@@ -106,6 +107,7 @@
'src.pid_controller',
]
from unittest import mock
+
for m in autodoc_mock_imports:
sys.modules[m] = mock.Mock()
@@ -114,6 +116,7 @@ def wrap(*args, **kw):
return lambda f: f
import ocs
+
ocs.ocs_agent.param = wrap
# -- Options for HTML output -------------------------------------------------
diff --git a/docs/index.rst b/docs/index.rst
index 9dc8fdd43..8072fbfba 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -39,9 +39,9 @@ API Reference Full API documentation for core parts of the SOCS library.
agents/acu_agent
agents/bluefors_agent
- agents/chwp_encoder
agents/cryomech_cpa
agents/fts_agent
+ agents/hwp_encoder
agents/hwp_picoscope
agents/hwp_rotation_agent
agents/holo_fpga
@@ -65,6 +65,7 @@ API Reference Full API documentation for core parts of the SOCS library.
agents/suprsync
agents/synacc
agents/tektronix3021c
+ agents/thorlabs_mc2000b
agents/vantage_pro2
agents/wiregrid_actuator
agents/wiregrid_encoder
diff --git a/docs/simulators/smurf_stream_simulator.rst b/docs/simulators/smurf_stream_simulator.rst
index b725257f5..cc908d024 100644
--- a/docs/simulators/smurf_stream_simulator.rst
+++ b/docs/simulators/smurf_stream_simulator.rst
@@ -13,9 +13,9 @@ connect the timestream aggregator to it and simulate recording data to disk in
.g3 files.
.. argparse::
- :filename: ../agents/smurf_stream_simulator/smurf_stream_simulator.py
+ :filename: ../socs/agents/smurf_stream_simulator/agent.py
:func: make_parser
- :prog: python3 smurf_stream_simulator.py
+ :prog: python3 agent.py
Configuration File Examples
---------------------------
@@ -32,7 +32,7 @@ using all of the available arguments::
'instance-id': 'smurf-stream',
'arguments': [['--auto-start', True],
['--port', '50000'],
- ['--num_chans', '528'],
+ ['--num-chans', '528'],
['--stream-id', 'stream_sim']]},
Docker
@@ -41,15 +41,17 @@ The simulator should be configured to run in a Docker container. An example
docker-compose service configuration is shown here::
smurf-stream-sim:
- image: simonsobs/smurf-stream-sim
+ image: simonsobs/socs:latest
hostname: ocs-docker
ports:
- "50000:50000"
volumes:
- ${OCS_CONFIG_DIR}:/config:ro
+ command:
+ - "--instance-id=smurf-stream"
Agent API
---------
-.. autoclass:: agents.smurf_stream_simulator.smurf_stream_simulator.SmurfStreamSimulator
- :members: start_background_streamer, stop_background_streamer, set_stream_on, set_stream_off
+.. autoclass:: socs.agents.smurf_stream_simulator.agent.SmurfStreamSimulator
+ :members:
diff --git a/docs/user/installation.rst b/docs/user/installation.rst
index 6336f868e..4d4325677 100644
--- a/docs/user/installation.rst
+++ b/docs/user/installation.rst
@@ -7,9 +7,49 @@ Install and update with pip::
$ pip install -U socs
-Optionally install so3g during installation::
+You may install optional dependencies by including one or more agent group
+names on installation, for example::
+
+ $ pip3 install -U socs[labjack,pysmurf]
+
+The different groups, and the agents they provide dependencies for, are:
+
+.. list-table::
+ :widths: 1 2
+ :header-rows: 1
+
+ * - Group
+ - Supporting Agents
+ * - ``all``
+ - All Agents (except ``holography``)
+ * - ``acu``
+ - ACU Agent
+ * - ``holography``
+ - Holography FPGA and Synthesizer Agents
+ * - ``labjack``
+ - Labjack Agent
+ * - ``magpie``
+ - Magpie Agent
+ * - ``pfeiffer``
+ - Pfeiffer TC 400 Agent
+ * - ``pysmurf``
+ - Pysmurf Controller Agent
+ * - ``smurf_sim``
+ - SMuRF File Emulator, SMuRF Stream Simulator
+ * - ``synacc``
+ - Synaccess Agent
+ * - ``xy_stage``
+ - LATRt XY Stage Agent
+
+If you would like to install all optional dependencies, use the special
+variant "all"::
+
+ $ pip3 install -U socs[all]
- $ pip install -U socs[so3g]
+.. note::
+ Some Agents have additional dependencies that cannot be installed with pip.
+ See the Agent reference page for the particular agent you are trying to run
+ for more details.
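+
+The ``holography`` group is excluded from ``all`` because its dependencies
+support Python 3.8 only. On a Python 3.8 environment they can be installed
+separately, for example via the extra or the bundled requirements file::
+
+    $ pip3 install -U socs[holography]
+    $ pip3 install -r requirements/holography.txt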
Installing from Source
----------------------
diff --git a/requirements.txt b/requirements.txt
index 912909aaf..04856aac8 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,22 +1,51 @@
-ocs
+# core dependencies
autobahn[serialization]
+ocs
+sqlalchemy>=1.4
twisted
-# Lakeshore240
+# hardware communication
pyserial
+pysnmp
+pysmi
+
+# common dependencies - used by multiple agents
+numpy
+pyyaml
+
+# acu agent
+soaculib @ git+https://github.com/simonsobs/soaculib.git@master
+
+# holography agent - python 3.8 only!
+# -r requirements/holography.txt
# LabJack Agent
-pymodbus
numexpr
-pyyaml
scipy
+labjack-ljm
-# Meinberg M1000
-pysnmp
-pysmi
+# magpie agent
+scipy
+pandas
+
+# pfeiffer tc 400
+pfeiffer-vacuum-protocol==0.4
+
+# pysmurf controller
+pysmurf @ git+https://github.com/slaclab/pysmurf.git@main
+sodetlib @ git+https://github.com/simonsobs/sodetlib.git@master
+sotodlib @ git+https://github.com/simonsobs/sotodlib.git@master
+
+# synacc agent
+requests
+
+# xy_stage
+xy_stage_control @ git+https://github.com/kmharrington/xy_stage_control.git@main
-# suprsync agent and tests
-sqlalchemy
# Docs
# see docs/requirements.txt
diff --git a/requirements/holography.txt b/requirements/holography.txt
new file mode 100644
index 000000000..de2fbbfd4
--- /dev/null
+++ b/requirements/holography.txt
@@ -0,0 +1,2 @@
+casperfpga @ git+https://github.com/casper-astro/casperfpga.git@py38
+holog_daq @ git+https://github.com/McMahonCosmologyGroup/holog_daq.git@main
diff --git a/requirements/testing.txt b/requirements/testing.txt
index 08469ad0e..e5eb45f79 100644
--- a/requirements/testing.txt
+++ b/requirements/testing.txt
@@ -4,6 +4,6 @@ pytest-docker-compose
pytest-dependency
pytest-order
so3g
+thola-snmpsim
+pytest-twisted
http-server-mock
-
--r ../agents/pfeiffer_tc400/requirements.txt
diff --git a/setup.py b/setup.py
index 2138e8e37..47875b4a1 100644
--- a/setup.py
+++ b/setup.py
@@ -1,41 +1,113 @@
-from setuptools import setup, find_packages
+from setuptools import find_packages, setup
import versioneer
with open("README.rst", "r", encoding="utf-8") as fh:
long_description = fh.read()
-setup(name='socs',
- long_description=long_description,
- long_description_content_type="text/x-rst",
- version=versioneer.get_version(),
- cmdclass=versioneer.get_cmdclass(),
- description='Simons Observatory Control System',
- package_dir={'socs': 'socs'},
- packages=find_packages(),
- url="https://github.com/simonsobs/socs",
- project_urls={
- "Source Code": "https://github.com/simonsobs/ocs",
- "Documentation": "https://ocs.readthedocs.io/",
- "Bug Tracker": "https://github.com/simonsobs/ocs/issues",
- },
- classifiers=[
- "Programming Language :: Python :: 3",
- "License :: OSI Approved :: BSD License",
- "Intended Audience :: Science/Research",
- "Topic :: Scientific/Engineering :: Astronomy",
- "Framework :: Twisted",
- ],
- python_requires=">=3.7",
- install_requires=[
- 'ocs',
- 'autobahn[serialization]',
- 'twisted',
- 'pyserial',
- 'sqlalchemy',
- 'pysnmp',
- ],
- extras_require={
- "so3g": ["so3g"],
- },
- )
+# Optional Dependencies
+# ACU Agent
+acu_deps = ['soaculib @ git+https://github.com/simonsobs/soaculib.git@master']
+
+# Holography FPGA and Synthesizer Agents
+holography_deps = [ # Note: supports python 3.8 only!
+ 'casperfpga @ git+https://github.com/casper-astro/casperfpga.git@py38',
+ 'holog_daq @ git+https://github.com/McMahonCosmologyGroup/holog_daq.git@main',
+]
+
+# Labjack Agent
+labjack_deps = [
+ 'labjack-ljm',
+ 'numexpr',
+ 'scipy',
+]
+
+# Magpie Agent
+magpie_deps = [
+ 'pandas',
+ 'scipy',
+ 'so3g',
+]
+
+# Pfeiffer TC 400 Agent
+pfeiffer_deps = ['pfeiffer-vacuum-protocol==0.4']
+
+# Pysmurf Controller Agent
+pysmurf_deps = [
+ 'pysmurf @ git+https://github.com/slaclab/pysmurf.git@main',
+ 'sodetlib @ git+https://github.com/simonsobs/sodetlib.git@master',
+ 'sotodlib @ git+https://github.com/simonsobs/sotodlib.git@master',
+]
+
+# SMuRF File Emulator, SMuRF Stream Simulator
+smurf_sim_deps = ['so3g']
+
+# Synaccess Agent
+synacc_deps = ['requests']
+
+# LATRt XY Stage Agent
+xy_stage_deps = [
+ 'xy_stage_control @ git+https://github.com/kmharrington/xy_stage_control.git@main',
+]
+
+# Note: Not including the holography deps, which are Python 3.8 only
+all_deps = acu_deps + labjack_deps + magpie_deps + pfeiffer_deps + \
+ pysmurf_deps + smurf_sim_deps + synacc_deps + xy_stage_deps
+all_deps = list(set(all_deps))
+
+setup(
+ name='socs',
+ long_description=long_description,
+ long_description_content_type="text/x-rst",
+ version=versioneer.get_version(),
+ cmdclass=versioneer.get_cmdclass(),
+ description='Simons Observatory Control System',
+ package_dir={'socs': 'socs'},
+ packages=find_packages(),
+ package_data={'socs': [
+ 'agents/smurf_file_emulator/*.yaml',
+ 'agents/labjack/cal_curves/*.txt',
+ ]},
+ entry_points={
+ 'ocs.plugins': [
+ 'socs = socs.plugin',
+ ],
+ },
+ url="https://github.com/simonsobs/socs",
+ project_urls={
+        "Source Code": "https://github.com/simonsobs/socs",
+        "Documentation": "https://socs.readthedocs.io/",
+        "Bug Tracker": "https://github.com/simonsobs/socs/issues",
+ },
+ classifiers=[
+ "Programming Language :: Python :: 3",
+ "License :: OSI Approved :: BSD License",
+ "Intended Audience :: Science/Research",
+ "Topic :: Scientific/Engineering :: Astronomy",
+ "Framework :: Twisted",
+ ],
+ python_requires=">=3.7",
+ install_requires=[
+ 'autobahn[serialization]',
+ 'numpy',
+ 'ocs',
+ 'pyserial',
+ 'pysnmp',
+ 'pysmi',
+ 'pyyaml',
+ 'sqlalchemy>=1.4',
+ 'twisted',
+ ],
+ extras_require={
+ 'all': all_deps,
+ 'acu': acu_deps,
+ 'holography': holography_deps,
+ 'labjack': labjack_deps,
+ 'magpie': magpie_deps,
+ 'pfeiffer': pfeiffer_deps,
+ 'pysmurf': pysmurf_deps,
+ 'smurf_sim': smurf_sim_deps,
+ 'synacc': synacc_deps,
+ 'xy_stage': xy_stage_deps,
+ },
+)
diff --git a/simulators/lakeshore240/ls240_simulator.py b/simulators/lakeshore240/ls240_simulator.py
index 33b474fe2..43df44942 100644
--- a/simulators/lakeshore240/ls240_simulator.py
+++ b/simulators/lakeshore240/ls240_simulator.py
@@ -1,7 +1,8 @@
-import socket
import argparse
-import numpy as np
import logging
+import socket
+
+import numpy as np
BUFF_SIZE = 1024
diff --git a/simulators/lakeshore372/ls372_simulator.py b/simulators/lakeshore372/ls372_simulator.py
index d0782a593..eed5b4b38 100644
--- a/simulators/lakeshore372/ls372_simulator.py
+++ b/simulators/lakeshore372/ls372_simulator.py
@@ -12,13 +12,14 @@
# ls.msg('SCAN?'), ls.msg('INTYPE? 1'), ls.msg('OUTMODE? 0')
-import os
-import socket
import argparse
-import numpy as np
import logging
+import os
+import socket
import time
+import numpy as np
+
BUFF_SIZE = 4096
voltage_excitation_key = {1: 2.0e-6,
diff --git a/socs/Lakeshore/Lakeshore240.py b/socs/Lakeshore/Lakeshore240.py
index fcdc138bb..734f29bb3 100644
--- a/socs/Lakeshore/Lakeshore240.py
+++ b/socs/Lakeshore/Lakeshore240.py
@@ -6,13 +6,13 @@
@author: jacoblashner
"""
-from serial import Serial
-import time
+import socket
import sys
+import time
from collections import OrderedDict
-import socket
from typing import List
+from serial import Serial
BUFF_SIZE = 1024
diff --git a/socs/Lakeshore/Lakeshore336.py b/socs/Lakeshore/Lakeshore336.py
index 78e2500cd..a27297dc0 100644
--- a/socs/Lakeshore/Lakeshore336.py
+++ b/socs/Lakeshore/Lakeshore336.py
@@ -2,11 +2,11 @@
# contributors: zatkins, bkoopman, sbhimani, zhuber
import math
-import numpy as np
import socket
-
-import time
import sys
+import time
+
+import numpy as np
# helper dicts
sensor_key = {
diff --git a/socs/Lakeshore/Lakeshore370.py b/socs/Lakeshore/Lakeshore370.py
index e08e89bfa..830923605 100644
--- a/socs/Lakeshore/Lakeshore370.py
+++ b/socs/Lakeshore/Lakeshore370.py
@@ -2,9 +2,10 @@
# Lakeshore370.py
import sys
-import serial
import time
+
import numpy as np
+import serial
# Lookup keys for command parameters.
autorange_key = {'0': 'off',
diff --git a/socs/Lakeshore/Lakeshore372.py b/socs/Lakeshore/Lakeshore372.py
index 9b353a6c1..316deb618 100644
--- a/socs/Lakeshore/Lakeshore372.py
+++ b/socs/Lakeshore/Lakeshore372.py
@@ -1,7 +1,8 @@
# Lakeshore372.py
-import sys
import socket
+import sys
+
import numpy as np
# Lookup keys for command parameters.
diff --git a/socs/Lakeshore/Lakeshore425.py b/socs/Lakeshore/Lakeshore425.py
index 00ffa5999..32c755dd9 100755
--- a/socs/Lakeshore/Lakeshore425.py
+++ b/socs/Lakeshore/Lakeshore425.py
@@ -1,6 +1,7 @@
-import serial
import time
+import serial
+
operational_status_key = [
'No probe',
'Field overload',
diff --git a/socs/__init__.py b/socs/__init__.py
index 74f4e6688..ab0a70157 100644
--- a/socs/__init__.py
+++ b/socs/__init__.py
@@ -1,4 +1,5 @@
from ._version import get_versions
+
__version__ = get_versions()['version']
del get_versions
diff --git a/agents/hwp_rotation/src/__init__.py b/socs/agents/__init__.py
similarity index 100%
rename from agents/hwp_rotation/src/__init__.py
rename to socs/agents/__init__.py
diff --git a/agents/wiregrid_actuator/src/__init__.py b/socs/agents/acu/__init__.py
similarity index 100%
rename from agents/wiregrid_actuator/src/__init__.py
rename to socs/agents/acu/__init__.py
diff --git a/agents/acu/acu_agent.py b/socs/agents/acu/agent.py
similarity index 99%
rename from agents/acu/acu_agent.py
rename to socs/agents/acu/agent.py
index d6c9bd0fc..d21bb817a 100644
--- a/agents/acu/acu_agent.py
+++ b/socs/agents/acu/agent.py
@@ -1,18 +1,20 @@
-import time
-import struct
-import datetime
+import argparse
import calendar
+import datetime
+import struct
+import time
+
import soaculib as aculib
-import scan_helpers as sh
-from soaculib.twisted_backend import TwistedHttpBackend
-import argparse
import soaculib.status_keys as status_keys
-from twisted.internet import reactor, protocol
-from twisted.internet.defer import inlineCallbacks
import twisted.web.client as tclient
from autobahn.twisted.util import sleep as dsleep
from ocs import ocs_agent, site_config
from ocs.ocs_twisted import TimeoutLock
+from soaculib.twisted_backend import TwistedHttpBackend
+from twisted.internet import protocol, reactor
+from twisted.internet.defer import inlineCallbacks
+
+import socs.agents.acu.drivers as sh
def timecode(acutime):
@@ -1030,11 +1032,17 @@ def add_agent_args(parser_in=None):
return parser_in
-if __name__ == '__main__':
+def main(args=None):
parser = add_agent_args()
- args = site_config.parse_args(agent_class='ACUAgent', parser=parser)
+ args = site_config.parse_args(agent_class='ACUAgent',
+ parser=parser,
+ args=args)
agent, runner = ocs_agent.init_site_agent(args)
- acu_agent = ACUAgent(agent, args.acu_config)
+ _ = ACUAgent(agent, args.acu_config)
runner.run(agent, auto_reconnect=True)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/agents/acu/scan_helpers.py b/socs/agents/acu/drivers.py
similarity index 99%
rename from agents/acu/scan_helpers.py
rename to socs/agents/acu/drivers.py
index cc7083561..513f5b5d2 100644
--- a/agents/acu/scan_helpers.py
+++ b/socs/agents/acu/drivers.py
@@ -1,6 +1,7 @@
-import numpy as np
import time
+import numpy as np
+
def constant_velocity_scanpoints(azpts, el, azvel, acc, ntimes):
"""
diff --git a/agents/wiregrid_kikusui/src/__init__.py b/socs/agents/bluefors/__init__.py
similarity index 100%
rename from agents/wiregrid_kikusui/src/__init__.py
rename to socs/agents/bluefors/__init__.py
diff --git a/agents/bluefors/bluefors_log_tracker.py b/socs/agents/bluefors/agent.py
similarity index 99%
rename from agents/bluefors/bluefors_log_tracker.py
rename to socs/agents/bluefors/agent.py
index 227051931..776bb541f 100644
--- a/agents/bluefors/bluefors_log_tracker.py
+++ b/socs/agents/bluefors/agent.py
@@ -1,12 +1,12 @@
import argparse
-import time
-import threading
-import glob
-import re
import datetime
-import txaio
+import glob
import os
+import re
+import threading
+import time
+import txaio
from ocs import ocs_agent, site_config
# Notes:
@@ -526,13 +526,15 @@ def make_parser(parser=None):
return parser
-if __name__ == '__main__':
+def main(args=None):
# Start logging
txaio.start_logging(level=os.environ.get("LOGLEVEL", "info"))
# Setup argument parser
parser = make_parser()
- args = site_config.parse_args(agent_class='BlueforsAgent', parser=parser)
+ args = site_config.parse_args(agent_class='BlueforsAgent',
+ parser=parser,
+ args=args)
LOG.info('I am following logs located at : %s' % args.log_directory)
agent, runner = ocs_agent.init_site_agent(args)
@@ -543,3 +545,7 @@ def make_parser(parser=None):
bluefors_agent._stop_acq, startup=True)
runner.run(agent, auto_reconnect=True)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/socs/agent/__init__.py b/socs/agents/cryomech_cpa/__init__.py
similarity index 100%
rename from socs/agent/__init__.py
rename to socs/agents/cryomech_cpa/__init__.py
diff --git a/agents/cryomech_cpa/cryomech_cpa_agent.py b/socs/agents/cryomech_cpa/agent.py
similarity index 94%
rename from agents/cryomech_cpa/cryomech_cpa_agent.py
rename to socs/agents/cryomech_cpa/agent.py
index cec2bdd3e..8e59700c8 100644
--- a/agents/cryomech_cpa/cryomech_cpa_agent.py
+++ b/socs/agents/cryomech_cpa/agent.py
@@ -3,12 +3,12 @@
# Sanah Bhimani, May 2022
import argparse
-import time
-import struct
-import socket
import random
+import socket
+import struct
+import time
-from ocs import site_config, ocs_agent
+from ocs import ocs_agent, site_config
from ocs.ocs_twisted import TimeoutLock
STX = '\x02'
@@ -281,7 +281,7 @@ def power_ptc(self, session, params=None):
Desired power state of the PTC, either 'on', or 'off'.
"""
- with self.lock.acquire_timeout(0, job='power_ptc') as acquired:
+ with self.lock.acquire_timeout(3, job='power_ptc') as acquired:
if not acquired:
self.log.warn("Could not start task because {} is already "
"running".format(self.lock.job))
@@ -312,10 +312,20 @@ def acq(self, session, params):
session.set_status('running')
+ last_release = time.time()
+
self.take_data = True
- # Publish data, waiting 1/f_sample seconds in between calls.
while self.take_data:
+ # Relinquish sampling lock occasionally
+ if time.time() - last_release > 1.:
+ last_release = time.time()
+ if not self.lock.release_and_acquire(timeout=10):
+ self.log.warn(f"Failed to re-acquire sampling lock, "
+ f"currently held by {self.lock.job}.")
+ continue
+
+ # Publish data, waiting 1/f_sample seconds in between calls.
pub_data = {'timestamp': time.time(),
'block_name': 'ptc_status'}
data_flag, data = self.ptc.get_data()
@@ -363,9 +373,11 @@ def make_parser(parser=None):
return parser
-def main():
+def main(args=None):
parser = make_parser()
- args = site_config.parse_args(agent_class='CryomechCPAAgent', parser=parser)
+ args = site_config.parse_args(agent_class='CryomechCPAAgent',
+ parser=parser,
+ args=args)
print('I am in charge of device with serial number: %s' % args.serial_number)
# Automatically acquire data if requested (default)
diff --git a/socs/agent/vantage_pro2/__init__.py b/socs/agents/fts_aerotech/__init__.py
similarity index 100%
rename from socs/agent/vantage_pro2/__init__.py
rename to socs/agents/fts_aerotech/__init__.py
diff --git a/agents/fts_aerotech_stage/fts_aerotech_agent.py b/socs/agents/fts_aerotech/agent.py
similarity index 87%
rename from agents/fts_aerotech_stage/fts_aerotech_agent.py
rename to socs/agents/fts_aerotech/agent.py
index 12dbdc4b4..3949dc72b 100644
--- a/agents/fts_aerotech_stage/fts_aerotech_agent.py
+++ b/socs/agents/fts_aerotech/agent.py
@@ -1,16 +1,14 @@
-import socket
+import argparse
import os
+import socket
import time
+
import txaio
import yaml
-import argparse
+from ocs import ocs_agent, site_config
+from ocs.ocs_twisted import Pacemaker, TimeoutLock
from twisted.internet import reactor
-ON_RTD = os.environ.get('READTHEDOCS') == 'True'
-if not ON_RTD:
- from ocs import ocs_agent, site_config
- from ocs.ocs_twisted import TimeoutLock, Pacemaker
-
class FTSAerotechStage:
"""
@@ -103,7 +101,7 @@ def close(self):
class FTSAerotechAgent:
"""
- Agent for connecting to the FTS mirror control
+ Agent for connecting to the FTS mirror control.
Args:
ip_addr: IP address of Motion Controller
@@ -168,18 +166,13 @@ def __init__(self, agent, ip_addr, port, config_file, mode=None, samp=2):
raise Exception("translate and limits must be included "
"in the mirror configuration keys")
- def init_stage_task(self, session, params=None):
- """init_stage_task(params=None)
- Perform first time setup for communication with FTS stage.
+ @ocs_agent.param('_')
+ def init_stage(self, session, params=None):
+ """init_stage()
- Args:
- params (dict): Parameters dictionary for passing parameters to
- task.
- """
-
- if params is None:
- params = {}
+ **Task** - Perform first time setup for communication with FTS stage.
+ """
if self.stage is not None and self.initialized:
return True, 'Stages already Initialized'
@@ -207,8 +200,12 @@ def init_stage_task(self, session, params=None):
self.agent.start('acq')
return True, 'Stage Initialized.'
- def home_task(self, session, params=None):
- """ Home the stage to its negative limit
+ @ocs_agent.param('_')
+ def home(self, session, params=None):
+ """home()
+
+ **Task** - Home the stage to its negative limit.
+
"""
with self.lock.acquire_timeout(timeout=3, job='home') as acquired:
@@ -223,16 +220,16 @@ def home_task(self, session, params=None):
return False, "Homing Failed"
return True, "Homing Complete"
+ @ocs_agent.param('position', type=float, check=lambda x: -74.8 <= x <= 74.8)
def move_to(self, session, params=None):
- """Move to absolute position relative to stage center (in mm)
+ """move_to(position)
- params: {'position':float between -74.8 and 74.8}
- """
- if params is None:
- return False, "No Position Given"
- if 'position' not in params:
- return False, "No Position Given"
+ **Task** - Move to absolute position relative to stage center (in mm).
+
+ Parameters:
+ position (float): Position in mm, must be between -74.8 and 74.8.
+ """
with self.lock.acquire_timeout(timeout=3, job='move') as acquired:
if not acquired:
self.log.warn("Could not start move because lock held by"
@@ -242,17 +239,21 @@ def move_to(self, session, params=None):
return False, "Move did not complete correctly?"
- def start_acq(self, session, params=None):
- """
- params:
- dict: {'sampling_frequency': float, sampling rate in Hz}
+ @ocs_agent.param('sampling_frequency', type=float, default=2)
+ def acq(self, session, params=None):
+ """acq(sampling_frequency=2)
- The most recent position data is stored in session.data in the format::
- {"position":{"pos" : mirror position }
- """
- if params is None:
- params = {}
+ Parameters:
+ sampling_frequency (float): Sampling rate in Hz. Defaults to 2 Hz.
+
+ Notes:
+ The most recent position data is stored in session.data in the
+ format::
+
+ >>> response.session['data']
+ {"position": {"pos" : mirror position}}
+ """
f_sample = params.get('sampling_frequency', self.sampling_frequency)
pm = Pacemaker(f_sample, quantize=True)
@@ -293,7 +294,7 @@ def start_acq(self, session, params=None):
return True, 'Acquisition exited cleanly.'
- def stop_acq(self, session, params=None):
+ def _stop_acq(self, session, params=None):
"""
params:
dict: {}
@@ -322,11 +323,10 @@ def make_parser(parser=None):
return parser
-if __name__ == '__main__':
-
+def main(args=None):
# For logging
txaio.use_twisted()
- LOG = txaio.make_logger()
+ txaio.make_logger()
# Start logging
txaio.start_logging(level=os.environ.get("LOGLEVEL", "info"))
@@ -335,7 +335,8 @@ def make_parser(parser=None):
# Interpret options in the context of site_config.
args = site_config.parse_args(agent_class='FTSAerotechAgent',
- parser=parser)
+ parser=parser,
+ args=args)
agent, runner = ocs_agent.init_site_agent(args)
@@ -343,10 +344,14 @@ def make_parser(parser=None):
args.config_file, args.mode,
args.sampling_frequency)
- agent.register_task('init_stage', fts_agent.init_stage_task)
+ agent.register_task('init_stage', fts_agent.init_stage)
agent.register_task('move_to', fts_agent.move_to)
- agent.register_task('home', fts_agent.home_task)
+ agent.register_task('home', fts_agent.home)
- agent.register_process('acq', fts_agent.start_acq, fts_agent.stop_acq)
+ agent.register_process('acq', fts_agent.acq, fts_agent._stop_acq)
runner.run(agent, auto_reconnect=True)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/socs/agents/holo_fpga/__init__.py b/socs/agents/holo_fpga/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/agents/holo_fpga/roach_agent.py b/socs/agents/holo_fpga/agent.py
similarity index 96%
rename from agents/holo_fpga/roach_agent.py
rename to socs/agents/holo_fpga/agent.py
index ced51f32d..26fd3b766 100644
--- a/agents/holo_fpga/roach_agent.py
+++ b/socs/agents/holo_fpga/agent.py
@@ -1,9 +1,10 @@
import argparse
import os
+import time
+
import numpy as np
import txaio
import yaml
-import time
from ocs import ocs_agent, site_config
from ocs.ocs_twisted import TimeoutLock
@@ -153,10 +154,10 @@ def make_parser(parser=None):
return parser
-if __name__ == "__main__":
+def main(args=None):
# For logging
txaio.use_twisted()
- LOG = txaio.make_logger()
+ txaio.make_logger()
# Start logging
txaio.start_logging(level=os.environ.get("LOGLEVEL", "info"))
@@ -164,7 +165,9 @@ def make_parser(parser=None):
parser = make_parser()
# Interpret options in the context of site_config.
- args = site_config.parse_args(agent_class="FPGAAgent", parser=parser)
+ args = site_config.parse_args(agent_class="FPGAAgent",
+ parser=parser,
+ args=args)
agent, runner = ocs_agent.init_site_agent(args)
@@ -174,3 +177,7 @@ def make_parser(parser=None):
agent.register_task("take_data", fpga_agent.take_data)
runner.run(agent, auto_reconnect=True)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/socs/agents/holo_synth/__init__.py b/socs/agents/holo_synth/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/agents/holo_synth/synth_agent.py b/socs/agents/holo_synth/agent.py
similarity index 97%
rename from agents/holo_synth/synth_agent.py
rename to socs/agents/holo_synth/agent.py
index 939f9d835..876ff4b82 100644
--- a/agents/holo_synth/synth_agent.py
+++ b/socs/agents/holo_synth/agent.py
@@ -1,9 +1,9 @@
import argparse
import os
import time
+
import txaio
import yaml
-
from ocs import ocs_agent, site_config
from ocs.ocs_twisted import TimeoutLock
@@ -221,10 +221,10 @@ def make_parser(parser=None):
return parser
-if __name__ == "__main__":
+def main(args=None):
# For logging
txaio.use_twisted()
- LOG = txaio.make_logger()
+ txaio.make_logger()
# Start logging
txaio.start_logging(level=os.environ.get("LOGLEVEL", "info"))
@@ -232,7 +232,9 @@ def make_parser(parser=None):
parser = make_parser()
# Interpret options in the context of site_config.
- args = site_config.parse_args(agent_class="SynthAgent", parser=parser)
+ args = site_config.parse_args(agent_class="SynthAgent",
+ parser=parser,
+ args=args)
agent, runner = ocs_agent.init_site_agent(args)
@@ -244,3 +246,7 @@ def make_parser(parser=None):
agent.register_task("set_synth_status", synth_agent.set_synth_status)
runner.run(agent, auto_reconnect=True)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/socs/agents/hwp_encoder/__init__.py b/socs/agents/hwp_encoder/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/agents/chwp/hwpbbb_agent.py b/socs/agents/hwp_encoder/agent.py
similarity index 98%
rename from agents/chwp/hwpbbb_agent.py
rename to socs/agents/hwp_encoder/agent.py
index 6df352522..d20f23695 100644
--- a/agents/chwp/hwpbbb_agent.py
+++ b/socs/agents/hwp_encoder/agent.py
@@ -51,22 +51,20 @@
"""
import argparse
-import os
+import calendar
+import select
import socket
import struct
import time
-import calendar
from collections import deque
-import select
+
import numpy as np
import txaio
+
txaio.use_twisted()
-# Required by OCS
-ON_RTD = os.environ.get('READTHEDOCS') == 'True'
-if not ON_RTD:
- from ocs import ocs_agent, site_config
- from ocs.ocs_twisted import TimeoutLock
+from ocs import ocs_agent, site_config
+from ocs.ocs_twisted import TimeoutLock
# These three values (COUNTER_INFO_LENGTH, COUNTER_PACKET_SIZE, IRIG_PACKET_SIZE)
# should be consistent with the software on beaglebone.
@@ -508,10 +506,12 @@ def __init__(self, agent_obj, port=8080):
agg_params=agg_params)
self.parser = EncoderParser(beaglebone_port=self.port)
- def start_acq(self, session, params):
- """Starts acquiring data.
- """
+ def acq(self, session, params):
+ """acq()
+ **Process** - Start acquiring data.
+
+ """
time_encoder_published = 0
counter_list = []
counter_index_list = []
@@ -669,7 +669,7 @@ def start_acq(self, session, params):
self.agent.feeds['HWPEncoder'].flush_buffer()
return True, 'Acquisition exited cleanly.'
- def stop_acq(self, session, params=None):
+ def _stop_acq(self, session, params=None):
"""
Stops the data acquisiton.
"""
@@ -692,11 +692,17 @@ def make_parser(parser=None):
# Portion of the code that runs
-if __name__ == '__main__':
+def main(args=None):
parser = make_parser()
- args = site_config.parse_args(agent_class='HWPBBBAgent', parser=parser)
+ args = site_config.parse_args(agent_class='HWPBBBAgent',
+ parser=parser,
+ args=args)
agent, runner = ocs_agent.init_site_agent(args)
hwp_bbb_agent = HWPBBBAgent(agent, port=args.port)
- agent.register_process('acq', hwp_bbb_agent.start_acq, hwp_bbb_agent.stop_acq, startup=True)
+ agent.register_process('acq', hwp_bbb_agent.acq, hwp_bbb_agent._stop_acq, startup=True)
runner.run(agent, auto_reconnect=True)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/socs/agents/hwp_picoscope/__init__.py b/socs/agents/hwp_picoscope/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/agents/hwp_picoscope/pico_agent.py b/socs/agents/hwp_picoscope/agent.py
old mode 100755
new mode 100644
similarity index 96%
rename from agents/hwp_picoscope/pico_agent.py
rename to socs/agents/hwp_picoscope/agent.py
index b08619062..4f3f53740
--- a/agents/hwp_picoscope/pico_agent.py
+++ b/socs/agents/hwp_picoscope/agent.py
@@ -1,7 +1,8 @@
-import numpy as np
-import time
-import os
import argparse
+import os
+import time
+
+import numpy as np
import txaio
from ocs import ocs_agent, site_config
from ocs.ocs_twisted import TimeoutLock
@@ -10,7 +11,7 @@
ON_RTD = os.environ.get('READTHEDOCS') == 'True'
if not ON_RTD:
- import socs.agent.class_ps3000a as ps
+ import socs.agents.hwp_picoscope.drivers.class_ps3000a as ps
class PicoAgent:
@@ -176,12 +177,14 @@ def make_parser(parser=None):
return parser
-def main():
+def main(args=None):
# Start logging
txaio.start_logging(level=os.environ.get("LOGLEVEL", "info"))
parser = make_parser()
- args = site_config.parse_args(agent_class='PicoAgent', parser=parser)
+ args = site_config.parse_args(agent_class='PicoAgent',
+ parser=parser,
+ args=args)
agent, runner = ocs_agent.init_site_agent(args)
diff --git a/socs/agents/hwp_picoscope/drivers/__init__.py b/socs/agents/hwp_picoscope/drivers/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/socs/agent/class_ps3000a.py b/socs/agents/hwp_picoscope/drivers/class_ps3000a.py
old mode 100755
new mode 100644
similarity index 97%
rename from socs/agent/class_ps3000a.py
rename to socs/agents/hwp_picoscope/drivers/class_ps3000a.py
index d6a5cdd74..ae6a8fea7
--- a/socs/agent/class_ps3000a.py
+++ b/socs/agents/hwp_picoscope/drivers/class_ps3000a.py
@@ -6,11 +6,12 @@
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
'''
-import numpy as np
-import time
import ctypes
-from picosdk.ps3000a import ps3000a as ps
+import time
+
+import numpy as np
from picosdk.functions import adc2mV, assert_pico_ok, splitMSODataFast
+from picosdk.ps3000a import ps3000a as ps
class ps3000a():
diff --git a/socs/agents/hwp_rotation/__init__.py b/socs/agents/hwp_rotation/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/agents/hwp_rotation/rotation_agent.py b/socs/agents/hwp_rotation/agent.py
similarity index 98%
rename from agents/hwp_rotation/rotation_agent.py
rename to socs/agents/hwp_rotation/agent.py
index c5e89b60d..82d3f238a 100644
--- a/agents/hwp_rotation/rotation_agent.py
+++ b/socs/agents/hwp_rotation/agent.py
@@ -1,13 +1,12 @@
import argparse
import time
-from twisted.internet import reactor
from ocs import ocs_agent, site_config
from ocs.ocs_twisted import TimeoutLock
+from twisted.internet import reactor
-from socs.agent.pmx import PMX, Command
-
-import src.pid_controller as pd
+import socs.agents.hwp_rotation.drivers.pid_controller as pd
+from socs.common.pmx import PMX, Command
class RotationAgent:
@@ -461,9 +460,11 @@ def make_parser(parser=None):
return parser
-if __name__ == '__main__':
+def main(args=None):
parser = make_parser()
- args = site_config.parse_args(agent_class='RotationAgent', parser=parser)
+ args = site_config.parse_args(agent_class='RotationAgent',
+ parser=parser,
+ args=args)
init_params = False
if args.mode == 'init':
@@ -497,3 +498,7 @@ def make_parser(parser=None):
agent.register_task('ign_ext', rotation_agent.ign_ext)
runner.run(agent, auto_reconnect=True)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/socs/agents/hwp_rotation/drivers/__init__.py b/socs/agents/hwp_rotation/drivers/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/agents/hwp_rotation/src/pid_controller.py b/socs/agents/hwp_rotation/drivers/pid_controller.py
similarity index 100%
rename from agents/hwp_rotation/src/pid_controller.py
rename to socs/agents/hwp_rotation/drivers/pid_controller.py
index 6d2f459c0..251dc0ae1 100644
--- a/agents/hwp_rotation/src/pid_controller.py
+++ b/socs/agents/hwp_rotation/drivers/pid_controller.py
@@ -1,5 +1,5 @@
-import time
import socket
+import time
class PID:
diff --git a/agents/ibootbar/ibootbar.py b/socs/agents/ibootbar/agent.py
similarity index 96%
rename from agents/ibootbar/ibootbar.py
rename to socs/agents/ibootbar/agent.py
index c0e2e20f9..6ffce3c0a 100644
--- a/agents/ibootbar/ibootbar.py
+++ b/socs/agents/ibootbar/agent.py
@@ -1,12 +1,12 @@
-import time
+import argparse
import os
+import time
-from twisted.internet.defer import inlineCallbacks
-from autobahn.twisted.util import sleep as dsleep
-import argparse
import txaio
+from autobahn.twisted.util import sleep as dsleep
from ocs import ocs_agent, site_config
from ocs.ocs_twisted import TimeoutLock
+from twisted.internet.defer import inlineCallbacks
from socs.snmp import SNMPTwister
@@ -196,7 +196,7 @@ def __init__(self, agent, address, port=161, version=2):
agg_params=agg_params,
buffer_time=0)
- @ocs_agent.param('_')
+ @ocs_agent.param('test_mode', default=False, type=bool)
@inlineCallbacks
def acq(self, session, params=None):
"""acq()
@@ -271,6 +271,9 @@ def acq(self, session, params=None):
yield dsleep(1)
self.log.info('Trying to reconnect.')
+ if params['test_mode']:
+ break
+
return True, "Finished Recording"
def _stop_acq(self, session, params=None):
@@ -386,25 +389,30 @@ def add_agent_args(parser=None):
parser = argparse.ArgumentParser()
pgroup = parser.add_argument_group("Agent Options")
- pgroup.add_argument("--auto-start", default=True, type=bool,
- help="Automatically start polling for data at "
- + "Agent startup.")
pgroup.add_argument("--address", help="Address to listen to.")
pgroup.add_argument("--port", default=161,
help="Port to listen on.")
pgroup.add_argument("--snmp-version", default='2', choices=['1', '2', '3'],
help="SNMP version for communication. Must match "
+ "configuration on the ibootbar.")
+ pgroup.add_argument("--mode", choices=['acq', 'test'])
return parser
-if __name__ == "__main__":
+def main(args=None):
# Start logging
txaio.start_logging(level=os.environ.get("LOGLEVEL", "info"))
parser = add_agent_args()
- args = site_config.parse_args(agent_class='ibootbarAgent', parser=parser)
+ args = site_config.parse_args(agent_class='ibootbarAgent',
+ parser=parser,
+ args=args)
+
+ if args.mode == 'acq':
+ init_params = True
+ elif args.mode == 'test':
+ init_params = False
agent, runner = ocs_agent.init_site_agent(args)
p = ibootbarAgent(agent,
@@ -415,10 +423,14 @@ def add_agent_args(parser=None):
agent.register_process("acq",
p.acq,
p._stop_acq,
- startup=bool(args.auto_start), blocking=False)
+ startup=init_params, blocking=False)
agent.register_task("set_outlet", p.set_outlet, blocking=False)
agent.register_task("cycle_outlet", p.cycle_outlet, blocking=False)
agent.register_task("set_initial_state", p.set_initial_state, blocking=False)
runner.run(agent, auto_reconnect=True)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/socs/agents/labjack/__init__.py b/socs/agents/labjack/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/agents/labjack/labjack_agent.py b/socs/agents/labjack/agent.py
similarity index 98%
rename from agents/labjack/labjack_agent.py
rename to socs/agents/labjack/agent.py
index 0a433d571..9e6fd5eda 100644
--- a/agents/labjack/labjack_agent.py
+++ b/socs/agents/labjack/agent.py
@@ -1,21 +1,20 @@
import argparse
-import time
-import struct
+import csv
import os
+import struct
+import time
+
import numexpr
-import yaml
-import csv
-from scipy.interpolate import interp1d
import numpy as np
import txaio
-txaio.use_twisted()
+import yaml
+from labjack import ljm
+from labjack.ljm.ljm import LJMError
+from ocs import ocs_agent, site_config
+from ocs.ocs_twisted import TimeoutLock
+from scipy.interpolate import interp1d
-ON_RTD = os.environ.get('READTHEDOCS') == 'True'
-if not ON_RTD:
- from labjack import ljm
- from labjack.ljm.ljm import LJMError
- from ocs import ocs_agent, site_config
- from ocs.ocs_twisted import TimeoutLock
+txaio.use_twisted()
# Convert Data
@@ -497,12 +496,14 @@ def make_parser(parser=None):
return parser
-if __name__ == '__main__':
+def main(args=None):
# Start logging
txaio.start_logging(level=os.environ.get("LOGLEVEL", "info"))
parser = make_parser()
- args = site_config.parse_args(agent_class='LabJackAgent', parser=parser)
+ args = site_config.parse_args(agent_class='LabJackAgent',
+ parser=parser,
+ args=args)
init_params = False
if args.mode == 'acq':
@@ -530,3 +531,7 @@ def make_parser(parser=None):
agent.register_process('acq_reg', sensors.acq_reg,
sensors._stop_acq)
runner.run(agent, auto_reconnect=True)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/agents/labjack/cal_curves/GA10K4D25_cal_curve.txt b/socs/agents/labjack/cal_curves/GA10K4D25_cal_curve.txt
similarity index 100%
rename from agents/labjack/cal_curves/GA10K4D25_cal_curve.txt
rename to socs/agents/labjack/cal_curves/GA10K4D25_cal_curve.txt
diff --git a/socs/agents/lakeshore240/__init__.py b/socs/agents/lakeshore240/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/agents/lakeshore240/LS240_agent.py b/socs/agents/lakeshore240/agent.py
similarity index 99%
rename from agents/lakeshore240/LS240_agent.py
rename to socs/agents/lakeshore240/agent.py
index a25c5e08a..5ad9fe8d2 100644
--- a/agents/lakeshore240/LS240_agent.py
+++ b/socs/agents/lakeshore240/agent.py
@@ -1,11 +1,11 @@
-import time
-import os
import argparse
+import os
+import time
import warnings
-import txaio
-
from typing import Optional
+import txaio
+
from socs.Lakeshore.Lakeshore240 import Module
on_rtd = os.environ.get('READTHEDOCS') == 'True'
@@ -267,7 +267,7 @@ def make_parser(parser=None):
return parser
-def main():
+def main(args=None):
# Start logging
txaio.start_logging(level=os.environ.get("LOGLEVEL", "info"))
@@ -278,7 +278,9 @@ def main():
parser.add_argument('--num-channels', help=argparse.SUPPRESS)
# Interpret options in the context of site_config.
- args = site_config.parse_args(agent_class='Lakeshore240Agent', parser=parser)
+ args = site_config.parse_args(agent_class='Lakeshore240Agent',
+ parser=parser,
+ args=args)
if args.fake_data is not None:
warnings.warn("WARNING: the --fake-data parameter is deprecated, please "
diff --git a/socs/agents/lakeshore336/__init__.py b/socs/agents/lakeshore336/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/agents/lakeshore336/LS336_agent.py b/socs/agents/lakeshore336/agent.py
similarity index 99%
rename from agents/lakeshore336/LS336_agent.py
rename to socs/agents/lakeshore336/agent.py
index 0a4cfb17d..d5d5f05ca 100644
--- a/agents/lakeshore336/LS336_agent.py
+++ b/socs/agents/lakeshore336/agent.py
@@ -3,13 +3,14 @@
# Author: zatkins, zhuber
# Acknowledgments: LS372 agent -- bkoopman, mhasselfield, jlashner
+import argparse
+import time
+
+import numpy as np
from ocs import ocs_agent, site_config
from ocs.ocs_twisted import TimeoutLock
-from socs.Lakeshore.Lakeshore336 import LS336
-import numpy as np
-import argparse
-import time
+from socs.Lakeshore.Lakeshore336 import LS336
class LS336_Agent:
@@ -1094,12 +1095,12 @@ def make_parser(parser=None):
return parser
-if __name__ == '__main__':
-
+def main(args=None):
# Create an argument parser
parser = make_parser()
- args = site_config.parse_args(
- agent_class='Lakeshore336Agent', parser=parser)
+ args = site_config.parse_args(agent_class='Lakeshore336Agent',
+ parser=parser,
+ args=args)
# Automatically acquire data if requested
init_params = False
@@ -1143,3 +1144,7 @@ def make_parser(parser=None):
# Run the agent
runner.run(agent, auto_reconnect=True)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/socs/agents/lakeshore370/__init__.py b/socs/agents/lakeshore370/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/agents/lakeshore370/LS370_agent.py b/socs/agents/lakeshore370/agent.py
similarity index 77%
rename from agents/lakeshore370/LS370_agent.py
rename to socs/agents/lakeshore370/agent.py
index 31e7db85b..ffba27ac0 100644
--- a/agents/lakeshore370/LS370_agent.py
+++ b/socs/agents/lakeshore370/agent.py
@@ -1,19 +1,17 @@
+import argparse
import os
import random
-import argparse
+import threading
import time
+from contextlib import contextmanager
+
import numpy as np
import txaio
-import threading
-from contextlib import contextmanager
+from ocs import ocs_agent, site_config
+from ocs.ocs_twisted import TimeoutLock
from socs.Lakeshore.Lakeshore370 import LS370
-ON_RTD = os.environ.get('READTHEDOCS') == 'True'
-if not ON_RTD:
- from ocs import ocs_agent, site_config
- from ocs.ocs_twisted import TimeoutLock
-
class YieldingLock:
"""A lock protected by a lock. This braided arrangement guarantees
@@ -121,14 +119,12 @@ def __init__(self, agent, name, port, fake_data=False, dwell_time_delay=0):
agg_params=agg_params,
buffer_time=1)
- def init_lakeshore_task(self, session, params=None):
- """init_lakeshore_task(params=None)
+ @ocs_agent.param('auto_acquire', default=False, type=bool)
+ @ocs_agent.param('force', default=False, type=bool)
+ def init_lakeshore(self, session, params=None):
+ """init_lakeshore(auto_acquire=False, force=False)
- Perform first time setup of the Lakeshore 370 communication.
-
- Args:
- params (dict): Parameters dictionary for passing parameters to
- task.
+ **Task** - Perform first time setup of the Lakeshore 370 communication.
Parameters:
auto_acquire (bool, optional): Default is False. Starts data
@@ -137,10 +133,7 @@ def init_lakeshore_task(self, session, params=None):
"""
- if params is None:
- params = {}
-
- if self.initialized and not params.get('force', False):
+ if self.initialized and not params['force']:
self.log.info("Lakeshore already initialized. Returning...")
return True, "Already initialized"
@@ -172,15 +165,16 @@ def init_lakeshore_task(self, session, params=None):
self.initialized = True
# Start data acquisition if requested
- if params.get('auto_acquire', False):
+ if params['auto_acquire']:
self.agent.start('acq')
return True, 'Lakeshore module initialized.'
- def start_acq(self, session, params=None):
- """acq(params=None)
+ @ocs_agent.param('_')
+ def acq(self, session, params=None):
+ """acq()
- Method to start data acquisition process.
+ **Process** - Run data acquisition.
"""
@@ -287,7 +281,7 @@ def start_acq(self, session, params=None):
return True, 'Acquisition exited cleanly.'
- def stop_acq(self, session, params=None):
+ def _stop_acq(self, session, params=None):
"""
Stops acq process.
"""
@@ -297,19 +291,23 @@ def stop_acq(self, session, params=None):
else:
return False, 'acq is not currently running'
+ @ocs_agent.param('heater', type=str)
+ @ocs_agent.param('range')
+ @ocs_agent.param('wait', type=float, default=0)
def set_heater_range(self, session, params):
- """
- Adjust the heater range for servoing cryostat. Wait for a specified
- amount of time after the change.
+ """set_heater_range(range, heater='sample', wait=0)
- :param params: dict with 'heater', 'range', 'wait' keys
- :type params: dict
+ **Task** - Adjust the heater range for servoing cryostat. Wait for a
+ specified amount of time after the change.
+
+ Parameters:
+ heater (str): Name of heater to set range for, 'sample' by default
+ (and the only implemented option.)
+ range (str, float): see arguments in
+ :func:`socs.Lakeshore.Lakeshore370.Heater.set_heater_range`
+ wait (float, optional): Amount of time to wait after setting the
+ heater range. This allows the servo time to adjust to the new range.
- heater - which heater to set range for, 'sample' by default (and the only implemented one)
- range - the heater range value to change to
- wait - time in seconds after changing the heater value to wait, allows
- the servo to adjust to the new heater range, typical value of
- ~600 seconds
"""
with self._lock.acquire_timeout(job='set_heater_range') as acquired:
if not acquired:
@@ -336,14 +334,20 @@ def set_heater_range(self, session, params):
return True, f'Set {heater_string} heater range to {params["range"]}'
+ @ocs_agent.param('channel', type=int, check=lambda x: 1 <= x <= 16)
+ @ocs_agent.param('mode', type=str, choices=['current', 'voltage'])
def set_excitation_mode(self, session, params):
- """
- Set the excitation mode of a specified channel.
+ """set_excitation_mode(channel, mode)
- :param params: dict with "channel" and "mode" keys for Channel.set_excitation_mode()
- :type params: dict
- """
+ **Task** - Set the excitation mode of a specified channel.
+ Parameters:
+ channel (int): Channel to set the excitation mode for. Valid values
+ are 1-16.
+ mode (str): Excitation mode. Possible modes are 'current' or
+ 'voltage'.
+
+ """
with self._lock.acquire_timeout(job='set_excitation_mode') as acquired:
if not acquired:
self.log.warn(f"Could not start Task because "
@@ -358,12 +362,21 @@ def set_excitation_mode(self, session, params):
return True, f'return text for Set channel {params["channel"]} excitation mode to {params["mode"]}'
+ @ocs_agent.param('channel', type=int, check=lambda x: 1 <= x <= 16)
+ @ocs_agent.param('value', type=float)
def set_excitation(self, session, params):
- """
- Set the excitation voltage/current value of a specified channel.
+ """set_excitation(channel, value)
+
+ **Task** - Set the excitation voltage/current value of a specified
+ channel.
+
+ Parameters:
+ channel (int): Channel to set the excitation for. Valid values
+ are 1-16.
+ value (float): Excitation value in volts or amps depending on set
+ excitation mode. See
+ :func:`socs.Lakeshore.Lakeshore370.Channel.set_excitation`
- :param params: dict with "channel" and "value" keys for Channel.set_excitation()
- :type params: dict
"""
with self._lock.acquire_timeout(job='set_excitation') as acquired:
if not acquired:
@@ -384,12 +397,22 @@ def set_excitation(self, session, params):
return True, f'Set channel {params["channel"]} excitation to {params["value"]}'
+ @ocs_agent.param('P', type=int)
+ @ocs_agent.param('I', type=int)
+ @ocs_agent.param('D', type=int)
def set_pid(self, session, params):
- """
- Set the PID parameters for servo control of fridge.
+ """set_pid(P, I, D)
+
+ **Task** - Set the PID parameters for servo control of fridge.
+
+ Parameters:
+ P (int): Proportional term for PID loop
+ I (int): Integral term for the PID loop
+ D (int): Derivative term for the PID loop
+
+ Notes:
+ Makes a call to :func:`socs.Lakeshore.Lakeshore370.Heater.set_pid`.
- :param params: dict with "P", "I", and "D" keys for Heater.set_pid()
- :type params: dict
"""
with self._lock.acquire_timeout(job='set_pid') as acquired:
if not acquired:
@@ -405,12 +428,15 @@ def set_pid(self, session, params):
return True, f'return text for Set PID to {params["P"]}, {params["I"]}, {params["D"]}'
+ @ocs_agent.param('channel', type=int)
def set_active_channel(self, session, params):
- """
- Set the active channel on the LS370.
+ """set_active_channel(channel)
+
+ **Task** - Set the active channel on the LS370.
+
+ Parameters:
+ channel (int): Channel to switch readout to. Valid values are 1-16.
- :param params: dict with "channel" number
- :type params: dict
"""
with self._lock.acquire_timeout(job='set_active_channel') as acquired:
if not acquired:
@@ -426,10 +452,15 @@ def set_active_channel(self, session, params):
return True, f'return text for set channel to {params["channel"]}'
+ @ocs_agent.param('autoscan', type=bool)
def set_autoscan(self, session, params):
- """
- Sets autoscan on the LS370.
- :param params: dict with "autoscan" value
+ """set_autoscan(autoscan)
+
+ **Task** - Sets autoscan on the LS370.
+
+ Parameters:
+ autoscan (bool): True to enable autoscan, False to disable.
+
"""
with self._lock.acquire_timeout(job='set_autoscan') as acquired:
if not acquired:
@@ -448,12 +479,18 @@ def set_autoscan(self, session, params):
return True, 'Set autoscan to {}'.format(params['autoscan'])
+ @ocs_agent.param('temperature', type=float, check=lambda x: x < 1)
+    @ocs_agent.param('channel', type=int, default=None)
def servo_to_temperature(self, session, params):
- """Servo to temperature passed into params.
+ """servo_to_temperature(temperature, channel=None)
+
+ **Task** - Servo to a given temperature using a closed loop PID on a
+ fixed channel. This will automatically disable autoscan if enabled.
+
+ Parameters:
+ temperature (float): Temperature to servo to in units of Kelvin.
+ channel (int, optional): Channel to servo off of.
- :param params: dict with "temperature" Heater.set_setpoint() in units of K, and
- "channel" as an integer (optional)
- :type params: dict
"""
with self._lock.acquire_timeout(job='servo_to_temperature') as acquired:
if not acquired:
@@ -474,7 +511,7 @@ def servo_to_temperature(self, session, params):
self.module.disable_autoscan()
# Check to see if we passed an input channel, and if so change to it
- if params.get("channel", False) is not False:
+ if params.get("channel", None) is not None:
session.add_message(f'Changing heater input channel to {params.get("channel")}')
self.module.sample_heater.set_input_channel(params.get("channel"))
@@ -496,14 +533,19 @@ def servo_to_temperature(self, session, params):
return True, f'Setpoint now set to {params["temperature"]} K'
+ @ocs_agent.param('measurements', type=int)
+ @ocs_agent.param('threshold', type=float)
def check_temperature_stability(self, session, params):
- """Check servo temperature stability is within threshold.
+ """check_temperature_stability(measurements, threshold)
- :param params: dict with "measurements" and "threshold" parameters
- :type params: dict
+ Check that the servo temperature is stable to within the given threshold.
+
+ Parameters:
+ measurements (int): Number of measurements to average for the
+ stability check.
+ threshold (float): Maximum difference allowed between the average
+ and the setpoint for the temperature to be considered stable.
- measurements - number of measurements to average for stability check
- threshold - amount within which the average needs to be to the setpoint for stability
"""
with self._lock.acquire_timeout(job='check_temp_stability') as acquired:
if not acquired:
@@ -541,18 +583,20 @@ def check_temperature_stability(self, session, params):
return False, f"Temperature not stable within {params['threshold']}."
+ @ocs_agent.param('heater', type=str, choices=['sample', 'still'])
+ @ocs_agent.param('mode', type=str, choices=['Off', 'Monitor Out', 'Open Loop', 'Zone', 'Still', 'Closed Loop', 'Warm up'])
def set_output_mode(self, session, params=None):
- """
- Set output mode of the heater.
+ """set_output_mode(heater, mode)
- :param params: dict with "heater" and "mode" parameters
- :type params: dict
+ **Task** - Set output mode of the heater.
- heater - Specifies which heater to control. Either 'sample' or 'still'
- mode - Specifies mode of heater. Can be "Off", "Monitor Out", "Open Loop",
- "Zone", "Still", "Closed Loop", or "Warm up"
- """
+ Parameters:
+ heater (str): Heater to set the output mode for, either 'sample'
+ or 'still'.
+ mode (str): Specifies mode of heater. Can be "Off", "Monitor Out",
+ "Open Loop", "Zone", "Still", "Closed Loop", or "Warm up"
+ """
with self._lock.acquire_timeout(job='set_output_mode') as acquired:
if not acquired:
self.log.warn(f"Could not start Task because "
@@ -570,20 +614,22 @@ def set_output_mode(self, session, params=None):
return True, "Set {} output mode to {}".format(params['heater'], params['mode'])
+ @ocs_agent.param('heater', type=str, choices=['sample', 'still'])
+ @ocs_agent.param('output', type=float)
+ @ocs_agent.param('display', type=str, choices=['current', 'power'], default=None)
def set_heater_output(self, session, params=None):
- """
- Set display type and output of the heater.
+ """set_heater_output(heater, output, display=None)
- :param params: dict with "heater", "display", and "output" parameters
- :type params: dict
+ **Task** - Set display type and output of the heater.
- heater - Specifies which heater to control. Either 'sample' or 'still'
- output - Specifies heater output value.
- If display is set to "Current" or heater is "still", can be any number between 0 and 100.
- If display is set to "Power", can be any number between 0 and the maximum allowed power.
-
- display (opt)- Specifies heater display type. Can be "Current" or "Power".
- If None, heater display is not reset before setting output.
+ Parameters:
+ heater (str): Heater to set the output for, either 'sample' or
+ 'still'.
+ output (float): Specifies heater output value. For possible values see
+ :func:`socs.Lakeshore.Lakeshore370.Heater.set_heater_output`
+ display (str, optional): Specifies heater display type. Can be
+ "current" or "power". If None, heater display is not reset
+ before setting output.
"""
@@ -617,32 +663,37 @@ def set_heater_output(self, session, params=None):
return True, "Set {} display to {}, output to {}".format(heater, display, output)
+ @ocs_agent.param('attribute', type=str)
+ @ocs_agent.param('channel', type=int, default=1)
def get_channel_attribute(self, session, params):
- """Gets an arbitrary channel attribute, stored in the session.data dict
-
- Parameters
- ----------
- params : dict
- Contains parameters 'attribute' (not optional), 'channel' (optional, default '1').
-
- Channel attributes stored in the session.data object are in the structure::
-
- >>> session.data
- {"calibration_curve": 21,
- "dwell": 3,
- "excitation": 6.32e-6,
- "excitation_mode": "voltage",
- "excitation_power": 2.0e-15,
- "kelvin_reading": 100.0e-3,
- "pause": 3,
- "reading_status": ["T.UNDER"]
- "resistance_range": 2.0e-3,
- "resistance_reading": 10.0e3,
- "temperature_coefficient": "negative",
- }
-
- Note: Only attribute called with this method will be populated for the
- given channel. This example shows all available attributes.
+ """get_channel_attribute(attribute, channel=1)
+
+ **Task** - Gets an arbitrary channel attribute, stored in the session.data dict.
+
+ Parameters:
+ attribute (str): Attribute to get from the 370.
+ channel (int, optional): Channel to get the attribute for.
+ Defaults to 1.
+
+ Notes:
+ Channel attributes stored in the session.data object are in the
+ structure::
+
+ >>> response.session['data']
+ {"calibration_curve": 21,
+ "dwell": 3,
+ "excitation": 6.32e-6,
+ "excitation_mode": "voltage",
+ "excitation_power": 2.0e-15,
+ "kelvin_reading": 100.0e-3,
+ "pause": 3,
+ "reading_status": ["T.UNDER"]
+ "resistance_range": 2.0e-3,
+ "resistance_reading": 10.0e3,
+ "temperature_coefficient": "negative",
+ }
+
+ Only the attribute called with this method will be populated for
+ the given channel. This example shows all available attributes.
"""
with self._lock.acquire_timeout(job=f"get_{params['attribute']}", timeout=3) as acquired:
@@ -668,30 +719,32 @@ def get_channel_attribute(self, session, params):
return True, f"Retrieved {channel.name} {params['attribute']}"
+ @ocs_agent.param('attribute', type=str)
def get_heater_attribute(self, session, params):
- """Gets an arbitrary heater attribute, stored in the session.data dict
-
- Parameters
- ----------
- params : dict
- Contains parameters 'attribute'.
-
- Heater attributes stored in the session.data object are in the structure::
-
- >>> session.data
- {"heater_range": 1e-3,
- "heater_setup": ["current", 1e-3, 120],
- "input_channel": 6,
- "manual_out": 0.0,
- "mode": "Closed Loop",
- "pid": (80, 10, 0),
- "setpoint": 100e-3,
- "still_output", 10.607,
- "units": "kelvin",
- }
-
- Note: Only the attribute called with this method will be populated,
- this example just shows all available attributes.
+ """get_heater_attribute(attribute)
+
+ **Task** - Gets an arbitrary heater attribute, stored in the session.data dict.
+
+ Parameters:
+ attribute (str): Heater attribute to get.
+
+ Notes:
+ Heater attributes stored in the session.data object are in the structure::
+
+ >>> response.session['data']
+ {"heater_range": 1e-3,
+ "heater_setup": ["current", 1e-3, 120],
+ "input_channel": 6,
+ "manual_out": 0.0,
+ "mode": "Closed Loop",
+ "pid": (80, 10, 0),
+ "setpoint": 100e-3,
+ "still_output", 10.607,
+ "units": "kelvin",
+ }
+
+ Only the attribute called with this method will be populated;
+ this example just shows all available attributes.
"""
with self._lock.acquire_timeout(job=f"get_{params['attribute']}", timeout=3) as acquired:
@@ -747,7 +800,7 @@ def make_parser(parser=None):
return parser
-if __name__ == '__main__':
+def main(args=None):
# For logging
txaio.use_twisted()
LOG = txaio.make_logger()
@@ -756,7 +809,9 @@ def make_parser(parser=None):
txaio.start_logging(level=os.environ.get("LOGLEVEL", "info"))
parser = make_parser()
- args = site_config.parse_args(agent_class='Lakeshore370Agent', parser=parser)
+ args = site_config.parse_args(agent_class='Lakeshore370Agent',
+ parser=parser,
+ args=args)
# Automatically acquire data if requested (default)
init_params = False
@@ -771,7 +826,7 @@ def make_parser(parser=None):
fake_data=args.fake_data,
dwell_time_delay=args.dwell_time_delay)
- agent.register_task('init_lakeshore', lake_agent.init_lakeshore_task,
+ agent.register_task('init_lakeshore', lake_agent.init_lakeshore,
startup=init_params)
agent.register_task('set_heater_range', lake_agent.set_heater_range)
agent.register_task('set_excitation_mode', lake_agent.set_excitation_mode)
@@ -785,6 +840,10 @@ def make_parser(parser=None):
agent.register_task('set_heater_output', lake_agent.set_heater_output)
agent.register_task('get_channel_attribute', lake_agent.get_channel_attribute)
agent.register_task('get_heater_attribute', lake_agent.get_heater_attribute)
- agent.register_process('acq', lake_agent.start_acq, lake_agent.stop_acq)
+ agent.register_process('acq', lake_agent.acq, lake_agent._stop_acq)
runner.run(agent, auto_reconnect=True)
+
+
+if __name__ == '__main__':
+ main()
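The session.data structures documented above for get_channel_attribute and get_heater_attribute are what a caller sees in the task reply. A minimal sketch of reading one back with ocs.ocs_client.OCSClient, assuming a configured instance id of 'LSA370' and a running site/crossbar setup (both assumptions, not part of this patch):

from ocs.ocs_client import OCSClient

ls370 = OCSClient('LSA370')  # instance id is a placeholder

# Run the task, then read the attribute out of the session data.
resp = ls370.get_heater_attribute(attribute='setpoint')
print(resp.session['data'].get('setpoint'))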
diff --git a/socs/agents/lakeshore372/__init__.py b/socs/agents/lakeshore372/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/agents/lakeshore372/LS372_agent.py b/socs/agents/lakeshore372/agent.py
similarity index 97%
rename from agents/lakeshore372/LS372_agent.py
rename to socs/agents/lakeshore372/agent.py
index b92ed4123..c64d64796 100644
--- a/agents/lakeshore372/LS372_agent.py
+++ b/socs/agents/lakeshore372/agent.py
@@ -1,19 +1,19 @@
+import argparse
import os
import random
-import argparse
+import threading
import time
+from contextlib import contextmanager
+
import numpy as np
import txaio
-import threading
import yaml
-from contextlib import contextmanager
+from ocs import ocs_agent, site_config
+from ocs.ocs_twisted import Pacemaker, TimeoutLock
from twisted.internet import reactor
from socs.Lakeshore.Lakeshore372 import LS372
-from ocs import ocs_agent, site_config
-from ocs.ocs_twisted import TimeoutLock, Pacemaker
-
class YieldingLock:
"""A lock protected by a lock. This braided arrangement guarantees
@@ -150,7 +150,7 @@ def disable_control_chan(self, session, params=None):
@ocs_agent.param('force', default=False, type=bool)
@ocs_agent.param('configfile', type=str, default=None)
def init_lakeshore(self, session, params=None):
- """init_lakeshore(auto_acquire=False, acq_params=None, force=False,
+ """init_lakeshore(auto_acquire=False, acq_params=None, force=False, \
configfile=None)
**Task** - Perform first time setup of the Lakeshore 372 communication.
@@ -222,7 +222,7 @@ def init_lakeshore(self, session, params=None):
@ocs_agent.param('sample_heater', default=False, type=bool)
@ocs_agent.param('run_once', default=False, type=bool)
def acq(self, session, params=None):
- """acq(sample_heater=False)
+ """acq(sample_heater=False, run_once=False)
**Process** - Acquire data from the Lakeshore 372.
@@ -415,9 +415,9 @@ def _stop_acq(self, session, params=None):
@ocs_agent.param('heater', type=str)
@ocs_agent.param('range')
- @ocs_agent.param('wait', type=float)
+ @ocs_agent.param('wait', type=float, default=0)
def set_heater_range(self, session, params):
- """set_heater_range(heater=None, range=None, wait=0)
+ """set_heater_range(heater, range, wait=0)
**Task** - Adjust the heater range for servoing cryostat. Wait for a
specified amount of time after the change.
@@ -459,7 +459,7 @@ def set_heater_range(self, session, params):
@ocs_agent.param('channel', type=int, check=lambda x: 1 <= x <= 16)
@ocs_agent.param('mode', type=str, choices=['current', 'voltage'])
def set_excitation_mode(self, session, params):
- """set_excitation_mode(channel=None, mode=None)
+ """set_excitation_mode(channel, mode)
**Task** - Set the excitation mode of a specified channel.
@@ -487,7 +487,7 @@ def set_excitation_mode(self, session, params):
@ocs_agent.param('channel', type=int, check=lambda x: 1 <= x <= 16)
@ocs_agent.param('value', type=float)
def set_excitation(self, session, params):
- """set_excitation(channel=None, value=None)
+ """set_excitation(channel, value)
**Task** - Set the excitation voltage/current value of a specified
channel.
@@ -496,7 +496,7 @@ def set_excitation(self, session, params):
channel (int): Channel to set the excitation for. Valid values
are 1-16.
value (float): Excitation value in volts or amps depending on set
- excitation mode. See
+ excitation mode. See
:func:`socs.Lakeshore.Lakeshore372.Channel.set_excitation`
"""
@@ -522,7 +522,7 @@ def set_excitation(self, session, params):
@ocs_agent.param('channel', type=int, check=lambda x: 1 <= x <= 16)
def get_excitation(self, session, params):
- """get_excitation(channel=None)
+ """get_excitation(channel)
**Task** - Get the excitation voltage/current value of a specified
channel.
@@ -550,7 +550,7 @@ def get_excitation(self, session, params):
@ocs_agent.param('channel', type=int, check=lambda x: 1 <= x <= 16)
@ocs_agent.param('resistance_range', type=float)
def set_resistance_range(self, session, params):
- """set_resistance_range(channel=None,resistance_range=None)
+ """set_resistance_range(channel, resistance_range)
**Task** - Set the resistance range for a specified channel.
@@ -586,7 +586,7 @@ def set_resistance_range(self, session, params):
@ocs_agent.param('channel', type=int, check=lambda x: 1 <= x <= 16)
def get_resistance_range(self, session, params):
- """get_resistance_range(channel=None)
+ """get_resistance_range(channel)
**Task** - Get the resistance range for a specified channel.
@@ -611,7 +611,7 @@ def get_resistance_range(self, session, params):
@ocs_agent.param('channel', type=int, check=lambda x: 1 <= x <= 16)
@ocs_agent.param('dwell', type=int, check=lambda x: 1 <= x <= 200)
def set_dwell(self, session, params):
- """set_dwell(channel=None, dwell=None)
+ """set_dwell(channel, dwell)
**Task** - Set the autoscanning dwell time for a particular channel.
@@ -636,7 +636,7 @@ def set_dwell(self, session, params):
@ocs_agent.param('channel', type=int, check=lambda x: 1 <= x <= 16)
def get_dwell(self, session, params):
- """get_dwell(channel=None, dwell=None)
+ """get_dwell(channel)
**Task** - Get the autoscanning dwell time for a particular channel.
@@ -662,7 +662,7 @@ def get_dwell(self, session, params):
@ocs_agent.param('I', type=int)
@ocs_agent.param('D', type=int)
def set_pid(self, session, params):
- """set_pid(P=None, I=None, D=None)
+ """set_pid(P, I, D)
**Task** - Set the PID parameters for servo control of fridge.
@@ -691,7 +691,7 @@ def set_pid(self, session, params):
@ocs_agent.param('channel', type=int)
def set_active_channel(self, session, params):
- """set_active_channel(channel=None)
+ """set_active_channel(channel)
**Task** - Set the active channel on the LS372.
@@ -715,7 +715,7 @@ def set_active_channel(self, session, params):
@ocs_agent.param('autoscan', type=bool)
def set_autoscan(self, session, params):
- """set_autoscan(autoscan=None)
+ """set_autoscan(autoscan)
**Task** - Sets autoscan on the LS372.
@@ -742,7 +742,7 @@ def set_autoscan(self, session, params):
@ocs_agent.param('temperature', type=float, check=lambda x: x < 1)
def servo_to_temperature(self, session, params):
- """servo_to_temperature(temperature=None)
+ """servo_to_temperature(temperature)
**Task** - Servo to a given temperature using a closed loop PID on a
fixed channel. This will automatically disable autoscan if enabled.
@@ -790,7 +790,7 @@ def servo_to_temperature(self, session, params):
@ocs_agent.param('measurements', type=int)
@ocs_agent.param('threshold', type=float)
def check_temperature_stability(self, session, params):
- """check_temperature_stability(measurements=None, threshold=None)
+ """check_temperature_stability(measurements, threshold)
Check that the servo temperature is stable to within the given threshold.
@@ -840,7 +840,7 @@ def check_temperature_stability(self, session, params):
@ocs_agent.param('heater', type=str, choices=['sample', 'still'])
@ocs_agent.param('mode', type=str, choices=['Off', 'Monitor Out', 'Open Loop', 'Zone', 'Still', 'Closed Loop', 'Warm up'])
def set_output_mode(self, session, params=None):
- """set_output_mode(heater=None, mode=None)
+ """set_output_mode(heater, mode)
**Task** - Set output mode of the heater.
@@ -871,14 +871,13 @@ def set_output_mode(self, session, params=None):
@ocs_agent.param('output', type=float)
@ocs_agent.param('display', type=str, choices=['current', 'power'], default=None)
def set_heater_output(self, session, params=None):
- """set_heater_output(heater=None, output=None, display=None)
+ """set_heater_output(heater, output, display=None)
**Task** - Set display type and output of the heater.
Parameters:
heater (str): Name of heater to set range for, either 'sample' or
'still'.
- "Open Loop", "Zone", "Still", "Closed Loop", or "Warm up"
output (float): Specifies heater output value. For possible values see
:func:`socs.Lakeshore.Lakeshore372.Heater.set_heater_output`
display (str, optional): Specifies heater display type. Can be
@@ -923,7 +922,7 @@ def set_heater_output(self, session, params=None):
@ocs_agent.param('output', type=float, check=lambda x: 0 <= x <= 100)
def set_still_output(self, session, params=None):
- """set_still_output(output=None)
+ """set_still_output(output)
**Task** - Set the still output on the still heater. This is different
than the manual output on the still heater. Use
@@ -987,7 +986,7 @@ def get_still_output(self, session, params=None):
@ocs_agent.param('configfile', type=str)
def input_configfile(self, session, params=None):
- """input_configfile(configfile=None)
+ """input_configfile(configfile)
**Task** - Upload 372 configuration file to initialize channel/device
settings.
@@ -1104,16 +1103,18 @@ def make_parser(parser=None):
return parser
-if __name__ == '__main__':
+def main(args=None):
# For logging
txaio.use_twisted()
- LOG = txaio.make_logger()
+ txaio.make_logger()
# Start logging
txaio.start_logging(level=os.environ.get("LOGLEVEL", "info"))
parser = make_parser()
- args = site_config.parse_args(agent_class='Lakeshore372Agent', parser=parser)
+ args = site_config.parse_args(agent_class='Lakeshore372Agent',
+ parser=parser,
+ args=args)
# Automatically acquire data if requested (default)
init_params = False
@@ -1160,3 +1161,7 @@ def make_parser(parser=None):
agent.register_task('input_configfile', lake_agent.input_configfile)
runner.run(agent, auto_reconnect=True)
+
+
+if __name__ == '__main__':
+ main()
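The if __name__ == '__main__' blocks throughout this patch are converted into main(args=None) entry points so each agent can also be launched programmatically (from tests or console-script wrappers) by handing an explicit argument list to site_config.parse_args(). A sketch of such an invocation; the instance id shown is a placeholder and any extra flags must match make_parser() or the standard OCS site arguments:

from socs.agents.lakeshore372.agent import main

# Equivalent to: python -m socs.agents.lakeshore372.agent --instance-id LSA372
main(args=['--instance-id', 'LSA372'])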
diff --git a/socs/agents/lakeshore425/__init__.py b/socs/agents/lakeshore425/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/agents/lakeshore425/LS425_agent.py b/socs/agents/lakeshore425/agent.py
similarity index 98%
rename from agents/lakeshore425/LS425_agent.py
rename to socs/agents/lakeshore425/agent.py
index 9fdb1a113..30fab10c9 100755
--- a/agents/lakeshore425/LS425_agent.py
+++ b/socs/agents/lakeshore425/agent.py
@@ -1,9 +1,11 @@
-import time
-import os
import argparse
+import os
+import time
+
import txaio
from ocs import ocs_agent, site_config
from ocs.ocs_twisted import TimeoutLock
+
from socs.Lakeshore import Lakeshore425 as ls
txaio.use_twisted()
@@ -237,12 +239,14 @@ def make_parser(parser=None):
return parser
-def main():
+def main(args=None):
# Start logging
txaio.start_logging(level=os.environ.get("LOGLEVEL", "info"))
parser = make_parser()
- args = site_config.parse_args(agent_class='Lakeshore425Agent', parser=parser)
+ args = site_config.parse_args(agent_class='Lakeshore425Agent',
+ parser=parser,
+ args=args)
agent, runner = ocs_agent.init_site_agent(args)
diff --git a/socs/agents/magpie/__init__.py b/socs/agents/magpie/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/agents/magpie/magpie_agent.py b/socs/agents/magpie/agent.py
similarity index 99%
rename from agents/magpie/magpie_agent.py
rename to socs/agents/magpie/agent.py
index e1ebc5a1a..075bb2a2b 100644
--- a/agents/magpie/magpie_agent.py
+++ b/socs/agents/magpie/agent.py
@@ -1,15 +1,16 @@
import argparse
-import so3g # noqa: F401
-from spt3g import core
-import txaio
-import os
-import numpy as np
-import yaml
import ast
-from scipy import signal
+import os
import queue
import time
+
+import numpy as np
+import so3g # noqa: F401
+import txaio
+import yaml
from ocs import ocs_agent, site_config
+from scipy import signal
+from spt3g import core
MAX_CHANS = 4096
CHANS_PER_BAND = 512
@@ -1015,11 +1016,13 @@ def make_parser(parser=None):
return parser
-if __name__ == '__main__':
+def main(args=None):
txaio.use_twisted()
txaio.start_logging(level=os.environ.get("LOGLEVEL", "info"))
parser = make_parser()
- args = site_config.parse_args(agent_class='MagpieAgent', parser=parser)
+ args = site_config.parse_args(agent_class='MagpieAgent',
+ parser=parser,
+ args=args)
agent, runner = ocs_agent.init_site_agent(args)
magpie = MagpieAgent(agent, args)
@@ -1044,3 +1047,7 @@ def make_parser(parser=None):
)
runner.run(agent, auto_reconnect=True)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/socs/agents/meinberg_m1000/__init__.py b/socs/agents/meinberg_m1000/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/agents/meinberg_m1000/meinberg_m1000_agent.py b/socs/agents/meinberg_m1000/agent.py
similarity index 99%
rename from agents/meinberg_m1000/meinberg_m1000_agent.py
rename to socs/agents/meinberg_m1000/agent.py
index 410c1d934..ad891e513 100644
--- a/agents/meinberg_m1000/meinberg_m1000_agent.py
+++ b/socs/agents/meinberg_m1000/agent.py
@@ -1,23 +1,18 @@
-import os
+import argparse
+import time
from os import environ
-import time
-import argparse
import txaio
-
from autobahn.twisted.util import sleep as dsleep
-from twisted.internet.defer import inlineCallbacks
+from ocs import ocs_agent, site_config
from twisted.internet import reactor
+from twisted.internet.defer import inlineCallbacks
from socs.snmp import SNMPTwister
# For logging
txaio.use_twisted()
-on_rtd = os.environ.get('READTHEDOCS') == 'True'
-if not on_rtd:
- from ocs import ocs_agent, site_config
-
class MeinbergSNMP:
"""Meinberg SNMP communicator. Handles communication with and decoding of
@@ -459,12 +454,14 @@ def make_parser(parser=None):
return parser
-if __name__ == "__main__":
+def main(args=None):
# Start logging
txaio.start_logging(level=environ.get("LOGLEVEL", "info"))
parser = make_parser()
- args = site_config.parse_args(agent_class="MeinbergM1000Agent", parser=parser)
+ args = site_config.parse_args(agent_class="MeinbergM1000Agent",
+ parser=parser,
+ args=args)
agent, runner = ocs_agent.init_site_agent(args)
listener = MeinbergM1000Agent(agent,
@@ -478,3 +475,7 @@ def make_parser(parser=None):
startup=bool(args.auto_start), blocking=False)
runner.run(agent, auto_reconnect=True)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/socs/agents/ocs_plugin_so.py b/socs/agents/ocs_plugin_so.py
new file mode 100644
index 000000000..a26193c5f
--- /dev/null
+++ b/socs/agents/ocs_plugin_so.py
@@ -0,0 +1,50 @@
+"""
+Register our agents in ocs central. In order for this script to
+be imported by site_config.scan_for_agents(), it must be in the python
+path and called something like ocs_plugin_*.
+"""
+
+import os
+
+import ocs
+
+root = os.path.abspath(os.path.split(__file__)[0])
+
+for n, f in [
+ ('ACUAgent', 'acu/agent.py'),
+ ('BlueforsAgent', 'bluefors/agent.py'),
+ ('CrateAgent', 'smurf_crate_monitor/agent.py'),
+ ('CryomechCPAAgent', 'cryomech_cpa/agent.py'),
+ ('FPGAAgent', 'holo_fpga/agent.py'),
+ ('FTSAerotechAgent', 'fts_aerotech/agent.py'),
+ ('HWPBBBAgent', 'hwp_encoder/agent.py'),
+ ('HWPPicoscopeAgent', 'hwp_picoscope/agent.py'),
+ ('ibootbarAgent', 'ibootbar/agent.py'),
+ ('LabJackAgent', 'labjack/agent.py'),
+ ('Lakeshore240Agent', 'lakeshore240/agent.py'),
+ ('Lakeshore336Agent', 'lakeshore336/agent.py'),
+ ('Lakeshore370Agent', 'lakeshore370/agent.py'),
+ ('Lakeshore372Agent', 'lakeshore372/agent.py'),
+ ('Lakeshore425Agent', 'lakeshore425/agent.py'),
+ ('LATRtXYStageAgent', 'xy_stage/agent.py'),
+ ('MagpieAgent', 'magpie/agent.py'),
+ ('MeinbergM1000Agent', 'meinberg_m1000/agent.py'),
+ ('PfeifferAgent', 'pfeiffer_tpg366/agent.py'),
+ ('PfeifferTC400Agent', 'pfeiffer_tc400/agent.py'),
+ ('PysmurfController', 'pysmurf_controller/agent.py'),
+ ('PysmurfMonitor', 'pysmurf_monitor/agent.py'),
+ ('RotationAgent', 'hwp_rotation/agent.py'),
+ ('ScpiPsuAgent', 'scpi_psu/agent.py'),
+ ('SmurfFileEmulator', 'smurf_file_emulator/agent.py'),
+ ('SmurfStreamSimulator', 'smurf_stream_simulator/agent.py'),
+ ('SupRsync', 'suprsync/agent.py'),
+ ('SynaccessAgent', 'synacc/agent.py'),
+ ('SynthAgent', 'holo_synth/agent.py'),
+ ('TektronixAWGAgent', 'tektronix3021c/agent.py'),
+ ('ThorlabsMC2000BAgent', 'thorlabs_mc2000b/agent.py'),
+ ('VantagePro2Agent', 'vantagepro2/agent.py'),
+ ('WiregridActuatorAgent', 'wiregrid_actuator/agent.py'),
+ ('WiregridEncoderAgent', 'wiregrid_encoder/agent.py'),
+ ('WiregridKikusuiAgent', 'wiregrid_kikusui/agent.py'),
+]:
+ ocs.site_config.register_agent_class(n, os.path.join(root, f))
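This plugin module is what lets ocs find the relocated agent scripts: importing any module named ocs_plugin_* on the Python path (which site_config.scan_for_agents() does) runs the registration loop above. A brief sketch of the two ways an agent class ends up registered; the explicit path below is illustrative only:

import ocs

# 1. Let ocs discover and import every importable ocs_plugin_* module.
ocs.site_config.scan_for_agents()

# 2. Or register a single agent class by hand, mirroring the loop above
#    (path shown is a placeholder).
# ocs.site_config.register_agent_class(
#     'Lakeshore372Agent', '/path/to/socs/agents/lakeshore372/agent.py')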
diff --git a/socs/agents/pfeiffer_tc400/__init__.py b/socs/agents/pfeiffer_tc400/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/agents/pfeiffer_tc400/pfeiffer_tc400_agent.py b/socs/agents/pfeiffer_tc400/agent.py
similarity index 97%
rename from agents/pfeiffer_tc400/pfeiffer_tc400_agent.py
rename to socs/agents/pfeiffer_tc400/agent.py
index d80409751..2643cd311 100644
--- a/agents/pfeiffer_tc400/pfeiffer_tc400_agent.py
+++ b/socs/agents/pfeiffer_tc400/agent.py
@@ -1,16 +1,13 @@
-import time
-import os
-import socket
import argparse
-import txaio
+import socket
+import time
from os import environ
+import txaio
from ocs import ocs_agent, site_config
from ocs.ocs_twisted import TimeoutLock
-on_rtd = os.environ.get('READTHEDOCS') == 'True'
-if not on_rtd:
- from pfeiffer_tc400_driver import PfeifferTC400
+from socs.agents.pfeiffer_tc400.drivers import PfeifferTC400
class PfeifferTC400Agent:
@@ -234,7 +231,7 @@ def make_parser(parser=None):
return parser
-if __name__ == '__main__':
+def main(args=None):
# Start logging
txaio.start_logging(level=environ.get("LOGLEVEL", "info"))
@@ -243,7 +240,8 @@ def make_parser(parser=None):
# Get the default ocs argument parser
parser = make_parser()
args = site_config.parse_args(agent_class='PfeifferTC400Agent',
- parser=parser)
+ parser=parser,
+ args=args)
init_params = False
if args.mode == 'acq':
@@ -263,3 +261,7 @@ def make_parser(parser=None):
agent.register_process('acq', p.acq, p._stop_acq)
runner.run(agent, auto_reconnect=True)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/agents/pfeiffer_tc400/pfeiffer_tc400_driver.py b/socs/agents/pfeiffer_tc400/drivers.py
similarity index 95%
rename from agents/pfeiffer_tc400/pfeiffer_tc400_driver.py
rename to socs/agents/pfeiffer_tc400/drivers.py
index 6579d9dfa..2813a9889 100644
--- a/agents/pfeiffer_tc400/pfeiffer_tc400_driver.py
+++ b/socs/agents/pfeiffer_tc400/drivers.py
@@ -7,9 +7,12 @@
# It also uses a slightly modified version of a Pfeiffer Vacuum Protocol package found on GitHub
import serial
-from pfeiffer_vacuum_protocol.pfeiffer_vacuum_protocol import _send_data_request as send_data_request
-from pfeiffer_vacuum_protocol.pfeiffer_vacuum_protocol import _send_control_command as send_control_command
-from pfeiffer_vacuum_protocol.pfeiffer_vacuum_protocol import _read_gauge_response as read_gauge_response
+from pfeiffer_vacuum_protocol.pfeiffer_vacuum_protocol import \
+ _read_gauge_response as read_gauge_response
+from pfeiffer_vacuum_protocol.pfeiffer_vacuum_protocol import \
+ _send_control_command as send_control_command
+from pfeiffer_vacuum_protocol.pfeiffer_vacuum_protocol import \
+ _send_data_request as send_data_request
# Data type 0 from TC400 Manual Section 8.3 - Applied data types
PFEIFFER_BOOL = {'111111': True,
diff --git a/socs/agents/pfeiffer_tpg366/__init__.py b/socs/agents/pfeiffer_tpg366/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/agents/pfeiffer_tpg366/pfeiffer_tpg366_agent.py b/socs/agents/pfeiffer_tpg366/agent.py
similarity index 80%
rename from agents/pfeiffer_tpg366/pfeiffer_tpg366_agent.py
rename to socs/agents/pfeiffer_tpg366/agent.py
index 298a6cf31..506ab3a05 100644
--- a/agents/pfeiffer_tpg366/pfeiffer_tpg366_agent.py
+++ b/socs/agents/pfeiffer_tpg366/agent.py
@@ -4,9 +4,10 @@
import argparse
import socket
+import time
+
from ocs import ocs_agent, site_config
from ocs.ocs_twisted import TimeoutLock
-import time
BUFF_SIZE = 128
ENQ = '\x05'
@@ -102,19 +103,21 @@ def __init__(self, agent, ip_address, port, f_sample=2.5):
agg_params=agg_params,
buffer_time=1)
- def start_acq(self, session, params=None):
- """
- Get pressures from the Pfeiffer gauges, publishes them to the feed
+ @ocs_agent.param('sampling_frequency', type=float, default=2.5)
+ @ocs_agent.param('test_mode', type=bool, default=False)
+ def acq(self, session, params=None):
+ """acq(sampling_frequency=2.5, test_mode=False)
+ **Process** - Get pressures from the Pfeiffer gauges.
- Args:
- sampling_frequency- defaults to 2.5 Hz
+ Parameters:
+ sampling_frequency (float): Rate at which to get the pressures
+ [Hz]. Defaults to 2.5 Hz.
+ test_mode (bool): Run the Process loop only once. This is meant
+ only for testing. Defaults to False.
"""
- if params is None:
- params = {}
-
- f_sample = params.get('sampling_frequency')
+ f_sample = params['sampling_frequency']
if f_sample is None:
f_sample = self.f_sample
@@ -144,10 +147,13 @@ def start_acq(self, session, params=None):
self.agent.publish_to_feed('pressures', data)
time.sleep(sleep_time)
+ if params['test_mode']:
+ break
+
self.agent.feeds['pressures'].flush_buffer()
return True, 'Acquisition exited cleanly'
- def stop_acq(self, session, params=None):
+ def _stop_acq(self, session, params=None):
"""
End pressure data acquisition
"""
@@ -170,17 +176,28 @@ def make_parser(parser=None):
pgroup = parser.add_argument_group('Agent Options')
pgroup.add_argument('--ip_address')
pgroup.add_argument('--port')
+ pgroup.add_argument("--mode", type=str, default='acq', choices=['acq', 'test'])
return parser
-if __name__ == '__main__':
+def main(args=None):
parser = make_parser()
- args = site_config.parse_args(agent_class='PfeifferAgent', parser=parser)
+ args = site_config.parse_args(agent_class='PfeifferAgent',
+ parser=parser,
+ args=args)
+
+ init_params = True
+ if args.mode == 'test':
+ init_params = {'test_mode': True}
agent, runner = ocs_agent.init_site_agent(args)
pfeiffer_agent = PfeifferAgent(agent, args.ip_address, args.port)
- agent.register_process('acq', pfeiffer_agent.start_acq,
- pfeiffer_agent.stop_acq, startup=True)
- agent.register_task('close', pfeiffer_agent.stop_acq)
+ agent.register_process('acq', pfeiffer_agent.acq,
+ pfeiffer_agent._stop_acq, startup=init_params)
+ agent.register_task('close', pfeiffer_agent._stop_acq)
runner.run(agent, auto_reconnect=True)
+
+
+if __name__ == '__main__':
+ main()
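The renamed acq process now takes a test_mode parameter (and the new --mode test flag sets it at startup), so the pressure-readout loop can be exercised for exactly one pass in integration tests instead of running forever. A sketch of driving it from a client; the 'pfeiffer366' instance id is a placeholder:

from ocs.ocs_client import OCSClient

tpg = OCSClient('pfeiffer366')

# Start a single pass of the pressure readout, then wait for it to exit.
tpg.acq.start(test_mode=True)
resp = tpg.acq.wait()
print(resp)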
diff --git a/socs/agents/pysmurf_controller/__init__.py b/socs/agents/pysmurf_controller/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/agents/pysmurf_controller/pysmurf_controller.py b/socs/agents/pysmurf_controller/agent.py
similarity index 97%
rename from agents/pysmurf_controller/pysmurf_controller.py
rename to socs/agents/pysmurf_controller/agent.py
index 5a0982014..aac5cf7a4 100644
--- a/agents/pysmurf_controller/pysmurf_controller.py
+++ b/socs/agents/pysmurf_controller/agent.py
@@ -1,28 +1,25 @@
-from twisted.internet import reactor, protocol, threads
-from twisted.python.failure import Failure
-from twisted.internet.defer import inlineCallbacks, Deferred
+import matplotlib
from autobahn.twisted.util import sleep as dsleep
-from twisted.logger import Logger, FileLogObserver
+from twisted.internet import protocol, reactor, threads
+from twisted.internet.defer import Deferred, inlineCallbacks
+from twisted.logger import FileLogObserver, Logger
+from twisted.python.failure import Failure
-import matplotlib
matplotlib.use('Agg')
-import sodetlib as sdl
-from sodetlib.operations import (
- uxm_setup, uxm_relock, bias_steps, iv, bias_dets
-)
-from sodetlib.det_config import DetConfig
-import numpy as np
-
+import argparse
+import os
import sys
-from typing import Optional
import time
-import os
-import argparse
+from typing import Optional
+import numpy as np
+import sodetlib as sdl
from ocs import ocs_agent, site_config
from ocs.ocs_agent import log_formatter
from ocs.ocs_twisted import TimeoutLock
-
+from sodetlib.det_config import DetConfig
+from sodetlib.operations import (bias_dets, bias_steps, iv, uxm_relock,
+ uxm_setup)
NBIASLINES = 12
@@ -264,6 +261,7 @@ def abort(self, session, params=None):
return True, "Aborting process"
@ocs_agent.param('poll_interval', type=float, default=10)
+ @ocs_agent.param('test_mode', default=False, type=bool)
def check_state(self, session, params=None):
"""check_state(poll_interval=10)
@@ -319,6 +317,9 @@ def check_state(self, session, params=None):
time.sleep(params['poll_interval'])
+ if params['test_mode']:
+ break
+
return True, "Finished checking state"
def _stop_check_state(self, session, params):
@@ -328,6 +329,7 @@ def _stop_check_state(self, session, params):
@ocs_agent.param("duration", default=None, type=float)
@ocs_agent.param('kwargs', default=None)
@ocs_agent.param('load_tune', default=True, type=bool)
+ @ocs_agent.param('test_mode', default=False, type=bool)
def stream(self, session, params):
"""stream(duration=None)
@@ -379,6 +381,9 @@ def stream(self, session, params):
if time.time() > stop_time:
break
time.sleep(1)
+
+ if params['test_mode']:
+ break
sdl.stream_g3_off(S)
return True, 'Finished streaming data'
@@ -654,7 +659,7 @@ def take_iv(self, session, params):
return False, f"Operation failed: {self.lock.job} is running."
session.set_status('starting')
- S, cfg = self._get_smurf_control()
+ S, cfg = self._get_smurf_control(session=session)
iva = iv.take_iv(S, cfg, **params['kwargs'])
session.data = {
'bands': iva.bands.tolist(),
@@ -756,7 +761,7 @@ def bias_dets(self, session, params):
S, cfg, rfrac=params['rfrac'], **params['kwargs']
)
else:
- biases = bias_dets.bias_to_rfrac(
+ biases = bias_dets.bias_to_rfrac_range(
S, cfg, rfrac_range=params['rfrac'], **params['kwargs']
)
@@ -783,9 +788,11 @@ def make_parser(parser=None):
return parser
-if __name__ == '__main__':
+def main(args=None):
parser = make_parser()
- args = site_config.parse_args(agent_class='PysmurfController', parser=parser)
+ args = site_config.parse_args(agent_class='PysmurfController',
+ parser=parser,
+ args=args)
agent, runner = ocs_agent.init_site_agent(args)
controller = PysmurfController(agent, args)
@@ -809,5 +816,10 @@ def make_parser(parser=None):
agent.register_task('take_iv', controller.take_iv)
agent.register_task('take_bias_steps', controller.take_bias_steps)
agent.register_task('take_noise', controller.take_noise)
+ agent.register_task('bias_dets', controller.bias_dets)
runner.run(agent, auto_reconnect=True)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/socs/agents/pysmurf_monitor/__init__.py b/socs/agents/pysmurf_monitor/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/agents/pysmurf_monitor/pysmurf_monitor.py b/socs/agents/pysmurf_monitor/agent.py
similarity index 97%
rename from agents/pysmurf_monitor/pysmurf_monitor.py
rename to socs/agents/pysmurf_monitor/agent.py
index f5a6db098..0feea3274 100644
--- a/agents/pysmurf_monitor/pysmurf_monitor.py
+++ b/socs/agents/pysmurf_monitor/agent.py
@@ -1,13 +1,12 @@
+import argparse
import json
-import time
-import queue
import os
-import argparse
+import queue
+import time
-from twisted.internet.protocol import DatagramProtocol
+from ocs import ocs_agent, ocs_feed, site_config
from twisted.internet import reactor
-
-from ocs import ocs_agent, site_config, ocs_feed
+from twisted.internet.protocol import DatagramProtocol
from socs.db.suprsync import SupRsyncFilesManager, create_file
@@ -227,9 +226,11 @@ def make_parser(parser=None):
return parser
-if __name__ == '__main__':
+def main(args=None):
parser = make_parser()
- args = site_config.parse_args(agent_class='PysmurfMonitor', parser=parser)
+ args = site_config.parse_args(agent_class='PysmurfMonitor',
+ parser=parser,
+ args=args)
agent, runner = ocs_agent.init_site_agent(args)
monitor = PysmurfMonitor(agent, args)
@@ -239,3 +240,7 @@ def make_parser(parser=None):
reactor.listenUDP(args.udp_port, monitor)
runner.run(agent, auto_reconnect=True)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/socs/agents/scpi_psu/__init__.py b/socs/agents/scpi_psu/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/agents/scpi_psu/scpi_psu_agent.py b/socs/agents/scpi_psu/agent.py
similarity index 66%
rename from agents/scpi_psu/scpi_psu_agent.py
rename to socs/agents/scpi_psu/agent.py
index f7bc74400..dc292251d 100644
--- a/agents/scpi_psu/scpi_psu_agent.py
+++ b/socs/agents/scpi_psu/agent.py
@@ -1,13 +1,11 @@
-import time
-import os
-import socket
import argparse
-from socs.agent.scpi_psu_driver import PsuInterface
+import socket
+import time
+
+from ocs import ocs_agent, site_config
+from ocs.ocs_twisted import TimeoutLock
-on_rtd = os.environ.get('READTHEDOCS') == 'True'
-if not on_rtd:
- from ocs import ocs_agent, site_config
- from ocs.ocs_twisted import TimeoutLock
+from socs.agents.scpi_psu.drivers import PsuInterface
class ScpiPsuAgent:
@@ -32,9 +30,13 @@ def __init__(self, agent, ip_address, gpib_slot):
agg_params=agg_params,
buffer_time=0)
- def init_psu(self, session, params=None):
- """ Task to connect to power supply """
+ @ocs_agent.param('_')
+ def init(self, session, params=None):
+ """init()
+
+ **Task** - Initialize connection to the power supply.
+ """
with self.lock.acquire_timeout(0) as acquired:
if not acquired:
return False, "Could not acquire lock"
@@ -49,24 +51,23 @@ def init_psu(self, session, params=None):
return True, 'Initialized PSU.'
+ @ocs_agent.param('wait', type=float, default=1)
+ @ocs_agent.param('channels', type=list, default=[1, 2, 3])
+ @ocs_agent.param('test_mode', type=bool, default=False)
def monitor_output(self, session, params=None):
- """
- Process to continuously monitor PSU output current and voltage and
- send info to aggregator.
-
- Args:
- wait (float, optional):
- time to wait between measurements [seconds].
- channels (list[int], optional):
- channels to monitor. [1, 2, 3] by default.
- """
- if params is None:
- params = {}
+ """monitor_output(wait=1, channels=[1, 2, 3], test_mode=False)
- wait_time = params.get('wait', 1)
- channels = params.get('channels', [1, 2, 3])
- test_mode = params.get('test_mode', False)
+ **Process** - Continuously monitor PSU output current and voltage.
+ Parameters:
+ wait (float, optional): Time to wait between measurements
+ [seconds].
+ channels (list[int], optional): Channels to monitor. [1, 2, 3] by
+ default.
+ test_mode (bool, optional): Exit process after single loop if True.
+ Defaults to False.
+
+ """
session.set_status('running')
self.monitor = True
@@ -79,7 +80,7 @@ def monitor_output(self, session, params=None):
'data': {}
}
- for chan in channels:
+ for chan in params['channels']:
data['data']["Voltage_{}".format(chan)] = self.psu.get_volt(chan)
data['data']["Current_{}".format(chan)] = self.psu.get_curr(chan)
@@ -93,9 +94,9 @@ def monitor_output(self, session, params=None):
else:
self.log.warn("Could not acquire in monitor_current")
- time.sleep(wait_time)
+ time.sleep(params['wait'])
- if test_mode:
+ if params['test_mode']:
break
return True, "Finished monitoring current"
@@ -104,13 +105,17 @@ def stop_monitoring(self, session, params=None):
self.monitor = False
return True, "Stopping current monitor"
+ @ocs_agent.param('channel', type=int, choices=[1, 2, 3])
+ @ocs_agent.param('volts', type=float, check=lambda x: 0 <= x <= 30)
def set_voltage(self, session, params=None):
- """
- Sets voltage of power supply:
+ """set_voltage(channel, volts)
- Args:
- channel (int): Channel number (1, 2, or 3)
+ **Task** - Set the voltage of the power supply.
+
+ Parameters:
+ channel (int): Channel number (1, 2, or 3).
volts (float): Voltage to set. Must be between 0 and 30.
+
"""
with self.lock.acquire_timeout(1) as acquired:
@@ -121,13 +126,17 @@ def set_voltage(self, session, params=None):
return True, 'Set channel {} voltage to {}'.format(params['channel'], params['volts'])
+ @ocs_agent.param('channel', type=int, choices=[1, 2, 3])
+ @ocs_agent.param('current', type=float)
def set_current(self, session, params=None):
- """
- Sets current of power supply:
+ """set_current(channel, current)
+
+ **Task** - Set the current of the power supply.
+
+ Parameters:
+ channel (int): Channel number (1, 2, or 3).
+ current (float): Current to set.
- Args:
- channel (int): Channel number (1, 2, or 3)
- "current" (float): Curent to set. Must be between x and y.
"""
with self.lock.acquire_timeout(1) as acquired:
if acquired:
@@ -137,13 +146,17 @@ def set_current(self, session, params=None):
return True, 'Set channel {} current to {}'.format(params['channel'], params['current'])
+ @ocs_agent.param('channel', type=int, choices=[1, 2, 3])
+ @ocs_agent.param('state', type=bool)
def set_output(self, session, params=None):
- """
- Task to turn channel on or off.
+ """set_output(channel, state)
+
+ **Task** - Turn a channel on or off.
+
+ Parameters:
+ channel (int): Channel number (1, 2, or 3).
+ state (bool): True for on, False for off.
- Args:
- channel (int): Channel number (1, 2, or 3)
- state (bool): True for on, False for off
"""
with self.lock.acquire_timeout(1) as acquired:
if acquired:
@@ -170,15 +183,17 @@ def make_parser(parser=None):
return parser
-if __name__ == '__main__':
+def main(args=None):
parser = make_parser()
- args = site_config.parse_args(agent_class='ScpiPsuAgent', parser=parser)
+ args = site_config.parse_args(agent_class='ScpiPsuAgent',
+ parser=parser,
+ args=args)
agent, runner = ocs_agent.init_site_agent(args)
p = ScpiPsuAgent(agent, args.ip_address, int(args.gpib_slot))
- agent.register_task('init', p.init_psu)
+ agent.register_task('init', p.init)
agent.register_task('set_voltage', p.set_voltage)
agent.register_task('set_current', p.set_current)
agent.register_task('set_output', p.set_output)
@@ -186,3 +201,7 @@ def make_parser(parser=None):
agent.register_process('monitor_output', p.monitor_output, p.stop_monitoring)
runner.run(agent, auto_reconnect=True)
+
+
+if __name__ == '__main__':
+ main()
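With the @ocs_agent.param decorators added above, OCS validates types, defaults, choices, and the check lambdas before a task body runs, replacing the old hand-rolled params.get() defaults in monitor_output. For example (a sketch; the 'psu' instance id is a placeholder):

from ocs.ocs_client import OCSClient

psu = OCSClient('psu')

# Passes validation: channel must be 1, 2, or 3 and 0 <= volts <= 30.
psu.set_voltage(channel=2, volts=12.0)

# This call would be rejected by the check lambda before reaching the
# instrument:
# psu.set_voltage(channel=2, volts=45.0)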
diff --git a/socs/agent/scpi_psu_driver.py b/socs/agents/scpi_psu/drivers.py
similarity index 97%
rename from socs/agent/scpi_psu_driver.py
rename to socs/agents/scpi_psu/drivers.py
index 826d88849..64a1dbead 100644
--- a/socs/agent/scpi_psu_driver.py
+++ b/socs/agents/scpi_psu/drivers.py
@@ -1,5 +1,5 @@
# Tucker Elleflot
-from socs.agent.prologix_interface import PrologixInterface
+from socs.common.prologix_interface import PrologixInterface
class PsuInterface(PrologixInterface):
diff --git a/socs/agents/smurf_crate_monitor/__init__.py b/socs/agents/smurf_crate_monitor/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/agents/smurf_crate_monitor/smurf_crate_monitor.py b/socs/agents/smurf_crate_monitor/agent.py
similarity index 83%
rename from agents/smurf_crate_monitor/smurf_crate_monitor.py
rename to socs/agents/smurf_crate_monitor/agent.py
index 6f6031006..44d8dd6a6 100644
--- a/agents/smurf_crate_monitor/smurf_crate_monitor.py
+++ b/socs/agents/smurf_crate_monitor/agent.py
@@ -1,14 +1,13 @@
-import os
-import time
import argparse
-import numpy as np
import subprocess
+import time
+
+import numpy as np
import txaio
+
txaio.use_twisted()
-ON_RTD = os.environ.get('READTHEDOCS') == 'True'
-if not ON_RTD:
- from ocs import ocs_agent, site_config
+from ocs import ocs_agent, site_config
def get_sensors(shm_addr):
@@ -27,6 +26,8 @@ def get_sensors(shm_addr):
sensids (str list):
List of sensor identification names, same length as ipmbs list.
"""
+ log = txaio.make_logger()
+
# SSH to shelf manager
cmd = ['ssh', f'{shm_addr}']
# Send command to shelf manager
@@ -47,7 +48,7 @@ def get_sensors(shm_addr):
# Parse readback data line by line unless empty
if result == []:
error = ssh.stderr.readlines()
- LOG.error("ERROR: %s" % error)
+ log.error("ERROR: %s" % error)
else:
for r in result:
if ': LUN' in r.decode('utf-8'):
@@ -124,6 +125,8 @@ def get_data_dict(shm_addr, ipmbs, sensids, chan_names,
of all of the sensors passed into the function. Ensures the
keys match the influxdb feedname requirements
"""
+ log = txaio.make_logger()
+
data_dict = {}
cmd = ['ssh', f'{shm_addr}', 'clia', 'sensordata']
ssh = subprocess.Popen(cmd,
@@ -133,7 +136,7 @@ def get_data_dict(shm_addr, ipmbs, sensids, chan_names,
result = ssh.stdout.readlines()
if result == []:
error = ssh.stderr.readlines()
- LOG.error("ERROR: %s" % error)
+ log.error("ERROR: %s" % error)
else:
for ipmb, sensid, chan_name in zip(ipmbs, sensids, chan_names):
sense_chan = False
@@ -176,32 +179,37 @@ def __init__(self, agent, crate_id, shm_addr):
agg_params=agg_params,
buffer_time=0.)
- def init_data_stream(self, shm_addr):
- """
- Wrapper for get_sensors and get_channel_names which generates
- the list of sensors to use in datastreaming.
+ def _init_data_stream(self, shm_addr):
+ """Wrapper for get_sensors and get_channel_names which generates the
+ list of sensors to use in datastreaming.
+
Args:
- shm_addr (str):
- Address used to connect to shelf manager ex. root@192.168.1.2
+ shm_addr (str): Address used to connect to the shelf manager,
+ e.g. root@192.168.1.2
Return:
- ipmbs (str list):
- List of Intelligent Platform Management Bus (IPMB) addresses.
- sensids (str list):
- List of sensor identification names, same length as ipmbs list.
- chan_names (str list):
- List of human readable names for each IPMB address.
+ ipmbs (str list): List of Intelligent Platform Management Bus
+ (IPMB) addresses.
+ sensids (str list): List of sensor identification names, same
+ length as ipmbs list.
+ chan_names (str list): List of human readable names for each IPMB
+ address.
"""
ipmbs, sensids = get_sensors(shm_addr)
chan_names = get_channel_names(ipmbs)
return ipmbs, sensids, chan_names
def init_crate(self, session, params=None):
- """
- Run at the startup of the docker to check that you can successfully
- ssh to the crate and run a command. If it runs successfully then
- you should see the home directory of the shelf manager printed to
- the docker logs and the data acquisition process to start, if not
- you will see an error in the logs and acquistion won't start.
+ """init_crate()
+
+ **Task** - Initialize connection to the SMuRF crate.
+
+ Run at startup of the docker container to check that you can
+ successfully ssh to the crate and run a command. If it succeeds,
+ you should see the shelf manager's home directory printed to the
+ docker logs and the data acquisition process start; if not, you
+ will see an error in the logs and acquisition won't start.
+
"""
self.log.info(self.shm_addr)
cmd = ['ssh', f'{self.shm_addr}', 'pwd']
@@ -221,14 +229,18 @@ def init_crate(self, session, params=None):
self.agent.start('acq')
return True, 'Crate Initialized'
- def start_acq(self, session, params=None):
- """
- Starts acquiring data, hardcoded for one data point every 30
- seconds because we intend for this to be very low rate data.
+ def acq(self, session, params=None):
+ """acq()
+
+ **Process** - Start acquiring data.
+
+ Hardcoded for one data point every 30 seconds because we intend for
+ this to be very low rate data.
+
"""
self.log.info('Started acquisition')
shm_addr = self.shm_addr
- ipmbs, sensids, chan_names = self.init_data_stream(shm_addr=shm_addr)
+ ipmbs, sensids, chan_names = self._init_data_stream(shm_addr=shm_addr)
self.log.info('Got sensor names')
self.take_data = True
while self.take_data:
@@ -249,7 +261,7 @@ def start_acq(self, session, params=None):
self.agent.publish_to_feed('smurf_sensors', data)
return True, 'Acquisition exited cleanly'
- def stop_acq(self, session, params=None):
+ def _stop_acq(self, session, params=None):
"""
Stops acquiring data if the docker is stopped.
"""
@@ -276,11 +288,11 @@ def make_parser(parser=None):
return parser
-if __name__ == '__main__':
- LOG = txaio.make_logger()
+def main(args=None):
parser = make_parser()
args = site_config.parse_args(agent_class='CrateAgent',
- parser=parser)
+ parser=parser,
+ args=args)
startup = True
agent, runner = ocs_agent.init_site_agent(args)
shm_addr = args.shm_addr
@@ -290,7 +302,11 @@ def make_parser(parser=None):
agent.register_task('init_crate', smurfcrate.init_crate,
startup=startup)
- agent.register_process('acq', smurfcrate.start_acq,
- smurfcrate.stop_acq)
+ agent.register_process('acq', smurfcrate.acq,
+ smurfcrate._stop_acq)
runner.run(agent, auto_reconnect=True)
+
+
+if __name__ == '__main__':
+ main()
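The module-level LOG that previously only existed under the __main__ block is replaced above by loggers created inside get_sensors() and get_data_dict(), so the helpers can log even when the module is merely imported (as the new ocs_plugin_so registry does). A minimal sketch of that pattern, independent of this agent:

import txaio

txaio.use_twisted()

def helper():
    # Make the logger where it is used rather than relying on a global
    # defined only when the module runs as a script.
    log = txaio.make_logger()
    log.error("ERROR: %s" % "example failure")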
diff --git a/socs/agents/smurf_file_emulator/__init__.py b/socs/agents/smurf_file_emulator/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/agents/smurf_file_emulator/smurf_file_emulator.py b/socs/agents/smurf_file_emulator/agent.py
similarity index 99%
rename from agents/smurf_file_emulator/smurf_file_emulator.py
rename to socs/agents/smurf_file_emulator/agent.py
index 531fbc451..ce2a6af0d 100644
--- a/agents/smurf_file_emulator/smurf_file_emulator.py
+++ b/socs/agents/smurf_file_emulator/agent.py
@@ -1,13 +1,12 @@
-import time
-import os
import argparse
-import txaio
-import numpy as np
-
-from ocs import ocs_agent, site_config
-import yaml
+import os
+import time
+import numpy as np
import so3g
+import txaio
+import yaml
+from ocs import ocs_agent, site_config
from spt3g import core
@@ -653,10 +652,11 @@ def make_parser(parser=None):
return parser
-if __name__ == '__main__':
+def main(args=None):
parser = make_parser()
args = site_config.parse_args(agent_class='SmurfFileEmulator',
- parser=parser)
+ parser=parser,
+ args=args)
txaio.start_logging(level=os.environ.get('LOGLEVEL', 'info'))
@@ -673,3 +673,7 @@ def make_parser(parser=None):
agent.register_process('stream', file_em.stream, file_em._stop_stream)
runner.run(agent, auto_reconnect=True)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/agents/smurf_file_emulator/status_sample.yaml b/socs/agents/smurf_file_emulator/status_sample.yaml
similarity index 100%
rename from agents/smurf_file_emulator/status_sample.yaml
rename to socs/agents/smurf_file_emulator/status_sample.yaml
diff --git a/socs/agents/smurf_stream_simulator/__init__.py b/socs/agents/smurf_stream_simulator/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/agents/smurf_stream_simulator/smurf_stream_simulator.py b/socs/agents/smurf_stream_simulator/agent.py
similarity index 98%
rename from agents/smurf_stream_simulator/smurf_stream_simulator.py
rename to socs/agents/smurf_stream_simulator/agent.py
index 3716cf76a..8e1bff63f 100644
--- a/agents/smurf_stream_simulator/smurf_stream_simulator.py
+++ b/socs/agents/smurf_stream_simulator/agent.py
@@ -1,21 +1,20 @@
+import argparse
import os
import time
-import argparse
-
-from os import environ
-from enum import Enum
from collections import deque
+from enum import Enum
+from os import environ
-import txaio
import numpy as np
+import txaio
# For logging
txaio.use_twisted()
ON_RTD = os.environ.get('READTHEDOCS') == 'True'
if not ON_RTD:
- from ocs import ocs_agent, site_config
import so3g # noqa: F401
+ from ocs import ocs_agent, site_config
from spt3g import core
@@ -355,13 +354,14 @@ def make_parser(parser=None):
return parser
-if __name__ == '__main__':
+def main(args=None):
# Start logging
txaio.start_logging(level=environ.get("LOGLEVEL", "info"))
parser = make_parser()
args = site_config.parse_args(agent_class='SmurfStreamSimulator',
- parser=parser)
+ parser=parser,
+ args=args)
agent, runner = ocs_agent.init_site_agent(args)
sim = SmurfStreamSimulator(agent, target_host=args.target_host,
@@ -376,3 +376,7 @@ def make_parser(parser=None):
agent.register_task('stop', sim.set_stream_off)
runner.run(agent, auto_reconnect=True)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/socs/agents/suprsync/__init__.py b/socs/agents/suprsync/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/agents/suprsync/suprsync.py b/socs/agents/suprsync/agent.py
similarity index 97%
rename from agents/suprsync/suprsync.py
rename to socs/agents/suprsync/agent.py
index 0a2024d06..b62f776d4 100644
--- a/agents/suprsync/suprsync.py
+++ b/socs/agents/suprsync/agent.py
@@ -1,12 +1,12 @@
import argparse
import os
-import time
import subprocess
-import txaio
+import time
+import txaio
from ocs import ocs_agent, site_config
-from socs.db.suprsync import (SupRsyncFilesManager, SupRsyncFileHandler)
+from socs.db.suprsync import SupRsyncFileHandler, SupRsyncFilesManager
class SupRsync:
@@ -213,9 +213,11 @@ def make_parser(parser=None):
return parser
-if __name__ == '__main__':
+def main(args=None):
parser = make_parser()
- args = site_config.parse_args('SupRsync', parser=parser)
+ args = site_config.parse_args('SupRsync',
+ parser=parser,
+ args=args)
txaio.start_logging(level=os.environ.get("LOGLEVEL", "info"))
agent, runner = ocs_agent.init_site_agent(args)
@@ -223,3 +225,7 @@ def make_parser(parser=None):
agent.register_process('run', suprsync.run, suprsync._stop, startup=True)
runner.run(agent, auto_reconnect=True)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/socs/agents/synacc/__init__.py b/socs/agents/synacc/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/agents/synacc/synacc.py b/socs/agents/synacc/agent.py
similarity index 80%
rename from agents/synacc/synacc.py
rename to socs/agents/synacc/agent.py
index 9acb6f03e..e95a59f7f 100644
--- a/agents/synacc/synacc.py
+++ b/socs/agents/synacc/agent.py
@@ -1,25 +1,23 @@
#!/usr/bin/env python
-import os
-import requests
import argparse
import time
-on_rtd = os.environ.get('READTHEDOCS') == 'True'
-if not on_rtd:
- from ocs import ocs_agent, site_config
- from ocs.ocs_twisted import TimeoutLock
+import requests
+from ocs import ocs_agent, site_config
+from ocs.ocs_twisted import TimeoutLock
class SynaccessAgent:
- def __init__(self, agent, ip_address, username, password):
- """
- Initializes the class variables
+ """
+ Agent to control and monitor Synaccess Networks PDUs.
- Args:
- ip_address(str): IP Address for the agent.
- username(str): username credential to login to strip
- password(str): password credential to login to strip
- """
+ Args:
+ ip_address(str): IP address for the device.
+ username(str): Username credential to login to device.
+ password(str): Password credential to login to device.
+ """
+
+ def __init__(self, agent, ip_address, username, password):
self.agent = agent
self.log = agent.log
self.lock = TimeoutLock()
@@ -38,7 +36,13 @@ def __get_status(self):
resp = r.content.decode()[:-6:-1] # get last 5 elements, in reverse
return resp
+ @ocs_agent.param('_')
def get_status(self, session, params=None):
+ """get_status()
+
+ **Task** - Get the status of all outlets.
+
+ """
with self.lock.acquire_timeout(3, job='get_status') as acquired:
if acquired:
resp = self.__get_status()
@@ -52,7 +56,16 @@ def get_status(self, session, params=None):
else:
return False, "Could not acquire lock"
+ @ocs_agent.param('outlet', type=int)
def reboot(self, session, params=None):
+ """reboot(outlet)
+
+ **Task** - Reboot a given outlet.
+
+ Parameters:
+ outlet (int): The outlet to reboot.
+
+ """
with self.lock.acquire_timeout(3, job='reboot') as acquired:
if acquired:
req = "http://" + self.user + ":" + \
@@ -63,13 +76,17 @@ def reboot(self, session, params=None):
else:
return False, "Could not acquire lock"
+ @ocs_agent.param('outlet', type=int)
+ @ocs_agent.param('on', type=bool)
def set_outlet(self, session, params=None):
- """
- Sets a particular outlet to on/off
+ """set_outlet(outlet, on)
+
+ **Task** - Set a particular outlet on/off.
+
+ Parameters:
+ outlet (int): The outlet that we are changing the state of.
+ on (bool): The new state. True for on, False for off.
- Args:
- outlet (int): the outlet that we are changing the state of
- on (bool): the new state
"""
with self.lock.acquire_timeout(3, job='set_outlet') as acquired:
if acquired:
@@ -86,13 +103,14 @@ def set_outlet(self, session, params=None):
else:
return False, "Could not acquire lock"
+ @ocs_agent.param('on', type=bool)
def set_all(self, session, params=None):
- """
+ """set_all(on)
- Sets all outlets to on/off
+ **Task** - Set all outlets on/off.
- Args:
- on (bool): the new state
+ Parameters:
+ on (bool): The new state. True for on, False for off.
"""
with self.lock.acquire_timeout(3, job='set_all') as acquired:
@@ -107,10 +125,11 @@ def set_all(self, session, params=None):
else:
return False, "Could not acquire lock"
+ @ocs_agent.param('_')
def status_acq(self, session, params=None):
"""status_acq()
- **Process** - Method to start data acquisition process.
+ **Process** - Start data acquisition.
Notes:
The most recent data collected is stored in session.data in the
@@ -195,15 +214,17 @@ def make_parser(parser=None):
# Add options specific to this agent.
pgroup = parser.add_argument_group('Agent Options')
- pgroup.add_argument('--ip-address')
- pgroup.add_argument('--username')
- pgroup.add_argument('--password')
+ pgroup.add_argument('--ip-address', help='IP address for the device.')
+ pgroup.add_argument('--username', help='Username credential to login to device.')
+ pgroup.add_argument('--password', help='Password credential to login to device.')
return parser
-if __name__ == '__main__':
+def main(args=None):
parser = make_parser()
- args = site_config.parse_args(agent_class='SynaccessAgent', parser=parser)
+ args = site_config.parse_args(agent_class='SynaccessAgent',
+ parser=parser,
+ args=args)
agent, runner = ocs_agent.init_site_agent(args)
@@ -219,3 +240,7 @@ def make_parser(parser=None):
agent.register_task('set_all', p.set_all)
runner.run(agent, auto_reconnect=True)
+
+
+if __name__ == '__main__':
+ main()
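For reference, the tasks registered above can be commanded from any OCS client session once the agent is running. A minimal sketch, assuming an instance-id of 'synacc' in the site config (the instance-id and outlet number are illustrative only):

from ocs.ocs_client import OCSClient

synacc = OCSClient('synacc')           # instance-id is site-config dependent
synacc.get_status()                    # report the state of all outlets
synacc.reboot(outlet=1)                # power-cycle outlet 1
synacc.set_outlet(outlet=1, on=True)   # switch outlet 1 on
synacc.set_all(on=False)               # switch every outlet off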
diff --git a/socs/agents/tektronix3021c/__init__.py b/socs/agents/tektronix3021c/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/socs/agents/tektronix3021c/agent.py b/socs/agents/tektronix3021c/agent.py
new file mode 100644
index 000000000..cfd31e061
--- /dev/null
+++ b/socs/agents/tektronix3021c/agent.py
@@ -0,0 +1,183 @@
+"""Michael Randall
+mrandall@ucsd.edu"""
+
+import argparse
+import socket
+import time
+
+from ocs import ocs_agent, site_config
+from ocs.ocs_twisted import TimeoutLock
+
+from socs.agents.tektronix3021c.drivers import TektronixInterface
+
+
+class TektronixAWGAgent:
+ """Tektronix3021c Function Generator Agent.
+
+ Args:
+ ip_address (string): The IP address of the gpib to ethernet
+ controller connected to the function generator.
+ gpib_slot (int): The gpib address currently set
+ on the function generator.
+
+ """
+
+ def __init__(self, agent, ip_address, gpib_slot):
+ self.agent = agent
+ self.log = agent.log
+ self.lock = TimeoutLock()
+
+ self.job = None
+
+ self.ip_address = ip_address
+ self.gpib_slot = gpib_slot
+ self.monitor = False
+
+ self.awg = None
+ # Registers data feeds
+ agg_params = {
+ 'frame_length': 60,
+ }
+ self.agent.register_feed('awg',
+ record=True,
+ agg_params=agg_params)
+
+ @ocs_agent.param('_')
+ def init(self, session, params=None):
+ """init()
+
+ **Task** - Initialize connection to Tektronix AWG.
+
+ """
+
+ with self.lock.acquire_timeout(0) as acquired:
+ if not acquired:
+ return False, "Could not acquire lock"
+
+ try:
+ self.awg = TektronixInterface(self.ip_address, self.gpib_slot)
+ self.idn = self.awg.identify()
+
+ except socket.timeout as e:
+                self.log.error("Tektronix AWG timed out "
+                               "during connect -> {}".format(e))
+ return False, "Timeout"
+
+ self.log.info("Connected to AWG: {}".format(self.idn))
+
+ return True, 'Initialized AWG.'
+
+ @ocs_agent.param('frequency', type=float, check=lambda x: 0 <= x <= 25e6)
+ def set_frequency(self, session, params=None):
+ """set_frequency(frequency)
+
+ **Task** - Set frequency of the function generator.
+
+ Parameters:
+ frequency (float): Frequency to set in Hz. Must be between 0 and
+ 25,000,000.
+ """
+
+ with self.lock.acquire_timeout(1) as acquired:
+ if acquired:
+ freq = params['frequency']
+
+ self.awg.set_freq(freq)
+
+ data = {'timestamp': time.time(),
+ 'block_name': "AWG_frequency_cmd",
+ 'data': {'AWG_frequency_cmd': freq}
+ }
+ self.agent.publish_to_feed('awg', data)
+ else:
+ return False, "Could not acquire lock"
+
+        return True, 'Set frequency to {} Hz'.format(freq)
+
+ @ocs_agent.param('amplitude', type=float, check=lambda x: 0 <= x <= 10)
+ def set_amplitude(self, session, params=None):
+ """set_amplitude(amplitude)
+
+        **Task** - Set the peak-to-peak voltage of the function generator.
+
+ Parameters:
+            amplitude (float): Peak-to-peak voltage to set. Must be between 0
+ and 10.
+
+ """
+ with self.lock.acquire_timeout(1) as acquired:
+ if acquired:
+ amp = params['amplitude']
+
+ self.awg.set_amp(amp)
+
+ data = {'timestamp': time.time(),
+ 'block_name': "AWG_amplitude_cmd",
+ 'data': {'AWG_amplitude_cmd': amp}
+ }
+ self.agent.publish_to_feed('awg', data)
+ else:
+ return False, "Could not acquire lock"
+
+        return True, 'Set amplitude to {} Vpp'.format(amp)
+
+ @ocs_agent.param('state', type=bool)
+ def set_output(self, session, params=None):
+ """set_output(state)
+
+ **Task** - Turn function generator output on or off.
+
+ Parameters:
+ state (bool): True for on, False for off.
+
+ """
+ with self.lock.acquire_timeout(1) as acquired:
+ if acquired:
+ state = params.get("state")
+
+ self.awg.set_output(state)
+
+ data = {'timestamp': time.time(),
+ 'block_name': "AWG_output_cmd",
+ 'data': {'AWG_output_cmd': int(state)}
+ }
+ self.agent.publish_to_feed('awg', data)
+
+ else:
+ return False, "Could not acquire lock"
+
+        return True, 'Set output to {}.'.format(state)
+
+
+def make_parser(parser=None):
+ if parser is None:
+ parser = argparse.ArgumentParser()
+
+ pgroup = parser.add_argument_group('Agent Options')
+ pgroup.add_argument('--ip-address', type=str,
+ help="IP address of Tektronix device.")
+ pgroup.add_argument('--gpib-slot', type=int,
+ help="GPIB slot of Tektronix device.")
+ return parser
+
+
+def main(args=None):
+ parser = make_parser()
+ args = site_config.parse_args(agent_class="Tektronix AWG",
+ parser=parser,
+ args=args)
+
+ agent, runner = ocs_agent.init_site_agent(args)
+
+ p = TektronixAWGAgent(agent, args.ip_address, args.gpib_slot)
+
+ agent.register_task('init', p.init, startup=True)
+ agent.register_task('set_frequency', p.set_frequency)
+ agent.register_task('set_amplitude', p.set_amplitude)
+ agent.register_task('set_output', p.set_output)
+
+ runner.run(agent, auto_reconnect=True)
+
+
+if __name__ == '__main__':
+ main()
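Similarly, a client-side sketch for the new Tektronix AWG tasks; 'tektronix' is a placeholder instance-id, and the @ocs_agent.param checks reject frequencies outside 0 to 25 MHz or amplitudes outside 0 to 10 Vpp before the task body runs:

from ocs.ocs_client import OCSClient

awg = OCSClient('tektronix')        # placeholder instance-id
awg.init()                          # connect to the function generator
awg.set_frequency(frequency=1e3)    # 1 kHz; must satisfy 0 <= f <= 25e6
awg.set_amplitude(amplitude=2.0)    # 2 Vpp; must satisfy 0 <= Vpp <= 10
awg.set_output(state=True)          # enable the output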
diff --git a/socs/agent/tektronix3021c_driver.py b/socs/agents/tektronix3021c/drivers.py
similarity index 85%
rename from socs/agent/tektronix3021c_driver.py
rename to socs/agents/tektronix3021c/drivers.py
index 2ad971099..2f24fd003 100644
--- a/socs/agent/tektronix3021c_driver.py
+++ b/socs/agents/tektronix3021c/drivers.py
@@ -1,7 +1,7 @@
"""Michael Randall
mrandall@ucsd.edu"""
-from socs.agent.prologix_interface import PrologixInterface
+from socs.common.prologix_interface import PrologixInterface
class TektronixInterface(PrologixInterface):
diff --git a/socs/agents/thorlabs_mc2000b/agent.py b/socs/agents/thorlabs_mc2000b/agent.py
new file mode 100644
index 000000000..a7758d80e
--- /dev/null
+++ b/socs/agents/thorlabs_mc2000b/agent.py
@@ -0,0 +1,339 @@
+import argparse
+import os
+import time
+
+import txaio
+from ocs import ocs_agent, site_config
+from ocs.ocs_twisted import TimeoutLock
+
+ON_RTD = os.environ.get("READTHEDOCS") == "True"
+if not ON_RTD:
+    # Register the Thorlabs SDK DLL directory before importing the SDK module.
+    os.add_dll_directory("C:\\Program Files (x86)\\Thorlabs\\MC2000B\\Sample\\Thorlabs_MC2000B_PythonSDK")
+    from MC2000B_COMMAND_LIB import *  # noqa: F403
+
+# For logging
+txaio.use_twisted()
+LOG = txaio.make_logger()
+
+bladetype_keys = {'MC1F2': 0,
+ 'MC1F10': 1,
+ 'MC1F15': 2,
+ 'MC1F30': 3,
+ 'MC1F60': 4,
+ 'MC1F100': 5,
+ 'MC1F10HP': 6,
+ 'MC1F2P10': 7,
+ 'MC1F6P10': 8,
+ 'MC1F10A': 9,
+ 'MC2F330': 10,
+ 'MC2F47': 11,
+ 'MC2F57B': 12,
+ 'MC2F860': 13,
+ 'MC2F5360': 14}
+
+outputmode_keys = {'target': 0,
+ 'actual': 1}
+
+reference_mode_keys = {'internal': 0,
+ 'external': 1}
+
+reference_high_prec_mode = {'internalouter': 0,
+ 'internalinner': 1,
+ 'externalouter': 2,
+ 'externalinner': 3}
+
+
+class ThorlabsMC2000BAgent:
+ """Agent to connect to the MC2000B Thorlabs chopper controller
+ device.
+
+ Parameters
+ ----------
+ comport : str
+ COM port to connect to device. Ex: "COM3"
+ nbaud : int
+        Baud rate of the device. Defaults to 115200.
+    timeout : int
+        Timeout for device communication in seconds. Defaults to 3.
+ """
+
+ def __init__(self, agent, comport, nbaud=115200, timeout=3):
+ self.agent = agent
+ self.log = agent.log
+ self.lock = TimeoutLock()
+ self.comport = comport
+ self.nbaud = nbaud
+ self.timeout = timeout
+
+ self.hdl = None
+
+ self.initialized = False
+ self.take_data = False
+
+ agg_params = {'frame_length': 60,
+ 'exclude_influx': False}
+
+ # register the feeds
+ self.agent.register_feed('chopper_freqs',
+ record=True,
+ agg_params=agg_params,
+ buffer_time=1
+ )
+
+ @ocs_agent.param('auto_acquire', default=False, type=bool)
+ def init_chopper(self, session, params):
+ """init_chopper(auto_acquire=False)
+
+ **Task** - Perform first time setup of MC2000B chopper controller
+ communication.
+
+ Parameters
+ ----------
+ auto_acquire : bool, optional
+            Default is False. Starts data acquisition after
+            initialization if True.
+ """
+ if self.initialized:
+ return True, "Already initialized"
+
+ with self.lock.acquire_timeout(job='init_chopper') as acquired:
+ if not acquired:
+ self.log.warn(f"Could not start Task because "
+ f"{self.lock.job} is already running")
+ return False, "Could not acquire lock"
+
+ session.set_status('running')
+
+ # Establish connection to the chopper controller
+ self.hdl = MC2000BOpen(self.comport, self.nbaud, self.timeout)
+
+            if self.hdl == 0:
+ self.initialized = True
+ self.log.info("Chopper connected")
+ else:
+ self.initialized = False
+ return False, "Chopper not connected"
+
+ # Start data acquisition if requested
+ if params['auto_acquire']:
+ resp = self.agent.start('acq', params={})
+ self.log.info(f'Response from acq.start(): {resp[1]}')
+
+ return True, "Chopper controller agent initialized"
+
+ @ocs_agent.param('freq', type=float)
+ def set_frequency(self, session, params):
+        """set_frequency(freq)
+
+ **Task** - Set the frequency of the chopper.
+
+ Parameters
+ ----------
+ freq : float
+ Frequency of chopper blades.
+ """
+ with self.lock.acquire_timeout(timeout=3, job='set_frequency') as acquired:
+ if not acquired:
+ self.log.warn(f"Could not start Task because "
+ f"{self.lock.job} is already running")
+ return False, "Could not acquire lock"
+
+ session.set_status('running')
+
+ MC2000BSetFrequency(self.hdl, params['freq'])
+
+ return True, "Chopper frequency set to {} Hz".format(params['freq'])
+
+ @ocs_agent.param('bladetype', type=str, default='MC1F2')
+ def set_bladetype(self, session, params):
+        """set_bladetype(bladetype='MC1F2')
+
+ **Task** - Set the bladetype of the chopper. Selecting a bladetype
+ influences the range of frequencies permitted for the chopper.
+
+ Parameters
+ ----------
+ bladetype : str
+ Name of bladetype assigned to chopper controller setup.
+ Default set to 'MC1F2' to reach the range of 4-8Hz.
+ """
+ with self.lock.acquire_timeout(timeout=3, job='set_bladetype') as acquired:
+ if not acquired:
+ self.log.warn(f"Could not start Task because "
+ f"{self.lock.job} is already running")
+ return False, "Could not acquire lock"
+
+ session.set_status('running')
+
+ bladetype = bladetype_keys[params['bladetype']]
+ MC2000BSetBladeType(self.hdl, bladetype)
+
+ return True, "Chopper bladetype set to {}".format(params['bladetype'])
+
+ @ocs_agent.param('output_mode', type=str, choices=['actual', 'target'], default='target')
+ def set_reference_output_mode(self, session, params):
+        """set_reference_output_mode(output_mode='target')
+
+ **Task** - Set the output reference mode to determine the setting of
+ frequency output/input.
+
+ Parameters
+ ----------
+ output_mode : str
+ Output reference mode of chopper frequency. Possible modes
+ are 'target' or 'actual'. Default set to 'target'.
+
+ """
+ with self.lock.acquire_timeout(timeout=3, job='set_reference_output_mode') as acquired:
+ if not acquired:
+ self.log.warn(f"Could not start Task because "
+ f"{self.lock.job} is already running")
+ return False, "Could not acquire lock"
+
+ session.set_status('running')
+
+ mode = params['output_mode']
+ mode_int = outputmode_keys[mode]
+ MC2000BSetReferenceOutput(self.hdl, mode_int)
+
+ return True, "Chopper output mode set to {}".format(params['output_mode'])
+
+ @ocs_agent.param('reference', type=str, default='internalinner')
+ def set_blade_reference(self, session, params):
+        """set_blade_reference(reference='internalinner')
+
+ **Task** - Set the reference mode for the blade. This is the point on
+ the chopper blades for the controller to measure and set frequency.
+
+ Parameters
+ ----------
+ reference : str
+ Reference mode of the blade. Default set to 'internalinner'.
+ Can be "internal", "external", "internalinner", "internalouter",
+ "externalinner", "externalouter"
+ """
+ with self.lock.acquire_timeout(timeout=3, job='set_blade_reference') as acquired:
+ if not acquired:
+ self.log.warn(f"Could not start Task because "
+ f"{self.lock.job} is already running")
+ return False, "Could not acquire lock"
+
+ session.set_status('running')
+
+ reference = params['reference']
+
+ if reference in ('external', 'internal'):
+ ref = reference_mode_keys[reference]
+ else:
+ ref = reference_high_prec_mode[reference]
+
+ MC2000BSetReference(self.hdl, ref)
+
+ return True, "Chopper blade reference set to {}".format(params['reference'])
+
+ def acq(self, session, params):
+ """acq()
+
+ **Process** - Acquire data from the MC2000B chopper device.
+
+ """
+ with self.lock.acquire_timeout(timeout=0, job='acq') as acquired:
+ if not acquired:
+ self.log.warn(f"Could not start acq because {self.lock.job} "
+ "is already running")
+ return False, "Could not acquire lock."
+
+ session.set_status('running')
+
+ last_release = time.time()
+
+ self.take_data = True
+
+ self.log.info("Starting data acquisition for {}".format(self.agent.agent_address))
+
+ while self.take_data:
+ # Relinquish sampling lock occasionally
+ if time.time() - last_release > 1.:
+ last_release = time.time()
+ if not self.lock.release_and_acquire(timeout=10):
+ self.log.warn(f"Failed to re-acquire sampling lock, "
+ f"currently held by {self.lock.job}.")
+ continue
+
+ freq_in = [0]
+ MC2000BGetFrequency(self.hdl, freq_in)
+ input_freq = freq_in[0]
+
+ freq_out = [0]
+ MC2000BGetReferenceOutFrequency(self.hdl, freq_out)
+ output_freq = freq_out[0]
+
+ # Publish data
+ chopper_freqs = {'block_name': 'chopper_freqs',
+ 'timestamp': time.time(),
+ 'data': {'input_freqs': input_freq,
+ 'output_freqs': output_freq}
+ }
+
+ self.agent.publish_to_feed('chopper_freqs', chopper_freqs)
+
+ def _stop_acq(self, session, params=None):
+ """
+ Stops acq process.
+ """
+ if self.take_data:
+ session.set_status('stopping')
+ self.take_data = False
+ return True, 'requested to stop taking data.'
+ else:
+ return False, 'acq is not currently running.'
+
+
+def make_parser(parser=None):
+ """Build argument parser for the Agent
+ """
+
+ if parser is None:
+ parser = argparse.ArgumentParser()
+
+ pgroup = parser.add_argument_group('Agent Options')
+ pgroup.add_argument('--com-port')
+ pgroup.add_argument('--mode', choices=['init', 'acq'])
+
+ return parser
+
+
+def main(args=None):
+ # For logging
+ txaio.use_twisted()
+ txaio.make_logger()
+
+ # Start logging
+ txaio.start_logging(level=os.environ.get("LOGLEVEL", "info"))
+
+ parser = make_parser()
+ args = site_config.parse_args(agent_class='ThorlabsMC2000BAgent',
+ parser=parser,
+ args=args)
+
+ init_params = False
+ if args.mode == 'init':
+ init_params = {'auto_acquire': False}
+ elif args.mode == 'acq':
+ init_params = {'auto_acquire': True}
+
+ agent, runner = ocs_agent.init_site_agent(args)
+ controller_agent = ThorlabsMC2000BAgent(agent, args.com_port)
+
+ agent.register_task('init_chopper', controller_agent.init_chopper, startup=init_params)
+ agent.register_task('set_frequency', controller_agent.set_frequency)
+ agent.register_task('set_bladetype', controller_agent.set_bladetype)
+ agent.register_task('set_reference_output_mode', controller_agent.set_reference_output_mode)
+ agent.register_task('set_blade_reference', controller_agent.set_blade_reference)
+ agent.register_process('acq', controller_agent.acq, controller_agent._stop_acq, startup=True)
+
+ runner.run(agent, auto_reconnect=True)
+
+
+if __name__ == '__main__':
+ main()
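A usage sketch for the chopper agent, with 'chopper' as a placeholder instance-id and argument values taken from the defaults documented above:

from ocs.ocs_client import OCSClient

chopper = OCSClient('chopper')                           # placeholder instance-id
chopper.init_chopper(auto_acquire=True)                  # connect and start acq
chopper.set_bladetype(bladetype='MC1F2')                 # blade for the 4-8 Hz range
chopper.set_blade_reference(reference='internalinner')
chopper.set_reference_output_mode(output_mode='target')
chopper.set_frequency(freq=5.0)                          # Hz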
diff --git a/socs/agents/vantagepro2/__init__.py b/socs/agents/vantagepro2/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/agents/vantagePro2_agent/vantage_pro2_agent.py b/socs/agents/vantagepro2/agent.py
similarity index 80%
rename from agents/vantagePro2_agent/vantage_pro2_agent.py
rename to socs/agents/vantagepro2/agent.py
index 605a56372..e41737544 100644
--- a/agents/vantagePro2_agent/vantage_pro2_agent.py
+++ b/socs/agents/vantagepro2/agent.py
@@ -1,17 +1,12 @@
-
-import time
-import os
import argparse
-
+import os
+import time
from typing import Optional
-from socs.agent.vantage_pro2.vantage_pro2 import VantagePro2
-# from LS240_agent
-on_rtd = os.environ.get('READTHEDOCS') == 'True'
-if not on_rtd:
- from ocs import ocs_agent, site_config
- from ocs.ocs_twisted import Pacemaker
- from ocs.ocs_twisted import TimeoutLock
+from ocs import ocs_agent, site_config
+from ocs.ocs_twisted import Pacemaker, TimeoutLock
+
+from socs.agents.vantagepro2.drivers import VantagePro2
class VantagePro2Agent:
@@ -53,21 +48,17 @@ def __init__(self, agent, port="/dev/ttyUSB0", sample_freq=0.5):
record=True,
agg_params=agg_params)
- # Task functions.
- def init_VantagePro2_task(self, session, params=None):
- """
- Perform first time setup of the Weather Monitor Module.
-
- Args:
- params (dict): Parameters dictionary for passing parameters to
- task.
+ @ocs_agent.param('auto_acquire', default=False, type=bool)
+ def init(self, session, params=None):
+ """init(auto_acquire=False)
- """
- if params is None:
- params = {}
+ **Task** - Perform first time setup of the Weather Monitor Module.
- auto_acquire = params.get('auto_acquire', False)
+ Parameters:
+ auto_acquire (bool): Automatically start acq process after
+ initialization if True. Defaults to False.
+ """
if self.initialized:
return True, "Already Initialized Module"
@@ -86,27 +77,26 @@ def init_VantagePro2_task(self, session, params=None):
self.initialized = True
# Start data acquisition if requested
- if auto_acquire:
+ if params['auto_acquire']:
self.agent.start('acq')
time.sleep(2)
return True, 'Vantage Pro2 module initialized.'
- def start_acq(self, session, params=None):
- """
- Method to start data acquisition process.
+ @ocs_agent.param('sample_freq', default=0.5, type=float)
+ def acq(self, session, params=None):
+ """acq(sample_freq=0.5)
- Args:
- sample_freq (double):
- Frequency at which weather data is sampled.
- Defaults to 0.5 Hz.
+ **Process** - Start data acquisition.
- """
- if params is None:
- params = {}
+ Parameters:
+ sample_freq (float):
+ Frequency at which weather data is sampled. Defaults to 0.5
+ Hz.
- sample_freq = params.get('sample_freq')
+ """
+ sample_freq = params['sample_freq']
# If sample_freq is None, use value passed to Agent init
if sample_freq is None:
sample_freq = self.sample_freq
@@ -143,7 +133,7 @@ def start_acq(self, session, params=None):
return True, 'Acquisition exited cleanly.'
- def stop_acq(self, session, params=None):
+ def _stop_acq(self, session, params=None):
"""
Stops acq process.
"""
@@ -167,15 +157,16 @@ def make_parser(parser=None):
help="Serial number of VantagePro2 Monitor")
pgroup.add_argument('--mode', type=str, choices=['idle', 'init', 'acq'],
help="Starting action for the agent.")
- pgroup.add_argument('--sample_freq', type=float,
+ pgroup.add_argument('--sample-freq', type=float,
help="Sample frequency for weather data collection")
return parser
-def main():
+def main(args=None):
parser = make_parser()
- args = site_config.parse_args(
- agent_class='VantagePro2Agent', parser=parser)
+ args = site_config.parse_args(agent_class='VantagePro2Agent',
+ parser=parser,
+ args=args)
init_params = False
if args.mode == 'init':
init_params = {'auto_acquire': False}
@@ -207,9 +198,9 @@ def main():
agent, runner = ocs_agent.init_site_agent(args)
vPro2 = VantagePro2Agent(agent, device_port, args.sample_freq)
- agent.register_task('init', vPro2.init_VantagePro2_task,
+ agent.register_task('init', vPro2.init,
startup=init_params)
- agent.register_process('acq', vPro2.start_acq, vPro2.stop_acq,
+ agent.register_process('acq', vPro2.acq, vPro2._stop_acq,
blocking=True)
runner.run(agent, auto_reconnect=True)
diff --git a/socs/agent/vantage_pro2/vantage_pro2.py b/socs/agents/vantagepro2/drivers.py
similarity index 100%
rename from socs/agent/vantage_pro2/vantage_pro2.py
rename to socs/agents/vantagepro2/drivers.py
index de81c40d4..235a13ff4 100644
--- a/socs/agent/vantage_pro2/vantage_pro2.py
+++ b/socs/agents/vantagepro2/drivers.py
@@ -1,8 +1,8 @@
-from serial import Serial
-import time
-import struct
import array as arr
+import struct
+import time
+from serial import Serial
# some commands require a CRC code (cyclic redundancy check) -
# these require the provided CRC table
diff --git a/socs/agents/wiregrid_actuator/__init__.py b/socs/agents/wiregrid_actuator/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/agents/wiregrid_actuator/wiregrid_actuator.py b/socs/agents/wiregrid_actuator/agent.py
similarity index 98%
rename from agents/wiregrid_actuator/wiregrid_actuator.py
rename to socs/agents/wiregrid_actuator/agent.py
index eb4190f68..160b2ce17 100644
--- a/agents/wiregrid_actuator/wiregrid_actuator.py
+++ b/socs/agents/wiregrid_actuator/agent.py
@@ -1,21 +1,12 @@
-import os
import argparse
import time
-from ocs import ocs_agent
-from ocs import site_config
+from ocs import ocs_agent, site_config
from ocs.ocs_twisted import TimeoutLock
-# add PATH to ./src directory
-# this_dir = os.path.dirname(os.path.abspath(__file__))
-# sys.path.append(os.path.join(this_dir, 'src'))
-
-on_rtd = os.environ.get('READTHEDOCS') == 'True'
-if not on_rtd:
- # import classes / configs
- from src.Actuator import Actuator
- import limitswitch_config
- import stopper_config
+import socs.agents.wiregrid_actuator.limitswitch_config as limitswitch_config
+import socs.agents.wiregrid_actuator.stopper_config as stopper_config
+from socs.agents.wiregrid_actuator.drivers.Actuator import Actuator
class WiregridActuatorAgent:
@@ -855,11 +846,11 @@ def make_parser(parser=None):
return parser
-if __name__ == '__main__':
-
+def main(args=None):
parser = make_parser()
- args = site_config.parse_args(
- agent_class='WiregridActuatorAgent', parser=parser)
+ args = site_config.parse_args(agent_class='WiregridActuatorAgent',
+ parser=parser,
+ args=args)
agent, runner = ocs_agent.init_site_agent(args)
@@ -884,3 +875,7 @@ def make_parser(parser=None):
actuator_agent.stop_acq, startup=True)
runner.run(agent, auto_reconnect=True)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/agents/wiregrid_actuator/src/Actuator.py b/socs/agents/wiregrid_actuator/drivers/Actuator.py
similarity index 99%
rename from agents/wiregrid_actuator/src/Actuator.py
rename to socs/agents/wiregrid_actuator/drivers/Actuator.py
index 08708f16a..12265c048 100644
--- a/agents/wiregrid_actuator/src/Actuator.py
+++ b/socs/agents/wiregrid_actuator/drivers/Actuator.py
@@ -1,11 +1,13 @@
# Built-in python modules
-import time
import os
+import time
+
# Specific module for actuator controller
on_rtd = os.environ.get('READTHEDOCS') == 'True'
if not on_rtd:
import gclib
-from .DigitalIO import DigitalIO
+
+from socs.agents.wiregrid_actuator.drivers.DigitalIO import DigitalIO
class Actuator:
diff --git a/agents/wiregrid_actuator/src/DigitalIO.py b/socs/agents/wiregrid_actuator/drivers/DigitalIO.py
similarity index 100%
rename from agents/wiregrid_actuator/src/DigitalIO.py
rename to socs/agents/wiregrid_actuator/drivers/DigitalIO.py
diff --git a/socs/agents/wiregrid_actuator/drivers/__init__.py b/socs/agents/wiregrid_actuator/drivers/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/agents/wiregrid_actuator/limitswitch_config.py b/socs/agents/wiregrid_actuator/limitswitch_config.py
similarity index 100%
rename from agents/wiregrid_actuator/limitswitch_config.py
rename to socs/agents/wiregrid_actuator/limitswitch_config.py
diff --git a/agents/wiregrid_actuator/stopper_config.py b/socs/agents/wiregrid_actuator/stopper_config.py
similarity index 100%
rename from agents/wiregrid_actuator/stopper_config.py
rename to socs/agents/wiregrid_actuator/stopper_config.py
diff --git a/socs/agents/wiregrid_encoder/__init__.py b/socs/agents/wiregrid_encoder/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/agents/wiregrid_encoder/wiregrid_encoder.py b/socs/agents/wiregrid_encoder/agent.py
similarity index 98%
rename from agents/wiregrid_encoder/wiregrid_encoder.py
rename to socs/agents/wiregrid_encoder/agent.py
index 9f3e312a0..3c0ff3ee9 100644
--- a/agents/wiregrid_encoder/wiregrid_encoder.py
+++ b/socs/agents/wiregrid_encoder/agent.py
@@ -1,16 +1,12 @@
-import os
-import time
-import numpy as np
import argparse
+import time
import traceback
+import numpy as np
from ocs import ocs_agent, site_config
from ocs.ocs_twisted import TimeoutLock
-# Required by OCS
-ON_RTD = os.environ.get('READTHEDOCS') == 'True'
-if not ON_RTD:
- from signal_parser import EncoderParser
+from socs.agents.wiregrid_encoder.drivers import EncoderParser
NUM_ENCODER_TO_PUBLISH = 1000
SEC_ENCODER_TO_PUBLISH = 1
@@ -350,11 +346,11 @@ def make_parser(parser=None):
return parser
-if __name__ == '__main__':
-
+def main(args=None):
parser = make_parser()
- args = site_config.parse_args(
- agent_class='WiregridEncoderAgent', parser=parser)
+ args = site_config.parse_args(agent_class='WiregridEncoderAgent',
+ parser=parser,
+ args=args)
agent, runner = ocs_agent.init_site_agent(args)
@@ -366,3 +362,7 @@ def make_parser(parser=None):
startup=True)
runner.run(agent, auto_reconnect=True)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/agents/wiregrid_encoder/signal_parser.py b/socs/agents/wiregrid_encoder/drivers.py
similarity index 99%
rename from agents/wiregrid_encoder/signal_parser.py
rename to socs/agents/wiregrid_encoder/drivers.py
index d125504f0..cdeb2e9d2 100644
--- a/agents/wiregrid_encoder/signal_parser.py
+++ b/socs/agents/wiregrid_encoder/drivers.py
@@ -1,11 +1,13 @@
+import calendar
+import select
import socket
import struct
-import select
import time
-import calendar
-import numpy as np
from collections import deque
+
+import numpy as np
import txaio
+
txaio.use_twisted()
# should be consistent with the software on beaglebone
diff --git a/socs/agents/wiregrid_kikusui/__init__.py b/socs/agents/wiregrid_kikusui/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/agents/wiregrid_kikusui/kikusui_agent.py b/socs/agents/wiregrid_kikusui/agent.py
similarity index 98%
rename from agents/wiregrid_kikusui/kikusui_agent.py
rename to socs/agents/wiregrid_kikusui/agent.py
index ff32a473f..1008a19a4 100644
--- a/agents/wiregrid_kikusui/kikusui_agent.py
+++ b/socs/agents/wiregrid_kikusui/agent.py
@@ -1,17 +1,13 @@
-import os
import argparse
import time
-import numpy as np
+import numpy as np
from ocs import ocs_agent, site_config
-from ocs.ocs_twisted import TimeoutLock
from ocs.ocs_client import OCSClient
+from ocs.ocs_twisted import TimeoutLock
-ON_RTD = os.environ.get('READTHEDOCS') == 'True'
-if not ON_RTD:
- # import classes
- from socs.agent import pmx
- from src.common import openlog, writelog
+from socs.agents.wiregrid_kikusui.drivers.common import openlog, writelog
+from socs.common import pmx
class WiregridKikusuiAgent:
@@ -622,10 +618,11 @@ def make_parser(parser=None):
return parser
-if __name__ == '__main__':
+def main(args=None):
parser = make_parser()
- args = site_config.parse_args(
- agent_class='WiregridKikusuiAgent', parser=parser)
+ args = site_config.parse_args(agent_class='WiregridKikusuiAgent',
+ parser=parser,
+ args=args)
agent, runner = ocs_agent.init_site_agent(args)
kikusui_agent = WiregridKikusuiAgent(agent, kikusui_ip=args.kikusui_ip,
@@ -645,3 +642,7 @@ def make_parser(parser=None):
kikusui_agent.stepwise_rotation)
runner.run(agent, auto_reconnect=True)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/socs/agents/wiregrid_kikusui/drivers/__init__.py b/socs/agents/wiregrid_kikusui/drivers/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/agents/wiregrid_kikusui/src/common.py b/socs/agents/wiregrid_kikusui/drivers/common.py
similarity index 100%
rename from agents/wiregrid_kikusui/src/common.py
rename to socs/agents/wiregrid_kikusui/drivers/common.py
diff --git a/socs/agents/xy_stage/__init__.py b/socs/agents/xy_stage/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/agents/xy_stage/xy_latrt_agent.py b/socs/agents/xy_stage/agent.py
similarity index 75%
rename from agents/xy_stage/xy_latrt_agent.py
rename to socs/agents/xy_stage/agent.py
index 299dac1b7..b5e094b39 100644
--- a/agents/xy_stage/xy_latrt_agent.py
+++ b/socs/agents/xy_stage/agent.py
@@ -1,27 +1,26 @@
-import os
import argparse
+import os
import time
-import txaio
+import txaio
+from ocs import ocs_agent, site_config
+from ocs.ocs_twisted import Pacemaker, TimeoutLock
ON_RTD = os.environ.get('READTHEDOCS') == 'True'
if not ON_RTD:
- from ocs import ocs_agent, site_config
- from ocs.ocs_twisted import TimeoutLock, Pacemaker
-
# yes I shouldn't have named that module agent
from xy_agent.xy_connect import XY_Stage
class LATRtXYStageAgent:
"""
- Agent for connecting to the LATRt XY Stages
+ Agent for connecting to the LATRt XY Stages.
Args:
- ip_addr: IP address where RPi server is running
- port: Port the RPi Server is listening on
- mode: 'acq': Start data acquisition on initialize
- samp: default sampling frequency in Hz
+ ip_addr: IP address where RPi server is running.
+ port: Port the RPi Server is listening on.
+ mode: 'acq': Start data acquisition on initialize.
+ samp: Default sampling frequency in Hz.
"""
def __init__(self, agent, ip_addr, port, mode=None, samp=2):
@@ -54,17 +53,13 @@ def __init__(self, agent, ip_addr, port, mode=None, samp=2):
agg_params=agg_params,
buffer_time=0)
- def init_xy_stage_task(self, session, params=None):
- """init_xy_stage_task(params=None)
- Perform first time setup for communivation with XY stages.
+ @ocs_agent.param('_')
+ def init_xy_stage(self, session, params=None):
+ """init_xy_stage()
- Args:
- params (dict): Parameters dictionary for passing parameters to
- task.
- """
+ **Task** - Perform first time setup for communication with XY stages.
- if params is None:
- params = {}
+ """
self.log.debug("Trying to acquire lock")
with self.lock.acquire_timeout(timeout=0, job='init') as acquired:
@@ -86,10 +81,17 @@ def init_xy_stage_task(self, session, params=None):
self.agent.start('acq')
return True, 'XY Stages Initialized.'
+ @ocs_agent.param('distance', type=float)
+ @ocs_agent.param('velocity', type=float, check=lambda x: 0 <= x < 1.2)
def move_x_cm(self, session, params):
- """
- params:
- dict: { 'distance': float, 'velocity':float < 1.2}
+ """move_x_cm(distance, velocity)
+
+ **Task** - Move the X axis.
+
+ Parameters:
+ distance (float): Distance to move in cm.
+ velocity (float): Velocity to move at. Must be less than 1.2.
+
"""
with self.lock.acquire_timeout(timeout=3, job='move_x_cm') as acquired:
@@ -112,10 +114,17 @@ def move_x_cm(self, session, params):
break
return True, "X Move Complete"
+ @ocs_agent.param('distance', type=float)
+ @ocs_agent.param('velocity', type=float, check=lambda x: 0 <= x < 1.2)
def move_y_cm(self, session, params):
- """
- params:
- dict: { 'distance': float, 'velocity':float < 1.2}
+ """move_y_cm(distance, velocity)
+
+ **Task** - Move the Y axis.
+
+ Parameters:
+ distance (float): Distance to move in cm.
+ velocity (float): Velocity to move at. Must be less than 1.2.
+
"""
with self.lock.acquire_timeout(timeout=3, job='move_y_cm') as acquired:
@@ -137,10 +146,15 @@ def move_y_cm(self, session, params):
break
return True, "Y Move Complete"
+ @ocs_agent.param('position', type=tuple)
def set_position(self, session, params):
- """
- params:
- dict: {'position': (float, float)}
+ """set_position(position)
+
+ **Task** - Set position of the XY stage.
+
+ Parameters:
+ position (tuple): (X, Y) position.
+
"""
with self.lock.acquire_timeout(timeout=3, job='set_position') as acquired:
if not acquired:
@@ -150,18 +164,26 @@ def set_position(self, session, params):
self.xy_stage.position = params['position']
return True, "Position Updated"
- def start_acq(self, session, params=None):
- """
- params:
- dict: {'sampling_frequency': float, sampling rate in Hz}
+ @ocs_agent.param('sampling_frequency', type=float)
+ def acq(self, session, params=None):
+ """acq(sampling_frequency=2)
+
+ **Process** - Run data acquisition.
+
+ Parameters:
+ sampling_frequency (float): Sampling rate to acquire data at.
+ Defaults to value set in site config file (or 2 Hz if
+ unspecified.)
- The most recent positions are stored in the session.data object in the
- format::
+ Notes:
+ The most recent positions are stored in the session.data object in the
+ format::
- {"positions":
- {"x": x position in cm,
- "y": y position in cm}
- }
+ >>> response.session['data']
+ {"positions":
+ {"x": x position in cm,
+ "y": y position in cm}
+ }
"""
if params is None:
@@ -202,7 +224,7 @@ def start_acq(self, session, params=None):
session.data.update(data['data'])
return True, 'Acquisition exited cleanly.'
- def stop_acq(self, session, params=None):
+ def _stop_acq(self, session, params=None):
"""
params:
dict: {}
@@ -230,10 +252,10 @@ def make_parser(parser=None):
return parser
-if __name__ == '__main__':
+def main(args=None):
# For logging
txaio.use_twisted()
- LOG = txaio.make_logger()
+ txaio.make_logger()
# Start logging
txaio.start_logging(level=os.environ.get("LOGLEVEL", "info"))
@@ -241,17 +263,23 @@ def make_parser(parser=None):
parser = make_parser()
# Interpret options in the context of site_config.
- args = site_config.parse_args(agent_class='LATRtXYStageAgent', parser=parser)
+ args = site_config.parse_args(agent_class='LATRtXYStageAgent',
+ parser=parser,
+ args=args)
agent, runner = ocs_agent.init_site_agent(args)
xy_agent = LATRtXYStageAgent(agent, args.ip_address, args.port, args.mode, args.sampling_frequency)
- agent.register_task('init_xy_stage', xy_agent.init_xy_stage_task)
+ agent.register_task('init_xy_stage', xy_agent.init_xy_stage)
agent.register_task('move_x_cm', xy_agent.move_x_cm)
agent.register_task('move_y_cm', xy_agent.move_y_cm)
agent.register_task('set_position', xy_agent.set_position)
- agent.register_process('acq', xy_agent.start_acq, xy_agent.stop_acq)
+ agent.register_process('acq', xy_agent.acq, xy_agent._stop_acq)
runner.run(agent, auto_reconnect=True)
+
+
+if __name__ == '__main__':
+ main()
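And a matching sketch for the LATRt XY stage tasks; the instance-id and move parameters are illustrative, and the parameter checks require velocities below 1.2:

from ocs.ocs_client import OCSClient

xy = OCSClient('xy-stage')                 # placeholder instance-id
xy.init_xy_stage()
xy.move_x_cm(distance=2.0, velocity=1.0)   # velocity must be < 1.2
xy.move_y_cm(distance=-1.5, velocity=0.5)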
diff --git a/socs/common/__init__.py b/socs/common/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/socs/agent/moxaSerial.py b/socs/common/moxa_serial.py
similarity index 100%
rename from socs/agent/moxaSerial.py
rename to socs/common/moxa_serial.py
index 2becd431e..e738b6c6b 100644
--- a/socs/agent/moxaSerial.py
+++ b/socs/common/moxa_serial.py
@@ -4,8 +4,8 @@
# readbuf() dumps current buffer contents
# readpacket() has old broken behavior of read() - lowest level / fastest
-import time
import socket
+import time
MOXA_DEFAULT_TIMEOUT = 1.0
diff --git a/socs/agent/pmx.py b/socs/common/pmx.py
similarity index 99%
rename from socs/agent/pmx.py
rename to socs/common/pmx.py
index 953327aee..9da5fd54e 100644
--- a/socs/agent/pmx.py
+++ b/socs/common/pmx.py
@@ -1,8 +1,9 @@
+import sys
import time
+
import serial
-import sys
-from socs.agent import moxaSerial as mx
+from socs.common import moxa_serial as mx
class PMX:
diff --git a/socs/agent/prologix_interface.py b/socs/common/prologix_interface.py
similarity index 100%
rename from socs/agent/prologix_interface.py
rename to socs/common/prologix_interface.py
index 68072512c..351521096 100644
--- a/socs/agent/prologix_interface.py
+++ b/socs/common/prologix_interface.py
@@ -1,5 +1,5 @@
-import time
import socket
+import time
class PrologixInterface:
diff --git a/socs/db/suprsync.py b/socs/db/suprsync.py
index e5bf3d704..9e785c77e 100644
--- a/socs/db/suprsync.py
+++ b/socs/db/suprsync.py
@@ -1,12 +1,12 @@
import os
-import time
import subprocess
import tempfile
-import txaio
+import time
-from sqlalchemy import (Column, create_engine, Integer, String, Float, Boolean)
-from sqlalchemy.orm import sessionmaker
+import txaio
+from sqlalchemy import Boolean, Column, Float, Integer, String, create_engine
from sqlalchemy.ext.declarative import declarative_base
+from sqlalchemy.orm import sessionmaker
from socs.util import get_md5sum
diff --git a/agents/ibootbar/mibs/IBOOTPDU-MIB.py b/socs/mibs/IBOOTPDU-MIB.py
similarity index 100%
rename from agents/ibootbar/mibs/IBOOTPDU-MIB.py
rename to socs/mibs/IBOOTPDU-MIB.py
diff --git a/agents/meinberg_m1000/mibs/MBG-SNMP-LTNG-MIB.py b/socs/mibs/MBG-SNMP-LTNG-MIB.py
similarity index 100%
rename from agents/meinberg_m1000/mibs/MBG-SNMP-LTNG-MIB.py
rename to socs/mibs/MBG-SNMP-LTNG-MIB.py
diff --git a/agents/meinberg_m1000/mibs/MBG-SNMP-ROOT-MIB.py b/socs/mibs/MBG-SNMP-ROOT-MIB.py
similarity index 100%
rename from agents/meinberg_m1000/mibs/MBG-SNMP-ROOT-MIB.py
rename to socs/mibs/MBG-SNMP-ROOT-MIB.py
diff --git a/agents/meinberg_m1000/mibs/SNMPv2-MIB.py b/socs/mibs/SNMPv2-MIB.py
similarity index 100%
rename from agents/meinberg_m1000/mibs/SNMPv2-MIB.py
rename to socs/mibs/SNMPv2-MIB.py
diff --git a/socs/mibs/__init__.py b/socs/mibs/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/socs/plugin.py b/socs/plugin.py
new file mode 100644
index 000000000..1268f5ba3
--- /dev/null
+++ b/socs/plugin.py
@@ -0,0 +1,38 @@
+package_name = 'socs'
+agents = {
+ 'ACUAgent': {'module': 'socs.agents.acu.agent', 'entry_point': 'main'},
+ 'BlueforsAgent': {'module': 'socs.agents.bluefors.agent', 'entry_point': 'main'},
+ 'CrateAgent': {'module': 'socs.agents.smurf_crate_monitor.agent', 'entry_point': 'main'},
+ 'CryomechCPAAgent': {'module': 'socs.agents.cryomech_cpa.agent', 'entry_point': 'main'},
+ 'FPGAAgent': {'module': 'socs.agents.holo_fpga.agent', 'entry_point': 'main'},
+ 'FTSAerotechAgent': {'module': 'socs.agents.fts_aerotech.agent', 'entry_point': 'main'},
+ 'HWPBBBAgent': {'module': 'socs.agents.hwp_encoder.agent', 'entry_point': 'main'},
+ 'HWPPicoscopeAgent': {'module': 'socs.agents.hwp_picoscope.agent', 'entry_point': 'main'},
+ 'ibootbarAgent': {'module': 'socs.agents.ibootbar.agent', 'entry_point': 'main'},
+ 'LabJackAgent': {'module': 'socs.agents.labjack.agent', 'entry_point': 'main'},
+ 'Lakeshore240Agent': {'module': 'socs.agents.lakeshore240.agent', 'entry_point': 'main'},
+ 'Lakeshore336Agent': {'module': 'socs.agents.lakeshore336.agent', 'entry_point': 'main'},
+ 'Lakeshore370Agent': {'module': 'socs.agents.lakeshore370.agent', 'entry_point': 'main'},
+ 'Lakeshore372Agent': {'module': 'socs.agents.lakeshore372.agent', 'entry_point': 'main'},
+ 'Lakeshore425Agent': {'module': 'socs.agents.lakeshore425.agent', 'entry_point': 'main'},
+ 'LATRtXYStageAgent': {'module': 'socs.agents.xy_stage.agent', 'entry_point': 'main'},
+ 'MagpieAgent': {'module': 'socs.agents.magpie.agent', 'entry_point': 'main'},
+ 'MeinbergM1000Agent': {'module': 'socs.agents.meinberg_m1000.agent', 'entry_point': 'main'},
+ 'PfeifferAgent': {'module': 'socs.agents.pfeiffer_tpg366.agent', 'entry_point': 'main'},
+ 'PfeifferTC400Agent': {'module': 'socs.agents.pfeiffer_tc400.agent', 'entry_point': 'main'},
+ 'PysmurfController': {'module': 'socs.agents.pysmurf_controller.agent', 'entry_point': 'main'},
+ 'PysmurfMonitor': {'module': 'socs.agents.pysmurf_monitor.agent', 'entry_point': 'main'},
+ 'RotationAgent': {'module': 'socs.agents.hwp_rotation.agent', 'entry_point': 'main'},
+ 'ScpiPsuAgent': {'module': 'socs.agents.scpi_psu.agent', 'entry_point': 'main'},
+ 'SmurfFileEmulator': {'module': 'socs.agents.smurf_file_emulator.agent', 'entry_point': 'main'},
+ 'SmurfStreamSimulator': {'module': 'socs.agents.smurf_stream_simulator.agent', 'entry_point': 'main'},
+ 'SupRsync': {'module': 'socs.agents.suprsync.agent', 'entry_point': 'main'},
+ 'SynaccessAgent': {'module': 'socs.agents.synacc.agent', 'entry_point': 'main'},
+ 'SynthAgent': {'module': 'socs.agents.holo_synth.agent', 'entry_point': 'main'},
+ 'TektronixAWGAgent': {'module': 'socs.agents.tektronix3021c.agent', 'entry_point': 'main'},
+ 'ThorlabsMC2000BAgent': {'module': 'socs.agents.thorlabs_mc2000b.agent', 'entry_point': 'main'},
+ 'VantagePro2Agent': {'module': 'socs.agents.vantagepro2.agent', 'entry_point': 'main'},
+ 'WiregridActuatorAgent': {'module': 'socs.agents.wiregrid_actuator.agent', 'entry_point': 'main'},
+ 'WiregridEncoderAgent': {'module': 'socs.agents.wiregrid_encoder.agent', 'entry_point': 'main'},
+ 'WiregridKikusuiAgent': {'module': 'socs.agents.wiregrid_kikusui.agent', 'entry_point': 'main'},
+}
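This registry is what allows agents to be located by class name after the package reorganization. A sketch of how an entry could be resolved by hand; the resolve() helper is illustrative only and not part of OCS or socs:

import importlib

from socs import plugin

def resolve(agent_class):
    # Map a registered agent class name onto its main() entry point.
    entry = plugin.agents[agent_class]
    module = importlib.import_module(entry['module'])
    return getattr(module, entry['entry_point'])

main = resolve('SynaccessAgent')   # -> socs.agents.synacc.agent.main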
diff --git a/socs/snmp.py b/socs/snmp.py
index b1e6df541..a8eda09af 100644
--- a/socs/snmp.py
+++ b/socs/snmp.py
@@ -1,12 +1,20 @@
+import os
+
import txaio
+from pysnmp.hlapi.twisted import (CommunityData, ContextData, ObjectIdentity,
+ ObjectType, SnmpEngine, UdpTransportTarget,
+ UsmUserData, getCmd, setCmd)
-from pysnmp.hlapi.twisted import getCmd, setCmd, SnmpEngine, CommunityData, UdpTransportTarget,\
- ContextData, ObjectType, ObjectIdentity, UsmUserData
+from socs import mibs
# For logging
txaio.use_twisted()
+# https://pysnmp.readthedocs.io/en/latest/faq/pass-custom-mib-to-manager.html
+MIB_SOURCE = f"file://{os.path.dirname(mibs.__file__)}"
+
+
class SNMPTwister:
"""Helper class for handling SNMP communication with twisted.
@@ -124,7 +132,11 @@ def get(self, oid_list, version):
instances representing MIB variables returned in SNMP response.
"""
- oid_list = [ObjectType(ObjectIdentity(*x)) if isinstance(x, tuple) else x for x in oid_list]
+ oid_list = [ObjectType(ObjectIdentity(*x).addAsn1MibSource(MIB_SOURCE))
+ if isinstance(x, tuple)
+ else x
+ for x
+ in oid_list]
if version == 1:
version_object = CommunityData('public', mpModel=0) # SNMPv1
@@ -145,7 +157,7 @@ def get(self, oid_list, version):
return datagram
- def set(self, oid_list, version, setvalue):
+ def set(self, oid_list, version, setvalue, community_name='private'):
"""Issue a setCmd to set SNMP OID states.
See `Modifying MIB variables`_ for more info on setting OID states.
@@ -172,12 +184,16 @@ def set(self, oid_list, version, setvalue):
instances representing MIB variables returned in SNMP response.
"""
- oid_list = [ObjectType(ObjectIdentity(*x), setvalue) if isinstance(x, tuple) else x for x in oid_list]
+ oid_list = [ObjectType(ObjectIdentity(*x).addAsn1MibSource(MIB_SOURCE), setvalue)
+ if isinstance(x, tuple)
+ else x
+ for x
+ in oid_list]
if version == 1:
- version_object = CommunityData('private', mpModel=0) # SNMPv1
+ version_object = CommunityData(community_name, mpModel=0) # SNMPv1
elif version == 2:
- version_object = CommunityData('private') # SNMPv2c
+ version_object = CommunityData(community_name) # SNMPv2c
elif version == 3:
version_object = UsmUserData('ocs') # SNMPv3 (no auth, no privacy)
else:
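With these changes, OID tuples passed to SNMPTwister are resolved against the MIB files now bundled in socs/mibs, and SET requests can carry a non-default community string. A rough sketch, in which the device address, OID, and value are placeholders:

from pysnmp.proto.rfc1902 import Integer
from socs.snmp import SNMPTwister

snmp = SNMPTwister('10.10.10.50')   # placeholder PDU address
# Resolve ('IBOOTPDU-MIB', 'outletControl', 0) against socs/mibs and set it to 1.
d = snmp.set([('IBOOTPDU-MIB', 'outletControl', 0)], version=2,
             setvalue=Integer(1), community_name='private')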
diff --git a/socs/testing/device_emulator.py b/socs/testing/device_emulator.py
index dbd6be86e..98cd2eb8e 100644
--- a/socs/testing/device_emulator.py
+++ b/socs/testing/device_emulator.py
@@ -1,10 +1,11 @@
-import serial
+import shutil
import socket
-import time
import subprocess
-import shutil
-import pytest
import threading
+import time
+
+import pytest
+import serial
def create_device_emulator(responses, relay_type, port=9001, encoding='utf-8'):
diff --git a/tests/.coveragerc b/tests/.coveragerc
index aa1398abe..288db76cb 100644
--- a/tests/.coveragerc
+++ b/tests/.coveragerc
@@ -1,7 +1,6 @@
[run]
source =
../socs
- ../agents/
omit =
# omit versioneer
@@ -10,3 +9,9 @@ omit =
# omit lab only Agents for now
*/Lakeshore336.py
*/Lakeshore370.py
+
+ # omit mibs
+ */mibs/*.py
+
+[coverage:report]
+skip_empty = true
diff --git a/tests/agents/hwp_rotation/test_pid_controller.py b/tests/agents/hwp_rotation/test_pid_controller.py
index 14e12ea90..8150a79ba 100644
--- a/tests/agents/hwp_rotation/test_pid_controller.py
+++ b/tests/agents/hwp_rotation/test_pid_controller.py
@@ -1,7 +1,4 @@
-import sys
-sys.path.insert(0, '../agents/hwp_rotation/')
-from src.pid_controller import PID
-
+from socs.agents.hwp_rotation.drivers.pid_controller import PID
from socs.testing.device_emulator import create_device_emulator
pid_emu = create_device_emulator(
diff --git a/tests/agents/test_acu_agent.py b/tests/agents/test_acu_agent.py
new file mode 100644
index 000000000..921fe550c
--- /dev/null
+++ b/tests/agents/test_acu_agent.py
@@ -0,0 +1 @@
+from socs.agents.acu.agent import ACUAgent # noqa: F401
diff --git a/tests/agents/test_bluefors_agent.py b/tests/agents/test_bluefors_agent.py
index 528b25414..933d179e2 100644
--- a/tests/agents/test_bluefors_agent.py
+++ b/tests/agents/test_bluefors_agent.py
@@ -1,9 +1,7 @@
-import sys
-sys.path.insert(0, '../agents/bluefors/')
-from bluefors_log_tracker import BlueforsAgent
-
from unittest import mock
+from socs.agents.bluefors.agent import BlueforsAgent
+
def test_bluefors():
mock_agent = mock.MagicMock()
diff --git a/tests/agents/test_cryomech_cpa_agent.py b/tests/agents/test_cryomech_cpa_agent.py
index 861e23044..c4f666534 100644
--- a/tests/agents/test_cryomech_cpa_agent.py
+++ b/tests/agents/test_cryomech_cpa_agent.py
@@ -1,3 +1 @@
-import sys
-sys.path.insert(0, '../agents/cryomech_cpa/')
-from cryomech_cpa_agent import PTCAgent # noqa: F401
+from socs.agents.cryomech_cpa.agent import PTCAgent # noqa: F401
diff --git a/tests/agents/test_fts_aerotech_agent.py b/tests/agents/test_fts_aerotech_agent.py
index b0c0f22b6..87d41b13f 100644
--- a/tests/agents/test_fts_aerotech_agent.py
+++ b/tests/agents/test_fts_aerotech_agent.py
@@ -1,3 +1 @@
-import sys
-sys.path.insert(0, '../agents/fts_aerotech_stage/')
-from fts_aerotech_agent import FTSAerotechAgent # noqa: F401
+from socs.agents.fts_aerotech.agent import FTSAerotechAgent # noqa: F401
diff --git a/tests/agents/test_hwp_encoder_agent.py b/tests/agents/test_hwp_encoder_agent.py
new file mode 100644
index 000000000..460ccdefd
--- /dev/null
+++ b/tests/agents/test_hwp_encoder_agent.py
@@ -0,0 +1 @@
+from socs.agents.hwp_encoder.agent import HWPBBBAgent # noqa: F401
diff --git a/tests/agents/test_hwpbbb_agent.py b/tests/agents/test_hwpbbb_agent.py
deleted file mode 100644
index 7e2aacd79..000000000
--- a/tests/agents/test_hwpbbb_agent.py
+++ /dev/null
@@ -1,3 +0,0 @@
-import sys
-sys.path.insert(0, '../agents/chwp/')
-from hwpbbb_agent import HWPBBBAgent # noqa: F401
diff --git a/tests/agents/test_ls240_agent.py b/tests/agents/test_ls240_agent.py
index b1fad59f8..cbda4be9c 100644
--- a/tests/agents/test_ls240_agent.py
+++ b/tests/agents/test_ls240_agent.py
@@ -1,3 +1 @@
-import sys
-sys.path.insert(0, '../agents/lakeshore240/')
-from LS240_agent import LS240_Agent # noqa: F401
+from socs.agents.lakeshore240.agent import LS240_Agent # noqa: F401
diff --git a/tests/agents/test_ls336_agent.py b/tests/agents/test_ls336_agent.py
new file mode 100644
index 000000000..b39b705b0
--- /dev/null
+++ b/tests/agents/test_ls336_agent.py
@@ -0,0 +1 @@
+from socs.agents.lakeshore336.agent import LS336_Agent # noqa: F401
diff --git a/tests/agents/test_ls370_agent.py b/tests/agents/test_ls370_agent.py
new file mode 100644
index 000000000..d79bee8fe
--- /dev/null
+++ b/tests/agents/test_ls370_agent.py
@@ -0,0 +1 @@
+from socs.agents.lakeshore370.agent import LS370_Agent # noqa: F401
diff --git a/tests/agents/test_ls372_agent.py b/tests/agents/test_ls372_agent.py
index eeb615614..34143e537 100644
--- a/tests/agents/test_ls372_agent.py
+++ b/tests/agents/test_ls372_agent.py
@@ -1,13 +1,11 @@
-import sys
-sys.path.insert(0, '../agents/lakeshore372/')
-from LS372_agent import LS372_Agent
+from unittest import mock
+import pytest
+import txaio
from ocs.ocs_agent import OpSession
-import pytest
-from unittest import mock
+from socs.agents.lakeshore372.agent import LS372_Agent
-import txaio
txaio.use_twisted()
@@ -127,7 +125,7 @@ def test_ls372_init_lakeshore_already_initialized(agent):
# If we don't patch the reactor out, it'll mess up pytest when stop is called
-@mock.patch('LS372_agent.reactor', mock.MagicMock())
+@mock.patch('socs.agents.lakeshore372.agent.reactor', mock.MagicMock())
@mock.patch('socs.Lakeshore.Lakeshore372.LS372.msg', mock_372_msg())
def test_ls372_init_lakeshore_failed_connection(agent):
"""Leaving off the connection Mock, if the connection fails the init task
@@ -140,7 +138,7 @@ def test_ls372_init_lakeshore_failed_connection(agent):
# If we don't patch the reactor out, it'll mess up pytest when stop is called
-@mock.patch('LS372_agent.reactor', mock.MagicMock())
+@mock.patch('socs.agents.lakeshore372.agent.reactor', mock.MagicMock())
@mock.patch('socs.Lakeshore.Lakeshore372._establish_socket_connection', mock_failed_connection())
@mock.patch('socs.Lakeshore.Lakeshore372.LS372.msg', mock_372_msg())
def test_ls372_init_lakeshore_unhandled_error(agent):
diff --git a/tests/agents/test_ls425_agent.py b/tests/agents/test_ls425_agent.py
index 6ca1ba1c2..563594e05 100644
--- a/tests/agents/test_ls425_agent.py
+++ b/tests/agents/test_ls425_agent.py
@@ -1,3 +1 @@
-import sys
-sys.path.insert(0, '../agents/lakeshore425/')
-from LS425_agent import LS425Agent # noqa: F401
+from socs.agents.lakeshore425.agent import LS425Agent # noqa: F401
diff --git a/tests/agents/test_magpie_agent.py b/tests/agents/test_magpie_agent.py
new file mode 100644
index 000000000..21ed329ad
--- /dev/null
+++ b/tests/agents/test_magpie_agent.py
@@ -0,0 +1 @@
+from socs.agents.magpie.agent import MagpieAgent # noqa: F401
diff --git a/tests/agents/test_meinberg_m1000_agent.py b/tests/agents/test_meinberg_m1000_agent.py
index 921be5f78..96b0170ff 100644
--- a/tests/agents/test_meinberg_m1000_agent.py
+++ b/tests/agents/test_meinberg_m1000_agent.py
@@ -1,3 +1 @@
-import sys
-sys.path.insert(0, '../agents/meinberg_m1000/')
-from meinberg_m1000_agent import MeinbergM1000Agent # noqa: F401
+from socs.agents.meinberg_m1000.agent import MeinbergM1000Agent # noqa: F401
diff --git a/tests/agents/test_ocs_plugin_so.py b/tests/agents/test_ocs_plugin_so.py
index 56ec57da2..50f9ec612 100644
--- a/tests/agents/test_ocs_plugin_so.py
+++ b/tests/agents/test_ocs_plugin_so.py
@@ -1,6 +1,4 @@
-import sys
-sys.path.insert(0, '../agents/')
-import ocs_plugin_so
+import socs.agents.ocs_plugin_so as ocs_plugin_so
def test_agent_script_reg():
diff --git a/tests/agents/test_pfeiffer_tpg366_agent.py b/tests/agents/test_pfeiffer_tpg366_agent.py
index 4db8bced5..55dca1470 100644
--- a/tests/agents/test_pfeiffer_tpg366_agent.py
+++ b/tests/agents/test_pfeiffer_tpg366_agent.py
@@ -1,3 +1 @@
-import sys
-sys.path.insert(0, '../agents/pfeiffer_tpg366/')
-from pfeiffer_tpg366_agent import PfeifferAgent # noqa: F401
+from socs.agents.pfeiffer_tpg366.agent import PfeifferAgent # noqa: F401
diff --git a/tests/agents/test_pysmurf_controller_agent.py b/tests/agents/test_pysmurf_controller_agent.py
new file mode 100644
index 000000000..355dc319a
--- /dev/null
+++ b/tests/agents/test_pysmurf_controller_agent.py
@@ -0,0 +1,320 @@
+from unittest import mock
+
+import numpy as np
+import pytest
+import txaio
+from ocs.ocs_agent import OpSession
+
+from socs.agents.pysmurf_controller.agent import PysmurfController, make_parser
+
+txaio.use_twisted()
+
+
+# Mocks and fixtures
+def mock_pysmurf(self, session=None, load_tune=False, **kwargs):
+ """mock_pysmurf()
+
+ **Mock** - Mock a pysmurf instance. Used to patch _get_smurf_control() in the PysmurfController.
+
+ Returns
+ -------
+ S : mock.MagicMock()
+ Mocked pysmurf instance with defined return values for attributes of S.
+ cfg : mock.MagicMock()
+ Mocked DetConfig of sodetlib with defined return values for attributes of cfg.
+ """
+
+ # Mock S and edit attributes
+ S = mock.MagicMock()
+ S.C.get_fw_version.return_value = [4, 1, 1]
+ S.C.read_ps_en.return_value = 3
+ S.C.list_of_c02_amps = ['50k', 'hemt']
+ S.C.list_of_c04_amps = ['50k1', '50k2', 'hemt1', 'hemt2']
+ S.estimate_phase_delay.side_effect = [[15, 15], [15, 15], [15, 15], [15, 15],
+ [15, 15], [15, 15], [15, 15], [15, 15]]
+ S._pic_to_bias_group = np.array([[0, 0], [1, 1], [2, 2], [3, 3], [4, 4], [5, 5], [6, 6], [7, 7], [8, 8], [9, 9], [10, 10], [11, 11],
+ [12, 12], [13, 13], [14, 14], [15, 15]])
+ S._bias_group_to_pair = np.array([[0, 1, 2], [1, 3, 4], [2, 5, 6], [3, 7, 8], [4, 9, 10], [5, 11, 12], [6, 13, 14], [7, 15, 16],
+ [8, 17, 18], [9, 19, 20], [10, 21, 22], [11, 23, 24], [12, 25, 26], [13, 27, 28], [14, 29, 30]])
+ S._n_bias_groups = 15
+ sync_flag_array = []
+ tracking_array = []
+ for i in range(8):
+ sync_flag_array.append([0, 1])
+ tracking_array.append([np.array([np.array([0]), np.array([0])]), np.array([np.array([0]), np.array([0])]), np.array([np.array([0]), np.array([0])])])
+ S.tracking_setup.side_effect = tracking_array
+ S.make_sync_flag.side_effect = sync_flag_array
+ S._caget.return_value = 0
+ S.high_low_current_ratio = 6.08
+ S.C.relay_address = 0x2
+ S.get_cryo_card_relays.return_value = 80000
+ S._rtm_slow_dac_bit_to_volt = (2 * 10. / (2**20))
+ S.get_tes_bias_bipolar.return_value = 10.
+ S.get_tes_bias_bipolar_array.return_value = np.full((12, ), 10.)
+
+ # Mock cfg and edit attributes
+ cfg = mock.MagicMock()
+ exp_defaults = {
+ # General stuff
+ 'downsample_factor': 20, 'coupling_mode': 'dc', 'synthesis_scale': 1,
+
+ # Amp stuff
+ "amps_to_bias": ['hemt', 'hemt1', 'hemt2', '50k', '50k1', '50k2'],
+ "amp_enable_wait_time": 10.0, "amp_step_wait_time": 0.2,
+
+ "amp_50k_init_gate_volt": -0.5, "amp_50k_drain_current": 15.0,
+ "amp_50k_gate_volt": None, "amp_50k_drain_current_tolerance": 0.2,
+ "amp_hemt_init_gate_volt": -1.0, "amp_hemt_drain_current": 8.0,
+ "amp_hemt_gate_volt": None, "amp_hemt_drain_current_tolerance": 0.2,
+
+ "amp_50k1_init_gate_volt": -0.5, "amp_50k1_drain_current": 15.0,
+ "amp_50k1_gate_volt": None, "amp_50k1_drain_current_tolerance": 0.2,
+ "amp_50k1_drain_volt": 4,
+ "amp_50k2_init_gate_volt": -0.5, "amp_50k2_drain_current": 15.0,
+ "amp_50k2_gate_volt": None, "amp_50k2_drain_current_tolerance": 0.2,
+ "amp_50k2_drain_volt": 4,
+
+ "amp_hemt1_init_gate_volt": -1.0, "amp_hemt1_drain_current": 8.0,
+ "amp_hemt1_gate_volt": None, "amp_hemt1_drain_current_tolerance": 0.2,
+ "amp_hemt1_drain_volt": 0.6,
+ "amp_hemt2_init_gate_volt": -1.0, "amp_hemt2_drain_current": 8.0,
+ "amp_hemt2_gate_volt": None, "amp_hemt2_drain_current_tolerance": 0.2,
+ "amp_hemt2_drain_volt": 0.6,
+
+ # Find freq
+ 'res_amp_cut': 0.01, 'res_grad_cut': 0.01,
+
+ # Tracking stuff
+ "flux_ramp_rate_khz": 4, "init_frac_pp": 0.4, "nphi0": 5,
+ "f_ptp_range": [10, 200], "df_ptp_range": [0, 50], "r2_min": 0.9,
+ "min_good_tracking_frac": 0.8,
+ 'feedback_start_frac': 0.02, 'feedback_end_frac': 0.98,
+
+ # Misc files
+ "tunefile": None, "bgmap_file": None, "iv_file": None,
+ "res_fit_file": None,
+ }
+ cfg.dev.exp.__getitem__.side_effect = exp_defaults.__getitem__
+
+ return S, cfg
+
+
+def mock_np_save():
+ """mock_np_save()
+
+ **Mock** - Mock save() in numpy to avoid actually saving files.
+ """
+ return mock.MagicMock()
+
+
+def mock_plt_savefig():
+ """mock_plt_savefig()
+
+ **Mock** - Mock savefig() in matplotlib to avoid actually saving figures.
+ """
+ return mock.MagicMock()
+
+
+def mock_take_noise(S, cfg, acq_time, **kwargs):
+ """mock_take_noise()
+
+ **Mock** - Mock take_noise() in sodetlib.
+ """
+ am = mock.MagicMock()
+ outdict = {'noise_pars': 0,
+ 'bands': 0,
+ 'channels': 0,
+ 'band_medians': 0,
+ 'f': 0,
+ 'axx': 0,
+ 'bincenters': 0,
+ 'lowfn': 0,
+ 'low_f_10mHz': 0}
+ return am, outdict
+
+
+def mock_ivanalysis(S, cfg, run_kwargs, sid, start_times, stop_times):
+ """mock_ivanalysis()
+
+ **Mock** - Mock IVAnalysis class in sodetlib.
+ """
+ iva = mock.MagicMock()
+ # iva.load.return_value = iva
+ iva.R = np.full((2, 12), 1)
+ iva.R_n = np.full((2, ), 2)
+ iva.bgmap = np.zeros((12, 2))
+ iva.v_bias = np.full((12, ), 2)
+ return iva
+
+
+def mock_set_current_mode(S, bgs, mode, const_current=True):
+ """mock_set_current_mode()
+
+ **Mock** - Mock set_current_mode() in sodetlib.
+ """
+ return mock.MagicMock()
+
+
+def mock_biasstepanalysis(S, cfg, bgs, run_kwargs):
+ """mock_biasstepanalysis()
+
+ **Mock** - Mock BiasStepAnalysis class in sodetlib.
+ """
+ bsa = mock.MagicMock()
+ bsa.sid = 0
+ bsa.filepath = 'bias_step_analysis.npy'
+ bsa.bgmap = np.zeros((12, 2))
+ return bsa
+
+
+def create_session(op_name):
+ """Create an OpSession with a mocked app for testing."""
+ mock_app = mock.MagicMock()
+ session = OpSession(1, op_name, app=mock_app)
+
+ return session
+
+
+@pytest.fixture
+def agent():
+ """Test fixture to setup a mocked OCSAgent."""
+ mock_agent = mock.MagicMock()
+ log = txaio.make_logger()
+ txaio.start_logging(level='debug')
+ mock_agent.log = log
+ log.info('Initialized mock OCSAgent')
+ parser = make_parser()
+ args = parser.parse_args(args=[
+ '--monitor-id', 'pysmurf-controller-s2',
+ '--slot', '2',
+ '--poll-interval', '10'
+ ])
+ agent = PysmurfController(mock_agent, args)
+
+ return agent
+
+
+@mock.patch('socs.agents.pysmurf_controller.agent.PysmurfController._get_smurf_control', mock_pysmurf)
+@mock.patch('numpy.save', mock_np_save())
+@mock.patch('matplotlib.figure.Figure.savefig', mock_plt_savefig())
+@mock.patch('sodetlib.noise.take_noise', mock_take_noise)
+@mock.patch('time.sleep', mock.MagicMock())
+def test_uxm_setup(agent):
+ """test_uxm_setup()
+
+ **Test** - Tests uxm_setup task.
+ """
+ session = create_session('uxm_setup')
+ res = agent.uxm_setup(session, {'bands': [0], 'kwargs': None})
+ assert res[0] is True
+
+
+@mock.patch('socs.agents.pysmurf_controller.agent.PysmurfController._get_smurf_control', mock_pysmurf)
+@mock.patch('numpy.save', mock_np_save())
+@mock.patch('matplotlib.figure.Figure.savefig', mock_plt_savefig())
+@mock.patch('sodetlib.noise.take_noise', mock_take_noise)
+@mock.patch('time.sleep', mock.MagicMock())
+def test_uxm_relock(agent):
+ """test_uxm_relock()
+
+ **Test** - Tests uxm_relock task.
+ """
+ session = create_session('uxm_relock')
+ res = agent.uxm_relock(session, {'bands': [0], 'kwargs': None})
+ assert res[0] is True
+
+
+@mock.patch('socs.agents.pysmurf_controller.agent.PysmurfController._get_smurf_control', mock_pysmurf)
+@mock.patch('sodetlib.set_current_mode', mock_set_current_mode)
+@mock.patch('sodetlib.operations.bias_steps.BiasStepAnalysis', mock_biasstepanalysis)
+@mock.patch('matplotlib.figure.Figure.savefig', mock_plt_savefig())
+@mock.patch('time.sleep', mock.MagicMock())
+def test_take_bgmap(agent):
+ """test_take_bgmap()
+
+ **Test** - Tests take_bgmap task.
+ """
+ session = create_session('take_bgmap')
+ res = agent.take_bgmap(session, {'kwargs': {'high_current_mode': False}})
+ assert res[0] is True
+
+
+@mock.patch('socs.agents.pysmurf_controller.agent.PysmurfController._get_smurf_control', mock_pysmurf)
+@mock.patch('matplotlib.figure.Figure.savefig', mock_plt_savefig())
+@mock.patch('sodetlib.operations.iv.IVAnalysis', mock_ivanalysis)
+@mock.patch('time.sleep', mock.MagicMock())
+def test_take_iv(agent):
+ """test_take_iv()
+
+ **Test** - Tests take_iv task.
+ """
+ session = create_session('take_iv')
+ res = agent.take_iv(session, {'kwargs': {'run_analysis': False}})
+ assert res[0] is True
+
+
+@mock.patch('socs.agents.pysmurf_controller.agent.PysmurfController._get_smurf_control', mock_pysmurf)
+@mock.patch('sodetlib.set_current_mode', mock_set_current_mode)
+@mock.patch('sodetlib.operations.bias_steps.BiasStepAnalysis', mock_biasstepanalysis)
+@mock.patch('time.sleep', mock.MagicMock())
+def test_take_bias_steps(agent):
+ """test_take_bias_steps()
+
+ **Test** - Tests take_bias_steps task.
+ """
+ session = create_session('take_bias_steps')
+ res = agent.take_bias_steps(session, {'kwargs': None})
+ assert res[0] is True
+
+
+@mock.patch('socs.agents.pysmurf_controller.agent.PysmurfController._get_smurf_control', mock_pysmurf)
+@mock.patch('sodetlib.noise.take_noise', mock_take_noise)
+@mock.patch('time.sleep', mock.MagicMock())
+def test_take_noise(agent):
+ """test_take_noise()
+
+ **Test** - Tests take_noise task.
+ """
+ session = create_session('take_noise')
+ res = agent.take_noise(session, {'duration': 30, 'kwargs': None})
+ assert res[0] is True
+
+
+@mock.patch('socs.agents.pysmurf_controller.agent.PysmurfController._get_smurf_control', mock_pysmurf)
+@mock.patch('sodetlib.set_current_mode', mock_set_current_mode)
+@mock.patch('time.sleep', mock.MagicMock())
+def test_bias_dets(agent):
+ """test_bias_dets()
+
+ **Test** - Tests bias_dets task.
+ """
+ session = create_session('bias_dets')
+ mm = mock.MagicMock()
+ res = agent.bias_dets(session, {'rfrac': (0.3, 0.6),
+ 'kwargs': {'iva': mock_ivanalysis(S=mm, cfg=mm, run_kwargs=mm,
+ sid=mm, start_times=mm, stop_times=mm)}})
+ assert res[0] is True
+
+
+@mock.patch('socs.agents.pysmurf_controller.agent.PysmurfController._get_smurf_control', mock_pysmurf)
+@mock.patch('time.sleep', mock.MagicMock())
+def test_stream(agent):
+ """test_stream()
+
+ **Test** - Tests stream process.
+ """
+ session = create_session('stream')
+ res = agent.stream(session, {'duration': None, 'load_tune': False, 'kwargs': None, 'test_mode': True})
+ assert res[0] is True
+
+
+@mock.patch('socs.agents.pysmurf_controller.agent.PysmurfController._get_smurf_control', mock_pysmurf)
+@mock.patch('time.sleep', mock.MagicMock())
+def test_check_state(agent):
+ """test_check_state()
+
+ **Test** - Tests check_state process.
+ """
+ session = create_session('check_state')
+ res = agent.check_state(session, {'poll_interval': 10, 'test_mode': True})
+ assert res[0] is True
diff --git a/tests/agents/test_pysmurf_monitor.py b/tests/agents/test_pysmurf_monitor.py
index b7f6988a6..deffc0c60 100644
--- a/tests/agents/test_pysmurf_monitor.py
+++ b/tests/agents/test_pysmurf_monitor.py
@@ -1,3 +1 @@
-import sys
-sys.path.insert(0, '../agents/pysmurf_monitor/')
-from pysmurf_monitor import PysmurfMonitor # noqa: F401
+from socs.agents.pysmurf_monitor.agent import PysmurfMonitor # noqa: F401
diff --git a/tests/agents/test_scpi_psu_agent.py b/tests/agents/test_scpi_psu_agent.py
index 583d76d9d..20b1dd320 100644
--- a/tests/agents/test_scpi_psu_agent.py
+++ b/tests/agents/test_scpi_psu_agent.py
@@ -1,3 +1 @@
-import sys
-sys.path.insert(0, '../agents/scpi_psu/')
-from scpi_psu_agent import ScpiPsuAgent # noqa: F401
+from socs.agents.scpi_psu.agent import ScpiPsuAgent # noqa: F401
diff --git a/tests/agents/test_smurf_crate_monitor.py b/tests/agents/test_smurf_crate_monitor.py
index 7248935a4..56121a842 100644
--- a/tests/agents/test_smurf_crate_monitor.py
+++ b/tests/agents/test_smurf_crate_monitor.py
@@ -1,3 +1,2 @@
-import sys
-sys.path.insert(0, '../agents/smurf_crate_monitor/')
-from smurf_crate_monitor import SmurfCrateMonitor # noqa: F401
+from socs.agents.smurf_crate_monitor.agent import \
+ SmurfCrateMonitor # noqa: F401
diff --git a/tests/agents/test_smurf_file_emulator.py b/tests/agents/test_smurf_file_emulator.py
index d9ba0078b..fb76605ef 100644
--- a/tests/agents/test_smurf_file_emulator.py
+++ b/tests/agents/test_smurf_file_emulator.py
@@ -1,10 +1,10 @@
-import sys
-sys.path.insert(0, '../agents/smurf_file_emulator/')
-from smurf_file_emulator import SmurfFileEmulator, make_parser
-
from unittest import mock
import txaio
+
+from socs.agents.smurf_file_emulator.agent import (SmurfFileEmulator,
+ make_parser)
+
txaio.use_twisted()
diff --git a/tests/agents/test_smurf_stream_simulator.py b/tests/agents/test_smurf_stream_simulator.py
index e9a782952..4f383a235 100644
--- a/tests/agents/test_smurf_stream_simulator.py
+++ b/tests/agents/test_smurf_stream_simulator.py
@@ -1,3 +1,2 @@
-import sys
-sys.path.insert(0, '../agents/smurf_stream_simulator/')
-from smurf_stream_simulator import SmurfStreamSimulator # noqa: F401
+from socs.agents.smurf_stream_simulator.agent import \
+ SmurfStreamSimulator # noqa: F401
diff --git a/tests/agents/test_suprsync_agent.py b/tests/agents/test_suprsync_agent.py
index ec4d300a5..409891e73 100644
--- a/tests/agents/test_suprsync_agent.py
+++ b/tests/agents/test_suprsync_agent.py
@@ -1,10 +1,9 @@
-import sys
import os
+
import numpy as np
import txaio
-sys.path.insert(0, '../agents/suprsync/')
-from socs.db.suprsync import SupRsyncFilesManager, SupRsyncFileHandler
+from socs.db.suprsync import SupRsyncFileHandler, SupRsyncFilesManager
txaio.use_twisted()
diff --git a/tests/agents/test_synacc.py b/tests/agents/test_synacc.py
index 7f814fb09..2c0759180 100644
--- a/tests/agents/test_synacc.py
+++ b/tests/agents/test_synacc.py
@@ -1,3 +1 @@
-import sys
-sys.path.insert(0, '../agents/synacc/')
-from synacc import SynaccessAgent # noqa: F401
+from socs.agents.synacc.agent import SynaccessAgent # noqa: F401
diff --git a/tests/agents/test_tektronix_agent.py b/tests/agents/test_tektronix_agent.py
index dd367f8b5..64a30cc77 100644
--- a/tests/agents/test_tektronix_agent.py
+++ b/tests/agents/test_tektronix_agent.py
@@ -1,3 +1 @@
-import sys
-sys.path.insert(0, '../agents/tektronix3021c/')
-from tektronix_agent import TektronixAWGAgent # noqa: F401
+from socs.agents.tektronix3021c.agent import TektronixAWGAgent # noqa: F401
diff --git a/tests/agents/test_vantage_pro2_agent.py b/tests/agents/test_vantage_pro2_agent.py
index dbbc785d0..ec3cf31e4 100644
--- a/tests/agents/test_vantage_pro2_agent.py
+++ b/tests/agents/test_vantage_pro2_agent.py
@@ -1,3 +1 @@
-import sys
-sys.path.insert(0, '../agents/vantagePro2_agent/')
-from vantage_pro2_agent import VantagePro2Agent # noqa: F401
+from socs.agents.vantagepro2.agent import VantagePro2Agent # noqa: F401
diff --git a/tests/agents/test_wiregrid_encoder_agent.py b/tests/agents/test_wiregrid_encoder_agent.py
new file mode 100644
index 000000000..2e39186cd
--- /dev/null
+++ b/tests/agents/test_wiregrid_encoder_agent.py
@@ -0,0 +1,2 @@
+from socs.agents.wiregrid_encoder.agent import \
+ WiregridEncoderAgent # noqa: F401
diff --git a/tests/agents/test_wiregrid_kikusui_agent.py b/tests/agents/test_wiregrid_kikusui_agent.py
new file mode 100644
index 000000000..6a74d1a7b
--- /dev/null
+++ b/tests/agents/test_wiregrid_kikusui_agent.py
@@ -0,0 +1,2 @@
+from socs.agents.wiregrid_kikusui.agent import \
+ WiregridKikusuiAgent # noqa: F401
diff --git a/tests/agents/test_xy_stage_agent.py b/tests/agents/test_xy_stage_agent.py
new file mode 100644
index 000000000..8aca00651
--- /dev/null
+++ b/tests/agents/test_xy_stage_agent.py
@@ -0,0 +1 @@
+from socs.agents.xy_stage.agent import LATRtXYStageAgent # noqa: F401
diff --git a/tests/agent/test_moxaSerial.py b/tests/common/test_moxa_serial.py
similarity index 74%
rename from tests/agent/test_moxaSerial.py
rename to tests/common/test_moxa_serial.py
index 95a8266fb..5e8778a95 100644
--- a/tests/agent/test_moxaSerial.py
+++ b/tests/common/test_moxa_serial.py
@@ -1,10 +1,10 @@
import time
+
import pytest
-from socs.agent import moxaSerial
+from socs.common import moxa_serial
from socs.testing.device_emulator import create_device_emulator
-
tcp_emulator = create_device_emulator({'ping': 'pong\r'},
'tcp', 9001)
@@ -14,7 +14,7 @@ def create_tcpserver():
# Connection might not work on first attempt
for i in range(5):
try:
- ser = moxaSerial.Serial_TCPServer(('127.0.0.1', 9001), 0.1)
+ ser = moxa_serial.Serial_TCPServer(('127.0.0.1', 9001), 0.1)
break
except ConnectionRefusedError:
print("Could not connect, waiting and trying again.")
@@ -23,25 +23,25 @@ def create_tcpserver():
@pytest.mark.integtest
-def test_moxaserial_create_serial_tcpserver(tcp_emulator):
+def test_moxa_serial_create_serial_tcpserver(tcp_emulator):
create_tcpserver()
@pytest.mark.integtest
-def test_moxaserial_write(tcp_emulator):
+def test_moxa_serial_write(tcp_emulator):
ser = create_tcpserver()
ser.write('ping')
@pytest.mark.integtest
-def test_moxaserial_writeread(tcp_emulator):
+def test_moxa_serial_writeread(tcp_emulator):
ser = create_tcpserver()
response = ser.writeread('ping')
assert response == 'pong'
@pytest.mark.integtest
-def test_moxaserial_write_readline(tcp_emulator):
+def test_moxa_serial_write_readline(tcp_emulator):
ser = create_tcpserver()
ser.write('ping')
assert ser.readline() == 'pong\r'
diff --git a/tests/agent/test_pmx.py b/tests/common/test_pmx.py
similarity index 98%
rename from tests/agent/test_pmx.py
rename to tests/common/test_pmx.py
index 5717e0950..29517f7ef 100644
--- a/tests/agent/test_pmx.py
+++ b/tests/common/test_pmx.py
@@ -1,10 +1,10 @@
import time
+
import pytest
-from socs.agent.pmx import Command, PMX
+from socs.common.pmx import PMX, Command
from socs.testing.device_emulator import create_device_emulator
-
tcp_emulator = create_device_emulator({'ping': 'pong\r',
'SYST:REM': 'test'},
'tcp', 9001)
diff --git a/tests/default.yaml b/tests/default.yaml
index 8aafb91a3..c95883d00 100644
--- a/tests/default.yaml
+++ b/tests/default.yaml
@@ -64,8 +64,17 @@ hosts:
'instance-id': 'pfeiffer366',
'arguments': [['--ip_address', '127.0.0.1'],
['--port', '8000'],
+ ['--mode', 'test'],
]
},
+ {'agent-class': 'ibootbarAgent',
+ 'instance-id': 'ibootbar',
+ 'arguments': [['--address', '127.0.0.1'],
+ ['--port', 1024],
+ ['--mode', 'test'],
+ ['--snmp-version', 2]
+ ]
+ },
{'agent-class': 'SynaccessAgent',
'instance-id': 'synacc',
'arguments':[['--ip-address', '127.0.0.1:8000'],
@@ -73,5 +82,9 @@ hosts:
['--password', 'admin'],
]
},
+ {'agent-class': 'PysmurfController',
+ 'instance-id': 'pysmurf-controller-s2',
+ 'arguments': []
+ },
]
}
diff --git a/tests/integration/ibootbar_snmp_data/private.snmprec b/tests/integration/ibootbar_snmp_data/private.snmprec
new file mode 100644
index 000000000..524a34e2a
--- /dev/null
+++ b/tests/integration/ibootbar_snmp_data/private.snmprec
@@ -0,0 +1,17 @@
+1.3.6.1.4.1.1418.6.5.1.5.0|2:writecache|value=0
+1.3.6.1.4.1.1418.6.5.1.5.1|2:writecache|value=0
+1.3.6.1.4.1.1418.6.5.1.5.2|2:writecache|value=0
+1.3.6.1.4.1.1418.6.5.1.5.3|2:writecache|value=0
+1.3.6.1.4.1.1418.6.5.1.5.4|2:writecache|value=0
+1.3.6.1.4.1.1418.6.5.1.5.5|2:writecache|value=0
+1.3.6.1.4.1.1418.6.5.1.5.6|2:writecache|value=0
+1.3.6.1.4.1.1418.6.5.1.5.7|2:writecache|value=0
+1.3.6.1.4.1.1418.6.5.1.4.0|2:writecache|value=0
+1.3.6.1.4.1.1418.6.5.1.4.1|2:writecache|value=0
+1.3.6.1.4.1.1418.6.5.1.4.2|2:writecache|value=0
+1.3.6.1.4.1.1418.6.5.1.4.3|2:writecache|value=0
+1.3.6.1.4.1.1418.6.5.1.4.4|2:writecache|value=0
+1.3.6.1.4.1.1418.6.5.1.4.5|2:writecache|value=0
+1.3.6.1.4.1.1418.6.5.1.4.6|2:writecache|value=0
+1.3.6.1.4.1.1418.6.5.1.4.7|2:writecache|value=0
+1.3.6.1.4.1.1418.6.1.19.0|2:writecache|value=0
diff --git a/tests/integration/ibootbar_snmp_data/public.snmprec b/tests/integration/ibootbar_snmp_data/public.snmprec
new file mode 100644
index 000000000..149552231
--- /dev/null
+++ b/tests/integration/ibootbar_snmp_data/public.snmprec
@@ -0,0 +1,16 @@
+1.3.6.1.4.1.1418.6.5.1.2.0|4:writecache|value=Outlet #1
+1.3.6.1.4.1.1418.6.5.1.2.1|4:writecache|value=Outlet #2
+1.3.6.1.4.1.1418.6.5.1.2.2|4:writecache|value=Outlet #3
+1.3.6.1.4.1.1418.6.5.1.2.3|4:writecache|value=Outlet #4
+1.3.6.1.4.1.1418.6.5.1.2.4|4:writecache|value=Outlet #5
+1.3.6.1.4.1.1418.6.5.1.2.5|4:writecache|value=Outlet #6
+1.3.6.1.4.1.1418.6.5.1.2.6|4:writecache|value=Outlet #7
+1.3.6.1.4.1.1418.6.5.1.2.7|4:writecache|value=Outlet #8
+1.3.6.1.4.1.1418.6.5.1.6.0|2:writecache|value=0
+1.3.6.1.4.1.1418.6.5.1.6.1|2:writecache|value=0
+1.3.6.1.4.1.1418.6.5.1.6.2|2:writecache|value=0
+1.3.6.1.4.1.1418.6.5.1.6.3|2:writecache|value=0
+1.3.6.1.4.1.1418.6.5.1.6.4|2:writecache|value=0
+1.3.6.1.4.1.1418.6.5.1.6.5|2:writecache|value=0
+1.3.6.1.4.1.1418.6.5.1.6.6|2:writecache|value=0
+1.3.6.1.4.1.1418.6.5.1.6.7|2:writecache|value=0
diff --git a/tests/integration/test_cryomech_cpa_agent_integration.py b/tests/integration/test_cryomech_cpa_agent_integration.py
index 4f26fe6ec..84844d164 100644
--- a/tests/integration/test_cryomech_cpa_agent_integration.py
+++ b/tests/integration/test_cryomech_cpa_agent_integration.py
@@ -1,17 +1,10 @@
import os
-import pytest
import ocs
+import pytest
+from integration.util import create_crossbar_fixture
from ocs.base import OpCode
-
-from ocs.testing import (
- create_agent_runner_fixture,
- create_client_fixture,
-)
-
-from integration.util import (
- create_crossbar_fixture
-)
+from ocs.testing import create_agent_runner_fixture, create_client_fixture
from socs.testing.device_emulator import create_device_emulator
@@ -26,9 +19,9 @@
wait_for_crossbar = create_crossbar_fixture()
run_agent = create_agent_runner_fixture(
- '../agents/cryomech_cpa/cryomech_cpa_agent.py', 'cryomech_cpa_agent')
+ '../socs/agents/cryomech_cpa/agent.py', 'cryomech_cpa_agent')
run_agent_acq = create_agent_runner_fixture(
- '../agents/cryomech_cpa/cryomech_cpa_agent.py', 'cryomech_cpa_agent', args=['--mode', 'acq'])
+ '../socs/agents/cryomech_cpa/agent.py', 'cryomech_cpa_agent', args=['--mode', 'acq'])
client = create_client_fixture('cryomech')
emulator = create_device_emulator({init_msg: init_res}, relay_type='tcp', port=5502, encoding=None)
@@ -88,3 +81,19 @@ def test_cryomech_cpa_acq(wait_for_crossbar, emulator, run_agent, client):
# already stopped, but will set self.take_data = False
resp = client.acq.stop()
print(resp)
+
+
+@pytest.mark.integtest
+@pytest.mark.parametrize("state,command", [('on', b'\t\x99\x00\x00\x00\x06\x01\x06\x00\x01\x00\x01')])
+def test_cryomech_cpa_release_reacquire(wait_for_crossbar, emulator, run_agent_acq,
+ client, state, command):
+ client.init.wait()
+ response = {command: command,
+ init_msg: init_res}
+ emulator.define_responses(response)
+
+ resp = client.power_ptc(state=state)
+ print(resp)
+ assert resp.status == ocs.OK
+ print(resp.session)
+ assert resp.session['op_code'] == OpCode.SUCCEEDED.value
diff --git a/tests/integration/test_hwp_rotation_agent_integration.py b/tests/integration/test_hwp_rotation_agent_integration.py
index e4d2af279..e61dc12d5 100644
--- a/tests/integration/test_hwp_rotation_agent_integration.py
+++ b/tests/integration/test_hwp_rotation_agent_integration.py
@@ -1,16 +1,8 @@
-import pytest
-
import ocs
+import pytest
+from integration.util import create_crossbar_fixture
from ocs.base import OpCode
-
-from ocs.testing import (
- create_agent_runner_fixture,
- create_client_fixture,
-)
-
-from integration.util import (
- create_crossbar_fixture
-)
+from ocs.testing import create_agent_runner_fixture, create_client_fixture
from socs.testing.device_emulator import create_device_emulator
@@ -18,9 +10,9 @@
wait_for_crossbar = create_crossbar_fixture()
run_agent = create_agent_runner_fixture(
- '../agents/hwp_rotation/rotation_agent.py', 'hwp_rotation_agent', args=['--log-dir', './logs/'])
+ '../socs/agents/hwp_rotation/agent.py', 'hwp_rotation_agent', args=['--log-dir', './logs/'])
run_agent_idle = create_agent_runner_fixture(
- '../agents/hwp_rotation/rotation_agent.py', 'hwp_rotation_agent', args=['--mode', 'idle', '--log-dir', './logs/'])
+ '../socs/agents/hwp_rotation/agent.py', 'hwp_rotation_agent', args=['--mode', 'idle', '--log-dir', './logs/'])
client = create_client_fixture('rotator')
kikusui_emu = create_device_emulator(
{'SYST:REM': ''}, relay_type='tcp', port=2000)
diff --git a/tests/integration/test_ibootbar_agent_integration.py b/tests/integration/test_ibootbar_agent_integration.py
new file mode 100644
index 000000000..0f679bf3c
--- /dev/null
+++ b/tests/integration/test_ibootbar_agent_integration.py
@@ -0,0 +1,95 @@
+import os
+import signal
+import subprocess
+from multiprocessing import Process
+from unittest.mock import patch
+
+import ocs
+import pytest
+from integration.util import create_crossbar_fixture
+from ocs.base import OpCode
+from ocs.testing import create_agent_runner_fixture, create_client_fixture
+from snmpsim.commands import responder
+from twisted.internet.defer import inlineCallbacks
+
+from socs.snmp import SNMPTwister
+
+pytest_plugins = "docker_compose"
+
+wait_for_crossbar = create_crossbar_fixture()
+run_agent = create_agent_runner_fixture(
+ "../socs/agents/ibootbar/agent.py",
+ "ibootbarAgent",
+ args=["--log-dir", "./logs/"],
+)
+client = create_client_fixture("ibootbar")
+
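+# Copy the socs MIB files into ~/.pysnmp/mibs so pysnmp can resolve IBOOTPDU-MIB names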
+subprocess.run(
+ "mkdir -p ~/.pysnmp/mibs && cp -r ../socs/mibs/. ~/.pysnmp/mibs",
+ shell=True,
+)
+
+address = "127.0.0.1"
+port = 1024
+
+
+def check_resp_success(resp):
+ print(resp)
+ assert resp.status == ocs.OK
+ print(resp.session)
+ assert resp.session["op_code"] == OpCode.SUCCEEDED.value
+
+
+@pytest.fixture
+def start_responder():
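+ """Serve the recorded ibootbar SNMP data with snmpsim in a separate process for the duration of a test."""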
+ with patch(
+ "sys.argv",
+ [
+ "test_ibootbar_agent_integration.py",
+ "--data-dir=./integration/ibootbar_snmp_data",
+ f"--agent-udpv4-endpoint={address}:{port}",
+ # f"--variation-modules-dir={os.path.expanduser('~/.local/share/snmpsim/variation')}",
+ ],
+ ):
+ p = Process(target=responder.main)
+ p.start()
+ yield
+ os.kill(p.pid, signal.SIGINT)
+
+
+@pytest.mark.integtest
+def test_ibootbar_acq(wait_for_crossbar, start_responder, run_agent, client):
+ resp = client.acq.start(test_mode=True)
+ resp = client.acq.wait()
+ check_resp_success(resp)
+
+
+@pytest.mark.integtest
+@inlineCallbacks
+def test_ibootbar_set_outlet(wait_for_crossbar, start_responder, run_agent, client):
+ outlet_number = 3
+ resp = client.set_outlet(outlet=outlet_number, state="on")
+ check_resp_success(resp)
+
+ # Simulate internal state transition of hardware
+ snmp = SNMPTwister(address, port)
+ outlet = [("IBOOTPDU-MIB", "outletStatus", outlet_number - 1)]
+ yield snmp.set(oid_list=outlet, version=2, setvalue=1, community_name="public")
+
+ resp = client.acq.start(test_mode=True)
+ resp = client.acq.wait()
+
+ assert resp.session["data"][f"outletStatus_{outlet_number - 1}"]["status"] == 1
+
+
+@pytest.mark.integtest
+def test_ibootbar_set_initial_state(
+ wait_for_crossbar, start_responder, run_agent, client):
+ resp = client.set_initial_state()
+ check_resp_success(resp)
+
+
+@pytest.mark.integtest
+def test_ibootbar_cycle_outlet(wait_for_crossbar, start_responder, run_agent, client):
+ resp = client.cycle_outlet(outlet=7, cycle_time=5)
+ check_resp_success(resp)
diff --git a/tests/integration/test_ls240_agent_integration.py b/tests/integration/test_ls240_agent_integration.py
index db47b474c..7df4ab024 100644
--- a/tests/integration/test_ls240_agent_integration.py
+++ b/tests/integration/test_ls240_agent_integration.py
@@ -1,17 +1,10 @@
import time
-import pytest
import ocs
+import pytest
+from integration.util import create_crossbar_fixture
from ocs.base import OpCode
-
-from ocs.testing import (
- create_agent_runner_fixture,
- create_client_fixture,
-)
-
-from integration.util import (
- create_crossbar_fixture
-)
+from ocs.testing import create_agent_runner_fixture, create_client_fixture
from socs.testing.device_emulator import create_device_emulator
@@ -19,7 +12,7 @@
wait_for_crossbar = create_crossbar_fixture()
run_agent = create_agent_runner_fixture(
- '../agents/lakeshore240/LS240_agent.py', 'ls240_agent')
+ '../socs/agents/lakeshore240/agent.py', 'ls240_agent')
client = create_client_fixture('LSA240S')
initial_responses = {'*IDN?': 'LSCI,MODEL240,LSA240S,1.3',
diff --git a/tests/integration/test_ls372_agent_integration.py b/tests/integration/test_ls372_agent_integration.py
index 4996634aa..6a100ef2c 100644
--- a/tests/integration/test_ls372_agent_integration.py
+++ b/tests/integration/test_ls372_agent_integration.py
@@ -1,12 +1,11 @@
import os
-import pytest
import ocs
+import pytest
+from integration.util import create_crossbar_fixture
from ocs.base import OpCode
from ocs.testing import create_agent_runner_fixture, create_client_fixture
-from integration.util import create_crossbar_fixture
-
from socs.testing.device_emulator import create_device_emulator
pytest_plugins = ("docker_compose")
@@ -16,7 +15,7 @@
run_agent = create_agent_runner_fixture(
- '../agents/lakeshore372/LS372_agent.py',
+ '../socs/agents/lakeshore372/agent.py',
'ls372')
client = create_client_fixture('LSASIM')
wait_for_crossbar = create_crossbar_fixture()
diff --git a/tests/integration/test_ls425_agent_integration.py b/tests/integration/test_ls425_agent_integration.py
index 2311bab8d..bcfb032fa 100644
--- a/tests/integration/test_ls425_agent_integration.py
+++ b/tests/integration/test_ls425_agent_integration.py
@@ -1,17 +1,10 @@
import time
-import pytest
import ocs
+import pytest
+from integration.util import create_crossbar_fixture
from ocs.base import OpCode
-
-from ocs.testing import (
- create_agent_runner_fixture,
- create_client_fixture,
-)
-
-from integration.util import (
- create_crossbar_fixture
-)
+from ocs.testing import create_agent_runner_fixture, create_client_fixture
from socs.testing.device_emulator import create_device_emulator
@@ -19,9 +12,9 @@
wait_for_crossbar = create_crossbar_fixture()
run_agent = create_agent_runner_fixture(
- '../agents/lakeshore425/LS425_agent.py', 'ls425_agent')
+ '../socs/agents/lakeshore425/agent.py', 'ls425_agent')
run_agent_acq = create_agent_runner_fixture(
- '../agents/lakeshore425/LS425_agent.py', 'ls425_agent', args=['--mode', 'acq'])
+ '../socs/agents/lakeshore425/agent.py', 'ls425_agent', args=['--mode', 'acq'])
client = create_client_fixture('LS425')
emulator = create_device_emulator({'*IDN?': 'LSCI,MODEL425,LSA425T,1.3'},
relay_type='serial')
diff --git a/tests/integration/test_pfeiffer_tc400_agent_integration.py b/tests/integration/test_pfeiffer_tc400_agent_integration.py
index 7ee100d58..34178f479 100644
--- a/tests/integration/test_pfeiffer_tc400_agent_integration.py
+++ b/tests/integration/test_pfeiffer_tc400_agent_integration.py
@@ -1,17 +1,10 @@
import os
-import pytest
import ocs
+import pytest
+from integration.util import create_crossbar_fixture
from ocs.base import OpCode
-
-from ocs.testing import (
- create_agent_runner_fixture,
- create_client_fixture,
-)
-
-from integration.util import (
- create_crossbar_fixture
-)
+from ocs.testing import create_agent_runner_fixture, create_client_fixture
from socs.testing.device_emulator import create_device_emulator
@@ -49,7 +42,7 @@ def format_reply(data):
wait_for_crossbar = create_crossbar_fixture()
run_agent = create_agent_runner_fixture(
- '../agents/pfeiffer_tc400/pfeiffer_tc400_agent.py', 'tc400_agent')
+ '../socs/agents/pfeiffer_tc400/agent.py', 'tc400_agent')
client = create_client_fixture('pfeifferturboA')
emulator = create_device_emulator({}, relay_type='tcp')
diff --git a/tests/integration/test_pfeiffer_tpg366_agent_integration.py b/tests/integration/test_pfeiffer_tpg366_agent_integration.py
index 8c8732fa7..01a1223ef 100644
--- a/tests/integration/test_pfeiffer_tpg366_agent_integration.py
+++ b/tests/integration/test_pfeiffer_tpg366_agent_integration.py
@@ -1,15 +1,8 @@
-import pytest
-import time
-
import ocs
-from ocs.base import OpCode
-
-from ocs.testing import (
- create_agent_runner_fixture,
- create_client_fixture,
-)
-
+import pytest
from integration.util import create_crossbar_fixture
+from ocs.base import OpCode
+from ocs.testing import create_agent_runner_fixture, create_client_fixture
from socs.testing.device_emulator import create_device_emulator
@@ -17,7 +10,7 @@
wait_for_crossbar = create_crossbar_fixture()
run_agent = create_agent_runner_fixture(
- "../agents/pfeiffer_tpg366/pfeiffer_tpg366_agent.py",
+ "../socs/agents/pfeiffer_tpg366/agent.py",
"pfeiffer_tpg366_agent",
args=["--log-dir", "./logs/"],
)
@@ -43,8 +36,6 @@ def check_resp_success(resp):
@pytest.mark.integtest
def test_pfeiffer_tpg366_acq(wait_for_crossbar, emu, run_agent, client):
- time.sleep(3)
- client.acq.stop()
- time.sleep(1)
- resp = client.acq.status()
+ resp = client.acq.start(test_mode=True)
+ resp = client.acq.wait()
check_resp_success(resp)
diff --git a/tests/integration/test_scpi_psu_agent_integration.py b/tests/integration/test_scpi_psu_agent_integration.py
index 41dd39e88..a7f75bb67 100644
--- a/tests/integration/test_scpi_psu_agent_integration.py
+++ b/tests/integration/test_scpi_psu_agent_integration.py
@@ -1,14 +1,8 @@
-import pytest
-
import ocs
-from ocs.base import OpCode
-
-from ocs.testing import (
- create_agent_runner_fixture,
- create_client_fixture,
-)
-
+import pytest
from integration.util import create_crossbar_fixture
+from ocs.base import OpCode
+from ocs.testing import create_agent_runner_fixture, create_client_fixture
from socs.testing.device_emulator import create_device_emulator
@@ -16,7 +10,7 @@
wait_for_crossbar = create_crossbar_fixture()
run_agent = create_agent_runner_fixture(
- "../agents/scpi_psu/scpi_psu_agent.py",
+ "../socs/agents/scpi_psu/agent.py",
"scpi_psu_agent",
args=["--log-dir", "./logs/"],
)
diff --git a/tests/integration/test_synacc_integration.py b/tests/integration/test_synacc_integration.py
index d15b6893c..483b1053f 100644
--- a/tests/integration/test_synacc_integration.py
+++ b/tests/integration/test_synacc_integration.py
@@ -1,22 +1,16 @@
+import ocs
import pytest
-from http_server_mock import HttpServerMock
from flask import request
-
-import ocs
-from ocs.base import OpCode
-
-from ocs.testing import (
- create_agent_runner_fixture,
- create_client_fixture,
-)
-
+from http_server_mock import HttpServerMock
from integration.util import create_crossbar_fixture
+from ocs.base import OpCode
+from ocs.testing import create_agent_runner_fixture, create_client_fixture
pytest_plugins = "docker_compose"
wait_for_crossbar = create_crossbar_fixture()
run_agent = create_agent_runner_fixture(
- "../agents/synacc/synacc.py",
+ "../socs/agents/synacc/agent.py",
"synacc",
args=["--log-dir", "./logs/"],
)
diff --git a/tests/integration/util.py b/tests/integration/util.py
index 6bbeb7e1e..8f2298c36 100644
--- a/tests/integration/util.py
+++ b/tests/integration/util.py
@@ -1,5 +1,4 @@
import pytest
-
from ocs.testing import check_crossbar_connection
diff --git a/tests/test_device_emulator.py b/tests/test_device_emulator.py
index 5a8fee7d1..26fb820a7 100644
--- a/tests/test_device_emulator.py
+++ b/tests/test_device_emulator.py
@@ -1,10 +1,10 @@
-import time
import socket
+import time
+
import pytest
from socs.testing import device_emulator
-
tcp_emulator = device_emulator.create_device_emulator({'ping': 'pong'},
'tcp', 9001)
diff --git a/tests/test_plugin.py b/tests/test_plugin.py
new file mode 100644
index 000000000..91cdf52e3
--- /dev/null
+++ b/tests/test_plugin.py
@@ -0,0 +1 @@
+import socs.plugin # noqa: F401
diff --git a/versioneer.py b/versioneer.py
index 2fc1a0762..d18dfac71 100644
--- a/versioneer.py
+++ b/versioneer.py
@@ -279,10 +279,12 @@
"""
from __future__ import print_function
+
try:
import configparser
except ImportError:
import ConfigParser as configparser
+
import errno
import json
import os
@@ -1570,6 +1572,7 @@ def run(self):
if "cx_Freeze" in sys.modules: # cx_freeze enabled?
from cx_Freeze.dist import build_exe as _build_exe
+
# nczeczulin reports that py2exe won't like the pep440-style string
# as FILEVERSION, but it can be used for PRODUCTVERSION, e.g.
# setup(console=[{