From b72f0e0b4e5afc8ab86708ff88535b7e57411af1 Mon Sep 17 00:00:00 2001 From: Zachary Lentz Date: Fri, 15 Dec 2023 18:32:40 -0800 Subject: [PATCH 01/17] ENH: begin implementing ssh data exchange with tcbsd plcs --- dev-requirements.txt | 1 + pmpsdb_client/ftp_data.py | 2 +- pmpsdb_client/ssh_data.py | 84 +++++++++++++++++++++++++++++++++++++++ requirements.txt | 1 + 4 files changed, 87 insertions(+), 1 deletion(-) create mode 100644 pmpsdb_client/ssh_data.py diff --git a/dev-requirements.txt b/dev-requirements.txt index e079f8a..31cf2b9 100644 --- a/dev-requirements.txt +++ b/dev-requirements.txt @@ -1 +1,2 @@ +pyqt pytest diff --git a/pmpsdb_client/ftp_data.py b/pmpsdb_client/ftp_data.py index 8479d65..9a76e13 100644 --- a/pmpsdb_client/ftp_data.py +++ b/pmpsdb_client/ftp_data.py @@ -94,7 +94,7 @@ def list_filenames( Parameters ---------- hostname : str - The plc hostname to upload to. + The plc hostname to check. directory : str, optional The ftp subdirectory to read and write from A default directory pmps is used if this argument is omitted. diff --git a/pmpsdb_client/ssh_data.py b/pmpsdb_client/ssh_data.py new file mode 100644 index 0000000..0af1f77 --- /dev/null +++ b/pmpsdb_client/ssh_data.py @@ -0,0 +1,84 @@ +""" +Module to define the scp transfer interface for the TCBSD PLCs. + +This is how we upload database files to and download database files from the +PLCs. +""" +from __future__ import annotations + +import logging +import typing +from contextlib import contextmanager + +from fabric import Connection + +logger = logging.getLogger(__name__) + +DEFAULT_PW = ( + ("Administrator", "1"), +) +DIRECTORY = "/Hard Disk/ftp/pmps" + + +@contextmanager +def ssh( + hostname: str, + directory: typing.Optional[str] = None, +) -> Connection: + """ + Context manager to handle a single ssh connection. + + Within one connection we can do any number of remote operations on the + TCBSD PLC. + """ + directory = directory or DIRECTORY + connected = False + excs = [] + + for user, pw in DEFAULT_PW: + with Connection( + host=hostname, + user=user, + connect_kwargs={"password": pw} + ) as conn: + try: + conn.open() + except Exception as exc: + excs.append(exc) + continue + connected = True + with conn.cd(directory): + yield conn + if not connected: + if len(excs) > 1: + raise RuntimeError(excs) + elif excs: + raise excs[0] + else: + raise RuntimeError("Unable to connect to PLC") + + +def list_filenames( + hostname: str, + directory: typing.Optional[str] = None, +) -> list[str]: + """ + List the filenames that are currently saved on the PLC. + + Parameters + ---------- + hostname : str + The plc hostname to check. + directory : str, optional + The diretory to read and write from. + A default directory pmps is used if this argument is omitted. + + Returns + ------- + filenames : list of str + The filenames on the PLC. 
+ """ + logger.debug("list_filenames(%s, %s)", hostname, directory) + with ssh(hostname=hostname, directory=directory) as conn: + output = conn.run("ls", hide=True).stdout + return output.strip().split("\n") diff --git a/requirements.txt b/requirements.txt index ee5f816..c810fc7 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,3 +1,4 @@ +fabric ophyd pcdscalc pcdsutils From adfd27fb1ae427f8896f4cfa07602cfc804efe87 Mon Sep 17 00:00:00 2001 From: Zachary Lentz Date: Mon, 18 Dec 2023 12:01:23 -0800 Subject: [PATCH 02/17] ENH: implement file info checking for tcbsd plcs --- pmpsdb_client/ssh_data.py | 53 +++++++++++++++++++++++++++++++++++---- 1 file changed, 48 insertions(+), 5 deletions(-) diff --git a/pmpsdb_client/ssh_data.py b/pmpsdb_client/ssh_data.py index 0af1f77..3bd6f99 100644 --- a/pmpsdb_client/ssh_data.py +++ b/pmpsdb_client/ssh_data.py @@ -6,9 +6,11 @@ """ from __future__ import annotations +import datetime import logging import typing from contextlib import contextmanager +from dataclasses import dataclass from fabric import Connection @@ -19,6 +21,8 @@ ) DIRECTORY = "/Hard Disk/ftp/pmps" +T = typing.TypeVar("T") + @contextmanager def ssh( @@ -58,12 +62,51 @@ def ssh( raise RuntimeError("Unable to connect to PLC") -def list_filenames( +@dataclass(frozen=True) +class FileInfo: + """ + File information from *nix systems. + """ + is_directory: bool + links: int + permissions: str + user: str + group: str + size: int + last_changed: datetime.datetime + filename: str + + @classmethod + def get_output_lines(cls, conn: Connection) -> str: + return conn.run("ls -l -D %s", hide=True).stdout + + @classmethod + def from_all_output_lines(cls: T, output_lines) -> list[T]: + return [cls.from_output_line(line) for line in output_lines.strip().split("\n")[1:]] + + @classmethod + def from_output_line(cls: T, output: str) -> T: + print(output) + type_perms, links, user, group, size, date, filename = output.strip().split() + + return cls( + is_directory=type_perms[0] == "d", + permissions=type_perms[1:], + links=int(links), + user=user, + group=group, + size=int(size), + last_changed=datetime.datetime.fromtimestamp(int(date)), + filename=filename, + ) + + +def get_file_info( hostname: str, directory: typing.Optional[str] = None, -) -> list[str]: +) -> list[FileInfo]: """ - List the filenames that are currently saved on the PLC. + Get information about the files that are currently saved on the PLC. 
Parameters ---------- @@ -80,5 +123,5 @@ def list_filenames( """ logger.debug("list_filenames(%s, %s)", hostname, directory) with ssh(hostname=hostname, directory=directory) as conn: - output = conn.run("ls", hide=True).stdout - return output.strip().split("\n") + output = FileInfo.get_output_lines(conn) + return FileInfo.from_all_output_lines(output) From 004c4aa8f35496e0b4834f92a1e062a6ca5c68f3 Mon Sep 17 00:00:00 2001 From: Zachary Lentz Date: Mon, 18 Dec 2023 16:57:00 -0800 Subject: [PATCH 03/17] BLD: add a conda recipe etc --- .github/workflows/standard.yml | 2 +- conda-recipe/meta.yaml | 43 ++++++++++++++++++++++++++++++++++ dev-requirements.txt | 3 ++- requirements.txt | 1 - 4 files changed, 46 insertions(+), 3 deletions(-) create mode 100644 conda-recipe/meta.yaml diff --git a/.github/workflows/standard.yml b/.github/workflows/standard.yml index 5226e0c..38d94f1 100644 --- a/.github/workflows/standard.yml +++ b/.github/workflows/standard.yml @@ -20,6 +20,6 @@ jobs: # Extras to be installed only for conda-based testing: conda-testing-extras: "" # Extras to be installed only for pip-based testing: - pip-testing-extras: "PyQt5" + pip-testing-extras: "" # Set if using setuptools-scm for the conda-build workflow use-setuptools-scm: true diff --git a/conda-recipe/meta.yaml b/conda-recipe/meta.yaml new file mode 100644 index 0000000..73db995 --- /dev/null +++ b/conda-recipe/meta.yaml @@ -0,0 +1,43 @@ +{% set package_name = "pmpsdb_client" %} +{% set import_name = "pmpsdb_client" %} +{% set version = load_file_regex(load_file=os.path.join(import_name, "_version.py"), regex_pattern=".*version = '(\S+)'").group(1) %} + +package: + name: {{ package_name }} + version : {{ version }} + +source: + path: .. + +build: + number: 0 + noarch: python + script: {{ PYTHON }} -m pip install . -vv + +requirements: + build: + - python >=3.9 + - pip + - setuptools_scm + run: + - python >=3.9 + - fabric + - ophyd + - pcdscalc + - pcdsutils + - prettytable + - qtpy + run_constrained: + - pyqt =5 + +test: + requires: + - pytest + - pyqt=5.15 + imports: + - {{ import_name }} + +about: + home: https://github.com/pcdshub/pcdsdevices + license: SLAC Open License + summary: IOC definitions for LCLS Beamline Devices diff --git a/dev-requirements.txt b/dev-requirements.txt index 31cf2b9..9ea9846 100644 --- a/dev-requirements.txt +++ b/dev-requirements.txt @@ -1,2 +1,3 @@ -pyqt +PyQt5 pytest +setuptools-scm diff --git a/requirements.txt b/requirements.txt index c810fc7..b78e589 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,4 +4,3 @@ pcdscalc pcdsutils prettytable qtpy -setuptools-scm From 9363bc0183cc0921da40e154f95ee78fa988c759 Mon Sep 17 00:00:00 2001 From: Zachary Lentz Date: Mon, 18 Dec 2023 16:58:47 -0800 Subject: [PATCH 04/17] MAINT: reorder fields to match ls -l order --- pmpsdb_client/ssh_data.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pmpsdb_client/ssh_data.py b/pmpsdb_client/ssh_data.py index 3bd6f99..bf33574 100644 --- a/pmpsdb_client/ssh_data.py +++ b/pmpsdb_client/ssh_data.py @@ -68,8 +68,8 @@ class FileInfo: File information from *nix systems. 
""" is_directory: bool - links: int permissions: str + links: int user: str group: str size: int From 91a3a63ee212b5ef0ea05e4732a692f572d95619 Mon Sep 17 00:00:00 2001 From: Zachary Lentz Date: Tue, 19 Dec 2023 16:47:50 -0800 Subject: [PATCH 05/17] WIP: start generalizing the data with the goal of it looking the same from each source --- pmpsdb_client/data_types.py | 180 ++++++++++++++++++++++++++++++++++++ pmpsdb_client/plc_data.py | 7 ++ pmpsdb_client/ssh_data.py | 11 +-- 3 files changed, 192 insertions(+), 6 deletions(-) create mode 100644 pmpsdb_client/data_types.py create mode 100644 pmpsdb_client/plc_data.py diff --git a/pmpsdb_client/data_types.py b/pmpsdb_client/data_types.py new file mode 100644 index 0000000..732afe6 --- /dev/null +++ b/pmpsdb_client/data_types.py @@ -0,0 +1,180 @@ +""" +This module defines important data structures centrally. + +This helps us compare the same kinds of data to each other, +even when this data comes from different sources. + +Note that all the dataclasses are frozen: editing these data +structures is not in scope for this library, it is only intended +to move these files around and compare them to each other. +""" +import dataclasses +import datetime +import enum +import typing + +from pcdscalc.pmps import get_bitmask_desc + +from .beam_class import summarize_beam_class_bitmask + +# "Raw" data types are the presentation of the data as in the json file. +# Most beam params are serialized as str, except for id (int) and special (bool) +RawStateBeamParams = dict[str, typing.Union[str, int, bool]] +# A bunch of raw beam params are collected by name for one device +RawDeviceBeamParams = dict[str, RawStateBeamParams] +# Each device on the plc is collected by name +RawPLCBeamParams = dict[str, RawDeviceBeamParams] +# The json file has the plc name at the top level +RawFileContents = dict[str, RawPLCBeamParams] + +T = typing.TypeVar("T") + + +class BPKeys(enum.Enum): + """ + Mapping from user-visible name to database key for beam parameters. + """ + id = "id" + name_ = "name" + beamline = "beamline" + nbc_range = "nBeamClassRange" + nev_range = "neVRange" + ntran = "nTran" + nrate = "nRate" + aperture_name = "ap_name" + y_gap = "ap_ygap" + y_center = "ap_ycenter" + x_gap = "ap_xgap" + x_center = "ap_xcenter" + damage_limit = "damage_limit" + pulse_energy = "pulse_energy" + notes = "notes" + special = "special" + + +@dataclasses.dataclass(frozen=True) +class BeamParameters: + """ + Struct representation of one state's beam parameters. + + The raw data has most of these as strings, but to make the struct + here we'll convert them to the most natural data types for + comparisons and add additional helpful fields for + human readability. + + The same names as used in the web application are used here, + but all lowercase and with spaces replaced with underscores. 
+ """ + id: int + name: str + beamline: str + nbc_range: int + nbc_range_mask: str + nbc_range_desc: str + nev_range: int + nev_range_mask: str + nev_range_desc: str + ntran: float + nrate: int + aperture_name: str + y_gap: float + y_center: float + x_gap: float + x_center: float + damage_limit: str + pulse_energy: str + notes: str + special: bool + + @classmethod + def from_raw(cls: type[T], data: RawStateBeamParams) -> T: + return cls( + id=data[BPKeys.id], + name=data[BPKeys.name_], + beamline=data[BPKeys.beamline], + nbc_range=int(data[BPKeys.nbc_range]), + nbc_range_mask=data[BPKeys.nbc_range], + nbc_range_desc=summarize_beam_class_bitmask(int(data[BPKeys.nbc_range])), + nev_range=int(data[BPKeys.nev_range]), + nev_range_mask=data[BPKeys.nev_range], + nev_range_desc=get_bitmask_desc(data[BPKeys.nev_range]), + ntran=float(data[BPKeys.ntran]), + aperture_name=data[BPKeys.aperture_name], + y_gap=float(data[BPKeys.y_gap]), + y_center=float(data[BPKeys.y_center]), + x_gap=float(data[BPKeys.x_gap]), + x_center=float(data[BPKeys.x_center]), + damage_limit=data[BPKeys.damage_limit], + pulse_energy=data[BPKeys.pulse_energy], + notes=data[BPKeys.notes], + special=data[BPKeys.special], + ) + + +@dataclasses.dataclass(frozen=True) +class DeviceBeamParams: + """ + The beam parameters associated with one device. + + One device may have an arbitrary number of states. + """ + device_name: str + state_beam_params: dict[str, BeamParameters] + + @classmethod + def from_raw(cls: type[T], device_name: str, data: RawDeviceBeamParams) -> T: + return cls( + device_name=device_name, + state_beam_params={ + key: BeamParameters.from_raw(value) + for key, value in data.items() + } + ) + + +@dataclasses.dataclass(frozen=True) +class FileContents: + """ + The contents of one file. + + Each file is associated with exactly one plc hostname + and can contain an arbitrary number of devices. + """ + plc_name: str + device_beam_params: dict[str, DeviceBeamParams] + + @classmethod + def from_raw(cls: type[T], data: RawFileContents) -> T: + return cls( + plc_name=next(data.keys()), + device_beam_params={ + key: DeviceBeamParams.from_raw(key, value) + for key, value in data.items() + } + ) + + +@dataclasses.dataclass(frozen=True) +class FileInfo: + """ + Generalized file info. + + The fields are based on *nix systems, but this will + also be used for windows systems too. + + This class has no constructor helpers here. + Each data source will need to implement a unique + constructor for this. + """ + filename: str + directory: str + server: str + is_directory: bool + permissions: str + links: int + user: str + group: str + size: int + last_changed: datetime.datetime + raw_contents: RawFileContents + contents: FileContents diff --git a/pmpsdb_client/plc_data.py b/pmpsdb_client/plc_data.py new file mode 100644 index 0000000..6716b94 --- /dev/null +++ b/pmpsdb_client/plc_data.py @@ -0,0 +1,7 @@ +""" +Get file info from, upload files to, and download files from the PLCs. + +This calls methods from ssh_data and ftp_data as appropriate. +When the system is configured correctly, exactly one of these submodules +should work for getting data from the PLC. 
+""" diff --git a/pmpsdb_client/ssh_data.py b/pmpsdb_client/ssh_data.py index bf33574..ccf3b5a 100644 --- a/pmpsdb_client/ssh_data.py +++ b/pmpsdb_client/ssh_data.py @@ -76,17 +76,16 @@ class FileInfo: last_changed: datetime.datetime filename: str - @classmethod - def get_output_lines(cls, conn: Connection) -> str: + @staticmethod + def get_output_lines(conn: Connection) -> str: return conn.run("ls -l -D %s", hide=True).stdout @classmethod - def from_all_output_lines(cls: T, output_lines) -> list[T]: + def from_all_output_lines(cls: type[T], output_lines) -> list[T]: return [cls.from_output_line(line) for line in output_lines.strip().split("\n")[1:]] @classmethod - def from_output_line(cls: T, output: str) -> T: - print(output) + def from_output_line(cls: type[T], output: str) -> T: type_perms, links, user, group, size, date, filename = output.strip().split() return cls( @@ -101,7 +100,7 @@ def from_output_line(cls: T, output: str) -> T: ) -def get_file_info( +def list_file_info( hostname: str, directory: typing.Optional[str] = None, ) -> list[FileInfo]: From 0ab31df47c4caa83cf0434360f26f70c52866ce7 Mon Sep 17 00:00:00 2001 From: Zachary Lentz Date: Tue, 19 Mar 2024 15:53:01 -0700 Subject: [PATCH 06/17] ENH: implement basic file operations for ssh that work on a real PLC --- pmpsdb_client/ftp_data.py | 4 +- pmpsdb_client/ssh_data.py | 79 +++++++++++++++++++++++++++++++++++---- 2 files changed, 75 insertions(+), 8 deletions(-) diff --git a/pmpsdb_client/ftp_data.py b/pmpsdb_client/ftp_data.py index 9a76e13..d7467e7 100644 --- a/pmpsdb_client/ftp_data.py +++ b/pmpsdb_client/ftp_data.py @@ -230,7 +230,9 @@ def upload_filename( hostname : str The plc hostname to upload to. filename : str - The name of the file on both your filesystem and on the PLC. + The name of the file on your filesystem. + dest_filename : str, optional + The name of the file on the PLC. If omitted, same as filename. directory : str, optional The ftp subdirectory to read and write from A default directory pmps is used if this argument is omitted. diff --git a/pmpsdb_client/ssh_data.py b/pmpsdb_client/ssh_data.py index ccf3b5a..b89019b 100644 --- a/pmpsdb_client/ssh_data.py +++ b/pmpsdb_client/ssh_data.py @@ -11,15 +11,16 @@ import typing from contextlib import contextmanager from dataclasses import dataclass +from io import StringIO from fabric import Connection logger = logging.getLogger(__name__) DEFAULT_PW = ( - ("Administrator", "1"), + ("ecs-user", "1"), ) -DIRECTORY = "/Hard Disk/ftp/pmps" +DIRECTORY = "/home/{user}/pmpsdb" T = typing.TypeVar("T") @@ -28,14 +29,13 @@ def ssh( hostname: str, directory: typing.Optional[str] = None, -) -> Connection: +) -> typing.Iterator[Connection]: """ Context manager to handle a single ssh connection. Within one connection we can do any number of remote operations on the TCBSD PLC. """ - directory = directory or DIRECTORY connected = False excs = [] @@ -51,6 +51,10 @@ def ssh( excs.append(exc) continue connected = True + directory = directory or DIRECTORY.format(user=user) + result = conn.run(f"mkdir -p {directory}") + if result.exited != 0: + raise RuntimeError(f"Failed to create directory {directory}") with conn.cd(directory): yield conn if not connected: @@ -117,10 +121,71 @@ def list_file_info( Returns ------- - filenames : list of str - The filenames on the PLC. + filenames : list of FileInfo + Information about all the files in the PLC's pmps folder. 
""" - logger.debug("list_filenames(%s, %s)", hostname, directory) + logger.debug("list_file_info(%s, %s)", hostname, directory) with ssh(hostname=hostname, directory=directory) as conn: output = FileInfo.get_output_lines(conn) return FileInfo.from_all_output_lines(output) + + +def upload_filename( + hostname: str, + filename: str, + dest_filename: typing.Optional[str] = None, + directory: typing.Optional[str] = None, +): + """ + Open and upload a file on your filesystem to a PLC. + + Parameters + ---------- + hostname : str + The plc hostname to upload to. + filename : str + The name of the file on your filesystem. + dest_filename : str, optional + The name of the file on the PLC. If omitted, same as filename. + directory : str, optional + The ssh subdirectory to read and write from + A default directory /home/ecs-user/pmpsdb is used if this argument is omitted. + """ + logger.debug("upload_filename(%s, %s, %s, %s)", hostname, filename, dest_filename, directory) + if dest_filename is None: + dest_filename = filename + with ssh(hostname=hostname, directory=directory) as conn: + conn.put(local=filename, remote=dest_filename) + + +def download_file_text( + hostname: str, + filename: str, + directory: typing.Optional[str] = None, +) -> str: + """ + Download a file from the PLC to use in Python. + + The result is a single string, suitable for operations like + json.loads + + Parameters + ---------- + hostname : str + The plc hostname to download from. + filename : str + The name of the file on the PLC. + directory : str, optional + The ssh subdirectory to read and write from + A default directory /home/ecs-user/pmpsdb is used if this argument is omitted. + + Returns + ------- + text: str + The contents from the file. + """ + logger.debug("download_file_text(%s, %s, %s)", hostname, filename, directory) + stringio = StringIO() + with ssh(hostname=hostname, directory=directory) as conn: + conn.get(remote=filename, local=stringio) + return stringio.getvalue() From 3bdf6ebc07338b7c25b71c7675fc1c2fd5486523 Mon Sep 17 00:00:00 2001 From: Zachary Lentz Date: Tue, 19 Mar 2024 16:52:03 -0700 Subject: [PATCH 07/17] ENH: first pass as dynamically picking ssh data or ftp data --- pmpsdb_client/cli/transfer_tools.py | 16 +- pmpsdb_client/ftp_data.py | 124 +------------- pmpsdb_client/gui.py | 9 +- pmpsdb_client/plc_data.py | 255 ++++++++++++++++++++++++++++ pmpsdb_client/pmpsdb_tst.yml | 2 + pmpsdb_client/ssh_data.py | 16 +- 6 files changed, 291 insertions(+), 131 deletions(-) diff --git a/pmpsdb_client/cli/transfer_tools.py b/pmpsdb_client/cli/transfer_tools.py index 2caeaf5..021d6b1 100644 --- a/pmpsdb_client/cli/transfer_tools.py +++ b/pmpsdb_client/cli/transfer_tools.py @@ -7,7 +7,7 @@ from typing import Optional from ..export_data import ExportFile, get_latest_exported_files -from ..ftp_data import (compare_file, download_file_text, list_file_info, +from ..plc_data import (compare_file, download_file_text, list_file_info, upload_filename) logger = logging.getLogger(__name__) @@ -21,10 +21,16 @@ def cli_list_files(args: argparse.Namespace) -> int: def _list_files(hostname: str) -> int: infos = list_file_info(hostname=hostname) for data in infos: - print( - f'{data.filename} uploaded at {data.create_time.ctime()} ' - f'({data.size} bytes)' - ) + try: + print( + f'{data.filename} uploaded at {data.create_time.ctime()} ' + f'({data.size} bytes)' + ) + except AttributeError: + print( + f'{data.filename} uploaded at {data.last_changed.ctime()} ' + f'({data.size} bytes)' + ) if not infos: logger.warning('No 
files found') return 0 diff --git a/pmpsdb_client/ftp_data.py b/pmpsdb_client/ftp_data.py index d7467e7..dee8d1c 100644 --- a/pmpsdb_client/ftp_data.py +++ b/pmpsdb_client/ftp_data.py @@ -9,12 +9,11 @@ import datetime import ftplib -import json import logging import os -import typing from contextlib import contextmanager from dataclasses import dataclass +from typing import BinaryIO, Iterator DEFAULT_PW = ( ('Administrator', '1'), @@ -26,7 +25,7 @@ @contextmanager -def ftp(hostname: str, directory: typing.Optional[str] = None) -> ftplib.FTP: +def ftp(hostname: str, directory: str | None = None) -> Iterator[ftplib.FTP]: """ Context manager that manages an FTP connection. @@ -86,7 +85,7 @@ def ftp(hostname: str, directory: typing.Optional[str] = None) -> ftplib.FTP: def list_filenames( hostname: str, - directory: typing.Optional[str] = None, + directory: str | None = None, ) -> list[str]: """ List the filenames that are currently saved on the PLC. @@ -157,7 +156,7 @@ def from_list_line(cls, line: str) -> PLCFile: def list_file_info( hostname: str, - directory: typing.Optional[str] = None, + directory: str | None = None, ) -> list[PLCFile]: """ Gather pertinent information about all the files. @@ -186,8 +185,8 @@ def list_file_info( def upload_file( hostname: str, target_filename: str, - fd: typing.BinaryIO, - directory: typing.Optional[str] = None, + fd: BinaryIO, + directory: str | None = None, ): """ Upload an open file to a PLC. @@ -219,8 +218,8 @@ def upload_file( def upload_filename( hostname: str, filename: str, - dest_filename: typing.Optional[str] = None, - directory: typing.Optional[str] = None, + dest_filename: str | None = None, + directory: str | None = None, ): """ Open and upload a file on your filesystem to a PLC. @@ -256,7 +255,7 @@ def upload_filename( def download_file_text( hostname: str, filename: str, - directory: typing.Optional[str] = None, + directory: str | None = None, ) -> str: """ Download a file from the PLC to use in Python. @@ -292,108 +291,3 @@ def download_file_text( for chunk in byte_chunks: contents += chunk.decode('ascii') return contents - - -def download_file_json_dict( - hostname: str, - filename: str, - directory: typing.Optional[str] = None, -) -> dict[str, dict[str, typing.Any]]: - """ - Download a file from the PLC and interpret it as a json dictionary. - - The result is suitable for comparing to json blobs exported from the - pmps database. - - Parameters - ---------- - hostname : str - The plc hostname to download from. - filename : str - The name of the file on the PLC. - directory : str, optional - The ftp subdirectory to read and write from - A default directory pmps is used if this argument is omitted. - - Returns - ------- - data : dict - The dictionary data from the file stored on the plc. - """ - logger.debug( - 'download_file_json_dict(%s, %s, %s)', - hostname, - filename, - directory, - ) - return json.loads( - download_file_text( - hostname=hostname, - filename=filename, - directory=directory, - ) - ) - - -def local_file_json_dict(filename: str) -> dict[str, dict[str, typing.Any]]: - """ - Return the json dict from a local file. - - Suitable for comparisons to files from the database or from the plc. - - Parameters - ---------- - filename : str - The name of the file on the local filesystem. - - Returns - ------- - data : dict - The dictionary data from the file stored on the local drive. 
- """ - logger.debug('local_file_json_dict(%s)', filename) - with open(filename, 'r') as fd: - return json.load(fd) - - -def compare_file( - hostname: str, - local_filename: str, - plc_filename: typing.Optional[str] = None, - directory: typing.Optional[str] = None, -) -> bool: - """ - Compare a file saved locally to one on the PLC. - - Parameters - ---------- - hostname : str - The plc hostname to download from. - local_filename: str - The full path the local file to compare with. - plc_filename: str, optional - The filename as saved on the PLC. If omitted, the local_filename's - basename will be used. - directory : str, optional - The ftp subdirectory to read and write from - A default directory pmps is used if this argument is omitted. - - Returns - ------- - same_file : bool - True if the contents of these two files are the same. - """ - logger.debug( - 'compare_file(%s, %s, %s, %s)', - hostname, - local_filename, - plc_filename, - directory, - ) - local_data = local_file_json_dict(filename=local_filename) - plc_data = download_file_json_dict( - hostname=hostname, - filename=plc_filename, - directory=directory, - ) - return local_data == plc_data diff --git a/pmpsdb_client/gui.py b/pmpsdb_client/gui.py index ea1ff50..49e7ae5 100644 --- a/pmpsdb_client/gui.py +++ b/pmpsdb_client/gui.py @@ -28,9 +28,9 @@ from .beam_class import summarize_beam_class_bitmask from .export_data import ExportFile, get_export_dir, get_latest_exported_files -from .ftp_data import (download_file_json_dict, download_file_text, - list_file_info, upload_filename) from .ioc_data import AllStateBP, PLCDBControls +from .plc_data import (download_file_json_dict, download_file_text, + list_file_info, upload_filename) logger = logging.getLogger(__name__) @@ -578,7 +578,10 @@ def update_plc_row(self, row: int, update_export: bool = True) -> None: filename = hostname_to_filename(hostname) for file_info in info: if file_info.filename == filename: - text = file_info.create_time.ctime() + try: + text = file_info.create_time.ctime() + except AttributeError: + text = file_info.last_changed.ctime() break self.plc_table.item(row, PLCTableColumns.UPLOAD).setText(text) if update_export: diff --git a/pmpsdb_client/plc_data.py b/pmpsdb_client/plc_data.py index 6716b94..436887c 100644 --- a/pmpsdb_client/plc_data.py +++ b/pmpsdb_client/plc_data.py @@ -5,3 +5,258 @@ When the system is configured correctly, exactly one of these submodules should work for getting data from the PLC. """ +from __future__ import annotations + +import enum +import json +import logging +from typing import Any + +from . import ftp_data, ssh_data + +logger = logging.getLogger(__name__) +plc_mapping: dict[str, DataMethod] = {} + + +class DataMethod(enum.Enum): + ssh = enum.auto() + ftp = enum.auto() + + +def get_data_method(hostname: str, directory: str | None = None) -> DataMethod: + try: + return plc_mapping[hostname] + except KeyError: + ... + try: + ssh_data.list_file_info(hostname=hostname, directory=directory) + except Exception: + try: + ftp_data.list_file_info(hostname=hostname, directory=directory) + except Exception: + raise RuntimeError(f"Cannot get data method for {hostname}") + else: + plc_mapping[hostname] = DataMethod.ftp + return DataMethod.ftp + else: + plc_mapping[hostname] = DataMethod.ssh + return DataMethod.ssh + + +def list_file_info( + hostname: str, + directory: str | None = None, +) -> list[ssh_data.FileInfo | ftp_data.PLCFile]: + """ + Get information about the files that are currently saved on the PLC. 
+ + Parameters + ---------- + hostname : str + The plc hostname to check. + directory : str, optional + The diretory to read and write from. + A default directory is used if this argument is omitted, + which depends on the PLC OS. + + Returns + ------- + filenames : list of FileInfo or PLCFile + Information about all the files in the PLC's pmps folder. + """ + data_method = get_data_method(hostname=hostname, directory=directory) + if data_method == DataMethod.ssh: + return ssh_data.list_file_info(hostname=hostname, directory=directory) + elif data_method == DataMethod.ftp: + return ftp_data.list_file_info(hostname=hostname, directory=directory) + else: + raise RuntimeError(f"Unhandled data method {data_method}") + + +def upload_filename( + hostname: str, + filename: str, + dest_filename: str | None = None, + directory: str | None = None, +): + """ + Open and upload a file on your filesystem to a PLC. + + Parameters + ---------- + hostname : str + The plc hostname to upload to. + filename : str + The name of the file on your filesystem. + dest_filename : str, optional + The name of the file on the PLC. If omitted, same as filename. + directory : str, optional + The ssh subdirectory to read and write from + A default directory is used if this argument is omitted, + which depends on the PLC OS. + """ + data_method = get_data_method(hostname=hostname, directory=directory) + if data_method == DataMethod.ssh: + return ssh_data.upload_filename( + hostname=hostname, + filename=filename, + dest_filename=dest_filename, + directory=directory, + ) + elif data_method == DataMethod.ftp: + return ftp_data.upload_filename( + hostname=hostname, + filename=filename, + dest_filename=dest_filename, + directory=directory, + ) + else: + raise RuntimeError(f"Unhandled data method {data_method}") + + +def download_file_text( + hostname: str, + filename: str, + directory: str | None = None, +) -> str: + """ + Download a file from the PLC to use in Python. + + The result is a single string, suitable for operations like + json.loads + + Parameters + ---------- + hostname : str + The plc hostname to download from. + filename : str + The name of the file on the PLC. + directory : str, optional + The ssh subdirectory to read and write from + A default directory /home/ecs-user/pmpsdb is used if this argument is omitted. + + Returns + ------- + text: str + The contents from the file. + """ + data_method = get_data_method(hostname=hostname, directory=directory) + if data_method == DataMethod.ssh: + return ssh_data.download_file_text( + hostname=hostname, + filename=filename, + directory=directory, + ) + elif data_method == DataMethod.ftp: + return ftp_data.download_file_text( + hostname=hostname, + filename=filename, + directory=directory, + ) + else: + raise RuntimeError(f"Unhandled data method {data_method}") + + +def download_file_json_dict( + hostname: str, + filename: str, + directory: str | None = None, +) -> dict[str, dict[str, Any]]: + """ + Download a file from the PLC and interpret it as a json dictionary. + + The result is suitable for comparing to json blobs exported from the + pmps database. + + Parameters + ---------- + hostname : str + The plc hostname to download from. + filename : str + The name of the file on the PLC. + directory : str, optional + The ftp subdirectory to read and write from + A default directory pmps is used if this argument is omitted. + + Returns + ------- + data : dict + The dictionary data from the file stored on the plc. 
+ """ + logger.debug( + 'download_file_json_dict(%s, %s, %s)', + hostname, + filename, + directory, + ) + return json.loads( + download_file_text( + hostname=hostname, + filename=filename, + directory=directory, + ) + ) + + +def local_file_json_dict(filename: str) -> dict[str, dict[str, Any]]: + """ + Return the json dict from a local file. + + Suitable for comparisons to files from the database or from the plc. + + Parameters + ---------- + filename : str + The name of the file on the local filesystem. + + Returns + ------- + data : dict + The dictionary data from the file stored on the local drive. + """ + logger.debug('local_file_json_dict(%s)', filename) + with open(filename, 'r') as fd: + return json.load(fd) + + +def compare_file( + hostname: str, + local_filename: str, + plc_filename: str | None = None, + directory: str | None = None, +) -> bool: + """ + Compare a file saved locally to one on the PLC. + + Parameters + ---------- + hostname : str + The plc hostname to download from. + local_filename: str + The full path the local file to compare with. + plc_filename: str, optional + The filename as saved on the PLC. If omitted, the local_filename's + basename will be used. + directory : str, optional + The ftp subdirectory to read and write from + A default directory pmps is used if this argument is omitted. + + Returns + ------- + same_file : bool + True if the contents of these two files are the same. + """ + logger.debug( + 'compare_file(%s, %s, %s, %s)', + hostname, + local_filename, + plc_filename, + directory, + ) + local_data = local_file_json_dict(filename=local_filename) + plc_data = download_file_json_dict( + hostname=hostname, + filename=plc_filename, + directory=directory, + ) + return local_data == plc_data diff --git a/pmpsdb_client/pmpsdb_tst.yml b/pmpsdb_client/pmpsdb_tst.yml index ec0334e..3da221f 100644 --- a/pmpsdb_client/pmpsdb_tst.yml +++ b/pmpsdb_client/pmpsdb_tst.yml @@ -1,3 +1,5 @@ +plc-tst-bsd1: "PLC:TST:BSD1" +plc-tst-bsd2: "PLC:TST:BSD2" plc-tst-motion: "PLC:TST:MOT" plc-tst-pmps-subsytem-a: "PLC:TST:PMPS:A" plc-tst-pmps-subsytem-b: "PLC:TST:PMPS:B" diff --git a/pmpsdb_client/ssh_data.py b/pmpsdb_client/ssh_data.py index b89019b..bd4f6aa 100644 --- a/pmpsdb_client/ssh_data.py +++ b/pmpsdb_client/ssh_data.py @@ -8,10 +8,10 @@ import datetime import logging -import typing from contextlib import contextmanager from dataclasses import dataclass from io import StringIO +from typing import Iterator, TypeVar from fabric import Connection @@ -22,14 +22,14 @@ ) DIRECTORY = "/home/{user}/pmpsdb" -T = typing.TypeVar("T") +T = TypeVar("T") @contextmanager def ssh( hostname: str, - directory: typing.Optional[str] = None, -) -> typing.Iterator[Connection]: + directory: str | None = None, +) -> Iterator[Connection]: """ Context manager to handle a single ssh connection. @@ -106,7 +106,7 @@ def from_output_line(cls: type[T], output: str) -> T: def list_file_info( hostname: str, - directory: typing.Optional[str] = None, + directory: str | None = None, ) -> list[FileInfo]: """ Get information about the files that are currently saved on the PLC. @@ -133,8 +133,8 @@ def list_file_info( def upload_filename( hostname: str, filename: str, - dest_filename: typing.Optional[str] = None, - directory: typing.Optional[str] = None, + dest_filename: str | None = None, + directory: str | None = None, ): """ Open and upload a file on your filesystem to a PLC. 
@@ -161,7 +161,7 @@ def upload_filename( def download_file_text( hostname: str, filename: str, - directory: typing.Optional[str] = None, + directory: str | None = None, ) -> str: """ Download a file from the PLC to use in Python. From 16dfa21ac9b23166fdf1d282ec7abece1796f6f2 Mon Sep 17 00:00:00 2001 From: Zachary Lentz Date: Wed, 20 Mar 2024 11:58:02 -0700 Subject: [PATCH 08/17] ENH: remove redundant ssh in cases where we only need to check the file list --- pmpsdb_client/plc_data.py | 41 +++++++++++++++++++++++++-------------- 1 file changed, 26 insertions(+), 15 deletions(-) diff --git a/pmpsdb_client/plc_data.py b/pmpsdb_client/plc_data.py index 436887c..6067918 100644 --- a/pmpsdb_client/plc_data.py +++ b/pmpsdb_client/plc_data.py @@ -21,26 +21,19 @@ class DataMethod(enum.Enum): ssh = enum.auto() ftp = enum.auto() + unk = enum.auto() def get_data_method(hostname: str, directory: str | None = None) -> DataMethod: + """ + For functions other than list_file_info: pick the data method from the cache, + or by calling list_file_info. + """ try: return plc_mapping[hostname] except KeyError: - ... - try: - ssh_data.list_file_info(hostname=hostname, directory=directory) - except Exception: - try: - ftp_data.list_file_info(hostname=hostname, directory=directory) - except Exception: - raise RuntimeError(f"Cannot get data method for {hostname}") - else: - plc_mapping[hostname] = DataMethod.ftp - return DataMethod.ftp - else: - plc_mapping[hostname] = DataMethod.ssh - return DataMethod.ssh + list_file_info(hostname=hostname, directory=directory) + return plc_mapping[hostname] def list_file_info( @@ -64,11 +57,29 @@ def list_file_info( filenames : list of FileInfo or PLCFile Information about all the files in the PLC's pmps folder. """ - data_method = get_data_method(hostname=hostname, directory=directory) + try: + data_method = plc_mapping[hostname] + except KeyError: + data_method = DataMethod.unk + if data_method == DataMethod.ssh: return ssh_data.list_file_info(hostname=hostname, directory=directory) elif data_method == DataMethod.ftp: return ftp_data.list_file_info(hostname=hostname, directory=directory) + elif data_method == DataMethod.unk: + try: + file_info = ssh_data.list_file_info(hostname=hostname, directory=directory) + except Exception: + try: + file_info = ftp_data.list_file_info(hostname=hostname, directory=directory) + except Exception: + raise RuntimeError(f"Cannot get data method for {hostname}") + else: + plc_mapping[hostname] = DataMethod.ftp + return file_info + else: + plc_mapping[hostname] = DataMethod.ssh + return file_info else: raise RuntimeError(f"Unhandled data method {data_method}") From c8c4af2f288e4e184c46aee878459f93210f5109 Mon Sep 17 00:00:00 2001 From: Zachary Lentz Date: Wed, 20 Mar 2024 14:04:10 -0700 Subject: [PATCH 09/17] ENH: clean up ssh connection log messages and force only password tries --- pmpsdb_client/cli/__init__.py | 3 +++ pmpsdb_client/ssh_data.py | 15 ++++++++++++++- 2 files changed, 17 insertions(+), 1 deletion(-) diff --git a/pmpsdb_client/cli/__init__.py b/pmpsdb_client/cli/__init__.py index f228b23..1d38b06 100644 --- a/pmpsdb_client/cli/__init__.py +++ b/pmpsdb_client/cli/__init__.py @@ -53,6 +53,9 @@ def _main(args: argparse.Namespace) -> int: level=logging.INFO, format='%(levelname)s: %(message)s', ) + # Noisy log messages from ssh transport layer + for module in ("fabric", "paramiko", "intake"): + logging.getLogger(module).setLevel(logging.WARNING) if args.export_dir: from ..export_data import set_export_dir set_export_dir(args.export_dir) 
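The ssh_data.py changes below force password-only authentication so that a connection attempt to a PLC fails fast instead of cycling through agent keys and identity files the PLCs will never accept. A minimal sketch of the equivalent behavior at the paramiko level, using the default credentials from this module (illustrative only; the patch itself configures this through fabric's Connection and an in-memory SSHConfig):

import paramiko

client = paramiko.SSHClient()
client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
# Password auth only: skip the ssh agent and any local identity files.
client.connect(
    "plc-tst-bsd1",
    username="ecs-user",
    password="1",
    allow_agent=False,
    look_for_keys=False,
)
client.close()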
diff --git a/pmpsdb_client/ssh_data.py b/pmpsdb_client/ssh_data.py index bd4f6aa..ef0c1e2 100644 --- a/pmpsdb_client/ssh_data.py +++ b/pmpsdb_client/ssh_data.py @@ -14,6 +14,8 @@ from typing import Iterator, TypeVar from fabric import Connection +from fabric.config import Config +from paramiko.config import SSHConfig logger = logging.getLogger(__name__) @@ -21,6 +23,13 @@ ("ecs-user", "1"), ) DIRECTORY = "/home/{user}/pmpsdb" +SSH_CONFIG = """ +Host * + ForwardAgent no + ForwardX11 no + ForwardX11Trusted no + PreferredAuthentications=password +""" T = TypeVar("T") @@ -43,7 +52,11 @@ def ssh( with Connection( host=hostname, user=user, - connect_kwargs={"password": pw} + config=Config(ssh_config=SSHConfig.from_text(SSH_CONFIG)), + connect_kwargs={ + "password": pw, + "allow_agent": False, + }, ) as conn: try: conn.open() From 601fa2dcfc7e698719eb379ad35bd8de372db9a5 Mon Sep 17 00:00:00 2001 From: Zachary Lentz Date: Wed, 20 Mar 2024 14:36:30 -0700 Subject: [PATCH 10/17] FIX: learn how to handle cwd/cd for put/get connections --- pmpsdb_client/ssh_data.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/pmpsdb_client/ssh_data.py b/pmpsdb_client/ssh_data.py index ef0c1e2..3335b4f 100644 --- a/pmpsdb_client/ssh_data.py +++ b/pmpsdb_client/ssh_data.py @@ -11,6 +11,7 @@ from contextlib import contextmanager from dataclasses import dataclass from io import StringIO +from pathlib import Path from typing import Iterator, TypeVar from fabric import Connection @@ -68,6 +69,8 @@ def ssh( result = conn.run(f"mkdir -p {directory}") if result.exited != 0: raise RuntimeError(f"Failed to create directory {directory}") + # Note: conn.cd only affects calls to conn.run, not conn.get or conn.put + # Use conn.cwd property to check this live with conn.cd(directory): yield conn if not connected: @@ -168,7 +171,9 @@ def upload_filename( if dest_filename is None: dest_filename = filename with ssh(hostname=hostname, directory=directory) as conn: - conn.put(local=filename, remote=dest_filename) + if directory is None: + directory = conn.cwd + conn.put(local=filename, remote=str(Path(directory) / dest_filename)) def download_file_text( @@ -200,5 +205,7 @@ def download_file_text( logger.debug("download_file_text(%s, %s, %s)", hostname, filename, directory) stringio = StringIO() with ssh(hostname=hostname, directory=directory) as conn: - conn.get(remote=filename, local=stringio) + if directory is None: + directory = conn.cwd + conn.get(remote=str(Path(directory) / filename), local=stringio) return stringio.getvalue() From 86919fe3573ffe94dea74f0ffac904fe82649ff1 Mon Sep 17 00:00:00 2001 From: Zachary Lentz Date: Wed, 20 Mar 2024 14:41:40 -0700 Subject: [PATCH 11/17] FIX: interface was expecting bytes, use BytesIO and decode ourselves. --- pmpsdb_client/ssh_data.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/pmpsdb_client/ssh_data.py b/pmpsdb_client/ssh_data.py index 3335b4f..83151aa 100644 --- a/pmpsdb_client/ssh_data.py +++ b/pmpsdb_client/ssh_data.py @@ -10,7 +10,7 @@ import logging from contextlib import contextmanager from dataclasses import dataclass -from io import StringIO +from io import BytesIO from pathlib import Path from typing import Iterator, TypeVar @@ -203,9 +203,9 @@ def download_file_text( The contents from the file. 
""" logger.debug("download_file_text(%s, %s, %s)", hostname, filename, directory) - stringio = StringIO() + bytesio = BytesIO() with ssh(hostname=hostname, directory=directory) as conn: if directory is None: directory = conn.cwd - conn.get(remote=str(Path(directory) / filename), local=stringio) - return stringio.getvalue() + conn.get(remote=str(Path(directory) / filename), local=bytesio) + return bytesio.getvalue().decode(encoding="utf-8") From 08c058e2626d2190919b94f527c5a9143a861660 Mon Sep 17 00:00:00 2001 From: Zachary Lentz Date: Wed, 20 Mar 2024 15:45:09 -0700 Subject: [PATCH 12/17] MAINT: verbose logging flow for method selector to help me debug --- pmpsdb_client/cli/__init__.py | 5 ++++- pmpsdb_client/plc_data.py | 6 ++++++ 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/pmpsdb_client/cli/__init__.py b/pmpsdb_client/cli/__init__.py index 1d38b06..3ed958f 100644 --- a/pmpsdb_client/cli/__init__.py +++ b/pmpsdb_client/cli/__init__.py @@ -47,7 +47,10 @@ def _main(args: argparse.Namespace) -> int: print(version) return 0 if args.verbose: - logging.basicConfig(level=logging.DEBUG) + logging.basicConfig( + level=logging.DEBUG, + format="%(asctime)s %(levelname)s: %(name)s %(message)s", + ) else: logging.basicConfig( level=logging.INFO, diff --git a/pmpsdb_client/plc_data.py b/pmpsdb_client/plc_data.py index 6067918..b7226bc 100644 --- a/pmpsdb_client/plc_data.py +++ b/pmpsdb_client/plc_data.py @@ -63,21 +63,27 @@ def list_file_info( data_method = DataMethod.unk if data_method == DataMethod.ssh: + logger.debug("Using cached ssh method to list files for %s", hostname) return ssh_data.list_file_info(hostname=hostname, directory=directory) elif data_method == DataMethod.ftp: + logger.debug("Using cached ftp method to list files for %s", hostname) return ftp_data.list_file_info(hostname=hostname, directory=directory) elif data_method == DataMethod.unk: + logger.debug("Connection method unknown, check if ssh method works for %s", hostname) try: file_info = ssh_data.list_file_info(hostname=hostname, directory=directory) except Exception: + logger.debug("ssh failed, check if ftp method works for %s", hostname) try: file_info = ftp_data.list_file_info(hostname=hostname, directory=directory) except Exception: raise RuntimeError(f"Cannot get data method for {hostname}") else: + logger.debug("Cache %s method as ftp", hostname) plc_mapping[hostname] = DataMethod.ftp return file_info else: + logger.debug("Cache %s method as ssh", hostname) plc_mapping[hostname] = DataMethod.ssh return file_info else: From 35ab3624fee0b9f5b42f30ec37e65c175cce1b96 Mon Sep 17 00:00:00 2001 From: Zachary Lentz Date: Wed, 20 Mar 2024 15:52:33 -0700 Subject: [PATCH 13/17] FIX: set a low connect timeout to avoid hanging too long on ce plcs --- pmpsdb_client/ssh_data.py | 1 + 1 file changed, 1 insertion(+) diff --git a/pmpsdb_client/ssh_data.py b/pmpsdb_client/ssh_data.py index 83151aa..c501c07 100644 --- a/pmpsdb_client/ssh_data.py +++ b/pmpsdb_client/ssh_data.py @@ -54,6 +54,7 @@ def ssh( host=hostname, user=user, config=Config(ssh_config=SSHConfig.from_text(SSH_CONFIG)), + connect_timeout=1, connect_kwargs={ "password": pw, "allow_agent": False, From ebb7395cb46a8618f010e634c270db0e9c21cb02 Mon Sep 17 00:00:00 2001 From: Zachary Lentz Date: Wed, 20 Mar 2024 16:25:48 -0700 Subject: [PATCH 14/17] ENH: fail faster for offline PLCs --- pmpsdb_client/ftp_data.py | 2 +- pmpsdb_client/plc_data.py | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/pmpsdb_client/ftp_data.py b/pmpsdb_client/ftp_data.py 
index dee8d1c..e7e1b49 100644 --- a/pmpsdb_client/ftp_data.py +++ b/pmpsdb_client/ftp_data.py @@ -49,7 +49,7 @@ def ftp(hostname: str, directory: str | None = None) -> Iterator[ftplib.FTP]: # Default directory directory = directory or DIRECTORY # Create without connecting - ftp_obj = ftplib.FTP(hostname, timeout=2.0) + ftp_obj = ftplib.FTP(hostname, timeout=1.0) # Beckhoff docs recommend active mode ftp_obj.set_pasv(False) # Best-effort login using default passwords diff --git a/pmpsdb_client/plc_data.py b/pmpsdb_client/plc_data.py index b7226bc..12633b9 100644 --- a/pmpsdb_client/plc_data.py +++ b/pmpsdb_client/plc_data.py @@ -77,6 +77,7 @@ def list_file_info( try: file_info = ftp_data.list_file_info(hostname=hostname, directory=directory) except Exception: + logger.debug("ftp failed too, maybe %s is offline or not set up", hostname) raise RuntimeError(f"Cannot get data method for {hostname}") else: logger.debug("Cache %s method as ftp", hostname) From 9abd4c057a1ec4289c1a3b243b1b327acc8831e0 Mon Sep 17 00:00:00 2001 From: Zachary Lentz Date: Fri, 22 Mar 2024 16:56:34 -0700 Subject: [PATCH 15/17] ENH: merge the file info classes, remove unused data types for now --- pmpsdb_client/cli/transfer_tools.py | 14 +-- pmpsdb_client/data_types.py | 154 +--------------------------- pmpsdb_client/ftp_data.py | 27 ++--- pmpsdb_client/gui.py | 5 +- pmpsdb_client/plc_data.py | 5 +- pmpsdb_client/ssh_data.py | 17 +-- 6 files changed, 32 insertions(+), 190 deletions(-) diff --git a/pmpsdb_client/cli/transfer_tools.py b/pmpsdb_client/cli/transfer_tools.py index 021d6b1..63d120d 100644 --- a/pmpsdb_client/cli/transfer_tools.py +++ b/pmpsdb_client/cli/transfer_tools.py @@ -21,16 +21,10 @@ def cli_list_files(args: argparse.Namespace) -> int: def _list_files(hostname: str) -> int: infos = list_file_info(hostname=hostname) for data in infos: - try: - print( - f'{data.filename} uploaded at {data.create_time.ctime()} ' - f'({data.size} bytes)' - ) - except AttributeError: - print( - f'{data.filename} uploaded at {data.last_changed.ctime()} ' - f'({data.size} bytes)' - ) + print( + f'{data.filename} uploaded at {data.last_changed.ctime()} ' + f'({data.size} bytes)' + ) if not infos: logger.warning('No files found') return 0 diff --git a/pmpsdb_client/data_types.py b/pmpsdb_client/data_types.py index 732afe6..9b7b746 100644 --- a/pmpsdb_client/data_types.py +++ b/pmpsdb_client/data_types.py @@ -10,148 +10,6 @@ """ import dataclasses import datetime -import enum -import typing - -from pcdscalc.pmps import get_bitmask_desc - -from .beam_class import summarize_beam_class_bitmask - -# "Raw" data types are the presentation of the data as in the json file. -# Most beam params are serialized as str, except for id (int) and special (bool) -RawStateBeamParams = dict[str, typing.Union[str, int, bool]] -# A bunch of raw beam params are collected by name for one device -RawDeviceBeamParams = dict[str, RawStateBeamParams] -# Each device on the plc is collected by name -RawPLCBeamParams = dict[str, RawDeviceBeamParams] -# The json file has the plc name at the top level -RawFileContents = dict[str, RawPLCBeamParams] - -T = typing.TypeVar("T") - - -class BPKeys(enum.Enum): - """ - Mapping from user-visible name to database key for beam parameters. 
- """ - id = "id" - name_ = "name" - beamline = "beamline" - nbc_range = "nBeamClassRange" - nev_range = "neVRange" - ntran = "nTran" - nrate = "nRate" - aperture_name = "ap_name" - y_gap = "ap_ygap" - y_center = "ap_ycenter" - x_gap = "ap_xgap" - x_center = "ap_xcenter" - damage_limit = "damage_limit" - pulse_energy = "pulse_energy" - notes = "notes" - special = "special" - - -@dataclasses.dataclass(frozen=True) -class BeamParameters: - """ - Struct representation of one state's beam parameters. - - The raw data has most of these as strings, but to make the struct - here we'll convert them to the most natural data types for - comparisons and add additional helpful fields for - human readability. - - The same names as used in the web application are used here, - but all lowercase and with spaces replaced with underscores. - """ - id: int - name: str - beamline: str - nbc_range: int - nbc_range_mask: str - nbc_range_desc: str - nev_range: int - nev_range_mask: str - nev_range_desc: str - ntran: float - nrate: int - aperture_name: str - y_gap: float - y_center: float - x_gap: float - x_center: float - damage_limit: str - pulse_energy: str - notes: str - special: bool - - @classmethod - def from_raw(cls: type[T], data: RawStateBeamParams) -> T: - return cls( - id=data[BPKeys.id], - name=data[BPKeys.name_], - beamline=data[BPKeys.beamline], - nbc_range=int(data[BPKeys.nbc_range]), - nbc_range_mask=data[BPKeys.nbc_range], - nbc_range_desc=summarize_beam_class_bitmask(int(data[BPKeys.nbc_range])), - nev_range=int(data[BPKeys.nev_range]), - nev_range_mask=data[BPKeys.nev_range], - nev_range_desc=get_bitmask_desc(data[BPKeys.nev_range]), - ntran=float(data[BPKeys.ntran]), - aperture_name=data[BPKeys.aperture_name], - y_gap=float(data[BPKeys.y_gap]), - y_center=float(data[BPKeys.y_center]), - x_gap=float(data[BPKeys.x_gap]), - x_center=float(data[BPKeys.x_center]), - damage_limit=data[BPKeys.damage_limit], - pulse_energy=data[BPKeys.pulse_energy], - notes=data[BPKeys.notes], - special=data[BPKeys.special], - ) - - -@dataclasses.dataclass(frozen=True) -class DeviceBeamParams: - """ - The beam parameters associated with one device. - - One device may have an arbitrary number of states. - """ - device_name: str - state_beam_params: dict[str, BeamParameters] - - @classmethod - def from_raw(cls: type[T], device_name: str, data: RawDeviceBeamParams) -> T: - return cls( - device_name=device_name, - state_beam_params={ - key: BeamParameters.from_raw(value) - for key, value in data.items() - } - ) - - -@dataclasses.dataclass(frozen=True) -class FileContents: - """ - The contents of one file. - - Each file is associated with exactly one plc hostname - and can contain an arbitrary number of devices. - """ - plc_name: str - device_beam_params: dict[str, DeviceBeamParams] - - @classmethod - def from_raw(cls: type[T], data: RawFileContents) -> T: - return cls( - plc_name=next(data.keys()), - device_beam_params={ - key: DeviceBeamParams.from_raw(key, value) - for key, value in data.items() - } - ) @dataclasses.dataclass(frozen=True) @@ -159,22 +17,12 @@ class FileInfo: """ Generalized file info. - The fields are based on *nix systems, but this will - also be used for windows systems too. + Only contains fields available to both ftp and ssh. This class has no constructor helpers here. Each data source will need to implement a unique constructor for this. 
""" filename: str - directory: str - server: str - is_directory: bool - permissions: str - links: int - user: str - group: str size: int last_changed: datetime.datetime - raw_contents: RawFileContents - contents: FileContents diff --git a/pmpsdb_client/ftp_data.py b/pmpsdb_client/ftp_data.py index e7e1b49..5db5677 100644 --- a/pmpsdb_client/ftp_data.py +++ b/pmpsdb_client/ftp_data.py @@ -13,7 +13,9 @@ import os from contextlib import contextmanager from dataclasses import dataclass -from typing import BinaryIO, Iterator +from typing import BinaryIO, Iterator, TypeVar + +from .data_types import FileInfo DEFAULT_PW = ( ('Administrator', '1'), @@ -22,6 +24,7 @@ DIRECTORY = 'pmps' logger = logging.getLogger(__name__) +T = TypeVar("T") @contextmanager @@ -108,20 +111,18 @@ def list_filenames( return ftp_obj.nlst() -@dataclass -class PLCFile: +@dataclass(frozen=True) +class FTPFileInfo(FileInfo): """ Information about a file on the PLC as learned through ftp. - In the context of pmps, the create_time is the last time we - updated the database export file. + Contains very few fields: ftp doesn't give us a lot of info. + See data_types.FileInfo for the field information. + This protocol is what limits the amount of fields we can assume + are available when we don't know the PLC's type. """ - filename: str - create_time: datetime.datetime - size: int - @classmethod - def from_list_line(cls, line: str) -> PLCFile: + def from_list_line(cls: type[T], line: str) -> T: """ Create a PLCFile from the output of the ftp LIST command. @@ -149,15 +150,15 @@ def from_list_line(cls, line: str) -> PLCFile: ) return cls( filename=filename, - create_time=full_datetime, size=int(size), + last_changed=full_datetime, ) def list_file_info( hostname: str, directory: str | None = None, -) -> list[PLCFile]: +) -> list[FTPFileInfo]: """ Gather pertinent information about all the files. @@ -179,7 +180,7 @@ def list_file_info( lines = [] with ftp(hostname=hostname, directory=directory) as ftp_obj: ftp_obj.retrlines('LIST', lines.append) - return [PLCFile.from_list_line(line) for line in lines] + return [FTPFileInfo.from_list_line(line) for line in lines] def upload_file( diff --git a/pmpsdb_client/gui.py b/pmpsdb_client/gui.py index 49e7ae5..bcb87cf 100644 --- a/pmpsdb_client/gui.py +++ b/pmpsdb_client/gui.py @@ -578,10 +578,7 @@ def update_plc_row(self, row: int, update_export: bool = True) -> None: filename = hostname_to_filename(hostname) for file_info in info: if file_info.filename == filename: - try: - text = file_info.create_time.ctime() - except AttributeError: - text = file_info.last_changed.ctime() + text = file_info.last_changed.ctime() break self.plc_table.item(row, PLCTableColumns.UPLOAD).setText(text) if update_export: diff --git a/pmpsdb_client/plc_data.py b/pmpsdb_client/plc_data.py index 12633b9..8fc9be3 100644 --- a/pmpsdb_client/plc_data.py +++ b/pmpsdb_client/plc_data.py @@ -13,6 +13,7 @@ from typing import Any from . import ftp_data, ssh_data +from .data_types import FileInfo logger = logging.getLogger(__name__) plc_mapping: dict[str, DataMethod] = {} @@ -39,7 +40,7 @@ def get_data_method(hostname: str, directory: str | None = None) -> DataMethod: def list_file_info( hostname: str, directory: str | None = None, -) -> list[ssh_data.FileInfo | ftp_data.PLCFile]: +) -> list[FileInfo]: """ Get information about the files that are currently saved on the PLC. 
@@ -54,7 +55,7 @@ def list_file_info( Returns ------- - filenames : list of FileInfo or PLCFile + filenames : list of FileInfo Information about all the files in the PLC's pmps folder. """ try: diff --git a/pmpsdb_client/ssh_data.py b/pmpsdb_client/ssh_data.py index c501c07..4ec3763 100644 --- a/pmpsdb_client/ssh_data.py +++ b/pmpsdb_client/ssh_data.py @@ -18,7 +18,7 @@ from fabric.config import Config from paramiko.config import SSHConfig -logger = logging.getLogger(__name__) +from .data_types import FileInfo DEFAULT_PW = ( ("ecs-user", "1"), @@ -34,6 +34,8 @@ T = TypeVar("T") +logger = logging.getLogger(__name__) + @contextmanager def ssh( @@ -84,18 +86,17 @@ def ssh( @dataclass(frozen=True) -class FileInfo: +class SSHFileInfo(FileInfo): """ File information from *nix systems. + + Adds extra detail not available through ftp. """ is_directory: bool permissions: str links: int user: str group: str - size: int - last_changed: datetime.datetime - filename: str @staticmethod def get_output_lines(conn: Connection) -> str: @@ -124,7 +125,7 @@ def from_output_line(cls: type[T], output: str) -> T: def list_file_info( hostname: str, directory: str | None = None, -) -> list[FileInfo]: +) -> list[SSHFileInfo]: """ Get information about the files that are currently saved on the PLC. @@ -143,8 +144,8 @@ def list_file_info( """ logger.debug("list_file_info(%s, %s)", hostname, directory) with ssh(hostname=hostname, directory=directory) as conn: - output = FileInfo.get_output_lines(conn) - return FileInfo.from_all_output_lines(output) + output = SSHFileInfo.get_output_lines(conn) + return SSHFileInfo.from_all_output_lines(output) def upload_filename( From 33ae1642d29e7c8afdfc7740de055a2270cbb046 Mon Sep 17 00:00:00 2001 From: Zachary Lentz Date: Fri, 22 Mar 2024 16:57:30 -0700 Subject: [PATCH 16/17] ENH: simplify/clarify error message to avoid confusion --- pmpsdb_client/plc_data.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pmpsdb_client/plc_data.py b/pmpsdb_client/plc_data.py index 8fc9be3..3c14ce2 100644 --- a/pmpsdb_client/plc_data.py +++ b/pmpsdb_client/plc_data.py @@ -79,7 +79,7 @@ def list_file_info( file_info = ftp_data.list_file_info(hostname=hostname, directory=directory) except Exception: logger.debug("ftp failed too, maybe %s is offline or not set up", hostname) - raise RuntimeError(f"Cannot get data method for {hostname}") + raise RuntimeError(f"Cannot connect to {hostname}") else: logger.debug("Cache %s method as ftp", hostname) plc_mapping[hostname] = DataMethod.ftp From b566325f5b4185206452f38ef507f46f32d82690 Mon Sep 17 00:00:00 2001 From: Zachary Lentz Date: Tue, 26 Mar 2024 09:06:44 -0700 Subject: [PATCH 17/17] DOC: remove copied ftp/ssh assumptions from plc_data docstrings. --- pmpsdb_client/plc_data.py | 19 +++++++++++-------- 1 file changed, 11 insertions(+), 8 deletions(-) diff --git a/pmpsdb_client/plc_data.py b/pmpsdb_client/plc_data.py index 3c14ce2..e4118fa 100644 --- a/pmpsdb_client/plc_data.py +++ b/pmpsdb_client/plc_data.py @@ -97,7 +97,7 @@ def upload_filename( filename: str, dest_filename: str | None = None, directory: str | None = None, -): +) -> None: """ Open and upload a file on your filesystem to a PLC. @@ -110,7 +110,7 @@ def upload_filename( dest_filename : str, optional The name of the file on the PLC. If omitted, same as filename. directory : str, optional - The ssh subdirectory to read and write from + The subdirectory to read and write from A default directory is used if this argument is omitted, which depends on the PLC OS. 
""" @@ -151,8 +151,9 @@ def download_file_text( filename : str The name of the file on the PLC. directory : str, optional - The ssh subdirectory to read and write from - A default directory /home/ecs-user/pmpsdb is used if this argument is omitted. + The subdirectory to read and write from + A default directory is used if this argument is omitted, + which depends on the PLC OS. Returns ------- @@ -194,8 +195,9 @@ def download_file_json_dict( filename : str The name of the file on the PLC. directory : str, optional - The ftp subdirectory to read and write from - A default directory pmps is used if this argument is omitted. + The subdirectory to read and write from + A default directory is used if this argument is omitted, + which depends on the PLC OS. Returns ------- @@ -257,8 +259,9 @@ def compare_file( The filename as saved on the PLC. If omitted, the local_filename's basename will be used. directory : str, optional - The ftp subdirectory to read and write from - A default directory pmps is used if this argument is omitted. + The subdirectory to read and write from + A default directory is used if this argument is omitted, + which depends on the PLC OS. Returns -------