Improve linting ruleset, upgrade ruff to 0.3.0
dineshpinto committed Mar 1, 2024
1 parent d33fc3e commit b046998
Showing 11 changed files with 729 additions and 673 deletions.
1,270 changes: 647 additions & 623 deletions poetry.lock

Large diffs are not rendered by default.

22 changes: 17 additions & 5 deletions pyproject.toml
@@ -14,21 +14,21 @@ packages = [{ include = "qudi_hira_analysis" }]
[tool.poetry.dependencies]
python = ">=3.10,<3.13"
lmfit = "^1.1.0"
matplotlib = "^3.6.2"
matplotlib = "^3.8.2"
numpy = "^1.26.0"
pandas = "^2.0.0"
xlrd = "^2.0.1"
openpyxl = "^3.0.10"
tqdm = "^4.64.1"
pyspm = "^0.5.0"
pyspm = "^0.6.0"

[tool.poetry.group.dev]
optional = true

[tool.poetry.group.dev.dependencies]
jupyterlab = "^3.6.2"
notebook = "^6.5.4"
ruff = "^0.1.0"
ruff = "^0.3.0"
ruff-lsp = "^0.0.39"
seaborn = "^0.13.0"

@@ -49,12 +49,21 @@ requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"

[tool.ruff]
target-version = "py310"

[tool.ruff.lint]
extend-select = [
"E", # pycodestyle-error
"W", # pycodestyle-warning
"F", # pyflakes
"N", # pep8-naming
"B", # flake8-bugbear
"I", # isort
"ASYNC", # flake8-async
"BLE", # flake8-blind-except
"A", # flake8-builtins
"C4", # flake8-comprehensions
"ERA", # flake8-eradicate/eradicate
"I", # isort
"N", # pep8-naming
"PIE", # flake8-pie
"PGH", # pygrep
"RUF", # ruff checks
@@ -63,3 +72,6 @@ extend-select = [
"TID", # flake8-tidy-imports
"UP", # pyupgrade
]

[tool.ruff.format]
docstring-code-format = true
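
For readers unfamiliar with the rule families added to `extend-select` above (newer ruff versions expect lint settings under `[tool.ruff.lint]`, hence the section move), here is a minimal, hypothetical sketch of the kind of code they flag. The snippets and rule codes are illustrative and not taken from this repository:

```python
import numpy as np  # F401 (pyflakes): imported but unused


def append_reading(value, readings=[]):  # B006 (flake8-bugbear): mutable default argument
    readings.append(value)
    return readings


def load_counts(path):
    try:
        return open(path).read()
    except Exception:  # BLE001 (flake8-blind-except): blind except swallows everything
        return None


id = 42  # A001 (flake8-builtins): shadows the built-in `id`
squares = list([x * x for x in range(4)])  # C411 (flake8-comprehensions): unnecessary list() call
# print(squares)  # ERA001 (eradicate): commented-out code
```

Running `ruff check .` with this configuration reports issues like these, while `ruff format` with `docstring-code-format = true` additionally reformats code embedded in docstrings, which accounts for the docstring changes in the files below.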
71 changes: 44 additions & 27 deletions qudi_hira_analysis/__init__.py
@@ -16,13 +16,13 @@
from qudi_hira_analysis import DataHandler
dh = DataHandler(
data_folder=Path("C:/Data"), # Path to the data folder
figure_folder=Path("C:/QudiHiraAnalysis"), # Path to the figure folder
measurement_folder=Path("20230101_NV1") # Name of the measurement folder
)
# Output:
# qudi_hira_analysis.data_handler :: INFO :: Data folder path is C:/Data/20230101_NV1
# qudi_hira_analysis.data_handler :: INFO :: Figure folder path is C:/QudiHiraAnalysis/20230101_NV1
data_folder=Path("C:/Data"), # Path to the data folder
figure_folder=Path("C:/QudiHiraAnalysis"), # Path to the figure folder
measurement_folder=Path("20230101_NV1"), # Name of the measurement folder
)
# Output:
# qudi_hira_analysis.data_handler :: INFO :: Data folder path is C:/Data/20230101_NV1
# qudi_hira_analysis.data_handler :: INFO :: Figure folder path is C:/QudiHiraAnalysis/20230101_NV1
```
### Loading data
@@ -90,12 +90,17 @@
```python
x_fit, y_fit, result = dh.fit(x="Controlled variable(Hz)", y="Signal",
fit_function=dh.fit_function.lorentziandouble,
data=odmr.data)
x_fit, y_fit, result = dh.fit(
x="Controlled variable(Hz)",
y="Signal",
fit_function=dh.fit_function.lorentziandouble,
data=odmr.data,
)
# Plot the data and the fit
ax = sns.scatterplot(x="Controlled variable(Hz)", y="Signal", data=odmr.data, label="Data")
ax = sns.scatterplot(
x="Controlled variable(Hz)", y="Signal", data=odmr.data, label="Data"
)
sns.lineplot(x=x_fit, y=y_fit, ax=ax, label="Fit")
```
@@ -143,8 +148,9 @@
```python
# Save the figure to the figure folder specified earlier
dh.save_figures(filepath=Path("odmr"), fig=ax.get_figure(),
only_pdf=True, bbox_inches="tight")
dh.save_figures(
filepath=Path("odmr"), fig=ax.get_figure(), only_pdf=True, bbox_inches="tight"
)
# The figure is saved to C:/QudiHiraAnalysis/20230101_NV1/odmr.pdf
```
@@ -167,11 +173,13 @@
image = np.zeros((pixels, pixels))
for idx, odmr in enumerate(odmr_measurements.values()):
row, col = odmr.xy_position
if len(odmr.fit_model.params) > 6:
# Calculate double Lorentzian splitting
image[row, col] = np.abs(odmr.fit_model.best_values["l1_center"]
- odmr.fit_model.best_values["l0_center"])
row, col = odmr.xy_position
if len(odmr.fit_model.params) > 6:
# Calculate double Lorentzian splitting
image[row, col] = np.abs(
odmr.fit_model.best_values["l1_center"]
- odmr.fit_model.best_values["l0_center"]
)
map = sns.heatmap(image, cbar_kws={"label": "Delta E (MHz)"})
@@ -249,9 +257,12 @@
# Plot the data
sns.lineplot(data=autocorrelation.data, x="Time (ns)", y="g2(t) norm", ax=ax)
# Fit the data using the antibunching function
fit_x, fit_y, result = dh.fit(x="Time (ns)", y="g2(t) norm",
data=autocorrelation.data,
fit_function=dh.fit_function.antibunching)
fit_x, fit_y, result = dh.fit(
x="Time (ns)",
y="g2(t) norm",
data=autocorrelation.data,
fit_function=dh.fit_function.antibunching,
)
# Plot the fit
sns.lineplot(x=fit_x, y=fit_y, ax=ax, color="C1")
@@ -269,9 +280,12 @@
for odmr in odmr_measurements.values():
sns.scatterplot(data=odmr.data, x="Controlled variable(Hz)", y="Signal", ax=ax)
fit_x, fit_y, result = dh.fit(x="Controlled variable(Hz)", y="Signal",
data=odmr.data,
fit_function=dh.fit_function.lorentziandouble)
fit_x, fit_y, result = dh.fit(
x="Controlled variable(Hz)",
y="Signal",
data=odmr.data,
fit_function=dh.fit_function.lorentziandouble,
)
sns.lineplot(x=fit_x, y=fit_y, ax=ax, color="C1")
dh.save_figures(filepath="odmr_variation", fig=fig)
@@ -287,9 +301,12 @@
for rabi in rabi_measurements.values():
sns.scatterplot(data=rabi.data, x="Controlled variable(s)", y="Signal", ax=ax)
fit_x, fit_y, result = dh.fit(x="Controlled variable(s)", y="Signal",
data=rabi.data,
fit_function=dh.fit_function.sineexponentialdecay)
fit_x, fit_y, result = dh.fit(
x="Controlled variable(s)",
y="Signal",
data=rabi.data,
fit_function=dh.fit_function.sineexponentialdecay,
)
sns.lineplot(x=fit_x, y=fit_y, ax=ax, color="C1")
dh.save_figures(filepath="rabi_variation", fig=fig)
4 changes: 2 additions & 2 deletions qudi_hira_analysis/_fitmethods/gaussianlikemethods.py
@@ -908,7 +908,7 @@ def estimate_twoDgaussian(self, x_axis, y_axis, data, params):
for var in parameters:
# FIXME: Why don't you check earlier?
# FIXME: Check for 1D array, 2D
if not isinstance(var, (frozenset, list, set, tuple, np.ndarray)):
if not isinstance(var, frozenset | list | set | tuple | np.ndarray):
self.log.error("Given parameter is not an array.")
amplitude = 0.0
center_x = 0.0
@@ -989,7 +989,7 @@ def estimate_twoDgaussian_MLE(self, x_axis, y_axis, data, params):
for var in parameters:
# FIXME: Why don't you check earlier?
# FIXME: Check for 1D array, 2D
if not isinstance(var, (frozenset, list, set, tuple, np.ndarray)):
if not isinstance(var, frozenset | list | set | tuple | np.ndarray):
self.log.error("Given parameter is not an array.")
amplitude = 0.0
center_x = 0.0
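
The `isinstance` changes in this file (and in `generalmethods.py`, `_qudi_fit_logic.py`, and `analysis_logic.py` below) swap the tuple-of-types form for a PEP 604 union, which `isinstance` accepts from Python 3.10 onward; this is what pyupgrade-style rules (UP038 in ruff of this era) rewrite once `target-version = "py310"` is set. A minimal sketch showing the two forms are interchangeable at runtime:

```python
import numpy as np

var = np.zeros(3)

# Pre-PEP 604 spelling: a tuple of candidate types.
print(isinstance(var, (frozenset, list, set, tuple, np.ndarray)))  # True

# PEP 604 union, accepted by isinstance() since Python 3.10.
print(isinstance(var, frozenset | list | set | tuple | np.ndarray))  # True
```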
2 changes: 1 addition & 1 deletion qudi_hira_analysis/_fitmethods/generalmethods.py
@@ -586,7 +586,7 @@ def _check_1D_input(self, x_axis, data, params):
error = 0
parameters = [x_axis, data]
for var in parameters:
if not isinstance(var, (frozenset, list, set, tuple, np.ndarray)):
if not isinstance(var, frozenset | list | set | tuple | np.ndarray):
self.log.error("Given parameter is no array.")
error = -1
elif len(np.shape(var)) != 1:
2 changes: 1 addition & 1 deletion qudi_hira_analysis/_qudi_fit_logic.py
@@ -71,7 +71,7 @@ def __init__(self):
f"Adding fit methods path: {self._additional_methods_import_path}"
)

if isinstance(self._additional_methods_import_path, (list, tuple, set)):
if isinstance(self._additional_methods_import_path, list | tuple | set):
self.log.info(
"Adding fit methods path list: {}".format(
self._additional_methods_import_path
4 changes: 2 additions & 2 deletions qudi_hira_analysis/analysis_logic.py
@@ -154,7 +154,7 @@ def fit(
dims: str = "1d"

if data is None:
if isinstance(x, (pd.Series, pd.Index)):
if isinstance(x, pd.Series | pd.Index):
x: np.ndarray = x.to_numpy()
if isinstance(y, pd.Series):
y: np.ndarray = y.to_numpy()
@@ -493,7 +493,7 @@ def fit_raster_odmr(
x = next(iter(odmr_measurements.values())).data["Freq(MHz)"].to_numpy()
x_fit = np.linspace(start=x[0], stop=x[-1], num=int(len(x) * 2))

for odmr, res in zip(odmr_measurements.values(), model_results):
for odmr, res in zip(odmr_measurements.values(), model_results, strict=False):
if len(res.params) == 6:
# Evaluate a single Lorentzian
y_fit = model1.eval(x=x_fit, params=res.params)
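
The `strict=False` argument added to `zip()` here (and in `data_handler.py` below) satisfies flake8-bugbear's zip-without-explicit-strict check (B905, available from Python 3.10), so any silent truncation of mismatched iterables is now an explicit choice. A small sketch of the behaviour:

```python
measurements = ["odmr_1", "odmr_2", "odmr_3"]
results = ["fit_1", "fit_2"]  # one result missing

# strict=False keeps the old behaviour: the extra element is silently dropped.
print(list(zip(measurements, results, strict=False)))
# [('odmr_1', 'fit_1'), ('odmr_2', 'fit_2')]

# strict=True raises ValueError on a length mismatch, catching the bug early.
try:
    list(zip(measurements, results, strict=True))
except ValueError as exc:
    print(exc)  # zip() argument 2 is shorter than argument 1
```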
6 changes: 4 additions & 2 deletions qudi_hira_analysis/data_handler.py
@@ -3,7 +3,7 @@
import datetime
import logging
from pathlib import Path
from typing import TYPE_CHECKING, Callable
from typing import TYPE_CHECKING

from qudi_hira_analysis.analysis_logic import AnalysisLogic
from qudi_hira_analysis.io_handler import IOHandler
@@ -16,6 +16,8 @@
)

if TYPE_CHECKING:
from collections.abc import Callable

import numpy as np
import pandas as pd
import pySPM
@@ -159,7 +161,7 @@ def __tree(self, dir_path: Path, prefix: str = ""):
contents = list(dir_path.iterdir())
# contents each get pointers that are ├── with a final └── :
pointers = [tee] * (len(contents) - 1) + [last]
for pointer, path in zip(pointers, contents):
for pointer, path in zip(pointers, contents, strict=False):
yield prefix + pointer + path.name
if path.is_dir(): # extend the prefix and recurse:
extension = branch if pointer == tee else space
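
Moving `Callable` from `typing` to `collections.abc` (here and in `measurement_dataclass.py` below) follows pyupgrade's deprecated-import rule (UP035); keeping the import inside the `if TYPE_CHECKING:` block means it is only evaluated by type checkers. A minimal, self-contained sketch of the pattern, assuming the module defers annotation evaluation with `from __future__ import annotations`:

```python
from __future__ import annotations

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # Only needed for annotations; collections.abc.Callable replaces the
    # deprecated typing.Callable (ruff UP035).
    from collections.abc import Callable


def fit_all(fit_function: Callable[[float], float], values: list[float]) -> list[float]:
    # Annotations stay as strings thanks to the __future__ import, so
    # Callable never has to exist at runtime.
    return [fit_function(v) for v in values]


print(fit_all(lambda x: x * 2, [1.0, 2.0]))  # [2.0, 4.0]
```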
8 changes: 4 additions & 4 deletions qudi_hira_analysis/io_handler.py
@@ -4,9 +4,9 @@
import inspect
import itertools
import pickle
from collections.abc import Callable
from functools import wraps
from pathlib import Path
from typing import Callable, Optional

import matplotlib.pyplot as plt
import numpy as np
@@ -19,8 +19,8 @@ class IOHandler:

def __init__(
self,
base_read_path: Optional[Path] = None,
base_write_path: Optional[Path] = None,
base_read_path: Path | None = None,
base_write_path: Path | None = None,
):
super().__init__()
self.base_read_path = base_read_path
@@ -130,7 +130,7 @@ def read_qudi_parameters(self, filepath: Path) -> dict:
timestamp_str = "".join(line.split(":")[1:]).strip()
datetime_str = datetime.datetime.strptime(
timestamp_str, "%d.%m.%Y %Hh%Mmin%Ss"
).replace(tzinfo=datetime.timezone.utc)
).replace(tzinfo=datetime.UTC)
params[label] = datetime_str
except Exception as _:
pass
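
Two more pyupgrade-driven changes appear in this file: `Optional[Path]` becomes `Path | None` (PEP 604, UP007 in ruff at the time) and `datetime.timezone.utc` becomes the `datetime.UTC` alias introduced in Python 3.11 (UP017). A small sketch of both spellings; note that `datetime.UTC` does not exist on Python 3.10, which the project still allows via `python = ">=3.10,<3.13"`:

```python
import datetime
from pathlib import Path


def resolve(base_write_path: Path | None = None) -> Path:
    # PEP 604 optional syntax replaces typing.Optional[Path]; it works at
    # runtime on Python 3.10+.
    return base_write_path if base_write_path is not None else Path.cwd()


# datetime.UTC (Python 3.11+) is an alias for datetime.timezone.utc (ruff UP017).
timestamp = datetime.datetime(2023, 1, 1, tzinfo=datetime.UTC)
print(resolve(), timestamp.isoformat())
```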
11 changes: 6 additions & 5 deletions qudi_hira_analysis/measurement_dataclass.py
@@ -3,13 +3,14 @@
import logging
import re
from dataclasses import dataclass, field
from typing import TYPE_CHECKING, Callable
from typing import TYPE_CHECKING

import pandas as pd
from PIL import Image

if TYPE_CHECKING:
import datetime
from collections.abc import Callable
from pathlib import Path

import lmfit
@@ -200,19 +201,19 @@ def get_param_from_filename(self, unit: str) -> float | None:
Examples:
filename = "rabi_12dBm"
>>> get_param_from_filename(unit='dBm')
>>> get_param_from_filename(unit="dBm")
12.0
filename = "pixelscan_minus100nm"
>>> get_param_from_filename(unit='dBm')
>>> get_param_from_filename(unit="dBm")
-100.0
filename = "rabi_2e-6mbar"
>>> get_param_from_filename(unit='mbar')
>>> get_param_from_filename(unit="mbar")
2e-6
filename = "rabi_2point3uW"
>>> get_param_from_filename(unit='uW')
>>> get_param_from_filename(unit="uW")
2.5
"""
filename = self.filename
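
The quote changes in the doctest examples above follow from the `docstring-code-format = true` option added in pyproject.toml: `ruff format` now also reformats code embedded in docstrings (doctests and fenced blocks), e.g. rewriting `unit='dBm'` as `unit="dBm"`. A toy sketch of the effect, not the repository's implementation:

```python
def get_param_from_name(name: str, unit: str) -> float:
    """Toy parameter extractor used only to illustrate docstring formatting.

    Examples:
        >>> get_param_from_name("rabi_12dBm", unit="dBm")
        12.0
    """
    return float(name.split("_")[-1].removesuffix(unit))
```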
2 changes: 1 addition & 1 deletion tools/remove_empty_data_folders.py
@@ -54,7 +54,7 @@ def dir_path(path: str) -> str:
- use a set to store all extensions
- if the length of the set > 2, keep folder
- else delete folder
Examples
--------
python remove_empty_data_folders.py --path "C:\\Data"
