printv and plot_marker added
ARSadri committed Sep 14, 2024
1 parent 81e6d3c commit 5673933
Showing 11 changed files with 389 additions and 94 deletions.
13 changes: 10 additions & 3 deletions HISTORY.rst
@@ -332,10 +332,17 @@ History
* printvar try 1.
* pyrunner try 1. is added

0.12.12 (2024-08-29)
0.12.13 (2024-08-29)
-------------------
* critical error removed

0.12.12 (2024-08-30)
0.12.14 (2024-08-30)
-------------------
* removed dependency on dill
* removed dependency on dill

0.12.15 (2024-09-12)
-------------------
* added contour_overlayed
* moved loopprocessor to multiprocessor
* added printv
* added plot_marker
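
A minimal usage sketch of the new printv helper named in this changelog entry. This is an assumption for illustration only: the exact signatures of printv and plot_marker are not part of this diff, and the call below presumes printv behaves like the printvar utility it replaces, printing a variable's name alongside its value.

    from lognflow import printv

    learning_rate = 3e-4     # any local variable
    printv(learning_rate)    # assumed: prints the variable's name together with its value
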
14 changes: 6 additions & 8 deletions lognflow/__init__.py
@@ -2,20 +2,18 @@

__author__ = 'Alireza Sadri'
__email__ = '[email protected]'
__version__ = '0.12.14'
__version__ = '0.12.15'

from .lognflow import lognflow
from .lognflow import lognflow, getLogger
from .logviewer import logviewer
from .printprogress import printprogress
from .plt_utils import (
plt_colorbar, plot_gaussian_gradient, plt_imshow, plt_violinplot,
plt_imhist, transform3D_viewer)
plt_colorbar, plt_imshow, plt_violinplot, plt_imhist, transform3D_viewer)
from .utils import (
select_directory, select_file, repr_raw, replace_all,
is_builtin_collection, text_to_collection, stack_to_frame,
stacks_to_frames, ssh_system, printvar, Pyrunner)
from .multiprocessor import multiprocessor
from .loopprocessor import loopprocessor
getLogger = lognflow
stacks_to_frames, ssh_system, printv, Pyrunner)
from .multiprocessor import multiprocessor, loopprocessor

def basicConfig(*args, **kwargs):
...
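
As the __init__.py diff above shows, getLogger is now re-exported from lognflow.lognflow as a plain alias of the lognflow class (the old module-level assignment moved into lognflow.py). A minimal sketch of what that means for callers; constructor arguments are omitted and assumed here:

    from lognflow import getLogger, lognflow

    assert getLogger is lognflow    # same class under a logging-style name
    # logger = getLogger(...)       # construct it exactly as you would construct lognflow
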
91 changes: 51 additions & 40 deletions lognflow/lognflow.py
Expand Up @@ -139,12 +139,13 @@ class lognflow:
:param time_tag:
File names can carry time_tags in time.time() format or indices. This
is pretty much the most fundamental contribution of lognflow beside
carrying the folders and files paths around. By default all file names
will stop having time tag if you set it here to False. Otherwise,
all file names will have time tag unless given argument at each logging
function sets it to False. It can also be a string. options are 'index'
or 'time_and_index'. If you use indexer, instead of time
stamps, it will simple put an index that counts up after each logging.
carrying the folders and files paths around. By default it is True. File
names will not carry time tags if you set it to False, or if you do not
mention it here; but if you then create logs with the same name, those
repeated names do get time tags. You can also pass a time_tag argument to
every logging function, each of which falls back to the default set here.
It can also be a string: the options are 'index' or 'time_and_index'. If
you use 'index', instead of time stamps it simply puts an index that
counts up after each logging.
:type time_tag: bool
"""

@@ -159,10 +160,10 @@ def __init__(self,
main_log_name : str = 'log',
log_flush_period : int = 10):
self._init_time = time.time()
self.time_tag = time_tag
self.log_dir_prefix = log_dir_prefix
self.log_dir_suffix = log_dir_suffix

self.time_tag = time_tag
frame = inspect.currentframe()
args, _, _, values = inspect.getargvalues(frame)
if 'time_tag' in values and values['time_tag'] is not None:
@@ -216,8 +217,9 @@ def __init__(self,
self.log_dir_str = str(self.log_dir.absolute())
self.enabled = True
self.counted_vars = {}
self.param_name_set = set()

#all depricated
#all deprecated and will be removed in a few revisions
self.log_text = self.text
self.log_text_flush = self.text_flush
self.log_var = self.record
@@ -233,10 +235,10 @@ def __init__(self,
self.log_images_in_pdf = self.images_to_pdf
self.log_plt = self.savefig
self.log_torch_dict = self.save_torch
self.get_torch_dict = self.load_torch
self.log_single = self.save
self.get_single = self.load
self.get_var = self.get_record
self.get_torch_dict = self.load_torch

def setLevel(self, level = 'info.txt'):
self.log_name = level
@@ -439,7 +441,7 @@ def _param_dir_name_suffix(self, parameter_name: str, suffix: str = None):
else:
param_name = parameter_name_split[-1]
param_dir = '/'.join(parameter_name_split[:-1])

if(suffix == 'mat'):
if(len(param_name) == 0):
param_dir_split = param_dir.split('/')
@@ -469,7 +471,12 @@ def _param_dir_name_suffix(self, parameter_name: str, suffix: str = None):
def _get_fpath(self, param_dir: pathlib_Path, param_name: str = None,
suffix: str = None, time_tag: bool = None) -> pathlib_Path:

time_tag = self.time_tag if (time_tag is None) else time_tag
if time_tag is None:
if self._time_tag_provided:
time_tag = self.time_tag
elif param_name in self.param_name_set:
time_tag = True

assert isinstance(time_tag, (bool, str)), \
'Argument time_tag must be a boolean or a string.'

@@ -494,9 +501,9 @@ def _get_fpath(self, param_dir: pathlib_Path, param_name: str = None,

if(not _param_dir.is_dir()):
_param_dir.mkdir(parents = True, exist_ok = True)
if self.logged is None:
self.logged = logviewer(self.log_dir, self)

self.param_name_set.add(param_name)

if(param_name is not None):
if(len(param_name) > 0):
if(index_tag):
@@ -527,7 +534,7 @@ def _get_dirnamesuffix(self, param_dir, param_name, suffix):
log_dirnamesuffix = log_dirnamesuffix + '.' + suffix
return log_dirnamesuffix

def _log_text_handler(self, log_name: str,
def _text_handler(self, log_name: str,
log_size_limit: int = int(1e+7),
time_tag: bool = None,
log_flush_period = None,
@@ -556,7 +563,7 @@ def text_flush(self, log_name = None, flush = False, suffix = None):
""" Flush the text logs
Writing text to open(file, 'a') does not constantly happen on HDD.
There is an OS buffer in between. This function should be called
regularly. lognflow calls it once in a while when log_text is
regularly. lognflow calls it once in a while when text is
called multiple times, but the user needs to also call it once in a
while.
In later versions, a timer will be used to call it automatically.
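
A brief usage sketch of the flushing described above; the constructor path and the log name are illustrative:

    from lognflow import lognflow

    logger = lognflow('logs_root')                 # path assumed
    logger.text('status', 'long step finished')
    logger.text_flush('status', flush=True)        # push the buffered text onto disk now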
@@ -650,7 +657,7 @@ def text(self,
param_dir, param_name, suffix)

if ( (not (log_dirnamesuffix in self._loggers_dict)) or new_file):
self._log_text_handler(log_dirnamesuffix,
self._text_handler(log_dirnamesuffix,
log_size_limit = log_size_limit,
time_tag = time_tag,
suffix = suffix)
@@ -685,10 +692,10 @@ def text(self,
log_size += len(_logger_el)
curr_textinlog.log_size += log_size

self.log_text_flush(log_dirnamesuffix, flush)
self.text_flush(log_dirnamesuffix, flush)

if(log_size >= curr_textinlog.log_size_limit):
self._log_text_handler(
self._text_handler(
log_dirnamesuffix,
log_size_limit = curr_textinlog.log_size_limit,
time_tag = curr_textinlog.time_tag,
@@ -754,7 +761,7 @@ def record(self, parameter_name: str, parameter_value,
curr_index = 0

if(curr_index >= log_counter_limit):
self.log_var_flush(log_dirnamesuffix)
self.record_flush(log_dirnamesuffix)
file_start_time = self.time_stamp
curr_index = 0

@@ -1024,7 +1031,7 @@ def plot(self, parameter_name: str,
ax.set_title(title)

if not return_figure:
fpath = self.log_plt(
fpath = self.savefig(
parameter_name = parameter_name,
image_format=image_format, dpi=dpi,
time_tag = time_tag)
@@ -1075,7 +1082,7 @@ def hist(self, parameter_name: str,
if title is not None:
ax.set_title(title)
if not return_figure:
fpath = self.log_plt(
fpath = self.savefig(
parameter_name = parameter_name,
image_format=image_format, dpi=dpi,
time_tag = time_tag)
@@ -1120,7 +1127,7 @@ def scatter3(self, parameter_name: str,
if data_N_by_3.shape[1] != 3:
data_N_by_3 = data_N_by_3.T
self.text(
None, 'lognflow.log_scatter3> input dataset is transposed.')
None, 'lognflow.scatter3> input dataset is transposed.')
fig_ax_opt_stack = plt_scatter3(data_N_by_3, title = title,
elev_list = elev_list, azim_list = azim_list,
make_animation = make_animation, **kwargs)
@@ -1131,7 +1138,7 @@ def scatter3(self, parameter_name: str,
dpi=dpi, time_tag = time_tag,
**log_animation_kwargs)
else:
return self.log_plt(
return self.savefig(
parameter_name = parameter_name,
image_format = image_format, dpi=dpi,
time_tag = time_tag)
@@ -1164,7 +1171,7 @@ def surface(self, parameter_name: str,
ax.set_title(title)

if not return_figure:
fpath = self.log_plt(
fpath = self.savefig(
parameter_name = parameter_name,
image_format=image_format, dpi=dpi,
time_tag = time_tag)
@@ -1201,7 +1208,7 @@ def hexbin(self, parameter_name: str, parameter_value,
ax.set_title(title)

if not return_figure:
fpath = self.log_plt(
fpath = self.savefig(
parameter_name = parameter_name,
image_format=image_format, dpi=dpi,
time_tag = time_tag)
@@ -1276,7 +1283,7 @@ def imshow(self,
**kwargs)

if not return_figure:
fpath = self.log_plt(
fpath = self.savefig(
parameter_name = parameter_name,
image_format=image_format, dpi=dpi,
time_tag = time_tag)
@@ -1347,7 +1354,7 @@ def imshow_subplots(self,
**kwargs)

if not return_figure:
fpath = self.log_plt(
fpath = self.savefig(
parameter_name = parameter_name,
image_format=image_format, dpi=dpi,
time_tag = time_tag)
@@ -1428,7 +1435,7 @@ def imshow_series(self,
transpose = transpose)

if not return_figure:
fpath = self.log_plt(
fpath = self.savefig(
parameter_name = parameter_name,
image_format=image_format, dpi=dpi,
time_tag = time_tag)
@@ -1555,7 +1562,7 @@ def log_confusion_matrix(self,
plt.title(title)
plt.colorbar(im, fraction=0.046, pad=0.04)

fpath = self.log_plt(
fpath = self.savefig(
parameter_name = parameter_name,
image_format=image_format, dpi=dpi,
time_tag = time_tag)
@@ -1595,18 +1602,20 @@ def log_animation(
try:
ani.save(fpath, dpi = dpi,
writer = matplotlib_animation.PillowWriter(fps=int(1000/interval)))
plt.close()
return fpath
except Exception as e:
plt.close()
print('lognflow: cannot save the animation. Here is the unraised error:')
print(e)
print('-'*79)

def flush_all(self):
if not self.enabled: return
for log_name in list(self._loggers_dict):
self.log_text_flush(log_name, flush = True)
self.text_flush(log_name, flush = True)
for parameter_name in list(self._vars_dict):
self.log_var_flush(parameter_name)
self.record_flush(parameter_name)

def savez(self, parameter_name: str,
parameter_value,
@@ -1660,7 +1669,7 @@ def load_torch(self, name):
for fpath in flist:
if fpath.is_file():
vname = self.name_from_file(fpath)
out = self.logged.get_single(vname)
out = self.logged.load(vname)
return torch.from_numpy(out).cuda()
if fpath.is_dir():
fpath_str = str(fpath.absolute())
@@ -1845,7 +1854,7 @@ def _load(self, var_name, file_index = None,
logged.
.. note::
when reading a MATLAB file, the output is a dictionary.
Also when reading a npz except if it is made by log_var
Also when reading a npz except if it is made by record
"""
self.assert_log_dir()
assert file_index == int(file_index), \
@@ -1878,7 +1887,7 @@ def _load(self, var_name, file_index = None,
return (read_func(var_path), var_path)
if(var_path.suffix == '.npz'):
buf = np.load(var_path)
try: #check if it is made by log_var
try: #check if it is made by record
assert len(buf.files) == 2
time_array = buf['time']
data_array = buf['data']
@@ -1941,17 +1950,17 @@ def load(self, var_name, file_index = -1,
a function that takes the Posix path and returns data
.. note::
when reading a MATLAB file, the output is a dictionary.
Also when reading a npz except if it is made by log_var
Also when reading a npz except if it is made by record
"""
self.assert_log_dir()
get_single_data, fpath = self._load(
loaded_data, fpath = self._load(
var_name = var_name, file_index = file_index, suffix = suffix,
read_func = read_func, verbose = verbose,
return_collection = return_collection)
if return_fpath:
return get_single_data, fpath
return loaded_data, fpath
else:
return get_single_data
return loaded_data

def get_stack_from_files(self,
var_name = None, flist = [], suffix = None, read_func = None,
Expand Down Expand Up @@ -2044,7 +2053,7 @@ def get_stack_from_names(self,
images_flist = self.get_flist(name)
if images_flist:
for file_index in range(len(images_flist)):
data, fpath = self.get_single(
data, fpath = self.load(
name, file_index = file_index,
read_func = read_func, return_fpath = True)
if data is not None:
@@ -2108,3 +2117,5 @@ def __repr__(self):

def __bool__(self):
return self.log_dir.is_dir()

getLogger = lognflow
1 change: 1 addition & 0 deletions lognflow/loopprocessor.py
@@ -1,3 +1,4 @@
#deprecated file
from multiprocessing import Process, Queue, cpu_count, Event
from numpy import __name__ as np___name__
from numpy import array as np_array
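
With loopprocessor marked deprecated here and re-exported alongside multiprocessor in the __init__.py diff above, the imports that match this commit are sketched below; the old top-level name keeps working only through that re-export:

    from lognflow import multiprocessor, loopprocessor    # both now come from multiprocessor
    # or, explicitly from the module they now share:
    from lognflow.multiprocessor import loopprocessor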