Adding saving to irregular cache
localhost committed Feb 15, 2018
1 parent 4094cb5 commit b79b51a
Showing 1 changed file with 22 additions and 5 deletions.
27 changes: 22 additions & 5 deletions adlframework/caches/nparr_cache.py
@@ -1,6 +1,7 @@
 import numpy as np
 import pdb
 import pickle
+import os
 from adlframework.cache import Cache
 from adlframework.utils import get_logger

@@ -86,8 +87,6 @@ def load(self):
         Currently, only saves data.
         To-Do: save labels too.
         '''
-        import pickle
-        import os
         dtf = self.cache_file+'_data'
         lf = self.cache_file+'_label'
         df = self.cache_file+'_dict'
@@ -113,9 +112,6 @@ def double_arr_size(self):
         self.labels = self.new_labels
 
 class IrregularNPArrCache(Cache):
-    import tables
-    import string
-    import random
     '''
     TO-DO: Written for 1-d. Generalize to N-D.
     Reference: https://kastnerkyle.github.io/posts/using-pytables-for-larger-than-ram-data-processing/
@@ -124,6 +120,7 @@ def __init__(self, cache_file=None, compress=True):
         self.data = []
         self.labels = []
         self.id_to_index = {}
+        self.cache_file = cache_file
 
 
     ''' Necessary classes '''
@@ -147,3 +144,23 @@ def retrieve(self, id_):
         idx = self.id_to_index[id_]
         return self.data[idx], self.labels[idx]
 
+    def load(self):
+        '''
+        Reads data, labels, and id_to_index as a tuple from pickle.
+        '''
+        if self.cache_file is not None:
+            if os.path.exists(self.cache_file):
+                with open(self.cache_file, "rb") as f:
+                    self.data, self.labels, self.id_to_index = pickle.load(f)
+            else:
+                logger.warning("Specified cache file doesn't exist. Will continue with an empty cache.")
+
+    def save(self):
+        '''
+        Saves data, labels, and id_to_index as a tuple in pickle.
+        '''
+        if self.cache_file is not None:
+            with open(self.cache_file, "wb") as f:
+                pickle.dump((self.data, self.labels, self.id_to_index), f)
+        else:
+            logger.warning('No cache file specified. Cache will be lost on exit.')
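
For reference, a minimal sketch of how the new save/load round-trip could be exercised. The diff doesn't show how entries are inserted into IrregularNPArrCache, so this populates the data, labels, and id_to_index attributes initialized in __init__ directly; the cache path and sample values are made up for illustration.

import numpy as np
from adlframework.caches.nparr_cache import IrregularNPArrCache

cache = IrregularNPArrCache(cache_file='/tmp/irregular_cache.pkl')  # hypothetical path

# No insert API is shown in this diff, so fill the attributes directly.
cache.data.append(np.array([1.0, 2.0, 3.0]))
cache.labels.append(np.array([1]))
cache.id_to_index['sample_0'] = 0

cache.save()  # pickles the (data, labels, id_to_index) tuple to cache_file

# A fresh instance restores the same state from disk.
restored = IrregularNPArrCache(cache_file='/tmp/irregular_cache.pkl')
restored.load()
data, label = restored.retrieve('sample_0')

Pickling the three structures as a single tuple keeps save() and load() symmetric, at the cost of rewriting the entire cache on every save.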
