From 211f33afb977e1775e53a3971fa8854514201d2f Mon Sep 17 00:00:00 2001
From: Zhichao Zhang <zhichao.zhang@stud.tu-darmstadt.de>
Date: Mon, 5 Sep 2022 21:05:20 +0200
Subject: [PATCH] change import structure and shorten some lines to satisfy
 the PEP8 79-character limit

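The bare intra-package imports (e.g. "from storage import Storage") only
resolve when the pykkn/ directory itself is on sys.path, for example when a
script is started from inside that folder. Rewriting them as absolute
imports ("from pykkn.storage import Storage") lets the modules be imported
as a regular package. A minimal sketch of the intended usage, based on the
docstring examples (the name "run_1" is only an illustration):

    from pykkn.pipeline import Pipeline
    from pykkn.run import Run

    run1 = Run("run_1")            # Storage subclasses take a name
    run1.add([Pipeline("pipe1")])  # Run.add expects a list
    print(run1.pipelines)          # -> ['pipe1'] via the __repr__ override

Besides the import and line-length changes, Pipeline.add() now also accepts
Dataset_Image and Dataset_Video objects in addition to plain Dataset.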
---
 pykkn/build_multi_parameters.py |  2 +-
 pykkn/dataset.py                |  5 +----
 pykkn/dataset_image.py          | 21 +++----------------
 pykkn/dataset_video.py          | 27 ++++++++++++++-----------
 pykkn/instrument.py             | 10 +++++----
 pykkn/model.py                  |  8 ++++----
 pykkn/parameter.py              |  2 +-
 pykkn/parse.py                  | 30 +++++++++++++++------------
 pykkn/parse_json.py             | 31 ++++++++++++++++------------
 pykkn/pipeline.py               | 21 +++++++++++--------
 pykkn/run.py                    |  8 ++++----
 pykkn/storage.py                | 36 ++++++++++++++++++---------------
 12 files changed, 103 insertions(+), 98 deletions(-)
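
Note on the Pipeline.add() change: Dataset_Image and Dataset_Video inherit
from Storage rather than Dataset, so add() used to reject them with a
TypeError. A minimal sketch of the new behaviour, assuming the constructors
only take a name (the names and the image path below are illustrative):

    from pykkn.dataset_image import Dataset_Image
    from pykkn.pipeline import Pipeline

    img = Dataset_Image("image_dataset_1")  # example name
    img.data = "frames/frame_0001.png"      # hypothetical image path
    pipe = Pipeline("pipe1")
    pipe.add([img])                         # now appended to pipe.data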

diff --git a/pykkn/build_multi_parameters.py b/pykkn/build_multi_parameters.py
index 8209dca..b124a70 100644
--- a/pykkn/build_multi_parameters.py
+++ b/pykkn/build_multi_parameters.py
@@ -1,4 +1,4 @@
-from parameter import Parameter
+from pykkn.parameter import Parameter
 
 
 def build_multi_parameters(dic: dict) -> list:
diff --git a/pykkn/dataset.py b/pykkn/dataset.py
index 2ea6b62..e209592 100644
--- a/pykkn/dataset.py
+++ b/pykkn/dataset.py
@@ -1,14 +1,11 @@
 import numpy as np
 
-from storage import Storage
+from pykkn.storage import Storage
 
 
 class Dataset(Storage):
     """An object of this class represents the dataset.
 
-    At present, there is no requirement for the type of data, it is simply stored in the dataset.data attribute
-    and further processing will be performed according to the different data types read in.
-
     Parameters
     ----------
     name : str
diff --git a/pykkn/dataset_image.py b/pykkn/dataset_image.py
index 1701ae4..c63c660 100644
--- a/pykkn/dataset_image.py
+++ b/pykkn/dataset_image.py
@@ -3,7 +3,7 @@ from pathlib import Path
 import numpy as np
 from PIL import Image
 
-from storage import Storage
+from pykkn.storage import Storage
 
 
 class Dataset_Image(Storage):
@@ -34,23 +34,8 @@ class Dataset_Image(Storage):
         self.attrs["kkn_DATASET_VERSION"] = "1.0"
         self.attrs["timestamp"] = "-"
 
-        """ The following attributes ensure that an image is correctly displayed in HDF5 view. \n
-        To enable HDFview to correctly display images, the string attributes must have a finite length to be correctly interpreted.\n
-        Further notice: These attributes does not need to to be changed \n
-
-        Attribute name = 'CLASS'             (Required),     Attribute value = 'IMAGE' (Fixed)
-            Explanation: This attribute identifies this data set as intended to be interpreted as an image that conforms to the specifications on this page. \n
-        Attribute name = 'IMAGE_VERSION'     (recommended),  Attribute value = '1.2' (Fixed)
-            Explanation: This attribute identifies the version number of this specification to which it conforms.  The current version number is "1.2". \n
-        Attribute name = 'IMAGESUBCLASS'     (Required),     Attribute value = 'IMAGE_TRUECOLOR' (Changeble, but highly recommended)
-            Explanation: The value of this attribute indicates the type of palette that should be used with the image. Other Attr values = "IMAGE_GRAYSCALE" or "IMAGE_BITMAP" or "IMAGE_INDEXED" \n
-        Attribute name = 'IMAGE_MINMAXRANGE' (recommended)   Attribute value = [0, 255] (Changeable, recommended)
-            Explanation: This attribute is an array of two numbers, of the same HDF5 datatype as the data. The first element is the minimum value of the data, and the second is the maximum. This is used for images with IMAGE_SUBCLASS="IMAGE_GRAYSCALE", "IMAGE_BITMAP" or "IMAGE_INDEXED".
-        Attribute name = 'INTERLACE_MODE'    (Optional),     Attribute value = 'INTERLACE_PIXEL' (Default value)
-            Explanation: For images with more than one component for each pixel, this optional attribute specifies the layout of the data. Other Attribute value = "INTERLACE_PLANE"
-        """
         self.attrs["CLASS"] = np.string_("IMAGE")
-        # self.attrs['IMAGE_MINMAXRANGE'] = np.array([0, 255]) # Better solution: [self.data.min(), self.data.max()], but it doenst work yet!
+        # self.attrs['IMAGE_MINMAXRANGE'] = np.array([0, 255])
         self.attrs["IMAGE_SUBCLASS"] = np.string_("IMAGE_TRUECOLOR")
         # self.attrs['IMAGE_VERSION'] = np.string_('1.2')
         # self.attrs['INTERLACE_MODE'] = np.string_('INTERLACE_PIXEL')
@@ -61,7 +46,7 @@ class Dataset_Image(Storage):
 
     @data.setter
     def data(self, path_image: str):
-        """the setter function to store a image file and some information in an HDF5 file
+        """the setter function to store a image file and some information
 
         Parameters
         ----------
diff --git a/pykkn/dataset_video.py b/pykkn/dataset_video.py
index 52b8c8d..732073a 100644
--- a/pykkn/dataset_video.py
+++ b/pykkn/dataset_video.py
@@ -2,7 +2,7 @@ from pathlib import Path
 
 import cv2
 
-from storage import Storage
+from pykkn.storage import Storage
 
 
 class Dataset_Video(Storage):
@@ -16,8 +16,8 @@ class Dataset_Video(Storage):
     Examples
     --------
     dataset1 = Dataset_Video('video_dataset_1') \n
-    ataset1.data = r"C:/Users/Administrator/Videos/Captures/test_meeting_recording.mp4"   \n
-    dataset1.data = "C:/Users/Administrator/Videos/Captures/test_meeting_recording.mp4"    \n
+    dataset1.data = r"test_meeting_recording.mp4"   \n
+    dataset1.data = "test_meeting_recording.mp4"    \n
     dataset1.attrs['timestamp'] = '2022-06-13 11:22:11' \n
 
     dataset1.set_storage_path('test/test_ut_video.h5')  \n
@@ -43,7 +43,7 @@ class Dataset_Video(Storage):
 
     @data.setter
     def data(self, path_video: str):
-        """the setter function to store a video file and some information in an HDF5 file
+        """the setter function to store a video file and some information
 
         Parameters
         ----------
@@ -56,26 +56,29 @@ class Dataset_Video(Storage):
         with open(path, "rb") as f:
             self._data = f.read()
 
-        # store the name and suffix of the video file, to convert this binary format into original format
+        # store the name and suffix of the video file so that the
+        # binary data can later be converted back to the original format
         self.attrs["file_name"] = path.name
         self.attrs["file_suffix"] = path.suffix.split(".")[-1]
 
-        # in order to read some attributes about this video, open it with opencv
+        # open the video with opencv to read some of its attributes
         cap = cv2.VideoCapture(str(path))
 
-        self.attrs["video_fps"] = int(cap.get(cv2.CAP_PROP_FPS))
-        self.attrs["video_num_frames"] = int(cap.get(cv2.CAP_PROP_FRAME_COUNT))
+        count = int(cap.get(cv2.CAP_PROP_FRAME_COUNT))
+        fps = int(cap.get(cv2.CAP_PROP_FPS))
+
+        self.attrs["video_fps"] = fps
+        self.attrs["video_num_frames"] = count
         self.attrs["video_width"] = int(cap.get(cv2.CAP_PROP_FRAME_WIDTH))
         self.attrs["video_height"] = int(cap.get(cv2.CAP_PROP_FRAME_HEIGHT))
-        self.attrs[
-            "video_length"
-        ] = f"{int(cap.get(cv2.CAP_PROP_FRAME_COUNT)) // int(cap.get(cv2.CAP_PROP_FPS))}s"
+
+        self.attrs["video_length"] = f"{count // fps}s"
 
         # delete the useless variable to save memory
         del cap
 
     def output_file(self, path_output):
-        """this function is designed to convert the binary format data into the original format
+        """convert the binary format data into the original format
 
         Parameters
         ----------
diff --git a/pykkn/instrument.py b/pykkn/instrument.py
index 5755dfe..6758fef 100644
--- a/pykkn/instrument.py
+++ b/pykkn/instrument.py
@@ -1,13 +1,15 @@
 from typing import List
 
-from model import Model
-from storage import Storage
+from pykkn.model import Model
+from pykkn.storage import Storage
 
 
 class Instrument(Storage):
-    """This class represents Datasets that are mapped from other datasets using a given model
+    """This class represents Datasets that are mapped from other datasets
+    using a given model
 
-    This can be used to convert the input of a sensor to its actual physical data using the given model,
+    This can be used to convert the input of a sensor to its physical
+    data using the given model,
     for example polynomials or lookup tables.
 
     Parameters
diff --git a/pykkn/model.py b/pykkn/model.py
index 3822ed1..dfabd76 100644
--- a/pykkn/model.py
+++ b/pykkn/model.py
@@ -1,11 +1,11 @@
 from typing import List
 
-from parameter import Parameter
-from storage import Storage
+from pykkn.parameter import Parameter
+from pykkn.storage import Storage
 
 
 class Model(Storage):
-    """This object is used to represent the model for data processing in the experiment
+    """This object is used to represent the model for data processing
 
     Parameters
     ----------
@@ -47,4 +47,4 @@ class Model(Storage):
             if isinstance(item, Parameter):
                 self.parameters.append(item)
             else:
-                raise TypeError("the element in list must be a Parameter object")
+                raise TypeError("each element must be a Parameter object")
diff --git a/pykkn/parameter.py b/pykkn/parameter.py
index 078616a..70be443 100644
--- a/pykkn/parameter.py
+++ b/pykkn/parameter.py
@@ -1,6 +1,6 @@
 import numpy as np
 
-from storage import Storage
+from pykkn.storage import Storage
 
 
 class Parameter(Storage):
diff --git a/pykkn/parse.py b/pykkn/parse.py
index 25307f5..a700046 100644
--- a/pykkn/parse.py
+++ b/pykkn/parse.py
@@ -4,14 +4,14 @@ from typing import List
 import h5py
 from PIL import Image
 
-from dataset import Dataset
-from dataset_image import Dataset_Image
-from dataset_video import Dataset_Video
-from instrument import Instrument
-from model import Model
-from parameter import Parameter
-from pipeline import Pipeline
-from run import Run
+from pykkn.dataset import Dataset
+from pykkn.dataset_image import Dataset_Image
+from pykkn.dataset_video import Dataset_Video
+from pykkn.instrument import Instrument
+from pykkn.model import Model
+from pykkn.parameter import Parameter
+from pykkn.pipeline import Pipeline
+from pykkn.run import Run
 
 
 def dataset_data_parse(obj: object, h5: h5py.File) -> object:
@@ -131,7 +131,7 @@ def create_instance(root: h5py.File, key: str) -> object:
             obj = video_data_parse(obj, root[key])
         else:
             raise TypeError(
-                f"Error Dataset Type: {root[key].attrs['kkn_DATASET_SUBCLASS']}"
+                f"Error Dataset Type:{root[key].attrs['kkn_DATASET_SUBCLASS']}"
             )
     else:
         raise TypeError(f"Error Class Type: {root[key].attrs['kkn_CLASS']}")
@@ -163,9 +163,11 @@ def recursive_create_instance(file: h5py.File) -> List[object]:
         if obj.attrs["kkn_CLASS"] not in ["PARAMETER", "DATASET"]:
             for subgroup_name in list(file[key].keys()):
                 if subgroup_name != "pipelines":
-                    obj.add(recursive_create_instance(file[key][subgroup_name]))
+                    obj.add(recursive_create_instance(
+                        file[key][subgroup_name]))
                 else:
-                    # here is the special situation for pipeline object because of its name xx/xx/xx
+                    # special handling for pipeline objects, whose names
+                    # have the nested form xx/xx/xx
                     # TODO here still some space to improve
                     for a in list(file[key][subgroup_name].keys()):
                         aa = file[key][subgroup_name][a]
@@ -180,7 +182,8 @@ def recursive_create_instance(file: h5py.File) -> List[object]:
                                 ):
                                     pipe_obj.add(
                                         recursive_create_instance(
-                                            file[key][subgroup_name][f"{a}/{b}/{c}"][
+                                            file[key][subgroup_name][
+                                                f"{a}/{b}/{c}"][
                                                 sub_subgroup_name
                                             ]
                                         )
@@ -202,7 +205,8 @@ def pykkn_parse(path: str) -> object:
     Returns
     -------
     object
-        one of the component types, the type of return depends on the structure of json structure
+        one of the component types;
+        the return type depends on the structure of the HDF5 file
     """
     # open an HDF5 file
     file = h5py.File(path)
diff --git a/pykkn/parse_json.py b/pykkn/parse_json.py
index 47f5a18..33b2cef 100644
--- a/pykkn/parse_json.py
+++ b/pykkn/parse_json.py
@@ -1,13 +1,13 @@
 import json
 
-from dataset import Dataset
-from dataset_image import Dataset_Image
-from dataset_video import Dataset_Video
-from instrument import Instrument
-from model import Model
-from parameter import Parameter
-from pipeline import Pipeline
-from run import Run
+from pykkn.dataset import Dataset
+from pykkn.dataset_image import Dataset_Image
+from pykkn.dataset_video import Dataset_Video
+from pykkn.instrument import Instrument
+from pykkn.model import Model
+from pykkn.parameter import Parameter
+from pykkn.pipeline import Pipeline
+from pykkn.run import Run
 
 
 def create_instance(dic: dict) -> object:
@@ -21,7 +21,8 @@ def create_instance(dic: dict) -> object:
     Returns
     -------
     object
-        one of the component types, the type of return depends on the structure of json structure
+        one of the component types;
+        the return type depends on the structure of the json data
 
     Raises
     ------
@@ -50,7 +51,8 @@ def create_instance(dic: dict) -> object:
         elif dic["kkn_DATASET_SUBCLASS"] == "VIDEO":
             obj = Dataset_Video(dic["name"])
         else:
-            raise TypeError(f"Error Dataset Type: {dic['kkn_DATASET_SUBCLASS']}")
+            raise TypeError(
+                f"Error Dataset Type: {dic['kkn_DATASET_SUBCLASS']}")
     else:
         raise TypeError(f"Error Class Type: {dic['kkn_CLASS']}")
 
@@ -58,7 +60,8 @@ def create_instance(dic: dict) -> object:
 
 
 def recursive_create_instance(file: dict) -> object:
-    """Recursively read json structure, create the corresponding object and assign the original property value to it
+    """Recursively read json structure
+    create the corresponding object and assign the original property value
 
     Parameters
     ----------
@@ -68,7 +71,8 @@ def recursive_create_instance(file: dict) -> object:
     Returns
     -------
     object
-        one of the component types, the type of return depends on the structure of json file
+        one of the component types;
+        the return type depends on the structure of the json file
     """
     # create object
     obj = create_instance(file)
@@ -105,7 +109,8 @@ def pykkn_parse(path):
     Returns
     -------
     object
-        one of the component types, the type of return depends on the structure of json file
+        one of the component types;
+        the return type depends on the structure of the json file
     """
     # open and read a json file
     with open(path, "r") as f:
diff --git a/pykkn/pipeline.py b/pykkn/pipeline.py
index aefd30f..db6d955 100644
--- a/pykkn/pipeline.py
+++ b/pykkn/pipeline.py
@@ -1,8 +1,10 @@
 from typing import List
 
-from dataset import Dataset
-from instrument import Instrument
-from storage import Storage
+from pykkn.dataset import Dataset
+from pykkn.dataset_image import Dataset_Image
+from pykkn.dataset_video import Dataset_Video
+from pykkn.instrument import Instrument
+from pykkn.storage import Storage
 
 
 class Pipeline(Storage):
@@ -52,22 +54,25 @@ class Pipeline(Storage):
         Raises
         ------
         TypeError
-            raised when the element of list_obj is not the type of Instrument or Dataset
+            raised when an element is not an Instrument or Dataset object
         AssertionError
             raised when list_obj is not a list or it is empty
         """
 
-        # Before extend the list of attributes, must be sure that there is actually a non-empty list
+        # Before extending the list of attributes,
+        # make sure the input is actually a non-empty list
         assert isinstance(
             list_obj, list
-        ), "the input must be a list containing Instrument or Dataset object(s)"
+        ), "the input must be list containing Instrument or Dataset object(s)"
         assert len(list_obj) >= 1, "the list must be not empty"
 
-        # Assign it to different properties based on the type of elements in the list
+        # Assign each element to the matching attribute based on its type
         for item in list_obj:
             if isinstance(item, Instrument):
                 self.instruments.append(item)
-            elif isinstance(item, Dataset):
+            elif isinstance(
+                item, (Dataset, Dataset_Image, Dataset_Video)
+            ):
                 self.data.append(item)
             else:
                 raise TypeError(
diff --git a/pykkn/run.py b/pykkn/run.py
index 80698a2..ae3b403 100644
--- a/pykkn/run.py
+++ b/pykkn/run.py
@@ -1,8 +1,8 @@
 from typing import List
 
-from parameter import Parameter
-from pipeline import Pipeline
-from storage import Storage
+from pykkn.parameter import Parameter
+from pykkn.pipeline import Pipeline
+from pykkn.storage import Storage
 
 
 class Run(Storage):
@@ -63,5 +63,5 @@ class Run(Storage):
                 self.pipelines.append(item)
             else:
                 raise TypeError(
-                    "the element in list_obj must be a Parameter or Pipeline object"
+                    "the element must be a Parameter or Pipeline object"
                 )
diff --git a/pykkn/storage.py b/pykkn/storage.py
index 7886b18..b9f651f 100644
--- a/pykkn/storage.py
+++ b/pykkn/storage.py
@@ -9,7 +9,8 @@ import numpy as np
 
 
 class Storage:
-    """This class is an abstracted for all other classes, providing initialization function with a name
+    """This class is abstracted for all other classes
+    providing initialization function with a name
     and store function to generate an HDF5 file
 
     Parameters
@@ -47,7 +48,7 @@ class Storage:
         if root is None:
             root = h5py.File(self.storage_path, "w")
 
-        # create a dataset or group according to whether this structure has data
+        # create a dataset or group depending on whether this object has data
         if self.is_dataset:
 
             # transfer the 'value' to data
@@ -58,7 +59,6 @@ class Storage:
 
             # because this class has no subclass, so create it as a dataset
             # special handle for video dataset
-            # it should be converted to numpy.void type, so that it can store in HDF5 file
             if (
                 "kkn_DATASET_SUBCLASS" in self.attrs.keys()
                 and self.attrs["kkn_DATASET_SUBCLASS"] == "VIDEO"
@@ -118,7 +118,7 @@ class Storage:
                 and self.attrs["kkn_DATASET_SUBCLASS"] == "VIDEO"
             )
 
-            # encode the data attribute when this is an instance of dataset_image of dataset_video
+            # encode the data attribute for image and video datasets
             if condition1 or condition2:
                 root["data"] = self.encode(self.data)
             else:
@@ -130,13 +130,13 @@ class Storage:
                     self.data = self.data.tolist()
                 root["data"] = self.data
 
-            # decode the binary information in some attributes only for image dataset
-            # If you do not do that, a TypeError will occur in json saving process
+            # decode binary attributes of image datasets for json saving
             if condition1:
                 if isinstance(self.attrs["CLASS"], bytes):
                     self.attrs["CLASS"] = self.attrs["CLASS"].decode()
                 if isinstance(self.attrs["IMAGE_SUBCLASS"], bytes):
-                    self.attrs["IMAGE_SUBCLASS"] = self.attrs["IMAGE_SUBCLASS"].decode()
+                    self.attrs["IMAGE_SUBCLASS"] = self.attrs[
+                        "IMAGE_SUBCLASS"].decode()
 
             for name, value in self.attrs.items():
                 if isinstance(value, np.integer):
@@ -177,7 +177,8 @@ class Storage:
                 self.storage_path = f"{self.name}.h5"
             else:
                 # replace the suffix to h5
-                self.storage_path = str(self.storage_path).replace(".json", ".h5")
+                self.storage_path = str(
+                    self.storage_path).replace(".json", ".h5")
 
             self.store_HDF5()
 
@@ -187,7 +188,8 @@ class Storage:
                 self.storage_path = f"{self.name}.h5"
             else:
                 # replace the suffix with json
-                self.storage_path = str(self.storage_path).replace(".h5", ".json")
+                self.storage_path = str(
+                    self.storage_path).replace(".h5", ".json")
 
             json_object = self.store_json()
             str_repr = json.dumps(json_object, indent=2)
@@ -202,7 +204,7 @@ class Storage:
                 f.write(str_repr)
 
     def __str__(self):
-        """rewrite the built-in method to modify the behaviors of print() for this instance
+        """rewrite the built-in method to modify the behaviors of print()
         to make the print result more readable
 
         before: \n
@@ -218,23 +220,24 @@ class Storage:
         return self.name
 
     def __repr__(self):
-        """rewrite the built-in method to modify the behaviors of print() for a list of instances
+        """rewrite the built-in method to modify the behaviors of print()
         to make the print result more readable
 
         before: \n
         >>>print(run1.pipelines) \n
-        [<pipeline.Pipeline object at 0x0000022AA45715A0>, <pipeline.Pipeline object at 0x0000022AA4gd1s0>]
+        [<Pipeline object at 0x22AA4571>, <Pipeline object at 0x22AA45B8>]
 
         after: \n
         >>>print(run1.pipelines) \n
-        ['pipe1', 'pipe2']
+        ['pipe1', 'pipe2']
 
         here, the strings 'pipe1' and 'pipe2' are the names of this instance
         """
         return self.name
 
     def show(self):
-        """use the method to show the detailed information about this instance, for example all attributes and names.
+        """use the method to show the detailed information about this instance
+        for example all attributes and names.
         It should return a string like this:
 
         Examples
@@ -277,7 +280,8 @@ class Storage:
         Parameters:
         -----------
         dict : str
-            The Dictionary consists of Key Value pairs, with the keys being the names of the attribute
+            The dictionary consists of key-value pairs,
+            with the keys being the names of the attributes
             and the value being the value assigned to the attribute
         """
         for key, value in dict.items():
@@ -309,7 +313,7 @@ class Storage:
         Returns:
         --------
         object: object
-            this is a instance of its original class, you can check its type with type()
+            this is an instance of its original class
         """
         object_encoded = object_string.encode()
         object_binary = base64.b64decode(object_encoded)
-- 
GitLab