diff --git a/pykkn/build_multi_parameters.py b/pykkn/build_multi_parameters.py
index 8209dcae778f244549bf6f60c45cd745aa7b50be..b124a700af9fdd0e3d0c7af502c74d7041ee074d 100644
--- a/pykkn/build_multi_parameters.py
+++ b/pykkn/build_multi_parameters.py
@@ -1,4 +1,4 @@
-from parameter import Parameter
+from pykkn.parameter import Parameter
 
 
 def build_multi_parameters(dic: dict) -> list:
diff --git a/pykkn/dataset.py b/pykkn/dataset.py
index 2ea6b628b7f553bc75b23dc7efe05ba6ad6a0c13..e2095925e95e5835b1afd0d7f088edf1a2fb79c6 100644
--- a/pykkn/dataset.py
+++ b/pykkn/dataset.py
@@ -1,14 +1,11 @@
 import numpy as np
 
-from storage import Storage
+from pykkn.storage import Storage
 
 
 class Dataset(Storage):
     """An object of this class represents the dataset.
 
-    At present, there is no requirement for the type of data, it is simply stored in the dataset.data attribute
-    and further processing will be performed according to the different data types read in.
-
     Parameters
     ----------
     name : str
diff --git a/pykkn/dataset_image.py b/pykkn/dataset_image.py
index 1701ae40ada3a76e4a46585449d93af9a9e703d3..c63c66072d4e826de416c90e3e35dca8ec573e66 100644
--- a/pykkn/dataset_image.py
+++ b/pykkn/dataset_image.py
@@ -3,7 +3,7 @@ from pathlib import Path
 import numpy as np
 from PIL import Image
 
-from storage import Storage
+from pykkn.storage import Storage
 
 
 class Dataset_Image(Storage):
@@ -34,23 +34,8 @@ class Dataset_Image(Storage):
         self.attrs["kkn_DATASET_VERSION"] = "1.0"
         self.attrs["timestamp"] = "-"
 
-        """ The following attributes ensure that an image is correctly displayed in HDF5 view. \n
-        To enable HDFview to correctly display images, the string attributes must have a finite length to be correctly interpreted.\n
-        Further notice: These attributes does not need to to be changed \n
-
-        Attribute name = 'CLASS'             (Required),     Attribute value = 'IMAGE' (Fixed)
-            Explanation: This attribute identifies this data set as intended to be interpreted as an image that conforms to the specifications on this page. \n
-        Attribute name = 'IMAGE_VERSION'     (recommended),  Attribute value = '1.2' (Fixed)
-            Explanation: This attribute identifies the version number of this specification to which it conforms.  The current version number is "1.2". \n
-        Attribute name = 'IMAGESUBCLASS'     (Required),     Attribute value = 'IMAGE_TRUECOLOR' (Changeble, but highly recommended)
-            Explanation: The value of this attribute indicates the type of palette that should be used with the image. Other Attr values = "IMAGE_GRAYSCALE" or "IMAGE_BITMAP" or "IMAGE_INDEXED" \n
-        Attribute name = 'IMAGE_MINMAXRANGE' (recommended)   Attribute value = [0, 255] (Changeable, recommended)
-            Explanation: This attribute is an array of two numbers, of the same HDF5 datatype as the data. The first element is the minimum value of the data, and the second is the maximum. This is used for images with IMAGE_SUBCLASS="IMAGE_GRAYSCALE", "IMAGE_BITMAP" or "IMAGE_INDEXED".
-        Attribute name = 'INTERLACE_MODE'    (Optional),     Attribute value = 'INTERLACE_PIXEL' (Default value)
-            Explanation: For images with more than one component for each pixel, this optional attribute specifies the layout of the data. Other Attribute value = "INTERLACE_PLANE"
-        """
         self.attrs["CLASS"] = np.string_("IMAGE")
-        # self.attrs['IMAGE_MINMAXRANGE'] = np.array([0, 255]) # Better solution: [self.data.min(), self.data.max()], but it doenst work yet!
+        # self.attrs['IMAGE_MINMAXRANGE'] = np.array([0, 255])
         self.attrs["IMAGE_SUBCLASS"] = np.string_("IMAGE_TRUECOLOR")
         # self.attrs['IMAGE_VERSION'] = np.string_('1.2')
         # self.attrs['INTERLACE_MODE'] = np.string_('INTERLACE_PIXEL')
@@ -61,7 +46,7 @@ class Dataset_Image(Storage):
 
     @data.setter
     def data(self, path_image: str):
-        """the setter function to store a image file and some information in an HDF5 file
+        """the setter function to store a image file and some information
 
         Parameters
         ----------
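Note (not part of the patch): the hunk above drops the long inline notes about the HDF5 Image specification attributes (CLASS, IMAGE_VERSION, IMAGE_SUBCLASS, IMAGE_MINMAXRANGE, INTERLACE_MODE). A minimal sketch of what those attributes look like when written with plain h5py, assuming a dummy RGB array and a hypothetical file name:

```python
import h5py
import numpy as np

rgb = np.zeros((64, 64, 3), dtype=np.uint8)  # dummy true-color image

with h5py.File("image_demo.h5", "w") as f:
    dset = f.create_dataset("data", data=rgb)
    # fixed-length strings so HDFView recognizes the dataset as an image
    dset.attrs["CLASS"] = np.string_("IMAGE")
    dset.attrs["IMAGE_SUBCLASS"] = np.string_("IMAGE_TRUECOLOR")
    dset.attrs["IMAGE_VERSION"] = np.string_("1.2")
    dset.attrs["IMAGE_MINMAXRANGE"] = np.array([0, 255], dtype=np.uint8)
```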
diff --git a/pykkn/dataset_video.py b/pykkn/dataset_video.py
index 52b8c8d8378078428090b2ad942539b02d68a635..732073aff2987c0868111a1665aeee9883240e74 100644
--- a/pykkn/dataset_video.py
+++ b/pykkn/dataset_video.py
@@ -2,7 +2,7 @@ from pathlib import Path
 
 import cv2
 
-from storage import Storage
+from pykkn.storage import Storage
 
 
 class Dataset_Video(Storage):
@@ -16,8 +16,8 @@ class Dataset_Video(Storage):
     Examples
     --------
     dataset1 = Dataset_Video('video_dataset_1') \n
-    ataset1.data = r"C:/Users/Administrator/Videos/Captures/test_meeting_recording.mp4"   \n
-    dataset1.data = "C:/Users/Administrator/Videos/Captures/test_meeting_recording.mp4"    \n
+    dataset1.data = "test_meeting_recording.mp4"   \n
+    dataset1.data = "test_meeting_recording.mp4"    \n
     dataset1.attrs['timestamp'] = '2022-06-13 11:22:11' \n
 
     dataset1.set_storage_path('test/test_ut_video.h5')  \n
@@ -43,7 +43,7 @@ class Dataset_Video(Storage):
 
     @data.setter
     def data(self, path_video: str):
-        """the setter function to store a video file and some information in an HDF5 file
+        """the setter function to store a video file and some information
 
         Parameters
         ----------
@@ -56,26 +56,29 @@ class Dataset_Video(Storage):
         with open(path, "rb") as f:
             self._data = f.read()
 
-        # store the name and suffix of the video file, to convert this binary format into original format
+        # store the name and suffix of the video file so the binary
+        # data can later be converted back to its original format
         self.attrs["file_name"] = path.name
         self.attrs["file_suffix"] = path.suffix.split(".")[-1]
 
-        # in order to read some attributes about this video, open it with opencv
+        # open the video with OpenCV to read its metadata
         cap = cv2.VideoCapture(str(path))
 
-        self.attrs["video_fps"] = int(cap.get(cv2.CAP_PROP_FPS))
-        self.attrs["video_num_frames"] = int(cap.get(cv2.CAP_PROP_FRAME_COUNT))
+        count = int(cap.get(cv2.CAP_PROP_FRAME_COUNT))
+        fps = int(cap.get(cv2.CAP_PROP_FPS))
+
+        self.attrs["video_fps"] = fps
+        self.attrs["video_num_frames"] = count
         self.attrs["video_width"] = int(cap.get(cv2.CAP_PROP_FRAME_WIDTH))
         self.attrs["video_height"] = int(cap.get(cv2.CAP_PROP_FRAME_HEIGHT))
-        self.attrs[
-            "video_length"
-        ] = f"{int(cap.get(cv2.CAP_PROP_FRAME_COUNT)) // int(cap.get(cv2.CAP_PROP_FPS))}s"
+
+        self.attrs["video_length"] = f"{count // fps}s"
 
         # delete the useless variable to save memory
         del cap
 
     def output_file(self, path_output):
-        """this function is designed to convert the binary format data into the original format
+        """convert the binary format data into the original format
 
         Parameters
         ----------
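Note (not part of the patch): the rewritten setter above reads the frame count and fps into local variables before formatting video_length. A hedged sketch of the same OpenCV metadata read, with a guard for fps == 0 (which would otherwise make `count // fps` raise ZeroDivisionError on unreadable files) and an explicit `cap.release()` instead of `del cap`; the file name is illustrative:

```python
import cv2

cap = cv2.VideoCapture("test_meeting_recording.mp4")

fps = int(cap.get(cv2.CAP_PROP_FPS))
count = int(cap.get(cv2.CAP_PROP_FRAME_COUNT))
width = int(cap.get(cv2.CAP_PROP_FRAME_WIDTH))
height = int(cap.get(cv2.CAP_PROP_FRAME_HEIGHT))

# guard against fps == 0 for files OpenCV cannot decode
length = f"{count // fps}s" if fps > 0 else "unknown"

cap.release()  # release the capture handle instead of relying on `del`
```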
diff --git a/pykkn/instrument.py b/pykkn/instrument.py
index 5755dfe864bdbc0e2730ca753344e77850c9e26f..6758fef805fd2d6f0f5522c381b800b653838cb7 100644
--- a/pykkn/instrument.py
+++ b/pykkn/instrument.py
@@ -1,13 +1,15 @@
 from typing import List
 
-from model import Model
-from storage import Storage
+from pykkn.model import Model
+from pykkn.storage import Storage
 
 
 class Instrument(Storage):
-    """This class represents Datasets that are mapped from other datasets using a given model
+    """This class represents Datasets that are mapped from other datasets
+    using a given model
 
-    This can be used to convert the input of a sensor to its actual physical data using the given model,
+    This can be used to convert the input of a sensor to its physical data
+    using the given model,
     for example polynomials or lookup tables.
 
     Parameters
diff --git a/pykkn/model.py b/pykkn/model.py
index 3822ed10667596a6285fed0bc49c178886bd4111..dfabd76965903c33718cfc09bf8eba424b97d1f2 100644
--- a/pykkn/model.py
+++ b/pykkn/model.py
@@ -1,11 +1,11 @@
 from typing import List
 
-from parameter import Parameter
-from storage import Storage
+from pykkn.parameter import Parameter
+from pykkn.storage import Storage
 
 
 class Model(Storage):
-    """This object is used to represent the model for data processing in the experiment
+    """This object is used to represent the model for data processing
 
     Parameters
     ----------
@@ -47,4 +47,4 @@ class Model(Storage):
             if isinstance(item, Parameter):
                 self.parameters.append(item)
             else:
-                raise TypeError("the element in list must be a Parameter object")
+                raise TypeError("the element in list must be Parameter object")
diff --git a/pykkn/parameter.py b/pykkn/parameter.py
index 078616a148b10ba1874dda77bf3bfc7f98ea6827..70be443b0318d3d9a05aff904c715ddae9dd032a 100644
--- a/pykkn/parameter.py
+++ b/pykkn/parameter.py
@@ -1,6 +1,6 @@
 import numpy as np
 
-from storage import Storage
+from pykkn.storage import Storage
 
 
 class Parameter(Storage):
diff --git a/pykkn/parse.py b/pykkn/parse.py
index 25307f57c53258377f12596a8e38f76dbe6a0f91..a70004626ce6618e8e68b79022ccb1ec51bb4539 100644
--- a/pykkn/parse.py
+++ b/pykkn/parse.py
@@ -4,14 +4,14 @@ from typing import List
 import h5py
 from PIL import Image
 
-from dataset import Dataset
-from dataset_image import Dataset_Image
-from dataset_video import Dataset_Video
-from instrument import Instrument
-from model import Model
-from parameter import Parameter
-from pipeline import Pipeline
-from run import Run
+from pykkn.dataset import Dataset
+from pykkn.dataset_image import Dataset_Image
+from pykkn.dataset_video import Dataset_Video
+from pykkn.instrument import Instrument
+from pykkn.model import Model
+from pykkn.parameter import Parameter
+from pykkn.pipeline import Pipeline
+from pykkn.run import Run
 
 
 def dataset_data_parse(obj: object, h5: h5py.File) -> object:
@@ -131,7 +131,8 @@ def create_instance(root: h5py.File, key: str) -> object:
             obj = video_data_parse(obj, root[key])
         else:
             raise TypeError(
-                f"Error Dataset Type: {root[key].attrs['kkn_DATASET_SUBCLASS']}"
+                f"Error Dataset Type:{root[key].attrs['kkn_DATASET_SUBCLASS']}"
             )
     else:
         raise TypeError(f"Error Class Type: {root[key].attrs['kkn_CLASS']}")
@@ -163,9 +163,11 @@ def recursive_create_instance(file: h5py.File) -> List[object]:
         if obj.attrs["kkn_CLASS"] not in ["PARAMETER", "DATASET"]:
             for subgroup_name in list(file[key].keys()):
                 if subgroup_name != "pipelines":
-                    obj.add(recursive_create_instance(file[key][subgroup_name]))
+                    obj.add(recursive_create_instance(
+                        file[key][subgroup_name]))
                 else:
-                    # here is the special situation for pipeline object because of its name xx/xx/xx
+                    # special handling for pipeline objects,
+                    # whose names have the form xx/xx/xx
                     # TODO here still some space to improve
                     for a in list(file[key][subgroup_name].keys()):
                         aa = file[key][subgroup_name][a]
@@ -180,7 +182,8 @@ def recursive_create_instance(file: h5py.File) -> List[object]:
                                 ):
                                     pipe_obj.add(
                                         recursive_create_instance(
-                                            file[key][subgroup_name][f"{a}/{b}/{c}"][
+                                            file[key][subgroup_name][
+                                                f"{a}/{b}/{c}"][
                                                 sub_subgroup_name
                                             ]
                                         )
@@ -202,7 +205,8 @@ def pykkn_parse(path: str) -> object:
     Returns
     -------
     object
-        one of the component types, the type of return depends on the structure of json structure
+        one of the component types;
+        the return type depends on the structure of the HDF5 file
     """
     # open an HDF5 file
     file = h5py.File(path)
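Note (not part of the patch): recursive_create_instance rebuilds pykkn objects by walking the HDF5 hierarchy and reading the kkn_CLASS attribute of each group or dataset. A small sketch of that traversal pattern in plain h5py; the file name is illustrative and the attribute may be absent on some nodes:

```python
import h5py

def walk(group, indent=0):
    """Recursively print each member and its kkn_CLASS attribute."""
    for key in group.keys():
        item = group[key]
        kkn_class = item.attrs.get("kkn_CLASS", "-")
        print(" " * indent + f"{key}: {kkn_class}")
        if isinstance(item, h5py.Group):
            walk(item, indent + 2)

with h5py.File("run.h5", "r") as f:
    walk(f)
```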
diff --git a/pykkn/parse_json.py b/pykkn/parse_json.py
index 47f5a1856b455183d39f498b772fbaeaaf79c1d9..33b2cef4b41d587777abb0876d2d4ccfbdb8e711 100644
--- a/pykkn/parse_json.py
+++ b/pykkn/parse_json.py
@@ -1,13 +1,13 @@
 import json
 
-from dataset import Dataset
-from dataset_image import Dataset_Image
-from dataset_video import Dataset_Video
-from instrument import Instrument
-from model import Model
-from parameter import Parameter
-from pipeline import Pipeline
-from run import Run
+from pykkn.dataset import Dataset
+from pykkn.dataset_image import Dataset_Image
+from pykkn.dataset_video import Dataset_Video
+from pykkn.instrument import Instrument
+from pykkn.model import Model
+from pykkn.parameter import Parameter
+from pykkn.pipeline import Pipeline
+from pykkn.run import Run
 
 
 def create_instance(dic: dict) -> object:
@@ -21,7 +21,8 @@ def create_instance(dic: dict) -> object:
     Returns
     -------
     object
-        one of the component types, the type of return depends on the structure of json structure
+        one of the component types;
+        the return type depends on the structure of the JSON input
 
     Raises
     ------
@@ -50,7 +51,8 @@ def create_instance(dic: dict) -> object:
         elif dic["kkn_DATASET_SUBCLASS"] == "VIDEO":
             obj = Dataset_Video(dic["name"])
         else:
-            raise TypeError(f"Error Dataset Type: {dic['kkn_DATASET_SUBCLASS']}")
+            raise TypeError(
+                f"Error Dataset Type: {dic['kkn_DATASET_SUBCLASS']}")
     else:
         raise TypeError(f"Error Class Type: {dic['kkn_CLASS']}")
 
@@ -58,7 +60,8 @@ def create_instance(dic: dict) -> object:
 
 
 def recursive_create_instance(file: dict) -> object:
-    """Recursively read json structure, create the corresponding object and assign the original property value to it
+    """Recursively read json structure
+    create the corresponding object and assign the original property value
 
     Parameters
     ----------
@@ -68,7 +71,8 @@ def recursive_create_instance(file: dict) -> object:
     Returns
     -------
     object
-        one of the component types, the type of return depends on the structure of json file
+        one of the component types;
+        the return type depends on the structure of the JSON file
     """
     # create object
     obj = create_instance(file)
@@ -105,7 +109,8 @@ def pykkn_parse(path):
     Returns
     -------
     object
-        one of the component types, the type of return depends on the structure of json file
+        one of the component types;
+        the return type depends on the structure of the JSON file
     """
     # open and read a json file
     with open(path, "r") as f:
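Note (not part of the patch): create_instance dispatches on the kkn_CLASS and kkn_DATASET_SUBCLASS keys of each JSON node. A minimal sketch of that dispatch for the video case, using a made-up JSON snippet:

```python
import json

from pykkn.dataset_video import Dataset_Video

node = json.loads(
    '{"name": "video_dataset_1", '
    '"kkn_CLASS": "DATASET", '
    '"kkn_DATASET_SUBCLASS": "VIDEO"}'
)

if node["kkn_CLASS"] == "DATASET" and node["kkn_DATASET_SUBCLASS"] == "VIDEO":
    obj = Dataset_Video(node["name"])
else:
    raise TypeError(f"Error Class Type: {node['kkn_CLASS']}")
```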
diff --git a/pykkn/pipeline.py b/pykkn/pipeline.py
index aefd30fa3797a12589ba8b6e75204fe10c2aafdd..db6d955a8c887a8c6dcc9d0e5e656e8bde2ddc8b 100644
--- a/pykkn/pipeline.py
+++ b/pykkn/pipeline.py
@@ -1,8 +1,10 @@
 from typing import List
 
-from dataset import Dataset
-from instrument import Instrument
-from storage import Storage
+from pykkn.dataset import Dataset
+from pykkn.dataset_image import Dataset_Image
+from pykkn.dataset_video import Dataset_Video
+from pykkn.instrument import Instrument
+from pykkn.storage import Storage
 
 
 class Pipeline(Storage):
@@ -52,22 +54,23 @@
         Raises
         ------
         TypeError
-            raised when the element of list_obj is not the type of Instrument or Dataset
+            raised when an element is not an Instrument or Dataset object
         AssertionError
             raised when list_obj is not a list or it is empty
         """
 
-        # Before extend the list of attributes, must be sure that there is actually a non-empty list
+        # before extending the list attributes,
+        # make sure the input is a non-empty list
         assert isinstance(
             list_obj, list
-        ), "the input must be a list containing Instrument or Dataset object(s)"
+        ), "the input must be list containing Instrument or Dataset object(s)"
         assert len(list_obj) >= 1, "the list must be not empty"
 
-        # Assign it to different properties based on the type of elements in the list
+        # Assign each element to the matching attribute based on its type
         for item in list_obj:
             if isinstance(item, Instrument):
                 self.instruments.append(item)
-            elif isinstance(item, Dataset):
+            elif isinstance(item, (Dataset, Dataset_Image, Dataset_Video)):
                 self.data.append(item)
             else:
                 raise TypeError(
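Note (not part of the patch): with the widened type check, Pipeline.add accepts image and video datasets alongside plain Dataset and Instrument objects. A usage sketch under that assumption; the names and the image path are made up, the image file must exist for the Dataset_Image setter to read it, and Dataset is assumed to keep whatever is assigned to .data, as its docstring describes:

```python
from pykkn.dataset import Dataset
from pykkn.dataset_image import Dataset_Image
from pykkn.pipeline import Pipeline

pipe = Pipeline("pipe1")

numeric = Dataset("pressure_raw")
numeric.data = [1.0, 2.0, 3.0]    # plain data is kept in the .data attribute

snapshot = Dataset_Image("snapshot")
snapshot.data = "snapshot.png"    # hypothetical image file on disk

pipe.add([numeric, snapshot])     # both land in pipe.data
```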
diff --git a/pykkn/run.py b/pykkn/run.py
index 80698a2e941a62bdc5a6d58545af6ebd05990641..ae3b40347ed7d8b675b9917f3e75cc858e9fffae 100644
--- a/pykkn/run.py
+++ b/pykkn/run.py
@@ -1,8 +1,8 @@
 from typing import List
 
-from parameter import Parameter
-from pipeline import Pipeline
-from storage import Storage
+from pykkn.parameter import Parameter
+from pykkn.pipeline import Pipeline
+from pykkn.storage import Storage
 
 
 class Run(Storage):
@@ -63,5 +63,5 @@ class Run(Storage):
                 self.pipelines.append(item)
             else:
                 raise TypeError(
-                    "the element in list_obj must be a Parameter or Pipeline object"
+                    "the element must be a Parameter or Pipeline object"
                 )
diff --git a/pykkn/storage.py b/pykkn/storage.py
index 7886b18d0867d1bc268ba297d0e94e11c9cadf86..b9f651ffb3e8fd937787edac834b7e3074e2adea 100644
--- a/pykkn/storage.py
+++ b/pykkn/storage.py
@@ -9,7 +9,8 @@ import numpy as np
 
 
 class Storage:
-    """This class is an abstracted for all other classes, providing initialization function with a name
+    """This class is abstracted for all other classes
+    providing initialization function with a name
     and store function to generate an HDF5 file
 
     Parameters
@@ -47,7 +48,7 @@ class Storage:
         if root is None:
             root = h5py.File(self.storage_path, "w")
 
-        # create a dataset or group according to whether this structure has data
+        # create a dataset or a group, depending on self.is_dataset
         if self.is_dataset:
 
             # transfer the 'value' to data
@@ -58,7 +59,6 @@ class Storage:
 
             # because this class has no subclass, so create it as a dataset
             # special handle for video dataset
-            # it should be converted to numpy.void type, so that it can store in HDF5 file
             if (
                 "kkn_DATASET_SUBCLASS" in self.attrs.keys()
                 and self.attrs["kkn_DATASET_SUBCLASS"] == "VIDEO"
@@ -118,7 +118,7 @@ class Storage:
                 and self.attrs["kkn_DATASET_SUBCLASS"] == "VIDEO"
             )
 
-            # encode the data attribute when this is an instance of dataset_image of dataset_video
+            # encode the data attribute for image or video datasets
             if condition1 or condition2:
                 root["data"] = self.encode(self.data)
             else:
@@ -130,13 +130,13 @@ class Storage:
                     self.data = self.data.tolist()
                 root["data"] = self.data
 
-            # decode the binary information in some attributes only for image dataset
-            # If you do not do that, a TypeError will occur in json saving process
+            # decode binary image attributes so JSON saving succeeds
             if condition1:
                 if isinstance(self.attrs["CLASS"], bytes):
                     self.attrs["CLASS"] = self.attrs["CLASS"].decode()
                 if isinstance(self.attrs["IMAGE_SUBCLASS"], bytes):
-                    self.attrs["IMAGE_SUBCLASS"] = self.attrs["IMAGE_SUBCLASS"].decode()
+                    self.attrs["IMAGE_SUBCLASS"] = self.attrs[
+                        "IMAGE_SUBCLASS"].decode()
 
             for name, value in self.attrs.items():
                 if isinstance(value, np.integer):
@@ -177,7 +177,8 @@ class Storage:
                 self.storage_path = f"{self.name}.h5"
             else:
                 # replace the suffix to h5
-                self.storage_path = str(self.storage_path).replace(".json", ".h5")
+                self.storage_path = str(
+                    self.storage_path).replace(".json", ".h5")
 
             self.store_HDF5()
 
@@ -187,7 +188,8 @@ class Storage:
                 self.storage_path = f"{self.name}.h5"
             else:
                 # replace the suffix with json
-                self.storage_path = str(self.storage_path).replace(".h5", ".json")
+                self.storage_path = str(
+                    self.storage_path).replace(".h5", ".json")
 
             json_object = self.store_json()
             str_repr = json.dumps(json_object, indent=2)
@@ -202,7 +204,7 @@ class Storage:
                 f.write(str_repr)
 
     def __str__(self):
-        """rewrite the built-in method to modify the behaviors of print() for this instance
+        """rewrite the built-in method to modify the behaviors of print()
         to make the print result more readable
 
         before: \n
@@ -218,23 +220,24 @@ class Storage:
         return self.name
 
     def __repr__(self):
-        """rewrite the built-in method to modify the behaviors of print() for a list of instances
+        """rewrite the built-in method to modify the behaviors of print()
         to make the print result more readable
 
         before: \n
         >>>print(run1.pipelines) \n
-        [<pipeline.Pipeline object at 0x0000022AA45715A0>, <pipeline.Pipeline object at 0x0000022AA4gd1s0>]
+        [<pipeline.Pipeline object at 0x0000022AA45715A0>]
 
         after: \n
         >>>print(run1.pipelines) \n
-        ['pipe1', 'pipe2']
+        ['pipe1']
 
-        here, the strings 'pipe1' and 'pipe2' are the names of this instance
+        here, the string 'pipe1' is the name of the instance in the list
         """
         return self.name
 
     def show(self):
-        """use the method to show the detailed information about this instance, for example all attributes and names.
+        """use the method to show the detailed information about this instance
+        for example all attributes and names.
         It should return a string like this:
 
         Examples
@@ -277,7 +280,8 @@ class Storage:
         Parameters:
         -----------
         dict : str
-            The Dictionary consists of Key Value pairs, with the keys being the names of the attribute
+            The dictionary consists of key-value pairs,
+            with the keys being the names of the attributes
             and the value being the value assigned to the attribute
         """
         for key, value in dict.items():
@@ -309,7 +313,7 @@ class Storage:
         Returns:
         --------
         object: object
-            this is a instance of its original class, you can check its type with type()
+            this is an instance of its original class
         """
         object_encoded = object_string.encode()
         object_binary = base64.b64decode(object_encoded)
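Note (not part of the patch): the decode method shown above reverses the base64 encoding that Storage applies to raw image and video bytes before JSON serialization. A standalone sketch of that round-trip using only the standard library; the byte payload is dummy data:

```python
import base64
import json

raw = b"\x00\x01\x02 video-or-image-bytes"

# encode: bytes -> base64 -> str, which json.dumps can serialize
encoded = base64.b64encode(raw).decode()
payload = json.dumps({"data": encoded})

# decode: str -> base64 bytes -> original bytes
restored = base64.b64decode(json.loads(payload)["data"].encode())
assert restored == raw
```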