From 0703c67388193a234e7453eefb20c4d9b85a384a Mon Sep 17 00:00:00 2001
From: Simon Bjurek <simbj106@student.liu.se>
Date: Wed, 26 Feb 2025 12:12:57 +0100
Subject: [PATCH 1/3] moved logger from scheduler gui to main folder

---
 b_asic/{scheduler_gui => }/logger.py | 0
 1 file changed, 0 insertions(+), 0 deletions(-)
 rename b_asic/{scheduler_gui => }/logger.py (100%)
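
Note: this is a pure rename (100% similarity); call sites switch from
"import b_asic.scheduler_gui.logger as logger" to "import b_asic.logger as
logger" in the next patch. A minimal usage sketch of the relocated module,
taken from its own docstring (only the import path changes):

    import b_asic.logger as logger

    log = logger.getLogger()
    log.info("This is a log post with level INFO")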

diff --git a/b_asic/scheduler_gui/logger.py b/b_asic/logger.py
similarity index 100%
rename from b_asic/scheduler_gui/logger.py
rename to b_asic/logger.py
-- 
GitLab


From c8bb844e8e244e4de47f5274c80fabebfd886a5e Mon Sep 17 00:00:00 2001
From: Simon Bjurek <simbj106@student.liu.se>
Date: Wed, 26 Feb 2025 12:48:35 +0100
Subject: [PATCH 2/3] removed some old qt5 references and other small fixes

---
 .gitlab-ci.yml                          |   2 +-
 b_asic/gui_utils/mpl_window.py          |   4 +-
 b_asic/logger.py                        |   4 +-
 b_asic/schedule.py                      |  31 +++++-
 b_asic/scheduler.py                     |   1 +
 b_asic/scheduler_gui/compile.py         |  12 +-
 b_asic/scheduler_gui/main_window.py     |   2 +-
 b_asic/scheduler_gui/scheduler_event.py |   8 +-
 b_asic/scheduler_gui/scheduler_item.py  |   4 +-
 docs_sphinx/conf.py                     |   1 -
 docs_sphinx/index.rst                   |   2 +-
 test/unit/test_schedule.py              | 141 ++++++++++++++++--------
 12 files changed, 145 insertions(+), 67 deletions(-)
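
Notes: besides dropping the Qt5/PySide2 code paths, Schedule.__init__ now ends
with a call to _validate_schedule(), so a schedule is checked for missing or
extra operations, negative slack, and start times beyond the scheduling time.
_reset_y_locations() is also made public as reset_y_locations(). A minimal
sketch of the affected API, based on the tests added below (the direct-form
IIR SFG and latency values are the ones used in test_provided_schedule):

    from b_asic.core_operations import Addition, ConstantMultiplication
    from b_asic.schedule import Schedule
    from b_asic.scheduler import ASAPScheduler
    from b_asic.sfg_generators import direct_form_1_iir

    sfg = direct_form_1_iir([1, 2, 3], [1, 2, 3])
    sfg.set_latency_of_type(Addition.type_name(), 1)
    sfg.set_latency_of_type(ConstantMultiplication.type_name(), 3)

    # __init__ now finishes with _validate_schedule(); an inconsistent
    # schedule (for example start_times=/laps= dicts that do not match the
    # SFG, as exercised by test_provided_schedule) raises ValueError.
    schedule = Schedule(sfg, scheduler=ASAPScheduler())
    schedule.reset_y_locations()  # renamed from _reset_y_locations()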

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 4a822db9..7accae47 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -4,7 +4,7 @@ stages:
 
 before_script:
   - apt-get update --yes
-  # - apt-get install --yes build-essential cmake graphviz python3-pyqt5 xvfb xdg-utils lcov
+  # - apt-get install --yes build-essential cmake graphviz xvfb xdg-utils lcov
   - apt-get install --yes graphviz python3-pyqt5 xvfb xdg-utils
   - apt-get install -y libxcb-cursor-dev
   - python -m pip install --upgrade pip
diff --git a/b_asic/gui_utils/mpl_window.py b/b_asic/gui_utils/mpl_window.py
index 266d4ae7..1a6db678 100644
--- a/b_asic/gui_utils/mpl_window.py
+++ b/b_asic/gui_utils/mpl_window.py
@@ -1,7 +1,7 @@
 """MPLWindow is a dialog that provides an Axes for plotting in."""
 
-from matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg as FigureCanvas
-from matplotlib.backends.backend_qt5agg import NavigationToolbar2QT as NavigationToolbar
+from matplotlib.backends.backend_qtagg import FigureCanvasQTAgg as FigureCanvas
+from matplotlib.backends.backend_qtagg import NavigationToolbar2QT as NavigationToolbar
 from matplotlib.figure import Figure
 from qtpy.QtCore import Qt
 from qtpy.QtWidgets import QDialog, QVBoxLayout
diff --git a/b_asic/logger.py b/b_asic/logger.py
index e6615ed0..606ac6f9 100644
--- a/b_asic/logger.py
+++ b/b_asic/logger.py
@@ -1,6 +1,6 @@
 #!/usr/bin/env python3
 """
-B-ASIC Scheduler-gui Logger Module.
+B-ASIC Logger Module.
 
 Contains a logger that logs to the console and a file using levels. It is based
 on the :mod:`logging` module and has predefined levels of logging.
@@ -8,7 +8,7 @@ on the :mod:`logging` module and has predefined levels of logging.
 Usage:
 ------
 
-    >>> import b_asic.scheduler_gui.logger as logger
+    >>> import b_asic.logger as logger
     >>> log = logger.getLogger()
     >>> log.info('This is a log post with level INFO')
 
diff --git a/b_asic/schedule.py b/b_asic/schedule.py
index 181c426e..543e10af 100644
--- a/b_asic/schedule.py
+++ b/b_asic/schedule.py
@@ -117,10 +117,13 @@ class Schedule:
             self._start_times = start_times
             self._laps.update(laps)
             self._remove_delays_no_laps()
+
         max_end_time = self.get_max_end_time()
         if not self._schedule_time:
             self._schedule_time = max_end_time
 
+        self._validate_schedule()
+
     def __str__(self) -> str:
         """Return a string representation of this Schedule."""
         res: List[Tuple[GraphID, int, int, int]] = [
@@ -155,6 +158,32 @@ class Schedule:
 
         return string_io.getvalue()
 
+    def _validate_schedule(self) -> None:
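+        """Validate the schedule, raising ValueError on any inconsistency."""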
+        if self._schedule_time is None:
+            raise ValueError("Schedule without set scheduling time detected.")
+        if not isinstance(self._schedule_time, int):
+            raise ValueError("Schedule with non-integer scheduling time detected.")
+
+        ops = {op.graph_id for op in self._sfg.operations}
+        missing_elems = ops - set(self._start_times)
+        extra_elems = set(self._start_times) - ops
+        if missing_elems:
+            raise ValueError(
+                f"Missing operations detected in start_times: {missing_elems}"
+            )
+        if extra_elems:
+            raise ValueError(f"Extra operations detected in start_times: {extra_elems}")
+
+        for graph_id, time in self._start_times.items():
+            if self.forward_slack(graph_id) < 0 or self.backward_slack(graph_id) < 0:
+                raise ValueError(
+                    f"Negative slack detected in Schedule for operation: {graph_id}."
+                )
+            if time > self._schedule_time:
+                raise ValueError(
+                    f"Start time larger than scheduling time detected in Schedule for operation {graph_id}"
+                )
+
     def start_time_of_operation(self, graph_id: GraphID) -> int:
         """
         Return the start time of the operation specified by *graph_id*.
@@ -1122,7 +1151,7 @@ class Schedule:
             color="black",
         )
 
-    def _reset_y_locations(self) -> None:
+    def reset_y_locations(self) -> None:
         """Reset all the y-locations in the schedule to None"""
         self._y_locations = defaultdict(_y_locations_default)
 
diff --git a/b_asic/scheduler.py b/b_asic/scheduler.py
index 74a892c0..87708e75 100644
--- a/b_asic/scheduler.py
+++ b/b_asic/scheduler.py
@@ -172,6 +172,7 @@ class ListScheduler(Scheduler, ABC):
         cyclic: Optional[bool] = False,
     ) -> None:
         super()
+
         if max_resources is not None:
             if not isinstance(max_resources, dict):
                 raise ValueError("max_resources must be a dictionary.")
diff --git a/b_asic/scheduler_gui/compile.py b/b_asic/scheduler_gui/compile.py
index 2dc7b719..fe97bf54 100644
--- a/b_asic/scheduler_gui/compile.py
+++ b/b_asic/scheduler_gui/compile.py
@@ -18,7 +18,7 @@ from qtpy import uic
 from setuptools_scm import get_version
 
 try:
-    import b_asic.scheduler_gui.logger as logger
+    import b_asic.logger as logger
 
     log = logger.getLogger()
     sys.excepthook = logger.handle_exceptions
@@ -37,18 +37,16 @@ def _check_filenames(*filenames: str) -> None:
 
 def _check_qt_version() -> None:
     """
-    Check if PySide2, PyQt5, PySide6, or PyQt6 is installed.
+    Check if PySide6 or PyQt6 is installed.
 
     Otherwise, raise AssertionError exception.
     """
-    assert (
-        uic.PYSIDE2 or uic.PYQT5 or uic.PYSIDE6 or uic.PYQT6
-    ), "Python QT bindings must be installed"
+    assert uic.PYSIDE6 or uic.PYQT6, "Python QT bindings must be installed"
 
 
 def replace_qt_bindings(filename: str) -> None:
     """
-    Replace qt-binding API in *filename* from PySide2/6 or PyQt5/6 to qtpy.
+    Replace qt-binding API in *filename* from PySide6 or PyQt6 to qtpy.
 
     Parameters
     ----------
@@ -57,8 +55,6 @@ def replace_qt_bindings(filename: str) -> None:
     """
     with open(f"{filename}") as file:
         filedata = file.read()
-        filedata = filedata.replace("from PyQt5", "from qtpy")
-        filedata = filedata.replace("from PySide2", "from qtpy")
         filedata = filedata.replace("from PyQt6", "from qtpy")
         filedata = filedata.replace("from PySide6", "from qtpy")
     with open(f"{filename}", "w") as file:
diff --git a/b_asic/scheduler_gui/main_window.py b/b_asic/scheduler_gui/main_window.py
index c15be267..217afa8a 100644
--- a/b_asic/scheduler_gui/main_window.py
+++ b/b_asic/scheduler_gui/main_window.py
@@ -56,7 +56,7 @@ from qtpy.QtWidgets import (
 )
 
 # B-ASIC
-import b_asic.scheduler_gui.logger as logger
+import b_asic.logger as logger
 from b_asic._version import __version__
 from b_asic.graph_component import GraphComponent, GraphID
 from b_asic.gui_utils.about_window import AboutWindow
diff --git a/b_asic/scheduler_gui/scheduler_event.py b/b_asic/scheduler_gui/scheduler_event.py
index 89d4a384..617e14cb 100644
--- a/b_asic/scheduler_gui/scheduler_event.py
+++ b/b_asic/scheduler_gui/scheduler_event.py
@@ -19,7 +19,7 @@ from b_asic.scheduler_gui.operation_item import OperationItem
 from b_asic.scheduler_gui.timeline_item import TimelineItem
 
 
-class SchedulerEvent:  # PyQt5
+class SchedulerEvent:
     """
     Event filter and handlers for SchedulerItem.
 
@@ -29,7 +29,7 @@ class SchedulerEvent:  # PyQt5
         The parent QGraphicsItem.
     """
 
-    class Signals(QObject):  # PyQt5
+    class Signals(QObject):
         """A class representing signals."""
 
         component_selected = Signal(str)
@@ -43,11 +43,11 @@ class SchedulerEvent:  # PyQt5
     _axes: Optional[AxesItem]
     _current_pos: QPointF
     _delta_time: int
-    _signals: Signals  # PyQt5
+    _signals: Signals
     _schedule: Schedule
     _old_op_position: int = -1
 
-    def __init__(self, parent: Optional[QGraphicsItem] = None):  # PyQt5
+    def __init__(self, parent: Optional[QGraphicsItem] = None):
         super().__init__(parent=parent)
         self._signals = self.Signals()
 
diff --git a/b_asic/scheduler_gui/scheduler_item.py b/b_asic/scheduler_gui/scheduler_item.py
index 80e09ac6..13af04a9 100644
--- a/b_asic/scheduler_gui/scheduler_item.py
+++ b/b_asic/scheduler_gui/scheduler_item.py
@@ -31,7 +31,7 @@ from b_asic.scheduler_gui.signal_item import SignalItem
 from b_asic.types import GraphID
 
 
-class SchedulerItem(SchedulerEvent, QGraphicsItemGroup):  # PySide2 / PyQt5
+class SchedulerItem(SchedulerEvent, QGraphicsItemGroup):
     """
     A class to represent a schedule in a QGraphicsScene.
 
@@ -312,7 +312,7 @@ class SchedulerItem(SchedulerEvent, QGraphicsItemGroup):  # PySide2 / PyQt5
         )
 
     def _redraw_from_start(self) -> None:
-        self.schedule._reset_y_locations()
+        self.schedule.reset_y_locations()
         self.schedule.sort_y_locations_on_start_times()
         for graph_id in self.schedule.start_times.keys():
             self._set_position(graph_id)
diff --git a/docs_sphinx/conf.py b/docs_sphinx/conf.py
index ed0e0e06..32ba2028 100644
--- a/docs_sphinx/conf.py
+++ b/docs_sphinx/conf.py
@@ -40,7 +40,6 @@ intersphinx_mapping = {
     'graphviz': ('https://graphviz.readthedocs.io/en/stable/', None),
     'matplotlib': ('https://matplotlib.org/stable/', None),
     'numpy': ('https://numpy.org/doc/stable/', None),
-    'PyQt5': ("https://www.riverbankcomputing.com/static/Docs/PyQt5", None),
     'networkx': ('https://networkx.org/documentation/stable', None),
     'mplsignal': ('https://mplsignal.readthedocs.io/en/stable/', None),
 }
diff --git a/docs_sphinx/index.rst b/docs_sphinx/index.rst
index bcbb9c1e..6cb1b85f 100644
--- a/docs_sphinx/index.rst
+++ b/docs_sphinx/index.rst
@@ -39,7 +39,7 @@ can pull new changes without having to reinstall it. It also makes it easy to co
 any improvements.
 
 In addition to the dependencies that are automatically installed, you will also
-need a Qt-binding, but you are free to choose from the available Qt5 and Qt6 bindings.
+need a Qt-binding, but you are free to choose between PyQt6 and PySide6.
 See `https://gitlab.liu.se/da/B-ASIC <https://gitlab.liu.se/da/B-ASIC>`_ for more info.
 
 If you use B-ASIC in a publication, please acknowledge it. Later on there will be a
diff --git a/test/unit/test_schedule.py b/test/unit/test_schedule.py
index 4d45755d..e7c51569 100644
--- a/test/unit/test_schedule.py
+++ b/test/unit/test_schedule.py
@@ -11,7 +11,7 @@ from b_asic.core_operations import Addition, Butterfly, ConstantMultiplication
 from b_asic.process import OperatorProcess
 from b_asic.schedule import Schedule
 from b_asic.scheduler import ALAPScheduler, ASAPScheduler
-from b_asic.sfg_generators import direct_form_fir
+from b_asic.sfg_generators import direct_form_1_iir, direct_form_fir
 from b_asic.signal_flow_graph import SFG
 from b_asic.special_operations import Delay, Input, Output
 
@@ -247,6 +247,50 @@ class TestInit:
         }
         assert schedule.schedule_time == 10
 
+    def test_provided_schedule(self):
+        sfg = direct_form_1_iir([1, 2, 3], [1, 2, 3])
+
+        sfg.set_latency_of_type(Addition.type_name(), 1)
+        sfg.set_latency_of_type(ConstantMultiplication.type_name(), 3)
+        sfg.set_execution_time_of_type(Addition.type_name(), 1)
+        sfg.set_execution_time_of_type(ConstantMultiplication.type_name(), 1)
+
+        start_times = {
+            "in0": 1,
+            "cmul0": 1,
+            "cmul1": 0,
+            "cmul2": 0,
+            "cmul3": 0,
+            "cmul4": 0,
+            "add3": 3,
+            "add1": 3,
+            "add0": 4,
+            "add2": 5,
+            "out0": 6,
+        }
+        laps = {
+            "s8": 1,
+            "s10": 2,
+            "s15": 1,
+            "s17": 2,
+            "s0": 0,
+            "s3": 0,
+            "s12": 0,
+            "s11": 0,
+            "s14": 0,
+            "s13": 0,
+            "s6": 0,
+            "s4": 0,
+            "s5": 0,
+            "s2": 0,
+        }
+
+        schedule = Schedule(sfg, start_times=start_times, laps=laps)
+
+        assert schedule.start_times == start_times
+        assert schedule.laps == laps
+        assert schedule.schedule_time == 6
+
 
 class TestSlacks:
     def test_forward_backward_slack_normal_latency(self, precedence_sfg_delays):
@@ -297,24 +341,22 @@ class TestSlacks:
         schedule = Schedule(precedence_sfg_delays, scheduler=ASAPScheduler())
         schedule.print_slacks()
         captured = capsys.readouterr()
-        assert (
-            captured.out
-            == """Graph ID | Backward |  Forward
----------|----------|---------
-add0     |        0 |        0
-add1     |        0 |        0
-add2     |        0 |        0
-add3     |        0 |        7
-cmul0    |        0 |        1
-cmul1    |        0 |        0
-cmul2    |        0 |        0
-cmul3    |        4 |        0
-cmul4    |       16 |        0
-cmul5    |       16 |        0
-cmul6    |        4 |        0
-in0      |       oo |        0
-out0     |        0 |       oo
-"""
+        assert captured.out == (
+            "Graph ID | Backward |  Forward\n"
+            "---------|----------|---------\n"
+            "add0     |        0 |        0\n"
+            "add1     |        0 |        0\n"
+            "add2     |        0 |        0\n"
+            "add3     |        0 |        7\n"
+            "cmul0    |        0 |        1\n"
+            "cmul1    |        0 |        0\n"
+            "cmul2    |        0 |        0\n"
+            "cmul3    |        4 |        0\n"
+            "cmul4    |       16 |        0\n"
+            "cmul5    |       16 |        0\n"
+            "cmul6    |        4 |        0\n"
+            "in0      |       oo |        0\n"
+            "out0     |        0 |       oo\n"
         )
         assert captured.err == ""
 
@@ -325,24 +367,22 @@ out0     |        0 |       oo
         schedule = Schedule(precedence_sfg_delays, scheduler=ASAPScheduler())
         schedule.print_slacks(1)
         captured = capsys.readouterr()
-        assert (
-            captured.out
-            == """Graph ID | Backward |  Forward
----------|----------|---------
-cmul0    |        0 |        1
-add0     |        0 |        0
-add1     |        0 |        0
-cmul1    |        0 |        0
-cmul2    |        0 |        0
-add3     |        0 |        7
-add2     |        0 |        0
-out0     |        0 |       oo
-cmul3    |        4 |        0
-cmul6    |        4 |        0
-cmul4    |       16 |        0
-cmul5    |       16 |        0
-in0      |       oo |        0
-"""
+        assert captured.out == (
+            "Graph ID | Backward |  Forward\n"
+            "---------|----------|---------\n"
+            "cmul0    |        0 |        1\n"
+            "add0     |        0 |        0\n"
+            "add1     |        0 |        0\n"
+            "cmul1    |        0 |        0\n"
+            "cmul2    |        0 |        0\n"
+            "add3     |        0 |        7\n"
+            "add2     |        0 |        0\n"
+            "out0     |        0 |       oo\n"
+            "cmul3    |        4 |        0\n"
+            "cmul6    |        4 |        0\n"
+            "cmul4    |       16 |        0\n"
+            "cmul5    |       16 |        0\n"
+            "in0      |       oo |        0\n"
         )
         assert captured.err == ""
 
@@ -802,10 +842,23 @@ class TestYLocations:
         sfg_simple_filter.set_latency_of_type(Addition.type_name(), 1)
         sfg_simple_filter.set_latency_of_type(ConstantMultiplication.type_name(), 2)
         schedule = Schedule(sfg_simple_filter, ASAPScheduler())
-        # Assign locations
-        schedule.show()
-        assert schedule._y_locations == {'in0': 0, 'cmul0': 1, 'add0': 3, 'out0': 2}
-        schedule.move_y_location('add0', 1, insert=True)
-        assert schedule._y_locations == {'in0': 0, 'cmul0': 2, 'add0': 1, 'out0': 3}
-        schedule.move_y_location('out0', 1)
-        assert schedule._y_locations == {'in0': 0, 'cmul0': 2, 'add0': 1, 'out0': 1}
+
+        assert schedule._y_locations == {"in0": 0, "cmul0": 1, "add0": 3, "out0": 2}
+        schedule.move_y_location("add0", 1, insert=True)
+        assert schedule._y_locations == {"in0": 0, "cmul0": 2, "add0": 1, "out0": 3}
+        schedule.move_y_location("out0", 1)
+        assert schedule._y_locations == {"in0": 0, "cmul0": 2, "add0": 1, "out0": 1}
+
+    def test_reset(self, sfg_simple_filter):
+        sfg_simple_filter.set_latency_of_type(Addition.type_name(), 1)
+        sfg_simple_filter.set_latency_of_type(ConstantMultiplication.type_name(), 2)
+        schedule = Schedule(sfg_simple_filter, ASAPScheduler())
+
+        assert schedule._y_locations == {"in0": 0, "cmul0": 1, "add0": 3, "out0": 2}
+        schedule.reset_y_locations()
+        assert schedule._y_locations["in0"] is None
+        assert schedule._y_locations["cmul0"] is None
+        assert schedule._y_locations["add0"] is None
+        assert schedule._y_locations["out0"] is None
+        assert schedule._y_locations["foo"] is None
-- 
GitLab


From 3189fe94e27b33b7016cfdd31237a949921605ea Mon Sep 17 00:00:00 2001
From: Simon Bjurek <simbj106@student.liu.se>
Date: Wed, 26 Feb 2025 17:31:29 +0100
Subject: [PATCH 3/3] greatly improved speed of ListScheduler and added more
 tests

---
 .gitlab-ci.yml                    |   2 +-
 b_asic/core_operations.py         |   6 +-
 b_asic/schedule.py                |  15 ++
 b_asic/scheduler.py               | 122 +++++----
 b_asic/special_operations.py      |   4 +-
 test/unit/test_list_schedulers.py | 405 ++++++++++++++++++++++++++++++
 6 files changed, 483 insertions(+), 71 deletions(-)
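
Notes: the speed-up comes from caching per-operation latencies and execution
times and from computing the list-scheduling priorities (deadline, ALAP output
slack, fan-out) once up front instead of on every ready-operation query.
Schedule also gains get_max_non_io_end_time(), which the new FFT I/O-time
tests use. A minimal usage sketch mirroring those tests; the HybridScheduler
import path is an assumption based on test/unit/test_list_schedulers.py and
may need adjusting:

    from b_asic.core_operations import Butterfly, ConstantMultiplication
    from b_asic.list_schedulers import HybridScheduler  # assumed import path
    from b_asic.schedule import Schedule
    from b_asic.sfg_generators import radix_2_dif_fft

    sfg = radix_2_dif_fft(points=4)
    sfg.set_latency_of_type(Butterfly.type_name(), 1)
    sfg.set_latency_of_type(ConstantMultiplication.type_name(), 3)
    sfg.set_execution_time_of_type(Butterfly.type_name(), 1)
    sfg.set_execution_time_of_type(ConstantMultiplication.type_name(), 1)

    resources = {Butterfly.type_name(): 1, ConstantMultiplication.type_name(): 1}
    # Same configuration as schedule_2 in test_cyclic_scheduling below.
    schedule = Schedule(
        sfg, scheduler=HybridScheduler(resources), schedule_time=6, cyclic=True
    )
    print(schedule.get_max_non_io_end_time())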

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 7accae47..4a822db9 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -4,7 +4,7 @@ stages:
 
 before_script:
   - apt-get update --yes
-  # - apt-get install --yes build-essential cmake graphviz xvfb xdg-utils lcov
+  # - apt-get install --yes build-essential cmake graphviz python3-pyqt5 xvfb xdg-utils lcov
   - apt-get install --yes graphviz python3-pyqt5 xvfb xdg-utils
   - apt-get install -y libxcb-cursor-dev
   - python -m pip install --upgrade pip
diff --git a/b_asic/core_operations.py b/b_asic/core_operations.py
index 55421296..d09e0c62 100644
--- a/b_asic/core_operations.py
+++ b/b_asic/core_operations.py
@@ -69,7 +69,7 @@ class Constant(AbstractOperation):
 
     @property
     def latency(self) -> int:
-        return self.latency_offsets["out0"]
+        return 0
 
     def __repr__(self) -> str:
         return f"Constant({self.value})"
@@ -1689,7 +1689,7 @@ class DontCare(AbstractOperation):
 
     @property
     def latency(self) -> int:
-        return self.latency_offsets["out0"]
+        return 0
 
     def __repr__(self) -> str:
         return "DontCare()"
@@ -1766,7 +1766,7 @@ class Sink(AbstractOperation):
 
     @property
     def latency(self) -> int:
-        return self.latency_offsets["in0"]
+        return 0
 
     def __repr__(self) -> str:
         return "Sink()"
diff --git a/b_asic/schedule.py b/b_asic/schedule.py
index 543e10af..c815fdb2 100644
--- a/b_asic/schedule.py
+++ b/b_asic/schedule.py
@@ -212,6 +212,21 @@ class Schedule:
                     )
         return max_end_time
 
+    def get_max_non_io_end_time(self) -> int:
+        """Return the current maximum end time among all non-IO operations."""
+        max_end_time = 0
+        for graph_id, op_start_time in self._start_times.items():
+            if graph_id.startswith("out"):
+                continue
+            operation = cast(Operation, self._sfg.find_by_id(graph_id))
+            for outport in operation.outputs:
+                max_end_time = max(
+                    max_end_time,
+                    op_start_time + cast(int, outport.latency_offset),
+                )
+        return max_end_time
+
     def forward_slack(self, graph_id: GraphID) -> int:
         """
         Return how much an operation can be moved forward in time.
diff --git a/b_asic/scheduler.py b/b_asic/scheduler.py
index 87708e75..6ae328f1 100644
--- a/b_asic/scheduler.py
+++ b/b_asic/scheduler.py
@@ -226,7 +226,7 @@ class ListScheduler(Scheduler, ABC):
                 if resource_amount < resource_lower_bound:
                     raise ValueError(
                         f"Amount of resource: {resource_type} is not enough to "
-                        f"realize schedule for scheduling time: {self._schedule.schedule_time}"
+                        f"realize schedule for scheduling time: {self._schedule.schedule_time}."
                     )
 
         alap_schedule = copy.copy(self._schedule)
@@ -245,8 +245,20 @@ class ListScheduler(Scheduler, ABC):
 
         self._remaining_resources = self._max_resources.copy()
 
-        remaining_ops = self._sfg.operations
-        remaining_ops = [op.graph_id for op in remaining_ops]
+        self._remaining_ops = self._sfg.operations
+        self._remaining_ops = [op.graph_id for op in self._remaining_ops]
+
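+        # Cache per-operation latencies and execution times up front so the
+        # scheduling loop does not repeatedly look them up via find_by_id().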
+        self._cached_latencies = {
+            op_id: self._sfg.find_by_id(op_id).latency for op_id in self._remaining_ops
+        }
+        self._cached_execution_times = {
+            op_id: self._sfg.find_by_id(op_id).execution_time
+            for op_id in self._remaining_ops
+        }
+
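+        # The priority metrics are precomputed from the ALAP schedule instead
+        # of being recomputed on every call to _get_ready_ops_priority_table().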
+        self._deadlines = self._calculate_deadlines(alap_start_times)
+        self._output_slacks = self._calculate_alap_output_slacks(alap_start_times)
+        self._fan_outs = self._calculate_fan_outs(alap_start_times)
 
         self._schedule.start_times = {}
         self.remaining_reads = self._max_concurrent_reads
@@ -260,23 +272,24 @@ class ListScheduler(Scheduler, ABC):
             for input_id in self._input_times:
                 self._schedule.start_times[input_id] = self._input_times[input_id]
                 self._op_laps[input_id] = 0
-            remaining_ops = [
-                elem for elem in remaining_ops if not elem.startswith("in")
+            self._remaining_ops = [
+                elem for elem in self._remaining_ops if not elem.startswith("in")
             ]
 
-        remaining_ops = [op for op in remaining_ops if not op.startswith("dontcare")]
-        remaining_ops = [op for op in remaining_ops if not op.startswith("t")]
-        remaining_ops = [
+        self._remaining_ops = [
+            op for op in self._remaining_ops if not op.startswith("dontcare")
+        ]
+        self._remaining_ops = [
+            op for op in self._remaining_ops if not op.startswith("t")
+        ]
+        self._remaining_ops = [
             op
-            for op in remaining_ops
+            for op in self._remaining_ops
             if not (op.startswith("out") and op in self._output_delta_times)
         ]
 
-        while remaining_ops:
-            ready_ops_priority_table = self._get_ready_ops_priority_table(
-                alap_start_times,
-                remaining_ops,
-            )
+        while self._remaining_ops:
+            ready_ops_priority_table = self._get_ready_ops_priority_table()
             while ready_ops_priority_table:
                 next_op = self._sfg.find_by_id(
                     self._get_next_op_id(ready_ops_priority_table)
@@ -284,8 +297,8 @@ class ListScheduler(Scheduler, ABC):
 
                 self.remaining_reads -= next_op.input_count
 
-                remaining_ops = [
-                    op_id for op_id in remaining_ops if op_id != next_op.graph_id
+                self._remaining_ops = [
+                    op_id for op_id in self._remaining_ops if op_id != next_op.graph_id
                 ]
 
                 self._time_out_counter = 0
@@ -295,25 +308,10 @@ class ListScheduler(Scheduler, ABC):
                     if self._schedule.schedule_time
                     else 0
                 )
-                if not self._schedule.cyclic and self._schedule.schedule_time:
-                    if self._current_time > self._schedule.schedule_time:
-                        raise ValueError(
-                            f"Provided scheduling time {schedule.schedule_time} cannot be reached, "
-                            "try to enable the cyclic property or increase the time."
-                        )
 
-                ready_ops_priority_table = self._get_ready_ops_priority_table(
-                    alap_start_times,
-                    remaining_ops,
-                )
+                ready_ops_priority_table = self._get_ready_ops_priority_table()
 
             self._go_to_next_time_step()
-
-            ready_ops_priority_table = self._get_ready_ops_priority_table(
-                alap_start_times,
-                remaining_ops,
-            )
-
             self.remaining_reads = self._max_concurrent_reads
 
         self._current_time -= 1
@@ -354,43 +352,35 @@ class ListScheduler(Scheduler, ABC):
         sorted_table = sorted(ready_ops_priority_table, key=sort_key)
         return sorted_table[0][0]
 
-    def _get_ready_ops_priority_table(
-        self,
-        alap_start_times: dict["GraphID", int],
-        remaining_ops: list["GraphID"],
-    ) -> list[tuple["GraphID", int, int, int]]:
+    def _get_ready_ops_priority_table(self) -> list[tuple["GraphID", int, int, int]]:
         ready_ops = [
             op_id
-            for op_id in remaining_ops
-            if self._op_is_schedulable(self._sfg.find_by_id(op_id), remaining_ops)
+            for op_id in self._remaining_ops
+            if self._op_is_schedulable(self._sfg.find_by_id(op_id))
         ]
 
-        deadlines = self._calculate_deadlines(alap_start_times)
-        output_slacks = self._calculate_alap_output_slacks(alap_start_times)
-        fan_outs = self._calculate_fan_outs(alap_start_times)
-
-        ready_ops_priority_table = []
-        for op_id in ready_ops:
-            ready_ops_priority_table.append(
-                (op_id, deadlines[op_id], output_slacks[op_id], fan_outs[op_id])
+        return [
+            (
+                op_id,
+                self._deadlines[op_id],
+                self._output_slacks[op_id],
+                self._fan_outs[op_id],
             )
-        return ready_ops_priority_table
+            for op_id in ready_ops
+        ]
 
     def _calculate_deadlines(
         self, alap_start_times: dict["GraphID", int]
     ) -> dict["GraphID", int]:
         return {
-            op_id: start_time + self._sfg.find_by_id(op_id).latency
+            op_id: start_time + self._cached_latencies[op_id]
             for op_id, start_time in alap_start_times.items()
         }
 
     def _calculate_alap_output_slacks(
         self, alap_start_times: dict["GraphID", int]
     ) -> dict["GraphID", int]:
-        return {
-            op_id: start_time - self._current_time
-            for op_id, start_time in alap_start_times.items()
-        }
+        return {op_id: start_time for op_id, start_time in alap_start_times.items()}
 
     def _calculate_fan_outs(
         self, alap_start_times: dict["GraphID", int]
@@ -412,26 +402,22 @@ class ListScheduler(Scheduler, ABC):
                 start_time = start_time % self._schedule.schedule_time
 
             if time_slot >= start_time:
-                if time_slot < start_time + max(
-                    self._sfg.find_by_id(op_id).execution_time, 1
-                ):
+                if time_slot < start_time + max(self._cached_execution_times[op_id], 1):
                     if op_id.startswith(op.type_name()):
                         if op.graph_id != op_id:
                             count += 1
 
         return count < self._remaining_resources[op.type_name()]
 
-    def _op_is_schedulable(
-        self, op: "Operation", remaining_ops: list["GraphID"]
-    ) -> bool:
+    def _op_is_schedulable(self, op: "Operation") -> bool:
         if not self._op_satisfies_resource_constraints(op):
             return False
 
-        op_finish_time = self._current_time + op.latency
+        op_finish_time = self._current_time + self._cached_latencies[op.graph_id]
         future_ops = [
             self._sfg.find_by_id(item[0])
             for item in self._schedule.start_times.items()
-            if item[1] + self._sfg.find_by_id(item[0]).latency == op_finish_time
+            if item[1] + self._cached_latencies[item[0]] == op_finish_time
         ]
 
         future_ops_writes = sum([op.input_count for op in future_ops])
@@ -451,7 +437,7 @@ class ListScheduler(Scheduler, ABC):
 
             source_op_graph_id = source_op.graph_id
 
-            if source_op_graph_id in remaining_ops:
+            if source_op_graph_id in self._remaining_ops:
                 return False
 
             if self._schedule.start_times[source_op_graph_id] != self._current_time - 1:
@@ -466,12 +452,18 @@ class ListScheduler(Scheduler, ABC):
                     self._schedule.start_times.get(source_op_graph_id)
                     + self._op_laps[source_op.graph_id] * self._schedule.schedule_time
                 )
-                proceeding_op_finish_time = proceeding_op_start_time + source_op.latency
+                proceeding_op_finish_time = (
+                    proceeding_op_start_time
+                    + self._cached_latencies[source_op.graph_id]
+                )
             else:
                 proceeding_op_start_time = self._schedule.start_times.get(
                     source_op_graph_id
                 )
-                proceeding_op_finish_time = proceeding_op_start_time + source_op.latency
+                proceeding_op_finish_time = (
+                    proceeding_op_start_time
+                    + self._cached_latencies[source_op.graph_id]
+                )
             earliest_start_time = max(earliest_start_time, proceeding_op_finish_time)
 
         return earliest_start_time <= self._current_time
@@ -502,7 +494,7 @@ class ListScheduler(Scheduler, ABC):
                 self._remaining_resources[Output.type_name()] -= count
 
                 self._current_time = new_time
-                if not self._op_is_schedulable(output, {}):
+                if not self._op_is_schedulable(output):
                     raise ValueError(
                         "Cannot schedule outputs according to the provided output_delta_times. "
                         f"Failed output: {output.graph_id}, "
diff --git a/b_asic/special_operations.py b/b_asic/special_operations.py
index a67b55db..f6d0e84c 100644
--- a/b_asic/special_operations.py
+++ b/b_asic/special_operations.py
@@ -53,7 +53,7 @@ class Input(AbstractOperation):
 
     @property
     def latency(self) -> int:
-        return self.latency_offsets["out0"]
+        return 0
 
     @property
     def value(self) -> Num:
@@ -157,7 +157,7 @@ class Output(AbstractOperation):
 
     @property
     def latency(self) -> int:
-        return self.latency_offsets["in0"]
+        return 0
 
 
 class Delay(AbstractOperation):
diff --git a/test/unit/test_list_schedulers.py b/test/unit/test_list_schedulers.py
index 098e20bd..252baa3d 100644
--- a/test/unit/test_list_schedulers.py
+++ b/test/unit/test_list_schedulers.py
@@ -906,3 +906,408 @@ class TestHybridScheduler:
                     max_concurrent_reads=2,
                 ),
             )
+
+    def test_32_point_fft_custom_io_times(self):
+        POINTS = 32
+        sfg = radix_2_dif_fft(POINTS)
+
+        sfg.set_latency_of_type(Butterfly.type_name(), 1)
+        sfg.set_latency_of_type(ConstantMultiplication.type_name(), 3)
+        sfg.set_execution_time_of_type(Butterfly.type_name(), 1)
+        sfg.set_execution_time_of_type(ConstantMultiplication.type_name(), 1)
+
+        resources = {Butterfly.type_name(): 1, ConstantMultiplication.type_name(): 1}
+        input_times = {f"in{i}": i for i in range(POINTS)}
+        output_delta_times = {f"out{i}": i for i in range(POINTS)}
+        schedule = Schedule(
+            sfg,
+            scheduler=HybridScheduler(
+                resources,
+                input_times=input_times,
+                output_delta_times=output_delta_times,
+            ),
+        )
+
+        for i in range(POINTS):
+            assert schedule.start_times[f"in{i}"] == i
+            assert (
+                schedule.start_times[f"out{i}"]
+                == schedule.get_max_non_io_end_time() + i
+            )
+
+    # Too slow for pipeline right now
+    # def test_64_point_fft_custom_io_times(self):
+    #     POINTS = 64
+    #     sfg = radix_2_dif_fft(POINTS)
+
+    #     sfg.set_latency_of_type(Butterfly.type_name(), 1)
+    #     sfg.set_latency_of_type(ConstantMultiplication.type_name(), 3)
+    #     sfg.set_execution_time_of_type(Butterfly.type_name(), 1)
+    #     sfg.set_execution_time_of_type(ConstantMultiplication.type_name(), 1)
+
+    #     resources = {Butterfly.type_name(): 1, ConstantMultiplication.type_name(): 1}
+    #     input_times = {f"in{i}": i for i in range(POINTS)}
+    #     output_delta_times = {f"out{i}": i for i in range(POINTS)}
+    #     schedule = Schedule(
+    #         sfg,
+    #         scheduler=HybridScheduler(
+    #             resources,
+    #             input_times=input_times,
+    #             output_delta_times=output_delta_times,
+    #         ),
+    #     )
+
+    #     for i in range(POINTS):
+    #         assert schedule.start_times[f"in{i}"] == i
+    #         assert (
+    #             schedule.start_times[f"out{i}"]
+    #             == schedule.get_max_non_io_end_time() + i
+    #         )
+
+    def test_32_point_fft_custom_io_times_cyclic(self):
+        POINTS = 32
+        sfg = radix_2_dif_fft(POINTS)
+
+        sfg.set_latency_of_type(Butterfly.type_name(), 1)
+        sfg.set_latency_of_type(ConstantMultiplication.type_name(), 3)
+        sfg.set_execution_time_of_type(Butterfly.type_name(), 1)
+        sfg.set_execution_time_of_type(ConstantMultiplication.type_name(), 1)
+
+        resources = {Butterfly.type_name(): 1, ConstantMultiplication.type_name(): 1}
+        input_times = {f"in{i}": i for i in range(POINTS)}
+        output_delta_times = {f"out{i}": i for i in range(POINTS)}
+        schedule = Schedule(
+            sfg,
+            scheduler=HybridScheduler(
+                resources,
+                input_times=input_times,
+                output_delta_times=output_delta_times,
+            ),
+            schedule_time=96,
+            cyclic=True,
+        )
+
+        for i in range(POINTS):
+            assert schedule.start_times[f"in{i}"] == i
+            assert schedule.start_times[f"out{i}"] == 96 if i == 0 else i
+
+    def test_cyclic_scheduling(self):
+        sfg = radix_2_dif_fft(points=4)
+
+        sfg.set_latency_of_type(Butterfly.type_name(), 1)
+        sfg.set_latency_of_type(ConstantMultiplication.type_name(), 3)
+        sfg.set_execution_time_of_type(Butterfly.type_name(), 1)
+        sfg.set_execution_time_of_type(ConstantMultiplication.type_name(), 1)
+
+        resources = {
+            Butterfly.type_name(): 1,
+            ConstantMultiplication.type_name(): 1,
+        }
+        schedule_1 = Schedule(sfg, scheduler=HybridScheduler(resources))
+        schedule_2 = Schedule(
+            sfg, scheduler=HybridScheduler(resources), schedule_time=6, cyclic=True
+        )
+        schedule_3 = Schedule(
+            sfg, scheduler=HybridScheduler(resources), schedule_time=5, cyclic=True
+        )
+        schedule_4 = Schedule(
+            sfg, scheduler=HybridScheduler(resources), schedule_time=4, cyclic=True
+        )
+
+        assert schedule_1.start_times == {
+            "in1": 0,
+            "in3": 1,
+            "bfly3": 1,
+            "cmul0": 2,
+            "in0": 2,
+            "in2": 3,
+            "bfly0": 3,
+            "bfly1": 4,
+            "bfly2": 5,
+            "out0": 5,
+            "out1": 6,
+            "out3": 7,
+            "out2": 8,
+        }
+        assert schedule_1.laps == {
+            "s4": 0,
+            "s6": 0,
+            "s5": 0,
+            "s7": 0,
+            "s8": 0,
+            "s12": 0,
+            "s10": 0,
+            "s9": 0,
+            "s0": 0,
+            "s2": 0,
+            "s11": 0,
+            "s1": 0,
+            "s3": 0,
+        }
+        assert schedule_1.schedule_time == 8
+
+        assert schedule_2.start_times == {
+            "in1": 0,
+            "in3": 1,
+            "bfly3": 1,
+            "cmul0": 2,
+            "in0": 2,
+            "in2": 3,
+            "bfly0": 3,
+            "bfly1": 4,
+            "bfly2": 5,
+            "out0": 5,
+            "out1": 6,
+            "out3": 1,
+            "out2": 2,
+        }
+        assert schedule_2.laps == {
+            "s4": 0,
+            "s6": 1,
+            "s5": 0,
+            "s7": 1,
+            "s8": 0,
+            "s12": 0,
+            "s10": 0,
+            "s9": 0,
+            "s0": 0,
+            "s2": 0,
+            "s11": 0,
+            "s1": 0,
+            "s3": 0,
+        }
+        assert schedule_2.schedule_time == 6
+
+        assert schedule_3.start_times == {
+            "in1": 0,
+            "in3": 1,
+            "bfly3": 1,
+            "cmul0": 2,
+            "in0": 2,
+            "in2": 3,
+            "bfly0": 3,
+            "bfly1": 4,
+            "bfly2": 0,
+            "out0": 5,
+            "out1": 1,
+            "out3": 2,
+            "out2": 3,
+        }
+        assert schedule_3.laps == {
+            "s4": 0,
+            "s6": 1,
+            "s5": 0,
+            "s7": 0,
+            "s8": 0,
+            "s12": 0,
+            "s10": 1,
+            "s9": 1,
+            "s0": 0,
+            "s2": 0,
+            "s11": 0,
+            "s1": 0,
+            "s3": 0,
+        }
+        assert schedule_3.schedule_time == 5
+
+        assert schedule_4.start_times == {
+            "in1": 0,
+            "in3": 1,
+            "bfly3": 1,
+            "cmul0": 2,
+            "in0": 2,
+            "in2": 3,
+            "bfly0": 3,
+            "bfly1": 0,
+            "out0": 1,
+            "bfly2": 2,
+            "out2": 2,
+            "out1": 3,
+            "out3": 4,
+        }
+        assert schedule_4.laps == {
+            "s4": 0,
+            "s6": 0,
+            "s5": 0,
+            "s7": 0,
+            "s8": 1,
+            "s12": 1,
+            "s10": 0,
+            "s9": 1,
+            "s0": 0,
+            "s2": 0,
+            "s11": 0,
+            "s1": 0,
+            "s3": 0,
+        }
+        assert schedule_4.schedule_time == 4
+
+    def test_cyclic_scheduling_time_not_provided(self):
+        sfg = ldlt_matrix_inverse(N=2)
+
+        sfg.set_latency_of_type(MADS.type_name(), 3)
+        sfg.set_latency_of_type(Reciprocal.type_name(), 2)
+        sfg.set_execution_time_of_type(MADS.type_name(), 1)
+        sfg.set_execution_time_of_type(Reciprocal.type_name(), 1)
+
+        resources = {MADS.type_name(): 1, Reciprocal.type_name(): 1}
+        with pytest.raises(
+            ValueError,
+            match="Scheduling time must be provided when cyclic = True.",
+        ):
+            Schedule(
+                sfg,
+                scheduler=HybridScheduler(
+                    max_resources=resources,
+                ),
+                cyclic=True,
+            )
+
+    def test_resources_not_enough(self):
+        sfg = ldlt_matrix_inverse(N=3)
+
+        sfg.set_latency_of_type(MADS.type_name(), 3)
+        sfg.set_latency_of_type(Reciprocal.type_name(), 2)
+        sfg.set_execution_time_of_type(MADS.type_name(), 1)
+        sfg.set_execution_time_of_type(Reciprocal.type_name(), 1)
+
+        resources = {MADS.type_name(): 1, Reciprocal.type_name(): 1}
+        with pytest.raises(
+            ValueError,
+            match="Amount of resource: mads is not enough to realize schedule for scheduling time: 5.",
+        ):
+            Schedule(
+                sfg,
+                scheduler=HybridScheduler(
+                    max_resources=resources,
+                ),
+                schedule_time=5,
+            )
+
+    def test_scheduling_time_not_enough(self):
+        sfg = ldlt_matrix_inverse(N=3)
+
+        sfg.set_latency_of_type(MADS.type_name(), 3)
+        sfg.set_latency_of_type(Reciprocal.type_name(), 2)
+        sfg.set_execution_time_of_type(MADS.type_name(), 1)
+        sfg.set_execution_time_of_type(Reciprocal.type_name(), 1)
+
+        resources = {MADS.type_name(): 10, Reciprocal.type_name(): 10}
+        with pytest.raises(
+            ValueError,
+            match="Provided scheduling time 5 cannot be reached, try to enable the cyclic property or increase the time to at least 30.",
+        ):
+            Schedule(
+                sfg,
+                scheduler=HybridScheduler(
+                    max_resources=resources,
+                ),
+                schedule_time=5,
+            )
+
+    def test_cyclic_scheduling_write_and_read_constrained(self):
+        sfg = radix_2_dif_fft(points=4)
+
+        sfg.set_latency_of_type(Butterfly.type_name(), 1)
+        sfg.set_latency_of_type(ConstantMultiplication.type_name(), 3)
+        sfg.set_execution_time_of_type(Butterfly.type_name(), 1)
+        sfg.set_execution_time_of_type(ConstantMultiplication.type_name(), 1)
+
+        resources = {
+            Butterfly.type_name(): 1,
+            ConstantMultiplication.type_name(): 1,
+        }
+        schedule = Schedule(
+            sfg,
+            scheduler=HybridScheduler(
+                resources, max_concurrent_reads=2, max_concurrent_writes=2
+            ),
+            schedule_time=6,
+            cyclic=True,
+        )
+
+        assert schedule.start_times == {
+            "in1": 0,
+            "in3": 1,
+            "bfly3": 1,
+            "cmul0": 2,
+            "in0": 3,
+            "in2": 4,
+            "bfly0": 4,
+            "bfly1": 5,
+            "bfly2": 0,
+            "out0": 6,
+            "out1": 1,
+            "out3": 2,
+            "out2": 3,
+        }
+        assert schedule.laps == {
+            "s4": 0,
+            "s6": 1,
+            "s5": 0,
+            "s7": 0,
+            "s8": 0,
+            "s12": 0,
+            "s10": 1,
+            "s9": 1,
+            "s0": 0,
+            "s2": 0,
+            "s11": 0,
+            "s1": 0,
+            "s3": 0,
+        }
+        assert schedule.schedule_time == 6
+
+        direct, mem_vars = schedule.get_memory_variables().split_on_length()
+        assert mem_vars.read_ports_bound() == 2
+        assert mem_vars.write_ports_bound() == 2
+
+    def test_cyclic_scheduling_several_inputs_and_outputs(self):
+        sfg = radix_2_dif_fft(points=4)
+
+        sfg.set_latency_of_type(Butterfly.type_name(), 1)
+        sfg.set_latency_of_type(ConstantMultiplication.type_name(), 3)
+        sfg.set_execution_time_of_type(Butterfly.type_name(), 1)
+        sfg.set_execution_time_of_type(ConstantMultiplication.type_name(), 1)
+
+        resources = {
+            Butterfly.type_name(): 1,
+            ConstantMultiplication.type_name(): 1,
+            Input.type_name(): 2,
+            Output.type_name(): 2,
+        }
+        schedule = Schedule(
+            sfg, scheduler=HybridScheduler(resources), schedule_time=4, cyclic=True
+        )
+
+        assert schedule.start_times == {
+            "in1": 0,
+            "in3": 0,
+            "bfly3": 0,
+            "cmul0": 1,
+            "in0": 1,
+            "in2": 1,
+            "bfly0": 1,
+            "bfly1": 2,
+            "out0": 3,
+            "out2": 3,
+            "bfly2": 3,
+            "out1": 4,
+            "out3": 4,
+        }
+        assert schedule.laps == {
+            "s4": 0,
+            "s6": 0,
+            "s5": 0,
+            "s7": 0,
+            "s8": 0,
+            "s12": 0,
+            "s10": 1,
+            "s9": 0,
+            "s0": 0,
+            "s2": 0,
+            "s11": 0,
+            "s1": 0,
+            "s3": 0,
+        }
+        assert schedule.schedule_time == 4
-- 
GitLab