Add jobs argument to inform CACE about number of jobs needed #135

Open · wants to merge 5 commits into base: main
16 changes: 16 additions & 0 deletions Changelog.md
@@ -1,3 +1,19 @@
# 2.5.4

## Common

- `ngspice` tool:
  - Add `jobs` argument
  - Add `spiceinit_path` argument
  - By default, copy the PDK spiceinit to the simulation directory
- `klayout_drc` tool:
  - Add `jobs` argument
  - Add `drc_script_path` argument
- Reserved variables:
  - Add `CACE{jobs}`
  - Add `CACE{root}`
- Issue a warning when a condition has the same name as a reserved variable

# 2.5.3

## Common
2 changes: 1 addition & 1 deletion cace/__version__.py
@@ -11,7 +11,7 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__version__ = '2.5.3'
__version__ = '2.5.4'

if __name__ == '__main__':
print(__version__, end='')
4 changes: 2 additions & 2 deletions cace/cace_cli.py
@@ -123,7 +123,7 @@ def cli():
'-j',
'--jobs',
type=int,
help="""total number of jobs running in parallel""",
help="""maximum number of jobs running in parallel""",
)
parser.add_argument(
'-s',
@@ -206,7 +206,7 @@ def cli():

# Create the ParameterManager
parameter_manager = ParameterManager(
max_runs=args.max_runs, run_path=args.run_path, jobs=args.jobs
max_runs=args.max_runs, run_path=args.run_path, max_jobs=args.jobs
)

# Load the datasheet
41 changes: 41 additions & 0 deletions cace/common/custom_semaphore.py
@@ -0,0 +1,41 @@
from threading import Condition


class CustomSemaphore:
def __init__(self, value: int = 1):
if value < 0:
raise ValueError('Initial value must be >= 0')

# initialize counter
self._counter = value

# initialize lock
self._condition = Condition()

def __enter__(self):
self.acquire()

def __exit__(self, type, value, traceback):
self.release()

def acquire(self, count: int = 1) -> None:
"""Acquire count permits atomically, or wait until they are available."""

with self._condition:
self._condition.wait_for(lambda: self._counter >= count)
self._counter -= count

def locked(self, count: int = 1) -> bool:
"""Return True if acquire(count) would not return immediately."""

return self._counter < count

def release(self, count: int = 1) -> None:
"""Release count permits."""

with self._condition:
self._counter += count
self._condition.notify_all()
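
For reference, here is a minimal usage sketch of the new `CustomSemaphore` (the pool size of 8 and the `run_tool` helper are illustrative, not part of this PR). Unlike `threading.Semaphore`, whose `acquire()` takes at most one permit per call, a single `acquire(count)` here reserves several permits atomically, which is what lets a multi-threaded tool claim more than one job slot at once:

```python
from cace.common.custom_semaphore import CustomSemaphore

# Shared pool of job slots; the value of 8 is only illustrative
jobs_sem = CustomSemaphore(value=8)

def run_tool(jobs: int) -> None:
    # Atomically reserve `jobs` slots; blocks until all of them are free
    jobs_sem.acquire(jobs)
    try:
        ...  # launch a tool that runs with `jobs` parallel threads
    finally:
        # Return the slots on every exit path, including errors
        jobs_sem.release(jobs)

# Single-slot use also works as a context manager, like threading.Semaphore
with jobs_sem:
    ...  # run a single-threaded step
```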
2 changes: 2 additions & 0 deletions cace/parameter/parameter.py
@@ -200,6 +200,7 @@ def __init__(
paths,
runtime_options,
run_dir,
max_jobs,
jobs_sem,
start_cb=None,
end_cb=None,
@@ -215,6 +216,7 @@
self.paths = paths
self.runtime_options = runtime_options
self.run_dir = run_dir
self.max_jobs = max_jobs
self.jobs_sem = jobs_sem
self.start_cb = start_cb
self.end_cb = end_cb
103 changes: 61 additions & 42 deletions cace/parameter/parameter_klayout_drc.py
@@ -49,7 +49,9 @@ def __init__(

self.add_result(Result('drc_errors'))

self.add_argument(Argument('jobs', 1, False))
self.add_argument(Argument('args', [], False))
self.add_argument(Argument('drc_script_path', None, False))

def is_runnable(self):
netlist_source = self.runtime_options['netlist_source']
Expand All @@ -65,25 +67,38 @@ def implementation(self):

self.cancel_point()

# Acquire a job from the global jobs semaphore
with self.jobs_sem:
jobs = self.get_argument('jobs')

projname = self.datasheet['name']
paths = self.datasheet['paths']
if jobs == 'max':
# Set the number of jobs to the number of cores
jobs = os.cpu_count()
else:
# Make sure that jobs don't exceed max jobs
jobs = min(jobs, os.cpu_count())

info('Running KLayout to get layout DRC report.')
# Acquire job(s) from the global jobs semaphore
self.jobs_sem.acquire(jobs)

# Get the path to the layout, only GDS
(layout_filepath, is_magic) = get_layout_path(
projname, self.paths, check_magic=False
)
projname = self.datasheet['name']
paths = self.datasheet['paths']

# Check if layout exists
if not os.path.isfile(layout_filepath):
err('No layout found!')
self.result_type = ResultType.ERROR
return
info('Running KLayout to get layout DRC report.')

# Get the path to the layout, only GDS
(layout_filepath, is_magic) = get_layout_path(
projname, self.paths, check_magic=False
)

# Check if layout exists
if not os.path.isfile(layout_filepath):
err('No layout found!')
self.result_type = ResultType.ERROR
self.jobs_sem.release(jobs)
return

drc_script_path = self.get_argument('drc_script_path')

if drc_script_path == None:
drc_script_path = os.path.join(
get_pdk_root(),
self.datasheet['PDK'],
@@ -93,36 +108,40 @@
f'{self.datasheet["PDK"]}_mr.drc',
)

report_file_path = os.path.join(self.param_dir, 'report.xml')
report_file_path = os.path.join(self.param_dir, 'report.xml')

if not os.path.exists(drc_script_path):
err(f'DRC script {drc_script_path} does not exist!')
self.result_type = ResultType.ERROR
return
if not os.path.exists(drc_script_path):
err(f'DRC script {drc_script_path} does not exist!')
self.result_type = ResultType.ERROR
self.jobs_sem.release(jobs)
return

arguments = []

# PDK specific arguments
if self.datasheet['PDK'].startswith('sky130'):
arguments = [
'-b',
'-r',
drc_script_path,
'-rd',
f'input={os.path.abspath(layout_filepath)}',
'-rd',
f'topcell={projname}',
'-rd',
f'report={report_file_path}',
'-rd',
f'thr={os.cpu_count()}', # TODO how to distribute cores?
]

returncode = self.run_subprocess(
'klayout',
arguments + self.get_argument('args'),
cwd=self.param_dir,
)
arguments = []

# PDK specific arguments
if self.datasheet['PDK'].startswith('sky130'):
arguments = [
'-b',
'-r',
drc_script_path,
'-rd',
f'input={os.path.abspath(layout_filepath)}',
'-rd',
f'topcell={projname}',
'-rd',
f'report={report_file_path}',
'-rd',
f'thr={os.cpu_count()}', # TODO how to distribute cores?
]

returncode = self.run_subprocess(
'klayout',
arguments + self.get_argument('args'),
cwd=self.param_dir,
)

# Free job(s) from the global jobs semaphore
self.jobs_sem.release(jobs)

# Advance progress bar
if self.step_cb:
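
The job-accounting pattern in this file (resolve the tool's `jobs` argument, reserve that many permits from the shared semaphore, and release them again on every exit path) can be summarized in isolation. This is a simplified sketch rather than the exact PR code; the helper name `resolve_jobs` and the fallback to a single thread are illustrative assumptions:

```python
import os

def resolve_jobs(requested) -> int:
    """Map a tool's `jobs` argument to a concrete number of job slots (sketch)."""
    cpu_count = os.cpu_count() or 1  # falling back to 1 thread is an assumption
    if requested == 'max':
        # 'max' claims as many slots as the machine has CPU threads
        return cpu_count
    # Never request more slots than there are CPU threads
    return min(int(requested), cpu_count)
```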
27 changes: 14 additions & 13 deletions cace/parameter/parameter_manager.py
@@ -23,6 +23,8 @@
import datetime
import threading

from ..common.custom_semaphore import CustomSemaphore

from ..common.misc import mkdirp
from ..common.cace_read import cace_read, cace_read_yaml
from ..common.cace_write import (
@@ -61,7 +63,9 @@ class ParameterManager:
manipulate it.
"""

def __init__(self, datasheet={}, max_runs=None, run_path=None, jobs=None):
def __init__(
self, datasheet={}, max_runs=None, run_path=None, max_jobs=None
):
"""Initialize the object with a datasheet"""
self.datasheet = datasheet
self.max_runs = max_runs
@@ -99,18 +103,15 @@ def __init__(self, datasheet={}, max_runs=None, run_path=None, jobs=None):

self.set_default_paths()

# Set the number of jobs to the number of cores
# if jobs=None
if not jobs:
jobs = os.cpu_count()

# Fallback jobs
if not jobs:
jobs = 4
# If not specified, set the number
# of jobs to the number of cpu threads
self.max_jobs = max_jobs
if not self.max_jobs:
self.max_jobs = os.cpu_count()

self.jobs_sem = threading.Semaphore(value=jobs)
self.jobs_sem = CustomSemaphore(value=self.max_jobs)

dbg(f'Parameter manager: total number of jobs is {jobs}')
info(f'Maximum number of jobs is {self.max_jobs}.')

### datasheet functions ###

@@ -512,8 +513,8 @@ def queue_parameter(
paths,
self.runtime_options,
self.run_dir,
# Semaphore for starting
# new jobs
self.max_jobs,
# Semaphore for starting new jobs
self.jobs_sem,
# Callbacks
start_cb,
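
Putting the pieces together: the `-j`/`--jobs` value from `cace_cli.py` arrives here as `max_jobs`, sizes the shared `CustomSemaphore`, and is handed to every queued parameter. A rough sketch of the intended use, with the value 4 chosen only for illustration and datasheet loading omitted:

```python
from cace.parameter.parameter_manager import ParameterManager

# `-j 4` on the command line ends up here as max_jobs=4;
# with max_jobs=None the manager falls back to os.cpu_count()
parameter_manager = ParameterManager(max_jobs=4)

# Each queued parameter receives max_jobs and the shared jobs_sem, so a
# multi-threaded tool can reserve several of the 4 job slots at once.
```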