diff --git a/activitysim/abm/models/atwork_subtour_scheduling.py b/activitysim/abm/models/atwork_subtour_scheduling.py index 1fc66e4fe..8057d2c43 100644 --- a/activitysim/abm/models/atwork_subtour_scheduling.py +++ b/activitysim/abm/models/atwork_subtour_scheduling.py @@ -8,6 +8,7 @@ import pandas as pd from activitysim.abm.models.util.vectorize_tour_scheduling import ( + TourSchedulingSettings, vectorize_subtour_scheduling, ) from activitysim.core import config, estimation, expressions, simulate @@ -23,15 +24,6 @@ DUMP = False -class AtworkSubtourSchedulingSettings(PydanticReadable): - """ - Settings for the `atwork_subtour_scheduling` component. - """ - - sharrow_skip: bool = True - """Skip Sharow""" # TODO Check this again - - @workflow.step def atwork_subtour_scheduling( state: workflow.State, @@ -39,7 +31,7 @@ def atwork_subtour_scheduling( persons_merged: pd.DataFrame, tdd_alts: pd.DataFrame, skim_dict: SkimDict | SkimDataset, - model_settings: AtworkSubtourSchedulingSettings | None = None, + model_settings: TourSchedulingSettings | None = None, model_settings_file_name: str = "tour_scheduling_atwork.yaml", trace_label: str = "atwork_subtour_scheduling", ) -> None: @@ -56,7 +48,7 @@ def atwork_subtour_scheduling( return if model_settings is None: - model_settings = AtworkSubtourSchedulingSettings.read_settings_file( + model_settings = TourSchedulingSettings.read_settings_file( state.filesystem, model_settings_file_name, ) diff --git a/activitysim/abm/models/joint_tour_scheduling.py b/activitysim/abm/models/joint_tour_scheduling.py index ae149c9dd..465b22b96 100644 --- a/activitysim/abm/models/joint_tour_scheduling.py +++ b/activitysim/abm/models/joint_tour_scheduling.py @@ -7,6 +7,7 @@ import pandas as pd from activitysim.abm.models.util.vectorize_tour_scheduling import ( + TourSchedulingSettings, vectorize_joint_tour_scheduling, ) from activitysim.core import ( @@ -24,16 +25,17 @@ logger = logging.getLogger(__name__) -class JointTourSchedulingSettings(LogitComponentSettings, extra="forbid"): - """ - Settings for the `joint_tour_scheduling` component. - """ - - preprocessor: PreprocessorSettings | None = None - """Setting for the preprocessor.""" - - sharrow_skip: bool = False - """Setting to skip sharrow""" +# class JointTourSchedulingSettings(LogitComponentSettings, extra="forbid"): +# """ +# Settings for the `joint_tour_scheduling` component. 
+# """ +# +# preprocessor: PreprocessorSettings | None = None +# """Setting for the preprocessor.""" +# +# sharrow_skip: bool = False +# """Setting to skip sharrow""" +# @workflow.step @@ -42,7 +44,7 @@ def joint_tour_scheduling( tours: pd.DataFrame, persons_merged: pd.DataFrame, tdd_alts: pd.DataFrame, - model_settings: JointTourSchedulingSettings | None = None, + model_settings: TourSchedulingSettings | None = None, model_settings_file_name: str = "joint_tour_scheduling.yaml", trace_label: str = "joint_tour_scheduling", ) -> None: @@ -51,7 +53,7 @@ def joint_tour_scheduling( """ if model_settings is None: - model_settings = JointTourSchedulingSettings.read_settings_file( + model_settings = TourSchedulingSettings.read_settings_file( state.filesystem, model_settings_file_name, ) diff --git a/activitysim/abm/models/stop_frequency.py b/activitysim/abm/models/stop_frequency.py index 8d9789ac5..7e27ae08a 100644 --- a/activitysim/abm/models/stop_frequency.py +++ b/activitysim/abm/models/stop_frequency.py @@ -3,7 +3,8 @@ from __future__ import annotations import logging -from typing import Any +from pathlib import Path +from typing import Any, Literal import pandas as pd @@ -24,20 +25,44 @@ logger = logging.getLogger(__name__) +class StopFrequencySpecSegmentSettings(LogitComponentSettings, extra="allow"): + # this class specifically allows "extra" settings because ActivitySim + # is set up to have the name of the segment column be identified with + # an arbitrary key. + SPEC: Path + COEFFICIENTS: Path + + class StopFrequencySettings(LogitComponentSettings, extra="forbid"): """ - Settings for the `free_parking` component. + Settings for the stop frequency component. + """ + + LOGIT_TYPE: Literal["MNL"] = "MNL" + """Logit model mathematical form. + + * "MNL" + Multinomial logit model. """ preprocessor: PreprocessorSettings | None = None """Setting for the preprocessor.""" - SPEC_SEGMENTS: dict[str, Any] = {} - # TODO Check this again + SPEC_SEGMENTS: list[StopFrequencySpecSegmentSettings] = {} + + SPEC: Path | None = None + """Utility specification filename. + + This is sometimes alternatively called the utility expressions calculator + (UEC). It is a CSV file giving all the functions for the terms of a + linear-in-parameters utility expression. If SPEC_SEGMENTS is given, then + this unsegmented SPEC should be omitted. 
+ """ SEGMENT_COL: str = "primary_purpose" - # CONSTANTS TODO Check this again + CONSTANTS: dict[str, Any] = {} + """Named constants usable in the utility expressions.""" @workflow.step @@ -136,8 +161,7 @@ def stop_frequency( choices_list = [] for segment_settings in spec_segments: - segment_name = segment_settings[segment_col] - segment_value = segment_settings[segment_col] + segment_name = segment_value = getattr(segment_settings, segment_col) chooser_segment = tours_merged[tours_merged[segment_col] == segment_value] @@ -153,16 +177,14 @@ def stop_frequency( state, model_name=segment_name, bundle_name="stop_frequency" ) - segment_spec = state.filesystem.read_model_spec( - file_name=segment_settings["SPEC"] - ) + segment_spec = state.filesystem.read_model_spec(file_name=segment_settings.SPEC) assert segment_spec is not None, ( "spec for segment_type %s not found" % segment_name ) - coefficients_file_name = segment_settings["COEFFICIENTS"] + coefficients_file_name = segment_settings.COEFFICIENTS coefficients_df = state.filesystem.read_model_coefficients( - file_name=coefficients_file_name + file_name=str(coefficients_file_name) ) segment_spec = simulate.eval_coefficients( state, segment_spec, coefficients_df, estimator diff --git a/activitysim/abm/models/trip_scheduling.py b/activitysim/abm/models/trip_scheduling.py index e9ddf96d0..c92cdb66c 100644 --- a/activitysim/abm/models/trip_scheduling.py +++ b/activitysim/abm/models/trip_scheduling.py @@ -5,6 +5,7 @@ import logging import warnings from builtins import range +from typing import List, Literal import numpy as np import pandas as pd @@ -15,7 +16,7 @@ ) from activitysim.abm.models.util.trip import cleanup_failed_trips, failed_trip_cohorts from activitysim.core import chunk, config, estimation, expressions, tracing, workflow -from activitysim.core.configuration.base import PydanticReadable +from activitysim.core.configuration.base import PreprocessorSettings, PydanticReadable from activitysim.core.util import reindex logger = logging.getLogger(__name__) @@ -41,18 +42,18 @@ DEPARTURE_MODE = "departure" DURATION_MODE = "stop_duration" RELATIVE_MODE = "relative" -PROBS_JOIN_COLUMNS_DEPARTURE_BASED = [ +PROBS_JOIN_COLUMNS_DEPARTURE_BASED: list[str] = [ "primary_purpose", "outbound", "tour_hour", "trip_num", ] -PROBS_JOIN_COLUMNS_DURATION_BASED = ["outbound", "stop_num"] -PROBS_JOIN_COLUMNS_RELATIVE_BASED = ["outbound", "periods_left"] +PROBS_JOIN_COLUMNS_DURATION_BASED: list[str] = ["outbound", "stop_num"] +PROBS_JOIN_COLUMNS_RELATIVE_BASED: list[str] = ["outbound", "periods_left"] -def _logic_version(model_settings): - logic_version = model_settings.get("logic_version", None) +def _logic_version(model_settings: TripSchedulingSettings): + logic_version = model_settings.logic_version if logic_version is None: warnings.warn( "The trip_scheduling component now has a logic_version setting " @@ -196,7 +197,7 @@ def schedule_trips_in_leg( outbound, trips, probs_spec, - model_settings, + model_settings: TripSchedulingSettings, is_last_iteration, trace_label, *, @@ -220,29 +221,25 @@ def schedule_trips_in_leg( depart choice for trips, indexed by trip_id """ - failfix = model_settings.get(FAILFIX, FAILFIX_DEFAULT) - depart_alt_base = model_settings.get("DEPART_ALT_BASE", 0) - scheduling_mode = model_settings.get("scheduling_mode", "departure") - preprocessor_settings = model_settings.get("preprocessor", None) - - if scheduling_mode == "departure": - probs_join_cols = model_settings.get( - "probs_join_cols", PROBS_JOIN_COLUMNS_DEPARTURE_BASED - ) - 
elif scheduling_mode == "stop_duration": - probs_join_cols = model_settings.get( - "probs_join_cols", PROBS_JOIN_COLUMNS_DURATION_BASED - ) - elif scheduling_mode == "relative": - probs_join_cols = model_settings.get( - "probs_join_cols", PROBS_JOIN_COLUMNS_RELATIVE_BASED - ) - else: - logger.error( - "Invalid scheduling mode specified: {0}.".format(scheduling_mode), - "Please select one of ['departure', 'stop_duration', 'relative'] and try again.", - ) - raise ValueError(f"Invalid scheduling mode specified: {scheduling_mode}") + failfix = model_settings.FAILFIX + depart_alt_base = model_settings.DEPART_ALT_BASE + scheduling_mode = model_settings.scheduling_mode + preprocessor_settings = model_settings.preprocessor + + probs_join_cols = model_settings.probs_join_cols + if probs_join_cols is None: + if scheduling_mode == "departure": + probs_join_cols = PROBS_JOIN_COLUMNS_DEPARTURE_BASED + elif scheduling_mode == "stop_duration": + probs_join_cols = PROBS_JOIN_COLUMNS_DURATION_BASED + elif scheduling_mode == "relative": + probs_join_cols = PROBS_JOIN_COLUMNS_RELATIVE_BASED + else: + logger.error( + "Invalid scheduling mode specified: {0}.".format(scheduling_mode), + "Please select one of ['departure', 'stop_duration', 'relative'] and try again.", + ) + raise ValueError(f"Invalid scheduling mode specified: {scheduling_mode}") # logger.debug("%s scheduling %s trips" % (trace_label, trips.shape[0])) @@ -451,6 +448,14 @@ class TripSchedulingSettings(PydanticReadable): """Integer to add to probs column index to get time period it represents. e.g. depart_alt_base = 5 means first column (column 0) represents 5 am""" + scheduling_mode: Literal["departure", "stop_duration", "relative"] = "departure" + + probs_join_cols: list[str] | None = None + + preprocessor: PreprocessorSettings | None = None + + logic_version: int | None = None + @workflow.step(copy_tables=False) def trip_scheduling( @@ -560,7 +565,7 @@ def trip_scheduling( pd.Series(list(range(len(tours))), tours.index), trips_df.tour_id ) - assert "DEPART_ALT_BASE" in model_settings + assert model_settings.DEPART_ALT_BASE failfix = model_settings.FAILFIX max_iterations = model_settings.MAX_ITERATIONS @@ -609,9 +614,7 @@ def trip_scheduling( failed = choices.reindex(trips_chunk.index).isnull() logger.info("%s %s failed", trace_label_i, failed.sum()) - if (failed.sum() > 0) & ( - model_settings.get("scheduling_mode") == "relative" - ): + if (failed.sum() > 0) & (model_settings.scheduling_mode == "relative"): raise RuntimeError("failed trips with relative scheduling mode") if not is_last_iteration: diff --git a/activitysim/abm/models/util/tour_scheduling.py b/activitysim/abm/models/util/tour_scheduling.py index bf69c6234..30a172a85 100644 --- a/activitysim/abm/models/util/tour_scheduling.py +++ b/activitysim/abm/models/util/tour_scheduling.py @@ -9,6 +9,8 @@ from activitysim.abm.models.util import vectorize_tour_scheduling as vts from activitysim.core import config, estimation, expressions, simulate, workflow +from .vectorize_tour_scheduling import TourModeComponentSettings, TourSchedulingSettings + logger = logging.getLogger(__name__) @@ -23,18 +25,26 @@ def run_tour_scheduling( trace_label = model_name model_settings_file_name = f"{model_name}.yaml" - model_settings = state.filesystem.read_model_settings(model_settings_file_name) + model_settings: TourSchedulingSettings = state.filesystem.read_settings_file( + model_settings_file_name, + mandatory=False, + validator_class=TourSchedulingSettings, + ) - if "LOGSUM_SETTINGS" in model_settings: - 
logsum_settings = state.filesystem.read_model_settings( - model_settings["LOGSUM_SETTINGS"] + if model_settings.LOGSUM_SETTINGS: + logsum_settings: TourModeComponentSettings = ( + state.filesystem.read_settings_file( + str(model_settings.LOGSUM_SETTINGS), + mandatory=False, + validator_class=TourModeComponentSettings, + ) ) - logsum_columns = logsum_settings.get("LOGSUM_CHOOSER_COLUMNS", []) + logsum_columns = logsum_settings.LOGSUM_CHOOSER_COLUMNS else: logsum_columns = [] # - filter chooser columns for both logsums and simulate - model_columns = model_settings.get("SIMULATE_CHOOSER_COLUMNS", []) + model_columns = model_settings.SIMULATE_CHOOSER_COLUMNS chooser_columns = logsum_columns + [ c for c in model_columns if c not in logsum_columns ] @@ -44,7 +54,7 @@ def run_tour_scheduling( timetable = state.get_injectable("timetable") # - run preprocessor to annotate choosers - preprocessor_settings = model_settings.get("preprocessor", None) + preprocessor_settings = model_settings.preprocessor if preprocessor_settings: locals_d = {"tt": timetable.attach_state(state)} locals_d.update(config.get_model_constants(model_settings)) @@ -58,9 +68,9 @@ def run_tour_scheduling( ) estimators = {} - if "TOUR_SPEC_SEGMENTS" in model_settings: + if model_settings.TOUR_SPEC_SEGMENTS: # load segmented specs - spec_segment_settings = model_settings.get("SPEC_SEGMENTS", {}) + spec_segment_settings = model_settings.SPEC_SEGMENTS specs = {} sharrow_skips = {} for spec_segment_name, spec_settings in spec_segment_settings.items(): @@ -71,13 +81,13 @@ def run_tour_scheduling( state, model_name=bundle_name, bundle_name=bundle_name ) - spec_file_name = spec_settings["SPEC"] + spec_file_name = spec_settings.SPEC model_spec = state.filesystem.read_model_spec(file_name=spec_file_name) coefficients_df = state.filesystem.read_model_coefficients(spec_settings) specs[spec_segment_name] = simulate.eval_coefficients( state, model_spec, coefficients_df, estimator ) - sharrow_skips[spec_segment_name] = spec_settings.get("sharrow_skip", False) + sharrow_skips[spec_segment_name] = spec_settings.sharrow_skip if estimator: estimators[spec_segment_name] = estimator # add to local list @@ -86,7 +96,7 @@ def run_tour_scheduling( estimator.write_coefficients(coefficients_df, spec_settings) # - spec dict segmented by primary_purpose - tour_segment_settings = model_settings.get("TOUR_SPEC_SEGMENTS", {}) + tour_segment_settings = model_settings.TOUR_SPEC_SEGMENTS tour_segments = {} for tour_segment_name, spec_segment_name in tour_segment_settings.items(): tour_segments[tour_segment_name] = {} @@ -105,15 +115,17 @@ def run_tour_scheduling( else: # unsegmented spec - assert "SPEC_SEGMENTS" not in model_settings - assert "TOUR_SPEC_SEGMENTS" not in model_settings + assert ( + not model_settings.SPEC_SEGMENTS + ), f"model_settings.SPEC_SEGMENTS should be omitted not {model_settings.SPEC_SEGMENTS!r}" + assert not model_settings.TOUR_SPEC_SEGMENTS assert tour_segment_col is None estimator = estimation.manager.begin_estimation(state, model_name) - spec_file_name = model_settings["SPEC"] + spec_file_name = model_settings.SPEC model_spec = state.filesystem.read_model_spec(file_name=spec_file_name) - sharrow_skip = model_settings.get("sharrow_skip", False) + sharrow_skip = model_settings.sharrow_skip coefficients_df = state.filesystem.read_model_coefficients(model_settings) model_spec = simulate.eval_coefficients( state, model_spec, coefficients_df, estimator diff --git a/activitysim/abm/models/util/vectorize_tour_scheduling.py 
b/activitysim/abm/models/util/vectorize_tour_scheduling.py
index b1701c309..88fd2dfd1 100644
--- a/activitysim/abm/models/util/vectorize_tour_scheduling.py
+++ b/activitysim/abm/models/util/vectorize_tour_scheduling.py
@@ -3,13 +3,18 @@ from __future__ import annotations
 import logging
+from pathlib import Path
+from typing import Any

 import numpy as np
 import pandas as pd

+from activitysim.abm.models.tour_mode_choice import TourModeComponentSettings
 from activitysim.core import chunk, config, expressions, los, simulate
 from activitysim.core import timetable as tt
 from activitysim.core import tracing, workflow
+from activitysim.core.configuration.base import PreprocessorSettings, PydanticReadable
+from activitysim.core.configuration.logit import LogitComponentSettings
 from activitysim.core.interaction_sample_simulate import interaction_sample_simulate
 from activitysim.core.util import reindex

@@ -21,16 +26,52 @@
 RUN_ALTS_PREPROCESSOR_BEFORE_MERGE = True  # see FIXME below before changing this


-def skims_for_logsums(state: workflow.State, tour_purpose, model_settings, trace_label):
-    assert "LOGSUM_SETTINGS" in model_settings
+# class TourSchedulingSpecSegmentsSettings(PydanticReadable, extra="forbid"):
+#     COEFFICIENTS: Path
+#     SPEC: Path
+
+class TourSchedulingSettings(LogitComponentSettings, extra="forbid"):
+    LOGSUM_SETTINGS: Path | None = None
+    DESTINATION_FOR_TOUR_PURPOSE: str | dict[str, str] | None = None
+    LOGSUM_PREPROCESSOR: str = "preprocessor"
+    ALTS_PREPROCESSOR: PreprocessorSettings | dict[str, PreprocessorSettings] = {}
+    """
+    If the alternatives preprocessor is a single PreprocessorSettings object,
+    it is assumed to be an unsegmented preprocessor. Otherwise, the dict keys
+    give the segments.
+    """
+    SIMULATE_CHOOSER_COLUMNS: list[str] | None = None
+    preprocessor: PreprocessorSettings | None = None
+    """Setting for the preprocessor."""
+
+    SPEC_SEGMENTS: dict[str, LogitComponentSettings] = {}
+
+    TOUR_SPEC_SEGMENTS: dict[str, str] = {}
+
+    SPEC: Path | None = None
+    """Utility specification filename.
+
+    This is sometimes alternatively called the utility expressions calculator
+    (UEC). It is a CSV file giving all the functions for the terms of a
+    linear-in-parameters utility expression. If SPEC_SEGMENTS is given, then
+    this unsegmented SPEC should be omitted.
+ """ + + +def skims_for_logsums( + state: workflow.State, + tour_purpose, + model_settings: TourSchedulingSettings, + trace_label: str, +): network_los = state.get_injectable("network_los") skim_dict = network_los.get_default_skim_dict() orig_col_name = "home_zone_id" - destination_for_tour_purpose = model_settings.get("DESTINATION_FOR_TOUR_PURPOSE") + destination_for_tour_purpose = model_settings.DESTINATION_FOR_TOUR_PURPOSE if isinstance(destination_for_tour_purpose, str): dest_col_name = destination_for_tour_purpose elif isinstance(destination_for_tour_purpose, dict): @@ -97,7 +138,7 @@ def _compute_logsums( alt_tdd, tours_merged, tour_purpose, - model_settings, + model_settings: TourSchedulingSettings, network_los, skims, trace_label, @@ -109,8 +150,10 @@ def _compute_logsums( trace_label = tracing.extend_trace_label(trace_label, "logsums") with chunk.chunk_log(state, trace_label): - logsum_settings = state.filesystem.read_model_settings( - model_settings["LOGSUM_SETTINGS"] + logsum_settings = state.filesystem.read_settings_file( + str(model_settings.LOGSUM_SETTINGS), + mandatory=False, + validator_class=TourModeComponentSettings, ) choosers = alt_tdd.join(tours_merged, how="left", rsuffix="_chooser") logger.info( @@ -138,10 +181,7 @@ def _compute_logsums( # - run preprocessor to annotate choosers # allow specification of alternate preprocessor for nontour choosers - try: - preprocessor = model_settings.LOGSUM_PREPROCESSOR - except AttributeError: - preprocessor = model_settings.get("LOGSUM_PREPROCESSOR", "preprocessor") + preprocessor = model_settings.LOGSUM_PREPROCESSOR preprocessor_settings = ( getattr(logsum_settings, preprocessor, None) or logsum_settings[preprocessor] @@ -159,9 +199,7 @@ def _compute_logsums( ) # - compute logsums - logsum_spec = state.filesystem.read_model_spec( - file_name=logsum_settings["SPEC"] - ) + logsum_spec = state.filesystem.read_model_spec(file_name=logsum_settings.SPEC) logsum_spec = simulate.eval_coefficients( state, logsum_spec, coefficients, estimator=None ) @@ -309,7 +347,7 @@ def compute_tour_scheduling_logsums( alt_tdd, tours_merged, tour_purpose, - model_settings, + model_settings: TourSchedulingSettings, skims, trace_label, *, @@ -558,7 +596,12 @@ def tdd_interaction_dataset( def run_alts_preprocessor( - state: workflow.State, model_settings, alts, segment, locals_dict, trace_label + state: workflow.State, + model_settings: TourSchedulingSettings, + alts, + segment, + locals_dict, + trace_label, ): """ run preprocessor on alts, as specified by ALTS_PREPROCESSOR in model_settings @@ -583,18 +626,18 @@ def run_alts_preprocessor( annotated copy of alts """ - preprocessor_settings = model_settings.get("ALTS_PREPROCESSOR", {}) + preprocessor_settings = model_settings.ALTS_PREPROCESSOR - if segment in preprocessor_settings: + if isinstance(preprocessor_settings, dict) and segment in preprocessor_settings: # segmented by logsum_tour_purpose preprocessor_settings = preprocessor_settings.get(segment) logger.debug( - f"running ALTS_PREPROCESSOR with spec for {segment}: {preprocessor_settings.get('SPEC')}" + f"running ALTS_PREPROCESSOR with spec for {segment}: {preprocessor_settings.SPEC}" ) - elif "SPEC" in preprocessor_settings: + elif isinstance(preprocessor_settings, PreprocessorSettings): # unsegmented (either because no segmentation, or fallback if settings has generic preprocessor) logger.debug( - f"running ALTS_PREPROCESSOR with unsegmented spec {preprocessor_settings.get('SPEC')}" + f"running ALTS_PREPROCESSOR with unsegmented spec 
{preprocessor_settings.SPEC}" ) else: logger.debug( @@ -626,7 +669,7 @@ def _schedule_tours( alts, spec, logsum_tour_purpose, - model_settings, + model_settings: TourSchedulingSettings, skims, timetable, window_id_col, @@ -661,7 +704,7 @@ def _schedule_tours( unavailable alternatives spec : DataFrame The spec which will be passed to interaction_simulate. - model_settings : dict + model_settings : TourSchedulingSettings timetable : TimeTable timetable of timewidows for person (or subtour) with rows for tours[window_id_col] window_id_col : str @@ -811,7 +854,7 @@ def schedule_tours( alts, spec, logsum_tour_purpose, - model_settings, + model_settings: TourSchedulingSettings, timetable, timetable_window_id_col, previous_tour, @@ -846,7 +889,7 @@ def schedule_tours( else: assert not tours[timetable_window_id_col].duplicated().any() - if "LOGSUM_SETTINGS" in model_settings: + if model_settings.LOGSUM_SETTINGS: # we need skims to calculate tvpb skim overhead in 3_ZONE systems for use by calc_rows_per_chunk skims = skims_for_logsums( state, logsum_tour_purpose, model_settings, tour_trace_label @@ -856,7 +899,7 @@ def schedule_tours( result_list = [] for ( - i, + _i, chooser_chunk, chunk_trace_label, chunk_sizer, @@ -905,7 +948,7 @@ def vectorize_tour_scheduling( timetable, tour_segments, tour_segment_col, - model_settings, + model_settings: TourSchedulingSettings, chunk_size=0, trace_label=None, ): @@ -938,7 +981,7 @@ def vectorize_tour_scheduling( spec : DataFrame The spec which will be passed to interaction_simulate. (or dict of specs keyed on tour_type if tour_types is not None) - model_settings : dict + model_settings : LOGSUM_SETTINGS Returns ------- @@ -966,7 +1009,7 @@ def vectorize_tour_scheduling( timetable_window_id_col = "person_id" tour_owner_id_col = "person_id" - should_compute_logsums = "LOGSUM_SETTINGS" in model_settings + should_compute_logsums = model_settings.LOGSUM_SETTINGS is not None assert isinstance(tour_segments, dict) @@ -1077,7 +1120,7 @@ def vectorize_subtour_scheduling( persons_merged, alts, spec, - model_settings, + model_settings: TourSchedulingSettings, estimator, chunk_size=0, trace_label=None, @@ -1107,7 +1150,7 @@ def vectorize_subtour_scheduling( spec : DataFrame The spec which will be passed to interaction_simulate. (all subtours share same spec regardless of subtour type) - model_settings : dict + model_settings : TourSchedulingSettings chunk_size trace_label @@ -1207,7 +1250,7 @@ def build_joint_tour_timetables( joint_tour_windows_df = tt.create_timetable_windows(joint_tours, alts) joint_tour_timetable = tt.TimeTable(joint_tour_windows_df, alts) - for participant_num, nth_participants in joint_tour_participants.groupby( + for _participant_num, nth_participants in joint_tour_participants.groupby( "participant_num", sort=True ): # nth_participant windows from persons_timetable @@ -1231,7 +1274,7 @@ def vectorize_joint_tour_scheduling( alts, persons_timetable, spec, - model_settings, + model_settings: TourSchedulingSettings, estimator, chunk_size=0, trace_label=None, @@ -1257,7 +1300,7 @@ def vectorize_joint_tour_scheduling( spec : DataFrame The spec which will be passed to interaction_simulate. 
(or dict of specs keyed on tour_type if tour_types is not None) - model_settings : dict + model_settings : TourSchedulingSettings Returns ------- diff --git a/activitysim/core/configuration/logit.py b/activitysim/core/configuration/logit.py index 96caf5150..c01187eeb 100644 --- a/activitysim/core/configuration/logit.py +++ b/activitysim/core/configuration/logit.py @@ -75,6 +75,9 @@ class BaseLogitComponentSettings(PydanticReadable): CONSTANTS: dict[str, Any] = {} """Named constants usable in the utility expressions.""" + sharrow_skip: bool = False + """Skip sharrow when evaluating this component.""" + class LogitComponentSettings(BaseLogitComponentSettings): """ diff --git a/activitysim/core/simulate.py b/activitysim/core/simulate.py index fc3f02c5e..f59e10839 100644 --- a/activitysim/core/simulate.py +++ b/activitysim/core/simulate.py @@ -27,6 +27,7 @@ ) from activitysim.core.configuration.base import PydanticBase from activitysim.core.configuration.logit import ( + BaseLogitComponentSettings, LogitComponentSettings, LogitNestSpec, TemplatedLogitComponentSettings, @@ -147,7 +148,7 @@ def read_model_spec(filesystem: configuration.FileSystem, file_name: Path | str) def read_model_coefficients( filesystem: configuration.FileSystem, - model_settings: LogitComponentSettings | dict[str, Any] | None = None, + model_settings: BaseLogitComponentSettings | dict[str, Any] | None = None, file_name: Path | str | None = None, ) -> pd.DataFrame: """ @@ -159,7 +160,7 @@ def read_model_coefficients( assert file_name is not None else: assert file_name is None - if isinstance(model_settings, LogitComponentSettings) or ( + if isinstance(model_settings, BaseLogitComponentSettings) or ( isinstance(model_settings, PydanticBase) and hasattr(model_settings, "COEFFICIENTS") ):
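Note on the new StopFrequencySpecSegmentSettings: declaring it with extra="allow" is what lets the segment-identifying column name (an arbitrary key in the YAML) ride along as an extra field on the validated object, so stop_frequency can recover it with getattr(segment_settings, segment_col) and use attribute access (segment_settings.SPEC, segment_settings.COEFFICIENTS) instead of dict lookups. A minimal sketch of that behaviour using plain pydantic; BaseModel stands in for LogitComponentSettings, and the class and file names below are illustrative, not part of this patch:

from pydantic import BaseModel


class SpecSegmentSketch(BaseModel, extra="allow"):
    # stand-in for StopFrequencySpecSegmentSettings: extra keys are retained
    SPEC: str
    COEFFICIENTS: str


seg = SpecSegmentSketch(
    SPEC="stop_frequency_work.csv",  # hypothetical spec file
    COEFFICIENTS="stop_frequency_coefficients_work.csv",  # hypothetical coefficients file
    primary_purpose="work",  # the "extra" key naming the segment
)

segment_col = "primary_purpose"
assert getattr(seg, segment_col) == "work"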