diff --git a/update_gtfs_data_model/404.html b/update_gtfs_data_model/404.html
deleted file mode 100644
index a9b404fd..00000000
--- a/update_gtfs_data_model/404.html
+++ /dev/null
@@ -1,553 +0,0 @@

404 - Not found

\ No newline at end of file
diff --git a/update_gtfs_data_model/api/api.md b/update_gtfs_data_model/api/api.md
deleted file mode 100644
index bdf4cc0a..00000000
--- a/update_gtfs_data_model/api/api.md
+++ /dev/null
@@ -1,77 +0,0 @@
-# API Documentation
-
-## Common Usage
-
-## Base Objects
-
-::: network_wrangler.scenario
-::: network_wrangler.roadway.network
-::: network_wrangler.transit.network
-
-## Parameters
-
-::: network_wrangler.params
-
-## Projects
-
-Projects are how you manipulate the networks. Each project type is defined in a module in the `projects` folder and accepts a RoadwayNetwork and/or TransitNetwork as input and returns the same objects (manipulated) as output.
-
-## Roadway
-
-The roadway module contains submodules which define and extend the links, nodes, and shapes dataframe objects within a RoadwayNetwork object, as well as other classes and methods which support and extend the RoadwayNetwork class.
-
-### Network Objects
-
-Submodules which define and extend the links, nodes, and shapes dataframe objects within a RoadwayNetwork object. Includes classes which define:
-
-- dataframe schemas to be used for dataframe validation using `pandera`
-- methods which extend the dataframes
-
-#### Links
-
-::: network_wrangler.roadway.links.io
-::: network_wrangler.roadway.links.create
-::: network_wrangler.roadway.links.delete
-::: network_wrangler.roadway.links.edit
-::: network_wrangler.roadway.links.filters
-::: network_wrangler.roadway.links.geo
-::: network_wrangler.roadway.links.scopes
-::: network_wrangler.roadway.links.summary
-::: network_wrangler.roadway.links.validate
-::: network_wrangler.roadway.links.df_accessors
-
-#### Nodes
-
-::: network_wrangler.roadway.nodes.io
-::: network_wrangler.roadway.nodes.create
-::: network_wrangler.roadway.nodes.delete
-::: network_wrangler.roadway.nodes.edit
-::: network_wrangler.roadway.nodes.filters
-::: network_wrangler.roadway.nodes
-
-#### Shapes
-
-::: network_wrangler.roadway.shapes.io
-::: network_wrangler.roadway.shapes.create
-::: network_wrangler.roadway.shapes.edit
-::: network_wrangler.roadway.shapes.delete
-::: network_wrangler.roadway.shapes.filters
-::: network_wrangler.roadway.shapes.shapes
-
-### Supporting Classes, Methods + Parameters
-
-::: network_wrangler.roadway.segment
-::: network_wrangler.roadway.subnet
-::: network_wrangler.roadway.graph
-
-## Utils and Functions
-
-::: network_wrangler.utils.utils
-::: network_wrangler.utils.io
-::: network_wrangler.utils.models
-::: network_wrangler.utils.net
-::: network_wrangler.utils.time
-::: network_wrangler.utils.data
-::: network_wrangler.utils.geo
-::: network_wrangler.utils.df_accessors
-::: network_wrangler.logger
diff --git a/update_gtfs_data_model/api/index.html b/update_gtfs_data_model/api/index.html
deleted file mode 100644
index 7dc232b3..00000000
--- a/update_gtfs_data_model/api/index.html
+++ /dev/null
@@ -1,22205 +0,0 @@

API Documentation

-

Common Usage

-

Base Objects

- - -
- - - - -
- -

Scenario class and related functions for managing a scenario.

-

Usage:

-
my_base_year_scenario = {
-    "road_net": load_roadway(
-        links_file=STPAUL_LINK_FILE,
-        nodes_file=STPAUL_NODE_FILE,
-        shapes_file=STPAUL_SHAPE_FILE,
-    ),
-    "transit_net": load_transit(STPAUL_DIR),
-}
-
-# create a future baseline scenario from base by searching for all cards in dir w/ baseline tag
-project_card_directory = os.path.join(STPAUL_DIR, "project_cards")
-my_scenario = create_scenario(
-    base_scenario=my_base_year_scenario,
-    card_search_dir=project_card_directory,
-    filter_tags = [ "baseline2050" ]
-)
-
-# check project card queue and then apply the projects
-my_scenario.queued_projects
-my_scenario.apply_all_projects()
-
-# check applied projects, write it out, and create a summary report.
-my_scenario.applied_projects
-my_scenario.write("baseline")
-my_scenario.summarize(outfile = "baseline2050summary.txt")
-
-# Add some projects to create a build scenario based on a list of files.
-build_card_filenames = [
-    "3_multiple_roadway_attribute_change.yml",
-    "road.prop_changes.segment.yml",
-    "4_simple_managed_lane.yml",
-]
-my_scenario.add_projects_from_files(build_card_filenames)
-my_scenario.write("build2050")
-my_scenario.summarize(outfile = "build2050summary.txt")
-
- - - -
- - - - - - - - -
- - - -

- ProjectCardError - - -

- - -
-

- Bases: Exception

- - -

Raised when a project card is not valid.

- -
- Source code in network_wrangler/scenario.py, lines 92-95:
class ProjectCardError(Exception):
-    """Raised when a project card is not valid."""
-
-    pass
-
-
- -
- -
- -
- - - -
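The scenario module also defines ScenarioConflictError, ScenarioCorequisiteError, and ScenarioPrerequisiteError (documented further below). A minimal, hypothetical sketch of catching them around a batch apply, assuming my_scenario was built as in the usage example above:

```python
# Hypothetical sketch: my_scenario as in the usage example above.
from network_wrangler.scenario import (
    ScenarioConflictError,
    ScenarioCorequisiteError,
    ScenarioPrerequisiteError,
)

try:
    my_scenario.apply_all_projects()
except (ScenarioPrerequisiteError, ScenarioCorequisiteError, ScenarioConflictError) as err:
    # Requirement checks run while queueing, before any project is applied.
    print(f"Scenario dependency problem: {err}")
```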

- Scenario - - -

- - -
-

- Bases: object

- - -

Holds information about a scenario.

-

Typical usage example:

-
my_base_year_scenario = {
-    "road_net": load_roadway(
-        links_file=STPAUL_LINK_FILE,
-        nodes_file=STPAUL_NODE_FILE,
-        shapes_file=STPAUL_SHAPE_FILE,
-    ),
-    "transit_net": load_transit(STPAUL_DIR),
-}
-
-# create a future baseline scenario from base by searching for all cards in dir w/ baseline tag
-project_card_directory = os.path.join(STPAUL_DIR, "project_cards")
-my_scenario = create_scenario(
-    base_scenario=my_base_year_scenario,
-    card_search_dir=project_card_directory,
-    filter_tags = [ "baseline2050" ]
-)
-
-# check project card queue and then apply the projects
-my_scenario.queued_projects
-my_scenario.apply_all_projects()
-
-# check applied projects, write it out, and create a summary report.
-my_scenario.applied_projects
-my_scenario.write("baseline")
-my_scenario.summarize(outfile = "baseline2050summary.txt")
-
-# Add some projects to create a build scenario based on a list of files.
-build_card_filenames = [
-    "3_multiple_roadway_attribute_change.yml",
-    "road.prop_changes.segment.yml",
-    "4_simple_managed_lane.yml",
-]
-my_scenario.add_projects_from_files(build_card_filenames)
-my_scenario.write("build2050")
-my_scenario.summarize(outfile = "build2050summary.txt")
-
- - -

Attributes:

- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
NameTypeDescription
base_scenario - -
-

dictionary representation of a scenario

-
-
road_net - Optional[RoadwayNetwork] - -
-

instance of RoadwayNetwork for the scenario

-
-
transit_net - Optional[TransitNetwork] - -
-

instance of TransitNetwork for the scenario

-
-
project_cards - dict[str, ProjectCard] - -
-

Mapping[ProjectCard.name,ProjectCard] Storage of all project cards by name.

-
-
queued_projects - -
-

Projects which are “shovel ready”: pre-requisites have been checked and any required re-ordering done. Similar to a git staging area, project cards aren’t recognized in this collection once they are moved to applied.
-
-
applied_projects - -
-

list of project names that have been applied

-
-
projects - -
-

list of all projects either planned, queued, or applied

-
-
prerequisites - -
-

dictionary storing prerequisite information
-
-
corequisites - -
-

dictionary storing corequisite information

-
-
conflicts - -
-

dictionary storing conflict information

-
-
- -
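To make the dictionary form listed above concrete, here is a minimal, illustrative base_scenario dict; the keys follow the attribute list, and the project names are invented for illustration:

```python
# Illustrative only: a dict-style base scenario with no networks loaded.
base_scenario = {
    "road_net": None,             # or a RoadwayNetwork from load_roadway()
    "transit_net": None,          # or a TransitNetwork from load_transit()
    "applied_projects": ["1_lane_expansion"],
    "prerequisites": {},          # {project_name: [prerequisite_project_names]}
    "corequisites": {},           # {project_name: [corequisite_project_names]}
    "conflicts": {"2_busway": ["1_lane_expansion"]},
}
my_scenario = Scenario(base_scenario)
```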
- Source code in network_wrangler/scenario.py, lines 98-559:
class Scenario(object):
-    """Holds information about a scenario.
-
-    Typical usage example:
-
-    ```python
-    my_base_year_scenario = {
-        "road_net": load_roadway(
-            links_file=STPAUL_LINK_FILE,
-            nodes_file=STPAUL_NODE_FILE,
-            shapes_file=STPAUL_SHAPE_FILE,
-        ),
-        "transit_net": load_transit(STPAUL_DIR),
-    }
-
-    # create a future baseline scenario from base by searching for all cards in dir w/ baseline tag
-    project_card_directory = os.path.join(STPAUL_DIR, "project_cards")
-    my_scenario = create_scenario(
-        base_scenario=my_base_year_scenario,
-        card_search_dir=project_card_directory,
-        filter_tags = [ "baseline2050" ]
-    )
-
-    # check project card queue and then apply the projects
-    my_scenario.queued_projects
-    my_scenario.apply_all_projects()
-
-    # check applied projects, write it out, and create a summary report.
-    my_scenario.applied_projects
-    my_scenario.write("baseline")
-    my_scenario.summarize(outfile = "baseline2050summary.txt")
-
-    # Add some projects to create a build scenario based on a list of files.
-    build_card_filenames = [
-        "3_multiple_roadway_attribute_change.yml",
-        "road.prop_changes.segment.yml",
-        "4_simple_managed_lane.yml",
-    ]
-    my_scenario.add_projects_from_files(build_card_filenames)
-    my_scenario.write("build2050")
-    my_scenario.summarize(outfile = "build2050summary.txt")
-    ```
-
-    Attributes:
-        base_scenario: dictionary representation of a scenario
-        road_net: instance of RoadwayNetwork for the scenario
-        transit_net: instance of TransitNetwork for the scenario
-        project_cards: Mapping[ProjectCard.name,ProjectCard] Storage of all project cards by name.
-        queued_projects: Projects which are "shovel ready" - have had pre-requisites checked and
-            any required re-ordering done. Similar to a git staging area, project cards aren't
-            recognized in this collection once they are moved to applied.
-        applied_projects: list of project names that have been applied
-        projects: list of all projects either planned, queued, or applied
-        prerequisites:  dictionary storing prerequisite information
-        corequisites:  dictionary storing corequisite information
-        conflicts: dictionary storing conflict information
-    """
-
-    def __init__(
-        self,
-        base_scenario: Union[Scenario, dict],
-        project_card_list: list[ProjectCard] = [],
-        name="",
-    ):
-        """Constructor.
-
-        Args:
-        base_scenario: A base scenario object to base this instance off of, or a dict which
-            describes the scenario attributes including applied projects and respective conflicts.
-            `{"applied_projects": [],"conflicts":{...}}`
-        project_card_list: Optional list of ProjectCard instances to add to planned projects.
-        name: Optional name for the scenario.
-        """
-        WranglerLogger.info("Creating Scenario")
-
-        if isinstance(base_scenario, Scenario):
-            base_scenario = base_scenario.__dict__
-
-        if not set(BASE_SCENARIO_SUGGESTED_PROPS) <= set(base_scenario.keys()):
-            WranglerLogger.warning(
-                f"Base_scenario doesn't contain {BASE_SCENARIO_SUGGESTED_PROPS}"
-            )
-
-        self.base_scenario = base_scenario
-        self.name = name
-        # if the base scenario had roadway or transit networks, use them as the basis.
-        self.road_net: Optional[RoadwayNetwork] = copy.deepcopy(self.base_scenario.get("road_net"))
-        self.transit_net: Optional[TransitNetwork] = copy.deepcopy(
-            self.base_scenario.get("transit_net")
-        )
-
-        self.project_cards: dict[str, ProjectCard] = {}
-        self._planned_projects: list[str] = []
-        self._queued_projects = None
-        self.applied_projects = self.base_scenario.get("applied_projects", [])
-
-        self.prerequisites = self.base_scenario.get("prerequisites", {})
-        self.corequisites = self.base_scenario.get("corequisites", {})
-        self.conflicts = self.base_scenario.get("conflicts", {})
-
-        for p in project_card_list:
-            self._add_project(p)
-
-    @property
-    def projects(self):
-        """Returns a list of all projects in the scenario: applied and planned."""
-        return self.applied_projects + self._planned_projects
-
-    @property
-    def queued_projects(self):
-        """Returns a list version of _queued_projects queue.
-
-        Queued projects are those that have been planned, have all pre-requisites satisfied, and
-        have been ordered based on pre-requisites.
-
-        If no queued projects, will dynamically generate from planned projects based on
-        pre-requisites and return the queue.
-        """
-        if not self._queued_projects:
-            self._check_projects_requirements_satisfied(self._planned_projects)
-            self._queued_projects = self.order_projects(self._planned_projects)
-        return list(self._queued_projects)
-
-    def __str__(self):
-        """String representation of the Scenario object."""
-        s = ["{}: {}".format(key, value) for key, value in self.__dict__.items()]
-        return "\n".join(s)
-
-    def _add_dependencies(self, project_name, dependencies: dict) -> None:
-        """Add dependencies from a project card to relevant scenario variables.
-
-        Updates existing "prerequisites", "corequisites" and "conflicts".
-        Lowercases everything to enable string matching.
-
-        Args:
-            project_name: name of project you are adding dependencies for.
-            dependencies: Dictionary of dependencies by dependency type and list of associated
-                projects.
-        """
-        project_name = project_name.lower()
-
-        for d, v in dependencies.items():
-            _dep = list(map(str.lower, v))
-            WranglerLogger.debug(f"Adding {_dep} to {project_name} dependency table.")
-            self.__dict__[d].update({project_name: _dep})
-
-    def _add_project(
-        self,
-        project_card: ProjectCard,
-        validate: bool = True,
-        filter_tags: Collection[str] = [],
-    ) -> None:
-        """Adds a single ProjectCard instances to the Scenario.
-
-        Checks that a project of same name is not already in scenario.
-        If selected, will validate ProjectCard before adding.
-        If provided, will only add ProjectCard if it matches at least one filter_tags.
-
-        Resets scenario queued_projects.
-
-        Args:
-            project_card (ProjectCard): ProjectCard instance to add to scenario.
-            validate (bool, optional): If True, will validate the projectcard before
-                being adding it to the scenario. Defaults to True.
-            filter_tags (Collection[str], optional): If used, will only add the project card if
-                its tags match one or more of these filter_tags. Defaults to []
-                which means no tag-filtering will occur.
-
-        """
-        project_name = project_card.project.lower()
-        filter_tags = list(map(str.lower, filter_tags))
-
-        if project_name in self.projects:
-            raise ProjectCardError(
-                f"Names not unique from existing scenario projects: {project_card.project}"
-            )
-
-        if filter_tags and set(project_card.tags).isdisjoint(set(filter_tags)):
-            WranglerLogger.debug(
-                f"Skipping {project_name} - no overlapping tags with {filter_tags}."
-            )
-            return
-
-        if validate:
-            assert project_card.valid
-
-        WranglerLogger.info(f"Adding {project_name} to scenario.")
-        self.project_cards[project_name] = project_card
-        self._planned_projects.append(project_name)
-        self._queued_projects = None
-        self._add_dependencies(project_name, project_card.dependencies)
-
-    def add_project_cards(
-        self,
-        project_card_list: Collection[ProjectCard],
-        validate: bool = True,
-        filter_tags: Collection[str] = [],
-    ) -> None:
-        """Adds a list of ProjectCard instances to the Scenario.
-
-        Checks that a project of same name is not already in scenario.
-        If selected, will validate ProjectCard before adding.
-        If provided, will only add ProjectCard if it matches at least one filter_tags.
-
-        Args:
-            project_card_list (Collection[ProjectCard]): List of ProjectCard instances to add to
-                scenario.
-            validate (bool, optional): If True, will require each ProjectCard is validated before
-                being added to scenario. Defaults to True.
-            filter_tags (Collection[str], optional): If used, will filter ProjectCard instances
-                and only add those whose tags match one or more of these filter_tags.
-                Defaults to [] - which means no tag-filtering will occur.
-        """
-        for p in project_card_list:
-            self._add_project(p, validate=validate, filter_tags=filter_tags)
-
-    def _check_projects_requirements_satisfied(self, project_list: Collection[str]):
-        """Checks all requirements are satisified to apply this specific set of projects.
-
-        Including:
-        1. has an associaed project card
-        2. is in scenario's planned projects
-        3. pre-requisites satisfied
-        4. co-requisies satisfied by applied or co-applied projects
-        5. no conflicing applied or co-applied projects
-
-        Args:
-            project_list: list of projects to check requirements for.
-        """
-        self._check_projects_planned(project_list)
-        self._check_projects_have_project_cards(project_list)
-        self._check_projects_prerequisites(project_list)
-        self._check_projects_corequisites(project_list)
-        self._check_projects_conflicts(project_list)
-
-    def _check_projects_planned(self, project_names: Collection[str]) -> None:
-        """Checks that a list of projects are in the scenario's planned projects."""
-        _missing_ps = [p for p in project_names if p not in self._planned_projects]
-        if _missing_ps:
-            raise ValueError(
-                f"Projects are not in planned projects: \n {_missing_ps}. Add them by \
-                using add_project_cards(), add_projects_from_files(), or \
-                add_projects_from_directory()."
-            )
-
-    def _check_projects_have_project_cards(self, project_list: Collection[str]) -> bool:
-        """Checks that a list of projects has an associated project card in the scenario."""
-        _missing = [p for p in project_list if p not in self.project_cards]
-        if _missing:
-            WranglerLogger.error(
-                f"Projects referenced which are missing project cards: {_missing}"
-            )
-            return False
-        return True
-
-    def _check_projects_prerequisites(self, project_names: list[str]) -> None:
-        """Check a list of projects' pre-requisites have been or will be applied to scenario."""
-        if set(project_names).isdisjoint(set(self.prerequisites.keys())):
-            return
-        _prereqs = []
-        for p in project_names:
-            _prereqs += self.prerequisites.get(p, [])
-        _projects_applied = self.applied_projects + project_names
-        _missing = list(set(_prereqs) - set(_projects_applied))
-        if _missing:
-            WranglerLogger.debug(
-                f"project_names: {project_names}\nprojects_have_or_will_be_applied: \
-                    {_projects_applied}\nmissing: {_missing}"
-            )
-            raise ScenarioPrerequisiteError(f"Missing {len(_missing)} pre-requisites: {_missing}")
-
-    def _check_projects_corequisites(self, project_names: list[str]) -> None:
-        """Check a list of projects' co-requisites have been or will be applied to scenario."""
-        if set(project_names).isdisjoint(set(self.corequisites.keys())):
-            return
-        _coreqs = []
-        for p in project_names:
-            _coreqs += self.corequisites.get(p, [])
-        _projects_applied = self.applied_projects + project_names
-        _missing = list(set(_coreqs) - set(_projects_applied))
-        if _missing:
-            WranglerLogger.debug(
-                f"project_names: {project_names}\nprojects_have_or_will_be_applied: \
-                    {_projects_applied}\nmissing: {_missing}"
-            )
-            raise ScenarioCorequisiteError(f"Missing {len(_missing)} corequisites: {_missing}")
-
-    def _check_projects_conflicts(self, project_names: list[str]) -> None:
-        """Checks that list of projects' conflicts have not been or will be applied to scenario."""
-        # WranglerLogger.debug("Checking Conflicts...")
-        projects_to_check = project_names + self.applied_projects
-        # WranglerLogger.debug(f"\nprojects_to_check:{projects_to_check}\nprojects_with_conflicts:{set(self.conflicts.keys())}")
-        if set(projects_to_check).isdisjoint(set(self.conflicts.keys())):
-            # WranglerLogger.debug("Projects have no conflicts to check")
-            return
-        _conflicts = []
-        for p in project_names:
-            _conflicts += self.conflicts.get(p, [])
-        _conflict_problems = [p for p in _conflicts if p in projects_to_check]
-        if _conflict_problems:
-            WranglerLogger.warning(f"Conflict Problems: \n{_conflict_problems}")
-            _conf_dict = {
-                k: v
-                for k, v in self.conflicts.items()
-                if k in projects_to_check and not set(v).isdisjoint(set(_conflict_problems))
-            }
-            WranglerLogger.debug(f"Problematic Conflicts: \n{_conf_dict}")
-            raise ScenarioConflictError(f"Found {len(_conflict_problems)} conflicts: {_conflict_problems}")
-
-    def order_projects(self, project_list: Collection[str]) -> deque:
-        """Orders a list of projects based on moving up pre-requisites into a deque.
-
-        Args:
-            project_list: list of projects to order
-
-        Returns: deque for applying projects.
-        """
-        project_list = [p.lower() for p in project_list]
-        assert self._check_projects_have_project_cards(project_list)
-
-        # build prereq (adjacency) list for topological sort
-        adjacency_list = defaultdict(list)
-        visited_list = defaultdict(bool)
-
-        for project in project_list:
-            visited_list[project] = False
-            if not self.prerequisites.get(project):
-                continue
-            for prereq in self.prerequisites[project]:
-                # this will always be true, else would have been flagged in the missing
-                # prerequisite check, but just in case
-                if prereq.lower() in project_list:
-                    adjacency_list[prereq.lower()].append(project)
-
-        # sorted_project_names is topologically sorted project card names (based on prerequisites)
-        _ordered_projects = topological_sort(
-            adjacency_list=adjacency_list, visited_list=visited_list
-        )
-
-        if not set(_ordered_projects) == set(project_list):
-            _missing = list(set(project_list) - set(_ordered_projects))
-            raise ValueError(f"Project sort resulted in missing projects: {_missing}")
-
-        project_deque = deque(_ordered_projects)
-
-        WranglerLogger.debug(f"Ordered Projects: \n{project_deque}")
-
-        return project_deque
-
-    def apply_all_projects(self):
-        """Applies all planned projects in the queue."""
-        # Call this to make sure projects are appropriately queued in hidden variable.
-        self.queued_projects
-
-        # Use hidden variable.
-        while self._queued_projects:
-            self._apply_project(self._queued_projects.popleft())
-
-        # set this so it will trigger re-queuing any more projects.
-        self._queued_projects = None
-
-    def _apply_change(self, change: Union[ProjectCard, SubProject]) -> None:
-        """Applies a specific change specified in a project card.
-
-        Change type must be in at least one of:
-        - ROADWAY_CARD_TYPES
-        - TRANSIT_CARD_TYPES
-
-        Args:
-            change: a project card or subproject card
-        """
-        if change.change_type in ROADWAY_CARD_TYPES:
-            if not self.road_net:
-                raise ValueError("Missing Roadway Network")
-            self.road_net.apply(change)
-        if change.change_type in TRANSIT_CARD_TYPES:
-            if not self.transit_net:
-                raise ValueError("Missing Transit Network")
-            self.transit_net.apply(change)
-        if change.change_type in SECONDARY_TRANSIT_CARD_TYPES and self.transit_net:
-            self.transit_net.apply(change)
-
-        if change.change_type not in TRANSIT_CARD_TYPES + ROADWAY_CARD_TYPES:
-            raise ProjectCardError(
-                f"Project {change.project}: Don't understand project cat: {change.change_type}"
-            )
-
-    def _apply_project(self, project_name: str) -> None:
-        """Applies project card to scenario.
-
-        If a list of changes is specified in referenced project card, iterates through each change.
-
-        Args:
-            project_name (str): name of project to be applied.
-        """
-        project_name = project_name.lower()
-
-        WranglerLogger.info(f"Applying {project_name} from file:\
-                            {self.project_cards[project_name].file}")
-
-        p = self.project_cards[project_name]
-        WranglerLogger.debug(f"types: {p.change_types}")
-        WranglerLogger.debug(f"type: {p.change_type}")
-        if p._sub_projects:
-            for sp in p._sub_projects:
-                WranglerLogger.debug(f"- applying subproject: {sp.change_type}")
-                self._apply_change(sp)
-
-        else:
-            self._apply_change(p)
-
-        self._planned_projects.remove(project_name)
-        self.applied_projects.append(project_name)
-
-    def apply_projects(self, project_list: Collection[str]):
-        """Applies a specific list of projects from the planned project queue.
-
-        Will order the list of projects based on pre-requisites.
-
-        NOTE: does not check co-requisites b/c that isn't possible when applying a single project.
-
-        Args:
-            project_list: List of projects to be applied. All need to be in the planned project
-                queue.
-        """
-        project_list = [p.lower() for p in project_list]
-
-        self._check_projects_requirements_satisfied(project_list)
-        ordered_project_queue = self.order_projects(project_list)
-
-        while ordered_project_queue:
-            self._apply_project(ordered_project_queue.popleft())
-
-        # Set so that when called again it will retrigger queueing from planned projects.
-        self._queued_projects = None
-
-    def write(self, path: Union[Path, str], name: str) -> None:
-        """_summary_.
-
-        Args:
-            path: Path to write scenario networks and scenario summary to.
-            name: Name to use.
-        """
-        if self.road_net:
-            write_roadway(self.road_net, prefix=name, out_dir=path)
-        if self.transit_net:
-            write_transit(self.transit_net, prefix=name, out_dir=path)
-        self.summarize(outfile=os.path.join(path, name))
-
-    def summarize(self, project_detail: bool = True, outfile: str = "", mode: str = "a") -> str:
-        """A high level summary of the created scenario.
-
-        Args:
-            project_detail: If True (default), will write out project card summaries.
-            outfile: If specified, will write scenario summary to text file.
-            mode: Outfile open mode. 'a' to append, 'w' to overwrite.
-
-        Returns:
-            string of summary
-
-        """
-        return scenario_summary(self, project_detail, outfile, mode)
-
-
- - - -
- - - - - - - -
- - - -

- projects - - - property - - -

- - -
- -

Returns a list of all projects in the scenario: applied and planned.

-
- -
- -
- - - -

- queued_projects - - - property - - -

- - -
- -

Returns a list version of _queued_projects queue.

-

Queued projects are those that have been planned, have all pre-requisites satisfied, and have been ordered based on pre-requisites.

If no queued projects, will dynamically generate from planned projects based on pre-requisites and return the queue.
-
- -
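A short sketch of the lazy behavior described above, assuming my_scenario holds planned projects as in the earlier usage example (project names illustrative):

```python
# The queue is derived lazily from planned projects on first read.
print(my_scenario.queued_projects)   # e.g. ['widen_i35', 'add_brt'], prerequisite-ordered

my_scenario.apply_all_projects()     # drains the internal deque with popleft()

print(my_scenario.applied_projects)  # project names move here once applied
print(my_scenario.queued_projects)   # re-generated from any remaining planned projects
```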
- - - -
- - -

- __init__(base_scenario, project_card_list=[], name='') - -

- - -
- -

Constructor.

- - -
- Source code in network_wrangler/scenario.py, lines 156-199:
def __init__(
-    self,
-    base_scenario: Union[Scenario, dict],
-    project_card_list: list[ProjectCard] = [],
-    name="",
-):
-    """Constructor.
-
-    Args:
-    base_scenario: A base scenario object to base this instance off of, or a dict which
-        describes the scenario attributes including applied projects and respective conflicts.
-        `{"applied_projects": [],"conflicts":{...}}`
-    project_card_list: Optional list of ProjectCard instances to add to planned projects.
-    name: Optional name for the scenario.
-    """
-    WranglerLogger.info("Creating Scenario")
-
-    if isinstance(base_scenario, Scenario):
-        base_scenario = base_scenario.__dict__
-
-    if not set(BASE_SCENARIO_SUGGESTED_PROPS) <= set(base_scenario.keys()):
-        WranglerLogger.warning(
-            f"Base_scenario doesn't contain {BASE_SCENARIO_SUGGESTED_PROPS}"
-        )
-
-    self.base_scenario = base_scenario
-    self.name = name
-    # if the base scenario had roadway or transit networks, use them as the basis.
-    self.road_net: Optional[RoadwayNetwork] = copy.deepcopy(self.base_scenario.get("road_net"))
-    self.transit_net: Optional[TransitNetwork] = copy.deepcopy(
-        self.base_scenario.get("transit_net")
-    )
-
-    self.project_cards: dict[str, ProjectCard] = {}
-    self._planned_projects: list[str] = []
-    self._queued_projects = None
-    self.applied_projects = self.base_scenario.get("applied_projects", [])
-
-    self.prerequisites = self.base_scenario.get("prerequisites", {})
-    self.corequisites = self.base_scenario.get("corequisites", {})
-    self.conflicts = self.base_scenario.get("conflicts", {})
-
-    for p in project_card_list:
-        self._add_project(p)
-
-
-
- -
- -
- - -

- __str__() - -

- - -
- -

String representation of the Scenario object.

- -
- Source code in network_wrangler/scenario.py, lines 221-224:
def __str__(self):
-    """String representation of the Scenario object."""
-    s = ["{}: {}".format(key, value) for key, value in self.__dict__.items()]
-    return "\n".join(s)
-
-
-
- -
- -
- - -

- add_project_cards(project_card_list, validate=True, filter_tags=[]) - -

- - -
- -

Adds a list of ProjectCard instances to the Scenario.

-

Checks that a project of same name is not already in scenario. -If selected, will validate ProjectCard before adding. -If provided, will only add ProjectCard if it matches at least one filter_tags.

- - -

Parameters:

- - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
NameTypeDescriptionDefault
project_card_list - Collection[ProjectCard] - -
-

List of ProjectCard instances to add to -scenario.

-
-
- required -
validate - bool - -
-

If True, will require each ProjectCard is validated before -being added to scenario. Defaults to True.

-
-
- True -
filter_tags - Collection[str] - -
-

If used, will filter ProjectCard instances -and only add those whose tags match one or more of these filter_tags. -Defaults to [] - which means no tag-filtering will occur.

-
-
- [] -
- -
- Source code in network_wrangler/scenario.py, lines 290-312:
def add_project_cards(
-    self,
-    project_card_list: Collection[ProjectCard],
-    validate: bool = True,
-    filter_tags: Collection[str] = [],
-) -> None:
-    """Adds a list of ProjectCard instances to the Scenario.
-
-    Checks that a project of same name is not already in scenario.
-    If selected, will validate ProjectCard before adding.
-    If provided, will only add ProjectCard if it matches at least one filter_tags.
-
-    Args:
-        project_card_list (Collection[ProjectCard]): List of ProjectCard instances to add to
-            scenario.
-        validate (bool, optional): If True, will require each ProjectCard is validated before
-            being added to scenario. Defaults to True.
-        filter_tags (Collection[str], optional): If used, will filter ProjectCard instances
-            and only add those whose tags match one or more of these filter_tags.
-            Defaults to [] - which means no tag-filtering will occur.
-    """
-    for p in project_card_list:
-        self._add_project(p, validate=validate, filter_tags=filter_tags)
-
-
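A hedged usage sketch, assuming read_cards is importable from the projectcard package (the same helper create_scenario uses below); the file paths and tag are illustrative:

```python
# Illustrative paths and tags; read_cards returns a dict keyed by project name.
from projectcard import read_cards

cards = list(read_cards(["cards/widen_i35.yml", "cards/add_brt.yml"]).values())
my_scenario.add_project_cards(cards, validate=True, filter_tags=["build2050"])
```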
-
- -
- -
- - -

- apply_all_projects() - -

- - -
- -

Applies all planned projects in the queue.

- -
- Source code in network_wrangler/scenario.py, lines 447-457:
def apply_all_projects(self):
-    """Applies all planned projects in the queue."""
-    # Call this to make sure projects are appropriately queued in hidden variable.
-    self.queued_projects
-
-    # Use hidden variable.
-    while self._queued_projects:
-        self._apply_project(self._queued_projects.popleft())
-
-    # set this so it will trigger re-queuing any more projects.
-    self._queued_projects = None
-
-
-
- -
- -
- - -

- apply_projects(project_list) - -

- - -
- -

Applies a specific list of projects from the planned project queue.

-

Will order the list of projects based on pre-requisites.

-

NOTE: does not check co-requisites b/c that isn’t possible when applying a single project.
- - -

Parameters:

- - - - - - - - - - - - - - - - - -
NameTypeDescriptionDefault
project_list - Collection[str] - -
-

List of projects to be applied. All need to be in the planned project -queue.

-
-
- required -
- -
- Source code in network_wrangler/scenario.py, lines 512-532:
def apply_projects(self, project_list: Collection[str]):
-    """Applies a specific list of projects from the planned project queue.
-
-    Will order the list of projects based on pre-requisites.
-
-    NOTE: does not check co-requisites b/c that isn't possible when applying a single project.
-
-    Args:
-        project_list: List of projects to be applied. All need to be in the planned project
-            queue.
-    """
-    project_list = [p.lower() for p in project_list]
-
-    self._check_projects_requirements_satisfied(project_list)
-    ordered_project_queue = self.order_projects(project_list)
-
-    while ordered_project_queue:
-        self._apply_project(ordered_project_queue.popleft())
-
-    # Set so that when called again it will retrigger queueing from planned projects.
-    self._queued_projects = None
-
-
-
- -
- -
- - -

- order_projects(project_list) - -

- - -
- -

Orders a list of projects based on moving up pre-requisites into a deque.

- - -

Parameters:

- - - - - - - - - - - - - - - - - -
NameTypeDescriptionDefault
project_list - Collection[str] - -
-

list of projects to order

-
-
- required -
- - -
- Source code in network_wrangler/scenario.py, lines 407-445:
def order_projects(self, project_list: Collection[str]) -> deque:
-    """Orders a list of projects based on moving up pre-requisites into a deque.
-
-    Args:
-        project_list: list of projects to order
-
-    Returns: deque for applying projects.
-    """
-    project_list = [p.lower() for p in project_list]
-    assert self._check_projects_have_project_cards(project_list)
-
-    # build prereq (adjacency) list for topological sort
-    adjacency_list = defaultdict(list)
-    visited_list = defaultdict(bool)
-
-    for project in project_list:
-        visited_list[project] = False
-        if not self.prerequisites.get(project):
-            continue
-        for prereq in self.prerequisites[project]:
-            # this will always be true, else would have been flagged in the missing
-            # prerequisite check, but just in case
-            if prereq.lower() in project_list:
-                adjacency_list[prereq.lower()].append(project)
-
-    # sorted_project_names is topologically sorted project card names (based on prerequisites)
-    _ordered_projects = topological_sort(
-        adjacency_list=adjacency_list, visited_list=visited_list
-    )
-
-    if not set(_ordered_projects) == set(project_list):
-        _missing = list(set(project_list) - set(_ordered_projects))
-        raise ValueError(f"Project sort resulted in missing projects: {_missing}")
-
-    project_deque = deque(_ordered_projects)
-
-    WranglerLogger.debug(f"Ordered Projects: \n{project_deque}")
-
-    return project_deque
-
-
-
- -
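A small sketch of the ordering behavior with hypothetical project names, assuming both projects were already added to the scenario with project cards:

```python
# "widen_i35" is a pre-requisite of "add_brt", so it is moved up in the queue.
my_scenario.prerequisites["add_brt"] = ["widen_i35"]

queue = my_scenario.order_projects(["add_brt", "widen_i35"])
print(queue)  # deque(['widen_i35', 'add_brt'])
```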
- -
- - -

- summarize(project_detail=True, outfile='', mode='a') - -

- - -
- -

A high level summary of the created scenario.

- - -

Parameters:

- - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
NameTypeDescriptionDefault
project_detail - bool - -
-

If True (default), will write out project card summaries.

-
-
- True -
outfile - str - -
-

If specified, will write scenario summary to text file.

-
-
- '' -
mode - str - -
-

Outfile open mode. ‘a’ to append, ‘w’ to overwrite.
-
-
- 'a' -
- - -

Returns:

- - - - - - - - - - - - - -
TypeDescription
- str - -
-

string of summary

-
-
- -
- Source code in network_wrangler/scenario.py, lines 547-559:
def summarize(self, project_detail: bool = True, outfile: str = "", mode: str = "a") -> str:
-    """A high level summary of the created scenario.
-
-    Args:
-        project_detail: If True (default), will write out project card summaries.
-        outfile: If specified, will write scenario summary to text file.
-        mode: Outfile open mode. 'a' to append, 'w' to overwrite.
-
-    Returns:
-        string of summary
-
-    """
-    return scenario_summary(self, project_detail, outfile, mode)
-
-
-
- -
- -
- - -

- write(path, name) - -

- - -
- -

summary.

- - -

Parameters:

- - - - - - - - - - - - - - - - - - - - - - - -
NameTypeDescriptionDefault
path - Union[Path, str] - -
-

Path to write scenario networks and scenario summary to.

-
-
- required -
name - str - -
-

Name to use.

-
-
- required -
- -
- Source code in network_wrangler/scenario.py, lines 534-545:
def write(self, path: Union[Path, str], name: str) -> None:
-    """_summary_.
-
-    Args:
-        path: Path to write scenario networks and scenario summary to.
-        name: Name to use.
-    """
-    if self.road_net:
-        write_roadway(self.road_net, prefix=name, out_dir=path)
-    if self.transit_net:
-        write_transit(self.transit_net, prefix=name, out_dir=path)
-    self.summarize(outfile=os.path.join(path, name))
-
-
-
- -
- - - -
- -
- -
- -
- - - -

- ScenarioConflictError - - -

- - -
-

- Bases: Exception

- - -

Raised when a conflict is detected.

- -
- Source code in network_wrangler/scenario.py, lines 74-77:
class ScenarioConflictError(Exception):
-    """Raised when a conflict is detected."""
-
-    pass
-
-
- -
- -
- -
- - - -

- ScenarioCorequisiteError - - -

- - -
-

- Bases: Exception

- - -

Raised when a co-requisite is not satisfied.

- -
- Source code in network_wrangler/scenario.py, lines 80-83:
class ScenarioCorequisiteError(Exception):
-    """Raised when a co-requisite is not satisfied."""
-
-    pass
-
-
- -
- -
- -
- - - -

- ScenarioPrerequisiteError - - -

- - -
-

- Bases: Exception

- - -

Raised when a pre-requisite is not satisfied.

- -
- Source code in network_wrangler/scenario.py, lines 86-89:
class ScenarioPrerequisiteError(Exception):
-    """Raised when a pre-requisite is not satisfied."""
-
-    pass
-
-
- -
- -
- - -
- - -

- create_base_scenario(base_shape_name, base_link_name, base_node_name, roadway_dir='', transit_dir='') - -

- - -
- -

Creates a base scenario dictionary from roadway and transit network files.

- - -

Parameters:

- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
NameTypeDescriptionDefault
base_shape_name - str - -
-

filename of the base network shape

-
-
- required -
base_link_name - str - -
-

filename of the base network link

-
-
- required -
base_node_name - str - -
-

filename of the base network node

-
-
- required -
roadway_dir - str - -
-

optional path to the base scenario roadway network files

-
-
- '' -
transit_dir - str - -
-

optional path to base scenario transit files

-
-
- '' -
- -
- Source code in network_wrangler/scenario.py, lines 654-696:
def create_base_scenario(
-    base_shape_name: str,
-    base_link_name: str,
-    base_node_name: str,
-    roadway_dir: str = "",
-    transit_dir: str = "",
-) -> dict:
-    """Creates a base scenario dictionary from roadway and transit network files.
-
-    Args:
-        base_shape_name: filename of the base network shape
-        base_link_name: filename of the base network link
-        base_node_name: filename of the base network node
-        roadway_dir: optional path to the base scenario roadway network files
-        transit_dir: optional path to base scenario transit files
-    """
-    if roadway_dir:
-        base_network_shape_file = os.path.join(roadway_dir, base_shape_name)
-        base_network_link_file = os.path.join(roadway_dir, base_link_name)
-        base_network_node_file = os.path.join(roadway_dir, base_node_name)
-    else:
-        base_network_shape_file = base_shape_name
-        base_network_link_file = base_link_name
-        base_network_node_file = base_node_name
-
-    road_net = load_roadway(
-        links_file=base_network_link_file,
-        nodes_file=base_network_node_file,
-        shapes_file=base_network_shape_file,
-    )
-
-    if transit_dir:
-        transit_net = load_transit(transit_dir)
-        transit_net.road_net = road_net
-    else:
-        transit_net = None
-        WranglerLogger.info(
-            "No transit directory specified, base scenario will have empty transit network."
-        )
-
-    base_scenario = {"road_net": road_net, "transit_net": transit_net}
-
-    return base_scenario
-
-
-
- -
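A hedged usage sketch; the directory and file names below are illustrative:

```python
# Builds the {"road_net": ..., "transit_net": ...} dict used to seed a Scenario.
base_scenario = create_base_scenario(
    base_shape_name="shape.geojson",
    base_link_name="link.json",
    base_node_name="node.geojson",
    roadway_dir="examples/stpaul",
    transit_dir="examples/stpaul",  # omit for an empty transit network
)
```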
- -
- - -

- create_scenario(base_scenario={}, project_card_list=[], project_card_filepath=None, filter_tags=[], validate=True) - -

- - -
- -

Creates scenario from a base scenario and adds project cards.

-

Project cards can be added using any/all of the following methods:
1. List of ProjectCard instances
2. List of ProjectCard files
3. Directory and optional glob search pattern in which to find project card files
-

Checks that a project of same name is not already in scenario. -If selected, will validate ProjectCard before adding. -If provided, will only add ProjectCard if it matches at least one filter_tags.

- - -

Parameters:

- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
NameTypeDescriptionDefault
base_scenario - Union[Scenario, dict] - -
-

base Scenario instance or dictionary of attributes.
-
-
- {} -
project_card_list - -
-

List of ProjectCard instances to create Scenario from. Defaults -to [].

-
-
- [] -
project_card_filepath - Optional[Union[Collection[str], str]] - -
-

where the project card is: a single path, list of paths, a directory, or a glob pattern. Defaults to None.
-
-
- None -
filter_tags - Collection[str] - -
-

If used, will only add the project card if -its tags match one or more of these filter_tags. Defaults to [] -which means no tag-filtering will occur.

-
-
- [] -
validate - bool - -
-

If True, will validate the projectcard before -being adding it to the scenario. Defaults to True.

-
-
- True -
- -
- Source code in network_wrangler/scenario.py, lines 562-602:
def create_scenario(
-    base_scenario: Union[Scenario, dict] = {},
-    project_card_list=[],
-    project_card_filepath: Optional[Union[Collection[str], str]] = None,
-    filter_tags: Collection[str] = [],
-    validate=True,
-) -> Scenario:
-    """Creates scenario from a base scenario and adds project cards.
-
-    Project cards can be added using any/all of the following methods:
-    1. List of ProjectCard instances
-    2. List of ProjectCard files
-    3. Directory and optional glob search pattern in which to find project card files
-
-    Checks that a project of same name is not already in scenario.
-    If selected, will validate ProjectCard before adding.
-    If provided, will only add ProjectCard if it matches at least one filter_tags.
-
-    Args:
-        base_scenario: base Scenario instance or dictionary of attributes.
-        project_card_list: List of ProjectCard instances to create Scenario from. Defaults
-            to [].
-        project_card_filepath: where the project card is.  A single path, list of paths,
-            a directory, or a glob pattern. Defaults to None.
-        filter_tags (Collection[str], optional): If used, will only add the project card if
-            its tags match one or more of these filter_tags. Defaults to []
-            which means no tag-filtering will occur.
-        validate (bool, optional): If True, will validate the projectcard before
-            being adding it to the scenario. Defaults to True.
-    """
-    scenario = Scenario(base_scenario)
-
-    if project_card_filepath:
-        project_card_list += list(
-            read_cards(project_card_filepath, filter_tags=filter_tags).values()
-        )
-
-    if project_card_list:
-        scenario.add_project_cards(project_card_list, filter_tags=filter_tags, validate=validate)
-
-    return scenario
-
-
-
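A sketch combining the card-input methods listed in the docstring; all names and paths are illustrative:

```python
scenario = create_scenario(
    base_scenario=base_scenario,             # Scenario instance or dict
    project_card_list=cards,                 # 1. ProjectCard instances
    project_card_filepath="project_cards/",  # 2./3. file(s), a directory, or a glob
    filter_tags=["baseline2050"],
    validate=True,
)
```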
- -
- -
- - -

- scenario_summary(scenario, project_detail=True, outfile='', mode='a') - -

- - -
- -

A high level summary of the created scenario.

- - -

Parameters:

- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
NameTypeDescriptionDefault
scenario - Scenario - -
-

Scenario instance to summarize.

-
-
- required -
project_detail - bool - -
-

If True (default), will write out project card summaries.

-
-
- True -
outfile - str - -
-

If specified, will write scenario summary to text file.

-
-
- '' -
mode - str - -
-

Outfile open mode. ‘a’ to append, ‘w’ to overwrite.
-
-
- 'a' -
- - -

Returns:

- - - - - - - - - - - - - -
TypeDescription
- str - -
-

string of summary

-
-
- -
- Source code in network_wrangler/scenario.py, lines 605-651:
def scenario_summary(
-    scenario: Scenario, project_detail: bool = True, outfile: str = "", mode: str = "a"
-) -> str:
-    """A high level summary of the created scenario.
-
-    Args:
-        scenario: Scenario instance to summarize.
-        project_detail: If True (default), will write out project card summaries.
-        outfile: If specified, will write scenario summary to text file.
-        mode: Outfile open mode. 'a' to append, 'w' to overwrite.
-
-    Returns:
-        string of summary
-    """
-    WranglerLogger.info(f"Summarizing Scenario {scenario.name}")
-    report_str = "------------------------------\n"
-    report_str += f"Scenario created on {datetime.now()}\n"
-
-    report_str += "Base Scenario:\n"
-    report_str += "--Road Network:\n"
-    report_str += f"----Link File: {scenario.base_scenario['road_net']._links_file}\n"
-    report_str += f"----Node File: {scenario.base_scenario['road_net']._nodes_file}\n"
-    report_str += f"----Shape File: {scenario.base_scenario['road_net']._shapes_file}\n"
-    report_str += "--Transit Network:\n"
-    report_str += f"----Feed Path: {scenario.base_scenario['transit_net'].feed.feed_path}\n"
-
-    report_str += "\nProject Cards:\n -"
-    report_str += "\n-".join([str(pc.file) for p, pc in scenario.project_cards.items()])
-
-    report_str += "\nApplied Projects:\n-"
-    report_str += "\n-".join(scenario.applied_projects)
-
-    if project_detail:
-        report_str += "\n---Project Card Details---\n"
-        for p in scenario.applied_projects:
-            report_str += "\n{}".format(
-                pprint.pformat(scenario.project_cards[p].__dict__)
-            )
-
-    if outfile:
-        with open(outfile, mode) as f:
-            f.write(report_str)
-        WranglerLogger.info(f"Wrote Scenario Report to: {outfile}")
-
-    return report_str
-
-
-
- -
- - - -
- -
- -
- -
- - - - -
- -

Roadway Network class and functions for Network Wrangler.

-

Used to represent a roadway network and perform operations on it.

-

Usage:

-
from network_wrangler import load_roadway_from_dir, write_roadway
-
-net = load_roadway_from_dir("my_dir")
-net.get_selection({"links": [{"name": ["I 35E"]}]})
-net.apply("my_project_card.yml")
-
-write_roadway(net, "my_out_prefix", "my_dir", file_format = "parquet")
-
- - - -
- - - - - - - - -
- - - -

- RoadwayNetwork - - -

- - -
-

- Bases: BaseModel

- - -

Representation of a Roadway Network.

-

Typical usage example:

-
net = load_roadway(
-    links_file=MY_LINK_FILE,
-    nodes_file=MY_NODE_FILE,
-    shapes_file=MY_SHAPE_FILE,
-)
-my_selection = {
-    "link": [{"name": ["I 35E"]}],
-    "A": {"osm_node_id": "961117623"},  # start searching for segments at A
-    "B": {"osm_node_id": "2564047368"},
-}
-net.get_selection(my_selection)
-
-my_change = [
-    {
-        'property': 'lanes',
-        'existing': 1,
-        'set': 2,
-    },
-    {
-        'property': 'drive_access',
-        'set': 0,
-    },
-]
-
-my_net.apply_roadway_feature_change(
-    my_net.get_selection(my_selection),
-    my_change
-)
-
-    net.model_net
-    net.is_network_connected(mode="drive", nodes=self.m_nodes_df, links=self.m_links_df)
-    _, disconnected_nodes = net.assess_connectivity(
-        mode="walk",
-        ignore_end_nodes=True,
-        nodes=self.m_nodes_df,
-        links=self.m_links_df
-    )
-    write_roadway(net,filename=my_out_prefix, path=my_dir, for_model = True)
-
- - -

Attributes:

- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
NameTypeDescription
nodes_df - RoadNodesTable - -
-

dataframe of node records.
-
-
links_df - RoadLinksTable - -
-

dataframe of link records and associated properties.

-
-
shapes_df - RoadShapesTable - -
-

dataframe of detailed shape records. This is lazily created only if it is called, because shapes files can be expensive to read.
-
-
selections - dict - -
-

dictionary of stored roadway selection objects, mapped by -RoadwayLinkSelection.sel_key or RoadwayNodeSelection.sel_key in case they are - made repeatedly.

-
-
crs - str - -
-

coordinate reference system in EPSG number format. Defaults to DEFAULT_CRS, which is set to 4326, WGS 84 Lat/Long.
-
-
network_hash - str - -
-

dynamic property of the hashed value of links_df and nodes_df. Used for -quickly identifying if a network has changed since various expensive operations have -taken place (i.e. generating a ModelRoadwayNetwork or a network graph)

-
-
model_net - ModelRoadwayNetwork - -
-

referenced ModelRoadwayNetwork object which will be -lazily created if None or if the network_hash has changed.

-
-
- -
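A short sketch of the caching behavior described in the attribute list above, assuming net was loaded as in the module usage example:

```python
# Selections are stored in net.selections keyed by selection key, so repeating
# the same query reuses the stored selection instead of re-running the search.
sel = net.get_selection({"links": [{"name": ["I 35E"]}]})

# model_net is built lazily on first access and reused until network_hash
# changes (i.e., until links_df or nodes_df are modified).
model_net = net.model_net
```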
- Source code in network_wrangler/roadway/network.py, lines 79-617:
class RoadwayNetwork(BaseModel):
-    """Representation of a Roadway Network.
-
-    Typical usage example:
-
-    ```py
-    net = load_roadway(
-        links_file=MY_LINK_FILE,
-        nodes_file=MY_NODE_FILE,
-        shapes_file=MY_SHAPE_FILE,
-    )
-    my_selection = {
-        "link": [{"name": ["I 35E"]}],
-        "A": {"osm_node_id": "961117623"},  # start searching for segments at A
-        "B": {"osm_node_id": "2564047368"},
-    }
-    net.get_selection(my_selection)
-
-    my_change = [
-        {
-            'property': 'lanes',
-            'existing': 1,
-            'set': 2,
-        },
-        {
-            'property': 'drive_access',
-            'set': 0,
-        },
-    ]
-
-    my_net.apply_roadway_feature_change(
-        my_net.get_selection(my_selection),
-        my_change
-    )
-
-        net.model_net
-        net.is_network_connected(mode="drive", nodes=net.model_net.nodes_df, links=net.model_net.links_df)
-        _, disconnected_nodes = net.assess_connectivity(
-            mode="walk",
-            ignore_end_nodes=True,
-            nodes=net.model_net.nodes_df,
-            links=net.model_net.links_df,
-        )
-        write_roadway(net, filename=my_out_prefix, path=my_dir, for_model=True)
-    ```
-
-    Attributes:
-        nodes_df (RoadNodesTable): dataframe of node records.
-        links_df (RoadLinksTable): dataframe of link records and associated properties.
-        shapes_df (RoadShapesTable): dataframe of detailed shape records. This is lazily
-            created on first access because shape files can be expensive to read.
-        selections (dict): dictionary of stored roadway selection objects, mapped by
-            `RoadwayLinkSelection.sel_key` or `RoadwayNodeSelection.sel_key` in case they are
-            made repeatedly.
-        crs (str): coordinate reference system as an EPSG code. Defaults to DEFAULT_CRS,
-            which is set to 4326 (WGS 84 lat/long).
-        network_hash: dynamic property of the hashed value of links_df and nodes_df. Used for
-            quickly identifying if a network has changed since various expensive operations have
-            taken place (i.e. generating a ModelRoadwayNetwork or a network graph)
-        model_net (ModelRoadwayNetwork): referenced `ModelRoadwayNetwork` object which will be
-            lazily created if None or if the `network_hash` has changed.
-    """
-
-    crs: Literal[LAT_LON_CRS] = LAT_LON_CRS
-    nodes_df: DataFrame[RoadNodesTable]
-    links_df: DataFrame[RoadLinksTable]
-    _shapes_df: Optional[DataFrame[RoadShapesTable]] = None
-
-    _links_file: Optional[Path] = None
-    _nodes_file: Optional[Path] = None
-    _shapes_file: Optional[Path] = None
-
-    _shapes_params: ShapesParams = ShapesParams()
-    _model_net: Optional[ModelRoadwayNetwork] = None
-    _selections: dict[str, Selections] = {}
-    _modal_graphs: dict[str, dict] = defaultdict(lambda: {"graph": None, "hash": None})
-
-    @field_validator("nodes_df", "links_df")
-    def coerce_crs(cls, v, info):
-        """Coerce crs of nodes_df and links_df to network crs."""
-        net_crs = info.data["crs"]
-        if v.crs != net_crs:
-            WranglerLogger.warning(
-                f"CRS of links_df ({v.crs}) doesn't match network crs {net_crs}. \
-                    Changing to network crs."
-            )
-            v = v.to_crs(net_crs)
-        return v
-
-    @property
-    def shapes_df(self) -> DataFrame[RoadShapesTable]:
-        """Load and return RoadShapesTable.
-
-        If not already loaded, will read from shapes_file and return. If shapes_file is None,
-        will return an empty dataframe with the right schema. If shapes_df is already set, will
-        return that.
-        """
-        if (self._shapes_df is None or self._shapes_df.empty) and self._shapes_file is not None:
-            self._shapes_df = read_shapes(
-                self._shapes_file,
-                in_crs=self.crs,
-                shapes_params=self._shapes_params,
-                filter_to_shape_ids=self.links_df.shape_id.to_list(),
-            )
-        # if there is NONE, then at least create an empty dataframe with right schema
-        elif self._shapes_df is None:
-            self._shapes_df = empty_df_from_datamodel(RoadShapesTable, crs=self.crs)
-            self._shapes_df.set_index("shape_id_idx", inplace=True)
-
-        return self._shapes_df
-
-    @shapes_df.setter
-    def shapes_df(self, value):
-        self._shapes_df = df_to_shapes_df(value, shapes_params=self._shapes_params)
-
-    @property
-    def network_hash(self) -> str:
-        """Hash of the links and nodes dataframes."""
-        _value = str.encode(self.links_df.df_hash() + "-" + self.nodes_df.df_hash())
-
-        _hash = hashlib.sha256(_value).hexdigest()
-        return _hash
-
-    @property
-    def model_net(self) -> ModelRoadwayNetwork:
-        """Return a ModelRoadwayNetwork object for this network."""
-        if self._model_net is None or self._model_net._net_hash != self.network_hash:
-            self._model_net = ModelRoadwayNetwork(self)
-        return self._model_net
-
-    @property
-    def summary(self) -> dict:
-        """Quick summary dictionary of number of links, nodes."""
-        d = {
-            "links": len(self.links_df),
-            "nodes": len(self.nodes_df),
-        }
-        return d
-
-    @property
-    def link_shapes_df(self) -> gpd.GeoDataFrame:
-        """Add shape geometry to links if available.
-
-        Returns: links dataframe with associated shape geometry merged in.
-        """
-        _links_df = copy.deepcopy(self.links_df)
-        link_shapes_df = _links_df.merge(
-            self.shapes_df,
-            left_on=self.links_df.params.fk_to_shape,
-            right_on=self.shapes_df.params.primary_key,
-            how="left",
-        )
-        return link_shapes_df
-
-    def get_property_by_timespan_and_group(
-        self,
-        link_property: str,
-        category: Union[str, int] = DEFAULT_CATEGORY,
-        timespan: TimespanString = DEFAULT_TIMESPAN,
-        strict_timespan_match: bool = False,
-        min_overlap_minutes: int = 60,
-    ) -> Any:
-        """Returns a new dataframe with model_link_id and link property by category and timespan.
-
-        Convenience method for backward compatibility.
-
-        Args:
-            link_property: link property to query
-            category: category to query or a list of categories. Defaults to DEFAULT_CATEGORY.
-            timespan: timespan to query in the form of ["HH:MM","HH:MM"].
-                Defaults to DEFAULT_TIMESPAN.
-            strict_timespan_match: If True, will only return links that match the timespan exactly.
-                Defaults to False.
-            min_overlap_minutes: If strict_timespan_match is False, will return links that overlap
-                with the timespan by at least this many minutes. Defaults to 60.
-        """
-        from .links.scopes import prop_for_scope
-
-        return prop_for_scope(
-            self.links_df,
-            link_property,
-            timespan=timespan,
-            category=category,
-            strict_timespan_match=strict_timespan_match,
-            min_overlap_minutes=min_overlap_minutes,
-        )
-
-    def get_selection(
-        self,
-        selection_dict: Union[dict, SelectFacility],
-        overwrite: bool = False,
-    ) -> Union[RoadwayNodeSelection, RoadwayLinkSelection]:
-        """Return selection if it already exists, otherwise performs selection.
-
-        Args:
-            selection_dict (dict): SelectFacility dictionary.
-            overwrite: if True, will overwrite any previously cached searches. Defaults to False.
-        """
-        key = _create_selection_key(selection_dict)
-        if (key in self._selections) and not overwrite:
-            WranglerLogger.debug(f"Using cached selection from key: {key}")
-            return self._selections[key]
-
-        if isinstance(selection_dict, SelectFacility):
-            selection_data = selection_dict
-        elif isinstance(selection_dict, SelectLinksDict):
-            selection_data = SelectFacility(links=selection_dict)
-        elif isinstance(selection_dict, SelectNodesDict):
-            selection_data = SelectFacility(nodes=selection_dict)
-        elif isinstance(selection_dict, dict):
-            selection_data = SelectFacility(**selection_dict)
-        else:
-            WranglerLogger.error(f"`selection_dict` arg must be a dictionary or SelectFacility\
-                              model. Received: {selection_dict} of type {type(selection_dict)}")
-            raise SelectionError("selection_dict arg must be a dictionary or SelectFacility model")
-
-        WranglerLogger.debug(f"Getting selection from key: {key}")
-        if selection_data.feature_types in ["links", "segment"]:
-            return RoadwayLinkSelection(self, selection_dict)
-        elif selection_data.feature_types == "nodes":
-            return RoadwayNodeSelection(self, selection_dict)
-        else:
-            WranglerLogger.error("Selection data should be of type 'segment', 'links' or 'nodes'.")
-            raise SelectionError("Selection data should be of type 'segment', 'links' or 'nodes'.")
-
-    def modal_graph_hash(self, mode) -> str:
-        """Hash of the links in order to detect a network change from when graph created."""
-        _value = str.encode(self.links_df.df_hash() + "-" + mode)
-        _hash = hashlib.sha256(_value).hexdigest()
-
-        return _hash
-
-    def get_modal_graph(self, mode) -> MultiDiGraph:
-        """Return a networkx graph of the network for a specific mode.
-
-        Args:
-            mode: mode of the network, one of `drive`,`transit`,`walk`, `bike`
-        """
-        from .graph import net_to_graph
-
-        if self._modal_graphs[mode]["hash"] != self.modal_graph_hash(mode):
-            self._modal_graphs[mode]["graph"] = net_to_graph(self, mode)
-
-        return self._modal_graphs[mode]["graph"]
-
-    def apply(self, project_card: Union[ProjectCard, dict]) -> RoadwayNetwork:
-        """Wrapper method to apply a roadway project, returning a new RoadwayNetwork instance.
-
-        Args:
-            project_card: either a dictionary of the project card object or ProjectCard instance
-        """
-        if not (isinstance(project_card, ProjectCard) or isinstance(project_card, SubProject)):
-            project_card = ProjectCard(project_card)
-
-        project_card.validate()
-
-        if project_card._sub_projects:
-            for sp in project_card._sub_projects:
-                WranglerLogger.debug(f"- applying subproject: {sp.change_type}")
-                self._apply_change(sp)
-            return self
-        else:
-            return self._apply_change(project_card)
-
-    def _apply_change(self, change: Union[ProjectCard, SubProject]) -> RoadwayNetwork:
-        """Apply a single change: a single-project project or a sub-project."""
-        if not isinstance(change, SubProject):
-            WranglerLogger.info(f"Applying Project to Roadway Network: {change.project}")
-
-        if change.change_type == "roadway_property_change":
-            return apply_roadway_property_change(
-                self,
-                self.get_selection(change.facility),
-                change.roadway_property_change["property_changes"],
-            )
-
-        elif change.change_type == "roadway_addition":
-            return apply_new_roadway(
-                self,
-                change.roadway_addition,
-            )
-
-        elif change.change_type == "roadway_deletion":
-            return apply_roadway_deletion(
-                self,
-                change.roadway_deletion,
-            )
-
-        elif change.change_type == "pycode":
-            return apply_calculated_roadway(self, change.pycode)
-        else:
-            WranglerLogger.error(f"Couldn't find project in: \n{change.__dict__}")
-            raise (ValueError(f"Invalid Project Card Category: {change.change_type}"))
-
-    def links_with_link_ids(self, link_ids: List[int]) -> DataFrame[RoadLinksTable]:
-        """Return subset of links_df based on link_ids list."""
-        return filter_links_to_ids(self.links_df, link_ids)
-
-    def links_with_nodes(self, node_ids: List[int]) -> DataFrame[RoadLinksTable]:
-        """Return subset of links_df based on node_ids list."""
-        return filter_links_to_node_ids(self.links_df, node_ids)
-
-    def nodes_in_links(self) -> DataFrame[RoadNodesTable]:
-        """Returns subset of self.nodes_df that are in self.links_df."""
-        return filter_nodes_to_links(self.links_df, self.nodes_df)
-
-    def add_links(self, add_links_df: Union[pd.DataFrame, DataFrame[RoadLinksTable]]):
-        """Validate combined links_df with LinksSchema before adding to self.links_df.
-
-        Args:
-            add_links_df: Dataframe of additional links to add.
-        """
-        if not isinstance(add_links_df, RoadLinksTable):
-            add_links_df = data_to_links_df(add_links_df, nodes_df=self.nodes_df)
-        self.links_df = RoadLinksTable(pd.concat([self.links_df, add_links_df], axis=0))
-
-    def add_nodes(self, add_nodes_df: Union[pd.DataFrame, DataFrame[RoadNodesTable]]):
-        """Validate combined nodes_df with NodesSchema before adding to self.nodes_df.
-
-        Args:
-            add_nodes_df: Dataframe of additional nodes to add.
-        """
-        if not isinstance(add_nodes_df, RoadNodesTable):
-            add_nodes_df = data_to_nodes_df(add_nodes_df)
-        self.nodes_df = RoadNodesTable(pd.concat([self.nodes_df, add_nodes_df], axis=0))
-
-    def add_shapes(self, add_shapes_df: Union[pd.DataFrame, DataFrame[RoadShapesTable]]):
-        """Validate combined shapes_df with RoadShapesTable efore adding to self.shapes_df.
-
-        Args:
-            add_shapes_df: Dataframe of additional shapes to add.
-        """
-        if not isinstance(add_shapes_df, RoadShapesTable):
-            add_shapes_df = df_to_shapes_df(add_shapes_df)
-        WranglerLogger.debug(f"add_shapes_df: \n{add_shapes_df}")
-        WranglerLogger.debug(f"self.shapes_df: \n{self.shapes_df}")
-        together_df = pd.concat([self.shapes_df, add_shapes_df], axis=0)
-        WranglerLogger.debug(f"together_df: \n{together_df}")
-        self.shapes_df = RoadShapesTable(together_df)
-
-    def delete_links(
-        self,
-        selection_dict: SelectLinksDict,
-        clean_nodes: bool = False,
-        clean_shapes: bool = False,
-    ):
-        """Deletes links based on selection dictionary and optionally associated nodes and shapes.
-
-        Args:
-            selection_dict (SelectLinks): Dictionary describing link selections as follows:
-                `all`: Optional[bool] = False. If true, will select all.
-                `name`: Optional[list[str]]
-                `ref`: Optional[list[str]]
-                `osm_link_id`:Optional[list[str]]
-                `model_link_id`: Optional[list[int]]
-                `modes`: Optional[list[str]]. Defaults to "any"
-                `ignore_missing`: if true, will not error if selected links are not found. Defaults to True.
-                ...plus any other link property to select on top of these.
-            clean_nodes (bool, optional): If True, will clean nodes uniquely associated with
-                deleted links. Defaults to False.
-            clean_shapes (bool, optional): If True, will clean shapes uniquely associated with
-                deleted links. Defaults to False.
-        """
-        selection_dict = SelectLinksDict(**selection_dict).model_dump(
-            exclude_none=True, by_alias=True
-        )
-        selection = self.get_selection({"links": selection_dict})
-
-        if clean_nodes:
-            node_ids_to_delete = node_ids_unique_to_link_ids(
-                selection.selected_links, selection.selected_links_df, self.nodes_df
-            )
-            WranglerLogger.debug(
-                f"Dropping nodes associated with dropped links: \n{node_ids_to_delete}"
-            )
-            self.nodes_df = delete_nodes_by_ids(self.nodes_df, del_node_ids=node_ids_to_delete)
-
-        if clean_shapes:
-            shape_ids_to_delete = shape_ids_unique_to_link_ids(
-                selection.selected_links, selection.selected_links_df, self.shapes_df
-            )
-            WranglerLogger.debug(
-                f"Dropping shapes associated with dropped links: \n{shape_ids_to_delete}"
-            )
-            self.shapes_df = delete_shapes_by_ids(
-                self.shapes_df, del_shape_ids=shape_ids_to_delete
-            )
-
-        self.links_df = delete_links_by_ids(
-            self.links_df,
-            selection.selected_links,
-            ignore_missing=selection.ignore_missing,
-        )
-
-    def delete_nodes(
-        self,
-        selection_dict: Union[dict, SelectNodesDict],
-        remove_links: bool = False,
-    ) -> None:
-        """Deletes nodes from roadway network. Wont delete nodes used by links in network.
-
-        Args:
-            selection_dict: dictionary of node selection criteria in the form of a SelectNodesDict.
-            remove_links: if True, will remove any links that are associated with the nodes.
-                If False, will only remove nodes if they are not associated with any links.
-                Defaults to False.
-
-        raises:
-            NodeDeletionError: If not ignore_missing and selected nodes to delete aren't in network
-        """
-        if not isinstance(selection_dict, SelectNodesDict):
-            selection_dict = SelectNodesDict(**selection_dict)
-        selection_dict = selection_dict.model_dump(exclude_none=True, by_alias=True)
-        selection: RoadwayNodeSelection = self.get_selection(
-            {"nodes": selection_dict},
-        )
-        if remove_links:
-            del_node_ids = selection.selected_nodes
-            link_ids = self.links_with_nodes(selection.selected_nodes).model_link_id.to_list()
-            WranglerLogger.info(f"Removing {len(link_ids)} links associated with nodes.")
-            self.delete_links({"model_link_id": link_ids})
-        else:
-            unused_node_ids = node_ids_without_links(self.nodes_df, self.links_df)
-            del_node_ids = list(set(selection.selected_nodes).intersection(unused_node_ids))
-
-        self.nodes_df = delete_nodes_by_ids(
-            self.nodes_df, del_node_ids, ignore_missing=selection.ignore_missing
-        )
-
-    def clean_unused_shapes(self):
-        """Removes any unused shapes from network that aren't referenced by links_df."""
-        from .shapes.shapes import shape_ids_without_links
-
-        del_shape_ids = shape_ids_without_links(self.shapes_df, self.links_df)
-        self.shapes_df = self.shapes_df.drop(del_shape_ids)
-
-    def clean_unused_nodes(self):
-        """Removes any unused nodes from network that aren't referenced by links_df.
-
-        NOTE: does not check if these nodes are used by transit, so use with caution.
-        """
-        from .nodes.nodes import node_ids_without_links
-
-        node_ids = node_ids_without_links(self.nodes_df, self.links_df)
-        self.nodes_df = self.nodes_df.drop(node_ids)
-
-    def move_nodes(
-        self,
-        node_geometry_change_table: DataFrame[NodeGeometryChangeTable],
-    ):
-        """Moves nodes based on updated geometry along with associated links and shape geometry.
-
-        Args:
-            node_geometry_change_table: a table with model_node_id, X, Y, and CRS.
-        """
-        node_geometry_change_table = NodeGeometryChangeTable(node_geometry_change_table)
-        node_ids = node_geometry_change_table.model_node_id.to_list()
-        WranglerLogger.debug(f"Moving nodes: {node_ids}")
-        self.nodes_df = edit_node_geometry(self.nodes_df, node_geometry_change_table)
-        self.links_df = edit_link_geometry_from_nodes(self.links_df, self.nodes_df, node_ids)
-        self.shapes_df = edit_shape_geometry_from_nodes(
-            self.shapes_df, self.links_df, self.nodes_df, node_ids
-        )
-
-    def has_node(self, model_node_id: int) -> bool:
-        """Queries if network has node based on model_node_id.
-
-        Args:
-            model_node_id: model_node_id to check for.
-        """
-        has_node = self.nodes_df.model_node_id.isin([model_node_id]).any()
-
-        return has_node
-
-    def has_link(self, ab: tuple) -> bool:
-        """Returns true if network has links with AB values.
-
-        Args:
-            ab: Tuple of values corresponding with A and B.
-        """
-        sel_a, sel_b = ab
-        has_link = self.links_df[["A", "B"]].isin({"A": [sel_a], "B": [sel_b]}).all(axis=1).any()
-        return has_link
-
-    def is_connected(self, mode: str) -> bool:
-        """Determines if the network graph is "strongly" connected.
-
-        A graph is strongly connected if each vertex is reachable from every other vertex.
-
-        Args:
-            mode:  mode of the network, one of `drive`,`transit`,`walk`, `bike`
-        """
-        is_connected = nx.is_strongly_connected(self.get_modal_graph(mode))
-
-        return is_connected
-
-    @staticmethod
-    def add_incident_link_data_to_nodes(
-        links_df: Optional[DataFrame[RoadLinksTable]] = None,
-        nodes_df: Optional[DataFrame[RoadNodesTable]] = None,
-        link_variables: list = [],
-    ) -> DataFrame[RoadNodesTable]:
-        """Add data from links going to/from nodes to node.
-
-        Args:
-            links_df: if specified, will assess connectivity of this
-                links list rather than self.links_df
-            nodes_df: if specified, will assess connectivity of this
-                nodes list rather than self.nodes_df
-            link_variables: list of columns in links dataframe to add to incident nodes
-
-        Returns:
-            nodes DataFrame with link data where length is N*number of links going in/out
-        """
-        WranglerLogger.debug("Adding following link data to nodes: ".format())
-
-        _link_vals_to_nodes = [x for x in link_variables if x in links_df.columns]
-        if link_variables not in _link_vals_to_nodes:
-            WranglerLogger.warning(
-                "Following columns not in links_df and wont be added to nodes: {} ".format(
-                    list(set(link_variables) - set(_link_vals_to_nodes))
-                )
-            )
-
-        _nodes_from_links_A = nodes_df.merge(
-            links_df[[links_df.params.from_node] + _link_vals_to_nodes],
-            how="outer",
-            left_on=nodes_df.params.primary_key,
-            right_on=links_df.params.from_node,
-        )
-        _nodes_from_links_B = nodes_df.merge(
-            links_df[[links_df.params.to_node] + _link_vals_to_nodes],
-            how="outer",
-            left_on=nodes_df.params.primary_key,
-            right_on=links_df.params.to_node,
-        )
-        _nodes_from_links_ab = pd.concat([_nodes_from_links_A, _nodes_from_links_B])
-
-        return _nodes_from_links_ab
-
-
- - - -
- - - - - - - -
- - - - - - -
- -

Add shape geometry to links if available.

-

Returns: links dataframe with associated shape geometry merged in.

-
- -
- -
- - - -

model_net: ModelRoadwayNetwork (property)

- - -
- -

Return a ModelRoadwayNetwork object for this network.

-
- -
- -
- - - -

network_hash: str (property)

- - -
- -

Hash of the links and nodes dataframes.

-
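Since `network_hash` drives cache invalidation for `model_net` and the modal graphs, here is a sketch of the pattern it enables (`net` is a loaded `RoadwayNetwork` as in the class usage example; the edit in the middle stands for any mutation):

```python
hash_before = net.network_hash

# ...edit net.links_df or net.nodes_df here...

if net.network_hash != hash_before:
    # Derived objects (model_net, modal graphs) are stale and will be
    # rebuilt lazily on their next access.
    pass
```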
- -
- -
- - - -

shapes_df: DataFrame[RoadShapesTable] (property, writable)

- - -
- -

Load and return RoadShapesTable.

-

If not already loaded, will read from shapes_file and return. If shapes_file is None, -will return an empty dataframe with the right schema. If shapes_df is already set, will -return that.

-
- -
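A usage sketch, assuming `net` was loaded with a shapes file so the lazy read has something to pull from:

```python
# First access triggers read_shapes() on the stored shapes file, filtered to
# the shape ids referenced by links_df; later accesses return the cached frame.
shapes = net.shapes_df

# Assignment goes through the setter, which coerces the value to the
# RoadShapesTable schema via df_to_shapes_df().
net.shapes_df = shapes
```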
- -
- - - -

summary: dict (property)

- - -
- -

Quick summary dictionary of number of links, nodes.

-
- -
- - - -
- - - - - -
- -

Add data from links going to/from nodes to each node record.

- - -

Parameters:

| Name | Type | Description | Default |
|---|---|---|---|
| `links_df` | `Optional[DataFrame[RoadLinksTable]]` | If specified, will assess connectivity of this links list rather than `self.links_df`. | `None` |
| `nodes_df` | `Optional[DataFrame[RoadNodesTable]]` | If specified, will assess connectivity of this nodes list rather than `self.nodes_df`. | `None` |
| `link_variables` | `list` | List of columns in the links dataframe to add to incident nodes. | `[]` |

Returns:

| Type | Description |
|---|---|
| `DataFrame[RoadNodesTable]` | Nodes DataFrame with link data, where length is N * the number of links going in/out. |
- -
- Source code in network_wrangler/roadway/network.py -
@staticmethod
-def add_incident_link_data_to_nodes(
-    links_df: Optional[DataFrame[RoadLinksTable]] = None,
-    nodes_df: Optional[DataFrame[RoadNodesTable]] = None,
-    link_variables: list = [],
-) -> DataFrame[RoadNodesTable]:
-    """Add data from links going to/from nodes to node.
-
-    Args:
-        links_df: if specified, will assess connectivity of this
-            links list rather than self.links_df
-        nodes_df: if specified, will assess connectivity of this
-            nodes list rather than self.nodes_df
-        link_variables: list of columns in links dataframe to add to incident nodes
-
-    Returns:
-        nodes DataFrame with link data where length is N*number of links going in/out
-    """
-    WranglerLogger.debug("Adding following link data to nodes: ".format())
-
-    _link_vals_to_nodes = [x for x in link_variables if x in links_df.columns]
-    if link_variables not in _link_vals_to_nodes:
-        WranglerLogger.warning(
-            "Following columns not in links_df and wont be added to nodes: {} ".format(
-                list(set(link_variables) - set(_link_vals_to_nodes))
-            )
-        )
-
-    _nodes_from_links_A = nodes_df.merge(
-        links_df[[links_df.params.from_node] + _link_vals_to_nodes],
-        how="outer",
-        left_on=nodes_df.params.primary_key,
-        right_on=links_df.params.from_node,
-    )
-    _nodes_from_links_B = nodes_df.merge(
-        links_df[[links_df.params.to_node] + _link_vals_to_nodes],
-        how="outer",
-        left_on=nodes_df.params.primary_key,
-        right_on=links_df.params.to_node,
-    )
-    _nodes_from_links_ab = pd.concat([_nodes_from_links_A, _nodes_from_links_B])
-
-    return _nodes_from_links_ab
-
-
-
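For illustration, a hedged sketch that attaches a link column to the nodes it touches (`net` as in the class usage example; the result has one row per node-link incidence):

```python
nodes_with_lanes = RoadwayNetwork.add_incident_link_data_to_nodes(
    links_df=net.links_df,
    nodes_df=net.nodes_df,
    link_variables=["lanes"],  # any column present in links_df
)
```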
- -
- -
- - - - - -
- -

Validate combined links_df with LinksSchema before adding to self.links_df.

- - -

Parameters:

| Name | Type | Description | Default |
|---|---|---|---|
| `add_links_df` | `Union[DataFrame, DataFrame[RoadLinksTable]]` | Dataframe of additional links to add. | required |
- -
- Source code in network_wrangler/roadway/network.py -
def add_links(self, add_links_df: Union[pd.DataFrame, DataFrame[RoadLinksTable]]):
-    """Validate combined links_df with LinksSchema before adding to self.links_df.
-
-    Args:
-        add_links_df: Dataframe of additional links to add.
-    """
-    if not isinstance(add_links_df, RoadLinksTable):
-        add_links_df = data_to_links_df(add_links_df, nodes_df=self.nodes_df)
-    self.links_df = RoadLinksTable(pd.concat([self.links_df, add_links_df], axis=0))
-
-
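A minimal sketch, assuming nodes 1 and 2 already exist and model_link_id 900001 is unused; any further required columns are dictated by the RoadLinksTable schema:

```python
import pandas as pd

new_links = pd.DataFrame(
    {"A": [1], "B": [2], "model_link_id": [900001], "name": ["new connector"]}
)
# Coerced through data_to_links_df() and schema-validated before concatenation.
net.add_links(new_links)
```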
-
- -
- -
- - -

add_nodes(add_nodes_df)

- - -
- -

Validate combined nodes_df with NodesSchema before adding to self.nodes_df.

- - -

Parameters:

| Name | Type | Description | Default |
|---|---|---|---|
| `add_nodes_df` | `Union[DataFrame, DataFrame[RoadNodesTable]]` | Dataframe of additional nodes to add. | required |
- -
- Source code in network_wrangler/roadway/network.py -
def add_nodes(self, add_nodes_df: Union[pd.DataFrame, DataFrame[RoadNodesTable]]):
-    """Validate combined nodes_df with NodesSchema before adding to self.nodes_df.
-
-    Args:
-        add_nodes_df: Dataframe of additional nodes to add.
-    """
-    if not isinstance(add_nodes_df, RoadNodesTable):
-        add_nodes_df = data_to_nodes_df(add_nodes_df)
-    self.nodes_df = RoadNodesTable(pd.concat([self.nodes_df, add_nodes_df], axis=0))
-
-
-
- -
- -
- - -

add_shapes(add_shapes_df)

- - -
- -

Validate combined shapes_df with RoadShapesTable before adding to self.shapes_df.

- - -

Parameters:

| Name | Type | Description | Default |
|---|---|---|---|
| `add_shapes_df` | `Union[DataFrame, DataFrame[RoadShapesTable]]` | Dataframe of additional shapes to add. | required |
- -
- Source code in network_wrangler/roadway/network.py -
def add_shapes(self, add_shapes_df: Union[pd.DataFrame, DataFrame[RoadShapesTable]]):
-    """Validate combined shapes_df with RoadShapesTable efore adding to self.shapes_df.
-
-    Args:
-        add_shapes_df: Dataframe of additional shapes to add.
-    """
-    if not isinstance(add_shapes_df, RoadShapesTable):
-        add_shapes_df = df_to_shapes_df(add_shapes_df)
-    WranglerLogger.debug(f"add_shapes_df: \n{add_shapes_df}")
-    WranglerLogger.debug(f"self.shapes_df: \n{self.shapes_df}")
-    together_df = pd.concat([self.shapes_df, add_shapes_df], axis=0)
-    WranglerLogger.debug(f"together_df: \n{together_df}")
-    self.shapes_df = RoadShapesTable(together_df)
-
-
-
- -
- -
- - -

apply(project_card)

- - -
- -

Wrapper method to apply a roadway project, returning a new RoadwayNetwork instance.

- - -

Parameters:

| Name | Type | Description | Default |
|---|---|---|---|
| `project_card` | `Union[ProjectCard, dict]` | Either a dictionary of the project card object or a ProjectCard instance. | required |
- -
- Source code in network_wrangler/roadway/network.py -
def apply(self, project_card: Union[ProjectCard, dict]) -> RoadwayNetwork:
-    """Wrapper method to apply a roadway project, returning a new RoadwayNetwork instance.
-
-    Args:
-        project_card: either a dictionary of the project card object or ProjectCard instance
-    """
-    if not (isinstance(project_card, ProjectCard) or isinstance(project_card, SubProject)):
-        project_card = ProjectCard(project_card)
-
-    project_card.validate()
-
-    if project_card._sub_projects:
-        for sp in project_card._sub_projects:
-            WranglerLogger.debug(f"- applying subproject: {sp.change_type}")
-            self._apply_change(sp)
-        return self
-    else:
-        return self._apply_change(project_card)
-
-
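A hedged sketch of applying a change expressed as a plain dictionary; the exact card schema is governed by the projectcard package, so the field names below are indicative only:

```python
card_dict = {
    "project": "Widen I 35E",  # hypothetical project name
    "roadway_property_change": {
        "facility": {"links": [{"name": ["I 35E"]}]},
        "property_changes": {"lanes": {"set": 3}},
    },
}
net = net.apply(card_dict)  # validated, then dispatched to _apply_change()
```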
-
- -
- -
- - -

clean_unused_nodes()

- - -
- -

Removes any unused nodes from network that aren’t referenced by links_df.

-

NOTE: does not check if these nodes are used by transit, so use with caution.

- -
- Source code in network_wrangler/roadway/network.py -
def clean_unused_nodes(self):
-    """Removes any unused nodes from network that aren't referenced by links_df.
-
-    NOTE: does not check if these nodes are used by transit, so use with caution.
-    """
-    from .nodes.nodes import node_ids_without_links
-
-    node_ids = node_ids_without_links(self.nodes_df, self.links_df)
-    self.nodes_df = self.nodes_df.drop(node_ids)
-
-
-
- -
- -
- - -

clean_unused_shapes()

- - -
- -

Removes any unused shapes from network that aren’t referenced by links_df.

- -
- Source code in network_wrangler/roadway/network.py -
def clean_unused_shapes(self):
-    """Removes any unused shapes from network that aren't referenced by links_df."""
-    from .shapes.shapes import shape_ids_without_links
-
-    del_shape_ids = shape_ids_without_links(self.shapes_df, self.links_df)
-    self.shapes_df = self.shapes_df.drop(del_shape_ids)
-
-
-
- -
- -
- - -

coerce_crs(v, info)

- - -
- -

Coerce crs of nodes_df and links_df to network crs.

- -
- Source code in network_wrangler/roadway/network.py -
@field_validator("nodes_df", "links_df")
-def coerce_crs(cls, v, info):
-    """Coerce crs of nodes_df and links_df to network crs."""
-    net_crs = info.data["crs"]
-    if v.crs != net_crs:
-        WranglerLogger.warning(
-            f"CRS of links_df ({v.crs}) doesn't match network crs {net_crs}. \
-                Changing to network crs."
-        )
-        v = v.to_crs(net_crs)
-    return v
-
-
-
- -
- -
- - - - - -
- -

Deletes links based on selection dictionary and optionally associated nodes and shapes.

- - -

Parameters:

| Name | Type | Description | Default |
|---|---|---|---|
| `selection_dict` | `SelectLinksDict` | Dictionary describing link selections: `all` (Optional[bool], default False; if true, selects all links), `name` (Optional[list[str]]), `ref` (Optional[list[str]]), `osm_link_id` (Optional[list[str]]), `model_link_id` (Optional[list[int]]), `modes` (Optional[list[str]], defaults to "any"), `ignore_missing` (if true, will not error if selected links are not found; defaults to True), plus any other link property to select on top of these. | required |
| `clean_nodes` | `bool` | If True, will clean nodes uniquely associated with deleted links. Defaults to False. | `False` |
| `clean_shapes` | `bool` | If True, will clean shapes uniquely associated with deleted links. Defaults to False. | `False` |
- -
- Source code in network_wrangler/roadway/network.py -
def delete_links(
-    self,
-    selection_dict: SelectLinksDict,
-    clean_nodes: bool = False,
-    clean_shapes: bool = False,
-):
-    """Deletes links based on selection dictionary and optionally associated nodes and shapes.
-
-    Args:
-        selection_dict (SelectLinks): Dictionary describing link selections as follows:
-            `all`: Optional[bool] = False. If true, will select all.
-            `name`: Optional[list[str]]
-            `ref`: Optional[list[str]]
-            `osm_link_id`:Optional[list[str]]
-            `model_link_id`: Optional[list[int]]
-            `modes`: Optional[list[str]]. Defaults to "any"
-            `ignore_missing`: if true, will not error if selected links are not found. Defaults to True.
-            ...plus any other link property to select on top of these.
-        clean_nodes (bool, optional): If True, will clean nodes uniquely associated with
-            deleted links. Defaults to False.
-        clean_shapes (bool, optional): If True, will clean shapes uniquely associated with
-            deleted links. Defaults to False.
-    """
-    selection_dict = SelectLinksDict(**selection_dict).model_dump(
-        exclude_none=True, by_alias=True
-    )
-    selection = self.get_selection({"links": selection_dict})
-
-    if clean_nodes:
-        node_ids_to_delete = node_ids_unique_to_link_ids(
-            selection.selected_links, selection.selected_links_df, self.nodes_df
-        )
-        WranglerLogger.debug(
-            f"Dropping nodes associated with dropped links: \n{node_ids_to_delete}"
-        )
-        self.nodes_df = delete_nodes_by_ids(self.nodes_df, del_node_ids=node_ids_to_delete)
-
-    if clean_shapes:
-        shape_ids_to_delete = shape_ids_unique_to_link_ids(
-            selection.selected_links, selection.selected_links_df, self.shapes_df
-        )
-        WranglerLogger.debug(
-            f"Dropping shapes associated with dropped links: \n{shape_ids_to_delete}"
-        )
-        self.shapes_df = delete_shapes_by_ids(
-            self.shapes_df, del_shape_ids=shape_ids_to_delete
-        )
-
-    self.links_df = delete_links_by_ids(
-        self.links_df,
-        selection.selected_links,
-        ignore_missing=selection.ignore_missing,
-    )
-
-
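A usage sketch (the model_link_id values are made up):

```python
net.delete_links(
    {"model_link_id": [123456, 123457]},
    clean_nodes=True,   # also drop nodes used only by these links
    clean_shapes=True,  # also drop shapes used only by these links
)
```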
-
- -
- -
- - -

delete_nodes(selection_dict, remove_links=False)

- - -
- -

Deletes nodes from roadway network. Won't delete nodes used by links in network.

- - -

Parameters:

| Name | Type | Description | Default |
|---|---|---|---|
| `selection_dict` | `Union[dict, SelectNodesDict]` | Dictionary of node selection criteria in the form of a SelectNodesDict. | required |
| `remove_links` | `bool` | If True, will remove any links that are associated with the nodes. If False, will only remove nodes if they are not associated with any links. Defaults to False. | `False` |
- - -

Raises:

| Type | Description |
|---|---|
| `NodeDeletionError` | If not ignore_missing and selected nodes to delete aren't in the network. |
- -
- Source code in network_wrangler/roadway/network.py -
def delete_nodes(
-    self,
-    selection_dict: Union[dict, SelectNodesDict],
-    remove_links: bool = False,
-) -> None:
-    """Deletes nodes from roadway network. Wont delete nodes used by links in network.
-
-    Args:
-        selection_dict: dictionary of node selection criteria in the form of a SelectNodesDict.
-        remove_links: if True, will remove any links that are associated with the nodes.
-            If False, will only remove nodes if they are not associated with any links.
-            Defaults to False.
-
-    raises:
-        NodeDeletionError: If not ignore_missing and selected nodes to delete aren't in network
-    """
-    if not isinstance(selection_dict, SelectNodesDict):
-        selection_dict = SelectNodesDict(**selection_dict)
-    selection_dict = selection_dict.model_dump(exclude_none=True, by_alias=True)
-    selection: RoadwayNodeSelection = self.get_selection(
-        {"nodes": selection_dict},
-    )
-    if remove_links:
-        del_node_ids = selection.selected_nodes
-        link_ids = self.links_with_nodes(selection.selected_nodes).model_link_id.to_list()
-        WranglerLogger.info(f"Removing {len(link_ids)} links associated with nodes.")
-        self.delete_links({"model_link_id": link_ids})
-    else:
-        unused_node_ids = node_ids_without_links(self.nodes_df, self.links_df)
-        del_node_ids = list(set(selection.selected_nodes).intersection(unused_node_ids))
-
-    self.nodes_df = delete_nodes_by_ids(
-        self.nodes_df, del_node_ids, ignore_missing=selection.ignore_missing
-    )
-
-
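A usage sketch with a made-up id; with remove_links=False the node would survive if any link still referenced it:

```python
net.delete_nodes({"model_node_id": [999901]}, remove_links=True)
```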
-
- -
- -
- - -

get_modal_graph(mode)

- - -
- -

Return a networkx graph of the network for a specific mode.

- - -

Parameters:

| Name | Type | Description | Default |
|---|---|---|---|
| `mode` | | Mode of the network, one of `drive`, `transit`, `walk`, `bike`. | required |
- -
- Source code in network_wrangler/roadway/network.py -
def get_modal_graph(self, mode) -> MultiDiGraph:
-    """Return a networkx graph of the network for a specific mode.
-
-    Args:
-        mode: mode of the network, one of `drive`,`transit`,`walk`, `bike`
-    """
-    from .graph import net_to_graph
-
-    if self._modal_graphs[mode]["hash"] != self.modal_graph_hash(mode):
-        self._modal_graphs[mode]["graph"] = net_to_graph(self, mode)
-
-    return self._modal_graphs[mode]["graph"]
-
-
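The returned graph is a plain networkx object, so standard graph algorithms apply directly (`net` as in the class usage example):

```python
import networkx as nx

g = net.get_modal_graph("drive")     # rebuilt only if the links have changed
print(nx.is_strongly_connected(g))   # same check that is_connected("drive") performs
```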
-
- -
- -
- - -

get_property_by_timespan_and_group(link_property, category=DEFAULT_CATEGORY, timespan=DEFAULT_TIMESPAN, strict_timespan_match=False, min_overlap_minutes=60)

- - -
- -

Returns a new dataframe with model_link_id and link property by category and timespan.

-

Convenience method for backward compatibility.

- - -

Parameters:

| Name | Type | Description | Default |
|---|---|---|---|
| `link_property` | `str` | Link property to query. | required |
| `category` | `Union[str, int]` | Category to query, or a list of categories. Defaults to DEFAULT_CATEGORY. | `DEFAULT_CATEGORY` |
| `timespan` | `TimespanString` | Timespan to query in the form of ["HH:MM","HH:MM"]. Defaults to DEFAULT_TIMESPAN. | `DEFAULT_TIMESPAN` |
| `strict_timespan_match` | `bool` | If True, will only return links that match the timespan exactly. Defaults to False. | `False` |
| `min_overlap_minutes` | `int` | If strict_timespan_match is False, will return links that overlap with the timespan by at least this many minutes. Defaults to 60. | `60` |
- -
- Source code in network_wrangler/roadway/network.py -
def get_property_by_timespan_and_group(
-    self,
-    link_property: str,
-    category: Union[str, int] = DEFAULT_CATEGORY,
-    timespan: TimespanString = DEFAULT_TIMESPAN,
-    strict_timespan_match: bool = False,
-    min_overlap_minutes: int = 60,
-) -> Any:
-    """Returns a new dataframe with model_link_id and link property by category and timespan.
-
-    Convenience method for backward compatibility.
-
-    Args:
-        link_property: link property to query
-        category: category to query or a list of categories. Defaults to DEFAULT_CATEGORY.
-        timespan: timespan to query in the form of ["HH:MM","HH:MM"].
-            Defaults to DEFAULT_TIMESPAN.
-        strict_timespan_match: If True, will only return links that match the timespan exactly.
-            Defaults to False.
-        min_overlap_minutes: If strict_timespan_match is False, will return links that overlap
-            with the timespan by at least this many minutes. Defaults to 60.
-    """
-    from .links.scopes import prop_for_scope
-
-    return prop_for_scope(
-        self.links_df,
-        link_property,
-        timespan=timespan,
-        category=category,
-        strict_timespan_match=strict_timespan_match,
-        min_overlap_minutes=min_overlap_minutes,
-    )
-
-
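A sketch of an AM-peak query; the property name and times are illustrative:

```python
am_lanes = net.get_property_by_timespan_and_group(
    "lanes",
    timespan=["06:00", "09:00"],
    min_overlap_minutes=30,  # count links overlapping the window by >= 30 minutes
)
```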
-
- -
- -
- - -

get_selection(selection_dict, overwrite=False)

- - -
- -

Return selection if it already exists, otherwise performs selection.

- - -

Parameters:

| Name | Type | Description | Default |
|---|---|---|---|
| `selection_dict` | `dict` | SelectFacility dictionary. | required |
| `overwrite` | `bool` | If True, will overwrite any previously cached searches. Defaults to False. | `False` |
- -
- Source code in network_wrangler/roadway/network.py -
def get_selection(
-    self,
-    selection_dict: Union[dict, SelectFacility],
-    overwrite: bool = False,
-) -> Union[RoadwayNodeSelection, RoadwayLinkSelection]:
-    """Return selection if it already exists, otherwise performs selection.
-
-    Args:
-        selection_dict (dict): SelectFacility dictionary.
-        overwrite: if True, will overwrite any previously cached searches. Defaults to False.
-    """
-    key = _create_selection_key(selection_dict)
-    if (key in self._selections) and not overwrite:
-        WranglerLogger.debug(f"Using cached selection from key: {key}")
-        return self._selections[key]
-
-    if isinstance(selection_dict, SelectFacility):
-        selection_data = selection_dict
-    elif isinstance(selection_dict, SelectLinksDict):
-        selection_data = SelectFacility(links=selection_dict)
-    elif isinstance(selection_dict, SelectNodesDict):
-        selection_data = SelectFacility(nodes=selection_dict)
-    elif isinstance(selection_dict, dict):
-        selection_data = SelectFacility(**selection_dict)
-    else:
-        WranglerLogger.error(f"`selection_dict` arg must be a dictionary or SelectFacility\
-                          model. Received: {selection_dict} of type {type(selection_dict)}")
-        raise SelectionError("selection_dict arg must be a dictionary or SelectFacility model")
-
-    WranglerLogger.debug(f"Getting selection from key: {key}")
-    if selection_data.feature_types in ["links", "segment"]:
-        return RoadwayLinkSelection(self, selection_dict)
-    elif selection_data.feature_types == "nodes":
-        return RoadwayNodeSelection(self, selection_dict)
-    else:
-        WranglerLogger.error("Selection data should be of type 'segment', 'links' or 'nodes'.")
-        raise SelectionError("Selection data should be of type 'segment', 'links' or 'nodes'.")
-
-
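A sketch of the caching behavior, assuming (per the code above) that selection objects register themselves under their sel_key when built:

```python
sel_dict = {"links": {"name": ["I 35E"]}}

first = net.get_selection(sel_dict)                  # runs the search
second = net.get_selection(sel_dict)                 # served from the cache
fresh = net.get_selection(sel_dict, overwrite=True)  # forces a re-search
```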
-
- -
- -
- - - - - -
- -

Returns true if network has links with AB values.

- - -

Parameters:

| Name | Type | Description | Default |
|---|---|---|---|
| `ab` | `tuple` | Tuple of values corresponding with A and B. | required |
- -
- Source code in network_wrangler/roadway/network.py -
def has_link(self, ab: tuple) -> bool:
-    """Returns true if network has links with AB values.
-
-    Args:
-        ab: Tuple of values corresponding with A and B.
-    """
-    sel_a, sel_b = ab
-    has_link = self.links_df[["A", "B"]].isin({"A": [sel_a], "B": [sel_b]}).all(axis=1).any()
-    return has_link
-
-
-
- -
- -
- - -

has_node(model_node_id)

- - -
- -

Queries if network has node based on model_node_id.

- - -

Parameters:

| Name | Type | Description | Default |
|---|---|---|---|
| `model_node_id` | `int` | model_node_id to check for. | required |
- -
- Source code in network_wrangler/roadway/network.py -
def has_node(self, model_node_id: int) -> bool:
-    """Queries if network has node based on model_node_id.
-
-    Args:
-        model_node_id: model_node_id to check for.
-    """
-    has_node = self.nodes_df.model_node_id.isin([model_node_id]).any()
-
-    return has_node
-
-
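Together with has_link above, this supports quick existence checks; the ids are made up:

```python
if net.has_node(961117623) and net.has_link((961117623, 2564047368)):
    print("node and A->B link are both present")
```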
-
- -
- -
- - -

is_connected(mode)

- - -
- -

Determines if the network graph is “strongly” connected.

-

A graph is strongly connected if each vertex is reachable from every other vertex.

- - -

Parameters:

| Name | Type | Description | Default |
|---|---|---|---|
| `mode` | `str` | Mode of the network, one of `drive`, `transit`, `walk`, `bike`. | required |
- -
- Source code in network_wrangler/roadway/network.py -
def is_connected(self, mode: str) -> bool:
-    """Determines if the network graph is "strongly" connected.
-
-    A graph is strongly connected if each vertex is reachable from every other vertex.
-
-    Args:
-        mode:  mode of the network, one of `drive`,`transit`,`walk`, `bike`
-    """
-    is_connected = nx.is_strongly_connected(self.get_modal_graph(mode))
-
-    return is_connected
-
-
-
- -
- -
- - - - - -
- -

Return subset of links_df based on link_ids list.

- -
- Source code in network_wrangler/roadway/network.py -
def links_with_link_ids(self, link_ids: List[int]) -> DataFrame[RoadLinksTable]:
-    """Return subset of links_df based on link_ids list."""
-    return filter_links_to_ids(self.links_df, link_ids)
-
-
-
- -
- -
- - - - - -
- -

Return subset of links_df based on node_ids list.

- -
- Source code in network_wrangler/roadway/network.py -
def links_with_nodes(self, node_ids: List[int]) -> DataFrame[RoadLinksTable]:
-    """Return subset of links_df based on node_ids list."""
-    return filter_links_to_node_ids(self.links_df, node_ids)
-
-
-
- -
- -
- - -

modal_graph_hash(mode)

- - -
- -

Hash of the links in order to detect a network change from when graph created.

- -
- Source code in network_wrangler/roadway/network.py -
def modal_graph_hash(self, mode) -> str:
-    """Hash of the links in order to detect a network change from when graph created."""
-    _value = str.encode(self.links_df.df_hash() + "-" + mode)
-    _hash = hashlib.sha256(_value).hexdigest()
-
-    return _hash
-
-
-
- -
- -
- - -

move_nodes(node_geometry_change_table)

- - -
- -

Moves nodes based on updated geometry along with associated links and shape geometry.

- - -

Parameters:

| Name | Type | Description | Default |
|---|---|---|---|
| `node_geometry_change_table` | `DataFrame[NodeGeometryChangeTable]` | A table with model_node_id, X, Y, and CRS. | required |
- -
- Source code in network_wrangler/roadway/network.py -
def move_nodes(
-    self,
-    node_geometry_change_table: DataFrame[NodeGeometryChangeTable],
-):
-    """Moves nodes based on updated geometry along with associated links and shape geometry.
-
-    Args:
-        node_geometry_change_table: a table with model_node_id, X, Y, and CRS.
-    """
-    node_geometry_change_table = NodeGeometryChangeTable(node_geometry_change_table)
-    node_ids = node_geometry_change_table.model_node_id.to_list()
-    WranglerLogger.debug(f"Moving nodes: {node_ids}")
-    self.nodes_df = edit_node_geometry(self.nodes_df, node_geometry_change_table)
-    self.links_df = edit_link_geometry_from_nodes(self.links_df, self.nodes_df, node_ids)
-    self.shapes_df = edit_shape_geometry_from_nodes(
-        self.shapes_df, self.links_df, self.nodes_df, node_ids
-    )
-
-
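A hedged sketch; the required column names are defined by NodeGeometryChangeTable, and `in_crs` below is an assumption for the CRS column:

```python
import pandas as pd

moves = pd.DataFrame(
    {"model_node_id": [961117623], "X": [-93.09], "Y": [44.95], "in_crs": [4326]}  # in_crs is assumed
)
# Updates node geometry, then regenerates the geometry of touching links and shapes.
net.move_nodes(moves)
```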
-
- -
- -
- - - - - -
- -

Returns subset of self.nodes_df that are in self.links_df.

- -
- Source code in network_wrangler/roadway/network.py -
def nodes_in_links(self) -> DataFrame[RoadNodesTable]:
-    """Returns subset of self.nodes_df that are in self.links_df."""
-    return filter_nodes_to_links(self.links_df, self.nodes_df)
-
-
-
- -
- - - -
- -
- -
- - - - -
- -
- -
- -
- - - - -
- -

TransitNetwork class for representing a transit network.

-

Transit Networks are represented as a Wrangler-flavored GTFS Feed and optionally mapped to -a RoadwayNetwork object. The TransitNetwork object is the primary object for managing transit -networks in Wrangler.

-

Usage:

-
```python
-import network_wrangler as wr
-t = wr.load_transit(stpaul_gtfs)
-t.road_net = wr.load_roadway(stpaul_roadway)
-t = t.apply(project_card)
-write_transit(t, "output_dir")
-```
-
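Extending that usage, a sketch of the roadway-consistency handshake (paths as above):

```python
import network_wrangler as wr

t = wr.load_transit(stpaul_gtfs)
t.road_net = wr.load_roadway(stpaul_roadway)  # raises TransitRoadwayConsistencyError if inconsistent

# Lazily re-evaluated whenever the feed hash or roadway network hash changes.
if not t.consistent_with_road_net:
    raise RuntimeError("transit feed no longer matches the roadway network")
```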
- - - -
- - - - - - - - -
- - - -

TransitNetwork

- - -
-

- Bases: object

- - -

Representation of a Transit Network.

-

Typical usage example: -

import network_wrangler as wr
-tc = wr.load_transit(stpaul_gtfs)
-

- - -

Attributes:

| Name | Type | Description |
|---|---|---|
| `feed` | `Feed` | GTFS feed object with interlinked tables. |
| `road_net` | `RoadwayNetwork` | Associated roadway network object. |
| `graph` | `MultiDiGraph` | Graph for associated roadway network object. |
| `feed_path` | `str` | Where the feed was read in from. |
| `validated_frequencies` | `bool` | Whether the frequencies have been validated. |
| `validated_road_network_consistency` | `bool` | Whether the network has been validated against the road network. |
- -
- Source code in network_wrangler/transit/network.py -
class TransitNetwork(object):
-    """Representation of a Transit Network.
-
-    Typical usage example:
-    ``` py
-    import network_wrangler as wr
-    tc = wr.load_transit(stpaul_gtfs)
-    ```
-
-    Attributes:
-        feed: gtfs feed object with interlinked tables.
-        road_net (RoadwayNetwork): Associated roadway network object.
-        graph (nx.MultiDiGraph): Graph for associated roadway network object.
-        feed_path (str): Where the feed was read in from.
-        validated_frequencies (bool): The frequencies have been validated.
-        validated_road_network_consistency (): The network has been validated against
-            the road network.
-    """
-
-    TIME_COLS = ["arrival_time", "departure_time", "start_time", "end_time"]
-
-    def __init__(self, feed: Feed):
-        """Constructor for TransitNetwork.
-
-        Args:
-            feed: Feed object representing the transit network gtfs tables
-        """
-        WranglerLogger.debug("Creating new TransitNetwork.")
-
-        self._road_net: Optional[RoadwayNetwork] = None
-        self.feed: Feed = feed
-        self.graph: nx.MultiDiGraph = None
-
-        # initialize
-        self._consistent_with_road_net = False
-
-        # cached selections
-        self._selections: dict[str, dict] = {}
-
-    @property
-    def feed_path(self):
-        """Pass through property from Feed."""
-        return self.feed.feed_path
-
-    @property
-    def config(self):
-        """Pass through property from Feed."""
-        return self.feed.config
-
-    @property
-    def feed(self):
-        """Feed associated with the transit network."""
-        return self._feed
-
-    @feed.setter
-    def feed(self, feed: Feed):
-        if not isinstance(feed, Feed):
-            msg = f"TransitNetwork's feed value must be a valid Feed instance. \
-                             This is a {type(feed)}."
-            WranglerLogger.error(msg)
-            raise ValueError(msg)
-        if self._road_net is None or transit_road_net_consistency(feed, self._road_net):
-            self._feed = feed
-            self._stored_feed_hash = copy.deepcopy(feed.hash)
-        else:
-            WranglerLogger.error("Can't assign Feed inconsistent with set Roadway Network.")
-            raise TransitRoadwayConsistencyError(
-                "Can't assign Feed inconsistent with set RoadwayNetwork."
-            )
-
-    @property
-    def road_net(self) -> RoadwayNetwork:
-        """Roadway network associated with the transit network."""
-        return self._road_net
-
-    @road_net.setter
-    def road_net(self, road_net: RoadwayNetwork):
-        if not isinstance(road_net, RoadwayNetwork):
-            msg = f"TransitNetwork's road_net: value must be a valid RoadwayNetwork instance. \
-                             This is a {type(road_net)}."
-            WranglerLogger.error(msg)
-            raise ValueError(msg)
-        if transit_road_net_consistency(self.feed, road_net):
-            self._road_net = road_net
-            self._stored_road_net_hash = copy.deepcopy(self.road_net.network_hash)
-            self._consistent_with_road_net = True
-        else:
-            WranglerLogger.error(
-                "Can't assign inconsistent RoadwayNetwork - Roadway Network not \
-                                 set, but can be referenced separately."
-            )
-            raise TransitRoadwayConsistencyError("Can't assign inconsistent RoadwayNetwork.")
-
-    @property
-    def feed_hash(self):
-        """Return the hash of the feed."""
-        return self.feed.hash
-
-    @property
-    def consistent_with_road_net(self) -> bool:
-        """Indicate if road_net is consistent with transit network.
-
-        Checks the network hash of when consistency was last evaluated. If transit network or
-        roadway network has changed, will re-evaluate consistency and return the updated value and
-        update self._stored_road_net_hash.
-
-        Returns:
-            Boolean indicating if road_net is consistent with transit network.
-        """
-        updated_road = self.road_net.network_hash != self._stored_road_net_hash
-        updated_feed = self.feed_hash != self._stored_feed_hash
-
-        if updated_road or updated_feed:
-            self._consistent_with_road_net = transit_road_net_consistency(self.feed, self.road_net)
-            self._stored_road_net_hash = copy.deepcopy(self.road_net.network_hash)
-            self._stored_feed_hash = copy.deepcopy(self.feed_hash)
-        return self._consistent_with_road_net
-
-    def __deepcopy__(self, memo):
-        """Returns copied TransitNetwork instance with deep copy of Feed but not roadway net."""
-        COPY_REF_NOT_VALUE = ["_road_net"]
-        # Create a new, empty instance
-        copied_net = self.__class__.__new__(self.__class__)
-        # Return the new TransitNetwork instance
-        attribute_dict = vars(self)
-
-        # Copy the attributes to the new instance
-        for attr_name, attr_value in attribute_dict.items():
-            # WranglerLogger.debug(f"Copying {attr_name}")
-            if attr_name in COPY_REF_NOT_VALUE:
-                # If the attribute is in the COPY_REF_NOT_VALUE list, assign the reference
-                setattr(copied_net, attr_name, attr_value)
-            else:
-                # WranglerLogger.debug(f"making deep copy: {attr_name}")
-                # For other attributes, perform a deep copy
-                setattr(copied_net, attr_name, copy.deepcopy(attr_value, memo))
-
-        return copied_net
-
-    def deepcopy(self):
-        """Returns copied TransitNetwork instance with deep copy of Feed but not roadway net."""
-        return copy.deepcopy(self)
-
-    @property
-    def stops_gdf(self) -> gpd.GeoDataFrame:
-        """Return stops as a GeoDataFrame using set roadway geometry."""
-        if self.road_net is not None:
-            ref_nodes = self.road_net.nodes_df
-        else:
-            ref_nodes = None
-        return to_points_gdf(self.feed.stops, nodes_df=ref_nodes)
-
-    @property
-    def shapes_gdf(self) -> gpd.GeoDataFrame:
-        """Return aggregated shapes as a GeoDataFrame using set roadway geometry."""
-        if self.road_net is not None:
-            ref_nodes = self.road_net.nodes_df
-        else:
-            ref_nodes = None
-        return shapes_to_trip_shapes_gdf(self.feed.shapes, ref_nodes_df=ref_nodes)
-
-    @property
-    def shape_links_gdf(self) -> gpd.GeoDataFrame:
-        """Return shape-links as a GeoDataFrame using set roadway geometry."""
-        if self.road_net is not None:
-            ref_nodes = self.road_net.nodes_df
-        else:
-            ref_nodes = None
-        return shapes_to_shape_links_gdf(self.feed.shapes, ref_nodes_df=ref_nodes)
-
-    @property
-    def stop_time_links_gdf(self) -> gpd.GeoDataFrame:
-        """Return stop-time-links as a GeoDataFrame using set roadway geometry."""
-        if self.road_net is not None:
-            ref_nodes = self.road_net.nodes_df
-        else:
-            ref_nodes = None
-        return stop_times_to_stop_time_links_gdf(
-            self.feed.stop_times, self.feed.stops, ref_nodes_df=ref_nodes
-        )
-
-    @property
-    def stop_times_points_gdf(self) -> gpd.GeoDataFrame:
-        """Return stop-time-points as a GeoDataFrame using set roadway geometry."""
-        if self.road_net is not None:
-            ref_nodes = self.road_net.nodes_df
-        else:
-            ref_nodes = None
-
-        return stop_times_to_stop_time_points_gdf(
-            self.feed.stop_times, self.feed.stops, ref_nodes_df=ref_nodes
-        )
-
-    def get_selection(
-        self,
-        selection_dict: dict,
-        overwrite: bool = False,
-    ) -> TransitSelection:
-        """Return selection if it already exists, otherwise performs selection.
-
-        Will raise an error if no trips found.
-
-        Args:
-            selection_dict (dict): dictionary of criteria for selecting transit trips.
-            overwrite: if True, will overwrite any previously cached searches. Defaults to False.
-
-        Returns:
-            Selection: Selection object
-        """
-        key = dict_to_hexkey(selection_dict)
-
-        if (key not in self._selections) or overwrite:
-            WranglerLogger.debug(f"Performing selection from key: {key}")
-            self._selections[key] = TransitSelection(self, selection_dict)
-        else:
-            WranglerLogger.debug(f"Using cached selection from key: {key}")
-
-        if not self._selections[key]:
-            WranglerLogger.debug(
-                f"No links or nodes found for selection dict: \n {selection_dict}"
-            )
-            raise ValueError("Selection not successful.")
-        return self._selections[key]
-
-    def apply(self, project_card: Union[ProjectCard, dict], **kwargs) -> "TransitNetwork":
-        """Wrapper method to apply a roadway project, returning a new TransitNetwork instance.
-
-        Args:
-            project_card: either a dictionary of the project card object or ProjectCard instance
-            **kwargs: keyword arguments to pass to project application
-        """
-        if not (isinstance(project_card, ProjectCard) or isinstance(project_card, SubProject)):
-            project_card = ProjectCard(project_card)
-
-        if not project_card.valid:
-            WranglerLogger.error(f"Invalid Project Card: {project_card}")
-            raise ValueError(f"Project card {project_card.project} not valid.")
-
-        if project_card._sub_projects:
-            for sp in project_card._sub_projects:
-                WranglerLogger.debug(f"- applying subproject: {sp.change_type}")
-                self._apply_change(sp, **kwargs)
-            return self
-        else:
-            return self._apply_change(project_card, **kwargs)
-
-    def _apply_change(
-        self,
-        change: Union[ProjectCard, SubProject],
-        reference_road_net: Optional[RoadwayNetwork] = None,
-    ) -> TransitNetwork:
-        """Apply a single change: a single-project project or a sub-project."""
-        if not isinstance(change, SubProject):
-            WranglerLogger.info(f"Applying Project to Transit Network: {change.project}")
-
-        if change.change_type == "transit_property_change":
-            return apply_transit_property_change(
-                self,
-                self.get_selection(change.service),
-                change.transit_property_change,
-            )
-
-        elif change.change_type == "transit_routing_change":
-            return apply_transit_routing_change(
-                self,
-                self.get_selection(change.service),
-                change.transit_routing_change,
-                reference_road_net=reference_road_net,
-            )
-
-        elif change.change_type == "add_new_route":
-            return apply_add_transit_route_change(self, change.transit_route_addition)
-
-        elif change.change_type == "roadway_deletion":
-            # FIXME
-            raise NotImplementedError("Roadway deletion check not yet implemented.")
-
-        elif change.change_type == "pycode":
-            return apply_calculated_transit(self, change.pycode)
-
-        else:
-            msg = f"Not a currently valid transit project: {change}."
-            WranglerLogger.error(msg)
-            raise NotImplementedError(msg)
-

Properties

- config (property): Pass through property from Feed.
- consistent_with_road_net: bool (property): Indicate if road_net is consistent with transit network. Checks the network hash of when consistency was last evaluated. If the transit network or roadway network has changed, will re-evaluate consistency, update self._stored_road_net_hash, and return the updated value.
- feed (property, writable): Feed associated with the transit network.
- feed_hash (property): Return the hash of the feed.
- feed_path (property): Pass through property from Feed.
- road_net: RoadwayNetwork (property, writable): Roadway network associated with the transit network.
- shape_links_gdf: gpd.GeoDataFrame (property): Return shape-links as a GeoDataFrame using set roadway geometry.
- shapes_gdf: gpd.GeoDataFrame (property): Return aggregated shapes as a GeoDataFrame using set roadway geometry.
- stop_time_links_gdf: gpd.GeoDataFrame (property): Return stop-time-links as a GeoDataFrame using set roadway geometry.
- stop_times_points_gdf: gpd.GeoDataFrame (property): Return stop-time-points as a GeoDataFrame using set roadway geometry.
- stops_gdf: gpd.GeoDataFrame (property): Return stops as a GeoDataFrame using set roadway geometry.
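A minimal sketch of pulling these geometry-aware tables, assuming the St Paul example data referenced in the class docstring and a `load_roadway_from_dir` loader in the top-level API:

``` py
import network_wrangler as wr

road_net = wr.load_roadway_from_dir("examples/stpaul")  # assumed example path
tn = wr.load_transit("examples/stpaul")
tn.road_net = road_net  # assignment validates transit/roadway consistency

stops = tn.stops_gdf              # stop points using roadway node geometry
shape_links = tn.shape_links_gdf  # links traversed by transit shapes
```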
__deepcopy__(memo)

Returns copied TransitNetwork instance with deep copy of Feed but not roadway net.
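A small sketch of the copy semantics described above (continuing with `tn` from the earlier example):

``` py
tn_copy = tn.deepcopy()
assert tn_copy.feed is not tn.feed      # the Feed tables are deep-copied
assert tn_copy.road_net is tn.road_net  # the roadway network is shared by reference
```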

__init__(feed)

Constructor for TransitNetwork.

Parameters:
- feed (Feed): Feed object representing the transit network gtfs tables. (required)

apply(project_card, **kwargs)

Wrapper method to apply a project card to the transit network, returning the modified network.

Parameters:
- project_card (Union[ProjectCard, dict]): either a dictionary of the project card object or a ProjectCard instance. (required)
- **kwargs: keyword arguments to pass to project application. (default {})
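A usage sketch, assuming a transit project card on disk and the companion projectcard package's read_card; the file path is illustrative only:

``` py
from projectcard import read_card

card = read_card("project_cards/transit_headway_change.yml")  # hypothetical card path
tn = tn.apply(card)
```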

deepcopy()

Returns copied TransitNetwork instance with deep copy of Feed but not roadway net.

get_selection(selection_dict, overwrite=False)

Return selection if it already exists, otherwise performs selection.

Will raise an error if no trips found.

Parameters:
- selection_dict (dict): dictionary of criteria for selecting transit trips. (required)
- overwrite (bool): if True, will overwrite any previously cached searches. Defaults to False.

Returns:
- Selection (TransitSelection): Selection object.
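An illustrative selection sketch; the selection keys and values here (trip_properties, route_id) are assumptions about the TransitSelection schema, not a definitive reference:

``` py
selection_dict = {"trip_properties": {"route_id": ["3-111"]}}  # illustrative keys/values
sel = tn.get_selection(selection_dict)  # cached under the sha1 hexkey of selection_dict
```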

TransitRoadwayConsistencyError

Bases: Exception

Error raised when transit network is inconsistent with roadway network.

Source code in network_wrangler/transit/network.py:
class TransitRoadwayConsistencyError(Exception):
-    """Error raised when transit network is inconsistent with roadway network."""
-
-    pass

Parameters


Parameters for Network Wrangler.

Module attributes

- COPY_FROM_GP_TO_ML = ['ref', 'roadway', 'access', 'distance', 'bike_access', 'drive_access', 'walk_access', 'bus_only', 'rail_only'] (list(str)): attributes to copy from a general purpose lane to a managed lane so long as an ML_ value doesn't already exist.
- COPY_TO_ACCESS_EGRESS = ['ref', 'ML_access', 'ML_drive_access', 'ML_bus_only', 'ML_rail_only'] (list(str)): attributes copied from GP lanes to access and egress dummy links.
- DEFAULT_CATEGORY = 'any': default category for scoped values.
- DEFAULT_DELETE_MODES = ['any'] (list(str)): default modes for searching for links to delete when no modes are specified.
- DEFAULT_MAX_SEARCH_BREADTH = 10 (int): default for maximum number of links traversed between links that match the searched name when searching for paths between A and B node.
- DEFAULT_SEARCH_BREADTH = 5 (int): default for initial number of links from name-based selection that are traversed before trying another shortest path when searching for paths between A and B node.
- DEFAULT_SEARCH_MODES = ['drive'] (list(str)): default search modes when searching for paths between A and B node and no modes are specified.
- DEFAULT_SP_WEIGHT_COL = 'i' (str): default column to use as weights in the shortest path calculations.
- DEFAULT_SP_WEIGHT_FACTOR = 100 (Union[int, float]): default penalty assigned for each degree of distance between a link and a link with the searched-for name when searching for paths between A and B node.
- DEFAULT_TIMESPAN = ['00:00', '24:00']: default timespan for scoped values.
- EST_PD_READ_SPEED = {'csv': 0.03, 'parquet': 0.005, 'geojson': 0.03, 'json': 0.15, 'txt': 0.04}: estimated read speed in seconds per MB; will depend on the specific computer.
- MANAGED_LANES_NODE_ID_SCALAR: range of model_node_ids to use when creating an associated node for a parallel managed lane.
- MANAGED_LANES_LINK_ID_SCALAR: range of model_link_ids to use when creating an associated link for a parallel managed lane.
- MANAGED_LANES_REQUIRED_ATTRIBUTES = ['A', 'B', 'model_link_id'] (list(str)): attributes that must be provided in managed lanes.
- ML_OFFSET_METERS = -5 (float): offset in meters for managed lane centerlines.
- SECONDARY_TRANSIT_CARD_TYPES = ['roadway_deletion'] (list(str)): roadway project card types that are secondarily applied to the transit network.

LinksParams (dataclass)

Parameters for RoadLinksTable.

Source code in network_wrangler/params.py:
@dataclass
-class LinksParams:
-    """Parameters for RoadLinksTable."""
-
-    primary_key: str = field(default="model_link_id")
-    _addtl_unique_ids: list[str] = field(default_factory=lambda: [])
-    _addtl_explicit_ids: list[str] = field(default_factory=lambda: ["osm_link_id"])
-    from_node: str = field(default="A")
-    to_node: str = field(default="B")
-    fk_to_shape: str = field(default="shape_id")
-    table_type: Literal["links"] = field(default="links")
-    source_file: str = field(default=None)
-    modes_to_network_link_variables: dict = field(
-        default_factory=lambda: MODES_TO_NETWORK_LINK_VARIABLES
-    )
-
-    @property
-    def idx_col(self):
-        """Column to make the index of the table."""
-        return self.primary_key + "_idx"
-
-    @property
-    def fks_to_nodes(self):
-        """Foreign keys to nodes in the network."""
-        return [self.from_node, self.to_node]
-
-    @property
-    def unique_ids(self) -> List[str]:
-        """List of unique ids for the table."""
-        _uids = self._addtl_unique_ids + [self.primary_key]
-        return list(set(_uids))
-
-    @property
-    def explicit_ids(self) -> List[str]:
-        """List of columns that can be used to easily find specific row sin the table."""
-        return list(set(self.unique_ids + self._addtl_explicit_ids))
-
-    @property
-    def display_cols(self) -> List[str]:
-        """List of columns to display in the table."""
-        _addtl = ["lanes"]
-        return list(set(self.explicit_ids + self.fks_to_nodes + _addtl))
-
Properties

- display_cols (List[str]): list of columns to display in the table.
- explicit_ids (List[str]): list of columns that can be used to easily find specific rows in the table.
- fks_to_nodes: foreign keys to nodes in the network.
- idx_col: column to make the index of the table.
- unique_ids (List[str]): list of unique ids for the table.
- NodesParams - - - - dataclass - - -

- - -
- - -

Parameters for RoadNodesTable.

- -
- Source code in network_wrangler/params.py -
@dataclass
-class NodesParams:
-    """Parameters for RoadNodesTable."""
-
-    primary_key: str = field(default="model_node_id")
-    _addtl_unique_ids: list[str] = field(default_factory=lambda: ["osm_node_id"])
-    _addtl_explicit_ids: list[str] = field(default_factory=lambda: [])
-    source_file: str = field(default=None)
-    table_type: Literal["nodes"] = field(default="nodes")
-    x_field: str = field(default="X")
-    y_field: str = field(default="Y")
-
-    @property
-    def geometry_props(self) -> List[str]:
-        """List of geometry properties."""
-        return [self.x_field, self.y_field, "geometry"]
-
-    @property
-    def idx_col(self) -> str:
-        """Column to make the index of the table."""
-        return self.primary_key + "_idx"
-
-    @property
-    def unique_ids(self) -> List[str]:
-        """List of unique ids for the table."""
-        _uids = self._addtl_unique_ids + [self.primary_key]
-        return list(set(_uids))
-
-    @property
-    def explicit_ids(self) -> List[str]:
-        """List of columns that can be used to easily find specific records the table."""
-        _eids = self._addtl_unique_ids + self.unique_ids
-        return list(set(_eids))
-
-    @property
-    def display_cols(self) -> List[str]:
-        """Columns to display in the table."""
-        return self.explicit_ids
-
Properties

- display_cols (List[str]): columns to display in the table.
- explicit_ids (List[str]): list of columns that can be used to easily find specific records in the table.
- geometry_props (List[str]): list of geometry properties.
- idx_col (str): column to make the index of the table.
- unique_ids (List[str]): list of unique ids for the table.

ShapesParams (dataclass)

Parameters for RoadShapesTable.

Source code in network_wrangler/params.py:
@dataclass
-class ShapesParams:
-    """Parameters for RoadShapesTable."""
-
-    primary_key: str = field(default="shape_id")
-    _addtl_unique_ids: list[str] = field(default_factory=lambda: [])
-    table_type: Literal["shapes"] = field(default="shapes")
-    source_file: str = field(default=None)
-
-    @property
-    def idx_col(self) -> str:
-        """Column to make the index of the table."""
-        return self.primary_key + "_idx"
-
-    @property
-    def unique_ids(self) -> list[str]:
-        """List of unique ids for the table."""
-        return list(set([*self._addtl_unique_ids, self.primary_key]))  # list.append returns None, so build a new list
-
Properties

- idx_col (str): column to make the index of the table.
- unique_ids (list[str]): list of unique ids for the table.
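A quick sketch of instantiating these dataclasses and reading their derived properties:

``` py
from network_wrangler.params import LinksParams, NodesParams

link_params = LinksParams()
link_params.fks_to_nodes    # ['A', 'B']
link_params.idx_col         # 'model_link_id_idx'

node_params = NodesParams()
node_params.geometry_props  # ['X', 'Y', 'geometry']
```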

Projects

-

Projects are how you manipulate the networks. Each project type is defined in a module in the projects folder and accepts a RoadwayNetwork and or TransitNetwork as an input and returns the same objects (manipulated) as an output.

-

Roadway

-

The roadway module contains submodules which define and extend the links, nodes, and shapes dataframe objects within a RoadwayNetwork object, as well as other classes and methods which support and extend the RoadwayNetwork class.

-

Network Objects

-

Submodules which define and extend the links, nodes, and shapes dataframe objects within a RoadwayNetwork object. Includes classes which define:

  • dataframe schemas to be used for dataframe validation using pandera
  • methods which extend the dataframes

:: network_wrangler.roadway.links.io
:: network_wrangler.roadway.links.create
:: network_wrangler.roadway.links.delete
:: network_wrangler.roadway.links.edit
:: network_wrangler.roadway.links.filters
:: network_wrangler.roadway.links.geo
:: network_wrangler.roadway.links.scopes
:: network_wrangler.roadway.links.summary
:: network_wrangler.roadway.links.validate
:: network_wrangler.roadway.links.df_accessors

Nodes

-

:: network_wrangler.roadway.nodes.io
:: network_wrangler.roadway.nodes.create
:: network_wrangler.roadway.nodes.delete
:: network_wrangler.roadway.nodes.edit
:: network_wrangler.roadway.nodes.filters
:: network_wrangler.roadway.nodes

Shapes

-

:: network_wrangler.roadway.shapes.io
:: network_wrangler.roadway.shapes.create
:: network_wrangler.roadway.shapes.edit
:: network_wrangler.roadway.shapes.delete
:: network_wrangler.roadway.shapes.filters
:: network_wrangler.roadway.shapes.shapes

Supporting Classes, Methods + Parameters

-

:: network_wrangler.roadway.segment
:: network_wrangler.roadway.subnet
:: network_wrangler.roadway.graph

Utils and Functions

General utility functions used throughout the package.

check_one_or_one_superset_present(mixed_list, all_fields_present)

Checks that exactly one of the fields in mixed_list is in fields_present or one superset.

Source code in network_wrangler/utils/utils.py:
def check_one_or_one_superset_present(
-    mixed_list: list[Union[str, list[str]]], all_fields_present: list[str]
-) -> bool:
-    """Checks that exactly one of the fields in mixed_list is in fields_present or one superset."""
-    normalized_list = normalize_to_lists(mixed_list)
-
-    list_items_present = [i for i in normalized_list if set(i).issubset(all_fields_present)]
-
-    if len(list_items_present) == 1:
-        return True
-
-    return list_elements_subset_of_single_element(list_items_present)
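Example (illustrative values):

    >>> check_one_or_one_superset_present(["lanes", ["lanes", "price"]], ["lanes", "price"])
    True
    >>> check_one_or_one_superset_present(["lanes", "price"], ["lanes", "price"])
    False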
combine_unique_unhashable_list(list1, list2)

Combines lists preserving order of first and removing duplicates.

Parameters:
- list1 (list): the first list. (required)
- list2 (list): the second list. (required)

Returns:
- list: a new list containing the elements from list1 followed by the unique elements from list2.

Example:
    >>> list1 = [1, 2, 3]
    >>> list2 = [2, 3, 4, 5]
    >>> combine_unique_unhashable_list(list1, list2)
    [1, 2, 3, 4, 5]

Source code in network_wrangler/utils/utils.py:
def combine_unique_unhashable_list(list1: list, list2: list):
-    """Combines lists preserving order of first and removing duplicates.
-
-    Args:
-        list1 (list): The first list.
-        list2 (list): The second list.
-
-    Returns:
-        list: A new list containing the elements from list1 followed by the
-        unique elements from list2.
-
-    Example:
-        >>> list1 = [1, 2, 3]
-        >>> list2 = [2, 3, 4, 5]
-        >>> combine_unique_unhashable_list(list1, list2)
-        [1, 2, 3, 4, 5]
-    """
-    return [item for item in list1 if item not in list2] + list2
delete_keys_from_dict(dictionary, keys)

Removes list of keys from potentially nested dictionary.

SOURCE: https://stackoverflow.com/questions/3405715/ (user @mseifert)

Parameters:
- dictionary (dict): dictionary to remove keys from. (required)
- keys (list): list of keys to remove. (required)

Source code in network_wrangler/utils/utils.py:
def delete_keys_from_dict(dictionary: dict, keys: list) -> dict:
-    """Removes list of keys from potentially nested dictionary.
-
-    SOURCE: https://stackoverflow.com/questions/3405715/
-    User: @mseifert
-
-    Args:
-        dictionary: dictionary to remove keys from
-        keys: list of keys to remove
-
-    """
-    keys_set = set(keys)  # Just an optimization for the "if key in keys" lookup.
-
-    modified_dict = {}
-    for key, value in dictionary.items():
-        if key not in keys_set:
-            if isinstance(value, dict):
-                modified_dict[key] = delete_keys_from_dict(value, keys_set)
-            else:
-                modified_dict[key] = (
-                    value  # or copy.deepcopy(value) if a copy is desired for non-dicts.
-                )
-    return modified_dict
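Example (illustrative values; note that nested dictionaries are pruned recursively):

    >>> delete_keys_from_dict({"a": 1, "b": {"a": 2, "c": 3}}, ["a"])
    {'b': {'c': 3}}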
dict_to_hexkey(d)

Converts a dictionary to a hexdigest of the sha1 hash of the dictionary.

Parameters:
- d (dict): dictionary to convert to string. (required)

Returns:
- str: hexdigest of the sha1 hash of dictionary.

Source code in network_wrangler/utils/utils.py:
def dict_to_hexkey(d: dict) -> str:
-    """Converts a dictionary to a hexdigest of the sha1 hash of the dictionary.
-
-    Args:
-        d (dict): dictionary to convert to string
-
-    Returns:
-        str: hexdigest of the sha1 hash of dictionary
-    """
-    return hashlib.sha1(str(d).encode()).hexdigest()
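Example (illustrative; because the hash is taken over str(d), two dicts must stringify identically, including insertion order, to produce the same key):

    >>> key = dict_to_hexkey({"trip_id": ["14940701-JUN19-MVS-BUS-Weekday-01"]})
    >>> len(key)  # a sha1 hexdigest is always 40 hex characters
    40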
findkeys(node, kv)

Returns values of all keys in various objects.

Adapted from arainchi on Stack Overflow:
https://stackoverflow.com/questions/9807634/find-all-occurrences-of-a-key-in-nested-dictionaries-and-lists

Source code in network_wrangler/utils/utils.py:
def findkeys(node, kv):
-    """Returns values of all keys in various objects.
-
-    Adapted from arainchi on Stack Overflow:
-    https://stackoverflow.com/questions/9807634/find-all-occurrences-of-a-key-in-nested-dictionaries-and-lists
-    """
-    if isinstance(node, list):
-        for i in node:
-            for x in findkeys(i, kv):
-                yield x
-    elif isinstance(node, dict):
-        if kv in node:
-            yield node[kv]
-        for j in node.values():
-            for x in findkeys(j, kv):
-                yield x
-
generate_list_of_new_ids(input_ids, existing_ids, id_scalar, iter_val=10, max_iter=1000)

Generates a list of new IDs based on the input IDs, existing IDs, and other parameters.

Parameters:
- input_ids (list[str]): the input IDs for which new IDs need to be generated. (required)
- existing_ids (pd.Series): the existing IDs that should be avoided when generating new IDs. (required)
- id_scalar (int): the scalar value used to generate new IDs. (required)
- iter_val (int): the iteration value used in the generation process. Defaults to 10.
- max_iter (int): the maximum number of iterations allowed in the generation process. Defaults to 1000.

Returns:
- list[str]: a list of new IDs generated based on the input IDs and other parameters.

Source code in network_wrangler/utils/utils.py:
def generate_list_of_new_ids(
-    input_ids: list[str],
-    existing_ids: pd.Series,
-    id_scalar: int,
-    iter_val: int = 10,
-    max_iter: int = 1000,
-) -> list[str]:
-    """Generates a list of new IDs based on the input IDs, existing IDs, and other parameters.
-
-    Args:
-        input_ids (list[str]): The input IDs for which new IDs need to be generated.
-        existing_ids (pd.Series): The existing IDs that should be avoided when generating new IDs.
-        id_scalar (int): The scalar value used to generate new IDs.
-        iter_val (int, optional): The iteration value used in the generation process.
-            Defaults to 10.
-        max_iter (int, optional): The maximum number of iterations allowed in the generation
-            process. Defaults to 1000.
-
-    Returns:
-        list[str]: A list of new IDs generated based on the input IDs and other parameters.
-    """
-    # keep new_ids as list to preserve order
-    new_ids = []
-    existing_ids = set(existing_ids)
-    for i in input_ids:
-        new_id = generate_new_id(
-            i,
-            pd.Series(list(existing_ids)),
-            id_scalar,
-            iter_val=iter_val,
-            max_iter=max_iter,
-        )
-        new_ids.append(new_id)
-        existing_ids.add(new_id)
-    return new_ids
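Example (illustrative; on the first attempt each new ID is the numeric input plus id_scalar plus iter_val, and each result is added to the set of existing IDs before the next is generated):

    >>> generate_list_of_new_ids(["10", "20"], pd.Series(["10", "20", "30"]), id_scalar=100)
    ['120', '130']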
generate_new_id(input_id, existing_ids, id_scalar, iter_val=10, max_iter=1000)

Generate a new ID that isn't in existing_ids.

TODO: check a registry rather than existing IDs.

Parameters:
- input_id (str): id to use to generate new id. (required)
- existing_ids (pd.Series): series that has existing IDs that should be unique. (required)
- id_scalar (int): scalar value to initially use to create the new id. (required)
- iter_val (int): iteration value to use in the generation process. Defaults to 10.
- max_iter (int): maximum number of iterations allowed in the generation process. Defaults to 1000.

Source code in network_wrangler/utils/utils.py:
def generate_new_id(
-    input_id: str,
-    existing_ids: pd.Series,
-    id_scalar: int,
-    iter_val: int = 10,
-    max_iter: int = 1000,
-) -> str:
-    """Generate a new ID that isn't in existing_ids.
-
-    TODO: check a registry rather than existing IDs
-
-    Args:
-        input_id: id to use to generate new id.
-        existing_ids: series that has existing IDs that should be unique
-        id_scalar: scalar value to initially use to create the new id.
-        iter_val: iteration value to use in the generation process.
-        max_iter: maximum number of iterations allowed in the generation process.
-    """
-    str_prefix, input_id, str_suffix = split_string_prefix_suffix_from_num(input_id)
-
-    for i in range(1, max_iter + 1):
-        new_id = f"{str_prefix}{int(input_id) + id_scalar + (iter_val * i)}{str_suffix}"
-        if new_id not in existing_ids.values:
-            return new_id
-        elif i == max_iter:
-            WranglerLogger.error(f"Cannot generate new id within max iters of {max_iter}.")
-            raise ValueError("Cannot create unique new id.")
get_overlapping_range(ranges)

Returns the overlapping range for a list of ranges or tuples defining ranges.

Parameters:
- ranges (list[Union[tuple[int], range]]): a list of ranges or tuples defining ranges. (required)

Returns:
- Union[None, range]: the overlapping range if found, otherwise None.

Example:
    >>> ranges = [(1, 5), (3, 7)]
    >>> get_overlapping_range(ranges)
    range(3, 5)

Source code in network_wrangler/utils/utils.py:
def get_overlapping_range(ranges: list[Union[tuple[int], range]]) -> Union[None, range]:
-    """Returns the overlapping range for a list of ranges or tuples defining ranges.
-
-    Args:
-        ranges (list[Union[tuple[int], range]]): A list of ranges or tuples defining ranges.
-
-    Returns:
-        Union[None, range]: The overlapping range if found, otherwise None.
-
-    Example:
-        >>> ranges = [(1, 5), (3, 7)]
-        >>> get_overlapping_range(ranges)
-        range(3, 5)
-
-    """
-    _ranges = [r if isinstance(r, range) else range(r[0], r[1]) for r in ranges]
-
-    _overlap_start = max(r.start for r in _ranges)
-    _overlap_end = min(r.stop for r in _ranges)
-
-    if _overlap_start < _overlap_end:
-        return range(_overlap_start, _overlap_end)
-    else:
-        return None
list_elements_subset_of_single_element(mixed_list)

Checks whether all elements of mixed_list are subsets of exactly one (list) element within it.

Source code in network_wrangler/utils/utils.py:
def list_elements_subset_of_single_element(mixed_list: list[Union[str, list[str]]]) -> bool:
-    """Find the first list in the mixed_list."""
-    potential_supersets = []
-    for item in mixed_list:
-        if isinstance(item, list) and len(item) > 0:
-            potential_supersets.append(set(item))
-
-    # If no list is found, return False
-    if not potential_supersets:
-        return False
-
-    normalized_list = normalize_to_lists(mixed_list)
-
-    valid_supersets = []
-    for ss in potential_supersets:
-        if all(ss.issuperset(i) for i in normalized_list):
-            valid_supersets.append(ss)
-
-    return len(valid_supersets) == 1
make_slug(text, delimiter='_')

Makes a slug from text.

Source code in network_wrangler/utils/utils.py:
def make_slug(text: str, delimiter: str = "_") -> str:
-    """Makes a slug from text."""
-    text = re.sub("[,.;@#?!&$']+", "", text.lower())
-    return re.sub("[\ ]+", delimiter, text)  # noqa: W605
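Example (illustrative):

    >>> make_slug("Main St & 1st Ave!")
    'main_st_1st_ave'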
normalize_to_lists(mixed_list)

Turn a mixed list of scalars and lists into a list of lists.

Source code in network_wrangler/utils/utils.py:
def normalize_to_lists(mixed_list: list[Union[str, list]]) -> list[list]:
-    """Turn a mixed list of scalars and lists into a list of lists."""
-    normalized_list = []
-    for item in mixed_list:
-        if isinstance(item, str):
-            normalized_list.append([item])
-        else:
-            normalized_list.append(item)
-    return normalized_list
split_string_prefix_suffix_from_num(input_string)

Split a string prefix and suffix from the last number.

Parameters:
- input_string (str): the input string to be processed. (required)

Returns:
- tuple: a tuple containing the prefix (including any preceding numbers), the last numeric part as an integer, and the suffix.

Notes: this function uses regular expressions to split a string into three parts: the prefix, the last numeric part, and the suffix. The prefix includes any preceding numbers, the last numeric part is converted to an integer, and the suffix includes any non-digit characters after the last numeric part.

Examples:
    >>> split_string_prefix_suffix_from_num("abc123def456")
    ('abc123def', 456, '')

    >>> split_string_prefix_suffix_from_num("hello")
    ('hello', 0, '')

    >>> split_string_prefix_suffix_from_num("123")
    ('', 123, '')

Source code in network_wrangler/utils/utils.py:
def split_string_prefix_suffix_from_num(input_string: str):
-    """Split a string prefix and suffix from *last* number.
-
-    Args:
-        input_string (str): The input string to be processed.
-
-    Returns:
-        tuple: A tuple containing the prefix (including preceding numbers),
-               the last numeric part as an integer, and the suffix.
-
-    Notes:
-        This function uses regular expressions to split a string into three parts:
-        the prefix, the last numeric part, and the suffix. The prefix includes any
-        preceding numbers, the last numeric part is converted to an integer, and
-        the suffix includes any non-digit characters after the last numeric part.
-
-    Examples:
-        >>> split_string_prefix_suffix_from_num("abc123def456")
-        ('abc123def', 456, '')
-
-        >>> split_string_prefix_suffix_from_num("hello")
-        ('hello', 0, '')
-
-        >>> split_string_prefix_suffix_from_num("123")
-        ('', 123, '')
-
-    """
-    input_string = str(input_string)
-    pattern = re.compile(r"(.*?)(\d+)(\D*)$")
-    match = pattern.match(input_string)
-
-    if match:
-        # Extract the groups: prefix (including preceding numbers), last numeric part, suffix
-        prefix, numeric_part, suffix = match.groups()
-        # Convert the numeric part to an integer
-        num_variable = int(numeric_part)
-        return prefix, num_variable, suffix
-    else:
-        return input_string, 0, ""
topological_sort(adjacency_list, visited_list)

Topological sorting for an acyclic directed graph.

Parameters:
- adjacency_list (dict): a dictionary representing the adjacency list of the graph. (required)
- visited_list (dict): the visited status of each vertex in the graph, keyed by vertex. (required)

Returns:
- output_stack (list): a list containing the vertices in topological order.

This function performs a topological sort on an acyclic directed graph. It takes an adjacency list and a visited mapping as input. The adjacency list represents the connections between vertices in the graph, and the visited mapping keeps track of the visited status of each vertex.

The function uses a recursive helper function to perform the topological sort. It iterates over each vertex in the visited mapping; for each unvisited vertex, it calls the helper function, which recursively visits all the neighbors of the vertex and adds them to the output stack in reverse order. Finally, it returns the output stack, which contains the vertices in topological order.

Source code in network_wrangler/utils/utils.py:
def topological_sort(adjacency_list, visited_list):
-    """Topological sorting for Acyclic Directed Graph.
-
-    Parameters:
-    - adjacency_list (dict): A dictionary representing the adjacency list of the graph.
-    - visited_list (dict): A mapping keyed by vertex representing the visited status of each vertex in the graph.
-
-    Returns:
-    - output_stack (list): A list containing the vertices in topological order.
-
-    This function performs a topological sort on an acyclic directed graph. It takes an adjacency
-    list and a visited list as input. The adjacency list represents the connections between
-    vertices in the graph, and the visited list keeps track of the visited status of each vertex.
-
-    The function uses a recursive helper function to perform the topological sort. It starts by
-    iterating over each vertex in the visited list. For each unvisited vertex, it calls the helper
-    function, which recursively visits all the neighbors of the vertex and adds them to the output
-    stack in reverse order. Finally, it returns the output stack, which contains the vertices in
-    topological order.
-    """
-    output_stack = []
-
-    def _topology_sort_util(vertex):
-        if not visited_list[vertex]:
-            visited_list[vertex] = True
-            for neighbor in adjacency_list[vertex]:
-                _topology_sort_util(neighbor)
-            output_stack.insert(0, vertex)
-
-    for vertex in visited_list:
-        _topology_sort_util(vertex)
-
-    return output_stack
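Example (illustrative three-vertex DAG; both arguments are plain dicts keyed by vertex):

    >>> adjacency = {"a": ["b", "c"], "b": ["c"], "c": []}
    >>> visited = {"a": False, "b": False, "c": False}
    >>> topological_sort(adjacency, visited)
    ['a', 'b', 'c']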
Helper functions for reading and writing files to reduce boilerplate.

FileReadError

Bases: Exception

Raised when there is an error reading a file.

Source code in network_wrangler/utils/io.py:
class FileReadError(Exception):
-    """Raised when there is an error reading a file."""
-
-    pass
FileWriteError

Bases: Exception

Raised when there is an error writing a file.

Source code in network_wrangler/utils/io.py:
class FileWriteError(Exception):
-    """Raised when there is an error writing a file."""
-
-    pass
convert_file_serialization(input_file, output_file, overwrite=True, boundary_gdf=None, boundary_geocode=None, boundary_file=None, node_filter_s=None, chunk_size=None)

Convert a file serialization format to another and optionally filter to a boundary.

If the input file is a JSON file that is larger than a reasonable portion of available memory, and the output file is a Parquet file, the JSON file will be read in chunks.

If the input file is a geographic data type (shp, geojson, geoparquet) and a boundary is provided, the data will be filtered to the boundary.

Parameters:
- input_file (Path): path to the input JSON or GEOJSON file. (required)
- output_file (Path): path to the output Parquet file. (required)
- overwrite (bool): if True, overwrite the output file if it exists. Defaults to True.
- boundary_gdf (Optional[GeoDataFrame]): GeoDataFrame to filter the input data to. Only used for geographic data. Defaults to None.
- boundary_geocode (Optional[str]): geocode to filter the input data to. Only used for geographic data. Defaults to None.
- boundary_file (Optional[Path]): file to load as a boundary to filter the input data to. Only used for geographic data. Defaults to None.
- node_filter_s (Optional[Series]): if provided, will filter links in the .json file to only those that connect to nodes. Defaults to None.
- chunk_size (Optional[int]): number of JSON objects to process in each chunk. Only works for JSON to Parquet. If None, will determine whether chunking is needed and what size to use. Defaults to None.

Source code in network_wrangler/utils/io.py:
def convert_file_serialization(
-    input_file: Path,
-    output_file: Path,
-    overwrite: bool = True,
-    boundary_gdf: Optional[gpd.GeoDataFrame] = None,
-    boundary_geocode: Optional[str] = None,
-    boundary_file: Optional[Path] = None,
-    node_filter_s: Optional[pd.Series] = None,
-    chunk_size: Optional[int] = None,
-):
-    """Convert a file serialization format to another and optionally filter to a boundary.
-
-    If the input file is a JSON file that is larger than a reasonable portion of available
-    memory, *and* the output file is a Parquet file the JSON file will be read in chunks.
-
-    If the input file is a Geographic data type (shp, geojson, geoparquet) and a boundary is
-    provided, the data will be filtered to the boundary.
-
-    Args:
-        input_file: Path to the input JSON or GEOJSON file.
-        output_file: Path to the output Parquet file.
-        overwrite: If True, overwrite the output file if it exists.
-        boundary_gdf: GeoDataFrame to filter the input data to. Only used for geographic data.
-            Defaults to None.
-        boundary_geocode: Geocode to filter the input data to. Only used for geographic data.
-            Defaults to None.
-        boundary_file: File to load as a boundary to filter the input data to. Only used for
-            geographic data. Defaults to None.
-        node_filter_s: If provided, will filter links in .json file to only those that connect to
-            nodes. Defaults to None.
-        chunk_size: Number of JSON objects to process in each chunk. Only works for
-            JSON to Parquet. If None, will determine if chunking needed and what size.
-    """
-    WranglerLogger.debug(f"Converting {input_file} to {output_file}.")
-
-    if output_file.exists() and not overwrite:
-        raise FileExistsError(f"File {output_file} already exists and overwrite is False.")
-
-    if Path(input_file).suffix == ".json" and Path(output_file).suffix == ".parquet":
-        if chunk_size is None:
-            chunk_size = _suggest_json_chunk_size(input_file)
-        if chunk_size is None:
-            df = read_table(input_file)
-            if node_filter_s is not None and "A" in df.columns and "B" in df.columns:
-                df = df[df["A"].isin(node_filter_s) | df["B"].isin(node_filter_s)]
-            write_table(df, output_file, overwrite=overwrite)
-        else:
-            _json_to_parquet_in_chunks(input_file, output_file, chunk_size)
-        # JSON -> Parquet is fully handled above; return so the input isn't re-read and re-written below.
-        return
-
-    df = read_table(
-        input_file,
-        boundary_gdf=boundary_gdf,
-        boundary_geocode=boundary_geocode,
-        boundary_file=boundary_file,
-    )
-    if node_filter_s is not None and "A" in df.columns and "B" in df.columns:
-        df = df[df["A"].isin(node_filter_s) | df["B"].isin(node_filter_s)]
-    write_table(df, output_file, overwrite=overwrite)
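A usage sketch with hypothetical paths; JSON-to-Parquet conversion is chunked automatically when the input is large:

``` py
from pathlib import Path

convert_file_serialization(Path("network/link.json"), Path("network/link.parquet"))
```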
read_table(filename, sub_filename=None, boundary_gdf=None, boundary_geocode=None, boundary_file=None)

Read file and return a dataframe or geodataframe.

If filename is a zip file, will unzip to a temporary directory.

If filename is a geojson or shapefile, will filter the data to the boundary_gdf, boundary_geocode, or boundary_file if provided. Note that you can only provide one of these boundary filters.

If filename is a geoparquet file, will filter the data to the bounding box of the boundary_gdf, boundary_geocode, or boundary_file if provided. Note that you can only provide one of these boundary filters.

NOTE: if you are accessing multiple files from this zip file you will want to unzip it first and THEN access the table files so you don't create multiple duplicate unzipped tmp dirs.

Parameters:
- filename (Path): filename to load. (required)
- sub_filename (str): if the file is a zip, the sub_filename to load. Defaults to None.
- boundary_gdf (Optional[GeoDataFrame]): GeoDataFrame to filter the input data to. Only used for geographic data. Defaults to None.
- boundary_geocode (Optional[str]): geocode to filter the input data to. Only used for geographic data. Defaults to None.
- boundary_file (Optional[Path]): file to load as a boundary to filter the input data to. Only used for geographic data. Defaults to None.

Source code in network_wrangler/utils/io.py:
def read_table(
-    filename: Path,
-    sub_filename: str = None,
-    boundary_gdf: Optional[gpd.GeoDataFrame] = None,
-    boundary_geocode: Optional[str] = None,
-    boundary_file: Optional[Path] = None,
-) -> Union[pd.DataFrame, gpd.GeoDataFrame]:
-    """Read file and return a dataframe or geodataframe.
-
-    If filename is a zip file, will unzip to a temporary directory.
-
-    If filename is a geojson or shapefile, will filter the data
-    to the boundary_gdf, boundary_geocode, or boundary_file if provided. Note that you can only
-    provide one of these boundary filters.
-
-    If filename is a geoparquet file, will filter the data to the *bounding box* of the
-    boundary_gdf, boundary_geocode, or boundary_file if provided. Note that you can only
-    provide one of these boundary filters.
-
-    NOTE:  if you are accessing multiple files from this zip file you will want to unzip it first
-    and THEN access the table files so you don't create multiple duplicate unzipped tmp dirs.
-
-    Args:
-        filename (Path): filename to load.
-        sub_filename: if the file is a zip, the sub_filename to load.
-        boundary_gdf: GeoDataFrame to filter the input data to. Only used for geographic data.
-            Defaults to None.
-        boundary_geocode: Geocode to filter the input data to. Only used for geographic data.
-            Defaults to None.
-        boundary_file: File to load as a boundary to filter the input data to. Only used for
-            geographic data. Defaults to None.
-    """
-    filename = Path(filename)
-    if not filename.exists():
-        raise FileNotFoundError(f"Input file {filename} does not exist.")
-    if filename.stat().st_size == 0:
-        raise FileExistsError(f"File {filename} is empty.")
-    if filename.suffix == ".zip":
-        filename = unzip_file(filename) / sub_filename
-    WranglerLogger.debug(f"Estimated read time: {_estimate_read_time_of_file(filename)}.")
-
-    # will result in None if no boundary is provided
-    mask_gdf = get_bounding_polygon(
-        boundary_gdf=boundary_gdf,
-        boundary_geocode=boundary_geocode,
-        boundary_file=boundary_file,
-    )
-
-    if any([x in filename.suffix for x in ["geojson", "shp", "csv"]]):
-        try:
-            # masking only supported by fiona engine, which is slower.
-            if mask_gdf is None:
-                return gpd.read_file(filename, engine="pyogrio")
-            else:
-                return gpd.read_file(filename, mask=mask_gdf, engine="fiona")
-        except:  # noqa: E722
-            if "csv" in filename.suffix:
-                return pd.read_csv(filename)
-            raise FileReadError
-    elif "parquet" in filename.suffix:
-        return _read_parquet_table(filename, mask_gdf)
-    elif "json" in filename.suffix:
-        with open(filename) as f:
-            return pd.read_json(f, orient="records")
-    raise NotImplementedError(f"Filetype {filename.suffix} not implemented.")
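As a quick orientation, a minimal usage sketch (the file names here are illustrative, not shipped with the package):

```python
from network_wrangler.utils.io import read_table

# Plain tabular read; the reader is picked from the file suffix.
links_df = read_table("links.csv")

# Reading a single table out of a zip archive; the archive is unzipped
# to a temporary directory first, then sub_filename is loaded from it.
nodes_df = read_table("network.zip", sub_filename="nodes.geojson")
```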

#### `unzip_file(path)`

Unzips a file to a temporary directory and returns the directory path.
Source code in `network_wrangler/utils/io.py`:
def unzip_file(path: Path) -> Path:
-    """Unzips a file to a temporary directory and returns the directory path."""
-    tmpdir = tempfile.mkdtemp()
-    shutil.unpack_archive(path, tmpdir)
-
-    def finalize() -> None:
-        shutil.rmtree(tmpdir)
-
-    # Lazy cleanup
-    weakref.finalize(tmpdir, finalize)
-
-    return tmpdir
#### `write_table(df, filename, overwrite=False, **kwargs)`

Write a dataframe or geodataframe to a file.

Parameters:

| Name | Type | Description | Default |
|------|------|-------------|---------|
| `df` | `DataFrame` | dataframe to write. | *required* |
| `filename` | `Path` | filename to write to. | *required* |
| `overwrite` | `bool` | whether to overwrite the file if it exists. | `False` |
| `kwargs` |  | additional arguments to pass to the writer. | `{}` |
Source code in `network_wrangler/utils/io.py`:
def write_table(
-    df: Union[pd.DataFrame, gpd.GeoDataFrame],
-    filename: Path,
-    overwrite: bool = False,
-    **kwargs,
-) -> None:
-    """Write a dataframe or geodataframe to a file.
-
-    Args:
-        df (pd.DataFrame): dataframe to write.
-        filename (Path): filename to write to.
-        overwrite (bool): whether to overwrite the file if it exists. Defaults to False.
-        kwargs: additional arguments to pass to the writer.
-
-    """
-    filename = Path(filename)
-    if filename.exists() and not overwrite:
-        raise FileExistsError(f"File {filename} already exists and overwrite is False.")
-
-    if not filename.parent.exists():
-        filename.parent.mkdir(parents=True)
-
-    WranglerLogger.debug(f"Writing to {filename}.")
-
-    if "shp" in filename.suffix:
-        df.to_file(filename, index=False, **kwargs)
-    elif "parquet" in filename.suffix:
-        df.to_parquet(filename, index=False, **kwargs)
-    elif "csv" in filename.suffix:
-        df.to_csv(filename, index=False, date_format="%H:%M:%S", **kwargs)
-    elif "txt" in filename.suffix:
-        df.to_csv(filename, index=False, date_format="%H:%M:%S", **kwargs)
-    elif "geojson" in filename.suffix:
-        # required due to issues with list-like columns
-        if isinstance(df, gpd.GeoDataFrame):
-            data = df.to_json(drop_id=True)
-        else:
-            data = df.to_json(orient="records", index=False)
-        with open(filename, "w", encoding="utf-8") as file:
-            file.write(data)
-    elif "json" in filename.suffix:
-        with open(filename, "w") as f:
-            f.write(df.to_json(orient="records"))
-    else:
-        raise NotImplementedError(f"Filetype {filename.suffix} not implemented.")
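A minimal usage sketch (values are illustrative):

```python
import pandas as pd
from network_wrangler.utils.io import write_table

df = pd.DataFrame({"model_link_id": [1, 2], "lanes": [2, 3]})

# The writer is chosen from the suffix; an existing file raises
# FileExistsError unless overwrite=True is passed.
write_table(df, "links.csv", overwrite=True)
write_table(df, "links.parquet", overwrite=True)
```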
### `network_wrangler.utils.models`

Helper functions for data models.
#### `DatamodelDataframeIncompatableError`

Bases: `Exception`

Raised when a data model and a dataframe are not compatible.
Source code in `network_wrangler/utils/models.py`:
class DatamodelDataframeIncompatableError(Exception):
-    """Raised when a data model and a dataframe are not compatable."""
-
-    pass

#### `TableValidationError`

Bases: `Exception`

Raised when a table validation fails.
Source code in `network_wrangler/utils/models.py`:
class TableValidationError(Exception):
-    """Raised when a table validation fails."""
-
-    pass

#### `coerce_extra_fields_to_type_in_df(data, model, df)`

Coerce extra fields in data that aren't specified in the Pydantic model to the type in the df.

Note: will not coerce lists of submodels, etc.

Parameters:

| Name | Type | Description | Default |
|------|------|-------------|---------|
| `data` | `dict` | The data to coerce. | *required* |
| `model` | `BaseModel` | The Pydantic model to validate the data against. | *required* |
| `df` | `DataFrame` | The DataFrame to coerce the data to. | *required* |
Source code in `network_wrangler/utils/models.py`:
def coerce_extra_fields_to_type_in_df(
-    data: BaseModel, model: BaseModel, df: pd.DataFrame
-) -> BaseModel:
-    """Coerce extra fields in data that aren't specified in Pydantic model to the type in the df.
-
-    Note: will not coerce lists of submodels, etc.
-
-    Args:
-        data (dict): The data to coerce.
-        model (BaseModel): The Pydantic model to validate the data against.
-        df (pd.DataFrame): The DataFrame to coerce the data to.
-    """
-    out_data = copy.deepcopy(data)
-
-    # Coerce submodels
-    for field in submodel_fields_in_model(model, data):
-        out_data.__dict__[field] = coerce_extra_fields_to_type_in_df(
-            data.__dict__[field], model.__annotations__[field], df
-        )
-
-    for field in extra_attributes_undefined_in_model(data, model):
-        try:
-            v = coerce_val_to_df_types(field, data.model_extra[field], df)
-        except ValueError as e:
-            raise DatamodelDataframeIncompatableError(e)
-        out_data.model_extra[field] = v
-    return out_data
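A minimal sketch, assuming pydantic v2; the `Selection` model here is hypothetical, defined only for illustration:

```python
import pandas as pd
from pydantic import BaseModel, ConfigDict
from network_wrangler.utils.models import coerce_extra_fields_to_type_in_df

class Selection(BaseModel):  # hypothetical model that allows extra fields
    model_config = ConfigDict(extra="allow")
    name: str

links_df = pd.DataFrame({"lanes": [1, 2, 3]})
sel = Selection(name="Main St", lanes="2")  # "lanes" arrives as a string

coerced = coerce_extra_fields_to_type_in_df(sel, Selection, links_df)
print(coerced.model_extra["lanes"])  # -> 2, matching the df's integer dtype
```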

#### `default_from_datamodel(data_model, field)`

Returns the default value from a pandera data model for a given field name.
Source code in `network_wrangler/utils/models.py`:
def default_from_datamodel(data_model: pa.DataFrameModel, field: str):
-    """Returns default value from pandera data model for a given field name."""
-    if field in data_model.__fields__:
-        if hasattr(data_model.__fields__[field][1], "default"):
-            return data_model.__fields__[field][1].default
-    return None

#### `empty_df_from_datamodel(model, crs=LAT_LON_CRS)`

Create an empty DataFrame or GeoDataFrame with the specified columns.

Parameters:

| Name | Type | Description | Default |
|------|------|-------------|---------|
| `model` | `BaseModel` | A pandera data model to create an empty [Geo]DataFrame from. | *required* |
| `crs` | `int` | if the schema has geometry, will use this as the geometry's crs. | `LAT_LON_CRS` |
Source code in `network_wrangler/utils/models.py`:
def empty_df_from_datamodel(
-    model: DataFrameModel, crs: int = LAT_LON_CRS
-) -> Union[gpd.GeoDataFrame, pd.DataFrame]:
-    """Create an empty DataFrame or GeoDataFrame with the specified columns.
-
-    Args:
-        model (BaseModel): A pandera data model to create empty [Geo]DataFrame from.
-        crs: if schema has geometry, will use this as the geometry's crs. Defaults to LAT_LON_CRS
-    Returns:
-        An empty [Geo]DataFrame that validates to the specified model.
-    """
-    schema = model.to_schema()
-    data = {col: [] for col in schema.columns.keys()}
-
-    if "geometry" in data:
-        return model(gpd.GeoDataFrame(data, crs=crs))
-
-    return model(pd.DataFrame(data))

#### `extra_attributes_undefined_in_model(instance, model)`

Find the extra attributes in a pydantic model that are not defined in the model.
Source code in `network_wrangler/utils/models.py`:
def extra_attributes_undefined_in_model(instance: BaseModel, model: BaseModel) -> list:
-    """Find the extra attributes in a pydantic model that are not defined in the model."""
-    defined_fields = model.model_fields
-    all_attributes = list(instance.model_dump(exclude_none=True, by_alias=True).keys())
-    extra_attributes = [a for a in all_attributes if a not in defined_fields]
-    return extra_attributes

#### `identify_model(data, models)`

Identify the model that the input data conforms to.

Parameters:

| Name | Type | Description | Default |
|------|------|-------------|---------|
| `data` | `Union[DataFrame, dict]` | The input data to identify. | *required* |
| `models` | `list[DataFrameModel, BaseModel]` | A list of models to validate the input data against. | *required* |
Source code in `network_wrangler/utils/models.py`:
def identify_model(
-    data: Union[pd.DataFrame, dict], models: list[DataFrameModel, BaseModel]
-) -> Union[DataFrameModel, BaseModel]:
-    """Identify the model that the input data conforms to.
-
-    Args:
-        data (Union[pd.DataFrame, dict]): The input data to identify.
-        models (list[DataFrameModel,BaseModel]): A list of models to validate the input
-          data against.
-    """
-    for m in models:
-        try:
-            if isinstance(data, pd.DataFrame):
-                validate_df_to_model(data, m)
-            else:
-                m(**data)
-            return m
-        except ValidationError:
-            continue
-        except SchemaError:
-            continue
-
-    WranglerLogger.error(
-        f"The input data isn't consistant with any provided data model.\
-                         \nInput data: {data}\
-                         \nData Models: {models}"
-    )
-    raise ValueError("The input dictionary does not conform to any of the provided models.")

#### `submodel_fields_in_model(model, instance=None)`

Find the fields in a pydantic model that are submodels.
Source code in `network_wrangler/utils/models.py`:
def submodel_fields_in_model(model: BaseModel, instance: Optional[BaseModel] = None) -> list:
-    """Find the fields in a pydantic model that are submodels."""
-    types = get_type_hints(model)
-    model_type = Union[ModelMetaclass, BaseModel]
-    submodels = [f for f in model.model_fields if isinstance(types.get(f), model_type)]
-    if instance is not None:
-        defined = list(instance.model_dump(exclude_none=True, by_alias=True).keys())
-        return [f for f in submodels if f in defined]
-    return submodels

#### `validate_df_to_model(df, model)`

Wrapper to validate a DataFrame against a Pandera DataFrameModel with better logging.

Parameters:

| Name | Type | Description | Default |
|------|------|-------------|---------|
| `df` | `DataFrame` | DataFrame to validate. | *required* |
| `model` | `DataFrameModel` | Pandera DataFrameModel to validate against. | *required* |
Source code in `network_wrangler/utils/models.py`:
@validate_call(config=dict(arbitrary_types_allowed=True))
-def validate_df_to_model(df: DataFrame, model: DataFrameModel) -> DataFrame:
-    """Wrapper to validate a DataFrame against a Pandera DataFrameModel with better logging.
-
-    Args:
-        df: DataFrame to validate.
-        model: Pandera DataFrameModel to validate against.
-    """
-    try:
-        model_df = model.validate(df, lazy=True)
-        for c in model_df.columns:
-            default_value = default_from_datamodel(model, c)
-            if default_value is None:
-                model_df[c] = model_df[c].where(pd.notna(model_df[c]), None)
-            else:
-                model_df[c] = model_df[c].fillna(default_value)
-
-        return model_df
-
-    except SchemaErrors as e:
-        # Log the summary of errors
-        WranglerLogger.error(
-            f"Validation to {model.__name__} failed with {len(e.failure_cases)} \
-            errors: \n{e.failure_cases}"
-        )
-
-        # If there are many errors, save them to a file
-        if len(e.failure_cases) > 5:
-            error_file = "validation_failure_cases.csv"
-            e.failure_cases.to_csv(error_file)
-            WranglerLogger.info(f"Detailed error cases written to {error_file}")
-        else:
-            # Otherwise log the errors directly
-            WranglerLogger.error("Detailed failure cases:\n%s", e.failure_cases)
-        raise TableValidationError(f"Validation to {model.__name__} failed.")
-    except SchemaError as e:
-        WranglerLogger.error(f"Validation to {model.__name__} failed with error: {e}")
-        WranglerLogger.error(f"Failure Cases:\n{e.failure_cases}")
-        raise TableValidationError(f"Validation to {model.__name__} failed.")
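A sketch of the success and failure paths, assuming pandera is installed; `LinksModel` is a made-up schema for illustration only:

```python
import pandas as pd
import pandera as pa
from pandera.typing import Series
from network_wrangler.utils.models import validate_df_to_model, TableValidationError

class LinksModel(pa.DataFrameModel):  # hypothetical schema
    model_link_id: Series[int]
    lanes: Series[int] = pa.Field(ge=0)

good_df = pd.DataFrame({"model_link_id": [1, 2], "lanes": [2, 3]})
validated = validate_df_to_model(good_df, LinksModel)

bad_df = pd.DataFrame({"model_link_id": [1, 2], "lanes": [-1, 3]})
try:
    validate_df_to_model(bad_df, LinksModel)
except TableValidationError:
    print("schema errors were logged")  # failure cases logged, or written to csv if >5
```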
### `network_wrangler.utils.net`

Functions to help with network manipulations in dataframes.
#### `point_seq_to_links(point_seq_df, id_field, seq_field, node_id_field, from_field='A', to_field='B')`

Translates a df with tidy data representing a sequence of points into links.

Parameters:

| Name | Type | Description | Default |
|------|------|-------------|---------|
| `point_seq_df` | `DataFrame` | Dataframe with source breadcrumbs | *required* |
| `id_field` | `str` | Trace ID | *required* |
| `seq_field` | `str` | Order of breadcrumbs within id_field | *required* |
| `node_id_field` | `str` | field denoting the node ID | *required* |
| `from_field` | `str` | Field to export from_field to. | `'A'` |
| `to_field` | `str` | Field to export to_field to. | `'B'` |

Returns:

| Type | Description |
|------|-------------|
| `DataFrame` | Link records with id_field, from_field, to_field |
Source code in `network_wrangler/utils/net.py`:
def point_seq_to_links(
-    point_seq_df: DataFrame,
-    id_field: str,
-    seq_field: str,
-    node_id_field: str,
-    from_field: str = "A",
-    to_field: str = "B",
-) -> DataFrame:
-    """Translates a df with tidy data representing a sequence of points into links.
-
-    Args:
-        point_seq_df (pd.DataFrame): Dataframe with source breadcrumbs
-        id_field (str): Trace ID
-        seq_field (str): Order of breadcrumbs within ID_field
-        node_id_field (str): field denoting the node ID
-        from_field (str, optional): Field to export from_field to. Defaults to "A".
-        to_field (str, optional): Field to export to_field to. Defaults to "B".
-
-    Returns:
-        pd.DataFrame: Link records with id_field, from_field, to_field
-    """
-    point_seq_df = point_seq_df.sort_values(by=[id_field, seq_field])
-
-    links = point_seq_df.add_suffix(f"_{from_field}").join(
-        point_seq_df.shift(-1).add_suffix(f"_{to_field}")
-    )
-
-    links = links[links[f"{id_field}_{to_field}"] == links[f"{id_field}_{from_field}"]]
-
-    links = links.drop(columns=[f"{id_field}_{to_field}"])
-    links = links.rename(
-        columns={
-            f"{id_field}_{from_field}": id_field,
-            f"{node_id_field}_{from_field}": from_field,
-            f"{node_id_field}_{to_field}": to_field,
-        }
-    )
-
-    links = links.dropna(subset=[from_field, to_field])
-    # Since join with a shift() has some NAs, we need to recast the columns to int
-    _int_cols = [to_field, f"{seq_field}_{to_field}"]
-    links[_int_cols] = links[_int_cols].astype(int)
-    return links
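A minimal usage sketch (the breadcrumb values are illustrative):

```python
import pandas as pd
from network_wrangler.utils.net import point_seq_to_links

stops = pd.DataFrame({
    "trip_id": ["t1", "t1", "t1"],
    "stop_sequence": [1, 2, 3],
    "node_id": [10, 20, 30],
})
links = point_seq_to_links(
    stops, id_field="trip_id", seq_field="stop_sequence", node_id_field="node_id"
)
# links has rows (trip_id="t1", A=10, B=20) and (trip_id="t1", A=20, B=30)
```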
### `network_wrangler.utils.time`

Functions related to parsing and comparing time objects and series.

Internal function terminology for timespan scopes:

- `matching`: a scope that could be applied for a given timespan combination. This includes the default timespan as well as scopes wholly contained within.
- `overlapping`: a timespan that fully or partially overlaps a given timespan. This includes the default timespan, all matching timespans, and all timespans where at least one minute overlaps.
- `conflicting`: a timespan that is overlapping but not matching. By definition, default scope values are not conflicting.
- `independent`: a timespan that is not overlapping.

#### `convert_timespan_to_start_end_dt(timespan_s)`

Convert a timespan string (e.g. `['12:00','14:00']`) to `start_time` and `end_time` datetime columns in a df.
Source code in `network_wrangler/utils/time.py`:
def convert_timespan_to_start_end_dt(timespan_s: pd.Series) -> pd.DataFrame:
-    """Covert a timespan string ['12:00','14:00] to start_time and end_time datetime cols in df."""
-    start_time = timespan_s.apply(lambda x: str_to_time(x[0]))
-    end_time = timespan_s.apply(lambda x: str_to_time(x[1]))
-    return pd.DataFrame({"start_time": start_time, "end_time": end_time})

#### `dt_contains(timespan1, timespan2)`

Check if one timespan inclusively contains another.

Parameters:

| Name | Type | Description | Default |
|------|------|-------------|---------|
| `timespan1` | `list[time]` | The first timespan represented as a list containing the start time and end time. | *required* |
| `timespan2` | `list[time]` | The second timespan represented as a list containing the start time and end time. | *required* |

Returns:

| Name | Type | Description |
|------|------|-------------|
| `bool` | `bool` | True if the first timespan contains the second timespan, False otherwise. |
Source code in `network_wrangler/utils/time.py`:
@validate_call
-def dt_contains(timespan1: list[datetime], timespan2: list[datetime]) -> bool:
-    """Check if one timespan inclusively contains another.
-
-    Args:
-        timespan1 (list[time]): The first timespan represented as a list containing the start
-            time and end time.
-        timespan2 (list[time]): The second timespan represented as a list containing the start
-            time and end time.
-
-    Returns:
-        bool: True if the first timespan contains the second timespan, False otherwise.
-    """
-    start_time_dt, end_time_dt = timespan1
-    start_time_dt2, end_time_dt2 = timespan2
-    return (start_time_dt <= start_time_dt2) and (end_time_dt >= end_time_dt2)

#### `dt_list_overlaps(timespans)`

Check if any of the timespans overlap.

`overlapping`: a timespan that fully or partially overlaps a given timespan. This includes all timespans where at least one minute overlaps.
Source code in `network_wrangler/utils/time.py`:
@validate_call
-def dt_list_overlaps(timespans: list[list[datetime]]) -> bool:
-    """Check if any of the timespans overlap.
-
-    `overlapping`: a timespan that fully or partially overlaps a given timespan.
-    This includes all timespans where at least one minute overlaps.
-    """
-    if filter_dt_list_to_overlaps(timespans):
-        return True
-    return False

#### `dt_overlap_duration(timedelta1, timedelta2)`

Check if two timespans overlap and return the amount of overlap.
Source code in `network_wrangler/utils/time.py`:
@validate_call
-def dt_overlap_duration(timedelta1: timedelta, timedelta2: timedelta) -> timedelta:
-    """Check if two timespans overlap and return the amount of overlap."""
-    overlap_start = max(timedelta1.start_time, timedelta2.start_time)
-    overlap_end = min(timedelta1.end_time, timedelta2.end_time)
-    overlap_duration = max(overlap_end - overlap_start, timedelta(0))
-    return overlap_duration

#### `dt_overlaps(timespan1, timespan2)`

Check if two timespans overlap.

`overlapping`: a timespan that fully or partially overlaps a given timespan. This includes all timespans where at least one minute overlaps.
Source code in `network_wrangler/utils/time.py`:
@validate_call
-def dt_overlaps(timespan1: list[datetime], timespan2: list[datetime]) -> bool:
-    """Check if two timespans overlap.
-
-    `overlapping`: a timespan that fully or partially overlaps a given timespan.
-    This includes all timespans where at least one minute overlaps.
-    """
-    if (timespan1[0] < timespan2[1]) and (timespan2[0] < timespan1[1]):
-        return True
-    return False

#### `dt_to_seconds_from_midnight(dt)`

Convert a datetime object to the number of seconds since midnight.
Source code in `network_wrangler/utils/time.py`:
def dt_to_seconds_from_midnight(dt: datetime) -> int:
-    """Convert a datetime object to the number of seconds since midnight."""
-    return (dt - dt.replace(hour=0, minute=0, second=0, microsecond=0)).total_seconds()

#### `duration_dt(start_time_dt, end_time_dt)`

Returns a datetime.timedelta object representing the duration of the timespan.

If end_time is less than start_time, the duration will assume that it crosses over midnight.
Source code in `network_wrangler/utils/time.py`:
def duration_dt(start_time_dt: datetime, end_time_dt: datetime) -> timedelta:
-    """Returns a datetime.timedelta object representing the duration of the timespan.
-
-    If end_time is less than start_time, the duration will assume that it crosses over
-    midnight.
-    """
-    if end_time_dt < start_time_dt:
-        return timedelta(
-            hours=24 - start_time_dt.hour + end_time_dt.hour,
-            minutes=end_time_dt.minute - start_time_dt.minute,
-            seconds=end_time_dt.second - start_time_dt.second,
-        )
-    else:
-        return end_time_dt - start_time_dt

#### `filter_df_to_max_overlapping_timespans(orig_df, query_timespan, strict_match=False, min_overlap_minutes=0, keep_max_of_cols=['model_link_id'])`

Filters a dataframe for entries that have maximum overlap with the given query timespan.

Parameters:

| Name | Type | Description | Default |
|------|------|-------------|---------|
| `orig_df` | `DataFrame` | dataframe to query timespans for, with `start_time` and `end_time` fields. | *required* |
| `query_timespan` | `list[TimeString]` | TimespanString of format `['HH:MM','HH:MM']` to query orig_df for overlapping records. | *required* |
| `strict_match` | `bool` | boolean indicating if the returned df should only contain records that fully contain the query timespan. If set to True, min_overlap_minutes does not apply. | `False` |
| `min_overlap_minutes` | `int` | minimum number of minutes the timespans need to overlap to keep. | `0` |
| `keep_max_of_cols` | `list[str]` | list of fields to return the maximum value of overlap for. If None, will return all overlapping time periods. | `['model_link_id']` |
Source code in `network_wrangler/utils/time.py`:
def filter_df_to_max_overlapping_timespans(
-    orig_df: pd.DataFrame,
-    query_timespan: list[TimeString],
-    strict_match: bool = False,
-    min_overlap_minutes: int = 0,
-    keep_max_of_cols: list[str] = ["model_link_id"],
-) -> pd.DataFrame:
-    """Filters dataframe for entries that have maximum overlap with the given query timespan.
-
-    Args:
-        orig_df: dataframe to query timespans for with `start_time` and `end_time` fields.
-        query_timespan: TimespanString of format ['HH:MM','HH:MM'] to query orig_df for overlapping
-            records.
-        strict_match: boolean indicating if the returned df should only contain
-            records that fully contain the query timespan. If set to True, min_overlap_minutes
-            does not apply. Defaults to False.
-        min_overlap_minutes: minimum number of minutes the timespans need to overlap to keep.
-            Defaults to 0.
-        keep_max_of_cols: list of fields to return the maximum value of overlap for.  If None,
-            will return all overlapping time periods. Defaults to `['model_link_id']`
-    """
-    q_start, q_end = str_to_time_list(query_timespan)
-
-    overlap_start = orig_df["start_time"].combine(q_start, max)
-    overlap_end = orig_df["end_time"].combine(q_end, min)
-    orig_df["overlap_duration"] = (overlap_end - overlap_start).dt.total_seconds() / 60
-
-    if strict_match:
-        overlap_df = orig_df.loc[(orig_df.start_time <= q_start) & (orig_df.end_time >= q_end)]
-    else:
-        overlap_df = orig_df.loc[orig_df.overlap_duration > min_overlap_minutes]
-    WranglerLogger.debug(f"overlap_df: \n{overlap_df}")
-    if keep_max_of_cols:
-        # keep only the maximum overlap
-        idx = overlap_df.groupby(keep_max_of_cols)["overlap_duration"].idxmax()
-        overlap_df = overlap_df.loc[idx]
-    return overlap_df

#### `filter_df_to_overlapping_timespans(orig_df, query_timespan)`

Filters a dataframe for entries that have any overlap with the given query timespan.

Parameters:

| Name | Type | Description | Default |
|------|------|-------------|---------|
| `orig_df` | `DataFrame` | dataframe to query timespans for, with `start_time` and `end_time` fields. | *required* |
| `query_timespan` | `list[TimeString]` | TimespanString of format `['HH:MM','HH:MM']` to query orig_df for overlapping records. | *required* |
Source code in `network_wrangler/utils/time.py`:
def filter_df_to_overlapping_timespans(
-    orig_df: pd.DataFrame,
-    query_timespan: list[TimeString],
-) -> pd.DataFrame:
-    """Filters dataframe for entries that have any overlap with the given query timespan.
-
-    Args:
-        orig_df: dataframe to query timespans for with `start_time` and `end_time` fields.
-        query_timespan: TimespanString of format ['HH:MM','HH:MM'] to query orig_df for overlapping
-            records.
-    """
-    q_start, q_end = str_to_time_list(query_timespan)
-    # WranglerLogger.debug(f"q_start_time: {q_start}")
-    # WranglerLogger.debug(f"q_end_time:{q_end}")
-    # make sure start_time and end_time are datetime objects
-    df_start_time = pd.to_datetime(orig_df["start_time"])
-    df_end_time = pd.to_datetime(orig_df["end_time"])
-
-    # WranglerLogger.debug(f"df_start_time: \n{df_start_time}")
-    # WranglerLogger.debug(f"df_end_time: \n{df_end_time}")
-    overlap_df = orig_df.loc[(df_start_time < q_end) & (df_end_time > q_start)]
-
-    # WranglerLogger.debug(f"time overlap_df: \n{overlap_df}")
-    return overlap_df
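A minimal sketch; the `start_time`/`end_time` columns are built with `str_to_time` so they share the same base date as the query:

```python
import pandas as pd
from network_wrangler.utils.time import filter_df_to_overlapping_timespans, str_to_time

frequencies = pd.DataFrame({
    "trip_id": ["t1", "t2"],
    "start_time": [str_to_time("06:00"), str_to_time("10:00")],
    "end_time": [str_to_time("09:00"), str_to_time("12:00")],
})
# Keeps only t1: 06:00-09:00 overlaps the 08:00-09:30 query; 10:00-12:00 does not.
filter_df_to_overlapping_timespans(frequencies, ["08:00", "09:30"])
```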

#### `filter_dt_list_to_overlaps(timespans)`

Filter a list of timespans to only include those that overlap.

`overlapping`: a timespan that fully or partially overlaps a given timespan. This includes all timespans where at least one minute overlaps.
Source code in `network_wrangler/utils/time.py`:
@validate_call
-def filter_dt_list_to_overlaps(timespans: list[list[datetime]]) -> list[list[datetime]]:
-    """Filter a list of timespans to only include those that overlap.
-
-    `overlapping`: a timespan that fully or partially overlaps a given timespan.
-    This includes and all timespans where at least one minute overlap.
-    """
-    overlaps = []
-    for i in range(len(timespans)):
-        for j in range(i + 1, len(timespans)):
-            if dt_overlaps(timespans[i], timespans[j]):
-                overlaps += [timespans[i], timespans[j]]
-
-    # remove dupes
-    overlaps = list(map(list, set(map(tuple, overlaps))))
-    return overlaps

#### `format_time(seconds)`

Formats seconds into a human-friendly string for log files.
Source code in `network_wrangler/utils/time.py`:
def format_time(seconds):
-    """Formats seconds into a human-friendly string for log files."""
-    if seconds < 60:
-        return f"{int(seconds)} seconds"
-    elif seconds < 3600:
-        return f"{int(seconds // 60)} minutes"
-    else:
-        hours = int(seconds // 3600)
-        minutes = int((seconds % 3600) // 60)
-        return f"{hours} hours and {minutes} minutes"

#### `seconds_from_midnight_to_str(seconds)`

Convert the number of seconds since midnight to a TimeString (HH:MM).
Source code in `network_wrangler/utils/time.py`:
def seconds_from_midnight_to_str(seconds: int) -> TimeString:
-    """Convert the number of seconds since midnight to a TimeString (HH:MM)."""
-    return str(timedelta(seconds=seconds))

#### `str_to_seconds_from_midnight(time_str)`

Convert a TimeString (HH:MM<:SS>) to the number of seconds since midnight.
Source code in `network_wrangler/utils/time.py`:
def str_to_seconds_from_midnight(time_str: TimeString) -> int:
-    """Convert a TimeString (HH:MM<:SS>) to the number of seconds since midnight."""
-    dt = str_to_time(time_str)
-    return dt_to_seconds_from_midnight(dt)

#### `str_to_time(time_str, base_date=None)`

Convert TimeString (HH:MM<:SS>) to datetime object.

If HH > 24, will add a day to the base_date.

Parameters:

| Name | Type | Description | Default |
|------|------|-------------|---------|
| `time_str` | `TimeString` | TimeString in HH:MM:SS or HH:MM format. | *required* |
| `base_date` | `Optional[date]` | optional date to base the datetime on. If not provided, will use today. | `None` |
Source code in `network_wrangler/utils/time.py`:
def str_to_time(time_str: TimeString, base_date: Optional[datetime.date] = None) -> datetime:
-    """Convert TimeString (HH:MM<:SS>) to datetime object.
-
-    If HH > 24, will add a day to the base_date.
-
-    Args:
-        time_str: TimeString in HH:MM:SS or HH:MM format.
-        base_date: optional date to base the datetime on. Defaults to None.
-            If not provided, will use today.
-    """
-    # Set the base date to today if not provided
-    if base_date is None:
-        base_date = date.today()
-
-    # Split the time string to extract hours, minutes, and seconds
-    parts = time_str.split(":")
-    hours = int(parts[0])
-    minutes = int(parts[1])
-    seconds = int(parts[2]) if len(parts) == 3 else 0
-
-    # Calculate total number of days to add to base_date based on hours
-    days_to_add = hours // 24
-    hours = hours % 24
-
-    # Create a time object with the adjusted hours, minutes, and seconds
-    adjusted_time = datetime.strptime(f"{hours:02}:{minutes:02}:{seconds:02}", "%H:%M:%S").time()
-
-    # Combine the base date with the adjusted time and add the extra days if needed
-    combined_datetime = datetime.combine(base_date, adjusted_time) + timedelta(days=days_to_add)
-
-    return combined_datetime
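A minimal usage sketch:

```python
from network_wrangler.utils.time import str_to_time

str_to_time("08:30")  # today's date at 08:30:00
str_to_time("25:15")  # hours roll past midnight: tomorrow at 01:15:00
```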

#### `str_to_time_list(timespan)`

Convert a list of TimeStrings (HH:MM<:SS>) to a list of datetime objects.
Source code in `network_wrangler/utils/time.py`:
def str_to_time_list(timespan: list[TimeString]) -> list[list[datetime]]:
-    """Convert list of TimeStrings (HH:MM<:SS>) to list of datetime.time objects."""
-    return list(map(str_to_time, timespan))

#### `timespan_str_list_to_dt(timespans)`

Convert a list of TimespanStrings to a list of lists of datetime objects.
Source code in `network_wrangler/utils/time.py`:
def timespan_str_list_to_dt(timespans: list[TimespanString]) -> list[list[datetime]]:
-    """Convert list of TimespanStrings to list of lists of datetime objects."""
-    return [str_to_time_list(ts) for ts in timespans]

#### `timespans_overlap(timespan1, timespan2)`

Check if two timespan strings overlap.

`overlapping`: a timespan that fully or partially overlaps a given timespan. This includes all timespans where at least one minute overlaps.
Source code in `network_wrangler/utils/time.py`:
def timespans_overlap(timespan1: list[TimespanString], timespan2: list[TimespanString]) -> bool:
-    """Check if two timespan strings overlap.
-
-    `overlapping`: a timespan that fully or partially overlaps a given timespan.
-    This includes and all timespans where at least one minute overlap.
-    """
-    timespan1 = str_to_time_list(timespan1)
-    timespan2 = str_to_time_list(timespan2)
-    return dt_overlaps(timespan1, timespan2)
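A minimal usage sketch:

```python
from network_wrangler.utils.time import timespans_overlap

timespans_overlap(["06:00", "09:00"], ["08:30", "10:00"])  # True
timespans_overlap(["06:00", "09:00"], ["09:00", "10:00"])  # False: endpoints that only touch don't overlap
```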

### `network_wrangler.utils.data`

Utility functions for pandas data manipulation.

#### `InvalidJoinFieldError`

Bases: `Exception`

Raised when the join field is not unique.
Source code in `network_wrangler/utils/data.py`:
class InvalidJoinFieldError(Exception):
-    """Raised when the join field is not unique."""
-
-    pass

#### `MissingPropertiesError`

Bases: `Exception`

Raised when properties are missing from the dataframe.
Source code in `network_wrangler/utils/data.py`:
class MissingPropertiesError(Exception):
-    """Raised when properties are missing from the dataframe."""
-
-    pass

#### `attach_parameters_to_df(df, params)`

Attach params as a dataframe attribute which will be copied with the dataframe.
Source code in `network_wrangler/utils/data.py`:
def attach_parameters_to_df(df: pd.DataFrame, params) -> pd.DataFrame:
-    """Attatch params as a dataframe attribute which will be copied with dataframe."""
-    if not df.__dict__.get("params"):
-        df.__dict__["params"] = params
-        # need to add params to _metadata in order to make sure it is copied.
-        # see: https://stackoverflow.com/questions/50372509/
-        df._metadata += ["params"]
-    # WranglerLogger.debug(f"DFParams: {df.params}")
-    return df

#### `coerce_dict_to_df_types(d, df, skip_keys=[], return_skipped=False)`

Coerce dictionary values to match the type of the dataframe columns matching the dict keys.

Will also coerce a list of values.

Parameters:

| Name | Type | Description | Default |
|------|------|-------------|---------|
| `d` | `dict` | dictionary to coerce with singleton or list values | *required* |
| `df` | `DataFrame` | dataframe to get types from | *required* |
| `skip_keys` | `list` | list of dict keys to skip. | `[]` |
| `return_skipped` | `bool` | keep the uncoerced, skipped keys/vals in the resulting dict. | `False` |

Returns:

| Name | Type | Description |
|------|------|-------------|
| `dict` | `dict` | dict with coerced types |
Source code in `network_wrangler/utils/data.py`:
def coerce_dict_to_df_types(
-    d: dict, df: pd.DataFrame, skip_keys: list = [], return_skipped: bool = False
-) -> dict:
-    """Coerce dictionary values to match the type of a dataframe columns matching dict keys.
-
-    Will also coerce a list of values.
-
-    Args:
-        d (dict): dictionary to coerce with singleton or list values
-        df (pd.DataFrame): dataframe to get types from
-        skip_keys: list of dict keys to skip. Defaults to [].
-        return_skipped: keep the uncoerced, skipped keys/vals in the resulting dict.
-            Defaults to False.
-
-    Returns:
-        dict: dict with coerced types
-    """
-    coerced_dict = {}
-    for k, vals in d.items():
-        if k in skip_keys:
-            if return_skipped:
-                coerced_dict[k] = vals
-            continue
-        if k not in df.columns:
-            raise ValueError(f"Key {k} not in dataframe columns.")
-        if pd.api.types.infer_dtype(df[k]) == "integer":
-            if isinstance(vals, list):
-                coerced_v = [int(float(v)) for v in vals]
-            else:
-                coerced_v = int(float(vals))
-        elif pd.api.types.infer_dtype(df[k]) == "floating":
-            if isinstance(vals, list):
-                coerced_v = [float(v) for v in vals]
-            else:
-                coerced_v = float(vals)
-        elif pd.api.types.infer_dtype(df[k]) == "boolean":
-            if isinstance(vals, list):
-                coerced_v = [bool(v) for v in vals]
-            else:
-                coerced_v = bool(vals)
-        else:
-            if isinstance(vals, list):
-                coerced_v = [str(v) for v in vals]
-            else:
-                coerced_v = str(vals)
-        coerced_dict[k] = coerced_v
-    return coerced_dict
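A minimal usage sketch:

```python
import pandas as pd
from network_wrangler.utils.data import coerce_dict_to_df_types

df = pd.DataFrame({"lanes": [1, 2], "price": [0.5, 1.0], "name": ["a", "b"]})
coerce_dict_to_df_types({"lanes": "2", "price": ["1", "2.5"], "name": 3}, df)
# -> {"lanes": 2, "price": [1.0, 2.5], "name": "3"}
```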

#### `coerce_gdf(df, geometry=None, in_crs=LAT_LON_CRS)`

Coerce a DataFrame to a GeoDataFrame, optionally with a new geometry.
Source code in `network_wrangler/utils/data.py`:
def coerce_gdf(
-    df: pd.DataFrame, geometry: GeoSeries = None, in_crs: int = LAT_LON_CRS
-) -> GeoDataFrame:
-    """Coerce a DataFrame to a GeoDataFrame, optionally with a new geometry."""
-    if isinstance(df, GeoDataFrame):
-        if df.crs is None:
-            df.crs = in_crs
-        return df
-    p = None
-    if "params" in df.__dict__:
-        p = copy.deepcopy(df.params)
-
-    if "geometry" not in df and geometry is None:
-        raise ValueError("Must give geometry argument if don't have Geometry in dataframe")
-
-    geometry = geometry if geometry is not None else df["geometry"]
-    if not isinstance(geometry, GeoSeries):
-        try:
-            geometry = GeoSeries(geometry)
-        except:  # noqa: E722
-            geometry = geometry.apply(wkt.loads)
-    df = GeoDataFrame(df, geometry=geometry, crs=in_crs)
-
-    if p is not None:
-        # GeoPandas seems to lose parameters if we don't re-attach them.
-        df.__dict__["params"] = p
-    return df

#### `coerce_val_to_df_types(field, val, df)`

Coerce a field value to match the type of the matching dataframe column.

Parameters:

| Name | Type | Description | Default |
|------|------|-------------|---------|
| `field` | `str` | field to lookup | *required* |
| `val` | `Union[str, int, float, bool, list[Union[str, int, float, bool]]]` | value or list of values to coerce | *required* |
| `df` | `DataFrame` | dataframe to get types from | *required* |

Returns: coerced value or list of values.
Source code in `network_wrangler/utils/data.py`:
def coerce_val_to_df_types(
-    field: str,
-    val: Union[str, int, float, bool, list[Union[str, int, float, bool]]],
-    df: pd.DataFrame,
-) -> dict:
-    """Coerce field value to match the type of a matching dataframe columns.
-
-    Args:
-        field: field to lookup
-        val: value or list of values to coerce
-        df (pd.DataFrame): dataframe to get types from
-
-    Returns: coerced value or list of values
-    """
-    if field not in df.columns:
-        raise ValueError(f"Field {field} not in dataframe columns.")
-    if pd.api.types.infer_dtype(df[field]) == "integer":
-        if isinstance(val, list):
-            return [int(float(v)) for v in val]
-        return int(float(val))
-    elif pd.api.types.infer_dtype(df[field]) == "floating":
-        if isinstance(val, list):
-            return [float(v) for v in val]
-        return float(val)
-    elif pd.api.types.infer_dtype(df[field]) == "boolean":
-        if isinstance(val, list):
-            return [bool(v) for v in val]
-        return bool(val)
-    else:
-        if isinstance(val, list):
-            return [str(v) for v in val]
-        return str(val)

#### `coerce_val_to_series_type(val, s)`

Coerces a value to match the type of a pandas series.

Will try not to fail, so if you give it a value that can't convert to a number, it will return a string.

Parameters:

| Name | Type | Description | Default |
|------|------|-------------|---------|
| `val` |  | Any type of singleton value | *required* |
| `s` | `Series` | series to match the type to | *required* |
Source code in `network_wrangler/utils/data.py`:
def coerce_val_to_series_type(val, s: pd.Series):
-    """Coerces a value to match type of pandas series.
-
-    Will try not to fail so if you give it a value that can't convert to a number, it will
-    return a string.
-
-    Args:
-        val: Any type of singleton value
-        s (pd.Series): series to match the type to
-    """
-    # WranglerLogger.debug(f"Input val: {val} of type {type(val)} to match with series type \
-    #    {pd.api.types.infer_dtype(s)}.")
-    if pd.api.types.infer_dtype(s) in ["integer", "floating"]:
-        try:
-            v = float(val)
-        except:  # noqa: E722
-            v = str(val)
-    elif pd.api.types.infer_dtype(s) == "boolean":
-        v = bool(val)
-    else:
-        v = str(val)
-    # WranglerLogger.debug(f"Return value: {v}")
-    return v
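A minimal usage sketch:

```python
import pandas as pd
from network_wrangler.utils.data import coerce_val_to_series_type

coerce_val_to_series_type("3", pd.Series([1, 2]))    # -> 3.0 (numeric series)
coerce_val_to_series_type(3, pd.Series(["a", "b"]))  # -> "3" (string series)
```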

#### `compare_df_values(df1, df2, join_col=None, ignore=[], atol=1e-05)`

Compares the overlapping parts of two dataframes and returns where there are differences.
Source code in `network_wrangler/utils/data.py`:
def compare_df_values(df1, df2, join_col: str = None, ignore: list[str] = [], atol=1e-5):
-    """Compare overlapping part of dataframes and returns where there are differences."""
-    comp_c = [
-        c
-        for c in df1.columns
-        if c in df2.columns and c not in ignore and not isinstance(df1[c], GeoSeries)
-    ]
-    if join_col is None:
-        comp_df = df1[comp_c].merge(
-            df2[comp_c],
-            how="inner",
-            right_index=True,
-            left_index=True,
-            suffixes=["_a", "_b"],
-        )
-    else:
-        comp_df = df1[comp_c].merge(df2[comp_c], how="inner", on=join_col, suffixes=["_a", "_b"])
-
-    # Filter columns by data type
-    numeric_cols = [col for col in comp_c if np.issubdtype(df1[col].dtype, np.number)]
-    ll_cols = list(set(list_like_columns(df1) + list_like_columns(df2)))
-    other_cols = [col for col in comp_c if col not in numeric_cols and col not in ll_cols]
-
-    # For numeric columns, use np.isclose
-    if numeric_cols:
-        numeric_a = comp_df[[f"{col}_a" for col in numeric_cols]]
-        numeric_b = comp_df[[f"{col}_b" for col in numeric_cols]]
-        is_close = np.isclose(numeric_a, numeric_b, atol=atol, equal_nan=True)
-        comp_df[numeric_cols] = ~is_close
-
-    if ll_cols:
-        for ll_c in ll_cols:
-            comp_df[ll_c] = diff_list_like_series(comp_df[ll_c + "_a"], comp_df[ll_c + "_b"])
-
-    # For non-numeric columns, use direct comparison
-    if other_cols:
-        for col in other_cols:
-            comp_df[col] = (comp_df[f"{col}_a"] != comp_df[f"{col}_b"]) & ~(
-                comp_df[f"{col}_a"].isna() & comp_df[f"{col}_b"].isna()
-            )
-
-    # Filter columns and rows where no differences
-    cols_w_diffs = [col for col in comp_c if comp_df[col].any()]
-    out_cols = [col for subcol in cols_w_diffs for col in (f"{subcol}_a", f"{subcol}_b", subcol)]
-    comp_df = comp_df[out_cols]
-    comp_df = comp_df.loc[comp_df[cols_w_diffs].any(axis=1)]
-
-    return comp_df

#### `compare_lists(list1, list2)`

Compare two lists.
Source code in `network_wrangler/utils/data.py`:
def compare_lists(list1, list2) -> bool:
-    """Compare two lists."""
-    list1 = convert_numpy_to_list(list1)
-    list2 = convert_numpy_to_list(list2)
-    return list1 != list2

#### `convert_numpy_to_list(item)`

Function to recursively convert numpy arrays to lists.
Source code in `network_wrangler/utils/data.py`:
def convert_numpy_to_list(item):
-    """Function to recursively convert numpy arrays to lists."""
-    if isinstance(item, np.ndarray):
-        return item.tolist()
-    elif isinstance(item, list):
-        return [convert_numpy_to_list(sub_item) for sub_item in item]
-    elif isinstance(item, dict):
-        return {key: convert_numpy_to_list(value) for key, value in item.items()}
-    return item
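A minimal usage sketch:

```python
import numpy as np
from network_wrangler.utils.data import convert_numpy_to_list

convert_numpy_to_list({"ids": np.array([1, 2]), "nested": [np.array([3])]})
# -> {"ids": [1, 2], "nested": [[3]]}
```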

#### `dict_fields_in_df(d, df)`

Check if all fields in dict are in dataframe.
Source code in `network_wrangler/utils/data.py`:
def dict_fields_in_df(d: dict, df: pd.DataFrame) -> bool:
-    """Check if all fields in dict are in dataframe."""
-    missing_fields = [f for f in d.keys() if f not in df.columns]
-    if missing_fields:
-        WranglerLogger.error(f"Fields in dictionary missing from dataframe: {missing_fields}.")
-        raise ValueError(f"Fields in dictionary missing from dataframe: {missing_fields}.")
-    return True

#### `dict_to_query(selection_dict)`

Generates a query from selection_dict.

Parameters:

| Name | Type | Description | Default |
|------|------|-------------|---------|
| `selection_dict` | `Mapping[str, Any]` | selection dictionary | *required* |

Returns:

| Type | Description |
|------|-------------|
| `str` | Query value |
Source code in `network_wrangler/utils/data.py`:
def dict_to_query(
-    selection_dict: Mapping[str, Any],
-) -> str:
-    """Generates the query of from selection_dict.
-
-    Args:
-        selection_dict: selection dictionary
-
-    Returns:
-        _type_: Query value
-    """
-    WranglerLogger.debug("Building selection query")
-
-    def _kv_to_query_part(k, v, _q_part=""):
-        if isinstance(v, list):
-            _q_part += "(" + " or ".join([_kv_to_query_part(k, i) for i in v]) + ")"
-            return _q_part
-        if isinstance(v, str):
-            return k + '.str.contains("' + v + '")'
-        else:
-            return k + "==" + str(v)
-
-    query = "(" + " and ".join([_kv_to_query_part(k, v) for k, v in selection_dict.items()]) + ")"
-    WranglerLogger.debug(f"Selection query: \n{query}")
-    return query
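A minimal usage sketch; note that `engine="python"` is needed to evaluate `str.contains` clauses in `DataFrame.query`:

```python
import pandas as pd
from network_wrangler.utils.data import dict_to_query

links_df = pd.DataFrame({"lanes": [1, 2, 3], "name": ["Main St", "1st Ave", "Main Blvd"]})
q = dict_to_query({"lanes": [1, 2], "name": "Main"})
# q == '((lanes==1 or lanes==2) and name.str.contains("Main"))'
links_df.query(q, engine="python")  # keeps only the "Main St" row
```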

#### `diff_dfs(df1, df2, ignore=[])`

Returns True if two dataframes are different and logs the differences.
Source code in `network_wrangler/utils/data.py`:
def diff_dfs(df1, df2, ignore: list[str] = []) -> bool:
-    """Returns True if two dataframes are different and log differences."""
-    diff = False
-    if set(df1.columns) != set(df2.columns):
-        WranglerLogger.warning(
-            f" Columns are different 1vs2 \n    {set(df1.columns) ^ set(df2.columns)}"
-        )
-        common_cols = [col for col in df1.columns if col in df2.columns]
-        df1 = df1[common_cols]
-        df2 = df2[common_cols]
-        diff = True
-
-    cols_to_compare = [col for col in df1.columns if col not in ignore]
-    df1 = df1[cols_to_compare]
-    df2 = df2[cols_to_compare]
-
-    if len(df1) != len(df2):
-        WranglerLogger.warning(
-            f" Length is different /" f"DF1: {len(df1)} vs /" f"DF2: {len(df2)}\n /"
-        )
-        diff = True
-
-    diff_df = compare_df_values(df1, df2)
-
-    if not diff_df.empty:
-        WranglerLogger.error(f"!!! Differences dfs: \n{diff_df}")
-        return True
-
-    if not diff:
-        WranglerLogger.info("...no differences in df found.")
-    return diff
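A minimal usage sketch:

```python
import pandas as pd
from network_wrangler.utils.data import diff_dfs

df1 = pd.DataFrame({"model_link_id": [1, 2], "lanes": [2, 3]})
df2 = pd.DataFrame({"model_link_id": [1, 2], "lanes": [2, 4]})
diff_dfs(df1, df2)  # logs the differing `lanes` row and returns True
```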

#### `diff_list_like_series(s1, s2)`

Compare two series that contain list-like items as strings.
Source code in `network_wrangler/utils/data.py`:
def diff_list_like_series(s1, s2) -> bool:
-    """Compare two series that contain list-like items as strings."""
-    diff_df = pd.concat([s1, s2], axis=1, keys=["s1", "s2"])
-    # diff_df["diff"] = diff_df.apply(lambda x: str(x["s1"]) != str(x["s2"]), axis=1)
-    diff_df["diff"] = diff_df.apply(lambda x: compare_lists(x["s1"], x["s2"]), axis=1)
-    if diff_df["diff"].any():
-        WranglerLogger.info("List-Like differences:")
-        WranglerLogger.info(diff_df)
-        return True
-    return False

#### `fk_in_pk(pk, fk, ignore_nan=True)`

Check if all foreign keys are in the primary keys, optionally ignoring NaN.
Source code in `network_wrangler/utils/data.py`:
def fk_in_pk(
-    pk: Union[pd.Series, list], fk: Union[pd.Series, list], ignore_nan: bool = True
-) -> Tuple[bool, list]:
-    """Check if all foreign keys are in the primary keys, optionally ignoring NaN."""
-    if isinstance(fk, list):
-        fk = pd.Series(fk)
-
-    if ignore_nan:
-        fk = fk.dropna()
-
-    missing_flag = ~fk.isin(pk)
-
-    if missing_flag.any():
-        WranglerLogger.warning(
-            f"Following keys referenced in {fk.name} but missing in\
-            primary key table: \n{fk[missing_flag]} "
-        )
-        return False, fk[missing_flag].tolist()
-
-    return True, []
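A minimal usage sketch:

```python
import pandas as pd
from network_wrangler.utils.data import fk_in_pk

node_ids = pd.Series([1, 2, 3], name="model_node_id")
link_a = pd.Series([1, 2, 4], name="A")
valid, missing = fk_in_pk(node_ids, link_a)
# valid is False and missing == [4]: node 4 has no matching primary-key record
```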

#### `list_like_columns(df, item_type=None)`

Find columns in a dataframe that contain list-like items that can't be json-serialized.

Parameters:

| Name | Type | Description | Default |
|------|------|-------------|---------|
| `df` |  | dataframe to check | *required* |
| `item_type` | `type` | if not None, will only return columns where all items are of this type, checking **only** the first item in the column. | `None` |
Source code in `network_wrangler/utils/data.py`:
def list_like_columns(df, item_type: type = None) -> list[str]:
-    """Find columns in a dataframe that contain list-like items that can't be json-serialized.
-
-    Args:
-        df: dataframe to check
-        item_type: if not None, will only return columns where all items are of this type by
-            checking **only** the first item in the column.  Defaults to None.
-    """
-    list_like_columns = []
-
-    for column in df.columns:
-        if df[column].apply(lambda x: isinstance(x, (list, ndarray))).any():
-            if item_type is not None:
-                if not isinstance(df[column].iloc[0], item_type):
-                    continue
-            list_like_columns.append(column)
-    return list_like_columns

#### `segment_data_by_selection(item_list, data, field=None, end_val=0)`

Segment a dataframe or series into before, middle, and end segments based on item_list.

- selected segment = everything from the first to the last item in item_list, inclusive of the first and last items.
- Before segment = everything before.
- After segment = everything after.

Parameters:

| Name | Type | Description | Default |
|------|------|-------------|---------|
| `item_list` | `list` | List of items to segment data by. If longer than two, will only use the first and last items. | *required* |
| `data` | `Union[Series, DataFrame]` | Data to segment into before, middle, and after. | *required* |
| `field` | `str` | If a dataframe, specifies which field to reference. | `None` |
| `end_val` | `int` | Notation for "until the end" or "from the beginning". | `0` |

Raises:

| Type | Description |
|------|-------------|
| `ValueError` | If item list isn't found in data in correct order. |

Returns:

| Name | Type | Description |
|------|------|-------------|
| `tuple` | `tuple[Union[Series, list, DataFrame]]` | data broken out by before, selected segment, and after. |
Source code in `network_wrangler/utils/data.py`:
def segment_data_by_selection(
-    item_list: list,
-    data: Union[list, pd.DataFrame, pd.Series],
-    field: str = None,
-    end_val=0,
-) -> tuple[Union[pd.Series, list, pd.DataFrame]]:
-    """Segment a dataframe or series into before, middle, and end segments based on item_list.
-
-    selected segment = everything from the first to last item in item_list inclusive of the first
-        and last items.
-    Before segment = everything before
-    After segment = everything after
-
-
-    Args:
-        item_list (list): List of items to segment data by. If longer than two, will only
-            use the first and last items.
-        data (Union[pd.Series, pd.DataFrame]): Data to segment into before, middle, and after.
-        field (str, optional): If a dataframe, specifies which field to reference.
-            Defaults to None.
-        end_val (int, optional): Notation for "until the end" or "from the beginning". Defaults to 0.
-
-    Raises:
-        ValueError: If item list isn't found in data in correct order.
-
-    Returns:
-        tuple: data broken out by before, selected segment, and after.
-    """
-    ref_data = data
-    if isinstance(data, pd.DataFrame):
-        ref_data = data[field].tolist()
-    elif isinstance(data, pd.Series):
-        ref_data = data.tolist()
-
-    # ------- Replace "to the end" indicators with first or last value --------
-    start_item, end_item = item_list[0], item_list[-1]
-    if start_item == end_val:
-        start_item = ref_data[0]
-    if end_item == end_val:
-        end_item = ref_data[-1]
-
-    # --------Find the start and end indices -----------------------------------
-    start_idxs = list(set([i for i, item in enumerate(ref_data) if item == start_item]))
-    if not start_idxs:
-        raise ValueError(f"Segment start item: {start_item} not in data.")
-    if len(start_idxs) > 1:
-        WranglerLogger.warning(
-            f"Found multiple starting locations for data segment: {start_item}.\
-                                Choosing first – largest segment being selected."
-        )
-    start_idx = min(start_idxs)
-
-    # find the end node starting from the start index.
-    end_idxs = [i + start_idx for i, item in enumerate(ref_data[start_idx:]) if item == end_item]
-    # WranglerLogger.debug(f"End indexes: {end_idxs}")
-    if not end_idxs:
-        raise ValueError(f"Segment end item: {end_item} not in data after starting idx.")
-    if len(end_idxs) > 1:
-        WranglerLogger.warning(
-            f"Found multiple ending locations for data segment: {end_item}.\
-                                Choosing last – largest segment being selected."
-        )
-    end_idx = max(end_idxs) + 1
-    # WranglerLogger.debug(
-    # f"Segmenting data fr {start_item} idx:{start_idx} to {end_item} idx:{end_idx}.\n{ref_data}")
-    # -------- Extract the segments --------------------------------------------
-    if isinstance(data, pd.DataFrame):
-        before_segment = data.iloc[:start_idx]
-        selected_segment = data.iloc[start_idx:end_idx]
-        after_segment = data.iloc[end_idx:]
-    else:
-        before_segment = data[:start_idx]
-        selected_segment = data[start_idx:end_idx]
-        after_segment = data[end_idx:]
-
-    if isinstance(data, pd.Series) or isinstance(data, pd.DataFrame):
-        before_segment = before_segment.reset_index(drop=True)
-        selected_segment = selected_segment.reset_index(drop=True)
-        after_segment = after_segment.reset_index(drop=True)
-
-    # WranglerLogger.debug(f"Segmented data into before, selected, and after.\n \
-    #    Before:\n{before_segment}\nSelected:\n{selected_segment}\nAfter:\n{after_segment}")
-
-    return before_segment, selected_segment, after_segment
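
A minimal usage sketch (hypothetical sample data; assumes `pandas` and this module are importable):

```python
import pandas as pd

from network_wrangler.utils.data import segment_data_by_selection

stops = pd.DataFrame({"stop_id": [10, 20, 30, 40, 50]})

# Select the segment from stop 20 through stop 40, inclusive.
before, selected, after = segment_data_by_selection([20, 40], stops, field="stop_id")
# before.stop_id -> [10]; selected.stop_id -> [20, 30, 40]; after.stop_id -> [50]
```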
segment_data_by_selection_min_overlap(selection_list, data, field, replacements_list, end_val=0)
Segments data based on item_list reducing overlap with replacement list.

*Selected segment*: everything from the first to last item in the selection list, inclusive of the first and last items, except where the first and last items overlap with the replacement list.
*Before segment*: everything before.
*After segment*: everything after.

Example:

    selection_list = [2, 5]
    data = pd.DataFrame({"i": [1, 2, 3, 4, 5, 6]})
    field = "i"
    replacements_list = [2, 22, 33]
Returns (for the example above):

- `list`: `[22, 33]` (the updated replacements list)
- `tuple[Union[pd.Series, list, pd.DataFrame]]`: `[1]`, `[2, 3, 4, 5]`, `[6]`

Parameters:

- `selection_list` (`list`): List of items to segment data by. If longer than two, only the first and last items are used. Required.
- `data` (`Union[pd.Series, pd.DataFrame]`): Data to segment into before, middle, and after. Required.
- `field` (`str`): Specifies which field to reference. Required.
- `replacements_list` (`list`): List of items to eventually replace the selected segment with. Required.
- `end_val` (`int`, optional): Sentinel value meaning "until the end" or "from the beginning". Defaults to 0.
Returns a tuple containing:

- the updated replacements list
- a tuple of before, selected segment, and after data
Source code in network_wrangler/utils/data.py:
def segment_data_by_selection_min_overlap(
-    selection_list: list,
-    data: pd.DataFrame,
-    field: str,
-    replacements_list: list,
-    end_val=0,
-) -> tuple[list, tuple[Union[pd.Series, list, pd.DataFrame]]]:
-    """Segments data based on item_list reducing overlap with replacement list.
-
-    *selected segment*: everything from the first to last item in item_list inclusive of the first
-        and last items but not if first and last items overlap with replacement list.
-    Before segment = everything before
-    After segment = everything after
-
-    Example:
-    selection_list = [2,5]
-    data = pd.DataFrame({"i":[1,2,3,4,5,6]})
-    field = "i"
-    replacements_list = [2,22,33]
-
-    returns:
-        [22,33]
-        [1], [2,3,4,5], [6]
-
-    Args:
-        selection_list (list): List of items to segment data by. If longer than two, will only
-            use the first and last items.
-        data (Union[pd.Series, pd.DataFrame]): Data to segment into before, middle, and after.
-        field (str): Specifies which field to reference.
-        replacements_list (list): List of items to eventually replace the selected segment with.
-        end_val (int, optional): Sentinel value meaning "until the end" or "from the beginning". Defaults to 0.
-
-    Returns: tuple containing:
-        - updated replacement_list
-        - tuple of before, selected segment, and after data
-    """
-    before_segment, segment_df, after_segment = segment_data_by_selection(
-        selection_list, data, field=field, end_val=end_val
-    )
-
-    if replacements_list[0] == segment_df[field].iat[0]:
-        # move first item from selected segment to the before_segment df
-        replacements_list = replacements_list[1:]
-        before_segment = pd.concat(
-            [before_segment, segment_df.iloc[:1]], ignore_index=True, sort=False
-        )
-        segment_df = segment_df.iloc[1:]
-        WranglerLogger.debug(f"item start overlaps with replacement. Repl: {replacements_list}")
-    if replacements_list and replacements_list[-1] == segment_df[field].iat[-1]:
-        # move last item from selected segment to the after_segment df
-        replacements_list = replacements_list[:-1]
-        after_segment = pd.concat(
-            [segment_df.iloc[-1:], after_segment], ignore_index=True, sort=False
-        )
-        segment_df = segment_df.iloc[:-1]
-        WranglerLogger.debug(f"item end overlaps with replacement. Repl: {replacements_list}")
-
-    return replacements_list, (before_segment, segment_df, after_segment)
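
A usage sketch based on the docstring's example values (per the code above, the leading `2` in the replacements list overlaps the selected segment's start, so it is dropped and that row shifts into the before segment):

```python
import pandas as pd

from network_wrangler.utils.data import segment_data_by_selection_min_overlap

data = pd.DataFrame({"i": [1, 2, 3, 4, 5, 6]})
replacements, (before, selected, after) = segment_data_by_selection_min_overlap(
    selection_list=[2, 5],
    data=data,
    field="i",
    replacements_list=[2, 22, 33],
)
# replacements -> [22, 33]
```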
update_df_by_col_value(destination_df, source_df, join_col, properties=None, fail_if_missing=True)
Updates destination_df with ALL values in source_df for specified props with same join_col.

source_df can contain a subset of the IDs in destination_df. If fail_if_missing is true, destination_df must contain all the IDs in source_df, ensuring that all source_df values end up in the resulting dataframe.
    >> destination_df
    trip_id  property1  property2
    1        10         100
    2        20         200
    3        30         300
    4        40         400

    >> source_df
    trip_id  property1  property2
    2        25         250
    3        35         350

    >> updated_df
       trip_id  property1  property2
    0        1         10        100
    1        2         25        250
    2        3         35        350
    3        4         40        400
Parameters:

- `destination_df` (`pd.DataFrame`): Dataframe to modify. Required.
- `source_df` (`pd.DataFrame`): Dataframe with updated columns. Required.
- `join_col` (`str`): Column to join on. Required.
- `properties` (`list[str]`, optional): List of properties to update. If None, defaults to all columns shared with source_df. Defaults to None.
- `fail_if_missing` (`bool`): If True, raise an error if destination_df is missing IDs that exist in source_df. Defaults to True.
Source code in network_wrangler/utils/data.py:
def update_df_by_col_value(
-    destination_df: pd.DataFrame,
-    source_df: pd.DataFrame,
-    join_col: str,
-    properties: list[str] = None,
-    fail_if_missing: bool = True,
-) -> pd.DataFrame:
-    """Updates destination_df with ALL values in source_df for specified props with same join_col.
-
-    Source_df can contain a subset of IDs of destination_df.
-    If fail_if_missing is true, destination_df must have all
-    the IDs in source_df - ensuring all source_df values are contained in resulting df.
-
-    ```
-    >> destination_df
-    trip_id  property1  property2
-    1         10      100
-    2         20      200
-    3         30      300
-    4         40      400
-
-    >> source_df
-    trip_id  property1  property2
-    2         25      250
-    3         35      350
-
-    >> updated_df
-    trip_id  property1  property2
-    0        1       10      100
-    1        2       25      250
-    2        3       35      350
-    3        4       40      400
-    ```
-
-    Args:
-        destination_df (pd.DataFrame): Dataframe to modify.
-        source_df (pd.DataFrame): Dataframe with updated columns
-        join_col (str): column to join on
-        properties (list[str]): List of properties to use. If None, will default to all
-            in source_df.
-        fail_if_missing (bool): If True, will raise an error if there are missing IDs in
-            destination_df that exist in source_df.
-    """
-    # 1. Identify which properties should be updated; and if they exist in both DFs.
-    if properties is None:
-        properties = [
-            c for c in source_df.columns if c in destination_df.columns and c != join_col
-        ]
-    else:
-        _dest_miss = _df_missing_cols(destination_df, properties + [join_col])
-        if _dest_miss:
-            raise MissingPropertiesError(f"Properties missing from destination_df: {_dest_miss}")
-        _source_miss = _df_missing_cols(source_df, properties + [join_col])
-        if _source_miss:
-            raise MissingPropertiesError(f"Properties missing from source_df: {_source_miss}")
-
-    # 2. Identify if there are IDs missing from destination_df that exist in source_df
-    if fail_if_missing:
-        missing_ids = set(source_df[join_col]) - set(destination_df[join_col])
-        if missing_ids:
-            raise InvalidJoinFieldError(f"IDs missing from source_df: \n{missing_ids}")
-
-    WranglerLogger.debug(f"Updating properties for {len(source_df)} records: {properties}.")
-
-    if not source_df[join_col].is_unique:
-        raise InvalidJoinFieldError(
-            f"Can't join from source_df when join_col: {join_col} is not unique."
-        )
-
-    if not destination_df[join_col].is_unique:
-        return _update_props_from_one_to_many(destination_df, source_df, join_col, properties)
-
-    return _update_props_for_common_idx(destination_df, source_df, join_col, properties)
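
A minimal sketch mirroring the docstring example (hypothetical values):

```python
import pandas as pd

from network_wrangler.utils.data import update_df_by_col_value

destination_df = pd.DataFrame({"trip_id": [1, 2, 3, 4], "property1": [10, 20, 30, 40]})
source_df = pd.DataFrame({"trip_id": [2, 3], "property1": [25, 35]})

updated_df = update_df_by_col_value(destination_df, source_df, join_col="trip_id")
# updated_df.property1 -> [10, 25, 35, 40]
```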
validate_existing_value_in_df(df, idx, field, expected_value)
Validate if df[field]==expected_value for all indices in idx.

Source code in network_wrangler/utils/data.py:
def validate_existing_value_in_df(df: pd.DataFrame, idx: list[int], field: str, expected_value):
-    """Validate if df[field]==expected_value for all indices in idx."""
-    if field not in df.columns:
-        WranglerLogger.warning(f"!! {field} Not an existing field.")
-        return False
-    if not df.loc[idx, field].eq(expected_value).all():
-        WranglerLogger.warning(
-            f"Existing value defined for {field} in project card \
-            does not match the value in the selection links. \n\
-            Specified Existing: {expected_value}\n\
-            Actual Existing: \n {df.loc[idx, field]}."
-        )
-        return False
-    return True
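
For orientation, a small sketch (hypothetical dataframe):

```python
import pandas as pd

from network_wrangler.utils.data import validate_existing_value_in_df

links_df = pd.DataFrame({"lanes": [2, 2, 3]})

validate_existing_value_in_df(links_df, idx=[0, 1], field="lanes", expected_value=2)  # True
validate_existing_value_in_df(links_df, idx=[1, 2], field="lanes", expected_value=2)  # False, logs warning
```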
network_wrangler.utils.geo
Helper geographic manipulation functions.

InvalidCRSError

Bases: Exception
Raised when a point is not valid for a given coordinate reference system.

Source code in network_wrangler/utils/geo.py:
class InvalidCRSError(Exception):
-    """Raised when a point is not valid for a given coordinate reference system."""
-
-    pass
MissingNodesError

Bases: Exception

Raised when referenced nodes are missing from the network.

Source code in network_wrangler/utils/geo.py:
class MissingNodesError(Exception):
-    """Raised when referenced nodes are missing from the network."""
-
-    pass
check_point_valid_for_crs(point, crs)

Check if a point is valid for a given coordinate reference system.

Parameters:

- `point` (`Point`): Shapely Point. Required.
- `crs` (`int`): Coordinate reference system as an EPSG code. Required.
Source code in network_wrangler/utils/geo.py:
def check_point_valid_for_crs(point: Point, crs: int):
-    """Check if a point is valid for a given coordinate reference system.
-
-    Args:
-        point: Shapely Point
-        crs: coordinate reference system in EPSG code
-
-    raises: InvalidCRSError if point is not valid for the given crs
-    """
-    crs = CRS.from_user_input(crs)
-    minx, miny, maxx, maxy = crs.area_of_use.bounds
-    ok_bounds = True
-    if not minx <= point.x <= maxx:
-        WranglerLogger.error(f"Invalid X coordinate for CRS {crs}: {point.x}")
-        ok_bounds = False
-    if not miny <= point.y <= maxy:
-        WranglerLogger.error(f"Invalid Y coordinate for CRS {crs}: {point.y}")
-        ok_bounds = False
-
-    if not ok_bounds:
-        raise InvalidCRSError(f"Invalid coordinate for CRS {crs}: {point.x}, {point.y}")
-
get_bearing(lat1, lon1, lat2, lon2)

Calculate the bearing (forward azimuth) between the two points.

Returns: bearing in radians.
Source code in network_wrangler/utils/geo.py:
def get_bearing(lat1, lon1, lat2, lon2):
-    """Calculate the bearing (forward azimuth) b/w the two points.
-
-    returns: bearing in radians
-    """
-    # bearing in degrees
-    brng = Geodesic.WGS84.Inverse(lat1, lon1, lat2, lon2)["azi1"]
-
-    # convert bearing to radians
-    brng = math.radians(brng)
-
-    return brng
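
For example (illustrative coordinates; the result is geodesic, so a due-east pair is close to, but not exactly, 90 degrees):

```python
import math

from network_wrangler.utils.geo import get_bearing

brng = get_bearing(45.0, -93.0, 45.0, -92.0)  # lat1, lon1, lat2, lon2
print(math.degrees(brng))  # ~89.6 degrees
```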
get_bounding_polygon(boundary_geocode=None, boundary_file=None, boundary_gdf=None, crs=LAT_LON_CRS)

Get the bounding polygon for a given boundary.

Will return None if no arguments are given; will raise a ValueError if more than one is given.

This function retrieves the bounding polygon for a given boundary. The boundary can be provided as a GeoDataFrame, a geocode string or dictionary, or a boundary file. The resulting polygon geometry is returned as a GeoSeries.

Parameters:

- `boundary_geocode` (`Union[str, dict]`, optional): A geocode string or dictionary representing the boundary. Defaults to None.
- `boundary_file` (`Union[str, Path]`, optional): A path to the boundary file. Only used if boundary_geocode is None. Defaults to None.
- `boundary_gdf` (`gpd.GeoDataFrame`, optional): A GeoDataFrame representing the boundary. Only used if boundary_geocode and boundary_file are None. Defaults to None.
- `crs` (`int`, optional): The coordinate reference system (CRS) EPSG code. Defaults to LAT_LON_CRS (4326, WGS84).
Returns:

- `Union[None, gpd.GeoSeries]`: The polygon geometry representing the bounding polygon.
Source code in network_wrangler/utils/geo.py:
def get_bounding_polygon(
-    boundary_geocode: Optional[Union[str, dict]] = None,
-    boundary_file: Optional[Union[str, Path]] = None,
-    boundary_gdf: Optional[gpd.GeoDataFrame] = None,
-    crs: int = LAT_LON_CRS,  # WGS84
-) -> Union[None, gpd.GeoSeries]:
-    """Get the bounding polygon for a given boundary.
-
-    Will return None if no arguments given. Will raise a ValueError if more than one given.
-
-    This function retrieves the bounding polygon for a given boundary. The boundary can be provided
-    as a GeoDataFrame, a geocode string or dictionary, or a boundary file. The resulting polygon
-    geometry is returned as a GeoSeries.
-
-    Args:
-        boundary_geocode (Union[str, dict], optional): A geocode string or dictionary
-            representing the boundary. Defaults to None.
-        boundary_file (Union[str, Path], optional): A path to the boundary file. Only used if
-            boundary_geocode is None. Defaults to None.
-        boundary_gdf (gpd.GeoDataFrame, optional): A GeoDataFrame representing the boundary.
-            Only used if boundary_geocode and boundary_file are None. Defaults to None.
-        crs (int, optional): The coordinate reference system (CRS) code. Defaults to 4326 (WGS84).
-
-    Returns:
-        gpd.GeoSeries: The polygon geometry representing the bounding polygon.
-    """
-    import osmnx as ox
-
-    nargs = sum(x is not None for x in [boundary_gdf, boundary_geocode, boundary_file])
-    if nargs == 0:
-        return None
-    if nargs != 1:
-        raise ValueError(
-            "Exacly one of boundary_gdf, boundary_geocode, or boundary_shp must \
-                         be provided"
-        )
-
-    OK_BOUNDARY_SUFF = [".shp", ".geojson", ".parquet"]
-
-    if boundary_geocode is not None:
-        boundary_gdf = ox.geocode_to_gdf(boundary_geocode)
-    if boundary_file is not None:
-        boundary_file = Path(boundary_file)
-        if boundary_file.suffix not in OK_BOUNDARY_SUFF:
-            raise ValueError(
-                f"Boundary file must have one of the following suffixes: {OK_BOUNDARY_SUFF}"
-            )
-        if not boundary_file.exists():
-            raise FileNotFoundError(f"Boundary file {boundary_file} does not exist")
-        if boundary_file.suffix == ".parquet":
-            boundary_gdf = gpd.read_parquet(boundary_file)
-        else:
-            boundary_gdf = gpd.read_file(boundary_file)
-            if boundary_file.suffix == ".geojson":  # geojson standard is WGS84
-                boundary_gdf.crs = crs
-
-    if boundary_gdf.crs is not None:
-        boundary_gdf = boundary_gdf.to_crs(crs)
-    # make sure boundary_gdf is a polygon
-    if len(boundary_gdf.geom_type[boundary_gdf.geom_type != "Polygon"]) > 0:
-        raise ValueError("boundary_gdf must all be Polygons")
-    # get the boundary as a single polygon
-    boundary_gs = gpd.GeoSeries([boundary_gdf.geometry.unary_union], crs=crs)
-
-    return boundary_gs
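
A minimal sketch using an in-memory GeoDataFrame (hypothetical box; note the function imports `osmnx` unconditionally, so it must be installed, and the geocode path requires a network call):

```python
import geopandas as gpd
from shapely.geometry import box

from network_wrangler.utils.geo import get_bounding_polygon

boundary_gdf = gpd.GeoDataFrame(geometry=[box(-93.2, 44.9, -93.0, 45.1)], crs=4326)
boundary_gs = get_bounding_polygon(boundary_gdf=boundary_gdf)
print(boundary_gs.iloc[0].bounds)  # (-93.2, 44.9, -93.0, 45.1)
```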
get_point_geometry_from_linestring(polyline_geometry, pos=0)

Get a point geometry from a linestring geometry.

Parameters:

- `polyline_geometry`: Shapely LineString instance. Required.
- `pos` (`int`): Position in the linestring to get the point from. Defaults to 0.
Source code in network_wrangler/utils/geo.py:
def get_point_geometry_from_linestring(polyline_geometry, pos: int = 0):
-    """Get a point geometry from a linestring geometry.
-
-    Args:
-        polyline_geometry: shapely LineString instance
-        pos: position in the linestring to get the point from. Defaults to 0.
-    """
-    # WranglerLogger.debug(
-    #    f"get_point_geometry_from_linestring.polyline_geometry.coords[0]: \
-    #    {polyline_geometry.coords[0]}."
-    # )
-
-    # Note: when upgrading to shapely 2.0, will need to use following command
-    # _point_coords = get_coordinates(polyline_geometry).tolist()[pos]
-    return point_from_xy(*polyline_geometry.coords[pos])
length_of_linestring_miles(gdf)

Returns a Series with the linestring length in miles.

Parameters:

- `gdf` (`Union[gpd.GeoSeries, gpd.GeoDataFrame]`): GeoDataFrame with linestring geometry. If given a GeoSeries, it will be converted to a GeoDataFrame. Required.
Source code in network_wrangler/utils/geo.py:
def length_of_linestring_miles(gdf: Union[gpd.GeoSeries, gpd.GeoDataFrame]) -> pd.Series:
-    """Returns a Series with the linestring length in miles.
-
-    Args:
-        gdf: GeoDataFrame with linestring geometry.  If given a GeoSeries will attempt to convert
-            to a GeoDataFrame.
-    """
-    # WranglerLogger.debug(f"length_of_linestring_miles.gdf input:\n{gdf}.")
-    if isinstance(gdf, gpd.GeoSeries):
-        gdf = gpd.GeoDataFrame(geometry=gdf)
-
-    p_crs = gdf.estimate_utm_crs()
-    gdf = gdf.to_crs(p_crs)
-    METERS_IN_MILES = 1609.34
-    length_miles = gdf.geometry.length / METERS_IN_MILES
-    length_s = pd.Series(length_miles, index=gdf.index)
-
-    return length_s
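
For example (illustrative line; ~0.02 degrees of longitude at 45N is roughly one mile):

```python
import geopandas as gpd
from shapely.geometry import LineString

from network_wrangler.utils.geo import length_of_linestring_miles

gs = gpd.GeoSeries([LineString([(-93.10, 44.95), (-93.08, 44.95)])], crs=4326)
print(length_of_linestring_miles(gs))  # ~0.98 miles
```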
linestring_from_lats_lons(df, lat_fields, lon_fields)

Create a LineString geometry from a DataFrame with lon/lat fields.

Parameters:

- `df`: DataFrame with columns for the lon/lat fields. Required.
- `lat_fields`: List of column names for the lat fields. Required.
- `lon_fields`: List of column names for the lon fields. Required.
Source code in network_wrangler/utils/geo.py:
def linestring_from_lats_lons(df, lat_fields, lon_fields) -> gpd.GeoSeries:
-    """Create a LineString geometry from a DataFrame with lon/lat fields.
-
-    Args:
-        df: DataFrame with columns for lon/lat fields.
-        lat_fields: list of column names for the lat fields.
-        lon_fields: list of column names for the lon fields.
-    """
-    if len(lon_fields) != len(lat_fields):
-        raise ValueError("lon_fields and lat_fields lists must have the same length")
-
-    line_geometries = gpd.GeoSeries(
-        [
-            LineString([(row[lon], row[lat]) for lon, lat in zip(lon_fields, lat_fields)])
-            for _, row in df.iterrows()
-        ]
-    )
-
-    return gpd.GeoSeries(line_geometries)
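
A small sketch (hypothetical column names):

```python
import pandas as pd

from network_wrangler.utils.geo import linestring_from_lats_lons

df = pd.DataFrame({"A_lon": [-93.10], "A_lat": [44.95], "B_lon": [-93.08], "B_lat": [44.96]})
geoms = linestring_from_lats_lons(df, lat_fields=["A_lat", "B_lat"], lon_fields=["A_lon", "B_lon"])
# geoms[0] -> LINESTRING (-93.1 44.95, -93.08 44.96)
```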
linestring_from_nodes(links_df, nodes_df, from_node='A', to_node='B', node_pk='model_node_id')

Creates a LineString geometry GeoSeries from a DataFrame of links and a DataFrame of nodes.

Parameters:

- `links_df` (`pd.DataFrame`): DataFrame with columns for from_node and to_node. Required.
- `nodes_df` (`gpd.GeoDataFrame`): GeoDataFrame with geometry column. Required.
- `from_node` (`str`): Column name in links_df for the from node. Defaults to "A".
- `to_node` (`str`): Column name in links_df for the to node. Defaults to "B".
- `node_pk` (`str`): Primary key column name in nodes_df. Defaults to "model_node_id".
Source code in network_wrangler/utils/geo.py:
def linestring_from_nodes(
-    links_df: pd.DataFrame,
-    nodes_df: gpd.GeoDataFrame,
-    from_node: str = "A",
-    to_node: str = "B",
-    node_pk: str = "model_node_id",
-) -> gpd.GeoSeries:
-    """Creates a LineString geometry GeoSeries from a DataFrame of links and a DataFrame of nodes.
-
-    Args:
-        links_df: DataFrame with columns for from_node and to_node.
-        nodes_df: GeoDataFrame with geometry column.
-        from_node: column name in links_df for the from node. Defaults to "A".
-        to_node: column name in links_df for the to node. Defaults to "B".
-        node_pk: primary key column name in nodes_df. Defaults to "model_node_id".
-    """
-    assert "geometry" in nodes_df.columns, "nodes_df must have a 'geometry' column"
-
-    idx_name = "index" if links_df.index.name is None else links_df.index.name
-    # WranglerLogger.debug(f"Index name: {idx_name}")
-    required_link_cols = [from_node, to_node]
-
-    if not all([col in links_df.columns for col in required_link_cols]):
-        WranglerLogger.error(
-            f"links_df.columns missing required columns.\n\
-                            links_df.columns: {links_df.columns}\n\
-                            required_link_cols: {required_link_cols}"
-        )
-        raise ValueError(
-            f"links_df must have columns {required_link_cols} to create linestring from nodes"
-        )
-
-    links_geo_df = copy.deepcopy(links_df[required_link_cols])
-    # need to continuously reset the index to make sure the index is the same as the link index
-    links_geo_df = (
-        links_geo_df.reset_index()
-        .merge(
-            nodes_df[[node_pk, "geometry"]],
-            left_on=from_node,
-            right_on=node_pk,
-            how="left",
-        )
-        .set_index(idx_name)
-    )
-
-    links_geo_df = links_geo_df.rename(columns={"geometry": "geometry_A"})
-
-    links_geo_df = (
-        links_geo_df.reset_index()
-        .merge(
-            nodes_df[[node_pk, "geometry"]],
-            left_on=to_node,
-            right_on=node_pk,
-            how="left",
-        )
-        .set_index(idx_name)
-    )
-
-    links_geo_df = links_geo_df.rename(columns={"geometry": "geometry_B"})
-
-    # makes sure all nodes exist
-    _missing_geo_links_df = links_geo_df[
-        links_geo_df["geometry_A"].isnull() | links_geo_df["geometry_B"].isnull()
-    ]
-    if not _missing_geo_links_df.empty:
-        missing_nodes = _missing_geo_links_df[[from_node, to_node]].values
-        WranglerLogger.error(
-            f"Cannot create link geometry from nodes because the nodes are\
-                             missing from the network. Missing nodes: {missing_nodes}"
-        )
-        raise MissingNodesError("Specified from/to nodes are missing in nodes_df")
-
-    # create geometry from points
-    links_geo_df["geometry"] = links_geo_df.apply(
-        lambda row: LineString([row["geometry_A"], row["geometry_B"]]), axis=1
-    )
-
-    # convert to GeoDataFrame
-    links_gdf = gpd.GeoDataFrame(links_geo_df["geometry"], geometry=links_geo_df["geometry"])
-    return links_gdf["geometry"]
location_ref_from_point(geometry, sequence=1, bearing=None, distance_to_next_ref=None)

Generates a shared street point location reference.

Parameters:

- `geometry` (`Point`): Point shapely geometry. Required.
- `sequence` (`int`, optional): Sequence if part of polyline. Defaults to 1.
- `bearing` (`float`, optional): Direction of line if part of polyline. Defaults to None.
- `distance_to_next_ref` (`float`, optional): Distance to next point if part of polyline. Defaults to None.
Returns:

- `LocationReference`: As defined by the sharedStreets.io schema.
Source code in network_wrangler/utils/geo.py:
def location_ref_from_point(
-    geometry: Point,
-    sequence: int = 1,
-    bearing: float = None,
-    distance_to_next_ref: float = None,
-) -> LocationReference:
-    """Generates a shared street point location reference.
-
-    Args:
-        geometry (Point): Point shapely geometry
-        sequence (int, optional): Sequence if part of polyline. Defaults to 1.
-        bearing (float, optional): Direction of line if part of polyline. Defaults to None.
-        distance_to_next_ref (float, optional): Distance to next point if part of polyline.
-            Defaults to None.
-
-    Returns:
-        LocationReference: As defined by sharedStreets.io schema
-    """
-    lr = {
-        "point": LatLongCoordinates(geometry.coords[0]),
-    }
-
-    for arg in ["sequence", "bearing", "distance_to_next_ref"]:
-        if locals()[arg] is not None:
-            lr[arg] = locals()[arg]
-
-    return LocationReference(**lr)
location_refs_from_linestring(geometry)

Generates a shared street location reference from linestring.

Parameters:

- `geometry` (`LineString`): Shapely LineString instance. Required.
Returns:

- `List[LocationReference]`: As defined by the sharedStreets.io schema.
Source code in network_wrangler/utils/geo.py:
def location_refs_from_linestring(geometry: LineString) -> List[LocationReference]:
-    """Generates a shared street location reference from linestring.
-
-    Args:
-        geometry (LineString): Shapely LineString instance
-
-    Returns:
-        LocationReferences: As defined by sharedStreets.io schema
-    """
-    # Iterate as Points: raw coordinate tuples have no .distance or .x/.y attributes.
-    points = [Point(c) for c in geometry.coords]
-    return [
-        location_ref_from_point(
-            point,
-            sequence=i + 1,
-            distance_to_next_ref=point.distance(points[i + 1]),
-            bearing=get_bearing(point.y, point.x, points[i + 1].y, points[i + 1].x),
-        )
-        for i, point in enumerate(points[:-1])
-    ]
offset_geometry_meters(geo_s, offset_distance_meters)

Offset a GeoSeries of LineStrings by a given distance in meters.

Parameters:

- `geo_s` (`gpd.GeoSeries`): GeoSeries of LineStrings to offset. Required.
- `offset_distance_meters` (`float`): Distance in meters to offset the LineStrings. Required.
Source code in network_wrangler/utils/geo.py:
def offset_geometry_meters(geo_s: gpd.GeoSeries, offset_distance_meters: float) -> gpd.GeoSeries:
-    """Offset a GeoSeries of LineStrings by a given distance in meters.
-
-    Args:
-        geo_s: GeoSeries of LineStrings to offset.
-        offset_distance_meters: distance in meters to offset the LineStrings.
-    """
-    og_crs = geo_s.crs
-    geo_s = geo_s.to_crs(METERS_CRS)
-    offset_geo = geo_s.apply(lambda x: x.offset_curve(offset_distance_meters))
-    return offset_geo.to_crs(og_crs)
offset_point_with_distance_and_bearing(lon, lat, distance, bearing)

Get the new lon-lat (in degrees) given current point (lon-lat), distance and bearing.

Parameters:

- `lon` (`float`): Longitude of original point. Required.
- `lat` (`float`): Latitude of original point. Required.
- `distance` (`float`): Distance in meters to offset point by. Required.
- `bearing` (`float`): Direction to offset point to, in radians. Required.
Source code in network_wrangler/utils/geo.py:
def offset_point_with_distance_and_bearing(
-    lon: float, lat: float, distance: float, bearing: float
-) -> List[float]:
-    """Get the new lon-lat (in degrees) given current point (lon-lat), distance and bearing.
-
-    Args:
-        lon: longitude of original point
-        lat: latitude of original point
-        distance: distance in meters to offset point by
-        bearing: direction to offset point to in radians
-
-    returns: list of new offset lon-lat
-    """
-    # Earth's radius in meters
-    radius = 6378137
-
-    # convert the lat long from degree to radians
-    lat_radians = math.radians(lat)
-    lon_radians = math.radians(lon)
-
-    # calculate the new lat long in radians
-    out_lat_radians = math.asin(
-        math.sin(lat_radians) * math.cos(distance / radius)
-        + math.cos(lat_radians) * math.sin(distance / radius) * math.cos(bearing)
-    )
-
-    out_lon_radians = lon_radians + math.atan2(
-        math.sin(bearing) * math.sin(distance / radius) * math.cos(lat_radians),
-        math.cos(distance / radius) - math.sin(lat_radians) * math.sin(out_lat_radians),
-    )
-    # convert the new lat long back to degree
-    out_lat = math.degrees(out_lat_radians)
-    out_lon = math.degrees(out_lon_radians)
-
-    return [out_lon, out_lat]
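
For example (illustrative point; 100 meters due east at 45N is about 0.00127 degrees of longitude):

```python
import math

from network_wrangler.utils.geo import offset_point_with_distance_and_bearing

lon, lat = offset_point_with_distance_and_bearing(-93.0, 45.0, 100, math.pi / 2)
# lon -> ~-92.99873; lat is essentially unchanged
```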
point_from_xy(x, y, xy_crs=LAT_LON_CRS, point_crs=LAT_LON_CRS)

Creates point geometry from x and y coordinates.

Parameters:

- `x`: x coordinate, in xy_crs. Required.
- `y`: y coordinate, in xy_crs. Required.
- `xy_crs` (`int`): Coordinate reference system EPSG code for x/y inputs. Defaults to LAT_LON_CRS (4326, WGS84).
- `point_crs` (`int`): Coordinate reference system EPSG code for point output. Defaults to LAT_LON_CRS (4326, WGS84).
Source code in network_wrangler/utils/geo.py:
def point_from_xy(x, y, xy_crs: int = LAT_LON_CRS, point_crs: int = LAT_LON_CRS):
-    """Creates point geometry from x and y coordinates.
-
-    Args:
-        x: x coordinate, in xy_crs
-        y: y coordinate, in xy_crs
-        xy_crs: coordinate reference system in EPSG code for x/y inputs. Defaults to 4326 (WGS84)
-        point_crs: coordinate reference system in EPSG code for point output.
-            Defaults to 4326 (WGS84)
-
-    Returns: Shapely Point in point_crs
-    """
-    point = Point(x, y)
-
-    if xy_crs == point_crs:
-        check_point_valid_for_crs(point, point_crs)
-        return point
-
-    if (xy_crs, point_crs) not in transformers:
-        # store transformers in dictionary because they are an "expensive" operation
-        transformers[(xy_crs, point_crs)] = Transformer.from_proj(
-            Proj(init="epsg:" + str(xy_crs)),
-            Proj(init="epsg:" + str(point_crs)),
-            always_xy=True,  # required b/c Proj v6+ uses lon/lat instead of x/y
-        )
-
-    return transform(transformers[(xy_crs, point_crs)].transform, point)
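
A short sketch (illustrative coordinates; EPSG:26915 is UTM zone 15N):

```python
from network_wrangler.utils.geo import point_from_xy

pt = point_from_xy(-93.0, 45.0)  # already WGS84; validated and returned as-is

pt_wgs84 = point_from_xy(481394.0, 4982950.0, xy_crs=26915, point_crs=4326)
# pt_wgs84 -> a Point near lon -93.2, lat 45.0
```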
to_points_gdf(table, ref_nodes_df=None, ref_road_net=None)

Convert a table to a GeoDataFrame.

If the table is already a GeoDataFrame, return it as is. Otherwise, attempt to convert the table to a GeoDataFrame using the following methods:

1. If the table has a 'geometry' column, return a GeoDataFrame using that column.
2. If the table has 'lat' and 'lon' columns, return a GeoDataFrame using those columns.
3. If the table has a '*model_node_id' or 'stop_id' column, return a GeoDataFrame using that column and the nodes_df provided.

If none of the above, raise a ValueError.

Parameters:

- `table` (`pd.DataFrame`): DataFrame to convert to a GeoDataFrame. Required.
- `ref_nodes_df` (`gpd.GeoDataFrame`, optional): GeoDataFrame of nodes used to convert model_node_id to geometry. Defaults to None.
- `ref_road_net` (`RoadwayNetwork`, optional): RoadwayNetwork object used to convert model_node_id to geometry. Defaults to None.
Returns:

- `gpd.GeoDataFrame`: GeoDataFrame representation of the table.
Source code in network_wrangler/utils/geo.py:
def to_points_gdf(
-    table: pd.DataFrame,
-    ref_nodes_df: gpd.GeoDataFrame = None,
-    ref_road_net: "RoadwayNetwork" = None,
-) -> gpd.GeoDataFrame:
-    """Convert a table to a GeoDataFrame.
-
-    If the table is already a GeoDataFrame, return it as is. Otherwise, attempt to convert the
-    table to a GeoDataFrame using the following methods:
-    1. If the table has a 'geometry' column, return a GeoDataFrame using that column.
-    2. If the table has 'lat' and 'lon' columns, return a GeoDataFrame using those columns.
-    3. If the table has a '*model_node_id' or 'stop_id' column, return a GeoDataFrame using that column and the
-         nodes_df provided.
-    If none of the above, raise a ValueError.
-
-    Args:
-        table: DataFrame to convert to GeoDataFrame.
-        ref_nodes_df: GeoDataFrame of nodes to use to convert model_node_id to geometry.
-        ref_road_net: RoadwayNetwork object to use to convert model_node_id to geometry.
-
-    Returns:
-        GeoDataFrame: GeoDataFrame representation of the table.
-    """
-    if isinstance(table, gpd.GeoDataFrame):
-        return table
-
-    WranglerLogger.debug("Converting GTFS table to GeoDataFrame")
-    if "geometry" in table.columns:
-        return gpd.GeoDataFrame(table, geometry="geometry")
-
-    lat_cols = list(filter(lambda col: "lat" in col, table.columns))
-    lon_cols = list(filter(lambda col: "lon" in col, table.columns))
-    model_node_id_cols = [
-        c for c in ["model_node_id", "stop_id", "shape_model_node_id"] if c in table.columns
-    ]
-
-    if not (lat_cols and lon_cols) and not model_node_id_cols:
-        WranglerLogger.error(
-            "Needed either lat/long or *model_node_id columns to convert \
-            to GeoDataFrame. Columns found: {table.columns}"
-        )
-        if not (lat_cols and lon_cols):
-            WranglerLogger.error("No lat/long cols found.")
-        if not model_node_id_cols:
-            WranglerLogger.error("No *model_node_id cols found.")
-        raise ValueError(
-            "Could not find lat/long, geometry columns or *model_node_id column in \
-                         table necessary to convert to GeoDataFrame"
-        )
-
-    if lat_cols and lon_cols:
-        # using first found lat and lon columns
-        return gpd.GeoDataFrame(
-            table,
-            geometry=gpd.points_from_xy(table[lon_cols[0]], table[lat_cols[0]]),
-            crs="EPSG:4326",
-        )
-
-    if model_node_id_cols:
-        node_id_col = model_node_id_cols[0]
-
-        if ref_nodes_df is None:
-            if ref_road_net is None:
-                raise ValueError(
-                    "Must provide either nodes_df or road_net to convert \
-                                 model_node_id to geometry"
-                )
-            ref_nodes_df = ref_road_net.nodes_df
-
-        WranglerLogger.debug("Converting table to GeoDataFrame using model_node_id")
-
-        _table = table.merge(
-            ref_nodes_df[["model_node_id", "geometry"]],
-            left_on=node_id_col,
-            right_on="model_node_id",
-        )
-        return gpd.GeoDataFrame(_table, geometry="geometry")
-
-    raise ValueError(
-        "Could not find lat/long, geometry columns or *model_node_id column in table \
-                     necessary to convert to GeoDataFrame"
-    )
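
A minimal sketch using GTFS-style stop columns (hypothetical data; the lat/lon path needs no reference network):

```python
import pandas as pd

from network_wrangler.utils.geo import to_points_gdf

stops = pd.DataFrame(
    {"stop_id": [1, 2], "stop_lat": [44.95, 44.96], "stop_lon": [-93.10, -93.08]}
)
stops_gdf = to_points_gdf(stops)  # uses the first lat/lon columns found, CRS EPSG:4326
```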
update_nodes_in_linestring_geometry(original_df, updated_nodes_df, position)

Updates the nodes in a linestring geometry and returns updated geometry.

Parameters:

- `original_df` (`gpd.GeoDataFrame`): GeoDataFrame with the `model_node_id` and linestring geometry. Required.
- `updated_nodes_df` (`gpd.GeoDataFrame`): GeoDataFrame with updated node geometries. Required.
- `position` (`int`): Position in the linestring to update with the node. Required.
Source code in network_wrangler/utils/geo.py:
def update_nodes_in_linestring_geometry(
-    original_df: gpd.GeoDataFrame,
-    updated_nodes_df: gpd.GeoDataFrame,
-    position: int,
-) -> gpd.GeoSeries:
-    """Updates the nodes in a linestring geometry and returns updated geometry.
-
-    Args:
-        original_df: GeoDataFrame with the `model_node_id` and linestring geometry
-        updated_nodes_df: GeoDataFrame with updated node geometries.
-        position: position in the linestring to update with the node.
-    """
-    LINK_FK_NODE = ["A", "B"]
-    original_index = original_df.index
-
-    updated_df = original_df.reset_index().merge(
-        updated_nodes_df[["model_node_id", "geometry"]],
-        left_on=LINK_FK_NODE[position],
-        right_on="model_node_id",
-        suffixes=("", "_node"),
-    )
-
-    updated_df["geometry"] = updated_df.apply(
-        lambda row: update_points_in_linestring(
-            row["geometry"], row["geometry_node"].coords[0], position
-        ),
-        axis=1,
-    )
-
-    updated_df = updated_df.reset_index().set_index(original_index.names)
-
-    WranglerLogger.debug(f"updated_df - AFTER: \n {updated_df.geometry}")
-    return updated_df["geometry"]
update_point_geometry(df, ref_point_df, lon_field='X', lat_field='Y', id_field='model_node_id', ref_lon_field='X', ref_lat_field='Y', ref_id_field='model_node_id')

Returns copy of df with lat and long fields updated with geometry from ref_point_df.

-

NOTE: does not update “geometry” field if it exists.

- -
Source code in network_wrangler/utils/geo.py:
def update_point_geometry(
-    df: pd.DataFrame,
-    ref_point_df: pd.DataFrame,
-    lon_field: str = "X",
-    lat_field: str = "Y",
-    id_field: str = "model_node_id",
-    ref_lon_field: str = "X",
-    ref_lat_field: str = "Y",
-    ref_id_field: str = "model_node_id",
-) -> pd.DataFrame:
-    """Returns copy of df with lat and long fields updated with geometry from ref_point_df.
-
-    NOTE: does not update "geometry" field if it exists.
-    """
-    df = copy.deepcopy(df)
-
-    ref_df = ref_point_df.rename(
-        columns={
-            ref_lon_field: lon_field,
-            ref_lat_field: lat_field,
-            ref_id_field: id_field,
-        }
-    )
-
-    updated_df = update_df_by_col_value(
-        df,
-        ref_df[[id_field, lon_field, lat_field]],
-        id_field,
-        properties=[lat_field, lon_field],
-        fail_if_missing=False,
-    )
-    return updated_df
update_points_in_linestring(linestring, updated_coords, position)

Replaces a point in a linestring with a new point.

Parameters:

- `linestring` (`LineString`): Original linestring. Required.
- `updated_coords` (`List[float]`): Updated point coordinates. Required.
- `position` (`int`): Position in the linestring to update. Required.
Source code in network_wrangler/utils/geo.py:
def update_points_in_linestring(
-    linestring: LineString, updated_coords: List[float], position: int
-):
-    """Replaces a point in a linestring with a new point.
-
-    Args:
-        linestring (LineString): original_linestring
-        updated_coords (List[float]): updated point coordinates
-        position (int): position in the linestring to update
-    """
-    coords = [c for c in linestring.coords]
-    coords[position] = updated_coords
-    return LineString(coords)
network_wrangler.utils.df_accessors

Dataframe accessors that allow functions to be called directly on the dataframe.

DictQueryAccessor

Query link, node and shape dataframes using project selection dictionary.

Will overlook any keys which are not columns in the dataframe.

Usage:

    selection_dict = {
        "lanes": [1, 2, 3],
        "name": ["6th", "Sixth", "sixth"],
        "drive_access": 1,
    }
    selected_links_df = links_df.dict_query(selection_dict)
- -
Source code in network_wrangler/utils/df_accessors.py:
@pd.api.extensions.register_dataframe_accessor("dict_query")
-class DictQueryAccessor:
-    """Query link, node and shape dataframes using project selection dictionary.
-
-    Will overlook any keys which are not columns in the dataframe.
-
-    Usage:
-
-    ```
-    selection_dict = {
-        "lanes":[1,2,3],
-        "name":['6th','Sixth','sixth'],
-        "drive_access": 1,
-    }
-    selected_links_df = links_df.dict_query(selection_dict)
-    ```
-
-    """
-
-    def __init__(self, pandas_obj):
-        """Initialization function for the dictionary query accessor."""
-        self._obj = pandas_obj
-
-    def __call__(self, selection_dict: dict, return_all_if_none: bool = False):
-        """Queries the dataframe using the selection dictionary.
-
-        Args:
-            selection_dict (dict): Dictionary of column names and values to select by.
-            return_all_if_none (bool, optional): If True, will return entire df if dict has
-                 no values. Defaults to False.
-        """
-        _selection_dict = {
-            k: v for k, v in selection_dict.items() if k in self._obj.columns and v is not None
-        }
-
-        if not _selection_dict:
-            if return_all_if_none:
-                return self._obj
-            raise ValueError(f"Relevant part of selection dictionary is empty: {selection_dict}")
-
-        _sel_query = dict_to_query(_selection_dict)
-        WranglerLogger.debug(f"_sel_query: \n   {_sel_query}")
-        _df = self._obj.query(_sel_query, engine="python")
-
-        if len(_df) == 0:
-            WranglerLogger.warning(
-                f"No records found in df \
-                  using selection: {selection_dict}"
-            )
-        return _df
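
A usage sketch (hypothetical dataframe; importing the module registers the accessor on all dataframes):

```python
import pandas as pd

import network_wrangler.utils.df_accessors  # noqa: F401  (registers .dict_query)

links_df = pd.DataFrame({"lanes": [1, 2, 4], "name": ["6th", "Main", "Sixth"]})
selected = links_df.dict_query({"lanes": [1, 2, 3], "name": ["6th", "Sixth", "sixth"]})
# -> rows where lanes is one of [1, 2, 3] and name matches one of the given spellings
```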
__call__(selection_dict, return_all_if_none=False)

Queries the dataframe using the selection dictionary.

Parameters:

- `selection_dict` (`dict`): Dictionary of column names and values to select by. Required.
- `return_all_if_none` (`bool`, optional): If True, will return the entire df if the dict has no values. Defaults to False.
Source code in network_wrangler/utils/df_accessors.py:
def __call__(self, selection_dict: dict, return_all_if_none: bool = False):
-    """Queries the dataframe using the selection dictionary.
-
-    Args:
-        selection_dict (dict): Dictionary of column names and values to select by.
-        return_all_if_none (bool, optional): If True, will return entire df if dict has
-             no values. Defaults to False.
-    """
-    _selection_dict = {
-        k: v for k, v in selection_dict.items() if k in self._obj.columns and v is not None
-    }
-
-    if not _selection_dict:
-        if return_all_if_none:
-            return self._obj
-        raise ValueError(f"Relevant part of selection dictionary is empty: {selection_dict}")
-
-    _sel_query = dict_to_query(_selection_dict)
-    WranglerLogger.debug(f"_sel_query: \n   {_sel_query}")
-    _df = self._obj.query(_sel_query, engine="python")
-
-    if len(_df) == 0:
-        WranglerLogger.warning(
-            f"No records found in df \
-              using selection: {selection_dict}"
-        )
-    return _df
__init__(pandas_obj)

Initialization function for the dictionary query accessor.

Source code in network_wrangler/utils/df_accessors.py:
def __init__(self, pandas_obj):
-    """Initialization function for the dictionary query accessor."""
-    self._obj = pandas_obj
dfHash

Creates a dataframe hash that is compatible with geopandas and various metadata.

Definitely not the fastest, but it seems to work where others have failed.
Source code in network_wrangler/utils/df_accessors.py:
@pd.api.extensions.register_dataframe_accessor("df_hash")
-class dfHash:
-    """Creates a dataframe hash that is compatable with geopandas and various metadata.
-
-    Definitely not the fastest, but she seems to work where others have failed.
-    """
-
-    def __init__(self, pandas_obj):
-        """Initialization function for the dataframe hash."""
-        self._obj = pandas_obj
-
-    def __call__(self):
-        """Function to hash the dataframe."""
-        _value = str(self._obj.values).encode()
-        df_hash = hashlib.sha1(_value).hexdigest()  # avoid shadowing the builtin `hash`
-        return df_hash
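
For example (hypothetical dataframe; importing the module registers the accessor):

```python
import pandas as pd

import network_wrangler.utils.df_accessors  # noqa: F401  (registers .df_hash)

df = pd.DataFrame({"a": [1, 2, 3]})
digest = df.df_hash()  # 40-character SHA-1 hex digest of the dataframe's values
```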
__call__()

Function to hash the dataframe.

Source code in network_wrangler/utils/df_accessors.py:
def __call__(self):
-    """Function to hash the dataframe."""
-    _value = str(self._obj.values).encode()
-        df_hash = hashlib.sha1(_value).hexdigest()  # avoid shadowing the builtin `hash`
-        return df_hash
__init__(pandas_obj)

Initialization function for the dataframe hash.

Source code in network_wrangler/utils/df_accessors.py:
def __init__(self, pandas_obj):
-    """Initialization function for the dataframe hash."""
-    self._obj = pandas_obj
network_wrangler.logger

Logging utilities for Network Wrangler.

setup_logging(info_log_filename=None, debug_log_filename='wrangler_{}.debug.log'.format(datetime.now().strftime('%Y_%m_%d__%H_%M_%S')), std_out_level='info')

Sets up the WranglerLogger w.r.t. the debug file location and if logging to console.

Called by the test_logging fixture in conftest.py; can also be called by the user to set up logging for their session. If called multiple times, the logger will be reset.

Parameters:

- `info_log_filename` (`str`, optional): The location of the log file that will get created for the INFO log. The INFO log is terse, giving just the bare minimum of detail. Defaults to a file in cwd() named `network_wrangler_[datetime].info.log`. To turn off logging to a file, use `log_filename = None`.
- `debug_log_filename` (`str`, optional): The location of the log file that will get created for the DEBUG log. The DEBUG log is very noisy, for debugging. Defaults to a file in cwd() named `wrangler_[datetime].debug.log`. To turn off logging to a file, use `log_filename = None`.
- `std_out_level` (`str`, optional): The level of logging to the console. One of "info", "warning", "debug". Defaults to "info"; anything else falls back to ERROR.
Source code in network_wrangler/logger.py:
def setup_logging(
-    info_log_filename: str = None,
-    debug_log_filename: str = "wrangler_{}.debug.log".format(
-        datetime.now().strftime("%Y_%m_%d__%H_%M_%S")
-    ),
-    std_out_level: str = "info",
-):
-    """Sets up the WranglerLogger w.r.t. the debug file location and if logging to console.
-
-    Called by the test_logging fixture in conftest.py and can be called by the user to setup
-    logging for their session. If called multiple times, the logger will be reset.
-
-    Args:
-        info_log_filename: the location of the log file that will get created to add the INFO log.
-            The INFO Log is terse, just gives the bare minimum of details.
-            Defaults to file in cwd() `wrangler_[datetime].log`. To turn off logging to a file,
-            use log_filename = None.
-        debug_log_filename: the location of the log file that will get created to add the DEBUG log
-            The DEBUG log is very noisy, for debugging. Defaults to file in cwd()
-            `wrangler_[datetime].log`. To turn off logging to a file, use log_filename = None.
-        std_out_level: the level of logging to the console. One of "info", "warning", "debug".
-            Defaults to "info" but will be set to ERROR if nothing provided matches.
-    """
-    # add function variable so that we know if logging has been called
-    setup_logging.called = True
-
-    # Clear handles if any exist already
-    WranglerLogger.handlers = []
-
-    WranglerLogger.setLevel(logging.DEBUG)
-
-    FORMAT = logging.Formatter(
-        "%(asctime)-15s %(levelname)s: %(message)s", datefmt="%Y-%m-%d %H:%M:%S,"
-    )
-    if not info_log_filename:
-        info_log_filename = os.path.join(
-            os.getcwd(),
-            "network_wrangler_{}.info.log".format(datetime.now().strftime("%Y_%m_%d__%H_%M_%S")),
-        )
-
-    info_file_handler = logging.StreamHandler(open(info_log_filename, "w"))
-    info_file_handler.setLevel(logging.INFO)
-    info_file_handler.setFormatter(FORMAT)
-    WranglerLogger.addHandler(info_file_handler)
-
-    # create debug file only when debug_log_filename is provided
-    if debug_log_filename:
-        debug_log_handler = logging.StreamHandler(open(debug_log_filename, "w"))
-        debug_log_handler.setLevel(logging.DEBUG)
-        debug_log_handler.setFormatter(FORMAT)
-        WranglerLogger.addHandler(debug_log_handler)
-
-    console_handler = logging.StreamHandler(sys.stdout)
-    console_handler.setLevel(logging.DEBUG)
-    console_handler.setFormatter(FORMAT)
-    WranglerLogger.addHandler(console_handler)
-    if std_out_level == "debug":
-        console_handler.setLevel(logging.DEBUG)
-    elif std_out_level == "info":
-        console_handler.setLevel(logging.INFO)
-    elif std_out_level == "warning":
-        console_handler.setLevel(logging.WARNING)
-    else:
-        console_handler.setLevel(logging.ERROR)
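
A minimal session-setup sketch (assumes `WranglerLogger` is importable from the same module, as the source above suggests):

```python
from network_wrangler.logger import WranglerLogger, setup_logging

setup_logging(std_out_level="info")  # by default also writes .info and .debug log files to cwd
WranglerLogger.info("Networks loaded.")
WranglerLogger.debug("Noisy detail goes to the debug log file only.")
```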
\ No newline at end of file
diff --git a/update_gtfs_data_model/assets/_mkdocstrings.css b/update_gtfs_data_model/assets/_mkdocstrings.css
deleted file mode 100644
index 85449ec7..00000000
--- a/update_gtfs_data_model/assets/javascripts/bundle.3220b9d7.min.js
+++ /dev/null
@@ -1,29 +0,0 @@
-[29 lines of minified, generated JavaScript omitted: the Material for MkDocs theme bundle, vendoring the focus-visible polyfill, clipboard.js v2.0.11 (MIT, Zeno Rocha), escape-html, and RxJS-based theme components]
ni(e,t){let r=vn(e);return S(I(location.protocol!=="file:"),Ve("search")).pipe(Ae(o=>o),v(()=>t)).subscribe(({config:o,docs:n})=>r.next({type:0,data:{config:o,docs:n,options:{suggest:G("search.suggest")}}})),r}function ii({document$:e}){let t=Te(),r=Ne(new URL("../versions.json",t.base)).pipe(ve(()=>M)),o=r.pipe(m(n=>{let[,i]=t.base.match(/([^/]+)\/?$/);return n.find(({version:a,aliases:s})=>a===i||s.includes(i))||n[0]}));r.pipe(m(n=>new Map(n.map(i=>[`${new URL(`../${i.version}/`,t.base)}`,i]))),v(n=>d(document.body,"click").pipe(b(i=>!i.metaKey&&!i.ctrlKey),ee(o),v(([i,a])=>{if(i.target instanceof Element){let s=i.target.closest("a");if(s&&!s.target&&n.has(s.href)){let p=s.href;return!i.target.closest(".md-version")&&n.get(p)===a?M:(i.preventDefault(),I(p))}}return M}),v(i=>{let{version:a}=n.get(i);return ur(new URL(i)).pipe(m(s=>{let c=xe().href.replace(t.base,"");return s.has(c.split("#")[0])?new URL(`../${a}/${c}`,t.base):new URL(i)}))})))).subscribe(n=>ct(n,!0)),z([r,o]).subscribe(([n,i])=>{P(".md-header__topic").appendChild(Mn(n,i))}),e.pipe(v(()=>o)).subscribe(n=>{var a;let i=__md_get("__outdated",sessionStorage);if(i===null){i=!0;let s=((a=t.version)==null?void 0:a.default)||"latest";Array.isArray(s)||(s=[s]);e:for(let p of s)for(let c of n.aliases.concat(n.version))if(new RegExp(p,"i").test(c)){i=!1;break e}__md_set("__outdated",i,sessionStorage)}if(i)for(let s of ae("outdated"))s.hidden=!1})}function ns(e,{worker$:t}){let{searchParams:r}=xe();r.has("q")&&(Je("search",!0),e.value=r.get("q"),e.focus(),Ve("search").pipe(Ae(i=>!i)).subscribe(()=>{let i=xe();i.searchParams.delete("q"),history.replaceState({},"",`${i}`)}));let o=et(e),n=S(t.pipe(Ae(It)),d(e,"keyup"),o).pipe(m(()=>e.value),K());return z([n,o]).pipe(m(([i,a])=>({value:i,focus:a})),B(1))}function ai(e,{worker$:t}){let r=new g,o=r.pipe(X(),ne(!0));z([t.pipe(Ae(It)),r],(i,a)=>a).pipe(Z("value")).subscribe(({value:i})=>t.next({type:2,data:i})),r.pipe(Z("focus")).subscribe(({focus:i})=>{i&&Je("search",i)}),d(e.form,"reset").pipe(U(o)).subscribe(()=>e.focus());let n=P("header [for=__search]");return d(n,"click").subscribe(()=>e.focus()),ns(e,{worker$:t}).pipe(y(i=>r.next(i)),L(()=>r.complete()),m(i=>R({ref:e},i)),B(1))}function si(e,{worker$:t,query$:r}){let o=new g,n=tn(e.parentElement).pipe(b(Boolean)),i=e.parentElement,a=P(":scope > :first-child",e),s=P(":scope > :last-child",e);Ve("search").subscribe(l=>s.setAttribute("role",l?"list":"presentation")),o.pipe(ee(r),Ur(t.pipe(Ae(It)))).subscribe(([{items:l},{value:f}])=>{switch(l.length){case 0:a.textContent=f.length?ye("search.result.none"):ye("search.result.placeholder");break;case 1:a.textContent=ye("search.result.one");break;default:let u=sr(l.length);a.textContent=ye("search.result.other",u)}});let p=o.pipe(y(()=>s.innerHTML=""),v(({items:l})=>S(I(...l.slice(0,10)),I(...l.slice(10)).pipe(Ye(4),Vr(n),v(([f])=>f)))),m(Tn),pe());return p.subscribe(l=>s.appendChild(l)),p.pipe(oe(l=>{let f=fe("details",l);return typeof f=="undefined"?M:d(f,"toggle").pipe(U(o),m(()=>f))})).subscribe(l=>{l.open===!1&&l.offsetTop<=i.scrollTop&&i.scrollTo({top:l.offsetTop})}),t.pipe(b(dr),m(({data:l})=>l)).pipe(y(l=>o.next(l)),L(()=>o.complete()),m(l=>R({ref:e},l)))}function is(e,{query$:t}){return t.pipe(m(({value:r})=>{let o=xe();return o.hash="",r=r.replace(/\s+/g,"+").replace(/&/g,"%26").replace(/=/g,"%3D"),o.search=`q=${r}`,{url:o}}))}function ci(e,t){let r=new g,o=r.pipe(X(),ne(!0));return 
r.subscribe(({url:n})=>{e.setAttribute("data-clipboard-text",e.href),e.href=`${n}`}),d(e,"click").pipe(U(o)).subscribe(n=>n.preventDefault()),is(e,t).pipe(y(n=>r.next(n)),L(()=>r.complete()),m(n=>R({ref:e},n)))}function pi(e,{worker$:t,keyboard$:r}){let o=new g,n=Se("search-query"),i=S(d(n,"keydown"),d(n,"focus")).pipe(be(se),m(()=>n.value),K());return o.pipe(We(i),m(([{suggest:s},p])=>{let c=p.split(/([\s-]+)/);if(s!=null&&s.length&&c[c.length-1]){let l=s[s.length-1];l.startsWith(c[c.length-1])&&(c[c.length-1]=l)}else c.length=0;return c})).subscribe(s=>e.innerHTML=s.join("").replace(/\s/g," ")),r.pipe(b(({mode:s})=>s==="search")).subscribe(s=>{switch(s.type){case"ArrowRight":e.innerText.length&&n.selectionStart===n.value.length&&(n.value=e.innerText);break}}),t.pipe(b(dr),m(({data:s})=>s)).pipe(y(s=>o.next(s)),L(()=>o.complete()),m(()=>({ref:e})))}function li(e,{index$:t,keyboard$:r}){let o=Te();try{let n=ni(o.search,t),i=Se("search-query",e),a=Se("search-result",e);d(e,"click").pipe(b(({target:p})=>p instanceof Element&&!!p.closest("a"))).subscribe(()=>Je("search",!1)),r.pipe(b(({mode:p})=>p==="search")).subscribe(p=>{let c=Re();switch(p.type){case"Enter":if(c===i){let l=new Map;for(let f of $(":first-child [href]",a)){let u=f.firstElementChild;l.set(f,parseFloat(u.getAttribute("data-md-score")))}if(l.size){let[[f]]=[...l].sort(([,u],[,h])=>h-u);f.click()}p.claim()}break;case"Escape":case"Tab":Je("search",!1),i.blur();break;case"ArrowUp":case"ArrowDown":if(typeof c=="undefined")i.focus();else{let l=[i,...$(":not(details) > [href], summary, details[open] [href]",a)],f=Math.max(0,(Math.max(0,l.indexOf(c))+l.length+(p.type==="ArrowUp"?-1:1))%l.length);l[f].focus()}p.claim();break;default:i!==Re()&&i.focus()}}),r.pipe(b(({mode:p})=>p==="global")).subscribe(p=>{switch(p.type){case"f":case"s":case"/":i.focus(),i.select(),p.claim();break}});let s=ai(i,{worker$:n});return S(s,si(a,{worker$:n,query$:s})).pipe(Pe(...ae("search-share",e).map(p=>ci(p,{query$:s})),...ae("search-suggest",e).map(p=>pi(p,{worker$:n,keyboard$:r}))))}catch(n){return e.hidden=!0,Ke}}function mi(e,{index$:t,location$:r}){return z([t,r.pipe(Q(xe()),b(o=>!!o.searchParams.get("h")))]).pipe(m(([o,n])=>oi(o.config)(n.searchParams.get("h"))),m(o=>{var a;let n=new Map,i=document.createNodeIterator(e,NodeFilter.SHOW_TEXT);for(let s=i.nextNode();s;s=i.nextNode())if((a=s.parentElement)!=null&&a.offsetHeight){let p=s.textContent,c=o(p);c.length>p.length&&n.set(s,c)}for(let[s,p]of n){let{childNodes:c}=E("span",null,p);s.replaceWith(...Array.from(c))}return{ref:e,nodes:n}}))}function as(e,{viewport$:t,main$:r}){let o=e.closest(".md-grid"),n=o.offsetTop-o.parentElement.offsetTop;return z([r,t]).pipe(m(([{offset:i,height:a},{offset:{y:s}}])=>(a=a+Math.min(n,Math.max(0,s-i))-n,{height:a,locked:s>=i+n})),K((i,a)=>i.height===a.height&&i.locked===a.locked))}function Jr(e,o){var n=o,{header$:t}=n,r=io(n,["header$"]);let i=P(".md-sidebar__scrollwrap",e),{y:a}=Ue(i);return C(()=>{let s=new g,p=s.pipe(X(),ne(!0)),c=s.pipe(Le(0,me));return c.pipe(ee(t)).subscribe({next([{height:l},{height:f}]){i.style.height=`${l-2*a}px`,e.style.top=`${f}px`},complete(){i.style.height="",e.style.top=""}}),c.pipe(Ae()).subscribe(()=>{for(let l of $(".md-nav__link--active[href]",e)){if(!l.clientHeight)continue;let f=l.closest(".md-sidebar__scrollwrap");if(typeof f!="undefined"){let u=l.offsetTop-f.offsetTop,{height:h}=ce(f);f.scrollTo({top:u-h/2})}}}),ue($("label[tabindex]",e)).pipe(oe(l=>d(l,"click").pipe(be(se),m(()=>l),U(p)))).subscribe(l=>{let 
f=P(`[id="${l.htmlFor}"]`);P(`[aria-labelledby="${l.id}"]`).setAttribute("aria-expanded",`${f.checked}`)}),as(e,r).pipe(y(l=>s.next(l)),L(()=>s.complete()),m(l=>R({ref:e},l)))})}function fi(e,t){if(typeof t!="undefined"){let r=`https://api.github.com/repos/${e}/${t}`;return Ct(Ne(`${r}/releases/latest`).pipe(ve(()=>M),m(o=>({version:o.tag_name})),Be({})),Ne(r).pipe(ve(()=>M),m(o=>({stars:o.stargazers_count,forks:o.forks_count})),Be({}))).pipe(m(([o,n])=>R(R({},o),n)))}else{let r=`https://api.github.com/users/${e}`;return Ne(r).pipe(m(o=>({repositories:o.public_repos})),Be({}))}}function ui(e,t){let r=`https://${e}/api/v4/projects/${encodeURIComponent(t)}`;return Ne(r).pipe(ve(()=>M),m(({star_count:o,forks_count:n})=>({stars:o,forks:n})),Be({}))}function di(e){let t=e.match(/^.+github\.com\/([^/]+)\/?([^/]+)?/i);if(t){let[,r,o]=t;return fi(r,o)}if(t=e.match(/^.+?([^/]*gitlab[^/]+)\/(.+?)\/?$/i),t){let[,r,o]=t;return ui(r,o)}return M}var ss;function cs(e){return ss||(ss=C(()=>{let t=__md_get("__source",sessionStorage);if(t)return I(t);if(ae("consent").length){let o=__md_get("__consent");if(!(o&&o.github))return M}return di(e.href).pipe(y(o=>__md_set("__source",o,sessionStorage)))}).pipe(ve(()=>M),b(t=>Object.keys(t).length>0),m(t=>({facts:t})),B(1)))}function hi(e){let t=P(":scope > :last-child",e);return C(()=>{let r=new g;return r.subscribe(({facts:o})=>{t.appendChild(Sn(o)),t.classList.add("md-source__repository--active")}),cs(e).pipe(y(o=>r.next(o)),L(()=>r.complete()),m(o=>R({ref:e},o)))})}function ps(e,{viewport$:t,header$:r}){return ge(document.body).pipe(v(()=>mr(e,{header$:r,viewport$:t})),m(({offset:{y:o}})=>({hidden:o>=10})),Z("hidden"))}function bi(e,t){return C(()=>{let r=new g;return r.subscribe({next({hidden:o}){e.hidden=o},complete(){e.hidden=!1}}),(G("navigation.tabs.sticky")?I({hidden:!1}):ps(e,t)).pipe(y(o=>r.next(o)),L(()=>r.complete()),m(o=>R({ref:e},o)))})}function ls(e,{viewport$:t,header$:r}){let o=new Map,n=$(".md-nav__link",e);for(let s of n){let p=decodeURIComponent(s.hash.substring(1)),c=fe(`[id="${p}"]`);typeof c!="undefined"&&o.set(s,c)}let i=r.pipe(Z("height"),m(({height:s})=>{let p=Se("main"),c=P(":scope > :first-child",p);return s+.8*(c.offsetTop-p.offsetTop)}),pe());return ge(document.body).pipe(Z("height"),v(s=>C(()=>{let p=[];return I([...o].reduce((c,[l,f])=>{for(;p.length&&o.get(p[p.length-1]).tagName>=f.tagName;)p.pop();let u=f.offsetTop;for(;!u&&f.parentElement;)f=f.parentElement,u=f.offsetTop;let h=f.offsetParent;for(;h;h=h.offsetParent)u+=h.offsetTop;return c.set([...p=[...p,l]].reverse(),u)},new Map))}).pipe(m(p=>new Map([...p].sort(([,c],[,l])=>c-l))),We(i),v(([p,c])=>t.pipe(jr(([l,f],{offset:{y:u},size:h})=>{let w=u+h.height>=Math.floor(s.height);for(;f.length;){let[,A]=f[0];if(A-c=u&&!w)f=[l.pop(),...f];else break}return[l,f]},[[],[...p]]),K((l,f)=>l[0]===f[0]&&l[1]===f[1])))))).pipe(m(([s,p])=>({prev:s.map(([c])=>c),next:p.map(([c])=>c)})),Q({prev:[],next:[]}),Ye(2,1),m(([s,p])=>s.prev.length{let i=new g,a=i.pipe(X(),ne(!0));if(i.subscribe(({prev:s,next:p})=>{for(let[c]of p)c.classList.remove("md-nav__link--passed"),c.classList.remove("md-nav__link--active");for(let[c,[l]]of s.entries())l.classList.add("md-nav__link--passed"),l.classList.toggle("md-nav__link--active",c===s.length-1)}),G("toc.follow")){let s=S(t.pipe(_e(1),m(()=>{})),t.pipe(_e(250),m(()=>"smooth")));i.pipe(b(({prev:p})=>p.length>0),We(o.pipe(be(se))),ee(s)).subscribe(([[{prev:p}],c])=>{let[l]=p[p.length-1];if(l.offsetHeight){let f=cr(l);if(typeof f!="undefined"){let 
u=l.offsetTop-f.offsetTop,{height:h}=ce(f);f.scrollTo({top:u-h/2,behavior:c})}}})}return G("navigation.tracking")&&t.pipe(U(a),Z("offset"),_e(250),Ce(1),U(n.pipe(Ce(1))),at({delay:250}),ee(i)).subscribe(([,{prev:s}])=>{let p=xe(),c=s[s.length-1];if(c&&c.length){let[l]=c,{hash:f}=new URL(l.href);p.hash!==f&&(p.hash=f,history.replaceState({},"",`${p}`))}else p.hash="",history.replaceState({},"",`${p}`)}),ls(e,{viewport$:t,header$:r}).pipe(y(s=>i.next(s)),L(()=>i.complete()),m(s=>R({ref:e},s)))})}function ms(e,{viewport$:t,main$:r,target$:o}){let n=t.pipe(m(({offset:{y:a}})=>a),Ye(2,1),m(([a,s])=>a>s&&s>0),K()),i=r.pipe(m(({active:a})=>a));return z([i,n]).pipe(m(([a,s])=>!(a&&s)),K(),U(o.pipe(Ce(1))),ne(!0),at({delay:250}),m(a=>({hidden:a})))}function gi(e,{viewport$:t,header$:r,main$:o,target$:n}){let i=new g,a=i.pipe(X(),ne(!0));return i.subscribe({next({hidden:s}){e.hidden=s,s?(e.setAttribute("tabindex","-1"),e.blur()):e.removeAttribute("tabindex")},complete(){e.style.top="",e.hidden=!0,e.removeAttribute("tabindex")}}),r.pipe(U(a),Z("height")).subscribe(({height:s})=>{e.style.top=`${s+16}px`}),d(e,"click").subscribe(s=>{s.preventDefault(),window.scrollTo({top:0})}),ms(e,{viewport$:t,main$:o,target$:n}).pipe(y(s=>i.next(s)),L(()=>i.complete()),m(s=>R({ref:e},s)))}function xi({document$:e,viewport$:t}){e.pipe(v(()=>$(".md-ellipsis")),oe(r=>Tt(r).pipe(U(e.pipe(Ce(1))),b(o=>o),m(()=>r),we(1))),b(r=>r.offsetWidth{let o=r.innerText,n=r.closest("a")||r;return n.title=o,pt(n,{viewport$:t}).pipe(U(e.pipe(Ce(1))),L(()=>n.removeAttribute("title")))})).subscribe(),e.pipe(v(()=>$(".md-status")),oe(r=>pt(r,{viewport$:t}))).subscribe()}function yi({document$:e,tablet$:t}){e.pipe(v(()=>$(".md-toggle--indeterminate")),y(r=>{r.indeterminate=!0,r.checked=!1}),oe(r=>d(r,"change").pipe(Dr(()=>r.classList.contains("md-toggle--indeterminate")),m(()=>r))),ee(t)).subscribe(([r,o])=>{r.classList.remove("md-toggle--indeterminate"),o&&(r.checked=!1)})}function fs(){return/(iPad|iPhone|iPod)/.test(navigator.userAgent)}function Ei({document$:e}){e.pipe(v(()=>$("[data-md-scrollfix]")),y(t=>t.removeAttribute("data-md-scrollfix")),b(fs),oe(t=>d(t,"touchstart").pipe(m(()=>t)))).subscribe(t=>{let r=t.scrollTop;r===0?t.scrollTop=1:r+t.offsetHeight===t.scrollHeight&&(t.scrollTop=r-1)})}function wi({viewport$:e,tablet$:t}){z([Ve("search"),t]).pipe(m(([r,o])=>r&&!o),v(r=>I(r).pipe(Ge(r?400:100))),ee(e)).subscribe(([r,{offset:{y:o}}])=>{if(r)document.body.setAttribute("data-md-scrolllock",""),document.body.style.top=`-${o}px`;else{let n=-1*parseInt(document.body.style.top,10);document.body.removeAttribute("data-md-scrolllock"),document.body.style.top="",n&&window.scrollTo(0,n)}})}Object.entries||(Object.entries=function(e){let t=[];for(let r of Object.keys(e))t.push([r,e[r]]);return t});Object.values||(Object.values=function(e){let t=[];for(let r of Object.keys(e))t.push(e[r]);return t});typeof Element!="undefined"&&(Element.prototype.scrollTo||(Element.prototype.scrollTo=function(e,t){typeof e=="object"?(this.scrollLeft=e.left,this.scrollTop=e.top):(this.scrollLeft=e,this.scrollTop=t)}),Element.prototype.replaceWith||(Element.prototype.replaceWith=function(...e){let t=this.parentNode;if(t){e.length===0&&t.removeChild(this);for(let r=e.length-1;r>=0;r--){let o=e[r];typeof o=="string"?o=document.createTextNode(o):o.parentNode&&o.parentNode.removeChild(o),r?t.insertBefore(this.previousSibling,o):t.replaceChild(o,this)}}}));function us(){return location.protocol==="file:"?Et(`${new 
URL("search/search_index.js",Xr.base)}`).pipe(m(()=>__index),B(1)):Ne(new URL("search/search_index.json",Xr.base))}document.documentElement.classList.remove("no-js");document.documentElement.classList.add("js");var rt=Yo(),jt=nn(),Ot=cn(jt),Zr=on(),Oe=bn(),hr=$t("(min-width: 960px)"),Si=$t("(min-width: 1220px)"),Oi=pn(),Xr=Te(),Mi=document.forms.namedItem("search")?us():Ke,eo=new g;Bn({alert$:eo});var to=new g;G("navigation.instant")&&Zn({location$:jt,viewport$:Oe,progress$:to}).subscribe(rt);var Ti;((Ti=Xr.version)==null?void 0:Ti.provider)==="mike"&&ii({document$:rt});S(jt,Ot).pipe(Ge(125)).subscribe(()=>{Je("drawer",!1),Je("search",!1)});Zr.pipe(b(({mode:e})=>e==="global")).subscribe(e=>{switch(e.type){case"p":case",":let t=fe("link[rel=prev]");typeof t!="undefined"&&ct(t);break;case"n":case".":let r=fe("link[rel=next]");typeof r!="undefined"&&ct(r);break;case"Enter":let o=Re();o instanceof HTMLLabelElement&&o.click()}});xi({viewport$:Oe,document$:rt});yi({document$:rt,tablet$:hr});Ei({document$:rt});wi({viewport$:Oe,tablet$:hr});var tt=Nn(Se("header"),{viewport$:Oe}),Ft=rt.pipe(m(()=>Se("main")),v(e=>Qn(e,{viewport$:Oe,header$:tt})),B(1)),ds=S(...ae("consent").map(e=>xn(e,{target$:Ot})),...ae("dialog").map(e=>Dn(e,{alert$:eo})),...ae("header").map(e=>zn(e,{viewport$:Oe,header$:tt,main$:Ft})),...ae("palette").map(e=>Kn(e)),...ae("progress").map(e=>Yn(e,{progress$:to})),...ae("search").map(e=>li(e,{index$:Mi,keyboard$:Zr})),...ae("source").map(e=>hi(e))),hs=C(()=>S(...ae("announce").map(e=>gn(e)),...ae("content").map(e=>Un(e,{viewport$:Oe,target$:Ot,print$:Oi})),...ae("content").map(e=>G("search.highlight")?mi(e,{index$:Mi,location$:jt}):M),...ae("header-title").map(e=>qn(e,{viewport$:Oe,header$:tt})),...ae("sidebar").map(e=>e.getAttribute("data-md-type")==="navigation"?Nr(Si,()=>Jr(e,{viewport$:Oe,header$:tt,main$:Ft})):Nr(hr,()=>Jr(e,{viewport$:Oe,header$:tt,main$:Ft}))),...ae("tabs").map(e=>bi(e,{viewport$:Oe,header$:tt})),...ae("toc").map(e=>vi(e,{viewport$:Oe,header$:tt,main$:Ft,target$:Ot})),...ae("top").map(e=>gi(e,{viewport$:Oe,header$:tt,main$:Ft,target$:Ot})))),Li=rt.pipe(v(()=>hs),Pe(ds),B(1));Li.subscribe();window.document$=rt;window.location$=jt;window.target$=Ot;window.keyboard$=Zr;window.viewport$=Oe;window.tablet$=hr;window.screen$=Si;window.print$=Oi;window.alert$=eo;window.progress$=to;window.component$=Li;})(); -//# sourceMappingURL=bundle.3220b9d7.min.js.map - diff --git a/update_gtfs_data_model/assets/javascripts/bundle.3220b9d7.min.js.map b/update_gtfs_data_model/assets/javascripts/bundle.3220b9d7.min.js.map deleted file mode 100644 index df365939..00000000 --- a/update_gtfs_data_model/assets/javascripts/bundle.3220b9d7.min.js.map +++ /dev/null @@ -1,7 +0,0 @@ -{ - "version": 3, - "sources": ["node_modules/focus-visible/dist/focus-visible.js", "node_modules/clipboard/dist/clipboard.js", "node_modules/escape-html/index.js", "src/templates/assets/javascripts/bundle.ts", "node_modules/rxjs/node_modules/tslib/tslib.es6.js", "node_modules/rxjs/src/internal/util/isFunction.ts", "node_modules/rxjs/src/internal/util/createErrorClass.ts", "node_modules/rxjs/src/internal/util/UnsubscriptionError.ts", "node_modules/rxjs/src/internal/util/arrRemove.ts", "node_modules/rxjs/src/internal/Subscription.ts", "node_modules/rxjs/src/internal/config.ts", "node_modules/rxjs/src/internal/scheduler/timeoutProvider.ts", "node_modules/rxjs/src/internal/util/reportUnhandledError.ts", "node_modules/rxjs/src/internal/util/noop.ts", "node_modules/rxjs/src/internal/NotificationFactories.ts", 
"node_modules/rxjs/src/internal/util/errorContext.ts", "node_modules/rxjs/src/internal/Subscriber.ts", "node_modules/rxjs/src/internal/symbol/observable.ts", "node_modules/rxjs/src/internal/util/identity.ts", "node_modules/rxjs/src/internal/util/pipe.ts", "node_modules/rxjs/src/internal/Observable.ts", "node_modules/rxjs/src/internal/util/lift.ts", "node_modules/rxjs/src/internal/operators/OperatorSubscriber.ts", "node_modules/rxjs/src/internal/scheduler/animationFrameProvider.ts", "node_modules/rxjs/src/internal/util/ObjectUnsubscribedError.ts", "node_modules/rxjs/src/internal/Subject.ts", "node_modules/rxjs/src/internal/BehaviorSubject.ts", "node_modules/rxjs/src/internal/scheduler/dateTimestampProvider.ts", "node_modules/rxjs/src/internal/ReplaySubject.ts", "node_modules/rxjs/src/internal/scheduler/Action.ts", "node_modules/rxjs/src/internal/scheduler/intervalProvider.ts", "node_modules/rxjs/src/internal/scheduler/AsyncAction.ts", "node_modules/rxjs/src/internal/Scheduler.ts", "node_modules/rxjs/src/internal/scheduler/AsyncScheduler.ts", "node_modules/rxjs/src/internal/scheduler/async.ts", "node_modules/rxjs/src/internal/scheduler/QueueAction.ts", "node_modules/rxjs/src/internal/scheduler/QueueScheduler.ts", "node_modules/rxjs/src/internal/scheduler/queue.ts", "node_modules/rxjs/src/internal/scheduler/AnimationFrameAction.ts", "node_modules/rxjs/src/internal/scheduler/AnimationFrameScheduler.ts", "node_modules/rxjs/src/internal/scheduler/animationFrame.ts", "node_modules/rxjs/src/internal/observable/empty.ts", "node_modules/rxjs/src/internal/util/isScheduler.ts", "node_modules/rxjs/src/internal/util/args.ts", "node_modules/rxjs/src/internal/util/isArrayLike.ts", "node_modules/rxjs/src/internal/util/isPromise.ts", "node_modules/rxjs/src/internal/util/isInteropObservable.ts", "node_modules/rxjs/src/internal/util/isAsyncIterable.ts", "node_modules/rxjs/src/internal/util/throwUnobservableError.ts", "node_modules/rxjs/src/internal/symbol/iterator.ts", "node_modules/rxjs/src/internal/util/isIterable.ts", "node_modules/rxjs/src/internal/util/isReadableStreamLike.ts", "node_modules/rxjs/src/internal/observable/innerFrom.ts", "node_modules/rxjs/src/internal/util/executeSchedule.ts", "node_modules/rxjs/src/internal/operators/observeOn.ts", "node_modules/rxjs/src/internal/operators/subscribeOn.ts", "node_modules/rxjs/src/internal/scheduled/scheduleObservable.ts", "node_modules/rxjs/src/internal/scheduled/schedulePromise.ts", "node_modules/rxjs/src/internal/scheduled/scheduleArray.ts", "node_modules/rxjs/src/internal/scheduled/scheduleIterable.ts", "node_modules/rxjs/src/internal/scheduled/scheduleAsyncIterable.ts", "node_modules/rxjs/src/internal/scheduled/scheduleReadableStreamLike.ts", "node_modules/rxjs/src/internal/scheduled/scheduled.ts", "node_modules/rxjs/src/internal/observable/from.ts", "node_modules/rxjs/src/internal/observable/of.ts", "node_modules/rxjs/src/internal/observable/throwError.ts", "node_modules/rxjs/src/internal/util/EmptyError.ts", "node_modules/rxjs/src/internal/util/isDate.ts", "node_modules/rxjs/src/internal/operators/map.ts", "node_modules/rxjs/src/internal/util/mapOneOrManyArgs.ts", "node_modules/rxjs/src/internal/util/argsArgArrayOrObject.ts", "node_modules/rxjs/src/internal/util/createObject.ts", "node_modules/rxjs/src/internal/observable/combineLatest.ts", "node_modules/rxjs/src/internal/operators/mergeInternals.ts", "node_modules/rxjs/src/internal/operators/mergeMap.ts", "node_modules/rxjs/src/internal/operators/mergeAll.ts", 
"node_modules/rxjs/src/internal/operators/concatAll.ts", "node_modules/rxjs/src/internal/observable/concat.ts", "node_modules/rxjs/src/internal/observable/defer.ts", "node_modules/rxjs/src/internal/observable/fromEvent.ts", "node_modules/rxjs/src/internal/observable/fromEventPattern.ts", "node_modules/rxjs/src/internal/observable/timer.ts", "node_modules/rxjs/src/internal/observable/merge.ts", "node_modules/rxjs/src/internal/observable/never.ts", "node_modules/rxjs/src/internal/util/argsOrArgArray.ts", "node_modules/rxjs/src/internal/operators/filter.ts", "node_modules/rxjs/src/internal/observable/zip.ts", "node_modules/rxjs/src/internal/operators/audit.ts", "node_modules/rxjs/src/internal/operators/auditTime.ts", "node_modules/rxjs/src/internal/operators/bufferCount.ts", "node_modules/rxjs/src/internal/operators/catchError.ts", "node_modules/rxjs/src/internal/operators/scanInternals.ts", "node_modules/rxjs/src/internal/operators/combineLatest.ts", "node_modules/rxjs/src/internal/operators/combineLatestWith.ts", "node_modules/rxjs/src/internal/operators/debounce.ts", "node_modules/rxjs/src/internal/operators/debounceTime.ts", "node_modules/rxjs/src/internal/operators/defaultIfEmpty.ts", "node_modules/rxjs/src/internal/operators/take.ts", "node_modules/rxjs/src/internal/operators/ignoreElements.ts", "node_modules/rxjs/src/internal/operators/mapTo.ts", "node_modules/rxjs/src/internal/operators/delayWhen.ts", "node_modules/rxjs/src/internal/operators/delay.ts", "node_modules/rxjs/src/internal/operators/distinctUntilChanged.ts", "node_modules/rxjs/src/internal/operators/distinctUntilKeyChanged.ts", "node_modules/rxjs/src/internal/operators/throwIfEmpty.ts", "node_modules/rxjs/src/internal/operators/endWith.ts", "node_modules/rxjs/src/internal/operators/finalize.ts", "node_modules/rxjs/src/internal/operators/first.ts", "node_modules/rxjs/src/internal/operators/takeLast.ts", "node_modules/rxjs/src/internal/operators/merge.ts", "node_modules/rxjs/src/internal/operators/mergeWith.ts", "node_modules/rxjs/src/internal/operators/repeat.ts", "node_modules/rxjs/src/internal/operators/scan.ts", "node_modules/rxjs/src/internal/operators/share.ts", "node_modules/rxjs/src/internal/operators/shareReplay.ts", "node_modules/rxjs/src/internal/operators/skip.ts", "node_modules/rxjs/src/internal/operators/skipUntil.ts", "node_modules/rxjs/src/internal/operators/startWith.ts", "node_modules/rxjs/src/internal/operators/switchMap.ts", "node_modules/rxjs/src/internal/operators/takeUntil.ts", "node_modules/rxjs/src/internal/operators/takeWhile.ts", "node_modules/rxjs/src/internal/operators/tap.ts", "node_modules/rxjs/src/internal/operators/throttle.ts", "node_modules/rxjs/src/internal/operators/throttleTime.ts", "node_modules/rxjs/src/internal/operators/withLatestFrom.ts", "node_modules/rxjs/src/internal/operators/zip.ts", "node_modules/rxjs/src/internal/operators/zipWith.ts", "src/templates/assets/javascripts/browser/document/index.ts", "src/templates/assets/javascripts/browser/element/_/index.ts", "src/templates/assets/javascripts/browser/element/focus/index.ts", "src/templates/assets/javascripts/browser/element/hover/index.ts", "src/templates/assets/javascripts/utilities/h/index.ts", "src/templates/assets/javascripts/utilities/round/index.ts", "src/templates/assets/javascripts/browser/script/index.ts", "src/templates/assets/javascripts/browser/element/size/_/index.ts", "src/templates/assets/javascripts/browser/element/size/content/index.ts", "src/templates/assets/javascripts/browser/element/offset/_/index.ts", 
"src/templates/assets/javascripts/browser/element/offset/content/index.ts", "src/templates/assets/javascripts/browser/element/visibility/index.ts", "src/templates/assets/javascripts/browser/toggle/index.ts", "src/templates/assets/javascripts/browser/keyboard/index.ts", "src/templates/assets/javascripts/browser/location/_/index.ts", "src/templates/assets/javascripts/browser/location/hash/index.ts", "src/templates/assets/javascripts/browser/media/index.ts", "src/templates/assets/javascripts/browser/request/index.ts", "src/templates/assets/javascripts/browser/viewport/offset/index.ts", "src/templates/assets/javascripts/browser/viewport/size/index.ts", "src/templates/assets/javascripts/browser/viewport/_/index.ts", "src/templates/assets/javascripts/browser/viewport/at/index.ts", "src/templates/assets/javascripts/browser/worker/index.ts", "src/templates/assets/javascripts/_/index.ts", "src/templates/assets/javascripts/components/_/index.ts", "src/templates/assets/javascripts/components/announce/index.ts", "src/templates/assets/javascripts/components/consent/index.ts", "src/templates/assets/javascripts/templates/tooltip/index.tsx", "src/templates/assets/javascripts/templates/annotation/index.tsx", "src/templates/assets/javascripts/templates/clipboard/index.tsx", "src/templates/assets/javascripts/templates/search/index.tsx", "src/templates/assets/javascripts/templates/source/index.tsx", "src/templates/assets/javascripts/templates/tabbed/index.tsx", "src/templates/assets/javascripts/templates/table/index.tsx", "src/templates/assets/javascripts/templates/version/index.tsx", "src/templates/assets/javascripts/components/tooltip2/index.ts", "src/templates/assets/javascripts/components/content/annotation/_/index.ts", "src/templates/assets/javascripts/components/content/annotation/list/index.ts", "src/templates/assets/javascripts/components/content/annotation/block/index.ts", "src/templates/assets/javascripts/components/content/code/_/index.ts", "src/templates/assets/javascripts/components/content/details/index.ts", "src/templates/assets/javascripts/components/content/mermaid/index.css", "src/templates/assets/javascripts/components/content/mermaid/index.ts", "src/templates/assets/javascripts/components/content/table/index.ts", "src/templates/assets/javascripts/components/content/tabs/index.ts", "src/templates/assets/javascripts/components/content/_/index.ts", "src/templates/assets/javascripts/components/dialog/index.ts", "src/templates/assets/javascripts/components/tooltip/index.ts", "src/templates/assets/javascripts/components/header/_/index.ts", "src/templates/assets/javascripts/components/header/title/index.ts", "src/templates/assets/javascripts/components/main/index.ts", "src/templates/assets/javascripts/components/palette/index.ts", "src/templates/assets/javascripts/components/progress/index.ts", "src/templates/assets/javascripts/integrations/clipboard/index.ts", "src/templates/assets/javascripts/integrations/sitemap/index.ts", "src/templates/assets/javascripts/integrations/instant/index.ts", "src/templates/assets/javascripts/integrations/search/highlighter/index.ts", "src/templates/assets/javascripts/integrations/search/worker/message/index.ts", "src/templates/assets/javascripts/integrations/search/worker/_/index.ts", "src/templates/assets/javascripts/integrations/version/index.ts", "src/templates/assets/javascripts/components/search/query/index.ts", "src/templates/assets/javascripts/components/search/result/index.ts", "src/templates/assets/javascripts/components/search/share/index.ts", 
"src/templates/assets/javascripts/components/search/suggest/index.ts", "src/templates/assets/javascripts/components/search/_/index.ts", "src/templates/assets/javascripts/components/search/highlight/index.ts", "src/templates/assets/javascripts/components/sidebar/index.ts", "src/templates/assets/javascripts/components/source/facts/github/index.ts", "src/templates/assets/javascripts/components/source/facts/gitlab/index.ts", "src/templates/assets/javascripts/components/source/facts/_/index.ts", "src/templates/assets/javascripts/components/source/_/index.ts", "src/templates/assets/javascripts/components/tabs/index.ts", "src/templates/assets/javascripts/components/toc/index.ts", "src/templates/assets/javascripts/components/top/index.ts", "src/templates/assets/javascripts/patches/ellipsis/index.ts", "src/templates/assets/javascripts/patches/indeterminate/index.ts", "src/templates/assets/javascripts/patches/scrollfix/index.ts", "src/templates/assets/javascripts/patches/scrolllock/index.ts", "src/templates/assets/javascripts/polyfills/index.ts"], - "sourcesContent": ["(function (global, factory) {\n typeof exports === 'object' && typeof module !== 'undefined' ? factory() :\n typeof define === 'function' && define.amd ? define(factory) :\n (factory());\n}(this, (function () { 'use strict';\n\n /**\n * Applies the :focus-visible polyfill at the given scope.\n * A scope in this case is either the top-level Document or a Shadow Root.\n *\n * @param {(Document|ShadowRoot)} scope\n * @see https://github.com/WICG/focus-visible\n */\n function applyFocusVisiblePolyfill(scope) {\n var hadKeyboardEvent = true;\n var hadFocusVisibleRecently = false;\n var hadFocusVisibleRecentlyTimeout = null;\n\n var inputTypesAllowlist = {\n text: true,\n search: true,\n url: true,\n tel: true,\n email: true,\n password: true,\n number: true,\n date: true,\n month: true,\n week: true,\n time: true,\n datetime: true,\n 'datetime-local': true\n };\n\n /**\n * Helper function for legacy browsers and iframes which sometimes focus\n * elements like document, body, and non-interactive SVG.\n * @param {Element} el\n */\n function isValidFocusTarget(el) {\n if (\n el &&\n el !== document &&\n el.nodeName !== 'HTML' &&\n el.nodeName !== 'BODY' &&\n 'classList' in el &&\n 'contains' in el.classList\n ) {\n return true;\n }\n return false;\n }\n\n /**\n * Computes whether the given element should automatically trigger the\n * `focus-visible` class being added, i.e. 
whether it should always match\n * `:focus-visible` when focused.\n * @param {Element} el\n * @return {boolean}\n */\n function focusTriggersKeyboardModality(el) {\n var type = el.type;\n var tagName = el.tagName;\n\n if (tagName === 'INPUT' && inputTypesAllowlist[type] && !el.readOnly) {\n return true;\n }\n\n if (tagName === 'TEXTAREA' && !el.readOnly) {\n return true;\n }\n\n if (el.isContentEditable) {\n return true;\n }\n\n return false;\n }\n\n /**\n * Add the `focus-visible` class to the given element if it was not added by\n * the author.\n * @param {Element} el\n */\n function addFocusVisibleClass(el) {\n if (el.classList.contains('focus-visible')) {\n return;\n }\n el.classList.add('focus-visible');\n el.setAttribute('data-focus-visible-added', '');\n }\n\n /**\n * Remove the `focus-visible` class from the given element if it was not\n * originally added by the author.\n * @param {Element} el\n */\n function removeFocusVisibleClass(el) {\n if (!el.hasAttribute('data-focus-visible-added')) {\n return;\n }\n el.classList.remove('focus-visible');\n el.removeAttribute('data-focus-visible-added');\n }\n\n /**\n * If the most recent user interaction was via the keyboard;\n * and the key press did not include a meta, alt/option, or control key;\n * then the modality is keyboard. Otherwise, the modality is not keyboard.\n * Apply `focus-visible` to any current active element and keep track\n * of our keyboard modality state with `hadKeyboardEvent`.\n * @param {KeyboardEvent} e\n */\n function onKeyDown(e) {\n if (e.metaKey || e.altKey || e.ctrlKey) {\n return;\n }\n\n if (isValidFocusTarget(scope.activeElement)) {\n addFocusVisibleClass(scope.activeElement);\n }\n\n hadKeyboardEvent = true;\n }\n\n /**\n * If at any point a user clicks with a pointing device, ensure that we change\n * the modality away from keyboard.\n * This avoids the situation where a user presses a key on an already focused\n * element, and then clicks on a different element, focusing it with a\n * pointing device, while we still think we're in keyboard modality.\n * @param {Event} e\n */\n function onPointerDown(e) {\n hadKeyboardEvent = false;\n }\n\n /**\n * On `focus`, add the `focus-visible` class to the target if:\n * - the target received focus as a result of keyboard navigation, or\n * - the event target is an element that will likely require interaction\n * via the keyboard (e.g. 
a text box)\n * @param {Event} e\n */\n function onFocus(e) {\n // Prevent IE from focusing the document or HTML element.\n if (!isValidFocusTarget(e.target)) {\n return;\n }\n\n if (hadKeyboardEvent || focusTriggersKeyboardModality(e.target)) {\n addFocusVisibleClass(e.target);\n }\n }\n\n /**\n * On `blur`, remove the `focus-visible` class from the target.\n * @param {Event} e\n */\n function onBlur(e) {\n if (!isValidFocusTarget(e.target)) {\n return;\n }\n\n if (\n e.target.classList.contains('focus-visible') ||\n e.target.hasAttribute('data-focus-visible-added')\n ) {\n // To detect a tab/window switch, we look for a blur event followed\n // rapidly by a visibility change.\n // If we don't see a visibility change within 100ms, it's probably a\n // regular focus change.\n hadFocusVisibleRecently = true;\n window.clearTimeout(hadFocusVisibleRecentlyTimeout);\n hadFocusVisibleRecentlyTimeout = window.setTimeout(function() {\n hadFocusVisibleRecently = false;\n }, 100);\n removeFocusVisibleClass(e.target);\n }\n }\n\n /**\n * If the user changes tabs, keep track of whether or not the previously\n * focused element had .focus-visible.\n * @param {Event} e\n */\n function onVisibilityChange(e) {\n if (document.visibilityState === 'hidden') {\n // If the tab becomes active again, the browser will handle calling focus\n // on the element (Safari actually calls it twice).\n // If this tab change caused a blur on an element with focus-visible,\n // re-apply the class when the user switches back to the tab.\n if (hadFocusVisibleRecently) {\n hadKeyboardEvent = true;\n }\n addInitialPointerMoveListeners();\n }\n }\n\n /**\n * Add a group of listeners to detect usage of any pointing devices.\n * These listeners will be added when the polyfill first loads, and anytime\n * the window is blurred, so that they are active when the window regains\n * focus.\n */\n function addInitialPointerMoveListeners() {\n document.addEventListener('mousemove', onInitialPointerMove);\n document.addEventListener('mousedown', onInitialPointerMove);\n document.addEventListener('mouseup', onInitialPointerMove);\n document.addEventListener('pointermove', onInitialPointerMove);\n document.addEventListener('pointerdown', onInitialPointerMove);\n document.addEventListener('pointerup', onInitialPointerMove);\n document.addEventListener('touchmove', onInitialPointerMove);\n document.addEventListener('touchstart', onInitialPointerMove);\n document.addEventListener('touchend', onInitialPointerMove);\n }\n\n function removeInitialPointerMoveListeners() {\n document.removeEventListener('mousemove', onInitialPointerMove);\n document.removeEventListener('mousedown', onInitialPointerMove);\n document.removeEventListener('mouseup', onInitialPointerMove);\n document.removeEventListener('pointermove', onInitialPointerMove);\n document.removeEventListener('pointerdown', onInitialPointerMove);\n document.removeEventListener('pointerup', onInitialPointerMove);\n document.removeEventListener('touchmove', onInitialPointerMove);\n document.removeEventListener('touchstart', onInitialPointerMove);\n document.removeEventListener('touchend', onInitialPointerMove);\n }\n\n /**\n * When the polfyill first loads, assume the user is in keyboard modality.\n * If any event is received from a pointing device (e.g. 
mouse, pointer,\n * touch), turn off keyboard modality.\n * This accounts for situations where focus enters the page from the URL bar.\n * @param {Event} e\n */\n function onInitialPointerMove(e) {\n // Work around a Safari quirk that fires a mousemove on whenever the\n // window blurs, even if you're tabbing out of the page. \u00AF\\_(\u30C4)_/\u00AF\n if (e.target.nodeName && e.target.nodeName.toLowerCase() === 'html') {\n return;\n }\n\n hadKeyboardEvent = false;\n removeInitialPointerMoveListeners();\n }\n\n // For some kinds of state, we are interested in changes at the global scope\n // only. For example, global pointer input, global key presses and global\n // visibility change should affect the state at every scope:\n document.addEventListener('keydown', onKeyDown, true);\n document.addEventListener('mousedown', onPointerDown, true);\n document.addEventListener('pointerdown', onPointerDown, true);\n document.addEventListener('touchstart', onPointerDown, true);\n document.addEventListener('visibilitychange', onVisibilityChange, true);\n\n addInitialPointerMoveListeners();\n\n // For focus and blur, we specifically care about state changes in the local\n // scope. This is because focus / blur events that originate from within a\n // shadow root are not re-dispatched from the host element if it was already\n // the active element in its own scope:\n scope.addEventListener('focus', onFocus, true);\n scope.addEventListener('blur', onBlur, true);\n\n // We detect that a node is a ShadowRoot by ensuring that it is a\n // DocumentFragment and also has a host property. This check covers native\n // implementation and polyfill implementation transparently. If we only cared\n // about the native implementation, we could just check if the scope was\n // an instance of a ShadowRoot.\n if (scope.nodeType === Node.DOCUMENT_FRAGMENT_NODE && scope.host) {\n // Since a ShadowRoot is a special kind of DocumentFragment, it does not\n // have a root element to add a class to. So, we add this attribute to the\n // host element instead:\n scope.host.setAttribute('data-js-focus-visible', '');\n } else if (scope.nodeType === Node.DOCUMENT_NODE) {\n document.documentElement.classList.add('js-focus-visible');\n document.documentElement.setAttribute('data-js-focus-visible', '');\n }\n }\n\n // It is important to wrap all references to global window and document in\n // these checks to support server-side rendering use cases\n // @see https://github.com/WICG/focus-visible/issues/199\n if (typeof window !== 'undefined' && typeof document !== 'undefined') {\n // Make the polyfill helper globally available. 
This can be used as a signal\n // to interested libraries that wish to coordinate with the polyfill for e.g.,\n // applying the polyfill to a shadow root:\n window.applyFocusVisiblePolyfill = applyFocusVisiblePolyfill;\n\n // Notify interested libraries of the polyfill's presence, in case the\n // polyfill was loaded lazily:\n var event;\n\n try {\n event = new CustomEvent('focus-visible-polyfill-ready');\n } catch (error) {\n // IE11 does not support using CustomEvent as a constructor directly:\n event = document.createEvent('CustomEvent');\n event.initCustomEvent('focus-visible-polyfill-ready', false, false, {});\n }\n\n window.dispatchEvent(event);\n }\n\n if (typeof document !== 'undefined') {\n // Apply the polyfill to the global document, so that no JavaScript\n // coordination is required to use the polyfill in the top-level document:\n applyFocusVisiblePolyfill(document);\n }\n\n})));\n", "/*!\n * clipboard.js v2.0.11\n * https://clipboardjs.com/\n *\n * Licensed MIT \u00A9 Zeno Rocha\n */\n(function webpackUniversalModuleDefinition(root, factory) {\n\tif(typeof exports === 'object' && typeof module === 'object')\n\t\tmodule.exports = factory();\n\telse if(typeof define === 'function' && define.amd)\n\t\tdefine([], factory);\n\telse if(typeof exports === 'object')\n\t\texports[\"ClipboardJS\"] = factory();\n\telse\n\t\troot[\"ClipboardJS\"] = factory();\n})(this, function() {\nreturn /******/ (function() { // webpackBootstrap\n/******/ \tvar __webpack_modules__ = ({\n\n/***/ 686:\n/***/ (function(__unused_webpack_module, __webpack_exports__, __webpack_require__) {\n\n\"use strict\";\n\n// EXPORTS\n__webpack_require__.d(__webpack_exports__, {\n \"default\": function() { return /* binding */ clipboard; }\n});\n\n// EXTERNAL MODULE: ./node_modules/tiny-emitter/index.js\nvar tiny_emitter = __webpack_require__(279);\nvar tiny_emitter_default = /*#__PURE__*/__webpack_require__.n(tiny_emitter);\n// EXTERNAL MODULE: ./node_modules/good-listener/src/listen.js\nvar listen = __webpack_require__(370);\nvar listen_default = /*#__PURE__*/__webpack_require__.n(listen);\n// EXTERNAL MODULE: ./node_modules/select/src/select.js\nvar src_select = __webpack_require__(817);\nvar select_default = /*#__PURE__*/__webpack_require__.n(src_select);\n;// CONCATENATED MODULE: ./src/common/command.js\n/**\n * Executes a given operation type.\n * @param {String} type\n * @return {Boolean}\n */\nfunction command(type) {\n try {\n return document.execCommand(type);\n } catch (err) {\n return false;\n }\n}\n;// CONCATENATED MODULE: ./src/actions/cut.js\n\n\n/**\n * Cut action wrapper.\n * @param {String|HTMLElement} target\n * @return {String}\n */\n\nvar ClipboardActionCut = function ClipboardActionCut(target) {\n var selectedText = select_default()(target);\n command('cut');\n return selectedText;\n};\n\n/* harmony default export */ var actions_cut = (ClipboardActionCut);\n;// CONCATENATED MODULE: ./src/common/create-fake-element.js\n/**\n * Creates a fake textarea element with a value.\n * @param {String} value\n * @return {HTMLElement}\n */\nfunction createFakeElement(value) {\n var isRTL = document.documentElement.getAttribute('dir') === 'rtl';\n var fakeElement = document.createElement('textarea'); // Prevent zooming on iOS\n\n fakeElement.style.fontSize = '12pt'; // Reset box model\n\n fakeElement.style.border = '0';\n fakeElement.style.padding = '0';\n fakeElement.style.margin = '0'; // Move element out of screen horizontally\n\n fakeElement.style.position = 'absolute';\n fakeElement.style[isRTL ? 
'right' : 'left'] = '-9999px'; // Move element to the same position vertically\n\n var yPosition = window.pageYOffset || document.documentElement.scrollTop;\n fakeElement.style.top = \"\".concat(yPosition, \"px\");\n fakeElement.setAttribute('readonly', '');\n fakeElement.value = value;\n return fakeElement;\n}\n;// CONCATENATED MODULE: ./src/actions/copy.js\n\n\n\n/**\n * Create fake copy action wrapper using a fake element.\n * @param {String} target\n * @param {Object} options\n * @return {String}\n */\n\nvar fakeCopyAction = function fakeCopyAction(value, options) {\n var fakeElement = createFakeElement(value);\n options.container.appendChild(fakeElement);\n var selectedText = select_default()(fakeElement);\n command('copy');\n fakeElement.remove();\n return selectedText;\n};\n/**\n * Copy action wrapper.\n * @param {String|HTMLElement} target\n * @param {Object} options\n * @return {String}\n */\n\n\nvar ClipboardActionCopy = function ClipboardActionCopy(target) {\n var options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {\n container: document.body\n };\n var selectedText = '';\n\n if (typeof target === 'string') {\n selectedText = fakeCopyAction(target, options);\n } else if (target instanceof HTMLInputElement && !['text', 'search', 'url', 'tel', 'password'].includes(target === null || target === void 0 ? void 0 : target.type)) {\n // If input type doesn't support `setSelectionRange`. Simulate it. https://developer.mozilla.org/en-US/docs/Web/API/HTMLInputElement/setSelectionRange\n selectedText = fakeCopyAction(target.value, options);\n } else {\n selectedText = select_default()(target);\n command('copy');\n }\n\n return selectedText;\n};\n\n/* harmony default export */ var actions_copy = (ClipboardActionCopy);\n;// CONCATENATED MODULE: ./src/actions/default.js\nfunction _typeof(obj) { \"@babel/helpers - typeof\"; if (typeof Symbol === \"function\" && typeof Symbol.iterator === \"symbol\") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === \"function\" && obj.constructor === Symbol && obj !== Symbol.prototype ? \"symbol\" : typeof obj; }; } return _typeof(obj); }\n\n\n\n/**\n * Inner function which performs selection from either `text` or `target`\n * properties and then executes copy or cut operations.\n * @param {Object} options\n */\n\nvar ClipboardActionDefault = function ClipboardActionDefault() {\n var options = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};\n // Defines base properties passed from constructor.\n var _options$action = options.action,\n action = _options$action === void 0 ? 'copy' : _options$action,\n container = options.container,\n target = options.target,\n text = options.text; // Sets the `action` to be performed which can be either 'copy' or 'cut'.\n\n if (action !== 'copy' && action !== 'cut') {\n throw new Error('Invalid \"action\" value, use either \"copy\" or \"cut\"');\n } // Sets the `target` property using an element that will be have its content copied.\n\n\n if (target !== undefined) {\n if (target && _typeof(target) === 'object' && target.nodeType === 1) {\n if (action === 'copy' && target.hasAttribute('disabled')) {\n throw new Error('Invalid \"target\" attribute. Please use \"readonly\" instead of \"disabled\" attribute');\n }\n\n if (action === 'cut' && (target.hasAttribute('readonly') || target.hasAttribute('disabled'))) {\n throw new Error('Invalid \"target\" attribute. 
You can\\'t cut text from elements with \"readonly\" or \"disabled\" attributes');\n }\n } else {\n throw new Error('Invalid \"target\" value, use a valid Element');\n }\n } // Define selection strategy based on `text` property.\n\n\n if (text) {\n return actions_copy(text, {\n container: container\n });\n } // Defines which selection strategy based on `target` property.\n\n\n if (target) {\n return action === 'cut' ? actions_cut(target) : actions_copy(target, {\n container: container\n });\n }\n};\n\n/* harmony default export */ var actions_default = (ClipboardActionDefault);\n;// CONCATENATED MODULE: ./src/clipboard.js\nfunction clipboard_typeof(obj) { \"@babel/helpers - typeof\"; if (typeof Symbol === \"function\" && typeof Symbol.iterator === \"symbol\") { clipboard_typeof = function _typeof(obj) { return typeof obj; }; } else { clipboard_typeof = function _typeof(obj) { return obj && typeof Symbol === \"function\" && obj.constructor === Symbol && obj !== Symbol.prototype ? \"symbol\" : typeof obj; }; } return clipboard_typeof(obj); }\n\nfunction _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError(\"Cannot call a class as a function\"); } }\n\nfunction _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if (\"value\" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }\n\nfunction _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; }\n\nfunction _inherits(subClass, superClass) { if (typeof superClass !== \"function\" && superClass !== null) { throw new TypeError(\"Super expression must either be null or a function\"); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) _setPrototypeOf(subClass, superClass); }\n\nfunction _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }\n\nfunction _createSuper(Derived) { var hasNativeReflectConstruct = _isNativeReflectConstruct(); return function _createSuperInternal() { var Super = _getPrototypeOf(Derived), result; if (hasNativeReflectConstruct) { var NewTarget = _getPrototypeOf(this).constructor; result = Reflect.construct(Super, arguments, NewTarget); } else { result = Super.apply(this, arguments); } return _possibleConstructorReturn(this, result); }; }\n\nfunction _possibleConstructorReturn(self, call) { if (call && (clipboard_typeof(call) === \"object\" || typeof call === \"function\")) { return call; } return _assertThisInitialized(self); }\n\nfunction _assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError(\"this hasn't been initialised - super() hasn't been called\"); } return self; }\n\nfunction _isNativeReflectConstruct() { if (typeof Reflect === \"undefined\" || !Reflect.construct) return false; if (Reflect.construct.sham) return false; if (typeof Proxy === \"function\") return true; try { Date.prototype.toString.call(Reflect.construct(Date, [], function () {})); return true; } catch (e) { return false; } }\n\nfunction _getPrototypeOf(o) { _getPrototypeOf = Object.setPrototypeOf ? 
Object.getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || Object.getPrototypeOf(o); }; return _getPrototypeOf(o); }\n\n\n\n\n\n\n/**\n * Helper function to retrieve attribute value.\n * @param {String} suffix\n * @param {Element} element\n */\n\nfunction getAttributeValue(suffix, element) {\n var attribute = \"data-clipboard-\".concat(suffix);\n\n if (!element.hasAttribute(attribute)) {\n return;\n }\n\n return element.getAttribute(attribute);\n}\n/**\n * Base class which takes one or more elements, adds event listeners to them,\n * and instantiates a new `ClipboardAction` on each click.\n */\n\n\nvar Clipboard = /*#__PURE__*/function (_Emitter) {\n _inherits(Clipboard, _Emitter);\n\n var _super = _createSuper(Clipboard);\n\n /**\n * @param {String|HTMLElement|HTMLCollection|NodeList} trigger\n * @param {Object} options\n */\n function Clipboard(trigger, options) {\n var _this;\n\n _classCallCheck(this, Clipboard);\n\n _this = _super.call(this);\n\n _this.resolveOptions(options);\n\n _this.listenClick(trigger);\n\n return _this;\n }\n /**\n * Defines if attributes would be resolved using internal setter functions\n * or custom functions that were passed in the constructor.\n * @param {Object} options\n */\n\n\n _createClass(Clipboard, [{\n key: \"resolveOptions\",\n value: function resolveOptions() {\n var options = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};\n this.action = typeof options.action === 'function' ? options.action : this.defaultAction;\n this.target = typeof options.target === 'function' ? options.target : this.defaultTarget;\n this.text = typeof options.text === 'function' ? options.text : this.defaultText;\n this.container = clipboard_typeof(options.container) === 'object' ? options.container : document.body;\n }\n /**\n * Adds a click event listener to the passed trigger.\n * @param {String|HTMLElement|HTMLCollection|NodeList} trigger\n */\n\n }, {\n key: \"listenClick\",\n value: function listenClick(trigger) {\n var _this2 = this;\n\n this.listener = listen_default()(trigger, 'click', function (e) {\n return _this2.onClick(e);\n });\n }\n /**\n * Defines a new `ClipboardAction` on each click event.\n * @param {Event} e\n */\n\n }, {\n key: \"onClick\",\n value: function onClick(e) {\n var trigger = e.delegateTarget || e.currentTarget;\n var action = this.action(trigger) || 'copy';\n var text = actions_default({\n action: action,\n container: this.container,\n target: this.target(trigger),\n text: this.text(trigger)\n }); // Fires an event based on the copy operation result.\n\n this.emit(text ? 
'success' : 'error', {\n action: action,\n text: text,\n trigger: trigger,\n clearSelection: function clearSelection() {\n if (trigger) {\n trigger.focus();\n }\n\n window.getSelection().removeAllRanges();\n }\n });\n }\n /**\n * Default `action` lookup function.\n * @param {Element} trigger\n */\n\n }, {\n key: \"defaultAction\",\n value: function defaultAction(trigger) {\n return getAttributeValue('action', trigger);\n }\n /**\n * Default `target` lookup function.\n * @param {Element} trigger\n */\n\n }, {\n key: \"defaultTarget\",\n value: function defaultTarget(trigger) {\n var selector = getAttributeValue('target', trigger);\n\n if (selector) {\n return document.querySelector(selector);\n }\n }\n /**\n * Allow fire programmatically a copy action\n * @param {String|HTMLElement} target\n * @param {Object} options\n * @returns Text copied.\n */\n\n }, {\n key: \"defaultText\",\n\n /**\n * Default `text` lookup function.\n * @param {Element} trigger\n */\n value: function defaultText(trigger) {\n return getAttributeValue('text', trigger);\n }\n /**\n * Destroy lifecycle.\n */\n\n }, {\n key: \"destroy\",\n value: function destroy() {\n this.listener.destroy();\n }\n }], [{\n key: \"copy\",\n value: function copy(target) {\n var options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {\n container: document.body\n };\n return actions_copy(target, options);\n }\n /**\n * Allow fire programmatically a cut action\n * @param {String|HTMLElement} target\n * @returns Text cutted.\n */\n\n }, {\n key: \"cut\",\n value: function cut(target) {\n return actions_cut(target);\n }\n /**\n * Returns the support of the given action, or all actions if no action is\n * given.\n * @param {String} [action]\n */\n\n }, {\n key: \"isSupported\",\n value: function isSupported() {\n var action = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : ['copy', 'cut'];\n var actions = typeof action === 'string' ? 
[action] : action;\n var support = !!document.queryCommandSupported;\n actions.forEach(function (action) {\n support = support && !!document.queryCommandSupported(action);\n });\n return support;\n }\n }]);\n\n return Clipboard;\n}((tiny_emitter_default()));\n\n/* harmony default export */ var clipboard = (Clipboard);\n\n/***/ }),\n\n/***/ 828:\n/***/ (function(module) {\n\nvar DOCUMENT_NODE_TYPE = 9;\n\n/**\n * A polyfill for Element.matches()\n */\nif (typeof Element !== 'undefined' && !Element.prototype.matches) {\n var proto = Element.prototype;\n\n proto.matches = proto.matchesSelector ||\n proto.mozMatchesSelector ||\n proto.msMatchesSelector ||\n proto.oMatchesSelector ||\n proto.webkitMatchesSelector;\n}\n\n/**\n * Finds the closest parent that matches a selector.\n *\n * @param {Element} element\n * @param {String} selector\n * @return {Function}\n */\nfunction closest (element, selector) {\n while (element && element.nodeType !== DOCUMENT_NODE_TYPE) {\n if (typeof element.matches === 'function' &&\n element.matches(selector)) {\n return element;\n }\n element = element.parentNode;\n }\n}\n\nmodule.exports = closest;\n\n\n/***/ }),\n\n/***/ 438:\n/***/ (function(module, __unused_webpack_exports, __webpack_require__) {\n\nvar closest = __webpack_require__(828);\n\n/**\n * Delegates event to a selector.\n *\n * @param {Element} element\n * @param {String} selector\n * @param {String} type\n * @param {Function} callback\n * @param {Boolean} useCapture\n * @return {Object}\n */\nfunction _delegate(element, selector, type, callback, useCapture) {\n var listenerFn = listener.apply(this, arguments);\n\n element.addEventListener(type, listenerFn, useCapture);\n\n return {\n destroy: function() {\n element.removeEventListener(type, listenerFn, useCapture);\n }\n }\n}\n\n/**\n * Delegates event to a selector.\n *\n * @param {Element|String|Array} [elements]\n * @param {String} selector\n * @param {String} type\n * @param {Function} callback\n * @param {Boolean} useCapture\n * @return {Object}\n */\nfunction delegate(elements, selector, type, callback, useCapture) {\n // Handle the regular Element usage\n if (typeof elements.addEventListener === 'function') {\n return _delegate.apply(null, arguments);\n }\n\n // Handle Element-less usage, it defaults to global delegation\n if (typeof type === 'function') {\n // Use `document` as the first parameter, then apply arguments\n // This is a short way to .unshift `arguments` without running into deoptimizations\n return _delegate.bind(null, document).apply(null, arguments);\n }\n\n // Handle Selector-based usage\n if (typeof elements === 'string') {\n elements = document.querySelectorAll(elements);\n }\n\n // Handle Array-like based usage\n return Array.prototype.map.call(elements, function (element) {\n return _delegate(element, selector, type, callback, useCapture);\n });\n}\n\n/**\n * Finds closest match and invokes callback.\n *\n * @param {Element} element\n * @param {String} selector\n * @param {String} type\n * @param {Function} callback\n * @return {Function}\n */\nfunction listener(element, selector, type, callback) {\n return function(e) {\n e.delegateTarget = closest(e.target, selector);\n\n if (e.delegateTarget) {\n callback.call(element, e);\n }\n }\n}\n\nmodule.exports = delegate;\n\n\n/***/ }),\n\n/***/ 879:\n/***/ (function(__unused_webpack_module, exports) {\n\n/**\n * Check if argument is a HTML element.\n *\n * @param {Object} value\n * @return {Boolean}\n */\nexports.node = function(value) {\n return value !== undefined\n && 
value instanceof HTMLElement\n && value.nodeType === 1;\n};\n\n/**\n * Check if argument is a list of HTML elements.\n *\n * @param {Object} value\n * @return {Boolean}\n */\nexports.nodeList = function(value) {\n var type = Object.prototype.toString.call(value);\n\n return value !== undefined\n && (type === '[object NodeList]' || type === '[object HTMLCollection]')\n && ('length' in value)\n && (value.length === 0 || exports.node(value[0]));\n};\n\n/**\n * Check if argument is a string.\n *\n * @param {Object} value\n * @return {Boolean}\n */\nexports.string = function(value) {\n return typeof value === 'string'\n || value instanceof String;\n};\n\n/**\n * Check if argument is a function.\n *\n * @param {Object} value\n * @return {Boolean}\n */\nexports.fn = function(value) {\n var type = Object.prototype.toString.call(value);\n\n return type === '[object Function]';\n};\n\n\n/***/ }),\n\n/***/ 370:\n/***/ (function(module, __unused_webpack_exports, __webpack_require__) {\n\nvar is = __webpack_require__(879);\nvar delegate = __webpack_require__(438);\n\n/**\n * Validates all params and calls the right\n * listener function based on its target type.\n *\n * @param {String|HTMLElement|HTMLCollection|NodeList} target\n * @param {String} type\n * @param {Function} callback\n * @return {Object}\n */\nfunction listen(target, type, callback) {\n if (!target && !type && !callback) {\n throw new Error('Missing required arguments');\n }\n\n if (!is.string(type)) {\n throw new TypeError('Second argument must be a String');\n }\n\n if (!is.fn(callback)) {\n throw new TypeError('Third argument must be a Function');\n }\n\n if (is.node(target)) {\n return listenNode(target, type, callback);\n }\n else if (is.nodeList(target)) {\n return listenNodeList(target, type, callback);\n }\n else if (is.string(target)) {\n return listenSelector(target, type, callback);\n }\n else {\n throw new TypeError('First argument must be a String, HTMLElement, HTMLCollection, or NodeList');\n }\n}\n\n/**\n * Adds an event listener to a HTML element\n * and returns a remove listener function.\n *\n * @param {HTMLElement} node\n * @param {String} type\n * @param {Function} callback\n * @return {Object}\n */\nfunction listenNode(node, type, callback) {\n node.addEventListener(type, callback);\n\n return {\n destroy: function() {\n node.removeEventListener(type, callback);\n }\n }\n}\n\n/**\n * Add an event listener to a list of HTML elements\n * and returns a remove listener function.\n *\n * @param {NodeList|HTMLCollection} nodeList\n * @param {String} type\n * @param {Function} callback\n * @return {Object}\n */\nfunction listenNodeList(nodeList, type, callback) {\n Array.prototype.forEach.call(nodeList, function(node) {\n node.addEventListener(type, callback);\n });\n\n return {\n destroy: function() {\n Array.prototype.forEach.call(nodeList, function(node) {\n node.removeEventListener(type, callback);\n });\n }\n }\n}\n\n/**\n * Add an event listener to a selector\n * and returns a remove listener function.\n *\n * @param {String} selector\n * @param {String} type\n * @param {Function} callback\n * @return {Object}\n */\nfunction listenSelector(selector, type, callback) {\n return delegate(document.body, selector, type, callback);\n}\n\nmodule.exports = listen;\n\n\n/***/ }),\n\n/***/ 817:\n/***/ (function(module) {\n\nfunction select(element) {\n var selectedText;\n\n if (element.nodeName === 'SELECT') {\n element.focus();\n\n selectedText = element.value;\n }\n else if (element.nodeName === 'INPUT' || element.nodeName 
=== 'TEXTAREA') {\n var isReadOnly = element.hasAttribute('readonly');\n\n if (!isReadOnly) {\n element.setAttribute('readonly', '');\n }\n\n element.select();\n element.setSelectionRange(0, element.value.length);\n\n if (!isReadOnly) {\n element.removeAttribute('readonly');\n }\n\n selectedText = element.value;\n }\n else {\n if (element.hasAttribute('contenteditable')) {\n element.focus();\n }\n\n var selection = window.getSelection();\n var range = document.createRange();\n\n range.selectNodeContents(element);\n selection.removeAllRanges();\n selection.addRange(range);\n\n selectedText = selection.toString();\n }\n\n return selectedText;\n}\n\nmodule.exports = select;\n\n\n/***/ }),\n\n/***/ 279:\n/***/ (function(module) {\n\nfunction E () {\n // Keep this empty so it's easier to inherit from\n // (via https://github.com/lipsmack from https://github.com/scottcorgan/tiny-emitter/issues/3)\n}\n\nE.prototype = {\n on: function (name, callback, ctx) {\n var e = this.e || (this.e = {});\n\n (e[name] || (e[name] = [])).push({\n fn: callback,\n ctx: ctx\n });\n\n return this;\n },\n\n once: function (name, callback, ctx) {\n var self = this;\n function listener () {\n self.off(name, listener);\n callback.apply(ctx, arguments);\n };\n\n listener._ = callback\n return this.on(name, listener, ctx);\n },\n\n emit: function (name) {\n var data = [].slice.call(arguments, 1);\n var evtArr = ((this.e || (this.e = {}))[name] || []).slice();\n var i = 0;\n var len = evtArr.length;\n\n for (i; i < len; i++) {\n evtArr[i].fn.apply(evtArr[i].ctx, data);\n }\n\n return this;\n },\n\n off: function (name, callback) {\n var e = this.e || (this.e = {});\n var evts = e[name];\n var liveEvents = [];\n\n if (evts && callback) {\n for (var i = 0, len = evts.length; i < len; i++) {\n if (evts[i].fn !== callback && evts[i].fn._ !== callback)\n liveEvents.push(evts[i]);\n }\n }\n\n // Remove event from queue to prevent memory leak\n // Suggested by https://github.com/lazd\n // Ref: https://github.com/scottcorgan/tiny-emitter/commit/c6ebfaa9bc973b33d110a84a307742b7cf94c953#commitcomment-5024910\n\n (liveEvents.length)\n ? 
e[name] = liveEvents\n : delete e[name];\n\n return this;\n }\n};\n\nmodule.exports = E;\nmodule.exports.TinyEmitter = E;\n\n\n/***/ })\n\n/******/ \t});\n/************************************************************************/\n/******/ \t// The module cache\n/******/ \tvar __webpack_module_cache__ = {};\n/******/ \t\n/******/ \t// The require function\n/******/ \tfunction __webpack_require__(moduleId) {\n/******/ \t\t// Check if module is in cache\n/******/ \t\tif(__webpack_module_cache__[moduleId]) {\n/******/ \t\t\treturn __webpack_module_cache__[moduleId].exports;\n/******/ \t\t}\n/******/ \t\t// Create a new module (and put it into the cache)\n/******/ \t\tvar module = __webpack_module_cache__[moduleId] = {\n/******/ \t\t\t// no module.id needed\n/******/ \t\t\t// no module.loaded needed\n/******/ \t\t\texports: {}\n/******/ \t\t};\n/******/ \t\n/******/ \t\t// Execute the module function\n/******/ \t\t__webpack_modules__[moduleId](module, module.exports, __webpack_require__);\n/******/ \t\n/******/ \t\t// Return the exports of the module\n/******/ \t\treturn module.exports;\n/******/ \t}\n/******/ \t\n/************************************************************************/\n/******/ \t/* webpack/runtime/compat get default export */\n/******/ \t!function() {\n/******/ \t\t// getDefaultExport function for compatibility with non-harmony modules\n/******/ \t\t__webpack_require__.n = function(module) {\n/******/ \t\t\tvar getter = module && module.__esModule ?\n/******/ \t\t\t\tfunction() { return module['default']; } :\n/******/ \t\t\t\tfunction() { return module; };\n/******/ \t\t\t__webpack_require__.d(getter, { a: getter });\n/******/ \t\t\treturn getter;\n/******/ \t\t};\n/******/ \t}();\n/******/ \t\n/******/ \t/* webpack/runtime/define property getters */\n/******/ \t!function() {\n/******/ \t\t// define getter functions for harmony exports\n/******/ \t\t__webpack_require__.d = function(exports, definition) {\n/******/ \t\t\tfor(var key in definition) {\n/******/ \t\t\t\tif(__webpack_require__.o(definition, key) && !__webpack_require__.o(exports, key)) {\n/******/ \t\t\t\t\tObject.defineProperty(exports, key, { enumerable: true, get: definition[key] });\n/******/ \t\t\t\t}\n/******/ \t\t\t}\n/******/ \t\t};\n/******/ \t}();\n/******/ \t\n/******/ \t/* webpack/runtime/hasOwnProperty shorthand */\n/******/ \t!function() {\n/******/ \t\t__webpack_require__.o = function(obj, prop) { return Object.prototype.hasOwnProperty.call(obj, prop); }\n/******/ \t}();\n/******/ \t\n/************************************************************************/\n/******/ \t// module exports must be returned from runtime so entry inlining is disabled\n/******/ \t// startup\n/******/ \t// Load entry module and return exports\n/******/ \treturn __webpack_require__(686);\n/******/ })()\n.default;\n});", "/*!\n * escape-html\n * Copyright(c) 2012-2013 TJ Holowaychuk\n * Copyright(c) 2015 Andreas Lubbe\n * Copyright(c) 2015 Tiancheng \"Timothy\" Gu\n * MIT Licensed\n */\n\n'use strict';\n\n/**\n * Module variables.\n * @private\n */\n\nvar matchHtmlRegExp = /[\"'&<>]/;\n\n/**\n * Module exports.\n * @public\n */\n\nmodule.exports = escapeHtml;\n\n/**\n * Escape special characters in the given string of html.\n *\n * @param {string} string The string to escape for inserting into HTML\n * @return {string}\n * @public\n */\n\nfunction escapeHtml(string) {\n var str = '' + string;\n var match = matchHtmlRegExp.exec(str);\n\n if (!match) {\n return str;\n }\n\n var escape;\n var html = '';\n var index = 0;\n 
var lastIndex = 0;\n\n for (index = match.index; index < str.length; index++) {\n switch (str.charCodeAt(index)) {\n case 34: // \"\n escape = '"';\n break;\n case 38: // &\n escape = '&';\n break;\n case 39: // '\n escape = ''';\n break;\n case 60: // <\n escape = '<';\n break;\n case 62: // >\n escape = '>';\n break;\n default:\n continue;\n }\n\n if (lastIndex !== index) {\n html += str.substring(lastIndex, index);\n }\n\n lastIndex = index + 1;\n html += escape;\n }\n\n return lastIndex !== index\n ? html + str.substring(lastIndex, index)\n : html;\n}\n", "/*\n * Copyright (c) 2016-2024 Martin Donath \n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to\n * deal in the Software without restriction, including without limitation the\n * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n * sell copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n * IN THE SOFTWARE.\n */\n\nimport \"focus-visible\"\n\nimport {\n EMPTY,\n NEVER,\n Observable,\n Subject,\n defer,\n delay,\n filter,\n map,\n merge,\n mergeWith,\n shareReplay,\n switchMap\n} from \"rxjs\"\n\nimport { configuration, feature } from \"./_\"\nimport {\n at,\n getActiveElement,\n getOptionalElement,\n requestJSON,\n setLocation,\n setToggle,\n watchDocument,\n watchKeyboard,\n watchLocation,\n watchLocationTarget,\n watchMedia,\n watchPrint,\n watchScript,\n watchViewport\n} from \"./browser\"\nimport {\n getComponentElement,\n getComponentElements,\n mountAnnounce,\n mountBackToTop,\n mountConsent,\n mountContent,\n mountDialog,\n mountHeader,\n mountHeaderTitle,\n mountPalette,\n mountProgress,\n mountSearch,\n mountSearchHiglight,\n mountSidebar,\n mountSource,\n mountTableOfContents,\n mountTabs,\n watchHeader,\n watchMain\n} from \"./components\"\nimport {\n SearchIndex,\n setupClipboardJS,\n setupInstantNavigation,\n setupVersionSelector\n} from \"./integrations\"\nimport {\n patchEllipsis,\n patchIndeterminate,\n patchScrollfix,\n patchScrolllock\n} from \"./patches\"\nimport \"./polyfills\"\n\n/* ----------------------------------------------------------------------------\n * Functions - @todo refactor\n * ------------------------------------------------------------------------- */\n\n/**\n * Fetch search index\n *\n * @returns Search index observable\n */\nfunction fetchSearchIndex(): Observable {\n if (location.protocol === \"file:\") {\n return watchScript(\n `${new URL(\"search/search_index.js\", config.base)}`\n )\n .pipe(\n // @ts-ignore - @todo fix typings\n map(() => __index),\n shareReplay(1)\n )\n } else {\n return requestJSON(\n new URL(\"search/search_index.json\", config.base)\n )\n }\n}\n\n/* ----------------------------------------------------------------------------\n * Application\n * 
------------------------------------------------------------------------- */\n\n/* Yay, JavaScript is available */\ndocument.documentElement.classList.remove(\"no-js\")\ndocument.documentElement.classList.add(\"js\")\n\n/* Set up navigation observables and subjects */\nconst document$ = watchDocument()\nconst location$ = watchLocation()\nconst target$ = watchLocationTarget(location$)\nconst keyboard$ = watchKeyboard()\n\n/* Set up media observables */\nconst viewport$ = watchViewport()\nconst tablet$ = watchMedia(\"(min-width: 960px)\")\nconst screen$ = watchMedia(\"(min-width: 1220px)\")\nconst print$ = watchPrint()\n\n/* Retrieve search index, if search is enabled */\nconst config = configuration()\nconst index$ = document.forms.namedItem(\"search\")\n ? fetchSearchIndex()\n : NEVER\n\n/* Set up Clipboard.js integration */\nconst alert$ = new Subject()\nsetupClipboardJS({ alert$ })\n\n/* Set up progress indicator */\nconst progress$ = new Subject()\n\n/* Set up instant navigation, if enabled */\nif (feature(\"navigation.instant\"))\n setupInstantNavigation({ location$, viewport$, progress$ })\n .subscribe(document$)\n\n/* Set up version selector */\nif (config.version?.provider === \"mike\")\n setupVersionSelector({ document$ })\n\n/* Always close drawer and search on navigation */\nmerge(location$, target$)\n .pipe(\n delay(125)\n )\n .subscribe(() => {\n setToggle(\"drawer\", false)\n setToggle(\"search\", false)\n })\n\n/* Set up global keyboard handlers */\nkeyboard$\n .pipe(\n filter(({ mode }) => mode === \"global\")\n )\n .subscribe(key => {\n switch (key.type) {\n\n /* Go to previous page */\n case \"p\":\n case \",\":\n const prev = getOptionalElement(\"link[rel=prev]\")\n if (typeof prev !== \"undefined\")\n setLocation(prev)\n break\n\n /* Go to next page */\n case \"n\":\n case \".\":\n const next = getOptionalElement(\"link[rel=next]\")\n if (typeof next !== \"undefined\")\n setLocation(next)\n break\n\n /* Expand navigation, see https://bit.ly/3ZjG5io */\n case \"Enter\":\n const active = getActiveElement()\n if (active instanceof HTMLLabelElement)\n active.click()\n }\n })\n\n/* Set up patches */\npatchEllipsis({ viewport$, document$ })\npatchIndeterminate({ document$, tablet$ })\npatchScrollfix({ document$ })\npatchScrolllock({ viewport$, tablet$ })\n\n/* Set up header and main area observable */\nconst header$ = watchHeader(getComponentElement(\"header\"), { viewport$ })\nconst main$ = document$\n .pipe(\n map(() => getComponentElement(\"main\")),\n switchMap(el => watchMain(el, { viewport$, header$ })),\n shareReplay(1)\n )\n\n/* Set up control component observables */\nconst control$ = merge(\n\n /* Consent */\n ...getComponentElements(\"consent\")\n .map(el => mountConsent(el, { target$ })),\n\n /* Dialog */\n ...getComponentElements(\"dialog\")\n .map(el => mountDialog(el, { alert$ })),\n\n /* Header */\n ...getComponentElements(\"header\")\n .map(el => mountHeader(el, { viewport$, header$, main$ })),\n\n /* Color palette */\n ...getComponentElements(\"palette\")\n .map(el => mountPalette(el)),\n\n /* Progress bar */\n ...getComponentElements(\"progress\")\n .map(el => mountProgress(el, { progress$ })),\n\n /* Search */\n ...getComponentElements(\"search\")\n .map(el => mountSearch(el, { index$, keyboard$ })),\n\n /* Repository information */\n ...getComponentElements(\"source\")\n .map(el => mountSource(el))\n)\n\n/* Set up content component observables */\nconst content$ = defer(() => merge(\n\n /* Announcement bar */\n ...getComponentElements(\"announce\")\n 
.map(el => mountAnnounce(el)),\n\n /* Content */\n ...getComponentElements(\"content\")\n .map(el => mountContent(el, { viewport$, target$, print$ })),\n\n /* Search highlighting */\n ...getComponentElements(\"content\")\n .map(el => feature(\"search.highlight\")\n ? mountSearchHiglight(el, { index$, location$ })\n : EMPTY\n ),\n\n /* Header title */\n ...getComponentElements(\"header-title\")\n .map(el => mountHeaderTitle(el, { viewport$, header$ })),\n\n /* Sidebar */\n ...getComponentElements(\"sidebar\")\n .map(el => el.getAttribute(\"data-md-type\") === \"navigation\"\n ? at(screen$, () => mountSidebar(el, { viewport$, header$, main$ }))\n : at(tablet$, () => mountSidebar(el, { viewport$, header$, main$ }))\n ),\n\n /* Navigation tabs */\n ...getComponentElements(\"tabs\")\n .map(el => mountTabs(el, { viewport$, header$ })),\n\n /* Table of contents */\n ...getComponentElements(\"toc\")\n .map(el => mountTableOfContents(el, {\n viewport$, header$, main$, target$\n })),\n\n /* Back-to-top button */\n ...getComponentElements(\"top\")\n .map(el => mountBackToTop(el, { viewport$, header$, main$, target$ }))\n))\n\n/* Set up component observables */\nconst component$ = document$\n .pipe(\n switchMap(() => content$),\n mergeWith(control$),\n shareReplay(1)\n )\n\n/* Subscribe to all components */\ncomponent$.subscribe()\n\n/* ----------------------------------------------------------------------------\n * Exports\n * ------------------------------------------------------------------------- */\n\nwindow.document$ = document$ /* Document observable */\nwindow.location$ = location$ /* Location subject */\nwindow.target$ = target$ /* Location target observable */\nwindow.keyboard$ = keyboard$ /* Keyboard observable */\nwindow.viewport$ = viewport$ /* Viewport observable */\nwindow.tablet$ = tablet$ /* Media tablet observable */\nwindow.screen$ = screen$ /* Media screen observable */\nwindow.print$ = print$ /* Media print observable */\nwindow.alert$ = alert$ /* Alert subject */\nwindow.progress$ = progress$ /* Progress indicator subject */\nwindow.component$ = component$ /* Component observable */\n", "/*! *****************************************************************************\r\nCopyright (c) Microsoft Corporation.\r\n\r\nPermission to use, copy, modify, and/or distribute this software for any\r\npurpose with or without fee is hereby granted.\r\n\r\nTHE SOFTWARE IS PROVIDED \"AS IS\" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH\r\nREGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY\r\nAND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,\r\nINDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM\r\nLOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR\r\nOTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR\r\nPERFORMANCE OF THIS SOFTWARE.\r\n***************************************************************************** */\r\n/* global Reflect, Promise */\r\n\r\nvar extendStatics = function(d, b) {\r\n extendStatics = Object.setPrototypeOf ||\r\n ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||\r\n function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };\r\n return extendStatics(d, b);\r\n};\r\n\r\nexport function __extends(d, b) {\r\n if (typeof b !== \"function\" && b !== null)\r\n throw new TypeError(\"Class extends value \" + String(b) + \" is not a constructor or null\");\r\n extendStatics(d, b);\r\n function __() { this.constructor = d; }\r\n d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());\r\n}\r\n\r\nexport var __assign = function() {\r\n __assign = Object.assign || function __assign(t) {\r\n for (var s, i = 1, n = arguments.length; i < n; i++) {\r\n s = arguments[i];\r\n for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p];\r\n }\r\n return t;\r\n }\r\n return __assign.apply(this, arguments);\r\n}\r\n\r\nexport function __rest(s, e) {\r\n var t = {};\r\n for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)\r\n t[p] = s[p];\r\n if (s != null && typeof Object.getOwnPropertySymbols === \"function\")\r\n for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {\r\n if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))\r\n t[p[i]] = s[p[i]];\r\n }\r\n return t;\r\n}\r\n\r\nexport function __decorate(decorators, target, key, desc) {\r\n var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;\r\n if (typeof Reflect === \"object\" && typeof Reflect.decorate === \"function\") r = Reflect.decorate(decorators, target, key, desc);\r\n else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;\r\n return c > 3 && r && Object.defineProperty(target, key, r), r;\r\n}\r\n\r\nexport function __param(paramIndex, decorator) {\r\n return function (target, key) { decorator(target, key, paramIndex); }\r\n}\r\n\r\nexport function __metadata(metadataKey, metadataValue) {\r\n if (typeof Reflect === \"object\" && typeof Reflect.metadata === \"function\") return Reflect.metadata(metadataKey, metadataValue);\r\n}\r\n\r\nexport function __awaiter(thisArg, _arguments, P, generator) {\r\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\r\n return new (P || (P = Promise))(function (resolve, reject) {\r\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\r\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\r\n function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\r\n step((generator = generator.apply(thisArg, _arguments || [])).next());\r\n });\r\n}\r\n\r\nexport function __generator(thisArg, body) {\r\n var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;\r\n return g = { next: verb(0), \"throw\": verb(1), \"return\": verb(2) }, typeof Symbol === \"function\" && (g[Symbol.iterator] = function() { return this; }), g;\r\n function verb(n) { return function (v) { return step([n, v]); }; }\r\n function step(op) {\r\n if (f) throw new TypeError(\"Generator is already executing.\");\r\n while (_) try {\r\n if (f = 1, y && (t = op[0] & 2 ? y[\"return\"] : op[0] ? y[\"throw\"] || ((t = y[\"return\"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;\r\n if (y = 0, t) op = [op[0] & 2, t.value];\r\n switch (op[0]) {\r\n case 0: case 1: t = op; break;\r\n case 4: _.label++; return { value: op[1], done: false };\r\n case 5: _.label++; y = op[1]; op = [0]; continue;\r\n case 7: op = _.ops.pop(); _.trys.pop(); continue;\r\n default:\r\n if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }\r\n if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }\r\n if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }\r\n if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }\r\n if (t[2]) _.ops.pop();\r\n _.trys.pop(); continue;\r\n }\r\n op = body.call(thisArg, _);\r\n } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }\r\n if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };\r\n }\r\n}\r\n\r\nexport var __createBinding = Object.create ? (function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\r\n}) : (function(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n o[k2] = m[k];\r\n});\r\n\r\nexport function __exportStar(m, o) {\r\n for (var p in m) if (p !== \"default\" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p);\r\n}\r\n\r\nexport function __values(o) {\r\n var s = typeof Symbol === \"function\" && Symbol.iterator, m = s && o[s], i = 0;\r\n if (m) return m.call(o);\r\n if (o && typeof o.length === \"number\") return {\r\n next: function () {\r\n if (o && i >= o.length) o = void 0;\r\n return { value: o && o[i++], done: !o };\r\n }\r\n };\r\n throw new TypeError(s ? 
\"Object is not iterable.\" : \"Symbol.iterator is not defined.\");\r\n}\r\n\r\nexport function __read(o, n) {\r\n var m = typeof Symbol === \"function\" && o[Symbol.iterator];\r\n if (!m) return o;\r\n var i = m.call(o), r, ar = [], e;\r\n try {\r\n while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value);\r\n }\r\n catch (error) { e = { error: error }; }\r\n finally {\r\n try {\r\n if (r && !r.done && (m = i[\"return\"])) m.call(i);\r\n }\r\n finally { if (e) throw e.error; }\r\n }\r\n return ar;\r\n}\r\n\r\n/** @deprecated */\r\nexport function __spread() {\r\n for (var ar = [], i = 0; i < arguments.length; i++)\r\n ar = ar.concat(__read(arguments[i]));\r\n return ar;\r\n}\r\n\r\n/** @deprecated */\r\nexport function __spreadArrays() {\r\n for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length;\r\n for (var r = Array(s), k = 0, i = 0; i < il; i++)\r\n for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++)\r\n r[k] = a[j];\r\n return r;\r\n}\r\n\r\nexport function __spreadArray(to, from, pack) {\r\n if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) {\r\n if (ar || !(i in from)) {\r\n if (!ar) ar = Array.prototype.slice.call(from, 0, i);\r\n ar[i] = from[i];\r\n }\r\n }\r\n return to.concat(ar || Array.prototype.slice.call(from));\r\n}\r\n\r\nexport function __await(v) {\r\n return this instanceof __await ? (this.v = v, this) : new __await(v);\r\n}\r\n\r\nexport function __asyncGenerator(thisArg, _arguments, generator) {\r\n if (!Symbol.asyncIterator) throw new TypeError(\"Symbol.asyncIterator is not defined.\");\r\n var g = generator.apply(thisArg, _arguments || []), i, q = [];\r\n return i = {}, verb(\"next\"), verb(\"throw\"), verb(\"return\"), i[Symbol.asyncIterator] = function () { return this; }, i;\r\n function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; }\r\n function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } }\r\n function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); }\r\n function fulfill(value) { resume(\"next\", value); }\r\n function reject(value) { resume(\"throw\", value); }\r\n function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); }\r\n}\r\n\r\nexport function __asyncDelegator(o) {\r\n var i, p;\r\n return i = {}, verb(\"next\"), verb(\"throw\", function (e) { throw e; }), verb(\"return\"), i[Symbol.iterator] = function () { return this; }, i;\r\n function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === \"return\" } : f ? f(v) : v; } : f; }\r\n}\r\n\r\nexport function __asyncValues(o) {\r\n if (!Symbol.asyncIterator) throw new TypeError(\"Symbol.asyncIterator is not defined.\");\r\n var m = o[Symbol.asyncIterator], i;\r\n return m ? m.call(o) : (o = typeof __values === \"function\" ? 
__values(o) : o[Symbol.iterator](), i = {}, verb(\"next\"), verb(\"throw\"), verb(\"return\"), i[Symbol.asyncIterator] = function () { return this; }, i);\r\n function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }\r\n function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }\r\n}\r\n\r\nexport function __makeTemplateObject(cooked, raw) {\r\n if (Object.defineProperty) { Object.defineProperty(cooked, \"raw\", { value: raw }); } else { cooked.raw = raw; }\r\n return cooked;\r\n};\r\n\r\nvar __setModuleDefault = Object.create ? (function(o, v) {\r\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\r\n}) : function(o, v) {\r\n o[\"default\"] = v;\r\n};\r\n\r\nexport function __importStar(mod) {\r\n if (mod && mod.__esModule) return mod;\r\n var result = {};\r\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\r\n __setModuleDefault(result, mod);\r\n return result;\r\n}\r\n\r\nexport function __importDefault(mod) {\r\n return (mod && mod.__esModule) ? mod : { default: mod };\r\n}\r\n\r\nexport function __classPrivateFieldGet(receiver, state, kind, f) {\r\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a getter\");\r\n if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot read private member from an object whose class did not declare it\");\r\n return kind === \"m\" ? f : kind === \"a\" ? f.call(receiver) : f ? f.value : state.get(receiver);\r\n}\r\n\r\nexport function __classPrivateFieldSet(receiver, state, value, kind, f) {\r\n if (kind === \"m\") throw new TypeError(\"Private method is not writable\");\r\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a setter\");\r\n if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot write private member to an object whose class did not declare it\");\r\n return (kind === \"a\" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value;\r\n}\r\n", "/**\n * Returns true if the object is a function.\n * @param value The value to check\n */\nexport function isFunction(value: any): value is (...args: any[]) => any {\n return typeof value === 'function';\n}\n", "/**\n * Used to create Error subclasses until the community moves away from ES5.\n *\n * This is because compiling from TypeScript down to ES5 has issues with subclassing Errors\n * as well as other built-in types: https://github.com/Microsoft/TypeScript/issues/12123\n *\n * @param createImpl A factory function to create the actual constructor implementation. 
The returned\n * function should be a named function that calls `_super` internally.\n */\nexport function createErrorClass(createImpl: (_super: any) => any): T {\n const _super = (instance: any) => {\n Error.call(instance);\n instance.stack = new Error().stack;\n };\n\n const ctorFunc = createImpl(_super);\n ctorFunc.prototype = Object.create(Error.prototype);\n ctorFunc.prototype.constructor = ctorFunc;\n return ctorFunc;\n}\n", "import { createErrorClass } from './createErrorClass';\n\nexport interface UnsubscriptionError extends Error {\n readonly errors: any[];\n}\n\nexport interface UnsubscriptionErrorCtor {\n /**\n * @deprecated Internal implementation detail. Do not construct error instances.\n * Cannot be tagged as internal: https://github.com/ReactiveX/rxjs/issues/6269\n */\n new (errors: any[]): UnsubscriptionError;\n}\n\n/**\n * An error thrown when one or more errors have occurred during the\n * `unsubscribe` of a {@link Subscription}.\n */\nexport const UnsubscriptionError: UnsubscriptionErrorCtor = createErrorClass(\n (_super) =>\n function UnsubscriptionErrorImpl(this: any, errors: (Error | string)[]) {\n _super(this);\n this.message = errors\n ? `${errors.length} errors occurred during unsubscription:\n${errors.map((err, i) => `${i + 1}) ${err.toString()}`).join('\\n ')}`\n : '';\n this.name = 'UnsubscriptionError';\n this.errors = errors;\n }\n);\n", "/**\n * Removes an item from an array, mutating it.\n * @param arr The array to remove the item from\n * @param item The item to remove\n */\nexport function arrRemove(arr: T[] | undefined | null, item: T) {\n if (arr) {\n const index = arr.indexOf(item);\n 0 <= index && arr.splice(index, 1);\n }\n}\n", "import { isFunction } from './util/isFunction';\nimport { UnsubscriptionError } from './util/UnsubscriptionError';\nimport { SubscriptionLike, TeardownLogic, Unsubscribable } from './types';\nimport { arrRemove } from './util/arrRemove';\n\n/**\n * Represents a disposable resource, such as the execution of an Observable. A\n * Subscription has one important method, `unsubscribe`, that takes no argument\n * and just disposes the resource held by the subscription.\n *\n * Additionally, subscriptions may be grouped together through the `add()`\n * method, which will attach a child Subscription to the current Subscription.\n * When a Subscription is unsubscribed, all its children (and its grandchildren)\n * will be unsubscribed as well.\n *\n * @class Subscription\n */\nexport class Subscription implements SubscriptionLike {\n /** @nocollapse */\n public static EMPTY = (() => {\n const empty = new Subscription();\n empty.closed = true;\n return empty;\n })();\n\n /**\n * A flag to indicate whether this Subscription has already been unsubscribed.\n */\n public closed = false;\n\n private _parentage: Subscription[] | Subscription | null = null;\n\n /**\n * The list of registered finalizers to execute upon unsubscription. Adding and removing from this\n * list occurs in the {@link #add} and {@link #remove} methods.\n */\n private _finalizers: Exclude[] | null = null;\n\n /**\n * @param initialTeardown A function executed first as part of the finalization\n * process that is kicked off when {@link #unsubscribe} is called.\n */\n constructor(private initialTeardown?: () => void) {}\n\n /**\n * Disposes the resources held by the subscription. 
May, for instance, cancel\n * an ongoing Observable execution or cancel any other type of work that\n * started when the Subscription was created.\n * @return {void}\n */\n unsubscribe(): void {\n let errors: any[] | undefined;\n\n if (!this.closed) {\n this.closed = true;\n\n // Remove this from it's parents.\n const { _parentage } = this;\n if (_parentage) {\n this._parentage = null;\n if (Array.isArray(_parentage)) {\n for (const parent of _parentage) {\n parent.remove(this);\n }\n } else {\n _parentage.remove(this);\n }\n }\n\n const { initialTeardown: initialFinalizer } = this;\n if (isFunction(initialFinalizer)) {\n try {\n initialFinalizer();\n } catch (e) {\n errors = e instanceof UnsubscriptionError ? e.errors : [e];\n }\n }\n\n const { _finalizers } = this;\n if (_finalizers) {\n this._finalizers = null;\n for (const finalizer of _finalizers) {\n try {\n execFinalizer(finalizer);\n } catch (err) {\n errors = errors ?? [];\n if (err instanceof UnsubscriptionError) {\n errors = [...errors, ...err.errors];\n } else {\n errors.push(err);\n }\n }\n }\n }\n\n if (errors) {\n throw new UnsubscriptionError(errors);\n }\n }\n }\n\n /**\n * Adds a finalizer to this subscription, so that finalization will be unsubscribed/called\n * when this subscription is unsubscribed. If this subscription is already {@link #closed},\n * because it has already been unsubscribed, then whatever finalizer is passed to it\n * will automatically be executed (unless the finalizer itself is also a closed subscription).\n *\n * Closed Subscriptions cannot be added as finalizers to any subscription. Adding a closed\n * subscription to a any subscription will result in no operation. (A noop).\n *\n * Adding a subscription to itself, or adding `null` or `undefined` will not perform any\n * operation at all. (A noop).\n *\n * `Subscription` instances that are added to this instance will automatically remove themselves\n * if they are unsubscribed. Functions and {@link Unsubscribable} objects that you wish to remove\n * will need to be removed manually with {@link #remove}\n *\n * @param teardown The finalization logic to add to this subscription.\n */\n add(teardown: TeardownLogic): void {\n // Only add the finalizer if it's not undefined\n // and don't add a subscription to itself.\n if (teardown && teardown !== this) {\n if (this.closed) {\n // If this subscription is already closed,\n // execute whatever finalizer is handed to it automatically.\n execFinalizer(teardown);\n } else {\n if (teardown instanceof Subscription) {\n // We don't add closed subscriptions, and we don't add the same subscription\n // twice. Subscription unsubscribe is idempotent.\n if (teardown.closed || teardown._hasParent(this)) {\n return;\n }\n teardown._addParent(this);\n }\n (this._finalizers = this._finalizers ?? 
[]).push(teardown);\n }\n }\n }\n\n /**\n * Checks to see if a this subscription already has a particular parent.\n * This will signal that this subscription has already been added to the parent in question.\n * @param parent the parent to check for\n */\n private _hasParent(parent: Subscription) {\n const { _parentage } = this;\n return _parentage === parent || (Array.isArray(_parentage) && _parentage.includes(parent));\n }\n\n /**\n * Adds a parent to this subscription so it can be removed from the parent if it\n * unsubscribes on it's own.\n *\n * NOTE: THIS ASSUMES THAT {@link _hasParent} HAS ALREADY BEEN CHECKED.\n * @param parent The parent subscription to add\n */\n private _addParent(parent: Subscription) {\n const { _parentage } = this;\n this._parentage = Array.isArray(_parentage) ? (_parentage.push(parent), _parentage) : _parentage ? [_parentage, parent] : parent;\n }\n\n /**\n * Called on a child when it is removed via {@link #remove}.\n * @param parent The parent to remove\n */\n private _removeParent(parent: Subscription) {\n const { _parentage } = this;\n if (_parentage === parent) {\n this._parentage = null;\n } else if (Array.isArray(_parentage)) {\n arrRemove(_parentage, parent);\n }\n }\n\n /**\n * Removes a finalizer from this subscription that was previously added with the {@link #add} method.\n *\n * Note that `Subscription` instances, when unsubscribed, will automatically remove themselves\n * from every other `Subscription` they have been added to. This means that using the `remove` method\n * is not a common thing and should be used thoughtfully.\n *\n * If you add the same finalizer instance of a function or an unsubscribable object to a `Subscription` instance\n * more than once, you will need to call `remove` the same number of times to remove all instances.\n *\n * All finalizer instances are removed to free up memory upon unsubscription.\n *\n * @param teardown The finalizer to remove from this subscription\n */\n remove(teardown: Exclude): void {\n const { _finalizers } = this;\n _finalizers && arrRemove(_finalizers, teardown);\n\n if (teardown instanceof Subscription) {\n teardown._removeParent(this);\n }\n }\n}\n\nexport const EMPTY_SUBSCRIPTION = Subscription.EMPTY;\n\nexport function isSubscription(value: any): value is Subscription {\n return (\n value instanceof Subscription ||\n (value && 'closed' in value && isFunction(value.remove) && isFunction(value.add) && isFunction(value.unsubscribe))\n );\n}\n\nfunction execFinalizer(finalizer: Unsubscribable | (() => void)) {\n if (isFunction(finalizer)) {\n finalizer();\n } else {\n finalizer.unsubscribe();\n }\n}\n", "import { Subscriber } from './Subscriber';\nimport { ObservableNotification } from './types';\n\n/**\n * The {@link GlobalConfig} object for RxJS. It is used to configure things\n * like how to react on unhandled errors.\n */\nexport const config: GlobalConfig = {\n onUnhandledError: null,\n onStoppedNotification: null,\n Promise: undefined,\n useDeprecatedSynchronousErrorHandling: false,\n useDeprecatedNextContext: false,\n};\n\n/**\n * The global configuration object for RxJS, used to configure things\n * like how to react on unhandled errors. Accessible via {@link config}\n * object.\n */\nexport interface GlobalConfig {\n /**\n * A registration point for unhandled errors from RxJS. These are errors that\n * cannot were not handled by consuming code in the usual subscription path. 
For\n * example, if you have this configured, and you subscribe to an observable without\n * providing an error handler, errors from that subscription will end up here. This\n * will _always_ be called asynchronously on another job in the runtime. This is because\n * we do not want errors thrown in this user-configured handler to interfere with the\n * behavior of the library.\n */\n onUnhandledError: ((err: any) => void) | null;\n\n /**\n * A registration point for notifications that cannot be sent to subscribers because they\n * have completed, errored or have been explicitly unsubscribed. By default, next, complete\n * and error notifications sent to stopped subscribers are noops. However, sometimes callers\n * might want a different behavior. For example, with sources that attempt to report errors\n * to stopped subscribers, a caller can configure RxJS to throw an unhandled error instead.\n * This will _always_ be called asynchronously on another job in the runtime. This is because\n * we do not want errors thrown in this user-configured handler to interfere with the\n * behavior of the library.\n */\n onStoppedNotification: ((notification: ObservableNotification, subscriber: Subscriber) => void) | null;\n\n /**\n * The promise constructor used by default for {@link Observable#toPromise toPromise} and {@link Observable#forEach forEach}\n * methods.\n *\n * @deprecated As of version 8, RxJS will no longer support this sort of injection of a\n * Promise constructor. If you need a Promise implementation other than native promises,\n * please polyfill/patch Promise as you see appropriate. Will be removed in v8.\n */\n Promise?: PromiseConstructorLike;\n\n /**\n * If true, turns on synchronous error rethrowing, which is a deprecated behavior\n * in v6 and higher. This behavior enables bad patterns like wrapping a subscribe\n * call in a try/catch block. It also enables producer interference, a nasty bug\n * where a multicast can be broken for all observers by a downstream consumer with\n * an unhandled error. DO NOT USE THIS FLAG UNLESS IT'S NEEDED TO BUY TIME\n * FOR MIGRATION REASONS.\n *\n * @deprecated As of version 8, RxJS will no longer support synchronous throwing\n * of unhandled errors. All errors will be thrown on a separate call stack to prevent bad\n * behaviors described above. Will be removed in v8.\n */\n useDeprecatedSynchronousErrorHandling: boolean;\n\n /**\n * If true, enables an as-of-yet undocumented feature from v5: The ability to access\n * `unsubscribe()` via `this` context in `next` functions created in observers passed\n * to `subscribe`.\n *\n * This is being removed because the performance was severely problematic, and it could also cause\n * issues when types other than POJOs are passed to subscribe as subscribers, as they will likely have\n * their `this` context overwritten.\n *\n * @deprecated As of version 8, RxJS will no longer support altering the\n * context of next functions provided as part of an observer to Subscribe. Instead,\n * you will have access to a subscription or a signal or token that will allow you to do things like\n * unsubscribe and test closed status. 
Will be removed in v8.\n */\n useDeprecatedNextContext: boolean;\n}\n", "import type { TimerHandle } from './timerHandle';\ntype SetTimeoutFunction = (handler: () => void, timeout?: number, ...args: any[]) => TimerHandle;\ntype ClearTimeoutFunction = (handle: TimerHandle) => void;\n\ninterface TimeoutProvider {\n setTimeout: SetTimeoutFunction;\n clearTimeout: ClearTimeoutFunction;\n delegate:\n | {\n setTimeout: SetTimeoutFunction;\n clearTimeout: ClearTimeoutFunction;\n }\n | undefined;\n}\n\nexport const timeoutProvider: TimeoutProvider = {\n // When accessing the delegate, use the variable rather than `this` so that\n // the functions can be called without being bound to the provider.\n setTimeout(handler: () => void, timeout?: number, ...args) {\n const { delegate } = timeoutProvider;\n if (delegate?.setTimeout) {\n return delegate.setTimeout(handler, timeout, ...args);\n }\n return setTimeout(handler, timeout, ...args);\n },\n clearTimeout(handle) {\n const { delegate } = timeoutProvider;\n return (delegate?.clearTimeout || clearTimeout)(handle as any);\n },\n delegate: undefined,\n};\n", "import { config } from '../config';\nimport { timeoutProvider } from '../scheduler/timeoutProvider';\n\n/**\n * Handles an error on another job either with the user-configured {@link onUnhandledError},\n * or by throwing it on that new job so it can be picked up by `window.onerror`, `process.on('error')`, etc.\n *\n * This should be called whenever there is an error that is out-of-band with the subscription\n * or when an error hits a terminal boundary of the subscription and no error handler was provided.\n *\n * @param err the error to report\n */\nexport function reportUnhandledError(err: any) {\n timeoutProvider.setTimeout(() => {\n const { onUnhandledError } = config;\n if (onUnhandledError) {\n // Execute the user-configured error handler.\n onUnhandledError(err);\n } else {\n // Throw so it is picked up by the runtime's uncaught error mechanism.\n throw err;\n }\n });\n}\n", "/* tslint:disable:no-empty */\nexport function noop() { }\n", "import { CompleteNotification, NextNotification, ErrorNotification } from './types';\n\n/**\n * A completion object optimized for memory use and created to be the\n * same \"shape\" as other notifications in v8.\n * @internal\n */\nexport const COMPLETE_NOTIFICATION = (() => createNotification('C', undefined, undefined) as CompleteNotification)();\n\n/**\n * Internal use only. Creates an optimized error notification that is the same \"shape\"\n * as other notifications.\n * @internal\n */\nexport function errorNotification(error: any): ErrorNotification {\n return createNotification('E', undefined, error) as any;\n}\n\n/**\n * Internal use only. Creates an optimized next notification that is the same \"shape\"\n * as other notifications.\n * @internal\n */\nexport function nextNotification(value: T) {\n return createNotification('N', value, undefined) as NextNotification;\n}\n\n/**\n * Ensures that all notifications created internally have the same \"shape\" in v8.\n *\n * TODO: This is only exported to support a crazy legacy test in `groupBy`.\n * @internal\n */\nexport function createNotification(kind: 'N' | 'E' | 'C', value: any, error: any) {\n return {\n kind,\n value,\n error,\n };\n}\n", "import { config } from '../config';\n\nlet context: { errorThrown: boolean; error: any } | null = null;\n\n/**\n * Handles dealing with errors for super-gross mode. 
Creates a context, in which\n * any synchronously thrown errors will be passed to {@link captureError}. Which\n * will record the error such that it will be rethrown after the call back is complete.\n * TODO: Remove in v8\n * @param cb An immediately executed function.\n */\nexport function errorContext(cb: () => void) {\n if (config.useDeprecatedSynchronousErrorHandling) {\n const isRoot = !context;\n if (isRoot) {\n context = { errorThrown: false, error: null };\n }\n cb();\n if (isRoot) {\n const { errorThrown, error } = context!;\n context = null;\n if (errorThrown) {\n throw error;\n }\n }\n } else {\n // This is the general non-deprecated path for everyone that\n // isn't crazy enough to use super-gross mode (useDeprecatedSynchronousErrorHandling)\n cb();\n }\n}\n\n/**\n * Captures errors only in super-gross mode.\n * @param err the error to capture\n */\nexport function captureError(err: any) {\n if (config.useDeprecatedSynchronousErrorHandling && context) {\n context.errorThrown = true;\n context.error = err;\n }\n}\n", "import { isFunction } from './util/isFunction';\nimport { Observer, ObservableNotification } from './types';\nimport { isSubscription, Subscription } from './Subscription';\nimport { config } from './config';\nimport { reportUnhandledError } from './util/reportUnhandledError';\nimport { noop } from './util/noop';\nimport { nextNotification, errorNotification, COMPLETE_NOTIFICATION } from './NotificationFactories';\nimport { timeoutProvider } from './scheduler/timeoutProvider';\nimport { captureError } from './util/errorContext';\n\n/**\n * Implements the {@link Observer} interface and extends the\n * {@link Subscription} class. While the {@link Observer} is the public API for\n * consuming the values of an {@link Observable}, all Observers get converted to\n * a Subscriber, in order to provide Subscription-like capabilities such as\n * `unsubscribe`. Subscriber is a common type in RxJS, and crucial for\n * implementing operators, but it is rarely used as a public API.\n *\n * @class Subscriber\n */\nexport class Subscriber extends Subscription implements Observer {\n /**\n * A static factory for a Subscriber, given a (potentially partial) definition\n * of an Observer.\n * @param next The `next` callback of an Observer.\n * @param error The `error` callback of an\n * Observer.\n * @param complete The `complete` callback of an\n * Observer.\n * @return A Subscriber wrapping the (partially defined)\n * Observer represented by the given arguments.\n * @nocollapse\n * @deprecated Do not use. Will be removed in v8. There is no replacement for this\n * method, and there is no reason to be creating instances of `Subscriber` directly.\n * If you have a specific use case, please file an issue.\n */\n static create(next?: (x?: T) => void, error?: (e?: any) => void, complete?: () => void): Subscriber {\n return new SafeSubscriber(next, error, complete);\n }\n\n /** @deprecated Internal implementation detail, do not use directly. Will be made internal in v8. */\n protected isStopped: boolean = false;\n /** @deprecated Internal implementation detail, do not use directly. Will be made internal in v8. */\n protected destination: Subscriber | Observer; // this `any` is the escape hatch to erase extra type param (e.g. R)\n\n /**\n * @deprecated Internal implementation detail, do not use directly. Will be made internal in v8.\n * There is no reason to directly create an instance of Subscriber. 
This type is exported for typings reasons.\n */\n constructor(destination?: Subscriber | Observer) {\n super();\n if (destination) {\n this.destination = destination;\n // Automatically chain subscriptions together here.\n // if destination is a Subscription, then it is a Subscriber.\n if (isSubscription(destination)) {\n destination.add(this);\n }\n } else {\n this.destination = EMPTY_OBSERVER;\n }\n }\n\n /**\n * The {@link Observer} callback to receive notifications of type `next` from\n * the Observable, with a value. The Observable may call this method 0 or more\n * times.\n * @param {T} [value] The `next` value.\n * @return {void}\n */\n next(value?: T): void {\n if (this.isStopped) {\n handleStoppedNotification(nextNotification(value), this);\n } else {\n this._next(value!);\n }\n }\n\n /**\n * The {@link Observer} callback to receive notifications of type `error` from\n * the Observable, with an attached `Error`. Notifies the Observer that\n * the Observable has experienced an error condition.\n * @param {any} [err] The `error` exception.\n * @return {void}\n */\n error(err?: any): void {\n if (this.isStopped) {\n handleStoppedNotification(errorNotification(err), this);\n } else {\n this.isStopped = true;\n this._error(err);\n }\n }\n\n /**\n * The {@link Observer} callback to receive a valueless notification of type\n * `complete` from the Observable. Notifies the Observer that the Observable\n * has finished sending push-based notifications.\n * @return {void}\n */\n complete(): void {\n if (this.isStopped) {\n handleStoppedNotification(COMPLETE_NOTIFICATION, this);\n } else {\n this.isStopped = true;\n this._complete();\n }\n }\n\n unsubscribe(): void {\n if (!this.closed) {\n this.isStopped = true;\n super.unsubscribe();\n this.destination = null!;\n }\n }\n\n protected _next(value: T): void {\n this.destination.next(value);\n }\n\n protected _error(err: any): void {\n try {\n this.destination.error(err);\n } finally {\n this.unsubscribe();\n }\n }\n\n protected _complete(): void {\n try {\n this.destination.complete();\n } finally {\n this.unsubscribe();\n }\n }\n}\n\n/**\n * This bind is captured here because we want to be able to have\n * compatibility with monoid libraries that tend to use a method named\n * `bind`. 
In particular, a library called Monio requires this.\n */\nconst _bind = Function.prototype.bind;\n\nfunction bind any>(fn: Fn, thisArg: any): Fn {\n return _bind.call(fn, thisArg);\n}\n\n/**\n * Internal optimization only, DO NOT EXPOSE.\n * @internal\n */\nclass ConsumerObserver implements Observer {\n constructor(private partialObserver: Partial>) {}\n\n next(value: T): void {\n const { partialObserver } = this;\n if (partialObserver.next) {\n try {\n partialObserver.next(value);\n } catch (error) {\n handleUnhandledError(error);\n }\n }\n }\n\n error(err: any): void {\n const { partialObserver } = this;\n if (partialObserver.error) {\n try {\n partialObserver.error(err);\n } catch (error) {\n handleUnhandledError(error);\n }\n } else {\n handleUnhandledError(err);\n }\n }\n\n complete(): void {\n const { partialObserver } = this;\n if (partialObserver.complete) {\n try {\n partialObserver.complete();\n } catch (error) {\n handleUnhandledError(error);\n }\n }\n }\n}\n\nexport class SafeSubscriber extends Subscriber {\n constructor(\n observerOrNext?: Partial> | ((value: T) => void) | null,\n error?: ((e?: any) => void) | null,\n complete?: (() => void) | null\n ) {\n super();\n\n let partialObserver: Partial>;\n if (isFunction(observerOrNext) || !observerOrNext) {\n // The first argument is a function, not an observer. The next\n // two arguments *could* be observers, or they could be empty.\n partialObserver = {\n next: (observerOrNext ?? undefined) as (((value: T) => void) | undefined),\n error: error ?? undefined,\n complete: complete ?? undefined,\n };\n } else {\n // The first argument is a partial observer.\n let context: any;\n if (this && config.useDeprecatedNextContext) {\n // This is a deprecated path that made `this.unsubscribe()` available in\n // next handler functions passed to subscribe. This only exists behind a flag\n // now, as it is *very* slow.\n context = Object.create(observerOrNext);\n context.unsubscribe = () => this.unsubscribe();\n partialObserver = {\n next: observerOrNext.next && bind(observerOrNext.next, context),\n error: observerOrNext.error && bind(observerOrNext.error, context),\n complete: observerOrNext.complete && bind(observerOrNext.complete, context),\n };\n } else {\n // The \"normal\" path. 
Just use the partial observer directly.\n partialObserver = observerOrNext;\n }\n }\n\n // Wrap the partial observer to ensure it's a full observer, and\n // make sure proper error handling is accounted for.\n this.destination = new ConsumerObserver(partialObserver);\n }\n}\n\nfunction handleUnhandledError(error: any) {\n if (config.useDeprecatedSynchronousErrorHandling) {\n captureError(error);\n } else {\n // Ideal path, we report this as an unhandled error,\n // which is thrown on a new call stack.\n reportUnhandledError(error);\n }\n}\n\n/**\n * An error handler used when no error handler was supplied\n * to the SafeSubscriber -- meaning no error handler was supplied\n * do the `subscribe` call on our observable.\n * @param err The error to handle\n */\nfunction defaultErrorHandler(err: any) {\n throw err;\n}\n\n/**\n * A handler for notifications that cannot be sent to a stopped subscriber.\n * @param notification The notification being sent\n * @param subscriber The stopped subscriber\n */\nfunction handleStoppedNotification(notification: ObservableNotification, subscriber: Subscriber) {\n const { onStoppedNotification } = config;\n onStoppedNotification && timeoutProvider.setTimeout(() => onStoppedNotification(notification, subscriber));\n}\n\n/**\n * The observer used as a stub for subscriptions where the user did not\n * pass any arguments to `subscribe`. Comes with the default error handling\n * behavior.\n */\nexport const EMPTY_OBSERVER: Readonly> & { closed: true } = {\n closed: true,\n next: noop,\n error: defaultErrorHandler,\n complete: noop,\n};\n", "/**\n * Symbol.observable or a string \"@@observable\". Used for interop\n *\n * @deprecated We will no longer be exporting this symbol in upcoming versions of RxJS.\n * Instead polyfill and use Symbol.observable directly *or* use https://www.npmjs.com/package/symbol-observable\n */\nexport const observable: string | symbol = (() => (typeof Symbol === 'function' && Symbol.observable) || '@@observable')();\n", "/**\n * This function takes one parameter and just returns it. Simply put,\n * this is like `(x: T): T => x`.\n *\n * ## Examples\n *\n * This is useful in some cases when using things like `mergeMap`\n *\n * ```ts\n * import { interval, take, map, range, mergeMap, identity } from 'rxjs';\n *\n * const source$ = interval(1000).pipe(take(5));\n *\n * const result$ = source$.pipe(\n * map(i => range(i)),\n * mergeMap(identity) // same as mergeMap(x => x)\n * );\n *\n * result$.subscribe({\n * next: console.log\n * });\n * ```\n *\n * Or when you want to selectively apply an operator\n *\n * ```ts\n * import { interval, take, identity } from 'rxjs';\n *\n * const shouldLimit = () => Math.random() < 0.5;\n *\n * const source$ = interval(1000);\n *\n * const result$ = source$.pipe(shouldLimit() ? 
take(5) : identity);\n *\n * result$.subscribe({\n * next: console.log\n * });\n * ```\n *\n * @param x Any value that is returned by this function\n * @returns The value passed as the first parameter to this function\n */\nexport function identity(x: T): T {\n return x;\n}\n", "import { identity } from './identity';\nimport { UnaryFunction } from '../types';\n\nexport function pipe(): typeof identity;\nexport function pipe(fn1: UnaryFunction): UnaryFunction;\nexport function pipe(fn1: UnaryFunction, fn2: UnaryFunction): UnaryFunction;\nexport function pipe(fn1: UnaryFunction, fn2: UnaryFunction, fn3: UnaryFunction): UnaryFunction;\nexport function pipe(\n fn1: UnaryFunction,\n fn2: UnaryFunction,\n fn3: UnaryFunction,\n fn4: UnaryFunction\n): UnaryFunction;\nexport function pipe(\n fn1: UnaryFunction,\n fn2: UnaryFunction,\n fn3: UnaryFunction,\n fn4: UnaryFunction,\n fn5: UnaryFunction\n): UnaryFunction;\nexport function pipe(\n fn1: UnaryFunction,\n fn2: UnaryFunction,\n fn3: UnaryFunction,\n fn4: UnaryFunction,\n fn5: UnaryFunction,\n fn6: UnaryFunction\n): UnaryFunction;\nexport function pipe(\n fn1: UnaryFunction,\n fn2: UnaryFunction,\n fn3: UnaryFunction,\n fn4: UnaryFunction,\n fn5: UnaryFunction,\n fn6: UnaryFunction,\n fn7: UnaryFunction\n): UnaryFunction;\nexport function pipe(\n fn1: UnaryFunction,\n fn2: UnaryFunction,\n fn3: UnaryFunction,\n fn4: UnaryFunction,\n fn5: UnaryFunction,\n fn6: UnaryFunction,\n fn7: UnaryFunction,\n fn8: UnaryFunction\n): UnaryFunction;\nexport function pipe(\n fn1: UnaryFunction,\n fn2: UnaryFunction,\n fn3: UnaryFunction,\n fn4: UnaryFunction,\n fn5: UnaryFunction,\n fn6: UnaryFunction,\n fn7: UnaryFunction,\n fn8: UnaryFunction,\n fn9: UnaryFunction\n): UnaryFunction;\nexport function pipe(\n fn1: UnaryFunction,\n fn2: UnaryFunction,\n fn3: UnaryFunction,\n fn4: UnaryFunction,\n fn5: UnaryFunction,\n fn6: UnaryFunction,\n fn7: UnaryFunction,\n fn8: UnaryFunction,\n fn9: UnaryFunction,\n ...fns: UnaryFunction[]\n): UnaryFunction;\n\n/**\n * pipe() can be called on one or more functions, each of which can take one argument (\"UnaryFunction\")\n * and uses it to return a value.\n * It returns a function that takes one argument, passes it to the first UnaryFunction, and then\n * passes the result to the next one, passes that result to the next one, and so on. \n */\nexport function pipe(...fns: Array>): UnaryFunction {\n return pipeFromArray(fns);\n}\n\n/** @internal */\nexport function pipeFromArray(fns: Array>): UnaryFunction {\n if (fns.length === 0) {\n return identity as UnaryFunction;\n }\n\n if (fns.length === 1) {\n return fns[0];\n }\n\n return function piped(input: T): R {\n return fns.reduce((prev: any, fn: UnaryFunction) => fn(prev), input as any);\n };\n}\n", "import { Operator } from './Operator';\nimport { SafeSubscriber, Subscriber } from './Subscriber';\nimport { isSubscription, Subscription } from './Subscription';\nimport { TeardownLogic, OperatorFunction, Subscribable, Observer } from './types';\nimport { observable as Symbol_observable } from './symbol/observable';\nimport { pipeFromArray } from './util/pipe';\nimport { config } from './config';\nimport { isFunction } from './util/isFunction';\nimport { errorContext } from './util/errorContext';\n\n/**\n * A representation of any set of values over any amount of time. This is the most basic building block\n * of RxJS.\n *\n * @class Observable\n */\nexport class Observable implements Subscribable {\n /**\n * @deprecated Internal implementation detail, do not use directly. 
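The standalone `pipe` defined above is plain left-to-right function composition, independent of Observables. A minimal sketch (function names are hypothetical):

```ts
import { pipe } from 'rxjs';

// pipe() composes unary functions left to right; with no arguments it
// returns identity, and pipeFromArray reduces the input through each fn.
const addOne = (n: number) => n + 1;
const double = (n: number) => n * 2;

const transform = pipe(addOne, double);
console.log(transform(3)); // (3 + 1) * 2 = 8
```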
Will be made internal in v8.\n */\n source: Observable | undefined;\n\n /**\n * @deprecated Internal implementation detail, do not use directly. Will be made internal in v8.\n */\n operator: Operator | undefined;\n\n /**\n * @constructor\n * @param {Function} subscribe the function that is called when the Observable is\n * initially subscribed to. This function is given a Subscriber, to which new values\n * can be `next`ed, or an `error` method can be called to raise an error, or\n * `complete` can be called to notify of a successful completion.\n */\n constructor(subscribe?: (this: Observable, subscriber: Subscriber) => TeardownLogic) {\n if (subscribe) {\n this._subscribe = subscribe;\n }\n }\n\n // HACK: Since TypeScript inherits static properties too, we have to\n // fight against TypeScript here so Subject can have a different static create signature\n /**\n * Creates a new Observable by calling the Observable constructor\n * @owner Observable\n * @method create\n * @param {Function} subscribe? the subscriber function to be passed to the Observable constructor\n * @return {Observable} a new observable\n * @nocollapse\n * @deprecated Use `new Observable()` instead. Will be removed in v8.\n */\n static create: (...args: any[]) => any = (subscribe?: (subscriber: Subscriber) => TeardownLogic) => {\n return new Observable(subscribe);\n };\n\n /**\n * Creates a new Observable, with this Observable instance as the source, and the passed\n * operator defined as the new observable's operator.\n * @method lift\n * @param operator the operator defining the operation to take on the observable\n * @return a new observable with the Operator applied\n * @deprecated Internal implementation detail, do not use directly. Will be made internal in v8.\n * If you have implemented an operator using `lift`, it is recommended that you create an\n * operator by simply returning `new Observable()` directly. See \"Creating new operators from\n * scratch\" section here: https://rxjs.dev/guide/operators\n */\n lift(operator?: Operator): Observable {\n const observable = new Observable();\n observable.source = this;\n observable.operator = operator;\n return observable;\n }\n\n subscribe(observerOrNext?: Partial> | ((value: T) => void)): Subscription;\n /** @deprecated Instead of passing separate callback arguments, use an observer argument. Signatures taking separate callback arguments will be removed in v8. Details: https://rxjs.dev/deprecations/subscribe-arguments */\n subscribe(next?: ((value: T) => void) | null, error?: ((error: any) => void) | null, complete?: (() => void) | null): Subscription;\n /**\n * Invokes an execution of an Observable and registers Observer handlers for notifications it will emit.\n *\n * Use it when you have all these Observables, but still nothing is happening.\n *\n * `subscribe` is not a regular operator, but a method that calls Observable's internal `subscribe` function. It\n * might be for example a function that you passed to Observable's constructor, but most of the time it is\n * a library implementation, which defines what will be emitted by an Observable, and when it be will emitted. This means\n * that calling `subscribe` is actually the moment when Observable starts its work, not when it is created, as it is often\n * the thought.\n *\n * Apart from starting the execution of an Observable, this method allows you to listen for values\n * that an Observable emits, as well as for when it completes or errors. 
You can achieve this in two\n * of the following ways.\n *\n * The first way is creating an object that implements {@link Observer} interface. It should have methods\n * defined by that interface, but note that it should be just a regular JavaScript object, which you can create\n * yourself in any way you want (ES6 class, classic function constructor, object literal etc.). In particular, do\n * not attempt to use any RxJS implementation details to create Observers - you don't need them. Remember also\n * that your object does not have to implement all methods. If you find yourself creating a method that doesn't\n * do anything, you can simply omit it. Note however, if the `error` method is not provided and an error happens,\n * it will be thrown asynchronously. Errors thrown asynchronously cannot be caught using `try`/`catch`. Instead,\n * use the {@link onUnhandledError} configuration option or use a runtime handler (like `window.onerror` or\n * `process.on('error)`) to be notified of unhandled errors. Because of this, it's recommended that you provide\n * an `error` method to avoid missing thrown errors.\n *\n * The second way is to give up on Observer object altogether and simply provide callback functions in place of its methods.\n * This means you can provide three functions as arguments to `subscribe`, where the first function is equivalent\n * of a `next` method, the second of an `error` method and the third of a `complete` method. Just as in case of an Observer,\n * if you do not need to listen for something, you can omit a function by passing `undefined` or `null`,\n * since `subscribe` recognizes these functions by where they were placed in function call. When it comes\n * to the `error` function, as with an Observer, if not provided, errors emitted by an Observable will be thrown asynchronously.\n *\n * You can, however, subscribe with no parameters at all. This may be the case where you're not interested in terminal events\n * and you also handled emissions internally by using operators (e.g. using `tap`).\n *\n * Whichever style of calling `subscribe` you use, in both cases it returns a Subscription object.\n * This object allows you to call `unsubscribe` on it, which in turn will stop the work that an Observable does and will clean\n * up all resources that an Observable used. Note that cancelling a subscription will not call `complete` callback\n * provided to `subscribe` function, which is reserved for a regular completion signal that comes from an Observable.\n *\n * Remember that callbacks provided to `subscribe` are not guaranteed to be called asynchronously.\n * It is an Observable itself that decides when these functions will be called. For example {@link of}\n * by default emits all its values synchronously. 
Always check documentation for how given Observable\n * will behave when subscribed and if its default behavior can be modified with a `scheduler`.\n *\n * #### Examples\n *\n * Subscribe with an {@link guide/observer Observer}\n *\n * ```ts\n * import { of } from 'rxjs';\n *\n * const sumObserver = {\n * sum: 0,\n * next(value) {\n * console.log('Adding: ' + value);\n * this.sum = this.sum + value;\n * },\n * error() {\n * // We actually could just remove this method,\n * // since we do not really care about errors right now.\n * },\n * complete() {\n * console.log('Sum equals: ' + this.sum);\n * }\n * };\n *\n * of(1, 2, 3) // Synchronously emits 1, 2, 3 and then completes.\n * .subscribe(sumObserver);\n *\n * // Logs:\n * // 'Adding: 1'\n * // 'Adding: 2'\n * // 'Adding: 3'\n * // 'Sum equals: 6'\n * ```\n *\n * Subscribe with functions ({@link deprecations/subscribe-arguments deprecated})\n *\n * ```ts\n * import { of } from 'rxjs'\n *\n * let sum = 0;\n *\n * of(1, 2, 3).subscribe(\n * value => {\n * console.log('Adding: ' + value);\n * sum = sum + value;\n * },\n * undefined,\n * () => console.log('Sum equals: ' + sum)\n * );\n *\n * // Logs:\n * // 'Adding: 1'\n * // 'Adding: 2'\n * // 'Adding: 3'\n * // 'Sum equals: 6'\n * ```\n *\n * Cancel a subscription\n *\n * ```ts\n * import { interval } from 'rxjs';\n *\n * const subscription = interval(1000).subscribe({\n * next(num) {\n * console.log(num)\n * },\n * complete() {\n * // Will not be called, even when cancelling subscription.\n * console.log('completed!');\n * }\n * });\n *\n * setTimeout(() => {\n * subscription.unsubscribe();\n * console.log('unsubscribed!');\n * }, 2500);\n *\n * // Logs:\n * // 0 after 1s\n * // 1 after 2s\n * // 'unsubscribed!' after 2.5s\n * ```\n *\n * @param {Observer|Function} observerOrNext (optional) Either an observer with methods to be called,\n * or the first of three possible handlers, which is the handler for each value emitted from the subscribed\n * Observable.\n * @param {Function} error (optional) A handler for a terminal event resulting from an error. If no error handler is provided,\n * the error will be thrown asynchronously as unhandled.\n * @param {Function} complete (optional) A handler for a terminal event resulting from successful completion.\n * @return {Subscription} a subscription reference to the registered handlers\n * @method subscribe\n */\n subscribe(\n observerOrNext?: Partial> | ((value: T) => void) | null,\n error?: ((error: any) => void) | null,\n complete?: (() => void) | null\n ): Subscription {\n const subscriber = isSubscriber(observerOrNext) ? observerOrNext : new SafeSubscriber(observerOrNext, error, complete);\n\n errorContext(() => {\n const { operator, source } = this;\n subscriber.add(\n operator\n ? // We're dealing with a subscription in the\n // operator chain to one of our lifted operators.\n operator.call(subscriber, source)\n : source\n ? // If `source` has a value, but `operator` does not, something that\n // had intimate knowledge of our API, like our `Subject`, must have\n // set it. 
We're going to just call `_subscribe` directly.\n this._subscribe(subscriber)\n : // In all other cases, we're likely wrapping a user-provided initializer\n // function, so we need to catch errors and handle them appropriately.\n this._trySubscribe(subscriber)\n );\n });\n\n return subscriber;\n }\n\n /** @internal */\n protected _trySubscribe(sink: Subscriber): TeardownLogic {\n try {\n return this._subscribe(sink);\n } catch (err) {\n // We don't need to return anything in this case,\n // because it's just going to try to `add()` to a subscription\n // above.\n sink.error(err);\n }\n }\n\n /**\n * Used as a NON-CANCELLABLE means of subscribing to an observable, for use with\n * APIs that expect promises, like `async/await`. You cannot unsubscribe from this.\n *\n * **WARNING**: Only use this with observables you *know* will complete. If the source\n * observable does not complete, you will end up with a promise that is hung up, and\n * potentially all of the state of an async function hanging out in memory. To avoid\n * this situation, look into adding something like {@link timeout}, {@link take},\n * {@link takeWhile}, or {@link takeUntil} amongst others.\n *\n * #### Example\n *\n * ```ts\n * import { interval, take } from 'rxjs';\n *\n * const source$ = interval(1000).pipe(take(4));\n *\n * async function getTotal() {\n * let total = 0;\n *\n * await source$.forEach(value => {\n * total += value;\n * console.log('observable -> ' + value);\n * });\n *\n * return total;\n * }\n *\n * getTotal().then(\n * total => console.log('Total: ' + total)\n * );\n *\n * // Expected:\n * // 'observable -> 0'\n * // 'observable -> 1'\n * // 'observable -> 2'\n * // 'observable -> 3'\n * // 'Total: 6'\n * ```\n *\n * @param next a handler for each value emitted by the observable\n * @return a promise that either resolves on observable completion or\n * rejects with the handled error\n */\n forEach(next: (value: T) => void): Promise;\n\n /**\n * @param next a handler for each value emitted by the observable\n * @param promiseCtor a constructor function used to instantiate the Promise\n * @return a promise that either resolves on observable completion or\n * rejects with the handled error\n * @deprecated Passing a Promise constructor will no longer be available\n * in upcoming versions of RxJS. This is because it adds weight to the library, for very\n * little benefit. If you need this functionality, it is recommended that you either\n * polyfill Promise, or you create an adapter to convert the returned native promise\n * to whatever promise implementation you wanted. 
Will be removed in v8.\n */\n forEach(next: (value: T) => void, promiseCtor: PromiseConstructorLike): Promise;\n\n forEach(next: (value: T) => void, promiseCtor?: PromiseConstructorLike): Promise {\n promiseCtor = getPromiseCtor(promiseCtor);\n\n return new promiseCtor((resolve, reject) => {\n const subscriber = new SafeSubscriber({\n next: (value) => {\n try {\n next(value);\n } catch (err) {\n reject(err);\n subscriber.unsubscribe();\n }\n },\n error: reject,\n complete: resolve,\n });\n this.subscribe(subscriber);\n }) as Promise;\n }\n\n /** @internal */\n protected _subscribe(subscriber: Subscriber): TeardownLogic {\n return this.source?.subscribe(subscriber);\n }\n\n /**\n * An interop point defined by the es7-observable spec https://github.com/zenparsing/es-observable\n * @method Symbol.observable\n * @return {Observable} this instance of the observable\n */\n [Symbol_observable]() {\n return this;\n }\n\n /* tslint:disable:max-line-length */\n pipe(): Observable;\n pipe(op1: OperatorFunction): Observable;\n pipe(op1: OperatorFunction, op2: OperatorFunction): Observable;\n pipe(op1: OperatorFunction, op2: OperatorFunction, op3: OperatorFunction): Observable;\n pipe(\n op1: OperatorFunction,\n op2: OperatorFunction,\n op3: OperatorFunction,\n op4: OperatorFunction\n ): Observable;\n pipe(\n op1: OperatorFunction,\n op2: OperatorFunction,\n op3: OperatorFunction,\n op4: OperatorFunction,\n op5: OperatorFunction\n ): Observable;\n pipe(\n op1: OperatorFunction,\n op2: OperatorFunction,\n op3: OperatorFunction,\n op4: OperatorFunction,\n op5: OperatorFunction,\n op6: OperatorFunction\n ): Observable;\n pipe(\n op1: OperatorFunction,\n op2: OperatorFunction,\n op3: OperatorFunction,\n op4: OperatorFunction,\n op5: OperatorFunction,\n op6: OperatorFunction,\n op7: OperatorFunction\n ): Observable;\n pipe(\n op1: OperatorFunction,\n op2: OperatorFunction,\n op3: OperatorFunction,\n op4: OperatorFunction,\n op5: OperatorFunction,\n op6: OperatorFunction,\n op7: OperatorFunction,\n op8: OperatorFunction\n ): Observable;\n pipe(\n op1: OperatorFunction,\n op2: OperatorFunction,\n op3: OperatorFunction,\n op4: OperatorFunction,\n op5: OperatorFunction,\n op6: OperatorFunction,\n op7: OperatorFunction,\n op8: OperatorFunction,\n op9: OperatorFunction\n ): Observable;\n pipe(\n op1: OperatorFunction,\n op2: OperatorFunction,\n op3: OperatorFunction,\n op4: OperatorFunction,\n op5: OperatorFunction,\n op6: OperatorFunction,\n op7: OperatorFunction,\n op8: OperatorFunction,\n op9: OperatorFunction,\n ...operations: OperatorFunction[]\n ): Observable;\n /* tslint:enable:max-line-length */\n\n /**\n * Used to stitch together functional operators into a chain.\n * @method pipe\n * @return {Observable} the Observable result of all of the operators having\n * been called in the order they were passed in.\n *\n * ## Example\n *\n * ```ts\n * import { interval, filter, map, scan } from 'rxjs';\n *\n * interval(1000)\n * .pipe(\n * filter(x => x % 2 === 0),\n * map(x => x + x),\n * scan((acc, x) => acc + x)\n * )\n * .subscribe(x => console.log(x));\n * ```\n */\n pipe(...operations: OperatorFunction[]): Observable {\n return pipeFromArray(operations)(this);\n }\n\n /* tslint:disable:max-line-length */\n /** @deprecated Replaced with {@link firstValueFrom} and {@link lastValueFrom}. Will be removed in v8. Details: https://rxjs.dev/deprecations/to-promise */\n toPromise(): Promise;\n /** @deprecated Replaced with {@link firstValueFrom} and {@link lastValueFrom}. Will be removed in v8. 
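As the deprecation notes here say, `firstValueFrom` and `lastValueFrom` are the replacements for `toPromise`. An illustrative sketch of that recommended pattern:

```ts
import { firstValueFrom, lastValueFrom, interval, take } from 'rxjs';

// Both helpers reject if the source errors; firstValueFrom additionally
// rejects with EmptyError if the source completes without emitting.
async function demo() {
  const src$ = interval(10).pipe(take(3)); // emits 0, 1, 2 then completes
  console.log(await firstValueFrom(src$)); // 0
  console.log(await lastValueFrom(src$)); // 2
}

demo();
```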
Details: https://rxjs.dev/deprecations/to-promise */\n toPromise(PromiseCtor: typeof Promise): Promise;\n /** @deprecated Replaced with {@link firstValueFrom} and {@link lastValueFrom}. Will be removed in v8. Details: https://rxjs.dev/deprecations/to-promise */\n toPromise(PromiseCtor: PromiseConstructorLike): Promise;\n /* tslint:enable:max-line-length */\n\n /**\n * Subscribe to this Observable and get a Promise resolving on\n * `complete` with the last emission (if any).\n *\n * **WARNING**: Only use this with observables you *know* will complete. If the source\n * observable does not complete, you will end up with a promise that is hung up, and\n * potentially all of the state of an async function hanging out in memory. To avoid\n * this situation, look into adding something like {@link timeout}, {@link take},\n * {@link takeWhile}, or {@link takeUntil} amongst others.\n *\n * @method toPromise\n * @param [promiseCtor] a constructor function used to instantiate\n * the Promise\n * @return A Promise that resolves with the last value emit, or\n * rejects on an error. If there were no emissions, Promise\n * resolves with undefined.\n * @deprecated Replaced with {@link firstValueFrom} and {@link lastValueFrom}. Will be removed in v8. Details: https://rxjs.dev/deprecations/to-promise\n */\n toPromise(promiseCtor?: PromiseConstructorLike): Promise {\n promiseCtor = getPromiseCtor(promiseCtor);\n\n return new promiseCtor((resolve, reject) => {\n let value: T | undefined;\n this.subscribe(\n (x: T) => (value = x),\n (err: any) => reject(err),\n () => resolve(value)\n );\n }) as Promise;\n }\n}\n\n/**\n * Decides between a passed promise constructor from consuming code,\n * A default configured promise constructor, and the native promise\n * constructor and returns it. If nothing can be found, it will throw\n * an error.\n * @param promiseCtor The optional promise constructor to passed by consuming code\n */\nfunction getPromiseCtor(promiseCtor: PromiseConstructorLike | undefined) {\n return promiseCtor ?? config.Promise ?? Promise;\n}\n\nfunction isObserver(value: any): value is Observer {\n return value && isFunction(value.next) && isFunction(value.error) && isFunction(value.complete);\n}\n\nfunction isSubscriber(value: any): value is Subscriber {\n return (value && value instanceof Subscriber) || (isObserver(value) && isSubscription(value));\n}\n", "import { Observable } from '../Observable';\nimport { Subscriber } from '../Subscriber';\nimport { OperatorFunction } from '../types';\nimport { isFunction } from './isFunction';\n\n/**\n * Used to determine if an object is an Observable with a lift function.\n */\nexport function hasLift(source: any): source is { lift: InstanceType['lift'] } {\n return isFunction(source?.lift);\n}\n\n/**\n * Creates an `OperatorFunction`. 
Used to define operators throughout the library in a concise way.\n * @param init The logic to connect the liftedSource to the subscriber at the moment of subscription.\n */\nexport function operate(\n init: (liftedSource: Observable, subscriber: Subscriber) => (() => void) | void\n): OperatorFunction {\n return (source: Observable) => {\n if (hasLift(source)) {\n return source.lift(function (this: Subscriber, liftedSource: Observable) {\n try {\n return init(liftedSource, this);\n } catch (err) {\n this.error(err);\n }\n });\n }\n throw new TypeError('Unable to lift unknown Observable type');\n };\n}\n", "import { Subscriber } from '../Subscriber';\n\n/**\n * Creates an instance of an `OperatorSubscriber`.\n * @param destination The downstream subscriber.\n * @param onNext Handles next values, only called if this subscriber is not stopped or closed. Any\n * error that occurs in this function is caught and sent to the `error` method of this subscriber.\n * @param onError Handles errors from the subscription, any errors that occur in this handler are caught\n * and send to the `destination` error handler.\n * @param onComplete Handles completion notification from the subscription. Any errors that occur in\n * this handler are sent to the `destination` error handler.\n * @param onFinalize Additional teardown logic here. This will only be called on teardown if the\n * subscriber itself is not already closed. This is called after all other teardown logic is executed.\n */\nexport function createOperatorSubscriber(\n destination: Subscriber,\n onNext?: (value: T) => void,\n onComplete?: () => void,\n onError?: (err: any) => void,\n onFinalize?: () => void\n): Subscriber {\n return new OperatorSubscriber(destination, onNext, onComplete, onError, onFinalize);\n}\n\n/**\n * A generic helper for allowing operators to be created with a Subscriber and\n * use closures to capture necessary state from the operator function itself.\n */\nexport class OperatorSubscriber extends Subscriber {\n /**\n * Creates an instance of an `OperatorSubscriber`.\n * @param destination The downstream subscriber.\n * @param onNext Handles next values, only called if this subscriber is not stopped or closed. Any\n * error that occurs in this function is caught and sent to the `error` method of this subscriber.\n * @param onError Handles errors from the subscription, any errors that occur in this handler are caught\n * and send to the `destination` error handler.\n * @param onComplete Handles completion notification from the subscription. Any errors that occur in\n * this handler are sent to the `destination` error handler.\n * @param onFinalize Additional finalization logic here. This will only be called on finalization if the\n * subscriber itself is not already closed. This is called after all other finalization logic is executed.\n * @param shouldUnsubscribe An optional check to see if an unsubscribe call should truly unsubscribe.\n * NOTE: This currently **ONLY** exists to support the strange behavior of {@link groupBy}, where unsubscription\n * to the resulting observable does not actually disconnect from the source if there are active subscriptions\n * to any grouped observable. 
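The `lift` deprecation note above recommends building custom operators by returning `new Observable()` directly rather than using the internal `operate` or `lift` helpers. A hedged sketch of that public-API style (the `double` operator is hypothetical):

```ts
import { Observable, OperatorFunction, interval, take } from 'rxjs';

// A custom operator as a plain function: wrap the source in a new
// Observable and forward each notification through a transformed path.
function double(): OperatorFunction<number, number> {
  return (source) =>
    new Observable<number>((subscriber) =>
      source.subscribe({
        next: (value) => subscriber.next(value * 2),
        error: (err) => subscriber.error(err),
        complete: () => subscriber.complete(),
      })
    );
}

interval(100).pipe(take(3), double()).subscribe(console.log); // 0, 2, 4
```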
(DO NOT EXPOSE OR USE EXTERNALLY!!!)\n */\n constructor(\n destination: Subscriber,\n onNext?: (value: T) => void,\n onComplete?: () => void,\n onError?: (err: any) => void,\n private onFinalize?: () => void,\n private shouldUnsubscribe?: () => boolean\n ) {\n // It's important - for performance reasons - that all of this class's\n // members are initialized and that they are always initialized in the same\n // order. This will ensure that all OperatorSubscriber instances have the\n // same hidden class in V8. This, in turn, will help keep the number of\n // hidden classes involved in property accesses within the base class as\n // low as possible. If the number of hidden classes involved exceeds four,\n // the property accesses will become megamorphic and performance penalties\n // will be incurred - i.e. inline caches won't be used.\n //\n // The reasons for ensuring all instances have the same hidden class are\n // further discussed in this blog post from Benedikt Meurer:\n // https://benediktmeurer.de/2018/03/23/impact-of-polymorphism-on-component-based-frameworks-like-react/\n super(destination);\n this._next = onNext\n ? function (this: OperatorSubscriber, value: T) {\n try {\n onNext(value);\n } catch (err) {\n destination.error(err);\n }\n }\n : super._next;\n this._error = onError\n ? function (this: OperatorSubscriber, err: any) {\n try {\n onError(err);\n } catch (err) {\n // Send any errors that occur down stream.\n destination.error(err);\n } finally {\n // Ensure finalization.\n this.unsubscribe();\n }\n }\n : super._error;\n this._complete = onComplete\n ? function (this: OperatorSubscriber) {\n try {\n onComplete();\n } catch (err) {\n // Send any errors that occur down stream.\n destination.error(err);\n } finally {\n // Ensure finalization.\n this.unsubscribe();\n }\n }\n : super._complete;\n }\n\n unsubscribe() {\n if (!this.shouldUnsubscribe || this.shouldUnsubscribe()) {\n const { closed } = this;\n super.unsubscribe();\n // Execute additional teardown if we have any and we didn't already do so.\n !closed && this.onFinalize?.();\n }\n }\n}\n", "import { Subscription } from '../Subscription';\n\ninterface AnimationFrameProvider {\n schedule(callback: FrameRequestCallback): Subscription;\n requestAnimationFrame: typeof requestAnimationFrame;\n cancelAnimationFrame: typeof cancelAnimationFrame;\n delegate:\n | {\n requestAnimationFrame: typeof requestAnimationFrame;\n cancelAnimationFrame: typeof cancelAnimationFrame;\n }\n | undefined;\n}\n\nexport const animationFrameProvider: AnimationFrameProvider = {\n // When accessing the delegate, use the variable rather than `this` so that\n // the functions can be called without being bound to the provider.\n schedule(callback) {\n let request = requestAnimationFrame;\n let cancel: typeof cancelAnimationFrame | undefined = cancelAnimationFrame;\n const { delegate } = animationFrameProvider;\n if (delegate) {\n request = delegate.requestAnimationFrame;\n cancel = delegate.cancelAnimationFrame;\n }\n const handle = request((timestamp) => {\n // Clear the cancel function. 
The request has been fulfilled, so\n // attempting to cancel the request upon unsubscription would be\n // pointless.\n cancel = undefined;\n callback(timestamp);\n });\n return new Subscription(() => cancel?.(handle));\n },\n requestAnimationFrame(...args) {\n const { delegate } = animationFrameProvider;\n return (delegate?.requestAnimationFrame || requestAnimationFrame)(...args);\n },\n cancelAnimationFrame(...args) {\n const { delegate } = animationFrameProvider;\n return (delegate?.cancelAnimationFrame || cancelAnimationFrame)(...args);\n },\n delegate: undefined,\n};\n", "import { createErrorClass } from './createErrorClass';\n\nexport interface ObjectUnsubscribedError extends Error {}\n\nexport interface ObjectUnsubscribedErrorCtor {\n /**\n * @deprecated Internal implementation detail. Do not construct error instances.\n * Cannot be tagged as internal: https://github.com/ReactiveX/rxjs/issues/6269\n */\n new (): ObjectUnsubscribedError;\n}\n\n/**\n * An error thrown when an action is invalid because the object has been\n * unsubscribed.\n *\n * @see {@link Subject}\n * @see {@link BehaviorSubject}\n *\n * @class ObjectUnsubscribedError\n */\nexport const ObjectUnsubscribedError: ObjectUnsubscribedErrorCtor = createErrorClass(\n (_super) =>\n function ObjectUnsubscribedErrorImpl(this: any) {\n _super(this);\n this.name = 'ObjectUnsubscribedError';\n this.message = 'object unsubscribed';\n }\n);\n", "import { Operator } from './Operator';\nimport { Observable } from './Observable';\nimport { Subscriber } from './Subscriber';\nimport { Subscription, EMPTY_SUBSCRIPTION } from './Subscription';\nimport { Observer, SubscriptionLike, TeardownLogic } from './types';\nimport { ObjectUnsubscribedError } from './util/ObjectUnsubscribedError';\nimport { arrRemove } from './util/arrRemove';\nimport { errorContext } from './util/errorContext';\n\n/**\n * A Subject is a special type of Observable that allows values to be\n * multicasted to many Observers. Subjects are like EventEmitters.\n *\n * Every Subject is an Observable and an Observer. You can subscribe to a\n * Subject, and you can call next to feed values as well as error and complete.\n */\nexport class Subject extends Observable implements SubscriptionLike {\n closed = false;\n\n private currentObservers: Observer[] | null = null;\n\n /** @deprecated Internal implementation detail, do not use directly. Will be made internal in v8. */\n observers: Observer[] = [];\n /** @deprecated Internal implementation detail, do not use directly. Will be made internal in v8. */\n isStopped = false;\n /** @deprecated Internal implementation detail, do not use directly. Will be made internal in v8. */\n hasError = false;\n /** @deprecated Internal implementation detail, do not use directly. Will be made internal in v8. */\n thrownError: any = null;\n\n /**\n * Creates a \"subject\" by basically gluing an observer to an observable.\n *\n * @nocollapse\n * @deprecated Recommended you do not use. Will be removed at some point in the future. Plans for replacement still under discussion.\n */\n static create: (...args: any[]) => any = (destination: Observer, source: Observable): AnonymousSubject => {\n return new AnonymousSubject(destination, source);\n };\n\n constructor() {\n // NOTE: This must be here to obscure Observable's constructor.\n super();\n }\n\n /** @deprecated Internal implementation detail, do not use directly. Will be made internal in v8. 
*/\n lift(operator: Operator): Observable {\n const subject = new AnonymousSubject(this, this);\n subject.operator = operator as any;\n return subject as any;\n }\n\n /** @internal */\n protected _throwIfClosed() {\n if (this.closed) {\n throw new ObjectUnsubscribedError();\n }\n }\n\n next(value: T) {\n errorContext(() => {\n this._throwIfClosed();\n if (!this.isStopped) {\n if (!this.currentObservers) {\n this.currentObservers = Array.from(this.observers);\n }\n for (const observer of this.currentObservers) {\n observer.next(value);\n }\n }\n });\n }\n\n error(err: any) {\n errorContext(() => {\n this._throwIfClosed();\n if (!this.isStopped) {\n this.hasError = this.isStopped = true;\n this.thrownError = err;\n const { observers } = this;\n while (observers.length) {\n observers.shift()!.error(err);\n }\n }\n });\n }\n\n complete() {\n errorContext(() => {\n this._throwIfClosed();\n if (!this.isStopped) {\n this.isStopped = true;\n const { observers } = this;\n while (observers.length) {\n observers.shift()!.complete();\n }\n }\n });\n }\n\n unsubscribe() {\n this.isStopped = this.closed = true;\n this.observers = this.currentObservers = null!;\n }\n\n get observed() {\n return this.observers?.length > 0;\n }\n\n /** @internal */\n protected _trySubscribe(subscriber: Subscriber): TeardownLogic {\n this._throwIfClosed();\n return super._trySubscribe(subscriber);\n }\n\n /** @internal */\n protected _subscribe(subscriber: Subscriber): Subscription {\n this._throwIfClosed();\n this._checkFinalizedStatuses(subscriber);\n return this._innerSubscribe(subscriber);\n }\n\n /** @internal */\n protected _innerSubscribe(subscriber: Subscriber) {\n const { hasError, isStopped, observers } = this;\n if (hasError || isStopped) {\n return EMPTY_SUBSCRIPTION;\n }\n this.currentObservers = null;\n observers.push(subscriber);\n return new Subscription(() => {\n this.currentObservers = null;\n arrRemove(observers, subscriber);\n });\n }\n\n /** @internal */\n protected _checkFinalizedStatuses(subscriber: Subscriber) {\n const { hasError, thrownError, isStopped } = this;\n if (hasError) {\n subscriber.error(thrownError);\n } else if (isStopped) {\n subscriber.complete();\n }\n }\n\n /**\n * Creates a new Observable with this Subject as the source. You can do this\n * to create custom Observer-side logic of the Subject and conceal it from\n * code that uses the Observable.\n * @return {Observable} Observable that the Subject casts to\n */\n asObservable(): Observable {\n const observable: any = new Observable();\n observable.source = this;\n return observable;\n }\n}\n\n/**\n * @class AnonymousSubject\n */\nexport class AnonymousSubject extends Subject {\n constructor(\n /** @deprecated Internal implementation detail, do not use directly. Will be made internal in v8. */\n public destination?: Observer,\n source?: Observable\n ) {\n super();\n this.source = source;\n }\n\n next(value: T) {\n this.destination?.next?.(value);\n }\n\n error(err: any) {\n this.destination?.error?.(err);\n }\n\n complete() {\n this.destination?.complete?.();\n }\n\n /** @internal */\n protected _subscribe(subscriber: Subscriber): Subscription {\n return this.source?.subscribe(subscriber) ?? 
EMPTY_SUBSCRIPTION;\n }\n}\n", "import { Subject } from './Subject';\nimport { Subscriber } from './Subscriber';\nimport { Subscription } from './Subscription';\n\n/**\n * A variant of Subject that requires an initial value and emits its current\n * value whenever it is subscribed to.\n *\n * @class BehaviorSubject\n */\nexport class BehaviorSubject extends Subject {\n constructor(private _value: T) {\n super();\n }\n\n get value(): T {\n return this.getValue();\n }\n\n /** @internal */\n protected _subscribe(subscriber: Subscriber): Subscription {\n const subscription = super._subscribe(subscriber);\n !subscription.closed && subscriber.next(this._value);\n return subscription;\n }\n\n getValue(): T {\n const { hasError, thrownError, _value } = this;\n if (hasError) {\n throw thrownError;\n }\n this._throwIfClosed();\n return _value;\n }\n\n next(value: T): void {\n super.next((this._value = value));\n }\n}\n", "import { TimestampProvider } from '../types';\n\ninterface DateTimestampProvider extends TimestampProvider {\n delegate: TimestampProvider | undefined;\n}\n\nexport const dateTimestampProvider: DateTimestampProvider = {\n now() {\n // Use the variable rather than `this` so that the function can be called\n // without being bound to the provider.\n return (dateTimestampProvider.delegate || Date).now();\n },\n delegate: undefined,\n};\n", "import { Subject } from './Subject';\nimport { TimestampProvider } from './types';\nimport { Subscriber } from './Subscriber';\nimport { Subscription } from './Subscription';\nimport { dateTimestampProvider } from './scheduler/dateTimestampProvider';\n\n/**\n * A variant of {@link Subject} that \"replays\" old values to new subscribers by emitting them when they first subscribe.\n *\n * `ReplaySubject` has an internal buffer that will store a specified number of values that it has observed. Like `Subject`,\n * `ReplaySubject` \"observes\" values by having them passed to its `next` method. When it observes a value, it will store that\n * value for a time determined by the configuration of the `ReplaySubject`, as passed to its constructor.\n *\n * When a new subscriber subscribes to the `ReplaySubject` instance, it will synchronously emit all values in its buffer in\n * a First-In-First-Out (FIFO) manner. The `ReplaySubject` will also complete, if it has observed completion; and it will\n * error if it has observed an error.\n *\n * There are two main configuration items to be concerned with:\n *\n * 1. `bufferSize` - This will determine how many items are stored in the buffer, defaults to infinite.\n * 2. `windowTime` - The amount of time to hold a value in the buffer before removing it from the buffer.\n *\n * Both configurations may exist simultaneously. So if you would like to buffer a maximum of 3 values, as long as the values\n * are less than 2 seconds old, you could do so with a `new ReplaySubject(3, 2000)`.\n *\n * ### Differences with BehaviorSubject\n *\n * `BehaviorSubject` is similar to `new ReplaySubject(1)`, with a couple of exceptions:\n *\n * 1. `BehaviorSubject` comes \"primed\" with a single value upon construction.\n * 2. 
`ReplaySubject` will replay values, even after observing an error, where `BehaviorSubject` will not.\n *\n * @see {@link Subject}\n * @see {@link BehaviorSubject}\n * @see {@link shareReplay}\n */\nexport class ReplaySubject extends Subject {\n private _buffer: (T | number)[] = [];\n private _infiniteTimeWindow = true;\n\n /**\n * @param bufferSize The size of the buffer to replay on subscription\n * @param windowTime The amount of time the buffered items will stay buffered\n * @param timestampProvider An object with a `now()` method that provides the current timestamp. This is used to\n * calculate the amount of time something has been buffered.\n */\n constructor(\n private _bufferSize = Infinity,\n private _windowTime = Infinity,\n private _timestampProvider: TimestampProvider = dateTimestampProvider\n ) {\n super();\n this._infiniteTimeWindow = _windowTime === Infinity;\n this._bufferSize = Math.max(1, _bufferSize);\n this._windowTime = Math.max(1, _windowTime);\n }\n\n next(value: T): void {\n const { isStopped, _buffer, _infiniteTimeWindow, _timestampProvider, _windowTime } = this;\n if (!isStopped) {\n _buffer.push(value);\n !_infiniteTimeWindow && _buffer.push(_timestampProvider.now() + _windowTime);\n }\n this._trimBuffer();\n super.next(value);\n }\n\n /** @internal */\n protected _subscribe(subscriber: Subscriber): Subscription {\n this._throwIfClosed();\n this._trimBuffer();\n\n const subscription = this._innerSubscribe(subscriber);\n\n const { _infiniteTimeWindow, _buffer } = this;\n // We use a copy here, so reentrant code does not mutate our array while we're\n // emitting it to a new subscriber.\n const copy = _buffer.slice();\n for (let i = 0; i < copy.length && !subscriber.closed; i += _infiniteTimeWindow ? 1 : 2) {\n subscriber.next(copy[i] as T);\n }\n\n this._checkFinalizedStatuses(subscriber);\n\n return subscription;\n }\n\n private _trimBuffer() {\n const { _bufferSize, _timestampProvider, _buffer, _infiniteTimeWindow } = this;\n // If we don't have an infinite buffer size, and we're over the length,\n // use splice to truncate the old buffer values off. Note that we have to\n // double the size for instances where we're not using an infinite time window\n // because we're storing the values and the timestamps in the same array.\n const adjustedBufferSize = (_infiniteTimeWindow ? 1 : 2) * _bufferSize;\n _bufferSize < Infinity && adjustedBufferSize < _buffer.length && _buffer.splice(0, _buffer.length - adjustedBufferSize);\n\n // Now, if we're not in an infinite time window, remove all values where the time is\n // older than what is allowed.\n if (!_infiniteTimeWindow) {\n const now = _timestampProvider.now();\n let last = 0;\n // Search the array for the first timestamp that isn't expired and\n // truncate the buffer up to that point.\n for (let i = 1; i < _buffer.length && (_buffer[i] as number) <= now; i += 2) {\n last = i;\n }\n last && _buffer.splice(0, last + 1);\n }\n }\n}\n", "import { Scheduler } from '../Scheduler';\nimport { Subscription } from '../Subscription';\nimport { SchedulerAction } from '../types';\n\n/**\n * A unit of work to be executed in a `scheduler`. 
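The differences between the subject variants defined above are clearest with a late subscriber. A minimal sketch:

```ts
import { Subject, BehaviorSubject, ReplaySubject } from 'rxjs';

const subject = new Subject<number>();
const behavior = new BehaviorSubject(0);
const replay = new ReplaySubject<number>(2); // buffer the last 2 values

// Push the same values through each variant before anyone subscribes.
[subject, behavior, replay].forEach((s) => {
  s.next(1);
  s.next(2);
  s.next(3);
});

subject.subscribe((v) => console.log('subject:', v)); // (nothing: no replay)
behavior.subscribe((v) => console.log('behavior:', v)); // behavior: 3
replay.subscribe((v) => console.log('replay:', v)); // replay: 2, replay: 3
```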
An action is typically\n * created from within a {@link SchedulerLike} and an RxJS user does not need to concern\n * themselves about creating and manipulating an Action.\n *\n * ```ts\n * class Action extends Subscription {\n * new (scheduler: Scheduler, work: (state?: T) => void);\n * schedule(state?: T, delay: number = 0): Subscription;\n * }\n * ```\n *\n * @class Action\n */\nexport class Action extends Subscription {\n constructor(scheduler: Scheduler, work: (this: SchedulerAction, state?: T) => void) {\n super();\n }\n /**\n * Schedules this action on its parent {@link SchedulerLike} for execution. May be passed\n * some context object, `state`. May happen at some point in the future,\n * according to the `delay` parameter, if specified.\n * @param {T} [state] Some contextual data that the `work` function uses when\n * called by the Scheduler.\n * @param {number} [delay] Time to wait before executing the work, where the\n * time unit is implicit and defined by the Scheduler.\n * @return {void}\n */\n public schedule(state?: T, delay: number = 0): Subscription {\n return this;\n }\n}\n", "import type { TimerHandle } from './timerHandle';\ntype SetIntervalFunction = (handler: () => void, timeout?: number, ...args: any[]) => TimerHandle;\ntype ClearIntervalFunction = (handle: TimerHandle) => void;\n\ninterface IntervalProvider {\n setInterval: SetIntervalFunction;\n clearInterval: ClearIntervalFunction;\n delegate:\n | {\n setInterval: SetIntervalFunction;\n clearInterval: ClearIntervalFunction;\n }\n | undefined;\n}\n\nexport const intervalProvider: IntervalProvider = {\n // When accessing the delegate, use the variable rather than `this` so that\n // the functions can be called without being bound to the provider.\n setInterval(handler: () => void, timeout?: number, ...args) {\n const { delegate } = intervalProvider;\n if (delegate?.setInterval) {\n return delegate.setInterval(handler, timeout, ...args);\n }\n return setInterval(handler, timeout, ...args);\n },\n clearInterval(handle) {\n const { delegate } = intervalProvider;\n return (delegate?.clearInterval || clearInterval)(handle as any);\n },\n delegate: undefined,\n};\n", "import { Action } from './Action';\nimport { SchedulerAction } from '../types';\nimport { Subscription } from '../Subscription';\nimport { AsyncScheduler } from './AsyncScheduler';\nimport { intervalProvider } from './intervalProvider';\nimport { arrRemove } from '../util/arrRemove';\nimport { TimerHandle } from './timerHandle';\n\nexport class AsyncAction extends Action {\n public id: TimerHandle | undefined;\n public state?: T;\n // @ts-ignore: Property has no initializer and is not definitely assigned\n public delay: number;\n protected pending: boolean = false;\n\n constructor(protected scheduler: AsyncScheduler, protected work: (this: SchedulerAction, state?: T) => void) {\n super(scheduler, work);\n }\n\n public schedule(state?: T, delay: number = 0): Subscription {\n if (this.closed) {\n return this;\n }\n\n // Always replace the current state with the new state.\n this.state = state;\n\n const id = this.id;\n const scheduler = this.scheduler;\n\n //\n // Important implementation note:\n //\n // Actions only execute once by default, unless rescheduled from within the\n // scheduled callback. 
This allows us to implement single and repeat\n // actions via the same code path, without adding API surface area, as well\n // as mimic traditional recursion but across asynchronous boundaries.\n //\n // However, JS runtimes and timers distinguish between intervals achieved by\n // serial `setTimeout` calls vs. a single `setInterval` call. An interval of\n // serial `setTimeout` calls can be individually delayed, which delays\n // scheduling the next `setTimeout`, and so on. `setInterval` attempts to\n // guarantee the interval callback will be invoked more precisely to the\n // interval period, regardless of load.\n //\n // Therefore, we use `setInterval` to schedule single and repeat actions.\n // If the action reschedules itself with the same delay, the interval is not\n // canceled. If the action doesn't reschedule, or reschedules with a\n // different delay, the interval will be canceled after scheduled callback\n // execution.\n //\n if (id != null) {\n this.id = this.recycleAsyncId(scheduler, id, delay);\n }\n\n // Set the pending flag indicating that this action has been scheduled, or\n // has recursively rescheduled itself.\n this.pending = true;\n\n this.delay = delay;\n // If this action has already an async Id, don't request a new one.\n this.id = this.id ?? this.requestAsyncId(scheduler, this.id, delay);\n\n return this;\n }\n\n protected requestAsyncId(scheduler: AsyncScheduler, _id?: TimerHandle, delay: number = 0): TimerHandle {\n return intervalProvider.setInterval(scheduler.flush.bind(scheduler, this), delay);\n }\n\n protected recycleAsyncId(_scheduler: AsyncScheduler, id?: TimerHandle, delay: number | null = 0): TimerHandle | undefined {\n // If this action is rescheduled with the same delay time, don't clear the interval id.\n if (delay != null && this.delay === delay && this.pending === false) {\n return id;\n }\n // Otherwise, if the action's delay time is different from the current delay,\n // or the action has been rescheduled before it's executed, clear the interval id\n if (id != null) {\n intervalProvider.clearInterval(id);\n }\n\n return undefined;\n }\n\n /**\n * Immediately executes this action and the `work` it contains.\n * @return {any}\n */\n public execute(state: T, delay: number): any {\n if (this.closed) {\n return new Error('executing a cancelled action');\n }\n\n this.pending = false;\n const error = this._execute(state, delay);\n if (error) {\n return error;\n } else if (this.pending === false && this.id != null) {\n // Dequeue if the action didn't reschedule itself. Don't call\n // unsubscribe(), because the action could reschedule later.\n // For example:\n // ```\n // scheduler.schedule(function doWork(counter) {\n // /* ... I'm a busy worker bee ... */\n // var originalAction = this;\n // /* wait 100ms before rescheduling the action */\n // setTimeout(function () {\n // originalAction.schedule(counter + 1);\n // }, 100);\n // }, 1000);\n // ```\n this.id = this.recycleAsyncId(this.scheduler, this.id, null);\n }\n }\n\n protected _execute(state: T, _delay: number): any {\n let errored: boolean = false;\n let errorValue: any;\n try {\n this.work(state);\n } catch (e) {\n errored = true;\n // HACK: Since code elsewhere is relying on the \"truthiness\" of the\n // return here, we can't have it return \"\" or 0 or false.\n // TODO: Clean this up when we refactor schedulers mid-version-8 or so.\n errorValue = e ? 
e : new Error('Scheduled action threw falsy error');\n }\n if (errored) {\n this.unsubscribe();\n return errorValue;\n }\n }\n\n unsubscribe() {\n if (!this.closed) {\n const { id, scheduler } = this;\n const { actions } = scheduler;\n\n this.work = this.state = this.scheduler = null!;\n this.pending = false;\n\n arrRemove(actions, this);\n if (id != null) {\n this.id = this.recycleAsyncId(scheduler, id, null);\n }\n\n this.delay = null!;\n super.unsubscribe();\n }\n }\n}\n", "import { Action } from './scheduler/Action';\nimport { Subscription } from './Subscription';\nimport { SchedulerLike, SchedulerAction } from './types';\nimport { dateTimestampProvider } from './scheduler/dateTimestampProvider';\n\n/**\n * An execution context and a data structure to order tasks and schedule their\n * execution. Provides a notion of (potentially virtual) time, through the\n * `now()` getter method.\n *\n * Each unit of work in a Scheduler is called an `Action`.\n *\n * ```ts\n * class Scheduler {\n * now(): number;\n * schedule(work, delay?, state?): Subscription;\n * }\n * ```\n *\n * @class Scheduler\n * @deprecated Scheduler is an internal implementation detail of RxJS, and\n * should not be used directly. Rather, create your own class and implement\n * {@link SchedulerLike}. Will be made internal in v8.\n */\nexport class Scheduler implements SchedulerLike {\n public static now: () => number = dateTimestampProvider.now;\n\n constructor(private schedulerActionCtor: typeof Action, now: () => number = Scheduler.now) {\n this.now = now;\n }\n\n /**\n * A getter method that returns a number representing the current time\n * (at the time this function was called) according to the scheduler's own\n * internal clock.\n * @return {number} A number that represents the current time. May or may not\n * have a relation to wall-clock time. May or may not refer to a time unit\n * (e.g. milliseconds).\n */\n public now: () => number;\n\n /**\n * Schedules a function, `work`, for execution. May happen at some point in\n * the future, according to the `delay` parameter, if specified. 
May be passed\n * some context object, `state`, which will be passed to the `work` function.\n *\n * The given arguments will be processed an stored as an Action object in a\n * queue of actions.\n *\n * @param {function(state: ?T): ?Subscription} work A function representing a\n * task, or some unit of work to be executed by the Scheduler.\n * @param {number} [delay] Time to wait before executing the work, where the\n * time unit is implicit and defined by the Scheduler itself.\n * @param {T} [state] Some contextual data that the `work` function uses when\n * called by the Scheduler.\n * @return {Subscription} A subscription in order to be able to unsubscribe\n * the scheduled work.\n */\n public schedule(work: (this: SchedulerAction, state?: T) => void, delay: number = 0, state?: T): Subscription {\n return new this.schedulerActionCtor(this, work).schedule(state, delay);\n }\n}\n", "import { Scheduler } from '../Scheduler';\nimport { Action } from './Action';\nimport { AsyncAction } from './AsyncAction';\nimport { TimerHandle } from './timerHandle';\n\nexport class AsyncScheduler extends Scheduler {\n public actions: Array> = [];\n /**\n * A flag to indicate whether the Scheduler is currently executing a batch of\n * queued actions.\n * @type {boolean}\n * @internal\n */\n public _active: boolean = false;\n /**\n * An internal ID used to track the latest asynchronous task such as those\n * coming from `setTimeout`, `setInterval`, `requestAnimationFrame`, and\n * others.\n * @type {any}\n * @internal\n */\n public _scheduled: TimerHandle | undefined;\n\n constructor(SchedulerAction: typeof Action, now: () => number = Scheduler.now) {\n super(SchedulerAction, now);\n }\n\n public flush(action: AsyncAction): void {\n const { actions } = this;\n\n if (this._active) {\n actions.push(action);\n return;\n }\n\n let error: any;\n this._active = true;\n\n do {\n if ((error = action.execute(action.state, action.delay))) {\n break;\n }\n } while ((action = actions.shift()!)); // exhaust the scheduler queue\n\n this._active = false;\n\n if (error) {\n while ((action = actions.shift()!)) {\n action.unsubscribe();\n }\n throw error;\n }\n }\n}\n", "import { AsyncAction } from './AsyncAction';\nimport { AsyncScheduler } from './AsyncScheduler';\n\n/**\n *\n * Async Scheduler\n *\n * Schedule task as if you used setTimeout(task, duration)\n *\n * `async` scheduler schedules tasks asynchronously, by putting them on the JavaScript\n * event loop queue. 
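Because `schedule` returns the `Action` (which is a `Subscription`), pending work can be cancelled before it runs. A short sketch:

```ts
import { asyncScheduler } from 'rxjs';

// Unsubscribing before the delay elapses clears the underlying timer
// (see AsyncAction.unsubscribe above), so the work never executes.
const sub = asyncScheduler.schedule(() => console.log('never runs'), 1000);
sub.unsubscribe(); // nothing is logged
```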
It is best used to delay tasks in time or to schedule tasks repeating\n * in intervals.\n *\n * If you just want to \"defer\" task, that is to perform it right after currently\n * executing synchronous code ends (commonly achieved by `setTimeout(deferredTask, 0)`),\n * better choice will be the {@link asapScheduler} scheduler.\n *\n * ## Examples\n * Use async scheduler to delay task\n * ```ts\n * import { asyncScheduler } from 'rxjs';\n *\n * const task = () => console.log('it works!');\n *\n * asyncScheduler.schedule(task, 2000);\n *\n * // After 2 seconds logs:\n * // \"it works!\"\n * ```\n *\n * Use async scheduler to repeat task in intervals\n * ```ts\n * import { asyncScheduler } from 'rxjs';\n *\n * function task(state) {\n * console.log(state);\n * this.schedule(state + 1, 1000); // `this` references currently executing Action,\n * // which we reschedule with new state and delay\n * }\n *\n * asyncScheduler.schedule(task, 3000, 0);\n *\n * // Logs:\n * // 0 after 3s\n * // 1 after 4s\n * // 2 after 5s\n * // 3 after 6s\n * ```\n */\n\nexport const asyncScheduler = new AsyncScheduler(AsyncAction);\n\n/**\n * @deprecated Renamed to {@link asyncScheduler}. Will be removed in v8.\n */\nexport const async = asyncScheduler;\n", "import { AsyncAction } from './AsyncAction';\nimport { Subscription } from '../Subscription';\nimport { QueueScheduler } from './QueueScheduler';\nimport { SchedulerAction } from '../types';\nimport { TimerHandle } from './timerHandle';\n\nexport class QueueAction extends AsyncAction {\n constructor(protected scheduler: QueueScheduler, protected work: (this: SchedulerAction, state?: T) => void) {\n super(scheduler, work);\n }\n\n public schedule(state?: T, delay: number = 0): Subscription {\n if (delay > 0) {\n return super.schedule(state, delay);\n }\n this.delay = delay;\n this.state = state;\n this.scheduler.flush(this);\n return this;\n }\n\n public execute(state: T, delay: number): any {\n return delay > 0 || this.closed ? super.execute(state, delay) : this._execute(state, delay);\n }\n\n protected requestAsyncId(scheduler: QueueScheduler, id?: TimerHandle, delay: number = 0): TimerHandle {\n // If delay exists and is greater than 0, or if the delay is null (the\n // action wasn't rescheduled) but was originally scheduled as an async\n // action, then recycle as an async action.\n\n if ((delay != null && delay > 0) || (delay == null && this.delay > 0)) {\n return super.requestAsyncId(scheduler, id, delay);\n }\n\n // Otherwise flush the scheduler starting with this action.\n scheduler.flush(this);\n\n // HACK: In the past, this was returning `void`. However, `void` isn't a valid\n // `TimerHandle`, and generally the return value here isn't really used. So the\n // compromise is to return `0` which is both \"falsy\" and a valid `TimerHandle`,\n // as opposed to refactoring every other instanceo of `requestAsyncId`.\n return 0;\n }\n}\n", "import { AsyncScheduler } from './AsyncScheduler';\n\nexport class QueueScheduler extends AsyncScheduler {\n}\n", "import { QueueAction } from './QueueAction';\nimport { QueueScheduler } from './QueueScheduler';\n\n/**\n *\n * Queue Scheduler\n *\n * Put every next task on a queue, instead of executing it immediately\n *\n * `queue` scheduler, when used with delay, behaves the same as {@link asyncScheduler} scheduler.\n *\n * When used without delay, it schedules given task synchronously - executes it right when\n * it is scheduled. 
However when called recursively, that is when inside the scheduled task,\n * another task is scheduled with queue scheduler, instead of executing immediately as well,\n * that task will be put on a queue and wait for current one to finish.\n *\n * This means that when you execute task with `queue` scheduler, you are sure it will end\n * before any other task scheduled with that scheduler will start.\n *\n * ## Examples\n * Schedule recursively first, then do something\n * ```ts\n * import { queueScheduler } from 'rxjs';\n *\n * queueScheduler.schedule(() => {\n * queueScheduler.schedule(() => console.log('second')); // will not happen now, but will be put on a queue\n *\n * console.log('first');\n * });\n *\n * // Logs:\n * // \"first\"\n * // \"second\"\n * ```\n *\n * Reschedule itself recursively\n * ```ts\n * import { queueScheduler } from 'rxjs';\n *\n * queueScheduler.schedule(function(state) {\n * if (state !== 0) {\n * console.log('before', state);\n * this.schedule(state - 1); // `this` references currently executing Action,\n * // which we reschedule with new state\n * console.log('after', state);\n * }\n * }, 0, 3);\n *\n * // In scheduler that runs recursively, you would expect:\n * // \"before\", 3\n * // \"before\", 2\n * // \"before\", 1\n * // \"after\", 1\n * // \"after\", 2\n * // \"after\", 3\n *\n * // But with queue it logs:\n * // \"before\", 3\n * // \"after\", 3\n * // \"before\", 2\n * // \"after\", 2\n * // \"before\", 1\n * // \"after\", 1\n * ```\n */\n\nexport const queueScheduler = new QueueScheduler(QueueAction);\n\n/**\n * @deprecated Renamed to {@link queueScheduler}. Will be removed in v8.\n */\nexport const queue = queueScheduler;\n", "import { AsyncAction } from './AsyncAction';\nimport { AnimationFrameScheduler } from './AnimationFrameScheduler';\nimport { SchedulerAction } from '../types';\nimport { animationFrameProvider } from './animationFrameProvider';\nimport { TimerHandle } from './timerHandle';\n\nexport class AnimationFrameAction extends AsyncAction {\n constructor(protected scheduler: AnimationFrameScheduler, protected work: (this: SchedulerAction, state?: T) => void) {\n super(scheduler, work);\n }\n\n protected requestAsyncId(scheduler: AnimationFrameScheduler, id?: TimerHandle, delay: number = 0): TimerHandle {\n // If delay is greater than 0, request as an async action.\n if (delay !== null && delay > 0) {\n return super.requestAsyncId(scheduler, id, delay);\n }\n // Push the action to the end of the scheduler queue.\n scheduler.actions.push(this);\n // If an animation frame has already been requested, don't request another\n // one. If an animation frame hasn't been requested yet, request one. Return\n // the current animation frame request id.\n return scheduler._scheduled || (scheduler._scheduled = animationFrameProvider.requestAnimationFrame(() => scheduler.flush(undefined)));\n }\n\n protected recycleAsyncId(scheduler: AnimationFrameScheduler, id?: TimerHandle, delay: number = 0): TimerHandle | undefined {\n // If delay exists and is greater than 0, or if the delay is null (the\n // action wasn't rescheduled) but was originally scheduled as an async\n // action, then recycle as an async action.\n if (delay != null ? 
delay > 0 : this.delay > 0) {\n return super.recycleAsyncId(scheduler, id, delay);\n }\n // If the scheduler queue has no remaining actions with the same async id,\n // cancel the requested animation frame and set the scheduled flag to\n // undefined so the next AnimationFrameAction will request its own.\n const { actions } = scheduler;\n if (id != null && actions[actions.length - 1]?.id !== id) {\n animationFrameProvider.cancelAnimationFrame(id as number);\n scheduler._scheduled = undefined;\n }\n // Return undefined so the action knows to request a new async id if it's rescheduled.\n return undefined;\n }\n}\n", "import { AsyncAction } from './AsyncAction';\nimport { AsyncScheduler } from './AsyncScheduler';\n\nexport class AnimationFrameScheduler extends AsyncScheduler {\n public flush(action?: AsyncAction): void {\n this._active = true;\n // The async id that effects a call to flush is stored in _scheduled.\n // Before executing an action, it's necessary to check the action's async\n // id to determine whether it's supposed to be executed in the current\n // flush.\n // Previous implementations of this method used a count to determine this,\n // but that was unsound, as actions that are unsubscribed - i.e. cancelled -\n // are removed from the actions array and that can shift actions that are\n // scheduled to be executed in a subsequent flush into positions at which\n // they are executed within the current flush.\n const flushId = this._scheduled;\n this._scheduled = undefined;\n\n const { actions } = this;\n let error: any;\n action = action || actions.shift()!;\n\n do {\n if ((error = action.execute(action.state, action.delay))) {\n break;\n }\n } while ((action = actions[0]) && action.id === flushId && actions.shift());\n\n this._active = false;\n\n if (error) {\n while ((action = actions[0]) && action.id === flushId && actions.shift()) {\n action.unsubscribe();\n }\n throw error;\n }\n }\n}\n", "import { AnimationFrameAction } from './AnimationFrameAction';\nimport { AnimationFrameScheduler } from './AnimationFrameScheduler';\n\n/**\n *\n * Animation Frame Scheduler\n *\n * Perform task when `window.requestAnimationFrame` would fire\n *\n * When `animationFrame` scheduler is used with delay, it will fall back to {@link asyncScheduler} scheduler\n * behaviour.\n *\n * Without delay, `animationFrame` scheduler can be used to create smooth browser animations.\n * It makes sure scheduled task will happen just before next browser content repaint,\n * thus performing animations as efficiently as possible.\n *\n * ## Example\n * Schedule div height animation\n * ```ts\n * // html:
\n * import { animationFrameScheduler } from 'rxjs';\n *\n * const div = document.querySelector('div');\n *\n * animationFrameScheduler.schedule(function(height) {\n * div.style.height = height + \"px\";\n *\n * this.schedule(height + 1); // `this` references currently executing Action,\n * // which we reschedule with new state\n * }, 0, 0);\n *\n * // You will see a div element growing in height\n * ```\n */\n\nexport const animationFrameScheduler = new AnimationFrameScheduler(AnimationFrameAction);\n\n/**\n * @deprecated Renamed to {@link animationFrameScheduler}. Will be removed in v8.\n */\nexport const animationFrame = animationFrameScheduler;\n", "import { Observable } from '../Observable';\nimport { SchedulerLike } from '../types';\n\n/**\n * A simple Observable that emits no items to the Observer and immediately\n * emits a complete notification.\n *\n * Just emits 'complete', and nothing else.\n *\n * ![](empty.png)\n *\n * A simple Observable that only emits the complete notification. It can be used\n * for composing with other Observables, such as in a {@link mergeMap}.\n *\n * ## Examples\n *\n * Log complete notification\n *\n * ```ts\n * import { EMPTY } from 'rxjs';\n *\n * EMPTY.subscribe({\n * next: () => console.log('Next'),\n * complete: () => console.log('Complete!')\n * });\n *\n * // Outputs\n * // Complete!\n * ```\n *\n * Emit the number 7, then complete\n *\n * ```ts\n * import { EMPTY, startWith } from 'rxjs';\n *\n * const result = EMPTY.pipe(startWith(7));\n * result.subscribe(x => console.log(x));\n *\n * // Outputs\n * // 7\n * ```\n *\n * Map and flatten only odd numbers to the sequence `'a'`, `'b'`, `'c'`\n *\n * ```ts\n * import { interval, mergeMap, of, EMPTY } from 'rxjs';\n *\n * const interval$ = interval(1000);\n * const result = interval$.pipe(\n * mergeMap(x => x % 2 === 1 ? of('a', 'b', 'c') : EMPTY),\n * );\n * result.subscribe(x => console.log(x));\n *\n * // Results in the following to the console:\n * // x is equal to the count on the interval, e.g. (0, 1, 2, 3, ...)\n * // x will occur every 1000ms\n * // if x % 2 is equal to 1, print a, b, c (each on its own)\n * // if x % 2 is not equal to 1, nothing will be output\n * ```\n *\n * @see {@link Observable}\n * @see {@link NEVER}\n * @see {@link of}\n * @see {@link throwError}\n */\nexport const EMPTY = new Observable((subscriber) => subscriber.complete());\n\n/**\n * @param scheduler A {@link SchedulerLike} to use for scheduling\n * the emission of the complete notification.\n * @deprecated Replaced with the {@link EMPTY} constant or {@link scheduled} (e.g. `scheduled([], scheduler)`). Will be removed in v8.\n */\nexport function empty(scheduler?: SchedulerLike) {\n return scheduler ? emptyScheduled(scheduler) : EMPTY;\n}\n\nfunction emptyScheduled(scheduler: SchedulerLike) {\n return new Observable((subscriber) => scheduler.schedule(() => subscriber.complete()));\n}\n", "import { SchedulerLike } from '../types';\nimport { isFunction } from './isFunction';\n\nexport function isScheduler(value: any): value is SchedulerLike {\n return value && isFunction(value.schedule);\n}\n", "import { SchedulerLike } from '../types';\nimport { isFunction } from './isFunction';\nimport { isScheduler } from './isScheduler';\n\nfunction last(arr: T[]): T | undefined {\n return arr[arr.length - 1];\n}\n\nexport function popResultSelector(args: any[]): ((...args: unknown[]) => unknown) | undefined {\n return isFunction(last(args)) ? 
args.pop() : undefined;\n}\n\nexport function popScheduler(args: any[]): SchedulerLike | undefined {\n return isScheduler(last(args)) ? args.pop() : undefined;\n}\n\nexport function popNumber(args: any[], defaultValue: number): number {\n return typeof last(args) === 'number' ? args.pop()! : defaultValue;\n}\n", "export const isArrayLike = ((x: any): x is ArrayLike => x && typeof x.length === 'number' && typeof x !== 'function');", "import { isFunction } from \"./isFunction\";\n\n/**\n * Tests to see if the object is \"thennable\".\n * @param value the object to test\n */\nexport function isPromise(value: any): value is PromiseLike {\n return isFunction(value?.then);\n}\n", "import { InteropObservable } from '../types';\nimport { observable as Symbol_observable } from '../symbol/observable';\nimport { isFunction } from './isFunction';\n\n/** Identifies an input as being Observable (but not necessary an Rx Observable) */\nexport function isInteropObservable(input: any): input is InteropObservable {\n return isFunction(input[Symbol_observable]);\n}\n", "import { isFunction } from './isFunction';\n\nexport function isAsyncIterable(obj: any): obj is AsyncIterable {\n return Symbol.asyncIterator && isFunction(obj?.[Symbol.asyncIterator]);\n}\n", "/**\n * Creates the TypeError to throw if an invalid object is passed to `from` or `scheduled`.\n * @param input The object that was passed.\n */\nexport function createInvalidObservableTypeError(input: any) {\n // TODO: We should create error codes that can be looked up, so this can be less verbose.\n return new TypeError(\n `You provided ${\n input !== null && typeof input === 'object' ? 'an invalid object' : `'${input}'`\n } where a stream was expected. You can provide an Observable, Promise, ReadableStream, Array, AsyncIterable, or Iterable.`\n );\n}\n", "export function getSymbolIterator(): symbol {\n if (typeof Symbol !== 'function' || !Symbol.iterator) {\n return '@@iterator' as any;\n }\n\n return Symbol.iterator;\n}\n\nexport const iterator = getSymbolIterator();\n", "import { iterator as Symbol_iterator } from '../symbol/iterator';\nimport { isFunction } from './isFunction';\n\n/** Identifies an input as being an Iterable */\nexport function isIterable(input: any): input is Iterable {\n return isFunction(input?.[Symbol_iterator]);\n}\n", "import { ReadableStreamLike } from '../types';\nimport { isFunction } from './isFunction';\n\nexport async function* readableStreamLikeToAsyncGenerator(readableStream: ReadableStreamLike): AsyncGenerator {\n const reader = readableStream.getReader();\n try {\n while (true) {\n const { value, done } = await reader.read();\n if (done) {\n return;\n }\n yield value!;\n }\n } finally {\n reader.releaseLock();\n }\n}\n\nexport function isReadableStreamLike(obj: any): obj is ReadableStreamLike {\n // We don't want to use instanceof checks because they would return\n // false for instances from another Realm, like an