diff --git a/.gitignore b/.gitignore index 01857da..3ceb268 100644 --- a/.gitignore +++ b/.gitignore @@ -167,3 +167,6 @@ cython_debug/ # and can be added to the global gitignore or merged into this file. For a more nuclear # option (not recommended) you can uncomment the following to ignore the entire idea folder. #.idea/ +signature.png +**/log.txt +log.txt diff --git a/README.md b/README.md index 695ad2c..2aa5662 100644 --- a/README.md +++ b/README.md @@ -4,15 +4,18 @@ A simple Python wrapper for Oracle OFS REST API ## Models -Starting with OFS 1.17 we are adding models for the most common entities. All models should be imported from `ofsc.models`. All existing create functions will be transitioned to models. In OFS 2.0 all functions will use models +Starting with OFS 1.17 we added models for the most common entities and metadata. All models should be imported from `ofsc.models`. All existing create functions will be transitioned to models. In OFS 2.0 all functions will use models -The models are based on the Pydantic BaseModel, so it is possible to build an entity using the `parse_obj` or `parse_file` static methods. +The models are based on the Pydantic BaseModel, so it is possible to build an entity using the `model_validate` static methods. 
Currently implemented: +- ActivityTypeGroup +- ActivityType +- Property - Workskill - WorkSkillCondition - Workzone -- Property + Experimental: - Resource @@ -35,53 +38,60 @@ Experimental: ### Core / Events - get_subscriptions(self, response_type=TEXT_RESPONSE) - create_subscription(self, data, response_type=TEXT_RESPONSE) - delete_subscription(self, subscription_id, response_type=FULL_RESPONSE) - get_subscription_details(self, subscription_id, response_type=TEXT_RESPONSE) - get_events(self, params, response_type=TEXT_RESPONSE) + get_subscriptions(self, response_type=JSON_RESPONSE) + create_subscription(self, data, response_type=JSON_RESPONSE) + delete_subscription(self, subscription_id, response_type=JSON_RESPONSE) + get_subscription_details(self, subscription_id, response_type=JSON_RESPONSE) + get_events(self, params, response_type=JSON_RESPONSE) ### Core / Resources - create_resource(self, resourceId, data, response_type=TEXT_RESPONSE) - get_resource(self, resource_id, inventories=False, workSkills=False, workZones=False, workSchedules=False , response_type=TEXT_RESPONSE) - get_position_history(self, resource_id,date,response_type=TEXT_RESPONSE) - get_resource_route(self, resource_id, date, activityFields = None, offset=0, limit=100, response_type=TEXT_RESPONSE) - get_resource_descendants(self, resource_id, resourceFields=None, offset=0, limit=100, inventories=False, workSkills=False, workZones=False, workSchedules=False, response_type=TEXT_RESPONSE) + create_resource(self, resourceId, data, response_type=JSON_RESPONSE) + get_resource(self, resource_id, inventories=False, workSkills=False, workZones=False, workSchedules=False , response_type=JSON_RESPONSE) + get_position_history(self, resource_id,date,response_type=JSON_RESPONSE) + get_resource_route(self, resource_id, date, activityFields = None, offset=0, limit=100, response_type=JSON_RESPONSE) + get_resource_descendants(self, resource_id, resourceFields=None, offset=0, limit=100, inventories=False, 
workSkills=False, workZones=False, workSchedules=False, response_type=JSON_RESPONSE) ### Core / Users - get_users(self, offset=0, limit=100, response_type=FULL_RESPONSE) - get_user(self, login, response_type=FULL_RESPONSE): - update_user (self, login, data, response_type=TEXT_RESPONSE) - create_user(self, login, data, response_type=FULL_RESPONSE) - delete_user(self, login, response_type=FULL_RESPONSE) + get_users(self, offset=0, limit=100, response_type=JSON_RESPONSE) + get_user(self, login, response_type=JSON_RESPONSE): + update_user (self, login, data, response_type=JSON_RESPONSE) + create_user(self, login, data, response_type=JSON_RESPONSE) + delete_user(self, login, response_type=JSON_RESPONSE) ### Core / Daily Extract - get_daily_extract_dates(self, response_type=FULL_RESPONSE) - get_daily_extract_files(self, date, response_type=FULL_RESPONSE) - get_daily_extract_file(self, date, filename, response_type=FULL_RESPONSE) + get_daily_extract_dates(self, response_type=JSON_RESPONSE) + get_daily_extract_files(self, date, response_type=JSON_RESPONSE) + get_daily_extract_file(self, date, filename, response_type=JSON_RESPONSE) +\ +### Metadata / Activity Type Groups + get_activity_type_groups (self, expand="parent", offset=0, limit=100, response_type=JSON_RESPONSE) + get_activity_type_group (self,label, response_type=JSON_RESPONSE) + +### Metadata / Activity Types + get_activity_types(self, offset=0, limit=100, response_type=JSON_RESPONSE) + get_activity_type (self, label, response_type=JSON_RESPONSE) ### Metadata / Capacity - get_capacity_areas (self, expand="parent", fields=capacityAreasFields, status="active", queryType="area", response_type=FULL_RESPONSE) - get_capacity_area (self,label, response_type=FULL_RESPONSE) + get_capacity_areas (self, expand="parent", fields=capacityAreasFields, status="active", queryType="area", response_type=JSON_RESPONSE) + get_capacity_area (self,label, response_type=JSON_RESPONSE) -### Metadata / Activity Types - 
get_activity_type_groups (self, expand="parent", offset=0, limit=100, response_type=FULL_RESPONSE) - get_activity_type_group (self,label, response_type=FULL_RESPONSE) - get_activity_types(self, offset=0, limit=100, response_type=FULL_RESPONSE) - get_activity_type (self, label, response_type=FULL_RESPONSE) +### Metadata / Inventory + get_inventory_types (self, offset=0, limit=100, response_type=JSON_RESPONSE) + get_inventory_type (self, label, response_type=JSON_RESPONSE) + create_or_replace_inventory_type(self, inventory: InventoryType, response_type=JSON_RESPONSE) ### Metadata / Properties - get_properties (self, offset=0, limit=100, response_type=FULL_RESPONSE) + get_properties (self, offset=0, limit=100, response_type=JSON_RESPONSE) get_property(self, label: str, response_type=JSON_RESPONSE) create_or_replace_property(self, property: Property, response_type=JSON_RESPONSE) ### Metadata / Workskills - get_workskills (self, offset=0, limit=100, response_type=FULL_RESPONSE) - get_workskill(self, label: str, response_type=FULL_RESPONSE) - create_or_update_workskill(self, skill: Workskill, response_type=FULL_RESPONSE) - delete_workskill(self, label: str, response_type=FULL_RESPONSE) - get_workskill_conditions(self, response_type=FULL_RESPONSE) - replace_workskill_conditions(self, data: WorskillConditionList, response_type=FULL_RESPONSE + get_workskills (self, offset=0, limit=100, response_type=JSON_RESPONSE) + get_workskill(self, label: str, response_type=JSON_RESPONSE) + create_or_update_workskill(self, skill: Workskill, response_type=JSON_RESPONSE) + delete_workskill(self, label: str, response_type=JSON_RESPONSE) + get_workskill_conditions(self, response_type=JSON_RESPONSE) + replace_workskill_conditions(self, data: WorskillConditionList, response_type=JSON_RESPONSE) ### Metadata / Plugins import_plugin(self, plugin: str) @@ -90,8 +100,8 @@ Experimental: ### Metadata / Resource Types get_resource_types(self, response_type=JSON_RESPONSE): -### Metadata / workzones - 
get_workzones(self, response_type=FULL_RESPONSE) +### Metadata / Workzones + get_workzones(self, response_type=JSON_RESPONSE) ## Test History @@ -102,10 +112,11 @@ OFS REST API Version | PyOFSC 21D| 1.15 22B| 1.16, 1.17 22D| 1.18 +24A| 2.0 ## Deprecation Warning -Starting in OFSC 2.0 (estimated for December 2022) all functions will have to be called using the API name (Core or Metadata). See the examples. +Starting in OFSC 2.0 all functions are called using the API name (Core or Metadata). See the examples. Instead of @@ -117,4 +128,12 @@ It will be required to use the right API module: instance = OFSC(..) list_of_activites = instance.core.get_activities(...) -During the transition period a DeprecationWarning will be raised if the functions are used in the old way \ No newline at end of file +During the transition period a DeprecationWarning will be raised if the functions are used in the old way + +## What's new in OFSC 2.0 + +- All metadata functions now use models, when available +- All functions are now using the API name (Core or Metadata) +- All functions return a python object by default. If there is an available model it will be used, otherwise a dict will be returned (see `response_type` parameter and `auto_model` parameter) +- Errors during API calls can raise exceptions and will by default when returning an object (see `auto_raise` parameter) +- JSON_RESPONSE and TEXT_RESPONSE are now deprecated. 
Use `response_type` parameter to control the response type \ No newline at end of file diff --git a/examples/get_capacity_areas.py b/examples/get_capacity_areas.py index 2d82075..036b642 100644 --- a/examples/get_capacity_areas.py +++ b/examples/get_capacity_areas.py @@ -4,10 +4,10 @@ import logging import pprint +from config import Config from flatten_dict import flatten -from ofsc import FULL_RESPONSE, JSON_RESPONSE, OFSC -from config import Config +from ofsc import FULL_RESPONSE, OBJ_RESPONSE, OFSC capacityAreasFields = "label,name,type,status,parent.name,parent.label" diff --git a/examples/get_users_simple.py b/examples/get_users_simple.py index 19e5400..0ea2b0d 100755 --- a/examples/get_users_simple.py +++ b/examples/get_users_simple.py @@ -2,11 +2,11 @@ import argparse import logging -import ofsc -from ofsc import FULL_RESPONSE, JSON_RESPONSE, OFSC - from config import Config +import ofsc +from ofsc import FULL_RESPONSE, OBJ_RESPONSE, OFSC + def init_script(): # Parse arguments @@ -38,7 +38,7 @@ def init_script(): def get_users(instance): - response = instance.core.get_users(offset=0, limit=100, response_type=JSON_RESPONSE) + response = instance.core.get_users(offset=0, limit=100, response_type=OBJ_RESPONSE) total_results = response["totalResults"] offset = response["offset"] final_items_list = response["items"] @@ -50,7 +50,7 @@ def get_users(instance): ) offset = offset + 100 response_json = instance.core.get_users( - offset=offset, response_type=JSON_RESPONSE + offset=offset, response_type=OBJ_RESPONSE ) total_results = response_json["totalResults"] items = response_json["items"] diff --git a/examples/get_work_skill_conditions.py b/examples/get_work_skill_conditions.py index f826d2f..372b0ca 100644 --- a/examples/get_work_skill_conditions.py +++ b/examples/get_work_skill_conditions.py @@ -4,12 +4,12 @@ from logging import basicConfig, debug, info, warning from typing import AnyStr, List -import ofsc -from ofsc import FULL_RESPONSE, JSON_RESPONSE, OFSC 
-from ofsc.models import WorkskillCondition, WorskillConditionList +from config import Config from openpyxl import Workbook -from config import Config +import ofsc +from ofsc import FULL_RESPONSE, OBJ_RESPONSE, OFSC +from ofsc.models import WorkskillCondition, WorskillConditionList def init_script(): @@ -44,7 +44,7 @@ def init_script(): def get_workskill_list(): - response = instance.metadata.get_workskill_conditions(response_type=JSON_RESPONSE) + response = instance.metadata.get_workskill_conditions(response_type=OBJ_RESPONSE) ws_list = WorskillConditionList.parse_obj(response["items"]) return ws_list diff --git a/examples/get_workzones.py b/examples/get_workzones.py index d9c5e6e..88370b5 100644 --- a/examples/get_workzones.py +++ b/examples/get_workzones.py @@ -5,12 +5,12 @@ from logging import basicConfig, debug, info, warning from typing import AnyStr, List -import ofsc -from ofsc import FULL_RESPONSE, JSON_RESPONSE, OFSC -from ofsc.models import Workzone, WorkzoneList +from config import Config from openpyxl import Workbook -from config import Config +import ofsc +from ofsc import FULL_RESPONSE, OBJ_RESPONSE, OFSC +from ofsc.models import Workzone, WorkzoneList def init_script(): @@ -45,7 +45,7 @@ def init_script(): def get_workzone_list(): - response = instance.metadata.get_workzones(response_type=JSON_RESPONSE) + response = instance.metadata.get_workzones(response_type=OBJ_RESPONSE) return WorkzoneList.parse_obj(response["items"]) diff --git a/ofsc/__init__.py b/ofsc/__init__.py index 25fc474..d4c33f5 100644 --- a/ofsc/__init__.py +++ b/ofsc/__init__.py @@ -1,11 +1,6 @@ -import base64 import logging -from functools import wraps -from http import client -from urllib import response -from warnings import warn -from .common import FULL_RESPONSE, JSON_RESPONSE, TEXT_RESPONSE, wrap_return +from .common import FULL_RESPONSE, OBJ_RESPONSE, TEXT_RESPONSE from .core import OFSCore from .metadata import OFSMetadata from .models import OFSConfig @@ -13,11 +8,20 @@ 
class OFSC: - # 202308 The API portal was deprecated, so the default URL becomes {companyname}.fs.ocs.oraclecloud.com + # the default URL becomes {companyname}.fs.ocs.oraclecloud.com def __init__( - self, clientID, companyName, secret, root=None, baseUrl=None, useToken=False + self, + clientID, + companyName, + secret, + root=None, + baseUrl=None, + useToken=False, + enable_auto_raise=True, + enable_auto_model=True, ): + self._config = OFSConfig( baseURL=baseUrl, clientID=clientID, @@ -25,6 +29,8 @@ def __init__( companyName=companyName, root=root, useToken=useToken, + auto_raise=enable_auto_raise, # 20240401: This is a new feature that will raise an exception if the API returns an error + auto_model=enable_auto_model, # 20240401: This is a new feature that will return a pydantic model if the API returns a 200 ) self._core = OFSCore(config=self._config) self._metadata = OFSMetadata(config=self._config) @@ -62,6 +68,17 @@ def oauth2(self) -> OFSOauth2: self._oauth = OFSOauth2(config=self._config) return self._oauth + @property + def auto_model(self): + return self._config.auto_model + + @auto_model.setter + def auto_model(self, value): + self._config.auto_model = value + self._core.config.auto_model = value + self._metadata.config.auto_model = value + self._oauth.config.auto_model = value + def __str__(self) -> str: return f"baseURL={self._config.baseURL}" @@ -75,18 +92,14 @@ def __getattr__(self, method_name): def wrapper(*args, **kwargs): if method_name in self._core_methods: - warn( - f"{method_name} was called without the API name (Core). This will be deprecated in OFSC 2.0", - DeprecationWarning, + raise NotImplementedError( + f"{method_name} was called without the API name (Core). This was deprecated in OFSC 2.0" ) - return getattr(self.core, method_name)(*args, **kwargs) if method_name in self._metadata_methods: - warn( - f"{method_name} was called without the API name (Metadata). 
This will be deprecated in OFSC 2.0", - DeprecationWarning, + raise NotImplementedError( + f"{method_name} was called without the API name (Metadata). This was deprecated in OFSC 2.0" ) - return getattr(self.metadata, method_name)(*args, **kwargs) raise Exception("method not found") return wrapper diff --git a/ofsc/common.py b/ofsc/common.py index 61ed432..7405113 100644 --- a/ofsc/common.py +++ b/ofsc/common.py @@ -1,36 +1,68 @@ import logging from functools import wraps +import requests + +from .exceptions import OFSAPIException + TEXT_RESPONSE = 1 FULL_RESPONSE = 2 -JSON_RESPONSE = 3 +OBJ_RESPONSE = 3 -def wrap_return(*a, **kw): +def wrap_return(*decorator_args, **decorator_kwargs): """ - Decorator @return_as wraps the function + Decorator @wrap_return wraps the function and decides the return type and if we launch an exception """ def decorator(func): @wraps(func) - def wrapper(*args, **kwargs): + def wrapper(*func_args, **func_kwargs): + logging.debug( + f"{func_args=}, {func_kwargs=}, {decorator_args=}, {decorator_kwargs=}" + ) + config = func_args[0].config # Pre: - response_type = kwargs.get( - "response_type", kw.get("response_type", FULL_RESPONSE) + response_type = func_kwargs.get( + "response_type", decorator_kwargs.get("response_type", OBJ_RESPONSE) ) - kwargs.pop("response_type", None) - response = func(*args, **kwargs) + func_kwargs.pop("response_type", None) + expected_codes = decorator_kwargs.get("expected_codes", [200]) + model = func_kwargs.get("model", decorator_kwargs.get("model", None)) + func_kwargs.pop("model", None) + + response = func(*func_args, **func_kwargs) # post: logging.debug(response) if response_type == FULL_RESPONSE: return response - elif response_type == JSON_RESPONSE: - return response.json() + elif response_type == OBJ_RESPONSE: + logging.debug( + f"{response_type=}, {config.auto_model=}, {model=} {func_args= } {func_kwargs=}" + ) + if response.status_code in expected_codes: + match response.status_code: + case 204: + return 
response.text + case _: + data_response = response.json() + if config.auto_model and model is not None: + return model.model_validate(data_response) + else: + return data_response + else: + if not config.auto_raise: + return response.json() + # Check if response.status_code is between 400 and 499 + if 400 <= response.status_code < 500: + logging.error(response.json()) + raise OFSAPIException(**response.json()) + elif 500 <= response.status_code < 600: + raise OFSAPIException(**response.json()) else: return response.text - return result return wrapper diff --git a/ofsc/core.py b/ofsc/core.py index 18c40ab..2692e9d 100644 --- a/ofsc/core.py +++ b/ofsc/core.py @@ -6,94 +6,67 @@ import requests -from .common import FULL_RESPONSE, JSON_RESPONSE, TEXT_RESPONSE, wrap_return +from .common import FULL_RESPONSE, OBJ_RESPONSE, TEXT_RESPONSE, wrap_return from .models import BulkUpdateRequest, OFSApi, OFSConfig class OFSCore(OFSApi): # OFSC Function Library - def get_activities(self, params, response_type=TEXT_RESPONSE): + @wrap_return(response_type=OBJ_RESPONSE, expected=[200]) + def get_activities(self, params): url = urljoin(self.baseUrl, "/rest/ofscCore/v1/activities") response = requests.get(url, headers=self.headers, params=params) - if response_type == FULL_RESPONSE: - return response - elif response_type == JSON_RESPONSE: - return response.json() - else: - return response.text - - def get_activity(self, activity_id, response_type=TEXT_RESPONSE): + return response + + @wrap_return(response_type=OBJ_RESPONSE, expected=[200]) + def get_activity(self, activity_id): url = urljoin( self.baseUrl, "/rest/ofscCore/v1/activities/{}".format(activity_id) ) response = requests.get(url, headers=self.headers) - # print (response.status_code) - if response_type == FULL_RESPONSE: - return response - elif response_type == JSON_RESPONSE: - return response.json() - else: - return response.text - - def update_activity(self, activity_id, data, response_type=TEXT_RESPONSE): + return response 
+ + @wrap_return(response_type=OBJ_RESPONSE, expected=[200]) + def update_activity(self, activity_id, data): url = urljoin( self.baseUrl, "/rest/ofscCore/v1/activities/{}".format(activity_id) ) response = requests.patch(url, headers=self.headers, data=data) - # print (response.status_code) - if response_type == FULL_RESPONSE: - return response - elif response_type == JSON_RESPONSE: - return response.json() - else: - return response.text + return response # 202107 Added ssearch - def search_activities(self, params, response_type=TEXT_RESPONSE): + @wrap_return(response_type=OBJ_RESPONSE, expected=[200]) + def search_activities(self, params): url = urljoin( self.baseUrl, "/rest/ofscCore/v1/activities/custom-actions/search" ) response = requests.get(url, headers=self.headers, params=params) - # print (response.status_code) - if response_type == FULL_RESPONSE: - return response - elif response_type == JSON_RESPONSE: - return response.json() - else: - return response.text - - def move_activity(self, activity_id, data, response_type=TEXT_RESPONSE): + return response + + @wrap_return(response_type=OBJ_RESPONSE, expected=[200]) + def move_activity(self, activity_id, data): url = urljoin( self.baseUrl, f"/rest/ofscCore/v1/activities/{activity_id}/custom-actions/move", ) response = requests.post(url, headers=self.headers, data=data) - # print (response.status_code) - if response_type == FULL_RESPONSE: - return response - elif response_type == JSON_RESPONSE: - return response.json() - else: - return response.text - - def get_events(self, params, response_type=TEXT_RESPONSE): - url = urljoin(self.baseUrl, "rest/ofscCore/v1/events") + return response + + @wrap_return(response_type=OBJ_RESPONSE, expected=[200]) + def get_events(self, params): + url = urljoin(self.baseUrl, "/rest/ofscCore/v1/events") response = requests.get( - "https://api.etadirect.com/rest/ofscCore/v1/events", + url, headers=self.headers, params=params, ) - if response_type == FULL_RESPONSE: - return response - 
elif response_type == JSON_RESPONSE: - return response.json() - else: - return response.text + return response ##### # RESOURCE MANAGEMENT #### + @wrap_return(response_type=OBJ_RESPONSE, expected=[200]) def get_resource( self, resource_id, @@ -101,7 +74,6 @@ def get_resource( workSkills=False, workZones=False, workSchedules=False, - response_type=TEXT_RESPONSE, ): url = urljoin( self.baseUrl, "/rest/ofscCore/v1/resources/{}".format(str(resource_id)) @@ -130,43 +102,25 @@ def get_resource( data["expand"] = expand response = requests.get(url, params=data, headers=self.headers) - - if response_type == FULL_RESPONSE: - return response - elif response_type == JSON_RESPONSE: - return response.json() - else: - return response.text - - # 202107 - def create_resource_old(self, resourceId, data, response_type=TEXT_RESPONSE): - url = urljoin(self.baseUrl, f"/rest/ofscCore/v1/resources/{resourceId}") - - response = requests.put(url, headers=self.headers, data=data) - # print (response.status_code) - if response_type == FULL_RESPONSE: - return response - elif response_type == JSON_RESPONSE: - return response.json() - else: - return response.text + return response # 202209 Resource Types - @wrap_return(response_type=JSON_RESPONSE, expected=[200]) + @wrap_return(response_type=OBJ_RESPONSE, expected=[200]) def create_resource(self, resourceId, data): url = urljoin(self.baseUrl, f"/rest/ofscCore/v1/resources/{resourceId}") logging.debug(f"OFSC.Create_Resource: {data} {type(data)}") response = requests.put(url, headers=self.headers, data=data) return response - @wrap_return(response_type=JSON_RESPONSE, expected=[200]) + @wrap_return(response_type=OBJ_RESPONSE, expected=[200]) def create_resource_from_obj(self, resourceId, data): url = urljoin(self.baseUrl, f"/rest/ofscCore/v1/resources/{resourceId}") logging.debug(f"OFSC.Create_Resource: {data} {type(data)}") response = requests.put(url, headers=self.headers, data=json.dumps(data)) return response - def get_position_history(self, 
resource_id, date, response_type=TEXT_RESPONSE): + @wrap_return(response_type=OBJ_RESPONSE, expected=[200]) + def get_position_history(self, resource_id, date): url = urljoin( self.baseUrl, "/rest/ofscCore/v1/resources/{}/positionHistory".format(str(resource_id)), @@ -174,22 +128,11 @@ def get_position_history(self, resource_id, date, response_type=TEXT_RESPONSE): params = {} params["date"] = date response = requests.get(url, params=params, headers=self.headers) + return response - if response_type == FULL_RESPONSE: - return response - elif response_type == JSON_RESPONSE: - return response.json() - else: - return response.text - + @wrap_return(response_type=OBJ_RESPONSE, expected=[200]) def get_resource_route( - self, - resource_id, - date, - activityFields=None, - offset=0, - limit=100, - response_type=TEXT_RESPONSE, + self, resource_id, date, activityFields=None, offset=0, limit=100 ): url = urljoin( self.baseUrl, @@ -199,14 +142,9 @@ def get_resource_route( if activityFields is not None: params["activityFields"] = activityFields response = requests.get(url, params=params, headers=self.headers) + return response - if response_type == FULL_RESPONSE: - return response - elif response_type == JSON_RESPONSE: - return response.json() - else: - return response.text - + @wrap_return(response_type=OBJ_RESPONSE, expected=[200]) def get_resource_descendants( self, resource_id, @@ -217,7 +155,6 @@ def get_resource_descendants( workSkills=False, workZones=False, workSchedules=False, - response_type=TEXT_RESPONSE, ): url = urljoin( self.baseUrl, @@ -254,99 +191,65 @@ def get_resource_descendants( logging.debug(json.dumps(params, indent=2)) response = requests.get(url, params=params, headers=self.headers) - - if response_type == FULL_RESPONSE: - return response - elif response_type == JSON_RESPONSE: - return response.json() - else: - return response.text + return response ## 202104 User Management - def get_users(self, offset=0, limit=100, response_type=FULL_RESPONSE): + 
@wrap_return(response_type=OBJ_RESPONSE, expected=[200]) + def get_users(self, offset=0, limit=100): url = urljoin(self.baseUrl, "/rest/ofscCore/v1/users") params = {} params["offset"] = offset params["limit"] = limit response = requests.get(url, params, headers=self.headers) - if response_type == FULL_RESPONSE: - return response - elif response_type == JSON_RESPONSE: - return response.json() - else: - return response.text - - def get_user(self, login, response_type=FULL_RESPONSE): + return response + + @wrap_return(response_type=OBJ_RESPONSE, expected=[200]) + def get_user(self, login): url = urljoin(self.baseUrl, "/rest/ofscCore/v1/users/{}".format(login)) response = requests.get(url, headers=self.headers) - if response_type == FULL_RESPONSE: - return response - elif response_type == JSON_RESPONSE: - return response.json() - else: - return response.text - - def update_user(self, login, data, response_type=FULL_RESPONSE): + return response + + @wrap_return(response_type=OBJ_RESPONSE, expected=[200]) + def update_user(self, login, data): url = urljoin(self.baseUrl, "/rest/ofscCore/v1/users/{}".format(login)) response = requests.patch(url, headers=self.headers, data=data) - # print (response.status_code) - if response_type == FULL_RESPONSE: - return response - elif response_type == JSON_RESPONSE: - return response.json() - else: - return response.text + return response ##202106 - def create_user(self, login, data, response_type=FULL_RESPONSE): + @wrap_return(response_type=OBJ_RESPONSE, expected=[200]) + def create_user(self, login, data): url = urljoin(self.baseUrl, f"/rest/ofscCore/v1/users/{login}") response = requests.put(url, headers=self.headers, data=data) - # print (response.status_code) - if response_type == FULL_RESPONSE: - return response - elif response_type == JSON_RESPONSE: - return response.json() - else: - return response.text + return response ##202106 - def delete_user(self, login, response_type=FULL_RESPONSE): + + 
@wrap_return(response_type=OBJ_RESPONSE, expected=[200]) + def delete_user(self, login): url = urljoin(self.baseUrl, f"/rest/ofscCore/v1/users/{login}") response = requests.delete(url, headers=self.headers) - if response_type == FULL_RESPONSE: - return response - elif response_type == JSON_RESPONSE: - return response.json() - else: - return response.text + return response ##202105 Daily Extract - NOT TESTED - def get_daily_extract_dates(self, response_type=FULL_RESPONSE): + @wrap_return(response_type=OBJ_RESPONSE, expected=[200]) + def get_daily_extract_dates(self): url = urljoin(self.baseUrl, "/rest/ofscCore/v1/folders/dailyExtract/folders/") response = requests.get(url, headers=self.headers) - if response_type == FULL_RESPONSE: - return response - elif response_type == JSON_RESPONSE: - return response.json() - else: - return response.text + return response ##202105 Daily Extract - NOT TESTED - def get_daily_extract_files(self, date, response_type=FULL_RESPONSE): + @wrap_return(response_type=OBJ_RESPONSE, expected=[200]) + def get_daily_extract_files(self, date): url = urljoin( self.baseUrl, "/rest/ofscCore/v1/folders/dailyExtract/folders/{}/files".format(date), ) response = requests.get(url, headers=self.headers) - if response_type == FULL_RESPONSE: - return response - elif response_type == JSON_RESPONSE: - return response.json() - else: - return response.text + return response ##202105 Daily Extract - NOT TESTED - def get_daily_extract_file(self, date, filename, response_type=FULL_RESPONSE): + @wrap_return(response_type=OBJ_RESPONSE, expected=[200]) + def get_daily_extract_file(self, date, filename): url = urljoin( self.baseUrl, "/rest/ofscCore/v1/folders/dailyExtract/folders/{}/files/{}".format( @@ -354,34 +257,7 @@ def get_daily_extract_file(self, date, filename, response_type=FULL_RESPONSE): ), ) response = requests.get(url, headers=self.headers) - if response_type == FULL_RESPONSE: - return response - elif response_type == JSON_RESPONSE: - return 
response.json() - else: - return response.text - - def get_file_property( - self, - activityId, - label, - mediaType="application/octet-stream", - response_type=FULL_RESPONSE, - ): - headers = self.headers - headers["Accept"] = mediaType - response = requests.get( - "https://api.etadirect.com/rest/ofscCore/v1/activities/{}/{}".format( - activityId, label - ), - headers=headers, - ) - if response_type == FULL_RESPONSE: - return response - elif response_type == JSON_RESPONSE: - return response.json() - else: - return response.text + return response ## 202202 Helper functions def get_all_activities( @@ -455,19 +331,19 @@ def get_all_properties(self, initial_offset=0, limit=100): ### # 1. Subscriptions Management. Using wrapper ### - @wrap_return(response_type=JSON_RESPONSE, expected=[200]) + @wrap_return(response_type=OBJ_RESPONSE, expected=[200]) def get_subscriptions(self): url = urljoin(self.baseUrl, "/rest/ofscCore/v1/events/subscriptions") response = requests.get(url, headers=self.headers) return response - @wrap_return(response_type=JSON_RESPONSE, expected=[200]) + @wrap_return(response_type=OBJ_RESPONSE, expected=[200]) def create_subscription(self, data): url = urljoin(self.baseUrl, "/rest/ofscCore/v1/events/subscriptions") response = requests.post(url, headers=self.headers, data=data) return response - @wrap_return(response_type=JSON_RESPONSE, expected=[204]) + @wrap_return(response_type=OBJ_RESPONSE, expected=[204]) def delete_subscription(self, subscription_id): url = urljoin( self.baseUrl, f"/rest/ofscCore/v1/events/subscriptions/{subscription_id}" @@ -475,7 +351,7 @@ def delete_subscription(self, subscription_id): response = requests.delete(url, headers=self.headers) return response - @wrap_return(response_type=JSON_RESPONSE, expected=[200]) + @wrap_return(response_type=OBJ_RESPONSE, expected=[200]) def get_subscription_details(self, subscription_id): url = urljoin( self.baseUrl, @@ -488,11 +364,30 @@ def get_subscription_details(self, subscription_id): # 
2. Core / Activities ### - @wrap_return(response_type=JSON_RESPONSE, expected=[200]) + @wrap_return(response_type=OBJ_RESPONSE, expected=[200]) def bulk_update(self, data: BulkUpdateRequest): url = urljoin( self.baseUrl, "/rest/ofscCore/v1/activities/custom-actions/bulkUpdate", ) - response = requests.post(url, headers=self.headers, data=data.json()) + response = requests.post(url, headers=self.headers, data=data.model_dump_json()) + return response + + @wrap_return(response_type=OBJ_RESPONSE, expected=[200]) + def get_file_property( + self, + activityId, + label, + mediaType="application/octet-stream", + ): + url = urljoin( + self.baseUrl, + f"/rest/ofscCore/v1/activities/{activityId}/{label}", + ) + headers = self.headers + headers["Accept"] = mediaType + response = requests.get( + url, + headers=headers, + ) return response diff --git a/ofsc/exceptions.py b/ofsc/exceptions.py new file mode 100644 index 0000000..12a8d29 --- /dev/null +++ b/ofsc/exceptions.py @@ -0,0 +1,12 @@ +import logging + + +class OFSAPIException(Exception): + def __init__(self, *args: object, **kwargs) -> None: + super().__init__(*args) + for key, value in kwargs.items(): + match key: + case "status": + setattr(self, "status_code", int(value)) + case _: + setattr(self, key, value) diff --git a/ofsc/metadata.py b/ofsc/metadata.py index 3ec8097..5fcb9e4 100644 --- a/ofsc/metadata.py +++ b/ofsc/metadata.py @@ -8,8 +8,18 @@ import requests -from .common import FULL_RESPONSE, JSON_RESPONSE, TEXT_RESPONSE, wrap_return +from .common import FULL_RESPONSE, OBJ_RESPONSE, TEXT_RESPONSE, wrap_return from .models import ( + ActivityTypeGroup, + ActivityTypeGroupList, + ActivityTypeGroupListResponse, + ActivityTypeListResponse, + CapacityArea, + CapacityAreaListResponse, + CapacityCategory, + CapacityCategoryListResponse, + InventoryType, + InventoryTypeListResponse, OFSApi, OFSConfig, Property, @@ -21,108 +31,10 @@ class OFSMetadata(OFSApi): - capacityAreasFields = 
"label,name,type,status,parent.name,parent.label" - additionalCapacityFields = [ - "parentLabel", - "configuration.isTimeSlotBase", - "configuration.byCapacityCategory", - "configuration.byDay", - "configuration.byTimeSlot", - "configuration.isAllowCloseOnWorkzoneLevel", - "configuration.definitionLevel.day", - "configuration.definitionLevel.timeSlot", - "configuration.definitionLevel.capacityCategory", - ] - capacityHeaders = capacityAreasFields.split(",") + additionalCapacityFields - - def get_capacity_areas( - self, - expand="parent", - fields=capacityAreasFields, - status="active", - queryType="area", - response_type=FULL_RESPONSE, - ): - url = urljoin(self.baseUrl, "/rest/ofscMetadata/v1/capacityAreas") - params = {} - params["expand"] = expand - params["fields"] = fields - params["status"] = status - params["type"] = queryType - response = requests.get(url, params=params, headers=self.headers) - if response_type == FULL_RESPONSE: - return response - elif response_type == JSON_RESPONSE: - return response.json() - else: - return response.text - - def get_capacity_area(self, label, response_type=FULL_RESPONSE): - encoded_label = urllib.parse.quote_plus(label) - url = urljoin( - self.baseUrl, "/rest/ofscMetadata/v1/capacityAreas/{}".format(encoded_label) - ) - response = requests.get(url, headers=self.headers) - if response_type == FULL_RESPONSE: - return response - elif response_type == JSON_RESPONSE: - return response.json() - else: - return response.text - - def get_activity_type_groups( - self, offset=0, limit=100, response_type=FULL_RESPONSE - ): - url = urljoin(self.baseUrl, "/rest/ofscMetadata/v1/activityTypeGroups") - response = requests.get(url, headers=self.headers) - if response_type == FULL_RESPONSE: - return response - elif response_type == JSON_RESPONSE: - return response.json() - else: - return response.text - - def get_activity_type_group(self, label, response_type=FULL_RESPONSE): - encoded_label = urllib.parse.quote_plus(label) - url = urljoin( - 
self.baseUrl, - "/rest/ofscMetadata/v1/activityTypeGroups/{}".format(encoded_label), - ) - response = requests.get(url, headers=self.headers) - if response_type == FULL_RESPONSE: - return response - elif response_type == JSON_RESPONSE: - return response.json() - else: - return response.text - - ## 202205 Activity Type - def get_activity_types(self, offset=0, limit=100, response_type=FULL_RESPONSE): - url = urljoin(self.baseUrl, "/rest/ofscMetadata/v1/activityTypes") - response = requests.get(url, headers=self.headers) - if response_type == FULL_RESPONSE: - return response - elif response_type == JSON_RESPONSE: - return response.json() - else: - return response.text - - def get_activity_type(self, label, response_type=FULL_RESPONSE): - encoded_label = urllib.parse.quote_plus(label) - url = urljoin( - self.baseUrl, "/rest/ofscMetadata/v1/activityTypes/{}".format(encoded_label) - ) - response = requests.get(url, headers=self.headers) - if response_type == FULL_RESPONSE: - return response - elif response_type == JSON_RESPONSE: - return response.json() - else: - return response.text - ## 202202 Properties and file properties - def get_properties(self, offset=0, limit=100, response_type=FULL_RESPONSE): + @wrap_return(response_type=OBJ_RESPONSE, expected=[200]) + def get_properties(self, offset=0, limit=100): url = urljoin(self.baseUrl, "/rest/ofscMetadata/v1/properties") params = {"offset": offset, "limit": limit} response = requests.get( @@ -130,30 +42,72 @@ def get_properties(self, offset=0, limit=100, response_type=FULL_RESPONSE): headers=self.headers, params=params, ) - if response_type == FULL_RESPONSE: - return response - elif response_type == JSON_RESPONSE: - return response.json() - else: - return response.text + return response # 202209 Get Property - @wrap_return(response_type=JSON_RESPONSE, expected=[200]) + @wrap_return(response_type=OBJ_RESPONSE, expected=[200]) def get_property(self, label: str): url = urljoin(self.baseUrl, 
f"/rest/ofscMetadata/v1/properties/{label}") response = requests.get(url, headers=self.headers) return response # 202209 Create Property - @wrap_return(response_type=JSON_RESPONSE, expected=[200]) + @wrap_return(response_type=OBJ_RESPONSE, expected=[200]) def create_or_replace_property(self, property: Property): url = urljoin( self.baseUrl, f"/rest/ofscMetadata/v1/properties/{property.label}" ) - response = requests.put(url, headers=self.headers, data=property.json()) + response = requests.put( + url, headers=self.headers, data=property.model_dump_json().encode("utf-8") + ) return response # 202208 Skill management + + # 202208 Workzones + @wrap_return(response_type=OBJ_RESPONSE, expected=[200]) + def get_workzones( + self, + offset=0, + limit=100, + ): + url = urljoin(self.baseUrl, "/rest/ofscMetadata/v1/workZones") + params = {"offset": offset, "limit": limit} + response = requests.get( + url, + headers=self.headers, + params=params, + ) + return response + + # 202209 Resource Types + @wrap_return(response_type=OBJ_RESPONSE, expected=[200]) + def get_resource_types(self): + url = urljoin(self.baseUrl, "/rest/ofscMetadata/v1/resourceTypes") + response = requests.get(url, headers=self.headers) + return response + + # 202212 Import plugin + @wrap_return(response_type=OBJ_RESPONSE, expected=[204]) + def import_plugin_file(self, plugin: Path): + url = urljoin( + self.baseUrl, f"/rest/ofscMetadata/v1/plugins/custom-actions/import" + ) + files = [("pluginFile", (plugin.name, plugin.read_text(), "text/xml"))] + response = requests.post(url, headers=self.headers, files=files) + return response + + # 202212 Import plugin + @wrap_return(response_type=OBJ_RESPONSE, expected=[204]) + def import_plugin(self, plugin: str): + url = urljoin( + self.baseUrl, f"/rest/ofscMetadata/v1/plugins/custom-actions/import" + ) + files = [("pluginFile", ("noname.xml", plugin, "text/xml"))] + response = requests.post(url, headers=self.headers, files=files) + return response + + 
@wrap_return(response_type=OBJ_RESPONSE, expected=[200]) def get_workskills(self, offset=0, limit=100, response_type=FULL_RESPONSE): url = urljoin(self.baseUrl, "/rest/ofscMetadata/v1/workSkills") params = {"offset": offset, "limit": limit} @@ -162,115 +116,173 @@ def get_workskills(self, offset=0, limit=100, response_type=FULL_RESPONSE): headers=self.headers, params=params, ) - if response_type == FULL_RESPONSE: - return response - elif response_type == JSON_RESPONSE: - return response.json() - else: - return response.text + return response + @wrap_return(response_type=OBJ_RESPONSE, expected=[200]) def get_workskill(self, label: str, response_type=FULL_RESPONSE): url = urljoin(self.baseUrl, f"/rest/ofscMetadata/v1/workSkills/{label}") response = requests.get( url, headers=self.headers, ) - if response_type == FULL_RESPONSE: - return response - elif response_type == JSON_RESPONSE: - return response.json() - else: - return response.text + return response + @wrap_return(response_type=OBJ_RESPONSE, expected=[200]) def create_or_update_workskill(self, skill: Workskill, response_type=FULL_RESPONSE): url = urljoin(self.baseUrl, f"/rest/ofscMetadata/v1/workSkills/{skill.label}") - response = requests.put(url, headers=self.headers, data=skill.json()) - if response_type == FULL_RESPONSE: - return response - elif response_type == JSON_RESPONSE: - return response.json() - else: - return response.text + response = requests.put(url, headers=self.headers, data=skill.model_dump_json()) + return response + @wrap_return(response_type=OBJ_RESPONSE, expected=[204]) def delete_workskill(self, label: str, response_type=FULL_RESPONSE): url = urljoin(self.baseUrl, f"/rest/ofscMetadata/v1/workSkills/{label}") response = requests.delete(url, headers=self.headers) - if response_type == FULL_RESPONSE: - return response - elif response_type == JSON_RESPONSE: - return response.json() - else: - return response.text + return response # Workskill conditions + 
@wrap_return(response_type=OBJ_RESPONSE, expected=[200]) def get_workskill_conditions(self, response_type=FULL_RESPONSE): url = urljoin(self.baseUrl, f"/rest/ofscMetadata/v1/workSkillConditions") response = requests.get( url, headers=self.headers, ) - if response_type == FULL_RESPONSE: - return response - elif response_type == JSON_RESPONSE: - return response.json() - else: - return response.text + return response + @wrap_return(response_type=OBJ_RESPONSE, expected=[200]) def replace_workskill_conditions( self, data: WorskillConditionList, response_type=FULL_RESPONSE ): url = urljoin(self.baseUrl, f"/rest/ofscMetadata/v1/workSkillConditions") - content = '{"items":' + data.json(exclude_none=True) + "}" + content = '{"items":' + data.model_dump_json(exclude_none=True) + "}" headers = self.headers headers["Content-Type"] = "application/json" response = requests.put(url, headers=headers, data=content) - if response_type == FULL_RESPONSE: - return response - elif response_type == JSON_RESPONSE: - return response.json() - else: - return response.text + return response - # 202208 Workzones - @wrap_return(response_type=JSON_RESPONSE, expected=[200]) - def get_workzones( - self, - offset=0, - limit=100, - ): - url = urljoin(self.baseUrl, "/rest/ofscMetadata/v1/workZones") + ##### + # Migration to OFS 2.0 model format + + # 202402 Metadata - Activity Type Groups + @wrap_return( + response_type=OBJ_RESPONSE, expected=[200], model=ActivityTypeGroupListResponse + ) + def get_activity_type_groups(self, offset=0, limit=100): + url = urljoin(self.baseUrl, "/rest/ofscMetadata/v1/activityTypeGroups") params = {"offset": offset, "limit": limit} - response = requests.get( - url, - headers=self.headers, - params=params, + response = requests.get(url, headers=self.headers, params=params) + return response + + @wrap_return(response_type=OBJ_RESPONSE, expected=[200], model=ActivityTypeGroup) + def get_activity_type_group(self, label): + encoded_label = urllib.parse.quote_plus(label) + 
url = urljoin( + self.baseUrl, + f"/rest/ofscMetadata/v1/activityTypeGroups/{encoded_label}", ) + response = requests.get(url, headers=self.headers) return response - # 202209 Resource Types - @wrap_return(response_type=JSON_RESPONSE, expected=[200]) - def get_resource_types(self): - url = urljoin(self.baseUrl, "/rest/ofscMetadata/v1/resourceTypes") + ## 202402 Activity Type + @wrap_return( + response_type=OBJ_RESPONSE, expected=[200], model=ActivityTypeListResponse + ) + def get_activity_types(self, offset=0, limit=100): + url = urljoin(self.baseUrl, "/rest/ofscMetadata/v1/activityTypes") + params = {"offset": offset, "limit": limit} + response = requests.get(url, headers=self.headers, params=params) + return response + + @wrap_return(response_type=OBJ_RESPONSE, expected=[200]) + def get_activity_type(self, label): + encoded_label = urllib.parse.quote_plus(label) + url = urljoin( + self.baseUrl, "/rest/ofscMetadata/v1/activityTypes/{}".format(encoded_label) + ) response = requests.get(url, headers=self.headers) return response - # 202212 Import plugin - @wrap_return(response_type=FULL_RESPONSE, expected=[204]) - def import_plugin_file(self, plugin: Path): + # region Capacity Areas + capacityAreasFields = [ + "label", + "name", + "type", + "status", + "parent.name", + "parent.label", + ] + + @wrap_return( + response_type=OBJ_RESPONSE, expected=[200], model=CapacityAreaListResponse + ) + def get_capacity_areas( + self, + expandParent: bool = False, + fields: list[str] = ["label"], + activeOnly: bool = False, + areasOnly: bool = False, + ): + url = urljoin(self.baseUrl, "/rest/ofscMetadata/v1/capacityAreas") + assert isinstance(fields, list) + params = { + "expand": None if not expandParent else "parent", + "fields": ( + ",".join(fields) if fields else ",".join(self.capacityAreasFields) + ), + "status": None if not activeOnly else "active", + "type": None if not areasOnly else "area", + } + response = requests.get(url, params=params, headers=self.headers) + return 
response + + @wrap_return(response_type=OBJ_RESPONSE, expected=[200], model=CapacityArea) + def get_capacity_area(self, label: str): + encoded_label = urllib.parse.quote_plus(label) url = urljoin( - self.baseUrl, f"/rest/ofscMetadata/v1/plugins/custom-actions/import" + self.baseUrl, f"/rest/ofscMetadata/v1/capacityAreas/{encoded_label}" ) - headers = self.headers - files = [("pluginFile", (plugin.name, plugin.read_text(), "text/xml"))] - response = requests.post(url, headers=self.headers, files=files) + response = requests.get(url, headers=self.headers) return response - # 202212 Import plugin - @wrap_return(response_type=FULL_RESPONSE, expected=[204]) - def import_plugin(self, plugin: str): + # endregion + + # region 202402 Metadata - Capacity Categories + @wrap_return( + response_type=OBJ_RESPONSE, expected=[200], model=CapacityCategoryListResponse + ) + def get_capacity_categories(self, offset=0, limit=100): + url = urljoin(self.baseUrl, "/rest/ofscMetadata/v1/capacityCategories") + params = {"offset": offset, "limit": limit} + response = requests.get(url, headers=self.headers, params=params) + return response + + @wrap_return(response_type=OBJ_RESPONSE, expected=[200], model=CapacityCategory) + def get_capacity_category(self, label: str): + encoded_label = urllib.parse.quote_plus(label) url = urljoin( - self.baseUrl, f"/rest/ofscMetadata/v1/plugins/custom-actions/import" + self.baseUrl, f"/rest/ofscMetadata/v1/capacityCategories/{encoded_label}" ) - files = [("pluginFile", ("noname.xml", plugin, "text/xml"))] - response = requests.post(url, headers=self.headers, files=files) + response = requests.get(url, headers=self.headers) + return response + + # endregion + + # region 202405 Inventory Types + @wrap_return( + response_type=OBJ_RESPONSE, expected=[200], model=InventoryTypeListResponse + ) + def get_inventory_types(self): + url = urljoin(self.baseUrl, "/rest/ofscMetadata/v1/inventoryTypes") + response = requests.get(url, headers=self.headers) return response 
+ + @wrap_return(response_type=OBJ_RESPONSE, expected=[200], model=InventoryType) + def get_inventory_type(self, label: str): + encoded_label = urllib.parse.quote_plus(label) + url = urljoin( + self.baseUrl, f"/rest/ofscMetadata/v1/inventoryTypes/{encoded_label}" + ) + response = requests.get(url, headers=self.headers) + return response + + # endregion diff --git a/ofsc/models.py b/ofsc/models.py index d0cd706..5a971fd 100644 --- a/ofsc/models.py +++ b/ofsc/models.py @@ -1,14 +1,42 @@ import base64 -import typing +import logging from enum import Enum -from typing import Any, List, Optional +from typing import Any, Generic, List, Optional, TypeVar from urllib.parse import urljoin import requests from cachetools import TTLCache, cached -from pydantic import BaseModel, Extra, validator +from pydantic import ( + BaseModel, + ConfigDict, + Field, + RootModel, + ValidationInfo, + field_validator, + model_validator, +) +from pydantic_settings import BaseSettings +from typing_extensions import Annotated -from ofsc.common import FULL_RESPONSE, JSON_RESPONSE, wrap_return +from ofsc.common import FULL_RESPONSE, OBJ_RESPONSE, wrap_return + +T = TypeVar("T") + + +class OFSResponseList(BaseModel, Generic[T]): + model_config = ConfigDict(extra="allow") + + items: List[T] + offset: Annotated[Optional[int], Field(alias="offset")] = None + limit: Annotated[Optional[int], Field(alias="limit")] = None + hasMore: Annotated[Optional[bool], Field(alias="hasMore")] = False + totalResults: int = -1 + + @model_validator(mode="after") + def check_coherence(self): + if self.totalResults != len(self.items) and self.hasMore is False: + self.totalResults = len(self.items) + return self class OFSConfig(BaseModel): @@ -16,8 +44,10 @@ class OFSConfig(BaseModel): secret: str companyName: str useToken: bool = False - root: Optional[str] - baseURL: Optional[str] + root: Optional[str] = None + baseURL: Optional[str] = None + auto_raise: bool = True + auto_model: bool = True @property def 
basicAuthString(self): @@ -25,19 +55,24 @@ def basicAuthString(self): bytes(self.clientID + "@" + self.companyName + ":" + self.secret, "utf-8") ) - class Config: - validate_assignment = True + model_config = ConfigDict(validate_assignment=True) - @validator("baseURL") - def set_base_URL(cls, url, values): - print(values) - return url or f"https://{values['companyName']}.fs.ocs.oraclecloud.com" + @field_validator("baseURL") + def set_base_URL(cls, url, info: ValidationInfo): + return url or f"https://{info.data['companyName']}.fs.ocs.oraclecloud.com" class OFSOAuthRequest(BaseModel): - assertion: Optional[str] + assertion: Optional[str] = None grant_type: str = "client_credentials" - ofs_dynamic_scope: Optional[str] + ofs_dynamic_scope: Optional[str] = None + + +class OFSAPIError(BaseModel): + type: str + title: str + status: int + detail: str class OFSApi: @@ -69,17 +104,19 @@ def token(self, auth: OFSOAuthRequest = OFSOAuthRequest()) -> requests.Response: headers["Content-Type"] = "application/x-www-form-urlencoded" url = urljoin(self.baseUrl, "/rest/oauthTokenService/v2/token") response = requests.post( - url, data=auth.dict(exclude_none=True), headers=headers + url, data=auth.model_dump(exclude_none=True), headers=headers ) return response @property def headers(self): self._headers = {} + self._headers["Content-Type"] = "application/json;charset=UTF-8" + if not self._config.useToken: - self._headers[ - "Authorization" - ] = "Basic " + self._config.basicAuthString.decode("utf-8") + self._headers["Authorization"] = ( + "Basic " + self._config.basicAuthString.decode("utf-8") + ) else: self._token = self.token().json()["access_token"] self._headers["Authorization"] = f"Bearer {self._token}" @@ -116,17 +153,18 @@ class EntityEnum(str, Enum): class Translation(BaseModel): language: str = "en" name: str - languageISO: Optional[str] + languageISO: Optional[str] = None -class TranslationList(BaseModel): - __root__: List[Translation] - +class 
TranslationList(RootModel[List[Translation]]): def __iter__(self): - return iter(self.__root__) + return iter(self.root) def __getitem__(self, item): - return self.__root__[item] + return self.root[item] + + def map(self): + return {translation.language: translation for translation in self.root} class Workskill(BaseModel): @@ -134,21 +172,23 @@ class Workskill(BaseModel): active: bool = True name: str = "" sharing: SharingEnum - translations: Optional[TranslationList] + translations: Annotated[Optional[TranslationList], Field(validate_default=True)] = ( + None + ) - @validator("translations", always=True) + @field_validator("translations") def set_default(cls, field_value, values): - return field_value or [Translation(name=values["name"])] - + return field_value or TranslationList( + [Translation(name=values.data.get("name"))] + ) -class WorkskillList(BaseModel): - __root__: List[Workskill] +class WorkskillList(RootModel[List[Workskill]]): def __iter__(self): - return iter(self.__root__) + return iter(self.root) def __getitem__(self, item): - return self.__root__[item] + return self.root[item] class Condition(BaseModel): @@ -164,17 +204,15 @@ class WorkskillCondition(BaseModel): requiredLevel: int preferableLevel: int conditions: List[Condition] - dependencies: Any - + dependencies: Any = None -class WorskillConditionList(BaseModel): - __root__: List[WorkskillCondition] +class WorskillConditionList(RootModel[List[WorkskillCondition]]): def __iter__(self): - return iter(self.__root__) + return iter(self.root) def __getitem__(self, item): - return self.__root__[item] + return self.root[item] # Workzones @@ -186,29 +224,28 @@ class Workzone(BaseModel): keys: List[Any] -class WorkzoneList(BaseModel): - __root__: List[Workzone] - +class WorkzoneList(RootModel[List[Workzone]]): def __iter__(self): - return iter(self.__root__) + return iter(self.root) def __getitem__(self, item): - return self.__root__[item] + return self.root[item] class Property(BaseModel): label: str 
name: str type: str - entity: Optional[EntityEnum] - gui: Optional[str] - translations: TranslationList = [] + entity: Optional[EntityEnum] = None + gui: Optional[str] = None + translations: Annotated[TranslationList, Field(validate_default=True)] = [] - @validator("translations", always=True) + @field_validator("translations") def set_default(cls, field_value, values): - return field_value or [Translation(name=values["name"])] + return field_value or [Translation(name=values.name)] - @validator("gui") + @field_validator("gui") + @classmethod def gui_match(cls, v): if v not in [ "text", @@ -227,47 +264,40 @@ def gui_match(cls, v): raise ValueError(f"{v} is not a valid GUI value") return v - class Config: - extra = Extra.allow # or 'allow' str + model_config = ConfigDict(extra="ignore") -class PropertyList(BaseModel): - __root__: List[Property] - +class PropertyList(RootModel[List[Property]]): def __iter__(self): - return iter(self.__root__) + return iter(self.root) def __getitem__(self, item): - return self.__root__[item] + return self.root[item] class Resource(BaseModel): - resourceId: Optional[str] - parentResourceId: Optional[str] + resourceId: Optional[str] = None + parentResourceId: Optional[str] = None resourceType: str name: str status: str = "active" organization: str = "default" language: str - languageISO: Optional[str] + languageISO: Optional[str] = None timeZone: str timeFormat: str = "24-hour" dateFormat: str = "mm/dd/yy" - email: Optional[str] - phone: Optional[str] - - class Config: - extra = Extra.allow # or 'allow' str - + email: Optional[str] = None + phone: Optional[str] = None + model_config = ConfigDict(extra="allow") -class ResourceList(BaseModel): - __root__: List[Resource] +class ResourceList(RootModel[List[Resource]]): def __iter__(self): - return iter(self.__root__) + return iter(self.root) def __getitem__(self, item): - return self.__root__[item] + return self.root[item] class ResourceType(BaseModel): @@ -275,40 +305,34 @@ class 
ResourceType(BaseModel): name: str active: bool role: str # TODO: change to enum + model_config = ConfigDict(extra="allow") - class Config: - extra = Extra.allow # or 'allow' str - - -class ResourceTypeList(BaseModel): - __root__: List[ResourceType] +class ResourceTypeList(RootModel[List[ResourceType]]): def __iter__(self): - return iter(self.__root__) + return iter(self.root) def __getitem__(self, item): - return self.__root__[item] + return self.root[item] # Core / Activities class BulkUpdateActivityItem(BaseModel): - activityId: Optional[int] - activityType: Optional[str] - date: Optional[str] - - class Config: - extra = Extra.allow # or 'allow' str + activityId: Optional[int] = None + activityType: Optional[str] = None + date: Optional[str] = None + model_config = ConfigDict(extra="allow") # CORE / BulkUpdaterequest class BulkUpdateParameters(BaseModel): - fallbackResource: Optional[str] - identifyActivityBy: Optional[str] - ifExistsThenDoNotUpdateFields: Optional[List[str]] - ifInFinalStatusThen: Optional[str] - inventoryPropertiesUpdateMode: Optional[str] + fallbackResource: Optional[str] = None + identifyActivityBy: Optional[str] = None + ifExistsThenDoNotUpdateFields: Optional[List[str]] = None + ifInFinalStatusThen: Optional[str] = None + inventoryPropertiesUpdateMode: Optional[str] = None class BulkUpdateRequest(BaseModel): @@ -317,28 +341,244 @@ class BulkUpdateRequest(BaseModel): class ActivityKeys(BaseModel): - activityId: Optional[int] - apptNumber: Optional[str] - customerNumber: Optional[str] + activityId: Optional[int] = None + apptNumber: Optional[str] = None + customerNumber: Optional[str] = None class BulkUpdateError(BaseModel): - errorDetail: Optional[str] - operation: Optional[str] + errorDetail: Optional[str] = None + operation: Optional[str] = None class BulkUpdateWarning(BaseModel): - code: Optional[int] - message: Optional[int] + code: Optional[int] = None + message: Optional[int] = None class BulkUpdateResult(BaseModel): - activityKeys: 
Optional[ActivityKeys] - errors: Optional[List[BulkUpdateError]] - operationsFailed: Optional[List[str]] - operationsPerformed: Optional[List[str]] - warnings: Optional[List[BulkUpdateWarning]] + activityKeys: Optional[ActivityKeys] = None + errors: Optional[List[BulkUpdateError]] = None + operationsFailed: Optional[List[str]] = None + operationsPerformed: Optional[List[str]] = None + warnings: Optional[List[BulkUpdateWarning]] = None class BulkUpdateResponse(BaseModel): - results: Optional[List[BulkUpdateResult]] + results: Optional[List[BulkUpdateResult]] = None + + +# region Activity Type Groups + + +class ActivityTypeGroup(BaseModel): + label: str + name: str + _activityTypes: Annotated[Optional[List[dict]], "activityTypes"] = [] + translations: TranslationList + + @property + def activityTypes(self): + return [_activityType["label"] for _activityType in self._activityTypes] + + +class ActivityTypeGroupList(RootModel[List[ActivityTypeGroup]]): + def __iter__(self): + return iter(self.root) + + def __getitem__(self, item): + return self.root[item] + + +class ActivityTypeGroupListResponse(OFSResponseList[ActivityTypeGroup]): + pass + + +# endregion + +# region Activity Types + + +class ActivityTypeColors(BaseModel): + cancelled: Annotated[Optional[str], Field(alias="cancelled")] + completed: Annotated[Optional[str], Field(alias="completed")] + notdone: Annotated[Optional[str], Field(alias="notdone")] + notOrdered: Annotated[Optional[str], Field(alias="notOrdered")] + pending: Annotated[Optional[str], Field(alias="pending")] + started: Annotated[Optional[str], Field(alias="started")] + suspended: Annotated[Optional[str], Field(alias="suspended")] + warning: Annotated[Optional[str], Field(alias="warning")] + + +class ActivityTypeFeatures(BaseModel): + model_config = ConfigDict(extra="allow") + allowCreationInBuckets: Optional[bool] = False + allowMassActivities: Optional[bool] = False + allowMoveBetweenResources: Optional[bool] = False + allowNonScheduled: 
Optional[bool] = False + allowRepeatingActivities: Optional[bool] = False + allowReschedule: Optional[bool] = False + allowToCreateFromIncomingInterface: Optional[bool] = False + allowToSearch: Optional[bool] = False + calculateActivityDurationUsingStatistics: Optional[bool] = False + calculateDeliveryWindow: Optional[bool] = False + calculateTravel: Optional[bool] = False + disableLocationTracking: Optional[bool] = False + enableDayBeforeTrigger: Optional[bool] = False + enableNotStartedTrigger: Optional[bool] = False + enableReminderAndChangeTriggers: Optional[bool] = False + enableSwWarningTrigger: Optional[bool] = False + isSegmentingEnabled: Optional[bool] = False + isTeamworkAvailable: Optional[bool] = False + slaAndServiceWindowUseCustomerTimeZone: Optional[bool] = False + supportOfInventory: Optional[bool] = False + supportOfLinks: Optional[bool] = False + supportOfNotOrderedActivities: Optional[bool] = False + supportOfPreferredResources: Optional[bool] = False + supportOfRequiredInventory: Optional[bool] = False + supportOfTimeSlots: Optional[bool] = False + supportOfWorkSkills: Optional[bool] = False + supportOfWorkZones: Optional[bool] = False + + +class ActivityTypeTimeSlots(BaseModel): + label: str + + +class ActivityType(BaseModel): + active: bool + colors: Optional[ActivityTypeColors] + defaultDuration: int + features: Optional[ActivityTypeFeatures] + groupLabel: Optional[str] + label: str + name: str + segmentMaxDuration: Optional[int] = None + segmentMinDuration: Optional[int] = None + timeSlots: Optional[List[ActivityTypeTimeSlots]] = None + translations: TranslationList + + +class ActivityTypeList(RootModel[List[ActivityType]]): + def __iter__(self): + return iter(self.root) + + def __getitem__(self, item): + return self.root[item] + + +class ActivityTypeListResponse(OFSResponseList[ActivityType]): + pass + + +# endregion + +# region Capacity Areas + + +class CapacityAreaParent(BaseModel): + label: str + name: Optional[str] = None + + +class 
CapacityAreaConfiguration(BaseModel): + isTimeSlotBase: bool + byCapacityCategory: str + byDay: str + byTimeSlot: str + isAllowCloseOnWorkzoneLevel: bool + definitionLevel: List[str] + + +class CapacityArea(BaseModel): + label: str + name: Optional[str] = None + type: Optional[str] = "area" + status: Optional[str] = "active" + configuration: CapacityAreaConfiguration = None + parentLabel: Optional[str] = None + parent: Annotated[Optional[CapacityAreaParent], Field(alias="parent")] = None + status: str + translations: Annotated[Optional[TranslationList], Field(alias="translations")] = ( + None + ) + # Note: as of 24A the additional fields returned are just HREFs so we won't include them here + + +class CapacityAreaList(RootModel[List[CapacityArea]]): + def __iter__(self): + return iter(self.root) + + def __getitem__(self, item): + return self.root[item] + + +class CapacityAreaListResponse(OFSResponseList[CapacityArea]): + pass + + +# endregion +# region 202403 Capacity Categories +class Item(BaseModel): + label: str + name: Optional[str] = None + + +class ItemList(RootModel[List[Item]]): + def __iter__(self): + return iter(self.root) + + def __getitem__(self, item): + return self.root[item] + + +class CapacityCategory(BaseModel): + label: str + name: str + timeSlots: Optional[ItemList] = None + translations: Annotated[Optional[TranslationList], Field(alias="translations")] = ( + None + ) + workSkillGroups: Optional[ItemList] = None + workSkills: Optional[ItemList] = None + active: bool + model_config = ConfigDict(extra="allow") + + +class CapacityCategoryListResponse(OFSResponseList[CapacityCategory]): + pass + + +# endregion + +# region 202405 Inventory Types + + +class InventoryType(BaseModel): + label: str + translations: Annotated[Optional[TranslationList], Field(alias="translations")] = ( + None + ) + active: bool = True + model_property: Optional[str] = None + non_serialized: bool = False + quantityPrecision: Optional[int] = 0 + model_config = 
ConfigDict(extra="allow") + + +class InventoryTypeList(RootModel[List[InventoryType]]): + def __iter__(self): + return iter(self.root) + + def __getitem__(self, item): + return self.root[item] + + +class InventoryTypeListResponse(OFSResponseList[InventoryType]): + pass + + +# region 202404 Metadata - Time Slots +# endregion +# region 202404 Metadata - Workzones +# endregion diff --git a/ofsc/oauth.py b/ofsc/oauth.py index 4d6083a..03b9115 100644 --- a/ofsc/oauth.py +++ b/ofsc/oauth.py @@ -6,12 +6,12 @@ import requests -from .common import FULL_RESPONSE, JSON_RESPONSE, TEXT_RESPONSE, wrap_return +from .common import FULL_RESPONSE, OBJ_RESPONSE, TEXT_RESPONSE, wrap_return from .models import OFSApi, OFSConfig, OFSOAuthRequest class OFSOauth2(OFSApi): - @wrap_return(response_type=JSON_RESPONSE, expected=[200]) + @wrap_return(response_type=OBJ_RESPONSE, expected=[200]) def get_token( self, params: OFSOAuthRequest = OFSOAuthRequest() ) -> requests.Response: diff --git a/poetry.lock b/poetry.lock index 83fbb0b..f57b1cd 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,77 +1,161 @@ # This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. [[package]] -name = "atomicwrites" -version = "1.4.1" -description = "Atomic file writes." 
+name = "annotated-types" +version = "0.7.0" +description = "Reusable constraint types to use with typing.Annotated" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "atomicwrites-1.4.1.tar.gz", hash = "sha256:81b2c9071a49367a7f770170e5eec8cb66567cfbbc8c73d20ce5ca4a8d71cf11"}, -] - -[[package]] -name = "attrs" -version = "22.1.0" -description = "Classes Without Boilerplate" -optional = false -python-versions = ">=3.5" +python-versions = ">=3.8" files = [ - {file = "attrs-22.1.0-py2.py3-none-any.whl", hash = "sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c"}, - {file = "attrs-22.1.0.tar.gz", hash = "sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6"}, + {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, + {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, ] -[package.extras] -dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy (>=0.900,!=0.940)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "sphinx", "sphinx-notfound-page", "zope.interface"] -docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"] -tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "zope.interface"] -tests-no-zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins"] - [[package]] name = "cachetools" -version = "5.3.1" +version = "5.5.0" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.3.1-py3-none-any.whl", hash = "sha256:95ef631eeaea14ba2e36f06437f36463aac3a096799e876ee55e5cdccb102590"}, - {file = 
"cachetools-5.3.1.tar.gz", hash = "sha256:dce83f2d9b4e1f732a8cd44af8e8fab2dbe46201467fc98b3ef8f269092bf62b"}, + {file = "cachetools-5.5.0-py3-none-any.whl", hash = "sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292"}, + {file = "cachetools-5.5.0.tar.gz", hash = "sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a"}, ] [[package]] name = "certifi" -version = "2024.7.4" +version = "2024.8.30" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"}, - {file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"}, + {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"}, + {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, ] [[package]] name = "charset-normalizer" -version = "2.1.1" +version = "3.4.0" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
optional = false -python-versions = ">=3.6.0" +python-versions = ">=3.7.0" files = [ - {file = "charset-normalizer-2.1.1.tar.gz", hash = "sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845"}, - {file = "charset_normalizer-2.1.1-py3-none-any.whl", hash = "sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4f9fc98dad6c2eaa32fc3af1417d95b5e3d08aff968df0cd320066def971f9a6"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0de7b687289d3c1b3e8660d0741874abe7888100efe14bd0f9fd7141bcbda92b"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5ed2e36c3e9b4f21dd9422f6893dec0abf2cca553af509b10cd630f878d3eb99"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d3ff7fc90b98c637bda91c89d51264a3dcf210cade3a2c6f838c7268d7a4ca"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1110e22af8ca26b90bd6364fe4c763329b0ebf1ee213ba32b68c73de5752323d"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:86f4e8cca779080f66ff4f191a685ced73d2f72d50216f7112185dc02b90b9b7"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f683ddc7eedd742e2889d2bfb96d69573fde1d92fcb811979cdb7165bb9c7d3"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27623ba66c183eca01bf9ff833875b459cad267aeeb044477fedac35e19ba907"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f606a1881d2663630ea5b8ce2efe2111740df4b687bd78b34a8131baa007f79b"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_i686.whl", hash = 
"sha256:0b309d1747110feb25d7ed6b01afdec269c647d382c857ef4663bbe6ad95a912"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:136815f06a3ae311fae551c3df1f998a1ebd01ddd424aa5603a4336997629e95"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:14215b71a762336254351b00ec720a8e85cada43b987da5a042e4ce3e82bd68e"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:79983512b108e4a164b9c8d34de3992f76d48cadc9554c9e60b43f308988aabe"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-win32.whl", hash = "sha256:c94057af19bc953643a33581844649a7fdab902624d2eb739738a30e2b3e60fc"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:55f56e2ebd4e3bc50442fbc0888c9d8c94e4e06a933804e2af3e89e2f9c1c749"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0d99dd8ff461990f12d6e42c7347fd9ab2532fb70e9621ba520f9e8637161d7c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c57516e58fd17d03ebe67e181a4e4e2ccab1168f8c2976c6a334d4f819fe5944"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6dba5d19c4dfab08e58d5b36304b3f92f3bd5d42c1a3fa37b5ba5cdf6dfcbcee"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf4475b82be41b07cc5e5ff94810e6a01f276e37c2d55571e3fe175e467a1a1c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce031db0408e487fd2775d745ce30a7cd2923667cf3b69d48d219f1d8f5ddeb6"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ff4e7cdfdb1ab5698e675ca622e72d58a6fa2a8aa58195de0c0061288e6e3ea"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:3710a9751938947e6327ea9f3ea6332a09bf0ba0c09cae9cb1f250bd1f1549bc"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82357d85de703176b5587dbe6ade8ff67f9f69a41c0733cf2425378b49954de5"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:47334db71978b23ebcf3c0f9f5ee98b8d65992b65c9c4f2d34c2eaf5bcaf0594"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8ce7fd6767a1cc5a92a639b391891bf1c268b03ec7e021c7d6d902285259685c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f1a2f519ae173b5b6a2c9d5fa3116ce16e48b3462c8b96dfdded11055e3d6365"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:63bc5c4ae26e4bc6be6469943b8253c0fd4e4186c43ad46e713ea61a0ba49129"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bcb4f8ea87d03bc51ad04add8ceaf9b0f085ac045ab4d74e73bbc2dc033f0236"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-win32.whl", hash = "sha256:9ae4ef0b3f6b41bad6366fb0ea4fc1d7ed051528e113a60fa2a65a9abb5b1d99"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cee4373f4d3ad28f1ab6290684d8e2ebdb9e7a1b74fdc39e4c211995f77bec27"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0713f3adb9d03d49d365b70b84775d0a0d18e4ab08d12bc46baa6132ba78aaf6"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:de7376c29d95d6719048c194a9cf1a1b0393fbe8488a22008610b0361d834ecf"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a51b48f42d9358460b78725283f04bddaf44a9358197b889657deba38f329db"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:b295729485b06c1a0683af02a9e42d2caa9db04a373dc38a6a58cdd1e8abddf1"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee803480535c44e7f5ad00788526da7d85525cfefaf8acf8ab9a310000be4b03"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d59d125ffbd6d552765510e3f31ed75ebac2c7470c7274195b9161a32350284"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cda06946eac330cbe6598f77bb54e690b4ca93f593dee1568ad22b04f347c15"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07afec21bbbbf8a5cc3651aa96b980afe2526e7f048fdfb7f1014d84acc8b6d8"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6b40e8d38afe634559e398cc32b1472f376a4099c75fe6299ae607e404c033b2"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b8dcd239c743aa2f9c22ce674a145e0a25cb1566c495928440a181ca1ccf6719"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:84450ba661fb96e9fd67629b93d2941c871ca86fc38d835d19d4225ff946a631"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:44aeb140295a2f0659e113b31cfe92c9061622cadbc9e2a2f7b8ef6b1e29ef4b"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1db4e7fefefd0f548d73e2e2e041f9df5c59e178b4c72fbac4cc6f535cfb1565"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-win32.whl", hash = "sha256:5726cf76c982532c1863fb64d8c6dd0e4c90b6ece9feb06c9f202417a31f7dd7"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:b197e7094f232959f8f20541ead1d9862ac5ebea1d58e9849c1bf979255dfac9"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = 
"sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d"}, + {file = 
"charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-win32.whl", hash = "sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dbe03226baf438ac4fda9e2d0715022fd579cb641c4cf639fa40d53b2fe6f3e2"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd9a8bd8900e65504a305bf8ae6fa9fbc66de94178c420791d0293702fce2df7"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8831399554b92b72af5932cdbbd4ddc55c55f631bb13ff8fe4e6536a06c5c51"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a14969b8691f7998e74663b77b4c36c0337cb1df552da83d5c9004a93afdb574"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dcaf7c1524c0542ee2fc82cc8ec337f7a9f7edee2532421ab200d2b920fc97cf"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425c5f215d0eecee9a56cdb703203dda90423247421bf0d67125add85d0c4455"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:d5b054862739d276e09928de37c79ddeec42a6e1bfc55863be96a36ba22926f6"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:f3e73a4255342d4eb26ef6df01e3962e73aa29baa3124a8e824c5d3364a65748"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:2f6c34da58ea9c1a9515621f4d9ac379871a8f21168ba1b5e09d74250de5ad62"}, + {file 
= "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:f09cb5a7bbe1ecae6e87901a2eb23e0256bb524a79ccc53eb0b7629fbe7677c4"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:0099d79bdfcf5c1f0c2c72f91516702ebf8b0b8ddd8905f97a8aecf49712c621"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-win32.whl", hash = "sha256:9c98230f5042f4945f957d006edccc2af1e03ed5e37ce7c373f00a5a4daa6149"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:62f60aebecfc7f4b82e3f639a7d1433a20ec32824db2199a11ad4f5e146ef5ee"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:af73657b7a68211996527dbfeffbb0864e043d270580c5aef06dc4b659a4b578"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cab5d0b79d987c67f3b9e9c53f54a61360422a5a0bc075f43cab5621d530c3b6"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9289fd5dddcf57bab41d044f1756550f9e7cf0c8e373b8cdf0ce8773dc4bd417"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b493a043635eb376e50eedf7818f2f322eabbaa974e948bd8bdd29eb7ef2a51"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fa2566ca27d67c86569e8c85297aaf413ffab85a8960500f12ea34ff98e4c41"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8e538f46104c815be19c975572d74afb53f29650ea2025bbfaef359d2de2f7f"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fd30dc99682dc2c603c2b315bded2799019cea829f8bf57dc6b61efde6611c8"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2006769bd1640bdf4d5641c69a3d63b71b81445473cac5ded39740a226fa88ab"}, + {file = 
"charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:dc15e99b2d8a656f8e666854404f1ba54765871104e50c8e9813af8a7db07f12"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:ab2e5bef076f5a235c3774b4f4028a680432cded7cad37bba0fd90d64b187d19"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:4ec9dd88a5b71abfc74e9df5ebe7921c35cbb3b641181a531ca65cdb5e8e4dea"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:43193c5cda5d612f247172016c4bb71251c784d7a4d9314677186a838ad34858"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:aa693779a8b50cd97570e5a0f343538a8dbd3e496fa5dcb87e29406ad0299654"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-win32.whl", hash = "sha256:7706f5850360ac01d80c89bcef1640683cc12ed87f42579dab6c5d3ed6888613"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:c3e446d253bd88f6377260d07c895816ebf33ffffd56c1c792b13bff9c3e1ade"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:980b4f289d1d90ca5efcf07958d3eb38ed9c0b7676bf2831a54d4f66f9c27dfa"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f28f891ccd15c514a0981f3b9db9aa23d62fe1a99997512b0491d2ed323d229a"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8aacce6e2e1edcb6ac625fb0f8c3a9570ccc7bfba1f63419b3769ccf6a00ed0"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd7af3717683bea4c87acd8c0d3d5b44d56120b26fd3f8a692bdd2d5260c620a"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ff2ed8194587faf56555927b3aa10e6fb69d931e33953943bc4f837dfee2242"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:e91f541a85298cf35433bf66f3fab2a4a2cff05c127eeca4af174f6d497f0d4b"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:309a7de0a0ff3040acaebb35ec45d18db4b28232f21998851cfa709eeff49d62"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:285e96d9d53422efc0d7a17c60e59f37fbf3dfa942073f666db4ac71e8d726d0"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5d447056e2ca60382d460a604b6302d8db69476fd2015c81e7c35417cfabe4cd"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:20587d20f557fe189b7947d8e7ec5afa110ccf72a3128d61a2a387c3313f46be"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:130272c698667a982a5d0e626851ceff662565379baf0ff2cc58067b81d4f11d"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ab22fbd9765e6954bc0bcff24c25ff71dcbfdb185fcdaca49e81bac68fe724d3"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7782afc9b6b42200f7362858f9e73b1f8316afb276d316336c0ec3bd73312742"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-win32.whl", hash = "sha256:2de62e8801ddfff069cd5c504ce3bc9672b23266597d4e4f50eda28846c322f2"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:95c3c157765b031331dd4db3c775e58deaee050a3042fcad72cbc4189d7c8dca"}, + {file = "charset_normalizer-3.4.0-py3-none-any.whl", hash = "sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079"}, + {file = "charset_normalizer-3.4.0.tar.gz", hash = "sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e"}, ] -[package.extras] -unicode-backport = ["unicodedata2"] - [[package]] name = "colorama" -version = "0.4.5" +version = "0.4.6" description = "Cross-platform colored terminal text." 
optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ - {file = "colorama-0.4.5-py2.py3-none-any.whl", hash = "sha256:854bf444933e37f5824ae7bfc1e98d5bce2ebe4160d46b5edf346a89358e99da"}, - {file = "colorama-0.4.5.tar.gz", hash = "sha256:e6c6b4334fc50988a639d9b98aa429a0b57da6e17b9a44f0451f930b6967b7a4"}, + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] [[package]] @@ -87,13 +171,13 @@ files = [ [[package]] name = "faker" -version = "14.2.0" +version = "14.2.1" description = "Faker is a Python package that generates fake data for you." optional = false python-versions = ">=3.6" files = [ - {file = "Faker-14.2.0-py3-none-any.whl", hash = "sha256:e02c55a5b0586caaf913cc6c254b3de178e08b031c5922e590fd033ebbdbfd02"}, - {file = "Faker-14.2.0.tar.gz", hash = "sha256:6db56e2c43a2b74250d1c332ef25fef7dc07dcb6c5fab5329dd7b4467b8ed7b9"}, + {file = "Faker-14.2.1-py3-none-any.whl", hash = "sha256:2e28aaea60456857d4ce95dd12aed767769537ad23d13d51a545cd40a654e9d9"}, + {file = "Faker-14.2.1.tar.gz", hash = "sha256:daad7badb4fd916bd047b28c8459ef4689e4fe6acf61f6dfebee8cc602e4d009"}, ] [package.dependencies] @@ -101,35 +185,38 @@ python-dateutil = ">=2.4" [[package]] name = "idna" -version = "3.3" +version = "3.10" description = "Internationalized Domain Names in Applications (IDNA)" optional = false -python-versions = ">=3.5" +python-versions = ">=3.6" files = [ - {file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"}, - {file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"}, + {file = "idna-3.10-py3-none-any.whl", hash = 
"sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, + {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, ] +[package.extras] +all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] + [[package]] name = "iniconfig" -version = "1.1.1" -description = "iniconfig: brain-dead simple config-ini parsing" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" optional = false -python-versions = "*" +python-versions = ">=3.7" files = [ - {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, - {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, ] [[package]] name = "openpyxl" -version = "3.0.10" +version = "3.1.5" description = "A Python library to read/write Excel 2010 xlsx/xlsm files" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "openpyxl-3.0.10-py2.py3-none-any.whl", hash = "sha256:0ab6d25d01799f97a9464630abacbb34aafecdcaa0ef3cba6d6b3499867d0355"}, - {file = "openpyxl-3.0.10.tar.gz", hash = "sha256:e47805627aebcf860edb4edf7987b1309c1b3632f3750538ed962bbcc3bd7449"}, + {file = "openpyxl-3.1.5-py2.py3-none-any.whl", hash = "sha256:5282c12b107bffeef825f4617dc029afaf41d0ea60823bbb665ef3079dc79de2"}, + {file = "openpyxl-3.1.5.tar.gz", hash = "sha256:cf0e3cf56142039133628b5acffe8ef0c12bc902d2aadd3e0fe5878dc08d1050"}, ] [package.dependencies] @@ -137,168 +224,242 @@ et-xmlfile = "*" [[package]] name = "packaging" -version = "21.3" +version = "24.1" description = "Core utilities for Python packages" optional = false 
-python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, - {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, + {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, + {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, ] -[package.dependencies] -pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" - [[package]] name = "pluggy" -version = "1.0.0" +version = "1.5.0" description = "plugin and hook calling mechanisms for python" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, - {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, + {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, + {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, ] [package.extras] dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] -[[package]] -name = "py" -version = "1.11.0" -description = "library with cross-python path, ini-parsing, io, code, log facilities" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -files = [ - {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, - {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, -] - [[package]] name = "pyarmor" -version = "7.6.1" +version = "7.7.4" description = "A tool used to obfuscate 
python scripts, bind obfuscated scripts to fixed machine or expire obfuscated scripts." optional = false python-versions = "*" files = [ - {file = "pyarmor-7.6.1-py2.py3-none-any.whl", hash = "sha256:d637538cba2e4b85795e34dd63403932a176cd10bfe4401f6109ff9aafa36455"}, - {file = "pyarmor-7.6.1.zip", hash = "sha256:ea78a13a936496d124701ae2d8a9fea5f5fb90f2aa0fa7bacb9e890994ee594e"}, + {file = "pyarmor-7.7.4-py2.py3-none-any.whl", hash = "sha256:e29e2b05683919ee72a62adb602e21fc0f933f01d57aade6d7e98d9b7563b088"}, + {file = "pyarmor-7.7.4.zip", hash = "sha256:8a78756be546e7174f631cbfe248cd096218eeebaaf07b994eda2281157db11d"}, ] [[package]] name = "pydantic" -version = "1.10.13" -description = "Data validation and settings management using python type hints" +version = "2.9.2" +description = "Data validation using Python type hints" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pydantic-1.10.13-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:efff03cc7a4f29d9009d1c96ceb1e7a70a65cfe86e89d34e4a5f2ab1e5693737"}, - {file = "pydantic-1.10.13-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3ecea2b9d80e5333303eeb77e180b90e95eea8f765d08c3d278cd56b00345d01"}, - {file = "pydantic-1.10.13-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1740068fd8e2ef6eb27a20e5651df000978edce6da6803c2bef0bc74540f9548"}, - {file = "pydantic-1.10.13-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84bafe2e60b5e78bc64a2941b4c071a4b7404c5c907f5f5a99b0139781e69ed8"}, - {file = "pydantic-1.10.13-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:bc0898c12f8e9c97f6cd44c0ed70d55749eaf783716896960b4ecce2edfd2d69"}, - {file = "pydantic-1.10.13-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:654db58ae399fe6434e55325a2c3e959836bd17a6f6a0b6ca8107ea0571d2e17"}, - {file = "pydantic-1.10.13-cp310-cp310-win_amd64.whl", hash = 
"sha256:75ac15385a3534d887a99c713aa3da88a30fbd6204a5cd0dc4dab3d770b9bd2f"}, - {file = "pydantic-1.10.13-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c553f6a156deb868ba38a23cf0df886c63492e9257f60a79c0fd8e7173537653"}, - {file = "pydantic-1.10.13-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5e08865bc6464df8c7d61439ef4439829e3ab62ab1669cddea8dd00cd74b9ffe"}, - {file = "pydantic-1.10.13-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e31647d85a2013d926ce60b84f9dd5300d44535a9941fe825dc349ae1f760df9"}, - {file = "pydantic-1.10.13-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:210ce042e8f6f7c01168b2d84d4c9eb2b009fe7bf572c2266e235edf14bacd80"}, - {file = "pydantic-1.10.13-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:8ae5dd6b721459bfa30805f4c25880e0dd78fc5b5879f9f7a692196ddcb5a580"}, - {file = "pydantic-1.10.13-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f8e81fc5fb17dae698f52bdd1c4f18b6ca674d7068242b2aff075f588301bbb0"}, - {file = "pydantic-1.10.13-cp311-cp311-win_amd64.whl", hash = "sha256:61d9dce220447fb74f45e73d7ff3b530e25db30192ad8d425166d43c5deb6df0"}, - {file = "pydantic-1.10.13-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4b03e42ec20286f052490423682016fd80fda830d8e4119f8ab13ec7464c0132"}, - {file = "pydantic-1.10.13-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f59ef915cac80275245824e9d771ee939133be38215555e9dc90c6cb148aaeb5"}, - {file = "pydantic-1.10.13-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5a1f9f747851338933942db7af7b6ee8268568ef2ed86c4185c6ef4402e80ba8"}, - {file = "pydantic-1.10.13-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:97cce3ae7341f7620a0ba5ef6cf043975cd9d2b81f3aa5f4ea37928269bc1b87"}, - {file = "pydantic-1.10.13-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:854223752ba81e3abf663d685f105c64150873cc6f5d0c01d3e3220bcff7d36f"}, 
- {file = "pydantic-1.10.13-cp37-cp37m-win_amd64.whl", hash = "sha256:b97c1fac8c49be29486df85968682b0afa77e1b809aff74b83081cc115e52f33"}, - {file = "pydantic-1.10.13-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c958d053453a1c4b1c2062b05cd42d9d5c8eb67537b8d5a7e3c3032943ecd261"}, - {file = "pydantic-1.10.13-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4c5370a7edaac06daee3af1c8b1192e305bc102abcbf2a92374b5bc793818599"}, - {file = "pydantic-1.10.13-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d6f6e7305244bddb4414ba7094ce910560c907bdfa3501e9db1a7fd7eaea127"}, - {file = "pydantic-1.10.13-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d3a3c792a58e1622667a2837512099eac62490cdfd63bd407993aaf200a4cf1f"}, - {file = "pydantic-1.10.13-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c636925f38b8db208e09d344c7aa4f29a86bb9947495dd6b6d376ad10334fb78"}, - {file = "pydantic-1.10.13-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:678bcf5591b63cc917100dc50ab6caebe597ac67e8c9ccb75e698f66038ea953"}, - {file = "pydantic-1.10.13-cp38-cp38-win_amd64.whl", hash = "sha256:6cf25c1a65c27923a17b3da28a0bdb99f62ee04230c931d83e888012851f4e7f"}, - {file = "pydantic-1.10.13-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8ef467901d7a41fa0ca6db9ae3ec0021e3f657ce2c208e98cd511f3161c762c6"}, - {file = "pydantic-1.10.13-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:968ac42970f57b8344ee08837b62f6ee6f53c33f603547a55571c954a4225691"}, - {file = "pydantic-1.10.13-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9849f031cf8a2f0a928fe885e5a04b08006d6d41876b8bbd2fc68a18f9f2e3fd"}, - {file = "pydantic-1.10.13-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:56e3ff861c3b9c6857579de282ce8baabf443f42ffba355bf070770ed63e11e1"}, - {file = "pydantic-1.10.13-cp39-cp39-musllinux_1_1_i686.whl", hash = 
"sha256:9f00790179497767aae6bcdc36355792c79e7bbb20b145ff449700eb076c5f96"}, - {file = "pydantic-1.10.13-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:75b297827b59bc229cac1a23a2f7a4ac0031068e5be0ce385be1462e7e17a35d"}, - {file = "pydantic-1.10.13-cp39-cp39-win_amd64.whl", hash = "sha256:e70ca129d2053fb8b728ee7d1af8e553a928d7e301a311094b8a0501adc8763d"}, - {file = "pydantic-1.10.13-py3-none-any.whl", hash = "sha256:b87326822e71bd5f313e7d3bfdc77ac3247035ac10b0c0618bd99dcf95b1e687"}, - {file = "pydantic-1.10.13.tar.gz", hash = "sha256:32c8b48dcd3b2ac4e78b0ba4af3a2c2eb6048cb75202f0ea7b34feb740efc340"}, + {file = "pydantic-2.9.2-py3-none-any.whl", hash = "sha256:f048cec7b26778210e28a0459867920654d48e5e62db0958433636cde4254f12"}, + {file = "pydantic-2.9.2.tar.gz", hash = "sha256:d155cef71265d1e9807ed1c32b4c8deec042a44a50a4188b25ac67ecd81a9c0f"}, ] [package.dependencies] -typing-extensions = ">=4.2.0" +annotated-types = ">=0.6.0" +pydantic-core = "2.23.4" +typing-extensions = [ + {version = ">=4.12.2", markers = "python_version >= \"3.13\""}, + {version = ">=4.6.1", markers = "python_version < \"3.13\""}, +] [package.extras] -dotenv = ["python-dotenv (>=0.10.4)"] -email = ["email-validator (>=1.0.3)"] +email = ["email-validator (>=2.0.0)"] +timezone = ["tzdata"] [[package]] -name = "pyparsing" -version = "3.0.9" -description = "pyparsing module - Classes and methods to define and execute parsing grammars" +name = "pydantic-core" +version = "2.23.4" +description = "Core functionality for Pydantic validation and serialization" optional = false -python-versions = ">=3.6.8" +python-versions = ">=3.8" files = [ - {file = "pyparsing-3.0.9-py3-none-any.whl", hash = "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"}, - {file = "pyparsing-3.0.9.tar.gz", hash = "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb"}, + {file = "pydantic_core-2.23.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = 
"sha256:b10bd51f823d891193d4717448fab065733958bdb6a6b351967bd349d48d5c9b"}, + {file = "pydantic_core-2.23.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4fc714bdbfb534f94034efaa6eadd74e5b93c8fa6315565a222f7b6f42ca1166"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63e46b3169866bd62849936de036f901a9356e36376079b05efa83caeaa02ceb"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed1a53de42fbe34853ba90513cea21673481cd81ed1be739f7f2efb931b24916"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cfdd16ab5e59fc31b5e906d1a3f666571abc367598e3e02c83403acabc092e07"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:255a8ef062cbf6674450e668482456abac99a5583bbafb73f9ad469540a3a232"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a7cd62e831afe623fbb7aabbb4fe583212115b3ef38a9f6b71869ba644624a2"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f09e2ff1f17c2b51f2bc76d1cc33da96298f0a036a137f5440ab3ec5360b624f"}, + {file = "pydantic_core-2.23.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e38e63e6f3d1cec5a27e0afe90a085af8b6806ee208b33030e65b6516353f1a3"}, + {file = "pydantic_core-2.23.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0dbd8dbed2085ed23b5c04afa29d8fd2771674223135dc9bc937f3c09284d071"}, + {file = "pydantic_core-2.23.4-cp310-none-win32.whl", hash = "sha256:6531b7ca5f951d663c339002e91aaebda765ec7d61b7d1e3991051906ddde119"}, + {file = "pydantic_core-2.23.4-cp310-none-win_amd64.whl", hash = "sha256:7c9129eb40958b3d4500fa2467e6a83356b3b61bfff1b414c7361d9220f9ae8f"}, + {file = "pydantic_core-2.23.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = 
"sha256:77733e3892bb0a7fa797826361ce8a9184d25c8dffaec60b7ffe928153680ba8"}, + {file = "pydantic_core-2.23.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b84d168f6c48fabd1f2027a3d1bdfe62f92cade1fb273a5d68e621da0e44e6d"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df49e7a0861a8c36d089c1ed57d308623d60416dab2647a4a17fe050ba85de0e"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ff02b6d461a6de369f07ec15e465a88895f3223eb75073ffea56b84d9331f607"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:996a38a83508c54c78a5f41456b0103c30508fed9abcad0a59b876d7398f25fd"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d97683ddee4723ae8c95d1eddac7c192e8c552da0c73a925a89fa8649bf13eea"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:216f9b2d7713eb98cb83c80b9c794de1f6b7e3145eef40400c62e86cee5f4e1e"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6f783e0ec4803c787bcea93e13e9932edab72068f68ecffdf86a99fd5918878b"}, + {file = "pydantic_core-2.23.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d0776dea117cf5272382634bd2a5c1b6eb16767c223c6a5317cd3e2a757c61a0"}, + {file = "pydantic_core-2.23.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d5f7a395a8cf1621939692dba2a6b6a830efa6b3cee787d82c7de1ad2930de64"}, + {file = "pydantic_core-2.23.4-cp311-none-win32.whl", hash = "sha256:74b9127ffea03643e998e0c5ad9bd3811d3dac8c676e47db17b0ee7c3c3bf35f"}, + {file = "pydantic_core-2.23.4-cp311-none-win_amd64.whl", hash = "sha256:98d134c954828488b153d88ba1f34e14259284f256180ce659e8d83e9c05eaa3"}, + {file = "pydantic_core-2.23.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = 
"sha256:f3e0da4ebaef65158d4dfd7d3678aad692f7666877df0002b8a522cdf088f231"}, + {file = "pydantic_core-2.23.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f69a8e0b033b747bb3e36a44e7732f0c99f7edd5cea723d45bc0d6e95377ffee"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:723314c1d51722ab28bfcd5240d858512ffd3116449c557a1336cbe3919beb87"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bb2802e667b7051a1bebbfe93684841cc9351004e2badbd6411bf357ab8d5ac8"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d18ca8148bebe1b0a382a27a8ee60350091a6ddaf475fa05ef50dc35b5df6327"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33e3d65a85a2a4a0dc3b092b938a4062b1a05f3a9abde65ea93b233bca0e03f2"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:128585782e5bfa515c590ccee4b727fb76925dd04a98864182b22e89a4e6ed36"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:68665f4c17edcceecc112dfed5dbe6f92261fb9d6054b47d01bf6371a6196126"}, + {file = "pydantic_core-2.23.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:20152074317d9bed6b7a95ade3b7d6054845d70584216160860425f4fbd5ee9e"}, + {file = "pydantic_core-2.23.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9261d3ce84fa1d38ed649c3638feefeae23d32ba9182963e465d58d62203bd24"}, + {file = "pydantic_core-2.23.4-cp312-none-win32.whl", hash = "sha256:4ba762ed58e8d68657fc1281e9bb72e1c3e79cc5d464be146e260c541ec12d84"}, + {file = "pydantic_core-2.23.4-cp312-none-win_amd64.whl", hash = "sha256:97df63000f4fea395b2824da80e169731088656d1818a11b95f3b173747b6cd9"}, + {file = "pydantic_core-2.23.4-cp313-cp313-macosx_10_12_x86_64.whl", hash = 
"sha256:7530e201d10d7d14abce4fb54cfe5b94a0aefc87da539d0346a484ead376c3cc"}, + {file = "pydantic_core-2.23.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:df933278128ea1cd77772673c73954e53a1c95a4fdf41eef97c2b779271bd0bd"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cb3da3fd1b6a5d0279a01877713dbda118a2a4fc6f0d821a57da2e464793f05"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42c6dcb030aefb668a2b7009c85b27f90e51e6a3b4d5c9bc4c57631292015b0d"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:696dd8d674d6ce621ab9d45b205df149399e4bb9aa34102c970b721554828510"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2971bb5ffe72cc0f555c13e19b23c85b654dd2a8f7ab493c262071377bfce9f6"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8394d940e5d400d04cad4f75c0598665cbb81aecefaca82ca85bd28264af7f9b"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0dff76e0602ca7d4cdaacc1ac4c005e0ce0dcfe095d5b5259163a80d3a10d327"}, + {file = "pydantic_core-2.23.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7d32706badfe136888bdea71c0def994644e09fff0bfe47441deaed8e96fdbc6"}, + {file = "pydantic_core-2.23.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ed541d70698978a20eb63d8c5d72f2cc6d7079d9d90f6b50bad07826f1320f5f"}, + {file = "pydantic_core-2.23.4-cp313-none-win32.whl", hash = "sha256:3d5639516376dce1940ea36edf408c554475369f5da2abd45d44621cb616f769"}, + {file = "pydantic_core-2.23.4-cp313-none-win_amd64.whl", hash = "sha256:5a1504ad17ba4210df3a045132a7baeeba5a200e930f57512ee02909fc5c4cb5"}, + {file = "pydantic_core-2.23.4-cp38-cp38-macosx_10_12_x86_64.whl", hash = 
"sha256:d4488a93b071c04dc20f5cecc3631fc78b9789dd72483ba15d423b5b3689b555"}, + {file = "pydantic_core-2.23.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:81965a16b675b35e1d09dd14df53f190f9129c0202356ed44ab2728b1c905658"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ffa2ebd4c8530079140dd2d7f794a9d9a73cbb8e9d59ffe24c63436efa8f271"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:61817945f2fe7d166e75fbfb28004034b48e44878177fc54d81688e7b85a3665"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:29d2c342c4bc01b88402d60189f3df065fb0dda3654744d5a165a5288a657368"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5e11661ce0fd30a6790e8bcdf263b9ec5988e95e63cf901972107efc49218b13"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d18368b137c6295db49ce7218b1a9ba15c5bc254c96d7c9f9e924a9bc7825ad"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ec4e55f79b1c4ffb2eecd8a0cfba9955a2588497d96851f4c8f99aa4a1d39b12"}, + {file = "pydantic_core-2.23.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:374a5e5049eda9e0a44c696c7ade3ff355f06b1fe0bb945ea3cac2bc336478a2"}, + {file = "pydantic_core-2.23.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5c364564d17da23db1106787675fc7af45f2f7b58b4173bfdd105564e132e6fb"}, + {file = "pydantic_core-2.23.4-cp38-none-win32.whl", hash = "sha256:d7a80d21d613eec45e3d41eb22f8f94ddc758a6c4720842dc74c0581f54993d6"}, + {file = "pydantic_core-2.23.4-cp38-none-win_amd64.whl", hash = "sha256:5f5ff8d839f4566a474a969508fe1c5e59c31c80d9e140566f9a37bba7b8d556"}, + {file = "pydantic_core-2.23.4-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a4fa4fc04dff799089689f4fd502ce7d59de529fc2f40a2c8836886c03e0175a"}, + 
{file = "pydantic_core-2.23.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0a7df63886be5e270da67e0966cf4afbae86069501d35c8c1b3b6c168f42cb36"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dcedcd19a557e182628afa1d553c3895a9f825b936415d0dbd3cd0bbcfd29b4b"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f54b118ce5de9ac21c363d9b3caa6c800341e8c47a508787e5868c6b79c9323"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86d2f57d3e1379a9525c5ab067b27dbb8a0642fb5d454e17a9ac434f9ce523e3"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:de6d1d1b9e5101508cb37ab0d972357cac5235f5c6533d1071964c47139257df"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1278e0d324f6908e872730c9102b0112477a7f7cf88b308e4fc36ce1bdb6d58c"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9a6b5099eeec78827553827f4c6b8615978bb4b6a88e5d9b93eddf8bb6790f55"}, + {file = "pydantic_core-2.23.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e55541f756f9b3ee346b840103f32779c695a19826a4c442b7954550a0972040"}, + {file = "pydantic_core-2.23.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a5c7ba8ffb6d6f8f2ab08743be203654bb1aaa8c9dcb09f82ddd34eadb695605"}, + {file = "pydantic_core-2.23.4-cp39-none-win32.whl", hash = "sha256:37b0fe330e4a58d3c58b24d91d1eb102aeec675a3db4c292ec3928ecd892a9a6"}, + {file = "pydantic_core-2.23.4-cp39-none-win_amd64.whl", hash = "sha256:1498bec4c05c9c787bde9125cfdcc63a41004ff167f495063191b863399b1a29"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f455ee30a9d61d3e1a15abd5068827773d6e4dc513e795f380cdd59932c782d5"}, + {file = 
"pydantic_core-2.23.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1e90d2e3bd2c3863d48525d297cd143fe541be8bbf6f579504b9712cb6b643ec"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e203fdf807ac7e12ab59ca2bfcabb38c7cf0b33c41efeb00f8e5da1d86af480"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e08277a400de01bc72436a0ccd02bdf596631411f592ad985dcee21445bd0068"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f220b0eea5965dec25480b6333c788fb72ce5f9129e8759ef876a1d805d00801"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d06b0c8da4f16d1d1e352134427cb194a0a6e19ad5db9161bf32b2113409e728"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ba1a0996f6c2773bd83e63f18914c1de3c9dd26d55f4ac302a7efe93fb8e7433"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:9a5bce9d23aac8f0cf0836ecfc033896aa8443b501c58d0602dbfd5bd5b37753"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:78ddaaa81421a29574a682b3179d4cf9e6d405a09b99d93ddcf7e5239c742e21"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:883a91b5dd7d26492ff2f04f40fbb652de40fcc0afe07e8129e8ae779c2110eb"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88ad334a15b32a791ea935af224b9de1bf99bcd62fabf745d5f3442199d86d59"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:233710f069d251feb12a56da21e14cca67994eab08362207785cf8c598e74577"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:19442362866a753485ba5e4be408964644dd6a09123d9416c54cd49171f50744"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:624e278a7d29b6445e4e813af92af37820fafb6dcc55c012c834f9e26f9aaaef"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f5ef8f42bec47f21d07668a043f077d507e5bf4e668d5c6dfe6aaba89de1a5b8"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:aea443fffa9fbe3af1a9ba721a87f926fe548d32cab71d188a6ede77d0ff244e"}, + {file = "pydantic_core-2.23.4.tar.gz", hash = "sha256:2584f7cf844ac4d970fba483a717dbe10c1c1c96a969bf65d61ffe94df1b2863"}, ] +[package.dependencies] +typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" + +[[package]] +name = "pydantic-settings" +version = "2.6.0" +description = "Settings management using Pydantic" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydantic_settings-2.6.0-py3-none-any.whl", hash = "sha256:4a819166f119b74d7f8c765196b165f95cc7487ce58ea27dec8a5a26be0970e0"}, + {file = "pydantic_settings-2.6.0.tar.gz", hash = "sha256:44a1804abffac9e6a30372bb45f6cafab945ef5af25e66b1c634c01dd39e0188"}, +] + +[package.dependencies] +pydantic = ">=2.7.0" +python-dotenv = ">=0.21.0" + [package.extras] -diagrams = ["jinja2", "railroad-diagrams"] +azure-key-vault = ["azure-identity (>=1.16.0)", "azure-keyvault-secrets (>=4.8.0)"] +toml = ["tomli (>=2.0.1)"] +yaml = ["pyyaml (>=6.0.1)"] [[package]] name = "pytest" -version = "7.1.2" +version = "7.4.0" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.7" files = [ - {file = "pytest-7.1.2-py3-none-any.whl", hash = "sha256:13d0e3ccfc2b6e26be000cb6568c832ba67ba32e719443bfe725814d3c42433c"}, - {file = "pytest-7.1.2.tar.gz", hash = "sha256:a06a0425453864a270bc45e71f783330a7428defb4230fb5e6a731fde06ecd45"}, + {file = "pytest-7.4.0-py3-none-any.whl", hash = 
"sha256:78bf16451a2eb8c7a2ea98e32dc119fd2aa758f1d5d66dbf0a59d69a3969df32"}, + {file = "pytest-7.4.0.tar.gz", hash = "sha256:b4bf8c45bd59934ed84001ad51e11b4ee40d40a1229d2c79f9c592b0a3f6bd8a"}, ] [package.dependencies] -atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} -attrs = ">=19.2.0" colorama = {version = "*", markers = "sys_platform == \"win32\""} iniconfig = "*" packaging = "*" pluggy = ">=0.12,<2.0" -py = ">=1.8.2" -tomli = ">=1.0.0" [package.extras] -testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"] +testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] [[package]] name = "python-dateutil" -version = "2.8.2" +version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, ] [package.dependencies] six = ">=1.5" +[[package]] +name = "python-dotenv" +version = "1.0.1" +description = "Read key-value pairs from a .env file and set them as environment variables" +optional = false +python-versions = ">=3.8" +files = [ + {file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"}, + {file = "python_dotenv-1.0.1-py3-none-any.whl", hash = 
"sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"}, +] + +[package.extras] +cli = ["click (>=5.0)"] + [[package]] name = "requests" -version = "2.32.2" +version = "2.32.3" description = "Python HTTP for Humans." optional = false python-versions = ">=3.8" files = [ - {file = "requests-2.32.2-py3-none-any.whl", hash = "sha256:fc06670dd0ed212426dfeb94fc1b983d917c4f9847c863f313c9dfaaffb7c23c"}, - {file = "requests-2.32.2.tar.gz", hash = "sha256:dd951ff5ecf3e3b3aa26b40703ba77495dab41da839ae72ef3c8e5d8e2433289"}, + {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, + {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, ] [package.dependencies] @@ -322,45 +483,35 @@ files = [ {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, ] -[[package]] -name = "tomli" -version = "2.0.1" -description = "A lil' TOML parser" -optional = false -python-versions = ">=3.7" -files = [ - {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, - {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, -] - [[package]] name = "typing-extensions" -version = "4.3.0" -description = "Backported and Experimental Type Hints for Python 3.7+" +version = "4.12.2" +description = "Backported and Experimental Type Hints for Python 3.8+" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.3.0-py3-none-any.whl", hash = "sha256:25642c956049920a5aa49edcdd6ab1e06d7e5d467fc00e0506c44ac86fbfca02"}, - {file = "typing_extensions-4.3.0.tar.gz", hash = "sha256:e6d2677a32f47fc7eb2795db1dd15c1f34eff616bcaf2cfb5e997f854fa1c4a6"}, + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = 
"sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, ] [[package]] name = "urllib3" -version = "1.26.19" +version = "2.2.3" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +python-versions = ">=3.8" files = [ - {file = "urllib3-1.26.19-py2.py3-none-any.whl", hash = "sha256:37a0344459b199fce0e80b0d3569837ec6b6937435c5244e7fd73fa6006830f3"}, - {file = "urllib3-1.26.19.tar.gz", hash = "sha256:3e3d753a8618b86d7de333b4223005f68720bcd6a7d2bcb9fbd2229ec7c1e429"}, + {file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"}, + {file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"}, ] [package.extras] -brotli = ["brotli (==1.0.9)", "brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] -secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] -socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] [metadata] lock-version = "2.0" -python-versions = "^3.9" -content-hash = "269986645e36addffacf8f2bd6e4d8424318250277321441f23e962a3cce5ba0" +python-versions = "^3.11" +content-hash = "b769f60fc37c35bd75d1d70af3c611b8d23bbce792723672cdad62e93f75a415" diff --git a/pyproject.toml b/pyproject.toml index 35333b2..fa654fc 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "ofsc" -version = "1.20.3" +version = "2.0.4" license = "MIT" description = "Python wrapper for Oracle Field Service API" authors = ["Borja Toron "] @@ -11,9 +11,10 @@ 
repository = 'https://github.com/btoron/pyOFSC' [tool.poetry.dependencies] python = "^3.11" requests = "^2.28.1" -pytest = "^7.1.2" -pydantic = "^1.9.1" +pytest = "7.4" +pydantic = "^2.6.3" cachetools = "^5.3.1" +pydantic-settings = "^2.2.1" [tool.poetry.dev-dependencies] openpyxl = "^3.0.10" diff --git a/tests/OFSC_activities_test.py b/tests/OFSC_activities_test.py deleted file mode 100644 index 3acf63d..0000000 --- a/tests/OFSC_activities_test.py +++ /dev/null @@ -1,85 +0,0 @@ -import os -import sys -import unittest - -sys.path.append(os.path.abspath(".")) -import argparse -import json -import logging -import pprint -from datetime import date -from datetime import datetime as dt -from datetime import timedelta - -from ofsc import FULL_RESPONSE, JSON_RESPONSE, OFSC - - -class ofscActivitiesTest(unittest.TestCase): - def setUp(self): - self.logger = logging.getLogger() - self.pp = pprint.PrettyPrinter(indent=4) - self.logger.setLevel(logging.DEBUG) - # todo add credentials to test run - logging.warning("Here {}".format(os.environ.get("OFSC_CLIENT_ID"))) - self.instance = OFSC( - clientID=os.environ.get("OFSC_CLIENT_ID"), - secret=os.environ.get("OFSC_CLIENT_SECRET"), - companyName=os.environ.get("OFSC_COMPANY"), - ) - response = self.instance.get_activity(3954794, response_type=JSON_RESPONSE) - self.assertIsNotNone(response["date"]) - self.date = response["date"] - - # Test A.01 Get Activity Info (activity exists) - def test_A01_get_activity(self): - self.logger.info("...101: Get Activity Info (activity does exist)") - raw_response = self.instance.get_activity(3951935) - response = json.loads(raw_response) - self.logger.debug(response) - self.assertEqual(response["customerNumber"], "019895700") - - # Test A.02 Get Activity Info (activity does not exist) - def test_A02_get_activity(self): - instance = self.instance - logger = self.logger - logger.info("...102: Get Activity Info (activity does not exist)") - raw_response = instance.get_activity(99999) - response = 
json.loads(raw_response) - - logger.debug(response) - self.assertEqual(response["status"], "404") - - # Test A.04 Move activity (between buckets, no error) - def test_A04_move_activity_between_buckets_no_error(self): - instance = self.instance - logger = self.logger - - # Do a get resource to verify that the activity is in the right place - response = instance.get_activity(4224010, response_type=FULL_RESPONSE) - logger.debug(response.json()) - self.assertEqual(response.status_code, 200) - original_resource = response.json()["resourceId"] - - logger.info("...104: Move activity (activity exists)") - data = {"setResource": {"resourceId": "FLUSA"}} - response = instance.move_activity( - 4224010, json.dumps(data), response_type=FULL_RESPONSE - ) - self.assertEqual(response.status_code, 204) - - # Do a get resource to verify that the activity is in the right place - response = instance.get_activity(4224010, response_type=FULL_RESPONSE) - logger.debug(response.json()) - self.assertEqual(response.status_code, 200) - self.assertEqual(response.json()["resourceId"], "FLUSA") - - # Return it to the previous place - data["setResource"]["resourceId"] = original_resource - response = instance.move_activity( - 4224010, json.dumps(data), response_type=FULL_RESPONSE - ) - self.assertEqual(response.status_code, 204) - - -if __name__ == "__main__": - unittest.main() diff --git a/tests/OFSC_metadata_test.py b/tests/OFSC_metadata_test.py deleted file mode 100644 index 4f264cc..0000000 --- a/tests/OFSC_metadata_test.py +++ /dev/null @@ -1,74 +0,0 @@ -import os -import sys -import unittest - -from ofsc.models import SharingEnum, Workskill - -sys.path.append(os.path.abspath(".")) -import argparse -import json -import logging -import pprint - -from ofsc import FULL_RESPONSE, OFSC - - -class ofscTest(unittest.TestCase): - def setUp(self): - self.logger = logging.getLogger() - self.pp = pprint.PrettyPrinter(indent=4) - self.logger.setLevel(logging.DEBUG) - # todo add credentials to test run - 
logging.warning("Here {}".format(os.environ.get("OFSC_CLIENT_ID"))) - self.instance = OFSC( - clientID=os.environ.get("OFSC_CLIENT_ID"), - secret=os.environ.get("OFSC_CLIENT_SECRET"), - companyName=os.environ.get("OFSC_COMPANY"), - ) - self.date = os.environ.get("OFSC_TEST_DATE") - - # Test C.P.10 Get File Property 01 - def test_get_file_property_01(self): - self.logger.info("...C.P.01 Get File Property") - instance = self.instance - logger = self.logger - raw_response = instance.get_file_property( - activityId=3954865, - label="csign", - mediaType="*/*", - response_type=FULL_RESPONSE, - ) - logging.debug(self.pp.pformat(raw_response.json())) - response = raw_response.json() - logger.debug(self.pp.pformat(response)) - self.assertIsNotNone(response["mediaType"]) - self.assertEqual(response["mediaType"], "image/png") - self.assertEqual(response["name"], "signature.png") - - # Test C.P.10 Get File Property 02 - def test_get_file_property_02(self): - self.logger.info("...C.P.02 Get File Property content") - instance = self.instance - logger = self.logger - metadata_response = instance.get_file_property( - activityId=3954865, - label="csign", - mediaType="*/*", - response_type=FULL_RESPONSE, - ) - logging.debug(self.pp.pformat(metadata_response.json())) - response = metadata_response.json() - raw_response = instance.get_file_property( - activityId=3954865, - label="csign", - mediaType="image/png", - response_type=FULL_RESPONSE, - ) - with open(os.path.join(os.getcwd(), response["name"]), "wb") as fd: - fd.write(raw_response.content) - self.assertEqual(response["name"], "signature.png") - # TODO: Assert the size of the file - - -if __name__ == "__main__": - unittest.main() diff --git a/tests/OFSC_resources_test.py b/tests/OFSC_resources_test.py deleted file mode 100644 index f8eae78..0000000 --- a/tests/OFSC_resources_test.py +++ /dev/null @@ -1,82 +0,0 @@ -import os -import sys -import unittest - -sys.path.append(os.path.abspath(".")) -import argparse -import json 
-import logging -import pprint - -from ofsc import FULL_RESPONSE, JSON_RESPONSE, OFSC - - -class ofscTest(unittest.TestCase): - def setUp(self): - self.logger = logging.getLogger() - self.pp = pprint.PrettyPrinter(indent=4) - self.logger.setLevel(logging.DEBUG) - # todo add credentials to test run - logging.warning("Here {}".format(os.environ.get("OFSC_CLIENT_ID"))) - self.instance = OFSC( - clientID=os.environ.get("OFSC_CLIENT_ID"), - secret=os.environ.get("OFSC_CLIENT_SECRET"), - companyName=os.environ.get("OFSC_COMPANY"), - ) - response = self.instance.get_activity(3954794, response_type=JSON_RESPONSE) - self.assertIsNotNone(response["date"]) - self.date = response["date"] - - # Test R.0.1 - def test_R01_get_resource_route_nofields(self): - instance = self.instance - logger = self.logger - raw_response = instance.get_resource_route( - 33001, date=self.date, response_type=FULL_RESPONSE - ) - logging.debug(self.pp.pformat(raw_response.json())) - response = raw_response.json() - self.assertEqual(response["totalResults"], 13) - - def test_R02_get_resource_route_twofields(self): - instance = self.instance - logger = self.logger - raw_response = instance.get_resource_route( - 33001, date=self.date, activityFields="activityId,activityType" - ) - response = json.loads(raw_response) - # print(response) - self.assertEqual(response["totalResults"], 13) - - def test_R03_get_resource_descendants_noexpand(self): - instance = self.instance - logger = self.logger - raw_response = instance.get_resource_descendants("FLUSA") - response = json.loads(raw_response) - # print(response) - self.assertEqual(response["totalResults"], 37) - - def test_R04_get_resource_descendants_expand(self): - instance = self.instance - logger = self.logger - raw_response = instance.get_resource_descendants( - "FLUSA", workSchedules=True, workZones=True, workSkills=True - ) - response = json.loads(raw_response) - # print(response) - self.assertEqual(response["totalResults"], 37) - - def 
test_R05_get_resource_descendants_noexpand_fields(self): - instance = self.instance - logger = self.logger - raw_response = instance.get_resource_descendants( - "FLUSA", resourceFields="resourceId,phone", response_type=FULL_RESPONSE - ) - # logging.debug(self.pp.pformat(raw_response.json())) - response = raw_response.json() - logger.info(self.pp.pformat(response)) - self.assertEqual(response["totalResults"], 37) - - -if __name__ == "__main__": - unittest.main() diff --git a/tests/OFSC_test.py b/tests/OFSC_test.py deleted file mode 100644 index 246fef3..0000000 --- a/tests/OFSC_test.py +++ /dev/null @@ -1,211 +0,0 @@ -import os -import sys -import unittest - -from ofsc.core import JSON_RESPONSE - -sys.path.append(os.path.abspath(".")) -import argparse -import json -import logging -import pprint - -from ofsc import FULL_RESPONSE, OFSC - - -class ofscTest(unittest.TestCase): - aid = 4224010 - - def setUp(self): - self.logger = logging.getLogger() - self.pp = pprint.PrettyPrinter(indent=4) - self.logger.setLevel(logging.DEBUG) - # todo add credentials to test run - logging.info("ClientID {}".format(os.environ.get("OFSC_CLIENT_ID"))) - self.instance = OFSC( - clientID=os.environ.get("OFSC_CLIENT_ID"), - secret=os.environ.get("OFSC_CLIENT_SECRET"), - companyName=os.environ.get("OFSC_COMPANY"), - root=os.environ.get("OFSC_ROOT"), - ) - self.logger.info(self.instance) - self.date = os.environ.get("OFSC_TEST_DATE") - - def move_activity_between_buckets_no_error(self): - instance = self.instance - logger = self.logger - - # Do a get resource to verify that the activity is in the right place - response = instance.get_activity(self.aid, response_type=FULL_RESPONSE) - self.assertEqual(response.status_code, 200) - original_resource = response.json()["resourceId"] - - logger.info("...104: Move activity (activity exists)") - data = {"setResource": {"resourceId": "FLUSA"}} - response = instance.move_activity( - 4224010, json.dumps(data), response_type=FULL_RESPONSE - ) - 
self.assertEqual(response.status_code, 204) - - # Do a get resource to verify that the activity is in the right place - response = instance.get_activity(self.aid, response_type=FULL_RESPONSE) - self.assertEqual(response.status_code, 200) - self.assertEqual(response.json()["resourceId"], "FLUSA") - - # Return it to the previous place - data["setResource"]["resourceId"] = original_resource - response = instance.move_activity( - 4224010, json.dumps(data), response_type=FULL_RESPONSE - ) - self.assertEqual(response.status_code, 204) - logger.info("...104: Move activity back") - - def test_004_get_events(self): - instance = self.instance - logger = self.logger - global pp, created_time - created_subscription = self.create_subscription() - details = instance.get_subscription_details( - created_subscription["subscriptionId"], response_type=JSON_RESPONSE - ) - # Moving activity - self.move_activity_between_buckets_no_error() - params = { - "subscriptionId": details["subscriptionId"], - "since": details["createdTime"], - } - logger.info("...210: Get Events") - current_page = "" - raw_response = instance.get_events(params) - response = json.loads(raw_response) - logger.info(self.pp.pformat(response)) - self.assertTrue(response["found"]) - next_page = response["nextPage"] - events = [] - while next_page != current_page: - current_page = next_page - params2 = {"subscriptionId": details["subscriptionId"], "page": next_page} - raw_response = instance.get_events(params2, response_type=FULL_RESPONSE) - response = raw_response.json() - if response["items"]: - events.extend(response["items"]) - next_page = response["nextPage"] - self.assertGreaterEqual(len(events), 2) - for item in events: - logger.info(self.pp.pformat(item)) - if item["eventType"] == "activityMoved": - self.assertEqual(item["activityDetails"]["activityId"], self.aid) - self.delete_subscription(details["subscriptionId"]) - - def test_201_get_resource_no_expand(self): - instance = self.instance - logger = self.logger 
- raw_response = instance.get_resource(55001) - response = json.loads(raw_response) - self.assertEqual(response["resourceInternalId"], 5000001) - - def test_202_get_resource_expand(self): - instance = self.instance - logger = self.logger - raw_response = instance.get_resource(55001, workSkills=True, workZones=True) - response = json.loads(raw_response) - self.assertEqual(response["resourceInternalId"], 5000001) - - def test_203_get_position_history(self): - instance = self.instance - logger = self.logger - raw_response = instance.get_position_history(33001, date=self.date) - response = json.loads(raw_response) - self.assertIsNotNone(response["totalResults"]) - self.assertTrue(response["totalResults"] > 200) - - # Capacity tests - def test_301_get_capacity_areas_simple(self): - instance = self.instance - logger = self.logger - raw_response = instance.get_capacity_areas(response_type=FULL_RESPONSE) - logging.debug(self.pp.pformat(raw_response.json())) - response = raw_response.json() - logger.info(self.pp.pformat(response)) - self.assertIsNotNone(response["items"]) - self.assertEqual(len(response["items"]), 2) - self.assertEqual(response["items"][0]["label"], "CAUSA") - - def test_302_get_capacity_area(self): - instance = self.instance - logger = self.logger - raw_response = instance.get_capacity_area("FLUSA", response_type=FULL_RESPONSE) - logging.debug(self.pp.pformat(raw_response.json())) - response = raw_response.json() - logger.info(self.pp.pformat(response)) - self.assertIsNotNone(response["label"]) - self.assertEqual(response["label"], "FLUSA") - self.assertIsNotNone(response["configuration"]) - self.assertIsNotNone(response["parentLabel"]) - self.assertEqual(response["parentLabel"], "SUNRISE") - - def test_311_get_activity_type_groups(self): - instance = self.instance - logger = self.logger - raw_response = instance.get_activity_type_groups(response_type=FULL_RESPONSE) - logging.debug(self.pp.pformat(raw_response.json())) - response = raw_response.json() - 
logger.info(self.pp.pformat(response)) - self.assertIsNotNone(response["items"]) - self.assertEqual(len(response["items"]), 5) - self.assertEqual(response["totalResults"], 5) - self.assertEqual(response["items"][0]["label"], "customer") - - def test_312_get_activity_type_group(self): - instance = self.instance - logger = self.logger - raw_response = instance.get_activity_type_group( - "customer", response_type=FULL_RESPONSE - ) - logging.debug(self.pp.pformat(raw_response.json())) - response = raw_response.json() - self.assertEqual(raw_response.status_code, 200) - logger.info(self.pp.pformat(response)) - self.assertIsNotNone(response["label"]) - self.assertEqual(response["label"], "customer") - self.assertIsNotNone(response["activityTypes"]) - self.assertEqual(len(response["activityTypes"]), 24) - self.assertEqual(response["activityTypes"][20]["label"], "hvac_emergency") - - def test_313_get_activity_types(self): - instance = self.instance - logger = self.logger - raw_response = instance.get_activity_types(response_type=FULL_RESPONSE) - logging.debug(self.pp.pformat(raw_response.json())) - response = raw_response.json() - self.assertEqual(raw_response.status_code, 200) - logger.info(self.pp.pformat(response)) - self.assertIsNotNone(response["items"]) - self.assertEqual(len(response["items"]), 34) - self.assertEqual(response["totalResults"], 34) - self.assertEqual(response["items"][28]["label"], "crew_assignment") - self.assertEqual(response["items"][12]["label"], "06") - activityType = response["items"][12] - self.assertIsNotNone(activityType["features"]) - self.assertEqual(len(activityType["features"]), 27) - self.assertEqual(activityType["features"]["allowMoveBetweenResources"], True) - - def test_313_get_activity_type(self): - instance = self.instance - logger = self.logger - raw_response = instance.get_activity_type( - "ac_installation", response_type=FULL_RESPONSE - ) - logging.debug(self.pp.pformat(raw_response.json())) - response = raw_response.json() - 
self.assertEqual(raw_response.status_code, 200) - logger.info(self.pp.pformat(response)) - self.assertIsNotNone(response["label"]) - self.assertEqual(response["label"], "ac_installation") - self.assertIsNotNone(response["features"]) - self.assertEqual(len(response["features"]), 27) - self.assertEqual(response["features"]["allowMoveBetweenResources"], True) - - -if __name__ == "__main__": - unittest.main() diff --git a/tests/OFSC_users_test.py b/tests/OFSC_users_test.py deleted file mode 100644 index d5fa027..0000000 --- a/tests/OFSC_users_test.py +++ /dev/null @@ -1,109 +0,0 @@ -import unittest - - -import sys, os -sys.path.append(os.path.abspath('.')) -from ofsc import OFSC, FULL_RESPONSE -import logging -import json -import argparse - - -import pprint - - -class ofscTest(unittest.TestCase): - - def setUp(self): - self.logger = logging.getLogger() - self.pp = pprint.PrettyPrinter(indent=4) - self.logger.setLevel(logging.DEBUG) - #todo add credentials to test run - logging.warning("Here {}".format(os.environ.get('OFSC_CLIENT_ID'))) - self.instance = OFSC(clientID=os.environ.get('OFSC_CLIENT_ID'), secret=os.environ.get('OFSC_CLIENT_SECRET'), companyName=os.environ.get('OFSC_COMPANY')) - self.date = os.environ.get('OFSC_TEST_DATE') - - # Test C.U.01 Get Users - def test_get_users(self): - self.logger.info("...C.U.01 Get Users") - instance = self.instance - logger = self.logger - raw_response = instance.get_users(response_type=FULL_RESPONSE) - logging.debug(self.pp.pformat(raw_response.json())) - response = raw_response.json() - logger.debug(self.pp.pformat(response)) - self.assertIsNotNone(response['totalResults']) - self.assertEqual(response['totalResults'], 306) - self.assertEqual(response['items'][0]['login'], 'admin') - - def test_get_user(self): - self.logger.info("...C.U.02 Get Specific User") - instance = self.instance - logger = self.logger - raw_response = instance.get_user(login="chris", response_type=FULL_RESPONSE) - 
logging.debug(self.pp.pformat(raw_response.json())) - response = raw_response.json() - logger.debug(self.pp.pformat(response)) - self.assertEqual(raw_response.status_code, 200) - self.assertIsNotNone(response['login']) - self.assertEqual(response['login'], 'chris') - self.assertEqual(response['resourceInternalIds'][0], 3000000) - - - def test_update_user(self): - self.logger.info("...C.U.03 Update Specific User") - instance = self.instance - logger = self.logger - raw_response = instance.get_user(login="chris", response_type=FULL_RESPONSE) - logging.debug(self.pp.pformat(raw_response.json())) - response = raw_response.json() - logger.info(self.pp.pformat(response)) - self.assertEqual(raw_response.status_code, 200) - self.assertIsNotNone(response['name']) - self.assertEqual(response['name'], 'Chris') - new_data = {} - new_data['name']='Changed' - raw_response = instance.update_user(login="chris", data=json.dumps(new_data), response_type=FULL_RESPONSE) - logging.info(self.pp.pformat(raw_response.text)) - response = raw_response.json() - self.assertEqual(raw_response.status_code, 200) - self.assertIsNotNone(response['name']) - self.assertEqual(response['name'], 'Changed') - new_data = {} - new_data['name']='Chris' - raw_response = instance.update_user(login="chris", data=json.dumps(new_data), response_type=FULL_RESPONSE) - logging.info(self.pp.pformat(raw_response.text)) - response = raw_response.json() - self.assertEqual(raw_response.status_code, 200) - self.assertIsNotNone(response['name']) - self.assertEqual(response['name'], 'Chris') - - def test_create_user(self): - self.logger.info("...C.U.04 Create User (not existent)") - instance = self.instance - logger = self.logger - new_data = { - "name": "Test Name", - "mainResourceId": "44042", - "language": "en", - "timeZone": "Arizona", - "userType": "technician", - "password": "123123123", - "resources": ["44008", "44035", "44042"] - } - raw_response = instance.create_user(login="test_user", data=json.dumps(new_data), 
response_type=FULL_RESPONSE) - logging.debug(self.pp.pformat(raw_response.json())) - response = raw_response.json() - logger.info(self.pp.pformat(response)) - self.assertEqual(raw_response.status_code, 200) - self.assertIsNotNone(response['name']) - self.assertEqual(response['name'], 'Test Name') - - raw_response = instance.delete_user(login="test_user", response_type=FULL_RESPONSE) - logging.debug(self.pp.pformat(raw_response.json())) - response = raw_response.json() - logger.info(self.pp.pformat(response)) - self.assertEqual(raw_response.status_code, 200) - -if __name__ == '__main__': - unittest.main() diff --git a/tests/conftest.py b/tests/conftest.py index e6addfb..501e6cf 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -7,7 +7,7 @@ import requests from faker import Faker -from ofsc import OFSC +from ofsc import FULL_RESPONSE, OFSC @pytest.fixture(scope="module") @@ -35,6 +35,20 @@ def instance_with_token(): return instance +@pytest.fixture(scope="module") +def clear_subscriptions(instance): + response = instance.core.get_subscriptions(response_type=FULL_RESPONSE) + if response.status_code == 200 and response.json()["totalResults"] > 0: + for subscription in response.json()["items"]: + logging.info(subscription) + instance.core.delete_subscription(subscription["subscriptionId"]) + yield + response = instance.core.get_subscriptions(response_type=FULL_RESPONSE) + if response.status_code == 200 and response.json()["totalResults"] > 0: + for subscription in response.json()["items"]: + instance.core.delete_subscription(subscription["subscriptionId"]) + + @pytest.fixture def current_date(): return os.environ.get("OFSC_TEST_DATE") @@ -71,6 +85,71 @@ def demo_data(): "expected_items": 758, "expected_postalcode": "55001", } - } + }, + "24A WMP 02 Demo_Services.E360.Supremo.Chapter8.ESM . 
2024-03-01 22:20": { + "get_all_activities": { + "bucket_id": "CAUSA", + "expected_id": 3960470, + "expected_items": 698, + "expected_postalcode": "55001", + }, + "metadata": { + "expected_workskills": 7, + "expected_workskill_conditions": 8, + "expected_resource_types": 10, + "expected_properties": 463, + "expected_activity_type_groups": 5, + "expected_activity_types": 35, + "expected_activity_types_customer": 25, + "expected_capacity_areas": [ + { + "label": "CAUSA", + "status": "active", + "type": "area", + "parentLabel": "SUNRISE", + }, + { + "label": "FLUSA", + "status": "active", + "type": "area", + "parentLabel": "SUNRISE", + }, + { + "label": "South Florida", + "status": "active", + "type": "area", + "parentLabel": "FLUSA", + }, + {"label": "SUNRISE", "status": "active", "type": "group"}, + { + "label": "routing_old", + "status": "inactive", + "type": "area", + "parentLabel": "FLUSA", + }, + ], + "expected_capacity_categories": { + "EST": {"label": "EST", "name": "Estimate"}, + "RES": {"label": "RES", "name": "Residential"}, + "COM": {"label": "COM", "name": "Commercial"}, + }, + "expected_inventory_types": { + "count": 23, + "demo": { + "label": "FIT5000", + "status": "active", + }, + }, + }, + "get_file_property": { + "activity_id": 3954799, # Note: manual addition + }, + "get_users": { + "totalResults": 322, + }, + "events": {"move_from": "FLUSA", "move_to": "CAUSA", "move_id": 4224268}, + }, } - return demo_data["23B Service Update 1"] + return demo_data[ + "24A WMP 02 Demo_Services.E360.Supremo.Chapter8.ESM . 
2024-03-01 22:20" + ] diff --git a/tests/metadata/test_activity_groups_types.py b/tests/metadata/test_activity_groups_types.py new file mode 100644 index 0000000..2c4d1de --- /dev/null +++ b/tests/metadata/test_activity_groups_types.py @@ -0,0 +1,159 @@ +import json +import logging +from pathlib import Path + +from requests import Response + +from ofsc import OFSC +from ofsc.common import FULL_RESPONSE, OBJ_RESPONSE, TEXT_RESPONSE +from ofsc.models import ( + ActivityType, + ActivityTypeGroup, + ActivityTypeGroupListResponse, + ActivityTypeList, + ActivityTypeListResponse, + Condition, + Property, + SharingEnum, + Translation, + TranslationList, + Workskill, + WorkskillCondition, + WorskillConditionList, +) + + +def test_activity_type_group_model(instance): + instance.core.config.auto_model = True + metadata_response = instance.metadata.get_activity_type_groups( + response_type=OBJ_RESPONSE + ) + assert isinstance( + metadata_response, ActivityTypeGroupListResponse + ), f"Response is {type(metadata_response)}" + for item in metadata_response.items: + assert isinstance(item, ActivityTypeGroup) + + +def test_get_activity_type_groups(instance, pp, demo_data): + expected_activity_type_groups = demo_data.get("metadata").get( + "expected_activity_type_groups" + ) + raw_response = instance.metadata.get_activity_type_groups( + response_type=FULL_RESPONSE + ) + assert raw_response.status_code == 200 + logging.debug(pp.pformat(raw_response.json())) + response = raw_response.json() + logging.debug(pp.pformat(response)) + assert response["items"] is not None + assert len(response["items"]) == expected_activity_type_groups + assert response["totalResults"] == expected_activity_type_groups + assert response["items"][0]["label"] == "customer" + + +def test_get_activity_type_group(instance, demo_data, pp): + expected_activity_types = demo_data.get("metadata").get( + "expected_activity_types_customer" + ) + raw_response = instance.metadata.get_activity_type_group( + "customer", 
response_type=FULL_RESPONSE + ) + logging.debug(pp.pformat(raw_response.json())) + response = raw_response.json() + assert raw_response.status_code == 200 + logging.debug(pp.pformat(response)) + assert response["label"] is not None + assert response["label"] == "customer" + assert response["activityTypes"] is not None + assert len(response["activityTypes"]) == expected_activity_types + assert response["activityTypes"][20]["label"] == "fitness_emergency" + + +# Activity Types + + +def test_get_activity_types_auto_model_full(instance, demo_data, pp): + expected_activity_types = demo_data.get("metadata").get("expected_activity_types") + raw_response = instance.metadata.get_activity_types(response_type=FULL_RESPONSE) + logging.debug(pp.pformat(raw_response.json())) + response = raw_response.json() + assert raw_response.status_code == 200 + logging.debug(pp.pformat(response)) + assert response["items"] is not None + assert len(response["items"]) == expected_activity_types + assert response["totalResults"] == expected_activity_types + assert response["items"][28]["label"] == "crew_assignment" + assert response["items"][12]["label"] == "06" + activityType = response["items"][12] + assert activityType["features"] is not None + assert len(activityType["features"]) == 27 + assert activityType["features"]["allowMoveBetweenResources"] == True + + +def test_get_activity_types_auto_model_obj(instance, demo_data, pp): + instance.auto_model = True + expected_activity_types = demo_data.get("metadata").get("expected_activity_types") + response = instance.metadata.get_activity_types(offset=0, limit=30) + logging.debug(pp.pformat(response)) + assert isinstance(response, ActivityTypeListResponse) + + assert response.items is not None + assert len(response.items) == 30 + assert isinstance(response.items[0], ActivityType) + assert response.totalResults == expected_activity_types + assert response.items[28].label == "crew_assignment" + assert response.items[12].label == "06" + 
activityType = response.items[12] + assert activityType.features is not None + assert activityType.features.allowMoveBetweenResources == True + + +def test_get_activity_type_auto_model_full(instance, demo_data, pp): + raw_response = instance.metadata.get_activity_type( + "fitness_emergency", response_type=FULL_RESPONSE + ) + logging.debug(pp.pformat(raw_response.json())) + response = raw_response.json() + assert raw_response.status_code == 200 + logging.debug(pp.pformat(response)) + assert response["label"] is not None + assert response["label"] == "fitness_emergency" + assert response["features"] is not None + assert len(response["features"]) == 27 + assert response["features"]["allowMoveBetweenResources"] == True + + +def test_activity_types_no_model_list(instance): + limit = 10 + instance.core.config.auto_model = False + metadata_response = instance.metadata.get_activity_types( + response_type=OBJ_RESPONSE, offset=0, limit=limit + ) + assert isinstance(metadata_response, dict) + logging.debug(json.dumps(metadata_response, indent=4)) + objList = ActivityTypeList.model_validate(metadata_response["items"]) + ## Iterate through the list and validate each item + for idx, obj in enumerate(objList): + assert type(obj) == ActivityType + assert obj.label == metadata_response["items"][idx]["label"] + new_obj = ActivityType.model_validate( + instance.metadata.get_activity_type( + label=obj.label, response_type=OBJ_RESPONSE + ) + ) + assert new_obj.label == obj.label + + +def test_activity_type_no_model_simple(instance): + instance.core.config.auto_model = False + metadata_response = instance.metadata.get_activity_type( + label="01", response_type=OBJ_RESPONSE + ) + assert isinstance(metadata_response, dict) + logging.debug(json.dumps(metadata_response, indent=4)) + obj = ActivityType.model_validate(metadata_response) + assert obj.label == metadata_response["label"] + assert obj.translations == TranslationList.model_validate( + metadata_response["translations"] + ) diff 
--git a/tests/metadata/test_capacity_areas.py b/tests/metadata/test_capacity_areas.py new file mode 100644 index 0000000..5aeba2c --- /dev/null +++ b/tests/metadata/test_capacity_areas.py @@ -0,0 +1,108 @@ +import logging + +import pytest + +from ofsc.common import FULL_RESPONSE +from ofsc.models import CapacityAreaListResponse + + +# Capacity tests +def test_get_capacity_areas_no_model_simple(instance, pp, demo_data): + capacity_areas = demo_data.get("metadata").get("expected_capacity_areas") + raw_response = instance.metadata.get_capacity_areas(response_type=FULL_RESPONSE) + assert raw_response.status_code == 200 + logging.debug(pp.pformat(raw_response.json())) + response = raw_response.json() + logging.debug(pp.pformat(response)) + assert response["items"] is not None + assert len(response["items"]) == len( + capacity_areas + ), f"Received {[i['label'] for i in response['items']]}" + assert response["items"][0]["label"] == "CAUSA" + + +def test_get_capacity_areas_model_no_parameters(instance, pp, demo_data): + capacity_areas = demo_data.get("metadata").get("expected_capacity_areas") + metadata_response = instance.metadata.get_capacity_areas() + assert isinstance( + metadata_response, CapacityAreaListResponse + ), f"Expected a CapacityAreaListResponse received {type(metadata_response)}" + assert len(metadata_response.items) == len(capacity_areas) + assert metadata_response.hasMore is False + assert metadata_response.totalResults == len(capacity_areas) + + +def test_get_capacity_areas_model_with_parameters(instance, pp, demo_data): + capacity_areas = demo_data.get("metadata").get("expected_capacity_areas") + metadata_response = instance.metadata.get_capacity_areas( + activeOnly=False, + areasOnly=True, + expandParent=True, + fields=["label", "status", "parent.label"], + ) + assert isinstance( + metadata_response, CapacityAreaListResponse + ), f"Expected a CapacityAreaListResponse received {type(metadata_response)}" + expected_result = len([area for area in 
capacity_areas if area["type"] == "area"]) + assert len(metadata_response.items) == expected_result + assert metadata_response.hasMore is False + assert metadata_response.totalResults == expected_result + + metadata_response = instance.metadata.get_capacity_areas( + activeOnly=False, + areasOnly=True, + expandParent=False, + fields=["label", "status", "parent.label"], + ) + assert isinstance( + metadata_response, CapacityAreaListResponse + ), f"Expected a CapacityAreaListResponse received {type(metadata_response)}" + expected_result = len([area for area in capacity_areas if area["type"] == "area"]) + assert len(metadata_response.items) == expected_result + assert metadata_response.hasMore is False + assert metadata_response.totalResults == expected_result + + metadata_response = instance.metadata.get_capacity_areas( + activeOnly=True, + areasOnly=True, + expandParent=False, + fields=["label", "status", "parent.label"], + ) + assert isinstance( + metadata_response, CapacityAreaListResponse + ), f"Expected a CapacityAreaListResponse received {type(metadata_response)}" + expected_result = len( + [ + area + for area in capacity_areas + if (area["type"] == "area" and area["status"] == "active") + ] + ) + assert len(metadata_response.items) == expected_result + assert metadata_response.hasMore is False + assert metadata_response.totalResults == expected_result + + +def test_get_capacity_area_no_model(instance, pp): + raw_response = instance.metadata.get_capacity_area( + "FLUSA", response_type=FULL_RESPONSE + ) + assert raw_response.status_code == 200 + logging.debug(pp.pformat(raw_response.json())) + response = raw_response.json() + logging.debug(pp.pformat(response)) + assert response["label"] is not None + assert response["label"] == "FLUSA" + assert response["configuration"] is not None + assert response["parentLabel"] is not None + assert response["parentLabel"] == "SUNRISE" + + +def test_get_capacity_area_model(instance, pp, demo_data): + metadata_response = 
instance.metadata.get_capacity_area("FLUSA") + assert metadata_response.label == "FLUSA" + assert metadata_response.configuration is not None + assert metadata_response.parentLabel is not None + assert metadata_response.parentLabel == "SUNRISE" + assert metadata_response.status == "active" + assert metadata_response.type == "area" diff --git a/tests/metadata/test_capacity_categories.py b/tests/metadata/test_capacity_categories.py new file mode 100644 index 0000000..f4b6b02 --- /dev/null +++ b/tests/metadata/test_capacity_categories.py @@ -0,0 +1,28 @@ +from ofsc.models import CapacityCategory, CapacityCategoryListResponse + + +def test_get_capacity_categories_model_no_parameters(instance, pp, demo_data): + capacity_categories = demo_data.get("metadata").get("expected_capacity_categories") + metadata_response = instance.metadata.get_capacity_categories() + assert isinstance( + metadata_response, CapacityCategoryListResponse + ), f"Expected a CapacityCategoryListResponse received {type(metadata_response)}" + assert len(metadata_response.items) == len( + capacity_categories.keys() + ), f"Expected {len(capacity_categories.keys())} received {metadata_response.totalResults}" + assert metadata_response.hasMore is False + assert metadata_response.totalResults == len(capacity_categories.keys()) + for category in metadata_response.items: + assert isinstance(category, CapacityCategory) + assert category.label in capacity_categories.keys() + + +def test_get_capacity_category(instance, pp, demo_data): + capacity_categories = demo_data.get("metadata").get("expected_capacity_categories") + for category in capacity_categories.keys(): + metadata_response = instance.metadata.get_capacity_category(category) + assert isinstance( + metadata_response, CapacityCategory + ), f"Expected a CapacityCategory received {type(metadata_response)}" + assert metadata_response.label == category + assert metadata_response.name == capacity_categories[category].get("name") diff --git 
a/tests/metadata/test_inventory_types.py b/tests/metadata/test_inventory_types.py new file mode 100644 index 0000000..d288522 --- /dev/null +++ b/tests/metadata/test_inventory_types.py @@ -0,0 +1,40 @@ +import logging + +from ofsc.common import FULL_RESPONSE, OBJ_RESPONSE +from ofsc.models import InventoryType, InventoryTypeListResponse + + +def test_inventory_types_model(instance): + instance.core.config.auto_model = True + metadata_response = instance.metadata.get_inventory_types( + response_type=OBJ_RESPONSE + ) + assert isinstance( + metadata_response, InventoryTypeListResponse + ), f"Response is {type(metadata_response)}" + for item in metadata_response.items: + assert isinstance(item, InventoryType) + + +def test_inventory_types_demo(instance, demo_data): + metadata_response = instance.metadata.get_inventory_types( + response_type=OBJ_RESPONSE + ) + assert metadata_response.items, "No inventory types found" + assert metadata_response.totalResults > 0, "No inventory types found" + assert len(metadata_response.items) == demo_data.get("metadata").get( + "expected_inventory_types" + ).get( + "count" + ), f"Expected {demo_data.get('metadata').get('expected_inventory_types').get('count')} inventory types, got {len(metadata_response.items)}" + + +def test_inventory_types_create_replace(instance, demo_data, request_logging): + data = demo_data.get("metadata").get("expected_inventory_types").get("demo") + inv_type = instance.metadata.get_inventory_type( + data.get("label"), response_type=OBJ_RESPONSE + ) + assert isinstance(inv_type, InventoryType) + assert inv_type.label == data.get("label") + logging.warning(inv_type.model_dump_json()) + assert False diff --git a/tests/metadata/test_properties.py b/tests/metadata/test_properties.py new file mode 100644 index 0000000..a69dc30 --- /dev/null +++ b/tests/metadata/test_properties.py @@ -0,0 +1,90 @@ +import logging + +from ofsc import OFSC +from ofsc.common import FULL_RESPONSE +from ofsc.models import Property, 
Translation, TranslationList + + +def test_get_property(instance): + metadata_response = instance.metadata.get_property( + "XA_CASE_ACCOUNT", response_type=FULL_RESPONSE + ) + assert metadata_response.status_code == 200 + response = metadata_response.json() + logging.debug(response) + assert response["label"] == "XA_CASE_ACCOUNT" + assert response["type"] == "string" + assert response["entity"] == "activity" + property = Property.model_validate(response) + + +def test_get_properties(instance, demo_data): + metadata_response = instance.metadata.get_properties(response_type=FULL_RESPONSE) + expected_properties = demo_data.get("metadata").get("expected_properties") + assert metadata_response.status_code == 200 + response = metadata_response.json() + assert response["totalResults"] + assert response["totalResults"] == expected_properties # 22.D + assert response["items"][0]["label"] == "ITEM_NUMBER" + + +def test_create_replace_property(instance: OFSC, faker): + property = Property.model_validate( + { + "label": faker.pystr(), + "type": "string", + "entity": "activity", + "name": faker.pystr(), + "translations": [], + "gui": "text", + } + ) + en_name = Translation(name=property.name) + property.translations = TranslationList([en_name]) + metadata_response = instance.metadata.create_or_replace_property( + property, response_type=FULL_RESPONSE + ) + logging.debug(metadata_response.json()) + assert metadata_response.status_code < 299, metadata_response.json() + + metadata_response = instance.metadata.get_property( + property.label, response_type=FULL_RESPONSE + ) + assert metadata_response.status_code < 299 + response = metadata_response.json() + assert response["name"] == property.name + assert response["type"] == property.type + assert response["entity"] == property.entity + assert response.get("translations")[0]["name"] == property.translations[0].name + property = Property.model_validate(response) + + +def test_create_replace_property_noansi(instance: OFSC, 
request_logging, faker): + property = Property.model_validate( + { + "label": faker.pystr(), + "type": "string", + "entity": "activity", + "name": "césped", + "translations": [], + "gui": "text", + } + ) + en_name = Translation(name=property.name) + property.translations = TranslationList([en_name]) + metadata_response = instance.metadata.create_or_replace_property( + property, response_type=FULL_RESPONSE + ) + logging.debug(metadata_response.json()) + assert metadata_response.status_code < 299, metadata_response.json() + + metadata_response = instance.metadata.get_property( + property.label, response_type=FULL_RESPONSE + ) + assert metadata_response.status_code < 299 + response = metadata_response.json() + assert response["name"] == property.name + assert response["type"] == property.type + assert response["entity"] == property.entity + assert response.get("translations")[0]["name"] == property.translations[0].name + property = Property.model_validate(response) diff --git a/tests/test_base.py b/tests/test_base.py index e710593..f5ecbe5 100644 --- a/tests/test_base.py +++ b/tests/test_base.py @@ -2,20 +2,73 @@ import requests -from ofsc.common import FULL_RESPONSE, JSON_RESPONSE, TEXT_RESPONSE +from ofsc.common import FULL_RESPONSE, OBJ_RESPONSE, TEXT_RESPONSE +from ofsc.exceptions import OFSAPIException +from ofsc.models import ( + ActivityTypeGroup, + ActivityTypeGroupList, + ActivityTypeGroupListResponse, +) -def test_wrapper(instance): - logging.info("...301: Testing wrapper") +def test_wrapper_generic(instance): raw_response = instance.core.get_subscriptions(response_type=FULL_RESPONSE) assert isinstance(raw_response, requests.Response) assert raw_response.status_code == 200 response = raw_response.json() assert "totalResults" in response.keys() - json_response = instance.core.get_subscriptions(response_type=JSON_RESPONSE) + json_response = instance.core.get_subscriptions(response_type=OBJ_RESPONSE) assert isinstance(json_response, dict) assert "totalResults" 
def test_wrapper_with_error(instance, pp):
    """auto_raise=False returns the error payload; auto_raise=True raises OFSAPIException."""
    import pytest  # local import: pytest is not otherwise needed at module level here

    instance.core.config.auto_raise = False
    raw_response = instance.core.get_activity("123456", response_type=FULL_RESPONSE)
    assert isinstance(raw_response, requests.Response)
    assert raw_response.status_code == 404
    raw_response = instance.core.get_activity("123456", response_type=OBJ_RESPONSE)
    assert isinstance(raw_response, dict)
    assert raw_response["status"] == "404"
    instance.core.config.auto_raise = True
    raw_response = instance.core.get_activity("123456", response_type=FULL_RESPONSE)
    assert isinstance(raw_response, requests.Response)
    assert raw_response.status_code == 404

    # Fix: the previous try/except silently PASSED when no exception was raised.
    # pytest.raises fails the test unless OFSAPIException actually comes out.
    with pytest.raises(OFSAPIException) as excinfo:
        instance.core.get_activity("123456", response_type=OBJ_RESPONSE)
    assert excinfo.value.status_code == 404


def test_wrapper_with_model_list(instance, demo_data):
    """With auto_model on, list endpoints return the typed list-response model."""
    instance.core.config.auto_model = True
    raw_response = instance.metadata.get_activity_type_groups(
        response_type=FULL_RESPONSE
    )
    assert isinstance(raw_response, requests.Response)
    assert raw_response.status_code == 200

    json_response = instance.metadata.get_activity_type_groups()
    assert isinstance(json_response, ActivityTypeGroupListResponse)


def test_wrapper_with_model_single(instance):
    """With auto_model on, single-entity endpoints return the entity model."""
    instance.core.config.auto_model = True
    raw_response = instance.metadata.get_activity_type_group("customer")
    assert isinstance(raw_response, ActivityTypeGroup)


def test_wrapper_without_model(instance):
    """With auto_model off, endpoints fall back to returning plain dicts."""
    # Fix: the flag lives on the config object (instance.core.config.auto_model),
    # matching every other test; setting instance.auto_model had no effect.
    instance.core.config.auto_model = False
    raw_response = instance.metadata.get_activity_type_group("customer")
    assert isinstance(raw_response, dict)
    assert "label" in raw_response.keys()
    assert "name" in raw_response.keys()
assert "name" in raw_response.keys() diff --git a/tests/test_core_activities.py b/tests/test_core_activities.py index 74d2d9b..f66dc03 100644 --- a/tests/test_core_activities.py +++ b/tests/test_core_activities.py @@ -1,4 +1,5 @@ import logging +import os from datetime import date, timedelta import pytest @@ -7,8 +8,21 @@ from ofsc.models import BulkUpdateRequest, BulkUpdateResponse +# Test A.01 Get Activity Info (activity exists) +def test_get_activity(instance): + raw_response = instance.core.get_activity(3951935, response_type=FULL_RESPONSE) + response = raw_response.json() + logging.debug(response) + assert response["customerNumber"] == "019895700" + + +# Test A.02 Get Activity Info (activity does not exist) +def test_get_activity_error(instance): + raw_response = instance.core.get_activity(99999, response_type=FULL_RESPONSE) + assert raw_response.status_code == 404 + + def test_search_activities_001(instance): - logging.info("...101: Search Activities (activity exists)") params = { "searchInField": "customerPhone", "searchForValue": "555760757294", @@ -16,17 +30,16 @@ def test_search_activities_001(instance): "dateTo": "2099-01-01", } response = instance.core.search_activities(params, response_type=FULL_RESPONSE) - logging.info(response.json()) + logging.debug(response.json()) assert response.status_code == 200 assert response.json()["totalResults"] == 2 # 202206 Modified in demo 22B # test A.06 Get Activities def test_get_activities_no_offset(instance, current_date, demo_data, request_logging): - logging.info("...102: Get activities (no offset)") start = date.fromisoformat(current_date) - timedelta(days=5) end = start + timedelta(days=20) - logging.info(f"{start} {end}") + logging.debug(f"{start} {end}") params = { "dateFrom": start.strftime("%Y-%m-%d"), "dateTo": end.strftime("%Y-%m-%d"), @@ -55,10 +68,9 @@ def test_get_activities_no_offset(instance, current_date, demo_data, request_log def test_get_activities_offset(instance, current_date, demo_data, 
request_logging): - logging.info("...103: Get activities (offset)") start = date.fromisoformat(current_date) - timedelta(days=5) end = start + timedelta(days=20) - logging.info(f"{start} {end}") + logging.debug(f"{start} {end}") params = { "dateFrom": start.strftime("%Y-%m-%d"), "dateTo": end.strftime("%Y-%m-%d"), @@ -86,7 +98,6 @@ def test_get_activities_offset(instance, current_date, demo_data, request_loggin def test_model_bulk_update_simple(instance, request_logging): - logging.info("...104. Bulk Update") data = { "updateParameters": { "identifyActivityBy": "apptNumber", @@ -151,8 +162,58 @@ def test_model_bulk_update_simple(instance, request_logging): } ], } - input = BulkUpdateRequest.parse_obj(data) + input = BulkUpdateRequest.model_validate(data) raw_response = instance.core.bulk_update(input, response_type=FULL_RESPONSE) assert raw_response.status_code == 200 response = raw_response.json() - output = BulkUpdateResponse.parse_obj(response) + output = BulkUpdateResponse.model_validate(response) + + +# Test C.P.10 Get File Property 01 +def test_get_file_property_01(instance, pp, demo_data): + activity_id = demo_data.get("get_file_property").get("activity_id") + # Get all properties from the activity + raw_response = instance.core.get_activity(activity_id, response_type=FULL_RESPONSE) + assert raw_response.status_code == 200, raw_response.json() + response = raw_response.json() + # verify that the file is there + assert response.get("csign") is not None + assert response.get("csign").get("links") is not None + logging.info(pp.pformat(response.get("csign").get("links")[0].get("href"))) + raw_response = instance.core.get_file_property( + activityId=activity_id, + label="csign", + mediaType="*/*", + response_type=FULL_RESPONSE, + ) + assert raw_response.status_code == 200, raw_response.json() + logging.info(pp.pformat(raw_response.json())) + response = raw_response.json() + logging.info(pp.pformat(response)) + assert response["mediaType"] is not None + assert 
response["mediaType"] == "image/png" + assert response["name"] == "signature.png" + + +# Test C.P.10 Get File Property 02 +def test_get_file_property_02(instance, pp, demo_data): + logging.info("...C.P.02 Get File Property content") + activity_id = demo_data.get("get_file_property").get("activity_id") + metadata_response = instance.core.get_file_property( + activityId=activity_id, + label="csign", + mediaType="*/*", + response_type=FULL_RESPONSE, + ) + logging.debug(pp.pformat(metadata_response.json())) + response = metadata_response.json() + raw_response = instance.core.get_file_property( + activityId=activity_id, + label="csign", + mediaType="image/png", + response_type=FULL_RESPONSE, + ) + with open(os.path.join(os.getcwd(), response["name"]), "wb") as fd: + fd.write(raw_response.content) + assert response["name"] == "signature.png" + # TODO: Assert the size of the file diff --git a/tests/test_core_resources.py b/tests/test_core_resources.py index 9a1499e..f6622b0 100644 --- a/tests/test_core_resources.py +++ b/tests/test_core_resources.py @@ -1,11 +1,14 @@ import json import logging +import pytest + from ofsc.common import FULL_RESPONSE -def test_create_resource(instance, faker, request_logging): - new_data = { +@pytest.fixture +def new_data(faker): + return { "parentResourceId": "SUNRISE", "resourceType": "BK", "name": faker.name(), @@ -13,6 +16,9 @@ def test_create_resource(instance, faker, request_logging): "timeZone": "Arizona", "externalId": faker.pystr(), } + + +def test_create_resource(instance, new_data, request_logging): raw_response = instance.core.create_resource( resourceId=new_data["externalId"], data=json.dumps(new_data), @@ -24,15 +30,7 @@ def test_create_resource(instance, faker, request_logging): assert response["name"] == new_data["name"] -def test_create_resource_dict(instance, faker, request_logging): - new_data = { - "parentResourceId": "SUNRISE", - "resourceType": "BK", - "name": faker.name(), - "language": "en", - "timeZone": "Arizona", - 
"externalId": faker.pystr(), - } +def test_create_resource_dict(instance, new_data, request_logging): raw_response = instance.core.create_resource( resourceId=new_data["externalId"], data=new_data, @@ -42,15 +40,7 @@ def test_create_resource_dict(instance, faker, request_logging): assert raw_response.status_code >= 299 -def test_create_resource_from_obj_dict(instance, faker, request_logging): - new_data = { - "parentResourceId": "SUNRISE", - "resourceType": "BK", - "name": faker.name(), - "language": "en", - "timeZone": "Arizona", - "externalId": faker.pystr(), - } +def test_create_resource_from_obj_dict(instance, new_data, request_logging): raw_response = instance.core.create_resource_from_obj( resourceId=new_data["externalId"], data=new_data, @@ -58,3 +48,86 @@ def test_create_resource_from_obj_dict(instance, faker, request_logging): ) response = raw_response.json() assert raw_response.status_code == 200 + + +def test_get_resource_no_expand(instance, demo_data): + raw_response = instance.core.get_resource(55001, response_type=FULL_RESPONSE) + assert raw_response.status_code == 200 + logging.debug(raw_response.json()) + response = raw_response.json() + assert response["resourceInternalId"] == 5000001 + + +def test_get_resource_expand(instance, demo_data): + raw_response = instance.core.get_resource( + 55001, workSkills=True, workZones=True, response_type=FULL_RESPONSE + ) + assert raw_response.status_code == 200 + response = raw_response.json() + assert response["resourceInternalId"] == 5000001 + + +def test_get_position_history(instance, demo_data, current_date): + raw_response = instance.core.get_position_history( + 33001, date=current_date, response_type=FULL_RESPONSE + ) + assert raw_response.status_code == 200 + response = raw_response.json() + assert response["totalResults"] is not None + assert response["totalResults"] > 200 + + +def test_get_resource_route_nofields(instance, pp, demo_data, current_date): + raw_response = instance.core.get_resource_route( + 
33001, date=current_date, response_type=FULL_RESPONSE + ) + logging.debug(pp.pformat(raw_response.json())) + assert raw_response.status_code == 200 + logging.debug(pp.pformat(raw_response.json())) + response = raw_response.json() + assert response["totalResults"] == 13 + + +def test_get_resource_route_twofields(instance, current_date, pp): + raw_response = instance.core.get_resource_route( + 33001, + date=current_date, + activityFields="activityId,activityType", + response_type=FULL_RESPONSE, + ) + logging.debug(pp.pformat(raw_response.json())) + assert raw_response.status_code == 200 + response = raw_response.json() + assert response["totalResults"] == 13 + + +def test_get_resource_descendants_noexpand(instance): + raw_response = instance.core.get_resource_descendants( + "FLUSA", response_type=FULL_RESPONSE + ) + assert raw_response.status_code == 200 + response = raw_response.json() + assert response["totalResults"] == 37 + + +def test_get_resource_descendants_expand(instance): + raw_response = instance.core.get_resource_descendants( + "FLUSA", + workSchedules=True, + workZones=True, + workSkills=True, + response_type=FULL_RESPONSE, + ) + assert raw_response.status_code == 200 + response = raw_response.json() + assert response["totalResults"] == 37 + + +def test_get_resource_descendants_noexpand_fields(instance, pp): + raw_response = instance.core.get_resource_descendants( + "FLUSA", resourceFields="resourceId,phone", response_type=FULL_RESPONSE + ) + assert raw_response.status_code == 200 + response = raw_response.json() + logging.debug(pp.pformat(response)) + assert response["totalResults"] == 37 diff --git a/tests/test_core_subscriptions.py b/tests/test_core_subscriptions.py index 14c72ab..1825100 100644 --- a/tests/test_core_subscriptions.py +++ b/tests/test_core_subscriptions.py @@ -1,11 +1,11 @@ import json import logging +import time from ofsc.common import FULL_RESPONSE def test_get_subscriptions(instance): - logging.info("...301: Get Subscriptions") 
raw_response = instance.core.get_subscriptions(response_type=FULL_RESPONSE) assert raw_response.status_code == 200 response = raw_response.json() @@ -13,7 +13,6 @@ def test_get_subscriptions(instance): def test_get_subscriptions_with_token(instance_with_token): - logging.info("...302: Get Subscriptions using token") raw_response = instance_with_token.core.get_subscriptions( response_type=FULL_RESPONSE ) @@ -24,7 +23,6 @@ def test_get_subscriptions_with_token(instance_with_token): def test_create_delete_subscription(instance): data = {"events": ["activityMoved"], "title": "Simple Subscription"} - logging.info("...303: Create Subscription") raw_response = instance.core.create_subscription( json.dumps(data), response_type=FULL_RESPONSE ) @@ -33,7 +31,6 @@ def test_create_delete_subscription(instance): assert "subscriptionId" in response.keys() id = response["subscriptionId"] - logging.info("...304: Subscription details") raw_response = instance.core.get_subscription_details( id, response_type=FULL_RESPONSE ) @@ -43,6 +40,76 @@ def test_create_delete_subscription(instance): assert response["subscriptionId"] == id assert response["events"] == data["events"] - logging.info("...305: Delete Subscription") + response = instance.core.delete_subscription(id, response_type=FULL_RESPONSE) + assert response.status_code == 204 + + +def test_get_events(instance, pp, demo_data, clear_subscriptions): + move_data = demo_data.get("events") + + # Creating subscription + data = {"events": ["activityMoved"], "title": "Simple Subscription"} + raw_response = instance.core.create_subscription( + json.dumps(data), response_type=FULL_RESPONSE + ) + assert raw_response.status_code == 200 + response = raw_response.json() + assert "subscriptionId" in response.keys() + id = response["subscriptionId"] + + # Get creation time + params = {"subscriptionId": id} + raw_response = instance.core.get_subscription_details( + id, response_type=FULL_RESPONSE + ) + response = raw_response.json() + assert 
"subscriptionId" in response.keys() + assert response["subscriptionId"] == id + created_time = response["createdTime"] + logging.info(response) + + # Moving activity + data = {"setResource": {"resourceId": move_data["move_to"]}} + raw_response = instance.core.move_activity( + move_data["move_id"], json.dumps(data), response_type=FULL_RESPONSE + ) + assert raw_response.status_code == 204, raw_response.json() + + params = { + "subscriptionId": id, + "since": created_time, + } + current_page = "" + raw_response = instance.core.get_events(params) + response = json.loads(raw_response) + assert response["found"] + next_page = response["nextPage"] + events = [] + time.sleep(3) + while next_page != current_page: + logging.info(f"Current page: {current_page}, Next page: {next_page}") + current_page = next_page + params2 = {"subscriptionId": id, "page": next_page} + raw_response = instance.core.get_events(params2, response_type=FULL_RESPONSE) + response = raw_response.json() + if response["items"]: + events.extend(response["items"]) + next_page = response["nextPage"] + logging.info( + f"Current page: {current_page}, Next page: {next_page}, {response}" + ) + assert len(events) >= 1 + for item in events: + if item["eventType"] == "activityMoved": + assert item["activityDetails"]["activityId"] == move_data["move_id"] + + # Moving activity back + data = {"setResource": {"resourceId": move_data["move_from"]}} + raw_response = instance.core.move_activity( + move_data["move_id"], json.dumps(data), response_type=FULL_RESPONSE + ) + assert raw_response.status_code == 204, raw_response.json() + + # Deleting subscription response = instance.core.delete_subscription(id, response_type=FULL_RESPONSE) assert response.status_code == 204 diff --git a/tests/test_core_users.py b/tests/test_core_users.py new file mode 100644 index 0000000..64fe999 --- /dev/null +++ b/tests/test_core_users.py @@ -0,0 +1,87 @@ +import os +import sys +import unittest + +sys.path.append(os.path.abspath(".")) 
# Fix: dropped the sys.path.append hack and the unused imports
# (unittest, argparse, pprint, os, sys) — pytest handles discovery.
import json
import logging

from ofsc import FULL_RESPONSE


# Test C.U.01 Get Users
def test_get_users(instance, demo_data, pp):
    """The user list has the expected total and 'admin' as the first login."""
    raw_response = instance.core.get_users(response_type=FULL_RESPONSE)
    assert raw_response.status_code == 200  # robustness: verify before parsing
    response = raw_response.json()
    logging.debug(pp.pformat(response))
    assert response["totalResults"] is not None
    assert response["totalResults"] == demo_data.get("get_users").get("totalResults")
    assert response["items"][0]["login"] == "admin"


def test_get_user(instance, demo_data, pp):
    """A single user can be fetched by login with its linked resource ids."""
    raw_response = instance.core.get_user(login="chris", response_type=FULL_RESPONSE)
    response = raw_response.json()
    logging.debug(pp.pformat(response))
    assert raw_response.status_code == 200
    assert response["login"] is not None
    assert response["login"] == "chris"
    assert response["resourceInternalIds"][0] == 3000000


def _set_user_name(instance, login, name):
    """Update a user's name and assert the API echoes the new value back."""
    raw_response = instance.core.update_user(
        login=login, data=json.dumps({"name": name}), response_type=FULL_RESPONSE
    )
    assert raw_response.status_code == 200
    response = raw_response.json()
    assert response["name"] is not None
    assert response["name"] == name


def test_update_user(instance, demo_data, pp):
    """Updating a user's name persists, then restore the original demo value."""
    raw_response = instance.core.get_user(login="chris", response_type=FULL_RESPONSE)
    assert raw_response.status_code == 200
    response = raw_response.json()
    assert response["name"] is not None
    assert response["name"] == "Chris"
    _set_user_name(instance, "chris", "Changed")
    # Restore the original value so the demo data stays stable for other tests.
    _set_user_name(instance, "chris", "Chris")
"44042", + "language": "en", + "timeZone": "Arizona", + "userType": "technician", + "password": "123123123121212Abc!", + "resources": ["44008", "44035", "44042"], + } + raw_response = instance.core.create_user( + login="test_user", data=json.dumps(new_data), response_type=FULL_RESPONSE + ) + assert raw_response.status_code == 200, raw_response.json() + logging.debug(pp.pformat(raw_response.json())) + response = raw_response.json() + logging.debug(pp.pformat(response)) + + assert response["name"] is not None + assert response["name"] == "Test Name" + + raw_response = instance.core.delete_user( + login="test_user", response_type=FULL_RESPONSE + ) + assert raw_response.status_code == 200 + logging.debug(pp.pformat(raw_response.json())) diff --git a/tests/test_metadata.py b/tests/test_metadata.py index 0e8467b..1267db3 100644 --- a/tests/test_metadata.py +++ b/tests/test_metadata.py @@ -5,7 +5,7 @@ from requests import Response from ofsc import OFSC -from ofsc.common import FULL_RESPONSE, JSON_RESPONSE, TEXT_RESPONSE +from ofsc.common import FULL_RESPONSE, OBJ_RESPONSE, TEXT_RESPONSE from ofsc.models import ( Condition, Property, @@ -18,18 +18,17 @@ ) -def test_get_workskills(instance): - logging.info("...Get all skills") +def test_get_workskills(instance, demo_data): metadata_response = instance.metadata.get_workskills(response_type=FULL_RESPONSE) response = metadata_response.json() + expected_workskills = demo_data.get("metadata").get("expected_workskills") assert response["totalResults"] is not None - assert response["totalResults"] == 6 # 22.B + assert response["totalResults"] == expected_workskills # 22.B assert response["items"][0]["label"] == "EST" assert response["items"][1]["name"] == "Residential" def test_get_workskill(instance): - logging.info("...Get one skill") metadata_response = instance.metadata.get_workskill( label="RES", response_type=FULL_RESPONSE ) @@ -39,14 +38,12 @@ def test_get_workskill(instance): def test_create_workskill(instance, pp): - 
logging.info("...create one skill") skill = Workskill(label="TEST", name="test", sharing=SharingEnum.maximal) - logging.warning(f"TEST.Create WorkSkill: IN: {skill.json()}") metadata_response = instance.metadata.create_or_update_workskill( skill=skill, response_type=FULL_RESPONSE ) response = metadata_response.json() - logging.info(pp.pformat(response)) + logging.debug(pp.pformat(response)) assert metadata_response.status_code < 299, response assert response["label"] == skill.label assert response["name"] == skill.name @@ -55,9 +52,7 @@ def test_create_workskill(instance, pp): def test_delete_workskill(instance): - logging.info("...delete one skill") skill = Workskill(label="TEST", name="test", sharing=SharingEnum.maximal) - logging.warning(skill.json()) metadata_response = instance.metadata.create_or_update_workskill( skill=skill, response_type=FULL_RESPONSE ) @@ -71,123 +66,75 @@ def test_delete_workskill(instance): assert metadata_response.status_code == 204 -def test_get_workskill_conditions(instance, pp): - logging.info("... get workskill conditions") +def test_get_workskill_conditions(instance, pp, demo_data): metadata_response = instance.metadata.get_workskill_conditions( response_type=FULL_RESPONSE ) + expected_workskill_conditions = demo_data.get("metadata").get( + "expected_workskill_conditions" + ) response = metadata_response.json() assert metadata_response.status_code == 200 logging.debug(pp.pformat(response)) assert response["totalResults"] is not None - assert response["totalResults"] == 7 + assert response["totalResults"] == expected_workskill_conditions for item in response["items"]: logging.debug(pp.pformat(item)) - ws_item = WorkskillCondition.parse_obj(item) + ws_item = WorkskillCondition.model_validate(item) logging.debug(pp.pformat(ws_item)) assert ws_item.label == item["label"] for condition in ws_item.conditions: assert type(condition) == Condition -def test_replace_workskill_conditions(instance, pp): - logging.info("... 
replace workskill conditions") - response = instance.metadata.get_workskill_conditions(response_type=JSON_RESPONSE) +def test_replace_workskill_conditions(instance, pp, demo_data): + response = instance.metadata.get_workskill_conditions(response_type=OBJ_RESPONSE) + expected_workskill_conditions = demo_data.get("metadata").get( + "expected_workskill_conditions" + ) assert response["totalResults"] is not None - assert response["totalResults"] == 7 - ws_list = WorskillConditionList.parse_obj(response["items"]) - metadata_response = instance.metadata.replace_workskill_conditions(ws_list) + assert response["totalResults"] == expected_workskill_conditions + ws_list = WorskillConditionList.model_validate(response["items"]) + metadata_response = instance.metadata.replace_workskill_conditions( + ws_list, response_type=FULL_RESPONSE + ) logging.debug(pp.pformat(metadata_response.text)) assert metadata_response.status_code == 200 assert response["totalResults"] is not None - assert response["totalResults"] == 7 + assert response["totalResults"] == expected_workskill_conditions def test_get_workzones(instance): - logging.info("...Get all workzones") metadata_response = instance.metadata.get_workzones( offset=0, limit=1000, response_type=FULL_RESPONSE ) response = metadata_response.json() assert response["totalResults"] is not None assert response["totalResults"] == 18 # 22.B - assert response["items"][0]["workZoneLabel"] == "ALTAMONTE SPRINGS" + assert response["items"][0]["workZoneLabel"] == "ALTAMONTE_SPRINGS" assert response["items"][1]["workZoneName"] == "CASSELBERRY" -def test_get_resource_types(instance): - logging.info("...Get all Resource Types") +def test_get_resource_types(instance, demo_data): metadata_response = instance.metadata.get_resource_types( response_type=FULL_RESPONSE ) response = metadata_response.json() assert response["totalResults"] is not None - assert response["totalResults"] == 9 # 22.D - - -def test_get_property(instance): - logging.info("...Get 
property info") - metadata_response = instance.metadata.get_property( - "XA_CASE_ACCOUNT", response_type=FULL_RESPONSE - ) - assert metadata_response.status_code == 200 - response = metadata_response.json() - logging.info(response) - assert response["label"] == "XA_CASE_ACCOUNT" - assert response["type"] == "string" - assert response["entity"] == "activity" - property = Property.parse_obj(response) - - -def test_get_properties(instance): - logging.info("...Get properties") - metadata_response = instance.metadata.get_properties(response_type=FULL_RESPONSE) - assert metadata_response.status_code == 200 - response = metadata_response.json() - assert response["totalResults"] - assert response["totalResults"] == 454 # 22.D - assert response["items"][0]["label"] == "ITEM_NUMBER" - - -def test_create_replace_property(instance: OFSC, request_logging, faker): - logging.info("... Create property") - property = Property.parse_obj( - { - "label": faker.pystr(), - "type": "string", - "entity": "activity", - "name": faker.pystr(), - "translations": [], - "gui": "text", - } - ) - property.translations.__root__.append(Translation(name=property.name)) - metadata_response = instance.metadata.create_or_replace_property( - property, response_type=FULL_RESPONSE + assert response["totalResults"] == demo_data.get("metadata").get( + "expected_resource_types" ) - logging.warning(metadata_response.json()) - assert metadata_response.status_code < 299, metadata_response.json() - - metadata_response = instance.metadata.get_property( - property.label, response_type=FULL_RESPONSE - ) - assert metadata_response.status_code < 299 - response = metadata_response.json() - assert response["name"] == property.name - assert response["type"] == property.type - assert response["entity"] == property.entity - property = Property.parse_obj(response) def test_import_plugin_file(instance: OFSC): - logging.info("... 
def test_translation_model_base():
    """A minimal translation payload validates and round-trips its fields."""
    payload = {"language": "en", "name": "Estimate", "languageISO": "en-US"}
    translation = Translation.model_validate(payload)
    assert translation.language == payload["language"]
    assert translation.name == payload["name"]


def test_translation_model_base_invalid():
    """Validation rejects a payload that is missing the required 'name' field."""
    payload = {"language": "xx", "Noname": "NoEstimate", "languageISO": "en-US"}
    with pytest.raises(ValidationError):
        Translation.model_validate(payload)
+47,257 @@ def test_translationlist_model_base(): {"language": "en", "name": "Estimate", "languageISO": "en-US"}, {"language": "es", "name": "Estimación"}, ] - objList = TranslationList.parse_obj(base) + objList = TranslationList.model_validate(base) for idx, obj in enumerate(objList): + assert type(obj) == Translation assert obj.language == base[idx]["language"] assert obj.name == base[idx]["name"] +def test_translationlist_model_json(): + base = [ + {"language": "en", "name": "Estimate", "languageISO": "en-US"}, + {"language": "es", "name": "Estimar"}, + ] + objList = TranslationList.model_validate(base) + assert json.loads(objList.model_dump_json())[0]["language"] == base[0]["language"] + assert json.loads(objList.model_dump_json())[1]["name"] == base[1]["name"] + + +def test_translation_map(): + base = [ + {"language": "en", "name": "Estimate", "languageISO": "en-US"}, + {"language": "es", "name": "Estimar"}, + ] + ## Map the list into a dictionary with the language as the key + objMap = TranslationList.model_validate(base).map() + assert objMap.get("en").name == "Estimate" + + +# region Activity Type Groups +def test_activity_type_group_model_base(): + base = { + "label": "customer", + "name": "Customer", + "activityTypes": [ + {"label": "4"}, + {"label": "5"}, + {"label": "6"}, + {"label": "7"}, + {"label": "8"}, + {"label": "installation"}, + {"label": "Testing"}, + {"label": "Multiday"}, + {"label": "SDI"}, + ], + "translations": [ + {"language": "en", "name": "Customer", "languageISO": "en-US"}, + {"language": "es", "name": "Cliente", "languageISO": "es-ES"}, + ], + "links": [ + { + "rel": "canonical", + "href": "https://.fs.ocs.oraclecloud.com/rest/ofscMetadata/v1/activityTypeGroups/customer", + }, + { + "rel": "describedby", + "href": "https://.fs.ocs.oraclecloud.com/rest/ofscMetadata/v1/metadata-catalog/activityTypeGroups", + }, + ], + } + obj = ActivityTypeGroup.model_validate(base) + assert obj.label == base["label"] + + +def 
test_activity_type_model_base(): + base = { + "label": "6", + "name": "Phone Install/Upgrade", + "active": True, + "groupLabel": "customer", + "defaultDuration": 48, + "timeSlots": [ + {"label": "08-10"}, + {"label": "10-12"}, + {"label": "13-15"}, + {"label": "15-17"}, + {"label": "all-day"}, + ], + "colors": { + "pending": "FFDE00", + "started": "5DBE3F", + "suspended": "99FFFF", + "cancelled": "80FF80", + "notdone": "60CECE", + "notOrdered": "FFCC99", + "warning": "FFAAAA", + "completed": "79B6EB", + }, + "features": { + "isTeamworkAvailable": False, + "isSegmentingEnabled": False, + "allowMoveBetweenResources": False, + "allowCreationInBuckets": False, + "allowReschedule": True, + "supportOfNotOrderedActivities": True, + "allowNonScheduled": True, + "supportOfWorkZones": True, + "supportOfWorkSkills": True, + "supportOfTimeSlots": True, + "supportOfInventory": True, + "supportOfLinks": True, + "supportOfPreferredResources": True, + "allowMassActivities": True, + "allowRepeatingActivities": True, + "calculateTravel": True, + "calculateActivityDurationUsingStatistics": True, + "allowToSearch": True, + "allowToCreateFromIncomingInterface": True, + "enableDayBeforeTrigger": True, + "enableReminderAndChangeTriggers": True, + "enableNotStartedTrigger": True, + "enableSwWarningTrigger": True, + "calculateDeliveryWindow": True, + "slaAndServiceWindowUseCustomerTimeZone": True, + "supportOfRequiredInventory": True, + "disableLocationTracking": False, + }, + "translations": [ + {"language": "en", "name": "Phone Install/Upgrade", "languageISO": "en-US"}, + { + "language": "es", + "name": "Install/Upgrade: Telefono", + "languageISO": "es-ES", + }, + ], + "links": [ + { + "rel": "canonical", + "href": "https://.fs.ocs.oraclecloud.com/rest/ofscMetadata/v1/activityTypes/6", + }, + { + "rel": "describedby", + "href": "https://.fs.ocs.oraclecloud.com/rest/ofscMetadata/v1/metadata-catalog/activityTypes", + }, + ], + } + obj = ActivityType.model_validate(base) + assert obj.label 
== base["label"] + + +# endregion + +# region Capacity Areas + + +def test_capacity_area_model_base(): + base = { + "label": "CapacityArea", + "name": "Capacity Area", + "type": "area", + "status": "active", + "workZones": { + "href": "https://.fs.ocs.oraclecloud.com/rest/ofscMetadata/v1/capacityAreas/CapacityArea/workZones" + }, + "organizations": { + "href": "https://.fs.ocs.oraclecloud.com/rest/ofscMetadata/v1/capacityAreas/CapacityArea/organizations" + }, + "capacityCategories": { + "href": "https://.fs.ocs.oraclecloud.com/rest/ofscMetadata/v1/capacityAreas/CapacityArea/capacityCategories" + }, + "timeIntervals": { + "href": "https://.fs.ocs.oraclecloud.com/rest/ofscMetadata/v1/capacityAreas/CapacityArea/timeIntervals" + }, + "timeSlots": { + "href": "https://.fs.ocs.oraclecloud.com/rest/ofscMetadata/v1/capacityAreas/CapacityArea/timeSlots" + }, + "parentLabel": "66000", + "configuration": { + "definitionLevel": ["day"], + "isAllowCloseOnWorkzoneLevel": False, + "byDay": "percentIncludeOtherActivities", + "byCapacityCategory": "minutes", + "byTimeSlot": "minutes", + "isTimeSlotBase": False, + }, + } + obj = CapacityArea.model_validate(base) + assert obj.label == base["label"] + + +def test_capacity_area_list_model_base(): + base = { + "items": [ + { + "label": "22", + "name": "Sunrise Enterprise", + "type": "group", + "status": "active", + }, + { + "label": "ASIA", + "name": "Asia", + "type": "area", + "status": "active", + "parent": {"label": "22"}, + }, + { + "label": "EUROPE", + "name": "Europe", + "type": "area", + "status": "active", + "parent": {"label": "22"}, + }, + { + "label": "66000", + "name": "Newfoundland", + "type": "group", + "status": "active", + "parent": {"label": "22"}, + }, + { + "label": "CapacityArea", + "name": "Capacity Area", + "type": "area", + "status": "active", + "parent": {"label": "66000"}, + }, + { + "label": "routing", + "name": "Planning", + "type": "area", + "status": "active", + "parent": {"label": "22"}, + }, + { + "label": 
"S??o Jos??", + "name": "S??o Jos?? dos Campos", + "type": "area", + "status": "active", + "parent": {"label": "22"}, + }, + { + "label": "Texasin", + "name": "Texas inventories", + "type": "group", + "status": "active", + "parent": {"label": "22"}, + }, + { + "label": "routing_bucket_T", + "name": "Texas City", + "type": "area", + "status": "active", + "parent": {"label": "Texasin"}, + }, + ] + } + + obj = CapacityAreaListResponse.model_validate(base) + + +# endregion +# region Workskills def test_workskill_model_base(): base = { "label": "EST", @@ -62,15 +318,111 @@ def test_workskill_model_base(): }, ], } - obj = Workskill.parse_obj(base) + obj = Workskill.model_validate(base) assert obj.label == base["label"] assert obj.active == base["active"] assert obj.name == base["name"] assert obj.sharing == base["sharing"] - assert obj.translations == TranslationList.parse_obj(base["translations"]) + assert obj.translations == TranslationList.model_validate(base["translations"]) + assert json.loads(obj.model_dump_json())["label"] == base["label"] def test_workskilllist_connected(instance): - metadata_response = instance.metadata.get_workskills(response_type=JSON_RESPONSE) - logging.warning(json.dumps(metadata_response, indent=4)) - objList = WorkskillList.parse_obj(metadata_response["items"]) + metadata_response = instance.metadata.get_workskills(response_type=OBJ_RESPONSE) + logging.debug(json.dumps(metadata_response, indent=4)) + objList = WorkskillList.model_validate(metadata_response["items"]) + + +# endregion +# region Capacity Categories +capacityCategoryList = { + "hasMore": True, + "totalResults": 8, + "limit": 1, + "offset": 2, + "items": [ + { + "label": "UP", + "name": "Upgrade", + "active": True, + "workSkills": [{"label": "UP", "ratio": 1, "startDate": "2000-01-01"}], + "workSkillGroups": [], + "timeSlots": [ + {"label": "08-10"}, + {"label": "10-12"}, + {"label": "13-15"}, + {"label": "15-17"}, + ], + "translations": [ + {"language": "en", "name": 
"Upgrade", "languageISO": "en-US"}, + {"language": "es", "name": "Upgrade", "languageISO": "es-ES"}, + {"language": "fr", "name": "Upgrade", "languageISO": "fr-FR"}, + {"language": "nl", "name": "Upgrade", "languageISO": "nl-NL"}, + {"language": "de", "name": "Upgrade", "languageISO": "de-DE"}, + {"language": "ro", "name": "Upgrade", "languageISO": "ro-RO"}, + { + "language": "ru", + "name": "????????????????????????", + "languageISO": "ru-RU", + }, + {"language": "br", "name": "Upgrade", "languageISO": "pt-BR"}, + ], + "links": [ + { + "rel": "canonical", + "href": "https://.fs.ocs.oraclecloud.com/rest/ofscMetadata/v1/capacityCategories/UP", + }, + { + "rel": "describedby", + "href": "https://.fs.ocs.oraclecloud.com/rest/ofscMetadata/v1/metadata-catalog/capacityCategories", + }, + ], + } + ], + "links": [ + { + "rel": "canonical", + "href": "https://.fs.ocs.oraclecloud.com/rest/ofscMetadata/v1/capacityCategories?limit=1&offset=2", + }, + { + "rel": "prev", + "href": "https://.fs.ocs.oraclecloud.com/rest/ofscMetadata/v1/capacityCategories?offset=1", + }, + { + "rel": "next", + "href": "https://.fs.ocs.oraclecloud.com/rest/ofscMetadata/v1/capacityCategories?offset=3", + }, + { + "rel": "describedby", + "href": "https://.fs.ocs.oraclecloud.com/rest/ofscMetadata/v1/metadata-catalog/capacityCategories", + }, + ], +} + + +def test_capacity_category_model_list(): + objList = CapacityCategoryListResponse.model_validate(capacityCategoryList) + assert objList.hasMore == capacityCategoryList["hasMore"] + assert objList.totalResults == capacityCategoryList["totalResults"] + assert objList.limit == capacityCategoryList["limit"] + assert objList.offset == capacityCategoryList["offset"] + assert len(objList.items) == len(capacityCategoryList["items"]) + assert objList.links == capacityCategoryList["links"] + for idx, item in enumerate(objList.items): + assert item.label == capacityCategoryList["items"][idx]["label"] + assert item.name == 
capacityCategoryList["items"][idx]["name"] + assert item.active == capacityCategoryList["items"][idx]["active"] + assert item.timeSlots == ItemList.model_validate( + capacityCategoryList["items"][idx]["timeSlots"] + ) + assert item.translations == TranslationList.model_validate( + capacityCategoryList["items"][idx]["translations"] + ) + assert item.links == capacityCategoryList["items"][idx]["links"] + # assert item.workSkills == capacityCategoryList["items"][idx]["workSkills"] + assert item.workSkillGroups == ItemList.model_validate( + capacityCategoryList["items"][idx]["workSkillGroups"] + ) + + +# endregion